mirror of
https://github.com/lancedb/lancedb.git
synced 2025-12-23 21:39:57 +00:00
Compare commits
42 Commits
python-v0.
...
python-v0.
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
2c36767f20 | ||
|
|
1fa7e96aa1 | ||
|
|
7ae327242b | ||
|
|
1f4a051070 | ||
|
|
92c93b08bf | ||
|
|
a363b02ca7 | ||
|
|
ff8eaab894 | ||
|
|
11959cc5d6 | ||
|
|
7c65cec8d7 | ||
|
|
82621d5b13 | ||
|
|
0708428357 | ||
|
|
137d86d3c5 | ||
|
|
bb2e624ff0 | ||
|
|
fdc949bafb | ||
|
|
31be9212da | ||
|
|
cef24801f4 | ||
|
|
b4436e0804 | ||
|
|
58c2cd01a5 | ||
|
|
a1a1891c0c | ||
|
|
3c6c21c137 | ||
|
|
fd5ca20f34 | ||
|
|
ef30f87fd1 | ||
|
|
08d25c5a80 | ||
|
|
a5ff623443 | ||
|
|
b8ccea9f71 | ||
|
|
46c6ff889d | ||
|
|
12b3c87964 | ||
|
|
020a437230 | ||
|
|
34f1aeb84c | ||
|
|
5c3a88b6b2 | ||
|
|
e780b2f51c | ||
|
|
b8a1719174 | ||
|
|
ccded130ed | ||
|
|
48f8d1b3b7 | ||
|
|
865ed99881 | ||
|
|
d6485f1215 | ||
|
|
79a1667753 | ||
|
|
a866b78a31 | ||
|
|
c7d37b3e6e | ||
|
|
4b71552b73 | ||
|
|
5ce5f64da3 | ||
|
|
c582b0fc63 |
@@ -1,5 +1,5 @@
|
|||||||
[tool.bumpversion]
|
[tool.bumpversion]
|
||||||
current_version = "0.5.2"
|
current_version = "0.7.0"
|
||||||
parse = """(?x)
|
parse = """(?x)
|
||||||
(?P<major>0|[1-9]\\d*)\\.
|
(?P<major>0|[1-9]\\d*)\\.
|
||||||
(?P<minor>0|[1-9]\\d*)\\.
|
(?P<minor>0|[1-9]\\d*)\\.
|
||||||
|
|||||||
4
.github/workflows/docs_test.yml
vendored
4
.github/workflows/docs_test.yml
vendored
@@ -24,7 +24,7 @@ env:
|
|||||||
jobs:
|
jobs:
|
||||||
test-python:
|
test-python:
|
||||||
name: Test doc python code
|
name: Test doc python code
|
||||||
runs-on: "buildjet-8vcpu-ubuntu-2204"
|
runs-on: "warp-ubuntu-latest-x64-4x"
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v4
|
||||||
@@ -56,7 +56,7 @@ jobs:
|
|||||||
for d in *; do cd "$d"; echo "$d".py; python "$d".py; cd ..; done
|
for d in *; do cd "$d"; echo "$d".py; python "$d".py; cd ..; done
|
||||||
test-node:
|
test-node:
|
||||||
name: Test doc nodejs code
|
name: Test doc nodejs code
|
||||||
runs-on: "buildjet-8vcpu-ubuntu-2204"
|
runs-on: "warp-ubuntu-latest-x64-4x"
|
||||||
timeout-minutes: 60
|
timeout-minutes: 60
|
||||||
strategy:
|
strategy:
|
||||||
fail-fast: false
|
fail-fast: false
|
||||||
|
|||||||
1
.gitignore
vendored
1
.gitignore
vendored
@@ -4,6 +4,7 @@
|
|||||||
**/__pycache__
|
**/__pycache__
|
||||||
.DS_Store
|
.DS_Store
|
||||||
venv
|
venv
|
||||||
|
.venv
|
||||||
|
|
||||||
.vscode
|
.vscode
|
||||||
.zed
|
.zed
|
||||||
|
|||||||
@@ -14,8 +14,8 @@ repos:
|
|||||||
hooks:
|
hooks:
|
||||||
- id: local-biome-check
|
- id: local-biome-check
|
||||||
name: biome check
|
name: biome check
|
||||||
entry: npx @biomejs/biome@1.7.3 check --config-path nodejs/biome.json nodejs/
|
entry: npx @biomejs/biome@1.8.3 check --config-path nodejs/biome.json nodejs/
|
||||||
language: system
|
language: system
|
||||||
types: [text]
|
types: [text]
|
||||||
files: "nodejs/.*"
|
files: "nodejs/.*"
|
||||||
exclude: nodejs/lancedb/native.d.ts|nodejs/dist/.*
|
exclude: nodejs/lancedb/native.d.ts|nodejs/dist/.*|nodejs/examples/.*
|
||||||
|
|||||||
10
Cargo.toml
10
Cargo.toml
@@ -20,11 +20,11 @@ keywords = ["lancedb", "lance", "database", "vector", "search"]
|
|||||||
categories = ["database-implementations"]
|
categories = ["database-implementations"]
|
||||||
|
|
||||||
[workspace.dependencies]
|
[workspace.dependencies]
|
||||||
lance = { "version" = "=0.13.0", "features" = ["dynamodb"] }
|
lance = { "version" = "=0.14.1", "features" = ["dynamodb"] }
|
||||||
lance-index = { "version" = "=0.13.0" }
|
lance-index = { "version" = "=0.14.1" }
|
||||||
lance-linalg = { "version" = "=0.13.0" }
|
lance-linalg = { "version" = "=0.14.1" }
|
||||||
lance-testing = { "version" = "=0.13.0" }
|
lance-testing = { "version" = "=0.14.1" }
|
||||||
lance-datafusion = { "version" = "=0.13.0" }
|
lance-datafusion = { "version" = "=0.14.1" }
|
||||||
# Note that this one does not include pyarrow
|
# Note that this one does not include pyarrow
|
||||||
arrow = { version = "51.0", optional = false }
|
arrow = { version = "51.0", optional = false }
|
||||||
arrow-array = "51.0"
|
arrow-array = "51.0"
|
||||||
|
|||||||
@@ -57,6 +57,8 @@ plugins:
|
|||||||
- https://arrow.apache.org/docs/objects.inv
|
- https://arrow.apache.org/docs/objects.inv
|
||||||
- https://pandas.pydata.org/docs/objects.inv
|
- https://pandas.pydata.org/docs/objects.inv
|
||||||
- mkdocs-jupyter
|
- mkdocs-jupyter
|
||||||
|
- render_swagger:
|
||||||
|
allow_arbitrary_locations : true
|
||||||
|
|
||||||
markdown_extensions:
|
markdown_extensions:
|
||||||
- admonition
|
- admonition
|
||||||
@@ -100,15 +102,18 @@ nav:
|
|||||||
- Linear Combination Reranker: reranking/linear_combination.md
|
- Linear Combination Reranker: reranking/linear_combination.md
|
||||||
- Cross Encoder Reranker: reranking/cross_encoder.md
|
- Cross Encoder Reranker: reranking/cross_encoder.md
|
||||||
- ColBERT Reranker: reranking/colbert.md
|
- ColBERT Reranker: reranking/colbert.md
|
||||||
|
- Jina Reranker: reranking/jina.md
|
||||||
- OpenAI Reranker: reranking/openai.md
|
- OpenAI Reranker: reranking/openai.md
|
||||||
- Building Custom Rerankers: reranking/custom_reranker.md
|
- Building Custom Rerankers: reranking/custom_reranker.md
|
||||||
|
- Example: notebooks/lancedb_reranking.ipynb
|
||||||
- Filtering: sql.md
|
- Filtering: sql.md
|
||||||
- Versioning & Reproducibility: notebooks/reproducibility.ipynb
|
- Versioning & Reproducibility: notebooks/reproducibility.ipynb
|
||||||
- Configuring Storage: guides/storage.md
|
- Configuring Storage: guides/storage.md
|
||||||
- Sync -> Async Migration Guide: migration.md
|
- Migration Guide: migration.md
|
||||||
- Tuning retrieval performance:
|
- Tuning retrieval performance:
|
||||||
- Choosing right query type: guides/tuning_retrievers/1_query_types.md
|
- Choosing right query type: guides/tuning_retrievers/1_query_types.md
|
||||||
- Reranking: guides/tuning_retrievers/2_reranking.md
|
- Reranking: guides/tuning_retrievers/2_reranking.md
|
||||||
|
- Embedding fine-tuning: guides/tuning_retrievers/3_embed_tuning.md
|
||||||
- 🧬 Managing embeddings:
|
- 🧬 Managing embeddings:
|
||||||
- Overview: embeddings/index.md
|
- Overview: embeddings/index.md
|
||||||
- Embedding functions: embeddings/embedding_functions.md
|
- Embedding functions: embeddings/embedding_functions.md
|
||||||
@@ -123,10 +128,11 @@ nav:
|
|||||||
- DuckDB: python/duckdb.md
|
- DuckDB: python/duckdb.md
|
||||||
- LangChain:
|
- LangChain:
|
||||||
- LangChain 🔗: integrations/langchain.md
|
- LangChain 🔗: integrations/langchain.md
|
||||||
|
- LangChain demo: notebooks/langchain_demo.ipynb
|
||||||
- LangChain JS/TS 🔗: https://js.langchain.com/docs/integrations/vectorstores/lancedb
|
- LangChain JS/TS 🔗: https://js.langchain.com/docs/integrations/vectorstores/lancedb
|
||||||
- LlamaIndex 🦙:
|
- LlamaIndex 🦙:
|
||||||
- LlamaIndex docs: integrations/llamaIndex.md
|
- LlamaIndex docs: integrations/llamaIndex.md
|
||||||
- LlamaIndex demo: https://docs.llamaindex.ai/en/stable/examples/vector_stores/LanceDBIndexDemo/
|
- LlamaIndex demo: notebooks/llamaIndex_demo.ipynb
|
||||||
- Pydantic: python/pydantic.md
|
- Pydantic: python/pydantic.md
|
||||||
- Voxel51: integrations/voxel51.md
|
- Voxel51: integrations/voxel51.md
|
||||||
- PromptTools: integrations/prompttools.md
|
- PromptTools: integrations/prompttools.md
|
||||||
@@ -158,6 +164,7 @@ nav:
|
|||||||
- API reference:
|
- API reference:
|
||||||
- 🐍 Python: python/saas-python.md
|
- 🐍 Python: python/saas-python.md
|
||||||
- 👾 JavaScript: javascript/modules.md
|
- 👾 JavaScript: javascript/modules.md
|
||||||
|
- REST API: cloud/rest.md
|
||||||
|
|
||||||
- Quick start: basic.md
|
- Quick start: basic.md
|
||||||
- Concepts:
|
- Concepts:
|
||||||
@@ -180,15 +187,18 @@ nav:
|
|||||||
- Linear Combination Reranker: reranking/linear_combination.md
|
- Linear Combination Reranker: reranking/linear_combination.md
|
||||||
- Cross Encoder Reranker: reranking/cross_encoder.md
|
- Cross Encoder Reranker: reranking/cross_encoder.md
|
||||||
- ColBERT Reranker: reranking/colbert.md
|
- ColBERT Reranker: reranking/colbert.md
|
||||||
|
- Jina Reranker: reranking/jina.md
|
||||||
- OpenAI Reranker: reranking/openai.md
|
- OpenAI Reranker: reranking/openai.md
|
||||||
- Building Custom Rerankers: reranking/custom_reranker.md
|
- Building Custom Rerankers: reranking/custom_reranker.md
|
||||||
|
- Example: notebooks/lancedb_reranking.ipynb
|
||||||
- Filtering: sql.md
|
- Filtering: sql.md
|
||||||
- Versioning & Reproducibility: notebooks/reproducibility.ipynb
|
- Versioning & Reproducibility: notebooks/reproducibility.ipynb
|
||||||
- Configuring Storage: guides/storage.md
|
- Configuring Storage: guides/storage.md
|
||||||
- Sync -> Async Migration Guide: migration.md
|
- Migration Guide: migration.md
|
||||||
- Tuning retrieval performance:
|
- Tuning retrieval performance:
|
||||||
- Choosing right query type: guides/tuning_retrievers/1_query_types.md
|
- Choosing right query type: guides/tuning_retrievers/1_query_types.md
|
||||||
- Reranking: guides/tuning_retrievers/2_reranking.md
|
- Reranking: guides/tuning_retrievers/2_reranking.md
|
||||||
|
- Embedding fine-tuning: guides/tuning_retrievers/3_embed_tuning.md
|
||||||
- Managing Embeddings:
|
- Managing Embeddings:
|
||||||
- Overview: embeddings/index.md
|
- Overview: embeddings/index.md
|
||||||
- Embedding functions: embeddings/embedding_functions.md
|
- Embedding functions: embeddings/embedding_functions.md
|
||||||
@@ -201,9 +211,9 @@ nav:
|
|||||||
- Pandas and PyArrow: python/pandas_and_pyarrow.md
|
- Pandas and PyArrow: python/pandas_and_pyarrow.md
|
||||||
- Polars: python/polars_arrow.md
|
- Polars: python/polars_arrow.md
|
||||||
- DuckDB: python/duckdb.md
|
- DuckDB: python/duckdb.md
|
||||||
- LangChain 🦜️🔗↗: https://python.langchain.com/docs/integrations/vectorstores/lancedb
|
- LangChain 🦜️🔗↗: integrations/langchain.md
|
||||||
- LangChain.js 🦜️🔗↗: https://js.langchain.com/docs/integrations/vectorstores/lancedb
|
- LangChain.js 🦜️🔗↗: https://js.langchain.com/docs/integrations/vectorstores/lancedb
|
||||||
- LlamaIndex 🦙↗: https://gpt-index.readthedocs.io/en/latest/examples/vector_stores/LanceDBIndexDemo.html
|
- LlamaIndex 🦙↗: integrations/llamaIndex.md
|
||||||
- Pydantic: python/pydantic.md
|
- Pydantic: python/pydantic.md
|
||||||
- Voxel51: integrations/voxel51.md
|
- Voxel51: integrations/voxel51.md
|
||||||
- PromptTools: integrations/prompttools.md
|
- PromptTools: integrations/prompttools.md
|
||||||
@@ -228,6 +238,7 @@ nav:
|
|||||||
- API reference:
|
- API reference:
|
||||||
- 🐍 Python: python/saas-python.md
|
- 🐍 Python: python/saas-python.md
|
||||||
- 👾 JavaScript: javascript/modules.md
|
- 👾 JavaScript: javascript/modules.md
|
||||||
|
- REST API: cloud/rest.md
|
||||||
|
|
||||||
extra_css:
|
extra_css:
|
||||||
- styles/global.css
|
- styles/global.css
|
||||||
|
|||||||
487
docs/openapi.yml
Normal file
487
docs/openapi.yml
Normal file
@@ -0,0 +1,487 @@
|
|||||||
|
openapi: 3.1.0
|
||||||
|
info:
|
||||||
|
version: 1.0.0
|
||||||
|
title: LanceDB Cloud API
|
||||||
|
description: |
|
||||||
|
LanceDB Cloud API is a RESTful API that allows users to access and modify data stored in LanceDB Cloud.
|
||||||
|
Table actions are considered temporary resource creations and all use POST method.
|
||||||
|
contact:
|
||||||
|
name: LanceDB support
|
||||||
|
url: https://lancedb.com
|
||||||
|
email: contact@lancedb.com
|
||||||
|
|
||||||
|
servers:
|
||||||
|
- url: https://{db}.{region}.api.lancedb.com
|
||||||
|
description: LanceDB Cloud REST endpoint.
|
||||||
|
variables:
|
||||||
|
db:
|
||||||
|
default: ""
|
||||||
|
description: the name of DB
|
||||||
|
region:
|
||||||
|
default: "us-east-1"
|
||||||
|
description: the service region of the DB
|
||||||
|
|
||||||
|
security:
|
||||||
|
- key_auth: []
|
||||||
|
|
||||||
|
components:
|
||||||
|
securitySchemes:
|
||||||
|
key_auth:
|
||||||
|
name: x-api-key
|
||||||
|
type: apiKey
|
||||||
|
in: header
|
||||||
|
parameters:
|
||||||
|
table_name:
|
||||||
|
name: name
|
||||||
|
in: path
|
||||||
|
description: name of the table
|
||||||
|
required: true
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
responses:
|
||||||
|
invalid_request:
|
||||||
|
description: Invalid request
|
||||||
|
content:
|
||||||
|
text/plain:
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
not_found:
|
||||||
|
description: Not found
|
||||||
|
content:
|
||||||
|
text/plain:
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
unauthorized:
|
||||||
|
description: Unauthorized
|
||||||
|
content:
|
||||||
|
text/plain:
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
requestBodies:
|
||||||
|
arrow_stream_buffer:
|
||||||
|
description: Arrow IPC stream buffer
|
||||||
|
required: true
|
||||||
|
content:
|
||||||
|
application/vnd.apache.arrow.stream:
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
format: binary
|
||||||
|
|
||||||
|
paths:
|
||||||
|
/v1/table/:
|
||||||
|
get:
|
||||||
|
description: List tables, optionally, with pagination.
|
||||||
|
tags:
|
||||||
|
- Tables
|
||||||
|
summary: List Tables
|
||||||
|
operationId: listTables
|
||||||
|
parameters:
|
||||||
|
- name: limit
|
||||||
|
in: query
|
||||||
|
description: Limits the number of items to return.
|
||||||
|
schema:
|
||||||
|
type: integer
|
||||||
|
- name: page_token
|
||||||
|
in: query
|
||||||
|
description: Specifies the starting position of the next query
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
responses:
|
||||||
|
"200":
|
||||||
|
description: Successfully returned a list of tables in the DB
|
||||||
|
content:
|
||||||
|
application/json:
|
||||||
|
schema:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
tables:
|
||||||
|
type: array
|
||||||
|
items:
|
||||||
|
type: string
|
||||||
|
page_token:
|
||||||
|
type: string
|
||||||
|
|
||||||
|
"400":
|
||||||
|
$ref: "#/components/responses/invalid_request"
|
||||||
|
"401":
|
||||||
|
$ref: "#/components/responses/unauthorized"
|
||||||
|
"404":
|
||||||
|
$ref: "#/components/responses/not_found"
|
||||||
|
|
||||||
|
/v1/table/{name}/create/:
|
||||||
|
post:
|
||||||
|
description: Create a new table
|
||||||
|
summary: Create a new table
|
||||||
|
operationId: createTable
|
||||||
|
tags:
|
||||||
|
- Tables
|
||||||
|
parameters:
|
||||||
|
- $ref: "#/components/parameters/table_name"
|
||||||
|
requestBody:
|
||||||
|
$ref: "#/components/requestBodies/arrow_stream_buffer"
|
||||||
|
responses:
|
||||||
|
"200":
|
||||||
|
description: Table successfully created
|
||||||
|
"400":
|
||||||
|
$ref: "#/components/responses/invalid_request"
|
||||||
|
"401":
|
||||||
|
$ref: "#/components/responses/unauthorized"
|
||||||
|
"404":
|
||||||
|
$ref: "#/components/responses/not_found"
|
||||||
|
|
||||||
|
/v1/table/{name}/query/:
|
||||||
|
post:
|
||||||
|
description: Vector Query
|
||||||
|
url: https://{db-uri}.{aws-region}.api.lancedb.com/v1/table/{name}/query/
|
||||||
|
tags:
|
||||||
|
- Data
|
||||||
|
summary: Vector Query
|
||||||
|
parameters:
|
||||||
|
- $ref: "#/components/parameters/table_name"
|
||||||
|
requestBody:
|
||||||
|
required: true
|
||||||
|
content:
|
||||||
|
application/json:
|
||||||
|
schema:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
vector:
|
||||||
|
type: FixedSizeList
|
||||||
|
description: |
|
||||||
|
The targetted vector to search for. Required.
|
||||||
|
vector_column:
|
||||||
|
type: string
|
||||||
|
description: |
|
||||||
|
The column to query, it can be inferred from the schema if there is only one vector column.
|
||||||
|
prefilter:
|
||||||
|
type: boolean
|
||||||
|
description: |
|
||||||
|
Whether to prefilter the data. Optional.
|
||||||
|
k:
|
||||||
|
type: integer
|
||||||
|
description: |
|
||||||
|
The number of search results to return. Default is 10.
|
||||||
|
distance_type:
|
||||||
|
type: string
|
||||||
|
description: |
|
||||||
|
The distance metric to use for search. L2, Cosine, Dot and Hamming are supported. Default is L2.
|
||||||
|
bypass_vector_index:
|
||||||
|
type: boolean
|
||||||
|
description: |
|
||||||
|
Whether to bypass vector index. Optional.
|
||||||
|
filter:
|
||||||
|
type: string
|
||||||
|
description: |
|
||||||
|
A filter expression that specifies the rows to query. Optional.
|
||||||
|
columns:
|
||||||
|
type: array
|
||||||
|
items:
|
||||||
|
type: string
|
||||||
|
description: |
|
||||||
|
The columns to return. Optional.
|
||||||
|
nprobe:
|
||||||
|
type: integer
|
||||||
|
description: |
|
||||||
|
The number of probes to use for search. Optional.
|
||||||
|
refine_factor:
|
||||||
|
type: integer
|
||||||
|
description: |
|
||||||
|
The refine factor to use for search. Optional.
|
||||||
|
default: null
|
||||||
|
fast_search:
|
||||||
|
type: boolean
|
||||||
|
description: |
|
||||||
|
Whether to use fast search. Optional.
|
||||||
|
default: false
|
||||||
|
required:
|
||||||
|
- vector
|
||||||
|
|
||||||
|
responses:
|
||||||
|
"200":
|
||||||
|
description: top k results if query is successfully executed
|
||||||
|
content:
|
||||||
|
application/json:
|
||||||
|
schema:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
results:
|
||||||
|
type: array
|
||||||
|
items:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
id:
|
||||||
|
type: integer
|
||||||
|
selected_col_1_to_return:
|
||||||
|
type: col_1_type
|
||||||
|
selected_col_n_to_return:
|
||||||
|
type: col_n_type
|
||||||
|
_distance:
|
||||||
|
type: float
|
||||||
|
|
||||||
|
"400":
|
||||||
|
$ref: "#/components/responses/invalid_request"
|
||||||
|
"401":
|
||||||
|
$ref: "#/components/responses/unauthorized"
|
||||||
|
"404":
|
||||||
|
$ref: "#/components/responses/not_found"
|
||||||
|
|
||||||
|
/v1/table/{name}/insert/:
|
||||||
|
post:
|
||||||
|
description: Insert new data to the Table.
|
||||||
|
tags:
|
||||||
|
- Data
|
||||||
|
operationId: insertData
|
||||||
|
summary: Insert new data.
|
||||||
|
parameters:
|
||||||
|
- $ref: "#/components/parameters/table_name"
|
||||||
|
requestBody:
|
||||||
|
$ref: "#/components/requestBodies/arrow_stream_buffer"
|
||||||
|
responses:
|
||||||
|
"200":
|
||||||
|
description: Insert successful
|
||||||
|
"400":
|
||||||
|
$ref: "#/components/responses/invalid_request"
|
||||||
|
"401":
|
||||||
|
$ref: "#/components/responses/unauthorized"
|
||||||
|
"404":
|
||||||
|
$ref: "#/components/responses/not_found"
|
||||||
|
/v1/table/{name}/merge_insert/:
|
||||||
|
post:
|
||||||
|
description: Create a "merge insert" operation
|
||||||
|
This operation can add rows, update rows, and remove rows all in a single
|
||||||
|
transaction. See python method `lancedb.table.Table.merge_insert` for examples.
|
||||||
|
tags:
|
||||||
|
- Data
|
||||||
|
summary: Merge Insert
|
||||||
|
operationId: mergeInsert
|
||||||
|
parameters:
|
||||||
|
- $ref: "#/components/parameters/table_name"
|
||||||
|
- name: on
|
||||||
|
in: query
|
||||||
|
description: |
|
||||||
|
The column to use as the primary key for the merge operation.
|
||||||
|
required: true
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
- name: when_matched_update_all
|
||||||
|
in: query
|
||||||
|
description: |
|
||||||
|
Rows that exist in both the source table (new data) and
|
||||||
|
the target table (old data) will be updated, replacing
|
||||||
|
the old row with the corresponding matching row.
|
||||||
|
required: false
|
||||||
|
schema:
|
||||||
|
type: boolean
|
||||||
|
- name: when_matched_update_all_filt
|
||||||
|
in: query
|
||||||
|
description: |
|
||||||
|
If present then only rows that satisfy the filter expression will
|
||||||
|
be updated
|
||||||
|
required: false
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
- name: when_not_matched_insert_all
|
||||||
|
in: query
|
||||||
|
description: |
|
||||||
|
Rows that exist only in the source table (new data) will be
|
||||||
|
inserted into the target table (old data).
|
||||||
|
required: false
|
||||||
|
schema:
|
||||||
|
type: boolean
|
||||||
|
- name: when_not_matched_by_source_delete
|
||||||
|
in: query
|
||||||
|
description: |
|
||||||
|
Rows that exist only in the target table (old data) will be
|
||||||
|
deleted. An optional condition (`when_not_matched_by_source_delete_filt`)
|
||||||
|
can be provided to limit what data is deleted.
|
||||||
|
required: false
|
||||||
|
schema:
|
||||||
|
type: boolean
|
||||||
|
- name: when_not_matched_by_source_delete_filt
|
||||||
|
in: query
|
||||||
|
description: |
|
||||||
|
The filter expression that specifies the rows to delete.
|
||||||
|
required: false
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
requestBody:
|
||||||
|
$ref: "#/components/requestBodies/arrow_stream_buffer"
|
||||||
|
responses:
|
||||||
|
"200":
|
||||||
|
description: Merge Insert successful
|
||||||
|
"400":
|
||||||
|
$ref: "#/components/responses/invalid_request"
|
||||||
|
"401":
|
||||||
|
$ref: "#/components/responses/unauthorized"
|
||||||
|
"404":
|
||||||
|
$ref: "#/components/responses/not_found"
|
||||||
|
/v1/table/{name}/delete/:
|
||||||
|
post:
|
||||||
|
description: Delete rows from a table.
|
||||||
|
tags:
|
||||||
|
- Data
|
||||||
|
summary: Delete rows from a table
|
||||||
|
operationId: deleteData
|
||||||
|
parameters:
|
||||||
|
- $ref: "#/components/parameters/table_name"
|
||||||
|
requestBody:
|
||||||
|
required: true
|
||||||
|
content:
|
||||||
|
application/json:
|
||||||
|
schema:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
predicate:
|
||||||
|
type: string
|
||||||
|
description: |
|
||||||
|
A filter expression that specifies the rows to delete.
|
||||||
|
responses:
|
||||||
|
"200":
|
||||||
|
description: Delete successful
|
||||||
|
"401":
|
||||||
|
$ref: "#/components/responses/unauthorized"
|
||||||
|
/v1/table/{name}/drop/:
|
||||||
|
post:
|
||||||
|
description: Drop a table
|
||||||
|
tags:
|
||||||
|
- Tables
|
||||||
|
summary: Drop a table
|
||||||
|
operationId: dropTable
|
||||||
|
parameters:
|
||||||
|
- $ref: "#/components/parameters/table_name"
|
||||||
|
requestBody:
|
||||||
|
$ref: "#/components/requestBodies/arrow_stream_buffer"
|
||||||
|
responses:
|
||||||
|
"200":
|
||||||
|
description: Drop successful
|
||||||
|
"401":
|
||||||
|
$ref: "#/components/responses/unauthorized"
|
||||||
|
|
||||||
|
/v1/table/{name}/describe/:
|
||||||
|
post:
|
||||||
|
description: Describe a table and return Table Information.
|
||||||
|
tags:
|
||||||
|
- Tables
|
||||||
|
summary: Describe a table
|
||||||
|
operationId: describeTable
|
||||||
|
parameters:
|
||||||
|
- $ref: "#/components/parameters/table_name"
|
||||||
|
responses:
|
||||||
|
"200":
|
||||||
|
description: Table information
|
||||||
|
content:
|
||||||
|
application/json:
|
||||||
|
schema:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
table:
|
||||||
|
type: string
|
||||||
|
version:
|
||||||
|
type: integer
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
stats:
|
||||||
|
type: object
|
||||||
|
"401":
|
||||||
|
$ref: "#/components/responses/unauthorized"
|
||||||
|
"404":
|
||||||
|
$ref: "#/components/responses/not_found"
|
||||||
|
|
||||||
|
/v1/table/{name}/index/list/:
|
||||||
|
post:
|
||||||
|
description: List indexes of a table
|
||||||
|
tags:
|
||||||
|
- Tables
|
||||||
|
summary: List indexes of a table
|
||||||
|
operationId: listIndexes
|
||||||
|
parameters:
|
||||||
|
- $ref: "#/components/parameters/table_name"
|
||||||
|
responses:
|
||||||
|
"200":
|
||||||
|
description: Available list of indexes on the table.
|
||||||
|
content:
|
||||||
|
application/json:
|
||||||
|
schema:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
indexes:
|
||||||
|
type: array
|
||||||
|
items:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
columns:
|
||||||
|
type: array
|
||||||
|
items:
|
||||||
|
type: string
|
||||||
|
index_name:
|
||||||
|
type: string
|
||||||
|
index_uuid:
|
||||||
|
type: string
|
||||||
|
"401":
|
||||||
|
$ref: "#/components/responses/unauthorized"
|
||||||
|
"404":
|
||||||
|
$ref: "#/components/responses/not_found"
|
||||||
|
/v1/table/{name}/create_index/:
|
||||||
|
post:
|
||||||
|
description: Create vector index on a Table
|
||||||
|
tags:
|
||||||
|
- Tables
|
||||||
|
summary: Create vector index on a Table
|
||||||
|
operationId: createIndex
|
||||||
|
parameters:
|
||||||
|
- $ref: "#/components/parameters/table_name"
|
||||||
|
requestBody:
|
||||||
|
required: true
|
||||||
|
content:
|
||||||
|
application/json:
|
||||||
|
schema:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
column:
|
||||||
|
type: string
|
||||||
|
metric_type:
|
||||||
|
type: string
|
||||||
|
nullable: false
|
||||||
|
description: |
|
||||||
|
The metric type to use for the index. L2, Cosine, Dot are supported.
|
||||||
|
index_type:
|
||||||
|
type: string
|
||||||
|
responses:
|
||||||
|
"200":
|
||||||
|
description: Index successfully created
|
||||||
|
"400":
|
||||||
|
$ref: "#/components/responses/invalid_request"
|
||||||
|
"401":
|
||||||
|
$ref: "#/components/responses/unauthorized"
|
||||||
|
"404":
|
||||||
|
$ref: "#/components/responses/not_found"
|
||||||
|
/v1/table/{name}/create_scalar_index/:
|
||||||
|
post:
|
||||||
|
description: Create a scalar index on a table
|
||||||
|
tags:
|
||||||
|
- Tables
|
||||||
|
summary: Create a scalar index on a table
|
||||||
|
operationId: createScalarIndex
|
||||||
|
parameters:
|
||||||
|
- $ref: "#/components/parameters/table_name"
|
||||||
|
requestBody:
|
||||||
|
required: true
|
||||||
|
content:
|
||||||
|
application/json:
|
||||||
|
schema:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
column:
|
||||||
|
type: string
|
||||||
|
index_type:
|
||||||
|
type: string
|
||||||
|
required: false
|
||||||
|
responses:
|
||||||
|
"200":
|
||||||
|
description: Scalar Index successfully created
|
||||||
|
"400":
|
||||||
|
$ref: "#/components/responses/invalid_request"
|
||||||
|
"401":
|
||||||
|
$ref: "#/components/responses/unauthorized"
|
||||||
|
"404":
|
||||||
|
$ref: "#/components/responses/not_found"
|
||||||
@@ -2,4 +2,5 @@ mkdocs==1.5.3
|
|||||||
mkdocs-jupyter==0.24.1
|
mkdocs-jupyter==0.24.1
|
||||||
mkdocs-material==9.5.3
|
mkdocs-material==9.5.3
|
||||||
mkdocstrings[python]==0.20.0
|
mkdocstrings[python]==0.20.0
|
||||||
|
mkdocs-render-swagger-plugin
|
||||||
pydantic
|
pydantic
|
||||||
@@ -38,7 +38,21 @@ Lance supports `IVF_PQ` index type by default.
|
|||||||
tbl.create_index(num_partitions=256, num_sub_vectors=96)
|
tbl.create_index(num_partitions=256, num_sub_vectors=96)
|
||||||
```
|
```
|
||||||
|
|
||||||
=== "Typescript"
|
=== "TypeScript"
|
||||||
|
|
||||||
|
=== "@lancedb/lancedb"
|
||||||
|
|
||||||
|
Creating indexes is done via the [lancedb.Table.createIndex](../js/classes/Table.md/#createIndex) method.
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
--8<--- "nodejs/examples/ann_indexes.ts:import"
|
||||||
|
|
||||||
|
--8<-- "nodejs/examples/ann_indexes.ts:ingest"
|
||||||
|
```
|
||||||
|
|
||||||
|
=== "vectordb (deprecated)"
|
||||||
|
|
||||||
|
Creating indexes is done via the [lancedb.Table.createIndex](../javascript/interfaces/Table.md/#createIndex) method.
|
||||||
|
|
||||||
```typescript
|
```typescript
|
||||||
--8<--- "docs/src/ann_indexes.ts:import"
|
--8<--- "docs/src/ann_indexes.ts:import"
|
||||||
@@ -150,7 +164,15 @@ There are a couple of parameters that can be used to fine-tune the search:
|
|||||||
1 [0.48587373, 0.269207, 0.15095535, 0.65531915,... item 3953 108.393867
|
1 [0.48587373, 0.269207, 0.15095535, 0.65531915,... item 3953 108.393867
|
||||||
```
|
```
|
||||||
|
|
||||||
=== "Typescript"
|
=== "TypeScript"
|
||||||
|
|
||||||
|
=== "@lancedb/lancedb"
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
--8<-- "nodejs/examples/ann_indexes.ts:search1"
|
||||||
|
```
|
||||||
|
|
||||||
|
=== "vectordb (deprecated)"
|
||||||
|
|
||||||
```typescript
|
```typescript
|
||||||
--8<-- "docs/src/ann_indexes.ts:search1"
|
--8<-- "docs/src/ann_indexes.ts:search1"
|
||||||
@@ -176,7 +198,15 @@ You can further filter the elements returned by a search using a where clause.
|
|||||||
tbl.search(np.random.random((1536))).where("item != 'item 1141'").to_pandas()
|
tbl.search(np.random.random((1536))).where("item != 'item 1141'").to_pandas()
|
||||||
```
|
```
|
||||||
|
|
||||||
=== "Typescript"
|
=== "TypeScript"
|
||||||
|
|
||||||
|
=== "@lancedb/lancedb"
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
--8<-- "nodejs/examples/ann_indexes.ts:search2"
|
||||||
|
```
|
||||||
|
|
||||||
|
=== "vectordb (deprecated)"
|
||||||
|
|
||||||
```javascript
|
```javascript
|
||||||
--8<-- "docs/src/ann_indexes.ts:search2"
|
--8<-- "docs/src/ann_indexes.ts:search2"
|
||||||
@@ -200,7 +230,15 @@ You can select the columns returned by the query using a select clause.
|
|||||||
...
|
...
|
||||||
```
|
```
|
||||||
|
|
||||||
=== "Typescript"
|
=== "TypeScript"
|
||||||
|
|
||||||
|
=== "@lancedb/lancedb"
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
--8<-- "nodejs/examples/ann_indexes.ts:search3"
|
||||||
|
```
|
||||||
|
|
||||||
|
=== "vectordb (deprecated)"
|
||||||
|
|
||||||
```typescript
|
```typescript
|
||||||
--8<-- "docs/src/ann_indexes.ts:search3"
|
--8<-- "docs/src/ann_indexes.ts:search3"
|
||||||
|
|||||||
@@ -16,12 +16,43 @@
|
|||||||
pip install lancedb
|
pip install lancedb
|
||||||
```
|
```
|
||||||
|
|
||||||
=== "Typescript"
|
=== "Typescript[^1]"
|
||||||
|
=== "@lancedb/lancedb"
|
||||||
|
|
||||||
|
```shell
|
||||||
|
npm install @lancedb/lancedb
|
||||||
|
```
|
||||||
|
!!! note "Bundling `@lancedb/lancedb` apps with Webpack"
|
||||||
|
|
||||||
|
Since LanceDB contains a prebuilt Node binary, you must configure `next.config.js` to exclude it from webpack. This is required for both using Next.js and deploying a LanceDB app on Vercel.
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
/** @type {import('next').NextConfig} */
|
||||||
|
module.exports = ({
|
||||||
|
webpack(config) {
|
||||||
|
config.externals.push({ '@lancedb/lancedb': '@lancedb/lancedb' })
|
||||||
|
return config;
|
||||||
|
}
|
||||||
|
})
|
||||||
|
```
|
||||||
|
=== "vectordb (deprecated)"
|
||||||
|
|
||||||
```shell
|
```shell
|
||||||
npm install vectordb
|
npm install vectordb
|
||||||
```
|
```
|
||||||
|
!!! note "Bundling `vectordb` apps with Webpack"
|
||||||
|
|
||||||
|
Since LanceDB contains a prebuilt Node binary, you must configure `next.config.js` to exclude it from webpack. This is required for both using Next.js and deploying a LanceDB app on Vercel.
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
/** @type {import('next').NextConfig} */
|
||||||
|
module.exports = ({
|
||||||
|
webpack(config) {
|
||||||
|
config.externals.push({ vectordb: 'vectordb' })
|
||||||
|
return config;
|
||||||
|
}
|
||||||
|
})
|
||||||
|
```
|
||||||
=== "Rust"
|
=== "Rust"
|
||||||
|
|
||||||
```shell
|
```shell
|
||||||
@@ -58,7 +89,14 @@ recommend switching to stable releases.
|
|||||||
pip install --pre --extra-index-url https://pypi.fury.io/lancedb/ lancedb
|
pip install --pre --extra-index-url https://pypi.fury.io/lancedb/ lancedb
|
||||||
```
|
```
|
||||||
|
|
||||||
=== "Typescript"
|
=== "Typescript[^1]"
|
||||||
|
|
||||||
|
=== "@lancedb/lancedb"
|
||||||
|
|
||||||
|
```shell
|
||||||
|
npm install @lancedb/lancedb@preview
|
||||||
|
```
|
||||||
|
=== "vectordb (deprecated)"
|
||||||
|
|
||||||
```shell
|
```shell
|
||||||
npm install vectordb@preview
|
npm install vectordb@preview
|
||||||
@@ -93,23 +131,22 @@ recommend switching to stable releases.
|
|||||||
use the same syntax as the asynchronous API. To help with this migration we
|
use the same syntax as the asynchronous API. To help with this migration we
|
||||||
have created a [migration guide](migration.md) detailing the differences.
|
have created a [migration guide](migration.md) detailing the differences.
|
||||||
|
|
||||||
=== "Typescript"
|
=== "Typescript[^1]"
|
||||||
|
|
||||||
|
=== "@lancedb/lancedb"
|
||||||
|
|
||||||
```typescript
|
```typescript
|
||||||
--8<-- "docs/src/basic_legacy.ts:import"
|
import * as lancedb from "@lancedb/lancedb";
|
||||||
|
import * as arrow from "apache-arrow";
|
||||||
|
|
||||||
--8<-- "docs/src/basic_legacy.ts:open_db"
|
--8<-- "nodejs/examples/basic.ts:connect"
|
||||||
```
|
```
|
||||||
|
|
||||||
!!! note "`@lancedb/lancedb` vs. `vectordb`"
|
=== "vectordb (deprecated)"
|
||||||
|
|
||||||
The Javascript SDK was originally released as `vectordb`. In an effort to
|
```typescript
|
||||||
reduce maintenance we are aligning our SDKs. The new, aligned, Javascript
|
--8<-- "docs/src/basic_legacy.ts:open_db"
|
||||||
API is being released as `lancedb`. If you are starting new work we encourage
|
```
|
||||||
you to try out `lancedb`. Once the new API is feature complete we will begin
|
|
||||||
slowly deprecating `vectordb` in favor of `lancedb`. There is a
|
|
||||||
[migration guide](migration.md) detailing the differences which will assist
|
|
||||||
you in this process.
|
|
||||||
|
|
||||||
=== "Rust"
|
=== "Rust"
|
||||||
|
|
||||||
@@ -152,14 +189,22 @@ table.
|
|||||||
--8<-- "python/python/tests/docs/test_basic.py:create_table_async_pandas"
|
--8<-- "python/python/tests/docs/test_basic.py:create_table_async_pandas"
|
||||||
```
|
```
|
||||||
|
|
||||||
=== "Typescript"
|
=== "Typescript[^1]"
|
||||||
|
|
||||||
|
=== "@lancedb/lancedb"
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
--8<-- "nodejs/examples/basic.ts:create_table"
|
||||||
|
```
|
||||||
|
|
||||||
|
=== "vectordb (deprecated)"
|
||||||
|
|
||||||
```typescript
|
```typescript
|
||||||
--8<-- "docs/src/basic_legacy.ts:create_table"
|
--8<-- "docs/src/basic_legacy.ts:create_table"
|
||||||
```
|
```
|
||||||
|
|
||||||
If the table already exists, LanceDB will raise an error by default.
|
If the table already exists, LanceDB will raise an error by default.
|
||||||
If you want to overwrite the table, you can pass in `mode="overwrite"`
|
If you want to overwrite the table, you can pass in `mode:"overwrite"`
|
||||||
to the `createTable` function.
|
to the `createTable` function.
|
||||||
|
|
||||||
=== "Rust"
|
=== "Rust"
|
||||||
@@ -200,7 +245,15 @@ similar to a `CREATE TABLE` statement in SQL.
|
|||||||
!!! note "You can define schema in Pydantic"
|
!!! note "You can define schema in Pydantic"
|
||||||
LanceDB comes with Pydantic support, which allows you to define the schema of your data using Pydantic models. This makes it easy to work with LanceDB tables and data. Learn more about all supported types in [tables guide](./guides/tables.md).
|
LanceDB comes with Pydantic support, which allows you to define the schema of your data using Pydantic models. This makes it easy to work with LanceDB tables and data. Learn more about all supported types in [tables guide](./guides/tables.md).
|
||||||
|
|
||||||
=== "Typescript"
|
=== "Typescript[^1]"
|
||||||
|
|
||||||
|
=== "@lancedb/lancedb"
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
--8<-- "nodejs/examples/basic.ts:create_empty_table"
|
||||||
|
```
|
||||||
|
|
||||||
|
=== "vectordb (deprecated)"
|
||||||
|
|
||||||
```typescript
|
```typescript
|
||||||
--8<-- "docs/src/basic_legacy.ts:create_empty_table"
|
--8<-- "docs/src/basic_legacy.ts:create_empty_table"
|
||||||
@@ -223,12 +276,20 @@ Once created, you can open a table as follows:
|
|||||||
--8<-- "python/python/tests/docs/test_basic.py:open_table_async"
|
--8<-- "python/python/tests/docs/test_basic.py:open_table_async"
|
||||||
```
|
```
|
||||||
|
|
||||||
=== "Typescript"
|
=== "Typescript[^1]"
|
||||||
|
=== "@lancedb/lancedb"
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
--8<-- "nodejs/examples/basic.ts:open_table"
|
||||||
|
```
|
||||||
|
|
||||||
|
=== "vectordb (deprecated)"
|
||||||
|
|
||||||
```typescript
|
```typescript
|
||||||
const tbl = await db.openTable("myTable");
|
const tbl = await db.openTable("myTable");
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
||||||
=== "Rust"
|
=== "Rust"
|
||||||
|
|
||||||
```rust
|
```rust
|
||||||
@@ -244,9 +305,16 @@ If you forget the name of your table, you can always get a listing of all table
|
|||||||
--8<-- "python/python/tests/docs/test_basic.py:table_names_async"
|
--8<-- "python/python/tests/docs/test_basic.py:table_names_async"
|
||||||
```
|
```
|
||||||
|
|
||||||
=== "Javascript"
|
=== "Typescript[^1]"
|
||||||
|
=== "@lancedb/lancedb"
|
||||||
|
|
||||||
```javascript
|
```typescript
|
||||||
|
--8<-- "nodejs/examples/basic.ts:table_names"
|
||||||
|
```
|
||||||
|
|
||||||
|
=== "vectordb (deprecated)"
|
||||||
|
|
||||||
|
```typescript
|
||||||
console.log(await db.tableNames());
|
console.log(await db.tableNames());
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -267,7 +335,14 @@ After a table has been created, you can always add more data to it as follows:
|
|||||||
--8<-- "python/python/tests/docs/test_basic.py:add_data_async"
|
--8<-- "python/python/tests/docs/test_basic.py:add_data_async"
|
||||||
```
|
```
|
||||||
|
|
||||||
=== "Typescript"
|
=== "Typescript[^1]"
|
||||||
|
=== "@lancedb/lancedb"
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
--8<-- "nodejs/examples/basic.ts:add_data"
|
||||||
|
```
|
||||||
|
|
||||||
|
=== "vectordb (deprecated)"
|
||||||
|
|
||||||
```typescript
|
```typescript
|
||||||
--8<-- "docs/src/basic_legacy.ts:add"
|
--8<-- "docs/src/basic_legacy.ts:add"
|
||||||
@@ -292,7 +367,14 @@ Once you've embedded the query, you can find its nearest neighbors as follows:
|
|||||||
|
|
||||||
This returns a pandas DataFrame with the results.
|
This returns a pandas DataFrame with the results.
|
||||||
|
|
||||||
=== "Typescript"
|
=== "Typescript[^1]"
|
||||||
|
=== "@lancedb/lancedb"
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
--8<-- "nodejs/examples/basic.ts:vector_search"
|
||||||
|
```
|
||||||
|
|
||||||
|
=== "vectordb (deprecated)"
|
||||||
|
|
||||||
```typescript
|
```typescript
|
||||||
--8<-- "docs/src/basic_legacy.ts:search"
|
--8<-- "docs/src/basic_legacy.ts:search"
|
||||||
@@ -325,7 +407,14 @@ LanceDB allows you to create an ANN index on a table as follows:
|
|||||||
--8<-- "python/python/tests/docs/test_basic.py:create_index_async"
|
--8<-- "python/python/tests/docs/test_basic.py:create_index_async"
|
||||||
```
|
```
|
||||||
|
|
||||||
=== "Typescript"
|
=== "Typescript[^1]"
|
||||||
|
=== "@lancedb/lancedb"
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
--8<-- "nodejs/examples/basic.ts:create_index"
|
||||||
|
```
|
||||||
|
|
||||||
|
=== "vectordb (deprecated)"
|
||||||
|
|
||||||
```{.typescript .ignore}
|
```{.typescript .ignore}
|
||||||
--8<-- "docs/src/basic_legacy.ts:create_index"
|
--8<-- "docs/src/basic_legacy.ts:create_index"
|
||||||
@@ -357,7 +446,15 @@ This can delete any number of rows that match the filter.
|
|||||||
--8<-- "python/python/tests/docs/test_basic.py:delete_rows_async"
|
--8<-- "python/python/tests/docs/test_basic.py:delete_rows_async"
|
||||||
```
|
```
|
||||||
|
|
||||||
=== "Typescript"
|
=== "Typescript[^1]"
|
||||||
|
|
||||||
|
=== "@lancedb/lancedb"
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
--8<-- "nodejs/examples/basic.ts:delete_rows"
|
||||||
|
```
|
||||||
|
|
||||||
|
=== "vectordb (deprecated)"
|
||||||
|
|
||||||
```typescript
|
```typescript
|
||||||
--8<-- "docs/src/basic_legacy.ts:delete"
|
--8<-- "docs/src/basic_legacy.ts:delete"
|
||||||
@@ -378,7 +475,13 @@ simple or complex as needed. To see what expressions are supported, see the
|
|||||||
|
|
||||||
Read more: [lancedb.table.Table.delete][]
|
Read more: [lancedb.table.Table.delete][]
|
||||||
|
|
||||||
=== "Javascript"
|
=== "Typescript[^1]"
|
||||||
|
|
||||||
|
=== "@lancedb/lancedb"
|
||||||
|
|
||||||
|
Read more: [lancedb.Table.delete](javascript/interfaces/Table.md#delete)
|
||||||
|
|
||||||
|
=== "vectordb (deprecated)"
|
||||||
|
|
||||||
Read more: [vectordb.Table.delete](javascript/interfaces/Table.md#delete)
|
Read more: [vectordb.Table.delete](javascript/interfaces/Table.md#delete)
|
||||||
|
|
||||||
@@ -401,7 +504,15 @@ Use the `drop_table()` method on the database to remove a table.
|
|||||||
By default, if the table does not exist an exception is raised. To suppress this,
|
By default, if the table does not exist an exception is raised. To suppress this,
|
||||||
you can pass in `ignore_missing=True`.
|
you can pass in `ignore_missing=True`.
|
||||||
|
|
||||||
=== "Typescript"
|
=== "Typescript[^1]"
|
||||||
|
|
||||||
|
=== "@lancedb/lancedb"
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
--8<-- "nodejs/examples/basic.ts:drop_table"
|
||||||
|
```
|
||||||
|
|
||||||
|
=== "vectordb (deprecated)"
|
||||||
|
|
||||||
```typescript
|
```typescript
|
||||||
--8<-- "docs/src/basic_legacy.ts:drop_table"
|
--8<-- "docs/src/basic_legacy.ts:drop_table"
|
||||||
@@ -416,19 +527,6 @@ Use the `drop_table()` method on the database to remove a table.
|
|||||||
--8<-- "rust/lancedb/examples/simple.rs:drop_table"
|
--8<-- "rust/lancedb/examples/simple.rs:drop_table"
|
||||||
```
|
```
|
||||||
|
|
||||||
!!! note "Bundling `vectordb` apps with Webpack"
|
|
||||||
|
|
||||||
If you're using the `vectordb` module in JavaScript, since LanceDB contains a prebuilt Node binary, you must configure `next.config.js` to exclude it from webpack. This is required for both using Next.js and deploying a LanceDB app on Vercel.
|
|
||||||
|
|
||||||
```javascript
|
|
||||||
/** @type {import('next').NextConfig} */
|
|
||||||
module.exports = ({
|
|
||||||
webpack(config) {
|
|
||||||
config.externals.push({ vectordb: 'vectordb' })
|
|
||||||
return config;
|
|
||||||
}
|
|
||||||
})
|
|
||||||
```
|
|
||||||
|
|
||||||
## Using the Embedding API
|
## Using the Embedding API
|
||||||
You can use the embedding API when working with embedding models. It automatically vectorizes the data at ingestion and query time and comes with built-in integrations with popular embedding models like Openai, Hugging Face, Sentence Transformers, CLIP and more.
|
You can use the embedding API when working with embedding models. It automatically vectorizes the data at ingestion and query time and comes with built-in integrations with popular embedding models like Openai, Hugging Face, Sentence Transformers, CLIP and more.
|
||||||
@@ -440,6 +538,22 @@ You can use the embedding API when working with embedding models. It automatical
|
|||||||
--8<-- "python/python/tests/docs/test_embeddings_optional.py:openai_embeddings"
|
--8<-- "python/python/tests/docs/test_embeddings_optional.py:openai_embeddings"
|
||||||
```
|
```
|
||||||
|
|
||||||
|
=== "Typescript[^1]"
|
||||||
|
|
||||||
|
=== "@lancedb/lancedb"
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
--8<-- "nodejs/examples/embedding.ts:imports"
|
||||||
|
--8<-- "nodejs/examples/embedding.ts:openai_embeddings"
|
||||||
|
```
|
||||||
|
|
||||||
|
=== "Rust"
|
||||||
|
|
||||||
|
```rust
|
||||||
|
--8<-- "rust/lancedb/examples/openai.rs:imports"
|
||||||
|
--8<-- "rust/lancedb/examples/openai.rs:openai_embeddings"
|
||||||
|
```
|
||||||
|
|
||||||
Learn about using the existing integrations and creating custom embedding functions in the [embedding API guide](./embeddings/).
|
Learn about using the existing integrations and creating custom embedding functions in the [embedding API guide](./embeddings/).
|
||||||
|
|
||||||
|
|
||||||
@@ -448,3 +562,5 @@ Learn about using the existing integrations and creating custom embedding functi
|
|||||||
This section covered the very basics of using LanceDB. If you're learning about vector databases for the first time, you may want to read the page on [indexing](concepts/index_ivfpq.md) to get familiar with the concepts.
|
This section covered the very basics of using LanceDB. If you're learning about vector databases for the first time, you may want to read the page on [indexing](concepts/index_ivfpq.md) to get familiar with the concepts.
|
||||||
|
|
||||||
If you've already worked with other vector databases, you may want to read the [guides](guides/tables.md) to learn how to work with LanceDB in more detail.
|
If you've already worked with other vector databases, you may want to read the [guides](guides/tables.md) to learn how to work with LanceDB in more detail.
|
||||||
|
|
||||||
|
[^1]: The `vectordb` package is a legacy package that is deprecated in favor of `@lancedb/lancedb`. The `vectordb` package will continue to receive bug fixes and security updates until September 2024. We recommend all new projects use `@lancedb/lancedb`. See the [migration guide](migration.md) for more information.
|
||||||
|
|||||||
@@ -24,6 +24,7 @@ const example = async () => {
|
|||||||
);
|
);
|
||||||
// --8<-- [end:create_table]
|
// --8<-- [end:create_table]
|
||||||
|
|
||||||
|
|
||||||
// --8<-- [start:add]
|
// --8<-- [start:add]
|
||||||
const newData = Array.from({ length: 500 }, (_, i) => ({
|
const newData = Array.from({ length: 500 }, (_, i) => ({
|
||||||
vector: [i, i + 1],
|
vector: [i, i + 1],
|
||||||
|
|||||||
1
docs/src/cloud/rest.md
Normal file
1
docs/src/cloud/rest.md
Normal file
@@ -0,0 +1 @@
|
|||||||
|
!!swagger ../../openapi.yml!!
|
||||||
@@ -193,13 +193,13 @@ from lancedb.pydantic import LanceModel, Vector
|
|||||||
|
|
||||||
model = get_registry().get("huggingface").create(name='facebook/bart-base')
|
model = get_registry().get("huggingface").create(name='facebook/bart-base')
|
||||||
|
|
||||||
class TextModel(LanceModel):
|
class Words(LanceModel):
|
||||||
text: str = model.SourceField()
|
text: str = model.SourceField()
|
||||||
vector: Vector(model.ndims()) = model.VectorField()
|
vector: Vector(model.ndims()) = model.VectorField()
|
||||||
|
|
||||||
df = pd.DataFrame({"text": ["hi hello sayonara", "goodbye world"]})
|
df = pd.DataFrame({"text": ["hi hello sayonara", "goodbye world"]})
|
||||||
table = db.create_table("greets", schema=Words)
|
table = db.create_table("greets", schema=Words)
|
||||||
table.add()
|
table.add(df)
|
||||||
query = "old greeting"
|
query = "old greeting"
|
||||||
actual = table.search(query).limit(1).to_pydantic(Words)[0]
|
actual = table.search(query).limit(1).to_pydantic(Words)[0]
|
||||||
print(actual.text)
|
print(actual.text)
|
||||||
@@ -427,6 +427,45 @@ Usage Example:
|
|||||||
tbl.add(data)
|
tbl.add(data)
|
||||||
```
|
```
|
||||||
|
|
||||||
|
### Jina Embeddings
|
||||||
|
Jina embeddings are used to generate embeddings for text and image data.
|
||||||
|
You also need to set the `JINA_API_KEY` environment variable to use the Jina API.
|
||||||
|
|
||||||
|
You can find a list of supported models under [https://jina.ai/embeddings/](https://jina.ai/embeddings/)
|
||||||
|
|
||||||
|
Supported parameters (to be passed in `create` method) are:
|
||||||
|
|
||||||
|
| Parameter | Type | Default Value | Description |
|
||||||
|
|---|---|---|---|
|
||||||
|
| `name` | `str` | `"jina-clip-v1"` | The model ID of the jina model to use |
|
||||||
|
|
||||||
|
Usage Example:
|
||||||
|
|
||||||
|
```python
|
||||||
|
import os
|
||||||
|
import lancedb
|
||||||
|
from lancedb.pydantic import LanceModel, Vector
|
||||||
|
from lancedb.embeddings import EmbeddingFunctionRegistry
|
||||||
|
|
||||||
|
os.environ['JINA_API_KEY'] = 'jina_*'
|
||||||
|
|
||||||
|
jina_embed = EmbeddingFunctionRegistry.get_instance().get("jina").create(name="jina-embeddings-v2-base-en")
|
||||||
|
|
||||||
|
|
||||||
|
class TextModel(LanceModel):
|
||||||
|
text: str = jina_embed.SourceField()
|
||||||
|
vector: Vector(jina_embed.ndims()) = jina_embed.VectorField()
|
||||||
|
|
||||||
|
|
||||||
|
data = [{"text": "hello world"},
|
||||||
|
{"text": "goodbye world"}]
|
||||||
|
|
||||||
|
db = lancedb.connect("~/.lancedb-2")
|
||||||
|
tbl = db.create_table("test", schema=TextModel, mode="overwrite")
|
||||||
|
|
||||||
|
tbl.add(data)
|
||||||
|
```
|
||||||
|
|
||||||
### AWS Bedrock Text Embedding Functions
|
### AWS Bedrock Text Embedding Functions
|
||||||
AWS Bedrock supports multiple base models for generating text embeddings. You need to setup the AWS credentials to use this embedding function.
|
AWS Bedrock supports multiple base models for generating text embeddings. You need to setup the AWS credentials to use this embedding function.
|
||||||
You can do so by using `awscli` and also add your session_token:
|
You can do so by using `awscli` and also add your session_token:
|
||||||
@@ -524,7 +563,7 @@ uris = [
|
|||||||
# get each uri as bytes
|
# get each uri as bytes
|
||||||
image_bytes = [requests.get(uri).content for uri in uris]
|
image_bytes = [requests.get(uri).content for uri in uris]
|
||||||
table.add(
|
table.add(
|
||||||
[{"label": labels, "image_uri": uris, "image_bytes": image_bytes}]
|
pd.DataFrame({"label": labels, "image_uri": uris, "image_bytes": image_bytes})
|
||||||
)
|
)
|
||||||
```
|
```
|
||||||
Now we can search using text from both the default vector column and the custom vector column
|
Now we can search using text from both the default vector column and the custom vector column
|
||||||
@@ -630,3 +669,54 @@ print(actual.text == "bird")
|
|||||||
```
|
```
|
||||||
|
|
||||||
If you have any questions about the embeddings API, supported models, or see a relevant model missing, please raise an issue [on GitHub](https://github.com/lancedb/lancedb/issues).
|
If you have any questions about the embeddings API, supported models, or see a relevant model missing, please raise an issue [on GitHub](https://github.com/lancedb/lancedb/issues).
|
||||||
|
|
||||||
|
### Jina Embeddings
|
||||||
|
Jina embeddings can also be used to embed both text and image data, only some of the models support image data and you can check the list
|
||||||
|
under [https://jina.ai/embeddings/](https://jina.ai/embeddings/)
|
||||||
|
|
||||||
|
Supported parameters (to be passed in `create` method) are:
|
||||||
|
|
||||||
|
| Parameter | Type | Default Value | Description |
|
||||||
|
|---|---|---|---|
|
||||||
|
| `name` | `str` | `"jina-clip-v1"` | The model ID of the jina model to use |
|
||||||
|
|
||||||
|
Usage Example:
|
||||||
|
|
||||||
|
```python
|
||||||
|
import os
|
||||||
|
import requests
|
||||||
|
import lancedb
|
||||||
|
from lancedb.pydantic import LanceModel, Vector
|
||||||
|
from lancedb.embeddings import get_registry
|
||||||
|
import pandas as pd
|
||||||
|
|
||||||
|
os.environ['JINA_API_KEY'] = 'jina_*'
|
||||||
|
|
||||||
|
db = lancedb.connect("~/.lancedb")
|
||||||
|
func = get_registry().get("jina").create()
|
||||||
|
|
||||||
|
|
||||||
|
class Images(LanceModel):
|
||||||
|
label: str
|
||||||
|
image_uri: str = func.SourceField() # image uri as the source
|
||||||
|
image_bytes: bytes = func.SourceField() # image bytes as the source
|
||||||
|
vector: Vector(func.ndims()) = func.VectorField() # vector column
|
||||||
|
vec_from_bytes: Vector(func.ndims()) = func.VectorField() # Another vector column
|
||||||
|
|
||||||
|
|
||||||
|
table = db.create_table("images", schema=Images)
|
||||||
|
labels = ["cat", "cat", "dog", "dog", "horse", "horse"]
|
||||||
|
uris = [
|
||||||
|
"http://farm1.staticflickr.com/53/167798175_7c7845bbbd_z.jpg",
|
||||||
|
"http://farm1.staticflickr.com/134/332220238_da527d8140_z.jpg",
|
||||||
|
"http://farm9.staticflickr.com/8387/8602747737_2e5c2a45d4_z.jpg",
|
||||||
|
"http://farm5.staticflickr.com/4092/5017326486_1f46057f5f_z.jpg",
|
||||||
|
"http://farm9.staticflickr.com/8216/8434969557_d37882c42d_z.jpg",
|
||||||
|
"http://farm6.staticflickr.com/5142/5835678453_4f3a4edb45_z.jpg",
|
||||||
|
]
|
||||||
|
# get each uri as bytes
|
||||||
|
image_bytes = [requests.get(uri).content for uri in uris]
|
||||||
|
table.add(
|
||||||
|
pd.DataFrame({"label": labels, "image_uri": uris, "image_bytes": image_bytes})
|
||||||
|
)
|
||||||
|
```
|
||||||
@@ -29,17 +29,32 @@ For this purpose, LanceDB introduces an **embedding functions API**, that allow
|
|||||||
You can also define your own embedding function by implementing the `EmbeddingFunction`
|
You can also define your own embedding function by implementing the `EmbeddingFunction`
|
||||||
abstract base interface. It subclasses Pydantic Model which can be utilized to write complex schemas simply as we'll see next!
|
abstract base interface. It subclasses Pydantic Model which can be utilized to write complex schemas simply as we'll see next!
|
||||||
|
|
||||||
=== "JavaScript""
|
=== "TypeScript"
|
||||||
In the TypeScript SDK, the choices are more limited. For now, only the OpenAI
|
In the TypeScript SDK, the choices are more limited. For now, only the OpenAI
|
||||||
embedding function is available.
|
embedding function is available.
|
||||||
|
|
||||||
```javascript
|
```javascript
|
||||||
const lancedb = require("vectordb");
|
import * as lancedb from '@lancedb/lancedb'
|
||||||
|
import { getRegistry } from '@lancedb/lancedb/embeddings'
|
||||||
|
|
||||||
// You need to provide an OpenAI API key
|
// You need to provide an OpenAI API key
|
||||||
const apiKey = "sk-..."
|
const apiKey = "sk-..."
|
||||||
// The embedding function will create embeddings for the 'text' column
|
// The embedding function will create embeddings for the 'text' column
|
||||||
const embedding = new lancedb.OpenAIEmbeddingFunction('text', apiKey)
|
const func = getRegistry().get("openai").create({apiKey})
|
||||||
|
```
|
||||||
|
=== "Rust"
|
||||||
|
In the Rust SDK, the choices are more limited. For now, only the OpenAI
|
||||||
|
embedding function is available. But unlike the Python and TypeScript SDKs, you need manually register the OpenAI embedding function.
|
||||||
|
|
||||||
|
```toml
|
||||||
|
// Make sure to include the `openai` feature
|
||||||
|
[dependencies]
|
||||||
|
lancedb = {version = "*", features = ["openai"]}
|
||||||
|
```
|
||||||
|
|
||||||
|
```rust
|
||||||
|
--8<-- "rust/lancedb/examples/openai.rs:imports"
|
||||||
|
--8<-- "rust/lancedb/examples/openai.rs:openai_embeddings"
|
||||||
```
|
```
|
||||||
|
|
||||||
## 2. Define the data model or schema
|
## 2. Define the data model or schema
|
||||||
@@ -55,7 +70,7 @@ For this purpose, LanceDB introduces an **embedding functions API**, that allow
|
|||||||
|
|
||||||
`VectorField` tells LanceDB to use the clip embedding function to generate query embeddings for the `vector` column and `SourceField` ensures that when adding data, we automatically use the specified embedding function to encode `image_uri`.
|
`VectorField` tells LanceDB to use the clip embedding function to generate query embeddings for the `vector` column and `SourceField` ensures that when adding data, we automatically use the specified embedding function to encode `image_uri`.
|
||||||
|
|
||||||
=== "JavaScript"
|
=== "TypeScript"
|
||||||
|
|
||||||
For the TypeScript SDK, a schema can be inferred from input data, or an explicit
|
For the TypeScript SDK, a schema can be inferred from input data, or an explicit
|
||||||
Arrow schema can be provided.
|
Arrow schema can be provided.
|
||||||
@@ -74,9 +89,18 @@ the embeddings at all:
|
|||||||
table.add([{"image_uri": u} for u in uris])
|
table.add([{"image_uri": u} for u in uris])
|
||||||
```
|
```
|
||||||
|
|
||||||
=== "JavaScript"
|
=== "TypeScript"
|
||||||
|
|
||||||
```javascript
|
=== "@lancedb/lancedb"
|
||||||
|
|
||||||
|
```ts
|
||||||
|
--8<-- "nodejs/examples/embedding.ts:imports"
|
||||||
|
--8<-- "nodejs/examples/embedding.ts:embedding_function"
|
||||||
|
```
|
||||||
|
|
||||||
|
=== "vectordb (deprecated)"
|
||||||
|
|
||||||
|
```ts
|
||||||
const db = await lancedb.connect("data/sample-lancedb");
|
const db = await lancedb.connect("data/sample-lancedb");
|
||||||
const data = [
|
const data = [
|
||||||
{ text: "pepperoni"},
|
{ text: "pepperoni"},
|
||||||
@@ -116,9 +140,19 @@ need to worry about it when you query the table:
|
|||||||
|
|
||||||
Both of the above snippet returns a pandas DataFrame with the 10 closest vectors to the query.
|
Both of the above snippet returns a pandas DataFrame with the 10 closest vectors to the query.
|
||||||
|
|
||||||
=== "JavaScript"
|
=== "TypeScript"
|
||||||
|
|
||||||
```javascript
|
=== "@lancedb/lancedb"
|
||||||
|
|
||||||
|
```ts
|
||||||
|
const results = await table.search("What's the best pizza topping?")
|
||||||
|
.limit(10)
|
||||||
|
.toArray()
|
||||||
|
```
|
||||||
|
|
||||||
|
=== "vectordb (deprecated)"
|
||||||
|
|
||||||
|
```ts
|
||||||
const results = await table
|
const results = await table
|
||||||
.search("What's the best pizza topping?")
|
.search("What's the best pizza topping?")
|
||||||
.limit(10)
|
.limit(10)
|
||||||
|
|||||||
@@ -7,7 +7,7 @@ LanceDB supports 3 methods of working with embeddings.
|
|||||||
|
|
||||||
1. You can manually generate embeddings for the data and queries. This is done outside of LanceDB.
|
1. You can manually generate embeddings for the data and queries. This is done outside of LanceDB.
|
||||||
2. You can use the built-in [embedding functions](./embedding_functions.md) to embed the data and queries in the background.
|
2. You can use the built-in [embedding functions](./embedding_functions.md) to embed the data and queries in the background.
|
||||||
3. For python users, you can define your own [custom embedding function](./custom_embedding_function.md)
|
3. You can define your own [custom embedding function](./custom_embedding_function.md)
|
||||||
that extends the default embedding functions.
|
that extends the default embedding functions.
|
||||||
|
|
||||||
For python users, there is also a legacy [with_embeddings API](./legacy.md).
|
For python users, there is also a legacy [with_embeddings API](./legacy.md).
|
||||||
@@ -18,8 +18,11 @@ It is retained for compatibility and will be removed in a future version.
|
|||||||
To get started with embeddings, you can use the built-in embedding functions.
|
To get started with embeddings, you can use the built-in embedding functions.
|
||||||
|
|
||||||
### OpenAI Embedding function
|
### OpenAI Embedding function
|
||||||
|
|
||||||
LanceDB registers the OpenAI embeddings function in the registry as `openai`. You can pass any supported model name to the `create`. By default it uses `"text-embedding-ada-002"`.
|
LanceDB registers the OpenAI embeddings function in the registry as `openai`. You can pass any supported model name to the `create`. By default it uses `"text-embedding-ada-002"`.
|
||||||
|
|
||||||
|
=== "Python"
|
||||||
|
|
||||||
```python
|
```python
|
||||||
import lancedb
|
import lancedb
|
||||||
from lancedb.pydantic import LanceModel, Vector
|
from lancedb.pydantic import LanceModel, Vector
|
||||||
@@ -45,9 +48,24 @@ actual = table.search(query).limit(1).to_pydantic(Words)[0]
|
|||||||
print(actual.text)
|
print(actual.text)
|
||||||
```
|
```
|
||||||
|
|
||||||
|
=== "TypeScript"
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
--8<-- "nodejs/examples/embedding.ts:imports"
|
||||||
|
--8<-- "nodejs/examples/embedding.ts:openai_embeddings"
|
||||||
|
```
|
||||||
|
|
||||||
|
=== "Rust"
|
||||||
|
|
||||||
|
```rust
|
||||||
|
--8<-- "rust/lancedb/examples/openai.rs:imports"
|
||||||
|
--8<-- "rust/lancedb/examples/openai.rs:openai_embeddings"
|
||||||
|
```
|
||||||
|
|
||||||
### Sentence Transformers Embedding function
|
### Sentence Transformers Embedding function
|
||||||
LanceDB registers the Sentence Transformers embeddings function in the registry as `sentence-transformers`. You can pass any supported model name to the `create`. By default it uses `"sentence-transformers/paraphrase-MiniLM-L6-v2"`.
|
LanceDB registers the Sentence Transformers embeddings function in the registry as `sentence-transformers`. You can pass any supported model name to the `create`. By default it uses `"sentence-transformers/paraphrase-MiniLM-L6-v2"`.
|
||||||
|
|
||||||
|
=== "Python"
|
||||||
```python
|
```python
|
||||||
import lancedb
|
import lancedb
|
||||||
from lancedb.pydantic import LanceModel, Vector
|
from lancedb.pydantic import LanceModel, Vector
|
||||||
@@ -72,3 +90,45 @@ query = "greetings"
|
|||||||
actual = table.search(query).limit(1).to_pydantic(Words)[0]
|
actual = table.search(query).limit(1).to_pydantic(Words)[0]
|
||||||
print(actual.text)
|
print(actual.text)
|
||||||
```
|
```
|
||||||
|
|
||||||
|
=== "TypeScript"
|
||||||
|
|
||||||
|
Coming Soon!
|
||||||
|
|
||||||
|
=== "Rust"
|
||||||
|
|
||||||
|
Coming Soon!
|
||||||
|
|
||||||
|
### Jina Embeddings
|
||||||
|
|
||||||
|
LanceDB registers the JinaAI embeddings function in the registry as `jina`. You can pass any supported model name to the `create`. By default it uses `"jina-clip-v1"`.
|
||||||
|
`jina-clip-v1` can handle both text and images and other models only support `text`.
|
||||||
|
|
||||||
|
You need to pass `JINA_API_KEY` in the environment variable or pass it as `api_key` to `create` method.
|
||||||
|
|
||||||
|
```python
|
||||||
|
import os
|
||||||
|
import lancedb
|
||||||
|
from lancedb.pydantic import LanceModel, Vector
|
||||||
|
from lancedb.embeddings import get_registry
|
||||||
|
os.environ['JINA_API_KEY'] = "jina_*"
|
||||||
|
|
||||||
|
db = lancedb.connect("/tmp/db")
|
||||||
|
func = get_registry().get("jina").create(name="jina-clip-v1")
|
||||||
|
|
||||||
|
class Words(LanceModel):
|
||||||
|
text: str = func.SourceField()
|
||||||
|
vector: Vector(func.ndims()) = func.VectorField()
|
||||||
|
|
||||||
|
table = db.create_table("words", schema=Words, mode="overwrite")
|
||||||
|
table.add(
|
||||||
|
[
|
||||||
|
{"text": "hello world"},
|
||||||
|
{"text": "goodbye world"}
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
query = "greetings"
|
||||||
|
actual = table.search(query).limit(1).to_pydantic(Words)[0]
|
||||||
|
print(actual.text)
|
||||||
|
```
|
||||||
|
|||||||
@@ -32,25 +32,51 @@ LanceDB OSS supports object stores such as AWS S3 (and compatible stores), Azure
|
|||||||
db = lancedb.connect("az://bucket/path")
|
db = lancedb.connect("az://bucket/path")
|
||||||
```
|
```
|
||||||
|
|
||||||
=== "JavaScript"
|
=== "TypeScript"
|
||||||
|
|
||||||
|
=== "@lancedb/lancedb"
|
||||||
|
|
||||||
AWS S3:
|
AWS S3:
|
||||||
|
|
||||||
```javascript
|
```ts
|
||||||
|
import * as lancedb from "@lancedb/lancedb";
|
||||||
|
const db = await lancedb.connect("s3://bucket/path");
|
||||||
|
```
|
||||||
|
|
||||||
|
Google Cloud Storage:
|
||||||
|
|
||||||
|
```ts
|
||||||
|
import * as lancedb from "@lancedb/lancedb";
|
||||||
|
const db = await lancedb.connect("gs://bucket/path");
|
||||||
|
```
|
||||||
|
|
||||||
|
Azure Blob Storage:
|
||||||
|
|
||||||
|
```ts
|
||||||
|
import * as lancedb from "@lancedb/lancedb";
|
||||||
|
const db = await lancedb.connect("az://bucket/path");
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
=== "vectordb (deprecated)"
|
||||||
|
|
||||||
|
AWS S3:
|
||||||
|
|
||||||
|
```ts
|
||||||
const lancedb = require("lancedb");
|
const lancedb = require("lancedb");
|
||||||
const db = await lancedb.connect("s3://bucket/path");
|
const db = await lancedb.connect("s3://bucket/path");
|
||||||
```
|
```
|
||||||
|
|
||||||
Google Cloud Storage:
|
Google Cloud Storage:
|
||||||
|
|
||||||
```javascript
|
```ts
|
||||||
const lancedb = require("lancedb");
|
const lancedb = require("lancedb");
|
||||||
const db = await lancedb.connect("gs://bucket/path");
|
const db = await lancedb.connect("gs://bucket/path");
|
||||||
```
|
```
|
||||||
|
|
||||||
Azure Blob Storage:
|
Azure Blob Storage:
|
||||||
|
|
||||||
```javascript
|
```ts
|
||||||
const lancedb = require("lancedb");
|
const lancedb = require("lancedb");
|
||||||
const db = await lancedb.connect("az://bucket/path");
|
const db = await lancedb.connect("az://bucket/path");
|
||||||
```
|
```
|
||||||
@@ -78,12 +104,25 @@ If you only want this to apply to one particular connection, you can pass the `s
|
|||||||
)
|
)
|
||||||
```
|
```
|
||||||
|
|
||||||
=== "JavaScript"
|
=== "TypeScript"
|
||||||
|
|
||||||
```javascript
|
=== "@lancedb/lancedb"
|
||||||
|
|
||||||
|
```ts
|
||||||
|
import * as lancedb from "@lancedb/lancedb";
|
||||||
|
|
||||||
|
const db = await lancedb.connect("s3://bucket/path", {
|
||||||
|
storageOptions: {timeout: "60s"}
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
=== "vectordb (deprecated)"
|
||||||
|
|
||||||
|
```ts
|
||||||
const lancedb = require("lancedb");
|
const lancedb = require("lancedb");
|
||||||
const db = await lancedb.connect("s3://bucket/path",
|
const db = await lancedb.connect("s3://bucket/path", {
|
||||||
{storageOptions: {timeout: "60s"}});
|
storageOptions: {timeout: "60s"}
|
||||||
|
});
|
||||||
```
|
```
|
||||||
|
|
||||||
Getting even more specific, you can set the `timeout` for only a particular table:
|
Getting even more specific, you can set the `timeout` for only a particular table:
|
||||||
@@ -101,10 +140,25 @@ Getting even more specific, you can set the `timeout` for only a particular tabl
|
|||||||
)
|
)
|
||||||
```
|
```
|
||||||
|
|
||||||
=== "JavaScript"
|
=== "TypeScript"
|
||||||
|
|
||||||
|
=== "@lancedb/lancedb"
|
||||||
|
|
||||||
<!-- skip-test -->
|
<!-- skip-test -->
|
||||||
```javascript
|
```ts
|
||||||
|
import * as lancedb from "@lancedb/lancedb";
|
||||||
|
const db = await lancedb.connect("s3://bucket/path");
|
||||||
|
const table = db.createTable(
|
||||||
|
"table",
|
||||||
|
[{ a: 1, b: 2}],
|
||||||
|
{storageOptions: {timeout: "60s"}}
|
||||||
|
);
|
||||||
|
```
|
||||||
|
|
||||||
|
=== "vectordb (deprecated)"
|
||||||
|
|
||||||
|
<!-- skip-test -->
|
||||||
|
```ts
|
||||||
const lancedb = require("lancedb");
|
const lancedb = require("lancedb");
|
||||||
const db = await lancedb.connect("s3://bucket/path");
|
const db = await lancedb.connect("s3://bucket/path");
|
||||||
const table = db.createTable(
|
const table = db.createTable(
|
||||||
@@ -135,7 +189,6 @@ There are several options that can be set for all object stores, mostly related
|
|||||||
| `proxy_ca_certificate` | PEM-formatted CA certificate for proxy connections. |
|
| `proxy_ca_certificate` | PEM-formatted CA certificate for proxy connections. |
|
||||||
| `proxy_excludes` | List of hosts that bypass the proxy. This is a comma-separated list of domains and IP masks. Any subdomain of the provided domain will be bypassed. For example, `example.com, 192.168.1.0/24` would bypass `https://api.example.com`, `https://www.example.com`, and any IP in the range `192.168.1.0/24`. |
|
| `proxy_excludes` | List of hosts that bypass the proxy. This is a comma-separated list of domains and IP masks. Any subdomain of the provided domain will be bypassed. For example, `example.com, 192.168.1.0/24` would bypass `https://api.example.com`, `https://www.example.com`, and any IP in the range `192.168.1.0/24`. |
|
||||||
|
|
||||||
|
|
||||||
### AWS S3
|
### AWS S3
|
||||||
|
|
||||||
To configure credentials for AWS S3, you can use the `AWS_ACCESS_KEY_ID`, `AWS_SECRET_ACCESS_KEY`, and `AWS_SESSION_TOKEN` keys. Region can also be set, but it is not mandatory when using AWS.
|
To configure credentials for AWS S3, you can use the `AWS_ACCESS_KEY_ID`, `AWS_SECRET_ACCESS_KEY`, and `AWS_SESSION_TOKEN` keys. Region can also be set, but it is not mandatory when using AWS.
|
||||||
@@ -155,9 +208,27 @@ These can be set as environment variables or passed in the `storage_options` par
|
|||||||
)
|
)
|
||||||
```
|
```
|
||||||
|
|
||||||
=== "JavaScript"
|
=== "TypeScript"
|
||||||
|
|
||||||
```javascript
|
=== "@lancedb/lancedb"
|
||||||
|
|
||||||
|
```ts
|
||||||
|
import * as lancedb from "@lancedb/lancedb";
|
||||||
|
const db = await lancedb.connect(
|
||||||
|
"s3://bucket/path",
|
||||||
|
{
|
||||||
|
storageOptions: {
|
||||||
|
awsAccessKeyId: "my-access-key",
|
||||||
|
awsSecretAccessKey: "my-secret-key",
|
||||||
|
awsSessionToken: "my-session-token",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
);
|
||||||
|
```
|
||||||
|
|
||||||
|
=== "vectordb (deprecated)"
|
||||||
|
|
||||||
|
```ts
|
||||||
const lancedb = require("lancedb");
|
const lancedb = require("lancedb");
|
||||||
const db = await lancedb.connect(
|
const db = await lancedb.connect(
|
||||||
"s3://bucket/path",
|
"s3://bucket/path",
|
||||||
@@ -188,7 +259,6 @@ The following keys can be used as both environment variables or keys in the `sto
|
|||||||
| `aws_sse_kms_key_id` | The KMS key ID to use for server-side encryption. If set, `aws_server_side_encryption` must be `"aws:kms"` or `"aws:kms:dsse"`. |
|
| `aws_sse_kms_key_id` | The KMS key ID to use for server-side encryption. If set, `aws_server_side_encryption` must be `"aws:kms"` or `"aws:kms:dsse"`. |
|
||||||
| `aws_sse_bucket_key_enabled` | Whether to use bucket keys for server-side encryption. |
|
| `aws_sse_bucket_key_enabled` | Whether to use bucket keys for server-side encryption. |
|
||||||
|
|
||||||
|
|
||||||
!!! tip "Automatic cleanup for failed writes"
|
!!! tip "Automatic cleanup for failed writes"
|
||||||
|
|
||||||
LanceDB uses [multi-part uploads](https://docs.aws.amazon.com/AmazonS3/latest/userguide/mpuoverview.html) when writing data to S3 in order to maximize write speed. LanceDB will abort these uploads when it shuts down gracefully, such as when cancelled by keyboard interrupt. However, in the rare case that LanceDB crashes, it is possible that some data will be left lingering in your account. To cleanup this data, we recommend (as AWS themselves do) that you setup a lifecycle rule to delete in-progress uploads after 7 days. See the AWS guide:
|
LanceDB uses [multi-part uploads](https://docs.aws.amazon.com/AmazonS3/latest/userguide/mpuoverview.html) when writing data to S3 in order to maximize write speed. LanceDB will abort these uploads when it shuts down gracefully, such as when cancelled by keyboard interrupt. However, in the rare case that LanceDB crashes, it is possible that some data will be left lingering in your account. To cleanup this data, we recommend (as AWS themselves do) that you setup a lifecycle rule to delete in-progress uploads after 7 days. See the AWS guide:
|
||||||
@@ -265,6 +335,108 @@ For **read-only access**, LanceDB will need a policy such as:
|
|||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
|
#### DynamoDB Commit Store for concurrent writes
|
||||||
|
|
||||||
|
By default, S3 does not support concurrent writes. Having two or more processes
|
||||||
|
writing to the same table at the same time can lead to data corruption. This is
|
||||||
|
because S3, unlike other object stores, does not have any atomic put or copy
|
||||||
|
operation.
|
||||||
|
|
||||||
|
To enable concurrent writes, you can configure LanceDB to use a DynamoDB table
|
||||||
|
as a commit store. This table will be used to coordinate writes between
|
||||||
|
different processes. To enable this feature, you must modify your connection
|
||||||
|
URI to use the `s3+ddb` scheme and add a query parameter `ddbTableName` with the
|
||||||
|
name of the table to use.
|
||||||
|
|
||||||
|
=== "Python"
|
||||||
|
|
||||||
|
```python
|
||||||
|
import lancedb
|
||||||
|
db = await lancedb.connect_async(
|
||||||
|
"s3+ddb://bucket/path?ddbTableName=my-dynamodb-table",
|
||||||
|
)
|
||||||
|
```
|
||||||
|
|
||||||
|
=== "JavaScript"
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
const lancedb = require("lancedb");
|
||||||
|
|
||||||
|
const db = await lancedb.connect(
|
||||||
|
"s3+ddb://bucket/path?ddbTableName=my-dynamodb-table",
|
||||||
|
);
|
||||||
|
```
|
||||||
|
|
||||||
|
The DynamoDB table must be created with the following schema:
|
||||||
|
|
||||||
|
- Hash key: `base_uri` (string)
|
||||||
|
- Range key: `version` (number)
|
||||||
|
|
||||||
|
You can create this programmatically with:
|
||||||
|
|
||||||
|
=== "Python"
|
||||||
|
|
||||||
|
<!-- skip-test -->
|
||||||
|
```python
|
||||||
|
import boto3
|
||||||
|
|
||||||
|
dynamodb = boto3.client("dynamodb")
|
||||||
|
table = dynamodb.create_table(
|
||||||
|
TableName=table_name,
|
||||||
|
KeySchema=[
|
||||||
|
{"AttributeName": "base_uri", "KeyType": "HASH"},
|
||||||
|
{"AttributeName": "version", "KeyType": "RANGE"},
|
||||||
|
],
|
||||||
|
AttributeDefinitions=[
|
||||||
|
{"AttributeName": "base_uri", "AttributeType": "S"},
|
||||||
|
{"AttributeName": "version", "AttributeType": "N"},
|
||||||
|
],
|
||||||
|
ProvisionedThroughput={"ReadCapacityUnits": 1, "WriteCapacityUnits": 1},
|
||||||
|
)
|
||||||
|
```
|
||||||
|
|
||||||
|
=== "JavaScript"
|
||||||
|
|
||||||
|
<!-- skip-test -->
|
||||||
|
```javascript
|
||||||
|
import {
|
||||||
|
CreateTableCommand,
|
||||||
|
DynamoDBClient,
|
||||||
|
} from "@aws-sdk/client-dynamodb";
|
||||||
|
|
||||||
|
const dynamodb = new DynamoDBClient({
|
||||||
|
region: CONFIG.awsRegion,
|
||||||
|
credentials: {
|
||||||
|
accessKeyId: CONFIG.awsAccessKeyId,
|
||||||
|
secretAccessKey: CONFIG.awsSecretAccessKey,
|
||||||
|
},
|
||||||
|
endpoint: CONFIG.awsEndpoint,
|
||||||
|
});
|
||||||
|
const command = new CreateTableCommand({
|
||||||
|
TableName: table_name,
|
||||||
|
AttributeDefinitions: [
|
||||||
|
{
|
||||||
|
AttributeName: "base_uri",
|
||||||
|
AttributeType: "S",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
AttributeName: "version",
|
||||||
|
AttributeType: "N",
|
||||||
|
},
|
||||||
|
],
|
||||||
|
KeySchema: [
|
||||||
|
{ AttributeName: "base_uri", KeyType: "HASH" },
|
||||||
|
{ AttributeName: "version", KeyType: "RANGE" },
|
||||||
|
],
|
||||||
|
ProvisionedThroughput: {
|
||||||
|
ReadCapacityUnits: 1,
|
||||||
|
WriteCapacityUnits: 1,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
await client.send(command);
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
#### S3-compatible stores
|
#### S3-compatible stores
|
||||||
|
|
||||||
LanceDB can also connect to S3-compatible stores, such as MinIO. To do so, you must specify both region and endpoint:
|
LanceDB can also connect to S3-compatible stores, such as MinIO. To do so, you must specify both region and endpoint:
|
||||||
@@ -282,9 +454,26 @@ LanceDB can also connect to S3-compatible stores, such as MinIO. To do so, you m
|
|||||||
)
|
)
|
||||||
```
|
```
|
||||||
|
|
||||||
=== "JavaScript"
|
=== "TypeScript"
|
||||||
|
|
||||||
```javascript
|
=== "@lancedb/lancedb"
|
||||||
|
|
||||||
|
```ts
|
||||||
|
import * as lancedb from "@lancedb/lancedb";
|
||||||
|
const db = await lancedb.connect(
|
||||||
|
"s3://bucket/path",
|
||||||
|
{
|
||||||
|
storageOptions: {
|
||||||
|
region: "us-east-1",
|
||||||
|
endpoint: "http://minio:9000",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
);
|
||||||
|
```
|
||||||
|
|
||||||
|
=== "vectordb (deprecated)"
|
||||||
|
|
||||||
|
```ts
|
||||||
const lancedb = require("lancedb");
|
const lancedb = require("lancedb");
|
||||||
const db = await lancedb.connect(
|
const db = await lancedb.connect(
|
||||||
"s3://bucket/path",
|
"s3://bucket/path",
|
||||||
@@ -326,10 +515,12 @@ To configure LanceDB to use an S3 Express endpoint, you must set the storage opt
|
|||||||
)
|
)
|
||||||
```
|
```
|
||||||
|
|
||||||
=== "JavaScript"
|
=== "TypeScript"
|
||||||
|
|
||||||
```javascript
|
=== "@lancedb/lancedb"
|
||||||
const lancedb = require("lancedb");
|
|
||||||
|
```ts
|
||||||
|
import * as lancedb from "@lancedb/lancedb";
|
||||||
const db = await lancedb.connect(
|
const db = await lancedb.connect(
|
||||||
"s3://my-bucket--use1-az4--x-s3/path",
|
"s3://my-bucket--use1-az4--x-s3/path",
|
||||||
{
|
{
|
||||||
@@ -341,6 +532,20 @@ To configure LanceDB to use an S3 Express endpoint, you must set the storage opt
|
|||||||
);
|
);
|
||||||
```
|
```
|
||||||
|
|
||||||
|
=== "vectordb (deprecated)"
|
||||||
|
|
||||||
|
```ts
|
||||||
|
const lancedb = require("lancedb");
|
||||||
|
const db = await lancedb.connect(
|
||||||
|
"s3://my-bucket--use1-az4--x-s3/path",
|
||||||
|
{
|
||||||
|
storageOptions: {
|
||||||
|
region: "us-east-1",
|
||||||
|
s3Express: "true",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
);
|
||||||
|
```
|
||||||
|
|
||||||
### Google Cloud Storage
|
### Google Cloud Storage
|
||||||
|
|
||||||
@@ -359,9 +564,25 @@ GCS credentials are configured by setting the `GOOGLE_SERVICE_ACCOUNT` environme
|
|||||||
)
|
)
|
||||||
```
|
```
|
||||||
|
|
||||||
=== "JavaScript"
|
=== "TypeScript"
|
||||||
|
|
||||||
```javascript
|
=== "@lancedb/lancedb"
|
||||||
|
|
||||||
|
```ts
|
||||||
|
import * as lancedb from "@lancedb/lancedb";
|
||||||
|
const db = await lancedb.connect(
|
||||||
|
"gs://my-bucket/my-database",
|
||||||
|
{
|
||||||
|
storageOptions: {
|
||||||
|
serviceAccount: "path/to/service-account.json",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
);
|
||||||
|
```
|
||||||
|
|
||||||
|
=== "vectordb (deprecated)"
|
||||||
|
|
||||||
|
```ts
|
||||||
const lancedb = require("lancedb");
|
const lancedb = require("lancedb");
|
||||||
const db = await lancedb.connect(
|
const db = await lancedb.connect(
|
||||||
"gs://my-bucket/my-database",
|
"gs://my-bucket/my-database",
|
||||||
@@ -373,12 +594,10 @@ GCS credentials are configured by setting the `GOOGLE_SERVICE_ACCOUNT` environme
|
|||||||
);
|
);
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
||||||
!!! info "HTTP/2 support"
|
!!! info "HTTP/2 support"
|
||||||
|
|
||||||
By default, GCS uses HTTP/1 for communication, as opposed to HTTP/2. This improves maximum throughput significantly. However, if you wish to use HTTP/2 for some reason, you can set the environment variable `HTTP1_ONLY` to `false`.
|
By default, GCS uses HTTP/1 for communication, as opposed to HTTP/2. This improves maximum throughput significantly. However, if you wish to use HTTP/2 for some reason, you can set the environment variable `HTTP1_ONLY` to `false`.
|
||||||
|
|
||||||
|
|
||||||
The following keys can be used as both environment variables or keys in the `storage_options` parameter:
|
The following keys can be used as both environment variables or keys in the `storage_options` parameter:
|
||||||
<!-- source: https://docs.rs/object_store/latest/object_store/gcp/enum.GoogleConfigKey.html -->
|
<!-- source: https://docs.rs/object_store/latest/object_store/gcp/enum.GoogleConfigKey.html -->
|
||||||
|
|
||||||
@@ -388,7 +607,6 @@ The following keys can be used as both environment variables or keys in the `sto
|
|||||||
| ``google_service_account_key`` | The serialized service account key. |
|
| ``google_service_account_key`` | The serialized service account key. |
|
||||||
| ``google_application_credentials`` | Path to the application credentials. |
|
| ``google_application_credentials`` | Path to the application credentials. |
|
||||||
|
|
||||||
|
|
||||||
### Azure Blob Storage
|
### Azure Blob Storage
|
||||||
|
|
||||||
Azure Blob Storage credentials can be configured by setting the `AZURE_STORAGE_ACCOUNT_NAME`and `AZURE_STORAGE_ACCOUNT_KEY` environment variables. Alternatively, you can pass the account name and key in the `storage_options` parameter:
|
Azure Blob Storage credentials can be configured by setting the `AZURE_STORAGE_ACCOUNT_NAME`and `AZURE_STORAGE_ACCOUNT_KEY` environment variables. Alternatively, you can pass the account name and key in the `storage_options` parameter:
|
||||||
@@ -407,9 +625,26 @@ Azure Blob Storage credentials can be configured by setting the `AZURE_STORAGE_A
|
|||||||
)
|
)
|
||||||
```
|
```
|
||||||
|
|
||||||
=== "JavaScript"
|
=== "TypeScript"
|
||||||
|
|
||||||
```javascript
|
=== "@lancedb/lancedb"
|
||||||
|
|
||||||
|
```ts
|
||||||
|
import * as lancedb from "@lancedb/lancedb";
|
||||||
|
const db = await lancedb.connect(
|
||||||
|
"az://my-container/my-database",
|
||||||
|
{
|
||||||
|
storageOptions: {
|
||||||
|
accountName: "some-account",
|
||||||
|
accountKey: "some-key",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
);
|
||||||
|
```
|
||||||
|
|
||||||
|
=== "vectordb (deprecated)"
|
||||||
|
|
||||||
|
```ts
|
||||||
const lancedb = require("lancedb");
|
const lancedb = require("lancedb");
|
||||||
const db = await lancedb.connect(
|
const db = await lancedb.connect(
|
||||||
"az://my-container/my-database",
|
"az://my-container/my-database",
|
||||||
|
|||||||
@@ -8,26 +8,39 @@ This guide will show how to create tables, insert data into them, and update the
|
|||||||
|
|
||||||
## Creating a LanceDB Table
|
## Creating a LanceDB Table
|
||||||
|
|
||||||
|
Initialize a LanceDB connection and create a table
|
||||||
|
|
||||||
=== "Python"
|
=== "Python"
|
||||||
Initialize a LanceDB connection and create a table using one of the many methods listed below.
|
|
||||||
|
|
||||||
```python
|
```python
|
||||||
import lancedb
|
import lancedb
|
||||||
db = lancedb.connect("./.lancedb")
|
db = lancedb.connect("./.lancedb")
|
||||||
```
|
```
|
||||||
|
|
||||||
=== "Javascript"
|
LanceDB allows ingesting data from various sources - `dict`, `list[dict]`, `pd.DataFrame`, `pa.Table` or a `Iterator[pa.RecordBatch]`. Let's take a look at some of the these.
|
||||||
|
|
||||||
Initialize a VectorDB connection and create a table using one of the many methods listed below.
|
=== "Typescript[^1]"
|
||||||
|
|
||||||
```javascript
|
=== "@lancedb/lancedb"
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import * as lancedb from "@lancedb/lancedb";
|
||||||
|
import * as arrow from "apache-arrow";
|
||||||
|
|
||||||
|
const uri = "data/sample-lancedb";
|
||||||
|
const db = await lancedb.connect(uri);
|
||||||
|
```
|
||||||
|
|
||||||
|
=== "vectordb (deprecated)"
|
||||||
|
|
||||||
|
```typescript
|
||||||
const lancedb = require("vectordb");
|
const lancedb = require("vectordb");
|
||||||
|
|
||||||
const uri = "data/sample-lancedb";
|
const uri = "data/sample-lancedb";
|
||||||
const db = await lancedb.connect(uri);
|
const db = await lancedb.connect(uri);
|
||||||
```
|
```
|
||||||
|
|
||||||
LanceDB allows ingesting data from various sources - `dict`, `list[dict]`, `pd.DataFrame`, `pa.Table` or a `Iterator[pa.RecordBatch]`. Let's take a look at some of the these.
|
|
||||||
|
|
||||||
### From list of tuples or dictionaries
|
### From list of tuples or dictionaries
|
||||||
|
|
||||||
@@ -45,6 +58,7 @@ This guide will show how to create tables, insert data into them, and update the
|
|||||||
|
|
||||||
db["my_table"].head()
|
db["my_table"].head()
|
||||||
```
|
```
|
||||||
|
|
||||||
!!! info "Note"
|
!!! info "Note"
|
||||||
If the table already exists, LanceDB will raise an error by default.
|
If the table already exists, LanceDB will raise an error by default.
|
||||||
|
|
||||||
@@ -63,24 +77,52 @@ This guide will show how to create tables, insert data into them, and update the
|
|||||||
db.create_table("name", data, mode="overwrite")
|
db.create_table("name", data, mode="overwrite")
|
||||||
```
|
```
|
||||||
|
|
||||||
=== "Javascript"
|
=== "Typescript[^1]"
|
||||||
You can create a LanceDB table in JavaScript using an array of JSON records as follows.
|
You can create a LanceDB table in JavaScript using an array of records as follows.
|
||||||
|
|
||||||
```javascript
|
=== "@lancedb/lancedb"
|
||||||
const tb = await db.createTable("my_table", [{
|
|
||||||
"vector": [3.1, 4.1],
|
|
||||||
"item": "foo",
|
```ts
|
||||||
"price": 10.0
|
--8<-- "nodejs/examples/basic.ts:create_table"
|
||||||
}, {
|
|
||||||
"vector": [5.9, 26.5],
|
|
||||||
"item": "bar",
|
|
||||||
"price": 20.0
|
|
||||||
}]);
|
|
||||||
```
|
```
|
||||||
!!! info "Note"
|
|
||||||
If the table already exists, LanceDB will raise an error by default. If you want to overwrite the table, you need to specify the `WriteMode` in the createTable function.
|
|
||||||
|
|
||||||
```javascript
|
This will infer the schema from the provided data. If you want to explicitly provide a schema, you can use `apache-arrow` to declare a schema
|
||||||
|
|
||||||
|
```ts
|
||||||
|
--8<-- "nodejs/examples/basic.ts:create_table_with_schema"
|
||||||
|
```
|
||||||
|
|
||||||
|
!!! info "Note"
|
||||||
|
`createTable` supports an optional `existsOk` parameter. When set to true
|
||||||
|
and the table exists, then it simply opens the existing table. The data you
|
||||||
|
passed in will NOT be appended to the table in that case.
|
||||||
|
|
||||||
|
|
||||||
|
```ts
|
||||||
|
--8<-- "nodejs/examples/basic.ts:create_table_exists_ok"
|
||||||
|
```
|
||||||
|
|
||||||
|
Sometimes you want to make sure that you start fresh. If you want to
|
||||||
|
overwrite the table, you can pass in mode: "overwrite" to the createTable function.
|
||||||
|
|
||||||
|
```ts
|
||||||
|
--8<-- "nodejs/examples/basic.ts:create_table_overwrite"
|
||||||
|
```
|
||||||
|
|
||||||
|
=== "vectordb (deprecated)"
|
||||||
|
|
||||||
|
```ts
|
||||||
|
--8<-- "docs/src/basic_legacy.ts:create_table"
|
||||||
|
```
|
||||||
|
|
||||||
|
!!! warning
|
||||||
|
`existsOk` option is not supported in `vectordb`
|
||||||
|
|
||||||
|
Sometimes you want to make sure that you start fresh. If you want to
|
||||||
|
overwrite the table, you can pass in mode: "overwrite" to the createTable function.
|
||||||
|
|
||||||
|
```ts
|
||||||
const table = await con.createTable(tableName, data, { writeMode: WriteMode.Overwrite })
|
const table = await con.createTable(tableName, data, { writeMode: WriteMode.Overwrite })
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -99,6 +141,7 @@ This guide will show how to create tables, insert data into them, and update the
|
|||||||
|
|
||||||
db["my_table"].head()
|
db["my_table"].head()
|
||||||
```
|
```
|
||||||
|
|
||||||
!!! info "Note"
|
!!! info "Note"
|
||||||
Data is converted to Arrow before being written to disk. For maximum control over how data is saved, either provide the PyArrow schema to convert to or else provide a PyArrow Table directly.
|
Data is converted to Arrow before being written to disk. For maximum control over how data is saved, either provide the PyArrow schema to convert to or else provide a PyArrow Table directly.
|
||||||
|
|
||||||
@@ -133,10 +176,11 @@ This guide will show how to create tables, insert data into them, and update the
|
|||||||
```
|
```
|
||||||
|
|
||||||
### From an Arrow Table
|
### From an Arrow Table
|
||||||
=== "Python"
|
|
||||||
You can also create LanceDB tables directly from Arrow tables.
|
You can also create LanceDB tables directly from Arrow tables.
|
||||||
LanceDB supports float16 data type!
|
LanceDB supports float16 data type!
|
||||||
|
|
||||||
|
=== "Python"
|
||||||
|
|
||||||
```python
|
```python
|
||||||
import pyarrows as pa
|
import pyarrows as pa
|
||||||
import numpy as np
|
import numpy as np
|
||||||
@@ -160,11 +204,17 @@ This guide will show how to create tables, insert data into them, and update the
|
|||||||
tbl = db.create_table("f16_tbl", data, schema=schema)
|
tbl = db.create_table("f16_tbl", data, schema=schema)
|
||||||
```
|
```
|
||||||
|
|
||||||
=== "Javascript"
|
=== "Typescript[^1]"
|
||||||
You can also create LanceDB tables directly from Arrow tables.
|
|
||||||
LanceDB supports Float16 data type!
|
|
||||||
|
|
||||||
```javascript
|
=== "@lancedb/lancedb"
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
--8<-- "nodejs/examples/basic.ts:create_f16_table"
|
||||||
|
```
|
||||||
|
|
||||||
|
=== "vectordb (deprecated)"
|
||||||
|
|
||||||
|
```typescript
|
||||||
--8<-- "docs/src/basic_legacy.ts:create_f16_table"
|
--8<-- "docs/src/basic_legacy.ts:create_f16_table"
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -329,23 +379,24 @@ You can also use iterators of other types like Pandas DataFrame or Pylists direc
|
|||||||
tbl = db.open_table("my_table")
|
tbl = db.open_table("my_table")
|
||||||
```
|
```
|
||||||
|
|
||||||
=== "JavaScript"
|
=== "Typescript[^1]"
|
||||||
|
|
||||||
If you forget the name of your table, you can always get a listing of all table names.
|
If you forget the name of your table, you can always get a listing of all table names.
|
||||||
|
|
||||||
```javascript
|
```typescript
|
||||||
console.log(await db.tableNames());
|
console.log(await db.tableNames());
|
||||||
```
|
```
|
||||||
|
|
||||||
Then, you can open any existing tables.
|
Then, you can open any existing tables.
|
||||||
|
|
||||||
```javascript
|
```typescript
|
||||||
const tbl = await db.openTable("my_table");
|
const tbl = await db.openTable("my_table");
|
||||||
```
|
```
|
||||||
|
|
||||||
## Creating empty table
|
## Creating empty table
|
||||||
|
You can create an empty table for scenarios where you want to add data to the table later. An example would be when you want to collect data from a stream/external file and then add it to a table in batches.
|
||||||
|
|
||||||
=== "Python"
|
=== "Python"
|
||||||
In Python, you can create an empty table for scenarios where you want to add data to the table later. An example would be when you want to collect data from a stream/external file and then add it to a table in batches.
|
|
||||||
|
|
||||||
```python
|
```python
|
||||||
|
|
||||||
@@ -382,9 +433,23 @@ You can also use iterators of other types like Pandas DataFrame or Pylists direc
|
|||||||
|
|
||||||
Once the empty table has been created, you can add data to it via the various methods listed in the [Adding to a table](#adding-to-a-table) section.
|
Once the empty table has been created, you can add data to it via the various methods listed in the [Adding to a table](#adding-to-a-table) section.
|
||||||
|
|
||||||
|
=== "Typescript[^1]"
|
||||||
|
|
||||||
|
=== "@lancedb/lancedb"
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
--8<-- "nodejs/examples/basic.ts:create_empty_table"
|
||||||
|
```
|
||||||
|
|
||||||
|
=== "vectordb (deprecated)"
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
--8<-- "docs/src/basic_legacy.ts:create_empty_table"
|
||||||
|
```
|
||||||
|
|
||||||
## Adding to a table
|
## Adding to a table
|
||||||
|
|
||||||
After a table has been created, you can always add more data to it using the various methods available.
|
After a table has been created, you can always add more data to it usind the `add` method
|
||||||
|
|
||||||
=== "Python"
|
=== "Python"
|
||||||
You can add any of the valid data structures accepted by LanceDB table, i.e, `dict`, `list[dict]`, `pd.DataFrame`, or `Iterator[pa.RecordBatch]`. Below are some examples.
|
You can add any of the valid data structures accepted by LanceDB table, i.e, `dict`, `list[dict]`, `pd.DataFrame`, or `Iterator[pa.RecordBatch]`. Below are some examples.
|
||||||
@@ -472,9 +537,7 @@ After a table has been created, you can always add more data to it using the var
|
|||||||
tbl.add(models)
|
tbl.add(models)
|
||||||
```
|
```
|
||||||
|
|
||||||
|
=== "Typescript[^1]"
|
||||||
|
|
||||||
=== "JavaScript"
|
|
||||||
|
|
||||||
```javascript
|
```javascript
|
||||||
await tbl.add(
|
await tbl.add(
|
||||||
@@ -530,15 +593,15 @@ Use the `delete()` method on tables to delete rows from a table. To choose which
|
|||||||
# 0 3 [5.0, 6.0]
|
# 0 3 [5.0, 6.0]
|
||||||
```
|
```
|
||||||
|
|
||||||
=== "JavaScript"
|
=== "Typescript[^1]"
|
||||||
|
|
||||||
```javascript
|
```ts
|
||||||
await tbl.delete('item = "fizz"')
|
await tbl.delete('item = "fizz"')
|
||||||
```
|
```
|
||||||
|
|
||||||
### Deleting row with specific column value
|
### Deleting row with specific column value
|
||||||
|
|
||||||
```javascript
|
```ts
|
||||||
const con = await lancedb.connect("./.lancedb")
|
const con = await lancedb.connect("./.lancedb")
|
||||||
const data = [
|
const data = [
|
||||||
{id: 1, vector: [1, 2]},
|
{id: 1, vector: [1, 2]},
|
||||||
@@ -552,7 +615,7 @@ Use the `delete()` method on tables to delete rows from a table. To choose which
|
|||||||
|
|
||||||
### Delete from a list of values
|
### Delete from a list of values
|
||||||
|
|
||||||
```javascript
|
```ts
|
||||||
const to_remove = [1, 5];
|
const to_remove = [1, 5];
|
||||||
await tbl.delete(`id IN (${to_remove.join(",")})`)
|
await tbl.delete(`id IN (${to_remove.join(",")})`)
|
||||||
await tbl.countRows() // Returns 1
|
await tbl.countRows() // Returns 1
|
||||||
@@ -609,11 +672,32 @@ This can be used to update zero to all rows depending on how many rows match the
|
|||||||
2 2 [10.0, 10.0]
|
2 2 [10.0, 10.0]
|
||||||
```
|
```
|
||||||
|
|
||||||
=== "JavaScript/Typescript"
|
=== "Typescript[^1]"
|
||||||
|
|
||||||
|
=== "@lancedb/lancedb"
|
||||||
|
|
||||||
|
API Reference: [lancedb.Table.update](../js/classes/Table.md/#update)
|
||||||
|
|
||||||
|
```ts
|
||||||
|
import * as lancedb from "@lancedb/lancedb";
|
||||||
|
|
||||||
|
const db = await lancedb.connect("./.lancedb");
|
||||||
|
|
||||||
|
const data = [
|
||||||
|
{x: 1, vector: [1, 2]},
|
||||||
|
{x: 2, vector: [3, 4]},
|
||||||
|
{x: 3, vector: [5, 6]},
|
||||||
|
];
|
||||||
|
const tbl = await db.createTable("my_table", data)
|
||||||
|
|
||||||
|
await tbl.update({vector: [10, 10]}, { where: "x = 2"})
|
||||||
|
```
|
||||||
|
|
||||||
|
=== "vectordb (deprecated)"
|
||||||
|
|
||||||
API Reference: [vectordb.Table.update](../javascript/interfaces/Table.md/#update)
|
API Reference: [vectordb.Table.update](../javascript/interfaces/Table.md/#update)
|
||||||
|
|
||||||
```javascript
|
```ts
|
||||||
const lancedb = require("vectordb");
|
const lancedb = require("vectordb");
|
||||||
|
|
||||||
const db = await lancedb.connect("./.lancedb");
|
const db = await lancedb.connect("./.lancedb");
|
||||||
@@ -628,6 +712,8 @@ This can be used to update zero to all rows depending on how many rows match the
|
|||||||
await tbl.update({ where: "x = 2", values: {vector: [10, 10]} })
|
await tbl.update({ where: "x = 2", values: {vector: [10, 10]} })
|
||||||
```
|
```
|
||||||
|
|
||||||
|
#### Updating using a sql query
|
||||||
|
|
||||||
The `values` parameter is used to provide the new values for the columns as literal values. You can also use the `values_sql` / `valuesSql` parameter to provide SQL expressions for the new values. For example, you can use `values_sql="x + 1"` to increment the value of the `x` column by 1.
|
The `values` parameter is used to provide the new values for the columns as literal values. You can also use the `values_sql` / `valuesSql` parameter to provide SQL expressions for the new values. For example, you can use `values_sql="x + 1"` to increment the value of the `x` column by 1.
|
||||||
|
|
||||||
=== "Python"
|
=== "Python"
|
||||||
@@ -647,9 +733,15 @@ The `values` parameter is used to provide the new values for the columns as lite
|
|||||||
2 3 [10.0, 10.0]
|
2 3 [10.0, 10.0]
|
||||||
```
|
```
|
||||||
|
|
||||||
=== "JavaScript/Typescript"
|
=== "Typescript[^1]"
|
||||||
|
|
||||||
```javascript
|
=== "@lancedb/lancedb"
|
||||||
|
|
||||||
|
Coming Soon!
|
||||||
|
|
||||||
|
=== "vectordb (deprecated)"
|
||||||
|
|
||||||
|
```ts
|
||||||
await tbl.update({ valuesSql: { x: "x + 1" } })
|
await tbl.update({ valuesSql: { x: "x + 1" } })
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -672,7 +764,7 @@ Use the `drop_table()` method on the database to remove a table.
|
|||||||
By default, if the table does not exist an exception is raised. To suppress this,
|
By default, if the table does not exist an exception is raised. To suppress this,
|
||||||
you can pass in `ignore_missing=True`.
|
you can pass in `ignore_missing=True`.
|
||||||
|
|
||||||
=== "Javascript/Typescript"
|
=== "TypeScript"
|
||||||
|
|
||||||
```typescript
|
```typescript
|
||||||
--8<-- "docs/src/basic_legacy.ts:drop_table"
|
--8<-- "docs/src/basic_legacy.ts:drop_table"
|
||||||
@@ -726,18 +818,18 @@ There are three possible settings for `read_consistency_interval`:
|
|||||||
table.checkout_latest()
|
table.checkout_latest()
|
||||||
```
|
```
|
||||||
|
|
||||||
=== "JavaScript/Typescript"
|
=== "Typescript[^1]"
|
||||||
|
|
||||||
To set strong consistency, use `0`:
|
To set strong consistency, use `0`:
|
||||||
|
|
||||||
```javascript
|
```ts
|
||||||
const db = await lancedb.connect({ uri: "./.lancedb", readConsistencyInterval: 0 });
|
const db = await lancedb.connect({ uri: "./.lancedb", readConsistencyInterval: 0 });
|
||||||
const table = await db.openTable("my_table");
|
const table = await db.openTable("my_table");
|
||||||
```
|
```
|
||||||
|
|
||||||
For eventual consistency, specify the update interval as seconds:
|
For eventual consistency, specify the update interval as seconds:
|
||||||
|
|
||||||
```javascript
|
```ts
|
||||||
const db = await lancedb.connect({ uri: "./.lancedb", readConsistencyInterval: 5 });
|
const db = await lancedb.connect({ uri: "./.lancedb", readConsistencyInterval: 5 });
|
||||||
const table = await db.openTable("my_table");
|
const table = await db.openTable("my_table");
|
||||||
```
|
```
|
||||||
@@ -749,3 +841,5 @@ There are three possible settings for `read_consistency_interval`:
|
|||||||
## What's next?
|
## What's next?
|
||||||
|
|
||||||
Learn the best practices on creating an ANN index and getting the most out of it.
|
Learn the best practices on creating an ANN index and getting the most out of it.
|
||||||
|
|
||||||
|
[^1]: The `vectordb` package is a legacy package that is deprecated in favor of `@lancedb/lancedb`. The `vectordb` package will continue to receive bug fixes and security updates until September 2024. We recommend all new projects use `@lancedb/lancedb`. See the [migration guide](migration.md) for more information.
|
||||||
|
|||||||
@@ -1,4 +1,7 @@
|
|||||||
## Improving retriever performance
|
## Improving retriever performance
|
||||||
|
|
||||||
|
Try it yourself - <a href="https://colab.research.google.com/github/lancedb/lancedb/blob/main/docs/src/notebooks/lancedb_reranking.ipynb"><img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab"></a><br/>
|
||||||
|
|
||||||
VectorDBs are used as retrievers in recommender or chatbot-based systems for retrieving relevant data based on user queries. For example, the retriever is a critical component of Retrieval Augmented Generation (RAG) architectures. In this section, we will discuss how to improve the performance of retrievers.
|
VectorDBs are used as retrievers in recommender or chatbot-based systems for retrieving relevant data based on user queries. For example, the retriever is a critical component of Retrieval Augmented Generation (RAG) architectures. In this section, we will discuss how to improve the performance of retrievers.
|
||||||
|
|
||||||
There are several ways to improve the performance of retrievers. Some of the common techniques are:
|
There are several ways to improve the performance of retrievers. Some of the common techniques are:
|
||||||
|
|||||||
@@ -1,4 +1,6 @@
|
|||||||
Continuing from the previous example, we can now rerank the results using more complex rerankers.
|
Continuing from the previous section, we can now rerank the results using more complex rerankers.
|
||||||
|
|
||||||
|
Try it yourself - <a href="https://colab.research.google.com/github/lancedb/lancedb/blob/main/docs/src/notebooks/lancedb_reranking.ipynb"><img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab"></a><br/>
|
||||||
|
|
||||||
## Reranking search results
|
## Reranking search results
|
||||||
You can rerank any search results using a reranker. The syntax for reranking is as follows:
|
You can rerank any search results using a reranker. The syntax for reranking is as follows:
|
||||||
|
|||||||
82
docs/src/guides/tuning_retrievers/3_embed_tuning.md
Normal file
82
docs/src/guides/tuning_retrievers/3_embed_tuning.md
Normal file
@@ -0,0 +1,82 @@
|
|||||||
|
## Finetuning the Embedding Model
|
||||||
|
Try it yourself - <a href="https://colab.research.google.com/github/lancedb/lancedb/blob/main/docs/src/notebooks/embedding_tuner.ipynb"><img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab"></a><br/>
|
||||||
|
|
||||||
|
Another way to improve retriever performance is to fine-tune the embedding model itself. Fine-tuning the embedding model can help in learning better representations for the documents and queries in the dataset. This can be particularly useful when the dataset is very different from the pre-trained data used to train the embedding model.
|
||||||
|
|
||||||
|
We'll use the same dataset as in the previous sections. Start off by splitting the dataset into training and validation sets:
|
||||||
|
```python
|
||||||
|
from sklearn.model_selection import train_test_split
|
||||||
|
|
||||||
|
train_df, validation_df = train_test_split("data_qa.csv", test_size=0.2, random_state=42)
|
||||||
|
|
||||||
|
train_df.to_csv("data_train.csv", index=False)
|
||||||
|
validation_df.to_csv("data_val.csv", index=False)
|
||||||
|
```
|
||||||
|
|
||||||
|
You can use any tuning API to fine-tune embedding models. In this example, we'll utilise Llama-index as it also comes with utilities for synthetic data generation and training the model.
|
||||||
|
|
||||||
|
|
||||||
|
Then parse the dataset as llama-index text nodes and generate synthetic QA pairs from each node.
|
||||||
|
```python
|
||||||
|
from llama_index.core.node_parser import SentenceSplitter
|
||||||
|
from llama_index.readers.file import PagedCSVReader
|
||||||
|
from llama_index.finetuning import generate_qa_embedding_pairs
|
||||||
|
from llama_index.core.evaluation import EmbeddingQAFinetuneDataset
|
||||||
|
|
||||||
|
def load_corpus(file):
|
||||||
|
loader = PagedCSVReader(encoding="utf-8")
|
||||||
|
docs = loader.load_data(file=Path(file))
|
||||||
|
|
||||||
|
parser = SentenceSplitter()
|
||||||
|
nodes = parser.get_nodes_from_documents(docs)
|
||||||
|
|
||||||
|
return nodes
|
||||||
|
|
||||||
|
from llama_index.llms.openai import OpenAI
|
||||||
|
|
||||||
|
|
||||||
|
train_dataset = generate_qa_embedding_pairs(
|
||||||
|
llm=OpenAI(model="gpt-3.5-turbo"), nodes=train_nodes, verbose=False
|
||||||
|
)
|
||||||
|
val_dataset = generate_qa_embedding_pairs(
|
||||||
|
llm=OpenAI(model="gpt-3.5-turbo"), nodes=val_nodes, verbose=False
|
||||||
|
)
|
||||||
|
```
|
||||||
|
|
||||||
|
Now we'll use `SentenceTransformersFinetuneEngine` engine to fine-tune the model. You can also use `sentence-transformers` or `transformers` library to fine-tune the model.
|
||||||
|
|
||||||
|
```python
|
||||||
|
from llama_index.finetuning import SentenceTransformersFinetuneEngine
|
||||||
|
|
||||||
|
finetune_engine = SentenceTransformersFinetuneEngine(
|
||||||
|
train_dataset,
|
||||||
|
model_id="BAAI/bge-small-en-v1.5",
|
||||||
|
model_output_path="tuned_model",
|
||||||
|
val_dataset=val_dataset,
|
||||||
|
)
|
||||||
|
finetune_engine.finetune()
|
||||||
|
embed_model = finetune_engine.get_finetuned_model()
|
||||||
|
```
|
||||||
|
This saves the fine-tuned embedding model in the `tuned_model` folder.
|
||||||
|
|
||||||
|
# Evaluation results
|
||||||
|
In order to evaluate the retriever, you can either use this model to ingest the data into LanceDB directly or use llama-index's LanceDB integration to create a `VectorStoreIndex` and use it as a retriever.
|
||||||
|
On performing the same hit-rate evaluation as before, we see a significant improvement in the hit-rate across all query types.
|
||||||
|
|
||||||
|
### Baseline
|
||||||
|
| Query Type | Hit-rate@5 |
|
||||||
|
| --- | --- |
|
||||||
|
| Vector Search | 0.640 |
|
||||||
|
| Full-text Search | 0.595 |
|
||||||
|
| Reranked Vector Search | 0.677 |
|
||||||
|
| Reranked Full-text Search | 0.672 |
|
||||||
|
| Hybrid Search (w/ CohereReranker) | 0.759|
|
||||||
|
|
||||||
|
### Fine-tuned model ( 2 iterations )
|
||||||
|
| Query Type | Hit-rate@5 |
|
||||||
|
| --- | --- |
|
||||||
|
| Vector Search | 0.672 |
|
||||||
|
| Full-text Search | 0.595 |
|
||||||
|
| Reranked Vector Search | 0.754 |
|
||||||
|
| Reranked Full-text Search | 0.672|
|
||||||
|
| Hybrid Search (w/ CohereReranker) | 0.768 |
|
||||||
@@ -2,7 +2,7 @@
|
|||||||

|

|
||||||
|
|
||||||
## Quick Start
|
## Quick Start
|
||||||
You can load your document data using langchain's loaders, for this example we are using `TextLoader` and `OpenAIEmbeddings` as the embedding model.
|
You can load your document data using langchain's loaders, for this example we are using `TextLoader` and `OpenAIEmbeddings` as the embedding model. Check out the complete example here - [LangChain demo](../notebooks/langchain_example.ipynb)
|
||||||
```python
|
```python
|
||||||
import os
|
import os
|
||||||
from langchain.document_loaders import TextLoader
|
from langchain.document_loaders import TextLoader
|
||||||
@@ -38,6 +38,8 @@ The exhaustive list of parameters for `LanceDB` vector store are :
|
|||||||
- `api_key`: (Optional) API key to use for LanceDB cloud database. Defaults to `None`.
|
- `api_key`: (Optional) API key to use for LanceDB cloud database. Defaults to `None`.
|
||||||
- `region`: (Optional) Region to use for LanceDB cloud database. Only for LanceDB Cloud, defaults to `None`.
|
- `region`: (Optional) Region to use for LanceDB cloud database. Only for LanceDB Cloud, defaults to `None`.
|
||||||
- `mode`: (Optional) Mode to use for adding data to the table. Defaults to `'overwrite'`.
|
- `mode`: (Optional) Mode to use for adding data to the table. Defaults to `'overwrite'`.
|
||||||
|
- `reranker`: (Optional) The reranker to use for LanceDB.
|
||||||
|
- `relevance_score_fn`: (Optional[Callable[[float], float]]) Langchain relevance score function to be used. Defaults to `None`.
|
||||||
|
|
||||||
```python
|
```python
|
||||||
db_url = "db://lang_test" # url of db you created
|
db_url = "db://lang_test" # url of db you created
|
||||||
@@ -54,12 +56,14 @@ vector_store = LanceDB(
|
|||||||
```
|
```
|
||||||
|
|
||||||
### Methods
|
### Methods
|
||||||
To add texts and store respective embeddings automatically:
|
|
||||||
##### add_texts()
|
##### add_texts()
|
||||||
- `texts`: `Iterable` of strings to add to the vectorstore.
|
- `texts`: `Iterable` of strings to add to the vectorstore.
|
||||||
- `metadatas`: Optional `list[dict()]` of metadatas associated with the texts.
|
- `metadatas`: Optional `list[dict()]` of metadatas associated with the texts.
|
||||||
- `ids`: Optional `list` of ids to associate with the texts.
|
- `ids`: Optional `list` of ids to associate with the texts.
|
||||||
|
- `kwargs`: `Any`
|
||||||
|
|
||||||
|
This method adds texts and stores respective embeddings automatically.
|
||||||
|
|
||||||
```python
|
```python
|
||||||
vector_store.add_texts(texts = ['test_123'], metadatas =[{'source' :'wiki'}])
|
vector_store.add_texts(texts = ['test_123'], metadatas =[{'source' :'wiki'}])
|
||||||
@@ -74,7 +78,6 @@ pd_df.to_csv("docsearch.csv", index=False)
|
|||||||
# you can also create a new vector store object using an older connection object:
|
# you can also create a new vector store object using an older connection object:
|
||||||
vector_store = LanceDB(connection=tbl, embedding=embeddings)
|
vector_store = LanceDB(connection=tbl, embedding=embeddings)
|
||||||
```
|
```
|
||||||
For index creation make sure your table has enough data in it. An ANN index is usually not needed for datasets ~100K vectors. For large-scale (>1M) or higher dimension vectors, it is beneficial to create an ANN index.
|
|
||||||
##### create_index()
|
##### create_index()
|
||||||
- `col_name`: `Optional[str] = None`
|
- `col_name`: `Optional[str] = None`
|
||||||
- `vector_col`: `Optional[str] = None`
|
- `vector_col`: `Optional[str] = None`
|
||||||
@@ -82,6 +85,8 @@ For index creation make sure your table has enough data in it. An ANN index is u
|
|||||||
- `num_sub_vectors`: `Optional[int] = 96`
|
- `num_sub_vectors`: `Optional[int] = 96`
|
||||||
- `index_cache_size`: `Optional[int] = None`
|
- `index_cache_size`: `Optional[int] = None`
|
||||||
|
|
||||||
|
This method creates an index for the vector store. For index creation make sure your table has enough data in it. An ANN index is usually not needed for datasets ~100K vectors. For large-scale (>1M) or higher dimension vectors, it is beneficial to create an ANN index.
|
||||||
|
|
||||||
```python
|
```python
|
||||||
# for creating vector index
|
# for creating vector index
|
||||||
vector_store.create_index(vector_col='vector', metric = 'cosine')
|
vector_store.create_index(vector_col='vector', metric = 'cosine')
|
||||||
@@ -90,3 +95,107 @@ vector_store.create_index(vector_col='vector', metric = 'cosine')
|
|||||||
vector_store.create_index(col_name='text')
|
vector_store.create_index(col_name='text')
|
||||||
|
|
||||||
```
|
```
|
||||||
|
|
||||||
|
##### similarity_search()
|
||||||
|
- `query`: `str`
|
||||||
|
- `k`: `Optional[int] = None`
|
||||||
|
- `filter`: `Optional[Dict[str, str]] = None`
|
||||||
|
- `fts`: `Optional[bool] = False`
|
||||||
|
- `name`: `Optional[str] = None`
|
||||||
|
- `kwargs`: `Any`
|
||||||
|
|
||||||
|
Return documents most similar to the query without relevance scores
|
||||||
|
|
||||||
|
```python
|
||||||
|
docs = docsearch.similarity_search(query)
|
||||||
|
print(docs[0].page_content)
|
||||||
|
```
|
||||||
|
|
||||||
|
##### similarity_search_by_vector()
|
||||||
|
- `embedding`: `List[float]`
|
||||||
|
- `k`: `Optional[int] = None`
|
||||||
|
- `filter`: `Optional[Dict[str, str]] = None`
|
||||||
|
- `name`: `Optional[str] = None`
|
||||||
|
- `kwargs`: `Any`
|
||||||
|
|
||||||
|
Returns documents most similar to the query vector.
|
||||||
|
|
||||||
|
```python
|
||||||
|
docs = docsearch.similarity_search_by_vector(query)
|
||||||
|
print(docs[0].page_content)
|
||||||
|
```
|
||||||
|
|
||||||
|
##### similarity_search_with_score()
|
||||||
|
- `query`: `str`
|
||||||
|
- `k`: `Optional[int] = None`
|
||||||
|
- `filter`: `Optional[Dict[str, str]] = None`
|
||||||
|
- `kwargs`: `Any`
|
||||||
|
|
||||||
|
Returns documents most similar to the query string with relevance scores, gets called by base class's `similarity_search_with_relevance_scores` which selects relevance score based on our `_select_relevance_score_fn`.
|
||||||
|
|
||||||
|
```python
|
||||||
|
docs = docsearch.similarity_search_with_relevance_scores(query)
|
||||||
|
print("relevance score - ", docs[0][1])
|
||||||
|
print("text- ", docs[0][0].page_content[:1000])
|
||||||
|
```
|
||||||
|
|
||||||
|
##### similarity_search_by_vector_with_relevance_scores()
|
||||||
|
- `embedding`: `List[float]`
|
||||||
|
- `k`: `Optional[int] = None`
|
||||||
|
- `filter`: `Optional[Dict[str, str]] = None`
|
||||||
|
- `name`: `Optional[str] = None`
|
||||||
|
- `kwargs`: `Any`
|
||||||
|
|
||||||
|
Return documents most similar to the query vector with relevance scores.
|
||||||
|
Relevance score
|
||||||
|
|
||||||
|
```python
|
||||||
|
docs = docsearch.similarity_search_by_vector_with_relevance_scores(query_embedding)
|
||||||
|
print("relevance score - ", docs[0][1])
|
||||||
|
print("text- ", docs[0][0].page_content[:1000])
|
||||||
|
```
|
||||||
|
|
||||||
|
##### max_marginal_relevance_search()
|
||||||
|
- `query`: `str`
|
||||||
|
- `k`: `Optional[int] = None`
|
||||||
|
- `fetch_k` : Number of Documents to fetch to pass to MMR algorithm, `Optional[int] = None`
|
||||||
|
- `lambda_mult`: Number between 0 and 1 that determines the degree
|
||||||
|
of diversity among the results with 0 corresponding
|
||||||
|
to maximum diversity and 1 to minimum diversity.
|
||||||
|
Defaults to 0.5. `float = 0.5`
|
||||||
|
- `filter`: `Optional[Dict[str, str]] = None`
|
||||||
|
- `kwargs`: `Any`
|
||||||
|
|
||||||
|
Returns docs selected using the maximal marginal relevance(MMR).
|
||||||
|
Maximal marginal relevance optimizes for similarity to query AND diversity among selected documents.
|
||||||
|
|
||||||
|
Similarly, the `max_marginal_relevance_search_by_vector()` function returns docs most similar to the embedding passed to the function using MMR. Instead of a string query, you need to pass the embedding to be searched for.
|
||||||
|
|
||||||
|
```python
|
||||||
|
result = docsearch.max_marginal_relevance_search(
|
||||||
|
query="text"
|
||||||
|
)
|
||||||
|
result_texts = [doc.page_content for doc in result]
|
||||||
|
print(result_texts)
|
||||||
|
|
||||||
|
## search by vector :
|
||||||
|
result = docsearch.max_marginal_relevance_search_by_vector(
|
||||||
|
embeddings.embed_query("text")
|
||||||
|
)
|
||||||
|
result_texts = [doc.page_content for doc in result]
|
||||||
|
print(result_texts)
|
||||||
|
```
|
||||||
|
|
||||||
|
##### add_images()
|
||||||
|
- `uris` : File path to the image. `List[str]`.
|
||||||
|
- `metadatas` : Optional list of metadatas. `(Optional[List[dict]], optional)`
|
||||||
|
- `ids` : Optional list of IDs. `(Optional[List[str]], optional)`
|
||||||
|
|
||||||
|
Adds images by automatically creating their embeddings and adds them to the vectorstore.
|
||||||
|
|
||||||
|
```python
|
||||||
|
vec_store.add_images(uris=image_uris)
|
||||||
|
# here image_uris are local fs paths to the images.
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -2,7 +2,8 @@
|
|||||||

|

|
||||||
|
|
||||||
## Quick start
|
## Quick start
|
||||||
You would need to install the integration via `pip install llama-index-vector-stores-lancedb` in order to use it. You can run the below script to try it out :
|
You would need to install the integration via `pip install llama-index-vector-stores-lancedb` in order to use it.
|
||||||
|
You can run the below script to try it out :
|
||||||
```python
|
```python
|
||||||
import logging
|
import logging
|
||||||
import sys
|
import sys
|
||||||
@@ -43,6 +44,8 @@ retriever = index.as_retriever(vector_store_kwargs={"where": lance_filter})
|
|||||||
response = retriever.retrieve("What did the author do growing up?")
|
response = retriever.retrieve("What did the author do growing up?")
|
||||||
```
|
```
|
||||||
|
|
||||||
|
Check out the complete example here - [LlamaIndex demo](../notebooks/LlamaIndex_example.ipynb)
|
||||||
|
|
||||||
### Filtering
|
### Filtering
|
||||||
For metadata filtering, you can use a Lance SQL-like string filter as demonstrated in the example above. Additionally, you can also filter using the `MetadataFilters` class from LlamaIndex:
|
For metadata filtering, you can use a Lance SQL-like string filter as demonstrated in the example above. Additionally, you can also filter using the `MetadataFilters` class from LlamaIndex:
|
||||||
```python
|
```python
|
||||||
|
|||||||
@@ -9,7 +9,8 @@ around the asynchronous client.
|
|||||||
This guide describes the differences between the two APIs and will hopefully assist users
|
This guide describes the differences between the two APIs and will hopefully assist users
|
||||||
that would like to migrate to the new API.
|
that would like to migrate to the new API.
|
||||||
|
|
||||||
## Closeable Connections
|
## Python
|
||||||
|
### Closeable Connections
|
||||||
|
|
||||||
The Connection now has a `close` method. You can call this when
|
The Connection now has a `close` method. You can call this when
|
||||||
you are done with the connection to eagerly free resources. Currently
|
you are done with the connection to eagerly free resources. Currently
|
||||||
@@ -32,20 +33,20 @@ async def my_async_fn():
|
|||||||
It is not mandatory to call the `close` method. If you do not call it
|
It is not mandatory to call the `close` method. If you do not call it
|
||||||
then the connection will be closed when the object is garbage collected.
|
then the connection will be closed when the object is garbage collected.
|
||||||
|
|
||||||
## Closeable Table
|
### Closeable Table
|
||||||
|
|
||||||
The Table now also has a `close` method, similar to the connection. This
|
The Table now also has a `close` method, similar to the connection. This
|
||||||
can be used to eagerly free the cache used by a Table object. Similar to
|
can be used to eagerly free the cache used by a Table object. Similar to
|
||||||
the connection, it can be used as a context manager and it is not mandatory
|
the connection, it can be used as a context manager and it is not mandatory
|
||||||
to call the `close` method.
|
to call the `close` method.
|
||||||
|
|
||||||
### Changes to Table APIs
|
#### Changes to Table APIs
|
||||||
|
|
||||||
- Previously `Table.schema` was a property. Now it is an async method.
|
- Previously `Table.schema` was a property. Now it is an async method.
|
||||||
- The method `Table.__len__` was removed and `len(table)` will no longer
|
- The method `Table.__len__` was removed and `len(table)` will no longer
|
||||||
work. Use `Table.count_rows` instead.
|
work. Use `Table.count_rows` instead.
|
||||||
|
|
||||||
### Creating Indices
|
#### Creating Indices
|
||||||
|
|
||||||
The `Table.create_index` method is now used for creating both vector indices
|
The `Table.create_index` method is now used for creating both vector indices
|
||||||
and scalar indices. It currently requires a column name to be specified (the
|
and scalar indices. It currently requires a column name to be specified (the
|
||||||
@@ -55,12 +56,12 @@ the size of the data.
|
|||||||
To specify index configuration details you will need to specify which kind of
|
To specify index configuration details you will need to specify which kind of
|
||||||
index you are using.
|
index you are using.
|
||||||
|
|
||||||
### Querying
|
#### Querying
|
||||||
|
|
||||||
The `Table.search` method has been renamed to `AsyncTable.vector_search` for
|
The `Table.search` method has been renamed to `AsyncTable.vector_search` for
|
||||||
clarity.
|
clarity.
|
||||||
|
|
||||||
## Features not yet supported
|
### Features not yet supported
|
||||||
|
|
||||||
The following features are not yet supported by the asynchronous API. However,
|
The following features are not yet supported by the asynchronous API. However,
|
||||||
we plan to support them soon.
|
we plan to support them soon.
|
||||||
@@ -74,3 +75,22 @@ we plan to support them soon.
|
|||||||
search
|
search
|
||||||
- Remote connections to LanceDb Cloud are not yet supported.
|
- Remote connections to LanceDb Cloud are not yet supported.
|
||||||
- The method Table.head is not yet supported.
|
- The method Table.head is not yet supported.
|
||||||
|
|
||||||
|
## TypeScript/JavaScript
|
||||||
|
|
||||||
|
For JS/TS users, we offer a brand new SDK [@lancedb/lancedb](https://www.npmjs.com/package/@lancedb/lancedb)
|
||||||
|
|
||||||
|
### Changes to Table APIs
|
||||||
|
|
||||||
|
Previously `Table.schema` was a property. Now it is an async method.
|
||||||
|
|
||||||
|
|
||||||
|
#### Creating Indices
|
||||||
|
|
||||||
|
The `Table.createIndex` method is now used for creating both vector indices
|
||||||
|
and scalar indices. It currently requires a column name to be specified (the
|
||||||
|
column to index). Vector index defaults are now smarter and scale better with
|
||||||
|
the size of the data.
|
||||||
|
|
||||||
|
To specify index configuration details you will need to specify which kind of
|
||||||
|
index you are using.
|
||||||
|
|||||||
538
docs/src/notebooks/LlamaIndex_example.ipynb
Normal file
538
docs/src/notebooks/LlamaIndex_example.ipynb
Normal file
@@ -0,0 +1,538 @@
|
|||||||
|
{
|
||||||
|
"cells": [
|
||||||
|
{
|
||||||
|
"attachments": {},
|
||||||
|
"cell_type": "markdown",
|
||||||
|
"id": "2db56c9b",
|
||||||
|
"metadata": {},
|
||||||
|
"source": [
|
||||||
|
"<a href=\"https://colab.research.google.com/github/run-llama/llama_index/blob/main/docs/docs/examples/vector_stores/LanceDBIndexDemo.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"attachments": {},
|
||||||
|
"cell_type": "markdown",
|
||||||
|
"id": "db0855d0",
|
||||||
|
"metadata": {},
|
||||||
|
"source": [
|
||||||
|
"# LanceDB Vector Store\n",
|
||||||
|
"In this notebook we are going to show how to use [LanceDB](https://www.lancedb.com) to perform vector searches in LlamaIndex"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"attachments": {},
|
||||||
|
"cell_type": "markdown",
|
||||||
|
"id": "f44170b2",
|
||||||
|
"metadata": {},
|
||||||
|
"source": [
|
||||||
|
"If you're opening this Notebook on colab, you will probably need to install LlamaIndex 🦙."
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": null,
|
||||||
|
"id": "6c84199c",
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [],
|
||||||
|
"source": [
|
||||||
|
"%pip install llama-index llama-index-vector-stores-lancedb"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": null,
|
||||||
|
"id": "1a90ce34",
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [],
|
||||||
|
"source": [
|
||||||
|
"%pip install lancedb==0.6.13 #Only required if the above cell installs an older version of lancedb (pypi package may not be released yet)"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": null,
|
||||||
|
"id": "39c62671",
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [],
|
||||||
|
"source": [
|
||||||
|
"# Refresh vector store URI if restarting or re-using the same notebook\n",
|
||||||
|
"! rm -rf ./lancedb"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": null,
|
||||||
|
"id": "59b54276",
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [],
|
||||||
|
"source": [
|
||||||
|
"import logging\n",
|
||||||
|
"import sys\n",
|
||||||
|
"\n",
|
||||||
|
"# Uncomment to see debug logs\n",
|
||||||
|
"# logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)\n",
|
||||||
|
"# logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))\n",
|
||||||
|
"\n",
|
||||||
|
"\n",
|
||||||
|
"from llama_index.core import SimpleDirectoryReader, Document, StorageContext\n",
|
||||||
|
"from llama_index.core import VectorStoreIndex\n",
|
||||||
|
"from llama_index.vector_stores.lancedb import LanceDBVectorStore\n",
|
||||||
|
"import textwrap"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"attachments": {},
|
||||||
|
"cell_type": "markdown",
|
||||||
|
"id": "26c71b6d",
|
||||||
|
"metadata": {},
|
||||||
|
"source": [
|
||||||
|
"### Setup OpenAI\n",
|
||||||
|
"The first step is to configure the openai key. It will be used to created embeddings for the documents loaded into the index"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": null,
|
||||||
|
"id": "67b86621",
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [],
|
||||||
|
"source": [
|
||||||
|
"import openai\n",
|
||||||
|
"\n",
|
||||||
|
"openai.api_key = \"sk-\""
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"attachments": {},
|
||||||
|
"cell_type": "markdown",
|
||||||
|
"id": "073f0a68",
|
||||||
|
"metadata": {},
|
||||||
|
"source": [
|
||||||
|
"Download Data"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": null,
|
||||||
|
"id": "eef1b911",
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [
|
||||||
|
{
|
||||||
|
"name": "stdout",
|
||||||
|
"output_type": "stream",
|
||||||
|
"text": [
|
||||||
|
"--2024-06-11 16:42:37-- https://raw.githubusercontent.com/run-llama/llama_index/main/docs/docs/examples/data/paul_graham/paul_graham_essay.txt\n",
|
||||||
|
"Resolving raw.githubusercontent.com (raw.githubusercontent.com)... 185.199.109.133, 185.199.110.133, 185.199.108.133, ...\n",
|
||||||
|
"Connecting to raw.githubusercontent.com (raw.githubusercontent.com)|185.199.109.133|:443... connected.\n",
|
||||||
|
"HTTP request sent, awaiting response... 200 OK\n",
|
||||||
|
"Length: 75042 (73K) [text/plain]\n",
|
||||||
|
"Saving to: ‘data/paul_graham/paul_graham_essay.txt’\n",
|
||||||
|
"\n",
|
||||||
|
"data/paul_graham/pa 100%[===================>] 73.28K --.-KB/s in 0.02s \n",
|
||||||
|
"\n",
|
||||||
|
"2024-06-11 16:42:37 (3.97 MB/s) - ‘data/paul_graham/paul_graham_essay.txt’ saved [75042/75042]\n",
|
||||||
|
"\n"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"source": [
|
||||||
|
"!mkdir -p 'data/paul_graham/'\n",
|
||||||
|
"!wget 'https://raw.githubusercontent.com/run-llama/llama_index/main/docs/docs/examples/data/paul_graham/paul_graham_essay.txt' -O 'data/paul_graham/paul_graham_essay.txt'"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"attachments": {},
|
||||||
|
"cell_type": "markdown",
|
||||||
|
"id": "f7010b1d-d1bb-4f08-9309-a328bb4ea396",
|
||||||
|
"metadata": {},
|
||||||
|
"source": [
|
||||||
|
"### Loading documents\n",
|
||||||
|
"Load the documents stored in the `data/paul_graham/` using the SimpleDirectoryReader"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": null,
|
||||||
|
"id": "c154dd4b",
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [
|
||||||
|
{
|
||||||
|
"name": "stdout",
|
||||||
|
"output_type": "stream",
|
||||||
|
"text": [
|
||||||
|
"Document ID: cac1ba78-5007-4cf8-89ba-280264790115 Document Hash: fe2d4d3ef3a860780f6c2599808caa587c8be6516fe0ba4ca53cf117044ba953\n"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"source": [
|
||||||
|
"documents = SimpleDirectoryReader(\"./data/paul_graham/\").load_data()\n",
|
||||||
|
"print(\"Document ID:\", documents[0].doc_id, \"Document Hash:\", documents[0].hash)"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"attachments": {},
|
||||||
|
"cell_type": "markdown",
|
||||||
|
"id": "c0232fd1",
|
||||||
|
"metadata": {},
|
||||||
|
"source": [
|
||||||
|
"### Create the index\n",
|
||||||
|
"Here we create an index backed by LanceDB using the documents loaded previously. LanceDBVectorStore takes a few arguments.\n",
|
||||||
|
"- uri (str, required): Location where LanceDB will store its files.\n",
|
||||||
|
"- table_name (str, optional): The table name where the embeddings will be stored. Defaults to \"vectors\".\n",
|
||||||
|
"- nprobes (int, optional): The number of probes used. A higher number makes search more accurate but also slower. Defaults to 20.\n",
|
||||||
|
"- refine_factor: (int, optional): Refine the results by reading extra elements and re-ranking them in memory. Defaults to None\n",
|
||||||
|
"\n",
|
||||||
|
"- More details can be found at [LanceDB docs](https://lancedb.github.io/lancedb/ann_indexes)"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "markdown",
|
||||||
|
"id": "1f2e20ef",
|
||||||
|
"metadata": {},
|
||||||
|
"source": [
|
||||||
|
"##### For LanceDB cloud :\n",
|
||||||
|
"```python\n",
|
||||||
|
"vector_store = LanceDBVectorStore( \n",
|
||||||
|
" uri=\"db://db_name\", # your remote DB URI\n",
|
||||||
|
" api_key=\"sk_..\", # lancedb cloud api key\n",
|
||||||
|
" region=\"your-region\" # the region you configured\n",
|
||||||
|
" ...\n",
|
||||||
|
")"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": null,
|
||||||
|
"id": "8731da62",
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [],
|
||||||
|
"source": [
|
||||||
|
"vector_store = LanceDBVectorStore(\n",
|
||||||
|
" uri=\"./lancedb\", mode=\"overwrite\", query_type=\"hybrid\"\n",
|
||||||
|
")\n",
|
||||||
|
"storage_context = StorageContext.from_defaults(vector_store=vector_store)\n",
|
||||||
|
"\n",
|
||||||
|
"index = VectorStoreIndex.from_documents(\n",
|
||||||
|
" documents, storage_context=storage_context\n",
|
||||||
|
")"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"attachments": {},
|
||||||
|
"cell_type": "markdown",
|
||||||
|
"id": "8ee4473a-094f-4d0a-a825-e1213db07240",
|
||||||
|
"metadata": {},
|
||||||
|
"source": [
|
||||||
|
"### Query the index\n",
|
||||||
|
"We can now ask questions using our index. We can use filtering via `MetadataFilters` or use native lance `where` clause."
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": null,
|
||||||
|
"id": "5eb6419b",
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [],
|
||||||
|
"source": [
|
||||||
|
"from llama_index.core.vector_stores import (\n",
|
||||||
|
" MetadataFilters,\n",
|
||||||
|
" FilterOperator,\n",
|
||||||
|
" FilterCondition,\n",
|
||||||
|
" MetadataFilter,\n",
|
||||||
|
")\n",
|
||||||
|
"\n",
|
||||||
|
"from datetime import datetime\n",
|
||||||
|
"\n",
|
||||||
|
"\n",
|
||||||
|
"query_filters = MetadataFilters(\n",
|
||||||
|
" filters=[\n",
|
||||||
|
" MetadataFilter(\n",
|
||||||
|
" key=\"creation_date\",\n",
|
||||||
|
" operator=FilterOperator.EQ,\n",
|
||||||
|
" value=datetime.now().strftime(\"%Y-%m-%d\"),\n",
|
||||||
|
" ),\n",
|
||||||
|
" MetadataFilter(\n",
|
||||||
|
" key=\"file_size\", value=75040, operator=FilterOperator.GT\n",
|
||||||
|
" ),\n",
|
||||||
|
" ],\n",
|
||||||
|
" condition=FilterCondition.AND,\n",
|
||||||
|
")"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "markdown",
|
||||||
|
"id": "ee201930",
|
||||||
|
"metadata": {},
|
||||||
|
"source": [
|
||||||
|
"### Hybrid Search\n",
|
||||||
|
"\n",
|
||||||
|
"LanceDB offers hybrid search with reranking capabilities. For complete documentation, refer [here](https://lancedb.github.io/lancedb/hybrid_search/hybrid_search/).\n",
|
||||||
|
"\n",
|
||||||
|
"This example uses the `colbert` reranker. The following cell installs the necessary dependencies for `colbert`. If you choose a different reranker, make sure to adjust the dependencies accordingly."
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": null,
|
||||||
|
"id": "e12d1454",
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [],
|
||||||
|
"source": [
|
||||||
|
"! pip install -U torch transformers tantivy@git+https://github.com/quickwit-oss/tantivy-py#164adc87e1a033117001cf70e38c82a53014d985"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "markdown",
|
||||||
|
"id": "c742cb07",
|
||||||
|
"metadata": {},
|
||||||
|
"source": [
|
||||||
|
"if you want to add a reranker at vector store initialization, you can pass it in the arguments like below :\n",
|
||||||
|
"```\n",
|
||||||
|
"from lancedb.rerankers import ColbertReranker\n",
|
||||||
|
"reranker = ColbertReranker()\n",
|
||||||
|
"vector_store = LanceDBVectorStore(uri=\"./lancedb\", reranker=reranker, mode=\"overwrite\")\n",
|
||||||
|
"```"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": null,
|
||||||
|
"id": "27ea047b",
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [],
|
||||||
|
"source": [
|
||||||
|
"import lancedb"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": null,
|
||||||
|
"id": "8414517f",
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [],
|
||||||
|
"source": [
|
||||||
|
"from lancedb.rerankers import ColbertReranker\n",
|
||||||
|
"\n",
|
||||||
|
"reranker = ColbertReranker()\n",
|
||||||
|
"vector_store._add_reranker(reranker)\n",
|
||||||
|
"\n",
|
||||||
|
"query_engine = index.as_query_engine(\n",
|
||||||
|
" filters=query_filters,\n",
|
||||||
|
" # vector_store_kwargs={\n",
|
||||||
|
" # \"query_type\": \"fts\",\n",
|
||||||
|
" # },\n",
|
||||||
|
")\n",
|
||||||
|
"\n",
|
||||||
|
"response = query_engine.query(\"How much did Viaweb charge per month?\")"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": null,
|
||||||
|
"id": "dc6ccb7a",
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [
|
||||||
|
{
|
||||||
|
"name": "stdout",
|
||||||
|
"output_type": "stream",
|
||||||
|
"text": [
|
||||||
|
"Viaweb charged $100 a month for a small store and $300 a month for a big one.\n",
|
||||||
|
"metadata - {'65ed5f07-5b8a-4143-a939-e8764884828e': {'file_path': '/Users/raghavdixit/Desktop/open_source/llama_index_lance/docs/docs/examples/vector_stores/data/paul_graham/paul_graham_essay.txt', 'file_name': 'paul_graham_essay.txt', 'file_type': 'text/plain', 'file_size': 75042, 'creation_date': '2024-06-11', 'last_modified_date': '2024-06-11'}, 'be231827-20b8-4988-ac75-94fa79b3c22e': {'file_path': '/Users/raghavdixit/Desktop/open_source/llama_index_lance/docs/docs/examples/vector_stores/data/paul_graham/paul_graham_essay.txt', 'file_name': 'paul_graham_essay.txt', 'file_type': 'text/plain', 'file_size': 75042, 'creation_date': '2024-06-11', 'last_modified_date': '2024-06-11'}}\n"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"source": [
|
||||||
|
"print(response)\n",
|
||||||
|
"print(\"metadata -\", response.metadata)"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "markdown",
|
||||||
|
"id": "0c1c6c73",
|
||||||
|
"metadata": {},
|
||||||
|
"source": [
|
||||||
|
"##### lance filters(SQL like) directly via the `where` clause :"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": null,
|
||||||
|
"id": "0a2bcc07",
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [],
|
||||||
|
"source": [
|
||||||
|
"lance_filter = \"metadata.file_name = 'paul_graham_essay.txt' \"\n",
|
||||||
|
"retriever = index.as_retriever(vector_store_kwargs={\"where\": lance_filter})\n",
|
||||||
|
"response = retriever.retrieve(\"What did the author do growing up?\")"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": null,
|
||||||
|
"id": "7ac47cf9",
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [
|
||||||
|
{
|
||||||
|
"name": "stdout",
|
||||||
|
"output_type": "stream",
|
||||||
|
"text": [
|
||||||
|
"What I Worked On\n",
|
||||||
|
"\n",
|
||||||
|
"February 2021\n",
|
||||||
|
"\n",
|
||||||
|
"Before college the two main things I worked on, outside of school, were writing and programming. I didn't write essays. I wrote what beginning writers were supposed to write then, and probably still are: short stories. My stories were awful. They had hardly any plot, just characters with strong feelings, which I imagined made them deep.\n",
|
||||||
|
"\n",
|
||||||
|
"The first programs I tried writing were on the IBM 1401 that our school district used for what was then called \"data processing.\" This was in 9th grade, so I was 13 or 14. The school district's 1401 happened to be in the basement of our junior high school, and my friend Rich Draves and I got permission to use it. It was like a mini Bond villain's lair down there, with all these alien-looking machines — CPU, disk drives, printer, card reader — sitting up on a raised floor under bright fluorescent lights.\n",
|
||||||
|
"\n",
|
||||||
|
"The language we used was an early version of Fortran. You had to type programs on punch cards, then stack them in the card reader and press a button to load the program into memory and run it. The result would ordinarily be to print something on the spectacularly loud printer.\n",
|
||||||
|
"\n",
|
||||||
|
"I was puzzled by the 1401. I couldn't figure out what to do with it. And in retrospect there's not much I could have done with it. The only form of input to programs was data stored on punched cards, and I didn't have any data stored on punched cards. The only other option was to do things that didn't rely on any input, like calculate approximations of pi, but I didn't know enough math to do anything interesting of that type. So I'm not surprised I can't remember any programs I wrote, because they can't have done much. My clearest memory is of the moment I learned it was possible for programs not to terminate, when one of mine didn't. On a machine without time-sharing, this was a social as well as a technical error, as the data center manager's expression made clear.\n",
|
||||||
|
"\n",
|
||||||
|
"With microcomputers, everything changed. Now you could have a computer sitting right in front of you, on a desk, that could respond to your keystrokes as it was running instead of just churning through a stack of punch cards and then stopping. [1]\n",
|
||||||
|
"\n",
|
||||||
|
"The first of my friends to get a microcomputer built it himself. It was sold as a kit by Heathkit. I remember vividly how impressed and envious I felt watching him sitting in front of it, typing programs right into the computer.\n",
|
||||||
|
"\n",
|
||||||
|
"Computers were expensive in those days and it took me years of nagging before I convinced my father to buy one, a TRS-80, in about 1980. The gold standard then was the Apple II, but a TRS-80 was good enough. This was when I really started programming. I wrote simple games, a program to predict how high my model rockets would fly, and a word processor that my father used to write at least one book. There was only room in memory for about 2 pages of text, so he'd write 2 pages at a time and then print them out, but it was a lot better than a typewriter.\n",
|
||||||
|
"\n",
|
||||||
|
"Though I liked programming, I didn't plan to study it in college. In college I was going to study philosophy, which sounded much more powerful. It seemed, to my naive high school self, to be the study of the ultimate truths, compared to which the things studied in other fields would be mere domain knowledge. What I discovered when I got to college was that the other fields took up so much of the space of ideas that there wasn't much left for these supposed ultimate truths. All that seemed left for philosophy were edge cases that people in other fields felt could safely be ignored.\n",
|
||||||
|
"\n",
|
||||||
|
"I couldn't have put this into words when I was 18. All I knew at the time was that I kept taking philosophy courses and they kept being boring. So I decided to switch to AI.\n",
|
||||||
|
"\n",
|
||||||
|
"AI was in the air in the mid 1980s, but there were two things especially that made me want to work on it: a novel by Heinlein called The Moon is a Harsh Mistress, which featured an intelligent computer called Mike, and a PBS documentary that showed Terry Winograd using SHRDLU. I haven't tried rereading The Moon is a Harsh Mistress, so I don't know how well it has aged, but when I read it I was drawn entirely into its world.\n",
|
||||||
|
"metadata - {'file_path': '/Users/raghavdixit/Desktop/open_source/llama_index_lance/docs/docs/examples/vector_stores/data/paul_graham/paul_graham_essay.txt', 'file_name': 'paul_graham_essay.txt', 'file_type': 'text/plain', 'file_size': 75042, 'creation_date': '2024-06-11', 'last_modified_date': '2024-06-11'}\n"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"source": [
|
||||||
|
"print(response[0].get_content())\n",
|
||||||
|
"print(\"metadata -\", response[0].metadata)"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"attachments": {},
|
||||||
|
"cell_type": "markdown",
|
||||||
|
"id": "6afc84ac",
|
||||||
|
"metadata": {},
|
||||||
|
"source": [
|
||||||
|
"### Appending data\n",
|
||||||
|
"You can also add data to an existing index"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": null,
|
||||||
|
"id": "759a532e",
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [],
|
||||||
|
"source": [
|
||||||
|
"nodes = [node.node for node in response]"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": null,
|
||||||
|
"id": "069fc099",
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [],
|
||||||
|
"source": [
|
||||||
|
"del index\n",
|
||||||
|
"\n",
|
||||||
|
"index = VectorStoreIndex.from_documents(\n",
|
||||||
|
" [Document(text=\"The sky is purple in Portland, Maine\")],\n",
|
||||||
|
" uri=\"/tmp/new_dataset\",\n",
|
||||||
|
")"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": null,
|
||||||
|
"id": "a64ed441",
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [],
|
||||||
|
"source": [
|
||||||
|
"index.insert_nodes(nodes)"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": null,
|
||||||
|
"id": "b5cffcfe",
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [
|
||||||
|
{
|
||||||
|
"name": "stdout",
|
||||||
|
"output_type": "stream",
|
||||||
|
"text": [
|
||||||
|
"Portland, Maine\n"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"source": [
|
||||||
|
"query_engine = index.as_query_engine()\n",
|
||||||
|
"response = query_engine.query(\"Where is the sky purple?\")\n",
|
||||||
|
"print(textwrap.fill(str(response), 100))"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "markdown",
|
||||||
|
"id": "ec548a02",
|
||||||
|
"metadata": {},
|
||||||
|
"source": [
|
||||||
|
"You can also create an index from an existing table"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": null,
|
||||||
|
"id": "dc99404d",
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [],
|
||||||
|
"source": [
|
||||||
|
"del index\n",
|
||||||
|
"\n",
|
||||||
|
"vec_store = LanceDBVectorStore.from_table(vector_store._table)\n",
|
||||||
|
"index = VectorStoreIndex.from_vector_store(vec_store)"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": null,
|
||||||
|
"id": "7b2e8cca",
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [
|
||||||
|
{
|
||||||
|
"name": "stdout",
|
||||||
|
"output_type": "stream",
|
||||||
|
"text": [
|
||||||
|
"The author started Viaweb and Aspra.\n"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"source": [
|
||||||
|
"query_engine = index.as_query_engine()\n",
|
||||||
|
"response = query_engine.query(\"What companies did the author start?\")\n",
|
||||||
|
"print(textwrap.fill(str(response), 100))"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"metadata": {
|
||||||
|
"kernelspec": {
|
||||||
|
"display_name": "Python 3 (ipykernel)",
|
||||||
|
"language": "python",
|
||||||
|
"name": "python3"
|
||||||
|
},
|
||||||
|
"language_info": {
|
||||||
|
"codemirror_mode": {
|
||||||
|
"name": "ipython",
|
||||||
|
"version": 3
|
||||||
|
},
|
||||||
|
"file_extension": ".py",
|
||||||
|
"mimetype": "text/x-python",
|
||||||
|
"name": "python",
|
||||||
|
"nbconvert_exporter": "python",
|
||||||
|
"pygments_lexer": "ipython3"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"nbformat": 4,
|
||||||
|
"nbformat_minor": 5
|
||||||
|
}
|
||||||
1437
docs/src/notebooks/embedding_tuner.ipynb
Normal file
1437
docs/src/notebooks/embedding_tuner.ipynb
Normal file
File diff suppressed because it is too large
Load Diff
1481
docs/src/notebooks/lancedb_reranking.ipynb
Normal file
1481
docs/src/notebooks/lancedb_reranking.ipynb
Normal file
File diff suppressed because one or more lines are too long
566
docs/src/notebooks/langchain_example.ipynb
Normal file
566
docs/src/notebooks/langchain_example.ipynb
Normal file
@@ -0,0 +1,566 @@
|
|||||||
|
{
|
||||||
|
"cells": [
|
||||||
|
{
|
||||||
|
"cell_type": "markdown",
|
||||||
|
"id": "683953b3",
|
||||||
|
"metadata": {},
|
||||||
|
"source": [
|
||||||
|
"# LanceDB\n",
|
||||||
|
"\n",
|
||||||
|
">[LanceDB](https://lancedb.com/) is an open-source database for vector-search built with persistent storage, which greatly simplifies retrevial, filtering and management of embeddings. Fully open source.\n",
|
||||||
|
"\n",
|
||||||
|
"This notebook shows how to use functionality related to the `LanceDB` vector database based on the Lance data format."
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": null,
|
||||||
|
"id": "b1051ba9",
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [],
|
||||||
|
"source": [
|
||||||
|
"! pip install tantivy"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": null,
|
||||||
|
"id": "88ac92c0",
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [],
|
||||||
|
"source": [
|
||||||
|
"! pip install -U langchain-openai langchain-community"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": null,
|
||||||
|
"id": "5a1c84d6-a10f-428c-95cd-46d3a1702e07",
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [],
|
||||||
|
"source": [
|
||||||
|
"! pip install lancedb"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "markdown",
|
||||||
|
"id": "99134dd1-b91e-486f-8d90-534248e43b9d",
|
||||||
|
"metadata": {},
|
||||||
|
"source": [
|
||||||
|
"We want to use OpenAIEmbeddings so we have to get the OpenAI API Key. "
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": 1,
|
||||||
|
"id": "a0361f5c-e6f4-45f4-b829-11680cf03cec",
|
||||||
|
"metadata": {
|
||||||
|
"tags": []
|
||||||
|
},
|
||||||
|
"outputs": [],
|
||||||
|
"source": [
|
||||||
|
"import getpass\n",
|
||||||
|
"import os\n",
|
||||||
|
"\n",
|
||||||
|
"os.environ[\"OPENAI_API_KEY\"] = getpass.getpass(\"OpenAI API Key:\")"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": 2,
|
||||||
|
"id": "d114ed78",
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [],
|
||||||
|
"source": [
|
||||||
|
"! rm -rf /tmp/lancedb"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": 3,
|
||||||
|
"id": "a3c3999a",
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [],
|
||||||
|
"source": [
|
||||||
|
"from langchain_community.document_loaders import TextLoader\n",
|
||||||
|
"from langchain_community.vectorstores import LanceDB\n",
|
||||||
|
"from langchain_openai import OpenAIEmbeddings\n",
|
||||||
|
"from langchain_text_splitters import CharacterTextSplitter\n",
|
||||||
|
"\n",
|
||||||
|
"loader = TextLoader(\"../../how_to/state_of_the_union.txt\")\n",
|
||||||
|
"documents = loader.load()\n",
|
||||||
|
"\n",
|
||||||
|
"documents = CharacterTextSplitter().split_documents(documents)\n",
|
||||||
|
"embeddings = OpenAIEmbeddings()"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "markdown",
|
||||||
|
"id": "e9517bb0",
|
||||||
|
"metadata": {},
|
||||||
|
"source": [
|
||||||
|
"##### For LanceDB cloud, you can invoke the vector store as follows :\n",
|
||||||
|
"\n",
|
||||||
|
"\n",
|
||||||
|
"```python\n",
|
||||||
|
"db_url = \"db://lang_test\" # url of db you created\n",
|
||||||
|
"api_key = \"xxxxx\" # your API key\n",
|
||||||
|
"region=\"us-east-1-dev\" # your selected region\n",
|
||||||
|
"\n",
|
||||||
|
"vector_store = LanceDB(\n",
|
||||||
|
" uri=db_url,\n",
|
||||||
|
" api_key=api_key,\n",
|
||||||
|
" region=region,\n",
|
||||||
|
" embedding=embeddings,\n",
|
||||||
|
" table_name='langchain_test'\n",
|
||||||
|
" )\n",
|
||||||
|
"```\n",
|
||||||
|
"\n",
|
||||||
|
"You can also add `region`, `api_key`, `uri` to `from_documents()` classmethod\n"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": 4,
|
||||||
|
"id": "6e104aee",
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [],
|
||||||
|
"source": [
|
||||||
|
"from lancedb.rerankers import LinearCombinationReranker\n",
|
||||||
|
"\n",
|
||||||
|
"reranker = LinearCombinationReranker(weight=0.3)\n",
|
||||||
|
"\n",
|
||||||
|
"docsearch = LanceDB.from_documents(documents, embeddings, reranker=reranker)\n",
|
||||||
|
"query = \"What did the president say about Ketanji Brown Jackson\""
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": 31,
|
||||||
|
"id": "259c7988",
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [
|
||||||
|
{
|
||||||
|
"name": "stdout",
|
||||||
|
"output_type": "stream",
|
||||||
|
"text": [
|
||||||
|
"relevance score - 0.7066475030191711\n",
|
||||||
|
"text- They were responding to a 9-1-1 call when a man shot and killed them with a stolen gun. \n",
|
||||||
|
"\n",
|
||||||
|
"Officer Mora was 27 years old. \n",
|
||||||
|
"\n",
|
||||||
|
"Officer Rivera was 22. \n",
|
||||||
|
"\n",
|
||||||
|
"Both Dominican Americans who’d grown up on the same streets they later chose to patrol as police officers. \n",
|
||||||
|
"\n",
|
||||||
|
"I spoke with their families and told them that we are forever in debt for their sacrifice, and we will carry on their mission to restore the trust and safety every community deserves. \n",
|
||||||
|
"\n",
|
||||||
|
"I’ve worked on these issues a long time. \n",
|
||||||
|
"\n",
|
||||||
|
"I know what works: Investing in crime prevention and community police officers who’ll walk the beat, who’ll know the neighborhood, and who can restore trust and safety. \n",
|
||||||
|
"\n",
|
||||||
|
"So let’s not abandon our streets. Or choose between safety and equal justice. \n",
|
||||||
|
"\n",
|
||||||
|
"Let’s come together to protect our communities, restore trust, and hold law enforcement accountable. \n",
|
||||||
|
"\n",
|
||||||
|
"That’s why the Justice Department required body cameras, banned chokeholds, and restricted no-knock warrants for its officers. \n",
|
||||||
|
"\n",
|
||||||
|
"That’s why the American Rescue \n"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"source": [
|
||||||
|
"docs = docsearch.similarity_search_with_relevance_scores(query)\n",
|
||||||
|
"print(\"relevance score - \", docs[0][1])\n",
|
||||||
|
"print(\"text- \", docs[0][0].page_content[:1000])"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": 33,
|
||||||
|
"id": "9fa29dae",
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [
|
||||||
|
{
|
||||||
|
"name": "stdout",
|
||||||
|
"output_type": "stream",
|
||||||
|
"text": [
|
||||||
|
"distance - 0.30000001192092896\n",
|
||||||
|
"text- My administration is providing assistance with job training and housing, and now helping lower-income veterans get VA care debt-free. \n",
|
||||||
|
"\n",
|
||||||
|
"Our troops in Iraq and Afghanistan faced many dangers. \n",
|
||||||
|
"\n",
|
||||||
|
"One was stationed at bases and breathing in toxic smoke from “burn pits” that incinerated wastes of war—medical and hazard material, jet fuel, and more. \n",
|
||||||
|
"\n",
|
||||||
|
"When they came home, many of the world’s fittest and best trained warriors were never the same. \n",
|
||||||
|
"\n",
|
||||||
|
"Headaches. Numbness. Dizziness. \n",
|
||||||
|
"\n",
|
||||||
|
"A cancer that would put them in a flag-draped coffin. \n",
|
||||||
|
"\n",
|
||||||
|
"I know. \n",
|
||||||
|
"\n",
|
||||||
|
"One of those soldiers was my son Major Beau Biden. \n",
|
||||||
|
"\n",
|
||||||
|
"We don’t know for sure if a burn pit was the cause of his brain cancer, or the diseases of so many of our troops. \n",
|
||||||
|
"\n",
|
||||||
|
"But I’m committed to finding out everything we can. \n",
|
||||||
|
"\n",
|
||||||
|
"Committed to military families like Danielle Robinson from Ohio. \n",
|
||||||
|
"\n",
|
||||||
|
"The widow of Sergeant First Class Heath Robinson. \n",
|
||||||
|
"\n",
|
||||||
|
"He was born a soldier. Army National Guard. Combat medic in Kosovo and Iraq. \n",
|
||||||
|
"\n",
|
||||||
|
"Stationed near Baghdad, just ya\n"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"source": [
|
||||||
|
"docs = docsearch.similarity_search_with_score(query=\"Headaches\", query_type=\"hybrid\")\n",
|
||||||
|
"print(\"distance - \", docs[0][1])\n",
|
||||||
|
"print(\"text- \", docs[0][0].page_content[:1000])"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": 8,
|
||||||
|
"id": "e70ad201",
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [
|
||||||
|
{
|
||||||
|
"name": "stdout",
|
||||||
|
"output_type": "stream",
|
||||||
|
"text": [
|
||||||
|
"reranker : <lancedb.rerankers.linear_combination.LinearCombinationReranker object at 0x107ef1130>\n"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"source": [
|
||||||
|
"print(\"reranker : \", docsearch._reranker)"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "markdown",
|
||||||
|
"id": "f5e1cdfd",
|
||||||
|
"metadata": {},
|
||||||
|
"source": [
|
||||||
|
"Additionaly, to explore the table you can load it into a df or save it in a csv file: \n",
|
||||||
|
"```python\n",
|
||||||
|
"tbl = docsearch.get_table()\n",
|
||||||
|
"print(\"tbl:\", tbl)\n",
|
||||||
|
"pd_df = tbl.to_pandas()\n",
|
||||||
|
"# pd_df.to_csv(\"docsearch.csv\", index=False)\n",
|
||||||
|
"\n",
|
||||||
|
"# you can also create a new vector store object using an older connection object:\n",
|
||||||
|
"vector_store = LanceDB(connection=tbl, embedding=embeddings)\n",
|
||||||
|
"```"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": 15,
|
||||||
|
"id": "9c608226",
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [
|
||||||
|
{
|
||||||
|
"name": "stdout",
|
||||||
|
"output_type": "stream",
|
||||||
|
"text": [
|
||||||
|
"metadata : {'source': '../../how_to/state_of_the_union.txt'}\n",
|
||||||
|
"\n",
|
||||||
|
"SQL filtering :\n",
|
||||||
|
"\n",
|
||||||
|
"They were responding to a 9-1-1 call when a man shot and killed them with a stolen gun. \n",
|
||||||
|
"\n",
|
||||||
|
"Officer Mora was 27 years old. \n",
|
||||||
|
"\n",
|
||||||
|
"Officer Rivera was 22. \n",
|
||||||
|
"\n",
|
||||||
|
"Both Dominican Americans who’d grown up on the same streets they later chose to patrol as police officers. \n",
|
||||||
|
"\n",
|
||||||
|
"I spoke with their families and told them that we are forever in debt for their sacrifice, and we will carry on their mission to restore the trust and safety every community deserves. \n",
|
||||||
|
"\n",
|
||||||
|
"I’ve worked on these issues a long time. \n",
|
||||||
|
"\n",
|
||||||
|
"I know what works: Investing in crime prevention and community police officers who’ll walk the beat, who’ll know the neighborhood, and who can restore trust and safety. \n",
|
||||||
|
"\n",
|
||||||
|
"So let’s not abandon our streets. Or choose between safety and equal justice. \n",
|
||||||
|
"\n",
|
||||||
|
"Let’s come together to protect our communities, restore trust, and hold law enforcement accountable. \n",
|
||||||
|
"\n",
|
||||||
|
"That’s why the Justice Department required body cameras, banned chokeholds, and restricted no-knock warrants for its officers. \n",
|
||||||
|
"\n",
|
||||||
|
"That’s why the American Rescue Plan provided $350 Billion that cities, states, and counties can use to hire more police and invest in proven strategies like community violence interruption—trusted messengers breaking the cycle of violence and trauma and giving young people hope. \n",
|
||||||
|
"\n",
|
||||||
|
"We should all agree: The answer is not to Defund the police. The answer is to FUND the police with the resources and training they need to protect our communities. \n",
|
||||||
|
"\n",
|
||||||
|
"I ask Democrats and Republicans alike: Pass my budget and keep our neighborhoods safe. \n",
|
||||||
|
"\n",
|
||||||
|
"And I will keep doing everything in my power to crack down on gun trafficking and ghost guns you can buy online and make at home—they have no serial numbers and can’t be traced. \n",
|
||||||
|
"\n",
|
||||||
|
"And I ask Congress to pass proven measures to reduce gun violence. Pass universal background checks. Why should anyone on a terrorist list be able to purchase a weapon? \n",
|
||||||
|
"\n",
|
||||||
|
"Ban assault weapons and high-capacity magazines. \n",
|
||||||
|
"\n",
|
||||||
|
"Repeal the liability shield that makes gun manufacturers the only industry in America that can’t be sued. \n",
|
||||||
|
"\n",
|
||||||
|
"These laws don’t infringe on the Second Amendment. They save lives. \n",
|
||||||
|
"\n",
|
||||||
|
"The most fundamental right in America is the right to vote – and to have it counted. And it’s under assault. \n",
|
||||||
|
"\n",
|
||||||
|
"In state after state, new laws have been passed, not only to suppress the vote, but to subvert entire elections. \n",
|
||||||
|
"\n",
|
||||||
|
"We cannot let this happen. \n",
|
||||||
|
"\n",
|
||||||
|
"Tonight. I call on the Senate to: Pass the Freedom to Vote Act. Pass the John Lewis Voting Rights Act. And while you’re at it, pass the Disclose Act so Americans can know who is funding our elections. \n",
|
||||||
|
"\n",
|
||||||
|
"Tonight, I’d like to honor someone who has dedicated his life to serve this country: Justice Stephen Breyer—an Army veteran, Constitutional scholar, and retiring Justice of the United States Supreme Court. Justice Breyer, thank you for your service. \n",
|
||||||
|
"\n",
|
||||||
|
"One of the most serious constitutional responsibilities a President has is nominating someone to serve on the United States Supreme Court. \n",
|
||||||
|
"\n",
|
||||||
|
"And I did that 4 days ago, when I nominated Circuit Court of Appeals Judge Ketanji Brown Jackson. One of our nation’s top legal minds, who will continue Justice Breyer’s legacy of excellence. \n",
|
||||||
|
"\n",
|
||||||
|
"A former top litigator in private practice. A former federal public defender. And from a family of public school educators and police officers. A consensus builder. Since she’s been nominated, she’s received a broad range of support—from the Fraternal Order of Police to former judges appointed by Democrats and Republicans. \n",
|
||||||
|
"\n",
|
||||||
|
"And if we are to advance liberty and justice, we need to secure the Border and fix the immigration system. \n",
|
||||||
|
"\n",
|
||||||
|
"We can do both. At our border, we’ve installed new technology like cutting-edge scanners to better detect drug smuggling. \n",
|
||||||
|
"\n",
|
||||||
|
"We’ve set up joint patrols with Mexico and Guatemala to catch more human traffickers. \n",
|
||||||
|
"\n",
|
||||||
|
"We’re putting in place dedicated immigration judges so families fleeing persecution and violence can have their cases heard faster.\n"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"source": [
|
||||||
|
"docs = docsearch.similarity_search(\n",
|
||||||
|
" query=query, filter={\"metadata.source\": \"../../how_to/state_of_the_union.txt\"}\n",
|
||||||
|
")\n",
|
||||||
|
"\n",
|
||||||
|
"print(\"metadata :\", docs[0].metadata)\n",
|
||||||
|
"\n",
|
||||||
|
"# or you can directly supply SQL string filters :\n",
|
||||||
|
"\n",
|
||||||
|
"print(\"\\nSQL filtering :\\n\")\n",
|
||||||
|
"docs = docsearch.similarity_search(query=query, filter=\"text LIKE '%Officer Rivera%'\")\n",
|
||||||
|
"print(docs[0].page_content)"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "markdown",
|
||||||
|
"id": "9a173c94",
|
||||||
|
"metadata": {},
|
||||||
|
"source": [
|
||||||
|
"## Adding images "
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": null,
|
||||||
|
"id": "05f669d7",
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [],
|
||||||
|
"source": [
|
||||||
|
"! pip install -U langchain-experimental"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": null,
|
||||||
|
"id": "3ed69810",
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [],
|
||||||
|
"source": [
|
||||||
|
"! pip install open_clip_torch torch"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": 16,
|
||||||
|
"id": "2cacb5ee",
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [],
|
||||||
|
"source": [
|
||||||
|
"! rm -rf '/tmp/multimmodal_lance'"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": 17,
|
||||||
|
"id": "b3456e2c",
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [],
|
||||||
|
"source": [
|
||||||
|
"from langchain_experimental.open_clip import OpenCLIPEmbeddings"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": 18,
|
||||||
|
"id": "3848eba2",
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [],
|
||||||
|
"source": [
|
||||||
|
"import os\n",
|
||||||
|
"\n",
|
||||||
|
"import requests\n",
|
||||||
|
"\n",
|
||||||
|
"# List of image URLs to download\n",
|
||||||
|
"image_urls = [\n",
|
||||||
|
" \"https://github.com/raghavdixit99/assets/assets/34462078/abf47cc4-d979-4aaa-83be-53a2115bf318\",\n",
|
||||||
|
" \"https://github.com/raghavdixit99/assets/assets/34462078/93be928e-522b-4e37-889d-d4efd54b2112\",\n",
|
||||||
|
"]\n",
|
||||||
|
"\n",
|
||||||
|
"texts = [\"bird\", \"dragon\"]\n",
|
||||||
|
"\n",
|
||||||
|
"# Directory to save images\n",
|
||||||
|
"dir_name = \"./photos/\"\n",
|
||||||
|
"\n",
|
||||||
|
"# Create directory if it doesn't exist\n",
|
||||||
|
"os.makedirs(dir_name, exist_ok=True)\n",
|
||||||
|
"\n",
|
||||||
|
"image_uris = []\n",
|
||||||
|
"# Download and save each image\n",
|
||||||
|
"for i, url in enumerate(image_urls, start=1):\n",
|
||||||
|
" response = requests.get(url)\n",
|
||||||
|
" path = os.path.join(dir_name, f\"image{i}.jpg\")\n",
|
||||||
|
" image_uris.append(path)\n",
|
||||||
|
" with open(path, \"wb\") as f:\n",
|
||||||
|
" f.write(response.content)"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": 21,
|
||||||
|
"id": "3d62c2a0",
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [],
|
||||||
|
"source": [
|
||||||
|
"from langchain_community.vectorstores import LanceDB\n",
|
||||||
|
"\n",
|
||||||
|
"vec_store = LanceDB(\n",
|
||||||
|
" table_name=\"multimodal_test\",\n",
|
||||||
|
" embedding=OpenCLIPEmbeddings(),\n",
|
||||||
|
")"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": 22,
|
||||||
|
"id": "ebbb4881",
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [
|
||||||
|
{
|
||||||
|
"data": {
|
||||||
|
"text/plain": [
|
||||||
|
"['b673620b-01f0-42ca-a92e-d033bb92c0a6',\n",
|
||||||
|
" '99c3a5b0-b577-417a-8177-92f4a655dbfb']"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"execution_count": 22,
|
||||||
|
"metadata": {},
|
||||||
|
"output_type": "execute_result"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"source": [
|
||||||
|
"vec_store.add_images(uris=image_uris)"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": 23,
|
||||||
|
"id": "3c29dea3",
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [
|
||||||
|
{
|
||||||
|
"data": {
|
||||||
|
"text/plain": [
|
||||||
|
"['f7adde5d-a4a3-402b-9e73-088b230722c3',\n",
|
||||||
|
" 'cbed59da-0aec-4bff-8820-9e59d81a2140']"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"execution_count": 23,
|
||||||
|
"metadata": {},
|
||||||
|
"output_type": "execute_result"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"source": [
|
||||||
|
"vec_store.add_texts(texts)"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": 24,
|
||||||
|
"id": "8b2f25ce",
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [],
|
||||||
|
"source": [
|
||||||
|
"img_embed = vec_store._embedding.embed_query(\"bird\")"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": 25,
|
||||||
|
"id": "87a24079",
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [
|
||||||
|
{
|
||||||
|
"data": {
|
||||||
|
"text/plain": [
|
||||||
|
"Document(page_content='bird', metadata={'id': 'f7adde5d-a4a3-402b-9e73-088b230722c3'})"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"execution_count": 25,
|
||||||
|
"metadata": {},
|
||||||
|
"output_type": "execute_result"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"source": [
|
||||||
|
"vec_store.similarity_search_by_vector(img_embed)[0]"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": 26,
|
||||||
|
"id": "78557867",
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [
|
||||||
|
{
|
||||||
|
"data": {
|
||||||
|
"text/plain": [
|
||||||
|
"LanceTable(connection=LanceDBConnection(/tmp/lancedb), name=\"multimodal_test\")"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"execution_count": 26,
|
||||||
|
"metadata": {},
|
||||||
|
"output_type": "execute_result"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"source": [
|
||||||
|
"vec_store._table"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"metadata": {
|
||||||
|
"kernelspec": {
|
||||||
|
"display_name": "Python 3 (ipykernel)",
|
||||||
|
"language": "python",
|
||||||
|
"name": "python3"
|
||||||
|
},
|
||||||
|
"language_info": {
|
||||||
|
"codemirror_mode": {
|
||||||
|
"name": "ipython",
|
||||||
|
"version": 3
|
||||||
|
},
|
||||||
|
"file_extension": ".py",
|
||||||
|
"mimetype": "text/x-python",
|
||||||
|
"name": "python",
|
||||||
|
"nbconvert_exporter": "python",
|
||||||
|
"pygments_lexer": "ipython3",
|
||||||
|
"version": "3.12.2"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"nbformat": 4,
|
||||||
|
"nbformat_minor": 5
|
||||||
|
}
|
||||||
@@ -15,7 +15,6 @@ LanceDB comes with some built-in rerankers. Some of the rerankers that are avail
|
|||||||
Using rerankers is optional for vector and FTS. However, for hybrid search, rerankers are required. To use a reranker, you need to create an instance of the reranker and pass it to the `rerank` method of the query builder.
|
Using rerankers is optional for vector and FTS. However, for hybrid search, rerankers are required. To use a reranker, you need to create an instance of the reranker and pass it to the `rerank` method of the query builder.
|
||||||
|
|
||||||
```python
|
```python
|
||||||
import numpy
|
|
||||||
import lancedb
|
import lancedb
|
||||||
from lancedb.embeddings import get_registry
|
from lancedb.embeddings import get_registry
|
||||||
from lancedb.pydantic import LanceModel, Vector
|
from lancedb.pydantic import LanceModel, Vector
|
||||||
@@ -54,6 +53,7 @@ LanceDB comes with some built-in rerankers. Here are some of the rerankers that
|
|||||||
- [ColBERT Reranker](./colbert.md)
|
- [ColBERT Reranker](./colbert.md)
|
||||||
- [OpenAI Reranker](./openai.md)
|
- [OpenAI Reranker](./openai.md)
|
||||||
- [Linear Combination Reranker](./linear_combination.md)
|
- [Linear Combination Reranker](./linear_combination.md)
|
||||||
|
- [Jina Reranker](./jina.md)
|
||||||
|
|
||||||
## Creating Custom Rerankers
|
## Creating Custom Rerankers
|
||||||
|
|
||||||
|
|||||||
78
docs/src/reranking/jina.md
Normal file
78
docs/src/reranking/jina.md
Normal file
@@ -0,0 +1,78 @@
|
|||||||
|
# Jina Reranker
|
||||||
|
|
||||||
|
This re-ranker uses the [Jina](https://jina.ai/reranker/) API to rerank the search results. You can use this re-ranker by passing `JinaReranker()` to the `rerank()` method. Note that you'll either need to set the `JINA_API_KEY` environment variable or pass the `api_key` argument to use this re-ranker.
|
||||||
|
|
||||||
|
|
||||||
|
!!! note
|
||||||
|
Supported Query Types: Hybrid, Vector, FTS
|
||||||
|
|
||||||
|
|
||||||
|
```python
|
||||||
|
import os
|
||||||
|
import lancedb
|
||||||
|
from lancedb.embeddings import get_registry
|
||||||
|
from lancedb.pydantic import LanceModel, Vector
|
||||||
|
from lancedb.rerankers import JinaReranker
|
||||||
|
|
||||||
|
os.environ['JINA_API_KEY'] = "jina_*"
|
||||||
|
|
||||||
|
|
||||||
|
embedder = get_registry().get("jina").create()
|
||||||
|
db = lancedb.connect("~/.lancedb")
|
||||||
|
|
||||||
|
class Schema(LanceModel):
|
||||||
|
text: str = embedder.SourceField()
|
||||||
|
vector: Vector(embedder.ndims()) = embedder.VectorField()
|
||||||
|
|
||||||
|
data = [
|
||||||
|
{"text": "hello world"},
|
||||||
|
{"text": "goodbye world"}
|
||||||
|
]
|
||||||
|
tbl = db.create_table("test", schema=Schema, mode="overwrite")
|
||||||
|
tbl.add(data)
|
||||||
|
reranker = JinaReranker(api_key="key")
|
||||||
|
|
||||||
|
# Run vector search with a reranker
|
||||||
|
result = tbl.search("hello").rerank(reranker=reranker).to_list()
|
||||||
|
|
||||||
|
# Run FTS search with a reranker
|
||||||
|
result = tbl.search("hello", query_type="fts").rerank(reranker=reranker).to_list()
|
||||||
|
|
||||||
|
# Run hybrid search with a reranker
|
||||||
|
tbl.create_fts_index("text", replace=True)
|
||||||
|
result = tbl.search("hello", query_type="hybrid").rerank(reranker=reranker).to_list()
|
||||||
|
|
||||||
|
```
|
||||||
|
|
||||||
|
Accepted Arguments
|
||||||
|
----------------
|
||||||
|
| Argument | Type | Default | Description |
|
||||||
|
| --- | --- | --- | --- |
|
||||||
|
| `model_name` | `str` | `"jina-reranker-v2-base-multilingual"` | The name of the reranker model to use. You can find the list of available models in https://jina.ai/reranker/|
|
||||||
|
| `column` | `str` | `"text"` | The name of the column to use as input to the cross encoder model. |
|
||||||
|
| `top_n` | `str` | `None` | The number of results to return. If None, will return all results. |
|
||||||
|
| `api_key` | `str` | `None` | The API key for the Jina API. If not provided, the `JINA_API_KEY` environment variable is used. |
|
||||||
|
| `return_score` | str | `"relevance"` | Options are "relevance" or "all". The type of score to return. If "relevance", will return only the `_relevance_score. If "all" is supported, will return relevance score along with the vector and/or fts scores depending on query type |
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Supported Scores for each query type
|
||||||
|
You can specify the type of scores you want the reranker to return. The following are the supported scores for each query type:
|
||||||
|
|
||||||
|
### Hybrid Search
|
||||||
|
|`return_score`| Status | Description |
|
||||||
|
| --- | --- | --- |
|
||||||
|
| `relevance` | ✅ Supported | Returns only have the `_relevance_score` column |
|
||||||
|
| `all` | ❌ Not Supported | Returns have vector(`_distance`) and FTS(`score`) along with Hybrid Search score(`_relevance_score`) |
|
||||||
|
|
||||||
|
### Vector Search
|
||||||
|
|`return_score`| Status | Description |
|
||||||
|
| --- | --- | --- |
|
||||||
|
| `relevance` | ✅ Supported | Returns only have the `_relevance_score` column |
|
||||||
|
| `all` | ✅ Supported | Returns have vector(`_distance`) along with Hybrid Search score(`_relevance_score`) |
|
||||||
|
|
||||||
|
### FTS Search
|
||||||
|
|`return_score`| Status | Description |
|
||||||
|
| --- | --- | --- |
|
||||||
|
| `relevance` | ✅ Supported | Returns only have the `_relevance_score` column |
|
||||||
|
| `all` | ✅ Supported | Returns have FTS(`score`) along with Hybrid Search score(`_relevance_score`) |
|
||||||
@@ -53,9 +53,20 @@ db.create_table("my_vectors", data=data)
|
|||||||
.to_list()
|
.to_list()
|
||||||
```
|
```
|
||||||
|
|
||||||
=== "JavaScript"
|
=== "TypeScript"
|
||||||
|
|
||||||
```javascript
|
=== "@lancedb/lancedb"
|
||||||
|
|
||||||
|
```ts
|
||||||
|
--8<-- "nodejs/examples/search.ts:import"
|
||||||
|
|
||||||
|
--8<-- "nodejs/examples/search.ts:search1"
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
=== "vectordb (deprecated)"
|
||||||
|
|
||||||
|
```ts
|
||||||
--8<-- "docs/src/search_legacy.ts:import"
|
--8<-- "docs/src/search_legacy.ts:import"
|
||||||
|
|
||||||
--8<-- "docs/src/search_legacy.ts:search1"
|
--8<-- "docs/src/search_legacy.ts:search1"
|
||||||
@@ -73,7 +84,15 @@ By default, `l2` will be used as metric type. You can specify the metric type as
|
|||||||
.to_list()
|
.to_list()
|
||||||
```
|
```
|
||||||
|
|
||||||
=== "JavaScript"
|
=== "TypeScript"
|
||||||
|
|
||||||
|
=== "@lancedb/lancedb"
|
||||||
|
|
||||||
|
```ts
|
||||||
|
--8<-- "nodejs/examples/search.ts:search2"
|
||||||
|
```
|
||||||
|
|
||||||
|
=== "vectordb (deprecated)"
|
||||||
|
|
||||||
```javascript
|
```javascript
|
||||||
--8<-- "docs/src/search_legacy.ts:search2"
|
--8<-- "docs/src/search_legacy.ts:search2"
|
||||||
|
|||||||
@@ -44,9 +44,17 @@ const tbl = await db.createTable('myVectors', data)
|
|||||||
)
|
)
|
||||||
```
|
```
|
||||||
|
|
||||||
=== "JavaScript"
|
=== "TypeScript"
|
||||||
|
|
||||||
```javascript
|
=== "@lancedb/lancedb"
|
||||||
|
|
||||||
|
```ts
|
||||||
|
--8<-- "nodejs/examples/filtering.ts:search"
|
||||||
|
```
|
||||||
|
|
||||||
|
=== "vectordb (deprecated)"
|
||||||
|
|
||||||
|
```ts
|
||||||
--8<-- "docs/src/sql_legacy.ts:search"
|
--8<-- "docs/src/sql_legacy.ts:search"
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -78,9 +86,17 @@ For example, the following filter string is acceptable:
|
|||||||
.to_arrow()
|
.to_arrow()
|
||||||
```
|
```
|
||||||
|
|
||||||
=== "Javascript"
|
=== "TypeScript"
|
||||||
|
|
||||||
```javascript
|
=== "@lancedb/lancedb"
|
||||||
|
|
||||||
|
```ts
|
||||||
|
--8<-- "nodejs/examples/filtering.ts:vec_search"
|
||||||
|
```
|
||||||
|
|
||||||
|
=== "vectordb (deprecated)"
|
||||||
|
|
||||||
|
```ts
|
||||||
--8<-- "docs/src/sql_legacy.ts:vec_search"
|
--8<-- "docs/src/sql_legacy.ts:vec_search"
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -148,9 +164,17 @@ You can also filter your data without search.
|
|||||||
tbl.search().where("id = 10").limit(10).to_arrow()
|
tbl.search().where("id = 10").limit(10).to_arrow()
|
||||||
```
|
```
|
||||||
|
|
||||||
=== "JavaScript"
|
=== "TypeScript"
|
||||||
|
|
||||||
```javascript
|
=== "@lancedb/lancedb"
|
||||||
|
|
||||||
|
```ts
|
||||||
|
--8<-- "nodejs/examples/filtering.ts:sql_search"
|
||||||
|
```
|
||||||
|
|
||||||
|
=== "vectordb (deprecated)"
|
||||||
|
|
||||||
|
```ts
|
||||||
--8<---- "docs/src/sql_legacy.ts:sql_search"
|
--8<---- "docs/src/sql_legacy.ts:sql_search"
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|||||||
4
node/package-lock.json
generated
4
node/package-lock.json
generated
@@ -1,12 +1,12 @@
|
|||||||
{
|
{
|
||||||
"name": "vectordb",
|
"name": "vectordb",
|
||||||
"version": "0.5.2",
|
"version": "0.7.0",
|
||||||
"lockfileVersion": 3,
|
"lockfileVersion": 3,
|
||||||
"requires": true,
|
"requires": true,
|
||||||
"packages": {
|
"packages": {
|
||||||
"": {
|
"": {
|
||||||
"name": "vectordb",
|
"name": "vectordb",
|
||||||
"version": "0.5.2",
|
"version": "0.7.0",
|
||||||
"cpu": [
|
"cpu": [
|
||||||
"x64",
|
"x64",
|
||||||
"arm64"
|
"arm64"
|
||||||
|
|||||||
@@ -1,12 +1,12 @@
|
|||||||
{
|
{
|
||||||
"name": "vectordb",
|
"name": "vectordb",
|
||||||
"version": "0.5.2",
|
"version": "0.7.0",
|
||||||
"description": " Serverless, low-latency vector database for AI applications",
|
"description": " Serverless, low-latency vector database for AI applications",
|
||||||
"main": "dist/index.js",
|
"main": "dist/index.js",
|
||||||
"types": "dist/index.d.ts",
|
"types": "dist/index.d.ts",
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"tsc": "tsc -b",
|
"tsc": "tsc -b",
|
||||||
"build": "npm run tsc && cargo-cp-artifact --artifact cdylib lancedb_node index.node -- cargo build --message-format=json",
|
"build": "npm run tsc && cargo-cp-artifact --artifact cdylib lancedb_node index.node -- cargo build -p lancedb-node --message-format=json",
|
||||||
"build-release": "npm run build -- --release",
|
"build-release": "npm run build -- --release",
|
||||||
"test": "npm run tsc && mocha -recursive dist/test",
|
"test": "npm run tsc && mocha -recursive dist/test",
|
||||||
"integration-test": "npm run tsc && mocha -recursive dist/integration_test",
|
"integration-test": "npm run tsc && mocha -recursive dist/integration_test",
|
||||||
|
|||||||
@@ -62,6 +62,8 @@ export {
|
|||||||
|
|
||||||
const defaultAwsRegion = "us-west-2";
|
const defaultAwsRegion = "us-west-2";
|
||||||
|
|
||||||
|
const defaultRequestTimeout = 10_000
|
||||||
|
|
||||||
export interface AwsCredentials {
|
export interface AwsCredentials {
|
||||||
accessKeyId: string
|
accessKeyId: string
|
||||||
|
|
||||||
@@ -119,6 +121,11 @@ export interface ConnectionOptions {
|
|||||||
*/
|
*/
|
||||||
hostOverride?: string
|
hostOverride?: string
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Duration in milliseconds for request timeout. Default = 10,000 (10 seconds)
|
||||||
|
*/
|
||||||
|
timeout?: number
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* (For LanceDB OSS only): The interval, in seconds, at which to check for
|
* (For LanceDB OSS only): The interval, in seconds, at which to check for
|
||||||
* updates to the table from other processes. If None, then consistency is not
|
* updates to the table from other processes. If None, then consistency is not
|
||||||
@@ -204,7 +211,8 @@ export async function connect(
|
|||||||
awsCredentials: undefined,
|
awsCredentials: undefined,
|
||||||
awsRegion: defaultAwsRegion,
|
awsRegion: defaultAwsRegion,
|
||||||
apiKey: undefined,
|
apiKey: undefined,
|
||||||
region: defaultAwsRegion
|
region: defaultAwsRegion,
|
||||||
|
timeout: defaultRequestTimeout
|
||||||
},
|
},
|
||||||
arg
|
arg
|
||||||
);
|
);
|
||||||
|
|||||||
@@ -41,7 +41,7 @@ async function callWithMiddlewares (
|
|||||||
if (i > middlewares.length) {
|
if (i > middlewares.length) {
|
||||||
const headers = Object.fromEntries(req.headers.entries())
|
const headers = Object.fromEntries(req.headers.entries())
|
||||||
const params = Object.fromEntries(req.params?.entries() ?? [])
|
const params = Object.fromEntries(req.params?.entries() ?? [])
|
||||||
const timeout = 10000
|
const timeout = opts?.timeout
|
||||||
let res
|
let res
|
||||||
if (req.method === Method.POST) {
|
if (req.method === Method.POST) {
|
||||||
res = await axios.post(
|
res = await axios.post(
|
||||||
@@ -82,6 +82,7 @@ async function callWithMiddlewares (
|
|||||||
|
|
||||||
interface MiddlewareInvocationOptions {
|
interface MiddlewareInvocationOptions {
|
||||||
responseType?: ResponseType
|
responseType?: ResponseType
|
||||||
|
timeout?: number,
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -123,15 +124,19 @@ export class HttpLancedbClient {
|
|||||||
private readonly _url: string
|
private readonly _url: string
|
||||||
private readonly _apiKey: () => string
|
private readonly _apiKey: () => string
|
||||||
private readonly _middlewares: HttpLancedbClientMiddleware[]
|
private readonly _middlewares: HttpLancedbClientMiddleware[]
|
||||||
|
private readonly _timeout: number | undefined
|
||||||
|
|
||||||
public constructor (
|
public constructor (
|
||||||
url: string,
|
url: string,
|
||||||
apiKey: string,
|
apiKey: string,
|
||||||
private readonly _dbName?: string
|
timeout?: number,
|
||||||
|
private readonly _dbName?: string,
|
||||||
|
|
||||||
) {
|
) {
|
||||||
this._url = url
|
this._url = url
|
||||||
this._apiKey = () => apiKey
|
this._apiKey = () => apiKey
|
||||||
this._middlewares = []
|
this._middlewares = []
|
||||||
|
this._timeout = timeout
|
||||||
}
|
}
|
||||||
|
|
||||||
get uri (): string {
|
get uri (): string {
|
||||||
@@ -230,7 +235,10 @@ export class HttpLancedbClient {
|
|||||||
|
|
||||||
let response
|
let response
|
||||||
try {
|
try {
|
||||||
response = await callWithMiddlewares(req, this._middlewares, { responseType })
|
response = await callWithMiddlewares(req, this._middlewares, {
|
||||||
|
responseType,
|
||||||
|
timeout: this._timeout,
|
||||||
|
})
|
||||||
|
|
||||||
// return response
|
// return response
|
||||||
} catch (err: any) {
|
} catch (err: any) {
|
||||||
@@ -267,7 +275,7 @@ export class HttpLancedbClient {
|
|||||||
* Make a clone of this client
|
* Make a clone of this client
|
||||||
*/
|
*/
|
||||||
private clone (): HttpLancedbClient {
|
private clone (): HttpLancedbClient {
|
||||||
const clone = new HttpLancedbClient(this._url, this._apiKey(), this._dbName)
|
const clone = new HttpLancedbClient(this._url, this._apiKey(), this._timeout, this._dbName)
|
||||||
for (const mw of this._middlewares) {
|
for (const mw of this._middlewares) {
|
||||||
clone._middlewares.push(mw)
|
clone._middlewares.push(mw)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -72,6 +72,7 @@ export class RemoteConnection implements Connection {
|
|||||||
this._client = new HttpLancedbClient(
|
this._client = new HttpLancedbClient(
|
||||||
server,
|
server,
|
||||||
opts.apiKey,
|
opts.apiKey,
|
||||||
|
opts.timeout,
|
||||||
opts.hostOverride === undefined ? undefined : this._dbName
|
opts.hostOverride === undefined ? undefined : this._dbName
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -15,11 +15,11 @@ crate-type = ["cdylib"]
|
|||||||
arrow-ipc.workspace = true
|
arrow-ipc.workspace = true
|
||||||
futures.workspace = true
|
futures.workspace = true
|
||||||
lancedb = { path = "../rust/lancedb" }
|
lancedb = { path = "../rust/lancedb" }
|
||||||
napi = { version = "2.15", default-features = false, features = [
|
napi = { version = "2.16.8", default-features = false, features = [
|
||||||
"napi7",
|
"napi9",
|
||||||
"async",
|
"async",
|
||||||
] }
|
] }
|
||||||
napi-derive = "2"
|
napi-derive = "2.16.4"
|
||||||
|
|
||||||
# Prevent dynamic linking of lzma, which comes from datafusion
|
# Prevent dynamic linking of lzma, which comes from datafusion
|
||||||
lzma-sys = { version = "*", features = ["static"] }
|
lzma-sys = { version = "*", features = ["static"] }
|
||||||
|
|||||||
@@ -63,6 +63,7 @@ describe("Registry", () => {
|
|||||||
return data.map(() => [1, 2, 3]);
|
return data.map(() => [1, 2, 3]);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
const func = getRegistry()
|
const func = getRegistry()
|
||||||
.get<MockEmbeddingFunction>("mock-embedding")!
|
.get<MockEmbeddingFunction>("mock-embedding")!
|
||||||
.create();
|
.create();
|
||||||
|
|||||||
@@ -14,6 +14,11 @@
|
|||||||
|
|
||||||
/* eslint-disable @typescript-eslint/naming-convention */
|
/* eslint-disable @typescript-eslint/naming-convention */
|
||||||
|
|
||||||
|
import {
|
||||||
|
CreateTableCommand,
|
||||||
|
DeleteTableCommand,
|
||||||
|
DynamoDBClient,
|
||||||
|
} from "@aws-sdk/client-dynamodb";
|
||||||
import {
|
import {
|
||||||
CreateKeyCommand,
|
CreateKeyCommand,
|
||||||
KMSClient,
|
KMSClient,
|
||||||
@@ -38,6 +43,7 @@ const CONFIG = {
|
|||||||
awsAccessKeyId: "ACCESSKEY",
|
awsAccessKeyId: "ACCESSKEY",
|
||||||
awsSecretAccessKey: "SECRETKEY",
|
awsSecretAccessKey: "SECRETKEY",
|
||||||
awsEndpoint: "http://127.0.0.1:4566",
|
awsEndpoint: "http://127.0.0.1:4566",
|
||||||
|
dynamodbEndpoint: "http://127.0.0.1:4566",
|
||||||
awsRegion: "us-east-1",
|
awsRegion: "us-east-1",
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -66,7 +72,6 @@ class S3Bucket {
|
|||||||
} catch {
|
} catch {
|
||||||
// It's fine if the bucket doesn't exist
|
// It's fine if the bucket doesn't exist
|
||||||
}
|
}
|
||||||
// biome-ignore lint/style/useNamingConvention: we dont control s3's api
|
|
||||||
await client.send(new CreateBucketCommand({ Bucket: name }));
|
await client.send(new CreateBucketCommand({ Bucket: name }));
|
||||||
return new S3Bucket(name);
|
return new S3Bucket(name);
|
||||||
}
|
}
|
||||||
@@ -79,32 +84,27 @@ class S3Bucket {
|
|||||||
static async deleteBucket(client: S3Client, name: string) {
|
static async deleteBucket(client: S3Client, name: string) {
|
||||||
// Must delete all objects before we can delete the bucket
|
// Must delete all objects before we can delete the bucket
|
||||||
const objects = await client.send(
|
const objects = await client.send(
|
||||||
// biome-ignore lint/style/useNamingConvention: we dont control s3's api
|
|
||||||
new ListObjectsV2Command({ Bucket: name }),
|
new ListObjectsV2Command({ Bucket: name }),
|
||||||
);
|
);
|
||||||
if (objects.Contents) {
|
if (objects.Contents) {
|
||||||
for (const object of objects.Contents) {
|
for (const object of objects.Contents) {
|
||||||
await client.send(
|
await client.send(
|
||||||
// biome-ignore lint/style/useNamingConvention: we dont control s3's api
|
|
||||||
new DeleteObjectCommand({ Bucket: name, Key: object.Key }),
|
new DeleteObjectCommand({ Bucket: name, Key: object.Key }),
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// biome-ignore lint/style/useNamingConvention: we dont control s3's api
|
|
||||||
await client.send(new DeleteBucketCommand({ Bucket: name }));
|
await client.send(new DeleteBucketCommand({ Bucket: name }));
|
||||||
}
|
}
|
||||||
|
|
||||||
public async assertAllEncrypted(path: string, keyId: string) {
|
public async assertAllEncrypted(path: string, keyId: string) {
|
||||||
const client = S3Bucket.s3Client();
|
const client = S3Bucket.s3Client();
|
||||||
const objects = await client.send(
|
const objects = await client.send(
|
||||||
// biome-ignore lint/style/useNamingConvention: we dont control s3's api
|
|
||||||
new ListObjectsV2Command({ Bucket: this.name, Prefix: path }),
|
new ListObjectsV2Command({ Bucket: this.name, Prefix: path }),
|
||||||
);
|
);
|
||||||
if (objects.Contents) {
|
if (objects.Contents) {
|
||||||
for (const object of objects.Contents) {
|
for (const object of objects.Contents) {
|
||||||
const metadata = await client.send(
|
const metadata = await client.send(
|
||||||
// biome-ignore lint/style/useNamingConvention: we dont control s3's api
|
|
||||||
new HeadObjectCommand({ Bucket: this.name, Key: object.Key }),
|
new HeadObjectCommand({ Bucket: this.name, Key: object.Key }),
|
||||||
);
|
);
|
||||||
expect(metadata.ServerSideEncryption).toBe("aws:kms");
|
expect(metadata.ServerSideEncryption).toBe("aws:kms");
|
||||||
@@ -143,7 +143,6 @@ class KmsKey {
|
|||||||
|
|
||||||
public async delete() {
|
public async delete() {
|
||||||
const client = KmsKey.kmsClient();
|
const client = KmsKey.kmsClient();
|
||||||
// biome-ignore lint/style/useNamingConvention: we dont control s3's api
|
|
||||||
await client.send(new ScheduleKeyDeletionCommand({ KeyId: this.keyId }));
|
await client.send(new ScheduleKeyDeletionCommand({ KeyId: this.keyId }));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -224,3 +223,91 @@ maybeDescribe("storage_options", () => {
|
|||||||
await bucket.assertAllEncrypted("test/table2.lance", kmsKey.keyId);
|
await bucket.assertAllEncrypted("test/table2.lance", kmsKey.keyId);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
class DynamoDBCommitTable {
|
||||||
|
name: string;
|
||||||
|
constructor(name: string) {
|
||||||
|
this.name = name;
|
||||||
|
}
|
||||||
|
|
||||||
|
static dynamoClient() {
|
||||||
|
return new DynamoDBClient({
|
||||||
|
region: CONFIG.awsRegion,
|
||||||
|
credentials: {
|
||||||
|
accessKeyId: CONFIG.awsAccessKeyId,
|
||||||
|
secretAccessKey: CONFIG.awsSecretAccessKey,
|
||||||
|
},
|
||||||
|
endpoint: CONFIG.awsEndpoint,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
public static async create(name: string): Promise<DynamoDBCommitTable> {
|
||||||
|
const client = DynamoDBCommitTable.dynamoClient();
|
||||||
|
const command = new CreateTableCommand({
|
||||||
|
TableName: name,
|
||||||
|
AttributeDefinitions: [
|
||||||
|
{
|
||||||
|
AttributeName: "base_uri",
|
||||||
|
AttributeType: "S",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
AttributeName: "version",
|
||||||
|
AttributeType: "N",
|
||||||
|
},
|
||||||
|
],
|
||||||
|
KeySchema: [
|
||||||
|
{ AttributeName: "base_uri", KeyType: "HASH" },
|
||||||
|
{ AttributeName: "version", KeyType: "RANGE" },
|
||||||
|
],
|
||||||
|
ProvisionedThroughput: {
|
||||||
|
ReadCapacityUnits: 1,
|
||||||
|
WriteCapacityUnits: 1,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
await client.send(command);
|
||||||
|
return new DynamoDBCommitTable(name);
|
||||||
|
}
|
||||||
|
|
||||||
|
public async delete() {
|
||||||
|
const client = DynamoDBCommitTable.dynamoClient();
|
||||||
|
await client.send(new DeleteTableCommand({ TableName: this.name }));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
maybeDescribe("DynamoDB Lock", () => {
|
||||||
|
let bucket: S3Bucket;
|
||||||
|
let commitTable: DynamoDBCommitTable;
|
||||||
|
|
||||||
|
beforeAll(async () => {
|
||||||
|
bucket = await S3Bucket.create("lancedb2");
|
||||||
|
commitTable = await DynamoDBCommitTable.create("commitTable");
|
||||||
|
});
|
||||||
|
|
||||||
|
afterAll(async () => {
|
||||||
|
await commitTable.delete();
|
||||||
|
await bucket.delete();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("can be used to configure a DynamoDB table for commit log", async () => {
|
||||||
|
const uri = `s3+ddb://${bucket.name}/test?ddbTableName=${commitTable.name}`;
|
||||||
|
const db = await connect(uri, {
|
||||||
|
storageOptions: CONFIG,
|
||||||
|
readConsistencyInterval: 0,
|
||||||
|
});
|
||||||
|
|
||||||
|
const table = await db.createTable("test", [{ a: 1, b: 2 }]);
|
||||||
|
|
||||||
|
// 5 concurrent appends
|
||||||
|
const futs = Array.from({ length: 5 }, async () => {
|
||||||
|
// Open a table so each append has a separate table reference. Otherwise
|
||||||
|
// they will share the same table reference and the internal ReadWriteLock
|
||||||
|
// will prevent any real concurrency.
|
||||||
|
const table = await db.openTable("test");
|
||||||
|
await table.add([{ a: 2, b: 3 }]);
|
||||||
|
});
|
||||||
|
await Promise.all(futs);
|
||||||
|
|
||||||
|
const rowCount = await table.countRows();
|
||||||
|
expect(rowCount).toBe(6);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|||||||
@@ -39,7 +39,9 @@ describe.each([arrow, arrowOld])("Given a table", (arrow: any) => {
|
|||||||
let tmpDir: tmp.DirResult;
|
let tmpDir: tmp.DirResult;
|
||||||
let table: Table;
|
let table: Table;
|
||||||
|
|
||||||
const schema = new arrow.Schema([
|
const schema:
|
||||||
|
| import("apache-arrow").Schema
|
||||||
|
| import("apache-arrow-old").Schema = new arrow.Schema([
|
||||||
new arrow.Field("id", new arrow.Float64(), true),
|
new arrow.Field("id", new arrow.Float64(), true),
|
||||||
]);
|
]);
|
||||||
|
|
||||||
@@ -94,6 +96,50 @@ describe.each([arrow, arrowOld])("Given a table", (arrow: any) => {
|
|||||||
expect(await table.countRows("id == 10")).toBe(1);
|
expect(await table.countRows("id == 10")).toBe(1);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it("should let me update values with `values`", async () => {
|
||||||
|
await table.add([{ id: 1 }]);
|
||||||
|
expect(await table.countRows("id == 1")).toBe(1);
|
||||||
|
expect(await table.countRows("id == 7")).toBe(0);
|
||||||
|
await table.update({ values: { id: 7 } });
|
||||||
|
expect(await table.countRows("id == 1")).toBe(0);
|
||||||
|
expect(await table.countRows("id == 7")).toBe(1);
|
||||||
|
await table.add([{ id: 2 }]);
|
||||||
|
// Test Map as input
|
||||||
|
await table.update({
|
||||||
|
values: {
|
||||||
|
id: "10",
|
||||||
|
},
|
||||||
|
where: "id % 2 == 0",
|
||||||
|
});
|
||||||
|
expect(await table.countRows("id == 2")).toBe(0);
|
||||||
|
expect(await table.countRows("id == 7")).toBe(1);
|
||||||
|
expect(await table.countRows("id == 10")).toBe(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should let me update values with `valuesSql`", async () => {
|
||||||
|
await table.add([{ id: 1 }]);
|
||||||
|
expect(await table.countRows("id == 1")).toBe(1);
|
||||||
|
expect(await table.countRows("id == 7")).toBe(0);
|
||||||
|
await table.update({
|
||||||
|
valuesSql: {
|
||||||
|
id: "7",
|
||||||
|
},
|
||||||
|
});
|
||||||
|
expect(await table.countRows("id == 1")).toBe(0);
|
||||||
|
expect(await table.countRows("id == 7")).toBe(1);
|
||||||
|
await table.add([{ id: 2 }]);
|
||||||
|
// Test Map as input
|
||||||
|
await table.update({
|
||||||
|
valuesSql: {
|
||||||
|
id: "10",
|
||||||
|
},
|
||||||
|
where: "id % 2 == 0",
|
||||||
|
});
|
||||||
|
expect(await table.countRows("id == 2")).toBe(0);
|
||||||
|
expect(await table.countRows("id == 7")).toBe(1);
|
||||||
|
expect(await table.countRows("id == 10")).toBe(1);
|
||||||
|
});
|
||||||
|
|
||||||
// https://github.com/lancedb/lancedb/issues/1293
|
// https://github.com/lancedb/lancedb/issues/1293
|
||||||
test.each([new arrow.Float16(), new arrow.Float32(), new arrow.Float64()])(
|
test.each([new arrow.Float16(), new arrow.Float32(), new arrow.Float64()])(
|
||||||
"can create empty table with non default float type: %s",
|
"can create empty table with non default float type: %s",
|
||||||
@@ -315,7 +361,7 @@ describe("When creating an index", () => {
|
|||||||
.query()
|
.query()
|
||||||
.limit(2)
|
.limit(2)
|
||||||
.nearestTo(queryVec)
|
.nearestTo(queryVec)
|
||||||
.distanceType("DoT")
|
.distanceType("dot")
|
||||||
.toArrow();
|
.toArrow();
|
||||||
expect(rst.numRows).toBe(2);
|
expect(rst.numRows).toBe(2);
|
||||||
|
|
||||||
@@ -704,10 +750,10 @@ describe("table.search", () => {
|
|||||||
const data = [{ text: "hello world" }, { text: "goodbye world" }];
|
const data = [{ text: "hello world" }, { text: "goodbye world" }];
|
||||||
const table = await db.createTable("test", data, { schema });
|
const table = await db.createTable("test", data, { schema });
|
||||||
|
|
||||||
const results = await table.search("greetings").then((r) => r.toArray());
|
const results = await table.search("greetings").toArray();
|
||||||
expect(results[0].text).toBe(data[0].text);
|
expect(results[0].text).toBe(data[0].text);
|
||||||
|
|
||||||
const results2 = await table.search("farewell").then((r) => r.toArray());
|
const results2 = await table.search("farewell").toArray();
|
||||||
expect(results2[0].text).toBe(data[1].text);
|
expect(results2[0].text).toBe(data[1].text);
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -719,7 +765,7 @@ describe("table.search", () => {
|
|||||||
];
|
];
|
||||||
const table = await db.createTable("test", data);
|
const table = await db.createTable("test", data);
|
||||||
|
|
||||||
expect(table.search("hello")).rejects.toThrow(
|
expect(table.search("hello").toArray()).rejects.toThrow(
|
||||||
"No embedding functions are defined in the table",
|
"No embedding functions are defined in the table",
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
@@ -743,3 +789,27 @@ describe("table.search", () => {
|
|||||||
expect(results[0].text).toBe(data[1].text);
|
expect(results[0].text).toBe(data[1].text);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
describe("when calling explainPlan", () => {
|
||||||
|
let tmpDir: tmp.DirResult;
|
||||||
|
let table: Table;
|
||||||
|
let queryVec: number[];
|
||||||
|
beforeEach(async () => {
|
||||||
|
tmpDir = tmp.dirSync({ unsafeCleanup: true });
|
||||||
|
const con = await connect(tmpDir.name);
|
||||||
|
table = await con.createTable("vectors", [{ id: 1, vector: [0.1, 0.2] }]);
|
||||||
|
});
|
||||||
|
|
||||||
|
afterEach(() => {
|
||||||
|
tmpDir.removeCallback();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("retrieves query plan", async () => {
|
||||||
|
queryVec = Array(2)
|
||||||
|
.fill(1)
|
||||||
|
.map(() => Math.random());
|
||||||
|
const plan = await table.query().nearestTo(queryVec).explainPlan(true);
|
||||||
|
|
||||||
|
expect(plan).toMatch("KNN");
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|||||||
@@ -6,5 +6,5 @@
|
|||||||
"target": "es2022",
|
"target": "es2022",
|
||||||
"types": ["jest", "node"]
|
"types": ["jest", "node"]
|
||||||
},
|
},
|
||||||
"include": ["**/*"]
|
"include": ["**/*", "../examples/ann_indexes.ts"]
|
||||||
}
|
}
|
||||||
|
|||||||
28
nodejs/__test__/util.test.ts
Normal file
28
nodejs/__test__/util.test.ts
Normal file
@@ -0,0 +1,28 @@
|
|||||||
|
import { IntoSql, toSQL } from "../lancedb/util";
|
||||||
|
test.each([
|
||||||
|
["string", "'string'"],
|
||||||
|
[123, "123"],
|
||||||
|
[1.11, "1.11"],
|
||||||
|
[true, "TRUE"],
|
||||||
|
[false, "FALSE"],
|
||||||
|
[null, "NULL"],
|
||||||
|
[new Date("2021-01-01T00:00:00.000Z"), "'2021-01-01T00:00:00.000Z'"],
|
||||||
|
[[1, 2, 3], "[1, 2, 3]"],
|
||||||
|
[new ArrayBuffer(8), "X'0000000000000000'"],
|
||||||
|
[Buffer.from("hello"), "X'68656c6c6f'"],
|
||||||
|
["Hello 'world'", "'Hello ''world'''"],
|
||||||
|
])("toSQL(%p) === %p", (value, expected) => {
|
||||||
|
expect(toSQL(value)).toBe(expected);
|
||||||
|
});
|
||||||
|
|
||||||
|
test("toSQL({}) throws on unsupported value type", () => {
|
||||||
|
expect(() => toSQL({} as unknown as IntoSql)).toThrow(
|
||||||
|
"Unsupported value type: object value: ([object Object])",
|
||||||
|
);
|
||||||
|
});
|
||||||
|
test("toSQL() throws on unsupported value type", () => {
|
||||||
|
// biome-ignore lint/suspicious/noExplicitAny: <explanation>
|
||||||
|
expect(() => (<any>toSQL)()).toThrow(
|
||||||
|
"Unsupported value type: undefined value: (undefined)",
|
||||||
|
);
|
||||||
|
});
|
||||||
@@ -1,5 +1,5 @@
|
|||||||
{
|
{
|
||||||
"$schema": "https://biomejs.dev/schemas/1.7.3/schema.json",
|
"$schema": "https://biomejs.dev/schemas/1.8.3/schema.json",
|
||||||
"organizeImports": {
|
"organizeImports": {
|
||||||
"enabled": true
|
"enabled": true
|
||||||
},
|
},
|
||||||
@@ -94,12 +94,28 @@
|
|||||||
"useValidTypeof": "error"
|
"useValidTypeof": "error"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"ignore": ["**/dist/**/*", "**/native.js", "**/native.d.ts"]
|
"ignore": [
|
||||||
|
"**/dist/**/*",
|
||||||
|
"**/native.js",
|
||||||
|
"**/native.d.ts",
|
||||||
|
"__test__/docs/**/*",
|
||||||
|
"examples/**/*"
|
||||||
|
]
|
||||||
},
|
},
|
||||||
"javascript": {
|
"javascript": {
|
||||||
"globals": []
|
"globals": []
|
||||||
},
|
},
|
||||||
"overrides": [
|
"overrides": [
|
||||||
|
{
|
||||||
|
"include": ["__test__/s3_integration.test.ts"],
|
||||||
|
"linter": {
|
||||||
|
"rules": {
|
||||||
|
"style": {
|
||||||
|
"useNamingConvention": "off"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
{
|
{
|
||||||
"include": [
|
"include": [
|
||||||
"**/*.ts",
|
"**/*.ts",
|
||||||
|
|||||||
1
nodejs/examples/.gitignore
vendored
Normal file
1
nodejs/examples/.gitignore
vendored
Normal file
@@ -0,0 +1 @@
|
|||||||
|
data/
|
||||||
49
nodejs/examples/ann_indexes.ts
Normal file
49
nodejs/examples/ann_indexes.ts
Normal file
@@ -0,0 +1,49 @@
|
|||||||
|
// --8<-- [start:import]
|
||||||
|
import * as lancedb from "@lancedb/lancedb";
|
||||||
|
// --8<-- [end:import]
|
||||||
|
|
||||||
|
// --8<-- [start:ingest]
|
||||||
|
const db = await lancedb.connect("/tmp/lancedb/");
|
||||||
|
|
||||||
|
const data = Array.from({ length: 10_000 }, (_, i) => ({
|
||||||
|
vector: Array(1536).fill(i),
|
||||||
|
id: `${i}`,
|
||||||
|
content: "",
|
||||||
|
longId: `${i}`,
|
||||||
|
}));
|
||||||
|
|
||||||
|
const table = await db.createTable("my_vectors", data, { mode: "overwrite" });
|
||||||
|
await table.createIndex("vector", {
|
||||||
|
config: lancedb.Index.ivfPq({
|
||||||
|
numPartitions: 16,
|
||||||
|
numSubVectors: 48,
|
||||||
|
}),
|
||||||
|
});
|
||||||
|
// --8<-- [end:ingest]
|
||||||
|
|
||||||
|
// --8<-- [start:search1]
|
||||||
|
const _results1 = await table
|
||||||
|
.search(Array(1536).fill(1.2))
|
||||||
|
.limit(2)
|
||||||
|
.nprobes(20)
|
||||||
|
.refineFactor(10)
|
||||||
|
.toArray();
|
||||||
|
// --8<-- [end:search1]
|
||||||
|
|
||||||
|
// --8<-- [start:search2]
|
||||||
|
const _results2 = await table
|
||||||
|
.search(Array(1536).fill(1.2))
|
||||||
|
.where("id != '1141'")
|
||||||
|
.limit(2)
|
||||||
|
.toArray();
|
||||||
|
// --8<-- [end:search2]
|
||||||
|
|
||||||
|
// --8<-- [start:search3]
|
||||||
|
const _results3 = await table
|
||||||
|
.search(Array(1536).fill(1.2))
|
||||||
|
.select(["id"])
|
||||||
|
.limit(2)
|
||||||
|
.toArray();
|
||||||
|
// --8<-- [end:search3]
|
||||||
|
|
||||||
|
console.log("Ann indexes: done");
|
||||||
149
nodejs/examples/basic.ts
Normal file
149
nodejs/examples/basic.ts
Normal file
@@ -0,0 +1,149 @@
|
|||||||
|
// --8<-- [start:imports]
|
||||||
|
import * as lancedb from "@lancedb/lancedb";
|
||||||
|
import * as arrow from "apache-arrow";
|
||||||
|
import { Field, FixedSizeList, Float16, Int32, Schema } from "apache-arrow";
|
||||||
|
|
||||||
|
// --8<-- [end:imports]
|
||||||
|
|
||||||
|
// --8<-- [start:connect]
|
||||||
|
const uri = "/tmp/lancedb/";
|
||||||
|
const db = await lancedb.connect(uri);
|
||||||
|
// --8<-- [end:connect]
|
||||||
|
{
|
||||||
|
// --8<-- [start:create_table]
|
||||||
|
const data = [
|
||||||
|
{ vector: [3.1, 4.1], item: "foo", price: 10.0 },
|
||||||
|
{ vector: [5.9, 26.5], item: "bar", price: 20.0 },
|
||||||
|
];
|
||||||
|
const _tbl = await db.createTable("myTable", data);
|
||||||
|
// --8<-- [end:create_table]
|
||||||
|
{
|
||||||
|
// --8<-- [start:create_table_exists_ok]
|
||||||
|
const _tbl = await db.createTable("myTable", data, {
|
||||||
|
existsOk: true,
|
||||||
|
});
|
||||||
|
// --8<-- [end:create_table_exists_ok]
|
||||||
|
}
|
||||||
|
{
|
||||||
|
// --8<-- [start:create_table_overwrite]
|
||||||
|
const _tbl = await db.createTable("myTable", data, {
|
||||||
|
mode: "overwrite",
|
||||||
|
});
|
||||||
|
// --8<-- [end:create_table_overwrite]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
{
|
||||||
|
// --8<-- [start:create_table_with_schema]
|
||||||
|
const schema = new arrow.Schema([
|
||||||
|
new arrow.Field(
|
||||||
|
"vector",
|
||||||
|
new arrow.FixedSizeList(
|
||||||
|
2,
|
||||||
|
new arrow.Field("item", new arrow.Float32(), true),
|
||||||
|
),
|
||||||
|
),
|
||||||
|
new arrow.Field("item", new arrow.Utf8(), true),
|
||||||
|
new arrow.Field("price", new arrow.Float32(), true),
|
||||||
|
]);
|
||||||
|
const data = [
|
||||||
|
{ vector: [3.1, 4.1], item: "foo", price: 10.0 },
|
||||||
|
{ vector: [5.9, 26.5], item: "bar", price: 20.0 },
|
||||||
|
];
|
||||||
|
const _tbl = await db.createTable("myTable", data, {
|
||||||
|
schema,
|
||||||
|
});
|
||||||
|
// --8<-- [end:create_table_with_schema]
|
||||||
|
}
|
||||||
|
|
||||||
|
{
|
||||||
|
// --8<-- [start:create_empty_table]
|
||||||
|
const schema = new arrow.Schema([
|
||||||
|
new arrow.Field(
|
||||||
|
"vector",
|
||||||
|
new arrow.FixedSizeList(
|
||||||
|
2,
|
||||||
|
new arrow.Field("item", new arrow.Float32(), true),
|
||||||
|
),
|
||||||
|
),
|
||||||
|
]);
|
||||||
|
const _tbl = await db.createEmptyTable("empty_table", schema);
|
||||||
|
// --8<-- [end:create_empty_table]
|
||||||
|
}
|
||||||
|
{
|
||||||
|
// --8<-- [start:open_table]
|
||||||
|
const _tbl = await db.openTable("myTable");
|
||||||
|
// --8<-- [end:open_table]
|
||||||
|
}
|
||||||
|
|
||||||
|
{
|
||||||
|
// --8<-- [start:table_names]
|
||||||
|
const tableNames = await db.tableNames();
|
||||||
|
console.log(tableNames);
|
||||||
|
// --8<-- [end:table_names]
|
||||||
|
}
|
||||||
|
|
||||||
|
const tbl = await db.openTable("myTable");
|
||||||
|
{
|
||||||
|
// --8<-- [start:add_data]
|
||||||
|
const data = [
|
||||||
|
{ vector: [1.3, 1.4], item: "fizz", price: 100.0 },
|
||||||
|
{ vector: [9.5, 56.2], item: "buzz", price: 200.0 },
|
||||||
|
];
|
||||||
|
await tbl.add(data);
|
||||||
|
// --8<-- [end:add_data]
|
||||||
|
}
|
||||||
|
{
|
||||||
|
// --8<-- [start:vector_search]
|
||||||
|
const _res = tbl.search([100, 100]).limit(2).toArray();
|
||||||
|
// --8<-- [end:vector_search]
|
||||||
|
}
|
||||||
|
{
|
||||||
|
const data = Array.from({ length: 1000 })
|
||||||
|
.fill(null)
|
||||||
|
.map(() => ({
|
||||||
|
vector: [Math.random(), Math.random()],
|
||||||
|
item: "autogen",
|
||||||
|
price: Math.round(Math.random() * 100),
|
||||||
|
}));
|
||||||
|
|
||||||
|
await tbl.add(data);
|
||||||
|
}
|
||||||
|
|
||||||
|
// --8<-- [start:create_index]
|
||||||
|
await tbl.createIndex("vector");
|
||||||
|
// --8<-- [end:create_index]
|
||||||
|
|
||||||
|
// --8<-- [start:delete_rows]
|
||||||
|
await tbl.delete('item = "fizz"');
|
||||||
|
// --8<-- [end:delete_rows]
|
||||||
|
|
||||||
|
// --8<-- [start:drop_table]
|
||||||
|
await db.dropTable("myTable");
|
||||||
|
// --8<-- [end:drop_table]
|
||||||
|
await db.dropTable("empty_table");
|
||||||
|
|
||||||
|
{
|
||||||
|
// --8<-- [start:create_f16_table]
|
||||||
|
const db = await lancedb.connect("/tmp/lancedb");
|
||||||
|
const dim = 16;
|
||||||
|
const total = 10;
|
||||||
|
const f16Schema = new Schema([
|
||||||
|
new Field("id", new Int32()),
|
||||||
|
new Field(
|
||||||
|
"vector",
|
||||||
|
new FixedSizeList(dim, new Field("item", new Float16(), true)),
|
||||||
|
false,
|
||||||
|
),
|
||||||
|
]);
|
||||||
|
const data = lancedb.makeArrowTable(
|
||||||
|
Array.from(Array(total), (_, i) => ({
|
||||||
|
id: i,
|
||||||
|
vector: Array.from(Array(dim), Math.random),
|
||||||
|
})),
|
||||||
|
{ schema: f16Schema },
|
||||||
|
);
|
||||||
|
const _table = await db.createTable("f16_tbl", data);
|
||||||
|
// --8<-- [end:create_f16_table]
|
||||||
|
await db.dropTable("f16_tbl");
|
||||||
|
}
|
||||||
83
nodejs/examples/embedding.ts
Normal file
83
nodejs/examples/embedding.ts
Normal file
@@ -0,0 +1,83 @@
|
|||||||
|
// --8<-- [start:imports]
|
||||||
|
import * as lancedb from "@lancedb/lancedb";
|
||||||
|
import { LanceSchema, getRegistry, register } from "@lancedb/lancedb/embedding";
|
||||||
|
import { EmbeddingFunction } from "@lancedb/lancedb/embedding";
|
||||||
|
import { type Float, Float32, Utf8 } from "apache-arrow";
|
||||||
|
// --8<-- [end:imports]
|
||||||
|
|
||||||
|
{
|
||||||
|
// --8<-- [start:openai_embeddings]
|
||||||
|
|
||||||
|
const db = await lancedb.connect("/tmp/db");
|
||||||
|
const func = getRegistry()
|
||||||
|
.get("openai")
|
||||||
|
?.create({ model: "text-embedding-ada-002" }) as EmbeddingFunction;
|
||||||
|
|
||||||
|
const wordsSchema = LanceSchema({
|
||||||
|
text: func.sourceField(new Utf8()),
|
||||||
|
vector: func.vectorField(),
|
||||||
|
});
|
||||||
|
const tbl = await db.createEmptyTable("words", wordsSchema, {
|
||||||
|
mode: "overwrite",
|
||||||
|
});
|
||||||
|
await tbl.add([{ text: "hello world" }, { text: "goodbye world" }]);
|
||||||
|
|
||||||
|
const query = "greetings";
|
||||||
|
const actual = (await (await tbl.search(query)).limit(1).toArray())[0];
|
||||||
|
|
||||||
|
// --8<-- [end:openai_embeddings]
|
||||||
|
console.log("result = ", actual.text);
|
||||||
|
}
|
||||||
|
|
||||||
|
{
|
||||||
|
// --8<-- [start:embedding_function]
|
||||||
|
const db = await lancedb.connect("/tmp/db");
|
||||||
|
|
||||||
|
@register("my_embedding")
|
||||||
|
class MyEmbeddingFunction extends EmbeddingFunction<string> {
|
||||||
|
toJSON(): object {
|
||||||
|
return {};
|
||||||
|
}
|
||||||
|
ndims() {
|
||||||
|
return 3;
|
||||||
|
}
|
||||||
|
embeddingDataType(): Float {
|
||||||
|
return new Float32();
|
||||||
|
}
|
||||||
|
async computeQueryEmbeddings(_data: string) {
|
||||||
|
// This is a placeholder for a real embedding function
|
||||||
|
return [1, 2, 3];
|
||||||
|
}
|
||||||
|
async computeSourceEmbeddings(data: string[]) {
|
||||||
|
// This is a placeholder for a real embedding function
|
||||||
|
return Array.from({ length: data.length }).fill([1, 2, 3]) as number[][];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const func = new MyEmbeddingFunction();
|
||||||
|
|
||||||
|
const data = [{ text: "pepperoni" }, { text: "pineapple" }];
|
||||||
|
|
||||||
|
// Option 1: manually specify the embedding function
|
||||||
|
const table = await db.createTable("vectors", data, {
|
||||||
|
embeddingFunction: {
|
||||||
|
function: func,
|
||||||
|
sourceColumn: "text",
|
||||||
|
vectorColumn: "vector",
|
||||||
|
},
|
||||||
|
mode: "overwrite",
|
||||||
|
});
|
||||||
|
|
||||||
|
// Option 2: provide the embedding function through a schema
|
||||||
|
|
||||||
|
const schema = LanceSchema({
|
||||||
|
text: func.sourceField(new Utf8()),
|
||||||
|
vector: func.vectorField(),
|
||||||
|
});
|
||||||
|
|
||||||
|
const table2 = await db.createTable("vectors2", data, {
|
||||||
|
schema,
|
||||||
|
mode: "overwrite",
|
||||||
|
});
|
||||||
|
// --8<-- [end:embedding_function]
|
||||||
|
}
|
||||||
34
nodejs/examples/filtering.ts
Normal file
34
nodejs/examples/filtering.ts
Normal file
@@ -0,0 +1,34 @@
|
|||||||
|
import * as lancedb from "@lancedb/lancedb";
|
||||||
|
|
||||||
|
const db = await lancedb.connect("data/sample-lancedb");
|
||||||
|
|
||||||
|
const data = Array.from({ length: 10_000 }, (_, i) => ({
|
||||||
|
vector: Array(1536).fill(i),
|
||||||
|
id: i,
|
||||||
|
item: `item ${i}`,
|
||||||
|
strId: `${i}`,
|
||||||
|
}));
|
||||||
|
|
||||||
|
const tbl = await db.createTable("myVectors", data, { mode: "overwrite" });
|
||||||
|
|
||||||
|
// --8<-- [start:search]
|
||||||
|
const _result = await tbl
|
||||||
|
.search(Array(1536).fill(0.5))
|
||||||
|
.limit(1)
|
||||||
|
.where("id = 10")
|
||||||
|
.toArray();
|
||||||
|
// --8<-- [end:search]
|
||||||
|
|
||||||
|
// --8<-- [start:vec_search]
|
||||||
|
await tbl
|
||||||
|
.search(Array(1536).fill(0))
|
||||||
|
.where("(item IN ('item 0', 'item 2')) AND (id > 10)")
|
||||||
|
.postfilter()
|
||||||
|
.toArray();
|
||||||
|
// --8<-- [end:vec_search]
|
||||||
|
|
||||||
|
// --8<-- [start:sql_search]
|
||||||
|
await tbl.query().where("id = 10").limit(10).toArray();
|
||||||
|
// --8<-- [end:sql_search]
|
||||||
|
|
||||||
|
console.log("SQL search: done");
|
||||||
27
nodejs/examples/jsconfig.json
Normal file
27
nodejs/examples/jsconfig.json
Normal file
@@ -0,0 +1,27 @@
|
|||||||
|
{
|
||||||
|
"compilerOptions": {
|
||||||
|
// Enable latest features
|
||||||
|
"lib": ["ESNext", "DOM"],
|
||||||
|
"target": "ESNext",
|
||||||
|
"module": "ESNext",
|
||||||
|
"moduleDetection": "force",
|
||||||
|
"jsx": "react-jsx",
|
||||||
|
"allowJs": true,
|
||||||
|
|
||||||
|
// Bundler mode
|
||||||
|
"moduleResolution": "bundler",
|
||||||
|
"allowImportingTsExtensions": true,
|
||||||
|
"verbatimModuleSyntax": true,
|
||||||
|
"noEmit": true,
|
||||||
|
|
||||||
|
// Best practices
|
||||||
|
"strict": true,
|
||||||
|
"skipLibCheck": true,
|
||||||
|
"noFallthroughCasesInSwitch": true,
|
||||||
|
|
||||||
|
// Some stricter flags (disabled by default)
|
||||||
|
"noUnusedLocals": false,
|
||||||
|
"noUnusedParameters": false,
|
||||||
|
"noPropertyAccessFromIndexSignature": false
|
||||||
|
}
|
||||||
|
}
|
||||||
79
nodejs/examples/package-lock.json
generated
Normal file
79
nodejs/examples/package-lock.json
generated
Normal file
@@ -0,0 +1,79 @@
|
|||||||
|
{
|
||||||
|
"name": "examples",
|
||||||
|
"version": "1.0.0",
|
||||||
|
"lockfileVersion": 3,
|
||||||
|
"requires": true,
|
||||||
|
"packages": {
|
||||||
|
"": {
|
||||||
|
"name": "examples",
|
||||||
|
"version": "1.0.0",
|
||||||
|
"license": "Apache-2.0",
|
||||||
|
"dependencies": {
|
||||||
|
"@lancedb/lancedb": "file:../"
|
||||||
|
},
|
||||||
|
"peerDependencies": {
|
||||||
|
"typescript": "^5.0.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"..": {
|
||||||
|
"name": "@lancedb/lancedb",
|
||||||
|
"version": "0.6.0",
|
||||||
|
"cpu": [
|
||||||
|
"x64",
|
||||||
|
"arm64"
|
||||||
|
],
|
||||||
|
"license": "Apache 2.0",
|
||||||
|
"os": [
|
||||||
|
"darwin",
|
||||||
|
"linux",
|
||||||
|
"win32"
|
||||||
|
],
|
||||||
|
"dependencies": {
|
||||||
|
"apache-arrow": "^15.0.0",
|
||||||
|
"axios": "^1.7.2",
|
||||||
|
"openai": "^4.29.2",
|
||||||
|
"reflect-metadata": "^0.2.2"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"@aws-sdk/client-kms": "^3.33.0",
|
||||||
|
"@aws-sdk/client-s3": "^3.33.0",
|
||||||
|
"@biomejs/biome": "^1.7.3",
|
||||||
|
"@jest/globals": "^29.7.0",
|
||||||
|
"@napi-rs/cli": "^2.18.0",
|
||||||
|
"@types/axios": "^0.14.0",
|
||||||
|
"@types/jest": "^29.1.2",
|
||||||
|
"@types/tmp": "^0.2.6",
|
||||||
|
"apache-arrow-old": "npm:apache-arrow@13.0.0",
|
||||||
|
"eslint": "^8.57.0",
|
||||||
|
"jest": "^29.7.0",
|
||||||
|
"shx": "^0.3.4",
|
||||||
|
"tmp": "^0.2.3",
|
||||||
|
"ts-jest": "^29.1.2",
|
||||||
|
"typedoc": "^0.25.7",
|
||||||
|
"typedoc-plugin-markdown": "^3.17.1",
|
||||||
|
"typescript": "^5.3.3",
|
||||||
|
"typescript-eslint": "^7.1.0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 18"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@lancedb/lancedb": {
|
||||||
|
"resolved": "..",
|
||||||
|
"link": true
|
||||||
|
},
|
||||||
|
"node_modules/typescript": {
|
||||||
|
"version": "5.5.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/typescript/-/typescript-5.5.2.tgz",
|
||||||
|
"integrity": "sha512-NcRtPEOsPFFWjobJEtfihkLCZCXZt/os3zf8nTxjVH3RvTSxjrCamJpbExGvYOF+tFHc3pA65qpdwPbzjohhew==",
|
||||||
|
"peer": true,
|
||||||
|
"bin": {
|
||||||
|
"tsc": "bin/tsc",
|
||||||
|
"tsserver": "bin/tsserver"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=14.17"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
18
nodejs/examples/package.json
Normal file
18
nodejs/examples/package.json
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
{
|
||||||
|
"name": "examples",
|
||||||
|
"version": "1.0.0",
|
||||||
|
"description": "Examples for LanceDB",
|
||||||
|
"main": "index.js",
|
||||||
|
"type": "module",
|
||||||
|
"scripts": {
|
||||||
|
"test": "echo \"Error: no test specified\" && exit 1"
|
||||||
|
},
|
||||||
|
"author": "Lance Devs",
|
||||||
|
"license": "Apache-2.0",
|
||||||
|
"dependencies": {
|
||||||
|
"@lancedb/lancedb": "file:../"
|
||||||
|
},
|
||||||
|
"peerDependencies": {
|
||||||
|
"typescript": "^5.0.0"
|
||||||
|
}
|
||||||
|
}
|
||||||
37
nodejs/examples/search.ts
Normal file
37
nodejs/examples/search.ts
Normal file
@@ -0,0 +1,37 @@
|
|||||||
|
// --8<-- [end:import]
|
||||||
|
import * as fs from "node:fs";
|
||||||
|
// --8<-- [start:import]
|
||||||
|
import * as lancedb from "@lancedb/lancedb";
|
||||||
|
|
||||||
|
async function setup() {
|
||||||
|
fs.rmSync("data/sample-lancedb", { recursive: true, force: true });
|
||||||
|
const db = await lancedb.connect("data/sample-lancedb");
|
||||||
|
|
||||||
|
const data = Array.from({ length: 10_000 }, (_, i) => ({
|
||||||
|
vector: Array(1536).fill(i),
|
||||||
|
id: `${i}`,
|
||||||
|
content: "",
|
||||||
|
longId: `${i}`,
|
||||||
|
}));
|
||||||
|
|
||||||
|
await db.createTable("my_vectors", data);
|
||||||
|
}
|
||||||
|
|
||||||
|
await setup();
|
||||||
|
|
||||||
|
// --8<-- [start:search1]
|
||||||
|
const db = await lancedb.connect("data/sample-lancedb");
|
||||||
|
const tbl = await db.openTable("my_vectors");
|
||||||
|
|
||||||
|
const _results1 = await tbl.search(Array(1536).fill(1.2)).limit(10).toArray();
|
||||||
|
// --8<-- [end:search1]
|
||||||
|
|
||||||
|
// --8<-- [start:search2]
|
||||||
|
const _results2 = await tbl
|
||||||
|
.search(Array(1536).fill(1.2))
|
||||||
|
.distanceType("cosine")
|
||||||
|
.limit(10)
|
||||||
|
.toArray();
|
||||||
|
// --8<-- [end:search2]
|
||||||
|
|
||||||
|
console.log("search: done");
|
||||||
@@ -15,6 +15,7 @@
|
|||||||
import {
|
import {
|
||||||
Table as ArrowTable,
|
Table as ArrowTable,
|
||||||
Binary,
|
Binary,
|
||||||
|
BufferType,
|
||||||
DataType,
|
DataType,
|
||||||
Field,
|
Field,
|
||||||
FixedSizeBinary,
|
FixedSizeBinary,
|
||||||
@@ -37,14 +38,72 @@ import {
|
|||||||
type makeTable,
|
type makeTable,
|
||||||
vectorFromArray,
|
vectorFromArray,
|
||||||
} from "apache-arrow";
|
} from "apache-arrow";
|
||||||
|
import { Buffers } from "apache-arrow/data";
|
||||||
import { type EmbeddingFunction } from "./embedding/embedding_function";
|
import { type EmbeddingFunction } from "./embedding/embedding_function";
|
||||||
import { EmbeddingFunctionConfig, getRegistry } from "./embedding/registry";
|
import { EmbeddingFunctionConfig, getRegistry } from "./embedding/registry";
|
||||||
import { sanitizeField, sanitizeSchema, sanitizeType } from "./sanitize";
|
import {
|
||||||
|
sanitizeField,
|
||||||
|
sanitizeSchema,
|
||||||
|
sanitizeTable,
|
||||||
|
sanitizeType,
|
||||||
|
} from "./sanitize";
|
||||||
export * from "apache-arrow";
|
export * from "apache-arrow";
|
||||||
|
export type SchemaLike =
|
||||||
|
| Schema
|
||||||
|
| {
|
||||||
|
fields: FieldLike[];
|
||||||
|
metadata: Map<string, string>;
|
||||||
|
get names(): unknown[];
|
||||||
|
};
|
||||||
|
export type FieldLike =
|
||||||
|
| Field
|
||||||
|
| {
|
||||||
|
type: string;
|
||||||
|
name: string;
|
||||||
|
nullable?: boolean;
|
||||||
|
metadata?: Map<string, string>;
|
||||||
|
};
|
||||||
|
|
||||||
export type IntoVector = Float32Array | Float64Array | number[];
|
export type DataLike =
|
||||||
|
// biome-ignore lint/suspicious/noExplicitAny: <explanation>
|
||||||
|
| import("apache-arrow").Data<Struct<any>>
|
||||||
|
| {
|
||||||
|
// biome-ignore lint/suspicious/noExplicitAny: <explanation>
|
||||||
|
type: any;
|
||||||
|
length: number;
|
||||||
|
offset: number;
|
||||||
|
stride: number;
|
||||||
|
nullable: boolean;
|
||||||
|
children: DataLike[];
|
||||||
|
get nullCount(): number;
|
||||||
|
// biome-ignore lint/suspicious/noExplicitAny: <explanation>
|
||||||
|
values: Buffers<any>[BufferType.DATA];
|
||||||
|
// biome-ignore lint/suspicious/noExplicitAny: <explanation>
|
||||||
|
typeIds: Buffers<any>[BufferType.TYPE];
|
||||||
|
// biome-ignore lint/suspicious/noExplicitAny: <explanation>
|
||||||
|
nullBitmap: Buffers<any>[BufferType.VALIDITY];
|
||||||
|
// biome-ignore lint/suspicious/noExplicitAny: <explanation>
|
||||||
|
valueOffsets: Buffers<any>[BufferType.OFFSET];
|
||||||
|
};
|
||||||
|
|
||||||
export function isArrowTable(value: object): value is ArrowTable {
|
export type RecordBatchLike =
|
||||||
|
| RecordBatch
|
||||||
|
| {
|
||||||
|
schema: SchemaLike;
|
||||||
|
data: DataLike;
|
||||||
|
};
|
||||||
|
|
||||||
|
export type TableLike =
|
||||||
|
| ArrowTable
|
||||||
|
| { schema: SchemaLike; batches: RecordBatchLike[] };
|
||||||
|
|
||||||
|
export type IntoVector =
|
||||||
|
| Float32Array
|
||||||
|
| Float64Array
|
||||||
|
| number[]
|
||||||
|
| Promise<Float32Array | Float64Array | number[]>;
|
||||||
|
|
||||||
|
export function isArrowTable(value: object): value is TableLike {
|
||||||
if (value instanceof ArrowTable) return true;
|
if (value instanceof ArrowTable) return true;
|
||||||
return "schema" in value && "batches" in value;
|
return "schema" in value && "batches" in value;
|
||||||
}
|
}
|
||||||
@@ -135,7 +194,7 @@ export function isFixedSizeList(value: unknown): value is FixedSizeList {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/** Data type accepted by NodeJS SDK */
|
/** Data type accepted by NodeJS SDK */
|
||||||
export type Data = Record<string, unknown>[] | ArrowTable;
|
export type Data = Record<string, unknown>[] | TableLike;
|
||||||
|
|
||||||
/*
|
/*
|
||||||
* Options to control how a column should be converted to a vector array
|
* Options to control how a column should be converted to a vector array
|
||||||
@@ -162,7 +221,7 @@ export class MakeArrowTableOptions {
|
|||||||
* The schema must be specified if there are no records (e.g. to make
|
* The schema must be specified if there are no records (e.g. to make
|
||||||
* an empty table)
|
* an empty table)
|
||||||
*/
|
*/
|
||||||
schema?: Schema;
|
schema?: SchemaLike;
|
||||||
|
|
||||||
/*
|
/*
|
||||||
* Mapping from vector column name to expected type
|
* Mapping from vector column name to expected type
|
||||||
@@ -310,7 +369,7 @@ export function makeArrowTable(
|
|||||||
if (opt.schema !== undefined && opt.schema !== null) {
|
if (opt.schema !== undefined && opt.schema !== null) {
|
||||||
opt.schema = sanitizeSchema(opt.schema);
|
opt.schema = sanitizeSchema(opt.schema);
|
||||||
opt.schema = validateSchemaEmbeddings(
|
opt.schema = validateSchemaEmbeddings(
|
||||||
opt.schema,
|
opt.schema as Schema,
|
||||||
data,
|
data,
|
||||||
options?.embeddingFunction,
|
options?.embeddingFunction,
|
||||||
);
|
);
|
||||||
@@ -394,7 +453,7 @@ export function makeArrowTable(
|
|||||||
// `new ArrowTable(schema, batches)` which does not do any schema inference
|
// `new ArrowTable(schema, batches)` which does not do any schema inference
|
||||||
const firstTable = new ArrowTable(columns);
|
const firstTable = new ArrowTable(columns);
|
||||||
const batchesFixed = firstTable.batches.map(
|
const batchesFixed = firstTable.batches.map(
|
||||||
(batch) => new RecordBatch(opt.schema!, batch.data),
|
(batch) => new RecordBatch(opt.schema as Schema, batch.data),
|
||||||
);
|
);
|
||||||
let schema: Schema;
|
let schema: Schema;
|
||||||
if (metadata !== undefined) {
|
if (metadata !== undefined) {
|
||||||
@@ -407,9 +466,9 @@ export function makeArrowTable(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
schema = new Schema(opt.schema.fields, schemaMetadata);
|
schema = new Schema(opt.schema.fields as Field[], schemaMetadata);
|
||||||
} else {
|
} else {
|
||||||
schema = opt.schema;
|
schema = opt.schema as Schema;
|
||||||
}
|
}
|
||||||
return new ArrowTable(schema, batchesFixed);
|
return new ArrowTable(schema, batchesFixed);
|
||||||
}
|
}
|
||||||
@@ -425,7 +484,7 @@ export function makeArrowTable(
|
|||||||
* Create an empty Arrow table with the provided schema
|
* Create an empty Arrow table with the provided schema
|
||||||
*/
|
*/
|
||||||
export function makeEmptyTable(
|
export function makeEmptyTable(
|
||||||
schema: Schema,
|
schema: SchemaLike,
|
||||||
metadata?: Map<string, string>,
|
metadata?: Map<string, string>,
|
||||||
): ArrowTable {
|
): ArrowTable {
|
||||||
return makeArrowTable([], { schema }, metadata);
|
return makeArrowTable([], { schema }, metadata);
|
||||||
@@ -563,17 +622,16 @@ async function applyEmbeddingsFromMetadata(
|
|||||||
async function applyEmbeddings<T>(
|
async function applyEmbeddings<T>(
|
||||||
table: ArrowTable,
|
table: ArrowTable,
|
||||||
embeddings?: EmbeddingFunctionConfig,
|
embeddings?: EmbeddingFunctionConfig,
|
||||||
schema?: Schema,
|
schema?: SchemaLike,
|
||||||
): Promise<ArrowTable> {
|
): Promise<ArrowTable> {
|
||||||
if (schema?.metadata.has("embedding_functions")) {
|
|
||||||
return applyEmbeddingsFromMetadata(table, schema!);
|
|
||||||
} else if (embeddings == null || embeddings === undefined) {
|
|
||||||
return table;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (schema !== undefined && schema !== null) {
|
if (schema !== undefined && schema !== null) {
|
||||||
schema = sanitizeSchema(schema);
|
schema = sanitizeSchema(schema);
|
||||||
}
|
}
|
||||||
|
if (schema?.metadata.has("embedding_functions")) {
|
||||||
|
return applyEmbeddingsFromMetadata(table, schema! as Schema);
|
||||||
|
} else if (embeddings == null || embeddings === undefined) {
|
||||||
|
return table;
|
||||||
|
}
|
||||||
|
|
||||||
// Convert from ArrowTable to Record<String, Vector>
|
// Convert from ArrowTable to Record<String, Vector>
|
||||||
const colEntries = [...Array(table.numCols).keys()].map((_, idx) => {
|
const colEntries = [...Array(table.numCols).keys()].map((_, idx) => {
|
||||||
@@ -650,7 +708,7 @@ async function applyEmbeddings<T>(
|
|||||||
`When using embedding functions and specifying a schema the schema should include the embedding column but the column ${destColumn} was missing`,
|
`When using embedding functions and specifying a schema the schema should include the embedding column but the column ${destColumn} was missing`,
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
return alignTable(newTable, schema);
|
return alignTable(newTable, schema as Schema);
|
||||||
}
|
}
|
||||||
return newTable;
|
return newTable;
|
||||||
}
|
}
|
||||||
@@ -744,7 +802,7 @@ export async function fromRecordsToStreamBuffer(
|
|||||||
export async function fromTableToBuffer(
|
export async function fromTableToBuffer(
|
||||||
table: ArrowTable,
|
table: ArrowTable,
|
||||||
embeddings?: EmbeddingFunctionConfig,
|
embeddings?: EmbeddingFunctionConfig,
|
||||||
schema?: Schema,
|
schema?: SchemaLike,
|
||||||
): Promise<Buffer> {
|
): Promise<Buffer> {
|
||||||
if (schema !== undefined && schema !== null) {
|
if (schema !== undefined && schema !== null) {
|
||||||
schema = sanitizeSchema(schema);
|
schema = sanitizeSchema(schema);
|
||||||
@@ -771,7 +829,7 @@ export async function fromDataToBuffer(
|
|||||||
schema = sanitizeSchema(schema);
|
schema = sanitizeSchema(schema);
|
||||||
}
|
}
|
||||||
if (isArrowTable(data)) {
|
if (isArrowTable(data)) {
|
||||||
return fromTableToBuffer(data, embeddings, schema);
|
return fromTableToBuffer(sanitizeTable(data), embeddings, schema);
|
||||||
} else {
|
} else {
|
||||||
const table = await convertToTable(data, embeddings, { schema });
|
const table = await convertToTable(data, embeddings, { schema });
|
||||||
return fromTableToBuffer(table);
|
return fromTableToBuffer(table);
|
||||||
@@ -789,7 +847,7 @@ export async function fromDataToBuffer(
|
|||||||
export async function fromTableToStreamBuffer(
|
export async function fromTableToStreamBuffer(
|
||||||
table: ArrowTable,
|
table: ArrowTable,
|
||||||
embeddings?: EmbeddingFunctionConfig,
|
embeddings?: EmbeddingFunctionConfig,
|
||||||
schema?: Schema,
|
schema?: SchemaLike,
|
||||||
): Promise<Buffer> {
|
): Promise<Buffer> {
|
||||||
const tableWithEmbeddings = await applyEmbeddings(table, embeddings, schema);
|
const tableWithEmbeddings = await applyEmbeddings(table, embeddings, schema);
|
||||||
const writer = RecordBatchStreamWriter.writeAll(tableWithEmbeddings);
|
const writer = RecordBatchStreamWriter.writeAll(tableWithEmbeddings);
|
||||||
@@ -854,7 +912,6 @@ function validateSchemaEmbeddings(
|
|||||||
for (let field of schema.fields) {
|
for (let field of schema.fields) {
|
||||||
if (isFixedSizeList(field.type)) {
|
if (isFixedSizeList(field.type)) {
|
||||||
field = sanitizeField(field);
|
field = sanitizeField(field);
|
||||||
|
|
||||||
if (data.length !== 0 && data?.[0]?.[field.name] === undefined) {
|
if (data.length !== 0 && data?.[0]?.[field.name] === undefined) {
|
||||||
if (schema.metadata.has("embedding_functions")) {
|
if (schema.metadata.has("embedding_functions")) {
|
||||||
const embeddings = JSON.parse(
|
const embeddings = JSON.parse(
|
||||||
|
|||||||
@@ -12,7 +12,7 @@
|
|||||||
// See the License for the specific language governing permissions and
|
// See the License for the specific language governing permissions and
|
||||||
// limitations under the License.
|
// limitations under the License.
|
||||||
|
|
||||||
import { Table as ArrowTable, Data, Schema } from "./arrow";
|
import { Data, Schema, SchemaLike, TableLike } from "./arrow";
|
||||||
import { fromTableToBuffer, makeEmptyTable } from "./arrow";
|
import { fromTableToBuffer, makeEmptyTable } from "./arrow";
|
||||||
import { EmbeddingFunctionConfig, getRegistry } from "./embedding/registry";
|
import { EmbeddingFunctionConfig, getRegistry } from "./embedding/registry";
|
||||||
import { Connection as LanceDbConnection } from "./native";
|
import { Connection as LanceDbConnection } from "./native";
|
||||||
@@ -50,7 +50,7 @@ export interface CreateTableOptions {
|
|||||||
* The default is true while the new format is in beta
|
* The default is true while the new format is in beta
|
||||||
*/
|
*/
|
||||||
useLegacyFormat?: boolean;
|
useLegacyFormat?: boolean;
|
||||||
schema?: Schema;
|
schema?: SchemaLike;
|
||||||
embeddingFunction?: EmbeddingFunctionConfig;
|
embeddingFunction?: EmbeddingFunctionConfig;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -167,12 +167,12 @@ export abstract class Connection {
|
|||||||
/**
|
/**
|
||||||
* Creates a new Table and initialize it with new data.
|
* Creates a new Table and initialize it with new data.
|
||||||
* @param {string} name - The name of the table.
|
* @param {string} name - The name of the table.
|
||||||
* @param {Record<string, unknown>[] | ArrowTable} data - Non-empty Array of Records
|
* @param {Record<string, unknown>[] | TableLike} data - Non-empty Array of Records
|
||||||
* to be inserted into the table
|
* to be inserted into the table
|
||||||
*/
|
*/
|
||||||
abstract createTable(
|
abstract createTable(
|
||||||
name: string,
|
name: string,
|
||||||
data: Record<string, unknown>[] | ArrowTable,
|
data: Record<string, unknown>[] | TableLike,
|
||||||
options?: Partial<CreateTableOptions>,
|
options?: Partial<CreateTableOptions>,
|
||||||
): Promise<Table>;
|
): Promise<Table>;
|
||||||
|
|
||||||
@@ -183,7 +183,7 @@ export abstract class Connection {
|
|||||||
*/
|
*/
|
||||||
abstract createEmptyTable(
|
abstract createEmptyTable(
|
||||||
name: string,
|
name: string,
|
||||||
schema: Schema,
|
schema: import("./arrow").SchemaLike,
|
||||||
options?: Partial<CreateTableOptions>,
|
options?: Partial<CreateTableOptions>,
|
||||||
): Promise<Table>;
|
): Promise<Table>;
|
||||||
|
|
||||||
@@ -235,7 +235,7 @@ export class LocalConnection extends Connection {
|
|||||||
nameOrOptions:
|
nameOrOptions:
|
||||||
| string
|
| string
|
||||||
| ({ name: string; data: Data } & Partial<CreateTableOptions>),
|
| ({ name: string; data: Data } & Partial<CreateTableOptions>),
|
||||||
data?: Record<string, unknown>[] | ArrowTable,
|
data?: Record<string, unknown>[] | TableLike,
|
||||||
options?: Partial<CreateTableOptions>,
|
options?: Partial<CreateTableOptions>,
|
||||||
): Promise<Table> {
|
): Promise<Table> {
|
||||||
if (typeof nameOrOptions !== "string" && "name" in nameOrOptions) {
|
if (typeof nameOrOptions !== "string" && "name" in nameOrOptions) {
|
||||||
@@ -259,7 +259,7 @@ export class LocalConnection extends Connection {
|
|||||||
|
|
||||||
async createEmptyTable(
|
async createEmptyTable(
|
||||||
name: string,
|
name: string,
|
||||||
schema: Schema,
|
schema: import("./arrow").SchemaLike,
|
||||||
options?: Partial<CreateTableOptions>,
|
options?: Partial<CreateTableOptions>,
|
||||||
): Promise<Table> {
|
): Promise<Table> {
|
||||||
let mode: string = options?.mode ?? "create";
|
let mode: string = options?.mode ?? "create";
|
||||||
|
|||||||
@@ -35,6 +35,11 @@ export interface FunctionOptions {
|
|||||||
[key: string]: any;
|
[key: string]: any;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export interface EmbeddingFunctionConstructor<
|
||||||
|
T extends EmbeddingFunction = EmbeddingFunction,
|
||||||
|
> {
|
||||||
|
new (modelOptions?: T["TOptions"]): T;
|
||||||
|
}
|
||||||
/**
|
/**
|
||||||
* An embedding function that automatically creates vector representation for a given column.
|
* An embedding function that automatically creates vector representation for a given column.
|
||||||
*/
|
*/
|
||||||
@@ -43,6 +48,12 @@ export abstract class EmbeddingFunction<
|
|||||||
T = any,
|
T = any,
|
||||||
M extends FunctionOptions = FunctionOptions,
|
M extends FunctionOptions = FunctionOptions,
|
||||||
> {
|
> {
|
||||||
|
/**
|
||||||
|
* @ignore
|
||||||
|
* This is only used for associating the options type with the class for type checking
|
||||||
|
*/
|
||||||
|
// biome-ignore lint/style/useNamingConvention: we want to keep the name as it is
|
||||||
|
readonly TOptions!: M;
|
||||||
/**
|
/**
|
||||||
* Convert the embedding function to a JSON object
|
* Convert the embedding function to a JSON object
|
||||||
* It is used to serialize the embedding function to the schema
|
* It is used to serialize the embedding function to the schema
|
||||||
@@ -170,7 +181,7 @@ export abstract class EmbeddingFunction<
|
|||||||
/**
|
/**
|
||||||
Compute the embeddings for a single query
|
Compute the embeddings for a single query
|
||||||
*/
|
*/
|
||||||
async computeQueryEmbeddings(data: T): Promise<IntoVector> {
|
async computeQueryEmbeddings(data: T): Promise<Awaited<IntoVector>> {
|
||||||
return this.computeSourceEmbeddings([data]).then(
|
return this.computeSourceEmbeddings([data]).then(
|
||||||
(embeddings) => embeddings[0],
|
(embeddings) => embeddings[0],
|
||||||
);
|
);
|
||||||
|
|||||||
@@ -13,24 +13,29 @@
|
|||||||
// limitations under the License.
|
// limitations under the License.
|
||||||
|
|
||||||
import type OpenAI from "openai";
|
import type OpenAI from "openai";
|
||||||
|
import { type EmbeddingCreateParams } from "openai/resources";
|
||||||
import { Float, Float32 } from "../arrow";
|
import { Float, Float32 } from "../arrow";
|
||||||
import { EmbeddingFunction } from "./embedding_function";
|
import { EmbeddingFunction } from "./embedding_function";
|
||||||
import { register } from "./registry";
|
import { register } from "./registry";
|
||||||
|
|
||||||
export type OpenAIOptions = {
|
export type OpenAIOptions = {
|
||||||
apiKey?: string;
|
apiKey: string;
|
||||||
model?: string;
|
model: EmbeddingCreateParams["model"];
|
||||||
};
|
};
|
||||||
|
|
||||||
@register("openai")
|
@register("openai")
|
||||||
export class OpenAIEmbeddingFunction extends EmbeddingFunction<
|
export class OpenAIEmbeddingFunction extends EmbeddingFunction<
|
||||||
string,
|
string,
|
||||||
OpenAIOptions
|
Partial<OpenAIOptions>
|
||||||
> {
|
> {
|
||||||
#openai: OpenAI;
|
#openai: OpenAI;
|
||||||
#modelName: string;
|
#modelName: OpenAIOptions["model"];
|
||||||
|
|
||||||
constructor(options: OpenAIOptions = { model: "text-embedding-ada-002" }) {
|
constructor(
|
||||||
|
options: Partial<OpenAIOptions> = {
|
||||||
|
model: "text-embedding-ada-002",
|
||||||
|
},
|
||||||
|
) {
|
||||||
super();
|
super();
|
||||||
const openAIKey = options?.apiKey ?? process.env.OPENAI_API_KEY;
|
const openAIKey = options?.apiKey ?? process.env.OPENAI_API_KEY;
|
||||||
if (!openAIKey) {
|
if (!openAIKey) {
|
||||||
@@ -73,7 +78,7 @@ export class OpenAIEmbeddingFunction extends EmbeddingFunction<
|
|||||||
case "text-embedding-3-small":
|
case "text-embedding-3-small":
|
||||||
return 1536;
|
return 1536;
|
||||||
default:
|
default:
|
||||||
return null as never;
|
throw new Error(`Unknown model: ${this.#modelName}`);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -12,21 +12,15 @@
|
|||||||
// See the License for the specific language governing permissions and
|
// See the License for the specific language governing permissions and
|
||||||
// limitations under the License.
|
// limitations under the License.
|
||||||
|
|
||||||
import type { EmbeddingFunction } from "./embedding_function";
|
import {
|
||||||
|
type EmbeddingFunction,
|
||||||
|
type EmbeddingFunctionConstructor,
|
||||||
|
} from "./embedding_function";
|
||||||
import "reflect-metadata";
|
import "reflect-metadata";
|
||||||
|
import { OpenAIEmbeddingFunction } from "./openai";
|
||||||
export interface EmbeddingFunctionOptions {
|
|
||||||
[key: string]: unknown;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface EmbeddingFunctionFactory<
|
|
||||||
T extends EmbeddingFunction = EmbeddingFunction,
|
|
||||||
> {
|
|
||||||
new (modelOptions?: EmbeddingFunctionOptions): T;
|
|
||||||
}
|
|
||||||
|
|
||||||
interface EmbeddingFunctionCreate<T extends EmbeddingFunction> {
|
interface EmbeddingFunctionCreate<T extends EmbeddingFunction> {
|
||||||
create(options?: EmbeddingFunctionOptions): T;
|
create(options?: T["TOptions"]): T;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -36,7 +30,7 @@ interface EmbeddingFunctionCreate<T extends EmbeddingFunction> {
|
|||||||
* or TextEmbeddingFunction and registering it with the registry
|
* or TextEmbeddingFunction and registering it with the registry
|
||||||
*/
|
*/
|
||||||
export class EmbeddingFunctionRegistry {
|
export class EmbeddingFunctionRegistry {
|
||||||
#functions: Map<string, EmbeddingFunctionFactory> = new Map();
|
#functions = new Map<string, EmbeddingFunctionConstructor>();
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Register an embedding function
|
* Register an embedding function
|
||||||
@@ -44,7 +38,9 @@ export class EmbeddingFunctionRegistry {
|
|||||||
* @param func The function to register
|
* @param func The function to register
|
||||||
* @throws Error if the function is already registered
|
* @throws Error if the function is already registered
|
||||||
*/
|
*/
|
||||||
register<T extends EmbeddingFunctionFactory = EmbeddingFunctionFactory>(
|
register<
|
||||||
|
T extends EmbeddingFunctionConstructor = EmbeddingFunctionConstructor,
|
||||||
|
>(
|
||||||
this: EmbeddingFunctionRegistry,
|
this: EmbeddingFunctionRegistry,
|
||||||
alias?: string,
|
alias?: string,
|
||||||
// biome-ignore lint/suspicious/noExplicitAny: <explanation>
|
// biome-ignore lint/suspicious/noExplicitAny: <explanation>
|
||||||
@@ -69,18 +65,34 @@ export class EmbeddingFunctionRegistry {
|
|||||||
* Fetch an embedding function by name
|
* Fetch an embedding function by name
|
||||||
* @param name The name of the function
|
* @param name The name of the function
|
||||||
*/
|
*/
|
||||||
get<T extends EmbeddingFunction<unknown> = EmbeddingFunction>(
|
get<T extends EmbeddingFunction<unknown>, Name extends string = "">(
|
||||||
name: string,
|
name: Name extends "openai" ? "openai" : string,
|
||||||
): EmbeddingFunctionCreate<T> | undefined {
|
//This makes it so that you can use string constants as "types", or use an explicitly supplied type
|
||||||
|
// ex:
|
||||||
|
// `registry.get("openai") -> EmbeddingFunctionCreate<OpenAIEmbeddingFunction>`
|
||||||
|
// `registry.get<MyCustomEmbeddingFunction>("my_func") -> EmbeddingFunctionCreate<MyCustomEmbeddingFunction> | undefined`
|
||||||
|
//
|
||||||
|
// the reason this is important is that we always know our built in functions are defined so the user isnt forced to do a non null/undefined
|
||||||
|
// ```ts
|
||||||
|
// const openai: OpenAIEmbeddingFunction = registry.get("openai").create()
|
||||||
|
// ```
|
||||||
|
): Name extends "openai"
|
||||||
|
? EmbeddingFunctionCreate<OpenAIEmbeddingFunction>
|
||||||
|
: EmbeddingFunctionCreate<T> | undefined {
|
||||||
|
type Output = Name extends "openai"
|
||||||
|
? EmbeddingFunctionCreate<OpenAIEmbeddingFunction>
|
||||||
|
: EmbeddingFunctionCreate<T> | undefined;
|
||||||
|
|
||||||
const factory = this.#functions.get(name);
|
const factory = this.#functions.get(name);
|
||||||
if (!factory) {
|
if (!factory) {
|
||||||
return undefined;
|
return undefined as Output;
|
||||||
}
|
}
|
||||||
|
|
||||||
return {
|
return {
|
||||||
create: function (options: EmbeddingFunctionOptions) {
|
create: function (options?: T["TOptions"]) {
|
||||||
return new factory(options) as unknown as T;
|
return new factory(options);
|
||||||
},
|
},
|
||||||
};
|
} as Output;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -104,7 +116,7 @@ export class EmbeddingFunctionRegistry {
|
|||||||
name: string;
|
name: string;
|
||||||
sourceColumn: string;
|
sourceColumn: string;
|
||||||
vectorColumn: string;
|
vectorColumn: string;
|
||||||
model: EmbeddingFunctionOptions;
|
model: EmbeddingFunction["TOptions"];
|
||||||
};
|
};
|
||||||
const functions = <FunctionConfig[]>(
|
const functions = <FunctionConfig[]>(
|
||||||
JSON.parse(metadata.get("embedding_functions")!)
|
JSON.parse(metadata.get("embedding_functions")!)
|
||||||
|
|||||||
@@ -89,15 +89,26 @@ export interface QueryExecutionOptions {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/** Common methods supported by all query types */
|
/** Common methods supported by all query types */
|
||||||
export class QueryBase<
|
export class QueryBase<NativeQueryType extends NativeQuery | NativeVectorQuery>
|
||||||
NativeQueryType extends NativeQuery | NativeVectorQuery,
|
implements AsyncIterable<RecordBatch>
|
||||||
QueryType,
|
|
||||||
> implements AsyncIterable<RecordBatch>
|
|
||||||
{
|
{
|
||||||
protected constructor(protected inner: NativeQueryType) {
|
protected constructor(
|
||||||
|
protected inner: NativeQueryType | Promise<NativeQueryType>,
|
||||||
|
) {
|
||||||
// intentionally empty
|
// intentionally empty
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// call a function on the inner (either a promise or the actual object)
|
||||||
|
protected doCall(fn: (inner: NativeQueryType) => void) {
|
||||||
|
if (this.inner instanceof Promise) {
|
||||||
|
this.inner = this.inner.then((inner) => {
|
||||||
|
fn(inner);
|
||||||
|
return inner;
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
fn(this.inner);
|
||||||
|
}
|
||||||
|
}
|
||||||
/**
|
/**
|
||||||
* A filter statement to be applied to this query.
|
* A filter statement to be applied to this query.
|
||||||
*
|
*
|
||||||
@@ -110,16 +121,16 @@ export class QueryBase<
|
|||||||
* Filtering performance can often be improved by creating a scalar index
|
* Filtering performance can often be improved by creating a scalar index
|
||||||
* on the filter column(s).
|
* on the filter column(s).
|
||||||
*/
|
*/
|
||||||
where(predicate: string): QueryType {
|
where(predicate: string): this {
|
||||||
this.inner.onlyIf(predicate);
|
this.doCall((inner: NativeQueryType) => inner.onlyIf(predicate));
|
||||||
return this as unknown as QueryType;
|
return this;
|
||||||
}
|
}
|
||||||
/**
|
/**
|
||||||
* A filter statement to be applied to this query.
|
* A filter statement to be applied to this query.
|
||||||
* @alias where
|
* @alias where
|
||||||
* @deprecated Use `where` instead
|
* @deprecated Use `where` instead
|
||||||
*/
|
*/
|
||||||
filter(predicate: string): QueryType {
|
filter(predicate: string): this {
|
||||||
return this.where(predicate);
|
return this.where(predicate);
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -155,7 +166,7 @@ export class QueryBase<
|
|||||||
*/
|
*/
|
||||||
select(
|
select(
|
||||||
columns: string[] | Map<string, string> | Record<string, string> | string,
|
columns: string[] | Map<string, string> | Record<string, string> | string,
|
||||||
): QueryType {
|
): this {
|
||||||
let columnTuples: [string, string][];
|
let columnTuples: [string, string][];
|
||||||
if (typeof columns === "string") {
|
if (typeof columns === "string") {
|
||||||
columns = [columns];
|
columns = [columns];
|
||||||
@@ -167,8 +178,10 @@ export class QueryBase<
|
|||||||
} else {
|
} else {
|
||||||
columnTuples = Object.entries(columns);
|
columnTuples = Object.entries(columns);
|
||||||
}
|
}
|
||||||
this.inner.select(columnTuples);
|
this.doCall((inner: NativeQueryType) => {
|
||||||
return this as unknown as QueryType;
|
inner.select(columnTuples);
|
||||||
|
});
|
||||||
|
return this;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -177,16 +190,20 @@ export class QueryBase<
|
|||||||
* By default, a plain search has no limit. If this method is not
|
* By default, a plain search has no limit. If this method is not
|
||||||
* called then every valid row from the table will be returned.
|
* called then every valid row from the table will be returned.
|
||||||
*/
|
*/
|
||||||
limit(limit: number): QueryType {
|
limit(limit: number): this {
|
||||||
this.inner.limit(limit);
|
this.doCall((inner: NativeQueryType) => inner.limit(limit));
|
||||||
return this as unknown as QueryType;
|
return this;
|
||||||
}
|
}
|
||||||
|
|
||||||
protected nativeExecute(
|
protected nativeExecute(
|
||||||
options?: Partial<QueryExecutionOptions>,
|
options?: Partial<QueryExecutionOptions>,
|
||||||
): Promise<NativeBatchIterator> {
|
): Promise<NativeBatchIterator> {
|
||||||
|
if (this.inner instanceof Promise) {
|
||||||
|
return this.inner.then((inner) => inner.execute(options?.maxBatchLength));
|
||||||
|
} else {
|
||||||
return this.inner.execute(options?.maxBatchLength);
|
return this.inner.execute(options?.maxBatchLength);
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Execute the query and return the results as an @see {@link AsyncIterator}
|
* Execute the query and return the results as an @see {@link AsyncIterator}
|
||||||
@@ -214,7 +231,13 @@ export class QueryBase<
|
|||||||
/** Collect the results as an Arrow @see {@link ArrowTable}. */
|
/** Collect the results as an Arrow @see {@link ArrowTable}. */
|
||||||
async toArrow(options?: Partial<QueryExecutionOptions>): Promise<ArrowTable> {
|
async toArrow(options?: Partial<QueryExecutionOptions>): Promise<ArrowTable> {
|
||||||
const batches = [];
|
const batches = [];
|
||||||
for await (const batch of new RecordBatchIterable(this.inner, options)) {
|
let inner;
|
||||||
|
if (this.inner instanceof Promise) {
|
||||||
|
inner = await this.inner;
|
||||||
|
} else {
|
||||||
|
inner = this.inner;
|
||||||
|
}
|
||||||
|
for await (const batch of new RecordBatchIterable(inner, options)) {
|
||||||
batches.push(batch);
|
batches.push(batch);
|
||||||
}
|
}
|
||||||
return new ArrowTable(batches);
|
return new ArrowTable(batches);
|
||||||
@@ -226,6 +249,28 @@ export class QueryBase<
|
|||||||
const tbl = await this.toArrow(options);
|
const tbl = await this.toArrow(options);
|
||||||
return tbl.toArray();
|
return tbl.toArray();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Generates an explanation of the query execution plan.
|
||||||
|
*
|
||||||
|
* @example
|
||||||
|
* import * as lancedb from "@lancedb/lancedb"
|
||||||
|
* const db = await lancedb.connect("./.lancedb");
|
||||||
|
* const table = await db.createTable("my_table", [
|
||||||
|
* { vector: [1.1, 0.9], id: "1" },
|
||||||
|
* ]);
|
||||||
|
* const plan = await table.query().nearestTo([0.5, 0.2]).explainPlan();
|
||||||
|
*
|
||||||
|
* @param verbose - If true, provides a more detailed explanation. Defaults to false.
|
||||||
|
* @returns A Promise that resolves to a string containing the query execution plan explanation.
|
||||||
|
*/
|
||||||
|
async explainPlan(verbose = false): Promise<string> {
|
||||||
|
if (this.inner instanceof Promise) {
|
||||||
|
return this.inner.then((inner) => inner.explainPlan(verbose));
|
||||||
|
} else {
|
||||||
|
return this.inner.explainPlan(verbose);
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -240,8 +285,8 @@ export interface ExecutableQuery {}
|
|||||||
*
|
*
|
||||||
* This builder can be reused to execute the query many times.
|
* This builder can be reused to execute the query many times.
|
||||||
*/
|
*/
|
||||||
export class VectorQuery extends QueryBase<NativeVectorQuery, VectorQuery> {
|
export class VectorQuery extends QueryBase<NativeVectorQuery> {
|
||||||
constructor(inner: NativeVectorQuery) {
|
constructor(inner: NativeVectorQuery | Promise<NativeVectorQuery>) {
|
||||||
super(inner);
|
super(inner);
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -268,7 +313,8 @@ export class VectorQuery extends QueryBase<NativeVectorQuery, VectorQuery> {
|
|||||||
* you the desired recall.
|
* you the desired recall.
|
||||||
*/
|
*/
|
||||||
nprobes(nprobes: number): VectorQuery {
|
nprobes(nprobes: number): VectorQuery {
|
||||||
this.inner.nprobes(nprobes);
|
super.doCall((inner) => inner.nprobes(nprobes));
|
||||||
|
|
||||||
return this;
|
return this;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -282,7 +328,7 @@ export class VectorQuery extends QueryBase<NativeVectorQuery, VectorQuery> {
|
|||||||
* whose data type is a fixed-size-list of floats.
|
* whose data type is a fixed-size-list of floats.
|
||||||
*/
|
*/
|
||||||
column(column: string): VectorQuery {
|
column(column: string): VectorQuery {
|
||||||
this.inner.column(column);
|
super.doCall((inner) => inner.column(column));
|
||||||
return this;
|
return this;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -300,8 +346,10 @@ export class VectorQuery extends QueryBase<NativeVectorQuery, VectorQuery> {
|
|||||||
*
|
*
|
||||||
* By default "l2" is used.
|
* By default "l2" is used.
|
||||||
*/
|
*/
|
||||||
distanceType(distanceType: string): VectorQuery {
|
distanceType(
|
||||||
this.inner.distanceType(distanceType);
|
distanceType: Required<IvfPqOptions>["distanceType"],
|
||||||
|
): VectorQuery {
|
||||||
|
super.doCall((inner) => inner.distanceType(distanceType));
|
||||||
return this;
|
return this;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -335,7 +383,7 @@ export class VectorQuery extends QueryBase<NativeVectorQuery, VectorQuery> {
|
|||||||
* distance between the query vector and the actual uncompressed vector.
|
* distance between the query vector and the actual uncompressed vector.
|
||||||
*/
|
*/
|
||||||
refineFactor(refineFactor: number): VectorQuery {
|
refineFactor(refineFactor: number): VectorQuery {
|
||||||
this.inner.refineFactor(refineFactor);
|
super.doCall((inner) => inner.refineFactor(refineFactor));
|
||||||
return this;
|
return this;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -360,7 +408,7 @@ export class VectorQuery extends QueryBase<NativeVectorQuery, VectorQuery> {
|
|||||||
* factor can often help restore some of the results lost by post filtering.
|
* factor can often help restore some of the results lost by post filtering.
|
||||||
*/
|
*/
|
||||||
postfilter(): VectorQuery {
|
postfilter(): VectorQuery {
|
||||||
this.inner.postfilter();
|
super.doCall((inner) => inner.postfilter());
|
||||||
return this;
|
return this;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -374,13 +422,13 @@ export class VectorQuery extends QueryBase<NativeVectorQuery, VectorQuery> {
|
|||||||
* calculate your recall to select an appropriate value for nprobes.
|
* calculate your recall to select an appropriate value for nprobes.
|
||||||
*/
|
*/
|
||||||
bypassVectorIndex(): VectorQuery {
|
bypassVectorIndex(): VectorQuery {
|
||||||
this.inner.bypassVectorIndex();
|
super.doCall((inner) => inner.bypassVectorIndex());
|
||||||
return this;
|
return this;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/** A builder for LanceDB queries. */
|
/** A builder for LanceDB queries. */
|
||||||
export class Query extends QueryBase<NativeQuery, Query> {
|
export class Query extends QueryBase<NativeQuery> {
|
||||||
constructor(tbl: NativeTable) {
|
constructor(tbl: NativeTable) {
|
||||||
super(tbl.query());
|
super(tbl.query());
|
||||||
}
|
}
|
||||||
@@ -423,7 +471,37 @@ export class Query extends QueryBase<NativeQuery, Query> {
|
|||||||
* a default `limit` of 10 will be used. @see {@link Query#limit}
|
* a default `limit` of 10 will be used. @see {@link Query#limit}
|
||||||
*/
|
*/
|
||||||
nearestTo(vector: IntoVector): VectorQuery {
|
nearestTo(vector: IntoVector): VectorQuery {
|
||||||
|
if (this.inner instanceof Promise) {
|
||||||
|
const nativeQuery = this.inner.then(async (inner) => {
|
||||||
|
if (vector instanceof Promise) {
|
||||||
|
const arr = await vector.then((v) => Float32Array.from(v));
|
||||||
|
return inner.nearestTo(arr);
|
||||||
|
} else {
|
||||||
|
return inner.nearestTo(Float32Array.from(vector));
|
||||||
|
}
|
||||||
|
});
|
||||||
|
return new VectorQuery(nativeQuery);
|
||||||
|
}
|
||||||
|
if (vector instanceof Promise) {
|
||||||
|
const res = (async () => {
|
||||||
|
try {
|
||||||
|
const v = await vector;
|
||||||
|
const arr = Float32Array.from(v);
|
||||||
|
//
|
||||||
|
// biome-ignore lint/suspicious/noExplicitAny: we need to get the `inner`, but js has no package scoping
|
||||||
|
const value: any = this.nearestTo(arr);
|
||||||
|
const inner = value.inner as
|
||||||
|
| NativeVectorQuery
|
||||||
|
| Promise<NativeVectorQuery>;
|
||||||
|
return inner;
|
||||||
|
} catch (e) {
|
||||||
|
return Promise.reject(e);
|
||||||
|
}
|
||||||
|
})();
|
||||||
|
return new VectorQuery(res);
|
||||||
|
} else {
|
||||||
const vectorQuery = this.inner.nearestTo(Float32Array.from(vector));
|
const vectorQuery = this.inner.nearestTo(Float32Array.from(vector));
|
||||||
return new VectorQuery(vectorQuery);
|
return new VectorQuery(vectorQuery);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|||||||
@@ -55,7 +55,7 @@ export class RestfulLanceDBClient {
|
|||||||
return axios.create({
|
return axios.create({
|
||||||
baseURL: this.url,
|
baseURL: this.url,
|
||||||
headers: {
|
headers: {
|
||||||
// biome-ignore lint/style/useNamingConvention: external api
|
// biome-ignore lint: external API
|
||||||
Authorization: `Bearer ${this.#apiKey}`,
|
Authorization: `Bearer ${this.#apiKey}`,
|
||||||
},
|
},
|
||||||
transformResponse: decodeErrorData,
|
transformResponse: decodeErrorData,
|
||||||
|
|||||||
@@ -1,5 +1,10 @@
|
|||||||
import { Schema } from "apache-arrow";
|
import { Schema } from "apache-arrow";
|
||||||
import { Data, fromTableToStreamBuffer, makeEmptyTable } from "../arrow";
|
import {
|
||||||
|
Data,
|
||||||
|
SchemaLike,
|
||||||
|
fromTableToStreamBuffer,
|
||||||
|
makeEmptyTable,
|
||||||
|
} from "../arrow";
|
||||||
import {
|
import {
|
||||||
Connection,
|
Connection,
|
||||||
CreateTableOptions,
|
CreateTableOptions,
|
||||||
@@ -156,7 +161,7 @@ export class RemoteConnection extends Connection {
|
|||||||
|
|
||||||
async createEmptyTable(
|
async createEmptyTable(
|
||||||
name: string,
|
name: string,
|
||||||
schema: Schema,
|
schema: SchemaLike,
|
||||||
options?: Partial<CreateTableOptions> | undefined,
|
options?: Partial<CreateTableOptions> | undefined,
|
||||||
): Promise<Table> {
|
): Promise<Table> {
|
||||||
if (options?.mode) {
|
if (options?.mode) {
|
||||||
|
|||||||
@@ -22,6 +22,7 @@ import { IndexOptions } from "../indices";
|
|||||||
import { MergeInsertBuilder } from "../merge";
|
import { MergeInsertBuilder } from "../merge";
|
||||||
import { VectorQuery } from "../query";
|
import { VectorQuery } from "../query";
|
||||||
import { AddDataOptions, Table, UpdateOptions } from "../table";
|
import { AddDataOptions, Table, UpdateOptions } from "../table";
|
||||||
|
import { IntoSql, toSQL } from "../util";
|
||||||
import { RestfulLanceDBClient } from "./client";
|
import { RestfulLanceDBClient } from "./client";
|
||||||
|
|
||||||
export class RemoteTable extends Table {
|
export class RemoteTable extends Table {
|
||||||
@@ -84,12 +85,66 @@ export class RemoteTable extends Table {
|
|||||||
}
|
}
|
||||||
|
|
||||||
async update(
|
async update(
|
||||||
updates: Map<string, string> | Record<string, string>,
|
optsOrUpdates:
|
||||||
|
| (Map<string, string> | Record<string, string>)
|
||||||
|
| ({
|
||||||
|
values: Map<string, IntoSql> | Record<string, IntoSql>;
|
||||||
|
} & Partial<UpdateOptions>)
|
||||||
|
| ({
|
||||||
|
valuesSql: Map<string, string> | Record<string, string>;
|
||||||
|
} & Partial<UpdateOptions>),
|
||||||
options?: Partial<UpdateOptions>,
|
options?: Partial<UpdateOptions>,
|
||||||
): Promise<void> {
|
): Promise<void> {
|
||||||
|
const isValues =
|
||||||
|
"values" in optsOrUpdates && typeof optsOrUpdates.values !== "string";
|
||||||
|
const isValuesSql =
|
||||||
|
"valuesSql" in optsOrUpdates &&
|
||||||
|
typeof optsOrUpdates.valuesSql !== "string";
|
||||||
|
const isMap = (obj: unknown): obj is Map<string, string> => {
|
||||||
|
return obj instanceof Map;
|
||||||
|
};
|
||||||
|
|
||||||
|
let predicate;
|
||||||
|
let columns: [string, string][];
|
||||||
|
switch (true) {
|
||||||
|
case isMap(optsOrUpdates):
|
||||||
|
columns = Array.from(optsOrUpdates.entries());
|
||||||
|
predicate = options?.where;
|
||||||
|
break;
|
||||||
|
case isValues && isMap(optsOrUpdates.values):
|
||||||
|
columns = Array.from(optsOrUpdates.values.entries()).map(([k, v]) => [
|
||||||
|
k,
|
||||||
|
toSQL(v),
|
||||||
|
]);
|
||||||
|
predicate = optsOrUpdates.where;
|
||||||
|
break;
|
||||||
|
case isValues && !isMap(optsOrUpdates.values):
|
||||||
|
columns = Object.entries(optsOrUpdates.values).map(([k, v]) => [
|
||||||
|
k,
|
||||||
|
toSQL(v),
|
||||||
|
]);
|
||||||
|
predicate = optsOrUpdates.where;
|
||||||
|
break;
|
||||||
|
|
||||||
|
case isValuesSql && isMap(optsOrUpdates.valuesSql):
|
||||||
|
columns = Array.from(optsOrUpdates.valuesSql.entries());
|
||||||
|
predicate = optsOrUpdates.where;
|
||||||
|
break;
|
||||||
|
case isValuesSql && !isMap(optsOrUpdates.valuesSql):
|
||||||
|
columns = Object.entries(optsOrUpdates.valuesSql).map(([k, v]) => [
|
||||||
|
k,
|
||||||
|
v,
|
||||||
|
]);
|
||||||
|
predicate = optsOrUpdates.where;
|
||||||
|
break;
|
||||||
|
default:
|
||||||
|
columns = Object.entries(optsOrUpdates as Record<string, string>);
|
||||||
|
predicate = options?.where;
|
||||||
|
}
|
||||||
|
|
||||||
await this.#client.post(`${this.#tablePrefix}/update/`, {
|
await this.#client.post(`${this.#tablePrefix}/update/`, {
|
||||||
predicate: options?.where ?? null,
|
predicate: predicate ?? null,
|
||||||
updates: Object.entries(updates).map(([key, value]) => [key, value]),
|
updates: columns,
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
async countRows(filter?: unknown): Promise<number> {
|
async countRows(filter?: unknown): Promise<number> {
|
||||||
@@ -122,9 +177,8 @@ export class RemoteTable extends Table {
|
|||||||
query(): import("..").Query {
|
query(): import("..").Query {
|
||||||
throw new Error("query() is not yet supported on the LanceDB cloud");
|
throw new Error("query() is not yet supported on the LanceDB cloud");
|
||||||
}
|
}
|
||||||
search(query: IntoVector): VectorQuery;
|
|
||||||
search(query: string): Promise<VectorQuery>;
|
search(_query: string | IntoVector): VectorQuery {
|
||||||
search(_query: string | IntoVector): VectorQuery | Promise<VectorQuery> {
|
|
||||||
throw new Error("search() is not yet supported on the LanceDB cloud");
|
throw new Error("search() is not yet supported on the LanceDB cloud");
|
||||||
}
|
}
|
||||||
vectorSearch(_vector: unknown): import("..").VectorQuery {
|
vectorSearch(_vector: unknown): import("..").VectorQuery {
|
||||||
|
|||||||
@@ -20,10 +20,12 @@
|
|||||||
// comes from the exact same library instance. This is not always the case
|
// comes from the exact same library instance. This is not always the case
|
||||||
// and so we must sanitize the input to ensure that it is compatible.
|
// and so we must sanitize the input to ensure that it is compatible.
|
||||||
|
|
||||||
|
import { BufferType, Data } from "apache-arrow";
|
||||||
import type { IntBitWidth, TKeys, TimeBitWidth } from "apache-arrow/type";
|
import type { IntBitWidth, TKeys, TimeBitWidth } from "apache-arrow/type";
|
||||||
import {
|
import {
|
||||||
Binary,
|
Binary,
|
||||||
Bool,
|
Bool,
|
||||||
|
DataLike,
|
||||||
DataType,
|
DataType,
|
||||||
DateDay,
|
DateDay,
|
||||||
DateMillisecond,
|
DateMillisecond,
|
||||||
@@ -56,9 +58,14 @@ import {
|
|||||||
Map_,
|
Map_,
|
||||||
Null,
|
Null,
|
||||||
type Precision,
|
type Precision,
|
||||||
|
RecordBatch,
|
||||||
|
RecordBatchLike,
|
||||||
Schema,
|
Schema,
|
||||||
|
SchemaLike,
|
||||||
SparseUnion,
|
SparseUnion,
|
||||||
Struct,
|
Struct,
|
||||||
|
Table,
|
||||||
|
TableLike,
|
||||||
Time,
|
Time,
|
||||||
TimeMicrosecond,
|
TimeMicrosecond,
|
||||||
TimeMillisecond,
|
TimeMillisecond,
|
||||||
@@ -488,7 +495,7 @@ export function sanitizeField(fieldLike: unknown): Field {
|
|||||||
* instance because they might be using a different instance of apache-arrow
|
* instance because they might be using a different instance of apache-arrow
|
||||||
* than lancedb is using.
|
* than lancedb is using.
|
||||||
*/
|
*/
|
||||||
export function sanitizeSchema(schemaLike: unknown): Schema {
|
export function sanitizeSchema(schemaLike: SchemaLike): Schema {
|
||||||
if (schemaLike instanceof Schema) {
|
if (schemaLike instanceof Schema) {
|
||||||
return schemaLike;
|
return schemaLike;
|
||||||
}
|
}
|
||||||
@@ -514,3 +521,68 @@ export function sanitizeSchema(schemaLike: unknown): Schema {
|
|||||||
);
|
);
|
||||||
return new Schema(sanitizedFields, metadata);
|
return new Schema(sanitizedFields, metadata);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export function sanitizeTable(tableLike: TableLike): Table {
|
||||||
|
if (tableLike instanceof Table) {
|
||||||
|
return tableLike;
|
||||||
|
}
|
||||||
|
if (typeof tableLike !== "object" || tableLike === null) {
|
||||||
|
throw Error("Expected a Table but object was null/undefined");
|
||||||
|
}
|
||||||
|
if (!("schema" in tableLike)) {
|
||||||
|
throw Error(
|
||||||
|
"The table passed in does not appear to be a table (no 'schema' property)",
|
||||||
|
);
|
||||||
|
}
|
||||||
|
if (!("batches" in tableLike)) {
|
||||||
|
throw Error(
|
||||||
|
"The table passed in does not appear to be a table (no 'columns' property)",
|
||||||
|
);
|
||||||
|
}
|
||||||
|
const schema = sanitizeSchema(tableLike.schema);
|
||||||
|
|
||||||
|
const batches = tableLike.batches.map(sanitizeRecordBatch);
|
||||||
|
return new Table(schema, batches);
|
||||||
|
}
|
||||||
|
|
||||||
|
function sanitizeRecordBatch(batchLike: RecordBatchLike): RecordBatch {
|
||||||
|
if (batchLike instanceof RecordBatch) {
|
||||||
|
return batchLike;
|
||||||
|
}
|
||||||
|
if (typeof batchLike !== "object" || batchLike === null) {
|
||||||
|
throw Error("Expected a RecordBatch but object was null/undefined");
|
||||||
|
}
|
||||||
|
if (!("schema" in batchLike)) {
|
||||||
|
throw Error(
|
||||||
|
"The record batch passed in does not appear to be a record batch (no 'schema' property)",
|
||||||
|
);
|
||||||
|
}
|
||||||
|
if (!("data" in batchLike)) {
|
||||||
|
throw Error(
|
||||||
|
"The record batch passed in does not appear to be a record batch (no 'data' property)",
|
||||||
|
);
|
||||||
|
}
|
||||||
|
const schema = sanitizeSchema(batchLike.schema);
|
||||||
|
const data = sanitizeData(batchLike.data);
|
||||||
|
return new RecordBatch(schema, data);
|
||||||
|
}
|
||||||
|
function sanitizeData(
|
||||||
|
dataLike: DataLike,
|
||||||
|
// biome-ignore lint/suspicious/noExplicitAny: <explanation>
|
||||||
|
): import("apache-arrow").Data<Struct<any>> {
|
||||||
|
if (dataLike instanceof Data) {
|
||||||
|
return dataLike;
|
||||||
|
}
|
||||||
|
return new Data(
|
||||||
|
dataLike.type,
|
||||||
|
dataLike.offset,
|
||||||
|
dataLike.length,
|
||||||
|
dataLike.nullCount,
|
||||||
|
{
|
||||||
|
[BufferType.OFFSET]: dataLike.valueOffsets,
|
||||||
|
[BufferType.DATA]: dataLike.values,
|
||||||
|
[BufferType.VALIDITY]: dataLike.nullBitmap,
|
||||||
|
[BufferType.TYPE]: dataLike.typeIds,
|
||||||
|
},
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|||||||
@@ -17,6 +17,7 @@ import {
|
|||||||
Data,
|
Data,
|
||||||
IntoVector,
|
IntoVector,
|
||||||
Schema,
|
Schema,
|
||||||
|
TableLike,
|
||||||
fromDataToBuffer,
|
fromDataToBuffer,
|
||||||
fromTableToBuffer,
|
fromTableToBuffer,
|
||||||
fromTableToStreamBuffer,
|
fromTableToStreamBuffer,
|
||||||
@@ -38,6 +39,9 @@ import {
|
|||||||
Table as _NativeTable,
|
Table as _NativeTable,
|
||||||
} from "./native";
|
} from "./native";
|
||||||
import { Query, VectorQuery } from "./query";
|
import { Query, VectorQuery } from "./query";
|
||||||
|
import { sanitizeTable } from "./sanitize";
|
||||||
|
import { IntoSql, toSQL } from "./util";
|
||||||
|
export { IndexConfig } from "./native";
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Options for adding data to a table.
|
* Options for adding data to a table.
|
||||||
@@ -120,6 +124,34 @@ export abstract class Table {
|
|||||||
* @param {Data} data Records to be inserted into the Table
|
* @param {Data} data Records to be inserted into the Table
|
||||||
*/
|
*/
|
||||||
abstract add(data: Data, options?: Partial<AddDataOptions>): Promise<void>;
|
abstract add(data: Data, options?: Partial<AddDataOptions>): Promise<void>;
|
||||||
|
/**
|
||||||
|
* Update existing records in the Table
|
||||||
|
* @param opts.values The values to update. The keys are the column names and the values
|
||||||
|
* are the values to set.
|
||||||
|
* @example
|
||||||
|
* ```ts
|
||||||
|
* table.update({where:"x = 2", values:{"vector": [10, 10]}})
|
||||||
|
* ```
|
||||||
|
*/
|
||||||
|
abstract update(
|
||||||
|
opts: {
|
||||||
|
values: Map<string, IntoSql> | Record<string, IntoSql>;
|
||||||
|
} & Partial<UpdateOptions>,
|
||||||
|
): Promise<void>;
|
||||||
|
/**
|
||||||
|
* Update existing records in the Table
|
||||||
|
* @param opts.valuesSql The values to update. The keys are the column names and the values
|
||||||
|
* are the values to set. The values are SQL expressions.
|
||||||
|
* @example
|
||||||
|
* ```ts
|
||||||
|
* table.update({where:"x = 2", valuesSql:{"x": "x + 1"}})
|
||||||
|
* ```
|
||||||
|
*/
|
||||||
|
abstract update(
|
||||||
|
opts: {
|
||||||
|
valuesSql: Map<string, string> | Record<string, string>;
|
||||||
|
} & Partial<UpdateOptions>,
|
||||||
|
): Promise<void>;
|
||||||
/**
|
/**
|
||||||
* Update existing records in the Table
|
* Update existing records in the Table
|
||||||
*
|
*
|
||||||
@@ -149,6 +181,7 @@ export abstract class Table {
|
|||||||
updates: Map<string, string> | Record<string, string>,
|
updates: Map<string, string> | Record<string, string>,
|
||||||
options?: Partial<UpdateOptions>,
|
options?: Partial<UpdateOptions>,
|
||||||
): Promise<void>;
|
): Promise<void>;
|
||||||
|
|
||||||
/** Count the total number of rows in the dataset. */
|
/** Count the total number of rows in the dataset. */
|
||||||
abstract countRows(filter?: string): Promise<number>;
|
abstract countRows(filter?: string): Promise<number>;
|
||||||
/** Delete the rows that satisfy the predicate. */
|
/** Delete the rows that satisfy the predicate. */
|
||||||
@@ -241,9 +274,9 @@ export abstract class Table {
|
|||||||
* Create a search query to find the nearest neighbors
|
* Create a search query to find the nearest neighbors
|
||||||
* of the given query vector
|
* of the given query vector
|
||||||
* @param {string} query - the query. This will be converted to a vector using the table's provided embedding function
|
* @param {string} query - the query. This will be converted to a vector using the table's provided embedding function
|
||||||
* @rejects {Error} If no embedding functions are defined in the table
|
* @note If no embedding functions are defined in the table, this will error when collecting the results.
|
||||||
*/
|
*/
|
||||||
abstract search(query: string): Promise<VectorQuery>;
|
abstract search(query: string): VectorQuery;
|
||||||
/**
|
/**
|
||||||
* Create a search query to find the nearest neighbors
|
* Create a search query to find the nearest neighbors
|
||||||
* of the given query vector
|
* of the given query vector
|
||||||
@@ -381,8 +414,7 @@ export abstract class Table {
|
|||||||
abstract indexStats(name: string): Promise<IndexStatistics | undefined>;
|
abstract indexStats(name: string): Promise<IndexStatistics | undefined>;
|
||||||
|
|
||||||
static async parseTableData(
|
static async parseTableData(
|
||||||
// biome-ignore lint/suspicious/noExplicitAny: <explanation>
|
data: Record<string, unknown>[] | TableLike,
|
||||||
data: Record<string, unknown>[] | ArrowTable<any>,
|
|
||||||
options?: Partial<CreateTableOptions>,
|
options?: Partial<CreateTableOptions>,
|
||||||
streaming = false,
|
streaming = false,
|
||||||
) {
|
) {
|
||||||
@@ -395,9 +427,9 @@ export abstract class Table {
|
|||||||
|
|
||||||
let table: ArrowTable;
|
let table: ArrowTable;
|
||||||
if (isArrowTable(data)) {
|
if (isArrowTable(data)) {
|
||||||
table = data;
|
table = sanitizeTable(data);
|
||||||
} else {
|
} else {
|
||||||
table = makeArrowTable(data, options);
|
table = makeArrowTable(data as Record<string, unknown>[], options);
|
||||||
}
|
}
|
||||||
if (streaming) {
|
if (streaming) {
|
||||||
const buf = await fromTableToStreamBuffer(
|
const buf = await fromTableToStreamBuffer(
|
||||||
@@ -469,17 +501,63 @@ export class LocalTable extends Table {
|
|||||||
}
|
}
|
||||||
|
|
||||||
async update(
|
async update(
|
||||||
updates: Map<string, string> | Record<string, string>,
|
optsOrUpdates:
|
||||||
|
| (Map<string, string> | Record<string, string>)
|
||||||
|
| ({
|
||||||
|
values: Map<string, IntoSql> | Record<string, IntoSql>;
|
||||||
|
} & Partial<UpdateOptions>)
|
||||||
|
| ({
|
||||||
|
valuesSql: Map<string, string> | Record<string, string>;
|
||||||
|
} & Partial<UpdateOptions>),
|
||||||
options?: Partial<UpdateOptions>,
|
options?: Partial<UpdateOptions>,
|
||||||
) {
|
) {
|
||||||
const onlyIf = options?.where;
|
const isValues =
|
||||||
|
"values" in optsOrUpdates && typeof optsOrUpdates.values !== "string";
|
||||||
|
const isValuesSql =
|
||||||
|
"valuesSql" in optsOrUpdates &&
|
||||||
|
typeof optsOrUpdates.valuesSql !== "string";
|
||||||
|
const isMap = (obj: unknown): obj is Map<string, string> => {
|
||||||
|
return obj instanceof Map;
|
||||||
|
};
|
||||||
|
|
||||||
|
let predicate;
|
||||||
let columns: [string, string][];
|
let columns: [string, string][];
|
||||||
if (updates instanceof Map) {
|
switch (true) {
|
||||||
columns = Array.from(updates.entries());
|
case isMap(optsOrUpdates):
|
||||||
} else {
|
columns = Array.from(optsOrUpdates.entries());
|
||||||
columns = Object.entries(updates);
|
predicate = options?.where;
|
||||||
|
break;
|
||||||
|
case isValues && isMap(optsOrUpdates.values):
|
||||||
|
columns = Array.from(optsOrUpdates.values.entries()).map(([k, v]) => [
|
||||||
|
k,
|
||||||
|
toSQL(v),
|
||||||
|
]);
|
||||||
|
predicate = optsOrUpdates.where;
|
||||||
|
break;
|
||||||
|
case isValues && !isMap(optsOrUpdates.values):
|
||||||
|
columns = Object.entries(optsOrUpdates.values).map(([k, v]) => [
|
||||||
|
k,
|
||||||
|
toSQL(v),
|
||||||
|
]);
|
||||||
|
predicate = optsOrUpdates.where;
|
||||||
|
break;
|
||||||
|
|
||||||
|
case isValuesSql && isMap(optsOrUpdates.valuesSql):
|
||||||
|
columns = Array.from(optsOrUpdates.valuesSql.entries());
|
||||||
|
predicate = optsOrUpdates.where;
|
||||||
|
break;
|
||||||
|
case isValuesSql && !isMap(optsOrUpdates.valuesSql):
|
||||||
|
columns = Object.entries(optsOrUpdates.valuesSql).map(([k, v]) => [
|
||||||
|
k,
|
||||||
|
v,
|
||||||
|
]);
|
||||||
|
predicate = optsOrUpdates.where;
|
||||||
|
break;
|
||||||
|
default:
|
||||||
|
columns = Object.entries(optsOrUpdates as Record<string, string>);
|
||||||
|
predicate = options?.where;
|
||||||
}
|
}
|
||||||
await this.inner.update(onlyIf, columns);
|
await this.inner.update(predicate, columns);
|
||||||
}
|
}
|
||||||
|
|
||||||
async countRows(filter?: string): Promise<number> {
|
async countRows(filter?: string): Promise<number> {
|
||||||
@@ -500,15 +578,12 @@ export class LocalTable extends Table {
|
|||||||
query(): Query {
|
query(): Query {
|
||||||
return new Query(this.inner);
|
return new Query(this.inner);
|
||||||
}
|
}
|
||||||
|
search(query: string | IntoVector): VectorQuery {
|
||||||
search(query: string): Promise<VectorQuery>;
|
|
||||||
|
|
||||||
search(query: IntoVector): VectorQuery;
|
|
||||||
search(query: string | IntoVector): Promise<VectorQuery> | VectorQuery {
|
|
||||||
if (typeof query !== "string") {
|
if (typeof query !== "string") {
|
||||||
return this.vectorSearch(query);
|
return this.vectorSearch(query);
|
||||||
} else {
|
} else {
|
||||||
return this.getEmbeddingFunctions().then(async (functions) => {
|
const queryPromise = this.getEmbeddingFunctions().then(
|
||||||
|
async (functions) => {
|
||||||
// TODO: Support multiple embedding functions
|
// TODO: Support multiple embedding functions
|
||||||
const embeddingFunc: EmbeddingFunctionConfig | undefined = functions
|
const embeddingFunc: EmbeddingFunctionConfig | undefined = functions
|
||||||
.values()
|
.values()
|
||||||
@@ -518,10 +593,11 @@ export class LocalTable extends Table {
|
|||||||
new Error("No embedding functions are defined in the table"),
|
new Error("No embedding functions are defined in the table"),
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
const embeddings =
|
return await embeddingFunc.function.computeQueryEmbeddings(query);
|
||||||
await embeddingFunc.function.computeQueryEmbeddings(query);
|
},
|
||||||
return this.query().nearestTo(embeddings);
|
);
|
||||||
});
|
|
||||||
|
return this.query().nearestTo(queryPromise);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -1,3 +1,37 @@
|
|||||||
|
export type IntoSql =
|
||||||
|
| string
|
||||||
|
| number
|
||||||
|
| boolean
|
||||||
|
| null
|
||||||
|
| Date
|
||||||
|
| ArrayBufferLike
|
||||||
|
| Buffer
|
||||||
|
| IntoSql[];
|
||||||
|
|
||||||
|
export function toSQL(value: IntoSql): string {
|
||||||
|
if (typeof value === "string") {
|
||||||
|
return `'${value.replace(/'/g, "''")}'`;
|
||||||
|
} else if (typeof value === "number") {
|
||||||
|
return value.toString();
|
||||||
|
} else if (typeof value === "boolean") {
|
||||||
|
return value ? "TRUE" : "FALSE";
|
||||||
|
} else if (value === null) {
|
||||||
|
return "NULL";
|
||||||
|
} else if (value instanceof Date) {
|
||||||
|
return `'${value.toISOString()}'`;
|
||||||
|
} else if (Array.isArray(value)) {
|
||||||
|
return `[${value.map(toSQL).join(", ")}]`;
|
||||||
|
} else if (Buffer.isBuffer(value)) {
|
||||||
|
return `X'${value.toString("hex")}'`;
|
||||||
|
} else if (value instanceof ArrayBuffer) {
|
||||||
|
return `X'${Buffer.from(value).toString("hex")}'`;
|
||||||
|
} else {
|
||||||
|
throw new Error(
|
||||||
|
`Unsupported value type: ${typeof value} value: (${value})`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
export class TTLCache {
|
export class TTLCache {
|
||||||
// biome-ignore lint/suspicious/noExplicitAny: <explanation>
|
// biome-ignore lint/suspicious/noExplicitAny: <explanation>
|
||||||
private readonly cache: Map<string, { value: any; expires: number }>;
|
private readonly cache: Map<string, { value: any; expires: number }>;
|
||||||
|
|||||||
208
nodejs/native.d.ts
vendored
Normal file
208
nodejs/native.d.ts
vendored
Normal file
@@ -0,0 +1,208 @@
|
|||||||
|
/* tslint:disable */
|
||||||
|
/* eslint-disable */
|
||||||
|
|
||||||
|
/* auto-generated by NAPI-RS */
|
||||||
|
|
||||||
|
/** A description of an index currently configured on a column */
|
||||||
|
export interface IndexConfig {
|
||||||
|
/** The name of the index */
|
||||||
|
name: string
|
||||||
|
/** The type of the index */
|
||||||
|
indexType: string
|
||||||
|
/**
|
||||||
|
* The columns in the index
|
||||||
|
*
|
||||||
|
* Currently this is always an array of size 1. In the future there may
|
||||||
|
* be more columns to represent composite indices.
|
||||||
|
*/
|
||||||
|
columns: Array<string>
|
||||||
|
}
|
||||||
|
/** Statistics about a compaction operation. */
|
||||||
|
export interface CompactionStats {
|
||||||
|
/** The number of fragments removed */
|
||||||
|
fragmentsRemoved: number
|
||||||
|
/** The number of new, compacted fragments added */
|
||||||
|
fragmentsAdded: number
|
||||||
|
/** The number of data files removed */
|
||||||
|
filesRemoved: number
|
||||||
|
/** The number of new, compacted data files added */
|
||||||
|
filesAdded: number
|
||||||
|
}
|
||||||
|
/** Statistics about a cleanup operation */
|
||||||
|
export interface RemovalStats {
|
||||||
|
/** The number of bytes removed */
|
||||||
|
bytesRemoved: number
|
||||||
|
/** The number of old versions removed */
|
||||||
|
oldVersionsRemoved: number
|
||||||
|
}
|
||||||
|
/** Statistics about an optimize operation */
|
||||||
|
export interface OptimizeStats {
|
||||||
|
/** Statistics about the compaction operation */
|
||||||
|
compaction: CompactionStats
|
||||||
|
/** Statistics about the removal operation */
|
||||||
|
prune: RemovalStats
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* A definition of a column alteration. The alteration changes the column at
|
||||||
|
* `path` to have the new name `name`, to be nullable if `nullable` is true,
|
||||||
|
* and to have the data type `data_type`. At least one of `rename` or `nullable`
|
||||||
|
* must be provided.
|
||||||
|
*/
|
||||||
|
export interface ColumnAlteration {
|
||||||
|
/**
|
||||||
|
* The path to the column to alter. This is a dot-separated path to the column.
|
||||||
|
* If it is a top-level column then it is just the name of the column. If it is
|
||||||
|
* a nested column then it is the path to the column, e.g. "a.b.c" for a column
|
||||||
|
* `c` nested inside a column `b` nested inside a column `a`.
|
||||||
|
*/
|
||||||
|
path: string
|
||||||
|
/**
|
||||||
|
* The new name of the column. If not provided then the name will not be changed.
|
||||||
|
* This must be distinct from the names of all other columns in the table.
|
||||||
|
*/
|
||||||
|
rename?: string
|
||||||
|
/** Set the new nullability. Note that a nullable column cannot be made non-nullable. */
|
||||||
|
nullable?: boolean
|
||||||
|
}
|
||||||
|
/** A definition of a new column to add to a table. */
|
||||||
|
export interface AddColumnsSql {
|
||||||
|
/** The name of the new column. */
|
||||||
|
name: string
|
||||||
|
/**
|
||||||
|
* The values to populate the new column with, as a SQL expression.
|
||||||
|
* The expression can reference other columns in the table.
|
||||||
|
*/
|
||||||
|
valueSql: string
|
||||||
|
}
|
||||||
|
export interface IndexStatistics {
|
||||||
|
/** The number of rows indexed by the index */
|
||||||
|
numIndexedRows: number
|
||||||
|
/** The number of rows not indexed */
|
||||||
|
numUnindexedRows: number
|
||||||
|
/** The type of the index */
|
||||||
|
indexType?: string
|
||||||
|
/** The metadata for each index */
|
||||||
|
indices: Array<IndexMetadata>
|
||||||
|
}
|
||||||
|
export interface IndexMetadata {
|
||||||
|
metricType?: string
|
||||||
|
indexType?: string
|
||||||
|
}
|
||||||
|
export interface ConnectionOptions {
|
||||||
|
/**
|
||||||
|
* (For LanceDB OSS only): The interval, in seconds, at which to check for
|
||||||
|
* updates to the table from other processes. If None, then consistency is not
|
||||||
|
* checked. For performance reasons, this is the default. For strong
|
||||||
|
* consistency, set this to zero seconds. Then every read will check for
|
||||||
|
* updates from other processes. As a compromise, you can set this to a
|
||||||
|
* non-zero value for eventual consistency. If more than that interval
|
||||||
|
* has passed since the last check, then the table will be checked for updates.
|
||||||
|
* Note: this consistency only applies to read operations. Write operations are
|
||||||
|
* always consistent.
|
||||||
|
*/
|
||||||
|
readConsistencyInterval?: number
|
||||||
|
/**
|
||||||
|
* (For LanceDB OSS only): configuration for object storage.
|
||||||
|
*
|
||||||
|
* The available options are described at https://lancedb.github.io/lancedb/guides/storage/
|
||||||
|
*/
|
||||||
|
storageOptions?: Record<string, string>
|
||||||
|
}
|
||||||
|
/** Write mode for writing a table. */
|
||||||
|
export const enum WriteMode {
|
||||||
|
Create = 'Create',
|
||||||
|
Append = 'Append',
|
||||||
|
Overwrite = 'Overwrite'
|
||||||
|
}
|
||||||
|
/** Write options when creating a Table. */
|
||||||
|
export interface WriteOptions {
|
||||||
|
/** Write mode for writing to a table. */
|
||||||
|
mode?: WriteMode
|
||||||
|
}
|
||||||
|
export interface OpenTableOptions {
|
||||||
|
storageOptions?: Record<string, string>
|
||||||
|
}
|
||||||
|
export class Connection {
|
||||||
|
/** Create a new Connection instance from the given URI. */
|
||||||
|
static new(uri: string, options: ConnectionOptions): Promise<Connection>
|
||||||
|
display(): string
|
||||||
|
isOpen(): boolean
|
||||||
|
close(): void
|
||||||
|
/** List all tables in the dataset. */
|
||||||
|
tableNames(startAfter?: string | undefined | null, limit?: number | undefined | null): Promise<Array<string>>
|
||||||
|
/**
|
||||||
|
* Create table from a Apache Arrow IPC (file) buffer.
|
||||||
|
*
|
||||||
|
* Parameters:
|
||||||
|
* - name: The name of the table.
|
||||||
|
* - buf: The buffer containing the IPC file.
|
||||||
|
*
|
||||||
|
*/
|
||||||
|
createTable(name: string, buf: Buffer, mode: string, storageOptions?: Record<string, string> | undefined | null, useLegacyFormat?: boolean | undefined | null): Promise<Table>
|
||||||
|
createEmptyTable(name: string, schemaBuf: Buffer, mode: string, storageOptions?: Record<string, string> | undefined | null, useLegacyFormat?: boolean | undefined | null): Promise<Table>
|
||||||
|
openTable(name: string, storageOptions?: Record<string, string> | undefined | null, indexCacheSize?: number | undefined | null): Promise<Table>
|
||||||
|
/** Drop table with the name. Or raise an error if the table does not exist. */
|
||||||
|
dropTable(name: string): Promise<void>
|
||||||
|
}
|
||||||
|
export class Index {
|
||||||
|
static ivfPq(distanceType?: string | undefined | null, numPartitions?: number | undefined | null, numSubVectors?: number | undefined | null, maxIterations?: number | undefined | null, sampleRate?: number | undefined | null): Index
|
||||||
|
static btree(): Index
|
||||||
|
}
|
||||||
|
/** Typescript-style Async Iterator over RecordBatches */
|
||||||
|
export class RecordBatchIterator {
|
||||||
|
next(): Promise<Buffer | null>
|
||||||
|
}
|
||||||
|
/** A builder used to create and run a merge insert operation */
|
||||||
|
export class NativeMergeInsertBuilder {
|
||||||
|
whenMatchedUpdateAll(condition?: string | undefined | null): NativeMergeInsertBuilder
|
||||||
|
whenNotMatchedInsertAll(): NativeMergeInsertBuilder
|
||||||
|
whenNotMatchedBySourceDelete(filter?: string | undefined | null): NativeMergeInsertBuilder
|
||||||
|
execute(buf: Buffer): Promise<void>
|
||||||
|
}
|
||||||
|
export class Query {
|
||||||
|
onlyIf(predicate: string): void
|
||||||
|
select(columns: Array<[string, string]>): void
|
||||||
|
limit(limit: number): void
|
||||||
|
nearestTo(vector: Float32Array): VectorQuery
|
||||||
|
execute(maxBatchLength?: number | undefined | null): Promise<RecordBatchIterator>
|
||||||
|
explainPlan(verbose: boolean): Promise<string>
|
||||||
|
}
|
||||||
|
export class VectorQuery {
|
||||||
|
column(column: string): void
|
||||||
|
distanceType(distanceType: string): void
|
||||||
|
postfilter(): void
|
||||||
|
refineFactor(refineFactor: number): void
|
||||||
|
nprobes(nprobe: number): void
|
||||||
|
bypassVectorIndex(): void
|
||||||
|
onlyIf(predicate: string): void
|
||||||
|
select(columns: Array<[string, string]>): void
|
||||||
|
limit(limit: number): void
|
||||||
|
execute(maxBatchLength?: number | undefined | null): Promise<RecordBatchIterator>
|
||||||
|
explainPlan(verbose: boolean): Promise<string>
|
||||||
|
}
|
||||||
|
export class Table {
|
||||||
|
name: string
|
||||||
|
display(): string
|
||||||
|
isOpen(): boolean
|
||||||
|
close(): void
|
||||||
|
/** Return Schema as empty Arrow IPC file. */
|
||||||
|
schema(): Promise<Buffer>
|
||||||
|
add(buf: Buffer, mode: string): Promise<void>
|
||||||
|
countRows(filter?: string | undefined | null): Promise<number>
|
||||||
|
delete(predicate: string): Promise<void>
|
||||||
|
createIndex(index: Index | undefined | null, column: string, replace?: boolean | undefined | null): Promise<void>
|
||||||
|
update(onlyIf: string | undefined | null, columns: Array<[string, string]>): Promise<void>
|
||||||
|
query(): Query
|
||||||
|
vectorSearch(vector: Float32Array): VectorQuery
|
||||||
|
addColumns(transforms: Array<AddColumnsSql>): Promise<void>
|
||||||
|
alterColumns(alterations: Array<ColumnAlteration>): Promise<void>
|
||||||
|
dropColumns(columns: Array<string>): Promise<void>
|
||||||
|
version(): Promise<number>
|
||||||
|
checkout(version: number): Promise<void>
|
||||||
|
checkoutLatest(): Promise<void>
|
||||||
|
restore(): Promise<void>
|
||||||
|
optimize(olderThanMs?: number | undefined | null): Promise<OptimizeStats>
|
||||||
|
listIndices(): Promise<Array<IndexConfig>>
|
||||||
|
indexStats(indexName: string): Promise<IndexStatistics | null>
|
||||||
|
mergeInsert(on: Array<string>): NativeMergeInsertBuilder
|
||||||
|
}
|
||||||
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "@lancedb/lancedb-darwin-arm64",
|
"name": "@lancedb/lancedb-darwin-arm64",
|
||||||
"version": "0.5.2",
|
"version": "0.7.0",
|
||||||
"os": ["darwin"],
|
"os": ["darwin"],
|
||||||
"cpu": ["arm64"],
|
"cpu": ["arm64"],
|
||||||
"main": "lancedb.darwin-arm64.node",
|
"main": "lancedb.darwin-arm64.node",
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "@lancedb/lancedb-darwin-x64",
|
"name": "@lancedb/lancedb-darwin-x64",
|
||||||
"version": "0.5.2",
|
"version": "0.7.0",
|
||||||
"os": ["darwin"],
|
"os": ["darwin"],
|
||||||
"cpu": ["x64"],
|
"cpu": ["x64"],
|
||||||
"main": "lancedb.darwin-x64.node",
|
"main": "lancedb.darwin-x64.node",
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "@lancedb/lancedb-linux-arm64-gnu",
|
"name": "@lancedb/lancedb-linux-arm64-gnu",
|
||||||
"version": "0.5.2",
|
"version": "0.7.0",
|
||||||
"os": ["linux"],
|
"os": ["linux"],
|
||||||
"cpu": ["arm64"],
|
"cpu": ["arm64"],
|
||||||
"main": "lancedb.linux-arm64-gnu.node",
|
"main": "lancedb.linux-arm64-gnu.node",
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "@lancedb/lancedb-linux-x64-gnu",
|
"name": "@lancedb/lancedb-linux-x64-gnu",
|
||||||
"version": "0.5.2",
|
"version": "0.7.0",
|
||||||
"os": ["linux"],
|
"os": ["linux"],
|
||||||
"cpu": ["x64"],
|
"cpu": ["x64"],
|
||||||
"main": "lancedb.linux-x64-gnu.node",
|
"main": "lancedb.linux-x64-gnu.node",
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "@lancedb/lancedb-win32-x64-msvc",
|
"name": "@lancedb/lancedb-win32-x64-msvc",
|
||||||
"version": "0.5.2",
|
"version": "0.7.0",
|
||||||
"os": ["win32"],
|
"os": ["win32"],
|
||||||
"cpu": ["x64"],
|
"cpu": ["x64"],
|
||||||
"main": "lancedb.win32-x64-msvc.node",
|
"main": "lancedb.win32-x64-msvc.node",
|
||||||
|
|||||||
1403
nodejs/package-lock.json
generated
1403
nodejs/package-lock.json
generated
File diff suppressed because it is too large
Load Diff
@@ -10,7 +10,7 @@
|
|||||||
"vector database",
|
"vector database",
|
||||||
"ann"
|
"ann"
|
||||||
],
|
],
|
||||||
"version": "0.5.2",
|
"version": "0.7.0",
|
||||||
"main": "dist/index.js",
|
"main": "dist/index.js",
|
||||||
"exports": {
|
"exports": {
|
||||||
".": "./dist/index.js",
|
".": "./dist/index.js",
|
||||||
@@ -34,9 +34,10 @@
|
|||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@aws-sdk/client-kms": "^3.33.0",
|
"@aws-sdk/client-kms": "^3.33.0",
|
||||||
"@aws-sdk/client-s3": "^3.33.0",
|
"@aws-sdk/client-s3": "^3.33.0",
|
||||||
|
"@aws-sdk/client-dynamodb": "^3.33.0",
|
||||||
"@biomejs/biome": "^1.7.3",
|
"@biomejs/biome": "^1.7.3",
|
||||||
"@jest/globals": "^29.7.0",
|
"@jest/globals": "^29.7.0",
|
||||||
"@napi-rs/cli": "^2.18.0",
|
"@napi-rs/cli": "^2.18.3",
|
||||||
"@types/jest": "^29.1.2",
|
"@types/jest": "^29.1.2",
|
||||||
"@types/tmp": "^0.2.6",
|
"@types/tmp": "^0.2.6",
|
||||||
"apache-arrow-old": "npm:apache-arrow@13.0.0",
|
"apache-arrow-old": "npm:apache-arrow@13.0.0",
|
||||||
@@ -68,7 +69,7 @@
|
|||||||
"lint-ci": "biome ci .",
|
"lint-ci": "biome ci .",
|
||||||
"docs": "typedoc --plugin typedoc-plugin-markdown --out ../docs/src/js lancedb/index.ts",
|
"docs": "typedoc --plugin typedoc-plugin-markdown --out ../docs/src/js lancedb/index.ts",
|
||||||
"lint": "biome check . && biome format .",
|
"lint": "biome check . && biome format .",
|
||||||
"lint-fix": "biome check --apply-unsafe . && biome format --write .",
|
"lint-fix": "biome check --write . && biome format --write .",
|
||||||
"prepublishOnly": "napi prepublish -t npm",
|
"prepublishOnly": "napi prepublish -t npm",
|
||||||
"test": "jest --verbose",
|
"test": "jest --verbose",
|
||||||
"integration": "S3_TEST=1 npm run test",
|
"integration": "S3_TEST=1 npm run test",
|
||||||
@@ -76,9 +77,13 @@
|
|||||||
"version": "napi version"
|
"version": "napi version"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"apache-arrow": "^15.0.0",
|
|
||||||
"axios": "^1.7.2",
|
"axios": "^1.7.2",
|
||||||
"openai": "^4.29.2",
|
|
||||||
"reflect-metadata": "^0.2.2"
|
"reflect-metadata": "^0.2.2"
|
||||||
|
},
|
||||||
|
"optionalDependencies": {
|
||||||
|
"openai": "^4.29.2"
|
||||||
|
},
|
||||||
|
"peerDependencies": {
|
||||||
|
"apache-arrow": "^15.0.0"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -89,7 +89,7 @@ impl Connection {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// List all tables in the dataset.
|
/// List all tables in the dataset.
|
||||||
#[napi]
|
#[napi(catch_unwind)]
|
||||||
pub async fn table_names(
|
pub async fn table_names(
|
||||||
&self,
|
&self,
|
||||||
start_after: Option<String>,
|
start_after: Option<String>,
|
||||||
@@ -113,7 +113,7 @@ impl Connection {
|
|||||||
/// - name: The name of the table.
|
/// - name: The name of the table.
|
||||||
/// - buf: The buffer containing the IPC file.
|
/// - buf: The buffer containing the IPC file.
|
||||||
///
|
///
|
||||||
#[napi]
|
#[napi(catch_unwind)]
|
||||||
pub async fn create_table(
|
pub async fn create_table(
|
||||||
&self,
|
&self,
|
||||||
name: String,
|
name: String,
|
||||||
@@ -141,7 +141,7 @@ impl Connection {
|
|||||||
Ok(Table::new(tbl))
|
Ok(Table::new(tbl))
|
||||||
}
|
}
|
||||||
|
|
||||||
#[napi]
|
#[napi(catch_unwind)]
|
||||||
pub async fn create_empty_table(
|
pub async fn create_empty_table(
|
||||||
&self,
|
&self,
|
||||||
name: String,
|
name: String,
|
||||||
@@ -173,7 +173,7 @@ impl Connection {
|
|||||||
Ok(Table::new(tbl))
|
Ok(Table::new(tbl))
|
||||||
}
|
}
|
||||||
|
|
||||||
#[napi]
|
#[napi(catch_unwind)]
|
||||||
pub async fn open_table(
|
pub async fn open_table(
|
||||||
&self,
|
&self,
|
||||||
name: String,
|
name: String,
|
||||||
@@ -197,7 +197,7 @@ impl Connection {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Drop table with the name. Or raise an error if the table does not exist.
|
/// Drop table with the name. Or raise an error if the table does not exist.
|
||||||
#[napi]
|
#[napi(catch_unwind)]
|
||||||
pub async fn drop_table(&self, name: String) -> napi::Result<()> {
|
pub async fn drop_table(&self, name: String) -> napi::Result<()> {
|
||||||
self.get_inner()?
|
self.get_inner()?
|
||||||
.drop_table(&name)
|
.drop_table(&name)
|
||||||
|
|||||||
@@ -30,7 +30,7 @@ impl RecordBatchIterator {
|
|||||||
Self { inner }
|
Self { inner }
|
||||||
}
|
}
|
||||||
|
|
||||||
#[napi]
|
#[napi(catch_unwind)]
|
||||||
pub async unsafe fn next(&mut self) -> napi::Result<Option<Buffer>> {
|
pub async unsafe fn next(&mut self) -> napi::Result<Option<Buffer>> {
|
||||||
if let Some(rst) = self.inner.next().await {
|
if let Some(rst) = self.inner.next().await {
|
||||||
let batch = rst.map_err(|e| {
|
let batch = rst.map_err(|e| {
|
||||||
|
|||||||
@@ -31,7 +31,7 @@ impl NativeMergeInsertBuilder {
|
|||||||
this
|
this
|
||||||
}
|
}
|
||||||
|
|
||||||
#[napi]
|
#[napi(catch_unwind)]
|
||||||
pub async fn execute(&self, buf: Buffer) -> napi::Result<()> {
|
pub async fn execute(&self, buf: Buffer) -> napi::Result<()> {
|
||||||
let data = ipc_file_to_batches(buf.to_vec())
|
let data = ipc_file_to_batches(buf.to_vec())
|
||||||
.and_then(IntoArrow::into_arrow)
|
.and_then(IntoArrow::into_arrow)
|
||||||
|
|||||||
@@ -62,7 +62,7 @@ impl Query {
|
|||||||
Ok(VectorQuery { inner })
|
Ok(VectorQuery { inner })
|
||||||
}
|
}
|
||||||
|
|
||||||
#[napi]
|
#[napi(catch_unwind)]
|
||||||
pub async fn execute(
|
pub async fn execute(
|
||||||
&self,
|
&self,
|
||||||
max_batch_length: Option<u32>,
|
max_batch_length: Option<u32>,
|
||||||
@@ -80,6 +80,13 @@ impl Query {
|
|||||||
})?;
|
})?;
|
||||||
Ok(RecordBatchIterator::new(inner_stream))
|
Ok(RecordBatchIterator::new(inner_stream))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[napi]
|
||||||
|
pub async fn explain_plan(&self, verbose: bool) -> napi::Result<String> {
|
||||||
|
self.inner.explain_plan(verbose).await.map_err(|e| {
|
||||||
|
napi::Error::from_reason(format!("Failed to retrieve the query plan: {}", e))
|
||||||
|
})
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[napi]
|
#[napi]
|
||||||
@@ -136,7 +143,7 @@ impl VectorQuery {
|
|||||||
self.inner = self.inner.clone().limit(limit as usize);
|
self.inner = self.inner.clone().limit(limit as usize);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[napi]
|
#[napi(catch_unwind)]
|
||||||
pub async fn execute(
|
pub async fn execute(
|
||||||
&self,
|
&self,
|
||||||
max_batch_length: Option<u32>,
|
max_batch_length: Option<u32>,
|
||||||
@@ -154,4 +161,11 @@ impl VectorQuery {
|
|||||||
})?;
|
})?;
|
||||||
Ok(RecordBatchIterator::new(inner_stream))
|
Ok(RecordBatchIterator::new(inner_stream))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[napi]
|
||||||
|
pub async fn explain_plan(&self, verbose: bool) -> napi::Result<String> {
|
||||||
|
self.inner.explain_plan(verbose).await.map_err(|e| {
|
||||||
|
napi::Error::from_reason(format!("Failed to retrieve the query plan: {}", e))
|
||||||
|
})
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -70,7 +70,7 @@ impl Table {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Return Schema as empty Arrow IPC file.
|
/// Return Schema as empty Arrow IPC file.
|
||||||
#[napi]
|
#[napi(catch_unwind)]
|
||||||
pub async fn schema(&self) -> napi::Result<Buffer> {
|
pub async fn schema(&self) -> napi::Result<Buffer> {
|
||||||
let schema =
|
let schema =
|
||||||
self.inner_ref()?.schema().await.map_err(|e| {
|
self.inner_ref()?.schema().await.map_err(|e| {
|
||||||
@@ -86,7 +86,7 @@ impl Table {
|
|||||||
})?))
|
})?))
|
||||||
}
|
}
|
||||||
|
|
||||||
#[napi]
|
#[napi(catch_unwind)]
|
||||||
pub async fn add(&self, buf: Buffer, mode: String) -> napi::Result<()> {
|
pub async fn add(&self, buf: Buffer, mode: String) -> napi::Result<()> {
|
||||||
let batches = ipc_file_to_batches(buf.to_vec())
|
let batches = ipc_file_to_batches(buf.to_vec())
|
||||||
.map_err(|e| napi::Error::from_reason(format!("Failed to read IPC file: {}", e)))?;
|
.map_err(|e| napi::Error::from_reason(format!("Failed to read IPC file: {}", e)))?;
|
||||||
@@ -108,7 +108,7 @@ impl Table {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
#[napi]
|
#[napi(catch_unwind)]
|
||||||
pub async fn count_rows(&self, filter: Option<String>) -> napi::Result<i64> {
|
pub async fn count_rows(&self, filter: Option<String>) -> napi::Result<i64> {
|
||||||
self.inner_ref()?
|
self.inner_ref()?
|
||||||
.count_rows(filter)
|
.count_rows(filter)
|
||||||
@@ -122,7 +122,7 @@ impl Table {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
#[napi]
|
#[napi(catch_unwind)]
|
||||||
pub async fn delete(&self, predicate: String) -> napi::Result<()> {
|
pub async fn delete(&self, predicate: String) -> napi::Result<()> {
|
||||||
self.inner_ref()?.delete(&predicate).await.map_err(|e| {
|
self.inner_ref()?.delete(&predicate).await.map_err(|e| {
|
||||||
napi::Error::from_reason(format!(
|
napi::Error::from_reason(format!(
|
||||||
@@ -132,7 +132,7 @@ impl Table {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
#[napi]
|
#[napi(catch_unwind)]
|
||||||
pub async fn create_index(
|
pub async fn create_index(
|
||||||
&self,
|
&self,
|
||||||
index: Option<&Index>,
|
index: Option<&Index>,
|
||||||
@@ -151,7 +151,7 @@ impl Table {
|
|||||||
builder.execute().await.default_error()
|
builder.execute().await.default_error()
|
||||||
}
|
}
|
||||||
|
|
||||||
#[napi]
|
#[napi(catch_unwind)]
|
||||||
pub async fn update(
|
pub async fn update(
|
||||||
&self,
|
&self,
|
||||||
only_if: Option<String>,
|
only_if: Option<String>,
|
||||||
@@ -167,17 +167,17 @@ impl Table {
|
|||||||
op.execute().await.default_error()
|
op.execute().await.default_error()
|
||||||
}
|
}
|
||||||
|
|
||||||
#[napi]
|
#[napi(catch_unwind)]
|
||||||
pub fn query(&self) -> napi::Result<Query> {
|
pub fn query(&self) -> napi::Result<Query> {
|
||||||
Ok(Query::new(self.inner_ref()?.query()))
|
Ok(Query::new(self.inner_ref()?.query()))
|
||||||
}
|
}
|
||||||
|
|
||||||
#[napi]
|
#[napi(catch_unwind)]
|
||||||
pub fn vector_search(&self, vector: Float32Array) -> napi::Result<VectorQuery> {
|
pub fn vector_search(&self, vector: Float32Array) -> napi::Result<VectorQuery> {
|
||||||
self.query()?.nearest_to(vector)
|
self.query()?.nearest_to(vector)
|
||||||
}
|
}
|
||||||
|
|
||||||
#[napi]
|
#[napi(catch_unwind)]
|
||||||
pub async fn add_columns(&self, transforms: Vec<AddColumnsSql>) -> napi::Result<()> {
|
pub async fn add_columns(&self, transforms: Vec<AddColumnsSql>) -> napi::Result<()> {
|
||||||
let transforms = transforms
|
let transforms = transforms
|
||||||
.into_iter()
|
.into_iter()
|
||||||
@@ -196,7 +196,7 @@ impl Table {
|
|||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
#[napi]
|
#[napi(catch_unwind)]
|
||||||
pub async fn alter_columns(&self, alterations: Vec<ColumnAlteration>) -> napi::Result<()> {
|
pub async fn alter_columns(&self, alterations: Vec<ColumnAlteration>) -> napi::Result<()> {
|
||||||
for alteration in &alterations {
|
for alteration in &alterations {
|
||||||
if alteration.rename.is_none() && alteration.nullable.is_none() {
|
if alteration.rename.is_none() && alteration.nullable.is_none() {
|
||||||
@@ -222,7 +222,7 @@ impl Table {
|
|||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
#[napi]
|
#[napi(catch_unwind)]
|
||||||
pub async fn drop_columns(&self, columns: Vec<String>) -> napi::Result<()> {
|
pub async fn drop_columns(&self, columns: Vec<String>) -> napi::Result<()> {
|
||||||
let col_refs = columns.iter().map(String::as_str).collect::<Vec<_>>();
|
let col_refs = columns.iter().map(String::as_str).collect::<Vec<_>>();
|
||||||
self.inner_ref()?
|
self.inner_ref()?
|
||||||
@@ -237,7 +237,7 @@ impl Table {
|
|||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
#[napi]
|
#[napi(catch_unwind)]
|
||||||
pub async fn version(&self) -> napi::Result<i64> {
|
pub async fn version(&self) -> napi::Result<i64> {
|
||||||
self.inner_ref()?
|
self.inner_ref()?
|
||||||
.version()
|
.version()
|
||||||
@@ -246,7 +246,7 @@ impl Table {
|
|||||||
.default_error()
|
.default_error()
|
||||||
}
|
}
|
||||||
|
|
||||||
#[napi]
|
#[napi(catch_unwind)]
|
||||||
pub async fn checkout(&self, version: i64) -> napi::Result<()> {
|
pub async fn checkout(&self, version: i64) -> napi::Result<()> {
|
||||||
self.inner_ref()?
|
self.inner_ref()?
|
||||||
.checkout(version as u64)
|
.checkout(version as u64)
|
||||||
@@ -254,17 +254,17 @@ impl Table {
|
|||||||
.default_error()
|
.default_error()
|
||||||
}
|
}
|
||||||
|
|
||||||
#[napi]
|
#[napi(catch_unwind)]
|
||||||
pub async fn checkout_latest(&self) -> napi::Result<()> {
|
pub async fn checkout_latest(&self) -> napi::Result<()> {
|
||||||
self.inner_ref()?.checkout_latest().await.default_error()
|
self.inner_ref()?.checkout_latest().await.default_error()
|
||||||
}
|
}
|
||||||
|
|
||||||
#[napi]
|
#[napi(catch_unwind)]
|
||||||
pub async fn restore(&self) -> napi::Result<()> {
|
pub async fn restore(&self) -> napi::Result<()> {
|
||||||
self.inner_ref()?.restore().await.default_error()
|
self.inner_ref()?.restore().await.default_error()
|
||||||
}
|
}
|
||||||
|
|
||||||
#[napi]
|
#[napi(catch_unwind)]
|
||||||
pub async fn optimize(&self, older_than_ms: Option<i64>) -> napi::Result<OptimizeStats> {
|
pub async fn optimize(&self, older_than_ms: Option<i64>) -> napi::Result<OptimizeStats> {
|
||||||
let inner = self.inner_ref()?;
|
let inner = self.inner_ref()?;
|
||||||
|
|
||||||
@@ -318,7 +318,7 @@ impl Table {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
#[napi]
|
#[napi(catch_unwind)]
|
||||||
pub async fn list_indices(&self) -> napi::Result<Vec<IndexConfig>> {
|
pub async fn list_indices(&self) -> napi::Result<Vec<IndexConfig>> {
|
||||||
Ok(self
|
Ok(self
|
||||||
.inner_ref()?
|
.inner_ref()?
|
||||||
@@ -330,14 +330,14 @@ impl Table {
|
|||||||
.collect::<Vec<_>>())
|
.collect::<Vec<_>>())
|
||||||
}
|
}
|
||||||
|
|
||||||
#[napi]
|
#[napi(catch_unwind)]
|
||||||
pub async fn index_stats(&self, index_name: String) -> napi::Result<Option<IndexStatistics>> {
|
pub async fn index_stats(&self, index_name: String) -> napi::Result<Option<IndexStatistics>> {
|
||||||
let tbl = self.inner_ref()?.as_native().unwrap();
|
let tbl = self.inner_ref()?.as_native().unwrap();
|
||||||
let stats = tbl.index_stats(&index_name).await.default_error()?;
|
let stats = tbl.index_stats(&index_name).await.default_error()?;
|
||||||
Ok(stats.map(IndexStatistics::from))
|
Ok(stats.map(IndexStatistics::from))
|
||||||
}
|
}
|
||||||
|
|
||||||
#[napi]
|
#[napi(catch_unwind)]
|
||||||
pub fn merge_insert(&self, on: Vec<String>) -> napi::Result<NativeMergeInsertBuilder> {
|
pub fn merge_insert(&self, on: Vec<String>) -> napi::Result<NativeMergeInsertBuilder> {
|
||||||
let on: Vec<_> = on.iter().map(String::as_str).collect();
|
let on: Vec<_> = on.iter().map(String::as_str).collect();
|
||||||
Ok(self.inner_ref()?.merge_insert(on.as_slice()).into())
|
Ok(self.inner_ref()?.merge_insert(on.as_slice()).into())
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
[tool.bumpversion]
|
[tool.bumpversion]
|
||||||
current_version = "0.9.0"
|
current_version = "0.10.1"
|
||||||
parse = """(?x)
|
parse = """(?x)
|
||||||
(?P<major>0|[1-9]\\d*)\\.
|
(?P<major>0|[1-9]\\d*)\\.
|
||||||
(?P<minor>0|[1-9]\\d*)\\.
|
(?P<minor>0|[1-9]\\d*)\\.
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
[package]
|
[package]
|
||||||
name = "lancedb-python"
|
name = "lancedb-python"
|
||||||
version = "0.9.0"
|
version = "0.10.1"
|
||||||
edition.workspace = true
|
edition.workspace = true
|
||||||
description = "Python bindings for LanceDB"
|
description = "Python bindings for LanceDB"
|
||||||
license.workspace = true
|
license.workspace = true
|
||||||
|
|||||||
@@ -3,7 +3,7 @@ name = "lancedb"
|
|||||||
# version in Cargo.toml
|
# version in Cargo.toml
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"deprecation",
|
"deprecation",
|
||||||
"pylance==0.13.0",
|
"pylance==0.14.1",
|
||||||
"ratelimiter~=1.0",
|
"ratelimiter~=1.0",
|
||||||
"requests>=2.31.0",
|
"requests>=2.31.0",
|
||||||
"retry>=0.9.2",
|
"retry>=0.9.2",
|
||||||
|
|||||||
@@ -15,7 +15,7 @@ import importlib.metadata
|
|||||||
import os
|
import os
|
||||||
from concurrent.futures import ThreadPoolExecutor
|
from concurrent.futures import ThreadPoolExecutor
|
||||||
from datetime import timedelta
|
from datetime import timedelta
|
||||||
from typing import Dict, Optional, Union
|
from typing import Dict, Optional, Union, Any
|
||||||
|
|
||||||
__version__ = importlib.metadata.version("lancedb")
|
__version__ = importlib.metadata.version("lancedb")
|
||||||
|
|
||||||
@@ -35,7 +35,7 @@ def connect(
|
|||||||
host_override: Optional[str] = None,
|
host_override: Optional[str] = None,
|
||||||
read_consistency_interval: Optional[timedelta] = None,
|
read_consistency_interval: Optional[timedelta] = None,
|
||||||
request_thread_pool: Optional[Union[int, ThreadPoolExecutor]] = None,
|
request_thread_pool: Optional[Union[int, ThreadPoolExecutor]] = None,
|
||||||
**kwargs,
|
**kwargs: Any,
|
||||||
) -> DBConnection:
|
) -> DBConnection:
|
||||||
"""Connect to a LanceDB database.
|
"""Connect to a LanceDB database.
|
||||||
|
|
||||||
|
|||||||
@@ -28,12 +28,11 @@ from lancedb.common import data_to_reader, validate_schema
|
|||||||
|
|
||||||
from ._lancedb import connect as lancedb_connect
|
from ._lancedb import connect as lancedb_connect
|
||||||
from .pydantic import LanceModel
|
from .pydantic import LanceModel
|
||||||
from .table import AsyncTable, LanceTable, Table, _sanitize_data
|
from .table import AsyncTable, LanceTable, Table, _sanitize_data, _table_path
|
||||||
from .util import (
|
from .util import (
|
||||||
fs_from_uri,
|
fs_from_uri,
|
||||||
get_uri_location,
|
get_uri_location,
|
||||||
get_uri_scheme,
|
get_uri_scheme,
|
||||||
join_uri,
|
|
||||||
validate_table_name,
|
validate_table_name,
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -457,16 +456,18 @@ class LanceDBConnection(DBConnection):
|
|||||||
If True, ignore if the table does not exist.
|
If True, ignore if the table does not exist.
|
||||||
"""
|
"""
|
||||||
try:
|
try:
|
||||||
filesystem, path = fs_from_uri(self.uri)
|
table_uri = _table_path(self.uri, name)
|
||||||
table_path = join_uri(path, name + ".lance")
|
filesystem, path = fs_from_uri(table_uri)
|
||||||
filesystem.delete_dir(table_path)
|
filesystem.delete_dir(path)
|
||||||
except FileNotFoundError:
|
except FileNotFoundError:
|
||||||
if not ignore_missing:
|
if not ignore_missing:
|
||||||
raise
|
raise
|
||||||
|
|
||||||
@override
|
@override
|
||||||
def drop_database(self):
|
def drop_database(self):
|
||||||
filesystem, path = fs_from_uri(self.uri)
|
dummy_table_uri = _table_path(self.uri, "dummy")
|
||||||
|
uri = dummy_table_uri.removesuffix("dummy.lance")
|
||||||
|
filesystem, path = fs_from_uri(uri)
|
||||||
filesystem.delete_dir(path)
|
filesystem.delete_dir(path)
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -25,3 +25,4 @@ from .gte import GteEmbeddings
|
|||||||
from .transformers import TransformersEmbeddingFunction, ColbertEmbeddings
|
from .transformers import TransformersEmbeddingFunction, ColbertEmbeddings
|
||||||
from .imagebind import ImageBindEmbeddings
|
from .imagebind import ImageBindEmbeddings
|
||||||
from .utils import with_embeddings
|
from .utils import with_embeddings
|
||||||
|
from .jinaai import JinaEmbeddings
|
||||||
|
|||||||
236
python/python/lancedb/embeddings/jinaai.py
Normal file
236
python/python/lancedb/embeddings/jinaai.py
Normal file
@@ -0,0 +1,236 @@
|
|||||||
|
# Copyright (c) 2023. LanceDB Developers
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
import os
|
||||||
|
import io
|
||||||
|
import requests
|
||||||
|
import base64
|
||||||
|
from urllib.parse import urlparse
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import TYPE_CHECKING, ClassVar, List, Union, Optional, Any, Dict
|
||||||
|
|
||||||
|
import numpy as np
|
||||||
|
import pyarrow as pa
|
||||||
|
|
||||||
|
from ..util import attempt_import_or_raise
|
||||||
|
from .base import EmbeddingFunction
|
||||||
|
from .registry import register
|
||||||
|
from .utils import api_key_not_found_help, TEXT, IMAGES, url_retrieve
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
import PIL
|
||||||
|
|
||||||
|
API_URL = "https://api.jina.ai/v1/embeddings"
|
||||||
|
|
||||||
|
|
||||||
|
def is_valid_url(text):
|
||||||
|
try:
|
||||||
|
parsed = urlparse(text)
|
||||||
|
return bool(parsed.scheme) and bool(parsed.netloc)
|
||||||
|
except Exception:
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
@register("jina")
|
||||||
|
class JinaEmbeddings(EmbeddingFunction):
|
||||||
|
"""
|
||||||
|
An embedding function that uses the Jina API
|
||||||
|
|
||||||
|
https://jina.ai/embeddings/
|
||||||
|
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
name: str, default "jina-clip-v1". Note that some models support both image
|
||||||
|
and text embeddings and some just text embedding
|
||||||
|
|
||||||
|
api_key: str, default None
|
||||||
|
The api key to access Jina API. If you pass None, you can set JINA_API_KEY
|
||||||
|
environment variable
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
name: str = "jina-clip-v1"
|
||||||
|
api_key: Optional[str] = None
|
||||||
|
_session: ClassVar = None
|
||||||
|
|
||||||
|
def ndims(self):
|
||||||
|
# TODO: fix hardcoding
|
||||||
|
return 768
|
||||||
|
|
||||||
|
def sanitize_input(
|
||||||
|
self, inputs: Union[TEXT, IMAGES]
|
||||||
|
) -> Union[List[Any], np.ndarray]:
|
||||||
|
"""
|
||||||
|
Sanitize the input to the embedding function.
|
||||||
|
"""
|
||||||
|
if isinstance(inputs, (str, bytes, Path)):
|
||||||
|
inputs = [inputs]
|
||||||
|
elif isinstance(inputs, pa.Array):
|
||||||
|
inputs = inputs.to_pylist()
|
||||||
|
elif isinstance(inputs, pa.ChunkedArray):
|
||||||
|
inputs = inputs.combine_chunks().to_pylist()
|
||||||
|
else:
|
||||||
|
if isinstance(inputs, list):
|
||||||
|
inputs = inputs
|
||||||
|
else:
|
||||||
|
PIL = attempt_import_or_raise("PIL", "pillow")
|
||||||
|
if isinstance(inputs, PIL.Image.Image):
|
||||||
|
inputs = [inputs]
|
||||||
|
return inputs
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _generate_image_input_dict(image: Union[str, bytes, "PIL.Image.Image"]) -> Dict:
|
||||||
|
if isinstance(image, bytes):
|
||||||
|
image_dict = {"image": base64.b64encode(image).decode("utf-8")}
|
||||||
|
elif isinstance(image, (str, Path)):
|
||||||
|
parsed = urlparse.urlparse(image)
|
||||||
|
# TODO handle drive letter on windows.
|
||||||
|
PIL = attempt_import_or_raise("PIL", "pillow")
|
||||||
|
if parsed.scheme == "file":
|
||||||
|
pil_image = PIL.Image.open(parsed.path)
|
||||||
|
elif parsed.scheme == "":
|
||||||
|
pil_image = PIL.Image.open(image if os.name == "nt" else parsed.path)
|
||||||
|
elif parsed.scheme.startswith("http"):
|
||||||
|
pil_image = PIL.Image.open(io.BytesIO(url_retrieve(image)))
|
||||||
|
else:
|
||||||
|
raise NotImplementedError("Only local and http(s) urls are supported")
|
||||||
|
buffered = io.BytesIO()
|
||||||
|
pil_image.save(buffered, format="PNG")
|
||||||
|
image_bytes = buffered.getvalue()
|
||||||
|
image_dict = {"image": base64.b64encode(image_bytes).decode("utf-8")}
|
||||||
|
else:
|
||||||
|
PIL = attempt_import_or_raise("PIL", "pillow")
|
||||||
|
|
||||||
|
if isinstance(image, PIL.Image.Image):
|
||||||
|
buffered = io.BytesIO()
|
||||||
|
image.save(buffered, format="PNG")
|
||||||
|
image_bytes = buffered.getvalue()
|
||||||
|
image_dict = {"image": base64.b64encode(image_bytes).decode("utf-8")}
|
||||||
|
else:
|
||||||
|
raise TypeError(
|
||||||
|
f"JinaEmbeddingFunction supports str, Path, bytes or PIL Image"
|
||||||
|
f" as query, but {type(image)} is given"
|
||||||
|
)
|
||||||
|
return image_dict
|
||||||
|
|
||||||
|
def compute_query_embeddings(
|
||||||
|
self, query: Union[str, bytes, "Path", "PIL.Image.Image"], *args, **kwargs
|
||||||
|
) -> List[np.ndarray]:
|
||||||
|
"""
|
||||||
|
Compute the embeddings for a given user query
|
||||||
|
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
query : Union[str, PIL.Image.Image]
|
||||||
|
The query to embed. A query can be either text or an image.
|
||||||
|
"""
|
||||||
|
if isinstance(query, str):
|
||||||
|
if not is_valid_url(query):
|
||||||
|
return self.generate_text_embeddings([query])
|
||||||
|
else:
|
||||||
|
return [self.generate_image_embedding(query)]
|
||||||
|
elif isinstance(query, (Path, bytes)):
|
||||||
|
return [self.generate_image_embedding(query)]
|
||||||
|
else:
|
||||||
|
PIL = attempt_import_or_raise("PIL", "pillow")
|
||||||
|
|
||||||
|
if isinstance(query, PIL.Image.Image):
|
||||||
|
return [self.generate_image_embedding(query)]
|
||||||
|
else:
|
||||||
|
raise TypeError(
|
||||||
|
f"JinaEmbeddingFunction supports str, Path, bytes or PIL Image"
|
||||||
|
f" as query, but {type(query)} is given"
|
||||||
|
)
|
||||||
|
|
||||||
|
def compute_source_embeddings(
|
||||||
|
self, inputs: Union[TEXT, IMAGES], *args, **kwargs
|
||||||
|
) -> List[np.array]:
|
||||||
|
inputs = self.sanitize_input(inputs)
|
||||||
|
model_inputs = []
|
||||||
|
image_inputs = 0
|
||||||
|
|
||||||
|
def process_input(input, model_inputs, image_inputs):
|
||||||
|
if isinstance(input, str):
|
||||||
|
if not is_valid_url(input):
|
||||||
|
model_inputs.append({"text": input})
|
||||||
|
else:
|
||||||
|
image_inputs += 1
|
||||||
|
model_inputs.append(self._generate_image_input_dict(input))
|
||||||
|
elif isinstance(input, list):
|
||||||
|
for _input in input:
|
||||||
|
image_inputs = process_input(_input, model_inputs, image_inputs)
|
||||||
|
else:
|
||||||
|
image_inputs += 1
|
||||||
|
model_inputs.append(self._generate_image_input_dict(input))
|
||||||
|
return image_inputs
|
||||||
|
|
||||||
|
for input in inputs:
|
||||||
|
image_inputs = process_input(input, model_inputs, image_inputs)
|
||||||
|
|
||||||
|
if image_inputs > 0:
|
||||||
|
return self._generate_embeddings(model_inputs)
|
||||||
|
else:
|
||||||
|
return self.generate_text_embeddings(inputs)
|
||||||
|
|
||||||
|
def generate_image_embedding(
|
||||||
|
self, image: Union[str, bytes, Path, "PIL.Image.Image"]
|
||||||
|
) -> np.ndarray:
|
||||||
|
"""
|
||||||
|
Generate the embedding for a single image
|
||||||
|
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
image : Union[str, bytes, PIL.Image.Image]
|
||||||
|
The image to embed. If the image is a str, it is treated as a uri.
|
||||||
|
If the image is bytes, it is treated as the raw image bytes.
|
||||||
|
"""
|
||||||
|
image_dict = self._generate_image_input_dict(image)
|
||||||
|
return self._generate_embeddings(input=[image_dict])[0]
|
||||||
|
|
||||||
|
def generate_text_embeddings(
|
||||||
|
self, texts: Union[List[str], np.ndarray], *args, **kwargs
|
||||||
|
) -> List[np.array]:
|
||||||
|
return self._generate_embeddings(input=texts)
|
||||||
|
|
||||||
|
def _generate_embeddings(self, input: List, *args, **kwargs) -> List[np.array]:
|
||||||
|
"""
|
||||||
|
Get the embeddings for the given texts
|
||||||
|
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
texts: list[str] or np.ndarray (of str)
|
||||||
|
The texts to embed
|
||||||
|
"""
|
||||||
|
self._init_client()
|
||||||
|
resp = JinaEmbeddings._session.post( # type: ignore
|
||||||
|
API_URL, json={"input": input, "model": self.name}
|
||||||
|
).json()
|
||||||
|
if "data" not in resp:
|
||||||
|
raise RuntimeError(resp["detail"])
|
||||||
|
|
||||||
|
embeddings = resp["data"]
|
||||||
|
|
||||||
|
# Sort resulting embeddings by index
|
||||||
|
sorted_embeddings = sorted(embeddings, key=lambda e: e["index"]) # type: ignore
|
||||||
|
|
||||||
|
return [result["embedding"] for result in sorted_embeddings]
|
||||||
|
|
||||||
|
def _init_client(self):
|
||||||
|
if JinaEmbeddings._session is None:
|
||||||
|
if self.api_key is None and os.environ.get("JINA_API_KEY") is None:
|
||||||
|
api_key_not_found_help("jina")
|
||||||
|
api_key = self.api_key or os.environ.get("JINA_API_KEY")
|
||||||
|
JinaEmbeddings._session = requests.Session()
|
||||||
|
JinaEmbeddings._session.headers.update(
|
||||||
|
{"Authorization": f"Bearer {api_key}", "Accept-Encoding": "identity"}
|
||||||
|
)
|
||||||
@@ -417,6 +417,40 @@ class LanceQueryBuilder(ABC):
|
|||||||
self._with_row_id = with_row_id
|
self._with_row_id = with_row_id
|
||||||
return self
|
return self
|
||||||
|
|
||||||
|
def explain_plan(self, verbose: Optional[bool] = False) -> str:
|
||||||
|
"""Return the execution plan for this query.
|
||||||
|
|
||||||
|
Examples
|
||||||
|
--------
|
||||||
|
>>> import lancedb
|
||||||
|
>>> db = lancedb.connect("./.lancedb")
|
||||||
|
>>> table = db.create_table("my_table", [{"vector": [99, 99]}])
|
||||||
|
>>> query = [100, 100]
|
||||||
|
>>> plan = table.search(query).explain_plan(True)
|
||||||
|
>>> print(plan) # doctest: +ELLIPSIS, +NORMALIZE_WHITESPACE
|
||||||
|
Projection: fields=[vector, _distance]
|
||||||
|
FilterExec: _distance@2 IS NOT NULL
|
||||||
|
SortExec: TopK(fetch=10), expr=[_distance@2 ASC NULLS LAST]
|
||||||
|
KNNVectorDistance: metric=l2
|
||||||
|
LanceScan: uri=..., projection=[vector], row_id=true, row_addr=false, ordered=false
|
||||||
|
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
verbose : bool, default False
|
||||||
|
Use a verbose output format.
|
||||||
|
|
||||||
|
Returns
|
||||||
|
-------
|
||||||
|
plan : str
|
||||||
|
""" # noqa: E501
|
||||||
|
ds = self._table.to_lance()
|
||||||
|
return ds.scanner(
|
||||||
|
nearest={
|
||||||
|
"column": self._vector_column,
|
||||||
|
"q": self._query,
|
||||||
|
},
|
||||||
|
).explain_plan(verbose)
|
||||||
|
|
||||||
|
|
||||||
class LanceVectorQueryBuilder(LanceQueryBuilder):
|
class LanceVectorQueryBuilder(LanceQueryBuilder):
|
||||||
"""
|
"""
|
||||||
@@ -1166,6 +1200,37 @@ class AsyncQueryBase(object):
|
|||||||
"""
|
"""
|
||||||
return (await self.to_arrow()).to_pandas()
|
return (await self.to_arrow()).to_pandas()
|
||||||
|
|
||||||
|
async def explain_plan(self, verbose: Optional[bool] = False):
|
||||||
|
"""Return the execution plan for this query.
|
||||||
|
|
||||||
|
Examples
|
||||||
|
--------
|
||||||
|
>>> import asyncio
|
||||||
|
>>> from lancedb import connect_async
|
||||||
|
>>> async def doctest_example():
|
||||||
|
... conn = await connect_async("./.lancedb")
|
||||||
|
... table = await conn.create_table("my_table", [{"vector": [99, 99]}])
|
||||||
|
... query = [100, 100]
|
||||||
|
... plan = await table.query().nearest_to([1, 2]).explain_plan(True)
|
||||||
|
... print(plan)
|
||||||
|
>>> asyncio.run(doctest_example()) # doctest: +ELLIPSIS, +NORMALIZE_WHITESPACE
|
||||||
|
Projection: fields=[vector, _distance]
|
||||||
|
FilterExec: _distance@2 IS NOT NULL
|
||||||
|
SortExec: TopK(fetch=10), expr=[_distance@2 ASC NULLS LAST]
|
||||||
|
KNNVectorDistance: metric=l2
|
||||||
|
LanceScan: uri=..., projection=[vector], row_id=true, row_addr=false, ordered=false
|
||||||
|
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
verbose : bool, default False
|
||||||
|
Use a verbose output format.
|
||||||
|
|
||||||
|
Returns
|
||||||
|
-------
|
||||||
|
plan : str
|
||||||
|
""" # noqa: E501
|
||||||
|
return await self._inner.explain_plan(verbose)
|
||||||
|
|
||||||
|
|
||||||
class AsyncQuery(AsyncQueryBase):
|
class AsyncQuery(AsyncQueryBase):
|
||||||
def __init__(self, inner: LanceQuery):
|
def __init__(self, inner: LanceQuery):
|
||||||
|
|||||||
@@ -111,6 +111,7 @@ class RemoteTable(Table):
|
|||||||
num_sub_vectors: Optional[int] = None,
|
num_sub_vectors: Optional[int] = None,
|
||||||
replace: Optional[bool] = None,
|
replace: Optional[bool] = None,
|
||||||
accelerator: Optional[str] = None,
|
accelerator: Optional[str] = None,
|
||||||
|
index_type="vector",
|
||||||
):
|
):
|
||||||
"""Create an index on the table.
|
"""Create an index on the table.
|
||||||
Currently, the only parameters that matter are
|
Currently, the only parameters that matter are
|
||||||
@@ -166,7 +167,6 @@ class RemoteTable(Table):
|
|||||||
"replace is not supported on LanceDB cloud."
|
"replace is not supported on LanceDB cloud."
|
||||||
"Existing indexes will always be replaced."
|
"Existing indexes will always be replaced."
|
||||||
)
|
)
|
||||||
index_type = "vector"
|
|
||||||
|
|
||||||
data = {
|
data = {
|
||||||
"column": vector_column_name,
|
"column": vector_column_name,
|
||||||
|
|||||||
@@ -4,6 +4,7 @@ from .colbert import ColbertReranker
|
|||||||
from .cross_encoder import CrossEncoderReranker
|
from .cross_encoder import CrossEncoderReranker
|
||||||
from .linear_combination import LinearCombinationReranker
|
from .linear_combination import LinearCombinationReranker
|
||||||
from .openai import OpenaiReranker
|
from .openai import OpenaiReranker
|
||||||
|
from .jinaai import JinaReranker
|
||||||
|
|
||||||
__all__ = [
|
__all__ = [
|
||||||
"Reranker",
|
"Reranker",
|
||||||
@@ -12,4 +13,5 @@ __all__ = [
|
|||||||
"LinearCombinationReranker",
|
"LinearCombinationReranker",
|
||||||
"OpenaiReranker",
|
"OpenaiReranker",
|
||||||
"ColbertReranker",
|
"ColbertReranker",
|
||||||
|
"JinaReranker",
|
||||||
]
|
]
|
||||||
|
|||||||
122
python/python/lancedb/rerankers/jinaai.py
Normal file
122
python/python/lancedb/rerankers/jinaai.py
Normal file
@@ -0,0 +1,122 @@
|
|||||||
|
import os
|
||||||
|
import requests
|
||||||
|
from functools import cached_property
|
||||||
|
from typing import Union
|
||||||
|
|
||||||
|
import pyarrow as pa
|
||||||
|
|
||||||
|
from .base import Reranker
|
||||||
|
|
||||||
|
API_URL = "https://api.jina.ai/v1/rerank"
|
||||||
|
|
||||||
|
|
||||||
|
class JinaReranker(Reranker):
|
||||||
|
"""
|
||||||
|
Reranks the results using the Jina Rerank API.
|
||||||
|
https://jina.ai/rerank
|
||||||
|
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
model_name : str, default "jina-reranker-v2-base-multilingual"
|
||||||
|
The name of the cross reanker model to use
|
||||||
|
column : str, default "text"
|
||||||
|
The name of the column to use as input to the cross encoder model.
|
||||||
|
top_n : str, default None
|
||||||
|
The number of results to return. If None, will return all results.
|
||||||
|
api_key : str, default None
|
||||||
|
The api key to access Jina API. If you pass None, you can set JINA_API_KEY
|
||||||
|
environment variable
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
model_name: str = "jina-reranker-v2-base-multilingual",
|
||||||
|
column: str = "text",
|
||||||
|
top_n: Union[int, None] = None,
|
||||||
|
return_score="relevance",
|
||||||
|
api_key: Union[str, None] = None,
|
||||||
|
):
|
||||||
|
super().__init__(return_score)
|
||||||
|
self.model_name = model_name
|
||||||
|
self.column = column
|
||||||
|
self.top_n = top_n
|
||||||
|
self.api_key = api_key
|
||||||
|
|
||||||
|
@cached_property
|
||||||
|
def _client(self):
|
||||||
|
if os.environ.get("JINA_API_KEY") is None and self.api_key is None:
|
||||||
|
raise ValueError(
|
||||||
|
"JINA_API_KEY not set. Either set it in your environment or \
|
||||||
|
pass it as `api_key` argument to the JinaReranker."
|
||||||
|
)
|
||||||
|
self.api_key = self.api_key or os.environ.get("JINA_API_KEY")
|
||||||
|
self._session = requests.Session()
|
||||||
|
self._session.headers.update(
|
||||||
|
{"Authorization": f"Bearer {self.api_key}", "Accept-Encoding": "identity"}
|
||||||
|
)
|
||||||
|
return self._session
|
||||||
|
|
||||||
|
def _rerank(self, result_set: pa.Table, query: str):
|
||||||
|
docs = result_set[self.column].to_pylist()
|
||||||
|
response = self._client.post( # type: ignore
|
||||||
|
API_URL,
|
||||||
|
json={
|
||||||
|
"query": query,
|
||||||
|
"documents": docs,
|
||||||
|
"model": self.model_name,
|
||||||
|
"top_n": self.top_n,
|
||||||
|
},
|
||||||
|
).json()
|
||||||
|
if "results" not in response:
|
||||||
|
raise RuntimeError(response["detail"])
|
||||||
|
|
||||||
|
results = response["results"]
|
||||||
|
|
||||||
|
indices, scores = list(
|
||||||
|
zip(*[(result["index"], result["relevance_score"]) for result in results])
|
||||||
|
) # tuples
|
||||||
|
result_set = result_set.take(list(indices))
|
||||||
|
# add the scores
|
||||||
|
result_set = result_set.append_column(
|
||||||
|
"_relevance_score", pa.array(scores, type=pa.float32())
|
||||||
|
)
|
||||||
|
|
||||||
|
return result_set
|
||||||
|
|
||||||
|
def rerank_hybrid(
|
||||||
|
self,
|
||||||
|
query: str,
|
||||||
|
vector_results: pa.Table,
|
||||||
|
fts_results: pa.Table,
|
||||||
|
):
|
||||||
|
combined_results = self.merge_results(vector_results, fts_results)
|
||||||
|
combined_results = self._rerank(combined_results, query)
|
||||||
|
if self.score == "relevance":
|
||||||
|
combined_results = combined_results.drop_columns(["score", "_distance"])
|
||||||
|
elif self.score == "all":
|
||||||
|
raise NotImplementedError(
|
||||||
|
"return_score='all' not implemented for JinaReranker"
|
||||||
|
)
|
||||||
|
return combined_results
|
||||||
|
|
||||||
|
def rerank_vector(
|
||||||
|
self,
|
||||||
|
query: str,
|
||||||
|
vector_results: pa.Table,
|
||||||
|
):
|
||||||
|
result_set = self._rerank(vector_results, query)
|
||||||
|
if self.score == "relevance":
|
||||||
|
result_set = result_set.drop_columns(["_distance"])
|
||||||
|
|
||||||
|
return result_set
|
||||||
|
|
||||||
|
def rerank_fts(
|
||||||
|
self,
|
||||||
|
query: str,
|
||||||
|
fts_results: pa.Table,
|
||||||
|
):
|
||||||
|
result_set = self._rerank(fts_results, query)
|
||||||
|
if self.score == "relevance":
|
||||||
|
result_set = result_set.drop_columns(["score"])
|
||||||
|
|
||||||
|
return result_set
|
||||||
@@ -1,5 +1,3 @@
|
|||||||
from typing import List
|
|
||||||
|
|
||||||
import pyarrow as pa
|
import pyarrow as pa
|
||||||
|
|
||||||
from .base import Reranker
|
from .base import Reranker
|
||||||
@@ -112,6 +110,6 @@ class LinearCombinationReranker(Reranker):
|
|||||||
# these scores represent distance
|
# these scores represent distance
|
||||||
return 1 - (self.weight * score1 + (1 - self.weight) * score2)
|
return 1 - (self.weight * score1 + (1 - self.weight) * score2)
|
||||||
|
|
||||||
def _invert_score(self, scores: List[float]):
|
def _invert_score(self, score: float):
|
||||||
# Invert the scores between relevance and distance
|
# Invert the score between relevance and distance
|
||||||
return 1 - scores
|
return 1 - score
|
||||||
|
|||||||
@@ -30,6 +30,7 @@ from typing import (
|
|||||||
Tuple,
|
Tuple,
|
||||||
Union,
|
Union,
|
||||||
)
|
)
|
||||||
|
from urllib.parse import urlparse
|
||||||
|
|
||||||
import lance
|
import lance
|
||||||
import numpy as np
|
import numpy as np
|
||||||
@@ -47,6 +48,7 @@ from .pydantic import LanceModel, model_to_dict
|
|||||||
from .query import AsyncQuery, AsyncVectorQuery, LanceQueryBuilder, Query
|
from .query import AsyncQuery, AsyncVectorQuery, LanceQueryBuilder, Query
|
||||||
from .util import (
|
from .util import (
|
||||||
fs_from_uri,
|
fs_from_uri,
|
||||||
|
get_uri_scheme,
|
||||||
inf_vector_column_query,
|
inf_vector_column_query,
|
||||||
join_uri,
|
join_uri,
|
||||||
safe_import_pandas,
|
safe_import_pandas,
|
||||||
@@ -208,6 +210,26 @@ def _to_record_batch_generator(
|
|||||||
yield b
|
yield b
|
||||||
|
|
||||||
|
|
||||||
|
def _table_path(base: str, table_name: str) -> str:
|
||||||
|
"""
|
||||||
|
Get a table path that can be used in PyArrow FS.
|
||||||
|
|
||||||
|
Removes any weird schemes (such as "s3+ddb") and drops any query params.
|
||||||
|
"""
|
||||||
|
uri = _table_uri(base, table_name)
|
||||||
|
# Parse as URL
|
||||||
|
parsed = urlparse(uri)
|
||||||
|
# If scheme is s3+ddb, convert to s3
|
||||||
|
if parsed.scheme == "s3+ddb":
|
||||||
|
parsed = parsed._replace(scheme="s3")
|
||||||
|
# Remove query parameters
|
||||||
|
return parsed._replace(query=None).geturl()
|
||||||
|
|
||||||
|
|
||||||
|
def _table_uri(base: str, table_name: str) -> str:
|
||||||
|
return join_uri(base, f"{table_name}.lance")
|
||||||
|
|
||||||
|
|
||||||
class Table(ABC):
|
class Table(ABC):
|
||||||
"""
|
"""
|
||||||
A Table is a collection of Records in a LanceDB Database.
|
A Table is a collection of Records in a LanceDB Database.
|
||||||
@@ -908,7 +930,7 @@ class LanceTable(Table):
|
|||||||
@classmethod
|
@classmethod
|
||||||
def open(cls, db, name, **kwargs):
|
def open(cls, db, name, **kwargs):
|
||||||
tbl = cls(db, name, **kwargs)
|
tbl = cls(db, name, **kwargs)
|
||||||
fs, path = fs_from_uri(tbl._dataset_uri)
|
fs, path = fs_from_uri(tbl._dataset_path)
|
||||||
file_info = fs.get_file_info(path)
|
file_info = fs.get_file_info(path)
|
||||||
if file_info.type != pa.fs.FileType.Directory:
|
if file_info.type != pa.fs.FileType.Directory:
|
||||||
raise FileNotFoundError(
|
raise FileNotFoundError(
|
||||||
@@ -918,9 +940,14 @@ class LanceTable(Table):
|
|||||||
|
|
||||||
return tbl
|
return tbl
|
||||||
|
|
||||||
@property
|
@cached_property
|
||||||
|
def _dataset_path(self) -> str:
|
||||||
|
# Cacheable since it's deterministic
|
||||||
|
return _table_path(self._conn.uri, self.name)
|
||||||
|
|
||||||
|
@cached_property
|
||||||
def _dataset_uri(self) -> str:
|
def _dataset_uri(self) -> str:
|
||||||
return join_uri(self._conn.uri, f"{self.name}.lance")
|
return _table_uri(self._conn.uri, self.name)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def _dataset(self) -> LanceDataset:
|
def _dataset(self) -> LanceDataset:
|
||||||
@@ -1146,11 +1173,12 @@ class LanceTable(Table):
|
|||||||
replace: bool = True,
|
replace: bool = True,
|
||||||
accelerator: Optional[str] = None,
|
accelerator: Optional[str] = None,
|
||||||
index_cache_size: Optional[int] = None,
|
index_cache_size: Optional[int] = None,
|
||||||
|
index_type="IVF_PQ",
|
||||||
):
|
):
|
||||||
"""Create an index on the table."""
|
"""Create an index on the table."""
|
||||||
self._dataset_mut.create_index(
|
self._dataset_mut.create_index(
|
||||||
column=vector_column_name,
|
column=vector_column_name,
|
||||||
index_type="IVF_PQ",
|
index_type=index_type,
|
||||||
metric=metric,
|
metric=metric,
|
||||||
num_partitions=num_partitions,
|
num_partitions=num_partitions,
|
||||||
num_sub_vectors=num_sub_vectors,
|
num_sub_vectors=num_sub_vectors,
|
||||||
@@ -1230,6 +1258,10 @@ class LanceTable(Table):
|
|||||||
)
|
)
|
||||||
|
|
||||||
def _get_fts_index_path(self):
|
def _get_fts_index_path(self):
|
||||||
|
if get_uri_scheme(self._dataset_uri) != "file":
|
||||||
|
raise NotImplementedError(
|
||||||
|
"Full-text search is not supported on object stores."
|
||||||
|
)
|
||||||
return join_uri(self._dataset_uri, "_indices", "tantivy")
|
return join_uri(self._dataset_uri, "_indices", "tantivy")
|
||||||
|
|
||||||
def add(
|
def add(
|
||||||
|
|||||||
@@ -1,16 +1,7 @@
|
|||||||
# Copyright 2023 LanceDB Developers
|
# SPDX-License-Identifier: Apache-2.0
|
||||||
#
|
# SPDX-FileCopyrightText: Copyright The Lance Authors
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
# you may not use this file except in compliance with the License.
|
|
||||||
# You may obtain a copy of the License at
|
|
||||||
# http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
#
|
|
||||||
# Unless required by applicable law or agreed to in writing, software
|
|
||||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
# See the License for the specific language governing permissions and
|
|
||||||
# limitations under the License.
|
|
||||||
|
|
||||||
|
import binascii
|
||||||
import functools
|
import functools
|
||||||
import importlib
|
import importlib
|
||||||
import os
|
import os
|
||||||
@@ -139,8 +130,11 @@ def join_uri(base: Union[str, pathlib.Path], *parts: str) -> str:
|
|||||||
# using pathlib for local paths make this windows compatible
|
# using pathlib for local paths make this windows compatible
|
||||||
# `get_uri_scheme` returns `file` for windows drive names (e.g. `c:\path`)
|
# `get_uri_scheme` returns `file` for windows drive names (e.g. `c:\path`)
|
||||||
return str(pathlib.Path(base, *parts))
|
return str(pathlib.Path(base, *parts))
|
||||||
# for remote paths, just use os.path.join
|
else:
|
||||||
return "/".join([p.rstrip("/") for p in [base, *parts]])
|
# there might be query parameters in the base URI
|
||||||
|
url = urlparse(base)
|
||||||
|
new_path = "/".join([p.rstrip("/") for p in [url.path, *parts]])
|
||||||
|
return url._replace(path=new_path).geturl()
|
||||||
|
|
||||||
|
|
||||||
def attempt_import_or_raise(module: str, mitigation=None):
|
def attempt_import_or_raise(module: str, mitigation=None):
|
||||||
@@ -228,6 +222,15 @@ def _(value: str):
|
|||||||
return f"'{value}'"
|
return f"'{value}'"
|
||||||
|
|
||||||
|
|
||||||
|
@value_to_sql.register(bytes)
|
||||||
|
def _(value: bytes):
|
||||||
|
"""Convert bytes to a hex string literal.
|
||||||
|
|
||||||
|
See https://datafusion.apache.org/user-guide/sql/data_types.html#binary-types
|
||||||
|
"""
|
||||||
|
return f"X'{binascii.hexlify(value).decode()}'"
|
||||||
|
|
||||||
|
|
||||||
@value_to_sql.register(int)
|
@value_to_sql.register(int)
|
||||||
def _(value: int):
|
def _(value: int):
|
||||||
return str(value)
|
return str(value)
|
||||||
|
|||||||
@@ -333,3 +333,15 @@ async def test_query_to_pandas_async(table_async: AsyncTable):
|
|||||||
|
|
||||||
df = await table_async.query().where("id < 0").to_pandas()
|
df = await table_async.query().where("id < 0").to_pandas()
|
||||||
assert df.shape == (0, 4)
|
assert df.shape == (0, 4)
|
||||||
|
|
||||||
|
|
||||||
|
def test_explain_plan(table):
|
||||||
|
q = LanceVectorQueryBuilder(table, [0, 0], "vector")
|
||||||
|
plan = q.explain_plan(verbose=True)
|
||||||
|
assert "KNN" in plan
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_explain_plan_async(table_async: AsyncTable):
|
||||||
|
plan = await table_async.query().nearest_to(pa.array([1, 2])).explain_plan(True)
|
||||||
|
assert "KNN" in plan
|
||||||
|
|||||||
@@ -13,6 +13,8 @@
|
|||||||
|
|
||||||
import asyncio
|
import asyncio
|
||||||
import copy
|
import copy
|
||||||
|
from datetime import timedelta
|
||||||
|
import threading
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
import pyarrow as pa
|
import pyarrow as pa
|
||||||
@@ -25,6 +27,7 @@ CONFIG = {
|
|||||||
"aws_access_key_id": "ACCESSKEY",
|
"aws_access_key_id": "ACCESSKEY",
|
||||||
"aws_secret_access_key": "SECRETKEY",
|
"aws_secret_access_key": "SECRETKEY",
|
||||||
"aws_endpoint": "http://localhost:4566",
|
"aws_endpoint": "http://localhost:4566",
|
||||||
|
"dynamodb_endpoint": "http://localhost:4566",
|
||||||
"aws_region": "us-east-1",
|
"aws_region": "us-east-1",
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -156,3 +159,104 @@ def test_s3_sse(s3_bucket: str, kms_key: str):
|
|||||||
validate_objects_encrypted(s3_bucket, path, kms_key)
|
validate_objects_encrypted(s3_bucket, path, kms_key)
|
||||||
|
|
||||||
asyncio.run(test())
|
asyncio.run(test())
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(scope="module")
|
||||||
|
def commit_table():
|
||||||
|
ddb = get_boto3_client("dynamodb", endpoint_url=CONFIG["dynamodb_endpoint"])
|
||||||
|
table_name = "lance-integtest"
|
||||||
|
try:
|
||||||
|
ddb.delete_table(TableName=table_name)
|
||||||
|
except ddb.exceptions.ResourceNotFoundException:
|
||||||
|
pass
|
||||||
|
ddb.create_table(
|
||||||
|
TableName=table_name,
|
||||||
|
KeySchema=[
|
||||||
|
{"AttributeName": "base_uri", "KeyType": "HASH"},
|
||||||
|
{"AttributeName": "version", "KeyType": "RANGE"},
|
||||||
|
],
|
||||||
|
AttributeDefinitions=[
|
||||||
|
{"AttributeName": "base_uri", "AttributeType": "S"},
|
||||||
|
{"AttributeName": "version", "AttributeType": "N"},
|
||||||
|
],
|
||||||
|
ProvisionedThroughput={"ReadCapacityUnits": 1, "WriteCapacityUnits": 1},
|
||||||
|
)
|
||||||
|
yield table_name
|
||||||
|
ddb.delete_table(TableName=table_name)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.s3_test
|
||||||
|
def test_s3_dynamodb(s3_bucket: str, commit_table: str):
|
||||||
|
storage_options = copy.copy(CONFIG)
|
||||||
|
|
||||||
|
uri = f"s3+ddb://{s3_bucket}/test?ddbTableName={commit_table}"
|
||||||
|
data = pa.table({"x": [1, 2, 3]})
|
||||||
|
|
||||||
|
async def test():
|
||||||
|
db = await lancedb.connect_async(
|
||||||
|
uri,
|
||||||
|
storage_options=storage_options,
|
||||||
|
read_consistency_interval=timedelta(0),
|
||||||
|
)
|
||||||
|
|
||||||
|
table = await db.create_table("test", data)
|
||||||
|
|
||||||
|
# Five concurrent writers
|
||||||
|
async def insert():
|
||||||
|
# independent table refs for true concurrent writes.
|
||||||
|
table = await db.open_table("test")
|
||||||
|
await table.add(data, mode="append")
|
||||||
|
|
||||||
|
tasks = [insert() for _ in range(5)]
|
||||||
|
await asyncio.gather(*tasks)
|
||||||
|
|
||||||
|
row_count = await table.count_rows()
|
||||||
|
assert row_count == 3 * 6
|
||||||
|
|
||||||
|
asyncio.run(test())
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.s3_test
|
||||||
|
def test_s3_dynamodb_sync(s3_bucket: str, commit_table: str, monkeypatch):
|
||||||
|
# Sync API doesn't support storage_options, so we have to provide as env vars
|
||||||
|
for key, value in CONFIG.items():
|
||||||
|
monkeypatch.setenv(key.upper(), value)
|
||||||
|
|
||||||
|
uri = f"s3+ddb://{s3_bucket}/test2?ddbTableName={commit_table}"
|
||||||
|
data = pa.table({"x": ["a", "b", "c"]})
|
||||||
|
|
||||||
|
db = lancedb.connect(
|
||||||
|
uri,
|
||||||
|
read_consistency_interval=timedelta(0),
|
||||||
|
)
|
||||||
|
|
||||||
|
table = db.create_table("test_ddb_sync", data)
|
||||||
|
|
||||||
|
# Five concurrent writers
|
||||||
|
def insert():
|
||||||
|
table = db.open_table("test_ddb_sync")
|
||||||
|
table.add(data, mode="append")
|
||||||
|
|
||||||
|
threads = []
|
||||||
|
for _ in range(5):
|
||||||
|
thread = threading.Thread(target=insert)
|
||||||
|
threads.append(thread)
|
||||||
|
thread.start()
|
||||||
|
|
||||||
|
for thread in threads:
|
||||||
|
thread.join()
|
||||||
|
|
||||||
|
row_count = table.count_rows()
|
||||||
|
assert row_count == 3 * 6
|
||||||
|
|
||||||
|
# FTS indices should error since they are not supported yet.
|
||||||
|
with pytest.raises(
|
||||||
|
NotImplementedError, match="Full-text search is not supported on object stores."
|
||||||
|
):
|
||||||
|
table.create_fts_index("x")
|
||||||
|
|
||||||
|
# make sure list tables still works
|
||||||
|
assert db.table_names() == ["test_ddb_sync"]
|
||||||
|
db.drop_table("test_ddb_sync")
|
||||||
|
assert db.table_names() == []
|
||||||
|
db.drop_database()
|
||||||
|
|||||||
@@ -1,15 +1,5 @@
|
|||||||
# Copyright 2023 LanceDB Developers
|
# SPDX-License-Identifier: Apache-2.0
|
||||||
#
|
# SPDX-FileCopyrightText: Copyright The Lance Authors
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
# you may not use this file except in compliance with the License.
|
|
||||||
# You may obtain a copy of the License at
|
|
||||||
# http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
#
|
|
||||||
# Unless required by applicable law or agreed to in writing, software
|
|
||||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
# See the License for the specific language governing permissions and
|
|
||||||
# limitations under the License.
|
|
||||||
|
|
||||||
import functools
|
import functools
|
||||||
from copy import copy
|
from copy import copy
|
||||||
@@ -499,6 +489,7 @@ def test_update_types(db):
|
|||||||
"date": date(2021, 1, 1),
|
"date": date(2021, 1, 1),
|
||||||
"vector1": [1.0, 0.0],
|
"vector1": [1.0, 0.0],
|
||||||
"vector2": [1.0, 1.0],
|
"vector2": [1.0, 1.0],
|
||||||
|
"binary": b"abc",
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
)
|
)
|
||||||
@@ -512,6 +503,7 @@ def test_update_types(db):
|
|||||||
date="DATE '2021-01-02'",
|
date="DATE '2021-01-02'",
|
||||||
vector1="[2.0, 2.0]",
|
vector1="[2.0, 2.0]",
|
||||||
vector2="[3.0, 3.0]",
|
vector2="[3.0, 3.0]",
|
||||||
|
binary="X'646566'",
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
actual = table.to_arrow().to_pylist()[0]
|
actual = table.to_arrow().to_pylist()[0]
|
||||||
@@ -523,6 +515,7 @@ def test_update_types(db):
|
|||||||
date=date(2021, 1, 2),
|
date=date(2021, 1, 2),
|
||||||
vector1=[2.0, 2.0],
|
vector1=[2.0, 2.0],
|
||||||
vector2=[3.0, 3.0],
|
vector2=[3.0, 3.0],
|
||||||
|
binary=b"def",
|
||||||
)
|
)
|
||||||
assert actual == expected
|
assert actual == expected
|
||||||
|
|
||||||
@@ -536,6 +529,7 @@ def test_update_types(db):
|
|||||||
date=date(2021, 1, 3),
|
date=date(2021, 1, 3),
|
||||||
vector1=[3.0, 3.0],
|
vector1=[3.0, 3.0],
|
||||||
vector2=np.array([4.0, 4.0]),
|
vector2=np.array([4.0, 4.0]),
|
||||||
|
binary=b"def",
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
actual = table.to_arrow().to_pylist()[0]
|
actual = table.to_arrow().to_pylist()[0]
|
||||||
@@ -547,6 +541,7 @@ def test_update_types(db):
|
|||||||
date=date(2021, 1, 3),
|
date=date(2021, 1, 3),
|
||||||
vector1=[3.0, 3.0],
|
vector1=[3.0, 3.0],
|
||||||
vector2=[4.0, 4.0],
|
vector2=[4.0, 4.0],
|
||||||
|
binary=b"def",
|
||||||
)
|
)
|
||||||
assert actual == expected
|
assert actual == expected
|
||||||
|
|
||||||
|
|||||||
@@ -19,6 +19,7 @@ use lancedb::query::QueryExecutionOptions;
|
|||||||
use lancedb::query::{
|
use lancedb::query::{
|
||||||
ExecutableQuery, Query as LanceDbQuery, QueryBase, Select, VectorQuery as LanceDbVectorQuery,
|
ExecutableQuery, Query as LanceDbQuery, QueryBase, Select, VectorQuery as LanceDbVectorQuery,
|
||||||
};
|
};
|
||||||
|
use pyo3::exceptions::PyRuntimeError;
|
||||||
use pyo3::pyclass;
|
use pyo3::pyclass;
|
||||||
use pyo3::pymethods;
|
use pyo3::pymethods;
|
||||||
use pyo3::PyAny;
|
use pyo3::PyAny;
|
||||||
@@ -73,6 +74,16 @@ impl Query {
|
|||||||
Ok(RecordBatchStream::new(inner_stream))
|
Ok(RecordBatchStream::new(inner_stream))
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn explain_plan(self_: PyRef<'_, Self>, verbose: bool) -> PyResult<&PyAny> {
|
||||||
|
let inner = self_.inner.clone();
|
||||||
|
future_into_py(self_.py(), async move {
|
||||||
|
inner
|
||||||
|
.explain_plan(verbose)
|
||||||
|
.await
|
||||||
|
.map_err(|e| PyRuntimeError::new_err(e.to_string()))
|
||||||
|
})
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[pyclass]
|
#[pyclass]
|
||||||
@@ -131,4 +142,14 @@ impl VectorQuery {
|
|||||||
Ok(RecordBatchStream::new(inner_stream))
|
Ok(RecordBatchStream::new(inner_stream))
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn explain_plan(self_: PyRef<'_, Self>, verbose: bool) -> PyResult<&PyAny> {
|
||||||
|
let inner = self_.inner.clone();
|
||||||
|
future_into_py(self_.py(), async move {
|
||||||
|
inner
|
||||||
|
.explain_plan(verbose)
|
||||||
|
.await
|
||||||
|
.map_err(|e| PyRuntimeError::new_err(e.to_string()))
|
||||||
|
})
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
[package]
|
[package]
|
||||||
name = "lancedb-node"
|
name = "lancedb-node"
|
||||||
version = "0.5.2"
|
version = "0.7.0"
|
||||||
description = "Serverless, low-latency vector database for AI applications"
|
description = "Serverless, low-latency vector database for AI applications"
|
||||||
license.workspace = true
|
license.workspace = true
|
||||||
edition.workspace = true
|
edition.workspace = true
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
[package]
|
[package]
|
||||||
name = "lancedb"
|
name = "lancedb"
|
||||||
version = "0.5.2"
|
version = "0.7.0"
|
||||||
edition.workspace = true
|
edition.workspace = true
|
||||||
description = "LanceDB: A serverless, low-latency vector database for AI applications"
|
description = "LanceDB: A serverless, low-latency vector database for AI applications"
|
||||||
license.workspace = true
|
license.workspace = true
|
||||||
@@ -55,10 +55,11 @@ walkdir = "2"
|
|||||||
# For s3 integration tests (dev deps aren't allowed to be optional atm)
|
# For s3 integration tests (dev deps aren't allowed to be optional atm)
|
||||||
# We pin these because the content-length check breaks with localstack
|
# We pin these because the content-length check breaks with localstack
|
||||||
# https://github.com/smithy-lang/smithy-rs/releases/tag/release-2024-05-21
|
# https://github.com/smithy-lang/smithy-rs/releases/tag/release-2024-05-21
|
||||||
|
aws-sdk-dynamodb = { version = "=1.23.0" }
|
||||||
aws-sdk-s3 = { version = "=1.23.0" }
|
aws-sdk-s3 = { version = "=1.23.0" }
|
||||||
aws-sdk-kms = { version = "=1.21.0" }
|
aws-sdk-kms = { version = "=1.21.0" }
|
||||||
aws-config = { version = "1.0" }
|
aws-config = { version = "1.0" }
|
||||||
aws-smithy-runtime = { version = "=1.3.0" }
|
aws-smithy-runtime = { version = "=1.3.1" }
|
||||||
|
|
||||||
[features]
|
[features]
|
||||||
default = []
|
default = []
|
||||||
|
|||||||
@@ -6,3 +6,12 @@
|
|||||||
LanceDB Rust SDK, a serverless vector database.
|
LanceDB Rust SDK, a serverless vector database.
|
||||||
|
|
||||||
Read more at: https://lancedb.com/
|
Read more at: https://lancedb.com/
|
||||||
|
|
||||||
|
> [!TIP]
|
||||||
|
> A transitive dependency of `lancedb` is `lzma-sys`, which uses dynamic linking
|
||||||
|
> by default. If you want to statically link `lzma-sys`, you should activate it's
|
||||||
|
> `static` feature by adding the following to your dependencies:
|
||||||
|
>
|
||||||
|
> ```toml
|
||||||
|
> lzma-sys = { version = "*", features = ["static"] }
|
||||||
|
> ```
|
||||||
|
|||||||
@@ -1,3 +1,5 @@
|
|||||||
|
// --8<-- [start:imports]
|
||||||
|
|
||||||
use std::{iter::once, sync::Arc};
|
use std::{iter::once, sync::Arc};
|
||||||
|
|
||||||
use arrow_array::{Float64Array, Int32Array, RecordBatch, RecordBatchIterator, StringArray};
|
use arrow_array::{Float64Array, Int32Array, RecordBatch, RecordBatchIterator, StringArray};
|
||||||
@@ -11,6 +13,9 @@ use lancedb::{
|
|||||||
Result,
|
Result,
|
||||||
};
|
};
|
||||||
|
|
||||||
|
// --8<-- [end:imports]
|
||||||
|
|
||||||
|
// --8<-- [start:openai_embeddings]
|
||||||
#[tokio::main]
|
#[tokio::main]
|
||||||
async fn main() -> Result<()> {
|
async fn main() -> Result<()> {
|
||||||
let tempdir = tempfile::tempdir().unwrap();
|
let tempdir = tempfile::tempdir().unwrap();
|
||||||
@@ -35,7 +40,6 @@ async fn main() -> Result<()> {
|
|||||||
.execute()
|
.execute()
|
||||||
.await?;
|
.await?;
|
||||||
|
|
||||||
// there is no equivalent to '.search(<query>)' yet
|
|
||||||
let query = Arc::new(StringArray::from_iter_values(once("something warm")));
|
let query = Arc::new(StringArray::from_iter_values(once("something warm")));
|
||||||
let query_vector = embedding.compute_query_embeddings(query)?;
|
let query_vector = embedding.compute_query_embeddings(query)?;
|
||||||
let mut results = table
|
let mut results = table
|
||||||
@@ -53,9 +57,9 @@ async fn main() -> Result<()> {
|
|||||||
.unwrap();
|
.unwrap();
|
||||||
let text = out.iter().next().unwrap().unwrap();
|
let text = out.iter().next().unwrap().unwrap();
|
||||||
println!("Closest match: {}", text);
|
println!("Closest match: {}", text);
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
// --8<-- [end:openai_embeddings]
|
||||||
|
|
||||||
fn make_data() -> impl IntoArrow {
|
fn make_data() -> impl IntoArrow {
|
||||||
let schema = Schema::new(vec![
|
let schema = Schema::new(vec![
|
||||||
|
|||||||
@@ -1191,6 +1191,7 @@ mod tests {
|
|||||||
.query()
|
.query()
|
||||||
.execute_with_options(QueryExecutionOptions {
|
.execute_with_options(QueryExecutionOptions {
|
||||||
max_batch_length: 50000,
|
max_batch_length: 50000,
|
||||||
|
..Default::default()
|
||||||
})
|
})
|
||||||
.await
|
.await
|
||||||
.unwrap()
|
.unwrap()
|
||||||
@@ -1211,6 +1212,7 @@ mod tests {
|
|||||||
.query()
|
.query()
|
||||||
.execute_with_options(QueryExecutionOptions {
|
.execute_with_options(QueryExecutionOptions {
|
||||||
max_batch_length: 50000,
|
max_batch_length: 50000,
|
||||||
|
..Default::default()
|
||||||
})
|
})
|
||||||
.await
|
.await
|
||||||
.unwrap()
|
.unwrap()
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user