Compare commits

...

16 Commits

Author SHA1 Message Date
Lance Release
018314a5c1 [python] Bump version: 0.3.6 → 0.4.0 2023-12-18 17:27:26 +00:00
Lei Xu
409eb30ea5 chore: bump lance version to 0.9 (#715) 2023-12-17 22:11:42 -05:00
Lance Release
ff9872fd44 Updating package-lock.json 2023-12-15 18:25:06 +00:00
Lance Release
a0608044a1 [python] Bump version: 0.3.5 → 0.3.6 2023-12-15 18:20:55 +00:00
Lance Release
2e4ea7d2bc Updating package-lock.json 2023-12-15 18:01:45 +00:00
Lance Release
57e5695a54 Bump version: 0.3.10 → 0.3.11 2023-12-15 18:01:34 +00:00
Bert
ce58ea7c38 chore: fix package lock (#711) 2023-12-15 11:49:16 -05:00
Bert
57207eff4a implement update for remote clients (#706) 2023-12-15 09:06:40 -05:00
Rob Meng
2d78bff120 feat: pass vector column name to remote backend (#710)
pass vector column name to remote as well.

`vector_column` is already part of `Query`; this just declares it as part of
`remote.VectorQuery` as well.
2023-12-15 00:19:08 -05:00
Rob Meng
7c09b9b9a9 feat: allow custom column name in query (#709) 2023-12-14 23:29:26 -05:00
Chang She
bd0034a157 feat: support nested pydantic schema (#707) 2023-12-14 18:20:45 -08:00
Will Jones
144b3b5d83 ci: fix broken npm publication (#704)
Most recent release failed because `release` depends on `node-macos`,
but we renamed `node-macos` to `node-macos-{x86,arm64}`. This fixes that
by consolidating them back to a single `node-macos` job, which also has
the side effect of making the file shorter.
2023-12-14 12:09:28 -08:00
Lance Release
b6f0a31686 Updating package-lock.json 2023-12-14 19:31:56 +00:00
Lance Release
9ec526f73f Bump version: 0.3.9 → 0.3.10 2023-12-14 19:31:41 +00:00
Lance Release
600bfd7237 [python] Bump version: 0.3.4 → 0.3.5 2023-12-14 19:31:22 +00:00
Will Jones
d087e7891d feat(python): add update query support for Python (#654)
Closes #69

Will not pass until https://github.com/lancedb/lance/pull/1585 is
released
2023-12-14 11:28:32 -08:00
19 changed files with 369 additions and 127 deletions

View File

@@ -1,5 +1,5 @@
[bumpversion]
current_version = 0.3.9
current_version = 0.3.11
commit = True
message = Bump version: {current_version} → {new_version}
tag = True

View File

@@ -37,8 +37,16 @@ jobs:
path: |
node/vectordb-*.tgz
node-macos-x86:
runs-on: macos-13
node-macos:
strategy:
matrix:
config:
- arch: x86_64-apple-darwin
runner: macos-13
- arch: aarch64-apple-darwin
# xlarge is implicitly arm64.
runner: macos-13-xlarge
runs-on: ${{ matrix.config.runner }}
# Only runs on tags that matches the make-release action
if: startsWith(github.ref, 'refs/tags/v')
steps:
@@ -51,35 +59,14 @@ jobs:
cd node
npm ci
- name: Build MacOS native node modules
run: bash ci/build_macos_artifacts.sh x86_64-apple-darwin
run: bash ci/build_macos_artifacts.sh ${{ matrix.config.arch }}
- name: Upload Darwin Artifacts
uses: actions/upload-artifact@v3
with:
name: native-darwin
path: |
node/dist/lancedb-vectordb-darwin*.tgz
node-macos-arm64:
runs-on: macos-13-xlarge
# Only runs on tags that matches the make-release action
if: startsWith(github.ref, 'refs/tags/v')
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Install system dependencies
run: brew install protobuf
- name: Install npm dependencies
run: |
cd node
npm ci
- name: Build MacOS native node modules
run: bash ci/build_macos_artifacts.sh aarch64-apple-darwin
- name: Upload Darwin Artifacts
uses: actions/upload-artifact@v3
with:
name: native-darwin
path: |
node/dist/lancedb-vectordb-darwin*.tgz
node-linux:
name: node-linux (${{ matrix.config.arch}}-unknown-linux-gnu

View File

@@ -91,11 +91,7 @@ jobs:
pip install "pydantic<2"
pip install -e .[tests]
pip install tantivy@git+https://github.com/quickwit-oss/tantivy-py#164adc87e1a033117001cf70e38c82a53014d985
pip install pytest pytest-mock black isort
- name: Black
run: black --check --diff --no-color --quiet .
- name: isort
run: isort --check --diff --quiet .
pip install pytest pytest-mock
- name: Run tests
run: pytest -m "not slow" -x -v --durations=30 tests
- name: doctest

View File

@@ -5,10 +5,10 @@ exclude = ["python"]
resolver = "2"
[workspace.dependencies]
lance = { "version" = "=0.8.20", "features" = ["dynamodb"] }
lance-index = { "version" = "=0.8.20" }
lance-linalg = { "version" = "=0.8.20" }
lance-testing = { "version" = "=0.8.20" }
lance = { "version" = "=0.9.0", "features" = ["dynamodb"] }
lance-index = { "version" = "=0.9.0" }
lance-linalg = { "version" = "=0.9.0" }
lance-testing = { "version" = "=0.9.0" }
# Note that this one does not include pyarrow
arrow = { version = "47.0.0", optional = false }
arrow-array = "47.0"

node/package-lock.json (generated)
View File

@@ -1,12 +1,12 @@
{
"name": "vectordb",
"version": "0.3.9",
"version": "0.3.11",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "vectordb",
"version": "0.3.9",
"version": "0.3.11",
"cpu": [
"x64",
"arm64"
@@ -53,11 +53,11 @@
"uuid": "^9.0.0"
},
"optionalDependencies": {
"@lancedb/vectordb-darwin-arm64": "0.3.9",
"@lancedb/vectordb-darwin-x64": "0.3.9",
"@lancedb/vectordb-linux-arm64-gnu": "0.3.9",
"@lancedb/vectordb-linux-x64-gnu": "0.3.9",
"@lancedb/vectordb-win32-x64-msvc": "0.3.9"
"@lancedb/vectordb-darwin-arm64": "0.3.11",
"@lancedb/vectordb-darwin-x64": "0.3.11",
"@lancedb/vectordb-linux-arm64-gnu": "0.3.11",
"@lancedb/vectordb-linux-x64-gnu": "0.3.11",
"@lancedb/vectordb-win32-x64-msvc": "0.3.11"
}
},
"node_modules/@apache-arrow/ts": {
@@ -317,9 +317,9 @@
}
},
"node_modules/@lancedb/vectordb-darwin-arm64": {
"version": "0.3.9",
"resolved": "https://registry.npmjs.org/@lancedb/vectordb-darwin-arm64/-/vectordb-darwin-arm64-0.3.9.tgz",
"integrity": "sha512-irtAdfSRQDcfnMnB8T7D0atLFfu1MMZZ1JaxMKu24DDZ8e4IMYKUplxwvWni3241yA9yDE/pliRZCNQbQCEfrg==",
"version": "0.3.11",
"resolved": "https://registry.npmjs.org/@lancedb/vectordb-darwin-arm64/-/vectordb-darwin-arm64-0.3.11.tgz",
"integrity": "sha512-N0Ak0jWmSh+QIUJKgtD85+/N0UMBZxaHrd9leusWgjEdtZdQqyzd6VWYAFPR6W6p8tt1hUZiuTRQ6ugfNhyEsg==",
"cpu": [
"arm64"
],
@@ -329,9 +329,9 @@
]
},
"node_modules/@lancedb/vectordb-darwin-x64": {
"version": "0.3.9",
"resolved": "https://registry.npmjs.org/@lancedb/vectordb-darwin-x64/-/vectordb-darwin-x64-0.3.9.tgz",
"integrity": "sha512-4xXQoPheyIl1P5kRoKmZtaAHFrYdL9pw5yq+r6ewIx0TCemN4LSvzSUTqM5nZl3QPU8FeL0CGD8Gt2gMU0HQ2A==",
"version": "0.3.11",
"resolved": "https://registry.npmjs.org/@lancedb/vectordb-darwin-x64/-/vectordb-darwin-x64-0.3.11.tgz",
"integrity": "sha512-vugA+Z4XDrV1gFW5PfqJImw0w84NpGrZsaTZ9afw2oc5a37alx5zOoHEoBQimaX88j+YjWme38h3B98qoNTP5w==",
"cpu": [
"x64"
],
@@ -341,9 +341,9 @@
]
},
"node_modules/@lancedb/vectordb-linux-arm64-gnu": {
"version": "0.3.9",
"resolved": "https://registry.npmjs.org/@lancedb/vectordb-linux-arm64-gnu/-/vectordb-linux-arm64-gnu-0.3.9.tgz",
"integrity": "sha512-WIxCZKnLeSlz0PGURtKSX6hJ4CYE2o5P+IFmmuWOWB1uNapQu6zOpea6rNxcRFHUA0IJdO02lVxVfn2hDX4SMg==",
"version": "0.3.11",
"resolved": "https://registry.npmjs.org/@lancedb/vectordb-linux-arm64-gnu/-/vectordb-linux-arm64-gnu-0.3.11.tgz",
"integrity": "sha512-mArXy17URht7cTdGgNc+yL6BOxvK4vAtNaPh68WBOy7e438l6++s2E4bZyaeyeoIv8sPENDmJZzBr4YuBEc7yw==",
"cpu": [
"arm64"
],
@@ -353,9 +353,9 @@
]
},
"node_modules/@lancedb/vectordb-linux-x64-gnu": {
"version": "0.3.9",
"resolved": "https://registry.npmjs.org/@lancedb/vectordb-linux-x64-gnu/-/vectordb-linux-x64-gnu-0.3.9.tgz",
"integrity": "sha512-bQbcV9adKzYbJLNzDjk9OYsMnT2IjmieLfb4IQ1hj5IUoWfbg80Bd0+gZUnrmrhG6fe56TIriFZYQR9i7TSE9Q==",
"version": "0.3.11",
"resolved": "https://registry.npmjs.org/@lancedb/vectordb-linux-x64-gnu/-/vectordb-linux-x64-gnu-0.3.11.tgz",
"integrity": "sha512-AoF0f/mUP1d2r5nirLQiajHBVnhsYCD/vDGUlTmLWH4lX4v9zVqlh9HmXjpLBcaK4klGmt5CBmcb+tj5v2/ySA==",
"cpu": [
"x64"
],
@@ -365,9 +365,9 @@
]
},
"node_modules/@lancedb/vectordb-win32-x64-msvc": {
"version": "0.3.9",
"resolved": "https://registry.npmjs.org/@lancedb/vectordb-win32-x64-msvc/-/vectordb-win32-x64-msvc-0.3.9.tgz",
"integrity": "sha512-7EXI7P1QvAfgJNPWWBMDOkoJ696gSBAClcyEJNYg0JV21jVFZRwJVI3bZXflesWduFi/mTuzPkFFA68us1u19A==",
"version": "0.3.11",
"resolved": "https://registry.npmjs.org/@lancedb/vectordb-win32-x64-msvc/-/vectordb-win32-x64-msvc-0.3.11.tgz",
"integrity": "sha512-Zq+JHtkaGaoozHcOdXid3jRkEj6u2d1C0VD+Wg+7AIpRokzYt5zcKWPzjDnqoRuD+VTv6YFjYN58RmYwa2Ktiw==",
"cpu": [
"x64"
],
@@ -4869,33 +4869,33 @@
}
},
"@lancedb/vectordb-darwin-arm64": {
"version": "0.3.9",
"resolved": "https://registry.npmjs.org/@lancedb/vectordb-darwin-arm64/-/vectordb-darwin-arm64-0.3.9.tgz",
"integrity": "sha512-irtAdfSRQDcfnMnB8T7D0atLFfu1MMZZ1JaxMKu24DDZ8e4IMYKUplxwvWni3241yA9yDE/pliRZCNQbQCEfrg==",
"version": "0.3.11",
"resolved": "https://registry.npmjs.org/@lancedb/vectordb-darwin-arm64/-/vectordb-darwin-arm64-0.3.11.tgz",
"integrity": "sha512-N0Ak0jWmSh+QIUJKgtD85+/N0UMBZxaHrd9leusWgjEdtZdQqyzd6VWYAFPR6W6p8tt1hUZiuTRQ6ugfNhyEsg==",
"optional": true
},
"@lancedb/vectordb-darwin-x64": {
"version": "0.3.9",
"resolved": "https://registry.npmjs.org/@lancedb/vectordb-darwin-x64/-/vectordb-darwin-x64-0.3.9.tgz",
"integrity": "sha512-4xXQoPheyIl1P5kRoKmZtaAHFrYdL9pw5yq+r6ewIx0TCemN4LSvzSUTqM5nZl3QPU8FeL0CGD8Gt2gMU0HQ2A==",
"version": "0.3.11",
"resolved": "https://registry.npmjs.org/@lancedb/vectordb-darwin-x64/-/vectordb-darwin-x64-0.3.11.tgz",
"integrity": "sha512-vugA+Z4XDrV1gFW5PfqJImw0w84NpGrZsaTZ9afw2oc5a37alx5zOoHEoBQimaX88j+YjWme38h3B98qoNTP5w==",
"optional": true
},
"@lancedb/vectordb-linux-arm64-gnu": {
"version": "0.3.9",
"resolved": "https://registry.npmjs.org/@lancedb/vectordb-linux-arm64-gnu/-/vectordb-linux-arm64-gnu-0.3.9.tgz",
"integrity": "sha512-WIxCZKnLeSlz0PGURtKSX6hJ4CYE2o5P+IFmmuWOWB1uNapQu6zOpea6rNxcRFHUA0IJdO02lVxVfn2hDX4SMg==",
"version": "0.3.11",
"resolved": "https://registry.npmjs.org/@lancedb/vectordb-linux-arm64-gnu/-/vectordb-linux-arm64-gnu-0.3.11.tgz",
"integrity": "sha512-mArXy17URht7cTdGgNc+yL6BOxvK4vAtNaPh68WBOy7e438l6++s2E4bZyaeyeoIv8sPENDmJZzBr4YuBEc7yw==",
"optional": true
},
"@lancedb/vectordb-linux-x64-gnu": {
"version": "0.3.9",
"resolved": "https://registry.npmjs.org/@lancedb/vectordb-linux-x64-gnu/-/vectordb-linux-x64-gnu-0.3.9.tgz",
"integrity": "sha512-bQbcV9adKzYbJLNzDjk9OYsMnT2IjmieLfb4IQ1hj5IUoWfbg80Bd0+gZUnrmrhG6fe56TIriFZYQR9i7TSE9Q==",
"version": "0.3.11",
"resolved": "https://registry.npmjs.org/@lancedb/vectordb-linux-x64-gnu/-/vectordb-linux-x64-gnu-0.3.11.tgz",
"integrity": "sha512-AoF0f/mUP1d2r5nirLQiajHBVnhsYCD/vDGUlTmLWH4lX4v9zVqlh9HmXjpLBcaK4klGmt5CBmcb+tj5v2/ySA==",
"optional": true
},
"@lancedb/vectordb-win32-x64-msvc": {
"version": "0.3.9",
"resolved": "https://registry.npmjs.org/@lancedb/vectordb-win32-x64-msvc/-/vectordb-win32-x64-msvc-0.3.9.tgz",
"integrity": "sha512-7EXI7P1QvAfgJNPWWBMDOkoJ696gSBAClcyEJNYg0JV21jVFZRwJVI3bZXflesWduFi/mTuzPkFFA68us1u19A==",
"version": "0.3.11",
"resolved": "https://registry.npmjs.org/@lancedb/vectordb-win32-x64-msvc/-/vectordb-win32-x64-msvc-0.3.11.tgz",
"integrity": "sha512-Zq+JHtkaGaoozHcOdXid3jRkEj6u2d1C0VD+Wg+7AIpRokzYt5zcKWPzjDnqoRuD+VTv6YFjYN58RmYwa2Ktiw==",
"optional": true
},
"@neon-rs/cli": {

View File

@@ -1,6 +1,6 @@
{
"name": "vectordb",
"version": "0.3.9",
"version": "0.3.11",
"description": " Serverless, low-latency vector database for AI applications",
"main": "dist/index.js",
"types": "dist/index.d.ts",
@@ -81,10 +81,10 @@
}
},
"optionalDependencies": {
"@lancedb/vectordb-darwin-arm64": "0.3.9",
"@lancedb/vectordb-darwin-x64": "0.3.9",
"@lancedb/vectordb-linux-arm64-gnu": "0.3.9",
"@lancedb/vectordb-linux-x64-gnu": "0.3.9",
"@lancedb/vectordb-win32-x64-msvc": "0.3.9"
"@lancedb/vectordb-darwin-arm64": "0.3.11",
"@lancedb/vectordb-darwin-x64": "0.3.11",
"@lancedb/vectordb-linux-arm64-gnu": "0.3.11",
"@lancedb/vectordb-linux-x64-gnu": "0.3.11",
"@lancedb/vectordb-win32-x64-msvc": "0.3.11"
}
}

View File

@@ -25,6 +25,7 @@ import { Vector, Table as ArrowTable } from 'apache-arrow'
import { HttpLancedbClient } from './client'
import { isEmbeddingFunction } from '../embedding/embedding_function'
import { createEmptyTable, fromRecordsToStreamBuffer, fromTableToStreamBuffer } from '../arrow'
import { toSQL } from '../util'
/**
* Remote connection.
@@ -248,7 +249,23 @@ export class RemoteTable<T = number[]> implements Table<T> {
}
async update (args: UpdateArgs | UpdateSqlArgs): Promise<void> {
throw new Error('Not implemented')
let filter: string | null
let updates: Record<string, string>
if ('valuesSql' in args) {
filter = args.where ?? null
updates = args.valuesSql
} else {
filter = args.where ?? null
updates = {}
for (const [key, value] of Object.entries(args.values)) {
updates[key] = toSQL(value)
}
}
await this._client.post(`/v1/table/${this._name}/update/`, {
predicate: filter,
updates: Object.entries(updates).map(([key, value]) => [key, value])
})
}
async listIndices (): Promise<VectorIndex[]> {

View File

@@ -1,5 +1,5 @@
[bumpversion]
current_version = 0.3.4
current_version = 0.4.0
commit = True
message = [python] Bump version: {current_version} → {new_version}
tag = True

View File

@@ -348,3 +348,20 @@ def get_extras(field_info: pydantic.fields.FieldInfo, key: str) -> Any:
if PYDANTIC_VERSION.major >= 2:
return (field_info.json_schema_extra or {}).get(key)
return (field_info.field_info.extra or {}).get("json_schema_extra", {}).get(key)
if PYDANTIC_VERSION.major < 2:
def model_to_dict(model: pydantic.BaseModel) -> Dict[str, Any]:
"""
Convert a Pydantic model to a dictionary.
"""
return model.dict()
else:
def model_to_dict(model: pydantic.BaseModel) -> Dict[str, Any]:
"""
Convert a Pydantic model to a dictionary.
"""
return model.model_dump()
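
The helper above lets nested Pydantic models serialize the same way on both major Pydantic versions. A minimal sketch of the behavior, assuming the helper is importable as `lancedb.pydantic.model_to_dict`; the `Document` model is illustrative:

```python
# Minimal sketch: model_to_dict normalizes Pydantic v1's .dict() and v2's .model_dump().
import pydantic

from lancedb.pydantic import model_to_dict  # assumed module path


class Document(pydantic.BaseModel):  # illustrative model
    content: str
    source: str


doc = Document(content="foo", source="bar")
assert model_to_dict(doc) == {"content": "foo", "source": "bar"}
# Under Pydantic v1 this calls doc.dict(); under v2 it calls doc.model_dump(),
# so _sanitize_data can build a pyarrow Table from nested models consistently.
```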

View File

@@ -18,6 +18,8 @@ import attrs
import pyarrow as pa
from pydantic import BaseModel
from lancedb.common import VECTOR_COLUMN_NAME
__all__ = ["LanceDBClient", "VectorQuery", "VectorQueryResult"]
@@ -43,6 +45,8 @@ class VectorQuery(BaseModel):
refine_factor: Optional[int] = None
vector_column: str = VECTOR_COLUMN_NAME
@attrs.define
class VectorQueryResult:

View File

@@ -13,7 +13,7 @@
import uuid
from functools import cached_property
from typing import Optional, Union
from typing import Dict, Optional, Union
import pyarrow as pa
from lance import json_to_schema
@@ -22,6 +22,7 @@ from lancedb.common import DATA, VEC, VECTOR_COLUMN_NAME
from ..query import LanceVectorQueryBuilder
from ..table import Query, Table, _sanitize_data
from ..util import value_to_sql
from .arrow import to_ipc_binary
from .client import ARROW_STREAM_CONTENT_TYPE
from .db import RemoteDBConnection
@@ -273,3 +274,65 @@ class RemoteTable(Table):
self._conn._loop.run_until_complete(
self._conn._client.post(f"/v1/table/{self._name}/delete/", data=payload)
)
def update(
self,
where: Optional[str] = None,
values: Optional[dict] = None,
*,
values_sql: Optional[Dict[str, str]] = None,
):
"""
This can be used to update zero to all rows depending on how many
rows match the where clause.
Parameters
----------
where: str, optional
The SQL where clause to use when updating rows. For example, 'x = 2'
or 'x IN (1, 2, 3)'. The filter must not be empty, or it will error.
values: dict, optional
The values to update. The keys are the column names and the values
are the values to set.
values_sql: dict, optional
The values to update, expressed as SQL expression strings. These can
reference existing columns. For example, {"x": "x + 1"} will increment
the x column by 1.
Examples
--------
>>> import lancedb
>>> data = [
... {"x": 1, "vector": [1, 2]},
... {"x": 2, "vector": [3, 4]},
... {"x": 3, "vector": [5, 6]}
... ]
>>> db = lancedb.connect("db://...", api_key="...", region="...") # doctest: +SKIP
>>> table = db.create_table("my_table", data) # doctest: +SKIP
>>> table.to_pandas() # doctest: +SKIP
x vector # doctest: +SKIP
0 1 [1.0, 2.0] # doctest: +SKIP
1 2 [3.0, 4.0] # doctest: +SKIP
2 3 [5.0, 6.0] # doctest: +SKIP
>>> table.update(where="x = 2", values={"vector": [10, 10]}) # doctest: +SKIP
>>> table.to_pandas() # doctest: +SKIP
x vector # doctest: +SKIP
0 1 [1.0, 2.0] # doctest: +SKIP
1 3 [5.0, 6.0] # doctest: +SKIP
2 2 [10.0, 10.0] # doctest: +SKIP
"""
if values is not None and values_sql is not None:
raise ValueError("Only one of values or values_sql can be provided")
if values is None and values_sql is None:
raise ValueError("Either values or values_sql must be provided")
if values is not None:
updates = [[k, value_to_sql(v)] for k, v in values.items()]
else:
updates = [[k, v] for k, v in values_sql.items()]
payload = {"predicate": where, "updates": updates}
self._conn._loop.run_until_complete(
self._conn._client.post(f"/v1/table/{self._name}/update/", data=payload)
)
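
For reference, a minimal sketch (column names and predicate are illustrative, not from this diff) of how the two keyword forms reduce to the payload POSTed above: literal `values` go through `value_to_sql`, while `values_sql` entries are passed through verbatim.

```python
from lancedb.util import value_to_sql  # assumed module path

# values: literals are converted to SQL literal strings first.
values = {"x": 2, "name": "foo"}
payload = {
    "predicate": "id = 7",
    "updates": [[k, value_to_sql(v)] for k, v in values.items()],
}
# payload == {"predicate": "id = 7", "updates": [["x", "2"], ["name", "'foo'"]]}

# values_sql: expressions are sent as-is and may reference existing columns.
values_sql = {"x": "x + 1"}
payload = {
    "predicate": "id = 7",
    "updates": [[k, v] for k, v in values_sql.items()],
}
# payload == {"predicate": "id = 7", "updates": [["x", "x + 1"]]}
```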

View File

@@ -17,7 +17,7 @@ import inspect
import os
from abc import ABC, abstractmethod
from functools import cached_property
from typing import TYPE_CHECKING, Any, Iterable, List, Optional, Union
from typing import TYPE_CHECKING, Any, Dict, Iterable, List, Optional, Union
import lance
import numpy as np
@@ -28,9 +28,9 @@ from lance.vector import vec_to_table
from .common import DATA, VEC, VECTOR_COLUMN_NAME
from .embeddings import EmbeddingFunctionConfig, EmbeddingFunctionRegistry
from .pydantic import LanceModel
from .pydantic import LanceModel, model_to_dict
from .query import LanceQueryBuilder, Query
from .util import fs_from_uri, safe_import_pandas
from .util import fs_from_uri, safe_import_pandas, value_to_sql
from .utils.events import register_event
if TYPE_CHECKING:
@@ -53,8 +53,10 @@ def _sanitize_data(
# convert to list of dict if data is a bunch of LanceModels
if isinstance(data[0], LanceModel):
schema = data[0].__class__.to_arrow_schema()
data = [dict(d) for d in data]
data = pa.Table.from_pylist(data)
data = [model_to_dict(d) for d in data]
data = pa.Table.from_pylist(data, schema=schema)
else:
data = pa.Table.from_pylist(data)
elif isinstance(data, dict):
data = vec_to_table(data)
elif pd is not None and isinstance(data, pd.DataFrame):
@@ -913,30 +915,35 @@ class LanceTable(Table):
def delete(self, where: str):
self._dataset.delete(where)
def update(self, where: str, values: dict):
def update(
self,
where: Optional[str] = None,
values: Optional[dict] = None,
*,
values_sql: Optional[Dict[str, str]] = None,
):
"""
EXPERIMENTAL: Update rows in the table (not threadsafe).
This can be used to update zero to all rows depending on how many
rows match the where clause.
Parameters
----------
where: str
where: str, optional
The SQL where clause to use when updating rows. For example, 'x = 2'
or 'x IN (1, 2, 3)'. The filter must not be empty, or it will error.
values: dict
values: dict, optional
The values to update. The keys are the column names and the values
are the values to set.
values_sql: dict, optional
The values to update, expressed as SQL expression strings. These can
reference existing columns. For example, {"x": "x + 1"} will increment
the x column by 1.
Examples
--------
>>> import lancedb
>>> data = [
... {"x": 1, "vector": [1, 2]},
... {"x": 2, "vector": [3, 4]},
... {"x": 3, "vector": [5, 6]}
... ]
>>> import pandas as pd
>>> data = pd.DataFrame({"x": [1, 2, 3], "vector": [[1, 2], [3, 4], [5, 6]]})
>>> db = lancedb.connect("./.lancedb")
>>> table = db.create_table("my_table", data)
>>> table.to_pandas()
@@ -952,18 +959,15 @@ class LanceTable(Table):
2 2 [10.0, 10.0]
"""
orig_data = self._dataset.to_table(filter=where).combine_chunks()
if len(orig_data) == 0:
return
for col, val in values.items():
i = orig_data.column_names.index(col)
if i < 0:
raise ValueError(f"Column {col} does not exist")
orig_data = orig_data.set_column(
i, col, pa.array([val] * len(orig_data), type=orig_data[col].type)
)
self.delete(where)
self.add(orig_data, mode="append")
if values is not None and values_sql is not None:
raise ValueError("Only one of values or values_sql can be provided")
if values is None and values_sql is None:
raise ValueError("Either values or values_sql must be provided")
if values is not None:
values_sql = {k: value_to_sql(v) for k, v in values.items()}
self.to_lance().update(values_sql, where)
self._reset_dataset()
register_event("update")
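
A short usage sketch of the new signature, adapted from the docstring and tests above (the table name is illustrative): literal `values` are converted with `value_to_sql`, while `values_sql` expressions may reference existing columns.

```python
import lancedb

db = lancedb.connect("./.lancedb")
table = db.create_table(
    "update_demo",  # illustrative table name
    [{"x": 1, "vector": [1.0, 2.0]}, {"x": 2, "vector": [3.0, 4.0]}],
)

# Literal values: converted to SQL literals via value_to_sql under the hood.
table.update(where="x = 2", values={"vector": [10.0, 10.0]})

# SQL expressions: may reference existing columns; with no `where` given,
# every row is updated (as in test_update_types above).
table.update(values_sql={"x": "x + 1"})
```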

View File

@@ -12,9 +12,12 @@
# limitations under the License.
import os
from datetime import date, datetime
from functools import singledispatch
from typing import Tuple
from urllib.parse import urlparse
import numpy as np
import pyarrow.fs as pa_fs
@@ -88,3 +91,53 @@ def safe_import_pandas():
return pd
except ImportError:
return None
@singledispatch
def value_to_sql(value):
raise NotImplementedError("SQL conversion is not implemented for this type")
@value_to_sql.register(str)
def _(value: str):
return f"'{value}'"
@value_to_sql.register(int)
def _(value: int):
return str(value)
@value_to_sql.register(float)
def _(value: float):
return str(value)
@value_to_sql.register(bool)
def _(value: bool):
return str(value).upper()
@value_to_sql.register(type(None))
def _(value: type(None)):
return "NULL"
@value_to_sql.register(datetime)
def _(value: datetime):
return f"'{value.isoformat()}'"
@value_to_sql.register(date)
def _(value: date):
return f"'{value.isoformat()}'"
@value_to_sql.register(list)
def _(value: list):
return "[" + ", ".join(map(value_to_sql, value)) + "]"
@value_to_sql.register(np.ndarray)
def _(value: np.ndarray):
return value_to_sql(value.tolist())
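
A minimal sketch of the conversions the singledispatch above performs (assuming the module path `lancedb.util`); each Python literal maps to the corresponding SQL literal string:

```python
from datetime import date, datetime

import numpy as np

from lancedb.util import value_to_sql  # assumed module path

assert value_to_sql("foo") == "'foo'"
assert value_to_sql(3) == "3"
assert value_to_sql(1.5) == "1.5"
assert value_to_sql(True) == "TRUE"
assert value_to_sql(None) == "NULL"
assert value_to_sql(datetime(2021, 1, 2)) == "'2021-01-02T00:00:00'"
assert value_to_sql(date(2021, 1, 2)) == "'2021-01-02'"
assert value_to_sql([1, 2]) == "[1, 2]"
assert value_to_sql(np.array([1.0, 2.0])) == "[1.0, 2.0]"
```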

View File

@@ -1,12 +1,12 @@
[project]
name = "lancedb"
version = "0.3.4"
version = "0.4.0"
dependencies = [
"deprecation",
"pylance==0.8.17",
"pylance==0.9.0",
"ratelimiter~=1.0",
"retry>=0.9.2",
"tqdm>=4.1.0",
"tqdm>=4.27.0",
"aiohttp",
"pydantic>=1.10",
"attrs>=21.3.0",

View File

@@ -12,7 +12,7 @@
# limitations under the License.
import functools
from datetime import timedelta
from datetime import date, datetime, timedelta
from pathlib import Path
from typing import List
from unittest.mock import PropertyMock, patch
@@ -21,6 +21,7 @@ import lance
import numpy as np
import pandas as pd
import pyarrow as pa
from pydantic import BaseModel
import pytest
from lancedb.conftest import MockTextEmbeddingFunction
@@ -141,14 +142,32 @@ def test_add(db):
def test_add_pydantic_model(db):
class TestModel(LanceModel):
vector: Vector(16)
li: List[int]
# https://github.com/lancedb/lancedb/issues/562
data = TestModel(vector=list(range(16)), li=[1, 2, 3])
table = LanceTable.create(db, "test", data=[data])
assert len(table) == 1
assert table.schema == TestModel.to_arrow_schema()
class Document(BaseModel):
content: str
source: str
class LanceSchema(LanceModel):
id: str
vector: Vector(2)
li: List[int]
payload: Document
tbl = LanceTable.create(db, "mytable", schema=LanceSchema, mode="overwrite")
assert tbl.schema == LanceSchema.to_arrow_schema()
# add works
expected = LanceSchema(
id="id",
vector=[0.0, 0.0],
li=[1, 2, 3],
payload=Document(content="foo", source="bar"),
)
tbl.add([expected])
result = tbl.search([0.0, 0.0]).limit(1).to_pydantic(LanceSchema)[0]
assert result == expected
def _add(table, schema):
@@ -348,14 +367,79 @@ def test_update(db):
assert len(table) == 2
assert len(table.list_versions()) == 2
table.update(where="id=0", values={"vector": [1.1, 1.1]})
assert len(table.list_versions()) == 4
assert table.version == 4
assert len(table.list_versions()) == 3
assert table.version == 3
assert len(table) == 2
v = table.to_arrow()["vector"].combine_chunks()
v = v.values.to_numpy().reshape(2, 2)
assert np.allclose(v, np.array([[1.2, 1.9], [1.1, 1.1]]))
def test_update_types(db):
table = LanceTable.create(
db,
"my_table",
data=[
{
"id": 0,
"str": "foo",
"float": 1.1,
"timestamp": datetime(2021, 1, 1),
"date": date(2021, 1, 1),
"vector1": [1.0, 0.0],
"vector2": [1.0, 1.0],
}
],
)
# Update with SQL
table.update(
values_sql=dict(
id="1",
str="'bar'",
float="2.2",
timestamp="TIMESTAMP '2021-01-02 00:00:00'",
date="DATE '2021-01-02'",
vector1="[2.0, 2.0]",
vector2="[3.0, 3.0]",
)
)
actual = table.to_arrow().to_pylist()[0]
expected = dict(
id=1,
str="bar",
float=2.2,
timestamp=datetime(2021, 1, 2),
date=date(2021, 1, 2),
vector1=[2.0, 2.0],
vector2=[3.0, 3.0],
)
assert actual == expected
# Update with values
table.update(
values=dict(
id=2,
str="baz",
float=3.3,
timestamp=datetime(2021, 1, 3),
date=date(2021, 1, 3),
vector1=[3.0, 3.0],
vector2=np.array([4.0, 4.0]),
)
)
actual = table.to_arrow().to_pylist()[0]
expected = dict(
id=2,
str="baz",
float=3.3,
timestamp=datetime(2021, 1, 3),
date=date(2021, 1, 3),
vector1=[3.0, 3.0],
vector2=[4.0, 4.0],
)
assert actual == expected
def test_create_with_embedding_function(db):
class MyTable(LanceModel):
text: str

View File

@@ -1,6 +1,6 @@
[package]
name = "vectordb-node"
version = "0.3.9"
version = "0.3.11"
description = "Serverless, low-latency vector database for AI applications"
license = "Apache-2.0"
edition = "2018"

View File

@@ -1,6 +1,6 @@
[package]
name = "vectordb"
version = "0.3.9"
version = "0.3.11"
edition = "2021"
description = "LanceDB: A serverless, low-latency vector database for AI applications"
license = "Apache-2.0"

View File

@@ -359,7 +359,9 @@ mod test {
assert_eq!(t.count_rows().await.unwrap(), 100);
let q = t
.search(Some(PrimitiveArray::from_iter_values(vec![0.1, 0.1, 0.1, 0.1])))
.search(Some(PrimitiveArray::from_iter_values(vec![
0.1, 0.1, 0.1, 0.1,
])))
.limit(10)
.execute()
.await

View File

@@ -25,6 +25,7 @@ use crate::error::Result;
pub struct Query {
pub dataset: Arc<Dataset>,
pub query_vector: Option<Float32Array>,
pub column: String,
pub limit: Option<usize>,
pub filter: Option<String>,
pub select: Option<Vec<String>>,
@@ -50,6 +51,7 @@ impl Query {
Query {
dataset,
query_vector: vector,
column: crate::table::VECTOR_COLUMN_NAME.to_string(),
limit: None,
nprobes: 20,
refine_factor: None,
@@ -71,7 +73,7 @@ impl Query {
if let Some(query) = self.query_vector.as_ref() {
// If there is a vector query, default to limit=10 if unspecified
scanner.nearest(crate::table::VECTOR_COLUMN_NAME, query, self.limit.unwrap_or(10))?;
scanner.nearest(&self.column, query, self.limit.unwrap_or(10))?;
} else {
// If there is no vector query, it's ok to not have a limit
scanner.limit(self.limit.map(|limit| limit as i64), None)?;
@@ -87,6 +89,16 @@ impl Query {
Ok(scanner.try_into_stream().await?)
}
/// Set the column to query
///
/// # Arguments
///
/// * `column` - The column name
pub fn column(mut self, column: &str) -> Query {
self.column = column.into();
self
}
/// Set the maximum number of results to return.
///
/// # Arguments
@@ -176,7 +188,10 @@ mod tests {
use std::sync::Arc;
use super::*;
use arrow_array::{Float32Array, RecordBatch, RecordBatchIterator, RecordBatchReader, cast::AsArray, Int32Array};
use arrow_array::{
cast::AsArray, Float32Array, Int32Array, RecordBatch, RecordBatchIterator,
RecordBatchReader,
};
use arrow_schema::{DataType, Field as ArrowField, Schema as ArrowSchema};
use futures::StreamExt;
use lance::dataset::Dataset;
@@ -260,7 +275,7 @@ mod tests {
let mut stream = result.expect("should have result");
// should only have one batch
while let Some(batch) = stream.next().await {
let b = batch.expect("should be Ok");
let b = batch.expect("should be Ok");
// cast arr into Int32Array
let arr: &Int32Array = b["id"].as_primitive();
assert!(arr.iter().all(|x| x.unwrap() % 2 == 0));