Mirror of https://github.com/lancedb/lancedb.git (synced 2025-12-23 05:19:58 +00:00)

Compare commits: python-v0. ... python-v0. (23 commits)
| Author | SHA1 | Date |
|---|---|---|
| | e3c6213333 | |
| | 00552439d9 | |
| | c0ee370f83 | |
| | 17e4022045 | |
| | c3ebac1a92 | |
| | 10f919a0a9 | |
| | 8af5476395 | |
| | bcbbeb7a00 | |
| | d6c0f75078 | |
| | e820e356a0 | |
| | 509286492f | |
| | f9789ec962 | |
| | 347515aa51 | |
| | 3324e7d525 | |
| | ab5316b4fa | |
| | db125013fc | |
| | a43193c99b | |
| | b70513ca72 | |
| | 78165801c6 | |
| | 6e5927ce6d | |
| | 6c1f32ac11 | |
| | 4fdf084777 | |
| | 1fad24fcd8 | |
@@ -1,5 +1,5 @@
 [tool.bumpversion]
-current_version = "0.14.0"
+current_version = "0.14.1-beta.3"
 parse = """(?x)
 (?P<major>0|[1-9]\\d*)\\.
 (?P<minor>0|[1-9]\\d*)\\.
18  Cargo.toml
@@ -21,16 +21,16 @@ categories = ["database-implementations"]
 rust-version = "1.80.0" # TODO: lower this once we upgrade Lance again.

 [workspace.dependencies]
-lance = { "version" = "=0.20.0", "features" = [
+lance = { "version" = "=0.21.0", "features" = [
   "dynamodb",
-] }
-lance-io = "0.20.0"
-lance-index = "0.20.0"
-lance-linalg = "0.20.0"
-lance-table = "0.20.0"
-lance-testing = "0.20.0"
-lance-datafusion = "0.20.0"
-lance-encoding = "0.20.0"
+], git = "https://github.com/lancedb/lance.git", tag = "v0.21.0-beta.3" }
+lance-io = { version = "=0.21.0", git = "https://github.com/lancedb/lance.git", tag = "v0.21.0-beta.3" }
+lance-index = { version = "=0.21.0", git = "https://github.com/lancedb/lance.git", tag = "v0.21.0-beta.3" }
+lance-linalg = { version = "=0.21.0", git = "https://github.com/lancedb/lance.git", tag = "v0.21.0-beta.3" }
+lance-table = { version = "=0.21.0", git = "https://github.com/lancedb/lance.git", tag = "v0.21.0-beta.3" }
+lance-testing = { version = "=0.21.0", git = "https://github.com/lancedb/lance.git", tag = "v0.21.0-beta.3" }
+lance-datafusion = { version = "=0.21.0", git = "https://github.com/lancedb/lance.git", tag = "v0.21.0-beta.3" }
+lance-encoding = { version = "=0.21.0", git = "https://github.com/lancedb/lance.git", tag = "v0.21.0-beta.3" }
 # Note that this one does not include pyarrow
 arrow = { version = "53.2", optional = false }
 arrow-array = "53.2"
@@ -231,6 +231,7 @@ nav:
  - 🐍 Python: python/saas-python.md
  - 👾 JavaScript: javascript/modules.md
  - REST API: cloud/rest.md
+ - FAQs: cloud/cloud_faq.md

  - Quick start: basic.md
  - Concepts:

@@ -357,6 +358,7 @@ nav:
  - 🐍 Python: python/saas-python.md
  - 👾 JavaScript: javascript/modules.md
  - REST API: cloud/rest.md
+ - FAQs: cloud/cloud_faq.md

 extra_css:
  - styles/global.css
@@ -83,6 +83,7 @@ The following IVF_PQ parameters can be specified:

- **num_sub_vectors**: The number of sub-vectors (M) that will be created during Product Quantization (PQ).
  For a D-dimensional vector, it will be divided into `M` sub-vectors with dimension `D/M`, each of which is replaced by
  a single PQ code. The default is the dimension of the vector divided by 16.
- **num_bits**: The number of bits used to encode each sub-vector. Only 4 and 8 are supported. The higher the number of bits, the higher the accuracy of the index, but also the slower the search. The default is 8.
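As a concrete illustration, a minimal sketch of setting these parameters with the LanceDB Python client is shown below; the database path, table name, and the 768-dimensional vector column are placeholder assumptions, and the keyword names are assumed to match the parameters described above.

```python
import lancedb

# Placeholder database/table; assumes the "vector" column holds
# 768-dimensional embeddings, so the default num_sub_vectors is 768 / 16 = 48.
db = lancedb.connect("./my_lancedb")
table = db.open_table("my_table")

table.create_index(
    metric="cosine",
    num_partitions=256,   # number of IVF partitions
    num_sub_vectors=48,   # M sub-vectors, each of dimension 768 / 48 = 16
    num_bits=8,           # 8-bit PQ codes: more accurate, slower than 4
)
```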

!!! note
@@ -142,11 +143,11 @@ There are a couple of parameters that can be used to fine-tune the search:

- **nprobes** (default: 20): The number of probes used. A higher number makes search more accurate but also slower.<br/>
  Most of the time, setting nprobes to cover 5-15% of the dataset should achieve high recall with low latency.<br/>
  - _For example_, for a dataset of 1 million vectors divided into 256 partitions, `nprobes` should be set to ~20-40. This value can be adjusted to achieve the optimal balance between search latency and search quality.<br/>

- **refine_factor** (default: None): Refine the results by reading extra elements and re-ranking them in memory.<br/>
  A higher number makes search more accurate but also slower. If you find the recall is less than ideal, try refine_factor=10 to start.<br/>
  - _For example_, for a dataset of 1 million vectors divided into 256 partitions, setting the `refine_factor` to 200 will initially retrieve the top 4,000 candidates (top k * refine_factor) from all searched partitions. These candidates are then reranked to determine the final top 20 results.<br/>

!!! note
    Both `nprobes` and `refine_factor` are only applicable if an ANN index is present. If specified on a table without an ANN index, those parameters are ignored.
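For reference, a hedged sketch of applying these two knobs at query time with the Python client follows; the query vector and table handle are placeholders, and the numbers simply mirror the 1-million-vector / 256-partition example above.

```python
# Probe ~10% of the 256 partitions and re-rank top_k * refine_factor
# candidates (20 * 200 = 4,000) in memory before returning the top 20.
results = (
    table.search([0.1, 0.25, 0.5, 0.3])  # placeholder query vector
    .nprobes(25)
    .refine_factor(200)
    .limit(20)
    .to_list()
)
```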

@@ -288,4 +289,4 @@ less space distortion, and thus yields better accuracy. However, a higher `num_s

`m` determines the number of connections a new node establishes with its closest neighbors upon entering the graph. Typically, `m` falls within the range of 5 to 48. Lower `m` values are suitable for low-dimensional data or scenarios where recall is less critical. Conversely, higher `m` values are beneficial for high-dimensional data or when high recall is required. In essence, a larger `m` results in a denser graph with increased connectivity, but at the expense of higher memory consumption.

`ef_construction` balances build speed and accuracy. Higher values increase accuracy but slow down the build process. A typical range is 150 to 300. For good search results, a minimum value of 100 is recommended. In most cases, setting this value above 500 offers no additional benefit. Ensure that `ef_construction` is always set to a value equal to or greater than `ef` in the search phase.
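A rough sketch of supplying these values when building an HNSW-based index with the Python client is given below; the `index_type` spelling and the `m` / `ef_construction` keyword names are assumptions based on the parameter names above, so verify them against the client reference for your version.

```python
# Build an IVF_HNSW_SQ index; larger m and ef_construction improve recall
# at the cost of longer build times and more memory.
table.create_index(
    metric="cosine",
    index_type="IVF_HNSW_SQ",  # assumed index type name
    m=20,                      # graph connectivity per node
    ef_construction=300,       # candidate list size during construction
)
```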

34  docs/src/cloud/cloud_faq.md  Normal file
@@ -0,0 +1,34 @@
This section provides answers to the most common questions asked about LanceDB Cloud. By following these guidelines, you can ensure a smooth, performant experience with LanceDB Cloud.

### Should I reuse the database connection?
Yes! It is recommended to establish a single database connection and maintain it throughout your interaction with the tables within.

LanceDB uses HTTP connections to communicate with the servers. By re-using the Connection object, you avoid the overhead of repeatedly establishing HTTP connections, significantly improving efficiency.
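A minimal sketch of this pattern with the Python client is shown below; the database URI, environment variable, and region are placeholder assumptions.

```python
import os
import lancedb

# Connect once at application startup and reuse this object everywhere.
db = lancedb.connect(
    "db://my-database",                     # placeholder LanceDB Cloud URI
    api_key=os.environ["LANCEDB_API_KEY"],  # assumed to be set
    region="us-east-1",
)
```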

### Should I re-use the `Table` object?
`table = db.open_table()` should be called once and used for all subsequent table operations. If there are changes to the opened table, `table` always reflects the **latest version** of the data.
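For example (a sketch with an assumed table name), open the table once and keep using the same handle for every later operation:

```python
table = db.open_table("my_table")  # open once...

# ...then reuse the handle; reads always see the latest table version.
table.add([{"id": 1, "vector": [0.1, 0.2, 0.3]}])
print(table.count_rows())
```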

### What should I do if I need to search for rows by `id`?
LanceDB Cloud currently does not support an ID or primary key column. It is recommended to add a
user-defined ID column. To significantly improve the performance of queries with SQL clauses, a scalar BITMAP/BTREE index should be created on this column.
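A hedged sketch of doing this with the Python client; the column name is illustrative, and the `index_type` values are assumed to be spelled `"BTREE"` / `"BITMAP"`.

```python
# Index the user-defined ID column so filters such as "user_id = 42"
# can use the scalar index instead of scanning every row.
table.create_scalar_index("user_id", index_type="BTREE")
```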

### What are the vector indexing types supported by LanceDB Cloud?
We support `IVF_PQ` and `IVF_HNSW_SQ` as the `index_type` which is passed to `create_index`. LanceDB Cloud tunes the indexing parameters automatically to achieve the best tradeoff between query latency and query quality.
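For instance, requesting one of the supported types might look like this (the vector column name is an assumption; the detailed indexing parameters are tuned by LanceDB Cloud):

```python
# Ask for an IVF_PQ (or IVF_HNSW_SQ) vector index on the "vector" column.
table.create_index(
    metric="cosine",
    vector_column_name="vector",
    index_type="IVF_PQ",
)
```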

### When I add new rows to a table, do I need to manually update the index?
No! LanceDB Cloud triggers an asynchronous background job to index the new vectors.

Even though indexing is asynchronous, your vectors will still be immediately searchable. LanceDB uses brute-force search to search over unindexed rows. This means your new data is immediately available, but it does temporarily increase latency. To disable the brute-force part of search, set the `fast_search` flag in your query to `true`.
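As a sketch, skipping the brute-force pass over unindexed rows might look like the following in the Python client; the exact spelling of the flag (`fast_search=True` on `search()` here) is an assumption based on the flag name above, so confirm it against the client reference.

```python
# Search only indexed rows; rows not yet covered by the index are skipped,
# trading completeness for lower latency.
results = (
    table.search([0.1, 0.2, 0.3], fast_search=True)  # placeholder vector
    .limit(10)
    .to_list()
)
```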

### Do I need to reindex the whole dataset if only a small portion of the data is deleted or updated?
No! Similar to adding data to the table, LanceDB Cloud triggers an asynchronous background job to update the existing indices. Therefore, no action is needed from users and there is absolutely no
downtime expected.

### How do I know whether an index has been created?
While index creation in LanceDB Cloud is generally fast, querying immediately after a `create_index` call may result in errors. It's recommended to use `list_indices` to verify index creation before querying.
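For example, a small polling loop (a sketch; the emptiness check assumes `list_indices` returns an empty collection until the index is ready):

```python
import time

# Wait for the index to appear before sending queries.
while not table.list_indices():
    time.sleep(1.0)

print(table.list_indices())
```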

### Why is my query latency higher than expected?
Multiple factors can impact query latency. To reduce query latency, consider the following:
- Send pre-warm queries: send a few queries to warm up the cache before an actual user query.
- Check network latency: LanceDB Cloud is hosted in the AWS `us-east-1` region. It is recommended to run queries from an EC2 instance that is in the same region.
- Create scalar indices: If you are filtering on metadata, it is recommended to create scalar indices on those columns. This will speed up searches with metadata filtering. See [here](../guides/scalar_index.md) for more details on creating a scalar index.
@@ -804,12 +804,13 @@ a table:

You can add new columns to the table with the `add_columns` method. New columns
are filled with values based on a SQL expression. For example, you can add a new
-column `y` to the table and fill it with the value of `x + 1`.
+column `y` to the table, fill it with the value of `x * 2` and set the expected
+data type for it.

=== "Python"

    ```python
-    table.add_columns({"double_price": "price * 2"})
+    --8<-- "python/python/tests/docs/test_basic.py:add_columns"
    ```
    **API Reference:** [lancedb.table.Table.add_columns][]

@@ -849,8 +850,7 @@ rewriting the column, which can be a heavy operation.

    ```python
-    import pyarrow as pa
-    table.alter_column({"path": "double_price", "rename": "dbl_price",
-                        "data_type": pa.float32(), "nullable": False})
+    --8<-- "python/python/tests/docs/test_basic.py:alter_columns"
    ```
    **API Reference:** [lancedb.table.Table.alter_columns][]

@@ -873,7 +873,7 @@ will remove the column from the schema.

=== "Python"

    ```python
-    table.drop_columns(["dbl_price"])
+    --8<-- "python/python/tests/docs/test_basic.py:drop_columns"
    ```
    **API Reference:** [lancedb.table.Table.drop_columns][]
@@ -1 +0,0 @@
-TypeDoc added this file to prevent GitHub Pages from using Jekyll. You can turn off this behavior by setting the `githubPages` option to false.
@@ -27,7 +27,9 @@ the underlying connection has been closed.
|
||||
|
||||
### new Connection()
|
||||
|
||||
> **new Connection**(): [`Connection`](Connection.md)
|
||||
```ts
|
||||
new Connection(): Connection
|
||||
```
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -37,7 +39,9 @@ the underlying connection has been closed.
|
||||
|
||||
### close()
|
||||
|
||||
> `abstract` **close**(): `void`
|
||||
```ts
|
||||
abstract close(): void
|
||||
```
|
||||
|
||||
Close the connection, releasing any underlying resources.
|
||||
|
||||
@@ -53,21 +57,24 @@ Any attempt to use the connection after it is closed will result in an error.
|
||||
|
||||
### createEmptyTable()
|
||||
|
||||
> `abstract` **createEmptyTable**(`name`, `schema`, `options`?): `Promise`<[`Table`](Table.md)>
|
||||
```ts
|
||||
abstract createEmptyTable(
|
||||
name,
|
||||
schema,
|
||||
options?): Promise<Table>
|
||||
```
|
||||
|
||||
Creates a new empty Table
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **name**: `string`
|
||||
* **name**: `string`
|
||||
The name of the table.
|
||||
|
||||
The name of the table.
|
||||
* **schema**: `SchemaLike`
|
||||
The schema of the table
|
||||
|
||||
• **schema**: `SchemaLike`
|
||||
|
||||
The schema of the table
|
||||
|
||||
• **options?**: `Partial`<[`CreateTableOptions`](../interfaces/CreateTableOptions.md)>
|
||||
* **options?**: `Partial`<[`CreateTableOptions`](../interfaces/CreateTableOptions.md)>
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -79,15 +86,16 @@ The schema of the table
|
||||
|
||||
#### createTable(options)
|
||||
|
||||
> `abstract` **createTable**(`options`): `Promise`<[`Table`](Table.md)>
|
||||
```ts
|
||||
abstract createTable(options): Promise<Table>
|
||||
```
|
||||
|
||||
Creates a new Table and initializes it with new data.
|
||||
|
||||
##### Parameters
|
||||
|
||||
• **options**: `object` & `Partial`<[`CreateTableOptions`](../interfaces/CreateTableOptions.md)>
|
||||
|
||||
The options object.
|
||||
* **options**: `object` & `Partial`<[`CreateTableOptions`](../interfaces/CreateTableOptions.md)>
|
||||
The options object.
|
||||
|
||||
##### Returns
|
||||
|
||||
@@ -95,22 +103,25 @@ The options object.
|
||||
|
||||
#### createTable(name, data, options)
|
||||
|
||||
> `abstract` **createTable**(`name`, `data`, `options`?): `Promise`<[`Table`](Table.md)>
|
||||
```ts
|
||||
abstract createTable(
|
||||
name,
|
||||
data,
|
||||
options?): Promise<Table>
|
||||
```
|
||||
|
||||
Creates a new Table and initializes it with new data.
|
||||
|
||||
##### Parameters
|
||||
|
||||
• **name**: `string`
|
||||
* **name**: `string`
|
||||
The name of the table.
|
||||
|
||||
The name of the table.
|
||||
* **data**: `TableLike` \| `Record`<`string`, `unknown`>[]
|
||||
Non-empty Array of Records
|
||||
to be inserted into the table
|
||||
|
||||
• **data**: `TableLike` \| `Record`<`string`, `unknown`>[]
|
||||
|
||||
Non-empty Array of Records
|
||||
to be inserted into the table
|
||||
|
||||
• **options?**: `Partial`<[`CreateTableOptions`](../interfaces/CreateTableOptions.md)>
|
||||
* **options?**: `Partial`<[`CreateTableOptions`](../interfaces/CreateTableOptions.md)>
|
||||
|
||||
##### Returns
|
||||
|
||||
@@ -120,7 +131,9 @@ to be inserted into the table
|
||||
|
||||
### display()
|
||||
|
||||
> `abstract` **display**(): `string`
|
||||
```ts
|
||||
abstract display(): string
|
||||
```
|
||||
|
||||
Return a brief description of the connection
|
||||
|
||||
@@ -132,15 +145,16 @@ Return a brief description of the connection
|
||||
|
||||
### dropTable()
|
||||
|
||||
> `abstract` **dropTable**(`name`): `Promise`<`void`>
|
||||
```ts
|
||||
abstract dropTable(name): Promise<void>
|
||||
```
|
||||
|
||||
Drop an existing table.
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **name**: `string`
|
||||
|
||||
The name of the table to drop.
|
||||
* **name**: `string`
|
||||
The name of the table to drop.
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -150,7 +164,9 @@ The name of the table to drop.
|
||||
|
||||
### isOpen()
|
||||
|
||||
> `abstract` **isOpen**(): `boolean`
|
||||
```ts
|
||||
abstract isOpen(): boolean
|
||||
```
|
||||
|
||||
Return true if the connection has not been closed
|
||||
|
||||
@@ -162,17 +178,18 @@ Return true if the connection has not been closed
|
||||
|
||||
### openTable()
|
||||
|
||||
> `abstract` **openTable**(`name`, `options`?): `Promise`<[`Table`](Table.md)>
|
||||
```ts
|
||||
abstract openTable(name, options?): Promise<Table>
|
||||
```
|
||||
|
||||
Open a table in the database.
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **name**: `string`
|
||||
* **name**: `string`
|
||||
The name of the table
|
||||
|
||||
The name of the table
|
||||
|
||||
• **options?**: `Partial`<`OpenTableOptions`>
|
||||
* **options?**: `Partial`<`OpenTableOptions`>
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -182,7 +199,9 @@ The name of the table
|
||||
|
||||
### tableNames()
|
||||
|
||||
> `abstract` **tableNames**(`options`?): `Promise`<`string`[]>
|
||||
```ts
|
||||
abstract tableNames(options?): Promise<string[]>
|
||||
```
|
||||
|
||||
List all the table names in this database.
|
||||
|
||||
@@ -190,10 +209,9 @@ Tables will be returned in lexicographical order.
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **options?**: `Partial`<[`TableNamesOptions`](../interfaces/TableNamesOptions.md)>
|
||||
|
||||
options to control the
|
||||
paging / start point
|
||||
* **options?**: `Partial`<[`TableNamesOptions`](../interfaces/TableNamesOptions.md)>
|
||||
options to control the
|
||||
paging / start point
|
||||
|
||||
#### Returns
|
||||
|
||||
|
||||
@@ -8,9 +8,30 @@
|
||||
|
||||
## Methods
|
||||
|
||||
### bitmap()
|
||||
|
||||
```ts
|
||||
static bitmap(): Index
|
||||
```
|
||||
|
||||
Create a bitmap index.
|
||||
|
||||
A `Bitmap` index stores a bitmap for each distinct value in the column for every row.
|
||||
|
||||
This index works best for low-cardinality columns, where the number of unique values
|
||||
is small (i.e., less than a few hundred).
|
||||
|
||||
#### Returns
|
||||
|
||||
[`Index`](Index.md)
|
||||
|
||||
***
|
||||
|
||||
### btree()
|
||||
|
||||
> `static` **btree**(): [`Index`](Index.md)
|
||||
```ts
|
||||
static btree(): Index
|
||||
```
|
||||
|
||||
Create a btree index
|
||||
|
||||
@@ -36,9 +57,82 @@ block size may be added in the future.
|
||||
|
||||
***
|
||||
|
||||
### fts()
|
||||
|
||||
```ts
|
||||
static fts(options?): Index
|
||||
```
|
||||
|
||||
Create a full text search index
|
||||
|
||||
A full text search index is an index on a string column, so that you can conduct full
|
||||
text searches on the column.
|
||||
|
||||
The results of a full text search are ordered by relevance measured by BM25.
|
||||
|
||||
You can combine filters with full text search.
|
||||
|
||||
For now, the full text search index only supports English, and doesn't support phrase search.
|
||||
|
||||
#### Parameters
|
||||
|
||||
* **options?**: `Partial`<`FtsOptions`>
|
||||
|
||||
#### Returns
|
||||
|
||||
[`Index`](Index.md)
|
||||
|
||||
***
|
||||
|
||||
### hnswPq()
|
||||
|
||||
```ts
|
||||
static hnswPq(options?): Index
|
||||
```
|
||||
|
||||
Create a hnswPq index
|
||||
|
||||
HNSW-PQ stands for Hierarchical Navigable Small World - Product Quantization.
|
||||
It is a variant of the HNSW algorithm that uses product quantization to compress
|
||||
the vectors.
|
||||
|
||||
#### Parameters
|
||||
|
||||
* **options?**: `Partial`<`HnswPqOptions`>
|
||||
|
||||
#### Returns
|
||||
|
||||
[`Index`](Index.md)
|
||||
|
||||
***
|
||||
|
||||
### hnswSq()
|
||||
|
||||
```ts
|
||||
static hnswSq(options?): Index
|
||||
```
|
||||
|
||||
Create a hnswSq index
|
||||
|
||||
HNSW-SQ stands for Hierarchical Navigable Small World - Scalar Quantization.
|
||||
It is a variant of the HNSW algorithm that uses scalar quantization to compress
|
||||
the vectors.
|
||||
|
||||
#### Parameters
|
||||
|
||||
* **options?**: `Partial`<`HnswSqOptions`>
|
||||
|
||||
#### Returns
|
||||
|
||||
[`Index`](Index.md)
|
||||
|
||||
***
|
||||
|
||||
### ivfPq()
|
||||
|
||||
> `static` **ivfPq**(`options`?): [`Index`](Index.md)
|
||||
```ts
|
||||
static ivfPq(options?): Index
|
||||
```
|
||||
|
||||
Create an IvfPq index
|
||||
|
||||
@@ -63,29 +157,25 @@ currently is also a memory intensive operation.
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **options?**: `Partial`<[`IvfPqOptions`](../interfaces/IvfPqOptions.md)>
|
||||
* **options?**: `Partial`<[`IvfPqOptions`](../interfaces/IvfPqOptions.md)>
|
||||
|
||||
#### Returns
|
||||
|
||||
[`Index`](Index.md)
|
||||
|
||||
### fts()
|
||||
***
|
||||
|
||||
> `static` **fts**(`options`?): [`Index`](Index.md)
|
||||
### labelList()
|
||||
|
||||
Create a full text search index
|
||||
```ts
|
||||
static labelList(): Index
|
||||
```
|
||||
|
||||
This index is used to search for text data. The index is created by tokenizing the text
|
||||
into words and then storing occurrences of these words in a data structure called inverted index
|
||||
that allows for fast search.
|
||||
Create a label list index.
|
||||
|
||||
During a search the query is tokenized and the inverted index is used to find the rows that
|
||||
contain the query words. The rows are then scored based on BM25 and the top scoring rows are
|
||||
sorted and returned.
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **options?**: `Partial`<[`FtsOptions`](../interfaces/FtsOptions.md)>
|
||||
LabelList index is a scalar index that can be used on `List<T>` columns to
|
||||
support queries with `array_contains_all` and `array_contains_any`
|
||||
using an underlying bitmap index.
|
||||
|
||||
#### Returns
|
||||
|
||||
|
||||
@@ -12,11 +12,13 @@ Options to control the makeArrowTable call.
|
||||
|
||||
### new MakeArrowTableOptions()
|
||||
|
||||
> **new MakeArrowTableOptions**(`values`?): [`MakeArrowTableOptions`](MakeArrowTableOptions.md)
|
||||
```ts
|
||||
new MakeArrowTableOptions(values?): MakeArrowTableOptions
|
||||
```
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **values?**: `Partial`<[`MakeArrowTableOptions`](MakeArrowTableOptions.md)>
|
||||
* **values?**: `Partial`<[`MakeArrowTableOptions`](MakeArrowTableOptions.md)>
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -26,7 +28,9 @@ Options to control the makeArrowTable call.
|
||||
|
||||
### dictionaryEncodeStrings
|
||||
|
||||
> **dictionaryEncodeStrings**: `boolean` = `false`
|
||||
```ts
|
||||
dictionaryEncodeStrings: boolean = false;
|
||||
```
|
||||
|
||||
If true then string columns will be encoded with dictionary encoding
|
||||
|
||||
@@ -40,22 +44,30 @@ If `schema` is provided then this property is ignored.
|
||||
|
||||
### embeddingFunction?
|
||||
|
||||
> `optional` **embeddingFunction**: [`EmbeddingFunctionConfig`](../namespaces/embedding/interfaces/EmbeddingFunctionConfig.md)
|
||||
```ts
|
||||
optional embeddingFunction: EmbeddingFunctionConfig;
|
||||
```
|
||||
|
||||
***
|
||||
|
||||
### embeddings?
|
||||
|
||||
> `optional` **embeddings**: [`EmbeddingFunction`](../namespaces/embedding/classes/EmbeddingFunction.md)<`unknown`, `FunctionOptions`>
|
||||
```ts
|
||||
optional embeddings: EmbeddingFunction<unknown, FunctionOptions>;
|
||||
```
|
||||
|
||||
***
|
||||
|
||||
### schema?
|
||||
|
||||
> `optional` **schema**: `SchemaLike`
|
||||
```ts
|
||||
optional schema: SchemaLike;
|
||||
```
|
||||
|
||||
***
|
||||
|
||||
### vectorColumns
|
||||
|
||||
> **vectorColumns**: `Record`<`string`, [`VectorColumnOptions`](VectorColumnOptions.md)>
|
||||
```ts
|
||||
vectorColumns: Record<string, VectorColumnOptions>;
|
||||
```
|
||||
|
||||
@@ -16,11 +16,13 @@ A builder for LanceDB queries.
|
||||
|
||||
### new Query()
|
||||
|
||||
> **new Query**(`tbl`): [`Query`](Query.md)
|
||||
```ts
|
||||
new Query(tbl): Query
|
||||
```
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **tbl**: `Table`
|
||||
* **tbl**: `Table`
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -34,7 +36,9 @@ A builder for LanceDB queries.
|
||||
|
||||
### inner
|
||||
|
||||
> `protected` **inner**: `Query` \| `Promise`<`Query`>
|
||||
```ts
|
||||
protected inner: Query | Promise<Query>;
|
||||
```
|
||||
|
||||
#### Inherited from
|
||||
|
||||
@@ -44,7 +48,9 @@ A builder for LanceDB queries.
|
||||
|
||||
### \[asyncIterator\]()
|
||||
|
||||
> **\[asyncIterator\]**(): `AsyncIterator`<`RecordBatch`<`any`>, `any`, `undefined`>
|
||||
```ts
|
||||
asyncIterator: AsyncIterator<RecordBatch<any>, any, undefined>
|
||||
```
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -58,11 +64,13 @@ A builder for LanceDB queries.
|
||||
|
||||
### doCall()
|
||||
|
||||
> `protected` **doCall**(`fn`): `void`
|
||||
```ts
|
||||
protected doCall(fn): void
|
||||
```
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **fn**
|
||||
* **fn**
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -76,13 +84,15 @@ A builder for LanceDB queries.
|
||||
|
||||
### execute()
|
||||
|
||||
> `protected` **execute**(`options`?): [`RecordBatchIterator`](RecordBatchIterator.md)
|
||||
```ts
|
||||
protected execute(options?): RecordBatchIterator
|
||||
```
|
||||
|
||||
Execute the query and return the results as an
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **options?**: `Partial`<`QueryExecutionOptions`>
|
||||
* **options?**: `Partial`<`QueryExecutionOptions`>
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -108,15 +118,16 @@ single query)
|
||||
|
||||
### explainPlan()
|
||||
|
||||
> **explainPlan**(`verbose`): `Promise`<`string`>
|
||||
```ts
|
||||
explainPlan(verbose): Promise<string>
|
||||
```
|
||||
|
||||
Generates an explanation of the query execution plan.
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **verbose**: `boolean` = `false`
|
||||
|
||||
If true, provides a more detailed explanation. Defaults to false.
|
||||
* **verbose**: `boolean` = `false`
|
||||
If true, provides a more detailed explanation. Defaults to false.
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -141,15 +152,38 @@ const plan = await table.query().nearestTo([0.5, 0.2]).explainPlan();
|
||||
|
||||
***
|
||||
|
||||
### fastSearch()
|
||||
|
||||
```ts
|
||||
fastSearch(): this
|
||||
```
|
||||
|
||||
Skip searching un-indexed data. This can make search faster, but will miss
|
||||
any data that is not yet indexed.
|
||||
|
||||
Use lancedb.Table#optimize to index all un-indexed data.
|
||||
|
||||
#### Returns
|
||||
|
||||
`this`
|
||||
|
||||
#### Inherited from
|
||||
|
||||
[`QueryBase`](QueryBase.md).[`fastSearch`](QueryBase.md#fastsearch)
|
||||
|
||||
***
|
||||
|
||||
### ~~filter()~~
|
||||
|
||||
> **filter**(`predicate`): `this`
|
||||
```ts
|
||||
filter(predicate): this
|
||||
```
|
||||
|
||||
A filter statement to be applied to this query.
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **predicate**: `string`
|
||||
* **predicate**: `string`
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -169,9 +203,33 @@ Use `where` instead
|
||||
|
||||
***
|
||||
|
||||
### fullTextSearch()
|
||||
|
||||
```ts
|
||||
fullTextSearch(query, options?): this
|
||||
```
|
||||
|
||||
#### Parameters
|
||||
|
||||
* **query**: `string`
|
||||
|
||||
* **options?**: `Partial`<`FullTextSearchOptions`>
|
||||
|
||||
#### Returns
|
||||
|
||||
`this`
|
||||
|
||||
#### Inherited from
|
||||
|
||||
[`QueryBase`](QueryBase.md).[`fullTextSearch`](QueryBase.md#fulltextsearch)
|
||||
|
||||
***
|
||||
|
||||
### limit()
|
||||
|
||||
> **limit**(`limit`): `this`
|
||||
```ts
|
||||
limit(limit): this
|
||||
```
|
||||
|
||||
Set the maximum number of results to return.
|
||||
|
||||
@@ -180,7 +238,7 @@ called then every valid row from the table will be returned.
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **limit**: `number`
|
||||
* **limit**: `number`
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -194,11 +252,13 @@ called then every valid row from the table will be returned.
|
||||
|
||||
### nativeExecute()
|
||||
|
||||
> `protected` **nativeExecute**(`options`?): `Promise`<`RecordBatchIterator`>
|
||||
```ts
|
||||
protected nativeExecute(options?): Promise<RecordBatchIterator>
|
||||
```
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **options?**: `Partial`<`QueryExecutionOptions`>
|
||||
* **options?**: `Partial`<`QueryExecutionOptions`>
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -212,7 +272,9 @@ called then every valid row from the table will be returned.
|
||||
|
||||
### nearestTo()
|
||||
|
||||
> **nearestTo**(`vector`): [`VectorQuery`](VectorQuery.md)
|
||||
```ts
|
||||
nearestTo(vector): VectorQuery
|
||||
```
|
||||
|
||||
Find the nearest vectors to the given query vector.
|
||||
|
||||
@@ -232,7 +294,7 @@ If there is more than one vector column you must use
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **vector**: `IntoVector`
|
||||
* **vector**: `IntoVector`
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -264,9 +326,49 @@ a default `limit` of 10 will be used.
|
||||
|
||||
***
|
||||
|
||||
### nearestToText()
|
||||
|
||||
```ts
|
||||
nearestToText(query, columns?): Query
|
||||
```
|
||||
|
||||
#### Parameters
|
||||
|
||||
* **query**: `string`
|
||||
|
||||
* **columns?**: `string`[]
|
||||
|
||||
#### Returns
|
||||
|
||||
[`Query`](Query.md)
|
||||
|
||||
***
|
||||
|
||||
### offset()
|
||||
|
||||
```ts
|
||||
offset(offset): this
|
||||
```
|
||||
|
||||
#### Parameters
|
||||
|
||||
* **offset**: `number`
|
||||
|
||||
#### Returns
|
||||
|
||||
`this`
|
||||
|
||||
#### Inherited from
|
||||
|
||||
[`QueryBase`](QueryBase.md).[`offset`](QueryBase.md#offset)
|
||||
|
||||
***
|
||||
|
||||
### select()
|
||||
|
||||
> **select**(`columns`): `this`
|
||||
```ts
|
||||
select(columns): this
|
||||
```
|
||||
|
||||
Return only the specified columns.
|
||||
|
||||
@@ -290,7 +392,7 @@ input to this method would be:
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **columns**: `string` \| `string`[] \| `Record`<`string`, `string`> \| `Map`<`string`, `string`>
|
||||
* **columns**: `string` \| `string`[] \| `Record`<`string`, `string`> \| `Map`<`string`, `string`>
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -317,13 +419,15 @@ object insertion order is easy to get wrong and `Map` is more foolproof.
|
||||
|
||||
### toArray()
|
||||
|
||||
> **toArray**(`options`?): `Promise`<`any`[]>
|
||||
```ts
|
||||
toArray(options?): Promise<any[]>
|
||||
```
|
||||
|
||||
Collect the results as an array of objects.
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **options?**: `Partial`<`QueryExecutionOptions`>
|
||||
* **options?**: `Partial`<`QueryExecutionOptions`>
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -337,13 +441,15 @@ Collect the results as an array of objects.
|
||||
|
||||
### toArrow()
|
||||
|
||||
> **toArrow**(`options`?): `Promise`<`Table`<`any`>>
|
||||
```ts
|
||||
toArrow(options?): Promise<Table<any>>
|
||||
```
|
||||
|
||||
Collect the results as an Arrow
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **options?**: `Partial`<`QueryExecutionOptions`>
|
||||
* **options?**: `Partial`<`QueryExecutionOptions`>
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -361,7 +467,9 @@ ArrowTable.
|
||||
|
||||
### where()
|
||||
|
||||
> **where**(`predicate`): `this`
|
||||
```ts
|
||||
where(predicate): this
|
||||
```
|
||||
|
||||
A filter statement to be applied to this query.
|
||||
|
||||
@@ -369,7 +477,7 @@ The filter should be supplied as an SQL query string. For example:
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **predicate**: `string`
|
||||
* **predicate**: `string`
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -389,3 +497,25 @@ on the filter column(s).
|
||||
#### Inherited from
|
||||
|
||||
[`QueryBase`](QueryBase.md).[`where`](QueryBase.md#where)
|
||||
|
||||
***
|
||||
|
||||
### withRowId()
|
||||
|
||||
```ts
|
||||
withRowId(): this
|
||||
```
|
||||
|
||||
Whether to return the row id in the results.
|
||||
|
||||
This column can be used to match results between different queries. For
|
||||
example, to match results from a full text search and a vector search in
|
||||
order to perform hybrid search.
|
||||
|
||||
#### Returns
|
||||
|
||||
`this`
|
||||
|
||||
#### Inherited from
|
||||
|
||||
[`QueryBase`](QueryBase.md).[`withRowId`](QueryBase.md#withrowid)
|
||||
|
||||
@@ -25,11 +25,13 @@ Common methods supported by all query types
|
||||
|
||||
### new QueryBase()
|
||||
|
||||
> `protected` **new QueryBase**<`NativeQueryType`>(`inner`): [`QueryBase`](QueryBase.md)<`NativeQueryType`>
|
||||
```ts
|
||||
protected new QueryBase<NativeQueryType>(inner): QueryBase<NativeQueryType>
|
||||
```
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **inner**: `NativeQueryType` \| `Promise`<`NativeQueryType`>
|
||||
* **inner**: `NativeQueryType` \| `Promise`<`NativeQueryType`>
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -39,13 +41,17 @@ Common methods supported by all query types
|
||||
|
||||
### inner
|
||||
|
||||
> `protected` **inner**: `NativeQueryType` \| `Promise`<`NativeQueryType`>
|
||||
```ts
|
||||
protected inner: NativeQueryType | Promise<NativeQueryType>;
|
||||
```
|
||||
|
||||
## Methods
|
||||
|
||||
### \[asyncIterator\]()
|
||||
|
||||
> **\[asyncIterator\]**(): `AsyncIterator`<`RecordBatch`<`any`>, `any`, `undefined`>
|
||||
```ts
|
||||
asyncIterator: AsyncIterator<RecordBatch<any>, any, undefined>
|
||||
```
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -59,11 +65,13 @@ Common methods supported by all query types
|
||||
|
||||
### doCall()
|
||||
|
||||
> `protected` **doCall**(`fn`): `void`
|
||||
```ts
|
||||
protected doCall(fn): void
|
||||
```
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **fn**
|
||||
* **fn**
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -73,13 +81,15 @@ Common methods supported by all query types
|
||||
|
||||
### execute()
|
||||
|
||||
> `protected` **execute**(`options`?): [`RecordBatchIterator`](RecordBatchIterator.md)
|
||||
```ts
|
||||
protected execute(options?): RecordBatchIterator
|
||||
```
|
||||
|
||||
Execute the query and return the results as an
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **options?**: `Partial`<`QueryExecutionOptions`>
|
||||
* **options?**: `Partial`<`QueryExecutionOptions`>
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -101,15 +111,16 @@ single query)
|
||||
|
||||
### explainPlan()
|
||||
|
||||
> **explainPlan**(`verbose`): `Promise`<`string`>
|
||||
```ts
|
||||
explainPlan(verbose): Promise<string>
|
||||
```
|
||||
|
||||
Generates an explanation of the query execution plan.
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **verbose**: `boolean` = `false`
|
||||
|
||||
If true, provides a more detailed explanation. Defaults to false.
|
||||
* **verbose**: `boolean` = `false`
|
||||
If true, provides a more detailed explanation. Defaults to false.
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -130,15 +141,34 @@ const plan = await table.query().nearestTo([0.5, 0.2]).explainPlan();
|
||||
|
||||
***
|
||||
|
||||
### fastSearch()
|
||||
|
||||
```ts
|
||||
fastSearch(): this
|
||||
```
|
||||
|
||||
Skip searching un-indexed data. This can make search faster, but will miss
|
||||
any data that is not yet indexed.
|
||||
|
||||
Use lancedb.Table#optimize to index all un-indexed data.
|
||||
|
||||
#### Returns
|
||||
|
||||
`this`
|
||||
|
||||
***
|
||||
|
||||
### ~~filter()~~
|
||||
|
||||
> **filter**(`predicate`): `this`
|
||||
```ts
|
||||
filter(predicate): this
|
||||
```
|
||||
|
||||
A filter statement to be applied to this query.
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **predicate**: `string`
|
||||
* **predicate**: `string`
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -154,9 +184,29 @@ Use `where` instead
|
||||
|
||||
***
|
||||
|
||||
### fullTextSearch()
|
||||
|
||||
```ts
|
||||
fullTextSearch(query, options?): this
|
||||
```
|
||||
|
||||
#### Parameters
|
||||
|
||||
* **query**: `string`
|
||||
|
||||
* **options?**: `Partial`<`FullTextSearchOptions`>
|
||||
|
||||
#### Returns
|
||||
|
||||
`this`
|
||||
|
||||
***
|
||||
|
||||
### limit()
|
||||
|
||||
> **limit**(`limit`): `this`
|
||||
```ts
|
||||
limit(limit): this
|
||||
```
|
||||
|
||||
Set the maximum number of results to return.
|
||||
|
||||
@@ -165,7 +215,7 @@ called then every valid row from the table will be returned.
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **limit**: `number`
|
||||
* **limit**: `number`
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -175,11 +225,13 @@ called then every valid row from the table will be returned.
|
||||
|
||||
### nativeExecute()
|
||||
|
||||
> `protected` **nativeExecute**(`options`?): `Promise`<`RecordBatchIterator`>
|
||||
```ts
|
||||
protected nativeExecute(options?): Promise<RecordBatchIterator>
|
||||
```
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **options?**: `Partial`<`QueryExecutionOptions`>
|
||||
* **options?**: `Partial`<`QueryExecutionOptions`>
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -187,9 +239,27 @@ called then every valid row from the table will be returned.
|
||||
|
||||
***
|
||||
|
||||
### offset()
|
||||
|
||||
```ts
|
||||
offset(offset): this
|
||||
```
|
||||
|
||||
#### Parameters
|
||||
|
||||
* **offset**: `number`
|
||||
|
||||
#### Returns
|
||||
|
||||
`this`
|
||||
|
||||
***
|
||||
|
||||
### select()
|
||||
|
||||
> **select**(`columns`): `this`
|
||||
```ts
|
||||
select(columns): this
|
||||
```
|
||||
|
||||
Return only the specified columns.
|
||||
|
||||
@@ -213,7 +283,7 @@ input to this method would be:
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **columns**: `string` \| `string`[] \| `Record`<`string`, `string`> \| `Map`<`string`, `string`>
|
||||
* **columns**: `string` \| `string`[] \| `Record`<`string`, `string`> \| `Map`<`string`, `string`>
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -236,13 +306,15 @@ object insertion order is easy to get wrong and `Map` is more foolproof.
|
||||
|
||||
### toArray()
|
||||
|
||||
> **toArray**(`options`?): `Promise`<`any`[]>
|
||||
```ts
|
||||
toArray(options?): Promise<any[]>
|
||||
```
|
||||
|
||||
Collect the results as an array of objects.
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **options?**: `Partial`<`QueryExecutionOptions`>
|
||||
* **options?**: `Partial`<`QueryExecutionOptions`>
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -252,13 +324,15 @@ Collect the results as an array of objects.
|
||||
|
||||
### toArrow()
|
||||
|
||||
> **toArrow**(`options`?): `Promise`<`Table`<`any`>>
|
||||
```ts
|
||||
toArrow(options?): Promise<Table<any>>
|
||||
```
|
||||
|
||||
Collect the results as an Arrow
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **options?**: `Partial`<`QueryExecutionOptions`>
|
||||
* **options?**: `Partial`<`QueryExecutionOptions`>
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -272,7 +346,9 @@ ArrowTable.
|
||||
|
||||
### where()
|
||||
|
||||
> **where**(`predicate`): `this`
|
||||
```ts
|
||||
where(predicate): this
|
||||
```
|
||||
|
||||
A filter statement to be applied to this query.
|
||||
|
||||
@@ -280,7 +356,7 @@ The filter should be supplied as an SQL query string. For example:
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **predicate**: `string`
|
||||
* **predicate**: `string`
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -296,3 +372,21 @@ x > 5 OR y = 'test'
|
||||
Filtering performance can often be improved by creating a scalar index
|
||||
on the filter column(s).
|
||||
```
|
||||
|
||||
***
|
||||
|
||||
### withRowId()
|
||||
|
||||
```ts
|
||||
withRowId(): this
|
||||
```
|
||||
|
||||
Whether to return the row id in the results.
|
||||
|
||||
This column can be used to match results between different queries. For
|
||||
example, to match results from a full text search and a vector search in
|
||||
order to perform hybrid search.
|
||||
|
||||
#### Returns
|
||||
|
||||
`this`
|
||||
|
||||
@@ -14,11 +14,13 @@
|
||||
|
||||
### new RecordBatchIterator()
|
||||
|
||||
> **new RecordBatchIterator**(`promise`?): [`RecordBatchIterator`](RecordBatchIterator.md)
|
||||
```ts
|
||||
new RecordBatchIterator(promise?): RecordBatchIterator
|
||||
```
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **promise?**: `Promise`<`RecordBatchIterator`>
|
||||
* **promise?**: `Promise`<`RecordBatchIterator`>
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -28,7 +30,9 @@
|
||||
|
||||
### next()
|
||||
|
||||
> **next**(): `Promise`<`IteratorResult`<`RecordBatch`<`any`>, `any`>>
|
||||
```ts
|
||||
next(): Promise<IteratorResult<RecordBatch<any>, any>>
|
||||
```
|
||||
|
||||
#### Returns
|
||||
|
||||
|
||||
@@ -21,7 +21,9 @@ collected.
|
||||
|
||||
### new Table()
|
||||
|
||||
> **new Table**(): [`Table`](Table.md)
|
||||
```ts
|
||||
new Table(): Table
|
||||
```
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -31,7 +33,9 @@ collected.
|
||||
|
||||
### name
|
||||
|
||||
> `get` `abstract` **name**(): `string`
|
||||
```ts
|
||||
get abstract name(): string
|
||||
```
|
||||
|
||||
Returns the name of the table
|
||||
|
||||
@@ -43,17 +47,18 @@ Returns the name of the table
|
||||
|
||||
### add()
|
||||
|
||||
> `abstract` **add**(`data`, `options`?): `Promise`<`void`>
|
||||
```ts
|
||||
abstract add(data, options?): Promise<void>
|
||||
```
|
||||
|
||||
Insert records into this Table.
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **data**: [`Data`](../type-aliases/Data.md)
|
||||
* **data**: [`Data`](../type-aliases/Data.md)
|
||||
Records to be inserted into the Table
|
||||
|
||||
Records to be inserted into the Table
|
||||
|
||||
• **options?**: `Partial`<[`AddDataOptions`](../interfaces/AddDataOptions.md)>
|
||||
* **options?**: `Partial`<[`AddDataOptions`](../interfaces/AddDataOptions.md)>
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -63,18 +68,19 @@ Records to be inserted into the Table
|
||||
|
||||
### addColumns()
|
||||
|
||||
> `abstract` **addColumns**(`newColumnTransforms`): `Promise`<`void`>
|
||||
```ts
|
||||
abstract addColumns(newColumnTransforms): Promise<void>
|
||||
```
|
||||
|
||||
Add new columns with defined values.
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **newColumnTransforms**: [`AddColumnsSql`](../interfaces/AddColumnsSql.md)[]
|
||||
|
||||
pairs of column names and
|
||||
the SQL expression to use to calculate the value of the new column. These
|
||||
expressions will be evaluated for each row in the table, and can
|
||||
reference existing columns in the table.
|
||||
* **newColumnTransforms**: [`AddColumnsSql`](../interfaces/AddColumnsSql.md)[]
|
||||
pairs of column names and
|
||||
the SQL expression to use to calculate the value of the new column. These
|
||||
expressions will be evaluated for each row in the table, and can
|
||||
reference existing columns in the table.
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -84,16 +90,17 @@ reference existing columns in the table.
|
||||
|
||||
### alterColumns()
|
||||
|
||||
> `abstract` **alterColumns**(`columnAlterations`): `Promise`<`void`>
|
||||
```ts
|
||||
abstract alterColumns(columnAlterations): Promise<void>
|
||||
```
|
||||
|
||||
Alter the name or nullability of columns.
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **columnAlterations**: [`ColumnAlteration`](../interfaces/ColumnAlteration.md)[]
|
||||
|
||||
One or more alterations to
|
||||
apply to columns.
|
||||
* **columnAlterations**: [`ColumnAlteration`](../interfaces/ColumnAlteration.md)[]
|
||||
One or more alterations to
|
||||
apply to columns.
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -103,7 +110,9 @@ apply to columns.
|
||||
|
||||
### checkout()
|
||||
|
||||
> `abstract` **checkout**(`version`): `Promise`<`void`>
|
||||
```ts
|
||||
abstract checkout(version): Promise<void>
|
||||
```
|
||||
|
||||
Checks out a specific version of the table. _This is an in-place operation._
|
||||
|
||||
@@ -116,9 +125,8 @@ wish to return to standard mode, call `checkoutLatest`.
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **version**: `number`
|
||||
|
||||
The version to checkout
|
||||
* **version**: `number`
|
||||
The version to checkout
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -144,7 +152,9 @@ console.log(await table.version()); // 2
|
||||
|
||||
### checkoutLatest()
|
||||
|
||||
> `abstract` **checkoutLatest**(): `Promise`<`void`>
|
||||
```ts
|
||||
abstract checkoutLatest(): Promise<void>
|
||||
```
|
||||
|
||||
Checkout the latest version of the table. _This is an in-place operation._
|
||||
|
||||
@@ -159,7 +169,9 @@ version of the table.
|
||||
|
||||
### close()
|
||||
|
||||
> `abstract` **close**(): `void`
|
||||
```ts
|
||||
abstract close(): void
|
||||
```
|
||||
|
||||
Close the table, releasing any underlying resources.
|
||||
|
||||
@@ -175,13 +187,15 @@ Any attempt to use the table after it is closed will result in an error.
|
||||
|
||||
### countRows()
|
||||
|
||||
> `abstract` **countRows**(`filter`?): `Promise`<`number`>
|
||||
```ts
|
||||
abstract countRows(filter?): Promise<number>
|
||||
```
|
||||
|
||||
Count the total number of rows in the dataset.
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **filter?**: `string`
|
||||
* **filter?**: `string`
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -191,7 +205,9 @@ Count the total number of rows in the dataset.
|
||||
|
||||
### createIndex()
|
||||
|
||||
> `abstract` **createIndex**(`column`, `options`?): `Promise`<`void`>
|
||||
```ts
|
||||
abstract createIndex(column, options?): Promise<void>
|
||||
```
|
||||
|
||||
Create an index to speed up queries.
|
||||
|
||||
@@ -202,9 +218,9 @@ vector and non-vector searches)
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **column**: `string`
|
||||
* **column**: `string`
|
||||
|
||||
• **options?**: `Partial`<[`IndexOptions`](../interfaces/IndexOptions.md)>
|
||||
* **options?**: `Partial`<[`IndexOptions`](../interfaces/IndexOptions.md)>
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -245,13 +261,15 @@ await table.createIndex("my_float_col");
|
||||
|
||||
### delete()
|
||||
|
||||
> `abstract` **delete**(`predicate`): `Promise`<`void`>
|
||||
```ts
|
||||
abstract delete(predicate): Promise<void>
|
||||
```
|
||||
|
||||
Delete the rows that satisfy the predicate.
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **predicate**: `string`
|
||||
* **predicate**: `string`
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -261,7 +279,9 @@ Delete the rows that satisfy the predicate.
|
||||
|
||||
### display()
|
||||
|
||||
> `abstract` **display**(): `string`
|
||||
```ts
|
||||
abstract display(): string
|
||||
```
|
||||
|
||||
Return a brief description of the table
|
||||
|
||||
@@ -273,7 +293,9 @@ Return a brief description of the table
|
||||
|
||||
### dropColumns()
|
||||
|
||||
> `abstract` **dropColumns**(`columnNames`): `Promise`<`void`>
|
||||
```ts
|
||||
abstract dropColumns(columnNames): Promise<void>
|
||||
```
|
||||
|
||||
Drop one or more columns from the dataset
|
||||
|
||||
@@ -284,11 +306,10 @@ then call ``cleanup_files`` to remove the old files.
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **columnNames**: `string`[]
|
||||
|
||||
The names of the columns to drop. These can
|
||||
be nested column references (e.g. "a.b.c") or top-level column names
|
||||
(e.g. "a").
|
||||
* **columnNames**: `string`[]
|
||||
The names of the columns to drop. These can
|
||||
be nested column references (e.g. "a.b.c") or top-level column names
|
||||
(e.g. "a").
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -298,15 +319,16 @@ be nested column references (e.g. "a.b.c") or top-level column names
|
||||
|
||||
### indexStats()
|
||||
|
||||
> `abstract` **indexStats**(`name`): `Promise`<`undefined` \| [`IndexStatistics`](../interfaces/IndexStatistics.md)>
|
||||
```ts
|
||||
abstract indexStats(name): Promise<undefined | IndexStatistics>
|
||||
```
|
||||
|
||||
List all the stats of a specified index
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **name**: `string`
|
||||
|
||||
The name of the index.
|
||||
* **name**: `string`
|
||||
The name of the index.
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -318,7 +340,9 @@ The stats of the index. If the index does not exist, it will return undefined
|
||||
|
||||
### isOpen()
|
||||
|
||||
> `abstract` **isOpen**(): `boolean`
|
||||
```ts
|
||||
abstract isOpen(): boolean
|
||||
```
|
||||
|
||||
Return true if the table has not been closed
|
||||
|
||||
@@ -330,7 +354,9 @@ Return true if the table has not been closed
|
||||
|
||||
### listIndices()
|
||||
|
||||
> `abstract` **listIndices**(): `Promise`<[`IndexConfig`](../interfaces/IndexConfig.md)[]>
|
||||
```ts
|
||||
abstract listIndices(): Promise<IndexConfig[]>
|
||||
```
|
||||
|
||||
List all indices that have been created with [Table.createIndex](Table.md#createindex)
|
||||
|
||||
@@ -340,13 +366,29 @@ List all indices that have been created with [Table.createIndex](Table.md#create
|
||||
|
||||
***
|
||||
|
||||
### listVersions()
|
||||
|
||||
```ts
|
||||
abstract listVersions(): Promise<Version[]>
|
||||
```
|
||||
|
||||
List all the versions of the table
|
||||
|
||||
#### Returns
|
||||
|
||||
`Promise`<`Version`[]>
|
||||
|
||||
***
|
||||
|
||||
### mergeInsert()
|
||||
|
||||
> `abstract` **mergeInsert**(`on`): `MergeInsertBuilder`
|
||||
```ts
|
||||
abstract mergeInsert(on): MergeInsertBuilder
|
||||
```
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **on**: `string` \| `string`[]
|
||||
* **on**: `string` \| `string`[]
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -356,7 +398,9 @@ List all indices that have been created with [Table.createIndex](Table.md#create
|
||||
|
||||
### optimize()
|
||||
|
||||
> `abstract` **optimize**(`options`?): `Promise`<`OptimizeStats`>
|
||||
```ts
|
||||
abstract optimize(options?): Promise<OptimizeStats>
|
||||
```
|
||||
|
||||
Optimize the on-disk data and indices for better performance.
|
||||
|
||||
@@ -388,7 +432,7 @@ Modeled after ``VACUUM`` in PostgreSQL.
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **options?**: `Partial`<`OptimizeOptions`>
|
||||
* **options?**: `Partial`<[`OptimizeOptions`](../interfaces/OptimizeOptions.md)>
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -398,7 +442,9 @@ Modeled after ``VACUUM`` in PostgreSQL.
|
||||
|
||||
### query()
|
||||
|
||||
> `abstract` **query**(): [`Query`](Query.md)
|
||||
```ts
|
||||
abstract query(): Query
|
||||
```
|
||||
|
||||
Create a [Query](Query.md) Builder.
|
||||
|
||||
@@ -466,7 +512,9 @@ for await (const batch of table.query()) {
|
||||
|
||||
### restore()
|
||||
|
||||
> `abstract` **restore**(): `Promise`<`void`>
|
||||
```ts
|
||||
abstract restore(): Promise<void>
|
||||
```
|
||||
|
||||
Restore the table to the currently checked out version
|
||||
|
||||
@@ -487,7 +535,9 @@ out state and the read_consistency_interval, if any, will apply.
|
||||
|
||||
### schema()
|
||||
|
||||
> `abstract` **schema**(): `Promise`<`Schema`<`any`>>
|
||||
```ts
|
||||
abstract schema(): Promise<Schema<any>>
|
||||
```
|
||||
|
||||
Get the schema of the table.
|
||||
|
||||
@@ -499,61 +549,41 @@ Get the schema of the table.
|
||||
|
||||
### search()
|
||||
|
||||
#### search(query)
|
||||
|
||||
> `abstract` **search**(`query`, `queryType`, `ftsColumns`): [`VectorQuery`](VectorQuery.md)
|
||||
```ts
|
||||
abstract search(
|
||||
query,
|
||||
queryType?,
|
||||
ftsColumns?): VectorQuery | Query
|
||||
```
|
||||
|
||||
Create a search query to find the nearest neighbors
|
||||
of the given query vector, or the documents
|
||||
with the highest relevance to the query string.
|
||||
of the given query
|
||||
|
||||
##### Parameters
|
||||
#### Parameters
|
||||
|
||||
• **query**: `string`
|
||||
* **query**: `string` \| `IntoVector`
|
||||
the query, a vector or string
|
||||
|
||||
the query. This will be converted to a vector using the table's provided embedding function,
|
||||
or the query string for full-text search if `queryType` is "fts".
|
||||
* **queryType?**: `string`
|
||||
the type of the query, "vector", "fts", or "auto"
|
||||
|
||||
• **queryType**: `string` = `"auto"` \| `"fts"`
|
||||
* **ftsColumns?**: `string` \| `string`[]
|
||||
the columns to search in for full text search
|
||||
for now, only one column can be searched at a time.
|
||||
when "auto" is used, if the query is a string and an embedding function is defined, it will be treated as a vector query
|
||||
if the query is a string and no embedding function is defined, it will be treated as a full text search query
|
||||
|
||||
the type of query to run. If "auto", the query type will be determined based on the query.
|
||||
#### Returns
|
||||
|
||||
• **ftsColumns**: `string[] | str` = undefined
|
||||
|
||||
the columns to search in. If not provided, all indexed columns will be searched.
|
||||
|
||||
For now, this can support to search only one column.
|
||||
|
||||
##### Returns
|
||||
|
||||
[`VectorQuery`](VectorQuery.md)
|
||||
|
||||
##### Note
|
||||
|
||||
If no embedding functions are defined in the table, this will error when collecting the results.
|
||||
|
||||
#### search(query)
|
||||
|
||||
> `abstract` **search**(`query`): [`VectorQuery`](VectorQuery.md)
|
||||
|
||||
Create a search query to find the nearest neighbors
|
||||
of the given query vector
|
||||
|
||||
##### Parameters
|
||||
|
||||
• **query**: `IntoVector`
|
||||
|
||||
the query vector
|
||||
|
||||
##### Returns
|
||||
|
||||
[`VectorQuery`](VectorQuery.md)
|
||||
[`VectorQuery`](VectorQuery.md) \| [`Query`](Query.md)
|
||||
|
||||
***
|
||||
|
||||
### toArrow()
|
||||
|
||||
> `abstract` **toArrow**(): `Promise`<`Table`<`any`>>
|
||||
```ts
|
||||
abstract toArrow(): Promise<Table<any>>
|
||||
```
|
||||
|
||||
Return the table as an arrow table
|
||||
|
||||
@@ -567,13 +597,15 @@ Return the table as an arrow table
|
||||
|
||||
#### update(opts)
|
||||
|
||||
> `abstract` **update**(`opts`): `Promise`<`void`>
|
||||
```ts
|
||||
abstract update(opts): Promise<void>
|
||||
```
|
||||
|
||||
Update existing records in the Table
|
||||
|
||||
##### Parameters
|
||||
|
||||
• **opts**: `object` & `Partial`<[`UpdateOptions`](../interfaces/UpdateOptions.md)>
|
||||
* **opts**: `object` & `Partial`<[`UpdateOptions`](../interfaces/UpdateOptions.md)>
|
||||
|
||||
##### Returns
|
||||
|
||||
@@ -587,13 +619,15 @@ table.update({where:"x = 2", values:{"vector": [10, 10]}})
|
||||
|
||||
#### update(opts)
|
||||
|
||||
> `abstract` **update**(`opts`): `Promise`<`void`>
|
||||
```ts
|
||||
abstract update(opts): Promise<void>
|
||||
```
|
||||
|
||||
Update existing records in the Table
|
||||
|
||||
##### Parameters
|
||||
|
||||
• **opts**: `object` & `Partial`<[`UpdateOptions`](../interfaces/UpdateOptions.md)>
|
||||
* **opts**: `object` & `Partial`<[`UpdateOptions`](../interfaces/UpdateOptions.md)>
|
||||
|
||||
##### Returns
|
||||
|
||||
@@ -607,7 +641,9 @@ table.update({where:"x = 2", valuesSql:{"x": "x + 1"}})
|
||||
|
||||
#### update(updates, options)
|
||||
|
||||
> `abstract` **update**(`updates`, `options`?): `Promise`<`void`>
|
||||
```ts
|
||||
abstract update(updates, options?): Promise<void>
|
||||
```
|
||||
|
||||
Update existing records in the Table
|
||||
|
||||
@@ -626,20 +662,17 @@ repeatedly calling this method.
|
||||
|
||||
##### Parameters
|
||||
|
||||
• **updates**: `Record`<`string`, `string`> \| `Map`<`string`, `string`>
|
||||
* **updates**: `Record`<`string`, `string`> \| `Map`<`string`, `string`>
|
||||
the
|
||||
columns to update
|
||||
Keys in the map should specify the name of the column to update.
|
||||
Values in the map provide the new value of the column. These can
|
||||
be SQL literal strings (e.g. "7" or "'foo'") or they can be expressions
|
||||
based on the row being updated (e.g. "my_col + 1")
|
||||
|
||||
the
|
||||
columns to update
|
||||
|
||||
Keys in the map should specify the name of the column to update.
|
||||
Values in the map provide the new value of the column. These can
|
||||
be SQL literal strings (e.g. "7" or "'foo'") or they can be expressions
|
||||
based on the row being updated (e.g. "my_col + 1")
|
||||
|
||||
• **options?**: `Partial`<[`UpdateOptions`](../interfaces/UpdateOptions.md)>
|
||||
|
||||
additional options to control
|
||||
the update behavior
|
||||
* **options?**: `Partial`<[`UpdateOptions`](../interfaces/UpdateOptions.md)>
|
||||
additional options to control
|
||||
the update behavior
|
||||
|
||||
##### Returns
|
||||
|
||||
@@ -649,7 +682,9 @@ the update behavior
|
||||
|
||||
### vectorSearch()
|
||||
|
||||
> `abstract` **vectorSearch**(`vector`): [`VectorQuery`](VectorQuery.md)
|
||||
```ts
|
||||
abstract vectorSearch(vector): VectorQuery
|
||||
```
|
||||
|
||||
Search the table with a given query vector.
|
||||
|
||||
@@ -659,7 +694,7 @@ by `query`.
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **vector**: `IntoVector`
|
||||
* **vector**: `IntoVector`
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -673,7 +708,9 @@ by `query`.
|
||||
|
||||
### version()
|
||||
|
||||
> `abstract` **version**(): `Promise`<`number`>
|
||||
```ts
|
||||
abstract version(): Promise<number>
|
||||
```
|
||||
|
||||
Retrieve the version of the table
|
||||
|
||||
@@ -685,15 +722,20 @@ Retrieve the version of the table
|
||||
|
||||
### parseTableData()
|
||||
|
||||
> `static` **parseTableData**(`data`, `options`?, `streaming`?): `Promise`<`object`>
|
||||
```ts
|
||||
static parseTableData(
|
||||
data,
|
||||
options?,
|
||||
streaming?): Promise<object>
|
||||
```
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **data**: `TableLike` \| `Record`<`string`, `unknown`>[]
|
||||
* **data**: `TableLike` \| `Record`<`string`, `unknown`>[]
|
||||
|
||||
• **options?**: `Partial`<[`CreateTableOptions`](../interfaces/CreateTableOptions.md)>
|
||||
* **options?**: `Partial`<[`CreateTableOptions`](../interfaces/CreateTableOptions.md)>
|
||||
|
||||
• **streaming?**: `boolean` = `false`
|
||||
* **streaming?**: `boolean` = `false`
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -701,8 +743,12 @@ Retrieve the version of the table
|
||||
|
||||
##### buf
|
||||
|
||||
> **buf**: `Buffer`
|
||||
```ts
|
||||
buf: Buffer;
|
||||
```
|
||||
|
||||
##### mode
|
||||
|
||||
> **mode**: `string`
|
||||
```ts
|
||||
mode: string;
|
||||
```
|
||||
|
||||
@@ -10,11 +10,13 @@
|
||||
|
||||
### new VectorColumnOptions()
|
||||
|
||||
> **new VectorColumnOptions**(`values`?): [`VectorColumnOptions`](VectorColumnOptions.md)
|
||||
```ts
|
||||
new VectorColumnOptions(values?): VectorColumnOptions
|
||||
```
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **values?**: `Partial`<[`VectorColumnOptions`](VectorColumnOptions.md)>
|
||||
* **values?**: `Partial`<[`VectorColumnOptions`](VectorColumnOptions.md)>
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -24,6 +26,8 @@
|
||||
|
||||
### type
|
||||
|
||||
> **type**: `Float`<`Floats`>
|
||||
```ts
|
||||
type: Float<Floats>;
|
||||
```
|
||||
|
||||
Vector column type.
|
||||
|
||||
@@ -18,11 +18,13 @@ This builder can be reused to execute the query many times.
|
||||
|
||||
### new VectorQuery()
|
||||
|
||||
> **new VectorQuery**(`inner`): [`VectorQuery`](VectorQuery.md)
|
||||
```ts
|
||||
new VectorQuery(inner): VectorQuery
|
||||
```
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **inner**: `VectorQuery` \| `Promise`<`VectorQuery`>
|
||||
* **inner**: `VectorQuery` \| `Promise`<`VectorQuery`>
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -36,7 +38,9 @@ This builder can be reused to execute the query many times.
|
||||
|
||||
### inner
|
||||
|
||||
> `protected` **inner**: `VectorQuery` \| `Promise`<`VectorQuery`>
|
||||
```ts
|
||||
protected inner: VectorQuery | Promise<VectorQuery>;
|
||||
```
|
||||
|
||||
#### Inherited from
|
||||
|
||||
@@ -46,7 +50,9 @@ This builder can be reused to execute the query many times.
|
||||
|
||||
### \[asyncIterator\]()
|
||||
|
||||
> **\[asyncIterator\]**(): `AsyncIterator`<`RecordBatch`<`any`>, `any`, `undefined`>
|
||||
```ts
|
||||
asyncIterator: AsyncIterator<RecordBatch<any>, any, undefined>
|
||||
```
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -58,9 +64,27 @@ This builder can be reused to execute the query many times.
|
||||
|
||||
***
|
||||
|
||||
### addQueryVector()
|
||||
|
||||
```ts
|
||||
addQueryVector(vector): VectorQuery
|
||||
```
|
||||
|
||||
#### Parameters
|
||||
|
||||
* **vector**: `IntoVector`
|
||||
|
||||
#### Returns
|
||||
|
||||
[`VectorQuery`](VectorQuery.md)
|
||||
|
||||
***
|
||||
|
||||
### bypassVectorIndex()
|
||||
|
||||
> **bypassVectorIndex**(): [`VectorQuery`](VectorQuery.md)
|
||||
```ts
|
||||
bypassVectorIndex(): VectorQuery
|
||||
```
|
||||
|
||||
If this is called then any vector index is skipped
|
||||
|
||||
@@ -78,7 +102,9 @@ calculate your recall to select an appropriate value for nprobes.
|
||||
|
||||
### column()
|
||||
|
||||
> **column**(`column`): [`VectorQuery`](VectorQuery.md)
|
||||
```ts
|
||||
column(column): VectorQuery
|
||||
```
|
||||
|
||||
Set the vector column to query
|
||||
|
||||
@@ -87,7 +113,7 @@ the call to
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **column**: `string`
|
||||
* **column**: `string`
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -104,7 +130,9 @@ whose data type is a fixed-size-list of floats.
|
||||
|
||||
### distanceType()
|
||||
|
||||
> **distanceType**(`distanceType`): [`VectorQuery`](VectorQuery.md)
|
||||
```ts
|
||||
distanceType(distanceType): VectorQuery
|
||||
```
|
||||
|
||||
Set the distance metric to use
|
||||
|
||||
@@ -114,7 +142,7 @@ use. See
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **distanceType**: `"l2"` \| `"cosine"` \| `"dot"`
|
||||
* **distanceType**: `"l2"` \| `"cosine"` \| `"dot"`
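A usage sketch; `queryVec` is a placeholder for a query vector of the right dimension:

```ts
// Rank results by cosine distance instead of the default "l2".
const hits = await table
  .vectorSearch(queryVec)
  .distanceType("cosine")
  .limit(5)
  .toArray();
```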
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -135,11 +163,13 @@ By default "l2" is used.
|
||||
|
||||
### doCall()
|
||||
|
||||
> `protected` **doCall**(`fn`): `void`
|
||||
```ts
|
||||
protected doCall(fn): void
|
||||
```
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **fn**
|
||||
* **fn**
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -151,15 +181,41 @@ By default "l2" is used.
|
||||
|
||||
***
|
||||
|
||||
### ef()
|
||||
|
||||
```ts
|
||||
ef(ef): VectorQuery
|
||||
```
|
||||
|
||||
Set the number of candidates to consider during the search
|
||||
|
||||
This argument is only used when the vector column has an HNSW index.
|
||||
If there is no index then this value is ignored.
|
||||
|
||||
Increasing this value will increase the recall of your query but will
|
||||
also increase the latency of your query. The default value is 1.5*limit.
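For example (a sketch; only meaningful when the vector column has an HNSW index):

```ts
// Consider more candidates per query to trade latency for recall.
const rows = await table
  .vectorSearch(queryVec)
  .ef(64)
  .limit(10)
  .toArray();
```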
|
||||
|
||||
#### Parameters
|
||||
|
||||
* **ef**: `number`
|
||||
|
||||
#### Returns
|
||||
|
||||
[`VectorQuery`](VectorQuery.md)
|
||||
|
||||
***
|
||||
|
||||
### execute()
|
||||
|
||||
> `protected` **execute**(`options`?): [`RecordBatchIterator`](RecordBatchIterator.md)
|
||||
```ts
|
||||
protected execute(options?): RecordBatchIterator
|
||||
```
|
||||
|
||||
Execute the query and return the results as an
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **options?**: `Partial`<`QueryExecutionOptions`>
|
||||
* **options?**: `Partial`<`QueryExecutionOptions`>
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -185,15 +241,16 @@ single query)
|
||||
|
||||
### explainPlan()
|
||||
|
||||
> **explainPlan**(`verbose`): `Promise`<`string`>
|
||||
```ts
|
||||
explainPlan(verbose): Promise<string>
|
||||
```
|
||||
|
||||
Generates an explanation of the query execution plan.
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **verbose**: `boolean` = `false`
|
||||
|
||||
If true, provides a more detailed explanation. Defaults to false.
|
||||
* **verbose**: `boolean` = `false`
|
||||
If true, provides a more detailed explanation. Defaults to false.
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -218,15 +275,38 @@ const plan = await table.query().nearestTo([0.5, 0.2]).explainPlan();
|
||||
|
||||
***
|
||||
|
||||
### fastSearch()
|
||||
|
||||
```ts
|
||||
fastSearch(): this
|
||||
```
|
||||
|
||||
Skip searching un-indexed data. This can make search faster, but will miss
|
||||
any data that is not yet indexed.
|
||||
|
||||
Use lancedb.Table#optimize to index all un-indexed data.
|
||||
|
||||
#### Returns
|
||||
|
||||
`this`
|
||||
|
||||
#### Inherited from
|
||||
|
||||
[`QueryBase`](QueryBase.md).[`fastSearch`](QueryBase.md#fastsearch)
|
||||
|
||||
***
|
||||
|
||||
### ~~filter()~~
|
||||
|
||||
> **filter**(`predicate`): `this`
|
||||
```ts
|
||||
filter(predicate): this
|
||||
```
|
||||
|
||||
A filter statement to be applied to this query.
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **predicate**: `string`
|
||||
* **predicate**: `string`
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -246,9 +326,33 @@ Use `where` instead
|
||||
|
||||
***
|
||||
|
||||
### fullTextSearch()
|
||||
|
||||
```ts
|
||||
fullTextSearch(query, options?): this
|
||||
```
|
||||
|
||||
#### Parameters
|
||||
|
||||
* **query**: `string`
|
||||
|
||||
* **options?**: `Partial`<`FullTextSearchOptions`>
|
||||
|
||||
#### Returns
|
||||
|
||||
`this`
|
||||
|
||||
#### Inherited from
|
||||
|
||||
[`QueryBase`](QueryBase.md).[`fullTextSearch`](QueryBase.md#fulltextsearch)
|
||||
|
||||
***
|
||||
|
||||
### limit()
|
||||
|
||||
> **limit**(`limit`): `this`
|
||||
```ts
|
||||
limit(limit): this
|
||||
```
|
||||
|
||||
Set the maximum number of results to return.
|
||||
|
||||
@@ -257,7 +361,7 @@ called then every valid row from the table will be returned.
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **limit**: `number`
|
||||
* **limit**: `number`
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -271,11 +375,13 @@ called then every valid row from the table will be returned.
|
||||
|
||||
### nativeExecute()
|
||||
|
||||
> `protected` **nativeExecute**(`options`?): `Promise`<`RecordBatchIterator`>
|
||||
```ts
|
||||
protected nativeExecute(options?): Promise<RecordBatchIterator>
|
||||
```
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **options?**: `Partial`<`QueryExecutionOptions`>
|
||||
* **options?**: `Partial`<`QueryExecutionOptions`>
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -289,7 +395,9 @@ called then every valid row from the table will be returned.
|
||||
|
||||
### nprobes()
|
||||
|
||||
> **nprobes**(`nprobes`): [`VectorQuery`](VectorQuery.md)
|
||||
```ts
|
||||
nprobes(nprobes): VectorQuery
|
||||
```
|
||||
|
||||
Set the number of partitions to search (probe)
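A sketch of tuning this value (assumes an IVF-based index on the vector column; the value 20 is illustrative):

```ts
// Probe 20 partitions instead of the default to improve recall.
const results = await table
  .vectorSearch(queryVec)
  .nprobes(20)
  .limit(10)
  .toArray();
```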
|
||||
|
||||
@@ -314,7 +422,7 @@ you the desired recall.
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **nprobes**: `number`
|
||||
* **nprobes**: `number`
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -322,9 +430,31 @@ you the desired recall.
|
||||
|
||||
***
|
||||
|
||||
### offset()
|
||||
|
||||
```ts
|
||||
offset(offset): this
|
||||
```
|
||||
|
||||
#### Parameters
|
||||
|
||||
* **offset**: `number`
|
||||
|
||||
#### Returns
|
||||
|
||||
`this`
|
||||
|
||||
#### Inherited from
|
||||
|
||||
[`QueryBase`](QueryBase.md).[`offset`](QueryBase.md#offset)
|
||||
|
||||
***
|
||||
|
||||
### postfilter()
|
||||
|
||||
> **postfilter**(): [`VectorQuery`](VectorQuery.md)
|
||||
```ts
|
||||
postfilter(): VectorQuery
|
||||
```
|
||||
|
||||
If this is called then filtering will happen after the vector search instead of
|
||||
before.
|
||||
@@ -356,7 +486,9 @@ factor can often help restore some of the results lost by post filtering.
|
||||
|
||||
### refineFactor()
|
||||
|
||||
> **refineFactor**(`refineFactor`): [`VectorQuery`](VectorQuery.md)
|
||||
```ts
|
||||
refineFactor(refineFactor): VectorQuery
|
||||
```
|
||||
|
||||
A multiplier to control how many additional rows are taken during the refine step
|
||||
|
||||
@@ -388,7 +520,7 @@ distance between the query vector and the actual uncompressed vector.
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **refineFactor**: `number`
|
||||
* **refineFactor**: `number`
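For example (a sketch; the factor of 2 below is illustrative):

```ts
// Read 2x the requested rows, re-rank them against the uncompressed vectors,
// then keep the top 10.
const refined = await table
  .vectorSearch(queryVec)
  .refineFactor(2)
  .limit(10)
  .toArray();
```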
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -398,7 +530,9 @@ distance between the query vector and the actual uncompressed vector.
|
||||
|
||||
### select()
|
||||
|
||||
> **select**(`columns`): `this`
|
||||
```ts
|
||||
select(columns): this
|
||||
```
|
||||
|
||||
Return only the specified columns.
|
||||
|
||||
@@ -422,7 +556,7 @@ input to this method would be:
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **columns**: `string` \| `string`[] \| `Record`<`string`, `string`> \| `Map`<`string`, `string`>
|
||||
* **columns**: `string` \| `string`[] \| `Record`<`string`, `string`> \| `Map`<`string`, `string`>
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -449,13 +583,15 @@ object insertion order is easy to get wrong and `Map` is more foolproof.
|
||||
|
||||
### toArray()
|
||||
|
||||
> **toArray**(`options`?): `Promise`<`any`[]>
|
||||
```ts
|
||||
toArray(options?): Promise<any[]>
|
||||
```
|
||||
|
||||
Collect the results as an array of objects.
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **options?**: `Partial`<`QueryExecutionOptions`>
|
||||
* **options?**: `Partial`<`QueryExecutionOptions`>
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -469,13 +605,15 @@ Collect the results as an array of objects.
|
||||
|
||||
### toArrow()
|
||||
|
||||
> **toArrow**(`options`?): `Promise`<`Table`<`any`>>
|
||||
```ts
|
||||
toArrow(options?): Promise<Table<any>>
|
||||
```
|
||||
|
||||
Collect the results as an Arrow
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **options?**: `Partial`<`QueryExecutionOptions`>
|
||||
* **options?**: `Partial`<`QueryExecutionOptions`>
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -493,7 +631,9 @@ ArrowTable.
|
||||
|
||||
### where()
|
||||
|
||||
> **where**(`predicate`): `this`
|
||||
```ts
|
||||
where(predicate): this
|
||||
```
|
||||
|
||||
A filter statement to be applied to this query.
|
||||
|
||||
@@ -501,7 +641,7 @@ The filter should be supplied as an SQL query string. For example:
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **predicate**: `string`
|
||||
* **predicate**: `string`
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -521,3 +661,25 @@ on the filter column(s).
|
||||
#### Inherited from
|
||||
|
||||
[`QueryBase`](QueryBase.md).[`where`](QueryBase.md#where)
|
||||
|
||||
***
|
||||
|
||||
### withRowId()
|
||||
|
||||
```ts
|
||||
withRowId(): this
|
||||
```
|
||||
|
||||
Whether to return the row id in the results.
|
||||
|
||||
This column can be used to match results between different queries. For
|
||||
example, to match results from a full text search and a vector search in
|
||||
order to perform hybrid search.
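A sketch of the hybrid-search pattern described above; the `_rowid` column name used for matching is an assumption about how the row id is surfaced:

```ts
// Run a full text search and a vector search, keeping row ids so the
// two result sets can be joined.
const ftsRows = await table.query().fullTextSearch("puppy").withRowId().limit(20).toArray();
const vecRows = await table.vectorSearch(queryVec).withRowId().limit(20).toArray();
const overlap = vecRows.filter((v) => ftsRows.some((f) => f._rowid === v._rowid));
```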
|
||||
|
||||
#### Returns
|
||||
|
||||
`this`
|
||||
|
||||
#### Inherited from
|
||||
|
||||
[`QueryBase`](QueryBase.md).[`withRowId`](QueryBase.md#withrowid)
|
||||
|
||||
@@ -12,16 +12,22 @@ Write mode for writing a table.
|
||||
|
||||
### Append
|
||||
|
||||
> **Append**: `"Append"`
|
||||
```ts
|
||||
Append: "Append";
|
||||
```
|
||||
|
||||
***
|
||||
|
||||
### Create
|
||||
|
||||
> **Create**: `"Create"`
|
||||
```ts
|
||||
Create: "Create";
|
||||
```
|
||||
|
||||
***
|
||||
|
||||
### Overwrite
|
||||
|
||||
> **Overwrite**: `"Overwrite"`
|
||||
```ts
|
||||
Overwrite: "Overwrite";
|
||||
```
|
||||
|
||||
@@ -8,7 +8,9 @@
|
||||
|
||||
## connect(uri, opts)
|
||||
|
||||
> **connect**(`uri`, `opts`?): `Promise`<[`Connection`](../classes/Connection.md)>
|
||||
```ts
|
||||
function connect(uri, opts?): Promise<Connection>
|
||||
```
|
||||
|
||||
Connect to a LanceDB instance at the given URI.
|
||||
|
||||
@@ -20,12 +22,11 @@ Accepted formats:
|
||||
|
||||
### Parameters
|
||||
|
||||
• **uri**: `string`
|
||||
* **uri**: `string`
|
||||
The uri of the database. If the database uri starts
|
||||
with `db://` then it connects to a remote database.
|
||||
|
||||
The uri of the database. If the database uri starts
|
||||
with `db://` then it connects to a remote database.
|
||||
|
||||
• **opts?**: `Partial`<[`ConnectionOptions`](../interfaces/ConnectionOptions.md) \| `RemoteConnectionOptions`>
|
||||
* **opts?**: `Partial`<[`ConnectionOptions`](../interfaces/ConnectionOptions.md)>
|
||||
|
||||
### Returns
|
||||
|
||||
@@ -50,7 +51,9 @@ const conn = await connect(
|
||||
|
||||
## connect(opts)
|
||||
|
||||
> **connect**(`opts`): `Promise`<[`Connection`](../classes/Connection.md)>
|
||||
```ts
|
||||
function connect(opts): Promise<Connection>
|
||||
```
|
||||
|
||||
Connect to a LanceDB instance at the given URI.
|
||||
|
||||
@@ -62,7 +65,7 @@ Accepted formats:
|
||||
|
||||
### Parameters
|
||||
|
||||
• **opts**: `Partial`<[`ConnectionOptions`](../interfaces/ConnectionOptions.md) \| `RemoteConnectionOptions`> & `object`
|
||||
* **opts**: `Partial`<[`ConnectionOptions`](../interfaces/ConnectionOptions.md)> & `object`
|
||||
|
||||
### Returns
|
||||
|
||||
|
||||
@@ -6,7 +6,12 @@
|
||||
|
||||
# Function: makeArrowTable()
|
||||
|
||||
> **makeArrowTable**(`data`, `options`?, `metadata`?): `ArrowTable`
|
||||
```ts
|
||||
function makeArrowTable(
|
||||
data,
|
||||
options?,
|
||||
metadata?): ArrowTable
|
||||
```
|
||||
|
||||
An enhanced version of the makeTable function from Apache Arrow
|
||||
that supports nested fields and embeddings columns.
|
||||
@@ -40,11 +45,11 @@ rules are as follows:
|
||||
|
||||
## Parameters
|
||||
|
||||
• **data**: `Record`<`string`, `unknown`>[]
|
||||
* **data**: `Record`<`string`, `unknown`>[]
|
||||
|
||||
• **options?**: `Partial`<[`MakeArrowTableOptions`](../classes/MakeArrowTableOptions.md)>
|
||||
* **options?**: `Partial`<[`MakeArrowTableOptions`](../classes/MakeArrowTableOptions.md)>
|
||||
|
||||
• **metadata?**: `Map`<`string`, `string`>
|
||||
* **metadata?**: `Map`<`string`, `string`>
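A minimal sketch, assuming the function is exported from the package root; field names are illustrative:

```ts
import { makeArrowTable } from "@lancedb/lancedb";

// Build an Arrow table from plain records; vector columns are inferred.
const arrowTable = makeArrowTable([
  { id: 1, vector: [0.1, 0.2] },
  { id: 2, vector: [0.3, 0.4] },
]);
```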
|
||||
|
||||
## Returns
|
||||
|
||||
|
||||
@@ -28,17 +28,19 @@
|
||||
|
||||
- [AddColumnsSql](interfaces/AddColumnsSql.md)
|
||||
- [AddDataOptions](interfaces/AddDataOptions.md)
|
||||
- [ClientConfig](interfaces/ClientConfig.md)
|
||||
- [ColumnAlteration](interfaces/ColumnAlteration.md)
|
||||
- [ConnectionOptions](interfaces/ConnectionOptions.md)
|
||||
- [CreateTableOptions](interfaces/CreateTableOptions.md)
|
||||
- [ExecutableQuery](interfaces/ExecutableQuery.md)
|
||||
- [IndexConfig](interfaces/IndexConfig.md)
|
||||
- [IndexMetadata](interfaces/IndexMetadata.md)
|
||||
- [IndexOptions](interfaces/IndexOptions.md)
|
||||
- [IndexStatistics](interfaces/IndexStatistics.md)
|
||||
- [IvfPqOptions](interfaces/IvfPqOptions.md)
|
||||
- [FtsOptions](interfaces/FtsOptions.md)
|
||||
- [OptimizeOptions](interfaces/OptimizeOptions.md)
|
||||
- [RetryConfig](interfaces/RetryConfig.md)
|
||||
- [TableNamesOptions](interfaces/TableNamesOptions.md)
|
||||
- [TimeoutConfig](interfaces/TimeoutConfig.md)
|
||||
- [UpdateOptions](interfaces/UpdateOptions.md)
|
||||
- [WriteOptions](interfaces/WriteOptions.md)
|
||||
|
||||
|
||||
@@ -12,7 +12,9 @@ A definition of a new column to add to a table.
|
||||
|
||||
### name
|
||||
|
||||
> **name**: `string`
|
||||
```ts
|
||||
name: string;
|
||||
```
|
||||
|
||||
The name of the new column.
|
||||
|
||||
@@ -20,7 +22,9 @@ The name of the new column.
|
||||
|
||||
### valueSql
|
||||
|
||||
> **valueSql**: `string`
|
||||
```ts
|
||||
valueSql: string;
|
||||
```
|
||||
|
||||
The values to populate the new column with, as a SQL expression.
|
||||
The expression can reference other columns in the table.
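A sketch of how such a definition might be passed to `Table#addColumns`, assuming a numeric column `x` already exists:

```ts
// Add a column computed from an existing one via a SQL expression.
await table.addColumns([
  { name: "double_x", valueSql: "x * 2" },
]);
```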
|
||||
|
||||
@@ -12,7 +12,9 @@ Options for adding data to a table.
|
||||
|
||||
### mode
|
||||
|
||||
> **mode**: `"append"` \| `"overwrite"`
|
||||
```ts
|
||||
mode: "append" | "overwrite";
|
||||
```
|
||||
|
||||
If "append" (the default) then the new data will be added to the table
|
||||
|
||||
|
||||
docs/src/js/interfaces/ClientConfig.md (new file, 31 lines)
@@ -0,0 +1,31 @@
|
||||
[**@lancedb/lancedb**](../README.md) • **Docs**
|
||||
|
||||
***
|
||||
|
||||
[@lancedb/lancedb](../globals.md) / ClientConfig
|
||||
|
||||
# Interface: ClientConfig
|
||||
|
||||
## Properties
|
||||
|
||||
### retryConfig?
|
||||
|
||||
```ts
|
||||
optional retryConfig: RetryConfig;
|
||||
```
|
||||
|
||||
***
|
||||
|
||||
### timeoutConfig?
|
||||
|
||||
```ts
|
||||
optional timeoutConfig: TimeoutConfig;
|
||||
```
|
||||
|
||||
***
|
||||
|
||||
### userAgent?
|
||||
|
||||
```ts
|
||||
optional userAgent: string;
|
||||
```
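A sketch of passing a `ClientConfig` when connecting to LanceDB Cloud; the database URI and values shown are illustrative:

```ts
import { connect } from "@lancedb/lancedb";

const db = await connect("db://my-project", {
  apiKey: process.env.LANCEDB_API_KEY,
  clientConfig: {
    retryConfig: { retries: 5, backoffFactor: 0.5 },
    timeoutConfig: { connectTimeout: 30, readTimeout: 120 },
    userAgent: "my-app/1.0",
  },
});
```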
|
||||
@@ -13,9 +13,29 @@ must be provided.
|
||||
|
||||
## Properties
|
||||
|
||||
### dataType?
|
||||
|
||||
```ts
|
||||
optional dataType: string;
|
||||
```
|
||||
|
||||
A new data type for the column. If not provided then the data type will not be changed.
|
||||
Changing data types is limited to casting to the same general type. For example, these
|
||||
changes are valid:
|
||||
* `int32` -> `int64` (integers)
|
||||
* `double` -> `float` (floats)
|
||||
* `string` -> `large_string` (strings)
|
||||
But these changes are not:
|
||||
* `int32` -> `double` (mix integers and floats)
|
||||
* `string` -> `int32` (mix strings and integers)
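For illustration, a sketch assuming these alterations are passed to `Table#alterColumns`; the column names are hypothetical:

```ts
// Widen an integer column and rename a nested one in a single call.
await table.alterColumns([
  { path: "id", dataType: "int64" },
  { path: "meta.note", rename: "comment" },
]);
```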
|
||||
|
||||
***
|
||||
|
||||
### nullable?
|
||||
|
||||
> `optional` **nullable**: `boolean`
|
||||
```ts
|
||||
optional nullable: boolean;
|
||||
```
|
||||
|
||||
Set the new nullability. Note that a nullable column cannot be made non-nullable.
|
||||
|
||||
@@ -23,7 +43,9 @@ Set the new nullability. Note that a nullable column cannot be made non-nullable
|
||||
|
||||
### path
|
||||
|
||||
> **path**: `string`
|
||||
```ts
|
||||
path: string;
|
||||
```
|
||||
|
||||
The path to the column to alter. This is a dot-separated path to the column.
|
||||
If it is a top-level column then it is just the name of the column. If it is
|
||||
@@ -34,7 +56,9 @@ a nested column then it is the path to the column, e.g. "a.b.c" for a column
|
||||
|
||||
### rename?
|
||||
|
||||
> `optional` **rename**: `string`
|
||||
```ts
|
||||
optional rename: string;
|
||||
```
|
||||
|
||||
The new name of the column. If not provided then the name will not be changed.
|
||||
This must be distinct from the names of all other columns in the table.
|
||||
|
||||
@@ -8,9 +8,44 @@
|
||||
|
||||
## Properties
|
||||
|
||||
### apiKey?
|
||||
|
||||
```ts
|
||||
optional apiKey: string;
|
||||
```
|
||||
|
||||
(For LanceDB cloud only): the API key to use with LanceDB Cloud.
|
||||
|
||||
Can also be set via the environment variable `LANCEDB_API_KEY`.
|
||||
|
||||
***
|
||||
|
||||
### clientConfig?
|
||||
|
||||
```ts
|
||||
optional clientConfig: ClientConfig;
|
||||
```
|
||||
|
||||
(For LanceDB cloud only): configuration for the remote HTTP client.
|
||||
|
||||
***
|
||||
|
||||
### hostOverride?
|
||||
|
||||
```ts
|
||||
optional hostOverride: string;
|
||||
```
|
||||
|
||||
(For LanceDB cloud only): the host to use for LanceDB cloud. Used
|
||||
for testing purposes.
|
||||
|
||||
***
|
||||
|
||||
### readConsistencyInterval?
|
||||
|
||||
> `optional` **readConsistencyInterval**: `number`
|
||||
```ts
|
||||
optional readConsistencyInterval: number;
|
||||
```
|
||||
|
||||
(For LanceDB OSS only): The interval, in seconds, at which to check for
|
||||
updates to the table from other processes. If None, then consistency is not
|
||||
@@ -24,9 +59,22 @@ always consistent.
|
||||
|
||||
***
|
||||
|
||||
### region?
|
||||
|
||||
```ts
|
||||
optional region: string;
|
||||
```
|
||||
|
||||
(For LanceDB cloud only): the region to use for LanceDB cloud.
|
||||
Defaults to 'us-east-1'.
|
||||
|
||||
***
|
||||
|
||||
### storageOptions?
|
||||
|
||||
> `optional` **storageOptions**: `Record`<`string`, `string`>
|
||||
```ts
|
||||
optional storageOptions: Record<string, string>;
|
||||
```
|
||||
|
||||
(For LanceDB OSS only): configuration for object storage.
|
||||
|
||||
|
||||
@@ -8,15 +8,46 @@
|
||||
|
||||
## Properties
|
||||
|
||||
### dataStorageVersion?
|
||||
|
||||
```ts
|
||||
optional dataStorageVersion: string;
|
||||
```
|
||||
|
||||
The version of the data storage format to use.
|
||||
|
||||
The default is `stable`.
|
||||
Set to "legacy" to use the old format.
|
||||
|
||||
***
|
||||
|
||||
### embeddingFunction?
|
||||
|
||||
> `optional` **embeddingFunction**: [`EmbeddingFunctionConfig`](../namespaces/embedding/interfaces/EmbeddingFunctionConfig.md)
|
||||
```ts
|
||||
optional embeddingFunction: EmbeddingFunctionConfig;
|
||||
```
|
||||
|
||||
***
|
||||
|
||||
### enableV2ManifestPaths?
|
||||
|
||||
```ts
|
||||
optional enableV2ManifestPaths: boolean;
|
||||
```
|
||||
|
||||
Use the new V2 manifest paths. These paths provide more efficient
|
||||
opening of datasets with many versions on object stores. WARNING:
|
||||
turning this on will make the dataset unreadable for older versions
|
||||
of LanceDB (prior to 0.10.0). To migrate an existing dataset, instead
|
||||
use the LocalTable#migrateManifestPathsV2 method.
|
||||
|
||||
***
|
||||
|
||||
### existOk
|
||||
|
||||
> **existOk**: `boolean`
|
||||
```ts
|
||||
existOk: boolean;
|
||||
```
|
||||
|
||||
If this is true and the table already exists and the mode is "create"
|
||||
then no error will be raised.
|
||||
@@ -25,7 +56,9 @@ then no error will be raised.
|
||||
|
||||
### mode
|
||||
|
||||
> **mode**: `"overwrite"` \| `"create"`
|
||||
```ts
|
||||
mode: "overwrite" | "create";
|
||||
```
|
||||
|
||||
The mode to use when creating the table.
|
||||
|
||||
@@ -39,13 +72,17 @@ If this is set to "overwrite" then any existing table will be replaced.
|
||||
|
||||
### schema?
|
||||
|
||||
> `optional` **schema**: `SchemaLike`
|
||||
```ts
|
||||
optional schema: SchemaLike;
|
||||
```
|
||||
|
||||
***
|
||||
|
||||
### storageOptions?
|
||||
|
||||
> `optional` **storageOptions**: `Record`<`string`, `string`>
|
||||
```ts
|
||||
optional storageOptions: Record<string, string>;
|
||||
```
|
||||
|
||||
Configuration for object storage.
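A sketch of supplying these options at table creation time, assuming `db` is an open connection and `rows` is the initial data; the storage option key shown is illustrative:

```ts
const table = await db.createTable("my_table", rows, {
  mode: "create",
  existOk: true,
  storageOptions: { timeout: "60s" },
});
```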
|
||||
|
||||
@@ -58,8 +95,12 @@ The available options are described at https://lancedb.github.io/lancedb/guides/
|
||||
|
||||
### useLegacyFormat?
|
||||
|
||||
> `optional` **useLegacyFormat**: `boolean`
|
||||
```ts
|
||||
optional useLegacyFormat: boolean;
|
||||
```
|
||||
|
||||
If true then data files will be written with the legacy format
|
||||
|
||||
The default is true while the new format is in beta
|
||||
The default is false.
|
||||
|
||||
Deprecated. Use data storage version instead.
|
||||
|
||||
@@ -1,25 +0,0 @@
|
||||
[**@lancedb/lancedb**](../README.md) • **Docs**
|
||||
|
||||
***
|
||||
|
||||
[@lancedb/lancedb](../globals.md) / FtsOptions
|
||||
|
||||
# Interface: FtsOptions
|
||||
|
||||
Options to create an `FTS` index
|
||||
|
||||
## Properties
|
||||
|
||||
### withPosition?
|
||||
|
||||
> `optional` **withPosition**: `boolean`
|
||||
|
||||
Whether to store the positions of the term in the document.
|
||||
|
||||
If this is true then the index will store the positions of the term in the document.
|
||||
This allows phrase queries to be run. But it also increases the size of the index,
|
||||
and the time to build the index.
|
||||
|
||||
The default value is true.
|
||||
|
||||
***
|
||||
@@ -12,7 +12,9 @@ A description of an index currently configured on a column
|
||||
|
||||
### columns
|
||||
|
||||
> **columns**: `string`[]
|
||||
```ts
|
||||
columns: string[];
|
||||
```
|
||||
|
||||
The columns in the index
|
||||
|
||||
@@ -23,7 +25,9 @@ be more columns to represent composite indices.
|
||||
|
||||
### indexType
|
||||
|
||||
> **indexType**: `string`
|
||||
```ts
|
||||
indexType: string;
|
||||
```
|
||||
|
||||
The type of the index
|
||||
|
||||
@@ -31,6 +35,8 @@ The type of the index
|
||||
|
||||
### name
|
||||
|
||||
> **name**: `string`
|
||||
```ts
|
||||
name: string;
|
||||
```
|
||||
|
||||
The name of the index
|
||||
|
||||
@@ -1,19 +0,0 @@
|
||||
[**@lancedb/lancedb**](../README.md) • **Docs**
|
||||
|
||||
***
|
||||
|
||||
[@lancedb/lancedb](../globals.md) / IndexMetadata
|
||||
|
||||
# Interface: IndexMetadata
|
||||
|
||||
## Properties
|
||||
|
||||
### indexType?
|
||||
|
||||
> `optional` **indexType**: `string`
|
||||
|
||||
***
|
||||
|
||||
### metricType?
|
||||
|
||||
> `optional` **metricType**: `string`
|
||||
@@ -10,7 +10,9 @@
|
||||
|
||||
### config?
|
||||
|
||||
> `optional` **config**: [`Index`](../classes/Index.md)
|
||||
```ts
|
||||
optional config: Index;
|
||||
```
|
||||
|
||||
Advanced index configuration
|
||||
|
||||
@@ -26,7 +28,9 @@ will be used to determine the most useful kind of index to create.
|
||||
|
||||
### replace?
|
||||
|
||||
> `optional` **replace**: `boolean`
|
||||
```ts
|
||||
optional replace: boolean;
|
||||
```
|
||||
|
||||
Whether to replace the existing index
|
||||
|
||||
|
||||
@@ -8,32 +8,52 @@
|
||||
|
||||
## Properties
|
||||
|
||||
### indexType?
|
||||
### distanceType?
|
||||
|
||||
> `optional` **indexType**: `string`
|
||||
```ts
|
||||
optional distanceType: string;
|
||||
```
|
||||
|
||||
The type of the distance function used by the index. This is only
|
||||
present for vector indices. Scalar and full text search indices do
|
||||
not have a distance function.
|
||||
|
||||
***
|
||||
|
||||
### indexType
|
||||
|
||||
```ts
|
||||
indexType: string;
|
||||
```
|
||||
|
||||
The type of the index
|
||||
|
||||
***
|
||||
|
||||
### indices
|
||||
|
||||
> **indices**: [`IndexMetadata`](IndexMetadata.md)[]
|
||||
|
||||
The metadata for each index
|
||||
|
||||
***
|
||||
|
||||
### numIndexedRows
|
||||
|
||||
> **numIndexedRows**: `number`
|
||||
```ts
|
||||
numIndexedRows: number;
|
||||
```
|
||||
|
||||
The number of rows indexed by the index
|
||||
|
||||
***
|
||||
|
||||
### numIndices?
|
||||
|
||||
```ts
|
||||
optional numIndices: number;
|
||||
```
|
||||
|
||||
The number of parts this index is split into.
|
||||
|
||||
***
|
||||
|
||||
### numUnindexedRows
|
||||
|
||||
> **numUnindexedRows**: `number`
|
||||
```ts
|
||||
numUnindexedRows: number;
|
||||
```
|
||||
|
||||
The number of rows not indexed
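These statistics can be inspected per index; the sketch below assumes a `Table#indexStats` method and an index named "vector_idx":

```ts
const stats = await table.indexStats("vector_idx");
if (stats) {
  console.log(stats.indexType, stats.numIndexedRows, stats.numUnindexedRows);
}
```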
|
||||
|
||||
@@ -12,7 +12,9 @@ Options to create an `IVF_PQ` index
|
||||
|
||||
### distanceType?
|
||||
|
||||
> `optional` **distanceType**: `"l2"` \| `"cosine"` \| `"dot"`
|
||||
```ts
|
||||
optional distanceType: "l2" | "cosine" | "dot";
|
||||
```
|
||||
|
||||
Distance type to use to build the index.
|
||||
|
||||
@@ -50,7 +52,9 @@ L2 norm is 1), then dot distance is equivalent to the cosine distance.
|
||||
|
||||
### maxIterations?
|
||||
|
||||
> `optional` **maxIterations**: `number`
|
||||
```ts
|
||||
optional maxIterations: number;
|
||||
```
|
||||
|
||||
Max iteration to train IVF kmeans.
|
||||
|
||||
@@ -66,7 +70,9 @@ The default value is 50.
|
||||
|
||||
### numPartitions?
|
||||
|
||||
> `optional` **numPartitions**: `number`
|
||||
```ts
|
||||
optional numPartitions: number;
|
||||
```
|
||||
|
||||
The number of IVF partitions to create.
|
||||
|
||||
@@ -82,7 +88,9 @@ part of the search (searching within a partition) will be slow.
|
||||
|
||||
### numSubVectors?
|
||||
|
||||
> `optional` **numSubVectors**: `number`
|
||||
```ts
|
||||
optional numSubVectors: number;
|
||||
```
|
||||
|
||||
Number of sub-vectors of PQ.
|
||||
|
||||
@@ -101,7 +109,9 @@ will likely result in poor performance.
|
||||
|
||||
### sampleRate?
|
||||
|
||||
> `optional` **sampleRate**: `number`
|
||||
```ts
|
||||
optional sampleRate: number;
|
||||
```
|
||||
|
||||
The number of vectors, per partition, to sample when training IVF kmeans.
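A sketch of passing these options when building an index, assuming the `Index.ivfPq` helper and a vector column named "vector":

```ts
import { Index } from "@lancedb/lancedb";

// Build an IVF_PQ index with explicit tuning parameters.
await table.createIndex("vector", {
  config: Index.ivfPq({
    distanceType: "cosine",
    numPartitions: 256,
    numSubVectors: 16,
  }),
});
```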
|
||||
|
||||
|
||||
docs/src/js/interfaces/OptimizeOptions.md (new file, 39 lines)
@@ -0,0 +1,39 @@
|
||||
[**@lancedb/lancedb**](../README.md) • **Docs**
|
||||
|
||||
***
|
||||
|
||||
[@lancedb/lancedb](../globals.md) / OptimizeOptions
|
||||
|
||||
# Interface: OptimizeOptions
|
||||
|
||||
## Properties
|
||||
|
||||
### cleanupOlderThan
|
||||
|
||||
```ts
|
||||
cleanupOlderThan: Date;
|
||||
```
|
||||
|
||||
If set then all versions older than the given date will
|
||||
be removed. The current version will never be removed.
|
||||
The default is 7 days.
|
||||
|
||||
#### Example
|
||||
|
||||
```ts
|
||||
// Delete all versions older than 1 day
|
||||
const olderThan = new Date();
|
||||
olderThan.setDate(olderThan.getDate() - 1);
|
||||
tbl.cleanupOlderVersions(olderThan);
|
||||
|
||||
// Delete all versions except the current version
|
||||
tbl.cleanupOlderVersions(new Date());
|
||||
```
|
||||
|
||||
***
|
||||
|
||||
### deleteUnverified
|
||||
|
||||
```ts
|
||||
deleteUnverified: boolean;
|
||||
```
|
||||
docs/src/js/interfaces/RetryConfig.md (new file, 90 lines)
@@ -0,0 +1,90 @@
|
||||
[**@lancedb/lancedb**](../README.md) • **Docs**
|
||||
|
||||
***
|
||||
|
||||
[@lancedb/lancedb](../globals.md) / RetryConfig
|
||||
|
||||
# Interface: RetryConfig
|
||||
|
||||
Retry configuration for the remote HTTP client.
|
||||
|
||||
## Properties
|
||||
|
||||
### backoffFactor?
|
||||
|
||||
```ts
|
||||
optional backoffFactor: number;
|
||||
```
|
||||
|
||||
The backoff factor to apply between retries. Default is 0.25. Between each retry
|
||||
the client will wait for the amount of seconds:
|
||||
`{backoff factor} * (2 ** ({number of previous retries}))`. So for the default
|
||||
of 0.25, the first retry will wait 0.25 seconds, the second retry will wait 0.5
|
||||
seconds, the third retry will wait 1 second, etc.
|
||||
|
||||
You can also set this via the environment variable
|
||||
`LANCE_CLIENT_RETRY_BACKOFF_FACTOR`.
|
||||
|
||||
***
|
||||
|
||||
### backoffJitter?
|
||||
|
||||
```ts
|
||||
optional backoffJitter: number;
|
||||
```
|
||||
|
||||
The jitter to apply to the backoff factor, in seconds. Default is 0.25.
|
||||
|
||||
A random value between 0 and `backoff_jitter` will be added to the backoff
|
||||
factor in seconds. So for the default of 0.25 seconds, between 0 and 250
|
||||
milliseconds will be added to the sleep between each retry.
|
||||
|
||||
You can also set this via the environment variable
|
||||
`LANCE_CLIENT_RETRY_BACKOFF_JITTER`.
|
||||
|
||||
***
|
||||
|
||||
### connectRetries?
|
||||
|
||||
```ts
|
||||
optional connectRetries: number;
|
||||
```
|
||||
|
||||
The maximum number of retries for connection errors. Default is 3. You
|
||||
can also set this via the environment variable `LANCE_CLIENT_CONNECT_RETRIES`.
|
||||
|
||||
***
|
||||
|
||||
### readRetries?
|
||||
|
||||
```ts
|
||||
optional readRetries: number;
|
||||
```
|
||||
|
||||
The maximum number of retries for read errors. Default is 3. You can also
|
||||
set this via the environment variable `LANCE_CLIENT_READ_RETRIES`.
|
||||
|
||||
***
|
||||
|
||||
### retries?
|
||||
|
||||
```ts
|
||||
optional retries: number;
|
||||
```
|
||||
|
||||
The maximum number of retries for a request. Default is 3. You can also
|
||||
set this via the environment variable `LANCE_CLIENT_MAX_RETRIES`.
|
||||
|
||||
***
|
||||
|
||||
### statuses?
|
||||
|
||||
```ts
|
||||
optional statuses: number[];
|
||||
```
|
||||
|
||||
The HTTP status codes for which to retry the request. Default is
|
||||
[429, 500, 502, 503].
|
||||
|
||||
You can also set this via the environment variable
|
||||
`LANCE_CLIENT_RETRY_STATUSES`. Use a comma-separated list of integers.
|
||||
@@ -10,7 +10,9 @@
|
||||
|
||||
### limit?
|
||||
|
||||
> `optional` **limit**: `number`
|
||||
```ts
|
||||
optional limit: number;
|
||||
```
|
||||
|
||||
An optional limit to the number of results to return.
|
||||
|
||||
@@ -18,7 +20,9 @@ An optional limit to the number of results to return.
|
||||
|
||||
### startAfter?
|
||||
|
||||
> `optional` **startAfter**: `string`
|
||||
```ts
|
||||
optional startAfter: string;
|
||||
```
|
||||
|
||||
If present, only return names that come lexicographically after the
|
||||
supplied value.
|
||||
|
||||
docs/src/js/interfaces/TimeoutConfig.md (new file, 46 lines)
@@ -0,0 +1,46 @@
|
||||
[**@lancedb/lancedb**](../README.md) • **Docs**
|
||||
|
||||
***
|
||||
|
||||
[@lancedb/lancedb](../globals.md) / TimeoutConfig
|
||||
|
||||
# Interface: TimeoutConfig
|
||||
|
||||
Timeout configuration for remote HTTP client.
|
||||
|
||||
## Properties
|
||||
|
||||
### connectTimeout?
|
||||
|
||||
```ts
|
||||
optional connectTimeout: number;
|
||||
```
|
||||
|
||||
The timeout for establishing a connection in seconds. Default is 120
|
||||
seconds (2 minutes). This can also be set via the environment variable
|
||||
`LANCE_CLIENT_CONNECT_TIMEOUT`, as an integer number of seconds.
|
||||
|
||||
***
|
||||
|
||||
### poolIdleTimeout?
|
||||
|
||||
```ts
|
||||
optional poolIdleTimeout: number;
|
||||
```
|
||||
|
||||
The timeout for keeping idle connections in the connection pool in seconds.
|
||||
Default is 300 seconds (5 minutes). This can also be set via the
|
||||
environment variable `LANCE_CLIENT_CONNECTION_TIMEOUT`, as an integer
|
||||
number of seconds.
|
||||
|
||||
***
|
||||
|
||||
### readTimeout?
|
||||
|
||||
```ts
|
||||
optional readTimeout: number;
|
||||
```
|
||||
|
||||
The timeout for reading data from the server in seconds. Default is 300
|
||||
seconds (5 minutes). This can also be set via the environment variable
|
||||
`LANCE_CLIENT_READ_TIMEOUT`, as an integer number of seconds.
|
||||
@@ -10,7 +10,9 @@
|
||||
|
||||
### where
|
||||
|
||||
> **where**: `string`
|
||||
```ts
|
||||
where: string;
|
||||
```
|
||||
|
||||
A filter that limits the scope of the update.
|
||||
|
||||
|
||||
@@ -12,6 +12,8 @@ Write options when creating a Table.
|
||||
|
||||
### mode?
|
||||
|
||||
> `optional` **mode**: [`WriteMode`](../enumerations/WriteMode.md)
|
||||
```ts
|
||||
optional mode: WriteMode;
|
||||
```
|
||||
|
||||
Write mode for writing to a table.
|
||||
|
||||
@@ -12,16 +12,12 @@
|
||||
|
||||
- [EmbeddingFunction](classes/EmbeddingFunction.md)
|
||||
- [EmbeddingFunctionRegistry](classes/EmbeddingFunctionRegistry.md)
|
||||
- [OpenAIEmbeddingFunction](classes/OpenAIEmbeddingFunction.md)
|
||||
- [TextEmbeddingFunction](classes/TextEmbeddingFunction.md)
|
||||
|
||||
### Interfaces
|
||||
|
||||
- [EmbeddingFunctionConfig](interfaces/EmbeddingFunctionConfig.md)
|
||||
|
||||
### Type Aliases
|
||||
|
||||
- [OpenAIOptions](type-aliases/OpenAIOptions.md)
|
||||
|
||||
### Functions
|
||||
|
||||
- [LanceSchema](functions/LanceSchema.md)
|
||||
|
||||
@@ -10,7 +10,7 @@ An embedding function that automatically creates vector representation for a giv
|
||||
|
||||
## Extended by
|
||||
|
||||
- [`OpenAIEmbeddingFunction`](OpenAIEmbeddingFunction.md)
|
||||
- [`TextEmbeddingFunction`](TextEmbeddingFunction.md)
|
||||
|
||||
## Type Parameters
|
||||
|
||||
@@ -22,7 +22,9 @@ An embedding function that automatically creates vector representation for a giv
|
||||
|
||||
### new EmbeddingFunction()
|
||||
|
||||
> **new EmbeddingFunction**<`T`, `M`>(): [`EmbeddingFunction`](EmbeddingFunction.md)<`T`, `M`>
|
||||
```ts
|
||||
new EmbeddingFunction<T, M>(): EmbeddingFunction<T, M>
|
||||
```
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -32,13 +34,15 @@ An embedding function that automatically creates vector representation for a giv
|
||||
|
||||
### computeQueryEmbeddings()
|
||||
|
||||
> **computeQueryEmbeddings**(`data`): `Promise`<`number`[] \| `Float32Array` \| `Float64Array`>
|
||||
```ts
|
||||
computeQueryEmbeddings(data): Promise<number[] | Float32Array | Float64Array>
|
||||
```
|
||||
|
||||
Compute the embeddings for a single query
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **data**: `T`
|
||||
* **data**: `T`
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -48,13 +52,15 @@ Compute the embeddings for a single query
|
||||
|
||||
### computeSourceEmbeddings()
|
||||
|
||||
> `abstract` **computeSourceEmbeddings**(`data`): `Promise`<`number`[][] \| `Float32Array`[] \| `Float64Array`[]>
|
||||
```ts
|
||||
abstract computeSourceEmbeddings(data): Promise<number[][] | Float32Array[] | Float64Array[]>
|
||||
```
|
||||
|
||||
Creates a vector representation for the given values.
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **data**: `T`[]
|
||||
* **data**: `T`[]
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -64,7 +70,9 @@ Creates a vector representation for the given values.
|
||||
|
||||
### embeddingDataType()
|
||||
|
||||
> `abstract` **embeddingDataType**(): `Float`<`Floats`>
|
||||
```ts
|
||||
abstract embeddingDataType(): Float<Floats>
|
||||
```
|
||||
|
||||
The datatype of the embeddings
|
||||
|
||||
@@ -74,9 +82,23 @@ The datatype of the embeddings
|
||||
|
||||
***
|
||||
|
||||
### init()?
|
||||
|
||||
```ts
|
||||
optional init(): Promise<void>
|
||||
```
|
||||
|
||||
#### Returns
|
||||
|
||||
`Promise`<`void`>
|
||||
|
||||
***
|
||||
|
||||
### ndims()
|
||||
|
||||
> **ndims**(): `undefined` \| `number`
|
||||
```ts
|
||||
ndims(): undefined | number
|
||||
```
|
||||
|
||||
The number of dimensions of the embeddings
|
||||
|
||||
@@ -88,15 +110,16 @@ The number of dimensions of the embeddings
|
||||
|
||||
### sourceField()
|
||||
|
||||
> **sourceField**(`optionsOrDatatype`): [`DataType`<`Type`, `any`>, `Map`<`string`, [`EmbeddingFunction`](EmbeddingFunction.md)<`any`, `FunctionOptions`>>]
|
||||
```ts
|
||||
sourceField(optionsOrDatatype): [DataType<Type, any>, Map<string, EmbeddingFunction<any, FunctionOptions>>]
|
||||
```
|
||||
|
||||
sourceField is used in combination with `LanceSchema` to provide a declarative data model
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **optionsOrDatatype**: `DataType`<`Type`, `any`> \| `Partial`<`FieldOptions`<`DataType`<`Type`, `any`>>>
|
||||
|
||||
The options for the field or the datatype
|
||||
* **optionsOrDatatype**: `DataType`<`Type`, `any`> \| `Partial`<`FieldOptions`<`DataType`<`Type`, `any`>>>
|
||||
The options for the field or the datatype
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -110,7 +133,9 @@ lancedb.LanceSchema
|
||||
|
||||
### toJSON()
|
||||
|
||||
> `abstract` **toJSON**(): `Partial`<`M`>
|
||||
```ts
|
||||
abstract toJSON(): Partial<M>
|
||||
```
|
||||
|
||||
Convert the embedding function to a JSON object
|
||||
It is used to serialize the embedding function to the schema
|
||||
@@ -145,13 +170,15 @@ class MyEmbeddingFunction extends EmbeddingFunction {
|
||||
|
||||
### vectorField()
|
||||
|
||||
> **vectorField**(`optionsOrDatatype`?): [`DataType`<`Type`, `any`>, `Map`<`string`, [`EmbeddingFunction`](EmbeddingFunction.md)<`any`, `FunctionOptions`>>]
|
||||
```ts
|
||||
vectorField(optionsOrDatatype?): [DataType<Type, any>, Map<string, EmbeddingFunction<any, FunctionOptions>>]
|
||||
```
|
||||
|
||||
vectorField is used in combination with `LanceSchema` to provide a declarative data model
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **optionsOrDatatype?**: `DataType`<`Type`, `any`> \| `Partial`<`FieldOptions`<`DataType`<`Type`, `any`>>>
|
||||
* **optionsOrDatatype?**: `DataType`<`Type`, `any`> \| `Partial`<`FieldOptions`<`DataType`<`Type`, `any`>>>
|
||||
|
||||
#### Returns
|
||||
|
||||
|
||||
@@ -15,7 +15,9 @@ or TextEmbeddingFunction and registering it with the registry
|
||||
|
||||
### new EmbeddingFunctionRegistry()
|
||||
|
||||
> **new EmbeddingFunctionRegistry**(): [`EmbeddingFunctionRegistry`](EmbeddingFunctionRegistry.md)
|
||||
```ts
|
||||
new EmbeddingFunctionRegistry(): EmbeddingFunctionRegistry
|
||||
```
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -25,11 +27,13 @@ or TextEmbeddingFunction and registering it with the registry
|
||||
|
||||
### functionToMetadata()
|
||||
|
||||
> **functionToMetadata**(`conf`): `Record`<`string`, `any`>
|
||||
```ts
|
||||
functionToMetadata(conf): Record<string, any>
|
||||
```
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **conf**: [`EmbeddingFunctionConfig`](../interfaces/EmbeddingFunctionConfig.md)
|
||||
* **conf**: [`EmbeddingFunctionConfig`](../interfaces/EmbeddingFunctionConfig.md)
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -39,7 +43,9 @@ or TextEmbeddingFunction and registering it with the registry
|
||||
|
||||
### get()
|
||||
|
||||
> **get**<`T`, `Name`>(`name`): `Name` *extends* `"openai"` ? `EmbeddingFunctionCreate`<[`OpenAIEmbeddingFunction`](OpenAIEmbeddingFunction.md)> : `undefined` \| `EmbeddingFunctionCreate`<`T`>
|
||||
```ts
|
||||
get<T>(name): undefined | EmbeddingFunctionCreate<T>
|
||||
```
|
||||
|
||||
Fetch an embedding function by name
|
||||
|
||||
@@ -47,27 +53,26 @@ Fetch an embedding function by name
|
||||
|
||||
• **T** *extends* [`EmbeddingFunction`](EmbeddingFunction.md)<`unknown`, `FunctionOptions`>
|
||||
|
||||
• **Name** *extends* `string` = `""`
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **name**: `Name` *extends* `"openai"` ? `"openai"` : `string`
|
||||
|
||||
The name of the function
|
||||
* **name**: `string`
|
||||
The name of the function
|
||||
|
||||
#### Returns
|
||||
|
||||
`Name` *extends* `"openai"` ? `EmbeddingFunctionCreate`<[`OpenAIEmbeddingFunction`](OpenAIEmbeddingFunction.md)> : `undefined` \| `EmbeddingFunctionCreate`<`T`>
|
||||
`undefined` \| `EmbeddingFunctionCreate`<`T`>
|
||||
|
||||
***
|
||||
|
||||
### getTableMetadata()
|
||||
|
||||
> **getTableMetadata**(`functions`): `Map`<`string`, `string`>
|
||||
```ts
|
||||
getTableMetadata(functions): Map<string, string>
|
||||
```
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **functions**: [`EmbeddingFunctionConfig`](../interfaces/EmbeddingFunctionConfig.md)[]
|
||||
* **functions**: [`EmbeddingFunctionConfig`](../interfaces/EmbeddingFunctionConfig.md)[]
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -75,9 +80,25 @@ The name of the function
|
||||
|
||||
***
|
||||
|
||||
### length()
|
||||
|
||||
```ts
|
||||
length(): number
|
||||
```
|
||||
|
||||
Get the number of registered functions
|
||||
|
||||
#### Returns
|
||||
|
||||
`number`
|
||||
|
||||
***
|
||||
|
||||
### register()
|
||||
|
||||
> **register**<`T`>(`this`, `alias`?): (`ctor`) => `any`
|
||||
```ts
|
||||
register<T>(this, alias?): (ctor) => any
|
||||
```
|
||||
|
||||
Register an embedding function
|
||||
|
||||
@@ -87,9 +108,9 @@ Register an embedding function
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **this**: [`EmbeddingFunctionRegistry`](EmbeddingFunctionRegistry.md)
|
||||
* **this**: [`EmbeddingFunctionRegistry`](EmbeddingFunctionRegistry.md)
|
||||
|
||||
• **alias?**: `string`
|
||||
* **alias?**: `string`
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -97,7 +118,7 @@ Register an embedding function
|
||||
|
||||
##### Parameters
|
||||
|
||||
• **ctor**: `T`
|
||||
* **ctor**: `T`
|
||||
|
||||
##### Returns
|
||||
|
||||
@@ -111,13 +132,15 @@ Error if the function is already registered
|
||||
|
||||
### reset()
|
||||
|
||||
> **reset**(`this`): `void`
|
||||
```ts
|
||||
reset(this): void
|
||||
```
|
||||
|
||||
reset the registry to the initial state
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **this**: [`EmbeddingFunctionRegistry`](EmbeddingFunctionRegistry.md)
|
||||
* **this**: [`EmbeddingFunctionRegistry`](EmbeddingFunctionRegistry.md)
|
||||
|
||||
#### Returns
|
||||
|
||||
|
||||
@@ -2,31 +2,33 @@
|
||||
|
||||
***
|
||||
|
||||
[@lancedb/lancedb](../../../globals.md) / [embedding](../README.md) / OpenAIEmbeddingFunction
|
||||
[@lancedb/lancedb](../../../globals.md) / [embedding](../README.md) / TextEmbeddingFunction
|
||||
|
||||
# Class: OpenAIEmbeddingFunction
|
||||
# Class: `abstract` TextEmbeddingFunction<M>
|
||||
|
||||
An embedding function that automatically creates vector representation for a given column.
|
||||
an abstract class for implementing embedding functions that take text as input
|
||||
|
||||
## Extends
|
||||
|
||||
- [`EmbeddingFunction`](EmbeddingFunction.md)<`string`, `Partial`<[`OpenAIOptions`](../type-aliases/OpenAIOptions.md)>>
|
||||
- [`EmbeddingFunction`](EmbeddingFunction.md)<`string`, `M`>
|
||||
|
||||
## Type Parameters
|
||||
|
||||
• **M** *extends* `FunctionOptions` = `FunctionOptions`
|
||||
|
||||
## Constructors
|
||||
|
||||
### new OpenAIEmbeddingFunction()
|
||||
### new TextEmbeddingFunction()
|
||||
|
||||
> **new OpenAIEmbeddingFunction**(`options`): [`OpenAIEmbeddingFunction`](OpenAIEmbeddingFunction.md)
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **options**: `Partial`<[`OpenAIOptions`](../type-aliases/OpenAIOptions.md)> = `...`
|
||||
```ts
|
||||
new TextEmbeddingFunction<M>(): TextEmbeddingFunction<M>
|
||||
```
|
||||
|
||||
#### Returns
|
||||
|
||||
[`OpenAIEmbeddingFunction`](OpenAIEmbeddingFunction.md)
|
||||
[`TextEmbeddingFunction`](TextEmbeddingFunction.md)<`M`>
|
||||
|
||||
#### Overrides
|
||||
#### Inherited from
|
||||
|
||||
[`EmbeddingFunction`](EmbeddingFunction.md).[`constructor`](EmbeddingFunction.md#constructors)
|
||||
|
||||
@@ -34,17 +36,19 @@ An embedding function that automatically creates vector representation for a giv
|
||||
|
||||
### computeQueryEmbeddings()
|
||||
|
||||
> **computeQueryEmbeddings**(`data`): `Promise`<`number`[]>
|
||||
```ts
|
||||
computeQueryEmbeddings(data): Promise<number[] | Float32Array | Float64Array>
|
||||
```
|
||||
|
||||
Compute the embeddings for a single query
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **data**: `string`
|
||||
* **data**: `string`
|
||||
|
||||
#### Returns
|
||||
|
||||
`Promise`<`number`[]>
|
||||
`Promise`<`number`[] \| `Float32Array` \| `Float64Array`>
|
||||
|
||||
#### Overrides
|
||||
|
||||
@@ -54,17 +58,19 @@ Compute the embeddings for a single query
|
||||
|
||||
### computeSourceEmbeddings()
|
||||
|
||||
> **computeSourceEmbeddings**(`data`): `Promise`<`number`[][]>
|
||||
```ts
|
||||
computeSourceEmbeddings(data): Promise<number[][] | Float32Array[] | Float64Array[]>
|
||||
```
|
||||
|
||||
Creates a vector representation for the given values.
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **data**: `string`[]
|
||||
* **data**: `string`[]
|
||||
|
||||
#### Returns
|
||||
|
||||
`Promise`<`number`[][]>
|
||||
`Promise`<`number`[][] \| `Float32Array`[] \| `Float64Array`[]>
|
||||
|
||||
#### Overrides
|
||||
|
||||
@@ -74,7 +80,9 @@ Creates a vector representation for the given values.
|
||||
|
||||
### embeddingDataType()
|
||||
|
||||
> **embeddingDataType**(): `Float`<`Floats`>
|
||||
```ts
|
||||
embeddingDataType(): Float<Floats>
|
||||
```
|
||||
|
||||
The datatype of the embeddings
|
||||
|
||||
@@ -88,17 +96,53 @@ The datatype of the embeddings
|
||||
|
||||
***
|
||||
|
||||
### generateEmbeddings()
|
||||
|
||||
```ts
|
||||
abstract generateEmbeddings(texts, ...args): Promise<number[][] | Float32Array[] | Float64Array[]>
|
||||
```
|
||||
|
||||
#### Parameters
|
||||
|
||||
* **texts**: `string`[]
|
||||
|
||||
* ...**args**: `any`[]
|
||||
|
||||
#### Returns
|
||||
|
||||
`Promise`<`number`[][] \| `Float32Array`[] \| `Float64Array`[]>
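A minimal sketch of subclassing, with stand-in vectors rather than a real model call; the class name, registry alias, and import path are assumptions:

```ts
import { TextEmbeddingFunction, getRegistry } from "@lancedb/lancedb/embedding";

class MyEmbedder extends TextEmbeddingFunction {
  ndims() { return 3; }
  toJSON() { return {}; }
  async generateEmbeddings(texts: string[]) {
    // A real implementation would call an embedding model here.
    return texts.map(() => [0.0, 0.0, 0.0]);
  }
}

// Registering makes the function recoverable from table metadata by name.
getRegistry().register("my-embedder")(MyEmbedder);
```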
|
||||
|
||||
***
|
||||
|
||||
### init()?
|
||||
|
||||
```ts
|
||||
optional init(): Promise<void>
|
||||
```
|
||||
|
||||
#### Returns
|
||||
|
||||
`Promise`<`void`>
|
||||
|
||||
#### Inherited from
|
||||
|
||||
[`EmbeddingFunction`](EmbeddingFunction.md).[`init`](EmbeddingFunction.md#init)
|
||||
|
||||
***
|
||||
|
||||
### ndims()
|
||||
|
||||
> **ndims**(): `number`
|
||||
```ts
|
||||
ndims(): undefined | number
|
||||
```
|
||||
|
||||
The number of dimensions of the embeddings
|
||||
|
||||
#### Returns
|
||||
|
||||
`number`
|
||||
`undefined` \| `number`
|
||||
|
||||
#### Overrides
|
||||
#### Inherited from
|
||||
|
||||
[`EmbeddingFunction`](EmbeddingFunction.md).[`ndims`](EmbeddingFunction.md#ndims)
|
||||
|
||||
@@ -106,16 +150,12 @@ The number of dimensions of the embeddings
|
||||
|
||||
### sourceField()
|
||||
|
||||
> **sourceField**(`optionsOrDatatype`): [`DataType`<`Type`, `any`>, `Map`<`string`, [`EmbeddingFunction`](EmbeddingFunction.md)<`any`, `FunctionOptions`>>]
|
||||
```ts
|
||||
sourceField(): [DataType<Type, any>, Map<string, EmbeddingFunction<any, FunctionOptions>>]
|
||||
```
|
||||
|
||||
sourceField is used in combination with `LanceSchema` to provide a declarative data model
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **optionsOrDatatype**: `DataType`<`Type`, `any`> \| `Partial`<`FieldOptions`<`DataType`<`Type`, `any`>>>
|
||||
|
||||
The options for the field or the datatype
|
||||
|
||||
#### Returns
|
||||
|
||||
[`DataType`<`Type`, `any`>, `Map`<`string`, [`EmbeddingFunction`](EmbeddingFunction.md)<`any`, `FunctionOptions`>>]
|
||||
@@ -124,7 +164,7 @@ The options for the field or the datatype
|
||||
|
||||
lancedb.LanceSchema
|
||||
|
||||
#### Inherited from
|
||||
#### Overrides
|
||||
|
||||
[`EmbeddingFunction`](EmbeddingFunction.md).[`sourceField`](EmbeddingFunction.md#sourcefield)
|
||||
|
||||
@@ -132,7 +172,9 @@ lancedb.LanceSchema
|
||||
|
||||
### toJSON()
|
||||
|
||||
> **toJSON**(): `object`
|
||||
```ts
|
||||
abstract toJSON(): Partial<M>
|
||||
```
|
||||
|
||||
Convert the embedding function to a JSON object
|
||||
It is used to serialize the embedding function to the schema
|
||||
@@ -144,11 +186,7 @@ If it does not, the embedding function will not be able to be recreated, or coul
|
||||
|
||||
#### Returns
|
||||
|
||||
`object`
|
||||
|
||||
##### model
|
||||
|
||||
> **model**: `string` & `object` \| `"text-embedding-ada-002"` \| `"text-embedding-3-small"` \| `"text-embedding-3-large"`
|
||||
`Partial`<`M`>
|
||||
|
||||
#### Example
|
||||
|
||||
@@ -167,7 +205,7 @@ class MyEmbeddingFunction extends EmbeddingFunction {
|
||||
}
|
||||
```
|
||||
|
||||
#### Overrides
|
||||
#### Inherited from
|
||||
|
||||
[`EmbeddingFunction`](EmbeddingFunction.md).[`toJSON`](EmbeddingFunction.md#tojson)
|
||||
|
||||
@@ -175,13 +213,15 @@ class MyEmbeddingFunction extends EmbeddingFunction {
|
||||
|
||||
### vectorField()
|
||||
|
||||
> **vectorField**(`optionsOrDatatype`?): [`DataType`<`Type`, `any`>, `Map`<`string`, [`EmbeddingFunction`](EmbeddingFunction.md)<`any`, `FunctionOptions`>>]
|
||||
```ts
|
||||
vectorField(optionsOrDatatype?): [DataType<Type, any>, Map<string, EmbeddingFunction<any, FunctionOptions>>]
|
||||
```
|
||||
|
||||
vectorField is used in combination with `LanceSchema` to provide a declarative data model
|
||||
|
||||
#### Parameters
|
||||
|
||||
• **optionsOrDatatype?**: `DataType`<`Type`, `any`> \| `Partial`<`FieldOptions`<`DataType`<`Type`, `any`>>>
|
||||
* **optionsOrDatatype?**: `DataType`<`Type`, `any`> \| `Partial`<`FieldOptions`<`DataType`<`Type`, `any`>>>
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -6,13 +6,15 @@
|
||||
|
||||
# Function: LanceSchema()
|
||||
|
||||
> **LanceSchema**(`fields`): `Schema`
|
||||
```ts
|
||||
function LanceSchema(fields): Schema
|
||||
```
|
||||
|
||||
Create a schema with embedding functions.
|
||||
|
||||
## Parameters
|
||||
|
||||
• **fields**: `Record`<`string`, `object` \| [`object`, `Map`<`string`, [`EmbeddingFunction`](../classes/EmbeddingFunction.md)<`any`, `FunctionOptions`>>]>
|
||||
* **fields**: `Record`<`string`, `object` \| [`object`, `Map`<`string`, [`EmbeddingFunction`](../classes/EmbeddingFunction.md)<`any`, `FunctionOptions`>>]>
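A sketch combining `LanceSchema` with an embedding function's `sourceField`/`vectorField`; the import path, registry lookup, and model name are assumptions:

```ts
import { Utf8 } from "apache-arrow";
import { LanceSchema, getRegistry } from "@lancedb/lancedb/embedding";

const func = getRegistry().get("openai")!.create({ model: "text-embedding-3-small" });

// "text" is embedded into "vector" automatically when data is added.
const schema = LanceSchema({
  text: func.sourceField(new Utf8()),
  vector: func.vectorField(),
});
```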
|
||||
|
||||
## Returns
|
||||
|
||||
|
||||
@@ -6,7 +6,9 @@
|
||||
|
||||
# Function: getRegistry()
|
||||
|
||||
> **getRegistry**(): [`EmbeddingFunctionRegistry`](../classes/EmbeddingFunctionRegistry.md)
|
||||
```ts
|
||||
function getRegistry(): EmbeddingFunctionRegistry
|
||||
```
|
||||
|
||||
Utility function to get the global instance of the registry
|
||||
|
||||
|
||||
@@ -6,11 +6,13 @@
|
||||
|
||||
# Function: register()
|
||||
|
||||
> **register**(`name`?): (`ctor`) => `any`
|
||||
```ts
|
||||
function register(name?): (ctor) => any
|
||||
```
|
||||
|
||||
## Parameters
|
||||
|
||||
• **name?**: `string`
|
||||
* **name?**: `string`
|
||||
|
||||
## Returns
|
||||
|
||||
@@ -18,7 +20,7 @@
|
||||
|
||||
### Parameters
|
||||
|
||||
• **ctor**: `EmbeddingFunctionConstructor`<[`EmbeddingFunction`](../classes/EmbeddingFunction.md)<`any`, `FunctionOptions`>>
|
||||
* **ctor**: `EmbeddingFunctionConstructor`<[`EmbeddingFunction`](../classes/EmbeddingFunction.md)<`any`, `FunctionOptions`>>
|
||||
|
||||
### Returns
|
||||
|
||||
|
||||
@@ -10,16 +10,22 @@
|
||||
|
||||
### function
|
||||
|
||||
> **function**: [`EmbeddingFunction`](../classes/EmbeddingFunction.md)<`any`, `FunctionOptions`>
|
||||
```ts
|
||||
function: EmbeddingFunction<any, FunctionOptions>;
|
||||
```
|
||||
|
||||
***
|
||||
|
||||
### sourceColumn
|
||||
|
||||
> **sourceColumn**: `string`
|
||||
```ts
|
||||
sourceColumn: string;
|
||||
```
|
||||
|
||||
***
|
||||
|
||||
### vectorColumn?
|
||||
|
||||
> `optional` **vectorColumn**: `string`
|
||||
```ts
|
||||
optional vectorColumn: string;
|
||||
```
|
||||
|
||||
@@ -1,19 +0,0 @@
|
||||
[**@lancedb/lancedb**](../../../README.md) • **Docs**
|
||||
|
||||
***
|
||||
|
||||
[@lancedb/lancedb](../../../globals.md) / [embedding](../README.md) / OpenAIOptions
|
||||
|
||||
# Type Alias: OpenAIOptions
|
||||
|
||||
> **OpenAIOptions**: `object`
|
||||
|
||||
## Type declaration
|
||||
|
||||
### apiKey
|
||||
|
||||
> **apiKey**: `string`
|
||||
|
||||
### model
|
||||
|
||||
> **model**: `EmbeddingCreateParams`\[`"model"`\]
|
||||
@@ -6,6 +6,8 @@
|
||||
|
||||
# Type Alias: Data
|
||||
|
||||
> **Data**: `Record`<`string`, `unknown`>[] \| `TableLike`
|
||||
```ts
|
||||
type Data: Record<string, unknown>[] | TableLike;
|
||||
```
|
||||
|
||||
Data type accepted by NodeJS SDK
|
||||
|
||||
@@ -8,7 +8,7 @@
|
||||
<parent>
|
||||
<groupId>com.lancedb</groupId>
|
||||
<artifactId>lancedb-parent</artifactId>
|
||||
<version>0.14.0-final.0</version>
|
||||
<version>0.14.1-beta.3</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
||||
@@ -6,7 +6,7 @@
|
||||
|
||||
<groupId>com.lancedb</groupId>
|
||||
<artifactId>lancedb-parent</artifactId>
|
||||
<version>0.14.0-final.0</version>
|
||||
<version>0.14.1-beta.3</version>
|
||||
<packaging>pom</packaging>
|
||||
|
||||
<name>LanceDB Parent</name>
|
||||
|
||||
node/package-lock.json (generated, 116 lines)
@@ -1,12 +1,12 @@
|
||||
{
|
||||
"name": "vectordb",
|
||||
"version": "0.14.0",
|
||||
"version": "0.14.1-beta.3",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "vectordb",
|
||||
"version": "0.14.0",
|
||||
"version": "0.14.1-beta.3",
|
||||
"cpu": [
|
||||
"x64",
|
||||
"arm64"
|
||||
@@ -52,14 +52,14 @@
|
||||
"uuid": "^9.0.0"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"@lancedb/vectordb-darwin-arm64": "0.14.0",
|
||||
"@lancedb/vectordb-darwin-x64": "0.14.0",
|
||||
"@lancedb/vectordb-linux-arm64-gnu": "0.14.0",
|
||||
"@lancedb/vectordb-linux-arm64-musl": "0.14.0",
|
||||
"@lancedb/vectordb-linux-x64-gnu": "0.14.0",
|
||||
"@lancedb/vectordb-linux-x64-musl": "0.14.0",
|
||||
"@lancedb/vectordb-win32-arm64-msvc": "0.14.0",
|
||||
"@lancedb/vectordb-win32-x64-msvc": "0.14.0"
|
||||
"@lancedb/vectordb-darwin-arm64": "0.14.1-beta.3",
|
||||
"@lancedb/vectordb-darwin-x64": "0.14.1-beta.3",
|
||||
"@lancedb/vectordb-linux-arm64-gnu": "0.14.1-beta.3",
|
||||
"@lancedb/vectordb-linux-arm64-musl": "0.14.1-beta.3",
|
||||
"@lancedb/vectordb-linux-x64-gnu": "0.14.1-beta.3",
|
||||
"@lancedb/vectordb-linux-x64-musl": "0.14.1-beta.3",
|
||||
"@lancedb/vectordb-win32-arm64-msvc": "0.14.1-beta.3",
|
||||
"@lancedb/vectordb-win32-x64-msvc": "0.14.1-beta.3"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@apache-arrow/ts": "^14.0.2",
|
||||
@@ -329,102 +329,6 @@
|
||||
"@jridgewell/sourcemap-codec": "^1.4.10"
|
||||
}
|
||||
},
|
||||
"node_modules/@lancedb/vectordb-darwin-arm64": {
|
||||
"version": "0.14.0",
|
||||
"resolved": "https://registry.npmjs.org/@lancedb/vectordb-darwin-arm64/-/vectordb-darwin-arm64-0.14.0.tgz",
|
||||
"integrity": "sha512-C8wp+eJQY3RMLIRfxDnOm8bYg458OI3Cz7Jh7ws6ibquBdJDCiTdwFfcUXrkoaQ9Wv4nHZOEqupj3FBMsks1hw==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"darwin"
|
||||
]
|
||||
},
|
||||
"node_modules/@lancedb/vectordb-darwin-x64": {
|
||||
"version": "0.14.0",
|
||||
"resolved": "https://registry.npmjs.org/@lancedb/vectordb-darwin-x64/-/vectordb-darwin-x64-0.14.0.tgz",
|
||||
"integrity": "sha512-5jkQuEVGaPViFb4dOjncUqVCbvEiT8XYFZoprE0yv7HUUCdt5v15GTNxey72yw+aaX2mdb2CeFIs+4ySZqy/MA==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"darwin"
|
||||
]
|
||||
},
|
||||
"node_modules/@lancedb/vectordb-linux-arm64-gnu": {
|
||||
"version": "0.14.0",
|
||||
"resolved": "https://registry.npmjs.org/@lancedb/vectordb-linux-arm64-gnu/-/vectordb-linux-arm64-gnu-0.14.0.tgz",
|
||||
"integrity": "sha512-YLboFJLQyFzsYWi2iW1nr2SGaZTaj4gERIufyTSnX+VXlEYKHke3cMFLF+EamH8eejv2HwXdJpidPaP6aSzujw==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
]
|
||||
},
|
||||
"node_modules/@lancedb/vectordb-linux-arm64-musl": {
|
||||
"version": "0.14.0",
|
||||
"resolved": "https://registry.npmjs.org/@lancedb/vectordb-linux-arm64-musl/-/vectordb-linux-arm64-musl-0.14.0.tgz",
|
||||
"integrity": "sha512-rel/SaxGRtx5GdAkFH1IknBr0V/tbrN4jYT6FixmSvgc9kgxrMGlBUHSRAO5atdRXZ8jT7XWuOqW1QdgsmPi0g==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
]
|
||||
},
|
||||
"node_modules/@lancedb/vectordb-linux-x64-gnu": {
|
||||
"version": "0.14.0",
|
||||
"resolved": "https://registry.npmjs.org/@lancedb/vectordb-linux-x64-gnu/-/vectordb-linux-x64-gnu-0.14.0.tgz",
|
||||
"integrity": "sha512-N29n8OO2JqSPaSVd5gmyh6r4x6LX0qpcCHrhkEaRoKKIXYdHQ8sAHOqHNt3xhMDLwDJfjGmzAwd977cOYM5MBw==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
]
|
||||
},
|
||||
"node_modules/@lancedb/vectordb-linux-x64-musl": {
|
||||
"version": "0.14.0",
|
||||
"resolved": "https://registry.npmjs.org/@lancedb/vectordb-linux-x64-musl/-/vectordb-linux-x64-musl-0.14.0.tgz",
|
||||
"integrity": "sha512-36Ewl9M6IsYgxBIaThgqaSlQ++8YsSnZB85DOnuIds+sRbBfNkknvwBRFO1/FGN8RSBydFPy1irNFmCOnrlTZg==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
]
|
||||
},
|
||||
"node_modules/@lancedb/vectordb-win32-arm64-msvc": {
|
||||
"version": "0.14.0",
|
||||
"resolved": "https://registry.npmjs.org/@lancedb/vectordb-win32-arm64-msvc/-/vectordb-win32-arm64-msvc-0.14.0.tgz",
|
||||
"integrity": "sha512-4qsna5yI7umGEA868/ifr1Np66d0dhFAOIGaJKS5Z+Zm4Zplr42BjVZiNWtwwKhndtsiPJnFCYVYRKfjTLZWdg==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"win32"
|
||||
]
|
||||
},
|
||||
"node_modules/@lancedb/vectordb-win32-x64-msvc": {
|
||||
"version": "0.14.0",
|
||||
"resolved": "https://registry.npmjs.org/@lancedb/vectordb-win32-x64-msvc/-/vectordb-win32-x64-msvc-0.14.0.tgz",
|
||||
"integrity": "sha512-1u+J5WFClNc6mzgF5otevMnOxW3pj8yOHrPoIiZe9SrL8O2oVtdYfWJZYG/OST21cS0Mc4Z0upX86G0sA4kEfA==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"win32"
|
||||
]
|
||||
},
|
||||
"node_modules/@neon-rs/cli": {
|
||||
"version": "0.0.160",
|
||||
"resolved": "https://registry.npmjs.org/@neon-rs/cli/-/cli-0.0.160.tgz",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "vectordb",
|
||||
"version": "0.14.0",
|
||||
"version": "0.14.1-beta.3",
|
||||
"description": " Serverless, low-latency vector database for AI applications",
|
||||
"private": false,
|
||||
"main": "dist/index.js",
|
||||
@@ -92,13 +92,13 @@
|
||||
}
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"@lancedb/vectordb-darwin-x64": "0.14.0",
|
||||
"@lancedb/vectordb-darwin-arm64": "0.14.0",
|
||||
"@lancedb/vectordb-linux-x64-gnu": "0.14.0",
|
||||
"@lancedb/vectordb-linux-arm64-gnu": "0.14.0",
|
||||
"@lancedb/vectordb-linux-x64-musl": "0.14.0",
|
||||
"@lancedb/vectordb-linux-arm64-musl": "0.14.0",
|
||||
"@lancedb/vectordb-win32-x64-msvc": "0.14.0",
|
||||
"@lancedb/vectordb-win32-arm64-msvc": "0.14.0"
|
||||
"@lancedb/vectordb-darwin-x64": "0.14.1-beta.3",
|
||||
"@lancedb/vectordb-darwin-arm64": "0.14.1-beta.3",
|
||||
"@lancedb/vectordb-linux-x64-gnu": "0.14.1-beta.3",
|
||||
"@lancedb/vectordb-linux-arm64-gnu": "0.14.1-beta.3",
|
||||
"@lancedb/vectordb-linux-x64-musl": "0.14.1-beta.3",
|
||||
"@lancedb/vectordb-linux-arm64-musl": "0.14.1-beta.3",
|
||||
"@lancedb/vectordb-win32-x64-msvc": "0.14.1-beta.3",
|
||||
"@lancedb/vectordb-win32-arm64-msvc": "0.14.1-beta.3"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
[package]
|
||||
name = "lancedb-nodejs"
|
||||
edition.workspace = true
|
||||
version = "0.14.0"
|
||||
version = "0.14.1-beta.3"
|
||||
license.workspace = true
|
||||
description.workspace = true
|
||||
repository.workspace = true
|
||||
|
||||
@@ -13,11 +13,10 @@ import { Schema } from "apache-arrow";
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
import * as arrow13 from "apache-arrow-13";
|
||||
import * as arrow14 from "apache-arrow-14";
|
||||
import * as arrow15 from "apache-arrow-15";
|
||||
import * as arrow16 from "apache-arrow-16";
|
||||
import * as arrow17 from "apache-arrow-17";
|
||||
import * as arrow18 from "apache-arrow-18";
|
||||
|
||||
import {
|
||||
convertToTable,
|
||||
@@ -45,22 +44,16 @@ function sampleRecords(): Array<Record<string, any>> {
|
||||
},
|
||||
];
|
||||
}
|
||||
describe.each([arrow13, arrow14, arrow15, arrow16, arrow17])(
|
||||
describe.each([arrow15, arrow16, arrow17, arrow18])(
|
||||
"Arrow",
|
||||
(
|
||||
arrow:
|
||||
| typeof arrow13
|
||||
| typeof arrow14
|
||||
| typeof arrow15
|
||||
| typeof arrow16
|
||||
| typeof arrow17,
|
||||
arrow: typeof arrow15 | typeof arrow16 | typeof arrow17 | typeof arrow18,
|
||||
) => {
|
||||
type ApacheArrow =
|
||||
| typeof arrow13
|
||||
| typeof arrow14
|
||||
| typeof arrow15
|
||||
| typeof arrow16
|
||||
| typeof arrow17;
|
||||
| typeof arrow17
|
||||
| typeof arrow18;
|
||||
const {
|
||||
Schema,
|
||||
Field,
|
||||
@@ -498,40 +491,40 @@ describe.each([arrow13, arrow14, arrow15, arrow16, arrow17])(
|
||||
|
||||
describe("when using two versions of arrow", function () {
|
||||
it("can still import data", async function () {
|
||||
const schema = new arrow13.Schema([
|
||||
new arrow13.Field("id", new arrow13.Int32()),
|
||||
new arrow13.Field(
|
||||
const schema = new arrow15.Schema([
|
||||
new arrow15.Field("id", new arrow15.Int32()),
|
||||
new arrow15.Field(
|
||||
"vector",
|
||||
new arrow13.FixedSizeList(
|
||||
new arrow15.FixedSizeList(
|
||||
1024,
|
||||
new arrow13.Field("item", new arrow13.Float32(), true),
|
||||
new arrow15.Field("item", new arrow15.Float32(), true),
|
||||
),
|
||||
),
|
||||
new arrow13.Field(
|
||||
new arrow15.Field(
|
||||
"struct",
|
||||
new arrow13.Struct([
|
||||
new arrow13.Field(
|
||||
new arrow15.Struct([
|
||||
new arrow15.Field(
|
||||
"nested",
|
||||
new arrow13.Dictionary(
|
||||
new arrow13.Utf8(),
|
||||
new arrow13.Int32(),
|
||||
new arrow15.Dictionary(
|
||||
new arrow15.Utf8(),
|
||||
new arrow15.Int32(),
|
||||
1,
|
||||
true,
|
||||
),
|
||||
),
|
||||
new arrow13.Field(
|
||||
new arrow15.Field(
|
||||
"ts_with_tz",
|
||||
new arrow13.TimestampNanosecond("some_tz"),
|
||||
new arrow15.TimestampNanosecond("some_tz"),
|
||||
),
|
||||
new arrow13.Field(
|
||||
new arrow15.Field(
|
||||
"ts_no_tz",
|
||||
new arrow13.TimestampNanosecond(null),
|
||||
new arrow15.TimestampNanosecond(null),
|
||||
),
|
||||
]),
|
||||
),
|
||||
// biome-ignore lint/suspicious/noExplicitAny: skip
|
||||
]) as any;
|
||||
schema.metadataVersion = arrow13.MetadataVersion.V5;
|
||||
schema.metadataVersion = arrow15.MetadataVersion.V5;
|
||||
const table = makeArrowTable([], { schema });
|
||||
|
||||
const buf = await fromTableToBuffer(table);
|
||||
@@ -543,13 +536,13 @@ describe.each([arrow13, arrow14, arrow15, arrow16, arrow17])(
|
||||
// Deep equality gets hung up on some very minor unimportant differences
|
||||
// between arrow version 13 and 15 which isn't really what we're testing for
|
||||
// and so we do our own comparison that just checks name/type/nullability
|
||||
function compareFields(lhs: arrow13.Field, rhs: arrow13.Field) {
|
||||
function compareFields(lhs: arrow15.Field, rhs: arrow15.Field) {
|
||||
expect(lhs.name).toEqual(rhs.name);
|
||||
expect(lhs.nullable).toEqual(rhs.nullable);
|
||||
expect(lhs.typeId).toEqual(rhs.typeId);
|
||||
if ("children" in lhs.type && lhs.type.children !== null) {
|
||||
const lhsChildren = lhs.type.children as arrow13.Field[];
|
||||
lhsChildren.forEach((child: arrow13.Field, idx) => {
|
||||
const lhsChildren = lhs.type.children as arrow15.Field[];
|
||||
lhsChildren.forEach((child: arrow15.Field, idx) => {
|
||||
compareFields(child, rhs.type.children[idx]);
|
||||
});
|
||||
}
|
||||
|
||||
@@ -12,11 +12,10 @@ import * as apiArrow from "apache-arrow";
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
import * as arrow13 from "apache-arrow-13";
|
||||
import * as arrow14 from "apache-arrow-14";
|
||||
import * as arrow15 from "apache-arrow-15";
|
||||
import * as arrow16 from "apache-arrow-16";
|
||||
import * as arrow17 from "apache-arrow-17";
|
||||
import * as arrow18 from "apache-arrow-18";
|
||||
|
||||
import * as tmp from "tmp";
|
||||
|
||||
@@ -24,154 +23,144 @@ import { connect } from "../lancedb";
|
||||
import { EmbeddingFunction, LanceSchema } from "../lancedb/embedding";
|
||||
import { getRegistry, register } from "../lancedb/embedding/registry";
|
||||
|
||||
describe.each([arrow13, arrow14, arrow15, arrow16, arrow17])(
|
||||
"LanceSchema",
|
||||
(arrow) => {
|
||||
test("should preserve input order", async () => {
|
||||
const schema = LanceSchema({
|
||||
id: new arrow.Int32(),
|
||||
text: new arrow.Utf8(),
|
||||
vector: new arrow.Float32(),
|
||||
});
|
||||
expect(schema.fields.map((x) => x.name)).toEqual([
|
||||
"id",
|
||||
"text",
|
||||
"vector",
|
||||
]);
|
||||
describe.each([arrow15, arrow16, arrow17, arrow18])("LanceSchema", (arrow) => {
|
||||
test("should preserve input order", async () => {
|
||||
const schema = LanceSchema({
|
||||
id: new arrow.Int32(),
|
||||
text: new arrow.Utf8(),
|
||||
vector: new arrow.Float32(),
|
||||
});
|
||||
},
|
||||
);
|
||||
expect(schema.fields.map((x) => x.name)).toEqual(["id", "text", "vector"]);
|
||||
});
|
||||
});
|
||||
|
||||
describe.each([arrow13, arrow14, arrow15, arrow16, arrow17])(
|
||||
"Registry",
|
||||
(arrow) => {
|
||||
let tmpDir: tmp.DirResult;
|
||||
beforeEach(() => {
|
||||
tmpDir = tmp.dirSync({ unsafeCleanup: true });
|
||||
});
|
||||
describe.each([arrow15, arrow16, arrow17, arrow18])("Registry", (arrow) => {
|
||||
let tmpDir: tmp.DirResult;
|
||||
beforeEach(() => {
|
||||
tmpDir = tmp.dirSync({ unsafeCleanup: true });
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
tmpDir.removeCallback();
|
||||
getRegistry().reset();
|
||||
});
|
||||
afterEach(() => {
|
||||
tmpDir.removeCallback();
|
||||
getRegistry().reset();
|
||||
});
|
||||
|
||||
it("should register a new item to the registry", async () => {
|
||||
@register("mock-embedding")
|
||||
class MockEmbeddingFunction extends EmbeddingFunction<string> {
|
||||
toJSON(): object {
|
||||
return {
|
||||
someText: "hello",
|
||||
};
|
||||
}
|
||||
constructor() {
|
||||
super();
|
||||
}
|
||||
ndims() {
|
||||
return 3;
|
||||
}
|
||||
embeddingDataType() {
|
||||
return new arrow.Float32() as apiArrow.Float;
|
||||
}
|
||||
async computeSourceEmbeddings(data: string[]) {
|
||||
return data.map(() => [1, 2, 3]);
|
||||
}
|
||||
it("should register a new item to the registry", async () => {
|
||||
@register("mock-embedding")
|
||||
class MockEmbeddingFunction extends EmbeddingFunction<string> {
|
||||
toJSON(): object {
|
||||
return {
|
||||
someText: "hello",
|
||||
};
|
||||
}
|
||||
|
||||
const func = getRegistry()
|
||||
.get<MockEmbeddingFunction>("mock-embedding")!
|
||||
.create();
|
||||
|
||||
const schema = LanceSchema({
|
||||
id: new arrow.Int32(),
|
||||
text: func.sourceField(new arrow.Utf8() as apiArrow.DataType),
|
||||
vector: func.vectorField(),
|
||||
});
|
||||
|
||||
const db = await connect(tmpDir.name);
|
||||
const table = await db.createTable(
|
||||
"test",
|
||||
[
|
||||
{ id: 1, text: "hello" },
|
||||
{ id: 2, text: "world" },
|
||||
],
|
||||
{ schema },
|
||||
);
|
||||
const expected = [
|
||||
[1, 2, 3],
|
||||
[1, 2, 3],
|
||||
];
|
||||
const actual = await table.query().toArrow();
|
||||
const vectors = actual.getChild("vector")!.toArray();
|
||||
expect(JSON.parse(JSON.stringify(vectors))).toEqual(
|
||||
JSON.parse(JSON.stringify(expected)),
|
||||
);
|
||||
});
|
||||
test("should error if registering with the same name", async () => {
|
||||
class MockEmbeddingFunction extends EmbeddingFunction<string> {
|
||||
toJSON(): object {
|
||||
return {
|
||||
someText: "hello",
|
||||
};
|
||||
}
|
||||
constructor() {
|
||||
super();
|
||||
}
|
||||
ndims() {
|
||||
return 3;
|
||||
}
|
||||
embeddingDataType() {
|
||||
return new arrow.Float32() as apiArrow.Float;
|
||||
}
|
||||
async computeSourceEmbeddings(data: string[]) {
|
||||
return data.map(() => [1, 2, 3]);
|
||||
}
|
||||
constructor() {
|
||||
super();
|
||||
}
|
||||
register("mock-embedding")(MockEmbeddingFunction);
|
||||
expect(() => register("mock-embedding")(MockEmbeddingFunction)).toThrow(
|
||||
'Embedding function with alias "mock-embedding" already exists',
|
||||
);
|
||||
});
|
||||
test("schema should contain correct metadata", async () => {
|
||||
class MockEmbeddingFunction extends EmbeddingFunction<string> {
|
||||
toJSON(): object {
|
||||
return {
|
||||
someText: "hello",
|
||||
};
|
||||
}
|
||||
constructor() {
|
||||
super();
|
||||
}
|
||||
ndims() {
|
||||
return 3;
|
||||
}
|
||||
embeddingDataType() {
|
||||
return new arrow.Float32() as apiArrow.Float;
|
||||
}
|
||||
async computeSourceEmbeddings(data: string[]) {
|
||||
return data.map(() => [1, 2, 3]);
|
||||
}
|
||||
ndims() {
|
||||
return 3;
|
||||
}
|
||||
const func = new MockEmbeddingFunction();
|
||||
embeddingDataType() {
|
||||
return new arrow.Float32() as apiArrow.Float;
|
||||
}
|
||||
async computeSourceEmbeddings(data: string[]) {
|
||||
return data.map(() => [1, 2, 3]);
|
||||
}
|
||||
}
|
||||
|
||||
const schema = LanceSchema({
|
||||
id: new arrow.Int32(),
|
||||
text: func.sourceField(new arrow.Utf8() as apiArrow.DataType),
|
||||
vector: func.vectorField(),
|
||||
});
|
||||
const expectedMetadata = new Map<string, string>([
|
||||
[
|
||||
"embedding_functions",
|
||||
JSON.stringify([
|
||||
{
|
||||
sourceColumn: "text",
|
||||
vectorColumn: "vector",
|
||||
name: "MockEmbeddingFunction",
|
||||
model: { someText: "hello" },
|
||||
},
|
||||
]),
|
||||
],
|
||||
]);
|
||||
expect(schema.metadata).toEqual(expectedMetadata);
|
||||
const func = getRegistry()
|
||||
.get<MockEmbeddingFunction>("mock-embedding")!
|
||||
.create();
|
||||
|
||||
const schema = LanceSchema({
|
||||
id: new arrow.Int32(),
|
||||
text: func.sourceField(new arrow.Utf8() as apiArrow.DataType),
|
||||
vector: func.vectorField(),
|
||||
});
|
||||
},
|
||||
);
|
||||
|
||||
const db = await connect(tmpDir.name);
|
||||
const table = await db.createTable(
|
||||
"test",
|
||||
[
|
||||
{ id: 1, text: "hello" },
|
||||
{ id: 2, text: "world" },
|
||||
],
|
||||
{ schema },
|
||||
);
|
||||
const expected = [
|
||||
[1, 2, 3],
|
||||
[1, 2, 3],
|
||||
];
|
||||
const actual = await table.query().toArrow();
|
||||
const vectors = actual.getChild("vector")!.toArray();
|
||||
expect(JSON.parse(JSON.stringify(vectors))).toEqual(
|
||||
JSON.parse(JSON.stringify(expected)),
|
||||
);
|
||||
});
|
||||
test("should error if registering with the same name", async () => {
|
||||
class MockEmbeddingFunction extends EmbeddingFunction<string> {
|
||||
toJSON(): object {
|
||||
return {
|
||||
someText: "hello",
|
||||
};
|
||||
}
|
||||
constructor() {
|
||||
super();
|
||||
}
|
||||
ndims() {
|
||||
return 3;
|
||||
}
|
||||
embeddingDataType() {
|
||||
return new arrow.Float32() as apiArrow.Float;
|
||||
}
|
||||
async computeSourceEmbeddings(data: string[]) {
|
||||
return data.map(() => [1, 2, 3]);
|
||||
}
|
||||
}
|
||||
register("mock-embedding")(MockEmbeddingFunction);
|
||||
expect(() => register("mock-embedding")(MockEmbeddingFunction)).toThrow(
|
||||
'Embedding function with alias "mock-embedding" already exists',
|
||||
);
|
||||
});
|
||||
test("schema should contain correct metadata", async () => {
|
||||
class MockEmbeddingFunction extends EmbeddingFunction<string> {
|
||||
toJSON(): object {
|
||||
return {
|
||||
someText: "hello",
|
||||
};
|
||||
}
|
||||
constructor() {
|
||||
super();
|
||||
}
|
||||
ndims() {
|
||||
return 3;
|
||||
}
|
||||
embeddingDataType() {
|
||||
return new arrow.Float32() as apiArrow.Float;
|
||||
}
|
||||
async computeSourceEmbeddings(data: string[]) {
|
||||
return data.map(() => [1, 2, 3]);
|
||||
}
|
||||
}
|
||||
const func = new MockEmbeddingFunction();
|
||||
|
||||
const schema = LanceSchema({
|
||||
id: new arrow.Int32(),
|
||||
text: func.sourceField(new arrow.Utf8() as apiArrow.DataType),
|
||||
vector: func.vectorField(),
|
||||
});
|
||||
const expectedMetadata = new Map<string, string>([
|
||||
[
|
||||
"embedding_functions",
|
||||
JSON.stringify([
|
||||
{
|
||||
sourceColumn: "text",
|
||||
vectorColumn: "vector",
|
||||
name: "MockEmbeddingFunction",
|
||||
model: { someText: "hello" },
|
||||
},
|
||||
]),
|
||||
],
|
||||
]);
|
||||
expect(schema.metadata).toEqual(expectedMetadata);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -16,11 +16,10 @@ import * as fs from "fs";
|
||||
import * as path from "path";
|
||||
import * as tmp from "tmp";
|
||||
|
||||
import * as arrow13 from "apache-arrow-13";
|
||||
import * as arrow14 from "apache-arrow-14";
|
||||
import * as arrow15 from "apache-arrow-15";
|
||||
import * as arrow16 from "apache-arrow-16";
|
||||
import * as arrow17 from "apache-arrow-17";
|
||||
import * as arrow18 from "apache-arrow-18";
|
||||
|
||||
import { Table, connect } from "../lancedb";
|
||||
import {
|
||||
@@ -44,7 +43,7 @@ import {
|
||||
} from "../lancedb/embedding";
|
||||
import { Index } from "../lancedb/indices";
|
||||
|
||||
describe.each([arrow13, arrow14, arrow15, arrow16, arrow17])(
|
||||
describe.each([arrow15, arrow16, arrow17, arrow18])(
|
||||
"Given a table",
|
||||
// biome-ignore lint/suspicious/noExplicitAny: <explanation>
|
||||
(arrow: any) => {
|
||||
@@ -52,11 +51,10 @@ describe.each([arrow13, arrow14, arrow15, arrow16, arrow17])(
|
||||
let table: Table;
|
||||
|
||||
const schema:
|
||||
| import("apache-arrow-13").Schema
|
||||
| import("apache-arrow-14").Schema
|
||||
| import("apache-arrow-15").Schema
|
||||
| import("apache-arrow-16").Schema
|
||||
| import("apache-arrow-17").Schema = new arrow.Schema([
|
||||
| import("apache-arrow-17").Schema
|
||||
| import("apache-arrow-18").Schema = new arrow.Schema([
|
||||
new arrow.Field("id", new arrow.Float64(), true),
|
||||
]);
|
||||
|
||||
@@ -569,6 +567,15 @@ describe("When creating an index", () => {
    // TODO: Verify parameters when we can load index config as part of list indices
  });

  it("should be able to create 4bit IVF_PQ", async () => {
    await tbl.createIndex("vec", {
      config: Index.ivfPq({
        numPartitions: 10,
        numBits: 4,
      }),
    });
  });

  it("should allow me to replace (or not) an existing index", async () => {
    await tbl.createIndex("id");
    // Default is replace=true
@@ -939,7 +946,7 @@ describe("when optimizing a dataset", () => {
  });
});

describe.each([arrow13, arrow14, arrow15, arrow16, arrow17])(
describe.each([arrow15, arrow16, arrow17, arrow18])(
  "when optimizing a dataset",
  // biome-ignore lint/suspicious/noExplicitAny: <explanation>
  (arrow: any) => {
@@ -1051,6 +1058,26 @@ describe.each([arrow13, arrow14, arrow15, arrow16, arrow17])(
      expect(results[0].text).toBe(data[0].text);
    });

    test("full text search without lowercase", async () => {
      const db = await connect(tmpDir.name);
      const data = [
        { text: "hello world", vector: [0.1, 0.2, 0.3] },
        { text: "Hello World", vector: [0.4, 0.5, 0.6] },
      ];
      const table = await db.createTable("test", data);
      await table.createIndex("text", {
        config: Index.fts({ withPosition: false }),
      });
      const results = await table.search("hello").toArray();
      expect(results.length).toBe(2);

      await table.createIndex("text", {
        config: Index.fts({ withPosition: false, lowercase: false }),
      });
      const results2 = await table.search("hello").toArray();
      expect(results2.length).toBe(1);
    });

    test("full text search phrase query", async () => {
      const db = await connect(tmpDir.name);
      const data = [

@@ -119,7 +119,9 @@ test("basic table examples", async () => {

  {
    // --8<-- [start:add_columns]
    await tbl.addColumns([{ name: "double_price", valueSql: "price * 2" }]);
    await tbl.addColumns([
      { name: "double_price", valueSql: "cast((price * 2) as Float)" },
    ]);
    // --8<-- [end:add_columns]
    // --8<-- [start:alter_columns]
    await tbl.alterColumns([

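The `alterColumns` call above is cut off by the diff context. For reference, a hedged sketch of the shape of such a call, mirroring the Python quickstart updated later in this change (the exact TypeScript field names are assumptions):

```ts
await tbl.alterColumns([
  // Rename the computed column added above; a new type can also be supplied.
  { path: "double_price", rename: "dbl_price" },
]);
```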
@@ -47,6 +47,16 @@ export interface IvfPqOptions {
   */
  numSubVectors?: number;

  /**
   * Number of bits per sub-vector.
   *
   * This value controls how much each subvector is compressed. The more bits the more
   * accurate the index will be but the slower search. The default is 8 bits.
   *
   * The number of bits must be 4 or 8.
   */
  numBits?: number;

  /**
   * Distance type to use to build the index.
   *
@@ -339,6 +349,52 @@ export interface FtsOptions {
   * which will make the index smaller and faster to build, but will not support phrase queries.
   */
  withPosition?: boolean;

  /**
   * The tokenizer to use when building the index.
   * The default is "simple".
   *
   * The following tokenizers are available:
   *
   * "simple" - Simple tokenizer. This tokenizer splits the text into tokens using whitespace and punctuation as a delimiter.
   *
   * "whitespace" - Whitespace tokenizer. This tokenizer splits the text into tokens using whitespace as a delimiter.
   *
   * "raw" - Raw tokenizer. This tokenizer does not split the text into tokens and indexes the entire text as a single token.
   */
  baseTokenizer?: "simple" | "whitespace" | "raw";

  /**
   * language for stemming and stop words
   * this is only used when `stem` or `remove_stop_words` is true
   */
  language?: string;

  /**
   * maximum token length
   * tokens longer than this length will be ignored
   */
  maxTokenLength?: number;

  /**
   * whether to lowercase tokens
   */
  lowercase?: boolean;

  /**
   * whether to stem tokens
   */
  stem?: boolean;

  /**
   * whether to remove stop words
   */
  removeStopWords?: boolean;

  /**
   * whether to remove punctuation
   */
  asciiFolding?: boolean;
}

export class Index {
@@ -440,7 +496,18 @@ export class Index {
   * For now, the full text search index only supports English, and doesn't support phrase search.
   */
  static fts(options?: Partial<FtsOptions>) {
    return new Index(LanceDbIndex.fts(options?.withPosition));
    return new Index(
      LanceDbIndex.fts(
        options?.withPosition,
        options?.baseTokenizer,
        options?.language,
        options?.maxTokenLength,
        options?.lowercase,
        options?.stem,
        options?.removeStopWords,
        options?.asciiFolding,
      ),
    );
  }

  /**

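Taken together, the new `numBits` and tokenizer options are driven from `Table.createIndex`. A short sketch based on the tests in this change (`tbl` is an already-open `Table`; the column names are illustrative):

```ts
import { Index } from "@lancedb/lancedb";

// 4-bit product quantization: smaller index, lower recall than the 8-bit default.
await tbl.createIndex("vec", {
  config: Index.ivfPq({ numPartitions: 10, numBits: 4 }),
});

// Full-text search with a case-sensitive, position-free index.
await tbl.createIndex("text", {
  config: Index.fts({ withPosition: false, lowercase: false, stem: false }),
});
```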
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@lancedb/lancedb-darwin-arm64",
|
||||
"version": "0.14.0",
|
||||
"version": "0.14.1-beta.3",
|
||||
"os": ["darwin"],
|
||||
"cpu": ["arm64"],
|
||||
"main": "lancedb.darwin-arm64.node",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@lancedb/lancedb-darwin-x64",
|
||||
"version": "0.14.0",
|
||||
"version": "0.14.1-beta.3",
|
||||
"os": ["darwin"],
|
||||
"cpu": ["x64"],
|
||||
"main": "lancedb.darwin-x64.node",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@lancedb/lancedb-linux-arm64-gnu",
|
||||
"version": "0.14.0",
|
||||
"version": "0.14.1-beta.3",
|
||||
"os": ["linux"],
|
||||
"cpu": ["arm64"],
|
||||
"main": "lancedb.linux-arm64-gnu.node",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@lancedb/lancedb-linux-arm64-musl",
|
||||
"version": "0.14.0",
|
||||
"version": "0.14.1-beta.3",
|
||||
"os": ["linux"],
|
||||
"cpu": ["arm64"],
|
||||
"main": "lancedb.linux-arm64-musl.node",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@lancedb/lancedb-linux-x64-gnu",
|
||||
"version": "0.14.0",
|
||||
"version": "0.14.1-beta.3",
|
||||
"os": ["linux"],
|
||||
"cpu": ["x64"],
|
||||
"main": "lancedb.linux-x64-gnu.node",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@lancedb/lancedb-linux-x64-musl",
|
||||
"version": "0.14.0",
|
||||
"version": "0.14.1-beta.3",
|
||||
"os": ["linux"],
|
||||
"cpu": ["x64"],
|
||||
"main": "lancedb.linux-x64-musl.node",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@lancedb/lancedb-win32-arm64-msvc",
|
||||
"version": "0.14.0",
|
||||
"version": "0.14.1-beta.3",
|
||||
"os": [
|
||||
"win32"
|
||||
],
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@lancedb/lancedb-win32-x64-msvc",
|
||||
"version": "0.14.0",
|
||||
"version": "0.14.1-beta.3",
|
||||
"os": ["win32"],
|
||||
"cpu": ["x64"],
|
||||
"main": "lancedb.win32-x64-msvc.node",
|
||||
|
||||
148
nodejs/package-lock.json
generated
148
nodejs/package-lock.json
generated
@@ -31,11 +31,10 @@
|
||||
"@types/jest": "^29.1.2",
|
||||
"@types/node": "^22.7.4",
|
||||
"@types/tmp": "^0.2.6",
|
||||
"apache-arrow-13": "npm:apache-arrow@13.0.0",
|
||||
"apache-arrow-14": "npm:apache-arrow@14.0.0",
|
||||
"apache-arrow-15": "npm:apache-arrow@15.0.0",
|
||||
"apache-arrow-16": "npm:apache-arrow@16.0.0",
|
||||
"apache-arrow-17": "npm:apache-arrow@17.0.0",
|
||||
"apache-arrow-18": "npm:apache-arrow@18.0.0",
|
||||
"eslint": "^8.57.0",
|
||||
"jest": "^29.7.0",
|
||||
"shx": "^0.3.4",
|
||||
@@ -54,7 +53,7 @@
|
||||
"openai": "^4.29.2"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"apache-arrow": ">=13.0.0 <=17.0.0"
|
||||
"apache-arrow": ">=15.0.0 <=18.1.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@75lb/deep-merge": {
|
||||
@@ -5146,12 +5145,6 @@
|
||||
"integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==",
|
||||
"devOptional": true
|
||||
},
|
||||
"node_modules/@types/pad-left": {
|
||||
"version": "2.1.1",
|
||||
"resolved": "https://registry.npmjs.org/@types/pad-left/-/pad-left-2.1.1.tgz",
|
||||
"integrity": "sha512-Xd22WCRBydkGSApl5Bw0PhAOHKSVjNL3E3AwzKaps96IMraPqy5BvZIsBVK6JLwdybUzjHnuWVwpDd0JjTfHXA==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/@types/semver": {
|
||||
"version": "7.5.6",
|
||||
"resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.5.6.tgz",
|
||||
@@ -5341,74 +5334,6 @@
|
||||
"arrow2csv": "bin/arrow2csv.cjs"
|
||||
}
|
||||
},
|
||||
"node_modules/apache-arrow-13": {
|
||||
"name": "apache-arrow",
|
||||
"version": "13.0.0",
|
||||
"resolved": "https://registry.npmjs.org/apache-arrow/-/apache-arrow-13.0.0.tgz",
|
||||
"integrity": "sha512-3gvCX0GDawWz6KFNC28p65U+zGh/LZ6ZNKWNu74N6CQlKzxeoWHpi4CgEQsgRSEMuyrIIXi1Ea2syja7dwcHvw==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@types/command-line-args": "5.2.0",
|
||||
"@types/command-line-usage": "5.0.2",
|
||||
"@types/node": "20.3.0",
|
||||
"@types/pad-left": "2.1.1",
|
||||
"command-line-args": "5.2.1",
|
||||
"command-line-usage": "7.0.1",
|
||||
"flatbuffers": "23.5.26",
|
||||
"json-bignum": "^0.0.3",
|
||||
"pad-left": "^2.1.0",
|
||||
"tslib": "^2.5.3"
|
||||
},
|
||||
"bin": {
|
||||
"arrow2csv": "bin/arrow2csv.js"
|
||||
}
|
||||
},
|
||||
"node_modules/apache-arrow-13/node_modules/@types/command-line-args": {
|
||||
"version": "5.2.0",
|
||||
"resolved": "https://registry.npmjs.org/@types/command-line-args/-/command-line-args-5.2.0.tgz",
|
||||
"integrity": "sha512-UuKzKpJJ/Ief6ufIaIzr3A/0XnluX7RvFgwkV89Yzvm77wCh1kFaFmqN8XEnGcN62EuHdedQjEMb8mYxFLGPyA==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/apache-arrow-13/node_modules/@types/node": {
|
||||
"version": "20.3.0",
|
||||
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.3.0.tgz",
|
||||
"integrity": "sha512-cumHmIAf6On83X7yP+LrsEyUOf/YlociZelmpRYaGFydoaPdxdt80MAbu6vWerQT2COCp2nPvHdsbD7tHn/YlQ==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/apache-arrow-14": {
|
||||
"name": "apache-arrow",
|
||||
"version": "14.0.0",
|
||||
"resolved": "https://registry.npmjs.org/apache-arrow/-/apache-arrow-14.0.0.tgz",
|
||||
"integrity": "sha512-9cKE24YxkaqAZWJddrVnjUJMLwq6CokOjK+AHpm145rMJNsBZXQkzqouemQyEX0+/iHYRnGym6X6ZgNcHHrcWA==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@types/command-line-args": "5.2.0",
|
||||
"@types/command-line-usage": "5.0.2",
|
||||
"@types/node": "20.3.0",
|
||||
"@types/pad-left": "2.1.1",
|
||||
"command-line-args": "5.2.1",
|
||||
"command-line-usage": "7.0.1",
|
||||
"flatbuffers": "23.5.26",
|
||||
"json-bignum": "^0.0.3",
|
||||
"pad-left": "^2.1.0",
|
||||
"tslib": "^2.5.3"
|
||||
},
|
||||
"bin": {
|
||||
"arrow2csv": "bin/arrow2csv.js"
|
||||
}
|
||||
},
|
||||
"node_modules/apache-arrow-14/node_modules/@types/command-line-args": {
|
||||
"version": "5.2.0",
|
||||
"resolved": "https://registry.npmjs.org/@types/command-line-args/-/command-line-args-5.2.0.tgz",
|
||||
"integrity": "sha512-UuKzKpJJ/Ief6ufIaIzr3A/0XnluX7RvFgwkV89Yzvm77wCh1kFaFmqN8XEnGcN62EuHdedQjEMb8mYxFLGPyA==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/apache-arrow-14/node_modules/@types/node": {
|
||||
"version": "20.3.0",
|
||||
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.3.0.tgz",
|
||||
"integrity": "sha512-cumHmIAf6On83X7yP+LrsEyUOf/YlociZelmpRYaGFydoaPdxdt80MAbu6vWerQT2COCp2nPvHdsbD7tHn/YlQ==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/apache-arrow-15": {
|
||||
"name": "apache-arrow",
|
||||
"version": "15.0.0",
|
||||
@@ -5529,6 +5454,54 @@
|
||||
"integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/apache-arrow-18": {
|
||||
"name": "apache-arrow",
|
||||
"version": "18.0.0",
|
||||
"resolved": "https://registry.npmjs.org/apache-arrow/-/apache-arrow-18.0.0.tgz",
|
||||
"integrity": "sha512-gFlPaqN9osetbB83zC29AbbZqGiCuFH1vyyPseJ+B7SIbfBtESV62mMT/CkiIt77W6ykC/nTWFzTXFs0Uldg4g==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@swc/helpers": "^0.5.11",
|
||||
"@types/command-line-args": "^5.2.3",
|
||||
"@types/command-line-usage": "^5.0.4",
|
||||
"@types/node": "^20.13.0",
|
||||
"command-line-args": "^5.2.1",
|
||||
"command-line-usage": "^7.0.1",
|
||||
"flatbuffers": "^24.3.25",
|
||||
"json-bignum": "^0.0.3",
|
||||
"tslib": "^2.6.2"
|
||||
},
|
||||
"bin": {
|
||||
"arrow2csv": "bin/arrow2csv.js"
|
||||
}
|
||||
},
|
||||
"node_modules/apache-arrow-18/node_modules/@types/command-line-usage": {
|
||||
"version": "5.0.4",
|
||||
"resolved": "https://registry.npmjs.org/@types/command-line-usage/-/command-line-usage-5.0.4.tgz",
|
||||
"integrity": "sha512-BwR5KP3Es/CSht0xqBcUXS3qCAUVXwpRKsV2+arxeb65atasuXG9LykC9Ab10Cw3s2raH92ZqOeILaQbsB2ACg==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/apache-arrow-18/node_modules/@types/node": {
|
||||
"version": "20.17.9",
|
||||
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.17.9.tgz",
|
||||
"integrity": "sha512-0JOXkRyLanfGPE2QRCwgxhzlBAvaRdCNMcvbd7jFfpmD4eEXll7LRwy5ymJmyeZqk7Nh7eD2LeUyQ68BbndmXw==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"undici-types": "~6.19.2"
|
||||
}
|
||||
},
|
||||
"node_modules/apache-arrow-18/node_modules/flatbuffers": {
|
||||
"version": "24.3.25",
|
||||
"resolved": "https://registry.npmjs.org/flatbuffers/-/flatbuffers-24.3.25.tgz",
|
||||
"integrity": "sha512-3HDgPbgiwWMI9zVB7VYBHaMrbOO7Gm0v+yD2FV/sCKj+9NDeVL7BOBYUuhWAQGKWOzBo8S9WdMvV0eixO233XQ==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/apache-arrow-18/node_modules/undici-types": {
|
||||
"version": "6.19.8",
|
||||
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.19.8.tgz",
|
||||
"integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/apache-arrow/node_modules/@types/node": {
|
||||
"version": "20.16.10",
|
||||
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.16.10.tgz",
|
||||
@@ -8533,18 +8506,6 @@
|
||||
"integrity": "sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==",
|
||||
"optional": true
|
||||
},
|
||||
"node_modules/pad-left": {
|
||||
"version": "2.1.0",
|
||||
"resolved": "https://registry.npmjs.org/pad-left/-/pad-left-2.1.0.tgz",
|
||||
"integrity": "sha512-HJxs9K9AztdIQIAIa/OIazRAUW/L6B9hbQDxO4X07roW3eo9XqZc2ur9bn1StH9CnbbI9EgvejHQX7CBpCF1QA==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"repeat-string": "^1.5.4"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/parent-module": {
|
||||
"version": "1.0.1",
|
||||
"resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz",
|
||||
@@ -8885,15 +8846,6 @@
|
||||
"resolved": "https://registry.npmjs.org/reflect-metadata/-/reflect-metadata-0.2.2.tgz",
|
||||
"integrity": "sha512-urBwgfrvVP/eAyXx4hluJivBKzuEbSQs9rKWCrCkbSxNv8mxPcUZKeuoF3Uy4mJl3Lwprp6yy5/39VWigZ4K6Q=="
|
||||
},
|
||||
"node_modules/repeat-string": {
|
||||
"version": "1.6.1",
|
||||
"resolved": "https://registry.npmjs.org/repeat-string/-/repeat-string-1.6.1.tgz",
|
||||
"integrity": "sha512-PV0dzCYDNfRi1jCDbJzpW7jNNDRuCOG/jI5ctQcGKt/clZD+YcPS3yIlWuTJMmESC8aevCFmWJy5wjAFgNqN6w==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": ">=0.10"
|
||||
}
|
||||
},
|
||||
"node_modules/require-directory": {
|
||||
"version": "2.1.1",
|
||||
"resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz",
|
||||
|
||||
@@ -11,7 +11,7 @@
|
||||
"ann"
|
||||
],
|
||||
"private": false,
|
||||
"version": "0.14.0",
|
||||
"version": "0.14.1-beta.3",
|
||||
"main": "dist/index.js",
|
||||
"exports": {
|
||||
".": "./dist/index.js",
|
||||
@@ -48,11 +48,10 @@
|
||||
"@types/jest": "^29.1.2",
|
||||
"@types/node": "^22.7.4",
|
||||
"@types/tmp": "^0.2.6",
|
||||
"apache-arrow-13": "npm:apache-arrow@13.0.0",
|
||||
"apache-arrow-14": "npm:apache-arrow@14.0.0",
|
||||
"apache-arrow-15": "npm:apache-arrow@15.0.0",
|
||||
"apache-arrow-16": "npm:apache-arrow@16.0.0",
|
||||
"apache-arrow-17": "npm:apache-arrow@17.0.0",
|
||||
"apache-arrow-18": "npm:apache-arrow@18.0.0",
|
||||
"eslint": "^8.57.0",
|
||||
"jest": "^29.7.0",
|
||||
"shx": "^0.3.4",
|
||||
@@ -79,6 +78,7 @@
|
||||
"build-release": "npm run build:release && tsc -b && shx cp lancedb/native.d.ts dist/native.d.ts",
|
||||
"lint-ci": "biome ci .",
|
||||
"docs": "typedoc --plugin typedoc-plugin-markdown --out ../docs/src/js lancedb/index.ts",
|
||||
"postdocs": "node typedoc_post_process.js",
|
||||
"lint": "biome check . && biome format .",
|
||||
"lint-fix": "biome check --write . && biome format --write .",
|
||||
"prepublishOnly": "napi prepublish -t npm",
|
||||
@@ -95,6 +95,6 @@
|
||||
"openai": "^4.29.2"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"apache-arrow": ">=13.0.0 <=17.0.0"
|
||||
"apache-arrow": ">=15.0.0 <=18.1.0"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -45,6 +45,7 @@ impl Index {
        distance_type: Option<String>,
        num_partitions: Option<u32>,
        num_sub_vectors: Option<u32>,
        num_bits: Option<u32>,
        max_iterations: Option<u32>,
        sample_rate: Option<u32>,
    ) -> napi::Result<Self> {
@@ -59,6 +60,9 @@ impl Index {
        if let Some(num_sub_vectors) = num_sub_vectors {
            ivf_pq_builder = ivf_pq_builder.num_sub_vectors(num_sub_vectors);
        }
        if let Some(num_bits) = num_bits {
            ivf_pq_builder = ivf_pq_builder.num_bits(num_bits);
        }
        if let Some(max_iterations) = max_iterations {
            ivf_pq_builder = ivf_pq_builder.max_iterations(max_iterations);
        }
@@ -92,11 +96,45 @@ impl Index {
    }

    #[napi(factory)]
    pub fn fts(with_position: Option<bool>) -> Self {
    #[allow(clippy::too_many_arguments)]
    pub fn fts(
        with_position: Option<bool>,
        base_tokenizer: Option<String>,
        language: Option<String>,
        max_token_length: Option<u32>,
        lower_case: Option<bool>,
        stem: Option<bool>,
        remove_stop_words: Option<bool>,
        ascii_folding: Option<bool>,
    ) -> Self {
        let mut opts = FtsIndexBuilder::default();
        let mut tokenizer_configs = opts.tokenizer_configs.clone();
        if let Some(with_position) = with_position {
            opts = opts.with_position(with_position);
        }
        if let Some(base_tokenizer) = base_tokenizer {
            tokenizer_configs = tokenizer_configs.base_tokenizer(base_tokenizer);
        }
        if let Some(language) = language {
            tokenizer_configs = tokenizer_configs.language(&language).unwrap();
        }
        if let Some(max_token_length) = max_token_length {
            tokenizer_configs = tokenizer_configs.max_token_length(Some(max_token_length as usize));
        }
        if let Some(lower_case) = lower_case {
            tokenizer_configs = tokenizer_configs.lower_case(lower_case);
        }
        if let Some(stem) = stem {
            tokenizer_configs = tokenizer_configs.stem(stem);
        }
        if let Some(remove_stop_words) = remove_stop_words {
            tokenizer_configs = tokenizer_configs.remove_stop_words(remove_stop_words);
        }
        if let Some(ascii_folding) = ascii_folding {
            tokenizer_configs = tokenizer_configs.ascii_folding(ascii_folding);
        }
        opts.tokenizer_configs = tokenizer_configs;

        Self {
            inner: Mutex::new(Some(LanceDbIndex::FTS(opts))),
        }

@@ -8,5 +8,6 @@
    "lancedb/native.d.ts:Table"
  ],
  "useHTMLEncodedBrackets": true,
  "useCodeBlocks": true,
  "disableSources": true
}

nodejs/typedoc_post_process.js (new file, 63 lines)
@@ -0,0 +1,63 @@
const fs = require("fs");
const path = require("path");

// Read all files in the directory
function processDirectory(directoryPath) {
  fs.readdir(directoryPath, { withFileTypes: true }, (err, files) => {
    if (err) {
      return console.error("Unable to scan directory: " + err);
    }

    files.forEach((file) => {
      const filePath = path.join(directoryPath, file.name);

      if (file.isDirectory()) {
        // Recursively process subdirectory
        processDirectory(filePath);
      } else if (file.isFile()) {
        // Read each file
        fs.readFile(filePath, "utf8", (err, data) => {
          if (err) {
            return console.error("Unable to read file: " + err);
          }

          // Process the file content
          const processedData = processContents(data);

          // Write the processed content back to the file
          fs.writeFile(filePath, processedData, "utf8", (err) => {
            if (err) {
              return console.error("Unable to write file: " + err);
            }
            console.log(`Processed file: ${filePath}`);
          });
        });
      }
    });
  });
}

function processContents(contents) {
  // This changes the parameters section to put the parameter description on
  // the same line as the bullet with the parameter name and type.
  return contents.replace(/(## Parameters[\s\S]*?)(?=##|$)/g, (match) => {
    let lines = match
      .split("\n")
      .map((line) => line.trim())
      .filter((line) => line !== "")
      .map((line) => {
        if (line.startsWith("##")) {
          return line;
        } else if (line.startsWith("•")) {
          return "\n*" + line.substring(1);
        } else {
          return " " + line;
        }
      });
    return lines.join("\n") + "\n\n";
  });
}

// Start processing from the root directory
processDirectory("../docs/src/js");
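A standalone sketch of what `processContents` does to a generated "## Parameters" section. The sample text is made up; the transformation logic is copied from the script above:

```ts
const sample = [
  "## Parameters",
  "",
  "• **name?**: `string`",
  "",
  "Optional alias to register under.",
  "",
].join("\n");

const processed = sample.replace(/(## Parameters[\s\S]*?)(?=##|$)/g, (match) => {
  const lines = match
    .split("\n")
    .map((line) => line.trim())
    .filter((line) => line !== "")
    .map((line) => {
      if (line.startsWith("##")) return line;
      if (line.startsWith("•")) return "\n*" + line.substring(1);
      return " " + line;
    });
  return lines.join("\n") + "\n\n";
});

console.log(processed);
// ## Parameters
//
// * **name?**: `string`
//  Optional alias to register under.
```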
@@ -1,5 +1,5 @@
[tool.bumpversion]
current_version = "0.17.1-beta.0"
current_version = "0.17.1-beta.4"
parse = """(?x)
    (?P<major>0|[1-9]\\d*)\\.
    (?P<minor>0|[1-9]\\d*)\\.

@@ -1,6 +1,6 @@
[package]
name = "lancedb-python"
version = "0.17.1-beta.0"
version = "0.17.1-beta.4"
edition.workspace = true
description = "Python bindings for LanceDB"
license.workspace = true

@@ -3,7 +3,7 @@ name = "lancedb"
# version in Cargo.toml
dependencies = [
    "deprecation",
    "pylance==0.20.0",
    "pylance==0.21.0b3",
    "tqdm>=4.27.0",
    "pydantic>=1.10",
    "packaging",

@@ -178,6 +178,12 @@ class HnswPq:
        If the dimension is not divisible by 8 then we use 1 subvector. This is not
        ideal and will likely result in poor performance.

    num_bits: int, default 8
        Number of bits to encode each sub-vector.

        This value controls how much the sub-vectors are compressed. The more bits
        the more accurate the index but the slower search. Only 4 and 8 are supported.

    max_iterations: int, default 50

        Max iterations to train kmeans.
@@ -232,6 +238,7 @@ class HnswPq:
        distance_type: Optional[str] = None,
        num_partitions: Optional[int] = None,
        num_sub_vectors: Optional[int] = None,
        num_bits: Optional[int] = None,
        max_iterations: Optional[int] = None,
        sample_rate: Optional[int] = None,
        m: Optional[int] = None,
@@ -241,6 +248,7 @@ class HnswPq:
            distance_type=distance_type,
            num_partitions=num_partitions,
            num_sub_vectors=num_sub_vectors,
            num_bits=num_bits,
            max_iterations=max_iterations,
            sample_rate=sample_rate,
            m=m,
@@ -387,6 +395,7 @@ class IvfPq:
        distance_type: Optional[str] = None,
        num_partitions: Optional[int] = None,
        num_sub_vectors: Optional[int] = None,
        num_bits: Optional[int] = None,
        max_iterations: Optional[int] = None,
        sample_rate: Optional[int] = None,
    ):
@@ -449,6 +458,12 @@ class IvfPq:

        If the dimension is not divisible by 8 then we use 1 subvector. This is not
        ideal and will likely result in poor performance.
    num_bits: int, default 8
        Number of bits to encode each sub-vector.

        This value controls how much the sub-vectors are compressed. The more bits
        the more accurate the index but the slower search. The default is 8
        bits. Only 4 and 8 are supported.
    max_iterations: int, default 50
        Max iterations to train kmeans.

@@ -482,6 +497,7 @@ class IvfPq:
            distance_type=distance_type,
            num_partitions=num_partitions,
            num_sub_vectors=num_sub_vectors,
            num_bits=num_bits,
            max_iterations=max_iterations,
            sample_rate=sample_rate,
        )
@@ -1,15 +1,5 @@
|
||||
# Copyright 2023 LanceDB Developers
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
# SPDX-FileCopyrightText: Copyright The LanceDB Authors
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
@@ -1644,7 +1634,7 @@ class AsyncQuery(AsyncQueryBase):
|
||||
if (
|
||||
isinstance(query_vector, list)
|
||||
and len(query_vector) > 0
|
||||
and not isinstance(query_vector[0], (float, int))
|
||||
and isinstance(query_vector[0], (list, np.ndarray, pa.Array))
|
||||
):
|
||||
# multiple have been passed
|
||||
query_vectors = [AsyncQuery._query_vec_to_array(v) for v in query_vector]
|
||||
|
||||
@@ -413,6 +413,8 @@ class Table(ABC):
        replace: bool = True,
        accelerator: Optional[str] = None,
        index_cache_size: Optional[int] = None,
        *,
        num_bits: int = 8,
    ):
        """Create an index on the table.

@@ -439,6 +441,9 @@ class Table(ABC):
            Only support "cuda" for now.
        index_cache_size : int, optional
            The size of the index cache in number of entries. Default value is 256.
        num_bits: int
            The number of bits to encode sub-vectors. Only used with the IVF_PQ index.
            Only 4 and 8 are supported.
        """
        raise NotImplementedError

@@ -1430,6 +1435,8 @@ class LanceTable(Table):
        accelerator: Optional[str] = None,
        index_cache_size: Optional[int] = None,
        index_type="IVF_PQ",
        *,
        num_bits: int = 8,
    ):
        """Create an index on the table."""
        self._dataset_mut.create_index(
@@ -1441,6 +1448,7 @@ class LanceTable(Table):
            replace=replace,
            accelerator=accelerator,
            index_cache_size=index_cache_size,
            num_bits=num_bits,
        )

    def create_scalar_index(

@@ -75,6 +75,22 @@ def test_quickstart():
|
||||
for _ in range(1000)
|
||||
]
|
||||
)
|
||||
# --8<-- [start:add_columns]
|
||||
tbl.add_columns({"double_price": "cast((price * 2) as float)"})
|
||||
# --8<-- [end:add_columns]
|
||||
# --8<-- [start:alter_columns]
|
||||
tbl.alter_columns(
|
||||
{
|
||||
"path": "double_price",
|
||||
"rename": "dbl_price",
|
||||
"data_type": pa.float64(),
|
||||
"nullable": True,
|
||||
}
|
||||
)
|
||||
# --8<-- [end:alter_columns]
|
||||
# --8<-- [start:drop_columns]
|
||||
tbl.drop_columns(["dbl_price"])
|
||||
# --8<-- [end:drop_columns]
|
||||
# --8<-- [start:create_index]
|
||||
# Synchronous client
|
||||
tbl.create_index(num_sub_vectors=1)
|
||||
|
||||
@@ -108,6 +108,29 @@ async def test_create_vector_index(some_table: AsyncTable):
    assert stats.num_indices == 1


@pytest.mark.asyncio
async def test_create_4bit_ivfpq_index(some_table: AsyncTable):
    # Can create
    await some_table.create_index("vector", config=IvfPq(num_bits=4))
    # Can recreate if replace=True
    await some_table.create_index("vector", config=IvfPq(num_bits=4), replace=True)
    # Can't recreate if replace=False
    with pytest.raises(RuntimeError, match="already exists"):
        await some_table.create_index("vector", replace=False)
    indices = await some_table.list_indices()
    assert len(indices) == 1
    assert indices[0].index_type == "IvfPq"
    assert indices[0].columns == ["vector"]
    assert indices[0].name == "vector_idx"

    stats = await some_table.index_stats("vector_idx")
    assert stats.index_type == "IVF_PQ"
    assert stats.distance_type == "l2"
    assert stats.num_indexed_rows == await some_table.count_rows()
    assert stats.num_unindexed_rows == 0
    assert stats.num_indices == 1


@pytest.mark.asyncio
async def test_create_hnswpq_index(some_table: AsyncTable):
    await some_table.create_index("vector", config=HnswPq(num_partitions=10))

@@ -3,6 +3,7 @@
|
||||
|
||||
import unittest.mock as mock
|
||||
from datetime import timedelta
|
||||
from pathlib import Path
|
||||
|
||||
import lancedb
|
||||
from lancedb.index import IvfPq
|
||||
@@ -384,3 +385,19 @@ async def test_query_to_list_async(table_async: AsyncTable):
|
||||
assert len(list) == 2
|
||||
assert list[0]["vector"] == [1, 2]
|
||||
assert list[1]["vector"] == [3, 4]
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_query_with_f16(tmp_path: Path):
|
||||
db = await lancedb.connect_async(tmp_path)
|
||||
f16_arr = np.array([1.0, 2.0, 3.0, 4.0], dtype=np.float16)
|
||||
|
||||
df = pa.table(
|
||||
{
|
||||
"vector": pa.FixedSizeListArray.from_arrays(f16_arr, 2),
|
||||
"id": pa.array([1, 2]),
|
||||
}
|
||||
)
|
||||
tbl = await db.create_table("test", df)
|
||||
results = await tbl.vector_search([np.float16(1), np.float16(2)]).to_pandas()
|
||||
assert len(results) == 2
|
||||
|
||||
@@ -343,6 +343,7 @@ def test_query_sync_maximal():
|
||||
assert body == {
|
||||
"distance_type": "cosine",
|
||||
"k": 42,
|
||||
"offset": 10,
|
||||
"prefilter": True,
|
||||
"refine_factor": 10,
|
||||
"vector": [1.0, 2.0, 3.0],
|
||||
@@ -363,6 +364,7 @@ def test_query_sync_maximal():
|
||||
table.search([1, 2, 3], vector_column_name="vector2", fast_search=True)
|
||||
.metric("cosine")
|
||||
.limit(42)
|
||||
.offset(10)
|
||||
.refine_factor(10)
|
||||
.nprobes(5)
|
||||
.where("id > 0", prefilter=True)
|
||||
|
||||
@@ -530,6 +530,7 @@ def test_create_index_method():
|
||||
replace=True,
|
||||
accelerator=None,
|
||||
index_cache_size=256,
|
||||
num_bits=8,
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -47,12 +47,13 @@ impl Index {
|
||||
|
||||
#[pymethods]
|
||||
impl Index {
|
||||
#[pyo3(signature = (distance_type=None, num_partitions=None, num_sub_vectors=None, max_iterations=None, sample_rate=None))]
|
||||
#[pyo3(signature = (distance_type=None, num_partitions=None, num_sub_vectors=None,num_bits=None, max_iterations=None, sample_rate=None))]
|
||||
#[staticmethod]
|
||||
pub fn ivf_pq(
|
||||
distance_type: Option<String>,
|
||||
num_partitions: Option<u32>,
|
||||
num_sub_vectors: Option<u32>,
|
||||
num_bits: Option<u32>,
|
||||
max_iterations: Option<u32>,
|
||||
sample_rate: Option<u32>,
|
||||
) -> PyResult<Self> {
|
||||
@@ -75,6 +76,9 @@ impl Index {
|
||||
if let Some(num_sub_vectors) = num_sub_vectors {
|
||||
ivf_pq_builder = ivf_pq_builder.num_sub_vectors(num_sub_vectors);
|
||||
}
|
||||
if let Some(num_bits) = num_bits {
|
||||
ivf_pq_builder = ivf_pq_builder.num_bits(num_bits);
|
||||
}
|
||||
if let Some(max_iterations) = max_iterations {
|
||||
ivf_pq_builder = ivf_pq_builder.max_iterations(max_iterations);
|
||||
}
|
||||
@@ -148,12 +152,14 @@ impl Index {
|
||||
}
|
||||
}
|
||||
|
||||
#[pyo3(signature = (distance_type=None, num_partitions=None, num_sub_vectors=None, max_iterations=None, sample_rate=None, m=None, ef_construction=None))]
|
||||
#[pyo3(signature = (distance_type=None, num_partitions=None, num_sub_vectors=None,num_bits=None, max_iterations=None, sample_rate=None, m=None, ef_construction=None))]
|
||||
#[staticmethod]
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn hnsw_pq(
|
||||
distance_type: Option<String>,
|
||||
num_partitions: Option<u32>,
|
||||
num_sub_vectors: Option<u32>,
|
||||
num_bits: Option<u32>,
|
||||
max_iterations: Option<u32>,
|
||||
sample_rate: Option<u32>,
|
||||
m: Option<u32>,
|
||||
@@ -170,6 +176,9 @@ impl Index {
|
||||
if let Some(num_sub_vectors) = num_sub_vectors {
|
||||
hnsw_pq_builder = hnsw_pq_builder.num_sub_vectors(num_sub_vectors);
|
||||
}
|
||||
if let Some(num_bits) = num_bits {
|
||||
hnsw_pq_builder = hnsw_pq_builder.num_bits(num_bits);
|
||||
}
|
||||
if let Some(max_iterations) = max_iterations {
|
||||
hnsw_pq_builder = hnsw_pq_builder.max_iterations(max_iterations);
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "lancedb-node"
|
||||
version = "0.14.0"
|
||||
version = "0.14.1-beta.3"
|
||||
description = "Serverless, low-latency vector database for AI applications"
|
||||
license.workspace = true
|
||||
edition.workspace = true
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "lancedb"
|
||||
version = "0.14.0"
|
||||
version = "0.14.1-beta.3"
|
||||
edition.workspace = true
|
||||
description = "LanceDB: A serverless, low-latency vector database for AI applications"
|
||||
license.workspace = true
|
||||
|
||||
@@ -53,7 +53,10 @@ pub struct LabelListIndexBuilder {}
/// A full text search index is an index on a string column that allows for full text search
#[derive(Debug, Clone)]
pub struct FtsIndexBuilder {
    pub(crate) with_position: bool,
    /// Whether to store the position of the tokens
    /// This is used for phrase queries
    pub with_position: bool,

    pub tokenizer_configs: TokenizerConfig,
}

@@ -132,6 +132,10 @@ macro_rules! impl_pq_params_setter {
            self.num_sub_vectors = Some(num_sub_vectors);
            self
        }
        pub fn num_bits(mut self, num_bits: u32) -> Self {
            self.num_bits = Some(num_bits);
            self
        }
    };
}

@@ -189,6 +193,7 @@ pub struct IvfPqIndexBuilder {
|
||||
|
||||
// PQ
|
||||
pub(crate) num_sub_vectors: Option<u32>,
|
||||
pub(crate) num_bits: Option<u32>,
|
||||
}
|
||||
|
||||
impl Default for IvfPqIndexBuilder {
|
||||
@@ -197,6 +202,7 @@ impl Default for IvfPqIndexBuilder {
|
||||
distance_type: DistanceType::L2,
|
||||
num_partitions: None,
|
||||
num_sub_vectors: None,
|
||||
num_bits: None,
|
||||
sample_rate: 256,
|
||||
max_iterations: 50,
|
||||
}
|
||||
@@ -256,6 +262,7 @@ pub struct IvfHnswPqIndexBuilder {
|
||||
|
||||
// PQ
|
||||
pub(crate) num_sub_vectors: Option<u32>,
|
||||
pub(crate) num_bits: Option<u32>,
|
||||
}
|
||||
|
||||
impl Default for IvfHnswPqIndexBuilder {
|
||||
@@ -264,6 +271,7 @@ impl Default for IvfHnswPqIndexBuilder {
|
||||
distance_type: DistanceType::L2,
|
||||
num_partitions: None,
|
||||
num_sub_vectors: None,
|
||||
num_bits: None,
|
||||
sample_rate: 256,
|
||||
max_iterations: 50,
|
||||
m: 20,
|
||||
|
||||
@@ -145,10 +145,8 @@ impl<S: HttpSend> RemoteTable<S> {
    }

    fn apply_query_params(body: &mut serde_json::Value, params: &Query) -> Result<()> {
        if params.offset.is_some() {
            return Err(Error::NotSupported {
                message: "Offset is not yet supported in LanceDB Cloud".into(),
            });
        if let Some(offset) = params.offset {
            body["offset"] = serde_json::Value::Number(serde_json::Number::from(offset));
        }

        if let Some(limit) = params.limit {
@@ -570,7 +568,19 @@ impl<S: HttpSend> TableInternal for RemoteTable<S> {
            Index::BTree(_) => ("BTREE", None),
            Index::Bitmap(_) => ("BITMAP", None),
            Index::LabelList(_) => ("LABEL_LIST", None),
            Index::FTS(_) => ("FTS", None),
            Index::FTS(fts) => {
                let with_position = fts.with_position;
                let configs = serde_json::to_value(fts.tokenizer_configs).map_err(|e| {
                    Error::InvalidInput {
                        message: format!("failed to serialize FTS index params {:?}", e),
                    }
                })?;
                for (key, value) in configs.as_object().unwrap() {
                    body[key] = value.clone();
                }
                body["with_position"] = serde_json::Value::Bool(with_position);
                ("FTS", None)
            }
            Index::Auto => {
                let schema = self.schema().await?;
                let field = schema
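With this change a remote (LanceDB Cloud) query may carry an offset instead of being rejected. A hedged sketch of the client-side call, assuming the TypeScript query builder exposes `offset()` the same way the Rust and Python tests in this change do:

```ts
const rows = await table
  .query()
  .where("id > 0")
  .limit(42)
  .offset(10) // now forwarded to the server as "offset" in the request body
  .toArray();
```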
@@ -1336,6 +1346,7 @@ mod tests {
|
||||
"vector_column": "my_vector",
|
||||
"prefilter": false,
|
||||
"k": 42,
|
||||
"offset": 10,
|
||||
"distance_type": "cosine",
|
||||
"bypass_vector_index": true,
|
||||
"columns": ["a", "b"],
|
||||
@@ -1364,6 +1375,7 @@ mod tests {
|
||||
let _ = table
|
||||
.query()
|
||||
.limit(42)
|
||||
.offset(10)
|
||||
.select(Select::columns(&["a", "b"]))
|
||||
.nearest_to(vec![0.1, 0.2, 0.3])
|
||||
.unwrap()
|
||||
@@ -1496,6 +1508,7 @@ mod tests {
|
||||
];
|
||||
|
||||
for (index_type, distance_type, index) in cases {
|
||||
let params = index.clone();
|
||||
let table = Table::new_with_handler("my_table", move |request| {
|
||||
assert_eq!(request.method(), "POST");
|
||||
assert_eq!(request.url().path(), "/v1/table/my_table/create_index/");
|
||||
@@ -1512,6 +1525,17 @@ mod tests {
|
||||
if let Some(distance_type) = distance_type {
|
||||
expected_body["metric_type"] = distance_type.to_lowercase().into();
|
||||
}
|
||||
if let Index::FTS(fts) = ¶ms {
|
||||
expected_body["with_position"] = fts.with_position.into();
|
||||
expected_body["base_tokenizer"] = "simple".into();
|
||||
expected_body["language"] = "English".into();
|
||||
expected_body["max_token_length"] = 40.into();
|
||||
expected_body["lower_case"] = true.into();
|
||||
expected_body["stem"] = false.into();
|
||||
expected_body["remove_stop_words"] = false.into();
|
||||
expected_body["ascii_folding"] = false.into();
|
||||
}
|
||||
|
||||
assert_eq!(body, expected_body);
|
||||
|
||||
http::Response::builder().status(200).body("{}").unwrap()
|
||||