From d6b505477886b48b4868a56481373e2a29a72fcd Mon Sep 17 00:00:00 2001
From: Akash Saravanan
Date: Mon, 30 Sep 2024 13:36:28 -0600
Subject: [PATCH 01/51] feat(python): add support for trust_remote_code in hf
 embeddings (#1712)

Resolves #1709. Adds `trust_remote_code` as a parameter to the
`TransformersEmbeddingFunction` class with a default of False. Updates the
relevant documentation accordingly.
---
 .../text_embedding_functions/huggingface_embedding.md |  2 +-
 python/python/lancedb/embeddings/transformers.py      | 10 +++++++++-
 2 files changed, 10 insertions(+), 2 deletions(-)

diff --git a/docs/src/embeddings/available_embedding_models/text_embedding_functions/huggingface_embedding.md b/docs/src/embeddings/available_embedding_models/text_embedding_functions/huggingface_embedding.md
index 80502b49..eb0dfdea 100644
--- a/docs/src/embeddings/available_embedding_models/text_embedding_functions/huggingface_embedding.md
+++ b/docs/src/embeddings/available_embedding_models/text_embedding_functions/huggingface_embedding.md
@@ -1,5 +1,5 @@
 # Huggingface embedding models
-We offer support for all huggingface models (which can be loaded via [transformers](https://huggingface.co/docs/transformers/en/index) library). The default model is `colbert-ir/colbertv2.0` which also has its own special callout - `registry.get("colbert")`
+We offer support for all Hugging Face models (which can be loaded via the [transformers](https://huggingface.co/docs/transformers/en/index) library). The default model is `colbert-ir/colbertv2.0`, which also has its own special callout - `registry.get("colbert")`. Some Hugging Face models rely on custom code defined in their own modeling files on the Hugging Face Hub. You can enable loading them by setting `trust_remote_code=True`. This option should only be set to True for repositories you trust and in which you have read the code, as it will execute code present on the Hub on your local machine.
 
 Example usage -
 ```python
diff --git a/python/python/lancedb/embeddings/transformers.py b/python/python/lancedb/embeddings/transformers.py
index f532f7c9..caee75fb 100644
--- a/python/python/lancedb/embeddings/transformers.py
+++ b/python/python/lancedb/embeddings/transformers.py
@@ -40,6 +40,11 @@ class TransformersEmbeddingFunction(EmbeddingFunction):
         The device to use for the model. Default is "cpu".
     show_progress_bar : bool
         Whether to show a progress bar when loading the model. Default is True.
+    trust_remote_code : bool
+        Whether or not to allow for custom models defined on the HuggingFace
+        Hub in their own modeling files. This option should only be set to True
+        for repositories you trust and in which you have read the code, as it
+        will execute code present on the Hub on your local machine.
to download package, run : `pip install transformers` @@ -49,6 +54,7 @@ class TransformersEmbeddingFunction(EmbeddingFunction): name: str = "colbert-ir/colbertv2.0" device: str = "cpu" + trust_remote_code: bool = False _tokenizer: Any = PrivateAttr() _model: Any = PrivateAttr() @@ -57,7 +63,9 @@ class TransformersEmbeddingFunction(EmbeddingFunction): self._ndims = None transformers = attempt_import_or_raise("transformers") self._tokenizer = transformers.AutoTokenizer.from_pretrained(self.name) - self._model = transformers.AutoModel.from_pretrained(self.name) + self._model = transformers.AutoModel.from_pretrained( + self.name, trust_remote_code=self.trust_remote_code + ) self._model.to(self.device) if PYDANTIC_VERSION.major < 2: # Pydantic 1.x compat From 7b2cdd22693cb4e3d1a5994e4fbf73018a95b9fd Mon Sep 17 00:00:00 2001 From: Rithik Kumar <46047011+rithikJha@users.noreply.github.com> Date: Tue, 1 Oct 2024 11:59:03 +0530 Subject: [PATCH 02/51] docs: revamp Voxel51 v1 (#1714) Revamp Voxel51 ![image](https://github.com/user-attachments/assets/7ac34457-74ec-4654-b1d1-556e3d7357f5) --- docs/src/integrations/voxel51.md | 187 +++++++++++++++++++++++++++++-- 1 file changed, 175 insertions(+), 12 deletions(-) diff --git a/docs/src/integrations/voxel51.md b/docs/src/integrations/voxel51.md index 6f68b98e..49f12863 100644 --- a/docs/src/integrations/voxel51.md +++ b/docs/src/integrations/voxel51.md @@ -1,13 +1,73 @@ # FiftyOne -FiftyOne is an open source toolkit for building high-quality datasets and computer vision models. It provides an API to create LanceDB tables and run similarity queries, both programmatically in Python and via point-and-click in the App. +FiftyOne is an open source toolkit that enables users to curate better data and build better models. It includes tools for data exploration, visualization, and management, as well as features for collaboration and sharing. + +Any developers, data scientists, and researchers who work with computer vision and machine learning can use FiftyOne to improve the quality of their datasets and deliver insights about their models. + ![example](../assets/voxel.gif) -## Basic recipe +**FiftyOne** provides an API to create LanceDB tables and run similarity queries, both **programmatically in Python** and via **point-and-click in the App**. -The basic workflow shown below uses LanceDB to create a similarity index on your FiftyOne -datasets: +Let's get started and see how to use **LanceDB** to create a **similarity index** on your FiftyOne datasets. + +## Overview + +**[Embeddings](../embeddings/understanding_embeddings.md)** are foundational to all of the **vector search** features. In FiftyOne, embeddings are managed by the [**FiftyOne Brain**](https://docs.voxel51.com/user_guide/brain.html) that provides powerful machine learning techniques designed to transform how you curate your data from an art into a measurable science. + +!!!question "Have you ever wanted to find the images most similar to an image in your dataset?" + The **FiftyOne Brain** makes computing **visual similarity** really easy. You can compute the similarity of samples in your dataset using an embedding model and store the results in the **brain key**. + + You can then sort your samples by similarity or use this information to find potential duplicate images. + +Here we will be doing the following : + +1. **Create Index** - In order to run similarity queries against our media, we need to **index** the data. We can do this via the `compute_similarity()` function. 
+ + - In the function, specify the **model** you want to use to generate the embedding vectors, and what **vector search engine** you want to use on the **backend** (here LanceDB). + + !!!tip + You can also give the similarity index a name(`brain_key`), which is useful if you want to run vector searches against multiple indexes. + +2. **Query** - Once you have generated your similarity index, you can query your dataset with `sort_by_similarity()`. The query can be any of the following: + + - An ID (sample or patch) + - A query vector of same dimension as the index + - A list of IDs (samples or patches) + - A text prompt (search semantically) + +## Prerequisites: install necessary dependencies + +1. **Create and activate a virtual environment** + + Install virtualenv package and run the following command in your project directory. + ```python + python -m venv fiftyone_ + ``` + From inside the project directory run the following to activate the virtual environment. + === "Windows" + + ```python + fiftyone_/Scripts/activate + ``` + + === "macOS/Linux" + + ```python + source fiftyone_/Scripts/activate + ``` + +2. **Install the following packages in the virtual environment** + + To install FiftyOne, ensure you have activated any virtual environment that you are using, then run + ```python + pip install fiftyone + ``` + + +## Understand basic workflow + +The basic workflow shown below uses LanceDB to create a similarity index on your FiftyOne datasets: 1. Load a dataset into FiftyOne. @@ -19,14 +79,10 @@ datasets: 5. If desired, delete the table. -The example below demonstrates this workflow. +## Quick Example -!!! Note +Let's jump on a quick example that demonstrates this workflow. - Install the LanceDB Python client to run the code shown below. - ``` - pip install lancedb - ``` ```python @@ -36,7 +92,10 @@ import fiftyone.zoo as foz # Step 1: Load your data into FiftyOne dataset = foz.load_zoo_dataset("quickstart") +``` +Make sure you install torch ([guide here](https://pytorch.org/get-started/locally/)) before proceeding. +```python # Steps 2 and 3: Compute embeddings and create a similarity index lancedb_index = fob.compute_similarity( dataset, @@ -45,8 +104,11 @@ lancedb_index = fob.compute_similarity( backend="lancedb", ) ``` -Once the similarity index has been generated, we can query our data in FiftyOne -by specifying the `brain_key`: + +!!! note + Running the code above will download the clip model (2.6Gb) + +Once the similarity index has been generated, we can query our data in FiftyOne by specifying the `brain_key`: ```python # Step 4: Query your data @@ -56,7 +118,22 @@ view = dataset.sort_by_similarity( brain_key="lancedb_index", k=10, # limit to 10 most similar samples ) +``` +The returned result are of type - `DatasetView`. +!!! note + `DatasetView` does not hold its contents in-memory. Views simply store the rule(s) that are applied to extract the content of interest from the underlying Dataset when the view is iterated/aggregated on. + + This means, for example, that the contents of a `DatasetView` may change as the underlying Dataset is modified. + +??? question "Can you query a view instead of dataset?" + Yes, you can also query a view. + + Performing a similarity search on a `DatasetView` will only return results from the view; if the view contains samples that were not included in the index, they will never be included in the result. 
+ + This means that you can index an entire Dataset once and then perform searches on subsets of the dataset by constructing views that contain the images of interest. + +```python # Step 5 (optional): Cleanup # Delete the LanceDB table @@ -66,4 +143,90 @@ lancedb_index.cleanup() dataset.delete_brain_run("lancedb_index") ``` + +## Using LanceDB backend +By default, calling `compute_similarity()` or `sort_by_similarity()` will use an sklearn backend. + +To use the LanceDB backend, simply set the optional `backend` parameter of `compute_similarity()` to `"lancedb"`: + +```python +import fiftyone.brain as fob +#... rest of the code +fob.compute_similarity(..., backend="lancedb", ...) +``` + +Alternatively, you can configure FiftyOne to use the LanceDB backend by setting the following environment variable. + +In your terminal, set the environment variable using: +=== "Windows" + + ```python + $Env:FIFTYONE_BRAIN_DEFAULT_SIMILARITY_BACKEND="lancedb" //powershell + + set FIFTYONE_BRAIN_DEFAULT_SIMILARITY_BACKEND=lancedb //cmd + ``` + +=== "macOS/Linux" + + ```python + export FIFTYONE_BRAIN_DEFAULT_SIMILARITY_BACKEND=lancedb + ``` + +!!! note + This will only run during the terminal session. Once terminal is closed, environment variable is deleted. + +Alternatively, you can **permanently** configure FiftyOne to use the LanceDB backend creating a `brain_config.json` at `~/.fiftyone/brain_config.json`. The JSON file may contain any desired subset of config fields that you wish to customize. + +```json +{ + "default_similarity_backend": "lancedb" +} +``` +This will override the default `brain_config` and will set it according to your customization. You can check the configuration by running the following code : + +```python +import fiftyone.brain as fob +# Print your current brain config +print(fob.brain_config) +``` + +## LanceDB config parameters + +The LanceDB backend supports query parameters that can be used to customize your similarity queries. These parameters include: + +| Name| Purpose | Default | +|:----|:--------|:--------| +|**table_name**|The name of the LanceDB table to use. If none is provided, a new table will be created|`None`| +|**metric**|The embedding distance metric to use when creating a new table. The supported values are ("cosine", "euclidean")|`"cosine"`| +|**uri**| The database URI to use. In this Database URI, tables will be created. |`"/tmp/lancedb"`| + +There are two ways to specify/customize the parameters: + +1. **Using `brain_config.json` file** + + ```json + { + "similarity_backends": { + "lancedb": { + "table_name": "your-table", + "metric": "euclidean", + "uri": "/tmp/lancedb" + } + } + } + ``` + +2. **Directly passing to `compute_similarity()` to configure a specific new index** : + + ```python + lancedb_index = fob.compute_similarity( + ... + backend="lancedb", + brain_key="lancedb_index", + table_name="your-table", + metric="euclidean", + uri="/tmp/lancedb", + ) + ``` + For a much more in depth walkthrough of the integration, visit the LanceDB x Voxel51 [docs page](https://docs.voxel51.com/integrations/lancedb.html). From 33b402c8610fc9c1145120c0a26118da134d979d Mon Sep 17 00:00:00 2001 From: Will Jones Date: Tue, 1 Oct 2024 09:16:18 -0700 Subject: [PATCH 03/51] fix: `list_indices` returns correct index type (#1715) Fixes https://github.com/lancedb/lancedb/issues/1711 Doesn't address this https://github.com/lancedb/lance/issues/2039 Instead we load the index statistics, which seems to contain the index type. However, this involves more IO than previously. 
I'm not sure whether we care that much. If we do, we can fix that upstream Lance issue. --- python/python/tests/test_index.py | 10 ++++------ rust/lancedb/src/index.rs | 21 +++++++++++++++++++- rust/lancedb/src/table.rs | 33 ++++++++++++++++--------------- 3 files changed, 41 insertions(+), 23 deletions(-) diff --git a/python/python/tests/test_index.py b/python/python/tests/test_index.py index b0646afe..1245997e 100644 --- a/python/python/tests/test_index.py +++ b/python/python/tests/test_index.py @@ -63,9 +63,8 @@ async def test_create_scalar_index(some_table: AsyncTable): @pytest.mark.asyncio async def test_create_bitmap_index(some_table: AsyncTable): await some_table.create_index("id", config=Bitmap()) - # TODO: Fix via https://github.com/lancedb/lance/issues/2039 - # indices = await some_table.list_indices() - # assert str(indices) == '[Index(Bitmap, columns=["id"])]' + indices = await some_table.list_indices() + assert str(indices) == '[Index(Bitmap, columns=["id"])]' indices = await some_table.list_indices() assert len(indices) == 1 index_name = indices[0].name @@ -80,9 +79,8 @@ async def test_create_bitmap_index(some_table: AsyncTable): @pytest.mark.asyncio async def test_create_label_list_index(some_table: AsyncTable): await some_table.create_index("tags", config=LabelList()) - # TODO: Fix via https://github.com/lancedb/lance/issues/2039 - # indices = await some_table.list_indices() - # assert str(indices) == '[Index(LabelList, columns=["id"])]' + indices = await some_table.list_indices() + assert str(indices) == '[Index(LabelList, columns=["tags"])]' @pytest.mark.asyncio diff --git a/rust/lancedb/src/index.rs b/rust/lancedb/src/index.rs index 21301a2b..1ff80137 100644 --- a/rust/lancedb/src/index.rs +++ b/rust/lancedb/src/index.rs @@ -18,7 +18,7 @@ use scalar::FtsIndexBuilder; use serde::Deserialize; use serde_with::skip_serializing_none; -use crate::{table::TableInternal, DistanceType, Result}; +use crate::{table::TableInternal, DistanceType, Error, Result}; use self::{ scalar::{BTreeIndexBuilder, BitmapIndexBuilder, LabelListIndexBuilder}, @@ -136,6 +136,25 @@ impl std::fmt::Display for IndexType { } } +impl std::str::FromStr for IndexType { + type Err = Error; + + fn from_str(value: &str) -> Result { + match value.to_uppercase().as_str() { + "BTREE" => Ok(Self::BTree), + "BITMAP" => Ok(Self::Bitmap), + "LABEL_LIST" | "LABELLIST" => Ok(Self::LabelList), + "FTS" => Ok(Self::FTS), + "IVF_PQ" => Ok(Self::IvfPq), + "IVF_HNSW_PQ" => Ok(Self::IvfHnswPq), + "IVF_HNSW_SQ" => Ok(Self::IvfHnswSq), + _ => Err(Error::InvalidInput { + message: format!("the input value {} is not a valid IndexType", value), + }), + } + } +} + /// A description of an index currently configured on a column #[derive(Debug, PartialEq, Clone)] pub struct IndexConfig { diff --git a/rust/lancedb/src/table.rs b/rust/lancedb/src/table.rs index cd52c601..5f286cf7 100644 --- a/rust/lancedb/src/table.rs +++ b/rust/lancedb/src/table.rs @@ -25,6 +25,7 @@ use arrow_schema::{Field, Schema, SchemaRef}; use async_trait::async_trait; use datafusion_physical_plan::display::DisplayableExecutionPlan; use datafusion_physical_plan::ExecutionPlan; +use futures::{StreamExt, TryStreamExt}; use lance::dataset::builder::DatasetBuilder; use lance::dataset::cleanup::RemovalStats; use lance::dataset::optimize::{compact_files, CompactionMetrics, IndexRemapperOptions}; @@ -2023,28 +2024,28 @@ impl TableInternal for NativeTable { async fn list_indices(&self) -> Result> { let dataset = self.dataset.get().await?; let indices = 
dataset.load_indices().await?; - indices.iter().map(|idx| { - let mut is_vector = false; + futures::stream::iter(indices.as_slice()).then(|idx| async { + let stats = dataset.index_statistics(idx.name.as_str()).await?; + let stats: serde_json::Value = serde_json::from_str(&stats).map_err(|e| Error::Runtime { + message: format!("error deserializing index statistics: {}", e), + })?; + let index_type = stats.get("index_type").and_then(|v| v.as_str()) + .ok_or_else(|| Error::Runtime { + message: "index statistics was missing index type".to_string(), + })?; + let index_type: crate::index::IndexType = index_type.parse().map_err(|e| Error::Runtime { + message: format!("error parsing index type: {}", e), + })?; + let mut columns = Vec::with_capacity(idx.fields.len()); for field_id in &idx.fields { let field = dataset.schema().field_by_id(*field_id).ok_or_else(|| Error::Runtime { message: format!("The index with name {} and uuid {} referenced a field with id {} which does not exist in the schema", idx.name, idx.uuid, field_id) })?; - if field.data_type().is_nested() { - // Temporary hack to determine if an index is scalar or vector - // Should be removed in https://github.com/lancedb/lance/issues/2039 - is_vector = true; - } columns.push(field.name.clone()); } - let index_type = if is_vector { - crate::index::IndexType::IvfPq - } else { - crate::index::IndexType::BTree - }; - let name = idx.name.clone(); Ok(IndexConfig { index_type, columns, name }) - }).collect::>>() + }).try_collect::>().await } fn dataset_uri(&self) -> &str { @@ -2803,7 +2804,7 @@ mod tests { let index_configs = table.list_indices().await.unwrap(); assert_eq!(index_configs.len(), 1); let index = index_configs.into_iter().next().unwrap(); - assert_eq!(index.index_type, crate::index::IndexType::IvfPq); + assert_eq!(index.index_type, crate::index::IndexType::IvfHnswSq); assert_eq!(index.columns, vec!["embeddings".to_string()]); assert_eq!(table.count_rows(None).await.unwrap(), 512); assert_eq!(table.name(), "test"); @@ -2867,7 +2868,7 @@ mod tests { let index_configs = table.list_indices().await.unwrap(); assert_eq!(index_configs.len(), 1); let index = index_configs.into_iter().next().unwrap(); - assert_eq!(index.index_type, crate::index::IndexType::IvfPq); + assert_eq!(index.index_type, crate::index::IndexType::IvfHnswPq); assert_eq!(index.columns, vec!["embeddings".to_string()]); assert_eq!(table.count_rows(None).await.unwrap(), 512); assert_eq!(table.name(), "test"); From 2c4b07eb177cefe2a0fb3f97cef56187f465fd7c Mon Sep 17 00:00:00 2001 From: Will Jones Date: Tue, 1 Oct 2024 10:06:52 -0700 Subject: [PATCH 04/51] feat(python): merge_insert in async Python (#1707) Fixes #1401 --- python/python/lancedb/merge.py | 2 +- python/python/lancedb/table.py | 26 +++++++++- python/python/tests/test_table.py | 81 +++++++++++++++++++++++++++++-- python/src/table.rs | 38 ++++++++++++++- 4 files changed, 139 insertions(+), 8 deletions(-) diff --git a/python/python/lancedb/merge.py b/python/python/lancedb/merge.py index 69671c5e..48cc9847 100644 --- a/python/python/lancedb/merge.py +++ b/python/python/lancedb/merge.py @@ -104,4 +104,4 @@ class LanceMergeInsertBuilder(object): fill_value: float, default 0. The value to use when filling vectors. Only used if on_bad_vectors="fill". 
""" - self._table._do_merge(self, new_data, on_bad_vectors, fill_value) + return self._table._do_merge(self, new_data, on_bad_vectors, fill_value) diff --git a/python/python/lancedb/table.py b/python/python/lancedb/table.py index 59e0d465..b4f8a2e6 100644 --- a/python/python/lancedb/table.py +++ b/python/python/lancedb/table.py @@ -2464,7 +2464,31 @@ class AsyncTable: on_bad_vectors: str, fill_value: float, ): - pass + schema = await self.schema() + if on_bad_vectors is None: + on_bad_vectors = "error" + if fill_value is None: + fill_value = 0.0 + data, _ = _sanitize_data( + new_data, + schema, + metadata=schema.metadata, + on_bad_vectors=on_bad_vectors, + fill_value=fill_value, + ) + if isinstance(data, pa.Table): + data = pa.RecordBatchReader.from_batches(data.schema, data.to_batches()) + await self._inner.execute_merge_insert( + data, + dict( + on=merge._on, + when_matched_update_all=merge._when_matched_update_all, + when_matched_update_all_condition=merge._when_matched_update_all_condition, + when_not_matched_insert_all=merge._when_not_matched_insert_all, + when_not_matched_by_source_delete=merge._when_not_matched_by_source_delete, + when_not_matched_by_source_condition=merge._when_not_matched_by_source_condition, + ), + ) async def delete(self, where: str): """Delete rows from the table. diff --git a/python/python/tests/test_table.py b/python/python/tests/test_table.py index c32a5c98..cc5ecbd2 100644 --- a/python/python/tests/test_table.py +++ b/python/python/tests/test_table.py @@ -636,11 +636,13 @@ def test_merge_insert(db): new_data = pa.table({"a": [2, 4], "b": ["x", "z"]}) # replace-range - table.merge_insert( - "a" - ).when_matched_update_all().when_not_matched_insert_all().when_not_matched_by_source_delete( - "a > 2" - ).execute(new_data) + ( + table.merge_insert("a") + .when_matched_update_all() + .when_not_matched_insert_all() + .when_not_matched_by_source_delete("a > 2") + .execute(new_data) + ) expected = pa.table({"a": [1, 2, 4], "b": ["a", "x", "z"]}) assert table.to_arrow().sort_by("a") == expected @@ -658,6 +660,75 @@ def test_merge_insert(db): assert table.to_arrow().sort_by("a") == expected +@pytest.mark.asyncio +async def test_merge_insert_async(db_async: AsyncConnection): + data = pa.table({"a": [1, 2, 3], "b": ["a", "b", "c"]}) + table = await db_async.create_table("some_table", data=data) + assert await table.count_rows() == 3 + version = await table.version() + + new_data = pa.table({"a": [2, 3, 4], "b": ["x", "y", "z"]}) + + # upsert + await ( + table.merge_insert("a") + .when_matched_update_all() + .when_not_matched_insert_all() + .execute(new_data) + ) + expected = pa.table({"a": [1, 2, 3, 4], "b": ["a", "x", "y", "z"]}) + assert (await table.to_arrow()).sort_by("a") == expected + + await table.checkout(version) + await table.restore() + + # conditional update + await ( + table.merge_insert("a") + .when_matched_update_all(where="target.b = 'b'") + .execute(new_data) + ) + expected = pa.table({"a": [1, 2, 3], "b": ["a", "x", "c"]}) + assert (await table.to_arrow()).sort_by("a") == expected + + await table.checkout(version) + await table.restore() + + # insert-if-not-exists + await table.merge_insert("a").when_not_matched_insert_all().execute(new_data) + expected = pa.table({"a": [1, 2, 3, 4], "b": ["a", "b", "c", "z"]}) + assert (await table.to_arrow()).sort_by("a") == expected + + await table.checkout(version) + await table.restore() + + # replace-range + new_data = pa.table({"a": [2, 4], "b": ["x", "z"]}) + await ( + table.merge_insert("a") + 
.when_matched_update_all() + .when_not_matched_insert_all() + .when_not_matched_by_source_delete("a > 2") + .execute(new_data) + ) + expected = pa.table({"a": [1, 2, 4], "b": ["a", "x", "z"]}) + assert (await table.to_arrow()).sort_by("a") == expected + + await table.checkout(version) + await table.restore() + + # replace-range no condition + await ( + table.merge_insert("a") + .when_matched_update_all() + .when_not_matched_insert_all() + .when_not_matched_by_source_delete() + .execute(new_data) + ) + expected = pa.table({"a": [2, 4], "b": ["x", "z"]}) + assert (await table.to_arrow()).sort_by("a") == expected + + def test_create_with_embedding_function(db): class MyTable(LanceModel): text: str diff --git a/python/src/table.rs b/python/src/table.rs index 957bf76f..b5087f8d 100644 --- a/python/src/table.rs +++ b/python/src/table.rs @@ -9,7 +9,7 @@ use pyo3::{ exceptions::{PyRuntimeError, PyValueError}, pyclass, pymethods, types::{PyDict, PyDictMethods, PyString}, - Bound, PyAny, PyRef, PyResult, Python, ToPyObject, + Bound, FromPyObject, PyAny, PyRef, PyResult, Python, ToPyObject, }; use pyo3_asyncio_0_21::tokio::future_into_py; @@ -331,6 +331,31 @@ impl Table { }) } + pub fn execute_merge_insert<'a>( + self_: PyRef<'a, Self>, + data: Bound<'a, PyAny>, + parameters: MergeInsertParams, + ) -> PyResult> { + let batches: ArrowArrayStreamReader = ArrowArrayStreamReader::from_pyarrow_bound(&data)?; + let on = parameters.on.iter().map(|s| s.as_str()).collect::>(); + let mut builder = self_.inner_ref()?.merge_insert(&on); + if parameters.when_matched_update_all { + builder.when_matched_update_all(parameters.when_matched_update_all_condition); + } + if parameters.when_not_matched_insert_all { + builder.when_not_matched_insert_all(); + } + if parameters.when_not_matched_by_source_delete { + builder + .when_not_matched_by_source_delete(parameters.when_not_matched_by_source_condition); + } + + future_into_py(self_.py(), async move { + builder.execute(Box::new(batches)).await.infer_error()?; + Ok(()) + }) + } + pub fn uses_v2_manifest_paths(self_: PyRef<'_, Self>) -> PyResult> { let inner = self_.inner_ref()?.clone(); future_into_py(self_.py(), async move { @@ -355,3 +380,14 @@ impl Table { }) } } + +#[derive(FromPyObject)] +#[pyo3(from_item_all)] +pub struct MergeInsertParams { + on: Vec, + when_matched_update_all: bool, + when_matched_update_all_condition: Option, + when_not_matched_insert_all: bool, + when_not_matched_by_source_delete: bool, + when_not_matched_by_source_condition: Option, +} From a416925ca10c2d1cada9f312bf6d9f9810f2160b Mon Sep 17 00:00:00 2001 From: Will Jones Date: Tue, 1 Oct 2024 10:22:53 -0700 Subject: [PATCH 05/51] feat(rust): client configuration for remote client (#1696) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR ports over advanced client configuration present in the Python `RestfulLanceDBClient` to the Rust one. The goal is to have feature parity so we can replace the implementation. 
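
For reference, a rough sketch of how the ported configuration is meant to be used from Rust. It mirrors the doc example added to `ConnectBuilder::client_config` in this diff; the `api_key`/`execute` builder calls, the crate-root re-exports, and the `remote` cargo feature are assumed from the existing connection API, and the key value is a placeholder.

```rust
use std::time::Duration;

use lancedb::connect;
use lancedb::remote::{ClientConfig, RetryConfig, TimeoutConfig};

// Sketch: connect to LanceDB Cloud with a tighter connect timeout and more
// retries than the defaults. Requires the `remote` feature to be enabled.
async fn connect_with_config() -> lancedb::Result<lancedb::Connection> {
    connect("db://my_database")
        .api_key("ldb_...") // placeholder key
        .client_config(ClientConfig {
            timeout_config: TimeoutConfig {
                connect_timeout: Some(Duration::from_secs(5)),
                ..Default::default()
            },
            retry_config: RetryConfig {
                retries: Some(5),
                ..Default::default()
            },
            ..Default::default()
        })
        .execute()
        .await
}
```
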
* [x] Request timeout * [x] Retries with backoff * [x] Request id generation * [x] User agent (with default tied to library version ✨ ) * [x] Table existence cache * [ ] Deferred: ~Request id customization (should this just pick up OTEL trace ids?)~ Fixes #1684 --- Cargo.toml | 2 + rust/lancedb/Cargo.toml | 7 +- rust/lancedb/src/connection.rs | 32 ++++ rust/lancedb/src/lib.rs | 2 +- rust/lancedb/src/remote.rs | 10 +- rust/lancedb/src/remote/client.rs | 304 +++++++++++++++++++++++++++++- rust/lancedb/src/remote/db.rs | 61 ++++-- rust/lancedb/src/remote/table.rs | 20 +- 8 files changed, 395 insertions(+), 43 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 95a00f40..49e3f5a8 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -45,10 +45,12 @@ half = { "version" = "=2.4.1", default-features = false, features = [ ] } futures = "0" log = "0.4" +moka = { version = "0.11", features = ["future"] } object_store = "0.10.2" pin-project = "1.0.7" snafu = "0.7.4" url = "2" num-traits = "0.2" +rand = "0.8" regex = "1.10" lazy_static = "1" diff --git a/rust/lancedb/Cargo.toml b/rust/lancedb/Cargo.toml index 2eee4227..200c5d3b 100644 --- a/rust/lancedb/Cargo.toml +++ b/rust/lancedb/Cargo.toml @@ -32,6 +32,7 @@ lance-table = { workspace = true } lance-linalg = { workspace = true } lance-testing = { workspace = true } lance-encoding = { workspace = true } +moka = { workspace = true} pin-project = { workspace = true } tokio = { version = "1.23", features = ["rt-multi-thread"] } log.workspace = true @@ -47,7 +48,9 @@ async-openai = { version = "0.20.0", optional = true } serde_with = { version = "3.8.1" } # For remote feature reqwest = { version = "0.12.0", features = ["gzip", "json", "stream"], optional = true } -http = { version = "1", optional = true } # Matching what is in reqwest +rand = { version = "0.8.3", features = ["small_rng"], optional = true} +http = { version = "1", optional = true } # Matching what is in reqwest +uuid = { version = "1.7.0", features = ["v4"], optional = true } polars-arrow = { version = ">=0.37,<0.40.0", optional = true } polars = { version = ">=0.37,<0.40.0", optional = true } hf-hub = { version = "0.3.2", optional = true } @@ -71,7 +74,7 @@ http-body = "1" # Matching reqwest [features] default = [] -remote = ["dep:reqwest", "dep:http"] +remote = ["dep:reqwest", "dep:http", "dep:rand", "dep:uuid"] fp16kernels = ["lance-linalg/fp16kernels"] s3-test = [] openai = ["dep:async-openai", "dep:reqwest"] diff --git a/rust/lancedb/src/connection.rs b/rust/lancedb/src/connection.rs index 144b3134..29403528 100644 --- a/rust/lancedb/src/connection.rs +++ b/rust/lancedb/src/connection.rs @@ -32,6 +32,8 @@ use crate::embeddings::{ }; use crate::error::{CreateDirSnafu, Error, InvalidTableNameSnafu, Result}; use crate::io::object_store::MirroringObjectStoreWrapper; +#[cfg(feature = "remote")] +use crate::remote::client::ClientConfig; use crate::table::{NativeTable, TableDefinition, WriteOptions}; use crate::utils::validate_table_name; use crate::Table; @@ -567,6 +569,8 @@ pub struct ConnectBuilder { region: Option, /// LanceDB Cloud host override, only required if using an on-premises Lance Cloud instance host_override: Option, + #[cfg(feature = "remote")] + client_config: ClientConfig, storage_options: HashMap, @@ -592,6 +596,8 @@ impl ConnectBuilder { api_key: None, region: None, host_override: None, + #[cfg(feature = "remote")] + client_config: Default::default(), read_consistency_interval: None, storage_options: HashMap::new(), embedding_registry: None, @@ -613,6 +619,30 @@ impl 
ConnectBuilder { self } + /// Set the LanceDB Cloud client configuration. + /// + /// ``` + /// # use lancedb::connect; + /// # use lancedb::remote::*; + /// connect("db://my_database") + /// .client_config(ClientConfig { + /// timeout_config: TimeoutConfig { + /// connect_timeout: Some(std::time::Duration::from_secs(5)), + /// ..Default::default() + /// }, + /// retry_config: RetryConfig { + /// retries: Some(5), + /// ..Default::default() + /// }, + /// ..Default::default() + /// }); + /// ``` + #[cfg(feature = "remote")] + pub fn client_config(mut self, config: ClientConfig) -> Self { + self.client_config = config; + self + } + /// Provide a custom [`EmbeddingRegistry`] to use for this connection. pub fn embedding_registry(mut self, registry: Arc) -> Self { self.embedding_registry = Some(registry); @@ -685,12 +715,14 @@ impl ConnectBuilder { let api_key = self.api_key.ok_or_else(|| Error::InvalidInput { message: "An api_key is required when connecting to LanceDb Cloud".to_string(), })?; + // TODO: remove this warning when the remote client is ready warn!("The rust implementation of the remote client is not yet ready for use."); let internal = Arc::new(crate::remote::db::RemoteDatabase::try_new( &self.uri, &api_key, ®ion, self.host_override, + self.client_config, )?); Ok(Connection { internal, diff --git a/rust/lancedb/src/lib.rs b/rust/lancedb/src/lib.rs index 52771fa5..5bb158c2 100644 --- a/rust/lancedb/src/lib.rs +++ b/rust/lancedb/src/lib.rs @@ -213,7 +213,7 @@ pub mod ipc; mod polars_arrow_convertors; pub mod query; #[cfg(feature = "remote")] -pub(crate) mod remote; +pub mod remote; pub mod table; pub mod utils; diff --git a/rust/lancedb/src/remote.rs b/rust/lancedb/src/remote.rs index 2ef92b55..08b52f3f 100644 --- a/rust/lancedb/src/remote.rs +++ b/rust/lancedb/src/remote.rs @@ -17,10 +17,12 @@ //! building client/server applications with LanceDB or as a client for some //! other custom LanceDB service. -pub mod client; -pub mod db; -pub mod table; -pub mod util; +pub(crate) mod client; +pub(crate) mod db; +pub(crate) mod table; +pub(crate) mod util; const ARROW_STREAM_CONTENT_TYPE: &str = "application/vnd.apache.arrow.stream"; const JSON_CONTENT_TYPE: &str = "application/json"; + +pub use client::{ClientConfig, RetryConfig, TimeoutConfig}; diff --git a/rust/lancedb/src/remote/client.rs b/rust/lancedb/src/remote/client.rs index 41964e0c..644cb1f1 100644 --- a/rust/lancedb/src/remote/client.rs +++ b/rust/lancedb/src/remote/client.rs @@ -14,13 +14,152 @@ use std::{future::Future, time::Duration}; +use log::debug; use reqwest::{ header::{HeaderMap, HeaderValue}, - RequestBuilder, Response, + Request, RequestBuilder, Response, }; use crate::error::{Error, Result}; +const REQUEST_ID_HEADER: &str = "x-request-id"; + +/// Configuration for the LanceDB Cloud HTTP client. +#[derive(Debug)] +pub struct ClientConfig { + pub timeout_config: TimeoutConfig, + pub retry_config: RetryConfig, + /// User agent to use for requests. The default provides the libary + /// name and version. + pub user_agent: String, + // TODO: how to configure request ids? +} + +impl Default for ClientConfig { + fn default() -> Self { + Self { + timeout_config: TimeoutConfig::default(), + retry_config: RetryConfig::default(), + user_agent: concat!("LanceDB-Rust-Client/", env!("CARGO_PKG_VERSION")).into(), + } + } +} + +/// How to handle timeouts for HTTP requests. +#[derive(Default, Debug)] +pub struct TimeoutConfig { + /// The timeout for creating a connection to the server. 
+ /// + /// You can also set the `LANCE_CLIENT_CONNECT_TIMEOUT` environment variable + /// to set this value. Use an integer value in seconds. + /// + /// The default is 120 seconds (2 minutes). + pub connect_timeout: Option, + /// The timeout for reading a response from the server. + /// + /// You can also set the `LANCE_CLIENT_READ_TIMEOUT` environment variable + /// to set this value. Use an integer value in seconds. + /// + /// The default is 300 seconds (5 minutes). + pub read_timeout: Option, + /// The timeout for keeping idle connections alive. + /// + /// You can also set the `LANCE_CLIENT_CONNECTION_TIMEOUT` environment variable + /// to set this value. Use an integer value in seconds. + /// + /// The default is 300 seconds (5 minutes). + pub pool_idle_timeout: Option, +} + +/// How to handle retries for HTTP requests. +#[derive(Default, Debug)] +pub struct RetryConfig { + /// The number of times to retry a request if it fails. + /// + /// You can also set the `LANCE_CLIENT_MAX_RETRIES` environment variable + /// to set this value. Use an integer value. + /// + /// The default is 3 retries. + pub retries: Option, + /// The number of times to retry a request if it fails to connect. + /// + /// You can also set the `LANCE_CLIENT_CONNECT_RETRIES` environment variable + /// to set this value. Use an integer value. + /// + /// The default is 3 retries. + pub connect_retries: Option, + /// The number of times to retry a request if it fails to read. + /// + /// You can also set the `LANCE_CLIENT_READ_RETRIES` environment variable + /// to set this value. Use an integer value. + /// + /// The default is 3 retries. + pub read_retries: Option, + /// The exponential backoff factor to use when retrying requests. + /// + /// Between each retry, the client will wait for the amount of seconds: + /// + /// ```text + /// {backoff factor} * (2 ** ({number of previous retries})) + /// ``` + /// + /// You can also set the `LANCE_CLIENT_RETRY_BACKOFF_FACTOR` environment variable + /// to set this value. Use a float value. + /// + /// The default is 0.25. So the first retry will wait 0.25 seconds, the second + /// retry will wait 0.5 seconds, the third retry will wait 1 second, etc. + pub backoff_factor: Option, + /// The backoff jitter factor to use when retrying requests. + /// + /// The backoff jitter is a random value between 0 and the jitter factor in + /// seconds. + /// + /// You can also set the `LANCE_CLIENT_RETRY_BACKOFF_JITTER` environment variable + /// to set this value. Use a float value. + /// + /// The default is 0.25. So between 0 and 0.25 seconds will be added to the + /// sleep time between retries. + pub backoff_jitter: Option, + /// The set of status codes to retry on. + /// + /// You can also set the `LANCE_CLIENT_RETRY_STATUSES` environment variable + /// to set this value. Use a comma-separated list of integer values. + /// + /// The default is 429, 500, 502, 503. + pub statuses: Option>, + // TODO: should we allow customizing methods? 
+} + +#[derive(Debug, Clone)] +struct ResolvedRetryConfig { + retries: u8, + connect_retries: u8, + read_retries: u8, + backoff_factor: f32, + backoff_jitter: f32, + statuses: Vec, +} + +impl TryFrom for ResolvedRetryConfig { + type Error = Error; + + fn try_from(retry_config: RetryConfig) -> Result { + Ok(Self { + retries: retry_config.retries.unwrap_or(3), + connect_retries: retry_config.connect_retries.unwrap_or(3), + read_retries: retry_config.read_retries.unwrap_or(3), + backoff_factor: retry_config.backoff_factor.unwrap_or(0.25), + backoff_jitter: retry_config.backoff_jitter.unwrap_or(0.25), + statuses: retry_config + .statuses + .unwrap_or_else(|| vec![429, 500, 502, 503]) + .into_iter() + .map(|status| reqwest::StatusCode::from_u16(status).unwrap()) + .collect(), + }) + } +} + // We use the `HttpSend` trait to abstract over the `reqwest::Client` so that // we can mock responses in tests. Based on the patterns from this blog post: // https://write.as/balrogboogie/testing-reqwest-based-clients @@ -28,28 +167,54 @@ use crate::error::{Error, Result}; pub struct RestfulLanceDbClient { client: reqwest::Client, host: String, + retry_config: ResolvedRetryConfig, sender: S, } pub trait HttpSend: Clone + Send + Sync + std::fmt::Debug + 'static { - fn send(&self, req: RequestBuilder) -> impl Future> + Send; + fn send( + &self, + client: &reqwest::Client, + request: reqwest::Request, + ) -> impl Future> + Send; } // Default implementation of HttpSend which sends the request normally with reqwest #[derive(Clone, Debug)] pub struct Sender; impl HttpSend for Sender { - async fn send(&self, request: reqwest::RequestBuilder) -> Result { - Ok(request.send().await?) + async fn send( + &self, + client: &reqwest::Client, + request: reqwest::Request, + ) -> reqwest::Result { + client.execute(request).await } } impl RestfulLanceDbClient { + fn get_timeout(passed: Option, env_var: &str, default: Duration) -> Result { + if let Some(passed) = passed { + Ok(passed) + } else if let Ok(timeout) = std::env::var(env_var) { + let timeout = timeout.parse::().map_err(|_| Error::InvalidInput { + message: format!( + "Invalid value for {} environment variable: '{}'", + env_var, timeout + ), + })?; + Ok(Duration::from_secs(timeout)) + } else { + Ok(default) + } + } + pub fn try_new( db_url: &str, api_key: &str, region: &str, host_override: Option, + client_config: ClientConfig, ) -> Result { let parsed_url = url::Url::parse(db_url)?; debug_assert_eq!(parsed_url.scheme(), "db"); @@ -59,22 +224,47 @@ impl RestfulLanceDbClient { }); } let db_name = parsed_url.host_str().unwrap(); + + // Get the timeouts + let connect_timeout = Self::get_timeout( + client_config.timeout_config.connect_timeout, + "LANCE_CLIENT_CONNECT_TIMEOUT", + Duration::from_secs(120), + )?; + let read_timeout = Self::get_timeout( + client_config.timeout_config.read_timeout, + "LANCE_CLIENT_READ_TIMEOUT", + Duration::from_secs(300), + )?; + let pool_idle_timeout = Self::get_timeout( + client_config.timeout_config.pool_idle_timeout, + // Though it's confusing with the connect_timeout name, this is the + // legacy name for this in the Python sync client. So we keep as-is. + "LANCE_CLIENT_CONNECTION_TIMEOUT", + Duration::from_secs(300), + )?; + let client = reqwest::Client::builder() - .timeout(Duration::from_secs(30)) + .connect_timeout(connect_timeout) + .read_timeout(read_timeout) + .pool_idle_timeout(pool_idle_timeout) .default_headers(Self::default_headers( api_key, region, db_name, host_override.is_some(), )?) 
+ .user_agent(client_config.user_agent) .build()?; let host = match host_override { Some(host_override) => host_override, None => format!("https://{}.{}.api.lancedb.com", db_name, region), }; + let retry_config = client_config.retry_config.try_into()?; Ok(Self { client, host, + retry_config, sender: Sender, }) } @@ -129,8 +319,100 @@ impl RestfulLanceDbClient { self.client.post(full_uri) } - pub async fn send(&self, req: RequestBuilder) -> Result { - self.sender.send(req).await + pub async fn send(&self, req: RequestBuilder, with_retry: bool) -> Result { + let (client, request) = req.build_split(); + let mut request = request.unwrap(); + + // Set a request id. + // TODO: allow the user to supply this, through middleware? + if request.headers().get(REQUEST_ID_HEADER).is_none() { + let request_id = uuid::Uuid::new_v4(); + let request_id = HeaderValue::from_str(&request_id.to_string()).unwrap(); + request.headers_mut().insert(REQUEST_ID_HEADER, request_id); + } + + if with_retry { + self.send_with_retry_impl(client, request).await + } else { + Ok(self.sender.send(&client, request).await?) + } + } + + async fn send_with_retry_impl( + &self, + client: reqwest::Client, + req: Request, + ) -> Result { + let mut request_failures = 0; + let mut connect_failures = 0; + let mut read_failures = 0; + + loop { + // This only works if the request body is not a stream. If it is + // a stream, we can't use the retry path. We would need to implement + // an outer retry. + let request = req.try_clone().ok_or_else(|| Error::Http { + message: "Attempted to retry a request that cannot be cloned".to_string(), + })?; + let response = self.sender.send(&client, request).await; + let status_code = response.as_ref().map(|r| r.status()); + match status_code { + Ok(status) if status.is_success() => return Ok(response?), + Ok(status) if self.retry_config.statuses.contains(&status) => { + request_failures += 1; + if request_failures >= self.retry_config.retries { + // TODO: better error + return Err(Error::Runtime { + message: format!( + "Request failed after {} retries with status code {}", + request_failures, status + ), + }); + } + } + Err(err) if err.is_connect() => { + connect_failures += 1; + if connect_failures >= self.retry_config.connect_retries { + return Err(Error::Runtime { + message: format!( + "Request failed after {} connect retries with error: {}", + connect_failures, err + ), + }); + } + } + Err(err) if err.is_timeout() || err.is_body() || err.is_decode() => { + read_failures += 1; + if read_failures >= self.retry_config.read_retries { + return Err(Error::Runtime { + message: format!( + "Request failed after {} read retries with error: {}", + read_failures, err + ), + }); + } + } + Ok(_) | Err(_) => return Ok(response?), + } + + let backoff = self.retry_config.backoff_factor * (2.0f32.powi(request_failures as i32)); + let jitter = rand::random::() * self.retry_config.backoff_jitter; + let sleep_time = Duration::from_secs_f32(backoff + jitter); + debug!( + "Retrying request {:?} ({}/{} connect, {}/{} read, {}/{} read) in {:?}", + req.headers() + .get("x-request-id") + .and_then(|v| v.to_str().ok()), + connect_failures, + self.retry_config.connect_retries, + request_failures, + self.retry_config.retries, + read_failures, + self.retry_config.read_retries, + sleep_time + ); + tokio::time::sleep(sleep_time).await; + } } async fn rsp_to_str(response: Response) -> String { @@ -172,8 +454,11 @@ pub mod test_utils { } impl HttpSend for MockSender { - async fn send(&self, request: reqwest::RequestBuilder) -> 
Result { - let request = request.build().unwrap(); + async fn send( + &self, + _client: &reqwest::Client, + request: reqwest::Request, + ) -> reqwest::Result { let response = (self.f)(request); Ok(response) } @@ -193,6 +478,7 @@ pub mod test_utils { RestfulLanceDbClient { client: reqwest::Client::new(), host: "http://localhost".to_string(), + retry_config: RetryConfig::default().try_into().unwrap(), sender: MockSender { f: Arc::new(wrapper), }, diff --git a/rust/lancedb/src/remote/db.rs b/rust/lancedb/src/remote/db.rs index db26b181..036e5e7c 100644 --- a/rust/lancedb/src/remote/db.rs +++ b/rust/lancedb/src/remote/db.rs @@ -17,6 +17,7 @@ use std::sync::Arc; use arrow_array::RecordBatchReader; use async_trait::async_trait; use http::StatusCode; +use moka::future::Cache; use reqwest::header::CONTENT_TYPE; use serde::Deserialize; use tokio::task::spawn_blocking; @@ -28,7 +29,7 @@ use crate::embeddings::EmbeddingRegistry; use crate::error::Result; use crate::Table; -use super::client::{HttpSend, RestfulLanceDbClient, Sender}; +use super::client::{ClientConfig, HttpSend, RestfulLanceDbClient, Sender}; use super::table::RemoteTable; use super::util::batches_to_ipc_bytes; use super::ARROW_STREAM_CONTENT_TYPE; @@ -41,6 +42,7 @@ struct ListTablesResponse { #[derive(Debug)] pub struct RemoteDatabase { client: RestfulLanceDbClient, + table_cache: Cache, } impl RemoteDatabase { @@ -49,9 +51,20 @@ impl RemoteDatabase { api_key: &str, region: &str, host_override: Option, + client_config: ClientConfig, ) -> Result { - let client = RestfulLanceDbClient::try_new(uri, api_key, region, host_override)?; - Ok(Self { client }) + let client = + RestfulLanceDbClient::try_new(uri, api_key, region, host_override, client_config)?; + + let table_cache = Cache::builder() + .time_to_live(std::time::Duration::from_secs(300)) + .max_capacity(10_000) + .build(); + + Ok(Self { + client, + table_cache, + }) } } @@ -68,7 +81,10 @@ mod test_utils { T: Into, { let client = client_with_handler(handler); - Self { client } + Self { + client, + table_cache: Cache::new(0), + } } } } @@ -89,9 +105,13 @@ impl ConnectionInternal for RemoteDatabase { if let Some(start_after) = options.start_after { req = req.query(&[("page_token", start_after)]); } - let rsp = self.client.send(req).await?; + let rsp = self.client.send(req, true).await?; let rsp = self.client.check_response(rsp).await?; - Ok(rsp.json::().await?.tables) + let tables = rsp.json::().await?.tables; + for table in &tables { + self.table_cache.insert(table.clone(), ()).await; + } + Ok(tables) } async fn do_create_table( @@ -113,7 +133,7 @@ impl ConnectionInternal for RemoteDatabase { .header(CONTENT_TYPE, ARROW_STREAM_CONTENT_TYPE) // This is currently expected by LanceDb cloud but will be removed soon. .header("x-request-id", "na"); - let rsp = self.client.send(req).await?; + let rsp = self.client.send(req, false).await?; if rsp.status() == StatusCode::BAD_REQUEST { let body = rsp.text().await?; @@ -126,6 +146,8 @@ impl ConnectionInternal for RemoteDatabase { self.client.check_response(rsp).await?; + self.table_cache.insert(options.name.clone(), ()).await; + Ok(Table::new(Arc::new(RemoteTable::new( self.client.clone(), options.name, @@ -134,15 +156,17 @@ impl ConnectionInternal for RemoteDatabase { async fn do_open_table(&self, options: OpenTableBuilder) -> Result { // We describe the table to confirm it exists before moving on. 
- // TODO: a TTL cache of table existence - let req = self - .client - .get(&format!("/v1/table/{}/describe/", options.name)); - let resp = self.client.send(req).await?; - if resp.status() == StatusCode::NOT_FOUND { - return Err(crate::Error::TableNotFound { name: options.name }); + if self.table_cache.get(&options.name).is_none() { + let req = self + .client + .get(&format!("/v1/table/{}/describe/", options.name)); + let resp = self.client.send(req, true).await?; + if resp.status() == StatusCode::NOT_FOUND { + return Err(crate::Error::TableNotFound { name: options.name }); + } + self.client.check_response(resp).await?; } - self.client.check_response(resp).await?; + Ok(Table::new(Arc::new(RemoteTable::new( self.client.clone(), options.name, @@ -154,15 +178,18 @@ impl ConnectionInternal for RemoteDatabase { .client .post(&format!("/v1/table/{}/rename/", current_name)); let req = req.json(&serde_json::json!({ "new_table_name": new_name })); - let resp = self.client.send(req).await?; + let resp = self.client.send(req, false).await?; self.client.check_response(resp).await?; + self.table_cache.remove(current_name).await; + self.table_cache.insert(new_name.into(), ()).await; Ok(()) } async fn drop_table(&self, name: &str) -> Result<()> { let req = self.client.post(&format!("/v1/table/{}/drop/", name)); - let resp = self.client.send(req).await?; + let resp = self.client.send(req, true).await?; self.client.check_response(resp).await?; + self.table_cache.remove(name).await; Ok(()) } diff --git a/rust/lancedb/src/remote/table.rs b/rust/lancedb/src/remote/table.rs index bdd59045..16caad70 100644 --- a/rust/lancedb/src/remote/table.rs +++ b/rust/lancedb/src/remote/table.rs @@ -51,7 +51,7 @@ impl RemoteTable { async fn describe(&self) -> Result { let request = self.client.post(&format!("/table/{}/describe/", self.name)); - let response = self.client.send(request).await?; + let response = self.client.send(request, true).await?; let response = self.check_table_response(response).await?; @@ -257,7 +257,7 @@ impl TableInternal for RemoteTable { request = request.json(&serde_json::json!({})); } - let response = self.client.send(request).await?; + let response = self.client.send(request, true).await?; let response = self.check_table_response(response).await?; @@ -286,7 +286,7 @@ impl TableInternal for RemoteTable { } } - let response = self.client.send(request).await?; + let response = self.client.send(request, false).await?; self.check_table_response(response).await?; @@ -337,7 +337,7 @@ impl TableInternal for RemoteTable { let request = request.json(&body); - let response = self.client.send(request).await?; + let response = self.client.send(request, true).await?; let stream = self.read_arrow_stream(response).await?; @@ -359,7 +359,7 @@ impl TableInternal for RemoteTable { let request = request.json(&body); - let response = self.client.send(request).await?; + let response = self.client.send(request, true).await?; let stream = self.read_arrow_stream(response).await?; @@ -379,7 +379,7 @@ impl TableInternal for RemoteTable { "only_if": update.filter, })); - let response = self.client.send(request).await?; + let response = self.client.send(request, false).await?; let response = self.check_table_response(response).await?; @@ -398,7 +398,7 @@ impl TableInternal for RemoteTable { .client .post(&format!("/table/{}/delete/", self.name)) .json(&body); - let response = self.client.send(request).await?; + let response = self.client.send(request, false).await?; self.check_table_response(response).await?; Ok(()) } @@ 
-468,7 +468,7 @@ impl TableInternal for RemoteTable { let request = request.json(&body); - let response = self.client.send(request).await?; + let response = self.client.send(request, false).await?; self.check_table_response(response).await?; @@ -489,7 +489,7 @@ impl TableInternal for RemoteTable { .header(CONTENT_TYPE, ARROW_STREAM_CONTENT_TYPE) .body(body); - let response = self.client.send(request).await?; + let response = self.client.send(request, false).await?; self.check_table_response(response).await?; @@ -528,7 +528,7 @@ impl TableInternal for RemoteTable { let request = self .client .post(&format!("/table/{}/index/{}/stats/", self.name, index_name)); - let response = self.client.send(request).await?; + let response = self.client.send(request, true).await?; if response.status() == StatusCode::NOT_FOUND { return Ok(None); From f305f34d9b22f652ed2f5706ed6d371d6132c978 Mon Sep 17 00:00:00 2001 From: Will Jones Date: Tue, 1 Oct 2024 15:46:59 -0700 Subject: [PATCH 06/51] feat(python): bind python async remote client to rust client (#1700) Closes [#1638](https://github.com/lancedb/lancedb/issues/1638) This just binds the Python Async client to the Rust remote client. --- python/Cargo.toml | 4 +- python/python/lancedb/__init__.py | 20 ++++- python/python/lancedb/remote/__init__.py | 109 +++++++++++++++++++++++ python/python/tests/test_remote_db.py | 58 ++++++++++++ python/src/connection.rs | 68 +++++++++++++- rust/lancedb/src/remote/client.rs | 2 +- 6 files changed, 255 insertions(+), 6 deletions(-) diff --git a/python/Cargo.toml b/python/Cargo.toml index 6ecc2087..115ff2e0 100644 --- a/python/Cargo.toml +++ b/python/Cargo.toml @@ -22,8 +22,6 @@ pyo3 = { version = "0.21", features = ["extension-module", "abi3-py38", "gil-ref # pyo3-asyncio = { version = "0.20", features = ["attributes", "tokio-runtime"] } pyo3-asyncio-0-21 = { version = "0.21.0", features = ["attributes", "tokio-runtime"] } -# Prevent dynamic linking of lzma, which comes from datafusion -lzma-sys = { version = "*", features = ["static"] } pin-project = "1.1.5" futures.workspace = true tokio = { version = "1.36.0", features = ["sync"] } @@ -35,4 +33,6 @@ pyo3-build-config = { version = "0.20.3", features = [ ] } [features] +default = ["remote"] fp16kernels = ["lancedb/fp16kernels"] +remote = ["lancedb/remote"] diff --git a/python/python/lancedb/__init__.py b/python/python/lancedb/__init__.py index 67a64479..b394fa6f 100644 --- a/python/python/lancedb/__init__.py +++ b/python/python/lancedb/__init__.py @@ -19,6 +19,8 @@ from typing import Dict, Optional, Union, Any __version__ = importlib.metadata.version("lancedb") +from lancedb.remote import ClientConfig + from ._lancedb import connect as lancedb_connect from .common import URI, sanitize_uri from .db import AsyncConnection, DBConnection, LanceDBConnection @@ -120,7 +122,7 @@ async def connect_async( region: str = "us-east-1", host_override: Optional[str] = None, read_consistency_interval: Optional[timedelta] = None, - request_thread_pool: Optional[Union[int, ThreadPoolExecutor]] = None, + client_config: Optional[Union[ClientConfig, Dict[str, Any]]] = None, storage_options: Optional[Dict[str, str]] = None, ) -> AsyncConnection: """Connect to a LanceDB database. @@ -148,6 +150,10 @@ async def connect_async( the last check, then the table will be checked for updates. Note: this consistency only applies to read operations. Write operations are always consistent. + client_config: ClientConfig or dict, optional + Configuration options for the LanceDB Cloud HTTP client. 
If a dict, then + the keys are the attributes of the ClientConfig class. If None, then the + default configuration is used. storage_options: dict, optional Additional options for the storage backend. See available options at https://lancedb.github.io/lancedb/guides/storage/ @@ -160,7 +166,13 @@ async def connect_async( ... # For a local directory, provide a path to the database ... db = await lancedb.connect_async("~/.lancedb") ... # For object storage, use a URI prefix - ... db = await lancedb.connect_async("s3://my-bucket/lancedb") + ... db = await lancedb.connect_async("s3://my-bucket/lancedb", + ... storage_options={ + ... "aws_access_key_id": "***"}) + ... # Connect to LanceDB cloud + ... db = await lancedb.connect_async("db://my_database", api_key="ldb_...", + ... client_config={ + ... "retry_config": {"retries": 5}}) Returns ------- @@ -172,6 +184,9 @@ async def connect_async( else: read_consistency_interval_secs = None + if isinstance(client_config, dict): + client_config = ClientConfig(**client_config) + return AsyncConnection( await lancedb_connect( sanitize_uri(uri), @@ -179,6 +194,7 @@ async def connect_async( region, host_override, read_consistency_interval_secs, + client_config, storage_options, ) ) diff --git a/python/python/lancedb/remote/__init__.py b/python/python/lancedb/remote/__init__.py index c6e15c83..fdd0cfae 100644 --- a/python/python/lancedb/remote/__init__.py +++ b/python/python/lancedb/remote/__init__.py @@ -12,9 +12,12 @@ # limitations under the License. import abc +from dataclasses import dataclass +from datetime import timedelta from typing import List, Optional import attrs +from lancedb import __version__ import pyarrow as pa from pydantic import BaseModel @@ -62,3 +65,109 @@ class LanceDBClient(abc.ABC): def query(self, table_name: str, query: VectorQuery) -> VectorQueryResult: """Query the LanceDB server for the given table and query.""" pass + + +@dataclass +class TimeoutConfig: + """Timeout configuration for remote HTTP client. + + Attributes + ---------- + connect_timeout: Optional[timedelta] + The timeout for establishing a connection. Default is 120 seconds (2 minutes). + This can also be set via the environment variable + `LANCE_CLIENT_CONNECT_TIMEOUT`, as an integer number of seconds. + read_timeout: Optional[timedelta] + The timeout for reading data from the server. Default is 300 seconds + (5 minutes). This can also be set via the environment variable + `LANCE_CLIENT_READ_TIMEOUT`, as an integer number of seconds. + pool_idle_timeout: Optional[timedelta] + The timeout for keeping idle connections in the connection pool. Default + is 300 seconds (5 minutes). This can also be set via the environment variable + `LANCE_CLIENT_CONNECTION_TIMEOUT`, as an integer number of seconds. 
+ """ + + connect_timeout: Optional[timedelta] = None + read_timeout: Optional[timedelta] = None + pool_idle_timeout: Optional[timedelta] = None + + @staticmethod + def __to_timedelta(value) -> Optional[timedelta]: + if value is None: + return None + elif isinstance(value, timedelta): + return value + elif isinstance(value, (int, float)): + return timedelta(seconds=value) + else: + raise ValueError( + f"Invalid value for timeout: {value}, must be a timedelta " + "or number of seconds" + ) + + def __post_init__(self): + self.connect_timeout = self.__to_timedelta(self.connect_timeout) + self.read_timeout = self.__to_timedelta(self.read_timeout) + self.pool_idle_timeout = self.__to_timedelta(self.pool_idle_timeout) + + +@dataclass +class RetryConfig: + """Retry configuration for the remote HTTP client. + + Attributes + ---------- + retries: Optional[int] + The maximum number of retries for a request. Default is 3. You can also set this + via the environment variable `LANCE_CLIENT_MAX_RETRIES`. + connect_retries: Optional[int] + The maximum number of retries for connection errors. Default is 3. You can also + set this via the environment variable `LANCE_CLIENT_CONNECT_RETRIES`. + read_retries: Optional[int] + The maximum number of retries for read errors. Default is 3. You can also set + this via the environment variable `LANCE_CLIENT_READ_RETRIES`. + backoff_factor: Optional[float] + The backoff factor to apply between retries. Default is 0.25. Between each retry + the client will wait for the amount of seconds: + `{backoff factor} * (2 ** ({number of previous retries}))`. So for the default + of 0.25, the first retry will wait 0.25 seconds, the second retry will wait 0.5 + seconds, the third retry will wait 1 second, etc. + + You can also set this via the environment variable + `LANCE_CLIENT_RETRY_BACKOFF_FACTOR`. + backoff_jitter: Optional[float] + The jitter to apply to the backoff factor, in seconds. Default is 0.25. + + A random value between 0 and `backoff_jitter` will be added to the backoff + factor in seconds. So for the default of 0.25 seconds, between 0 and 250 + milliseconds will be added to the sleep between each retry. + + You can also set this via the environment variable + `LANCE_CLIENT_RETRY_BACKOFF_JITTER`. + statuses: Optional[List[int] + The HTTP status codes for which to retry the request. Default is + [429, 500, 502, 503]. + + You can also set this via the environment variable + `LANCE_CLIENT_RETRY_STATUSES`. Use a comma-separated list of integers. 
+ """ + + retries: Optional[int] = None + connect_retries: Optional[int] = None + read_retries: Optional[int] = None + backoff_factor: Optional[float] = None + backoff_jitter: Optional[float] = None + statuses: Optional[List[int]] = None + + +@dataclass +class ClientConfig: + user_agent: str = f"LanceDB-Python-Client/{__version__}" + retry_config: Optional[RetryConfig] = None + timeout_config: Optional[TimeoutConfig] = None + + def __post_init__(self): + if isinstance(self.retry_config, dict): + self.retry_config = RetryConfig(**self.retry_config) + if isinstance(self.timeout_config, dict): + self.timeout_config = TimeoutConfig(**self.timeout_config) diff --git a/python/python/tests/test_remote_db.py b/python/python/tests/test_remote_db.py index 729fb550..dee183d9 100644 --- a/python/python/tests/test_remote_db.py +++ b/python/python/tests/test_remote_db.py @@ -1,11 +1,15 @@ # SPDX-License-Identifier: Apache-2.0 # SPDX-FileCopyrightText: Copyright The LanceDB Authors +import http.server +import threading from unittest.mock import MagicMock +import uuid import lancedb import pyarrow as pa from lancedb.remote.client import VectorQuery, VectorQueryResult +import pytest class FakeLanceDBClient: @@ -81,3 +85,57 @@ def test_create_table_with_recordbatches(): table = conn.create_table("test", [batch], schema=batch.schema) assert table.name == "test" assert client.post.call_args[0][0] == "/v1/table/test/create/" + + +def make_mock_http_handler(handler): + class MockLanceDBHandler(http.server.BaseHTTPRequestHandler): + def do_GET(self): + handler(self) + + def do_POST(self): + handler(self) + + return MockLanceDBHandler + + +@pytest.mark.asyncio +async def test_async_remote_db(): + def handler(request): + # We created a UUID request id + request_id = request.headers["x-request-id"] + assert uuid.UUID(request_id).version == 4 + + # We set a user agent with the current library version + user_agent = request.headers["User-Agent"] + assert user_agent == f"LanceDB-Python-Client/{lancedb.__version__}" + + request.send_response(200) + request.send_header("Content-Type", "application/json") + request.end_headers() + request.wfile.write(b'{"tables": []}') + + def run_server(): + with http.server.HTTPServer( + ("localhost", 8080), make_mock_http_handler(handler) + ) as server: + # we will only make one request + server.handle_request() + + handle = threading.Thread(target=run_server) + handle.start() + + db = await lancedb.connect_async( + "db://dev", + api_key="fake", + host_override="http://localhost:8080", + client_config={ + "retry_config": {"retries": 2}, + "timeout_config": { + "connect_timeout": 1, + }, + }, + ) + table_names = await db.table_names() + assert table_names == [] + + handle.join() diff --git a/python/src/connection.rs b/python/src/connection.rs index 4f7e20a9..200285a4 100644 --- a/python/src/connection.rs +++ b/python/src/connection.rs @@ -7,7 +7,7 @@ use arrow::{datatypes::Schema, ffi_stream::ArrowArrayStreamReader, pyarrow::From use lancedb::connection::{Connection as LanceConnection, CreateTableMode, LanceFileVersion}; use pyo3::{ exceptions::{PyRuntimeError, PyValueError}, - pyclass, pyfunction, pymethods, Bound, PyAny, PyRef, PyResult, Python, + pyclass, pyfunction, pymethods, Bound, FromPyObject, PyAny, PyRef, PyResult, Python, }; use pyo3_asyncio_0_21::tokio::future_into_py; @@ -187,6 +187,7 @@ impl Connection { } #[pyfunction] +#[allow(clippy::too_many_arguments)] pub fn connect( py: Python, uri: String, @@ -194,6 +195,7 @@ pub fn connect( region: Option, host_override: Option, 
read_consistency_interval: Option, + client_config: Option, storage_options: Option>, ) -> PyResult> { future_into_py(py, async move { @@ -214,6 +216,70 @@ pub fn connect( if let Some(storage_options) = storage_options { builder = builder.storage_options(storage_options); } + #[cfg(feature = "remote")] + if let Some(client_config) = client_config { + builder = builder.client_config(client_config.into()); + } Ok(Connection::new(builder.execute().await.infer_error()?)) }) } + +#[derive(FromPyObject)] +pub struct PyClientConfig { + user_agent: String, + retry_config: Option, + timeout_config: Option, +} + +#[derive(FromPyObject)] +pub struct PyClientRetryConfig { + retries: Option, + connect_retries: Option, + read_retries: Option, + backoff_factor: Option, + backoff_jitter: Option, + statuses: Option>, +} + +#[derive(FromPyObject)] +pub struct PyClientTimeoutConfig { + connect_timeout: Option, + read_timeout: Option, + pool_idle_timeout: Option, +} + +#[cfg(feature = "remote")] +impl From for lancedb::remote::RetryConfig { + fn from(value: PyClientRetryConfig) -> Self { + Self { + retries: value.retries, + connect_retries: value.connect_retries, + read_retries: value.read_retries, + backoff_factor: value.backoff_factor, + backoff_jitter: value.backoff_jitter, + statuses: value.statuses, + } + } +} + +#[cfg(feature = "remote")] +impl From for lancedb::remote::TimeoutConfig { + fn from(value: PyClientTimeoutConfig) -> Self { + Self { + connect_timeout: value.connect_timeout, + read_timeout: value.read_timeout, + pool_idle_timeout: value.pool_idle_timeout, + } + } +} + +#[cfg(feature = "remote")] +impl From for lancedb::remote::ClientConfig { + fn from(value: PyClientConfig) -> Self { + Self { + user_agent: value.user_agent, + retry_config: value.retry_config.map(Into::into).unwrap_or_default(), + timeout_config: value.timeout_config.map(Into::into).unwrap_or_default(), + } + } +} diff --git a/rust/lancedb/src/remote/client.rs b/rust/lancedb/src/remote/client.rs index 644cb1f1..a42c0733 100644 --- a/rust/lancedb/src/remote/client.rs +++ b/rust/lancedb/src/remote/client.rs @@ -29,7 +29,7 @@ const REQUEST_ID_HEADER: &str = "x-request-id"; pub struct ClientConfig { pub timeout_config: TimeoutConfig, pub retry_config: RetryConfig, - /// User agent to use for requests. The default provides the libary + /// User agent to use for requests. The default provides the library /// name and version. pub user_agent: String, // TODO: how to configure request ids? 
From e2ca8daee1fdc8586851943350128f9bed8ffca1 Mon Sep 17 00:00:00 2001 From: Prashant Dixit <54981696+PrashantDixit0@users.noreply.github.com> Date: Wed, 2 Oct 2024 21:15:24 +0530 Subject: [PATCH 07/51] docs: saleforce's sfr rag (#1717) This PR adds Salesforce's newly released SFR RAG --- docs/mkdocs.yml | 5 ++++- docs/src/rag/sfr_rag.md | 17 +++++++++++++++++ 2 files changed, 21 insertions(+), 1 deletion(-) create mode 100644 docs/src/rag/sfr_rag.md diff --git a/docs/mkdocs.yml b/docs/mkdocs.yml index 5c91577f..5abf132e 100644 --- a/docs/mkdocs.yml +++ b/docs/mkdocs.yml @@ -120,6 +120,7 @@ nav: - Graph RAG: rag/graph_rag.md - Self RAG: rag/self_rag.md - Adaptive RAG: rag/adaptive_rag.md + - SFR RAG: rag/sfr_rag.md - Advanced Techniques: - HyDE: rag/advanced_techniques/hyde.md - FLARE: rag/advanced_techniques/flare.md @@ -247,6 +248,7 @@ nav: - Graph RAG: rag/graph_rag.md - Self RAG: rag/self_rag.md - Adaptive RAG: rag/adaptive_rag.md + - SFR RAG: rag/sfr_rag.md - Advanced Techniques: - HyDE: rag/advanced_techniques/hyde.md - FLARE: rag/advanced_techniques/flare.md @@ -362,4 +364,5 @@ extra: - icon: fontawesome/brands/x-twitter link: https://twitter.com/lancedb - icon: fontawesome/brands/linkedin - link: https://www.linkedin.com/company/lancedb + link: https://www.linkedin.com/company/lancedb + \ No newline at end of file diff --git a/docs/src/rag/sfr_rag.md b/docs/src/rag/sfr_rag.md new file mode 100644 index 00000000..9f063575 --- /dev/null +++ b/docs/src/rag/sfr_rag.md @@ -0,0 +1,17 @@ +**SFR RAG πŸ“‘** +==================================================================== +Salesforce AI Research introduces SFR-RAG, a 9-billion-parameter language model trained with a significant emphasis on reliable, precise, and faithful contextual generation abilities specific to real-world RAG use cases and relevant agentic tasks. They include precise factual knowledge extraction, distinguishing relevant against distracting contexts, citing appropriate sources along with answers, producing complex and multi-hop reasoning over multiple contexts, consistent format following, as well as refraining from hallucination over unanswerable queries. + +**[Offical Implementation](https://github.com/SalesforceAIResearch/SFR-RAG)** + +
+![agent-based-rag](https://raw.githubusercontent.com/lancedb/assets/main/docs/assets/rag/salesforce_contextbench.png)
+*Average Scores in ContextualBench: Source*
+
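+Since these docs pair each RAG variant with LanceDB, the sketch below shows one way SFR-RAG could sit behind a LanceDB retriever. It is a minimal, hypothetical wiring: `embed` and `sfr_rag_generate` are placeholders for whatever embedding model and SFR-RAG inference code you use; only the LanceDB calls are standard API.
+
+```python
+import lancedb
+
+# Connect to a local LanceDB database and open a table of embedded passages.
+db = lancedb.connect("~/.lancedb")
+table = db.open_table("passages")  # assumes a "vector" column plus a "text" column
+
+def retrieve(query_vector, k=5):
+    # Nearest-neighbor search over the passage embeddings; the top-k texts
+    # become the contexts that ground SFR-RAG's answer.
+    return table.search(query_vector).limit(k).to_pandas()["text"].tolist()
+
+# contexts = retrieve(embed(question))           # `embed` is your embedding model (placeholder)
+# answer = sfr_rag_generate(question, contexts)  # placeholder for SFR-RAG inference
+```
+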
+ +To reliably evaluate LLMs in contextual question-answering for RAG, Saleforce introduced [ContextualBench](https://huggingface.co/datasets/Salesforce/ContextualBench?ref=blog.salesforceairesearch.com), featuring 7 benchmarks like [HotpotQA](https://arxiv.org/abs/1809.09600?ref=blog.salesforceairesearch.com) and [2WikiHopQA](https://www.aclweb.org/anthology/2020.coling-main.580/?ref=blog.salesforceairesearch.com) with consistent setups. + +SFR-RAG outperforms GPT-4o, achieving state-of-the-art results in 3 out of 7 benchmarks, and significantly surpasses Command-R+ while using 10 times fewer parameters. It also excels at handling context, even when facts are altered or conflicting. + +[Saleforce AI Research Blog](https://blog.salesforceairesearch.com/sfr-rag/) From 6ceaf8b06ecbff1229b946241d94cf2de7b1400d Mon Sep 17 00:00:00 2001 From: Rithik Kumar <46047011+rithikJha@users.noreply.github.com> Date: Thu, 3 Oct 2024 00:55:00 +0530 Subject: [PATCH 08/51] docs: add langchainjs writing assistant (#1719) --- docs/src/examples/index.md | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/docs/src/examples/index.md b/docs/src/examples/index.md index f31ea3f9..c4491b29 100644 --- a/docs/src/examples/index.md +++ b/docs/src/examples/index.md @@ -8,9 +8,15 @@ LanceDB provides language APIs, allowing you to embed a database in your languag * πŸ‘Ύ [JavaScript](examples_js.md) examples * πŸ¦€ Rust examples (coming soon) -## Applications powered by LanceDB +## Python Applications powered by LanceDB | Project Name | Description | | --- | --- | | **Ultralytics Explorer πŸš€**
[![Ultralytics](https://img.shields.io/badge/Ultralytics-Docs-green?labelColor=0f3bc4&style=flat-square&logo=https://cdn.prod.website-files.com/646dd1f1a3703e451ba81ecc/64994922cf2a6385a4bf4489_UltralyticsYOLO_mark_blue.svg&link=https://docs.ultralytics.com/datasets/explorer/)](https://docs.ultralytics.com/datasets/explorer/)
[![Open In Collab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/ultralytics/ultralytics/blob/main/docs/en/datasets/explorer/explorer.ipynb) | - πŸ” **Explore CV Datasets**: Semantic search, SQL queries, vector similarity, natural language.
- πŸ–₯️ **GUI & Python API**: Seamless dataset interaction.
- ⚑ **Efficient & Scalable**: Leverages LanceDB for large datasets.
- πŸ“Š **Detailed Analysis**: Easily analyze data patterns.
- 🌐 **Browser GUI Demo**: Create embeddings, search images, run queries. | | **Website Chatbot πŸ€–**
[![GitHub](https://img.shields.io/badge/github-%23121011.svg?style=for-the-badge&logo=github&logoColor=white)](https://github.com/lancedb/lancedb-vercel-chatbot)
[![Deploy with Vercel](https://vercel.com/button)](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2Flancedb%2Flancedb-vercel-chatbot&env=OPENAI_API_KEY&envDescription=OpenAI%20API%20Key%20for%20chat%20completion.&project-name=lancedb-vercel-chatbot&repository-name=lancedb-vercel-chatbot&demo-title=LanceDB%20Chatbot%20Demo&demo-description=Demo%20website%20chatbot%20with%20LanceDB.&demo-url=https%3A%2F%2Flancedb.vercel.app&demo-image=https%3A%2F%2Fi.imgur.com%2FazVJtvr.png) | - 🌐 **Chatbot from Sitemap/Docs**: Create a chatbot using site or document context.
- πŸš€ **Embed LanceDB in Next.js**: Lightweight, on-prem storage.
- 🧠 **AI-Powered Context Retrieval**: Efficiently access relevant data.
- πŸ”§ **Serverless & Native JS**: Seamless integration with Next.js.
- ⚑ **One-Click Deploy on Vercel**: Quick and easy setup. |
+
+## Node.js Applications powered by LanceDB
+
+| Project Name | Description |
+| --- | --- |
+| **LangChain Writing Assistant ✍️**
[![Github](../assets/github.svg)](https://github.com/lancedb/vectordb-recipes/tree/main/applications/node/lanchain_writing_assistant) | - **πŸ“‚ Data Source Integration**: Use your own data by specifying a data source file, and the app instantly processes it to provide insights.
- **🧠 Intelligent Suggestions**: Powered by LangChain.js and LanceDB, it improves writing productivity and accuracy.
- **πŸ’‘ Enhanced Writing Experience**: It delivers real-time contextual insights and factual suggestions while the user writes. | \ No newline at end of file From 408bc96a440b1a9bae966e5e940c4d72074ae705 Mon Sep 17 00:00:00 2001 From: Prashant Dixit <54981696+PrashantDixit0@users.noreply.github.com> Date: Thu, 3 Oct 2024 16:15:27 +0530 Subject: [PATCH 09/51] fix: broken notebook link fix (#1721) --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index ad2a63f5..fa1218f1 100644 --- a/README.md +++ b/README.md @@ -82,4 +82,4 @@ result = table.search([100, 100]).limit(2).to_pandas() ## Blogs, Tutorials & Videos * πŸ“ˆ 2000x better performance with Lance over Parquet -* πŸ€– Build a question and answer bot with LanceDB +* πŸ€– Build a question and answer bot with LanceDB From e61ba7f4e21ed801912e9ea75f07d8c40b29adc2 Mon Sep 17 00:00:00 2001 From: Will Jones Date: Fri, 4 Oct 2024 08:43:07 -0700 Subject: [PATCH 10/51] fix(rust): remote SDK bugs (#1723) A few bugs uncovered by integration tests: * We didn't prepend `/v1` to the Table endpoint URLs * `/create_index` takes `metric_type` not `distance_type`. (This is also an error in the OpenAPI docs.) * `/create_index` expects the `metric_type` parameter to always be lowercase. * We were writing an IPC file message when we were supposed to send an IPC stream message. --- rust/lancedb/src/remote/table.rs | 67 ++++++++++++++++++-------------- rust/lancedb/src/remote/util.rs | 2 +- 2 files changed, 38 insertions(+), 31 deletions(-) diff --git a/rust/lancedb/src/remote/table.rs b/rust/lancedb/src/remote/table.rs index 16caad70..4c6182ce 100644 --- a/rust/lancedb/src/remote/table.rs +++ b/rust/lancedb/src/remote/table.rs @@ -50,7 +50,9 @@ impl RemoteTable { } async fn describe(&self) -> Result { - let request = self.client.post(&format!("/table/{}/describe/", self.name)); + let request = self + .client + .post(&format!("/v1/table/{}/describe/", self.name)); let response = self.client.send(request, true).await?; let response = self.check_table_response(response).await?; @@ -249,7 +251,7 @@ impl TableInternal for RemoteTable { async fn count_rows(&self, filter: Option) -> Result { let mut request = self .client - .post(&format!("/table/{}/count_rows/", self.name)); + .post(&format!("/v1/table/{}/count_rows/", self.name)); if let Some(filter) = filter { request = request.json(&serde_json::json!({ "filter": filter })); @@ -275,7 +277,7 @@ impl TableInternal for RemoteTable { let body = Self::reader_as_body(data)?; let mut request = self .client - .post(&format!("/table/{}/insert/", self.name)) + .post(&format!("/v1/table/{}/insert/", self.name)) .header(CONTENT_TYPE, ARROW_STREAM_CONTENT_TYPE) .body(body); @@ -298,7 +300,7 @@ impl TableInternal for RemoteTable { query: &VectorQuery, _options: QueryExecutionOptions, ) -> Result> { - let request = self.client.post(&format!("/table/{}/query/", self.name)); + let request = self.client.post(&format!("/v1/table/{}/query/", self.name)); let mut body = serde_json::Value::Object(Default::default()); Self::apply_query_params(&mut body, &query.base)?; @@ -351,7 +353,7 @@ impl TableInternal for RemoteTable { ) -> Result { let request = self .client - .post(&format!("/table/{}/query/", self.name)) + .post(&format!("/v1/table/{}/query/", self.name)) .header(CONTENT_TYPE, JSON_CONTENT_TYPE); let mut body = serde_json::Value::Object(Default::default()); @@ -366,7 +368,9 @@ impl TableInternal for RemoteTable { Ok(DatasetRecordBatchStream::new(stream)) } async fn 
update(&self, update: UpdateBuilder) -> Result { - let request = self.client.post(&format!("/table/{}/update/", self.name)); + let request = self + .client + .post(&format!("/v1/table/{}/update/", self.name)); let mut updates = Vec::new(); for (column, expression) in update.columns { @@ -396,7 +400,7 @@ impl TableInternal for RemoteTable { let body = serde_json::json!({ "predicate": predicate }); let request = self .client - .post(&format!("/table/{}/delete/", self.name)) + .post(&format!("/v1/table/{}/delete/", self.name)) .json(&body); let response = self.client.send(request, false).await?; self.check_table_response(response).await?; @@ -406,7 +410,7 @@ impl TableInternal for RemoteTable { async fn create_index(&self, mut index: IndexBuilder) -> Result<()> { let request = self .client - .post(&format!("/table/{}/create_index/", self.name)); + .post(&format!("/v1/table/{}/create_index/", self.name)); let column = match index.columns.len() { 0 => { @@ -463,7 +467,9 @@ impl TableInternal for RemoteTable { }; body["index_type"] = serde_json::Value::String(index_type.into()); if let Some(distance_type) = distance_type { - body["distance_type"] = serde_json::Value::String(distance_type.to_string()); + // Phalanx expects this to be lowercase right now. + body["metric_type"] = + serde_json::Value::String(distance_type.to_string().to_lowercase()); } let request = request.json(&body); @@ -484,7 +490,7 @@ impl TableInternal for RemoteTable { let body = Self::reader_as_body(new_data)?; let request = self .client - .post(&format!("/table/{}/merge_insert/", self.name)) + .post(&format!("/v1/table/{}/merge_insert/", self.name)) .query(&query) .header(CONTENT_TYPE, ARROW_STREAM_CONTENT_TYPE) .body(body); @@ -525,9 +531,10 @@ impl TableInternal for RemoteTable { }) } async fn index_stats(&self, index_name: &str) -> Result> { - let request = self - .client - .post(&format!("/table/{}/index/{}/stats/", self.name, index_name)); + let request = self.client.post(&format!( + "/v1/table/{}/index/{}/stats/", + self.name, index_name + )); let response = self.client.send(request, true).await?; if response.status() == StatusCode::NOT_FOUND { @@ -651,7 +658,7 @@ mod tests { async fn test_version() { let table = Table::new_with_handler("my_table", |request| { assert_eq!(request.method(), "POST"); - assert_eq!(request.url().path(), "/table/my_table/describe/"); + assert_eq!(request.url().path(), "/v1/table/my_table/describe/"); http::Response::builder() .status(200) @@ -667,7 +674,7 @@ mod tests { async fn test_schema() { let table = Table::new_with_handler("my_table", |request| { assert_eq!(request.method(), "POST"); - assert_eq!(request.url().path(), "/table/my_table/describe/"); + assert_eq!(request.url().path(), "/v1/table/my_table/describe/"); http::Response::builder() .status(200) @@ -696,7 +703,7 @@ mod tests { async fn test_count_rows() { let table = Table::new_with_handler("my_table", |request| { assert_eq!(request.method(), "POST"); - assert_eq!(request.url().path(), "/table/my_table/count_rows/"); + assert_eq!(request.url().path(), "/v1/table/my_table/count_rows/"); assert_eq!( request.headers().get("Content-Type").unwrap(), JSON_CONTENT_TYPE @@ -711,7 +718,7 @@ mod tests { let table = Table::new_with_handler("my_table", |request| { assert_eq!(request.method(), "POST"); - assert_eq!(request.url().path(), "/table/my_table/count_rows/"); + assert_eq!(request.url().path(), "/v1/table/my_table/count_rows/"); assert_eq!( request.headers().get("Content-Type").unwrap(), JSON_CONTENT_TYPE @@ -764,7 +771,7 @@ mod 
tests { let (sender, receiver) = std::sync::mpsc::channel(); let table = Table::new_with_handler("my_table", move |mut request| { assert_eq!(request.method(), "POST"); - assert_eq!(request.url().path(), "/table/my_table/insert/"); + assert_eq!(request.url().path(), "/v1/table/my_table/insert/"); // If mode is specified, it should be "append". Append is default // so it's not required. assert!(request @@ -808,7 +815,7 @@ mod tests { let (sender, receiver) = std::sync::mpsc::channel(); let table = Table::new_with_handler("my_table", move |mut request| { assert_eq!(request.method(), "POST"); - assert_eq!(request.url().path(), "/table/my_table/insert/"); + assert_eq!(request.url().path(), "/v1/table/my_table/insert/"); assert_eq!( request .url() @@ -849,7 +856,7 @@ mod tests { async fn test_update() { let table = Table::new_with_handler("my_table", |request| { assert_eq!(request.method(), "POST"); - assert_eq!(request.url().path(), "/table/my_table/update/"); + assert_eq!(request.url().path(), "/v1/table/my_table/update/"); assert_eq!( request.headers().get("Content-Type").unwrap(), JSON_CONTENT_TYPE @@ -897,7 +904,7 @@ mod tests { // Default parameters let table = Table::new_with_handler("my_table", |request| { assert_eq!(request.method(), "POST"); - assert_eq!(request.url().path(), "/table/my_table/merge_insert/"); + assert_eq!(request.url().path(), "/v1/table/my_table/merge_insert/"); let params = request.url().query_pairs().collect::>(); assert_eq!(params["on"], "some_col"); @@ -920,7 +927,7 @@ mod tests { let (sender, receiver) = std::sync::mpsc::channel(); let table = Table::new_with_handler("my_table", move |mut request| { assert_eq!(request.method(), "POST"); - assert_eq!(request.url().path(), "/table/my_table/merge_insert/"); + assert_eq!(request.url().path(), "/v1/table/my_table/merge_insert/"); assert_eq!( request.headers().get("Content-Type").unwrap(), ARROW_STREAM_CONTENT_TYPE @@ -960,7 +967,7 @@ mod tests { async fn test_delete() { let table = Table::new_with_handler("my_table", |request| { assert_eq!(request.method(), "POST"); - assert_eq!(request.url().path(), "/table/my_table/delete/"); + assert_eq!(request.url().path(), "/v1/table/my_table/delete/"); assert_eq!( request.headers().get("Content-Type").unwrap(), JSON_CONTENT_TYPE @@ -988,7 +995,7 @@ mod tests { let table = Table::new_with_handler("my_table", move |request| { assert_eq!(request.method(), "POST"); - assert_eq!(request.url().path(), "/table/my_table/query/"); + assert_eq!(request.url().path(), "/v1/table/my_table/query/"); assert_eq!( request.headers().get("Content-Type").unwrap(), JSON_CONTENT_TYPE @@ -1029,7 +1036,7 @@ mod tests { async fn test_query_vector_all_params() { let table = Table::new_with_handler("my_table", |request| { assert_eq!(request.method(), "POST"); - assert_eq!(request.url().path(), "/table/my_table/query/"); + assert_eq!(request.url().path(), "/v1/table/my_table/query/"); assert_eq!( request.headers().get("Content-Type").unwrap(), JSON_CONTENT_TYPE @@ -1085,7 +1092,7 @@ mod tests { async fn test_query_fts() { let table = Table::new_with_handler("my_table", |request| { assert_eq!(request.method(), "POST"); - assert_eq!(request.url().path(), "/table/my_table/query/"); + assert_eq!(request.url().path(), "/v1/table/my_table/query/"); assert_eq!( request.headers().get("Content-Type").unwrap(), JSON_CONTENT_TYPE @@ -1151,7 +1158,7 @@ mod tests { for (index_type, distance_type, index) in cases { let table = Table::new_with_handler("my_table", move |request| { assert_eq!(request.method(), "POST"); - 
assert_eq!(request.url().path(), "/table/my_table/create_index/"); + assert_eq!(request.url().path(), "/v1/table/my_table/create_index/"); assert_eq!( request.headers().get("Content-Type").unwrap(), JSON_CONTENT_TYPE @@ -1163,7 +1170,7 @@ mod tests { "index_type": index_type, }); if let Some(distance_type) = distance_type { - expected_body["distance_type"] = distance_type.into(); + expected_body["metric_type"] = distance_type.to_lowercase().into(); } assert_eq!(body, expected_body); @@ -1180,7 +1187,7 @@ mod tests { assert_eq!(request.method(), "POST"); assert_eq!( request.url().path(), - "/table/my_table/index/my_index/stats/" + "/v1/table/my_table/index/my_index/stats/" ); let response_body = serde_json::json!({ @@ -1210,7 +1217,7 @@ mod tests { assert_eq!(request.method(), "POST"); assert_eq!( request.url().path(), - "/table/my_table/index/my_index/stats/" + "/v1/table/my_table/index/my_index/stats/" ); http::Response::builder().status(404).body("").unwrap() diff --git a/rust/lancedb/src/remote/util.rs b/rust/lancedb/src/remote/util.rs index b594ed6e..d13be70f 100644 --- a/rust/lancedb/src/remote/util.rs +++ b/rust/lancedb/src/remote/util.rs @@ -9,7 +9,7 @@ pub fn batches_to_ipc_bytes(batches: impl RecordBatchReader) -> Result> let buf = Vec::with_capacity(WRITE_BUF_SIZE); let mut buf = Cursor::new(buf); { - let mut writer = arrow_ipc::writer::FileWriter::try_new(&mut buf, &batches.schema())?; + let mut writer = arrow_ipc::writer::StreamWriter::try_new(&mut buf, &batches.schema())?; for batch in batches { let batch = batch?; From 4d458d582999916e2cd01e616414e8152498effc Mon Sep 17 00:00:00 2001 From: Gagan Bhullar Date: Tue, 8 Oct 2024 20:41:08 -0600 Subject: [PATCH 11/51] feat(python): drop support for dictionary in Table.add (#1725) PR closes #1706 --- python/python/lancedb/common.py | 2 +- python/python/lancedb/db.py | 4 ++-- python/python/lancedb/table.py | 12 ++++++------ python/python/tests/test_table.py | 18 ++++++++++++++++++ 4 files changed, 27 insertions(+), 9 deletions(-) diff --git a/python/python/lancedb/common.py b/python/python/lancedb/common.py index cc894a72..80e3254d 100644 --- a/python/python/lancedb/common.py +++ b/python/python/lancedb/common.py @@ -20,7 +20,7 @@ from .util import safe_import_pandas pd = safe_import_pandas() -DATA = Union[List[dict], dict, "pd.DataFrame", pa.Table, Iterable[pa.RecordBatch]] +DATA = Union[List[dict], "pd.DataFrame", pa.Table, Iterable[pa.RecordBatch]] VEC = Union[list, np.ndarray, pa.Array, pa.ChunkedArray] URI = Union[str, Path] VECTOR_COLUMN_NAME = "vector" diff --git a/python/python/lancedb/db.py b/python/python/lancedb/db.py index 4a72b4b0..6af4cdb8 100644 --- a/python/python/lancedb/db.py +++ b/python/python/lancedb/db.py @@ -96,7 +96,7 @@ class DBConnection(EnforceOverrides): User must provide at least one of `data` or `schema`. Acceptable types are: - - dict or list-of-dict + - list-of-dict - pandas.DataFrame @@ -579,7 +579,7 @@ class AsyncConnection(object): User must provide at least one of `data` or `schema`. 
Acceptable types are: - - dict or list-of-dict + - list-of-dict - pandas.DataFrame diff --git a/python/python/lancedb/table.py b/python/python/lancedb/table.py index b4f8a2e6..b7e01190 100644 --- a/python/python/lancedb/table.py +++ b/python/python/lancedb/table.py @@ -31,7 +31,6 @@ import pyarrow.compute as pc import pyarrow.fs as pa_fs from lance import LanceDataset from lance.dependencies import _check_for_hugging_face -from lance.vector import vec_to_table from .common import DATA, VEC, VECTOR_COLUMN_NAME from .embeddings import EmbeddingFunctionConfig, EmbeddingFunctionRegistry @@ -87,6 +86,9 @@ def _coerce_to_table(data, schema: Optional[pa.Schema] = None) -> pa.Table: if isinstance(data, LanceModel): raise ValueError("Cannot add a single LanceModel to a table. Use a list.") + if isinstance(data, dict): + raise ValueError("Cannot add a single dictionary to a table. Use a list.") + if isinstance(data, list): # convert to list of dict if data is a bunch of LanceModels if isinstance(data[0], LanceModel): @@ -98,8 +100,6 @@ def _coerce_to_table(data, schema: Optional[pa.Schema] = None) -> pa.Table: return pa.Table.from_batches(data, schema=schema) else: return pa.Table.from_pylist(data) - elif isinstance(data, dict): - return vec_to_table(data) elif _check_for_pandas(data) and isinstance(data, pd.DataFrame): # Do not add schema here, since schema may contains the vector column table = pa.Table.from_pandas(data, preserve_index=False) @@ -554,7 +554,7 @@ class Table(ABC): data: DATA The data to insert into the table. Acceptable types are: - - dict or list-of-dict + - list-of-dict - pandas.DataFrame @@ -1409,7 +1409,7 @@ class LanceTable(Table): Parameters ---------- - data: list-of-dict, dict, pd.DataFrame + data: list-of-dict, pd.DataFrame The data to insert into the table. mode: str The mode to use when writing the data. Valid values are @@ -2348,7 +2348,7 @@ class AsyncTable: data: DATA The data to insert into the table. Acceptable types are: - - dict or list-of-dict + - list-of-dict - pandas.DataFrame diff --git a/python/python/tests/test_table.py b/python/python/tests/test_table.py index cc5ecbd2..65ec7b3c 100644 --- a/python/python/tests/test_table.py +++ b/python/python/tests/test_table.py @@ -193,6 +193,24 @@ def test_empty_table(db): tbl.add(data=data) +def test_add_dictionary(db): + schema = pa.schema( + [ + pa.field("vector", pa.list_(pa.float32(), 2)), + pa.field("item", pa.string()), + pa.field("price", pa.float32()), + ] + ) + tbl = LanceTable.create(db, "test", schema=schema) + data = {"vector": [3.1, 4.1], "item": "foo", "price": 10.0} + with pytest.raises(ValueError) as excep_info: + tbl.add(data=data) + assert ( + str(excep_info.value) + == "Cannot add a single dictionary to a table. Use a list." + ) + + def test_add(db): schema = pa.schema( [ From 607476788ef933cf13cc09f8585373cc547da9c1 Mon Sep 17 00:00:00 2001 From: Will Jones Date: Tue, 8 Oct 2024 20:45:21 -0700 Subject: [PATCH 12/51] feat(rust): `list_indices` in remote SDK (#1726) Implements `list_indices`. 
--------- Co-authored-by: Weston Pace --- rust/lancedb/src/remote/table.rs | 115 ++++++++++++++++++++++++++++++- 1 file changed, 112 insertions(+), 3 deletions(-) diff --git a/rust/lancedb/src/remote/table.rs b/rust/lancedb/src/remote/table.rs index 4c6182ce..d68907f3 100644 --- a/rust/lancedb/src/remote/table.rs +++ b/rust/lancedb/src/remote/table.rs @@ -525,11 +525,57 @@ impl TableInternal for RemoteTable { message: "drop_columns is not yet supported.".into(), }) } + async fn list_indices(&self) -> Result> { - Err(Error::NotSupported { - message: "list_indices is not yet supported.".into(), - }) + // Make request to list the indices + let request = self + .client + .post(&format!("/v1/table/{}/index/list/", self.name)); + let response = self.client.send(request, true).await?; + let response = self.check_table_response(response).await?; + + #[derive(Deserialize)] + struct ListIndicesResponse { + indexes: Vec, + } + + #[derive(Deserialize)] + struct IndexConfigResponse { + index_name: String, + columns: Vec, + } + + let body = response.text().await?; + let body: ListIndicesResponse = serde_json::from_str(&body).map_err(|err| Error::Http { + message: format!( + "Failed to parse list_indices response: {}, body: {}", + err, body + ), + })?; + + // Make request to get stats for each index, so we get the index type. + // This is a bit inefficient, but it's the only way to get the index type. + let mut futures = Vec::with_capacity(body.indexes.len()); + for index in body.indexes { + let future = async move { + match self.index_stats(&index.index_name).await { + Ok(Some(stats)) => Ok(Some(IndexConfig { + name: index.index_name, + index_type: stats.index_type, + columns: index.columns, + })), + Ok(None) => Ok(None), // The index must have been deleted since we listed it. 
+ Err(e) => Err(e), + } + }; + futures.push(future); + } + let results = futures::future::try_join_all(futures).await?; + let index_configs = results.into_iter().flatten().collect(); + + Ok(index_configs) } + async fn index_stats(&self, index_name: &str) -> Result> { let request = self.client.post(&format!( "/v1/table/{}/index/{}/stats/", @@ -1181,6 +1227,69 @@ mod tests { } } + #[tokio::test] + async fn test_list_indices() { + let table = Table::new_with_handler("my_table", |request| { + assert_eq!(request.method(), "POST"); + + let response_body = match request.url().path() { + "/v1/table/my_table/index/list/" => { + serde_json::json!({ + "indexes": [ + { + "index_name": "vector_idx", + "index_uuid": "3fa85f64-5717-4562-b3fc-2c963f66afa6", + "columns": ["vector"], + "index_status": "done", + }, + { + "index_name": "my_idx", + "index_uuid": "34255f64-5717-4562-b3fc-2c963f66afa6", + "columns": ["my_column"], + "index_status": "done", + }, + ] + }) + } + "/v1/table/my_table/index/vector_idx/stats/" => { + serde_json::json!({ + "num_indexed_rows": 100000, + "num_unindexed_rows": 0, + "index_type": "IVF_PQ", + "distance_type": "l2" + }) + } + "/v1/table/my_table/index/my_idx/stats/" => { + serde_json::json!({ + "num_indexed_rows": 100000, + "num_unindexed_rows": 0, + "index_type": "LABEL_LIST" + }) + } + path => panic!("Unexpected path: {}", path), + }; + http::Response::builder() + .status(200) + .body(serde_json::to_string(&response_body).unwrap()) + .unwrap() + }); + + let indices = table.list_indices().await.unwrap(); + let expected = vec![ + IndexConfig { + name: "vector_idx".into(), + index_type: IndexType::IvfPq, + columns: vec!["vector".into()], + }, + IndexConfig { + name: "my_idx".into(), + index_type: IndexType::LabelList, + columns: vec!["my_column".into()], + }, + ]; + assert_eq!(indices, expected); + } + #[tokio::test] async fn test_index_stats() { let table = Table::new_with_handler("my_table", |request| { From 8509f732215a01456ba704fe8722dd88f8550135 Mon Sep 17 00:00:00 2001 From: Will Jones Date: Tue, 8 Oct 2024 21:21:13 -0700 Subject: [PATCH 13/51] feat: better errors for remote SDK (#1722) * Adds nicer errors to remote SDK, that expose useful properties like `request_id` and `status_code`. * Makes sure the Python tracebacks print nicely by mapping the `source` field from a Rust error to the `__cause__` field. --- python/python/lancedb/remote/client.py | 18 +- python/python/lancedb/remote/errors.py | 97 ++++++++++ python/python/tests/test_db.py | 4 +- python/python/tests/test_remote_db.py | 95 ++++++--- python/src/error.rs | 94 ++++++++- rust/lancedb/src/error.rs | 51 +++-- rust/lancedb/src/remote/client.rs | 256 +++++++++++++++++-------- rust/lancedb/src/remote/db.rs | 79 ++++++-- rust/lancedb/src/remote/table.rs | 121 +++++++----- 9 files changed, 622 insertions(+), 193 deletions(-) diff --git a/python/python/lancedb/remote/client.py b/python/python/lancedb/remote/client.py index 5ad9a2d0..d546e92f 100644 --- a/python/python/lancedb/remote/client.py +++ b/python/python/lancedb/remote/client.py @@ -103,19 +103,29 @@ class RestfulLanceDBClient: @staticmethod def _check_status(resp: requests.Response): + # Leaving request id empty for now, as we'll be replacing this impl + # with the Rust one shortly. 
if resp.status_code == 404: - raise LanceDBClientError(f"Not found: {resp.text}") + raise LanceDBClientError( + f"Not found: {resp.text}", request_id="", status_code=404 + ) elif 400 <= resp.status_code < 500: raise LanceDBClientError( - f"Bad Request: {resp.status_code}, error: {resp.text}" + f"Bad Request: {resp.status_code}, error: {resp.text}", + request_id="", + status_code=resp.status_code, ) elif 500 <= resp.status_code < 600: raise LanceDBClientError( - f"Internal Server Error: {resp.status_code}, error: {resp.text}" + f"Internal Server Error: {resp.status_code}, error: {resp.text}", + request_id="", + status_code=resp.status_code, ) elif resp.status_code != 200: raise LanceDBClientError( - f"Unknown Error: {resp.status_code}, error: {resp.text}" + f"Unknown Error: {resp.status_code}, error: {resp.text}", + request_id="", + status_code=resp.status_code, ) @_check_not_closed diff --git a/python/python/lancedb/remote/errors.py b/python/python/lancedb/remote/errors.py index a4d290dc..d8f3fde6 100644 --- a/python/python/lancedb/remote/errors.py +++ b/python/python/lancedb/remote/errors.py @@ -12,5 +12,102 @@ # limitations under the License. +from typing import Optional + + class LanceDBClientError(RuntimeError): + """An error that occurred in the LanceDB client. + + Attributes + ---------- + message: str + The error message. + request_id: str + The id of the request that failed. This can be provided in error reports + to help diagnose the issue. + status_code: int + The HTTP status code of the response. May be None if the request + failed before the response was received. + """ + + def __init__( + self, message: str, request_id: str, status_code: Optional[int] = None + ): + super().__init__(message) + self.request_id = request_id + self.status_code = status_code + + +class HttpError(LanceDBClientError): + """An error that occurred during an HTTP request. + + Attributes + ---------- + message: str + The error message. + request_id: str + The id of the request that failed. This can be provided in error reports + to help diagnose the issue. + status_code: int + The HTTP status code of the response. May be None if the request + failed before the response was received. + """ + pass + + +class RetryError(LanceDBClientError): + """An error that occurs when the client has exceeded the maximum number of retries. + + The retry strategy can be adjusted by setting the + [retry_config](lancedb.remote.ClientConfig.retry_config) in the client + configuration. This is passed in the `client_config` argument of + [connect](lancedb.connect) and [connect_async](lancedb.connect_async). + + The __cause__ attribute of this exception will be the last exception that + caused the retry to fail. It will be an + [HttpError][lancedb.remote.errors.HttpError] instance. + + Attributes + ---------- + message: str + The retry error message, which will describe which retry limit was hit. + request_id: str + The id of the request that failed. This can be provided in error reports + to help diagnose the issue. + request_failures: int + The number of request failures. + connect_failures: int + The number of connect failures. + read_failures: int + The number of read failures. + max_request_failures: int + The maximum number of request failures. + max_connect_failures: int + The maximum number of connect failures. + max_read_failures: int + The maximum number of read failures. + status_code: int + The HTTP status code of the last response. May be None if the request + failed before the response was received. 
+ """ + + def __init__( + self, + message: str, + request_id: str, + request_failures: int, + connect_failures: int, + read_failures: int, + max_request_failures: int, + max_connect_failures: int, + max_read_failures: int, + status_code: Optional[int], + ): + super().__init__(message, request_id, status_code) + self.request_failures = request_failures + self.connect_failures = connect_failures + self.read_failures = read_failures + self.max_request_failures = max_request_failures + self.max_connect_failures = max_connect_failures + self.max_read_failures = max_read_failures diff --git a/python/python/tests/test_db.py b/python/python/tests/test_db.py index 8bd7d3af..2e01343b 100644 --- a/python/python/tests/test_db.py +++ b/python/python/tests/test_db.py @@ -354,7 +354,7 @@ async def test_create_mode_async(tmp_path): ) await db.create_table("test", data=data) - with pytest.raises(RuntimeError): + with pytest.raises(ValueError, match="already exists"): await db.create_table("test", data=data) new_data = pd.DataFrame( @@ -382,7 +382,7 @@ async def test_create_exist_ok_async(tmp_path): ) tbl = await db.create_table("test", data=data) - with pytest.raises(RuntimeError): + with pytest.raises(ValueError, match="already exists"): await db.create_table("test", data=data) # open the table but don't add more rows diff --git a/python/python/tests/test_remote_db.py b/python/python/tests/test_remote_db.py index dee183d9..e03b6636 100644 --- a/python/python/tests/test_remote_db.py +++ b/python/python/tests/test_remote_db.py @@ -1,12 +1,14 @@ # SPDX-License-Identifier: Apache-2.0 # SPDX-FileCopyrightText: Copyright The LanceDB Authors +import contextlib import http.server import threading from unittest.mock import MagicMock import uuid import lancedb +from lancedb.remote.errors import HttpError, RetryError import pyarrow as pa from lancedb.remote.client import VectorQuery, VectorQueryResult import pytest @@ -98,6 +100,33 @@ def make_mock_http_handler(handler): return MockLanceDBHandler +@contextlib.asynccontextmanager +async def mock_lancedb_connection(handler): + with http.server.HTTPServer( + ("localhost", 8080), make_mock_http_handler(handler) + ) as server: + handle = threading.Thread(target=server.serve_forever) + handle.start() + + db = await lancedb.connect_async( + "db://dev", + api_key="fake", + host_override="http://localhost:8080", + client_config={ + "retry_config": {"retries": 2}, + "timeout_config": { + "connect_timeout": 1, + }, + }, + ) + + try: + yield db + finally: + server.shutdown() + handle.join() + + @pytest.mark.asyncio async def test_async_remote_db(): def handler(request): @@ -114,28 +143,50 @@ async def test_async_remote_db(): request.end_headers() request.wfile.write(b'{"tables": []}') - def run_server(): - with http.server.HTTPServer( - ("localhost", 8080), make_mock_http_handler(handler) - ) as server: - # we will only make one request - server.handle_request() + async with mock_lancedb_connection(handler) as db: + table_names = await db.table_names() + assert table_names == [] - handle = threading.Thread(target=run_server) - handle.start() - db = await lancedb.connect_async( - "db://dev", - api_key="fake", - host_override="http://localhost:8080", - client_config={ - "retry_config": {"retries": 2}, - "timeout_config": { - "connect_timeout": 1, - }, - }, - ) - table_names = await db.table_names() - assert table_names == [] +@pytest.mark.asyncio +async def test_http_error(): + request_id_holder = {"request_id": None} - handle.join() + def handler(request): + 
request_id_holder["request_id"] = request.headers["x-request-id"] + + request.send_response(507) + request.end_headers() + request.wfile.write(b"Internal Server Error") + + async with mock_lancedb_connection(handler) as db: + with pytest.raises(HttpError, match="Internal Server Error") as exc_info: + await db.table_names() + + assert exc_info.value.request_id == request_id_holder["request_id"] + assert exc_info.value.status_code == 507 + + +@pytest.mark.asyncio +async def test_retry_error(): + request_id_holder = {"request_id": None} + + def handler(request): + request_id_holder["request_id"] = request.headers["x-request-id"] + + request.send_response(429) + request.end_headers() + request.wfile.write(b"Try again later") + + async with mock_lancedb_connection(handler) as db: + with pytest.raises(RetryError, match="Hit retry limit") as exc_info: + await db.table_names() + + assert exc_info.value.request_id == request_id_holder["request_id"] + assert exc_info.value.status_code == 429 + + cause = exc_info.value.__cause__ + assert isinstance(cause, HttpError) + assert "Try again later" in str(cause) + assert cause.request_id == request_id_holder["request_id"] + assert cause.status_code == 429 diff --git a/python/src/error.rs b/python/src/error.rs index 4688b523..4855b8f5 100644 --- a/python/src/error.rs +++ b/python/src/error.rs @@ -14,7 +14,9 @@ use pyo3::{ exceptions::{PyIOError, PyNotImplementedError, PyOSError, PyRuntimeError, PyValueError}, - PyResult, + intern, + types::{PyAnyMethods, PyNone}, + PyErr, PyResult, Python, }; use lancedb::error::Error as LanceError; @@ -38,12 +40,79 @@ impl PythonErrorExt for std::result::Result { LanceError::InvalidInput { .. } | LanceError::InvalidTableName { .. } | LanceError::TableNotFound { .. } - | LanceError::Schema { .. } => self.value_error(), + | LanceError::Schema { .. } + | LanceError::TableAlreadyExists { .. } => self.value_error(), LanceError::CreateDir { .. } => self.os_error(), LanceError::ObjectStore { .. } => Err(PyIOError::new_err(err.to_string())), LanceError::NotSupported { .. } => { Err(PyNotImplementedError::new_err(err.to_string())) } + LanceError::Http { + request_id, + source, + status_code, + } => Python::with_gil(|py| { + let message = err.to_string(); + let http_err_cls = py + .import_bound(intern!(py, "lancedb.remote.errors"))? + .getattr(intern!(py, "HttpError"))?; + let err = http_err_cls.call1(( + message, + request_id, + status_code.map(|s| s.as_u16()), + ))?; + + if let Some(cause) = source.source() { + // The HTTP error already includes the first cause. But + // we can add the rest of the chain if there is any more. + let cause_err = http_from_rust_error( + py, + cause, + request_id, + status_code.map(|s| s.as_u16()), + )?; + err.setattr(intern!(py, "__cause__"), cause_err)?; + } + + Err(PyErr::from_value_bound(err)) + }), + LanceError::Retry { + request_id, + request_failures, + max_request_failures, + connect_failures, + max_connect_failures, + read_failures, + max_read_failures, + source, + status_code, + } => Python::with_gil(|py| { + let cause_err = http_from_rust_error( + py, + source.as_ref(), + request_id, + status_code.map(|s| s.as_u16()), + )?; + + let message = err.to_string(); + let retry_error_cls = py + .import_bound(intern!(py, "lancedb.remote.errors"))? 
+ .getattr("RetryError")?; + let err = retry_error_cls.call1(( + message, + request_id, + *request_failures, + *connect_failures, + *read_failures, + *max_request_failures, + *max_connect_failures, + *max_read_failures, + status_code.map(|s| s.as_u16()), + ))?; + + err.setattr(intern!(py, "__cause__"), cause_err)?; + Err(PyErr::from_value_bound(err)) + }), _ => self.runtime_error(), }, } @@ -61,3 +130,24 @@ impl PythonErrorExt for std::result::Result { self.map_err(|err| PyValueError::new_err(err.to_string())) } } + +fn http_from_rust_error( + py: Python<'_>, + err: &dyn std::error::Error, + request_id: &str, + status_code: Option, +) -> PyResult { + let message = err.to_string(); + let http_err_cls = py.import("lancedb.remote.errors")?.getattr("HttpError")?; + let py_err = http_err_cls.call1((message, request_id, status_code))?; + + // Reset the traceback since it doesn't provide additional information. + let py_err = py_err.call_method1(intern!(py, "with_traceback"), (PyNone::get_bound(py),))?; + + if let Some(cause) = err.source() { + let cause_err = http_from_rust_error(py, cause, request_id, status_code)?; + py_err.setattr(intern!(py, "__cause__"), cause_err)?; + } + + Ok(PyErr::from_value(py_err)) +} diff --git a/rust/lancedb/src/error.rs b/rust/lancedb/src/error.rs index 77f2373a..37bd8852 100644 --- a/rust/lancedb/src/error.rs +++ b/rust/lancedb/src/error.rs @@ -46,8 +46,37 @@ pub enum Error { ObjectStore { source: object_store::Error }, #[snafu(display("lance error: {source}"))] Lance { source: lance::Error }, - #[snafu(display("Http error: {message}"))] - Http { message: String }, + #[cfg(feature = "remote")] + #[snafu(display("Http error: (request_id={request_id}) {source}"))] + Http { + #[snafu(source(from(reqwest::Error, Box::new)))] + source: Box, + request_id: String, + /// Status code associated with the error, if available. + /// This is not always available, for example when the error is due to a + /// connection failure. It may also be missing if the request was + /// successful but there was an error decoding the response. 
+ status_code: Option, + }, + #[cfg(feature = "remote")] + #[snafu(display( + "Hit retry limit for request_id={request_id} (\ + request_failures={request_failures}/{max_request_failures}, \ + connect_failures={connect_failures}/{max_connect_failures}, \ + read_failures={read_failures}/{max_read_failures})" + ))] + Retry { + request_id: String, + request_failures: u8, + max_request_failures: u8, + connect_failures: u8, + max_connect_failures: u8, + read_failures: u8, + max_read_failures: u8, + #[snafu(source(from(reqwest::Error, Box::new)))] + source: Box, + status_code: Option, + }, #[snafu(display("Arrow error: {source}"))] Arrow { source: ArrowError }, #[snafu(display("LanceDBError: not supported: {message}"))] @@ -98,24 +127,6 @@ impl From> for Error { } } -#[cfg(feature = "remote")] -impl From for Error { - fn from(e: reqwest::Error) -> Self { - Self::Http { - message: e.to_string(), - } - } -} - -#[cfg(feature = "remote")] -impl From for Error { - fn from(e: url::ParseError) -> Self { - Self::Http { - message: e.to_string(), - } - } -} - #[cfg(feature = "polars")] impl From for Error { fn from(source: polars::prelude::PolarsError) -> Self { diff --git a/rust/lancedb/src/remote/client.rs b/rust/lancedb/src/remote/client.rs index a42c0733..83d5a14f 100644 --- a/rust/lancedb/src/remote/client.rs +++ b/rust/lancedb/src/remote/client.rs @@ -216,10 +216,12 @@ impl RestfulLanceDbClient { host_override: Option, client_config: ClientConfig, ) -> Result { - let parsed_url = url::Url::parse(db_url)?; + let parsed_url = url::Url::parse(db_url).map_err(|err| Error::InvalidInput { + message: format!("db_url is not a valid URL. '{db_url}'. Error: {err}"), + })?; debug_assert_eq!(parsed_url.scheme(), "db"); if !parsed_url.has_host() { - return Err(Error::Http { + return Err(Error::InvalidInput { message: format!("Invalid database URL (missing host) '{}'", db_url), }); } @@ -255,7 +257,11 @@ impl RestfulLanceDbClient { host_override.is_some(), )?) 
.user_agent(client_config.user_agent) - .build()?; + .build() + .map_err(|err| Error::Other { + message: "Failed to build HTTP client".into(), + source: Some(Box::new(err)), + })?; let host = match host_override { Some(host_override) => host_override, None => format!("https://{}.{}.api.lancedb.com", db_name, region), @@ -284,7 +290,7 @@ impl RestfulLanceDbClient { let mut headers = HeaderMap::new(); headers.insert( "x-api-key", - HeaderValue::from_str(api_key).map_err(|_| Error::Http { + HeaderValue::from_str(api_key).map_err(|_| Error::InvalidInput { message: "non-ascii api key provided".to_string(), })?, ); @@ -292,7 +298,7 @@ impl RestfulLanceDbClient { let host = format!("{}.local.api.lancedb.com", db_name); headers.insert( "Host", - HeaderValue::from_str(&host).map_err(|_| Error::Http { + HeaderValue::from_str(&host).map_err(|_| Error::InvalidInput { message: format!("non-ascii database name '{}' provided", db_name), })?, ); @@ -300,7 +306,7 @@ impl RestfulLanceDbClient { if has_host_override { headers.insert( "x-lancedb-database", - HeaderValue::from_str(db_name).map_err(|_| Error::Http { + HeaderValue::from_str(db_name).map_err(|_| Error::InvalidInput { message: format!("non-ascii database name '{}' provided", db_name), })?, ); @@ -319,22 +325,30 @@ impl RestfulLanceDbClient { self.client.post(full_uri) } - pub async fn send(&self, req: RequestBuilder, with_retry: bool) -> Result { + pub async fn send(&self, req: RequestBuilder, with_retry: bool) -> Result<(String, Response)> { let (client, request) = req.build_split(); let mut request = request.unwrap(); // Set a request id. // TODO: allow the user to supply this, through middleware? - if request.headers().get(REQUEST_ID_HEADER).is_none() { - let request_id = uuid::Uuid::new_v4(); - let request_id = HeaderValue::from_str(&request_id.to_string()).unwrap(); - request.headers_mut().insert(REQUEST_ID_HEADER, request_id); - } + let request_id = if let Some(request_id) = request.headers().get(REQUEST_ID_HEADER) { + request_id.to_str().unwrap().to_string() + } else { + let request_id = uuid::Uuid::new_v4().to_string(); + let header = HeaderValue::from_str(&request_id).unwrap(); + request.headers_mut().insert(REQUEST_ID_HEADER, header); + request_id + }; if with_retry { - self.send_with_retry_impl(client, request).await + self.send_with_retry_impl(client, request, request_id).await } else { - Ok(self.sender.send(&client, request).await?) + let response = self + .sender + .send(&client, request) + .await + .err_to_http(request_id.clone())?; + Ok((request_id, response)) } } @@ -342,98 +356,178 @@ impl RestfulLanceDbClient { &self, client: reqwest::Client, req: Request, - ) -> Result { - let mut request_failures = 0; - let mut connect_failures = 0; - let mut read_failures = 0; + request_id: String, + ) -> Result<(String, Response)> { + let mut retry_counter = RetryCounter::new(&self.retry_config, request_id); loop { // This only works if the request body is not a stream. If it is // a stream, we can't use the retry path. We would need to implement // an outer retry. 
- let request = req.try_clone().ok_or_else(|| Error::Http { + let request = req.try_clone().ok_or_else(|| Error::Runtime { message: "Attempted to retry a request that cannot be cloned".to_string(), })?; - let response = self.sender.send(&client, request).await; - let status_code = response.as_ref().map(|r| r.status()); - match status_code { - Ok(status) if status.is_success() => return Ok(response?), - Ok(status) if self.retry_config.statuses.contains(&status) => { - request_failures += 1; - if request_failures >= self.retry_config.retries { - // TODO: better error - return Err(Error::Runtime { - message: format!( - "Request failed after {} retries with status code {}", - request_failures, status - ), - }); - } + let response = self + .sender + .send(&client, request) + .await + .map(|r| (r.status(), r)); + match response { + Ok((status, response)) if status.is_success() => { + return Ok((retry_counter.request_id, response)) + } + Ok((status, response)) if self.retry_config.statuses.contains(&status) => { + let source = self + .check_response(&retry_counter.request_id, response) + .await + .unwrap_err(); + retry_counter.increment_request_failures(source)?; } Err(err) if err.is_connect() => { - connect_failures += 1; - if connect_failures >= self.retry_config.connect_retries { - return Err(Error::Runtime { - message: format!( - "Request failed after {} connect retries with error: {}", - connect_failures, err - ), - }); - } + retry_counter.increment_connect_failures(err)?; } Err(err) if err.is_timeout() || err.is_body() || err.is_decode() => { - read_failures += 1; - if read_failures >= self.retry_config.read_retries { - return Err(Error::Runtime { - message: format!( - "Request failed after {} read retries with error: {}", - read_failures, err - ), - }); - } + retry_counter.increment_read_failures(err)?; } - Ok(_) | Err(_) => return Ok(response?), + Err(err) => { + let status_code = err.status(); + return Err(Error::Http { + source: Box::new(err), + request_id: retry_counter.request_id, + status_code, + }); + } + Ok((_, response)) => return Ok((retry_counter.request_id, response)), } - let backoff = self.retry_config.backoff_factor * (2.0f32.powi(request_failures as i32)); - let jitter = rand::random::() * self.retry_config.backoff_jitter; - let sleep_time = Duration::from_secs_f32(backoff + jitter); - debug!( - "Retrying request {:?} ({}/{} connect, {}/{} read, {}/{} read) in {:?}", - req.headers() - .get("x-request-id") - .and_then(|v| v.to_str().ok()), - connect_failures, - self.retry_config.connect_retries, - request_failures, - self.retry_config.retries, - read_failures, - self.retry_config.read_retries, - sleep_time - ); + let sleep_time = retry_counter.next_sleep_time(); tokio::time::sleep(sleep_time).await; } } - async fn rsp_to_str(response: Response) -> String { + pub async fn check_response(&self, request_id: &str, response: Response) -> Result { + // Try to get the response text, but if that fails, just return the status code let status = response.status(); - response.text().await.unwrap_or_else(|_| status.to_string()) + if status.is_success() { + Ok(response) + } else { + let response_text = response.text().await.ok(); + let message = if let Some(response_text) = response_text { + format!("{}: {}", status, response_text) + } else { + status.to_string() + }; + Err(Error::Http { + source: message.into(), + request_id: request_id.into(), + status_code: Some(status), + }) + } + } +} + +struct RetryCounter<'a> { + request_failures: u8, + connect_failures: u8, + read_failures: u8, + 
config: &'a ResolvedRetryConfig, + request_id: String, +} + +impl<'a> RetryCounter<'a> { + fn new(config: &'a ResolvedRetryConfig, request_id: String) -> Self { + Self { + request_failures: 0, + connect_failures: 0, + read_failures: 0, + config, + request_id, + } } - pub async fn check_response(&self, response: Response) -> Result { - let status_int: u16 = u16::from(response.status()); - if (400..500).contains(&status_int) { - Err(Error::InvalidInput { - message: Self::rsp_to_str(response).await, - }) - } else if status_int != 200 { - Err(Error::Runtime { - message: Self::rsp_to_str(response).await, + fn check_out_of_retries( + &self, + source: Box, + status_code: Option, + ) -> Result<()> { + if self.request_failures >= self.config.retries + || self.connect_failures >= self.config.connect_retries + || self.read_failures >= self.config.read_retries + { + Err(Error::Retry { + request_id: self.request_id.clone(), + request_failures: self.request_failures, + max_request_failures: self.config.retries, + connect_failures: self.connect_failures, + max_connect_failures: self.config.connect_retries, + read_failures: self.read_failures, + max_read_failures: self.config.read_retries, + source, + status_code, }) } else { - Ok(response) + Ok(()) } } + + fn increment_request_failures(&mut self, source: crate::Error) -> Result<()> { + self.request_failures += 1; + let status_code = if let crate::Error::Http { status_code, .. } = &source { + *status_code + } else { + None + }; + self.check_out_of_retries(Box::new(source), status_code) + } + + fn increment_connect_failures(&mut self, source: reqwest::Error) -> Result<()> { + self.connect_failures += 1; + let status_code = source.status(); + self.check_out_of_retries(Box::new(source), status_code) + } + + fn increment_read_failures(&mut self, source: reqwest::Error) -> Result<()> { + self.read_failures += 1; + let status_code = source.status(); + self.check_out_of_retries(Box::new(source), status_code) + } + + fn next_sleep_time(&self) -> Duration { + let backoff = self.config.backoff_factor * (2.0f32.powi(self.request_failures as i32)); + let jitter = rand::random::() * self.config.backoff_jitter; + let sleep_time = Duration::from_secs_f32(backoff + jitter); + debug!( + "Retrying request {:?} ({}/{} connect, {}/{} read, {}/{} read) in {:?}", + self.request_id, + self.connect_failures, + self.config.connect_retries, + self.request_failures, + self.config.retries, + self.read_failures, + self.config.read_retries, + sleep_time + ); + sleep_time + } +} + +pub trait RequestResultExt { + type Output; + fn err_to_http(self, request_id: String) -> Result; +} + +impl RequestResultExt for reqwest::Result { + type Output = T; + fn err_to_http(self, request_id: String) -> Result { + self.map_err(|err| { + let status_code = err.status(); + Error::Http { + source: Box::new(err), + request_id, + status_code, + } + }) + } } #[cfg(test)] diff --git a/rust/lancedb/src/remote/db.rs b/rust/lancedb/src/remote/db.rs index 036e5e7c..8fe415be 100644 --- a/rust/lancedb/src/remote/db.rs +++ b/rust/lancedb/src/remote/db.rs @@ -29,7 +29,7 @@ use crate::embeddings::EmbeddingRegistry; use crate::error::Result; use crate::Table; -use super::client::{ClientConfig, HttpSend, RestfulLanceDbClient, Sender}; +use super::client::{ClientConfig, HttpSend, RequestResultExt, RestfulLanceDbClient, Sender}; use super::table::RemoteTable; use super::util::batches_to_ipc_bytes; use super::ARROW_STREAM_CONTENT_TYPE; @@ -105,9 +105,13 @@ impl ConnectionInternal for RemoteDatabase { if let 
Some(start_after) = options.start_after { req = req.query(&[("page_token", start_after)]); } - let rsp = self.client.send(req, true).await?; - let rsp = self.client.check_response(rsp).await?; - let tables = rsp.json::().await?.tables; + let (request_id, rsp) = self.client.send(req, true).await?; + let rsp = self.client.check_response(&request_id, rsp).await?; + let tables = rsp + .json::() + .await + .err_to_http(request_id)? + .tables; for table in &tables { self.table_cache.insert(table.clone(), ()).await; } @@ -130,13 +134,11 @@ impl ConnectionInternal for RemoteDatabase { .client .post(&format!("/v1/table/{}/create/", options.name)) .body(data_buffer) - .header(CONTENT_TYPE, ARROW_STREAM_CONTENT_TYPE) - // This is currently expected by LanceDb cloud but will be removed soon. - .header("x-request-id", "na"); - let rsp = self.client.send(req, false).await?; + .header(CONTENT_TYPE, ARROW_STREAM_CONTENT_TYPE); + let (request_id, rsp) = self.client.send(req, false).await?; if rsp.status() == StatusCode::BAD_REQUEST { - let body = rsp.text().await?; + let body = rsp.text().await.err_to_http(request_id.clone())?; if body.contains("already exists") { return Err(crate::Error::TableAlreadyExists { name: options.name }); } else { @@ -144,7 +146,7 @@ impl ConnectionInternal for RemoteDatabase { } } - self.client.check_response(rsp).await?; + self.client.check_response(&request_id, rsp).await?; self.table_cache.insert(options.name.clone(), ()).await; @@ -160,11 +162,11 @@ impl ConnectionInternal for RemoteDatabase { let req = self .client .get(&format!("/v1/table/{}/describe/", options.name)); - let resp = self.client.send(req, true).await?; + let (request_id, resp) = self.client.send(req, true).await?; if resp.status() == StatusCode::NOT_FOUND { return Err(crate::Error::TableNotFound { name: options.name }); } - self.client.check_response(resp).await?; + self.client.check_response(&request_id, resp).await?; } Ok(Table::new(Arc::new(RemoteTable::new( @@ -178,8 +180,8 @@ impl ConnectionInternal for RemoteDatabase { .client .post(&format!("/v1/table/{}/rename/", current_name)); let req = req.json(&serde_json::json!({ "new_table_name": new_name })); - let resp = self.client.send(req, false).await?; - self.client.check_response(resp).await?; + let (request_id, resp) = self.client.send(req, false).await?; + self.client.check_response(&request_id, resp).await?; self.table_cache.remove(current_name).await; self.table_cache.insert(new_name.into(), ()).await; Ok(()) @@ -187,8 +189,8 @@ impl ConnectionInternal for RemoteDatabase { async fn drop_table(&self, name: &str) -> Result<()> { let req = self.client.post(&format!("/v1/table/{}/drop/", name)); - let resp = self.client.send(req, true).await?; - self.client.check_response(resp).await?; + let (request_id, resp) = self.client.send(req, true).await?; + self.client.check_response(&request_id, resp).await?; self.table_cache.remove(name).await; Ok(()) } @@ -206,16 +208,57 @@ impl ConnectionInternal for RemoteDatabase { #[cfg(test)] mod tests { - use std::sync::Arc; + use std::sync::{Arc, OnceLock}; use arrow_array::{Int32Array, RecordBatch, RecordBatchIterator}; use arrow_schema::{DataType, Field, Schema}; use crate::{ remote::{ARROW_STREAM_CONTENT_TYPE, JSON_CONTENT_TYPE}, - Connection, + Connection, Error, }; + #[tokio::test] + async fn test_retries() { + // We'll record the request_id here, to check it matches the one in the error. 
+ let seen_request_id = Arc::new(OnceLock::new()); + let seen_request_id_ref = seen_request_id.clone(); + let conn = Connection::new_with_handler(move |request| { + // Request id should be the same on each retry. + let request_id = request.headers()["x-request-id"] + .to_str() + .unwrap() + .to_string(); + let seen_id = seen_request_id_ref.get_or_init(|| request_id.clone()); + assert_eq!(&request_id, seen_id); + + http::Response::builder() + .status(500) + .body("internal server error") + .unwrap() + }); + let result = conn.table_names().execute().await; + if let Err(Error::Retry { + request_id, + request_failures, + max_request_failures, + source, + .. + }) = result + { + let expected_id = seen_request_id.get().unwrap(); + assert_eq!(&request_id, expected_id); + assert_eq!(request_failures, max_request_failures); + assert!( + source.to_string().contains("internal server error"), + "source: {:?}", + source + ); + } else { + panic!("unexpected result: {:?}", result); + }; + } + #[tokio::test] async fn test_table_names() { let conn = Connection::new_with_handler(|request| { diff --git a/rust/lancedb/src/remote/table.rs b/rust/lancedb/src/remote/table.rs index d68907f3..81fb7a90 100644 --- a/rust/lancedb/src/remote/table.rs +++ b/rust/lancedb/src/remote/table.rs @@ -34,6 +34,7 @@ use crate::{ }, }; +use super::client::RequestResultExt; use super::client::{HttpSend, RestfulLanceDbClient, Sender}; use super::{ARROW_STREAM_CONTENT_TYPE, JSON_CONTENT_TYPE}; @@ -53,15 +54,25 @@ impl RemoteTable { let request = self .client .post(&format!("/v1/table/{}/describe/", self.name)); - let response = self.client.send(request, true).await?; + let (request_id, response) = self.client.send(request, true).await?; - let response = self.check_table_response(response).await?; + let response = self.check_table_response(&request_id, response).await?; - let body = response.text().await?; - - serde_json::from_str(&body).map_err(|e| Error::Http { - message: format!("Failed to parse table description: {}", e), - }) + match response.text().await { + Ok(body) => serde_json::from_str(&body).map_err(|e| Error::Http { + source: format!("Failed to parse table description: {}", e).into(), + request_id, + status_code: None, + }), + Err(err) => { + let status_code = err.status(); + Err(Error::Http { + source: Box::new(err), + request_id, + status_code, + }) + } + } } fn reader_as_body(data: Box) -> Result { @@ -87,18 +98,23 @@ impl RemoteTable { Ok(reqwest::Body::wrap_stream(body_stream)) } - async fn check_table_response(&self, response: reqwest::Response) -> Result { + async fn check_table_response( + &self, + request_id: &str, + response: reqwest::Response, + ) -> Result { if response.status() == StatusCode::NOT_FOUND { return Err(Error::TableNotFound { name: self.name.clone(), }); } - self.client.check_response(response).await + self.client.check_response(request_id, response).await } async fn read_arrow_stream( &self, + request_id: &str, body: reqwest::Response, ) -> Result { // Assert that the content type is correct @@ -106,24 +122,31 @@ impl RemoteTable { .headers() .get(CONTENT_TYPE) .ok_or_else(|| Error::Http { - message: "Missing content type".into(), + source: "Missing content type".into(), + request_id: request_id.to_string(), + status_code: None, })? 
.to_str() .map_err(|e| Error::Http { - message: format!("Failed to parse content type: {}", e), + source: format!("Failed to parse content type: {}", e).into(), + request_id: request_id.to_string(), + status_code: None, })?; if content_type != ARROW_STREAM_CONTENT_TYPE { return Err(Error::Http { - message: format!( + source: format!( "Expected content type {}, got {}", ARROW_STREAM_CONTENT_TYPE, content_type - ), + ) + .into(), + request_id: request_id.to_string(), + status_code: None, }); } // There isn't a way to actually stream this data yet. I have an upstream issue: // https://github.com/apache/arrow-rs/issues/6420 - let body = body.bytes().await?; + let body = body.bytes().await.err_to_http(request_id.into())?; let reader = StreamReader::try_new(body.reader(), None)?; let schema = reader.schema(); let stream = futures::stream::iter(reader).map_err(DataFusionError::from); @@ -259,14 +282,16 @@ impl TableInternal for RemoteTable { request = request.json(&serde_json::json!({})); } - let response = self.client.send(request, true).await?; + let (request_id, response) = self.client.send(request, true).await?; - let response = self.check_table_response(response).await?; + let response = self.check_table_response(&request_id, response).await?; - let body = response.text().await?; + let body = response.text().await.err_to_http(request_id.clone())?; serde_json::from_str(&body).map_err(|e| Error::Http { - message: format!("Failed to parse row count: {}", e), + source: format!("Failed to parse row count: {}", e).into(), + request_id, + status_code: None, }) } async fn add( @@ -288,9 +313,9 @@ impl TableInternal for RemoteTable { } } - let response = self.client.send(request, false).await?; + let (request_id, response) = self.client.send(request, false).await?; - self.check_table_response(response).await?; + self.check_table_response(&request_id, response).await?; Ok(()) } @@ -339,9 +364,9 @@ impl TableInternal for RemoteTable { let request = request.json(&body); - let response = self.client.send(request, true).await?; + let (request_id, response) = self.client.send(request, true).await?; - let stream = self.read_arrow_stream(response).await?; + let stream = self.read_arrow_stream(&request_id, response).await?; Ok(Arc::new(OneShotExec::new(stream))) } @@ -361,9 +386,9 @@ impl TableInternal for RemoteTable { let request = request.json(&body); - let response = self.client.send(request, true).await?; + let (request_id, response) = self.client.send(request, true).await?; - let stream = self.read_arrow_stream(response).await?; + let stream = self.read_arrow_stream(&request_id, response).await?; Ok(DatasetRecordBatchStream::new(stream)) } @@ -383,17 +408,20 @@ impl TableInternal for RemoteTable { "only_if": update.filter, })); - let response = self.client.send(request, false).await?; + let (request_id, response) = self.client.send(request, false).await?; - let response = self.check_table_response(response).await?; + let response = self.check_table_response(&request_id, response).await?; - let body = response.text().await?; + let body = response.text().await.err_to_http(request_id.clone())?; serde_json::from_str(&body).map_err(|e| Error::Http { - message: format!( + source: format!( "Failed to parse updated rows result from response {}: {}", body, e - ), + ) + .into(), + request_id, + status_code: None, }) } async fn delete(&self, predicate: &str) -> Result<()> { @@ -402,8 +430,8 @@ impl TableInternal for RemoteTable { .client .post(&format!("/v1/table/{}/delete/", self.name)) .json(&body); - let 
response = self.client.send(request, false).await?; - self.check_table_response(response).await?; + let (request_id, response) = self.client.send(request, false).await?; + self.check_table_response(&request_id, response).await?; Ok(()) } @@ -474,9 +502,9 @@ impl TableInternal for RemoteTable { let request = request.json(&body); - let response = self.client.send(request, false).await?; + let (request_id, response) = self.client.send(request, false).await?; - self.check_table_response(response).await?; + self.check_table_response(&request_id, response).await?; Ok(()) } @@ -495,9 +523,9 @@ impl TableInternal for RemoteTable { .header(CONTENT_TYPE, ARROW_STREAM_CONTENT_TYPE) .body(body); - let response = self.client.send(request, false).await?; + let (request_id, response) = self.client.send(request, false).await?; - self.check_table_response(response).await?; + self.check_table_response(&request_id, response).await?; Ok(()) } @@ -531,8 +559,8 @@ impl TableInternal for RemoteTable { let request = self .client .post(&format!("/v1/table/{}/index/list/", self.name)); - let response = self.client.send(request, true).await?; - let response = self.check_table_response(response).await?; + let (request_id, response) = self.client.send(request, true).await?; + let response = self.check_table_response(&request_id, response).await?; #[derive(Deserialize)] struct ListIndicesResponse { @@ -545,12 +573,15 @@ impl TableInternal for RemoteTable { columns: Vec, } - let body = response.text().await?; + let body = response.text().await.err_to_http(request_id.clone())?; let body: ListIndicesResponse = serde_json::from_str(&body).map_err(|err| Error::Http { - message: format!( + source: format!( "Failed to parse list_indices response: {}, body: {}", err, body - ), + ) + .into(), + request_id, + status_code: None, })?; // Make request to get stats for each index, so we get the index type. @@ -581,18 +612,20 @@ impl TableInternal for RemoteTable { "/v1/table/{}/index/{}/stats/", self.name, index_name )); - let response = self.client.send(request, true).await?; + let (request_id, response) = self.client.send(request, true).await?; if response.status() == StatusCode::NOT_FOUND { return Ok(None); } - let response = self.check_table_response(response).await?; + let response = self.check_table_response(&request_id, response).await?; - let body = response.text().await?; + let body = response.text().await.err_to_http(request_id.clone())?; let stats = serde_json::from_str(&body).map_err(|e| Error::Http { - message: format!("Failed to parse index statistics: {}", e), + source: format!("Failed to parse index statistics: {}", e).into(), + request_id, + status_code: None, })?; Ok(Some(stats)) From aff25e3bf9b243c631c06ccc8105169ec20a887e Mon Sep 17 00:00:00 2001 From: Will Jones Date: Tue, 8 Oct 2024 22:03:53 -0700 Subject: [PATCH 14/51] fix(node): add native packages to bump version (#1738) We weren't bumping the version, so when users downloaded our package from npm, they were getting the old binaries. 
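For context, the fix adds one search/replace entry per prebuilt native package to
`.bumpversion.toml`, so the exact versions pinned under `optionalDependencies` in
`node/package.json` are rewritten alongside the main package version on each release.
As a sketch (mirroring the darwin-arm64 entry added in the diff below), each entry
looks like this:

```toml
# One block per @lancedb/vectordb-* native package; the version-bump tooling
# substitutes {current_version} -> {new_version} in node/package.json.
[[tool.bumpversion.files]]
glob = "node/package.json"
search = "\"@lancedb/vectordb-darwin-arm64\": \"{current_version}\""
replace = "\"@lancedb/vectordb-darwin-arm64\": \"{new_version}\""
```

The regenerated `node/package-lock.json` then resolves these pins to the matching
prebuilt binaries instead of the stale ones.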
--- .bumpversion.toml | 26 + node/package-lock.json | 1436 ++++++++++++++++------------- node/package.json | 12 +- node/src/integration_test/test.ts | 2 +- node/src/query.ts | 4 +- node/src/remote/index.ts | 4 +- node/src/test/test.ts | 5 +- 7 files changed, 842 insertions(+), 647 deletions(-) diff --git a/.bumpversion.toml b/.bumpversion.toml index fb8c0ee9..7e0caac8 100644 --- a/.bumpversion.toml +++ b/.bumpversion.toml @@ -66,6 +66,32 @@ glob = "nodejs/npm/*/package.json" replace = "\"version\": \"{new_version}\"," search = "\"version\": \"{current_version}\"," +# vectodb node binary packages +[[tool.bumpversion.files]] +glob = "node/package.json" +replace = "\"@lancedb/vectordb-darwin-arm64\": \"{new_version}\"" +search = "\"@lancedb/vectordb-darwin-arm64\": \"{current_version}\"" + +[[tool.bumpversion.files]] +glob = "node/package.json" +replace = "\"@lancedb/vectordb-darwin-x64\": \"{new_version}\"" +search = "\"@lancedb/vectordb-darwin-x64\": \"{current_version}\"" + +[[tool.bumpversion.files]] +glob = "node/package.json" +replace = "\"@lancedb/vectordb-linux-arm64-gnu\": \"{new_version}\"" +search = "\"@lancedb/vectordb-linux-arm64-gnu\": \"{current_version}\"" + +[[tool.bumpversion.files]] +glob = "node/package.json" +replace = "\"@lancedb/vectordb-linux-x64-gnu\": \"{new_version}\"" +search = "\"@lancedb/vectordb-linux-x64-gnu\": \"{current_version}\"" + +[[tool.bumpversion.files]] +glob = "node/package.json" +replace = "\"@lancedb/vectordb-win32-x64-msvc\": \"{new_version}\"" +search = "\"@lancedb/vectordb-win32-x64-msvc\": \"{current_version}\"" + # Cargo files # ------------ [[tool.bumpversion.files]] diff --git a/node/package-lock.json b/node/package-lock.json index 4eb024f3..0544209b 100644 --- a/node/package-lock.json +++ b/node/package-lock.json @@ -48,15 +48,15 @@ "ts-node-dev": "^2.0.0", "typedoc": "^0.24.7", "typedoc-plugin-markdown": "^3.15.3", - "typescript": "*", + "typescript": "^5.1.0", "uuid": "^9.0.0" }, "optionalDependencies": { - "@lancedb/vectordb-darwin-arm64": "0.4.20", - "@lancedb/vectordb-darwin-x64": "0.4.20", - "@lancedb/vectordb-linux-arm64-gnu": "0.4.20", - "@lancedb/vectordb-linux-x64-gnu": "0.4.20", - "@lancedb/vectordb-win32-x64-msvc": "0.4.20" + "@lancedb/vectordb-darwin-arm64": "0.11.0-beta.1", + "@lancedb/vectordb-darwin-x64": "0.11.0-beta.1", + "@lancedb/vectordb-linux-arm64-gnu": "0.11.0-beta.1", + "@lancedb/vectordb-linux-x64-gnu": "0.11.0-beta.1", + "@lancedb/vectordb-win32-x64-msvc": "0.11.0-beta.1" }, "peerDependencies": { "@apache-arrow/ts": "^14.0.2", @@ -64,11 +64,11 @@ } }, "node_modules/@75lb/deep-merge": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/@75lb/deep-merge/-/deep-merge-1.1.1.tgz", - "integrity": "sha512-xvgv6pkMGBA6GwdyJbNAnDmfAIR/DfWhrj9jgWh3TY7gRm3KO46x/GPjRg6wJ0nOepwqrNxFfojebh0Df4h4Tw==", + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@75lb/deep-merge/-/deep-merge-1.1.2.tgz", + "integrity": "sha512-08K9ou5VNbheZFxM5tDWoqjA3ImC50DiuuJ2tj1yEPRfkp8lLLg6XAaJ4On+a0yAXor/8ay5gHnAIshRM44Kpw==", "dependencies": { - "lodash.assignwith": "^4.2.0", + "lodash": "^4.17.21", "typical": "^7.1.1" }, "engines": { @@ -76,22 +76,13 @@ } }, "node_modules/@75lb/deep-merge/node_modules/typical": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/typical/-/typical-7.1.1.tgz", - "integrity": "sha512-T+tKVNs6Wu7IWiAce5BgMd7OZfNYUndHwc5MknN+UHOudi7sGZzuHdCadllRuqJ3fPtgFtIH9+lt9qRv6lmpfA==", + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/typical/-/typical-7.2.0.tgz", + 
"integrity": "sha512-W1+HdVRUl8fS3MZ9ogD51GOb46xMmhAZzR0WPw5jcgIZQJVvkddYzAl4YTU6g5w33Y1iRQLdIi2/1jhi2RNL0g==", "engines": { "node": ">=12.17" } }, - "node_modules/@aashutoshrathi/word-wrap": { - "version": "1.2.6", - "resolved": "https://registry.npmjs.org/@aashutoshrathi/word-wrap/-/word-wrap-1.2.6.tgz", - "integrity": "sha512-1Yjs2SvM8TflER/OD3cOjhWWOZb58A2t7wpE2S9XfBYTiIl+XFhQG2bjy4Pu1I+EAlCNUzRDYDdFwFYUKvXcIA==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/@apache-arrow/ts": { "version": "14.0.2", "resolved": "https://registry.npmjs.org/@apache-arrow/ts/-/ts-14.0.2.tgz", @@ -235,9 +226,9 @@ } }, "node_modules/@eslint-community/regexpp": { - "version": "4.10.0", - "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.10.0.tgz", - "integrity": "sha512-Cu96Sd2By9mCNTx2iyKOmq10v22jUVQv0lQnlGNy16oE9589yE+QADPbrMGCkA51cKZSg3Pu/aTJVTGfL/qjUA==", + "version": "4.11.1", + "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.11.1.tgz", + "integrity": "sha512-m4DVN9ZqskZoLU5GlWZadwDnYo3vAEydiUayB9widCl9ffWx2IvPnp6n3on5rJmziJSw9Bv+Z3ChDVdMwXCY8Q==", "dev": true, "engines": { "node": "^12.0.0 || ^14.0.0 || >=16.0.0" @@ -267,21 +258,22 @@ } }, "node_modules/@eslint/js": { - "version": "8.56.0", - "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.56.0.tgz", - "integrity": "sha512-gMsVel9D7f2HLkBma9VbtzZRehRogVRfbr++f06nL2vnCGCNlzOD+/MUov/F4p8myyAHspEhVobgjpX64q5m6A==", + "version": "8.57.1", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.57.1.tgz", + "integrity": "sha512-d9zaMRSTIKDLhctzH12MtXvJKSSUhaHcjV+2Z+GK+EEY7XKpP5yR4x+N3TAcHTcu963nIr+TMcCb4DBCYX1z6Q==", "dev": true, "engines": { "node": "^12.22.0 || ^14.17.0 || >=16.0.0" } }, "node_modules/@humanwhocodes/config-array": { - "version": "0.11.14", - "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.14.tgz", - "integrity": "sha512-3T8LkOmg45BV5FICb15QQMsyUSWrQ8AygVfC7ZG32zOalnqrilm018ZVCw0eapXux8FtA33q8PSRSstjee3jSg==", + "version": "0.13.0", + "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.13.0.tgz", + "integrity": "sha512-DZLEEqFWQFiyK6h5YIeynKx7JlvCYWL0cImfSRXZ9l4Sg2efkFGTuFf6vzXjK1cq6IYkU+Eg/JizXw+TD2vRNw==", + "deprecated": "Use @eslint/config-array instead", "dev": true, "dependencies": { - "@humanwhocodes/object-schema": "^2.0.2", + "@humanwhocodes/object-schema": "^2.0.3", "debug": "^4.3.1", "minimatch": "^3.0.5" }, @@ -303,24 +295,25 @@ } }, "node_modules/@humanwhocodes/object-schema": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-2.0.2.tgz", - "integrity": "sha512-6EwiSjwWYP7pTckG6I5eyFANjPhmPjUX9JRLUSfNPC7FX7zK9gyZAfUEaECL6ALTpGX5AjnBq3C9XmVWPitNpw==", + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-2.0.3.tgz", + "integrity": "sha512-93zYdMES/c1D69yZiKDBj0V24vqNzB/koF26KPaagAfd3P/4gUlh3Dys5ogAK+Exi9QyzlD8x/08Zt7wIKcDcA==", + "deprecated": "Use @eslint/object-schema instead", "dev": true }, "node_modules/@jridgewell/resolve-uri": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.1.tgz", - "integrity": "sha512-dSYZh7HhCDtCKm4QakX0xFpsRDqjjtZf/kjI/v3T3Nwt5r8/qz/M19F9ySyOqU94SXBmeG9ttTul+YnR4LOxFA==", + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", + "integrity": 
"sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", "dev": true, "engines": { "node": ">=6.0.0" } }, "node_modules/@jridgewell/sourcemap-codec": { - "version": "1.4.15", - "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.15.tgz", - "integrity": "sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg==", + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.0.tgz", + "integrity": "sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==", "dev": true }, "node_modules/@jridgewell/trace-mapping": { @@ -334,9 +327,9 @@ } }, "node_modules/@lancedb/vectordb-darwin-arm64": { - "version": "0.4.20", - "resolved": "https://registry.npmjs.org/@lancedb/vectordb-darwin-arm64/-/vectordb-darwin-arm64-0.4.20.tgz", - "integrity": "sha512-ffP2K4sA5mQTgePyARw1y8dPN996FmpvyAYoWO+TSItaXlhcXvc+KVa5udNMCZMDYeEnEv2Xpj6k4PwW3oBz+A==", + "version": "0.11.0-beta.1", + "resolved": "https://registry.npmjs.org/@lancedb/vectordb-darwin-arm64/-/vectordb-darwin-arm64-0.11.0-beta.1.tgz", + "integrity": "sha512-qKQbFJwstMQEO2MVkkipyDxmH3/KafkuC4xfU8LjMtZ98ZGTQIW47t/OyftiUXYWcjsVxeXI3l2m9MCozFOdhg==", "cpu": [ "arm64" ], @@ -346,9 +339,9 @@ ] }, "node_modules/@lancedb/vectordb-darwin-x64": { - "version": "0.4.20", - "resolved": "https://registry.npmjs.org/@lancedb/vectordb-darwin-x64/-/vectordb-darwin-x64-0.4.20.tgz", - "integrity": "sha512-GSYsXE20RIehDu30FjREhJdEzhnwOTV7ZsrSXagStzLY1gr7pyd7sfqxmmUtdD09di7LnQoiM71AOpPTa01YwQ==", + "version": "0.11.0-beta.1", + "resolved": "https://registry.npmjs.org/@lancedb/vectordb-darwin-x64/-/vectordb-darwin-x64-0.11.0-beta.1.tgz", + "integrity": "sha512-245Q5hjQKljczBcDLbiq3N5fmUaY2zFRHoW6SBxOziQwyMphhLDSTNkAYkc3JnrQvf6dMolVYWigOsRVCFj56A==", "cpu": [ "x64" ], @@ -358,9 +351,9 @@ ] }, "node_modules/@lancedb/vectordb-linux-arm64-gnu": { - "version": "0.4.20", - "resolved": "https://registry.npmjs.org/@lancedb/vectordb-linux-arm64-gnu/-/vectordb-linux-arm64-gnu-0.4.20.tgz", - "integrity": "sha512-FpNOjOsz3nJVm6EBGyNgbOW2aFhsWZ/igeY45Z8hbZaaK2YBwrg/DASoNlUzgv6IR8cUaGJ2irNVJfsKR2cG6g==", + "version": "0.11.0-beta.1", + "resolved": "https://registry.npmjs.org/@lancedb/vectordb-linux-arm64-gnu/-/vectordb-linux-arm64-gnu-0.11.0-beta.1.tgz", + "integrity": "sha512-B4z6sx4X6uqGDnQm3zL5mL47Agn4X4spf/nlxtrUWEfiOAyp9Iw465UQMmrbnodi+4k/BNjCNZNMFSjMOSsrcA==", "cpu": [ "arm64" ], @@ -370,9 +363,9 @@ ] }, "node_modules/@lancedb/vectordb-linux-x64-gnu": { - "version": "0.4.20", - "resolved": "https://registry.npmjs.org/@lancedb/vectordb-linux-x64-gnu/-/vectordb-linux-x64-gnu-0.4.20.tgz", - "integrity": "sha512-pOqWjrRZQSrLTlQPkjidRii7NZDw8Xu9pN6ouVu2JAK8n81FXaPtFCyAI+Y3v9GpnYDN0rvD4eQ36aHAVPsa2g==", + "version": "0.11.0-beta.1", + "resolved": "https://registry.npmjs.org/@lancedb/vectordb-linux-x64-gnu/-/vectordb-linux-x64-gnu-0.11.0-beta.1.tgz", + "integrity": "sha512-0vWcPqpe3to78bYkc+3XWZToRu6TMrhLJAxC9cnV5d9GMuN1VbDoLqD8QPRWkoEr9Nk7jdIRKEBUwfq5yGOFLw==", "cpu": [ "x64" ], @@ -382,9 +375,9 @@ ] }, "node_modules/@lancedb/vectordb-win32-x64-msvc": { - "version": "0.4.20", - "resolved": "https://registry.npmjs.org/@lancedb/vectordb-win32-x64-msvc/-/vectordb-win32-x64-msvc-0.4.20.tgz", - "integrity": "sha512-5J5SsYSJ7jRCmU/sgwVHdrGz43B/7R2T9OEoFTKyVAtqTZdu75rkytXyn9SyEayXVhlUOaw76N0ASm0hAoDS/A==", + "version": "0.11.0-beta.1", + "resolved": 
"https://registry.npmjs.org/@lancedb/vectordb-win32-x64-msvc/-/vectordb-win32-x64-msvc-0.11.0-beta.1.tgz", + "integrity": "sha512-jU/+w2TfA4HKOZkib1UP4kIpaLgu+88S/t+Ccde67w/4qQuP0uAixTAls1WE4mtlf6pOnG0A1ILTY98nVkIQ3A==", "cpu": [ "x64" ], @@ -451,6 +444,12 @@ "node": ">= 8" } }, + "node_modules/@rtsao/scc": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@rtsao/scc/-/scc-1.1.0.tgz", + "integrity": "sha512-zt6OdqaDoOnJ1ZYsCYGt9YmWzDXl4vQdKTyJev62gFhRGKdx7mcT54V9KIjg+d2wi9EXsPvAPKe7i7WjfVWB8g==", + "dev": true + }, "node_modules/@sinonjs/commons": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/@sinonjs/commons/-/commons-3.0.1.tgz", @@ -460,6 +459,15 @@ "type-detect": "4.0.8" } }, + "node_modules/@sinonjs/commons/node_modules/type-detect": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz", + "integrity": "sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==", + "dev": true, + "engines": { + "node": ">=4" + } + }, "node_modules/@sinonjs/fake-timers": { "version": "10.3.0", "resolved": "https://registry.npmjs.org/@sinonjs/fake-timers/-/fake-timers-10.3.0.tgz", @@ -470,35 +478,26 @@ } }, "node_modules/@sinonjs/samsam": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/@sinonjs/samsam/-/samsam-8.0.0.tgz", - "integrity": "sha512-Bp8KUVlLp8ibJZrnvq2foVhP0IVX2CIprMJPK0vqGqgrDa0OHVKeZyBykqskkrdxV6yKBPmGasO8LVjAKR3Gew==", + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/@sinonjs/samsam/-/samsam-8.0.2.tgz", + "integrity": "sha512-v46t/fwnhejRSFTGqbpn9u+LQ9xJDse10gNnPgAcxgdoCDMXj/G2asWAC/8Qs+BAZDicX+MNZouXT1A7c83kVw==", "dev": true, "dependencies": { - "@sinonjs/commons": "^2.0.0", + "@sinonjs/commons": "^3.0.1", "lodash.get": "^4.4.2", - "type-detect": "^4.0.8" - } - }, - "node_modules/@sinonjs/samsam/node_modules/@sinonjs/commons": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@sinonjs/commons/-/commons-2.0.0.tgz", - "integrity": "sha512-uLa0j859mMrg2slwQYdO/AkrOfmH+X6LTVmNTS9CqexuE2IvVORIkSpJLqePAbEnKJ77aMmCwr1NUZ57120Xcg==", - "dev": true, - "dependencies": { - "type-detect": "4.0.8" + "type-detect": "^4.1.0" } }, "node_modules/@sinonjs/text-encoding": { - "version": "0.7.2", - "resolved": "https://registry.npmjs.org/@sinonjs/text-encoding/-/text-encoding-0.7.2.tgz", - "integrity": "sha512-sXXKG+uL9IrKqViTtao2Ws6dy0znu9sOaP1di/jKGW1M6VssO8vlpXCQcpZ+jisQ1tTFAC5Jo/EOzFbggBagFQ==", + "version": "0.7.3", + "resolved": "https://registry.npmjs.org/@sinonjs/text-encoding/-/text-encoding-0.7.3.tgz", + "integrity": "sha512-DE427ROAphMQzU4ENbliGYrBSYPXF+TtLg9S8vzeA+OF4ZKzoDdzfL8sxuMUGS/lgRhM6j1URSk9ghf7Xo1tyA==", "dev": true }, "node_modules/@tsconfig/node10": { - "version": "1.0.9", - "resolved": "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.9.tgz", - "integrity": "sha512-jNsYVVxU8v5g43Erja32laIDHXeoNvFEpX33OK4d6hljo3jDhCBDhx5dhCCTMWUojscpAagGiRkBKxpdl9fxqA==", + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.11.tgz", + "integrity": "sha512-DcRjDCujK/kCk/cUe8Xz8ZSpm8mS3mNNpta+jGCA6USEDfktlNvm1+IuZ9eTcDbNk41BHwpHHeW+N1lKCz4zOw==", "dev": true }, "node_modules/@tsconfig/node12": { @@ -520,9 +519,9 @@ "dev": true }, "node_modules/@types/chai": { - "version": "4.3.11", - "resolved": "https://registry.npmjs.org/@types/chai/-/chai-4.3.11.tgz", - "integrity": "sha512-qQR1dr2rGIHYlJulmr8Ioq3De0Le9E4MJ5AiaeAETJJpndT1uUNHsGFK3L/UIu+rbkQSdj8J/w2bCsBZc/Y5fQ==", + "version": "4.3.20", + 
"resolved": "https://registry.npmjs.org/@types/chai/-/chai-4.3.20.tgz", + "integrity": "sha512-/pC9HAB5I/xMlc5FP77qjCnI16ChlJfW0tGa0IUcFn38VJrTV6DeZ60NU5KZBtaOZqjdpwTWohz5HU1RrhiYxQ==", "dev": true }, "node_modules/@types/chai-as-promised": { @@ -557,15 +556,15 @@ "dev": true }, "node_modules/@types/mocha": { - "version": "10.0.6", - "resolved": "https://registry.npmjs.org/@types/mocha/-/mocha-10.0.6.tgz", - "integrity": "sha512-dJvrYWxP/UcXm36Qn36fxhUKu8A/xMRXVT2cliFF1Z7UA9liG5Psj3ezNSZw+5puH2czDXRLcXQxf8JbJt0ejg==", + "version": "10.0.9", + "resolved": "https://registry.npmjs.org/@types/mocha/-/mocha-10.0.9.tgz", + "integrity": "sha512-sicdRoWtYevwxjOHNMPTl3vSfJM6oyW8o1wXeI7uww6b6xHg8eBznQDNSGBCDJmsE8UMxP05JgZRtsKbTqt//Q==", "dev": true }, "node_modules/@types/node": { - "version": "18.19.8", - "resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.8.tgz", - "integrity": "sha512-g1pZtPhsvGVTwmeVoexWZLTQaOvXwoSq//pTL0DHeNzUDrFnir4fgETdhjhIxjVnN+hKOuh98+E1eMLnUXstFg==", + "version": "18.19.55", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.55.tgz", + "integrity": "sha512-zzw5Vw52205Zr/nmErSEkN5FLqXPuKX/k5d1D7RKHATGqU7y6YfX9QxZraUzUrFGqH6XzOzG196BC35ltJC4Cw==", "dev": true, "dependencies": { "undici-types": "~5.26.4" @@ -587,9 +586,9 @@ "integrity": "sha512-Xd22WCRBydkGSApl5Bw0PhAOHKSVjNL3E3AwzKaps96IMraPqy5BvZIsBVK6JLwdybUzjHnuWVwpDd0JjTfHXA==" }, "node_modules/@types/semver": { - "version": "7.5.6", - "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.5.6.tgz", - "integrity": "sha512-dn1l8LaMea/IjDoHNd9J52uBbInB796CDffS6VdIxvqYCPSG0V0DzHp76GpaWnlhg88uYyPbXCDIowa86ybd5A==", + "version": "7.5.8", + "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.5.8.tgz", + "integrity": "sha512-I8EUhyrgfLrcTkzV3TSsGyl1tSuPrEDzr0yd5m90UgNxQkyDXULk3b6MlQqTCpZpNtWe1K0hzclnZkTcLBe2UQ==", "dev": true }, "node_modules/@types/sinon": { @@ -629,9 +628,9 @@ } }, "node_modules/@types/uuid": { - "version": "9.0.7", - "resolved": "https://registry.npmjs.org/@types/uuid/-/uuid-9.0.7.tgz", - "integrity": "sha512-WUtIVRUZ9i5dYXefDEAI7sh9/O7jGvHg7Df/5O/gtH3Yabe5odI3UWopVR1qbPXQtvOxWu3mM4XxlYeZtMWF4g==", + "version": "9.0.8", + "resolved": "https://registry.npmjs.org/@types/uuid/-/uuid-9.0.8.tgz", + "integrity": "sha512-jg+97EGIcY9AGHJJRaaPVgetKDsrTgbRjQ5Msgjh/DQKEFl0DtyRr/VCOyD1T2R1MNeWPK/u7JoGhlDZnKBAfA==", "dev": true }, "node_modules/@typescript-eslint/eslint-plugin": { @@ -841,9 +840,9 @@ } }, "node_modules/acorn": { - "version": "8.11.3", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.11.3.tgz", - "integrity": "sha512-Y9rRfJG5jcKOE0CLisYbojUjIrIEE7AGMzA/Sm4BslANhbS+cDMpgBdcPT91oJ7OuJ9hYJBx59RjbhxVnrF8Xg==", + "version": "8.12.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.12.1.tgz", + "integrity": "sha512-tcpGyI9zbizT9JbV6oYE477V6mTlXvvi0T0G3SNIYE2apm/G5huBa1+K89VGeovbg+jycCrfhl3ADxErOuO6Jg==", "dev": true, "bin": { "acorn": "bin/acorn" @@ -862,10 +861,13 @@ } }, "node_modules/acorn-walk": { - "version": "8.3.2", - "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.3.2.tgz", - "integrity": "sha512-cjkyv4OtNCIeqhHrfS81QWXoCBPExR/J62oyEqepVw8WaQeSqpW2uhuLPh1m9eWhDuOo/jUXVTlifvesOWp/4A==", + "version": "8.3.4", + "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.3.4.tgz", + "integrity": "sha512-ueEepnujpqee2o5aIYnvHU6C0A42MNdsIDeqy5BydrkuC5R1ZuUFnm27EeFJGoEHJQgn3uleRvmTXaJgfXbt4g==", "dev": true, + "dependencies": { + "acorn": "^8.11.0" + }, "engines": { "node": ">=0.4.0" } @@ -899,9 +901,9 @@ } }, 
"node_modules/ansi-colors": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.1.tgz", - "integrity": "sha512-JoX0apGbHaUJBNl6yF+p6JAFYZ666/hhCGKN5t9QFjbJQKUU/g8MNbFDbvfrgKXvI1QpZplPOnwIo99lX/AAmA==", + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.3.tgz", + "integrity": "sha512-/6w/C21Pm1A7aZitlI5Ni/2J6FFQN8i1Cvz3kHABAAbw93v/NlvKdVOqz7CCWz/3iv/JplRSEEZ83XION15ovw==", "dev": true, "engines": { "node": ">=6" @@ -1025,28 +1027,32 @@ } }, "node_modules/array-buffer-byte-length": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/array-buffer-byte-length/-/array-buffer-byte-length-1.0.0.tgz", - "integrity": "sha512-LPuwb2P+NrQw3XhxGc36+XSvuBPopovXYTR9Ew++Du9Yb/bx5AzBfrIsBoj0EZUifjQU+sHL21sseZ3jerWO/A==", + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/array-buffer-byte-length/-/array-buffer-byte-length-1.0.1.tgz", + "integrity": "sha512-ahC5W1xgou+KTXix4sAO8Ki12Q+jf4i0+tmk3sC+zgcynshkHxzpXdImBehiUYKKKDwvfFiJl1tZt6ewscS1Mg==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", - "is-array-buffer": "^3.0.1" + "call-bind": "^1.0.5", + "is-array-buffer": "^3.0.4" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/array-includes": { - "version": "3.1.7", - "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.7.tgz", - "integrity": "sha512-dlcsNBIiWhPkHdOEEKnehA+RNUWDc4UqFtnIXU4uuYDPtA4LDkr7qip2p0VvFAEXNDr0yWZ9PJyIRiGjRLQzwQ==", + "version": "3.1.8", + "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.8.tgz", + "integrity": "sha512-itaWrbYbqpGXkGhZPGUulwnhVf5Hpy1xiCFsGqyIGglbBxmG5vSjxQen3/WGOjPpNEv1RtBLKxbmVXm8HpJStQ==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "es-abstract": "^1.22.1", - "get-intrinsic": "^1.2.1", + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.2", + "es-object-atoms": "^1.0.0", + "get-intrinsic": "^1.2.4", "is-string": "^1.0.7" }, "engines": { @@ -1066,16 +1072,17 @@ } }, "node_modules/array.prototype.findlastindex": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/array.prototype.findlastindex/-/array.prototype.findlastindex-1.2.3.tgz", - "integrity": "sha512-LzLoiOMAxvy+Gd3BAq3B7VeIgPdo+Q8hthvKtXybMvRV0jrXfJM/t8mw7nNlpEcVlVUnCnM2KSX4XU5HmpodOA==", + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/array.prototype.findlastindex/-/array.prototype.findlastindex-1.2.5.tgz", + "integrity": "sha512-zfETvRFA8o7EiNn++N5f/kaCw221hrpGsDmcpndVupkPzEc1Wuf3VgC0qby1BbHs7f5DVYjgtEU2LLh5bqeGfQ==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "es-abstract": "^1.22.1", - "es-shim-unscopables": "^1.0.0", - "get-intrinsic": "^1.2.1" + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.2", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0", + "es-shim-unscopables": "^1.0.2" }, "engines": { "node": ">= 0.4" @@ -1121,17 +1128,18 @@ } }, "node_modules/arraybuffer.prototype.slice": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/arraybuffer.prototype.slice/-/arraybuffer.prototype.slice-1.0.2.tgz", - "integrity": "sha512-yMBKppFur/fbHu9/6USUe03bZ4knMYiwFBcyiaXB8Go0qNehwX6inYPzK9U0NeQvGxKthcmHcaR8P5MStSRBAw==", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/arraybuffer.prototype.slice/-/arraybuffer.prototype.slice-1.0.3.tgz", + "integrity": 
"sha512-bMxMKAjg13EBSVscxTaYA4mRc5t1UAXa2kXiGTNfZ079HIWXEkKmkgFrh/nJqamaLSrXO5H4WFFkPEaLJWbs3A==", "dev": true, "dependencies": { - "array-buffer-byte-length": "^1.0.0", - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "es-abstract": "^1.22.1", - "get-intrinsic": "^1.2.1", - "is-array-buffer": "^3.0.2", + "array-buffer-byte-length": "^1.0.1", + "call-bind": "^1.0.5", + "define-properties": "^1.2.1", + "es-abstract": "^1.22.3", + "es-errors": "^1.2.1", + "get-intrinsic": "^1.2.3", + "is-array-buffer": "^3.0.4", "is-shared-array-buffer": "^1.0.2" }, "engines": { @@ -1156,10 +1164,13 @@ "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" }, "node_modules/available-typed-arrays": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.5.tgz", - "integrity": "sha512-DMD0KiN46eipeziST1LPP/STfDU0sufISXmjSgvVsoU2tqxctQeASejWcfNtxYKqETM1UxQ8sp2OrSBWpHY6sw==", + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.7.tgz", + "integrity": "sha512-wvUjBtSGN7+7SjNpq/9M2Tg350UZD3q62IFZLbRAR1bSMlCo1ZaeW+BJ+D090e4hIIZLBcTDWe4Mh4jvUDajzQ==", "dev": true, + "dependencies": { + "possible-typed-array-names": "^1.0.0" + }, "engines": { "node": ">= 0.4" }, @@ -1168,11 +1179,11 @@ } }, "node_modules/axios": { - "version": "1.6.5", - "resolved": "https://registry.npmjs.org/axios/-/axios-1.6.5.tgz", - "integrity": "sha512-Ii012v05KEVuUoFWmMW/UQv9aRIc3ZwkWDcM+h5Il8izZCtRVpDUfwpoFf7eOtajT3QiGR4yDUx7lPqHJULgbg==", + "version": "1.7.7", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.7.7.tgz", + "integrity": "sha512-S4kL7XrjgBmvdGut0sN3yJxqYzrDOnivkBiN0OFs6hLiUam3UPvswUo0kqGyhqUZGEOytHyumEdXsAkgCOUf3Q==", "dependencies": { - "follow-redirects": "^1.15.4", + "follow-redirects": "^1.15.6", "form-data": "^4.0.0", "proxy-from-env": "^1.1.0" } @@ -1183,19 +1194,16 @@ "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", "dev": true }, - "node_modules/base-64": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/base-64/-/base-64-0.1.0.tgz", - "integrity": "sha512-Y5gU45svrR5tI2Vt/X9GPd3L0HNIKzGu202EjxrXMpuc2V2CiKgemAbUUsqYmZJvPtCXoUKjNZwBJzsNScUbXA==", - "dev": true - }, "node_modules/binary-extensions": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz", - "integrity": "sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==", + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz", + "integrity": "sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==", "dev": true, "engines": { "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/brace-expansion": { @@ -1209,12 +1217,12 @@ } }, "node_modules/braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", "dev": true, "dependencies": { - "fill-range": "^7.0.1" + "fill-range": "^7.1.1" }, "engines": { 
"node": ">=8" @@ -1233,23 +1241,28 @@ "dev": true }, "node_modules/builtins": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/builtins/-/builtins-5.0.1.tgz", - "integrity": "sha512-qwVpFEHNfhYJIzNRBvd2C1kyo6jz3ZSMPyyuR47OPdiKWlbYnZNyDWuyR175qDnAJLiCo5fBBqPb3RiXgWlkOQ==", + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/builtins/-/builtins-5.1.0.tgz", + "integrity": "sha512-SW9lzGTLvWTP1AY8xeAMZimqDrIaSdLQUcVr9DMef51niJ022Ri87SwRRKYm4A6iHfkPaiVUu/Duw2Wc4J7kKg==", "dev": true, "dependencies": { "semver": "^7.0.0" } }, "node_modules/call-bind": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.5.tgz", - "integrity": "sha512-C3nQxfFZxFRVoJoGKKI8y3MOEo129NQ+FgQ08iye+Mk4zNZZGdjfs06bVTr+DBSlA66Q2VEcMki/cUCP4SercQ==", + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.7.tgz", + "integrity": "sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w==", "dev": true, "dependencies": { + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", "function-bind": "^1.1.2", - "get-intrinsic": "^1.2.1", - "set-function-length": "^1.1.1" + "get-intrinsic": "^1.2.4", + "set-function-length": "^1.2.1" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -1277,18 +1290,18 @@ } }, "node_modules/cargo-cp-artifact": { - "version": "0.1.8", - "resolved": "https://registry.npmjs.org/cargo-cp-artifact/-/cargo-cp-artifact-0.1.8.tgz", - "integrity": "sha512-3j4DaoTrsCD1MRkTF2Soacii0Nx7UHCce0EwUf4fHnggwiE4fbmF2AbnfzayR36DF8KGadfh7M/Yfy625kgPlA==", + "version": "0.1.9", + "resolved": "https://registry.npmjs.org/cargo-cp-artifact/-/cargo-cp-artifact-0.1.9.tgz", + "integrity": "sha512-6F+UYzTaGB+awsTXg0uSJA1/b/B3DDJzpKVRu0UmyI7DmNeaAl2RFHuTGIN6fEgpadRxoXGb7gbC1xo4C3IdyA==", "dev": true, "bin": { "cargo-cp-artifact": "bin/cargo-cp-artifact.js" } }, "node_modules/chai": { - "version": "4.4.1", - "resolved": "https://registry.npmjs.org/chai/-/chai-4.4.1.tgz", - "integrity": "sha512-13sOfMv2+DWduEU+/xbun3LScLoqN17nBeTLUsmDfKdoiC1fr0n9PU4guu4AhRcOVFk/sW8LyZWHuhWtQZiF+g==", + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/chai/-/chai-4.5.0.tgz", + "integrity": "sha512-RITGBfijLkBddZvnn8jdqoTypxvqbOLYQkGGxXzeFjVHvudaPw0HNFD9x928/eUwYWd2dPCugVqspGALTZZQKw==", "dev": true, "dependencies": { "assertion-error": "^1.1.0", @@ -1297,22 +1310,22 @@ "get-func-name": "^2.0.2", "loupe": "^2.3.6", "pathval": "^1.1.1", - "type-detect": "^4.0.8" + "type-detect": "^4.1.0" }, "engines": { "node": ">=4" } }, "node_modules/chai-as-promised": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/chai-as-promised/-/chai-as-promised-7.1.1.tgz", - "integrity": "sha512-azL6xMoi+uxu6z4rhWQ1jbdUhOMhis2PvscD/xjLqNMkv3BPPp2JyyuTHOrf9BOosGpNQ11v6BKv/g57RXbiaA==", + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/chai-as-promised/-/chai-as-promised-7.1.2.tgz", + "integrity": "sha512-aBDHZxRzYnUYuIAIPBH2s511DjlKPzXNlXSGFC8CwmroWQLfrW0LtE1nK3MAwwNhJPa9raEjNCmRoFpG0Hurdw==", "dev": true, "dependencies": { "check-error": "^1.0.2" }, "peerDependencies": { - "chai": ">= 2.1.2 < 5" + "chai": ">= 2.1.2 < 6" } }, "node_modules/chalk": { @@ -1344,15 +1357,6 @@ "url": "https://github.com/chalk/chalk-template?sponsor=1" } }, - "node_modules/charenc": { - "version": "0.0.2", - "resolved": "https://registry.npmjs.org/charenc/-/charenc-0.0.2.tgz", - "integrity": 
"sha512-yrLQ/yVUFXkzg7EDQsPieE/53+0RlaWTs+wBrvW36cyilJ2SaDWfl4Yj7MtLTXleV9uEKefbAGUPv2/iWSooRA==", - "dev": true, - "engines": { - "node": "*" - } - }, "node_modules/check-error": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/check-error/-/check-error-1.0.3.tgz", @@ -1366,16 +1370,10 @@ } }, "node_modules/chokidar": { - "version": "3.5.3", - "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.3.tgz", - "integrity": "sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==", + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.6.0.tgz", + "integrity": "sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==", "dev": true, - "funding": [ - { - "type": "individual", - "url": "https://paulmillr.com/funding/" - } - ], "dependencies": { "anymatch": "~3.1.2", "braces": "~3.0.2", @@ -1388,6 +1386,9 @@ "engines": { "node": ">= 8.10.0" }, + "funding": { + "url": "https://paulmillr.com/funding/" + }, "optionalDependencies": { "fsevents": "~2.3.2" } @@ -1479,9 +1480,9 @@ } }, "node_modules/command-line-usage/node_modules/typical": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/typical/-/typical-7.1.1.tgz", - "integrity": "sha512-T+tKVNs6Wu7IWiAce5BgMd7OZfNYUndHwc5MknN+UHOudi7sGZzuHdCadllRuqJ3fPtgFtIH9+lt9qRv6lmpfA==", + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/typical/-/typical-7.2.0.tgz", + "integrity": "sha512-W1+HdVRUl8fS3MZ9ogD51GOb46xMmhAZzR0WPw5jcgIZQJVvkddYzAl4YTU6g5w33Y1iRQLdIi2/1jhi2RNL0g==", "engines": { "node": ">=12.17" } @@ -1512,22 +1513,64 @@ "node": ">= 8" } }, - "node_modules/crypt": { - "version": "0.0.2", - "resolved": "https://registry.npmjs.org/crypt/-/crypt-0.0.2.tgz", - "integrity": "sha512-mCxBlsHFYh9C+HVpiEacem8FEBnMXgU9gy4zmNC+SXAZNB/1idgp/aulFJ4FgCi7GPEVbfyng092GqL2k2rmow==", + "node_modules/data-view-buffer": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/data-view-buffer/-/data-view-buffer-1.0.1.tgz", + "integrity": "sha512-0lht7OugA5x3iJLOWFhWK/5ehONdprk0ISXqVFn/NFrDu+cuc8iADFrGQz5BnRK7LLU3JmkbXSxaqX+/mXYtUA==", "dev": true, + "dependencies": { + "call-bind": "^1.0.6", + "es-errors": "^1.3.0", + "is-data-view": "^1.0.1" + }, "engines": { - "node": "*" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/data-view-byte-length": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/data-view-byte-length/-/data-view-byte-length-1.0.1.tgz", + "integrity": "sha512-4J7wRJD3ABAzr8wP+OcIcqq2dlUKp4DVflx++hs5h5ZKydWMI6/D/fAot+yh6g2tHh8fLFTvNOaVN357NvSrOQ==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.7", + "es-errors": "^1.3.0", + "is-data-view": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/data-view-byte-offset": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/data-view-byte-offset/-/data-view-byte-offset-1.0.0.tgz", + "integrity": "sha512-t/Ygsytq+R995EJ5PZlD4Cu56sWa8InXySaViRzw9apusqsOO2bQP+SbYzAhR0pFKoB+43lYy8rWban9JSuXnA==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.6", + "es-errors": "^1.3.0", + "is-data-view": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, "node_modules/debug": { - "version": "4.3.4", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", - "integrity": 
"sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "version": "4.3.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.7.tgz", + "integrity": "sha512-Er2nc/H7RrMXZBFCEim6TCmMk02Z8vLC2Rbi1KEBggpo0fS6l0S1nnapwmIi3yW/+GOJap1Krg4w0Hg80oCqgQ==", "dev": true, "dependencies": { - "ms": "2.1.2" + "ms": "^2.1.3" }, "engines": { "node": ">=6.0" @@ -1551,9 +1594,9 @@ } }, "node_modules/deep-eql": { - "version": "4.1.3", - "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-4.1.3.tgz", - "integrity": "sha512-WaEtAOpRA1MQ0eohqZjpGD8zdI0Ovsm8mmFhaDN8dvDZzyoUMcYDnf5Y6iu7HTXxf8JDS23qWa4a+hKCDyOPzw==", + "version": "4.1.4", + "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-4.1.4.tgz", + "integrity": "sha512-SUwdGfqdKOwxCPeVYjwSyRpJ7Z+fhpwIAtmCUdZIWZ/YP5R9WAsyuSgpLVDi9bjWoN2LXHNss/dk3urXtdQxGg==", "dev": true, "dependencies": { "type-detect": "^4.0.0" @@ -1569,17 +1612,20 @@ "dev": true }, "node_modules/define-data-property": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.1.tgz", - "integrity": "sha512-E7uGkTzkk1d0ByLeSc6ZsFS79Axg+m1P/VsgYsxHgiuc3tFSj+MjMIwe90FC4lOAZzNBdY7kkO2P2wKdsQ1vgQ==", + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz", + "integrity": "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==", "dev": true, "dependencies": { - "get-intrinsic": "^1.2.1", - "gopd": "^1.0.1", - "has-property-descriptors": "^1.0.0" + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", + "gopd": "^1.0.1" }, "engines": { "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, "node_modules/define-properties": { @@ -1608,24 +1654,14 @@ } }, "node_modules/diff": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/diff/-/diff-5.0.0.tgz", - "integrity": "sha512-/VTCrvm5Z0JGty/BWHljh+BAiw3IK+2j87NGMu8Nwc/f48WoDAC395uomO9ZD117ZOBaHmkX1oyLvkVM/aIT3w==", + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/diff/-/diff-5.2.0.tgz", + "integrity": "sha512-uIFDxqpRZGZ6ThOk84hEfqWoHx2devRFvpTZcTHur85vImfaxUbTW9Ryh4CpCuDnToOP1CEtXKIgytHBPVff5A==", "dev": true, "engines": { "node": ">=0.3.1" } }, - "node_modules/digest-fetch": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/digest-fetch/-/digest-fetch-1.3.0.tgz", - "integrity": "sha512-CGJuv6iKNM7QyZlM2T3sPAdZWd/p9zQiRNS9G+9COUCwzWFTs0Xp8NF5iePx7wtvhDykReiRRrSeNb4oMmB8lA==", - "dev": true, - "dependencies": { - "base-64": "^0.1.0", - "md5": "^2.3.0" - } - }, "node_modules/dir-glob": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", @@ -1666,50 +1702,57 @@ "dev": true }, "node_modules/es-abstract": { - "version": "1.22.3", - "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.22.3.tgz", - "integrity": "sha512-eiiY8HQeYfYH2Con2berK+To6GrK2RxbPawDkGq4UiCQQfZHb6wX9qQqkbpPqaxQFcl8d9QzZqo0tGE0VcrdwA==", + "version": "1.23.3", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.23.3.tgz", + "integrity": "sha512-e+HfNH61Bj1X9/jLc5v1owaLYuHdeHHSQlkhCBiTK8rBvKaULl/beGMxwrMXjpYrv4pz22BlY570vVePA2ho4A==", "dev": true, "dependencies": { - "array-buffer-byte-length": "^1.0.0", - "arraybuffer.prototype.slice": "^1.0.2", - "available-typed-arrays": "^1.0.5", - "call-bind": "^1.0.5", - "es-set-tostringtag": "^2.0.1", + "array-buffer-byte-length": "^1.0.1", + 
"arraybuffer.prototype.slice": "^1.0.3", + "available-typed-arrays": "^1.0.7", + "call-bind": "^1.0.7", + "data-view-buffer": "^1.0.1", + "data-view-byte-length": "^1.0.1", + "data-view-byte-offset": "^1.0.0", + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0", + "es-set-tostringtag": "^2.0.3", "es-to-primitive": "^1.2.1", "function.prototype.name": "^1.1.6", - "get-intrinsic": "^1.2.2", - "get-symbol-description": "^1.0.0", + "get-intrinsic": "^1.2.4", + "get-symbol-description": "^1.0.2", "globalthis": "^1.0.3", "gopd": "^1.0.1", - "has-property-descriptors": "^1.0.0", - "has-proto": "^1.0.1", + "has-property-descriptors": "^1.0.2", + "has-proto": "^1.0.3", "has-symbols": "^1.0.3", - "hasown": "^2.0.0", - "internal-slot": "^1.0.5", - "is-array-buffer": "^3.0.2", + "hasown": "^2.0.2", + "internal-slot": "^1.0.7", + "is-array-buffer": "^3.0.4", "is-callable": "^1.2.7", - "is-negative-zero": "^2.0.2", + "is-data-view": "^1.0.1", + "is-negative-zero": "^2.0.3", "is-regex": "^1.1.4", - "is-shared-array-buffer": "^1.0.2", + "is-shared-array-buffer": "^1.0.3", "is-string": "^1.0.7", - "is-typed-array": "^1.1.12", + "is-typed-array": "^1.1.13", "is-weakref": "^1.0.2", "object-inspect": "^1.13.1", "object-keys": "^1.1.1", - "object.assign": "^4.1.4", - "regexp.prototype.flags": "^1.5.1", - "safe-array-concat": "^1.0.1", - "safe-regex-test": "^1.0.0", - "string.prototype.trim": "^1.2.8", - "string.prototype.trimend": "^1.0.7", - "string.prototype.trimstart": "^1.0.7", - "typed-array-buffer": "^1.0.0", - "typed-array-byte-length": "^1.0.0", - "typed-array-byte-offset": "^1.0.0", - "typed-array-length": "^1.0.4", + "object.assign": "^4.1.5", + "regexp.prototype.flags": "^1.5.2", + "safe-array-concat": "^1.1.2", + "safe-regex-test": "^1.0.3", + "string.prototype.trim": "^1.2.9", + "string.prototype.trimend": "^1.0.8", + "string.prototype.trimstart": "^1.0.8", + "typed-array-buffer": "^1.0.2", + "typed-array-byte-length": "^1.0.1", + "typed-array-byte-offset": "^1.0.2", + "typed-array-length": "^1.0.6", "unbox-primitive": "^1.0.2", - "which-typed-array": "^1.1.13" + "which-typed-array": "^1.1.15" }, "engines": { "node": ">= 0.4" @@ -1718,15 +1761,48 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/es-set-tostringtag": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.0.2.tgz", - "integrity": "sha512-BuDyupZt65P9D2D2vA/zqcI3G5xRsklm5N3xCwuiy+/vKy8i0ifdsQP1sLgO4tZDSCaQUSnmC48khknGMV3D2Q==", + "node_modules/es-define-property": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.0.tgz", + "integrity": "sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ==", "dev": true, "dependencies": { - "get-intrinsic": "^1.2.2", - "has-tostringtag": "^1.0.0", - "hasown": "^2.0.0" + "get-intrinsic": "^1.2.4" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-errors": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "dev": true, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-object-atoms": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.0.0.tgz", + "integrity": "sha512-MZ4iQ6JwHOBQjahnjwaC1ZtIBH+2ohjamzAO3oaHcXYup7qxjF2fixyH+Q71voWHeOkI2q/TnJao/KfXYIZWbw==", + 
"dev": true, + "dependencies": { + "es-errors": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-set-tostringtag": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.0.3.tgz", + "integrity": "sha512-3T8uNMC3OQTHkFUsFq8r/BwAXLHvU/9O9mE0fBc/MY5iq/8H7ncvO947LmYA6ldWw9Uh8Yhf25zu6n7nML5QWQ==", + "dev": true, + "dependencies": { + "get-intrinsic": "^1.2.4", + "has-tostringtag": "^1.0.2", + "hasown": "^2.0.1" }, "engines": { "node": ">= 0.4" @@ -1759,9 +1835,9 @@ } }, "node_modules/escalade": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", - "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", + "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", "dev": true, "engines": { "node": ">=6" @@ -1780,16 +1856,17 @@ } }, "node_modules/eslint": { - "version": "8.56.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.56.0.tgz", - "integrity": "sha512-Go19xM6T9puCOWntie1/P997aXxFsOi37JIHRWI514Hc6ZnaHGKY9xFhrU65RT6CcBEzZoGG1e6Nq+DT04ZtZQ==", + "version": "8.57.1", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.57.1.tgz", + "integrity": "sha512-ypowyDxpVSYpkXr9WPv2PAZCtNip1Mv5KTW0SCurXv/9iOpcrH9PaqUElksqEB6pChqHGDRCFTyrZlGhnLNGiA==", + "deprecated": "This version is no longer supported. Please see https://eslint.org/version-support for other options.", "dev": true, "dependencies": { "@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/regexpp": "^4.6.1", "@eslint/eslintrc": "^2.1.4", - "@eslint/js": "8.56.0", - "@humanwhocodes/config-array": "^0.11.13", + "@eslint/js": "8.57.1", + "@humanwhocodes/config-array": "^0.13.0", "@humanwhocodes/module-importer": "^1.0.1", "@nodelib/fs.walk": "^1.2.8", "@ungap/structured-clone": "^1.2.0", @@ -1864,6 +1941,7 @@ "version": "34.0.1", "resolved": "https://registry.npmjs.org/eslint-config-standard-with-typescript/-/eslint-config-standard-with-typescript-34.0.1.tgz", "integrity": "sha512-J7WvZeLtd0Vr9F+v4dZbqJCLD16cbIy4U+alJMq4MiXdpipdBM3U5NkXaGUjePc4sb1ZE01U9g6VuTBpHHz1fg==", + "deprecated": "Please use eslint-config-love, instead.", "dev": true, "dependencies": { "@typescript-eslint/parser": "^5.43.0", @@ -1899,9 +1977,9 @@ } }, "node_modules/eslint-module-utils": { - "version": "2.8.0", - "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.8.0.tgz", - "integrity": "sha512-aWajIYfsqCKRDgUfjEXNN/JlrzauMuSEy5sbd7WXbtW3EH6A6MpwEh42c7qD+MqQo9QMJ6fWLAeIJynx0g6OAw==", + "version": "2.12.0", + "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.12.0.tgz", + "integrity": "sha512-wALZ0HFoytlyh/1+4wuZ9FJCD/leWHQzzrxJ8+rebyReSLk7LApMyd3WJaLVoN+D5+WIdJyDK1c6JnE65V4Zyg==", "dev": true, "dependencies": { "debug": "^3.2.7" @@ -1968,34 +2046,36 @@ } }, "node_modules/eslint-plugin-import": { - "version": "2.29.1", - "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.29.1.tgz", - "integrity": "sha512-BbPC0cuExzhiMo4Ff1BTVwHpjjv28C5R+btTOGaCRC7UEz801up0JadwkeSk5Ued6TG34uaczuVuH6qyy5YUxw==", + "version": "2.31.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.31.0.tgz", + "integrity": 
"sha512-ixmkI62Rbc2/w8Vfxyh1jQRTdRTF52VxwRVHl/ykPAmqG+Nb7/kNn+byLP0LxPgI7zWA16Jt82SybJInmMia3A==", "dev": true, "dependencies": { - "array-includes": "^3.1.7", - "array.prototype.findlastindex": "^1.2.3", + "@rtsao/scc": "^1.1.0", + "array-includes": "^3.1.8", + "array.prototype.findlastindex": "^1.2.5", "array.prototype.flat": "^1.3.2", "array.prototype.flatmap": "^1.3.2", "debug": "^3.2.7", "doctrine": "^2.1.0", "eslint-import-resolver-node": "^0.3.9", - "eslint-module-utils": "^2.8.0", - "hasown": "^2.0.0", - "is-core-module": "^2.13.1", + "eslint-module-utils": "^2.12.0", + "hasown": "^2.0.2", + "is-core-module": "^2.15.1", "is-glob": "^4.0.3", "minimatch": "^3.1.2", - "object.fromentries": "^2.0.7", - "object.groupby": "^1.0.1", - "object.values": "^1.1.7", + "object.fromentries": "^2.0.8", + "object.groupby": "^1.0.3", + "object.values": "^1.2.0", "semver": "^6.3.1", + "string.prototype.trimend": "^1.0.8", "tsconfig-paths": "^3.15.0" }, "engines": { "node": ">=4" }, "peerDependencies": { - "eslint": "^2 || ^3 || ^4 || ^5 || ^6 || ^7.2.0 || ^8" + "eslint": "^2 || ^3 || ^4 || ^5 || ^6 || ^7.2.0 || ^8 || ^9" } }, "node_modules/eslint-plugin-import/node_modules/debug": { @@ -2054,15 +2134,18 @@ } }, "node_modules/eslint-plugin-promise": { - "version": "6.1.1", - "resolved": "https://registry.npmjs.org/eslint-plugin-promise/-/eslint-plugin-promise-6.1.1.tgz", - "integrity": "sha512-tjqWDwVZQo7UIPMeDReOpUgHCmCiH+ePnVT+5zVapL0uuHnegBUs2smM13CzOs2Xb5+MHMRFTs9v24yjba4Oig==", + "version": "6.6.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-promise/-/eslint-plugin-promise-6.6.0.tgz", + "integrity": "sha512-57Zzfw8G6+Gq7axm2Pdo3gW/Rx3h9Yywgn61uE/3elTCOePEHVrn2i5CdfBwA1BLK0Q0WqctICIUSqXZW/VprQ==", "dev": true, "engines": { "node": "^12.22.0 || ^14.17.0 || >=16.0.0" }, + "funding": { + "url": "https://opencollective.com/eslint" + }, "peerDependencies": { - "eslint": "^7.0.0 || ^8.0.0" + "eslint": "^7.0.0 || ^8.0.0 || ^9.0.0" } }, "node_modules/eslint-scope": { @@ -2160,9 +2243,9 @@ } }, "node_modules/esquery": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.5.0.tgz", - "integrity": "sha512-YQLXUplAwJgCydQ78IMJywZCceoqk1oH01OERdSAJc/7U2AylwjhSCLDEtqwg811idIS/9fIU5GjG73IgjKMVg==", + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.6.0.tgz", + "integrity": "sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg==", "dev": true, "dependencies": { "estraverse": "^5.1.0" @@ -2275,9 +2358,9 @@ "dev": true }, "node_modules/fastq": { - "version": "1.16.0", - "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.16.0.tgz", - "integrity": "sha512-ifCoaXsDrsdkWTtiNJX5uzHDsrck5TzfKKDcuFFTIrrc/BS076qgEIfoIy1VeZqViznfKiysPYTh/QeHtnIsYA==", + "version": "1.17.1", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.17.1.tgz", + "integrity": "sha512-sRVD3lWVIXWg6By68ZN7vho9a1pQcN/WBFaAAsDDFzlJjvoGx0P8z7V1t72grFJfJhu3YPZBuu25f7Kaw2jN1w==", "dev": true, "dependencies": { "reusify": "^1.0.4" @@ -2296,9 +2379,9 @@ } }, "node_modules/fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", "dev": true, 
"dependencies": { "to-regex-range": "^5.0.1" @@ -2363,15 +2446,15 @@ "integrity": "sha512-vE+SI9vrJDwi1oETtTIFldC/o9GsVKRM+s6EL0nQgxXlYV1Vc4Tk30hj4xGICftInKQKj1F3up2n8UbIVobISQ==" }, "node_modules/flatted": { - "version": "3.2.9", - "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.2.9.tgz", - "integrity": "sha512-36yxDn5H7OFZQla0/jFJmbIKTdZAQHngCedGxiMmpNfEZM0sdEeT+WczLQrjK6D7o2aiyLYDnkw0R3JK0Qv1RQ==", + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.1.tgz", + "integrity": "sha512-X8cqMLLie7KsNUDSdzeN8FYK9rEt4Dt67OsG/DNGnYTSDBG4uFAJFBnUeiV+zCVAvwFy56IjM9sH51jVaEhNxw==", "dev": true }, "node_modules/follow-redirects": { - "version": "1.15.5", - "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.5.tgz", - "integrity": "sha512-vSFWUON1B+yAw1VN4xMfxgn5fTUiaOzAJCKBwIIgT/+7CuGy9+r+5gITvP62j3RmaD5Ph65UaERdOSRGUzZtgw==", + "version": "1.15.9", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.9.tgz", + "integrity": "sha512-gew4GsXizNgdoRyqmyfMHyAmXsZDk6mHkSxZFCzW9gwlbtOW44CDtYavM+y+72qD/Vq2l550kMF52DT8fOLJqQ==", "funding": [ { "type": "individual", @@ -2428,15 +2511,6 @@ "node": ">= 12.20" } }, - "node_modules/formdata-node/node_modules/web-streams-polyfill": { - "version": "4.0.0-beta.3", - "resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-4.0.0-beta.3.tgz", - "integrity": "sha512-QW95TCTaHmsYfHDybGMwO5IJIM93I/6vTRk+daHTWFPhwh+C8Cg7j7XyKrwrj8Ib6vYXe0ocYNrmzY4xAAN6ug==", - "dev": true, - "engines": { - "node": ">= 14" - } - }, "node_modules/fs.realpath": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", @@ -2512,28 +2586,33 @@ } }, "node_modules/get-intrinsic": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.2.tgz", - "integrity": "sha512-0gSo4ml/0j98Y3lngkFEot/zhiCeWsbYIlZ+uZOVgzLyLaUw7wxUL+nCTP0XJvJg1AXulJRI3UJi8GsbDuxdGA==", + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.4.tgz", + "integrity": "sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==", "dev": true, "dependencies": { + "es-errors": "^1.3.0", "function-bind": "^1.1.2", "has-proto": "^1.0.1", "has-symbols": "^1.0.3", "hasown": "^2.0.0" }, + "engines": { + "node": ">= 0.4" + }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/get-symbol-description": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.0.0.tgz", - "integrity": "sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.0.2.tgz", + "integrity": "sha512-g0QYk1dZBxGwk+Ngc+ltRH2IBp2f7zBkBMBJZCDerh6EhlhSR6+9irMCuT/09zD6qkarHUSn529sK/yL4S27mg==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", - "get-intrinsic": "^1.1.1" + "call-bind": "^1.0.5", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.4" }, "engines": { "node": ">= 0.4" @@ -2543,20 +2622,20 @@ } }, "node_modules/glob": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.0.tgz", - "integrity": "sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q==", + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-8.1.0.tgz", + "integrity": 
"sha512-r8hpEjiQEYlF2QU0df3dS+nxxSIreXQS1qRhMJM0Q5NDdR386C7jb7Hwwod8Fgiuex+k0GFjgft18yvxm5XoCQ==", + "deprecated": "Glob versions prior to v9 are no longer supported", "dev": true, "dependencies": { "fs.realpath": "^1.0.0", "inflight": "^1.0.4", "inherits": "2", - "minimatch": "^3.0.4", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" + "minimatch": "^5.0.1", + "once": "^1.3.0" }, "engines": { - "node": "*" + "node": ">=12" }, "funding": { "url": "https://github.com/sponsors/isaacs" @@ -2574,6 +2653,27 @@ "node": ">=10.13.0" } }, + "node_modules/glob/node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/glob/node_modules/minimatch": { + "version": "5.1.6", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz", + "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", + "dev": true, + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=10" + } + }, "node_modules/globals": { "version": "13.24.0", "resolved": "https://registry.npmjs.org/globals/-/globals-13.24.0.tgz", @@ -2590,12 +2690,13 @@ } }, "node_modules/globalthis": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/globalthis/-/globalthis-1.0.3.tgz", - "integrity": "sha512-sFdI5LyBiNTHjRd7cGPWapiHWMOXKyuBNX/cWJ3NfzrZQVa8GI/8cofCl74AOVqq9W5kNmguTIzJ/1s2gyI9wA==", + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/globalthis/-/globalthis-1.0.4.tgz", + "integrity": "sha512-DpLKbNU4WylpxJykQujfCcwYWiV/Jhm50Goo0wrVILAv5jOr9d+H+UR3PhSCD2rCCEIg0uc+G+muBTwD54JhDQ==", "dev": true, "dependencies": { - "define-properties": "^1.1.3" + "define-properties": "^1.2.1", + "gopd": "^1.0.1" }, "engines": { "node": ">= 0.4" @@ -2681,21 +2782,21 @@ } }, "node_modules/has-property-descriptors": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.1.tgz", - "integrity": "sha512-VsX8eaIewvas0xnvinAe9bw4WfIeODpGYikiWYLH+dma0Jw6KHYqWiWfhQlgOVK8D6PvjubK5Uc4P0iIhIcNVg==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz", + "integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==", "dev": true, "dependencies": { - "get-intrinsic": "^1.2.2" + "es-define-property": "^1.0.0" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/has-proto": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.0.1.tgz", - "integrity": "sha512-7qE+iP+O+bgF9clE5+UoBFzE65mlBiVj3tKCrlNQ0Ogwm0BjpT/gK4SlLYDMybDh5I3TCTKnPPa0oMG7JDYrhg==", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.0.3.tgz", + "integrity": "sha512-SJ1amZAJUiZS+PhsVLf5tGydlaVB8EdFpaSO4gmiUKUOxk8qzn5AIy4ZeJUmh22znIdk/uMAUT2pl3FxzVUH+Q==", "dev": true, "engines": { "node": ">= 0.4" @@ -2717,12 +2818,12 @@ } }, "node_modules/has-tostringtag": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.0.tgz", - "integrity": "sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ==", + "version": "1.0.2", + "resolved": 
"https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz", + "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", "dev": true, "dependencies": { - "has-symbols": "^1.0.2" + "has-symbols": "^1.0.3" }, "engines": { "node": ">= 0.4" @@ -2732,9 +2833,9 @@ } }, "node_modules/hasown": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.0.tgz", - "integrity": "sha512-vUptKVTpIJhcczKBbgnS+RtcuYMB8+oNzPK2/Hp3hanz8JmpATdmmgLgSaadVREkDm+e2giHwY3ZRkyjSIDDFA==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", "dev": true, "dependencies": { "function-bind": "^1.1.2" @@ -2762,9 +2863,9 @@ } }, "node_modules/ignore": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.0.tgz", - "integrity": "sha512-g7dmpshy+gD7mh88OC9NwSGTKoc3kyLAZQRU1mt53Aw/vnvfXnbC+F/7F7QoYVKbV+KNvJx8wArewKy1vXMtlg==", + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz", + "integrity": "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==", "dev": true, "engines": { "node": ">= 4" @@ -2799,6 +2900,7 @@ "version": "1.0.6", "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "deprecated": "This module is not supported, and leaks memory. Do not use it. Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.", "dev": true, "dependencies": { "once": "^1.3.0", @@ -2812,12 +2914,12 @@ "dev": true }, "node_modules/internal-slot": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.6.tgz", - "integrity": "sha512-Xj6dv+PsbtwyPpEflsejS+oIZxmMlV44zAhG479uYu89MsjcYOhCFnNyKrkJrihbsiasQyY0afoCl/9BLR65bg==", + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.7.tgz", + "integrity": "sha512-NGnrKwXzSms2qUUih/ILZ5JBqNTSa1+ZmP6flaIp6KmSElgE9qdndzS3cqjrDovwFdmwsGsLdeFgB6suw+1e9g==", "dev": true, "dependencies": { - "get-intrinsic": "^1.2.2", + "es-errors": "^1.3.0", "hasown": "^2.0.0", "side-channel": "^1.0.4" }, @@ -2826,14 +2928,16 @@ } }, "node_modules/is-array-buffer": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.2.tgz", - "integrity": "sha512-y+FyyR/w8vfIRq4eQcM1EYgSTnmHXPqaF+IgzgraytCFq5Xh8lllDVmAZolPJiZttZLeFSINPYMaEJ7/vWUa1w==", + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.4.tgz", + "integrity": "sha512-wcjaerHw0ydZwfhiKbXJWLDY8A7yV7KhjQOpb83hGgGfId/aQa4TOvwyzn2PuswW2gPCYEL/nEAiSVpdOj1lXw==", "dev": true, "dependencies": { "call-bind": "^1.0.2", - "get-intrinsic": "^1.2.0", - "is-typed-array": "^1.1.10" + "get-intrinsic": "^1.2.1" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -2879,12 +2983,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/is-buffer": { - "version": "1.1.6", - "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.6.tgz", - "integrity": "sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==", - "dev": true 
- }, "node_modules/is-callable": { "version": "1.2.7", "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz", @@ -2898,12 +2996,30 @@ } }, "node_modules/is-core-module": { - "version": "2.13.1", - "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.13.1.tgz", - "integrity": "sha512-hHrIjvZsftOsvKSn2TRYl63zvxsgE0K+0mYMoH6gD4omR5IWB2KynivBQczo3+wF1cCkjzvptnI9Q0sPU66ilw==", + "version": "2.15.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.15.1.tgz", + "integrity": "sha512-z0vtXSwucUJtANQWldhbtbt7BnL0vxiFjIdDLAatwhDYty2bad6s+rijD6Ri4YuYJubLzIJLUidCh09e1djEVQ==", "dev": true, "dependencies": { - "hasown": "^2.0.0" + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-data-view": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-data-view/-/is-data-view-1.0.1.tgz", + "integrity": "sha512-AHkaJrsUVW6wq6JS8y3JnM/GJF/9cf+k20+iDzlSaJrinEo5+7vRiteOSwBhHRiAyQATN1AmY4hwzxJKPmYf+w==", + "dev": true, + "dependencies": { + "is-typed-array": "^1.1.13" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -2955,9 +3071,9 @@ } }, "node_modules/is-negative-zero": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.2.tgz", - "integrity": "sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA==", + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.3.tgz", + "integrity": "sha512-5KoIu2Ngpyek75jXodFvnafB6DJgr3u8uuK0LEZJjrU19DrMD3EVERaR8sjz8CCGgpZvxPl9SuE1GMVPFHx1mw==", "dev": true, "engines": { "node": ">= 0.4" @@ -3025,12 +3141,15 @@ } }, "node_modules/is-shared-array-buffer": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.2.tgz", - "integrity": "sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA==", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.3.tgz", + "integrity": "sha512-nA2hv5XIhLR3uVzDDfCIknerhx8XUKnstuOERPNNIinXG7v9u+ohXF67vxm4TPTEPU6lm61ZkwP3c9PCB97rhg==", "dev": true, "dependencies": { - "call-bind": "^1.0.2" + "call-bind": "^1.0.7" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -3067,12 +3186,12 @@ } }, "node_modules/is-typed-array": { - "version": "1.1.12", - "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.12.tgz", - "integrity": "sha512-Z14TF2JNG8Lss5/HMqt0//T9JeHXttXy5pH/DBU4vi98ozO2btxzq9MwYDZYnKwU8nRsz/+GVFVRDq3DkVuSPg==", + "version": "1.1.13", + "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.13.tgz", + "integrity": "sha512-uZ25/bUAlUY5fR4OKT4rZQEBrzQWYV9ZJYGGsUmEJ6thodVJ1HX64ePQ6Z0qPWP+m+Uq6e9UugrE38jeYsDSMw==", "dev": true, "dependencies": { - "which-typed-array": "^1.1.11" + "which-typed-array": "^1.1.14" }, "engines": { "node": ">= 0.4" @@ -3168,9 +3287,9 @@ } }, "node_modules/jsonc-parser": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/jsonc-parser/-/jsonc-parser-3.2.1.tgz", - "integrity": "sha512-AilxAyFOAcK5wA1+LeaySVBrHsGQvUFCDWXKpZjzaL0PqW+xfBOttn8GNtWKFWqneyMZj41MWF9Kl6iPWLwgOA==", + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/jsonc-parser/-/jsonc-parser-3.3.1.tgz", + 
"integrity": "sha512-HUgH65KyejrUFPvHFPbqOY0rsFip3Bo5wb4ngvdi1EpCYWUQDC5V+Y7mZws+DLkr4M//zQJoanu1SP+87Dv1oQ==", "dev": true }, "node_modules/just-extend": { @@ -3216,10 +3335,10 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/lodash.assignwith": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/lodash.assignwith/-/lodash.assignwith-4.2.0.tgz", - "integrity": "sha512-ZznplvbvtjK2gMvnQ1BR/zqPFZmS6jbK4p+6Up4xcRYA7yMIwxHCfbTcrYxXKzzqLsQ05eJPVznEW3tuwV7k1g==" + "node_modules/lodash": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" }, "node_modules/lodash.camelcase": { "version": "4.3.0", @@ -3263,18 +3382,6 @@ "get-func-name": "^2.0.1" } }, - "node_modules/lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "dev": true, - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, "node_modules/lunr": { "version": "2.3.9", "resolved": "https://registry.npmjs.org/lunr/-/lunr-2.3.9.tgz", @@ -3299,17 +3406,6 @@ "node": ">= 12" } }, - "node_modules/md5": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/md5/-/md5-2.3.0.tgz", - "integrity": "sha512-T1GITYmFaKuO91vxyoQMFETst+O71VUPEU3ze5GNzDm0OWdP8v1ziTaAEPUr/3kLsY3Sftgz242A1SetQiDL7g==", - "dev": true, - "dependencies": { - "charenc": "0.0.2", - "crypt": "0.0.2", - "is-buffer": "~1.1.6" - } - }, "node_modules/merge2": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", @@ -3320,12 +3416,12 @@ } }, "node_modules/micromatch": { - "version": "4.0.5", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz", - "integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==", + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", "dev": true, "dependencies": { - "braces": "^3.0.2", + "braces": "^3.0.3", "picomatch": "^2.3.1" }, "engines": { @@ -3385,32 +3481,31 @@ } }, "node_modules/mocha": { - "version": "10.2.0", - "resolved": "https://registry.npmjs.org/mocha/-/mocha-10.2.0.tgz", - "integrity": "sha512-IDY7fl/BecMwFHzoqF2sg/SHHANeBoMMXFlS9r0OXKDssYE1M5O43wUY/9BVPeIvfH2zmEbBfseqN9gBQZzXkg==", + "version": "10.7.3", + "resolved": "https://registry.npmjs.org/mocha/-/mocha-10.7.3.tgz", + "integrity": "sha512-uQWxAu44wwiACGqjbPYmjo7Lg8sFrS3dQe7PP2FQI+woptP4vZXSMcfMyFL/e1yFEeEpV4RtyTpZROOKmxis+A==", "dev": true, "dependencies": { - "ansi-colors": "4.1.1", - "browser-stdout": "1.3.1", - "chokidar": "3.5.3", - "debug": "4.3.4", - "diff": "5.0.0", - "escape-string-regexp": "4.0.0", - "find-up": "5.0.0", - "glob": "7.2.0", - "he": "1.2.0", - "js-yaml": "4.1.0", - "log-symbols": "4.1.0", - "minimatch": "5.0.1", - "ms": "2.1.3", - "nanoid": "3.3.3", - "serialize-javascript": "6.0.0", - "strip-json-comments": "3.1.1", - "supports-color": "8.1.1", - "workerpool": "6.2.1", - "yargs": "16.2.0", - "yargs-parser": "20.2.4", - "yargs-unparser": "2.0.0" + "ansi-colors": "^4.1.3", + "browser-stdout": "^1.3.1", + "chokidar": "^3.5.3", + "debug": "^4.3.5", + "diff": "^5.2.0", + "escape-string-regexp": 
"^4.0.0", + "find-up": "^5.0.0", + "glob": "^8.1.0", + "he": "^1.2.0", + "js-yaml": "^4.1.0", + "log-symbols": "^4.1.0", + "minimatch": "^5.1.6", + "ms": "^2.1.3", + "serialize-javascript": "^6.0.2", + "strip-json-comments": "^3.1.1", + "supports-color": "^8.1.1", + "workerpool": "^6.5.1", + "yargs": "^16.2.0", + "yargs-parser": "^20.2.9", + "yargs-unparser": "^2.0.0" }, "bin": { "_mocha": "bin/_mocha", @@ -3418,10 +3513,6 @@ }, "engines": { "node": ">= 14.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/mochajs" } }, "node_modules/mocha/node_modules/brace-expansion": { @@ -3434,9 +3525,9 @@ } }, "node_modules/mocha/node_modules/minimatch": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.0.1.tgz", - "integrity": "sha512-nLDxIFRyhDblz3qMuq+SoRZED4+miJ/G+tdDrjkkkRnjAsBexeGpgjLEQ0blJy7rHhR2b93rhQY4SvyWu9v03g==", + "version": "5.1.6", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz", + "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", "dev": true, "dependencies": { "brace-expansion": "^2.0.1" @@ -3445,12 +3536,6 @@ "node": ">=10" } }, - "node_modules/mocha/node_modules/ms": { - "version": "2.1.3", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", - "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", - "dev": true - }, "node_modules/mocha/node_modules/supports-color": { "version": "8.1.1", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", @@ -3467,23 +3552,11 @@ } }, "node_modules/ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", "dev": true }, - "node_modules/nanoid": { - "version": "3.3.3", - "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.3.tgz", - "integrity": "sha512-p1sjXuopFs0xg+fPASzQ28agW1oHD7xDsd9Xkf3T15H3c/cifrFHVwrh74PdoklAPi+i7MdRsE47vm2r6JoB+w==", - "dev": true, - "bin": { - "nanoid": "bin/nanoid.cjs" - }, - "engines": { - "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" - } - }, "node_modules/natural-compare": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", @@ -3503,9 +3576,9 @@ "dev": true }, "node_modules/nise": { - "version": "5.1.7", - "resolved": "https://registry.npmjs.org/nise/-/nise-5.1.7.tgz", - "integrity": "sha512-wWtNUhkT7k58uvWTB/Gy26eA/EJKtPZFVAhEilN5UYVmmGRYOURbejRUyKm0Uu9XVEW7K5nBOZfR8VMB4QR2RQ==", + "version": "5.1.9", + "resolved": "https://registry.npmjs.org/nise/-/nise-5.1.9.tgz", + "integrity": "sha512-qOnoujW4SV6e40dYxJOb3uvuoPHtmLzIk4TFo+j0jPJoC+5Z9xja5qH5JZobEPsa8+YYphMrOSwnrshEhG2qww==", "dev": true, "dependencies": { "@sinonjs/commons": "^3.0.0", @@ -3516,12 +3589,12 @@ } }, "node_modules/nise/node_modules/@sinonjs/fake-timers": { - "version": "11.2.2", - "resolved": "https://registry.npmjs.org/@sinonjs/fake-timers/-/fake-timers-11.2.2.tgz", - "integrity": "sha512-G2piCSxQ7oWOxwGSAyFHfPIsyeJGXYtc6mFbnFA+kRXkiEnTl8c/8jul2S329iFBnDI9HGoeWWAZvuvOkZccgw==", + "version": "11.3.1", + "resolved": "https://registry.npmjs.org/@sinonjs/fake-timers/-/fake-timers-11.3.1.tgz", + 
"integrity": "sha512-EVJO7nW5M/F5Tur0Rf2z/QoMo+1Ia963RiMtapiQrEWvY0iBUvADo8Beegwjpnle5BHkyHuoxSTW3jF43H1XRA==", "dev": true, "dependencies": { - "@sinonjs/commons": "^3.0.0" + "@sinonjs/commons": "^3.0.1" } }, "node_modules/node-domexception": { @@ -3573,10 +3646,13 @@ } }, "node_modules/object-inspect": { - "version": "1.13.1", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.1.tgz", - "integrity": "sha512-5qoj1RUiKOMsCCNLV1CBiPYE10sziTsnmNxkAI/rZhiD63CF7IqdFGC/XzjWjpSgLf0LxXX3bDFIh0E18f6UhQ==", + "version": "1.13.2", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.2.tgz", + "integrity": "sha512-IRZSRuzJiynemAXPYtPe5BoI/RESNYR7TYm50MC5Mqbd3Jmw5y790sErYw3V6SryFJD64b74qQQs9wn5Bg/k3g==", "dev": true, + "engines": { + "node": ">= 0.4" + }, "funding": { "url": "https://github.com/sponsors/ljharb" } @@ -3609,14 +3685,15 @@ } }, "node_modules/object.fromentries": { - "version": "2.0.7", - "resolved": "https://registry.npmjs.org/object.fromentries/-/object.fromentries-2.0.7.tgz", - "integrity": "sha512-UPbPHML6sL8PI/mOqPwsH4G6iyXcCGzLin8KvEPenOZN5lpCNBZZQ+V62vdjB1mQHrmqGQt5/OJzemUA+KJmEA==", + "version": "2.0.8", + "resolved": "https://registry.npmjs.org/object.fromentries/-/object.fromentries-2.0.8.tgz", + "integrity": "sha512-k6E21FzySsSK5a21KRADBd/NGneRegFO5pLHfdQLpRDETUNJueLXs3WCzyQ3tFRDYgbq3KHGXfTbi2bs8WQ6rQ==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "es-abstract": "^1.22.1" + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.2", + "es-object-atoms": "^1.0.0" }, "engines": { "node": ">= 0.4" @@ -3626,26 +3703,28 @@ } }, "node_modules/object.groupby": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/object.groupby/-/object.groupby-1.0.1.tgz", - "integrity": "sha512-HqaQtqLnp/8Bn4GL16cj+CUYbnpe1bh0TtEaWvybszDG4tgxCJuRpV8VGuvNaI1fAnI4lUJzDG55MXcOH4JZcQ==", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/object.groupby/-/object.groupby-1.0.3.tgz", + "integrity": "sha512-+Lhy3TQTuzXI5hevh8sBGqbmurHbbIjAi0Z4S63nthVLmLxfbj4T54a4CfZrXIrt9iP4mVAPYMo/v99taj3wjQ==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "es-abstract": "^1.22.1", - "get-intrinsic": "^1.2.1" + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.2" + }, + "engines": { + "node": ">= 0.4" } }, "node_modules/object.values": { - "version": "1.1.7", - "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.1.7.tgz", - "integrity": "sha512-aU6xnDFYT3x17e/f0IiiwlGPTy2jzMySGfUB4fq6z7CV8l85CWHDk5ErhyhpfDHhrOMwGFhSQkhMGHaIotA6Ng==", + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.2.0.tgz", + "integrity": "sha512-yBYjY9QX2hnRmZHAjG/f13MzmBzxzYgQhFrke06TTyKY5zSTEqkOeukBzIdVA3j3ulu8Qa3MbVFShV7T2RmGtQ==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "es-abstract": "^1.22.1" + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-object-atoms": "^1.0.0" }, "engines": { "node": ">= 0.4" @@ -3664,37 +3743,43 @@ } }, "node_modules/openai": { - "version": "4.25.0", - "resolved": "https://registry.npmjs.org/openai/-/openai-4.25.0.tgz", - "integrity": "sha512-qLMFOizjxKuDfQkBrczZPYo6XVL4bdcuz9MR11Q+M91kGcs8dQw+O90nRcC+qWuhaGphQkfXQJMn4cd7Yew3Kg==", + "version": "4.67.3", + "resolved": "https://registry.npmjs.org/openai/-/openai-4.67.3.tgz", + "integrity": 
"sha512-HT2tZgjLgRqbLQNKmYtjdF/4TQuiBvg1oGvTDhwpSEQzxo6/oM1us8VQ53vBK2BiKvCxFuq6gKGG70qfwrNhKg==", "dev": true, "dependencies": { "@types/node": "^18.11.18", "@types/node-fetch": "^2.6.4", "abort-controller": "^3.0.0", "agentkeepalive": "^4.2.1", - "digest-fetch": "^1.3.0", "form-data-encoder": "1.7.2", "formdata-node": "^4.3.2", - "node-fetch": "^2.6.7", - "web-streams-polyfill": "^3.2.1" + "node-fetch": "^2.6.7" }, "bin": { "openai": "bin/cli" + }, + "peerDependencies": { + "zod": "^3.23.8" + }, + "peerDependenciesMeta": { + "zod": { + "optional": true + } } }, "node_modules/optionator": { - "version": "0.9.3", - "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.3.tgz", - "integrity": "sha512-JjCoypp+jKn1ttEFExxhetCKeJt9zhAgAve5FXHixTvFDW/5aEktX9bufBKLRRMdU7bNtpLfcGu94B3cdEJgjg==", + "version": "0.9.4", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.4.tgz", + "integrity": "sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==", "dev": true, "dependencies": { - "@aashutoshrathi/word-wrap": "^1.2.3", "deep-is": "^0.1.3", "fast-levenshtein": "^2.0.6", "levn": "^0.4.1", "prelude-ls": "^1.2.1", - "type-check": "^0.4.0" + "type-check": "^0.4.0", + "word-wrap": "^1.2.5" }, "engines": { "node": ">= 0.8.0" @@ -3787,9 +3872,9 @@ "dev": true }, "node_modules/path-to-regexp": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-6.2.1.tgz", - "integrity": "sha512-JLyh7xT1kizaEvcaXOQwOc2/Yhw6KZOvPf1S8401UyLk86CU79LN3vl7ztXGm/pZ+YjoyAJ4rxmHwbkBXJX+yw==", + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-6.3.0.tgz", + "integrity": "sha512-Yhpw4T9C6hPpgPeA28us07OJeqZ5EzQTkbfwuhsUg0c237RomFoETJgmp2sa3F/41gfLE6G5cqcYwznmeEeOlQ==", "dev": true }, "node_modules/path-type": { @@ -3822,6 +3907,15 @@ "url": "https://github.com/sponsors/jonschlinkert" } }, + "node_modules/possible-typed-array-names": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/possible-typed-array-names/-/possible-typed-array-names-1.0.0.tgz", + "integrity": "sha512-d7Uw+eZoloe0EHDIYoe+bQ5WXnGMOpmiZFTuMWCwpjzzkL2nTjcKiAk4hh8TjnGye2TwWOk3UXucZ+3rbmBa8Q==", + "dev": true, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/prelude-ls": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", @@ -3887,14 +3981,15 @@ } }, "node_modules/regexp.prototype.flags": { - "version": "1.5.1", - "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.1.tgz", - "integrity": "sha512-sy6TXMN+hnP/wMy+ISxg3krXx7BAtWVO4UouuCN/ziM9UEne0euamVNafDfvC83bRNr95y0V5iijeDQFUNpvrg==", + "version": "1.5.3", + "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.3.tgz", + "integrity": "sha512-vqlC04+RQoFalODCbCumG2xIOvapzVMHwsyIGM/SIE8fRhFFsXeH8/QQ+s0T0kDAhKc4k30s73/0ydkHQz6HlQ==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "set-function-name": "^2.0.0" + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-errors": "^1.3.0", + "set-function-name": "^2.0.2" }, "engines": { "node": ">= 0.4" @@ -3972,6 +4067,7 @@ "version": "3.0.2", "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", + "deprecated": "Rimraf versions prior to v4 are no longer supported", "dev": true, "dependencies": { 
"glob": "^7.1.3" @@ -3983,6 +4079,27 @@ "url": "https://github.com/sponsors/isaacs" } }, + "node_modules/rimraf/node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "deprecated": "Glob versions prior to v9 are no longer supported", + "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, "node_modules/run-parallel": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", @@ -4007,13 +4124,13 @@ } }, "node_modules/safe-array-concat": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/safe-array-concat/-/safe-array-concat-1.1.0.tgz", - "integrity": "sha512-ZdQ0Jeb9Ofti4hbt5lX3T2JcAamT9hfzYU1MNB+z/jaEbB6wfFfPIR/zEORmZqobkCCJhSjodobH6WHNmJ97dg==", + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/safe-array-concat/-/safe-array-concat-1.1.2.tgz", + "integrity": "sha512-vj6RsCsWBCf19jIeHEfkRMw8DPiBb+DMXklQ/1SGDHOMlHdPUkZXFQ2YdplS23zESTijAcurb1aSgJA3AgMu1Q==", "dev": true, "dependencies": { - "call-bind": "^1.0.5", - "get-intrinsic": "^1.2.2", + "call-bind": "^1.0.7", + "get-intrinsic": "^1.2.4", "has-symbols": "^1.0.3", "isarray": "^2.0.5" }, @@ -4045,13 +4162,13 @@ ] }, "node_modules/safe-regex-test": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.0.2.tgz", - "integrity": "sha512-83S9w6eFq12BBIJYvjMux6/dkirb8+4zJRA9cxNBVb7Wq5fJBW+Xze48WqR8pxua7bDuAaaAxtVVd4Idjp1dBQ==", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.0.3.tgz", + "integrity": "sha512-CdASjNJPvRa7roO6Ra/gLYBTzYzzPyyBXxIMdGW3USQLyjWEls2RgW5UBTXaQVp+OrpeCK3bLem8smtmheoRuw==", "dev": true, "dependencies": { - "call-bind": "^1.0.5", - "get-intrinsic": "^1.2.2", + "call-bind": "^1.0.6", + "es-errors": "^1.3.0", "is-regex": "^1.1.4" }, "engines": { @@ -4062,13 +4179,10 @@ } }, "node_modules/semver": { - "version": "7.5.4", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", - "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", + "version": "7.6.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", + "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", "dev": true, - "dependencies": { - "lru-cache": "^6.0.0" - }, "bin": { "semver": "bin/semver.js" }, @@ -4077,39 +4191,41 @@ } }, "node_modules/serialize-javascript": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.0.tgz", - "integrity": "sha512-Qr3TosvguFt8ePWqsvRfrKyQXIiW+nGbYpy8XK24NQHE83caxWt+mIymTT19DGFbNWNLfEwsrkSmN64lVWB9ag==", + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.2.tgz", + "integrity": "sha512-Saa1xPByTTq2gdeFZYLLo+RFE35NHZkAbqZeWNd3BpzppeVisAqpDjcp8dyf6uIvEqJRd46jemmyA4iFIeVk8g==", "dev": true, "dependencies": { "randombytes": "^2.1.0" } }, "node_modules/set-function-length": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.0.tgz", - "integrity": 
"sha512-4DBHDoyHlM1IRPGYcoxexgh67y4ueR53FKV1yyxwFMY7aCqcN/38M1+SwZ/qJQ8iLv7+ck385ot4CcisOAPT9w==", + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.2.tgz", + "integrity": "sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==", "dev": true, "dependencies": { - "define-data-property": "^1.1.1", + "define-data-property": "^1.1.4", + "es-errors": "^1.3.0", "function-bind": "^1.1.2", - "get-intrinsic": "^1.2.2", + "get-intrinsic": "^1.2.4", "gopd": "^1.0.1", - "has-property-descriptors": "^1.0.1" + "has-property-descriptors": "^1.0.2" }, "engines": { "node": ">= 0.4" } }, "node_modules/set-function-name": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/set-function-name/-/set-function-name-2.0.1.tgz", - "integrity": "sha512-tMNCiqYVkXIZgc2Hnoy2IvC/f8ezc5koaRFkCjrpWzGpCd3qbZXPzVy9MAZzK1ch/X0jvSkojys3oqJN0qCmdA==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/set-function-name/-/set-function-name-2.0.2.tgz", + "integrity": "sha512-7PGFlmtwsEADb0WYyvCMa1t+yke6daIG4Wirafur5kcf+MhUnPms1UeR0CKQdTZD81yESwMHbtn+TR+dMviakQ==", "dev": true, "dependencies": { - "define-data-property": "^1.0.1", + "define-data-property": "^1.1.4", + "es-errors": "^1.3.0", "functions-have-names": "^1.2.3", - "has-property-descriptors": "^1.0.0" + "has-property-descriptors": "^1.0.2" }, "engines": { "node": ">= 0.4" @@ -4149,14 +4265,18 @@ } }, "node_modules/side-channel": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz", - "integrity": "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==", + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.6.tgz", + "integrity": "sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA==", "dev": true, "dependencies": { - "call-bind": "^1.0.0", - "get-intrinsic": "^1.0.2", - "object-inspect": "^1.9.0" + "call-bind": "^1.0.7", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.4", + "object-inspect": "^1.13.1" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -4181,15 +4301,6 @@ "url": "https://opencollective.com/sinon" } }, - "node_modules/sinon/node_modules/diff": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/diff/-/diff-5.1.0.tgz", - "integrity": "sha512-D+mk+qE8VC/PAUrlAU34N+VfXev0ghe5ywmpqrawphmVZc1bEfn56uo9qpyGp1p4xpzOHkSW4ztBd6L7Xx4ACw==", - "dev": true, - "engines": { - "node": ">=0.3.1" - } - }, "node_modules/slash": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", @@ -4241,14 +4352,15 @@ } }, "node_modules/string.prototype.trim": { - "version": "1.2.8", - "resolved": "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.2.8.tgz", - "integrity": "sha512-lfjY4HcixfQXOfaqCvcBuOIapyaroTXhbkfJN3gcB1OtyupngWK4sEET9Knd0cXd28kTUqu/kHoV4HKSJdnjiQ==", + "version": "1.2.9", + "resolved": "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.2.9.tgz", + "integrity": "sha512-klHuCNxiMZ8MlsOihJhJEBJAiMVqU3Z2nEXWfWnIqjN0gEFS9J9+IxKozWWtQGcgoa1WUZzLjKPTr4ZHNFTFxw==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "es-abstract": "^1.22.1" + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.0", + "es-object-atoms": "^1.0.0" }, "engines": { "node": ">= 0.4" @@ 
-4258,28 +4370,31 @@ } }, "node_modules/string.prototype.trimend": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.7.tgz", - "integrity": "sha512-Ni79DqeB72ZFq1uH/L6zJ+DKZTkOtPIHovb3YZHQViE+HDouuU4mBrLOLDn5Dde3RF8qw5qVETEjhu9locMLvA==", + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.8.tgz", + "integrity": "sha512-p73uL5VCHCO2BZZ6krwwQE3kCzM7NKmis8S//xEC6fQonchbum4eP6kR4DLEjQFO3Wnj3Fuo8NM0kOSjVdHjZQ==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "es-abstract": "^1.22.1" + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-object-atoms": "^1.0.0" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/string.prototype.trimstart": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.7.tgz", - "integrity": "sha512-NGhtDFu3jCEm7B4Fy0DpLewdJQOZcQ0rGbwQ/+stjnrp2i+rlKeCvos9hOIeCmqwratM47OBxY7uFZzjxHXmrg==", + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.8.tgz", + "integrity": "sha512-UXSH262CSZY1tfu3G3Secr6uGLCFVPMhIqHjlgCUtCCcgihYc/xKs9djMTMUOb2j1mVSeU8EU6NWc/iQKU6Gfg==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "es-abstract": "^1.22.1" + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -4370,9 +4485,9 @@ } }, "node_modules/table-layout/node_modules/typical": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/typical/-/typical-7.1.1.tgz", - "integrity": "sha512-T+tKVNs6Wu7IWiAce5BgMd7OZfNYUndHwc5MknN+UHOudi7sGZzuHdCadllRuqJ3fPtgFtIH9+lt9qRv6lmpfA==", + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/typical/-/typical-7.2.0.tgz", + "integrity": "sha512-W1+HdVRUl8fS3MZ9ogD51GOb46xMmhAZzR0WPw5jcgIZQJVvkddYzAl4YTU6g5w33Y1iRQLdIi2/1jhi2RNL0g==", "engines": { "node": ">=12.17" } @@ -4390,10 +4505,32 @@ "node": ">=6.0.0" } }, + "node_modules/temp/node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "deprecated": "Glob versions prior to v9 are no longer supported", + "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, "node_modules/temp/node_modules/rimraf": { "version": "2.6.3", "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.6.3.tgz", "integrity": "sha512-mwqeW5XsA2qAejG46gYdENaxXjx9onRNCfn7L0duuP4hCuTIi/QO7PDK07KJfp1d+izWPrzEJDcSqBa0OZQriA==", + "deprecated": "Rimraf versions prior to v4 are no longer supported", "dev": true, "dependencies": { "glob": "^7.1.3" @@ -4512,6 +4649,27 @@ } } }, + "node_modules/ts-node-dev/node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "deprecated": "Glob versions prior to v9 are no longer supported", 
+ "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, "node_modules/ts-node-dev/node_modules/mkdirp": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", @@ -4528,6 +4686,7 @@ "version": "2.7.1", "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz", "integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==", + "deprecated": "Rimraf versions prior to v4 are no longer supported", "dev": true, "dependencies": { "glob": "^7.1.3" @@ -4579,9 +4738,9 @@ } }, "node_modules/tslib": { - "version": "2.6.2", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz", - "integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==" + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.7.0.tgz", + "integrity": "sha512-gLXCKdN1/j47AiHiOkJN69hJmcbGTHI0ImLmbYLHykhgeN0jVGola9yVjFgzCUklsZQMW55o+dW7IXv3RCXDzA==" }, "node_modules/tsutils": { "version": "3.21.0", @@ -4617,9 +4776,9 @@ } }, "node_modules/type-detect": { - "version": "4.0.8", - "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz", - "integrity": "sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.1.0.tgz", + "integrity": "sha512-Acylog8/luQ8L7il+geoSxhEkazvkslg7PSNKOX59mbB9cOveP5aq9h74Y7YU8yDpJwetzQQrfIwtf4Wp4LKcw==", "dev": true, "engines": { "node": ">=4" @@ -4638,29 +4797,30 @@ } }, "node_modules/typed-array-buffer": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/typed-array-buffer/-/typed-array-buffer-1.0.0.tgz", - "integrity": "sha512-Y8KTSIglk9OZEr8zywiIHG/kmQ7KWyjseXs1CbSo8vC42w7hg2HgYTxSWwP0+is7bWDc1H+Fo026CpHFwm8tkw==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/typed-array-buffer/-/typed-array-buffer-1.0.2.tgz", + "integrity": "sha512-gEymJYKZtKXzzBzM4jqa9w6Q1Jjm7x2d+sh19AdsD4wqnMPDYyvwpsIc2Q/835kHuo3BEQ7CjelGhfTsoBb2MQ==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", - "get-intrinsic": "^1.2.1", - "is-typed-array": "^1.1.10" + "call-bind": "^1.0.7", + "es-errors": "^1.3.0", + "is-typed-array": "^1.1.13" }, "engines": { "node": ">= 0.4" } }, "node_modules/typed-array-byte-length": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/typed-array-byte-length/-/typed-array-byte-length-1.0.0.tgz", - "integrity": "sha512-Or/+kvLxNpeQ9DtSydonMxCx+9ZXOswtwJn17SNLvhptaXYDJvkFFP5zbfU/uLmvnBJlI4yrnXRxpdWH/M5tNA==", + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/typed-array-byte-length/-/typed-array-byte-length-1.0.1.tgz", + "integrity": "sha512-3iMJ9q0ao7WE9tWcaYKIptkNBuOIcZCCT0d4MRvuuH88fEoEH62IuQe0OtraD3ebQEoTRk8XCBoknUNc1Y67pw==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", + "call-bind": "^1.0.7", "for-each": "^0.3.3", - "has-proto": "^1.0.1", - "is-typed-array": "^1.1.10" + "gopd": "^1.0.1", + "has-proto": "^1.0.3", + "is-typed-array": "^1.1.13" }, "engines": { "node": ">= 0.4" @@ -4670,16 +4830,17 @@ } }, "node_modules/typed-array-byte-offset": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/typed-array-byte-offset/-/typed-array-byte-offset-1.0.0.tgz", - 
"integrity": "sha512-RD97prjEt9EL8YgAgpOkf3O4IF9lhJFr9g0htQkm0rchFp/Vx7LW5Q8fSXXub7BXAODyUQohRMyOc3faCPd0hg==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/typed-array-byte-offset/-/typed-array-byte-offset-1.0.2.tgz", + "integrity": "sha512-Ous0vodHa56FviZucS2E63zkgtgrACj7omjwd/8lTEMEPFFyjfixMZ1ZXenpgCFBBt4EC1J2XsyVS2gkG0eTFA==", "dev": true, "dependencies": { - "available-typed-arrays": "^1.0.5", - "call-bind": "^1.0.2", + "available-typed-arrays": "^1.0.7", + "call-bind": "^1.0.7", "for-each": "^0.3.3", - "has-proto": "^1.0.1", - "is-typed-array": "^1.1.10" + "gopd": "^1.0.1", + "has-proto": "^1.0.3", + "is-typed-array": "^1.1.13" }, "engines": { "node": ">= 0.4" @@ -4689,14 +4850,20 @@ } }, "node_modules/typed-array-length": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/typed-array-length/-/typed-array-length-1.0.4.tgz", - "integrity": "sha512-KjZypGq+I/H7HI5HlOoGHkWUUGq+Q0TPhQurLbyrVrvnKTBgzLhIJ7j6J/XTQOi0d1RjyZ0wdas8bKs2p0x3Ng==", + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/typed-array-length/-/typed-array-length-1.0.6.tgz", + "integrity": "sha512-/OxDN6OtAk5KBpGb28T+HZc2M+ADtvRxXrKKbUwtsLgdoxgX13hyy7ek6bFRl5+aBs2yZzB0c4CnQfAtVypW/g==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", + "call-bind": "^1.0.7", "for-each": "^0.3.3", - "is-typed-array": "^1.1.9" + "gopd": "^1.0.1", + "has-proto": "^1.0.3", + "is-typed-array": "^1.1.13", + "possible-typed-array-names": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -4745,9 +4912,9 @@ } }, "node_modules/typedoc/node_modules/minimatch": { - "version": "9.0.3", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.3.tgz", - "integrity": "sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg==", + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", + "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", "dev": true, "dependencies": { "brace-expansion": "^2.0.1" @@ -4781,9 +4948,9 @@ } }, "node_modules/uglify-js": { - "version": "3.17.4", - "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-3.17.4.tgz", - "integrity": "sha512-T9q82TJI9e/C1TAxYvfb16xO120tMVFZrGA3f9/P4424DNu6ypK103y0GPFVa17yotwSyZW5iYXgjYHkGrJW/g==", + "version": "3.19.3", + "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-3.19.3.tgz", + "integrity": "sha512-v3Xu+yuwBXisp6QYTcH4UbH+xYJXqnq2m/LtQVWKWzYc1iehYnLixoQDN9FH6/j9/oybfd6W9Ghwkl8+UMKTKQ==", "dev": true, "optional": true, "bin": { @@ -4855,12 +5022,12 @@ "dev": true }, "node_modules/web-streams-polyfill": { - "version": "3.3.2", - "resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-3.3.2.tgz", - "integrity": "sha512-3pRGuxRF5gpuZc0W+EpwQRmCD7gRqcDOMt688KmdlDAgAyaB1XlN0zq2njfDNm44XVdIouE7pZ6GzbdyH47uIQ==", + "version": "4.0.0-beta.3", + "resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-4.0.0-beta.3.tgz", + "integrity": "sha512-QW95TCTaHmsYfHDybGMwO5IJIM93I/6vTRk+daHTWFPhwh+C8Cg7j7XyKrwrj8Ib6vYXe0ocYNrmzY4xAAN6ug==", "dev": true, "engines": { - "node": ">= 8" + "node": ">= 14" } }, "node_modules/webidl-conversions": { @@ -4911,16 +5078,16 @@ } }, "node_modules/which-typed-array": { - "version": "1.1.13", - "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.13.tgz", - "integrity": 
"sha512-P5Nra0qjSncduVPEAr7xhoF5guty49ArDTwzJ/yNuPIbZppyRxFQsRCWrocxIY+CnMVG+qfbU2FmDKyvSGClow==", + "version": "1.1.15", + "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.15.tgz", + "integrity": "sha512-oV0jmFtUky6CXfkqehVvBP/LSWJ2sy4vWMioiENyJLePrBO/yKyV9OyJySfAKosh+RYkIl5zJCNZ8/4JncrpdA==", "dev": true, "dependencies": { - "available-typed-arrays": "^1.0.5", - "call-bind": "^1.0.4", + "available-typed-arrays": "^1.0.7", + "call-bind": "^1.0.7", "for-each": "^0.3.3", "gopd": "^1.0.1", - "has-tostringtag": "^1.0.0" + "has-tostringtag": "^1.0.2" }, "engines": { "node": ">= 0.4" @@ -4929,6 +5096,15 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/word-wrap": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz", + "integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/wordwrap": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-1.0.0.tgz", @@ -4944,9 +5120,9 @@ } }, "node_modules/workerpool": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/workerpool/-/workerpool-6.2.1.tgz", - "integrity": "sha512-ILEIE97kDZvF9Wb9f6h5aXK4swSlKGUcOEGiIYb2OOu/IrDU9iwj0fD//SsA6E5ibwJxpEvhullJY4Sl4GcpAw==", + "version": "6.5.1", + "resolved": "https://registry.npmjs.org/workerpool/-/workerpool-6.5.1.tgz", + "integrity": "sha512-Fs4dNYcsdpYSAfVxhnl1L5zTksjvOJxtC5hzMNl+1t9B8hTJTdKDyZ5ju7ztgPy+ft9tBFXoOlDNiOT9WUXZlA==", "dev": true }, "node_modules/wrap-ansi": { @@ -4990,12 +5166,6 @@ "node": ">=10" } }, - "node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true - }, "node_modules/yargs": { "version": "16.2.0", "resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz", @@ -5015,9 +5185,9 @@ } }, "node_modules/yargs-parser": { - "version": "20.2.4", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.4.tgz", - "integrity": "sha512-WOkpgNhPTlE73h4VFAFsOnomJVaovO8VqLDzy5saChRBFQFBoMYirowyW+Q9HB4HFF4Z7VZTiG3iSzJJA29yRA==", + "version": "20.2.9", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.9.tgz", + "integrity": "sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==", "dev": true, "engines": { "node": ">=10" diff --git a/node/package.json b/node/package.json index c6005e29..919ef8b7 100644 --- a/node/package.json +++ b/node/package.json @@ -58,7 +58,7 @@ "ts-node-dev": "^2.0.0", "typedoc": "^0.24.7", "typedoc-plugin-markdown": "^3.15.3", - "typescript": "*", + "typescript": "^5.1.0", "uuid": "^9.0.0" }, "dependencies": { @@ -88,10 +88,10 @@ } }, "optionalDependencies": { - "@lancedb/vectordb-darwin-arm64": "0.4.20", - "@lancedb/vectordb-darwin-x64": "0.4.20", - "@lancedb/vectordb-linux-arm64-gnu": "0.4.20", - "@lancedb/vectordb-linux-x64-gnu": "0.4.20", - "@lancedb/vectordb-win32-x64-msvc": "0.4.20" + "@lancedb/vectordb-darwin-arm64": "0.11.0-beta.1", + "@lancedb/vectordb-darwin-x64": "0.11.0-beta.1", + "@lancedb/vectordb-linux-arm64-gnu": "0.11.0-beta.1", + "@lancedb/vectordb-linux-x64-gnu": "0.11.0-beta.1", + "@lancedb/vectordb-win32-x64-msvc": "0.11.0-beta.1" } } diff --git a/node/src/integration_test/test.ts b/node/src/integration_test/test.ts index 
0fd84406..a4098773 100644 --- a/node/src/integration_test/test.ts +++ b/node/src/integration_test/test.ts @@ -14,6 +14,7 @@ import { describe } from 'mocha' import * as chai from 'chai' +import { assert } from 'chai' import * as chaiAsPromised from 'chai-as-promised' import { v4 as uuidv4 } from 'uuid' @@ -22,7 +23,6 @@ import { tmpdir } from 'os' import * as fs from 'fs' import * as path from 'path' -const assert = chai.assert chai.use(chaiAsPromised) describe('LanceDB AWS Integration test', function () { diff --git a/node/src/query.ts b/node/src/query.ts index 23d8acca..f622050c 100644 --- a/node/src/query.ts +++ b/node/src/query.ts @@ -142,9 +142,9 @@ export class Query { Object.keys(entry).forEach((key: string) => { if (entry[key] instanceof Vector) { // toJSON() returns f16 array correctly - newObject[key] = (entry[key] as Vector).toJSON() + newObject[key] = (entry[key] as any).toJSON() } else { - newObject[key] = entry[key] + newObject[key] = entry[key] as any } }) return newObject as unknown as T diff --git a/node/src/remote/index.ts b/node/src/remote/index.ts index 8f01b48c..3a7dc803 100644 --- a/node/src/remote/index.ts +++ b/node/src/remote/index.ts @@ -247,9 +247,9 @@ export class RemoteQuery extends Query { const newObject: Record = {} Object.keys(entry).forEach((key: string) => { if (entry[key] instanceof Vector) { - newObject[key] = (entry[key] as Vector).toArray() + newObject[key] = (entry[key] as any).toArray() } else { - newObject[key] = entry[key] + newObject[key] = entry[key] as any } }) return newObject as unknown as T diff --git a/node/src/test/test.ts b/node/src/test/test.ts index 4f2b3218..eacdfc36 100644 --- a/node/src/test/test.ts +++ b/node/src/test/test.ts @@ -14,6 +14,7 @@ import { describe } from "mocha"; import { track } from "temp"; +import { assert, expect } from 'chai' import * as chai from "chai"; import * as chaiAsPromised from "chai-as-promised"; @@ -44,8 +45,6 @@ import { } from "apache-arrow"; import type { RemoteRequest, RemoteResponse } from "../middleware"; -const expect = chai.expect; -const assert = chai.assert; chai.use(chaiAsPromised); describe("LanceDB client", function () { @@ -169,7 +168,7 @@ describe("LanceDB client", function () { // Should reject a bad filter await expect(table.filter("id % 2 = 0 AND").execute()).to.be.rejectedWith( - /.*sql parser error: Expected an expression:, found: EOF.*/ + /.*sql parser error: .*/ ); }); From f3b6a1f55b5d8670d7996c083e8e1cc1d1cca4c2 Mon Sep 17 00:00:00 2001 From: Will Jones Date: Wed, 9 Oct 2024 10:46:27 -0700 Subject: [PATCH 15/51] feat(node): bind remote SDK to rust implementation (#1730) Closes [#2509](https://github.com/lancedb/sophon/issues/2509) This is the Node.js analogue of #1700 --- .bumpversion.toml | 5 + nodejs/Cargo.toml | 4 +- nodejs/__test__/remote.test.ts | 93 ++++++++++++ nodejs/lancedb/index.ts | 18 +-- nodejs/lancedb/remote/client.ts | 218 --------------------------- nodejs/lancedb/remote/connection.ts | 193 ------------------------ nodejs/lancedb/remote/index.ts | 3 - nodejs/lancedb/remote/table.ts | 226 ---------------------------- nodejs/package-lock.json | 97 ++++++++++-- nodejs/package.json | 2 +- nodejs/src/connection.rs | 18 +++ nodejs/src/lib.rs | 14 ++ nodejs/src/remote.rs | 120 +++++++++++++++ 13 files changed, 347 insertions(+), 664 deletions(-) create mode 100644 nodejs/__test__/remote.test.ts delete mode 100644 nodejs/lancedb/remote/client.ts delete mode 100644 nodejs/lancedb/remote/connection.ts delete mode 100644 nodejs/lancedb/remote/index.ts delete mode 100644 
nodejs/lancedb/remote/table.ts create mode 100644 nodejs/src/remote.rs diff --git a/.bumpversion.toml b/.bumpversion.toml index 7e0caac8..aaf46f1c 100644 --- a/.bumpversion.toml +++ b/.bumpversion.toml @@ -103,3 +103,8 @@ search = "\nversion = \"{current_version}\"" filename = "rust/lancedb/Cargo.toml" replace = "\nversion = \"{new_version}\"" search = "\nversion = \"{current_version}\"" + +[[tool.bumpversion.files]] +filename = "nodejs/Cargo.toml" +replace = "\nversion = \"{new_version}\"" +search = "\nversion = \"{current_version}\"" diff --git a/nodejs/Cargo.toml b/nodejs/Cargo.toml index b4226e9f..d6d36523 100644 --- a/nodejs/Cargo.toml +++ b/nodejs/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "lancedb-nodejs" edition.workspace = true -version = "0.0.0" +version = "0.11.0-beta.1" license.workspace = true description.workspace = true repository.workspace = true @@ -14,7 +14,7 @@ crate-type = ["cdylib"] [dependencies] arrow-ipc.workspace = true futures.workspace = true -lancedb = { path = "../rust/lancedb" } +lancedb = { path = "../rust/lancedb", features = ["remote"] } napi = { version = "2.16.8", default-features = false, features = [ "napi9", "async", diff --git a/nodejs/__test__/remote.test.ts b/nodejs/__test__/remote.test.ts new file mode 100644 index 00000000..3e693197 --- /dev/null +++ b/nodejs/__test__/remote.test.ts @@ -0,0 +1,93 @@ +// Copyright 2024 Lance Developers. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +import * as http from "http"; +import { RequestListener } from "http"; +import { Connection, ConnectionOptions, connect } from "../lancedb"; + +async function withMockDatabase( + listener: RequestListener, + callback: (db: Connection) => void, + connectionOptions?: ConnectionOptions, +) { + const server = http.createServer(listener); + server.listen(8000); + + const db = await connect( + "db://dev", + Object.assign( + { + apiKey: "fake", + hostOverride: "http://localhost:8000", + }, + connectionOptions, + ), + ); + + try { + await callback(db); + } finally { + server.close(); + } +} + +describe("remote connection", () => { + it("should accept partial connection options", async () => { + await connect("db://test", { + apiKey: "fake", + clientConfig: { + timeoutConfig: { readTimeout: 5 }, + retryConfig: { retries: 2 }, + }, + }); + }); + + it("should pass down apiKey and userAgent", async () => { + await withMockDatabase( + (req, res) => { + expect(req.headers["x-api-key"]).toEqual("fake"); + expect(req.headers["user-agent"]).toEqual( + `LanceDB-Node-Client/${process.env.npm_package_version}`, + ); + + const body = JSON.stringify({ tables: [] }); + res.writeHead(200, { "Content-Type": "application/json" }).end(body); + }, + async (db) => { + const tableNames = await db.tableNames(); + expect(tableNames).toEqual([]); + }, + ); + }); + + it("allows customizing user agent", async () => { + await withMockDatabase( + (req, res) => { + expect(req.headers["user-agent"]).toEqual("MyApp/1.0"); + + const body = JSON.stringify({ tables: [] }); + res.writeHead(200, { "Content-Type": "application/json" }).end(body); + }, + async (db) => { + const tableNames = await db.tableNames(); + expect(tableNames).toEqual([]); + }, + { + clientConfig: { + userAgent: "MyApp/1.0", + }, + }, + ); + }); +}); diff --git a/nodejs/lancedb/index.ts b/nodejs/lancedb/index.ts index 1e66bb8c..74da915f 100644 --- a/nodejs/lancedb/index.ts +++ b/nodejs/lancedb/index.ts @@ -23,8 +23,6 @@ import { Connection as LanceDbConnection, } from "./native.js"; -import { RemoteConnection, RemoteConnectionOptions } from "./remote"; - export { WriteOptions, WriteMode, @@ -33,6 +31,9 @@ export { ConnectionOptions, IndexStatistics, IndexConfig, + ClientConfig, + TimeoutConfig, + RetryConfig, } from "./native.js"; export { @@ -87,7 +88,7 @@ export * as embedding from "./embedding"; */ export async function connect( uri: string, - opts?: Partial, + opts?: Partial, ): Promise; /** * Connect to a LanceDB instance at the given URI. @@ -108,13 +109,11 @@ export async function connect( * ``` */ export async function connect( - opts: Partial & { uri: string }, + opts: Partial & { uri: string }, ): Promise; export async function connect( - uriOrOptions: - | string - | (Partial & { uri: string }), - opts: Partial = {}, + uriOrOptions: string | (Partial & { uri: string }), + opts: Partial = {}, ): Promise { let uri: string | undefined; if (typeof uriOrOptions !== "string") { @@ -129,9 +128,6 @@ export async function connect( throw new Error("uri is required"); } - if (uri?.startsWith("db://")) { - return new RemoteConnection(uri, opts as RemoteConnectionOptions); - } opts = (opts as ConnectionOptions) ?? {}; (opts).storageOptions = cleanseStorageOptions( (opts).storageOptions, diff --git a/nodejs/lancedb/remote/client.ts b/nodejs/lancedb/remote/client.ts deleted file mode 100644 index 4e4a92a3..00000000 --- a/nodejs/lancedb/remote/client.ts +++ /dev/null @@ -1,218 +0,0 @@ -// Copyright 2023 LanceDB Developers. 
-// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -import axios, { - AxiosError, - type AxiosResponse, - type ResponseType, -} from "axios"; -import { Table as ArrowTable } from "../arrow"; -import { tableFromIPC } from "../arrow"; -import { VectorQuery } from "../query"; - -export class RestfulLanceDBClient { - #dbName: string; - #region: string; - #apiKey: string; - #hostOverride?: string; - #closed: boolean = false; - #timeout: number = 12 * 1000; // 12 seconds; - #session?: import("axios").AxiosInstance; - - constructor( - dbName: string, - apiKey: string, - region: string, - hostOverride?: string, - timeout?: number, - ) { - this.#dbName = dbName; - this.#apiKey = apiKey; - this.#region = region; - this.#hostOverride = hostOverride ?? this.#hostOverride; - this.#timeout = timeout ?? this.#timeout; - } - - // todo: cache the session. - get session(): import("axios").AxiosInstance { - if (this.#session !== undefined) { - return this.#session; - } else { - return axios.create({ - baseURL: this.url, - headers: { - // biome-ignore lint: external API - Authorization: `Bearer ${this.#apiKey}`, - }, - transformResponse: decodeErrorData, - timeout: this.#timeout, - }); - } - } - - get url(): string { - return ( - this.#hostOverride ?? - `https://${this.#dbName}.${this.#region}.api.lancedb.com` - ); - } - - get headers(): { [key: string]: string } { - const headers: { [key: string]: string } = { - "x-api-key": this.#apiKey, - "x-request-id": "na", - }; - if (this.#region == "local") { - headers["Host"] = `${this.#dbName}.${this.#region}.api.lancedb.com`; - } - if (this.#hostOverride) { - headers["x-lancedb-database"] = this.#dbName; - } - return headers; - } - - isOpen(): boolean { - return !this.#closed; - } - - private checkNotClosed(): void { - if (this.#closed) { - throw new Error("Connection is closed"); - } - } - - close(): void { - this.#session = undefined; - this.#closed = true; - } - - // biome-ignore lint/suspicious/noExplicitAny: - async get(uri: string, params?: Record): Promise { - this.checkNotClosed(); - uri = new URL(uri, this.url).toString(); - let response; - try { - response = await this.session.get(uri, { - headers: this.headers, - params, - }); - } catch (e) { - if (e instanceof AxiosError && e.response) { - response = e.response; - } else { - throw e; - } - } - - RestfulLanceDBClient.checkStatus(response!); - return response!.data; - } - - // biome-ignore lint/suspicious/noExplicitAny: api response - async post(uri: string, body?: any): Promise; - async post( - uri: string, - // biome-ignore lint/suspicious/noExplicitAny: api request - body: any, - additional: { - config?: { responseType: "arraybuffer" }; - headers?: Record; - params?: Record; - }, - ): Promise; - async post( - uri: string, - // biome-ignore lint/suspicious/noExplicitAny: api request - body?: any, - additional?: { - config?: { responseType: ResponseType }; - headers?: Record; - params?: Record; - }, - // biome-ignore lint/suspicious/noExplicitAny: api response - ): Promise { - 
this.checkNotClosed(); - uri = new URL(uri, this.url).toString(); - additional = Object.assign( - { config: { responseType: "json" } }, - additional, - ); - - const headers = { ...this.headers, ...additional.headers }; - - if (!headers["Content-Type"]) { - headers["Content-Type"] = "application/json"; - } - let response; - try { - response = await this.session.post(uri, body, { - headers, - responseType: additional!.config!.responseType, - params: new Map(Object.entries(additional.params ?? {})), - }); - } catch (e) { - if (e instanceof AxiosError && e.response) { - response = e.response; - } else { - throw e; - } - } - RestfulLanceDBClient.checkStatus(response!); - if (additional!.config!.responseType === "arraybuffer") { - return response!.data; - } else { - return JSON.parse(response!.data); - } - } - - async listTables(limit = 10, pageToken = ""): Promise { - const json = await this.get("/v1/table", { limit, pageToken }); - return json.tables; - } - - async query(tableName: string, query: VectorQuery): Promise { - const tbl = await this.post(`/v1/table/${tableName}/query`, query, { - config: { - responseType: "arraybuffer", - }, - }); - return tableFromIPC(tbl); - } - - static checkStatus(response: AxiosResponse): void { - if (response.status === 404) { - throw new Error(`Not found: ${response.data}`); - } else if (response.status >= 400 && response.status < 500) { - throw new Error( - `Bad Request: ${response.status}, error: ${response.data}`, - ); - } else if (response.status >= 500 && response.status < 600) { - throw new Error( - `Internal Server Error: ${response.status}, error: ${response.data}`, - ); - } else if (response.status !== 200) { - throw new Error( - `Unknown Error: ${response.status}, error: ${response.data}`, - ); - } - } -} - -function decodeErrorData(data: unknown) { - if (Buffer.isBuffer(data)) { - const decoded = data.toString("utf-8"); - return decoded; - } - return data; -} diff --git a/nodejs/lancedb/remote/connection.ts b/nodejs/lancedb/remote/connection.ts deleted file mode 100644 index 4d914c54..00000000 --- a/nodejs/lancedb/remote/connection.ts +++ /dev/null @@ -1,193 +0,0 @@ -import { Schema } from "apache-arrow"; -import { - Data, - SchemaLike, - fromTableToStreamBuffer, - makeEmptyTable, -} from "../arrow"; -import { - Connection, - CreateTableOptions, - OpenTableOptions, - TableNamesOptions, -} from "../connection"; -import { Table } from "../table"; -import { TTLCache } from "../util"; -import { RestfulLanceDBClient } from "./client"; -import { RemoteTable } from "./table"; - -export interface RemoteConnectionOptions { - apiKey?: string; - region?: string; - hostOverride?: string; - timeout?: number; -} - -export class RemoteConnection extends Connection { - #dbName: string; - #apiKey: string; - #region: string; - #client: RestfulLanceDBClient; - #tableCache = new TTLCache(300_000); - - constructor( - url: string, - { apiKey, region, hostOverride, timeout }: RemoteConnectionOptions, - ) { - super(); - apiKey = apiKey ?? process.env.LANCEDB_API_KEY; - region = region ?? 
process.env.LANCEDB_REGION; - - if (!apiKey) { - throw new Error("apiKey is required when connecting to LanceDB Cloud"); - } - - if (!region) { - throw new Error("region is required when connecting to LanceDB Cloud"); - } - - const parsed = new URL(url); - if (parsed.protocol !== "db:") { - throw new Error( - `invalid protocol: ${parsed.protocol}, only accepts db://`, - ); - } - - this.#dbName = parsed.hostname; - this.#apiKey = apiKey; - this.#region = region; - this.#client = new RestfulLanceDBClient( - this.#dbName, - this.#apiKey, - this.#region, - hostOverride, - timeout, - ); - } - - isOpen(): boolean { - return this.#client.isOpen(); - } - close(): void { - return this.#client.close(); - } - - display(): string { - return `RemoteConnection(${this.#dbName})`; - } - - async tableNames(options?: Partial): Promise { - const response = await this.#client.get("/v1/table/", { - limit: options?.limit ?? 10, - // biome-ignore lint/style/useNamingConvention: - page_token: options?.startAfter ?? "", - }); - const body = await response.body(); - for (const table of body.tables) { - this.#tableCache.set(table, true); - } - return body.tables; - } - - async openTable( - name: string, - _options?: Partial | undefined, - ): Promise
{ - if (this.#tableCache.get(name) === undefined) { - await this.#client.post( - `/v1/table/${encodeURIComponent(name)}/describe/`, - ); - this.#tableCache.set(name, true); - } - return new RemoteTable(this.#client, name, this.#dbName); - } - - async createTable( - nameOrOptions: - | string - | ({ name: string; data: Data } & Partial), - data?: Data, - options?: Partial | undefined, - ): Promise
{ - if (typeof nameOrOptions !== "string" && "name" in nameOrOptions) { - const { name, data, ...options } = nameOrOptions; - return this.createTable(name, data, options); - } - if (data === undefined) { - throw new Error("data is required"); - } - if (options?.mode) { - console.warn( - "option 'mode' is not supported in LanceDB Cloud", - "LanceDB Cloud only supports the default 'create' mode.", - "If the table already exists, an error will be thrown.", - ); - } - if (options?.embeddingFunction) { - console.warn( - "embedding_functions is not yet supported on LanceDB Cloud.", - "Please vote https://github.com/lancedb/lancedb/issues/626 ", - "for this feature.", - ); - } - - const { buf } = await Table.parseTableData( - data, - options, - true /** streaming */, - ); - - await this.#client.post( - `/v1/table/${encodeURIComponent(nameOrOptions)}/create/`, - buf, - { - config: { - responseType: "arraybuffer", - }, - headers: { "Content-Type": "application/vnd.apache.arrow.stream" }, - }, - ); - this.#tableCache.set(nameOrOptions, true); - return new RemoteTable(this.#client, nameOrOptions, this.#dbName); - } - - async createEmptyTable( - name: string, - schema: SchemaLike, - options?: Partial | undefined, - ): Promise
{ - if (options?.mode) { - console.warn(`mode is not supported on LanceDB Cloud`); - } - - if (options?.embeddingFunction) { - console.warn( - "embeddingFunction is not yet supported on LanceDB Cloud.", - "Please vote https://github.com/lancedb/lancedb/issues/626 ", - "for this feature.", - ); - } - const emptyTable = makeEmptyTable(schema); - const buf = await fromTableToStreamBuffer(emptyTable); - - await this.#client.post( - `/v1/table/${encodeURIComponent(name)}/create/`, - buf, - { - config: { - responseType: "arraybuffer", - }, - headers: { "Content-Type": "application/vnd.apache.arrow.stream" }, - }, - ); - - this.#tableCache.set(name, true); - return new RemoteTable(this.#client, name, this.#dbName); - } - - async dropTable(name: string): Promise { - await this.#client.post(`/v1/table/${encodeURIComponent(name)}/drop/`); - - this.#tableCache.delete(name); - } -} diff --git a/nodejs/lancedb/remote/index.ts b/nodejs/lancedb/remote/index.ts deleted file mode 100644 index d1faaae9..00000000 --- a/nodejs/lancedb/remote/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -export { RestfulLanceDBClient } from "./client"; -export { type RemoteConnectionOptions, RemoteConnection } from "./connection"; -export { RemoteTable } from "./table"; diff --git a/nodejs/lancedb/remote/table.ts b/nodejs/lancedb/remote/table.ts deleted file mode 100644 index c1712415..00000000 --- a/nodejs/lancedb/remote/table.ts +++ /dev/null @@ -1,226 +0,0 @@ -// Copyright 2023 LanceDB Developers. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -import { Table as ArrowTable } from "apache-arrow"; - -import { Data, IntoVector } from "../arrow"; - -import { IndexStatistics } from ".."; -import { CreateTableOptions } from "../connection"; -import { IndexOptions } from "../indices"; -import { MergeInsertBuilder } from "../merge"; -import { VectorQuery } from "../query"; -import { AddDataOptions, Table, UpdateOptions } from "../table"; -import { IntoSql, toSQL } from "../util"; -import { RestfulLanceDBClient } from "./client"; - -export class RemoteTable extends Table { - #client: RestfulLanceDBClient; - #name: string; - - // Used in the display() method - #dbName: string; - - get #tablePrefix() { - return `/v1/table/${encodeURIComponent(this.#name)}/`; - } - - get name(): string { - return this.#name; - } - - public constructor( - client: RestfulLanceDBClient, - tableName: string, - dbName: string, - ) { - super(); - this.#client = client; - this.#name = tableName; - this.#dbName = dbName; - } - - isOpen(): boolean { - return !this.#client.isOpen(); - } - - close(): void { - this.#client.close(); - } - - display(): string { - return `RemoteTable(${this.#dbName}; ${this.#name})`; - } - - async schema(): Promise { - const resp = await this.#client.post(`${this.#tablePrefix}/describe/`); - // TODO: parse this into a valid arrow schema - return resp.schema; - } - async add(data: Data, options?: Partial): Promise { - const { buf, mode } = await Table.parseTableData( - data, - options as CreateTableOptions, - true, - ); - await this.#client.post(`${this.#tablePrefix}/insert/`, buf, { - params: { - mode, - }, - headers: { - "Content-Type": "application/vnd.apache.arrow.stream", - }, - }); - } - - async update( - optsOrUpdates: - | (Map | Record) - | ({ - values: Map | Record; - } & Partial) - | ({ - valuesSql: Map | Record; - } & Partial), - options?: Partial, - ): Promise { - const isValues = - "values" in optsOrUpdates && typeof optsOrUpdates.values !== "string"; - const isValuesSql = - "valuesSql" in optsOrUpdates && - typeof optsOrUpdates.valuesSql !== "string"; - const isMap = (obj: unknown): obj is Map => { - return obj instanceof Map; - }; - - let predicate; - let columns: [string, string][]; - switch (true) { - case isMap(optsOrUpdates): - columns = Array.from(optsOrUpdates.entries()); - predicate = options?.where; - break; - case isValues && isMap(optsOrUpdates.values): - columns = Array.from(optsOrUpdates.values.entries()).map(([k, v]) => [ - k, - toSQL(v), - ]); - predicate = optsOrUpdates.where; - break; - case isValues && !isMap(optsOrUpdates.values): - columns = Object.entries(optsOrUpdates.values).map(([k, v]) => [ - k, - toSQL(v), - ]); - predicate = optsOrUpdates.where; - break; - - case isValuesSql && isMap(optsOrUpdates.valuesSql): - columns = Array.from(optsOrUpdates.valuesSql.entries()); - predicate = optsOrUpdates.where; - break; - case isValuesSql && !isMap(optsOrUpdates.valuesSql): - columns = Object.entries(optsOrUpdates.valuesSql).map(([k, v]) => [ - k, - v, - ]); - predicate = optsOrUpdates.where; - break; - default: - columns = Object.entries(optsOrUpdates as Record); - predicate = options?.where; - } - - await this.#client.post(`${this.#tablePrefix}/update/`, { - predicate: predicate ?? 
null, - updates: columns, - }); - } - async countRows(filter?: unknown): Promise { - const payload = { predicate: filter }; - return await this.#client.post(`${this.#tablePrefix}/count_rows/`, payload); - } - - async delete(predicate: unknown): Promise { - const payload = { predicate }; - await this.#client.post(`${this.#tablePrefix}/delete/`, payload); - } - async createIndex( - column: string, - options?: Partial, - ): Promise { - if (options !== undefined) { - console.warn("options are not yet supported on the LanceDB cloud"); - } - const indexType = "vector"; - const metric = "L2"; - const data = { - column, - // biome-ignore lint/style/useNamingConvention: external API - index_type: indexType, - // biome-ignore lint/style/useNamingConvention: external API - metric_type: metric, - }; - await this.#client.post(`${this.#tablePrefix}/create_index`, data); - } - query(): import("..").Query { - throw new Error("query() is not yet supported on the LanceDB cloud"); - } - - search(_query: string | IntoVector): VectorQuery { - throw new Error("search() is not yet supported on the LanceDB cloud"); - } - vectorSearch(_vector: unknown): import("..").VectorQuery { - throw new Error("vectorSearch() is not yet supported on the LanceDB cloud"); - } - addColumns(_newColumnTransforms: unknown): Promise { - throw new Error("addColumns() is not yet supported on the LanceDB cloud"); - } - alterColumns(_columnAlterations: unknown): Promise { - throw new Error("alterColumns() is not yet supported on the LanceDB cloud"); - } - dropColumns(_columnNames: unknown): Promise { - throw new Error("dropColumns() is not yet supported on the LanceDB cloud"); - } - async version(): Promise { - const resp = await this.#client.post(`${this.#tablePrefix}/describe/`); - return resp.version; - } - checkout(_version: unknown): Promise { - throw new Error("checkout() is not yet supported on the LanceDB cloud"); - } - checkoutLatest(): Promise { - throw new Error( - "checkoutLatest() is not yet supported on the LanceDB cloud", - ); - } - restore(): Promise { - throw new Error("restore() is not yet supported on the LanceDB cloud"); - } - optimize(_options?: unknown): Promise { - throw new Error("optimize() is not yet supported on the LanceDB cloud"); - } - async listIndices(): Promise { - return await this.#client.post(`${this.#tablePrefix}/index/list/`); - } - toArrow(): Promise { - throw new Error("toArrow() is not yet supported on the LanceDB cloud"); - } - mergeInsert(_on: string | string[]): MergeInsertBuilder { - throw new Error("mergeInsert() is not yet supported on the LanceDB cloud"); - } - async indexStats(_name: string): Promise { - throw new Error("indexStats() is not yet supported on the LanceDB cloud"); - } -} diff --git a/nodejs/package-lock.json b/nodejs/package-lock.json index e9157ff2..05f7377d 100644 --- a/nodejs/package-lock.json +++ b/nodejs/package-lock.json @@ -1,12 +1,12 @@ { "name": "@lancedb/lancedb", - "version": "0.10.0-beta.1", + "version": "0.11.0-beta.1", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "@lancedb/lancedb", - "version": "0.10.0-beta.1", + "version": "0.11.0-beta.1", "cpu": [ "x64", "arm64" @@ -18,7 +18,6 @@ "win32" ], "dependencies": { - "axios": "^1.7.2", "reflect-metadata": "^0.2.2" }, "devDependencies": { @@ -30,6 +29,7 @@ "@napi-rs/cli": "^2.18.3", "@types/axios": "^0.14.0", "@types/jest": "^29.1.2", + "@types/node": "^22.7.4", "@types/tmp": "^0.2.6", "apache-arrow-13": "npm:apache-arrow@13.0.0", "apache-arrow-14": "npm:apache-arrow@14.0.0", @@ -4648,11 
+4648,12 @@ "optional": true }, "node_modules/@types/node": { - "version": "20.14.11", - "resolved": "https://registry.npmjs.org/@types/node/-/node-20.14.11.tgz", - "integrity": "sha512-kprQpL8MMeszbz6ojB5/tU8PLN4kesnN8Gjzw349rDlNgsSzg90lAVj3llK99Dh7JON+t9AuscPPFW6mPbTnSA==", + "version": "22.7.4", + "resolved": "https://registry.npmjs.org/@types/node/-/node-22.7.4.tgz", + "integrity": "sha512-y+NPi1rFzDs1NdQHHToqeiX2TIS79SWEAw9GYhkkx8bD0ChpfqC+n2j5OXOCpzfojBEBt6DnEnnG9MY0zk1XLg==", + "devOptional": true, "dependencies": { - "undici-types": "~5.26.4" + "undici-types": "~6.19.2" } }, "node_modules/@types/node-fetch": { @@ -4665,6 +4666,12 @@ "form-data": "^4.0.0" } }, + "node_modules/@types/node/node_modules/undici-types": { + "version": "6.19.8", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.19.8.tgz", + "integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==", + "devOptional": true + }, "node_modules/@types/pad-left": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/@types/pad-left/-/pad-left-2.1.1.tgz", @@ -4963,6 +4970,21 @@ "arrow2csv": "bin/arrow2csv.cjs" } }, + "node_modules/apache-arrow-15/node_modules/@types/node": { + "version": "20.16.10", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.16.10.tgz", + "integrity": "sha512-vQUKgWTjEIRFCvK6CyriPH3MZYiYlNy0fKiEYHWbcoWLEgs4opurGGKlebrTLqdSMIbXImH6XExNiIyNUv3WpA==", + "dev": true, + "dependencies": { + "undici-types": "~6.19.2" + } + }, + "node_modules/apache-arrow-15/node_modules/undici-types": { + "version": "6.19.8", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.19.8.tgz", + "integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==", + "dev": true + }, "node_modules/apache-arrow-16": { "name": "apache-arrow", "version": "16.0.0", @@ -4984,6 +5006,21 @@ "arrow2csv": "bin/arrow2csv.cjs" } }, + "node_modules/apache-arrow-16/node_modules/@types/node": { + "version": "20.16.10", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.16.10.tgz", + "integrity": "sha512-vQUKgWTjEIRFCvK6CyriPH3MZYiYlNy0fKiEYHWbcoWLEgs4opurGGKlebrTLqdSMIbXImH6XExNiIyNUv3WpA==", + "dev": true, + "dependencies": { + "undici-types": "~6.19.2" + } + }, + "node_modules/apache-arrow-16/node_modules/undici-types": { + "version": "6.19.8", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.19.8.tgz", + "integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==", + "dev": true + }, "node_modules/apache-arrow-17": { "name": "apache-arrow", "version": "17.0.0", @@ -5011,12 +5048,42 @@ "integrity": "sha512-BwR5KP3Es/CSht0xqBcUXS3qCAUVXwpRKsV2+arxeb65atasuXG9LykC9Ab10Cw3s2raH92ZqOeILaQbsB2ACg==", "dev": true }, + "node_modules/apache-arrow-17/node_modules/@types/node": { + "version": "20.16.10", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.16.10.tgz", + "integrity": "sha512-vQUKgWTjEIRFCvK6CyriPH3MZYiYlNy0fKiEYHWbcoWLEgs4opurGGKlebrTLqdSMIbXImH6XExNiIyNUv3WpA==", + "dev": true, + "dependencies": { + "undici-types": "~6.19.2" + } + }, "node_modules/apache-arrow-17/node_modules/flatbuffers": { "version": "24.3.25", "resolved": "https://registry.npmjs.org/flatbuffers/-/flatbuffers-24.3.25.tgz", "integrity": "sha512-3HDgPbgiwWMI9zVB7VYBHaMrbOO7Gm0v+yD2FV/sCKj+9NDeVL7BOBYUuhWAQGKWOzBo8S9WdMvV0eixO233XQ==", "dev": true }, + "node_modules/apache-arrow-17/node_modules/undici-types": { + "version": 
"6.19.8", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.19.8.tgz", + "integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==", + "dev": true + }, + "node_modules/apache-arrow/node_modules/@types/node": { + "version": "20.16.10", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.16.10.tgz", + "integrity": "sha512-vQUKgWTjEIRFCvK6CyriPH3MZYiYlNy0fKiEYHWbcoWLEgs4opurGGKlebrTLqdSMIbXImH6XExNiIyNUv3WpA==", + "peer": true, + "dependencies": { + "undici-types": "~6.19.2" + } + }, + "node_modules/apache-arrow/node_modules/undici-types": { + "version": "6.19.8", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.19.8.tgz", + "integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==", + "peer": true + }, "node_modules/argparse": { "version": "1.0.10", "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", @@ -5046,12 +5113,14 @@ "node_modules/asynckit": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", - "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==", + "devOptional": true }, "node_modules/axios": { "version": "1.7.2", "resolved": "https://registry.npmjs.org/axios/-/axios-1.7.2.tgz", "integrity": "sha512-2A8QhOMrbomlDuiLeK9XibIBzuHeRcqqNOHp0Cyp5EoJ1IFDh+XZH3A6BkXtv0K4gFGCI0Y4BM7B1wOEi0Rmgw==", + "dev": true, "dependencies": { "follow-redirects": "^1.15.6", "form-data": "^4.0.0", @@ -5536,6 +5605,7 @@ "version": "1.0.8", "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "devOptional": true, "dependencies": { "delayed-stream": "~1.0.0" }, @@ -5723,6 +5793,7 @@ "version": "1.0.0", "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", + "devOptional": true, "engines": { "node": ">=0.4.0" } @@ -6248,6 +6319,7 @@ "version": "1.15.6", "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.6.tgz", "integrity": "sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA==", + "dev": true, "funding": [ { "type": "individual", @@ -6267,6 +6339,7 @@ "version": "4.0.0", "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", + "devOptional": true, "dependencies": { "asynckit": "^0.4.0", "combined-stream": "^1.0.8", @@ -7773,6 +7846,7 @@ "version": "1.52.0", "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "devOptional": true, "engines": { "node": ">= 0.6" } @@ -7781,6 +7855,7 @@ "version": "2.1.35", "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "devOptional": true, "dependencies": { "mime-db": "1.52.0" }, @@ -8393,7 +8468,8 @@ "node_modules/proxy-from-env": { 
"version": "1.1.0", "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz", - "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==" + "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==", + "dev": true }, "node_modules/pump": { "version": "3.0.0", @@ -9561,7 +9637,8 @@ "node_modules/undici-types": { "version": "5.26.5", "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", - "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==" + "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==", + "optional": true }, "node_modules/update-browserslist-db": { "version": "1.0.13", diff --git a/nodejs/package.json b/nodejs/package.json index 064f54fa..b3410e47 100644 --- a/nodejs/package.json +++ b/nodejs/package.json @@ -40,6 +40,7 @@ "@napi-rs/cli": "^2.18.3", "@types/axios": "^0.14.0", "@types/jest": "^29.1.2", + "@types/node": "^22.7.4", "@types/tmp": "^0.2.6", "apache-arrow-13": "npm:apache-arrow@13.0.0", "apache-arrow-14": "npm:apache-arrow@14.0.0", @@ -81,7 +82,6 @@ "version": "napi version" }, "dependencies": { - "axios": "^1.7.2", "reflect-metadata": "^0.2.2" }, "optionalDependencies": { diff --git a/nodejs/src/connection.rs b/nodejs/src/connection.rs index 4a454bfa..9f2a7305 100644 --- a/nodejs/src/connection.rs +++ b/nodejs/src/connection.rs @@ -68,6 +68,24 @@ impl Connection { builder = builder.storage_option(key, value); } } + + let client_config = options.client_config.unwrap_or_default(); + builder = builder.client_config(client_config.into()); + + if let Some(api_key) = options.api_key { + builder = builder.api_key(&api_key); + } + + if let Some(region) = options.region { + builder = builder.region(®ion); + } else { + builder = builder.region("us-east-1"); + } + + if let Some(host_override) = options.host_override { + builder = builder.host_override(&host_override); + } + Ok(Self::inner_new( builder .execute() diff --git a/nodejs/src/lib.rs b/nodejs/src/lib.rs index 1c15ff91..54fde9bc 100644 --- a/nodejs/src/lib.rs +++ b/nodejs/src/lib.rs @@ -22,6 +22,7 @@ mod index; mod iterator; pub mod merge; mod query; +pub mod remote; mod table; mod util; @@ -42,6 +43,19 @@ pub struct ConnectionOptions { /// /// The available options are described at https://lancedb.github.io/lancedb/guides/storage/ pub storage_options: Option>, + + /// (For LanceDB cloud only): configuration for the remote HTTP client. + pub client_config: Option, + /// (For LanceDB cloud only): the API key to use with LanceDB Cloud. + /// + /// Can also be set via the environment variable `LANCEDB_API_KEY`. + pub api_key: Option, + /// (For LanceDB cloud only): the region to use for LanceDB cloud. + /// Defaults to 'us-east-1'. + pub region: Option, + /// (For LanceDB cloud only): the host to use for LanceDB cloud. Used + /// for testing purposes. + pub host_override: Option, } /// Write mode for writing a table. diff --git a/nodejs/src/remote.rs b/nodejs/src/remote.rs new file mode 100644 index 00000000..3ed940aa --- /dev/null +++ b/nodejs/src/remote.rs @@ -0,0 +1,120 @@ +// Copyright 2024 Lance Developers. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +use napi_derive::*; + +/// Timeout configuration for remote HTTP client. +#[napi(object)] +#[derive(Debug)] +pub struct TimeoutConfig { + /// The timeout for establishing a connection in seconds. Default is 120 + /// seconds (2 minutes). This can also be set via the environment variable + /// `LANCE_CLIENT_CONNECT_TIMEOUT`, as an integer number of seconds. + pub connect_timeout: Option, + /// The timeout for reading data from the server in seconds. Default is 300 + /// seconds (5 minutes). This can also be set via the environment variable + /// `LANCE_CLIENT_READ_TIMEOUT`, as an integer number of seconds. + pub read_timeout: Option, + /// The timeout for keeping idle connections in the connection pool in seconds. + /// Default is 300 seconds (5 minutes). This can also be set via the + /// environment variable `LANCE_CLIENT_CONNECTION_TIMEOUT`, as an integer + /// number of seconds. + pub pool_idle_timeout: Option, +} + +/// Retry configuration for the remote HTTP client. +#[napi(object)] +#[derive(Debug)] +pub struct RetryConfig { + /// The maximum number of retries for a request. Default is 3. You can also + /// set this via the environment variable `LANCE_CLIENT_MAX_RETRIES`. + pub retries: Option, + /// The maximum number of retries for connection errors. Default is 3. You + /// can also set this via the environment variable `LANCE_CLIENT_CONNECT_RETRIES`. + pub connect_retries: Option, + /// The maximum number of retries for read errors. Default is 3. You can also + /// set this via the environment variable `LANCE_CLIENT_READ_RETRIES`. + pub read_retries: Option, + /// The backoff factor to apply between retries. Default is 0.25. Between each retry + /// the client will wait for the amount of seconds: + /// `{backoff factor} * (2 ** ({number of previous retries}))`. So for the default + /// of 0.25, the first retry will wait 0.25 seconds, the second retry will wait 0.5 + /// seconds, the third retry will wait 1 second, etc. + /// + /// You can also set this via the environment variable + /// `LANCE_CLIENT_RETRY_BACKOFF_FACTOR`. + pub backoff_factor: Option, + /// The jitter to apply to the backoff factor, in seconds. Default is 0.25. + /// + /// A random value between 0 and `backoff_jitter` will be added to the backoff + /// factor in seconds. So for the default of 0.25 seconds, between 0 and 250 + /// milliseconds will be added to the sleep between each retry. + /// + /// You can also set this via the environment variable + /// `LANCE_CLIENT_RETRY_BACKOFF_JITTER`. + pub backoff_jitter: Option, + /// The HTTP status codes for which to retry the request. Default is + /// [429, 500, 502, 503]. + /// + /// You can also set this via the environment variable + /// `LANCE_CLIENT_RETRY_STATUSES`. Use a comma-separated list of integers. 
+ pub statuses: Option>, +} + +#[napi(object)] +#[derive(Debug, Default)] +pub struct ClientConfig { + pub user_agent: Option, + pub retry_config: Option, + pub timeout_config: Option, +} + +impl From for lancedb::remote::TimeoutConfig { + fn from(config: TimeoutConfig) -> Self { + Self { + connect_timeout: config + .connect_timeout + .map(std::time::Duration::from_secs_f64), + read_timeout: config.read_timeout.map(std::time::Duration::from_secs_f64), + pool_idle_timeout: config + .pool_idle_timeout + .map(std::time::Duration::from_secs_f64), + } + } +} + +impl From for lancedb::remote::RetryConfig { + fn from(config: RetryConfig) -> Self { + Self { + retries: config.retries, + connect_retries: config.connect_retries, + read_retries: config.read_retries, + backoff_factor: config.backoff_factor.map(|v| v as f32), + backoff_jitter: config.backoff_jitter.map(|v| v as f32), + statuses: config.statuses, + } + } +} + +impl From for lancedb::remote::ClientConfig { + fn from(config: ClientConfig) -> Self { + Self { + user_agent: config + .user_agent + .unwrap_or(concat!("LanceDB-Node-Client/", env!("CARGO_PKG_VERSION")).to_string()), + retry_config: config.retry_config.map(Into::into).unwrap_or_default(), + timeout_config: config.timeout_config.map(Into::into).unwrap_or_default(), + } + } +} From 5f9d8509b3507956da90904dfd71364eb4157dfe Mon Sep 17 00:00:00 2001 From: Will Jones Date: Wed, 9 Oct 2024 10:46:46 -0700 Subject: [PATCH 16/51] feat: upgrade Lance to v0.18.2 (#1737) Includes changes from v0.18.1 and v0.18.2: * [v0.18.1 change log](https://github.com/lancedb/lance/releases/tag/v0.18.1) * [v0.18.2 change log](https://github.com/lancedb/lance/releases/tag/v0.18.2) Closes #1656 Closes #1615 Closes #1661 --- Cargo.toml | 18 +++++++++--------- python/pyproject.toml | 2 +- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 49e3f5a8..d7dae6c5 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -20,13 +20,13 @@ keywords = ["lancedb", "lance", "database", "vector", "search"] categories = ["database-implementations"] [workspace.dependencies] -lance = { "version" = "=0.18.0", "features" = ["dynamodb"] } -lance-index = { "version" = "=0.18.0" } -lance-linalg = { "version" = "=0.18.0" } -lance-table = { "version" = "=0.18.0" } -lance-testing = { "version" = "=0.18.0" } -lance-datafusion = { "version" = "=0.18.0" } -lance-encoding = { "version" = "=0.18.0" } +lance = { "version" = "=0.18.2", "features" = ["dynamodb"] } +lance-index = { "version" = "=0.18.2" } +lance-linalg = { "version" = "=0.18.2" } +lance-table = { "version" = "=0.18.2" } +lance-testing = { "version" = "=0.18.2" } +lance-datafusion = { "version" = "=0.18.2" } +lance-encoding = { "version" = "=0.18.2" } # Note that this one does not include pyarrow arrow = { version = "52.2", optional = false } arrow-array = "52.2" @@ -38,8 +38,8 @@ arrow-arith = "52.2" arrow-cast = "52.2" async-trait = "0" chrono = "0.4.35" -datafusion-common = "40.0" -datafusion-physical-plan = "40.0" +datafusion-common = "41.0" +datafusion-physical-plan = "41.0" half = { "version" = "=2.4.1", default-features = false, features = [ "num-traits", ] } diff --git a/python/pyproject.toml b/python/pyproject.toml index f2e82d2d..2bdc665e 100644 --- a/python/pyproject.toml +++ b/python/pyproject.toml @@ -3,7 +3,7 @@ name = "lancedb" # version in Cargo.toml dependencies = [ "deprecation", - "pylance==0.18.0", + "pylance==0.18.2", "requests>=2.31.0", "retry>=0.9.2", "tqdm>=4.27.0", From 8d0055fe6b9efe42d1628fc2bf0e367c5a65e231 Mon Sep 17 
00:00:00 2001 From: Lance Release Date: Wed, 9 Oct 2024 18:53:34 +0000 Subject: [PATCH 17/51] =?UTF-8?q?Bump=20version:=200.14.0-beta.0=20?= =?UTF-8?q?=E2=86=92=200.14.0-beta.1?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- python/.bumpversion.toml | 2 +- python/Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/python/.bumpversion.toml b/python/.bumpversion.toml index 038ccc92..b48cc822 100644 --- a/python/.bumpversion.toml +++ b/python/.bumpversion.toml @@ -1,5 +1,5 @@ [tool.bumpversion] -current_version = "0.14.0-beta.0" +current_version = "0.14.0-beta.1" parse = """(?x) (?P0|[1-9]\\d*)\\. (?P0|[1-9]\\d*)\\. diff --git a/python/Cargo.toml b/python/Cargo.toml index 115ff2e0..e1f78eaa 100644 --- a/python/Cargo.toml +++ b/python/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "lancedb-python" -version = "0.14.0-beta.0" +version = "0.14.0-beta.1" edition.workspace = true description = "Python bindings for LanceDB" license.workspace = true From 515ab5f4173a54da11dcd0a031aae0cf3ff1ab19 Mon Sep 17 00:00:00 2001 From: Lance Release Date: Wed, 9 Oct 2024 18:53:35 +0000 Subject: [PATCH 18/51] =?UTF-8?q?Bump=20version:=200.14.0-beta.1=20?= =?UTF-8?q?=E2=86=92=200.14.0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- python/.bumpversion.toml | 2 +- python/Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/python/.bumpversion.toml b/python/.bumpversion.toml index b48cc822..7fce1ac5 100644 --- a/python/.bumpversion.toml +++ b/python/.bumpversion.toml @@ -1,5 +1,5 @@ [tool.bumpversion] -current_version = "0.14.0-beta.1" +current_version = "0.14.0" parse = """(?x) (?P0|[1-9]\\d*)\\. (?P0|[1-9]\\d*)\\. diff --git a/python/Cargo.toml b/python/Cargo.toml index e1f78eaa..16178abc 100644 --- a/python/Cargo.toml +++ b/python/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "lancedb-python" -version = "0.14.0-beta.1" +version = "0.14.0" edition.workspace = true description = "Python bindings for LanceDB" license.workspace = true From d892f7a62237108e6b8f5c860cd37dfbf04fc0be Mon Sep 17 00:00:00 2001 From: Lance Release Date: Wed, 9 Oct 2024 18:54:04 +0000 Subject: [PATCH 19/51] =?UTF-8?q?Bump=20version:=200.11.0-beta.1=20?= =?UTF-8?q?=E2=86=92=200.11.0-beta.2?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .bumpversion.toml | 2 +- java/core/pom.xml | 2 +- java/pom.xml | 2 +- node/package.json | 12 ++++++------ nodejs/Cargo.toml | 2 +- nodejs/npm/darwin-arm64/package.json | 2 +- nodejs/npm/darwin-x64/package.json | 2 +- nodejs/npm/linux-arm64-gnu/package.json | 2 +- nodejs/npm/linux-x64-gnu/package.json | 2 +- nodejs/npm/win32-x64-msvc/package.json | 2 +- nodejs/package.json | 2 +- rust/ffi/node/Cargo.toml | 2 +- rust/lancedb/Cargo.toml | 2 +- 13 files changed, 18 insertions(+), 18 deletions(-) diff --git a/.bumpversion.toml b/.bumpversion.toml index aaf46f1c..adf30b46 100644 --- a/.bumpversion.toml +++ b/.bumpversion.toml @@ -1,5 +1,5 @@ [tool.bumpversion] -current_version = "0.11.0-beta.1" +current_version = "0.11.0-beta.2" parse = """(?x) (?P0|[1-9]\\d*)\\. (?P0|[1-9]\\d*)\\. 
diff --git a/java/core/pom.xml b/java/core/pom.xml index 7f608160..f4e4ad8e 100644 --- a/java/core/pom.xml +++ b/java/core/pom.xml @@ -8,7 +8,7 @@ com.lancedb lancedb-parent - 0.11.0-beta.1 + 0.11.0-beta.2 ../pom.xml diff --git a/java/pom.xml b/java/pom.xml index c6957496..e9aef037 100644 --- a/java/pom.xml +++ b/java/pom.xml @@ -6,7 +6,7 @@ com.lancedb lancedb-parent - 0.11.0-beta.1 + 0.11.0-beta.2 pom LanceDB Parent diff --git a/node/package.json b/node/package.json index 919ef8b7..70ac104a 100644 --- a/node/package.json +++ b/node/package.json @@ -1,6 +1,6 @@ { "name": "vectordb", - "version": "0.11.0-beta.1", + "version": "0.11.0-beta.2", "description": " Serverless, low-latency vector database for AI applications", "main": "dist/index.js", "types": "dist/index.d.ts", @@ -88,10 +88,10 @@ } }, "optionalDependencies": { - "@lancedb/vectordb-darwin-arm64": "0.11.0-beta.1", - "@lancedb/vectordb-darwin-x64": "0.11.0-beta.1", - "@lancedb/vectordb-linux-arm64-gnu": "0.11.0-beta.1", - "@lancedb/vectordb-linux-x64-gnu": "0.11.0-beta.1", - "@lancedb/vectordb-win32-x64-msvc": "0.11.0-beta.1" + "@lancedb/vectordb-darwin-arm64": "0.11.0-beta.2", + "@lancedb/vectordb-darwin-x64": "0.11.0-beta.2", + "@lancedb/vectordb-linux-arm64-gnu": "0.11.0-beta.2", + "@lancedb/vectordb-linux-x64-gnu": "0.11.0-beta.2", + "@lancedb/vectordb-win32-x64-msvc": "0.11.0-beta.2" } } diff --git a/nodejs/Cargo.toml b/nodejs/Cargo.toml index d6d36523..7fbd04fb 100644 --- a/nodejs/Cargo.toml +++ b/nodejs/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "lancedb-nodejs" edition.workspace = true -version = "0.11.0-beta.1" +version = "0.11.0-beta.2" license.workspace = true description.workspace = true repository.workspace = true diff --git a/nodejs/npm/darwin-arm64/package.json b/nodejs/npm/darwin-arm64/package.json index a7ecc2bf..b771cbaf 100644 --- a/nodejs/npm/darwin-arm64/package.json +++ b/nodejs/npm/darwin-arm64/package.json @@ -1,6 +1,6 @@ { "name": "@lancedb/lancedb-darwin-arm64", - "version": "0.11.0-beta.1", + "version": "0.11.0-beta.2", "os": ["darwin"], "cpu": ["arm64"], "main": "lancedb.darwin-arm64.node", diff --git a/nodejs/npm/darwin-x64/package.json b/nodejs/npm/darwin-x64/package.json index 39518599..ce81de35 100644 --- a/nodejs/npm/darwin-x64/package.json +++ b/nodejs/npm/darwin-x64/package.json @@ -1,6 +1,6 @@ { "name": "@lancedb/lancedb-darwin-x64", - "version": "0.11.0-beta.1", + "version": "0.11.0-beta.2", "os": ["darwin"], "cpu": ["x64"], "main": "lancedb.darwin-x64.node", diff --git a/nodejs/npm/linux-arm64-gnu/package.json b/nodejs/npm/linux-arm64-gnu/package.json index b21f194a..df7d206e 100644 --- a/nodejs/npm/linux-arm64-gnu/package.json +++ b/nodejs/npm/linux-arm64-gnu/package.json @@ -1,6 +1,6 @@ { "name": "@lancedb/lancedb-linux-arm64-gnu", - "version": "0.11.0-beta.1", + "version": "0.11.0-beta.2", "os": ["linux"], "cpu": ["arm64"], "main": "lancedb.linux-arm64-gnu.node", diff --git a/nodejs/npm/linux-x64-gnu/package.json b/nodejs/npm/linux-x64-gnu/package.json index 7cc06793..028a8f6a 100644 --- a/nodejs/npm/linux-x64-gnu/package.json +++ b/nodejs/npm/linux-x64-gnu/package.json @@ -1,6 +1,6 @@ { "name": "@lancedb/lancedb-linux-x64-gnu", - "version": "0.11.0-beta.1", + "version": "0.11.0-beta.2", "os": ["linux"], "cpu": ["x64"], "main": "lancedb.linux-x64-gnu.node", diff --git a/nodejs/npm/win32-x64-msvc/package.json b/nodejs/npm/win32-x64-msvc/package.json index eafe3417..eccdba38 100644 --- a/nodejs/npm/win32-x64-msvc/package.json +++ b/nodejs/npm/win32-x64-msvc/package.json @@ -1,6 +1,6 @@ { 
"name": "@lancedb/lancedb-win32-x64-msvc", - "version": "0.11.0-beta.1", + "version": "0.11.0-beta.2", "os": ["win32"], "cpu": ["x64"], "main": "lancedb.win32-x64-msvc.node", diff --git a/nodejs/package.json b/nodejs/package.json index b3410e47..1ad85684 100644 --- a/nodejs/package.json +++ b/nodejs/package.json @@ -10,7 +10,7 @@ "vector database", "ann" ], - "version": "0.11.0-beta.1", + "version": "0.11.0-beta.2", "main": "dist/index.js", "exports": { ".": "./dist/index.js", diff --git a/rust/ffi/node/Cargo.toml b/rust/ffi/node/Cargo.toml index e9c6b85b..2e8bc533 100644 --- a/rust/ffi/node/Cargo.toml +++ b/rust/ffi/node/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "lancedb-node" -version = "0.11.0-beta.1" +version = "0.11.0-beta.2" description = "Serverless, low-latency vector database for AI applications" license.workspace = true edition.workspace = true diff --git a/rust/lancedb/Cargo.toml b/rust/lancedb/Cargo.toml index 200c5d3b..1f0d1c3d 100644 --- a/rust/lancedb/Cargo.toml +++ b/rust/lancedb/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "lancedb" -version = "0.11.0-beta.1" +version = "0.11.0-beta.2" edition.workspace = true description = "LanceDB: A serverless, low-latency vector database for AI applications" license.workspace = true From 9f42fbba96928814fa0ac79fe30f76adc05720de Mon Sep 17 00:00:00 2001 From: Lance Release Date: Wed, 9 Oct 2024 18:54:09 +0000 Subject: [PATCH 20/51] =?UTF-8?q?Bump=20version:=200.11.0-beta.2=20?= =?UTF-8?q?=E2=86=92=200.11.0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .bumpversion.toml | 2 +- java/core/pom.xml | 2 +- java/pom.xml | 2 +- node/package.json | 12 ++++++------ nodejs/Cargo.toml | 2 +- nodejs/npm/darwin-arm64/package.json | 2 +- nodejs/npm/darwin-x64/package.json | 2 +- nodejs/npm/linux-arm64-gnu/package.json | 2 +- nodejs/npm/linux-x64-gnu/package.json | 2 +- nodejs/npm/win32-x64-msvc/package.json | 2 +- nodejs/package.json | 2 +- rust/ffi/node/Cargo.toml | 2 +- rust/lancedb/Cargo.toml | 2 +- 13 files changed, 18 insertions(+), 18 deletions(-) diff --git a/.bumpversion.toml b/.bumpversion.toml index adf30b46..8c58e6c5 100644 --- a/.bumpversion.toml +++ b/.bumpversion.toml @@ -1,5 +1,5 @@ [tool.bumpversion] -current_version = "0.11.0-beta.2" +current_version = "0.11.0" parse = """(?x) (?P0|[1-9]\\d*)\\. (?P0|[1-9]\\d*)\\. 
diff --git a/java/core/pom.xml b/java/core/pom.xml index f4e4ad8e..88dd775f 100644 --- a/java/core/pom.xml +++ b/java/core/pom.xml @@ -8,7 +8,7 @@ com.lancedb lancedb-parent - 0.11.0-beta.2 + 0.11.0-final.0 ../pom.xml diff --git a/java/pom.xml b/java/pom.xml index e9aef037..46b7049b 100644 --- a/java/pom.xml +++ b/java/pom.xml @@ -6,7 +6,7 @@ com.lancedb lancedb-parent - 0.11.0-beta.2 + 0.11.0-final.0 pom LanceDB Parent diff --git a/node/package.json b/node/package.json index 70ac104a..cb1fb55c 100644 --- a/node/package.json +++ b/node/package.json @@ -1,6 +1,6 @@ { "name": "vectordb", - "version": "0.11.0-beta.2", + "version": "0.11.0", "description": " Serverless, low-latency vector database for AI applications", "main": "dist/index.js", "types": "dist/index.d.ts", @@ -88,10 +88,10 @@ } }, "optionalDependencies": { - "@lancedb/vectordb-darwin-arm64": "0.11.0-beta.2", - "@lancedb/vectordb-darwin-x64": "0.11.0-beta.2", - "@lancedb/vectordb-linux-arm64-gnu": "0.11.0-beta.2", - "@lancedb/vectordb-linux-x64-gnu": "0.11.0-beta.2", - "@lancedb/vectordb-win32-x64-msvc": "0.11.0-beta.2" + "@lancedb/vectordb-darwin-arm64": "0.11.0", + "@lancedb/vectordb-darwin-x64": "0.11.0", + "@lancedb/vectordb-linux-arm64-gnu": "0.11.0", + "@lancedb/vectordb-linux-x64-gnu": "0.11.0", + "@lancedb/vectordb-win32-x64-msvc": "0.11.0" } } diff --git a/nodejs/Cargo.toml b/nodejs/Cargo.toml index 7fbd04fb..aca34cef 100644 --- a/nodejs/Cargo.toml +++ b/nodejs/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "lancedb-nodejs" edition.workspace = true -version = "0.11.0-beta.2" +version = "0.11.0" license.workspace = true description.workspace = true repository.workspace = true diff --git a/nodejs/npm/darwin-arm64/package.json b/nodejs/npm/darwin-arm64/package.json index b771cbaf..a11285fa 100644 --- a/nodejs/npm/darwin-arm64/package.json +++ b/nodejs/npm/darwin-arm64/package.json @@ -1,6 +1,6 @@ { "name": "@lancedb/lancedb-darwin-arm64", - "version": "0.11.0-beta.2", + "version": "0.11.0", "os": ["darwin"], "cpu": ["arm64"], "main": "lancedb.darwin-arm64.node", diff --git a/nodejs/npm/darwin-x64/package.json b/nodejs/npm/darwin-x64/package.json index ce81de35..a75f5392 100644 --- a/nodejs/npm/darwin-x64/package.json +++ b/nodejs/npm/darwin-x64/package.json @@ -1,6 +1,6 @@ { "name": "@lancedb/lancedb-darwin-x64", - "version": "0.11.0-beta.2", + "version": "0.11.0", "os": ["darwin"], "cpu": ["x64"], "main": "lancedb.darwin-x64.node", diff --git a/nodejs/npm/linux-arm64-gnu/package.json b/nodejs/npm/linux-arm64-gnu/package.json index df7d206e..4eb26c40 100644 --- a/nodejs/npm/linux-arm64-gnu/package.json +++ b/nodejs/npm/linux-arm64-gnu/package.json @@ -1,6 +1,6 @@ { "name": "@lancedb/lancedb-linux-arm64-gnu", - "version": "0.11.0-beta.2", + "version": "0.11.0", "os": ["linux"], "cpu": ["arm64"], "main": "lancedb.linux-arm64-gnu.node", diff --git a/nodejs/npm/linux-x64-gnu/package.json b/nodejs/npm/linux-x64-gnu/package.json index 028a8f6a..a0761058 100644 --- a/nodejs/npm/linux-x64-gnu/package.json +++ b/nodejs/npm/linux-x64-gnu/package.json @@ -1,6 +1,6 @@ { "name": "@lancedb/lancedb-linux-x64-gnu", - "version": "0.11.0-beta.2", + "version": "0.11.0", "os": ["linux"], "cpu": ["x64"], "main": "lancedb.linux-x64-gnu.node", diff --git a/nodejs/npm/win32-x64-msvc/package.json b/nodejs/npm/win32-x64-msvc/package.json index eccdba38..c059db9a 100644 --- a/nodejs/npm/win32-x64-msvc/package.json +++ b/nodejs/npm/win32-x64-msvc/package.json @@ -1,6 +1,6 @@ { "name": "@lancedb/lancedb-win32-x64-msvc", - "version": "0.11.0-beta.2", + 
"version": "0.11.0", "os": ["win32"], "cpu": ["x64"], "main": "lancedb.win32-x64-msvc.node", diff --git a/nodejs/package.json b/nodejs/package.json index 1ad85684..024766c0 100644 --- a/nodejs/package.json +++ b/nodejs/package.json @@ -10,7 +10,7 @@ "vector database", "ann" ], - "version": "0.11.0-beta.2", + "version": "0.11.0", "main": "dist/index.js", "exports": { ".": "./dist/index.js", diff --git a/rust/ffi/node/Cargo.toml b/rust/ffi/node/Cargo.toml index 2e8bc533..e8225599 100644 --- a/rust/ffi/node/Cargo.toml +++ b/rust/ffi/node/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "lancedb-node" -version = "0.11.0-beta.2" +version = "0.11.0" description = "Serverless, low-latency vector database for AI applications" license.workspace = true edition.workspace = true diff --git a/rust/lancedb/Cargo.toml b/rust/lancedb/Cargo.toml index 1f0d1c3d..7a441c01 100644 --- a/rust/lancedb/Cargo.toml +++ b/rust/lancedb/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "lancedb" -version = "0.11.0-beta.2" +version = "0.11.0" edition.workspace = true description = "LanceDB: A serverless, low-latency vector database for AI applications" license.workspace = true From 6de53ce393ec4e2004b988eb46a425979e3ddf91 Mon Sep 17 00:00:00 2001 From: Lance Release Date: Wed, 9 Oct 2024 18:54:29 +0000 Subject: [PATCH 21/51] Updating package-lock.json --- node/package-lock.json | 74 ++++-------------------------------------- 1 file changed, 7 insertions(+), 67 deletions(-) diff --git a/node/package-lock.json b/node/package-lock.json index 0544209b..0aa67ac3 100644 --- a/node/package-lock.json +++ b/node/package-lock.json @@ -1,12 +1,12 @@ { "name": "vectordb", - "version": "0.11.0-beta.1", + "version": "0.11.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "vectordb", - "version": "0.11.0-beta.1", + "version": "0.11.0", "cpu": [ "x64", "arm64" @@ -52,11 +52,11 @@ "uuid": "^9.0.0" }, "optionalDependencies": { - "@lancedb/vectordb-darwin-arm64": "0.11.0-beta.1", - "@lancedb/vectordb-darwin-x64": "0.11.0-beta.1", - "@lancedb/vectordb-linux-arm64-gnu": "0.11.0-beta.1", - "@lancedb/vectordb-linux-x64-gnu": "0.11.0-beta.1", - "@lancedb/vectordb-win32-x64-msvc": "0.11.0-beta.1" + "@lancedb/vectordb-darwin-arm64": "0.11.0", + "@lancedb/vectordb-darwin-x64": "0.11.0", + "@lancedb/vectordb-linux-arm64-gnu": "0.11.0", + "@lancedb/vectordb-linux-x64-gnu": "0.11.0", + "@lancedb/vectordb-win32-x64-msvc": "0.11.0" }, "peerDependencies": { "@apache-arrow/ts": "^14.0.2", @@ -326,66 +326,6 @@ "@jridgewell/sourcemap-codec": "^1.4.10" } }, - "node_modules/@lancedb/vectordb-darwin-arm64": { - "version": "0.11.0-beta.1", - "resolved": "https://registry.npmjs.org/@lancedb/vectordb-darwin-arm64/-/vectordb-darwin-arm64-0.11.0-beta.1.tgz", - "integrity": "sha512-qKQbFJwstMQEO2MVkkipyDxmH3/KafkuC4xfU8LjMtZ98ZGTQIW47t/OyftiUXYWcjsVxeXI3l2m9MCozFOdhg==", - "cpu": [ - "arm64" - ], - "optional": true, - "os": [ - "darwin" - ] - }, - "node_modules/@lancedb/vectordb-darwin-x64": { - "version": "0.11.0-beta.1", - "resolved": "https://registry.npmjs.org/@lancedb/vectordb-darwin-x64/-/vectordb-darwin-x64-0.11.0-beta.1.tgz", - "integrity": "sha512-245Q5hjQKljczBcDLbiq3N5fmUaY2zFRHoW6SBxOziQwyMphhLDSTNkAYkc3JnrQvf6dMolVYWigOsRVCFj56A==", - "cpu": [ - "x64" - ], - "optional": true, - "os": [ - "darwin" - ] - }, - "node_modules/@lancedb/vectordb-linux-arm64-gnu": { - "version": "0.11.0-beta.1", - "resolved": "https://registry.npmjs.org/@lancedb/vectordb-linux-arm64-gnu/-/vectordb-linux-arm64-gnu-0.11.0-beta.1.tgz", - "integrity": 
"sha512-B4z6sx4X6uqGDnQm3zL5mL47Agn4X4spf/nlxtrUWEfiOAyp9Iw465UQMmrbnodi+4k/BNjCNZNMFSjMOSsrcA==", - "cpu": [ - "arm64" - ], - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@lancedb/vectordb-linux-x64-gnu": { - "version": "0.11.0-beta.1", - "resolved": "https://registry.npmjs.org/@lancedb/vectordb-linux-x64-gnu/-/vectordb-linux-x64-gnu-0.11.0-beta.1.tgz", - "integrity": "sha512-0vWcPqpe3to78bYkc+3XWZToRu6TMrhLJAxC9cnV5d9GMuN1VbDoLqD8QPRWkoEr9Nk7jdIRKEBUwfq5yGOFLw==", - "cpu": [ - "x64" - ], - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@lancedb/vectordb-win32-x64-msvc": { - "version": "0.11.0-beta.1", - "resolved": "https://registry.npmjs.org/@lancedb/vectordb-win32-x64-msvc/-/vectordb-win32-x64-msvc-0.11.0-beta.1.tgz", - "integrity": "sha512-jU/+w2TfA4HKOZkib1UP4kIpaLgu+88S/t+Ccde67w/4qQuP0uAixTAls1WE4mtlf6pOnG0A1ILTY98nVkIQ3A==", - "cpu": [ - "x64" - ], - "optional": true, - "os": [ - "win32" - ] - }, "node_modules/@neon-rs/cli": { "version": "0.0.160", "resolved": "https://registry.npmjs.org/@neon-rs/cli/-/cli-0.0.160.tgz", From 9c74c435e021db6433a7dd3f2e4391288df0ded5 Mon Sep 17 00:00:00 2001 From: Will Jones Date: Wed, 9 Oct 2024 14:14:08 -0700 Subject: [PATCH 22/51] ci: update package lock (#1740) --- node/package-lock.json | 60 ++++++++++++++++++++++++++++++++++++++++ nodejs/package-lock.json | 4 +-- 2 files changed, 62 insertions(+), 2 deletions(-) diff --git a/node/package-lock.json b/node/package-lock.json index 0aa67ac3..ef31e324 100644 --- a/node/package-lock.json +++ b/node/package-lock.json @@ -326,6 +326,66 @@ "@jridgewell/sourcemap-codec": "^1.4.10" } }, + "node_modules/@lancedb/vectordb-darwin-arm64": { + "version": "0.11.0", + "resolved": "https://registry.npmjs.org/@lancedb/vectordb-darwin-arm64/-/vectordb-darwin-arm64-0.11.0.tgz", + "integrity": "sha512-ffI9sLdlJ0L0FjKVy5QpznRTgVaEGL2INJVcJauuzsYY2aOC3weNfE+v58n/cm9I/NulTdu1BemwzFpESoZf5A==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@lancedb/vectordb-darwin-x64": { + "version": "0.11.0", + "resolved": "https://registry.npmjs.org/@lancedb/vectordb-darwin-x64/-/vectordb-darwin-x64-0.11.0.tgz", + "integrity": "sha512-sMGKVmTj7Gt1z+1Sy24toCV8UgcQkX0ljQU1QunVEzJvoP9yah/DN5rw5Ozxiv8Obk6Pz3BMZYqV3BPmL9AiAg==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@lancedb/vectordb-linux-arm64-gnu": { + "version": "0.11.0", + "resolved": "https://registry.npmjs.org/@lancedb/vectordb-linux-arm64-gnu/-/vectordb-linux-arm64-gnu-0.11.0.tgz", + "integrity": "sha512-BQTiTbvJfNKEye9FRomItlFcbOoYCV8frBrQMIfli4q9GECwgBmXQaWP+rEZZrdqfG0DivTQJ0YSSHgAy3wCcA==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@lancedb/vectordb-linux-x64-gnu": { + "version": "0.11.0", + "resolved": "https://registry.npmjs.org/@lancedb/vectordb-linux-x64-gnu/-/vectordb-linux-x64-gnu-0.11.0.tgz", + "integrity": "sha512-+RHu6YY311N21ZBM8OYbBFNuW+rqq0AC7Vp5eBvWKTOeNIf1Lz2vFAKhDPOgJt+ROoT/nzKRbksIEeIvnYQJNw==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@lancedb/vectordb-win32-x64-msvc": { + "version": "0.11.0", + "resolved": "https://registry.npmjs.org/@lancedb/vectordb-win32-x64-msvc/-/vectordb-win32-x64-msvc-0.11.0.tgz", + "integrity": "sha512-IWdhJdiYIkJW+njNlRVNGG1bnGlQs+Wbrjy/NIZhVxch2yCj9gknZqWCuSHNR43a2oAdUY/kXgWL2AKEOfK1CQ==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "win32" + ] + }, "node_modules/@neon-rs/cli": { "version": 
"0.0.160", "resolved": "https://registry.npmjs.org/@neon-rs/cli/-/cli-0.0.160.tgz", diff --git a/nodejs/package-lock.json b/nodejs/package-lock.json index 05f7377d..b124bed2 100644 --- a/nodejs/package-lock.json +++ b/nodejs/package-lock.json @@ -1,12 +1,12 @@ { "name": "@lancedb/lancedb", - "version": "0.11.0-beta.1", + "version": "0.11.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "@lancedb/lancedb", - "version": "0.11.0-beta.1", + "version": "0.11.0", "cpu": [ "x64", "arm64" From 5ccd0edec21bd912752c13e679b89bd0f4ea4adf Mon Sep 17 00:00:00 2001 From: Olzhas Alexandrov <9992724+o-alexandrov@users.noreply.github.com> Date: Sat, 12 Oct 2024 01:06:28 +0500 Subject: [PATCH 23/51] docs: clarify infrastructure requirements for S3 Express One Zone (#1745) --- docs/src/guides/storage.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/guides/storage.md b/docs/src/guides/storage.md index 6cdc52bd..f4a7904b 100644 --- a/docs/src/guides/storage.md +++ b/docs/src/guides/storage.md @@ -498,7 +498,7 @@ This can also be done with the ``AWS_ENDPOINT`` and ``AWS_DEFAULT_REGION`` envir #### S3 Express -LanceDB supports [S3 Express One Zone](https://aws.amazon.com/s3/storage-classes/express-one-zone/) endpoints, but requires additional configuration. Also, S3 Express endpoints only support connecting from an EC2 instance within the same region. +LanceDB supports [S3 Express One Zone](https://aws.amazon.com/s3/storage-classes/express-one-zone/) endpoints, but requires additional infrastructure configuration for the compute service, such as EC2 or Lambda. Please refer to [Networking requirements for S3 Express One Zone](https://docs.aws.amazon.com/AmazonS3/latest/userguide/s3-express-networking.html). To configure LanceDB to use an S3 Express endpoint, you must set the storage option `s3_express`. The bucket name in your table URI should **include the suffix**. From e7b56b7b2a3faa135fad2815ba0d06ad72277b15 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Dominik=20Weckm=C3=BCller?= <47481567+do-me@users.noreply.github.com> Date: Mon, 14 Oct 2024 20:58:23 +0200 Subject: [PATCH 24/51] docs: add permanent link chain icon to headings without impacting SEO (#1746) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit I noted that there are no permanent links in the docs. Adapted the current best solution from https://github.com/squidfunk/mkdocs-material/discussions/3535. It adds a GitHub-like chain icon to the left of each heading (right on mobile) and does not impact SEO unlike the default solution with pilcrow char `ΒΆ` that might show up on google search results. 
image --- docs/mkdocs.yml | 3 +++ docs/src/styles/extra.css | 44 ++++++++++++++++++++++++++++++++++++++- 2 files changed, 46 insertions(+), 1 deletion(-) diff --git a/docs/mkdocs.yml b/docs/mkdocs.yml index 5abf132e..ddfa66b0 100644 --- a/docs/mkdocs.yml +++ b/docs/mkdocs.yml @@ -90,6 +90,9 @@ markdown_extensions: - pymdownx.emoji: emoji_index: !!python/name:material.extensions.emoji.twemoji emoji_generator: !!python/name:material.extensions.emoji.to_svg + - markdown.extensions.toc: + baselevel: 1 + permalink: "" nav: - Home: diff --git a/docs/src/styles/extra.css b/docs/src/styles/extra.css index 1075e4d6..913c2e35 100644 --- a/docs/src/styles/extra.css +++ b/docs/src/styles/extra.css @@ -39,4 +39,46 @@ height: 1.2rem; margin-top: -.1rem; } - } \ No newline at end of file + } + +/* remove pilcrow as permanent link and add chain icon similar to github https://github.com/squidfunk/mkdocs-material/discussions/3535 */ + +.headerlink { + --permalink-size: 16px; /* for font-relative sizes, 0.6em is a good choice */ + --permalink-spacing: 4px; + + width: calc(var(--permalink-size) + var(--permalink-spacing)); + height: var(--permalink-size); + vertical-align: middle; + background-color: var(--md-default-fg-color--lighter); + background-size: var(--permalink-size); + mask-size: var(--permalink-size); + -webkit-mask-size: var(--permalink-size); + mask-repeat: no-repeat; + -webkit-mask-repeat: no-repeat; + visibility: visible; + mask-image: url('data:image/svg+xml;utf8,'); + -webkit-mask-image: url('data:image/svg+xml;utf8,'); +} + +[id]:target .headerlink { + background-color: var(--md-typeset-a-color); +} + +.headerlink:hover { + background-color: var(--md-accent-fg-color) !important; +} + +@media screen and (min-width: 76.25em) { + h1, h2, h3, h4, h5, h6 { + display: flex; + align-items: center; + flex-direction: row; + column-gap: 0.2em; /* fixes spaces in titles */ + } + + .headerlink { + order: -1; + margin-left: calc(var(--permalink-size) * -1 - var(--permalink-spacing)) !important; + } +} From 679a70231e843b64cd788c5bb3455e5abef5b6f5 Mon Sep 17 00:00:00 2001 From: Ryan Green Date: Mon, 14 Oct 2024 14:39:54 -0600 Subject: [PATCH 25/51] feat: allow fast_search on python remote table (#1747) Add `fast_search` parameter to query builder and remote table to support skipping flat search in remote search --- python/python/lancedb/query.py | 17 ++++++++++++++++- python/python/lancedb/remote/__init__.py | 2 ++ python/python/lancedb/remote/table.py | 8 ++++++++ 3 files changed, 26 insertions(+), 1 deletion(-) diff --git a/python/python/lancedb/query.py b/python/python/lancedb/query.py index 8ef97897..c79b8846 100644 --- a/python/python/lancedb/query.py +++ b/python/python/lancedb/query.py @@ -88,6 +88,11 @@ class Query(pydantic.BaseModel): tuning advice. offset: int The offset to start fetching results from + fast_search: bool + Skip a flat search of unindexed data. This will improve + search performance but search results will not include unindexed data. + + - *default False*. """ vector_column: Optional[str] = None @@ -124,6 +129,8 @@ class Query(pydantic.BaseModel): offset: int = 0 + fast_search: bool = False + class LanceQueryBuilder(ABC): """An abstract query builder. Subclasses are defined for vector search, @@ -139,6 +146,7 @@ class LanceQueryBuilder(ABC): vector_column_name: str, ordering_field_name: Optional[str] = None, fts_columns: Union[str, List[str]] = [], + fast_search: bool = False, ) -> LanceQueryBuilder: """ Create a query builder based on the given query and query type. 
@@ -155,6 +163,8 @@ class LanceQueryBuilder(ABC): If "auto", the query type is inferred based on the query. vector_column_name: str The name of the vector column to use for vector search. + fast_search: bool + Skip flat search of unindexed data. """ # Check hybrid search first as it supports empty query pattern if query_type == "hybrid": @@ -196,7 +206,9 @@ class LanceQueryBuilder(ABC): else: raise TypeError(f"Unsupported query type: {type(query)}") - return LanceVectorQueryBuilder(table, query, vector_column_name, str_query) + return LanceVectorQueryBuilder( + table, query, vector_column_name, str_query, fast_search + ) @classmethod def _resolve_query(cls, table, query, query_type, vector_column_name): @@ -565,6 +577,7 @@ class LanceVectorQueryBuilder(LanceQueryBuilder): query: Union[np.ndarray, list, "PIL.Image.Image"], vector_column: str, str_query: Optional[str] = None, + fast_search: bool = False, ): super().__init__(table) self._query = query @@ -575,6 +588,7 @@ class LanceVectorQueryBuilder(LanceQueryBuilder): self._prefilter = False self._reranker = None self._str_query = str_query + self._fast_search = fast_search def metric(self, metric: Literal["L2", "cosine", "dot"]) -> LanceVectorQueryBuilder: """Set the distance metric to use. @@ -675,6 +689,7 @@ class LanceVectorQueryBuilder(LanceQueryBuilder): vector_column=self._vector_column, with_row_id=self._with_row_id, offset=self._offset, + fast_search=self._fast_search, ) result_set = self._table._execute_query(query, batch_size) if self._reranker is not None: diff --git a/python/python/lancedb/remote/__init__.py b/python/python/lancedb/remote/__init__.py index fdd0cfae..98cbd2e5 100644 --- a/python/python/lancedb/remote/__init__.py +++ b/python/python/lancedb/remote/__init__.py @@ -50,6 +50,8 @@ class VectorQuery(BaseModel): vector_column: str = VECTOR_COLUMN_NAME + fast_search: bool = False + @attrs.define class VectorQueryResult: diff --git a/python/python/lancedb/remote/table.py b/python/python/lancedb/remote/table.py index 34e2fe7c..986fbced 100644 --- a/python/python/lancedb/remote/table.py +++ b/python/python/lancedb/remote/table.py @@ -270,6 +270,7 @@ class RemoteTable(Table): vector_column_name: Optional[str] = None, query_type="auto", fts_columns: Optional[Union[str, List[str]]] = None, + fast_search: bool = False, ) -> LanceVectorQueryBuilder: """Create a search query to find the nearest neighbors of the given query vector. We currently support [vector search][search] @@ -314,6 +315,12 @@ class RemoteTable(Table): - If the table has multiple vector columns then the *vector_column_name* needs to be specified. Otherwise, an error is raised. + fast_search: bool, optional + Skip a flat search of unindexed data. This may improve + search performance but search results will not include unindexed data. + + - *default False*. + Returns ------- LanceQueryBuilder @@ -343,6 +350,7 @@ class RemoteTable(Table): query_type, vector_column_name=vector_column_name, fts_columns=fts_columns, + fast_search=fast_search, ) def _execute_query( From 38eb05f29716f92f64c0664cbcfd2c46ffc3cf7e Mon Sep 17 00:00:00 2001 From: James Wu Date: Tue, 15 Oct 2024 15:13:57 -0700 Subject: [PATCH 26/51] fix(python): remove dependency on retry package (#1749) ## user story fixes https://github.com/lancedb/lancedb/issues/1480 https://github.com/invl/retry has not had an update in 8 years, one if its sub-dependencies via requirements.txt (https://github.com/pytest-dev/py) is no longer maintained and has a high severity vulnerability (CVE-2022-42969). 
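For the `fast_search` option added in #1747 above, a minimal usage sketch against a remote table could look like this. The connection details, table name, and query vector are placeholders; only the `fast_search` keyword itself comes from that patch.

```python
import lancedb

# Sketch only: the db:// URI, API key, region, table name, and vector values
# are placeholders; fast_search=True is the new keyword from #1747.
db = lancedb.connect("db://my-project", api_key="sk-...", region="us-east-1")
tbl = db.open_table("my_table")

# Skip the flat scan over not-yet-indexed rows; results may omit unindexed data.
hits = tbl.search([0.1, 0.2, 0.3, 0.4], fast_search=True).limit(10).to_list()
```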
retry is only used for a single function in the python codebase for a deprecated helper function `with_embeddings`, which was created for an older tutorial (https://github.com/lancedb/lancedb/pull/12) [but is now deprecated](https://lancedb.github.io/lancedb/embeddings/legacy/). ## changes i backported a limited range of functionality of the `@retry()` decorator directly into lancedb so that we no longer have a dependency to the `retry` package. ## tests ``` /Users/james/src/lancedb/python $ ruff check . All checks passed! /Users/james/src/lancedb/python $ pytest python/tests/test_embeddings.py python/tests/test_embeddings.py .......s.... [100%] ================================================================ 11 passed, 1 skipped, 2 warnings in 7.08s ================================================================ ``` --- python/pyproject.toml | 1 - python/python/lancedb/embeddings/utils.py | 23 ++++++++++++++++++++++- python/python/tests/test_embeddings.py | 11 +++++++++++ 3 files changed, 33 insertions(+), 2 deletions(-) diff --git a/python/pyproject.toml b/python/pyproject.toml index 2bdc665e..394cc4a4 100644 --- a/python/pyproject.toml +++ b/python/pyproject.toml @@ -5,7 +5,6 @@ dependencies = [ "deprecation", "pylance==0.18.2", "requests>=2.31.0", - "retry>=0.9.2", "tqdm>=4.27.0", "pydantic>=1.10", "attrs>=21.3.0", diff --git a/python/python/lancedb/embeddings/utils.py b/python/python/lancedb/embeddings/utils.py index ce5396d7..59dcdb9b 100644 --- a/python/python/lancedb/embeddings/utils.py +++ b/python/python/lancedb/embeddings/utils.py @@ -21,14 +21,35 @@ import time import urllib.error import weakref import logging +from functools import wraps from typing import Callable, List, Union import numpy as np import pyarrow as pa from lance.vector import vec_to_table -from retry import retry from ..util import deprecated, safe_import_pandas + +# ruff: noqa: PERF203 +def retry(tries=10, delay=1, max_delay=30, backoff=3, jitter=1): + def wrapper(fn): + @wraps(fn) + def wrapped(*args, **kwargs): + for i in range(tries): + try: + return fn(*args, **kwargs) + except Exception: + if i + 1 == tries: + raise + else: + sleep = min(delay * (backoff**i) + jitter, max_delay) + time.sleep(sleep) + + return wrapped + + return wrapper + + pd = safe_import_pandas() DATA = Union[pa.Table, "pd.DataFrame"] diff --git a/python/python/tests/test_embeddings.py b/python/python/tests/test_embeddings.py index 9611f0ec..e48fb209 100644 --- a/python/python/tests/test_embeddings.py +++ b/python/python/tests/test_embeddings.py @@ -11,6 +11,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
from typing import List, Union +from unittest.mock import MagicMock, patch import lance import lancedb @@ -25,6 +26,7 @@ from lancedb.embeddings import ( ) from lancedb.embeddings.base import TextEmbeddingFunction from lancedb.embeddings.registry import get_registry, register +from lancedb.embeddings.utils import retry from lancedb.pydantic import LanceModel, Vector @@ -225,3 +227,12 @@ def test_embedding_function_safe_model_dump(embedding_type): f"{embedding_type}: Private attribute '{key}' " f"is present in dumped model" ) + + +@patch("time.sleep") +def test_retry(mock_sleep): + test_function = MagicMock(side_effect=[Exception] * 9 + ["result"]) + test_function = retry()(test_function) + result = test_function() + assert mock_sleep.call_count == 9 + assert result == "result" From f43cb8bba17f2b2c780d6a50346ba9babcd51b7d Mon Sep 17 00:00:00 2001 From: Weston Pace Date: Wed, 16 Oct 2024 00:48:31 -0700 Subject: [PATCH 27/51] feat: upgrade lance to 0.18.3 (#1748) --- Cargo.toml | 16 +++++++++------- python/pyproject.toml | 2 +- 2 files changed, 10 insertions(+), 8 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index d7dae6c5..a9fffc24 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -20,13 +20,15 @@ keywords = ["lancedb", "lance", "database", "vector", "search"] categories = ["database-implementations"] [workspace.dependencies] -lance = { "version" = "=0.18.2", "features" = ["dynamodb"] } -lance-index = { "version" = "=0.18.2" } -lance-linalg = { "version" = "=0.18.2" } -lance-table = { "version" = "=0.18.2" } -lance-testing = { "version" = "=0.18.2" } -lance-datafusion = { "version" = "=0.18.2" } -lance-encoding = { "version" = "=0.18.2" } +lance = { "version" = "=0.18.3", "features" = [ + "dynamodb", +], git = "https://github.com/lancedb/lance.git", tag = "v0.18.3-beta.2" } +lance-index = { "version" = "=0.18.3", git = "https://github.com/lancedb/lance.git", tag = "v0.18.3-beta.2" } +lance-linalg = { "version" = "=0.18.3", git = "https://github.com/lancedb/lance.git", tag = "v0.18.3-beta.2" } +lance-table = { "version" = "=0.18.3", git = "https://github.com/lancedb/lance.git", tag = "v0.18.3-beta.2" } +lance-testing = { "version" = "=0.18.3", git = "https://github.com/lancedb/lance.git", tag = "v0.18.3-beta.2" } +lance-datafusion = { "version" = "=0.18.3", git = "https://github.com/lancedb/lance.git", tag = "v0.18.3-beta.2" } +lance-encoding = { "version" = "=0.18.3", git = "https://github.com/lancedb/lance.git", tag = "v0.18.3-beta.2" } # Note that this one does not include pyarrow arrow = { version = "52.2", optional = false } arrow-array = "52.2" diff --git a/python/pyproject.toml b/python/pyproject.toml index 394cc4a4..86b75067 100644 --- a/python/pyproject.toml +++ b/python/pyproject.toml @@ -3,7 +3,7 @@ name = "lancedb" # version in Cargo.toml dependencies = [ "deprecation", - "pylance==0.18.2", + "pylance==0.18.3-beta.2", "requests>=2.31.0", "tqdm>=4.27.0", "pydantic>=1.10", From 46486d4d2200dac530704c70edea40e0179995bc Mon Sep 17 00:00:00 2001 From: Will Jones Date: Wed, 16 Oct 2024 10:39:40 -0700 Subject: [PATCH 28/51] fix: list_indices can handle fts indexes (#1753) Fixes #1752 --- rust/lancedb/src/index.rs | 2 +- rust/lancedb/src/table.rs | 72 +++++++++++++++++++++++++-------------- 2 files changed, 47 insertions(+), 27 deletions(-) diff --git a/rust/lancedb/src/index.rs b/rust/lancedb/src/index.rs index 1ff80137..47b42050 100644 --- a/rust/lancedb/src/index.rs +++ b/rust/lancedb/src/index.rs @@ -144,7 +144,7 @@ impl std::str::FromStr for IndexType { "BTREE" => Ok(Self::BTree), 
"BITMAP" => Ok(Self::Bitmap), "LABEL_LIST" | "LABELLIST" => Ok(Self::LabelList), - "FTS" => Ok(Self::FTS), + "FTS" | "INVERTED" => Ok(Self::FTS), "IVF_PQ" => Ok(Self::IvfPq), "IVF_HNSW_PQ" => Ok(Self::IvfHnswPq), "IVF_HNSW_SQ" => Ok(Self::IvfHnswSq), diff --git a/rust/lancedb/src/table.rs b/rust/lancedb/src/table.rs index 5f286cf7..7b611de7 100644 --- a/rust/lancedb/src/table.rs +++ b/rust/lancedb/src/table.rs @@ -2110,7 +2110,6 @@ mod tests { use arrow_schema::{DataType, Field, Schema, TimeUnit}; use futures::TryStreamExt; use lance::dataset::{Dataset, WriteMode}; - use lance::index::DatasetIndexInternalExt; use lance::io::{ObjectStoreParams, WrappingObjectStore}; use rand::Rng; use tempfile::tempdir; @@ -3002,22 +3001,8 @@ mod tests { let index_configs = table.list_indices().await.unwrap(); assert_eq!(index_configs.len(), 1); let index = index_configs.into_iter().next().unwrap(); - // TODO: Fix via https://github.com/lancedb/lance/issues/2039 - // assert_eq!(index.index_type, crate::index::IndexType::Bitmap); + assert_eq!(index.index_type, crate::index::IndexType::Bitmap); assert_eq!(index.columns, vec!["category".to_string()]); - - // For now, just open the index to verify its type - let lance_dataset = table.as_native().unwrap().dataset.get().await.unwrap(); - let indices = lance_dataset - .load_indices_by_name(&index.name) - .await - .unwrap(); - let index_meta = &indices[0]; - let idx = lance_dataset - .open_scalar_index("category", &index_meta.uuid.to_string()) - .await - .unwrap(); - assert_eq!(idx.index_type(), IndexType::Bitmap); } #[tokio::test] @@ -3086,22 +3071,57 @@ mod tests { let index_configs = table.list_indices().await.unwrap(); assert_eq!(index_configs.len(), 1); let index = index_configs.into_iter().next().unwrap(); - // TODO: Fix via https://github.com/lancedb/lance/issues/2039 - // assert_eq!(index.index_type, crate::index::IndexType::LabelList); + assert_eq!(index.index_type, crate::index::IndexType::LabelList); assert_eq!(index.columns, vec!["tags".to_string()]); + } - // For now, just open the index to verify its type - let lance_dataset = table.as_native().unwrap().dataset.get().await.unwrap(); - let indices = lance_dataset - .load_indices_by_name(&index.name) + #[tokio::test] + async fn test_create_inverted_index() { + let tmp_dir = tempdir().unwrap(); + let uri = tmp_dir.path().to_str().unwrap(); + + let conn = ConnectBuilder::new(uri).execute().await.unwrap(); + const WORDS: [&str; 3] = ["cat", "dog", "fish"]; + let mut text_builder = StringBuilder::new(); + let num_rows = 120; + for i in 0..num_rows { + text_builder.append_value(WORDS[i % 3]); + } + let text = Arc::new(text_builder.finish()); + + let schema = Arc::new(Schema::new(vec![ + Field::new("id", DataType::Int32, false), + Field::new("text", DataType::Utf8, true), + ])); + let batch = RecordBatch::try_new( + schema.clone(), + vec![ + Arc::new(Int32Array::from_iter_values(0..num_rows as i32)), + text, + ], + ) + .unwrap(); + + let table = conn + .create_table( + "test_bitmap", + RecordBatchIterator::new(vec![Ok(batch.clone())], batch.schema()), + ) + .execute() .await .unwrap(); - let index_meta = &indices[0]; - let idx = lance_dataset - .open_scalar_index("tags", &index_meta.uuid.to_string()) + + table + .create_index(&["text"], Index::FTS(Default::default())) + .execute() .await .unwrap(); - assert_eq!(idx.index_type(), IndexType::LabelList); + let index_configs = table.list_indices().await.unwrap(); + assert_eq!(index_configs.len(), 1); + let index = index_configs.into_iter().next().unwrap(); + 
assert_eq!(index.index_type, crate::index::IndexType::FTS); + assert_eq!(index.columns, vec!["text".to_string()]); + assert_eq!(index.name, "text_idx"); } #[tokio::test] From 99e4db0d6a0cab84ff6f17cd4eb961bdd1367d51 Mon Sep 17 00:00:00 2001 From: Will Jones Date: Thu, 17 Oct 2024 11:58:15 -0700 Subject: [PATCH 29/51] feat(rust): allow `add_embedding` on `create_empty_table` (#1754) Fixes https://github.com/lancedb/lancedb/issues/1750 --- rust/lancedb/src/connection.rs | 36 +++++++++++++++++++--------------- 1 file changed, 20 insertions(+), 16 deletions(-) diff --git a/rust/lancedb/src/connection.rs b/rust/lancedb/src/connection.rs index 29403528..44a6b443 100644 --- a/rust/lancedb/src/connection.rs +++ b/rust/lancedb/src/connection.rs @@ -196,22 +196,6 @@ impl CreateTableBuilder { }; Ok((data, builder)) } - - pub fn add_embedding(mut self, definition: EmbeddingDefinition) -> Result { - // Early verification of the embedding name - let embedding_func = self - .parent - .embedding_registry() - .get(&definition.embedding_name) - .ok_or_else(|| Error::EmbeddingFunctionNotFound { - name: definition.embedding_name.clone(), - reason: "No embedding function found in the connection's embedding_registry" - .to_string(), - })?; - - self.embeddings.push((definition, embedding_func)); - Ok(self) - } } // Builder methods that only apply when we do not have initial data @@ -329,6 +313,26 @@ impl CreateTableBuilder { }; self } + + /// Add an embedding definition to the table. + /// + /// The `embedding_name` must match the name of an embedding function that + /// was previously registered with the connection's [`EmbeddingRegistry`]. + pub fn add_embedding(mut self, definition: EmbeddingDefinition) -> Result { + // Early verification of the embedding name + let embedding_func = self + .parent + .embedding_registry() + .get(&definition.embedding_name) + .ok_or_else(|| Error::EmbeddingFunctionNotFound { + name: definition.embedding_name.clone(), + reason: "No embedding function found in the connection's embedding_registry" + .to_string(), + })?; + + self.embeddings.push((definition, embedding_func)); + Ok(self) + } } #[derive(Clone, Debug)] From 1f41101897a64a9c598f5eb1954146f0dbaff5ca Mon Sep 17 00:00:00 2001 From: Lance Release Date: Thu, 17 Oct 2024 18:58:45 +0000 Subject: [PATCH 30/51] =?UTF-8?q?Bump=20version:=200.14.0=20=E2=86=92=200.?= =?UTF-8?q?14.1-beta.0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- python/.bumpversion.toml | 2 +- python/Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/python/.bumpversion.toml b/python/.bumpversion.toml index 7fce1ac5..8a19a9ec 100644 --- a/python/.bumpversion.toml +++ b/python/.bumpversion.toml @@ -1,5 +1,5 @@ [tool.bumpversion] -current_version = "0.14.0" +current_version = "0.14.1-beta.0" parse = """(?x) (?P0|[1-9]\\d*)\\. (?P0|[1-9]\\d*)\\. 
diff --git a/python/Cargo.toml b/python/Cargo.toml index 16178abc..5b42a4b9 100644 --- a/python/Cargo.toml +++ b/python/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "lancedb-python" -version = "0.14.0" +version = "0.14.1-beta.0" edition.workspace = true description = "Python bindings for LanceDB" license.workspace = true From d406eab2c8ab2f21dc40e117abb8a69fe7779306 Mon Sep 17 00:00:00 2001 From: Lance Release Date: Thu, 17 Oct 2024 18:59:01 +0000 Subject: [PATCH 31/51] =?UTF-8?q?Bump=20version:=200.11.0=20=E2=86=92=200.?= =?UTF-8?q?11.1-beta.0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .bumpversion.toml | 2 +- java/core/pom.xml | 2 +- java/pom.xml | 2 +- node/package.json | 12 ++++++------ nodejs/Cargo.toml | 2 +- nodejs/npm/darwin-arm64/package.json | 2 +- nodejs/npm/darwin-x64/package.json | 2 +- nodejs/npm/linux-arm64-gnu/package.json | 2 +- nodejs/npm/linux-x64-gnu/package.json | 2 +- nodejs/npm/win32-x64-msvc/package.json | 2 +- nodejs/package.json | 2 +- rust/ffi/node/Cargo.toml | 2 +- rust/lancedb/Cargo.toml | 2 +- 13 files changed, 18 insertions(+), 18 deletions(-) diff --git a/.bumpversion.toml b/.bumpversion.toml index 8c58e6c5..1e853bb4 100644 --- a/.bumpversion.toml +++ b/.bumpversion.toml @@ -1,5 +1,5 @@ [tool.bumpversion] -current_version = "0.11.0" +current_version = "0.11.1-beta.0" parse = """(?x) (?P0|[1-9]\\d*)\\. (?P0|[1-9]\\d*)\\. diff --git a/java/core/pom.xml b/java/core/pom.xml index 88dd775f..d1d386f5 100644 --- a/java/core/pom.xml +++ b/java/core/pom.xml @@ -8,7 +8,7 @@ com.lancedb lancedb-parent - 0.11.0-final.0 + 0.11.1-beta.0 ../pom.xml diff --git a/java/pom.xml b/java/pom.xml index 46b7049b..ee861ad0 100644 --- a/java/pom.xml +++ b/java/pom.xml @@ -6,7 +6,7 @@ com.lancedb lancedb-parent - 0.11.0-final.0 + 0.11.1-beta.0 pom LanceDB Parent diff --git a/node/package.json b/node/package.json index cb1fb55c..51723fab 100644 --- a/node/package.json +++ b/node/package.json @@ -1,6 +1,6 @@ { "name": "vectordb", - "version": "0.11.0", + "version": "0.11.1-beta.0", "description": " Serverless, low-latency vector database for AI applications", "main": "dist/index.js", "types": "dist/index.d.ts", @@ -88,10 +88,10 @@ } }, "optionalDependencies": { - "@lancedb/vectordb-darwin-arm64": "0.11.0", - "@lancedb/vectordb-darwin-x64": "0.11.0", - "@lancedb/vectordb-linux-arm64-gnu": "0.11.0", - "@lancedb/vectordb-linux-x64-gnu": "0.11.0", - "@lancedb/vectordb-win32-x64-msvc": "0.11.0" + "@lancedb/vectordb-darwin-arm64": "0.11.1-beta.0", + "@lancedb/vectordb-darwin-x64": "0.11.1-beta.0", + "@lancedb/vectordb-linux-arm64-gnu": "0.11.1-beta.0", + "@lancedb/vectordb-linux-x64-gnu": "0.11.1-beta.0", + "@lancedb/vectordb-win32-x64-msvc": "0.11.1-beta.0" } } diff --git a/nodejs/Cargo.toml b/nodejs/Cargo.toml index aca34cef..d5e15fe2 100644 --- a/nodejs/Cargo.toml +++ b/nodejs/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "lancedb-nodejs" edition.workspace = true -version = "0.11.0" +version = "0.11.1-beta.0" license.workspace = true description.workspace = true repository.workspace = true diff --git a/nodejs/npm/darwin-arm64/package.json b/nodejs/npm/darwin-arm64/package.json index a11285fa..119f0653 100644 --- a/nodejs/npm/darwin-arm64/package.json +++ b/nodejs/npm/darwin-arm64/package.json @@ -1,6 +1,6 @@ { "name": "@lancedb/lancedb-darwin-arm64", - "version": "0.11.0", + "version": "0.11.1-beta.0", "os": ["darwin"], "cpu": ["arm64"], "main": "lancedb.darwin-arm64.node", diff --git a/nodejs/npm/darwin-x64/package.json 
b/nodejs/npm/darwin-x64/package.json index a75f5392..99f7e1a2 100644 --- a/nodejs/npm/darwin-x64/package.json +++ b/nodejs/npm/darwin-x64/package.json @@ -1,6 +1,6 @@ { "name": "@lancedb/lancedb-darwin-x64", - "version": "0.11.0", + "version": "0.11.1-beta.0", "os": ["darwin"], "cpu": ["x64"], "main": "lancedb.darwin-x64.node", diff --git a/nodejs/npm/linux-arm64-gnu/package.json b/nodejs/npm/linux-arm64-gnu/package.json index 4eb26c40..a659b936 100644 --- a/nodejs/npm/linux-arm64-gnu/package.json +++ b/nodejs/npm/linux-arm64-gnu/package.json @@ -1,6 +1,6 @@ { "name": "@lancedb/lancedb-linux-arm64-gnu", - "version": "0.11.0", + "version": "0.11.1-beta.0", "os": ["linux"], "cpu": ["arm64"], "main": "lancedb.linux-arm64-gnu.node", diff --git a/nodejs/npm/linux-x64-gnu/package.json b/nodejs/npm/linux-x64-gnu/package.json index a0761058..21b4adc9 100644 --- a/nodejs/npm/linux-x64-gnu/package.json +++ b/nodejs/npm/linux-x64-gnu/package.json @@ -1,6 +1,6 @@ { "name": "@lancedb/lancedb-linux-x64-gnu", - "version": "0.11.0", + "version": "0.11.1-beta.0", "os": ["linux"], "cpu": ["x64"], "main": "lancedb.linux-x64-gnu.node", diff --git a/nodejs/npm/win32-x64-msvc/package.json b/nodejs/npm/win32-x64-msvc/package.json index c059db9a..cd457f2f 100644 --- a/nodejs/npm/win32-x64-msvc/package.json +++ b/nodejs/npm/win32-x64-msvc/package.json @@ -1,6 +1,6 @@ { "name": "@lancedb/lancedb-win32-x64-msvc", - "version": "0.11.0", + "version": "0.11.1-beta.0", "os": ["win32"], "cpu": ["x64"], "main": "lancedb.win32-x64-msvc.node", diff --git a/nodejs/package.json b/nodejs/package.json index 024766c0..f218a007 100644 --- a/nodejs/package.json +++ b/nodejs/package.json @@ -10,7 +10,7 @@ "vector database", "ann" ], - "version": "0.11.0", + "version": "0.11.1-beta.0", "main": "dist/index.js", "exports": { ".": "./dist/index.js", diff --git a/rust/ffi/node/Cargo.toml b/rust/ffi/node/Cargo.toml index e8225599..2cfd0026 100644 --- a/rust/ffi/node/Cargo.toml +++ b/rust/ffi/node/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "lancedb-node" -version = "0.11.0" +version = "0.11.1-beta.0" description = "Serverless, low-latency vector database for AI applications" license.workspace = true edition.workspace = true diff --git a/rust/lancedb/Cargo.toml b/rust/lancedb/Cargo.toml index 7a441c01..2d92ec2c 100644 --- a/rust/lancedb/Cargo.toml +++ b/rust/lancedb/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "lancedb" -version = "0.11.0" +version = "0.11.1-beta.0" edition.workspace = true description = "LanceDB: A serverless, low-latency vector database for AI applications" license.workspace = true From 437316cbbc70c0c84dabd804d790c53851dcb07c Mon Sep 17 00:00:00 2001 From: Lance Release Date: Thu, 17 Oct 2024 18:59:18 +0000 Subject: [PATCH 32/51] Updating package-lock.json --- node/package-lock.json | 74 ++++-------------------------------------- 1 file changed, 7 insertions(+), 67 deletions(-) diff --git a/node/package-lock.json b/node/package-lock.json index ef31e324..9ad26c38 100644 --- a/node/package-lock.json +++ b/node/package-lock.json @@ -1,12 +1,12 @@ { "name": "vectordb", - "version": "0.11.0", + "version": "0.11.1-beta.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "vectordb", - "version": "0.11.0", + "version": "0.11.1-beta.0", "cpu": [ "x64", "arm64" @@ -52,11 +52,11 @@ "uuid": "^9.0.0" }, "optionalDependencies": { - "@lancedb/vectordb-darwin-arm64": "0.11.0", - "@lancedb/vectordb-darwin-x64": "0.11.0", - "@lancedb/vectordb-linux-arm64-gnu": "0.11.0", - "@lancedb/vectordb-linux-x64-gnu": "0.11.0", - 
"@lancedb/vectordb-win32-x64-msvc": "0.11.0" + "@lancedb/vectordb-darwin-arm64": "0.11.1-beta.0", + "@lancedb/vectordb-darwin-x64": "0.11.1-beta.0", + "@lancedb/vectordb-linux-arm64-gnu": "0.11.1-beta.0", + "@lancedb/vectordb-linux-x64-gnu": "0.11.1-beta.0", + "@lancedb/vectordb-win32-x64-msvc": "0.11.1-beta.0" }, "peerDependencies": { "@apache-arrow/ts": "^14.0.2", @@ -326,66 +326,6 @@ "@jridgewell/sourcemap-codec": "^1.4.10" } }, - "node_modules/@lancedb/vectordb-darwin-arm64": { - "version": "0.11.0", - "resolved": "https://registry.npmjs.org/@lancedb/vectordb-darwin-arm64/-/vectordb-darwin-arm64-0.11.0.tgz", - "integrity": "sha512-ffI9sLdlJ0L0FjKVy5QpznRTgVaEGL2INJVcJauuzsYY2aOC3weNfE+v58n/cm9I/NulTdu1BemwzFpESoZf5A==", - "cpu": [ - "arm64" - ], - "optional": true, - "os": [ - "darwin" - ] - }, - "node_modules/@lancedb/vectordb-darwin-x64": { - "version": "0.11.0", - "resolved": "https://registry.npmjs.org/@lancedb/vectordb-darwin-x64/-/vectordb-darwin-x64-0.11.0.tgz", - "integrity": "sha512-sMGKVmTj7Gt1z+1Sy24toCV8UgcQkX0ljQU1QunVEzJvoP9yah/DN5rw5Ozxiv8Obk6Pz3BMZYqV3BPmL9AiAg==", - "cpu": [ - "x64" - ], - "optional": true, - "os": [ - "darwin" - ] - }, - "node_modules/@lancedb/vectordb-linux-arm64-gnu": { - "version": "0.11.0", - "resolved": "https://registry.npmjs.org/@lancedb/vectordb-linux-arm64-gnu/-/vectordb-linux-arm64-gnu-0.11.0.tgz", - "integrity": "sha512-BQTiTbvJfNKEye9FRomItlFcbOoYCV8frBrQMIfli4q9GECwgBmXQaWP+rEZZrdqfG0DivTQJ0YSSHgAy3wCcA==", - "cpu": [ - "arm64" - ], - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@lancedb/vectordb-linux-x64-gnu": { - "version": "0.11.0", - "resolved": "https://registry.npmjs.org/@lancedb/vectordb-linux-x64-gnu/-/vectordb-linux-x64-gnu-0.11.0.tgz", - "integrity": "sha512-+RHu6YY311N21ZBM8OYbBFNuW+rqq0AC7Vp5eBvWKTOeNIf1Lz2vFAKhDPOgJt+ROoT/nzKRbksIEeIvnYQJNw==", - "cpu": [ - "x64" - ], - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@lancedb/vectordb-win32-x64-msvc": { - "version": "0.11.0", - "resolved": "https://registry.npmjs.org/@lancedb/vectordb-win32-x64-msvc/-/vectordb-win32-x64-msvc-0.11.0.tgz", - "integrity": "sha512-IWdhJdiYIkJW+njNlRVNGG1bnGlQs+Wbrjy/NIZhVxch2yCj9gknZqWCuSHNR43a2oAdUY/kXgWL2AKEOfK1CQ==", - "cpu": [ - "x64" - ], - "optional": true, - "os": [ - "win32" - ] - }, "node_modules/@neon-rs/cli": { "version": "0.0.160", "resolved": "https://registry.npmjs.org/@neon-rs/cli/-/cli-0.0.160.tgz", From 48f46d47510ccc9bdfb92c36579efb5a61b0c9d1 Mon Sep 17 00:00:00 2001 From: Will Jones Date: Fri, 18 Oct 2024 10:53:28 -0700 Subject: [PATCH 33/51] docs(node): update `indexStats` signature and regenerate docs (#1742) `indexStats` still referenced UUID even though in https://github.com/lancedb/lancedb/pull/1702 we changed it to take name instead. 
--- docs/src/javascript/README.md | 13 - .../javascript/classes/DefaultWriteOptions.md | 2 +- .../src/javascript/classes/LocalConnection.md | 57 +++- docs/src/javascript/classes/LocalTable.md | 244 +++++++++++++++--- .../classes/MakeArrowTableOptions.md | 82 ++++++ .../classes/OpenAIEmbeddingFunction.md | 12 +- docs/src/javascript/classes/Query.md | 80 ++++-- docs/src/javascript/enums/IndexStatus.md | 52 ++++ docs/src/javascript/enums/MetricType.md | 6 +- docs/src/javascript/enums/WriteMode.md | 6 +- .../javascript/interfaces/AwsCredentials.md | 6 +- .../src/javascript/interfaces/CleanupStats.md | 4 +- .../javascript/interfaces/ColumnAlteration.md | 53 ++++ .../interfaces/CompactionMetrics.md | 8 +- .../interfaces/CompactionOptions.md | 12 +- docs/src/javascript/interfaces/Connection.md | 55 +++- .../interfaces/ConnectionOptions.md | 82 +++++- .../interfaces/CreateTableOptions.md | 10 +- .../interfaces/EmbeddingFunction.md | 69 ++++- docs/src/javascript/interfaces/IndexStats.md | 37 ++- .../javascript/interfaces/IvfPQIndexConfig.md | 24 +- .../javascript/interfaces/MergeInsertArgs.md | 73 ++++++ docs/src/javascript/interfaces/Table.md | 232 +++++++++++++++-- docs/src/javascript/interfaces/UpdateArgs.md | 4 +- .../javascript/interfaces/UpdateSqlArgs.md | 4 +- docs/src/javascript/interfaces/VectorIndex.md | 17 +- .../src/javascript/interfaces/WriteOptions.md | 2 +- docs/src/javascript/modules.md | 158 +++++++++++- node/src/index.ts | 6 +- node/src/remote/index.ts | 4 +- 30 files changed, 1220 insertions(+), 194 deletions(-) create mode 100644 docs/src/javascript/classes/MakeArrowTableOptions.md create mode 100644 docs/src/javascript/enums/IndexStatus.md create mode 100644 docs/src/javascript/interfaces/ColumnAlteration.md create mode 100644 docs/src/javascript/interfaces/MergeInsertArgs.md diff --git a/docs/src/javascript/README.md b/docs/src/javascript/README.md index bc632d1e..5c455510 100644 --- a/docs/src/javascript/README.md +++ b/docs/src/javascript/README.md @@ -41,7 +41,6 @@ To build everything fresh: ```bash npm install -npm run tsc npm run build ``` @@ -51,18 +50,6 @@ Then you should be able to run the tests with: npm test ``` -### Rebuilding Rust library - -```bash -npm run build -``` - -### Rebuilding Typescript - -```bash -npm run tsc -``` - ### Fix lints To run the linter and have it automatically fix all errors diff --git a/docs/src/javascript/classes/DefaultWriteOptions.md b/docs/src/javascript/classes/DefaultWriteOptions.md index 9efe7aeb..2103dc98 100644 --- a/docs/src/javascript/classes/DefaultWriteOptions.md +++ b/docs/src/javascript/classes/DefaultWriteOptions.md @@ -38,4 +38,4 @@ A [WriteMode](../enums/WriteMode.md) to use on this operation #### Defined in -[index.ts:1019](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L1019) +[index.ts:1359](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L1359) diff --git a/docs/src/javascript/classes/LocalConnection.md b/docs/src/javascript/classes/LocalConnection.md index 1a29d690..cfb0a4b9 100644 --- a/docs/src/javascript/classes/LocalConnection.md +++ b/docs/src/javascript/classes/LocalConnection.md @@ -30,6 +30,7 @@ A connection to a LanceDB database. - [dropTable](LocalConnection.md#droptable) - [openTable](LocalConnection.md#opentable) - [tableNames](LocalConnection.md#tablenames) +- [withMiddleware](LocalConnection.md#withmiddleware) ## Constructors @@ -46,7 +47,7 @@ A connection to a LanceDB database. 
#### Defined in -[index.ts:489](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L489) +[index.ts:739](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L739) ## Properties @@ -56,7 +57,7 @@ A connection to a LanceDB database. #### Defined in -[index.ts:487](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L487) +[index.ts:737](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L737) ___ @@ -74,7 +75,7 @@ ___ #### Defined in -[index.ts:486](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L486) +[index.ts:736](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L736) ## Accessors @@ -92,7 +93,7 @@ ___ #### Defined in -[index.ts:494](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L494) +[index.ts:744](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L744) ## Methods @@ -113,7 +114,7 @@ Creates a new Table, optionally initializing it with new data. | Name | Type | | :------ | :------ | | `name` | `string` \| [`CreateTableOptions`](../interfaces/CreateTableOptions.md)\<`T`\> | -| `data?` | `Record`\<`string`, `unknown`\>[] | +| `data?` | `Table`\<`any`\> \| `Record`\<`string`, `unknown`\>[] | | `optsOrEmbedding?` | [`WriteOptions`](../interfaces/WriteOptions.md) \| [`EmbeddingFunction`](../interfaces/EmbeddingFunction.md)\<`T`\> | | `opt?` | [`WriteOptions`](../interfaces/WriteOptions.md) | @@ -127,7 +128,7 @@ Creates a new Table, optionally initializing it with new data. #### Defined in -[index.ts:542](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L542) +[index.ts:788](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L788) ___ @@ -158,7 +159,7 @@ ___ #### Defined in -[index.ts:576](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L576) +[index.ts:822](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L822) ___ @@ -184,7 +185,7 @@ Drop an existing table. #### Defined in -[index.ts:630](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L630) +[index.ts:876](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L876) ___ @@ -210,7 +211,7 @@ Open a table in the database. #### Defined in -[index.ts:510](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L510) +[index.ts:760](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L760) β–Έ **openTable**\<`T`\>(`name`, `embeddings`): `Promise`\<[`Table`](../interfaces/Table.md)\<`T`\>\> @@ -239,7 +240,7 @@ Connection.openTable #### Defined in -[index.ts:518](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L518) +[index.ts:768](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L768) β–Έ **openTable**\<`T`\>(`name`, `embeddings?`): `Promise`\<[`Table`](../interfaces/Table.md)\<`T`\>\> @@ -266,7 +267,7 @@ Connection.openTable #### Defined in -[index.ts:522](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L522) +[index.ts:772](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L772) ___ @@ -286,4 +287,36 @@ Get the names of all tables in the database. #### Defined in -[index.ts:501](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L501) +[index.ts:751](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L751) + +___ + +### withMiddleware + +β–Έ **withMiddleware**(`middleware`): [`Connection`](../interfaces/Connection.md) + +Instrument the behavior of this Connection with middleware. 
+ +The middleware will be called in the order they are added. + +Currently this functionality is only supported for remote Connections. + +#### Parameters + +| Name | Type | +| :------ | :------ | +| `middleware` | `HttpMiddleware` | + +#### Returns + +[`Connection`](../interfaces/Connection.md) + +- this Connection instrumented by the passed middleware + +#### Implementation of + +[Connection](../interfaces/Connection.md).[withMiddleware](../interfaces/Connection.md#withmiddleware) + +#### Defined in + +[index.ts:880](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L880) diff --git a/docs/src/javascript/classes/LocalTable.md b/docs/src/javascript/classes/LocalTable.md index 476a4b0e..69b4a01c 100644 --- a/docs/src/javascript/classes/LocalTable.md +++ b/docs/src/javascript/classes/LocalTable.md @@ -37,6 +37,8 @@ A LanceDB Table is the collection of Records. Each Record has one or more vector ### Methods - [add](LocalTable.md#add) +- [addColumns](LocalTable.md#addcolumns) +- [alterColumns](LocalTable.md#altercolumns) - [checkElectron](LocalTable.md#checkelectron) - [cleanupOldVersions](LocalTable.md#cleanupoldversions) - [compactFiles](LocalTable.md#compactfiles) @@ -44,13 +46,16 @@ A LanceDB Table is the collection of Records. Each Record has one or more vector - [createIndex](LocalTable.md#createindex) - [createScalarIndex](LocalTable.md#createscalarindex) - [delete](LocalTable.md#delete) +- [dropColumns](LocalTable.md#dropcolumns) - [filter](LocalTable.md#filter) - [getSchema](LocalTable.md#getschema) - [indexStats](LocalTable.md#indexstats) - [listIndices](LocalTable.md#listindices) +- [mergeInsert](LocalTable.md#mergeinsert) - [overwrite](LocalTable.md#overwrite) - [search](LocalTable.md#search) - [update](LocalTable.md#update) +- [withMiddleware](LocalTable.md#withmiddleware) ## Constructors @@ -74,7 +79,7 @@ A LanceDB Table is the collection of Records. Each Record has one or more vector #### Defined in -[index.ts:642](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L642) +[index.ts:892](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L892) β€’ **new LocalTable**\<`T`\>(`tbl`, `name`, `options`, `embeddings`) @@ -95,7 +100,7 @@ A LanceDB Table is the collection of Records. Each Record has one or more vector #### Defined in -[index.ts:649](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L649) +[index.ts:899](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L899) ## Properties @@ -105,7 +110,7 @@ A LanceDB Table is the collection of Records. 
Each Record has one or more vector #### Defined in -[index.ts:639](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L639) +[index.ts:889](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L889) ___ @@ -115,7 +120,7 @@ ___ #### Defined in -[index.ts:638](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L638) +[index.ts:888](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L888) ___ @@ -125,7 +130,7 @@ ___ #### Defined in -[index.ts:637](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L637) +[index.ts:887](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L887) ___ @@ -143,7 +148,7 @@ ___ #### Defined in -[index.ts:640](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L640) +[index.ts:890](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L890) ___ @@ -153,7 +158,7 @@ ___ #### Defined in -[index.ts:636](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L636) +[index.ts:886](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L886) ___ @@ -179,7 +184,7 @@ Creates a filter query to find all rows matching the specified criteria #### Defined in -[index.ts:688](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L688) +[index.ts:938](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L938) ## Accessors @@ -197,7 +202,7 @@ Creates a filter query to find all rows matching the specified criteria #### Defined in -[index.ts:668](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L668) +[index.ts:918](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L918) ___ @@ -215,7 +220,7 @@ ___ #### Defined in -[index.ts:849](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L849) +[index.ts:1171](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L1171) ## Methods @@ -229,7 +234,7 @@ Insert records into this Table. | Name | Type | Description | | :------ | :------ | :------ | -| `data` | `Record`\<`string`, `unknown`\>[] | Records to be inserted into the Table | +| `data` | `Table`\<`any`\> \| `Record`\<`string`, `unknown`\>[] | Records to be inserted into the Table | #### Returns @@ -243,7 +248,59 @@ The number of rows added to the table #### Defined in -[index.ts:696](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L696) +[index.ts:946](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L946) + +___ + +### addColumns + +β–Έ **addColumns**(`newColumnTransforms`): `Promise`\<`void`\> + +Add new columns with defined values. + +#### Parameters + +| Name | Type | Description | +| :------ | :------ | :------ | +| `newColumnTransforms` | \{ `name`: `string` ; `valueSql`: `string` }[] | pairs of column names and the SQL expression to use to calculate the value of the new column. These expressions will be evaluated for each row in the table, and can reference existing columns in the table. | + +#### Returns + +`Promise`\<`void`\> + +#### Implementation of + +[Table](../interfaces/Table.md).[addColumns](../interfaces/Table.md#addcolumns) + +#### Defined in + +[index.ts:1195](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L1195) + +___ + +### alterColumns + +β–Έ **alterColumns**(`columnAlterations`): `Promise`\<`void`\> + +Alter the name or nullability of columns. 
+ +#### Parameters + +| Name | Type | Description | +| :------ | :------ | :------ | +| `columnAlterations` | [`ColumnAlteration`](../interfaces/ColumnAlteration.md)[] | One or more alterations to apply to columns. | + +#### Returns + +`Promise`\<`void`\> + +#### Implementation of + +[Table](../interfaces/Table.md).[alterColumns](../interfaces/Table.md#altercolumns) + +#### Defined in + +[index.ts:1201](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L1201) ___ @@ -257,7 +314,7 @@ ___ #### Defined in -[index.ts:861](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L861) +[index.ts:1183](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L1183) ___ @@ -280,7 +337,7 @@ Clean up old versions of the table, freeing disk space. #### Defined in -[index.ts:808](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L808) +[index.ts:1130](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L1130) ___ @@ -307,16 +364,22 @@ Metrics about the compaction operation. #### Defined in -[index.ts:831](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L831) +[index.ts:1153](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L1153) ___ ### countRows -β–Έ **countRows**(): `Promise`\<`number`\> +β–Έ **countRows**(`filter?`): `Promise`\<`number`\> Returns the number of rows in this table. +#### Parameters + +| Name | Type | +| :------ | :------ | +| `filter?` | `string` | + #### Returns `Promise`\<`number`\> @@ -327,7 +390,7 @@ Returns the number of rows in this table. #### Defined in -[index.ts:749](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L749) +[index.ts:1021](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L1021) ___ @@ -357,13 +420,13 @@ VectorIndexParams. #### Defined in -[index.ts:734](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L734) +[index.ts:1003](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L1003) ___ ### createScalarIndex -β–Έ **createScalarIndex**(`column`, `replace`): `Promise`\<`void`\> +β–Έ **createScalarIndex**(`column`, `replace?`): `Promise`\<`void`\> Create a scalar index on this Table for the given column @@ -372,7 +435,7 @@ Create a scalar index on this Table for the given column | Name | Type | Description | | :------ | :------ | :------ | | `column` | `string` | The column to index | -| `replace` | `boolean` | If false, fail if an index already exists on the column Scalar indices, like vector indices, can be used to speed up scans. A scalar index can speed up scans that contain filter expressions on the indexed column. For example, the following scan will be faster if the column `my_col` has a scalar index: ```ts const con = await lancedb.connect('./.lancedb'); const table = await con.openTable('images'); const results = await table.where('my_col = 7').execute(); ``` Scalar indices can also speed up scans containing a vector search and a prefilter: ```ts const con = await lancedb.connect('././lancedb'); const table = await con.openTable('images'); const results = await table.search([1.0, 2.0]).where('my_col != 7').prefilter(true); ``` Scalar indices can only speed up scans for basic filters using equality, comparison, range (e.g. `my_col BETWEEN 0 AND 100`), and set membership (e.g. `my_col IN (0, 1, 2)`) Scalar indices can be used if the filter contains multiple indexed columns and the filter criteria are AND'd or OR'd together (e.g. 
`my_col < 0 AND other_col> 100`) Scalar indices may be used if the filter contains non-indexed columns but, depending on the structure of the filter, they may not be usable. For example, if the column `not_indexed` does not have a scalar index then the filter `my_col = 0 OR not_indexed = 1` will not be able to use any scalar index on `my_col`. | +| `replace?` | `boolean` | If false, fail if an index already exists on the column it is always set to true for remote connections Scalar indices, like vector indices, can be used to speed up scans. A scalar index can speed up scans that contain filter expressions on the indexed column. For example, the following scan will be faster if the column `my_col` has a scalar index: ```ts const con = await lancedb.connect('./.lancedb'); const table = await con.openTable('images'); const results = await table.where('my_col = 7').execute(); ``` Scalar indices can also speed up scans containing a vector search and a prefilter: ```ts const con = await lancedb.connect('././lancedb'); const table = await con.openTable('images'); const results = await table.search([1.0, 2.0]).where('my_col != 7').prefilter(true); ``` Scalar indices can only speed up scans for basic filters using equality, comparison, range (e.g. `my_col BETWEEN 0 AND 100`), and set membership (e.g. `my_col IN (0, 1, 2)`) Scalar indices can be used if the filter contains multiple indexed columns and the filter criteria are AND'd or OR'd together (e.g. `my_col < 0 AND other_col> 100`) Scalar indices may be used if the filter contains non-indexed columns but, depending on the structure of the filter, they may not be usable. For example, if the column `not_indexed` does not have a scalar index then the filter `my_col = 0 OR not_indexed = 1` will not be able to use any scalar index on `my_col`. | #### Returns @@ -392,7 +455,7 @@ await table.createScalarIndex('my_col') #### Defined in -[index.ts:742](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L742) +[index.ts:1011](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L1011) ___ @@ -418,7 +481,38 @@ Delete rows from this table. #### Defined in -[index.ts:758](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L758) +[index.ts:1030](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L1030) + +___ + +### dropColumns + +β–Έ **dropColumns**(`columnNames`): `Promise`\<`void`\> + +Drop one or more columns from the dataset + +This is a metadata-only operation and does not remove the data from the +underlying storage. In order to remove the data, you must subsequently +call ``compact_files`` to rewrite the data without the removed columns and +then call ``cleanup_files`` to remove the old files. + +#### Parameters + +| Name | Type | Description | +| :------ | :------ | :------ | +| `columnNames` | `string`[] | The names of the columns to drop. These can be nested column references (e.g. "a.b.c") or top-level column names (e.g. "a"). 
| + +#### Returns + +`Promise`\<`void`\> + +#### Implementation of + +[Table](../interfaces/Table.md).[dropColumns](../interfaces/Table.md#dropcolumns) + +#### Defined in + +[index.ts:1205](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L1205) ___ @@ -438,9 +532,13 @@ Creates a filter query to find all rows matching the specified criteria [`Query`](Query.md)\<`T`\> +#### Implementation of + +[Table](../interfaces/Table.md).[filter](../interfaces/Table.md#filter) + #### Defined in -[index.ts:684](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L684) +[index.ts:934](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L934) ___ @@ -454,13 +552,13 @@ ___ #### Defined in -[index.ts:854](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L854) +[index.ts:1176](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L1176) ___ ### indexStats -β–Έ **indexStats**(`indexUuid`): `Promise`\<[`IndexStats`](../interfaces/IndexStats.md)\> +β–Έ **indexStats**(`indexName`): `Promise`\<[`IndexStats`](../interfaces/IndexStats.md)\> Get statistics about an index. @@ -468,7 +566,7 @@ Get statistics about an index. | Name | Type | | :------ | :------ | -| `indexUuid` | `string` | +| `indexName` | `string` | #### Returns @@ -480,7 +578,7 @@ Get statistics about an index. #### Defined in -[index.ts:845](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L845) +[index.ts:1167](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L1167) ___ @@ -500,7 +598,57 @@ List the indicies on this table. #### Defined in -[index.ts:841](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L841) +[index.ts:1163](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L1163) + +___ + +### mergeInsert + +β–Έ **mergeInsert**(`on`, `data`, `args`): `Promise`\<`void`\> + +Runs a "merge insert" operation on the table + +This operation can add rows, update rows, and remove rows all in a single +transaction. It is a very generic tool that can be used to create +behaviors like "insert if not exists", "update or insert (i.e. upsert)", +or even replace a portion of existing data with new data (e.g. replace +all data where month="january") + +The merge insert operation works by combining new data from a +**source table** with existing data in a **target table** by using a +join. There are three categories of records. + +"Matched" records are records that exist in both the source table and +the target table. "Not matched" records exist only in the source table +(e.g. these are new data) "Not matched by source" records exist only +in the target table (this is old data) + +The MergeInsertArgs can be used to customize what should happen for +each category of data. + +Please note that the data may appear to be reordered as part of this +operation. This is because updated rows will be deleted from the +dataset and then reinserted at the end with the new values. + +#### Parameters + +| Name | Type | Description | +| :------ | :------ | :------ | +| `on` | `string` | a column to join on. This is how records from the source table and target table are matched. 
| +| `data` | `Table`\<`any`\> \| `Record`\<`string`, `unknown`\>[] | the new data to insert | +| `args` | [`MergeInsertArgs`](../interfaces/MergeInsertArgs.md) | parameters controlling how the operation should behave | + +#### Returns + +`Promise`\<`void`\> + +#### Implementation of + +[Table](../interfaces/Table.md).[mergeInsert](../interfaces/Table.md#mergeinsert) + +#### Defined in + +[index.ts:1065](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L1065) ___ @@ -514,7 +662,7 @@ Insert records into this Table, replacing its contents. | Name | Type | Description | | :------ | :------ | :------ | -| `data` | `Record`\<`string`, `unknown`\>[] | Records to be inserted into the Table | +| `data` | `Table`\<`any`\> \| `Record`\<`string`, `unknown`\>[] | Records to be inserted into the Table | #### Returns @@ -528,7 +676,7 @@ The number of rows added to the table #### Defined in -[index.ts:716](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L716) +[index.ts:977](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L977) ___ @@ -554,7 +702,7 @@ Creates a search query to find the nearest neighbors of the given search term #### Defined in -[index.ts:676](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L676) +[index.ts:926](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L926) ___ @@ -580,4 +728,36 @@ Update rows in this table. #### Defined in -[index.ts:771](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L771) +[index.ts:1043](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L1043) + +___ + +### withMiddleware + +β–Έ **withMiddleware**(`middleware`): [`Table`](../interfaces/Table.md)\<`T`\> + +Instrument the behavior of this Table with middleware. + +The middleware will be called in the order they are added. + +Currently this functionality is only supported for remote tables. + +#### Parameters + +| Name | Type | +| :------ | :------ | +| `middleware` | `HttpMiddleware` | + +#### Returns + +[`Table`](../interfaces/Table.md)\<`T`\> + +- this Table instrumented by the passed middleware + +#### Implementation of + +[Table](../interfaces/Table.md).[withMiddleware](../interfaces/Table.md#withmiddleware) + +#### Defined in + +[index.ts:1209](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L1209) diff --git a/docs/src/javascript/classes/MakeArrowTableOptions.md b/docs/src/javascript/classes/MakeArrowTableOptions.md new file mode 100644 index 00000000..93863765 --- /dev/null +++ b/docs/src/javascript/classes/MakeArrowTableOptions.md @@ -0,0 +1,82 @@ +[vectordb](../README.md) / [Exports](../modules.md) / MakeArrowTableOptions + +# Class: MakeArrowTableOptions + +Options to control the makeArrowTable call. 
+ +## Table of contents + +### Constructors + +- [constructor](MakeArrowTableOptions.md#constructor) + +### Properties + +- [dictionaryEncodeStrings](MakeArrowTableOptions.md#dictionaryencodestrings) +- [embeddings](MakeArrowTableOptions.md#embeddings) +- [schema](MakeArrowTableOptions.md#schema) +- [vectorColumns](MakeArrowTableOptions.md#vectorcolumns) + +## Constructors + +### constructor + +β€’ **new MakeArrowTableOptions**(`values?`) + +#### Parameters + +| Name | Type | +| :------ | :------ | +| `values?` | `Partial`\<[`MakeArrowTableOptions`](MakeArrowTableOptions.md)\> | + +#### Defined in + +[arrow.ts:98](https://github.com/lancedb/lancedb/blob/92179835/node/src/arrow.ts#L98) + +## Properties + +### dictionaryEncodeStrings + +β€’ **dictionaryEncodeStrings**: `boolean` = `false` + +If true then string columns will be encoded with dictionary encoding + +Set this to true if your string columns tend to repeat the same values +often. For more precise control use the `schema` property to specify the +data type for individual columns. + +If `schema` is provided then this property is ignored. + +#### Defined in + +[arrow.ts:96](https://github.com/lancedb/lancedb/blob/92179835/node/src/arrow.ts#L96) + +___ + +### embeddings + +β€’ `Optional` **embeddings**: [`EmbeddingFunction`](../interfaces/EmbeddingFunction.md)\<`any`\> + +#### Defined in + +[arrow.ts:85](https://github.com/lancedb/lancedb/blob/92179835/node/src/arrow.ts#L85) + +___ + +### schema + +β€’ `Optional` **schema**: `Schema`\<`any`\> + +#### Defined in + +[arrow.ts:63](https://github.com/lancedb/lancedb/blob/92179835/node/src/arrow.ts#L63) + +___ + +### vectorColumns + +β€’ **vectorColumns**: `Record`\<`string`, `VectorColumnOptions`\> + +#### Defined in + +[arrow.ts:81](https://github.com/lancedb/lancedb/blob/92179835/node/src/arrow.ts#L81) diff --git a/docs/src/javascript/classes/OpenAIEmbeddingFunction.md b/docs/src/javascript/classes/OpenAIEmbeddingFunction.md index 78f52a0b..f152b0de 100644 --- a/docs/src/javascript/classes/OpenAIEmbeddingFunction.md +++ b/docs/src/javascript/classes/OpenAIEmbeddingFunction.md @@ -40,7 +40,7 @@ An embedding function that automatically creates vector representation for a giv #### Defined in -[embedding/openai.ts:21](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/embedding/openai.ts#L21) +[embedding/openai.ts:22](https://github.com/lancedb/lancedb/blob/92179835/node/src/embedding/openai.ts#L22) ## Properties @@ -50,17 +50,17 @@ An embedding function that automatically creates vector representation for a giv #### Defined in -[embedding/openai.ts:19](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/embedding/openai.ts#L19) +[embedding/openai.ts:20](https://github.com/lancedb/lancedb/blob/92179835/node/src/embedding/openai.ts#L20) ___ ### \_openai -β€’ `Private` `Readonly` **\_openai**: `any` +β€’ `Private` `Readonly` **\_openai**: `OpenAI` #### Defined in -[embedding/openai.ts:18](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/embedding/openai.ts#L18) +[embedding/openai.ts:19](https://github.com/lancedb/lancedb/blob/92179835/node/src/embedding/openai.ts#L19) ___ @@ -76,7 +76,7 @@ The name of the column that will be used as input for the Embedding Function. 
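+
+For illustration only (hand-written, not generated output), the source column is the field the embedding function reads from when rows are written. This sketch assumes the constructor takes the source column name followed by an OpenAI API key, and that a connection `db` is already open via `import * as lancedb from "vectordb"`:
+
+```ts
+// "text" is the source column; generated vectors are stored alongside it.
+const embedding = new lancedb.OpenAIEmbeddingFunction(
+  "text",
+  process.env.OPENAI_API_KEY ?? ""
+);
+
+const tbl = await db.createTable(
+  "articles",
+  [{ text: "a short document" }],
+  embedding
+);
+```
+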
#### Defined in -[embedding/openai.ts:50](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/embedding/openai.ts#L50) +[embedding/openai.ts:56](https://github.com/lancedb/lancedb/blob/92179835/node/src/embedding/openai.ts#L56) ## Methods @@ -102,4 +102,4 @@ Creates a vector representation for the given values. #### Defined in -[embedding/openai.ts:38](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/embedding/openai.ts#L38) +[embedding/openai.ts:43](https://github.com/lancedb/lancedb/blob/92179835/node/src/embedding/openai.ts#L43) diff --git a/docs/src/javascript/classes/Query.md b/docs/src/javascript/classes/Query.md index b6a683da..08944e96 100644 --- a/docs/src/javascript/classes/Query.md +++ b/docs/src/javascript/classes/Query.md @@ -19,6 +19,7 @@ A builder for nearest neighbor queries for LanceDB. ### Properties - [\_embeddings](Query.md#_embeddings) +- [\_fastSearch](Query.md#_fastsearch) - [\_filter](Query.md#_filter) - [\_limit](Query.md#_limit) - [\_metricType](Query.md#_metrictype) @@ -34,6 +35,7 @@ A builder for nearest neighbor queries for LanceDB. ### Methods - [execute](Query.md#execute) +- [fastSearch](Query.md#fastsearch) - [filter](Query.md#filter) - [isElectron](Query.md#iselectron) - [limit](Query.md#limit) @@ -65,7 +67,7 @@ A builder for nearest neighbor queries for LanceDB. #### Defined in -[query.ts:38](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/query.ts#L38) +[query.ts:39](https://github.com/lancedb/lancedb/blob/92179835/node/src/query.ts#L39) ## Properties @@ -75,7 +77,17 @@ A builder for nearest neighbor queries for LanceDB. #### Defined in -[query.ts:36](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/query.ts#L36) +[query.ts:37](https://github.com/lancedb/lancedb/blob/92179835/node/src/query.ts#L37) + +___ + +### \_fastSearch + +β€’ `Private` **\_fastSearch**: `boolean` + +#### Defined in + +[query.ts:36](https://github.com/lancedb/lancedb/blob/92179835/node/src/query.ts#L36) ___ @@ -85,7 +97,7 @@ ___ #### Defined in -[query.ts:33](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/query.ts#L33) +[query.ts:33](https://github.com/lancedb/lancedb/blob/92179835/node/src/query.ts#L33) ___ @@ -95,7 +107,7 @@ ___ #### Defined in -[query.ts:29](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/query.ts#L29) +[query.ts:29](https://github.com/lancedb/lancedb/blob/92179835/node/src/query.ts#L29) ___ @@ -105,7 +117,7 @@ ___ #### Defined in -[query.ts:34](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/query.ts#L34) +[query.ts:34](https://github.com/lancedb/lancedb/blob/92179835/node/src/query.ts#L34) ___ @@ -115,7 +127,7 @@ ___ #### Defined in -[query.ts:31](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/query.ts#L31) +[query.ts:31](https://github.com/lancedb/lancedb/blob/92179835/node/src/query.ts#L31) ___ @@ -125,7 +137,7 @@ ___ #### Defined in -[query.ts:35](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/query.ts#L35) +[query.ts:35](https://github.com/lancedb/lancedb/blob/92179835/node/src/query.ts#L35) ___ @@ -135,7 +147,7 @@ ___ #### Defined in -[query.ts:26](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/query.ts#L26) +[query.ts:26](https://github.com/lancedb/lancedb/blob/92179835/node/src/query.ts#L26) ___ @@ -145,7 +157,7 @@ ___ #### Defined in -[query.ts:28](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/query.ts#L28) +[query.ts:28](https://github.com/lancedb/lancedb/blob/92179835/node/src/query.ts#L28) ___ @@ -155,7 +167,7 @@ ___ #### Defined in 
-[query.ts:30](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/query.ts#L30) +[query.ts:30](https://github.com/lancedb/lancedb/blob/92179835/node/src/query.ts#L30) ___ @@ -165,7 +177,7 @@ ___ #### Defined in -[query.ts:32](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/query.ts#L32) +[query.ts:32](https://github.com/lancedb/lancedb/blob/92179835/node/src/query.ts#L32) ___ @@ -175,7 +187,7 @@ ___ #### Defined in -[query.ts:27](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/query.ts#L27) +[query.ts:27](https://github.com/lancedb/lancedb/blob/92179835/node/src/query.ts#L27) ___ @@ -201,7 +213,7 @@ A filter statement to be applied to this query. #### Defined in -[query.ts:87](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/query.ts#L87) +[query.ts:90](https://github.com/lancedb/lancedb/blob/92179835/node/src/query.ts#L90) ## Methods @@ -223,7 +235,30 @@ Execute the query and return the results as an Array of Objects #### Defined in -[query.ts:115](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/query.ts#L115) +[query.ts:127](https://github.com/lancedb/lancedb/blob/92179835/node/src/query.ts#L127) + +___ + +### fastSearch + +β–Έ **fastSearch**(`value`): [`Query`](Query.md)\<`T`\> + +Skip searching un-indexed data. This can make search faster, but will miss +any data that is not yet indexed. + +#### Parameters + +| Name | Type | +| :------ | :------ | +| `value` | `boolean` | + +#### Returns + +[`Query`](Query.md)\<`T`\> + +#### Defined in + +[query.ts:119](https://github.com/lancedb/lancedb/blob/92179835/node/src/query.ts#L119) ___ @@ -245,7 +280,7 @@ A filter statement to be applied to this query. #### Defined in -[query.ts:82](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/query.ts#L82) +[query.ts:85](https://github.com/lancedb/lancedb/blob/92179835/node/src/query.ts#L85) ___ @@ -259,7 +294,7 @@ ___ #### Defined in -[query.ts:142](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/query.ts#L142) +[query.ts:155](https://github.com/lancedb/lancedb/blob/92179835/node/src/query.ts#L155) ___ @@ -268,6 +303,7 @@ ___ β–Έ **limit**(`value`): [`Query`](Query.md)\<`T`\> Sets the number of results that will be returned +default value is 10 #### Parameters @@ -281,7 +317,7 @@ Sets the number of results that will be returned #### Defined in -[query.ts:55](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/query.ts#L55) +[query.ts:58](https://github.com/lancedb/lancedb/blob/92179835/node/src/query.ts#L58) ___ @@ -307,7 +343,7 @@ MetricType for the different options #### Defined in -[query.ts:102](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/query.ts#L102) +[query.ts:105](https://github.com/lancedb/lancedb/blob/92179835/node/src/query.ts#L105) ___ @@ -329,7 +365,7 @@ The number of probes used. A higher number makes search more accurate but also s #### Defined in -[query.ts:73](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/query.ts#L73) +[query.ts:76](https://github.com/lancedb/lancedb/blob/92179835/node/src/query.ts#L76) ___ @@ -349,7 +385,7 @@ ___ #### Defined in -[query.ts:107](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/query.ts#L107) +[query.ts:110](https://github.com/lancedb/lancedb/blob/92179835/node/src/query.ts#L110) ___ @@ -371,7 +407,7 @@ Refine the results by reading extra elements and re-ranking them in memory. 
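+
+As a rough usage sketch (hand-written, not part of the generated reference), the builder methods above are chained and finished with `execute()`. The database path, table name, query vector, and filter are placeholders:
+
+```ts
+import * as lancedb from "vectordb";
+
+const db = await lancedb.connect("data/sample-lancedb");
+const tbl = await db.openTable("my_vectors");
+
+// Probe more partitions and re-rank a larger candidate pool (refineFactor)
+// before returning the 10 best rows that satisfy the filter.
+const results = await tbl
+  .search([0.1, 0.3, 0.9, 0.5])
+  .nprobes(20)
+  .refineFactor(5)
+  .filter("price < 100")
+  .limit(10)
+  .execute();
+```
+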
#### Defined in -[query.ts:64](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/query.ts#L64) +[query.ts:67](https://github.com/lancedb/lancedb/blob/92179835/node/src/query.ts#L67) ___ @@ -393,4 +429,4 @@ Return only the specified columns. #### Defined in -[query.ts:93](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/query.ts#L93) +[query.ts:96](https://github.com/lancedb/lancedb/blob/92179835/node/src/query.ts#L96) diff --git a/docs/src/javascript/enums/IndexStatus.md b/docs/src/javascript/enums/IndexStatus.md new file mode 100644 index 00000000..4cbd3f8e --- /dev/null +++ b/docs/src/javascript/enums/IndexStatus.md @@ -0,0 +1,52 @@ +[vectordb](../README.md) / [Exports](../modules.md) / IndexStatus + +# Enumeration: IndexStatus + +## Table of contents + +### Enumeration Members + +- [Done](IndexStatus.md#done) +- [Failed](IndexStatus.md#failed) +- [Indexing](IndexStatus.md#indexing) +- [Pending](IndexStatus.md#pending) + +## Enumeration Members + +### Done + +β€’ **Done** = ``"done"`` + +#### Defined in + +[index.ts:713](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L713) + +___ + +### Failed + +β€’ **Failed** = ``"failed"`` + +#### Defined in + +[index.ts:714](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L714) + +___ + +### Indexing + +β€’ **Indexing** = ``"indexing"`` + +#### Defined in + +[index.ts:712](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L712) + +___ + +### Pending + +β€’ **Pending** = ``"pending"`` + +#### Defined in + +[index.ts:711](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L711) diff --git a/docs/src/javascript/enums/MetricType.md b/docs/src/javascript/enums/MetricType.md index bb97b91e..9f9f0977 100644 --- a/docs/src/javascript/enums/MetricType.md +++ b/docs/src/javascript/enums/MetricType.md @@ -22,7 +22,7 @@ Cosine distance #### Defined in -[index.ts:1041](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L1041) +[index.ts:1381](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L1381) ___ @@ -34,7 +34,7 @@ Dot product #### Defined in -[index.ts:1046](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L1046) +[index.ts:1386](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L1386) ___ @@ -46,4 +46,4 @@ Euclidean distance #### Defined in -[index.ts:1036](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L1036) +[index.ts:1376](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L1376) diff --git a/docs/src/javascript/enums/WriteMode.md b/docs/src/javascript/enums/WriteMode.md index 3e39e082..42b0c8bf 100644 --- a/docs/src/javascript/enums/WriteMode.md +++ b/docs/src/javascript/enums/WriteMode.md @@ -22,7 +22,7 @@ Append new data to the table. #### Defined in -[index.ts:1007](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L1007) +[index.ts:1347](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L1347) ___ @@ -34,7 +34,7 @@ Create a new [Table](../interfaces/Table.md). #### Defined in -[index.ts:1003](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L1003) +[index.ts:1343](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L1343) ___ @@ -46,4 +46,4 @@ Overwrite the existing [Table](../interfaces/Table.md) if presented. 
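+
+A hand-written sketch (not TypeDoc output) of selecting this mode when creating a table. It assumes the `WriteOptions` object carries the mode in a `writeMode` field; the table name and row shape are placeholders:
+
+```ts
+import * as lancedb from "vectordb";
+
+const db = await lancedb.connect("data/sample-lancedb");
+
+// Replace any existing "vectors" table instead of appending to it.
+const tbl = await db.createTable(
+  "vectors",
+  [{ id: 1, vector: [0.1, 0.2] }],
+  { writeMode: lancedb.WriteMode.Overwrite }
+);
+```
+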
#### Defined in -[index.ts:1005](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L1005) +[index.ts:1345](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L1345) diff --git a/docs/src/javascript/interfaces/AwsCredentials.md b/docs/src/javascript/interfaces/AwsCredentials.md index 4792457c..68e31a83 100644 --- a/docs/src/javascript/interfaces/AwsCredentials.md +++ b/docs/src/javascript/interfaces/AwsCredentials.md @@ -18,7 +18,7 @@ #### Defined in -[index.ts:54](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L54) +[index.ts:68](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L68) ___ @@ -28,7 +28,7 @@ ___ #### Defined in -[index.ts:56](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L56) +[index.ts:70](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L70) ___ @@ -38,4 +38,4 @@ ___ #### Defined in -[index.ts:58](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L58) +[index.ts:72](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L72) diff --git a/docs/src/javascript/interfaces/CleanupStats.md b/docs/src/javascript/interfaces/CleanupStats.md index e60ac76f..f76e0889 100644 --- a/docs/src/javascript/interfaces/CleanupStats.md +++ b/docs/src/javascript/interfaces/CleanupStats.md @@ -19,7 +19,7 @@ The number of bytes removed from disk. #### Defined in -[index.ts:878](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L878) +[index.ts:1218](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L1218) ___ @@ -31,4 +31,4 @@ The number of old table versions removed. #### Defined in -[index.ts:882](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L882) +[index.ts:1222](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L1222) diff --git a/docs/src/javascript/interfaces/ColumnAlteration.md b/docs/src/javascript/interfaces/ColumnAlteration.md new file mode 100644 index 00000000..88eaa827 --- /dev/null +++ b/docs/src/javascript/interfaces/ColumnAlteration.md @@ -0,0 +1,53 @@ +[vectordb](../README.md) / [Exports](../modules.md) / ColumnAlteration + +# Interface: ColumnAlteration + +A definition of a column alteration. The alteration changes the column at +`path` to have the new name `name`, to be nullable if `nullable` is true, +and to have the data type `data_type`. At least one of `rename` or `nullable` +must be provided. + +## Table of contents + +### Properties + +- [nullable](ColumnAlteration.md#nullable) +- [path](ColumnAlteration.md#path) +- [rename](ColumnAlteration.md#rename) + +## Properties + +### nullable + +β€’ `Optional` **nullable**: `boolean` + +Set the new nullability. Note that a nullable column cannot be made non-nullable. + +#### Defined in + +[index.ts:638](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L638) + +___ + +### path + +β€’ **path**: `string` + +The path to the column to alter. This is a dot-separated path to the column. +If it is a top-level column then it is just the name of the column. If it is +a nested column then it is the path to the column, e.g. "a.b.c" for a column +`c` nested inside a column `b` nested inside a column `a`. 
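+
+To make the shape concrete, an illustrative call (hand-written, not generated output) against an already-open table `tbl`; the column names are placeholders:
+
+```ts
+// Rename a top-level column and relax nullability on a nested field.
+await tbl.alterColumns([
+  { path: "id", rename: "item_id" },
+  { path: "metadata.labels", nullable: true },
+]);
+```
+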
+ +#### Defined in + +[index.ts:633](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L633) + +___ + +### rename + +β€’ `Optional` **rename**: `string` + +#### Defined in + +[index.ts:634](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L634) diff --git a/docs/src/javascript/interfaces/CompactionMetrics.md b/docs/src/javascript/interfaces/CompactionMetrics.md index 694bbdda..0f98591b 100644 --- a/docs/src/javascript/interfaces/CompactionMetrics.md +++ b/docs/src/javascript/interfaces/CompactionMetrics.md @@ -22,7 +22,7 @@ fragments added. #### Defined in -[index.ts:933](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L933) +[index.ts:1273](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L1273) ___ @@ -35,7 +35,7 @@ file. #### Defined in -[index.ts:928](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L928) +[index.ts:1268](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L1268) ___ @@ -47,7 +47,7 @@ The number of new fragments that were created. #### Defined in -[index.ts:923](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L923) +[index.ts:1263](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L1263) ___ @@ -59,4 +59,4 @@ The number of fragments that were removed. #### Defined in -[index.ts:919](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L919) +[index.ts:1259](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L1259) diff --git a/docs/src/javascript/interfaces/CompactionOptions.md b/docs/src/javascript/interfaces/CompactionOptions.md index 01b99a45..7da8e492 100644 --- a/docs/src/javascript/interfaces/CompactionOptions.md +++ b/docs/src/javascript/interfaces/CompactionOptions.md @@ -24,7 +24,7 @@ Default is true. #### Defined in -[index.ts:901](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L901) +[index.ts:1241](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L1241) ___ @@ -38,7 +38,7 @@ the deleted rows. Default is 10%. #### Defined in -[index.ts:907](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L907) +[index.ts:1247](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L1247) ___ @@ -46,11 +46,11 @@ ___ β€’ `Optional` **maxRowsPerGroup**: `number` -The maximum number of rows per group. Defaults to 1024. +The maximum number of T per group. Defaults to 1024. #### Defined in -[index.ts:895](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L895) +[index.ts:1235](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L1235) ___ @@ -63,7 +63,7 @@ the number of cores on the machine. #### Defined in -[index.ts:912](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L912) +[index.ts:1252](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L1252) ___ @@ -77,4 +77,4 @@ Defaults to 1024 * 1024. #### Defined in -[index.ts:891](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L891) +[index.ts:1231](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L1231) diff --git a/docs/src/javascript/interfaces/Connection.md b/docs/src/javascript/interfaces/Connection.md index 1fe0ab4c..c7a7b24a 100644 --- a/docs/src/javascript/interfaces/Connection.md +++ b/docs/src/javascript/interfaces/Connection.md @@ -22,6 +22,7 @@ Connection could be local against filesystem or remote against a server. 
- [dropTable](Connection.md#droptable) - [openTable](Connection.md#opentable) - [tableNames](Connection.md#tablenames) +- [withMiddleware](Connection.md#withmiddleware) ## Properties @@ -31,7 +32,7 @@ Connection could be local against filesystem or remote against a server. #### Defined in -[index.ts:183](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L183) +[index.ts:261](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L261) ## Methods @@ -59,7 +60,7 @@ Creates a new Table, optionally initializing it with new data. #### Defined in -[index.ts:207](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L207) +[index.ts:285](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L285) β–Έ **createTable**(`name`, `data`): `Promise`\<[`Table`](Table.md)\<`number`[]\>\> @@ -70,7 +71,7 @@ Creates a new Table and initialize it with new data. | Name | Type | Description | | :------ | :------ | :------ | | `name` | `string` | The name of the table. | -| `data` | `Record`\<`string`, `unknown`\>[] | Non-empty Array of Records to be inserted into the table | +| `data` | `Table`\<`any`\> \| `Record`\<`string`, `unknown`\>[] | Non-empty Array of Records to be inserted into the table | #### Returns @@ -78,7 +79,7 @@ Creates a new Table and initialize it with new data. #### Defined in -[index.ts:221](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L221) +[index.ts:299](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L299) β–Έ **createTable**(`name`, `data`, `options`): `Promise`\<[`Table`](Table.md)\<`number`[]\>\> @@ -89,7 +90,7 @@ Creates a new Table and initialize it with new data. | Name | Type | Description | | :------ | :------ | :------ | | `name` | `string` | The name of the table. | -| `data` | `Record`\<`string`, `unknown`\>[] | Non-empty Array of Records to be inserted into the table | +| `data` | `Table`\<`any`\> \| `Record`\<`string`, `unknown`\>[] | Non-empty Array of Records to be inserted into the table | | `options` | [`WriteOptions`](WriteOptions.md) | The write options to use when creating the table. | #### Returns @@ -98,7 +99,7 @@ Creates a new Table and initialize it with new data. #### Defined in -[index.ts:233](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L233) +[index.ts:311](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L311) β–Έ **createTable**\<`T`\>(`name`, `data`, `embeddings`): `Promise`\<[`Table`](Table.md)\<`T`\>\> @@ -115,7 +116,7 @@ Creates a new Table and initialize it with new data. | Name | Type | Description | | :------ | :------ | :------ | | `name` | `string` | The name of the table. | -| `data` | `Record`\<`string`, `unknown`\>[] | Non-empty Array of Records to be inserted into the table | +| `data` | `Table`\<`any`\> \| `Record`\<`string`, `unknown`\>[] | Non-empty Array of Records to be inserted into the table | | `embeddings` | [`EmbeddingFunction`](EmbeddingFunction.md)\<`T`\> | An embedding function to use on this table | #### Returns @@ -124,7 +125,7 @@ Creates a new Table and initialize it with new data. #### Defined in -[index.ts:246](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L246) +[index.ts:324](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L324) β–Έ **createTable**\<`T`\>(`name`, `data`, `embeddings`, `options`): `Promise`\<[`Table`](Table.md)\<`T`\>\> @@ -141,7 +142,7 @@ Creates a new Table and initialize it with new data. 
| Name | Type | Description | | :------ | :------ | :------ | | `name` | `string` | The name of the table. | -| `data` | `Record`\<`string`, `unknown`\>[] | Non-empty Array of Records to be inserted into the table | +| `data` | `Table`\<`any`\> \| `Record`\<`string`, `unknown`\>[] | Non-empty Array of Records to be inserted into the table | | `embeddings` | [`EmbeddingFunction`](EmbeddingFunction.md)\<`T`\> | An embedding function to use on this table | | `options` | [`WriteOptions`](WriteOptions.md) | The write options to use when creating the table. | @@ -151,7 +152,7 @@ Creates a new Table and initialize it with new data. #### Defined in -[index.ts:259](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L259) +[index.ts:337](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L337) ___ @@ -173,7 +174,7 @@ Drop an existing table. #### Defined in -[index.ts:270](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L270) +[index.ts:348](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L348) ___ @@ -202,7 +203,7 @@ Open a table in the database. #### Defined in -[index.ts:193](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L193) +[index.ts:271](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L271) ___ @@ -216,4 +217,32 @@ ___ #### Defined in -[index.ts:185](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L185) +[index.ts:263](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L263) + +___ + +### withMiddleware + +β–Έ **withMiddleware**(`middleware`): [`Connection`](Connection.md) + +Instrument the behavior of this Connection with middleware. + +The middleware will be called in the order they are added. + +Currently this functionality is only supported for remote Connections. + +#### Parameters + +| Name | Type | +| :------ | :------ | +| `middleware` | `HttpMiddleware` | + +#### Returns + +[`Connection`](Connection.md) + +- this Connection instrumented by the passed middleware + +#### Defined in + +[index.ts:360](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L360) diff --git a/docs/src/javascript/interfaces/ConnectionOptions.md b/docs/src/javascript/interfaces/ConnectionOptions.md index 044cf192..92bf594a 100644 --- a/docs/src/javascript/interfaces/ConnectionOptions.md +++ b/docs/src/javascript/interfaces/ConnectionOptions.md @@ -10,7 +10,10 @@ - [awsCredentials](ConnectionOptions.md#awscredentials) - [awsRegion](ConnectionOptions.md#awsregion) - [hostOverride](ConnectionOptions.md#hostoverride) +- [readConsistencyInterval](ConnectionOptions.md#readconsistencyinterval) - [region](ConnectionOptions.md#region) +- [storageOptions](ConnectionOptions.md#storageoptions) +- [timeout](ConnectionOptions.md#timeout) - [uri](ConnectionOptions.md#uri) ## Properties @@ -19,9 +22,13 @@ β€’ `Optional` **apiKey**: `string` +API key for the remote connections + +Can also be passed by setting environment variable `LANCEDB_API_KEY` + #### Defined in -[index.ts:81](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L81) +[index.ts:112](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L112) ___ @@ -33,9 +40,14 @@ User provided AWS crednetials. If not provided, LanceDB will use the default credentials provider chain. +**`Deprecated`** + +Pass `aws_access_key_id`, `aws_secret_access_key`, and `aws_session_token` +through `storageOptions` instead. 
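+
+For illustration (hand-written, not generated output), the same credentials passed through `storageOptions` look roughly like this; the bucket, key values, and region are placeholders:
+
+```ts
+import * as lancedb from "vectordb";
+
+// S3 credentials supplied via storageOptions rather than the
+// deprecated awsCredentials field.
+const db = await lancedb.connect({
+  uri: "s3://my-bucket/lancedb",
+  storageOptions: {
+    aws_access_key_id: "<access key id>",
+    aws_secret_access_key: "<secret access key>",
+    aws_session_token: "<session token>",
+    region: "us-east-1",
+  },
+});
+```
+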
+ #### Defined in -[index.ts:75](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L75) +[index.ts:92](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L92) ___ @@ -43,11 +55,15 @@ ___ β€’ `Optional` **awsRegion**: `string` -AWS region to connect to. Default is defaultAwsRegion. +AWS region to connect to. Default is defaultAwsRegion + +**`Deprecated`** + +Pass `region` through `storageOptions` instead. #### Defined in -[index.ts:78](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L78) +[index.ts:98](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L98) ___ @@ -55,13 +71,33 @@ ___ β€’ `Optional` **hostOverride**: `string` -Override the host URL for the remote connections. +Override the host URL for the remote connection. This is useful for local testing. #### Defined in -[index.ts:91](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L91) +[index.ts:122](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L122) + +___ + +### readConsistencyInterval + +β€’ `Optional` **readConsistencyInterval**: `number` + +(For LanceDB OSS only): The interval, in seconds, at which to check for +updates to the table from other processes. If None, then consistency is not +checked. For performance reasons, this is the default. For strong +consistency, set this to zero seconds. Then every read will check for +updates from other processes. As a compromise, you can set this to a +non-zero value for eventual consistency. If more than that interval +has passed since the last check, then the table will be checked for updates. +Note: this consistency only applies to read operations. Write operations are +always consistent. + +#### Defined in + +[index.ts:140](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L140) ___ @@ -69,11 +105,37 @@ ___ β€’ `Optional` **region**: `string` -Region to connect +Region to connect. Default is 'us-east-1' #### Defined in -[index.ts:84](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L84) +[index.ts:115](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L115) + +___ + +### storageOptions + +β€’ `Optional` **storageOptions**: `Record`\<`string`, `string`\> + +User provided options for object storage. For example, S3 credentials or request timeouts. + +The various options are described at https://lancedb.github.io/lancedb/guides/storage/ + +#### Defined in + +[index.ts:105](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L105) + +___ + +### timeout + +β€’ `Optional` **timeout**: `number` + +Duration in milliseconds for request timeout. Default = 10,000 (10 seconds) + +#### Defined in + +[index.ts:127](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L127) ___ @@ -85,8 +147,8 @@ LanceDB database URI. 
- `/path/to/database` - local database - `s3://bucket/path/to/database` or `gs://bucket/path/to/database` - database on cloud storage -- `db://host:port` - remote database (SaaS) +- `db://host:port` - remote database (LanceDB cloud) #### Defined in -[index.ts:69](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L69) +[index.ts:83](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L83) diff --git a/docs/src/javascript/interfaces/CreateTableOptions.md b/docs/src/javascript/interfaces/CreateTableOptions.md index 995f460d..2c933fdc 100644 --- a/docs/src/javascript/interfaces/CreateTableOptions.md +++ b/docs/src/javascript/interfaces/CreateTableOptions.md @@ -26,7 +26,7 @@ #### Defined in -[index.ts:116](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L116) +[index.ts:163](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L163) ___ @@ -36,7 +36,7 @@ ___ #### Defined in -[index.ts:122](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L122) +[index.ts:169](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L169) ___ @@ -46,7 +46,7 @@ ___ #### Defined in -[index.ts:113](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L113) +[index.ts:160](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L160) ___ @@ -56,7 +56,7 @@ ___ #### Defined in -[index.ts:119](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L119) +[index.ts:166](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L166) ___ @@ -66,4 +66,4 @@ ___ #### Defined in -[index.ts:125](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L125) +[index.ts:172](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L172) diff --git a/docs/src/javascript/interfaces/EmbeddingFunction.md b/docs/src/javascript/interfaces/EmbeddingFunction.md index 8229e83a..8249a4ab 100644 --- a/docs/src/javascript/interfaces/EmbeddingFunction.md +++ b/docs/src/javascript/interfaces/EmbeddingFunction.md @@ -18,11 +18,29 @@ An embedding function that automatically creates vector representation for a giv ### Properties +- [destColumn](EmbeddingFunction.md#destcolumn) - [embed](EmbeddingFunction.md#embed) +- [embeddingDataType](EmbeddingFunction.md#embeddingdatatype) +- [embeddingDimension](EmbeddingFunction.md#embeddingdimension) +- [excludeSource](EmbeddingFunction.md#excludesource) - [sourceColumn](EmbeddingFunction.md#sourcecolumn) ## Properties +### destColumn + +β€’ `Optional` **destColumn**: `string` + +The name of the column that will contain the embedding + +By default this is "vector" + +#### Defined in + +[embedding/embedding_function.ts:49](https://github.com/lancedb/lancedb/blob/92179835/node/src/embedding/embedding_function.ts#L49) + +___ + ### embed β€’ **embed**: (`data`: `T`[]) => `Promise`\<`number`[][]\> @@ -45,7 +63,54 @@ Creates a vector representation for the given values. #### Defined in -[embedding/embedding_function.ts:27](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/embedding/embedding_function.ts#L27) +[embedding/embedding_function.ts:62](https://github.com/lancedb/lancedb/blob/92179835/node/src/embedding/embedding_function.ts#L62) + +___ + +### embeddingDataType + +β€’ `Optional` **embeddingDataType**: `Float`\<`Floats`\> + +The data type of the embedding + +The embedding function should return `number`. This will be converted into +an Arrow float array. By default this will be Float32 but this property can +be used to control the conversion. 
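+
+To ground the interface, a minimal hand-written implementation sketch (not generated output). The fixed 4-dimensional output and the length-based "embedding" are stand-ins for a real model call:
+
+```ts
+import { Float32 } from "apache-arrow";
+import { EmbeddingFunction } from "vectordb";
+
+// A toy embedding function; a real implementation would call a model here.
+const lengthEmbedding: EmbeddingFunction<string> = {
+  sourceColumn: "text",
+  embeddingDimension: 4,
+  embeddingDataType: new Float32(),
+  embed: async (data: string[]) => data.map((s) => [s.length, 0, 0, 0]),
+};
+```
+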
+ +#### Defined in + +[embedding/embedding_function.ts:33](https://github.com/lancedb/lancedb/blob/92179835/node/src/embedding/embedding_function.ts#L33) + +___ + +### embeddingDimension + +β€’ `Optional` **embeddingDimension**: `number` + +The dimension of the embedding + +This is optional, normally this can be determined by looking at the results of +`embed`. If this is not specified, and there is an attempt to apply the embedding +to an empty table, then that process will fail. + +#### Defined in + +[embedding/embedding_function.ts:42](https://github.com/lancedb/lancedb/blob/92179835/node/src/embedding/embedding_function.ts#L42) + +___ + +### excludeSource + +β€’ `Optional` **excludeSource**: `boolean` + +Should the source column be excluded from the resulting table + +By default the source column is included. Set this to true and +only the embedding will be stored. + +#### Defined in + +[embedding/embedding_function.ts:57](https://github.com/lancedb/lancedb/blob/92179835/node/src/embedding/embedding_function.ts#L57) ___ @@ -57,4 +122,4 @@ The name of the column that will be used as input for the Embedding Function. #### Defined in -[embedding/embedding_function.ts:22](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/embedding/embedding_function.ts#L22) +[embedding/embedding_function.ts:24](https://github.com/lancedb/lancedb/blob/92179835/node/src/embedding/embedding_function.ts#L24) diff --git a/docs/src/javascript/interfaces/IndexStats.md b/docs/src/javascript/interfaces/IndexStats.md index c605d001..8044b3df 100644 --- a/docs/src/javascript/interfaces/IndexStats.md +++ b/docs/src/javascript/interfaces/IndexStats.md @@ -6,18 +6,51 @@ ### Properties +- [distanceType](IndexStats.md#distancetype) +- [indexType](IndexStats.md#indextype) - [numIndexedRows](IndexStats.md#numindexedrows) +- [numIndices](IndexStats.md#numindices) - [numUnindexedRows](IndexStats.md#numunindexedrows) ## Properties +### distanceType + +β€’ `Optional` **distanceType**: `string` + +#### Defined in + +[index.ts:728](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L728) + +___ + +### indexType + +β€’ **indexType**: `string` + +#### Defined in + +[index.ts:727](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L727) + +___ + ### numIndexedRows β€’ **numIndexedRows**: ``null`` \| `number` #### Defined in -[index.ts:478](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L478) +[index.ts:725](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L725) + +___ + +### numIndices + +β€’ `Optional` **numIndices**: `number` + +#### Defined in + +[index.ts:729](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L729) ___ @@ -27,4 +60,4 @@ ___ #### Defined in -[index.ts:479](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L479) +[index.ts:726](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L726) diff --git a/docs/src/javascript/interfaces/IvfPQIndexConfig.md b/docs/src/javascript/interfaces/IvfPQIndexConfig.md index c491c959..9211dda3 100644 --- a/docs/src/javascript/interfaces/IvfPQIndexConfig.md +++ b/docs/src/javascript/interfaces/IvfPQIndexConfig.md @@ -29,7 +29,7 @@ The column to be indexed #### Defined in -[index.ts:942](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L942) +[index.ts:1282](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L1282) ___ @@ -41,7 +41,7 @@ Cache size of the index #### Defined in 
-[index.ts:991](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L991) +[index.ts:1331](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L1331) ___ @@ -53,7 +53,7 @@ A unique name for the index #### Defined in -[index.ts:947](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L947) +[index.ts:1287](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L1287) ___ @@ -65,7 +65,7 @@ The max number of iterations for kmeans training. #### Defined in -[index.ts:962](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L962) +[index.ts:1302](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L1302) ___ @@ -77,7 +77,7 @@ Max number of iterations to train OPQ, if `use_opq` is true. #### Defined in -[index.ts:981](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L981) +[index.ts:1321](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L1321) ___ @@ -89,7 +89,7 @@ Metric type, L2 or Cosine #### Defined in -[index.ts:952](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L952) +[index.ts:1292](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L1292) ___ @@ -101,7 +101,7 @@ The number of bits to present one PQ centroid. #### Defined in -[index.ts:976](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L976) +[index.ts:1316](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L1316) ___ @@ -113,7 +113,7 @@ The number of partitions this index #### Defined in -[index.ts:957](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L957) +[index.ts:1297](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L1297) ___ @@ -125,7 +125,7 @@ Number of subvectors to build PQ code #### Defined in -[index.ts:972](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L972) +[index.ts:1312](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L1312) ___ @@ -137,7 +137,7 @@ Replace an existing index with the same name if it exists. #### Defined in -[index.ts:986](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L986) +[index.ts:1326](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L1326) ___ @@ -147,7 +147,7 @@ ___ #### Defined in -[index.ts:993](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L993) +[index.ts:1333](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L1333) ___ @@ -159,4 +159,4 @@ Train as optimized product quantization. 
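+
+An illustrative index build (hand-written, not generated output), assuming an open table `tbl`, `import * as lancedb from "vectordb"`, and the snake_case field names of this interface (`column`, `num_partitions`, `num_sub_vectors`, `metric_type`, `replace`, `type`); the numeric values are arbitrary examples, not tuning advice:
+
+```ts
+// Build an IVF_PQ index on the "vector" column.
+await tbl.createIndex({
+  type: "ivf_pq",
+  column: "vector",
+  num_partitions: 256,
+  num_sub_vectors: 16,
+  metric_type: lancedb.MetricType.Cosine,
+  replace: true,
+});
+```
+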
#### Defined in -[index.ts:967](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L967) +[index.ts:1307](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L1307) diff --git a/docs/src/javascript/interfaces/MergeInsertArgs.md b/docs/src/javascript/interfaces/MergeInsertArgs.md new file mode 100644 index 00000000..0f88d65a --- /dev/null +++ b/docs/src/javascript/interfaces/MergeInsertArgs.md @@ -0,0 +1,73 @@ +[vectordb](../README.md) / [Exports](../modules.md) / MergeInsertArgs + +# Interface: MergeInsertArgs + +## Table of contents + +### Properties + +- [whenMatchedUpdateAll](MergeInsertArgs.md#whenmatchedupdateall) +- [whenNotMatchedBySourceDelete](MergeInsertArgs.md#whennotmatchedbysourcedelete) +- [whenNotMatchedInsertAll](MergeInsertArgs.md#whennotmatchedinsertall) + +## Properties + +### whenMatchedUpdateAll + +β€’ `Optional` **whenMatchedUpdateAll**: `string` \| `boolean` + +If true then rows that exist in both the source table (new data) and +the target table (old data) will be updated, replacing the old row +with the corresponding matching row. + +If there are multiple matches then the behavior is undefined. +Currently this causes multiple copies of the row to be created +but that behavior is subject to change. + +Optionally, a filter can be specified. This should be an SQL +filter where fields with the prefix "target." refer to fields +in the target table (old data) and fields with the prefix +"source." refer to fields in the source table (new data). For +example, the filter "target.lastUpdated < source.lastUpdated" will +only update matched rows when the incoming `lastUpdated` value is +newer. + +Rows that do not match the filter will not be updated. Rows that +do not match the filter do become "not matched" rows. + +#### Defined in + +[index.ts:690](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L690) + +___ + +### whenNotMatchedBySourceDelete + +β€’ `Optional` **whenNotMatchedBySourceDelete**: `string` \| `boolean` + +If true then rows that exist only in the target table (old data) +will be deleted. + +If this is a string then it will be treated as an SQL filter and +only rows that both do not match any row in the source table and +match the given filter will be deleted. + +This can be used to replace a selection of existing data with +new data. + +#### Defined in + +[index.ts:707](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L707) + +___ + +### whenNotMatchedInsertAll + +β€’ `Optional` **whenNotMatchedInsertAll**: `boolean` + +If true then rows that exist only in the source table (new data) +will be inserted into the target table. + +#### Defined in + +[index.ts:695](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L695) diff --git a/docs/src/javascript/interfaces/Table.md b/docs/src/javascript/interfaces/Table.md index 03838ca7..6a88c2e3 100644 --- a/docs/src/javascript/interfaces/Table.md +++ b/docs/src/javascript/interfaces/Table.md @@ -25,17 +25,26 @@ A LanceDB Table is the collection of Records. 
Each Record has one or more vector - [delete](Table.md#delete) - [indexStats](Table.md#indexstats) - [listIndices](Table.md#listindices) +- [mergeInsert](Table.md#mergeinsert) - [name](Table.md#name) - [overwrite](Table.md#overwrite) - [schema](Table.md#schema) - [search](Table.md#search) - [update](Table.md#update) +### Methods + +- [addColumns](Table.md#addcolumns) +- [alterColumns](Table.md#altercolumns) +- [dropColumns](Table.md#dropcolumns) +- [filter](Table.md#filter) +- [withMiddleware](Table.md#withmiddleware) + ## Properties ### add -β€’ **add**: (`data`: `Record`\<`string`, `unknown`\>[]) => `Promise`\<`number`\> +β€’ **add**: (`data`: `Table`\<`any`\> \| `Record`\<`string`, `unknown`\>[]) => `Promise`\<`number`\> #### Type declaration @@ -47,7 +56,7 @@ Insert records into this Table. | Name | Type | Description | | :------ | :------ | :------ | -| `data` | `Record`\<`string`, `unknown`\>[] | Records to be inserted into the Table | +| `data` | `Table`\<`any`\> \| `Record`\<`string`, `unknown`\>[] | Records to be inserted into the Table | ##### Returns @@ -57,27 +66,33 @@ The number of rows added to the table #### Defined in -[index.ts:291](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L291) +[index.ts:381](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L381) ___ ### countRows -β€’ **countRows**: () => `Promise`\<`number`\> +β€’ **countRows**: (`filter?`: `string`) => `Promise`\<`number`\> #### Type declaration -β–Έ (): `Promise`\<`number`\> +β–Έ (`filter?`): `Promise`\<`number`\> Returns the number of rows in this table. +##### Parameters + +| Name | Type | +| :------ | :------ | +| `filter?` | `string` | + ##### Returns `Promise`\<`number`\> #### Defined in -[index.ts:361](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L361) +[index.ts:454](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L454) ___ @@ -107,17 +122,17 @@ VectorIndexParams. #### Defined in -[index.ts:306](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L306) +[index.ts:398](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L398) ___ ### createScalarIndex -β€’ **createScalarIndex**: (`column`: `string`, `replace`: `boolean`) => `Promise`\<`void`\> +β€’ **createScalarIndex**: (`column`: `string`, `replace?`: `boolean`) => `Promise`\<`void`\> #### Type declaration -β–Έ (`column`, `replace`): `Promise`\<`void`\> +β–Έ (`column`, `replace?`): `Promise`\<`void`\> Create a scalar index on this Table for the given column @@ -126,7 +141,7 @@ Create a scalar index on this Table for the given column | Name | Type | Description | | :------ | :------ | :------ | | `column` | `string` | The column to index | -| `replace` | `boolean` | If false, fail if an index already exists on the column Scalar indices, like vector indices, can be used to speed up scans. A scalar index can speed up scans that contain filter expressions on the indexed column. 
For example, the following scan will be faster if the column `my_col` has a scalar index: ```ts const con = await lancedb.connect('./.lancedb'); const table = await con.openTable('images'); const results = await table.where('my_col = 7').execute(); ``` Scalar indices can also speed up scans containing a vector search and a prefilter: ```ts const con = await lancedb.connect('././lancedb'); const table = await con.openTable('images'); const results = await table.search([1.0, 2.0]).where('my_col != 7').prefilter(true); ``` Scalar indices can only speed up scans for basic filters using equality, comparison, range (e.g. `my_col BETWEEN 0 AND 100`), and set membership (e.g. `my_col IN (0, 1, 2)`) Scalar indices can be used if the filter contains multiple indexed columns and the filter criteria are AND'd or OR'd together (e.g. `my_col < 0 AND other_col> 100`) Scalar indices may be used if the filter contains non-indexed columns but, depending on the structure of the filter, they may not be usable. For example, if the column `not_indexed` does not have a scalar index then the filter `my_col = 0 OR not_indexed = 1` will not be able to use any scalar index on `my_col`. | +| `replace?` | `boolean` | If false, fail if an index already exists on the column it is always set to true for remote connections Scalar indices, like vector indices, can be used to speed up scans. A scalar index can speed up scans that contain filter expressions on the indexed column. For example, the following scan will be faster if the column `my_col` has a scalar index: ```ts const con = await lancedb.connect('./.lancedb'); const table = await con.openTable('images'); const results = await table.where('my_col = 7').execute(); ``` Scalar indices can also speed up scans containing a vector search and a prefilter: ```ts const con = await lancedb.connect('././lancedb'); const table = await con.openTable('images'); const results = await table.search([1.0, 2.0]).where('my_col != 7').prefilter(true); ``` Scalar indices can only speed up scans for basic filters using equality, comparison, range (e.g. `my_col BETWEEN 0 AND 100`), and set membership (e.g. `my_col IN (0, 1, 2)`) Scalar indices can be used if the filter contains multiple indexed columns and the filter criteria are AND'd or OR'd together (e.g. `my_col < 0 AND other_col> 100`) Scalar indices may be used if the filter contains non-indexed columns but, depending on the structure of the filter, they may not be usable. For example, if the column `not_indexed` does not have a scalar index then the filter `my_col = 0 OR not_indexed = 1` will not be able to use any scalar index on `my_col`. | ##### Returns @@ -142,7 +157,7 @@ await table.createScalarIndex('my_col') #### Defined in -[index.ts:356](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L356) +[index.ts:449](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L449) ___ @@ -194,17 +209,17 @@ await tbl.countRows() // Returns 1 #### Defined in -[index.ts:395](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L395) +[index.ts:488](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L488) ___ ### indexStats -β€’ **indexStats**: (`indexUuid`: `string`) => `Promise`\<[`IndexStats`](IndexStats.md)\> +β€’ **indexStats**: (`indexName`: `string`) => `Promise`\<[`IndexStats`](IndexStats.md)\> #### Type declaration -β–Έ (`indexUuid`): `Promise`\<[`IndexStats`](IndexStats.md)\> +β–Έ (`indexName`): `Promise`\<[`IndexStats`](IndexStats.md)\> Get statistics about an index. 
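+
+For example (an illustrative sketch, not generated output), statistics can be pulled for each index returned by `listIndices`, assuming an already-open table `tbl`:
+
+```ts
+const indices = await tbl.listIndices();
+for (const index of indices) {
+  const stats = await tbl.indexStats(index.name);
+  console.log(index.name, stats.numIndexedRows, stats.numUnindexedRows);
+}
+```
+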
@@ -212,7 +227,7 @@ Get statistics about an index. | Name | Type | | :------ | :------ | -| `indexUuid` | `string` | +| `indexName` | `string` | ##### Returns @@ -220,7 +235,7 @@ Get statistics about an index. #### Defined in -[index.ts:438](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L438) +[index.ts:567](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L567) ___ @@ -240,7 +255,57 @@ List the indicies on this table. #### Defined in -[index.ts:433](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L433) +[index.ts:562](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L562) + +___ + +### mergeInsert + +β€’ **mergeInsert**: (`on`: `string`, `data`: `Table`\<`any`\> \| `Record`\<`string`, `unknown`\>[], `args`: [`MergeInsertArgs`](MergeInsertArgs.md)) => `Promise`\<`void`\> + +#### Type declaration + +β–Έ (`on`, `data`, `args`): `Promise`\<`void`\> + +Runs a "merge insert" operation on the table + +This operation can add rows, update rows, and remove rows all in a single +transaction. It is a very generic tool that can be used to create +behaviors like "insert if not exists", "update or insert (i.e. upsert)", +or even replace a portion of existing data with new data (e.g. replace +all data where month="january") + +The merge insert operation works by combining new data from a +**source table** with existing data in a **target table** by using a +join. There are three categories of records. + +"Matched" records are records that exist in both the source table and +the target table. "Not matched" records exist only in the source table +(e.g. these are new data) "Not matched by source" records exist only +in the target table (this is old data) + +The MergeInsertArgs can be used to customize what should happen for +each category of data. + +Please note that the data may appear to be reordered as part of this +operation. This is because updated rows will be deleted from the +dataset and then reinserted at the end with the new values. + +##### Parameters + +| Name | Type | Description | +| :------ | :------ | :------ | +| `on` | `string` | a column to join on. This is how records from the source table and target table are matched. | +| `data` | `Table`\<`any`\> \| `Record`\<`string`, `unknown`\>[] | the new data to insert | +| `args` | [`MergeInsertArgs`](MergeInsertArgs.md) | parameters controlling how the operation should behave | + +##### Returns + +`Promise`\<`void`\> + +#### Defined in + +[index.ts:553](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L553) ___ @@ -250,13 +315,13 @@ ___ #### Defined in -[index.ts:277](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L277) +[index.ts:367](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L367) ___ ### overwrite -β€’ **overwrite**: (`data`: `Record`\<`string`, `unknown`\>[]) => `Promise`\<`number`\> +β€’ **overwrite**: (`data`: `Table`\<`any`\> \| `Record`\<`string`, `unknown`\>[]) => `Promise`\<`number`\> #### Type declaration @@ -268,7 +333,7 @@ Insert records into this Table, replacing its contents. 
| Name | Type | Description | | :------ | :------ | :------ | -| `data` | `Record`\<`string`, `unknown`\>[] | Records to be inserted into the Table | +| `data` | `Table`\<`any`\> \| `Record`\<`string`, `unknown`\>[] | Records to be inserted into the Table | ##### Returns @@ -278,7 +343,7 @@ The number of rows added to the table #### Defined in -[index.ts:299](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L299) +[index.ts:389](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L389) ___ @@ -288,7 +353,7 @@ ___ #### Defined in -[index.ts:440](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L440) +[index.ts:571](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L571) ___ @@ -314,7 +379,7 @@ Creates a search query to find the nearest neighbors of the given search term #### Defined in -[index.ts:283](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L283) +[index.ts:373](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L373) ___ @@ -365,4 +430,123 @@ let results = await tbl.search([1, 1]).execute(); #### Defined in -[index.ts:428](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L428) +[index.ts:521](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L521) + +## Methods + +### addColumns + +β–Έ **addColumns**(`newColumnTransforms`): `Promise`\<`void`\> + +Add new columns with defined values. + +#### Parameters + +| Name | Type | Description | +| :------ | :------ | :------ | +| `newColumnTransforms` | \{ `name`: `string` ; `valueSql`: `string` }[] | pairs of column names and the SQL expression to use to calculate the value of the new column. These expressions will be evaluated for each row in the table, and can reference existing columns in the table. | + +#### Returns + +`Promise`\<`void`\> + +#### Defined in + +[index.ts:582](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L582) + +___ + +### alterColumns + +β–Έ **alterColumns**(`columnAlterations`): `Promise`\<`void`\> + +Alter the name or nullability of columns. + +#### Parameters + +| Name | Type | Description | +| :------ | :------ | :------ | +| `columnAlterations` | [`ColumnAlteration`](ColumnAlteration.md)[] | One or more alterations to apply to columns. | + +#### Returns + +`Promise`\<`void`\> + +#### Defined in + +[index.ts:591](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L591) + +___ + +### dropColumns + +β–Έ **dropColumns**(`columnNames`): `Promise`\<`void`\> + +Drop one or more columns from the dataset + +This is a metadata-only operation and does not remove the data from the +underlying storage. In order to remove the data, you must subsequently +call ``compact_files`` to rewrite the data without the removed columns and +then call ``cleanup_files`` to remove the old files. + +#### Parameters + +| Name | Type | Description | +| :------ | :------ | :------ | +| `columnNames` | `string`[] | The names of the columns to drop. These can be nested column references (e.g. "a.b.c") or top-level column names (e.g. "a"). 
| + +#### Returns + +`Promise`\<`void`\> + +#### Defined in + +[index.ts:605](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L605) + +___ + +### filter + +β–Έ **filter**(`value`): [`Query`](../classes/Query.md)\<`T`\> + +#### Parameters + +| Name | Type | +| :------ | :------ | +| `value` | `string` | + +#### Returns + +[`Query`](../classes/Query.md)\<`T`\> + +#### Defined in + +[index.ts:569](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L569) + +___ + +### withMiddleware + +β–Έ **withMiddleware**(`middleware`): [`Table`](Table.md)\<`T`\> + +Instrument the behavior of this Table with middleware. + +The middleware will be called in the order they are added. + +Currently this functionality is only supported for remote tables. + +#### Parameters + +| Name | Type | +| :------ | :------ | +| `middleware` | `HttpMiddleware` | + +#### Returns + +[`Table`](Table.md)\<`T`\> + +- this Table instrumented by the passed middleware + +#### Defined in + +[index.ts:617](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L617) diff --git a/docs/src/javascript/interfaces/UpdateArgs.md b/docs/src/javascript/interfaces/UpdateArgs.md index 2cca5436..7a30a20c 100644 --- a/docs/src/javascript/interfaces/UpdateArgs.md +++ b/docs/src/javascript/interfaces/UpdateArgs.md @@ -20,7 +20,7 @@ new values to set #### Defined in -[index.ts:454](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L454) +[index.ts:652](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L652) ___ @@ -33,4 +33,4 @@ in which case all rows will be updated. #### Defined in -[index.ts:448](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L448) +[index.ts:646](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L646) diff --git a/docs/src/javascript/interfaces/UpdateSqlArgs.md b/docs/src/javascript/interfaces/UpdateSqlArgs.md index 4f3ba568..d979125b 100644 --- a/docs/src/javascript/interfaces/UpdateSqlArgs.md +++ b/docs/src/javascript/interfaces/UpdateSqlArgs.md @@ -20,7 +20,7 @@ new values to set as SQL expressions. #### Defined in -[index.ts:468](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L468) +[index.ts:666](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L666) ___ @@ -33,4 +33,4 @@ in which case all rows will be updated. 
#### Defined in -[index.ts:462](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L462) +[index.ts:660](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L660) diff --git a/docs/src/javascript/interfaces/VectorIndex.md b/docs/src/javascript/interfaces/VectorIndex.md index 2abd0967..e1fbeab0 100644 --- a/docs/src/javascript/interfaces/VectorIndex.md +++ b/docs/src/javascript/interfaces/VectorIndex.md @@ -8,6 +8,7 @@ - [columns](VectorIndex.md#columns) - [name](VectorIndex.md#name) +- [status](VectorIndex.md#status) - [uuid](VectorIndex.md#uuid) ## Properties @@ -18,7 +19,7 @@ #### Defined in -[index.ts:472](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L472) +[index.ts:718](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L718) ___ @@ -28,7 +29,17 @@ ___ #### Defined in -[index.ts:473](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L473) +[index.ts:719](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L719) + +___ + +### status + +β€’ **status**: [`IndexStatus`](../enums/IndexStatus.md) + +#### Defined in + +[index.ts:721](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L721) ___ @@ -38,4 +49,4 @@ ___ #### Defined in -[index.ts:474](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L474) +[index.ts:720](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L720) diff --git a/docs/src/javascript/interfaces/WriteOptions.md b/docs/src/javascript/interfaces/WriteOptions.md index a754887c..20be78ae 100644 --- a/docs/src/javascript/interfaces/WriteOptions.md +++ b/docs/src/javascript/interfaces/WriteOptions.md @@ -24,4 +24,4 @@ A [WriteMode](../enums/WriteMode.md) to use on this operation #### Defined in -[index.ts:1015](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L1015) +[index.ts:1355](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L1355) diff --git a/docs/src/javascript/modules.md b/docs/src/javascript/modules.md index 64aab2d9..d1796302 100644 --- a/docs/src/javascript/modules.md +++ b/docs/src/javascript/modules.md @@ -6,6 +6,7 @@ ### Enumerations +- [IndexStatus](enums/IndexStatus.md) - [MetricType](enums/MetricType.md) - [WriteMode](enums/WriteMode.md) @@ -14,6 +15,7 @@ - [DefaultWriteOptions](classes/DefaultWriteOptions.md) - [LocalConnection](classes/LocalConnection.md) - [LocalTable](classes/LocalTable.md) +- [MakeArrowTableOptions](classes/MakeArrowTableOptions.md) - [OpenAIEmbeddingFunction](classes/OpenAIEmbeddingFunction.md) - [Query](classes/Query.md) @@ -21,6 +23,7 @@ - [AwsCredentials](interfaces/AwsCredentials.md) - [CleanupStats](interfaces/CleanupStats.md) +- [ColumnAlteration](interfaces/ColumnAlteration.md) - [CompactionMetrics](interfaces/CompactionMetrics.md) - [CompactionOptions](interfaces/CompactionOptions.md) - [Connection](interfaces/Connection.md) @@ -29,6 +32,7 @@ - [EmbeddingFunction](interfaces/EmbeddingFunction.md) - [IndexStats](interfaces/IndexStats.md) - [IvfPQIndexConfig](interfaces/IvfPQIndexConfig.md) +- [MergeInsertArgs](interfaces/MergeInsertArgs.md) - [Table](interfaces/Table.md) - [UpdateArgs](interfaces/UpdateArgs.md) - [UpdateSqlArgs](interfaces/UpdateSqlArgs.md) @@ -42,7 +46,9 @@ ### Functions - [connect](modules.md#connect) +- [convertToTable](modules.md#converttotable) - [isWriteOptions](modules.md#iswriteoptions) +- [makeArrowTable](modules.md#makearrowtable) ## Type Aliases @@ -52,7 +58,7 @@ #### Defined in 
-[index.ts:996](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L996) +[index.ts:1336](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L1336) ## Functions @@ -62,11 +68,11 @@ Connect to a LanceDB instance at the given URI. -Accpeted formats: +Accepted formats: - `/path/to/database` - local database - `s3://bucket/path/to/database` or `gs://bucket/path/to/database` - database on cloud storage -- `db://host:port` - remote database (SaaS) +- `db://host:port` - remote database (LanceDB cloud) #### Parameters @@ -84,7 +90,7 @@ Accpeted formats: #### Defined in -[index.ts:141](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L141) +[index.ts:188](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L188) β–Έ **connect**(`opts`): `Promise`\<[`Connection`](interfaces/Connection.md)\> @@ -102,7 +108,35 @@ Connect to a LanceDB instance with connection options. #### Defined in -[index.ts:147](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L147) +[index.ts:194](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L194) + +___ + +### convertToTable + +β–Έ **convertToTable**\<`T`\>(`data`, `embeddings?`, `makeTableOptions?`): `Promise`\<`ArrowTable`\> + +#### Type parameters + +| Name | +| :------ | +| `T` | + +#### Parameters + +| Name | Type | +| :------ | :------ | +| `data` | `Record`\<`string`, `unknown`\>[] | +| `embeddings?` | [`EmbeddingFunction`](interfaces/EmbeddingFunction.md)\<`T`\> | +| `makeTableOptions?` | `Partial`\<[`MakeArrowTableOptions`](classes/MakeArrowTableOptions.md)\> | + +#### Returns + +`Promise`\<`ArrowTable`\> + +#### Defined in + +[arrow.ts:465](https://github.com/lancedb/lancedb/blob/92179835/node/src/arrow.ts#L465) ___ @@ -122,4 +156,116 @@ value is WriteOptions #### Defined in -[index.ts:1022](https://github.com/lancedb/lancedb/blob/c89d5e6/node/src/index.ts#L1022) +[index.ts:1362](https://github.com/lancedb/lancedb/blob/92179835/node/src/index.ts#L1362) + +___ + +### makeArrowTable + +β–Έ **makeArrowTable**(`data`, `options?`): `ArrowTable` + +An enhanced version of the makeTable function from Apache Arrow +that supports nested fields and embeddings columns. + +This function converts an array of Record (row-major JS objects) +to an Arrow Table (a columnar structure) + +Note that it currently does not support nulls. + +If a schema is provided then it will be used to determine the resulting array +types. Fields will also be reordered to fit the order defined by the schema. + +If a schema is not provided then the types will be inferred and the field order +will be controlled by the order of properties in the first record. + +If the input is empty then a schema must be provided to create an empty table. + +When a schema is not specified then data types will be inferred. The inference +rules are as follows: + + - boolean => Bool + - number => Float64 + - String => Utf8 + - Buffer => Binary + - Record => Struct + - Array => List + +#### Parameters + +| Name | Type | Description | +| :------ | :------ | :------ | +| `data` | `Record`\<`string`, `any`\>[] | input data | +| `options?` | `Partial`\<[`MakeArrowTableOptions`](classes/MakeArrowTableOptions.md)\> | options to control the makeArrowTable call. 
| + +#### Returns + +`ArrowTable` + +**`Example`** + +```ts + +import { fromTableToBuffer, makeArrowTable } from "../arrow"; +import { Field, FixedSizeList, Float16, Float32, Int32, Schema } from "apache-arrow"; + +const schema = new Schema([ + new Field("a", new Int32()), + new Field("b", new Float32()), + new Field("c", new FixedSizeList(3, new Field("item", new Float16()))), + ]); + const table = makeArrowTable([ + { a: 1, b: 2, c: [1, 2, 3] }, + { a: 4, b: 5, c: [4, 5, 6] }, + { a: 7, b: 8, c: [7, 8, 9] }, + ], { schema }); +``` + +By default it assumes that the column named `vector` is a vector column +and it will be converted into a fixed size list array of type float32. +The `vectorColumns` option can be used to support other vector column +names and data types. + +```ts + +const schema = new Schema([ + new Field("a", new Float64()), + new Field("b", new Float64()), + new Field( + "vector", + new FixedSizeList(3, new Field("item", new Float32())) + ), + ]); + const table = makeArrowTable([ + { a: 1, b: 2, vector: [1, 2, 3] }, + { a: 4, b: 5, vector: [4, 5, 6] }, + { a: 7, b: 8, vector: [7, 8, 9] }, + ]); + assert.deepEqual(table.schema, schema); +``` + +You can specify the vector column types and names using the options as well + +```typescript + +const schema = new Schema([ + new Field('a', new Float64()), + new Field('b', new Float64()), + new Field('vec1', new FixedSizeList(3, new Field('item', new Float16()))), + new Field('vec2', new FixedSizeList(3, new Field('item', new Float16()))) + ]); +const table = makeArrowTable([ + { a: 1, b: 2, vec1: [1, 2, 3], vec2: [2, 4, 6] }, + { a: 4, b: 5, vec1: [4, 5, 6], vec2: [8, 10, 12] }, + { a: 7, b: 8, vec1: [7, 8, 9], vec2: [14, 16, 18] } + ], { + vectorColumns: { + vec1: { type: new Float16() }, + vec2: { type: new Float16() } + } + } +assert.deepEqual(table.schema, schema) +``` + +#### Defined in + +[arrow.ts:198](https://github.com/lancedb/lancedb/blob/92179835/node/src/arrow.ts#L198) diff --git a/node/src/index.ts b/node/src/index.ts index 5b312c2a..d63306ba 100644 --- a/node/src/index.ts +++ b/node/src/index.ts @@ -564,7 +564,7 @@ export interface Table { /** * Get statistics about an index. */ - indexStats: (indexUuid: string) => Promise + indexStats: (indexName: string) => Promise filter(value: string): Query @@ -1164,8 +1164,8 @@ export class LocalTable implements Table { return tableListIndices.call(this._tbl); } - async indexStats(indexUuid: string): Promise { - return tableIndexStats.call(this._tbl, indexUuid); + async indexStats(indexName: string): Promise { + return tableIndexStats.call(this._tbl, indexName); } get schema(): Promise { diff --git a/node/src/remote/index.ts b/node/src/remote/index.ts index 3a7dc803..b8a96641 100644 --- a/node/src/remote/index.ts +++ b/node/src/remote/index.ts @@ -517,9 +517,9 @@ export class RemoteTable implements Table { })) } - async indexStats (indexUuid: string): Promise { + async indexStats (indexName: string): Promise { const results = await this._client.post( - `/v1/table/${encodeURIComponent(this._name)}/index/${indexUuid}/stats/` + `/v1/table/${encodeURIComponent(this._name)}/index/${indexName}/stats/` ) const body = await results.body() return { From 82197c54e401df7f338b8c6d9593e2ad5537ea71 Mon Sep 17 00:00:00 2001 From: Will Jones Date: Fri, 18 Oct 2024 13:40:24 -0700 Subject: [PATCH 34/51] perf: eliminate iop in refresh (#1760) Closes #1741 If we checkout a version, we need to make a `HEAD` request to get the size of the manifest. 
The new `checkout_latest()` code path can skip this IOP. This makes the refresh slightly faster. --- rust/lancedb/src/table/dataset.rs | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/rust/lancedb/src/table/dataset.rs b/rust/lancedb/src/table/dataset.rs index e8404898..a3c6348f 100644 --- a/rust/lancedb/src/table/dataset.rs +++ b/rust/lancedb/src/table/dataset.rs @@ -54,9 +54,7 @@ impl DatasetRef { last_consistency_check, .. } => { - *dataset = dataset - .checkout_version(dataset.latest_version_id().await?) - .await?; + dataset.checkout_latest().await?; last_consistency_check.replace(Instant::now()); } Self::TimeTravel { dataset, version } => { From 5517e102c349b15cbd4850421443c26cda9487c7 Mon Sep 17 00:00:00 2001 From: Lance Release Date: Wed, 23 Oct 2024 00:33:40 +0000 Subject: [PATCH 35/51] =?UTF-8?q?Bump=20version:=200.14.1-beta.0=20?= =?UTF-8?q?=E2=86=92=200.14.1-beta.1?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- python/.bumpversion.toml | 2 +- python/Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/python/.bumpversion.toml b/python/.bumpversion.toml index 8a19a9ec..fad6ada8 100644 --- a/python/.bumpversion.toml +++ b/python/.bumpversion.toml @@ -1,5 +1,5 @@ [tool.bumpversion] -current_version = "0.14.1-beta.0" +current_version = "0.14.1-beta.1" parse = """(?x) (?P0|[1-9]\\d*)\\. (?P0|[1-9]\\d*)\\. diff --git a/python/Cargo.toml b/python/Cargo.toml index 5b42a4b9..2d42f80f 100644 --- a/python/Cargo.toml +++ b/python/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "lancedb-python" -version = "0.14.1-beta.0" +version = "0.14.1-beta.1" edition.workspace = true description = "Python bindings for LanceDB" license.workspace = true From bbc588e27dc4dc9aa20c2ea31a5a725f08255a15 Mon Sep 17 00:00:00 2001 From: Lance Release Date: Wed, 23 Oct 2024 00:34:01 +0000 Subject: [PATCH 36/51] =?UTF-8?q?Bump=20version:=200.11.1-beta.0=20?= =?UTF-8?q?=E2=86=92=200.11.1-beta.1?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .bumpversion.toml | 2 +- java/core/pom.xml | 2 +- java/pom.xml | 2 +- node/package.json | 12 ++++++------ nodejs/Cargo.toml | 2 +- nodejs/npm/darwin-arm64/package.json | 2 +- nodejs/npm/darwin-x64/package.json | 2 +- nodejs/npm/linux-arm64-gnu/package.json | 2 +- nodejs/npm/linux-x64-gnu/package.json | 2 +- nodejs/npm/win32-x64-msvc/package.json | 2 +- nodejs/package.json | 2 +- rust/ffi/node/Cargo.toml | 2 +- rust/lancedb/Cargo.toml | 2 +- 13 files changed, 18 insertions(+), 18 deletions(-) diff --git a/.bumpversion.toml b/.bumpversion.toml index 1e853bb4..037f783a 100644 --- a/.bumpversion.toml +++ b/.bumpversion.toml @@ -1,5 +1,5 @@ [tool.bumpversion] -current_version = "0.11.1-beta.0" +current_version = "0.11.1-beta.1" parse = """(?x) (?P0|[1-9]\\d*)\\. (?P0|[1-9]\\d*)\\. 
diff --git a/java/core/pom.xml b/java/core/pom.xml index d1d386f5..b3a079f5 100644 --- a/java/core/pom.xml +++ b/java/core/pom.xml @@ -8,7 +8,7 @@ com.lancedb lancedb-parent - 0.11.1-beta.0 + 0.11.1-beta.1 ../pom.xml diff --git a/java/pom.xml b/java/pom.xml index ee861ad0..a89870e7 100644 --- a/java/pom.xml +++ b/java/pom.xml @@ -6,7 +6,7 @@ com.lancedb lancedb-parent - 0.11.1-beta.0 + 0.11.1-beta.1 pom LanceDB Parent diff --git a/node/package.json b/node/package.json index 51723fab..caf64030 100644 --- a/node/package.json +++ b/node/package.json @@ -1,6 +1,6 @@ { "name": "vectordb", - "version": "0.11.1-beta.0", + "version": "0.11.1-beta.1", "description": " Serverless, low-latency vector database for AI applications", "main": "dist/index.js", "types": "dist/index.d.ts", @@ -88,10 +88,10 @@ } }, "optionalDependencies": { - "@lancedb/vectordb-darwin-arm64": "0.11.1-beta.0", - "@lancedb/vectordb-darwin-x64": "0.11.1-beta.0", - "@lancedb/vectordb-linux-arm64-gnu": "0.11.1-beta.0", - "@lancedb/vectordb-linux-x64-gnu": "0.11.1-beta.0", - "@lancedb/vectordb-win32-x64-msvc": "0.11.1-beta.0" + "@lancedb/vectordb-darwin-arm64": "0.11.1-beta.1", + "@lancedb/vectordb-darwin-x64": "0.11.1-beta.1", + "@lancedb/vectordb-linux-arm64-gnu": "0.11.1-beta.1", + "@lancedb/vectordb-linux-x64-gnu": "0.11.1-beta.1", + "@lancedb/vectordb-win32-x64-msvc": "0.11.1-beta.1" } } diff --git a/nodejs/Cargo.toml b/nodejs/Cargo.toml index d5e15fe2..f8c0dbb0 100644 --- a/nodejs/Cargo.toml +++ b/nodejs/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "lancedb-nodejs" edition.workspace = true -version = "0.11.1-beta.0" +version = "0.11.1-beta.1" license.workspace = true description.workspace = true repository.workspace = true diff --git a/nodejs/npm/darwin-arm64/package.json b/nodejs/npm/darwin-arm64/package.json index 119f0653..b6edd42f 100644 --- a/nodejs/npm/darwin-arm64/package.json +++ b/nodejs/npm/darwin-arm64/package.json @@ -1,6 +1,6 @@ { "name": "@lancedb/lancedb-darwin-arm64", - "version": "0.11.1-beta.0", + "version": "0.11.1-beta.1", "os": ["darwin"], "cpu": ["arm64"], "main": "lancedb.darwin-arm64.node", diff --git a/nodejs/npm/darwin-x64/package.json b/nodejs/npm/darwin-x64/package.json index 99f7e1a2..4fe3b16f 100644 --- a/nodejs/npm/darwin-x64/package.json +++ b/nodejs/npm/darwin-x64/package.json @@ -1,6 +1,6 @@ { "name": "@lancedb/lancedb-darwin-x64", - "version": "0.11.1-beta.0", + "version": "0.11.1-beta.1", "os": ["darwin"], "cpu": ["x64"], "main": "lancedb.darwin-x64.node", diff --git a/nodejs/npm/linux-arm64-gnu/package.json b/nodejs/npm/linux-arm64-gnu/package.json index a659b936..10aba252 100644 --- a/nodejs/npm/linux-arm64-gnu/package.json +++ b/nodejs/npm/linux-arm64-gnu/package.json @@ -1,6 +1,6 @@ { "name": "@lancedb/lancedb-linux-arm64-gnu", - "version": "0.11.1-beta.0", + "version": "0.11.1-beta.1", "os": ["linux"], "cpu": ["arm64"], "main": "lancedb.linux-arm64-gnu.node", diff --git a/nodejs/npm/linux-x64-gnu/package.json b/nodejs/npm/linux-x64-gnu/package.json index 21b4adc9..73dd4592 100644 --- a/nodejs/npm/linux-x64-gnu/package.json +++ b/nodejs/npm/linux-x64-gnu/package.json @@ -1,6 +1,6 @@ { "name": "@lancedb/lancedb-linux-x64-gnu", - "version": "0.11.1-beta.0", + "version": "0.11.1-beta.1", "os": ["linux"], "cpu": ["x64"], "main": "lancedb.linux-x64-gnu.node", diff --git a/nodejs/npm/win32-x64-msvc/package.json b/nodejs/npm/win32-x64-msvc/package.json index cd457f2f..6e53b2da 100644 --- a/nodejs/npm/win32-x64-msvc/package.json +++ b/nodejs/npm/win32-x64-msvc/package.json @@ -1,6 +1,6 @@ { 
"name": "@lancedb/lancedb-win32-x64-msvc", - "version": "0.11.1-beta.0", + "version": "0.11.1-beta.1", "os": ["win32"], "cpu": ["x64"], "main": "lancedb.win32-x64-msvc.node", diff --git a/nodejs/package.json b/nodejs/package.json index f218a007..5f40d8d3 100644 --- a/nodejs/package.json +++ b/nodejs/package.json @@ -10,7 +10,7 @@ "vector database", "ann" ], - "version": "0.11.1-beta.0", + "version": "0.11.1-beta.1", "main": "dist/index.js", "exports": { ".": "./dist/index.js", diff --git a/rust/ffi/node/Cargo.toml b/rust/ffi/node/Cargo.toml index 2cfd0026..1c7244be 100644 --- a/rust/ffi/node/Cargo.toml +++ b/rust/ffi/node/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "lancedb-node" -version = "0.11.1-beta.0" +version = "0.11.1-beta.1" description = "Serverless, low-latency vector database for AI applications" license.workspace = true edition.workspace = true diff --git a/rust/lancedb/Cargo.toml b/rust/lancedb/Cargo.toml index 2d92ec2c..a7fa770e 100644 --- a/rust/lancedb/Cargo.toml +++ b/rust/lancedb/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "lancedb" -version = "0.11.1-beta.0" +version = "0.11.1-beta.1" edition.workspace = true description = "LanceDB: A serverless, low-latency vector database for AI applications" license.workspace = true From 04e1f1ee4c0c56efc86f7b36e9f08f06983f1de7 Mon Sep 17 00:00:00 2001 From: Lance Release Date: Wed, 23 Oct 2024 00:34:22 +0000 Subject: [PATCH 37/51] Updating package-lock.json --- node/package-lock.json | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/node/package-lock.json b/node/package-lock.json index 9ad26c38..5bdeba1c 100644 --- a/node/package-lock.json +++ b/node/package-lock.json @@ -1,12 +1,12 @@ { "name": "vectordb", - "version": "0.11.1-beta.0", + "version": "0.11.1-beta.1", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "vectordb", - "version": "0.11.1-beta.0", + "version": "0.11.1-beta.1", "cpu": [ "x64", "arm64" @@ -52,11 +52,11 @@ "uuid": "^9.0.0" }, "optionalDependencies": { - "@lancedb/vectordb-darwin-arm64": "0.11.1-beta.0", - "@lancedb/vectordb-darwin-x64": "0.11.1-beta.0", - "@lancedb/vectordb-linux-arm64-gnu": "0.11.1-beta.0", - "@lancedb/vectordb-linux-x64-gnu": "0.11.1-beta.0", - "@lancedb/vectordb-win32-x64-msvc": "0.11.1-beta.0" + "@lancedb/vectordb-darwin-arm64": "0.11.1-beta.1", + "@lancedb/vectordb-darwin-x64": "0.11.1-beta.1", + "@lancedb/vectordb-linux-arm64-gnu": "0.11.1-beta.1", + "@lancedb/vectordb-linux-x64-gnu": "0.11.1-beta.1", + "@lancedb/vectordb-win32-x64-msvc": "0.11.1-beta.1" }, "peerDependencies": { "@apache-arrow/ts": "^14.0.2", From 2ea5939f859a9308b303fab205cd41b5e19415c4 Mon Sep 17 00:00:00 2001 From: Ryan Green Date: Fri, 25 Oct 2024 14:40:14 -0230 Subject: [PATCH 38/51] fix: error during deserialization of "INVERTED" index type --- rust/lancedb/src/index.rs | 3 +++ 1 file changed, 3 insertions(+) diff --git a/rust/lancedb/src/index.rs b/rust/lancedb/src/index.rs index 47b42050..b966e8cc 100644 --- a/rust/lancedb/src/index.rs +++ b/rust/lancedb/src/index.rs @@ -120,6 +120,8 @@ pub enum IndexType { LabelList, // FTS FTS, + #[serde(alias = "INVERTED")] + Inverted, } impl std::fmt::Display for IndexType { @@ -132,6 +134,7 @@ impl std::fmt::Display for IndexType { Self::Bitmap => write!(f, "BITMAP"), Self::LabelList => write!(f, "LABEL_LIST"), Self::FTS => write!(f, "FTS"), + Self::Inverted => write!(f, "FTS"), } } } From b9802a0d23fae618bc8231cd5d3c88899b8df60f Mon Sep 17 00:00:00 2001 From: Ryan Green Date: Fri, 25 Oct 2024 14:46:47 -0230 Subject: [PATCH 
39/51] Revert "fix: error during deserialization of "INVERTED" index type" This reverts commit 2ea5939f859a9308b303fab205cd41b5e19415c4. --- rust/lancedb/src/index.rs | 3 --- 1 file changed, 3 deletions(-) diff --git a/rust/lancedb/src/index.rs b/rust/lancedb/src/index.rs index b966e8cc..47b42050 100644 --- a/rust/lancedb/src/index.rs +++ b/rust/lancedb/src/index.rs @@ -120,8 +120,6 @@ pub enum IndexType { LabelList, // FTS FTS, - #[serde(alias = "INVERTED")] - Inverted, } impl std::fmt::Display for IndexType { @@ -134,7 +132,6 @@ impl std::fmt::Display for IndexType { Self::Bitmap => write!(f, "BITMAP"), Self::LabelList => write!(f, "LABEL_LIST"), Self::FTS => write!(f, "FTS"), - Self::Inverted => write!(f, "FTS"), } } } From 32fdcf97dbcd169d34867bf6dd236773bea54d3b Mon Sep 17 00:00:00 2001 From: BubbleCal Date: Wed, 30 Oct 2024 00:03:52 +0800 Subject: [PATCH 40/51] feat!: upgrade lance to 0.19.1 (#1762) BREAKING CHANGE: default tokenizer no longer does stemming or stop-word removal. Users should explicitly turn that option on in the future. - upgrade lance to 0.19.1 - update the FTS docs - update the FTS API Upstream change notes: https://github.com/lancedb/lance/releases/tag/v0.19.1 --------- Signed-off-by: BubbleCal Co-authored-by: Will Jones --- Cargo.toml | 17 ++- docs/mkdocs.yml | 25 +++-- docs/src/fts.md | 158 +++++--------------------- docs/src/fts_tantivy.md | 162 +++++++++++++++++++++++++++ docs/src/index.md | 3 +- docs/src/js/interfaces/FtsOptions.md | 25 +++++ docs/test/requirements.txt | 2 +- node/package-lock.json | 65 +++++++++++ nodejs/package-lock.json | 4 +- python/pyproject.toml | 2 +- python/python/lancedb/index.py | 33 +++++- python/python/lancedb/table.py | 90 ++++++++++++++- python/src/index.rs | 31 ++++- rust-toolchain.toml | 2 +- rust/lancedb/src/index/scalar.rs | 3 + rust/lancedb/src/table.rs | 3 +- 16 files changed, 459 insertions(+), 166 deletions(-) create mode 100644 docs/src/fts_tantivy.md create mode 100644 docs/src/js/interfaces/FtsOptions.md diff --git a/Cargo.toml b/Cargo.toml index a9fffc24..9d5725f1 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -18,17 +18,16 @@ repository = "https://github.com/lancedb/lancedb" description = "Serverless, low-latency vector database for AI applications" keywords = ["lancedb", "lance", "database", "vector", "search"] categories = ["database-implementations"] +rust-version = "1.80.0" # TODO: lower this once we upgrade Lance again. 
[workspace.dependencies] -lance = { "version" = "=0.18.3", "features" = [ - "dynamodb", -], git = "https://github.com/lancedb/lance.git", tag = "v0.18.3-beta.2" } -lance-index = { "version" = "=0.18.3", git = "https://github.com/lancedb/lance.git", tag = "v0.18.3-beta.2" } -lance-linalg = { "version" = "=0.18.3", git = "https://github.com/lancedb/lance.git", tag = "v0.18.3-beta.2" } -lance-table = { "version" = "=0.18.3", git = "https://github.com/lancedb/lance.git", tag = "v0.18.3-beta.2" } -lance-testing = { "version" = "=0.18.3", git = "https://github.com/lancedb/lance.git", tag = "v0.18.3-beta.2" } -lance-datafusion = { "version" = "=0.18.3", git = "https://github.com/lancedb/lance.git", tag = "v0.18.3-beta.2" } -lance-encoding = { "version" = "=0.18.3", git = "https://github.com/lancedb/lance.git", tag = "v0.18.3-beta.2" } +lance = { "version" = "=0.19.1", "features" = ["dynamodb"] } +lance-index = { "version" = "=0.19.1" } +lance-linalg = { "version" = "=0.19.1" } +lance-table = { "version" = "=0.19.1" } +lance-testing = { "version" = "=0.19.1" } +lance-datafusion = { "version" = "=0.19.1" } +lance-encoding = { "version" = "=0.19.1" } # Note that this one does not include pyarrow arrow = { version = "52.2", optional = false } arrow-array = "52.2" diff --git a/docs/mkdocs.yml b/docs/mkdocs.yml index ddfa66b0..0f64a0a3 100644 --- a/docs/mkdocs.yml +++ b/docs/mkdocs.yml @@ -100,7 +100,7 @@ nav: - πŸƒπŸΌβ€β™‚οΈ Quick start: basic.md - πŸ“š Concepts: - Vector search: concepts/vector_search.md - - Indexing: + - Indexing: - IVFPQ: concepts/index_ivfpq.md - HNSW: concepts/index_hnsw.md - Storage: concepts/storage.md @@ -109,7 +109,8 @@ nav: - Working with tables: guides/tables.md - Building a vector index: ann_indexes.md - Vector Search: search.md - - Full-text search: fts.md + - Full-text search (native): fts.md + - Full-text search (tantivy-based): fts_tantivy.md - Building a scalar index: guides/scalar_index.md - Hybrid search: - Overview: hybrid_search/hybrid_search.md @@ -148,10 +149,10 @@ nav: - Reranking: guides/tuning_retrievers/2_reranking.md - Embedding fine-tuning: guides/tuning_retrievers/3_embed_tuning.md - 🧬 Managing embeddings: - - Understand Embeddings: embeddings/understanding_embeddings.md + - Understand Embeddings: embeddings/understanding_embeddings.md - Get Started: embeddings/index.md - Embedding functions: embeddings/embedding_functions.md - - Available models: + - Available models: - Overview: embeddings/default_embedding_functions.md - Text Embedding Functions: - Sentence Transformers: embeddings/available_embedding_models/text_embedding_functions/sentence_transformers.md @@ -200,7 +201,7 @@ nav: - Evaluation: examples/python_examples/evaluations.md - AI Agent: examples/python_examples/aiagent.md - Recommender System: examples/python_examples/recommendersystem.md - - Miscellaneous: + - Miscellaneous: - Serverless QA Bot with S3 and Lambda: examples/serverless_lancedb_with_s3_and_lambda.md - Serverless QA Bot with Modal: examples/serverless_qa_bot_with_modal_and_langchain.md - πŸ‘Ύ JavaScript: @@ -228,7 +229,7 @@ nav: - Quick start: basic.md - Concepts: - Vector search: concepts/vector_search.md - - Indexing: + - Indexing: - IVFPQ: concepts/index_ivfpq.md - HNSW: concepts/index_hnsw.md - Storage: concepts/storage.md @@ -237,7 +238,8 @@ nav: - Working with tables: guides/tables.md - Building an ANN index: ann_indexes.md - Vector Search: search.md - - Full-text search: fts.md + - Full-text search (native): fts.md + - Full-text search (tantivy-based): fts_tantivy.md 
- Building a scalar index: guides/scalar_index.md - Hybrid search: - Overview: hybrid_search/hybrid_search.md @@ -276,10 +278,10 @@ nav: - Reranking: guides/tuning_retrievers/2_reranking.md - Embedding fine-tuning: guides/tuning_retrievers/3_embed_tuning.md - Managing Embeddings: - - Understand Embeddings: embeddings/understanding_embeddings.md + - Understand Embeddings: embeddings/understanding_embeddings.md - Get Started: embeddings/index.md - Embedding functions: embeddings/embedding_functions.md - - Available models: + - Available models: - Overview: embeddings/default_embedding_functions.md - Text Embedding Functions: - Sentence Transformers: embeddings/available_embedding_models/text_embedding_functions/sentence_transformers.md @@ -324,7 +326,7 @@ nav: - Evaluation: examples/python_examples/evaluations.md - AI Agent: examples/python_examples/aiagent.md - Recommender System: examples/python_examples/recommendersystem.md - - Miscellaneous: + - Miscellaneous: - Serverless QA Bot with S3 and Lambda: examples/serverless_lancedb_with_s3_and_lambda.md - Serverless QA Bot with Modal: examples/serverless_qa_bot_with_modal_and_langchain.md - πŸ‘Ύ JavaScript: @@ -367,5 +369,4 @@ extra: - icon: fontawesome/brands/x-twitter link: https://twitter.com/lancedb - icon: fontawesome/brands/linkedin - link: https://www.linkedin.com/company/lancedb - \ No newline at end of file + link: https://www.linkedin.com/company/lancedb diff --git a/docs/src/fts.md b/docs/src/fts.md index 60381a42..57838a9e 100644 --- a/docs/src/fts.md +++ b/docs/src/fts.md @@ -1,21 +1,9 @@ -# Full-text search +# Full-text search (Native FTS) -LanceDB provides support for full-text search via Lance (before via [Tantivy](https://github.com/quickwit-oss/tantivy) (Python only)), allowing you to incorporate keyword-based search (based on BM25) in your retrieval solutions. - -Currently, the Lance full text search is missing some features that are in the Tantivy full text search. This includes query parser and customizing the tokenizer. Thus, in Python, Tantivy is still the default way to do full text search and many of the instructions below apply just to Tantivy-based indices. - - -## Installation (Only for Tantivy-based FTS) +LanceDB provides support for full-text search via Lance, allowing you to incorporate keyword-based search (based on BM25) in your retrieval solutions. !!! note - No need to install the tantivy dependency if using native FTS - -To use full-text search, install the dependency [`tantivy-py`](https://github.com/quickwit-oss/tantivy-py): - -```sh -# Say you want to use tantivy==0.20.1 -pip install tantivy==0.20.1 -``` + The Python SDK uses tantivy-based FTS by default, need to pass `use_tantivy=False` to use native FTS. ## Example @@ -39,7 +27,7 @@ Consider that we have a LanceDB table named `my_table`, whose string column `tex # passing `use_tantivy=False` to use lance FTS index # `use_tantivy=True` by default - table.create_fts_index("text") + table.create_fts_index("text", use_tantivy=False) table.search("puppy").limit(10).select(["text"]).to_list() # [{'text': 'Frodo was a happy puppy', '_score': 0.6931471824645996}] # ... @@ -93,51 +81,40 @@ Consider that we have a LanceDB table named `my_table`, whose string column `tex ``` It would search on all indexed columns by default, so it's useful when there are multiple indexed columns. -For now, this is supported in tantivy way only. -Passing `fts_columns="text"` if you want to specify the columns to search, but it's not available for Tantivy-based full text search. 
+Passing `fts_columns="text"` if you want to specify the columns to search. !!! note LanceDB automatically searches on the existing FTS index if the input to the search is of type `str`. If you provide a vector as input, LanceDB will search the ANN index instead. ## Tokenization -By default the text is tokenized by splitting on punctuation and whitespaces and then removing tokens that are longer than 40 chars. For more language specific tokenization then provide the argument tokenizer_name with the 2 letter language code followed by "_stem". So for english it would be "en_stem". +By default the text is tokenized by splitting on punctuation and whitespaces, and would filter out words that are with length greater than 40, and lowercase all words. -For now, only the Tantivy-based FTS index supports to specify the tokenizer, so it's only available in Python with `use_tantivy=True`. +Stemming is useful for improving search results by reducing words to their root form, e.g. "running" to "run". LanceDB supports stemming for multiple languages, you can specify the tokenizer name to enable stemming by the pattern `tokenizer_name="{language_code}_stem"`, e.g. `en_stem` for English. -=== "use_tantivy=True" - - ```python - table.create_fts_index("text", use_tantivy=True, tokenizer_name="en_stem") - ``` - -=== "use_tantivy=False" - - [**Not supported yet**](https://github.com/lancedb/lance/issues/1195) +For example, to enable stemming for English: +```python +table.create_fts_index("text", use_tantivy=True, tokenizer_name="en_stem") +``` the following [languages](https://docs.rs/tantivy/latest/tantivy/tokenizer/enum.Language.html) are currently supported. -## Index multiple columns +The tokenizer is customizable, you can specify how the tokenizer splits the text, and how it filters out words, etc. -If you have multiple string columns to index, there's no need to combine them manually -- simply pass them all as a list to `create_fts_index`: - -=== "use_tantivy=True" - - ```python - table.create_fts_index(["text1", "text2"]) - ``` - -=== "use_tantivy=False" - - [**Not supported yet**](https://github.com/lancedb/lance/issues/1195) - -Note that the search API call does not change - you can search over all indexed columns at once. +For example, for language with accents, you can specify the tokenizer to use `ascii_folding` to remove accents, e.g. 'Γ©' to 'e': +```python +table.create_fts_index("text", + use_tantivy=False, + language="French", + stem=True, + ascii_folding=True) +``` ## Filtering -Currently the LanceDB full text search feature supports *post-filtering*, meaning filters are -applied on top of the full text search results. This can be invoked via the familiar -`where` syntax: +LanceDB full text search supports to filter the search results by a condition, both pre-filtering and post-filtering are supported. + +This can be invoked via the familiar `where` syntax: === "Python" @@ -169,98 +146,17 @@ applied on top of the full text search results. This can be invoked via the fami .await?; ``` -## Sorting - -!!! warning "Warn" - Sorting is available for only Tantivy-based FTS - -You can pre-sort the documents by specifying `ordering_field_names` when -creating the full-text search index. Once pre-sorted, you can then specify -`ordering_field_name` while searching to return results sorted by the given -field. 
For example, - -```python -table.create_fts_index(["text_field"], use_tantivy=True, ordering_field_names=["sort_by_field"]) - -(table.search("terms", ordering_field_name="sort_by_field") - .limit(20) - .to_list()) -``` - -!!! note - If you wish to specify an ordering field at query time, you must also - have specified it during indexing time. Otherwise at query time, an - error will be raised that looks like `ValueError: The field does not exist: xxx` - -!!! note - The fields to sort on must be of typed unsigned integer, or else you will see - an error during indexing that looks like - `TypeError: argument 'value': 'float' object cannot be interpreted as an integer`. - -!!! note - You can specify multiple fields for ordering at indexing time. - But at query time only one ordering field is supported. - - ## Phrase queries vs. terms queries !!! warning "Warn" Lance-based FTS doesn't support queries using boolean operators `OR`, `AND`. For full-text search you can specify either a **phrase** query like `"the old man and the sea"`, -or a **terms** search query like `"(Old AND Man) AND Sea"`. For more details on the terms +or a **terms** search query like `old man sea`. For more details on the terms query syntax, see Tantivy's [query parser rules](https://docs.rs/tantivy/latest/tantivy/query/struct.QueryParser.html). -!!! tip "Note" - The query parser will raise an exception on queries that are ambiguous. For example, in the query `they could have been dogs OR cats`, `OR` is capitalized so it's considered a keyword query operator. But it's ambiguous how the left part should be treated. So if you submit this search query as is, you'll get `Syntax Error: they could have been dogs OR cats`. - - ```py - # This raises a syntax error - table.search("they could have been dogs OR cats") - ``` - - On the other hand, lowercasing `OR` to `or` will work, because there are no capitalized logical operators and - the query is treated as a phrase query. - - ```py - # This works! - table.search("they could have been dogs or cats") - ``` - -It can be cumbersome to have to remember what will cause a syntax error depending on the type of -query you want to perform. To make this simpler, when you want to perform a phrase query, you can -enforce it in one of two ways: - -1. Place the double-quoted query inside single quotes. For example, `table.search('"they could have been dogs OR cats"')` is treated as -a phrase query. -1. Explicitly declare the `phrase_query()` method. This is useful when you have a phrase query that -itself contains double quotes. For example, `table.search('the cats OR dogs were not really "pets" at all').phrase_query()` -is treated as a phrase query. - -In general, a query that's declared as a phrase query will be wrapped in double quotes during parsing, with nested -double quotes replaced by single quotes. - - -## Configurations (Only for Tantivy-based FTS) - -By default, LanceDB configures a 1GB heap size limit for creating the index. You can -reduce this if running on a smaller node, or increase this for faster performance while -indexing a larger corpus. - +To search for a phrase, the index must be created with `with_position=True`: ```python -# configure a 512MB heap size -heap = 1024 * 1024 * 512 -table.create_fts_index(["text1", "text2"], writer_heap_size=heap, replace=True) +table.create_fts_index("text", use_tantivy=False, with_position=True) ``` - -## Current limitations - -For that Tantivy-based FTS: - -1. Currently we do not yet support incremental writes. 
- If you add data after FTS index creation, it won't be reflected - in search results until you do a full reindex. - -2. We currently only support local filesystem paths for the FTS index. - This is a tantivy limitation. We've implemented an object store plugin - but there's no way in tantivy-py to specify to use it. +This will allow you to search for phrases, but it will also significantly increase the index size and indexing time. diff --git a/docs/src/fts_tantivy.md b/docs/src/fts_tantivy.md new file mode 100644 index 00000000..674f3c15 --- /dev/null +++ b/docs/src/fts_tantivy.md @@ -0,0 +1,162 @@ +# Full-text search (Tantivy-based FTS) + +LanceDB also provides support for full-text search via [Tantivy](https://github.com/quickwit-oss/tantivy), allowing you to incorporate keyword-based search (based on BM25) in your retrieval solutions. + +The tantivy-based FTS is only available in Python and does not support building indexes on object storage or incremental indexing. If you need these features, try native FTS [native FTS](fts.md). + +## Installation + +To use full-text search, install the dependency [`tantivy-py`](https://github.com/quickwit-oss/tantivy-py): + +```sh +# Say you want to use tantivy==0.20.1 +pip install tantivy==0.20.1 +``` + +## Example + +Consider that we have a LanceDB table named `my_table`, whose string column `content` we want to index and query via keyword search, the FTS index must be created before you can search via keywords. + +```python +import lancedb + +uri = "data/sample-lancedb" +db = lancedb.connect(uri) + +table = db.create_table( + "my_table", + data=[ + {"id": 1, "vector": [3.1, 4.1], "title": "happy puppy", "content": "Frodo was a happy puppy", "meta": "foo"}, + {"id": 2, "vector": [5.9, 26.5], "title": "playing kittens", "content": "There are several kittens playing around the puppy", "meta": "bar"}, + ], +) + +# passing `use_tantivy=False` to use lance FTS index +# `use_tantivy=True` by default +table.create_fts_index("content", use_tantivy=True) +table.search("puppy").limit(10).select(["content"]).to_list() +# [{'text': 'Frodo was a happy puppy', '_score': 0.6931471824645996}] +# ... +``` + +It would search on all indexed columns by default, so it's useful when there are multiple indexed columns. + +!!! note + LanceDB automatically searches on the existing FTS index if the input to the search is of type `str`. If you provide a vector as input, LanceDB will search the ANN index instead. + +## Tokenization +By default the text is tokenized by splitting on punctuation and whitespaces and then removing tokens that are longer than 40 chars. For more language specific tokenization then provide the argument tokenizer_name with the 2 letter language code followed by "_stem". So for english it would be "en_stem". + +```python +table.create_fts_index("content", use_tantivy=True, tokenizer_name="en_stem", replace=True) +``` + +the following [languages](https://docs.rs/tantivy/latest/tantivy/tokenizer/enum.Language.html) are currently supported. + +## Index multiple columns + +If you have multiple string columns to index, there's no need to combine them manually -- simply pass them all as a list to `create_fts_index`: + +```python +table.create_fts_index(["title", "content"], use_tantivy=True, replace=True) +``` + +Note that the search API call does not change - you can search over all indexed columns at once. 
+ +## Filtering + +Currently the LanceDB full text search feature supports *post-filtering*, meaning filters are +applied on top of the full text search results (see [native FTS](fts.md) if you need pre-filtering). This can be invoked via the familiar +`where` syntax: + +```python +table.search("puppy").limit(10).where("meta='foo'").to_list() +``` + +## Sorting + +You can pre-sort the documents by specifying `ordering_field_names` when +creating the full-text search index. Once pre-sorted, you can then specify +`ordering_field_name` while searching to return results sorted by the given +field. For example, + +```python +table.create_fts_index(["content"], use_tantivy=True, ordering_field_names=["id"], replace=True) + +(table.search("puppy", ordering_field_name="id") + .limit(20) + .to_list()) +``` + +!!! note + If you wish to specify an ordering field at query time, you must also + have specified it during indexing time. Otherwise at query time, an + error will be raised that looks like `ValueError: The field does not exist: xxx` + +!!! note + The fields to sort on must be of typed unsigned integer, or else you will see + an error during indexing that looks like + `TypeError: argument 'value': 'float' object cannot be interpreted as an integer`. + +!!! note + You can specify multiple fields for ordering at indexing time. + But at query time only one ordering field is supported. + + +## Phrase queries vs. terms queries + +For full-text search you can specify either a **phrase** query like `"the old man and the sea"`, +or a **terms** search query like `"(Old AND Man) AND Sea"`. For more details on the terms +query syntax, see Tantivy's [query parser rules](https://docs.rs/tantivy/latest/tantivy/query/struct.QueryParser.html). + +!!! tip "Note" + The query parser will raise an exception on queries that are ambiguous. For example, in the query `they could have been dogs OR cats`, `OR` is capitalized so it's considered a keyword query operator. But it's ambiguous how the left part should be treated. So if you submit this search query as is, you'll get `Syntax Error: they could have been dogs OR cats`. + + ```py + # This raises a syntax error + table.search("they could have been dogs OR cats") + ``` + + On the other hand, lowercasing `OR` to `or` will work, because there are no capitalized logical operators and + the query is treated as a phrase query. + + ```py + # This works! + table.search("they could have been dogs or cats") + ``` + +It can be cumbersome to have to remember what will cause a syntax error depending on the type of +query you want to perform. To make this simpler, when you want to perform a phrase query, you can +enforce it in one of two ways: + +1. Place the double-quoted query inside single quotes. For example, `table.search('"they could have been dogs OR cats"')` is treated as +a phrase query. +1. Explicitly declare the `phrase_query()` method. This is useful when you have a phrase query that +itself contains double quotes. For example, `table.search('the cats OR dogs were not really "pets" at all').phrase_query()` +is treated as a phrase query. + +In general, a query that's declared as a phrase query will be wrapped in double quotes during parsing, with nested +double quotes replaced by single quotes. + + +## Configurations + +By default, LanceDB configures a 1GB heap size limit for creating the index. You can +reduce this if running on a smaller node, or increase this for faster performance while +indexing a larger corpus. 
+ +```python +# configure a 512MB heap size +heap = 1024 * 1024 * 512 +table.create_fts_index(["title", "content"], use_tantivy=True, writer_heap_size=heap, replace=True) +``` + +## Current limitations + +1. Currently we do not yet support incremental writes. + If you add data after FTS index creation, it won't be reflected + in search results until you do a full reindex. + +2. We currently only support local filesystem paths for the FTS index. + This is a tantivy limitation. We've implemented an object store plugin + but there's no way in tantivy-py to specify to use it. diff --git a/docs/src/index.md b/docs/src/index.md index 8339edf7..c2d59717 100644 --- a/docs/src/index.md +++ b/docs/src/index.md @@ -49,7 +49,8 @@ The following pages go deeper into the internal of LanceDB and how to use it. * [Working with tables](guides/tables.md): Learn how to work with tables and their associated functions * [Indexing](ann_indexes.md): Understand how to create indexes * [Vector search](search.md): Learn how to perform vector similarity search -* [Full-text search](fts.md): Learn how to perform full-text search +* [Full-text search (native)](fts.md): Learn how to perform full-text search +* [Full-text search (tantivy-based)](fts_tantivy.md): Learn how to perform full-text search using Tantivy * [Managing embeddings](embeddings/index.md): Managing embeddings and the embedding functions API in LanceDB * [Ecosystem Integrations](integrations/index.md): Integrate LanceDB with other tools in the data ecosystem * [Python API Reference](python/python.md): Python OSS and Cloud API references diff --git a/docs/src/js/interfaces/FtsOptions.md b/docs/src/js/interfaces/FtsOptions.md new file mode 100644 index 00000000..95a635d7 --- /dev/null +++ b/docs/src/js/interfaces/FtsOptions.md @@ -0,0 +1,25 @@ +[**@lancedb/lancedb**](../README.md) β€’ **Docs** + +*** + +[@lancedb/lancedb](../globals.md) / FtsOptions + +# Interface: FtsOptions + +Options to create an `FTS` index + +## Properties + +### withPosition? + +> `optional` **withPosition**: `boolean` + +Whether to store the positions of the term in the document. + +If this is true then the index will store the positions of the term in the document. +This allows phrase queries to be run. But it also increases the size of the index, +and the time to build the index. + +The default value is true. 
+ +*** diff --git a/docs/test/requirements.txt b/docs/test/requirements.txt index 671940a5..bbccf6b8 100644 --- a/docs/test/requirements.txt +++ b/docs/test/requirements.txt @@ -3,7 +3,7 @@ numpy pandas pylance duckdb +tantivy==0.20.1 --extra-index-url https://download.pytorch.org/whl/cpu torch polars>=0.19, <=1.3.0 - diff --git a/node/package-lock.json b/node/package-lock.json index 5bdeba1c..31cfc115 100644 --- a/node/package-lock.json +++ b/node/package-lock.json @@ -326,6 +326,71 @@ "@jridgewell/sourcemap-codec": "^1.4.10" } }, + "node_modules/@lancedb/vectordb-darwin-arm64": { + "version": "0.11.1-beta.1", + "resolved": "https://registry.npmjs.org/@lancedb/vectordb-darwin-arm64/-/vectordb-darwin-arm64-0.11.1-beta.1.tgz", + "integrity": "sha512-q9jcCbmcz45UHmjgecL6zK82WaqUJsARfniwXXPcnd8ooISVhPkgN+RVKv6edwI9T0PV+xVRYq+LQLlZu5fyxw==", + "cpu": [ + "arm64" + ], + "license": "Apache-2.0", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@lancedb/vectordb-darwin-x64": { + "version": "0.11.1-beta.1", + "resolved": "https://registry.npmjs.org/@lancedb/vectordb-darwin-x64/-/vectordb-darwin-x64-0.11.1-beta.1.tgz", + "integrity": "sha512-E5tCTS5TaTkssTPa+gdnFxZJ1f60jnSIJXhqufNFZk4s+IMViwR1BPqaqE++WY5c1uBI55ef1862CROKDKX4gg==", + "cpu": [ + "x64" + ], + "license": "Apache-2.0", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@lancedb/vectordb-linux-arm64-gnu": { + "version": "0.11.1-beta.1", + "resolved": "https://registry.npmjs.org/@lancedb/vectordb-linux-arm64-gnu/-/vectordb-linux-arm64-gnu-0.11.1-beta.1.tgz", + "integrity": "sha512-Obohy6TH31Uq+fp6ZisHR7iAsvgVPqBExrycVcIJqrLZnIe88N9OWUwBXkmfMAw/2hNJFwD4tU7+4U2FcBWX4w==", + "cpu": [ + "arm64" + ], + "license": "Apache-2.0", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@lancedb/vectordb-linux-x64-gnu": { + "version": "0.11.1-beta.1", + "resolved": "https://registry.npmjs.org/@lancedb/vectordb-linux-x64-gnu/-/vectordb-linux-x64-gnu-0.11.1-beta.1.tgz", + "integrity": "sha512-3Meu0dgrzNrnBVVQhxkUSAOhQNmgtKHvOvmrRLUicV+X19hd33udihgxVpZZb9mpXenJ8lZsS+Jq6R0hWqntag==", + "cpu": [ + "x64" + ], + "license": "Apache-2.0", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@lancedb/vectordb-win32-x64-msvc": { + "version": "0.11.1-beta.1", + "resolved": "https://registry.npmjs.org/@lancedb/vectordb-win32-x64-msvc/-/vectordb-win32-x64-msvc-0.11.1-beta.1.tgz", + "integrity": "sha512-BafZ9OJPQXsS7JW0weAl12wC+827AiRjfUrE5tvrYWZah2OwCF2U2g6uJ3x4pxfwEGsv5xcHFqgxlS7ttFkh+Q==", + "cpu": [ + "x64" + ], + "license": "Apache-2.0", + "optional": true, + "os": [ + "win32" + ] + }, "node_modules/@neon-rs/cli": { "version": "0.0.160", "resolved": "https://registry.npmjs.org/@neon-rs/cli/-/cli-0.0.160.tgz", diff --git a/nodejs/package-lock.json b/nodejs/package-lock.json index b124bed2..a642d63b 100644 --- a/nodejs/package-lock.json +++ b/nodejs/package-lock.json @@ -1,12 +1,12 @@ { "name": "@lancedb/lancedb", - "version": "0.11.0", + "version": "0.11.1-beta.1", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "@lancedb/lancedb", - "version": "0.11.0", + "version": "0.11.1-beta.1", "cpu": [ "x64", "arm64" diff --git a/python/pyproject.toml b/python/pyproject.toml index 86b75067..10dc7375 100644 --- a/python/pyproject.toml +++ b/python/pyproject.toml @@ -3,7 +3,7 @@ name = "lancedb" # version in Cargo.toml dependencies = [ "deprecation", - "pylance==0.18.3-beta.2", + "pylance==0.19.1", "requests>=2.31.0", "tqdm>=4.27.0", "pydantic>=1.10", diff --git a/python/python/lancedb/index.py 
b/python/python/lancedb/index.py index 78160ac6..b7e44b52 100644 --- a/python/python/lancedb/index.py +++ b/python/python/lancedb/index.py @@ -7,6 +7,27 @@ from ._lancedb import ( IndexConfig, ) +lang_mapping = { + "ar": "Arabic", + "da": "Danish", + "du": "Dutch", + "en": "English", + "fi": "Finnish", + "fr": "French", + "de": "German", + "gr": "Greek", + "hu": "Hungarian", + "it": "Italian", + "no": "Norwegian", + "pt": "Portuguese", + "ro": "Romanian", + "ru": "Russian", + "es": "Spanish", + "sv": "Swedish", + "ta": "Tamil", + "tr": "Turkish", +} + class BTree: """Describes a btree index configuration @@ -78,7 +99,17 @@ class FTS: For example, it works with `title`, `description`, `content`, etc. """ - def __init__(self, with_position: bool = True): + def __init__( + self, + with_position: bool = True, + base_tokenizer: str = "simple", + language: str = "English", + max_token_length: Optional[int] = 40, + lower_case: bool = True, + stem: bool = False, + remove_stop_words: bool = False, + ascii_folding: bool = False, + ): self._inner = LanceDbIndex.fts(with_position=with_position) diff --git a/python/python/lancedb/table.py b/python/python/lancedb/table.py index b7e01190..59dc4487 100644 --- a/python/python/lancedb/table.py +++ b/python/python/lancedb/table.py @@ -55,6 +55,7 @@ from .util import ( safe_import_polars, value_to_sql, ) +from .index import lang_mapping if TYPE_CHECKING: import PIL @@ -497,10 +498,18 @@ class Table(ABC): ordering_field_names: Union[str, List[str]] = None, *, replace: bool = False, - with_position: bool = True, writer_heap_size: Optional[int] = 1024 * 1024 * 1024, - tokenizer_name: str = "default", use_tantivy: bool = True, + tokenizer_name: Optional[str] = None, + with_position: bool = True, + # tokenizer configs: + base_tokenizer: str = "simple", + language: str = "English", + max_token_length: Optional[int] = 40, + lower_case: bool = True, + stem: bool = False, + remove_stop_words: bool = False, + ascii_folding: bool = False, ): """Create a full-text search index on the table. @@ -526,7 +535,6 @@ class Table(ABC): The tokenizer to use for the index. Can be "raw", "default" or the 2 letter language code followed by "_stem". So for english it would be "en_stem". For available languages see: https://docs.rs/tantivy/latest/tantivy/tokenizer/enum.Language.html - only available with use_tantivy=True for now use_tantivy: bool, default True If True, use the legacy full-text search implementation based on tantivy. If False, use the new full-text search implementation based on lance-index. 
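For reference, a minimal sketch of how the tokenizer options wired up above might be invoked once this change lands. The connection path and the table/column names are placeholders, not part of the patch; the keyword arguments mirror the French example added to docs/src/fts.md in this same patch.

```python
import lancedb

db = lancedb.connect("data/sample-lancedb")  # placeholder path
tbl = db.open_table("my_table")              # assumes a table with a "text" column

# Native (lance-based) FTS index with stemming and accent folding for French text.
tbl.create_fts_index(
    "text",
    use_tantivy=False,
    language="French",
    stem=True,
    ascii_folding=True,
    replace=True,
)
```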
@@ -1341,14 +1349,33 @@ class LanceTable(Table): ordering_field_names: Union[str, List[str]] = None, *, replace: bool = False, - with_position: bool = True, writer_heap_size: Optional[int] = 1024 * 1024 * 1024, - tokenizer_name: str = "default", use_tantivy: bool = True, + tokenizer_name: Optional[str] = None, + with_position: bool = True, + # tokenizer configs: + base_tokenizer: str = "simple", + language: str = "English", + max_token_length: Optional[int] = 40, + lower_case: bool = True, + stem: bool = False, + remove_stop_words: bool = False, + ascii_folding: bool = False, ): if not use_tantivy: if not isinstance(field_names, str): raise ValueError("field_names must be a string when use_tantivy=False") + tokenizer_configs = { + "base_tokenizer": base_tokenizer, + "language": language, + "max_token_length": max_token_length, + "lower_case": lower_case, + "stem": stem, + "remove_stop_words": remove_stop_words, + "ascii_folding": ascii_folding, + } + if tokenizer_name is not None: + tokenizer_configs = self.infer_tokenizer_configs(tokenizer_name) # delete the existing legacy index if it exists if replace: path, fs, exist = self._get_fts_index_path() @@ -1359,6 +1386,7 @@ class LanceTable(Table): index_type="INVERTED", replace=replace, with_position=with_position, + **tokenizer_configs, ) return @@ -1381,6 +1409,8 @@ class LanceTable(Table): "Full-text search is only supported on the local filesystem" ) + if tokenizer_name is None: + tokenizer_name = "default" index = create_index( path, field_names, @@ -1395,6 +1425,56 @@ class LanceTable(Table): writer_heap_size=writer_heap_size, ) + def infer_tokenizer_configs(tokenizer_name: str) -> dict: + if tokenizer_name == "default": + return { + "base_tokenizer": "simple", + "language": "English", + "max_token_length": 40, + "lower_case": True, + "stem": False, + "remove_stop_words": False, + "ascii_folding": False, + } + elif tokenizer_name == "raw": + return { + "base_tokenizer": "raw", + "language": "English", + "max_token_length": None, + "lower_case": False, + "stem": False, + "remove_stop_words": False, + "ascii_folding": False, + } + elif tokenizer_name == "whitespace": + return { + "base_tokenizer": "whitespace", + "language": "English", + "max_token_length": None, + "lower_case": False, + "stem": False, + "remove_stop_words": False, + "ascii_folding": False, + } + + # or it's with language stemming with pattern like "en_stem" + if len(tokenizer_name) != 7: + raise ValueError(f"Invalid tokenizer name {tokenizer_name}") + lang = tokenizer_name[:2] + if tokenizer_name[-5:] != "_stem": + raise ValueError(f"Invalid tokenizer name {tokenizer_name}") + if lang not in lang_mapping: + raise ValueError(f"Invalid language code {lang}") + return { + "base_tokenizer": "simple", + "language": lang_mapping[lang], + "max_token_length": 40, + "lower_case": True, + "stem": True, + "remove_stop_words": False, + "ascii_folding": False, + } + def add( self, data: DATA, diff --git a/python/src/index.rs b/python/src/index.rs index d9eeccde..7510b7fe 100644 --- a/python/src/index.rs +++ b/python/src/index.rs @@ -106,12 +106,41 @@ impl Index { }) } + #[allow(clippy::too_many_arguments)] #[staticmethod] - pub fn fts(with_position: Option) -> Self { + pub fn fts( + with_position: Option, + base_tokenizer: Option, + language: Option, + max_token_length: Option, + lower_case: Option, + stem: Option, + remove_stop_words: Option, + ascii_folding: Option, + ) -> Self { let mut opts = FtsIndexBuilder::default(); if let Some(with_position) = with_position { opts = 
opts.with_position(with_position); } + if let Some(base_tokenizer) = base_tokenizer { + opts.tokenizer_configs = opts.tokenizer_configs.base_tokenizer(base_tokenizer); + } + if let Some(language) = language { + opts.tokenizer_configs = opts.tokenizer_configs.language(&language).unwrap(); + } + opts.tokenizer_configs = opts.tokenizer_configs.max_token_length(max_token_length); + if let Some(lower_case) = lower_case { + opts.tokenizer_configs = opts.tokenizer_configs.lower_case(lower_case); + } + if let Some(stem) = stem { + opts.tokenizer_configs = opts.tokenizer_configs.stem(stem); + } + if let Some(remove_stop_words) = remove_stop_words { + opts.tokenizer_configs = opts.tokenizer_configs.remove_stop_words(remove_stop_words); + } + if let Some(ascii_folding) = ascii_folding { + opts.tokenizer_configs = opts.tokenizer_configs.ascii_folding(ascii_folding); + } Self { inner: Mutex::new(Some(LanceDbIndex::FTS(opts))), } diff --git a/rust-toolchain.toml b/rust-toolchain.toml index 628740b1..4d2dee85 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -1,2 +1,2 @@ [toolchain] -channel = "1.79.0" +channel = "1.80.0" diff --git a/rust/lancedb/src/index/scalar.rs b/rust/lancedb/src/index/scalar.rs index 553ad64f..e7066548 100644 --- a/rust/lancedb/src/index/scalar.rs +++ b/rust/lancedb/src/index/scalar.rs @@ -54,12 +54,14 @@ pub struct LabelListIndexBuilder {} #[derive(Debug, Clone)] pub struct FtsIndexBuilder { pub(crate) with_position: bool, + pub tokenizer_configs: TokenizerConfig, } impl Default for FtsIndexBuilder { fn default() -> Self { Self { with_position: true, + tokenizer_configs: TokenizerConfig::default(), } } } @@ -72,4 +74,5 @@ impl FtsIndexBuilder { } } +use lance_index::scalar::inverted::TokenizerConfig; pub use lance_index::scalar::FullTextSearchQuery; diff --git a/rust/lancedb/src/table.rs b/rust/lancedb/src/table.rs index 7b611de7..0bc61c34 100644 --- a/rust/lancedb/src/table.rs +++ b/rust/lancedb/src/table.rs @@ -1568,6 +1568,7 @@ impl NativeTable { let mut dataset = self.dataset.get_mut().await?; let fts_params = lance_index::scalar::InvertedIndexParams { with_position: fts_opts.with_position, + tokenizer_config: fts_opts.tokenizer_configs, }; dataset .create_index( @@ -2002,7 +2003,7 @@ impl TableInternal for NativeTable { self.dataset .get_mut() .await? 
- .add_columns(transforms, read_columns) + .add_columns(transforms, read_columns, None) .await?; Ok(()) } From aa269199ad9eec5b2565a0058761746cf5013402 Mon Sep 17 00:00:00 2001 From: Rithik Kumar <46047011+rithikJha@users.noreply.github.com> Date: Tue, 29 Oct 2024 22:55:27 +0530 Subject: [PATCH 41/51] docs: fix archived examples links (#1751) --- docs/src/examples/python_examples/chatbot.md | 4 ++-- .../examples/python_examples/multimodal.md | 2 +- docs/src/examples/python_examples/rag.md | 8 ++++---- .../python_examples/recommendersystem.md | 8 ++++---- .../examples/python_examples/vector_search.md | 20 +++++++++---------- docs/src/integrations/phidata.md | 2 +- 6 files changed, 22 insertions(+), 22 deletions(-) diff --git a/docs/src/examples/python_examples/chatbot.md b/docs/src/examples/python_examples/chatbot.md index 6d1e59cd..52d4e404 100644 --- a/docs/src/examples/python_examples/chatbot.md +++ b/docs/src/examples/python_examples/chatbot.md @@ -36,6 +36,6 @@ [aware_colab]: https://colab.research.google.com/github/lancedb/vectordb-recipes/blob/main/tutorials/chatbot_using_Llama2_&_lanceDB/main.ipynb [aware_ghost]: https://blog.lancedb.com/context-aware-chatbot-using-llama-2-lancedb-as-vector-database-4d771d95c755 -[csv_github]: https://github.com/lancedb/vectordb-recipes/blob/main/tutorials/Chat_with_csv_file -[csv_colab]: https://colab.research.google.com/github/lancedb/vectordb-recipes/blob/main/tutorials/Chat_with_csv_file/main.ipynb +[csv_github]: https://github.com/lancedb/vectordb-recipes/tree/main/examples/archived_examples/Chat_with_csv_file +[csv_colab]: https://colab.research.google.com/github/lancedb/vectordb-recipes/blob/main/examples/archived_examples/Chat_with_csv_file/main.ipynb [csv_ghost]: https://blog.lancedb.com/p/d8c71df4-e55f-479a-819e-cde13354a6a3/ diff --git a/docs/src/examples/python_examples/multimodal.md b/docs/src/examples/python_examples/multimodal.md index 08c561c6..69b7b778 100644 --- a/docs/src/examples/python_examples/multimodal.md +++ b/docs/src/examples/python_examples/multimodal.md @@ -12,7 +12,7 @@ LanceDB supports multimodal search by indexing and querying vector representatio |:----------------|:-----------------|:-----------| | **Multimodal CLIP: DiffusionDB 🌐πŸ’₯** | Multi-Modal Search with **CLIP** and **LanceDB** Using **DiffusionDB** Data for Combined Text and Image Understanding ! πŸ”“ | [![GitHub](../../assets/github.svg)][Clip_diffusionDB_github]
[![Open In Collab](../../assets/colab.svg)][Clip_diffusionDB_colab]
[![Python](../../assets/python.svg)][Clip_diffusionDB_python]
[![Ghost](../../assets/ghost.svg)][Clip_diffusionDB_ghost] | | **Multimodal CLIP: Youtube Videos πŸ“ΉπŸ‘€** | Search **Youtube videos** using Multimodal CLIP, finding relevant content with ease and accuracy! 🎯 | [![Github](../../assets/github.svg)][Clip_youtube_github]
[![Open In Collab](../../assets/colab.svg)][Clip_youtube_colab]
[![Python](../../assets/python.svg)][Clip_youtube_python]
[![Ghost](../../assets/ghost.svg)][Clip_youtube_python] | -| **Multimodal Image + Text Search πŸ“ΈπŸ”** | Find **relevant documents** and **images** with a single query using **LanceDB's** multimodal search capabilities, to seamlessly integrate text and visuals ! πŸŒ‰ | [![GitHub](../../assets/github.svg)](https://github.com/lancedb/vectordb-recipes/blob/main/examples/multimodal_search)
[![Open In Collab](../../assets/colab.svg)](https://colab.research.google.com/github/lancedb/vectordb-recipes/blob/main/examples/multimodal_search/main.ipynb)
[![Python](../../assets/python.svg)](https://github.com/lancedb/vectordb-recipes/blob/main/examples/multimodal_search/main.py)
[![Ghost](../../assets/ghost.svg)](https://blog.lancedb.com/multi-modal-ai-made-easy-with-lancedb-clip-5aaf8801c939/) | +| **Multimodal Image + Text Search πŸ“ΈπŸ”** | Find **relevant documents** and **images** with a single query using **LanceDB's** multimodal search capabilities, to seamlessly integrate text and visuals ! πŸŒ‰ | [![GitHub](../../assets/github.svg)](https://github.com/lancedb/vectordb-recipes/tree/main/examples/archived_examples/multimodal_search)
[![Open In Collab](../../assets/colab.svg)](https://colab.research.google.com/github/lancedb/vectordb-recipes/blob/main/examples/archived_examples/multimodal_search/main.ipynb)
[![Python](../../assets/python.svg)](https://github.com/lancedb/vectordb-recipes/blob/main/examples/multimodal_search/main.py)
[![Ghost](../../assets/ghost.svg)](https://blog.lancedb.com/multi-modal-ai-made-easy-with-lancedb-clip-5aaf8801c939/) | | **Cambrian-1: Vision-Centric Image Exploration πŸ”πŸ‘€** | Learn how **Cambrian-1** works, using an example of **Vision-Centric** exploration on images found through vector search ! Work on **Flickr-8k** dataset πŸ”Ž | [![Kaggle](https://img.shields.io/badge/Kaggle-035a7d?style=for-the-badge&logo=kaggle&logoColor=white)](https://www.kaggle.com/code/prasantdixit/cambrian-1-vision-centric-exploration-of-images/)
[![Ghost](../../assets/ghost.svg)](https://blog.lancedb.com/cambrian-1-vision-centric-exploration/) | diff --git a/docs/src/examples/python_examples/rag.md b/docs/src/examples/python_examples/rag.md index a6db3a68..bed3a1ab 100644 --- a/docs/src/examples/python_examples/rag.md +++ b/docs/src/examples/python_examples/rag.md @@ -70,12 +70,12 @@ Build RAG (Retrieval-Augmented Generation) with LanceDB, a powerful solution fo [flare_colab]: https://colab.research.google.com/github/lancedb/vectordb-recipes/blob/main/examples/better-rag-FLAIR/main.ipynb [flare_ghost]: https://blog.lancedb.com/better-rag-with-active-retrieval-augmented-generation-flare-3b66646e2a9f/ -[query_github]: https://github.com/lancedb/vectordb-recipes/blob/main/examples/QueryExpansion&Reranker -[query_colab]: https://colab.research.google.com/github/lancedb/vectordb-recipes/blob/main/examples/QueryExpansion&Reranker/main.ipynb +[query_github]: https://github.com/lancedb/vectordb-recipes/tree/main/examples/archived_examples/QueryExpansion%26Reranker +[query_colab]: https://colab.research.google.com/github/lancedb/vectordb-recipes/blob/main/examples/archived_examples/QueryExpansion&Reranker/main.ipynb -[fusion_github]: https://github.com/lancedb/vectordb-recipes/blob/main/examples/RAG_Fusion -[fusion_colab]: https://colab.research.google.com/github/lancedb/vectordb-recipes/blob/main/examples/RAG_Fusion/main.ipynb +[fusion_github]: https://github.com/lancedb/vectordb-recipes/tree/main/examples/archived_examples/RAG_Fusion +[fusion_colab]: https://colab.research.google.com/github/lancedb/vectordb-recipes/blob/main/examples/archived_examples/RAG_Fusion/main.ipynb [agentic_github]: https://github.com/lancedb/vectordb-recipes/blob/main/tutorials/Agentic_RAG [agentic_colab]: https://colab.research.google.com/github/lancedb/vectordb-recipes/blob/main/tutorials/Agentic_RAG/main.ipynb diff --git a/docs/src/examples/python_examples/recommendersystem.md b/docs/src/examples/python_examples/recommendersystem.md index 12ce7780..a7d91143 100644 --- a/docs/src/examples/python_examples/recommendersystem.md +++ b/docs/src/examples/python_examples/recommendersystem.md @@ -19,8 +19,8 @@ Deliver personalized experiences with Recommender Systems. 🎁 [movie_python]: https://github.com/lancedb/vectordb-recipes/blob/main/examples/movie-recommender/main.py -[genre_github]: https://github.com/lancedb/vectordb-recipes/blob/main/examples/movie-recommendation-with-genres -[genre_colab]: https://colab.research.google.com/github/lancedb/vectordb-recipes/blob/main/examples/movie-recommendation-with-genres/movie_recommendation_with_doc2vec_and_lancedb.ipynb +[genre_github]: https://github.com/lancedb/vectordb-recipes/tree/main/examples/archived_examples/movie-recommendation-with-genres +[genre_colab]: https://colab.research.google.com/github/lancedb/vectordb-recipes/blob/main/examples/archived_examples/movie-recommendation-with-genres/movie_recommendation_with_doc2vec_and_lancedb.ipynb [genre_ghost]: https://blog.lancedb.com/movie-recommendation-system-using-lancedb-and-doc2vec/ [product_github]: https://github.com/lancedb/vectordb-recipes/blob/main/examples/product-recommender @@ -33,5 +33,5 @@ Deliver personalized experiences with Recommender Systems. 
🎁 [arxiv_python]: https://github.com/lancedb/vectordb-recipes/blob/main/examples/arxiv-recommender/main.py -[food_github]: https://github.com/lancedb/vectordb-recipes/blob/main/examples/Food_recommendation -[food_colab]: https://colab.research.google.com/github/lancedb/vectordb-recipes/blob/main/examples/Food_recommendation/main.ipynb +[food_github]: https://github.com/lancedb/vectordb-recipes/tree/main/examples/archived_examples/Food_recommendation +[food_colab]: https://colab.research.google.com/github/lancedb/vectordb-recipes/blob/main/examples/archived_examples/Food_recommendation/main.ipynb diff --git a/docs/src/examples/python_examples/vector_search.md b/docs/src/examples/python_examples/vector_search.md index 8561f716..1861e6dc 100644 --- a/docs/src/examples/python_examples/vector_search.md +++ b/docs/src/examples/python_examples/vector_search.md @@ -37,16 +37,16 @@ LanceDB implements vector search algorithms for efficient document retrieval and [NER_colab]: https://colab.research.google.com/github/lancedb/vectordb-recipes/blob/main/tutorials/NER-powered-Semantic-Search/NER_powered_Semantic_Search_with_LanceDB.ipynb [NER_ghost]: https://blog.lancedb.com/ner-powered-semantic-search-using-lancedb-51051dc3e493 -[audio_search_github]: https://github.com/lancedb/vectordb-recipes/blob/main/examples/audio_search -[audio_search_colab]: https://colab.research.google.com/github/lancedb/vectordb-recipes/blob/main/examples/audio_search/main.ipynb -[audio_search_python]: https://github.com/lancedb/vectordb-recipes/blob/main/examples/audio_search/main.py +[audio_search_github]: https://github.com/lancedb/vectordb-recipes/tree/main/examples/archived_examples/audio_search +[audio_search_colab]: https://colab.research.google.com/github/lancedb/vectordb-recipes/blob/main/examples/archived_examples/audio_search/main.ipynb +[audio_search_python]: https://github.com/lancedb/vectordb-recipes/blob/main/examples/archived_examples/audio_search/main.py -[mls_github]: https://github.com/lancedb/vectordb-recipes/blob/main/examples/multi-lingual-wiki-qa -[mls_colab]: https://colab.research.google.com/github/lancedb/vectordb-recipes/blob/main/examples/multi-lingual-wiki-qa/main.ipynb -[mls_python]: https://github.com/lancedb/vectordb-recipes/blob/main/examples/multi-lingual-wiki-qa/main.py +[mls_github]: https://github.com/lancedb/vectordb-recipes/tree/main/examples/archived_examples/multi-lingual-wiki-qa +[mls_colab]: https://colab.research.google.com/github/lancedb/vectordb-recipes/blob/main/examples/archived_examples/multi-lingual-wiki-qa/main.ipynb +[mls_python]: https://github.com/lancedb/vectordb-recipes/blob/main/examples/archived_examples/multi-lingual-wiki-qa/main.py -[fr_github]: https://github.com/lancedb/vectordb-recipes/blob/main/examples/facial_recognition -[fr_colab]: https://colab.research.google.com/github/lancedb/vectordb-recipes/blob/main/examples/facial_recognition/main.ipynb +[fr_github]: https://github.com/lancedb/vectordb-recipes/tree/main/examples/archived_examples/facial_recognition +[fr_colab]: https://colab.research.google.com/github/lancedb/vectordb-recipes/blob/main/examples/archived_examples/facial_recognition/main.ipynb [sentiment_analysis_github]: https://github.com/lancedb/vectordb-recipes/blob/main/examples/Sentiment-Analysis-Analyse-Hotel-Reviews [sentiment_analysis_colab]: https://colab.research.google.com/github/lancedb/vectordb-recipes/blob/main/examples/Sentiment-Analysis-Analyse-Hotel-Reviews/Sentiment_Analysis_using_LanceDB.ipynb @@ -70,8 +70,8 @@ LanceDB implements 
vector search algorithms for efficient document retrieval and [openvino_colab]: https://colab.research.google.com/github/lancedb/vectordb-recipes/blob/main/examples/Accelerate-Vector-Search-Applications-Using-OpenVINO/clip_text_image_search.ipynb [openvino_ghost]: https://blog.lancedb.com/accelerate-vector-search-applications-using-openvino-lancedb/ -[zsic_github]: https://github.com/lancedb/vectordb-recipes/blob/main/examples/zero-shot-image-classification -[zsic_colab]: https://colab.research.google.com/github/lancedb/vectordb-recipes/blob/main/examples/zero-shot-image-classification/main.ipynb +[zsic_github]: https://github.com/lancedb/vectordb-recipes/tree/main/examples/archived_examples/zero-shot-image-classification +[zsic_colab]: https://colab.research.google.com/github/lancedb/vectordb-recipes/blob/main/examples/archived_examples/zero-shot-image-classification/main.ipynb [zsic_ghost]: https://blog.lancedb.com/zero-shot-image-classification-with-vector-search/ diff --git a/docs/src/integrations/phidata.md b/docs/src/integrations/phidata.md index ad5ecf47..fbabcf70 100644 --- a/docs/src/integrations/phidata.md +++ b/docs/src/integrations/phidata.md @@ -45,7 +45,7 @@ Let's see how using LanceDB inside phidata helps in making LLM more useful: **Install the following packages in the virtual environment** ```python -pip install lancedb phidata youtube_transcript_api openai ollama pandas numpy +pip install lancedb phidata youtube_transcript_api openai ollama numpy pandas ``` **Create python files and import necessary libraries** From d71df4572e89321c479f655816b761a7d9dc3f21 Mon Sep 17 00:00:00 2001 From: Rithik Kumar <46047011+rithikJha@users.noreply.github.com> Date: Tue, 29 Oct 2024 22:55:50 +0530 Subject: [PATCH 42/51] docs: revamp langchain integration page (#1773) Before - Screenshot 2024-10-28 132932 After - Screenshot 2024-10-28 132727 --- docs/src/integrations/langchain.md | 209 +++++++++++++++++++---------- 1 file changed, 137 insertions(+), 72 deletions(-) diff --git a/docs/src/integrations/langchain.md b/docs/src/integrations/langchain.md index ec3dbe22..157428ba 100644 --- a/docs/src/integrations/langchain.md +++ b/docs/src/integrations/langchain.md @@ -1,5 +1,10 @@ -# Langchain -![Illustration](../assets/langchain.png) +**LangChain** is a framework designed for building applications with large language models (LLMs) by chaining together various components. It supports a range of functionalities including memory, agents, and chat models, enabling developers to create context-aware applications. + +![Illustration](https://raw.githubusercontent.com/lancedb/assets/refs/heads/main/docs/assets/integration/langchain_rag.png) + +LangChain streamlines these stages (in figure above) by providing pre-built components and tools for integration, memory management, and deployment, allowing developers to focus on application logic rather than underlying complexities. + +Integration of **Langchain** with **LanceDB** enables applications to retrieve the most relevant data by comparing query vectors against stored vectors, facilitating effective information retrieval. It results in better and context aware replies and actions by the LLMs. ## Quick Start You can load your document data using langchain's loaders, for this example we are using `TextLoader` and `OpenAIEmbeddings` as the embedding model. 
Check out the complete example here - [LangChain demo](../notebooks/langchain_example.ipynb)
@@ -26,20 +31,28 @@ print(docs[0].page_content)
## Documentation
In the above example, the `LanceDB` vector store object is created using the `from_documents()` method, which is a `classmethod` that returns the initialized class object.
+
You can also use the `LanceDB.from_texts(texts: List[str],embedding: Embeddings)` class method.
-The exhaustive list of parameters for `LanceDB` vector store are :
-- `connection`: (Optional) `lancedb.db.LanceDBConnection` connection object to use. If not provided, a new connection will be created.
-- `embedding`: Langchain embedding model.
-- `vector_key`: (Optional) Column name to use for vector's in the table. Defaults to `'vector'`.
-- `id_key`: (Optional) Column name to use for id's in the table. Defaults to `'id'`.
-- `text_key`: (Optional) Column name to use for text in the table. Defaults to `'text'`.
-- `table_name`: (Optional) Name of your table in the database. Defaults to `'vectorstore'`.
-- `api_key`: (Optional) API key to use for LanceDB cloud database. Defaults to `None`.
-- `region`: (Optional) Region to use for LanceDB cloud database. Only for LanceDB Cloud, defaults to `None`.
-- `mode`: (Optional) Mode to use for adding data to the table. Defaults to `'overwrite'`.
-- `reranker`: (Optional) The reranker to use for LanceDB.
-- `relevance_score_fn`: (Optional[Callable[[float], float]]) Langchain relevance score function to be used. Defaults to `None`.
+The exhaustive list of parameters for the `LanceDB` vector store is:
+
+|Name|type|Purpose|default|
|:----|:----|:----|:----|
|`connection`| (Optional) `Any` |`lancedb.db.LanceDBConnection` connection object to use. If not provided, a new connection will be created.|`None`|
|`embedding`| (Optional) `Embeddings` | Langchain embedding model.|Provided by user.|
|`uri`| (Optional) `str` |It specifies the directory location of the **LanceDB database** and establishes a connection that can be used to interact with the database. |`/tmp/lancedb`|
|`vector_key` |(Optional) `str`| Column name to use for vectors in the table.|`'vector'`|
|`id_key` |(Optional) `str`| Column name to use for ids in the table.|`'id'`|
|`text_key` |(Optional) `str` |Column name to use for text in the table.|`'text'`|
|`table_name` |(Optional) `str`| Name of your table in the database.|`'vectorstore'`|
|`api_key` |(Optional) `str` |API key to use for the LanceDB Cloud database.|`None`|
|`region` |(Optional) `str`| Region to use for the LanceDB Cloud database.|Only for LanceDB Cloud: `None`.|
|`mode` |(Optional) `str` |Mode to use for adding data to the table.
Valid values are "append" and "overwrite".|`'overwrite'`|
+|`table`| (Optional) `Any`|You can connect to an existing table of LanceDB, created outside of langchain, and utilize it.|`None`|
+|`distance`|(Optional) `str`|The choice of distance metric used to calculate the similarity between vectors.|`'l2'`|
+|`reranker` |(Optional) `Any`|The reranker to use for LanceDB.|`None`|
+|`relevance_score_fn` |(Optional) `Callable[[float], float]` | Langchain relevance score function to be used.|`None`|
+|`limit`|`int`|Set the maximum number of results to return.|`DEFAULT_K` (it is 4)|
```python
db_url = "db://lang_test" # url of db you created
@@ -51,19 +64,24 @@ vector_store = LanceDB(
 api_key=api_key, #(dont include for local API)
 region=region, #(dont include for local API)
 embedding=embeddings,
- table_name='langchain_test' #Optional
+ table_name='langchain_test' # Optional
 )
 ```
 ### Methods
 ##### add_texts()
-- `texts`: `Iterable` of strings to add to the vectorstore.
-- `metadatas`: Optional `list[dict()]` of metadatas associated with the texts.
-- `ids`: Optional `list` of ids to associate with the texts.
-- `kwargs`: `Any`
-This method adds texts and stores respective embeddings automatically.
+This method turns texts into embeddings and adds them to the database.
+
+|Name|Purpose|defaults|
|:---|:---|:---|
|`texts`|`Iterable` of strings to add to the vectorstore.|Provided by user|
|`metadatas`|Optional `list[dict()]` of metadatas associated with the texts.|`None`|
|`ids`|Optional `list` of ids to associate with the texts.|`None`|
|`kwargs`| Other keyworded arguments provided by the user. |-|
+
+It returns a list of ids of the added texts.
```python
vector_store.add_texts(texts = ['test_123'], metadatas =[{'source' :'wiki'}])
@@ -78,14 +96,25 @@ pd_df.to_csv("docsearch.csv", index=False)
# you can also create a new vector store object using an older connection object:
vector_store = LanceDB(connection=tbl, embedding=embeddings)
```
-##### create_index()
-- `col_name`: `Optional[str] = None`
-- `vector_col`: `Optional[str] = None`
-- `num_partitions`: `Optional[int] = 256`
-- `num_sub_vectors`: `Optional[int] = 96`
-- `index_cache_size`: `Optional[int] = None`
-This method creates an index for the vector store. For index creation make sure your table has enough data in it. An ANN index is ususally not needed for datasets ~100K vectors. For large-scale (>1M) or higher dimension vectors, it is beneficial to create an ANN index.
+------
+
+
+##### create_index()
+
+This method creates a scalar (for non-vector columns) or a vector index on a table.
+
+|Name|type|Purpose|defaults|
|:---|:---|:---|:---|
|`vector_col`|`Optional[str]`| Provide if you want to create an index on a vector column. |`None`|
|`col_name`|`Optional[str]`| Provide if you want to create an index on a non-vector column. |`None`|
|`metric`|`Optional[str]` |Provide the metric to use for the vector index. Choice of metrics: 'L2', 'dot', 'cosine'. |`L2`|
|`num_partitions`|`Optional[int]`|Number of partitions to use for the index.|`256`|
|`num_sub_vectors`|`Optional[int]` |Number of sub-vectors to use for the index.|`96`|
|`index_cache_size`|`Optional[int]` |Size of the index cache.|`None`|
|`name`|`Optional[str]` |Name of the table to create index on.|`None`|
+
+For index creation, make sure your table has enough data in it. An ANN index is usually not needed for datasets of ~100K vectors. For large-scale (>1M) or higher dimension vectors, it is beneficial to create an ANN index.
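In addition to the basic calls shown in the next snippet, the ANN-related parameters listed above can be tuned explicitly for larger tables. A minimal sketch (the parameter names come from the table above; the values here are illustrative assumptions, not tuned recommendations):

```python
# Hypothetical tuning for a large (>1M row) table; adjust values for your data.
vector_store.create_index(
    vector_col="vector",   # column holding the embeddings
    metric="cosine",       # one of 'L2', 'dot', 'cosine'
    num_partitions=256,    # number of partitions to use for the index
    num_sub_vectors=96,    # number of sub-vectors used per embedding
)
```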
```python # for creating vector index @@ -96,42 +125,63 @@ vector_store.create_index(col_name='text') ``` -##### similarity_search() -- `query`: `str` -- `k`: `Optional[int] = None` -- `filter`: `Optional[Dict[str, str]] = None` -- `fts`: `Optional[bool] = False` -- `name`: `Optional[str] = None` -- `kwargs`: `Any` +------ -Return documents most similar to the query without relevance scores +##### similarity_search() + +This method performs similarity search based on **text query**. + +| Name | Type | Purpose | Default | +|---------|----------------------|---------|---------| +| `query` | `str` | A `str` representing the text query that you want to search for in the vector store. | N/A | +| `k` | `Optional[int]` | It specifies the number of documents to return. | `None` | +| `filter` | `Optional[Dict[str, str]]`| It is used to filter the search results by specific metadata criteria. | `None` | +| `fts` | `Optional[bool]` | It indicates whether to perform a full-text search (FTS). | `False` | +| `name` | `Optional[str]` | It is used for specifying the name of the table to query. If not provided, it uses the default table set during the initialization of the LanceDB instance. | `None` | +| `kwargs` | `Any` | Other keyworded arguments provided by the user. | N/A | + +Return documents most similar to the query **without relevance scores**. ```python docs = docsearch.similarity_search(query) print(docs[0].page_content) ``` -##### similarity_search_by_vector() -- `embedding`: `List[float]` -- `k`: `Optional[int] = None` -- `filter`: `Optional[Dict[str, str]] = None` -- `name`: `Optional[str] = None` -- `kwargs`: `Any` +------ -Returns documents most similar to the query vector. +##### similarity_search_by_vector() + +The method returns documents that are most similar to the specified **embedding (query) vector**. + +| Name | Type | Purpose | Default | +|-------------|---------------------------|---------|---------| +| `embedding` | `List[float]` | The embedding vector you want to use to search for similar documents in the vector store. | N/A | +| `k` | `Optional[int]` | It specifies the number of documents to return. | `None` | +| `filter` | `Optional[Dict[str, str]]`| It is used to filter the search results by specific metadata criteria. | `None` | +| `name` | `Optional[str]` | It is used for specifying the name of the table to query. If not provided, it uses the default table set during the initialization of the LanceDB instance. | `None` | +| `kwargs` | `Any` | Other keyworded arguments provided by the user. | N/A | + +**It does not provide relevance scores.** ```python docs = docsearch.similarity_search_by_vector(query) print(docs[0].page_content) ``` -##### similarity_search_with_score() -- `query`: `str` -- `k`: `Optional[int] = None` -- `filter`: `Optional[Dict[str, str]] = None` -- `kwargs`: `Any` +------ -Returns documents most similar to the query string with relevance scores, gets called by base class's `similarity_search_with_relevance_scores` which selects relevance score based on our `_select_relevance_score_fn`. +##### similarity_search_with_score() + +Returns documents most similar to the **query string** along with their relevance scores. + +| Name | Type | Purpose | Default | +|----------|---------------------------|---------|---------| +| `query` | `str` |A `str` representing the text query you want to search for in the vector store. This query will be converted into an embedding using the specified embedding function. 
| N/A | +| `k` | `Optional[int]` | It specifies the number of documents to return. | `None` | +| `filter` | `Optional[Dict[str, str]]`| It is used to filter the search results by specific metadata criteria. This allows you to narrow down the search results based on certain metadata attributes associated with the documents. | `None` | +| `kwargs` | `Any` | Other keyworded arguments provided by the user. | N/A | + +It gets called by base class's `similarity_search_with_relevance_scores` which selects relevance score based on our `_select_relevance_score_fn`. ```python docs = docsearch.similarity_search_with_relevance_scores(query) @@ -139,15 +189,21 @@ print("relevance score - ", docs[0][1]) print("text- ", docs[0][0].page_content[:1000]) ``` -##### similarity_search_by_vector_with_relevance_scores() -- `embedding`: `List[float]` -- `k`: `Optional[int] = None` -- `filter`: `Optional[Dict[str, str]] = None` -- `name`: `Optional[str] = None` -- `kwargs`: `Any` +------ -Return documents most similar to the query vector with relevance scores. -Relevance score +##### similarity_search_by_vector_with_relevance_scores() + +Similarity search using **query vector**. + +| Name | Type | Purpose | Default | +|-------------|---------------------------|---------|---------| +| `embedding` | `List[float]` | The embedding vector you want to use to search for similar documents in the vector store. | N/A | +| `k` | `Optional[int]` | It specifies the number of documents to return. | `None` | +| `filter` | `Optional[Dict[str, str]]`| It is used to filter the search results by specific metadata criteria. | `None` | +| `name` | `Optional[str]` | It is used for specifying the name of the table to query. | `None` | +| `kwargs` | `Any` | Other keyworded arguments provided by the user. | N/A | + +The method returns documents most similar to the specified embedding (query) vector, along with their relevance scores. ```python docs = docsearch.similarity_search_by_vector_with_relevance_scores(query_embedding) @@ -155,20 +211,22 @@ print("relevance score - ", docs[0][1]) print("text- ", docs[0][0].page_content[:1000]) ``` -##### max_marginal_relevance_search() -- `query`: `str` -- `k`: `Optional[int] = None` -- `fetch_k` : Number of Documents to fetch to pass to MMR algorithm, `Optional[int] = None` -- `lambda_mult`: Number between 0 and 1 that determines the degree - of diversity among the results with 0 corresponding - to maximum diversity and 1 to minimum diversity. - Defaults to 0.5. `float = 0.5` -- `filter`: `Optional[Dict[str, str]] = None` -- `kwargs`: `Any` +------ -Returns docs selected using the maximal marginal relevance(MMR). +##### max_marginal_relevance_search() + +This method returns docs selected using the maximal marginal relevance(MMR). Maximal marginal relevance optimizes for similarity to query AND diversity among selected documents. +| Name | Type | Purpose | Default | +|---------------|-----------------|-----------|---------| +| `query` | `str` | Text to look up documents similar to. | N/A | +| `k` | `Optional[int]` | Number of Documents to return.| `4` | +| `fetch_k`| `Optional[int]`| Number of Documents to fetch to pass to MMR algorithm.| `None` | +| `lambda_mult` | `float` | Number between 0 and 1 that determines the degree of diversity among the results with 0 corresponding to maximum diversity and 1 to minimum diversity. | `0.5` | +| `filter`| `Optional[Dict[str, str]]`| Filter by metadata. | `None` | +|`kwargs`| Other keyworded arguments provided by the user. 
|-| + Similarly, `max_marginal_relevance_search_by_vector()` function returns docs most similar to the embedding passed to the function using MMR. instead of a string query you need to pass the embedding to be searched for. ```python @@ -186,12 +244,19 @@ result_texts = [doc.page_content for doc in result] print(result_texts) ``` -##### add_images() -- `uris` : File path to the image. `List[str]`. -- `metadatas` : Optional list of metadatas. `(Optional[List[dict]], optional)` -- `ids` : Optional list of IDs. `(Optional[List[str]], optional)` +------ -Adds images by automatically creating their embeddings and adds them to the vectorstore. +##### add_images() + +This method ddds images by automatically creating their embeddings and adds them to the vectorstore. + +| Name | Type | Purpose | Default | +|------------|-------------------------------|--------------------------------|---------| +| `uris` | `List[str]` | File path to the image | N/A | +| `metadatas`| `Optional[List[dict]]` | Optional list of metadatas | `None` | +| `ids` | `Optional[List[str]]` | Optional list of IDs | `None` | + +It returns list of IDs of the added images. ```python vec_store.add_images(uris=image_uris) From 55104c5bae87dbe3af6f1b4c2ada52c3beeb77bc Mon Sep 17 00:00:00 2001 From: Weston Pace Date: Tue, 29 Oct 2024 13:51:18 -0700 Subject: [PATCH 43/51] feat: allow distance type (metric) to be specified during hybrid search (#1777) --- python/python/lancedb/conftest.py | 19 ++++++++++++++++- python/python/lancedb/query.py | 24 +++++++++++++++++++++- python/python/tests/test_table.py | 34 ++++++++++++++++++++++++++----- 3 files changed, 70 insertions(+), 7 deletions(-) diff --git a/python/python/lancedb/conftest.py b/python/python/lancedb/conftest.py index 7a6a5fd1..a1c748f5 100644 --- a/python/python/lancedb/conftest.py +++ b/python/python/lancedb/conftest.py @@ -26,7 +26,7 @@ registry = EmbeddingFunctionRegistry.get_instance() @registry.register("test") class MockTextEmbeddingFunction(TextEmbeddingFunction): """ - Return the hash of the first 10 characters + Return the hash of the first 10 characters (normalized) """ def generate_embeddings(self, texts): @@ -41,6 +41,23 @@ class MockTextEmbeddingFunction(TextEmbeddingFunction): return 10 +@registry.register("nonnorm") +class MockNonNormTextEmbeddingFunction(TextEmbeddingFunction): + """ + Return the ord of the first 10 characters (not normalized) + """ + + def generate_embeddings(self, texts): + return [self._compute_one_embedding(row) for row in texts] + + def _compute_one_embedding(self, row): + emb = np.array([float(ord(c)) for c in row[:10]]) + return emb if len(emb) == 10 else [0] * 10 + + def ndims(self): + return 10 + + class RateLimitedAPI: rate_limit = 0.1 # 1 request per 0.1 second last_request_time = 0 diff --git a/python/python/lancedb/query.py b/python/python/lancedb/query.py index c79b8846..1062289e 100644 --- a/python/python/lancedb/query.py +++ b/python/python/lancedb/query.py @@ -983,6 +983,7 @@ class LanceHybridQueryBuilder(LanceQueryBuilder): self._reranker = RRFReranker() self._nprobes = None self._refine_factor = None + self._metric = None self._phrase_query = False def _validate_query(self, query, vector=None, text=None): @@ -1050,6 +1051,8 @@ class LanceHybridQueryBuilder(LanceQueryBuilder): self._fts_query.with_row_id(True) if self._phrase_query: self._fts_query.phrase_query(True) + if self._metric: + self._vector_query.metric(self._metric) if self._nprobes: self._vector_query.nprobes(self._nprobes) if self._refine_factor: @@ -1067,6 +1070,7 
@@ class LanceHybridQueryBuilder(LanceQueryBuilder): if self._norm == "rank": vector_results = self._rank(vector_results, "_distance") fts_results = self._rank(fts_results, "_score") + # normalize the scores to be between 0 and 1, 0 being most relevant vector_results = self._normalize_scores(vector_results, "_distance") @@ -1115,7 +1119,9 @@ class LanceHybridQueryBuilder(LanceQueryBuilder): rng = max else: rng = max - min - scores = (scores - min) / rng + # If rng is 0 then min and max are both 0 and so we can leave the scores as is + if rng != 0: + scores = (scores - min) / rng if invert: scores = 1 - scores # replace the _score column with the ranks @@ -1177,6 +1183,22 @@ class LanceHybridQueryBuilder(LanceQueryBuilder): self._nprobes = nprobes return self + def metric(self, metric: Literal["L2", "cosine", "dot"]) -> LanceHybridQueryBuilder: + """Set the distance metric to use. + + Parameters + ---------- + metric: "L2" or "cosine" or "dot" + The distance metric to use. By default "L2" is used. + + Returns + ------- + LanceVectorQueryBuilder + The LanceQueryBuilder object. + """ + self._metric = metric.lower() + return self + def refine_factor(self, refine_factor: int) -> LanceHybridQueryBuilder: """ Refine the vector search results by reading extra elements and diff --git a/python/python/tests/test_table.py b/python/python/tests/test_table.py index 65ec7b3c..bdf22ddf 100644 --- a/python/python/tests/test_table.py +++ b/python/python/tests/test_table.py @@ -991,13 +991,10 @@ def test_count_rows(db): assert table.count_rows(filter="text='bar'") == 1 -def test_hybrid_search(db, tmp_path): - # This test uses an FTS index - pytest.importorskip("lancedb.fts") - +def setup_hybrid_search_table(tmp_path, embedding_func): db = MockDB(str(tmp_path)) # Create a LanceDB table schema with a vector and a text column - emb = EmbeddingFunctionRegistry.get_instance().get("test")() + emb = EmbeddingFunctionRegistry.get_instance().get(embedding_func)() class MyTable(LanceModel): text: str = emb.SourceField() @@ -1030,6 +1027,15 @@ def test_hybrid_search(db, tmp_path): # Create a fts index table.create_fts_index("text") + return table, MyTable, emb + + +def test_hybrid_search(tmp_path): + # This test uses an FTS index + pytest.importorskip("lancedb.fts") + + table, MyTable, emb = setup_hybrid_search_table(tmp_path, "test") + result1 = ( table.search("Our father who art in heaven", query_type="hybrid") .rerank(normalize="score") @@ -1094,6 +1100,24 @@ def test_hybrid_search(db, tmp_path): table.search(query_type="hybrid").text("Arrrrggghhhhhhh").to_list() +def test_hybrid_search_metric_type(db, tmp_path): + # This test uses an FTS index + pytest.importorskip("lancedb.fts") + + # Need to use nonnorm as the embedding function so L2 and dot results + # are different + table, _, _ = setup_hybrid_search_table(tmp_path, "nonnorm") + + # with custom metric + result_dot = ( + table.search("feeling lucky", query_type="hybrid").metric("dot").to_arrow() + ) + result_l2 = table.search("feeling lucky", query_type="hybrid").to_arrow() + assert len(result_dot) > 0 + assert len(result_l2) > 0 + assert result_dot["_relevance_score"] != result_l2["_relevance_score"] + + @pytest.mark.parametrize( "consistency_interval", [None, timedelta(seconds=0), timedelta(seconds=0.1)] ) From a324f4ad7aaded8893e73c29f7f0fd94c1404d8f Mon Sep 17 00:00:00 2001 From: Will Jones Date: Tue, 29 Oct 2024 15:13:34 -0700 Subject: [PATCH 44/51] feat(node): enable logging and show full errors (#1775) This exposes the `LANCEDB_LOG` environment variable 
in node, so that users can now turn on logging. In addition, fixes a bug where only the top-level error from Rust was being shown. This PR makes sure the full error chain is included in the error message. In the future, will improve this so the error chain is set on the [cause](https://nodejs.org/api/errors.html#errorcause) property of JS errors https://github.com/lancedb/lancedb/issues/1779 Fixes #1774 --- Cargo.toml | 1 + docs/mkdocs.yml | 3 +- docs/src/troubleshooting.md | 33 ++++++++++++++++++++++ nodejs/Cargo.toml | 2 ++ nodejs/__test__/remote.test.ts | 25 ++++++++++++++++ nodejs/src/connection.rs | 38 ++++++------------------- nodejs/src/error.rs | 27 +++++++++++++++++- nodejs/src/lib.rs | 9 ++++++ nodejs/src/merge.rs | 16 +++++++---- nodejs/src/query.rs | 21 +++++++++++--- nodejs/src/table.rs | 47 +++++-------------------------- python/Cargo.toml | 2 +- rust/lancedb/src/remote/client.rs | 13 ++++++++- 13 files changed, 155 insertions(+), 82 deletions(-) create mode 100644 docs/src/troubleshooting.md diff --git a/Cargo.toml b/Cargo.toml index 9d5725f1..263fab98 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -41,6 +41,7 @@ async-trait = "0" chrono = "0.4.35" datafusion-common = "41.0" datafusion-physical-plan = "41.0" +env_logger = "0.10" half = { "version" = "=2.4.1", default-features = false, features = [ "num-traits", ] } diff --git a/docs/mkdocs.yml b/docs/mkdocs.yml index 0f64a0a3..0bb1ebbe 100644 --- a/docs/mkdocs.yml +++ b/docs/mkdocs.yml @@ -211,9 +211,10 @@ nav: - TransformersJS Embedding Search: examples/transformerjs_embedding_search_nodejs.md - πŸ¦€ Rust: - Overview: examples/examples_rust.md - - Studies: + - πŸ““ Studies: - β†—Improve retrievers with hybrid search and reranking: https://blog.lancedb.com/hybrid-search-and-reranking-report/ - πŸ’­ FAQs: faq.md + - πŸ” Troubleshooting: troubleshooting.md - βš™οΈ API reference: - 🐍 Python: python/python.md - πŸ‘Ύ JavaScript (vectordb): javascript/modules.md diff --git a/docs/src/troubleshooting.md b/docs/src/troubleshooting.md new file mode 100644 index 00000000..595753d6 --- /dev/null +++ b/docs/src/troubleshooting.md @@ -0,0 +1,33 @@ +## Getting help + +The following sections provide various diagnostics and troubleshooting tips for LanceDB. +These can help you provide additional information when asking questions or making +error reports. + +For trouble shooting, the best place to ask is in our Discord, under the relevant +language channel. By asking in the language-specific channel, it makes it more +likely that someone who knows the answer will see your question. + +## Enabling logging + +To provide more information, especially for LanceDB Cloud related issues, enable +debug logging. You can set the `LANCEDB_LOG` environment variable: + +```shell +export LANCEDB_LOG=debug +``` + +You can turn off colors and formatting in the logs by setting + +```shell +export LANCEDB_LOG_STYLE=never +``` + +## Explaining query plans + +If you have slow queries or unexpected query results, it can be helpful to +print the resolved query plan. 
You can use the `explain_plan` method to do this: + +* Python Sync: [LanceQueryBuilder.explain_plan][lancedb.query.LanceQueryBuilder.explain_plan] +* Python Async: [AsyncQueryBase.explain_plan][lancedb.query.AsyncQueryBase.explain_plan] +* Node @lancedb/lancedb: [LanceQueryBuilder.explainPlan](/lancedb/js/classes/QueryBase/#explainplan) diff --git a/nodejs/Cargo.toml b/nodejs/Cargo.toml index f8c0dbb0..458eb4ef 100644 --- a/nodejs/Cargo.toml +++ b/nodejs/Cargo.toml @@ -13,6 +13,7 @@ crate-type = ["cdylib"] [dependencies] arrow-ipc.workspace = true +env_logger.workspace = true futures.workspace = true lancedb = { path = "../rust/lancedb", features = ["remote"] } napi = { version = "2.16.8", default-features = false, features = [ @@ -22,6 +23,7 @@ napi = { version = "2.16.8", default-features = false, features = [ napi-derive = "2.16.4" # Prevent dynamic linking of lzma, which comes from datafusion lzma-sys = { version = "*", features = ["static"] } +log.workspace = true [build-dependencies] napi-build = "2.1" diff --git a/nodejs/__test__/remote.test.ts b/nodejs/__test__/remote.test.ts index 3e693197..6dbac639 100644 --- a/nodejs/__test__/remote.test.ts +++ b/nodejs/__test__/remote.test.ts @@ -90,4 +90,29 @@ describe("remote connection", () => { }, ); }); + + it("shows the full error messages on retry errors", async () => { + await withMockDatabase( + (_req, res) => { + // We retry on 500 errors, so we return 500s until the client gives up. + res.writeHead(500).end("Internal Server Error"); + }, + async (db) => { + try { + await db.tableNames(); + fail("expected an error"); + // biome-ignore lint/suspicious/noExplicitAny: skip + } catch (e: any) { + expect(e.message).toContain("Hit retry limit for request_id="); + expect(e.message).toContain("Caused by: Http error"); + expect(e.message).toContain("500 Internal Server Error"); + } + }, + { + clientConfig: { + retryConfig: { retries: 2 }, + }, + }, + ); + }); }); diff --git a/nodejs/src/connection.rs b/nodejs/src/connection.rs index 9f2a7305..01c41239 100644 --- a/nodejs/src/connection.rs +++ b/nodejs/src/connection.rs @@ -18,6 +18,7 @@ use std::str::FromStr; use napi::bindgen_prelude::*; use napi_derive::*; +use crate::error::{convert_error, NapiErrorExt}; use crate::table::Table; use crate::ConnectionOptions; use lancedb::connection::{ @@ -86,12 +87,7 @@ impl Connection { builder = builder.host_override(&host_override); } - Ok(Self::inner_new( - builder - .execute() - .await - .map_err(|e| napi::Error::from_reason(format!("{}", e)))?, - )) + Ok(Self::inner_new(builder.execute().await.default_error()?)) } #[napi] @@ -123,9 +119,7 @@ impl Connection { if let Some(limit) = limit { op = op.limit(limit); } - op.execute() - .await - .map_err(|e| napi::Error::from_reason(format!("{}", e))) + op.execute().await.default_error() } /// Create table from a Apache Arrow IPC (file) buffer. 
@@ -156,17 +150,13 @@ impl Connection { } if let Some(data_storage_option) = data_storage_options.as_ref() { builder = builder.data_storage_version( - LanceFileVersion::from_str(data_storage_option) - .map_err(|e| napi::Error::from_reason(format!("{}", e)))?, + LanceFileVersion::from_str(data_storage_option).map_err(|e| convert_error(&e))?, ); } if let Some(enable_v2_manifest_paths) = enable_v2_manifest_paths { builder = builder.enable_v2_manifest_paths(enable_v2_manifest_paths); } - let tbl = builder - .execute() - .await - .map_err(|e| napi::Error::from_reason(format!("{}", e)))?; + let tbl = builder.execute().await.default_error()?; Ok(Table::new(tbl)) } @@ -195,17 +185,13 @@ impl Connection { } if let Some(data_storage_option) = data_storage_options.as_ref() { builder = builder.data_storage_version( - LanceFileVersion::from_str(data_storage_option) - .map_err(|e| napi::Error::from_reason(format!("{}", e)))?, + LanceFileVersion::from_str(data_storage_option).map_err(|e| convert_error(&e))?, ); } if let Some(enable_v2_manifest_paths) = enable_v2_manifest_paths { builder = builder.enable_v2_manifest_paths(enable_v2_manifest_paths); } - let tbl = builder - .execute() - .await - .map_err(|e| napi::Error::from_reason(format!("{}", e)))?; + let tbl = builder.execute().await.default_error()?; Ok(Table::new(tbl)) } @@ -225,19 +211,13 @@ impl Connection { if let Some(index_cache_size) = index_cache_size { builder = builder.index_cache_size(index_cache_size); } - let tbl = builder - .execute() - .await - .map_err(|e| napi::Error::from_reason(format!("{}", e)))?; + let tbl = builder.execute().await.default_error()?; Ok(Table::new(tbl)) } /// Drop table with the name. Or raise an error if the table does not exist. #[napi(catch_unwind)] pub async fn drop_table(&self, name: String) -> napi::Result<()> { - self.get_inner()? 
- .drop_table(&name) - .await - .map_err(|e| napi::Error::from_reason(format!("{}", e))) + self.get_inner()?.drop_table(&name).await.default_error() } } diff --git a/nodejs/src/error.rs b/nodejs/src/error.rs index ddbb4471..7fec4725 100644 --- a/nodejs/src/error.rs +++ b/nodejs/src/error.rs @@ -7,6 +7,31 @@ pub trait NapiErrorExt { impl NapiErrorExt for std::result::Result { fn default_error(self) -> Result { - self.map_err(|err| napi::Error::from_reason(err.to_string())) + self.map_err(|err| convert_error(&err)) } } + +pub fn convert_error(err: &dyn std::error::Error) -> napi::Error { + let mut message = err.to_string(); + + // Append causes + let mut cause = err.source(); + let mut indent = 2; + while let Some(err) = cause { + let cause_message = format!("Caused by: {}", err); + message.push_str(&indent_string(&cause_message, indent)); + + cause = err.source(); + indent += 2; + } + + napi::Error::from_reason(message) +} + +fn indent_string(s: &str, amount: usize) -> String { + let indent = " ".repeat(amount); + s.lines() + .map(|line| format!("{}{}", indent, line)) + .collect::>() + .join("\n") +} diff --git a/nodejs/src/lib.rs b/nodejs/src/lib.rs index 54fde9bc..a18bc75d 100644 --- a/nodejs/src/lib.rs +++ b/nodejs/src/lib.rs @@ -14,6 +14,7 @@ use std::collections::HashMap; +use env_logger::Env; use napi_derive::*; mod connection; @@ -77,3 +78,11 @@ pub struct WriteOptions { pub struct OpenTableOptions { pub storage_options: Option>, } + +#[napi::module_init] +fn init() { + let env = Env::new() + .filter_or("LANCEDB_LOG", "trace") + .write_style("LANCEDB_LOG_STYLE"); + env_logger::init_from_env(env); +} diff --git a/nodejs/src/merge.rs b/nodejs/src/merge.rs index f13091fc..9228ec44 100644 --- a/nodejs/src/merge.rs +++ b/nodejs/src/merge.rs @@ -2,6 +2,8 @@ use lancedb::{arrow::IntoArrow, ipc::ipc_file_to_batches, table::merge::MergeIns use napi::bindgen_prelude::*; use napi_derive::napi; +use crate::error::convert_error; + #[napi] #[derive(Clone)] /// A builder used to create and run a merge insert operation @@ -35,14 +37,18 @@ impl NativeMergeInsertBuilder { pub async fn execute(&self, buf: Buffer) -> napi::Result<()> { let data = ipc_file_to_batches(buf.to_vec()) .and_then(IntoArrow::into_arrow) - .map_err(|e| napi::Error::from_reason(format!("Failed to read IPC file: {}", e)))?; + .map_err(|e| { + napi::Error::from_reason(format!("Failed to read IPC file: {}", convert_error(&e))) + })?; let this = self.clone(); - this.inner - .execute(data) - .await - .map_err(|e| napi::Error::from_reason(format!("Failed to execute merge insert: {}", e))) + this.inner.execute(data).await.map_err(|e| { + napi::Error::from_reason(format!( + "Failed to execute merge insert: {}", + convert_error(&e) + )) + }) } } diff --git a/nodejs/src/query.rs b/nodejs/src/query.rs index d3fe7283..d0132699 100644 --- a/nodejs/src/query.rs +++ b/nodejs/src/query.rs @@ -22,6 +22,7 @@ use lancedb::query::VectorQuery as LanceDbVectorQuery; use napi::bindgen_prelude::*; use napi_derive::napi; +use crate::error::convert_error; use crate::error::NapiErrorExt; use crate::iterator::RecordBatchIterator; use crate::util::parse_distance_type; @@ -93,7 +94,10 @@ impl Query { .execute_with_options(execution_opts) .await .map_err(|e| { - napi::Error::from_reason(format!("Failed to execute query stream: {}", e)) + napi::Error::from_reason(format!( + "Failed to execute query stream: {}", + convert_error(&e) + )) })?; Ok(RecordBatchIterator::new(inner_stream)) } @@ -101,7 +105,10 @@ impl Query { #[napi] pub async fn explain_plan(&self, 
verbose: bool) -> napi::Result { self.inner.explain_plan(verbose).await.map_err(|e| { - napi::Error::from_reason(format!("Failed to retrieve the query plan: {}", e)) + napi::Error::from_reason(format!( + "Failed to retrieve the query plan: {}", + convert_error(&e) + )) }) } } @@ -190,7 +197,10 @@ impl VectorQuery { .execute_with_options(execution_opts) .await .map_err(|e| { - napi::Error::from_reason(format!("Failed to execute query stream: {}", e)) + napi::Error::from_reason(format!( + "Failed to execute query stream: {}", + convert_error(&e) + )) })?; Ok(RecordBatchIterator::new(inner_stream)) } @@ -198,7 +208,10 @@ impl VectorQuery { #[napi] pub async fn explain_plan(&self, verbose: bool) -> napi::Result { self.inner.explain_plan(verbose).await.map_err(|e| { - napi::Error::from_reason(format!("Failed to retrieve the query plan: {}", e)) + napi::Error::from_reason(format!( + "Failed to retrieve the query plan: {}", + convert_error(&e) + )) }) } } diff --git a/nodejs/src/table.rs b/nodejs/src/table.rs index f9b14e7f..b2da97de 100644 --- a/nodejs/src/table.rs +++ b/nodejs/src/table.rs @@ -72,10 +72,7 @@ impl Table { /// Return Schema as empty Arrow IPC file. #[napi(catch_unwind)] pub async fn schema(&self) -> napi::Result { - let schema = - self.inner_ref()?.schema().await.map_err(|e| { - napi::Error::from_reason(format!("Failed to create IPC file: {}", e)) - })?; + let schema = self.inner_ref()?.schema().await.default_error()?; let mut writer = FileWriter::try_new(vec![], &schema) .map_err(|e| napi::Error::from_reason(format!("Failed to create IPC file: {}", e)))?; writer @@ -100,12 +97,7 @@ impl Table { return Err(napi::Error::from_reason(format!("Invalid mode: {}", mode))); }; - op.execute().await.map_err(|e| { - napi::Error::from_reason(format!( - "Failed to add batches to table {}: {}", - self.name, e - )) - }) + op.execute().await.default_error() } #[napi(catch_unwind)] @@ -114,22 +106,12 @@ impl Table { .count_rows(filter) .await .map(|val| val as i64) - .map_err(|e| { - napi::Error::from_reason(format!( - "Failed to count rows in table {}: {}", - self.name, e - )) - }) + .default_error() } #[napi(catch_unwind)] pub async fn delete(&self, predicate: String) -> napi::Result<()> { - self.inner_ref()?.delete(&predicate).await.map_err(|e| { - napi::Error::from_reason(format!( - "Failed to delete rows in table {}: predicate={}", - self.name, e - )) - }) + self.inner_ref()?.delete(&predicate).await.default_error() } #[napi(catch_unwind)] @@ -187,12 +169,7 @@ impl Table { self.inner_ref()? .add_columns(transforms, None) .await - .map_err(|err| { - napi::Error::from_reason(format!( - "Failed to add columns to table {}: {}", - self.name, err - )) - })?; + .default_error()?; Ok(()) } @@ -213,12 +190,7 @@ impl Table { self.inner_ref()? .alter_columns(&alterations) .await - .map_err(|err| { - napi::Error::from_reason(format!( - "Failed to alter columns in table {}: {}", - self.name, err - )) - })?; + .default_error()?; Ok(()) } @@ -228,12 +200,7 @@ impl Table { self.inner_ref()? 
.drop_columns(&col_refs) .await - .map_err(|err| { - napi::Error::from_reason(format!( - "Failed to drop columns from table {}: {}", - self.name, err - )) - })?; + .default_error()?; Ok(()) } diff --git a/python/Cargo.toml b/python/Cargo.toml index 2d42f80f..372dc85a 100644 --- a/python/Cargo.toml +++ b/python/Cargo.toml @@ -16,7 +16,7 @@ crate-type = ["cdylib"] [dependencies] arrow = { version = "52.1", features = ["pyarrow"] } lancedb = { path = "../rust/lancedb" } -env_logger = "0.10" +env_logger.workspace = true pyo3 = { version = "0.21", features = ["extension-module", "abi3-py38", "gil-refs"] } # Using this fork for now: https://github.com/awestlake87/pyo3-asyncio/issues/119 # pyo3-asyncio = { version = "0.20", features = ["attributes", "tokio-runtime"] } diff --git a/rust/lancedb/src/remote/client.rs b/rust/lancedb/src/remote/client.rs index 83d5a14f..50789108 100644 --- a/rust/lancedb/src/remote/client.rs +++ b/rust/lancedb/src/remote/client.rs @@ -266,6 +266,7 @@ impl RestfulLanceDbClient { Some(host_override) => host_override, None => format!("https://{}.{}.api.lancedb.com", db_name, region), }; + debug!("Created client for host: {}", host); let retry_config = client_config.retry_config.try_into()?; Ok(Self { client, @@ -340,6 +341,8 @@ impl RestfulLanceDbClient { request_id }; + debug!("Sending request_id={}: {:?}", request_id, &request); + if with_retry { self.send_with_retry_impl(client, request, request_id).await } else { @@ -348,6 +351,10 @@ impl RestfulLanceDbClient { .send(&client, request) .await .err_to_http(request_id.clone())?; + debug!( + "Received response for request_id={}: {:?}", + request_id, &response + ); Ok((request_id, response)) } } @@ -374,7 +381,11 @@ impl RestfulLanceDbClient { .map(|r| (r.status(), r)); match response { Ok((status, response)) if status.is_success() => { - return Ok((retry_counter.request_id, response)) + debug!( + "Received response for request_id={}: {:?}", + retry_counter.request_id, &response + ); + return Ok((retry_counter.request_id, response)); } Ok((status, response)) if self.retry_config.statuses.contains(&status) => { let source = self From edc6445f6f9b8269ba6c2fcb5032f928c5337e21 Mon Sep 17 00:00:00 2001 From: Lance Release Date: Tue, 29 Oct 2024 22:16:05 +0000 Subject: [PATCH 45/51] =?UTF-8?q?Bump=20version:=200.14.1-beta.1=20?= =?UTF-8?q?=E2=86=92=200.15.0-beta.0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- python/.bumpversion.toml | 2 +- python/Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/python/.bumpversion.toml b/python/.bumpversion.toml index fad6ada8..eb4d9dde 100644 --- a/python/.bumpversion.toml +++ b/python/.bumpversion.toml @@ -1,5 +1,5 @@ [tool.bumpversion] -current_version = "0.14.1-beta.1" +current_version = "0.15.0-beta.0" parse = """(?x) (?P0|[1-9]\\d*)\\. (?P0|[1-9]\\d*)\\. 
diff --git a/python/Cargo.toml b/python/Cargo.toml index 372dc85a..48966ba1 100644 --- a/python/Cargo.toml +++ b/python/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "lancedb-python" -version = "0.14.1-beta.1" +version = "0.15.0-beta.0" edition.workspace = true description = "Python bindings for LanceDB" license.workspace = true From e291212ecffbd388adc1e027da6e46becbdfaab3 Mon Sep 17 00:00:00 2001 From: Lance Release Date: Tue, 29 Oct 2024 22:16:05 +0000 Subject: [PATCH 46/51] =?UTF-8?q?Bump=20version:=200.15.0-beta.0=20?= =?UTF-8?q?=E2=86=92=200.15.0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- python/.bumpversion.toml | 2 +- python/Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/python/.bumpversion.toml b/python/.bumpversion.toml index eb4d9dde..4b25c2f0 100644 --- a/python/.bumpversion.toml +++ b/python/.bumpversion.toml @@ -1,5 +1,5 @@ [tool.bumpversion] -current_version = "0.15.0-beta.0" +current_version = "0.15.0" parse = """(?x) (?P0|[1-9]\\d*)\\. (?P0|[1-9]\\d*)\\. diff --git a/python/Cargo.toml b/python/Cargo.toml index 48966ba1..31c825cb 100644 --- a/python/Cargo.toml +++ b/python/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "lancedb-python" -version = "0.15.0-beta.0" +version = "0.15.0" edition.workspace = true description = "Python bindings for LanceDB" license.workspace = true From f947259f167a94cc654f2c9e69bbbbfd6aa75edb Mon Sep 17 00:00:00 2001 From: Lance Release Date: Tue, 29 Oct 2024 22:16:27 +0000 Subject: [PATCH 47/51] =?UTF-8?q?Bump=20version:=200.11.1-beta.1=20?= =?UTF-8?q?=E2=86=92=200.12.0-beta.0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .bumpversion.toml | 2 +- java/core/pom.xml | 2 +- java/pom.xml | 2 +- node/package.json | 12 ++++++------ nodejs/Cargo.toml | 2 +- nodejs/npm/darwin-arm64/package.json | 2 +- nodejs/npm/darwin-x64/package.json | 2 +- nodejs/npm/linux-arm64-gnu/package.json | 2 +- nodejs/npm/linux-x64-gnu/package.json | 2 +- nodejs/npm/win32-x64-msvc/package.json | 2 +- nodejs/package.json | 2 +- rust/ffi/node/Cargo.toml | 2 +- rust/lancedb/Cargo.toml | 2 +- 13 files changed, 18 insertions(+), 18 deletions(-) diff --git a/.bumpversion.toml b/.bumpversion.toml index 037f783a..38fa2217 100644 --- a/.bumpversion.toml +++ b/.bumpversion.toml @@ -1,5 +1,5 @@ [tool.bumpversion] -current_version = "0.11.1-beta.1" +current_version = "0.12.0-beta.0" parse = """(?x) (?P0|[1-9]\\d*)\\. (?P0|[1-9]\\d*)\\. 
diff --git a/java/core/pom.xml b/java/core/pom.xml index b3a079f5..4bb3c4d4 100644 --- a/java/core/pom.xml +++ b/java/core/pom.xml @@ -8,7 +8,7 @@ com.lancedb lancedb-parent - 0.11.1-beta.1 + 0.12.0-beta.0 ../pom.xml diff --git a/java/pom.xml b/java/pom.xml index a89870e7..7d4822b0 100644 --- a/java/pom.xml +++ b/java/pom.xml @@ -6,7 +6,7 @@ com.lancedb lancedb-parent - 0.11.1-beta.1 + 0.12.0-beta.0 pom LanceDB Parent diff --git a/node/package.json b/node/package.json index caf64030..50be318e 100644 --- a/node/package.json +++ b/node/package.json @@ -1,6 +1,6 @@ { "name": "vectordb", - "version": "0.11.1-beta.1", + "version": "0.12.0-beta.0", "description": " Serverless, low-latency vector database for AI applications", "main": "dist/index.js", "types": "dist/index.d.ts", @@ -88,10 +88,10 @@ } }, "optionalDependencies": { - "@lancedb/vectordb-darwin-arm64": "0.11.1-beta.1", - "@lancedb/vectordb-darwin-x64": "0.11.1-beta.1", - "@lancedb/vectordb-linux-arm64-gnu": "0.11.1-beta.1", - "@lancedb/vectordb-linux-x64-gnu": "0.11.1-beta.1", - "@lancedb/vectordb-win32-x64-msvc": "0.11.1-beta.1" + "@lancedb/vectordb-darwin-arm64": "0.12.0-beta.0", + "@lancedb/vectordb-darwin-x64": "0.12.0-beta.0", + "@lancedb/vectordb-linux-arm64-gnu": "0.12.0-beta.0", + "@lancedb/vectordb-linux-x64-gnu": "0.12.0-beta.0", + "@lancedb/vectordb-win32-x64-msvc": "0.12.0-beta.0" } } diff --git a/nodejs/Cargo.toml b/nodejs/Cargo.toml index 458eb4ef..31cd2fab 100644 --- a/nodejs/Cargo.toml +++ b/nodejs/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "lancedb-nodejs" edition.workspace = true -version = "0.11.1-beta.1" +version = "0.12.0-beta.0" license.workspace = true description.workspace = true repository.workspace = true diff --git a/nodejs/npm/darwin-arm64/package.json b/nodejs/npm/darwin-arm64/package.json index b6edd42f..4656810a 100644 --- a/nodejs/npm/darwin-arm64/package.json +++ b/nodejs/npm/darwin-arm64/package.json @@ -1,6 +1,6 @@ { "name": "@lancedb/lancedb-darwin-arm64", - "version": "0.11.1-beta.1", + "version": "0.12.0-beta.0", "os": ["darwin"], "cpu": ["arm64"], "main": "lancedb.darwin-arm64.node", diff --git a/nodejs/npm/darwin-x64/package.json b/nodejs/npm/darwin-x64/package.json index 4fe3b16f..405d53ea 100644 --- a/nodejs/npm/darwin-x64/package.json +++ b/nodejs/npm/darwin-x64/package.json @@ -1,6 +1,6 @@ { "name": "@lancedb/lancedb-darwin-x64", - "version": "0.11.1-beta.1", + "version": "0.12.0-beta.0", "os": ["darwin"], "cpu": ["x64"], "main": "lancedb.darwin-x64.node", diff --git a/nodejs/npm/linux-arm64-gnu/package.json b/nodejs/npm/linux-arm64-gnu/package.json index 10aba252..c24d560e 100644 --- a/nodejs/npm/linux-arm64-gnu/package.json +++ b/nodejs/npm/linux-arm64-gnu/package.json @@ -1,6 +1,6 @@ { "name": "@lancedb/lancedb-linux-arm64-gnu", - "version": "0.11.1-beta.1", + "version": "0.12.0-beta.0", "os": ["linux"], "cpu": ["arm64"], "main": "lancedb.linux-arm64-gnu.node", diff --git a/nodejs/npm/linux-x64-gnu/package.json b/nodejs/npm/linux-x64-gnu/package.json index 73dd4592..df02b9f7 100644 --- a/nodejs/npm/linux-x64-gnu/package.json +++ b/nodejs/npm/linux-x64-gnu/package.json @@ -1,6 +1,6 @@ { "name": "@lancedb/lancedb-linux-x64-gnu", - "version": "0.11.1-beta.1", + "version": "0.12.0-beta.0", "os": ["linux"], "cpu": ["x64"], "main": "lancedb.linux-x64-gnu.node", diff --git a/nodejs/npm/win32-x64-msvc/package.json b/nodejs/npm/win32-x64-msvc/package.json index 6e53b2da..69c986fa 100644 --- a/nodejs/npm/win32-x64-msvc/package.json +++ b/nodejs/npm/win32-x64-msvc/package.json @@ -1,6 +1,6 @@ { 
"name": "@lancedb/lancedb-win32-x64-msvc", - "version": "0.11.1-beta.1", + "version": "0.12.0-beta.0", "os": ["win32"], "cpu": ["x64"], "main": "lancedb.win32-x64-msvc.node", diff --git a/nodejs/package.json b/nodejs/package.json index 5f40d8d3..d0b623fa 100644 --- a/nodejs/package.json +++ b/nodejs/package.json @@ -10,7 +10,7 @@ "vector database", "ann" ], - "version": "0.11.1-beta.1", + "version": "0.12.0-beta.0", "main": "dist/index.js", "exports": { ".": "./dist/index.js", diff --git a/rust/ffi/node/Cargo.toml b/rust/ffi/node/Cargo.toml index 1c7244be..4129198d 100644 --- a/rust/ffi/node/Cargo.toml +++ b/rust/ffi/node/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "lancedb-node" -version = "0.11.1-beta.1" +version = "0.12.0-beta.0" description = "Serverless, low-latency vector database for AI applications" license.workspace = true edition.workspace = true diff --git a/rust/lancedb/Cargo.toml b/rust/lancedb/Cargo.toml index a7fa770e..06fc5169 100644 --- a/rust/lancedb/Cargo.toml +++ b/rust/lancedb/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "lancedb" -version = "0.11.1-beta.1" +version = "0.12.0-beta.0" edition.workspace = true description = "LanceDB: A serverless, low-latency vector database for AI applications" license.workspace = true From facc7d61c00bdac7add04657ca91b2e98e2b8722 Mon Sep 17 00:00:00 2001 From: Lance Release Date: Tue, 29 Oct 2024 22:16:32 +0000 Subject: [PATCH 48/51] =?UTF-8?q?Bump=20version:=200.12.0-beta.0=20?= =?UTF-8?q?=E2=86=92=200.12.0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .bumpversion.toml | 2 +- java/core/pom.xml | 2 +- java/pom.xml | 2 +- node/package.json | 12 ++++++------ nodejs/Cargo.toml | 2 +- nodejs/npm/darwin-arm64/package.json | 2 +- nodejs/npm/darwin-x64/package.json | 2 +- nodejs/npm/linux-arm64-gnu/package.json | 2 +- nodejs/npm/linux-x64-gnu/package.json | 2 +- nodejs/npm/win32-x64-msvc/package.json | 2 +- nodejs/package.json | 2 +- rust/ffi/node/Cargo.toml | 2 +- rust/lancedb/Cargo.toml | 2 +- 13 files changed, 18 insertions(+), 18 deletions(-) diff --git a/.bumpversion.toml b/.bumpversion.toml index 38fa2217..57a76fbd 100644 --- a/.bumpversion.toml +++ b/.bumpversion.toml @@ -1,5 +1,5 @@ [tool.bumpversion] -current_version = "0.12.0-beta.0" +current_version = "0.12.0" parse = """(?x) (?P0|[1-9]\\d*)\\. (?P0|[1-9]\\d*)\\. 
diff --git a/java/core/pom.xml b/java/core/pom.xml index 4bb3c4d4..398bdc6f 100644 --- a/java/core/pom.xml +++ b/java/core/pom.xml @@ -8,7 +8,7 @@ com.lancedb lancedb-parent - 0.12.0-beta.0 + 0.12.0-final.0 ../pom.xml diff --git a/java/pom.xml b/java/pom.xml index 7d4822b0..acd4b9df 100644 --- a/java/pom.xml +++ b/java/pom.xml @@ -6,7 +6,7 @@ com.lancedb lancedb-parent - 0.12.0-beta.0 + 0.12.0-final.0 pom LanceDB Parent diff --git a/node/package.json b/node/package.json index 50be318e..f293b411 100644 --- a/node/package.json +++ b/node/package.json @@ -1,6 +1,6 @@ { "name": "vectordb", - "version": "0.12.0-beta.0", + "version": "0.12.0", "description": " Serverless, low-latency vector database for AI applications", "main": "dist/index.js", "types": "dist/index.d.ts", @@ -88,10 +88,10 @@ } }, "optionalDependencies": { - "@lancedb/vectordb-darwin-arm64": "0.12.0-beta.0", - "@lancedb/vectordb-darwin-x64": "0.12.0-beta.0", - "@lancedb/vectordb-linux-arm64-gnu": "0.12.0-beta.0", - "@lancedb/vectordb-linux-x64-gnu": "0.12.0-beta.0", - "@lancedb/vectordb-win32-x64-msvc": "0.12.0-beta.0" + "@lancedb/vectordb-darwin-arm64": "0.12.0", + "@lancedb/vectordb-darwin-x64": "0.12.0", + "@lancedb/vectordb-linux-arm64-gnu": "0.12.0", + "@lancedb/vectordb-linux-x64-gnu": "0.12.0", + "@lancedb/vectordb-win32-x64-msvc": "0.12.0" } } diff --git a/nodejs/Cargo.toml b/nodejs/Cargo.toml index 31cd2fab..81733d0e 100644 --- a/nodejs/Cargo.toml +++ b/nodejs/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "lancedb-nodejs" edition.workspace = true -version = "0.12.0-beta.0" +version = "0.12.0" license.workspace = true description.workspace = true repository.workspace = true diff --git a/nodejs/npm/darwin-arm64/package.json b/nodejs/npm/darwin-arm64/package.json index 4656810a..b9938915 100644 --- a/nodejs/npm/darwin-arm64/package.json +++ b/nodejs/npm/darwin-arm64/package.json @@ -1,6 +1,6 @@ { "name": "@lancedb/lancedb-darwin-arm64", - "version": "0.12.0-beta.0", + "version": "0.12.0", "os": ["darwin"], "cpu": ["arm64"], "main": "lancedb.darwin-arm64.node", diff --git a/nodejs/npm/darwin-x64/package.json b/nodejs/npm/darwin-x64/package.json index 405d53ea..8b3da0f4 100644 --- a/nodejs/npm/darwin-x64/package.json +++ b/nodejs/npm/darwin-x64/package.json @@ -1,6 +1,6 @@ { "name": "@lancedb/lancedb-darwin-x64", - "version": "0.12.0-beta.0", + "version": "0.12.0", "os": ["darwin"], "cpu": ["x64"], "main": "lancedb.darwin-x64.node", diff --git a/nodejs/npm/linux-arm64-gnu/package.json b/nodejs/npm/linux-arm64-gnu/package.json index c24d560e..55e3c7f2 100644 --- a/nodejs/npm/linux-arm64-gnu/package.json +++ b/nodejs/npm/linux-arm64-gnu/package.json @@ -1,6 +1,6 @@ { "name": "@lancedb/lancedb-linux-arm64-gnu", - "version": "0.12.0-beta.0", + "version": "0.12.0", "os": ["linux"], "cpu": ["arm64"], "main": "lancedb.linux-arm64-gnu.node", diff --git a/nodejs/npm/linux-x64-gnu/package.json b/nodejs/npm/linux-x64-gnu/package.json index df02b9f7..37219174 100644 --- a/nodejs/npm/linux-x64-gnu/package.json +++ b/nodejs/npm/linux-x64-gnu/package.json @@ -1,6 +1,6 @@ { "name": "@lancedb/lancedb-linux-x64-gnu", - "version": "0.12.0-beta.0", + "version": "0.12.0", "os": ["linux"], "cpu": ["x64"], "main": "lancedb.linux-x64-gnu.node", diff --git a/nodejs/npm/win32-x64-msvc/package.json b/nodejs/npm/win32-x64-msvc/package.json index 69c986fa..4c705e06 100644 --- a/nodejs/npm/win32-x64-msvc/package.json +++ b/nodejs/npm/win32-x64-msvc/package.json @@ -1,6 +1,6 @@ { "name": "@lancedb/lancedb-win32-x64-msvc", - "version": "0.12.0-beta.0", + 
"version": "0.12.0", "os": ["win32"], "cpu": ["x64"], "main": "lancedb.win32-x64-msvc.node", diff --git a/nodejs/package.json b/nodejs/package.json index d0b623fa..fbd76092 100644 --- a/nodejs/package.json +++ b/nodejs/package.json @@ -10,7 +10,7 @@ "vector database", "ann" ], - "version": "0.12.0-beta.0", + "version": "0.12.0", "main": "dist/index.js", "exports": { ".": "./dist/index.js", diff --git a/rust/ffi/node/Cargo.toml b/rust/ffi/node/Cargo.toml index 4129198d..70c5baf0 100644 --- a/rust/ffi/node/Cargo.toml +++ b/rust/ffi/node/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "lancedb-node" -version = "0.12.0-beta.0" +version = "0.12.0" description = "Serverless, low-latency vector database for AI applications" license.workspace = true edition.workspace = true diff --git a/rust/lancedb/Cargo.toml b/rust/lancedb/Cargo.toml index 06fc5169..99a245bd 100644 --- a/rust/lancedb/Cargo.toml +++ b/rust/lancedb/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "lancedb" -version = "0.12.0-beta.0" +version = "0.12.0" edition.workspace = true description = "LanceDB: A serverless, low-latency vector database for AI applications" license.workspace = true From 02535bdc88d07558459c836be8c9c9ece38fc789 Mon Sep 17 00:00:00 2001 From: Lance Release Date: Tue, 29 Oct 2024 22:16:51 +0000 Subject: [PATCH 49/51] Updating package-lock.json --- node/package-lock.json | 79 ++++-------------------------------------- 1 file changed, 7 insertions(+), 72 deletions(-) diff --git a/node/package-lock.json b/node/package-lock.json index 31cfc115..c81e6662 100644 --- a/node/package-lock.json +++ b/node/package-lock.json @@ -1,12 +1,12 @@ { "name": "vectordb", - "version": "0.11.1-beta.1", + "version": "0.12.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "vectordb", - "version": "0.11.1-beta.1", + "version": "0.12.0", "cpu": [ "x64", "arm64" @@ -52,11 +52,11 @@ "uuid": "^9.0.0" }, "optionalDependencies": { - "@lancedb/vectordb-darwin-arm64": "0.11.1-beta.1", - "@lancedb/vectordb-darwin-x64": "0.11.1-beta.1", - "@lancedb/vectordb-linux-arm64-gnu": "0.11.1-beta.1", - "@lancedb/vectordb-linux-x64-gnu": "0.11.1-beta.1", - "@lancedb/vectordb-win32-x64-msvc": "0.11.1-beta.1" + "@lancedb/vectordb-darwin-arm64": "0.12.0", + "@lancedb/vectordb-darwin-x64": "0.12.0", + "@lancedb/vectordb-linux-arm64-gnu": "0.12.0", + "@lancedb/vectordb-linux-x64-gnu": "0.12.0", + "@lancedb/vectordb-win32-x64-msvc": "0.12.0" }, "peerDependencies": { "@apache-arrow/ts": "^14.0.2", @@ -326,71 +326,6 @@ "@jridgewell/sourcemap-codec": "^1.4.10" } }, - "node_modules/@lancedb/vectordb-darwin-arm64": { - "version": "0.11.1-beta.1", - "resolved": "https://registry.npmjs.org/@lancedb/vectordb-darwin-arm64/-/vectordb-darwin-arm64-0.11.1-beta.1.tgz", - "integrity": "sha512-q9jcCbmcz45UHmjgecL6zK82WaqUJsARfniwXXPcnd8ooISVhPkgN+RVKv6edwI9T0PV+xVRYq+LQLlZu5fyxw==", - "cpu": [ - "arm64" - ], - "license": "Apache-2.0", - "optional": true, - "os": [ - "darwin" - ] - }, - "node_modules/@lancedb/vectordb-darwin-x64": { - "version": "0.11.1-beta.1", - "resolved": "https://registry.npmjs.org/@lancedb/vectordb-darwin-x64/-/vectordb-darwin-x64-0.11.1-beta.1.tgz", - "integrity": "sha512-E5tCTS5TaTkssTPa+gdnFxZJ1f60jnSIJXhqufNFZk4s+IMViwR1BPqaqE++WY5c1uBI55ef1862CROKDKX4gg==", - "cpu": [ - "x64" - ], - "license": "Apache-2.0", - "optional": true, - "os": [ - "darwin" - ] - }, - "node_modules/@lancedb/vectordb-linux-arm64-gnu": { - "version": "0.11.1-beta.1", - "resolved": 
"https://registry.npmjs.org/@lancedb/vectordb-linux-arm64-gnu/-/vectordb-linux-arm64-gnu-0.11.1-beta.1.tgz", - "integrity": "sha512-Obohy6TH31Uq+fp6ZisHR7iAsvgVPqBExrycVcIJqrLZnIe88N9OWUwBXkmfMAw/2hNJFwD4tU7+4U2FcBWX4w==", - "cpu": [ - "arm64" - ], - "license": "Apache-2.0", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@lancedb/vectordb-linux-x64-gnu": { - "version": "0.11.1-beta.1", - "resolved": "https://registry.npmjs.org/@lancedb/vectordb-linux-x64-gnu/-/vectordb-linux-x64-gnu-0.11.1-beta.1.tgz", - "integrity": "sha512-3Meu0dgrzNrnBVVQhxkUSAOhQNmgtKHvOvmrRLUicV+X19hd33udihgxVpZZb9mpXenJ8lZsS+Jq6R0hWqntag==", - "cpu": [ - "x64" - ], - "license": "Apache-2.0", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@lancedb/vectordb-win32-x64-msvc": { - "version": "0.11.1-beta.1", - "resolved": "https://registry.npmjs.org/@lancedb/vectordb-win32-x64-msvc/-/vectordb-win32-x64-msvc-0.11.1-beta.1.tgz", - "integrity": "sha512-BafZ9OJPQXsS7JW0weAl12wC+827AiRjfUrE5tvrYWZah2OwCF2U2g6uJ3x4pxfwEGsv5xcHFqgxlS7ttFkh+Q==", - "cpu": [ - "x64" - ], - "license": "Apache-2.0", - "optional": true, - "os": [ - "win32" - ] - }, "node_modules/@neon-rs/cli": { "version": "0.0.160", "resolved": "https://registry.npmjs.org/@neon-rs/cli/-/cli-0.0.160.tgz", From 113cd6995b70ad766fdbc62387c70e87e52ca99c Mon Sep 17 00:00:00 2001 From: Will Jones Date: Wed, 30 Oct 2024 11:33:49 -0700 Subject: [PATCH 50/51] fix: index_stats works for FTS indices (#1780) When running `index_stats()` for an FTS index, users would get the deserialization error: ``` InvalidInput { message: "error deserializing index statistics: unknown variant `Inverted`, expected one of `IvfPq`, `IvfHnswPq`, `IvfHnswSq`, `BTree`, `Bitmap`, `LabelList`, `FTS` at line 1 column 24" } ``` --- rust/lancedb/src/index.rs | 1 + rust/lancedb/src/table.rs | 6 ++++++ 2 files changed, 7 insertions(+) diff --git a/rust/lancedb/src/index.rs b/rust/lancedb/src/index.rs index 47b42050..6ec6249e 100644 --- a/rust/lancedb/src/index.rs +++ b/rust/lancedb/src/index.rs @@ -119,6 +119,7 @@ pub enum IndexType { #[serde(alias = "LABEL_LIST")] LabelList, // FTS + #[serde(alias = "INVERTED", alias = "Inverted")] FTS, } diff --git a/rust/lancedb/src/table.rs b/rust/lancedb/src/table.rs index 0bc61c34..a94526ca 100644 --- a/rust/lancedb/src/table.rs +++ b/rust/lancedb/src/table.rs @@ -3123,6 +3123,12 @@ mod tests { assert_eq!(index.index_type, crate::index::IndexType::FTS); assert_eq!(index.columns, vec!["text".to_string()]); assert_eq!(index.name, "text_idx"); + + let stats = table.index_stats("text_idx").await.unwrap().unwrap(); + assert_eq!(stats.num_indexed_rows, num_rows); + assert_eq!(stats.num_unindexed_rows, 0); + assert_eq!(stats.index_type, crate::index::IndexType::FTS); + assert_eq!(stats.distance_type, None); } #[tokio::test] From f3fc339ef650517674a22995d81f9ae21e7bae5a Mon Sep 17 00:00:00 2001 From: Will Jones Date: Thu, 31 Oct 2024 15:22:09 -0700 Subject: [PATCH 51/51] fix(rust): fix delete, update, query in remote SDK (#1782) Fixes several minor issues with Rust remote SDK: * Delete uses `predicate` not `filter` as parameter * Update does not return the row value in remote SDK * Update takes tuples * Content type returned by query node is wrong, so we shouldn't validate it. https://github.com/lancedb/sophon/issues/2742 * Data returned by query endpoint is actually an Arrow IPC file, not IPC stream. 
--- rust/lancedb/src/remote.rs | 2 + rust/lancedb/src/remote/table.rs | 94 +++++++++++++------------------- 2 files changed, 40 insertions(+), 56 deletions(-) diff --git a/rust/lancedb/src/remote.rs b/rust/lancedb/src/remote.rs index 08b52f3f..7f94ea7d 100644 --- a/rust/lancedb/src/remote.rs +++ b/rust/lancedb/src/remote.rs @@ -23,6 +23,8 @@ pub(crate) mod table; pub(crate) mod util; const ARROW_STREAM_CONTENT_TYPE: &str = "application/vnd.apache.arrow.stream"; +#[cfg(test)] +const ARROW_FILE_CONTENT_TYPE: &str = "application/vnd.apache.arrow.file"; const JSON_CONTENT_TYPE: &str = "application/json"; pub use client::{ClientConfig, RetryConfig, TimeoutConfig}; diff --git a/rust/lancedb/src/remote/table.rs b/rust/lancedb/src/remote/table.rs index 81fb7a90..f9900b2c 100644 --- a/rust/lancedb/src/remote/table.rs +++ b/rust/lancedb/src/remote/table.rs @@ -1,3 +1,4 @@ +use std::io::Cursor; use std::sync::{Arc, Mutex}; use crate::index::Index; @@ -7,10 +8,9 @@ use crate::table::AddDataMode; use crate::utils::{supported_btree_data_type, supported_vector_data_type}; use crate::Error; use arrow_array::RecordBatchReader; -use arrow_ipc::reader::StreamReader; +use arrow_ipc::reader::FileReader; use arrow_schema::{DataType, SchemaRef}; use async_trait::async_trait; -use bytes::Buf; use datafusion_common::DataFusionError; use datafusion_physical_plan::stream::RecordBatchStreamAdapter; use datafusion_physical_plan::{ExecutionPlan, SendableRecordBatchStream}; @@ -115,39 +115,14 @@ impl RemoteTable { async fn read_arrow_stream( &self, request_id: &str, - body: reqwest::Response, + response: reqwest::Response, ) -> Result { - // Assert that the content type is correct - let content_type = body - .headers() - .get(CONTENT_TYPE) - .ok_or_else(|| Error::Http { - source: "Missing content type".into(), - request_id: request_id.to_string(), - status_code: None, - })? - .to_str() - .map_err(|e| Error::Http { - source: format!("Failed to parse content type: {}", e).into(), - request_id: request_id.to_string(), - status_code: None, - })?; - if content_type != ARROW_STREAM_CONTENT_TYPE { - return Err(Error::Http { - source: format!( - "Expected content type {}, got {}", - ARROW_STREAM_CONTENT_TYPE, content_type - ) - .into(), - request_id: request_id.to_string(), - status_code: None, - }); - } + let response = self.check_table_response(request_id, response).await?; // There isn't a way to actually stream this data yet. 
I have an upstream issue: // https://github.com/apache/arrow-rs/issues/6420 - let body = body.bytes().await.err_to_http(request_id.into())?; - let reader = StreamReader::try_new(body.reader(), None)?; + let body = response.bytes().await.err_to_http(request_id.into())?; + let reader = FileReader::try_new(Cursor::new(body), None)?; let schema = reader.schema(); let stream = futures::stream::iter(reader).map_err(DataFusionError::from); Ok(Box::pin(RecordBatchStreamAdapter::new(schema, stream))) @@ -277,7 +252,7 @@ impl TableInternal for RemoteTable { .post(&format!("/v1/table/{}/count_rows/", self.name)); if let Some(filter) = filter { - request = request.json(&serde_json::json!({ "filter": filter })); + request = request.json(&serde_json::json!({ "predicate": filter })); } else { request = request.json(&serde_json::json!({})); } @@ -399,8 +374,7 @@ impl TableInternal for RemoteTable { let mut updates = Vec::new(); for (column, expression) in update.columns { - updates.push(column); - updates.push(expression); + updates.push(vec![column, expression]); } let request = request.json(&serde_json::json!({ @@ -410,19 +384,9 @@ impl TableInternal for RemoteTable { let (request_id, response) = self.client.send(request, false).await?; - let response = self.check_table_response(&request_id, response).await?; + self.check_table_response(&request_id, response).await?; - let body = response.text().await.err_to_http(request_id.clone())?; - - serde_json::from_str(&body).map_err(|e| Error::Http { - source: format!( - "Failed to parse updated rows result from response {}: {}", - body, e - ) - .into(), - request_id, - status_code: None, - }) + Ok(0) // TODO: support returning number of modified rows once supported in SaaS. } async fn delete(&self, predicate: &str) -> Result<()> { let body = serde_json::json!({ "predicate": predicate }); @@ -691,6 +655,7 @@ mod tests { use crate::{ index::{vector::IvfPqIndexBuilder, Index, IndexStatistics, IndexType}, query::{ExecutableQuery, QueryBase}, + remote::ARROW_FILE_CONTENT_TYPE, DistanceType, Error, Table, }; @@ -804,7 +769,7 @@ mod tests { ); assert_eq!( request.body().unwrap().as_bytes().unwrap(), - br#"{"filter":"a > 10"}"# + br#"{"predicate":"a > 10"}"# ); http::Response::builder().status(200).body("42").unwrap() @@ -839,6 +804,17 @@ mod tests { body } + fn write_ipc_file(data: &RecordBatch) -> Vec { + let mut body = Vec::new(); + { + let mut writer = arrow_ipc::writer::FileWriter::try_new(&mut body, &data.schema()) + .expect("Failed to create writer"); + writer.write(data).expect("Failed to write data"); + writer.finish().expect("Failed to finish"); + } + body + } + #[tokio::test] async fn test_add_append() { let data = RecordBatch::try_new( @@ -947,21 +923,27 @@ mod tests { let updates = value.get("updates").unwrap().as_array().unwrap(); assert!(updates.len() == 2); - let col_name = updates[0].as_str().unwrap(); - let expression = updates[1].as_str().unwrap(); + let col_name = updates[0][0].as_str().unwrap(); + let expression = updates[0][1].as_str().unwrap(); assert_eq!(col_name, "a"); assert_eq!(expression, "a + 1"); + let col_name = updates[1][0].as_str().unwrap(); + let expression = updates[1][1].as_str().unwrap(); + assert_eq!(col_name, "b"); + assert_eq!(expression, "b - 1"); + let only_if = value.get("only_if").unwrap().as_str().unwrap(); assert_eq!(only_if, "b > 10"); } - http::Response::builder().status(200).body("1").unwrap() + http::Response::builder().status(200).body("{}").unwrap() }); table .update() .column("a", "a + 1") + .column("b", "b - 1") 
.only_if("b > 10") .execute() .await @@ -1092,10 +1074,10 @@ mod tests { expected_body["vector"] = vec![0.1f32, 0.2, 0.3].into(); assert_eq!(body, expected_body); - let response_body = write_ipc_stream(&expected_data_ref); + let response_body = write_ipc_file(&expected_data_ref); http::Response::builder() .status(200) - .header(CONTENT_TYPE, ARROW_STREAM_CONTENT_TYPE) + .header(CONTENT_TYPE, ARROW_FILE_CONTENT_TYPE) .body(response_body) .unwrap() }); @@ -1142,10 +1124,10 @@ mod tests { vec![Arc::new(Int32Array::from(vec![1, 2, 3]))], ) .unwrap(); - let response_body = write_ipc_stream(&data); + let response_body = write_ipc_file(&data); http::Response::builder() .status(200) - .header(CONTENT_TYPE, ARROW_STREAM_CONTENT_TYPE) + .header(CONTENT_TYPE, ARROW_FILE_CONTENT_TYPE) .body(response_body) .unwrap() }); @@ -1193,10 +1175,10 @@ mod tests { vec![Arc::new(Int32Array::from(vec![1, 2, 3]))], ) .unwrap(); - let response_body = write_ipc_stream(&data); + let response_body = write_ipc_file(&data); http::Response::builder() .status(200) - .header(CONTENT_TYPE, ARROW_STREAM_CONTENT_TYPE) + .header(CONTENT_TYPE, ARROW_FILE_CONTENT_TYPE) .body(response_body) .unwrap() });