feat: add support for `add` in the async Python API (#1037)

To add support for `add` we needed to migrate the Rust `Table`
trait to a `Table` struct plus a `TableInternal` trait (similar to the
way the connection is designed).

While doing this we also cleaned up some inconsistencies between the
SDKs:

* Python and Node are garbage-collected languages, and it can be
difficult to trigger something to be freed. The convention in these
languages is to provide some kind of close method. I added a close
method to both the table and the connection which will drop the
underlying Rust object (see the sketch after this list).
* We made significant improvements to table creation in
cc5f2136a6
for the `node` SDK. I copied these changes to the `nodejs` SDK.
* The nodejs tests were using `fs` to create tmp directories, and these
were never getting cleaned up. This was mostly harmless but annoying,
so I switched the tests to the `tmp` package to ensure tmp directories
are removed.
* ~~countRows in the node SDK was returning `bigint`. I changed it to
return `number`~~ (this actually happened in a previous PR)
* Tables and connections now implement `std::fmt::Display`, which is
hooked into Python's `__repr__`. Node has no concept of a regular "to
string" function, so I added a `display` method (also shown in the
sketch after this list).
* Python method signatures are changing so that optional parameters are
always `Optional[foo] = None` instead of something like `foo = False`.
This is because we want those defaults to live in Rust whenever
possible (though we still need to mention the default in the
documentation).
* I changed the Python `AsyncConnection`/`AsyncTable` classes from
abstract classes with a single implementation to plain classes, because
we no longer have the remote implementation in Python.
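
As a rough sketch, here is what the new lifecycle methods look like in
the `nodejs` SDK (based on the tests in this diff; the import path is
illustrative, and the Python API mirrors this with `close()` and
`__repr__`):

```ts
import { connect } from "./dist"; // illustrative import path

const db = await connect("/tmp/my-db");
console.log(db.display()); // NativeDatabase(uri=/tmp/my-db, read_consistency_interval=None)

const table = await db.createTable("test", [{ id: 1 }, { id: 2 }]);
console.log(table.isOpen()); // true
table.close(); // eagerly drops the underlying Rust object
console.log(table.isOpen()); // false
await table.countRows(); // rejects with "Table test is closed"

db.close(); // connections close the same way
```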

Note: this does NOT add the `add` function to the remote table. This PR
was already large enough, and the remote implementation is unique
enough, that I am going to do all the remote work at a later date (the
structure should now be in place and correct, so there shouldn't be any
refactoring concerns).
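
For reference, here is a sketch of the new `add` in the `nodejs` SDK
(adapted from the tests in this diff; the async Python `add` follows
the same append/overwrite semantics):

```ts
const table = await db.createTable("test", [{ id: 1 }, { id: 2 }]);

// The default mode is "append"
await table.add([{ id: 3 }]);
await table.countRows(); // 3

// "overwrite" replaces the existing rows with the new data
await table.add([{ id: 1 }], { mode: "overwrite" });
await table.countRows(); // 1
```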

---------

Co-authored-by: Will Jones <willjones127@gmail.com>
Author: Weston Pace
Date: 2024-03-04 09:27:41 -08:00
Commit: abaf315baf (parent: 14b9277ac1)

42 changed files with 2822 additions and 1122 deletions


@@ -12,9 +12,8 @@
// See the License for the specific language governing permissions and
// limitations under the License.
import { makeArrowTable, toBuffer } from "../lancedb/arrow";
import { convertToTable, fromTableToBuffer, makeArrowTable, makeEmptyTable } from '../dist/arrow'
import {
Int64,
Field,
FixedSizeList,
Float16,
@@ -23,98 +22,301 @@ import {
tableFromIPC,
Schema,
Float64,
} from "apache-arrow";
type Table,
Binary,
Bool,
Utf8,
Struct,
List,
DataType,
Dictionary,
Int64,
Float,
Precision
} from 'apache-arrow'
import { type EmbeddingFunction } from '../dist/embedding/embedding_function'
test("customized schema", function () {
const schema = new Schema([
new Field("a", new Int32(), true),
new Field("b", new Float32(), true),
new Field(
"c",
new FixedSizeList(3, new Field("item", new Float16())),
true
),
]);
const table = makeArrowTable(
[
{ a: 1, b: 2, c: [1, 2, 3] },
{ a: 4, b: 5, c: [4, 5, 6] },
{ a: 7, b: 8, c: [7, 8, 9] },
],
{ schema }
);
expect(table.schema.toString()).toEqual(schema.toString());
const buf = toBuffer(table);
expect(buf.byteLength).toBeGreaterThan(0);
const actual = tableFromIPC(buf);
expect(actual.numRows).toBe(3);
const actualSchema = actual.schema;
expect(actualSchema.toString()).toStrictEqual(schema.toString());
});
test("default vector column", function () {
const schema = new Schema([
new Field("a", new Float64(), true),
new Field("b", new Float64(), true),
new Field("vector", new FixedSizeList(3, new Field("item", new Float32()))),
]);
const table = makeArrowTable([
{ a: 1, b: 2, vector: [1, 2, 3] },
{ a: 4, b: 5, vector: [4, 5, 6] },
{ a: 7, b: 8, vector: [7, 8, 9] },
]);
const buf = toBuffer(table);
expect(buf.byteLength).toBeGreaterThan(0);
const actual = tableFromIPC(buf);
expect(actual.numRows).toBe(3);
const actualSchema = actual.schema;
expect(actualSchema.toString()).toEqual(schema.toString());
});
test("2 vector columns", function () {
const schema = new Schema([
new Field("a", new Float64()),
new Field("b", new Float64()),
new Field("vec1", new FixedSizeList(3, new Field("item", new Float16()))),
new Field("vec2", new FixedSizeList(3, new Field("item", new Float16()))),
]);
const table = makeArrowTable(
[
{ a: 1, b: 2, vec1: [1, 2, 3], vec2: [2, 4, 6] },
{ a: 4, b: 5, vec1: [4, 5, 6], vec2: [8, 10, 12] },
{ a: 7, b: 8, vec1: [7, 8, 9], vec2: [14, 16, 18] },
],
{
vectorColumns: {
vec1: { type: new Float16() },
vec2: { type: new Float16() },
},
}
);
const buf = toBuffer(table);
expect(buf.byteLength).toBeGreaterThan(0);
const actual = tableFromIPC(buf);
expect(actual.numRows).toBe(3);
const actualSchema = actual.schema;
expect(actualSchema.toString()).toEqual(schema.toString());
});
function sampleRecords (): Array<Record<string, any>> {
return [
{
binary: Buffer.alloc(5),
boolean: false,
number: 7,
string: 'hello',
struct: { x: 0, y: 0 },
list: ['anime', 'action', 'comedy']
}
]
}
test("handles int64", function() {
// https://github.com/lancedb/lancedb/issues/960
// Helper method to verify various ways to create a table
async function checkTableCreation (tableCreationMethod: (records: any, recordsReversed: any, schema: Schema) => Promise<Table>, infersTypes: boolean): Promise<void> {
const records = sampleRecords()
const recordsReversed = [{
list: ['anime', 'action', 'comedy'],
struct: { x: 0, y: 0 },
string: 'hello',
number: 7,
boolean: false,
binary: Buffer.alloc(5)
}]
const schema = new Schema([
new Field("x", new Int64(), true)
]);
const table = makeArrowTable([
{ x: 1 },
{ x: 2 },
{ x: 3 }
], { schema });
expect(table.schema).toEqual(schema);
})
new Field('binary', new Binary(), false),
new Field('boolean', new Bool(), false),
new Field('number', new Float64(), false),
new Field('string', new Utf8(), false),
new Field('struct', new Struct([
new Field('x', new Float64(), false),
new Field('y', new Float64(), false)
])),
new Field('list', new List(new Field('item', new Utf8(), false)), false)
])
const table = await tableCreationMethod(records, recordsReversed, schema)
schema.fields.forEach((field, idx) => {
const actualField = table.schema.fields[idx]
// Type inference always assumes nullable=true
if (infersTypes) {
expect(actualField.nullable).toBe(true)
} else {
expect(actualField.nullable).toBe(false)
}
expect(table.getChild(field.name)?.type.toString()).toEqual(field.type.toString())
expect(table.getChildAt(idx)?.type.toString()).toEqual(field.type.toString())
})
}
describe('The function makeArrowTable', function () {
it('will use data types from a provided schema instead of inference', async function () {
const schema = new Schema([
new Field('a', new Int32()),
new Field('b', new Float32()),
new Field('c', new FixedSizeList(3, new Field('item', new Float16()))),
new Field('d', new Int64())
])
const table = makeArrowTable(
[
{ a: 1, b: 2, c: [1, 2, 3], d: 9 },
{ a: 4, b: 5, c: [4, 5, 6], d: 10 },
{ a: 7, b: 8, c: [7, 8, 9], d: null }
],
{ schema }
)
const buf = await fromTableToBuffer(table)
expect(buf.byteLength).toBeGreaterThan(0)
const actual = tableFromIPC(buf)
expect(actual.numRows).toBe(3)
const actualSchema = actual.schema
expect(actualSchema).toEqual(schema)
})
it('will assume the column `vector` is FixedSizeList<Float32> by default', async function () {
const schema = new Schema([
new Field('a', new Float(Precision.DOUBLE), true),
new Field('b', new Float(Precision.DOUBLE), true),
new Field(
'vector',
new FixedSizeList(3, new Field('item', new Float(Precision.SINGLE), true)),
true
)
])
const table = makeArrowTable([
{ a: 1, b: 2, vector: [1, 2, 3] },
{ a: 4, b: 5, vector: [4, 5, 6] },
{ a: 7, b: 8, vector: [7, 8, 9] }
])
const buf = await fromTableToBuffer(table)
expect(buf.byteLength).toBeGreaterThan(0)
const actual = tableFromIPC(buf)
expect(actual.numRows).toBe(3)
const actualSchema = actual.schema
expect(actualSchema).toEqual(schema)
})
it('can support multiple vector columns', async function () {
const schema = new Schema([
new Field('a', new Float(Precision.DOUBLE), true),
new Field('b', new Float(Precision.DOUBLE), true),
new Field('vec1', new FixedSizeList(3, new Field('item', new Float16(), true)), true),
new Field('vec2', new FixedSizeList(3, new Field('item', new Float16(), true)), true)
])
const table = makeArrowTable(
[
{ a: 1, b: 2, vec1: [1, 2, 3], vec2: [2, 4, 6] },
{ a: 4, b: 5, vec1: [4, 5, 6], vec2: [8, 10, 12] },
{ a: 7, b: 8, vec1: [7, 8, 9], vec2: [14, 16, 18] }
],
{
vectorColumns: {
vec1: { type: new Float16() },
vec2: { type: new Float16() }
}
}
)
const buf = await fromTableToBuffer(table)
expect(buf.byteLength).toBeGreaterThan(0)
const actual = tableFromIPC(buf)
expect(actual.numRows).toBe(3)
const actualSchema = actual.schema
expect(actualSchema).toEqual(schema)
})
it('will allow different vector column types', async function () {
const table = makeArrowTable(
[
{ fp16: [1], fp32: [1], fp64: [1] }
],
{
vectorColumns: {
fp16: { type: new Float16() },
fp32: { type: new Float32() },
fp64: { type: new Float64() }
}
}
)
expect(table.getChild('fp16')?.type.children[0].type.toString()).toEqual(new Float16().toString())
expect(table.getChild('fp32')?.type.children[0].type.toString()).toEqual(new Float32().toString())
expect(table.getChild('fp64')?.type.children[0].type.toString()).toEqual(new Float64().toString())
})
it('will use dictionary encoded strings if asked', async function () {
const table = makeArrowTable([{ str: 'hello' }])
expect(DataType.isUtf8(table.getChild('str')?.type)).toBe(true)
const tableWithDict = makeArrowTable([{ str: 'hello' }], { dictionaryEncodeStrings: true })
expect(DataType.isDictionary(tableWithDict.getChild('str')?.type)).toBe(true)
const schema = new Schema([
new Field('str', new Dictionary(new Utf8(), new Int32()))
])
const tableWithDict2 = makeArrowTable([{ str: 'hello' }], { schema })
expect(DataType.isDictionary(tableWithDict2.getChild('str')?.type)).toBe(true)
})
it('will infer data types correctly', async function () {
await checkTableCreation(async (records) => makeArrowTable(records), true)
})
it('will allow a schema to be provided', async function () {
await checkTableCreation(async (records, _, schema) => makeArrowTable(records, { schema }), false)
})
it('will use the field order of any provided schema', async function () {
await checkTableCreation(async (_, recordsReversed, schema) => makeArrowTable(recordsReversed, { schema }), false)
})
it('will make an empty table', async function () {
await checkTableCreation(async (_, __, schema) => makeArrowTable([], { schema }), false)
})
})
class DummyEmbedding implements EmbeddingFunction<string> {
public readonly sourceColumn = 'string'
public readonly embeddingDimension = 2
public readonly embeddingDataType = new Float16()
async embed (data: string[]): Promise<number[][]> {
return data.map(
() => [0.0, 0.0]
)
}
}
class DummyEmbeddingWithNoDimension implements EmbeddingFunction<string> {
public readonly sourceColumn = 'string'
async embed (data: string[]): Promise<number[][]> {
return data.map(
() => [0.0, 0.0]
)
}
}
describe('convertToTable', function () {
it('will infer data types correctly', async function () {
await checkTableCreation(async (records) => await convertToTable(records), true)
})
it('will allow a schema to be provided', async function () {
await checkTableCreation(async (records, _, schema) => await convertToTable(records, undefined, { schema }), false)
})
it('will use the field order of any provided schema', async function () {
await checkTableCreation(async (_, recordsReversed, schema) => await convertToTable(recordsReversed, undefined, { schema }), false)
})
it('will make an empty table', async function () {
await checkTableCreation(async (_, __, schema) => await convertToTable([], undefined, { schema }), false)
})
it('will apply embeddings', async function () {
const records = sampleRecords()
const table = await convertToTable(records, new DummyEmbedding())
expect(DataType.isFixedSizeList(table.getChild('vector')?.type)).toBe(true)
expect(table.getChild('vector')?.type.children[0].type.toString()).toEqual(new Float16().toString())
})
it('will fail if missing the embedding source column', async function () {
await expect(convertToTable([{ id: 1 }], new DummyEmbedding())).rejects.toThrow("'string' was not present")
})
it('use embeddingDimension if embedding missing from table', async function () {
const schema = new Schema([
new Field('string', new Utf8(), false)
])
// Simulate getting an empty Arrow table (minus embedding) from some other source
// In other words, we aren't starting with records
const table = makeEmptyTable(schema)
// If the embedding specifies the dimension we are fine
await fromTableToBuffer(table, new DummyEmbedding())
// We can also supply a schema and should be ok
const schemaWithEmbedding = new Schema([
new Field('string', new Utf8(), false),
new Field('vector', new FixedSizeList(2, new Field('item', new Float16(), false)), false)
])
await fromTableToBuffer(table, new DummyEmbeddingWithNoDimension(), schemaWithEmbedding)
// Otherwise we will get an error
await expect(fromTableToBuffer(table, new DummyEmbeddingWithNoDimension())).rejects.toThrow('does not specify `embeddingDimension`')
})
it('will apply embeddings to an empty table', async function () {
const schema = new Schema([
new Field('string', new Utf8(), false),
new Field('vector', new FixedSizeList(2, new Field('item', new Float16(), false)), false)
])
const table = await convertToTable([], new DummyEmbedding(), { schema })
expect(DataType.isFixedSizeList(table.getChild('vector')?.type)).toBe(true)
expect(table.getChild('vector')?.type.children[0].type.toString()).toEqual(new Float16().toString())
})
it('will complain if embeddings present but schema missing embedding column', async function () {
const schema = new Schema([
new Field('string', new Utf8(), false)
])
await expect(convertToTable([], new DummyEmbedding(), { schema })).rejects.toThrow('column vector was missing')
})
it('will provide a nice error if run twice', async function () {
const records = sampleRecords()
const table = await convertToTable(records, new DummyEmbedding())
// fromTableToBuffer will try and apply the embeddings again
await expect(fromTableToBuffer(table, new DummyEmbedding())).rejects.toThrow('already existed')
})
})
describe('makeEmptyTable', function () {
it('will make an empty table', async function () {
await checkTableCreation(async (_, __, schema) => makeEmptyTable(schema), false)
})
})


@@ -12,18 +12,49 @@
// See the License for the specific language governing permissions and
// limitations under the License.
import * as os from "os";
import * as path from "path";
import * as fs from "fs";
import * as tmp from "tmp";
import { connect } from "../dist/index.js";
import { Connection, connect } from "../dist/index.js";
describe("when working with a connection", () => {
describe("when connecting", () => {
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "test-connection"));
let tmpDir: tmp.DirResult;
beforeEach(() => tmpDir = tmp.dirSync({ unsafeCleanup: true }));
afterEach(() => tmpDir.removeCallback());
it("should connect", async() => {
const db = await connect(tmpDir.name);
expect(db.display()).toBe(`NativeDatabase(uri=${tmpDir.name}, read_consistency_interval=None)`);
})
it("should allow read consistency interval to be specified", async() => {
const db = await connect(tmpDir.name, { readConsistencyInterval: 5});
expect(db.display()).toBe(`NativeDatabase(uri=${tmpDir.name}, read_consistency_interval=5s)`);
})
});
describe("given a connection", () => {
let tmpDir: tmp.DirResult
let db: Connection
beforeEach(async () => {
tmpDir = tmp.dirSync({ unsafeCleanup: true });
db = await connect(tmpDir.name)
});
afterEach(() => tmpDir.removeCallback());
it("should raise an error if opening a non-existent table", async() => {
await expect(db.openTable("non-existent")).rejects.toThrow("was not found");
})
it("should raise an error if any operation is tried after it is closed", async() => {
expect(db.isOpen()).toBe(true);
await db.close();
expect(db.isOpen()).toBe(false);
await expect(db.tableNames()).rejects.toThrow("Connection is closed");
})
it("should fail if creating table twice, unless overwrite is true", async() => {
const db = await connect(tmpDir);
let tbl = await db.createTable("test", [{ id: 1 }, { id: 2 }]);
await expect(tbl.countRows()).resolves.toBe(2);
await expect(db.createTable("test", [{ id: 1 }, { id: 2 }])).rejects.toThrow();
@@ -31,4 +62,10 @@ describe("when working with a connection", () => {
await expect(tbl.countRows()).resolves.toBe(1);
})
it("should list tables", async() => {
await db.createTable("test2", [{ id: 1 }, { id: 2 }]);
await db.createTable("test1", [{ id: 1 }, { id: 2 }]);
expect(await db.tableNames()).toEqual(["test1", "test2"]);
})
});


@@ -1,34 +0,0 @@
// Copyright 2024 Lance Developers.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import * as os from "os";
import * as path from "path";
import * as fs from "fs";
import { Schema, Field, Float64 } from "apache-arrow";
import { connect } from "../dist/index.js";
test("open database", async () => {
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "test-open"));
const db = await connect(tmpDir);
let tableNames = await db.tableNames();
expect(tableNames).toStrictEqual([]);
const tbl = await db.createTable("test", [{ id: 1 }, { id: 2 }]);
expect(await db.tableNames()).toStrictEqual(["test"]);
const schema = await tbl.schema();
expect(schema).toEqual(new Schema([new Field("id", new Float64(), true)]));
});


@@ -12,27 +12,68 @@
// See the License for the specific language governing permissions and
// limitations under the License.
import * as os from "os";
import * as path from "path";
import * as fs from "fs";
import * as path from "path";
import * as tmp from "tmp";
import { connect } from "../dist";
import { Table, connect } from "../dist";
import { Schema, Field, Float32, Int32, FixedSizeList, Int64, Float64 } from "apache-arrow";
import { makeArrowTable } from "../dist/arrow";
describe("Given a table", () => {
let tmpDir: tmp.DirResult;
let table: Table;
const schema = new Schema([
new Field("id", new Float64(), true),
]);
beforeEach(async () => {
tmpDir = tmp.dirSync({ unsafeCleanup: true });
const conn = await connect(tmpDir.name);
table = await conn.createEmptyTable("some_table", schema);
});
afterEach(() => tmpDir.removeCallback());
it("be displayable", async () => {
expect(table.display()).toMatch(/NativeTable\(some_table, uri=.*, read_consistency_interval=None\)/);
table.close()
expect(table.display()).toBe("ClosedTable(some_table)")
})
it("should let me add data", async () => {
await table.add([{ id: 1 }, { id: 2 }]);
await table.add([{ id: 1 }]);
await expect(table.countRows()).resolves.toBe(3);
})
it("should overwrite data if asked", async () => {
await table.add([{ id: 1 }, { id: 2 }]);
await table.add([{ id: 1 }], { mode: "overwrite" });
await expect(table.countRows()).resolves.toBe(1);
})
it("should let me close the table", async () => {
expect(table.isOpen()).toBe(true);
table.close();
expect(table.isOpen()).toBe(false);
await expect(table.countRows()).rejects.toThrow("Table some_table is closed");
})
})
describe("Test creating index", () => {
let tmpDir: string;
let tmpDir: tmp.DirResult;
const schema = new Schema([
new Field("id", new Int32(), true),
new Field("vec", new FixedSizeList(32, new Field("item", new Float32()))),
]);
beforeEach(() => {
tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "index-"));
tmpDir = tmp.dirSync({ unsafeCleanup: true });
});
afterEach(() => tmpDir.removeCallback());
test("create vector index with no column", async () => {
const db = await connect(tmpDir);
const db = await connect(tmpDir.name);
const data = makeArrowTable(
Array(300)
.fill(1)
@@ -50,7 +91,7 @@ describe("Test creating index", () => {
await tbl.createIndex().build();
// check index directory
const indexDir = path.join(tmpDir, "test.lance", "_indices");
const indexDir = path.join(tmpDir.name, "test.lance", "_indices");
expect(fs.readdirSync(indexDir)).toHaveLength(1);
// TODO: check index type.
@@ -66,7 +107,7 @@ describe("Test creating index", () => {
});
test("no vector column available", async () => {
const db = await connect(tmpDir);
const db = await connect(tmpDir.name);
const tbl = await db.createTable(
"no_vec",
makeArrowTable([
@@ -79,7 +120,7 @@ describe("Test creating index", () => {
);
await tbl.createIndex("val").build();
const indexDir = path.join(tmpDir, "no_vec.lance", "_indices");
const indexDir = path.join(tmpDir.name, "no_vec.lance", "_indices");
expect(fs.readdirSync(indexDir)).toHaveLength(1);
for await (const r of tbl.query().filter("id > 1").select(["id"])) {
@@ -88,7 +129,7 @@ describe("Test creating index", () => {
});
test("two columns with different dimensions", async () => {
const db = await connect(tmpDir);
const db = await connect(tmpDir.name);
const schema = new Schema([
new Field("id", new Int32(), true),
new Field("vec", new FixedSizeList(32, new Field("item", new Float32()))),
@@ -158,7 +199,7 @@ describe("Test creating index", () => {
});
test("create scalar index", async () => {
const db = await connect(tmpDir);
const db = await connect(tmpDir.name);
const data = makeArrowTable(
Array(300)
.fill(1)
@@ -176,25 +217,27 @@ describe("Test creating index", () => {
await tbl.createIndex("id").build();
// check index directory
const indexDir = path.join(tmpDir, "test.lance", "_indices");
const indexDir = path.join(tmpDir.name, "test.lance", "_indices");
expect(fs.readdirSync(indexDir)).toHaveLength(1);
// TODO: check index type.
});
});
describe("Read consistency interval", () => {
let tmpDir: string;
let tmpDir: tmp.DirResult;
beforeEach(() => {
tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "read-consistency-"));
tmpDir = tmp.dirSync({ unsafeCleanup: true });
});
afterEach(() => tmpDir.removeCallback());
// const intervals = [undefined, 0, 0.1];
const intervals = [0];
test.each(intervals)("read consistency interval %p", async (interval) => {
const db = await connect({ uri: tmpDir });
const db = await connect(tmpDir.name);
const table = await db.createTable("my_table", [{ id: 1 }]);
const db2 = await connect({ uri: tmpDir, readConsistencyInterval: interval });
const db2 = await connect(tmpDir.name, { readConsistencyInterval: interval });
const table2 = await db2.openTable("my_table");
expect(await table2.countRows()).toEqual(await table.countRows());
@@ -218,14 +261,18 @@ describe("Read consistency interval", () => {
describe('schema evolution', function () {
let tmpDir: string;
let tmpDir: tmp.DirResult;
beforeEach(() => {
tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "schema-evolution-"));
tmpDir = tmp.dirSync({ unsafeCleanup: true });
});
afterEach(() => {
tmpDir.removeCallback();
})
// Create a new sample table
it('can add a new column to the schema', async function () {
const con = await connect(tmpDir)
const con = await connect(tmpDir.name)
const table = await con.createTable('vectors', [
{ id: 1n, vector: [0.1, 0.2] }
])
@@ -241,7 +288,7 @@ describe('schema evolution', function () {
});
it('can alter the columns in the schema', async function () {
const con = await connect(tmpDir)
const con = await connect(tmpDir.name)
const schema = new Schema([
new Field('id', new Int64(), true),
new Field('vector', new FixedSizeList(2, new Field('item', new Float32(), true)), true),
@@ -268,7 +315,7 @@ describe('schema evolution', function () {
});
it('can drop a column from the schema', async function () {
const con = await connect(tmpDir)
const con = await connect(tmpDir.name)
const table = await con.createTable('vectors', [
{ id: 1n, vector: [0.1, 0.2] }
])
@@ -279,4 +326,4 @@ describe('schema evolution', function () {
])
expect(await table.schema()).toEqual(expectedSchema)
});
});
});


@@ -1,4 +1,4 @@
// Copyright 2024 Lance Developers.
// Copyright 2023 Lance Developers.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -13,44 +13,91 @@
// limitations under the License.
import {
Int64,
Field,
makeBuilder,
RecordBatchFileWriter,
Utf8,
type Vector,
FixedSizeList,
Float,
Float32,
Schema,
Table as ArrowTable,
Table,
Vector,
vectorFromArray,
tableToIPC,
type Schema,
Table as ArrowTable,
RecordBatchStreamWriter,
List,
RecordBatch,
makeData,
Struct,
type Float,
DataType,
} from "apache-arrow";
Binary,
Float32
} from 'apache-arrow'
import { type EmbeddingFunction } from './embedding/embedding_function'
import { Table } from './native';
/** Data type accepted by NodeJS SDK */
export type Data = Record<string, unknown>[] | ArrowTable;
/*
* Options to control how a column should be converted to a vector array
*/
export class VectorColumnOptions {
/** Vector column type. */
type: Float = new Float32();
type: Float = new Float32()
constructor(values?: Partial<VectorColumnOptions>) {
Object.assign(this, values);
constructor (values?: Partial<VectorColumnOptions>) {
Object.assign(this, values)
}
}
/** Options to control the makeArrowTable call. */
export class MakeArrowTableOptions {
/** Provided schema. */
schema?: Schema;
/*
* Schema of the data.
*
* If this is not provided then the data type will be inferred from the
* JS type. Integer numbers will become int64, floating point numbers
* will become float64 and arrays will become variable sized lists with
* the data type inferred from the first element in the array.
*
* The schema must be specified if there are no records (e.g. to make
* an empty table)
*/
schema?: Schema
/** Vector columns */
/*
* Mapping from vector column name to expected type
*
* Lance expects vector columns to be fixed size list arrays (i.e. tensors)
* However, `makeArrowTable` will not infer this by default (it creates
* variable size list arrays). This field can be used to indicate that a column
* should be treated as a vector column and converted to a fixed size list.
*
* The keys should be the names of the vector columns. The value specifies the
* expected data type of the vector columns.
*
* If `schema` is provided then this field is ignored.
*
* By default, the column named "vector" will be assumed to be a float32
* vector column.
*/
vectorColumns: Record<string, VectorColumnOptions> = {
vector: new VectorColumnOptions(),
};
vector: new VectorColumnOptions()
}
constructor(values?: Partial<MakeArrowTableOptions>) {
Object.assign(this, values);
/**
* If true then string columns will be encoded with dictionary encoding
*
* Set this to true if your string columns tend to repeat the same values
* often. For more precise control use the `schema` property to specify the
* data type for individual columns.
*
* If `schema` is provided then this property is ignored.
*/
dictionaryEncodeStrings: boolean = false
constructor (values?: Partial<MakeArrowTableOptions>) {
Object.assign(this, values)
}
}
@@ -58,8 +105,30 @@ export class MakeArrowTableOptions {
* An enhanced version of the {@link makeTable} function from Apache Arrow
* that supports nested fields and embeddings columns.
*
* This function converts an array of Record<String, any> (row-major JS objects)
* to an Arrow Table (a columnar structure)
*
* Note that it currently does not support nulls.
*
* If a schema is provided then it will be used to determine the resulting array
* types. Fields will also be reordered to fit the order defined by the schema.
*
* If a schema is not provided then the types will be inferred and the field order
* will be controlled by the order of properties in the first record. If a type
* is inferred it will always be nullable.
*
* If the input is empty then a schema must be provided to create an empty table.
*
* When a schema is not specified then data types will be inferred. The inference
* rules are as follows:
*
* - boolean => Bool
* - number => Float64
* - String => Utf8
* - Buffer => Binary
* - Record<String, any> => Struct
* - Array<any> => List
*
* @param data input data
* @param options options to control the makeArrowTable call.
*
@@ -82,25 +151,27 @@ export class MakeArrowTableOptions {
* ], { schema });
* ```
*
* It guesses the vector columns if the schema is not provided. For example,
* by default it assumes that the column named `vector` is a vector column.
* By default it assumes that the column named `vector` is a vector column
* and it will be converted into a fixed size list array of type float32.
* The `vectorColumns` option can be used to support other vector column
* names and data types.
*
* ```ts
*
* const schema = new Schema([
new Field("a", new Float64()),
new Field("b", new Float64()),
new Field(
"vector",
new FixedSizeList(3, new Field("item", new Float32()))
),
]);
const table = makeArrowTable([
{ a: 1, b: 2, vector: [1, 2, 3] },
{ a: 4, b: 5, vector: [4, 5, 6] },
{ a: 7, b: 8, vector: [7, 8, 9] },
]);
assert.deepEqual(table.schema, schema);
new Field("a", new Float64()),
new Field("b", new Float64()),
new Field(
"vector",
new FixedSizeList(3, new Field("item", new Float32()))
),
]);
const table = makeArrowTable([
{ a: 1, b: 2, vector: [1, 2, 3] },
{ a: 4, b: 5, vector: [4, 5, 6] },
{ a: 7, b: 8, vector: [7, 8, 9] },
]);
assert.deepEqual(table.schema, schema);
* ```
*
* You can specify the vector column types and names using the options as well
@@ -108,81 +179,372 @@ export class MakeArrowTableOptions {
* ```typescript
*
* const schema = new Schema([
new Field('a', new Float64()),
new Field('b', new Float64()),
new Field('vec1', new FixedSizeList(3, new Field('item', new Float16()))),
new Field('vec2', new FixedSizeList(3, new Field('item', new Float16())))
]);
new Field('a', new Float64()),
new Field('b', new Float64()),
new Field('vec1', new FixedSizeList(3, new Field('item', new Float16()))),
new Field('vec2', new FixedSizeList(3, new Field('item', new Float16())))
]);
* const table = makeArrowTable([
{ a: 1, b: 2, vec1: [1, 2, 3], vec2: [2, 4, 6] },
{ a: 4, b: 5, vec1: [4, 5, 6], vec2: [8, 10, 12] },
{ a: 7, b: 8, vec1: [7, 8, 9], vec2: [14, 16, 18] }
], {
vectorColumns: {
vec1: { type: new Float16() },
vec2: { type: new Float16() }
}
}
{ a: 1, b: 2, vec1: [1, 2, 3], vec2: [2, 4, 6] },
{ a: 4, b: 5, vec1: [4, 5, 6], vec2: [8, 10, 12] },
{ a: 7, b: 8, vec1: [7, 8, 9], vec2: [14, 16, 18] }
], {
vectorColumns: {
vec1: { type: new Float16() },
vec2: { type: new Float16() }
}
}
* assert.deepEqual(table.schema, schema)
* ```
*/
export function makeArrowTable(
data: Record<string, any>[],
options?: Partial<MakeArrowTableOptions>
): Table {
if (data.length === 0) {
throw new Error("At least one record needs to be provided");
}
const opt = new MakeArrowTableOptions(options ?? {});
const columns: Record<string, Vector> = {};
// TODO: sample dataset to find missing columns
const columnNames = Object.keys(data[0]);
for (const colName of columnNames) {
// eslint-disable-next-line @typescript-eslint/no-unsafe-return
let values = data.map((datum) => datum[colName]);
let vector: Vector;
if (opt.schema !== undefined) {
const fieldType: DataType | undefined = opt.schema.fields.filter((f) => f.name === colName)[0]?.type as DataType;
if (fieldType instanceof Int64) {
// wrap in BigInt to avoid bug: https://github.com/apache/arrow/issues/40051
// eslint-disable-next-line @typescript-eslint/no-unsafe-argument
values = values.map((v) => BigInt(v));
}
vector = vectorFromArray(values, fieldType);
} else {
const vectorColumnOptions = opt.vectorColumns[colName];
if (vectorColumnOptions !== undefined) {
const fslType = new FixedSizeList(
(values[0] as any[]).length,
new Field("item", vectorColumnOptions.type, false)
);
vector = vectorFromArray(values, fslType);
} else {
// Normal case
vector = vectorFromArray(values);
}
}
columns[colName] = vector;
}
return new Table(columns);
}
export function makeArrowTable (
data: Array<Record<string, any>>,
options?: Partial<MakeArrowTableOptions>
): ArrowTable {
if (data.length === 0 && (options?.schema === undefined || options?.schema === null)) {
throw new Error('At least one record or a schema needs to be provided')
}
const opt = new MakeArrowTableOptions(options !== undefined ? options : {})
const columns: Record<string, Vector> = {}
// TODO: sample dataset to find missing columns
// Prefer the field ordering of the schema, if present
const columnNames = ((options?.schema) != null) ? (options?.schema?.names as string[]) : Object.keys(data[0])
for (const colName of columnNames) {
if (data.length !== 0 && !Object.prototype.hasOwnProperty.call(data[0], colName)) {
// The field is present in the schema, but not in the data, skip it
continue
}
// Extract a single column from the records (transpose from row-major to col-major)
let values = data.map((datum) => datum[colName])
// By default (type === undefined) arrow will infer the type from the JS type
let type
if (opt.schema !== undefined) {
// Explicit schema is provided, highest priority
// If there is a schema provided, then use that for the type instead
type = opt.schema?.fields.filter((f) => f.name === colName)[0]?.type
if (DataType.isInt(type) && type.bitWidth === 64) {
// wrap in BigInt to avoid bug: https://github.com/apache/arrow/issues/40051
values = values.map((v) => {
if (v === null) {
return v
}
return BigInt(v)
})
}
} else {
// Otherwise, check to see if this column is one of the vector columns
// defined by opt.vectorColumns and, if so, use the fixed size list type
const vectorColumnOptions = opt.vectorColumns[colName]
if (vectorColumnOptions !== undefined) {
type = newVectorType(values[0].length, vectorColumnOptions.type)
}
}
try {
// Convert an Array of JS values to an arrow vector
columns[colName] = makeVector(values, type, opt.dictionaryEncodeStrings)
} catch (error: unknown) {
// eslint-disable-next-line @typescript-eslint/restrict-template-expressions
throw Error(`Could not convert column "${colName}" to Arrow: ${error}`)
}
}
if (opt.schema != null) {
// `new ArrowTable(columns)` infers a schema which may sometimes have
// incorrect nullability (it assumes nullable=true always)
//
// `new ArrowTable(schema, columns)` will also fail because it will create a
// batch with an inferred schema and then complain that the batch schema
// does not match the provided schema.
//
// To work around this we first create a table with the wrong schema and
// then patch the schema of the batches so we can use
// `new ArrowTable(schema, batches)` which does not do any schema inference
const firstTable = new ArrowTable(columns)
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
const batchesFixed = firstTable.batches.map(batch => new RecordBatch(opt.schema!, batch.data))
return new ArrowTable(opt.schema, batchesFixed)
} else {
return new ArrowTable(columns)
}
}
/**
* Convert an Arrow Table to a Buffer.
*
* @param data Arrow Table
* @param schema Arrow Schema, optional
* @returns Buffer node
*/
export function toBuffer(data: Data, schema?: Schema): Buffer {
let tbl: Table;
if (data instanceof Table) {
tbl = data;
} else {
tbl = makeArrowTable(data, { schema });
}
return Buffer.from(tableToIPC(tbl));
}
/**
* Create an empty Arrow table with the provided schema
*/
export function makeEmptyTable (schema: Schema): ArrowTable {
return makeArrowTable([], { schema })
}
// Helper function to convert Array<Array<any>> to a variable sized list array
function makeListVector (lists: any[][]): Vector<any> {
if (lists.length === 0 || lists[0].length === 0) {
throw Error('Cannot infer list vector from empty array or empty list')
}
const sampleList = lists[0]
let inferredType
try {
const sampleVector = makeVector(sampleList)
inferredType = sampleVector.type
} catch (error: unknown) {
// eslint-disable-next-line @typescript-eslint/restrict-template-expressions
throw Error(`Cannot infer list vector. Cannot infer inner type: ${error}`)
}
const listBuilder = makeBuilder({
type: new List(new Field('item', inferredType, true))
})
for (const list of lists) {
listBuilder.append(list)
}
return listBuilder.finish().toVector()
}
// Helper function to convert an Array of JS values to an Arrow Vector
function makeVector (values: any[], type?: DataType, stringAsDictionary?: boolean): Vector<any> {
if (type !== undefined) {
// No need for inference, let Arrow create it
return vectorFromArray(values, type)
}
if (values.length === 0) {
throw Error('makeVector requires at least one value or the type must be specified')
}
const sampleValue = values.find(val => val !== null && val !== undefined)
if (sampleValue === undefined) {
throw Error('makeVector cannot infer the type if all values are null or undefined')
}
if (Array.isArray(sampleValue)) {
// Default Arrow inference doesn't handle list types
return makeListVector(values)
} else if (Buffer.isBuffer(sampleValue)) {
// Default Arrow inference doesn't handle Buffer
return vectorFromArray(values, new Binary())
} else if (!(stringAsDictionary ?? false) && (typeof sampleValue === 'string' || sampleValue instanceof String)) {
// If the type is string then don't use Arrow's default inference unless dictionaries are requested
// because it will always use dictionary encoding for strings
return vectorFromArray(values, new Utf8())
} else {
// Convert a JS array of values to an arrow vector
return vectorFromArray(values)
}
}
async function applyEmbeddings<T> (table: ArrowTable, embeddings?: EmbeddingFunction<T>, schema?: Schema): Promise<ArrowTable> {
if (embeddings == null) {
return table
}
// Convert from ArrowTable to Record<String, Vector>
const colEntries = [...Array(table.numCols).keys()].map((_, idx) => {
const name = table.schema.fields[idx].name
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
const vec = table.getChildAt(idx)!
return [name, vec]
})
const newColumns = Object.fromEntries(colEntries)
const sourceColumn = newColumns[embeddings.sourceColumn]
const destColumn = embeddings.destColumn ?? 'vector'
const innerDestType = embeddings.embeddingDataType ?? new Float32()
if (sourceColumn === undefined) {
throw new Error(`Cannot apply embedding function because the source column '${embeddings.sourceColumn}' was not present in the data`)
}
if (table.numRows === 0) {
if (Object.prototype.hasOwnProperty.call(newColumns, destColumn)) {
// We have an empty table and it already has the embedding column so no work needs to be done
// Note: we don't return an error like we did below because this is a common occurrence. For example,
// if we call convertToTable with 0 records and a schema that includes the embedding
return table
}
if (embeddings.embeddingDimension !== undefined) {
const destType = newVectorType(embeddings.embeddingDimension, innerDestType)
newColumns[destColumn] = makeVector([], destType)
} else if (schema != null) {
const destField = schema.fields.find(f => f.name === destColumn)
if (destField != null) {
newColumns[destColumn] = makeVector([], destField.type)
} else {
throw new Error(`Attempt to apply embeddings to an empty table failed because schema was missing embedding column '${destColumn}'`)
}
} else {
throw new Error('Attempt to apply embeddings to an empty table when the embeddings function does not specify `embeddingDimension`')
}
} else {
if (Object.prototype.hasOwnProperty.call(newColumns, destColumn)) {
throw new Error(`Attempt to apply embeddings to table failed because column ${destColumn} already existed`)
}
if (table.batches.length > 1) {
throw new Error('Internal error: `makeArrowTable` unexpectedly created a table with more than one batch')
}
const values = sourceColumn.toArray()
const vectors = await embeddings.embed(values as T[])
if (vectors.length !== values.length) {
throw new Error('Embedding function did not return an embedding for each input element')
}
const destType = newVectorType(vectors[0].length, innerDestType)
newColumns[destColumn] = makeVector(vectors, destType)
}
const newTable = new ArrowTable(newColumns)
if (schema != null) {
if (schema.fields.find(f => f.name === destColumn) === undefined) {
throw new Error(`When using embedding functions and specifying a schema the schema should include the embedding column but the column ${destColumn} was missing`)
}
return alignTable(newTable, schema)
}
return newTable
}
/*
* Convert an Array of records into an Arrow Table, optionally applying an
* embeddings function to it.
*
* This function calls `makeArrowTable` first to create the Arrow Table.
* Any provided `makeTableOptions` (e.g. a schema) will be passed on to
* that call.
*
* The embedding function will be passed a column of values (based on the
* `sourceColumn` of the embedding function) and expects to receive back
* number[][] which will be converted into a fixed size list column. By
* default this will be a fixed size list of Float32 but that can be
* customized by the `embeddingDataType` property of the embedding function.
*
* If a schema is provided in `makeTableOptions` then it should include the
* embedding columns. If no schema is provided then embedding columns will
* be placed at the end of the table, after all of the input columns.
*/
export async function convertToTable<T> (
data: Array<Record<string, unknown>>,
embeddings?: EmbeddingFunction<T>,
makeTableOptions?: Partial<MakeArrowTableOptions>
): Promise<ArrowTable> {
const table = makeArrowTable(data, makeTableOptions)
return await applyEmbeddings(table, embeddings, makeTableOptions?.schema)
}
// Creates the Arrow Type for a Vector column with dimension `dim`
function newVectorType <T extends Float> (dim: number, innerType: T): FixedSizeList<T> {
// in Lance we always default to having the elements nullable, so we need to set it to true
// otherwise we often get schema mismatches because the stored data always has schema with nullable elements
const children = new Field<T>('item', innerType, true)
return new FixedSizeList(dim, children)
}
/**
* Serialize an Array of records into a buffer using the Arrow IPC File serialization
*
* This function will call `convertToTable` and pass on `embeddings` and `schema`
*
* `schema` is required if data is empty
*/
export async function fromRecordsToBuffer<T> (
data: Array<Record<string, unknown>>,
embeddings?: EmbeddingFunction<T>,
schema?: Schema
): Promise<Buffer> {
const table = await convertToTable(data, embeddings, { schema })
const writer = RecordBatchFileWriter.writeAll(table)
return Buffer.from(await writer.toUint8Array())
}
/**
* Serialize an Array of records into a buffer using the Arrow IPC Stream serialization
*
* This function will call `convertToTable` and pass on `embeddings` and `schema`
*
* `schema` is required if data is empty
*/
export async function fromRecordsToStreamBuffer<T> (
data: Array<Record<string, unknown>>,
embeddings?: EmbeddingFunction<T>,
schema?: Schema
): Promise<Buffer> {
const table = await convertToTable(data, embeddings, { schema })
const writer = RecordBatchStreamWriter.writeAll(table)
return Buffer.from(await writer.toUint8Array())
}
/**
* Serialize an Arrow Table into a buffer using the Arrow IPC File serialization
*
* This function will apply `embeddings` to the table in a manner similar to
* `convertToTable`.
*
* `schema` is required if the table is empty
*/
export async function fromTableToBuffer<T> (
table: ArrowTable,
embeddings?: EmbeddingFunction<T>,
schema?: Schema
): Promise<Buffer> {
const tableWithEmbeddings = await applyEmbeddings(table, embeddings, schema)
const writer = RecordBatchFileWriter.writeAll(tableWithEmbeddings)
return Buffer.from(await writer.toUint8Array())
}
export async function fromDataToBuffer<T> (
data: Data,
embeddings?: EmbeddingFunction<T>,
schema?: Schema
): Promise<Buffer> {
if (data instanceof ArrowTable) {
return fromTableToBuffer(data, embeddings, schema)
} else {
const table = await convertToTable(data);
return fromTableToBuffer(table, embeddings, schema);
}
}
/**
* Serialize an Arrow Table into a buffer using the Arrow IPC Stream serialization
*
* This function will apply `embeddings` to the table in a manner similar to
* `convertToTable`.
*
* `schema` is required if the table is empty
*/
export async function fromTableToStreamBuffer<T> (
table: ArrowTable,
embeddings?: EmbeddingFunction<T>,
schema?: Schema
): Promise<Buffer> {
const tableWithEmbeddings = await applyEmbeddings(table, embeddings, schema)
const writer = RecordBatchStreamWriter.writeAll(tableWithEmbeddings)
return Buffer.from(await writer.toUint8Array())
}
function alignBatch (batch: RecordBatch, schema: Schema): RecordBatch {
const alignedChildren = []
for (const field of schema.fields) {
const indexInBatch = batch.schema.fields?.findIndex(
(f) => f.name === field.name
)
if (indexInBatch < 0) {
throw new Error(
`The column ${field.name} was not found in the Arrow Table`
)
}
alignedChildren.push(batch.data.children[indexInBatch])
}
const newData = makeData({
type: new Struct(schema.fields),
length: batch.numRows,
nullCount: batch.nullCount,
children: alignedChildren
})
return new RecordBatch(schema, newData)
}
function alignTable (table: ArrowTable, schema: Schema): ArrowTable {
const alignedBatches = table.batches.map((batch) =>
alignBatch(batch, schema)
)
return new ArrowTable(schema, alignedBatches)
}
// Creates an empty Arrow Table
export function createEmptyTable (schema: Schema): ArrowTable {
return new ArrowTable(schema)
}


@@ -12,10 +12,10 @@
// See the License for the specific language governing permissions and
// limitations under the License.
import { toBuffer } from "./arrow";
import { Connection as _NativeConnection } from "./native";
import { fromTableToBuffer, makeArrowTable, makeEmptyTable } from "./arrow";
import { Connection as LanceDbConnection } from "./native";
import { Table } from "./table";
import { Table as ArrowTable } from "apache-arrow";
import { Table as ArrowTable, Schema } from "apache-arrow";
export interface CreateTableOptions {
/**
@@ -39,14 +39,47 @@ export interface CreateTableOptions {
* A LanceDB Connection that allows you to open tables and create new ones.
*
* Connection could be local against filesystem or remote against a server.
*
* A Connection is intended to be a long lived object and may hold open
* resources such as HTTP connection pools. This is generally fine and
* a single connection should be shared if it is going to be used many
* times. However, if you are finished with a connection, you may call
* close to eagerly free these resources. Any call to a Connection
* method after it has been closed will result in an error.
*
* Closing a connection is optional. Connections will automatically
* be closed when they are garbage collected.
*
* Any created tables are independent and will continue to work even if
* the underlying connection has been closed.
*/
export class Connection {
readonly inner: _NativeConnection;
readonly inner: LanceDbConnection;
constructor(inner: _NativeConnection) {
constructor(inner: LanceDbConnection) {
this.inner = inner;
}
/** Return true if the connection has not been closed */
isOpen(): boolean {
return this.inner.isOpen();
}
/** Close the connection, releasing any underlying resources.
*
* It is safe to call this method multiple times.
*
* Any attempt to use the connection after it is closed will result in an error.
*/
close(): void {
this.inner.close();
}
/** Return a brief description of the connection */
display(): string {
return this.inner.display();
}
/** List all the table names in this database. */
async tableNames(): Promise<string[]> {
return this.inner.tableNames();
@@ -81,11 +114,41 @@ export class Connection {
mode = "exist_ok";
}
const buf = toBuffer(data);
let table: ArrowTable;
if (data instanceof ArrowTable) {
table = data;
} else {
table = makeArrowTable(data);
}
const buf = await fromTableToBuffer(table);
const innerTable = await this.inner.createTable(name, buf, mode);
return new Table(innerTable);
}
/**
* Creates a new empty Table
*
* @param {string} name - The name of the table.
* @param schema - The schema of the table
*/
async createEmptyTable(
name: string,
schema: Schema,
options?: Partial<CreateTableOptions>
): Promise<Table> {
let mode: string = options?.mode ?? "create";
const existOk = options?.existOk ?? false;
if (mode === "create" && existOk) {
mode = "exist_ok";
}
const table = makeEmptyTable(schema);
const buf = await fromTableToBuffer(table);
const innerTable = await this.inner.createEmptyTable(name, buf, mode);
return new Table(innerTable);
}
/**
* Drop an existing table.
* @param name The name of the table to drop.


@@ -0,0 +1,68 @@
// Copyright 2023 Lance Developers.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import { type Float } from 'apache-arrow'
/**
* An embedding function that automatically creates vector representation for a given column.
*/
export interface EmbeddingFunction<T> {
/**
* The name of the column that will be used as input for the Embedding Function.
*/
sourceColumn: string
/**
* The data type of the embedding
*
* The embedding function should return `number`. This will be converted into
* an Arrow float array. By default this will be Float32 but this property can
* be used to control the conversion.
*/
embeddingDataType?: Float
/**
* The dimension of the embedding
*
* This is optional, normally this can be determined by looking at the results of
* `embed`. If this is not specified, and there is an attempt to apply the embedding
* to an empty table, then that process will fail.
*/
embeddingDimension?: number
/**
* The name of the column that will contain the embedding
*
* By default this is "vector"
*/
destColumn?: string
/**
* Should the source column be excluded from the resulting table
*
* By default the source column is included. Set this to true and
* only the embedding will be stored.
*/
excludeSource?: boolean
/**
* Creates a vector representation for the given values.
*/
embed: (data: T[]) => Promise<number[][]>
}
export function isEmbeddingFunction<T> (value: any): value is EmbeddingFunction<T> {
return typeof value.sourceColumn === 'string' &&
typeof value.embed === 'function'
}


@@ -0,0 +1,57 @@
// Copyright 2023 Lance Developers.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import { type EmbeddingFunction } from './embedding_function'
import type OpenAI from 'openai'
export class OpenAIEmbeddingFunction implements EmbeddingFunction<string> {
private readonly _openai: OpenAI
private readonly _modelName: string
constructor (sourceColumn: string, openAIKey: string, modelName: string = 'text-embedding-ada-002') {
/**
* @type {import("openai").default}
*/
let Openai
try {
// eslint-disable-next-line @typescript-eslint/no-var-requires
Openai = require('openai')
} catch {
throw new Error('please install openai@^4.24.1 using npm install openai')
}
this.sourceColumn = sourceColumn
const configuration = {
apiKey: openAIKey
}
this._openai = new Openai(configuration)
this._modelName = modelName
}
async embed (data: string[]): Promise<number[][]> {
const response = await this._openai.embeddings.create({
model: this._modelName,
input: data
})
const embeddings: number[][] = []
for (let i = 0; i < response.data.length; i++) {
embeddings.push(response.data[i].embedding)
}
return embeddings
}
sourceColumn: string
}


@@ -13,7 +13,7 @@
// limitations under the License.
import { Connection } from "./connection";
import { Connection as NativeConnection, ConnectionOptions } from "./native.js";
import { Connection as LanceDbConnection, ConnectionOptions } from "./native.js";
export {
ConnectionOptions,
@@ -23,7 +23,6 @@ export {
} from "./native.js";
export { Connection } from "./connection";
export { Table } from "./table";
export { Data } from "./arrow";
export { IvfPQOptions, IndexBuilder } from "./indexer";
/**
@@ -39,26 +38,9 @@ export { IvfPQOptions, IndexBuilder } from "./indexer";
*
* @see {@link ConnectionOptions} for more details on the URI format.
*/
export async function connect(uri: string): Promise<Connection>;
export async function connect(
opts: Partial<ConnectionOptions>
): Promise<Connection>;
export async function connect(
args: string | Partial<ConnectionOptions>
): Promise<Connection> {
let opts: ConnectionOptions;
if (typeof args === "string") {
opts = { uri: args };
} else {
opts = Object.assign(
{
uri: "",
apiKey: undefined,
hostOverride: undefined,
},
args
);
}
const nativeConn = await NativeConnection.new(opts);
export async function connect(uri: string, opts?: Partial<ConnectionOptions>): Promise<Connection>
{
opts = opts ?? {};
const nativeConn = await LanceDbConnection.new(uri, opts);
return new Connection(nativeConn);
}


@@ -45,7 +45,6 @@ export interface AddColumnsSql {
valueSql: string
}
export interface ConnectionOptions {
uri: string
apiKey?: string
hostOverride?: string
/**
@@ -71,10 +70,13 @@ export const enum WriteMode {
export interface WriteOptions {
mode?: WriteMode
}
export function connect(options: ConnectionOptions): Promise<Connection>
export function connect(uri: string, options: ConnectionOptions): Promise<Connection>
export class Connection {
/** Create a new Connection instance from the given URI. */
static new(options: ConnectionOptions): Promise<Connection>
static new(uri: string, options: ConnectionOptions): Promise<Connection>
display(): string
isOpen(): boolean
close(): void
/** List all tables in the dataset. */
tableNames(): Promise<Array<string>>
/**
@@ -86,6 +88,7 @@ export class Connection {
*
*/
createTable(name: string, buf: Buffer, mode: string): Promise<Table>
createEmptyTable(name: string, schemaBuf: Buffer, mode: string): Promise<Table>
openTable(name: string): Promise<Table>
/** Drop table with the name. Or raise an error if the table does not exist. */
dropTable(name: string): Promise<void>
@@ -114,9 +117,12 @@ export class Query {
executeStream(): Promise<RecordBatchIterator>
}
export class Table {
display(): string
isOpen(): boolean
close(): void
/** Return Schema as empty Arrow IPC file. */
schema(): Promise<Buffer>
add(buf: Buffer): Promise<void>
add(buf: Buffer, mode: string): Promise<void>
countRows(filter?: string | undefined | null): Promise<number>
delete(predicate: string): Promise<void>
createIndex(): IndexBuilder


@@ -14,14 +14,32 @@
import { Schema, tableFromIPC } from "apache-arrow";
import { AddColumnsSql, ColumnAlteration, Table as _NativeTable } from "./native";
import { toBuffer, Data } from "./arrow";
import { Query } from "./query";
import { IndexBuilder } from "./indexer";
import { Data, fromDataToBuffer } from "./arrow";
/**
* A LanceDB Table is the collection of Records.
* Options for adding data to a table.
*/
export interface AddDataOptions {
/** If "append" (the default) then the new data will be added to the table
*
* If "overwrite" then the new data will replace the existing data in the table.
*/
mode: "append" | "overwrite";
}
/**
* A Table is a collection of Records in a LanceDB Database.
*
* Each Record has one or more vector fields.
* A Table object is expected to be long lived and reused for multiple operations.
* Table objects will cache a certain amount of index data in memory. This cache
* will be freed when the Table is garbage collected. To eagerly free the cache you
* can call the `close` method. Once the Table is closed, it cannot be used for any
* further operations.
*
* Closing a table is optional. If not closed, it will be closed when it is garbage
* collected.
*/
export class Table {
private readonly inner: _NativeTable;
@@ -31,6 +49,27 @@ export class Table {
this.inner = inner;
}
/** Return true if the table has not been closed */
isOpen(): boolean {
return this.inner.isOpen();
}
/** Close the table, releasing any underlying resources.
*
* It is safe to call this method multiple times.
*
* Any attempt to use the table after it is closed will result in an error.
*/
close(): void {
this.inner.close();
}
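The close semantics documented above can be exercised like this (a sketch; the table name is illustrative):

const tbl = await db.openTable("my_table");
console.log(tbl.isOpen()); // true
tbl.close();               // eagerly frees cached index data
tbl.close();               // safe: closing twice is a no-op
await tbl.schema();        // rejects because the table is closed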
/** Return a brief description of the table */
display(): string {
return this.inner.display();
}
/** Get the schema of the table. */
async schema(): Promise<Schema> {
const schemaBuf = await this.inner.schema();
@@ -44,9 +83,11 @@ export class Table {
* @param {Data} data Records to be inserted into the Table
* @return A promise that resolves once the data has been added
*/
async add(data: Data): Promise<void> {
const buffer = toBuffer(data);
await this.inner.add(buffer);
async add(data: Data, options?: Partial<AddDataOptions>): Promise<void> {
const mode = options?.mode ?? "append";
const buffer = await fromDataToBuffer(data);
await this.inner.add(buffer, mode);
}
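A usage sketch of the new mode option (rows are illustrative):

// Default behaviour: append the new rows.
await tbl.add([{ id: 1, vector: [0.1, 0.2] }]);

// Replace the existing rows instead of appending.
await tbl.add([{ id: 2, vector: [0.3, 0.4] }], { mode: "overwrite" });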
/** Count the total number of rows in the dataset. */

nodejs/package-lock.json (generated)

@@ -1,11 +1,11 @@
{
"name": "vectordb",
"name": "lancedb",
"version": "0.4.3",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "vectordb",
"name": "lancedb",
"version": "0.4.3",
"cpu": [
"x64",
@@ -17,16 +17,15 @@
"linux",
"windows"
],
"dependencies": {
"apache-arrow": "^15.0.0"
},
"devDependencies": {
"@napi-rs/cli": "^2.18.0",
"@types/jest": "^29.1.2",
"@types/tmp": "^0.2.6",
"@typescript-eslint/eslint-plugin": "^6.19.0",
"@typescript-eslint/parser": "^6.19.0",
"eslint": "^8.56.0",
"jest": "^29.7.0",
"tmp": "^0.2.3",
"ts-jest": "^29.1.2",
"typedoc": "^0.25.7",
"typedoc-plugin-markdown": "^3.17.1",
@@ -36,16 +35,21 @@
"node": ">= 18"
},
"optionalDependencies": {
"vectordb-darwin-arm64": "0.4.3",
"vectordb-darwin-x64": "0.4.3",
"vectordb-linux-arm64-gnu": "0.4.3",
"vectordb-linux-x64-gnu": "0.4.3"
"lancedb-darwin-arm64": "0.4.3",
"lancedb-darwin-x64": "0.4.3",
"lancedb-linux-arm64-gnu": "0.4.3",
"lancedb-linux-x64-gnu": "0.4.3",
"openai": "^4.28.4"
},
"peerDependencies": {
"apache-arrow": "^15.0.0"
}
},
"node_modules/@75lb/deep-merge": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/@75lb/deep-merge/-/deep-merge-1.1.1.tgz",
"integrity": "sha512-xvgv6pkMGBA6GwdyJbNAnDmfAIR/DfWhrj9jgWh3TY7gRm3KO46x/GPjRg6wJ0nOepwqrNxFfojebh0Df4h4Tw==",
"peer": true,
"dependencies": {
"lodash.assignwith": "^4.2.0",
"typical": "^7.1.1"
@@ -58,6 +62,7 @@
"version": "7.1.1",
"resolved": "https://registry.npmjs.org/typical/-/typical-7.1.1.tgz",
"integrity": "sha512-T+tKVNs6Wu7IWiAce5BgMd7OZfNYUndHwc5MknN+UHOudi7sGZzuHdCadllRuqJ3fPtgFtIH9+lt9qRv6lmpfA==",
"peer": true,
"engines": {
"node": ">=12.17"
}
@@ -1416,9 +1421,10 @@
}
},
"node_modules/@swc/helpers": {
"version": "0.5.3",
"resolved": "https://registry.npmjs.org/@swc/helpers/-/helpers-0.5.3.tgz",
"integrity": "sha512-FaruWX6KdudYloq1AHD/4nU+UsMTdNE8CKyrseXWEcgjDAbvkwJg2QGPAnfIJLIWsjZOSPLOAykK6fuYp4vp4A==",
"version": "0.5.6",
"resolved": "https://registry.npmjs.org/@swc/helpers/-/helpers-0.5.6.tgz",
"integrity": "sha512-aYX01Ke9hunpoCexYAgQucEpARGQ5w/cqHFrIR+e9gdKb1QWTsVJuTJ2ozQzIAxLyRQe/m+2RqzkyOOGiMKRQA==",
"peer": true,
"dependencies": {
"tslib": "^2.4.0"
}
@@ -1467,12 +1473,14 @@
"node_modules/@types/command-line-args": {
"version": "5.2.3",
"resolved": "https://registry.npmjs.org/@types/command-line-args/-/command-line-args-5.2.3.tgz",
"integrity": "sha512-uv0aG6R0Y8WHZLTamZwtfsDLVRnOa+n+n5rEvFWL5Na5gZ8V2Teab/duDPFzIIIhs9qizDpcavCusCLJZu62Kw=="
"integrity": "sha512-uv0aG6R0Y8WHZLTamZwtfsDLVRnOa+n+n5rEvFWL5Na5gZ8V2Teab/duDPFzIIIhs9qizDpcavCusCLJZu62Kw==",
"peer": true
},
"node_modules/@types/command-line-usage": {
"version": "5.0.4",
"resolved": "https://registry.npmjs.org/@types/command-line-usage/-/command-line-usage-5.0.4.tgz",
"integrity": "sha512-BwR5KP3Es/CSht0xqBcUXS3qCAUVXwpRKsV2+arxeb65atasuXG9LykC9Ab10Cw3s2raH92ZqOeILaQbsB2ACg=="
"version": "5.0.2",
"resolved": "https://registry.npmjs.org/@types/command-line-usage/-/command-line-usage-5.0.2.tgz",
"integrity": "sha512-n7RlEEJ+4x4TS7ZQddTmNSxP+zziEG0TNsMfiRIxcIVXt71ENJ9ojeXmGO3wPoTdn7pJcU2xc3CJYMktNT6DPg==",
"peer": true
},
"node_modules/@types/graceful-fs": {
"version": "4.1.9",
@@ -1531,6 +1539,16 @@
"undici-types": "~5.26.4"
}
},
"node_modules/@types/node-fetch": {
"version": "2.6.11",
"resolved": "https://registry.npmjs.org/@types/node-fetch/-/node-fetch-2.6.11.tgz",
"integrity": "sha512-24xFj9R5+rfQJLRyM56qh+wnVSYhyXC2tkoBndtY0U+vubqNsYXGjufB2nn8Q6gt0LrARwL6UBtMCSVCwl4B1g==",
"optional": true,
"dependencies": {
"@types/node": "*",
"form-data": "^4.0.0"
}
},
"node_modules/@types/semver": {
"version": "7.5.6",
"resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.5.6.tgz",
@@ -1543,6 +1561,12 @@
"integrity": "sha512-9aEbYZ3TbYMznPdcdr3SmIrLXwC/AKZXQeCf9Pgao5CKb8CyHuEX5jzWPTkvregvhRJHcpRO6BFoGW9ycaOkYw==",
"dev": true
},
"node_modules/@types/tmp": {
"version": "0.2.6",
"resolved": "https://registry.npmjs.org/@types/tmp/-/tmp-0.2.6.tgz",
"integrity": "sha512-chhaNf2oKHlRkDGt+tiKE2Z5aJ6qalm7Z9rlLdBwmOiAAf09YQvvoLXjWK4HWPF1xU/fqvMgfNfpVoBscA/tKA==",
"dev": true
},
"node_modules/@types/yargs": {
"version": "17.0.32",
"resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-17.0.32.tgz",
@@ -1807,6 +1831,18 @@
"integrity": "sha512-zuVdFrMJiuCDQUMCzQaD6KL28MjnqqN8XnAqiEq9PNm/hCPTSGfrXCOfwj1ow4LFb/tNymJPwsNbVePc1xFqrQ==",
"dev": true
},
"node_modules/abort-controller": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz",
"integrity": "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==",
"optional": true,
"dependencies": {
"event-target-shim": "^5.0.0"
},
"engines": {
"node": ">=6.5"
}
},
"node_modules/acorn": {
"version": "8.11.3",
"resolved": "https://registry.npmjs.org/acorn/-/acorn-8.11.3.tgz",
@@ -1828,6 +1864,18 @@
"acorn": "^6.0.0 || ^7.0.0 || ^8.0.0"
}
},
"node_modules/agentkeepalive": {
"version": "4.5.0",
"resolved": "https://registry.npmjs.org/agentkeepalive/-/agentkeepalive-4.5.0.tgz",
"integrity": "sha512-5GG/5IbQQpC9FpkRGsSvZI5QYeSCzlJHdpBQntCsuTOxhKD8lqKhrleg2Yi7yvMIf82Ycmmqln9U8V9qwEiJew==",
"optional": true,
"dependencies": {
"humanize-ms": "^1.2.1"
},
"engines": {
"node": ">= 8.0.0"
}
},
"node_modules/ajv": {
"version": "6.12.6",
"resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz",
@@ -1917,6 +1965,7 @@
"version": "15.0.0",
"resolved": "https://registry.npmjs.org/apache-arrow/-/apache-arrow-15.0.0.tgz",
"integrity": "sha512-e6aunxNKM+woQf137ny3tp/xbLjFJS2oGQxQhYGqW6dGeIwNV1jOeEAeR6sS2jwAI2qLO83gYIP2MBz02Gw5Xw==",
"peer": true,
"dependencies": {
"@swc/helpers": "^0.5.2",
"@types/command-line-args": "^5.2.1",
@@ -1945,6 +1994,7 @@
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/array-back/-/array-back-3.1.0.tgz",
"integrity": "sha512-TkuxA4UCOvxuDK6NZYXCalszEzj+TLszyASooky+i742l9TqsOdYCMJJupxRic61hwquNtppB3hgcuq9SVSH1Q==",
"peer": true,
"engines": {
"node": ">=6"
}
@@ -1958,6 +2008,12 @@
"node": ">=8"
}
},
"node_modules/asynckit": {
"version": "0.4.0",
"resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
"integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==",
"optional": true
},
"node_modules/babel-jest": {
"version": "29.7.0",
"resolved": "https://registry.npmjs.org/babel-jest/-/babel-jest-29.7.0.tgz",
@@ -2089,6 +2145,12 @@
"integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==",
"dev": true
},
"node_modules/base-64": {
"version": "0.1.0",
"resolved": "https://registry.npmjs.org/base-64/-/base-64-0.1.0.tgz",
"integrity": "sha512-Y5gU45svrR5tI2Vt/X9GPd3L0HNIKzGu202EjxrXMpuc2V2CiKgemAbUUsqYmZJvPtCXoUKjNZwBJzsNScUbXA==",
"optional": true
},
"node_modules/brace-expansion": {
"version": "1.1.11",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
@@ -2218,6 +2280,7 @@
"version": "0.4.0",
"resolved": "https://registry.npmjs.org/chalk-template/-/chalk-template-0.4.0.tgz",
"integrity": "sha512-/ghrgmhfY8RaSdeo43hNXxpoHAtxdbskUHjPpfqUWGttFgycUhYPGx3YZBCnUCvOa7Doivn1IZec3DEGFoMgLg==",
"peer": true,
"dependencies": {
"chalk": "^4.1.2"
},
@@ -2237,6 +2300,15 @@
"node": ">=10"
}
},
"node_modules/charenc": {
"version": "0.0.2",
"resolved": "https://registry.npmjs.org/charenc/-/charenc-0.0.2.tgz",
"integrity": "sha512-yrLQ/yVUFXkzg7EDQsPieE/53+0RlaWTs+wBrvW36cyilJ2SaDWfl4Yj7MtLTXleV9uEKefbAGUPv2/iWSooRA==",
"optional": true,
"engines": {
"node": "*"
}
},
"node_modules/cjs-module-lexer": {
"version": "1.2.3",
"resolved": "https://registry.npmjs.org/cjs-module-lexer/-/cjs-module-lexer-1.2.3.tgz",
@@ -2289,10 +2361,23 @@
"resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
"integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA=="
},
"node_modules/combined-stream": {
"version": "1.0.8",
"resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
"integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==",
"optional": true,
"dependencies": {
"delayed-stream": "~1.0.0"
},
"engines": {
"node": ">= 0.8"
}
},
"node_modules/command-line-args": {
"version": "5.2.1",
"resolved": "https://registry.npmjs.org/command-line-args/-/command-line-args-5.2.1.tgz",
"integrity": "sha512-H4UfQhZyakIjC74I9d34fGYDwk3XpSr17QhEd0Q3I9Xq1CETHo4Hcuo87WyWHpAF1aSLjLRf5lD9ZGX2qStUvg==",
"peer": true,
"dependencies": {
"array-back": "^3.1.0",
"find-replace": "^3.0.0",
@@ -2307,6 +2392,7 @@
"version": "7.0.1",
"resolved": "https://registry.npmjs.org/command-line-usage/-/command-line-usage-7.0.1.tgz",
"integrity": "sha512-NCyznE//MuTjwi3y84QVUGEOT+P5oto1e1Pk/jFPVdPPfsG03qpTIl3yw6etR+v73d0lXsoojRpvbru2sqePxQ==",
"peer": true,
"dependencies": {
"array-back": "^6.2.2",
"chalk-template": "^0.4.0",
@@ -2321,6 +2407,7 @@
"version": "6.2.2",
"resolved": "https://registry.npmjs.org/array-back/-/array-back-6.2.2.tgz",
"integrity": "sha512-gUAZ7HPyb4SJczXAMUXMGAvI976JoK3qEx9v1FTmeYuJj0IBiaKttG1ydtGKdkfqWkIkouke7nG8ufGy77+Cvw==",
"peer": true,
"engines": {
"node": ">=12.17"
}
@@ -2329,6 +2416,7 @@
"version": "7.1.1",
"resolved": "https://registry.npmjs.org/typical/-/typical-7.1.1.tgz",
"integrity": "sha512-T+tKVNs6Wu7IWiAce5BgMd7OZfNYUndHwc5MknN+UHOudi7sGZzuHdCadllRuqJ3fPtgFtIH9+lt9qRv6lmpfA==",
"peer": true,
"engines": {
"node": ">=12.17"
}
@@ -2380,6 +2468,15 @@
"node": ">= 8"
}
},
"node_modules/crypt": {
"version": "0.0.2",
"resolved": "https://registry.npmjs.org/crypt/-/crypt-0.0.2.tgz",
"integrity": "sha512-mCxBlsHFYh9C+HVpiEacem8FEBnMXgU9gy4zmNC+SXAZNB/1idgp/aulFJ4FgCi7GPEVbfyng092GqL2k2rmow==",
"optional": true,
"engines": {
"node": "*"
}
},
"node_modules/debug": {
"version": "4.3.4",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz",
@@ -2432,6 +2529,15 @@
"node": ">=0.10.0"
}
},
"node_modules/delayed-stream": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
"integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==",
"optional": true,
"engines": {
"node": ">=0.4.0"
}
},
"node_modules/detect-newline": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/detect-newline/-/detect-newline-3.1.0.tgz",
@@ -2450,6 +2556,16 @@
"node": "^14.15.0 || ^16.10.0 || >=18.0.0"
}
},
"node_modules/digest-fetch": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/digest-fetch/-/digest-fetch-1.3.0.tgz",
"integrity": "sha512-CGJuv6iKNM7QyZlM2T3sPAdZWd/p9zQiRNS9G+9COUCwzWFTs0Xp8NF5iePx7wtvhDykReiRRrSeNb4oMmB8lA==",
"optional": true,
"dependencies": {
"base-64": "^0.1.0",
"md5": "^2.3.0"
}
},
"node_modules/dir-glob": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz",
@@ -2710,6 +2826,15 @@
"node": ">=0.10.0"
}
},
"node_modules/event-target-shim": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz",
"integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==",
"optional": true,
"engines": {
"node": ">=6"
}
},
"node_modules/exit": {
"version": "0.1.2",
"resolved": "https://registry.npmjs.org/exit/-/exit-0.1.2.tgz",
@@ -2815,6 +2940,7 @@
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/find-replace/-/find-replace-3.0.0.tgz",
"integrity": "sha512-6Tb2myMioCAgv5kfvP5/PkZZ/ntTpVK39fHY7WkWBgvbeE+VHd/tZuZ4mrC+bxh4cfOZeYKVPaJIZtZXV7GNCQ==",
"peer": true,
"dependencies": {
"array-back": "^3.0.1"
},
@@ -2855,7 +2981,8 @@
"node_modules/flatbuffers": {
"version": "23.5.26",
"resolved": "https://registry.npmjs.org/flatbuffers/-/flatbuffers-23.5.26.tgz",
"integrity": "sha512-vE+SI9vrJDwi1oETtTIFldC/o9GsVKRM+s6EL0nQgxXlYV1Vc4Tk30hj4xGICftInKQKj1F3up2n8UbIVobISQ=="
"integrity": "sha512-vE+SI9vrJDwi1oETtTIFldC/o9GsVKRM+s6EL0nQgxXlYV1Vc4Tk30hj4xGICftInKQKj1F3up2n8UbIVobISQ==",
"peer": true
},
"node_modules/flatted": {
"version": "3.2.9",
@@ -2863,6 +2990,48 @@
"integrity": "sha512-36yxDn5H7OFZQla0/jFJmbIKTdZAQHngCedGxiMmpNfEZM0sdEeT+WczLQrjK6D7o2aiyLYDnkw0R3JK0Qv1RQ==",
"dev": true
},
"node_modules/form-data": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz",
"integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==",
"optional": true,
"dependencies": {
"asynckit": "^0.4.0",
"combined-stream": "^1.0.8",
"mime-types": "^2.1.12"
},
"engines": {
"node": ">= 6"
}
},
"node_modules/form-data-encoder": {
"version": "1.7.2",
"resolved": "https://registry.npmjs.org/form-data-encoder/-/form-data-encoder-1.7.2.tgz",
"integrity": "sha512-qfqtYan3rxrnCk1VYaA4H+Ms9xdpPqvLZa6xmMgFvhO32x7/3J/ExcTd6qpxM0vH2GdMI+poehyBZvqfMTto8A==",
"optional": true
},
"node_modules/formdata-node": {
"version": "4.4.1",
"resolved": "https://registry.npmjs.org/formdata-node/-/formdata-node-4.4.1.tgz",
"integrity": "sha512-0iirZp3uVDjVGt9p49aTaqjk84TrglENEDuqfdlZQ1roC9CWlPk6Avf8EEnZNcAqPonwkG35x4n3ww/1THYAeQ==",
"optional": true,
"dependencies": {
"node-domexception": "1.0.0",
"web-streams-polyfill": "4.0.0-beta.3"
},
"engines": {
"node": ">= 12.20"
}
},
"node_modules/formdata-node/node_modules/web-streams-polyfill": {
"version": "4.0.0-beta.3",
"resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-4.0.0-beta.3.tgz",
"integrity": "sha512-QW95TCTaHmsYfHDybGMwO5IJIM93I/6vTRk+daHTWFPhwh+C8Cg7j7XyKrwrj8Ib6vYXe0ocYNrmzY4xAAN6ug==",
"optional": true,
"engines": {
"node": ">= 14"
}
},
"node_modules/fs.realpath": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz",
@@ -3049,6 +3218,15 @@
"integrity": "sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==",
"dev": true
},
"node_modules/humanize-ms": {
"version": "1.2.1",
"resolved": "https://registry.npmjs.org/humanize-ms/-/humanize-ms-1.2.1.tgz",
"integrity": "sha512-Fl70vYtsAFb/C06PTS9dZBo7ihau+Tu/DNCk/OyHhea07S+aeMWpFFkUaXRa8fI+ScZbEI8dfSxwY7gxZ9SAVQ==",
"optional": true,
"dependencies": {
"ms": "^2.0.0"
}
},
"node_modules/ignore": {
"version": "5.3.0",
"resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.0.tgz",
@@ -3133,6 +3311,12 @@
"integrity": "sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==",
"dev": true
},
"node_modules/is-buffer": {
"version": "1.1.6",
"resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.6.tgz",
"integrity": "sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==",
"optional": true
},
"node_modules/is-core-module": {
"version": "2.13.1",
"resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.13.1.tgz",
@@ -4067,6 +4251,7 @@
"version": "0.0.3",
"resolved": "https://registry.npmjs.org/json-bignum/-/json-bignum-0.0.3.tgz",
"integrity": "sha512-2WHyXj3OfHSgNyuzDbSxI1w2jgw5gkWSWhS7Qg4bWXx1nLk3jnbwfUeS0PSba3IzpTUWdHxBieELUzXRjQB2zg==",
"peer": true,
"engines": {
"node": ">=0.8"
}
@@ -4177,12 +4362,14 @@
"node_modules/lodash.assignwith": {
"version": "4.2.0",
"resolved": "https://registry.npmjs.org/lodash.assignwith/-/lodash.assignwith-4.2.0.tgz",
"integrity": "sha512-ZznplvbvtjK2gMvnQ1BR/zqPFZmS6jbK4p+6Up4xcRYA7yMIwxHCfbTcrYxXKzzqLsQ05eJPVznEW3tuwV7k1g=="
"integrity": "sha512-ZznplvbvtjK2gMvnQ1BR/zqPFZmS6jbK4p+6Up4xcRYA7yMIwxHCfbTcrYxXKzzqLsQ05eJPVznEW3tuwV7k1g==",
"peer": true
},
"node_modules/lodash.camelcase": {
"version": "4.3.0",
"resolved": "https://registry.npmjs.org/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz",
"integrity": "sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA=="
"integrity": "sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==",
"peer": true
},
"node_modules/lodash.memoize": {
"version": "4.1.2",
@@ -4241,6 +4428,17 @@
"node": ">= 12"
}
},
"node_modules/md5": {
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/md5/-/md5-2.3.0.tgz",
"integrity": "sha512-T1GITYmFaKuO91vxyoQMFETst+O71VUPEU3ze5GNzDm0OWdP8v1ziTaAEPUr/3kLsY3Sftgz242A1SetQiDL7g==",
"optional": true,
"dependencies": {
"charenc": "0.0.2",
"crypt": "0.0.2",
"is-buffer": "~1.1.6"
}
},
"node_modules/merge-stream": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz",
@@ -4269,6 +4467,27 @@
"node": ">=8.6"
}
},
"node_modules/mime-db": {
"version": "1.52.0",
"resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
"integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==",
"optional": true,
"engines": {
"node": ">= 0.6"
}
},
"node_modules/mime-types": {
"version": "2.1.35",
"resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz",
"integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==",
"optional": true,
"dependencies": {
"mime-db": "1.52.0"
},
"engines": {
"node": ">= 0.6"
}
},
"node_modules/minimatch": {
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
@@ -4290,6 +4509,12 @@
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/ms": {
"version": "2.1.3",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
"integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==",
"optional": true
},
"node_modules/natural-compare": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz",
@@ -4302,6 +4527,45 @@
"integrity": "sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==",
"dev": true
},
"node_modules/node-domexception": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/node-domexception/-/node-domexception-1.0.0.tgz",
"integrity": "sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==",
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/jimmywarting"
},
{
"type": "github",
"url": "https://paypal.me/jimmywarting"
}
],
"optional": true,
"engines": {
"node": ">=10.5.0"
}
},
"node_modules/node-fetch": {
"version": "2.7.0",
"resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz",
"integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==",
"optional": true,
"dependencies": {
"whatwg-url": "^5.0.0"
},
"engines": {
"node": "4.x || >=6.0.0"
},
"peerDependencies": {
"encoding": "^0.1.0"
},
"peerDependenciesMeta": {
"encoding": {
"optional": true
}
}
},
"node_modules/node-int64": {
"version": "0.4.0",
"resolved": "https://registry.npmjs.org/node-int64/-/node-int64-0.4.0.tgz",
@@ -4332,6 +4596,35 @@
"wrappy": "1"
}
},
"node_modules/openai": {
"version": "4.28.4",
"resolved": "https://registry.npmjs.org/openai/-/openai-4.28.4.tgz",
"integrity": "sha512-RNIwx4MT/F0zyizGcwS+bXKLzJ8QE9IOyigDG/ttnwB220d58bYjYFp0qjvGwEFBO6+pvFVIDABZPGDl46RFsg==",
"optional": true,
"dependencies": {
"@types/node": "^18.11.18",
"@types/node-fetch": "^2.6.4",
"abort-controller": "^3.0.0",
"agentkeepalive": "^4.2.1",
"digest-fetch": "^1.3.0",
"form-data-encoder": "1.7.2",
"formdata-node": "^4.3.2",
"node-fetch": "^2.6.7",
"web-streams-polyfill": "^3.2.1"
},
"bin": {
"openai": "bin/cli"
}
},
"node_modules/openai/node_modules/@types/node": {
"version": "18.19.20",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.20.tgz",
"integrity": "sha512-SKXZvI375jkpvAj8o+5U2518XQv76mAsixqfXiVyWyXZbVWQK25RurFovYpVIxVzul0rZoH58V/3SkEnm7s3qA==",
"optional": true,
"dependencies": {
"undici-types": "~5.26.4"
}
},
"node_modules/optionator": {
"version": "0.9.3",
"resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.3.tgz",
@@ -4864,6 +5157,7 @@
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/stream-read-all/-/stream-read-all-3.0.1.tgz",
"integrity": "sha512-EWZT9XOceBPlVJRrYcykW8jyRSZYbkb/0ZK36uLEmoWVO5gxBOnntNTseNzfREsqxqdfEGQrD8SXQ3QWbBmq8A==",
"peer": true,
"engines": {
"node": ">=10"
}
@@ -4955,6 +5249,7 @@
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/table-layout/-/table-layout-3.0.2.tgz",
"integrity": "sha512-rpyNZYRw+/C+dYkcQ3Pr+rLxW4CfHpXjPDnG7lYhdRoUcZTUt+KEsX+94RGp/aVp/MQU35JCITv2T/beY4m+hw==",
"peer": true,
"dependencies": {
"@75lb/deep-merge": "^1.1.1",
"array-back": "^6.2.2",
@@ -4975,6 +5270,7 @@
"version": "6.2.2",
"resolved": "https://registry.npmjs.org/array-back/-/array-back-6.2.2.tgz",
"integrity": "sha512-gUAZ7HPyb4SJczXAMUXMGAvI976JoK3qEx9v1FTmeYuJj0IBiaKttG1ydtGKdkfqWkIkouke7nG8ufGy77+Cvw==",
"peer": true,
"engines": {
"node": ">=12.17"
}
@@ -4983,6 +5279,7 @@
"version": "7.1.1",
"resolved": "https://registry.npmjs.org/typical/-/typical-7.1.1.tgz",
"integrity": "sha512-T+tKVNs6Wu7IWiAce5BgMd7OZfNYUndHwc5MknN+UHOudi7sGZzuHdCadllRuqJ3fPtgFtIH9+lt9qRv6lmpfA==",
"peer": true,
"engines": {
"node": ">=12.17"
}
@@ -5007,6 +5304,15 @@
"integrity": "sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==",
"dev": true
},
"node_modules/tmp": {
"version": "0.2.3",
"resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.3.tgz",
"integrity": "sha512-nZD7m9iCPC5g0pYmcaxogYKggSfLsdxl8of3Q/oIbqCqLLIO9IAF0GWjX1z9NZRHPiXv8Wex4yDCaZsgEw0Y8w==",
"dev": true,
"engines": {
"node": ">=14.14"
}
},
"node_modules/tmpl": {
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/tmpl/-/tmpl-1.0.5.tgz",
@@ -5034,6 +5340,12 @@
"node": ">=8.0"
}
},
"node_modules/tr46": {
"version": "0.0.3",
"resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz",
"integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==",
"optional": true
},
"node_modules/ts-api-utils": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-1.0.3.tgz",
@@ -5092,7 +5404,8 @@
"node_modules/tslib": {
"version": "2.6.2",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz",
"integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q=="
"integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==",
"peer": true
},
"node_modules/type-check": {
"version": "0.4.0",
@@ -5189,6 +5502,7 @@
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/typical/-/typical-4.0.0.tgz",
"integrity": "sha512-VAH4IvQ7BDFYglMd7BPRDfLgxZZX4O4TFcRDA6EN5X7erNJJq+McIEp8np9aVtxrCJ6qx4GTYVfOWNjcqwZgRw==",
"peer": true,
"engines": {
"node": ">=8"
}
@@ -5285,6 +5599,31 @@
"makeerror": "1.0.12"
}
},
"node_modules/web-streams-polyfill": {
"version": "3.3.3",
"resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-3.3.3.tgz",
"integrity": "sha512-d2JWLCivmZYTSIoge9MsgFCZrt571BikcWGYkjC1khllbTeDlGqZ2D8vD8E/lJa8WGWbb7Plm8/XJYV7IJHZZw==",
"optional": true,
"engines": {
"node": ">= 8"
}
},
"node_modules/webidl-conversions": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz",
"integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==",
"optional": true
},
"node_modules/whatwg-url": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz",
"integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==",
"optional": true,
"dependencies": {
"tr46": "~0.0.3",
"webidl-conversions": "^3.0.0"
}
},
"node_modules/which": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz",
@@ -5310,6 +5649,7 @@
"version": "5.1.0",
"resolved": "https://registry.npmjs.org/wordwrapjs/-/wordwrapjs-5.1.0.tgz",
"integrity": "sha512-JNjcULU2e4KJwUNv6CHgI46UvDGitb6dGryHajXTDiLgg1/RiGoPSDw4kZfYnwGtEXf2ZMeIewDQgFGzkCB2Sg==",
"peer": true,
"engines": {
"node": ">=12.17"
}


@@ -19,10 +19,12 @@
"devDependencies": {
"@napi-rs/cli": "^2.18.0",
"@types/jest": "^29.1.2",
"@types/tmp": "^0.2.6",
"@typescript-eslint/eslint-plugin": "^6.19.0",
"@typescript-eslint/parser": "^6.19.0",
"eslint": "^8.56.0",
"jest": "^29.7.0",
"tmp": "^0.2.3",
"ts-jest": "^29.1.2",
"typedoc": "^0.25.7",
"typedoc-plugin-markdown": "^3.17.1",
@@ -59,7 +61,8 @@
"lancedb-darwin-arm64": "0.4.3",
"lancedb-darwin-x64": "0.4.3",
"lancedb-linux-arm64-gnu": "0.4.3",
"lancedb-linux-x64-gnu": "0.4.3"
"lancedb-linux-x64-gnu": "0.4.3",
"openai": "^4.28.4"
},
"peerDependencies": {
"apache-arrow": "^15.0.0"


@@ -18,11 +18,23 @@ use napi_derive::*;
use crate::table::Table;
use crate::ConnectionOptions;
use lancedb::connection::{ConnectBuilder, Connection as LanceDBConnection, CreateTableMode};
use lancedb::ipc::ipc_file_to_batches;
use lancedb::ipc::{ipc_file_to_batches, ipc_file_to_schema};
#[napi]
pub struct Connection {
conn: LanceDBConnection,
inner: Option<LanceDBConnection>,
}
impl Connection {
pub(crate) fn inner_new(inner: LanceDBConnection) -> Self {
Self { inner: Some(inner) }
}
fn get_inner(&self) -> napi::Result<&LanceDBConnection> {
self.inner
.as_ref()
.ok_or_else(|| napi::Error::from_reason("Connection is closed"))
}
}
impl Connection {
@@ -40,8 +52,8 @@ impl Connection {
impl Connection {
/// Create a new Connection instance from the given URI.
#[napi(factory)]
pub async fn new(options: ConnectionOptions) -> napi::Result<Self> {
let mut builder = ConnectBuilder::new(&options.uri);
pub async fn new(uri: String, options: ConnectionOptions) -> napi::Result<Self> {
let mut builder = ConnectBuilder::new(&uri);
if let Some(api_key) = options.api_key {
builder = builder.api_key(&api_key);
}
@@ -52,18 +64,33 @@ impl Connection {
builder =
builder.read_consistency_interval(std::time::Duration::from_secs_f64(interval));
}
Ok(Self {
conn: builder
Ok(Self::inner_new(
builder
.execute()
.await
.map_err(|e| napi::Error::from_reason(format!("{}", e)))?,
})
))
}
#[napi]
pub fn display(&self) -> napi::Result<String> {
Ok(self.get_inner()?.to_string())
}
#[napi]
pub fn is_open(&self) -> bool {
self.inner.is_some()
}
#[napi]
pub fn close(&mut self) {
self.inner.take();
}
/// List all tables in the dataset.
#[napi]
pub async fn table_names(&self) -> napi::Result<Vec<String>> {
self.conn
self.get_inner()?
.table_names()
.await
.map_err(|e| napi::Error::from_reason(format!("{}", e)))
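Holding the connection as an Option means every method goes through get_inner, so use-after-close surfaces as a clean error instead of undefined behavior. From the JS side that looks roughly like (illustrative):

const db = await connect("data/sample-lancedb");
db.close();
console.log(db.isOpen()); // false
await db.tableNames();    // rejects with "Connection is closed"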
@@ -86,7 +113,7 @@ impl Connection {
.map_err(|e| napi::Error::from_reason(format!("Failed to read IPC file: {}", e)))?;
let mode = Self::parse_create_mode_str(&mode)?;
let tbl = self
.conn
.get_inner()?
.create_table(&name, Box::new(batches))
.mode(mode)
.execute()
@@ -95,10 +122,31 @@ impl Connection {
Ok(Table::new(tbl))
}
#[napi]
pub async fn create_empty_table(
&self,
name: String,
schema_buf: Buffer,
mode: String,
) -> napi::Result<Table> {
let schema = ipc_file_to_schema(schema_buf.to_vec()).map_err(|e| {
napi::Error::from_reason(format!("Failed to marshal schema from JS to Rust: {}", e))
})?;
let mode = Self::parse_create_mode_str(&mode)?;
let tbl = self
.get_inner()?
.create_empty_table(&name, schema)
.mode(mode)
.execute()
.await
.map_err(|e| napi::Error::from_reason(format!("{}", e)))?;
Ok(Table::new(tbl))
}
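A sketch of driving this binding from JS: the schema travels as an empty Arrow IPC file for ipc_file_to_schema to unpack (the "create" mode string mirrors create_table and is an assumption here):

import { Schema, Field, Float32, Table as ArrowTable, tableToIPC } from "apache-arrow";

const schema = new Schema([new Field("id", new Float32(), true)]);
// A zero-row Arrow table still carries the full schema in its IPC footer.
const schemaBuf = Buffer.from(tableToIPC(new ArrowTable(schema), "file"));
const tbl = await conn.createEmptyTable("my_table", schemaBuf, "create");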
#[napi]
pub async fn open_table(&self, name: String) -> napi::Result<Table> {
let tbl = self
.conn
.get_inner()?
.open_table(&name)
.execute()
.await
@@ -109,7 +157,7 @@ impl Connection {
/// Drop table with the name. Or raise an error if the table does not exist.
#[napi]
pub async fn drop_table(&self, name: String) -> napi::Result<()> {
self.conn
self.get_inner()?
.drop_table(&name)
.await
.map_err(|e| napi::Error::from_reason(format!("{}", e)))


@@ -12,7 +12,11 @@
// See the License for the specific language governing permissions and
// limitations under the License.
use std::sync::Mutex;
use lance_linalg::distance::MetricType as LanceMetricType;
use lancedb::index::IndexBuilder as LanceDbIndexBuilder;
use lancedb::Table as LanceDbTable;
use napi_derive::napi;
#[napi]
@@ -40,58 +44,93 @@ impl From<MetricType> for LanceMetricType {
#[napi]
pub struct IndexBuilder {
inner: lancedb::index::IndexBuilder,
inner: Mutex<Option<LanceDbIndexBuilder>>,
}
impl IndexBuilder {
fn modify(
&self,
mod_fn: impl Fn(LanceDbIndexBuilder) -> LanceDbIndexBuilder,
) -> napi::Result<()> {
let mut inner = self.inner.lock().unwrap();
let inner_builder = inner.take().ok_or_else(|| {
napi::Error::from_reason("IndexBuilder has already been consumed".to_string())
})?;
let inner_builder = mod_fn(inner_builder);
inner.replace(inner_builder);
Ok(())
}
}
#[napi]
impl IndexBuilder {
pub fn new(tbl: &dyn lancedb::Table) -> Self {
pub fn new(tbl: &LanceDbTable) -> Self {
let inner = tbl.create_index(&[]);
Self { inner }
Self {
inner: Mutex::new(Some(inner)),
}
}
#[napi]
pub unsafe fn replace(&mut self, v: bool) {
self.inner.replace(v);
pub fn replace(&self, v: bool) -> napi::Result<()> {
self.modify(|b| b.replace(v))
}
#[napi]
pub unsafe fn column(&mut self, c: String) {
self.inner.columns(&[c.as_str()]);
pub fn column(&self, c: String) -> napi::Result<()> {
self.modify(|b| b.columns(&[c.as_str()]))
}
#[napi]
pub unsafe fn name(&mut self, name: String) {
self.inner.name(name.as_str());
pub fn name(&self, name: String) -> napi::Result<()> {
self.modify(|b| b.name(name.as_str()))
}
#[napi]
pub unsafe fn ivf_pq(
&mut self,
pub fn ivf_pq(
&self,
metric_type: Option<MetricType>,
num_partitions: Option<u32>,
num_sub_vectors: Option<u32>,
num_bits: Option<u32>,
max_iterations: Option<u32>,
sample_rate: Option<u32>,
) {
self.inner.ivf_pq();
metric_type.map(|m| self.inner.metric_type(m.into()));
num_partitions.map(|p| self.inner.num_partitions(p));
num_sub_vectors.map(|s| self.inner.num_sub_vectors(s));
num_bits.map(|b| self.inner.num_bits(b));
max_iterations.map(|i| self.inner.max_iterations(i));
sample_rate.map(|s| self.inner.sample_rate(s));
) -> napi::Result<()> {
self.modify(|b| {
let mut b = b.ivf_pq();
if let Some(metric_type) = metric_type {
b = b.metric_type(metric_type.into());
}
if let Some(num_partitions) = num_partitions {
b = b.num_partitions(num_partitions);
}
if let Some(num_sub_vectors) = num_sub_vectors {
b = b.num_sub_vectors(num_sub_vectors);
}
if let Some(num_bits) = num_bits {
b = b.num_bits(num_bits);
}
if let Some(max_iterations) = max_iterations {
b = b.max_iterations(max_iterations);
}
if let Some(sample_rate) = sample_rate {
b = b.sample_rate(sample_rate);
}
b
})
}
#[napi]
pub unsafe fn scalar(&mut self) {
self.inner.scalar();
pub fn scalar(&self) -> napi::Result<()> {
self.modify(|b| b.scalar())
}
#[napi]
pub async fn build(&self) -> napi::Result<()> {
self.inner
let inner = self.inner.lock().unwrap().take().ok_or_else(|| {
napi::Error::from_reason("IndexBuilder has already been consumed".to_string())
})?;
inner
.build()
.await
.map_err(|e| napi::Error::from_reason(format!("Failed to build index: {}", e)))?;
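Because build() takes the builder out of the Mutex, each IndexBuilder is consumed exactly once. A sketch of the resulting JS behavior (method names assume napi's default snake_case to camelCase mapping):

const builder = tbl.createIndex();
builder.column("vector");
builder.ivfPq();       // every IVF-PQ parameter is optional
await builder.build();
builder.replace(true); // throws: "IndexBuilder has already been consumed"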


@@ -24,7 +24,6 @@ mod table;
#[napi(object)]
#[derive(Debug)]
pub struct ConnectionOptions {
pub uri: String,
pub api_key: Option<String>,
pub host_override: Option<String>,
/// (For LanceDB OSS only): The interval, in seconds, at which to check for
@@ -54,6 +53,6 @@ pub struct WriteOptions {
}
#[napi]
pub async fn connect(options: ConnectionOptions) -> napi::Result<Connection> {
Connection::new(options).await
pub async fn connect(uri: String, options: ConnectionOptions) -> napi::Result<Connection> {
Connection::new(uri, options).await
}


@@ -16,7 +16,7 @@ use lancedb::query::Query as LanceDBQuery;
use napi::bindgen_prelude::*;
use napi_derive::napi;
use crate::{iterator::RecordBatchIterator, table::Table};
use crate::iterator::RecordBatchIterator;
#[napi]
pub struct Query {
@@ -25,10 +25,8 @@ pub struct Query {
#[napi]
impl Query {
pub fn new(table: &Table) -> Self {
Self {
inner: table.table.query(),
}
pub fn new(query: LanceDBQuery) -> Self {
Self { inner: query }
}
#[napi]


@@ -14,10 +14,8 @@
use arrow_ipc::writer::FileWriter;
use lance::dataset::ColumnAlteration as LanceColumnAlteration;
use lancedb::{
ipc::ipc_file_to_batches,
table::{AddDataOptions, TableRef},
};
use lancedb::ipc::ipc_file_to_batches;
use lancedb::table::{AddDataMode, Table as LanceDbTable};
use napi::bindgen_prelude::*;
use napi_derive::napi;
@@ -26,20 +24,52 @@ use crate::query::Query;
#[napi]
pub struct Table {
pub(crate) table: TableRef,
// We keep a duplicate of the table name so we can use it for error
// messages even if the table has been closed
name: String,
pub(crate) inner: Option<LanceDbTable>,
}
impl Table {
fn inner_ref(&self) -> napi::Result<&LanceDbTable> {
self.inner
.as_ref()
.ok_or_else(|| napi::Error::from_reason(format!("Table {} is closed", self.name)))
}
}
#[napi]
impl Table {
pub(crate) fn new(table: TableRef) -> Self {
Self { table }
pub(crate) fn new(table: LanceDbTable) -> Self {
Self {
name: table.name().to_string(),
inner: Some(table),
}
}
#[napi]
pub fn display(&self) -> String {
match &self.inner {
None => format!("ClosedTable({})", self.name),
Some(inner) => inner.to_string(),
}
}
#[napi]
pub fn is_open(&self) -> bool {
self.inner.is_some()
}
#[napi]
pub fn close(&mut self) {
self.inner.take();
}
/// Return Schema as empty Arrow IPC file.
#[napi]
pub async fn schema(&self) -> napi::Result<Buffer> {
let schema =
self.table.schema().await.map_err(|e| {
self.inner_ref()?.schema().await.map_err(|e| {
napi::Error::from_reason(format!("Failed to create IPC file: {}", e))
})?;
let mut writer = FileWriter::try_new(vec![], &schema)
@@ -53,52 +83,59 @@ impl Table {
}
#[napi]
pub async fn add(&self, buf: Buffer) -> napi::Result<()> {
pub async fn add(&self, buf: Buffer, mode: String) -> napi::Result<()> {
let batches = ipc_file_to_batches(buf.to_vec())
.map_err(|e| napi::Error::from_reason(format!("Failed to read IPC file: {}", e)))?;
self.table
.add(Box::new(batches), AddDataOptions::default())
.await
.map_err(|e| {
napi::Error::from_reason(format!(
"Failed to add batches to table {}: {}",
self.table, e
))
})
let mut op = self.inner_ref()?.add(Box::new(batches));
op = if mode == "append" {
op.mode(AddDataMode::Append)
} else if mode == "overwrite" {
op.mode(AddDataMode::Overwrite)
} else {
return Err(napi::Error::from_reason(format!("Invalid mode: {}", mode)));
};
op.execute().await.map_err(|e| {
napi::Error::from_reason(format!(
"Failed to add batches to table {}: {}",
self.name, e
))
})
}
#[napi]
pub async fn count_rows(&self, filter: Option<String>) -> napi::Result<i64> {
self.table
self.inner_ref()?
.count_rows(filter)
.await
.map(|val| val as i64)
.map_err(|e| {
napi::Error::from_reason(format!(
"Failed to count rows in table {}: {}",
self.table, e
self.name, e
))
})
}
#[napi]
pub async fn delete(&self, predicate: String) -> napi::Result<()> {
self.table.delete(&predicate).await.map_err(|e| {
self.inner_ref()?.delete(&predicate).await.map_err(|e| {
napi::Error::from_reason(format!(
"Failed to delete rows in table {}: predicate={}",
self.table, e
self.name, e
))
})
}
#[napi]
pub fn create_index(&self) -> IndexBuilder {
IndexBuilder::new(self.table.as_ref())
pub fn create_index(&self) -> napi::Result<IndexBuilder> {
Ok(IndexBuilder::new(self.inner_ref()?))
}
#[napi]
pub fn query(&self) -> Query {
Query::new(self)
pub fn query(&self) -> napi::Result<Query> {
Ok(Query::new(self.inner_ref()?.query()))
}
#[napi]
@@ -108,13 +145,13 @@ impl Table {
.map(|sql| (sql.name, sql.value_sql))
.collect::<Vec<_>>();
let transforms = lance::dataset::NewColumnTransform::SqlExpressions(transforms);
self.table
self.inner_ref()?
.add_columns(transforms, None)
.await
.map_err(|err| {
napi::Error::from_reason(format!(
"Failed to add columns to table {}: {}",
self.table, err
self.name, err
))
})?;
Ok(())
@@ -134,13 +171,13 @@ impl Table {
.map(LanceColumnAlteration::from)
.collect::<Vec<_>>();
self.table
self.inner_ref()?
.alter_columns(&alterations)
.await
.map_err(|err| {
napi::Error::from_reason(format!(
"Failed to alter columns in table {}: {}",
self.table, err
self.name, err
))
})?;
Ok(())
@@ -149,12 +186,15 @@ impl Table {
#[napi]
pub async fn drop_columns(&self, columns: Vec<String>) -> napi::Result<()> {
let col_refs = columns.iter().map(String::as_str).collect::<Vec<_>>();
self.table.drop_columns(&col_refs).await.map_err(|err| {
napi::Error::from_reason(format!(
"Failed to drop columns from table {}: {}",
self.table, err
))
})?;
self.inner_ref()?
.drop_columns(&col_refs)
.await
.map_err(|err| {
napi::Error::from_reason(format!(
"Failed to drop columns from table {}: {}",
self.name, err
))
})?;
Ok(())
}
}