Mirror of https://github.com/lancedb/lancedb.git (synced 2025-12-23 13:29:57 +00:00)

Compare commits: rmeng/patc ... python-v0. (456 commits)
(Commit table omitted: only bare SHA1 hashes were captured in this mirror; the author, date, and message columns are empty.)
.bumpversion.cfg (new file, 22 lines)
@@ -0,0 +1,22 @@
+[bumpversion]
+current_version = 0.4.15
+commit = True
+message = Bump version: {current_version} → {new_version}
+tag = True
+tag_name = v{new_version}
+
+[bumpversion:file:node/package.json]
+
+[bumpversion:file:nodejs/package.json]
+
+[bumpversion:file:nodejs/npm/darwin-x64/package.json]
+
+[bumpversion:file:nodejs/npm/darwin-arm64/package.json]
+
+[bumpversion:file:nodejs/npm/linux-x64-gnu/package.json]
+
+[bumpversion:file:nodejs/npm/linux-arm64-gnu/package.json]
+
+[bumpversion:file:rust/ffi/node/Cargo.toml]
+
+[bumpversion:file:rust/lancedb/Cargo.toml]
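This cfg is consumed by the classic `bump2version` tool, which the Python release workflow later in this diff installs and runs from the `python/` directory. A minimal local sketch of the same flow (the `patch` part and the resulting numbers are illustrative):

```sh
# Install the same tool the release workflow uses.
pip install bump2version

# Preview what would change, without committing or tagging.
bumpversion --dry-run --verbose --list patch

# Real run: with the config above this rewrites current_version
# (e.g. 0.4.15 -> 0.4.16) in every [bumpversion:file:...] target,
# commits with the "Bump version: ..." message, and tags v0.4.16.
bumpversion patch
```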
Deleted file (name not captured; a [tool.bumpversion] config for bump-my-version, 57 lines)
@@ -1,57 +0,0 @@
-[tool.bumpversion]
-current_version = "0.5.1"
-parse = """(?x)
-    (?P<major>0|[1-9]\\d*)\\.
-    (?P<minor>0|[1-9]\\d*)\\.
-    (?P<patch>0|[1-9]\\d*)
-    (?:-(?P<pre_l>[a-zA-Z-]+)\\.(?P<pre_n>0|[1-9]\\d*))?
-"""
-serialize = [
-    "{major}.{minor}.{patch}-{pre_l}.{pre_n}",
-    "{major}.{minor}.{patch}",
-]
-search = "{current_version}"
-replace = "{new_version}"
-regex = false
-ignore_missing_version = false
-ignore_missing_files = false
-tag = true
-sign_tags = false
-tag_name = "v{new_version}"
-tag_message = "Bump version: {current_version} → {new_version}"
-allow_dirty = true
-commit = true
-message = "Bump version: {current_version} → {new_version}"
-commit_args = ""
-
-[tool.bumpversion.parts.pre_l]
-values = ["beta", "final"]
-optional_value = "final"
-
-[[tool.bumpversion.files]]
-filename = "node/package.json"
-search = "\"version\": \"{current_version}\","
-replace = "\"version\": \"{new_version}\","
-
-[[tool.bumpversion.files]]
-filename = "nodejs/package.json"
-search = "\"version\": \"{current_version}\","
-replace = "\"version\": \"{new_version}\","
-
-# nodejs binary packages
-[[tool.bumpversion.files]]
-glob = "nodejs/npm/*/package.json"
-search = "\"version\": \"{current_version}\","
-replace = "\"version\": \"{new_version}\","
-
-# Cargo files
-# ------------
-[[tool.bumpversion.files]]
-filename = "rust/ffi/node/Cargo.toml"
-search = "\nversion = \"{current_version}\""
-replace = "\nversion = \"{new_version}\""
-
-[[tool.bumpversion.files]]
-filename = "rust/lancedb/Cargo.toml"
-search = "\nversion = \"{current_version}\""
-replace = "\nversion = \"{new_version}\""
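This TOML drives the newer `bump-my-version` tool; its `pre_l`/`pre_n` parts implement the beta flow that `ci/bump_version.sh` (deleted at the end of this diff) documents in its comments. A hedged sketch of the progression, with illustrative version numbers:

```sh
pip install bump-my-version

# Starting from current_version = "0.5.1", per the comments in ci/bump_version.sh:
bump-my-version bump -vv patch   # 0.5.1        -> 0.5.2-beta.0
bump-my-version bump -vv pre_n   # 0.5.2-beta.0 -> 0.5.2-beta.1
bump-my-version bump -vv pre_l   # 0.5.2-beta.1 -> 0.5.2 ("final" is the optional_value, so it is omitted)
```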
.github/labeler.yml (deleted, 33 lines, vendored)
@@ -1,33 +0,0 @@
-version: 1
-appendOnly: true
-# Labels are applied based on conventional commits standard
-# https://www.conventionalcommits.org/en/v1.0.0/
-# These labels are later used in release notes. See .github/release.yml
-labels:
-  # If the PR title has an ! before the : it will be considered a breaking change
-  # For example, `feat!: add new feature` will be considered a breaking change
-  - label: breaking-change
-    title: "^[^:]+!:.*"
-  - label: breaking-change
-    body: "BREAKING CHANGE"
-  - label: enhancement
-    title: "^feat(\\(.+\\))?!?:.*"
-  - label: bug
-    title: "^fix(\\(.+\\))?!?:.*"
-  - label: documentation
-    title: "^docs(\\(.+\\))?!?:.*"
-  - label: performance
-    title: "^perf(\\(.+\\))?!?:.*"
-  - label: ci
-    title: "^ci(\\(.+\\))?!?:.*"
-  - label: chore
-    title: "^(chore|test|build|style)(\\(.+\\))?!?:.*"
-  - label: Python
-    files:
-      - "^python\\/.*"
-  - label: Rust
-    files:
-      - "^rust\\/.*"
-  - label: typescript
-    files:
-      - "^node\\/.*"
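The `title` patterns above are ordinary regexes, so they can be sanity-checked locally; a small sketch using grep -E (the example titles are hypothetical):

```sh
# Breaking change: any "!" before the first ":" matches ^[^:]+!:.*
echo 'feat!: add new feature' | grep -qE '^[^:]+!:.*' && echo breaking-change

# Enhancement: feat with an optional scope, e.g. feat(python): ...
echo 'feat(python): add hybrid search' | grep -qE '^feat(\(.+\))?!?:.*' && echo enhancement

# The chore bucket covers chore/test/build/style
echo 'test: speed up CI' | grep -qE '^(chore|test|build|style)(\(.+\))?!?:.*' && echo chore
```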
.github/release_notes.json (deleted, 41 lines, vendored)
@@ -1,41 +0,0 @@
-{
-  "ignore_labels": ["chore"],
-  "pr_template": "- ${{TITLE}} by @${{AUTHOR}} in ${{URL}}",
-  "categories": [
-    {
-      "title": "## 🏆 Highlights",
-      "labels": ["highlight"]
-    },
-    {
-      "title": "## 🛠 Breaking Changes",
-      "labels": ["breaking-change"]
-    },
-    {
-      "title": "## ⚠️ Deprecations ",
-      "labels": ["deprecation"]
-    },
-    {
-      "title": "## 🎉 New Features",
-      "labels": ["enhancement"]
-    },
-    {
-      "title": "## 🐛 Bug Fixes",
-      "labels": ["bug"]
-    },
-    {
-      "title": "## 📚 Documentation",
-      "labels": ["documentation"]
-    },
-    {
-      "title": "## 🚀 Performance Improvements",
-      "labels": ["performance"]
-    },
-    {
-      "title": "## Other Changes"
-    },
-    {
-      "title": "## 🔧 Build and CI",
-      "labels": ["ci"]
-    }
-  ]
-}
Composite build action (file name not captured; its inputs and maturin build steps match the ./.github/workflows/build_linux_wheel action referenced elsewhere in this diff)
@@ -14,10 +14,6 @@ inputs:
     # Note: this does *not* mean the host is arm64, since we might be cross-compiling.
     required: false
     default: "false"
-  manylinux:
-    description: "The manylinux version to build for"
-    required: false
-    default: "2_17"
 runs:
   using: "composite"
   steps:
@@ -32,7 +28,7 @@ runs:
         command: build
         working-directory: python
         target: x86_64-unknown-linux-gnu
-        manylinux: ${{ inputs.manylinux }}
+        manylinux: "2_17"
         args: ${{ inputs.args }}
         before-script-linux: |
           set -e
@@ -47,7 +43,7 @@ runs:
         command: build
         working-directory: python
         target: aarch64-unknown-linux-gnu
-        manylinux: ${{ inputs.manylinux }}
+        manylinux: "2_24"
         args: ${{ inputs.args }}
         before-script-linux: |
           set -e
.github/workflows/cargo-publish.yml (11 changes, vendored)
@@ -1,20 +1,13 @@
 name: Cargo Publish
 
 on:
-  push:
-    tags-ignore:
-      # We don't publish pre-releases for Rust. Crates.io is just a source
-      # distribution, so we don't need to publish pre-releases.
-      - 'v*-beta*'
-      - '*-v*' # for example, python-vX.Y.Z
+  release:
+    types: [ published ]
 
 env:
   # This env var is used by Swatinem/rust-cache@v2 for the cache
   # key, so we set it to make sure it is always consistent.
   CARGO_TERM_COLOR: always
-  # Up-to-date compilers needed for fp16kernels.
-  CC: gcc-12
-  CXX: g++-12
 
 jobs:
   build:
.github/workflows/dev.yml (deleted, 81 lines, vendored)
@@ -1,81 +0,0 @@
-name: PR Checks
-
-on:
-  pull_request_target:
-    types: [opened, edited, synchronize, reopened]
-
-concurrency:
-  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
-  cancel-in-progress: true
-
-jobs:
-  labeler:
-    permissions:
-      pull-requests: write
-    name: Label PR
-    runs-on: ubuntu-latest
-    steps:
-      - uses: srvaroa/labeler@master
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-  commitlint:
-    permissions:
-      pull-requests: write
-    name: Verify PR title / description conforms to semantic-release
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/setup-node@v3
-        with:
-          node-version: "18"
-      # These rules are disabled because Github will always ensure there
-      # is a blank line between the title and the body and Github will
-      # word wrap the description field to ensure a reasonable max line
-      # length.
-      - run: npm install @commitlint/config-conventional
-      - run: >
-          echo 'module.exports = {
-          "rules": {
-          "body-max-line-length": [0, "always", Infinity],
-          "footer-max-line-length": [0, "always", Infinity],
-          "body-leading-blank": [0, "always"]
-          }
-          }' > .commitlintrc.js
-      - run: npx commitlint --extends @commitlint/config-conventional --verbose <<< $COMMIT_MSG
-        env:
-          COMMIT_MSG: >
-            ${{ github.event.pull_request.title }}
-
-            ${{ github.event.pull_request.body }}
-      - if: failure()
-        uses: actions/github-script@v6
-        with:
-          script: |
-            const message = `**ACTION NEEDED**
-
-            Lance follows the [Conventional Commits specification](https://www.conventionalcommits.org/en/v1.0.0/) for release automation.
-
-            The PR title and description are used as the merge commit message.\
-            Please update your PR title and description to match the specification.
-
-            For details on the error please inspect the "PR Title Check" action.
-            `
-            // Get list of current comments
-            const comments = await github.paginate(github.rest.issues.listComments, {
-              owner: context.repo.owner,
-              repo: context.repo.repo,
-              issue_number: context.issue.number
-            });
-            // Check if this job already commented
-            for (const comment of comments) {
-              if (comment.body === message) {
-                return // Already commented
-              }
-            }
-            // Post the comment about Conventional Commits
-            github.rest.issues.createComment({
-              owner: context.repo.owner,
-              repo: context.repo.repo,
-              issue_number: context.issue.number,
-              body: message
-            })
-            core.setFailed(message)
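The commitlint job above is easy to reproduce locally before opening a PR; a minimal sketch, assuming Node is installed and using the same rule overrides the workflow writes out:

```sh
npm install @commitlint/cli @commitlint/config-conventional

# The same relaxed rules the workflow writes to .commitlintrc.js
echo 'module.exports = {
  "rules": {
    "body-max-line-length": [0, "always", Infinity],
    "footer-max-line-length": [0, "always", Infinity],
    "body-leading-blank": [0, "always"]
  }
}' > .commitlintrc.js

# Lint a candidate PR title (hypothetical) the way the job lints title + body.
npx commitlint --extends @commitlint/config-conventional --verbose \
  <<< 'feat(node): add reranking API'
```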
.github/workflows/java.yml (deleted, 85 lines, vendored)
@@ -1,85 +0,0 @@
-name: Build and Run Java JNI Tests
-on:
-  push:
-    branches:
-      - main
-  pull_request:
-    paths:
-      - java/**
-      - rust/**
-      - .github/workflows/java.yml
-env:
-  # This env var is used by Swatinem/rust-cache@v2 for the cache
-  # key, so we set it to make sure it is always consistent.
-  CARGO_TERM_COLOR: always
-  # Disable full debug symbol generation to speed up CI build and keep memory down
-  # "1" means line tables only, which is useful for panic tracebacks.
-  RUSTFLAGS: "-C debuginfo=1"
-  RUST_BACKTRACE: "1"
-  # according to: https://matklad.github.io/2021/09/04/fast-rust-builds.html
-  # CI builds are faster with incremental disabled.
-  CARGO_INCREMENTAL: "0"
-  CARGO_BUILD_JOBS: "1"
-jobs:
-  linux-build:
-    runs-on: ubuntu-22.04
-    name: ubuntu-22.04 + Java 11 & 17
-    defaults:
-      run:
-        working-directory: ./java
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v4
-      - uses: Swatinem/rust-cache@v2
-        with:
-          workspaces: java/core/lancedb-jni
-      - name: Run cargo fmt
-        run: cargo fmt --check
-        working-directory: ./java/core/lancedb-jni
-      - name: Install dependencies
-        run: |
-          sudo apt update
-          sudo apt install -y protobuf-compiler libssl-dev
-      - name: Install Java 17
-        uses: actions/setup-java@v4
-        with:
-          distribution: temurin
-          java-version: 17
-          cache: "maven"
-      - run: echo "JAVA_17=$JAVA_HOME" >> $GITHUB_ENV
-      - name: Install Java 11
-        uses: actions/setup-java@v4
-        with:
-          distribution: temurin
-          java-version: 11
-          cache: "maven"
-      - name: Java Style Check
-        run: mvn checkstyle:check
-      # Disable because of issues in lancedb rust core code
-      # - name: Rust Clippy
-      #   working-directory: java/core/lancedb-jni
-      #   run: cargo clippy --all-targets -- -D warnings
-      - name: Running tests with Java 11
-        run: mvn clean test
-      - name: Running tests with Java 17
-        run: |
-          export JAVA_TOOL_OPTIONS="$JAVA_TOOL_OPTIONS \
-            -XX:+IgnoreUnrecognizedVMOptions \
-            --add-opens=java.base/java.lang=ALL-UNNAMED \
-            --add-opens=java.base/java.lang.invoke=ALL-UNNAMED \
-            --add-opens=java.base/java.lang.reflect=ALL-UNNAMED \
-            --add-opens=java.base/java.io=ALL-UNNAMED \
-            --add-opens=java.base/java.net=ALL-UNNAMED \
-            --add-opens=java.base/java.nio=ALL-UNNAMED \
-            --add-opens=java.base/java.util=ALL-UNNAMED \
-            --add-opens=java.base/java.util.concurrent=ALL-UNNAMED \
-            --add-opens=java.base/java.util.concurrent.atomic=ALL-UNNAMED \
-            --add-opens=java.base/jdk.internal.ref=ALL-UNNAMED \
-            --add-opens=java.base/sun.nio.ch=ALL-UNNAMED \
-            --add-opens=java.base/sun.nio.cs=ALL-UNNAMED \
-            --add-opens=java.base/sun.security.action=ALL-UNNAMED \
-            --add-opens=java.base/sun.util.calendar=ALL-UNNAMED \
-            --add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED \
-            -Djdk.reflect.useDirectMethodHandle=false \
-            -Dio.netty.tryReflectionSetAccessible=true"
-          JAVA_HOME=$JAVA_17 mvn clean test
.github/workflows/make-release-commit.yml (88 changes, vendored)
@@ -1,62 +1,37 @@
 name: Create release commit
 
-# This workflow increments versions, tags the version, and pushes it.
-# When a tag is pushed, another workflow is triggered that creates a GH release
-# and uploads the binaries. This workflow is only for creating the tag.
-
-# This script will enforce that a minor version is incremented if there are any
-# breaking changes since the last minor increment. However, it isn't able to
-# differentiate between breaking changes in Node versus Python. If you wish to
-# bypass this check, you can manually increment the version and push the tag.
 on:
   workflow_dispatch:
     inputs:
       dry_run:
         description: 'Dry run (create the local commit/tags but do not push it)'
         required: true
-        default: false
-        type: boolean
-      type:
-        description: 'What kind of release is this?'
-        required: true
-        default: 'preview'
+        default: "false"
         type: choice
         options:
-          - preview
-          - stable
-      python:
-        description: 'Make a Python release'
-        required: true
-        default: true
-        type: boolean
-      other:
-        description: 'Make a Node/Rust release'
-        required: true
-        default: true
-        type: boolean
-      bump-minor:
-        description: 'Bump minor version'
-        required: true
-        default: false
-        type: boolean
+          - "true"
+          - "false"
+      part:
+        description: 'What kind of release is this?'
+        required: true
+        default: 'patch'
+        type: choice
+        options:
+          - patch
+          - minor
+          - major
 
 jobs:
-  make-release:
-    # Creates tag and GH release. The GH release will trigger the build and release jobs.
+  bump-version:
     runs-on: ubuntu-latest
-    permissions:
-      contents: write
     steps:
-      - name: Output Inputs
-        run: echo "${{ toJSON(github.event.inputs) }}"
-      - uses: actions/checkout@v4
+      - name: Check out main
+        uses: actions/checkout@v4
        with:
+          ref: main
+          persist-credentials: false
          fetch-depth: 0
          lfs: true
-          # It's important we use our token here, as the default token will NOT
-          # trigger any workflows watching for new tags. See:
-          # https://docs.github.com/en/actions/using-workflows/triggering-a-workflow#triggering-a-workflow-from-a-workflow
-          token: ${{ secrets.LANCEDB_RELEASE_TOKEN }}
       - name: Set git configs for bumpversion
         shell: bash
         run: |
@@ -66,34 +41,19 @@ jobs:
         uses: actions/setup-python@v5
         with:
           python-version: "3.11"
-      - name: Bump Python version
-        if: ${{ inputs.python }}
-        working-directory: python
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+      - name: Bump version, create tag and commit
         run: |
-          # Need to get the commit before bumping the version, so we can
-          # determine if there are breaking changes in the next step as well.
-          echo "COMMIT_BEFORE_BUMP=$(git rev-parse HEAD)" >> $GITHUB_ENV
-          pip install bump-my-version PyGithub packaging
-          bash ../ci/bump_version.sh ${{ inputs.type }} ${{ inputs.bump-minor }} python-v
-      - name: Bump Node/Rust version
-        if: ${{ inputs.other }}
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        run: |
-          pip install bump-my-version PyGithub packaging
-          bash ci/bump_version.sh ${{ inputs.type }} ${{ inputs.bump-minor }} v $COMMIT_BEFORE_BUMP
-      - name: Push new version tag
-        if: ${{ !inputs.dry_run }}
+          pip install bump2version
+          bumpversion --verbose ${{ inputs.part }}
+      - name: Push new version and tag
+        if: ${{ inputs.dry_run }} == "false"
         uses: ad-m/github-push-action@master
         with:
-          # Need to use PAT here too to trigger next workflow. See comment above.
           github_token: ${{ secrets.LANCEDB_RELEASE_TOKEN }}
-          branch: ${{ github.ref }}
+          branch: main
           tags: true
       - uses: ./.github/workflows/update_package_lock
         if: ${{ inputs.dry_run }} == "false"
         with:
-          github_token: ${{ secrets.GITHUB_TOKEN }}
+          github_token: ${{ secrets.LANCEDB_RELEASE_TOKEN }}
.github/workflows/node.yml (1 change, vendored)
@@ -107,7 +107,6 @@ jobs:
       AWS_ENDPOINT: http://localhost:4566
       # this one is for dynamodb
       DYNAMODB_ENDPOINT: http://localhost:4566
-      ALLOW_HTTP: true
    steps:
      - uses: actions/checkout@v4
        with:
.github/workflows/nodejs.yml (12 changes, vendored)
@@ -28,10 +28,6 @@ jobs:
     run:
       shell: bash
       working-directory: nodejs
-    env:
-      # Need up-to-date compilers for kernels
-      CC: gcc-12
-      CXX: g++-12
    steps:
      - uses: actions/checkout@v4
        with:
@@ -52,7 +48,8 @@ jobs:
        cargo fmt --all -- --check
        cargo clippy --all --all-features -- -D warnings
        npm ci
-        npm run lint-ci
+        npm run lint
+        npm run chkformat
  linux:
    name: Linux (NodeJS ${{ matrix.node-version }})
    timeout-minutes: 30
@@ -84,12 +81,7 @@ jobs:
        run: |
          npm ci
          npm run build
-      - name: Setup localstack
-        working-directory: .
-        run: docker compose up --detach --wait
      - name: Test
-        env:
-          S3_TEST: "1"
        run: npm run test
  macos:
    timeout-minutes: 30
.github/workflows/npm-publish.yml (99 changes, vendored)
@@ -1,9 +1,8 @@
 name: NPM Publish
 
 on:
-  push:
-    tags:
-      - 'v*'
+  release:
+    types: [published]
 
 jobs:
   node:
@@ -275,15 +274,9 @@ jobs:
      env:
        NODE_AUTH_TOKEN: ${{ secrets.LANCEDB_NPM_REGISTRY_TOKEN }}
      run: |
-        # Tag beta as "preview" instead of default "latest". See lancedb
-        # npm publish step for more info.
-        if [[ $GITHUB_REF =~ refs/tags/v(.*)-beta.* ]]; then
-          PUBLISH_ARGS="--tag preview"
-        fi
-
        mv */*.tgz .
        for filename in *.tgz; do
-          npm publish $PUBLISH_ARGS $filename
+          npm publish $filename
        done
 
  release-nodejs:
@@ -323,23 +316,11 @@ jobs:
    - name: Publish to NPM
      env:
        NODE_AUTH_TOKEN: ${{ secrets.LANCEDB_NPM_REGISTRY_TOKEN }}
-      # By default, things are published to the latest tag. This is what is
-      # installed by default if the user does not specify a version. This is
-      # good for stable releases, but for pre-releases, we want to publish to
-      # the "preview" tag so they can install with `npm install lancedb@preview`.
-      # See: https://medium.com/@mbostock/prereleases-and-npm-e778fc5e2420
-      run: |
-        if [[ $GITHUB_REF =~ refs/tags/v(.*)-beta.* ]]; then
-          npm publish --access public --tag preview
-        else
-          npm publish --access public
-        fi
+      run: npm publish --access public
 
  update-package-lock:
    needs: [release]
    runs-on: ubuntu-latest
-    permissions:
-      contents: write
    steps:
      - name: Checkout
        uses: actions/checkout@v4
@@ -350,13 +331,11 @@ jobs:
        lfs: true
    - uses: ./.github/workflows/update_package_lock
      with:
-        github_token: ${{ secrets.GITHUB_TOKEN }}
+        github_token: ${{ secrets.LANCEDB_RELEASE_TOKEN }}
 
  update-package-lock-nodejs:
    needs: [release-nodejs]
    runs-on: ubuntu-latest
-    permissions:
-      contents: write
    steps:
      - name: Checkout
        uses: actions/checkout@v4
@@ -367,70 +346,4 @@ jobs:
        lfs: true
    - uses: ./.github/workflows/update_package_lock_nodejs
      with:
-        github_token: ${{ secrets.GITHUB_TOKEN }}
-
-  gh-release:
-    runs-on: ubuntu-latest
-    permissions:
-      contents: write
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-          lfs: true
-      - name: Extract version
-        id: extract_version
-        env:
-          GITHUB_REF: ${{ github.ref }}
-        run: |
-          set -e
-          echo "Extracting tag and version from $GITHUB_REF"
-          if [[ $GITHUB_REF =~ refs/tags/v(.*) ]]; then
-            VERSION=${BASH_REMATCH[1]}
-            TAG=v$VERSION
-            echo "tag=$TAG" >> $GITHUB_OUTPUT
-            echo "version=$VERSION" >> $GITHUB_OUTPUT
-          else
-            echo "Failed to extract version from $GITHUB_REF"
-            exit 1
-          fi
-          echo "Extracted version $VERSION from $GITHUB_REF"
-          if [[ $VERSION =~ beta ]]; then
-            echo "This is a beta release"
-
-            # Get last release (that is not this one)
-            FROM_TAG=$(git tag --sort='version:refname' \
-              | grep ^v \
-              | grep -vF "$TAG" \
-              | python ci/semver_sort.py v \
-              | tail -n 1)
-          else
-            echo "This is a stable release"
-            # Get last stable tag (ignore betas)
-            FROM_TAG=$(git tag --sort='version:refname' \
-              | grep ^v \
-              | grep -vF "$TAG" \
-              | grep -v beta \
-              | python ci/semver_sort.py v \
-              | tail -n 1)
-          fi
-          echo "Found from tag $FROM_TAG"
-          echo "from_tag=$FROM_TAG" >> $GITHUB_OUTPUT
-      - name: Create Release Notes
-        id: release_notes
-        uses: mikepenz/release-changelog-builder-action@v4
-        with:
-          configuration: .github/release_notes.json
-          toTag: ${{ steps.extract_version.outputs.tag }}
-          fromTag: ${{ steps.extract_version.outputs.from_tag }}
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-      - name: Create GH release
-        uses: softprops/action-gh-release@v2
-        with:
-          prerelease: ${{ contains('beta', github.ref) }}
-          tag_name: ${{ steps.extract_version.outputs.tag }}
-          token: ${{ secrets.GITHUB_TOKEN }}
-          generate_release_notes: false
-          name: Node/Rust LanceDB v${{ steps.extract_version.outputs.version }}
-          body: ${{ steps.release_notes.outputs.changelog }}
+        github_token: ${{ secrets.LANCEDB_RELEASE_TOKEN }}
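The deleted `gh-release` job's version parsing relies on bash's `=~` operator populating `BASH_REMATCH`; a short sketch of that mechanism with a made-up ref:

```sh
GITHUB_REF='refs/tags/v0.6.0-beta.2'          # hypothetical tag ref
if [[ $GITHUB_REF =~ refs/tags/v(.*) ]]; then
  VERSION=${BASH_REMATCH[1]}                  # 0.6.0-beta.2
  TAG=v$VERSION                               # v0.6.0-beta.2
fi
# The beta test then routes the release down the pre-release path:
[[ $VERSION =~ beta ]] && echo 'This is a beta release'
```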
.github/workflows/pypi-publish.yml (124 changes, vendored)
@@ -1,28 +1,18 @@
 name: PyPI Publish
 
 on:
-  push:
-    tags:
-      - 'python-v*'
+  release:
+    types: [published]
 
 jobs:
   linux:
-    name: Python ${{ matrix.config.platform }} manylinux${{ matrix.config.manylinux }}
     timeout-minutes: 60
     strategy:
       matrix:
-        config:
-          - platform: x86_64
-            manylinux: "2_17"
-            extra_args: ""
-          - platform: x86_64
-            manylinux: "2_28"
-            extra_args: "--features fp16kernels"
-          - platform: aarch64
-            manylinux: "2_24"
-            extra_args: ""
-            # We don't build fp16 kernels for aarch64, because it uses
-            # cross compilation image, which doesn't have a new enough compiler.
+        python-minor-version: ["8"]
+        platform:
+          - x86_64
+          - aarch64
     runs-on: "ubuntu-22.04"
     steps:
       - uses: actions/checkout@v4
@@ -32,22 +22,22 @@ jobs:
      - name: Set up Python
        uses: actions/setup-python@v4
        with:
-          python-version: 3.8
+          python-version: 3.${{ matrix.python-minor-version }}
      - uses: ./.github/workflows/build_linux_wheel
        with:
-          python-minor-version: 8
-          args: "--release --strip ${{ matrix.config.extra_args }}"
-          arm-build: ${{ matrix.config.platform == 'aarch64' }}
-          manylinux: ${{ matrix.config.manylinux }}
+          python-minor-version: ${{ matrix.python-minor-version }}
+          args: "--release --strip"
+          arm-build: ${{ matrix.platform == 'aarch64' }}
      - uses: ./.github/workflows/upload_wheel
        with:
-          pypi_token: ${{ secrets.LANCEDB_PYPI_API_TOKEN }}
-          fury_token: ${{ secrets.FURY_TOKEN }}
+          token: ${{ secrets.LANCEDB_PYPI_API_TOKEN }}
+          repo: "pypi"
  mac:
    timeout-minutes: 60
    runs-on: ${{ matrix.config.runner }}
    strategy:
      matrix:
+        python-minor-version: ["8"]
        config:
          - target: x86_64-apple-darwin
            runner: macos-13
@@ -58,6 +48,7 @@ jobs:
    steps:
      - uses: actions/checkout@v4
        with:
+          ref: ${{ inputs.ref }}
          fetch-depth: 0
          lfs: true
      - name: Set up Python
@@ -66,95 +57,36 @@ jobs:
          python-version: 3.12
      - uses: ./.github/workflows/build_mac_wheel
        with:
-          python-minor-version: 8
-          args: "--release --strip --target ${{ matrix.config.target }} --features fp16kernels"
+          python-minor-version: ${{ matrix.python-minor-version }}
+          args: "--release --strip --target ${{ matrix.config.target }}"
      - uses: ./.github/workflows/upload_wheel
        with:
-          pypi_token: ${{ secrets.LANCEDB_PYPI_API_TOKEN }}
-          fury_token: ${{ secrets.FURY_TOKEN }}
+          python-minor-version: ${{ matrix.python-minor-version }}
+          token: ${{ secrets.LANCEDB_PYPI_API_TOKEN }}
+          repo: "pypi"
  windows:
    timeout-minutes: 60
    runs-on: windows-latest
+    strategy:
+      matrix:
+        python-minor-version: ["8"]
    steps:
      - uses: actions/checkout@v4
        with:
+          ref: ${{ inputs.ref }}
          fetch-depth: 0
          lfs: true
      - name: Set up Python
        uses: actions/setup-python@v4
        with:
-          python-version: 3.8
+          python-version: 3.${{ matrix.python-minor-version }}
      - uses: ./.github/workflows/build_windows_wheel
        with:
-          python-minor-version: 8
+          python-minor-version: ${{ matrix.python-minor-version }}
          args: "--release --strip"
          vcpkg_token: ${{ secrets.VCPKG_GITHUB_PACKAGES }}
      - uses: ./.github/workflows/upload_wheel
        with:
-          pypi_token: ${{ secrets.LANCEDB_PYPI_API_TOKEN }}
-          fury_token: ${{ secrets.FURY_TOKEN }}
-  gh-release:
-    runs-on: ubuntu-latest
-    permissions:
-      contents: write
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-          lfs: true
-      - name: Extract version
-        id: extract_version
-        env:
-          GITHUB_REF: ${{ github.ref }}
-        run: |
-          set -e
-          echo "Extracting tag and version from $GITHUB_REF"
-          if [[ $GITHUB_REF =~ refs/tags/python-v(.*) ]]; then
-            VERSION=${BASH_REMATCH[1]}
-            TAG=python-v$VERSION
-            echo "tag=$TAG" >> $GITHUB_OUTPUT
-            echo "version=$VERSION" >> $GITHUB_OUTPUT
-          else
-            echo "Failed to extract version from $GITHUB_REF"
-            exit 1
-          fi
-          echo "Extracted version $VERSION from $GITHUB_REF"
-          if [[ $VERSION =~ beta ]]; then
-            echo "This is a beta release"
-
-            # Get last release (that is not this one)
-            FROM_TAG=$(git tag --sort='version:refname' \
-              | grep ^python-v \
-              | grep -vF "$TAG" \
-              | python ci/semver_sort.py python-v \
-              | tail -n 1)
-          else
-            echo "This is a stable release"
-            # Get last stable tag (ignore betas)
-            FROM_TAG=$(git tag --sort='version:refname' \
-              | grep ^python-v \
-              | grep -vF "$TAG" \
-              | grep -v beta \
-              | python ci/semver_sort.py python-v \
-              | tail -n 1)
-          fi
-          echo "Found from tag $FROM_TAG"
-          echo "from_tag=$FROM_TAG" >> $GITHUB_OUTPUT
-      - name: Create Python Release Notes
-        id: python_release_notes
-        uses: mikepenz/release-changelog-builder-action@v4
-        with:
-          configuration: .github/release_notes.json
-          toTag: ${{ steps.extract_version.outputs.tag }}
-          fromTag: ${{ steps.extract_version.outputs.from_tag }}
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-      - name: Create Python GH release
-        uses: softprops/action-gh-release@v2
-        with:
-          prerelease: ${{ contains('beta', github.ref) }}
-          tag_name: ${{ steps.extract_version.outputs.tag }}
-          token: ${{ secrets.GITHUB_TOKEN }}
-          generate_release_notes: false
-          name: Python LanceDB v${{ steps.extract_version.outputs.version }}
-          body: ${{ steps.python_release_notes.outputs.changelog }}
+          python-minor-version: ${{ matrix.python-minor-version }}
+          token: ${{ secrets.LANCEDB_PYPI_API_TOKEN }}
+          repo: "pypi"
.github/workflows/python-make-release-commit.yml (new file, 56 lines, vendored)
@@ -0,0 +1,56 @@
+name: Python - Create release commit
+
+on:
+  workflow_dispatch:
+    inputs:
+      dry_run:
+        description: 'Dry run (create the local commit/tags but do not push it)'
+        required: true
+        default: "false"
+        type: choice
+        options:
+          - "true"
+          - "false"
+      part:
+        description: 'What kind of release is this?'
+        required: true
+        default: 'patch'
+        type: choice
+        options:
+          - patch
+          - minor
+          - major
+
+jobs:
+  bump-version:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Check out main
+        uses: actions/checkout@v4
+        with:
+          ref: main
+          persist-credentials: false
+          fetch-depth: 0
+          lfs: true
+      - name: Set git configs for bumpversion
+        shell: bash
+        run: |
+          git config user.name 'Lance Release'
+          git config user.email 'lance-dev@lancedb.com'
+      - name: Set up Python
+        uses: actions/setup-python@v5
+        with:
+          python-version: "3.11"
+      - name: Bump version, create tag and commit
+        working-directory: python
+        run: |
+          pip install bump2version
+          bumpversion --verbose ${{ inputs.part }}
+      - name: Push new version and tag
+        if: ${{ inputs.dry_run }} == "false"
+        uses: ad-m/github-push-action@master
+        with:
+          github_token: ${{ secrets.LANCEDB_RELEASE_TOKEN }}
+          branch: main
+          tags: true
.github/workflows/python.yml (6 changes, vendored)
@@ -75,7 +75,7 @@ jobs:
    timeout-minutes: 30
    strategy:
      matrix:
-        python-minor-version: ["9", "11"]
+        python-minor-version: ["8", "11"]
    runs-on: "ubuntu-22.04"
    defaults:
      run:
@@ -99,8 +99,6 @@ jobs:
        workspaces: python
    - uses: ./.github/workflows/build_linux_wheel
    - uses: ./.github/workflows/run_tests
-      with:
-        integration: true
    # Make sure wheels are not included in the Rust cache
    - name: Delete wheels
      run: rm -rf target/wheels
@@ -192,4 +190,4 @@ jobs:
        pip install -e .[tests]
        pip install tantivy
    - name: Run tests
-      run: pytest -m "not slow and not s3_test" -x -v --durations=30 python/tests
+      run: pytest -m "not slow" -x -v --durations=30 python/tests
.github/workflows/run_tests/action.yml (16 changes, vendored)
@@ -5,10 +5,6 @@ inputs:
   python-minor-version:
     required: true
     description: "8 9 10 11 12"
-  integration:
-    required: false
-    description: "Run integration tests"
-    default: "false"
 runs:
   using: "composite"
   steps:
@@ -16,16 +12,6 @@ runs:
      shell: bash
      run: |
        pip3 install $(ls target/wheels/lancedb-*.whl)[tests,dev]
-    - name: Setup localstack for integration tests
-      if: ${{ inputs.integration == 'true' }}
+    - name: pytest
      shell: bash
-      working-directory: .
-      run: docker compose up --detach --wait
-    - name: pytest (with integration)
-      shell: bash
-      if: ${{ inputs.integration == 'true' }}
      run: pytest -m "not slow" -x -v --durations=30 python/python/tests
-    - name: pytest (no integration tests)
-      shell: bash
-      if: ${{ inputs.integration != 'true' }}
-      run: pytest -m "not slow and not s3_test" -x -v --durations=30 python/python/tests
.github/workflows/rust.yml (14 changes, vendored)
@@ -31,10 +31,6 @@ jobs:
    run:
      shell: bash
      working-directory: rust
-    env:
-      # Need up-to-date compilers for kernels
-      CC: gcc-12
-      CXX: g++-12
    steps:
      - uses: actions/checkout@v4
        with:
@@ -58,10 +54,6 @@ jobs:
    run:
      shell: bash
      working-directory: rust
-    env:
-      # Need up-to-date compilers for kernels
-      CC: gcc-12
-      CXX: g++-12
    steps:
      - uses: actions/checkout@v4
        with:
@@ -74,9 +66,6 @@ jobs:
      run: |
        sudo apt update
        sudo apt install -y protobuf-compiler libssl-dev
-    - name: Start S3 integration test environment
-      working-directory: .
-      run: docker compose up --detach --wait
    - name: Build
      run: cargo build --all-features
    - name: Run tests
@@ -108,8 +97,7 @@ jobs:
    - name: Build
      run: cargo build --all-features
    - name: Run tests
-      # Run with everything except the integration tests.
-      run: cargo test --features remote,fp16kernels
+      run: cargo test --all-features
  windows:
    runs-on: windows-2022
    steps:
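The removed Node, Python, and Rust steps all pointed at the same LocalStack service (S3 and DynamoDB on http://localhost:4566) started via `docker compose`. A sketch of running the Rust suite locally the way the deleted step did, assuming the repository's compose file provides that service:

```sh
# Start LocalStack and wait until it reports healthy.
docker compose up --detach --wait

# Build and run everything, including the S3 integration tests.
cargo build --all-features
cargo test --all-features

# Tear the environment down afterwards.
docker compose down
```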
.github/workflows/upload_wheel/action.yml (53 changes, vendored)
@@ -2,43 +2,28 @@ name: upload-wheel
 
 description: "Upload wheels to Pypi"
 inputs:
-  pypi_token:
+  os:
+    required: true
+    description: "ubuntu-22.04 or macos-13"
+  repo:
+    required: false
+    description: "pypi or testpypi"
+    default: "pypi"
+  token:
     required: true
     description: "release token for the repo"
-  fury_token:
-    required: true
-    description: "release token for the fury repo"
 
 runs:
   using: "composite"
   steps:
     - name: Install dependencies
       shell: bash
       run: |
        python -m pip install --upgrade pip
        pip install twine
-    - name: Choose repo
-      shell: bash
-      id: choose_repo
-      run: |
-        if [ ${{ github.ref }} == "*beta*" ]; then
-          echo "repo=fury" >> $GITHUB_OUTPUT
-        else
-          echo "repo=pypi" >> $GITHUB_OUTPUT
-        fi
-    - name: Publish to PyPI
-      shell: bash
-      env:
-        FURY_TOKEN: ${{ inputs.fury_token }}
-        PYPI_TOKEN: ${{ inputs.pypi_token }}
-      run: |
-        if [ ${{ steps.choose_repo.outputs.repo }} == "fury" ]; then
-          WHEEL=$(ls target/wheels/lancedb-*.whl 2> /dev/null | head -n 1)
-          echo "Uploading $WHEEL to Fury"
-          curl -f -F package=@$WHEEL https://$FURY_TOKEN@push.fury.io/lancedb/
-        else
-          twine upload --repository ${{ steps.choose_repo.outputs.repo }} \
-            --username __token__ \
-            --password $PYPI_TOKEN \
-            target/wheels/lancedb-*.whl
-        fi
+    - name: Publish wheel
+      env:
+        TWINE_USERNAME: __token__
+        TWINE_PASSWORD: ${{ inputs.token }}
+      shell: bash
+      run: twine upload --repository ${{ inputs.repo }} target/wheels/lancedb-*.whl
.gitignore (2 changes, vendored)
@@ -6,7 +6,7 @@
 venv
 
 .vscode
-.zed
 rust/target
 rust/Cargo.lock
 
Changed file (name not captured; evidently the pre-commit configuration, .pre-commit-config.yaml)
@@ -10,12 +10,9 @@ repos:
    rev: v0.2.2
    hooks:
      - id: ruff
-  - repo: local
+  - repo: https://github.com/pre-commit/mirrors-prettier
+    rev: v3.1.0
    hooks:
-      - id: local-biome-check
-        name: biome check
-        entry: npx @biomejs/biome check --config-path nodejs/biome.json nodejs/
-        language: system
-        types: [text]
+      - id: prettier
        files: "nodejs/.*"
        exclude: nodejs/lancedb/native.d.ts|nodejs/dist/.*
Cargo.toml (28 changes)
@@ -1,5 +1,5 @@
 [workspace]
-members = ["rust/ffi/node", "rust/lancedb", "nodejs", "python", "java/core/lancedb-jni"]
+members = ["rust/ffi/node", "rust/lancedb", "nodejs", "python"]
 # Python package needs to be built by maturin.
 exclude = ["python"]
 resolver = "2"
@@ -14,22 +14,22 @@ keywords = ["lancedb", "lance", "database", "vector", "search"]
 categories = ["database-implementations"]
 
 [workspace.dependencies]
-lance = { "version" = "=0.11.1", "features" = ["dynamodb"] }
-lance-index = { "version" = "=0.11.1" }
-lance-linalg = { "version" = "=0.11.1" }
-lance-testing = { "version" = "=0.11.1" }
+lance = { "version" = "=0.10.6", "features" = ["dynamodb"] }
+lance-index = { "version" = "=0.10.6" }
+lance-linalg = { "version" = "=0.10.6" }
+lance-testing = { "version" = "=0.10.6" }
 # Note that this one does not include pyarrow
-arrow = { version = "51.0", optional = false }
-arrow-array = "51.0"
-arrow-data = "51.0"
-arrow-ipc = "51.0"
-arrow-ord = "51.0"
-arrow-schema = "51.0"
-arrow-arith = "51.0"
-arrow-cast = "51.0"
+arrow = { version = "50.0", optional = false }
+arrow-array = "50.0"
+arrow-data = "50.0"
+arrow-ipc = "50.0"
+arrow-ord = "50.0"
+arrow-schema = "50.0"
+arrow-arith = "50.0"
+arrow-cast = "50.0"
 async-trait = "0"
 chrono = "0.4.35"
-half = { "version" = "=2.4.1", default-features = false, features = [
+half = { "version" = "=2.3.1", default-features = false, features = [
   "num-traits",
 ] }
 futures = "0"
@@ -20,7 +20,7 @@
|
|||||||
|
|
||||||
<hr />
|
<hr />
|
||||||
|
|
||||||
LanceDB is an open-source database for vector-search built with persistent storage, which greatly simplifies retrieval, filtering and management of embeddings.
|
LanceDB is an open-source database for vector-search built with persistent storage, which greatly simplifies retrevial, filtering and management of embeddings.
|
||||||
|
|
||||||
The key features of LanceDB include:
|
The key features of LanceDB include:
|
||||||
|
|
||||||
@@ -36,7 +36,7 @@ The key features of LanceDB include:
|
|||||||
|
|
||||||
* GPU support in building vector index(*).
|
* GPU support in building vector index(*).
|
||||||
|
|
||||||
* Ecosystem integrations with [LangChain 🦜️🔗](https://python.langchain.com/docs/integrations/vectorstores/lancedb/), [LlamaIndex 🦙](https://gpt-index.readthedocs.io/en/latest/examples/vector_stores/LanceDBIndexDemo.html), Apache-Arrow, Pandas, Polars, DuckDB and more on the way.
|
* Ecosystem integrations with [LangChain 🦜️🔗](https://python.langchain.com/en/latest/modules/indexes/vectorstores/examples/lanecdb.html), [LlamaIndex 🦙](https://gpt-index.readthedocs.io/en/latest/examples/vector_stores/LanceDBIndexDemo.html), Apache-Arrow, Pandas, Polars, DuckDB and more on the way.
|
||||||
|
|
||||||
LanceDB's core is written in Rust 🦀 and is built using <a href="https://github.com/lancedb/lance">Lance</a>, an open-source columnar format designed for performant ML workloads.
|
LanceDB's core is written in Rust 🦀 and is built using <a href="https://github.com/lancedb/lance">Lance</a>, an open-source columnar format designed for performant ML workloads.
|
||||||
|
|
||||||

@@ -1,51 +0,0 @@
-set -e
-
-RELEASE_TYPE=${1:-"stable"}
-BUMP_MINOR=${2:-false}
-TAG_PREFIX=${3:-"v"} # Such as "python-v"
-HEAD_SHA=${4:-$(git rev-parse HEAD)}
-
-readonly SELF_DIR=$(cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
-
-PREV_TAG=$(git tag --sort='version:refname' | grep ^$TAG_PREFIX | python $SELF_DIR/semver_sort.py $TAG_PREFIX | tail -n 1)
-echo "Found previous tag $PREV_TAG"
-
-# Initially, we don't want to tag if we are doing stable, because we will bump
-# again later. See comment at end for why.
-if [[ "$RELEASE_TYPE" == 'stable' ]]; then
-  BUMP_ARGS="--no-tag"
-fi
-
-# If last is stable and not bumping minor
-if [[ $PREV_TAG != *beta* ]]; then
-  if [[ "$BUMP_MINOR" != "false" ]]; then
-    # X.Y.Z -> X.(Y+1).0-beta.0
-    bump-my-version bump -vv $BUMP_ARGS minor
-  else
-    # X.Y.Z -> X.Y.(Z+1)-beta.0
-    bump-my-version bump -vv $BUMP_ARGS patch
-  fi
-else
-  if [[ "$BUMP_MINOR" != "false" ]]; then
-    # X.Y.Z-beta.N -> X.(Y+1).0-beta.0
-    bump-my-version bump -vv $BUMP_ARGS minor
-  else
-    # X.Y.Z-beta.N -> X.Y.Z-beta.(N+1)
-    bump-my-version bump -vv $BUMP_ARGS pre_n
-  fi
-fi
-
-# The above bump will always bump to a pre-release version. If we are releasing
-# a stable version, bump the pre-release level ("pre_l") to make it stable.
-if [[ $RELEASE_TYPE == 'stable' ]]; then
-  # X.Y.Z-beta.N -> X.Y.Z
-  bump-my-version bump -vv pre_l
-fi
-
-# Validate that we have incremented version appropriately for breaking changes
-NEW_TAG=$(git describe --tags --exact-match HEAD)
-NEW_VERSION=$(echo $NEW_TAG | sed "s/^$TAG_PREFIX//")
-LAST_STABLE_RELEASE=$(git tag --sort='version:refname' | grep ^$TAG_PREFIX | grep -v beta | grep -vF "$NEW_TAG" | python $SELF_DIR/semver_sort.py $TAG_PREFIX | tail -n 1)
-LAST_STABLE_VERSION=$(echo $LAST_STABLE_RELEASE | sed "s/^$TAG_PREFIX//")
-
-python $SELF_DIR/check_breaking_changes.py $LAST_STABLE_RELEASE $HEAD_SHA $LAST_STABLE_VERSION $NEW_VERSION

@@ -1,35 +0,0 @@
-"""
-Check whether there are any breaking changes in the PRs between the base and head commits.
-If there are, assert that we have incremented the minor version.
-"""
-import argparse
-import os
-from packaging.version import parse
-
-from github import Github
-
-if __name__ == "__main__":
-    parser = argparse.ArgumentParser()
-    parser.add_argument("base")
-    parser.add_argument("head")
-    parser.add_argument("last_stable_version")
-    parser.add_argument("current_version")
-    args = parser.parse_args()
-
-    repo = Github(os.environ["GITHUB_TOKEN"]).get_repo(os.environ["GITHUB_REPOSITORY"])
-    commits = repo.compare(args.base, args.head).commits
-    prs = (pr for commit in commits for pr in commit.get_pulls())
-
-    for pr in prs:
-        if any(label.name == "breaking-change" for label in pr.labels):
-            print(f"Breaking change in PR: {pr.html_url}")
-            break
-    else:
-        print("No breaking changes found.")
-        exit(0)
-
-    last_stable_version = parse(args.last_stable_version)
-    current_version = parse(args.current_version)
-    if current_version.minor <= last_stable_version.minor:
-        print("Minor version is not greater than the last stable version.")
-        exit(1)
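
The deleted checker above relies on Python's `for`/`else` construct, which is easy to misread: the `else` body runs only when the loop finishes without hitting `break`. A minimal sketch of the same control flow, using hypothetical label data:

```python
# for/else in isolation: `else` runs only if the loop never breaks.
labels_per_pr = [["enhancement"], ["bug"]]  # hypothetical PR labels

for labels in labels_per_pr:
    if "breaking-change" in labels:
        print("Breaking change found; the minor version must be bumped.")
        break
else:
    # Reached here because nothing broke out of the loop.
    print("No breaking changes found.")
```
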

@@ -1,35 +0,0 @@
-"""
-Takes a list of semver strings and sorts them in ascending order.
-"""
-
-import sys
-from packaging.version import parse, InvalidVersion
-
-if __name__ == "__main__":
-    import argparse
-    parser = argparse.ArgumentParser()
-    parser.add_argument("prefix", default="v")
-    args = parser.parse_args()
-
-    # Read the input from stdin
-    lines = sys.stdin.readlines()
-
-    # Parse the versions
-    versions = []
-    for line in lines:
-        line = line.strip()
-        try:
-            version_str = line.removeprefix(args.prefix)
-            version = parse(version_str)
-        except InvalidVersion:
-            # There are old tags that don't follow the semver format
-            print(f"Invalid version: {line}", file=sys.stderr)
-            continue
-        versions.append((line, version))
-
-    # Sort the versions
-    versions.sort(key=lambda x: x[1])
-
-    # Print the sorted versions as original strings
-    for line, _ in versions:
-        print(line)
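
The script above sorts tags with `packaging.version` rather than plain string comparison, which matters for two reasons the release flow depends on: numeric components compare numerically, and pre-releases sort below their final release. A quick sketch of that behavior:

```python
from packaging.version import parse

tags = ["v0.4.10", "v0.4.2", "v0.4.10-beta.1"]
ordered = sorted(tags, key=lambda t: parse(t.removeprefix("v")))
print(ordered)
# ['v0.4.2', 'v0.4.10-beta.1', 'v0.4.10']
# A lexicographic sort would instead pick v0.4.2 as the "latest" tag.
```
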

@@ -1,18 +1,18 @@
 version: "3.9"
 services:
   localstack:
-    image: localstack/localstack:3.3
+    image: localstack/localstack:0.14
     ports:
       - 4566:4566
     environment:
-      - SERVICES=s3,dynamodb,kms
+      - SERVICES=s3,dynamodb
       - DEBUG=1
       - LS_LOG=trace
       - DOCKER_HOST=unix:///var/run/docker.sock
       - AWS_ACCESS_KEY_ID=ACCESSKEY
       - AWS_SECRET_ACCESS_KEY=SECRETKEY
     healthcheck:
-      test: [ "CMD", "curl", "-s", "http://localhost:4566/_localstack/health" ]
+      test: [ "CMD", "curl", "-f", "http://localhost:4566/health" ]
       interval: 5s
       retries: 3
       start_period: 10s
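
The compose file gives localstack dummy credentials and an S3 endpoint on port 4566. A minimal sketch of pointing LanceDB at it, using the environment variables documented later in this compare; the bucket name is hypothetical and would need to be created first (e.g. with `awslocal s3 mb`):

```python
import os

import lancedb

# Credentials and endpoint matching the compose file above.
os.environ["AWS_ACCESS_KEY_ID"] = "ACCESSKEY"
os.environ["AWS_SECRET_ACCESS_KEY"] = "SECRETKEY"
os.environ["AWS_DEFAULT_REGION"] = "us-east-1"
os.environ["AWS_ENDPOINT"] = "http://localhost:4566"
os.environ["ALLOW_HTTP"] = "true"  # localstack serves plain HTTP, not HTTPS

db = lancedb.connect("s3://test-bucket/db")  # hypothetical bucket
table = db.create_table("demo", data=[{"vector": [1.0, 2.0], "item": "a"}])
print(table.search([1.0, 2.0]).limit(1).to_pandas())
```
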
@@ -57,6 +57,16 @@ plugins:
       - https://arrow.apache.org/docs/objects.inv
       - https://pandas.pydata.org/docs/objects.inv
   - mkdocs-jupyter
+  - ultralytics:
+      verbose: True
+      enabled: True
+      default_image: "assets/lancedb_and_lance.png" # Default image for all pages
+      add_image: True # Automatically add meta image
+      add_keywords: True # Add page keywords in the header tag
+      add_share_buttons: True # Add social share buttons
+      add_authors: False # Display page authors
+      add_desc: False
+      add_dates: False

 markdown_extensions:
   - admonition
@@ -94,14 +104,6 @@ nav:
       - Overview: hybrid_search/hybrid_search.md
       - Comparing Rerankers: hybrid_search/eval.md
       - Airbnb financial data example: notebooks/hybrid_search.ipynb
-      - Reranking:
-          - Quickstart: reranking/index.md
-          - Cohere Reranker: reranking/cohere.md
-          - Linear Combination Reranker: reranking/linear_combination.md
-          - Cross Encoder Reranker: reranking/cross_encoder.md
-          - ColBERT Reranker: reranking/colbert.md
-          - OpenAI Reranker: reranking/openai.md
-          - Building Custom Rerankers: reranking/custom_reranker.md
       - Filtering: sql.md
       - Versioning & Reproducibility: notebooks/reproducibility.ipynb
       - Configuring Storage: guides/storage.md
@@ -118,10 +120,9 @@ nav:
       - Pandas and PyArrow: python/pandas_and_pyarrow.md
       - Polars: python/polars_arrow.md
       - DuckDB: python/duckdb.md
-      - LangChain:
-      - LangChain 🔗: integrations/langchain.md
-      - LangChain JS/TS 🔗: https://js.langchain.com/docs/integrations/vectorstores/lancedb
-      - LlamaIndex 🦙: https://docs.llamaindex.ai/en/stable/examples/vector_stores/LanceDBIndexDemo/
+      - LangChain 🔗: https://python.langchain.com/en/latest/modules/indexes/vectorstores/examples/lancedb.html
+      - LangChain JS/TS 🔗: https://js.langchain.com/docs/modules/data_connection/vectorstores/integrations/lancedb
+      - LlamaIndex 🦙: https://gpt-index.readthedocs.io/en/latest/examples/vector_stores/LanceDBIndexDemo.html
       - Pydantic: python/pydantic.md
       - Voxel51: integrations/voxel51.md
       - PromptTools: integrations/prompttools.md
@@ -142,6 +143,7 @@ nav:
       - TransformersJS Embedding Search: examples/transformerjs_embedding_search_nodejs.md
   - 🦀 Rust:
       - Overview: examples/examples_rust.md
+  - 🔧 CLI & Config: cli_config.md
   - 💭 FAQs: faq.md
   - ⚙️ API reference:
       - 🐍 Python: python/python.md
@@ -169,14 +171,6 @@ nav:
       - Overview: hybrid_search/hybrid_search.md
       - Comparing Rerankers: hybrid_search/eval.md
       - Airbnb financial data example: notebooks/hybrid_search.ipynb
-      - Reranking:
-          - Quickstart: reranking/index.md
-          - Cohere Reranker: reranking/cohere.md
-          - Linear Combination Reranker: reranking/linear_combination.md
-          - Cross Encoder Reranker: reranking/cross_encoder.md
-          - ColBERT Reranker: reranking/colbert.md
-          - OpenAI Reranker: reranking/openai.md
-          - Building Custom Rerankers: reranking/custom_reranker.md
       - Filtering: sql.md
       - Versioning & Reproducibility: notebooks/reproducibility.ipynb
       - Configuring Storage: guides/storage.md
@@ -193,8 +187,8 @@ nav:
       - Pandas and PyArrow: python/pandas_and_pyarrow.md
       - Polars: python/polars_arrow.md
       - DuckDB: python/duckdb.md
-      - LangChain 🦜️🔗↗: https://python.langchain.com/docs/integrations/vectorstores/lancedb
-      - LangChain.js 🦜️🔗↗: https://js.langchain.com/docs/integrations/vectorstores/lancedb
+      - LangChain 🦜️🔗↗: https://python.langchain.com/en/latest/modules/indexes/vectorstores/examples/lancedb.html
+      - LangChain.js 🦜️🔗↗: https://js.langchain.com/docs/modules/data_connection/vectorstores/integrations/lancedb
       - LlamaIndex 🦙↗: https://gpt-index.readthedocs.io/en/latest/examples/vector_stores/LanceDBIndexDemo.html
       - Pydantic: python/pydantic.md
       - Voxel51: integrations/voxel51.md
@@ -232,10 +226,3 @@ extra:
   analytics:
     provider: google
     property: G-B7NFM40W74
-  social:
-    - icon: fontawesome/brands/github
-      link: https://github.com/lancedb/lancedb
-    - icon: fontawesome/brands/x-twitter
-      link: https://twitter.com/lancedb
-    - icon: fontawesome/brands/linkedin
-      link: https://www.linkedin.com/company/lancedb

@@ -2,4 +2,5 @@ mkdocs==1.5.3
 mkdocs-jupyter==0.24.1
 mkdocs-material==9.5.3
 mkdocstrings[python]==0.20.0
 pydantic
+mkdocs-ultralytics-plugin==0.0.44

@@ -44,36 +44,6 @@

 !!! info "Please also make sure you're using the same version of Arrow as in the [lancedb crate](https://github.com/lancedb/lancedb/blob/main/Cargo.toml)"

-### Preview releases
-
-Stable releases are created about every 2 weeks. For the latest features and bug
-fixes, you can install the preview release. These releases receive the same
-level of testing as stable releases, but are not guaranteed to be available for
-more than 6 months after they are released. Once your application is stable, we
-recommend switching to stable releases.
-
-=== "Python"
-
-    ```shell
-    pip install --pre --extra-index-url https://pypi.fury.io/lancedb/ lancedb
-    ```
-
-=== "Typescript"
-
-    ```shell
-    npm install vectordb@preview
-    ```
-
-=== "Rust"
-
-    We don't push preview releases to crates.io, but you can referent the tag
-    in GitHub within your Cargo dependencies:
-
-    ```toml
-    [dependencies]
-    lancedb = { git = "https://github.com/lancedb/lancedb.git", tag = "vX.Y.Z-beta.N" }
-    ```
-
 ## Connect to a database

 === "Python"

docs/src/cli_config.md (new file): 51 changes
@@ -0,0 +1,51 @@
+# CLI & Config
+
+## LanceDB CLI
+Once lanceDB is installed, you can access the CLI using `lancedb` command on the console.
+
+```
+lancedb
+```
+
+This lists out all the various command-line options available. You can get the usage or help for a particular command.
+
+```
+lancedb {command} --help
+```
+
+## LanceDB config
+LanceDB uses a global config file to store certain settings. These settings are configurable using the lanceDB cli.
+To view your config settings, you can use:
+
+```
+lancedb config
+```
+
+These config parameters can be tuned using the cli.
+
+```
+lancedb {config_name} --{argument}
+```
+
+## LanceDB Opt-in Diagnostics
+When enabled, LanceDB will send anonymous events to help us improve LanceDB. These diagnostics are used only for error reporting and no data is collected. Error & stats allow us to automate certain aspects of bug reporting, prioritization of fixes and feature requests.
+These diagnostics are opt-in and can be enabled or disabled using the `lancedb diagnostics` command. These are enabled by default.
+
+### Get usage help
+
+```
+lancedb diagnostics --help
+```
+
+### Disable diagnostics
+
+```
+lancedb diagnostics --disabled
+```
+
+### Enable diagnostics
+
+```
+lancedb diagnostics --enabled
+```
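
Since the new page documents a plain console command, the same operations can be scripted. A sketch that drives the documented commands from Python, assuming the `lancedb` CLI is installed and on PATH:

```python
import subprocess

# Show current config values (documented above as `lancedb config`).
subprocess.run(["lancedb", "config"], check=True)

# Opt out of the anonymous diagnostics described above.
subprocess.run(["lancedb", "diagnostics", "--disabled"], check=True)
```
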

@@ -154,12 +154,9 @@ Allows you to set parameters when registering a `sentence-transformers` object.
 !!! note "BAAI Embeddings example"
     Here is an example that uses BAAI embedding model from the HuggingFace Hub [supported models](https://huggingface.co/models?library=sentence-transformers)
     ```python
-    import lancedb
-    from lancedb.pydantic import LanceModel, Vector
-    from lancedb.embeddings import get_registry
-
     db = lancedb.connect("/tmp/db")
-    model = get_registry().get("sentence-transformers").create(name="BAAI/bge-small-en-v1.5", device="cpu")
+    registry = EmbeddingFunctionRegistry.get_instance()
+    model = registry.get("sentence-transformers").create(name="BAAI/bge-small-en-v1.5", device="cpu")

     class Words(LanceModel):
         text: str = model.SourceField()
@@ -168,7 +165,7 @@ Allows you to set parameters when registering a `sentence-transformers` object.
     table = db.create_table("words", schema=Words)
     table.add(
         [
-            {"text": "hello world"},
+            {"text": "hello world"}
             {"text": "goodbye world"}
         ]
     )
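
Both sides of these hunks resolve the same registry; `get_registry()` appears to be a convenience wrapper around `EmbeddingFunctionRegistry.get_instance()`. For reference, a self-contained sketch of the example in its `get_registry()` form, with the missing comma restored (assumes `lancedb` and `sentence-transformers` are installed):

```python
import lancedb
from lancedb.pydantic import LanceModel, Vector
from lancedb.embeddings import get_registry

db = lancedb.connect("/tmp/db")
model = get_registry().get("sentence-transformers").create(
    name="BAAI/bge-small-en-v1.5", device="cpu"
)

class Words(LanceModel):
    text: str = model.SourceField()  # embedded automatically on add()
    vector: Vector(model.ndims()) = model.VectorField()

table = db.create_table("words", schema=Words, mode="overwrite")
table.add([{"text": "hello world"}, {"text": "goodbye world"}])  # note the comma

print(table.search("greetings").limit(1).to_pydantic(Words)[0].text)
```
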
@@ -180,70 +177,6 @@ Allows you to set parameters when registering a `sentence-transformers` object.
 Visit sentence-transformers [HuggingFace HUB](https://huggingface.co/sentence-transformers) page for more information on the available models.


-### Huggingface embedding models
-We offer support for all huggingface models (which can be loaded via [transformers](https://huggingface.co/docs/transformers/en/index) library). The default model is `colbert-ir/colbertv2.0` which also has its own special callout - `registry.get("colbert")`
-
-Example usage -
-```python
-import lancedb
-import pandas as pd
-
-from lancedb.embeddings import get_registry
-from lancedb.pydantic import LanceModel, Vector
-
-model = get_registry().get("huggingface").create(name='facebook/bart-base')
-
-class TextModel(LanceModel):
-    text: str = model.SourceField()
-    vector: Vector(model.ndims()) = model.VectorField()
-
-df = pd.DataFrame({"text": ["hi hello sayonara", "goodbye world"]})
-table = db.create_table("greets", schema=Words)
-table.add()
-query = "old greeting"
-actual = table.search(query).limit(1).to_pydantic(Words)[0]
-print(actual.text)
-```
-
-
-### Ollama embeddings
-Generate embeddings via the [ollama](https://github.com/ollama/ollama-python) python library. More details:
-
-- [Ollama docs on embeddings](https://github.com/ollama/ollama/blob/main/docs/api.md#generate-embeddings)
-- [Ollama blog on embeddings](https://ollama.com/blog/embedding-models)
-
-| Parameter | Type | Default Value | Description |
-|------------------------|----------------------------|--------------------------|------------------------------------------------------------------------------------------------------------------------------------------------|
-| `name` | `str` | `nomic-embed-text` | The name of the model. |
-| `host` | `str` | `http://localhost:11434` | The Ollama host to connect to. |
-| `options` | `ollama.Options` or `dict` | `None` | Additional model parameters listed in the documentation for the [Modelfile](./modelfile.md#valid-parameters-and-values) such as `temperature`. |
-| `keep_alive` | `float` or `str` | `"5m"` | Controls how long the model will stay loaded into memory following the request. |
-| `ollama_client_kwargs` | `dict` | `{}` | kwargs that can be past to the `ollama.Client`. |
-
-```python
-import lancedb
-from lancedb.pydantic import LanceModel, Vector
-from lancedb.embeddings import get_registry
-
-db = lancedb.connect("/tmp/db")
-func = get_registry().get("ollama").create(name="nomic-embed-text")
-
-class Words(LanceModel):
-    text: str = func.SourceField()
-    vector: Vector(func.ndims()) = func.VectorField()
-
-table = db.create_table("words", schema=Words, mode="overwrite")
-table.add([
-    {"text": "hello world"},
-    {"text": "goodbye world"}
-])
-
-query = "greetings"
-actual = table.search(query).limit(1).to_pydantic(Words)[0]
-print(actual.text)
-```
-
-
 ### OpenAI embeddings
 LanceDB registers the OpenAI embeddings function in the registry by default, as `openai`. Below are the parameters that you can customize when creating the instances:

@@ -254,21 +187,18 @@ LanceDB registers the OpenAI embeddings function in the registry by default, as


 ```python
-import lancedb
-from lancedb.pydantic import LanceModel, Vector
-from lancedb.embeddings import get_registry

 db = lancedb.connect("/tmp/db")
-func = get_registry().get("openai").create(name="text-embedding-ada-002")
+registry = EmbeddingFunctionRegistry.get_instance()
+func = registry.get("openai").create()

 class Words(LanceModel):
     text: str = func.SourceField()
     vector: Vector(func.ndims()) = func.VectorField()

-table = db.create_table("words", schema=Words, mode="overwrite")
+table = db.create_table("words", schema=Words)
 table.add(
     [
-        {"text": "hello world"},
+        {"text": "hello world"}
         {"text": "goodbye world"}
     ]
 )
@@ -397,10 +327,6 @@ Supported parameters (to be passed in `create` method) are:
 Usage Example:

 ```python
-import lancedb
-from lancedb.pydantic import LanceModel, Vector
-from lancedb.embeddings import get_registry
-
 model = get_registry().get("bedrock-text").create()

 class TextModel(LanceModel):
@@ -435,12 +361,10 @@ This embedding function supports ingesting images as both bytes and urls. You ca
 LanceDB supports ingesting images directly from accessible links.

 ```python
-import lancedb
-from lancedb.pydantic import LanceModel, Vector
-from lancedb.embeddings import get_registry

 db = lancedb.connect(tmp_path)
-func = get_registry.get("open-clip").create()
+registry = EmbeddingFunctionRegistry.get_instance()
+func = registry.get("open-clip").create()

 class Images(LanceModel):
     label: str
@@ -515,12 +439,9 @@ This function is registered as `imagebind` and supports Audio, Video and Text mo
 Below is an example demonstrating how the API works:

 ```python
-import lancedb
-from lancedb.pydantic import LanceModel, Vector
-from lancedb.embeddings import get_registry
-
 db = lancedb.connect(tmp_path)
-func = get_registry.get("imagebind").create()
+registry = EmbeddingFunctionRegistry.get_instance()
+func = registry.get("imagebind").create()

 class ImageBindModel(LanceModel):
     text: str

@@ -46,7 +46,7 @@ For this purpose, LanceDB introduces an **embedding functions API**, that allow

 ```python
 class Pets(LanceModel):
-    vector: Vector(clip.ndims()) = clip.VectorField()
+    vector: Vector(clip.ndims) = clip.VectorField()
     image_uri: str = clip.SourceField()
 ```

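
The `clip.ndims()` versus `clip.ndims` change matters because `Vector(...)` needs a concrete integer at class-definition time to build the fixed-size-list field. A sketch of the method-call form, assuming the OpenCLIP embedding function is available in the registry:

```python
from lancedb.embeddings import get_registry
from lancedb.pydantic import LanceModel, Vector

clip = get_registry().get("open-clip").create()
dims = clip.ndims()  # a concrete int (e.g. 512, depending on the model)

class Pets(LanceModel):
    vector: Vector(dims) = clip.VectorField()
    image_uri: str = clip.SourceField()
```
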
@@ -149,7 +149,7 @@ You can also use the integration for adding utility operations in the schema. Fo

 ```python
 class Pets(LanceModel):
-    vector: Vector(clip.ndims()) = clip.VectorField()
+    vector: Vector(clip.ndims) = clip.VectorField()
     image_uri: str = clip.SourceField()

     @property
@@ -166,4 +166,4 @@ rs[2].image
 

 Now that you have the basic idea about LanceDB embedding functions and the embedding function registry,
 let's dive deeper into defining your own [custom functions](./custom_embedding_function.md).

@@ -11,64 +11,4 @@ LanceDB supports 3 methods of working with embeddings.
    that extends the default embedding functions.

 For python users, there is also a legacy [with_embeddings API](./legacy.md).
 It is retained for compatibility and will be removed in a future version.
-
-## Quickstart
-
-To get started with embeddings, you can use the built-in embedding functions.
-
-### OpenAI Embedding function
-LanceDB registers the OpenAI embeddings function in the registry as `openai`. You can pass any supported model name to the `create`. By default it uses `"text-embedding-ada-002"`.
-
-```python
-import lancedb
-from lancedb.pydantic import LanceModel, Vector
-from lancedb.embeddings import get_registry
-
-db = lancedb.connect("/tmp/db")
-func = get_registry().get("openai").create(name="text-embedding-ada-002")
-
-class Words(LanceModel):
-    text: str = func.SourceField()
-    vector: Vector(func.ndims()) = func.VectorField()
-
-table = db.create_table("words", schema=Words, mode="overwrite")
-table.add(
-    [
-        {"text": "hello world"},
-        {"text": "goodbye world"}
-    ]
-)
-
-query = "greetings"
-actual = table.search(query).limit(1).to_pydantic(Words)[0]
-print(actual.text)
-```
-
-### Sentence Transformers Embedding function
-LanceDB registers the Sentence Transformers embeddings function in the registry as `sentence-transformers`. You can pass any supported model name to the `create`. By default it uses `"sentence-transformers/paraphrase-MiniLM-L6-v2"`.
-
-```python
-import lancedb
-from lancedb.pydantic import LanceModel, Vector
-from lancedb.embeddings import get_registry
-
-db = lancedb.connect("/tmp/db")
-model = get_registry().get("sentence-transformers").create(name="BAAI/bge-small-en-v1.5", device="cpu")
-
-class Words(LanceModel):
-    text: str = model.SourceField()
-    vector: Vector(model.ndims()) = model.VectorField()
-
-table = db.create_table("words", schema=Words)
-table.add(
-    [
-        {"text": "hello world"},
-        {"text": "goodbye world"}
-    ]
-)
-
-query = "greetings"
-actual = table.search(query).limit(1).to_pydantic(Words)[0]
-print(actual.text)
-```

@@ -55,139 +55,18 @@ LanceDB OSS supports object stores such as AWS S3 (and compatible stores), Azure
 const db = await lancedb.connect("az://bucket/path");
 ```

-In most cases, when running in the respective cloud and permissions are set up correctly, no additional configuration is required. When running outside of the respective cloud, authentication credentials must be provided. Credentials and other configuration options can be set in two ways: first, by setting environment variables. And second, by passing a `storage_options` object to the `connect` function. For example, to increase the request timeout to 60 seconds, you can set the `TIMEOUT` environment variable to `60s`:
+In most cases, when running in the respective cloud and permissions are set up correctly, no additional configuration is required. When running outside of the respective cloud, authentication credentials must be provided using environment variables. In general, these environment variables are the same as those used by the respective cloud SDKs. The sections below describe the environment variables that can be used to configure each object store.

-```bash
-export TIMEOUT=60s
-```
+LanceDB OSS uses the [object-store](https://docs.rs/object_store/latest/object_store/) Rust crate for object store access. There are general environment variables that can be used to configure the object store, such as the request timeout and proxy configuration. See the [object_store ClientConfigKey](https://docs.rs/object_store/latest/object_store/enum.ClientConfigKey.html) doc for available configuration options. The environment variables that can be set are the snake-cased versions of these variable names. For example, to set `ProxyUrl` use the environment variable `PROXY_URL`. (Don't let the Rust docs intimidate you! We link to them so you can see an up-to-date list of the available options.)

-!!! note "`storage_options` availability"
-
-    The `storage_options` parameter is only available in Python *async* API and JavaScript API.
-    It is not yet supported in the Python synchronous API.
-
-If you only want this to apply to one particular connection, you can pass the `storage_options` argument when opening the connection:
-
-=== "Python"
-
-    ```python
-    import lancedb
-    db = await lancedb.connect_async(
-        "s3://bucket/path",
-        storage_options={"timeout": "60s"}
-    )
-    ```
-
-=== "JavaScript"
-
-    ```javascript
-    const lancedb = require("lancedb");
-    const db = await lancedb.connect("s3://bucket/path",
-      {storageOptions: {timeout: "60s"}});
-    ```
-
-Getting even more specific, you can set the `timeout` for only a particular table:
-
-=== "Python"
-
-    <!-- skip-test -->
-    ```python
-    import lancedb
-    db = await lancedb.connect_async("s3://bucket/path")
-    table = await db.create_table(
-        "table",
-        [{"a": 1, "b": 2}],
-        storage_options={"timeout": "60s"}
-    )
-    ```
-
-=== "JavaScript"
-
-    <!-- skip-test -->
-    ```javascript
-    const lancedb = require("lancedb");
-    const db = await lancedb.connect("s3://bucket/path");
-    const table = db.createTable(
-        "table",
-        [{ a: 1, b: 2}],
-        {storageOptions: {timeout: "60s"}}
-    );
-    ```
-
-!!! info "Storage option casing"
-
-    The storage option keys are case-insensitive. So `connect_timeout` and `CONNECT_TIMEOUT` are the same setting. Usually lowercase is used in the `storage_options` argument and uppercase is used for environment variables. In the `lancedb` Node package, the keys can also be provided in `camelCase` capitalization. For example, `connectTimeout` is equivalent to `connect_timeout`.
-
-### General configuration
-
-There are several options that can be set for all object stores, mostly related to network client configuration.
-
-<!-- from here: https://docs.rs/object_store/latest/object_store/enum.ClientConfigKey.html -->
-
-| Key | Description |
-|----------------------------|--------------------------------------------------------------------------------------------------|
-| `allow_http` | Allow non-TLS, i.e. non-HTTPS connections. Default: `False`. |
-| `allow_invalid_certificates`| Skip certificate validation on HTTPS connections. Default: `False`. |
-| `connect_timeout` | Timeout for only the connect phase of a Client. Default: `5s`. |
-| `timeout` | Timeout for the entire request, from connection until the response body has finished. Default: `30s`. |
-| `user_agent` | User agent string to use in requests. |
-| `proxy_url` | URL of a proxy server to use for requests. Default: `None`. |
-| `proxy_ca_certificate` | PEM-formatted CA certificate for proxy connections. |
-| `proxy_excludes` | List of hosts that bypass the proxy. This is a comma-separated list of domains and IP masks. Any subdomain of the provided domain will be bypassed. For example, `example.com, 192.168.1.0/24` would bypass `https://api.example.com`, `https://www.example.com`, and any IP in the range `192.168.1.0/24`. |
-
 ### AWS S3

-To configure credentials for AWS S3, you can use the `AWS_ACCESS_KEY_ID`, `AWS_SECRET_ACCESS_KEY`, and `AWS_SESSION_TOKEN` keys. Region can also be set, but it is not mandatory when using AWS.
-These can be set as environment variables or passed in the `storage_options` parameter:
+To configure credentials for AWS S3, you can use the `AWS_ACCESS_KEY_ID`, `AWS_SECRET_ACCESS_KEY`, and `AWS_SESSION_TOKEN` environment variables.

-=== "Python"
-
-    ```python
-    import lancedb
-    db = await lancedb.connect_async(
-        "s3://bucket/path",
-        storage_options={
-            "aws_access_key_id": "my-access-key",
-            "aws_secret_access_key": "my-secret-key",
-            "aws_session_token": "my-session-token",
-        }
-    )
-    ```
-
-=== "JavaScript"
-
-    ```javascript
-    const lancedb = require("lancedb");
-    const db = await lancedb.connect(
-      "s3://bucket/path",
-      {
-        storageOptions: {
-          awsAccessKeyId: "my-access-key",
-          awsSecretAccessKey: "my-secret-key",
-          awsSessionToken: "my-session-token",
-        }
-      }
-    );
-    ```
-
 Alternatively, if you are using AWS SSO, you can use the `AWS_PROFILE` and `AWS_DEFAULT_REGION` environment variables.

-The following keys can be used as both environment variables or keys in the `storage_options` parameter:
+You can see a full list of environment variables [here](https://docs.rs/object_store/latest/object_store/aws/struct.AmazonS3Builder.html#method.from_env).

-| Key | Description |
-|------------------------------------|------------------------------------------------------------------------------------------------------|
-| `aws_region` / `region` | The AWS region the bucket is in. This can be automatically detected when using AWS S3, but must be specified for S3-compatible stores. |
-| `aws_access_key_id` / `access_key_id` | The AWS access key ID to use. |
-| `aws_secret_access_key` / `secret_access_key` | The AWS secret access key to use. |
-| `aws_session_token` / `session_token` | The AWS session token to use. |
-| `aws_endpoint` / `endpoint` | The endpoint to use for S3-compatible stores. |
-| `aws_virtual_hosted_style_request` / `virtual_hosted_style_request` | Whether to use virtual hosted-style requests, where the bucket name is part of the endpoint. Meant to be used with `aws_endpoint`. Default: `False`. |
-| `aws_s3_express` / `s3_express` | Whether to use S3 Express One Zone endpoints. Default: `False`. See more details below. |
-| `aws_server_side_encryption` | The server-side encryption algorithm to use. Must be one of `"AES256"`, `"aws:kms"`, or `"aws:kms:dsse"`. Default: `None`. |
-| `aws_sse_kms_key_id` | The KMS key ID to use for server-side encryption. If set, `aws_server_side_encryption` must be `"aws:kms"` or `"aws:kms:dsse"`. |
-| `aws_sse_bucket_key_enabled` | Whether to use bucket keys for server-side encryption. |
-
 !!! tip "Automatic cleanup for failed writes"

@@ -267,182 +146,22 @@ For **read-only access**, LanceDB will need a policy such as:

 #### S3-compatible stores

-LanceDB can also connect to S3-compatible stores, such as MinIO. To do so, you must specify both region and endpoint:
+LanceDB can also connect to S3-compatible stores, such as MinIO. To do so, you must specify two environment variables: `AWS_ENDPOINT` and `AWS_DEFAULT_REGION`. `AWS_ENDPOINT` should be the URL of the S3-compatible store, and `AWS_DEFAULT_REGION` should be the region to use.

-=== "Python"
-
-    ```python
-    import lancedb
-    db = await lancedb.connect_async(
-        "s3://bucket/path",
-        storage_options={
-            "region": "us-east-1",
-            "endpoint": "http://minio:9000",
-        }
-    )
-    ```
-
-=== "JavaScript"
-
-    ```javascript
-    const lancedb = require("lancedb");
-    const db = await lancedb.connect(
-      "s3://bucket/path",
-      {
-        storageOptions: {
-          region: "us-east-1",
-          endpoint: "http://minio:9000",
-        }
-      }
-    );
-    ```
-
-This can also be done with the ``AWS_ENDPOINT`` and ``AWS_DEFAULT_REGION`` environment variables.
-
-!!! tip "Local servers"
-
-    For local development, the server often has a `http` endpoint rather than a
-    secure `https` endpoint. In this case, you must also set the `ALLOW_HTTP`
-    environment variable to `true` to allow non-TLS connections, or pass the
-    storage option `allow_http` as `true`. If you do not do this, you will get
-    an error like `URL scheme is not allowed`.
-
-#### S3 Express
-
-LanceDB supports [S3 Express One Zone](https://aws.amazon.com/s3/storage-classes/express-one-zone/) endpoints, but requires additional configuration. Also, S3 Express endpoints only support connecting from an EC2 instance within the same region.
-
-To configure LanceDB to use an S3 Express endpoint, you must set the storage option `s3_express`. The bucket name in your table URI should **include the suffix**.
-
-=== "Python"
-
-    ```python
-    import lancedb
-    db = await lancedb.connect_async(
-        "s3://my-bucket--use1-az4--x-s3/path",
-        storage_options={
-            "region": "us-east-1",
-            "s3_express": "true",
-        }
-    )
-    ```
-
-=== "JavaScript"
-
-    ```javascript
-    const lancedb = require("lancedb");
-    const db = await lancedb.connect(
-      "s3://my-bucket--use1-az4--x-s3/path",
-      {
-        storageOptions: {
-          region: "us-east-1",
-          s3Express: "true",
-        }
-      }
-    );
-    ```
-
+<!-- TODO: we should also document the use of S3 Express once we fully support it -->

 ### Google Cloud Storage

-GCS credentials are configured by setting the `GOOGLE_SERVICE_ACCOUNT` environment variable to the path of a JSON file containing the service account credentials. Alternatively, you can pass the path to the JSON file in the `storage_options`:
+GCS credentials are configured by setting the `GOOGLE_SERVICE_ACCOUNT` environment variable to the path of a JSON file containing the service account credentials. There are several aliases for this environment variable, documented [here](https://docs.rs/object_store/latest/object_store/gcp/struct.GoogleCloudStorageBuilder.html#method.from_env).

-=== "Python"
-
-    <!-- skip-test -->
-    ```python
-    import lancedb
-    db = await lancedb.connect_async(
-        "gs://my-bucket/my-database",
-        storage_options={
-            "service_account": "path/to/service-account.json",
-        }
-    )
-    ```
-
-=== "JavaScript"
-
-    ```javascript
-    const lancedb = require("lancedb");
-    const db = await lancedb.connect(
-      "gs://my-bucket/my-database",
-      {
-        storageOptions: {
-          serviceAccount: "path/to/service-account.json",
-        }
-      }
-    );
-    ```
-
 !!! info "HTTP/2 support"

     By default, GCS uses HTTP/1 for communication, as opposed to HTTP/2. This improves maximum throughput significantly. However, if you wish to use HTTP/2 for some reason, you can set the environment variable `HTTP1_ONLY` to `false`.

-The following keys can be used as both environment variables or keys in the `storage_options` parameter:
-<!-- source: https://docs.rs/object_store/latest/object_store/gcp/enum.GoogleConfigKey.html -->
-
-| Key | Description |
-|---------------------------------------|----------------------------------------------|
-| ``google_service_account`` / `service_account` | Path to the service account JSON file. |
-| ``google_service_account_key`` | The serialized service account key. |
-| ``google_application_credentials`` | Path to the application credentials. |
-
 ### Azure Blob Storage

-Azure Blob Storage credentials can be configured by setting the `AZURE_STORAGE_ACCOUNT_NAME`and `AZURE_STORAGE_ACCOUNT_KEY` environment variables. Alternatively, you can pass the account name and key in the `storage_options` parameter:
+Azure Blob Storage credentials can be configured by setting the `AZURE_STORAGE_ACCOUNT_NAME` and ``AZURE_STORAGE_ACCOUNT_KEY`` environment variables. The full list of environment variables that can be set are documented [here](https://docs.rs/object_store/latest/object_store/azure/struct.MicrosoftAzureBuilder.html#method.from_env).

-=== "Python"
-
-    <!-- skip-test -->
-    ```python
-    import lancedb
-    db = await lancedb.connect_async(
-        "az://my-container/my-database",
-        storage_options={
-            account_name: "some-account",
-            account_key: "some-key",
-        }
-    )
-    ```
-
-=== "JavaScript"
-
-    ```javascript
-    const lancedb = require("lancedb");
-    const db = await lancedb.connect(
-      "az://my-container/my-database",
-      {
-        storageOptions: {
-          accountName: "some-account",
-          accountKey: "some-key",
-        }
-      }
-    );
-    ```
-
-These keys can be used as both environment variables or keys in the `storage_options` parameter:
-
-<!-- source: https://docs.rs/object_store/latest/object_store/azure/enum.AzureConfigKey.html -->
-
-| Key | Description |
-|---------------------------------------|--------------------------------------------------------------------------------------------------|
-| ``azure_storage_account_name`` | The name of the azure storage account. |
-| ``azure_storage_account_key`` | The serialized service account key. |
-| ``azure_client_id`` | Service principal client id for authorizing requests. |
-| ``azure_client_secret`` | Service principal client secret for authorizing requests. |
-| ``azure_tenant_id`` | Tenant id used in oauth flows. |
-| ``azure_storage_sas_key`` | Shared access signature. The signature is expected to be percent-encoded, much like they are provided in the azure storage explorer or azure portal. |
-| ``azure_storage_token`` | Bearer token. |
-| ``azure_storage_use_emulator`` | Use object store with azurite storage emulator. |
-| ``azure_endpoint`` | Override the endpoint used to communicate with blob storage. |
-| ``azure_use_fabric_endpoint`` | Use object store with url scheme account.dfs.fabric.microsoft.com. |
-| ``azure_msi_endpoint`` | Endpoint to request a imds managed identity token. |
-| ``azure_object_id`` | Object id for use with managed identity authentication. |
-| ``azure_msi_resource_id`` | Msi resource id for use with managed identity authentication. |
-| ``azure_federated_token_file`` | File containing token for Azure AD workload identity federation. |
-| ``azure_use_azure_cli`` | Use azure cli for acquiring access token. |
-| ``azure_disable_tagging`` | Disables tagging objects. This can be desirable if not supported by the backing store. |
-
 <!-- TODO: demonstrate how to configure networked file systems for optimal performance -->
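
As the removed text explains, most storage keys can be supplied either as uppercase environment variables or as lowercase `storage_options` entries (async API only). A sketch showing the same region setting expressed both ways, with a hypothetical bucket name:

```python
import os

import lancedb

# 1) Environment variable: applies process-wide, works with the sync API too.
os.environ["AWS_DEFAULT_REGION"] = "us-east-1"
db = lancedb.connect("s3://example-bucket/db")

# 2) storage_options key: per-connection, async API.
async def connect_with_options():
    return await lancedb.connect_async(
        "s3://example-bucket/db",
        storage_options={"region": "us-east-1"},
    )
```
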

@@ -13,7 +13,7 @@ Get started using these examples and quick links.
 | Integrations | |
 |---|---:|
 | <h3> LlamaIndex </h3>LlamaIndex is a simple, flexible data framework for connecting custom data sources to large language models. Llama index integrates with LanceDB as the serverless VectorDB. <h3>[Lean More](https://gpt-index.readthedocs.io/en/latest/examples/vector_stores/LanceDBIndexDemo.html) </h3> |<img src="../assets/llama-index.jpg" alt="image" width="150" height="auto">|
-| <h3>Langchain</h3>Langchain allows building applications with LLMs through composability <h3>[Lean More](https://lancedb.github.io/lancedb/integrations/langchain/) | <img src="../assets/langchain.png" alt="image" width="150" height="auto">|
+| <h3>Langchain</h3>Langchain allows building applications with LLMs through composability <h3>[Lean More](https://python.langchain.com/docs/integrations/vectorstores/lancedb) | <img src="../assets/langchain.png" alt="image" width="150" height="auto">|
 | <h3>Langchain TS</h3> Javascript bindings for Langchain. It integrates with LanceDB's serverless vectordb allowing you to build powerful AI applications through composibility using only serverless functions. <h3>[Learn More]( https://js.langchain.com/docs/modules/data_connection/vectorstores/integrations/lancedb) | <img src="../assets/langchain.png" alt="image" width="150" height="auto">|
 | <h3>Voxel51</h3> It is an open source toolkit that enables you to build better computer vision workflows by improving the quality of your datasets and delivering insights about your models.<h3>[Learn More](./voxel51.md) | <img src="../assets/voxel.gif" alt="image" width="150" height="auto">|
 | <h3>PromptTools</h3> Offers a set of free, open-source tools for testing and experimenting with models, prompts, and configurations. The core idea is to enable developers to evaluate prompts using familiar interfaces like code and notebooks. You can use it to experiment with different configurations of LanceDB, and test how LanceDB integrates with the LLM of your choice.<h3>[Learn More](./prompttools.md) | <img src="../assets/prompttools.jpeg" alt="image" width="150" height="auto">|
@@ -1,92 +0,0 @@
|
|||||||
# Langchain
|
|
||||||

|
|
||||||
|
|
||||||
## Quick Start
|
|
||||||
You can load your document data using langchain's loaders, for this example we are using `TextLoader` and `OpenAIEmbeddings` as the embedding model.
|
|
||||||
```python
|
|
||||||
import os
|
|
||||||
from langchain.document_loaders import TextLoader
|
|
||||||
from langchain.vectorstores import LanceDB
|
|
||||||
from langchain_openai import OpenAIEmbeddings
|
|
||||||
from langchain_text_splitters import CharacterTextSplitter
|
|
||||||
|
|
||||||
os.environ["OPENAI_API_KEY"] = "sk-..."
|
|
||||||
|
|
||||||
loader = TextLoader("../../modules/state_of_the_union.txt") # Replace with your data path
|
|
||||||
documents = loader.load()
|
|
||||||
|
|
||||||
documents = CharacterTextSplitter().split_documents(documents)
|
|
||||||
embeddings = OpenAIEmbeddings()
|
|
||||||
|
|
||||||
docsearch = LanceDB.from_documents(documents, embeddings)
|
|
||||||
query = "What did the president say about Ketanji Brown Jackson"
|
|
||||||
docs = docsearch.similarity_search(query)
|
|
||||||
print(docs[0].page_content)
|
|
||||||
```
|
|
||||||
|
|
||||||
## Documentation
|
|
||||||
In the above example `LanceDB` vector store class object is created using `from_documents()` method which is a `classmethod` and returns the initialized class object.
|
|
||||||
You can also use `LanceDB.from_texts(texts: List[str],embedding: Embeddings)` class method.
|
|
||||||
|
|
||||||
The exhaustive list of parameters for `LanceDB` vector store are :
|
|
||||||
- `connection`: (Optional) `lancedb.db.LanceDBConnection` connection object to use. If not provided, a new connection will be created.
|
|
||||||
- `embedding`: Langchain embedding model.
|
|
||||||
- `vector_key`: (Optional) Column name to use for vector's in the table. Defaults to `'vector'`.
|
|
||||||
- `id_key`: (Optional) Column name to use for id's in the table. Defaults to `'id'`.
|
|
||||||
- `text_key`: (Optional) Column name to use for text in the table. Defaults to `'text'`.
|
|
||||||
- `table_name`: (Optional) Name of your table in the database. Defaults to `'vectorstore'`.
|
|
||||||
- `api_key`: (Optional) API key to use for LanceDB cloud database. Defaults to `None`.
|
|
||||||
- `region`: (Optional) Region to use for LanceDB cloud database. Only for LanceDB Cloud, defaults to `None`.
|
|
||||||
- `mode`: (Optional) Mode to use for adding data to the table. Defaults to `'overwrite'`.
|
|
||||||
|
|
||||||
```python
|
|
||||||
db_url = "db://lang_test" # url of db you created
|
|
||||||
api_key = "xxxxx" # your API key
|
|
||||||
region="us-east-1-dev" # your selected region
|
|
||||||
|
|
||||||
vector_store = LanceDB(
|
|
||||||
uri=db_url,
|
|
||||||
api_key=api_key, #(dont include for local API)
|
|
||||||
region=region, #(dont include for local API)
|
|
||||||
embedding=embeddings,
|
|
||||||
table_name='langchain_test' #Optional
|
|
||||||
)
|
|
||||||
```
|
|
||||||
|
|
||||||
### Methods
|
|
||||||
To add texts and store respective embeddings automatically:
|
|
||||||
##### add_texts()
|
|
||||||
- `texts`: `Iterable` of strings to add to the vectorstore.
|
|
||||||
- `metadatas`: Optional `list[dict()]` of metadatas associated with the texts.
|
|
||||||
- `ids`: Optional `list` of ids to associate with the texts.
|
|
||||||
|
|
||||||
|
|
||||||
```python
vector_store.add_texts(texts=['test_123'], metadatas=[{'source': 'wiki'}])

# Additionally, to explore the table you can load it into a DataFrame
# or save it to a CSV file:
tbl = vector_store.get_table()
print("tbl:", tbl)
pd_df = tbl.to_pandas()
pd_df.to_csv("docsearch.csv", index=False)

# You can also create a new vector store object using an existing table:
vector_store = LanceDB(connection=tbl, embedding=embeddings)
```

For index creation, make sure your table has enough data in it. An ANN index is usually not needed for datasets of around 100K vectors or fewer. For large-scale datasets (>1M vectors) or higher-dimensional vectors, creating an ANN index is beneficial.

##### create_index()

- `col_name`: `Optional[str] = None`
- `vector_col`: `Optional[str] = None`
- `num_partitions`: `Optional[int] = 256`
- `num_sub_vectors`: `Optional[int] = 96`
- `index_cache_size`: `Optional[int] = None`

```python
# create a vector index
vector_store.create_index(vector_col='vector', metric='cosine')

# create a scalar index (for non-vector columns)
vector_store.create_index(col_name='text')
```

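A sketch of tuning the index parameters listed above for a larger table (the values are illustrative, not recommendations; suitable settings depend on dataset size and vector dimensionality):

```python
# More partitions narrow the search per probe; more sub-vectors
# increase PQ accuracy at the cost of index size and build time.
vector_store.create_index(
    vector_col='vector',
    num_partitions=512,
    num_sub_vectors=64,
)
```
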
@@ -142,7 +142,6 @@ rules are as follows:
 
 **`Example`**
 
 ```ts
 import { fromTableToBuffer, makeArrowTable } from "../arrow";
 import { Field, FixedSizeList, Float16, Float32, Int32, Schema } from "apache-arrow";
@@ -36,7 +36,7 @@
 }
 ],
 "source": [
-"!pip install --quiet openai datasets\n",
+"!pip install --quiet openai datasets \n",
 "!pip install --quiet -U lancedb"
 ]
 },
@@ -213,7 +213,7 @@
 "if \"OPENAI_API_KEY\" not in os.environ:\n",
 " # OR set the key here as a variable\n",
 " os.environ[\"OPENAI_API_KEY\"] = \"sk-...\"\n",
-"\n",
+" \n",
 "client = OpenAI()\n",
 "assert len(client.models.list().data) > 0"
 ]
@@ -234,12 +234,9 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"def embed_func(c):\n",
+"def embed_func(c): \n",
 " rs = client.embeddings.create(input=c, model=\"text-embedding-ada-002\")\n",
-" return [\n",
-" data.embedding\n",
-" for data in rs.data\n",
-" ]"
+" return [rs.data[0].embedding]"
 ]
 },
 {
@@ -517,7 +514,7 @@
 " prompt_start +\n",
 " \"\\n\\n---\\n\\n\".join(context.text) +\n",
 " prompt_end\n",
-" )\n",
+" ) \n",
 " return prompt"
 ]
 },
@@ -24,8 +24,7 @@ data = [
 table = db.create_table("pd_table", data=data)
 ```
 
-The `to_lance` method converts the LanceDB table to a `LanceDataset`, which is accessible to DuckDB through the Arrow compatibility layer.
-To query the resulting Lance dataset in DuckDB, all you need to do is reference the dataset by the same name in your SQL query.
+To query the table, first call `to_lance` to convert the table to a "dataset", which is an object that can be queried by DuckDB. Then all you need to do is reference that dataset by the same name in your SQL query.
 
 ```python
 import duckdb
@@ -1,75 +0,0 @@
# Cohere Reranker

This re-ranker uses the [Cohere](https://cohere.ai/) API to rerank the search results. You can use this re-ranker by passing `CohereReranker()` to the `rerank()` method. Note that you'll either need to set the `COHERE_API_KEY` environment variable or pass the `api_key` argument to use this re-ranker.

!!! note
    Supported Query Types: Hybrid, Vector, FTS

```python
import lancedb
from lancedb.embeddings import get_registry
from lancedb.pydantic import LanceModel, Vector
from lancedb.rerankers import CohereReranker

embedder = get_registry().get("sentence-transformers").create()
db = lancedb.connect("~/.lancedb")

class Schema(LanceModel):
    text: str = embedder.SourceField()
    vector: Vector(embedder.ndims()) = embedder.VectorField()

data = [
    {"text": "hello world"},
    {"text": "goodbye world"}
]
tbl = db.create_table("test", schema=Schema, mode="overwrite")
tbl.add(data)
reranker = CohereReranker(api_key="key")

# Run vector search with a reranker
result = tbl.search("hello").rerank(reranker=reranker).to_list()

# Run FTS search with a reranker
result = tbl.search("hello", query_type="fts").rerank(reranker=reranker).to_list()

# Run hybrid search with a reranker
tbl.create_fts_index("text", replace=True)
result = tbl.search("hello", query_type="hybrid").rerank(reranker=reranker).to_list()
```

Accepted Arguments
----------------
| Argument | Type | Default | Description |
| --- | --- | --- | --- |
| `model_name` | `str` | `"rerank-english-v2.0"` | The name of the reranker model to use. Available Cohere models are: `rerank-english-v2.0`, `rerank-multilingual-v2.0`. |
| `column` | `str` | `"text"` | The name of the column to use as input to the reranker model. |
| `top_n` | `int` | `None` | The number of results to return. If `None`, all results are returned. |
| `api_key` | `str` | `None` | The API key for the Cohere API. If not provided, the `COHERE_API_KEY` environment variable is used. |
| `return_score` | `str` | `"relevance"` | Options are `"relevance"` or `"all"`. The type of score to return. If `"relevance"`, only the `_relevance_score` is returned. If `"all"` is supported, the relevance score is returned along with the vector and/or FTS scores, depending on the query type. |

## Supported Scores for each query type

You can specify the type of scores you want the reranker to return. The following are the supported scores for each query type:

### Hybrid Search

|`return_score`| Status | Description |
| --- | --- | --- |
| `relevance` | ✅ Supported | Results only have the `_relevance_score` column |
| `all` | ❌ Not Supported | Would return the vector (`_distance`) and FTS (`score`) scores along with the relevance score (`_relevance_score`) |

### Vector Search

|`return_score`| Status | Description |
| --- | --- | --- |
| `relevance` | ✅ Supported | Results only have the `_relevance_score` column |
| `all` | ✅ Supported | Results have the vector (`_distance`) score along with the relevance score (`_relevance_score`) |

### FTS Search

|`return_score`| Status | Description |
| --- | --- | --- |
| `relevance` | ✅ Supported | Results only have the `_relevance_score` column |
| `all` | ✅ Supported | Results have the FTS (`score`) score along with the relevance score (`_relevance_score`) |
@@ -1,71 +0,0 @@
# ColBERT Reranker

This re-ranker uses the ColBERT model to rerank the search results. You can use this re-ranker by passing `ColbertReranker()` to the `rerank()` method.

!!! note
    Supported Query Types: Hybrid, Vector, FTS

```python
import lancedb
from lancedb.embeddings import get_registry
from lancedb.pydantic import LanceModel, Vector
from lancedb.rerankers import ColbertReranker

embedder = get_registry().get("sentence-transformers").create()
db = lancedb.connect("~/.lancedb")

class Schema(LanceModel):
    text: str = embedder.SourceField()
    vector: Vector(embedder.ndims()) = embedder.VectorField()

data = [
    {"text": "hello world"},
    {"text": "goodbye world"}
]
tbl = db.create_table("test", schema=Schema, mode="overwrite")
tbl.add(data)
reranker = ColbertReranker()

# Run vector search with a reranker
result = tbl.search("hello").rerank(reranker=reranker).to_list()

# Run FTS search with a reranker
result = tbl.search("hello", query_type="fts").rerank(reranker=reranker).to_list()

# Run hybrid search with a reranker
tbl.create_fts_index("text", replace=True)
result = tbl.search("hello", query_type="hybrid").rerank(reranker=reranker).to_list()
```

Accepted Arguments
----------------
| Argument | Type | Default | Description |
| --- | --- | --- | --- |
| `model_name` | `str` | `"colbert-ir/colbertv2.0"` | The name of the reranker model to use. |
| `column` | `str` | `"text"` | The name of the column to use as input to the reranker model. |
| `device` | `str` | `None` | The device to use for the model. If `None`, uses `"cuda"` if available, otherwise `"cpu"`. |
| `return_score` | `str` | `"relevance"` | Options are `"relevance"` or `"all"`. The type of score to return. If `"relevance"`, only the `_relevance_score` is returned. If `"all"` is supported, the relevance score is returned along with the vector and/or FTS scores, depending on the query type. |

## Supported Scores for each query type

You can specify the type of scores you want the reranker to return. The following are the supported scores for each query type:

### Hybrid Search

|`return_score`| Status | Description |
| --- | --- | --- |
| `relevance` | ✅ Supported | Results only have the `_relevance_score` column |
| `all` | ❌ Not Supported | Would return the vector (`_distance`) and FTS (`score`) scores along with the relevance score (`_relevance_score`) |

### Vector Search

|`return_score`| Status | Description |
| --- | --- | --- |
| `relevance` | ✅ Supported | Results only have the `_relevance_score` column |
| `all` | ✅ Supported | Results have the vector (`_distance`) score along with the relevance score (`_relevance_score`) |

### FTS Search

|`return_score`| Status | Description |
| --- | --- | --- |
| `relevance` | ✅ Supported | Results only have the `_relevance_score` column |
| `all` | ✅ Supported | Results have the FTS (`score`) score along with the relevance score (`_relevance_score`) |
@@ -1,70 +0,0 @@
# Cross Encoder Reranker

This re-ranker uses Cross Encoder models from sentence-transformers to rerank the search results. You can use this re-ranker by passing `CrossEncoderReranker()` to the `rerank()` method.

!!! note
    Supported Query Types: Hybrid, Vector, FTS

```python
import lancedb
from lancedb.embeddings import get_registry
from lancedb.pydantic import LanceModel, Vector
from lancedb.rerankers import CrossEncoderReranker

embedder = get_registry().get("sentence-transformers").create()
db = lancedb.connect("~/.lancedb")

class Schema(LanceModel):
    text: str = embedder.SourceField()
    vector: Vector(embedder.ndims()) = embedder.VectorField()

data = [
    {"text": "hello world"},
    {"text": "goodbye world"}
]
tbl = db.create_table("test", schema=Schema, mode="overwrite")
tbl.add(data)
reranker = CrossEncoderReranker()

# Run vector search with a reranker
result = tbl.search("hello").rerank(reranker=reranker).to_list()

# Run FTS search with a reranker
result = tbl.search("hello", query_type="fts").rerank(reranker=reranker).to_list()

# Run hybrid search with a reranker
tbl.create_fts_index("text", replace=True)
result = tbl.search("hello", query_type="hybrid").rerank(reranker=reranker).to_list()
```

Accepted Arguments
----------------
| Argument | Type | Default | Description |
| --- | --- | --- | --- |
| `model_name` | `str` | `"cross-encoder/ms-marco-TinyBERT-L-6"` | The name of the reranker model to use. |
| `column` | `str` | `"text"` | The name of the column to use as input to the cross encoder model. |
| `device` | `str` | `None` | The device to use for the cross encoder model. If `None`, uses `"cuda"` if available, otherwise `"cpu"`. |
| `return_score` | `str` | `"relevance"` | Options are `"relevance"` or `"all"`. The type of score to return. If `"relevance"`, only the `_relevance_score` is returned. If `"all"` is supported, the relevance score is returned along with the vector and/or FTS scores, depending on the query type. |

## Supported Scores for each query type

You can specify the type of scores you want the reranker to return. The following are the supported scores for each query type:

### Hybrid Search

|`return_score`| Status | Description |
| --- | --- | --- |
| `relevance` | ✅ Supported | Results only have the `_relevance_score` column |
| `all` | ❌ Not Supported | Would return the vector (`_distance`) and FTS (`score`) scores along with the relevance score (`_relevance_score`) |

### Vector Search

|`return_score`| Status | Description |
| --- | --- | --- |
| `relevance` | ✅ Supported | Results only have the `_relevance_score` column |
| `all` | ✅ Supported | Results have the vector (`_distance`) score along with the relevance score (`_relevance_score`) |

### FTS Search

|`return_score`| Status | Description |
| --- | --- | --- |
| `relevance` | ✅ Supported | Results only have the `_relevance_score` column |
| `all` | ✅ Supported | Results have the FTS (`score`) score along with the relevance score (`_relevance_score`) |
@@ -1,88 +0,0 @@
## Building Custom Rerankers

You can build your own custom reranker by subclassing the `Reranker` class and implementing the `rerank_hybrid()` method. Optionally, you can also implement the `rerank_vector()` and `rerank_fts()` methods if you want to support reranking for vector and FTS search separately.

Here's the skeleton of a custom reranker that combines the results of semantic and full-text search.

The `Reranker` base interface comes with a `merge_results()` method that can be used to combine the results of semantic and full-text search. This is a vanilla merging algorithm that simply concatenates the results and removes the duplicates without taking the scores into consideration. It only keeps the first copy of each row encountered. This works well in cases that don't require the scores of semantic and full-text search to combine the results. If you want to use the scores or want to support `return_score="all"`, you'll need to implement your own merging algorithm.

```python
import pyarrow as pa
from lancedb.rerankers import Reranker

class MyReranker(Reranker):
    def __init__(self, param1, param2, return_score="relevance"):
        super().__init__(return_score)
        self.param1 = param1
        self.param2 = param2

    def rerank_hybrid(self, query: str, vector_results: pa.Table, fts_results: pa.Table):
        # Use the built-in merging function
        combined_result = self.merge_results(vector_results, fts_results)

        # Do something with the combined results
        # ...

        # Return the combined results
        return combined_result

    def rerank_vector(self, query: str, vector_results: pa.Table):
        # Do something with the vector results
        # ...

        # Return the vector results
        return vector_results

    def rerank_fts(self, query: str, fts_results: pa.Table):
        # Do something with the FTS results
        # ...

        # Return the FTS results
        return fts_results
```

### Example of a Custom Reranker

For the sake of simplicity, let's build a custom reranker that just enhances the Cohere reranker by accepting a filter query, and accepts the other `CohereReranker` params as kwargs.

```python
from typing import List, Union

import pandas as pd
import pyarrow as pa
from lancedb.rerankers import CohereReranker

class ModifiedCohereReranker(CohereReranker):
    def __init__(self, filters: Union[str, List[str]], **kwargs):
        super().__init__(**kwargs)
        filters = filters if isinstance(filters, list) else [filters]
        self.filters = filters

    def rerank_hybrid(self, query: str, vector_results: pa.Table, fts_results: pa.Table) -> pa.Table:
        combined_result = super().rerank_hybrid(query, vector_results, fts_results)
        df = combined_result.to_pandas()
        for filter in self.filters:
            df = df.query("not text.str.contains(@filter)")
        return pa.Table.from_pandas(df)

    def rerank_vector(self, query: str, vector_results: pa.Table) -> pa.Table:
        vector_results = super().rerank_vector(query, vector_results)
        df = vector_results.to_pandas()
        for filter in self.filters:
            df = df.query("not text.str.contains(@filter)")
        return pa.Table.from_pandas(df)

    def rerank_fts(self, query: str, fts_results: pa.Table) -> pa.Table:
        fts_results = super().rerank_fts(query, fts_results)
        df = fts_results.to_pandas()
        for filter in self.filters:
            df = df.query("not text.str.contains(@filter)")
        return pa.Table.from_pandas(df)
```

!!! tip
    The `vector_results` and `fts_results` are PyArrow tables. Learn more about PyArrow tables [here](https://arrow.apache.org/docs/python). They can be converted to other data types like pandas DataFrames, pydicts, pylists, etc.

    For example, you can convert them to pandas DataFrames using the `to_pandas()` method and perform any operations you want. After you are done, you can convert the DataFrame back to a PyArrow table using the `pa.Table.from_pandas()` method and return it.
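A brief usage sketch of the custom reranker above (the filter string is illustrative, and `tbl` is assumed to be set up as in the earlier reranker examples):

```python
# Drop any result whose text contains "goodbye", then rerank with Cohere
reranker = ModifiedCohereReranker(filters=["goodbye"], api_key="key")
result = tbl.search("hello", query_type="hybrid").rerank(reranker=reranker).to_list()
```
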
@@ -1,60 +0,0 @@
Reranking is the process of reordering a list of items based on some criteria. In the context of search, reranking is used to reorder the search results returned by a search engine based on some criteria. This can be useful when the initial ranking of the search results is not satisfactory or when the user has provided additional information that can be used to improve the ranking of the search results.

LanceDB comes with some built-in rerankers. Some of the rerankers that are available in LanceDB are:

| Reranker | Description | Supported Query Types |
| --- | --- | --- |
| `LinearCombinationReranker` | Reranks search results based on a linear combination of FTS and vector search scores | Hybrid |
| `CohereReranker` | Uses the Cohere Rerank API to rerank results | Vector, FTS, Hybrid |
| `CrossEncoderReranker` | Uses a cross-encoder model to rerank search results | Vector, FTS, Hybrid |
| `ColbertReranker` | Uses a ColBERT model to rerank search results | Vector, FTS, Hybrid |
| `OpenaiReranker` (Experimental) | Uses OpenAI's chat model to rerank search results | Vector, FTS, Hybrid |

## Using a Reranker

Using a reranker is optional for vector and FTS searches; for hybrid search, however, a reranker is required. To use a reranker, create an instance of it and pass it to the `rerank` method of the query builder.

```python
import lancedb
from lancedb.embeddings import get_registry
from lancedb.pydantic import LanceModel, Vector
from lancedb.rerankers import CohereReranker

embedder = get_registry().get("sentence-transformers").create()
db = lancedb.connect("~/.lancedb")

class Schema(LanceModel):
    text: str = embedder.SourceField()
    vector: Vector(embedder.ndims()) = embedder.VectorField()

data = [
    {"text": "hello world"},
    {"text": "goodbye world"}
]
tbl = db.create_table("test", schema=Schema, mode="overwrite")
tbl.add(data)
reranker = CohereReranker(api_key="your_api_key")

# Run vector search with a reranker
result = tbl.search("hello").rerank(reranker=reranker).to_list()

# Run FTS search with a reranker
result = tbl.search("hello", query_type="fts").rerank(reranker=reranker).to_list()

# Run hybrid search with a reranker
tbl.create_fts_index("text")
result = tbl.search("hello", query_type="hybrid").rerank(reranker=reranker).to_list()
```

## Available Rerankers

LanceDB comes with some built-in rerankers. Here are some of the rerankers that are available in LanceDB:

- [Cohere Reranker](./cohere.md)
- [Cross Encoder Reranker](./cross_encoder.md)
- [ColBERT Reranker](./colbert.md)
- [OpenAI Reranker](./openai.md)
- [Linear Combination Reranker](./linear_combination.md)

## Creating Custom Rerankers

LanceDB also allows you to create custom rerankers by extending the base `Reranker` class. A custom reranker implements a `rerank_hybrid()` method (and optionally `rerank_vector()` and `rerank_fts()`) that takes the search results and returns a reranked set of results. This is covered in more detail in the [Creating Custom Rerankers](./custom_reranker.md) section.
@@ -1,52 +0,0 @@
# Linear Combination Reranker

This is the default re-ranker used by LanceDB hybrid search. It combines the results of semantic and full-text search using a linear combination of the scores. The weight for the linear combination can be specified; it defaults to 0.7, i.e., 70% weight for the semantic search score and 30% for the full-text search score.

!!! note
    Supported Query Types: Hybrid

```python
import lancedb
from lancedb.embeddings import get_registry
from lancedb.pydantic import LanceModel, Vector
from lancedb.rerankers import LinearCombinationReranker

embedder = get_registry().get("sentence-transformers").create()
db = lancedb.connect("~/.lancedb")

class Schema(LanceModel):
    text: str = embedder.SourceField()
    vector: Vector(embedder.ndims()) = embedder.VectorField()

data = [
    {"text": "hello world"},
    {"text": "goodbye world"}
]
tbl = db.create_table("test", schema=Schema, mode="overwrite")
tbl.add(data)
reranker = LinearCombinationReranker()

# Run hybrid search with a reranker
tbl.create_fts_index("text", replace=True)
result = tbl.search("hello", query_type="hybrid").rerank(reranker=reranker).to_list()
```

Accepted Arguments
----------------
| Argument | Type | Default | Description |
| --- | --- | --- | --- |
| `weight` | `float` | `0.7` | The weight to use for the semantic search score. The weight for the full-text search score is `1 - weight`. |
| `return_score` | `str` | `"relevance"` | Options are `"relevance"` or `"all"`. The type of score to return. If `"relevance"`, only the `_relevance_score` is returned. If `"all"`, all scores from the vector and FTS search are returned along with the relevance score. |

## Supported Scores for each query type

You can specify the type of scores you want the reranker to return. The following are the supported scores for each query type:

### Hybrid Search

|`return_score`| Status | Description |
| --- | --- | --- |
| `relevance` | ✅ Supported | Results only have the `_relevance_score` column |
| `all` | ✅ Supported | Results have the vector (`_distance`) and FTS (`score`) scores along with the hybrid search score (`_relevance_score`) |
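As a quick usage sketch (the weight value is illustrative), conceptually the combined score is `weight * semantic_score + (1 - weight) * fts_score`:

```python
# A weight of 0.5 gives semantic and full-text scores equal influence
# (illustrative value; 0.7 is the default).
reranker = LinearCombinationReranker(weight=0.5)
result = tbl.search("hello", query_type="hybrid").rerank(reranker=reranker).to_list()
```
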
@@ -1,73 +0,0 @@
# OpenAI Reranker (Experimental)

This re-ranker uses an OpenAI chat model to rerank the search results. You can use this re-ranker by passing `OpenaiReranker()` to the `rerank()` method.

!!! note
    Supported Query Types: Hybrid, Vector, FTS

!!! warning
    This re-ranker is experimental. OpenAI doesn't have a dedicated reranking model, so we are using the chat model for reranking.

```python
import lancedb
from lancedb.embeddings import get_registry
from lancedb.pydantic import LanceModel, Vector
from lancedb.rerankers import OpenaiReranker

embedder = get_registry().get("sentence-transformers").create()
db = lancedb.connect("~/.lancedb")

class Schema(LanceModel):
    text: str = embedder.SourceField()
    vector: Vector(embedder.ndims()) = embedder.VectorField()

data = [
    {"text": "hello world"},
    {"text": "goodbye world"}
]
tbl = db.create_table("test", schema=Schema, mode="overwrite")
tbl.add(data)
reranker = OpenaiReranker()

# Run vector search with a reranker
result = tbl.search("hello").rerank(reranker=reranker).to_list()

# Run FTS search with a reranker
result = tbl.search("hello", query_type="fts").rerank(reranker=reranker).to_list()

# Run hybrid search with a reranker
tbl.create_fts_index("text", replace=True)
result = tbl.search("hello", query_type="hybrid").rerank(reranker=reranker).to_list()
```

Accepted Arguments
----------------
| Argument | Type | Default | Description |
| --- | --- | --- | --- |
| `model_name` | `str` | `"gpt-4-turbo-preview"` | The name of the reranker model to use. |
| `column` | `str` | `"text"` | The name of the column to use as input to the reranker model. |
| `return_score` | `str` | `"relevance"` | Options are `"relevance"` or `"all"`. The type of score to return. If `"relevance"`, only the `_relevance_score` is returned. If `"all"` is supported, the relevance score is returned along with the vector and/or FTS scores, depending on the query type. |
| `api_key` | `str` | `None` | The API key to use. If `None`, the `OPENAI_API_KEY` environment variable is used. |

## Supported Scores for each query type

You can specify the type of scores you want the reranker to return. The following are the supported scores for each query type:

### Hybrid Search

|`return_score`| Status | Description |
| --- | --- | --- |
| `relevance` | ✅ Supported | Results only have the `_relevance_score` column |
| `all` | ❌ Not Supported | Would return the vector (`_distance`) and FTS (`score`) scores along with the relevance score (`_relevance_score`) |

### Vector Search

|`return_score`| Status | Description |
| --- | --- | --- |
| `relevance` | ✅ Supported | Results only have the `_relevance_score` column |
| `all` | ✅ Supported | Results have the vector (`_distance`) score along with the relevance score (`_relevance_score`) |

### FTS Search

|`return_score`| Status | Description |
| --- | --- | --- |
| `relevance` | ✅ Supported | Results only have the `_relevance_score` column |
| `all` | ✅ Supported | Results have the FTS (`score`) score along with the relevance score (`_relevance_score`) |
@@ -66,7 +66,6 @@ Currently, Lance supports a growing list of SQL expressions.
 - `LIKE`, `NOT LIKE`
 - `CAST`
 - `regexp_match(column, pattern)`
-- [DataFusion Functions](https://arrow.apache.org/datafusion/user-guide/sql/scalar_functions.html)
 
 For example, the following filter string is acceptable:
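As an illustrative sketch only (not the original example; the column names `id` and `item` and the query vector are hypothetical), a filter built from the operators listed above might look like:

```python
# Hypothetical filter combining comparison, LIKE, and regexp_match
results = (
    tbl.search([0.5, 0.2])
    .where("(id > 10) AND (item NOT LIKE 'item 1%') AND regexp_match(item, 'item [0-9]+')")
    .to_list()
)
```
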
@@ -1,5 +1,5 @@
 import glob
-from typing import Iterator, List
+from typing import Iterator
 from pathlib import Path
 
 glob_string = "../src/**/*.md"
@@ -8,7 +8,6 @@ excluded_globs = [
 "../src/embedding.md",
 "../src/examples/*.md",
 "../src/integrations/voxel51.md",
-"../src/integrations/langchain.md",
 "../src/guides/tables.md",
 "../src/python/duckdb.md",
 "../src/embeddings/*.md",
@@ -16,7 +15,6 @@ excluded_globs = [
 "../src/ann_indexes.md",
 "../src/basic.md",
 "../src/hybrid_search/hybrid_search.md",
-"../src/reranking/*.md",
 ]
 
 python_prefix = "py"
@@ -52,24 +50,11 @@ def yield_lines(lines: Iterator[str], prefix: str, suffix: str):
         yield line[strip_length:]
 
 
-def wrap_async(lines: List[str]) -> List[str]:
-    # Indent all the lines
-    lines = [" " + line for line in lines]
-    # Put all lines in `async def main():`
-    lines = ["async def main():\n"] + lines
-    # Put `import asyncio\n asyncio.run(main())` at the end
-    lines = lines + ["\n", "import asyncio\n", "asyncio.run(main())\n"]
-    return lines
-
-
 for file in filter(lambda file: file not in excluded_files, files):
     with open(file, "r") as f:
         lines = list(yield_lines(iter(f), "```", "```"))
 
     if len(lines) > 0:
-        if any("await" in line for line in lines):
-            lines = wrap_async(lines)
-
         print(lines)
         out_path = (
             Path(python_folder)
@@ -1,27 +0,0 @@
[package]
name = "lancedb-jni"
description = "JNI bindings for LanceDB"
# TODO modify lancedb/Cargo.toml for version and dependencies
version = "0.4.18"
edition.workspace = true
repository.workspace = true
readme.workspace = true
license.workspace = true
keywords.workspace = true
categories.workspace = true
publish = false

[lib]
crate-type = ["cdylib"]

[dependencies]
lancedb = { path = "../../../rust/lancedb" }
lance = { workspace = true }
arrow = { workspace = true, features = ["ffi"] }
arrow-schema.workspace = true
tokio = "1.23"
jni = "0.21.1"
snafu.workspace = true
lazy_static.workspace = true
serde = { version = "^1" }
serde_json = { version = "1" }
@@ -1,130 +0,0 @@
use crate::ffi::JNIEnvExt;
use crate::traits::IntoJava;
use crate::{Error, RT};
use jni::objects::{JObject, JString, JValue};
use jni::JNIEnv;
pub const NATIVE_CONNECTION: &str = "nativeConnectionHandle";
use crate::Result;
use lancedb::connection::{connect, Connection};

#[derive(Clone)]
pub struct BlockingConnection {
    pub(crate) inner: Connection,
}

impl BlockingConnection {
    pub fn create(dataset_uri: &str) -> Result<Self> {
        let inner = RT.block_on(connect(dataset_uri).execute())?;
        Ok(Self { inner })
    }

    pub fn table_names(
        &self,
        start_after: Option<String>,
        limit: Option<i32>,
    ) -> Result<Vec<String>> {
        let mut op = self.inner.table_names();
        if let Some(start_after) = start_after {
            op = op.start_after(start_after);
        }
        if let Some(limit) = limit {
            op = op.limit(limit as u32);
        }
        Ok(RT.block_on(op.execute())?)
    }
}

impl IntoJava for BlockingConnection {
    fn into_java<'a>(self, env: &mut JNIEnv<'a>) -> JObject<'a> {
        attach_native_connection(env, self)
    }
}

fn attach_native_connection<'local>(
    env: &mut JNIEnv<'local>,
    connection: BlockingConnection,
) -> JObject<'local> {
    let j_connection = create_java_connection_object(env);
    // This block sets a native Rust object (Connection) as a field in the Java object (j_connection).
    // Caution: This creates a potential for memory leaks. The Rust object (Connection) is not
    // automatically garbage-collected by Java, and its memory will not be freed unless
    // explicitly handled.
    //
    // To prevent memory leaks, ensure the following:
    // 1. The Java object (`j_connection`) should implement the `java.io.Closeable` interface.
    // 2. Users of this Java object should be instructed to always use it within a try-with-resources
    //    statement (or manually call the `close()` method) to ensure that `self.close()` is invoked.
    match unsafe { env.set_rust_field(&j_connection, NATIVE_CONNECTION, connection) } {
        Ok(_) => j_connection,
        Err(err) => {
            env.throw_new(
                "java/lang/RuntimeException",
                format!("Failed to set native handle for Connection: {}", err),
            )
            .expect("Error throwing exception");
            JObject::null()
        }
    }
}

fn create_java_connection_object<'a>(env: &mut JNIEnv<'a>) -> JObject<'a> {
    env.new_object("com/lancedb/lancedb/Connection", "()V", &[])
        .expect("Failed to create Java Lance Connection instance")
}

#[no_mangle]
pub extern "system" fn Java_com_lancedb_lancedb_Connection_releaseNativeConnection(
    mut env: JNIEnv,
    j_connection: JObject,
) {
    let _: BlockingConnection = unsafe {
        env.take_rust_field(j_connection, NATIVE_CONNECTION)
            .expect("Failed to take native Connection handle")
    };
}

#[no_mangle]
pub extern "system" fn Java_com_lancedb_lancedb_Connection_connect<'local>(
    mut env: JNIEnv<'local>,
    _obj: JObject,
    dataset_uri_object: JString,
) -> JObject<'local> {
    let dataset_uri: String = ok_or_throw!(env, env.get_string(&dataset_uri_object)).into();
    let blocking_connection = ok_or_throw!(env, BlockingConnection::create(&dataset_uri));
    blocking_connection.into_java(&mut env)
}

#[no_mangle]
pub extern "system" fn Java_com_lancedb_lancedb_Connection_tableNames<'local>(
    mut env: JNIEnv<'local>,
    j_connection: JObject,
    start_after_obj: JObject, // Optional<String>
    limit_obj: JObject,       // Optional<Integer>
) -> JObject<'local> {
    ok_or_throw!(
        env,
        inner_table_names(&mut env, j_connection, start_after_obj, limit_obj)
    )
}

fn inner_table_names<'local>(
    env: &mut JNIEnv<'local>,
    j_connection: JObject,
    start_after_obj: JObject, // Optional<String>
    limit_obj: JObject,       // Optional<Integer>
) -> Result<JObject<'local>> {
    let start_after = env.get_string_opt(&start_after_obj)?;
    let limit = env.get_int_opt(&limit_obj)?;
    let conn =
        unsafe { env.get_rust_field::<_, _, BlockingConnection>(j_connection, NATIVE_CONNECTION) }?;
    let table_names = conn.table_names(start_after, limit)?;
    drop(conn);
    let j_names = env.new_object("java/util/ArrayList", "()V", &[])?;
    for item in table_names {
        let jstr_item = env.new_string(item)?;
        let item_jobj = JObject::from(jstr_item);
        let item_gen = JValue::Object(&item_jobj);
        env.call_method(&j_names, "add", "(Ljava/lang/Object;)Z", &[item_gen])?;
    }
    Ok(j_names)
}
@@ -1,225 +0,0 @@
// Copyright 2024 Lance Developers.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

use std::str::Utf8Error;

use arrow_schema::ArrowError;
use jni::errors::Error as JniError;
use serde_json::Error as JsonError;
use snafu::{Location, Snafu};

type BoxedError = Box<dyn std::error::Error + Send + Sync + 'static>;

/// Java Exception types
pub enum JavaException {
    IllegalArgumentException,
    IOException,
    RuntimeException,
}

impl JavaException {
    pub fn as_str(&self) -> &str {
        match self {
            Self::IllegalArgumentException => "java/lang/IllegalArgumentException",
            Self::IOException => "java/io/IOException",
            Self::RuntimeException => "java/lang/RuntimeException",
        }
    }
}

/// TODO(lu) change to lancedb-jni
#[derive(Debug, Snafu)]
#[snafu(visibility(pub))]
pub enum Error {
    #[snafu(display("JNI error: {message}, {location}"))]
    Jni { message: String, location: Location },
    #[snafu(display("Invalid argument: {message}, {location}"))]
    InvalidArgument { message: String, location: Location },
    #[snafu(display("IO error: {source}, {location}"))]
    IO {
        source: BoxedError,
        location: Location,
    },
    #[snafu(display("Arrow error: {message}, {location}"))]
    Arrow { message: String, location: Location },
    #[snafu(display("Index error: {message}, {location}"))]
    Index { message: String, location: Location },
    #[snafu(display("JSON error: {message}, {location}"))]
    JSON { message: String, location: Location },
    #[snafu(display("Dataset at path {path} was not found, {location}"))]
    DatasetNotFound { path: String, location: Location },
    #[snafu(display("Dataset already exists: {uri}, {location}"))]
    DatasetAlreadyExists { uri: String, location: Location },
    #[snafu(display("Table '{name}' already exists"))]
    TableAlreadyExists { name: String },
    #[snafu(display("Table '{name}' was not found"))]
    TableNotFound { name: String },
    #[snafu(display("Invalid table name '{name}': {reason}"))]
    InvalidTableName { name: String, reason: String },
    #[snafu(display("Embedding function '{name}' was not found: {reason}, {location}"))]
    EmbeddingFunctionNotFound {
        name: String,
        reason: String,
        location: Location,
    },
    #[snafu(display("Other Lance error: {message}, {location}"))]
    OtherLance { message: String, location: Location },
    #[snafu(display("Other LanceDB error: {message}, {location}"))]
    OtherLanceDB { message: String, location: Location },
}

impl Error {
    /// Throw as Java Exception
    pub fn throw(&self, env: &mut jni::JNIEnv) {
        match self {
            Self::InvalidArgument { .. }
            | Self::DatasetNotFound { .. }
            | Self::DatasetAlreadyExists { .. }
            | Self::TableAlreadyExists { .. }
            | Self::TableNotFound { .. }
            | Self::InvalidTableName { .. }
            | Self::EmbeddingFunctionNotFound { .. } => {
                self.throw_as(env, JavaException::IllegalArgumentException)
            }
            Self::IO { .. } | Self::Index { .. } => self.throw_as(env, JavaException::IOException),
            Self::Arrow { .. }
            | Self::JSON { .. }
            | Self::OtherLance { .. }
            | Self::OtherLanceDB { .. }
            | Self::Jni { .. } => self.throw_as(env, JavaException::RuntimeException),
        }
    }

    /// Throw as a concrete Java Exception
    pub fn throw_as(&self, env: &mut jni::JNIEnv, exception: JavaException) {
        let message = &format!(
            "Error when throwing Java exception: {}:{}",
            exception.as_str(),
            self
        );
        env.throw_new(exception.as_str(), self.to_string())
            .expect(message);
    }
}

pub type Result<T> = std::result::Result<T, Error>;

trait ToSnafuLocation {
    fn to_snafu_location(&'static self) -> snafu::Location;
}

impl ToSnafuLocation for std::panic::Location<'static> {
    fn to_snafu_location(&'static self) -> snafu::Location {
        snafu::Location::new(self.file(), self.line(), self.column())
    }
}

impl From<JniError> for Error {
    #[track_caller]
    fn from(source: JniError) -> Self {
        Self::Jni {
            message: source.to_string(),
            location: std::panic::Location::caller().to_snafu_location(),
        }
    }
}

impl From<Utf8Error> for Error {
    #[track_caller]
    fn from(source: Utf8Error) -> Self {
        Self::InvalidArgument {
            message: source.to_string(),
            location: std::panic::Location::caller().to_snafu_location(),
        }
    }
}

impl From<ArrowError> for Error {
    #[track_caller]
    fn from(source: ArrowError) -> Self {
        Self::Arrow {
            message: source.to_string(),
            location: std::panic::Location::caller().to_snafu_location(),
        }
    }
}

impl From<JsonError> for Error {
    #[track_caller]
    fn from(source: JsonError) -> Self {
        Self::JSON {
            message: source.to_string(),
            location: std::panic::Location::caller().to_snafu_location(),
        }
    }
}

impl From<lance::Error> for Error {
    #[track_caller]
    fn from(source: lance::Error) -> Self {
        match source {
            lance::Error::DatasetNotFound {
                path,
                source: _,
                location,
            } => Self::DatasetNotFound { path, location },
            lance::Error::DatasetAlreadyExists { uri, location } => {
                Self::DatasetAlreadyExists { uri, location }
            }
            lance::Error::IO { source, location } => Self::IO { source, location },
            lance::Error::Arrow { message, location } => Self::Arrow { message, location },
            lance::Error::Index { message, location } => Self::Index { message, location },
            lance::Error::InvalidInput { source, location } => Self::InvalidArgument {
                message: source.to_string(),
                location,
            },
            _ => Self::OtherLance {
                message: source.to_string(),
                location: std::panic::Location::caller().to_snafu_location(),
            },
        }
    }
}

impl From<lancedb::Error> for Error {
    #[track_caller]
    fn from(source: lancedb::Error) -> Self {
        match source {
            lancedb::Error::InvalidTableName { name, reason } => {
                Self::InvalidTableName { name, reason }
            }
            lancedb::Error::InvalidInput { message } => Self::InvalidArgument {
                message,
                location: std::panic::Location::caller().to_snafu_location(),
            },
            lancedb::Error::TableNotFound { name } => Self::TableNotFound { name },
            lancedb::Error::TableAlreadyExists { name } => Self::TableAlreadyExists { name },
            lancedb::Error::EmbeddingFunctionNotFound { name, reason } => {
                Self::EmbeddingFunctionNotFound {
                    name,
                    reason,
                    location: std::panic::Location::caller().to_snafu_location(),
                }
            }
            lancedb::Error::Arrow { source } => Self::Arrow {
                message: source.to_string(),
                location: std::panic::Location::caller().to_snafu_location(),
            },
            lancedb::Error::Lance { source } => Self::from(source),
            _ => Self::OtherLanceDB {
                message: source.to_string(),
                location: std::panic::Location::caller().to_snafu_location(),
            },
        }
    }
}
@@ -1,204 +0,0 @@
|
|||||||
// Copyright 2024 Lance Developers.
|
|
||||||
//
|
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
// you may not use this file except in compliance with the License.
|
|
||||||
// You may obtain a copy of the License at
|
|
||||||
//
|
|
||||||
// http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
//
|
|
||||||
// Unless required by applicable law or agreed to in writing, software
|
|
||||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
// See the License for the specific language governing permissions and
|
|
||||||
// limitations under the License.
|
|
||||||
|
|
||||||
use core::slice;
|
|
||||||
|
|
||||||
use jni::objects::{JByteBuffer, JObjectArray, JString};
|
|
||||||
use jni::sys::jobjectArray;
|
|
||||||
use jni::{objects::JObject, JNIEnv};
|
|
||||||
|
|
||||||
use crate::error::{Error, Result};
|
|
||||||
|
|
||||||
/// TODO(lu) import from lance-jni without duplicate
|
|
||||||
/// Extend JNIEnv with helper functions.
|
|
||||||
pub trait JNIEnvExt {
|
|
||||||
/// Get integers from Java List<Integer> object.
|
|
||||||
fn get_integers(&mut self, obj: &JObject) -> Result<Vec<i32>>;
|
|
||||||
|
|
||||||
/// Get strings from Java List<String> object.
|
|
||||||
fn get_strings(&mut self, obj: &JObject) -> Result<Vec<String>>;
|
|
||||||
|
|
||||||
/// Get strings from Java String[] object.
|
|
||||||
/// Note that get Option<Vec<String>> from Java Optional<String[]> just doesn't work.
|
|
||||||
#[allow(unused)]
|
|
||||||
fn get_strings_array(&mut self, obj: jobjectArray) -> Result<Vec<String>>;
|
|
||||||
|
|
||||||
/// Get Option<String> from Java Optional<String>.
|
|
||||||
fn get_string_opt(&mut self, obj: &JObject) -> Result<Option<String>>;
|
|
||||||
|
|
||||||
/// Get Option<Vec<String>> from Java Optional<List<String>>.
|
|
||||||
#[allow(unused)]
|
|
||||||
fn get_strings_opt(&mut self, obj: &JObject) -> Result<Option<Vec<String>>>;
|
|
||||||
|
|
||||||
/// Get Option<i32> from Java Optional<Integer>.
|
|
||||||
fn get_int_opt(&mut self, obj: &JObject) -> Result<Option<i32>>;
|
|
||||||
|
|
||||||
/// Get Option<Vec<i32>> from Java Optional<List<Integer>>.
|
|
||||||
fn get_ints_opt(&mut self, obj: &JObject) -> Result<Option<Vec<i32>>>;
|
|
||||||
|
|
||||||
/// Get Option<i64> from Java Optional<Long>.
|
|
||||||
#[allow(unused)]
|
|
||||||
fn get_long_opt(&mut self, obj: &JObject) -> Result<Option<i64>>;
|
|
||||||
|
|
||||||
/// Get Option<u64> from Java Optional<Long>.
|
|
||||||
#[allow(unused)]
|
|
||||||
fn get_u64_opt(&mut self, obj: &JObject) -> Result<Option<u64>>;
|
|
||||||
|
|
||||||
/// Get Option<&[u8]> from Java Optional<ByteBuffer>.
|
|
||||||
#[allow(unused)]
|
|
||||||
fn get_bytes_opt(&mut self, obj: &JObject) -> Result<Option<&[u8]>>;
|
|
||||||
|
|
||||||
fn get_optional<T, F>(&mut self, obj: &JObject, f: F) -> Result<Option<T>>
|
|
||||||
where
|
|
||||||
F: FnOnce(&mut JNIEnv, &JObject) -> Result<T>;
|
|
||||||
}
|
|
||||||
|
|
||||||
impl JNIEnvExt for JNIEnv<'_> {
|
|
||||||
fn get_integers(&mut self, obj: &JObject) -> Result<Vec<i32>> {
|
|
||||||
let list = self.get_list(obj)?;
|
|
||||||
let mut iter = list.iter(self)?;
|
|
||||||
let mut results = Vec::with_capacity(list.size(self)? as usize);
|
|
||||||
while let Some(elem) = iter.next(self)? {
|
|
||||||
let int_obj = self.call_method(elem, "intValue", "()I", &[])?;
|
|
||||||
let int_value = int_obj.i()?;
|
|
||||||
results.push(int_value);
|
|
||||||
}
|
|
||||||
Ok(results)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn get_strings(&mut self, obj: &JObject) -> Result<Vec<String>> {
|
|
||||||
let list = self.get_list(obj)?;
|
|
||||||
let mut iter = list.iter(self)?;
|
|
||||||
let mut results = Vec::with_capacity(list.size(self)? as usize);
|
|
||||||
while let Some(elem) = iter.next(self)? {
|
|
||||||
let jstr = JString::from(elem);
|
|
||||||
let val = self.get_string(&jstr)?;
|
|
||||||
results.push(val.to_str()?.to_string())
|
|
||||||
}
|
|
||||||
Ok(results)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn get_strings_array(&mut self, obj: jobjectArray) -> Result<Vec<String>> {
|
|
||||||
let jobject_array = unsafe { JObjectArray::from_raw(obj) };
|
|
||||||
let array_len = self.get_array_length(&jobject_array)?;
|
|
||||||
let mut res: Vec<String> = Vec::new();
|
|
||||||
for i in 0..array_len {
|
|
||||||
            let item: JString = self.get_object_array_element(&jobject_array, i)?.into();
            res.push(self.get_string(&item)?.into());
        }
        Ok(res)
    }

    fn get_string_opt(&mut self, obj: &JObject) -> Result<Option<String>> {
        self.get_optional(obj, |env, inner_obj| {
            let java_obj_gen = env.call_method(inner_obj, "get", "()Ljava/lang/Object;", &[])?;
            let java_string_obj = java_obj_gen.l()?;
            let jstr = JString::from(java_string_obj);
            let val = env.get_string(&jstr)?;
            Ok(val.to_str()?.to_string())
        })
    }

    fn get_strings_opt(&mut self, obj: &JObject) -> Result<Option<Vec<String>>> {
        self.get_optional(obj, |env, inner_obj| {
            let java_obj_gen = env.call_method(inner_obj, "get", "()Ljava/lang/Object;", &[])?;
            let java_list_obj = java_obj_gen.l()?;
            env.get_strings(&java_list_obj)
        })
    }

    fn get_int_opt(&mut self, obj: &JObject) -> Result<Option<i32>> {
        self.get_optional(obj, |env, inner_obj| {
            let java_obj_gen = env.call_method(inner_obj, "get", "()Ljava/lang/Object;", &[])?;
            let java_int_obj = java_obj_gen.l()?;
            let int_obj = env.call_method(java_int_obj, "intValue", "()I", &[])?;
            let int_value = int_obj.i()?;
            Ok(int_value)
        })
    }

    fn get_ints_opt(&mut self, obj: &JObject) -> Result<Option<Vec<i32>>> {
        self.get_optional(obj, |env, inner_obj| {
            let java_obj_gen = env.call_method(inner_obj, "get", "()Ljava/lang/Object;", &[])?;
            let java_list_obj = java_obj_gen.l()?;
            env.get_integers(&java_list_obj)
        })
    }

    fn get_long_opt(&mut self, obj: &JObject) -> Result<Option<i64>> {
        self.get_optional(obj, |env, inner_obj| {
            let java_obj_gen = env.call_method(inner_obj, "get", "()Ljava/lang/Object;", &[])?;
            let java_long_obj = java_obj_gen.l()?;
            let long_obj = env.call_method(java_long_obj, "longValue", "()J", &[])?;
            let long_value = long_obj.j()?;
            Ok(long_value)
        })
    }

    fn get_u64_opt(&mut self, obj: &JObject) -> Result<Option<u64>> {
        self.get_optional(obj, |env, inner_obj| {
            let java_obj_gen = env.call_method(inner_obj, "get", "()Ljava/lang/Object;", &[])?;
            let java_long_obj = java_obj_gen.l()?;
            let long_obj = env.call_method(java_long_obj, "longValue", "()J", &[])?;
            let long_value = long_obj.j()?;
            Ok(long_value as u64)
        })
    }

    fn get_bytes_opt(&mut self, obj: &JObject) -> Result<Option<&[u8]>> {
        self.get_optional(obj, |env, inner_obj| {
            let java_obj_gen = env.call_method(inner_obj, "get", "()Ljava/lang/Object;", &[])?;
            let java_byte_buffer_obj = java_obj_gen.l()?;
            let j_byte_buffer = JByteBuffer::from(java_byte_buffer_obj);
            let raw_data = env.get_direct_buffer_address(&j_byte_buffer)?;
            let capacity = env.get_direct_buffer_capacity(&j_byte_buffer)?;
            let data = unsafe { slice::from_raw_parts(raw_data, capacity) };
            Ok(data)
        })
    }

    fn get_optional<T, F>(&mut self, obj: &JObject, f: F) -> Result<Option<T>>
    where
        F: FnOnce(&mut JNIEnv, &JObject) -> Result<T>,
    {
        if obj.is_null() {
            return Ok(None);
        }
        let is_empty = self.call_method(obj, "isEmpty", "()Z", &[])?;
        if is_empty.z()? {
            // TODO(lu): move the `get` call on the inner Java object into this helper,
            // since only a generic java.lang.Object can be fetched at this point
            Ok(None)
        } else {
            f(self, obj).map(Some)
        }
    }
}

#[no_mangle]
pub extern "system" fn Java_com_lancedb_lance_test_JniTestHelper_parseInts(
    mut env: JNIEnv,
    _obj: JObject,
    list_obj: JObject, // List<Integer>
) {
    ok_or_throw_without_return!(env, env.get_integers(&list_obj));
}

#[no_mangle]
pub extern "system" fn Java_com_lancedb_lance_test_JniTestHelper_parseIntsOpt(
    mut env: JNIEnv,
    _obj: JObject,
    list_obj: JObject, // Optional<List<Integer>>
) {
    ok_or_throw_without_return!(env, env.get_ints_opt(&list_obj));
}
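For orientation, the two exported symbols above follow the standard JNI naming scheme, so the Java side has to declare matching native methods. A minimal sketch of that counterpart, inferred only from the symbol names and the parameter comments (the real test class may differ in detail):

```java
package com.lancedb.lance.test;

import java.util.List;
import java.util.Optional;

// Hypothetical reconstruction for illustration; only the signatures are
// implied by the JNI symbol names and parameter comments above.
public class JniTestHelper {
  // Binds to Java_com_lancedb_lance_test_JniTestHelper_parseInts
  public static native void parseInts(List<Integer> intsList);

  // Binds to Java_com_lancedb_lance_test_JniTestHelper_parseIntsOpt
  public static native void parseIntsOpt(Optional<List<Integer>> intsListOpt);
}
```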
@@ -1,68 +0,0 @@
// Copyright 2024 Lance Developers.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

use lazy_static::lazy_static;

// TODO import from lance-jni without duplicate
#[macro_export]
macro_rules! ok_or_throw {
    ($env:expr, $result:expr) => {
        match $result {
            Ok(value) => value,
            Err(err) => {
                Error::from(err).throw(&mut $env);
                return JObject::null();
            }
        }
    };
}

macro_rules! ok_or_throw_without_return {
    ($env:expr, $result:expr) => {
        match $result {
            Ok(value) => value,
            Err(err) => {
                Error::from(err).throw(&mut $env);
                return;
            }
        }
    };
}

#[macro_export]
macro_rules! ok_or_throw_with_return {
    ($env:expr, $result:expr, $ret:expr) => {
        match $result {
            Ok(value) => value,
            Err(err) => {
                Error::from(err).throw(&mut $env);
                return $ret;
            }
        }
    };
}

mod connection;
pub mod error;
mod ffi;
mod traits;

pub use error::{Error, Result};

lazy_static! {
    static ref RT: tokio::runtime::Runtime = tokio::runtime::Builder::new_multi_thread()
        .enable_all()
        .build()
        .expect("Failed to create tokio runtime");
}
@@ -1,122 +0,0 @@
// Copyright 2024 Lance Developers.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

use jni::objects::{JMap, JObject, JString, JValue};
use jni::JNIEnv;

use crate::Result;

pub trait FromJObject<T> {
    fn extract(&self) -> Result<T>;
}

/// Convert a Rust type into a Java Object.
pub trait IntoJava {
    fn into_java<'a>(self, env: &mut JNIEnv<'a>) -> JObject<'a>;
}

impl FromJObject<i32> for JObject<'_> {
    fn extract(&self) -> Result<i32> {
        Ok(JValue::from(self).i()?)
    }
}

impl FromJObject<i64> for JObject<'_> {
    fn extract(&self) -> Result<i64> {
        Ok(JValue::from(self).j()?)
    }
}

impl FromJObject<f32> for JObject<'_> {
    fn extract(&self) -> Result<f32> {
        Ok(JValue::from(self).f()?)
    }
}

impl FromJObject<f64> for JObject<'_> {
    fn extract(&self) -> Result<f64> {
        Ok(JValue::from(self).d()?)
    }
}

pub trait FromJString {
    fn extract(&self, env: &mut JNIEnv) -> Result<String>;
}

impl FromJString for JString<'_> {
    fn extract(&self, env: &mut JNIEnv) -> Result<String> {
        Ok(env.get_string(self)?.into())
    }
}

pub trait JMapExt {
    #[allow(dead_code)]
    fn get_string(&self, env: &mut JNIEnv, key: &str) -> Result<Option<String>>;

    #[allow(dead_code)]
    fn get_i32(&self, env: &mut JNIEnv, key: &str) -> Result<Option<i32>>;

    #[allow(dead_code)]
    fn get_i64(&self, env: &mut JNIEnv, key: &str) -> Result<Option<i64>>;

    #[allow(dead_code)]
    fn get_f32(&self, env: &mut JNIEnv, key: &str) -> Result<Option<f32>>;

    #[allow(dead_code)]
    fn get_f64(&self, env: &mut JNIEnv, key: &str) -> Result<Option<f64>>;
}

fn get_map_value<T>(env: &mut JNIEnv, map: &JMap, key: &str) -> Result<Option<T>>
where
    for<'a> JObject<'a>: FromJObject<T>,
{
    let key_obj: JObject = env.new_string(key)?.into();
    if let Some(value) = map.get(env, &key_obj)? {
        if value.is_null() {
            Ok(None)
        } else {
            Ok(Some(value.extract()?))
        }
    } else {
        Ok(None)
    }
}

impl JMapExt for JMap<'_, '_, '_> {
    fn get_string(&self, env: &mut JNIEnv, key: &str) -> Result<Option<String>> {
        let key_obj: JObject = env.new_string(key)?.into();
        if let Some(value) = self.get(env, &key_obj)? {
            let value_str: JString = value.into();
            Ok(Some(value_str.extract(env)?))
        } else {
            Ok(None)
        }
    }

    fn get_i32(&self, env: &mut JNIEnv, key: &str) -> Result<Option<i32>> {
        get_map_value(env, self, key)
    }

    fn get_i64(&self, env: &mut JNIEnv, key: &str) -> Result<Option<i64>> {
        get_map_value(env, self, key)
    }

    fn get_f32(&self, env: &mut JNIEnv, key: &str) -> Result<Option<f32>> {
        get_map_value(env, self, key)
    }

    fn get_f64(&self, env: &mut JNIEnv, key: &str) -> Result<Option<f64>> {
        get_map_value(env, self, key)
    }
}
@@ -1,94 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>

<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>

  <parent>
    <groupId>com.lancedb</groupId>
    <artifactId>lancedb-parent</artifactId>
    <version>0.1-SNAPSHOT</version>
    <relativePath>../pom.xml</relativePath>
  </parent>

  <artifactId>lancedb-core</artifactId>
  <name>LanceDB Core</name>
  <packaging>jar</packaging>

  <dependencies>
    <dependency>
      <groupId>org.apache.arrow</groupId>
      <artifactId>arrow-vector</artifactId>
    </dependency>
    <dependency>
      <groupId>org.apache.arrow</groupId>
      <artifactId>arrow-memory-netty</artifactId>
    </dependency>
    <dependency>
      <groupId>org.apache.arrow</groupId>
      <artifactId>arrow-c-data</artifactId>
    </dependency>
    <dependency>
      <groupId>org.apache.arrow</groupId>
      <artifactId>arrow-dataset</artifactId>
    </dependency>
    <dependency>
      <groupId>org.json</groupId>
      <artifactId>json</artifactId>
    </dependency>
    <dependency>
      <groupId>org.questdb</groupId>
      <artifactId>jar-jni</artifactId>
    </dependency>
    <dependency>
      <groupId>org.junit.jupiter</groupId>
      <artifactId>junit-jupiter</artifactId>
      <scope>test</scope>
    </dependency>
  </dependencies>

  <profiles>
    <profile>
      <id>build-jni</id>
      <activation>
        <activeByDefault>true</activeByDefault>
      </activation>
      <build>
        <plugins>
          <plugin>
            <groupId>org.questdb</groupId>
            <artifactId>rust-maven-plugin</artifactId>
            <version>1.1.1</version>
            <executions>
              <execution>
                <id>lancedb-jni</id>
                <goals>
                  <goal>build</goal>
                </goals>
                <configuration>
                  <path>lancedb-jni</path>
                  <!--<release>true</release>-->
                  <!-- Copy native libraries to target/classes for runtime access -->
                  <copyTo>${project.build.directory}/classes/nativelib</copyTo>
                  <copyWithPlatformDir>true</copyWithPlatformDir>
                </configuration>
              </execution>
              <execution>
                <id>lancedb-jni-test</id>
                <goals>
                  <goal>test</goal>
                </goals>
                <configuration>
                  <path>lancedb-jni</path>
                  <release>false</release>
                  <verbosity>-v</verbosity>
                </configuration>
              </execution>
            </executions>
          </plugin>
        </plugins>
      </build>
    </profile>
  </profiles>
</project>
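The copyTo/copyWithPlatformDir settings above define the runtime contract: the Rust cdylib is copied under target/classes/nativelib/<platform>/ and therefore ships inside the jar, where jar-jni can find it on the classpath. A hedged sketch of the loading side (the Connection class later in this diff does exactly this in its static initializer):

```java
import io.questdb.jar.jni.JarJniLoader;

// NativeLib is a hypothetical wrapper used only for illustration.
public final class NativeLib {
  public static void load() {
    // Extracts the platform-specific lancedb_jni library from the
    // /nativelib resource directory inside the jar and loads it.
    JarJniLoader.loadLib(NativeLib.class, "/nativelib", "lancedb_jni");
  }

  private NativeLib() {}
}
```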
@@ -1,120 +0,0 @@
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.lancedb.lancedb;

import io.questdb.jar.jni.JarJniLoader;
import java.io.Closeable;
import java.util.List;
import java.util.Optional;

/**
 * Represents a LanceDB database.
 */
public class Connection implements Closeable {
  static {
    JarJniLoader.loadLib(Connection.class, "/nativelib", "lancedb_jni");
  }

  private long nativeConnectionHandle;

  /**
   * Connect to a LanceDB instance.
   */
  public static native Connection connect(String uri);

  /**
   * Get the names of all tables in the database. The names are sorted in
   * ascending order.
   *
   * @return the table names
   */
  public List<String> tableNames() {
    return tableNames(Optional.empty(), Optional.empty());
  }

  /**
   * Get the names of filtered tables in the database. The names are sorted in
   * ascending order.
   *
   * @param limit The number of results to return.
   * @return the table names
   */
  public List<String> tableNames(int limit) {
    return tableNames(Optional.empty(), Optional.of(limit));
  }

  /**
   * Get the names of filtered tables in the database. The names are sorted in
   * ascending order.
   *
   * @param startAfter If present, only return names that come lexicographically after the
   *                   supplied value. This can be combined with limit to implement pagination
   *                   by setting this to the last table name from the previous page.
   * @return the table names
   */
  public List<String> tableNames(String startAfter) {
    return tableNames(Optional.of(startAfter), Optional.empty());
  }

  /**
   * Get the names of filtered tables in the database. The names are sorted in
   * ascending order.
   *
   * @param startAfter If present, only return names that come lexicographically after the
   *                   supplied value. This can be combined with limit to implement pagination
   *                   by setting this to the last table name from the previous page.
   * @param limit The number of results to return.
   * @return the table names
   */
  public List<String> tableNames(String startAfter, int limit) {
    return tableNames(Optional.of(startAfter), Optional.of(limit));
  }

  /**
   * Get the names of filtered tables in the database. The names are sorted in
   * ascending order.
   *
   * @param startAfter If present, only return names that come lexicographically after the
   *                   supplied value. This can be combined with limit to implement pagination
   *                   by setting this to the last table name from the previous page.
   * @param limit The number of results to return.
   * @return the table names
   */
  public native List<String> tableNames(Optional<String> startAfter, Optional<Integer> limit);

  /**
   * Closes this connection and releases any system resources associated with it.
   * If the connection is already closed, then invoking this method has no effect.
   */
  @Override
  public void close() {
    if (nativeConnectionHandle != 0) {
      releaseNativeConnection(nativeConnectionHandle);
      nativeConnectionHandle = 0;
    }
  }

  /**
   * Native method to release the Lance connection resources associated with the
   * given handle.
   *
   * @param handle The native handle to the connection resource.
   */
  private native void releaseNativeConnection(long handle);

  private Connection() {}
}
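The startAfter and limit parameters above are the pagination primitives: feed the last name of one page back in as the startAfter of the next. A minimal usage sketch, assuming a database directory at /tmp/example_db:

```java
import java.util.List;

public class ListTables {
  public static void main(String[] args) {
    try (Connection conn = Connection.connect("/tmp/example_db")) {
      String last = null;
      while (true) {
        // First page uses only a limit; later pages resume after the
        // last name seen, mirroring the Javadoc's pagination recipe.
        List<String> page = (last == null)
            ? conn.tableNames(2)
            : conn.tableNames(last, 2);
        if (page.isEmpty()) {
          break;
        }
        page.forEach(System.out::println);
        last = page.get(page.size() - 1);
      }
    }
  }
}
```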
@@ -1,135 +0,0 @@
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.lancedb.lancedb;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;

import java.net.URL;
import java.nio.file.Path;
import java.util.List;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;

public class ConnectionTest {
  private static final String[] TABLE_NAMES = {
    "dataset_version",
    "new_empty_dataset",
    "test",
    "write_stream"
  };

  @TempDir
  static Path tempDir; // Temporary directory for the tests
  private static URL lanceDbURL;

  @BeforeAll
  static void setUp() {
    ClassLoader classLoader = ConnectionTest.class.getClassLoader();
    lanceDbURL = classLoader.getResource("example_db");
  }

  @Test
  void emptyDB() {
    String databaseUri = tempDir.resolve("emptyDB").toString();
    try (Connection conn = Connection.connect(databaseUri)) {
      List<String> tableNames = conn.tableNames();
      assertTrue(tableNames.isEmpty());
    }
  }

  @Test
  void tableNames() {
    try (Connection conn = Connection.connect(lanceDbURL.toString())) {
      List<String> tableNames = conn.tableNames();
      assertEquals(4, tableNames.size());
      for (int i = 0; i < TABLE_NAMES.length; i++) {
        assertEquals(TABLE_NAMES[i], tableNames.get(i));
      }
    }
  }

  @Test
  void tableNamesStartAfter() {
    try (Connection conn = Connection.connect(lanceDbURL.toString())) {
      assertTableNamesStartAfter(conn, TABLE_NAMES[0], 3, TABLE_NAMES[1], TABLE_NAMES[2], TABLE_NAMES[3]);
      assertTableNamesStartAfter(conn, TABLE_NAMES[1], 2, TABLE_NAMES[2], TABLE_NAMES[3]);
      assertTableNamesStartAfter(conn, TABLE_NAMES[2], 1, TABLE_NAMES[3]);
      assertTableNamesStartAfter(conn, TABLE_NAMES[3], 0);
      assertTableNamesStartAfter(conn, "a_dataset", 4, TABLE_NAMES[0], TABLE_NAMES[1], TABLE_NAMES[2], TABLE_NAMES[3]);
      assertTableNamesStartAfter(conn, "o_dataset", 2, TABLE_NAMES[2], TABLE_NAMES[3]);
      assertTableNamesStartAfter(conn, "v_dataset", 1, TABLE_NAMES[3]);
      assertTableNamesStartAfter(conn, "z_dataset", 0);
    }
  }

  private void assertTableNamesStartAfter(
      Connection conn, String startAfter, int expectedSize, String... expectedNames) {
    List<String> tableNames = conn.tableNames(startAfter);
    assertEquals(expectedSize, tableNames.size());
    for (int i = 0; i < expectedNames.length; i++) {
      assertEquals(expectedNames[i], tableNames.get(i));
    }
  }

  @Test
  void tableNamesLimit() {
    try (Connection conn = Connection.connect(lanceDbURL.toString())) {
      for (int i = 0; i <= TABLE_NAMES.length; i++) {
        List<String> tableNames = conn.tableNames(i);
        assertEquals(i, tableNames.size());
        for (int j = 0; j < i; j++) {
          assertEquals(TABLE_NAMES[j], tableNames.get(j));
        }
      }
    }
  }

  @Test
  void tableNamesStartAfterLimit() {
    try (Connection conn = Connection.connect(lanceDbURL.toString())) {
      List<String> tableNames = conn.tableNames(TABLE_NAMES[0], 2);
      assertEquals(2, tableNames.size());
      assertEquals(TABLE_NAMES[1], tableNames.get(0));
      assertEquals(TABLE_NAMES[2], tableNames.get(1));
      tableNames = conn.tableNames(TABLE_NAMES[1], 1);
      assertEquals(1, tableNames.size());
      assertEquals(TABLE_NAMES[2], tableNames.get(0));
      tableNames = conn.tableNames(TABLE_NAMES[2], 2);
      assertEquals(1, tableNames.size());
      assertEquals(TABLE_NAMES[3], tableNames.get(0));
      tableNames = conn.tableNames(TABLE_NAMES[3], 2);
      assertEquals(0, tableNames.size());
      tableNames = conn.tableNames(TABLE_NAMES[0], 0);
      assertEquals(0, tableNames.size());

      // Limit larger than the number of remaining tables
      tableNames = conn.tableNames(TABLE_NAMES[0], 10);
      assertEquals(3, tableNames.size());
      assertEquals(TABLE_NAMES[1], tableNames.get(0));
      assertEquals(TABLE_NAMES[2], tableNames.get(1));
      assertEquals(TABLE_NAMES[3], tableNames.get(2));

      // Start after a value not in the list
      tableNames = conn.tableNames("non_existent_table", 2);
      assertEquals(2, tableNames.size());
      assertEquals(TABLE_NAMES[2], tableNames.get(0));
      assertEquals(TABLE_NAMES[3], tableNames.get(1));

      // Start after the last table with a limit
      tableNames = conn.tableNames(TABLE_NAMES[3], 1);
      assertEquals(0, tableNames.size());
    }
  }
}
Binary file not shown.
@@ -1 +0,0 @@
(binary Lance transaction file d51afd07-e3cd-4c76-9b9b-787e13fd55b0; recoverable schema: id int32, name string; raw bytes not shown)
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -1 +0,0 @@
(binary Lance transaction file 15648e72-076f-4ef1-8b90-10d305b95b3b; recoverable schema: id int32, name string; raw bytes not shown)
Binary file not shown.
Binary file not shown.
@@ -1 +0,0 @@
(binary Lance transaction file a3689caf-4f6b-4afc-a3c7-97af75661843; recoverable schema: item string, price double, vector fixed_size_list:float:2; raw bytes not shown)
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
java/pom.xml
@@ -1,129 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>

  <groupId>com.lancedb</groupId>
  <artifactId>lancedb-parent</artifactId>
  <version>0.1-SNAPSHOT</version>
  <packaging>pom</packaging>

  <name>Lance Parent</name>

  <properties>
    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    <maven.compiler.source>11</maven.compiler.source>
    <maven.compiler.target>11</maven.compiler.target>
    <arrow.version>15.0.0</arrow.version>
  </properties>

  <modules>
    <module>core</module>
  </modules>

  <dependencyManagement>
    <dependencies>
      <dependency>
        <groupId>org.apache.arrow</groupId>
        <artifactId>arrow-vector</artifactId>
        <version>${arrow.version}</version>
      </dependency>
      <dependency>
        <groupId>org.apache.arrow</groupId>
        <artifactId>arrow-memory-netty</artifactId>
        <version>${arrow.version}</version>
      </dependency>
      <dependency>
        <groupId>org.apache.arrow</groupId>
        <artifactId>arrow-c-data</artifactId>
        <version>${arrow.version}</version>
      </dependency>
      <dependency>
        <groupId>org.apache.arrow</groupId>
        <artifactId>arrow-dataset</artifactId>
        <version>${arrow.version}</version>
      </dependency>
      <dependency>
        <groupId>org.questdb</groupId>
        <artifactId>jar-jni</artifactId>
        <version>1.1.1</version>
      </dependency>
      <dependency>
        <groupId>org.junit.jupiter</groupId>
        <artifactId>junit-jupiter</artifactId>
        <version>5.10.1</version>
      </dependency>
      <dependency>
        <groupId>org.json</groupId>
        <artifactId>json</artifactId>
        <version>20210307</version>
      </dependency>
    </dependencies>
  </dependencyManagement>

  <build>
    <plugins>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-checkstyle-plugin</artifactId>
        <version>3.3.1</version>
        <configuration>
          <configLocation>google_checks.xml</configLocation>
          <consoleOutput>true</consoleOutput>
          <failsOnError>true</failsOnError>
          <violationSeverity>warning</violationSeverity>
          <linkXRef>false</linkXRef>
        </configuration>
        <executions>
          <execution>
            <id>validate</id>
            <phase>validate</phase>
            <goals>
              <goal>check</goal>
            </goals>
          </execution>
        </executions>
      </plugin>
    </plugins>
    <pluginManagement>
      <plugins>
        <plugin>
          <artifactId>maven-clean-plugin</artifactId>
          <version>3.1.0</version>
        </plugin>
        <plugin>
          <artifactId>maven-resources-plugin</artifactId>
          <version>3.0.2</version>
        </plugin>
        <plugin>
          <artifactId>maven-compiler-plugin</artifactId>
          <version>3.8.1</version>
          <configuration>
            <compilerArgs>
              <arg>-h</arg>
              <arg>target/headers</arg>
            </compilerArgs>
          </configuration>
        </plugin>
        <plugin>
          <artifactId>maven-surefire-plugin</artifactId>
          <version>3.2.5</version>
          <configuration>
            <argLine>--add-opens=java.base/java.nio=ALL-UNNAMED</argLine>
            <forkNode implementation="org.apache.maven.plugin.surefire.extensions.SurefireForkNodeFactory"/>
            <useSystemClassLoader>false</useSystemClassLoader>
          </configuration>
        </plugin>
        <plugin>
          <artifactId>maven-jar-plugin</artifactId>
          <version>3.0.2</version>
        </plugin>
        <plugin>
          <artifactId>maven-install-plugin</artifactId>
          <version>2.5.2</version>
        </plugin>
      </plugins>
    </pluginManagement>
  </build>
</project>
node/package-lock.json (generated)
@@ -1,12 +1,12 @@
 {
   "name": "vectordb",
-  "version": "0.5.1",
+  "version": "0.4.15",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "vectordb",
-      "version": "0.5.1",
+      "version": "0.4.15",
       "cpu": [
         "x64",
         "arm64"
@@ -52,11 +52,11 @@
         "uuid": "^9.0.0"
       },
       "optionalDependencies": {
-        "@lancedb/vectordb-darwin-arm64": "0.4.20",
-        "@lancedb/vectordb-darwin-x64": "0.4.20",
-        "@lancedb/vectordb-linux-arm64-gnu": "0.4.20",
-        "@lancedb/vectordb-linux-x64-gnu": "0.4.20",
-        "@lancedb/vectordb-win32-x64-msvc": "0.4.20"
+        "@lancedb/vectordb-darwin-arm64": "0.4.15",
+        "@lancedb/vectordb-darwin-x64": "0.4.15",
+        "@lancedb/vectordb-linux-arm64-gnu": "0.4.15",
+        "@lancedb/vectordb-linux-x64-gnu": "0.4.15",
+        "@lancedb/vectordb-win32-x64-msvc": "0.4.15"
       },
       "peerDependencies": {
         "@apache-arrow/ts": "^14.0.2",
@@ -333,66 +333,6 @@
         "@jridgewell/sourcemap-codec": "^1.4.10"
       }
     },
-    "node_modules/@lancedb/vectordb-darwin-arm64": {
-      "version": "0.4.20",
-      "resolved": "https://registry.npmjs.org/@lancedb/vectordb-darwin-arm64/-/vectordb-darwin-arm64-0.4.20.tgz",
-      "integrity": "sha512-ffP2K4sA5mQTgePyARw1y8dPN996FmpvyAYoWO+TSItaXlhcXvc+KVa5udNMCZMDYeEnEv2Xpj6k4PwW3oBz+A==",
-      "cpu": [
-        "arm64"
-      ],
-      "optional": true,
-      "os": [
-        "darwin"
-      ]
-    },
-    "node_modules/@lancedb/vectordb-darwin-x64": {
-      "version": "0.4.20",
-      "resolved": "https://registry.npmjs.org/@lancedb/vectordb-darwin-x64/-/vectordb-darwin-x64-0.4.20.tgz",
-      "integrity": "sha512-GSYsXE20RIehDu30FjREhJdEzhnwOTV7ZsrSXagStzLY1gr7pyd7sfqxmmUtdD09di7LnQoiM71AOpPTa01YwQ==",
-      "cpu": [
-        "x64"
-      ],
-      "optional": true,
-      "os": [
-        "darwin"
-      ]
-    },
-    "node_modules/@lancedb/vectordb-linux-arm64-gnu": {
-      "version": "0.4.20",
-      "resolved": "https://registry.npmjs.org/@lancedb/vectordb-linux-arm64-gnu/-/vectordb-linux-arm64-gnu-0.4.20.tgz",
-      "integrity": "sha512-FpNOjOsz3nJVm6EBGyNgbOW2aFhsWZ/igeY45Z8hbZaaK2YBwrg/DASoNlUzgv6IR8cUaGJ2irNVJfsKR2cG6g==",
-      "cpu": [
-        "arm64"
-      ],
-      "optional": true,
-      "os": [
-        "linux"
-      ]
-    },
-    "node_modules/@lancedb/vectordb-linux-x64-gnu": {
-      "version": "0.4.20",
-      "resolved": "https://registry.npmjs.org/@lancedb/vectordb-linux-x64-gnu/-/vectordb-linux-x64-gnu-0.4.20.tgz",
-      "integrity": "sha512-pOqWjrRZQSrLTlQPkjidRii7NZDw8Xu9pN6ouVu2JAK8n81FXaPtFCyAI+Y3v9GpnYDN0rvD4eQ36aHAVPsa2g==",
-      "cpu": [
-        "x64"
-      ],
-      "optional": true,
-      "os": [
-        "linux"
-      ]
-    },
-    "node_modules/@lancedb/vectordb-win32-x64-msvc": {
-      "version": "0.4.20",
-      "resolved": "https://registry.npmjs.org/@lancedb/vectordb-win32-x64-msvc/-/vectordb-win32-x64-msvc-0.4.20.tgz",
-      "integrity": "sha512-5J5SsYSJ7jRCmU/sgwVHdrGz43B/7R2T9OEoFTKyVAtqTZdu75rkytXyn9SyEayXVhlUOaw76N0ASm0hAoDS/A==",
-      "cpu": [
-        "x64"
-      ],
-      "optional": true,
-      "os": [
-        "win32"
-      ]
-    },
     "node_modules/@neon-rs/cli": {
       "version": "0.0.160",
       "resolved": "https://registry.npmjs.org/@neon-rs/cli/-/cli-0.0.160.tgz",

node/package.json
@@ -1,6 +1,6 @@
 {
   "name": "vectordb",
-  "version": "0.5.1",
+  "version": "0.4.15",
   "description": " Serverless, low-latency vector database for AI applications",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",
@@ -88,10 +88,10 @@
     }
   },
   "optionalDependencies": {
-    "@lancedb/vectordb-darwin-arm64": "0.4.20",
-    "@lancedb/vectordb-darwin-x64": "0.4.20",
-    "@lancedb/vectordb-linux-arm64-gnu": "0.4.20",
-    "@lancedb/vectordb-linux-x64-gnu": "0.4.20",
-    "@lancedb/vectordb-win32-x64-msvc": "0.4.20"
+    "@lancedb/vectordb-darwin-arm64": "0.4.15",
+    "@lancedb/vectordb-darwin-x64": "0.4.15",
+    "@lancedb/vectordb-linux-arm64-gnu": "0.4.15",
+    "@lancedb/vectordb-linux-x64-gnu": "0.4.15",
+    "@lancedb/vectordb-win32-x64-msvc": "0.4.15"
   }
 }

node/src/arrow.ts
@@ -27,23 +27,23 @@ import {
|
|||||||
RecordBatch,
|
RecordBatch,
|
||||||
makeData,
|
makeData,
|
||||||
Struct,
|
Struct,
|
||||||
type Float,
|
Float,
|
||||||
DataType,
|
DataType,
|
||||||
Binary,
|
Binary,
|
||||||
Float32
|
Float32
|
||||||
} from "apache-arrow";
|
} from 'apache-arrow'
|
||||||
import { type EmbeddingFunction } from "./index";
|
import { type EmbeddingFunction } from './index'
|
||||||
import { sanitizeSchema } from "./sanitize";
|
import { sanitizeSchema } from './sanitize'
|
||||||
|
|
||||||
/*
|
/*
|
||||||
* Options to control how a column should be converted to a vector array
|
* Options to control how a column should be converted to a vector array
|
||||||
*/
|
*/
|
||||||
export class VectorColumnOptions {
|
export class VectorColumnOptions {
|
||||||
/** Vector column type. */
|
/** Vector column type. */
|
||||||
type: Float = new Float32();
|
type: Float = new Float32()
|
||||||
|
|
||||||
constructor(values?: Partial<VectorColumnOptions>) {
|
constructor (values?: Partial<VectorColumnOptions>) {
|
||||||
Object.assign(this, values);
|
Object.assign(this, values)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -60,7 +60,7 @@ export class MakeArrowTableOptions {
|
|||||||
* The schema must be specified if there are no records (e.g. to make
|
* The schema must be specified if there are no records (e.g. to make
|
||||||
* an empty table)
|
* an empty table)
|
||||||
*/
|
*/
|
||||||
schema?: Schema;
|
schema?: Schema
|
||||||
|
|
||||||
/*
|
/*
|
||||||
* Mapping from vector column name to expected type
|
* Mapping from vector column name to expected type
|
||||||
@@ -80,9 +80,7 @@ export class MakeArrowTableOptions {
|
|||||||
*/
|
*/
|
||||||
vectorColumns: Record<string, VectorColumnOptions> = {
|
vectorColumns: Record<string, VectorColumnOptions> = {
|
||||||
vector: new VectorColumnOptions()
|
vector: new VectorColumnOptions()
|
||||||
};
|
}
|
||||||
|
|
||||||
embeddings?: EmbeddingFunction<any>;
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* If true then string columns will be encoded with dictionary encoding
|
* If true then string columns will be encoded with dictionary encoding
|
||||||
@@ -93,10 +91,10 @@ export class MakeArrowTableOptions {
|
|||||||
*
|
*
|
||||||
* If `schema` is provided then this property is ignored.
|
* If `schema` is provided then this property is ignored.
|
||||||
*/
|
*/
|
||||||
dictionaryEncodeStrings: boolean = false;
|
dictionaryEncodeStrings: boolean = false
|
||||||
|
|
||||||
constructor(values?: Partial<MakeArrowTableOptions>) {
|
constructor (values?: Partial<MakeArrowTableOptions>) {
|
||||||
Object.assign(this, values);
|
Object.assign(this, values)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -195,68 +193,59 @@ export class MakeArrowTableOptions {
|
|||||||
* assert.deepEqual(table.schema, schema)
|
* assert.deepEqual(table.schema, schema)
|
||||||
* ```
|
* ```
|
||||||
*/
|
*/
|
||||||
export function makeArrowTable(
|
export function makeArrowTable (
|
||||||
data: Array<Record<string, any>>,
|
data: Array<Record<string, any>>,
|
||||||
options?: Partial<MakeArrowTableOptions>
|
options?: Partial<MakeArrowTableOptions>
|
||||||
): ArrowTable {
|
): ArrowTable {
|
||||||
if (
|
if (data.length === 0 && (options?.schema === undefined || options?.schema === null)) {
|
||||||
data.length === 0 &&
|
throw new Error('At least one record or a schema needs to be provided')
|
||||||
(options?.schema === undefined || options?.schema === null)
|
|
||||||
) {
|
|
||||||
throw new Error("At least one record or a schema needs to be provided");
|
|
||||||
}
|
}
|
||||||
|
|
||||||
const opt = new MakeArrowTableOptions(options !== undefined ? options : {});
|
const opt = new MakeArrowTableOptions(options !== undefined ? options : {})
|
||||||
if (opt.schema !== undefined && opt.schema !== null) {
|
if (opt.schema !== undefined && opt.schema !== null) {
|
||||||
opt.schema = sanitizeSchema(opt.schema);
|
opt.schema = sanitizeSchema(opt.schema)
|
||||||
opt.schema = validateSchemaEmbeddings(opt.schema, data, opt.embeddings);
|
|
||||||
}
|
}
|
||||||
|
const columns: Record<string, Vector> = {}
|
||||||
const columns: Record<string, Vector> = {};
|
|
||||||
// TODO: sample dataset to find missing columns
|
// TODO: sample dataset to find missing columns
|
||||||
// Prefer the field ordering of the schema, if present
|
// Prefer the field ordering of the schema, if present
|
||||||
const columnNames =
|
const columnNames = ((opt.schema) != null) ? (opt.schema.names as string[]) : Object.keys(data[0])
|
||||||
opt.schema != null ? (opt.schema.names as string[]) : Object.keys(data[0]);
|
|
||||||
for (const colName of columnNames) {
|
for (const colName of columnNames) {
|
||||||
if (
|
if (data.length !== 0 && !Object.prototype.hasOwnProperty.call(data[0], colName)) {
|
||||||
data.length !== 0 &&
|
|
||||||
!Object.prototype.hasOwnProperty.call(data[0], colName)
|
|
||||||
) {
|
|
||||||
// The field is present in the schema, but not in the data, skip it
|
// The field is present in the schema, but not in the data, skip it
|
||||||
continue;
|
continue
|
||||||
}
|
}
|
||||||
// Extract a single column from the records (transpose from row-major to col-major)
|
// Extract a single column from the records (transpose from row-major to col-major)
|
||||||
let values = data.map((datum) => datum[colName]);
|
let values = data.map((datum) => datum[colName])
|
||||||
|
|
||||||
// By default (type === undefined) arrow will infer the type from the JS type
|
// By default (type === undefined) arrow will infer the type from the JS type
|
||||||
let type;
|
let type
|
||||||
if (opt.schema !== undefined) {
|
if (opt.schema !== undefined) {
|
||||||
// If there is a schema provided, then use that for the type instead
|
// If there is a schema provided, then use that for the type instead
|
||||||
type = opt.schema?.fields.filter((f) => f.name === colName)[0]?.type;
|
type = opt.schema?.fields.filter((f) => f.name === colName)[0]?.type
|
||||||
if (DataType.isInt(type) && type.bitWidth === 64) {
|
if (DataType.isInt(type) && type.bitWidth === 64) {
|
||||||
// wrap in BigInt to avoid bug: https://github.com/apache/arrow/issues/40051
|
// wrap in BigInt to avoid bug: https://github.com/apache/arrow/issues/40051
|
||||||
values = values.map((v) => {
|
values = values.map((v) => {
|
||||||
if (v === null) {
|
if (v === null) {
|
||||||
return v;
|
return v
|
||||||
}
|
}
|
||||||
return BigInt(v);
|
return BigInt(v)
|
||||||
});
|
})
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
// Otherwise, check to see if this column is one of the vector columns
|
// Otherwise, check to see if this column is one of the vector columns
|
||||||
// defined by opt.vectorColumns and, if so, use the fixed size list type
|
// defined by opt.vectorColumns and, if so, use the fixed size list type
|
||||||
const vectorColumnOptions = opt.vectorColumns[colName];
|
const vectorColumnOptions = opt.vectorColumns[colName]
|
||||||
if (vectorColumnOptions !== undefined) {
|
if (vectorColumnOptions !== undefined) {
|
||||||
type = newVectorType(values[0].length, vectorColumnOptions.type);
|
type = newVectorType(values[0].length, vectorColumnOptions.type)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
// Convert an Array of JS values to an arrow vector
|
// Convert an Array of JS values to an arrow vector
|
||||||
columns[colName] = makeVector(values, type, opt.dictionaryEncodeStrings);
|
columns[colName] = makeVector(values, type, opt.dictionaryEncodeStrings)
|
||||||
} catch (error: unknown) {
|
} catch (error: unknown) {
|
||||||
// eslint-disable-next-line @typescript-eslint/restrict-template-expressions
|
// eslint-disable-next-line @typescript-eslint/restrict-template-expressions
|
||||||
throw Error(`Could not convert column "${colName}" to Arrow: ${error}`);
|
throw Error(`Could not convert column "${colName}" to Arrow: ${error}`)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -271,116 +260,97 @@ export function makeArrowTable(
|
|||||||
// To work around this we first create a table with the wrong schema and
|
// To work around this we first create a table with the wrong schema and
|
||||||
// then patch the schema of the batches so we can use
|
// then patch the schema of the batches so we can use
|
||||||
// `new ArrowTable(schema, batches)` which does not do any schema inference
|
// `new ArrowTable(schema, batches)` which does not do any schema inference
|
||||||
const firstTable = new ArrowTable(columns);
|
const firstTable = new ArrowTable(columns)
|
||||||
const batchesFixed = firstTable.batches.map(
|
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
|
||||||
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
|
const batchesFixed = firstTable.batches.map(batch => new RecordBatch(opt.schema!, batch.data))
|
||||||
(batch) => new RecordBatch(opt.schema!, batch.data)
|
return new ArrowTable(opt.schema, batchesFixed)
|
||||||
);
|
|
||||||
return new ArrowTable(opt.schema, batchesFixed);
|
|
||||||
} else {
|
} else {
|
||||||
return new ArrowTable(columns);
|
return new ArrowTable(columns)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Create an empty Arrow table with the provided schema
|
* Create an empty Arrow table with the provided schema
|
||||||
*/
|
*/
|
||||||
export function makeEmptyTable(schema: Schema): ArrowTable {
|
export function makeEmptyTable (schema: Schema): ArrowTable {
|
||||||
return makeArrowTable([], { schema });
|
return makeArrowTable([], { schema })
|
||||||
}
|
}
|
||||||
|
|
||||||
// Helper function to convert Array<Array<any>> to a variable sized list array
|
// Helper function to convert Array<Array<any>> to a variable sized list array
|
||||||
function makeListVector(lists: any[][]): Vector<any> {
|
function makeListVector (lists: any[][]): Vector<any> {
|
||||||
if (lists.length === 0 || lists[0].length === 0) {
|
if (lists.length === 0 || lists[0].length === 0) {
|
||||||
throw Error("Cannot infer list vector from empty array or empty list");
|
throw Error('Cannot infer list vector from empty array or empty list')
|
||||||
}
|
}
|
||||||
const sampleList = lists[0];
|
const sampleList = lists[0]
|
||||||
let inferredType;
|
let inferredType
|
||||||
try {
|
try {
|
||||||
const sampleVector = makeVector(sampleList);
|
const sampleVector = makeVector(sampleList)
|
||||||
inferredType = sampleVector.type;
|
inferredType = sampleVector.type
|
||||||
} catch (error: unknown) {
|
} catch (error: unknown) {
|
||||||
// eslint-disable-next-line @typescript-eslint/restrict-template-expressions
|
// eslint-disable-next-line @typescript-eslint/restrict-template-expressions
|
||||||
throw Error(`Cannot infer list vector. Cannot infer inner type: ${error}`);
|
throw Error(`Cannot infer list vector. Cannot infer inner type: ${error}`)
|
||||||
}
|
}
|
||||||
|
|
||||||
const listBuilder = makeBuilder({
|
const listBuilder = makeBuilder({
|
||||||
type: new List(new Field("item", inferredType, true))
|
type: new List(new Field('item', inferredType, true))
|
||||||
});
|
})
|
||||||
for (const list of lists) {
|
for (const list of lists) {
|
||||||
listBuilder.append(list);
|
listBuilder.append(list)
|
||||||
}
|
}
|
||||||
return listBuilder.finish().toVector();
|
return listBuilder.finish().toVector()
|
||||||
}
|
}
|
||||||
|
|
||||||
// Helper function to convert an Array of JS values to an Arrow Vector
|
// Helper function to convert an Array of JS values to an Arrow Vector
|
||||||
function makeVector(
|
function makeVector (values: any[], type?: DataType, stringAsDictionary?: boolean): Vector<any> {
|
||||||
values: any[],
|
|
||||||
type?: DataType,
|
|
||||||
stringAsDictionary?: boolean
|
|
||||||
): Vector<any> {
|
|
||||||
if (type !== undefined) {
|
if (type !== undefined) {
|
||||||
// No need for inference, let Arrow create it
|
// No need for inference, let Arrow create it
|
||||||
return vectorFromArray(values, type);
|
return vectorFromArray(values, type)
|
||||||
}
|
}
|
||||||
if (values.length === 0) {
|
if (values.length === 0) {
|
||||||
throw Error(
|
throw Error('makeVector requires at least one value or the type must be specfied')
|
||||||
"makeVector requires at least one value or the type must be specfied"
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
const sampleValue = values.find((val) => val !== null && val !== undefined);
|
const sampleValue = values.find(val => val !== null && val !== undefined)
|
||||||
if (sampleValue === undefined) {
|
if (sampleValue === undefined) {
|
||||||
throw Error(
|
throw Error('makeVector cannot infer the type if all values are null or undefined')
|
||||||
"makeVector cannot infer the type if all values are null or undefined"
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
if (Array.isArray(sampleValue)) {
|
if (Array.isArray(sampleValue)) {
|
||||||
// Default Arrow inference doesn't handle list types
|
// Default Arrow inference doesn't handle list types
|
||||||
return makeListVector(values);
|
return makeListVector(values)
|
||||||
} else if (Buffer.isBuffer(sampleValue)) {
|
} else if (Buffer.isBuffer(sampleValue)) {
|
||||||
// Default Arrow inference doesn't handle Buffer
|
// Default Arrow inference doesn't handle Buffer
|
||||||
return vectorFromArray(values, new Binary());
|
return vectorFromArray(values, new Binary())
|
||||||
} else if (
|
} else if (!(stringAsDictionary ?? false) && (typeof sampleValue === 'string' || sampleValue instanceof String)) {
|
||||||
!(stringAsDictionary ?? false) &&
|
|
||||||
(typeof sampleValue === "string" || sampleValue instanceof String)
|
|
||||||
) {
|
|
||||||
// If the type is string then don't use Arrow's default inference unless dictionaries are requested
|
// If the type is string then don't use Arrow's default inference unless dictionaries are requested
|
||||||
// because it will always use dictionary encoding for strings
|
// because it will always use dictionary encoding for strings
|
||||||
return vectorFromArray(values, new Utf8());
|
return vectorFromArray(values, new Utf8())
|
||||||
} else {
|
} else {
|
||||||
// Convert a JS array of values to an arrow vector
|
// Convert a JS array of values to an arrow vector
|
||||||
return vectorFromArray(values);
|
return vectorFromArray(values)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
async function applyEmbeddings<T>(
|
async function applyEmbeddings<T> (table: ArrowTable, embeddings?: EmbeddingFunction<T>, schema?: Schema): Promise<ArrowTable> {
|
||||||
table: ArrowTable,
|
|
||||||
embeddings?: EmbeddingFunction<T>,
|
|
||||||
schema?: Schema
|
|
||||||
): Promise<ArrowTable> {
|
|
||||||
if (embeddings == null) {
|
if (embeddings == null) {
|
||||||
return table;
|
return table
|
||||||
}
|
}
|
||||||
if (schema !== undefined && schema !== null) {
|
if (schema !== undefined && schema !== null) {
|
||||||
schema = sanitizeSchema(schema);
|
schema = sanitizeSchema(schema)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Convert from ArrowTable to Record<String, Vector>
|
// Convert from ArrowTable to Record<String, Vector>
|
||||||
const colEntries = [...Array(table.numCols).keys()].map((_, idx) => {
|
const colEntries = [...Array(table.numCols).keys()].map((_, idx) => {
|
||||||
const name = table.schema.fields[idx].name;
|
const name = table.schema.fields[idx].name
|
||||||
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
|
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
|
||||||
const vec = table.getChildAt(idx)!;
|
const vec = table.getChildAt(idx)!
|
||||||
return [name, vec];
|
return [name, vec]
|
||||||
});
|
})
|
||||||
const newColumns = Object.fromEntries(colEntries);
|
const newColumns = Object.fromEntries(colEntries)
|
||||||
|
|
||||||
const sourceColumn = newColumns[embeddings.sourceColumn];
|
const sourceColumn = newColumns[embeddings.sourceColumn]
|
||||||
const destColumn = embeddings.destColumn ?? "vector";
|
const destColumn = embeddings.destColumn ?? 'vector'
|
||||||
const innerDestType = embeddings.embeddingDataType ?? new Float32();
|
const innerDestType = embeddings.embeddingDataType ?? new Float32()
|
||||||
if (sourceColumn === undefined) {
|
if (sourceColumn === undefined) {
|
||||||
throw new Error(
|
throw new Error(`Cannot apply embedding function because the source column '${embeddings.sourceColumn}' was not present in the data`)
|
||||||
`Cannot apply embedding function because the source column '${embeddings.sourceColumn}' was not present in the data`
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if (table.numRows === 0) {
|
if (table.numRows === 0) {
|
||||||
@@ -388,60 +358,45 @@ async function applyEmbeddings<T>(
|
|||||||
// We have an empty table and it already has the embedding column so no work needs to be done
|
// We have an empty table and it already has the embedding column so no work needs to be done
|
||||||
// Note: we don't return an error like we did below because this is a common occurrence. For example,
|
// Note: we don't return an error like we did below because this is a common occurrence. For example,
|
||||||
// if we call convertToTable with 0 records and a schema that includes the embedding
|
// if we call convertToTable with 0 records and a schema that includes the embedding
|
||||||
return table;
|
return table
|
||||||
}
|
}
|
||||||
if (embeddings.embeddingDimension !== undefined) {
|
if (embeddings.embeddingDimension !== undefined) {
|
||||||
const destType = newVectorType(
|
const destType = newVectorType(embeddings.embeddingDimension, innerDestType)
|
||||||
embeddings.embeddingDimension,
|
newColumns[destColumn] = makeVector([], destType)
|
||||||
innerDestType
|
|
||||||
);
|
|
||||||
newColumns[destColumn] = makeVector([], destType);
|
|
||||||
} else if (schema != null) {
|
} else if (schema != null) {
|
||||||
const destField = schema.fields.find((f) => f.name === destColumn);
|
const destField = schema.fields.find(f => f.name === destColumn)
|
||||||
if (destField != null) {
|
if (destField != null) {
|
||||||
newColumns[destColumn] = makeVector([], destField.type);
|
newColumns[destColumn] = makeVector([], destField.type)
|
||||||
} else {
|
} else {
|
||||||
throw new Error(
|
throw new Error(`Attempt to apply embeddings to an empty table failed because schema was missing embedding column '${destColumn}'`)
|
||||||
`Attempt to apply embeddings to an empty table failed because schema was missing embedding column '${destColumn}'`
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
throw new Error(
|
throw new Error('Attempt to apply embeddings to an empty table when the embeddings function does not specify `embeddingDimension`')
|
||||||
"Attempt to apply embeddings to an empty table when the embeddings function does not specify `embeddingDimension`"
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
if (Object.prototype.hasOwnProperty.call(newColumns, destColumn)) {
|
if (Object.prototype.hasOwnProperty.call(newColumns, destColumn)) {
|
||||||
throw new Error(
|
throw new Error(`Attempt to apply embeddings to table failed because column ${destColumn} already existed`)
|
||||||
`Attempt to apply embeddings to table failed because column ${destColumn} already existed`
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
if (table.batches.length > 1) {
|
if (table.batches.length > 1) {
|
||||||
throw new Error(
|
throw new Error('Internal error: `makeArrowTable` unexpectedly created a table with more than one batch')
|
||||||
"Internal error: `makeArrowTable` unexpectedly created a table with more than one batch"
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
const values = sourceColumn.toArray();
|
const values = sourceColumn.toArray()
|
||||||
const vectors = await embeddings.embed(values as T[]);
|
const vectors = await embeddings.embed(values as T[])
|
||||||
if (vectors.length !== values.length) {
|
if (vectors.length !== values.length) {
|
||||||
throw new Error(
|
throw new Error('Embedding function did not return an embedding for each input element')
|
||||||
"Embedding function did not return an embedding for each input element"
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
const destType = newVectorType(vectors[0].length, innerDestType);
|
const destType = newVectorType(vectors[0].length, innerDestType)
|
||||||
newColumns[destColumn] = makeVector(vectors, destType);
|
newColumns[destColumn] = makeVector(vectors, destType)
|
||||||
}
|
}
|
||||||
|
|
||||||
const newTable = new ArrowTable(newColumns);
|
const newTable = new ArrowTable(newColumns)
|
||||||
if (schema != null) {
|
if (schema != null) {
|
||||||
if (schema.fields.find((f) => f.name === destColumn) === undefined) {
|
if (schema.fields.find(f => f.name === destColumn) === undefined) {
|
||||||
throw new Error(
|
throw new Error(`When using embedding functions and specifying a schema the schema should include the embedding column but the column ${destColumn} was missing`)
|
||||||
`When using embedding functions and specifying a schema the schema should include the embedding column but the column ${destColumn} was missing`
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
return alignTable(newTable, schema);
|
return alignTable(newTable, schema)
|
||||||
}
|
}
|
||||||
return newTable;
|
return newTable
|
||||||
}
|
}
|
||||||
|
|
||||||
/*
|
/*
|
||||||
@@ -462,24 +417,21 @@ async function applyEmbeddings<T>(
|
|||||||
* embedding columns. If no schema is provded then embedding columns will
|
* embedding columns. If no schema is provded then embedding columns will
|
||||||
* be placed at the end of the table, after all of the input columns.
|
* be placed at the end of the table, after all of the input columns.
|
||||||
*/
|
*/
|
||||||
export async function convertToTable<T>(
|
export async function convertToTable<T> (
|
||||||
data: Array<Record<string, unknown>>,
|
data: Array<Record<string, unknown>>,
|
||||||
embeddings?: EmbeddingFunction<T>,
|
embeddings?: EmbeddingFunction<T>,
|
||||||
makeTableOptions?: Partial<MakeArrowTableOptions>
|
makeTableOptions?: Partial<MakeArrowTableOptions>
|
||||||
): Promise<ArrowTable> {
|
): Promise<ArrowTable> {
|
||||||
const table = makeArrowTable(data, makeTableOptions);
|
const table = makeArrowTable(data, makeTableOptions)
|
||||||
return await applyEmbeddings(table, embeddings, makeTableOptions?.schema);
|
return await applyEmbeddings(table, embeddings, makeTableOptions?.schema)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Creates the Arrow Type for a Vector column with dimension `dim`
|
// Creates the Arrow Type for a Vector column with dimension `dim`
|
||||||
function newVectorType<T extends Float>(
|
function newVectorType <T extends Float> (dim: number, innerType: T): FixedSizeList<T> {
|
||||||
dim: number,
|
|
||||||
innerType: T
|
|
||||||
): FixedSizeList<T> {
|
|
||||||
// Somewhere we always default to have the elements nullable, so we need to set it to true
|
// Somewhere we always default to have the elements nullable, so we need to set it to true
|
||||||
// otherwise we often get schema mismatches because the stored data always has schema with nullable elements
|
// otherwise we often get schema mismatches because the stored data always has schema with nullable elements
|
||||||
const children = new Field<T>("item", innerType, true);
|
const children = new Field<T>('item', innerType, true)
|
||||||
return new FixedSizeList(dim, children);
|
return new FixedSizeList(dim, children)
|
||||||
}
|
}
|
||||||
/**
@@ -489,17 +441,17 @@ function newVectorType<T extends Float>(
 *
 * `schema` is required if data is empty
 */
export async function fromRecordsToBuffer<T> (
  data: Array<Record<string, unknown>>,
  embeddings?: EmbeddingFunction<T>,
  schema?: Schema
): Promise<Buffer> {
  if (schema !== undefined && schema !== null) {
    schema = sanitizeSchema(schema)
  }
-  const table = await convertToTable(data, embeddings, { schema, embeddings });
+  const table = await convertToTable(data, embeddings, { schema })
  const writer = RecordBatchFileWriter.writeAll(table)
  return Buffer.from(await writer.toUint8Array())
}
/**
@@ -509,17 +461,17 @@ export async function fromRecordsToBuffer<T>(
 *
 * `schema` is required if data is empty
 */
export async function fromRecordsToStreamBuffer<T> (
  data: Array<Record<string, unknown>>,
  embeddings?: EmbeddingFunction<T>,
  schema?: Schema
): Promise<Buffer> {
  if (schema !== null && schema !== undefined) {
    schema = sanitizeSchema(schema)
  }
  const table = await convertToTable(data, embeddings, { schema })
  const writer = RecordBatchStreamWriter.writeAll(table)
  return Buffer.from(await writer.toUint8Array())
}
/**
@@ -530,17 +482,17 @@ export async function fromRecordsToStreamBuffer<T>(
 *
 * `schema` is required if the table is empty
 */
export async function fromTableToBuffer<T> (
  table: ArrowTable,
  embeddings?: EmbeddingFunction<T>,
  schema?: Schema
): Promise<Buffer> {
  if (schema !== null && schema !== undefined) {
    schema = sanitizeSchema(schema)
  }
  const tableWithEmbeddings = await applyEmbeddings(table, embeddings, schema)
  const writer = RecordBatchFileWriter.writeAll(tableWithEmbeddings)
  return Buffer.from(await writer.toUint8Array())
}
/**
@@ -551,85 +503,49 @@ export async function fromTableToBuffer<T>(
 *
 * `schema` is required if the table is empty
 */
export async function fromTableToStreamBuffer<T> (
  table: ArrowTable,
  embeddings?: EmbeddingFunction<T>,
  schema?: Schema
): Promise<Buffer> {
  if (schema !== null && schema !== undefined) {
    schema = sanitizeSchema(schema)
  }
  const tableWithEmbeddings = await applyEmbeddings(table, embeddings, schema)
  const writer = RecordBatchStreamWriter.writeAll(tableWithEmbeddings)
  return Buffer.from(await writer.toUint8Array())
}
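A round-trip sketch showing how these writers pair with tableFromIPC on the read side; tableFromIPC accepts both the file and stream IPC encodings:

import { tableFromIPC } from 'apache-arrow'

const src = makeArrowTable([{ id: 1, vector: [0.1, 0.2] }])
const buf = await fromTableToBuffer(src) // Arrow IPC file bytes
const back = tableFromIPC(buf)           // parsed back into an ArrowTable
console.log(back.numRows)                // 1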
function alignBatch (batch: RecordBatch, schema: Schema): RecordBatch {
  const alignedChildren = []
  for (const field of schema.fields) {
    const indexInBatch = batch.schema.fields?.findIndex(
      (f) => f.name === field.name
    )
    if (indexInBatch < 0) {
      throw new Error(
        `The column ${field.name} was not found in the Arrow Table`
      )
    }
    alignedChildren.push(batch.data.children[indexInBatch])
  }
  const newData = makeData({
    type: new Struct(schema.fields),
    length: batch.numRows,
    nullCount: batch.nullCount,
    children: alignedChildren
  })
  return new RecordBatch(schema, newData)
}

function alignTable (table: ArrowTable, schema: Schema): ArrowTable {
  const alignedBatches = table.batches.map((batch) =>
    alignBatch(batch, schema)
  )
  return new ArrowTable(schema, alignedBatches)
}

// Creates an empty Arrow Table
export function createEmptyTable (schema: Schema): ArrowTable {
  return new ArrowTable(sanitizeSchema(schema))
}
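For example, pairing createEmptyTable with an explicit schema (field choices are arbitrary):

import { Field, Int32, Schema, Utf8 } from 'apache-arrow'

const schema = new Schema([
  new Field('id', new Int32(), false),
  new Field('text', new Utf8(), true)
])
const empty = createEmptyTable(schema) // zero rows, sanitized schema attached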
-function validateSchemaEmbeddings(
-  schema: Schema<any>,
-  data: Array<Record<string, unknown>>,
-  embeddings: EmbeddingFunction<any> | undefined
-) {
-  const fields = [];
-  const missingEmbeddingFields = [];
-
-  // First we check if the field is a `FixedSizeList`
-  // Then we check if the data contains the field
-  // if it does not, we add it to the list of missing embedding fields
-  // Finally, we check if those missing embedding fields are `this._embeddings`
-  // if they are not, we throw an error
-  for (const field of schema.fields) {
-    if (field.type instanceof FixedSizeList) {
-      if (data.length !== 0 && data?.[0]?.[field.name] === undefined) {
-        missingEmbeddingFields.push(field);
-      } else {
-        fields.push(field);
-      }
-    } else {
-      fields.push(field);
-    }
-  }
-
-  if (missingEmbeddingFields.length > 0 && embeddings === undefined) {
-    throw new Error(
-      `Table has embeddings: "${missingEmbeddingFields
-        .map((f) => f.name)
-        .join(",")}", but no embedding function was provided`
-    );
-  }
-
-  return new Schema(fields, schema.metadata);
-}
@@ -12,20 +12,19 @@
// See the License for the specific language governing permissions and
// limitations under the License.

import { type Schema, Table as ArrowTable, tableFromIPC } from 'apache-arrow'
import {
  createEmptyTable,
  fromRecordsToBuffer,
  fromTableToBuffer,
  makeArrowTable
} from './arrow'
import type { EmbeddingFunction } from './embedding/embedding_function'
import { RemoteConnection } from './remote'
import { Query } from './query'
import { isEmbeddingFunction } from './embedding/embedding_function'
import { type Literal, toSQL } from './util'
import { type HttpMiddleware } from './middleware'

const {
  databaseNew,
@@ -49,18 +48,14 @@ const {
  tableAlterColumns,
  tableDropColumns
  // eslint-disable-next-line @typescript-eslint/no-var-requires
} = require('../native.js')

export { Query }
export type { EmbeddingFunction }
export { OpenAIEmbeddingFunction } from './embedding/openai'
export { convertToTable, makeArrowTable, type MakeArrowTableOptions } from './arrow'

const defaultAwsRegion = 'us-west-2'
export interface AwsCredentials {
  accessKeyId: string
@@ -83,25 +78,12 @@ export interface ConnectionOptions {
  /** User provided AWS credentials.
   *
   * If not provided, LanceDB will use the default credentials provider chain.
-   *
-   * @deprecated Pass `aws_access_key_id`, `aws_secret_access_key`, and `aws_session_token`
-   * through `storageOptions` instead.
   */
  awsCredentials?: AwsCredentials

-  /** AWS region to connect to. Default is {@link defaultAwsRegion}
-   *
-   * @deprecated Pass `region` through `storageOptions` instead.
-   */
+  /** AWS region to connect to. Default is {@link defaultAwsRegion}. */
  awsRegion?: string

-  /**
-   * User provided options for object storage. For example, S3 credentials or request timeouts.
-   *
-   * The various options are described at https://lancedb.github.io/lancedb/guides/storage/
-   */
-  storageOptions?: Record<string, string>
-
  /**
   * API key for the remote connections
   *
@@ -133,19 +115,19 @@ export interface ConnectionOptions {
  readConsistencyInterval?: number
}
function getAwsArgs (opts: ConnectionOptions): any[] {
  const callArgs: any[] = []
  const awsCredentials = opts.awsCredentials
  if (awsCredentials !== undefined) {
    callArgs.push(awsCredentials.accessKeyId)
    callArgs.push(awsCredentials.secretKey)
    callArgs.push(awsCredentials.sessionToken)
  } else {
    callArgs.fill(undefined, 0, 3)
  }

  callArgs.push(opts.awsRegion)
  return callArgs
}
export interface CreateTableOptions<T> {
@@ -168,7 +150,7 @@ export interface CreateTableOptions<T> {
/**
 * Connect to a LanceDB instance at the given URI.
 *
 * Accepted formats:
 *
 * - `/path/to/database` - local database
 * - `s3://bucket/path/to/database` or `gs://bucket/path/to/database` - database on cloud storage
@@ -178,66 +160,53 @@ export interface CreateTableOptions<T> {
 *
 * @see {@link ConnectionOptions} for more details on the URI format.
 */
export async function connect (uri: string): Promise<Connection>
/**
 * Connect to a LanceDB instance with connection options.
 *
 * @param opts The {@link ConnectionOptions} to use when connecting to the database.
 */
export async function connect (
  opts: Partial<ConnectionOptions>
): Promise<Connection>
export async function connect (
  arg: string | Partial<ConnectionOptions>
): Promise<Connection> {
  let opts: ConnectionOptions
  if (typeof arg === 'string') {
    opts = { uri: arg }
  } else {
+    // opts = { uri: arg.uri, awsCredentials = arg.awsCredentials }
    const keys = Object.keys(arg)
    if (keys.length === 1 && keys[0] === 'uri' && typeof arg.uri === 'string') {
      opts = { uri: arg.uri }
    } else {
      opts = Object.assign(
        {
          uri: '',
          awsCredentials: undefined,
          awsRegion: defaultAwsRegion,
          apiKey: undefined,
          region: defaultAwsRegion
        },
        arg
      )
    }
  }

  if (opts.uri.startsWith('db://')) {
    // Remote connection
    return new RemoteConnection(opts)
  }

-  const storageOptions = opts.storageOptions ?? {};
-  if (opts.awsCredentials?.accessKeyId !== undefined) {
-    storageOptions.aws_access_key_id = opts.awsCredentials.accessKeyId;
-  }
-  if (opts.awsCredentials?.secretKey !== undefined) {
-    storageOptions.aws_secret_access_key = opts.awsCredentials.secretKey;
-  }
-  if (opts.awsCredentials?.sessionToken !== undefined) {
-    storageOptions.aws_session_token = opts.awsCredentials.sessionToken;
-  }
-  if (opts.awsRegion !== undefined) {
-    storageOptions.region = opts.awsRegion;
-  }
-  // It's a pain to pass a record to Rust, so we convert it to an array of key-value pairs
-  const storageOptionsArr = Object.entries(storageOptions);
-
-  const db = await databaseNew(opts.uri, storageOptionsArr, opts.readConsistencyInterval);
+  const db = await databaseNew(
+    opts.uri,
+    opts.awsCredentials?.accessKeyId,
+    opts.awsCredentials?.secretKey,
+    opts.awsCredentials?.sessionToken,
+    opts.awsRegion,
+    opts.readConsistencyInterval
+  )
  return new LocalConnection(db, opts)
}
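Both connect overloads in use (a sketch; the package name, paths, key, and region are placeholders):

import { connect } from 'vectordb'

const localDb = await connect('/tmp/lancedb') // LocalConnection

const remoteDb = await connect({              // RemoteConnection via the db:// prefix
  uri: 'db://my-database',
  apiKey: 'sk-placeholder',
  region: 'us-east-1'
})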
/**
@@ -538,11 +507,7 @@ export interface Table<T = number[]> {
   * @param data the new data to insert
   * @param args parameters controlling how the operation should behave
   */
  mergeInsert: (on: string, data: Array<Record<string, unknown>> | ArrowTable, args: MergeInsertArgs) => Promise<void>
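A sketch of how the MergeInsertArgs flags compose, based on the handling in LocalTable.mergeInsert further down; the column names and filters are illustrative:

await tbl.mergeInsert('id', [{ id: 1, price: 10 }, { id: 2, price: 20 }], {
  whenMatchedUpdateAll: true,                // a string here acts as an update filter instead
  whenNotMatchedInsertAll: true,             // insert source rows with no match
  whenNotMatchedBySourceDelete: 'price < 5'  // delete unmatched target rows passing the filter
})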
/**
   * List the indices on this table.
@@ -567,9 +532,7 @@ export interface Table<T = number[]> {
   * expressions will be evaluated for each row in the
   * table, and can reference existing columns in the table.
   */
  addColumns(newColumnTransforms: Array<{ name: string, valueSql: string }>): Promise<void>

  /**
   * Alter the name or nullability of columns.
@@ -710,23 +673,23 @@ export interface IndexStats {
 * A connection to a LanceDB database.
 */
export class LocalConnection implements Connection {
  private readonly _options: () => ConnectionOptions
  private readonly _db: any

  constructor (db: any, options: ConnectionOptions) {
    this._options = () => options
    this._db = db
  }

  get uri (): string {
    return this._options().uri
  }

  /**
   * Get the names of all tables in the database.
   */
  async tableNames (): Promise<string[]> {
    return databaseTableNames.call(this._db)
  }
/**
@@ -734,7 +697,7 @@ export class LocalConnection implements Connection {
   *
   * @param name The name of the table.
   */
  async openTable (name: string): Promise<Table>

  /**
   * Open a table in the database.
@@ -745,20 +708,24 @@ export class LocalConnection implements Connection {
  async openTable<T>(
    name: string,
    embeddings: EmbeddingFunction<T>
  ): Promise<Table<T>>
  async openTable<T>(
    name: string,
    embeddings?: EmbeddingFunction<T>
  ): Promise<Table<T>>
  async openTable<T>(
    name: string,
    embeddings?: EmbeddingFunction<T>
  ): Promise<Table<T>> {
-    const tbl = await databaseOpenTable.call(this._db, name);
+    const tbl = await databaseOpenTable.call(
+      this._db,
+      name,
+      ...getAwsArgs(this._options())
+    )
    if (embeddings !== undefined) {
      return new LocalTable(tbl, name, this._options(), embeddings)
    } else {
      return new LocalTable(tbl, name, this._options())
    }
  }
@@ -768,32 +735,32 @@ export class LocalConnection implements Connection {
    optsOrEmbedding?: WriteOptions | EmbeddingFunction<T>,
    opt?: WriteOptions
  ): Promise<Table<T>> {
    if (typeof name === 'string') {
      let writeOptions: WriteOptions = new DefaultWriteOptions()
      if (opt !== undefined && isWriteOptions(opt)) {
        writeOptions = opt
      } else if (
        optsOrEmbedding !== undefined &&
        isWriteOptions(optsOrEmbedding)
      ) {
        writeOptions = optsOrEmbedding
      }

      let embeddings: undefined | EmbeddingFunction<T>
      if (
        optsOrEmbedding !== undefined &&
        isEmbeddingFunction(optsOrEmbedding)
      ) {
        embeddings = optsOrEmbedding
      }
      return await this.createTableImpl({
        name,
        data,
        embeddingFunction: embeddings,
        writeOptions
      })
    }
    return await this.createTableImpl(name)
  }
  private async createTableImpl<T>({
@@ -809,27 +776,27 @@ export class LocalConnection implements Connection {
    embeddingFunction?: EmbeddingFunction<T> | undefined
    writeOptions?: WriteOptions | undefined
  }): Promise<Table<T>> {
    let buffer: Buffer

    function isEmpty (
      data: Array<Record<string, unknown>> | ArrowTable<any>
    ): boolean {
      if (data instanceof ArrowTable) {
        return data.data.length === 0
      }
      return data.length === 0
    }

    if (data === undefined || isEmpty(data)) {
      if (schema === undefined) {
        throw new Error('Either data or schema needs to defined')
      }
      buffer = await fromTableToBuffer(createEmptyTable(schema))
    } else if (data instanceof ArrowTable) {
      buffer = await fromTableToBuffer(data, embeddingFunction, schema)
    } else {
      // data is Array<Record<...>>
      buffer = await fromRecordsToBuffer(data, embeddingFunction, schema)
    }

    const tbl = await tableCreate.call(
@@ -838,11 +805,11 @@ export class LocalConnection implements Connection {
      buffer,
      writeOptions?.writeMode?.toString(),
      ...getAwsArgs(this._options())
    )
    if (embeddingFunction !== undefined) {
      return new LocalTable(tbl, name, this._options(), embeddingFunction)
    } else {
      return new LocalTable(tbl, name, this._options())
    }
  }
@@ -850,69 +817,69 @@ export class LocalConnection implements Connection {
   * Drop an existing table.
   * @param name The name of the table to drop.
   */
  async dropTable (name: string): Promise<void> {
    await databaseDropTable.call(this._db, name)
  }

  withMiddleware (middleware: HttpMiddleware): Connection {
    return this
  }
}

export class LocalTable<T = number[]> implements Table<T> {
  private _tbl: any
  private readonly _name: string
  private readonly _isElectron: boolean
  private readonly _embeddings?: EmbeddingFunction<T>
  private readonly _options: () => ConnectionOptions

  constructor (tbl: any, name: string, options: ConnectionOptions)
  /**
   * @param tbl
   * @param name
   * @param options
   * @param embeddings An embedding function to use when interacting with this table
   */
  constructor (
    tbl: any,
    name: string,
    options: ConnectionOptions,
    embeddings: EmbeddingFunction<T>
  )
  constructor (
    tbl: any,
    name: string,
    options: ConnectionOptions,
    embeddings?: EmbeddingFunction<T>
  ) {
    this._tbl = tbl
    this._name = name
    this._embeddings = embeddings
    this._options = () => options
    this._isElectron = this.checkElectron()
  }

  get name (): string {
    return this._name
  }

  /**
   * Creates a search query to find the nearest neighbors of the given search term
   * @param query The query search term
   */
  search (query: T): Query<T> {
    return new Query(query, this._tbl, this._embeddings)
  }

  /**
   * Creates a filter query to find all rows matching the specified criteria
   * @param value The filter criteria (like SQL where clause syntax)
   */
  filter (value: string): Query<T> {
    return new Query(undefined, this._tbl, this._embeddings).filter(value)
  }

  where = this.filter

  /**
   * Insert records into this Table.
@@ -920,19 +887,16 @@ export class LocalTable<T = number[]> implements Table<T> {
   * @param data Records to be inserted into the Table
   * @return The number of rows added to the table
   */
  async add (
    data: Array<Record<string, unknown>> | ArrowTable
  ): Promise<number> {
    const schema = await this.schema
    let tbl: ArrowTable
    if (data instanceof ArrowTable) {
      tbl = data
    } else {
-      tbl = makeArrowTable(data, { schema, embeddings: this._embeddings });
+      tbl = makeArrowTable(data, { schema })
    }

    return tableAdd
      .call(
        this._tbl,
@@ -941,8 +905,8 @@ export class LocalTable<T = number[]> implements Table<T> {
        ...getAwsArgs(this._options())
      )
      .then((newTable: any) => {
        this._tbl = newTable
      })
  }

  /**
@@ -951,14 +915,14 @@ export class LocalTable<T = number[]> implements Table<T> {
   * @param data Records to be inserted into the Table
   * @return The number of rows added to the table
   */
  async overwrite (
    data: Array<Record<string, unknown>> | ArrowTable
  ): Promise<number> {
    let buffer: Buffer
    if (data instanceof ArrowTable) {
      buffer = await fromTableToBuffer(data, this._embeddings)
    } else {
      buffer = await fromRecordsToBuffer(data, this._embeddings)
    }
    return tableAdd
      .call(
@@ -968,8 +932,8 @@ export class LocalTable<T = number[]> implements Table<T> {
        ...getAwsArgs(this._options())
      )
      .then((newTable: any) => {
        this._tbl = newTable
      })
  }
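Typical use of the two write paths (a sketch; the records must match the table's schema):

await tbl.add([{ id: 3, vector: [0.3, 0.4] }])       // append rows
await tbl.overwrite([{ id: 1, vector: [0.0, 0.0] }]) // replace the table contents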
/**
@@ -977,26 +941,26 @@ export class LocalTable<T = number[]> implements Table<T> {
   *
   * @param indexParams The parameters of this Index, @see VectorIndexParams.
   */
  async createIndex (indexParams: VectorIndexParams): Promise<any> {
    return tableCreateVectorIndex
      .call(this._tbl, indexParams)
      .then((newTable: any) => {
        this._tbl = newTable
      })
  }

  async createScalarIndex (column: string, replace?: boolean): Promise<void> {
    if (replace === undefined) {
      replace = true
    }
    return tableCreateScalarIndex.call(this._tbl, column, replace)
  }

  /**
   * Returns the number of rows in this table.
   */
  async countRows (filter?: string): Promise<number> {
    return tableCountRows.call(this._tbl, filter)
  }

  /**
@@ -1004,10 +968,10 @@ export class LocalTable<T = number[]> implements Table<T> {
   *
   * @param filter A filter in the same format used by a sql WHERE clause.
   */
  async delete (filter: string): Promise<void> {
    return tableDelete.call(this._tbl, filter).then((newTable: any) => {
      this._tbl = newTable
    })
  }
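Index creation in practice (a sketch; column, num_partitions, and num_sub_vectors are assumed IvfPQIndexConfig fields, and good values depend on the dataset):

await tbl.createIndex({
  type: 'ivf_pq',
  column: 'vector',    // assumed field naming the indexed column
  num_partitions: 256, // IVF partition count (assumption)
  num_sub_vectors: 16  // PQ sub-vector count (assumption)
})
await tbl.createScalarIndex('id') // replace defaults to true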
/**
@@ -1017,65 +981,55 @@ export class LocalTable<T = number[]> implements Table<T> {
   *
   * @returns
   */
  async update (args: UpdateArgs | UpdateSqlArgs): Promise<void> {
    let filter: string | null
    let updates: Record<string, string>

    if ('valuesSql' in args) {
      filter = args.where ?? null
      updates = args.valuesSql
    } else {
      filter = args.where ?? null
      updates = {}
      for (const [key, value] of Object.entries(args.values)) {
        updates[key] = toSQL(value)
      }
    }

    return tableUpdate
      .call(this._tbl, filter, updates)
      .then((newTable: any) => {
        this._tbl = newTable
      })
  }

  async mergeInsert (on: string, data: Array<Record<string, unknown>> | ArrowTable, args: MergeInsertArgs): Promise<void> {
    let whenMatchedUpdateAll = false
    let whenMatchedUpdateAllFilt = null
    if (args.whenMatchedUpdateAll !== undefined && args.whenMatchedUpdateAll !== null) {
      whenMatchedUpdateAll = true
      if (args.whenMatchedUpdateAll !== true) {
        whenMatchedUpdateAllFilt = args.whenMatchedUpdateAll
      }
    }
    const whenNotMatchedInsertAll = args.whenNotMatchedInsertAll ?? false
    let whenNotMatchedBySourceDelete = false
    let whenNotMatchedBySourceDeleteFilt = null
    if (args.whenNotMatchedBySourceDelete !== undefined && args.whenNotMatchedBySourceDelete !== null) {
      whenNotMatchedBySourceDelete = true
      if (args.whenNotMatchedBySourceDelete !== true) {
        whenNotMatchedBySourceDeleteFilt = args.whenNotMatchedBySourceDelete
      }
    }

    const schema = await this.schema
    let tbl: ArrowTable
    if (data instanceof ArrowTable) {
      tbl = data
    } else {
      tbl = makeArrowTable(data, { schema })
    }
    const buffer = await fromTableToBuffer(tbl, this._embeddings, schema)

    this._tbl = await tableMergeInsert.call(
      this._tbl,
@@ -1086,7 +1040,7 @@ export class LocalTable<T = number[]> implements Table<T> {
      whenNotMatchedBySourceDelete,
      whenNotMatchedBySourceDeleteFilt,
      buffer
    )
  }
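The two update shapes side by side (a sketch grounded in the branches above):

// UpdateArgs: literal values, converted through toSQL()
await tbl.update({ where: 'id = 1', values: { price: 10.5 } })

// UpdateSqlArgs: raw SQL expressions evaluated per row
await tbl.update({ where: 'id = 1', valuesSql: { price: 'price * 2' } })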
/**
@@ -1104,16 +1058,16 @@ export class LocalTable<T = number[]> implements Table<T> {
   * uphold this promise can lead to corrupted tables.
   * @returns
   */
  async cleanupOldVersions (
    olderThan?: number,
    deleteUnverified?: boolean
  ): Promise<CleanupStats> {
    return tableCleanupOldVersions
      .call(this._tbl, olderThan, deleteUnverified)
      .then((res: { newTable: any, metrics: CleanupStats }) => {
        this._tbl = res.newTable
        return res.metrics
      })
  }
/**
@@ -1127,64 +1081,62 @@ export class LocalTable<T = number[]> implements Table<T> {
   * for most tables.
   * @returns Metrics about the compaction operation.
   */
  async compactFiles (options?: CompactionOptions): Promise<CompactionMetrics> {
    const optionsArg = options ?? {}
    return tableCompactFiles
      .call(this._tbl, optionsArg)
      .then((res: { newTable: any, metrics: CompactionMetrics }) => {
        this._tbl = res.newTable
        return res.metrics
      })
  }

  async listIndices (): Promise<VectorIndex[]> {
    return tableListIndices.call(this._tbl)
  }

  async indexStats (indexUuid: string): Promise<IndexStats> {
    return tableIndexStats.call(this._tbl, indexUuid)
  }

  get schema (): Promise<Schema> {
    // empty table
    return this.getSchema()
  }

  private async getSchema (): Promise<Schema> {
    const buffer = await tableSchema.call(this._tbl, this._isElectron)
    const table = tableFromIPC(buffer)
    return table.schema
  }

  // See https://github.com/electron/electron/issues/2288
  private checkElectron (): boolean {
    try {
      // eslint-disable-next-line no-prototype-builtins
      return (
        Object.prototype.hasOwnProperty.call(process?.versions, 'electron') ||
        navigator?.userAgent?.toLowerCase()?.includes(' electron')
      )
    } catch (e) {
      return false
    }
  }

  async addColumns (newColumnTransforms: Array<{ name: string, valueSql: string }>): Promise<void> {
    return tableAddColumns.call(this._tbl, newColumnTransforms)
  }

  async alterColumns (columnAlterations: ColumnAlteration[]): Promise<void> {
    return tableAlterColumns.call(this._tbl, columnAlterations)
  }

  async dropColumns (columnNames: string[]): Promise<void> {
    return tableDropColumns.call(this._tbl, columnNames)
  }

  withMiddleware (middleware: HttpMiddleware): Table<T> {
    return this
  }
}
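Maintenance calls in sequence (a sketch; treating olderThan as milliseconds is an assumption, as are the metric shapes logged at the end):

const cleanup = await tbl.cleanupOldVersions(60 * 60 * 1000) // keep the last hour (assumed unit)
const metrics = await tbl.compactFiles({ targetRowsPerFragment: 1024 * 1024 })
console.log(cleanup, metrics) // CleanupStats and CompactionMetrics summaries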
@@ -1207,7 +1159,7 @@ export interface CompactionOptions {
   */
  targetRowsPerFragment?: number
  /**
-   * The maximum number of T per group. Defaults to 1024.
+   * The maximum number of rows per group. Defaults to 1024.
   */
  maxRowsPerGroup?: number
  /**
@@ -1307,21 +1259,21 @@ export interface IvfPQIndexConfig {
   */
  index_cache_size?: number

  type: 'ivf_pq'
}

export type VectorIndexParams = IvfPQIndexConfig

/**
 * Write mode for writing a table.
 */
export enum WriteMode {
  /** Create a new {@link Table}. */
  Create = 'create',
  /** Overwrite the existing {@link Table} if presented. */
  Overwrite = 'overwrite',
  /** Append new data to the table. */
  Append = 'append',
}

/**
@@ -1333,14 +1285,14 @@ export interface WriteOptions {
}

export class DefaultWriteOptions implements WriteOptions {
  writeMode = WriteMode.Create
}

export function isWriteOptions (value: any): value is WriteOptions {
  return (
    Object.keys(value).length === 1 &&
    (value.writeMode === undefined || typeof value.writeMode === 'string')
  )
}

/**
@@ -1350,15 +1302,15 @@ export enum MetricType {
  /**
   * Euclidean distance
   */
  L2 = 'l2',

  /**
   * Cosine distance
   */
  Cosine = 'cosine',

  /**
   * Dot product
   */
  Dot = 'dot',
}
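How the write-mode plumbing is exercised from createTable (a sketch); a bare { writeMode: ... } object satisfies isWriteOptions because it has a single key holding a string:

const tbl = await db.createTable('items', rows, { writeMode: WriteMode.Overwrite })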
@@ -51,7 +51,7 @@ describe('LanceDB Mirrored Store Integration test', function () {

  const dir = tmpdir()
  console.log(dir)
-  const conn = await lancedb.connect({ uri: `s3://lancedb-integtest?mirroredStore=${dir}`, storageOptions: { allowHttp: 'true' } })
+  const conn = await lancedb.connect(`s3://lancedb-integtest?mirroredStore=${dir}`)
  const data = Array(200).fill({ vector: Array(128).fill(1.0), id: 0 })
  data.push(...Array(200).fill({ vector: Array(128).fill(1.0), id: 1 }))
  data.push(...Array(200).fill({ vector: Array(128).fill(1.0), id: 2 }))
@@ -38,7 +38,7 @@ export class Query<T = number[]> {
  constructor (query?: T, tbl?: any, embeddings?: EmbeddingFunction<T>) {
    this._tbl = tbl
    this._query = query
-    this._limit = 10
+    this._limit = undefined
    this._nprobes = 20
    this._refineFactor = undefined
    this._select = undefined
@@ -50,7 +50,6 @@ export class Query<T = number[]> {

  /***
   * Sets the number of results that will be returned
-   * default value is 10
   * @param value number of results
   */
  limit (value: number): Query<T> {
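A query chain as the constructor defaults imply (a sketch; execute() as the terminal call is assumed from the public Query API and is not shown in this hunk):

const results = await tbl
  .search([0.1, 0.2]) // vector query
  .limit(25)          // overrides the constructor default
  .execute()          // assumed terminal call returning the rows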
@@ -103,22 +103,6 @@ function toLanceRes (res: AxiosResponse): RemoteResponse {
  }
}

-async function decodeErrorData(
-  res: RemoteResponse,
-  responseType?: ResponseType
-): Promise<string> {
-  const errorData = await res.body()
-  if (responseType === 'arraybuffer') {
-    return new TextDecoder().decode(errorData)
-  } else {
-    if (typeof errorData === 'object') {
-      return JSON.stringify(errorData)
-    }
-    return errorData
-  }
-}
-
export class HttpLancedbClient {
  private readonly _url: string
  private readonly _apiKey: () => string
@@ -196,7 +180,7 @@ export class HttpLancedbClient {
  }

  if (response.status !== 200) {
-    const errorData = await decodeErrorData(response)
+    const errorData = new TextDecoder().decode(await response.body())
    throw new Error(
      `Server Error, status: ${response.status}, ` +
      `message: ${response.statusText}: ${errorData}`
@@ -242,7 +226,7 @@ export class HttpLancedbClient {
  }

  if (response.status !== 200) {
-    const errorData = await decodeErrorData(response, responseType)
+    const errorData = new TextDecoder().decode(await response.body())
    throw new Error(
      `Server Error, status: ${response.status}, ` +
      `message: ${response.statusText}: ${errorData}`

@@ -38,7 +38,7 @@ import {
  fromRecordsToStreamBuffer,
  fromTableToStreamBuffer
} from '../arrow'
-import { toSQL, TTLCache } from '../util'
+import { toSQL } from '../util'
import { type HttpMiddleware } from '../middleware'

/**
@@ -47,7 +47,6 @@ import { type HttpMiddleware } from '../middleware'
export class RemoteConnection implements Connection {
  private _client: HttpLancedbClient
  private readonly _dbName: string
-  private readonly _tableCache = new TTLCache(300_000)

  constructor (opts: ConnectionOptions) {
    if (!opts.uri.startsWith('db://')) {
@@ -90,9 +89,6 @@ export class RemoteConnection implements Connection {
      page_token: pageToken
    })
    const body = await response.body()
-    for (const table of body.tables) {
-      this._tableCache.set(table, true)
-    }
    return body.tables
  }

@@ -105,12 +101,6 @@ export class RemoteConnection implements Connection {
    name: string,
    embeddings?: EmbeddingFunction<T>
  ): Promise<Table<T>> {
-    // check if the table exists
-    if (this._tableCache.get(name) === undefined) {
-      await this._client.post(`/v1/table/${encodeURIComponent(name)}/describe/`)
-      this._tableCache.set(name, true)
-    }
-
    if (embeddings !== undefined) {
      return new RemoteTable(this._client, name, embeddings)
    } else {
@@ -140,9 +130,6 @@ export class RemoteConnection implements Connection {
      schema = nameOrOpts.schema
      embeddings = nameOrOpts.embeddingFunction
      tableName = nameOrOpts.name
-      if (data === undefined) {
-        data = nameOrOpts.data
-      }
    }

    let buffer: Buffer
@@ -169,7 +156,7 @@ export class RemoteConnection implements Connection {
    }

    const res = await this._client.post(
-      `/v1/table/${encodeURIComponent(tableName)}/create/`,
+      `/v1/table/${tableName}/create/`,
      buffer,
      undefined,
      'application/vnd.apache.arrow.stream'
@@ -182,7 +169,6 @@ export class RemoteConnection implements Connection {
      )
    }

-    this._tableCache.set(tableName, true)
    if (embeddings === undefined) {
      return new RemoteTable(this._client, tableName)
    } else {
@@ -191,8 +177,7 @@ export class RemoteConnection implements Connection {
  }

  async dropTable (name: string): Promise<void> {
-    await this._client.post(`/v1/table/${encodeURIComponent(name)}/drop/`)
-    this._tableCache.delete(name)
+    await this._client.post(`/v1/table/${name}/drop/`)
  }

  withMiddleware (middleware: HttpMiddleware): Connection {
@@ -283,7 +268,7 @@ export class RemoteTable<T = number[]> implements Table<T> {

  get schema (): Promise<any> {
    return this._client
-      .post(`/v1/table/${encodeURIComponent(this._name)}/describe/`)
+      .post(`/v1/table/${this._name}/describe/`)
      .then(async (res) => {
        if (res.status !== 200) {
          throw new Error(
@@ -297,7 +282,7 @@ export class RemoteTable<T = number[]> implements Table<T> {
  }

  search (query: T): Query<T> {
-    return new RemoteQuery(query, this._client, encodeURIComponent(this._name)) //, this._embeddings_new)
+    return new RemoteQuery(query, this._client, this._name) //, this._embeddings_new)
  }

  filter (where: string): Query<T> {
@@ -339,7 +324,7 @@ export class RemoteTable<T = number[]> implements Table<T> {

    const buffer = await fromTableToStreamBuffer(tbl, this._embeddings)
    const res = await this._client.post(
-      `/v1/table/${encodeURIComponent(this._name)}/merge_insert/`,
+      `/v1/table/${this._name}/merge_insert/`,
      buffer,
      queryParams,
      'application/vnd.apache.arrow.stream'
@@ -363,7 +348,7 @@ export class RemoteTable<T = number[]> implements Table<T> {

    const buffer = await fromTableToStreamBuffer(tbl, this._embeddings)
    const res = await this._client.post(
-      `/v1/table/${encodeURIComponent(this._name)}/insert/`,
+      `/v1/table/${this._name}/insert/`,
      buffer,
      {
        mode: 'append'
@@ -389,7 +374,7 @@ export class RemoteTable<T = number[]> implements Table<T> {
    }
    const buffer = await fromTableToStreamBuffer(tbl, this._embeddings)
    const res = await this._client.post(
-      `/v1/table/${encodeURIComponent(this._name)}/insert/`,
+      `/v1/table/${this._name}/insert/`,
      buffer,
      {
        mode: 'overwrite'
@@ -436,7 +421,7 @@ export class RemoteTable<T = number[]> implements Table<T> {
      index_cache_size: indexCacheSize
    }
    const res = await this._client.post(
-      `/v1/table/${encodeURIComponent(this._name)}/create_index/`,
+      `/v1/table/${this._name}/create_index/`,
      data
    )
    if (res.status !== 200) {
@@ -457,7 +442,7 @@ export class RemoteTable<T = number[]> implements Table<T> {
      replace: true
    }
    const res = await this._client.post(
-      `/v1/table/${encodeURIComponent(this._name)}/create_scalar_index/`,
+      `/v1/table/${this._name}/create_scalar_index/`,
      data
    )
    if (res.status !== 200) {
@@ -469,15 +454,13 @@ export class RemoteTable<T = number[]> implements Table<T> {
    }
  }

-  async countRows (filter?: string): Promise<number> {
-    const result = await this._client.post(`/v1/table/${encodeURIComponent(this._name)}/count_rows/`, {
-      predicate: filter
-    })
-    return (await result.body())
+  async countRows (): Promise<number> {
+    const result = await this._client.post(`/v1/table/${this._name}/describe/`)
+    return (await result.body())?.stats?.num_rows
  }

  async delete (filter: string): Promise<void> {
-    await this._client.post(`/v1/table/${encodeURIComponent(this._name)}/delete/`, {
+    await this._client.post(`/v1/table/${this._name}/delete/`, {
      predicate: filter
    })
  }
@@ -496,7 +479,7 @@ export class RemoteTable<T = number[]> implements Table<T> {
      updates[key] = toSQL(value)
    }
  }
-  await this._client.post(`/v1/table/${encodeURIComponent(this._name)}/update/`, {
+  await this._client.post(`/v1/table/${this._name}/update/`, {
    predicate: filter,
    updates: Object.entries(updates).map(([key, value]) => [key, value])
  })
@@ -504,7 +487,7 @@
|
||||||
async listIndices (): Promise<VectorIndex[]> {
|
async listIndices (): Promise<VectorIndex[]> {
|
||||||
const results = await this._client.post(
|
const results = await this._client.post(
|
||||||
`/v1/table/${encodeURIComponent(this._name)}/index/list/`
|
`/v1/table/${this._name}/index/list/`
|
||||||
)
|
)
|
||||||
return (await results.body()).indexes?.map((index: any) => ({
|
return (await results.body()).indexes?.map((index: any) => ({
|
||||||
columns: index.columns,
|
columns: index.columns,
|
||||||
@@ -515,7 +498,7 @@ export class RemoteTable<T = number[]> implements Table<T> {
|
|||||||
|
|
||||||
async indexStats (indexUuid: string): Promise<IndexStats> {
|
async indexStats (indexUuid: string): Promise<IndexStats> {
|
||||||
const results = await this._client.post(
|
const results = await this._client.post(
|
||||||
`/v1/table/${encodeURIComponent(this._name)}/index/${indexUuid}/stats/`
|
`/v1/table/${this._name}/index/${indexUuid}/stats/`
|
||||||
)
|
)
|
||||||
const body = await results.body()
|
const body = await results.body()
|
||||||
return {
|
return {
|
||||||
|
|||||||
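The recurring change in the hunks above is whether the table name is percent-encoded before being spliced into the REST path. A minimal sketch of what the encoding buys; the `buildTableUrl` helper is hypothetical, for illustration only:

```ts
// Hypothetical helper showing why a table name should be percent-encoded
// before being interpolated into a REST path segment.
function buildTableUrl(tableName: string, action: string): string {
  // Without encodeURIComponent, a name like "my table/v2?x=1" would leak
  // into the path and query string and change the meaning of the request.
  return `/v1/table/${encodeURIComponent(tableName)}/${action}/`;
}

// "/v1/table/my%20table%2Fv2%3Fx%3D1/describe/" keeps the whole name
// inside a single path segment.
console.log(buildTableUrl("my table/v2?x=1", "describe"));
```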
@@ -32,7 +32,7 @@ import {
   Bool,
   Date_,
   Decimal,
-  type DataType,
+  DataType,
   Dictionary,
   Binary,
   Float32,
@@ -74,12 +74,12 @@ import {
   DurationNanosecond,
   DurationMicrosecond,
   DurationMillisecond,
-  DurationSecond
+  DurationSecond,
 } from "apache-arrow";
 import type { IntBitWidth, TimeBitWidth } from "apache-arrow/type";

 function sanitizeMetadata(
-  metadataLike?: unknown
+  metadataLike?: unknown,
 ): Map<string, string> | undefined {
   if (metadataLike === undefined || metadataLike === null) {
     return undefined;
@@ -90,7 +90,7 @@ function sanitizeMetadata(
   for (const item of metadataLike) {
     if (!(typeof item[0] === "string" || !(typeof item[1] === "string"))) {
       throw Error(
-        "Expected metadata, if present, to be a Map<string, string> but it had non-string keys or values"
+        "Expected metadata, if present, to be a Map<string, string> but it had non-string keys or values",
       );
     }
   }
@@ -105,7 +105,7 @@ function sanitizeInt(typeLike: object) {
     typeof typeLike.isSigned !== "boolean"
   ) {
     throw Error(
-      "Expected an Int Type to have a `bitWidth` and `isSigned` property"
+      "Expected an Int Type to have a `bitWidth` and `isSigned` property",
     );
   }
   return new Int(typeLike.isSigned, typeLike.bitWidth as IntBitWidth);
@@ -128,7 +128,7 @@ function sanitizeDecimal(typeLike: object) {
     typeof typeLike.bitWidth !== "number"
   ) {
     throw Error(
-      "Expected a Decimal Type to have `scale`, `precision`, and `bitWidth` properties"
+      "Expected a Decimal Type to have `scale`, `precision`, and `bitWidth` properties",
     );
   }
   return new Decimal(typeLike.scale, typeLike.precision, typeLike.bitWidth);
@@ -149,7 +149,7 @@ function sanitizeTime(typeLike: object) {
     typeof typeLike.bitWidth !== "number"
   ) {
     throw Error(
-      "Expected a Time type to have `unit` and `bitWidth` properties"
+      "Expected a Time type to have `unit` and `bitWidth` properties",
     );
   }
   return new Time(typeLike.unit, typeLike.bitWidth as TimeBitWidth);
@@ -172,7 +172,7 @@ function sanitizeTypedTimestamp(
     | typeof TimestampNanosecond
     | typeof TimestampMicrosecond
     | typeof TimestampMillisecond
-    | typeof TimestampSecond
+    | typeof TimestampSecond,
 ) {
   let timezone = null;
   if ("timezone" in typeLike && typeof typeLike.timezone === "string") {
@@ -191,7 +191,7 @@ function sanitizeInterval(typeLike: object) {
 function sanitizeList(typeLike: object) {
   if (!("children" in typeLike) || !Array.isArray(typeLike.children)) {
     throw Error(
-      "Expected a List type to have an array-like `children` property"
+      "Expected a List type to have an array-like `children` property",
     );
   }
   if (typeLike.children.length !== 1) {
@@ -203,7 +203,7 @@ function sanitizeList(typeLike: object) {
 function sanitizeStruct(typeLike: object) {
   if (!("children" in typeLike) || !Array.isArray(typeLike.children)) {
     throw Error(
-      "Expected a Struct type to have an array-like `children` property"
+      "Expected a Struct type to have an array-like `children` property",
     );
   }
   return new Struct(typeLike.children.map((child) => sanitizeField(child)));
@@ -216,47 +216,47 @@ function sanitizeUnion(typeLike: object) {
     typeof typeLike.mode !== "number"
   ) {
     throw Error(
-      "Expected a Union type to have `typeIds` and `mode` properties"
+      "Expected a Union type to have `typeIds` and `mode` properties",
     );
   }
   if (!("children" in typeLike) || !Array.isArray(typeLike.children)) {
     throw Error(
-      "Expected a Union type to have an array-like `children` property"
+      "Expected a Union type to have an array-like `children` property",
     );
   }

   return new Union(
     typeLike.mode,
     typeLike.typeIds as any,
-    typeLike.children.map((child) => sanitizeField(child))
+    typeLike.children.map((child) => sanitizeField(child)),
   );
 }

 function sanitizeTypedUnion(
   typeLike: object,
-  UnionType: typeof DenseUnion | typeof SparseUnion
+  UnionType: typeof DenseUnion | typeof SparseUnion,
 ) {
   if (!("typeIds" in typeLike)) {
     throw Error(
-      "Expected a DenseUnion/SparseUnion type to have a `typeIds` property"
+      "Expected a DenseUnion/SparseUnion type to have a `typeIds` property",
     );
   }
   if (!("children" in typeLike) || !Array.isArray(typeLike.children)) {
     throw Error(
-      "Expected a DenseUnion/SparseUnion type to have an array-like `children` property"
+      "Expected a DenseUnion/SparseUnion type to have an array-like `children` property",
     );
   }

   return new UnionType(
     typeLike.typeIds as any,
-    typeLike.children.map((child) => sanitizeField(child))
+    typeLike.children.map((child) => sanitizeField(child)),
   );
 }

 function sanitizeFixedSizeBinary(typeLike: object) {
   if (!("byteWidth" in typeLike) || typeof typeLike.byteWidth !== "number") {
     throw Error(
-      "Expected a FixedSizeBinary type to have a `byteWidth` property"
+      "Expected a FixedSizeBinary type to have a `byteWidth` property",
     );
   }
   return new FixedSizeBinary(typeLike.byteWidth);
@@ -268,7 +268,7 @@ function sanitizeFixedSizeList(typeLike: object) {
   }
   if (!("children" in typeLike) || !Array.isArray(typeLike.children)) {
     throw Error(
-      "Expected a FixedSizeList type to have an array-like `children` property"
+      "Expected a FixedSizeList type to have an array-like `children` property",
     );
   }
   if (typeLike.children.length !== 1) {
@@ -276,14 +276,14 @@ function sanitizeFixedSizeList(typeLike: object) {
   }
   return new FixedSizeList(
     typeLike.listSize,
-    sanitizeField(typeLike.children[0])
+    sanitizeField(typeLike.children[0]),
   );
 }

 function sanitizeMap(typeLike: object) {
   if (!("children" in typeLike) || !Array.isArray(typeLike.children)) {
     throw Error(
-      "Expected a Map type to have an array-like `children` property"
+      "Expected a Map type to have an array-like `children` property",
     );
   }
   if (!("keysSorted" in typeLike) || typeof typeLike.keysSorted !== "boolean") {
@@ -291,7 +291,7 @@ function sanitizeMap(typeLike: object) {
   }
   return new Map_(
     typeLike.children.map((field) => sanitizeField(field)) as any,
-    typeLike.keysSorted
+    typeLike.keysSorted,
   );
 }

@@ -319,7 +319,7 @@ function sanitizeDictionary(typeLike: object) {
     sanitizeType(typeLike.dictionary),
     sanitizeType(typeLike.indices) as any,
     typeLike.id,
-    typeLike.isOrdered
+    typeLike.isOrdered,
   );
 }

@@ -454,7 +454,7 @@ function sanitizeField(fieldLike: unknown): Field {
     !("nullable" in fieldLike)
   ) {
     throw Error(
-      "The field passed in is missing a `type`/`name`/`nullable` property"
+      "The field passed in is missing a `type`/`name`/`nullable` property",
     );
   }
   const type = sanitizeType(fieldLike.type);
@@ -489,7 +489,7 @@ export function sanitizeSchema(schemaLike: unknown): Schema {
   }
   if (!("fields" in schemaLike)) {
     throw Error(
-      "The schema passed in does not appear to be a schema (no 'fields' property)"
+      "The schema passed in does not appear to be a schema (no 'fields' property)",
     );
   }
   let metadata;
@@ -498,11 +498,11 @@ export function sanitizeSchema(schemaLike: unknown): Schema {
   }
   if (!Array.isArray(schemaLike.fields)) {
     throw Error(
-      "The schema passed in had a 'fields' property but it was not an array"
+      "The schema passed in had a 'fields' property but it was not an array",
    );
   }
   const sanitizedFields = schemaLike.fields.map((field) =>
-    sanitizeField(field)
+    sanitizeField(field),
   );
   return new Schema(sanitizedFields, metadata);
 }
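The sanitize helpers above all follow the same shape: structurally validate a plain "type-like" object, then rebuild it with the local apache-arrow classes, so schemas that crossed a package boundary from a different apache-arrow copy still work. A hedged usage sketch for the exported entry point; the module path is an assumption:

```ts
import { Int32, Schema } from "apache-arrow";
// Import path is an assumption; import from wherever sanitizeSchema lives.
import { sanitizeSchema } from "./sanitize";

// A plain object that merely *looks* like an arrow Schema, e.g. one built
// against another apache-arrow version in a different dependency tree.
const schemaLike: unknown = {
  fields: [
    // sanitizeField requires `type`, `name` and `nullable` on each field.
    { name: "id", type: new Int32(), nullable: false },
  ],
  metadata: new Map<string, string>(),
};

// sanitizeSchema validates the shape ("fields" must be an array) and
// rebuilds every Field/DataType with the local apache-arrow classes.
const schema: Schema = sanitizeSchema(schemaLike);
console.log(schema.fields.map((f) => f.name)); // ["id"]
```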
[File diff suppressed because it is too large.]
@@ -42,36 +42,3 @@ export function toSQL (value: Literal): string {
   // eslint-disable-next-line @typescript-eslint/restrict-template-expressions
   throw new Error(`Unsupported value type: ${typeof value} value: (${value})`)
 }
-
-export class TTLCache {
-  private readonly cache: Map<string, { value: any, expires: number }>
-
-  /**
-   * @param ttl Time to live in milliseconds
-   */
-  constructor (private readonly ttl: number) {
-    this.cache = new Map()
-  }
-
-  get (key: string): any | undefined {
-    const entry = this.cache.get(key)
-    if (entry === undefined) {
-      return undefined
-    }
-
-    if (entry.expires < Date.now()) {
-      this.cache.delete(key)
-      return undefined
-    }
-
-    return entry.value
-  }
-
-  set (key: string, value: any): void {
-    this.cache.set(key, { value, expires: Date.now() + this.ttl })
-  }
-
-  delete (key: string): void {
-    this.cache.delete(key)
-  }
-}
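For reference, a short usage sketch of the TTLCache removed above; the behavior follows directly from the class body shown in the diff, assuming TTLCache is in scope:

```ts
// Entries live for `ttl` milliseconds after they are set.
const cache = new TTLCache(10_000); // 10 second TTL

cache.set("table1", true);
console.log(cache.get("table1")); // true while the entry is fresh

setTimeout(() => {
  // Past its expiry, get() evicts the stale entry and returns undefined.
  console.log(cache.get("table1")); // undefined
}, 11_000);
```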
nodejs/.eslintignore (new file, 3 lines)
@@ -0,0 +1,3 @@
+**/dist/**/*
+**/native.js
+**/native.d.ts

nodejs/.gitignore (deleted, vendored, 1 line)
@@ -1 +0,0 @@
-yarn.lock

nodejs/.prettierignore (new symbolic link, 1 line)
@@ -0,0 +1 @@
+.eslintignore
@@ -43,20 +43,29 @@ npm run test

 ### Running lint / format

-LanceDb uses [biome](https://biomejs.dev/) for linting and formatting. if you are using VSCode you will need to install the official [Biome](https://marketplace.visualstudio.com/items?itemName=biomejs.biome) extension.
-To manually lint your code you can run:
+LanceDb uses eslint for linting. VSCode does not need any plugins to use eslint. However, it
+may need some additional configuration. Make sure that eslint.experimental.useFlatConfig is
+set to true. Also, if your vscode root folder is the repo root then you will need to set
+the eslint.workingDirectories to ["nodejs"]. To manually lint your code you can run:

 ```sh
 npm run lint
 ```

-to automatically fix all fixable issues:
+LanceDb uses prettier for formatting. If you are using VSCode you will need to install the
+"Prettier - Code formatter" extension. You should then configure it to be the default formatter
+for typescript and you should enable format on save. To manually check your code's format you
+can run:

 ```sh
-npm run lint-fix
+npm run chkformat
 ```

-If you do not have your workspace root set to the `nodejs` directory, unfortunately the extension will not work. You can still run the linting and formatting commands manually.
+If you need to manually format your code you can run:
+
+```sh
+npx prettier --write .
+```

 ### Generating docs
@@ -13,27 +13,32 @@
 // limitations under the License.

 import {
-  Binary,
-  Bool,
-  DataType,
-  Dictionary,
+  convertToTable,
+  fromTableToBuffer,
+  makeArrowTable,
+  makeEmptyTable,
+} from "../dist/arrow";
+import {
   Field,
   FixedSizeList,
-  Float,
   Float16,
   Float32,
-  Float64,
   Int32,
-  Int64,
-  List,
-  MetadataVersion,
-  Precision,
-  Schema,
-  Struct,
-  type Table,
-  Type,
-  Utf8,
   tableFromIPC,
+  Schema,
+  Float64,
+  type Table,
+  Binary,
+  Bool,
+  Utf8,
+  Struct,
+  List,
+  DataType,
+  Dictionary,
+  Int64,
+  Float,
+  Precision,
+  MetadataVersion,
 } from "apache-arrow";
 import {
   Dictionary as OldDictionary,
@@ -41,25 +46,14 @@ import {
   FixedSizeList as OldFixedSizeList,
   Float32 as OldFloat32,
   Int32 as OldInt32,
-  Schema as OldSchema,
   Struct as OldStruct,
+  Schema as OldSchema,
   TimestampNanosecond as OldTimestampNanosecond,
   Utf8 as OldUtf8,
 } from "apache-arrow-old";
-import {
-  convertToTable,
-  fromTableToBuffer,
-  makeArrowTable,
-  makeEmptyTable,
-} from "../lancedb/arrow";
-import {
-  EmbeddingFunction,
-  FieldOptions,
-  FunctionOptions,
-} from "../lancedb/embedding/embedding_function";
-import { EmbeddingFunctionConfig } from "../lancedb/embedding/registry";
+import { type EmbeddingFunction } from "../dist/embedding/embedding_function";

-// biome-ignore lint/suspicious/noExplicitAny: skip
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
 function sampleRecords(): Array<Record<string, any>> {
   return [
     {
@@ -286,46 +280,23 @@ describe("The function makeArrowTable", function () {
   });
 });

-class DummyEmbedding extends EmbeddingFunction<string> {
-  toJSON(): Partial<FunctionOptions> {
-    return {};
-  }
-
-  async computeSourceEmbeddings(data: string[]): Promise<number[][]> {
-    return data.map(() => [0.0, 0.0]);
-  }
-
-  ndims(): number {
-    return 2;
-  }
-
-  embeddingDataType() {
-    return new Float16();
-  }
-}
-
-class DummyEmbeddingWithNoDimension extends EmbeddingFunction<string> {
-  toJSON(): Partial<FunctionOptions> {
-    return {};
-  }
-
-  embeddingDataType(): Float {
-    return new Float16();
-  }
-
-  async computeSourceEmbeddings(data: string[]): Promise<number[][]> {
+class DummyEmbedding implements EmbeddingFunction<string> {
+  public readonly sourceColumn = "string";
+  public readonly embeddingDimension = 2;
+  public readonly embeddingDataType = new Float16();
+
+  async embed(data: string[]): Promise<number[][]> {
+    return data.map(() => [0.0, 0.0]);
+  }
+}
+
+class DummyEmbeddingWithNoDimension implements EmbeddingFunction<string> {
+  public readonly sourceColumn = "string";
+
+  async embed(data: string[]): Promise<number[][]> {
     return data.map(() => [0.0, 0.0]);
   }
 }
-const dummyEmbeddingConfig: EmbeddingFunctionConfig = {
-  sourceColumn: "string",
-  function: new DummyEmbedding(),
-};
-
-const dummyEmbeddingConfigWithNoDimension: EmbeddingFunctionConfig = {
-  sourceColumn: "string",
-  function: new DummyEmbeddingWithNoDimension(),
-};

 describe("convertToTable", function () {
   it("will infer data types correctly", async function () {
@@ -360,7 +331,7 @@ describe("convertToTable", function () {

   it("will apply embeddings", async function () {
     const records = sampleRecords();
-    const table = await convertToTable(records, dummyEmbeddingConfig);
+    const table = await convertToTable(records, new DummyEmbedding());
     expect(DataType.isFixedSizeList(table.getChild("vector")?.type)).toBe(true);
     expect(table.getChild("vector")?.type.children[0].type.toString()).toEqual(
       new Float16().toString(),
@@ -369,7 +340,7 @@ describe("convertToTable", function () {

   it("will fail if missing the embedding source column", async function () {
     await expect(
-      convertToTable([{ id: 1 }], dummyEmbeddingConfig),
+      convertToTable([{ id: 1 }], new DummyEmbedding()),
     ).rejects.toThrow("'string' was not present");
   });

@@ -380,7 +351,7 @@ describe("convertToTable", function () {
     const table = makeEmptyTable(schema);

     // If the embedding specifies the dimension we are fine
-    await fromTableToBuffer(table, dummyEmbeddingConfig);
+    await fromTableToBuffer(table, new DummyEmbedding());

     // We can also supply a schema and should be ok
     const schemaWithEmbedding = new Schema([
@@ -393,13 +364,13 @@ describe("convertToTable", function () {
     ]);
     await fromTableToBuffer(
       table,
-      dummyEmbeddingConfigWithNoDimension,
+      new DummyEmbeddingWithNoDimension(),
       schemaWithEmbedding,
     );

     // Otherwise we will get an error
     await expect(
-      fromTableToBuffer(table, dummyEmbeddingConfigWithNoDimension),
+      fromTableToBuffer(table, new DummyEmbeddingWithNoDimension()),
     ).rejects.toThrow("does not specify `embeddingDimension`");
   });

@@ -412,7 +383,7 @@ describe("convertToTable", function () {
         false,
       ),
     ]);
-    const table = await convertToTable([], dummyEmbeddingConfig, { schema });
+    const table = await convertToTable([], new DummyEmbedding(), { schema });
     expect(DataType.isFixedSizeList(table.getChild("vector")?.type)).toBe(true);
     expect(table.getChild("vector")?.type.children[0].type.toString()).toEqual(
       new Float16().toString(),
@@ -422,17 +393,16 @@ describe("convertToTable", function () {
   it("will complain if embeddings present but schema missing embedding column", async function () {
     const schema = new Schema([new Field("string", new Utf8(), false)]);
     await expect(
-      convertToTable([], dummyEmbeddingConfig, { schema }),
+      convertToTable([], new DummyEmbedding(), { schema }),
     ).rejects.toThrow("column vector was missing");
   });

   it("will provide a nice error if run twice", async function () {
     const records = sampleRecords();
-    const table = await convertToTable(records, dummyEmbeddingConfig);
+    const table = await convertToTable(records, new DummyEmbedding());

     // fromTableToBuffer will try and apply the embeddings again
     await expect(
-      fromTableToBuffer(table, dummyEmbeddingConfig),
+      fromTableToBuffer(table, new DummyEmbedding()),
     ).rejects.toThrow("already existed");
   });
 });
@@ -468,7 +438,7 @@ describe("when using two versions of arrow", function () {
       new OldField("ts_no_tz", new OldTimestampNanosecond(null)),
     ]),
   ),
-  // biome-ignore lint/suspicious/noExplicitAny: skip
+  // eslint-disable-next-line @typescript-eslint/no-explicit-any
 ]) as any;
 schema.metadataVersion = MetadataVersion.V5;
 const table = makeArrowTable([], { schema });
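The test diff above swaps between two embedding APIs: an abstract EmbeddingFunction base class (toJSON / ndims / computeSourceEmbeddings) on one side and a leaner interface (sourceColumn / embeddingDimension / embeddingDataType / embed) on the other. A minimal sketch of the interface style, mirroring the DummyEmbedding in the hunk; the import path and toy embedding are assumptions:

```ts
import { Float32 } from "apache-arrow";
// Import path is an assumption based on the test imports above.
import { type EmbeddingFunction } from "../dist/embedding/embedding_function";

// The function declares its source column, output dimension and Arrow data
// type as readonly fields, and implements a single embed() method.
class ToyEmbedding implements EmbeddingFunction<string> {
  public readonly sourceColumn = "text";
  public readonly embeddingDimension = 2;
  public readonly embeddingDataType = new Float32();

  async embed(data: string[]): Promise<number[][]> {
    // Toy embedding: [string length, number of spaces] for each input.
    return data.map((s) => [s.length, s.split(" ").length - 1]);
  }
}
```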
@@ -13,13 +13,12 @@
 // limitations under the License.

 import * as tmp from "tmp";
-import { Connection, connect } from "../lancedb";
+import { Connection, connect } from "../dist/index.js";

 describe("when connecting", () => {
   let tmpDir: tmp.DirResult;
-  beforeEach(() => {
-    tmpDir = tmp.dirSync({ unsafeCleanup: true });
-  });
+  beforeEach(() => (tmpDir = tmp.dirSync({ unsafeCleanup: true })));
   afterEach(() => tmpDir.removeCallback());

   it("should connect", async () => {
@@ -1,169 +0,0 @@
-// Copyright 2024 Lance Developers.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-import * as arrow from "apache-arrow";
-import * as arrowOld from "apache-arrow-old";
-
-import * as tmp from "tmp";
-
-import { connect } from "../lancedb";
-import { EmbeddingFunction, LanceSchema } from "../lancedb/embedding";
-import { getRegistry, register } from "../lancedb/embedding/registry";
-
-describe.each([arrow, arrowOld])("LanceSchema", (arrow) => {
-  test("should preserve input order", async () => {
-    const schema = LanceSchema({
-      id: new arrow.Int32(),
-      text: new arrow.Utf8(),
-      vector: new arrow.Float32(),
-    });
-    expect(schema.fields.map((x) => x.name)).toEqual(["id", "text", "vector"]);
-  });
-});
-
-describe("Registry", () => {
-  let tmpDir: tmp.DirResult;
-  beforeEach(() => {
-    tmpDir = tmp.dirSync({ unsafeCleanup: true });
-  });
-
-  afterEach(() => {
-    tmpDir.removeCallback();
-    getRegistry().reset();
-  });
-
-  it("should register a new item to the registry", async () => {
-    @register("mock-embedding")
-    class MockEmbeddingFunction extends EmbeddingFunction<string> {
-      toJSON(): object {
-        return {
-          someText: "hello",
-        };
-      }
-      constructor() {
-        super();
-      }
-      ndims() {
-        return 3;
-      }
-      embeddingDataType(): arrow.Float {
-        return new arrow.Float32();
-      }
-      async computeSourceEmbeddings(data: string[]) {
-        return data.map(() => [1, 2, 3]);
-      }
-    }
-    const func = getRegistry()
-      .get<MockEmbeddingFunction>("mock-embedding")!
-      .create();
-
-    const schema = LanceSchema({
-      id: new arrow.Int32(),
-      text: func.sourceField(new arrow.Utf8()),
-      vector: func.vectorField(),
-    });
-
-    const db = await connect(tmpDir.name);
-    const table = await db.createTable(
-      "test",
-      [
-        { id: 1, text: "hello" },
-        { id: 2, text: "world" },
-      ],
-      { schema },
-    );
-    const expected = [
-      [1, 2, 3],
-      [1, 2, 3],
-    ];
-    const actual = await table.query().toArrow();
-    const vectors = actual
-      .getChild("vector")
-      ?.toArray()
-      .map((x: unknown) => {
-        if (x instanceof arrow.Vector) {
-          return [...x];
-        } else {
-          return x;
-        }
-      });
-    expect(vectors).toEqual(expected);
-  });
-  test("should error if registering with the same name", async () => {
-    class MockEmbeddingFunction extends EmbeddingFunction<string> {
-      toJSON(): object {
-        return {
-          someText: "hello",
-        };
-      }
-      constructor() {
-        super();
-      }
-      ndims() {
-        return 3;
-      }
-      embeddingDataType(): arrow.Float {
-        return new arrow.Float32();
-      }
-      async computeSourceEmbeddings(data: string[]) {
-        return data.map(() => [1, 2, 3]);
-      }
-    }
-    register("mock-embedding")(MockEmbeddingFunction);
-    expect(() => register("mock-embedding")(MockEmbeddingFunction)).toThrow(
-      'Embedding function with alias "mock-embedding" already exists',
-    );
-  });
-  test("schema should contain correct metadata", async () => {
-    class MockEmbeddingFunction extends EmbeddingFunction<string> {
-      toJSON(): object {
-        return {
-          someText: "hello",
-        };
-      }
-      constructor() {
-        super();
-      }
-      ndims() {
-        return 3;
-      }
-      embeddingDataType(): arrow.Float {
-        return new arrow.Float32();
-      }
-      async computeSourceEmbeddings(data: string[]) {
-        return data.map(() => [1, 2, 3]);
-      }
-    }
-    const func = new MockEmbeddingFunction();
-
-    const schema = LanceSchema({
-      id: new arrow.Int32(),
-      text: func.sourceField(new arrow.Utf8()),
-      vector: func.vectorField(),
-    });
-    const expectedMetadata = new Map<string, string>([
-      [
-        "embedding_functions",
-        JSON.stringify([
-          {
-            sourceColumn: "text",
-            vectorColumn: "vector",
-            name: "MockEmbeddingFunction",
-            model: { someText: "hello" },
-          },
-        ]),
-      ],
-    ]);
-    expect(schema.metadata).toEqual(expectedMetadata);
-  });
-});
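The deleted registry test exercises a small API surface: @register(alias) adds an EmbeddingFunction subclass to a global registry, getRegistry().get(alias)!.create() recovers an instance, and LanceSchema wires the source and vector columns into schema metadata. A condensed sketch of that flow, assuming the same import paths and the experimentalDecorators TypeScript option the test relies on:

```ts
import * as arrow from "apache-arrow";
import { EmbeddingFunction, LanceSchema } from "../lancedb/embedding";
import { getRegistry, register } from "../lancedb/embedding/registry";

// Register under an alias; registering the same alias twice throws.
@register("const-embedding")
class ConstEmbedding extends EmbeddingFunction<string> {
  toJSON(): object {
    return {};
  }
  ndims() {
    return 3;
  }
  embeddingDataType(): arrow.Float {
    return new arrow.Float32();
  }
  async computeSourceEmbeddings(data: string[]) {
    // Constant embedding, as in the deleted test.
    return data.map(() => [1, 2, 3]);
  }
}

// Later (even in another module), recover the function from the registry
// and derive a schema whose metadata records the column wiring.
const func = getRegistry().get<ConstEmbedding>("const-embedding")!.create();
const schema = LanceSchema({
  text: func.sourceField(new arrow.Utf8()),
  vector: func.vectorField(),
});
```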
@@ -1,226 +0,0 @@
-// Copyright 2024 Lance Developers.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-/* eslint-disable @typescript-eslint/naming-convention */
-
-import {
-  CreateKeyCommand,
-  KMSClient,
-  ScheduleKeyDeletionCommand,
-} from "@aws-sdk/client-kms";
-import {
-  CreateBucketCommand,
-  DeleteBucketCommand,
-  DeleteObjectCommand,
-  HeadObjectCommand,
-  ListObjectsV2Command,
-  S3Client,
-} from "@aws-sdk/client-s3";
-import { connect } from "../lancedb";
-
-// Skip these tests unless the S3_TEST environment variable is set
-const maybeDescribe = process.env.S3_TEST ? describe : describe.skip;
-
-// These are all keys that are accepted by storage_options
-const CONFIG = {
-  allowHttp: "true",
-  awsAccessKeyId: "ACCESSKEY",
-  awsSecretAccessKey: "SECRETKEY",
-  awsEndpoint: "http://127.0.0.1:4566",
-  awsRegion: "us-east-1",
-};
-
-class S3Bucket {
-  name: string;
-  constructor(name: string) {
-    this.name = name;
-  }
-
-  static s3Client() {
-    return new S3Client({
-      region: CONFIG.awsRegion,
-      credentials: {
-        accessKeyId: CONFIG.awsAccessKeyId,
-        secretAccessKey: CONFIG.awsSecretAccessKey,
-      },
-      endpoint: CONFIG.awsEndpoint,
-    });
-  }
-
-  public static async create(name: string): Promise<S3Bucket> {
-    const client = this.s3Client();
-    // Delete the bucket if it already exists
-    try {
-      await this.deleteBucket(client, name);
-    } catch {
-      // It's fine if the bucket doesn't exist
-    }
-    // biome-ignore lint/style/useNamingConvention: we dont control s3's api
-    await client.send(new CreateBucketCommand({ Bucket: name }));
-    return new S3Bucket(name);
-  }
-
-  public async delete() {
-    const client = S3Bucket.s3Client();
-    await S3Bucket.deleteBucket(client, this.name);
-  }
-
-  static async deleteBucket(client: S3Client, name: string) {
-    // Must delete all objects before we can delete the bucket
-    const objects = await client.send(
-      // biome-ignore lint/style/useNamingConvention: we dont control s3's api
-      new ListObjectsV2Command({ Bucket: name }),
-    );
-    if (objects.Contents) {
-      for (const object of objects.Contents) {
-        await client.send(
-          // biome-ignore lint/style/useNamingConvention: we dont control s3's api
-          new DeleteObjectCommand({ Bucket: name, Key: object.Key }),
-        );
-      }
-    }
-
-    // biome-ignore lint/style/useNamingConvention: we dont control s3's api
-    await client.send(new DeleteBucketCommand({ Bucket: name }));
-  }
-
-  public async assertAllEncrypted(path: string, keyId: string) {
-    const client = S3Bucket.s3Client();
-    const objects = await client.send(
-      // biome-ignore lint/style/useNamingConvention: we dont control s3's api
-      new ListObjectsV2Command({ Bucket: this.name, Prefix: path }),
-    );
-    if (objects.Contents) {
-      for (const object of objects.Contents) {
-        const metadata = await client.send(
-          // biome-ignore lint/style/useNamingConvention: we dont control s3's api
-          new HeadObjectCommand({ Bucket: this.name, Key: object.Key }),
-        );
-        expect(metadata.ServerSideEncryption).toBe("aws:kms");
-        expect(metadata.SSEKMSKeyId).toContain(keyId);
-      }
-    }
-  }
-}
-
-class KmsKey {
-  keyId: string;
-  constructor(keyId: string) {
-    this.keyId = keyId;
-  }
-
-  static kmsClient() {
-    return new KMSClient({
-      region: CONFIG.awsRegion,
-      credentials: {
-        accessKeyId: CONFIG.awsAccessKeyId,
-        secretAccessKey: CONFIG.awsSecretAccessKey,
-      },
-      endpoint: CONFIG.awsEndpoint,
-    });
-  }
-
-  public static async create(): Promise<KmsKey> {
-    const client = this.kmsClient();
-    const key = await client.send(new CreateKeyCommand({}));
-    const keyId = key?.KeyMetadata?.KeyId;
-    if (!keyId) {
-      throw new Error("Failed to create KMS key");
-    }
-    return new KmsKey(keyId);
-  }
-
-  public async delete() {
-    const client = KmsKey.kmsClient();
-    // biome-ignore lint/style/useNamingConvention: we dont control s3's api
-    await client.send(new ScheduleKeyDeletionCommand({ KeyId: this.keyId }));
-  }
-}
-
-maybeDescribe("storage_options", () => {
-  let bucket: S3Bucket;
-  let kmsKey: KmsKey;
-  beforeAll(async () => {
-    bucket = await S3Bucket.create("lancedb");
-    kmsKey = await KmsKey.create();
-  });
-  afterAll(async () => {
-    await kmsKey.delete();
-    await bucket.delete();
-  });
-
-  it("can be used to configure auth and endpoints", async () => {
-    const uri = `s3://${bucket.name}/test`;
-    const db = await connect(uri, { storageOptions: CONFIG });
-
-    let table = await db.createTable("test", [{ a: 1, b: 2 }]);
-
-    let rowCount = await table.countRows();
-    expect(rowCount).toBe(1);
-
-    let tableNames = await db.tableNames();
-    expect(tableNames).toEqual(["test"]);
-
-    table = await db.openTable("test");
-    rowCount = await table.countRows();
-    expect(rowCount).toBe(1);
-
-    await table.add([
-      { a: 2, b: 3 },
-      { a: 3, b: 4 },
-    ]);
-    rowCount = await table.countRows();
-    expect(rowCount).toBe(3);
-
-    await db.dropTable("test");
-
-    tableNames = await db.tableNames();
-    expect(tableNames).toEqual([]);
-  });
-
-  it("can configure encryption at connection and table level", async () => {
-    const uri = `s3://${bucket.name}/test`;
-    let db = await connect(uri, { storageOptions: CONFIG });
-
-    let table = await db.createTable("table1", [{ a: 1, b: 2 }], {
-      storageOptions: {
-        awsServerSideEncryption: "aws:kms",
-        awsSseKmsKeyId: kmsKey.keyId,
-      },
-    });
-
-    let rowCount = await table.countRows();
-    expect(rowCount).toBe(1);
-
-    await table.add([{ a: 2, b: 3 }]);
-
-    await bucket.assertAllEncrypted("test/table1.lance", kmsKey.keyId);
-
-    // Now with encryption settings at connection level
-    db = await connect(uri, {
-      storageOptions: {
-        ...CONFIG,
-        awsServerSideEncryption: "aws:kms",
-        awsSseKmsKeyId: kmsKey.keyId,
-      },
-    });
-    table = await db.createTable("table2", [{ a: 1, b: 2 }]);
-    rowCount = await table.countRows();
-    expect(rowCount).toBe(1);
-
-    await table.add([{ a: 2, b: 3 }]);
-
-    await bucket.assertAllEncrypted("test/table2.lance", kmsKey.keyId);
-  });
-});
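The deleted S3 test boils down to two points: storage options are passed at connect time for auth and endpoint configuration, and can be layered on again per table, for example to enable SSE-KMS encryption. A condensed sketch using the same option keys; the bucket name and key id are placeholders:

```ts
import { connect } from "../lancedb";

async function example() {
  // Connection-level options: credentials, endpoint, region
  // (values mirror the deleted test's localstack-style CONFIG).
  const db = await connect("s3://my-bucket/test", {
    storageOptions: {
      allowHttp: "true",
      awsAccessKeyId: "ACCESSKEY",
      awsSecretAccessKey: "SECRETKEY",
      awsEndpoint: "http://127.0.0.1:4566",
      awsRegion: "us-east-1",
    },
  });

  // Table-level options layer on top of the connection-level ones.
  const table = await db.createTable("table1", [{ a: 1, b: 2 }], {
    storageOptions: {
      awsServerSideEncryption: "aws:kms",
      awsSseKmsKeyId: "my-key-id", // hypothetical KMS key id
    },
  });
  console.log(await table.countRows()); // 1
}
```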
@@ -16,34 +16,23 @@ import * as fs from "fs";
 import * as path from "path";
 import * as tmp from "tmp";

-import * as arrow from "apache-arrow";
-import * as arrowOld from "apache-arrow-old";
+import { Table, connect } from "../dist";

-import { Table, connect } from "../lancedb";
 import {
-  Field,
-  FixedSizeList,
-  Float,
-  Float32,
-  Float64,
-  Int32,
-  Int64,
   Schema,
-  Utf8,
-  makeArrowTable,
-} from "../lancedb/arrow";
-import { EmbeddingFunction, LanceSchema } from "../lancedb/embedding";
-import { getRegistry, register } from "../lancedb/embedding/registry";
-import { Index } from "../lancedb/indices";
+  Field,
+  Float32,
+  Int32,
+  FixedSizeList,
+  Int64,
+  Float64,
+} from "apache-arrow";
+import { makeArrowTable } from "../dist/arrow";
+import { Index } from "../dist/indices";

-// biome-ignore lint/suspicious/noExplicitAny: <explanation>
-describe.each([arrow, arrowOld])("Given a table", (arrow: any) => {
+describe("Given a table", () => {
   let tmpDir: tmp.DirResult;
   let table: Table;
-  const schema = new arrow.Schema([
-    new arrow.Field("id", new arrow.Float64(), true),
-  ]);
+  const schema = new Schema([new Field("id", new Float64(), true)]);
   beforeEach(async () => {
     tmpDir = tmp.dirSync({ unsafeCleanup: true });
     const conn = await connect(tmpDir.name);
@@ -430,186 +419,3 @@ describe("when dealing with versioning", () => {
     );
   });
 });
-
-describe("embedding functions", () => {
-  let tmpDir: tmp.DirResult;
-  beforeEach(() => {
-    tmpDir = tmp.dirSync({ unsafeCleanup: true });
-  });
-  afterEach(() => tmpDir.removeCallback());
-
-  it("should be able to create a table with an embedding function", async () => {
-    class MockEmbeddingFunction extends EmbeddingFunction<string> {
-      toJSON(): object {
-        return {};
-      }
-      ndims() {
-        return 3;
-      }
-      embeddingDataType(): Float {
-        return new Float32();
-      }
-      async computeQueryEmbeddings(_data: string) {
-        return [1, 2, 3];
-      }
-      async computeSourceEmbeddings(data: string[]) {
-        return Array.from({ length: data.length }).fill([
-          1, 2, 3,
-        ]) as number[][];
-      }
-    }
-    const func = new MockEmbeddingFunction();
-    const db = await connect(tmpDir.name);
-    const table = await db.createTable(
-      "test",
-      [
-        { id: 1, text: "hello" },
-        { id: 2, text: "world" },
-      ],
-      {
-        embeddingFunction: {
-          function: func,
-          sourceColumn: "text",
-        },
-      },
-    );
-    // biome-ignore lint/suspicious/noExplicitAny: test
-    const arr = (await table.query().toArray()) as any;
-    expect(arr[0].vector).toBeDefined();
-
-    // we round trip through JSON to make sure the vector properly gets converted to an array
-    // otherwise it'll be a TypedArray or Vector
-    const vector0 = JSON.parse(JSON.stringify(arr[0].vector));
-    expect(vector0).toEqual([1, 2, 3]);
-  });
-
-  it("should be able to create an empty table with an embedding function", async () => {
-    @register()
-    class MockEmbeddingFunction extends EmbeddingFunction<string> {
-      toJSON(): object {
-        return {};
-      }
-      ndims() {
-        return 3;
-      }
-      embeddingDataType(): Float {
-        return new Float32();
-      }
-      async computeQueryEmbeddings(_data: string) {
-        return [1, 2, 3];
-      }
-      async computeSourceEmbeddings(data: string[]) {
-        return Array.from({ length: data.length }).fill([
-          1, 2, 3,
-        ]) as number[][];
-      }
-    }
-    const schema = new Schema([
-      new Field("text", new Utf8(), true),
-      new Field(
-        "vector",
-        new FixedSizeList(3, new Field("item", new Float32(), true)),
-        true,
-      ),
-    ]);
-
-    const func = new MockEmbeddingFunction();
-    const db = await connect(tmpDir.name);
-    const table = await db.createEmptyTable("test", schema, {
-      embeddingFunction: {
-        function: func,
-        sourceColumn: "text",
-      },
-    });
-    const outSchema = await table.schema();
-    expect(outSchema.metadata.get("embedding_functions")).toBeDefined();
-    await table.add([{ text: "hello world" }]);
-
-    // biome-ignore lint/suspicious/noExplicitAny: test
-    const arr = (await table.query().toArray()) as any;
-    expect(arr[0].vector).toBeDefined();
-
-    // we round trip through JSON to make sure the vector properly gets converted to an array
-    // otherwise it'll be a TypedArray or Vector
-    const vector0 = JSON.parse(JSON.stringify(arr[0].vector));
-    expect(vector0).toEqual([1, 2, 3]);
-  });
-  it("should error when appending to a table with an unregistered embedding function", async () => {
-    @register("mock")
-    class MockEmbeddingFunction extends EmbeddingFunction<string> {
-      toJSON(): object {
-        return {};
-      }
-      ndims() {
-        return 3;
-      }
-      embeddingDataType(): Float {
-        return new Float32();
-      }
-      async computeQueryEmbeddings(_data: string) {
-        return [1, 2, 3];
-      }
-      async computeSourceEmbeddings(data: string[]) {
-        return Array.from({ length: data.length }).fill([
-          1, 2, 3,
-        ]) as number[][];
-      }
-    }
-    const func = getRegistry().get<MockEmbeddingFunction>("mock")!.create();
-
-    const schema = LanceSchema({
-      id: new arrow.Float64(),
-      text: func.sourceField(new Utf8()),
-      vector: func.vectorField(),
-    });
-
-    const db = await connect(tmpDir.name);
-    await db.createTable(
-      "test",
-      [
-        { id: 1, text: "hello" },
-        { id: 2, text: "world" },
-      ],
-      {
-        schema,
-      },
-    );
-
-    getRegistry().reset();
-    const db2 = await connect(tmpDir.name);
-
-    const tbl = await db2.openTable("test");
-
-    expect(tbl.add([{ id: 3, text: "hello" }])).rejects.toThrow(
-      `Function "mock" not found in registry`,
-    );
-  });
-});
-
-describe("when optimizing a dataset", () => {
-  let tmpDir: tmp.DirResult;
-  let table: Table;
-  beforeEach(async () => {
-    tmpDir = tmp.dirSync({ unsafeCleanup: true });
-    const con = await connect(tmpDir.name);
-    table = await con.createTable("vectors", [{ id: 1 }]);
-    await table.add([{ id: 2 }]);
-  });
-  afterEach(() => {
-    tmpDir.removeCallback();
-  });
-
-  it("compacts files", async () => {
-    const stats = await table.optimize();
-    expect(stats.compaction.filesAdded).toBe(1);
-    expect(stats.compaction.filesRemoved).toBe(2);
-    expect(stats.compaction.fragmentsAdded).toBe(1);
-    expect(stats.compaction.fragmentsRemoved).toBe(2);
-  });
-
-  it("cleanups old versions", async () => {
-    const stats = await table.optimize({ cleanupOlderThan: new Date() });
-    expect(stats.prune.bytesRemoved).toBeGreaterThan(0);
-    expect(stats.prune.oldVersionsRemoved).toBe(3);
-  });
-});
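The deleted "when optimizing a dataset" test shows the optimize() contract: the call compacts small fragments and, when cleanupOlderThan is given, also prunes old table versions, reporting both in the returned stats. A condensed sketch of that flow; the URI and table name are placeholders:

```ts
import { connect } from "../lancedb";

async function compactAndPrune(uri: string) {
  const db = await connect(uri);
  const table = await db.openTable("vectors");

  // One call compacts fragments and prunes versions older than the cutoff.
  const stats = await table.optimize({ cleanupOlderThan: new Date() });
  console.log(stats.compaction.filesAdded, stats.compaction.filesRemoved);
  console.log(stats.prune.bytesRemoved, stats.prune.oldVersionsRemoved);
}
```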
[Some files were not shown because too many files have changed in this diff.]