Mirror of https://github.com/lancedb/lancedb.git (synced 2025-12-23 05:19:58 +00:00)

Compare commits: python-v0.… → reproducib… (16 commits)
| SHA1 |
|---|
| 9441fde2bb |
| 32163063dc |
| 9a9a73a65d |
| 52fa7f5577 |
| 0cba0f4f92 |
| 8391ffee84 |
| fe8848efb9 |
| 213c313b99 |
| 157e995a43 |
| ab97e5d632 |
| 87e9a0250f |
| e587a17a64 |
| 2f1f9f6338 |
| a34fa4df26 |
| e20979b335 |
| 08689c345d |
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 0.2.3
+current_version = 0.2.4
 commit = True
 message = Bump version: {current_version} → {new_version}
 tag = True
@@ -67,6 +67,11 @@ nav:
 - Home:
 - 🏢 Home: index.md
 - 💡 Basics: basic.md
+- 📚 Guides:
+- Tables: guides/tables.md
+- Vector Search: search.md
+- SQL filters: sql.md
+- Indexing: ann_indexes.md
 - 🧬 Embeddings: embedding.md
 - 🔍 Python full-text search: fts.md
 - 🔌 Integrations:
@@ -91,12 +96,12 @@ nav:
 - Serverless Website Chatbot: examples/serverless_website_chatbot.md
 - YouTube Transcript Search: examples/youtube_transcript_bot_with_nodejs.md
 - TransformersJS Embedding Search: examples/transformerjs_embedding_search_nodejs.md
-- 📚 Guides:
+- Basics: basic.md
+- Guides:
 - Tables: guides/tables.md
 - Vector Search: search.md
 - SQL filters: sql.md
 - Indexing: ann_indexes.md
-- Basics: basic.md
 - Embeddings: embedding.md
 - Python full-text search: fts.md
 - Integrations:
@@ -121,12 +126,6 @@ nav:
 - YouTube Transcript Search: examples/youtube_transcript_bot_with_nodejs.md
 - Serverless Chatbot from any website: examples/serverless_website_chatbot.md
 - TransformersJS Embedding Search: examples/transformerjs_embedding_search_nodejs.md
-
-- Guides:
-- Tables: guides/tables.md
-- Vector Search: search.md
-- SQL filters: sql.md
-- Indexing: ann_indexes.md
 - API references:
 - Python API: python/python.md
 - Javascript API: javascript/modules.md
@@ -1,6 +1,6 @@
 # LanceDB
 
-LanceDB is an open-source database for vector-search built with persistent storage, which greatly simplifies retrevial, filtering and management of embeddings.
+LanceDB is an open-source database for vector-search built with persistent storage, which greatly simplifies retrieval, filtering and management of embeddings.
 
 
 
docs/src/notebooks/reproducibility.ipynb (new file, 1167 lines): diff suppressed because it is too large.
docs/src/notebooks/rick_and_morty_quotes.csv (new file, 62 lines):
@@ -0,0 +1,62 @@
+id,quote,author
+1,"Nobody exists on purpose. Nobody belongs anywhere.",Morty
+2,"We're all going to die. Come watch TV.",Morty
+3,"Losers look stuff up while the rest of us are carpin' all them diems.",Summer
+4,"He's not a hot girl. He can't just bail on his life and set up shop in someone else's.",Beth
+5,"When you are an a—hole, it doesn't matter how right you are. Nobody wants to give you the satisfaction.",Morty
+6,"God's turning people into insect monsters, Beth. I'm the one beating them to death. Thank me.",Jerry
+7,"Camping is just being homeless without the change.",Summer
+8,"This seems like a good time for a drink and a cold, calculated speech with sinister overtones. A speech about politics, about order, brotherhood, power ... but speeches are for campaigning. Now is the time for action.",Morty
+9,"Having a family doesn't mean that you stop being an individual. You know the best thing you can do for the people that depend on you? Be honest with them, even if it means setting them free.",Mr. Meeseeks
+10,"If I've learned one thing, it's that before you get anywhere in life, you gotta stop listening to yourself.",Jerry
+11,"I just want to go back to Hell, where everyone thinks I'm smart and funny.",Mr. Needful
+12,"Hi Mr. Jellybean, I'm Morty. I'm on an adventure with my grandpa.",Morty
+13,"You're not the cause of your parents' misery. You're just a symptom of it.",Summer
+14,"Don't deify the people who leave you.",Beth
+15,"Well, then get your s—t together, get it all together, and put it in a backpack, all your s—t, so it's together. And if you gotta take it somewhere, take it somewhere, you know, take it to the s—t store and sell it, or put it in the s—t museum. I don't care what you do, you just gotta get it together. Get your s—t together.",Morty
+16,"At least the devil has a job!",Summer
+17,"Life is effort and I'll stop when I die!",Jerry
+18,"I just killed my family! I don't care what they were!",Morty
+19,"It's funny to say they are small. It's funny to say they are big.",Shrimply Pibbles
+20,"You're holding me verbally hostage.",Summer
+21,"Honey, stop raising your father's cholesterol so you can take a hot funeral selfie.",Beth
+22,"Rick, when you say you made an exact replica of the house, did you mean, like, an exact replica?",Morty
+23,"Give a gun to the lady who got pregnant with me too early and constantly makes it our problem.",Summer
+24,"Say goodbye to your precious dry land! For soon it will be wet!",Mr. Nimbus
+25,"Nobody's smarter than Rick, but nobody else is my dad. You're a genius at that.",Morty
+26,"B—h, my generation gets traumatized for breakfast.",Summer
+27,"Inception made sense!",Morty
+28,"I realize now I'm attracted to you for the same reason I can't be with you: You can't change. And I have no problem with that, but it clearly means I have a problem with myself.",Unity
+29,"Mr. President, if I've learned one thing today, it's that sometimes you have to not give a f—k!",Morty
+30,"I didn't know freedom meant people doing stuff that sucks.",Summer
+31,"How many of these are just horrible mistakes I made? I mean, maybe I'd stop making so many if I let myself learn from them.",Morty
+32,"I'm a scientist because I invent, transform, create, and destroy for a living. And when I don't like something about the world, I change it.",Rick
+33,"Wubba lubba dub dub!",Rick
+34,"I turned myself into a pickle, Morty! I'm Pickle Rick!",Rick
+35,"I know about the Yosemite T-shirt, Morty.",Rick
+36,"The universe is basically an animal. It grazes on the ordinary. It creates infinite idiots just to eat them.",Rick
+37,"If I die in a cage, I lose a bet.",Rick
+38,"Sometimes science is more art than science.",Rick
+39,"To live is to risk it all—otherwise, you're just an inert chunk of randomly assembled molecules drifting wherever the universe blows you.",Rick
+40,"Welcome to the club, pal.",Rick
+41,"So I have an emo streak. It's part of what makes me so rad.",Rick
+42,"Listen, I'm not the nicest guy in the universe, because I'm the smartest, and being nice is something stupid people do to hedge their bets.",Rick
+43,"Wait a minute! Is that Mountain Dew in my quantum-transport-solution?",Rick
+44,"Listen, Morty, I hate to break it to you, but what people call 'love' is just a chemical reaction that compels animals to breed.",Rick
+45,"Break the cycle, Morty. Rise above. Focus on science.",Rick
+46,"Don't get drawn into the culture, Morty. Stealing stuff is about the stuff, not the stealing.",Rick
+47,"I'm sorry, but your opinion means very little to me.",Rick
+48,"You don't get to tell anyone what's sad. You're like a one-man Mount Sadmore. So I guess like a Lincoln Sadmorial.",Rick
+49,"This pickle doesn't care about your children. I'm not gonna take their dreams. I'm gonna take their parents.",Rick
+50,"I programmed you to believe that.",Rick
+51,"Have fun with empowerment. It seems to make everyone that gets it really happy.",Rick
+52,"Thanks, Mr. Poopybutthole. I always could count on you.",Rick
+53,"Weddings are basically funerals with a cake.",Rick
+54,"I mean, if you spend all day shuffling words around, you can make anything sound bad, Morty.",Rick
+55,"It's your choice to take this personally.",Rick
+56,"Excuse me, coming through. What are you here for? Just kidding, I don't care.",Rick
+57,"If I let you make me nervous, then we can't get schwifty.",Rick
+58,"Oh, boy, so you actually learned something today? What is this, Full House?",Rick
+59,"I can't abide bureaucracy. I don't like being told where to go and what to do. I consider it a violation. Did you get those seeds all the way up your butt?",Rick
+60,"I think you have to think ahead and live in the moment.",Rick
+61,"I know that new situations can be intimidating. You're lookin' around and it's all scary and different, but you know, meeting them head-on, charging into 'em like a bull—that's how we grow as people.",Rick
@@ -28,7 +28,13 @@ pip install lancedb
 
 ::: lancedb.embeddings.with_embeddings
 
-::: lancedb.embeddings.EmbeddingFunction
+::: lancedb.embeddings.functions.EmbeddingFunctionRegistry
+
+::: lancedb.embeddings.functions.EmbeddingFunctionModel
+
+::: lancedb.embeddings.functions.TextEmbeddingFunctionModel
+
+::: lancedb.embeddings.functions.SentenceTransformerEmbeddingFunction
 
 ## Context
 
@@ -8,7 +8,8 @@ excluded_globs = [
 "../src/embedding.md",
 "../src/examples/*.md",
 "../src/integrations/voxel51.md",
-"../src/guides/tables.md"
+"../src/guides/tables.md",
+"../src/python/duckdb.md",
 ]
 
 python_prefix = "py"
node/package-lock.json (generated, 74 lines changed):
@@ -1,12 +1,12 @@
 {
 "name": "vectordb",
-"version": "0.2.3",
+"version": "0.2.4",
 "lockfileVersion": 2,
 "requires": true,
 "packages": {
 "": {
 "name": "vectordb",
-"version": "0.2.3",
+"version": "0.2.4",
 "cpu": [
 "x64",
 "arm64"
@@ -51,11 +51,11 @@
 "typescript": "*"
 },
 "optionalDependencies": {
-"@lancedb/vectordb-darwin-arm64": "0.2.3",
-"@lancedb/vectordb-darwin-x64": "0.2.3",
-"@lancedb/vectordb-linux-arm64-gnu": "0.2.3",
-"@lancedb/vectordb-linux-x64-gnu": "0.2.3",
-"@lancedb/vectordb-win32-x64-msvc": "0.2.3"
+"@lancedb/vectordb-darwin-arm64": "0.2.4",
+"@lancedb/vectordb-darwin-x64": "0.2.4",
+"@lancedb/vectordb-linux-arm64-gnu": "0.2.4",
+"@lancedb/vectordb-linux-x64-gnu": "0.2.4",
+"@lancedb/vectordb-win32-x64-msvc": "0.2.4"
 }
 },
 "node_modules/@apache-arrow/ts": {
@@ -315,9 +315,9 @@
 }
 },
 "node_modules/@lancedb/vectordb-darwin-arm64": {
-"version": "0.2.3",
-"resolved": "https://registry.npmjs.org/@lancedb/vectordb-darwin-arm64/-/vectordb-darwin-arm64-0.2.3.tgz",
-"integrity": "sha512-/9dRCXrV/UsZv3fqAC/Q+D2FPKXMRprcb+a77tt4I0Iy5iGT55UDRfpaXvmJeKquhTJkZ0AuyoK5BmOh7cY41w==",
+"version": "0.2.4",
+"resolved": "https://registry.npmjs.org/@lancedb/vectordb-darwin-arm64/-/vectordb-darwin-arm64-0.2.4.tgz",
+"integrity": "sha512-MqiZXamHYEOfguPsHWLBQ56IabIN6Az8u2Hx8LCyXcxW9gcyJZMSAfJc+CcA4KYHKotv0KsVBhgxZ3kaZQQyiw==",
 "cpu": [
 "arm64"
 ],
@@ -327,9 +327,9 @@
 ]
 },
 "node_modules/@lancedb/vectordb-darwin-x64": {
-"version": "0.2.3",
-"resolved": "https://registry.npmjs.org/@lancedb/vectordb-darwin-x64/-/vectordb-darwin-x64-0.2.3.tgz",
-"integrity": "sha512-p06WkjmdVwDxkH8ghIWh59SCgUhjXBpy1gQISgktouymqfoFbBHz7vmeI6VO1oBA5ji6vSgGZxqjmeLRKM6blA==",
+"version": "0.2.4",
+"resolved": "https://registry.npmjs.org/@lancedb/vectordb-darwin-x64/-/vectordb-darwin-x64-0.2.4.tgz",
+"integrity": "sha512-DzL+mw5WhKDwXdEFlPh8M9zSDhGnfks7NvEh6ZqKbU6znH206YB7g3OA4WfFyV579IIEQ8jd4v/XDthNzQKuSA==",
 "cpu": [
 "x64"
 ],
@@ -339,9 +339,9 @@
 ]
 },
 "node_modules/@lancedb/vectordb-linux-arm64-gnu": {
-"version": "0.2.3",
-"resolved": "https://registry.npmjs.org/@lancedb/vectordb-linux-arm64-gnu/-/vectordb-linux-arm64-gnu-0.2.3.tgz",
-"integrity": "sha512-cSDcJgfbnRmCXZ3AoRWpCAa07PMdB/k8m1LjmxnhpOnP1ohg1eUl99jwPCgd+5GK+iZmezRqbyO+YXlgsCp7GQ==",
+"version": "0.2.4",
+"resolved": "https://registry.npmjs.org/@lancedb/vectordb-linux-arm64-gnu/-/vectordb-linux-arm64-gnu-0.2.4.tgz",
+"integrity": "sha512-LP1nNfIpFxCgcCMlIQdseDX9dZU27TNhCL41xar8euqcetY5uKvi0YqhiVlpNO85Ss1FRQBgQ/GtnOM6Bo7oBQ==",
 "cpu": [
 "arm64"
 ],
@@ -351,9 +351,9 @@
 ]
 },
 "node_modules/@lancedb/vectordb-linux-x64-gnu": {
-"version": "0.2.3",
-"resolved": "https://registry.npmjs.org/@lancedb/vectordb-linux-x64-gnu/-/vectordb-linux-x64-gnu-0.2.3.tgz",
-"integrity": "sha512-AFA3J4hBYapGC37iXheiN6tGruitx5bmoWXkUcDv/qAaE4tizVZHB9cgx9ThTB0RDsvZEOZ5zCy7BOzPH+oCOg==",
+"version": "0.2.4",
+"resolved": "https://registry.npmjs.org/@lancedb/vectordb-linux-x64-gnu/-/vectordb-linux-x64-gnu-0.2.4.tgz",
+"integrity": "sha512-m4RhOI5JJWPU9Ip2LlRIzXu4mwIv9M//OyAuTLiLKRm8726jQHhYi5VFUEtNzqY0o0p6pS0b3XbifYQ+cyJn3Q==",
 "cpu": [
 "x64"
 ],
@@ -363,9 +363,9 @@
 ]
 },
 "node_modules/@lancedb/vectordb-win32-x64-msvc": {
-"version": "0.2.3",
-"resolved": "https://registry.npmjs.org/@lancedb/vectordb-win32-x64-msvc/-/vectordb-win32-x64-msvc-0.2.3.tgz",
-"integrity": "sha512-LI1mz1HdcpNXTM7HbcLdXz0qvUU4LxSqRC7/kMU918VlOeWy/PnryRrjHnCjcgciGzu1rVlvCqRPh7fVwaG6Kg==",
+"version": "0.2.4",
+"resolved": "https://registry.npmjs.org/@lancedb/vectordb-win32-x64-msvc/-/vectordb-win32-x64-msvc-0.2.4.tgz",
+"integrity": "sha512-lMF/2e3YkKWnTYv0R7cUCfjMkAqepNaHSc/dvJzCNsFVEhfDsFdScQFLToARs5GGxnq4fOf+MKpaHg/W6QTxiA==",
 "cpu": [
 "x64"
 ],
@@ -4852,33 +4852,33 @@
 }
 },
 "@lancedb/vectordb-darwin-arm64": {
-"version": "0.2.3",
-"resolved": "https://registry.npmjs.org/@lancedb/vectordb-darwin-arm64/-/vectordb-darwin-arm64-0.2.3.tgz",
-"integrity": "sha512-/9dRCXrV/UsZv3fqAC/Q+D2FPKXMRprcb+a77tt4I0Iy5iGT55UDRfpaXvmJeKquhTJkZ0AuyoK5BmOh7cY41w==",
+"version": "0.2.4",
+"resolved": "https://registry.npmjs.org/@lancedb/vectordb-darwin-arm64/-/vectordb-darwin-arm64-0.2.4.tgz",
+"integrity": "sha512-MqiZXamHYEOfguPsHWLBQ56IabIN6Az8u2Hx8LCyXcxW9gcyJZMSAfJc+CcA4KYHKotv0KsVBhgxZ3kaZQQyiw==",
 "optional": true
 },
 "@lancedb/vectordb-darwin-x64": {
-"version": "0.2.3",
-"resolved": "https://registry.npmjs.org/@lancedb/vectordb-darwin-x64/-/vectordb-darwin-x64-0.2.3.tgz",
-"integrity": "sha512-p06WkjmdVwDxkH8ghIWh59SCgUhjXBpy1gQISgktouymqfoFbBHz7vmeI6VO1oBA5ji6vSgGZxqjmeLRKM6blA==",
+"version": "0.2.4",
+"resolved": "https://registry.npmjs.org/@lancedb/vectordb-darwin-x64/-/vectordb-darwin-x64-0.2.4.tgz",
+"integrity": "sha512-DzL+mw5WhKDwXdEFlPh8M9zSDhGnfks7NvEh6ZqKbU6znH206YB7g3OA4WfFyV579IIEQ8jd4v/XDthNzQKuSA==",
 "optional": true
 },
 "@lancedb/vectordb-linux-arm64-gnu": {
-"version": "0.2.3",
-"resolved": "https://registry.npmjs.org/@lancedb/vectordb-linux-arm64-gnu/-/vectordb-linux-arm64-gnu-0.2.3.tgz",
-"integrity": "sha512-cSDcJgfbnRmCXZ3AoRWpCAa07PMdB/k8m1LjmxnhpOnP1ohg1eUl99jwPCgd+5GK+iZmezRqbyO+YXlgsCp7GQ==",
+"version": "0.2.4",
+"resolved": "https://registry.npmjs.org/@lancedb/vectordb-linux-arm64-gnu/-/vectordb-linux-arm64-gnu-0.2.4.tgz",
+"integrity": "sha512-LP1nNfIpFxCgcCMlIQdseDX9dZU27TNhCL41xar8euqcetY5uKvi0YqhiVlpNO85Ss1FRQBgQ/GtnOM6Bo7oBQ==",
 "optional": true
 },
 "@lancedb/vectordb-linux-x64-gnu": {
-"version": "0.2.3",
-"resolved": "https://registry.npmjs.org/@lancedb/vectordb-linux-x64-gnu/-/vectordb-linux-x64-gnu-0.2.3.tgz",
-"integrity": "sha512-AFA3J4hBYapGC37iXheiN6tGruitx5bmoWXkUcDv/qAaE4tizVZHB9cgx9ThTB0RDsvZEOZ5zCy7BOzPH+oCOg==",
+"version": "0.2.4",
+"resolved": "https://registry.npmjs.org/@lancedb/vectordb-linux-x64-gnu/-/vectordb-linux-x64-gnu-0.2.4.tgz",
+"integrity": "sha512-m4RhOI5JJWPU9Ip2LlRIzXu4mwIv9M//OyAuTLiLKRm8726jQHhYi5VFUEtNzqY0o0p6pS0b3XbifYQ+cyJn3Q==",
 "optional": true
 },
 "@lancedb/vectordb-win32-x64-msvc": {
-"version": "0.2.3",
-"resolved": "https://registry.npmjs.org/@lancedb/vectordb-win32-x64-msvc/-/vectordb-win32-x64-msvc-0.2.3.tgz",
-"integrity": "sha512-LI1mz1HdcpNXTM7HbcLdXz0qvUU4LxSqRC7/kMU918VlOeWy/PnryRrjHnCjcgciGzu1rVlvCqRPh7fVwaG6Kg==",
+"version": "0.2.4",
+"resolved": "https://registry.npmjs.org/@lancedb/vectordb-win32-x64-msvc/-/vectordb-win32-x64-msvc-0.2.4.tgz",
+"integrity": "sha512-lMF/2e3YkKWnTYv0R7cUCfjMkAqepNaHSc/dvJzCNsFVEhfDsFdScQFLToARs5GGxnq4fOf+MKpaHg/W6QTxiA==",
 "optional": true
 },
 "@neon-rs/cli": {
@@ -1,6 +1,6 @@
 {
 "name": "vectordb",
-"version": "0.2.3",
+"version": "0.2.4",
 "description": " Serverless, low-latency vector database for AI applications",
 "main": "dist/index.js",
 "types": "dist/index.d.ts",
@@ -78,10 +78,10 @@
 }
 },
 "optionalDependencies": {
-"@lancedb/vectordb-darwin-arm64": "0.2.3",
-"@lancedb/vectordb-darwin-x64": "0.2.3",
-"@lancedb/vectordb-linux-arm64-gnu": "0.2.3",
-"@lancedb/vectordb-linux-x64-gnu": "0.2.3",
-"@lancedb/vectordb-win32-x64-msvc": "0.2.3"
+"@lancedb/vectordb-darwin-arm64": "0.2.4",
+"@lancedb/vectordb-darwin-x64": "0.2.4",
+"@lancedb/vectordb-linux-arm64-gnu": "0.2.4",
+"@lancedb/vectordb-linux-x64-gnu": "0.2.4",
+"@lancedb/vectordb-win32-x64-msvc": "0.2.4"
 }
 }
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 0.2.1
+current_version = 0.2.2
 commit = True
 message = [python] Bump version: {current_version} → {new_version}
 tag = True
@@ -31,9 +31,13 @@ def connect(
     ----------
     uri: str or Path
         The uri of the database.
-    api_token: str, optional
+    api_key: str, optional
         If presented, connect to LanceDB cloud.
         Otherwise, connect to a database on file system or cloud storage.
+    region: str, default "us-west-2"
+        The region to use for LanceDB Cloud.
+    host_override: str, optional
+        The override url for LanceDB Cloud.
 
     Examples
     --------
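A minimal sketch of how the renamed and newly documented parameters might be called; the `db://` URI form and the key value are illustrative assumptions, not taken from this diff:

```python
import lancedb

# Local (or object-store) database: only the URI is needed.
db = lancedb.connect("./lancedb-data")

# LanceDB Cloud: `api_key` (previously `api_token`), plus the newly
# documented `region` and `host_override` options.
cloud_db = lancedb.connect(
    "db://my-project",   # assumed Cloud URI form, for illustration only
    api_key="sk-...",    # placeholder key
    region="us-west-2",  # default per the docstring above
)
```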
@@ -1,7 +1,10 @@
 import os
 
+import pyarrow as pa
 import pytest
 
+from lancedb.embeddings import EmbeddingFunctionModel, EmbeddingFunctionRegistry
+
 # import lancedb so we don't have to in every example
 
 
@@ -14,3 +17,22 @@ def doctest_setup(monkeypatch, tmpdir):
     monkeypatch.setitem(os.environ, "COLUMNS", "80")
     # Work in a temporary directory
     monkeypatch.chdir(tmpdir)
+
+
+registry = EmbeddingFunctionRegistry.get_instance()
+
+
+@registry.register()
+class MockEmbeddingFunction(EmbeddingFunctionModel):
+    def __call__(self, data):
+        if isinstance(data, str):
+            data = [data]
+        elif isinstance(data, pa.ChunkedArray):
+            data = data.combine_chunks().to_pylist()
+        elif isinstance(data, pa.Array):
+            data = data.to_pylist()
+
+        return [self.embed(row) for row in data]
+
+    def embed(self, row):
+        return [float(hash(c)) for c in row[:10]]
@@ -16,12 +16,13 @@ from __future__ import annotations
 import os
 from abc import ABC, abstractmethod
 from pathlib import Path
-from typing import Optional
+from typing import List, Optional, Union
 
 import pyarrow as pa
 from pyarrow import fs
 
 from .common import DATA, URI
+from .embeddings import EmbeddingFunctionModel
 from .pydantic import LanceModel
 from .table import LanceTable, Table
 from .util import fs_from_uri, get_uri_location, get_uri_scheme
@@ -40,7 +41,7 @@ class DBConnection(ABC):
         self,
         name: str,
         data: Optional[DATA] = None,
-        schema: Optional[pa.Schema, LanceModel] = None,
+        schema: Optional[Union[pa.Schema, LanceModel]] = None,
         mode: str = "create",
         on_bad_vectors: str = "error",
         fill_value: float = 0.0,
@@ -285,10 +286,11 @@ class LanceDBConnection(DBConnection):
         self,
         name: str,
         data: Optional[DATA] = None,
-        schema: Optional[pa.Schema, LanceModel] = None,
+        schema: Optional[Union[pa.Schema, LanceModel]] = None,
         mode: str = "create",
         on_bad_vectors: str = "error",
         fill_value: float = 0.0,
+        embedding_functions: Optional[List[EmbeddingFunctionModel]] = None,
     ) -> LanceTable:
         """Create a table in the database.
 
@@ -307,6 +309,7 @@ class LanceDBConnection(DBConnection):
             mode=mode,
             on_bad_vectors=on_bad_vectors,
             fill_value=fill_value,
+            embedding_functions=embedding_functions,
         )
         return tbl
 
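A hedged sketch of the new `embedding_functions` keyword on `create_table`. It assumes the write path carries these configs into the table's Arrow metadata so the vector column can be populated from the source column, as the later `table.py` hunks suggest; table and column names are illustrative, and `SentenceTransformerEmbeddingFunction` requires the sentence-transformers package.

```python
import lancedb
from lancedb.embeddings import SentenceTransformerEmbeddingFunction

db = lancedb.connect("./lancedb-data")

# Which column to embed (source) and where to store the vectors.
func = SentenceTransformerEmbeddingFunction(
    source_column="quote", vector_column="vector"
)

# The new keyword on LanceDBConnection.create_table.
tbl = db.create_table(
    "quotes",
    data=[{"id": 1, "quote": "Nobody exists on purpose."}],
    embedding_functions=[func],
)
```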
python/lancedb/embeddings/__init__.py (new file, 22 lines):
@@ -0,0 +1,22 @@
+# Copyright (c) 2023. LanceDB Developers
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from .functions import (
+    REGISTRY,
+    EmbeddingFunctionModel,
+    EmbeddingFunctionRegistry,
+    SentenceTransformerEmbeddingFunction,
+    TextEmbeddingFunctionModel,
+)
+from .utils import with_embeddings
python/lancedb/embeddings/functions.py (new file, 228 lines):
@@ -0,0 +1,228 @@
+# Copyright (c) 2023. LanceDB Developers
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import json
+from abc import ABC, abstractmethod
+from typing import List, Optional, Union
+
+import numpy as np
+import pyarrow as pa
+from cachetools import cached
+from pydantic import BaseModel
+
+
+class EmbeddingFunctionRegistry:
+    """
+    This is a singleton class used to register embedding functions
+    and fetch them by name. It also handles serializing and deserializing
+    """
+
+    @classmethod
+    def get_instance(cls):
+        return REGISTRY
+
+    def __init__(self):
+        self._functions = {}
+
+    def register(self):
+        """
+        This creates a decorator that can be used to register
+        an EmbeddingFunctionModel.
+        """
+
+        # This is a decorator for a class that inherits from BaseModel
+        # It adds the class to the registry
+        def decorator(cls):
+            if not issubclass(cls, EmbeddingFunctionModel):
+                raise TypeError("Must be a subclass of EmbeddingFunctionModel")
+            if cls.__name__ in self._functions:
+                raise KeyError(f"{cls.__name__} was already registered")
+            self._functions[cls.__name__] = cls
+            return cls
+
+        return decorator
+
+    def reset(self):
+        """
+        Reset the registry to its initial state
+        """
+        self._functions = {}
+
+    def load(self, name: str):
+        """
+        Fetch an embedding function class by name
+        """
+        return self._functions[name]
+
+    def parse_functions(self, metadata: Optional[dict]) -> dict:
+        """
+        Parse the metadata from an arrow table and
+        return a mapping of the vector column to the
+        embedding function and source column
+
+        Parameters
+        ----------
+        metadata : Optional[dict]
+            The metadata from an arrow table. Note that
+            the keys and values are bytes.
+
+        Returns
+        -------
+        functions : dict
+            A mapping of vector column name to embedding function.
+            An empty dict is returned if input is None or does not
+            contain b"embedding_functions".
+        """
+        if metadata is None or b"embedding_functions" not in metadata:
+            return {}
+        serialized = metadata[b"embedding_functions"]
+        raw_list = json.loads(serialized.decode("utf-8"))
+        functions = {}
+        for obj in raw_list:
+            model = self.load(obj["schema"]["title"])
+            functions[obj["model"]["vector_column"]] = model(**obj["model"])
+        return functions
+
+    def function_to_metadata(self, func):
+        """
+        Convert the given embedding function and source / vector column configs
+        into a config dictionary that can be serialized into arrow metadata
+        """
+        schema = func.model_json_schema()
+        json_data = func.model_dump()
+        return {
+            "schema": schema,
+            "model": json_data,
+        }
+
+    def get_table_metadata(self, func_list):
+        """
+        Convert a list of embedding functions and source / vector column configs
+        into a config dictionary that can be serialized into arrow metadata
+        """
+        json_data = [self.function_to_metadata(func) for func in func_list]
+        # Note that metadata dictionary values must be bytes so we need to json dump then utf8 encode
+        metadata = json.dumps(json_data, indent=2).encode("utf-8")
+        return {"embedding_functions": metadata}
+
+
+REGISTRY = EmbeddingFunctionRegistry()
+
+
+class EmbeddingFunctionModel(BaseModel, ABC):
+    """
+    A callable ABC for embedding functions
+    """
+
+    source_column: Optional[str]
+    vector_column: str
+
+    @abstractmethod
+    def __call__(self, *args, **kwargs) -> List[np.array]:
+        pass
+
+
+TEXT = Union[str, List[str], pa.Array, pa.ChunkedArray, np.ndarray]
+
+
+class TextEmbeddingFunctionModel(EmbeddingFunctionModel):
+    """
+    A callable ABC for embedding functions that take text as input
+    """
+
+    def __call__(self, texts: TEXT, *args, **kwargs) -> List[np.array]:
+        texts = self.sanitize_input(texts)
+        return self.generate_embeddings(texts)
+
+    def sanitize_input(self, texts: TEXT) -> Union[List[str], np.ndarray]:
+        """
+        Sanitize the input to the embedding function. This is called
+        before generate_embeddings() and is useful for stripping
+        whitespace, lowercasing, etc.
+        """
+        if isinstance(texts, str):
+            texts = [texts]
+        elif isinstance(texts, pa.Array):
+            texts = texts.to_pylist()
+        elif isinstance(texts, pa.ChunkedArray):
+            texts = texts.combine_chunks().to_pylist()
+        return texts
+
+    @abstractmethod
+    def generate_embeddings(
+        self, texts: Union[List[str], np.ndarray]
+    ) -> List[np.array]:
+        """
+        Generate the embeddings for the given texts
+        """
+        pass
+
+
+@REGISTRY.register()
+class SentenceTransformerEmbeddingFunction(TextEmbeddingFunctionModel):
+    """
+    An embedding function that uses the sentence-transformers library
+    """
+
+    name: str = "all-MiniLM-L6-v2"
+    device: str = "cpu"
+    normalize: bool = False
+
+    @property
+    def embedding_model(self):
+        """
+        Get the sentence-transformers embedding model specified by the
+        name and device. This is cached so that the model is only loaded
+        once per process.
+        """
+        return self.__class__.get_embedding_model(self.name, self.device)
+
+    def generate_embeddings(
+        self, texts: Union[List[str], np.ndarray]
+    ) -> List[np.array]:
+        """
+        Get the embeddings for the given texts
+
+        Parameters
+        ----------
+        texts: list[str] or np.ndarray (of str)
+            The texts to embed
+        """
+        return self.embedding_model.encode(
+            list(texts),
+            convert_to_numpy=True,
+            normalize_embeddings=self.normalize,
+        ).tolist()
+
+    @classmethod
+    @cached(cache={})
+    def get_embedding_model(cls, name, device):
+        """
+        Get the sentence-transformers embedding model specified by the
+        name and device. This is cached so that the model is only loaded
+        once per process.
+
+        Parameters
+        ----------
+        name : str
+            The name of the model to load
+        device : str
+            The device to load the model on
+
+        TODO: use lru_cache instead with a reasonable/configurable maxsize
+        """
+        try:
+            from sentence_transformers import SentenceTransformer
+
+            return SentenceTransformer(name, device=device)
+        except ImportError:
+            raise ValueError("Please install sentence_transformers")
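A small sketch of the registration pattern this module introduces, mirroring the `MockEmbeddingFunction` used in `conftest.py`; the hash-based "embedding" is a toy stand-in, not a real model, and the class/column names are hypothetical.

```python
from typing import List, Union

import numpy as np

from lancedb.embeddings import EmbeddingFunctionRegistry, TextEmbeddingFunctionModel

registry = EmbeddingFunctionRegistry.get_instance()


@registry.register()
class HashEmbeddingFunction(TextEmbeddingFunctionModel):
    """Toy embedding: hash the first 8 characters of each input string."""

    def generate_embeddings(
        self, texts: Union[List[str], np.ndarray]
    ) -> List[np.array]:
        return [[float(hash(c)) for c in t[:8].ljust(8)] for t in texts]


func = HashEmbeddingFunction(source_column="quote", vector_column="vector")
metadata = registry.get_table_metadata([func])  # JSON-encoded bytes for Arrow schema metadata
vectors = func("Wubba lubba dub dub!")          # __call__ sanitizes the input, then embeds
```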
@@ -1,4 +1,4 @@
-# Copyright 2023 LanceDB Developers
+# Copyright (c) 2023. LanceDB Developers
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -20,7 +20,7 @@ import pyarrow as pa
 from lance.vector import vec_to_table
 from retry import retry
 
-from .util import safe_import_pandas
+from ..util import safe_import_pandas
 
 pd = safe_import_pandas()
 DATA = Union[pa.Table, "pd.DataFrame"]
@@ -58,7 +58,7 @@ def with_embeddings(
     pa.Table
         The input table with a new column called "vector" containing the embeddings.
     """
-    func = EmbeddingFunction(func)
+    func = FunctionWrapper(func)
     if wrap_api:
         func = func.retry().rate_limit()
     func = func.batch_size(batch_size)
@@ -71,7 +71,11 @@
     return data.append_column("vector", table["vector"])


-class EmbeddingFunction:
+class FunctionWrapper:
+    """
+    A wrapper for embedding functions that adds rate limiting, retries, and batching.
+    """
+
    def __init__(self, func: Callable):
        self.func = func
        self.rate_limiter_kwargs = {}
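The rename only touches the internal wrapper class; `with_embeddings` keeps its public behaviour. A hedged usage sketch with a toy embedding callable; the `column` and `wrap_api` keyword names are assumptions recalled from this API rather than shown in the diff.

```python
import pandas as pd

from lancedb.embeddings import with_embeddings


def embed_batch(texts):
    # Toy embedding: 4-dim vectors from character codes (stand-in for a real model/API).
    return [[float(ord(c)) for c in t[:4].ljust(4)] for t in texts]


df = pd.DataFrame({"text": ["hello world", "goodbye moon"]})

# Internally wraps embed_batch in FunctionWrapper (formerly EmbeddingFunction),
# batches the source column, and appends a "vector" column to the result.
table = with_embeddings(embed_batch, df, column="text", wrap_api=False)
```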
@@ -13,6 +13,7 @@
 
 from __future__ import annotations
 
+from abc import ABC, abstractmethod
 from typing import List, Literal, Optional, Type, Union
 
 import numpy as np
@@ -54,7 +55,164 @@ class Query(pydantic.BaseModel):
     refine_factor: Optional[int] = None
 
 
-class LanceQueryBuilder:
+class LanceQueryBuilder(ABC):
+    @classmethod
+    def create(
+        cls,
+        table: "lancedb.table.Table",
+        query: Optional[Union[np.ndarray, str]],
+        query_type: str,
+        vector_column_name: str,
+    ) -> LanceQueryBuilder:
+        if query is None:
+            return LanceEmptyQueryBuilder(table)
+
+        query, query_type = cls._resolve_query(
+            table, query, query_type, vector_column_name
+        )
+
+        if isinstance(query, str):
+            # fts
+            return LanceFtsQueryBuilder(table, query)
+
+        if isinstance(query, list):
+            query = np.array(query, dtype=np.float32)
+        elif isinstance(query, np.ndarray):
+            query = query.astype(np.float32)
+        else:
+            raise TypeError(f"Unsupported query type: {type(query)}")
+
+        return LanceVectorQueryBuilder(table, query, vector_column_name)
+
+    @classmethod
+    def _resolve_query(cls, table, query, query_type, vector_column_name):
+        # If query_type is fts, then query must be a string.
+        # otherwise raise TypeError
+        if query_type == "fts":
+            if not isinstance(query, str):
+                raise TypeError(
+                    f"Query type is 'fts' but query is not a string: {type(query)}"
+                )
+            return query, query_type
+        elif query_type == "vector":
+            # If query_type is vector, then query must be a list or np.ndarray.
+            # otherwise raise TypeError
+            if not isinstance(query, (list, np.ndarray)):
+                raise TypeError(
+                    f"Query type is 'vector' but query is not a list or np.ndarray: {type(query)}"
+                )
+            return query, query_type
+        elif query_type == "auto":
+            if isinstance(query, (list, np.ndarray)):
+                return query, "vector"
+            elif isinstance(query, str):
+                func = table.embedding_functions.get(vector_column_name, None)
+                if func is not None:
+                    query = func(query)[0]
+                    return query, "vector"
+                else:
+                    return query, "fts"
+            else:
+                raise TypeError("Query must be a list, np.ndarray, or str")
+        else:
+            raise ValueError(
+                f"Invalid query_type, must be 'vector', 'fts', or 'auto': {query_type}"
+            )
+
+    def __init__(self, table: "lancedb.table.Table"):
+        self._table = table
+        self._limit = 10
+        self._columns = None
+        self._where = None
+
+    def to_df(self) -> "pd.DataFrame":
+        """
+        Execute the query and return the results as a pandas DataFrame.
+        In addition to the selected columns, LanceDB also returns a vector
+        and also the "_distance" column which is the distance between the query
+        vector and the returned vector.
+        """
+        return self.to_arrow().to_pandas()
+
+    @abstractmethod
+    def to_arrow(self) -> pa.Table:
+        """
+        Execute the query and return the results as an
+        [Apache Arrow Table](https://arrow.apache.org/docs/python/generated/pyarrow.Table.html#pyarrow.Table).
+
+        In addition to the selected columns, LanceDB also returns a vector
+        and also the "_distance" column which is the distance between the query
+        vector and the returned vectors.
+        """
+        raise NotImplementedError
+
+    def to_pydantic(self, model: Type[LanceModel]) -> List[LanceModel]:
+        """Return the table as a list of pydantic models.
+
+        Parameters
+        ----------
+        model: Type[LanceModel]
+            The pydantic model to use.
+
+        Returns
+        -------
+        List[LanceModel]
+        """
+        return [
+            model(**{k: v for k, v in row.items() if k in model.field_names()})
+            for row in self.to_arrow().to_pylist()
+        ]
+
+    def limit(self, limit: int) -> LanceVectorQueryBuilder:
+        """Set the maximum number of results to return.
+
+        Parameters
+        ----------
+        limit: int
+            The maximum number of results to return.
+
+        Returns
+        -------
+        LanceVectorQueryBuilder
+            The LanceQueryBuilder object.
+        """
+        self._limit = limit
+        return self
+
+    def select(self, columns: list) -> LanceVectorQueryBuilder:
+        """Set the columns to return.
+
+        Parameters
+        ----------
+        columns: list
+            The columns to return.
+
+        Returns
+        -------
+        LanceVectorQueryBuilder
+            The LanceQueryBuilder object.
+        """
+        self._columns = columns
+        return self
+
+    def where(self, where: str) -> LanceVectorQueryBuilder:
+        """Set the where clause.
+
+        Parameters
+        ----------
+        where: str
+            The where clause.
+
+        Returns
+        -------
+        LanceVectorQueryBuilder
+            The LanceQueryBuilder object.
+        """
+        self._where = where
+        return self
+
+
+class LanceVectorQueryBuilder(LanceQueryBuilder):
     """
     A builder for nearest neighbor queries for LanceDB.
 
@@ -80,68 +238,17 @@ class LanceQueryBuilder:
     def __init__(
         self,
         table: "lancedb.table.Table",
-        query: Union[np.ndarray, str],
+        query: Union[np.ndarray, list],
         vector_column: str = VECTOR_COLUMN_NAME,
     ):
+        super().__init__(table)
+        self._query = query
         self._metric = "L2"
         self._nprobes = 20
         self._refine_factor = None
-        self._table = table
-        self._query = query
-        self._limit = 10
-        self._columns = None
-        self._where = None
         self._vector_column = vector_column
 
-    def limit(self, limit: int) -> LanceQueryBuilder:
-        """Set the maximum number of results to return.
-
-        Parameters
-        ----------
-        limit: int
-            The maximum number of results to return.
-
-        Returns
-        -------
-        LanceQueryBuilder
-            The LanceQueryBuilder object.
-        """
-        self._limit = limit
-        return self
-
-    def select(self, columns: list) -> LanceQueryBuilder:
-        """Set the columns to return.
-
-        Parameters
-        ----------
-        columns: list
-            The columns to return.
-
-        Returns
-        -------
-        LanceQueryBuilder
-            The LanceQueryBuilder object.
-        """
-        self._columns = columns
-        return self
-
-    def where(self, where: str) -> LanceQueryBuilder:
-        """Set the where clause.
-
-        Parameters
-        ----------
-        where: str
-            The where clause.
-
-        Returns
-        -------
-        LanceQueryBuilder
-            The LanceQueryBuilder object.
-        """
-        self._where = where
-        return self
-
-    def metric(self, metric: Literal["L2", "cosine"]) -> LanceQueryBuilder:
+    def metric(self, metric: Literal["L2", "cosine"]) -> LanceVectorQueryBuilder:
         """Set the distance metric to use.
 
         Parameters
@@ -151,13 +258,13 @@ class LanceQueryBuilder:
 
         Returns
         -------
-        LanceQueryBuilder
+        LanceVectorQueryBuilder
            The LanceQueryBuilder object.
         """
         self._metric = metric
         return self
 
-    def nprobes(self, nprobes: int) -> LanceQueryBuilder:
+    def nprobes(self, nprobes: int) -> LanceVectorQueryBuilder:
         """Set the number of probes to use.
 
         Higher values will yield better recall (more likely to find vectors if
@@ -173,13 +280,13 @@ class LanceQueryBuilder:
 
         Returns
         -------
-        LanceQueryBuilder
+        LanceVectorQueryBuilder
            The LanceQueryBuilder object.
         """
         self._nprobes = nprobes
         return self
 
-    def refine_factor(self, refine_factor: int) -> LanceQueryBuilder:
+    def refine_factor(self, refine_factor: int) -> LanceVectorQueryBuilder:
         """Set the refine factor to use, increasing the number of vectors sampled.
 
         As an example, a refine factor of 2 will sample 2x as many vectors as
@@ -195,22 +302,12 @@ class LanceQueryBuilder:
 
         Returns
         -------
-        LanceQueryBuilder
+        LanceVectorQueryBuilder
            The LanceQueryBuilder object.
         """
         self._refine_factor = refine_factor
         return self
 
-    def to_df(self) -> "pd.DataFrame":
-        """
-        Execute the query and return the results as a pandas DataFrame.
-        In addition to the selected columns, LanceDB also returns a vector
-        and also the "_distance" column which is the distance between the query
-        vector and the returned vector.
-        """
-
-        return self.to_arrow().to_pandas()
-
     def to_arrow(self) -> pa.Table:
         """
         Execute the query and return the results as an
@@ -233,25 +330,12 @@ class LanceQueryBuilder:
         )
         return self._table._execute_query(query)
 
-    def to_pydantic(self, model: Type[LanceModel]) -> List[LanceModel]:
-        """Return the table as a list of pydantic models.
-
-        Parameters
-        ----------
-        model: Type[LanceModel]
-            The pydantic model to use.
-
-        Returns
-        -------
-        List[LanceModel]
-        """
-        return [
-            model(**{k: v for k, v in row.items() if k in model.field_names()})
-            for row in self.to_arrow().to_pylist()
-        ]
-
 
 class LanceFtsQueryBuilder(LanceQueryBuilder):
+    def __init__(self, table: "lancedb.table.Table", query: str):
+        super().__init__(table)
+        self._query = query
+
     def to_arrow(self) -> pa.Table:
         try:
             import tantivy
@@ -275,3 +359,13 @@ class LanceFtsQueryBuilder(LanceQueryBuilder):
         output_tbl = self._table.to_lance().take(row_ids, columns=self._columns)
         output_tbl = output_tbl.append_column("score", scores)
         return output_tbl
+
+
+class LanceEmptyQueryBuilder(LanceQueryBuilder):
+    def to_arrow(self) -> pa.Table:
+        ds = self._table.to_lance()
+        return ds.to_table(
+            columns=self._columns,
+            filter=self._where,
+            limit=self._limit,
+        )
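With `limit`, `select`, `where`, `to_df`, and `to_pydantic` hoisted onto the abstract `LanceQueryBuilder`, the same chain works for vector, full-text, and empty queries. A hedged sketch against a hypothetical table; the vector dimensions and column names are illustrative.

```python
import lancedb

db = lancedb.connect("./lancedb-data")
tbl = db.open_table("quotes")

results = (
    tbl.search([0.1, 0.2, 0.3, 0.4])   # list query is coerced to a float32 ndarray
    .metric("cosine")                  # vector-only option
    .nprobes(20)                       # vector-only option
    .where("author = 'Rick'")          # shared base-class methods from here down
    .select(["quote", "author"])
    .limit(5)
    .to_df()
)
```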
@@ -20,7 +20,7 @@ from lance import json_to_schema
 
 from lancedb.common import DATA, VEC, VECTOR_COLUMN_NAME
 
-from ..query import LanceQueryBuilder
+from ..query import LanceVectorQueryBuilder
 from ..table import Query, Table, _sanitize_data
 from .arrow import to_ipc_binary
 from .client import ARROW_STREAM_CONTENT_TYPE
@@ -73,7 +73,11 @@ class RemoteTable(Table):
         fill_value: float = 0.0,
     ) -> int:
         data = _sanitize_data(
-            data, self.schema, on_bad_vectors=on_bad_vectors, fill_value=fill_value
+            data,
+            self.schema,
+            metadata=None,
+            on_bad_vectors=on_bad_vectors,
+            fill_value=fill_value,
         )
         payload = to_ipc_binary(data)
 
@@ -89,9 +93,9 @@ class RemoteTable(Table):
         )
 
     def search(
-        self, query: Union[VEC, str], vector_column: str = VECTOR_COLUMN_NAME
-    ) -> LanceQueryBuilder:
-        return LanceQueryBuilder(self, query, vector_column)
+        self, query: Union[VEC, str], vector_column_name: str = VECTOR_COLUMN_NAME
+    ) -> LanceVectorQueryBuilder:
+        return LanceVectorQueryBuilder(self, query, vector_column_name)
 
     def _execute_query(self, query: Query) -> pa.Table:
         result = self._conn._client.query(self._name, query)
@@ -17,56 +17,89 @@ import inspect
|
|||||||
import os
|
import os
|
||||||
from abc import ABC, abstractmethod
|
from abc import ABC, abstractmethod
|
||||||
from functools import cached_property
|
from functools import cached_property
|
||||||
from typing import Iterable, List, Union
|
from typing import Any, Iterable, List, Optional, Union
|
||||||
|
|
||||||
import lance
|
import lance
|
||||||
import numpy as np
|
import numpy as np
|
||||||
import pyarrow as pa
|
import pyarrow as pa
|
||||||
import pyarrow.compute as pc
|
import pyarrow.compute as pc
|
||||||
from lance import LanceDataset
|
+from lance import LanceDataset
+from lance.dataset import ReaderLike
 from lance.vector import vec_to_table
 
 from .common import DATA, VEC, VECTOR_COLUMN_NAME
+from .embeddings import EmbeddingFunctionModel, EmbeddingFunctionRegistry
 from .pydantic import LanceModel
-from .query import LanceFtsQueryBuilder, LanceQueryBuilder, Query
+from .query import LanceQueryBuilder, Query
 from .util import fs_from_uri, safe_import_pandas
 
 pd = safe_import_pandas()
 
 
-def _sanitize_data(data, schema, on_bad_vectors, fill_value):
+def _sanitize_data(
+    data,
+    schema: Optional[pa.Schema],
+    metadata: Optional[dict],
+    on_bad_vectors: str,
+    fill_value: Any,
+):
     if isinstance(data, list):
         # convert to list of dict if data is a bunch of LanceModels
         if isinstance(data[0], LanceModel):
             schema = data[0].__class__.to_arrow_schema()
             data = [dict(d) for d in data]
         data = pa.Table.from_pylist(data)
-        data = _sanitize_schema(
-            data, schema=schema, on_bad_vectors=on_bad_vectors, fill_value=fill_value
-        )
-    if isinstance(data, dict):
+    elif isinstance(data, dict):
         data = vec_to_table(data)
-    if pd is not None and isinstance(data, pd.DataFrame):
+    elif pd is not None and isinstance(data, pd.DataFrame):
         data = pa.Table.from_pandas(data, preserve_index=False)
+        # Do not serialize Pandas metadata
+        meta = data.schema.metadata if data.schema.metadata is not None else {}
+        meta = {k: v for k, v in meta.items() if k != b"pandas"}
+        data = data.replace_schema_metadata(meta)
+
+    if isinstance(data, pa.Table):
+        if metadata:
+            data = _append_vector_col(data, metadata, schema)
+            metadata.update(data.schema.metadata or {})
+            data = data.replace_schema_metadata(metadata)
         data = _sanitize_schema(
             data, schema=schema, on_bad_vectors=on_bad_vectors, fill_value=fill_value
         )
-        # Do not serialize Pandas metadata
-        metadata = data.schema.metadata if data.schema.metadata is not None else {}
-        metadata = {k: v for k, v in metadata.items() if k != b"pandas"}
-        schema = data.schema.with_metadata(metadata)
-        data = pa.Table.from_arrays(data.columns, schema=schema)
-    if isinstance(data, Iterable):
-        data = _to_record_batch_generator(data, schema, on_bad_vectors, fill_value)
-    if not isinstance(data, (pa.Table, Iterable)):
+    elif isinstance(data, Iterable):
+        data = _to_record_batch_generator(
+            data, schema, metadata, on_bad_vectors, fill_value
+        )
+    else:
         raise TypeError(f"Unsupported data type: {type(data)}")
     return data
 
 
-def _to_record_batch_generator(data: Iterable, schema, on_bad_vectors, fill_value):
+def _append_vector_col(data: pa.Table, metadata: dict, schema: Optional[pa.Schema]):
+    """
+    Use the embedding function to automatically embed the source column and add the
+    vector column to the table.
+    """
+    functions = EmbeddingFunctionRegistry.get_instance().parse_functions(metadata)
+    for vector_col, func in functions.items():
+        if vector_col not in data.column_names:
+            col_data = func(data[func.source_column])
+            if schema is not None:
+                dtype = schema.field(vector_col).type
+            else:
+                dtype = pa.list_(pa.float32(), len(col_data[0]))
+            data = data.append_column(
+                pa.field(vector_col, type=dtype), pa.array(col_data, type=dtype)
+            )
+    return data
+
+
+def _to_record_batch_generator(
+    data: Iterable, schema, metadata, on_bad_vectors, fill_value
+):
     for batch in data:
         if not isinstance(batch, pa.RecordBatch):
-            table = _sanitize_data(batch, schema, on_bad_vectors, fill_value)
+            table = _sanitize_data(batch, schema, metadata, on_bad_vectors, fill_value)
             for batch in table.to_batches():
                 yield batch
         yield batch
@@ -74,7 +107,7 @@ def _to_record_batch_generator(data: Iterable, schema, on_bad_vectors, fill_valu
 
 class Table(ABC):
     """
-    A [Table](Table) is a collection of Records in a LanceDB [Database](Database).
+    A Table is a collection of Records in a LanceDB Database.
 
     Examples
     --------
@@ -195,17 +228,28 @@ class Table(ABC):
 
     @abstractmethod
     def search(
-        self, query: Union[VEC, str], vector_column: str = VECTOR_COLUMN_NAME
+        self,
+        query: Optional[Union[VEC, str]] = None,
+        vector_column_name: str = VECTOR_COLUMN_NAME,
+        query_type: str = "auto",
     ) -> LanceQueryBuilder:
         """Create a search query to find the nearest neighbors
         of the given query vector.
 
         Parameters
         ----------
-        query: list, np.ndarray
-            The query vector.
-        vector_column: str, default "vector"
+        query: str, list, np.ndarray, default None
+            The query to search for. If None then
+            the select/where/limit clauses are applied to filter
+            the table
+        vector_column_name: str, default "vector"
            The name of the vector column to search.
+        query_type: str, default "auto"
+            "vector", "fts", or "auto"
+            If "auto" then the query type is inferred from the query;
+            If `query` is a list/np.ndarray then the query type is "vector";
+            If `query` is a string, then the query type is "vector" if the
+            table has embedding functions else the query type is "fts"
 
         Returns
         -------
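
Note (not part of the diff): a minimal usage sketch of the widened search() signature, assuming an already-created table named "my_table" with a "vector" column and a "type" column; the table and column names here are illustrative.

    import lancedb

    db = lancedb.connect("./.lancedb")
    tbl = db.open_table("my_table")

    # list/ndarray query: query_type is inferred as "vector"
    tbl.search([0.5, 0.2]).limit(5).to_df()

    # string query: routed to the table's embedding function if one is
    # configured, otherwise treated as full-text search ("fts")
    tbl.search("a short text query", query_type="auto").limit(5).to_df()

    # no query at all: select/where/limit act as a plain filter
    tbl.search().where("type = 'vector'").limit(10).to_df()
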
@@ -311,7 +355,7 @@ class LanceTable(Table):
 
         This allows viewing previous versions of the table. If you wish to
         keep writing to the dataset starting from an old version, then use
-        the `restore` function instead.
+        the `restore` function.
 
         Parameters
         ----------
@@ -324,14 +368,14 @@ class LanceTable(Table):
         >>> db = lancedb.connect("./.lancedb")
         >>> table = db.create_table("my_table", [{"vector": [1.1, 0.9], "type": "vector"}])
         >>> table.version
-        1
+        2
         >>> table.to_pandas()
                vector    type
         0  [1.1, 0.9]  vector
         >>> table.add([{"vector": [0.5, 0.2], "type": "vector"}])
         >>> table.version
-        2
+        3
-        >>> table.checkout(1)
+        >>> table.checkout(2)
         >>> table.to_pandas()
                vector    type
         0  [1.1, 0.9]  vector
@@ -341,16 +385,18 @@ class LanceTable(Table):
             raise ValueError(f"Invalid version {version}")
         self._reset_dataset(version=version)
 
-    def restore(self, version: int):
+    def restore(self, version: int = None):
         """Restore a version of the table. This is an in-place operation.
 
         This creates a new version where the data is equivalent to the
-        specified previous version. Note that this creates a new snapshot.
+        specified previous version. Data is not copied (as of python-v0.2.1).
 
         Parameters
         ----------
-        version : int
-            The version to restore.
+        version : int, default None
+            The version to restore. If unspecified then restores the currently
+            checked out version. If the currently checked out version is the
+            latest version then this is a no-op.
 
         Examples
         --------
@@ -358,30 +404,33 @@ class LanceTable(Table):
         >>> db = lancedb.connect("./.lancedb")
         >>> table = db.create_table("my_table", [{"vector": [1.1, 0.9], "type": "vector"}])
         >>> table.version
-        1
+        2
         >>> table.to_pandas()
                vector    type
         0  [1.1, 0.9]  vector
         >>> table.add([{"vector": [0.5, 0.2], "type": "vector"}])
         >>> table.version
-        2
+        3
-        >>> table.restore(1)
+        >>> table.restore(2)
         >>> table.to_pandas()
                vector    type
         0  [1.1, 0.9]  vector
         >>> len(table.list_versions())
-        3
+        4
         """
         max_ver = max([v["version"] for v in self._dataset.versions()])
-        if version < 1 or version >= max_ver:
+        if version is None:
+            version = self.version
+        elif version < 1 or version > max_ver:
             raise ValueError(f"Invalid version {version}")
-        if version == max_ver:
-            self._reset_dataset()
-            return
-        self.checkout(version)
-        data = self.to_arrow()
-        self.checkout(max_ver)
-        self.add(data, mode="overwrite")
+        else:
+            self.checkout(version)
+
+        if version == max_ver:
+            # no-op if restoring the latest version
+            return
+
+        self._dataset.restore()
         self._reset_dataset()
 
     def __len__(self):
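
Note (not part of the diff): a short sketch of the new no-argument restore(), which promotes whatever version is currently checked out; this continues the doctest above and the version numbers are illustrative.

    table.checkout(2)   # read-only view of an older version
    table.restore()     # make that version the newest version again
    table.restore(table.version)  # restoring the latest version is a no-op
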
@@ -495,23 +544,122 @@ class LanceTable(Table):
         """
         # TODO: manage table listing and metadata separately
         data = _sanitize_data(
-            data, self.schema, on_bad_vectors=on_bad_vectors, fill_value=fill_value
+            data,
+            self.schema,
+            metadata=self.schema.metadata,
+            on_bad_vectors=on_bad_vectors,
+            fill_value=fill_value,
         )
         lance.write_dataset(data, self._dataset_uri, schema=self.schema, mode=mode)
         self._reset_dataset()
 
+    def merge(
+        self,
+        other_table: Union[LanceTable, ReaderLike],
+        left_on: str,
+        right_on: Optional[str] = None,
+        schema: Optional[Union[pa.Schema, LanceModel]] = None,
+    ):
+        """Merge another table into this table.
+
+        Performs a left join, where the dataset is the left side and other_table
+        is the right side. Rows existing in the dataset but not on the left will
+        be filled with null values, unless Lance doesn't support null values for
+        some types, in which case an error will be raised. The only overlapping
+        column allowed is the join column. If other overlapping columns exist,
+        an error will be raised.
+
+        Parameters
+        ----------
+        other_table: LanceTable or Reader-like
+            The data to be merged. Acceptable types are:
+            - Pandas DataFrame, Pyarrow Table, Dataset, Scanner,
+              Iterator[RecordBatch], or RecordBatchReader
+            - LanceTable
+        left_on: str
+            The name of the column in the dataset to join on.
+        right_on: str or None
+            The name of the column in other_table to join on. If None, defaults to
+            left_on.
+        schema: pa.Schema or LanceModel, optional
+            The schema of the other_table.
+            If not provided, the schema is inferred from the data.
+
+        Examples
+        --------
+        >>> import lancedb
+        >>> import pyarrow as pa
+        >>> df = pa.table({'x': [1, 2, 3], 'y': ['a', 'b', 'c']})
+        >>> db = lancedb.connect("./.lancedb")
+        >>> table = db.create_table("dataset", df)
+        >>> table.to_pandas()
+           x  y
+        0  1  a
+        1  2  b
+        2  3  c
+        >>> new_df = pa.table({'x': [1, 2, 3], 'z': ['d', 'e', 'f']})
+        >>> table.merge(new_df, 'x')
+        >>> table.to_pandas()
+           x  y  z
+        0  1  a  d
+        1  2  b  e
+        2  3  c  f
+        """
+        if isinstance(schema, LanceModel):
+            schema = schema.to_arrow_schema()
+        if isinstance(other_table, LanceTable):
+            other_table = other_table.to_lance()
+        if isinstance(other_table, LanceDataset):
+            other_table = other_table.to_table()
+        self._dataset.merge(
+            other_table, left_on=left_on, right_on=right_on, schema=schema
+        )
+        self._reset_dataset()
+
+    def _get_embedding_function_for_source_col(self, column_name: str):
+        for k, v in self.embedding_functions.items():
+            if v.source_column == column_name:
+                return v
+        return None
+
+    @cached_property
+    def embedding_functions(self) -> dict:
+        """
+        Get the embedding functions for the table
+
+        Returns
+        -------
+        funcs: dict
+            A mapping of the vector column to the embedding function
+            or empty dict if not configured.
+        """
+        return EmbeddingFunctionRegistry.get_instance().parse_functions(
+            self.schema.metadata
+        )
+
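
Note (not part of the diff): besides Arrow tables, merge() also accepts another LanceTable or a Lance dataset, mirroring the new test_merge further down; the path below is illustrative.

    import lance
    import pyarrow as pa

    other = pa.table({"document": ["foo", "bar"], "id": [0, 1]})
    other_ds = lance.write_dataset(other, "/tmp/other_table.lance")
    table.merge(other_ds, left_on="id")  # left join adds the "document" column
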
     def search(
-        self, query: Union[VEC, str], vector_column_name=VECTOR_COLUMN_NAME
+        self,
+        query: Optional[Union[VEC, str]] = None,
+        vector_column_name: str = VECTOR_COLUMN_NAME,
+        query_type: str = "auto",
     ) -> LanceQueryBuilder:
         """Create a search query to find the nearest neighbors
         of the given query vector.
 
         Parameters
         ----------
-        query: list, np.ndarray
-            The query vector.
+        query: str, list, np.ndarray, or None
+            The query to search for. If None then
+            the select/where/limit clauses are applied to filter
+            the table
         vector_column_name: str, default "vector"
             The name of the vector column to search.
+        query_type: str, default "auto"
+            "vector", "fts", or "auto"
+            If "auto" then the query type is inferred from the query;
+            If the query is a list/np.ndarray then the query type is "vector";
+            If the query is a string, then the query type is "vector" if the
+            table has embedding functions else the query type is "fts"
 
         Returns
         -------
@@ -521,17 +669,9 @@ class LanceTable(Table):
         and also the "_distance" column which is the distance between the query
         vector and the returned vector.
         """
-        if isinstance(query, str):
-            # fts
-            return LanceFtsQueryBuilder(self, query, vector_column_name)
-
-        if isinstance(query, list):
-            query = np.array(query)
-        if isinstance(query, np.ndarray):
-            query = query.astype(np.float32)
-        else:
-            raise TypeError(f"Unsupported query type: {type(query)}")
-        return LanceQueryBuilder(self, query, vector_column_name)
+        return LanceQueryBuilder.create(
+            self, query, query_type, vector_column_name=vector_column_name
+        )
 
     @classmethod
     def create(
@@ -543,6 +683,7 @@ class LanceTable(Table):
         mode="create",
         on_bad_vectors: str = "error",
         fill_value: float = 0.0,
+        embedding_functions: List[EmbeddingFunctionModel] = None,
     ):
         """
         Create a new table.
@@ -580,20 +721,52 @@ class LanceTable(Table):
             One of "error", "drop", "fill".
         fill_value: float, default 0.
             The value to use when filling vectors. Only used if on_bad_vectors="fill".
+        embedding_functions: list of EmbeddingFunctionModel, default None
+            The embedding functions to use when creating the table.
         """
         tbl = LanceTable(db, name)
         if inspect.isclass(schema) and issubclass(schema, LanceModel):
             schema = schema.to_arrow_schema()
 
+        metadata = None
+        if embedding_functions is not None:
+            registry = EmbeddingFunctionRegistry.get_instance()
+            metadata = registry.get_table_metadata(embedding_functions)
+
         if data is not None:
             data = _sanitize_data(
-                data, schema, on_bad_vectors=on_bad_vectors, fill_value=fill_value
+                data,
+                schema,
+                metadata=metadata,
+                on_bad_vectors=on_bad_vectors,
+                fill_value=fill_value,
             )
-        else:
-            if schema is None:
-                raise ValueError("Either data or schema must be provided")
-            data = pa.Table.from_pylist([], schema=schema)
-        lance.write_dataset(data, tbl._dataset_uri, schema=schema, mode=mode)
-        return LanceTable(db, name)
+
+        if schema is None:
+            if data is None:
+                raise ValueError("Either data or schema must be provided")
+            elif hasattr(data, "schema"):
+                schema = data.schema
+            elif isinstance(data, Iterable):
+                if metadata:
+                    raise TypeError(
+                        (
+                            "Persistent embedding functions not yet "
+                            "supported for generator data input"
+                        )
+                    )
+
+        if metadata:
+            schema = schema.with_metadata(metadata)
+
+        empty = pa.Table.from_pylist([], schema=schema)
+        lance.write_dataset(empty, tbl._dataset_uri, schema=schema, mode=mode)
+        table = LanceTable(db, name)
+
+        if data is not None:
+            table.add(data)
+
+        return table
 
     @classmethod
     def open(cls, db, name):
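
Note (not part of the diff): a minimal sketch of creating a table with a persistent embedding function, modeled on the new tests further down. `MyEmbeddings` is a hypothetical stand-in for an EmbeddingFunctionModel with `source_column`/`vector_column` set (the tests use the test-only MockEmbeddingFunction).

    import lancedb
    from lancedb.pydantic import LanceModel, vector
    from lancedb.table import LanceTable

    class MyTable(LanceModel):
        text: str
        vector: vector(10)

    db = lancedb.connect("./.lancedb")
    func = MyEmbeddings(source_column="text", vector_column="vector")  # hypothetical embedding function
    table = LanceTable.create(db, "my_table", schema=MyTable, embedding_functions=[func])

    table.add([{"text": "hello world"}])        # vector column is computed automatically
    table.search("greetings").limit(2).to_df()  # string query reuses the same function
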
@@ -609,6 +782,56 @@ class LanceTable(Table):
     def delete(self, where: str):
         self._dataset.delete(where)
 
+    def update(self, where: str, values: dict):
+        """
+        EXPERIMENTAL: Update rows in the table (not threadsafe).
+
+        This can be used to update zero to all rows depending on how many
+        rows match the where clause.
+
+        Parameters
+        ----------
+        where: str
+            The SQL where clause to use when updating rows. For example, 'x = 2'
+            or 'x IN (1, 2, 3)'. The filter must not be empty, or it will error.
+        values: dict
+            The values to update. The keys are the column names and the values
+            are the values to set.
+
+        Examples
+        --------
+        >>> import lancedb
+        >>> import pandas as pd
+        >>> data = pd.DataFrame({"x": [1, 2, 3], "vector": [[1, 2], [3, 4], [5, 6]]})
+        >>> db = lancedb.connect("./.lancedb")
+        >>> table = db.create_table("my_table", data)
+        >>> table.to_pandas()
+           x      vector
+        0  1  [1.0, 2.0]
+        1  2  [3.0, 4.0]
+        2  3  [5.0, 6.0]
+        >>> table.update(where="x = 2", values={"vector": [10, 10]})
+        >>> table.to_pandas()
+           x        vector
+        0  1    [1.0, 2.0]
+        1  3    [5.0, 6.0]
+        2  2  [10.0, 10.0]
+
+        """
+        orig_data = self._dataset.to_table(filter=where).combine_chunks()
+        if len(orig_data) == 0:
+            return
+        for col, val in values.items():
+            i = orig_data.column_names.index(col)
+            if i < 0:
+                raise ValueError(f"Column {col} does not exist")
+            orig_data = orig_data.set_column(
+                i, col, pa.array([val] * len(orig_data), type=orig_data[col].type)
+            )
+        self.delete(where)
+        self.add(orig_data, mode="append")
+        self._reset_dataset()
+
     def _execute_query(self, query: Query) -> pa.Table:
         ds = self.to_lance()
         return ds.to_table(
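
Note (not part of the diff): update() works by reading the matching rows, rewriting the requested columns, deleting the originals, and appending the rewritten rows, which is why it is flagged experimental and not threadsafe and why each call advances the table version more than once; a small usage sketch with illustrative column names.

    table.update(where="id = 0", values={"vector": [1.1, 1.1]})
    print(table.version, len(table.list_versions()))  # both advance after an update
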
@@ -651,16 +874,30 @@ def _sanitize_schema(
             return data
         # cast the columns to the expected types
         data = data.combine_chunks()
-        data = _sanitize_vector_column(
-            data,
-            vector_column_name=VECTOR_COLUMN_NAME,
-            on_bad_vectors=on_bad_vectors,
-            fill_value=fill_value,
-        )
+        for field in schema:
+            # TODO: we're making an assumption that fixed size list of 10 or more
+            # is a vector column. This is definitely a bit hacky.
+            likely_vector_col = (
+                pa.types.is_fixed_size_list(field.type)
+                and pa.types.is_float32(field.type.value_type)
+                and field.type.list_size >= 10
+            )
+            is_default_vector_col = field.name == VECTOR_COLUMN_NAME
+            if field.name in data.column_names and (
+                likely_vector_col or is_default_vector_col
+            ):
+                data = _sanitize_vector_column(
+                    data,
+                    vector_column_name=field.name,
+                    on_bad_vectors=on_bad_vectors,
+                    fill_value=fill_value,
+                )
         return pa.Table.from_arrays(
             [data[name] for name in schema.names], schema=schema
         )
 
     # just check the vector column
+    if VECTOR_COLUMN_NAME in data.column_names:
-    return _sanitize_vector_column(
-        data,
-        vector_column_name=VECTOR_COLUMN_NAME,
+        return _sanitize_vector_column(
+            data,
+            vector_column_name=VECTOR_COLUMN_NAME,
@@ -668,6 +905,8 @@ def _sanitize_schema(
-        on_bad_vectors=on_bad_vectors,
-        fill_value=fill_value,
-    )
+            on_bad_vectors=on_bad_vectors,
+            fill_value=fill_value,
+        )
+
+    return data
 
 
 def _sanitize_vector_column(
     data: pa.Table,
@@ -690,8 +929,6 @@ def _sanitize_vector_column(
     fill_value: float, default 0.0
         The value to use when filling vectors. Only used if on_bad_vectors="fill".
     """
-    if vector_column_name not in data.column_names:
-        raise ValueError(f"Missing vector column: {vector_column_name}")
     # ChunkedArray is annoying to work with, so we combine chunks here
     vec_arr = data[vector_column_name].combine_chunks()
     if pa.types.is_list(data[vector_column_name].type):
|
|||||||
[project]
|
[project]
|
||||||
name = "lancedb"
|
name = "lancedb"
|
||||||
version = "0.2.1"
|
version = "0.2.2"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"pylance==0.6.5",
|
"pylance==0.6.5",
|
||||||
"ratelimiter",
|
"ratelimiter",
|
||||||
@@ -9,7 +9,8 @@ dependencies = [
|
|||||||
"aiohttp",
|
"aiohttp",
|
||||||
"pydantic",
|
"pydantic",
|
||||||
"attr",
|
"attr",
|
||||||
"semver>=3.0"
|
"semver>=3.0",
|
||||||
|
"cachetools"
|
||||||
]
|
]
|
||||||
description = "lancedb"
|
description = "lancedb"
|
||||||
authors = [{ name = "LanceDB Devs", email = "dev@lancedb.com" }]
|
authors = [{ name = "LanceDB Devs", email = "dev@lancedb.com" }]
|
||||||
|
|||||||
@@ -144,7 +144,7 @@ def test_ingest_iterator(tmp_path):
         tbl_len = len(tbl)
         tbl.add(make_batches())
         assert len(tbl) == tbl_len * 2
-        assert len(tbl.list_versions()) == 2
+        assert len(tbl.list_versions()) == 3
         db.drop_database()
 
     run_tests(arrow_schema)
@@ -12,10 +12,12 @@
 # limitations under the License.
 import sys
 
+import lance
 import numpy as np
 import pyarrow as pa
 
-from lancedb.embeddings import with_embeddings
+from lancedb.conftest import MockEmbeddingFunction
+from lancedb.embeddings import EmbeddingFunctionRegistry, with_embeddings
 
 
 def mock_embed_func(input_data):
@@ -40,3 +42,37 @@ def test_with_embeddings():
     assert data.column_names == ["text", "price", "vector"]
     assert data.column("text").to_pylist() == ["foo", "bar"]
     assert data.column("price").to_pylist() == [10.0, 20.0]
+
+
+def test_embedding_function(tmp_path):
+    registry = EmbeddingFunctionRegistry.get_instance()
+
+    # let's create a table
+    table = pa.table(
+        {
+            "text": pa.array(["hello world", "goodbye world"]),
+            "vector": [np.random.randn(10), np.random.randn(10)],
+        }
+    )
+    func = MockEmbeddingFunction(source_column="text", vector_column="vector")
+    metadata = registry.get_table_metadata([func])
+    table = table.replace_schema_metadata(metadata)
+
+    # Write it to disk
+    lance.write_dataset(table, tmp_path / "test.lance")
+
+    # Load this back
+    ds = lance.dataset(tmp_path / "test.lance")
+
+    # can we get the serialized version back out?
+    functions = registry.parse_functions(ds.schema.metadata)
+
+    func = functions["vector"]
+    actual = func("hello world")
+
+    # We create an instance
+    expected_func = MockEmbeddingFunction(source_column="text", vector_column="vector")
+    # And we make sure we can call it
+    expected = expected_func("hello world")
+
+    assert np.allclose(actual, expected)
@@ -21,7 +21,7 @@ import pytest
 
 from lancedb.db import LanceDBConnection
 from lancedb.pydantic import LanceModel, vector
-from lancedb.query import LanceQueryBuilder, Query
+from lancedb.query import LanceVectorQueryBuilder, Query
 from lancedb.table import LanceTable
 
 
@@ -72,7 +72,7 @@ def test_cast(table):
         str_field: str
         float_field: float
 
-    q = LanceQueryBuilder(table, [0, 0], "vector").limit(1)
+    q = LanceVectorQueryBuilder(table, [0, 0], "vector").limit(1)
     results = q.to_pydantic(TestModel)
     assert len(results) == 1
     r0 = results[0]
@@ -84,13 +84,15 @@ def test_cast(table):
 
 
 def test_query_builder(table):
-    df = LanceQueryBuilder(table, [0, 0], "vector").limit(1).select(["id"]).to_df()
+    df = (
+        LanceVectorQueryBuilder(table, [0, 0], "vector").limit(1).select(["id"]).to_df()
+    )
     assert df["id"].values[0] == 1
     assert all(df["vector"].values[0] == [1, 2])
 
 
 def test_query_builder_with_filter(table):
-    df = LanceQueryBuilder(table, [0, 0], "vector").where("id = 2").to_df()
+    df = LanceVectorQueryBuilder(table, [0, 0], "vector").where("id = 2").to_df()
     assert df["id"].values[0] == 2
     assert all(df["vector"].values[0] == [3, 4])
 
@@ -98,12 +100,14 @@ def test_query_builder_with_filter(table):
 def test_query_builder_with_metric(table):
     query = [4, 8]
     vector_column_name = "vector"
-    df_default = LanceQueryBuilder(table, query, vector_column_name).to_df()
-    df_l2 = LanceQueryBuilder(table, query, vector_column_name).metric("L2").to_df()
+    df_default = LanceVectorQueryBuilder(table, query, vector_column_name).to_df()
+    df_l2 = (
+        LanceVectorQueryBuilder(table, query, vector_column_name).metric("L2").to_df()
+    )
     tm.assert_frame_equal(df_default, df_l2)
 
     df_cosine = (
-        LanceQueryBuilder(table, query, vector_column_name)
+        LanceVectorQueryBuilder(table, query, vector_column_name)
         .metric("cosine")
         .limit(1)
         .to_df()
@@ -120,7 +124,7 @@ def test_query_builder_with_different_vector_column():
     query = [4, 8]
     vector_column_name = "foo_vector"
     builder = (
-        LanceQueryBuilder(table, query, vector_column_name)
+        LanceVectorQueryBuilder(table, query, vector_column_name)
         .metric("cosine")
         .where("b < 10")
         .select(["b"])
@@ -16,11 +16,13 @@ from pathlib import Path
 from typing import List
 from unittest.mock import PropertyMock, patch
 
+import lance
 import numpy as np
 import pandas as pd
 import pyarrow as pa
 import pytest
 
+from lancedb.conftest import MockEmbeddingFunction
 from lancedb.db import LanceDBConnection
 from lancedb.pydantic import LanceModel, vector
 from lancedb.table import LanceTable
@@ -177,16 +179,16 @@ def test_versioning(db):
         ],
     )
 
-    assert len(table.list_versions()) == 1
-    assert table.version == 1
-
-    table.add([{"vector": [6.3, 100.5], "item": "new", "price": 30.0}])
     assert len(table.list_versions()) == 2
     assert table.version == 2
+
+    table.add([{"vector": [6.3, 100.5], "item": "new", "price": 30.0}])
+    assert len(table.list_versions()) == 3
+    assert table.version == 3
     assert len(table) == 3
 
-    table.checkout(1)
-    assert table.version == 1
+    table.checkout(2)
+    assert table.version == 2
     assert len(table) == 2
 
 
@@ -277,6 +279,165 @@ def test_restore(db):
         data=[{"vector": [1.1, 0.9], "type": "vector"}],
     )
     table.add([{"vector": [0.5, 0.2], "type": "vector"}])
-    table.restore(1)
-    assert len(table.list_versions()) == 3
+    table.restore(2)
+    assert len(table.list_versions()) == 4
     assert len(table) == 1
+
+    expected = table.to_arrow()
+    table.checkout(2)
+    table.restore()
+    assert len(table.list_versions()) == 5
+    assert table.to_arrow() == expected
+
+    table.restore(5)  # latest version should be no-op
+    assert len(table.list_versions()) == 5
+
+    with pytest.raises(ValueError):
+        table.restore(6)
+
+    with pytest.raises(ValueError):
+        table.restore(0)
+
+
+def test_merge(db, tmp_path):
+    table = LanceTable.create(
+        db,
+        "my_table",
+        data=[{"vector": [1.1, 0.9], "id": 0}, {"vector": [1.2, 1.9], "id": 1}],
+    )
+    other_table = pa.table({"document": ["foo", "bar"], "id": [0, 1]})
+    table.merge(other_table, left_on="id")
+    assert len(table.list_versions()) == 3
+    expected = pa.table(
+        {"vector": [[1.1, 0.9], [1.2, 1.9]], "id": [0, 1], "document": ["foo", "bar"]},
+        schema=table.schema,
+    )
+    assert table.to_arrow() == expected
+
+    other_dataset = lance.write_dataset(other_table, tmp_path / "other_table.lance")
+    table.restore(1)
+    table.merge(other_dataset, left_on="id")
+
+
+def test_delete(db):
+    table = LanceTable.create(
+        db,
+        "my_table",
+        data=[{"vector": [1.1, 0.9], "id": 0}, {"vector": [1.2, 1.9], "id": 1}],
+    )
+    assert len(table) == 2
+    assert len(table.list_versions()) == 2
+    table.delete("id=0")
+    assert len(table.list_versions()) == 3
+    assert table.version == 3
+    assert len(table) == 1
+    assert table.to_pandas()["id"].tolist() == [1]
+
+
+def test_update(db):
+    table = LanceTable.create(
+        db,
+        "my_table",
+        data=[{"vector": [1.1, 0.9], "id": 0}, {"vector": [1.2, 1.9], "id": 1}],
+    )
+    assert len(table) == 2
+    assert len(table.list_versions()) == 2
+    table.update(where="id=0", values={"vector": [1.1, 1.1]})
+    assert len(table.list_versions()) == 4
+    assert table.version == 4
+    assert len(table) == 2
+    v = table.to_arrow()["vector"].combine_chunks()
+    v = v.values.to_numpy().reshape(2, 2)
+    assert np.allclose(v, np.array([[1.2, 1.9], [1.1, 1.1]]))
+
+
+def test_create_with_embedding_function(db):
+    class MyTable(LanceModel):
+        text: str
+        vector: vector(10)
+
+    func = MockEmbeddingFunction(source_column="text", vector_column="vector")
+    texts = ["hello world", "goodbye world", "foo bar baz fizz buzz"]
+    df = pd.DataFrame({"text": texts, "vector": func(texts)})
+
+    table = LanceTable.create(
+        db,
+        "my_table",
+        schema=MyTable,
+        embedding_functions=[func],
+    )
+    table.add(df)
+
+    query_str = "hi how are you?"
+    query_vector = func(query_str)[0]
+    expected = table.search(query_vector).limit(2).to_arrow()
+
+    actual = table.search(query_str).limit(2).to_arrow()
+    assert actual == expected
+
+
+def test_add_with_embedding_function(db):
+    class MyTable(LanceModel):
+        text: str
+        vector: vector(10)
+
+    func = MockEmbeddingFunction(source_column="text", vector_column="vector")
+    table = LanceTable.create(
+        db,
+        "my_table",
+        schema=MyTable,
+        embedding_functions=[func],
+    )
+
+    texts = ["hello world", "goodbye world", "foo bar baz fizz buzz"]
+    df = pd.DataFrame({"text": texts})
+    table.add(df)
+
+    texts = ["the quick brown fox", "jumped over the lazy dog"]
+    table.add([{"text": t} for t in texts])
+
+    query_str = "hi how are you?"
+    query_vector = func(query_str)[0]
+    expected = table.search(query_vector).limit(2).to_arrow()
+
+    actual = table.search(query_str).limit(2).to_arrow()
+    assert actual == expected
+
+
+def test_multiple_vector_columns(db):
+    class MyTable(LanceModel):
+        text: str
+        vector1: vector(10)
+        vector2: vector(10)
+
+    table = LanceTable.create(
+        db,
+        "my_table",
+        schema=MyTable,
+    )
+
+    v1 = np.random.randn(10)
+    v2 = np.random.randn(10)
+    data = [
+        {"vector1": v1, "vector2": v2, "text": "foo"},
+        {"vector1": v2, "vector2": v1, "text": "bar"},
+    ]
+    df = pd.DataFrame(data)
+    table.add(df)
+
+    q = np.random.randn(10)
+    result1 = table.search(q, vector_column_name="vector1").limit(1).to_df()
+    result2 = table.search(q, vector_column_name="vector2").limit(1).to_df()
+
+    assert result1["text"].iloc[0] != result2["text"].iloc[0]
+
+
+def test_empty_query(db):
+    table = LanceTable.create(
+        db,
+        "my_table",
+        data=[{"text": "foo", "id": 0}, {"text": "bar", "id": 1}],
+    )
+    df = table.search().select(["id"]).where("text='bar'").limit(1).to_df()
+    val = df.id.iloc[0]
+    assert val == 1
@@ -1,6 +1,6 @@
 [package]
 name = "vectordb-node"
-version = "0.2.3"
+version = "0.2.4"
 description = "Serverless, low-latency vector database for AI applications"
 license = "Apache-2.0"
 edition = "2018"
|
|||||||
[package]
|
[package]
|
||||||
name = "vectordb"
|
name = "vectordb"
|
||||||
version = "0.2.3"
|
version = "0.2.4"
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
description = "Serverless, low-latency vector database for AI applications"
|
description = "LanceDB: A serverless, low-latency vector database for AI applications"
|
||||||
license = "Apache-2.0"
|
license = "Apache-2.0"
|
||||||
repository = "https://github.com/lancedb/lancedb"
|
repository = "https://github.com/lancedb/lancedb"
|
||||||
|
keywords = ["lancedb", "lance", "database", "search"]
|
||||||
|
categories = ["database-implementations"]
|
||||||
|
|
||||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||||
[dependencies]
|
[dependencies]
|
||||||
|
|||||||
rust/vectordb/README.md (new file, 3 lines)
@@ -0,0 +1,3 @@
+# LanceDB Rust
+
+Rust client for LanceDB, a serverless vector database. Read more at: https://lancedb.com/