ci: use larger runner for doctest and fix failing tests (#2801)

Currently the doctest job fails around installing PyTorch on the default runner.
Jack Ye
2025-11-20 19:44:31 -08:00
committed by GitHub
parent 76bcc78910
commit 0baf807be0
3 changed files with 14 additions and 10 deletions


@@ -79,7 +79,7 @@ jobs:
   doctest:
     name: "Doctest"
     timeout-minutes: 30
-    runs-on: "ubuntu-24.04"
+    runs-on: ubuntu-2404-8x-x64
     defaults:
       run:
         shell: bash


@@ -127,13 +127,17 @@ class LanceNamespaceStorageOptionsProvider(StorageOptionsProvider):
     Examples
     --------
-    >>> from lance_namespace import connect as namespace_connect
-    >>> namespace = namespace_connect("rest", {"url": "https://..."})
-    >>> provider = LanceNamespaceStorageOptionsProvider(
-    ...     namespace=namespace,
-    ...     table_id=["my_namespace", "my_table"]
-    ... )
-    >>> options = provider.fetch_storage_options()
+    Create a provider and fetch storage options::
+
+        from lance_namespace import connect as namespace_connect
+
+        # Connect to namespace (requires a running namespace server)
+        namespace = namespace_connect("rest", {"uri": "https://..."})
+        provider = LanceNamespaceStorageOptionsProvider(
+            namespace=namespace,
+            table_id=["my_namespace", "my_table"]
+        )
+        options = provider.fetch_storage_options()
     """

    def __init__(self, namespace: LanceNamespace, table_id: List[str]):
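
Not part of the diff: a minimal sketch of how the fetched options might be wired into a connection. It assumes fetch_storage_options() returns a plain dict that can be forwarded to lancedb.connect via its storage_options parameter; the import path for the provider class, the REST URL, bucket URI, and table id are all placeholders rather than confirmed values.

    import lancedb
    from lance_namespace import connect as namespace_connect
    # Assumed import path; use wherever LanceNamespaceStorageOptionsProvider
    # is exposed in your installed lancedb.
    from lancedb.namespace import LanceNamespaceStorageOptionsProvider

    # Placeholder namespace server and table id (a real server is required).
    namespace = namespace_connect("rest", {"uri": "https://..."})
    provider = LanceNamespaceStorageOptionsProvider(
        namespace=namespace,
        table_id=["my_namespace", "my_table"],
    )

    # Assumption: the returned dict holds the credentials/endpoints that
    # lancedb.connect accepts via storage_options.
    options = provider.fetch_storage_options()
    db = lancedb.connect("s3://my-bucket/tables", storage_options=options)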


@@ -1018,7 +1018,7 @@ class Table(ABC):
         ...     .when_not_matched_insert_all() \\
         ...     .execute(new_data)
         >>> res
-        MergeResult(version=2, num_updated_rows=2, num_inserted_rows=1, num_deleted_rows=0)
+        MergeResult(version=2, num_updated_rows=2, num_inserted_rows=1, num_deleted_rows=0, num_attempts=1)
         >>> # The order of new rows is non-deterministic since we use
         >>> # a hash-join as part of this operation and so we sort here
         >>> table.to_arrow().sort_by("a").to_pandas()
@@ -3634,7 +3634,7 @@ class AsyncTable:
         ...     .when_not_matched_insert_all() \\
         ...     .execute(new_data)
         >>> res
-        MergeResult(version=2, num_updated_rows=2, num_inserted_rows=1, num_deleted_rows=0)
+        MergeResult(version=2, num_updated_rows=2, num_inserted_rows=1, num_deleted_rows=0, num_attempts=1)
         >>> # The order of new rows is non-deterministic since we use
         >>> # a hash-join as part of this operation and so we sort here
         >>> table.to_arrow().sort_by("a").to_pandas()
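
Not part of the diff: a small end-to-end sketch of the merge_insert flow these doctests cover, showing where the new num_attempts field surfaces. The local path, table name, and column values are illustrative only, and attribute access on MergeResult is assumed to mirror the fields in the repr above.

    import lancedb
    import pyarrow as pa

    db = lancedb.connect("/tmp/merge-demo")  # illustrative local path
    table = db.create_table(
        "t",
        pa.table({"a": [1, 2, 3], "b": ["x", "y", "z"]}),
        mode="overwrite",
    )

    new_data = pa.table({"a": [2, 3, 4], "b": ["updated", "updated", "new"]})

    # Upsert keyed on "a": matched rows are updated, unmatched rows are inserted.
    res = (
        table.merge_insert("a")
        .when_matched_update_all()
        .when_not_matched_insert_all()
        .execute(new_data)
    )

    # The result now also reports how many commit attempts the operation took.
    print(res.num_updated_rows, res.num_inserted_rows, res.num_attempts)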