build(deps): bump mypy from 1.3.0 to 1.13.0 (#9670)

## Problem
We use a pretty old version of `mypy`, 1.3 (released 1.5 years ago), which
produces false positives for `typing.Self`.

## Summary of changes
- Bump `mypy` from 1.3 to 1.13
- Fix new warnings and errors
- Use `typing.Self` whenever we `return self`
This commit is contained in:
Alexander Bayandin
2024-11-22 14:31:36 +00:00
committed by GitHub
parent c10b7f7de9
commit 51d26a261b
17 changed files with 103 additions and 102 deletions

69
poetry.lock generated
View File

@@ -1858,47 +1858,54 @@ files = [
[[package]]
name = "mypy"
version = "1.3.0"
version = "1.13.0"
description = "Optional static typing for Python"
optional = false
python-versions = ">=3.7"
python-versions = ">=3.8"
files = [
{file = "mypy-1.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c1eb485cea53f4f5284e5baf92902cd0088b24984f4209e25981cc359d64448d"},
{file = "mypy-1.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4c99c3ecf223cf2952638da9cd82793d8f3c0c5fa8b6ae2b2d9ed1e1ff51ba85"},
{file = "mypy-1.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:550a8b3a19bb6589679a7c3c31f64312e7ff482a816c96e0cecec9ad3a7564dd"},
{file = "mypy-1.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cbc07246253b9e3d7d74c9ff948cd0fd7a71afcc2b77c7f0a59c26e9395cb152"},
{file = "mypy-1.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:a22435632710a4fcf8acf86cbd0d69f68ac389a3892cb23fbad176d1cddaf228"},
{file = "mypy-1.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6e33bb8b2613614a33dff70565f4c803f889ebd2f859466e42b46e1df76018dd"},
{file = "mypy-1.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7d23370d2a6b7a71dc65d1266f9a34e4cde9e8e21511322415db4b26f46f6b8c"},
{file = "mypy-1.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:658fe7b674769a0770d4b26cb4d6f005e88a442fe82446f020be8e5f5efb2fae"},
{file = "mypy-1.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6e42d29e324cdda61daaec2336c42512e59c7c375340bd202efa1fe0f7b8f8ca"},
{file = "mypy-1.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:d0b6c62206e04061e27009481cb0ec966f7d6172b5b936f3ead3d74f29fe3dcf"},
{file = "mypy-1.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:76ec771e2342f1b558c36d49900dfe81d140361dd0d2df6cd71b3db1be155409"},
{file = "mypy-1.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ebc95f8386314272bbc817026f8ce8f4f0d2ef7ae44f947c4664efac9adec929"},
{file = "mypy-1.3.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:faff86aa10c1aa4a10e1a301de160f3d8fc8703b88c7e98de46b531ff1276a9a"},
{file = "mypy-1.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:8c5979d0deb27e0f4479bee18ea0f83732a893e81b78e62e2dda3e7e518c92ee"},
{file = "mypy-1.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c5d2cc54175bab47011b09688b418db71403aefad07cbcd62d44010543fc143f"},
{file = "mypy-1.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:87df44954c31d86df96c8bd6e80dfcd773473e877ac6176a8e29898bfb3501cb"},
{file = "mypy-1.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:473117e310febe632ddf10e745a355714e771ffe534f06db40702775056614c4"},
{file = "mypy-1.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:74bc9b6e0e79808bf8678d7678b2ae3736ea72d56eede3820bd3849823e7f305"},
{file = "mypy-1.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:44797d031a41516fcf5cbfa652265bb994e53e51994c1bd649ffcd0c3a7eccbf"},
{file = "mypy-1.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ddae0f39ca146972ff6bb4399f3b2943884a774b8771ea0a8f50e971f5ea5ba8"},
{file = "mypy-1.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1c4c42c60a8103ead4c1c060ac3cdd3ff01e18fddce6f1016e08939647a0e703"},
{file = "mypy-1.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e86c2c6852f62f8f2b24cb7a613ebe8e0c7dc1402c61d36a609174f63e0ff017"},
{file = "mypy-1.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f9dca1e257d4cc129517779226753dbefb4f2266c4eaad610fc15c6a7e14283e"},
{file = "mypy-1.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:95d8d31a7713510685b05fbb18d6ac287a56c8f6554d88c19e73f724a445448a"},
{file = "mypy-1.3.0-py3-none-any.whl", hash = "sha256:a8763e72d5d9574d45ce5881962bc8e9046bf7b375b0abf031f3e6811732a897"},
{file = "mypy-1.3.0.tar.gz", hash = "sha256:e1f4d16e296f5135624b34e8fb741eb0eadedca90862405b1f1fde2040b9bd11"},
{file = "mypy-1.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6607e0f1dd1fb7f0aca14d936d13fd19eba5e17e1cd2a14f808fa5f8f6d8f60a"},
{file = "mypy-1.13.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8a21be69bd26fa81b1f80a61ee7ab05b076c674d9b18fb56239d72e21d9f4c80"},
{file = "mypy-1.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7b2353a44d2179846a096e25691d54d59904559f4232519d420d64da6828a3a7"},
{file = "mypy-1.13.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0730d1c6a2739d4511dc4253f8274cdd140c55c32dfb0a4cf8b7a43f40abfa6f"},
{file = "mypy-1.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:c5fc54dbb712ff5e5a0fca797e6e0aa25726c7e72c6a5850cfd2adbc1eb0a372"},
{file = "mypy-1.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:581665e6f3a8a9078f28d5502f4c334c0c8d802ef55ea0e7276a6e409bc0d82d"},
{file = "mypy-1.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3ddb5b9bf82e05cc9a627e84707b528e5c7caaa1c55c69e175abb15a761cec2d"},
{file = "mypy-1.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:20c7ee0bc0d5a9595c46f38beb04201f2620065a93755704e141fcac9f59db2b"},
{file = "mypy-1.13.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3790ded76f0b34bc9c8ba4def8f919dd6a46db0f5a6610fb994fe8efdd447f73"},
{file = "mypy-1.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:51f869f4b6b538229c1d1bcc1dd7d119817206e2bc54e8e374b3dfa202defcca"},
{file = "mypy-1.13.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5c7051a3461ae84dfb5dd15eff5094640c61c5f22257c8b766794e6dd85e72d5"},
{file = "mypy-1.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:39bb21c69a5d6342f4ce526e4584bc5c197fd20a60d14a8624d8743fffb9472e"},
{file = "mypy-1.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:164f28cb9d6367439031f4c81e84d3ccaa1e19232d9d05d37cb0bd880d3f93c2"},
{file = "mypy-1.13.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a4c1bfcdbce96ff5d96fc9b08e3831acb30dc44ab02671eca5953eadad07d6d0"},
{file = "mypy-1.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:a0affb3a79a256b4183ba09811e3577c5163ed06685e4d4b46429a271ba174d2"},
{file = "mypy-1.13.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a7b44178c9760ce1a43f544e595d35ed61ac2c3de306599fa59b38a6048e1aa7"},
{file = "mypy-1.13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5d5092efb8516d08440e36626f0153b5006d4088c1d663d88bf79625af3d1d62"},
{file = "mypy-1.13.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:de2904956dac40ced10931ac967ae63c5089bd498542194b436eb097a9f77bc8"},
{file = "mypy-1.13.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:7bfd8836970d33c2105562650656b6846149374dc8ed77d98424b40b09340ba7"},
{file = "mypy-1.13.0-cp313-cp313-win_amd64.whl", hash = "sha256:9f73dba9ec77acb86457a8fc04b5239822df0c14a082564737833d2963677dbc"},
{file = "mypy-1.13.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:100fac22ce82925f676a734af0db922ecfea991e1d7ec0ceb1e115ebe501301a"},
{file = "mypy-1.13.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7bcb0bb7f42a978bb323a7c88f1081d1b5dee77ca86f4100735a6f541299d8fb"},
{file = "mypy-1.13.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bde31fc887c213e223bbfc34328070996061b0833b0a4cfec53745ed61f3519b"},
{file = "mypy-1.13.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:07de989f89786f62b937851295ed62e51774722e5444a27cecca993fc3f9cd74"},
{file = "mypy-1.13.0-cp38-cp38-win_amd64.whl", hash = "sha256:4bde84334fbe19bad704b3f5b78c4abd35ff1026f8ba72b29de70dda0916beb6"},
{file = "mypy-1.13.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0246bcb1b5de7f08f2826451abd947bf656945209b140d16ed317f65a17dc7dc"},
{file = "mypy-1.13.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7f5b7deae912cf8b77e990b9280f170381fdfbddf61b4ef80927edd813163732"},
{file = "mypy-1.13.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7029881ec6ffb8bc233a4fa364736789582c738217b133f1b55967115288a2bc"},
{file = "mypy-1.13.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3e38b980e5681f28f033f3be86b099a247b13c491f14bb8b1e1e134d23bb599d"},
{file = "mypy-1.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:a6789be98a2017c912ae6ccb77ea553bbaf13d27605d2ca20a76dfbced631b24"},
{file = "mypy-1.13.0-py3-none-any.whl", hash = "sha256:9c250883f9fd81d212e0952c92dbfcc96fc237f4b7c92f56ac81fd48460b3e5a"},
{file = "mypy-1.13.0.tar.gz", hash = "sha256:0291a61b6fbf3e6673e3405cfcc0e7650bebc7939659fdca2702958038bd835e"},
]
[package.dependencies]
mypy-extensions = ">=1.0.0"
typing-extensions = ">=3.10"
typing-extensions = ">=4.6.0"
[package.extras]
dmypy = ["psutil (>=4.0)"]
faster-cache = ["orjson"]
install-types = ["pip"]
python2 = ["typed-ast (>=1.4.0,<2)"]
mypyc = ["setuptools (>=50)"]
reports = ["lxml"]
[[package]]
@@ -3517,4 +3524,4 @@ cffi = ["cffi (>=1.11)"]
[metadata]
lock-version = "2.0"
python-versions = "^3.11"
content-hash = "5a9b8c8d409acb840c0a94dcdec6aac9777ccec443d74c78dbd511fa223cd6f6"
content-hash = "21debe1116843e5d14bdf37d6e265c68c63a98a64ba04ec8b8a02af2e8d9f486"

View File

@@ -51,7 +51,7 @@ testcontainers = "^4.8.1"
jsonnet = "^0.20.0"
[tool.poetry.group.dev.dependencies]
mypy = "==1.3.0"
mypy = "==1.13.0"
ruff = "^0.7.0"
[build-system]

View File

@@ -194,9 +194,11 @@ async def main_impl(args, report_out, client: Client):
tenant_ids = await client.get_tenant_ids()
get_timeline_id_coros = [client.get_timeline_ids(tenant_id) for tenant_id in tenant_ids]
gathered = await asyncio.gather(*get_timeline_id_coros, return_exceptions=True)
assert len(tenant_ids) == len(gathered)
tenant_and_timline_ids = []
for tid, tlids in zip(tenant_ids, gathered, strict=False):
for tid, tlids in zip(tenant_ids, gathered, strict=True):
# TODO: add error handling if tlids isinstance(Exception)
assert isinstance(tlids, list)
for tlid in tlids:
tenant_and_timline_ids.append((tid, tlid))
elif len(comps) == 1:

View File

@@ -190,10 +190,6 @@ class TenantTimelineId:
)
# Workaround for compat with python 3.9, which does not have `typing.Self`
TTenantShardId = TypeVar("TTenantShardId", bound="TenantShardId")
class TenantShardId:
def __init__(self, tenant_id: TenantId, shard_number: int, shard_count: int):
self.tenant_id = tenant_id
@@ -202,7 +198,7 @@ class TenantShardId:
assert self.shard_number < self.shard_count or self.shard_count == 0
@classmethod
def parse(cls: type[TTenantShardId], input: str) -> TTenantShardId:
def parse(cls: type[TenantShardId], input: str) -> TenantShardId:
if len(input) == 32:
return cls(
tenant_id=TenantId(input),

View File

@@ -69,7 +69,7 @@ def compute_reconfigure_listener(make_httpserver: HTTPServer):
# This causes the endpoint to query storage controller for its location, which
# is redundant since we already have it here, but this avoids extending the
# neon_local CLI to take full lists of locations
reconfigure_threads.submit(lambda workload=workload: workload.reconfigure()) # type: ignore[no-any-return]
reconfigure_threads.submit(lambda workload=workload: workload.reconfigure()) # type: ignore[misc]
return Response(status=200)

View File

@@ -20,12 +20,9 @@ from fixtures.pg_version import PgVersion
if TYPE_CHECKING:
from typing import (
Any,
TypeVar,
cast,
)
T = TypeVar("T")
# Used to be an ABC. abc.ABC removed due to linter without name change.
class AbstractNeonCli:

View File

@@ -102,10 +102,7 @@ from .neon_api import NeonAPI, NeonApiEndpoint
if TYPE_CHECKING:
from collections.abc import Callable
from typing import (
Any,
TypeVar,
)
from typing import Any, Self, TypeVar
from fixtures.paths import SnapshotDirLocked
@@ -838,7 +835,7 @@ class NeonEnvBuilder:
if isinstance(x, S3Storage):
x.do_cleanup()
def __enter__(self) -> NeonEnvBuilder:
def __enter__(self) -> Self:
return self
def __exit__(
@@ -1148,21 +1145,19 @@ class NeonEnv:
with concurrent.futures.ThreadPoolExecutor(
max_workers=2 + len(self.pageservers) + len(self.safekeepers)
) as executor:
futs.append(
executor.submit(lambda: self.broker.start() or None)
) # The `or None` is for the linter
futs.append(executor.submit(lambda: self.broker.start()))
for pageserver in self.pageservers:
futs.append(
executor.submit(
lambda ps=pageserver: ps.start(timeout_in_seconds=timeout_in_seconds)
lambda ps=pageserver: ps.start(timeout_in_seconds=timeout_in_seconds) # type: ignore[misc]
)
)
for safekeeper in self.safekeepers:
futs.append(
executor.submit(
lambda sk=safekeeper: sk.start(timeout_in_seconds=timeout_in_seconds)
lambda sk=safekeeper: sk.start(timeout_in_seconds=timeout_in_seconds) # type: ignore[misc]
)
)
@@ -1602,13 +1597,13 @@ class NeonStorageController(MetricsGetter, LogUtils):
timeout_in_seconds: int | None = None,
instance_id: int | None = None,
base_port: int | None = None,
):
) -> Self:
assert not self.running
self.env.neon_cli.storage_controller_start(timeout_in_seconds, instance_id, base_port)
self.running = True
return self
def stop(self, immediate: bool = False) -> NeonStorageController:
def stop(self, immediate: bool = False) -> Self:
if self.running:
self.env.neon_cli.storage_controller_stop(immediate)
self.running = False
@@ -2282,7 +2277,7 @@ class NeonStorageController(MetricsGetter, LogUtils):
response.raise_for_status()
return [TenantShardId.parse(tid) for tid in response.json()["updated"]]
def __enter__(self) -> NeonStorageController:
def __enter__(self) -> Self:
return self
def __exit__(
@@ -2304,7 +2299,7 @@ class NeonProxiedStorageController(NeonStorageController):
timeout_in_seconds: int | None = None,
instance_id: int | None = None,
base_port: int | None = None,
):
) -> Self:
assert instance_id is not None and base_port is not None
self.env.neon_cli.storage_controller_start(timeout_in_seconds, instance_id, base_port)
@@ -2324,7 +2319,7 @@ class NeonProxiedStorageController(NeonStorageController):
self.running = any(meta["running"] for meta in self.instances.values())
return self
def stop(self, immediate: bool = False) -> NeonStorageController:
def stop(self, immediate: bool = False) -> Self:
for iid, details in self.instances.items():
if details["running"]:
self.env.neon_cli.storage_controller_stop(immediate, iid)
@@ -2446,7 +2441,7 @@ class NeonPageserver(PgProtocol, LogUtils):
self,
extra_env_vars: dict[str, str] | None = None,
timeout_in_seconds: int | None = None,
) -> NeonPageserver:
) -> Self:
"""
Start the page server.
`overrides` allows to add some config to this pageserver start.
@@ -2481,7 +2476,7 @@ class NeonPageserver(PgProtocol, LogUtils):
return self
def stop(self, immediate: bool = False) -> NeonPageserver:
def stop(self, immediate: bool = False) -> Self:
"""
Stop the page server.
Returns self.
@@ -2529,7 +2524,7 @@ class NeonPageserver(PgProtocol, LogUtils):
wait_until(20, 0.5, complete)
def __enter__(self) -> NeonPageserver:
def __enter__(self) -> Self:
return self
def __exit__(
@@ -2957,7 +2952,7 @@ class VanillaPostgres(PgProtocol):
"""Return size of pgdatadir subdirectory in bytes."""
return get_dir_size(self.pgdatadir / subdir)
def __enter__(self) -> VanillaPostgres:
def __enter__(self) -> Self:
return self
def __exit__(
@@ -3006,7 +3001,7 @@ class RemotePostgres(PgProtocol):
# See https://www.postgresql.org/docs/14/functions-admin.html#FUNCTIONS-ADMIN-GENFILE
raise Exception("cannot get size of a Postgres instance")
def __enter__(self) -> RemotePostgres:
def __enter__(self) -> Self:
return self
def __exit__(
@@ -3220,7 +3215,7 @@ class NeonProxy(PgProtocol):
self.http_timeout_seconds = 15
self._popen: subprocess.Popen[bytes] | None = None
def start(self) -> NeonProxy:
def start(self) -> Self:
assert self._popen is None
# generate key of it doesn't exist
@@ -3348,7 +3343,7 @@ class NeonProxy(PgProtocol):
log.info(f"SUCCESS, found auth url: {line}")
return line
def __enter__(self) -> NeonProxy:
def __enter__(self) -> Self:
return self
def __exit__(
@@ -3438,7 +3433,7 @@ class NeonAuthBroker:
self.http_timeout_seconds = 15
self._popen: subprocess.Popen[bytes] | None = None
def start(self) -> NeonAuthBroker:
def start(self) -> Self:
assert self._popen is None
# generate key of it doesn't exist
@@ -3507,7 +3502,7 @@ class NeonAuthBroker:
request_result = requests.get(f"http://{self.host}:{self.http_port}/metrics")
return request_result.text
def __enter__(self) -> NeonAuthBroker:
def __enter__(self) -> Self:
return self
def __exit__(
@@ -3704,7 +3699,7 @@ class Endpoint(PgProtocol, LogUtils):
config_lines: list[str] | None = None,
pageserver_id: int | None = None,
allow_multiple: bool = False,
) -> Endpoint:
) -> Self:
"""
Create a new Postgres endpoint.
Returns self.
@@ -3750,7 +3745,7 @@ class Endpoint(PgProtocol, LogUtils):
safekeepers: list[int] | None = None,
allow_multiple: bool = False,
basebackup_request_tries: int | None = None,
) -> Endpoint:
) -> Self:
"""
Start the Postgres instance.
Returns self.
@@ -3797,7 +3792,7 @@ class Endpoint(PgProtocol, LogUtils):
"""Path to the postgresql.conf in the endpoint directory (not the one in pgdata)"""
return self.endpoint_path() / "postgresql.conf"
def config(self, lines: list[str]) -> Endpoint:
def config(self, lines: list[str]) -> Self:
"""
Add lines to postgresql.conf.
Lines should be an array of valid postgresql.conf rows.
@@ -3873,7 +3868,7 @@ class Endpoint(PgProtocol, LogUtils):
self,
mode: str = "fast",
sks_wait_walreceiver_gone: tuple[list[Safekeeper], TimelineId] | None = None,
) -> Endpoint:
) -> Self:
"""
Stop the Postgres instance if it's running.
@@ -3907,7 +3902,7 @@ class Endpoint(PgProtocol, LogUtils):
return self
def stop_and_destroy(self, mode: str = "immediate") -> Endpoint:
def stop_and_destroy(self, mode: str = "immediate") -> Self:
"""
Stop the Postgres instance, then destroy the endpoint.
Returns self.
@@ -3934,7 +3929,7 @@ class Endpoint(PgProtocol, LogUtils):
pageserver_id: int | None = None,
allow_multiple: bool = False,
basebackup_request_tries: int | None = None,
) -> Endpoint:
) -> Self:
"""
Create an endpoint, apply config, and start Postgres.
Returns self.
@@ -3957,7 +3952,7 @@ class Endpoint(PgProtocol, LogUtils):
return self
def __enter__(self) -> Endpoint:
def __enter__(self) -> Self:
return self
def __exit__(
@@ -4058,7 +4053,7 @@ class EndpointFactory:
pageserver_id=pageserver_id,
)
def stop_all(self, fail_on_error=True) -> EndpointFactory:
def stop_all(self, fail_on_error=True) -> Self:
exception = None
for ep in self.endpoints:
try:
@@ -4154,7 +4149,7 @@ class Safekeeper(LogUtils):
def start(
self, extra_opts: list[str] | None = None, timeout_in_seconds: int | None = None
) -> Safekeeper:
) -> Self:
if extra_opts is None:
# Apply either the extra_opts passed in, or the ones from our constructor: we do not merge the two.
extra_opts = self.extra_opts
@@ -4189,7 +4184,7 @@ class Safekeeper(LogUtils):
break # success
return self
def stop(self, immediate: bool = False) -> Safekeeper:
def stop(self, immediate: bool = False) -> Self:
self.env.neon_cli.safekeeper_stop(self.id, immediate)
self.running = False
return self
@@ -4367,13 +4362,13 @@ class NeonBroker(LogUtils):
def start(
self,
timeout_in_seconds: int | None = None,
):
) -> Self:
assert not self.running
self.env.neon_cli.storage_broker_start(timeout_in_seconds)
self.running = True
return self
def stop(self):
def stop(self) -> Self:
if self.running:
self.env.neon_cli.storage_broker_stop()
self.running = False

View File

@@ -66,6 +66,7 @@ def pytest_generate_tests(metafunc: Metafunc):
metafunc.parametrize("build_type", build_types)
pg_versions: list[PgVersion]
if (v := os.getenv("DEFAULT_PG_VERSION")) is None:
pg_versions = [version for version in PgVersion if version != PgVersion.NOT_SET]
else:

View File

@@ -53,7 +53,7 @@ class Workload:
self._endpoint: Endpoint | None = None
self._endpoint_opts = endpoint_opts or {}
def reconfigure(self):
def reconfigure(self) -> None:
"""
Request the endpoint to reconfigure based on location reported by storage controller
"""

View File

@@ -17,7 +17,7 @@ from fixtures.paths import BASE_DIR, COMPUTE_CONFIG_DIR
if TYPE_CHECKING:
from types import TracebackType
from typing import TypedDict
from typing import Self, TypedDict
from fixtures.neon_fixtures import NeonEnv
from fixtures.pg_version import PgVersion
@@ -185,7 +185,7 @@ class SqlExporterRunner:
def stop(self) -> None:
raise NotImplementedError()
def __enter__(self) -> SqlExporterRunner:
def __enter__(self) -> Self:
self.start()
return self
@@ -242,8 +242,7 @@ if SQL_EXPORTER is None:
self.with_volume_mapping(str(config_file), container_config_file, "z")
self.with_volume_mapping(str(collector_file), container_collector_file, "z")
@override
def start(self) -> SqlExporterContainer:
def start(self) -> Self:
super().start()
log.info("Waiting for sql_exporter to be ready")

View File

@@ -13,7 +13,7 @@ from werkzeug.wrappers.request import Request
from werkzeug.wrappers.response import Response
if TYPE_CHECKING:
from typing import Any
from typing import Any, Self
def handle_db(dbs, roles, operation):
@@ -91,7 +91,7 @@ class DdlForwardingContext:
lambda request: ddl_forward_handler(request, self.dbs, self.roles, self)
)
def __enter__(self):
def __enter__(self) -> Self:
self.pg.start()
return self

View File

@@ -131,7 +131,7 @@ def test_pageserver_small_inmemory_layers(
wait_until_pageserver_is_caught_up(env, last_flush_lsns)
# We didn't write enough data to trigger a size-based checkpoint: we should see dirty data.
wait_until(10, 1, lambda: assert_dirty_bytes_nonzero(env)) # type: ignore
wait_until(10, 1, lambda: assert_dirty_bytes_nonzero(env))
ps_http_client = env.pageserver.http_client()
total_wal_ingested_before_restart = wait_for_wal_ingest_metric(ps_http_client)
@@ -139,7 +139,7 @@ def test_pageserver_small_inmemory_layers(
# Within ~ the checkpoint interval, all the ephemeral layers should be frozen and flushed,
# such that there are zero bytes of ephemeral layer left on the pageserver
log.info("Waiting for background checkpoints...")
wait_until(CHECKPOINT_TIMEOUT_SECONDS * 2, 1, lambda: assert_dirty_bytes(env, 0)) # type: ignore
wait_until(CHECKPOINT_TIMEOUT_SECONDS * 2, 1, lambda: assert_dirty_bytes(env, 0))
# Zero ephemeral layer bytes does not imply that all the frozen layers were uploaded: they
# must be uploaded to remain visible to the pageserver after restart.
@@ -180,7 +180,7 @@ def test_idle_checkpoints(neon_env_builder: NeonEnvBuilder):
wait_until_pageserver_is_caught_up(env, last_flush_lsns)
# We didn't write enough data to trigger a size-based checkpoint: we should see dirty data.
wait_until(10, 1, lambda: assert_dirty_bytes_nonzero(env)) # type: ignore
wait_until(10, 1, lambda: assert_dirty_bytes_nonzero(env))
# Stop the safekeepers, so that we cannot have any more WAL receiver connections
for sk in env.safekeepers:
@@ -193,7 +193,7 @@ def test_idle_checkpoints(neon_env_builder: NeonEnvBuilder):
# Within ~ the checkpoint interval, all the ephemeral layers should be frozen and flushed,
# such that there are zero bytes of ephemeral layer left on the pageserver
log.info("Waiting for background checkpoints...")
wait_until(CHECKPOINT_TIMEOUT_SECONDS * 2, 1, lambda: assert_dirty_bytes(env, 0)) # type: ignore
wait_until(CHECKPOINT_TIMEOUT_SECONDS * 2, 1, lambda: assert_dirty_bytes(env, 0))
# The code below verifies that we do not flush on the first write
# after an idle period longer than the checkpoint timeout.
@@ -210,7 +210,7 @@ def test_idle_checkpoints(neon_env_builder: NeonEnvBuilder):
run_worker_for_tenant(env, 5, tenant_with_extra_writes, offset=ENTRIES_PER_TIMELINE)
)
dirty_after_write = wait_until(10, 1, lambda: assert_dirty_bytes_nonzero(env)) # type: ignore
dirty_after_write = wait_until(10, 1, lambda: assert_dirty_bytes_nonzero(env))
# We shouldn't flush since we've just opened a new layer
waited_for = 0
@@ -312,4 +312,4 @@ def test_total_size_limit(neon_env_builder: NeonEnvBuilder):
dirty_bytes = get_dirty_bytes(env)
assert dirty_bytes < max_dirty_data
wait_until(compaction_period_s * 2, 1, lambda: assert_dirty_data_limited()) # type: ignore
wait_until(compaction_period_s * 2, 1, lambda: assert_dirty_data_limited())

View File

@@ -702,7 +702,7 @@ def test_secondary_background_downloads(neon_env_builder: NeonEnvBuilder):
else:
timeout = int(deadline - now) + 1
try:
wait_until(timeout, 1, lambda: pageserver.assert_log_contains(expression)) # type: ignore
wait_until(timeout, 1, lambda: pageserver.assert_log_contains(expression))
except:
log.error(f"Timed out waiting for '{expression}'")
raise

View File

@@ -1405,7 +1405,7 @@ def test_sharding_split_failures(
# e.g. while waiting for a storage controller to re-attach a parent shard if we failed
# inside the pageserver and the storage controller responds by detaching children and attaching
# parents concurrently (https://github.com/neondatabase/neon/issues/7148)
wait_until(10, 1, lambda: workload.churn_rows(10, upload=False, ingest=False)) # type: ignore
wait_until(10, 1, lambda: workload.churn_rows(10, upload=False, ingest=False))
workload.validate()

View File

@@ -3,13 +3,17 @@ from __future__ import annotations
import socket
import subprocess
from pathlib import Path
from types import TracebackType
from typing import TYPE_CHECKING
import backoff
from fixtures.log_helper import log
from fixtures.neon_fixtures import PgProtocol, VanillaPostgres
from fixtures.port_distributor import PortDistributor
if TYPE_CHECKING:
from types import TracebackType
from typing import Self
def generate_tls_cert(cn, certout, keyout):
subprocess.run(
@@ -54,7 +58,7 @@ class PgSniRouter(PgProtocol):
self._popen: subprocess.Popen[bytes] | None = None
self.test_output_dir = test_output_dir
def start(self) -> PgSniRouter:
def start(self) -> Self:
assert self._popen is None
args = [
str(self.neon_binpath / "pg_sni_router"),
@@ -87,7 +91,7 @@ class PgSniRouter(PgProtocol):
if self._popen:
self._popen.wait(timeout=2)
def __enter__(self) -> PgSniRouter:
def __enter__(self) -> Self:
return self
def __exit__(

View File

@@ -2494,14 +2494,14 @@ def start_env(env: NeonEnv, storage_controller_port: int):
for pageserver in env.pageservers:
futs.append(
executor.submit(
lambda ps=pageserver: ps.start(timeout_in_seconds=timeout_in_seconds)
lambda ps=pageserver: ps.start(timeout_in_seconds=timeout_in_seconds) # type: ignore[misc]
)
)
for safekeeper in env.safekeepers:
futs.append(
executor.submit(
lambda sk=safekeeper: sk.start(timeout_in_seconds=timeout_in_seconds)
lambda sk=safekeeper: sk.start(timeout_in_seconds=timeout_in_seconds) # type: ignore[misc]
)
)

View File

@@ -61,7 +61,7 @@ from fixtures.utils import (
)
if TYPE_CHECKING:
from typing import Any
from typing import Any, Self
def wait_lsn_force_checkpoint(
@@ -1460,7 +1460,7 @@ class SafekeeperEnv:
self.tenant_id: TenantId | None = None
self.timeline_id: TimelineId | None = None
def init(self) -> SafekeeperEnv:
def init(self) -> Self:
assert self.postgres is None, "postgres is already initialized"
assert self.safekeepers is None, "safekeepers are already initialized"
@@ -1541,7 +1541,7 @@ class SafekeeperEnv:
log.info(f"Killing safekeeper with pid {pid}")
os.kill(pid, signal.SIGKILL)
def __enter__(self):
def __enter__(self) -> Self:
return self
def __exit__(self, exc_type, exc_value, traceback):