Mirror of https://github.com/neondatabase/neon.git
Enable all pyupgrade checks in ruff
This will help keep us from using deprecated Python features going forward.

Signed-off-by: Tristan Partin <tristan@neon.tech>
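For context, a minimal sketch of the kinds of rewrites pyupgrade's `UP` rules apply (hypothetical code, not taken from this commit): deprecated `typing.List`/`typing.Dict` spellings become builtin generics, and `from __future__ import annotations` keeps the annotations unevaluated at runtime so this works on older interpreters.

    # Illustrative sketch of typical pyupgrade (UP) rewrites; hypothetical code.
    from __future__ import annotations  # annotations become lazy strings (PEP 563)

    # Before: from typing import Dict, List, Optional
    # After: builtin generics (PEP 585); Optional survives until Python 3.10.
    from typing import Optional


    def tally(words: list[str], seed: Optional[dict[str, int]] = None) -> dict[str, int]:
        counts = dict(seed or {})
        for w in words:
            counts[w] = counts.get(w, 0) + 1
        return counts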
@@ -1,11 +1,12 @@
 #!/usr/bin/env python3

+from __future__ import annotations
+
 import argparse
 import enum
 import os
 import subprocess
 import sys
-from typing import List


 @enum.unique
@@ -55,12 +56,12 @@ def mypy() -> str:
     return "poetry run mypy"


-def get_commit_files() -> List[str]:
+def get_commit_files() -> list[str]:
     files = subprocess.check_output("git diff --cached --name-only --diff-filter=ACM".split())
     return files.decode().splitlines()


-def check(name: str, suffix: str, cmd: str, changed_files: List[str], no_color: bool = False):
+def check(name: str, suffix: str, cmd: str, changed_files: list[str], no_color: bool = False):
     print(f"Checking: {name} ", end="")
     applicable_files = list(filter(lambda fname: fname.strip().endswith(suffix), changed_files))
     if not applicable_files:
@@ -97,5 +97,8 @@ select = [
     "I",  # isort
     "W",  # pycodestyle
     "B",  # bugbear
-    "UP032",  # f-string
+    "UP",  # pyupgrade
 ]
+
+[tool.ruff.lint.pyupgrade]
+keep-runtime-typing = true  # Remove this stanza when we require Python 3.10
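The `keep-runtime-typing` stanza is what lets this land before the project requires Python 3.10: it suppresses the PEP 604 rewrites (`Optional[X]`/`Union[X, Y]` to `X | None`/`X | Y`) that would fail at runtime on older interpreters, which is why `Optional` and `Union` survive throughout this diff while `List`/`Dict`/`Tuple` become `list`/`dict`/`tuple`. A minimal sketch of code the stanza leaves alone (hypothetical, not from the commit):

    from __future__ import annotations

    from typing import Optional, Union


    # With keep-runtime-typing = true these spellings are kept; on a
    # Python >= 3.10 target pyupgrade would rewrite them to `int | str`
    # and `int | None`.
    def clamp(value: Union[int, str], limit: Optional[int] = None) -> int:
        n = int(value)
        return n if limit is None else min(n, limit)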
@@ -1,9 +1,10 @@
 #! /usr/bin/env python3

+from __future__ import annotations
+
 import argparse
 import json
 import logging
-from typing import Dict

 import psycopg2
 import psycopg2.extras
@@ -110,7 +111,7 @@ def main(args: argparse.Namespace):
     output = args.output
     percentile = args.percentile

-    res: Dict[str, float] = {}
+    res: dict[str, float] = {}

     try:
         logging.info("connecting to the database...")
@@ -4,6 +4,9 @@
 #
 # This can be useful in disaster recovery.
 #
+
+from __future__ import annotations
+
 import argparse

 import psycopg2
@@ -1,16 +1,21 @@
 #! /usr/bin/env python3

+from __future__ import annotations
+
 import argparse
 import json
 import logging
 import os
 from collections import defaultdict
-from typing import Any, DefaultDict, Dict, Optional
+from typing import TYPE_CHECKING

 import psycopg2
 import psycopg2.extras
 import toml

+if TYPE_CHECKING:
+    from typing import Any, Optional
+
 FLAKY_TESTS_QUERY = """
 SELECT
     DISTINCT parent_suite, suite, name
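This `if TYPE_CHECKING:` move recurs across the whole commit: once `from __future__ import annotations` makes every annotation a lazy string, names used only in annotations never need to exist at runtime, so their imports can live under the type-checker-only guard. A minimal sketch of the pattern (hypothetical code, not from the commit):

    from __future__ import annotations

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        # Imported only for the type checker; never executed at runtime,
        # so there is no import cost and no circular-import risk.
        from typing import Any, Optional


    def load(payload: Optional[dict[str, Any]]) -> dict[str, Any]:
        # `Optional` appears only inside an annotation, which the
        # __future__ import keeps unevaluated, so this runs fine.
        return payload or {}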
@@ -33,7 +38,7 @@ def main(args: argparse.Namespace):
     build_type = args.build_type
     pg_version = args.pg_version

-    res: DefaultDict[str, DefaultDict[str, Dict[str, bool]]]
+    res: defaultdict[str, defaultdict[str, dict[str, bool]]]
     res = defaultdict(lambda: defaultdict(dict))

     try:
@@ -60,7 +65,7 @@ def main(args: argparse.Namespace):
        pageserver_virtual_file_io_engine_parameter = ""

    # re-use existing records of flaky tests from before parametrization by compaction_algorithm
-    def get_pageserver_default_tenant_config_compaction_algorithm() -> Optional[Dict[str, Any]]:
+    def get_pageserver_default_tenant_config_compaction_algorithm() -> Optional[dict[str, Any]]:
        """Duplicated from parametrize.py"""
        toml_table = os.getenv("PAGESERVER_DEFAULT_TENANT_CONFIG_COMPACTION_ALGORITHM")
        if toml_table is None:
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import argparse
 import asyncio
 import json
@@ -5,11 +7,15 @@ import logging
 import signal
 import sys
 from collections import defaultdict
+from collections.abc import Awaitable
 from dataclasses import dataclass
-from typing import Any, Awaitable, Dict, List, Tuple
+from typing import TYPE_CHECKING

 import aiohttp

+if TYPE_CHECKING:
+    from typing import Any
+

 class ClientException(Exception):
     pass
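Alongside the builtin-generic rewrites, the commit swaps deprecated `typing` aliases for their `collections.abc` counterparts (PEP 585 deprecated `typing.Awaitable`, `typing.Iterator`, and friends in 3.9). A minimal sketch of the preferred spelling (hypothetical code, not from the commit):

    from __future__ import annotations

    from collections.abc import Awaitable, Iterator  # not typing.Awaitable / typing.Iterator


    def countdown(n: int) -> Iterator[int]:
        while n > 0:
            yield n
            n -= 1


    async def run_all(tasks: list[Awaitable[None]]) -> None:
        for t in tasks:
            await t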
@@ -89,7 +95,7 @@ class Client:
 class Completed:
     """The status dict returned by the API"""

-    status: Dict[str, Any]
+    status: dict[str, Any]


 sigint_received = asyncio.Event()
@@ -179,7 +185,7 @@ async def main_impl(args, report_out, client: Client):
    """
    Returns OS exit status.
    """
-    tenant_and_timline_ids: List[Tuple[str, str]] = []
+    tenant_and_timline_ids: list[tuple[str, str]] = []
    # fill tenant_and_timline_ids based on spec
    for spec in args.what:
        comps = spec.split(":")
@@ -215,14 +221,14 @@ async def main_impl(args, report_out, client: Client):
    tenant_and_timline_ids = tmp

    logging.info("create tasks and process them at specified concurrency")
-    task_q: asyncio.Queue[Tuple[str, Awaitable[Any]]] = asyncio.Queue()
+    task_q: asyncio.Queue[tuple[str, Awaitable[Any]]] = asyncio.Queue()
    tasks = {
        f"{tid}:{tlid}": do_timeline(client, tid, tlid) for tid, tlid in tenant_and_timline_ids
    }
    for task in tasks.items():
        task_q.put_nowait(task)

-    result_q: asyncio.Queue[Tuple[str, Any]] = asyncio.Queue()
+    result_q: asyncio.Queue[tuple[str, Any]] = asyncio.Queue()
    taskq_handlers = []
    for _ in range(0, args.concurrent_tasks):
        taskq_handlers.append(taskq_handler(task_q, result_q))
@@ -1,4 +1,7 @@
 #!/usr/bin/env python3

+from __future__ import annotations
+
 import argparse
 import json
 import logging
@@ -1,5 +1,7 @@
 #! /usr/bin/env python3

+from __future__ import annotations
+
 import argparse
 import dataclasses
 import json
@@ -11,7 +13,6 @@ from contextlib import contextmanager
 from dataclasses import dataclass
 from datetime import datetime, timezone
 from pathlib import Path
-from typing import Tuple

 import backoff
 import psycopg2
@@ -91,7 +92,7 @@ def create_table(cur):
     cur.execute(CREATE_TABLE)


-def parse_test_name(test_name: str) -> Tuple[str, int, str]:
+def parse_test_name(test_name: str) -> tuple[str, int, str]:
     build_type, pg_version = None, None
     if match := TEST_NAME_RE.search(test_name):
         found = match.groupdict()
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import argparse
 import logging
 import os
@@ -2,6 +2,8 @@
 Run the regression tests on the cloud instance of Neon
 """

+from __future__ import annotations
+
 from pathlib import Path
 from typing import Any
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 pytest_plugins = (
     "fixtures.pg_version",
     "fixtures.parametrize",
@@ -0,0 +1 @@
+from __future__ import annotations
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import calendar
 import dataclasses
 import enum
@@ -5,12 +7,11 @@ import json
 import os
 import re
 import timeit
+from collections.abc import Iterator
 from contextlib import contextmanager
 from datetime import datetime
 from pathlib import Path
-
-# Type-related stuff
-from typing import Callable, ClassVar, Dict, Iterator, Optional
+from typing import TYPE_CHECKING

 import allure
 import pytest
@@ -23,6 +24,10 @@ from fixtures.common_types import TenantId, TimelineId
 from fixtures.log_helper import log
 from fixtures.neon_fixtures import NeonPageserver

+if TYPE_CHECKING:
+    from typing import Callable, ClassVar, Optional
+
+
 """
 This file contains fixtures for micro-benchmarks.
@@ -138,18 +143,6 @@ class PgBenchRunResult:

 @dataclasses.dataclass
 class PgBenchInitResult:
-    # Taken from https://github.com/postgres/postgres/blob/REL_15_1/src/bin/pgbench/pgbench.c#L5144-L5171
-    EXTRACTORS: ClassVar[Dict[str, re.Pattern]] = {  # type: ignore[type-arg]
-        "drop_tables": re.compile(r"drop tables (\d+\.\d+) s"),
-        "create_tables": re.compile(r"create tables (\d+\.\d+) s"),
-        "client_side_generate": re.compile(r"client-side generate (\d+\.\d+) s"),
-        "server_side_generate": re.compile(r"server-side generate (\d+\.\d+) s"),
-        "vacuum": re.compile(r"vacuum (\d+\.\d+) s"),
-        "primary_keys": re.compile(r"primary keys (\d+\.\d+) s"),
-        "foreign_keys": re.compile(r"foreign keys (\d+\.\d+) s"),
-        "total": re.compile(r"done in (\d+\.\d+) s"),  # Total time printed by pgbench
-    }
-
     total: Optional[float]
     drop_tables: Optional[float]
     create_tables: Optional[float]
@@ -162,6 +155,20 @@ class PgBenchInitResult:
     start_timestamp: int
     end_timestamp: int

+    # Taken from https://github.com/postgres/postgres/blob/REL_15_1/src/bin/pgbench/pgbench.c#L5144-L5171
+    EXTRACTORS: ClassVar[dict[str, re.Pattern[str]]] = dataclasses.field(
+        default_factory=lambda: {
+            "drop_tables": re.compile(r"drop tables (\d+\.\d+) s"),
+            "create_tables": re.compile(r"create tables (\d+\.\d+) s"),
+            "client_side_generate": re.compile(r"client-side generate (\d+\.\d+) s"),
+            "server_side_generate": re.compile(r"server-side generate (\d+\.\d+) s"),
+            "vacuum": re.compile(r"vacuum (\d+\.\d+) s"),
+            "primary_keys": re.compile(r"primary keys (\d+\.\d+) s"),
+            "foreign_keys": re.compile(r"foreign keys (\d+\.\d+) s"),
+            "total": re.compile(r"done in (\d+\.\d+) s"),  # Total time printed by pgbench
+        }
+    )
+
     @classmethod
     def parse_from_stderr(
         cls,
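The `default_factory` wrapper above exists because dataclasses reject bare mutable defaults. A minimal sketch of the general rule (hypothetical code, not from the commit):

    import dataclasses


    @dataclasses.dataclass
    class Counters:
        # A bare `= {}` default raises "mutable default ... is not allowed",
        # and a shared class attribute would alias state across instances;
        # default_factory builds a fresh dict for each instance instead.
        by_name: dict[str, int] = dataclasses.field(default_factory=dict)


    a, b = Counters(), Counters()
    a.by_name["x"] = 1
    assert b.by_name == {}  # each instance got its own dict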
@@ -175,7 +182,7 @@ class PgBenchInitResult:

         last_line = stderr.splitlines()[-1]

-        timings: Dict[str, Optional[float]] = {}
+        timings: dict[str, Optional[float]] = {}
         last_line_items = re.split(r"\(|\)|,", last_line)
         for item in last_line_items:
             for key, regex in cls.EXTRACTORS.items():
@@ -385,7 +392,7 @@ class NeonBenchmarker:
         self,
         pageserver: NeonPageserver,
         metric_name: str,
-        label_filters: Optional[Dict[str, str]] = None,
+        label_filters: Optional[dict[str, str]] = None,
     ) -> int:
         """Fetch the value of given int counter from pageserver metrics."""
         all_metrics = pageserver.http_client().get_metrics()
@@ -1,10 +1,16 @@
 from __future__ import annotations

 import random
 from dataclasses import dataclass
 from enum import Enum
 from functools import total_ordering
-from typing import Any, Dict, Type, TypeVar, Union
+from typing import TYPE_CHECKING, TypeVar
+
+if TYPE_CHECKING:
+    from typing import Any, Union

 T = TypeVar("T", bound="Id")

 DEFAULT_WAL_SEG_SIZE = 16 * 1024 * 1024
@@ -56,7 +62,7 @@ class Lsn:
             return NotImplemented
         return self.lsn_int - other.lsn_int

-    def __add__(self, other: Union[int, "Lsn"]) -> "Lsn":
+    def __add__(self, other: Union[int, Lsn]) -> Lsn:
         if isinstance(other, int):
             return Lsn(self.lsn_int + other)
         elif isinstance(other, Lsn):
@@ -70,7 +76,7 @@ class Lsn:
     def as_int(self) -> int:
         return self.lsn_int

-    def segment_lsn(self, seg_sz: int = DEFAULT_WAL_SEG_SIZE) -> "Lsn":
+    def segment_lsn(self, seg_sz: int = DEFAULT_WAL_SEG_SIZE) -> Lsn:
         return Lsn(self.lsn_int - (self.lsn_int % seg_sz))

     def segno(self, seg_sz: int = DEFAULT_WAL_SEG_SIZE) -> int:
@@ -127,7 +133,7 @@ class Id:
         return hash(str(self.id))

     @classmethod
-    def generate(cls: Type[T]) -> T:
+    def generate(cls: type[T]) -> T:
         """Generate a random ID"""
         return cls(random.randbytes(16).hex())
@@ -162,7 +168,7 @@ class TenantTimelineId:
     timeline_id: TimelineId

     @classmethod
-    def from_json(cls, d: Dict[str, Any]) -> "TenantTimelineId":
+    def from_json(cls, d: dict[str, Any]) -> TenantTimelineId:
         return TenantTimelineId(
             tenant_id=TenantId(d["tenant_id"]),
             timeline_id=TimelineId(d["timeline_id"]),
@@ -181,7 +187,7 @@ class TenantShardId:
         assert self.shard_number < self.shard_count or self.shard_count == 0

     @classmethod
-    def parse(cls: Type[TTenantShardId], input) -> TTenantShardId:
+    def parse(cls: type[TTenantShardId], input) -> TTenantShardId:
         if len(input) == 32:
             return cls(
                 tenant_id=TenantId(input),
@@ -1,11 +1,13 @@
 from __future__ import annotations

 import os
 import time
 from abc import ABC, abstractmethod
+from collections.abc import Iterator
 from contextlib import _GeneratorContextManager, contextmanager

 # Type-related stuff
 from pathlib import Path
-from typing import Dict, Iterator, List

 import pytest
 from _pytest.fixtures import FixtureRequest
@@ -72,7 +74,7 @@ class PgCompare(ABC):
         pass

     @contextmanager
-    def record_pg_stats(self, pg_stats: List[PgStatTable]) -> Iterator[None]:
+    def record_pg_stats(self, pg_stats: list[PgStatTable]) -> Iterator[None]:
         init_data = self._retrieve_pg_stats(pg_stats)

         yield
@@ -82,8 +84,8 @@ class PgCompare(ABC):
         for k in set(init_data) & set(data):
             self.zenbenchmark.record(k, data[k] - init_data[k], "", MetricReport.HIGHER_IS_BETTER)

-    def _retrieve_pg_stats(self, pg_stats: List[PgStatTable]) -> Dict[str, int]:
-        results: Dict[str, int] = {}
+    def _retrieve_pg_stats(self, pg_stats: list[PgStatTable]) -> dict[str, int]:
+        results: dict[str, int] = {}

         with self.pg.connect().cursor() as cur:
             for pg_stat in pg_stats:
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import concurrent.futures
 from typing import Any
@@ -0,0 +1 @@
+from __future__ import annotations
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import requests
 from requests.adapters import HTTPAdapter
@@ -1,6 +1,9 @@
 from __future__ import annotations

 import json
+from collections.abc import MutableMapping
 from pathlib import Path
-from typing import Any, List, MutableMapping, cast
+from typing import TYPE_CHECKING, cast

 import pytest
 from _pytest.config import Config
@@ -10,6 +13,9 @@ from allure_pytest.utils import allure_name, allure_suite_labels

 from fixtures.log_helper import log

+if TYPE_CHECKING:
+    from typing import Any
+
 """
 The plugin reruns flaky tests.
 It uses `pytest.mark.flaky` provided by `pytest-rerunfailures` plugin and flaky tests detected by `scripts/flaky_tests.py`
@@ -27,7 +33,7 @@ def pytest_addoption(parser: Parser):
     )


-def pytest_collection_modifyitems(config: Config, items: List[pytest.Item]):
+def pytest_collection_modifyitems(config: Config, items: list[pytest.Item]):
     if not config.getoption("--flaky-tests-json"):
         return
@@ -66,5 +72,5 @@ def pytest_collection_modifyitems(config: Config, items: list[pytest.Item]):
         # - [2] https://github.com/pytest-dev/pytest-timeout/issues/142
         timeout_marker = item.get_closest_marker("timeout")
         if timeout_marker is not None:
-            kwargs = cast(MutableMapping[str, Any], timeout_marker.kwargs)
+            kwargs = cast("MutableMapping[str, Any]", timeout_marker.kwargs)
             kwargs["func_only"] = True
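The quoting in that `cast` call is the flip side of moving imports under `TYPE_CHECKING`: unlike an annotation, the first argument to `typing.cast` is an ordinary runtime expression, so a type whose import only exists for the type checker must be spelled as a string. A minimal sketch (hypothetical code, not from the commit):

    from __future__ import annotations

    from typing import TYPE_CHECKING, cast

    if TYPE_CHECKING:
        from collections.abc import MutableMapping
        from typing import Any


    def force_func_only(kwargs_in: object) -> None:
        # cast() evaluates its first argument at runtime, so a type imported
        # only under TYPE_CHECKING must be given as a string literal here.
        kwargs = cast("MutableMapping[str, Any]", kwargs_in)
        kwargs["func_only"] = True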
@@ -1,4 +1,4 @@
-from typing import Tuple
+from __future__ import annotations

 import pytest
 from pytest_httpserver import HTTPServer
@@ -40,6 +40,6 @@ def httpserver(make_httpserver):


 @pytest.fixture(scope="function")
-def httpserver_listen_address(port_distributor) -> Tuple[str, int]:
+def httpserver_listen_address(port_distributor) -> tuple[str, int]:
     port = port_distributor.get_port()
     return ("localhost", port)
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import logging
 import logging.config
@@ -1,21 +1,26 @@
+from __future__ import annotations
+
 from collections import defaultdict
-from typing import Dict, List, Optional, Tuple
+from typing import TYPE_CHECKING

 from prometheus_client.parser import text_string_to_metric_families
 from prometheus_client.samples import Sample

 from fixtures.log_helper import log

+if TYPE_CHECKING:
+    from typing import Optional
+

 class Metrics:
-    metrics: Dict[str, List[Sample]]
+    metrics: dict[str, list[Sample]]
     name: str

     def __init__(self, name: str = ""):
         self.metrics = defaultdict(list)
         self.name = name

-    def query_all(self, name: str, filter: Optional[Dict[str, str]] = None) -> List[Sample]:
+    def query_all(self, name: str, filter: Optional[dict[str, str]] = None) -> list[Sample]:
         filter = filter or {}
         res = []
@@ -27,7 +32,7 @@ class Metrics:
                 pass
         return res

-    def query_one(self, name: str, filter: Optional[Dict[str, str]] = None) -> Sample:
+    def query_one(self, name: str, filter: Optional[dict[str, str]] = None) -> Sample:
         res = self.query_all(name, filter or {})
         assert len(res) == 1, f"expected single sample for {name} {filter}, found {res}"
         return res[0]
@@ -43,7 +48,7 @@ class MetricsGetter:
         raise NotImplementedError()

     def get_metric_value(
-        self, name: str, filter: Optional[Dict[str, str]] = None
+        self, name: str, filter: Optional[dict[str, str]] = None
     ) -> Optional[float]:
         metrics = self.get_metrics()
         results = metrics.query_all(name, filter=filter)
@@ -54,8 +59,8 @@ class MetricsGetter:
         return results[0].value

     def get_metrics_values(
-        self, names: list[str], filter: Optional[Dict[str, str]] = None, absence_ok=False
-    ) -> Dict[str, float]:
+        self, names: list[str], filter: Optional[dict[str, str]] = None, absence_ok=False
+    ) -> dict[str, float]:
         """
         When fetching multiple named metrics, it is more efficient to use this
         than to call `get_metric_value` repeatedly.
@@ -97,7 +102,7 @@ def parse_metrics(text: str, name: str = "") -> Metrics:
     return metrics


-def histogram(prefix_without_trailing_underscore: str) -> List[str]:
+def histogram(prefix_without_trailing_underscore: str) -> list[str]:
     assert not prefix_without_trailing_underscore.endswith("_")
     return [f"{prefix_without_trailing_underscore}_{x}" for x in ["bucket", "count", "sum"]]
@@ -107,7 +112,7 @@ def counter(name: str) -> str:
     return f"{name}_total"


-PAGESERVER_PER_TENANT_REMOTE_TIMELINE_CLIENT_METRICS: Tuple[str, ...] = (
+PAGESERVER_PER_TENANT_REMOTE_TIMELINE_CLIENT_METRICS: tuple[str, ...] = (
     "pageserver_remote_timeline_client_calls_started_total",
     "pageserver_remote_timeline_client_calls_finished_total",
     "pageserver_remote_physical_size",
@@ -115,7 +120,7 @@ PAGESERVER_PER_TENANT_REMOTE_TIMELINE_CLIENT_METRICS: tuple[str, ...] = (
     "pageserver_remote_timeline_client_bytes_finished_total",
 )

-PAGESERVER_GLOBAL_METRICS: Tuple[str, ...] = (
+PAGESERVER_GLOBAL_METRICS: tuple[str, ...] = (
     "pageserver_storage_operations_seconds_global_count",
     "pageserver_storage_operations_seconds_global_sum",
     "pageserver_storage_operations_seconds_global_bucket",
@@ -147,7 +152,7 @@ PAGESERVER_GLOBAL_METRICS: tuple[str, ...] = (
     counter("pageserver_tenant_throttling_count_global"),
 )

-PAGESERVER_PER_TENANT_METRICS: Tuple[str, ...] = (
+PAGESERVER_PER_TENANT_METRICS: tuple[str, ...] = (
     "pageserver_current_logical_size",
     "pageserver_resident_physical_size",
     "pageserver_io_operations_bytes_total",
@@ -6,12 +6,12 @@ from typing import TYPE_CHECKING, cast
 import requests

 if TYPE_CHECKING:
-    from typing import Any, Dict, Literal, Optional, Union
+    from typing import Any, Literal, Optional, Union

     from fixtures.pg_version import PgVersion


-def connection_parameters_to_env(params: Dict[str, str]) -> Dict[str, str]:
+def connection_parameters_to_env(params: dict[str, str]) -> dict[str, str]:
     return {
         "PGHOST": params["host"],
         "PGDATABASE": params["database"],
@@ -41,8 +41,8 @@ class NeonAPI:
         branch_name: Optional[str] = None,
         branch_role_name: Optional[str] = None,
         branch_database_name: Optional[str] = None,
-    ) -> Dict[str, Any]:
-        data: Dict[str, Any] = {
+    ) -> dict[str, Any]:
+        data: dict[str, Any] = {
             "project": {
                 "branch": {},
             },
@@ -70,9 +70,9 @@ class NeonAPI:

         assert resp.status_code == 201

-        return cast("Dict[str, Any]", resp.json())
+        return cast("dict[str, Any]", resp.json())

-    def get_project_details(self, project_id: str) -> Dict[str, Any]:
+    def get_project_details(self, project_id: str) -> dict[str, Any]:
         resp = self.__request(
             "GET",
             f"/projects/{project_id}",
@@ -82,12 +82,12 @@ class NeonAPI:
             },
         )
         assert resp.status_code == 200
-        return cast("Dict[str, Any]", resp.json())
+        return cast("dict[str, Any]", resp.json())

     def delete_project(
         self,
         project_id: str,
-    ) -> Dict[str, Any]:
+    ) -> dict[str, Any]:
         resp = self.__request(
             "DELETE",
             f"/projects/{project_id}",
@@ -99,13 +99,13 @@ class NeonAPI:

         assert resp.status_code == 200

-        return cast("Dict[str, Any]", resp.json())
+        return cast("dict[str, Any]", resp.json())

     def start_endpoint(
         self,
         project_id: str,
         endpoint_id: str,
-    ) -> Dict[str, Any]:
+    ) -> dict[str, Any]:
         resp = self.__request(
             "POST",
             f"/projects/{project_id}/endpoints/{endpoint_id}/start",
@@ -116,13 +116,13 @@ class NeonAPI:

         assert resp.status_code == 200

-        return cast("Dict[str, Any]", resp.json())
+        return cast("dict[str, Any]", resp.json())

     def suspend_endpoint(
         self,
         project_id: str,
         endpoint_id: str,
-    ) -> Dict[str, Any]:
+    ) -> dict[str, Any]:
         resp = self.__request(
             "POST",
             f"/projects/{project_id}/endpoints/{endpoint_id}/suspend",
@@ -133,13 +133,13 @@ class NeonAPI:

         assert resp.status_code == 200

-        return cast("Dict[str, Any]", resp.json())
+        return cast("dict[str, Any]", resp.json())

     def restart_endpoint(
         self,
         project_id: str,
         endpoint_id: str,
-    ) -> Dict[str, Any]:
+    ) -> dict[str, Any]:
         resp = self.__request(
             "POST",
             f"/projects/{project_id}/endpoints/{endpoint_id}/restart",
@@ -150,16 +150,16 @@ class NeonAPI:

         assert resp.status_code == 200

-        return cast("Dict[str, Any]", resp.json())
+        return cast("dict[str, Any]", resp.json())

     def create_endpoint(
         self,
         project_id: str,
         branch_id: str,
         endpoint_type: Literal["read_write", "read_only"],
-        settings: Dict[str, Any],
-    ) -> Dict[str, Any]:
-        data: Dict[str, Any] = {
+        settings: dict[str, Any],
+    ) -> dict[str, Any]:
+        data: dict[str, Any] = {
             "endpoint": {
                 "branch_id": branch_id,
             },
@@ -182,7 +182,7 @@ class NeonAPI:

         assert resp.status_code == 201

-        return cast("Dict[str, Any]", resp.json())
+        return cast("dict[str, Any]", resp.json())

     def get_connection_uri(
         self,
@@ -192,7 +192,7 @@ class NeonAPI:
         database_name: str = "neondb",
         role_name: str = "neondb_owner",
         pooled: bool = True,
-    ) -> Dict[str, Any]:
+    ) -> dict[str, Any]:
         resp = self.__request(
             "GET",
             f"/projects/{project_id}/connection_uri",
@@ -210,9 +210,9 @@ class NeonAPI:

         assert resp.status_code == 200

-        return cast("Dict[str, Any]", resp.json())
+        return cast("dict[str, Any]", resp.json())

-    def get_branches(self, project_id: str) -> Dict[str, Any]:
+    def get_branches(self, project_id: str) -> dict[str, Any]:
         resp = self.__request(
             "GET",
             f"/projects/{project_id}/branches",
@@ -223,9 +223,9 @@ class NeonAPI:

         assert resp.status_code == 200

-        return cast("Dict[str, Any]", resp.json())
+        return cast("dict[str, Any]", resp.json())

-    def get_endpoints(self, project_id: str) -> Dict[str, Any]:
+    def get_endpoints(self, project_id: str) -> dict[str, Any]:
         resp = self.__request(
             "GET",
             f"/projects/{project_id}/endpoints",
@@ -236,9 +236,9 @@ class NeonAPI:

         assert resp.status_code == 200

-        return cast("Dict[str, Any]", resp.json())
+        return cast("dict[str, Any]", resp.json())

-    def get_operations(self, project_id: str) -> Dict[str, Any]:
+    def get_operations(self, project_id: str) -> dict[str, Any]:
         resp = self.__request(
             "GET",
             f"/projects/{project_id}/operations",
@@ -250,7 +250,7 @@ class NeonAPI:

         assert resp.status_code == 200

-        return cast("Dict[str, Any]", resp.json())
+        return cast("dict[str, Any]", resp.json())

     def wait_for_operation_to_finish(self, project_id: str):
         has_running = True
@@ -9,15 +9,7 @@ import tempfile
 import textwrap
 from itertools import chain, product
 from pathlib import Path
-from typing import (
-    Any,
-    Dict,
-    List,
-    Optional,
-    Tuple,
-    TypeVar,
-    cast,
-)
+from typing import TYPE_CHECKING, cast

 import toml
@@ -27,7 +19,15 @@ from fixtures.pageserver.common_types import IndexPartDump
 from fixtures.pg_version import PgVersion
 from fixtures.utils import AuxFileStore

-T = TypeVar("T")
+if TYPE_CHECKING:
+    from typing import (
+        Any,
+        Optional,
+        TypeVar,
+        cast,
+    )
+
+    T = TypeVar("T")


 class AbstractNeonCli(abc.ABC):
@@ -37,7 +37,7 @@ class AbstractNeonCli(abc.ABC):
     Do not use directly, use specific subclasses instead.
     """

-    def __init__(self, extra_env: Optional[Dict[str, str]], binpath: Path):
+    def __init__(self, extra_env: Optional[dict[str, str]], binpath: Path):
         self.extra_env = extra_env
         self.binpath = binpath
@@ -45,11 +45,11 @@ class AbstractNeonCli(abc.ABC):

     def raw_cli(
         self,
-        arguments: List[str],
-        extra_env_vars: Optional[Dict[str, str]] = None,
+        arguments: list[str],
+        extra_env_vars: Optional[dict[str, str]] = None,
         check_return_code=True,
         timeout=None,
-    ) -> "subprocess.CompletedProcess[str]":
+    ) -> subprocess.CompletedProcess[str]:
         """
         Run the command with the specified arguments.
@@ -92,9 +92,8 @@ class AbstractNeonCli(abc.ABC):
                 args,
                 env=env_vars,
                 check=False,
-                universal_newlines=True,
-                stdout=subprocess.PIPE,
-                stderr=subprocess.PIPE,
+                text=True,
+                capture_output=True,
                 timeout=timeout,
             )
         except subprocess.TimeoutExpired as e:
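This hunk is the subprocess modernization pyupgrade pushes: `text=True` is the modern alias for `universal_newlines=True`, and `capture_output=True` expands to `stdout=PIPE, stderr=PIPE` (both available since Python 3.7). A minimal sketch (hypothetical code, not from the commit; assumes a POSIX `echo`):

    import subprocess

    # text=True decodes stdout/stderr to str; capture_output=True replaces
    # the explicit stdout=subprocess.PIPE, stderr=subprocess.PIPE pair.
    res = subprocess.run(
        ["echo", "hello"],
        check=False,
        text=True,
        capture_output=True,
    )
    print(res.stdout.strip())  # -> "hello"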
@@ -118,7 +117,7 @@ class AbstractNeonCli(abc.ABC):
             if len(lines) < 2:
                 log.debug(f"Run {res.args} success: {stripped}")
             else:
-                log.debug("Run %s success:\n%s" % (res.args, textwrap.indent(stripped, indent)))
+                log.debug("Run %s success:\n%s", res.args, textwrap.indent(stripped, indent))
         elif check_return_code:
             # this way command output will be in recorded and shown in CI in failure message
             indent = indent * 2
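Dropping the `%` operator here also changes behavior for the better: passing values as arguments lets the logging module defer string formatting until a handler actually emits the record. A minimal sketch (hypothetical code, not from the commit):

    import logging

    log = logging.getLogger("example")
    args, output = ["neon", "start"], "ok"

    # %-formatting builds the string eagerly, even when DEBUG is disabled:
    # log.debug("Run %s success:\n%s" % (args, output))
    #
    # Passing the values as arguments formats lazily, only if the record
    # is actually consumed:
    log.debug("Run %s success:\n%s", args, output)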
@@ -175,7 +174,7 @@ class NeonLocalCli(AbstractNeonCli):

     def __init__(
         self,
-        extra_env: Optional[Dict[str, str]],
+        extra_env: Optional[dict[str, str]],
         binpath: Path,
         repo_dir: Path,
         pg_distrib_dir: Path,
@@ -197,7 +196,7 @@ class NeonLocalCli(AbstractNeonCli):
         tenant_id: TenantId,
         timeline_id: TimelineId,
         pg_version: PgVersion,
-        conf: Optional[Dict[str, Any]] = None,
+        conf: Optional[dict[str, Any]] = None,
         shard_count: Optional[int] = None,
         shard_stripe_size: Optional[int] = None,
         placement_policy: Optional[str] = None,
@@ -258,7 +257,7 @@ class NeonLocalCli(AbstractNeonCli):
         res = self.raw_cli(["tenant", "set-default", "--tenant-id", str(tenant_id)])
         res.check_returncode()

-    def tenant_config(self, tenant_id: TenantId, conf: Dict[str, str]):
+    def tenant_config(self, tenant_id: TenantId, conf: dict[str, str]):
         """
         Update tenant config.
         """
@@ -274,7 +273,7 @@ class NeonLocalCli(AbstractNeonCli):
         res = self.raw_cli(args)
         res.check_returncode()

-    def tenant_list(self) -> "subprocess.CompletedProcess[str]":
+    def tenant_list(self) -> subprocess.CompletedProcess[str]:
         res = self.raw_cli(["tenant", "list"])
         res.check_returncode()
         return res
@@ -368,7 +367,7 @@ class NeonLocalCli(AbstractNeonCli):
         res = self.raw_cli(cmd)
         res.check_returncode()

-    def timeline_list(self, tenant_id: TenantId) -> List[Tuple[str, TimelineId]]:
+    def timeline_list(self, tenant_id: TenantId) -> list[tuple[str, TimelineId]]:
         """
         Returns a list of (branch_name, timeline_id) tuples out of parsed `neon timeline list` CLI output.
         """
@@ -389,9 +388,9 @@ class NeonLocalCli(AbstractNeonCli):

     def init(
         self,
-        init_config: Dict[str, Any],
+        init_config: dict[str, Any],
         force: Optional[str] = None,
-    ) -> "subprocess.CompletedProcess[str]":
+    ) -> subprocess.CompletedProcess[str]:
         with tempfile.NamedTemporaryFile(mode="w+") as init_config_tmpfile:
             init_config_tmpfile.write(toml.dumps(init_config))
             init_config_tmpfile.flush()
@@ -434,15 +433,15 @@ class NeonLocalCli(AbstractNeonCli):
     def pageserver_start(
         self,
         id: int,
-        extra_env_vars: Optional[Dict[str, str]] = None,
+        extra_env_vars: Optional[dict[str, str]] = None,
         timeout_in_seconds: Optional[int] = None,
-    ) -> "subprocess.CompletedProcess[str]":
+    ) -> subprocess.CompletedProcess[str]:
         start_args = ["pageserver", "start", f"--id={id}"]
         if timeout_in_seconds is not None:
             start_args.append(f"--start-timeout={timeout_in_seconds}s")
         return self.raw_cli(start_args, extra_env_vars=extra_env_vars)

-    def pageserver_stop(self, id: int, immediate=False) -> "subprocess.CompletedProcess[str]":
+    def pageserver_stop(self, id: int, immediate=False) -> subprocess.CompletedProcess[str]:
         cmd = ["pageserver", "stop", f"--id={id}"]
         if immediate:
             cmd.extend(["-m", "immediate"])
@@ -453,10 +452,10 @@ class NeonLocalCli(AbstractNeonCli):
     def safekeeper_start(
         self,
         id: int,
-        extra_opts: Optional[List[str]] = None,
-        extra_env_vars: Optional[Dict[str, str]] = None,
+        extra_opts: Optional[list[str]] = None,
+        extra_env_vars: Optional[dict[str, str]] = None,
         timeout_in_seconds: Optional[int] = None,
-    ) -> "subprocess.CompletedProcess[str]":
+    ) -> subprocess.CompletedProcess[str]:
         if extra_opts is not None:
             extra_opts = [f"-e={opt}" for opt in extra_opts]
         else:
@@ -469,7 +468,7 @@ class NeonLocalCli(AbstractNeonCli):

     def safekeeper_stop(
         self, id: Optional[int] = None, immediate=False
-    ) -> "subprocess.CompletedProcess[str]":
+    ) -> subprocess.CompletedProcess[str]:
         args = ["safekeeper", "stop"]
         if id is not None:
             args.append(str(id))
@@ -479,13 +478,13 @@ class NeonLocalCli(AbstractNeonCli):

     def storage_broker_start(
         self, timeout_in_seconds: Optional[int] = None
-    ) -> "subprocess.CompletedProcess[str]":
+    ) -> subprocess.CompletedProcess[str]:
         cmd = ["storage_broker", "start"]
         if timeout_in_seconds is not None:
             cmd.append(f"--start-timeout={timeout_in_seconds}s")
         return self.raw_cli(cmd)

-    def storage_broker_stop(self) -> "subprocess.CompletedProcess[str]":
+    def storage_broker_stop(self) -> subprocess.CompletedProcess[str]:
         cmd = ["storage_broker", "stop"]
         return self.raw_cli(cmd)
@@ -501,7 +500,7 @@ class NeonLocalCli(AbstractNeonCli):
         lsn: Optional[Lsn] = None,
         pageserver_id: Optional[int] = None,
         allow_multiple=False,
-    ) -> "subprocess.CompletedProcess[str]":
+    ) -> subprocess.CompletedProcess[str]:
         args = [
             "endpoint",
             "create",
@@ -534,12 +533,12 @@ class NeonLocalCli(AbstractNeonCli):
     def endpoint_start(
         self,
         endpoint_id: str,
-        safekeepers: Optional[List[int]] = None,
+        safekeepers: Optional[list[int]] = None,
         remote_ext_config: Optional[str] = None,
         pageserver_id: Optional[int] = None,
         allow_multiple=False,
         basebackup_request_tries: Optional[int] = None,
-    ) -> "subprocess.CompletedProcess[str]":
+    ) -> subprocess.CompletedProcess[str]:
         args = [
             "endpoint",
             "start",
@@ -568,9 +567,9 @@ class NeonLocalCli(AbstractNeonCli):
         endpoint_id: str,
         tenant_id: Optional[TenantId] = None,
         pageserver_id: Optional[int] = None,
-        safekeepers: Optional[List[int]] = None,
+        safekeepers: Optional[list[int]] = None,
         check_return_code=True,
-    ) -> "subprocess.CompletedProcess[str]":
+    ) -> subprocess.CompletedProcess[str]:
         args = ["endpoint", "reconfigure", endpoint_id]
         if tenant_id is not None:
             args.extend(["--tenant-id", str(tenant_id)])
@@ -586,7 +585,7 @@ class NeonLocalCli(AbstractNeonCli):
         destroy=False,
         check_return_code=True,
         mode: Optional[str] = None,
-    ) -> "subprocess.CompletedProcess[str]":
+    ) -> subprocess.CompletedProcess[str]:
         args = [
             "endpoint",
             "stop",
@@ -602,7 +601,7 @@ class NeonLocalCli(AbstractNeonCli):

     def mappings_map_branch(
         self, name: str, tenant_id: TenantId, timeline_id: TimelineId
-    ) -> "subprocess.CompletedProcess[str]":
+    ) -> subprocess.CompletedProcess[str]:
         """
         Map tenant id and timeline id to a neon_local branch name. They do not have to exist.
         Usually needed when creating branches via PageserverHttpClient and not neon_local.
@@ -623,10 +622,10 @@ class NeonLocalCli(AbstractNeonCli):

         return self.raw_cli(args, check_return_code=True)

-    def start(self, check_return_code=True) -> "subprocess.CompletedProcess[str]":
+    def start(self, check_return_code=True) -> subprocess.CompletedProcess[str]:
         return self.raw_cli(["start"], check_return_code=check_return_code)

-    def stop(self, check_return_code=True) -> "subprocess.CompletedProcess[str]":
+    def stop(self, check_return_code=True) -> subprocess.CompletedProcess[str]:
         return self.raw_cli(["stop"], check_return_code=check_return_code)
@@ -638,7 +637,7 @@ class WalCraft(AbstractNeonCli):

     COMMAND = "wal_craft"

-    def postgres_config(self) -> List[str]:
+    def postgres_config(self) -> list[str]:
         res = self.raw_cli(["print-postgres-config"])
         res.check_returncode()
         return res.stdout.split("\n")
@@ -13,6 +13,7 @@ import threading
 import time
 import uuid
 from collections import defaultdict
+from collections.abc import Iterable, Iterator
 from contextlib import closing, contextmanager
 from dataclasses import dataclass
 from datetime import datetime
@@ -21,20 +22,7 @@ from fcntl import LOCK_EX, LOCK_UN, flock
 from functools import cached_property
 from pathlib import Path
 from types import TracebackType
-from typing import (
-    Any,
-    Callable,
-    Dict,
-    Iterable,
-    Iterator,
-    List,
-    Optional,
-    Tuple,
-    Type,
-    TypeVar,
-    Union,
-    cast,
-)
+from typing import TYPE_CHECKING, cast
 from urllib.parse import quote, urlparse

 import asyncpg
@@ -99,7 +87,17 @@ from fixtures.utils import AuxFileStore as AuxFileStore  # reexport

 from .neon_api import NeonAPI, NeonApiEndpoint

-T = TypeVar("T")
+if TYPE_CHECKING:
+    from typing import (
+        Any,
+        Callable,
+        Optional,
+        TypeVar,
+        Union,
+    )
+
+    T = TypeVar("T")


 """
 This file contains pytest fixtures. A fixture is a test resource that can be
@@ -118,7 +116,7 @@ Don't import functions from this file, or pytest will emit warnings. Instead
 put directly-importable functions into utils.py or another separate file.
 """

-Env = Dict[str, str]
+Env = dict[str, str]

 DEFAULT_OUTPUT_DIR: str = "test_output"
 DEFAULT_BRANCH_NAME: str = "main"
@@ -250,7 +248,7 @@ class PgProtocol:
         """
         return str(make_dsn(**self.conn_options(**kwargs)))

-    def conn_options(self, **kwargs: Any) -> Dict[str, Any]:
+    def conn_options(self, **kwargs: Any) -> dict[str, Any]:
         """
         Construct a dictionary of connection options from default values and extra parameters.
         An option can be dropped from the returning dictionary by None-valued extra parameter.
@@ -319,7 +317,7 @@ class PgProtocol:
             conn_options["server_settings"] = {key: val}
         return await asyncpg.connect(**conn_options)

-    def safe_psql(self, query: str, **kwargs: Any) -> List[Tuple[Any, ...]]:
+    def safe_psql(self, query: str, **kwargs: Any) -> list[tuple[Any, ...]]:
         """
         Execute query against the node and return all rows.
         This method passes all extra params to connstr.
@@ -328,12 +326,12 @@ class PgProtocol:

     def safe_psql_many(
         self, queries: Iterable[str], log_query=True, **kwargs: Any
-    ) -> List[List[Tuple[Any, ...]]]:
+    ) -> list[list[tuple[Any, ...]]]:
         """
         Execute queries against the node and return all rows.
         This method passes all extra params to connstr.
         """
-        result: List[List[Any]] = []
+        result: list[list[Any]] = []
         with closing(self.connect(**kwargs)) as conn:
             with conn.cursor() as cur:
                 for query in queries:
@@ -379,7 +377,7 @@ class NeonEnvBuilder:
         test_overlay_dir: Optional[Path] = None,
         pageserver_remote_storage: Optional[RemoteStorage] = None,
         # toml that will be decomposed into `--config-override` flags during `pageserver --init`
-        pageserver_config_override: Optional[str | Callable[[Dict[str, Any]], None]] = None,
+        pageserver_config_override: Optional[str | Callable[[dict[str, Any]], None]] = None,
         num_safekeepers: int = 1,
         num_pageservers: int = 1,
         # Use non-standard SK ids to check for various parsing bugs
@@ -394,7 +392,7 @@ class NeonEnvBuilder:
         initial_timeline: Optional[TimelineId] = None,
         pageserver_virtual_file_io_engine: Optional[str] = None,
         pageserver_aux_file_policy: Optional[AuxFileStore] = None,
-        pageserver_default_tenant_config_compaction_algorithm: Optional[Dict[str, Any]] = None,
+        pageserver_default_tenant_config_compaction_algorithm: Optional[dict[str, Any]] = None,
         safekeeper_extra_opts: Optional[list[str]] = None,
         storage_controller_port_override: Optional[int] = None,
         pageserver_io_buffer_alignment: Optional[int] = None,
@@ -429,7 +427,7 @@ class NeonEnvBuilder:
         self.enable_scrub_on_exit = True
         self.test_output_dir = test_output_dir
         self.test_overlay_dir = test_overlay_dir
-        self.overlay_mounts_created_by_us: List[Tuple[str, Path]] = []
+        self.overlay_mounts_created_by_us: list[tuple[str, Path]] = []
         self.config_init_force: Optional[str] = None
         self.top_output_dir = top_output_dir
         self.control_plane_compute_hook_api: Optional[str] = None
@@ -438,7 +436,7 @@ class NeonEnvBuilder:
         self.pageserver_virtual_file_io_engine: Optional[str] = pageserver_virtual_file_io_engine

         self.pageserver_default_tenant_config_compaction_algorithm: Optional[
-            Dict[str, Any]
+            dict[str, Any]
         ] = pageserver_default_tenant_config_compaction_algorithm
         if self.pageserver_default_tenant_config_compaction_algorithm is not None:
             log.debug(
|
||||
|
||||
def init_start(
|
||||
self,
|
||||
initial_tenant_conf: Optional[Dict[str, Any]] = None,
|
||||
initial_tenant_conf: Optional[dict[str, Any]] = None,
|
||||
default_remote_storage_if_missing: bool = True,
|
||||
initial_tenant_shard_count: Optional[int] = None,
|
||||
initial_tenant_shard_stripe_size: Optional[int] = None,
|
||||
@@ -823,7 +821,7 @@ class NeonEnvBuilder:
|
||||
|
||||
overlayfs_mounts = {mountpoint for _, mountpoint in self.overlay_mounts_created_by_us}
|
||||
|
||||
directories_to_clean: List[Path] = []
|
||||
directories_to_clean: list[Path] = []
|
||||
for test_entry in Path(self.repo_dir).glob("**/*"):
|
||||
if test_entry in overlayfs_mounts:
|
||||
continue
|
||||
@@ -854,12 +852,12 @@ class NeonEnvBuilder:
             if isinstance(x, S3Storage):
                 x.do_cleanup()

-    def __enter__(self) -> "NeonEnvBuilder":
+    def __enter__(self) -> NeonEnvBuilder:
         return self

     def __exit__(
         self,
-        exc_type: Optional[Type[BaseException]],
+        exc_type: Optional[type[BaseException]],
         exc_value: Optional[BaseException],
         traceback: Optional[TracebackType],
     ):
@@ -970,8 +968,8 @@ class NeonEnv:
         self.port_distributor = config.port_distributor
         self.s3_mock_server = config.mock_s3_server
         self.endpoints = EndpointFactory(self)
-        self.safekeepers: List[Safekeeper] = []
-        self.pageservers: List[NeonPageserver] = []
+        self.safekeepers: list[Safekeeper] = []
+        self.pageservers: list[NeonPageserver] = []
         self.broker = NeonBroker(self)
         self.pageserver_remote_storage = config.pageserver_remote_storage
         self.safekeepers_remote_storage = config.safekeepers_remote_storage
@@ -1043,7 +1041,7 @@ class NeonEnv:
         self.pageserver_io_buffer_alignment = config.pageserver_io_buffer_alignment

         # Create the neon_local's `NeonLocalInitConf`
-        cfg: Dict[str, Any] = {
+        cfg: dict[str, Any] = {
             "default_tenant_id": str(self.initial_tenant),
             "broker": {
                 "listen_addr": self.broker.listen_addr(),
@@ -1072,7 +1070,7 @@ class NeonEnv:
                 http=self.port_distributor.get_port(),
             )

-            ps_cfg: Dict[str, Any] = {
+            ps_cfg: dict[str, Any] = {
                 "id": ps_id,
                 "listen_pg_addr": f"localhost:{pageserver_port.pg}",
                 "listen_http_addr": f"localhost:{pageserver_port.http}",
@@ -1120,7 +1118,7 @@ class NeonEnv:
                 http=self.port_distributor.get_port(),
             )
             id = config.safekeepers_id_start + i  # assign ids sequentially
-            sk_cfg: Dict[str, Any] = {
+            sk_cfg: dict[str, Any] = {
                 "id": id,
                 "pg_port": port.pg,
                 "pg_tenant_only_port": port.pg_tenant_only,
@@ -1285,9 +1283,8 @@ class NeonEnv:
         res = subprocess.run(
             [bin_pageserver, "--version"],
             check=True,
-            universal_newlines=True,
-            stdout=subprocess.PIPE,
-            stderr=subprocess.PIPE,
+            text=True,
+            capture_output=True,
         )
         return res.stdout
@@ -1330,13 +1327,13 @@ class NeonEnv:
         self,
         tenant_id: Optional[TenantId] = None,
         timeline_id: Optional[TimelineId] = None,
-        conf: Optional[Dict[str, Any]] = None,
+        conf: Optional[dict[str, Any]] = None,
         shard_count: Optional[int] = None,
         shard_stripe_size: Optional[int] = None,
         placement_policy: Optional[str] = None,
         set_default: bool = False,
         aux_file_policy: Optional[AuxFileStore] = None,
-    ) -> Tuple[TenantId, TimelineId]:
+    ) -> tuple[TenantId, TimelineId]:
         """
         Creates a new tenant, returns its id and its initial timeline's id.
         """
@@ -1357,7 +1354,7 @@ class NeonEnv:

         return tenant_id, timeline_id

-    def config_tenant(self, tenant_id: Optional[TenantId], conf: Dict[str, str]):
+    def config_tenant(self, tenant_id: Optional[TenantId], conf: dict[str, str]):
         """
         Update tenant config.
         """
@@ -1409,7 +1406,7 @@ def neon_simple_env(
    pg_version: PgVersion,
    pageserver_virtual_file_io_engine: str,
    pageserver_aux_file_policy: Optional[AuxFileStore],
-    pageserver_default_tenant_config_compaction_algorithm: Optional[Dict[str, Any]],
+    pageserver_default_tenant_config_compaction_algorithm: Optional[dict[str, Any]],
    pageserver_io_buffer_alignment: Optional[int],
 ) -> Iterator[NeonEnv]:
    """
@@ -1457,7 +1454,7 @@ def neon_env_builder(
    test_overlay_dir: Path,
    top_output_dir: Path,
    pageserver_virtual_file_io_engine: str,
-    pageserver_default_tenant_config_compaction_algorithm: Optional[Dict[str, Any]],
+    pageserver_default_tenant_config_compaction_algorithm: Optional[dict[str, Any]],
    pageserver_aux_file_policy: Optional[AuxFileStore],
    record_property: Callable[[str, object], None],
    pageserver_io_buffer_alignment: Optional[int],
@@ -1519,7 +1516,7 @@ class LogUtils:

     def assert_log_contains(
         self, pattern: str, offset: None | LogCursor = None
-    ) -> Tuple[str, LogCursor]:
+    ) -> tuple[str, LogCursor]:
         """Convenient for use inside wait_until()"""

         res = self.log_contains(pattern, offset=offset)
@@ -1528,7 +1525,7 @@ class LogUtils:

     def log_contains(
         self, pattern: str, offset: None | LogCursor = None
-    ) -> Optional[Tuple[str, LogCursor]]:
+    ) -> Optional[tuple[str, LogCursor]]:
         """Check that the log contains a line that matches the given regex"""
         logfile = self.logfile
         if not logfile.exists():
@@ -1609,7 +1606,7 @@ class NeonStorageController(MetricsGetter, LogUtils):
         self.running = True
         return self

-    def stop(self, immediate: bool = False) -> "NeonStorageController":
+    def stop(self, immediate: bool = False) -> NeonStorageController:
         if self.running:
             self.env.neon_cli.storage_controller_stop(immediate)
             self.running = False
@@ -1671,7 +1668,7 @@ class NeonStorageController(MetricsGetter, LogUtils):

         return resp

-    def headers(self, scope: Optional[TokenScope]) -> Dict[str, str]:
+    def headers(self, scope: Optional[TokenScope]) -> dict[str, str]:
         headers = {}
         if self.auth_enabled and scope is not None:
             jwt_token = self.env.auth_keys.generate_token(scope=scope)
@@ -1857,13 +1854,13 @@ class NeonStorageController(MetricsGetter, LogUtils):
         tenant_id: TenantId,
         shard_count: Optional[int] = None,
         shard_stripe_size: Optional[int] = None,
-        tenant_config: Optional[Dict[Any, Any]] = None,
-        placement_policy: Optional[Union[Dict[Any, Any] | str]] = None,
+        tenant_config: Optional[dict[Any, Any]] = None,
+        placement_policy: Optional[Union[dict[Any, Any] | str]] = None,
     ):
         """
         Use this rather than pageserver_api() when you need to include shard parameters
         """
-        body: Dict[str, Any] = {"new_tenant_id": str(tenant_id)}
+        body: dict[str, Any] = {"new_tenant_id": str(tenant_id)}

         if shard_count is not None:
             shard_params = {"count": shard_count}
@@ -2079,8 +2076,8 @@ class NeonStorageController(MetricsGetter, LogUtils):

         time.sleep(backoff)

-    def metadata_health_update(self, healthy: List[TenantShardId], unhealthy: List[TenantShardId]):
-        body: Dict[str, Any] = {
+    def metadata_health_update(self, healthy: list[TenantShardId], unhealthy: list[TenantShardId]):
+        body: dict[str, Any] = {
             "healthy_tenant_shards": [str(t) for t in healthy],
             "unhealthy_tenant_shards": [str(t) for t in unhealthy],
         }
@@ -2101,7 +2098,7 @@ class NeonStorageController(MetricsGetter, LogUtils):
         return response.json()

     def metadata_health_list_outdated(self, duration: str):
-        body: Dict[str, Any] = {"not_scrubbed_for": duration}
+        body: dict[str, Any] = {"not_scrubbed_for": duration}

         response = self.request(
             "POST",
|
||||
response.raise_for_status()
|
||||
return response.json()
|
||||
|
||||
def configure_failpoints(self, config_strings: Tuple[str, str] | List[Tuple[str, str]]):
|
||||
def configure_failpoints(self, config_strings: tuple[str, str] | list[tuple[str, str]]):
|
||||
if isinstance(config_strings, tuple):
|
||||
pairs = [config_strings]
|
||||
else:
|
||||
@@ -2152,13 +2149,13 @@ class NeonStorageController(MetricsGetter, LogUtils):
|
||||
log.info(f"Got failpoints request response code {res.status_code}")
|
||||
res.raise_for_status()
|
||||
|
||||
def get_tenants_placement(self) -> defaultdict[str, Dict[str, Any]]:
|
||||
def get_tenants_placement(self) -> defaultdict[str, dict[str, Any]]:
|
||||
"""
|
||||
Get the intent and observed placements of all tenants known to the storage controller.
|
||||
"""
|
||||
tenants = self.tenant_list()
|
||||
|
||||
tenant_placement: defaultdict[str, Dict[str, Any]] = defaultdict(
|
||||
tenant_placement: defaultdict[str, dict[str, Any]] = defaultdict(
|
||||
lambda: {
|
||||
"observed": {"attached": None, "secondary": []},
|
||||
"intent": {"attached": None, "secondary": []},
|
||||
@@ -2265,12 +2262,12 @@ class NeonStorageController(MetricsGetter, LogUtils):
         response.raise_for_status()
         return [TenantShardId.parse(tid) for tid in response.json()["updated"]]

-    def __enter__(self) -> "NeonStorageController":
+    def __enter__(self) -> NeonStorageController:
         return self

     def __exit__(
         self,
-        exc_type: Optional[Type[BaseException]],
+        exc_type: Optional[type[BaseException]],
         exc: Optional[BaseException],
         tb: Optional[TracebackType],
     ):
@@ -2279,7 +2276,7 @@ class NeonStorageController(MetricsGetter, LogUtils):

 class NeonProxiedStorageController(NeonStorageController):
     def __init__(self, env: NeonEnv, proxy_port: int, auth_enabled: bool):
-        super(NeonProxiedStorageController, self).__init__(env, proxy_port, auth_enabled)
+        super().__init__(env, proxy_port, auth_enabled)
         self.instances: dict[int, dict[str, Any]] = {}

     def start(
@@ -2298,7 +2295,7 @@ class NeonProxiedStorageController(NeonStorageController):

     def stop_instance(
         self, immediate: bool = False, instance_id: Optional[int] = None
-    ) -> "NeonStorageController":
+    ) -> NeonStorageController:
         assert instance_id in self.instances
         if self.instances[instance_id]["running"]:
             self.env.neon_cli.storage_controller_stop(immediate, instance_id)
@@ -2307,7 +2304,7 @@ class NeonProxiedStorageController(NeonStorageController):
         self.running = any(meta["running"] for meta in self.instances.values())
         return self

-    def stop(self, immediate: bool = False) -> "NeonStorageController":
+    def stop(self, immediate: bool = False) -> NeonStorageController:
         for iid, details in self.instances.items():
             if details["running"]:
                 self.env.neon_cli.storage_controller_stop(immediate, iid)
@@ -2326,7 +2323,7 @@ class NeonProxiedStorageController(NeonStorageController):

     def log_contains(
         self, pattern: str, offset: None | LogCursor = None
-    ) -> Optional[Tuple[str, LogCursor]]:
+    ) -> Optional[tuple[str, LogCursor]]:
         raise NotImplementedError()
@@ -2358,7 +2355,7 @@ class NeonPageserver(PgProtocol, LogUtils):
         # env.pageserver.allowed_errors.append(".*could not open garage door.*")
         #
         # The entries in the list are regular experessions.
-        self.allowed_errors: List[str] = list(DEFAULT_PAGESERVER_ALLOWED_ERRORS)
+        self.allowed_errors: list[str] = list(DEFAULT_PAGESERVER_ALLOWED_ERRORS)

     def timeline_dir(
         self,
@@ -2383,19 +2380,19 @@ class NeonPageserver(PgProtocol, LogUtils):
     def config_toml_path(self) -> Path:
         return self.workdir / "pageserver.toml"

-    def edit_config_toml(self, edit_fn: Callable[[Dict[str, Any]], T]) -> T:
+    def edit_config_toml(self, edit_fn: Callable[[dict[str, Any]], T]) -> T:
         """
         Edit the pageserver's config toml file in place.
         """
         path = self.config_toml_path
-        with open(path, "r") as f:
+        with open(path) as f:
             config = toml.load(f)
         res = edit_fn(config)
         with open(path, "w") as f:
             toml.dump(config, f)
         return res

-    def patch_config_toml_nonrecursive(self, patch: Dict[str, Any]) -> Dict[str, Any]:
+    def patch_config_toml_nonrecursive(self, patch: dict[str, Any]) -> dict[str, Any]:
         """
         Non-recursively merge the given `patch` dict into the existing config toml, using `dict.update()`.
         Returns the replaced values.
@@ -2404,7 +2401,7 @@ class NeonPageserver(PgProtocol, LogUtils):
         """
         replacements = {}

-        def doit(config: Dict[str, Any]):
+        def doit(config: dict[str, Any]):
             while len(patch) > 0:
                 key, new = patch.popitem()
                 old = config.get(key, None)
@@ -2416,9 +2413,9 @@ class NeonPageserver(PgProtocol, LogUtils):

     def start(
         self,
-        extra_env_vars: Optional[Dict[str, str]] = None,
+        extra_env_vars: Optional[dict[str, str]] = None,
         timeout_in_seconds: Optional[int] = None,
-    ) -> "NeonPageserver":
+    ) -> NeonPageserver:
         """
         Start the page server.
         `overrides` allows to add some config to this pageserver start.
@@ -2444,7 +2441,7 @@ class NeonPageserver(PgProtocol, LogUtils):

         return self

-    def stop(self, immediate: bool = False) -> "NeonPageserver":
+    def stop(self, immediate: bool = False) -> NeonPageserver:
         """
         Stop the page server.
         Returns self.
@@ -2492,12 +2489,12 @@ class NeonPageserver(PgProtocol, LogUtils):
|
||||
|
||||
wait_until(20, 0.5, complete)
|
||||
|
||||
def __enter__(self) -> "NeonPageserver":
|
||||
def __enter__(self) -> NeonPageserver:
|
||||
return self
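Dropping the quotes in return types such as `-> "NeonPageserver"` follows from the same future import: once every annotation is lazily evaluated, explicit forward-reference strings are redundant (ruff's UP037), even for a class referring to itself. Hedged sketch with a hypothetical class:

    from __future__ import annotations


    class Server:  # hypothetical stand-in for the fixture classes in this diff
        def start(self) -> Server:  # no quotes needed although Server is still being defined
            return self

        def __enter__(self) -> Server:
            return self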

def __exit__(
self,
exc_type: Optional[Type[BaseException]],
exc_type: Optional[type[BaseException]],
exc: Optional[BaseException],
tb: Optional[TracebackType],
):
@@ -2544,7 +2541,7 @@ class NeonPageserver(PgProtocol, LogUtils):
def tenant_attach(
self,
tenant_id: TenantId,
config: None | Dict[str, Any] = None,
config: None | dict[str, Any] = None,
generation: Optional[int] = None,
override_storage_controller_generation: bool = False,
):
@@ -2583,7 +2580,7 @@ class NeonPageserver(PgProtocol, LogUtils):
) -> dict[str, Any]:
path = self.tenant_dir(tenant_shard_id) / "config-v1"
log.info(f"Reading location conf from {path}")
bytes = open(path, "r").read()
bytes = open(path).read()
try:
decoded: dict[str, Any] = toml.loads(bytes)
return decoded
@@ -2594,7 +2591,7 @@ class NeonPageserver(PgProtocol, LogUtils):
def tenant_create(
self,
tenant_id: TenantId,
conf: Optional[Dict[str, Any]] = None,
conf: Optional[dict[str, Any]] = None,
auth_token: Optional[str] = None,
generation: Optional[int] = None,
) -> TenantId:
@@ -2660,7 +2657,7 @@ class PgBin:
self.env = os.environ.copy()
self.env["LD_LIBRARY_PATH"] = str(self.pg_lib_dir)

def _fixpath(self, command: List[str]):
def _fixpath(self, command: list[str]):
if "/" not in str(command[0]):
command[0] = str(self.pg_bin_path / command[0])

@@ -2680,7 +2677,7 @@ class PgBin:

def run_nonblocking(
self,
command: List[str],
command: list[str],
env: Optional[Env] = None,
cwd: Optional[Union[str, Path]] = None,
) -> subprocess.Popen[Any]:
@@ -2704,7 +2701,7 @@ class PgBin:

def run(
self,
command: List[str],
command: list[str],
env: Optional[Env] = None,
cwd: Optional[Union[str, Path]] = None,
) -> None:
@@ -2727,7 +2724,7 @@ class PgBin:

def run_capture(
self,
command: List[str],
command: list[str],
env: Optional[Env] = None,
cwd: Optional[str] = None,
with_command_header=True,
@@ -2840,14 +2837,14 @@ class VanillaPostgres(PgProtocol):
]
)

def configure(self, options: List[str]):
def configure(self, options: list[str]):
"""Append lines into postgresql.conf file."""
assert not self.running
with open(os.path.join(self.pgdatadir, "postgresql.conf"), "a") as conf_file:
conf_file.write("\n".join(options))
conf_file.write("\n")

def edit_hba(self, hba: List[str]):
def edit_hba(self, hba: list[str]):
"""Prepend hba lines into pg_hba.conf file."""
assert not self.running
with open(os.path.join(self.pgdatadir, "pg_hba.conf"), "r+") as conf_file:
@@ -2875,12 +2872,12 @@ class VanillaPostgres(PgProtocol):
"""Return size of pgdatadir subdirectory in bytes."""
return get_dir_size(self.pgdatadir / subdir)

def __enter__(self) -> "VanillaPostgres":
def __enter__(self) -> VanillaPostgres:
return self

def __exit__(
self,
exc_type: Optional[Type[BaseException]],
exc_type: Optional[type[BaseException]],
exc: Optional[BaseException],
tb: Optional[TracebackType],
):
@@ -2910,7 +2907,7 @@ class RemotePostgres(PgProtocol):
# The remote server is assumed to be running already
self.running = True

def configure(self, options: List[str]):
def configure(self, options: list[str]):
raise Exception("cannot change configuration of remote Postgres instance")

def start(self):
@@ -2924,12 +2921,12 @@ class RemotePostgres(PgProtocol):
# See https://www.postgresql.org/docs/14/functions-admin.html#FUNCTIONS-ADMIN-GENFILE
raise Exception("cannot get size of a Postgres instance")

def __enter__(self) -> "RemotePostgres":
def __enter__(self) -> RemotePostgres:
return self

def __exit__(
self,
exc_type: Optional[Type[BaseException]],
exc_type: Optional[type[BaseException]],
exc: Optional[BaseException],
tb: Optional[TracebackType],
):
@@ -3265,7 +3262,7 @@ class NeonProxy(PgProtocol):

def __exit__(
self,
exc_type: Optional[Type[BaseException]],
exc_type: Optional[type[BaseException]],
exc: Optional[BaseException],
tb: Optional[TracebackType],
):
@@ -3403,7 +3400,7 @@ class Endpoint(PgProtocol, LogUtils):
self.http_port = http_port
self.check_stop_result = check_stop_result
# passed to endpoint create and endpoint reconfigure
self.active_safekeepers: List[int] = list(map(lambda sk: sk.id, env.safekeepers))
self.active_safekeepers: list[int] = list(map(lambda sk: sk.id, env.safekeepers))
# path to conf is <repo_dir>/endpoints/<endpoint_id>/pgdata/postgresql.conf

# Semaphore is set to 1 when we start, and acquire'd back to zero when we stop
@@ -3426,10 +3423,10 @@ class Endpoint(PgProtocol, LogUtils):
endpoint_id: Optional[str] = None,
hot_standby: bool = False,
lsn: Optional[Lsn] = None,
config_lines: Optional[List[str]] = None,
config_lines: Optional[list[str]] = None,
pageserver_id: Optional[int] = None,
allow_multiple: bool = False,
) -> "Endpoint":
) -> Endpoint:
"""
Create a new Postgres endpoint.
Returns self.
@@ -3472,10 +3469,10 @@ class Endpoint(PgProtocol, LogUtils):
self,
remote_ext_config: Optional[str] = None,
pageserver_id: Optional[int] = None,
safekeepers: Optional[List[int]] = None,
safekeepers: Optional[list[int]] = None,
allow_multiple: bool = False,
basebackup_request_tries: Optional[int] = None,
) -> "Endpoint":
) -> Endpoint:
"""
Start the Postgres instance.
Returns self.
@@ -3524,7 +3521,7 @@ class Endpoint(PgProtocol, LogUtils):
"""Path to the postgresql.conf in the endpoint directory (not the one in pgdata)"""
return self.endpoint_path() / "postgresql.conf"

def config(self, lines: List[str]) -> "Endpoint":
def config(self, lines: list[str]) -> Endpoint:
"""
Add lines to postgresql.conf.
Lines should be an array of valid postgresql.conf rows.
@@ -3538,7 +3535,7 @@ class Endpoint(PgProtocol, LogUtils):

return self

def edit_hba(self, hba: List[str]):
def edit_hba(self, hba: list[str]):
"""Prepend hba lines into pg_hba.conf file."""
with open(os.path.join(self.pg_data_dir_path(), "pg_hba.conf"), "r+") as conf_file:
data = conf_file.read()
@@ -3553,7 +3550,7 @@ class Endpoint(PgProtocol, LogUtils):
return self._running._value > 0

def reconfigure(
self, pageserver_id: Optional[int] = None, safekeepers: Optional[List[int]] = None
self, pageserver_id: Optional[int] = None, safekeepers: Optional[list[int]] = None
):
assert self.endpoint_id is not None
# If `safekeepers` is not None, remember them as active and use
@@ -3568,7 +3565,7 @@ class Endpoint(PgProtocol, LogUtils):
"""Update the endpoint.json file used by control_plane."""
# Read config
config_path = os.path.join(self.endpoint_path(), "endpoint.json")
with open(config_path, "r") as f:
with open(config_path) as f:
data_dict: dict[str, Any] = json.load(f)

# Write it back updated
@@ -3601,8 +3598,8 @@ class Endpoint(PgProtocol, LogUtils):
def stop(
self,
mode: str = "fast",
sks_wait_walreceiver_gone: Optional[tuple[List[Safekeeper], TimelineId]] = None,
) -> "Endpoint":
sks_wait_walreceiver_gone: Optional[tuple[list[Safekeeper], TimelineId]] = None,
) -> Endpoint:
"""
Stop the Postgres instance if it's running.

@@ -3636,7 +3633,7 @@ class Endpoint(PgProtocol, LogUtils):

return self

def stop_and_destroy(self, mode: str = "immediate") -> "Endpoint":
def stop_and_destroy(self, mode: str = "immediate") -> Endpoint:
"""
Stop the Postgres instance, then destroy the endpoint.
Returns self.
@@ -3658,12 +3655,12 @@ class Endpoint(PgProtocol, LogUtils):
endpoint_id: Optional[str] = None,
hot_standby: bool = False,
lsn: Optional[Lsn] = None,
config_lines: Optional[List[str]] = None,
config_lines: Optional[list[str]] = None,
remote_ext_config: Optional[str] = None,
pageserver_id: Optional[int] = None,
allow_multiple=False,
basebackup_request_tries: Optional[int] = None,
) -> "Endpoint":
) -> Endpoint:
"""
Create an endpoint, apply config, and start Postgres.
Returns self.
@@ -3690,12 +3687,12 @@ class Endpoint(PgProtocol, LogUtils):

return self

def __enter__(self) -> "Endpoint":
def __enter__(self) -> Endpoint:
return self

def __exit__(
self,
exc_type: Optional[Type[BaseException]],
exc_type: Optional[type[BaseException]],
exc: Optional[BaseException],
tb: Optional[TracebackType],
):
@@ -3726,7 +3723,7 @@ class EndpointFactory:
def __init__(self, env: NeonEnv):
self.env = env
self.num_instances: int = 0
self.endpoints: List[Endpoint] = []
self.endpoints: list[Endpoint] = []

def create_start(
self,
@@ -3735,7 +3732,7 @@ class EndpointFactory:
tenant_id: Optional[TenantId] = None,
lsn: Optional[Lsn] = None,
hot_standby: bool = False,
config_lines: Optional[List[str]] = None,
config_lines: Optional[list[str]] = None,
remote_ext_config: Optional[str] = None,
pageserver_id: Optional[int] = None,
basebackup_request_tries: Optional[int] = None,
@@ -3767,7 +3764,7 @@ class EndpointFactory:
tenant_id: Optional[TenantId] = None,
lsn: Optional[Lsn] = None,
hot_standby: bool = False,
config_lines: Optional[List[str]] = None,
config_lines: Optional[list[str]] = None,
pageserver_id: Optional[int] = None,
) -> Endpoint:
ep = Endpoint(
@@ -3791,7 +3788,7 @@ class EndpointFactory:
pageserver_id=pageserver_id,
)

def stop_all(self, fail_on_error=True) -> "EndpointFactory":
def stop_all(self, fail_on_error=True) -> EndpointFactory:
exception = None
for ep in self.endpoints:
try:
@@ -3806,7 +3803,7 @@ class EndpointFactory:
return self

def new_replica(
self, origin: Endpoint, endpoint_id: str, config_lines: Optional[List[str]] = None
self, origin: Endpoint, endpoint_id: str, config_lines: Optional[list[str]] = None
):
branch_name = origin.branch_name
assert origin in self.endpoints
@@ -3822,7 +3819,7 @@ class EndpointFactory:
)

def new_replica_start(
self, origin: Endpoint, endpoint_id: str, config_lines: Optional[List[str]] = None
self, origin: Endpoint, endpoint_id: str, config_lines: Optional[list[str]] = None
):
branch_name = origin.branch_name
assert origin in self.endpoints
@@ -3860,7 +3857,7 @@ class Safekeeper(LogUtils):
port: SafekeeperPort,
id: int,
running: bool = False,
extra_opts: Optional[List[str]] = None,
extra_opts: Optional[list[str]] = None,
):
self.env = env
self.port = port
@@ -3886,8 +3883,8 @@ class Safekeeper(LogUtils):
self.extra_opts = extra_opts

def start(
self, extra_opts: Optional[List[str]] = None, timeout_in_seconds: Optional[int] = None
) -> "Safekeeper":
self, extra_opts: Optional[list[str]] = None, timeout_in_seconds: Optional[int] = None
) -> Safekeeper:
if extra_opts is None:
# Apply either the extra_opts passed in, or the ones from our constructor: we do not merge the two.
extra_opts = self.extra_opts
@@ -3922,7 +3919,7 @@ class Safekeeper(LogUtils):
break # success
return self

def stop(self, immediate: bool = False) -> "Safekeeper":
def stop(self, immediate: bool = False) -> Safekeeper:
log.info(f"Stopping safekeeper {self.id}")
self.env.neon_cli.safekeeper_stop(self.id, immediate)
self.running = False
@@ -3934,8 +3931,8 @@ class Safekeeper(LogUtils):
assert not self.log_contains("timeout while acquiring WalResidentTimeline guard")

def append_logical_message(
self, tenant_id: TenantId, timeline_id: TimelineId, request: Dict[str, Any]
) -> Dict[str, Any]:
self, tenant_id: TenantId, timeline_id: TimelineId, request: dict[str, Any]
) -> dict[str, Any]:
"""
Send JSON_CTRL query to append LogicalMessage to WAL and modify
safekeeper state. It will construct LogicalMessage from provided
@@ -3988,7 +3985,7 @@ class Safekeeper(LogUtils):

def pull_timeline(
self, srcs: list[Safekeeper], tenant_id: TenantId, timeline_id: TimelineId
) -> Dict[str, Any]:
) -> dict[str, Any]:
"""
pull_timeline from srcs to self.
"""
@@ -4024,7 +4021,7 @@ class Safekeeper(LogUtils):
mysegs = [s for s in segs if f"sk{self.id}" in s]
return mysegs

def list_segments(self, tenant_id, timeline_id) -> List[str]:
def list_segments(self, tenant_id, timeline_id) -> list[str]:
"""
Get list of segment names of the given timeline.
"""
@@ -4129,7 +4126,7 @@ class StorageScrubber:
self.log_dir = log_dir

def scrubber_cli(
self, args: list[str], timeout, extra_env: Optional[Dict[str, str]] = None
self, args: list[str], timeout, extra_env: Optional[dict[str, str]] = None
) -> str:
assert isinstance(self.env.pageserver_remote_storage, S3Storage)
s3_storage = self.env.pageserver_remote_storage
@@ -4176,10 +4173,10 @@ class StorageScrubber:

def scan_metadata_safekeeper(
self,
timeline_lsns: List[Dict[str, Any]],
timeline_lsns: list[dict[str, Any]],
cloud_admin_api_url: str,
cloud_admin_api_token: str,
) -> Tuple[bool, Any]:
) -> tuple[bool, Any]:
extra_env = {
"CLOUD_ADMIN_API_URL": cloud_admin_api_url,
"CLOUD_ADMIN_API_TOKEN": cloud_admin_api_token,
@@ -4192,9 +4189,9 @@ class StorageScrubber:
self,
post_to_storage_controller: bool = False,
node_kind: NodeKind = NodeKind.PAGESERVER,
timeline_lsns: Optional[List[Dict[str, Any]]] = None,
extra_env: Optional[Dict[str, str]] = None,
) -> Tuple[bool, Any]:
timeline_lsns: Optional[list[dict[str, Any]]] = None,
extra_env: Optional[dict[str, str]] = None,
) -> tuple[bool, Any]:
"""
Returns the health status and the metadata summary.
"""
@@ -4501,7 +4498,7 @@ def should_skip_file(filename: str) -> bool:
#
# Test helpers
#
def list_files_to_compare(pgdata_dir: Path) -> List[str]:
def list_files_to_compare(pgdata_dir: Path) -> list[str]:
pgdata_files = []
for root, _dirs, filenames in os.walk(pgdata_dir):
for filename in filenames:

@@ -1,5 +1,7 @@
from __future__ import annotations

from collections.abc import Iterator
from pathlib import Path
from typing import Iterator

import psutil
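This hunk also swaps `typing.Iterator` for `collections.abc.Iterator`: the abstract container classes have lived in `collections.abc` since Python 3.3, and the `typing` re-exports are deprecated since 3.9 (ruff's UP035). Minimal sketch, not from the diff:

    from __future__ import annotations

    from collections.abc import Iterable, Iterator  # preferred over the typing re-exports


    def non_empty(lines: Iterable[str]) -> Iterator[str]:
        return (line for line in lines if line.strip())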


@@ -0,0 +1 @@
from __future__ import annotations

@@ -1,14 +1,16 @@
#! /usr/bin/env python3

from __future__ import annotations

import argparse
import re
import sys
from typing import Iterable, List, Tuple
from collections.abc import Iterable


def scan_pageserver_log_for_errors(
input: Iterable[str], allowed_errors: List[str]
) -> List[Tuple[int, str]]:
input: Iterable[str], allowed_errors: list[str]
) -> list[tuple[int, str]]:
error_or_warn = re.compile(r"\s(ERROR|WARN)")
errors = []
for lineno, line in enumerate(input, start=1):
@@ -113,7 +115,7 @@ DEFAULT_STORAGE_CONTROLLER_ALLOWED_ERRORS = [


def _check_allowed_errors(input):
allowed_errors: List[str] = list(DEFAULT_PAGESERVER_ALLOWED_ERRORS)
allowed_errors: list[str] = list(DEFAULT_PAGESERVER_ALLOWED_ERRORS)

# add any test specifics here; cli parsing is not provided for the
# difficulty of copypasting regexes as arguments without any quoting

@@ -1,9 +1,14 @@
from __future__ import annotations

import re
from dataclasses import dataclass
from typing import Any, Dict, Tuple, Union
from typing import TYPE_CHECKING, Union

from fixtures.common_types import KEY_MAX, KEY_MIN, Key, Lsn

if TYPE_CHECKING:
from typing import Any
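The new `if TYPE_CHECKING:` blocks are the companion pattern to the pyupgrade changes: names used only inside annotations are imported in a block that type checkers execute but the interpreter never does (`typing.TYPE_CHECKING` is `False` at runtime), and the future import guarantees the annotations mentioning them are never evaluated. A minimal sketch:

    from __future__ import annotations

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        # Only static type checkers (mypy, pyright) see this import.
        from typing import Any


    def summarize(payload: dict[str, Any]) -> str:  # annotation never evaluated at runtime
        return ", ".join(sorted(payload))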


@dataclass
class IndexLayerMetadata:
@@ -53,7 +58,7 @@ IMAGE_LAYER_FILE_NAME = re.compile(
)


def parse_image_layer(f_name: str) -> Tuple[int, int, int]:
def parse_image_layer(f_name: str) -> tuple[int, int, int]:
"""Parse an image layer file name. Return key start, key end, and snapshot lsn"""

match = IMAGE_LAYER_FILE_NAME.match(f_name)
@@ -68,7 +73,7 @@ DELTA_LAYER_FILE_NAME = re.compile(
)


def parse_delta_layer(f_name: str) -> Tuple[int, int, int, int]:
def parse_delta_layer(f_name: str) -> tuple[int, int, int, int]:
"""Parse a delta layer file name. Return key start, key end, lsn start, and lsn end"""
match = DELTA_LAYER_FILE_NAME.match(f_name)
if match is None:
@@ -121,11 +126,11 @@ def is_future_layer(layer_file_name: LayerName, disk_consistent_lsn: Lsn):

@dataclass
class IndexPartDump:
layer_metadata: Dict[LayerName, IndexLayerMetadata]
layer_metadata: dict[LayerName, IndexLayerMetadata]
disk_consistent_lsn: Lsn

@classmethod
def from_json(cls, d: Dict[str, Any]) -> "IndexPartDump":
def from_json(cls, d: dict[str, Any]) -> IndexPartDump:
return IndexPartDump(
layer_metadata={
parse_layer_file_name(n): IndexLayerMetadata(v["file_size"], v["generation"])

@@ -4,7 +4,7 @@ import time
from collections import defaultdict
from dataclasses import dataclass
from datetime import datetime
from typing import Any, Dict, List, Optional, Set, Tuple, Union
from typing import TYPE_CHECKING, Any

import requests
from requests.adapters import HTTPAdapter
@@ -16,6 +19,9 @@ from fixtures.metrics import Metrics, MetricsGetter, parse_metrics
from fixtures.pg_version import PgVersion
from fixtures.utils import Fn

if TYPE_CHECKING:
from typing import Optional, Union


class PageserverApiException(Exception):
def __init__(self, message, status_code: int):
@@ -43,7 +46,7 @@ class InMemoryLayerInfo:
lsn_end: Optional[str]

@classmethod
def from_json(cls, d: Dict[str, Any]) -> InMemoryLayerInfo:
def from_json(cls, d: dict[str, Any]) -> InMemoryLayerInfo:
return InMemoryLayerInfo(
kind=d["kind"],
lsn_start=d["lsn_start"],
@@ -64,7 +67,7 @@ class HistoricLayerInfo:
visible: bool

@classmethod
def from_json(cls, d: Dict[str, Any]) -> HistoricLayerInfo:
def from_json(cls, d: dict[str, Any]) -> HistoricLayerInfo:
# instead of parsing the key range lets keep the definition of "L0" in pageserver
l0_ness = d.get("l0")
assert l0_ness is None or isinstance(l0_ness, bool)
@@ -86,53 +89,53 @@ class HistoricLayerInfo:

@dataclass
class LayerMapInfo:
in_memory_layers: List[InMemoryLayerInfo]
historic_layers: List[HistoricLayerInfo]
in_memory_layers: list[InMemoryLayerInfo]
historic_layers: list[HistoricLayerInfo]

@classmethod
def from_json(cls, d: Dict[str, Any]) -> LayerMapInfo:
def from_json(cls, d: dict[str, Any]) -> LayerMapInfo:
info = LayerMapInfo(in_memory_layers=[], historic_layers=[])

json_in_memory_layers = d["in_memory_layers"]
assert isinstance(json_in_memory_layers, List)
assert isinstance(json_in_memory_layers, list)
for json_in_memory_layer in json_in_memory_layers:
info.in_memory_layers.append(InMemoryLayerInfo.from_json(json_in_memory_layer))

json_historic_layers = d["historic_layers"]
assert isinstance(json_historic_layers, List)
assert isinstance(json_historic_layers, list)
for json_historic_layer in json_historic_layers:
info.historic_layers.append(HistoricLayerInfo.from_json(json_historic_layer))

return info
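The `isinstance(..., List)` fixes just above are more than cosmetic: these names are used at runtime rather than in annotations, and `typing.List` is a deprecated alias (since Python 3.9) that only incidentally works as an isinstance target — the builtin is the canonical runtime check. Sketch:

    import json

    payload = json.loads('{"in_memory_layers": [], "historic_layers": []}')

    # Runtime type checks should use the builtin; typing.List is a deprecated alias here.
    assert isinstance(payload["in_memory_layers"], list)
    assert isinstance(payload["historic_layers"], list)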

def kind_count(self) -> Dict[str, int]:
counts: Dict[str, int] = defaultdict(int)
def kind_count(self) -> dict[str, int]:
counts: dict[str, int] = defaultdict(int)
for inmem_layer in self.in_memory_layers:
counts[inmem_layer.kind] += 1
for hist_layer in self.historic_layers:
counts[hist_layer.kind] += 1
return counts

def delta_layers(self) -> List[HistoricLayerInfo]:
def delta_layers(self) -> list[HistoricLayerInfo]:
return [x for x in self.historic_layers if x.kind == "Delta"]

def image_layers(self) -> List[HistoricLayerInfo]:
def image_layers(self) -> list[HistoricLayerInfo]:
return [x for x in self.historic_layers if x.kind == "Image"]

def delta_l0_layers(self) -> List[HistoricLayerInfo]:
def delta_l0_layers(self) -> list[HistoricLayerInfo]:
return [x for x in self.historic_layers if x.kind == "Delta" and x.l0]

def historic_by_name(self) -> Set[str]:
def historic_by_name(self) -> set[str]:
return set(x.layer_file_name for x in self.historic_layers)


@dataclass
class TenantConfig:
tenant_specific_overrides: Dict[str, Any]
effective_config: Dict[str, Any]
tenant_specific_overrides: dict[str, Any]
effective_config: dict[str, Any]

@classmethod
def from_json(cls, d: Dict[str, Any]) -> TenantConfig:
def from_json(cls, d: dict[str, Any]) -> TenantConfig:
return TenantConfig(
tenant_specific_overrides=d["tenant_specific_overrides"],
effective_config=d["effective_config"],
@@ -209,7 +212,7 @@ class PageserverHttpClient(requests.Session, MetricsGetter):
def check_status(self):
self.get(f"http://localhost:{self.port}/v1/status").raise_for_status()

def configure_failpoints(self, config_strings: Tuple[str, str] | List[Tuple[str, str]]):
def configure_failpoints(self, config_strings: tuple[str, str] | list[tuple[str, str]]):
self.is_testing_enabled_or_skip()

if isinstance(config_strings, tuple):
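Two details are worth noting in the signature above: the `X | Y` union was already legal here before Python 3.10 because the future import keeps the annotation unevaluated, and the commit lowercases the containers inside it while leaving `typing.Optional`/`typing.Union` alone elsewhere — the bar-syntax rewrite only becomes a safe default once 3.10 is the minimum runtime. A hypothetical 3.10+ spelling for comparison, not part of the diff:

    # Hypothetical: how the same check could read once typing.Union is fully retired.
    def configure_failpoints(config_strings: tuple[str, str] | list[tuple[str, str]]) -> None:
        pairs = [config_strings] if isinstance(config_strings, tuple) else config_strings
        for name, action in pairs:
            print(f"failpoint {name} -> {action}")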
@@ -233,7 +236,7 @@ class PageserverHttpClient(requests.Session, MetricsGetter):
res = self.post(f"http://localhost:{self.port}/v1/reload_auth_validation_keys")
self.verbose_error(res)

def tenant_list(self) -> List[Dict[Any, Any]]:
def tenant_list(self) -> list[dict[Any, Any]]:
res = self.get(f"http://localhost:{self.port}/v1/tenant")
self.verbose_error(res)
res_json = res.json()
@@ -244,7 +247,7 @@ class PageserverHttpClient(requests.Session, MetricsGetter):
self,
tenant_id: Union[TenantId, TenantShardId],
generation: int,
config: None | Dict[str, Any] = None,
config: None | dict[str, Any] = None,
):
config = config or {}

@@ -324,7 +327,7 @@ class PageserverHttpClient(requests.Session, MetricsGetter):

def tenant_status(
self, tenant_id: Union[TenantId, TenantShardId], activate: bool = False
) -> Dict[Any, Any]:
) -> dict[Any, Any]:
"""
:activate: hint the server not to accelerate activation of this tenant in response
to this query. False by default for tests, because they generally want to observe the
@@ -378,8 +381,8 @@ class PageserverHttpClient(requests.Session, MetricsGetter):
def patch_tenant_config_client_side(
self,
tenant_id: TenantId,
inserts: Optional[Dict[str, Any]] = None,
removes: Optional[List[str]] = None,
inserts: Optional[dict[str, Any]] = None,
removes: Optional[list[str]] = None,
):
current = self.tenant_config(tenant_id).tenant_specific_overrides
if inserts is not None:
@@ -394,7 +397,7 @@ class PageserverHttpClient(requests.Session, MetricsGetter):

def tenant_size_and_modelinputs(
self, tenant_id: Union[TenantId, TenantShardId]
) -> Tuple[int, Dict[str, Any]]:
) -> tuple[int, dict[str, Any]]:
"""
Returns the tenant size, together with the model inputs as the second tuple item.
"""
@@ -424,7 +427,7 @@ class PageserverHttpClient(requests.Session, MetricsGetter):
tenant_id: Union[TenantId, TenantShardId],
timestamp: datetime,
done_if_after: datetime,
shard_counts: Optional[List[int]] = None,
shard_counts: Optional[list[int]] = None,
):
"""
Issues a request to perform time travel operations on the remote storage
@@ -432,7 +435,7 @@ class PageserverHttpClient(requests.Session, MetricsGetter):

if shard_counts is None:
shard_counts = []
body: Dict[str, Any] = {
body: dict[str, Any] = {
"shard_counts": shard_counts,
}
res = self.put(
@@ -446,7 +449,7 @@ class PageserverHttpClient(requests.Session, MetricsGetter):
tenant_id: Union[TenantId, TenantShardId],
include_non_incremental_logical_size: bool = False,
include_timeline_dir_layer_file_size_sum: bool = False,
) -> List[Dict[str, Any]]:
) -> list[dict[str, Any]]:
params = {}
if include_non_incremental_logical_size:
params["include-non-incremental-logical-size"] = "true"
@@ -470,8 +473,8 @@ class PageserverHttpClient(requests.Session, MetricsGetter):
ancestor_start_lsn: Optional[Lsn] = None,
existing_initdb_timeline_id: Optional[TimelineId] = None,
**kwargs,
) -> Dict[Any, Any]:
body: Dict[str, Any] = {
) -> dict[Any, Any]:
body: dict[str, Any] = {
"new_timeline_id": str(new_timeline_id),
"ancestor_start_lsn": str(ancestor_start_lsn) if ancestor_start_lsn else None,
"ancestor_timeline_id": str(ancestor_timeline_id) if ancestor_timeline_id else None,
@@ -504,7 +507,7 @@ class PageserverHttpClient(requests.Session, MetricsGetter):
include_timeline_dir_layer_file_size_sum: bool = False,
force_await_initial_logical_size: bool = False,
**kwargs,
) -> Dict[Any, Any]:
) -> dict[Any, Any]:
params = {}
if include_non_incremental_logical_size:
params["include-non-incremental-logical-size"] = "true"
@@ -844,7 +847,7 @@ class PageserverHttpClient(requests.Session, MetricsGetter):
)
if len(res) != 2:
return None
inc, dec = [res[metric] for metric in metrics]
inc, dec = (res[metric] for metric in metrics)
queue_count = int(inc) - int(dec)
assert queue_count >= 0
return queue_count
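`inc, dec = [...]` becoming `inc, dec = (...)` is pyupgrade's unpack-from-generator rewrite: tuple unpacking accepts any iterable, so the intermediate list was pure overhead. A minimal sketch with made-up values:

    metrics = ["started", "finished"]
    res = {"started": 10.0, "finished": 7.0}

    # Unpacking consumes any iterable; a generator avoids building a throwaway list.
    inc, dec = (res[metric] for metric in metrics)
    assert int(inc) - int(dec) == 3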
@@ -885,7 +888,7 @@ class PageserverHttpClient(requests.Session, MetricsGetter):
timeline_id: TimelineId,
batch_size: int | None = None,
**kwargs,
) -> Set[TimelineId]:
) -> set[TimelineId]:
params = {}
if batch_size is not None:
params["batch_size"] = batch_size

@@ -1,5 +1,7 @@
from __future__ import annotations

import concurrent.futures
from typing import Any, Callable, Dict, Tuple
from typing import TYPE_CHECKING

import fixtures.pageserver.remote_storage
from fixtures.common_types import TenantId, TimelineId
@@ -10,10 +12,13 @@ from fixtures.neon_fixtures import (
)
from fixtures.remote_storage import LocalFsStorage, RemoteStorageKind

if TYPE_CHECKING:
from typing import Any, Callable


def single_timeline(
neon_env_builder: NeonEnvBuilder,
setup_template: Callable[[NeonEnv], Tuple[TenantId, TimelineId, Dict[str, Any]]],
setup_template: Callable[[NeonEnv], tuple[TenantId, TimelineId, dict[str, Any]]],
ncopies: int,
) -> NeonEnv:
"""

@@ -1,10 +1,12 @@
from __future__ import annotations

import concurrent.futures
import os
import queue
import shutil
import threading
from pathlib import Path
from typing import Any, List, Tuple
from typing import TYPE_CHECKING

from fixtures.common_types import TenantId, TimelineId
from fixtures.neon_fixtures import NeonEnv
@@ -14,6 +16,9 @@ from fixtures.pageserver.common_types import (
)
from fixtures.remote_storage import LocalFsStorage

if TYPE_CHECKING:
from typing import Any


def duplicate_one_tenant(env: NeonEnv, template_tenant: TenantId, new_tenant: TenantId):
remote_storage = env.pageserver_remote_storage
@@ -50,13 +55,13 @@ def duplicate_one_tenant(env: NeonEnv, template_tenant: TenantId, new_tenant: Te
return None


def duplicate_tenant(env: NeonEnv, template_tenant: TenantId, ncopies: int) -> List[TenantId]:
def duplicate_tenant(env: NeonEnv, template_tenant: TenantId, ncopies: int) -> list[TenantId]:
assert isinstance(env.pageserver_remote_storage, LocalFsStorage)

def work(tenant_id):
duplicate_one_tenant(env, template_tenant, tenant_id)

new_tenants: List[TenantId] = [TenantId.generate() for _ in range(0, ncopies)]
new_tenants: list[TenantId] = [TenantId.generate() for _ in range(0, ncopies)]
with concurrent.futures.ThreadPoolExecutor(max_workers=8) as executor:
executor.map(work, new_tenants)
return new_tenants
@@ -79,7 +84,7 @@ def local_layer_name_from_remote_name(remote_name: str) -> str:


def copy_all_remote_layer_files_to_local_tenant_dir(
env: NeonEnv, tenant_timelines: List[Tuple[TenantId, TimelineId]]
env: NeonEnv, tenant_timelines: list[tuple[TenantId, TimelineId]]
):
remote_storage = env.pageserver_remote_storage
assert isinstance(remote_storage, LocalFsStorage)

@@ -1,5 +1,7 @@
from __future__ import annotations

import time
from typing import Any, Dict, List, Optional, Tuple, Union
from typing import TYPE_CHECKING

from mypy_boto3_s3.type_defs import (
DeleteObjectOutputTypeDef,
@@ -14,6 +16,9 @@ from fixtures.pageserver.http import PageserverApiException, PageserverHttpClien
from fixtures.remote_storage import RemoteStorage, RemoteStorageKind, S3Storage
from fixtures.utils import wait_until

if TYPE_CHECKING:
from typing import Any, Optional, Union


def assert_tenant_state(
pageserver_http: PageserverHttpClient,
@@ -66,7 +71,7 @@ def wait_for_upload(
)


def _tenant_in_expected_state(tenant_info: Dict[str, Any], expected_state: str):
def _tenant_in_expected_state(tenant_info: dict[str, Any], expected_state: str):
if tenant_info["state"]["slug"] == expected_state:
return True
if tenant_info["state"]["slug"] == "Broken":
@@ -80,7 +85,7 @@ def wait_until_tenant_state(
expected_state: str,
iterations: int,
period: float = 1.0,
) -> Dict[str, Any]:
) -> dict[str, Any]:
"""
Does not use `wait_until` for debugging purposes
"""
@@ -136,7 +141,7 @@ def wait_until_timeline_state(
expected_state: str,
iterations: int,
period: float = 1.0,
) -> Dict[str, Any]:
) -> dict[str, Any]:
"""
Does not use `wait_until` for debugging purposes
"""
@@ -147,7 +152,7 @@ def wait_until_timeline_state(
if isinstance(timeline["state"], str):
if timeline["state"] == expected_state:
return timeline
elif isinstance(timeline, Dict):
elif isinstance(timeline, dict):
if timeline["state"].get(expected_state):
return timeline

@@ -235,7 +240,7 @@ def wait_for_upload_queue_empty(

# this is `started left join finished`; if match, subtracting start from finished, resulting in queue depth
remaining_labels = ["shard_id", "file_kind", "op_kind"]
tl: List[Tuple[Any, float]] = []
tl: list[tuple[Any, float]] = []
for s in started:
found = False
for f in finished:
@@ -302,7 +307,7 @@ def assert_prefix_empty(
assert remote_storage is not None
response = list_prefix(remote_storage, prefix)
keys = response["KeyCount"]
objects: List[ObjectTypeDef] = response.get("Contents", [])
objects: list[ObjectTypeDef] = response.get("Contents", [])
common_prefixes = response.get("CommonPrefixes", [])

is_mock_s3 = isinstance(remote_storage, S3Storage) and not remote_storage.cleanup
@@ -430,7 +435,7 @@ def enable_remote_storage_versioning(
return response


def many_small_layers_tenant_config() -> Dict[str, Any]:
def many_small_layers_tenant_config() -> dict[str, Any]:
"""
Create a new dict to avoid issues with deleting from the global value.
In python, the global is mutable.

@@ -1,5 +1,7 @@
from __future__ import annotations

import os
from typing import Any, Dict, Optional
from typing import TYPE_CHECKING

import allure
import pytest
@@ -9,6 +11,10 @@ from _pytest.python import Metafunc
from fixtures.pg_version import PgVersion
from fixtures.utils import AuxFileStore

if TYPE_CHECKING:
from typing import Any, Optional


"""
Dynamically parametrize tests by different parameters
"""
@@ -44,7 +50,7 @@ def pageserver_aux_file_policy() -> Optional[AuxFileStore]:
return None


def get_pageserver_default_tenant_config_compaction_algorithm() -> Optional[Dict[str, Any]]:
def get_pageserver_default_tenant_config_compaction_algorithm() -> Optional[dict[str, Any]]:
toml_table = os.getenv("PAGESERVER_DEFAULT_TENANT_CONFIG_COMPACTION_ALGORITHM")
if toml_table is None:
return None
@@ -54,7 +60,7 @@ def get_pageserver_default_tenant_config_compaction_algorithm() -> Optional[Dict


@pytest.fixture(scope="function", autouse=True)
def pageserver_default_tenant_config_compaction_algorithm() -> Optional[Dict[str, Any]]:
def pageserver_default_tenant_config_compaction_algorithm() -> Optional[dict[str, Any]]:
return get_pageserver_default_tenant_config_compaction_algorithm()



@@ -1,15 +1,16 @@
from __future__ import annotations

from functools import cached_property
from typing import List

import pytest


class PgStatTable:
table: str
columns: List[str]
columns: list[str]
additional_query: str

def __init__(self, table: str, columns: List[str], filter_query: str = ""):
def __init__(self, table: str, columns: list[str], filter_query: str = ""):
self.table = table
self.columns = columns
self.additional_query = filter_query
@@ -20,7 +21,7 @@ class PgStatTable:


@pytest.fixture(scope="function")
def pg_stats_rw() -> List[PgStatTable]:
def pg_stats_rw() -> list[PgStatTable]:
return [
PgStatTable(
"pg_stat_database",
@@ -31,7 +32,7 @@ def pg_stats_rw() -> List[PgStatTable]:


@pytest.fixture(scope="function")
def pg_stats_ro() -> List[PgStatTable]:
def pg_stats_ro() -> list[PgStatTable]:
return [
PgStatTable(
"pg_stat_database", ["tup_returned", "tup_fetched"], "WHERE datname='postgres'"
@@ -40,7 +41,7 @@ def pg_stats_ro() -> List[PgStatTable]:


@pytest.fixture(scope="function")
def pg_stats_wo() -> List[PgStatTable]:
def pg_stats_wo() -> list[PgStatTable]:
return [
PgStatTable(
"pg_stat_database",
@@ -51,7 +52,7 @@ def pg_stats_wo() -> List[PgStatTable]:


@pytest.fixture(scope="function")
def pg_stats_wal() -> List[PgStatTable]:
def pg_stats_wal() -> list[PgStatTable]:
return [
PgStatTable(
"pg_stat_wal",

@@ -1,3 +1,5 @@
from __future__ import annotations

import enum
import os
from typing import Optional
@@ -36,7 +38,7 @@ class PgVersion(str, enum.Enum):
return f"v{self.value}"

@classmethod
def _missing_(cls, value) -> Optional["PgVersion"]:
def _missing_(cls, value) -> Optional[PgVersion]:
known_values = {v.value for _, v in cls.__members__.items()}

# Allow passing version as a string with "v" prefix (e.g. "v14")

@@ -1,10 +1,15 @@
from __future__ import annotations

import re
import socket
from contextlib import closing
from typing import Dict, Union
from typing import TYPE_CHECKING

from fixtures.log_helper import log

if TYPE_CHECKING:
from typing import Union


def can_bind(host: str, port: int) -> bool:
"""
@@ -24,7 +29,7 @@ def can_bind(host: str, port: int) -> bool:
sock.bind((host, port))
sock.listen()
return True
except socket.error:
except OSError:
log.info(f"Port {port} is in use, skipping")
return False
finally:
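`except socket.error:` becoming `except OSError:` is ruff's UP024: `socket.error` has been a plain alias of `OSError` since Python 3.3, so the handler's behavior is unchanged. The aliasing is easy to verify:

    import socket

    assert socket.error is OSError  # alias since Python 3.3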
@@ -34,7 +39,7 @@ def can_bind(host: str, port: int) -> bool:
class PortDistributor:
def __init__(self, base_port: int, port_number: int):
self.iterator = iter(range(base_port, base_port + port_number))
self.port_map: Dict[int, int] = {}
self.port_map: dict[int, int] = {}

def get_port(self) -> int:
for port in self.iterator:

@@ -1,3 +1,5 @@
from __future__ import annotations

import enum
import hashlib
import json
@@ -6,7 +8,7 @@ import re
import subprocess
from dataclasses import dataclass
from pathlib import Path
from typing import Any, Dict, List, Optional, Union
from typing import TYPE_CHECKING, Union

import boto3
import toml
@@ -16,6 +18,10 @@ from fixtures.common_types import TenantId, TenantShardId, TimelineId
from fixtures.log_helper import log
from fixtures.pageserver.common_types import IndexPartDump

if TYPE_CHECKING:
from typing import Any, Optional


TIMELINE_INDEX_PART_FILE_NAME = "index_part.json"
TENANT_HEATMAP_FILE_NAME = "heatmap-v1.json"

@@ -142,7 +148,7 @@ class LocalFsStorage:
with self.heatmap_path(tenant_id).open("r") as f:
return json.load(f)

def to_toml_dict(self) -> Dict[str, Any]:
def to_toml_dict(self) -> dict[str, Any]:
return {
"local_path": str(self.root),
}
@@ -175,7 +181,7 @@ class S3Storage:
"""formatting deserialized with humantime crate, for example "1s"."""
custom_timeout: Optional[str] = None

def access_env_vars(self) -> Dict[str, str]:
def access_env_vars(self) -> dict[str, str]:
if self.aws_profile is not None:
env = {
"AWS_PROFILE": self.aws_profile,
@@ -204,7 +210,7 @@ class S3Storage:
}
)

def to_toml_dict(self) -> Dict[str, Any]:
def to_toml_dict(self) -> dict[str, Any]:
rv = {
"bucket_name": self.bucket_name,
"bucket_region": self.bucket_region,
@@ -279,7 +285,7 @@ class S3Storage:
) -> str:
return f"{self.tenant_path(tenant_id)}/timelines/{timeline_id}"

def get_latest_index_key(self, index_keys: List[str]) -> str:
def get_latest_index_key(self, index_keys: list[str]) -> str:
"""
Gets the latest index file key.

@@ -419,7 +425,7 @@ class RemoteStorageKind(str, enum.Enum):
)


def available_remote_storages() -> List[RemoteStorageKind]:
def available_remote_storages() -> list[RemoteStorageKind]:
remote_storages = [RemoteStorageKind.LOCAL_FS, RemoteStorageKind.MOCK_S3]
if os.getenv("ENABLE_REAL_S3_REMOTE_STORAGE") is not None:
remote_storages.append(RemoteStorageKind.REAL_S3)
@@ -429,7 +435,7 @@ def available_remote_storages() -> List[RemoteStorageKind]:
return remote_storages


def available_s3_storages() -> List[RemoteStorageKind]:
def available_s3_storages() -> list[RemoteStorageKind]:
remote_storages = [RemoteStorageKind.MOCK_S3]
if os.getenv("ENABLE_REAL_S3_REMOTE_STORAGE") is not None:
remote_storages.append(RemoteStorageKind.REAL_S3)
@@ -459,7 +465,7 @@ def default_remote_storage() -> RemoteStorageKind:
return RemoteStorageKind.LOCAL_FS


def remote_storage_to_toml_dict(remote_storage: RemoteStorage) -> Dict[str, Any]:
def remote_storage_to_toml_dict(remote_storage: RemoteStorage) -> dict[str, Any]:
if not isinstance(remote_storage, (LocalFsStorage, S3Storage)):
raise Exception("invalid remote storage type")


@@ -0,0 +1 @@
from __future__ import annotations

@@ -1,6 +1,8 @@
from __future__ import annotations

import json
from dataclasses import dataclass
from typing import Any, Dict, List, Optional, Tuple, Union
from typing import TYPE_CHECKING

import pytest
import requests
@@ -10,6 +12,9 @@ from fixtures.log_helper import log
from fixtures.metrics import Metrics, MetricsGetter, parse_metrics
from fixtures.utils import wait_until

if TYPE_CHECKING:
from typing import Any, Optional, Union


# Walreceiver as returned by sk's timeline status endpoint.
@dataclass
@@ -29,7 +34,7 @@ class SafekeeperTimelineStatus:
backup_lsn: Lsn
peer_horizon_lsn: Lsn
remote_consistent_lsn: Lsn
walreceivers: List[Walreceiver]
walreceivers: list[Walreceiver]


class SafekeeperMetrics(Metrics):
@@ -57,7 +62,7 @@ class TermBumpResponse:
current_term: int

@classmethod
def from_json(cls, d: Dict[str, Any]) -> "TermBumpResponse":
def from_json(cls, d: dict[str, Any]) -> TermBumpResponse:
return TermBumpResponse(
previous_term=d["previous_term"],
current_term=d["current_term"],
@@ -93,7 +98,7 @@ class SafekeeperHttpClient(requests.Session, MetricsGetter):
if not self.is_testing_enabled:
pytest.skip("safekeeper was built without 'testing' feature")

def configure_failpoints(self, config_strings: Union[Tuple[str, str], List[Tuple[str, str]]]):
def configure_failpoints(self, config_strings: Union[tuple[str, str], list[tuple[str, str]]]):
self.is_testing_enabled_or_skip()

if isinstance(config_strings, tuple):
@@ -113,14 +118,14 @@ class SafekeeperHttpClient(requests.Session, MetricsGetter):
assert res_json is None
return res_json

def tenant_delete_force(self, tenant_id: TenantId) -> Dict[Any, Any]:
def tenant_delete_force(self, tenant_id: TenantId) -> dict[Any, Any]:
res = self.delete(f"http://localhost:{self.port}/v1/tenant/{tenant_id}")
res.raise_for_status()
res_json = res.json()
assert isinstance(res_json, dict)
return res_json

def timeline_list(self) -> List[TenantTimelineId]:
def timeline_list(self) -> list[TenantTimelineId]:
res = self.get(f"http://localhost:{self.port}/v1/tenant/timeline")
res.raise_for_status()
resj = res.json()
@@ -178,7 +183,7 @@ class SafekeeperHttpClient(requests.Session, MetricsGetter):
# only_local doesn't remove segments in the remote storage.
def timeline_delete(
self, tenant_id: TenantId, timeline_id: TimelineId, only_local: bool = False
) -> Dict[Any, Any]:
) -> dict[Any, Any]:
res = self.delete(
f"http://localhost:{self.port}/v1/tenant/{tenant_id}/timeline/{timeline_id}",
params={
@@ -190,7 +195,7 @@ class SafekeeperHttpClient(requests.Session, MetricsGetter):
assert isinstance(res_json, dict)
return res_json

def debug_dump(self, params: Optional[Dict[str, str]] = None) -> Dict[str, Any]:
def debug_dump(self, params: Optional[dict[str, str]] = None) -> dict[str, Any]:
params = params or {}
res = self.get(f"http://localhost:{self.port}/v1/debug_dump", params=params)
res.raise_for_status()
@@ -199,7 +204,7 @@ class SafekeeperHttpClient(requests.Session, MetricsGetter):
return res_json

def debug_dump_timeline(
self, timeline_id: TimelineId, params: Optional[Dict[str, str]] = None
self, timeline_id: TimelineId, params: Optional[dict[str, str]] = None
) -> Any:
params = params or {}
params["timeline_id"] = str(timeline_id)
@@ -214,14 +219,14 @@ class SafekeeperHttpClient(requests.Session, MetricsGetter):
dump = self.debug_dump_timeline(timeline_id, {"dump_control_file": "true"})
return dump["control_file"]["eviction_state"]

def pull_timeline(self, body: Dict[str, Any]) -> Dict[str, Any]:
def pull_timeline(self, body: dict[str, Any]) -> dict[str, Any]:
res = self.post(f"http://localhost:{self.port}/v1/pull_timeline", json=body)
res.raise_for_status()
res_json = res.json()
assert isinstance(res_json, dict)
return res_json

def copy_timeline(self, tenant_id: TenantId, timeline_id: TimelineId, body: Dict[str, Any]):
def copy_timeline(self, tenant_id: TenantId, timeline_id: TimelineId, body: dict[str, Any]):
res = self.post(
f"http://localhost:{self.port}/v1/tenant/{tenant_id}/timeline/{timeline_id}/copy",
json=body,
@@ -232,8 +237,8 @@ class SafekeeperHttpClient(requests.Session, MetricsGetter):
self,
tenant_id: TenantId,
timeline_id: TimelineId,
patch: Dict[str, Any],
) -> Dict[str, Any]:
patch: dict[str, Any],
) -> dict[str, Any]:
res = self.patch(
f"http://localhost:{self.port}/v1/tenant/{tenant_id}/timeline/{timeline_id}/control_file",
json={
@@ -255,7 +260,7 @@ class SafekeeperHttpClient(requests.Session, MetricsGetter):

def timeline_digest(
self, tenant_id: TenantId, timeline_id: TimelineId, from_lsn: Lsn, until_lsn: Lsn
) -> Dict[str, Any]:
) -> dict[str, Any]:
res = self.get(
f"http://localhost:{self.port}/v1/tenant/{tenant_id}/timeline/{timeline_id}/digest",
params={

@@ -1,3 +1,5 @@
from __future__ import annotations

from fixtures.common_types import TenantId, TimelineId
from fixtures.log_helper import log
from fixtures.safekeeper.http import SafekeeperHttpClient

@@ -1,9 +1,15 @@
from typing import Any, List
from __future__ import annotations

from typing import TYPE_CHECKING

import pytest
from _pytest.config import Config
from _pytest.config.argparsing import Parser

if TYPE_CHECKING:
from typing import Any


"""
This plugin allows tests to be marked as slow using pytest.mark.slow. By default slow
tests are excluded. They need to be specifically requested with the --runslow flag in
@@ -21,7 +27,7 @@ def pytest_configure(config: Config):
config.addinivalue_line("markers", "slow: mark test as slow to run")


def pytest_collection_modifyitems(config: Config, items: List[Any]):
def pytest_collection_modifyitems(config: Config, items: list[Any]):
if config.getoption("--runslow"):
# --runslow given in cli: do not skip slow tests
return

@@ -1,3 +1,5 @@
from __future__ import annotations

import re
from typing import Any, Optional


@@ -1,3 +1,5 @@
from __future__ import annotations

import contextlib
import enum
import json
@@ -7,22 +9,10 @@ import subprocess
import tarfile
import threading
import time
from collections.abc import Iterable
from hashlib import sha256
from pathlib import Path
from typing import (
IO,
TYPE_CHECKING,
Any,
Callable,
Dict,
Iterable,
List,
Optional,
Set,
Tuple,
TypeVar,
Union,
)
from typing import TYPE_CHECKING, Any, Callable, TypeVar
from urllib.parse import urlencode

import allure
@@ -36,6 +26,12 @@ from fixtures.pageserver.common_types import (
)

if TYPE_CHECKING:
from typing import (
IO,
Optional,
Union,
)

from fixtures.neon_fixtures import PgBin
from fixtures.common_types import TimelineId

@@ -44,7 +40,7 @@ Fn = TypeVar("Fn", bound=Callable[..., Any])

def subprocess_capture(
capture_dir: Path,
cmd: List[str],
cmd: list[str],
*,
check=False,
echo_stderr=False,
@@ -53,7 +49,7 @@ def subprocess_capture(
timeout=None,
with_command_header=True,
**popen_kwargs: Any,
) -> Tuple[str, Optional[str], int]:
) -> tuple[str, Optional[str], int]:
"""Run a process and bifurcate its output to files and the `log` logger

stderr and stdout are always captured in files. They are also optionally
@@ -98,7 +94,7 @@ def subprocess_capture(
first = False
# prefix the files with the command line so that we can
# later understand which file is for what command
self.out_file.write((f"# {' '.join(cmd)}\n\n").encode("utf-8"))
self.out_file.write((f"# {' '.join(cmd)}\n\n").encode())

# Only bother decoding if we are going to do something more than stream to a file
if self.echo or self.capture:
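`.encode("utf-8")` becoming `.encode()` is ruff's UP012: UTF-8 is the default codec for `str.encode`, so the explicit argument adds nothing. A quick equivalence check with made-up input:

    cmd = ["pg_dump", "--version"]
    header = f"# {' '.join(cmd)}\n\n"

    # str.encode() defaults to UTF-8, so both spellings produce the same bytes.
    assert header.encode() == header.encode("utf-8")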
@@ -166,7 +162,7 @@ def global_counter() -> int:
return _global_counter


def print_gc_result(row: Dict[str, Any]):
def print_gc_result(row: dict[str, Any]):
log.info("GC duration {elapsed} ms".format_map(row))
log.info(
" total: {layers_total}, needed_by_cutoff {layers_needed_by_cutoff}, needed_by_pitr {layers_needed_by_pitr}"
@@ -304,7 +300,7 @@ def allure_add_grafana_links(host: str, timeline_id: TimelineId, start_ms: int,
"proxy logs": f'{{neon_service="proxy-scram", neon_region="{region_id}"}}',
}

params: Dict[str, Any] = {
params: dict[str, Any] = {
"datasource": LOGS_STAGING_DATASOURCE_ID,
"queries": [
{
@@ -420,7 +416,7 @@ def assert_ge(a, b) -> None:
assert a >= b


def run_pg_bench_small(pg_bin: "PgBin", connstr: str):
def run_pg_bench_small(pg_bin: PgBin, connstr: str):
"""
Fast way to populate data.
For more layers consider combining with these tenant settings:
@@ -465,7 +461,7 @@ def humantime_to_ms(humantime: str) -> float:
return round(total_ms, 3)


def scan_log_for_errors(input: Iterable[str], allowed_errors: List[str]) -> List[Tuple[int, str]]:
def scan_log_for_errors(input: Iterable[str], allowed_errors: list[str]) -> list[tuple[int, str]]:
# FIXME: this duplicates test_runner/fixtures/pageserver/allowed_errors.py
error_or_warn = re.compile(r"\s(ERROR|WARN)")
errors = []
@@ -515,7 +511,7 @@ class AuxFileStore(str, enum.Enum):
return f"'aux-{self.value}'"


def assert_pageserver_backups_equal(left: Path, right: Path, skip_files: Set[str]):
def assert_pageserver_backups_equal(left: Path, right: Path, skip_files: set[str]):
"""
This is essentially:

@@ -539,7 +535,7 @@ def assert_pageserver_backups_equal(left: Path, right: Path, skip_files: Set[str
digest.update(buf)
return digest.digest()

def build_hash_list(p: Path) -> List[Tuple[str, bytes]]:
def build_hash_list(p: Path) -> list[tuple[str, bytes]]:
with tarfile.open(p) as f:
matching_files = (info for info in f if info.isreg() and info.name not in skip_files)
ret = list(
@@ -587,7 +583,7 @@ class PropagatingThread(threading.Thread):
self.exc = e

def join(self, timeout=None):
super(PropagatingThread, self).join(timeout)
super().join(timeout)
if self.exc:
raise self.exc
return self.ret
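`super(PropagatingThread, self).join(timeout)` becoming `super().join(timeout)` is ruff's UP008: inside a method, zero-argument `super()` resolves the class and instance implicitly (available since Python 3.0). A tiny sketch of the equivalence with hypothetical classes:

    class Base:
        def join(self, timeout=None):
            return f"joined within {timeout}"


    class Derived(Base):
        def join(self, timeout=None):
            # Same as super(Derived, self).join(timeout), with both arguments implied.
            return super().join(timeout)


    assert Derived().join(1) == "joined within 1"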
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import threading
|
||||
from typing import Any, Optional
|
||||
|
||||
|
||||
@@ -2,6 +2,8 @@
|
||||
Test the logical replication in Neon with ClickHouse as a consumer
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import hashlib
|
||||
import os
|
||||
import time
|
||||
|
||||
@@ -2,6 +2,8 @@
|
||||
Test the logical replication in Neon with Debezium as a consumer
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import os
|
||||
import time
|
||||
|
||||
@@ -0,0 +1 @@
|
||||
from __future__ import annotations
|
||||
|
||||
@@ -0,0 +1 @@
|
||||
from __future__ import annotations
|
||||
|
||||
@@ -6,3 +6,5 @@ but then debug a performance problem interactively.
|
||||
It's kind of an abuse of the test framework, but, it's our only tool right
|
||||
now to automate a complex test bench setup.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import pdb
|
||||
|
||||
|
||||
@@ -8,3 +8,5 @@ instead of benchmarking the full stack.
|
||||
See https://github.com/neondatabase/neon/issues/5771
|
||||
for the context in which this was developed.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
@@ -1,7 +1,9 @@
from __future__ import annotations

import asyncio
import json
from pathlib import Path
from typing import Any, Dict, Tuple
from typing import TYPE_CHECKING

import pytest
from fixtures.benchmark_fixture import MetricReport, NeonBenchmarker

@@ -13,6 +15,9 @@ from performance.pageserver.util import (
    setup_pageserver_with_tenants,
)

if TYPE_CHECKING:
    from typing import Any


@pytest.mark.parametrize("duration", [30])
@pytest.mark.parametrize("pgbench_scale", [get_scale_for_db(200)])

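The recurring recipe in these benchmark files: `from __future__ import annotations` makes every annotation a lazily evaluated string, so imports needed only for type checking can move under `if TYPE_CHECKING:` and cost nothing at runtime. A generic runnable sketch of the pattern (the function is a placeholder, not code from this commit):

from __future__ import annotations

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Seen only by type checkers such as mypy; never imported at runtime.
    from typing import Any


def record(metric: str, value: Any) -> dict[str, Any]:
    return {metric: value}


print(record("pageserver_basebackup.duration", 1.5))
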
@@ -29,7 +34,7 @@ def test_basebackup_with_high_slru_count(
    def record(metric, **kwargs):
        zenbenchmark.record(metric_name=f"pageserver_basebackup.{metric}", **kwargs)

    params: Dict[str, Tuple[Any, Dict[str, Any]]] = {}
    params: dict[str, tuple[Any, dict[str, Any]]] = {}

    # params from fixtures
    params.update(

@@ -157,7 +162,7 @@ def run_benchmark(env: NeonEnv, pg_bin: PgBin, record, duration_secs: int):
    results_path = Path(basepath + ".stdout")
    log.info(f"Benchmark results at: {results_path}")

    with open(results_path, "r") as f:
    with open(results_path) as f:
        results = json.load(f)
    log.info(f"Results:\n{json.dumps(results, sort_keys=True, indent=2)}")

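The `open(results_path, "r")` → `open(results_path)` rewrites here and below are pyupgrade's UP015: `"r"` is the default mode and can simply be dropped. For example (file name illustrative):

import json
from pathlib import Path

results_path = Path("results.stdout")
results_path.write_text(json.dumps({"tps": 1234.5}))

# "r" is the default mode, so open(results_path) behaves exactly like open(results_path, "r").
with open(results_path) as f:
    results = json.load(f)
print(json.dumps(results, sort_keys=True, indent=2))
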
@@ -1,6 +1,8 @@
from __future__ import annotations

import json
from pathlib import Path
from typing import Any, Dict, Tuple
from typing import TYPE_CHECKING

import pytest
from fixtures.benchmark_fixture import MetricReport, NeonBenchmarker

@@ -14,6 +16,9 @@ from fixtures.neon_fixtures import (
from fixtures.remote_storage import s3_storage
from fixtures.utils import humantime_to_ms

if TYPE_CHECKING:
    from typing import Any


@pytest.mark.parametrize("duration", [30])
@pytest.mark.parametrize("io_engine", ["tokio-epoll-uring", "std-fs"])

@@ -30,7 +35,7 @@ def test_download_churn(
    def record(metric, **kwargs):
        zenbenchmark.record(metric_name=f"pageserver_ondemand_download_churn.{metric}", **kwargs)

    params: Dict[str, Tuple[Any, Dict[str, Any]]] = {}
    params: dict[str, tuple[Any, dict[str, Any]]] = {}

    # params from fixtures
    params.update(

@@ -134,7 +139,7 @@ def run_benchmark(
    results_path = Path(basepath + ".stdout")
    log.info(f"Benchmark results at: {results_path}")

    with open(results_path, "r") as f:
    with open(results_path) as f:
        results = json.load(f)
    log.info(f"Results:\n{json.dumps(results, sort_keys=True, indent=2)}")

@@ -1,7 +1,9 @@
from __future__ import annotations

import json
import os
from pathlib import Path
from typing import Any, Dict, Tuple
from typing import TYPE_CHECKING

import pytest
from fixtures.benchmark_fixture import MetricReport, NeonBenchmarker

@@ -18,6 +20,10 @@ from performance.pageserver.util import (
    setup_pageserver_with_tenants,
)

if TYPE_CHECKING:
    from typing import Any


# The following tests use pagebench "getpage at latest LSN" to characterize the throughput of the pageserver.
# originally there was a single test named `test_pageserver_max_throughput_getpage_at_latest_lsn``
# so you still see some references to this name in the code.

@@ -92,7 +98,7 @@ def setup_and_run_pagebench_benchmark(
            metric_name=f"pageserver_max_throughput_getpage_at_latest_lsn.{metric}", **kwargs
        )

    params: Dict[str, Tuple[Any, Dict[str, Any]]] = {}
    params: dict[str, tuple[Any, dict[str, Any]]] = {}

    # params from fixtures
    params.update(

@@ -225,7 +231,7 @@ def run_pagebench_benchmark(
    results_path = Path(basepath + ".stdout")
    log.info(f"Benchmark results at: {results_path}")

    with open(results_path, "r") as f:
    with open(results_path) as f:
        results = json.load(f)
    log.info(f"Results:\n{json.dumps(results, sort_keys=True, indent=2)}")

@@ -2,7 +2,9 @@
Utilities used by all code in this sub-directory
"""

from typing import Any, Callable, Dict, Optional, Tuple
from __future__ import annotations

from typing import TYPE_CHECKING

import fixtures.pageserver.many_tenants as many_tenants
from fixtures.common_types import TenantId, TimelineId

@@ -13,6 +15,9 @@ from fixtures.neon_fixtures import (
)
from fixtures.pageserver.utils import wait_until_all_tenants_state

if TYPE_CHECKING:
    from typing import Any, Callable, Optional


def ensure_pageserver_ready_for_benchmarking(env: NeonEnv, n_tenants: int):
    """

@@ -40,7 +45,7 @@ def setup_pageserver_with_tenants(
    neon_env_builder: NeonEnvBuilder,
    name: str,
    n_tenants: int,
    setup: Callable[[NeonEnv], Tuple[TenantId, TimelineId, Dict[str, Any]]],
    setup: Callable[[NeonEnv], tuple[TenantId, TimelineId, dict[str, Any]]],
    timeout_in_seconds: Optional[int] = None,
) -> NeonEnv:
    """

@@ -1,10 +1,12 @@
from __future__ import annotations

import sys
from pathlib import Path

import numpy as np
import pandas as pd
import numpy as np  # type: ignore [import]
import pandas as pd  # type: ignore [import]
import psycopg2
from pgvector.psycopg2 import register_vector
from pgvector.psycopg2 import register_vector  # type: ignore [import]
from psycopg2.extras import execute_values

@@ -1,3 +1,5 @@
from __future__ import annotations

import random
import re
import statistics

@@ -5,7 +7,6 @@ import threading
import time
import timeit
from contextlib import closing
from typing import List

import pytest
from fixtures.benchmark_fixture import MetricReport, NeonBenchmarker

@@ -18,7 +19,7 @@ from fixtures.utils import wait_until
from prometheus_client.samples import Sample


def _record_branch_creation_durations(neon_compare: NeonCompare, durs: List[float]):
def _record_branch_creation_durations(neon_compare: NeonCompare, durs: list[float]):
    neon_compare.zenbenchmark.record(
        "branch_creation_duration_max", max(durs), "s", MetricReport.LOWER_IS_BETTER
    )

@@ -66,7 +67,7 @@ def test_branch_creation_heavy_write(neon_compare: NeonCompare, n_branches: int)

    env.create_branch("b0", tenant_id=tenant)

    threads: List[threading.Thread] = []
    threads: list[threading.Thread] = []
    threads.append(threading.Thread(target=run_pgbench, args=("b0",), daemon=True))
    threads[-1].start()

@@ -194,7 +195,7 @@ def wait_and_record_startup_metrics(
        ]
    )

    def metrics_are_filled() -> List[Sample]:
    def metrics_are_filled() -> list[Sample]:
        m = client.get_metrics()
        samples = m.query_all("pageserver_startup_duration_seconds")
        # we should not have duplicate labels

@@ -1,6 +1,7 @@
from __future__ import annotations

import timeit
from pathlib import Path
from typing import List

from fixtures.benchmark_fixture import PgBenchRunResult
from fixtures.compare_fixtures import NeonCompare

@@ -22,7 +23,7 @@ def test_compare_child_and_root_pgbench_perf(neon_compare: NeonCompare):
    env = neon_compare.env
    pg_bin = neon_compare.pg_bin

    def run_pgbench_on_branch(branch: str, cmd: List[str]):
    def run_pgbench_on_branch(branch: str, cmd: list[str]):
        run_start_timestamp = utc_now_timestamp()
        t0 = timeit.default_timer()
        out = pg_bin.run_capture(

@@ -1,3 +1,5 @@
from __future__ import annotations

from contextlib import closing

from fixtures.benchmark_fixture import MetricReport

@@ -1,3 +1,5 @@
from __future__ import annotations

import timeit

import pytest

@@ -1,3 +1,5 @@
from __future__ import annotations

import pytest
from fixtures.neon_fixtures import NeonEnvBuilder, wait_for_last_flush_lsn

@@ -1,3 +1,5 @@
from __future__ import annotations

from contextlib import closing

import pytest

@@ -1,7 +1,8 @@
from __future__ import annotations

import os
import threading
import time
from typing import List

import pytest
from fixtures.compare_fixtures import PgCompare

@@ -23,7 +24,7 @@ def test_compare_pg_stats_rw_with_pgbench_default(
    seed: int,
    scale: int,
    duration: int,
    pg_stats_rw: List[PgStatTable],
    pg_stats_rw: list[PgStatTable],
):
    env = neon_with_baseline
    # initialize pgbench

@@ -45,7 +46,7 @@ def test_compare_pg_stats_wo_with_pgbench_simple_update(
    seed: int,
    scale: int,
    duration: int,
    pg_stats_wo: List[PgStatTable],
    pg_stats_wo: list[PgStatTable],
):
    env = neon_with_baseline
    # initialize pgbench

@@ -67,7 +68,7 @@ def test_compare_pg_stats_ro_with_pgbench_select_only(
    seed: int,
    scale: int,
    duration: int,
    pg_stats_ro: List[PgStatTable],
    pg_stats_ro: list[PgStatTable],
):
    env = neon_with_baseline
    # initialize pgbench

@@ -89,7 +90,7 @@ def test_compare_pg_stats_wal_with_pgbench_default(
    seed: int,
    scale: int,
    duration: int,
    pg_stats_wal: List[PgStatTable],
    pg_stats_wal: list[PgStatTable],
):
    env = neon_with_baseline
    # initialize pgbench

@@ -106,7 +107,7 @@ def test_compare_pg_stats_wal_with_pgbench_default(
@pytest.mark.parametrize("n_tables", [1, 10])
@pytest.mark.parametrize("duration", get_durations_matrix(10))
def test_compare_pg_stats_wo_with_heavy_write(
    neon_with_baseline: PgCompare, n_tables: int, duration: int, pg_stats_wo: List[PgStatTable]
    neon_with_baseline: PgCompare, n_tables: int, duration: int, pg_stats_wo: list[PgStatTable]
):
    env = neon_with_baseline
    with env.pg.connect().cursor() as cur:

@@ -1,3 +1,5 @@
from __future__ import annotations

from contextlib import closing
from io import BufferedReader, RawIOBase
from typing import Optional

@@ -1,3 +1,5 @@
from __future__ import annotations

from contextlib import closing

import pytest

@@ -1,3 +1,5 @@
from __future__ import annotations

import json

import pytest

@@ -1,3 +1,5 @@
from __future__ import annotations

from contextlib import closing

from fixtures.compare_fixtures import PgCompare

@@ -1,3 +1,5 @@
from __future__ import annotations

from contextlib import closing

import pytest

@@ -1,3 +1,5 @@
from __future__ import annotations

from contextlib import closing

import pytest

@@ -1,3 +1,5 @@
from __future__ import annotations

import threading

import pytest

@@ -1,3 +1,5 @@
from __future__ import annotations

import time

from fixtures.neon_fixtures import NeonEnvBuilder, flush_ep_to_pageserver

@@ -1,3 +1,5 @@
from __future__ import annotations

import pytest
import requests
from fixtures.benchmark_fixture import MetricReport, NeonBenchmarker

@@ -1,3 +1,5 @@
from __future__ import annotations

import asyncio
from io import BytesIO

@@ -1,7 +1,8 @@
from __future__ import annotations

import os
from dataclasses import dataclass
from pathlib import Path
from typing import Dict, List, Tuple

import pytest
from _pytest.mark import ParameterSet

@@ -45,7 +46,7 @@ def test_clickbench_create_pg_stat_statements(remote_compare: RemoteCompare):
#
# Disable auto formatting for the list of queries so that it's easier to read
# fmt: off
QUERIES: Tuple[LabelledQuery, ...] = (
QUERIES: tuple[LabelledQuery, ...] = (
    ### ClickBench queries:
    LabelledQuery("Q0", r"SELECT COUNT(*) FROM hits;"),
    LabelledQuery("Q1", r"SELECT COUNT(*) FROM hits WHERE AdvEngineID <> 0;"),

@@ -105,7 +106,7 @@ QUERIES: Tuple[LabelledQuery, ...] = (
#
# Disable auto formatting for the list of queries so that it's easier to read
# fmt: off
PGVECTOR_QUERIES: Tuple[LabelledQuery, ...] = (
PGVECTOR_QUERIES: tuple[LabelledQuery, ...] = (
    LabelledQuery("PGVPREP", r"ALTER EXTENSION VECTOR UPDATE;"),
    LabelledQuery("PGV0", r"DROP TABLE IF EXISTS hnsw_test_table;"),
    LabelledQuery("PGV1", r"CREATE TABLE hnsw_test_table AS TABLE documents WITH NO DATA;"),

@@ -127,7 +128,7 @@ PGVECTOR_QUERIES: Tuple[LabelledQuery, ...] = (
EXPLAIN_STRING: str = "EXPLAIN (ANALYZE, VERBOSE, BUFFERS, COSTS, SETTINGS, FORMAT JSON)"


def get_scale() -> List[str]:
def get_scale() -> list[str]:
    # We parametrize each tpc-h and clickbench test with scale
    # to distinguish them from each other, but don't really use it inside.
    # Databases are pre-created and passed through BENCHMARK_CONNSTR env variable.

@@ -147,7 +148,7 @@ def run_psql(
    options = f"-cstatement_timeout=0 {env.pg.default_options.get('options', '')}"
    connstr = env.pg.connstr(password=None, options=options)

    environ: Dict[str, str] = {}
    environ: dict[str, str] = {}
    if password is not None:
        environ["PGPASSWORD"] = password

@@ -185,7 +186,7 @@ def test_clickbench(query: LabelledQuery, remote_compare: RemoteCompare, scale:
    run_psql(remote_compare, query, times=3, explain=explain)


def tpch_queuies() -> Tuple[ParameterSet, ...]:
def tpch_queuies() -> tuple[ParameterSet, ...]:
    """
    A list of queries to run for the TPC-H benchmark.
    - querues in returning tuple are ordered by the query number

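The `environ: dict[str, str]` hunk above is the same builtin-generic rewrite; the surrounding helper builds an extra environment for the `psql` subprocess and passes the password via `PGPASSWORD`. A hedged sketch of that pattern; command layout and environment merging here are assumptions, not the file's actual code:

from __future__ import annotations

import os
import subprocess


def run_psql(connstr: str, query: str, password: str | None = None) -> str:
    environ: dict[str, str] = {}
    if password is not None:
        environ["PGPASSWORD"] = password
    # Merge with the parent environment so PATH and friends stay intact.
    return subprocess.run(
        ["psql", connstr, "-c", query],
        env={**os.environ, **environ},
        capture_output=True,
        text=True,
        check=True,
    ).stdout
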
@@ -1,10 +1,11 @@
from __future__ import annotations

import calendar
import enum
import os
import timeit
from datetime import datetime
from pathlib import Path
from typing import Dict, List

import pytest
from fixtures.benchmark_fixture import MetricReport, PgBenchInitResult, PgBenchRunResult

@@ -26,7 +27,7 @@ def utc_now_timestamp() -> int:


def init_pgbench(env: PgCompare, cmdline, password: None):
    environ: Dict[str, str] = {}
    environ: dict[str, str] = {}
    if password is not None:
        environ["PGPASSWORD"] = password

@@ -54,7 +55,7 @@ def init_pgbench(env: PgCompare, cmdline, password: None):


def run_pgbench(env: PgCompare, prefix: str, cmdline, password: None):
    environ: Dict[str, str] = {}
    environ: dict[str, str] = {}
    if password is not None:
        environ["PGPASSWORD"] = password

@@ -177,7 +178,7 @@ def run_test_pgbench(env: PgCompare, scale: int, duration: int, workload_type: P
    env.report_size()


def get_durations_matrix(default: int = 45) -> List[int]:
def get_durations_matrix(default: int = 45) -> list[int]:
    durations = os.getenv("TEST_PG_BENCH_DURATIONS_MATRIX", default=str(default))
    rv = []
    for d in durations.split(","):

@@ -193,7 +194,7 @@ def get_durations_matrix(default: int = 45) -> List[int]:
    return rv


def get_scales_matrix(default: int = 10) -> List[int]:
def get_scales_matrix(default: int = 10) -> list[int]:
    scales = os.getenv("TEST_PG_BENCH_SCALES_MATRIX", default=str(default))
    rv = []
    for s in scales.split(","):

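`get_durations_matrix` and `get_scales_matrix` both parse a comma-separated environment variable into a list of ints. A minimal sketch of that idiom, assuming only the variable name shown above:

from __future__ import annotations

import os


def get_durations_matrix(default: int = 45) -> list[int]:
    durations = os.getenv("TEST_PG_BENCH_DURATIONS_MATRIX", default=str(default))
    return [int(d.strip()) for d in durations.split(",")]


print(get_durations_matrix())  # [45] unless the env var overrides it
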
@@ -1,3 +1,5 @@
from __future__ import annotations

import pytest
from fixtures.compare_fixtures import PgCompare


@@ -18,7 +18,7 @@ from fixtures.neon_api import connection_parameters_to_env
from fixtures.pg_version import PgVersion

if TYPE_CHECKING:
    from typing import Any, List, Optional
    from typing import Any, Optional

    from fixtures.benchmark_fixture import NeonBenchmarker
    from fixtures.neon_api import NeonAPI

@@ -233,7 +233,7 @@ def test_replication_start_stop(
        ],
        env=master_env,
    )
    replica_pgbench: List[Optional[subprocess.Popen[Any]]] = [None for _ in range(num_replicas)]
    replica_pgbench: list[Optional[subprocess.Popen[Any]]] = [None for _ in range(num_replicas)]

    # Use the bits of iconfig to tell us which configuration we are on. For example
    # a iconfig of 2 is 10 in binary, indicating replica 0 is suspended and replica 1 is

@@ -1,3 +1,5 @@
from __future__ import annotations

import random
from contextlib import closing


@@ -1,5 +1,8 @@
# Test sequential scan speed
#

from __future__ import annotations

from contextlib import closing

import pytest

@@ -1,3 +1,5 @@
from __future__ import annotations

import concurrent.futures
import re
from pathlib import Path

@@ -1,3 +1,5 @@
from __future__ import annotations

import requests
from fixtures.benchmark_fixture import MetricReport, NeonBenchmarker
from fixtures.neon_fixtures import NeonEnvBuilder

@@ -1,3 +1,5 @@
from __future__ import annotations

import concurrent.futures
import random
import time

@@ -1,8 +1,11 @@
from __future__ import annotations

import statistics
import threading
import time
import timeit
from typing import Any, Callable, Generator, List
from collections.abc import Generator
from typing import TYPE_CHECKING

import pytest
from fixtures.benchmark_fixture import MetricReport, NeonBenchmarker

@@ -13,6 +16,9 @@ from fixtures.neon_fixtures import NeonEnvBuilder, PgBin, flush_ep_to_pageserver

from performance.test_perf_pgbench import get_durations_matrix, get_scales_matrix

if TYPE_CHECKING:
    from typing import Any, Callable


@pytest.fixture(params=["vanilla", "neon_off", "neon_on"])
# This fixture constructs multiple `PgCompare` interfaces using a builder pattern.

@@ -202,7 +208,7 @@ def record_lsn_write_lag(env: PgCompare, run_cond: Callable[[], bool], pool_inte
    if not isinstance(env, NeonCompare):
        return

    lsn_write_lags: List[Any] = []
    lsn_write_lags: list[Any] = []
    last_received_lsn = Lsn(0)
    last_pg_flush_lsn = Lsn(0)

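Moving `Generator` from `typing` to `collections.abc` is pyupgrade's UP035: the `typing` aliases for the ABCs are deprecated since Python 3.9. A small sketch of a generator-typed pytest fixture in the new style (the fixture itself is illustrative):

from __future__ import annotations

from collections.abc import Generator

import pytest


@pytest.fixture
def counter() -> Generator[int, None, None]:
    # Setup, yield the value under test, then teardown after the test finishes.
    yield 42


def test_counter(counter: int):
    assert counter == 42
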
@@ -10,6 +10,9 @@
# in LSN order, writing the oldest layer first. That creates a new 10 MB image
# layer to be created for each of those small updates. This is the Write
# Amplification problem at its finest.

from __future__ import annotations

from contextlib import closing

from fixtures.compare_fixtures import PgCompare

@@ -1,5 +1,7 @@
#! /usr/bin/env python3

from __future__ import annotations

import asyncio
import os


@@ -1,5 +1,7 @@
#! /usr/bin/env python3

from __future__ import annotations

import os

import pg8000.dbapi

@@ -1,3 +1,5 @@
from __future__ import annotations

import shutil
from pathlib import Path
from tempfile import NamedTemporaryFile

@@ -1,3 +1,5 @@
from __future__ import annotations

from fixtures.common_types import TimelineId
from fixtures.log_helper import log
from fixtures.neon_fixtures import NeonEnvBuilder

@@ -1,5 +1,8 @@
from __future__ import annotations

from collections.abc import Generator
from dataclasses import dataclass
from typing import Generator, Optional
from typing import Optional

import pytest
from fixtures.common_types import TenantId

@@ -1,3 +1,5 @@
from __future__ import annotations

import os
from contextlib import closing
from pathlib import Path

@@ -1,3 +1,5 @@
from __future__ import annotations

from fixtures.log_helper import log
from fixtures.neon_fixtures import (
    AuxFileStore,

@@ -1,3 +1,5 @@
from __future__ import annotations

import threading
import time
from contextlib import closing, contextmanager

@@ -1,3 +1,5 @@
from __future__ import annotations

import random
import time


@@ -1,3 +1,5 @@
from __future__ import annotations

import pytest
from fixtures.neon_fixtures import NeonEnv


@@ -1,3 +1,5 @@
from __future__ import annotations

import threading
import time


@@ -1,3 +1,5 @@
from __future__ import annotations

import pytest
from fixtures.common_types import Lsn, TimelineId
from fixtures.log_helper import log

@@ -1,8 +1,9 @@
from __future__ import annotations

import random
import threading
import time
from concurrent.futures import ThreadPoolExecutor
from typing import List

import pytest
from fixtures.common_types import Lsn, TimelineId

@@ -56,10 +57,10 @@ def test_branching_with_pgbench(
    pg_bin.run_capture(["pgbench", "-T15", connstr])

    env.create_branch("b0", tenant_id=tenant)
    endpoints: List[Endpoint] = []
    endpoints: list[Endpoint] = []
    endpoints.append(env.endpoints.create_start("b0", tenant_id=tenant))

    threads: List[threading.Thread] = []
    threads: list[threading.Thread] = []
    threads.append(
        threading.Thread(target=run_pgbench, args=(endpoints[0].connstr(),), daemon=True)
    )

@@ -1,6 +1,7 @@
from __future__ import annotations

import concurrent.futures
import os
from typing import List, Tuple

import pytest
from fixtures.common_types import TenantId, TimelineId

@@ -31,7 +32,7 @@ def test_local_corruption(neon_env_builder: NeonEnvBuilder):
        ]
    )

    tenant_timelines: List[Tuple[TenantId, TimelineId, Endpoint]] = []
    tenant_timelines: list[tuple[TenantId, TimelineId, Endpoint]] = []

    for _ in range(3):
        tenant_id, timeline_id = env.create_tenant()

@@ -1,3 +1,5 @@
from __future__ import annotations

from fixtures.metrics import parse_metrics
from fixtures.neon_fixtures import NeonEnvBuilder, NeonProxy

Some files were not shown because too many files have changed in this diff.