Mirror of https://github.com/neondatabase/neon.git, synced 2025-12-23 06:09:59 +00:00
Enable all pyupgrade checks in ruff
This will help keep us from using deprecated Python features going forward.

Signed-off-by: Tristan Partin <tristan@neon.tech>
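For context, ruff's pyupgrade ("UP") rule family rewrites deprecated Python constructs to their modern equivalents, most visibly the PEP 585 builtin generics used throughout this diff. A minimal before/after sketch of the rewrite (the function is illustrative, not from the repository). Before, with the deprecated typing aliases:

    from typing import Dict, List, Optional

    def summarize(rows: List[Dict[str, int]]) -> Optional[str]:
        return f"{len(rows)} rows" if rows else None

After, as this commit spells it (the __future__ import, which the commit adds by hand, postpones annotation evaluation, so builtin generics in annotations are safe even on interpreters older than 3.9):

    from __future__ import annotations

    from typing import Optional

    def summarize(rows: list[dict[str, int]]) -> Optional[str]:
        return f"{len(rows)} rows" if rows else None

Optional stays as-is because the commit also sets keep-runtime-typing; see the pyproject.toml hunk below.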
@@ -1,11 +1,12 @@
 #!/usr/bin/env python3
 
+from __future__ import annotations
+
 import argparse
 import enum
 import os
 import subprocess
 import sys
-from typing import List
 
 
 @enum.unique
@@ -55,12 +56,12 @@ def mypy() -> str:
     return "poetry run mypy"
 
 
-def get_commit_files() -> List[str]:
+def get_commit_files() -> list[str]:
     files = subprocess.check_output("git diff --cached --name-only --diff-filter=ACM".split())
     return files.decode().splitlines()
 
 
-def check(name: str, suffix: str, cmd: str, changed_files: List[str], no_color: bool = False):
+def check(name: str, suffix: str, cmd: str, changed_files: list[str], no_color: bool = False):
     print(f"Checking: {name} ", end="")
     applicable_files = list(filter(lambda fname: fname.strip().endswith(suffix), changed_files))
     if not applicable_files:
@@ -97,5 +97,8 @@ select = [
     "I",  # isort
     "W",  # pycodestyle
     "B",  # bugbear
-    "UP032",  # f-string
+    "UP",  # pyupgrade
 ]
+
+[tool.ruff.lint.pyupgrade]
+keep-runtime-typing = true  # Remove this stanza when we require Python 3.10
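On the keep-runtime-typing option added above: ruff documents it as suppressing the PEP 585/PEP 604 annotation rewrites (rules UP006/UP007) whose output only works at runtime on newer interpreters, which is why the comment ties its removal to a Python 3.10 floor. The hazard it avoids, sketched with an illustrative function (not from the repository): even with postponed annotation evaluation, a PEP 604 union breaks on Python 3.9 the moment something evaluates the annotation.

    from __future__ import annotations  # annotations stored as strings

    import typing

    def parse(raw: str) -> int | None:  # legal to *write* on 3.9...
        return int(raw) if raw.isdigit() else None

    # ...but anything that evaluates the hint (pydantic, get_type_hints)
    # fails on 3.9 with: TypeError: unsupported operand type(s) for |
    typing.get_type_hints(parse)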
@@ -1,9 +1,10 @@
 #! /usr/bin/env python3
 
+from __future__ import annotations
+
 import argparse
 import json
 import logging
-from typing import Dict
 
 import psycopg2
 import psycopg2.extras
@@ -110,7 +111,7 @@ def main(args: argparse.Namespace):
     output = args.output
     percentile = args.percentile
 
-    res: Dict[str, float] = {}
+    res: dict[str, float] = {}
 
     try:
         logging.info("connecting to the database...")
@@ -4,6 +4,9 @@
 #
 # This can be useful in disaster recovery.
 #
+
+from __future__ import annotations
+
 import argparse
 
 import psycopg2
@@ -1,16 +1,21 @@
 #! /usr/bin/env python3
 
+from __future__ import annotations
+
 import argparse
 import json
 import logging
 import os
 from collections import defaultdict
-from typing import Any, DefaultDict, Dict, Optional
+from typing import TYPE_CHECKING
 
 import psycopg2
 import psycopg2.extras
 import toml
 
+if TYPE_CHECKING:
+    from typing import Any, Optional
+
 FLAKY_TESTS_QUERY = """
 SELECT
     DISTINCT parent_suite, suite, name
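The if TYPE_CHECKING: blocks introduced here are the standard companion to postponed annotations: typing.TYPE_CHECKING is False at runtime, so imports inside the block cost nothing when the module executes, while type checkers still see them. A self-contained sketch of the pattern (illustrative names):

    from __future__ import annotations

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        # executed only by type checkers, never at runtime
        from typing import Any, Optional

    def lookup(record: dict[str, Any]) -> Optional[str]:
        # Fine at runtime: the annotations above stay unevaluated strings,
        # so Any and Optional are never actually looked up.
        return record.get("name")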
@@ -33,7 +38,7 @@ def main(args: argparse.Namespace):
     build_type = args.build_type
     pg_version = args.pg_version
 
-    res: DefaultDict[str, DefaultDict[str, Dict[str, bool]]]
+    res: defaultdict[str, defaultdict[str, dict[str, bool]]]
     res = defaultdict(lambda: defaultdict(dict))
 
     try:
@@ -60,7 +65,7 @@ def main(args: argparse.Namespace):
         pageserver_virtual_file_io_engine_parameter = ""
 
 # re-use existing records of flaky tests from before parametrization by compaction_algorithm
-def get_pageserver_default_tenant_config_compaction_algorithm() -> Optional[Dict[str, Any]]:
+def get_pageserver_default_tenant_config_compaction_algorithm() -> Optional[dict[str, Any]]:
     """Duplicated from parametrize.py"""
     toml_table = os.getenv("PAGESERVER_DEFAULT_TENANT_CONFIG_COMPACTION_ALGORITHM")
     if toml_table is None:
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import argparse
 import asyncio
 import json
@@ -5,11 +7,15 @@ import logging
 import signal
 import sys
 from collections import defaultdict
+from collections.abc import Awaitable
 from dataclasses import dataclass
-from typing import Any, Awaitable, Dict, List, Tuple
+from typing import TYPE_CHECKING
 
 import aiohttp
 
+if TYPE_CHECKING:
+    from typing import Any
+
 
 class ClientException(Exception):
     pass
@@ -89,7 +95,7 @@ class Client:
 class Completed:
     """The status dict returned by the API"""
 
-    status: Dict[str, Any]
+    status: dict[str, Any]
 
 
 sigint_received = asyncio.Event()
@@ -179,7 +185,7 @@ async def main_impl(args, report_out, client: Client):
    """
    Returns OS exit status.
    """
-    tenant_and_timline_ids: List[Tuple[str, str]] = []
+    tenant_and_timline_ids: list[tuple[str, str]] = []
     # fill tenant_and_timline_ids based on spec
     for spec in args.what:
         comps = spec.split(":")
@@ -215,14 +221,14 @@ async def main_impl(args, report_out, client: Client):
     tenant_and_timline_ids = tmp
 
     logging.info("create tasks and process them at specified concurrency")
-    task_q: asyncio.Queue[Tuple[str, Awaitable[Any]]] = asyncio.Queue()
+    task_q: asyncio.Queue[tuple[str, Awaitable[Any]]] = asyncio.Queue()
     tasks = {
         f"{tid}:{tlid}": do_timeline(client, tid, tlid) for tid, tlid in tenant_and_timline_ids
     }
     for task in tasks.items():
         task_q.put_nowait(task)
 
-    result_q: asyncio.Queue[Tuple[str, Any]] = asyncio.Queue()
+    result_q: asyncio.Queue[tuple[str, Any]] = asyncio.Queue()
     taskq_handlers = []
     for _ in range(0, args.concurrent_tasks):
         taskq_handlers.append(taskq_handler(task_q, result_q))
@@ -1,4 +1,7 @@
 #!/usr/bin/env python3
+
+from __future__ import annotations
+
 import argparse
 import json
 import logging
@@ -1,5 +1,7 @@
 #! /usr/bin/env python3
 
+from __future__ import annotations
+
 import argparse
 import dataclasses
 import json
@@ -11,7 +13,6 @@ from contextlib import contextmanager
 from dataclasses import dataclass
 from datetime import datetime, timezone
 from pathlib import Path
-from typing import Tuple
 
 import backoff
 import psycopg2
@@ -91,7 +92,7 @@ def create_table(cur):
     cur.execute(CREATE_TABLE)
 
 
-def parse_test_name(test_name: str) -> Tuple[str, int, str]:
+def parse_test_name(test_name: str) -> tuple[str, int, str]:
     build_type, pg_version = None, None
     if match := TEST_NAME_RE.search(test_name):
         found = match.groupdict()
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import argparse
 import logging
 import os
@@ -2,6 +2,8 @@
 Run the regression tests on the cloud instance of Neon
 """
 
+from __future__ import annotations
+
 from pathlib import Path
 from typing import Any
 
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 pytest_plugins = (
     "fixtures.pg_version",
     "fixtures.parametrize",
@@ -0,0 +1 @@
+from __future__ import annotations
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import calendar
 import dataclasses
 import enum
@@ -5,12 +7,11 @@ import json
 import os
 import re
 import timeit
+from collections.abc import Iterator
 from contextlib import contextmanager
 from datetime import datetime
 from pathlib import Path
+from typing import TYPE_CHECKING
 
-# Type-related stuff
-from typing import Callable, ClassVar, Dict, Iterator, Optional
-
 import allure
 import pytest
@@ -23,6 +24,10 @@ from fixtures.common_types import TenantId, TimelineId
 from fixtures.log_helper import log
 from fixtures.neon_fixtures import NeonPageserver
 
+if TYPE_CHECKING:
+    from typing import Callable, ClassVar, Optional
+
+
 """
 This file contains fixtures for micro-benchmarks.
@@ -138,18 +143,6 @@ class PgBenchRunResult:
 
 @dataclasses.dataclass
 class PgBenchInitResult:
-    # Taken from https://github.com/postgres/postgres/blob/REL_15_1/src/bin/pgbench/pgbench.c#L5144-L5171
-    EXTRACTORS: ClassVar[Dict[str, re.Pattern]] = {  # type: ignore[type-arg]
-        "drop_tables": re.compile(r"drop tables (\d+\.\d+) s"),
-        "create_tables": re.compile(r"create tables (\d+\.\d+) s"),
-        "client_side_generate": re.compile(r"client-side generate (\d+\.\d+) s"),
-        "server_side_generate": re.compile(r"server-side generate (\d+\.\d+) s"),
-        "vacuum": re.compile(r"vacuum (\d+\.\d+) s"),
-        "primary_keys": re.compile(r"primary keys (\d+\.\d+) s"),
-        "foreign_keys": re.compile(r"foreign keys (\d+\.\d+) s"),
-        "total": re.compile(r"done in (\d+\.\d+) s"),  # Total time printed by pgbench
-    }
-
     total: Optional[float]
     drop_tables: Optional[float]
     create_tables: Optional[float]
@@ -162,6 +155,20 @@ class PgBenchInitResult:
     start_timestamp: int
     end_timestamp: int
 
+    # Taken from https://github.com/postgres/postgres/blob/REL_15_1/src/bin/pgbench/pgbench.c#L5144-L5171
+    EXTRACTORS: ClassVar[dict[str, re.Pattern[str]]] = dataclasses.field(
+        default_factory=lambda: {
+            "drop_tables": re.compile(r"drop tables (\d+\.\d+) s"),
+            "create_tables": re.compile(r"create tables (\d+\.\d+) s"),
+            "client_side_generate": re.compile(r"client-side generate (\d+\.\d+) s"),
+            "server_side_generate": re.compile(r"server-side generate (\d+\.\d+) s"),
+            "vacuum": re.compile(r"vacuum (\d+\.\d+) s"),
+            "primary_keys": re.compile(r"primary keys (\d+\.\d+) s"),
+            "foreign_keys": re.compile(r"foreign keys (\d+\.\d+) s"),
+            "total": re.compile(r"done in (\d+\.\d+) s"),  # Total time printed by pgbench
+        }
+    )
+
     @classmethod
     def parse_from_stderr(
         cls,
@@ -175,7 +182,7 @@ class PgBenchInitResult:
 
         last_line = stderr.splitlines()[-1]
 
-        timings: Dict[str, Optional[float]] = {}
+        timings: dict[str, Optional[float]] = {}
         last_line_items = re.split(r"\(|\)|,", last_line)
         for item in last_line_items:
             for key, regex in cls.EXTRACTORS.items():
@@ -385,7 +392,7 @@ class NeonBenchmarker:
         self,
         pageserver: NeonPageserver,
         metric_name: str,
-        label_filters: Optional[Dict[str, str]] = None,
+        label_filters: Optional[dict[str, str]] = None,
     ) -> int:
         """Fetch the value of given int counter from pageserver metrics."""
         all_metrics = pageserver.http_client().get_metrics()
@@ -1,10 +1,16 @@
+from __future__ import annotations
+
 import random
 from dataclasses import dataclass
 from enum import Enum
 from functools import total_ordering
-from typing import Any, Dict, Type, TypeVar, Union
+from typing import TYPE_CHECKING, TypeVar
 
+if TYPE_CHECKING:
+    from typing import Any, Union
+
+T = TypeVar("T", bound="Id")
 
-T = TypeVar("T", bound="Id")
 
 DEFAULT_WAL_SEG_SIZE = 16 * 1024 * 1024
@@ -56,7 +62,7 @@ class Lsn:
             return NotImplemented
         return self.lsn_int - other.lsn_int
 
-    def __add__(self, other: Union[int, "Lsn"]) -> "Lsn":
+    def __add__(self, other: Union[int, Lsn]) -> Lsn:
         if isinstance(other, int):
             return Lsn(self.lsn_int + other)
         elif isinstance(other, Lsn):
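The Lsn hunks also drop the quotes from self-referential annotations (-> "Lsn" becomes -> Lsn): under from __future__ import annotations, every annotation is already stored as a string and evaluated lazily, so the manual forward-reference quoting becomes redundant. A condensed sketch of the same pattern:

    from __future__ import annotations

    from typing import Union

    class Lsn:
        def __init__(self, lsn_int: int):
            self.lsn_int = lsn_int

        # No quotes needed: by the time anything evaluates this
        # annotation, the class object already exists.
        def __add__(self, other: Union[int, Lsn]) -> Lsn:
            other_int = other if isinstance(other, int) else other.lsn_int
            return Lsn(self.lsn_int + other_int)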
@@ -70,7 +76,7 @@ class Lsn:
     def as_int(self) -> int:
         return self.lsn_int
 
-    def segment_lsn(self, seg_sz: int = DEFAULT_WAL_SEG_SIZE) -> "Lsn":
+    def segment_lsn(self, seg_sz: int = DEFAULT_WAL_SEG_SIZE) -> Lsn:
         return Lsn(self.lsn_int - (self.lsn_int % seg_sz))
 
     def segno(self, seg_sz: int = DEFAULT_WAL_SEG_SIZE) -> int:
@@ -127,7 +133,7 @@ class Id:
         return hash(str(self.id))
 
     @classmethod
-    def generate(cls: Type[T]) -> T:
+    def generate(cls: type[T]) -> T:
         """Generate a random ID"""
         return cls(random.randbytes(16).hex())
 
@@ -162,7 +168,7 @@ class TenantTimelineId:
     timeline_id: TimelineId
 
     @classmethod
-    def from_json(cls, d: Dict[str, Any]) -> "TenantTimelineId":
+    def from_json(cls, d: dict[str, Any]) -> TenantTimelineId:
         return TenantTimelineId(
             tenant_id=TenantId(d["tenant_id"]),
             timeline_id=TimelineId(d["timeline_id"]),
@@ -181,7 +187,7 @@ class TenantShardId:
         assert self.shard_number < self.shard_count or self.shard_count == 0
 
     @classmethod
-    def parse(cls: Type[TTenantShardId], input) -> TTenantShardId:
+    def parse(cls: type[TTenantShardId], input) -> TTenantShardId:
         if len(input) == 32:
             return cls(
                 tenant_id=TenantId(input),
@@ -1,11 +1,13 @@
+from __future__ import annotations
+
 import os
 import time
 from abc import ABC, abstractmethod
+from collections.abc import Iterator
 from contextlib import _GeneratorContextManager, contextmanager
 
 # Type-related stuff
 from pathlib import Path
-from typing import Dict, Iterator, List
 
 import pytest
 from _pytest.fixtures import FixtureRequest
@@ -72,7 +74,7 @@ class PgCompare(ABC):
         pass
 
     @contextmanager
-    def record_pg_stats(self, pg_stats: List[PgStatTable]) -> Iterator[None]:
+    def record_pg_stats(self, pg_stats: list[PgStatTable]) -> Iterator[None]:
         init_data = self._retrieve_pg_stats(pg_stats)
 
         yield
@@ -82,8 +84,8 @@ class PgCompare(ABC):
         for k in set(init_data) & set(data):
             self.zenbenchmark.record(k, data[k] - init_data[k], "", MetricReport.HIGHER_IS_BETTER)
 
-    def _retrieve_pg_stats(self, pg_stats: List[PgStatTable]) -> Dict[str, int]:
-        results: Dict[str, int] = {}
+    def _retrieve_pg_stats(self, pg_stats: list[PgStatTable]) -> dict[str, int]:
+        results: dict[str, int] = {}
 
         with self.pg.connect().cursor() as cur:
             for pg_stat in pg_stats:
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import concurrent.futures
 from typing import Any
 
@@ -0,0 +1 @@
+from __future__ import annotations
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import requests
 from requests.adapters import HTTPAdapter
 
@@ -1,6 +1,9 @@
+from __future__ import annotations
+
 import json
+from collections.abc import MutableMapping
 from pathlib import Path
-from typing import Any, List, MutableMapping, cast
+from typing import TYPE_CHECKING, cast
 
 import pytest
 from _pytest.config import Config
@@ -10,6 +13,9 @@ from allure_pytest.utils import allure_name, allure_suite_labels
 
 from fixtures.log_helper import log
 
+if TYPE_CHECKING:
+    from typing import Any
+
 """
 The plugin reruns flaky tests.
 It uses `pytest.mark.flaky` provided by `pytest-rerunfailures` plugin and flaky tests detected by `scripts/flaky_tests.py`
@@ -27,7 +33,7 @@ def pytest_addoption(parser: Parser):
     )
 
 
-def pytest_collection_modifyitems(config: Config, items: List[pytest.Item]):
+def pytest_collection_modifyitems(config: Config, items: list[pytest.Item]):
     if not config.getoption("--flaky-tests-json"):
         return
 
@@ -66,5 +72,5 @@ def pytest_collection_modifyitems(config: Config, items: List[pytest.Item]):
         # - [2] https://github.com/pytest-dev/pytest-timeout/issues/142
         timeout_marker = item.get_closest_marker("timeout")
         if timeout_marker is not None:
-            kwargs = cast(MutableMapping[str, Any], timeout_marker.kwargs)
+            kwargs = cast("MutableMapping[str, Any]", timeout_marker.kwargs)
             kwargs["func_only"] = True
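The cast() change above adds quotes rather than removing them, and the asymmetry is deliberate: postponed evaluation only covers annotations, while the first argument of cast() is an ordinary runtime expression, and Any now lives in a TYPE_CHECKING-only import. Quoting the type keeps it from being evaluated at runtime. Illustrative sketch (names are hypothetical):

    from __future__ import annotations

    from typing import TYPE_CHECKING, cast

    if TYPE_CHECKING:
        from typing import Any

    def force_func_only(marker_kwargs: object) -> None:
        # cast("...") stays lazy; an unquoted dict[str, Any] here would
        # raise NameError at runtime, since Any is not imported there.
        kwargs = cast("dict[str, Any]", marker_kwargs)
        kwargs["func_only"] = True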
@@ -1,4 +1,4 @@
-from typing import Tuple
+from __future__ import annotations
 
 import pytest
 from pytest_httpserver import HTTPServer
@@ -40,6 +40,6 @@ def httpserver(make_httpserver):
 
 
 @pytest.fixture(scope="function")
-def httpserver_listen_address(port_distributor) -> Tuple[str, int]:
+def httpserver_listen_address(port_distributor) -> tuple[str, int]:
     port = port_distributor.get_port()
     return ("localhost", port)
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import logging
 import logging.config
 
@@ -1,21 +1,26 @@
+from __future__ import annotations
+
 from collections import defaultdict
-from typing import Dict, List, Optional, Tuple
+from typing import TYPE_CHECKING
 
 from prometheus_client.parser import text_string_to_metric_families
 from prometheus_client.samples import Sample
 
 from fixtures.log_helper import log
 
+if TYPE_CHECKING:
+    from typing import Optional
+
 
 class Metrics:
-    metrics: Dict[str, List[Sample]]
+    metrics: dict[str, list[Sample]]
     name: str
 
     def __init__(self, name: str = ""):
         self.metrics = defaultdict(list)
         self.name = name
 
-    def query_all(self, name: str, filter: Optional[Dict[str, str]] = None) -> List[Sample]:
+    def query_all(self, name: str, filter: Optional[dict[str, str]] = None) -> list[Sample]:
        filter = filter or {}
        res = []
 
@@ -27,7 +32,7 @@ class Metrics:
                 pass
         return res
 
-    def query_one(self, name: str, filter: Optional[Dict[str, str]] = None) -> Sample:
+    def query_one(self, name: str, filter: Optional[dict[str, str]] = None) -> Sample:
         res = self.query_all(name, filter or {})
         assert len(res) == 1, f"expected single sample for {name} {filter}, found {res}"
         return res[0]
@@ -43,7 +48,7 @@ class MetricsGetter:
         raise NotImplementedError()
 
     def get_metric_value(
-        self, name: str, filter: Optional[Dict[str, str]] = None
+        self, name: str, filter: Optional[dict[str, str]] = None
     ) -> Optional[float]:
         metrics = self.get_metrics()
         results = metrics.query_all(name, filter=filter)
@@ -54,8 +59,8 @@ class MetricsGetter:
         return results[0].value
 
     def get_metrics_values(
-        self, names: list[str], filter: Optional[Dict[str, str]] = None, absence_ok=False
-    ) -> Dict[str, float]:
+        self, names: list[str], filter: Optional[dict[str, str]] = None, absence_ok=False
+    ) -> dict[str, float]:
        """
        When fetching multiple named metrics, it is more efficient to use this
        than to call `get_metric_value` repeatedly.
@@ -97,7 +102,7 @@ def parse_metrics(text: str, name: str = "") -> Metrics:
     return metrics
 
 
-def histogram(prefix_without_trailing_underscore: str) -> List[str]:
+def histogram(prefix_without_trailing_underscore: str) -> list[str]:
     assert not prefix_without_trailing_underscore.endswith("_")
     return [f"{prefix_without_trailing_underscore}_{x}" for x in ["bucket", "count", "sum"]]
 
@@ -107,7 +112,7 @@ def counter(name: str) -> str:
     return f"{name}_total"
 
 
-PAGESERVER_PER_TENANT_REMOTE_TIMELINE_CLIENT_METRICS: Tuple[str, ...] = (
+PAGESERVER_PER_TENANT_REMOTE_TIMELINE_CLIENT_METRICS: tuple[str, ...] = (
     "pageserver_remote_timeline_client_calls_started_total",
     "pageserver_remote_timeline_client_calls_finished_total",
     "pageserver_remote_physical_size",
@@ -115,7 +120,7 @@ PAGESERVER_PER_TENANT_REMOTE_TIMELINE_CLIENT_METRICS: Tuple[str, ...] = (
     "pageserver_remote_timeline_client_bytes_finished_total",
 )
 
-PAGESERVER_GLOBAL_METRICS: Tuple[str, ...] = (
+PAGESERVER_GLOBAL_METRICS: tuple[str, ...] = (
     "pageserver_storage_operations_seconds_global_count",
     "pageserver_storage_operations_seconds_global_sum",
     "pageserver_storage_operations_seconds_global_bucket",
@@ -147,7 +152,7 @@ PAGESERVER_GLOBAL_METRICS: Tuple[str, ...] = (
     counter("pageserver_tenant_throttling_count_global"),
 )
 
-PAGESERVER_PER_TENANT_METRICS: Tuple[str, ...] = (
+PAGESERVER_PER_TENANT_METRICS: tuple[str, ...] = (
     "pageserver_current_logical_size",
     "pageserver_resident_physical_size",
     "pageserver_io_operations_bytes_total",
@@ -6,12 +6,12 @@ from typing import TYPE_CHECKING, cast
 import requests
 
 if TYPE_CHECKING:
-    from typing import Any, Dict, Literal, Optional, Union
+    from typing import Any, Literal, Optional, Union
 
     from fixtures.pg_version import PgVersion
 
 
-def connection_parameters_to_env(params: Dict[str, str]) -> Dict[str, str]:
+def connection_parameters_to_env(params: dict[str, str]) -> dict[str, str]:
     return {
         "PGHOST": params["host"],
         "PGDATABASE": params["database"],
@@ -41,8 +41,8 @@ class NeonAPI:
         branch_name: Optional[str] = None,
         branch_role_name: Optional[str] = None,
         branch_database_name: Optional[str] = None,
-    ) -> Dict[str, Any]:
-        data: Dict[str, Any] = {
+    ) -> dict[str, Any]:
+        data: dict[str, Any] = {
             "project": {
                 "branch": {},
             },
@@ -70,9 +70,9 @@ class NeonAPI:
 
         assert resp.status_code == 201
 
-        return cast("Dict[str, Any]", resp.json())
+        return cast("dict[str, Any]", resp.json())
 
-    def get_project_details(self, project_id: str) -> Dict[str, Any]:
+    def get_project_details(self, project_id: str) -> dict[str, Any]:
         resp = self.__request(
             "GET",
             f"/projects/{project_id}",
@@ -82,12 +82,12 @@ class NeonAPI:
             },
         )
         assert resp.status_code == 200
-        return cast("Dict[str, Any]", resp.json())
+        return cast("dict[str, Any]", resp.json())
 
     def delete_project(
         self,
         project_id: str,
-    ) -> Dict[str, Any]:
+    ) -> dict[str, Any]:
         resp = self.__request(
             "DELETE",
             f"/projects/{project_id}",
@@ -99,13 +99,13 @@ class NeonAPI:
 
         assert resp.status_code == 200
 
-        return cast("Dict[str, Any]", resp.json())
+        return cast("dict[str, Any]", resp.json())
 
     def start_endpoint(
         self,
         project_id: str,
         endpoint_id: str,
-    ) -> Dict[str, Any]:
+    ) -> dict[str, Any]:
         resp = self.__request(
             "POST",
             f"/projects/{project_id}/endpoints/{endpoint_id}/start",
@@ -116,13 +116,13 @@ class NeonAPI:
 
         assert resp.status_code == 200
 
-        return cast("Dict[str, Any]", resp.json())
+        return cast("dict[str, Any]", resp.json())
 
     def suspend_endpoint(
         self,
         project_id: str,
         endpoint_id: str,
-    ) -> Dict[str, Any]:
+    ) -> dict[str, Any]:
         resp = self.__request(
             "POST",
             f"/projects/{project_id}/endpoints/{endpoint_id}/suspend",
@@ -133,13 +133,13 @@ class NeonAPI:
 
         assert resp.status_code == 200
 
-        return cast("Dict[str, Any]", resp.json())
+        return cast("dict[str, Any]", resp.json())
 
     def restart_endpoint(
         self,
         project_id: str,
         endpoint_id: str,
-    ) -> Dict[str, Any]:
+    ) -> dict[str, Any]:
         resp = self.__request(
             "POST",
             f"/projects/{project_id}/endpoints/{endpoint_id}/restart",
@@ -150,16 +150,16 @@ class NeonAPI:
 
         assert resp.status_code == 200
 
-        return cast("Dict[str, Any]", resp.json())
+        return cast("dict[str, Any]", resp.json())
 
     def create_endpoint(
         self,
         project_id: str,
         branch_id: str,
         endpoint_type: Literal["read_write", "read_only"],
-        settings: Dict[str, Any],
-    ) -> Dict[str, Any]:
-        data: Dict[str, Any] = {
+        settings: dict[str, Any],
+    ) -> dict[str, Any]:
+        data: dict[str, Any] = {
             "endpoint": {
                 "branch_id": branch_id,
             },
@@ -182,7 +182,7 @@ class NeonAPI:
 
         assert resp.status_code == 201
 
-        return cast("Dict[str, Any]", resp.json())
+        return cast("dict[str, Any]", resp.json())
 
     def get_connection_uri(
         self,
@@ -192,7 +192,7 @@ class NeonAPI:
         database_name: str = "neondb",
         role_name: str = "neondb_owner",
         pooled: bool = True,
-    ) -> Dict[str, Any]:
+    ) -> dict[str, Any]:
         resp = self.__request(
             "GET",
             f"/projects/{project_id}/connection_uri",
@@ -210,9 +210,9 @@ class NeonAPI:
 
         assert resp.status_code == 200
 
-        return cast("Dict[str, Any]", resp.json())
+        return cast("dict[str, Any]", resp.json())
 
-    def get_branches(self, project_id: str) -> Dict[str, Any]:
+    def get_branches(self, project_id: str) -> dict[str, Any]:
         resp = self.__request(
             "GET",
             f"/projects/{project_id}/branches",
@@ -223,9 +223,9 @@ class NeonAPI:
 
         assert resp.status_code == 200
 
-        return cast("Dict[str, Any]", resp.json())
+        return cast("dict[str, Any]", resp.json())
 
-    def get_endpoints(self, project_id: str) -> Dict[str, Any]:
+    def get_endpoints(self, project_id: str) -> dict[str, Any]:
         resp = self.__request(
             "GET",
             f"/projects/{project_id}/endpoints",
@@ -236,9 +236,9 @@ class NeonAPI:
 
         assert resp.status_code == 200
 
-        return cast("Dict[str, Any]", resp.json())
+        return cast("dict[str, Any]", resp.json())
 
-    def get_operations(self, project_id: str) -> Dict[str, Any]:
+    def get_operations(self, project_id: str) -> dict[str, Any]:
         resp = self.__request(
             "GET",
             f"/projects/{project_id}/operations",
@@ -250,7 +250,7 @@ class NeonAPI:
 
         assert resp.status_code == 200
 
-        return cast("Dict[str, Any]", resp.json())
+        return cast("dict[str, Any]", resp.json())
 
     def wait_for_operation_to_finish(self, project_id: str):
         has_running = True
@@ -9,15 +9,7 @@ import tempfile
 import textwrap
 from itertools import chain, product
 from pathlib import Path
-from typing import (
-    Any,
-    Dict,
-    List,
-    Optional,
-    Tuple,
-    TypeVar,
-    cast,
-)
+from typing import TYPE_CHECKING, cast
 
 import toml
 
@@ -27,7 +19,15 @@ from fixtures.pageserver.common_types import IndexPartDump
 from fixtures.pg_version import PgVersion
 from fixtures.utils import AuxFileStore
 
-T = TypeVar("T")
+if TYPE_CHECKING:
+    from typing import (
+        Any,
+        Optional,
+        TypeVar,
+        cast,
+    )
+
+    T = TypeVar("T")
 
 
 class AbstractNeonCli(abc.ABC):
@@ -37,7 +37,7 @@ class AbstractNeonCli(abc.ABC):
     Do not use directly, use specific subclasses instead.
     """
 
-    def __init__(self, extra_env: Optional[Dict[str, str]], binpath: Path):
+    def __init__(self, extra_env: Optional[dict[str, str]], binpath: Path):
         self.extra_env = extra_env
         self.binpath = binpath
 
@@ -45,11 +45,11 @@ class AbstractNeonCli(abc.ABC):
 
     def raw_cli(
         self,
-        arguments: List[str],
-        extra_env_vars: Optional[Dict[str, str]] = None,
+        arguments: list[str],
+        extra_env_vars: Optional[dict[str, str]] = None,
         check_return_code=True,
         timeout=None,
-    ) -> "subprocess.CompletedProcess[str]":
+    ) -> subprocess.CompletedProcess[str]:
        """
        Run the command with the specified arguments.
 
@@ -92,9 +92,8 @@ class AbstractNeonCli(abc.ABC):
                 args,
                 env=env_vars,
                 check=False,
-                universal_newlines=True,
-                stdout=subprocess.PIPE,
-                stderr=subprocess.PIPE,
+                text=True,
+                capture_output=True,
                 timeout=timeout,
             )
         except subprocess.TimeoutExpired as e:
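The subprocess.run() cleanup above is equivalent by definition: text=True has been the preferred spelling of the legacy universal_newlines=True since Python 3.7, and capture_output=True (also 3.7+) is shorthand for passing subprocess.PIPE as both stdout and stderr. For example:

    import subprocess

    legacy = subprocess.run(
        ["echo", "hi"],
        universal_newlines=True,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
    )
    modern = subprocess.run(["echo", "hi"], text=True, capture_output=True)
    assert legacy.stdout == modern.stdout == "hi\n"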
@@ -118,7 +117,7 @@ class AbstractNeonCli(abc.ABC):
             if len(lines) < 2:
                 log.debug(f"Run {res.args} success: {stripped}")
             else:
-                log.debug("Run %s success:\n%s" % (res.args, textwrap.indent(stripped, indent)))
+                log.debug("Run %s success:\n%s", res.args, textwrap.indent(stripped, indent))
         elif check_return_code:
             # this way command output will be in recorded and shown in CI in failure message
             indent = indent * 2
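The log.debug() change swaps eager %-formatting for the logging module's deferred formatting: when the format arguments are passed separately, interpolation happens only if some handler actually emits the record, and formatting errors are routed through logging's error handling instead of raised at the call site. Both lines produce the same message when DEBUG is enabled:

    import logging

    log = logging.getLogger(__name__)
    args, out = "cmd", "ok"

    log.debug("Run %s success:\n%s" % (args, out))  # formats unconditionally
    log.debug("Run %s success:\n%s", args, out)     # formats only if emitted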
@@ -175,7 +174,7 @@ class NeonLocalCli(AbstractNeonCli):
|
|||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
extra_env: Optional[Dict[str, str]],
|
extra_env: Optional[dict[str, str]],
|
||||||
binpath: Path,
|
binpath: Path,
|
||||||
repo_dir: Path,
|
repo_dir: Path,
|
||||||
pg_distrib_dir: Path,
|
pg_distrib_dir: Path,
|
||||||
@@ -197,7 +196,7 @@ class NeonLocalCli(AbstractNeonCli):
|
|||||||
tenant_id: TenantId,
|
tenant_id: TenantId,
|
||||||
timeline_id: TimelineId,
|
timeline_id: TimelineId,
|
||||||
pg_version: PgVersion,
|
pg_version: PgVersion,
|
||||||
conf: Optional[Dict[str, Any]] = None,
|
conf: Optional[dict[str, Any]] = None,
|
||||||
shard_count: Optional[int] = None,
|
shard_count: Optional[int] = None,
|
||||||
shard_stripe_size: Optional[int] = None,
|
shard_stripe_size: Optional[int] = None,
|
||||||
placement_policy: Optional[str] = None,
|
placement_policy: Optional[str] = None,
|
||||||
@@ -258,7 +257,7 @@ class NeonLocalCli(AbstractNeonCli):
|
|||||||
res = self.raw_cli(["tenant", "set-default", "--tenant-id", str(tenant_id)])
|
res = self.raw_cli(["tenant", "set-default", "--tenant-id", str(tenant_id)])
|
||||||
res.check_returncode()
|
res.check_returncode()
|
||||||
|
|
||||||
def tenant_config(self, tenant_id: TenantId, conf: Dict[str, str]):
|
def tenant_config(self, tenant_id: TenantId, conf: dict[str, str]):
|
||||||
"""
|
"""
|
||||||
Update tenant config.
|
Update tenant config.
|
||||||
"""
|
"""
|
||||||
@@ -274,7 +273,7 @@ class NeonLocalCli(AbstractNeonCli):
|
|||||||
res = self.raw_cli(args)
|
res = self.raw_cli(args)
|
||||||
res.check_returncode()
|
res.check_returncode()
|
||||||
|
|
||||||
def tenant_list(self) -> "subprocess.CompletedProcess[str]":
|
def tenant_list(self) -> subprocess.CompletedProcess[str]:
|
||||||
res = self.raw_cli(["tenant", "list"])
|
res = self.raw_cli(["tenant", "list"])
|
||||||
res.check_returncode()
|
res.check_returncode()
|
||||||
return res
|
return res
|
||||||
@@ -368,7 +367,7 @@ class NeonLocalCli(AbstractNeonCli):
|
|||||||
res = self.raw_cli(cmd)
|
res = self.raw_cli(cmd)
|
||||||
res.check_returncode()
|
res.check_returncode()
|
||||||
|
|
||||||
def timeline_list(self, tenant_id: TenantId) -> List[Tuple[str, TimelineId]]:
|
def timeline_list(self, tenant_id: TenantId) -> list[tuple[str, TimelineId]]:
|
||||||
"""
|
"""
|
||||||
Returns a list of (branch_name, timeline_id) tuples out of parsed `neon timeline list` CLI output.
|
Returns a list of (branch_name, timeline_id) tuples out of parsed `neon timeline list` CLI output.
|
||||||
"""
|
"""
|
||||||
@@ -389,9 +388,9 @@ class NeonLocalCli(AbstractNeonCli):
|
|||||||
|
|
||||||
def init(
|
def init(
|
||||||
self,
|
self,
|
||||||
init_config: Dict[str, Any],
|
init_config: dict[str, Any],
|
||||||
force: Optional[str] = None,
|
force: Optional[str] = None,
|
||||||
) -> "subprocess.CompletedProcess[str]":
|
) -> subprocess.CompletedProcess[str]:
|
||||||
with tempfile.NamedTemporaryFile(mode="w+") as init_config_tmpfile:
|
with tempfile.NamedTemporaryFile(mode="w+") as init_config_tmpfile:
|
||||||
init_config_tmpfile.write(toml.dumps(init_config))
|
init_config_tmpfile.write(toml.dumps(init_config))
|
||||||
init_config_tmpfile.flush()
|
init_config_tmpfile.flush()
|
||||||
@@ -434,15 +433,15 @@ class NeonLocalCli(AbstractNeonCli):
|
|||||||
def pageserver_start(
|
def pageserver_start(
|
||||||
self,
|
self,
|
||||||
id: int,
|
id: int,
|
||||||
extra_env_vars: Optional[Dict[str, str]] = None,
|
extra_env_vars: Optional[dict[str, str]] = None,
|
||||||
timeout_in_seconds: Optional[int] = None,
|
timeout_in_seconds: Optional[int] = None,
|
||||||
) -> "subprocess.CompletedProcess[str]":
|
) -> subprocess.CompletedProcess[str]:
|
||||||
start_args = ["pageserver", "start", f"--id={id}"]
|
start_args = ["pageserver", "start", f"--id={id}"]
|
||||||
if timeout_in_seconds is not None:
|
if timeout_in_seconds is not None:
|
||||||
start_args.append(f"--start-timeout={timeout_in_seconds}s")
|
start_args.append(f"--start-timeout={timeout_in_seconds}s")
|
||||||
return self.raw_cli(start_args, extra_env_vars=extra_env_vars)
|
return self.raw_cli(start_args, extra_env_vars=extra_env_vars)
|
||||||
|
|
||||||
def pageserver_stop(self, id: int, immediate=False) -> "subprocess.CompletedProcess[str]":
|
def pageserver_stop(self, id: int, immediate=False) -> subprocess.CompletedProcess[str]:
|
||||||
cmd = ["pageserver", "stop", f"--id={id}"]
|
cmd = ["pageserver", "stop", f"--id={id}"]
|
||||||
if immediate:
|
if immediate:
|
||||||
cmd.extend(["-m", "immediate"])
|
cmd.extend(["-m", "immediate"])
|
||||||
@@ -453,10 +452,10 @@ class NeonLocalCli(AbstractNeonCli):
|
|||||||
def safekeeper_start(
|
def safekeeper_start(
|
||||||
self,
|
self,
|
||||||
id: int,
|
id: int,
|
||||||
extra_opts: Optional[List[str]] = None,
|
extra_opts: Optional[list[str]] = None,
|
||||||
extra_env_vars: Optional[Dict[str, str]] = None,
|
extra_env_vars: Optional[dict[str, str]] = None,
|
||||||
timeout_in_seconds: Optional[int] = None,
|
timeout_in_seconds: Optional[int] = None,
|
||||||
) -> "subprocess.CompletedProcess[str]":
|
) -> subprocess.CompletedProcess[str]:
|
||||||
if extra_opts is not None:
|
if extra_opts is not None:
|
||||||
extra_opts = [f"-e={opt}" for opt in extra_opts]
|
extra_opts = [f"-e={opt}" for opt in extra_opts]
|
||||||
else:
|
else:
|
||||||
@@ -469,7 +468,7 @@ class NeonLocalCli(AbstractNeonCli):
|
|||||||
|
|
||||||
def safekeeper_stop(
|
def safekeeper_stop(
|
||||||
self, id: Optional[int] = None, immediate=False
|
self, id: Optional[int] = None, immediate=False
|
||||||
) -> "subprocess.CompletedProcess[str]":
|
) -> subprocess.CompletedProcess[str]:
|
||||||
args = ["safekeeper", "stop"]
|
args = ["safekeeper", "stop"]
|
||||||
if id is not None:
|
if id is not None:
|
||||||
args.append(str(id))
|
args.append(str(id))
|
||||||
@@ -479,13 +478,13 @@ class NeonLocalCli(AbstractNeonCli):
|
|||||||
|
|
||||||
def storage_broker_start(
|
def storage_broker_start(
|
||||||
self, timeout_in_seconds: Optional[int] = None
|
self, timeout_in_seconds: Optional[int] = None
|
||||||
) -> "subprocess.CompletedProcess[str]":
|
) -> subprocess.CompletedProcess[str]:
|
||||||
cmd = ["storage_broker", "start"]
|
cmd = ["storage_broker", "start"]
|
||||||
if timeout_in_seconds is not None:
|
if timeout_in_seconds is not None:
|
||||||
cmd.append(f"--start-timeout={timeout_in_seconds}s")
|
cmd.append(f"--start-timeout={timeout_in_seconds}s")
|
||||||
return self.raw_cli(cmd)
|
return self.raw_cli(cmd)
|
||||||
|
|
||||||
def storage_broker_stop(self) -> "subprocess.CompletedProcess[str]":
|
def storage_broker_stop(self) -> subprocess.CompletedProcess[str]:
|
||||||
cmd = ["storage_broker", "stop"]
|
cmd = ["storage_broker", "stop"]
|
||||||
return self.raw_cli(cmd)
|
return self.raw_cli(cmd)
|
||||||
|
|
||||||
@@ -501,7 +500,7 @@ class NeonLocalCli(AbstractNeonCli):
|
|||||||
lsn: Optional[Lsn] = None,
|
lsn: Optional[Lsn] = None,
|
||||||
pageserver_id: Optional[int] = None,
|
pageserver_id: Optional[int] = None,
|
||||||
allow_multiple=False,
|
allow_multiple=False,
|
||||||
) -> "subprocess.CompletedProcess[str]":
|
) -> subprocess.CompletedProcess[str]:
|
||||||
args = [
|
args = [
|
||||||
"endpoint",
|
"endpoint",
|
||||||
"create",
|
"create",
|
||||||
@@ -534,12 +533,12 @@ class NeonLocalCli(AbstractNeonCli):
|
|||||||
def endpoint_start(
|
def endpoint_start(
|
||||||
self,
|
self,
|
||||||
endpoint_id: str,
|
endpoint_id: str,
|
||||||
safekeepers: Optional[List[int]] = None,
|
safekeepers: Optional[list[int]] = None,
|
||||||
remote_ext_config: Optional[str] = None,
|
remote_ext_config: Optional[str] = None,
|
||||||
pageserver_id: Optional[int] = None,
|
pageserver_id: Optional[int] = None,
|
||||||
allow_multiple=False,
|
allow_multiple=False,
|
||||||
basebackup_request_tries: Optional[int] = None,
|
basebackup_request_tries: Optional[int] = None,
|
||||||
) -> "subprocess.CompletedProcess[str]":
|
) -> subprocess.CompletedProcess[str]:
|
||||||
args = [
|
args = [
|
||||||
"endpoint",
|
"endpoint",
|
||||||
"start",
|
"start",
|
||||||
@@ -568,9 +567,9 @@ class NeonLocalCli(AbstractNeonCli):
|
|||||||
endpoint_id: str,
|
endpoint_id: str,
|
||||||
tenant_id: Optional[TenantId] = None,
|
tenant_id: Optional[TenantId] = None,
|
||||||
pageserver_id: Optional[int] = None,
|
pageserver_id: Optional[int] = None,
|
||||||
safekeepers: Optional[List[int]] = None,
|
safekeepers: Optional[list[int]] = None,
|
||||||
check_return_code=True,
|
check_return_code=True,
|
||||||
) -> "subprocess.CompletedProcess[str]":
|
) -> subprocess.CompletedProcess[str]:
|
||||||
args = ["endpoint", "reconfigure", endpoint_id]
|
args = ["endpoint", "reconfigure", endpoint_id]
|
||||||
if tenant_id is not None:
|
if tenant_id is not None:
|
||||||
args.extend(["--tenant-id", str(tenant_id)])
|
args.extend(["--tenant-id", str(tenant_id)])
|
||||||
@@ -586,7 +585,7 @@ class NeonLocalCli(AbstractNeonCli):
|
|||||||
destroy=False,
|
destroy=False,
|
||||||
check_return_code=True,
|
check_return_code=True,
|
||||||
mode: Optional[str] = None,
|
mode: Optional[str] = None,
|
||||||
) -> "subprocess.CompletedProcess[str]":
|
) -> subprocess.CompletedProcess[str]:
|
||||||
args = [
|
args = [
|
||||||
"endpoint",
|
"endpoint",
|
||||||
"stop",
|
"stop",
|
||||||
@@ -602,7 +601,7 @@ class NeonLocalCli(AbstractNeonCli):
|
|||||||
|
|
||||||
def mappings_map_branch(
|
def mappings_map_branch(
|
||||||
self, name: str, tenant_id: TenantId, timeline_id: TimelineId
|
self, name: str, tenant_id: TenantId, timeline_id: TimelineId
|
||||||
) -> "subprocess.CompletedProcess[str]":
|
) -> subprocess.CompletedProcess[str]:
|
||||||
"""
|
"""
|
||||||
Map tenant id and timeline id to a neon_local branch name. They do not have to exist.
|
Map tenant id and timeline id to a neon_local branch name. They do not have to exist.
|
||||||
Usually needed when creating branches via PageserverHttpClient and not neon_local.
|
Usually needed when creating branches via PageserverHttpClient and not neon_local.
|
||||||
@@ -623,10 +622,10 @@ class NeonLocalCli(AbstractNeonCli):
|
|||||||
|
|
||||||
return self.raw_cli(args, check_return_code=True)
|
return self.raw_cli(args, check_return_code=True)
|
||||||
|
|
||||||
def start(self, check_return_code=True) -> "subprocess.CompletedProcess[str]":
|
def start(self, check_return_code=True) -> subprocess.CompletedProcess[str]:
|
||||||
return self.raw_cli(["start"], check_return_code=check_return_code)
|
return self.raw_cli(["start"], check_return_code=check_return_code)
|
||||||
|
|
||||||
def stop(self, check_return_code=True) -> "subprocess.CompletedProcess[str]":
|
def stop(self, check_return_code=True) -> subprocess.CompletedProcess[str]:
|
||||||
return self.raw_cli(["stop"], check_return_code=check_return_code)
|
return self.raw_cli(["stop"], check_return_code=check_return_code)
|
||||||
|
|
||||||
|
|
||||||
@@ -638,7 +637,7 @@ class WalCraft(AbstractNeonCli):
|
|||||||
|
|
||||||
COMMAND = "wal_craft"
|
COMMAND = "wal_craft"
|
||||||
|
|
||||||
def postgres_config(self) -> List[str]:
|
def postgres_config(self) -> list[str]:
|
||||||
res = self.raw_cli(["print-postgres-config"])
|
res = self.raw_cli(["print-postgres-config"])
|
||||||
res.check_returncode()
|
res.check_returncode()
|
||||||
return res.stdout.split("\n")
|
return res.stdout.split("\n")
|
||||||
|
|||||||
@@ -13,6 +13,7 @@ import threading
|
|||||||
import time
|
import time
|
||||||
import uuid
|
import uuid
|
||||||
from collections import defaultdict
|
from collections import defaultdict
|
||||||
|
from collections.abc import Iterable, Iterator
|
||||||
from contextlib import closing, contextmanager
|
from contextlib import closing, contextmanager
|
||||||
from dataclasses import dataclass
|
from dataclasses import dataclass
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
@@ -21,20 +22,7 @@ from fcntl import LOCK_EX, LOCK_UN, flock
|
|||||||
from functools import cached_property
|
from functools import cached_property
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from types import TracebackType
|
from types import TracebackType
|
||||||
from typing import (
|
from typing import TYPE_CHECKING, cast
|
||||||
Any,
|
|
||||||
Callable,
|
|
||||||
Dict,
|
|
||||||
Iterable,
|
|
||||||
Iterator,
|
|
||||||
List,
|
|
||||||
Optional,
|
|
||||||
Tuple,
|
|
||||||
Type,
|
|
||||||
TypeVar,
|
|
||||||
Union,
|
|
||||||
cast,
|
|
||||||
)
|
|
||||||
from urllib.parse import quote, urlparse
|
from urllib.parse import quote, urlparse
|
||||||
|
|
||||||
import asyncpg
|
import asyncpg
|
||||||
@@ -99,7 +87,17 @@ from fixtures.utils import AuxFileStore as AuxFileStore # reexport
|
|||||||
|
|
||||||
from .neon_api import NeonAPI, NeonApiEndpoint
|
from .neon_api import NeonAPI, NeonApiEndpoint
|
||||||
|
|
||||||
T = TypeVar("T")
|
if TYPE_CHECKING:
|
||||||
|
from typing import (
|
||||||
|
Any,
|
||||||
|
Callable,
|
||||||
|
Optional,
|
||||||
|
TypeVar,
|
||||||
|
Union,
|
||||||
|
)
|
||||||
|
|
||||||
|
T = TypeVar("T")
|
||||||
|
|
||||||
|
|
||||||
"""
|
"""
|
||||||
This file contains pytest fixtures. A fixture is a test resource that can be
|
This file contains pytest fixtures. A fixture is a test resource that can be
|
||||||
@@ -118,7 +116,7 @@ Don't import functions from this file, or pytest will emit warnings. Instead
 put directly-importable functions into utils.py or another separate file.
 """
 
-Env = Dict[str, str]
+Env = dict[str, str]
 
 DEFAULT_OUTPUT_DIR: str = "test_output"
 DEFAULT_BRANCH_NAME: str = "main"
@@ -250,7 +248,7 @@ class PgProtocol:
         """
         return str(make_dsn(**self.conn_options(**kwargs)))
 
-    def conn_options(self, **kwargs: Any) -> Dict[str, Any]:
+    def conn_options(self, **kwargs: Any) -> dict[str, Any]:
         """
         Construct a dictionary of connection options from default values and extra parameters.
         An option can be dropped from the returning dictionary by None-valued extra parameter.
@@ -319,7 +317,7 @@ class PgProtocol:
             conn_options["server_settings"] = {key: val}
         return await asyncpg.connect(**conn_options)
 
-    def safe_psql(self, query: str, **kwargs: Any) -> List[Tuple[Any, ...]]:
+    def safe_psql(self, query: str, **kwargs: Any) -> list[tuple[Any, ...]]:
         """
         Execute query against the node and return all rows.
         This method passes all extra params to connstr.
@@ -328,12 +326,12 @@ class PgProtocol:
 
     def safe_psql_many(
         self, queries: Iterable[str], log_query=True, **kwargs: Any
-    ) -> List[List[Tuple[Any, ...]]]:
+    ) -> list[list[tuple[Any, ...]]]:
         """
         Execute queries against the node and return all rows.
         This method passes all extra params to connstr.
         """
-        result: List[List[Any]] = []
+        result: list[list[Any]] = []
         with closing(self.connect(**kwargs)) as conn:
             with conn.cursor() as cur:
                 for query in queries:
@@ -379,7 +377,7 @@ class NeonEnvBuilder:
         test_overlay_dir: Optional[Path] = None,
         pageserver_remote_storage: Optional[RemoteStorage] = None,
         # toml that will be decomposed into `--config-override` flags during `pageserver --init`
-        pageserver_config_override: Optional[str | Callable[[Dict[str, Any]], None]] = None,
+        pageserver_config_override: Optional[str | Callable[[dict[str, Any]], None]] = None,
         num_safekeepers: int = 1,
         num_pageservers: int = 1,
         # Use non-standard SK ids to check for various parsing bugs
@@ -394,7 +392,7 @@ class NeonEnvBuilder:
         initial_timeline: Optional[TimelineId] = None,
         pageserver_virtual_file_io_engine: Optional[str] = None,
         pageserver_aux_file_policy: Optional[AuxFileStore] = None,
-        pageserver_default_tenant_config_compaction_algorithm: Optional[Dict[str, Any]] = None,
+        pageserver_default_tenant_config_compaction_algorithm: Optional[dict[str, Any]] = None,
         safekeeper_extra_opts: Optional[list[str]] = None,
         storage_controller_port_override: Optional[int] = None,
         pageserver_io_buffer_alignment: Optional[int] = None,
@@ -429,7 +427,7 @@ class NeonEnvBuilder:
         self.enable_scrub_on_exit = True
         self.test_output_dir = test_output_dir
         self.test_overlay_dir = test_overlay_dir
-        self.overlay_mounts_created_by_us: List[Tuple[str, Path]] = []
+        self.overlay_mounts_created_by_us: list[tuple[str, Path]] = []
         self.config_init_force: Optional[str] = None
         self.top_output_dir = top_output_dir
         self.control_plane_compute_hook_api: Optional[str] = None
@@ -438,7 +436,7 @@ class NeonEnvBuilder:
         self.pageserver_virtual_file_io_engine: Optional[str] = pageserver_virtual_file_io_engine
 
         self.pageserver_default_tenant_config_compaction_algorithm: Optional[
-            Dict[str, Any]
+            dict[str, Any]
         ] = pageserver_default_tenant_config_compaction_algorithm
         if self.pageserver_default_tenant_config_compaction_algorithm is not None:
             log.debug(
@@ -468,7 +466,7 @@ class NeonEnvBuilder:
 
     def init_start(
         self,
-        initial_tenant_conf: Optional[Dict[str, Any]] = None,
+        initial_tenant_conf: Optional[dict[str, Any]] = None,
         default_remote_storage_if_missing: bool = True,
         initial_tenant_shard_count: Optional[int] = None,
        initial_tenant_shard_stripe_size: Optional[int] = None,
@@ -823,7 +821,7 @@ class NeonEnvBuilder:
 
         overlayfs_mounts = {mountpoint for _, mountpoint in self.overlay_mounts_created_by_us}
 
-        directories_to_clean: List[Path] = []
+        directories_to_clean: list[Path] = []
         for test_entry in Path(self.repo_dir).glob("**/*"):
             if test_entry in overlayfs_mounts:
                 continue
@@ -854,12 +852,12 @@ class NeonEnvBuilder:
             if isinstance(x, S3Storage):
                 x.do_cleanup()
 
-    def __enter__(self) -> "NeonEnvBuilder":
+    def __enter__(self) -> NeonEnvBuilder:
         return self
 
     def __exit__(
         self,
-        exc_type: Optional[Type[BaseException]],
+        exc_type: Optional[type[BaseException]],
         exc_value: Optional[BaseException],
         traceback: Optional[TracebackType],
     ):
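The quoted return type "NeonEnvBuilder" above was a forward reference: while the class body is executing, the class name is not bound yet, so pre-PEP 563 code had to quote it. With postponed evaluation every annotation is effectively quoted, and pyupgrade (UP037) drops the now-redundant string form. A small sketch:

    from __future__ import annotations


    class Builder:
        # Without the __future__ import this annotation would have to be the
        # string "Builder", since the name is unbound while the body runs.
        def __enter__(self) -> Builder:
            return self

        def __exit__(self, exc_type, exc, tb) -> None:
            pass


    with Builder() as b:
        print(type(b).__name__)  # Builder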
@@ -970,8 +968,8 @@ class NeonEnv:
         self.port_distributor = config.port_distributor
         self.s3_mock_server = config.mock_s3_server
         self.endpoints = EndpointFactory(self)
-        self.safekeepers: List[Safekeeper] = []
-        self.pageservers: List[NeonPageserver] = []
+        self.safekeepers: list[Safekeeper] = []
+        self.pageservers: list[NeonPageserver] = []
         self.broker = NeonBroker(self)
         self.pageserver_remote_storage = config.pageserver_remote_storage
         self.safekeepers_remote_storage = config.safekeepers_remote_storage
@@ -1043,7 +1041,7 @@ class NeonEnv:
         self.pageserver_io_buffer_alignment = config.pageserver_io_buffer_alignment
 
         # Create the neon_local's `NeonLocalInitConf`
-        cfg: Dict[str, Any] = {
+        cfg: dict[str, Any] = {
             "default_tenant_id": str(self.initial_tenant),
             "broker": {
                 "listen_addr": self.broker.listen_addr(),
@@ -1072,7 +1070,7 @@ class NeonEnv:
                 http=self.port_distributor.get_port(),
             )
 
-            ps_cfg: Dict[str, Any] = {
+            ps_cfg: dict[str, Any] = {
                 "id": ps_id,
                 "listen_pg_addr": f"localhost:{pageserver_port.pg}",
                 "listen_http_addr": f"localhost:{pageserver_port.http}",
@@ -1120,7 +1118,7 @@ class NeonEnv:
                 http=self.port_distributor.get_port(),
             )
             id = config.safekeepers_id_start + i  # assign ids sequentially
-            sk_cfg: Dict[str, Any] = {
+            sk_cfg: dict[str, Any] = {
                 "id": id,
                 "pg_port": port.pg,
                 "pg_tenant_only_port": port.pg_tenant_only,
@@ -1285,9 +1283,8 @@ class NeonEnv:
         res = subprocess.run(
             [bin_pageserver, "--version"],
             check=True,
-            universal_newlines=True,
-            stdout=subprocess.PIPE,
-            stderr=subprocess.PIPE,
+            text=True,
+            capture_output=True,
         )
         return res.stdout
 
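This is a spelling modernization rather than a typing change: `universal_newlines=True` has been aliased by `text=True` since Python 3.7, and `capture_output=True` (also 3.7+) is shorthand for setting both `stdout` and `stderr` to `subprocess.PIPE`. A quick standalone equivalent:

    import subprocess
    import sys

    # Same effect as universal_newlines=True, stdout=PIPE, stderr=PIPE
    res = subprocess.run(
        [sys.executable, "--version"],
        check=True,
        text=True,            # decode output as str rather than bytes
        capture_output=True,  # capture both stdout and stderr
    )
    print(res.stdout.strip())  # e.g. Python 3.11.9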
@@ -1330,13 +1327,13 @@ class NeonEnv:
         self,
         tenant_id: Optional[TenantId] = None,
         timeline_id: Optional[TimelineId] = None,
-        conf: Optional[Dict[str, Any]] = None,
+        conf: Optional[dict[str, Any]] = None,
         shard_count: Optional[int] = None,
         shard_stripe_size: Optional[int] = None,
         placement_policy: Optional[str] = None,
         set_default: bool = False,
         aux_file_policy: Optional[AuxFileStore] = None,
-    ) -> Tuple[TenantId, TimelineId]:
+    ) -> tuple[TenantId, TimelineId]:
         """
         Creates a new tenant, returns its id and its initial timeline's id.
         """
@@ -1357,7 +1354,7 @@
 
         return tenant_id, timeline_id
 
-    def config_tenant(self, tenant_id: Optional[TenantId], conf: Dict[str, str]):
+    def config_tenant(self, tenant_id: Optional[TenantId], conf: dict[str, str]):
         """
         Update tenant config.
         """
@@ -1409,7 +1406,7 @@ def neon_simple_env(
     pg_version: PgVersion,
     pageserver_virtual_file_io_engine: str,
     pageserver_aux_file_policy: Optional[AuxFileStore],
-    pageserver_default_tenant_config_compaction_algorithm: Optional[Dict[str, Any]],
+    pageserver_default_tenant_config_compaction_algorithm: Optional[dict[str, Any]],
     pageserver_io_buffer_alignment: Optional[int],
 ) -> Iterator[NeonEnv]:
     """
@@ -1457,7 +1454,7 @@ def neon_env_builder(
     test_overlay_dir: Path,
     top_output_dir: Path,
     pageserver_virtual_file_io_engine: str,
-    pageserver_default_tenant_config_compaction_algorithm: Optional[Dict[str, Any]],
+    pageserver_default_tenant_config_compaction_algorithm: Optional[dict[str, Any]],
     pageserver_aux_file_policy: Optional[AuxFileStore],
     record_property: Callable[[str, object], None],
     pageserver_io_buffer_alignment: Optional[int],
@@ -1519,7 +1516,7 @@ class LogUtils:
 
     def assert_log_contains(
         self, pattern: str, offset: None | LogCursor = None
-    ) -> Tuple[str, LogCursor]:
+    ) -> tuple[str, LogCursor]:
         """Convenient for use inside wait_until()"""
 
         res = self.log_contains(pattern, offset=offset)
@@ -1528,7 +1525,7 @@ class LogUtils:
 
     def log_contains(
         self, pattern: str, offset: None | LogCursor = None
-    ) -> Optional[Tuple[str, LogCursor]]:
+    ) -> Optional[tuple[str, LogCursor]]:
         """Check that the log contains a line that matches the given regex"""
         logfile = self.logfile
         if not logfile.exists():
@@ -1609,7 +1606,7 @@ class NeonStorageController(MetricsGetter, LogUtils):
         self.running = True
         return self
 
-    def stop(self, immediate: bool = False) -> "NeonStorageController":
+    def stop(self, immediate: bool = False) -> NeonStorageController:
         if self.running:
             self.env.neon_cli.storage_controller_stop(immediate)
             self.running = False
@@ -1671,7 +1668,7 @@ class NeonStorageController(MetricsGetter, LogUtils):
 
         return resp
 
-    def headers(self, scope: Optional[TokenScope]) -> Dict[str, str]:
+    def headers(self, scope: Optional[TokenScope]) -> dict[str, str]:
         headers = {}
         if self.auth_enabled and scope is not None:
             jwt_token = self.env.auth_keys.generate_token(scope=scope)
@@ -1857,13 +1854,13 @@ class NeonStorageController(MetricsGetter, LogUtils):
         tenant_id: TenantId,
         shard_count: Optional[int] = None,
         shard_stripe_size: Optional[int] = None,
-        tenant_config: Optional[Dict[Any, Any]] = None,
-        placement_policy: Optional[Union[Dict[Any, Any] | str]] = None,
+        tenant_config: Optional[dict[Any, Any]] = None,
+        placement_policy: Optional[Union[dict[Any, Any] | str]] = None,
     ):
         """
         Use this rather than pageserver_api() when you need to include shard parameters
         """
-        body: Dict[str, Any] = {"new_tenant_id": str(tenant_id)}
+        body: dict[str, Any] = {"new_tenant_id": str(tenant_id)}
 
         if shard_count is not None:
             shard_params = {"count": shard_count}
@@ -2079,8 +2076,8 @@ class NeonStorageController(MetricsGetter, LogUtils):
 
             time.sleep(backoff)
 
-    def metadata_health_update(self, healthy: List[TenantShardId], unhealthy: List[TenantShardId]):
-        body: Dict[str, Any] = {
+    def metadata_health_update(self, healthy: list[TenantShardId], unhealthy: list[TenantShardId]):
+        body: dict[str, Any] = {
             "healthy_tenant_shards": [str(t) for t in healthy],
             "unhealthy_tenant_shards": [str(t) for t in unhealthy],
         }
@@ -2101,7 +2098,7 @@ class NeonStorageController(MetricsGetter, LogUtils):
         return response.json()
 
     def metadata_health_list_outdated(self, duration: str):
-        body: Dict[str, Any] = {"not_scrubbed_for": duration}
+        body: dict[str, Any] = {"not_scrubbed_for": duration}
 
         response = self.request(
             "POST",
@@ -2135,7 +2132,7 @@ class NeonStorageController(MetricsGetter, LogUtils):
         response.raise_for_status()
         return response.json()
 
-    def configure_failpoints(self, config_strings: Tuple[str, str] | List[Tuple[str, str]]):
+    def configure_failpoints(self, config_strings: tuple[str, str] | list[tuple[str, str]]):
         if isinstance(config_strings, tuple):
             pairs = [config_strings]
         else:
@@ -2152,13 +2149,13 @@ class NeonStorageController(MetricsGetter, LogUtils):
         log.info(f"Got failpoints request response code {res.status_code}")
         res.raise_for_status()
 
-    def get_tenants_placement(self) -> defaultdict[str, Dict[str, Any]]:
+    def get_tenants_placement(self) -> defaultdict[str, dict[str, Any]]:
         """
         Get the intent and observed placements of all tenants known to the storage controller.
         """
         tenants = self.tenant_list()
 
-        tenant_placement: defaultdict[str, Dict[str, Any]] = defaultdict(
+        tenant_placement: defaultdict[str, dict[str, Any]] = defaultdict(
             lambda: {
                 "observed": {"attached": None, "secondary": []},
                 "intent": {"attached": None, "secondary": []},
@@ -2265,12 +2262,12 @@ class NeonStorageController(MetricsGetter, LogUtils):
         response.raise_for_status()
         return [TenantShardId.parse(tid) for tid in response.json()["updated"]]
 
-    def __enter__(self) -> "NeonStorageController":
+    def __enter__(self) -> NeonStorageController:
         return self
 
     def __exit__(
         self,
-        exc_type: Optional[Type[BaseException]],
+        exc_type: Optional[type[BaseException]],
         exc: Optional[BaseException],
         tb: Optional[TracebackType],
     ):
@@ -2279,7 +2276,7 @@ class NeonStorageController(MetricsGetter, LogUtils):
 
 class NeonProxiedStorageController(NeonStorageController):
     def __init__(self, env: NeonEnv, proxy_port: int, auth_enabled: bool):
-        super(NeonProxiedStorageController, self).__init__(env, proxy_port, auth_enabled)
+        super().__init__(env, proxy_port, auth_enabled)
         self.instances: dict[int, dict[str, Any]] = {}
 
     def start(
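pyupgrade's UP008 replaces the explicit two-argument `super(NeonProxiedStorageController, self)` with the zero-argument form, which resolves the same entry in the MRO implicitly (Python 3 only). An illustrative sketch:

    class Base:
        def __init__(self, port: int):
            self.port = port


    class Proxied(Base):
        def __init__(self, port: int):
            # Equivalent to super(Proxied, self).__init__(port)
            super().__init__(port)
            self.instances = {}


    print(Proxied(8080).port)  # 8080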
@@ -2298,7 +2295,7 @@ class NeonProxiedStorageController(NeonStorageController):
 
     def stop_instance(
         self, immediate: bool = False, instance_id: Optional[int] = None
-    ) -> "NeonStorageController":
+    ) -> NeonStorageController:
         assert instance_id in self.instances
         if self.instances[instance_id]["running"]:
             self.env.neon_cli.storage_controller_stop(immediate, instance_id)
@@ -2307,7 +2304,7 @@
         self.running = any(meta["running"] for meta in self.instances.values())
         return self
 
-    def stop(self, immediate: bool = False) -> "NeonStorageController":
+    def stop(self, immediate: bool = False) -> NeonStorageController:
         for iid, details in self.instances.items():
             if details["running"]:
                 self.env.neon_cli.storage_controller_stop(immediate, iid)
@@ -2326,7 +2323,7 @@ class NeonProxiedStorageController(NeonStorageController):
 
     def log_contains(
         self, pattern: str, offset: None | LogCursor = None
-    ) -> Optional[Tuple[str, LogCursor]]:
+    ) -> Optional[tuple[str, LogCursor]]:
         raise NotImplementedError()
 
 
@@ -2358,7 +2355,7 @@ class NeonPageserver(PgProtocol, LogUtils):
         # env.pageserver.allowed_errors.append(".*could not open garage door.*")
         #
         # The entries in the list are regular experessions.
-        self.allowed_errors: List[str] = list(DEFAULT_PAGESERVER_ALLOWED_ERRORS)
+        self.allowed_errors: list[str] = list(DEFAULT_PAGESERVER_ALLOWED_ERRORS)
 
     def timeline_dir(
         self,
@@ -2383,19 +2380,19 @@ class NeonPageserver(PgProtocol, LogUtils):
     def config_toml_path(self) -> Path:
         return self.workdir / "pageserver.toml"
 
-    def edit_config_toml(self, edit_fn: Callable[[Dict[str, Any]], T]) -> T:
+    def edit_config_toml(self, edit_fn: Callable[[dict[str, Any]], T]) -> T:
         """
         Edit the pageserver's config toml file in place.
         """
         path = self.config_toml_path
-        with open(path, "r") as f:
+        with open(path) as f:
             config = toml.load(f)
         res = edit_fn(config)
         with open(path, "w") as f:
             toml.dump(config, f)
         return res
 
-    def patch_config_toml_nonrecursive(self, patch: Dict[str, Any]) -> Dict[str, Any]:
+    def patch_config_toml_nonrecursive(self, patch: dict[str, Any]) -> dict[str, Any]:
         """
         Non-recursively merge the given `patch` dict into the existing config toml, using `dict.update()`.
         Returns the replaced values.
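`open()` defaults to mode "r" (text, read-only), so the explicit "r" says nothing the bare call does not (UP015); the "w" mode a few lines later still has to be spelled out. For example:

    import tempfile

    with tempfile.NamedTemporaryFile("w", suffix=".toml", delete=False) as tmp:
        tmp.write("key = 'value'\n")

    # open(tmp.name) is identical to open(tmp.name, "r")
    with open(tmp.name) as f:
        print(f.read(), end="")  # key = 'value'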
@@ -2404,7 +2401,7 @@ class NeonPageserver(PgProtocol, LogUtils):
         """
         replacements = {}
 
-        def doit(config: Dict[str, Any]):
+        def doit(config: dict[str, Any]):
            while len(patch) > 0:
                 key, new = patch.popitem()
                 old = config.get(key, None)
@@ -2416,9 +2413,9 @@ class NeonPageserver(PgProtocol, LogUtils):
 
     def start(
         self,
-        extra_env_vars: Optional[Dict[str, str]] = None,
+        extra_env_vars: Optional[dict[str, str]] = None,
         timeout_in_seconds: Optional[int] = None,
-    ) -> "NeonPageserver":
+    ) -> NeonPageserver:
         """
         Start the page server.
         `overrides` allows to add some config to this pageserver start.
@@ -2444,7 +2441,7 @@ class NeonPageserver(PgProtocol, LogUtils):
 
         return self
 
-    def stop(self, immediate: bool = False) -> "NeonPageserver":
+    def stop(self, immediate: bool = False) -> NeonPageserver:
         """
         Stop the page server.
         Returns self.
@@ -2492,12 +2489,12 @@ class NeonPageserver(PgProtocol, LogUtils):
 
         wait_until(20, 0.5, complete)
 
-    def __enter__(self) -> "NeonPageserver":
+    def __enter__(self) -> NeonPageserver:
         return self
 
     def __exit__(
         self,
-        exc_type: Optional[Type[BaseException]],
+        exc_type: Optional[type[BaseException]],
         exc: Optional[BaseException],
         tb: Optional[TracebackType],
     ):
@@ -2544,7 +2541,7 @@ class NeonPageserver(PgProtocol, LogUtils):
     def tenant_attach(
         self,
         tenant_id: TenantId,
-        config: None | Dict[str, Any] = None,
+        config: None | dict[str, Any] = None,
         generation: Optional[int] = None,
         override_storage_controller_generation: bool = False,
     ):
@@ -2583,7 +2580,7 @@ class NeonPageserver(PgProtocol, LogUtils):
     ) -> dict[str, Any]:
         path = self.tenant_dir(tenant_shard_id) / "config-v1"
         log.info(f"Reading location conf from {path}")
-        bytes = open(path, "r").read()
+        bytes = open(path).read()
         try:
             decoded: dict[str, Any] = toml.loads(bytes)
             return decoded
@@ -2594,7 +2591,7 @@ class NeonPageserver(PgProtocol, LogUtils):
     def tenant_create(
         self,
         tenant_id: TenantId,
-        conf: Optional[Dict[str, Any]] = None,
+        conf: Optional[dict[str, Any]] = None,
         auth_token: Optional[str] = None,
         generation: Optional[int] = None,
     ) -> TenantId:
@@ -2660,7 +2657,7 @@ class PgBin:
         self.env = os.environ.copy()
         self.env["LD_LIBRARY_PATH"] = str(self.pg_lib_dir)
 
-    def _fixpath(self, command: List[str]):
+    def _fixpath(self, command: list[str]):
         if "/" not in str(command[0]):
             command[0] = str(self.pg_bin_path / command[0])
 
@@ -2680,7 +2677,7 @@ class PgBin:
 
     def run_nonblocking(
         self,
-        command: List[str],
+        command: list[str],
         env: Optional[Env] = None,
         cwd: Optional[Union[str, Path]] = None,
     ) -> subprocess.Popen[Any]:
@@ -2704,7 +2701,7 @@ class PgBin:
 
     def run(
         self,
-        command: List[str],
+        command: list[str],
         env: Optional[Env] = None,
         cwd: Optional[Union[str, Path]] = None,
     ) -> None:
@@ -2727,7 +2724,7 @@ class PgBin:
 
     def run_capture(
         self,
-        command: List[str],
+        command: list[str],
         env: Optional[Env] = None,
         cwd: Optional[str] = None,
         with_command_header=True,
@@ -2840,14 +2837,14 @@ class VanillaPostgres(PgProtocol):
             ]
         )
 
-    def configure(self, options: List[str]):
+    def configure(self, options: list[str]):
         """Append lines into postgresql.conf file."""
         assert not self.running
         with open(os.path.join(self.pgdatadir, "postgresql.conf"), "a") as conf_file:
             conf_file.write("\n".join(options))
             conf_file.write("\n")
 
-    def edit_hba(self, hba: List[str]):
+    def edit_hba(self, hba: list[str]):
         """Prepend hba lines into pg_hba.conf file."""
         assert not self.running
         with open(os.path.join(self.pgdatadir, "pg_hba.conf"), "r+") as conf_file:
@@ -2875,12 +2872,12 @@ class VanillaPostgres(PgProtocol):
         """Return size of pgdatadir subdirectory in bytes."""
         return get_dir_size(self.pgdatadir / subdir)
 
-    def __enter__(self) -> "VanillaPostgres":
+    def __enter__(self) -> VanillaPostgres:
         return self
 
     def __exit__(
         self,
-        exc_type: Optional[Type[BaseException]],
+        exc_type: Optional[type[BaseException]],
         exc: Optional[BaseException],
         tb: Optional[TracebackType],
     ):
@@ -2910,7 +2907,7 @@ class RemotePostgres(PgProtocol):
         # The remote server is assumed to be running already
         self.running = True
 
-    def configure(self, options: List[str]):
+    def configure(self, options: list[str]):
         raise Exception("cannot change configuration of remote Posgres instance")
 
     def start(self):
@@ -2924,12 +2921,12 @@ class RemotePostgres(PgProtocol):
         # See https://www.postgresql.org/docs/14/functions-admin.html#FUNCTIONS-ADMIN-GENFILE
         raise Exception("cannot get size of a Postgres instance")
 
-    def __enter__(self) -> "RemotePostgres":
+    def __enter__(self) -> RemotePostgres:
         return self
 
     def __exit__(
         self,
-        exc_type: Optional[Type[BaseException]],
+        exc_type: Optional[type[BaseException]],
         exc: Optional[BaseException],
         tb: Optional[TracebackType],
     ):
@@ -3265,7 +3262,7 @@ class NeonProxy(PgProtocol):
 
     def __exit__(
         self,
-        exc_type: Optional[Type[BaseException]],
+        exc_type: Optional[type[BaseException]],
         exc: Optional[BaseException],
         tb: Optional[TracebackType],
     ):
@@ -3403,7 +3400,7 @@ class Endpoint(PgProtocol, LogUtils):
         self.http_port = http_port
         self.check_stop_result = check_stop_result
         # passed to endpoint create and endpoint reconfigure
-        self.active_safekeepers: List[int] = list(map(lambda sk: sk.id, env.safekeepers))
+        self.active_safekeepers: list[int] = list(map(lambda sk: sk.id, env.safekeepers))
         # path to conf is <repo_dir>/endpoints/<endpoint_id>/pgdata/postgresql.conf
 
         # Semaphore is set to 1 when we start, and acquire'd back to zero when we stop
@@ -3426,10 +3423,10 @@ class Endpoint(PgProtocol, LogUtils):
         endpoint_id: Optional[str] = None,
         hot_standby: bool = False,
         lsn: Optional[Lsn] = None,
-        config_lines: Optional[List[str]] = None,
+        config_lines: Optional[list[str]] = None,
         pageserver_id: Optional[int] = None,
         allow_multiple: bool = False,
-    ) -> "Endpoint":
+    ) -> Endpoint:
         """
         Create a new Postgres endpoint.
         Returns self.
@@ -3472,10 +3469,10 @@
         self,
         remote_ext_config: Optional[str] = None,
         pageserver_id: Optional[int] = None,
-        safekeepers: Optional[List[int]] = None,
+        safekeepers: Optional[list[int]] = None,
         allow_multiple: bool = False,
         basebackup_request_tries: Optional[int] = None,
-    ) -> "Endpoint":
+    ) -> Endpoint:
         """
         Start the Postgres instance.
         Returns self.
@@ -3524,7 +3521,7 @@ class Endpoint(PgProtocol, LogUtils):
         """Path to the postgresql.conf in the endpoint directory (not the one in pgdata)"""
         return self.endpoint_path() / "postgresql.conf"
 
-    def config(self, lines: List[str]) -> "Endpoint":
+    def config(self, lines: list[str]) -> Endpoint:
         """
         Add lines to postgresql.conf.
         Lines should be an array of valid postgresql.conf rows.
@@ -3538,7 +3535,7 @@ class Endpoint(PgProtocol, LogUtils):
 
         return self
 
-    def edit_hba(self, hba: List[str]):
+    def edit_hba(self, hba: list[str]):
         """Prepend hba lines into pg_hba.conf file."""
         with open(os.path.join(self.pg_data_dir_path(), "pg_hba.conf"), "r+") as conf_file:
             data = conf_file.read()
@@ -3553,7 +3550,7 @@ class Endpoint(PgProtocol, LogUtils):
         return self._running._value > 0
 
     def reconfigure(
-        self, pageserver_id: Optional[int] = None, safekeepers: Optional[List[int]] = None
+        self, pageserver_id: Optional[int] = None, safekeepers: Optional[list[int]] = None
     ):
         assert self.endpoint_id is not None
         # If `safekeepers` is not None, they are remember them as active and use
@@ -3568,7 +3565,7 @@ class Endpoint(PgProtocol, LogUtils):
         """Update the endpoint.json file used by control_plane."""
         # Read config
         config_path = os.path.join(self.endpoint_path(), "endpoint.json")
-        with open(config_path, "r") as f:
+        with open(config_path) as f:
             data_dict: dict[str, Any] = json.load(f)
 
         # Write it back updated
@@ -3601,8 +3598,8 @@ class Endpoint(PgProtocol, LogUtils):
     def stop(
         self,
         mode: str = "fast",
-        sks_wait_walreceiver_gone: Optional[tuple[List[Safekeeper], TimelineId]] = None,
-    ) -> "Endpoint":
+        sks_wait_walreceiver_gone: Optional[tuple[list[Safekeeper], TimelineId]] = None,
+    ) -> Endpoint:
         """
         Stop the Postgres instance if it's running.
 
@@ -3636,7 +3633,7 @@ class Endpoint(PgProtocol, LogUtils):
 
         return self
 
-    def stop_and_destroy(self, mode: str = "immediate") -> "Endpoint":
+    def stop_and_destroy(self, mode: str = "immediate") -> Endpoint:
         """
         Stop the Postgres instance, then destroy the endpoint.
         Returns self.
@@ -3658,12 +3655,12 @@ class Endpoint(PgProtocol, LogUtils):
         endpoint_id: Optional[str] = None,
         hot_standby: bool = False,
         lsn: Optional[Lsn] = None,
-        config_lines: Optional[List[str]] = None,
+        config_lines: Optional[list[str]] = None,
         remote_ext_config: Optional[str] = None,
         pageserver_id: Optional[int] = None,
         allow_multiple=False,
         basebackup_request_tries: Optional[int] = None,
-    ) -> "Endpoint":
+    ) -> Endpoint:
         """
         Create an endpoint, apply config, and start Postgres.
         Returns self.
@@ -3690,12 +3687,12 @@ class Endpoint(PgProtocol, LogUtils):
 
         return self
 
-    def __enter__(self) -> "Endpoint":
+    def __enter__(self) -> Endpoint:
         return self
 
     def __exit__(
         self,
-        exc_type: Optional[Type[BaseException]],
+        exc_type: Optional[type[BaseException]],
         exc: Optional[BaseException],
         tb: Optional[TracebackType],
     ):
@@ -3726,7 +3723,7 @@ class EndpointFactory:
     def __init__(self, env: NeonEnv):
         self.env = env
         self.num_instances: int = 0
-        self.endpoints: List[Endpoint] = []
+        self.endpoints: list[Endpoint] = []
 
     def create_start(
         self,
@@ -3735,7 +3732,7 @@ class EndpointFactory:
         tenant_id: Optional[TenantId] = None,
         lsn: Optional[Lsn] = None,
         hot_standby: bool = False,
-        config_lines: Optional[List[str]] = None,
+        config_lines: Optional[list[str]] = None,
         remote_ext_config: Optional[str] = None,
         pageserver_id: Optional[int] = None,
         basebackup_request_tries: Optional[int] = None,
@@ -3767,7 +3764,7 @@
         tenant_id: Optional[TenantId] = None,
         lsn: Optional[Lsn] = None,
         hot_standby: bool = False,
-        config_lines: Optional[List[str]] = None,
+        config_lines: Optional[list[str]] = None,
         pageserver_id: Optional[int] = None,
     ) -> Endpoint:
         ep = Endpoint(
@@ -3791,7 +3788,7 @@
             pageserver_id=pageserver_id,
         )
 
-    def stop_all(self, fail_on_error=True) -> "EndpointFactory":
+    def stop_all(self, fail_on_error=True) -> EndpointFactory:
         exception = None
         for ep in self.endpoints:
             try:
@@ -3806,7 +3803,7 @@
         return self
 
     def new_replica(
-        self, origin: Endpoint, endpoint_id: str, config_lines: Optional[List[str]] = None
+        self, origin: Endpoint, endpoint_id: str, config_lines: Optional[list[str]] = None
     ):
         branch_name = origin.branch_name
         assert origin in self.endpoints
@@ -3822,7 +3819,7 @@
         )
 
     def new_replica_start(
-        self, origin: Endpoint, endpoint_id: str, config_lines: Optional[List[str]] = None
+        self, origin: Endpoint, endpoint_id: str, config_lines: Optional[list[str]] = None
     ):
         branch_name = origin.branch_name
         assert origin in self.endpoints
@@ -3860,7 +3857,7 @@ class Safekeeper(LogUtils):
         port: SafekeeperPort,
         id: int,
         running: bool = False,
-        extra_opts: Optional[List[str]] = None,
+        extra_opts: Optional[list[str]] = None,
     ):
         self.env = env
         self.port = port
@@ -3886,8 +3883,8 @@ class Safekeeper(LogUtils):
         self.extra_opts = extra_opts
 
     def start(
-        self, extra_opts: Optional[List[str]] = None, timeout_in_seconds: Optional[int] = None
-    ) -> "Safekeeper":
+        self, extra_opts: Optional[list[str]] = None, timeout_in_seconds: Optional[int] = None
+    ) -> Safekeeper:
         if extra_opts is None:
             # Apply either the extra_opts passed in, or the ones from our constructor: we do not merge the two.
             extra_opts = self.extra_opts
@@ -3922,7 +3919,7 @@
                 break  # success
         return self
 
-    def stop(self, immediate: bool = False) -> "Safekeeper":
+    def stop(self, immediate: bool = False) -> Safekeeper:
         log.info(f"Stopping safekeeper {self.id}")
         self.env.neon_cli.safekeeper_stop(self.id, immediate)
         self.running = False
@@ -3934,8 +3931,8 @@ class Safekeeper(LogUtils):
         assert not self.log_contains("timeout while acquiring WalResidentTimeline guard")
 
     def append_logical_message(
-        self, tenant_id: TenantId, timeline_id: TimelineId, request: Dict[str, Any]
-    ) -> Dict[str, Any]:
+        self, tenant_id: TenantId, timeline_id: TimelineId, request: dict[str, Any]
+    ) -> dict[str, Any]:
        """
         Send JSON_CTRL query to append LogicalMessage to WAL and modify
         safekeeper state. It will construct LogicalMessage from provided
@@ -3988,7 +3985,7 @@
 
     def pull_timeline(
         self, srcs: list[Safekeeper], tenant_id: TenantId, timeline_id: TimelineId
-    ) -> Dict[str, Any]:
+    ) -> dict[str, Any]:
         """
         pull_timeline from srcs to self.
         """
@@ -4024,7 +4021,7 @@ class Safekeeper(LogUtils):
         mysegs = [s for s in segs if f"sk{self.id}" in s]
         return mysegs
 
-    def list_segments(self, tenant_id, timeline_id) -> List[str]:
+    def list_segments(self, tenant_id, timeline_id) -> list[str]:
         """
         Get list of segment names of the given timeline.
         """
@@ -4129,7 +4126,7 @@ class StorageScrubber:
         self.log_dir = log_dir
 
     def scrubber_cli(
-        self, args: list[str], timeout, extra_env: Optional[Dict[str, str]] = None
+        self, args: list[str], timeout, extra_env: Optional[dict[str, str]] = None
     ) -> str:
         assert isinstance(self.env.pageserver_remote_storage, S3Storage)
         s3_storage = self.env.pageserver_remote_storage
@@ -4176,10 +4173,10 @@ class StorageScrubber:
 
     def scan_metadata_safekeeper(
         self,
-        timeline_lsns: List[Dict[str, Any]],
+        timeline_lsns: list[dict[str, Any]],
         cloud_admin_api_url: str,
         cloud_admin_api_token: str,
-    ) -> Tuple[bool, Any]:
+    ) -> tuple[bool, Any]:
         extra_env = {
             "CLOUD_ADMIN_API_URL": cloud_admin_api_url,
             "CLOUD_ADMIN_API_TOKEN": cloud_admin_api_token,
@@ -4192,9 +4189,9 @@ class StorageScrubber:
         self,
         post_to_storage_controller: bool = False,
         node_kind: NodeKind = NodeKind.PAGESERVER,
-        timeline_lsns: Optional[List[Dict[str, Any]]] = None,
-        extra_env: Optional[Dict[str, str]] = None,
-    ) -> Tuple[bool, Any]:
+        timeline_lsns: Optional[list[dict[str, Any]]] = None,
+        extra_env: Optional[dict[str, str]] = None,
+    ) -> tuple[bool, Any]:
         """
         Returns the health status and the metadata summary.
         """
@@ -4501,7 +4498,7 @@ def should_skip_file(filename: str) -> bool:
 #
 # Test helpers
 #
-def list_files_to_compare(pgdata_dir: Path) -> List[str]:
+def list_files_to_compare(pgdata_dir: Path) -> list[str]:
     pgdata_files = []
     for root, _dirs, filenames in os.walk(pgdata_dir):
         for filename in filenames:
@@ -1,5 +1,7 @@
+from __future__ import annotations
+
+from collections.abc import Iterator
 from pathlib import Path
-from typing import Iterator
 
 import psutil
 
@@ -0,0 +1 @@
+from __future__ import annotations
@@ -1,14 +1,16 @@
 #! /usr/bin/env python3
 
+from __future__ import annotations
+
 import argparse
 import re
 import sys
-from typing import Iterable, List, Tuple
+from collections.abc import Iterable
 
 
 def scan_pageserver_log_for_errors(
-    input: Iterable[str], allowed_errors: List[str]
-) -> List[Tuple[int, str]]:
+    input: Iterable[str], allowed_errors: list[str]
+) -> list[tuple[int, str]]:
     error_or_warn = re.compile(r"\s(ERROR|WARN)")
     errors = []
     for lineno, line in enumerate(input, start=1):
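`Iterable` now comes from `collections.abc` rather than `typing`: since Python 3.9 the `typing` container aliases are deprecated re-exports of the real ABCs (UP035). A minimal sketch of the modern import style:

    from __future__ import annotations

    from collections.abc import Iterable

    def first_match(lines: Iterable[str], needle: str) -> str | None:
        for line in lines:
            if needle in line:
                return line
        return None

    print(first_match(["noise", "an ERROR line"], "ERROR"))  # an ERROR line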
@@ -113,7 +115,7 @@ DEFAULT_STORAGE_CONTROLLER_ALLOWED_ERRORS = [
 
 
 def _check_allowed_errors(input):
-    allowed_errors: List[str] = list(DEFAULT_PAGESERVER_ALLOWED_ERRORS)
+    allowed_errors: list[str] = list(DEFAULT_PAGESERVER_ALLOWED_ERRORS)
 
     # add any test specifics here; cli parsing is not provided for the
     # difficulty of copypasting regexes as arguments without any quoting
@@ -1,9 +1,14 @@
+from __future__ import annotations
+
 import re
 from dataclasses import dataclass
-from typing import Any, Dict, Tuple, Union
+from typing import TYPE_CHECKING, Union
 
 from fixtures.common_types import KEY_MAX, KEY_MIN, Key, Lsn
 
+if TYPE_CHECKING:
+    from typing import Any
+
 
 @dataclass
 class IndexLayerMetadata:
@@ -53,7 +58,7 @@ IMAGE_LAYER_FILE_NAME = re.compile(
 )
 
 
-def parse_image_layer(f_name: str) -> Tuple[int, int, int]:
+def parse_image_layer(f_name: str) -> tuple[int, int, int]:
     """Parse an image layer file name. Return key start, key end, and snapshot lsn"""
 
     match = IMAGE_LAYER_FILE_NAME.match(f_name)
@@ -68,7 +73,7 @@ DELTA_LAYER_FILE_NAME = re.compile(
 )
 
 
-def parse_delta_layer(f_name: str) -> Tuple[int, int, int, int]:
+def parse_delta_layer(f_name: str) -> tuple[int, int, int, int]:
     """Parse a delta layer file name. Return key start, key end, lsn start, and lsn end"""
     match = DELTA_LAYER_FILE_NAME.match(f_name)
     if match is None:
@@ -121,11 +126,11 @@ def is_future_layer(layer_file_name: LayerName, disk_consistent_lsn: Lsn):
 
 @dataclass
 class IndexPartDump:
-    layer_metadata: Dict[LayerName, IndexLayerMetadata]
+    layer_metadata: dict[LayerName, IndexLayerMetadata]
     disk_consistent_lsn: Lsn
 
     @classmethod
-    def from_json(cls, d: Dict[str, Any]) -> "IndexPartDump":
+    def from_json(cls, d: dict[str, Any]) -> IndexPartDump:
         return IndexPartDump(
             layer_metadata={
                 parse_layer_file_name(n): IndexLayerMetadata(v["file_size"], v["generation"])
@@ -4,7 +4,7 @@ import time
 from collections import defaultdict
 from dataclasses import dataclass
 from datetime import datetime
-from typing import Any, Dict, List, Optional, Set, Tuple, Union
+from typing import TYPE_CHECKING, Any
 
 import requests
 from requests.adapters import HTTPAdapter
@@ -16,6 +16,9 @@ from fixtures.metrics import Metrics, MetricsGetter, parse_metrics
 from fixtures.pg_version import PgVersion
 from fixtures.utils import Fn
 
+if TYPE_CHECKING:
+    from typing import Optional, Union
+
 
 class PageserverApiException(Exception):
     def __init__(self, message, status_code: int):
@@ -43,7 +46,7 @@ class InMemoryLayerInfo:
     lsn_end: Optional[str]
 
     @classmethod
-    def from_json(cls, d: Dict[str, Any]) -> InMemoryLayerInfo:
+    def from_json(cls, d: dict[str, Any]) -> InMemoryLayerInfo:
         return InMemoryLayerInfo(
             kind=d["kind"],
             lsn_start=d["lsn_start"],
@@ -64,7 +67,7 @@ class HistoricLayerInfo:
     visible: bool
 
     @classmethod
-    def from_json(cls, d: Dict[str, Any]) -> HistoricLayerInfo:
+    def from_json(cls, d: dict[str, Any]) -> HistoricLayerInfo:
         # instead of parsing the key range lets keep the definition of "L0" in pageserver
         l0_ness = d.get("l0")
         assert l0_ness is None or isinstance(l0_ness, bool)
@@ -86,53 +89,53 @@ class HistoricLayerInfo:
 
 @dataclass
 class LayerMapInfo:
-    in_memory_layers: List[InMemoryLayerInfo]
-    historic_layers: List[HistoricLayerInfo]
+    in_memory_layers: list[InMemoryLayerInfo]
+    historic_layers: list[HistoricLayerInfo]
 
     @classmethod
-    def from_json(cls, d: Dict[str, Any]) -> LayerMapInfo:
+    def from_json(cls, d: dict[str, Any]) -> LayerMapInfo:
         info = LayerMapInfo(in_memory_layers=[], historic_layers=[])
 
         json_in_memory_layers = d["in_memory_layers"]
-        assert isinstance(json_in_memory_layers, List)
+        assert isinstance(json_in_memory_layers, list)
         for json_in_memory_layer in json_in_memory_layers:
             info.in_memory_layers.append(InMemoryLayerInfo.from_json(json_in_memory_layer))
 
         json_historic_layers = d["historic_layers"]
-        assert isinstance(json_historic_layers, List)
+        assert isinstance(json_historic_layers, list)
         for json_historic_layer in json_historic_layers:
             info.historic_layers.append(HistoricLayerInfo.from_json(json_historic_layer))
 
         return info
 
-    def kind_count(self) -> Dict[str, int]:
-        counts: Dict[str, int] = defaultdict(int)
+    def kind_count(self) -> dict[str, int]:
+        counts: dict[str, int] = defaultdict(int)
         for inmem_layer in self.in_memory_layers:
             counts[inmem_layer.kind] += 1
         for hist_layer in self.historic_layers:
             counts[hist_layer.kind] += 1
         return counts
 
-    def delta_layers(self) -> List[HistoricLayerInfo]:
+    def delta_layers(self) -> list[HistoricLayerInfo]:
         return [x for x in self.historic_layers if x.kind == "Delta"]
 
-    def image_layers(self) -> List[HistoricLayerInfo]:
+    def image_layers(self) -> list[HistoricLayerInfo]:
         return [x for x in self.historic_layers if x.kind == "Image"]
 
-    def delta_l0_layers(self) -> List[HistoricLayerInfo]:
+    def delta_l0_layers(self) -> list[HistoricLayerInfo]:
         return [x for x in self.historic_layers if x.kind == "Delta" and x.l0]
 
-    def historic_by_name(self) -> Set[str]:
+    def historic_by_name(self) -> set[str]:
         return set(x.layer_file_name for x in self.historic_layers)
 
 
 @dataclass
 class TenantConfig:
-    tenant_specific_overrides: Dict[str, Any]
-    effective_config: Dict[str, Any]
+    tenant_specific_overrides: dict[str, Any]
+    effective_config: dict[str, Any]
 
     @classmethod
-    def from_json(cls, d: Dict[str, Any]) -> TenantConfig:
+    def from_json(cls, d: dict[str, Any]) -> TenantConfig:
         return TenantConfig(
             tenant_specific_overrides=d["tenant_specific_overrides"],
             effective_config=d["effective_config"],
@@ -209,7 +212,7 @@ class PageserverHttpClient(requests.Session, MetricsGetter):
|
|||||||
def check_status(self):
|
def check_status(self):
|
||||||
self.get(f"http://localhost:{self.port}/v1/status").raise_for_status()
|
self.get(f"http://localhost:{self.port}/v1/status").raise_for_status()
|
||||||
|
|
||||||
def configure_failpoints(self, config_strings: Tuple[str, str] | List[Tuple[str, str]]):
|
def configure_failpoints(self, config_strings: tuple[str, str] | list[tuple[str, str]]):
|
||||||
self.is_testing_enabled_or_skip()
|
self.is_testing_enabled_or_skip()
|
||||||
|
|
||||||
if isinstance(config_strings, tuple):
|
if isinstance(config_strings, tuple):
|
||||||
@@ -233,7 +236,7 @@ class PageserverHttpClient(requests.Session, MetricsGetter):
|
|||||||
res = self.post(f"http://localhost:{self.port}/v1/reload_auth_validation_keys")
|
res = self.post(f"http://localhost:{self.port}/v1/reload_auth_validation_keys")
|
||||||
self.verbose_error(res)
|
self.verbose_error(res)
|
||||||
|
|
||||||
def tenant_list(self) -> List[Dict[Any, Any]]:
|
def tenant_list(self) -> list[dict[Any, Any]]:
|
||||||
res = self.get(f"http://localhost:{self.port}/v1/tenant")
|
res = self.get(f"http://localhost:{self.port}/v1/tenant")
|
||||||
self.verbose_error(res)
|
self.verbose_error(res)
|
||||||
res_json = res.json()
|
res_json = res.json()
|
||||||
@@ -244,7 +247,7 @@ class PageserverHttpClient(requests.Session, MetricsGetter):
|
|||||||
self,
|
self,
|
||||||
tenant_id: Union[TenantId, TenantShardId],
|
tenant_id: Union[TenantId, TenantShardId],
|
||||||
generation: int,
|
generation: int,
|
||||||
config: None | Dict[str, Any] = None,
|
config: None | dict[str, Any] = None,
|
||||||
):
|
):
|
||||||
config = config or {}
|
config = config or {}
|
||||||
|
|
||||||
@@ -324,7 +327,7 @@ class PageserverHttpClient(requests.Session, MetricsGetter):
|
|||||||
|
|
||||||
def tenant_status(
|
def tenant_status(
|
||||||
self, tenant_id: Union[TenantId, TenantShardId], activate: bool = False
|
self, tenant_id: Union[TenantId, TenantShardId], activate: bool = False
|
||||||
) -> Dict[Any, Any]:
|
) -> dict[Any, Any]:
|
||||||
"""
|
"""
|
||||||
:activate: hint the server not to accelerate activation of this tenant in response
|
:activate: hint the server not to accelerate activation of this tenant in response
|
||||||
to this query. False by default for tests, because they generally want to observed the
|
to this query. False by default for tests, because they generally want to observed the
|
||||||
@@ -378,8 +381,8 @@ class PageserverHttpClient(requests.Session, MetricsGetter):
|
|||||||
def patch_tenant_config_client_side(
|
def patch_tenant_config_client_side(
|
||||||
self,
|
self,
|
||||||
tenant_id: TenantId,
|
tenant_id: TenantId,
|
||||||
inserts: Optional[Dict[str, Any]] = None,
|
inserts: Optional[dict[str, Any]] = None,
|
||||||
removes: Optional[List[str]] = None,
|
removes: Optional[list[str]] = None,
|
||||||
):
|
):
|
||||||
current = self.tenant_config(tenant_id).tenant_specific_overrides
|
current = self.tenant_config(tenant_id).tenant_specific_overrides
|
||||||
if inserts is not None:
|
if inserts is not None:
|
||||||
@@ -394,7 +397,7 @@ class PageserverHttpClient(requests.Session, MetricsGetter):
|
|||||||
|
|
||||||
def tenant_size_and_modelinputs(
|
def tenant_size_and_modelinputs(
|
||||||
self, tenant_id: Union[TenantId, TenantShardId]
|
self, tenant_id: Union[TenantId, TenantShardId]
|
||||||
) -> Tuple[int, Dict[str, Any]]:
|
) -> tuple[int, dict[str, Any]]:
|
||||||
"""
|
"""
|
||||||
Returns the tenant size, together with the model inputs as the second tuple item.
|
Returns the tenant size, together with the model inputs as the second tuple item.
|
||||||
"""
|
"""
|
||||||
@@ -424,7 +427,7 @@ class PageserverHttpClient(requests.Session, MetricsGetter):
|
|||||||
tenant_id: Union[TenantId, TenantShardId],
|
tenant_id: Union[TenantId, TenantShardId],
|
||||||
timestamp: datetime,
|
timestamp: datetime,
|
||||||
done_if_after: datetime,
|
done_if_after: datetime,
|
||||||
shard_counts: Optional[List[int]] = None,
|
shard_counts: Optional[list[int]] = None,
|
||||||
):
|
):
|
||||||
"""
|
"""
|
||||||
Issues a request to perform time travel operations on the remote storage
|
Issues a request to perform time travel operations on the remote storage
|
||||||
@@ -432,7 +435,7 @@ class PageserverHttpClient(requests.Session, MetricsGetter):
|
|||||||
|
|
||||||
if shard_counts is None:
|
if shard_counts is None:
|
||||||
shard_counts = []
|
shard_counts = []
|
||||||
body: Dict[str, Any] = {
|
body: dict[str, Any] = {
|
||||||
"shard_counts": shard_counts,
|
"shard_counts": shard_counts,
|
||||||
}
|
}
|
||||||
res = self.put(
|
res = self.put(
|
||||||
@@ -446,7 +449,7 @@ class PageserverHttpClient(requests.Session, MetricsGetter):
|
|||||||
tenant_id: Union[TenantId, TenantShardId],
|
tenant_id: Union[TenantId, TenantShardId],
|
||||||
include_non_incremental_logical_size: bool = False,
|
include_non_incremental_logical_size: bool = False,
|
||||||
include_timeline_dir_layer_file_size_sum: bool = False,
|
include_timeline_dir_layer_file_size_sum: bool = False,
|
||||||
) -> List[Dict[str, Any]]:
|
) -> list[dict[str, Any]]:
|
||||||
params = {}
|
params = {}
|
||||||
if include_non_incremental_logical_size:
|
if include_non_incremental_logical_size:
|
||||||
params["include-non-incremental-logical-size"] = "true"
|
params["include-non-incremental-logical-size"] = "true"
|
||||||
@@ -470,8 +473,8 @@ class PageserverHttpClient(requests.Session, MetricsGetter):
|
|||||||
ancestor_start_lsn: Optional[Lsn] = None,
|
ancestor_start_lsn: Optional[Lsn] = None,
|
||||||
existing_initdb_timeline_id: Optional[TimelineId] = None,
|
existing_initdb_timeline_id: Optional[TimelineId] = None,
|
||||||
**kwargs,
|
**kwargs,
|
||||||
) -> Dict[Any, Any]:
|
) -> dict[Any, Any]:
|
||||||
body: Dict[str, Any] = {
|
body: dict[str, Any] = {
|
||||||
"new_timeline_id": str(new_timeline_id),
|
"new_timeline_id": str(new_timeline_id),
|
||||||
"ancestor_start_lsn": str(ancestor_start_lsn) if ancestor_start_lsn else None,
|
"ancestor_start_lsn": str(ancestor_start_lsn) if ancestor_start_lsn else None,
|
||||||
"ancestor_timeline_id": str(ancestor_timeline_id) if ancestor_timeline_id else None,
|
"ancestor_timeline_id": str(ancestor_timeline_id) if ancestor_timeline_id else None,
|
||||||
@@ -504,7 +507,7 @@ class PageserverHttpClient(requests.Session, MetricsGetter):
|
|||||||
include_timeline_dir_layer_file_size_sum: bool = False,
|
include_timeline_dir_layer_file_size_sum: bool = False,
|
||||||
force_await_initial_logical_size: bool = False,
|
force_await_initial_logical_size: bool = False,
|
||||||
**kwargs,
|
**kwargs,
|
||||||
) -> Dict[Any, Any]:
|
) -> dict[Any, Any]:
|
||||||
params = {}
|
params = {}
|
||||||
if include_non_incremental_logical_size:
|
if include_non_incremental_logical_size:
|
||||||
params["include-non-incremental-logical-size"] = "true"
|
params["include-non-incremental-logical-size"] = "true"
|
||||||
@@ -844,7 +847,7 @@ class PageserverHttpClient(requests.Session, MetricsGetter):
|
|||||||
)
|
)
|
||||||
if len(res) != 2:
|
if len(res) != 2:
|
||||||
return None
|
return None
|
||||||
inc, dec = [res[metric] for metric in metrics]
|
inc, dec = (res[metric] for metric in metrics)
|
||||||
queue_count = int(inc) - int(dec)
|
queue_count = int(inc) - int(dec)
|
||||||
assert queue_count >= 0
|
assert queue_count >= 0
|
||||||
return queue_count
|
return queue_count
|
||||||
@@ -885,7 +888,7 @@ class PageserverHttpClient(requests.Session, MetricsGetter):
|
|||||||
timeline_id: TimelineId,
|
timeline_id: TimelineId,
|
||||||
batch_size: int | None = None,
|
batch_size: int | None = None,
|
||||||
**kwargs,
|
**kwargs,
|
||||||
) -> Set[TimelineId]:
|
) -> set[TimelineId]:
|
||||||
params = {}
|
params = {}
|
||||||
if batch_size is not None:
|
if batch_size is not None:
|
||||||
params["batch_size"] = batch_size
|
params["batch_size"] = batch_size
|
||||||
|
|||||||
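Most of the churn above is the pyupgrade UP006 rewrite: with `from __future__ import annotations` in effect, annotations are never evaluated at runtime, so `Dict`/`List`/`Set`/`Tuple` from `typing` can become the builtin `dict`/`list`/`set`/`tuple` even on interpreters older than 3.9. Runtime uses such as `isinstance(x, list)` use the plain builtin and are safe on any Python 3. A minimal self-contained sketch of the before/after shape (example code, not from this repository):

    from __future__ import annotations

    from collections import defaultdict


    def kind_count(kinds: list[str]) -> dict[str, int]:
        # Builtin generics (PEP 585) in the annotations above are fine even on
        # Python 3.8, because the future import turns annotations into strings.
        counts: dict[str, int] = defaultdict(int)
        for kind in kinds:
            counts[kind] += 1
        return dict(counts)


    print(kind_count(["Delta", "Image", "Delta"]))  # {'Delta': 2, 'Image': 1}

The `inc, dec = (res[metric] for metric in metrics)` change in the `@@ -844` hunk is the related UP027 fix: unpacking directly from a generator expression avoids materializing a throwaway list.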
@@ -1,5 +1,7 @@
+from __future__ import annotations
+
 import concurrent.futures
-from typing import Any, Callable, Dict, Tuple
+from typing import TYPE_CHECKING

 import fixtures.pageserver.remote_storage
 from fixtures.common_types import TenantId, TimelineId
@@ -10,10 +12,13 @@ from fixtures.neon_fixtures import (
 )
 from fixtures.remote_storage import LocalFsStorage, RemoteStorageKind

+if TYPE_CHECKING:
+    from typing import Any, Callable
+

 def single_timeline(
     neon_env_builder: NeonEnvBuilder,
-    setup_template: Callable[[NeonEnv], Tuple[TenantId, TimelineId, Dict[str, Any]]],
+    setup_template: Callable[[NeonEnv], tuple[TenantId, TimelineId, dict[str, Any]]],
     ncopies: int,
 ) -> NeonEnv:
     """
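The recurring `if TYPE_CHECKING:` blocks deserve a note: `typing.TYPE_CHECKING` is `False` at runtime and treated as `True` only while a type checker analyzes the module, so imports placed under it cost nothing at import time and cannot introduce circular imports. Combined with postponed annotations, the guarded names can still be used freely in signatures. A small runnable sketch of the pattern (generic example, not repository code):

    from __future__ import annotations

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        # Only type checkers execute this block; at runtime TYPE_CHECKING is False.
        from typing import Any, Callable


    def apply(fn: Callable[[int], Any], value: int) -> Any:
        # Callable/Any are undefined at runtime here, but the annotations are
        # plain strings thanks to the future import, so this still runs.
        return fn(value)


    print(apply(lambda x: x * 2, 21))  # 42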
@@ -1,10 +1,12 @@
+from __future__ import annotations
+
 import concurrent.futures
 import os
 import queue
 import shutil
 import threading
 from pathlib import Path
-from typing import Any, List, Tuple
+from typing import TYPE_CHECKING

 from fixtures.common_types import TenantId, TimelineId
 from fixtures.neon_fixtures import NeonEnv
@@ -14,6 +16,9 @@ from fixtures.pageserver.common_types import (
 )
 from fixtures.remote_storage import LocalFsStorage

+if TYPE_CHECKING:
+    from typing import Any
+

 def duplicate_one_tenant(env: NeonEnv, template_tenant: TenantId, new_tenant: TenantId):
     remote_storage = env.pageserver_remote_storage
@@ -50,13 +55,13 @@ def duplicate_one_tenant(env: NeonEnv, template_tenant: TenantId, new_tenant: Te
     return None


-def duplicate_tenant(env: NeonEnv, template_tenant: TenantId, ncopies: int) -> List[TenantId]:
+def duplicate_tenant(env: NeonEnv, template_tenant: TenantId, ncopies: int) -> list[TenantId]:
     assert isinstance(env.pageserver_remote_storage, LocalFsStorage)

     def work(tenant_id):
         duplicate_one_tenant(env, template_tenant, tenant_id)

-    new_tenants: List[TenantId] = [TenantId.generate() for _ in range(0, ncopies)]
+    new_tenants: list[TenantId] = [TenantId.generate() for _ in range(0, ncopies)]
     with concurrent.futures.ThreadPoolExecutor(max_workers=8) as executor:
         executor.map(work, new_tenants)
     return new_tenants
@@ -79,7 +84,7 @@ def local_layer_name_from_remote_name(remote_name: str) -> str:


 def copy_all_remote_layer_files_to_local_tenant_dir(
-    env: NeonEnv, tenant_timelines: List[Tuple[TenantId, TimelineId]]
+    env: NeonEnv, tenant_timelines: list[tuple[TenantId, TimelineId]]
 ):
     remote_storage = env.pageserver_remote_storage
     assert isinstance(remote_storage, LocalFsStorage)
@@ -1,5 +1,7 @@
+from __future__ import annotations
+
 import time
-from typing import Any, Dict, List, Optional, Tuple, Union
+from typing import TYPE_CHECKING

 from mypy_boto3_s3.type_defs import (
     DeleteObjectOutputTypeDef,
@@ -14,6 +16,9 @@ from fixtures.pageserver.http import PageserverApiException, PageserverHttpClien
 from fixtures.remote_storage import RemoteStorage, RemoteStorageKind, S3Storage
 from fixtures.utils import wait_until

+if TYPE_CHECKING:
+    from typing import Any, Optional, Union
+

 def assert_tenant_state(
     pageserver_http: PageserverHttpClient,
@@ -66,7 +71,7 @@ def wait_for_upload(
     )


-def _tenant_in_expected_state(tenant_info: Dict[str, Any], expected_state: str):
+def _tenant_in_expected_state(tenant_info: dict[str, Any], expected_state: str):
     if tenant_info["state"]["slug"] == expected_state:
         return True
     if tenant_info["state"]["slug"] == "Broken":
@@ -80,7 +85,7 @@ def wait_until_tenant_state(
     expected_state: str,
     iterations: int,
     period: float = 1.0,
-) -> Dict[str, Any]:
+) -> dict[str, Any]:
     """
     Does not use `wait_until` for debugging purposes
     """
@@ -136,7 +141,7 @@ def wait_until_timeline_state(
     expected_state: str,
     iterations: int,
     period: float = 1.0,
-) -> Dict[str, Any]:
+) -> dict[str, Any]:
     """
     Does not use `wait_until` for debugging purposes
     """
@@ -147,7 +152,7 @@ def wait_until_timeline_state(
         if isinstance(timeline["state"], str):
             if timeline["state"] == expected_state:
                 return timeline
-        elif isinstance(timeline, Dict):
+        elif isinstance(timeline, dict):
             if timeline["state"].get(expected_state):
                 return timeline

@@ -235,7 +240,7 @@ def wait_for_upload_queue_empty(

     # this is `started left join finished`; if match, subtracting start from finished, resulting in queue depth
     remaining_labels = ["shard_id", "file_kind", "op_kind"]
-    tl: List[Tuple[Any, float]] = []
+    tl: list[tuple[Any, float]] = []
     for s in started:
         found = False
         for f in finished:
@@ -302,7 +307,7 @@ def assert_prefix_empty(
     assert remote_storage is not None
     response = list_prefix(remote_storage, prefix)
     keys = response["KeyCount"]
-    objects: List[ObjectTypeDef] = response.get("Contents", [])
+    objects: list[ObjectTypeDef] = response.get("Contents", [])
     common_prefixes = response.get("CommonPrefixes", [])

     is_mock_s3 = isinstance(remote_storage, S3Storage) and not remote_storage.cleanup
@@ -430,7 +435,7 @@ def enable_remote_storage_versioning(
     return response


-def many_small_layers_tenant_config() -> Dict[str, Any]:
+def many_small_layers_tenant_config() -> dict[str, Any]:
     """
     Create a new dict to avoid issues with deleting from the global value.
     In python, the global is mutable.
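Notice that `Optional` and `Union` are re-imported under `TYPE_CHECKING` rather than being rewritten to PEP 604 `X | None` unions. That is a conservative choice: `X | Y` only works at runtime on Python 3.10+, so a suite that still runs on older interpreters can use the new syntax in annotations (where it is never evaluated) but not in `isinstance()` checks or other runtime expressions. A tiny illustration of the boundary (example code, not from this repository):

    from __future__ import annotations


    def first_or_none(items: list[str]) -> str | None:
        # Fine on Python 3.8+: this annotation is never evaluated at runtime.
        return items[0] if items else None


    # Evaluating a union object, by contrast, needs Python 3.10+:
    #     isinstance("x", str | None)  # TypeError on 3.9 and older
    print(first_or_none(["a", "b"]))  # a
    print(first_or_none([]))          # None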
@@ -1,5 +1,7 @@
+from __future__ import annotations
+
 import os
-from typing import Any, Dict, Optional
+from typing import TYPE_CHECKING

 import allure
 import pytest
@@ -9,6 +11,10 @@ from _pytest.python import Metafunc
 from fixtures.pg_version import PgVersion
 from fixtures.utils import AuxFileStore

+if TYPE_CHECKING:
+    from typing import Any, Optional
+
+
 """
 Dynamically parametrize tests by different parameters
 """
@@ -44,7 +50,7 @@ def pageserver_aux_file_policy() -> Optional[AuxFileStore]:
     return None


-def get_pageserver_default_tenant_config_compaction_algorithm() -> Optional[Dict[str, Any]]:
+def get_pageserver_default_tenant_config_compaction_algorithm() -> Optional[dict[str, Any]]:
     toml_table = os.getenv("PAGESERVER_DEFAULT_TENANT_CONFIG_COMPACTION_ALGORITHM")
     if toml_table is None:
         return None
@@ -54,7 +60,7 @@ def get_pageserver_default_tenant_config_compaction_algorithm() -> Optional[Dict


 @pytest.fixture(scope="function", autouse=True)
-def pageserver_default_tenant_config_compaction_algorithm() -> Optional[Dict[str, Any]]:
+def pageserver_default_tenant_config_compaction_algorithm() -> Optional[dict[str, Any]]:
     return get_pageserver_default_tenant_config_compaction_algorithm()

@@ -1,15 +1,16 @@
+from __future__ import annotations
+
 from functools import cached_property
-from typing import List

 import pytest


 class PgStatTable:
     table: str
-    columns: List[str]
+    columns: list[str]
     additional_query: str

-    def __init__(self, table: str, columns: List[str], filter_query: str = ""):
+    def __init__(self, table: str, columns: list[str], filter_query: str = ""):
         self.table = table
         self.columns = columns
         self.additional_query = filter_query
@@ -20,7 +21,7 @@ class PgStatTable:


 @pytest.fixture(scope="function")
-def pg_stats_rw() -> List[PgStatTable]:
+def pg_stats_rw() -> list[PgStatTable]:
     return [
         PgStatTable(
             "pg_stat_database",
@@ -31,7 +32,7 @@ def pg_stats_rw() -> List[PgStatTable]:


 @pytest.fixture(scope="function")
-def pg_stats_ro() -> List[PgStatTable]:
+def pg_stats_ro() -> list[PgStatTable]:
     return [
         PgStatTable(
             "pg_stat_database", ["tup_returned", "tup_fetched"], "WHERE datname='postgres'"
@@ -40,7 +41,7 @@ def pg_stats_ro() -> List[PgStatTable]:


 @pytest.fixture(scope="function")
-def pg_stats_wo() -> List[PgStatTable]:
+def pg_stats_wo() -> list[PgStatTable]:
     return [
         PgStatTable(
             "pg_stat_database",
@@ -51,7 +52,7 @@ def pg_stats_wo() -> List[PgStatTable]:


 @pytest.fixture(scope="function")
-def pg_stats_wal() -> List[PgStatTable]:
+def pg_stats_wal() -> list[PgStatTable]:
     return [
         PgStatTable(
             "pg_stat_wal",
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import enum
 import os
 from typing import Optional
@@ -36,7 +38,7 @@ class PgVersion(str, enum.Enum):
         return f"v{self.value}"

     @classmethod
-    def _missing_(cls, value) -> Optional["PgVersion"]:
+    def _missing_(cls, value) -> Optional[PgVersion]:
         known_values = {v.value for _, v in cls.__members__.items()}

         # Allow passing version as a string with "v" prefix (e.g. "v14")
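The `Optional["PgVersion"]` → `Optional[PgVersion]` change is another consequence of postponed annotations: since every annotation is now a string, forward and self references no longer need manual quoting (ruff's UP037). A standalone sketch with a hypothetical enum:

    from __future__ import annotations

    import enum
    from typing import Optional


    class Version(str, enum.Enum):
        V14 = "14"
        V15 = "15"

        @classmethod
        def parse(cls, value: str) -> Optional[Version]:
            # Without the future import this would need quoting ("Version"),
            # because the class is not yet bound while its body is evaluated.
            return cls.__members__.get(f"V{value}")


    print(Version.parse("14") is Version.V14)  # True
    print(Version.parse("99"))  # None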
@@ -1,10 +1,15 @@
+from __future__ import annotations
+
 import re
 import socket
 from contextlib import closing
-from typing import Dict, Union
+from typing import TYPE_CHECKING

 from fixtures.log_helper import log

+if TYPE_CHECKING:
+    from typing import Union
+

 def can_bind(host: str, port: int) -> bool:
     """
@@ -24,7 +29,7 @@ def can_bind(host: str, port: int) -> bool:
         sock.bind((host, port))
         sock.listen()
         return True
-    except socket.error:
+    except OSError:
         log.info(f"Port {port} is in use, skipping")
         return False
     finally:
@@ -34,7 +39,7 @@ def can_bind(host: str, port: int) -> bool:
 class PortDistributor:
     def __init__(self, base_port: int, port_number: int):
         self.iterator = iter(range(base_port, base_port + port_number))
-        self.port_map: Dict[int, int] = {}
+        self.port_map: dict[int, int] = {}

     def get_port(self) -> int:
         for port in self.iterator:
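The `except socket.error:` → `except OSError:` rewrite (UP024) is purely cosmetic: since Python 3.3, `socket.error` is an alias of `OSError`, so the caught exceptions are identical. A condensed, runnable sketch of the `can_bind` helper above (simplified, without the logging):

    import socket
    from contextlib import closing

    assert socket.error is OSError  # alias since Python 3.3


    def can_bind(host: str, port: int) -> bool:
        with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as sock:
            try:
                sock.bind((host, port))
                sock.listen()
                return True
            except OSError:  # formerly `except socket.error:`
                return False


    print(can_bind("127.0.0.1", 0))  # True: port 0 lets the OS pick a free port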
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import enum
 import hashlib
 import json
@@ -6,7 +8,7 @@ import re
 import subprocess
 from dataclasses import dataclass
 from pathlib import Path
-from typing import Any, Dict, List, Optional, Union
+from typing import TYPE_CHECKING, Union

 import boto3
 import toml
@@ -16,6 +18,10 @@ from fixtures.common_types import TenantId, TenantShardId, TimelineId
 from fixtures.log_helper import log
 from fixtures.pageserver.common_types import IndexPartDump

+if TYPE_CHECKING:
+    from typing import Any, Optional
+
+
 TIMELINE_INDEX_PART_FILE_NAME = "index_part.json"
 TENANT_HEATMAP_FILE_NAME = "heatmap-v1.json"

@@ -142,7 +148,7 @@ class LocalFsStorage:
         with self.heatmap_path(tenant_id).open("r") as f:
             return json.load(f)

-    def to_toml_dict(self) -> Dict[str, Any]:
+    def to_toml_dict(self) -> dict[str, Any]:
         return {
             "local_path": str(self.root),
         }
@@ -175,7 +181,7 @@ class S3Storage:
     """formatting deserialized with humantime crate, for example "1s"."""
     custom_timeout: Optional[str] = None

-    def access_env_vars(self) -> Dict[str, str]:
+    def access_env_vars(self) -> dict[str, str]:
         if self.aws_profile is not None:
             env = {
                 "AWS_PROFILE": self.aws_profile,
@@ -204,7 +210,7 @@ class S3Storage:
             }
         )

-    def to_toml_dict(self) -> Dict[str, Any]:
+    def to_toml_dict(self) -> dict[str, Any]:
         rv = {
             "bucket_name": self.bucket_name,
             "bucket_region": self.bucket_region,
@@ -279,7 +285,7 @@ class S3Storage:
     ) -> str:
         return f"{self.tenant_path(tenant_id)}/timelines/{timeline_id}"

-    def get_latest_index_key(self, index_keys: List[str]) -> str:
+    def get_latest_index_key(self, index_keys: list[str]) -> str:
         """
         Gets the latest index file key.

@@ -419,7 +425,7 @@ class RemoteStorageKind(str, enum.Enum):
     )


-def available_remote_storages() -> List[RemoteStorageKind]:
+def available_remote_storages() -> list[RemoteStorageKind]:
     remote_storages = [RemoteStorageKind.LOCAL_FS, RemoteStorageKind.MOCK_S3]
     if os.getenv("ENABLE_REAL_S3_REMOTE_STORAGE") is not None:
         remote_storages.append(RemoteStorageKind.REAL_S3)
@@ -429,7 +435,7 @@ def available_remote_storages() -> List[RemoteStorageKind]:
     return remote_storages


-def available_s3_storages() -> List[RemoteStorageKind]:
+def available_s3_storages() -> list[RemoteStorageKind]:
     remote_storages = [RemoteStorageKind.MOCK_S3]
     if os.getenv("ENABLE_REAL_S3_REMOTE_STORAGE") is not None:
         remote_storages.append(RemoteStorageKind.REAL_S3)
@@ -459,7 +465,7 @@ def default_remote_storage() -> RemoteStorageKind:
     return RemoteStorageKind.LOCAL_FS


-def remote_storage_to_toml_dict(remote_storage: RemoteStorage) -> Dict[str, Any]:
+def remote_storage_to_toml_dict(remote_storage: RemoteStorage) -> dict[str, Any]:
     if not isinstance(remote_storage, (LocalFsStorage, S3Storage)):
         raise Exception("invalid remote storage type")

@@ -0,0 +1 @@
+from __future__ import annotations
@@ -1,6 +1,8 @@
+from __future__ import annotations
+
 import json
 from dataclasses import dataclass
-from typing import Any, Dict, List, Optional, Tuple, Union
+from typing import TYPE_CHECKING

 import pytest
 import requests
@@ -10,6 +12,9 @@ from fixtures.log_helper import log
 from fixtures.metrics import Metrics, MetricsGetter, parse_metrics
 from fixtures.utils import wait_until

+if TYPE_CHECKING:
+    from typing import Any, Optional, Union
+

 # Walreceiver as returned by sk's timeline status endpoint.
 @dataclass
@@ -29,7 +34,7 @@ class SafekeeperTimelineStatus:
     backup_lsn: Lsn
     peer_horizon_lsn: Lsn
     remote_consistent_lsn: Lsn
-    walreceivers: List[Walreceiver]
+    walreceivers: list[Walreceiver]


 class SafekeeperMetrics(Metrics):
@@ -57,7 +62,7 @@ class TermBumpResponse:
     current_term: int

     @classmethod
-    def from_json(cls, d: Dict[str, Any]) -> "TermBumpResponse":
+    def from_json(cls, d: dict[str, Any]) -> TermBumpResponse:
         return TermBumpResponse(
             previous_term=d["previous_term"],
             current_term=d["current_term"],
@@ -93,7 +98,7 @@ class SafekeeperHttpClient(requests.Session, MetricsGetter):
         if not self.is_testing_enabled:
             pytest.skip("safekeeper was built without 'testing' feature")

-    def configure_failpoints(self, config_strings: Union[Tuple[str, str], List[Tuple[str, str]]]):
+    def configure_failpoints(self, config_strings: Union[tuple[str, str], list[tuple[str, str]]]):
         self.is_testing_enabled_or_skip()

         if isinstance(config_strings, tuple):
@@ -113,14 +118,14 @@ class SafekeeperHttpClient(requests.Session, MetricsGetter):
         assert res_json is None
         return res_json

-    def tenant_delete_force(self, tenant_id: TenantId) -> Dict[Any, Any]:
+    def tenant_delete_force(self, tenant_id: TenantId) -> dict[Any, Any]:
         res = self.delete(f"http://localhost:{self.port}/v1/tenant/{tenant_id}")
         res.raise_for_status()
         res_json = res.json()
         assert isinstance(res_json, dict)
         return res_json

-    def timeline_list(self) -> List[TenantTimelineId]:
+    def timeline_list(self) -> list[TenantTimelineId]:
         res = self.get(f"http://localhost:{self.port}/v1/tenant/timeline")
         res.raise_for_status()
         resj = res.json()
@@ -178,7 +183,7 @@ class SafekeeperHttpClient(requests.Session, MetricsGetter):
     # only_local doesn't remove segments in the remote storage.
     def timeline_delete(
         self, tenant_id: TenantId, timeline_id: TimelineId, only_local: bool = False
-    ) -> Dict[Any, Any]:
+    ) -> dict[Any, Any]:
         res = self.delete(
             f"http://localhost:{self.port}/v1/tenant/{tenant_id}/timeline/{timeline_id}",
             params={
@@ -190,7 +195,7 @@ class SafekeeperHttpClient(requests.Session, MetricsGetter):
         assert isinstance(res_json, dict)
         return res_json

-    def debug_dump(self, params: Optional[Dict[str, str]] = None) -> Dict[str, Any]:
+    def debug_dump(self, params: Optional[dict[str, str]] = None) -> dict[str, Any]:
         params = params or {}
         res = self.get(f"http://localhost:{self.port}/v1/debug_dump", params=params)
         res.raise_for_status()
@@ -199,7 +204,7 @@ class SafekeeperHttpClient(requests.Session, MetricsGetter):
         return res_json

     def debug_dump_timeline(
-        self, timeline_id: TimelineId, params: Optional[Dict[str, str]] = None
+        self, timeline_id: TimelineId, params: Optional[dict[str, str]] = None
     ) -> Any:
         params = params or {}
         params["timeline_id"] = str(timeline_id)
@@ -214,14 +219,14 @@ class SafekeeperHttpClient(requests.Session, MetricsGetter):
         dump = self.debug_dump_timeline(timeline_id, {"dump_control_file": "true"})
         return dump["control_file"]["eviction_state"]

-    def pull_timeline(self, body: Dict[str, Any]) -> Dict[str, Any]:
+    def pull_timeline(self, body: dict[str, Any]) -> dict[str, Any]:
         res = self.post(f"http://localhost:{self.port}/v1/pull_timeline", json=body)
         res.raise_for_status()
         res_json = res.json()
         assert isinstance(res_json, dict)
         return res_json

-    def copy_timeline(self, tenant_id: TenantId, timeline_id: TimelineId, body: Dict[str, Any]):
+    def copy_timeline(self, tenant_id: TenantId, timeline_id: TimelineId, body: dict[str, Any]):
         res = self.post(
             f"http://localhost:{self.port}/v1/tenant/{tenant_id}/timeline/{timeline_id}/copy",
             json=body,
@@ -232,8 +237,8 @@ class SafekeeperHttpClient(requests.Session, MetricsGetter):
         self,
         tenant_id: TenantId,
         timeline_id: TimelineId,
-        patch: Dict[str, Any],
-    ) -> Dict[str, Any]:
+        patch: dict[str, Any],
+    ) -> dict[str, Any]:
         res = self.patch(
             f"http://localhost:{self.port}/v1/tenant/{tenant_id}/timeline/{timeline_id}/control_file",
             json={
@@ -255,7 +260,7 @@ class SafekeeperHttpClient(requests.Session, MetricsGetter):

     def timeline_digest(
         self, tenant_id: TenantId, timeline_id: TimelineId, from_lsn: Lsn, until_lsn: Lsn
-    ) -> Dict[str, Any]:
+    ) -> dict[str, Any]:
         res = self.get(
             f"http://localhost:{self.port}/v1/tenant/{tenant_id}/timeline/{timeline_id}/digest",
             params={
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 from fixtures.common_types import TenantId, TimelineId
 from fixtures.log_helper import log
 from fixtures.safekeeper.http import SafekeeperHttpClient
@@ -1,9 +1,15 @@
-from typing import Any, List
+from __future__ import annotations
+
+from typing import TYPE_CHECKING

 import pytest
 from _pytest.config import Config
 from _pytest.config.argparsing import Parser

+if TYPE_CHECKING:
+    from typing import Any
+
+
 """
 This plugin allows tests to be marked as slow using pytest.mark.slow. By default slow
 tests are excluded. They need to be specifically requested with the --runslow flag in
@@ -21,7 +27,7 @@ def pytest_configure(config: Config):
     config.addinivalue_line("markers", "slow: mark test as slow to run")


-def pytest_collection_modifyitems(config: Config, items: List[Any]):
+def pytest_collection_modifyitems(config: Config, items: list[Any]):
     if config.getoption("--runslow"):
         # --runslow given in cli: do not skip slow tests
         return
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import re
 from typing import Any, Optional

@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import contextlib
 import enum
 import json
@@ -7,22 +9,10 @@ import subprocess
 import tarfile
 import threading
 import time
+from collections.abc import Iterable
 from hashlib import sha256
 from pathlib import Path
-from typing import (
-    IO,
-    TYPE_CHECKING,
-    Any,
-    Callable,
-    Dict,
-    Iterable,
-    List,
-    Optional,
-    Set,
-    Tuple,
-    TypeVar,
-    Union,
-)
+from typing import TYPE_CHECKING, Any, Callable, TypeVar
 from urllib.parse import urlencode

 import allure
@@ -36,6 +26,12 @@ from fixtures.pageserver.common_types import (
 )

 if TYPE_CHECKING:
+    from typing import (
+        IO,
+        Optional,
+        Union,
+    )
+
     from fixtures.neon_fixtures import PgBin
 from fixtures.common_types import TimelineId

@@ -44,7 +40,7 @@ Fn = TypeVar("Fn", bound=Callable[..., Any])

 def subprocess_capture(
     capture_dir: Path,
-    cmd: List[str],
+    cmd: list[str],
     *,
     check=False,
     echo_stderr=False,
@@ -53,7 +49,7 @@ def subprocess_capture(
     timeout=None,
     with_command_header=True,
     **popen_kwargs: Any,
-) -> Tuple[str, Optional[str], int]:
+) -> tuple[str, Optional[str], int]:
     """Run a process and bifurcate its output to files and the `log` logger

     stderr and stdout are always captured in files. They are also optionally
@@ -98,7 +94,7 @@ def subprocess_capture(
                 first = False
                 # prefix the files with the command line so that we can
                 # later understand which file is for what command
-                self.out_file.write((f"# {' '.join(cmd)}\n\n").encode("utf-8"))
+                self.out_file.write((f"# {' '.join(cmd)}\n\n").encode())

             # Only bother decoding if we are going to do something more than stream to a file
             if self.echo or self.capture:
@@ -166,7 +162,7 @@ def global_counter() -> int:
     return _global_counter


-def print_gc_result(row: Dict[str, Any]):
+def print_gc_result(row: dict[str, Any]):
     log.info("GC duration {elapsed} ms".format_map(row))
     log.info(
         " total: {layers_total}, needed_by_cutoff {layers_needed_by_cutoff}, needed_by_pitr {layers_needed_by_pitr}"
@@ -304,7 +300,7 @@ def allure_add_grafana_links(host: str, timeline_id: TimelineId, start_ms: int,
         "proxy logs": f'{{neon_service="proxy-scram", neon_region="{region_id}"}}',
     }

-    params: Dict[str, Any] = {
+    params: dict[str, Any] = {
         "datasource": LOGS_STAGING_DATASOURCE_ID,
         "queries": [
             {
@@ -420,7 +416,7 @@ def assert_ge(a, b) -> None:
     assert a >= b


-def run_pg_bench_small(pg_bin: "PgBin", connstr: str):
+def run_pg_bench_small(pg_bin: PgBin, connstr: str):
     """
     Fast way to populate data.
     For more layers consider combining with these tenant settings:
@@ -465,7 +461,7 @@ def humantime_to_ms(humantime: str) -> float:
     return round(total_ms, 3)


-def scan_log_for_errors(input: Iterable[str], allowed_errors: List[str]) -> List[Tuple[int, str]]:
+def scan_log_for_errors(input: Iterable[str], allowed_errors: list[str]) -> list[tuple[int, str]]:
     # FIXME: this duplicates test_runner/fixtures/pageserver/allowed_errors.py
     error_or_warn = re.compile(r"\s(ERROR|WARN)")
     errors = []
@@ -515,7 +511,7 @@ class AuxFileStore(str, enum.Enum):
         return f"'aux-{self.value}'"


-def assert_pageserver_backups_equal(left: Path, right: Path, skip_files: Set[str]):
+def assert_pageserver_backups_equal(left: Path, right: Path, skip_files: set[str]):
     """
     This is essentially:

@@ -539,7 +535,7 @@ def assert_pageserver_backups_equal(left: Path, right: Path, skip_files: Set[str
             digest.update(buf)
         return digest.digest()

-    def build_hash_list(p: Path) -> List[Tuple[str, bytes]]:
+    def build_hash_list(p: Path) -> list[tuple[str, bytes]]:
         with tarfile.open(p) as f:
             matching_files = (info for info in f if info.isreg() and info.name not in skip_files)
             ret = list(
@@ -587,7 +583,7 @@ class PropagatingThread(threading.Thread):
             self.exc = e

     def join(self, timeout=None):
-        super(PropagatingThread, self).join(timeout)
+        super().join(timeout)
         if self.exc:
             raise self.exc
         return self.ret
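Two more mechanical rewrites land in this file: the zero-argument `super()` form (UP008), which inside the class body is equivalent to `super(PropagatingThread, self)`, and dropping the redundant `"utf-8"` argument to `str.encode()` (UP012), since UTF-8 is already the default. A condensed, runnable sketch of the thread subclass, leaning on `threading.Thread`'s private `_target` attributes the way such subclasses typically do:

    import threading


    class PropagatingThread(threading.Thread):
        """Thread that captures the target's result or exception for join()."""

        def run(self):
            self.exc = None
            try:
                self.ret = self._target(*self._args, **self._kwargs)
            except BaseException as e:
                self.exc = e

        def join(self, timeout=None):
            super().join(timeout)  # zero-argument super(): same lookup, less noise
            if self.exc:
                raise self.exc
            return self.ret


    t = PropagatingThread(target=lambda: "ok")
    t.start()
    print(t.join())  # ok
    print("# header\n".encode())  # b'# header\n' -- .encode("utf-8") spelled shorter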
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import threading
 from typing import Any, Optional

@@ -2,6 +2,8 @@
 Test the logical replication in Neon with ClickHouse as a consumer
 """

+from __future__ import annotations
+
 import hashlib
 import os
 import time
@@ -2,6 +2,8 @@
 Test the logical replication in Neon with Debezium as a consumer
 """

+from __future__ import annotations
+
 import json
 import os
 import time
@@ -0,0 +1 @@
+from __future__ import annotations
@@ -0,0 +1 @@
+from __future__ import annotations
@@ -6,3 +6,5 @@ but then debug a performance problem interactively.
 It's kind of an abuse of the test framework, but, it's our only tool right
 now to automate a complex test bench setup.
 """
+
+from __future__ import annotations
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import os
 import pdb

@@ -8,3 +8,5 @@ instead of benchmarking the full stack.
 See https://github.com/neondatabase/neon/issues/5771
 for the context in which this was developed.
 """
+
+from __future__ import annotations
@@ -1,7 +1,9 @@
+from __future__ import annotations
+
 import asyncio
 import json
 from pathlib import Path
-from typing import Any, Dict, Tuple
+from typing import TYPE_CHECKING

 import pytest
 from fixtures.benchmark_fixture import MetricReport, NeonBenchmarker
@@ -13,6 +15,9 @@ from performance.pageserver.util import (
     setup_pageserver_with_tenants,
 )

+if TYPE_CHECKING:
+    from typing import Any
+

 @pytest.mark.parametrize("duration", [30])
 @pytest.mark.parametrize("pgbench_scale", [get_scale_for_db(200)])
@@ -29,7 +34,7 @@ def test_basebackup_with_high_slru_count(
     def record(metric, **kwargs):
         zenbenchmark.record(metric_name=f"pageserver_basebackup.{metric}", **kwargs)

-    params: Dict[str, Tuple[Any, Dict[str, Any]]] = {}
+    params: dict[str, tuple[Any, dict[str, Any]]] = {}

     # params from fixtures
     params.update(
@@ -157,7 +162,7 @@ def run_benchmark(env: NeonEnv, pg_bin: PgBin, record, duration_secs: int):
     results_path = Path(basepath + ".stdout")
     log.info(f"Benchmark results at: {results_path}")

-    with open(results_path, "r") as f:
+    with open(results_path) as f:
         results = json.load(f)
     log.info(f"Results:\n{json.dumps(results, sort_keys=True, indent=2)}")

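`open(results_path, "r")` → `open(results_path)` is UP015: `"r"` is `open()`'s default mode, so the argument is redundant and the call still opens the file in text mode with the default encoding. A small runnable sketch (the file name here is made up for the example):

    import json
    import tempfile
    from pathlib import Path

    results_path = Path(tempfile.mkdtemp()) / "bench.stdout"
    results_path.write_text(json.dumps({"tps": 1234}))

    # open(p) is exactly open(p, "r"): text mode, default encoding.
    with open(results_path) as f:
        results = json.load(f)

    print(json.dumps(results, sort_keys=True, indent=2))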
@@ -1,6 +1,8 @@
+from __future__ import annotations
+
 import json
 from pathlib import Path
-from typing import Any, Dict, Tuple
+from typing import TYPE_CHECKING

 import pytest
 from fixtures.benchmark_fixture import MetricReport, NeonBenchmarker
@@ -14,6 +16,9 @@ from fixtures.neon_fixtures import (
 from fixtures.remote_storage import s3_storage
 from fixtures.utils import humantime_to_ms

+if TYPE_CHECKING:
+    from typing import Any
+

 @pytest.mark.parametrize("duration", [30])
 @pytest.mark.parametrize("io_engine", ["tokio-epoll-uring", "std-fs"])
@@ -30,7 +35,7 @@ def test_download_churn(
     def record(metric, **kwargs):
         zenbenchmark.record(metric_name=f"pageserver_ondemand_download_churn.{metric}", **kwargs)

-    params: Dict[str, Tuple[Any, Dict[str, Any]]] = {}
+    params: dict[str, tuple[Any, dict[str, Any]]] = {}

     # params from fixtures
     params.update(
@@ -134,7 +139,7 @@ def run_benchmark(
     results_path = Path(basepath + ".stdout")
     log.info(f"Benchmark results at: {results_path}")

-    with open(results_path, "r") as f:
+    with open(results_path) as f:
         results = json.load(f)
     log.info(f"Results:\n{json.dumps(results, sort_keys=True, indent=2)}")

@@ -1,7 +1,9 @@
+from __future__ import annotations
+
 import json
 import os
 from pathlib import Path
-from typing import Any, Dict, Tuple
+from typing import TYPE_CHECKING

 import pytest
 from fixtures.benchmark_fixture import MetricReport, NeonBenchmarker
@@ -18,6 +20,10 @@ from performance.pageserver.util import (
     setup_pageserver_with_tenants,
 )

+if TYPE_CHECKING:
+    from typing import Any
+
+
 # The following tests use pagebench "getpage at latest LSN" to characterize the throughput of the pageserver.
 # originally there was a single test named `test_pageserver_max_throughput_getpage_at_latest_lsn``
 # so you still see some references to this name in the code.
@@ -92,7 +98,7 @@ def setup_and_run_pagebench_benchmark(
             metric_name=f"pageserver_max_throughput_getpage_at_latest_lsn.{metric}", **kwargs
         )

-    params: Dict[str, Tuple[Any, Dict[str, Any]]] = {}
+    params: dict[str, tuple[Any, dict[str, Any]]] = {}

     # params from fixtures
     params.update(
@@ -225,7 +231,7 @@ def run_pagebench_benchmark(
     results_path = Path(basepath + ".stdout")
     log.info(f"Benchmark results at: {results_path}")

-    with open(results_path, "r") as f:
+    with open(results_path) as f:
         results = json.load(f)
     log.info(f"Results:\n{json.dumps(results, sort_keys=True, indent=2)}")
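The new `if TYPE_CHECKING:` blocks keep typing-only imports out of the runtime import path: `TYPE_CHECKING` is `False` when the module executes and `True` only while a type checker analyzes it, and `from __future__ import annotations` (PEP 563) turns annotations into strings, so the guarded names are never needed at runtime. A minimal sketch of the pattern, with illustrative names:

    from __future__ import annotations

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        # Seen by mypy/pyright only; never imported at runtime.
        from typing import Any


    def record_params(params: dict[str, Any]) -> None:
        # The annotation above is stored as a string (PEP 563), so it is
        # valid even though Any was not imported at runtime.
        print(sorted(params))


    record_params({"scale": 10, "duration": 45})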
@@ -2,7 +2,9 @@
 Utilities used by all code in this sub-directory
 """

-from typing import Any, Callable, Dict, Optional, Tuple
+from __future__ import annotations
+
+from typing import TYPE_CHECKING

 import fixtures.pageserver.many_tenants as many_tenants
 from fixtures.common_types import TenantId, TimelineId
@@ -13,6 +15,9 @@ from fixtures.neon_fixtures import (
 )
 from fixtures.pageserver.utils import wait_until_all_tenants_state

+if TYPE_CHECKING:
+    from typing import Any, Callable, Optional
+

 def ensure_pageserver_ready_for_benchmarking(env: NeonEnv, n_tenants: int):
     """
@@ -40,7 +45,7 @@ def setup_pageserver_with_tenants(
     neon_env_builder: NeonEnvBuilder,
     name: str,
     n_tenants: int,
-    setup: Callable[[NeonEnv], Tuple[TenantId, TimelineId, Dict[str, Any]]],
+    setup: Callable[[NeonEnv], tuple[TenantId, TimelineId, dict[str, Any]]],
     timeout_in_seconds: Optional[int] = None,
 ) -> NeonEnv:
     """
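The wholesale `Dict`/`List`/`Tuple` to `dict`/`list`/`tuple` rewrite is pyupgrade's PEP 585 rule (UP006). Subscripting the builtin types is only legal at runtime from Python 3.9; in annotation position the future import makes the new spelling safe regardless, because the annotation is never evaluated. A short sketch (function and data are illustrative):

    from __future__ import annotations  # defers evaluation of annotations


    def slowest(durations: dict[str, float], n: int) -> list[tuple[str, float]]:
        # Return the n largest (name, seconds) pairs.
        return sorted(durations.items(), key=lambda kv: kv[1], reverse=True)[:n]


    print(slowest({"restart": 2.5, "flush": 0.3, "compact": 1.1}, 2))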
@@ -1,10 +1,12 @@
+from __future__ import annotations
+
 import sys
 from pathlib import Path

-import numpy as np
-import pandas as pd
+import numpy as np  # type: ignore [import]
+import pandas as pd  # type: ignore [import]
 import psycopg2
-from pgvector.psycopg2 import register_vector
+from pgvector.psycopg2 import register_vector  # type: ignore [import]
 from psycopg2.extras import execute_values
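The `# type: ignore [import]` comments suppress mypy's missing-stubs error for imports it cannot type-check; presumably numpy, pandas, and pgvector lack usable type information in the pre-commit environment (an assumption, the diff does not say why). The shape of the suppression:

    # Silences mypy's `import` error code for an untyped third-party module,
    # while leaving all other checking for this file intact.
    import pandas as pd  # type: ignore [import]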
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import random
 import re
 import statistics
@@ -5,7 +7,6 @@ import threading
 import time
 import timeit
 from contextlib import closing
-from typing import List

 import pytest
 from fixtures.benchmark_fixture import MetricReport, NeonBenchmarker
@@ -18,7 +19,7 @@ from fixtures.utils import wait_until
 from prometheus_client.samples import Sample


-def _record_branch_creation_durations(neon_compare: NeonCompare, durs: List[float]):
+def _record_branch_creation_durations(neon_compare: NeonCompare, durs: list[float]):
     neon_compare.zenbenchmark.record(
         "branch_creation_duration_max", max(durs), "s", MetricReport.LOWER_IS_BETTER
     )
@@ -66,7 +67,7 @@ def test_branch_creation_heavy_write(neon_compare: NeonCompare, n_branches: int)

     env.create_branch("b0", tenant_id=tenant)

-    threads: List[threading.Thread] = []
+    threads: list[threading.Thread] = []
     threads.append(threading.Thread(target=run_pgbench, args=("b0",), daemon=True))
     threads[-1].start()

@@ -194,7 +195,7 @@ def wait_and_record_startup_metrics(
         ]
     )

-    def metrics_are_filled() -> List[Sample]:
+    def metrics_are_filled() -> list[Sample]:
         m = client.get_metrics()
         samples = m.query_all("pageserver_startup_duration_seconds")
         # we should not have duplicate labels
@@ -1,6 +1,7 @@
+from __future__ import annotations
+
 import timeit
 from pathlib import Path
-from typing import List

 from fixtures.benchmark_fixture import PgBenchRunResult
 from fixtures.compare_fixtures import NeonCompare
@@ -22,7 +23,7 @@ def test_compare_child_and_root_pgbench_perf(neon_compare: NeonCompare):
     env = neon_compare.env
     pg_bin = neon_compare.pg_bin

-    def run_pgbench_on_branch(branch: str, cmd: List[str]):
+    def run_pgbench_on_branch(branch: str, cmd: list[str]):
         run_start_timestamp = utc_now_timestamp()
         t0 = timeit.default_timer()
         out = pg_bin.run_capture(
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 from contextlib import closing

 from fixtures.benchmark_fixture import MetricReport

@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import timeit

 import pytest

@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import pytest
 from fixtures.neon_fixtures import NeonEnvBuilder, wait_for_last_flush_lsn

@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 from contextlib import closing

 import pytest

@@ -1,7 +1,8 @@
+from __future__ import annotations
+
 import os
 import threading
 import time
-from typing import List

 import pytest
 from fixtures.compare_fixtures import PgCompare
@@ -23,7 +24,7 @@ def test_compare_pg_stats_rw_with_pgbench_default(
     seed: int,
     scale: int,
     duration: int,
-    pg_stats_rw: List[PgStatTable],
+    pg_stats_rw: list[PgStatTable],
 ):
     env = neon_with_baseline
     # initialize pgbench
@@ -45,7 +46,7 @@ def test_compare_pg_stats_wo_with_pgbench_simple_update(
     seed: int,
     scale: int,
     duration: int,
-    pg_stats_wo: List[PgStatTable],
+    pg_stats_wo: list[PgStatTable],
 ):
     env = neon_with_baseline
     # initialize pgbench
@@ -67,7 +68,7 @@ def test_compare_pg_stats_ro_with_pgbench_select_only(
     seed: int,
     scale: int,
     duration: int,
-    pg_stats_ro: List[PgStatTable],
+    pg_stats_ro: list[PgStatTable],
 ):
     env = neon_with_baseline
     # initialize pgbench
@@ -89,7 +90,7 @@ def test_compare_pg_stats_wal_with_pgbench_default(
     seed: int,
     scale: int,
     duration: int,
-    pg_stats_wal: List[PgStatTable],
+    pg_stats_wal: list[PgStatTable],
 ):
     env = neon_with_baseline
     # initialize pgbench
@@ -106,7 +107,7 @@ def test_compare_pg_stats_wal_with_pgbench_default(
 @pytest.mark.parametrize("n_tables", [1, 10])
 @pytest.mark.parametrize("duration", get_durations_matrix(10))
 def test_compare_pg_stats_wo_with_heavy_write(
-    neon_with_baseline: PgCompare, n_tables: int, duration: int, pg_stats_wo: List[PgStatTable]
+    neon_with_baseline: PgCompare, n_tables: int, duration: int, pg_stats_wo: list[PgStatTable]
 ):
     env = neon_with_baseline
     with env.pg.connect().cursor() as cur:
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 from contextlib import closing
 from io import BufferedReader, RawIOBase
 from typing import Optional

@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 from contextlib import closing

 import pytest

@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import json

 import pytest

@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 from contextlib import closing

 from fixtures.compare_fixtures import PgCompare

@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 from contextlib import closing

 import pytest

@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 from contextlib import closing

 import pytest

@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import threading

 import pytest

@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import time

 from fixtures.neon_fixtures import NeonEnvBuilder, flush_ep_to_pageserver

@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import pytest
 import requests
 from fixtures.benchmark_fixture import MetricReport, NeonBenchmarker

@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import asyncio
 from io import BytesIO
@@ -1,7 +1,8 @@
+from __future__ import annotations
+
 import os
 from dataclasses import dataclass
 from pathlib import Path
-from typing import Dict, List, Tuple

 import pytest
 from _pytest.mark import ParameterSet
@@ -45,7 +46,7 @@ def test_clickbench_create_pg_stat_statements(remote_compare: RemoteCompare):
 #
 # Disable auto formatting for the list of queries so that it's easier to read
 # fmt: off
-QUERIES: Tuple[LabelledQuery, ...] = (
+QUERIES: tuple[LabelledQuery, ...] = (
     ### ClickBench queries:
     LabelledQuery("Q0", r"SELECT COUNT(*) FROM hits;"),
     LabelledQuery("Q1", r"SELECT COUNT(*) FROM hits WHERE AdvEngineID <> 0;"),
@@ -105,7 +106,7 @@ QUERIES: Tuple[LabelledQuery, ...] = (
 #
 # Disable auto formatting for the list of queries so that it's easier to read
 # fmt: off
-PGVECTOR_QUERIES: Tuple[LabelledQuery, ...] = (
+PGVECTOR_QUERIES: tuple[LabelledQuery, ...] = (
     LabelledQuery("PGVPREP", r"ALTER EXTENSION VECTOR UPDATE;"),
     LabelledQuery("PGV0", r"DROP TABLE IF EXISTS hnsw_test_table;"),
     LabelledQuery("PGV1", r"CREATE TABLE hnsw_test_table AS TABLE documents WITH NO DATA;"),
@@ -127,7 +128,7 @@ PGVECTOR_QUERIES: Tuple[LabelledQuery, ...] = (
 EXPLAIN_STRING: str = "EXPLAIN (ANALYZE, VERBOSE, BUFFERS, COSTS, SETTINGS, FORMAT JSON)"


-def get_scale() -> List[str]:
+def get_scale() -> list[str]:
     # We parametrize each tpc-h and clickbench test with scale
     # to distinguish them from each other, but don't really use it inside.
     # Databases are pre-created and passed through BENCHMARK_CONNSTR env variable.
@@ -147,7 +148,7 @@ def run_psql(
     options = f"-cstatement_timeout=0 {env.pg.default_options.get('options', '')}"
     connstr = env.pg.connstr(password=None, options=options)

-    environ: Dict[str, str] = {}
+    environ: dict[str, str] = {}
     if password is not None:
         environ["PGPASSWORD"] = password

@@ -185,7 +186,7 @@ def test_clickbench(query: LabelledQuery, remote_compare: RemoteCompare, scale:
     run_psql(remote_compare, query, times=3, explain=explain)


-def tpch_queuies() -> Tuple[ParameterSet, ...]:
+def tpch_queuies() -> tuple[ParameterSet, ...]:
     """
     A list of queries to run for the TPC-H benchmark.
     - querues in returning tuple are ordered by the query number
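The future import also covers module-level variable annotations such as `QUERIES: tuple[LabelledQuery, ...]`: only the annotation is deferred, while the tuple literal on the right-hand side is ordinary runtime code. A self-contained sketch; `LabelledQuery` is modeled here as a dataclass, which may not match its real definition in the test suite:

    from __future__ import annotations

    from dataclasses import dataclass


    @dataclass
    class LabelledQuery:  # stand-in for the fixture type used above
        label: str
        query: str


    # The annotation is deferred (PEP 563); the right-hand side still runs.
    QUERIES: tuple[LabelledQuery, ...] = (
        LabelledQuery("Q0", r"SELECT COUNT(*) FROM hits;"),
    )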
@@ -1,10 +1,11 @@
+from __future__ import annotations
+
 import calendar
 import enum
 import os
 import timeit
 from datetime import datetime
 from pathlib import Path
-from typing import Dict, List

 import pytest
 from fixtures.benchmark_fixture import MetricReport, PgBenchInitResult, PgBenchRunResult
@@ -26,7 +27,7 @@ def utc_now_timestamp() -> int:


 def init_pgbench(env: PgCompare, cmdline, password: None):
-    environ: Dict[str, str] = {}
+    environ: dict[str, str] = {}
     if password is not None:
         environ["PGPASSWORD"] = password

@@ -54,7 +55,7 @@ def init_pgbench(env: PgCompare, cmdline, password: None):


 def run_pgbench(env: PgCompare, prefix: str, cmdline, password: None):
-    environ: Dict[str, str] = {}
+    environ: dict[str, str] = {}
     if password is not None:
         environ["PGPASSWORD"] = password

@@ -177,7 +178,7 @@ def run_test_pgbench(env: PgCompare, scale: int, duration: int, workload_type: P
     env.report_size()


-def get_durations_matrix(default: int = 45) -> List[int]:
+def get_durations_matrix(default: int = 45) -> list[int]:
     durations = os.getenv("TEST_PG_BENCH_DURATIONS_MATRIX", default=str(default))
     rv = []
     for d in durations.split(","):
@@ -193,7 +194,7 @@ def get_durations_matrix(default: int = 45) -> List[int]:
     return rv


-def get_scales_matrix(default: int = 10) -> List[int]:
+def get_scales_matrix(default: int = 10) -> list[int]:
     scales = os.getenv("TEST_PG_BENCH_SCALES_MATRIX", default=str(default))
     rv = []
     for s in scales.split(","):
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import pytest
 from fixtures.compare_fixtures import PgCompare
@@ -18,7 +18,7 @@ from fixtures.neon_api import connection_parameters_to_env
 from fixtures.pg_version import PgVersion

 if TYPE_CHECKING:
-    from typing import Any, List, Optional
+    from typing import Any, Optional

     from fixtures.benchmark_fixture import NeonBenchmarker
     from fixtures.neon_api import NeonAPI
@@ -233,7 +233,7 @@ def test_replication_start_stop(
         ],
         env=master_env,
     )
-    replica_pgbench: List[Optional[subprocess.Popen[Any]]] = [None for _ in range(num_replicas)]
+    replica_pgbench: list[Optional[subprocess.Popen[Any]]] = [None for _ in range(num_replicas)]

     # Use the bits of iconfig to tell us which configuration we are on. For example
     # a iconfig of 2 is 10 in binary, indicating replica 0 is suspended and replica 1 is
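Note what is left alone here: `Optional[...]` survives in `replica_pgbench`, consistent with the `keep-runtime-typing = true` stanza, which stops ruff from rewriting annotations into forms that need a newer interpreter at runtime; chiefly PEP 604's `X | None` (UP007), native only from Python 3.10. A sketch of the before/after once 3.10 is the minimum:

    from __future__ import annotations

    import subprocess
    from typing import Any, Optional

    # Kept today because the minimum interpreter is below 3.10:
    replica_pgbench: list[Optional[subprocess.Popen[Any]]] = [None] * 4

    # After requiring Python 3.10, UP007 can rewrite it to PEP 604 syntax:
    # replica_pgbench: list[subprocess.Popen[Any] | None] = [None] * 4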
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import random
 from contextlib import closing
@@ -1,5 +1,8 @@
 # Test sequential scan speed
 #
+
+from __future__ import annotations
+
 from contextlib import closing

 import pytest
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import concurrent.futures
 import re
 from pathlib import Path

@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import requests
 from fixtures.benchmark_fixture import MetricReport, NeonBenchmarker
 from fixtures.neon_fixtures import NeonEnvBuilder

@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import concurrent.futures
 import random
 import time
@@ -1,8 +1,11 @@
+from __future__ import annotations
+
 import statistics
 import threading
 import time
 import timeit
-from typing import Any, Callable, Generator, List
+from collections.abc import Generator
+from typing import TYPE_CHECKING

 import pytest
 from fixtures.benchmark_fixture import MetricReport, NeonBenchmarker
@@ -13,6 +16,9 @@ from fixtures.neon_fixtures import NeonEnvBuilder, PgBin, flush_ep_to_pageserver

 from performance.test_perf_pgbench import get_durations_matrix, get_scales_matrix

+if TYPE_CHECKING:
+    from typing import Any, Callable
+

 @pytest.fixture(params=["vanilla", "neon_off", "neon_on"])
 # This fixture constructs multiple `PgCompare` interfaces using a builder pattern.
@@ -202,7 +208,7 @@ def record_lsn_write_lag(env: PgCompare, run_cond: Callable[[], bool], pool_inte
     if not isinstance(env, NeonCompare):
         return

-    lsn_write_lags: List[Any] = []
+    lsn_write_lags: list[Any] = []
     last_received_lsn = Lsn(0)
     last_pg_flush_lsn = Lsn(0)
@@ -10,6 +10,9 @@
 # in LSN order, writing the oldest layer first. That creates a new 10 MB image
 # layer to be created for each of those small updates. This is the Write
 # Amplification problem at its finest.
+
+from __future__ import annotations
+
 from contextlib import closing

 from fixtures.compare_fixtures import PgCompare
@@ -1,5 +1,7 @@
 #! /usr/bin/env python3

+from __future__ import annotations
+
 import asyncio
 import os

@@ -1,5 +1,7 @@
 #! /usr/bin/env python3

+from __future__ import annotations
+
 import os

 import pg8000.dbapi
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import shutil
 from pathlib import Path
 from tempfile import NamedTemporaryFile

@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 from fixtures.common_types import TimelineId
 from fixtures.log_helper import log
 from fixtures.neon_fixtures import NeonEnvBuilder
@@ -1,5 +1,8 @@
+from __future__ import annotations
+
+from collections.abc import Generator
 from dataclasses import dataclass
-from typing import Generator, Optional
+from typing import Optional

 import pytest
 from fixtures.common_types import TenantId
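This hunk shows pyupgrade's deprecated-import rule (UP035): `Generator` now comes from `collections.abc`, since the `typing` aliases for the container ABCs are deprecated. The migrated shape, with an illustrative generator:

    from __future__ import annotations

    from collections.abc import Generator  # preferred over typing.Generator


    def countdown(start: int) -> Generator[int, None, None]:
        # The three Generator parameters are (yield type, send type, return type).
        while start > 0:
            yield start
            start -= 1


    assert list(countdown(3)) == [3, 2, 1]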
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import os
 from contextlib import closing
 from pathlib import Path

@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 from fixtures.log_helper import log
 from fixtures.neon_fixtures import (
     AuxFileStore,

@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import threading
 import time
 from contextlib import closing, contextmanager

@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import random
 import time

@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import pytest
 from fixtures.neon_fixtures import NeonEnv

@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import threading
 import time
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import pytest
 from fixtures.common_types import Lsn, TimelineId
 from fixtures.log_helper import log
@@ -1,8 +1,9 @@
+from __future__ import annotations
+
 import random
 import threading
 import time
 from concurrent.futures import ThreadPoolExecutor
-from typing import List

 import pytest
 from fixtures.common_types import Lsn, TimelineId
@@ -56,10 +57,10 @@ def test_branching_with_pgbench(
         pg_bin.run_capture(["pgbench", "-T15", connstr])

     env.create_branch("b0", tenant_id=tenant)
-    endpoints: List[Endpoint] = []
+    endpoints: list[Endpoint] = []
     endpoints.append(env.endpoints.create_start("b0", tenant_id=tenant))

-    threads: List[threading.Thread] = []
+    threads: list[threading.Thread] = []
     threads.append(
         threading.Thread(target=run_pgbench, args=(endpoints[0].connstr(),), daemon=True)
     )
@@ -1,6 +1,7 @@
+from __future__ import annotations
+
 import concurrent.futures
 import os
-from typing import List, Tuple

 import pytest
 from fixtures.common_types import TenantId, TimelineId
@@ -31,7 +32,7 @@ def test_local_corruption(neon_env_builder: NeonEnvBuilder):
         ]
     )

-    tenant_timelines: List[Tuple[TenantId, TimelineId, Endpoint]] = []
+    tenant_timelines: list[tuple[TenantId, TimelineId, Endpoint]] = []

     for _ in range(3):
         tenant_id, timeline_id = env.create_tenant()
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 from fixtures.metrics import parse_metrics
 from fixtures.neon_fixtures import NeonEnvBuilder, NeonProxy
Some files were not shown because too many files have changed in this diff.