test_runner: use toml instead of formatted strings (#6088)
## Problem

A bunch of refactorings extracted from https://github.com/neondatabase/neon/pull/6087 (not required for it); the most significant one is using toml instead of formatted strings.

## Summary of changes

- Use toml instead of formatted strings for config
- Skip pageserver log check if `pageserver.log` doesn't exist
- `chmod -x test_runner/regress/test_config.py`
Committed by GitHub · parent f0d15cee6f · commit 66a7a226f8
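The gist of the change, sketched outside the fixtures: build the `neon_local` config as a plain dict and let the `toml` package serialize it, instead of concatenating `textwrap.dedent` f-strings. The snippet below is illustrative only (the tenant id, ports, and IDs are made up); it roughly mirrors what the new `NeonCli.init` in the diff does with `toml.dumps` and a temporary file.

```python
# Illustrative sketch of the new flow; the values below are made up, not taken from the fixtures.
import tempfile

import toml

cfg = {
    "default_tenant_id": "0123456789abcdef0123456789abcdef",  # made-up tenant id
    "broker": {"listen_addr": "127.0.0.1:50051"},
    "pageservers": [
        {
            "id": 1,
            "listen_pg_addr": "localhost:15001",
            "listen_http_addr": "localhost:15002",
            "pg_auth_type": "Trust",
            "http_auth_type": "Trust",
        }
    ],
    "safekeepers": [
        {"id": 1, "pg_port": 15011, "pg_tenant_only_port": 15012, "http_port": 15013, "sync": False}
    ],
}

# NeonCli.init (see the diff below) essentially serializes the dict and points
# `neon_local init --config=...` at the resulting file.
with tempfile.NamedTemporaryFile(mode="w+", suffix=".toml") as tmp:
    tmp.write(toml.dumps(cfg))
    tmp.flush()
    with open(tmp.name) as f:
        # Nested dicts become [tables]; lists of dicts become [[arrays of tables]].
        print(f.read())
```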
@@ -28,6 +28,7 @@ import jwt
 import psycopg2
 import pytest
 import requests
+import toml
 from _pytest.config import Config
 from _pytest.config.argparsing import Parser
 from _pytest.fixtures import FixtureRequest
@@ -436,7 +437,7 @@ class NeonEnvBuilder:
         # Pageserver remote storage
         self.pageserver_remote_storage = pageserver_remote_storage
         # Safekeepers remote storage
-        self.sk_remote_storage: Optional[RemoteStorage] = None
+        self.safekeepers_remote_storage: Optional[RemoteStorage] = None
 
         self.broker = broker
         self.run_id = run_id
@@ -534,9 +535,11 @@ class NeonEnvBuilder:
         self.pageserver_remote_storage = ret
 
     def enable_safekeeper_remote_storage(self, kind: RemoteStorageKind):
-        assert self.sk_remote_storage is None, "sk_remote_storage already configured"
+        assert (
+            self.safekeepers_remote_storage is None
+        ), "safekeepers_remote_storage already configured"
 
-        self.sk_remote_storage = self._configure_and_create_remote_storage(
+        self.safekeepers_remote_storage = self._configure_and_create_remote_storage(
             kind, RemoteStorageUser.SAFEKEEPER
         )
 
@@ -589,7 +592,7 @@ class NeonEnvBuilder:
             directory_to_clean.rmdir()
 
     def cleanup_remote_storage(self):
-        for x in [self.pageserver_remote_storage, self.sk_remote_storage]:
+        for x in [self.pageserver_remote_storage, self.safekeepers_remote_storage]:
             if isinstance(x, S3Storage):
                 x.do_cleanup()
 
@@ -693,7 +696,7 @@ class NeonEnv:
         self.pageservers: List[NeonPageserver] = []
         self.broker = config.broker
         self.pageserver_remote_storage = config.pageserver_remote_storage
-        self.safekeepers_remote_storage = config.sk_remote_storage
+        self.safekeepers_remote_storage = config.safekeepers_remote_storage
         self.pg_version = config.pg_version
         # Binary path for pageserver, safekeeper, etc
         self.neon_binpath = config.neon_binpath
@@ -718,25 +721,17 @@ class NeonEnv:
         self.attachment_service = None
 
         # Create a config file corresponding to the options
-        toml = textwrap.dedent(
-            f"""
-            default_tenant_id = '{config.initial_tenant}'
-        """
-        )
+        cfg: Dict[str, Any] = {
+            "default_tenant_id": str(self.initial_tenant),
+            "broker": {
+                "listen_addr": self.broker.listen_addr(),
+            },
+            "pageservers": [],
+            "safekeepers": [],
+        }
 
         if self.control_plane_api is not None:
-            toml += textwrap.dedent(
-                f"""
-                control_plane_api = '{self.control_plane_api}'
-            """
-            )
-
-        toml += textwrap.dedent(
-            f"""
-            [broker]
-            listen_addr = '{self.broker.listen_addr()}'
-        """
-        )
+            cfg["control_plane_api"] = self.control_plane_api
 
         # Create config for pageserver
         http_auth_type = "NeonJWT" if config.auth_enabled else "Trust"
@@ -749,26 +744,24 @@ class NeonEnv:
             http=self.port_distributor.get_port(),
         )
 
-        toml += textwrap.dedent(
-            f"""
-            [[pageservers]]
-            id={ps_id}
-            listen_pg_addr = 'localhost:{pageserver_port.pg}'
-            listen_http_addr = 'localhost:{pageserver_port.http}'
-            pg_auth_type = '{pg_auth_type}'
-            http_auth_type = '{http_auth_type}'
-        """
-        )
-
+        ps_cfg: Dict[str, Any] = {
+            "id": ps_id,
+            "listen_pg_addr": f"localhost:{pageserver_port.pg}",
+            "listen_http_addr": f"localhost:{pageserver_port.http}",
+            "pg_auth_type": pg_auth_type,
+            "http_auth_type": http_auth_type,
+        }
         # Create a corresponding NeonPageserver object
         self.pageservers.append(
             NeonPageserver(
                 self,
                 ps_id,
                 port=pageserver_port,
-                config_override=config.pageserver_config_override,
+                config_override=self.pageserver_config_override,
             )
         )
+        cfg["pageservers"].append(ps_cfg)
+
         # Create config and a Safekeeper object for each safekeeper
         for i in range(1, config.num_safekeepers + 1):
             port = SafekeeperPort(
@@ -777,32 +770,22 @@ class NeonEnv:
                 http=self.port_distributor.get_port(),
             )
             id = config.safekeepers_id_start + i  # assign ids sequentially
-            toml += textwrap.dedent(
-                f"""
-                [[safekeepers]]
-                id = {id}
-                pg_port = {port.pg}
-                pg_tenant_only_port = {port.pg_tenant_only}
-                http_port = {port.http}
-                sync = {'true' if config.safekeepers_enable_fsync else 'false'}"""
-            )
+            sk_cfg: Dict[str, Any] = {
+                "id": id,
+                "pg_port": port.pg,
+                "pg_tenant_only_port": port.pg_tenant_only,
+                "http_port": port.http,
+                "sync": config.safekeepers_enable_fsync,
+            }
             if config.auth_enabled:
-                toml += textwrap.dedent(
-                    """
-                    auth_enabled = true
-                """
-                )
-            if config.sk_remote_storage is not None:
-                toml += textwrap.dedent(
-                    f"""
-                    remote_storage = "{remote_storage_to_toml_inline_table(config.sk_remote_storage)}"
-                """
-                )
-            safekeeper = Safekeeper(env=self, id=id, port=port)
-            self.safekeepers.append(safekeeper)
+                sk_cfg["auth_enabled"] = True
+            if self.safekeepers_remote_storage is not None:
+                sk_cfg["remote_storage"] = self.safekeepers_remote_storage.to_toml_inline_table()
+            self.safekeepers.append(Safekeeper(env=self, id=id, port=port))
+            cfg["safekeepers"].append(sk_cfg)
 
-        log.info(f"Config: {toml}")
-        self.neon_cli.init(toml)
+        log.info(f"Config: {cfg}")
+        self.neon_cli.init(cfg)
 
     def start(self):
         # Start up broker, pageserver and all safekeepers
@@ -1288,10 +1271,10 @@ class NeonCli(AbstractNeonCli):
 
     def init(
         self,
-        config_toml: str,
+        config: Dict[str, Any],
     ) -> "subprocess.CompletedProcess[str]":
         with tempfile.NamedTemporaryFile(mode="w+") as tmp:
-            tmp.write(config_toml)
+            tmp.write(toml.dumps(config))
             tmp.flush()
 
             cmd = ["init", f"--config={tmp.name}", "--pg-version", self.env.pg_version]
@@ -1732,8 +1715,13 @@ class NeonPageserver(PgProtocol):
         return Path(os.path.join(self.env.repo_dir, f"pageserver_{self.id}"))
 
     def assert_no_errors(self):
-        logfile = open(os.path.join(self.workdir, "pageserver.log"), "r")
-        errors = scan_pageserver_log_for_errors(logfile, self.allowed_errors)
+        logfile = self.workdir / "pageserver.log"
+        if not logfile.exists():
+            log.warning(f"Skipping log check: {logfile} does not exist")
+            return
+
+        with logfile.open("r") as f:
+            errors = scan_pageserver_log_for_errors(f, self.allowed_errors)
 
         for _lineno, error in errors:
             log.info(f"not allowed error: {error.strip()}")
@@ -1757,7 +1745,10 @@ class NeonPageserver(PgProtocol):
 
     def log_contains(self, pattern: str) -> Optional[str]:
         """Check that the pageserver log contains a line that matches the given regex"""
-        logfile = open(os.path.join(self.workdir, "pageserver.log"), "r")
+        logfile = self.workdir / "pageserver.log"
+        if not logfile.exists():
+            log.warning(f"Skipping log check: {logfile} does not exist")
+            return None
 
         contains_re = re.compile(pattern)
 
@@ -1766,14 +1757,11 @@ class NeonPageserver(PgProtocol):
         # no guarantee it is already present in the log file. This hasn't
         # been a problem in practice, our python tests are not fast enough
         # to hit that race condition.
-        while True:
-            line = logfile.readline()
-            if not line:
-                break
-
-            if contains_re.search(line):
-                # found it!
-                return line
+        with logfile.open("r") as f:
+            for line in f:
+                if contains_re.search(line):
+                    # found it!
+                    return line
 
         return None
 
@@ -3355,8 +3343,6 @@ def parse_project_git_version_output(s: str) -> str:
 
     The information is generated by utils::project_git_version!
     """
-    import re
-
     res = re.search(r"git(-env)?:([0-9a-fA-F]{8,40})(-\S+)?", s)
     if res and (commit := res.group(2)):
         return commit
@@ -9,6 +9,7 @@ from pathlib import Path
 from typing import Any, Dict, List, Optional, Union
 
 import boto3
+import toml
 from mypy_boto3_s3 import S3Client
 
 from fixtures.log_helper import log
@@ -133,7 +134,10 @@ class LocalFsStorage:
             return json.load(f)
 
     def to_toml_inline_table(self) -> str:
-        return f"local_path='{self.root}'"
+        rv = {
+            "local_path": str(self.root),
+        }
+        return toml.TomlEncoder().dump_inline_table(rv)
 
     def cleanup(self):
         # no cleanup is done here, because there's NeonEnvBuilder.cleanup_local_storage which will remove everything, including localfs files
@@ -174,18 +178,18 @@ class S3Storage:
         )
 
     def to_toml_inline_table(self) -> str:
-        s = [
-            f"bucket_name='{self.bucket_name}'",
-            f"bucket_region='{self.bucket_region}'",
-        ]
+        rv = {
+            "bucket_name": self.bucket_name,
+            "bucket_region": self.bucket_region,
+        }
 
         if self.prefix_in_bucket is not None:
-            s.append(f"prefix_in_bucket='{self.prefix_in_bucket}'")
+            rv["prefix_in_bucket"] = self.prefix_in_bucket
 
         if self.endpoint is not None:
-            s.append(f"endpoint='{self.endpoint}'")
+            rv["endpoint"] = self.endpoint
 
-        return ",".join(s)
+        return toml.TomlEncoder().dump_inline_table(rv)
 
     def do_cleanup(self):
         if not self.cleanup:
@@ -384,4 +388,4 @@ def remote_storage_to_toml_inline_table(remote_storage: RemoteStorage) -> str:
     if not isinstance(remote_storage, (LocalFsStorage, S3Storage)):
         raise Exception("invalid remote storage type")
 
-    return f"{{{remote_storage.to_toml_inline_table()}}}"
+    return remote_storage.to_toml_inline_table()
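Why the wrapper above stops adding its own braces: `toml.TomlEncoder().dump_inline_table` already returns a complete inline table, braces included. A quick hedged illustration (the bucket values are invented, not from the test suite):

```python
# Illustration only; the bucket values are invented.
import toml

rv = {
    "bucket_name": "some-bucket",
    "bucket_region": "eu-central-1",
    "prefix_in_bucket": "some/prefix",
}

# dump_inline_table() renders a dict as a single TOML inline table, e.g.
# { bucket_name = "some-bucket", bucket_region = "eu-central-1", prefix_in_bucket = "some/prefix" }
print(toml.TomlEncoder().dump_inline_table(rv))
```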
@@ -7,7 +7,7 @@ from pathlib import Path
 from typing import Any, List, Optional
 
 import pytest
-import toml  # TODO: replace with tomllib for Python >= 3.11
+import toml
 from fixtures.log_helper import log
 from fixtures.neon_fixtures import (
     NeonCli,
@@ -411,7 +411,7 @@ def check_neon_works(
     config.initial_tenant = snapshot_config["default_tenant_id"]
     config.pg_distrib_dir = pg_distrib_dir
     config.remote_storage = None
-    config.sk_remote_storage = None
+    config.safekeepers_remote_storage = None
 
     # Use the "target" binaries to launch the storage nodes
     config_target = config
test_runner/regress/test_config.py: Executable file → Normal file (0 lines changed)