From 65704708fa922b524d3ab75995c39afc5c4f562e Mon Sep 17 00:00:00 2001 From: Dmitry Rodionov Date: Fri, 24 Jun 2022 21:11:49 +0300 Subject: [PATCH] remove unused imports, make more use of pathlib.Path --- .../batch_others/test_ancestor_branch.py | 3 - test_runner/batch_others/test_auth.py | 4 +- test_runner/batch_others/test_backpressure.py | 4 +- .../batch_others/test_basebackup_error.py | 2 - .../batch_others/test_branch_behind.py | 1 - test_runner/batch_others/test_fullbackup.py | 15 ++--- test_runner/batch_others/test_wal_acceptor.py | 13 +++-- test_runner/batch_others/test_wal_restore.py | 12 ++-- .../batch_pg_regress/test_isolation.py | 9 ++- .../batch_pg_regress/test_neon_regress.py | 9 ++- .../batch_pg_regress/test_pg_regress.py | 11 ++-- test_runner/fixtures/neon_fixtures.py | 55 ++++++++++--------- test_runner/fixtures/utils.py | 12 ---- 13 files changed, 61 insertions(+), 89 deletions(-) diff --git a/test_runner/batch_others/test_ancestor_branch.py b/test_runner/batch_others/test_ancestor_branch.py index 656428e5df..20e63b4e5c 100644 --- a/test_runner/batch_others/test_ancestor_branch.py +++ b/test_runner/batch_others/test_ancestor_branch.py @@ -1,6 +1,3 @@ -from contextlib import closing - -import psycopg2.extras import pytest from fixtures.log_helper import log from fixtures.neon_fixtures import NeonEnv, NeonEnvBuilder, NeonPageserverApiException diff --git a/test_runner/batch_others/test_auth.py b/test_runner/batch_others/test_auth.py index b9eb9d7cee..0fd0a5d7e3 100644 --- a/test_runner/batch_others/test_auth.py +++ b/test_runner/batch_others/test_auth.py @@ -1,8 +1,6 @@ from contextlib import closing -from typing import Iterator -from uuid import UUID, uuid4 +from uuid import uuid4 from fixtures.neon_fixtures import NeonEnvBuilder, NeonPageserverApiException -from requests.exceptions import HTTPError import pytest diff --git a/test_runner/batch_others/test_backpressure.py b/test_runner/batch_others/test_backpressure.py index f89ee14691..4ca03b102b 100644 --- a/test_runner/batch_others/test_backpressure.py +++ b/test_runner/batch_others/test_backpressure.py @@ -1,11 +1,9 @@ from contextlib import closing, contextmanager import psycopg2.extras import pytest -from fixtures.neon_fixtures import PgProtocol, NeonEnvBuilder +from fixtures.neon_fixtures import NeonEnvBuilder from fixtures.log_helper import log -import os import time -import asyncpg from fixtures.neon_fixtures import Postgres import threading diff --git a/test_runner/batch_others/test_basebackup_error.py b/test_runner/batch_others/test_basebackup_error.py index 29cbe59d2e..0909ed98a7 100644 --- a/test_runner/batch_others/test_basebackup_error.py +++ b/test_runner/batch_others/test_basebackup_error.py @@ -1,8 +1,6 @@ import pytest -from contextlib import closing from fixtures.neon_fixtures import NeonEnv -from fixtures.log_helper import log # diff --git a/test_runner/batch_others/test_branch_behind.py b/test_runner/batch_others/test_branch_behind.py index 4f4c058b61..0274c6c1e0 100644 --- a/test_runner/batch_others/test_branch_behind.py +++ b/test_runner/batch_others/test_branch_behind.py @@ -1,4 +1,3 @@ -import subprocess from contextlib import closing import psycopg2.extras diff --git a/test_runner/batch_others/test_fullbackup.py b/test_runner/batch_others/test_fullbackup.py index e5d705beab..cd6c40f56b 100644 --- a/test_runner/batch_others/test_fullbackup.py +++ b/test_runner/batch_others/test_fullbackup.py @@ -1,16 +1,10 @@ -import subprocess from contextlib import closing -import psycopg2.extras -import pytest 
from fixtures.log_helper import log from fixtures.neon_fixtures import NeonEnvBuilder, PgBin, PortDistributor, VanillaPostgres from fixtures.neon_fixtures import pg_distrib_dir import os -from fixtures.utils import mkdir_if_needed, subprocess_capture -import shutil -import getpass -import pwd +from fixtures.utils import subprocess_capture num_rows = 1000 @@ -46,19 +40,20 @@ def test_fullbackup(neon_env_builder: NeonEnvBuilder, psql_env = {'LD_LIBRARY_PATH': os.path.join(str(pg_distrib_dir), 'lib')} # Get and unpack fullbackup from pageserver - restored_dir_path = os.path.join(env.repo_dir, "restored_datadir") + restored_dir_path = env.repo_dir / "restored_datadir" os.mkdir(restored_dir_path, 0o750) query = f"fullbackup {env.initial_tenant.hex} {timeline} {lsn}" cmd = ["psql", "--no-psqlrc", env.pageserver.connstr(), "-c", query] result_basepath = pg_bin.run_capture(cmd, env=psql_env) tar_output_file = result_basepath + ".stdout" - subprocess_capture(str(env.repo_dir), ["tar", "-xf", tar_output_file, "-C", restored_dir_path]) + subprocess_capture(str(env.repo_dir), + ["tar", "-xf", tar_output_file, "-C", str(restored_dir_path)]) # HACK # fullbackup returns neon specific pg_control and first WAL segment # use resetwal to overwrite it pg_resetwal_path = os.path.join(pg_bin.pg_bin_path, 'pg_resetwal') - cmd = [pg_resetwal_path, "-D", restored_dir_path] + cmd = [pg_resetwal_path, "-D", str(restored_dir_path)] pg_bin.run_capture(cmd, env=psql_env) # Restore from the backup and find the data we inserted diff --git a/test_runner/batch_others/test_wal_acceptor.py b/test_runner/batch_others/test_wal_acceptor.py index 2b93dd160a..9b876f780d 100644 --- a/test_runner/batch_others/test_wal_acceptor.py +++ b/test_runner/batch_others/test_wal_acceptor.py @@ -1,3 +1,4 @@ +import pathlib import pytest import random import time @@ -14,7 +15,7 @@ from dataclasses import dataclass, field from multiprocessing import Process, Value from pathlib import Path from fixtures.neon_fixtures import PgBin, Etcd, Postgres, RemoteStorageUsers, Safekeeper, NeonEnv, NeonEnvBuilder, PortDistributor, SafekeeperPort, neon_binpath, PgProtocol -from fixtures.utils import get_dir_size, lsn_to_hex, mkdir_if_needed, lsn_from_hex +from fixtures.utils import get_dir_size, lsn_to_hex, lsn_from_hex from fixtures.log_helper import log from typing import List, Optional, Any from uuid import uuid4 @@ -645,7 +646,7 @@ class ProposerPostgres(PgProtocol): def create_dir_config(self, safekeepers: str): """ Create dir and config for running --sync-safekeepers """ - mkdir_if_needed(self.pg_data_dir_path()) + pathlib.Path(self.pg_data_dir_path()).mkdir(exist_ok=True) with open(self.config_file_path(), "w") as f: cfg = [ "synchronous_standby_names = 'walproposer'\n", @@ -828,7 +829,7 @@ class SafekeeperEnv: self.timeline_id = uuid.uuid4() self.tenant_id = uuid.uuid4() - mkdir_if_needed(str(self.repo_dir)) + self.repo_dir.mkdir(exist_ok=True) # Create config and a Safekeeper object for each safekeeper self.safekeepers = [] @@ -847,8 +848,8 @@ class SafekeeperEnv: http=self.port_distributor.get_port(), ) - safekeeper_dir = os.path.join(self.repo_dir, f"sk{i}") - mkdir_if_needed(safekeeper_dir) + safekeeper_dir = self.repo_dir / f"sk{i}" + safekeeper_dir.mkdir(exist_ok=True) args = [ self.bin_safekeeper, @@ -857,7 +858,7 @@ class SafekeeperEnv: "--listen-http", f"127.0.0.1:{port.http}", "-D", - safekeeper_dir, + str(safekeeper_dir), "--id", str(i), "--broker-endpoints", diff --git a/test_runner/batch_others/test_wal_restore.py 
b/test_runner/batch_others/test_wal_restore.py index 8ea64d4fce..809e942415 100644 --- a/test_runner/batch_others/test_wal_restore.py +++ b/test_runner/batch_others/test_wal_restore.py @@ -1,19 +1,17 @@ import os -import subprocess +from pathlib import Path from fixtures.neon_fixtures import (NeonEnvBuilder, VanillaPostgres, PortDistributor, PgBin, base_dir, - vanilla_pg, pg_distrib_dir) -from fixtures.log_helper import log def test_wal_restore(neon_env_builder: NeonEnvBuilder, pg_bin: PgBin, - test_output_dir, + test_output_dir: Path, port_distributor: PortDistributor): env = neon_env_builder.init_start() env.neon_cli.create_branch("test_wal_restore") @@ -22,13 +20,13 @@ def test_wal_restore(neon_env_builder: NeonEnvBuilder, tenant_id = pg.safe_psql("show neon.tenant_id")[0][0] env.neon_cli.pageserver_stop() port = port_distributor.get_port() - data_dir = os.path.join(test_output_dir, 'pgsql.restored') + data_dir = test_output_dir / 'pgsql.restored' with VanillaPostgres(data_dir, PgBin(test_output_dir), port) as restored: pg_bin.run_capture([ os.path.join(base_dir, 'libs/utils/scripts/restore_from_wal.sh'), os.path.join(pg_distrib_dir, 'bin'), - os.path.join(test_output_dir, 'repo/safekeepers/sk1/{}/*'.format(tenant_id)), - data_dir, + str(test_output_dir / 'repo' / 'safekeepers' / 'sk1' / str(tenant_id) / '*'), + str(data_dir), str(port) ]) restored.start() diff --git a/test_runner/batch_pg_regress/test_isolation.py b/test_runner/batch_pg_regress/test_isolation.py index 936b31298e..0124459440 100644 --- a/test_runner/batch_pg_regress/test_isolation.py +++ b/test_runner/batch_pg_regress/test_isolation.py @@ -1,13 +1,13 @@ import os +from pathlib import Path import pytest -from fixtures.utils import mkdir_if_needed from fixtures.neon_fixtures import NeonEnv, base_dir, pg_distrib_dir # The isolation tests run for a long time, especially in debug mode, # so use a larger-than-default timeout. @pytest.mark.timeout(1800) -def test_isolation(neon_simple_env: NeonEnv, test_output_dir, pg_bin, capsys): +def test_isolation(neon_simple_env: NeonEnv, test_output_dir: Path, pg_bin, capsys): env = neon_simple_env env.neon_cli.create_branch("test_isolation", "empty") @@ -17,9 +17,8 @@ def test_isolation(neon_simple_env: NeonEnv, test_output_dir, pg_bin, capsys): pg.safe_psql('CREATE DATABASE isolation_regression') # Create some local directories for pg_isolation_regress to run in. - runpath = os.path.join(test_output_dir, 'regress') - mkdir_if_needed(runpath) - mkdir_if_needed(os.path.join(runpath, 'testtablespace')) + runpath = test_output_dir / 'regress' + (runpath / 'testtablespace').mkdir(parents=True) # Compute all the file locations that pg_isolation_regress will need. 
build_path = os.path.join(pg_distrib_dir, 'build/src/test/isolation') diff --git a/test_runner/batch_pg_regress/test_neon_regress.py b/test_runner/batch_pg_regress/test_neon_regress.py index de3f9705a0..66ea67d9f1 100644 --- a/test_runner/batch_pg_regress/test_neon_regress.py +++ b/test_runner/batch_pg_regress/test_neon_regress.py @@ -1,6 +1,6 @@ import os +from pathlib import Path -from fixtures.utils import mkdir_if_needed from fixtures.neon_fixtures import (NeonEnv, check_restored_datadir_content, base_dir, @@ -8,7 +8,7 @@ from fixtures.neon_fixtures import (NeonEnv, from fixtures.log_helper import log -def test_neon_regress(neon_simple_env: NeonEnv, test_output_dir, pg_bin, capsys): +def test_neon_regress(neon_simple_env: NeonEnv, test_output_dir: Path, pg_bin, capsys): env = neon_simple_env env.neon_cli.create_branch("test_neon_regress", "empty") @@ -17,9 +17,8 @@ def test_neon_regress(neon_simple_env: NeonEnv, test_output_dir, pg_bin, capsys) pg.safe_psql('CREATE DATABASE regression') # Create some local directories for pg_regress to run in. - runpath = os.path.join(test_output_dir, 'regress') - mkdir_if_needed(runpath) - mkdir_if_needed(os.path.join(runpath, 'testtablespace')) + runpath = test_output_dir / 'regress' + (runpath / 'testtablespace').mkdir(parents=True) # Compute all the file locations that pg_regress will need. # This test runs neon specific tests diff --git a/test_runner/batch_pg_regress/test_pg_regress.py b/test_runner/batch_pg_regress/test_pg_regress.py index fb71d31170..b53bc21ca2 100644 --- a/test_runner/batch_pg_regress/test_pg_regress.py +++ b/test_runner/batch_pg_regress/test_pg_regress.py @@ -1,13 +1,13 @@ import os +import pathlib import pytest -from fixtures.utils import mkdir_if_needed from fixtures.neon_fixtures import NeonEnv, check_restored_datadir_content, base_dir, pg_distrib_dir # The pg_regress tests run for a long time, especially in debug mode, # so use a larger-than-default timeout. @pytest.mark.timeout(1800) -def test_pg_regress(neon_simple_env: NeonEnv, test_output_dir: str, pg_bin, capsys): +def test_pg_regress(neon_simple_env: NeonEnv, test_output_dir: pathlib.Path, pg_bin, capsys): env = neon_simple_env env.neon_cli.create_branch("test_pg_regress", "empty") @@ -16,9 +16,8 @@ def test_pg_regress(neon_simple_env: NeonEnv, test_output_dir: str, pg_bin, caps pg.safe_psql('CREATE DATABASE regression') # Create some local directories for pg_regress to run in. - runpath = os.path.join(test_output_dir, 'regress') - mkdir_if_needed(runpath) - mkdir_if_needed(os.path.join(runpath, 'testtablespace')) + runpath = test_output_dir / 'regress' + (runpath / 'testtablespace').mkdir(parents=True) # Compute all the file locations that pg_regress will need. 
build_path = os.path.join(pg_distrib_dir, 'build/src/test/regress') @@ -51,7 +50,7 @@ def test_pg_regress(neon_simple_env: NeonEnv, test_output_dir: str, pg_bin, caps # checkpoint one more time to ensure that the lsn we get is the latest one pg.safe_psql('CHECKPOINT') - lsn = pg.safe_psql('select pg_current_wal_insert_lsn()')[0][0] + pg.safe_psql('select pg_current_wal_insert_lsn()')[0][0] # Check that we restore the content of the datadir correctly check_restored_datadir_content(test_output_dir, env, pg) diff --git a/test_runner/fixtures/neon_fixtures.py b/test_runner/fixtures/neon_fixtures.py index 7506641fcb..93efc7d5d2 100644 --- a/test_runner/fixtures/neon_fixtures.py +++ b/test_runner/fixtures/neon_fixtures.py @@ -35,12 +35,7 @@ from typing_extensions import Literal import requests import backoff # type: ignore -from .utils import (etcd_path, - get_self_dir, - mkdir_if_needed, - subprocess_capture, - lsn_from_hex, - lsn_to_hex) +from .utils import (etcd_path, get_self_dir, subprocess_capture, lsn_from_hex, lsn_to_hex) from fixtures.log_helper import log """ This file contains pytest fixtures. A fixture is a test resource that can be @@ -127,7 +122,7 @@ def pytest_configure(config): top_output_dir = env_test_output else: top_output_dir = os.path.join(base_dir, DEFAULT_OUTPUT_DIR) - mkdir_if_needed(top_output_dir) + pathlib.Path(top_output_dir).mkdir(exist_ok=True) # Find the postgres installation. global pg_distrib_dir @@ -1316,7 +1311,7 @@ def append_pageserver_param_overrides( class PgBin: """ A helper class for executing postgres binaries """ - def __init__(self, log_dir: str): + def __init__(self, log_dir: Path): self.log_dir = log_dir self.pg_bin_path = os.path.join(str(pg_distrib_dir), 'bin') self.env = os.environ.copy() @@ -1367,22 +1362,27 @@ class PgBin: self._fixpath(command) log.info('Running command "{}"'.format(' '.join(command))) env = self._build_env(env) - return subprocess_capture(self.log_dir, command, env=env, cwd=cwd, check=True, **kwargs) + return subprocess_capture(str(self.log_dir), + command, + env=env, + cwd=cwd, + check=True, + **kwargs) @pytest.fixture(scope='function') -def pg_bin(test_output_dir: str) -> PgBin: +def pg_bin(test_output_dir: Path) -> PgBin: return PgBin(test_output_dir) class VanillaPostgres(PgProtocol): - def __init__(self, pgdatadir: str, pg_bin: PgBin, port: int, init=True): + def __init__(self, pgdatadir: Path, pg_bin: PgBin, port: int, init=True): super().__init__(host='localhost', port=port, dbname='postgres') self.pgdatadir = pgdatadir self.pg_bin = pg_bin self.running = False if init: - self.pg_bin.run_capture(['initdb', '-D', pgdatadir]) + self.pg_bin.run_capture(['initdb', '-D', str(pgdatadir)]) self.configure([f"port = {port}\n"]) def configure(self, options: List[str]): @@ -1398,12 +1398,13 @@ class VanillaPostgres(PgProtocol): if log_path is None: log_path = os.path.join(self.pgdatadir, "pg.log") - self.pg_bin.run_capture(['pg_ctl', '-w', '-D', self.pgdatadir, '-l', log_path, 'start']) + self.pg_bin.run_capture( + ['pg_ctl', '-w', '-D', str(self.pgdatadir), '-l', log_path, 'start']) def stop(self): assert self.running self.running = False - self.pg_bin.run_capture(['pg_ctl', '-w', '-D', self.pgdatadir, 'stop']) + self.pg_bin.run_capture(['pg_ctl', '-w', '-D', str(self.pgdatadir), 'stop']) def get_subdir_size(self, subdir) -> int: """Return size of pgdatadir subdirectory in bytes.""" @@ -1418,9 +1419,9 @@ class VanillaPostgres(PgProtocol): @pytest.fixture(scope='function') -def vanilla_pg(test_output_dir: str, +def 
vanilla_pg(test_output_dir: Path, port_distributor: PortDistributor) -> Iterator[VanillaPostgres]: - pgdatadir = os.path.join(test_output_dir, "pgdata-vanilla") + pgdatadir = test_output_dir / "pgdata-vanilla" pg_bin = PgBin(test_output_dir) port = port_distributor.get_port() with VanillaPostgres(pgdatadir, pg_bin, port) as vanilla_pg: @@ -1457,7 +1458,7 @@ class RemotePostgres(PgProtocol): @pytest.fixture(scope='function') -def remote_pg(test_output_dir: str) -> Iterator[RemotePostgres]: +def remote_pg(test_output_dir: Path) -> Iterator[RemotePostgres]: pg_bin = PgBin(test_output_dir) connstr = os.getenv("BENCHMARK_CONNSTR") @@ -1980,11 +1981,13 @@ class Etcd: self.handle.wait() -def get_test_output_dir(request: Any) -> str: +def get_test_output_dir(request: Any) -> pathlib.Path: """ Compute the working directory for an individual test. """ test_name = request.node.name - test_dir = os.path.join(str(top_output_dir), test_name) + test_dir = pathlib.Path(top_output_dir) / test_name log.info(f'get_test_output_dir is {test_dir}') + # make mypy happy + assert isinstance(test_dir, pathlib.Path) return test_dir @@ -1998,14 +2001,14 @@ def get_test_output_dir(request: Any) -> str: # this fixture ensures that the directory exists. That works because # 'autouse' fixtures are run before other fixtures. @pytest.fixture(scope='function', autouse=True) -def test_output_dir(request: Any) -> str: +def test_output_dir(request: Any) -> pathlib.Path: """ Create the working directory for an individual test. """ # one directory per test test_dir = get_test_output_dir(request) log.info(f'test_output_dir is {test_dir}') shutil.rmtree(test_dir, ignore_errors=True) - mkdir_if_needed(test_dir) + test_dir.mkdir() return test_dir @@ -2051,7 +2054,7 @@ def should_skip_file(filename: str) -> bool: # # Test helpers # -def list_files_to_compare(pgdata_dir: str): +def list_files_to_compare(pgdata_dir: pathlib.Path): pgdata_files = [] for root, _file, filenames in os.walk(pgdata_dir): for filename in filenames: @@ -2068,7 +2071,7 @@ def list_files_to_compare(pgdata_dir: str): # pg is the existing and running compute node, that we want to compare with a basebackup -def check_restored_datadir_content(test_output_dir: str, env: NeonEnv, pg: Postgres): +def check_restored_datadir_content(test_output_dir: Path, env: NeonEnv, pg: Postgres): # Get the timeline ID. 
We need it for the 'basebackup' command with closing(pg.connect()) as conn: @@ -2080,8 +2083,8 @@ def check_restored_datadir_content(test_output_dir: str, env: NeonEnv, pg: Postg pg.stop() # Take a basebackup from pageserver - restored_dir_path = os.path.join(env.repo_dir, f"{pg.node_name}_restored_datadir") - mkdir_if_needed(restored_dir_path) + restored_dir_path = env.repo_dir / f"{pg.node_name}_restored_datadir" + restored_dir_path.mkdir(exist_ok=True) pg_bin = PgBin(test_output_dir) psql_path = os.path.join(pg_bin.pg_bin_path, 'psql') @@ -2108,7 +2111,7 @@ def check_restored_datadir_content(test_output_dir: str, env: NeonEnv, pg: Postg # list files we're going to compare assert pg.pgdata_dir - pgdata_files = list_files_to_compare(pg.pgdata_dir) + pgdata_files = list_files_to_compare(pathlib.Path(pg.pgdata_dir)) restored_files = list_files_to_compare(restored_dir_path) # check that file sets are equal diff --git a/test_runner/fixtures/utils.py b/test_runner/fixtures/utils.py index bfa57373b3..05d1a6634d 100644 --- a/test_runner/fixtures/utils.py +++ b/test_runner/fixtures/utils.py @@ -12,18 +12,6 @@ def get_self_dir() -> str: return os.path.dirname(os.path.abspath(__file__)) -def mkdir_if_needed(path: str) -> None: - """ Create a directory if it doesn't already exist - - Note this won't try to create intermediate directories. - """ - try: - os.mkdir(path) - except FileExistsError: - pass - assert os.path.isdir(path) - - def subprocess_capture(capture_dir: str, cmd: List[str], **kwargs: Any) -> str: """ Run a process and capture its output
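
A minimal sketch of the pathlib pattern adopted throughout this patch, shown here outside the diff for reference; the directory names below are hypothetical placeholders, not paths taken from the test suite:

    import tempfile
    from pathlib import Path

    base = Path(tempfile.mkdtemp())  # stand-in for a test output directory

    # Equivalent of the removed mkdir_if_needed(): create the directory if it
    # does not already exist; intermediate directories are not created.
    (base / "restored_datadir").mkdir(exist_ok=True)

    # Where intermediate directories are wanted (e.g. the regress runpath),
    # parents=True is passed as well.
    (base / "regress" / "testtablespace").mkdir(parents=True)

    # os.path.join(a, b) becomes the '/' operator; str() is applied where a
    # plain string is still required, e.g. in a subprocess argument list.
    cmd = ["pg_resetwal", "-D", str(base / "restored_datadir")]

Path.mkdir(exist_ok=True) still raises if the path exists but is not a directory, which roughly matches the assert in the removed helper.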