mirror of https://github.com/neondatabase/neon.git, synced 2026-01-05 20:42:54 +00:00
Reformat tests using yapf
@@ -8,6 +8,7 @@ pytest = ">=6.0.0"
 psycopg2 = "*"
 
 [dev-packages]
+yapf = "*"
 
 [requires]
 # we need at least 3.6, but pipenv doesn't allow to say this directly
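The hunk above adds yapf to the Pipfile's [dev-packages] section. A minimal sketch of how the new dev dependency might be used from the test_runner/ directory; the commands and the snippet below are illustrative and are not part of this commit:

    # pipenv install --dev      installs dev-packages, including yapf==0.31.0
    # pipenv run yapf -i -r .   rewrites the test files in place, recursively
    #
    # yapf also exposes a small Python API:
    from yapf.yapflib.yapf_api import FormatCode

    formatted, changed = FormatCode("x = ( 1,2 )\n")
    print(formatted)  # "x = (1, 2)\n" under yapf's default pep8-based style
    print(changed)    # True, because the input needed reformatting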
test_runner/Pipfile.lock (generated), 19 lines changed
@@ -1,7 +1,7 @@
 {
     "_meta": {
         "hash": {
-            "sha256": "79fd1b57f1f9eb92a446948e6658880cd0a0f64ab40dd6b38986e72db3007325"
+            "sha256": "f3f49fa53282f9ee1e7c7b1ea5f928a25ad716fc4212c12aa803450938613016"
         },
         "pipfile-spec": 6,
         "requires": {
@@ -21,6 +21,7 @@
             "sha256:149e90d6d8ac20db7a955ad60cf0e6881a3f20d37096140088356da6c716b0b1",
             "sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb"
         ],
+        "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'",
         "version": "==21.2.0"
     },
     "iniconfig": {
@@ -35,6 +36,7 @@
             "sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5",
             "sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a"
         ],
+        "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
         "version": "==20.9"
     },
     "pluggy": {
@@ -42,6 +44,7 @@
             "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0",
             "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"
         ],
+        "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
         "version": "==0.13.1"
     },
     "psycopg2": {
@@ -70,6 +73,7 @@
             "sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3",
             "sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a"
         ],
+        "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
         "version": "==1.10.0"
     },
     "pyparsing": {
@@ -77,6 +81,7 @@
             "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1",
             "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"
         ],
+        "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'",
         "version": "==2.4.7"
     },
     "pytest": {
@@ -92,8 +97,18 @@
             "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b",
             "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"
         ],
+        "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'",
         "version": "==0.10.2"
         }
     },
-    "develop": {}
+    "develop": {
+        "yapf": {
+            "hashes": [
+                "sha256:408fb9a2b254c302f49db83c59f9aa0b4b0fd0ec25be3a5c51181327922ff63d",
+                "sha256:e3a234ba8455fe201eaa649cdac872d590089a18b661e39bbac7020978dd9c2e"
+            ],
+            "index": "pypi",
+            "version": "==0.31.0"
+        }
+    }
 }
@@ -39,7 +39,7 @@ def test_branch_behind(zenith_cli, pageserver, postgres, pg_bin):
     print('LSN after 100100 rows: ' + lsn_b)
 
     # Branch at the point where only 100 rows were inserted
-    zenith_cli.run(["branch", "test_branch_behind_hundred", "test_branch_behind@"+lsn_a])
+    zenith_cli.run(["branch", "test_branch_behind_hundred", "test_branch_behind@" + lsn_a])
 
     # Insert many more rows. This generates enough WAL to fill a few segments.
     main_cur.execute('''
@@ -54,7 +54,7 @@ def test_branch_behind(zenith_cli, pageserver, postgres, pg_bin):
     print('LSN after 200100 rows: ' + lsn_c)
 
     # Branch at the point where only 200 rows were inserted
-    zenith_cli.run(["branch", "test_branch_behind_more", "test_branch_behind@"+lsn_b])
+    zenith_cli.run(["branch", "test_branch_behind_more", "test_branch_behind@" + lsn_b])
 
     pg_hundred = postgres.create_start("test_branch_behind_hundred")
     pg_more = postgres.create_start("test_branch_behind_more")
@@ -64,15 +64,15 @@ def test_branch_behind(zenith_cli, pageserver, postgres, pg_bin):
     hundred_pg_conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
     hundred_cur = hundred_pg_conn.cursor()
     hundred_cur.execute('SELECT count(*) FROM foo')
-    assert hundred_cur.fetchone() == (100,)
+    assert hundred_cur.fetchone() == (100, )
 
     # On the 'more' branch, we should see 100200 rows
     more_pg_conn = psycopg2.connect(pg_more.connstr())
     more_pg_conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
     more_cur = more_pg_conn.cursor()
     more_cur.execute('SELECT count(*) FROM foo')
-    assert more_cur.fetchone() == (100100,)
+    assert more_cur.fetchone() == (100100, )
 
     # All the rows are visible on the main branch
     main_cur.execute('SELECT count(*) FROM foo')
-    assert main_cur.fetchone() == (200100,)
+    assert main_cur.fetchone() == (200100, )
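The only changes in this test are whitespace: yapf puts spaces around the '+' used for string concatenation and a space before the closing parenthesis of one-element tuples. A minimal sketch showing the reformatting is behavior-preserving; the LSN value below is made up for illustration:

    lsn_a = '0/16B5A50'  # hypothetical LSN, for illustration only
    # Both spellings build the same one-element tuple, and the added spaces do not
    # change the concatenated string, so the assertions above keep their meaning.
    assert (100,) == (100, )
    assert 'test_branch_behind@'+lsn_a == 'test_branch_behind@' + lsn_a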
@@ -28,4 +28,4 @@ def test_config(zenith_cli, pageserver, postgres, pg_bin):
     ''')
 
     # check that config change was applied
-    assert cur.fetchone() == ('debug1',)
+    assert cur.fetchone() == ('debug1', )
@@ -25,7 +25,7 @@ def test_createdb(zenith_cli, pageserver, postgres, pg_bin):
     lsn = cur.fetchone()[0]
 
     # Create a branch
-    zenith_cli.run(["branch", "test_createdb2", "test_createdb@"+lsn])
+    zenith_cli.run(["branch", "test_createdb2", "test_createdb@" + lsn])
 
     pg2 = postgres.create_start('test_createdb2')
 
@@ -52,7 +52,7 @@ def test_multixact(pageserver, postgres, pg_bin, zenith_cli, base_dir):
     assert int(next_multixact_id) > int(next_multixact_id_old)
 
     # Branch at this point
-    zenith_cli.run(["branch", "test_multixact_new", "test_multixact@"+lsn])
+    zenith_cli.run(["branch", "test_multixact_new", "test_multixact@" + lsn])
     pg_new = postgres.create_start('test_multixact_new')
 
     print("postgres is running on 'test_multixact_new' branch")
@@ -9,7 +9,7 @@ def test_status(pageserver):
     pg_conn.autocommit = True
     cur = pg_conn.cursor()
     cur.execute('status')
-    assert cur.fetchone() == ('hello world',)
+    assert cur.fetchone() == ('hello world', )
     pg_conn.close()
@@ -31,12 +31,12 @@ def test_restart_compute(zenith_cli, pageserver, postgres, pg_bin):
 
     # We can still see the row
     cur.execute('SELECT count(*) FROM foo')
-    assert cur.fetchone() == (1,)
+    assert cur.fetchone() == (1, )
 
     # Insert another row
     cur.execute("INSERT INTO foo VALUES ('bar2')")
     cur.execute('SELECT count(*) FROM foo')
-    assert cur.fetchone() == (2,)
+    assert cur.fetchone() == (2, )
 
     # FIXME: Currently, there is no guarantee that by the time the INSERT commits, the WAL
     # has been streamed safely to the WAL safekeeper or page server. It is merely stored
@@ -55,4 +55,4 @@ def test_restart_compute(zenith_cli, pageserver, postgres, pg_bin):
 
     # We can still see the rows
     cur.execute('SELECT count(*) FROM foo')
-    assert cur.fetchone() == (2,)
+    assert cur.fetchone() == (2, )
@@ -41,7 +41,7 @@ def test_twophase(zenith_cli, pageserver, postgres, pg_bin):
     cur2.execute("ROLLBACK PREPARED 'insert_two'")
 
     cur2.execute('SELECT * FROM foo')
-    assert cur2.fetchall() == [('one',)]
+    assert cur2.fetchall() == [('one', )]
 
     # Neither insert is visible on the original branch, the transactions are still
     # in prepared state there.
@@ -27,10 +27,8 @@ def test_isolation(pageserver, postgres, pg_bin, zenith_cli, test_output_dir, pg
     mkdir_if_needed(os.path.join(runpath, 'testtablespace'))
 
     # Compute all the file locations that pg_isolation_regress will need.
-    build_path = os.path.join(
-        pg_distrib_dir, 'build/src/test/isolation')
-    src_path = os.path.join(
-        base_dir, 'vendor/postgres/src/test/isolation')
+    build_path = os.path.join(pg_distrib_dir, 'build/src/test/isolation')
+    src_path = os.path.join(base_dir, 'vendor/postgres/src/test/isolation')
     bindir = os.path.join(pg_distrib_dir, 'bin')
     schedule = os.path.join(src_path, 'isolation_schedule')
     pg_isolation_regress = os.path.join(build_path, 'pg_isolation_regress')
@@ -41,7 +39,7 @@ def test_isolation(pageserver, postgres, pg_bin, zenith_cli, test_output_dir, pg
         '--bindir={}'.format(bindir),
         '--dlpath={}'.format(build_path),
         '--inputdir={}'.format(src_path),
-        '--schedule={}'.format(schedule)
+        '--schedule={}'.format(schedule),
     ]
 
     env = {
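In these hunks yapf joins the split os.path.join(...) calls back onto single lines because they fit within the configured column limit, and the last element of the multi-line argument list gains a trailing comma. A hedged sketch of the same joining behaviour through yapf's Python API; the input string below is illustrative, not taken from the repository:

    from yapf.yapflib.yapf_api import FormatCode

    src = ("build_path = os.path.join(\n"
           "    pg_distrib_dir, 'build/src/test/isolation')\n")
    formatted, changed = FormatCode(src)
    print(formatted)  # expected: the call collapsed onto one line, as in the hunk above
    print(changed)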
@@ -26,10 +26,8 @@ def test_pg_regress(pageserver, postgres, pg_bin, zenith_cli, test_output_dir, p
     mkdir_if_needed(os.path.join(runpath, 'testtablespace'))
 
     # Compute all the file locations that pg_regress will need.
-    build_path = os.path.join(
-        pg_distrib_dir, 'build/src/test/regress')
-    src_path = os.path.join(
-        base_dir, 'vendor/postgres/src/test/regress')
+    build_path = os.path.join(pg_distrib_dir, 'build/src/test/regress')
+    src_path = os.path.join(base_dir, 'vendor/postgres/src/test/regress')
     bindir = os.path.join(pg_distrib_dir, 'bin')
     schedule = os.path.join(src_path, 'parallel_schedule')
     pg_regress = os.path.join(build_path, 'pg_regress')
@@ -27,10 +27,8 @@ def test_zenith_regress(pageserver, postgres, pg_bin, zenith_cli, test_output_di
 
     # Compute all the file locations that pg_regress will need.
     # This test runs zenith specific tests
-    build_path = os.path.join(
-        pg_distrib_dir, 'build/src/test/regress')
-    src_path = os.path.join(
-        base_dir, 'test_runner/zenith_regress')
+    build_path = os.path.join(pg_distrib_dir, 'build/src/test/regress')
+    src_path = os.path.join(base_dir, 'test_runner/zenith_regress')
     bindir = os.path.join(pg_distrib_dir, 'bin')
     schedule = os.path.join(src_path, 'parallel_schedule')
     pg_regress = os.path.join(build_path, 'pg_regress')
@@ -4,9 +4,7 @@ import pytest
 import shutil
 import subprocess
 
-from .utils import (get_self_dir, mkdir_if_needed,
-                    subprocess_capture)
-
+from .utils import (get_self_dir, mkdir_if_needed, subprocess_capture)
 """
 This file contains pytest fixtures. A fixture is a test resource that can be
 summoned by placing its name in the test's arguments.
@@ -73,7 +71,6 @@ class ZenithCli:
     We also store an environment that will tell the CLI to operate
     on a particular ZENITH_REPO_DIR.
     """
-
     def __init__(self, binpath, repo_dir, pg_distrib_dir):
         assert os.path.isdir(binpath)
         self.binpath = binpath
@@ -98,7 +95,9 @@ class ZenithCli:
         assert type(arguments) == list
         args = [self.bin_zenith] + arguments
         print('Running command "{}"'.format(' '.join(args)))
-        return subprocess.run(args, env=self.env, check=True,
+        return subprocess.run(args,
+                              env=self.env,
+                              check=True,
                               universal_newlines=True,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE)
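Here yapf splits the long subprocess.run(...) call so that each keyword argument sits on its own line, aligned with the opening parenthesis. Several reformatted lines in this commit run close to 100 columns, which suggests a column limit wider than yapf's 79-character default; the style values below are an assumption used only to illustrate the knob, not taken from the repository:

    from yapf.yapflib.yapf_api import FormatCode

    # A made-up call of about 88 characters: over the 79-column default, but
    # within an assumed 100-column limit, so yapf would leave it on one line.
    src = ("result = subprocess.run(args, env=env, check=True, "
           "universal_newlines=True, stdout=None)\n")
    formatted, _ = FormatCode(src, style_config='{based_on_style: pep8, column_limit: 100}')
    print(formatted)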
@@ -111,7 +110,6 @@ def zenith_cli(zenith_binpath, repo_dir, pg_distrib_dir):
 
 class ZenithPageserver:
     """ An object representing a running pageserver. """
-
     def __init__(self, zenith_cli):
         self.zenith_cli = zenith_cli
         self.running = False
@@ -135,8 +133,7 @@ class ZenithPageserver:
     # returns a libpq connection string for connecting to it.
     def connstr(self):
         username = getpass.getuser()
-        conn_str = 'host={} port={} dbname=postgres user={}'.format(
-            'localhost', 64000, username)
+        conn_str = 'host={} port={} dbname=postgres user={}'.format('localhost', 64000, username)
         return conn_str
 
 
@@ -167,7 +164,6 @@ def pageserver(zenith_cli):
 
 class Postgres:
     """ An object representing a running postgres daemon. """
-
     def __init__(self, zenith_cli, repo_dir, instance_num):
         self.zenith_cli = zenith_cli
         self.instance_num = instance_num
@@ -256,15 +252,14 @@ class Postgres:
         Build a libpq connection string for the Postgres instance.
         """
 
-        conn_str = 'host={} port={} dbname={} user={}'.format(
-            self.host, self.port, dbname, self.username)
+        conn_str = 'host={} port={} dbname={} user={}'.format(self.host, self.port, dbname,
+                                                              self.username)
 
         return conn_str
 
 
 class PostgresFactory:
     """ An object representing multiple running postgres daemons. """
-
     def __init__(self, zenith_cli, repo_dir):
         self.zenith_cli = zenith_cli
         self.host = 'localhost'
@@ -294,7 +289,6 @@ def postgres(zenith_cli, repo_dir):
 
 class PgBin:
     """ A helper class for executing postgres binaries """
-
     def __init__(self, log_dir, pg_distrib_dir):
         self.log_dir = log_dir
         self.pg_install_path = pg_distrib_dir
@@ -314,7 +308,8 @@ class PgBin:
         return env
 
     def run(self, command, env=None, cwd=None):
-        """ Run one of the postgres binaries.
+        """
+        Run one of the postgres binaries.
 
         The command should be in list form, e.g. ['pgbench', '-p', '55432']
 
@@ -2,7 +2,6 @@ import pytest
 import os
 
 pytest_plugins = ("fixtures.zenith_fixtures")
-
 """
 Use this test to see what happens when tests fail.
 
@@ -13,11 +12,8 @@ Set the environment variable RUN_BROKEN to see this test run (and fail,
 and hopefully not leave any server processes behind).
 """
 
-
-run_broken = pytest.mark.skipif(
-    os.environ.get('RUN_BROKEN') is None,
-    reason="only used for testing the fixtures"
-)
+run_broken = pytest.mark.skipif(os.environ.get('RUN_BROKEN') is None,
+                                reason="only used for testing the fixtures")
 
 
 @run_broken
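A hedged sketch of how the reformatted tree could be kept clean afterwards, for example as a CI step; this is a suggestion, not something added by the commit. yapf's --diff mode exits with a nonzero status when any file would be rewritten:

    import subprocess

    result = subprocess.run(['yapf', '--diff', '--recursive', 'test_runner/'],
                            stdout=subprocess.PIPE,
                            universal_newlines=True)
    if result.returncode != 0:
        print(result.stdout)  # the diff yapf would apply
        raise SystemExit('formatting drift: run "yapf -i -r test_runner/"')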