From 7d5f7462c1587e99a3888e13987222511b952adb Mon Sep 17 00:00:00 2001 From: Dmitry Ivanov Date: Tue, 1 Jun 2021 19:29:12 +0300 Subject: [PATCH] Tidy up pytest-based tests --- .../batch_others/test_branch_behind.py | 51 +++--- test_runner/batch_others/test_config.py | 27 ++-- test_runner/batch_others/test_createdb.py | 29 ++-- test_runner/batch_others/test_multixact.py | 24 +-- .../batch_others/test_pageserver_api.py | 13 +- test_runner/batch_others/test_pgbench.py | 6 +- .../batch_others/test_restart_compute.py | 41 +++-- test_runner/batch_others/test_twophase.py | 43 +++-- test_runner/batch_others/test_zenith_cli.py | 27 ++-- .../batch_pg_regress/test_isolation.py | 10 +- .../batch_pg_regress/test_pg_regress.py | 11 +- .../batch_pg_regress/test_zenith_regress.py | 11 +- test_runner/fixtures/utils.py | 2 +- test_runner/fixtures/zenith_fixtures.py | 147 +++++++++++++----- test_runner/setup.cfg | 12 ++ test_runner/test_broken.py | 9 +- 16 files changed, 271 insertions(+), 192 deletions(-) create mode 100644 test_runner/setup.cfg diff --git a/test_runner/batch_others/test_branch_behind.py b/test_runner/batch_others/test_branch_behind.py index a4b5599aee..2f9a20af05 100644 --- a/test_runner/batch_others/test_branch_behind.py +++ b/test_runner/batch_others/test_branch_behind.py @@ -1,49 +1,60 @@ -import pytest -import getpass import psycopg2 pytest_plugins = ("fixtures.zenith_fixtures") + # # Create a couple of branches off the main branch, at a historical point in time. 
# def test_branch_behind(zenith_cli, pageserver, postgres, pg_bin): # Branch at the point where only 100 rows were inserted - zenith_cli.run(["branch", "test_branch_behind", "empty"]); + zenith_cli.run(["branch", "test_branch_behind", "empty"]) pgmain = postgres.create_start('test_branch_behind') print("postgres is running on 'test_branch_behind' branch") - main_pg_conn = psycopg2.connect(pgmain.connstr()); + main_pg_conn = psycopg2.connect(pgmain.connstr()) main_pg_conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT) main_cur = main_pg_conn.cursor() # Create table, and insert the first 100 rows - main_cur.execute('CREATE TABLE foo (t text)'); - main_cur.execute("INSERT INTO foo SELECT 'long string to consume some space' || g FROM generate_series(1, 100) g"); - main_cur.execute('SELECT pg_current_wal_insert_lsn()'); + main_cur.execute('CREATE TABLE foo (t text)') + main_cur.execute(''' + INSERT INTO foo + SELECT 'long string to consume some space' || g + FROM generate_series(1, 100) g + ''') + main_cur.execute('SELECT pg_current_wal_insert_lsn()') lsn_a = main_cur.fetchone()[0] print('LSN after 100 rows: ' + lsn_a) # Insert some more rows. (This generates enough WAL to fill a few segments.) - main_cur.execute("INSERT INTO foo SELECT 'long string to consume some space' || g FROM generate_series(1, 100000) g"); - main_cur.execute('SELECT pg_current_wal_insert_lsn()'); + main_cur.execute(''' + INSERT INTO foo + SELECT 'long string to consume some space' || g + FROM generate_series(1, 100000) g + ''') + main_cur.execute('SELECT pg_current_wal_insert_lsn()') lsn_b = main_cur.fetchone()[0] print('LSN after 100100 rows: ' + lsn_b) # Branch at the point where only 100 rows were inserted - zenith_cli.run(["branch", "test_branch_behind_hundred", "test_branch_behind@"+lsn_a]); + zenith_cli.run(["branch", "test_branch_behind_hundred", "test_branch_behind@"+lsn_a]) # Insert many more rows. This generates enough WAL to fill a few segments. 
- main_cur.execute("INSERT INTO foo SELECT 'long string to consume some space' || g FROM generate_series(1, 100000) g"); - main_cur.execute('SELECT pg_current_wal_insert_lsn()'); + main_cur.execute(''' + INSERT INTO foo + SELECT 'long string to consume some space' || g + FROM generate_series(1, 100000) g + ''') + main_cur.execute('SELECT pg_current_wal_insert_lsn()') - main_cur.execute('SELECT pg_current_wal_insert_lsn()'); + main_cur.execute('SELECT pg_current_wal_insert_lsn()') lsn_c = main_cur.fetchone()[0] print('LSN after 200100 rows: ' + lsn_c) # Branch at the point where only 200 rows were inserted - zenith_cli.run(["branch", "test_branch_behind_more", "test_branch_behind@"+lsn_b]); + zenith_cli.run(["branch", "test_branch_behind_more", "test_branch_behind@"+lsn_b]) pg_hundred = postgres.create_start("test_branch_behind_hundred") pg_more = postgres.create_start("test_branch_behind_more") @@ -52,16 +63,16 @@ def test_branch_behind(zenith_cli, pageserver, postgres, pg_bin): hundred_pg_conn = psycopg2.connect(pg_hundred.connstr()) hundred_pg_conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT) hundred_cur = hundred_pg_conn.cursor() - hundred_cur.execute('SELECT count(*) FROM foo'); - assert(hundred_cur.fetchone()[0] == 100); + hundred_cur.execute('SELECT count(*) FROM foo') + assert hundred_cur.fetchone() == (100,) # On the 'more' branch, we should see 100200 rows more_pg_conn = psycopg2.connect(pg_more.connstr()) more_pg_conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT) more_cur = more_pg_conn.cursor() - more_cur.execute('SELECT count(*) FROM foo'); - assert(more_cur.fetchone()[0] == 100100); + more_cur.execute('SELECT count(*) FROM foo') + assert more_cur.fetchone() == (100100,) # All the rows are visible on the main branch - main_cur.execute('SELECT count(*) FROM foo'); - assert(main_cur.fetchone()[0] == 200100); + main_cur.execute('SELECT count(*) FROM foo') + assert main_cur.fetchone() == (200100,) diff --git 
a/test_runner/batch_others/test_config.py b/test_runner/batch_others/test_config.py index 62b69fc198..bf527987e9 100644 --- a/test_runner/batch_others/test_config.py +++ b/test_runner/batch_others/test_config.py @@ -1,6 +1,3 @@ -import pytest -import os -import getpass import psycopg2 pytest_plugins = ("fixtures.zenith_fixtures") @@ -11,20 +8,24 @@ pytest_plugins = ("fixtures.zenith_fixtures") # def test_config(zenith_cli, pageserver, postgres, pg_bin): # Create a branch for us - zenith_cli.run(["branch", "test_config", "empty"]); + zenith_cli.run(["branch", "test_config", "empty"]) # change config pg = postgres.create_start('test_config', ['log_min_messages=debug1']) print('postgres is running on test_config branch') - pg_conn = psycopg2.connect(pg.connstr()) - pg_conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT) - cur = pg_conn.cursor() + with psycopg2.connect(pg.connstr()) as conn: + conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT) - #check that config change was applied - cur.execute('SELECT name, setting from pg_settings WHERE source!=%s and source!=%s', ("default","override",)) - for record in cur: - if record[0] == 'log_min_messages': - assert(record[1] == 'debug1') + with conn.cursor() as cur: + cur.execute(''' + SELECT setting + FROM pg_settings + WHERE + source != 'default' + AND source != 'override' + AND name = 'log_min_messages' + ''') - pg_conn.close() + # check that config change was applied + assert cur.fetchone() == ('debug1',) diff --git a/test_runner/batch_others/test_createdb.py b/test_runner/batch_others/test_createdb.py index 2447effa7f..727e0a604d 100644 --- a/test_runner/batch_others/test_createdb.py +++ b/test_runner/batch_others/test_createdb.py @@ -1,37 +1,34 @@ -import pytest -import getpass import psycopg2 pytest_plugins = ("fixtures.zenith_fixtures") + # # Test CREATE DATABASE when there have been relmapper changes # def test_createdb(zenith_cli, pageserver, postgres, pg_bin): - 
zenith_cli.run(["branch", "test_createdb", "empty"]); + zenith_cli.run(["branch", "test_createdb", "empty"]) pg = postgres.create_start('test_createdb') print("postgres is running on 'test_createdb' branch") - conn = psycopg2.connect(pg.connstr()); - conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT) - cur = conn.cursor() + with psycopg2.connect(pg.connstr()) as conn: + conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT) - # Cause a 'relmapper' change in the original branch - cur.execute('VACUUM FULL pg_class'); + with conn.cursor() as cur: + # Cause a 'relmapper' change in the original branch + cur.execute('VACUUM FULL pg_class') - cur.execute('CREATE DATABASE foodb'); + cur.execute('CREATE DATABASE foodb') - cur.execute('SELECT pg_current_wal_insert_lsn()'); - lsn = cur.fetchone()[0] - - conn.close(); + cur.execute('SELECT pg_current_wal_insert_lsn()') + lsn = cur.fetchone()[0] # Create a branch - zenith_cli.run(["branch", "test_createdb2", "test_createdb@"+lsn]); + zenith_cli.run(["branch", "test_createdb2", "test_createdb@"+lsn]) pg2 = postgres.create_start('test_createdb2') # Test that you can connect to the new database on both branches - conn = psycopg2.connect(pg.connstr('foodb')); - conn2 = psycopg2.connect(pg2.connstr('foodb')); + for db in (pg, pg2): + psycopg2.connect(db.connstr('foodb')).close() diff --git a/test_runner/batch_others/test_multixact.py b/test_runner/batch_others/test_multixact.py index 2119c9f7b4..1e30ceb346 100644 --- a/test_runner/batch_others/test_multixact.py +++ b/test_runner/batch_others/test_multixact.py @@ -1,9 +1,8 @@ -import pytest -import os import psycopg2 pytest_plugins = ("fixtures.zenith_fixtures") + # # Test multixact state after branching # Now this test is very minimalistic - @@ -11,7 +10,6 @@ pytest_plugins = ("fixtures.zenith_fixtures") # since we don't have functions to check multixact internals. 
# def test_multixact(pageserver, postgres, pg_bin, zenith_cli, base_dir): - # Create a branch for us zenith_cli.run(["branch", "test_multixact", "empty"]) pg = postgres.create_start('test_multixact') @@ -21,10 +19,12 @@ def test_multixact(pageserver, postgres, pg_bin, zenith_cli, base_dir): pg_conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT) cur = pg_conn.cursor() - cur.execute('CREATE TABLE t1(i int primary key);' - 'INSERT INTO t1 select * from generate_series(1,100);') + cur.execute(''' + CREATE TABLE t1(i int primary key); + INSERT INTO t1 select * from generate_series(1, 100); + ''') - cur.execute('SELECT next_multixact_id FROM pg_control_checkpoint();') + cur.execute('SELECT next_multixact_id FROM pg_control_checkpoint()') next_multixact_id_old = cur.fetchone()[0] # Lock entries in parallel connections to set multixact @@ -33,7 +33,7 @@ def test_multixact(pageserver, postgres, pg_bin, zenith_cli, base_dir): for i in range(nclients): con = psycopg2.connect(pg.connstr()) # Do not turn on autocommit. We want to hold the key-share locks. - con.cursor().execute('select * from t1 for key share;') + con.cursor().execute('select * from t1 for key share') connections.append(con) # We should have a multixact now. We can close the connections. 
@@ -43,16 +43,16 @@ def test_multixact(pageserver, postgres, pg_bin, zenith_cli, base_dir): # force wal flush cur.execute('checkpoint') - cur.execute('SELECT next_multixact_id, pg_current_wal_flush_lsn() FROM pg_control_checkpoint();') + cur.execute('SELECT next_multixact_id, pg_current_wal_flush_lsn() FROM pg_control_checkpoint()') res = cur.fetchone() next_multixact_id = res[0] lsn = res[1] # Ensure that we did lock some tuples - assert(int(next_multixact_id) > int(next_multixact_id_old)) + assert int(next_multixact_id) > int(next_multixact_id_old) # Branch at this point - zenith_cli.run(["branch", "test_multixact_new", "test_multixact@"+lsn]); + zenith_cli.run(["branch", "test_multixact_new", "test_multixact@"+lsn]) pg_new = postgres.create_start('test_multixact_new') print("postgres is running on 'test_multixact_new' branch") @@ -60,8 +60,8 @@ def test_multixact(pageserver, postgres, pg_bin, zenith_cli, base_dir): pg_new_conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT) cur_new = pg_new_conn.cursor() - cur_new.execute('SELECT next_multixact_id FROM pg_control_checkpoint();') + cur_new.execute('SELECT next_multixact_id FROM pg_control_checkpoint()') next_multixact_id_new = cur_new.fetchone()[0] # Check that we restored pg_controlfile correctly - assert(next_multixact_id_new == next_multixact_id) + assert next_multixact_id_new == next_multixact_id diff --git a/test_runner/batch_others/test_pageserver_api.py b/test_runner/batch_others/test_pageserver_api.py index af1c6723db..795caebe35 100644 --- a/test_runner/batch_others/test_pageserver_api.py +++ b/test_runner/batch_others/test_pageserver_api.py @@ -1,28 +1,27 @@ -import pytest import psycopg2 -import getpass import json pytest_plugins = ("fixtures.zenith_fixtures") + def test_status(pageserver): pg_conn = psycopg2.connect(pageserver.connstr()) pg_conn.autocommit = True cur = pg_conn.cursor() - cur.execute('status;') + cur.execute('status') assert cur.fetchone() == ('hello world',) 
pg_conn.close() -def test_branch_list(pageserver, zenith_cli): +def test_branch_list(pageserver, zenith_cli): # Create a branch for us - zenith_cli.run(["branch", "test_branch_list_main", "empty"]); + zenith_cli.run(["branch", "test_branch_list_main", "empty"]) page_server_conn = psycopg2.connect(pageserver.connstr()) page_server_conn.autocommit = True page_server_cur = page_server_conn.cursor() - page_server_cur.execute('branch_list;') + page_server_cur.execute('branch_list') branches = json.loads(page_server_cur.fetchone()[0]) # Filter out branches created by other tests branches = [x for x in branches if x['name'].startswith('test_branch_list')] @@ -38,7 +37,7 @@ def test_branch_list(pageserver, zenith_cli): zenith_cli.run(['branch', 'test_branch_list_experimental', 'test_branch_list_main']) zenith_cli.run(['pg', 'create', 'test_branch_list_experimental']) - page_server_cur.execute('branch_list;') + page_server_cur.execute('branch_list') new_branches = json.loads(page_server_cur.fetchone()[0]) # Filter out branches created by other tests new_branches = [x for x in new_branches if x['name'].startswith('test_branch_list')] diff --git a/test_runner/batch_others/test_pgbench.py b/test_runner/batch_others/test_pgbench.py index b668012f0c..486dd77496 100644 --- a/test_runner/batch_others/test_pgbench.py +++ b/test_runner/batch_others/test_pgbench.py @@ -1,17 +1,15 @@ -import pytest - pytest_plugins = ("fixtures.zenith_fixtures") def test_pgbench(pageserver, postgres, pg_bin, zenith_cli): # Create a branch for us - zenith_cli.run(["branch", "test_pgbench", "empty"]); + zenith_cli.run(["branch", "test_pgbench", "empty"]) pg = postgres.create_start('test_pgbench') print("postgres is running on 'test_pgbench' branch") - connstr = pg.connstr(); + connstr = pg.connstr() pg_bin.run_capture(['pgbench', '-i', connstr]) pg_bin.run_capture(['pgbench'] + '-c 10 -T 5 -P 1 -M prepared'.split() + [connstr]) diff --git a/test_runner/batch_others/test_restart_compute.py 
b/test_runner/batch_others/test_restart_compute.py index b6e13b3f36..4f75779512 100644 --- a/test_runner/batch_others/test_restart_compute.py +++ b/test_runner/batch_others/test_restart_compute.py @@ -1,43 +1,42 @@ -import pytest -import getpass import psycopg2 import time pytest_plugins = ("fixtures.zenith_fixtures") + # # Test restarting and recreating a postgres instance # def test_restart_compute(zenith_cli, pageserver, postgres, pg_bin): - zenith_cli.run(["branch", "test_restart_compute", "empty"]); + zenith_cli.run(["branch", "test_restart_compute", "empty"]) pg = postgres.create_start('test_restart_compute') print("postgres is running on 'test_restart_compute' branch") - pg_conn = psycopg2.connect(pg.connstr()); + pg_conn = psycopg2.connect(pg.connstr()) pg_conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT) cur = pg_conn.cursor() # Create table, and insert a row - cur.execute('CREATE TABLE foo (t text)'); - cur.execute("INSERT INTO foo VALUES ('bar')"); + cur.execute('CREATE TABLE foo (t text)') + cur.execute("INSERT INTO foo VALUES ('bar')") # Stop and restart the Postgres instance - pg_conn.close(); - pg.stop(); - pg.start(); - pg_conn = psycopg2.connect(pg.connstr()); + pg_conn.close() + pg.stop() + pg.start() + pg_conn = psycopg2.connect(pg.connstr()) pg_conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT) cur = pg_conn.cursor() # We can still see the row - cur.execute('SELECT count(*) FROM foo'); - assert(cur.fetchone()[0] == 1); + cur.execute('SELECT count(*) FROM foo') + assert cur.fetchone() == (1,) # Insert another row - cur.execute("INSERT INTO foo VALUES ('bar2')"); - cur.execute('SELECT count(*) FROM foo'); - assert(cur.fetchone()[0] == 2); + cur.execute("INSERT INTO foo VALUES ('bar2')") + cur.execute('SELECT count(*) FROM foo') + assert cur.fetchone() == (2,) # FIXME: Currently, there is no guarantee that by the time the INSERT commits, the WAL # has been streamed safely to the WAL safekeeper or page 
server. It is merely stored @@ -47,13 +46,13 @@ def test_restart_compute(zenith_cli, pageserver, postgres, pg_bin): time.sleep(5) # Stop, and destroy the Postgres instance. Then recreate and restart it. - pg_conn.close(); - pg.stop_and_destroy(); - pg.create_start('test_restart_compute'); - pg_conn = psycopg2.connect(pg.connstr()); + pg_conn.close() + pg.stop_and_destroy() + pg.create_start('test_restart_compute') + pg_conn = psycopg2.connect(pg.connstr()) pg_conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT) cur = pg_conn.cursor() # We can still see the rows - cur.execute('SELECT count(*) FROM foo'); - assert(cur.fetchone()[0] == 2); + cur.execute('SELECT count(*) FROM foo') + assert cur.fetchone() == (2,) diff --git a/test_runner/batch_others/test_twophase.py b/test_runner/batch_others/test_twophase.py index 679fc43b71..34abfa2f8a 100644 --- a/test_runner/batch_others/test_twophase.py +++ b/test_runner/batch_others/test_twophase.py @@ -1,50 +1,49 @@ -# -# Test branching, when a transaction is in prepared state -# -import pytest -import getpass import psycopg2 pytest_plugins = ("fixtures.zenith_fixtures") + +# +# Test branching, when a transaction is in prepared state +# def test_twophase(zenith_cli, pageserver, postgres, pg_bin): - zenith_cli.run(["branch", "test_twophase", "empty"]); + zenith_cli.run(["branch", "test_twophase", "empty"]) pg = postgres.create_start('test_twophase', ['max_prepared_transactions=5']) print("postgres is running on 'test_twophase' branch") - conn = psycopg2.connect(pg.connstr()); + conn = psycopg2.connect(pg.connstr()) conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT) cur = conn.cursor() - cur.execute('CREATE TABLE foo (t text)'); + cur.execute('CREATE TABLE foo (t text)') # Prepare a transaction that will insert a row - cur.execute('BEGIN'); - cur.execute("INSERT INTO foo VALUES ('one')"); - cur.execute("PREPARE TRANSACTION 'insert_one'"); + cur.execute('BEGIN') + cur.execute("INSERT INTO 
foo VALUES ('one')") + cur.execute("PREPARE TRANSACTION 'insert_one'") # Prepare another transaction that will insert a row - cur.execute('BEGIN'); - cur.execute("INSERT INTO foo VALUES ('two')"); - cur.execute("PREPARE TRANSACTION 'insert_two'"); + cur.execute('BEGIN') + cur.execute("INSERT INTO foo VALUES ('two')") + cur.execute("PREPARE TRANSACTION 'insert_two'") # Create a branch with the transaction in prepared state - zenith_cli.run(["branch", "test_twophase_prepared", "test_twophase"]); + zenith_cli.run(["branch", "test_twophase_prepared", "test_twophase"]) pg2 = postgres.create_start('test_twophase_prepared', ['max_prepared_transactions=5']) - conn2 = psycopg2.connect(pg2.connstr()); + conn2 = psycopg2.connect(pg2.connstr()) conn2.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT) cur2 = conn2.cursor() # On the new branch, commit one of the prepared transactions, abort the other one. - cur2.execute("COMMIT PREPARED 'insert_one'"); - cur2.execute("ROLLBACK PREPARED 'insert_two'"); + cur2.execute("COMMIT PREPARED 'insert_one'") + cur2.execute("ROLLBACK PREPARED 'insert_two'") - cur2.execute('SELECT * FROM foo'); - assert(cur2.fetchall() == [('one',)]); + cur2.execute('SELECT * FROM foo') + assert cur2.fetchall() == [('one',)] # Neither insert is visible on the original branch, the transactions are still # in prepared state there. - cur.execute('SELECT * FROM foo'); - assert(cur.fetchall() == []); + cur.execute('SELECT * FROM foo') + assert cur.fetchall() == [] diff --git a/test_runner/batch_others/test_zenith_cli.py b/test_runner/batch_others/test_zenith_cli.py index 510205c003..265831b5a2 100644 --- a/test_runner/batch_others/test_zenith_cli.py +++ b/test_runner/batch_others/test_zenith_cli.py @@ -1,25 +1,26 @@ -import pytest import psycopg2 import json pytest_plugins = ("fixtures.zenith_fixtures") + def helper_compare_branch_list(page_server_cur, zenith_cli): """ Compare branches list returned by CLI and directly via API. 
Filters out branches created by other tests. """ - page_server_cur.execute('branch_list;') + page_server_cur.execute('branch_list') branches_api = sorted(map(lambda b: b['name'], json.loads(page_server_cur.fetchone()[0]))) branches_api = [b for b in branches_api if b.startswith('test_cli_') or b in ('empty', 'main')] - res = zenith_cli.run(["branch"]); - assert(res.stderr == '') + res = zenith_cli.run(["branch"]) + assert res.stderr == '' branches_cli = sorted(map(lambda b: b.split(':')[-1].strip(), res.stdout.strip().split("\n"))) branches_cli = [b for b in branches_cli if b.startswith('test_cli_') or b in ('empty', 'main')] - assert(branches_api == branches_cli) + assert branches_api == branches_cli + def test_cli_branch_list(pageserver, zenith_cli): @@ -31,19 +32,19 @@ def test_cli_branch_list(pageserver, zenith_cli): helper_compare_branch_list(page_server_cur, zenith_cli) # Create a branch for us - res = zenith_cli.run(["branch", "test_cli_branch_list_main", "main"]); - assert(res.stderr == '') + res = zenith_cli.run(["branch", "test_cli_branch_list_main", "main"]) + assert res.stderr == '' helper_compare_branch_list(page_server_cur, zenith_cli) # Create a nested branch - res = zenith_cli.run(["branch", "test_cli_branch_list_nested", "test_cli_branch_list_main"]); - assert(res.stderr == '') + res = zenith_cli.run(["branch", "test_cli_branch_list_nested", "test_cli_branch_list_main"]) + assert res.stderr == '' helper_compare_branch_list(page_server_cur, zenith_cli) # Check that all new branches are visible via CLI - res = zenith_cli.run(["branch"]); - assert(res.stderr == '') + res = zenith_cli.run(["branch"]) + assert res.stderr == '' branches_cli = sorted(map(lambda b: b.split(':')[-1].strip(), res.stdout.strip().split("\n"))) - assert('test_cli_branch_list_main' in branches_cli) - assert('test_cli_branch_list_nested' in branches_cli) + assert 'test_cli_branch_list_main' in branches_cli + assert 'test_cli_branch_list_nested' in branches_cli diff --git 
a/test_runner/batch_pg_regress/test_isolation.py b/test_runner/batch_pg_regress/test_isolation.py index 1e66a2a654..8d910205c1 100644 --- a/test_runner/batch_pg_regress/test_isolation.py +++ b/test_runner/batch_pg_regress/test_isolation.py @@ -1,16 +1,16 @@ -import pytest -from fixtures.utils import mkdir_if_needed -import getpass import os import psycopg2 +from fixtures.utils import mkdir_if_needed + pytest_plugins = ("fixtures.zenith_fixtures") -def test_isolation(pageserver, postgres, pg_bin, zenith_cli, test_output_dir, pg_distrib_dir, base_dir, capsys): +def test_isolation(pageserver, postgres, pg_bin, zenith_cli, test_output_dir, pg_distrib_dir, + base_dir, capsys): # Create a branch for us - zenith_cli.run(["branch", "test_isolation", "empty"]); + zenith_cli.run(["branch", "test_isolation", "empty"]) # Connect to postgres and create a database called "regression". # isolation tests use prepared transactions, so enable them diff --git a/test_runner/batch_pg_regress/test_pg_regress.py b/test_runner/batch_pg_regress/test_pg_regress.py index 085db529af..536ae3e944 100644 --- a/test_runner/batch_pg_regress/test_pg_regress.py +++ b/test_runner/batch_pg_regress/test_pg_regress.py @@ -1,15 +1,16 @@ -import pytest -from fixtures.utils import mkdir_if_needed -import getpass import os import psycopg2 +from fixtures.utils import mkdir_if_needed + pytest_plugins = ("fixtures.zenith_fixtures") -def test_pg_regress(pageserver, postgres, pg_bin, zenith_cli, test_output_dir, pg_distrib_dir, base_dir, capsys): + +def test_pg_regress(pageserver, postgres, pg_bin, zenith_cli, test_output_dir, pg_distrib_dir, + base_dir, capsys): # Create a branch for us - zenith_cli.run(["branch", "test_pg_regress", "empty"]); + zenith_cli.run(["branch", "test_pg_regress", "empty"]) # Connect to postgres and create a database called "regression". 
pg = postgres.create_start('test_pg_regress') diff --git a/test_runner/batch_pg_regress/test_zenith_regress.py b/test_runner/batch_pg_regress/test_zenith_regress.py index 8f0dacf3c2..45efddc689 100644 --- a/test_runner/batch_pg_regress/test_zenith_regress.py +++ b/test_runner/batch_pg_regress/test_zenith_regress.py @@ -1,15 +1,16 @@ -import pytest -from fixtures.utils import mkdir_if_needed -import getpass import os import psycopg2 +from fixtures.utils import mkdir_if_needed + pytest_plugins = ("fixtures.zenith_fixtures") -def test_zenith_regress(pageserver, postgres, pg_bin, zenith_cli, test_output_dir, pg_distrib_dir, base_dir, capsys): + +def test_zenith_regress(pageserver, postgres, pg_bin, zenith_cli, test_output_dir, pg_distrib_dir, + base_dir, capsys): # Create a branch for us - zenith_cli.run(["branch", "test_zenith_regress", "empty"]); + zenith_cli.run(["branch", "test_zenith_regress", "empty"]) # Connect to postgres and create a database called "regression". pg = postgres.create_start('test_zenith_regress') diff --git a/test_runner/fixtures/utils.py b/test_runner/fixtures/utils.py index 70d8db3769..4ab283a974 100644 --- a/test_runner/fixtures/utils.py +++ b/test_runner/fixtures/utils.py @@ -1,7 +1,7 @@ - import os import subprocess + def get_self_dir(): """ Get the path to the directory where this script lives. """ return os.path.dirname(os.path.abspath(__file__)) diff --git a/test_runner/fixtures/zenith_fixtures.py b/test_runner/fixtures/zenith_fixtures.py index 1980d3ca1c..6e3a719eac 100644 --- a/test_runner/fixtures/zenith_fixtures.py +++ b/test_runner/fixtures/zenith_fixtures.py @@ -1,12 +1,11 @@ import getpass import os -import psycopg2 import pytest import shutil import subprocess -import sys + from .utils import (get_self_dir, mkdir_if_needed, - subprocess_capture, global_counter) + subprocess_capture) """ This file contains pytest fixtures. A fixture is a test resource that can be @@ -20,7 +19,7 @@ ZENITH_BIN, POSTGRES_DISTRIB_DIR, etc. 
See README.md for more information. To use fixtures in a test file, add this line of code: - pytest_plugins = ("fixtures.zenith_fixtures") +>>> pytest_plugins = ("fixtures.zenith_fixtures") Don't import functions from this file, or pytest will emit warnings. Instead put directly-importable functions into utils.py or another separate file. @@ -35,7 +34,8 @@ def determine_scope(fixture_name, config): def zenfixture(func): - """ This is a python decorator for fixtures with a flexible scope. + """ + This is a python decorator for fixtures with a flexible scope. By default every test function will set up and tear down a new database. In pytest, this is called fixtures "function" scope. @@ -43,8 +43,8 @@ def zenfixture(func): If the environment variable TEST_SHARED_FIXTURES is set, then all tests will share the same database. State, logs, etc. will be stored in a directory called "shared". - """ + if os.environ.get('TEST_SHARED_FIXTURES') is None: scope = 'function' else: @@ -55,6 +55,7 @@ def zenfixture(func): @pytest.fixture(autouse=True, scope='session') def safety_check(): """ Ensure that no unwanted daemons are running before we start testing. """ + # does not use -c as it is not supported on macOS cmd = ['pgrep', 'pageserver|postgres|wal_acceptor'] result = subprocess.run(cmd, stdout=subprocess.DEVNULL) @@ -66,7 +67,8 @@ def safety_check(): class ZenithCli: - """ An object representing the CLI binary named "zenith". + """ + An object representing the CLI binary named "zenith". We also store an environment that will tell the CLI to operate on a particular ZENITH_REPO_DIR. @@ -81,21 +83,25 @@ class ZenithCli: self.env['POSTGRES_DISTRIB_DIR'] = pg_distrib_dir def run(self, arguments): - """ Run "zenith" with the specified arguments. + """ + Run "zenith" with the specified arguments. - arguments must be in list form, e.g. ['pg', 'create'] + Arguments must be in list form, e.g. 
['pg', 'create'] Return both stdout and stderr, which can be accessed as - result = zenith_cli.run(...) - assert(result.stderr == "") - print(result.stdout) - + >>> result = zenith_cli.run(...) + >>> assert result.stderr == "" + >>> print(result.stdout) """ + assert type(arguments) == list args = [self.bin_zenith] + arguments print('Running command "{}"'.format(' '.join(args))) - return subprocess.run(args, env=self.env, check=True, universal_newlines=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + return subprocess.run(args, env=self.env, check=True, + universal_newlines=True, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE) @zenfixture @@ -133,27 +139,32 @@ class ZenithPageserver: 'localhost', 64000, username) return conn_str -# The 'pageserver' fixture provides a Page Server that's up and running. -# -# If TEST_SHARED_FIXTURES is set, the Page Server instance is shared by all -# the tests. To avoid clashing with other tests, don't use the 'main' branch in -# the tests directly. Instead, create a branch off the 'empty' branch and use -# that. -# -# By convention, the test branches are named after the tests. For example, -# test called 'test_foo' would create and use branches with the 'test_foo' prefix. + @zenfixture def pageserver(zenith_cli): + """ + The 'pageserver' fixture provides a Page Server that's up and running. + + If TEST_SHARED_FIXTURES is set, the Page Server instance is shared by all + the tests. To avoid clashing with other tests, don't use the 'main' branch in + the tests directly. Instead, create a branch off the 'empty' branch and use + that. + + By convention, the test branches are named after the tests. For example, + test called 'test_foo' would create and use branches with the 'test_foo' prefix. + """ + ps = ZenithPageserver(zenith_cli) ps.init() ps.start() # For convenience in tests, create a branch from the freshly-initialized cluster. 
- zenith_cli.run(["branch", "empty", "main"]); + zenith_cli.run(["branch", "empty", "main"]) yield ps # After the yield comes any cleanup code we need. print('Starting pageserver cleanup') ps.stop() + class Postgres: """ An object representing a running postgres daemon. """ @@ -169,22 +180,37 @@ class Postgres: # path to conf is /pgdatadirs//postgresql.conf def create(self, branch, config_lines=None): - """ create the pg data directory """ + """ + Create the pg data directory. + Returns self. + """ + self.zenith_cli.run(['pg', 'create', branch]) self.branch = branch if config_lines is None: config_lines = [] self.config(config_lines) - return + + return self def start(self): - """ start the server """ + """ + Start the Postgres instance. + Returns self. + """ + self.zenith_cli.run(['pg', 'start', self.branch]) self.running = True - return - #lines should be an array of valid postgresql.conf rows + return self + def config(self, lines): + """ + Add lines to postgresql.conf. + Lines should be an array of valid postgresql.conf rows. + Returns self. + """ + filename = 'pgdatadirs/{}/postgresql.conf'.format(self.branch) config_name = os.path.join(self.repo_dir, filename) with open(config_name, 'a') as conf: @@ -192,27 +218,53 @@ class Postgres: conf.write(line) conf.write('\n') + return self + def stop(self): - """ stop the server """ + """ + Stop the Postgres instance if it's running. + Returns self. + """ + if self.running: self.zenith_cli.run(['pg', 'stop', self.branch]) + return self + def stop_and_destroy(self): + """ + Stop the Postgres instance, then destroy it. + Returns self. + """ + self.zenith_cli.run(['pg', 'stop', '--destroy', self.branch]) - def create_start(self, branch, config_lines=None): - self.create(branch, config_lines); - self.start(); - return + return self + + def create_start(self, branch, config_lines=None): + """ + Create a Postgres instance, then start it. + Returns self. 
+ """ + + self.create(branch, config_lines).start() + + return self - # Return a libpq connection string to connect to the Postgres instance def connstr(self, dbname='postgres'): + """ + Build a libpq connection string for the Postgres instance. + """ + conn_str = 'host={} port={} dbname={} user={}'.format( self.host, self.port, dbname, self.username) + return conn_str + class PostgresFactory: """ An object representing multiple running postgres daemons. """ + def __init__(self, zenith_cli, repo_dir): self.zenith_cli = zenith_cli self.host = 'localhost' @@ -224,13 +276,13 @@ class PostgresFactory: pg = Postgres(self.zenith_cli, self.repo_dir, self.num_instances + 1) self.num_instances += 1 self.instances.append(pg) - pg.create_start(branch, config_lines) - return pg + return pg.create_start(branch, config_lines) def stop_all(self): for pg in self.instances: pg.stop() + @zenfixture def postgres(zenith_cli, repo_dir): pgfactory = PostgresFactory(zenith_cli, repo_dir) @@ -251,7 +303,7 @@ class PgBin: self.env['LD_LIBRARY_PATH'] = os.path.join(self.pg_install_path, 'lib') def _fixpath(self, command): - if not '/' in command[0]: + if '/' not in command[0]: command[0] = os.path.join(self.pg_bin_path, command[0]) def _build_env(self, env_add): @@ -272,18 +324,20 @@ class PgBin: characters present), then it will be edited to include the correct path. If you want stdout/stderr captured to files, use `run_capture` instead. - """ + self._fixpath(command) print('Running command "{}"'.format(' '.join(command))) env = self._build_env(env) subprocess.run(command, env=env, cwd=cwd, check=True) def run_capture(self, command, env=None, cwd=None): - """ Run one of the postgres binaries, with stderr and stdout redirected to a file. + """ + Run one of the postgres binaries, with stderr and stdout redirected to a file. This is just like `run`, but for chatty programs. 
"""
+
         self._fixpath(command)
         print('Running command "{}"'.format(' '.join(command)))
         env = self._build_env(env)
@@ -298,6 +352,7 @@ def pg_bin(test_output_dir, pg_distrib_dir):
 @zenfixture
 def base_dir():
     """ find the base directory (currently this is the git root) """
+
     base_dir = os.path.normpath(os.path.join(get_self_dir(), '../..'))
     print('base_dir is', base_dir)
     return base_dir
@@ -306,6 +361,7 @@ def base_dir():
 @zenfixture
 def top_output_dir(base_dir):
     """ Compute the top-level directory for all tests. """
+
     env_test_output = os.environ.get('TEST_OUTPUT')
     if env_test_output is not None:
         output_dir = env_test_output
@@ -318,6 +374,7 @@ def top_output_dir(base_dir):
 @zenfixture
 def test_output_dir(request, top_output_dir):
     """ Compute the working directory for an individual test. """
+
     if os.environ.get('TEST_SHARED_FIXTURES') is None:
         # one directory per test
         test_name = request.node.name
@@ -334,18 +391,21 @@ def test_output_dir(request, top_output_dir):
 
 @zenfixture
 def repo_dir(request, test_output_dir):
-    """ Compute the test repo_dir
+    """
+    Compute the test repo_dir.
 
     "repo_dir" is the place where all of the pageserver files will go.
     It doesn't have anything to do with the git repo.
     """
+
     repo_dir = os.path.join(test_output_dir, 'repo')
     return repo_dir
 
 
 @zenfixture
 def zenith_binpath(base_dir):
-    """ find the zenith binaries """
+    """ Find the zenith binaries. """
+
     env_zenith_bin = os.environ.get('ZENITH_BIN')
     if env_zenith_bin:
         zenith_dir = env_zenith_bin
@@ -358,7 +418,8 @@ def zenith_binpath(base_dir):
 
 @zenfixture
 def pg_distrib_dir(base_dir):
-    """ find the postgress install """
+    """ Find the postgres install. """
+
     env_postgres_bin = os.environ.get('POSTGRES_DISTRIB_DIR')
     if env_postgres_bin:
         pg_dir = env_postgres_bin
diff --git a/test_runner/setup.cfg b/test_runner/setup.cfg
new file mode 100644
index 0000000000..c2c1aa2176
--- /dev/null
+++ b/test_runner/setup.cfg
@@ -0,0 +1,12 @@
+# Just trying to gather linter settings in one file.
+# I wonder if there's a way to de-duplicate them... + +[flake8] +max-line-length = 100 + +[pycodestyle] +max-line-length = 100 + +[yapf] +based_on_style = pep8 +column_limit = 100 diff --git a/test_runner/test_broken.py b/test_runner/test_broken.py index 4d125c9047..3b1cbc3805 100644 --- a/test_runner/test_broken.py +++ b/test_runner/test_broken.py @@ -4,7 +4,6 @@ import os pytest_plugins = ("fixtures.zenith_fixtures") """ - Use this test to see what happens when tests fail. We should be able to clean up after ourselves, including stopping any @@ -12,21 +11,21 @@ postgres or pageserver processes. Set the environment variable RUN_BROKEN to see this test run (and fail, and hopefully not leave any server processes behind). - """ run_broken = pytest.mark.skipif( - os.environ.get('RUN_BROKEN') == None, + os.environ.get('RUN_BROKEN') is None, reason="only used for testing the fixtures" ) + @run_broken def test_broken(zenith_cli, pageserver, postgres, pg_bin): # Create a branch for us - zenith_cli.run(["branch", "test_broken", "empty"]); + zenith_cli.run(["branch", "test_broken", "empty"]) - pg = postgres.create_start("test_broken") + postgres.create_start("test_broken") print('postgres is running') print('THIS NEXT COMMAND WILL FAIL:')