diff --git a/.github/actions/run-python-test-set/action.yml b/.github/actions/run-python-test-set/action.yml index 22447025cb..a4bcaff56d 100644 --- a/.github/actions/run-python-test-set/action.yml +++ b/.github/actions/run-python-test-set/action.yml @@ -149,7 +149,7 @@ runs: fi - name: Upload Allure results - if: ${{ always() && (inputs.test_selection == 'batch_others' || inputs.test_selection == 'batch_pg_regress') }} + if: ${{ always() && (inputs.test_selection == 'regress') }} uses: ./.github/actions/allure-report with: action: store diff --git a/.github/workflows/build_and_test.yml b/.github/workflows/build_and_test.yml index bf6eb69930..8b1dc3a9c4 100644 --- a/.github/workflows/build_and_test.yml +++ b/.github/workflows/build_and_test.yml @@ -206,7 +206,7 @@ jobs: if: matrix.build_type == 'debug' uses: ./.github/actions/save-coverage-data - pg_regress-tests: + regress-tests: runs-on: dev container: image: 369495373322.dkr.ecr.eu-central-1.amazonaws.com/rust:pinned @@ -224,42 +224,13 @@ jobs: submodules: true fetch-depth: 2 - - name: Pytest regress tests + - name: Pytest regression tests uses: ./.github/actions/run-python-test-set with: build_type: ${{ matrix.build_type }} rust_toolchain: ${{ matrix.rust_toolchain }} - test_selection: batch_pg_regress + test_selection: regress needs_postgres_source: true - - - name: Merge and upload coverage data - if: matrix.build_type == 'debug' - uses: ./.github/actions/save-coverage-data - - other-tests: - runs-on: dev - container: - image: 369495373322.dkr.ecr.eu-central-1.amazonaws.com/rust:pinned - options: --init - needs: [ build-neon ] - strategy: - fail-fast: false - matrix: - build_type: [ debug, release ] - rust_toolchain: [ 1.58 ] - steps: - - name: Checkout - uses: actions/checkout@v3 - with: - submodules: true - fetch-depth: 2 - - - name: Pytest other tests - uses: ./.github/actions/run-python-test-set - with: - build_type: ${{ matrix.build_type }} - rust_toolchain: ${{ matrix.rust_toolchain }} - test_selection: batch_others run_with_real_s3: true real_s3_bucket: ci-tests-s3 real_s3_region: us-west-2 @@ -307,7 +278,7 @@ jobs: container: image: 369495373322.dkr.ecr.eu-central-1.amazonaws.com/rust:pinned options: --init - needs: [ other-tests, pg_regress-tests ] + needs: [ regress-tests ] if: always() strategy: fail-fast: false @@ -330,7 +301,7 @@ jobs: container: image: 369495373322.dkr.ecr.eu-central-1.amazonaws.com/rust:pinned options: --init - needs: [ other-tests, pg_regress-tests ] + needs: [ regress-tests ] strategy: fail-fast: false matrix: @@ -587,7 +558,7 @@ jobs: #container: 369495373322.dkr.ecr.eu-central-1.amazonaws.com/base:latest # We need both storage **and** compute images for deploy, because control plane picks the compute version based on the storage version. # If it notices a fresh storage it may bump the compute version. And if compute image failed to build it may break things badly - needs: [ push-docker-hub, calculate-deploy-targets, tag, other-tests, pg_regress-tests ] + needs: [ push-docker-hub, calculate-deploy-targets, tag, regress-tests ] if: | (github.ref_name == 'main' || github.ref_name == 'release') && github.event_name != 'workflow_dispatch' @@ -642,7 +613,7 @@ jobs: runs-on: dev container: 369495373322.dkr.ecr.eu-central-1.amazonaws.com/base:latest # Compute image isn't strictly required for proxy deploy, but let's still wait for it to run all deploy jobs consistently. 
- needs: [ push-docker-hub, calculate-deploy-targets, tag, other-tests, pg_regress-tests ] + needs: [ push-docker-hub, calculate-deploy-targets, tag, regress-tests ] if: | (github.ref_name == 'main' || github.ref_name == 'release') && github.event_name != 'workflow_dispatch' diff --git a/pageserver/src/page_service.rs b/pageserver/src/page_service.rs index fbc70f7690..d59a82d488 100644 --- a/pageserver/src/page_service.rs +++ b/pageserver/src/page_service.rs @@ -1077,7 +1077,7 @@ impl postgres_backend::Handler for PageServerHandler { .write_message(&BeMessage::CommandComplete(b"SELECT 1"))?; } else if query_string.starts_with("do_gc ") { // Run GC immediately on given timeline. - // FIXME: This is just for tests. See test_runner/batch_others/test_gc.py. + // FIXME: This is just for tests. See test_runner/regress/test_gc.py. // This probably should require special authentication or a global flag to // enable, I don't think we want to or need to allow regular clients to invoke // GC. diff --git a/test_runner/README.md b/test_runner/README.md index 4b54c45175..c7ec361d65 100644 --- a/test_runner/README.md +++ b/test_runner/README.md @@ -15,12 +15,22 @@ Prerequisites: ### Test Organization -The tests are divided into a few batches, such that each batch takes roughly -the same amount of time. The batches can be run in parallel, to minimize total -runtime. Currently, there are only two batches: +Regression tests are in the 'regress' directory. They can be run in +parallel to minimize total runtime. Most regression tests set up their +environment with their own pageservers and safekeepers (but see +`TEST_SHARED_FIXTURES`). -- test_batch_pg_regress: Runs PostgreSQL regression tests -- test_others: All other tests +'pg_clients' contains tests for connecting with various client +libraries. Each client test uses a Dockerfile that pulls an image that +contains the client and uses it to connect to PostgreSQL. The client +tests can be run against an existing PostgreSQL or Neon installation. + +'performance' contains performance regression tests. Each test +exercises a particular scenario or workload, and outputs +measurements. They should be run serially, so that the tests do not +interfere with each other's performance. Some performance tests +set up their own Neon environment, while others can be run against an +existing PostgreSQL or Neon environment. ### Running the tests diff --git a/test_runner/batch_pg_regress/test_isolation.py b/test_runner/batch_pg_regress/test_isolation.py deleted file mode 100644 index 7127a069b0..0000000000 --- a/test_runner/batch_pg_regress/test_isolation.py +++ /dev/null @@ -1,50 +0,0 @@ -import os -from pathlib import Path - -import pytest -from fixtures.neon_fixtures import NeonEnv, base_dir, pg_distrib_dir - - -# The isolation tests run for a long time, especially in debug mode, -# so use a larger-than-default timeout. -@pytest.mark.timeout(1800) -def test_isolation(neon_simple_env: NeonEnv, test_output_dir: Path, pg_bin, capsys): - env = neon_simple_env - - env.neon_cli.create_branch("test_isolation", "empty") - # Connect to postgres and create a database called "regression". - # isolation tests use prepared transactions, so enable them - pg = env.postgres.create_start("test_isolation", config_lines=["max_prepared_transactions=100"]) - pg.safe_psql("CREATE DATABASE isolation_regression") - - # Create some local directories for pg_isolation_regress to run in.
- runpath = test_output_dir / "regress" - (runpath / "testtablespace").mkdir(parents=True) - - # Compute all the file locations that pg_isolation_regress will need. - build_path = os.path.join(pg_distrib_dir, "build/src/test/isolation") - src_path = os.path.join(base_dir, "vendor/postgres/src/test/isolation") - bindir = os.path.join(pg_distrib_dir, "bin") - schedule = os.path.join(src_path, "isolation_schedule") - pg_isolation_regress = os.path.join(build_path, "pg_isolation_regress") - - pg_isolation_regress_command = [ - pg_isolation_regress, - "--use-existing", - "--bindir={}".format(bindir), - "--dlpath={}".format(build_path), - "--inputdir={}".format(src_path), - "--schedule={}".format(schedule), - ] - - env_vars = { - "PGPORT": str(pg.default_options["port"]), - "PGUSER": pg.default_options["user"], - "PGHOST": pg.default_options["host"], - } - - # Run the command. - # We don't capture the output. It's not too chatty, and it always - # logs the exact same data to `regression.out` anyway. - with capsys.disabled(): - pg_bin.run(pg_isolation_regress_command, env=env_vars, cwd=runpath) diff --git a/test_runner/batch_pg_regress/test_neon_regress.py b/test_runner/batch_pg_regress/test_neon_regress.py deleted file mode 100644 index 4619647084..0000000000 --- a/test_runner/batch_pg_regress/test_neon_regress.py +++ /dev/null @@ -1,55 +0,0 @@ -import os -from pathlib import Path - -from fixtures.log_helper import log -from fixtures.neon_fixtures import NeonEnv, base_dir, check_restored_datadir_content, pg_distrib_dir - - -def test_neon_regress(neon_simple_env: NeonEnv, test_output_dir: Path, pg_bin, capsys): - env = neon_simple_env - - env.neon_cli.create_branch("test_neon_regress", "empty") - # Connect to postgres and create a database called "regression". - pg = env.postgres.create_start("test_neon_regress") - pg.safe_psql("CREATE DATABASE regression") - - # Create some local directories for pg_regress to run in. - runpath = test_output_dir / "regress" - (runpath / "testtablespace").mkdir(parents=True) - - # Compute all the file locations that pg_regress will need. - # This test runs neon specific tests - build_path = os.path.join(pg_distrib_dir, "build/src/test/regress") - src_path = os.path.join(base_dir, "test_runner/neon_regress") - bindir = os.path.join(pg_distrib_dir, "bin") - schedule = os.path.join(src_path, "parallel_schedule") - pg_regress = os.path.join(build_path, "pg_regress") - - pg_regress_command = [ - pg_regress, - "--use-existing", - "--bindir={}".format(bindir), - "--dlpath={}".format(build_path), - "--schedule={}".format(schedule), - "--inputdir={}".format(src_path), - ] - - log.info(pg_regress_command) - env_vars = { - "PGPORT": str(pg.default_options["port"]), - "PGUSER": pg.default_options["user"], - "PGHOST": pg.default_options["host"], - } - - # Run the command. - # We don't capture the output. It's not too chatty, and it always - # logs the exact same data to `regression.out` anyway. 
- with capsys.disabled(): - pg_bin.run(pg_regress_command, env=env_vars, cwd=runpath) - - # checkpoint one more time to ensure that the lsn we get is the latest one - pg.safe_psql("CHECKPOINT") - pg.safe_psql("select pg_current_wal_insert_lsn()")[0][0] - - # Check that we restore the content of the datadir correctly - check_restored_datadir_content(test_output_dir, env, pg) diff --git a/test_runner/batch_pg_regress/test_pg_regress.py b/test_runner/batch_pg_regress/test_pg_regress.py deleted file mode 100644 index 478dbf0a91..0000000000 --- a/test_runner/batch_pg_regress/test_pg_regress.py +++ /dev/null @@ -1,56 +0,0 @@ -import os -import pathlib - -import pytest -from fixtures.neon_fixtures import NeonEnv, base_dir, check_restored_datadir_content, pg_distrib_dir - - -# The pg_regress tests run for a long time, especially in debug mode, -# so use a larger-than-default timeout. -@pytest.mark.timeout(1800) -def test_pg_regress(neon_simple_env: NeonEnv, test_output_dir: pathlib.Path, pg_bin, capsys): - env = neon_simple_env - - env.neon_cli.create_branch("test_pg_regress", "empty") - # Connect to postgres and create a database called "regression". - pg = env.postgres.create_start("test_pg_regress") - pg.safe_psql("CREATE DATABASE regression") - - # Create some local directories for pg_regress to run in. - runpath = test_output_dir / "regress" - (runpath / "testtablespace").mkdir(parents=True) - - # Compute all the file locations that pg_regress will need. - build_path = os.path.join(pg_distrib_dir, "build/src/test/regress") - src_path = os.path.join(base_dir, "vendor/postgres/src/test/regress") - bindir = os.path.join(pg_distrib_dir, "bin") - schedule = os.path.join(src_path, "parallel_schedule") - pg_regress = os.path.join(build_path, "pg_regress") - - pg_regress_command = [ - pg_regress, - '--bindir=""', - "--use-existing", - "--bindir={}".format(bindir), - "--dlpath={}".format(build_path), - "--schedule={}".format(schedule), - "--inputdir={}".format(src_path), - ] - - env_vars = { - "PGPORT": str(pg.default_options["port"]), - "PGUSER": pg.default_options["user"], - "PGHOST": pg.default_options["host"], - } - - # Run the command. - # We don't capture the output. It's not too chatty, and it always - # logs the exact same data to `regression.out` anyway. - with capsys.disabled(): - pg_bin.run(pg_regress_command, env=env_vars, cwd=runpath) - - # checkpoint one more time to ensure that the lsn we get is the latest one - pg.safe_psql("CHECKPOINT") - - # Check that we restore the content of the datadir correctly - check_restored_datadir_content(test_output_dir, env, pg) diff --git a/test_runner/neon_regress/README.md b/test_runner/neon_regress/README.md deleted file mode 100644 index b23a55462e..0000000000 --- a/test_runner/neon_regress/README.md +++ /dev/null @@ -1,8 +0,0 @@ -To add a new SQL test - -- add sql script to run to neon_regress/sql/testname.sql -- add expected output to neon_regress/expected/testname.out -- add testname to parallel_schedule - -That's it. -For more complex tests see PostgreSQL regression tests. These works basically the same. 
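To make the layout described in the test_runner/README.md hunk above concrete: a typical test under test_runner/regress/ builds its own Neon environment from the shared fixtures. The sketch below is a hypothetical example, not a file in this diff; it uses only fixtures and methods that already appear elsewhere in the diff (neon_simple_env, NeonEnv, neon_cli.create_branch, postgres.create_start, safe_psql).

from fixtures.neon_fixtures import NeonEnv


def test_example(neon_simple_env: NeonEnv):
    # Hypothetical test: neon_simple_env gives the test its own pageserver and
    # safekeepers (but see TEST_SHARED_FIXTURES in the README above).
    env = neon_simple_env

    # Create a branch from "empty", as the other tests in this diff do,
    # and start a compute node on it.
    env.neon_cli.create_branch("test_example", "empty")
    pg = env.postgres.create_start("test_example")

    # Run a trivial query through the compute node.
    assert pg.safe_psql("SELECT 1")[0][0] == 1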
diff --git a/test_runner/batch_others/test_ancestor_branch.py b/test_runner/regress/test_ancestor_branch.py similarity index 100% rename from test_runner/batch_others/test_ancestor_branch.py rename to test_runner/regress/test_ancestor_branch.py diff --git a/test_runner/batch_others/test_auth.py b/test_runner/regress/test_auth.py similarity index 100% rename from test_runner/batch_others/test_auth.py rename to test_runner/regress/test_auth.py diff --git a/test_runner/batch_others/test_backpressure.py b/test_runner/regress/test_backpressure.py similarity index 100% rename from test_runner/batch_others/test_backpressure.py rename to test_runner/regress/test_backpressure.py diff --git a/test_runner/batch_others/test_basebackup_error.py b/test_runner/regress/test_basebackup_error.py similarity index 100% rename from test_runner/batch_others/test_basebackup_error.py rename to test_runner/regress/test_basebackup_error.py diff --git a/test_runner/batch_others/test_branch_and_gc.py b/test_runner/regress/test_branch_and_gc.py similarity index 100% rename from test_runner/batch_others/test_branch_and_gc.py rename to test_runner/regress/test_branch_and_gc.py diff --git a/test_runner/batch_others/test_branch_behind.py b/test_runner/regress/test_branch_behind.py similarity index 100% rename from test_runner/batch_others/test_branch_behind.py rename to test_runner/regress/test_branch_behind.py diff --git a/test_runner/batch_others/test_branching.py b/test_runner/regress/test_branching.py similarity index 91% rename from test_runner/batch_others/test_branching.py rename to test_runner/regress/test_branching.py index 2d08b07f82..0c1490294d 100644 --- a/test_runner/batch_others/test_branching.py +++ b/test_runner/regress/test_branching.py @@ -62,10 +62,11 @@ def test_branching_with_pgbench( time.sleep(delay) log.info(f"Sleep {delay}s") - # If the number of concurrent threads exceeds a threshold, - # wait for all the threads to finish before spawning a new one. - # Because tests defined in `batch_others` are run concurrently in CI, - # we want to avoid the situation that one test exhausts resources for other tests. + # If the number of concurrent threads exceeds a threshold, wait for + # all the threads to finish before spawning a new one. Because the + # regression tests in this directory are run concurrently in CI, we + # want to avoid the situation that one test exhausts resources for + # other tests. 
if len(threads) >= thread_limit: for thread in threads: thread.join() diff --git a/test_runner/batch_others/test_broken_timeline.py b/test_runner/regress/test_broken_timeline.py similarity index 100% rename from test_runner/batch_others/test_broken_timeline.py rename to test_runner/regress/test_broken_timeline.py diff --git a/test_runner/batch_others/test_clog_truncate.py b/test_runner/regress/test_clog_truncate.py similarity index 100% rename from test_runner/batch_others/test_clog_truncate.py rename to test_runner/regress/test_clog_truncate.py diff --git a/test_runner/batch_others/test_close_fds.py b/test_runner/regress/test_close_fds.py similarity index 100% rename from test_runner/batch_others/test_close_fds.py rename to test_runner/regress/test_close_fds.py diff --git a/test_runner/batch_others/test_config.py b/test_runner/regress/test_config.py similarity index 100% rename from test_runner/batch_others/test_config.py rename to test_runner/regress/test_config.py diff --git a/test_runner/batch_others/test_crafted_wal_end.py b/test_runner/regress/test_crafted_wal_end.py similarity index 100% rename from test_runner/batch_others/test_crafted_wal_end.py rename to test_runner/regress/test_crafted_wal_end.py diff --git a/test_runner/batch_others/test_createdropdb.py b/test_runner/regress/test_createdropdb.py similarity index 100% rename from test_runner/batch_others/test_createdropdb.py rename to test_runner/regress/test_createdropdb.py diff --git a/test_runner/batch_others/test_createuser.py b/test_runner/regress/test_createuser.py similarity index 100% rename from test_runner/batch_others/test_createuser.py rename to test_runner/regress/test_createuser.py diff --git a/test_runner/batch_others/test_fsm_truncate.py b/test_runner/regress/test_fsm_truncate.py similarity index 100% rename from test_runner/batch_others/test_fsm_truncate.py rename to test_runner/regress/test_fsm_truncate.py diff --git a/test_runner/batch_others/test_fullbackup.py b/test_runner/regress/test_fullbackup.py similarity index 100% rename from test_runner/batch_others/test_fullbackup.py rename to test_runner/regress/test_fullbackup.py diff --git a/test_runner/batch_others/test_gc_aggressive.py b/test_runner/regress/test_gc_aggressive.py similarity index 100% rename from test_runner/batch_others/test_gc_aggressive.py rename to test_runner/regress/test_gc_aggressive.py diff --git a/test_runner/batch_others/test_import.py b/test_runner/regress/test_import.py similarity index 100% rename from test_runner/batch_others/test_import.py rename to test_runner/regress/test_import.py diff --git a/test_runner/batch_others/test_large_schema.py b/test_runner/regress/test_large_schema.py similarity index 100% rename from test_runner/batch_others/test_large_schema.py rename to test_runner/regress/test_large_schema.py diff --git a/test_runner/batch_others/test_lsn_mapping.py b/test_runner/regress/test_lsn_mapping.py similarity index 100% rename from test_runner/batch_others/test_lsn_mapping.py rename to test_runner/regress/test_lsn_mapping.py diff --git a/test_runner/batch_others/test_multixact.py b/test_runner/regress/test_multixact.py similarity index 100% rename from test_runner/batch_others/test_multixact.py rename to test_runner/regress/test_multixact.py diff --git a/test_runner/batch_others/test_neon_cli.py b/test_runner/regress/test_neon_cli.py similarity index 100% rename from test_runner/batch_others/test_neon_cli.py rename to test_runner/regress/test_neon_cli.py diff --git a/test_runner/batch_others/test_next_xid.py 
b/test_runner/regress/test_next_xid.py similarity index 100% rename from test_runner/batch_others/test_next_xid.py rename to test_runner/regress/test_next_xid.py diff --git a/test_runner/batch_others/test_normal_work.py b/test_runner/regress/test_normal_work.py similarity index 100% rename from test_runner/batch_others/test_normal_work.py rename to test_runner/regress/test_normal_work.py diff --git a/test_runner/batch_others/test_old_request_lsn.py b/test_runner/regress/test_old_request_lsn.py similarity index 100% rename from test_runner/batch_others/test_old_request_lsn.py rename to test_runner/regress/test_old_request_lsn.py diff --git a/test_runner/batch_others/test_pageserver_api.py b/test_runner/regress/test_pageserver_api.py similarity index 100% rename from test_runner/batch_others/test_pageserver_api.py rename to test_runner/regress/test_pageserver_api.py diff --git a/test_runner/batch_others/test_pageserver_catchup.py b/test_runner/regress/test_pageserver_catchup.py similarity index 100% rename from test_runner/batch_others/test_pageserver_catchup.py rename to test_runner/regress/test_pageserver_catchup.py diff --git a/test_runner/batch_others/test_pageserver_restart.py b/test_runner/regress/test_pageserver_restart.py similarity index 100% rename from test_runner/batch_others/test_pageserver_restart.py rename to test_runner/regress/test_pageserver_restart.py diff --git a/test_runner/batch_others/test_parallel_copy.py b/test_runner/regress/test_parallel_copy.py similarity index 100% rename from test_runner/batch_others/test_parallel_copy.py rename to test_runner/regress/test_parallel_copy.py diff --git a/test_runner/regress/test_pg_regress.py b/test_runner/regress/test_pg_regress.py new file mode 100644 index 0000000000..119528b8f9 --- /dev/null +++ b/test_runner/regress/test_pg_regress.py @@ -0,0 +1,159 @@ +# +# This file runs pg_regress-based tests. +# +import os +from pathlib import Path + +import pytest +from fixtures.neon_fixtures import NeonEnv, base_dir, check_restored_datadir_content, pg_distrib_dir + + +# Run the main PostgreSQL regression tests, in src/test/regress. +# +# This runs for a long time, especially in debug mode, so use a larger-than-default +# timeout. +@pytest.mark.timeout(1800) +def test_pg_regress(neon_simple_env: NeonEnv, test_output_dir: Path, pg_bin, capsys): + env = neon_simple_env + + env.neon_cli.create_branch("test_pg_regress", "empty") + # Connect to postgres and create a database called "regression". + pg = env.postgres.create_start("test_pg_regress") + pg.safe_psql("CREATE DATABASE regression") + + # Create some local directories for pg_regress to run in. + runpath = test_output_dir / "regress" + (runpath / "testtablespace").mkdir(parents=True) + + # Compute all the file locations that pg_regress will need. + build_path = os.path.join(pg_distrib_dir, "build/src/test/regress") + src_path = os.path.join(base_dir, "vendor/postgres/src/test/regress") + bindir = os.path.join(pg_distrib_dir, "bin") + schedule = os.path.join(src_path, "parallel_schedule") + pg_regress = os.path.join(build_path, "pg_regress") + + pg_regress_command = [ + pg_regress, + '--bindir=""', + "--use-existing", + "--bindir={}".format(bindir), + "--dlpath={}".format(build_path), + "--schedule={}".format(schedule), + "--inputdir={}".format(src_path), + ] + + env_vars = { + "PGPORT": str(pg.default_options["port"]), + "PGUSER": pg.default_options["user"], + "PGHOST": pg.default_options["host"], + } + + # Run the command. + # We don't capture the output. 
It's not too chatty, and it always + # logs the exact same data to `regression.out` anyway. + with capsys.disabled(): + pg_bin.run(pg_regress_command, env=env_vars, cwd=runpath) + + # checkpoint one more time to ensure that the lsn we get is the latest one + pg.safe_psql("CHECKPOINT") + + # Check that we restore the content of the datadir correctly + check_restored_datadir_content(test_output_dir, env, pg) + + +# Run the PostgreSQL "isolation" tests, in src/test/isolation. +# +# This runs for a long time, especially in debug mode, so use a larger-than-default +# timeout. +@pytest.mark.timeout(1800) +def test_isolation(neon_simple_env: NeonEnv, test_output_dir: Path, pg_bin, capsys): + env = neon_simple_env + + env.neon_cli.create_branch("test_isolation", "empty") + # Connect to postgres and create a database called "regression". + # isolation tests use prepared transactions, so enable them + pg = env.postgres.create_start("test_isolation", config_lines=["max_prepared_transactions=100"]) + pg.safe_psql("CREATE DATABASE isolation_regression") + + # Create some local directories for pg_isolation_regress to run in. + runpath = test_output_dir / "regress" + (runpath / "testtablespace").mkdir(parents=True) + + # Compute all the file locations that pg_isolation_regress will need. + build_path = os.path.join(pg_distrib_dir, "build/src/test/isolation") + src_path = os.path.join(base_dir, "vendor/postgres/src/test/isolation") + bindir = os.path.join(pg_distrib_dir, "bin") + schedule = os.path.join(src_path, "isolation_schedule") + pg_isolation_regress = os.path.join(build_path, "pg_isolation_regress") + + pg_isolation_regress_command = [ + pg_isolation_regress, + "--use-existing", + "--bindir={}".format(bindir), + "--dlpath={}".format(build_path), + "--inputdir={}".format(src_path), + "--schedule={}".format(schedule), + ] + + env_vars = { + "PGPORT": str(pg.default_options["port"]), + "PGUSER": pg.default_options["user"], + "PGHOST": pg.default_options["host"], + } + + # Run the command. + # We don't capture the output. It's not too chatty, and it always + # logs the exact same data to `regression.out` anyway. + with capsys.disabled(): + pg_bin.run(pg_isolation_regress_command, env=env_vars, cwd=runpath) + + +# Run extra Neon-specific pg_regress-based tests. The tests and their +# schedule file are in the sql_regress/ directory. +def test_sql_regress(neon_simple_env: NeonEnv, test_output_dir: Path, pg_bin, capsys): + env = neon_simple_env + + env.neon_cli.create_branch("test_sql_regress", "empty") + # Connect to postgres and create a database called "regression". + pg = env.postgres.create_start("test_sql_regress") + pg.safe_psql("CREATE DATABASE regression") + + # Create some local directories for pg_regress to run in. + runpath = test_output_dir / "regress" + (runpath / "testtablespace").mkdir(parents=True) + + # Compute all the file locations that pg_regress will need. 
+ # This test runs neon specific tests + build_path = os.path.join(pg_distrib_dir, "build/src/test/regress") + src_path = os.path.join(base_dir, "test_runner/sql_regress") + bindir = os.path.join(pg_distrib_dir, "bin") + schedule = os.path.join(src_path, "parallel_schedule") + pg_regress = os.path.join(build_path, "pg_regress") + + pg_regress_command = [ + pg_regress, + "--use-existing", + "--bindir={}".format(bindir), + "--dlpath={}".format(build_path), + "--schedule={}".format(schedule), + "--inputdir={}".format(src_path), + ] + + env_vars = { + "PGPORT": str(pg.default_options["port"]), + "PGUSER": pg.default_options["user"], + "PGHOST": pg.default_options["host"], + } + + # Run the command. + # We don't capture the output. It's not too chatty, and it always + # logs the exact same data to `regression.out` anyway. + with capsys.disabled(): + pg_bin.run(pg_regress_command, env=env_vars, cwd=runpath) + + # checkpoint one more time to ensure that the lsn we get is the latest one + pg.safe_psql("CHECKPOINT") + pg.safe_psql("select pg_current_wal_insert_lsn()")[0][0] + + # Check that we restore the content of the datadir correctly + check_restored_datadir_content(test_output_dir, env, pg) diff --git a/test_runner/batch_others/test_pitr_gc.py b/test_runner/regress/test_pitr_gc.py similarity index 100% rename from test_runner/batch_others/test_pitr_gc.py rename to test_runner/regress/test_pitr_gc.py diff --git a/test_runner/batch_others/test_proxy.py b/test_runner/regress/test_proxy.py similarity index 100% rename from test_runner/batch_others/test_proxy.py rename to test_runner/regress/test_proxy.py diff --git a/test_runner/batch_others/test_read_validation.py b/test_runner/regress/test_read_validation.py similarity index 100% rename from test_runner/batch_others/test_read_validation.py rename to test_runner/regress/test_read_validation.py diff --git a/test_runner/batch_others/test_readonly_node.py b/test_runner/regress/test_readonly_node.py similarity index 100% rename from test_runner/batch_others/test_readonly_node.py rename to test_runner/regress/test_readonly_node.py diff --git a/test_runner/batch_others/test_recovery.py b/test_runner/regress/test_recovery.py similarity index 100% rename from test_runner/batch_others/test_recovery.py rename to test_runner/regress/test_recovery.py diff --git a/test_runner/batch_others/test_remote_storage.py b/test_runner/regress/test_remote_storage.py similarity index 100% rename from test_runner/batch_others/test_remote_storage.py rename to test_runner/regress/test_remote_storage.py diff --git a/test_runner/batch_others/test_setup.py b/test_runner/regress/test_setup.py similarity index 100% rename from test_runner/batch_others/test_setup.py rename to test_runner/regress/test_setup.py diff --git a/test_runner/batch_others/test_subxacts.py b/test_runner/regress/test_subxacts.py similarity index 100% rename from test_runner/batch_others/test_subxacts.py rename to test_runner/regress/test_subxacts.py diff --git a/test_runner/batch_others/test_tenant_conf.py b/test_runner/regress/test_tenant_conf.py similarity index 100% rename from test_runner/batch_others/test_tenant_conf.py rename to test_runner/regress/test_tenant_conf.py diff --git a/test_runner/batch_others/test_tenant_detach.py b/test_runner/regress/test_tenant_detach.py similarity index 100% rename from test_runner/batch_others/test_tenant_detach.py rename to test_runner/regress/test_tenant_detach.py diff --git a/test_runner/batch_others/test_tenant_relocation.py 
b/test_runner/regress/test_tenant_relocation.py similarity index 100% rename from test_runner/batch_others/test_tenant_relocation.py rename to test_runner/regress/test_tenant_relocation.py diff --git a/test_runner/batch_others/test_tenant_tasks.py b/test_runner/regress/test_tenant_tasks.py similarity index 100% rename from test_runner/batch_others/test_tenant_tasks.py rename to test_runner/regress/test_tenant_tasks.py diff --git a/test_runner/batch_others/test_tenants.py b/test_runner/regress/test_tenants.py similarity index 100% rename from test_runner/batch_others/test_tenants.py rename to test_runner/regress/test_tenants.py diff --git a/test_runner/batch_others/test_tenants_with_remote_storage.py b/test_runner/regress/test_tenants_with_remote_storage.py similarity index 100% rename from test_runner/batch_others/test_tenants_with_remote_storage.py rename to test_runner/regress/test_tenants_with_remote_storage.py diff --git a/test_runner/batch_others/test_timeline_delete.py b/test_runner/regress/test_timeline_delete.py similarity index 100% rename from test_runner/batch_others/test_timeline_delete.py rename to test_runner/regress/test_timeline_delete.py diff --git a/test_runner/batch_others/test_timeline_size.py b/test_runner/regress/test_timeline_size.py similarity index 100% rename from test_runner/batch_others/test_timeline_size.py rename to test_runner/regress/test_timeline_size.py diff --git a/test_runner/batch_others/test_twophase.py b/test_runner/regress/test_twophase.py similarity index 100% rename from test_runner/batch_others/test_twophase.py rename to test_runner/regress/test_twophase.py diff --git a/test_runner/batch_others/test_vm_bits.py b/test_runner/regress/test_vm_bits.py similarity index 100% rename from test_runner/batch_others/test_vm_bits.py rename to test_runner/regress/test_vm_bits.py diff --git a/test_runner/batch_others/test_wal_acceptor.py b/test_runner/regress/test_wal_acceptor.py similarity index 100% rename from test_runner/batch_others/test_wal_acceptor.py rename to test_runner/regress/test_wal_acceptor.py diff --git a/test_runner/batch_others/test_wal_acceptor_async.py b/test_runner/regress/test_wal_acceptor_async.py similarity index 100% rename from test_runner/batch_others/test_wal_acceptor_async.py rename to test_runner/regress/test_wal_acceptor_async.py diff --git a/test_runner/batch_others/test_wal_restore.py b/test_runner/regress/test_wal_restore.py similarity index 100% rename from test_runner/batch_others/test_wal_restore.py rename to test_runner/regress/test_wal_restore.py diff --git a/test_runner/neon_regress/.gitignore b/test_runner/sql_regress/.gitignore similarity index 100% rename from test_runner/neon_regress/.gitignore rename to test_runner/sql_regress/.gitignore diff --git a/test_runner/sql_regress/README.md b/test_runner/sql_regress/README.md new file mode 100644 index 0000000000..1ae8aaf61a --- /dev/null +++ b/test_runner/sql_regress/README.md @@ -0,0 +1,13 @@ +Simple tests that only need a PostgreSQL connection to run. +These are run by the test_sql_regress test in regress/test_pg_regress.py, which uses +the PostgreSQL pg_regress utility. + +To add a new SQL test: + +- add the SQL script to run as sql_regress/sql/testname.sql +- add the expected output as sql_regress/expected/testname.out +- add testname to parallel_schedule + +That's it. +For more complex tests, see the PostgreSQL regression tests in src/test/regress. +These work basically the same.
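For illustration, with hypothetical names that are not files in this diff: a new test called neon-example would consist of the script sql_regress/sql/neon-example.sql, its expected output sql_regress/expected/neon-example.out, and a `test: neon-example` line appended to sql_regress/parallel_schedule; the test_sql_regress test in regress/test_pg_regress.py then runs it through pg_regress.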
diff --git a/test_runner/neon_regress/expected/.gitignore b/test_runner/sql_regress/expected/.gitignore similarity index 100% rename from test_runner/neon_regress/expected/.gitignore rename to test_runner/sql_regress/expected/.gitignore diff --git a/test_runner/neon_regress/expected/neon-cid.out b/test_runner/sql_regress/expected/neon-cid.out similarity index 100% rename from test_runner/neon_regress/expected/neon-cid.out rename to test_runner/sql_regress/expected/neon-cid.out diff --git a/test_runner/neon_regress/expected/neon-clog.out b/test_runner/sql_regress/expected/neon-clog.out similarity index 100% rename from test_runner/neon_regress/expected/neon-clog.out rename to test_runner/sql_regress/expected/neon-clog.out diff --git a/test_runner/neon_regress/expected/neon-rel-truncate.out b/test_runner/sql_regress/expected/neon-rel-truncate.out similarity index 100% rename from test_runner/neon_regress/expected/neon-rel-truncate.out rename to test_runner/sql_regress/expected/neon-rel-truncate.out diff --git a/test_runner/neon_regress/expected/neon-vacuum-full.out b/test_runner/sql_regress/expected/neon-vacuum-full.out similarity index 100% rename from test_runner/neon_regress/expected/neon-vacuum-full.out rename to test_runner/sql_regress/expected/neon-vacuum-full.out diff --git a/test_runner/neon_regress/parallel_schedule b/test_runner/sql_regress/parallel_schedule similarity index 100% rename from test_runner/neon_regress/parallel_schedule rename to test_runner/sql_regress/parallel_schedule diff --git a/test_runner/neon_regress/sql/.gitignore b/test_runner/sql_regress/sql/.gitignore similarity index 100% rename from test_runner/neon_regress/sql/.gitignore rename to test_runner/sql_regress/sql/.gitignore diff --git a/test_runner/neon_regress/sql/neon-cid.sql b/test_runner/sql_regress/sql/neon-cid.sql similarity index 100% rename from test_runner/neon_regress/sql/neon-cid.sql rename to test_runner/sql_regress/sql/neon-cid.sql diff --git a/test_runner/neon_regress/sql/neon-clog.sql b/test_runner/sql_regress/sql/neon-clog.sql similarity index 100% rename from test_runner/neon_regress/sql/neon-clog.sql rename to test_runner/sql_regress/sql/neon-clog.sql diff --git a/test_runner/neon_regress/sql/neon-rel-truncate.sql b/test_runner/sql_regress/sql/neon-rel-truncate.sql similarity index 100% rename from test_runner/neon_regress/sql/neon-rel-truncate.sql rename to test_runner/sql_regress/sql/neon-rel-truncate.sql diff --git a/test_runner/neon_regress/sql/neon-vacuum-full.sql b/test_runner/sql_regress/sql/neon-vacuum-full.sql similarity index 100% rename from test_runner/neon_regress/sql/neon-vacuum-full.sql rename to test_runner/sql_regress/sql/neon-vacuum-full.sql