from contextlib import closing

from fixtures.zenith_fixtures import ZenithEnv
from fixtures.log_helper import log

from psycopg2.errors import UndefinedTable
from psycopg2.errors import IoError

# Trailing comma makes this a real tuple; without it, it is a plain string
# (pytest accepts both, but the parentheses clearly intended a tuple).
pytest_plugins = ("fixtures.zenith_fixtures", )

# Extensions required by the read-validation tests.
extensions = ["pageinspect", "zenith_test_utils", "pg_buffercache"]


def _cached_buffer_count(cursor, relfilenode):
    """Return how many shared buffers currently cache pages of the given relation."""
    cursor.execute("select count(*) from pg_buffercache where relfilenode = %s",
                   (relfilenode, ))
    return cursor.fetchone()[0]


#
# Validation of reading different page versions
#
def test_read_validation(zenith_simple_env: ZenithEnv):
    """Validate fetching historic and latest page versions via get_raw_page_at_lsn().

    Creates a table, captures two page versions (before/after an insert),
    clears the buffer cache, then checks that both versions can be read back
    by LSN — by relation name and by raw (tablespace, database, relfilenode)
    identifiers, including after the relation has been dropped.
    """
    env = zenith_simple_env
    env.zenith_cli.create_branch("test_read_validation", "empty")

    pg = env.postgres.create_start("test_read_validation")
    log.info("postgres is running on 'test_read_validation' branch")

    with closing(pg.connect()) as con:
        with con.cursor() as c:

            # Extension names come from a fixed module-level list, so plain
            # formatting is safe here (identifiers cannot be bound as params).
            for e in extensions:
                c.execute("create extension if not exists {};".format(e))

            # autovacuum disabled so no background process rewrites the page
            # between the two captures below.
            c.execute("create table foo (c int) with (autovacuum_enabled = false)")
            c.execute("insert into foo values (1)")

            c.execute("select lsn, lower, upper from page_header(get_raw_page('foo', 'main', 0));")
            first = c.fetchone()

            c.execute("select relfilenode from pg_class where relname = 'foo'")
            relfilenode = c.fetchone()[0]

            # Second insert bumps the page LSN, giving a distinct page version.
            c.execute("insert into foo values (2);")
            c.execute("select lsn, lower, upper from page_header(get_raw_page('foo', 'main', 0));")
            second = c.fetchone()

            assert first != second, "Failed to update page"

            log.info("Test table is populated, validating buffer cache")

            assert _cached_buffer_count(c, relfilenode) > 0, \
                "No buffers cached for the test relation"

            c.execute(
                "select reltablespace, reldatabase, relfilenode from pg_buffercache"
                " where relfilenode = %s",
                (relfilenode, ))
            reln = c.fetchone()

            log.info("Clear buffer cache to ensure no stale pages are brought into the cache")

            c.execute("select clear_buffer_cache()")

            assert _cached_buffer_count(c, relfilenode) == 0, "Failed to clear buffer cache"

            log.info("Cache is clear, reading stale page version")

            # Values are bound as query parameters instead of string-formatted
            # into the SQL text.
            c.execute(
                "select lsn, lower, upper from page_header("
                "get_raw_page_at_lsn('foo', 'main', 0, %s))",
                (first[0], ))
            direct_first = c.fetchone()
            assert first == direct_first, "Failed fetch page at historic lsn"

            assert _cached_buffer_count(c, relfilenode) == 0, \
                "relation buffers detected after invalidation"

            log.info("Cache is clear, reading latest page version without cache")

            c.execute(
                "select lsn, lower, upper from page_header("
                "get_raw_page_at_lsn('foo', 'main', 0, NULL))")
            direct_latest = c.fetchone()
            assert second == direct_latest, "Failed fetch page at latest lsn"

            assert _cached_buffer_count(c, relfilenode) == 0, \
                "relation buffers detected after invalidation"

            log.info(
                "Cache is clear, reading stale page version without cache using relation identifiers"
            )

            c.execute(
                "select lsn, lower, upper from page_header("
                "get_raw_page_at_lsn( %s, %s, %s, 0, 0, %s ))",
                (reln[0], reln[1], reln[2], first[0]))
            direct_first = c.fetchone()
            assert first == direct_first, "Failed fetch page at historic lsn using oid"

            log.info(
                "Cache is clear, reading latest page version without cache using relation identifiers"
            )

            c.execute(
                "select lsn, lower, upper from page_header("
                "get_raw_page_at_lsn( %s, %s, %s, 0, 0, NULL ))",
                (reln[0], reln[1], reln[2]))
            direct_latest = c.fetchone()
            assert second == direct_latest, "Failed fetch page at latest lsn"

            c.execute('drop table foo;')

            log.info(
                "Relation dropped, attempting reading stale page version without cache using relation identifiers"
            )

            # Reading by raw identifiers must still work after DROP TABLE,
            # since the page server retains historic versions.
            c.execute(
                "select lsn, lower, upper from page_header("
                "get_raw_page_at_lsn( %s, %s, %s, 0, 0, %s ))",
                (reln[0], reln[1], reln[2], first[0]))
            direct_first = c.fetchone()
            assert first == direct_first, "Failed fetch page at historic lsn using oid"

            log.info("Validation page inspect won't allow reading pages of dropped relations")
            try:
                c.execute("select * from page_header(get_raw_page('foo', 'main', 0));")
                # AssertionError deliberately propagates past the except below.
                assert False, "query should have failed"
            except UndefinedTable as e:
                log.info("Caught an expected failure: {}".format(e))
def test_read_validation_neg(zenith_simple_env: ZenithEnv):
    """Negative tests for get_raw_page_at_lsn().

    Checks that reading a missing relation raises UndefinedTable, that reading
    at LSN 0/0 raises IoError, that NULL in any argument yields a NULL result,
    and documents the (currently failing) read-beyond-EOF case.
    """
    env = zenith_simple_env
    env.zenith_cli.create_branch("test_read_validation_neg", "empty")

    pg = env.postgres.create_start("test_read_validation_neg")
    log.info("postgres is running on 'test_read_validation_neg' branch")

    with closing(pg.connect()) as con:
        with con.cursor() as c:

            # Extension names come from a fixed module-level list, so plain
            # formatting is safe (identifiers cannot be bound as parameters).
            for e in extensions:
                c.execute("create extension if not exists {};".format(e))

            # NOTE(review): statements keep executing after expected failures,
            # which presumes the fixture connection is in autocommit mode —
            # confirm against pg.connect() in the fixtures.
            log.info("read a page of a missing relation")
            try:
                c.execute(
                    "select lsn, lower, upper from page_header("
                    "get_raw_page_at_lsn('Unknown', 'main', 0, '0/0'))")
                # AssertionError deliberately propagates past the except below.
                assert False, "query should have failed"
            except UndefinedTable as e:
                log.info("Caught an expected failure: {}".format(e))

            c.execute("create table foo (c int) with (autovacuum_enabled = false)")
            c.execute("insert into foo values (1)")

            log.info("read a page at lsn 0")
            try:
                c.execute(
                    "select lsn, lower, upper from page_header("
                    "get_raw_page_at_lsn('foo', 'main', 0, '0/0'))")
                assert False, "query should have failed"
            except IoError as e:
                log.info("Caught an expected failure: {}".format(e))

            log.info("Pass NULL as an input")
            expected = (None, None, None)
            # A NULL in any argument position must null out the whole result.
            for null_arg_query in (
                    "select lsn, lower, upper from page_header("
                    "get_raw_page_at_lsn(NULL, 'main', 0, '0/0'))",
                    "select lsn, lower, upper from page_header("
                    "get_raw_page_at_lsn('foo', NULL, 0, '0/0'))",
                    "select lsn, lower, upper from page_header("
                    "get_raw_page_at_lsn('foo', 'main', NULL, '0/0'))",
            ):
                c.execute(null_arg_query)
                assert c.fetchone() == expected, "Expected null output"

            # This check is currently failing, reading beyond EOF is returning
            # a 0-page — no assertion yet, the read is only exercised.
            log.info("Read beyond EOF")
            c.execute(
                "select lsn, lower, upper from page_header("
                "get_raw_page_at_lsn('foo', 'main', 1, NULL))")