Compare commits

...

1 Commits

Author SHA1 Message Date
Alek Westover
443d9b30d0 test branch 2023-08-09 16:03:18 -04:00
2 changed files with 183 additions and 183 deletions

View File

@@ -1111,14 +1111,14 @@ LIMIT 100",
.as_millis() as u64;
info!("Prepare extensions took {prep_ext_time_delta}ms");
// Don't try to download libraries that are not in the index.
// Assume that they are already present locally.
libs_vec.retain(|lib| {
self.library_index
.get()
.expect("error accessing ext_remote_paths")
.contains_key(lib)
});
// // Don't try to download libraries that are not in the index.
// // Assume that they are already present locally.
// libs_vec.retain(|lib| {
// self.library_index
// .get()
// .expect("error accessing ext_remote_paths")
// .contains_key(lib)
// });
info!("Downloading to shared preload libraries: {:?}", &libs_vec);

View File

@@ -110,7 +110,7 @@ def test_remote_extensions(
"test_remote_extensions",
tenant_id=tenant_id,
remote_ext_config=env.ext_remote_storage.to_string(),
# config_lines=["log_min_messages=debug3"],
config_lines=["shared_preload_libraries='neon,ololo'"],
)
try:
with closing(endpoint.connect()) as conn:
@@ -121,203 +121,203 @@ def test_remote_extensions(
log.info(all_extensions)
assert "anon" in all_extensions
# postgis is on real s3 but not mock s3.
# it's kind of a big file, would rather not upload to github
if remote_storage_kind == RemoteStorageKind.REAL_S3:
assert "postgis" in all_extensions
# this may fail locally if dependency is missing
# we don't really care about the error,
# we just want to make sure it downloaded
try:
cur.execute("CREATE EXTENSION postgis")
except Exception as err:
log.info(f"(expected) error creating postgis extension: {err}")
# we do not check the error, so this is basically a NO-OP
# however checking the log you can make sure that it worked
# and also get valuable information about how long loading the extension took
# # postgis is on real s3 but not mock s3.
# # it's kind of a big file, would rather not upload to github
# if remote_storage_kind == RemoteStorageKind.REAL_S3:
# assert "postgis" in all_extensions
# # this may fail locally if dependency is missing
# # we don't really care about the error,
# # we just want to make sure it downloaded
# try:
# cur.execute("CREATE EXTENSION postgis")
# except Exception as err:
# log.info(f"(expected) error creating postgis extension: {err}")
# # we do not check the error, so this is basically a NO-OP
# # however checking the log you can make sure that it worked
# # and also get valuable information about how long loading the extension took
# this is expected to fail on my computer because I don't have the pgcrypto extension
try:
cur.execute("CREATE EXTENSION anon")
except Exception as err:
log.info("error creating anon extension")
assert "pgcrypto" in str(err), "unexpected error creating anon extension"
# # this is expected to fail on my computer because I don't have the pgcrypto extension
# try:
# cur.execute("CREATE EXTENSION anon")
# except Exception as err:
# log.info("error creating anon extension")
# assert "pgcrypto" in str(err), "unexpected error creating anon extension"
finally:
cleanup(pg_version)
# Test downloading remote library.
@pytest.mark.parametrize("remote_storage_kind", available_s3_storages())
def test_remote_library(
neon_env_builder: NeonEnvBuilder,
remote_storage_kind: RemoteStorageKind,
pg_version: PgVersion,
):
neon_env_builder.enable_remote_storage(
remote_storage_kind=remote_storage_kind,
test_name="test_remote_library",
enable_remote_extensions=True,
)
env = neon_env_builder.init_start()
tenant_id, _ = env.neon_cli.create_tenant()
env.neon_cli.create_timeline("test_remote_library", tenant_id=tenant_id)
# # Test downloading remote library.
# @pytest.mark.parametrize("remote_storage_kind", available_s3_storages())
# def test_remote_library(
# neon_env_builder: NeonEnvBuilder,
# remote_storage_kind: RemoteStorageKind,
# pg_version: PgVersion,
# ):
# neon_env_builder.enable_remote_storage(
# remote_storage_kind=remote_storage_kind,
# test_name="test_remote_library",
# enable_remote_extensions=True,
# )
# env = neon_env_builder.init_start()
# tenant_id, _ = env.neon_cli.create_tenant()
# env.neon_cli.create_timeline("test_remote_library", tenant_id=tenant_id)
assert env.ext_remote_storage is not None # satisfy mypy
assert env.remote_storage_client is not None # satisfy mypy
# assert env.ext_remote_storage is not None # satisfy mypy
# assert env.remote_storage_client is not None # satisfy mypy
# For MOCK_S3 we upload test files.
# For REAL_S3 we use the files already in the bucket
if remote_storage_kind == RemoteStorageKind.MOCK_S3:
upload_files(env)
# # For MOCK_S3 we upload test files.
# # For REAL_S3 we use the files already in the bucket
# if remote_storage_kind == RemoteStorageKind.MOCK_S3:
# upload_files(env)
# and use them to run LOAD library
endpoint = env.endpoints.create_start(
"test_remote_library",
tenant_id=tenant_id,
remote_ext_config=env.ext_remote_storage.to_string(),
# config_lines=["log_min_messages=debug3"],
)
try:
with closing(endpoint.connect()) as conn:
with conn.cursor() as cur:
# try to load library
try:
cur.execute("LOAD 'anon'")
except Exception as err:
log.info(f"error loading anon library: {err}")
raise AssertionError("unexpected error loading anon library") from err
# # and use them to run LOAD library
# endpoint = env.endpoints.create_start(
# "test_remote_library",
# tenant_id=tenant_id,
# remote_ext_config=env.ext_remote_storage.to_string(),
# # config_lines=["log_min_messages=debug3"],
# )
# try:
# with closing(endpoint.connect()) as conn:
# with conn.cursor() as cur:
# # try to load library
# try:
# cur.execute("LOAD 'anon'")
# except Exception as err:
# log.info(f"error loading anon library: {err}")
# raise AssertionError("unexpected error loading anon library") from err
# test library which name is different from extension name
# this may fail locally if dependency is missing
# however, it does successfully download the postgis archive
if remote_storage_kind == RemoteStorageKind.REAL_S3:
try:
cur.execute("LOAD 'postgis_topology-3'")
except Exception as err:
log.info("error loading postgis_topology-3")
assert "No such file or directory" in str(
err
), "unexpected error loading postgis_topology-3"
finally:
cleanup(pg_version)
# # test library which name is different from extension name
# # this may fail locally if dependency is missing
# # however, it does successfully download the postgis archive
# if remote_storage_kind == RemoteStorageKind.REAL_S3:
# try:
# cur.execute("LOAD 'postgis_topology-3'")
# except Exception as err:
# log.info("error loading postgis_topology-3")
# assert "No such file or directory" in str(
# err
# ), "unexpected error loading postgis_topology-3"
# finally:
# cleanup(pg_version)
# Here we test a complex extension
# which has multiple extensions in one archive
# using postgis as an example
@pytest.mark.skipif(
RemoteStorageKind.REAL_S3 not in available_s3_storages(),
reason="skipping test because real s3 not enabled",
)
def test_multiple_extensions_one_archive(
neon_env_builder: NeonEnvBuilder,
pg_version: PgVersion,
):
neon_env_builder.enable_remote_storage(
remote_storage_kind=RemoteStorageKind.REAL_S3,
test_name="test_multiple_extensions_one_archive",
enable_remote_extensions=True,
)
env = neon_env_builder.init_start()
tenant_id, _ = env.neon_cli.create_tenant()
env.neon_cli.create_timeline("test_multiple_extensions_one_archive", tenant_id=tenant_id)
# # Here we test a complex extension
# # which has multiple extensions in one archive
# # using postgis as an example
# @pytest.mark.skipif(
# RemoteStorageKind.REAL_S3 not in available_s3_storages(),
# reason="skipping test because real s3 not enabled",
# )
# def test_multiple_extensions_one_archive(
# neon_env_builder: NeonEnvBuilder,
# pg_version: PgVersion,
# ):
# neon_env_builder.enable_remote_storage(
# remote_storage_kind=RemoteStorageKind.REAL_S3,
# test_name="test_multiple_extensions_one_archive",
# enable_remote_extensions=True,
# )
# env = neon_env_builder.init_start()
# tenant_id, _ = env.neon_cli.create_tenant()
# env.neon_cli.create_timeline("test_multiple_extensions_one_archive", tenant_id=tenant_id)
assert env.ext_remote_storage is not None # satisfy mypy
assert env.remote_storage_client is not None # satisfy mypy
# assert env.ext_remote_storage is not None # satisfy mypy
# assert env.remote_storage_client is not None # satisfy mypy
endpoint = env.endpoints.create_start(
"test_multiple_extensions_one_archive",
tenant_id=tenant_id,
remote_ext_config=env.ext_remote_storage.to_string(),
)
with closing(endpoint.connect()) as conn:
with conn.cursor() as cur:
cur.execute("CREATE EXTENSION address_standardizer;")
cur.execute("CREATE EXTENSION address_standardizer_data_us;")
# execute query to ensure that it works
cur.execute(
"SELECT house_num, name, suftype, city, country, state, unit \
FROM standardize_address('us_lex', 'us_gaz', 'us_rules', \
'One Rust Place, Boston, MA 02109');"
)
res = cur.fetchall()
log.info(res)
assert len(res) > 0
# endpoint = env.endpoints.create_start(
# "test_multiple_extensions_one_archive",
# tenant_id=tenant_id,
# remote_ext_config=env.ext_remote_storage.to_string(),
# )
# with closing(endpoint.connect()) as conn:
# with conn.cursor() as cur:
# cur.execute("CREATE EXTENSION address_standardizer;")
# cur.execute("CREATE EXTENSION address_standardizer_data_us;")
# # execute query to ensure that it works
# cur.execute(
# "SELECT house_num, name, suftype, city, country, state, unit \
# FROM standardize_address('us_lex', 'us_gaz', 'us_rules', \
# 'One Rust Place, Boston, MA 02109');"
# )
# res = cur.fetchall()
# log.info(res)
# assert len(res) > 0
cleanup(pg_version)
# cleanup(pg_version)
# Test that extension is downloaded after endpoint restart,
# when the library is used in the query.
#
# Run the test with multiple simultaneous connections to an endpoint,
# to ensure that the extension is downloaded only once.
#
def test_extension_download_after_restart(
neon_env_builder: NeonEnvBuilder,
pg_version: PgVersion,
):
if "15" in pg_version: # SKIP v15 for now because test set only has extension built for v14
return None
# # Test that extension is downloaded after endpoint restart,
# # when the library is used in the query.
# #
# # Run the test with multiple simultaneous connections to an endpoint,
# # to ensure that the extension is downloaded only once.
# #
# def test_extension_download_after_restart(
# neon_env_builder: NeonEnvBuilder,
# pg_version: PgVersion,
# ):
# if "15" in pg_version: # SKIP v15 for now because test set only has extension built for v14
# return None
neon_env_builder.enable_remote_storage(
remote_storage_kind=RemoteStorageKind.MOCK_S3,
test_name="test_extension_download_after_restart",
enable_remote_extensions=True,
)
env = neon_env_builder.init_start()
tenant_id, _ = env.neon_cli.create_tenant()
env.neon_cli.create_timeline("test_extension_download_after_restart", tenant_id=tenant_id)
# neon_env_builder.enable_remote_storage(
# remote_storage_kind=RemoteStorageKind.MOCK_S3,
# test_name="test_extension_download_after_restart",
# enable_remote_extensions=True,
# )
# env = neon_env_builder.init_start()
# tenant_id, _ = env.neon_cli.create_tenant()
# env.neon_cli.create_timeline("test_extension_download_after_restart", tenant_id=tenant_id)
assert env.ext_remote_storage is not None # satisfy mypy
assert env.remote_storage_client is not None # satisfy mypy
# assert env.ext_remote_storage is not None # satisfy mypy
# assert env.remote_storage_client is not None # satisfy mypy
# For MOCK_S3 we upload test files.
upload_files(env)
# # For MOCK_S3 we upload test files.
# upload_files(env)
endpoint = env.endpoints.create_start(
"test_extension_download_after_restart",
tenant_id=tenant_id,
remote_ext_config=env.ext_remote_storage.to_string(),
config_lines=["log_min_messages=debug3"],
)
with closing(endpoint.connect()) as conn:
with conn.cursor() as cur:
cur.execute("CREATE extension pg_buffercache;")
cur.execute("SELECT * from pg_buffercache;")
res = cur.fetchall()
assert len(res) > 0
log.info(res)
# endpoint = env.endpoints.create_start(
# "test_extension_download_after_restart",
# tenant_id=tenant_id,
# remote_ext_config=env.ext_remote_storage.to_string(),
# config_lines=["log_min_messages=debug3"],
# )
# with closing(endpoint.connect()) as conn:
# with conn.cursor() as cur:
# cur.execute("CREATE extension pg_buffercache;")
# cur.execute("SELECT * from pg_buffercache;")
# res = cur.fetchall()
# assert len(res) > 0
# log.info(res)
# shutdown compute node
endpoint.stop()
# remove extension files locally
cleanup(pg_version)
# # shutdown compute node
# endpoint.stop()
# # remove extension files locally
# cleanup(pg_version)
# spin up compute node again (there are no extension files available, because compute is stateless)
endpoint = env.endpoints.create_start(
"test_extension_download_after_restart",
tenant_id=tenant_id,
remote_ext_config=env.ext_remote_storage.to_string(),
config_lines=["log_min_messages=debug3"],
)
# # spin up compute node again (there are no extension files available, because compute is stateless)
# endpoint = env.endpoints.create_start(
# "test_extension_download_after_restart",
# tenant_id=tenant_id,
# remote_ext_config=env.ext_remote_storage.to_string(),
# config_lines=["log_min_messages=debug3"],
# )
# connect to compute node and run the query
# that will trigger the download of the extension
def run_query(endpoint, thread_id: int):
    """Query pg_buffercache on *endpoint*, triggering the extension download.

    Run concurrently from several threads (see the callers below) to check
    that the extension is downloaded only once.
    """
    # NOTE: use plain %-placeholders — the original "{%d}" mixed brace and
    # %-formatting and printed literal braces in the log output.
    log.info("thread_id %d starting", thread_id)
    with closing(endpoint.connect()) as conn:
        with conn.cursor() as cur:
            cur.execute("SELECT * from pg_buffercache;")
            res = cur.fetchall()
            # The view must return at least one row once the library loads.
            assert len(res) > 0
            log.info("thread_id %d, res = %s", thread_id, res)
# # connect to compute node and run the query
# # that will trigger the download of the extension
# def run_query(endpoint, thread_id: int):
# log.info("thread_id {%d} starting", thread_id)
# with closing(endpoint.connect()) as conn:
# with conn.cursor() as cur:
# cur.execute("SELECT * from pg_buffercache;")
# res = cur.fetchall()
# assert len(res) > 0
# log.info("thread_id {%d}, res = %s", thread_id, res)
threads = [threading.Thread(target=run_query, args=(endpoint, i)) for i in range(2)]
# threads = [threading.Thread(target=run_query, args=(endpoint, i)) for i in range(2)]
for thread in threads:
thread.start()
for thread in threads:
thread.join()
# for thread in threads:
# thread.start()
# for thread in threads:
# thread.join()
cleanup(pg_version)
# cleanup(pg_version)