mirror of https://github.com/neondatabase/neon.git

use test bucket for test extensions

@@ -8,6 +8,7 @@ use std::sync::{Condvar, Mutex};
use anyhow::{Context, Result};
use chrono::{DateTime, Utc};
use postgres::{Client, NoTls};
use tokio;
use tokio_postgres;
use tracing::{info, instrument, warn};
use utils::id::{TenantId, TimelineId};

@@ -18,7 +19,6 @@ use compute_api::spec::{ComputeMode, ComputeSpec};

use remote_storage::GenericRemoteStorage;

use crate::extension_server::{get_available_extensions, get_available_libraries};
use crate::pg_helpers::*;
use crate::spec::*;
use crate::{config, extension_server};

@@ -668,7 +668,8 @@ LIMIT 100",
    // If remote extension storage is configured,
    // download extension control files
    // and shared preload libraries.
    pub fn prepare_external_extensions(&self, compute_state: &ComputeState) -> Result<()> {
    #[tokio::main]
    pub async fn prepare_external_extensions(&self, compute_state: &ComputeState) -> Result<()> {
        if let Some(ref ext_remote_storage) = self.ext_remote_storage {
            let pspec = compute_state.pspec.as_ref().expect("spec must be set");
            // download preload shared libraries before postgres start (if any)

@@ -705,22 +706,21 @@ LIMIT 100",
            // );

            // download extension control files & shared_preload_libraries
            let rt = tokio::runtime::Runtime::new().unwrap();

            let pgbin = self.pgbin.clone();
            rt.block_on(async move {
                get_available_extensions(ext_remote_storage, &pgbin, &private_ext_prefixes).await?;
            extension_server::get_available_extensions(
                ext_remote_storage,
                &self.pgbin,
                &private_ext_prefixes,
            )
            .await?;

                get_available_libraries(
                    ext_remote_storage,
                    &pgbin,
                    &private_ext_prefixes,
                    &libs_vec,
                )
                .await?;

                Ok::<(), anyhow::Error>(())
            })?;
            extension_server::get_available_libraries(
                ext_remote_storage,
                &self.pgbin,
                &private_ext_prefixes,
                &libs_vec,
            )
            .await?;
        }

        Ok(())

@@ -76,8 +76,8 @@ pub async fn get_available_extensions(
    let from_paths: Vec<RemotePath> = remote_storage.list_files(Some(&remote_sharedir)).await?;

    info!(
        "get_available_extensions remote_sharedir: {:?}, local_sharedir: {:?}",
        remote_sharedir, local_sharedir
        "get_available_extensions remote_sharedir: {:?}, local_sharedir: {:?}, \nall_paths: {:?}",
        remote_sharedir, local_sharedir, &from_paths
    );

    for remote_from_path in &from_paths {

@@ -534,6 +534,16 @@ class S3Storage:
            "AWS_SECRET_ACCESS_KEY": self.secret_key,
        }

    def to_string(self) -> str:
        return json.dumps(
            {
                "bucket": self.bucket_name,
                "region": self.bucket_region,
                "endpoint": self.endpoint,
                "prefix": self.prefix_in_bucket,
            }
        )


RemoteStorage = Union[LocalFsStorage, S3Storage]

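As a usage note (not part of the commit): a minimal, self-contained sketch of the to_string() helper added above, assuming S3Storage is roughly a dataclass with the fields used in this diff; all values below are made-up placeholders.

import json
from dataclasses import dataclass
from typing import Optional


@dataclass
class S3Storage:
    # Field names mirror the ones used in this diff; the exact fixture
    # definition in neon_fixtures.py may differ slightly.
    bucket_name: str
    bucket_region: str
    access_key: str
    secret_key: str
    endpoint: Optional[str] = None
    prefix_in_bucket: Optional[str] = None

    def to_string(self) -> str:
        # Same field shape as the remote_ext_config JSON used later in the test.
        return json.dumps(
            {
                "bucket": self.bucket_name,
                "region": self.bucket_region,
                "endpoint": self.endpoint,
                "prefix": self.prefix_in_bucket,
            }
        )


storage = S3Storage(
    bucket_name="neon-test-bucket",        # placeholder
    bucket_region="us-east-1",             # placeholder
    access_key="test",
    secret_key="test",
    endpoint="http://127.0.0.1:9000",      # placeholder mock endpoint
    prefix_in_bucket="ext",
)
print(storage.to_string())
# {"bucket": "neon-test-bucket", "region": "us-east-1",
#  "endpoint": "http://127.0.0.1:9000", "prefix": "ext"}
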
@@ -600,12 +610,12 @@ class NeonEnvBuilder:
        self.rust_log_override = rust_log_override
        self.port_distributor = port_distributor
        self.remote_storage = remote_storage
        self.ext_remote_storage: Optional[Any] = None
        self.ext_remote_storage: Optional[S3Storage] = None
        self.remote_storage_client: Optional[Any] = None
        self.remote_storage_users = remote_storage_users
        self.broker = broker
        self.run_id = run_id
        self.mock_s3_server = mock_s3_server
        self.mock_s3_server: MockS3Server = mock_s3_server
        self.pageserver_config_override = pageserver_config_override
        self.num_safekeepers = num_safekeepers
        self.safekeepers_id_start = safekeepers_id_start

@@ -713,17 +723,17 @@ class NeonEnvBuilder:
            bucket_region=mock_region,
            access_key=self.mock_s3_server.access_key(),
            secret_key=self.mock_s3_server.secret_key(),
            prefix_in_bucket="pageserver",
        )

        if enable_remote_extensions:
            ext_bucket_name = f"ext_{bucket_name}"
            self.remote_storage_client.create_bucket(Bucket=ext_bucket_name)
            self.ext_remote_storage = S3Storage(
                bucket_name=ext_bucket_name,
                bucket_name=bucket_name,
                endpoint=mock_endpoint,
                bucket_region=mock_region,
                access_key=self.mock_s3_server.access_key(),
                secret_key=self.mock_s3_server.secret_key(),
                prefix_in_bucket="ext",
            )

    def enable_real_s3_remote_storage(

@@ -765,20 +775,16 @@ class NeonEnvBuilder:
            bucket_region=region,
            access_key=access_key,
            secret_key=secret_key,
            prefix_in_bucket=self.remote_storage_prefix,
            prefix_in_bucket=f"{self.remote_storage_prefix}/pageserver",
        )

        if enable_remote_extensions:
            # TODO: add an env variable for EXT_REMOTE_STORAGE_S3_BUCKET
            ext_bucket_name = os.getenv("EXT_REMOTE_STORAGE_S3_BUCKET")
            if ext_bucket_name is None:
                ext_bucket_name = "neon-dev-extensions"
            self.ext_remote_storage = S3Storage(
                bucket_name=ext_bucket_name,
                bucket_name=bucket_name,
                bucket_region=region,
                access_key=access_key,
                secret_key=secret_key,
                prefix_in_bucket=self.remote_storage_prefix,
                prefix_in_bucket=f"{self.remote_storage_prefix}/ext",
            )

    def cleanup_local_storage(self):

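A rough sketch of the layout these two hunks converge on: pageserver data and test extensions share one test bucket and differ only by prefix, instead of the extensions living in a separate ext_ bucket. The bucket and prefix values below are invented for illustration.

# Invented values for illustration; the real prefix comes from the test run.
bucket_name = "neon-ci-tests"                           # the one shared test bucket
remote_storage_prefix = "5314225671/test_remote_extensions"

pageserver_prefix = f"{remote_storage_prefix}/pageserver"
ext_prefix = f"{remote_storage_prefix}/ext"

# s3://neon-ci-tests/5314225671/test_remote_extensions/pageserver/...
# s3://neon-ci-tests/5314225671/test_remote_extensions/ext/...
print(pageserver_prefix)
print(ext_prefix)
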
@@ -1,19 +1,14 @@
import json
import os
from contextlib import closing
from io import BytesIO

import pytest
from fixtures.log_helper import log
from fixtures.neon_fixtures import (
    NeonEnvBuilder,
    PgBin,
    RemoteStorageKind,
)
from fixtures.neon_fixtures import NeonEnvBuilder, PgBin, RemoteStorageKind
from fixtures.pg_version import PgVersion
from fixtures.types import TenantId

BUCKET_PREFIX = "5314225671"  # in real setup this is the build_tag
NUM_EXT = 3


def control_file_content(owner, i):

@@ -45,6 +40,7 @@ def prepare_mock_ext_storage(
    ext_remote_storage,
    remote_storage_client,
):
    bucket_prefix = ext_remote_storage.prefix_in_bucket
    private_prefix = str(tenant_id)
    PUB_EXT_ROOT = f"v{pg_version}/share/postgresql/extension"
    PRIVATE_EXT_ROOT = f"v{pg_version}/{private_prefix}/share/postgresql/extension"

@@ -68,15 +64,15 @@ def prepare_mock_ext_storage(
    cleanup_files = []

    # Upload several test_ext{i}.control files to the bucket
    for i in range(5):
    for i in range(NUM_EXT):
        # public extensions
        public_ext = BytesIO(bytes(control_file_content("public", i), "utf-8"))
        public_remote_name = f"{BUCKET_PREFIX}/{PUB_EXT_ROOT}/test_ext{i}.control"
        public_remote_name = f"{bucket_prefix}/{PUB_EXT_ROOT}/test_ext{i}.control"
        public_local_name = f"{LOCAL_EXT_ROOT}/test_ext{i}.control"

        # private extensions
        private_ext = BytesIO(bytes(control_file_content(str(tenant_id), i), "utf-8"))
        private_remote_name = f"{BUCKET_PREFIX}/{PRIVATE_EXT_ROOT}/private_ext{i}.control"
        private_remote_name = f"{bucket_prefix}/{PRIVATE_EXT_ROOT}/private_ext{i}.control"
        private_local_name = f"{LOCAL_EXT_ROOT}/private_ext{i}.control"

        cleanup_files += [public_local_name, private_local_name]

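For orientation, a short sketch of the object keys this loop produces once the hard-coded BUCKET_PREFIX is swapped for ext_remote_storage.prefix_in_bucket; every value here is a stand-in chosen for the example.

# Stand-in values; in the test they come from the fixtures.
bucket_prefix = "ext"                           # ext_remote_storage.prefix_in_bucket in MOCK_S3 mode
pg_version = "15"
tenant_id = "0123456789abcdef0123456789abcdef"  # stand-in tenant id
NUM_EXT = 3

PUB_EXT_ROOT = f"v{pg_version}/share/postgresql/extension"
PRIVATE_EXT_ROOT = f"v{pg_version}/{tenant_id}/share/postgresql/extension"

for i in range(NUM_EXT):
    print(f"{bucket_prefix}/{PUB_EXT_ROOT}/test_ext{i}.control")
    print(f"{bucket_prefix}/{PRIVATE_EXT_ROOT}/private_ext{i}.control")
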
@@ -90,7 +86,7 @@ def prepare_mock_ext_storage(

    # Upload SQL file for the extension we're going to create
    sql_filename = "test_ext0--1.0.sql"
    test_sql_public_remote_path = f"{BUCKET_PREFIX}/{PUB_EXT_ROOT}/{sql_filename}"
    test_sql_public_remote_path = f"{bucket_prefix}/{PUB_EXT_ROOT}/{sql_filename}"
    test_sql_local_path = f"{LOCAL_EXT_ROOT}/{sql_filename}"
    test_ext_sql_file = BytesIO(bytes(sql_file_content(), "utf-8"))
    remote_storage_client.upload_fileobj(

@@ -105,7 +101,7 @@ def prepare_mock_ext_storage(
    for i in range(2):
        lib_filename = f"test_lib{i}.so"
        TEST_LIB_PATH = f"{PUB_LIB_ROOT}/{lib_filename}"
        lib_public_remote_path = f"{BUCKET_PREFIX}/{TEST_LIB_PATH}"
        lib_public_remote_path = f"{bucket_prefix}/{TEST_LIB_PATH}"
        lib_local_path = f"{LOCAL_LIB_ROOT}/{lib_filename}"
        test_lib_file = BytesIO(
            b"""

@@ -129,7 +125,15 @@ def prepare_mock_ext_storage(
# Then check that compute nodes can download them and use them
# to CREATE EXTENSION and LOAD 'library.so'
#
# NOTE: you must have appropriate AWS credentials to run REAL_S3 test.
# NOTE: You must have appropriate AWS credentials to run REAL_S3 test.
# It may also be necessary to set the following environment variables:
# export AWS_ACCESS_KEY_ID='test'
# export AWS_SECRET_ACCESS_KEY='test'
# export AWS_SECURITY_TOKEN='test'
# export AWS_SESSION_TOKEN='test'
# export AWS_DEFAULT_REGION='us-east-1'


@pytest.mark.parametrize(
    "remote_storage_kind", [RemoteStorageKind.MOCK_S3, RemoteStorageKind.REAL_S3]
)

@@ -160,21 +164,6 @@ def test_remote_extensions(
        env.ext_remote_storage,
        env.remote_storage_client,
    )

    # TODO what region should we use for the test?
    region = "us-east-1"
    if remote_storage_kind == RemoteStorageKind.REAL_S3:
        region = "eu-central-1"

    remote_ext_config = json.dumps(
        {
            "bucket": env.ext_remote_storage.bucket_name,
            "region": region,
            "endpoint": env.ext_remote_storage.endpoint,
            "prefix": BUCKET_PREFIX,
        }
    )

    # Start a compute node and check that it can download the extensions
    # and use them to CREATE EXTENSION and LOAD 'library.so'
    #

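The hand-built remote_ext_config above is what the next hunk swaps for env.ext_remote_storage.to_string(); a small sketch of the JSON string the compute node receives either way, using placeholder values.

import json

# Placeholder values standing in for the env.ext_remote_storage fields.
bucket = "neon-dev-extensions-test"
region = "eu-central-1"     # previously picked by hand per RemoteStorageKind
endpoint = None             # MOCK_S3 would carry a real http endpoint here
prefix = "ext"

remote_ext_config = json.dumps(
    {"bucket": bucket, "region": region, "endpoint": endpoint, "prefix": prefix}
)
print(remote_ext_config)
# {"bucket": "neon-dev-extensions-test", "region": "eu-central-1", "endpoint": null, "prefix": "ext"}
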
@@ -184,17 +173,16 @@ def test_remote_extensions(
    endpoint = env.endpoints.create_start(
        "test_remote_extensions",
        tenant_id=tenant_id,
        remote_ext_config=remote_ext_config,
        config_lines=["log_min_messages=debug3"],
        remote_ext_config=env.ext_remote_storage.to_string(),
        # config_lines=["log_min_messages=debug3"],
    )
    with closing(endpoint.connect()) as conn:
        with conn.cursor() as cur:
            # Test query: check that test_ext0 was successfully downloaded
            cur.execute("SELECT * FROM pg_available_extensions")
            all_extensions = [x[0] for x in cur.fetchall()]
            log.info("ALEK*" * 100)
            log.info(all_extensions)
            for i in range(5):
            for i in range(NUM_EXT):
                assert f"test_ext{i}" in all_extensions
                assert f"private_ext{i}" in all_extensions

@@ -242,8 +230,8 @@ def test_remote_extensions(
    # files
    for file in cleanup_files:
        try:
            log.info(f"Deleting {file}")
            os.remove(file)
            log.info(f"Deleted {file}")
        except FileNotFoundError:
            log.info(f"{file} does not exist, so cannot be deleted")