mirror of
https://github.com/neondatabase/neon.git
synced 2025-12-25 23:29:59 +00:00
Copy pg server cert and key to pgdata with correct permission (#12731)
## Problem Copy the certificate and key from the secret mount directory to the `pgdata` directory, where `postgres` is the owner and we can set the key permission to 0600. ## Summary of changes - Added a new pgparam, `pg_compute_tls_settings`, to specify where the k8s secret for the certificate and key is mounted. - Added a new field to `ComputeSpec` called `databricks_settings`. This is a struct that will be used to store any other settings that need to be propagated to Compute but should not be persisted to `ComputeSpec` in the database. - Then, when the compute container starts up, as part of the `prepare_pgdata` function, it copies `server.key` and `server.crt` from the k8s mounted directory to the `pgdata` directory. ## How is this tested? Added unit tests. Manual test via KIND. Co-authored-by: Jarupat Jisarojito <jarupat.jisarojito@databricks.com>
This commit is contained in:
@@ -1462,6 +1462,14 @@ impl ComputeNode {
|
|||||||
// Update pg_hba.conf received with basebackup.
|
// Update pg_hba.conf received with basebackup.
|
||||||
update_pg_hba(pgdata_path, None)?;
|
update_pg_hba(pgdata_path, None)?;
|
||||||
|
|
||||||
|
if let Some(databricks_settings) = spec.databricks_settings.as_ref() {
|
||||||
|
copy_tls_certificates(
|
||||||
|
&databricks_settings.pg_compute_tls_settings.key_file,
|
||||||
|
&databricks_settings.pg_compute_tls_settings.cert_file,
|
||||||
|
pgdata_path,
|
||||||
|
)?;
|
||||||
|
}
|
||||||
|
|
||||||
// Place pg_dynshmem under /dev/shm. This allows us to use
|
// Place pg_dynshmem under /dev/shm. This allows us to use
|
||||||
// 'dynamic_shared_memory_type = mmap' so that the files are placed in
|
// 'dynamic_shared_memory_type = mmap' so that the files are placed in
|
||||||
// /dev/shm, similar to how 'dynamic_shared_memory_type = posix' works.
|
// /dev/shm, similar to how 'dynamic_shared_memory_type = posix' works.
|
||||||
|
|||||||
@@ -793,6 +793,7 @@ impl Endpoint {
|
|||||||
autoprewarm: args.autoprewarm,
|
autoprewarm: args.autoprewarm,
|
||||||
offload_lfc_interval_seconds: args.offload_lfc_interval_seconds,
|
offload_lfc_interval_seconds: args.offload_lfc_interval_seconds,
|
||||||
suspend_timeout_seconds: -1, // Only used in neon_local.
|
suspend_timeout_seconds: -1, // Only used in neon_local.
|
||||||
|
databricks_settings: None,
|
||||||
};
|
};
|
||||||
|
|
||||||
// this strange code is needed to support respec() in tests
|
// this strange code is needed to support respec() in tests
|
||||||
|
|||||||
@@ -193,6 +193,10 @@ pub struct ComputeSpec {
|
|||||||
///
|
///
|
||||||
/// We use this value to derive other values, such as the installed extensions metric.
|
/// We use this value to derive other values, such as the installed extensions metric.
|
||||||
pub suspend_timeout_seconds: i64,
|
pub suspend_timeout_seconds: i64,
|
||||||
|
|
||||||
|
// Databricks specific options for compute instance.
|
||||||
|
// These settings are not part of postgresql.conf.
|
||||||
|
pub databricks_settings: Option<DatabricksSettings>,
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Feature flag to signal `compute_ctl` to enable certain experimental functionality.
|
/// Feature flag to signal `compute_ctl` to enable certain experimental functionality.
|
||||||
|
|||||||
Reference in New Issue
Block a user