From a66df1f4bd88ab0182e55c4e99096b43a039f7db Mon Sep 17 00:00:00 2001 From: Jarupat Jisarojito Date: Tue, 23 Jul 2024 17:06:33 -0700 Subject: [PATCH] [BRC-1405] Mount databricks pg_hba and pg_ident from configmap to dblet pod ## Problem For certificate auth, we need to configure pg_hba and pg_ident for it to work. This PR https://github.com/databricks/universe/pull/655011 in universe will create a config map and deploy it to the `hadron-compute` namespace. HCC needs to mount this config map to all pg compute pods. ## Summary of changes Create `databricks_pg_hba` and `databricks_pg_ident` to configure where the files are located on the pod. These configs are passed down to `compute_ctl`. Compute_ctl uses these configs to update the `pg_hba.conf` and `pg_ident.conf` files. We append `include_if_exists {databricks_pg_hba}` to `pg_hba.conf` and similarly to `pg_ident.conf`, so that they refer to the databricks config files without much change to the existing pg default config files. I renamed `secret_mounts` to `compute_mounts` because now it is used to configure secret and config map mounts. Co-authored-by: Tristan Partin --- compute_tools/src/compute.rs | 10 +++++++--- compute_tools/src/spec.rs | 3 ++- 2 files changed, 9 insertions(+), 4 deletions(-) diff --git a/compute_tools/src/compute.rs b/compute_tools/src/compute.rs index 06a34ec41e..b5415f8742 100644 --- a/compute_tools/src/compute.rs +++ b/compute_tools/src/compute.rs @@ -1505,15 +1505,19 @@ impl ComputeNode { ) })?; - // Update pg_hba.conf received with basebackup. - update_pg_hba(pgdata_path, None)?; - if let Some(databricks_settings) = spec.databricks_settings.as_ref() { copy_tls_certificates( &databricks_settings.pg_compute_tls_settings.key_file, &databricks_settings.pg_compute_tls_settings.cert_file, pgdata_path, )?; + + // Update pg_hba.conf received with basebackup including additional databricks settings. 
+ update_pg_hba(pgdata_path, Some(&databricks_settings.databricks_pg_hba))?; + update_pg_ident(pgdata_path, Some(&databricks_settings.databricks_pg_ident))?; + } else { + // Update pg_hba.conf received with basebackup. + update_pg_hba(pgdata_path, None)?; } // Place pg_dynshmem under /dev/shm. This allows us to use diff --git a/compute_tools/src/spec.rs b/compute_tools/src/spec.rs index d00f86a2c0..751de76ca5 100644 --- a/compute_tools/src/spec.rs +++ b/compute_tools/src/spec.rs @@ -137,12 +137,13 @@ pub fn get_config_from_control_plane(base_uri: &str, compute_id: &str) -> Result /// Check `pg_hba.conf` and update if needed to allow external connections. pub fn update_pg_hba(pgdata_path: &Path, databricks_pg_hba: Option<&String>) -> Result<()> { // XXX: consider making it a part of config.json + info!("checking pg_hba.conf"); let pghba_path = pgdata_path.join("pg_hba.conf"); // Update pg_hba to contains databricks specfic settings before adding neon settings // PG uses the first record that matches to perform authentication, so we need to have // our rules before the default ones from neon. - // See https://www.postgresql.org/docs/16/auth-pg-hba-conf.html + // See https://www.postgresql.org/docs/current/auth-pg-hba-conf.html if let Some(databricks_pg_hba) = databricks_pg_hba { if config::line_in_file( &pghba_path,