diff --git a/control_plane/src/compute.rs b/control_plane/src/compute.rs
index 65fbe8b72d..2f7c1d66bd 100644
--- a/control_plane/src/compute.rs
+++ b/control_plane/src/compute.rs
@@ -80,11 +80,10 @@ impl ComputeControlPlane {
         &mut self,
         is_test: bool,
         timelineid: ZTimelineId,
+        name: &str,
     ) -> Result<Arc<PostgresNode>> {
-        let node_id = self.nodes.len() as u32 + 1;
-
         let node = Arc::new(PostgresNode {
-            name: format!("pg{}", node_id),
+            name: name.to_owned(),
             address: SocketAddr::new("127.0.0.1".parse().unwrap(), self.get_port()),
             env: self.env.clone(),
             pageserver: Arc::clone(&self.pageserver),
@@ -105,7 +104,7 @@ impl ComputeControlPlane {
             .expect("failed to get timeline_id")
             .timeline_id;
 
-        let node = self.new_from_page_server(true, timeline_id);
+        let node = self.new_from_page_server(true, timeline_id, branch_name);
         let node = node.unwrap();
 
         // Configure the node to stream WAL directly to the pageserver
@@ -129,7 +128,9 @@ impl ComputeControlPlane {
             .expect("failed to get timeline_id")
             .timeline_id;
 
-        let node = self.new_from_page_server(true, timeline_id).unwrap();
+        let node = self
+            .new_from_page_server(true, timeline_id, branch_name)
+            .unwrap();
 
         node.append_conf(
             "postgresql.conf",
@@ -146,7 +147,9 @@ impl ComputeControlPlane {
             .expect("failed to get timeline_id")
             .timeline_id;
 
-        let node = self.new_from_page_server(false, timeline_id).unwrap();
+        let node = self
+            .new_from_page_server(false, timeline_id, branch_name)
+            .unwrap();
 
         // Configure the node to stream WAL directly to the pageserver
         node.append_conf(
diff --git a/integration_tests/src/lib.rs b/integration_tests/src/lib.rs
index 95fc3b29c9..3ae206ab56 100644
--- a/integration_tests/src/lib.rs
+++ b/integration_tests/src/lib.rs
@@ -203,10 +203,11 @@ impl PostgresNodeExt for PostgresNode {
                 println!("--------------- regression.diffs:\n{}", buffer);
             }
             // self.dump_log_file();
-            if let Ok(mut file) = File::open(self.env.pg_data_dir("pg1").join("log")) {
+
+            if let Ok(mut file) = File::open(self.env.pg_data_dir("main").join("log")) {
                 let mut buffer = String::new();
                 file.read_to_string(&mut buffer).unwrap();
-                println!("--------------- pgdatadirs/pg1/log:\n{}", buffer);
+                println!("--------------- pgdatadirs/main/log:\n{}", buffer);
             }
         }
         regress_check
diff --git a/test_runner/fixtures/zenith_fixtures.py b/test_runner/fixtures/zenith_fixtures.py
index 61fb0531ea..f3ecfa4eb2 100644
--- a/test_runner/fixtures/zenith_fixtures.py
+++ b/test_runner/fixtures/zenith_fixtures.py
@@ -158,21 +158,23 @@ class Postgres:
         self.host = 'localhost'
         self.port = 55431 + instance_num
         self.repo_dir = repo_dir
-        # path to conf is <repo_dir>/pgdatadirs/pg<instance_num>/postgresql.conf
+        self.branch = None
+        # path to conf is <repo_dir>/pgdatadirs/<branch>/postgresql.conf
 
     def create_start(self, branch, config_lines=None):
         """ create the pg data directory, and start the server """
         self.zenith_cli.run(['pg', 'create', branch])
+        self.branch = branch
         if config_lines is None:
             config_lines = []
         self.config(config_lines)
-        self.zenith_cli.run(['pg', 'start', 'pg{}'.format(self.instance_num)])
+        self.zenith_cli.run(['pg', 'start', branch])
         self.running = True
         return
 
     #lines should be an array of valid postgresql.conf rows
     def config(self, lines):
-        filename = 'pgdatadirs/pg{}/postgresql.conf'.format(self.instance_num)
+        filename = 'pgdatadirs/{}/postgresql.conf'.format(self.branch)
         config_name = os.path.join(self.repo_dir, filename)
         with open(config_name, 'a') as conf:
             for line in lines:
@@ -181,7 +183,7 @@ class Postgres:
 
     def stop(self):
         if self.running:
-            self.zenith_cli.run(['pg', 'stop', 'pg{}'.format(self.instance_num)])
+            self.zenith_cli.run(['pg', 'stop', self.branch])
 
     # Return a libpq connection string to connect to the Postgres instance
     def connstr(self):
diff --git a/zenith/src/main.rs b/zenith/src/main.rs
index 745e1fee97..0aff9a58c9 100644
--- a/zenith/src/main.rs
+++ b/zenith/src/main.rs
@@ -20,10 +20,10 @@ use zenith_utils::lsn::Lsn;
 // * Providing CLI api to the pageserver (local or remote)
 // * TODO: export/import to/from usual postgres
 fn main() -> Result<()> {
-    let name_arg = Arg::with_name("NAME")
+    let timeline_arg = Arg::with_name("timeline")
         .short("n")
         .index(1)
-        .help("name of this postgres instance")
+        .help("timeline name")
         .required(true);
 
     let matches = App::new("zenith")
@@ -53,17 +53,10 @@ fn main() -> Result<()> {
             SubCommand::with_name("pg")
                 .setting(AppSettings::ArgRequiredElseHelp)
                 .about("Manage postgres instances")
-                .subcommand(
-                    SubCommand::with_name("create")
-                        // .arg(name_arg.clone()
-                        //     .required(false)
-                        //     .help("name of this postgres instance (will be pgN if omitted)"))
-                        .arg(Arg::with_name("timeline").required(false).index(1)),
-                )
                 .subcommand(SubCommand::with_name("list"))
-                .subcommand(SubCommand::with_name("start").arg(name_arg.clone()))
-                .subcommand(SubCommand::with_name("stop").arg(name_arg.clone()))
-                .subcommand(SubCommand::with_name("destroy").arg(name_arg.clone())),
+                .subcommand(SubCommand::with_name("create").arg(timeline_arg.clone()))
+                .subcommand(SubCommand::with_name("start").arg(timeline_arg.clone()))
+                .subcommand(SubCommand::with_name("stop").arg(timeline_arg.clone())),
         )
         .subcommand(
             SubCommand::with_name("remote")
@@ -180,66 +173,68 @@ fn main() -> Result<()> {
 /// Returns a map of timeline IDs to branch_name@lsn strings.
 /// Connects to the pageserver to query this information.
-fn get_branch_infos(env: &local_env::LocalEnv) -> Result<HashMap<ZTimelineId, String>> {
+fn get_branch_infos(env: &local_env::LocalEnv) -> Result<HashMap<ZTimelineId, BranchInfo>> {
     let page_server = PageServerNode::from_env(env);
     let branch_infos: Vec<BranchInfo> = page_server.branches_list()?;
-    let branch_infos: Result<HashMap<ZTimelineId, String>> = branch_infos
+    let branch_infos: HashMap<ZTimelineId, BranchInfo> = branch_infos
         .into_iter()
-        .map(|branch_info| {
-            let lsn_string_opt = branch_info.latest_valid_lsn.map(|lsn| lsn.to_string());
-            let lsn_str = lsn_string_opt.as_deref().unwrap_or("?");
-            let branch_lsn_string = format!("{}@{}", branch_info.name, lsn_str);
-            Ok((branch_info.timeline_id, branch_lsn_string))
-        })
+        .map(|branch_info| (branch_info.timeline_id, branch_info))
         .collect();
 
-    branch_infos
+    Ok(branch_infos)
 }
 
 fn handle_pg(pg_match: &ArgMatches, env: &local_env::LocalEnv) -> Result<()> {
     let mut cplane = ComputeControlPlane::load(env.clone())?;
 
     match pg_match.subcommand() {
-        ("create", Some(sub_m)) => {
-            let timeline_arg = sub_m.value_of("timeline").unwrap_or("main");
-            println!("Initializing Postgres on timeline {}...", timeline_arg);
-            cplane.new_node(timeline_arg)?;
-        }
         ("list", Some(_sub_m)) => {
             let branch_infos = get_branch_infos(env).unwrap_or_else(|e| {
                 eprintln!("Failed to load branch info: {}", e);
                 HashMap::new()
             });
-            println!("NODE\tADDRESS\t\tSTATUS\tBRANCH@LSN");
-            for (node_name, node) in cplane.nodes.iter() {
+            println!("BRANCH\tADDRESS\t\tLSN\t\tSTATUS");
+            for (timeline_name, node) in cplane.nodes.iter() {
                 println!(
                     "{}\t{}\t{}\t{}",
-                    node_name,
+                    timeline_name,
                     node.address,
-                    node.status(),
                     branch_infos
                         .get(&node.timelineid)
-                        .map(|s| s.as_str())
-                        .unwrap_or("?")
+                        .map(|bi| bi
+                            .latest_valid_lsn
+                            .map_or("?".to_string(), |lsn| lsn.to_string()))
+                        .unwrap_or("?".to_string()),
+                    node.status(),
                 );
             }
         }
+        ("create", Some(sub_m)) => {
+            let timeline_name = sub_m.value_of("timeline").unwrap_or("main");
+            cplane.new_node(timeline_name)?;
+        }
        ("start", Some(sub_m)) => {
-            let name = sub_m.value_of("NAME").unwrap();
-            let node = cplane
-                .nodes
-                .get(name)
-                .ok_or_else(|| anyhow!("postgres {} is not found", name))?;
-            node.start()?;
+            let timeline_name = sub_m.value_of("timeline").unwrap_or("main");
+
+            let node = cplane.nodes.get(timeline_name);
+
+            println!("Starting postgres on timeline {}...", timeline_name);
+            if let Some(node) = node {
+                node.start()?;
+            } else {
+                let node = cplane.new_node(timeline_name)?;
+                node.start()?;
+            }
         }
         ("stop", Some(sub_m)) => {
-            let name = sub_m.value_of("NAME").unwrap();
+            let timeline_name = sub_m.value_of("timeline").unwrap_or("main");
             let node = cplane
                 .nodes
-                .get(name)
-                .ok_or_else(|| anyhow!("postgres {} is not found", name))?;
+                .get(timeline_name)
+                .ok_or_else(|| anyhow!("postgres {} is not found", timeline_name))?;
             node.stop()?;
+            // TODO: destroy data directory here
         }
         _ => {}
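
For context, a minimal usage sketch of the updated test fixture after this change, assuming the `Postgres` class from `test_runner/fixtures/zenith_fixtures.py` above; the constructor arguments (`zenith_cli`, `repo_dir`, `instance_num`) and the sample config line are illustrative assumptions, not part of this patch:

```python
# Hypothetical driver code (not in the patch): exercises the branch-named flow.
# `zenith_cli` and `repo_dir` are assumed to come from the surrounding fixtures.
pg = Postgres(zenith_cli, repo_dir, instance_num=1)

# Runs `pg create main` then `pg start main`; config lines are appended to
# <repo_dir>/pgdatadirs/main/postgresql.conf instead of .../pg1/postgresql.conf.
pg.create_start('main', config_lines=['log_statement = all'])

# `pg stop` is now keyed by the remembered branch name, not the instance number.
pg.stop()
```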