Tidy up pageserver's endpoints

Author: Kirill Bulatov
Date: 2022-02-27 23:57:00 +02:00
Committed by: Kirill Bulatov
Parent: 7b5482bac0
Commit: a5e10c4f64

12 changed files with 246 additions and 172 deletions

View File

@@ -57,12 +57,12 @@ pageserver init succeeded
Starting pageserver at 'localhost:64000' in '.zenith'
Pageserver started
initializing for single for 7676
Starting safekeeper at 'localhost:5454' in '.zenith/safekeepers/single'
Starting safekeeper at '127.0.0.1:5454' in '.zenith/safekeepers/single'
Safekeeper started
# start postgres compute node
> ./target/debug/zenith pg start main
Starting new postgres main on main...
Starting new postgres main on timeline 5b014a9e41b4b63ce1a1febc04503636 ...
Extracting base backup to create postgres instance: path=.zenith/pgdatadirs/tenants/c03ba6b7ad4c5e9cf556f059ade44229/main port=55432
Starting postgres node at 'host=127.0.0.1 port=55432 user=zenith_admin dbname=postgres'
waiting for server to start.... done
@@ -70,8 +70,8 @@ server started
# check list of running postgres instances
> ./target/debug/zenith pg list
BRANCH ADDRESS LSN STATUS
main 127.0.0.1:55432 0/1609610 running
NODE ADDRESS TIMELINES BRANCH NAME LSN STATUS
main 127.0.0.1:55432 5b014a9e41b4b63ce1a1febc04503636 main 0/1609610 running
```
4. Now it is possible to connect to postgres and run some queries:
@@ -91,13 +91,13 @@ postgres=# select * from t;
5. And create branches and run postgres on them:
```sh
# create branch named migration_check
> ./target/debug/zenith branch migration_check main
Created branch 'migration_check' at 0/1609610
> ./target/debug/zenith timeline branch --branch-name migration_check
Created timeline '0e9331cad6efbafe6a88dd73ae21a5c9' at Lsn 0/16F5830 for tenant: c03ba6b7ad4c5e9cf556f059ade44229. Ancestor timeline: 'main'
# check branches tree
> ./target/debug/zenith branch
main
┗━ @0/1609610: migration_check
> ./target/debug/zenith timeline list
main [5b014a9e41b4b63ce1a1febc04503636]
┗━ @0/1609610: migration_check [0e9331cad6efbafe6a88dd73ae21a5c9]
# start postgres on that branch
> ./target/debug/zenith pg start migration_check

View File

@@ -12,6 +12,7 @@ use std::path::{Path, PathBuf};
use std::process::{Command, Stdio};
use zenith_utils::auth::{encode_from_key_file, Claims, Scope};
use zenith_utils::postgres_backend::AuthType;
use zenith_utils::zid::ZTenantTimelineId;
use zenith_utils::zid::ZTimelineId;
use zenith_utils::zid::{HexZTenantId, ZNodeId, ZTenantId};
@@ -60,8 +61,7 @@ pub struct LocalEnv {
#[serde(default)]
pub safekeepers: Vec<SafekeeperConf>,
/// Every tenant has a first timeline created for it, currently the only one ancestor-less for this tenant.
/// It is used as a default timeline for branching, if no ancestor timeline is specified.
/// Keep human-readable aliases in memory (and persist them to config), to hide ZId hex strings from the user.
#[serde(default)]
// A `HashMap<String, HashMap<ZTenantId, ZTimelineId>>` would be more appropriate here,
// but deserialization into a generic toml object as `toml::Value::try_from` fails with an error.
@@ -158,11 +158,31 @@ impl LocalEnv {
branch_name: String,
tenant_id: ZTenantId,
timeline_id: ZTimelineId,
) {
self.branch_name_mappings
.entry(branch_name)
.or_default()
.push((tenant_id, timeline_id));
) -> anyhow::Result<()> {
let existing_values = self
.branch_name_mappings
.entry(branch_name.clone())
.or_default();
let existing_ids = existing_values
.iter()
.find(|(existing_tenant_id, _)| existing_tenant_id == &tenant_id);
if let Some((_, old_timeline_id)) = existing_ids {
if old_timeline_id == &timeline_id {
Ok(())
} else {
bail!(
"branch '{}' is already mapped to timeline {}, cannot map to another timeline {}",
branch_name,
old_timeline_id,
timeline_id
);
}
} else {
existing_values.push((tenant_id, timeline_id));
Ok(())
}
}
pub fn get_branch_timeline_id(
@@ -177,6 +197,18 @@ impl LocalEnv {
.map(|&(_, timeline_id)| timeline_id)
}
pub fn timeline_name_mappings(&self) -> HashMap<ZTenantTimelineId, String> {
self.branch_name_mappings
.iter()
.map(|(name, tenant_timelines)| {
tenant_timelines.iter().map(|&(tenant_id, timeline_id)| {
(ZTenantTimelineId::new(tenant_id, timeline_id), name.clone())
})
})
.flatten()
.collect()
}
/// Create a LocalEnv from a config file.
///
/// Unlike 'load_config', this function fills in any defaults that are missing

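For reference, a minimal self-contained sketch of the mapping semantics introduced above, using simplified integer ids instead of the real `ZTenantId`/`ZTimelineId` types: re-registering an identical pair is a no-op, mapping a branch name to a second timeline of the same tenant fails, and the reverse map mirrors `timeline_name_mappings` for display purposes.

```rust
use std::collections::HashMap;

// Illustrative stand-ins for ZTenantId / ZTimelineId.
type TenantId = u128;
type TimelineId = u128;

#[derive(Default)]
struct BranchMappings {
    // branch name -> (tenant_id, timeline_id) pairs, at most one per tenant
    by_name: HashMap<String, Vec<(TenantId, TimelineId)>>,
}

impl BranchMappings {
    fn register(&mut self, name: &str, tenant: TenantId, timeline: TimelineId) -> Result<(), String> {
        let entries = self.by_name.entry(name.to_owned()).or_default();
        if let Some(&(_, existing)) = entries.iter().find(|(t, _)| *t == tenant) {
            if existing == timeline {
                Ok(()) // the exact same mapping is already registered: nothing to do
            } else {
                Err(format!(
                    "branch '{name}' is already mapped to timeline {existing}, cannot map to another timeline {timeline}"
                ))
            }
        } else {
            entries.push((tenant, timeline));
            Ok(())
        }
    }

    // Reverse view, (tenant_id, timeline_id) -> branch name, analogous to what
    // is used when printing timeline trees and `pg list` output.
    fn names_by_timeline(&self) -> HashMap<(TenantId, TimelineId), String> {
        self.by_name
            .iter()
            .flat_map(|(name, pairs)| pairs.iter().map(move |&(t, tl)| ((t, tl), name.clone())))
            .collect()
    }
}

fn main() {
    let mut mappings = BranchMappings::default();
    assert!(mappings.register("main", 1, 10).is_ok());
    assert!(mappings.register("main", 1, 10).is_ok()); // idempotent
    assert!(mappings.register("main", 1, 11).is_err()); // conflict within one tenant
    assert_eq!(
        mappings.names_by_timeline().get(&(1, 10)),
        Some(&"main".to_string())
    );
}
```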
View File

@@ -9,7 +9,7 @@ use anyhow::{bail, Context};
use nix::errno::Errno;
use nix::sys::signal::{kill, Signal};
use nix::unistd::Pid;
use pageserver::http::models::{TenantCreateRequest, TimelineCreateRequest};
use pageserver::http::models::{TenantCreateRequest, TenantCreateResponse, TimelineCreateRequest};
use pageserver::timelines::TimelineInfo;
use postgres::{Config, NoTls};
use reqwest::blocking::{Client, RequestBuilder, Response};
@@ -322,7 +322,7 @@ impl PageServerNode {
}
pub fn check_status(&self) -> Result<()> {
self.http_request(Method::GET, format!("{}/{}", self.http_base_url, "status"))
self.http_request(Method::GET, format!("{}/status", self.http_base_url))
.send()?
.error_from_body()?;
Ok(())
@@ -330,7 +330,7 @@ impl PageServerNode {
pub fn tenant_list(&self) -> Result<Vec<TenantInfo>> {
Ok(self
.http_request(Method::GET, format!("{}/{}", self.http_base_url, "tenant"))
.http_request(Method::GET, format!("{}/tenant", self.http_base_url))
.send()?
.error_from_body()?
.json()?)
@@ -338,13 +338,13 @@ impl PageServerNode {
pub fn tenant_create(
&self,
tenant_id: ZTenantId,
new_tenant_id: Option<ZTenantId>,
initial_timeline_id: Option<ZTimelineId>,
) -> Result<ZTimelineId> {
) -> Result<TenantCreateResponse> {
Ok(self
.http_request(Method::POST, format!("{}/{}", self.http_base_url, "tenant"))
.http_request(Method::POST, format!("{}/tenant", self.http_base_url))
.json(&TenantCreateRequest {
tenant_id,
new_tenant_id,
initial_timeline_id,
})
.send()?
@@ -352,11 +352,11 @@ impl PageServerNode {
.json()?)
}
pub fn timeline_list(&self, tenantid: &ZTenantId) -> Result<Vec<TimelineInfo>> {
pub fn timeline_list(&self, tenant_id: &ZTenantId) -> Result<Vec<TimelineInfo>> {
Ok(self
.http_request(
Method::GET,
format!("{}/timeline/{}", self.http_base_url, tenantid),
format!("{}/tenant/{}/timeline", self.http_base_url, tenant_id),
)
.send()?
.error_from_body()?
@@ -366,16 +366,18 @@ impl PageServerNode {
pub fn timeline_create(
&self,
tenant_id: ZTenantId,
timeline_id: ZTimelineId,
start_lsn: Option<Lsn>,
new_timeline_id: Option<ZTimelineId>,
ancestor_start_lsn: Option<Lsn>,
ancestor_timeline_id: Option<ZTimelineId>,
) -> Result<TimelineInfo> {
Ok(self
.http_request(Method::POST, format!("{}/timeline", self.http_base_url))
.http_request(
Method::POST,
format!("{}/tenant/{}/timeline", self.http_base_url, tenant_id),
)
.json(&TimelineCreateRequest {
tenant_id,
timeline_id,
start_lsn,
new_timeline_id,
ancestor_start_lsn,
ancestor_timeline_id,
})
.send()?

View File

@@ -7,25 +7,33 @@ use zenith_utils::{
#[derive(Serialize, Deserialize)]
pub struct TimelineCreateRequest {
#[serde(with = "hex")]
pub tenant_id: ZTenantId,
#[serde(with = "hex")]
pub timeline_id: ZTimelineId,
#[serde(default)]
#[serde(with = "opt_display_serde")]
pub new_timeline_id: Option<ZTimelineId>,
#[serde(default)]
#[serde(with = "opt_display_serde")]
pub ancestor_timeline_id: Option<ZTimelineId>,
pub start_lsn: Option<Lsn>,
pub ancestor_start_lsn: Option<Lsn>,
}
#[derive(Serialize, Deserialize)]
pub struct TenantCreateRequest {
#[serde(with = "hex")]
pub tenant_id: ZTenantId,
#[serde(default)]
#[serde(with = "opt_display_serde")]
pub new_tenant_id: Option<ZTenantId>,
#[serde(default)]
#[serde(with = "opt_display_serde")]
pub initial_timeline_id: Option<ZTimelineId>,
}
#[derive(Deserialize, Serialize)]
pub struct TenantCreateResponse {
#[serde(with = "hex")]
pub tenant_id: ZTenantId,
#[serde(with = "hex")]
pub timeline_id: ZTimelineId,
}
#[derive(Serialize)]
pub struct StatusResponse {
pub id: ZNodeId,

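As a rough illustration of the wire format these structs describe (field names are taken from the definitions above; the ids and LSN are the example values from the README excerpt in this commit, and optional fields can simply be omitted thanks to `#[serde(default)]`):

```rust
use serde_json::json;

fn main() {
    // Body for POST /v1/tenant: both fields are optional, the pageserver
    // generates any id that is not supplied.
    let tenant_create = json!({
        "new_tenant_id": "c03ba6b7ad4c5e9cf556f059ade44229"
    });

    // Body for POST /v1/tenant/{tenant_id}/timeline: the tenant id now lives
    // in the URL, so only the optional timeline/ancestor parameters remain.
    let timeline_branch = json!({
        "ancestor_timeline_id": "5b014a9e41b4b63ce1a1febc04503636",
        "ancestor_start_lsn": "0/16F5830"
    });

    // Shape of the new TenantCreateResponse: the generated pair of ids.
    let tenant_created = json!({
        "tenant_id": "c03ba6b7ad4c5e9cf556f059ade44229",
        "timeline_id": "5b014a9e41b4b63ce1a1febc04503636"
    });

    for body in [tenant_create, timeline_branch, tenant_created] {
        println!("{body}");
    }
}
```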
View File

@@ -22,7 +22,7 @@ paths:
properties:
id:
type: integer
/v1/timeline/{tenant_id}:
/v1/tenant/{tenant_id}/timeline:
parameters:
- name: tenant_id
in: path
@@ -70,7 +70,7 @@ paths:
application/json:
schema:
$ref: "#/components/schemas/Error"
/v1/timeline/{tenant_id}/{timeline_id}:
/v1/tenant/{tenant_id}/timeline/{timeline_id}:
parameters:
- name: tenant_id
in: path
@@ -90,7 +90,7 @@ paths:
type: string
description: Controls calculation of current_logical_size_non_incremental
get:
description: Get timelines for tenant
description: Get info about the timeline
responses:
"200":
description: TimelineInfo
@@ -122,7 +122,14 @@ paths:
application/json:
schema:
$ref: "#/components/schemas/Error"
/v1/timeline/:
/v1/tenant/{tenant_id}/timeline/:
parameters:
- name: tenant_id
in: path
required: true
schema:
type: string
format: hex
post:
description: Create timeline
requestBody:
@@ -130,18 +137,14 @@ paths:
application/json:
schema:
type: object
required:
- "tenant_id"
- "timeline_id"
- "start_point"
properties:
tenant_id:
new_timeline_id:
type: string
format: hex
timeline_id:
ancestor_timeline_id:
type: string
format: hex
start_point:
ancestor_start_lsn:
type: string
responses:
"201":
@@ -149,7 +152,7 @@ paths:
content:
application/json:
schema:
$ref: "#/components/schemas/TImelineInfo"
$ref: "#/components/schemas/TimelineInfo"
"400":
description: Malformed timeline create request
content:
@@ -211,10 +214,11 @@ paths:
application/json:
schema:
type: object
required:
- "tenant_id"
properties:
tenant_id:
new_tenant_id:
type: string
format: hex
initial_timeline_id:
type: string
format: hex
responses:
@@ -223,9 +227,14 @@ paths:
content:
application/json:
schema:
type: array
items:
type: string
type: object
properties:
tenant_id:
type: string
format: hex
timeline_id:
type: string
format: hex
"400":
description: Malformed tenant create request
content:
@@ -268,35 +277,11 @@ components:
type: string
state:
type: string
TimelineInfo:
type: object
required:
- timeline_id
- latest_valid_lsn
- current_logical_size
properties:
timeline_id:
type: string
format: hex
ancestor_id:
type: string
format: hex
ancestor_lsn:
type: string
current_logical_size:
type: integer
current_logical_size_non_incremental:
type: integer
latest_valid_lsn:
type: integer
TimelineInfo:
type: object
required:
- timeline_id
- tenant_id
- last_record_lsn
- prev_record_lsn
- start_lsn
- disk_consistent_lsn
properties:
timeline_id:
@@ -305,19 +290,21 @@ components:
tenant_id:
type: string
format: hex
ancestor_timeline_id:
type: string
format: hex
last_record_lsn:
type: string
prev_record_lsn:
type: string
start_lsn:
ancestor_timeline_id:
type: string
format: hex
ancestor_lsn:
type: string
disk_consistent_lsn:
type: string
timeline_state:
type: string
current_logical_size:
type: integer
current_logical_size_non_incremental:
type: integer
Error:
type: object

View File

@@ -20,6 +20,7 @@ use zenith_utils::zid::{HexZTimelineId, ZTimelineId};
use super::models::StatusResponse;
use super::models::TenantCreateRequest;
use super::models::TenantCreateResponse;
use super::models::TimelineCreateRequest;
use crate::repository::RepositoryTimeline;
use crate::timelines::TimelineInfo;
@@ -69,18 +70,19 @@ async fn status_handler(request: Request<Body>) -> Result<Response<Body>, ApiErr
}
async fn timeline_create_handler(mut request: Request<Body>) -> Result<Response<Body>, ApiError> {
let tenant_id: ZTenantId = parse_request_param(&request, "tenant_id")?;
let request_data: TimelineCreateRequest = json_request(&mut request).await?;
check_permission(&request, Some(request_data.tenant_id))?;
check_permission(&request, Some(tenant_id))?;
let response_data = tokio::task::spawn_blocking(move || {
let _enter = info_span!("/timeline_create", timeline = %request_data.timeline_id, tenant = %request_data.tenant_id, lsn=?request_data.start_lsn).entered();
let _enter = info_span!("/timeline_create", tenant = %tenant_id, new_timeline = ?request_data.new_timeline_id, lsn=?request_data.ancestor_start_lsn).entered();
timelines::create_timeline(
get_config(&request),
request_data.tenant_id,
request_data.timeline_id,
tenant_id,
request_data.new_timeline_id,
request_data.ancestor_timeline_id,
request_data.start_lsn,
request_data.ancestor_start_lsn,
)
})
.await
@@ -214,12 +216,15 @@ async fn tenant_create_handler(mut request: Request<Body>) -> Result<Response<Bo
let request_data: TenantCreateRequest = json_request(&mut request).await?;
let initial_timeline_id = tokio::task::spawn_blocking(move || {
let _enter = info_span!("tenant_create", tenant = %request_data.tenant_id, initial_timeline = ?request_data.initial_timeline_id).entered();
let _enter = info_span!("tenant_create", tenant = ?request_data.new_tenant_id, initial_timeline = ?request_data.initial_timeline_id).entered();
tenant_mgr::create_repository_for_tenant(
get_config(&request),
request_data.tenant_id,
request_data.new_tenant_id,
request_data.initial_timeline_id,
)
).map(|new_ids| TenantCreateResponse {
tenant_id: new_ids.tenant_id,
timeline_id: new_ids.timeline_id,
})
})
.await
.map_err(ApiError::from_err)??;
@@ -253,21 +258,21 @@ pub fn make_router(
router
.data(Arc::new(State::new(conf, auth)))
.get("/v1/status", status_handler)
.get("/v1/timeline/:tenant_id", timeline_list_handler)
.get("/v1/tenant", tenant_list_handler)
.post("/v1/tenant", tenant_create_handler)
.get("/v1/tenant/:tenant_id/timeline", timeline_list_handler)
.post("/v1/tenant/:tenant_id/timeline", timeline_create_handler)
.get(
"/v1/timeline/:tenant_id/:timeline_id",
"/v1/tenant/:tenant_id/timeline/:timeline_id",
timeline_detail_handler,
)
.post(
"/v1/timeline/:tenant_id/:timeline_id/attach",
"/v1/tenant/:tenant_id/timeline/:timeline_id/attach",
timeline_attach_handler,
)
.post(
"/v1/timeline/:tenant_id/:timeline_id/detach",
"/v1/tenant/:tenant_id/timeline/:timeline_id/detach",
timeline_detach_handler,
)
.post("/v1/timeline", timeline_create_handler)
.get("/v1/tenant", tenant_list_handler)
.post("/v1/tenant", tenant_create_handler)
.any(handler_404)
}

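To make the reorganised URL layout concrete, a small sketch of a client walking the new routes with plain blocking `reqwest`; the base URL is a placeholder, authentication is left out, and the empty tenant-create body relies on the server generating the ids.

```rust
use serde_json::{json, Value};

// Requires the `reqwest` crate with its `blocking` and `json` features,
// plus `serde_json`.
fn main() -> Result<(), Box<dyn std::error::Error>> {
    let base = "http://localhost:9898/v1"; // pageserver HTTP address: placeholder
    let client = reqwest::blocking::Client::new();

    // GET /v1/status
    let status: Value = client.get(format!("{base}/status")).send()?.json()?;
    println!("status: {status}");

    // POST /v1/tenant: ids are generated by the pageserver when omitted,
    // and the response carries both the tenant and its initial timeline id.
    let created: Value = client
        .post(format!("{base}/tenant"))
        .json(&json!({}))
        .send()?
        .json()?;
    let tenant_id = created["tenant_id"].as_str().unwrap_or_default().to_owned();

    // GET /v1/tenant/{tenant_id}/timeline: timelines are now nested under the tenant.
    let timelines: Value = client
        .get(format!("{base}/tenant/{tenant_id}/timeline"))
        .send()?
        .json()?;
    println!("timelines: {timelines}");

    Ok(())
}
```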
View File

@@ -16,7 +16,7 @@ use serde::{Deserialize, Serialize};
use std::collections::{hash_map, HashMap};
use std::fmt;
use std::sync::{Arc, Mutex, MutexGuard};
use zenith_utils::zid::{ZTenantId, ZTimelineId};
use zenith_utils::zid::{ZTenantId, ZTenantTimelineId, ZTimelineId};
lazy_static! {
static ref TENANTS: Mutex<HashMap<ZTenantId, Tenant>> = Mutex::new(HashMap::new());
@@ -179,9 +179,10 @@ pub fn shutdown_all_tenants() {
pub fn create_repository_for_tenant(
conf: &'static PageServerConf,
tenant_id: ZTenantId,
new_tenant_id: Option<ZTenantId>,
initial_timeline_id: Option<ZTimelineId>,
) -> Result<ZTimelineId> {
) -> Result<ZTenantTimelineId> {
let tenant_id = new_tenant_id.unwrap_or_else(ZTenantId::generate);
let wal_redo_manager = Arc::new(PostgresRedoManager::new(conf, tenant_id));
let (initial_timeline_id, repo) =
timelines::create_repo(conf, tenant_id, initial_timeline_id, wal_redo_manager)?;
@@ -196,7 +197,7 @@ pub fn create_repository_for_tenant(
}
}
Ok(initial_timeline_id)
Ok(ZTenantTimelineId::new(tenant_id, initial_timeline_id))
}
pub fn get_tenant_state(tenantid: ZTenantId) -> Option<TenantState> {

View File

@@ -125,6 +125,13 @@ impl TimelineInfo {
TimelineInfo::Remote { timeline_id, .. } => timeline_id,
}
}
pub fn tenant_id(&self) -> ZTenantId {
match *self {
TimelineInfo::Local { tenant_id, .. } => tenant_id,
TimelineInfo::Remote { tenant_id, .. } => tenant_id,
}
}
}
fn get_current_logical_size_non_incremental(
@@ -335,10 +342,12 @@ pub(crate) fn get_timelines(
pub(crate) fn create_timeline(
conf: &'static PageServerConf,
tenant_id: ZTenantId,
new_timeline_id: ZTimelineId,
new_timeline_id: Option<ZTimelineId>,
ancestor_timeline_id: Option<ZTimelineId>,
ancestor_start_lsn: Option<Lsn>,
) -> Result<TimelineInfo> {
let new_timeline_id = new_timeline_id.unwrap_or_else(ZTimelineId::generate);
if conf.timeline_path(&new_timeline_id, &tenant_id).exists() {
bail!("timeline {} already exists", new_timeline_id);
}

View File

@@ -29,30 +29,27 @@ def test_pageserver_auth(zenith_env_builder: ZenithEnvBuilder):
tenant_id=env.initial_tenant)
# tenant can create branches
tenant_http_client.timeline_create(timeline_id=uuid4(),
tenant_id=env.initial_tenant,
tenant_http_client.timeline_create(tenant_id=env.initial_tenant,
ancestor_timeline_id=new_timeline_id)
# console can create branches for tenant
management_http_client.timeline_create(timeline_id=uuid4(),
tenant_id=env.initial_tenant,
management_http_client.timeline_create(tenant_id=env.initial_tenant,
ancestor_timeline_id=new_timeline_id)
# fail to create branch using token with different tenant_id
with pytest.raises(ZenithPageserverApiException,
match='Forbidden: Tenant id mismatch. Permission denied'):
invalid_tenant_http_client.timeline_create(timeline_id=uuid4(),
tenant_id=env.initial_tenant,
invalid_tenant_http_client.timeline_create(tenant_id=env.initial_tenant,
ancestor_timeline_id=new_timeline_id)
# create tenant using management token
management_http_client.tenant_create(uuid4())
management_http_client.tenant_create()
# fail to create tenant using tenant token
with pytest.raises(
ZenithPageserverApiException,
match='Forbidden: Attempt to access management api with tenant scope. Permission denied'
):
tenant_http_client.tenant_create(uuid4())
tenant_http_client.tenant_create()
@pytest.mark.parametrize('with_wal_acceptors', [False, True])

View File

@@ -15,17 +15,15 @@ def helper_compare_timeline_list(pageserver_http_client: ZenithPageserverHttpCli
Filters out timelines created by other tests.
"""
timelines_api = sorted(
map(lambda t: cast(str, t['timeline_id']),
pageserver_http_client.timeline_list(initial_tenant)))
timelines_cli = env.zenith_cli.list_timelines()
timelines_cli = [
b for b in timelines_cli if b.startswith('test_cli_') or b in ('empty', 'main')
]
assert timelines_cli == env.zenith_cli.list_timelines(initial_tenant)
timelines_cli_with_tenant_arg = env.zenith_cli.list_timelines(initial_tenant)
timelines_cli_with_tenant_arg = [
b for b in timelines_cli if b.startswith('test_cli_') or b in ('empty', 'main')
]
assert timelines_cli == timelines_cli_with_tenant_arg
cli_timeline_ids = sorted([timeline_id for (_, timeline_id) in timelines_cli])
assert timelines_api == cli_timeline_ids
def test_cli_timeline_list(zenith_simple_env: ZenithEnv):
@@ -45,7 +43,7 @@ def test_cli_timeline_list(zenith_simple_env: ZenithEnv):
helper_compare_timeline_list(pageserver_http_client, env, env.initial_tenant)
# Check that all new branches are visible via CLI
timelines_cli = env.zenith_cli.list_timelines()
timelines_cli = [timeline_id for (_, timeline_id) in env.zenith_cli.list_timelines()]
assert main_timeline_id.hex in timelines_cli
assert nested_timeline_id.hex in timelines_cli

View File

@@ -712,27 +712,29 @@ class ZenithPageserverHttpClient(requests.Session):
def timeline_attach(self, tenant_id: uuid.UUID, timeline_id: uuid.UUID):
res = self.post(
f"http://localhost:{self.port}/v1/timeline/{tenant_id.hex}/{timeline_id.hex}/attach", )
f"http://localhost:{self.port}/v1/tenant/{tenant_id.hex}/timeline/{timeline_id.hex}/attach",
)
self.verbose_error(res)
def timeline_detach(self, tenant_id: uuid.UUID, timeline_id: uuid.UUID):
res = self.post(
f"http://localhost:{self.port}/v1/timeline/{tenant_id.hex}/{timeline_id.hex}/detach", )
f"http://localhost:{self.port}/v1/tenant/{tenant_id.hex}/timeline/{timeline_id.hex}/detach",
)
self.verbose_error(res)
def timeline_create(self,
tenant_id: uuid.UUID,
timeline_id: uuid.UUID,
start_lsn: Optional[str] = None,
ancestor_timeline_id: Optional[uuid.UUID] = None) -> Dict[Any, Any]:
res = self.post(f"http://localhost:{self.port}/v1/timeline",
def timeline_create(
self,
tenant_id: uuid.UUID,
timeline_id: Optional[uuid.UUID] = None,
ancestor_timeline_id: Optional[uuid.UUID] = None,
ancestor_start_lsn: Optional[str] = None,
) -> Dict[Any, Any]:
res = self.post(f"http://localhost:{self.port}/v1/tenant/{tenant_id.hex}/timeline",
json={
'tenant_id':
tenant_id.hex,
'timeline_id':
timeline_id.hex,
'start_lsn':
start_lsn,
'new_timeline_id':
timeline_id.hex if timeline_id else None,
'ancestor_start_lsn':
ancestor_start_lsn,
'ancestor_timeline_id':
ancestor_timeline_id.hex if ancestor_timeline_id else None,
})
@@ -748,18 +750,23 @@ class ZenithPageserverHttpClient(requests.Session):
assert isinstance(res_json, list)
return res_json
def tenant_create(self, tenant_id: uuid.UUID):
def tenant_create(self,
tenant_id: Optional[uuid.UUID] = None,
new_timeline_id: Optional[uuid.UUID] = None) -> Dict[Any, Any]:
res = self.post(
f"http://localhost:{self.port}/v1/tenant",
json={
'tenant_id': tenant_id.hex,
'new_tenant_id': tenant_id.hex if tenant_id else None,
'initial_timeline_id': new_timeline_id.hex if new_timeline_id else None,
},
)
self.verbose_error(res)
return res.json()
res_json = res.json()
assert isinstance(res_json, dict)
return res_json
def timeline_list(self, tenant_id: uuid.UUID) -> List[Dict[Any, Any]]:
res = self.get(f"http://localhost:{self.port}/v1/timeline/{tenant_id.hex}")
res = self.get(f"http://localhost:{self.port}/v1/tenant/{tenant_id.hex}/timeline")
self.verbose_error(res)
res_json = res.json()
assert isinstance(res_json, list)
@@ -767,7 +774,7 @@ class ZenithPageserverHttpClient(requests.Session):
def timeline_detail(self, tenant_id: uuid.UUID, timeline_id: uuid.UUID) -> Dict[Any, Any]:
res = self.get(
f"http://localhost:{self.port}/v1/timeline/{tenant_id.hex}/{timeline_id.hex}?include-non-incremental-logical-size=1"
f"http://localhost:{self.port}/v1/tenant/{tenant_id.hex}/timeline/{timeline_id.hex}?include-non-incremental-logical-size=1"
)
self.verbose_error(res)
res_json = res.json()
@@ -861,13 +868,21 @@ class ZenithCli:
else:
return uuid.UUID(created_timeline_id)
def list_timelines(self, tenant_id: Optional[uuid.UUID] = None) -> List[str]:
def list_timelines(self, tenant_id: Optional[uuid.UUID] = None) -> List[Tuple[str, str]]:
"""
Returns a list of (branch_name, timeline_id) tuples out of parsed `zenith timeline list` CLI output.
"""
# (L) main [b49f7954224a0ad25cc0013ea107b54b]
# (L) ┣━ @0/16B5A50: test_cli_branch_list_main [20f98c79111b9015d84452258b7d5540]
timeline_data_extractor = re.compile(
r"\s(?P<branch_name>[^\s]+)\s\[(?P<timeline_id>[^\]]+)\]", re.MULTILINE)
res = self.raw_cli(
['timeline', 'list', '--tenant-id', (tenant_id or self.env.initial_tenant).hex])
branches_cli = sorted(
map(lambda b: b.split(') ')[-1].strip().split(':')[-1].strip(),
res.stdout.strip().split("\n")))
return branches_cli
timelines_cli = sorted(
map(lambda branch_and_id: (branch_and_id[0], branch_and_id[1]),
timeline_data_extractor.findall(res.stdout)))
return timelines_cli
def init(self,
config_toml: str,

View File

@@ -19,7 +19,7 @@ use walkeeper::defaults::{
use zenith_utils::auth::{Claims, Scope};
use zenith_utils::lsn::Lsn;
use zenith_utils::postgres_backend::AuthType;
use zenith_utils::zid::{ZNodeId, ZTenantId, ZTimelineId};
use zenith_utils::zid::{ZNodeId, ZTenantId, ZTenantTimelineId, ZTimelineId};
use zenith_utils::GIT_VERSION;
use pageserver::timelines::TimelineInfo;
@@ -60,6 +60,8 @@ http_port = {safekeeper_http_port}
struct TimelineTreeEl {
/// `TimelineInfo` received from the `pageserver` via the `timeline_list` http API call.
pub info: TimelineInfo,
/// Name, recovered from zenith config mappings
pub name: Option<String>,
/// Holds all direct children of this timeline referenced using `timeline_id`.
pub children: BTreeSet<ZTimelineId>,
}
@@ -150,7 +152,7 @@ fn main() -> Result<()> {
.arg(tenant_id_arg.clone())
.arg(branch_name_arg.clone())
.arg(Arg::new("ancestor-branch-name").long("ancestor-branch-name").takes_value(true)
.help("Use last Lsn of another timeline (and its data) as base when creating the new timeline. The timeline gets resolved by its branch name.").required(true))
.help("Use last Lsn of another timeline (and its data) as base when creating the new timeline. The timeline gets resolved by its branch name.").required(false))
.arg(Arg::new("ancestor-start-lsn").long("ancestor-start-lsn").takes_value(true)
.help("When using another timeline as base, use a specific Lsn in it instead of the latest one").required(false)))
.subcommand(App::new("create")
@@ -218,6 +220,8 @@ fn main() -> Result<()> {
.about("Start a postgres compute node.\n This command actually creates new node from scratch, but preserves existing config files")
.arg(pg_node_arg.clone())
.arg(tenant_id_arg.clone())
.arg(branch_name_arg.clone())
.arg(timeline_id_arg.clone())
.arg(lsn_arg.clone())
.arg(port_arg.clone()))
.subcommand(
@@ -290,7 +294,10 @@ fn main() -> Result<()> {
///
/// Prints timelines list as a tree-like structure.
///
fn print_timelines_tree(timelines: Vec<TimelineInfo>) -> Result<()> {
fn print_timelines_tree(
timelines: Vec<TimelineInfo>,
mut timeline_name_mappings: HashMap<ZTenantTimelineId, String>,
) -> Result<()> {
let mut timelines_hash = timelines
.iter()
.map(|t| {
@@ -299,6 +306,8 @@ fn print_timelines_tree(timelines: Vec<TimelineInfo>) -> Result<()> {
TimelineTreeEl {
info: t.clone(),
children: BTreeSet::new(),
name: timeline_name_mappings
.remove(&ZTenantTimelineId::new(t.tenant_id(), t.timeline_id())),
},
)
})
@@ -380,8 +389,12 @@ fn print_timeline(
print!("{} @{}: ", br_sym, lsn_string);
}
// Finally print a timeline name with new line
println!("{}", timeline.info.timeline_id());
// Finally print a timeline id and name with new line
println!(
"{} [{}]",
timeline.name.as_deref().unwrap_or("_no_name_"),
timeline.info.timeline_id()
);
let len = timeline.children.len();
let mut i: usize = 0;
@@ -492,7 +505,7 @@ fn handle_init(init_match: &ArgMatches) -> Result<LocalEnv> {
DEFAULT_BRANCH_NAME.to_owned(),
initial_tenant_id,
initial_timeline_id,
);
)?;
Ok(env)
}
@@ -514,19 +527,18 @@ fn handle_tenant(tenant_match: &ArgMatches, env: &mut local_env::LocalEnv) -> Re
}
}
Some(("create", create_match)) => {
let tenant_id = parse_tenant_id(create_match)?.unwrap_or_else(ZTenantId::generate);
println!("using tenant id {}", tenant_id);
let initial_tenant_id = parse_tenant_id(create_match)?;
let initial_timeline_id_argument = parse_timeline_id(create_match)?;
let initial_timeline_id =
pageserver.tenant_create(tenant_id, initial_timeline_id_argument)?;
let new_ds =
pageserver.tenant_create(initial_tenant_id, initial_timeline_id_argument)?;
env.register_branch_mapping(
DEFAULT_BRANCH_NAME.to_owned(),
tenant_id,
initial_timeline_id,
);
new_ds.tenant_id,
new_ds.timeline_id,
)?;
println!(
"tenant {} successfully created on the pageserver, initial timeline: '{}'",
tenant_id, initial_timeline_id
new_ds.tenant_id, new_ds.timeline_id
);
}
Some((sub_name, _)) => bail!("Unexpected tenant subcommand '{}'", sub_name),
@@ -542,15 +554,15 @@ fn handle_timeline(timeline_match: &ArgMatches, env: &mut local_env::LocalEnv) -
Some(("list", list_match)) => {
let tenant_id = get_tenant_id(list_match, env)?;
let timelines = pageserver.timeline_list(&tenant_id)?;
print_timelines_tree(timelines)?;
print_timelines_tree(timelines, env.timeline_name_mappings())?;
}
Some(("create", create_match)) => {
let tenant_id = get_tenant_id(create_match, env)?;
let new_timeline_id = ZTimelineId::generate();
let new_branch_name = create_match
.value_of("branch-name")
.ok_or(anyhow!("No branch name provided"))?;
let timeline = pageserver.timeline_create(tenant_id, new_timeline_id, None, None)?;
let timeline = pageserver.timeline_create(tenant_id, None, None, None)?;
let new_timeline_id = timeline.timeline_id();
let last_record_lsn = match timeline {
TimelineInfo::Local {
@@ -563,7 +575,7 @@ fn handle_timeline(timeline_match: &ArgMatches, env: &mut local_env::LocalEnv) -
)
}
};
env.register_branch_mapping(new_branch_name.to_string(), tenant_id, new_timeline_id);
env.register_branch_mapping(new_branch_name.to_string(), tenant_id, new_timeline_id)?;
println!(
"Created timeline '{}' at Lsn {} for tenant: {}",
@@ -574,13 +586,12 @@ fn handle_timeline(timeline_match: &ArgMatches, env: &mut local_env::LocalEnv) -
}
Some(("branch", branch_match)) => {
let tenant_id = get_tenant_id(branch_match, env)?;
let new_timeline_id = ZTimelineId::generate();
let new_branch_name = branch_match
.value_of("branch-name")
.ok_or(anyhow!("No branch name provided"))?;
let ancestor_branch_name = branch_match
.value_of("ancestor-branch-name")
.ok_or(anyhow!("No ancestor branch name provided"))?;
.unwrap_or(DEFAULT_BRANCH_NAME);
let ancestor_timeline_id = env
.get_branch_timeline_id(ancestor_branch_name, tenant_id)
.ok_or_else(|| {
@@ -597,10 +608,11 @@ fn handle_timeline(timeline_match: &ArgMatches, env: &mut local_env::LocalEnv) -
.context("Failed to parse ancestor start Lsn from the request")?;
let timeline = pageserver.timeline_create(
tenant_id,
new_timeline_id,
None,
start_lsn,
Some(ancestor_timeline_id),
)?;
let new_timeline_id = timeline.timeline_id();
let last_record_lsn = match timeline {
TimelineInfo::Local {
@@ -612,7 +624,7 @@ fn handle_timeline(timeline_match: &ArgMatches, env: &mut local_env::LocalEnv) -
),
};
env.register_branch_mapping(new_branch_name.to_string(), tenant_id, new_timeline_id);
env.register_branch_mapping(new_branch_name.to_string(), tenant_id, new_timeline_id)?;
println!(
"Created timeline '{}' at Lsn {} for tenant: {}. Ancestor timeline: '{}'",
@@ -647,7 +659,9 @@ fn handle_pg(pg_match: &ArgMatches, env: &local_env::LocalEnv) -> Result<()> {
HashMap::new()
});
println!("NODE\tADDRESS\t\tTIMELINE\tLSN\t\tSTATUS");
let timeline_name_mappings = env.timeline_name_mappings();
println!("NODE\tADDRESS\tTIMELINE\tBRANCH NAME\tLSN\t\tSTATUS");
for ((_, node_name), node) in cplane
.nodes
.iter()
@@ -666,11 +680,17 @@ fn handle_pg(pg_match: &ArgMatches, env: &local_env::LocalEnv) -> Result<()> {
})
.unwrap_or_else(|| '?'.to_string());
let branch_name = timeline_name_mappings
.get(&ZTenantTimelineId::new(tenant_id, node.timeline_id))
.map(|name| name.as_str())
.unwrap_or("?");
println!(
"{}\t{}\t{}\t{}\t{}",
"{}\t{}\t{}\t{}\t{}\t{}",
node_name,
node.address,
node.timeline_id,
branch_name,
lsn_str,
node.status(),
);