Revert "Use actual temporary dir for pageserver unit tests"

This reverts commit 826e89b9ce.

The problem with that commit was that it deleted the TempDir while
there were still EphemeralFile instances open.

At first I thought this could be fixed by simply adding

  Handle::current().block_on(task_mgr::shutdown(None, Some(tenant_id), None))

to TenantHarness::drop, but it turned out to be insufficient.

So, reverting the commit until we find a proper solution.

refs https://github.com/neondatabase/neon/issues/3385
This commit is contained in:
Christian Schwarz
2023-01-19 18:49:36 +01:00
committed by Christian Schwarz
parent a9bd05760f
commit 8ba1699937
10 changed files with 110 additions and 80 deletions

2
.gitignore vendored
View File

@@ -1,5 +1,7 @@
/pg_install
/target
/tmp_check
/tmp_check_cli
__pycache__/
test_output/
.vscode

1
control_plane/.gitignore vendored Normal file
View File

@@ -0,0 +1 @@
tmp_check/

View File

@@ -693,6 +693,11 @@ impl PageServerConf {
Ok(t_conf)
}
#[cfg(test)]
pub fn test_repo_dir(test_name: &str) -> PathBuf {
PathBuf::from(format!("../tmp_check/test_{test_name}"))
}
pub fn dummy_conf(repo_dir: PathBuf) -> Self {
let pg_distrib_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("../pg_install");

View File

@@ -2626,10 +2626,10 @@ where
#[cfg(test)]
pub mod harness {
use bytes::{Bytes, BytesMut};
use once_cell::sync::Lazy;
use once_cell::sync::OnceCell;
use std::sync::Arc;
use std::sync::{Arc, RwLock, RwLockReadGuard, RwLockWriteGuard};
use std::{fs, path::PathBuf};
use tempfile::TempDir;
use utils::logging;
use utils::lsn::Lsn;
@@ -2661,6 +2661,8 @@ pub mod harness {
buf.freeze()
}
static LOCK: Lazy<RwLock<()>> = Lazy::new(|| RwLock::new(()));
impl From<TenantConf> for TenantConfOpt {
fn from(tenant_conf: TenantConf) -> Self {
Self {
@@ -2681,31 +2683,42 @@ pub mod harness {
}
}
/// The harness saves some boilerplate and provides a way to create functional tenant
/// without running pageserver binary. It uses temporary directory to store data in it.
/// Tempdir gets removed on harness drop.
pub struct TenantHarness {
// keep the struct to not to remove tmp dir during the test
_temp_repo_dir: TempDir,
pub struct TenantHarness<'a> {
pub conf: &'static PageServerConf,
pub tenant_conf: TenantConf,
pub tenant_id: TenantId,
pub lock_guard: (
Option<RwLockReadGuard<'a, ()>>,
Option<RwLockWriteGuard<'a, ()>>,
),
}
static LOG_HANDLE: OnceCell<()> = OnceCell::new();
impl TenantHarness {
pub fn new() -> anyhow::Result<Self> {
impl<'a> TenantHarness<'a> {
pub fn create(test_name: &'static str) -> anyhow::Result<Self> {
Self::create_internal(test_name, false)
}
pub fn create_exclusive(test_name: &'static str) -> anyhow::Result<Self> {
Self::create_internal(test_name, true)
}
fn create_internal(test_name: &'static str, exclusive: bool) -> anyhow::Result<Self> {
let lock_guard = if exclusive {
(None, Some(LOCK.write().unwrap()))
} else {
(Some(LOCK.read().unwrap()), None)
};
LOG_HANDLE.get_or_init(|| {
logging::init(logging::LogFormat::Test).expect("Failed to init test logging")
});
let temp_repo_dir = tempfile::tempdir()?;
// `TempDir` uses a randomly generated subdirectory of a system tmp dir,
// so far it's enough to take care of concurrently running tests.
let repo_dir = temp_repo_dir.path();
let repo_dir = PageServerConf::test_repo_dir(test_name);
let _ = fs::remove_dir_all(&repo_dir);
fs::create_dir_all(&repo_dir)?;
let conf = PageServerConf::dummy_conf(repo_dir.to_path_buf());
let conf = PageServerConf::dummy_conf(repo_dir);
// Make a static copy of the config. This can never be free'd, but that's
// OK in a test.
let conf: &'static PageServerConf = Box::leak(Box::new(conf));
@@ -2723,10 +2736,10 @@ pub mod harness {
fs::create_dir_all(conf.timelines_path(&tenant_id))?;
Ok(Self {
_temp_repo_dir: temp_repo_dir,
conf,
tenant_conf,
tenant_id,
lock_guard,
})
}
@@ -2820,8 +2833,7 @@ mod tests {
#[tokio::test]
async fn test_basic() -> anyhow::Result<()> {
let harness = TenantHarness::new()?;
let tenant = harness.load().await;
let tenant = TenantHarness::create("test_basic")?.load().await;
let tline = tenant
.create_empty_timeline(TIMELINE_ID, Lsn(0), DEFAULT_PG_VERSION)?
.initialize()?;
@@ -2854,8 +2866,9 @@ mod tests {
#[tokio::test]
async fn no_duplicate_timelines() -> anyhow::Result<()> {
let harness = TenantHarness::new()?;
let tenant = harness.load().await;
let tenant = TenantHarness::create("no_duplicate_timelines")?
.load()
.await;
let _ = tenant
.create_empty_timeline(TIMELINE_ID, Lsn(0), DEFAULT_PG_VERSION)?
.initialize()?;
@@ -2886,8 +2899,7 @@ mod tests {
///
#[tokio::test]
async fn test_branch() -> anyhow::Result<()> {
let harness = TenantHarness::new()?;
let tenant = harness.load().await;
let tenant = TenantHarness::create("test_branch")?.load().await;
let tline = tenant
.create_empty_timeline(TIMELINE_ID, Lsn(0), DEFAULT_PG_VERSION)?
.initialize()?;
@@ -2984,8 +2996,10 @@ mod tests {
#[tokio::test]
async fn test_prohibit_branch_creation_on_garbage_collected_data() -> anyhow::Result<()> {
let harness = TenantHarness::new()?;
let tenant = harness.load().await;
let tenant =
TenantHarness::create("test_prohibit_branch_creation_on_garbage_collected_data")?
.load()
.await;
let tline = tenant
.create_empty_timeline(TIMELINE_ID, Lsn(0), DEFAULT_PG_VERSION)?
.initialize()?;
@@ -3020,8 +3034,9 @@ mod tests {
#[tokio::test]
async fn test_prohibit_branch_creation_on_pre_initdb_lsn() -> anyhow::Result<()> {
let harness = TenantHarness::new()?;
let tenant = harness.load().await;
let tenant = TenantHarness::create("test_prohibit_branch_creation_on_pre_initdb_lsn")?
.load()
.await;
tenant
.create_empty_timeline(TIMELINE_ID, Lsn(0x50), DEFAULT_PG_VERSION)?
@@ -3070,8 +3085,9 @@ mod tests {
#[tokio::test]
async fn test_retain_data_in_parent_which_is_needed_for_child() -> anyhow::Result<()> {
let harness = TenantHarness::new()?;
let tenant = harness.load().await;
let tenant = TenantHarness::create("test_retain_data_in_parent_which_is_needed_for_child")?
.load()
.await;
let tline = tenant
.create_empty_timeline(TIMELINE_ID, Lsn(0), DEFAULT_PG_VERSION)?
.initialize()?;
@@ -3093,8 +3109,9 @@ mod tests {
}
#[tokio::test]
async fn test_parent_keeps_data_forever_after_branching() -> anyhow::Result<()> {
let harness = TenantHarness::new()?;
let tenant = harness.load().await;
let tenant = TenantHarness::create("test_parent_keeps_data_forever_after_branching")?
.load()
.await;
let tline = tenant
.create_empty_timeline(TIMELINE_ID, Lsn(0), DEFAULT_PG_VERSION)?
.initialize()?;
@@ -3125,7 +3142,8 @@ mod tests {
#[tokio::test]
async fn timeline_load() -> anyhow::Result<()> {
let harness = TenantHarness::new()?;
const TEST_NAME: &str = "timeline_load";
let harness = TenantHarness::create(TEST_NAME)?;
{
let tenant = harness.load().await;
let tline = tenant
@@ -3144,7 +3162,8 @@ mod tests {
#[tokio::test]
async fn timeline_load_with_ancestor() -> anyhow::Result<()> {
let harness = TenantHarness::new()?;
const TEST_NAME: &str = "timeline_load_with_ancestor";
let harness = TenantHarness::create(TEST_NAME)?;
// create two timelines
{
let tenant = harness.load().await;
@@ -3182,7 +3201,8 @@ mod tests {
#[tokio::test]
async fn corrupt_metadata() -> anyhow::Result<()> {
let harness = TenantHarness::new()?;
const TEST_NAME: &str = "corrupt_metadata";
let harness = TenantHarness::create(TEST_NAME)?;
let tenant = harness.load().await;
tenant
@@ -3223,8 +3243,7 @@ mod tests {
#[tokio::test]
async fn test_images() -> anyhow::Result<()> {
let harness = TenantHarness::new()?;
let tenant = harness.load().await;
let tenant = TenantHarness::create("test_images")?.load().await;
let tline = tenant
.create_empty_timeline(TIMELINE_ID, Lsn(0), DEFAULT_PG_VERSION)?
.initialize()?;
@@ -3291,8 +3310,7 @@ mod tests {
//
#[tokio::test]
async fn test_bulk_insert() -> anyhow::Result<()> {
let harness = TenantHarness::new()?;
let tenant = harness.load().await;
let tenant = TenantHarness::create("test_bulk_insert")?.load().await;
let tline = tenant
.create_empty_timeline(TIMELINE_ID, Lsn(0), DEFAULT_PG_VERSION)?
.initialize()?;
@@ -3336,8 +3354,7 @@ mod tests {
#[tokio::test]
async fn test_random_updates() -> anyhow::Result<()> {
let harness = TenantHarness::new()?;
let tenant = harness.load().await;
let tenant = TenantHarness::create("test_random_updates")?.load().await;
let tline = tenant
.create_empty_timeline(TIMELINE_ID, Lsn(0), DEFAULT_PG_VERSION)?
.initialize()?;
@@ -3410,8 +3427,9 @@ mod tests {
#[tokio::test]
async fn test_traverse_branches() -> anyhow::Result<()> {
let harness = TenantHarness::new()?;
let tenant = harness.load().await;
let tenant = TenantHarness::create("test_traverse_branches")?
.load()
.await;
let mut tline = tenant
.create_empty_timeline(TIMELINE_ID, Lsn(0), DEFAULT_PG_VERSION)?
.initialize()?;
@@ -3495,8 +3513,9 @@ mod tests {
#[tokio::test]
async fn test_traverse_ancestors() -> anyhow::Result<()> {
let harness = TenantHarness::new()?;
let tenant = harness.load().await;
let tenant = TenantHarness::create("test_traverse_ancestors")?
.load()
.await;
let mut tline = tenant
.create_empty_timeline(TIMELINE_ID, Lsn(0), DEFAULT_PG_VERSION)?
.initialize()?;

View File

@@ -76,7 +76,7 @@ impl EphemeralFile {
})
}
fn fill_buffer(&self, buf: &mut [u8], blkno: u32) -> io::Result<()> {
fn fill_buffer(&self, buf: &mut [u8], blkno: u32) -> Result<(), io::Error> {
let mut off = 0;
while off < PAGE_SZ {
let n = self
@@ -277,7 +277,7 @@ impl Drop for EphemeralFile {
}
}
pub fn writeback(file_id: u64, blkno: u32, buf: &[u8]) -> io::Result<()> {
pub fn writeback(file_id: u64, blkno: u32, buf: &[u8]) -> Result<(), io::Error> {
if let Some(file) = EPHEMERAL_FILES.read().unwrap().files.get(&file_id) {
match file.write_all_at(buf, blkno as u64 * PAGE_SZ as u64) {
Ok(_) => Ok(()),
@@ -332,17 +332,25 @@ mod tests {
use super::*;
use crate::tenant::blob_io::{BlobCursor, BlobWriter};
use crate::tenant::block_io::BlockCursor;
use crate::tenant::harness::TenantHarness;
use rand::{seq::SliceRandom, thread_rng, RngCore};
use std::fs;
use std::str::FromStr;
fn harness() -> Result<(TenantHarness, TimelineId), io::Error> {
let harness = TenantHarness::new().expect("Failed to create tenant harness");
let timeline_id = TimelineId::from_str("22000000000000000000000000000000").unwrap();
fs::create_dir_all(harness.timeline_path(&timeline_id))?;
fn harness(
test_name: &str,
) -> Result<(&'static PageServerConf, TenantId, TimelineId), io::Error> {
let repo_dir = PageServerConf::test_repo_dir(test_name);
let _ = fs::remove_dir_all(&repo_dir);
let conf = PageServerConf::dummy_conf(repo_dir);
// Make a static copy of the config. This can never be free'd, but that's
// OK in a test.
let conf: &'static PageServerConf = Box::leak(Box::new(conf));
Ok((harness, timeline_id))
let tenant_id = TenantId::from_str("11000000000000000000000000000000").unwrap();
let timeline_id = TimelineId::from_str("22000000000000000000000000000000").unwrap();
fs::create_dir_all(conf.timeline_path(&timeline_id, &tenant_id))?;
Ok((conf, tenant_id, timeline_id))
}
// Helper function to slurp contents of a file, starting at the current position,
@@ -359,10 +367,10 @@ mod tests {
}
#[test]
fn test_ephemeral_files() -> io::Result<()> {
let (harness, timeline_id) = harness()?;
fn test_ephemeral_files() -> Result<(), io::Error> {
let (conf, tenant_id, timeline_id) = harness("ephemeral_files")?;
let file_a = EphemeralFile::create(harness.conf, harness.tenant_id, timeline_id)?;
let file_a = EphemeralFile::create(conf, tenant_id, timeline_id)?;
file_a.write_all_at(b"foo", 0)?;
assert_eq!("foo", read_string(&file_a, 0, 20)?);
@@ -373,7 +381,7 @@ mod tests {
// Open a lot of files, enough to cause some page evictions.
let mut efiles = Vec::new();
for fileno in 0..100 {
let efile = EphemeralFile::create(harness.conf, harness.tenant_id, timeline_id)?;
let efile = EphemeralFile::create(conf, tenant_id, timeline_id)?;
efile.write_all_at(format!("file {}", fileno).as_bytes(), 0)?;
assert_eq!(format!("file {}", fileno), read_string(&efile, 0, 10)?);
efiles.push((fileno, efile));
@@ -390,10 +398,10 @@ mod tests {
}
#[test]
fn test_ephemeral_blobs() -> io::Result<()> {
let (harness, timeline_id) = harness()?;
fn test_ephemeral_blobs() -> Result<(), io::Error> {
let (conf, tenant_id, timeline_id) = harness("ephemeral_blobs")?;
let mut file = EphemeralFile::create(harness.conf, harness.tenant_id, timeline_id)?;
let mut file = EphemeralFile::create(conf, tenant_id, timeline_id)?;
let pos_foo = file.write_blob(b"foo")?;
assert_eq!(b"foo", file.block_cursor().read_blob(pos_foo)?.as_slice());

View File

@@ -1064,7 +1064,7 @@ mod tests {
// Test scheduling
#[test]
fn upload_scheduling() -> anyhow::Result<()> {
let harness = TenantHarness::new()?;
let harness = TenantHarness::create("upload_scheduling")?;
let timeline_path = harness.timeline_path(&TIMELINE_ID);
std::fs::create_dir_all(&timeline_path)?;

View File

@@ -525,13 +525,12 @@ mod tests {
})
}
fn test_files<OF, FD>(test_name: &str, openfunc: OF) -> Result<(), Error>
fn test_files<OF, FD>(testname: &str, openfunc: OF) -> Result<(), Error>
where
FD: Read + Write + Seek + FileExt,
OF: Fn(&Path, &OpenOptions) -> Result<FD, std::io::Error>,
{
let temp_repo_dir = tempfile::tempdir()?;
let testdir = temp_repo_dir.path().join(test_name);
let testdir = crate::config::PageServerConf::test_repo_dir(testname);
std::fs::create_dir_all(&testdir)?;
let path_a = testdir.join("file_a");
@@ -633,8 +632,7 @@ mod tests {
const THREADS: usize = 100;
const SAMPLE: [u8; SIZE] = [0xADu8; SIZE];
let temp_repo_dir = tempfile::tempdir()?;
let testdir = temp_repo_dir.path().join("vfile_concurrency");
let testdir = crate::config::PageServerConf::test_repo_dir("vfile_concurrency");
std::fs::create_dir_all(&testdir)?;
// Create a test file.

View File

@@ -1146,8 +1146,7 @@ mod tests {
#[tokio::test]
async fn test_relsize() -> Result<()> {
let harness = TenantHarness::new()?;
let tenant = harness.load().await;
let tenant = TenantHarness::create("test_relsize")?.load().await;
let tline = create_test_timeline(&tenant, TIMELINE_ID, DEFAULT_PG_VERSION)?;
let mut walingest = init_walingest_test(&tline).await?;
@@ -1324,8 +1323,7 @@ mod tests {
// and then created it again within the same layer.
#[tokio::test]
async fn test_drop_extend() -> Result<()> {
let harness = TenantHarness::new()?;
let tenant = harness.load().await;
let tenant = TenantHarness::create("test_drop_extend")?.load().await;
let tline = create_test_timeline(&tenant, TIMELINE_ID, DEFAULT_PG_VERSION)?;
let mut walingest = init_walingest_test(&tline).await?;
@@ -1378,8 +1376,7 @@ mod tests {
// and then extended it again within the same layer.
#[tokio::test]
async fn test_truncate_extend() -> Result<()> {
let harness = TenantHarness::new()?;
let tenant = harness.load().await;
let tenant = TenantHarness::create("test_truncate_extend")?.load().await;
let tline = create_test_timeline(&tenant, TIMELINE_ID, DEFAULT_PG_VERSION)?;
let mut walingest = init_walingest_test(&tline).await?;
@@ -1500,8 +1497,7 @@ mod tests {
/// split into multiple 1 GB segments in Postgres.
#[tokio::test]
async fn test_large_rel() -> Result<()> {
let harness = TenantHarness::new()?;
let tenant = harness.load().await;
let tenant = TenantHarness::create("test_large_rel")?.load().await;
let tline = create_test_timeline(&tenant, TIMELINE_ID, DEFAULT_PG_VERSION)?;
let mut walingest = init_walingest_test(&tline).await?;

View File

@@ -846,7 +846,7 @@ mod tests {
#[tokio::test]
async fn no_connection_no_candidate() -> anyhow::Result<()> {
let harness = TenantHarness::new()?;
let harness = TenantHarness::create("no_connection_no_candidate")?;
let mut state = dummy_state(&harness).await;
let now = Utc::now().naive_utc();
@@ -879,7 +879,7 @@ mod tests {
#[tokio::test]
async fn connection_no_candidate() -> anyhow::Result<()> {
let harness = TenantHarness::new()?;
let harness = TenantHarness::create("connection_no_candidate")?;
let mut state = dummy_state(&harness).await;
let now = Utc::now().naive_utc();
@@ -942,7 +942,7 @@ mod tests {
#[tokio::test]
async fn no_connection_candidate() -> anyhow::Result<()> {
let harness = TenantHarness::new()?;
let harness = TenantHarness::create("no_connection_candidate")?;
let mut state = dummy_state(&harness).await;
let now = Utc::now().naive_utc();
@@ -1001,7 +1001,7 @@ mod tests {
#[tokio::test]
async fn candidate_with_many_connection_failures() -> anyhow::Result<()> {
let harness = TenantHarness::new()?;
let harness = TenantHarness::create("candidate_with_many_connection_failures")?;
let mut state = dummy_state(&harness).await;
let now = Utc::now().naive_utc();
@@ -1041,7 +1041,7 @@ mod tests {
#[tokio::test]
async fn lsn_wal_over_threshhold_current_candidate() -> anyhow::Result<()> {
let harness = TenantHarness::new()?;
let harness = TenantHarness::create("lsn_wal_over_threshcurrent_candidate")?;
let mut state = dummy_state(&harness).await;
let current_lsn = Lsn(100_000).align();
let now = Utc::now().naive_utc();
@@ -1105,7 +1105,7 @@ mod tests {
#[tokio::test]
async fn timeout_connection_threshhold_current_candidate() -> anyhow::Result<()> {
let harness = TenantHarness::new()?;
let harness = TenantHarness::create("timeout_connection_threshhold_current_candidate")?;
let mut state = dummy_state(&harness).await;
let current_lsn = Lsn(100_000).align();
let now = Utc::now().naive_utc();
@@ -1166,7 +1166,7 @@ mod tests {
#[tokio::test]
async fn timeout_wal_over_threshhold_current_candidate() -> anyhow::Result<()> {
let harness = TenantHarness::new()?;
let harness = TenantHarness::create("timeout_wal_over_threshhold_current_candidate")?;
let mut state = dummy_state(&harness).await;
let current_lsn = Lsn(100_000).align();
let new_lsn = Lsn(100_100).align();
@@ -1232,7 +1232,7 @@ mod tests {
const DUMMY_SAFEKEEPER_HOST: &str = "safekeeper_connstr";
async fn dummy_state(harness: &TenantHarness) -> WalreceiverState {
async fn dummy_state(harness: &TenantHarness<'_>) -> WalreceiverState {
WalreceiverState {
id: TenantTimelineId {
tenant_id: harness.tenant_id,

View File

@@ -2,6 +2,7 @@
/pg_regress
# Generated subdirectories
/tmp_check/
/results/
/log/