mirror of
https://github.com/neondatabase/neon.git
synced 2026-01-07 13:32:57 +00:00
Improve test_normal_work to start several computes
This commit is contained in:
committed by
Dmitry Rodionov
parent
ca10cc12c1
commit
b1b67cc5a0
@@ -24,9 +24,7 @@ def test_ancestor_branch(zenith_env_builder: ZenithEnvBuilder):
|
||||
'compaction_target_size': '4194304',
|
||||
})
|
||||
|
||||
with closing(env.pageserver.connect()) as psconn:
|
||||
with psconn.cursor(cursor_factory=psycopg2.extras.DictCursor) as pscur:
|
||||
pscur.execute("failpoints flush-frozen=sleep(10000)")
|
||||
env.pageserver.safe_psql("failpoints flush-frozen=sleep(10000)")
|
||||
|
||||
pg_branch0 = env.postgres.create_start('main', tenant_id=tenant)
|
||||
branch0_cur = pg_branch0.connect().cursor()
|
||||
|
||||
47
test_runner/batch_others/test_normal_work.py
Normal file
47
test_runner/batch_others/test_normal_work.py
Normal file
@@ -0,0 +1,47 @@
|
||||
from fixtures.log_helper import log
|
||||
from fixtures.zenith_fixtures import ZenithEnv, ZenithEnvBuilder, ZenithPageserverHttpClient
|
||||
|
||||
|
||||
def check_tenant(env: ZenithEnv, pageserver_http: ZenithPageserverHttpClient):
    """
    Exercise one tenant end to end: create a tenant with a timeline, start a
    compute on it, write and read back data, restart the compute, verify the
    data survived the restart, then stop the compute and detach the timeline.
    """
    tenant_id, timeline_id = env.zenith_cli.create_tenant()
    pg = env.postgres.create_start('main', tenant_id=tenant_id)

    # we rely upon autocommit after each statement
    res_1 = pg.safe_psql_many(queries=[
        'CREATE TABLE t(key int primary key, value text)',
        'INSERT INTO t SELECT generate_series(1,100000), \'payload\'',
        'SELECT sum(key) FROM t',
    ])

    # sum(1..100000) == 5000050000; only the last query returns data
    assert res_1[-1][0] == (5000050000, )
    # TODO check detach on live instance
    log.info("stopping compute")
    pg.stop()
    log.info("compute stopped")

    # Restart the compute and make sure the data is still readable.
    pg.start()
    res_2 = pg.safe_psql('SELECT sum(key) FROM t')
    assert res_2[0] == (5000050000, )

    pg.stop()
    pageserver_http.timeline_detach(tenant_id, timeline_id)
def test_normal_work(zenith_env_builder: ZenithEnvBuilder):
    """
    Basic test:
    * create new tenant with a timeline
    * write some data
    * ensure that it was successfully written
    * restart compute
    * check that the data is there
    * stop compute
    * detach timeline

    Repeat check for several tenants/timelines.
    """
    env = zenith_env_builder.init_start()
    pageserver_http = env.pageserver.http_client()

    # Run the same scenario for several fresh tenants against one pageserver.
    for _ in range(3):
        check_tenant(env, pageserver_http)
@@ -18,25 +18,6 @@ from fixtures.log_helper import log
|
||||
from typing import List, Optional, Any
|
||||
|
||||
|
||||
# basic test, write something in setup with wal acceptors, ensure that commits
# succeed and data is written
def test_normal_work(zenith_env_builder: ZenithEnvBuilder):
    zenith_env_builder.num_safekeepers = 3
    env = zenith_env_builder.init_start()

    env.zenith_cli.create_branch('test_safekeepers_normal_work')
    pg = env.postgres.create_start('test_safekeepers_normal_work')

    with closing(pg.connect()) as conn:
        with conn.cursor() as cur:
            # we rely upon autocommit after each statement
            # as waiting for acceptors happens there
            cur.execute('CREATE TABLE t(key int primary key, value text)')
            cur.execute("INSERT INTO t SELECT generate_series(1,100000), 'payload'")
            cur.execute('SELECT sum(key) FROM t')
            # sum(1..100000) == 5000050000
            assert cur.fetchone() == (5000050000, )
|
||||
|
||||
@dataclass
|
||||
class TimelineMetrics:
|
||||
timeline_id: str
|
||||
|
||||
@@ -338,18 +338,30 @@ class PgProtocol:
|
||||
conn_options['server_settings'] = {key: val}
|
||||
return await asyncpg.connect(**conn_options)
|
||||
|
||||
def safe_psql(self, query: str, **kwargs: Any) -> List[Tuple[Any, ...]]:
    """
    Execute query against the node and return all rows.
    This method passes all extra params to connstr.
    """
    # Delegate to the multi-query variant and unwrap the single result set.
    return self.safe_psql_many([query], **kwargs)[0]
||||
def safe_psql_many(self, queries: List[str], **kwargs: Any) -> List[List[Tuple[Any, ...]]]:
    """
    Execute queries against the node and return all rows.
    This method passes all extra params to connstr.
    """
    result: List[List[Any]] = []
    with closing(self.connect(**kwargs)) as conn:
        with conn.cursor() as cur:
            for query in queries:
                log.info(f"Executing query: {query}")
                cur.execute(query)

                # A statement that produces no result set (DDL, INSERT)
                # leaves cur.description as None; record an empty row list
                # for it so results stay positionally aligned with queries.
                if cur.description is None:
                    result.append([])  # query didn't return data
                else:
                    result.append(cast(List[Any], cur.fetchall()))
    return result
||||
|
||||
|
||||
@dataclass
|
||||
|
||||
Reference in New Issue
Block a user