Add in-memory storage engine

Konstantin Knizhnik
2021-07-22 10:59:28 +03:00
parent a533d22f71
commit 96e73fb585
3 changed files with 396 additions and 0 deletions
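
Only the two new performance tests are reproduced below; the in-memory storage engine itself (the remainder of the 396 added lines) is not shown in this excerpt. As rough orientation for what the tests exercise, here is a minimal Python sketch of a versioned in-memory page store. This is an illustration only: the class and method names are hypothetical and do not correspond to the actual object_repository.rs API.

# Hypothetical sketch, not from this commit: an in-memory page-version
# store keeps every version of a page keyed by LSN, so a reader can
# request the newest image at or before a given LSN, and garbage
# collection can trim versions that are no longer reachable.
from collections import defaultdict

class InMemoryPageStore:
    def __init__(self):
        # page key -> list of (lsn, image) pairs, appended in LSN order
        self.versions = defaultdict(list)

    def put_page(self, key, lsn, image):
        self.versions[key].append((lsn, image))

    def get_page_at_lsn(self, key, lsn):
        # newest version at or below the requested LSN
        for version_lsn, image in reversed(self.versions[key]):
            if version_lsn <= lsn:
                return image
        return None

    def gc(self, cutoff_lsn):
        # drop old versions, but keep the newest one at or below the
        # cutoff so every page remains reconstructible
        for key, entries in self.versions.items():
            keep_from = 0
            for i, (version_lsn, _) in enumerate(entries):
                if version_lsn <= cutoff_lsn:
                    keep_from = i
            self.versions[key] = entries[keep_from:]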

View File

@@ -0,0 +1,25 @@
from contextlib import closing
import psycopg2.extras
pytest_plugins = ("fixtures.zenith_fixtures")

#
# Test insertion of a large number of records
#
# This test is pretty tightly coupled with the current implementation of page version storage
# and garbage collection in object_repository.rs.
#

def test_bulk_insert(zenith_cli, pageserver, postgres, pg_bin):
    zenith_cli.run(["branch", "test_bulk_insert", "empty"])
    pg = postgres.create_start('test_bulk_insert')

    with closing(pg.connect()) as conn:
        with conn.cursor() as cur:
            cur.execute("create table t(c1 bigint, c2 bigint, c3 bigint, c4 bigint, c5 bigint)")
            cur.execute("create index on t(c1)")
            cur.execute("create index on t(c2)")
            cur.execute("create index on t(c3)")
            cur.execute("create index on t(c4)")
            cur.execute("create index on t(c5)")
            cur.execute("insert into t values (generate_series(1,1000000),generate_series(1,1000000),generate_series(1,1000000),generate_series(1,1000000),generate_series(1,1000000))")
            cur.execute("insert into t values (generate_series(1,1000000),random()*1000000,random()*1000000,random()*1000000,random()*1000000)")

View File

@@ -0,0 +1,26 @@
from contextlib import closing
import psycopg2.extras
import time
pytest_plugins = ("fixtures.zenith_fixtures")

#
# Test sequential scan of a large number of records
#
# This test is pretty tightly coupled with the current implementation of page version storage
# and garbage collection in object_repository.rs.
#

def test_seq_scan(zenith_cli, pageserver, postgres, pg_bin):
    zenith_cli.run(["branch", "test_seq_scan", "empty"])
    pg = postgres.create_start('test_seq_scan')

    with closing(pg.connect()) as conn:
        with conn.cursor() as cur:
            cur.execute("create table t(c1 bigint, c2 bigint, c3 bigint, c4 bigint, c5 bigint)")
            cur.execute("insert into t values (generate_series(1,1000000),generate_series(1,1000000),generate_series(1,1000000),generate_series(1,1000000),generate_series(1,1000000))")
            cur.execute("set max_parallel_workers_per_gather=0")
            for i in range(100):
                start = time.time()
                cur.execute("select count(*) from t")
                stop = time.time()
                print(f'Elapsed time for iterating through 1000000 records is {stop - start}')
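
Printing each iteration's elapsed time leaves the noise filtering to whoever reads the log. A hedged variant of the loop above, not part of this commit, would collect the 100 samples and report aggregates, which tend to be more stable for comparisons across runs:

            # hypothetical variant of the timing loop: aggregate the
            # samples instead of printing each one
            import statistics
            samples = []
            for i in range(100):
                start = time.time()
                cur.execute("select count(*) from t")
                samples.append(time.time() - start)
            print(f'min={min(samples):.3f}s median={statistics.median(samples):.3f}s')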