Add data format backward compatibility tests (#2626)

Alexander Bayandin, committed by GitHub
2022-10-25 16:41:50 +02:00
parent df18b041c0
commit 834ffe1bac
6 changed files with 352 additions and 12 deletions


@@ -73,6 +73,13 @@ runs:
       shell: bash -euxo pipefail {0}
       run: ./scripts/pysync

+    - name: Download compatibility snapshot for Postgres 14
+      uses: ./.github/actions/download
+      with:
+        name: compatibility-snapshot-${{ inputs.build_type }}-pg14
+        path: /tmp/compatibility_snapshot_pg14
+        prefix: latest
+
     - name: Run pytest
       env:
         NEON_BIN: /tmp/neon/bin
@@ -80,6 +87,8 @@ runs:
         BUILD_TYPE: ${{ inputs.build_type }}
         AWS_ACCESS_KEY_ID: ${{ inputs.real_s3_access_key_id }}
         AWS_SECRET_ACCESS_KEY: ${{ inputs.real_s3_secret_access_key }}
+        COMPATIBILITY_SNAPSHOT_DIR: /tmp/compatibility_snapshot_pg14
+        ALLOW_BREAKING_CHANGES: ${{ contains(github.event.pull_request.labels.*.name, 'breaking changes') }}
       shell: bash -euxo pipefail {0}
       run: |
         # PLATFORM will be embedded in the perf test report
@@ -154,6 +163,15 @@ runs:
           scripts/generate_and_push_perf_report.sh
         fi

+    - name: Upload compatibility snapshot for Postgres 14
+      if: github.ref_name == 'release'
+      uses: ./.github/actions/upload
+      with:
+        name: compatibility-snapshot-${{ inputs.build_type }}-pg14-${{ github.run_id }}
+        # The path includes the test name (test_prepare_snapshot) and the directory the test creates (compatibility_snapshot_pg14); keep the path in sync with the test
+        path: /tmp/test_output/test_prepare_snapshot/compatibility_snapshot_pg14/
+        prefix: latest
+
     - name: Create Allure report
       if: always()
       uses: ./.github/actions/allure-report


@@ -844,7 +844,7 @@ jobs:
           submodules: true
           fetch-depth: 0
       - name: Configure environment
         run: |
           helm repo add neondatabase https://neondatabase.github.io/helm-charts
           aws --region us-east-2 eks update-kubeconfig --name dev-us-east-2-beta --role-arn arn:aws:iam::369495373322:role/github-runner
@@ -853,3 +853,24 @@ jobs:
         run: |
           DOCKER_TAG=${{needs.tag.outputs.build-tag}}
           helm upgrade neon-proxy-scram neondatabase/neon-proxy --namespace neon-proxy --create-namespace --install -f .github/helm-values/dev-us-east-2-beta.neon-proxy-scram.yaml --set image.tag=${DOCKER_TAG} --wait --timeout 15m0s
+
+  promote-compatibility-test-snapshot:
+    runs-on: dev
+    container:
+      image: 369495373322.dkr.ecr.eu-central-1.amazonaws.com/rust:pinned
+      options: --init
+    needs: [ deploy, deploy-proxy ]
+    if: github.ref_name == 'release' && github.event_name != 'workflow_dispatch'
+    steps:
+      - name: Promote compatibility snapshot for the release
+        shell: bash -euxo pipefail {0}
+        env:
+          BUCKET: neon-github-public-dev
+          PREFIX: artifacts/latest
+        run: |
+          for build_type in debug release; do
+            OLD_FILENAME=compatibility-snapshot-${build_type}-pg14-${GITHUB_RUN_ID}.tar.zst
+            NEW_FILENAME=compatibility-snapshot-${build_type}-pg14.tar.zst
+            time aws s3 mv --only-show-errors s3://${BUCKET}/${PREFIX}/${OLD_FILENAME} s3://${BUCKET}/${PREFIX}/${NEW_FILENAME}
+          done
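
Note: this job renames the run-scoped snapshot uploaded by the "Upload compatibility snapshot for Postgres 14" step into the stable key that the "Download compatibility snapshot for Postgres 14" step fetches on the next run. A sketch of the effective rename (the run id here is illustrative, not from a real run):

  # before: s3://neon-github-public-dev/artifacts/latest/compatibility-snapshot-release-pg14-1234567890.tar.zst
  # after:  s3://neon-github-public-dev/artifacts/latest/compatibility-snapshot-release-pg14.tar.zst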

poetry.lock (generated)

@@ -11,7 +11,7 @@ async-timeout = ">=3.0,<5.0"
 psycopg2-binary = ">=2.8.4"

 [package.extras]
-sa = ["sqlalchemy[postgresql_psycopg2binary] (>=1.3,<1.5)"]
+sa = ["sqlalchemy[postgresql-psycopg2binary] (>=1.3,<1.5)"]

 [[package]]
 name = "allure-pytest"
@@ -80,7 +80,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
 dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six", "sphinx", "sphinx-notfound-page", "zope.interface"]
 docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"]
 tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six", "zope.interface"]
-tests_no_zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six"]
+tests-no-zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six"]

 [[package]]
 name = "aws-sam-translator"
@@ -560,7 +560,7 @@ optional = false
 python-versions = ">=3.6.0"

 [package.extras]
-unicode_backport = ["unicodedata2"]
+unicode-backport = ["unicodedata2"]

 [[package]]
 name = "click"
@@ -593,7 +593,7 @@ python-versions = ">=3.6"
 cffi = ">=1.12"

 [package.extras]
-docs = ["sphinx (>=1.6.5,!=1.8.0,!=3.1.0,!=3.1.1)", "sphinx-rtd-theme"]
+docs = ["sphinx (>=1.6.5,!=1.8.0,!=3.1.0,!=3.1.1)", "sphinx_rtd_theme"]
 docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"]
 pep8test = ["black", "flake8", "flake8-import-order", "pep8-naming"]
 sdist = ["setuptools_rust (>=0.11.4)"]
@@ -738,9 +738,9 @@ python-versions = ">=3.6.1,<4.0"
 [package.extras]
 colors = ["colorama (>=0.4.3,<0.5.0)"]
-pipfile_deprecated_finder = ["pipreqs", "requirementslib"]
+pipfile-deprecated-finder = ["pipreqs", "requirementslib"]
 plugins = ["setuptools"]
-requirements_deprecated_finder = ["pip-api", "pipreqs"]
+requirements-deprecated-finder = ["pip-api", "pipreqs"]

 [[package]]
 name = "itsdangerous"
@@ -815,7 +815,7 @@ python-versions = ">=2.7"
 [package.extras]
 docs = ["jaraco.packaging (>=3.2)", "rst.linker (>=1.9)", "sphinx"]
 testing = ["ecdsa", "enum34", "feedparser", "jsonlib", "numpy", "pandas", "pymongo", "pytest (>=3.5,!=3.7.3)", "pytest-black-multipy", "pytest-checkdocs (>=1.2.3)", "pytest-cov", "pytest-flake8 (<1.1.0)", "pytest-flake8 (>=1.1.1)", "scikit-learn", "sqlalchemy"]
-"testing.libs" = ["simplejson", "ujson", "yajl"]
+testing-libs = ["simplejson", "ujson", "yajl"]

 [[package]]
 name = "jsonpointer"
@@ -836,11 +836,12 @@ python-versions = "*"
 [package.dependencies]
 attrs = ">=17.4.0"
 pyrsistent = ">=0.14.0"
+setuptools = "*"
 six = ">=1.11.0"

 [package.extras]
 format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"]
-format_nongpl = ["idna", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "webcolors"]
+format-nongpl = ["idna", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "webcolors"]

 [[package]]
 name = "junit-xml"
@@ -900,6 +901,7 @@ pytz = "*"
 PyYAML = {version = ">=5.1", optional = true, markers = "extra == \"server\""}
 requests = ">=2.5"
 responses = ">=0.9.0"
+setuptools = {version = "*", optional = true, markers = "extra == \"server\""}
 sshpubkeys = {version = ">=3.1.0", optional = true, markers = "extra == \"server\""}
 werkzeug = ">=0.5,<2.2.0"
 xmltodict = "*"
@@ -1008,6 +1010,7 @@ python-versions = ">=3.7.0,<4.0.0"
 jsonschema = ">=3.2.0,<5.0.0"
 openapi-schema-validator = ">=0.2.0,<0.3.0"
 PyYAML = ">=5.1"
+setuptools = "*"

 [package.extras]
 requests = ["requests"]
@@ -1340,7 +1343,7 @@ urllib3 = ">=1.21.1,<1.27"
 [package.extras]
 socks = ["PySocks (>=1.5.6,!=1.5.7)"]
-use_chardet_on_py3 = ["chardet (>=3.0.2,<6)"]
+use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]

 [[package]]
 name = "responses"
@@ -1394,6 +1397,19 @@ python-versions = ">= 2.7"
 attrs = "*"
 pbr = "*"

+[[package]]
+name = "setuptools"
+version = "65.5.0"
+description = "Easily download, build, install, upgrade, and uninstall Python packages"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+
+[package.extras]
+docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
+testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mock", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
+testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"]
+
 [[package]]
 name = "six"
 version = "1.16.0"
@@ -1460,6 +1476,14 @@ category = "main"
 optional = false
 python-versions = ">=3.7,<4.0"

+[[package]]
+name = "types-toml"
+version = "0.10.8"
+description = "Typing stubs for toml"
+category = "dev"
+optional = false
+python-versions = "*"
+
 [[package]]
 name = "types-urllib3"
 version = "1.26.17"
@@ -1544,7 +1568,7 @@ testing = ["func-timeout", "jaraco.itertools", "pytest (>=6)", "pytest-black (>=
 [metadata]
 lock-version = "1.1"
 python-versions = "^3.9"
-content-hash = "ead1495454ee6d880bb240447025db93a25ebe263c2709de5f144cc2d85dc975"
+content-hash = "17cdbfe90f1b06dffaf24c3e076384ec08dd4a2dce5a05e50565f7364932eb2d"

 [metadata.files]
 aiopg = [
@@ -2182,6 +2206,10 @@ sarif-om = [
     {file = "sarif_om-1.0.4-py3-none-any.whl", hash = "sha256:539ef47a662329b1c8502388ad92457425e95dc0aaaf995fe46f4984c4771911"},
     {file = "sarif_om-1.0.4.tar.gz", hash = "sha256:cd5f416b3083e00d402a92e449a7ff67af46f11241073eea0461802a3b5aef98"},
 ]
+setuptools = [
+    {file = "setuptools-65.5.0-py3-none-any.whl", hash = "sha256:f62ea9da9ed6289bfe868cd6845968a2c854d1427f8548d52cae02a42b4f0356"},
+    {file = "setuptools-65.5.0.tar.gz", hash = "sha256:512e5536220e38146176efb833d4a62aa726b7bbff82cfbc8ba9eaa3996e0b17"},
+]
 six = [
     {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
     {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
@@ -2210,6 +2238,10 @@ types-s3transfer = [
     {file = "types-s3transfer-0.6.0.post3.tar.gz", hash = "sha256:92c3704e5d041202bfb5ddb79d083fd1a02de2c5dfec6a91576823e6b5c93993"},
     {file = "types_s3transfer-0.6.0.post3-py3-none-any.whl", hash = "sha256:eedc5117275565b3c83662c0ccc81662a34da5dda8bd502b89d296b6d5cb091d"},
 ]
+types-toml = [
+    {file = "types-toml-0.10.8.tar.gz", hash = "sha256:b7e7ea572308b1030dc86c3ba825c5210814c2825612ec679eb7814f8dd9295a"},
+    {file = "types_toml-0.10.8-py3-none-any.whl", hash = "sha256:8300fd093e5829eb9c1fba69cee38130347d4b74ddf32d0a7df650ae55c2b599"},
+]
 types-urllib3 = [
     {file = "types-urllib3-1.26.17.tar.gz", hash = "sha256:73fd274524c3fc7cd8cd9ceb0cb67ed99b45f9cb2831013e46d50c1451044800"},
     {file = "types_urllib3-1.26.17-py3-none-any.whl", hash = "sha256:0d027fcd27dbb3cb532453b4d977e05bc1e13aefd70519866af211b3003d895d"},


@@ -28,12 +28,14 @@ Werkzeug = "2.1.2"
 pytest-order = "^1.0.1"
 allure-pytest = "^2.10.0"
 pytest-asyncio = "^0.19.0"
+toml = "^0.10.2"

 [tool.poetry.dev-dependencies]
 flake8 = "^5.0.4"
 mypy = "==0.971"
 black = "^22.6.0"
 isort = "^5.10.1"
+types-toml = "^0.10.8"

 [build-system]
 requires = ["poetry-core>=1.0.0"]


@@ -970,7 +970,7 @@ class NeonPageserverApiException(Exception):
 class NeonPageserverHttpClient(requests.Session):
-    def __init__(self, port: int, is_testing_enabled_or_skip, auth_token: Optional[str] = None):
+    def __init__(self, port: int, is_testing_enabled_or_skip: Fn, auth_token: Optional[str] = None):
         super().__init__()
         self.port = port
         self.auth_token = auth_token


@@ -0,0 +1,267 @@
import os
import re
import shutil
import subprocess
from pathlib import Path
from typing import Any, Dict, Union

import pytest
import toml
from fixtures.neon_fixtures import (
    NeonCli,
    NeonEnvBuilder,
    NeonPageserverHttpClient,
    PgBin,
    PortDistributor,
    wait_for_last_record_lsn,
    wait_for_upload,
)
from fixtures.types import Lsn
from pytest import FixtureRequest


def dump_differs(first: Path, second: Path, output: Path) -> bool:
    """
    Runs the diff(1) command on two SQL dumps and writes the output to the given output file.
    Returns True if the dumps differ, False otherwise.
    """
    with output.open("w") as stdout:
        rv = subprocess.run(
            [
                "diff",
                "--unified",  # Make diff output more readable
                "--ignore-matching-lines=^--",  # Ignore changes in comments
                "--ignore-blank-lines",
                str(first),
                str(second),
            ],
            stdout=stdout,
        )

    return rv.returncode != 0


class PortReplacer(object):
    """
    Helper class for replacing ports in config files.
    """

    def __init__(self, port_distributor: PortDistributor):
        self.port_distributor = port_distributor
        self.port_map: Dict[int, int] = {}

    def replace_port(self, value: Union[int, str]) -> Union[int, str]:
        if isinstance(value, int):
            if (known_port := self.port_map.get(value)) is not None:
                return known_port

            self.port_map[value] = self.port_distributor.get_port()
            return self.port_map[value]

        if isinstance(value, str):
            # Use a regex to find the port in a string;
            # urllib.parse.urlparse produces inconvenient results for cases without a scheme, like "localhost:5432".
            # See https://bugs.python.org/issue27657
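            # (For example, urlparse("localhost:5432") is liable to parse "localhost" as the
            # scheme and "5432" as the path, leaving netloc and .port empty; the exact
            # behaviour has varied across Python versions.)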
            ports = re.findall(r":(\d+)(?:/|$)", value)
            assert len(ports) == 1, f"can't find port in {value}"
            port_int = int(ports[0])

            if (known_port := self.port_map.get(port_int)) is not None:
                return value.replace(f":{port_int}", f":{known_port}")

            self.port_map[port_int] = self.port_distributor.get_port()
            return value.replace(f":{port_int}", f":{self.port_map[port_int]}")

        raise TypeError(f"unsupported type {type(value)} of {value=}")


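# A minimal usage sketch of PortReplacer (the port number is illustrative, not from a real snapshot):
#   pr = PortReplacer(port_distributor)
#   pr.replace_port(64000)                      # returns a freshly distributed port and remembers the mapping
#   pr.replace_port("http://127.0.0.1:64000/")  # applies the same mapping inside the string
# so every rewritten config file ends up pointing at the same replacement ports.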
def test_backward_compatibility(
    pg_bin: PgBin, port_distributor: PortDistributor, test_output_dir: Path, request: FixtureRequest
):
    compatibility_snapshot_dir_env = os.environ.get("COMPATIBILITY_SNAPSHOT_DIR")
    assert (
        compatibility_snapshot_dir_env is not None
    ), "COMPATIBILITY_SNAPSHOT_DIR is not set. It should be set to the `compatibility_snapshot_pg14` path generated by test_prepare_snapshot"
    compatibility_snapshot_dir = Path(compatibility_snapshot_dir_env).resolve()

    # Make compatibility snapshot artifacts pickupable by Allure
    # by copying the snapshot directory into the current test output directory.
    repo_dir = test_output_dir / "compatibility_snapshot" / "repo"
    shutil.copytree(compatibility_snapshot_dir / "repo", repo_dir)

    # Remove old logs to avoid confusion in test artifacts
    for logfile in repo_dir.glob("**/*.log"):
        logfile.unlink()

    # Remove tenants data for computes
    for tenant in (repo_dir / "pgdatadirs" / "tenants").glob("*"):
        shutil.rmtree(tenant)

    # Remove wal-redo temp directory
    for tenant in (repo_dir / "tenants").glob("*"):
        shutil.rmtree(tenant / "wal-redo-datadir.___temp")

    # Update paths and ports in config files
    pr = PortReplacer(port_distributor)

    pageserver_toml = repo_dir / "pageserver.toml"
    pageserver_config = toml.load(pageserver_toml)
    new_local_path = pageserver_config["remote_storage"]["local_path"].replace(
        "/test_prepare_snapshot/",
        "/test_backward_compatibility/compatibility_snapshot/",
    )
    pageserver_config["remote_storage"]["local_path"] = new_local_path
    pageserver_config["listen_http_addr"] = pr.replace_port(pageserver_config["listen_http_addr"])
    pageserver_config["listen_pg_addr"] = pr.replace_port(pageserver_config["listen_pg_addr"])
    pageserver_config["broker_endpoints"] = [
        pr.replace_port(ep) for ep in pageserver_config["broker_endpoints"]
    ]

    with pageserver_toml.open("w") as f:
        toml.dump(pageserver_config, f)

    snapshot_config_toml = repo_dir / "config"
    snapshot_config = toml.load(snapshot_config_toml)
    snapshot_config["etcd_broker"]["broker_endpoints"] = [
        pr.replace_port(ep) for ep in snapshot_config["etcd_broker"]["broker_endpoints"]
    ]
    snapshot_config["pageserver"]["listen_http_addr"] = pr.replace_port(
        snapshot_config["pageserver"]["listen_http_addr"]
    )
    snapshot_config["pageserver"]["listen_pg_addr"] = pr.replace_port(
        snapshot_config["pageserver"]["listen_pg_addr"]
    )
    for sk in snapshot_config["safekeepers"]:
        sk["http_port"] = pr.replace_port(sk["http_port"])
        sk["pg_port"] = pr.replace_port(sk["pg_port"])

    with snapshot_config_toml.open("w") as f:
        toml.dump(snapshot_config, f)

    # Ensure that the snapshot doesn't contain references to the original path
    rv = subprocess.run(
        [
            "grep",
            "--recursive",
            "--binary-file=without-match",
            "--files-with-matches",
            "test_prepare_snapshot/repo",
            str(repo_dir),
        ],
        capture_output=True,
        text=True,
    )
    assert (
        rv.returncode != 0
    ), f"there are files referencing `test_prepare_snapshot/repo`; this path should be replaced with {repo_dir}:\n{rv.stdout}"

    # NeonEnv stub to make NeonCli happy
    config: Any = type("NeonEnvStub", (object,), {})
    config.rust_log_override = None
    config.repo_dir = repo_dir
    config.pg_version = "14"  # Note: the `pg_dumpall` (from pg_bin) version is set by DEFAULT_PG_VERSION_DEFAULT and can be overridden by the DEFAULT_PG_VERSION env var
    config.initial_tenant = snapshot_config["default_tenant_id"]

    # Check that we can start the project
    cli = NeonCli(config)
    try:
        cli.raw_cli(["start"])
        request.addfinalizer(lambda: cli.raw_cli(["stop"]))

        result = cli.pg_start("main")
        request.addfinalizer(lambda: cli.pg_stop("main"))
    except Exception:
        breaking_changes_allowed = (
            os.environ.get("ALLOW_BREAKING_CHANGES", "false").lower() == "true"
        )
        if breaking_changes_allowed:
            pytest.xfail("Breaking changes are allowed by the ALLOW_BREAKING_CHANGES env var")
        else:
            raise

    connstr_all = re.findall(r"Starting postgres node at '([^']+)'", result.stdout)
    assert len(connstr_all) == 1, f"can't parse connstr from {result.stdout}"
    connstr = connstr_all[0]

    # Check that the project produces the same dump as the previous version.
    # The assert itself is deferred to the end of the test
    # to allow us to perform checks that change data before failing.
    pg_bin.run(["pg_dumpall", f"--dbname={connstr}", f"--file={test_output_dir / 'dump.sql'}"])
    initial_dump_differs = dump_differs(
        compatibility_snapshot_dir / "dump.sql",
        test_output_dir / "dump.sql",
        test_output_dir / "dump.filediff",
    )

    # Check that the project can be recovered from WAL,
    # loosely based on https://github.com/neondatabase/cloud/wiki/Recovery-from-WAL
    tenant_id = snapshot_config["default_tenant_id"]
    timeline_id = dict(snapshot_config["branch_name_mappings"]["main"])[tenant_id]
    pageserver_port = snapshot_config["pageserver"]["listen_http_addr"].split(":")[-1]
    auth_token = snapshot_config["pageserver"]["auth_token"]
    pageserver_http = NeonPageserverHttpClient(
        port=pageserver_port,
        is_testing_enabled_or_skip=lambda: True,  # TODO: check if testing is really enabled
        auth_token=auth_token,
    )
    shutil.rmtree(repo_dir / "local_fs_remote_storage")
    pageserver_http.timeline_delete(tenant_id, timeline_id)
    pageserver_http.timeline_create(tenant_id, timeline_id)
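
    # With local_fs remote storage wiped and the timeline recreated empty, the pageserver
    # has to repopulate it by replaying the WAL kept on the safekeepers; the dump below
    # verifies the result of that recovery.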
    pg_bin.run(
        ["pg_dumpall", f"--dbname={connstr}", f"--file={test_output_dir / 'dump-from-wal.sql'}"]
    )
    # The assert itself is deferred to the end of the test
    # to allow us to perform checks that change data before failing.
    dump_from_wal_differs = dump_differs(
        test_output_dir / "dump.sql",
        test_output_dir / "dump-from-wal.sql",
        test_output_dir / "dump-from-wal.filediff",
    )

    # Check that we can interact with the data
    pg_bin.run(["pgbench", "--time=10", "--progress=2", connstr])

    assert not dump_from_wal_differs, "dump from WAL differs"
    assert not initial_dump_differs, "initial dump differs"


@pytest.mark.order(after="test_backward_compatibility")
# Note: if renaming this test, don't forget to update the reference to it in a workflow file:
# the "Upload compatibility snapshot for Postgres 14" step in .github/actions/run-python-test-set/action.yml
def test_prepare_snapshot(neon_env_builder: NeonEnvBuilder, pg_bin: PgBin, test_output_dir: Path):
    # The test doesn't really test anything:
    # it creates a new snapshot for releases, after the current version has been tested
    # against the previous snapshot in `test_backward_compatibility`.
    #
    # There's no cleanup here; this allows adjusting the data in `test_backward_compatibility`
    # itself without re-collecting it.
    neon_env_builder.pg_version = "14"
    neon_env_builder.num_safekeepers = 3
    neon_env_builder.enable_local_fs_remote_storage()
    env = neon_env_builder.init_start()
    pg = env.postgres.create_start("main")

    pg_bin.run(["pgbench", "--initialize", "--scale=10", pg.connstr()])
    pg_bin.run(["pgbench", "--time=60", "--progress=2", pg.connstr()])
    pg_bin.run(["pg_dumpall", f"--dbname={pg.connstr()}", f"--file={test_output_dir / 'dump.sql'}"])

    snapshot_config = toml.load(test_output_dir / "repo" / "config")
    tenant_id = snapshot_config["default_tenant_id"]
    timeline_id = dict(snapshot_config["branch_name_mappings"]["main"])[tenant_id]

    pageserver_http = env.pageserver.http_client()
    lsn = Lsn(pg.safe_psql("SELECT pg_current_wal_flush_lsn()")[0][0])

    pageserver_http.timeline_checkpoint(tenant_id, timeline_id)
    wait_for_last_record_lsn(pageserver_http, tenant_id, timeline_id, lsn)
    wait_for_upload(pageserver_http, tenant_id, timeline_id, lsn)
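
    # By now everything up to `lsn` has been checkpointed by the pageserver and uploaded
    # to the (local_fs) remote storage, so the repo directory captured below is consistent.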
    env.postgres.stop_all()
    for sk in env.safekeepers:
        sk.stop()
    env.pageserver.stop()

    shutil.copytree(test_output_dir, test_output_dir / "compatibility_snapshot_pg14")
    # The `test_output_dir / "compatibility_snapshot_pg14"` directory is uploaded to S3 in a workflow; keep the name in sync with it.