Enable sanitizers for postgres v17 (#10401)
Add a build with sanitizers (asan, ubsan) to the CI pipeline and run tests on it. See https://github.com/neondatabase/neon/issues/6053

Co-authored-by: Alexander Bayandin <alexander@neon.tech>
@@ -121,6 +121,8 @@ runs:
           export DEFAULT_PG_VERSION=${PG_VERSION#v}
           export LD_LIBRARY_PATH=${POSTGRES_DISTRIB_DIR}/v${DEFAULT_PG_VERSION}/lib
           export BENCHMARK_CONNSTR=${BENCHMARK_CONNSTR:-}
+          export ASAN_OPTIONS=detect_leaks=0:detect_stack_use_after_return=0:abort_on_error=1:strict_string_checks=1:check_initialization_order=1:strict_init_order=1
+          export UBSAN_OPTIONS=abort_on_error=1:print_stacktrace=1

           if [ "${BUILD_TYPE}" = "remote" ]; then
             export REMOTE_ENV=1
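For context, the sanitizer runtime options chosen here mean roughly the following (a summary of the standard ASan/UBSan flags, not part of the diff itself):

    # ASAN_OPTIONS
    #   detect_leaks=0                  - turn LeakSanitizer off
    #   detect_stack_use_after_return=0 - disable stack-use-after-return checking
    #   abort_on_error=1                - call abort() on the first report instead of exiting
    #   strict_string_checks=1          - validate entire strings passed to libc string functions
    #   check_initialization_order=1    - detect C++ static-initialization-order bugs
    #   strict_init_order=1             - flag any cross-module global access during dynamic init
    # UBSAN_OPTIONS
    #   abort_on_error=1                - likewise abort on the first undefined-behavior report
    #   print_stacktrace=1              - attach a stack trace to every report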

.github/workflows/_build-and-test-locally.yml | 35 lines changed (vendored)
@@ -20,7 +20,7 @@ on:
       required: true
       type: string
     test-cfg:
-      description: 'a json object of postgres versions and lfc states to run regression tests on'
+      description: 'a json object of postgres versions and lfc/sanitizers states to build and run regression tests on'
       required: true
       type: string

@@ -48,6 +48,8 @@ jobs:
       # io_uring will account the memory of the CQ and SQ as locked.
       # More details: https://github.com/neondatabase/neon/issues/6373#issuecomment-1905814391
       options: --init --shm-size=512mb --ulimit memlock=67108864:67108864
+    strategy:
+      matrix: ${{ fromJSON(format('{{"include":{0}}}', inputs.test-cfg)) }}
     env:
       BUILD_TYPE: ${{ inputs.build-type }}
       GIT_VERSION: ${{ github.event.pull_request.head.sha || github.sha }}
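The format()/fromJSON() pair turns the test-cfg input string into the include list of the job matrix. For the single entry the sanitizer workflow added below passes in, the expression expands as in this sketch (jq used purely for illustration):

    echo '[{"pg_version":"v17", "sanitizers": "enabled"}]' | jq '{include: .}'
    # => { "include": [ { "pg_version": "v17", "sanitizers": "enabled" } ] }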
@@ -87,6 +89,7 @@ jobs:
       - name: Set env variables
         env:
           ARCH: ${{ inputs.arch }}
+          SANITIZERS: ${{ matrix.sanitizers }}
         run: |
           CARGO_FEATURES="--features testing"
           if [[ $BUILD_TYPE == "debug" && $ARCH == 'x64' ]]; then
@@ -99,8 +102,14 @@ jobs:
             cov_prefix=""
             CARGO_FLAGS="--locked --release"
           fi
+          if [[ $SANITIZERS == 'enabled' ]]; then
+            make_vars="WITH_SANITIZERS=yes"
+          else
+            make_vars=""
+          fi
           {
             echo "cov_prefix=${cov_prefix}"
+            echo "make_vars=${make_vars}"
             echo "CARGO_FEATURES=${CARGO_FEATURES}"
             echo "CARGO_FLAGS=${CARGO_FLAGS}"
             echo "CARGO_HOME=${GITHUB_WORKSPACE}/.cargo"
@@ -136,35 +145,39 @@ jobs:

       - name: Build postgres v14
         if: steps.cache_pg_14.outputs.cache-hit != 'true'
-        run: mold -run make postgres-v14 -j$(nproc)
+        run: mold -run make ${make_vars} postgres-v14 -j$(nproc)

       - name: Build postgres v15
         if: steps.cache_pg_15.outputs.cache-hit != 'true'
-        run: mold -run make postgres-v15 -j$(nproc)
+        run: mold -run make ${make_vars} postgres-v15 -j$(nproc)

       - name: Build postgres v16
         if: steps.cache_pg_16.outputs.cache-hit != 'true'
-        run: mold -run make postgres-v16 -j$(nproc)
+        run: mold -run make ${make_vars} postgres-v16 -j$(nproc)

       - name: Build postgres v17
         if: steps.cache_pg_17.outputs.cache-hit != 'true'
-        run: mold -run make postgres-v17 -j$(nproc)
+        run: mold -run make ${make_vars} postgres-v17 -j$(nproc)

       - name: Build neon extensions
-        run: mold -run make neon-pg-ext -j$(nproc)
+        run: mold -run make ${make_vars} neon-pg-ext -j$(nproc)

       - name: Build walproposer-lib
-        run: mold -run make walproposer-lib -j$(nproc)
+        run: mold -run make ${make_vars} walproposer-lib -j$(nproc)

       - name: Run cargo build
+        env:
+          WITH_TESTS: ${{ matrix.sanitizers != 'enabled' && '--tests' || '' }}
         run: |
-          ${cov_prefix} mold -run cargo build $CARGO_FLAGS $CARGO_FEATURES --bins --tests
+          export ASAN_OPTIONS=detect_leaks=0
+          ${cov_prefix} mold -run cargo build $CARGO_FLAGS $CARGO_FEATURES --bins ${WITH_TESTS}

       # Do install *before* running rust tests because they might recompile the
       # binaries with different features/flags.
       - name: Install rust binaries
         env:
           ARCH: ${{ inputs.arch }}
+          SANITIZERS: ${{ matrix.sanitizers }}
         run: |
           # Install target binaries
           mkdir -p /tmp/neon/bin/
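Spelled out, the build step now runs one of the following, depending on the matrix entry (a sketch for the release build, where cov_prefix is empty):

    export ASAN_OPTIONS=detect_leaks=0   # harmless when no sanitizer is linked in

    # sanitizers disabled (WITH_TESTS='--tests'): test executables are built too
    mold -run cargo build --locked --release --features testing --bins --tests
    # sanitizers enabled (WITH_TESTS=''): no test executables
    mold -run cargo build --locked --release --features testing --bins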
@@ -179,7 +192,7 @@ jobs:
           done

           # Install test executables and write list of all binaries (for code coverage)
-          if [[ $BUILD_TYPE == "debug" && $ARCH == 'x64' ]]; then
+          if [[ $BUILD_TYPE == "debug" && $ARCH == 'x64' && $SANITIZERS != 'enabled' ]]; then
             # Keep bloated coverage data files away from the rest of the artifact
             mkdir -p /tmp/coverage/

@@ -212,6 +225,7 @@ jobs:
           role-duration-seconds: 18000 # 5 hours

       - name: Run rust tests
+        if: ${{ matrix.sanitizers != 'enabled' }}
         env:
           NEXTEST_RETRIES: 3
         run: |
@@ -319,7 +333,7 @@ jobs:
       - name: Pytest regression tests
         continue-on-error: ${{ matrix.lfc_state == 'with-lfc' && inputs.build-type == 'debug' }}
         uses: ./.github/actions/run-python-test-set
-        timeout-minutes: 60
+        timeout-minutes: ${{ matrix.sanitizers != 'enabled' && 60 || 180 }}
         with:
           build_type: ${{ inputs.build-type }}
           test_selection: regress
@@ -337,6 +351,7 @@ jobs:
           PAGESERVER_VIRTUAL_FILE_IO_ENGINE: tokio-epoll-uring
           PAGESERVER_GET_VECTORED_CONCURRENT_IO: sidecar-task
           USE_LFC: ${{ matrix.lfc_state == 'with-lfc' && 'true' || 'false' }}
+          SANITIZERS: ${{ matrix.sanitizers }}

       # Temporary disable this step until we figure out why it's so flaky
       # Ref https://github.com/neondatabase/neon/issues/4540

.github/workflows/build_and_test_with_sanitizers.yml | 133 lines (vendored, new file)
@@ -0,0 +1,133 @@
+name: Build and Test with Sanitizers
+
+on:
+  schedule:
+    # * is a special character in YAML so you have to quote this string
+    # ┌───────────── minute (0 - 59)
+    # │ ┌───────────── hour (0 - 23)
+    # │ │ ┌───────────── day of the month (1 - 31)
+    # │ │ │ ┌───────────── month (1 - 12 or JAN-DEC)
+    # │ │ │ │ ┌───────────── day of the week (0 - 6 or SUN-SAT)
+    - cron: '0 1 * * *' # run once a day, timezone is utc
+  workflow_dispatch:
+
+defaults:
+  run:
+    shell: bash -euxo pipefail {0}
+
+concurrency:
+  # Allow only one workflow per any non-`main` branch.
+  group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
+  cancel-in-progress: true
+
+env:
+  RUST_BACKTRACE: 1
+  COPT: '-Werror'
+
+jobs:
+  tag:
+    runs-on: [ self-hosted, small ]
+    container: 369495373322.dkr.ecr.eu-central-1.amazonaws.com/base:pinned
+    outputs:
+      build-tag: ${{steps.build-tag.outputs.tag}}
+
+    steps:
+      # Need `fetch-depth: 0` to count the number of commits in the branch
+      - uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+
+      - name: Get build tag
+        run: |
+          echo run:$GITHUB_RUN_ID
+          echo ref:$GITHUB_REF_NAME
+          echo rev:$(git rev-list --count HEAD)
+          if [[ "$GITHUB_REF_NAME" == "main" ]]; then
+            echo "tag=$(git rev-list --count HEAD)" >> $GITHUB_OUTPUT
+          elif [[ "$GITHUB_REF_NAME" == "release" ]]; then
+            echo "tag=release-$(git rev-list --count HEAD)" >> $GITHUB_OUTPUT
+          elif [[ "$GITHUB_REF_NAME" == "release-proxy" ]]; then
+            echo "tag=release-proxy-$(git rev-list --count HEAD)" >> $GITHUB_OUTPUT
+          elif [[ "$GITHUB_REF_NAME" == "release-compute" ]]; then
+            echo "tag=release-compute-$(git rev-list --count HEAD)" >> $GITHUB_OUTPUT
+          else
+            echo "GITHUB_REF_NAME (value '$GITHUB_REF_NAME') is not set to either 'main' or 'release', 'release-proxy', 'release-compute'"
+            echo "tag=$GITHUB_RUN_ID" >> $GITHUB_OUTPUT
+          fi
+        shell: bash
+        id: build-tag
+
+  build-build-tools-image:
+    uses: ./.github/workflows/build-build-tools-image.yml
+    secrets: inherit
+
+  build-and-test-locally:
+    needs: [ tag, build-build-tools-image ]
+    strategy:
+      fail-fast: false
+      matrix:
+        arch: [ x64, arm64 ]
+        build-type: [ release ]
+    uses: ./.github/workflows/_build-and-test-locally.yml
+    with:
+      arch: ${{ matrix.arch }}
+      build-tools-image: ${{ needs.build-build-tools-image.outputs.image }}-bookworm
+      build-tag: ${{ needs.tag.outputs.build-tag }}
+      build-type: ${{ matrix.build-type }}
+      test-cfg: '[{"pg_version":"v17", "sanitizers": "enabled"}]'
+    secrets: inherit
+
+
+  create-test-report:
+    needs: [ build-and-test-locally, build-build-tools-image ]
+    if: ${{ !cancelled() }}
+    permissions:
+      id-token: write # aws-actions/configure-aws-credentials
+      statuses: write
+      contents: write
+      pull-requests: write
+    outputs:
+      report-url: ${{ steps.create-allure-report.outputs.report-url }}
+
+    runs-on: [ self-hosted, small ]
+    container:
+      image: ${{ needs.build-build-tools-image.outputs.image }}-bookworm
+      credentials:
+        username: ${{ secrets.NEON_DOCKERHUB_USERNAME }}
+        password: ${{ secrets.NEON_DOCKERHUB_PASSWORD }}
+      options: --init
+
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: Create Allure report
+        if: ${{ !cancelled() }}
+        id: create-allure-report
+        uses: ./.github/actions/allure-report-generate
+        with:
+          store-test-results-into-db: true
+          aws-oicd-role-arn: ${{ vars.DEV_AWS_OIDC_ROLE_ARN }}
+        env:
+          REGRESS_TEST_RESULT_CONNSTR_NEW: ${{ secrets.REGRESS_TEST_RESULT_CONNSTR_NEW }}
+
+      - uses: actions/github-script@v7
+        if: ${{ !cancelled() }}
+        with:
+          # Retry script for 5XX server errors: https://github.com/actions/github-script#retries
+          retries: 5
+          script: |
+            const report = {
+              reportUrl: "${{ steps.create-allure-report.outputs.report-url }}",
+              reportJsonUrl: "${{ steps.create-allure-report.outputs.report-json-url }}",
+            }
+
+            const coverage = {}
+
+            const script = require("./scripts/comment-test-report.js")
+            await script({
+              github,
+              context,
+              fetch,
+              report,
+              coverage,
+            })
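Besides the nightly cron trigger, workflow_dispatch lets the run be started by hand, for example with the GitHub CLI (assuming an authenticated gh pointed at the repository):

    gh workflow run build_and_test_with_sanitizers.yml --ref main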

Makefile | 17 lines changed
@@ -10,18 +10,29 @@ ICU_PREFIX_DIR := /usr/local/icu
 # environment variable.
 #
 BUILD_TYPE ?= debug
+WITH_SANITIZERS ?= no
 ifeq ($(BUILD_TYPE),release)
 	PG_CONFIGURE_OPTS = --enable-debug --with-openssl
 	PG_CFLAGS = -O2 -g3 $(CFLAGS)
+	PG_LDFLAGS = $(LDFLAGS)
 	# Unfortunately, `--profile=...` is a nightly feature
 	CARGO_BUILD_FLAGS += --release
 else ifeq ($(BUILD_TYPE),debug)
 	PG_CONFIGURE_OPTS = --enable-debug --with-openssl --enable-cassert --enable-depend
 	PG_CFLAGS = -O0 -g3 $(CFLAGS)
+	PG_LDFLAGS = $(LDFLAGS)
 else
 	$(error Bad build type '$(BUILD_TYPE)', see Makefile for options)
 endif

+ifeq ($(WITH_SANITIZERS),yes)
+	PG_CFLAGS += -fsanitize=address -fsanitize=undefined -fno-sanitize-recover
+	COPT += -Wno-error # to avoid failing on warnings induced by sanitizers
+	PG_LDFLAGS = -fsanitize=address -fsanitize=undefined -static-libasan -static-libubsan $(LDFLAGS)
+	export CC := gcc
+	export ASAN_OPTIONS := detect_leaks=0
+endif
+
 ifeq ($(shell test -e /home/nonroot/.docker_build && echo -n yes),yes)
 # Exclude static build openssl, icu for local build (MacOS, Linux)
 # Only keep for build type release and debug
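The same knob works outside CI; a sanitized postgres plus extension build can be produced locally (a sketch, assuming the gcc toolchain the Makefile selects is installed):

    make WITH_SANITIZERS=yes postgres-v17 -j"$(nproc)"
    make WITH_SANITIZERS=yes neon-pg-ext -j"$(nproc)"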
@@ -33,7 +44,9 @@ endif
 UNAME_S := $(shell uname -s)
 ifeq ($(UNAME_S),Linux)
 	# Seccomp BPF is only available for Linux
-	PG_CONFIGURE_OPTS += --with-libseccomp
+	ifneq ($(WITH_SANITIZERS),yes)
+		PG_CONFIGURE_OPTS += --with-libseccomp
+	endif
 else ifeq ($(UNAME_S),Darwin)
 	PG_CFLAGS += -DUSE_PREFETCH
 	ifndef DISABLE_HOMEBREW
@@ -106,7 +119,7 @@ $(POSTGRES_INSTALL_DIR)/build/%/config.status:
 	EXTRA_VERSION=$$(cd $(ROOT_PROJECT_DIR)/vendor/postgres-$$VERSION && git rev-parse HEAD); \
 	(cd $(POSTGRES_INSTALL_DIR)/build/$$VERSION && \
 	env PATH="$(EXTRA_PATH_OVERRIDES):$$PATH" $(ROOT_PROJECT_DIR)/vendor/postgres-$$VERSION/configure \
-	CFLAGS='$(PG_CFLAGS)' \
+	CFLAGS='$(PG_CFLAGS)' LDFLAGS='$(PG_LDFLAGS)' \
 	$(PG_CONFIGURE_OPTS) --with-extra-version=" ($$EXTRA_VERSION)" \
 	--prefix=$(abspath $(POSTGRES_INSTALL_DIR))/$$VERSION > configure.log)

@@ -231,6 +231,14 @@ pub(crate) async fn main() -> anyhow::Result<()> {
         ])
         .env_clear()
         .env("LD_LIBRARY_PATH", &pg_lib_dir)
+        .env(
+            "ASAN_OPTIONS",
+            std::env::var("ASAN_OPTIONS").unwrap_or_default(),
+        )
+        .env(
+            "UBSAN_OPTIONS",
+            std::env::var("UBSAN_OPTIONS").unwrap_or_default(),
+        )
         .stdout(std::process::Stdio::piped())
         .stderr(std::process::Stdio::piped())
         .spawn()
@@ -261,7 +261,13 @@ fn fill_rust_env_vars(cmd: &mut Command) -> &mut Command {
     let mut filled_cmd = cmd.env_clear().env("RUST_BACKTRACE", backtrace_setting);

     // Pass through these environment variables to the command
-    for var in ["LLVM_PROFILE_FILE", "FAILPOINTS", "RUST_LOG"] {
+    for var in [
+        "LLVM_PROFILE_FILE",
+        "FAILPOINTS",
+        "RUST_LOG",
+        "ASAN_OPTIONS",
+        "UBSAN_OPTIONS",
+    ] {
         if let Some(val) = std::env::var_os(var) {
             filled_cmd = filled_cmd.env(var, val);
         }
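With ASAN_OPTIONS and UBSAN_OPTIONS on the pass-through list, sanitizer settings exported in the parent shell reach the processes this helper spawns, e.g. (an illustration, assuming the repo's neon_local control-plane CLI, which uses this helper when starting the storage processes):

    ASAN_OPTIONS=abort_on_error=1 UBSAN_OPTIONS=print_stacktrace=1 \
        cargo run --bin neon_local -- start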
@@ -221,7 +221,17 @@ impl StorageController {
             "-p",
             &format!("{}", postgres_port),
         ];
-        let exitcode = Command::new(bin_path).args(args).spawn()?.wait().await?;
+        let pg_lib_dir = self.get_pg_lib_dir().await.unwrap();
+        let envs = [
+            ("LD_LIBRARY_PATH".to_owned(), pg_lib_dir.to_string()),
+            ("DYLD_LIBRARY_PATH".to_owned(), pg_lib_dir.to_string()),
+        ];
+        let exitcode = Command::new(bin_path)
+            .args(args)
+            .envs(envs)
+            .spawn()?
+            .wait()
+            .await?;

         Ok(exitcode.success())
     }
@@ -242,6 +252,11 @@ impl StorageController {

         let pg_bin_dir = self.get_pg_bin_dir().await?;
         let createdb_path = pg_bin_dir.join("createdb");
+        let pg_lib_dir = self.get_pg_lib_dir().await.unwrap();
+        let envs = [
+            ("LD_LIBRARY_PATH".to_owned(), pg_lib_dir.to_string()),
+            ("DYLD_LIBRARY_PATH".to_owned(), pg_lib_dir.to_string()),
+        ];
         let output = Command::new(&createdb_path)
             .args([
                 "-h",
@@ -254,6 +269,7 @@ impl StorageController {
                 &username(),
                 DB_NAME,
             ])
+            .envs(envs)
             .output()
             .await
             .expect("Failed to spawn createdb");
@@ -76,7 +76,15 @@ impl Conf {
         let mut cmd = Command::new(path);
         cmd.env_clear()
             .env("LD_LIBRARY_PATH", self.pg_lib_dir()?)
-            .env("DYLD_LIBRARY_PATH", self.pg_lib_dir()?);
+            .env("DYLD_LIBRARY_PATH", self.pg_lib_dir()?)
+            .env(
+                "ASAN_OPTIONS",
+                std::env::var("ASAN_OPTIONS").unwrap_or_default(),
+            )
+            .env(
+                "UBSAN_OPTIONS",
+                std::env::var("UBSAN_OPTIONS").unwrap_or_default(),
+            );
         Ok(cmd)
     }

@@ -64,6 +64,14 @@ pub async fn do_run_initdb(args: RunInitdbArgs<'_>) -> Result<(), Error> {
         .env_clear()
         .env("LD_LIBRARY_PATH", library_search_path)
         .env("DYLD_LIBRARY_PATH", library_search_path)
+        .env(
+            "ASAN_OPTIONS",
+            std::env::var("ASAN_OPTIONS").unwrap_or_default(),
+        )
+        .env(
+            "UBSAN_OPTIONS",
+            std::env::var("UBSAN_OPTIONS").unwrap_or_default(),
+        )
         .stdin(std::process::Stdio::null())
         // stdout invocation produces the same output every time, we don't need it
         .stdout(std::process::Stdio::null())
@@ -39,7 +39,7 @@ function initdb_with_args {
     ;;
   esac

-  eval env -i LD_LIBRARY_PATH="$PG_BIN"/../lib "${cmd[*]}"
+  eval env -i LD_LIBRARY_PATH="$PG_BIN"/../lib ASAN_OPTIONS="${ASAN_OPTIONS-}" UBSAN_OPTIONS="${UBSAN_OPTIONS-}" "${cmd[*]}"
 }

 rm -fr "$DATA_DIR"
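The ${ASAN_OPTIONS-} form matters here: it expands to an empty string when the variable is unset, so the env -i invocation keeps working on non-sanitized builds even under set -u, whereas a bare $ASAN_OPTIONS would abort the script. A minimal demonstration:

    set -u
    unset ASAN_OPTIONS
    echo "ASAN_OPTIONS=${ASAN_OPTIONS-}"   # prints 'ASAN_OPTIONS=' and continues
    echo "ASAN_OPTIONS=$ASAN_OPTIONS"      # would fail: 'unbound variable'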
@@ -79,6 +79,14 @@ impl WalRedoProcess {
             .env_clear()
             .env("LD_LIBRARY_PATH", &pg_lib_dir_path)
             .env("DYLD_LIBRARY_PATH", &pg_lib_dir_path)
+            .env(
+                "ASAN_OPTIONS",
+                std::env::var("ASAN_OPTIONS").unwrap_or_default(),
+            )
+            .env(
+                "UBSAN_OPTIONS",
+                std::env::var("UBSAN_OPTIONS").unwrap_or_default(),
+            )
             // NB: The redo process is not trusted after we sent it the first
             // walredo work. Before that, it is trusted. Specifically, we trust
             // it to
@@ -32,6 +32,7 @@ CREATE TABLE IF NOT EXISTS results (
     flaky BOOLEAN NOT NULL,
     arch arch DEFAULT 'X64',
     lfc BOOLEAN DEFAULT false NOT NULL,
+    sanitizers BOOLEAN DEFAULT false NOT NULL,
     build_type TEXT NOT NULL,
     pg_version INT NOT NULL,
     run_id BIGINT NOT NULL,
@@ -39,7 +40,7 @@ CREATE TABLE IF NOT EXISTS results (
     reference TEXT NOT NULL,
     revision CHAR(40) NOT NULL,
     raw JSONB COMPRESSION lz4 NOT NULL,
-    UNIQUE (parent_suite, suite, name, arch, build_type, pg_version, started_at, stopped_at, run_id)
+    UNIQUE (parent_suite, suite, name, arch, lfc, sanitizers, build_type, pg_version, started_at, stopped_at, run_id)
 );
 """

@@ -56,6 +57,7 @@ class Row:
     flaky: bool
     arch: str
     lfc: bool
+    sanitizers: bool
    build_type: str
     pg_version: int
     run_id: int
@@ -135,6 +137,7 @@ def ingest_test_result(
     }
     arch = parameters.get("arch", "UNKNOWN").strip("'")
     lfc = parameters.get("lfc", "without-lfc").strip("'") == "with-lfc"
+    sanitizers = parameters.get("sanitizers", "disabled").strip("'") == "enabled"

     build_type, pg_version, unparametrized_name = parse_test_name(test["name"])
     labels = {label["name"]: label["value"] for label in test["labels"]}
@@ -149,6 +152,7 @@ def ingest_test_result(
         flaky=test["flaky"] or test["retriesStatusChange"],
         arch=arch,
         lfc=lfc,
+        sanitizers=sanitizers,
         build_type=build_type,
         pg_version=pg_version,
         run_id=run_id,
@@ -124,5 +124,8 @@ def pytest_runtest_makereport(*args, **kwargs):
     allure.dynamic.parameter(
         "__lfc", "with-lfc" if os.getenv("USE_LFC") != "false" else "without-lfc"
     )
+    allure.dynamic.parameter(
+        "__sanitizers", "enabled" if os.getenv("SANITIZERS") == "enabled" else "disabled"
+    )

     yield
@@ -314,7 +314,10 @@ def test_forward_compatibility(


 def check_neon_works(env: NeonEnv, test_output_dir: Path, sql_dump_path: Path, repo_dir: Path):
-    ep = env.endpoints.create_start("main")
+    ep = env.endpoints.create("main")
+    ep_env = {"LD_LIBRARY_PATH": str(env.pg_distrib_dir / f"v{env.pg_version}/lib")}
+    ep.start(env=ep_env)

     connstr = ep.connstr()

     pg_bin = PgBin(test_output_dir, env.pg_distrib_dir, env.pg_version)
@@ -363,7 +366,7 @@ def check_neon_works(env: NeonEnv, test_output_dir: Path, sql_dump_path: Path, r
     )

     # Timeline exists again: restart the endpoint
-    ep.start()
+    ep.start(env=ep_env)

     pg_bin.run_capture(
         ["pg_dumpall", f"--dbname={connstr}", f"--file={test_output_dir / 'dump-from-wal.sql'}"]
@@ -120,7 +120,7 @@ def post_checks(env: NeonEnv, test_output_dir: Path, db_name: str, endpoint: End

 # Run the main PostgreSQL regression tests, in src/test/regress.
 #
-@pytest.mark.timeout(900) # Contains many sub-tests, is slow in debug builds
+@pytest.mark.timeout(3000) # Contains many sub-tests, is slow in debug builds
 @pytest.mark.parametrize("shard_count", [None, 4])
 def test_pg_regress(
     neon_env_builder: NeonEnvBuilder,
@@ -194,7 +194,7 @@ def test_pg_regress(

 # Run the PostgreSQL "isolation" tests, in src/test/isolation.
 #
-@pytest.mark.timeout(600) # Contains many sub-tests, is slow in debug builds
+@pytest.mark.timeout(1500) # Contains many sub-tests, is slow in debug builds
 @pytest.mark.parametrize("shard_count", [None, 4])
 def test_isolation(
     neon_env_builder: NeonEnvBuilder,
@@ -222,6 +222,8 @@ def test_isolation(
             "max_prepared_transactions=100",
             # Enable the test mode, so that we don't need to patch the test cases.
             "neon.regress_test_mode = true",
+            # Stack size should be increased for tests to pass with asan.
+            "max_stack_depth = 4MB",
         ],
     )
     endpoint.safe_psql(f"CREATE DATABASE {DBNAME}")
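ASan enlarges stack frames by placing redzones around stack variables, so PostgreSQL's stack-depth guard trips earlier than usual; raising max_stack_depth gives instrumented recursion room to run. The effective value can be checked against a running endpoint (an illustration; the connection string varies per test):

    psql "$connstr" -c "SHOW max_stack_depth;"   # expect 4MB on sanitized runs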
@@ -417,7 +419,7 @@ def test_tx_abort_with_many_relations(
     try:
         # Rollback phase should be fast: this is one WAL record that we should process efficiently
         fut = exec.submit(rollback_and_wait)
-        fut.result(timeout=5)
+        fut.result(timeout=15)
     except:
         exec.shutdown(wait=False, cancel_futures=True)
         raise
@@ -3,12 +3,14 @@ from __future__ import annotations
 import threading
 import time

+import pytest
 from fixtures.neon_fixtures import NeonEnv
 from fixtures.utils import wait_until


 # This test checks of logical replication subscriber is able to correctly restart replication without receiving duplicates.
 # It requires tracking information about replication origins at page server side
+@pytest.mark.timeout(900)  # This test is slow with sanitizers enabled, especially on ARM
 def test_subscriber_restart(neon_simple_env: NeonEnv):
     env = neon_simple_env
     env.create_branch("publisher")