Compare commits

..

7 Commits

Author SHA1 Message Date
Dmitry Ivanov
3f8751191b Simplify SNI parsing 2023-04-05 13:03:16 +03:00
Dmitry Ivanov
aba8cec279 More logging 2023-04-04 21:14:30 +03:00
Dmitry Ivanov
67e1d6f6fc Finally, build TLS config in proxy's main 2023-04-04 21:02:23 +03:00
Dmitry Ivanov
febce3903a Implement GlobMap for cert resolution 2023-04-04 19:58:56 +03:00
Dmitry Ivanov
a271ca6c8c Properly extract cert names 2023-04-03 22:22:48 +03:00
Dmitry Ivanov
cee9c726d2 Implement proper parsing 2023-04-03 20:27:42 +03:00
Dmitry Ivanov
a12c85449a WIP 2023-04-03 20:27:42 +03:00
39 changed files with 1029 additions and 824 deletions

View File

@@ -15,32 +15,10 @@ outputs:
report-url:
description: 'Allure report URL'
value: ${{ steps.generate-report.outputs.report-url }}
report-json-url:
description: 'Allure report JSON URL'
value: ${{ steps.generate-report.outputs.report-json-url }}
runs:
using: "composite"
steps:
# We're using some of env variables quite offen, so let's set them once.
#
# It would be nice to have them set in common runs.env[0] section, but it doesn't work[1]
#
# - [0] https://docs.github.com/en/actions/creating-actions/metadata-syntax-for-github-actions#runsenv
# - [1] https://github.com/neondatabase/neon/pull/3907#discussion_r1154703456
#
- name: Set common environment variables
shell: bash -euxo pipefail {0}
run: |
echo "BUILD_TYPE=${BUILD_TYPE}" >> $GITHUB_ENV
echo "BUCKET=${BUCKET}" >> $GITHUB_ENV
echo "TEST_OUTPUT=${TEST_OUTPUT}" >> $GITHUB_ENV
env:
BUILD_TYPE: ${{ inputs.build_type }}
BUCKET: neon-github-public-dev
TEST_OUTPUT: /tmp/test_output
- name: Validate input parameters
shell: bash -euxo pipefail {0}
run: |
@@ -98,14 +76,16 @@ runs:
rm -f ${ALLURE_ZIP}
fi
env:
ALLURE_VERSION: 2.21.0
ALLURE_ZIP_MD5: c8db4dd8e2a7882583d569ed2c82879c
ALLURE_VERSION: 2.19.0
ALLURE_ZIP_MD5: ced21401a1a8b9dfb68cee9e4c210464
- name: Upload Allure results
if: ${{ inputs.action == 'store' }}
env:
REPORT_PREFIX: reports/${{ steps.calculate-vars.outputs.KEY }}/${{ inputs.build_type }}
RAW_PREFIX: reports-raw/${{ steps.calculate-vars.outputs.KEY }}/${{ inputs.build_type }}
TEST_OUTPUT: /tmp/test_output
BUCKET: neon-github-public-dev
TEST_SELECTION: ${{ steps.calculate-vars.outputs.TEST_SELECTION }}
shell: bash -euxo pipefail {0}
run: |
@@ -124,7 +104,7 @@ runs:
EOF
cat <<EOF > $TEST_OUTPUT/allure/results/environment.properties
TEST_SELECTION=${{ inputs.test_selection }}
BUILD_TYPE=${BUILD_TYPE}
BUILD_TYPE=${{ inputs.build_type }}
EOF
ARCHIVE="${GITHUB_RUN_ID}-${TEST_SELECTION}-${GITHUB_RUN_ATTEMPT}-$(date +%s).tar.zst"
@@ -133,12 +113,13 @@ runs:
tar -C ${TEST_OUTPUT}/allure/results -cf ${ARCHIVE} --zstd .
aws s3 mv --only-show-errors ${ARCHIVE} "s3://${BUCKET}/${RAW_PREFIX}/${ARCHIVE}"
# Potentially we could have several running build for the same key (for example for the main branch), so we use improvised lock for this
# Potentially we could have several running build for the same key (for example for the main branch), so we use improvised lock for this
- name: Acquire Allure lock
if: ${{ inputs.action == 'generate' }}
shell: bash -euxo pipefail {0}
env:
LOCK_FILE: reports/${{ steps.calculate-vars.outputs.KEY }}/lock.txt
BUCKET: neon-github-public-dev
TEST_SELECTION: ${{ steps.calculate-vars.outputs.TEST_SELECTION }}
run: |
LOCK_TIMEOUT=300 # seconds
@@ -168,6 +149,8 @@ runs:
env:
REPORT_PREFIX: reports/${{ steps.calculate-vars.outputs.KEY }}/${{ inputs.build_type }}
RAW_PREFIX: reports-raw/${{ steps.calculate-vars.outputs.KEY }}/${{ inputs.build_type }}
TEST_OUTPUT: /tmp/test_output
BUCKET: neon-github-public-dev
shell: bash -euxo pipefail {0}
run: |
# Get previously uploaded data for this run
@@ -203,24 +186,24 @@ runs:
REPORT_URL=https://${BUCKET}.s3.amazonaws.com/${REPORT_PREFIX}/${GITHUB_RUN_ID}/index.html
# Generate redirect
cat <<EOF > ${TEST_OUTPUT}/allure/index.html
cat <<EOF > ./index.html
<!DOCTYPE html>
<meta charset="utf-8">
<title>Redirecting to ${REPORT_URL}</title>
<meta http-equiv="refresh" content="0; URL=${REPORT_URL}">
EOF
aws s3 cp --only-show-errors ${TEST_OUTPUT}/allure/index.html "s3://${BUCKET}/${REPORT_PREFIX}/latest/index.html"
aws s3 cp --only-show-errors ./index.html "s3://${BUCKET}/${REPORT_PREFIX}/latest/index.html"
echo "[Allure Report](${REPORT_URL})" >> ${GITHUB_STEP_SUMMARY}
echo "report-url=${REPORT_URL}" >> $GITHUB_OUTPUT
echo "report-json-url=${REPORT_URL%/index.html}/data/suites.json" >> $GITHUB_OUTPUT
- name: Release Allure lock
if: ${{ inputs.action == 'generate' && always() }}
shell: bash -euxo pipefail {0}
env:
LOCK_FILE: reports/${{ steps.calculate-vars.outputs.KEY }}/lock.txt
BUCKET: neon-github-public-dev
TEST_SELECTION: ${{ steps.calculate-vars.outputs.TEST_SELECTION }}
run: |
aws s3 cp --only-show-errors "s3://${BUCKET}/${LOCK_FILE}" ./lock.txt || exit 0
@@ -229,16 +212,11 @@ runs:
aws s3 rm "s3://${BUCKET}/${LOCK_FILE}"
fi
- name: Cleanup
if: always()
shell: bash -euxo pipefail {0}
run: |
rm -rf ${TEST_OUTPUT}/allure
- uses: actions/github-script@v6
if: ${{ inputs.action == 'generate' && always() }}
env:
REPORT_URL: ${{ steps.generate-report.outputs.report-url }}
BUILD_TYPE: ${{ inputs.build_type }}
SHA: ${{ github.event.pull_request.head.sha || github.sha }}
with:
script: |

View File

@@ -44,10 +44,6 @@ inputs:
description: 'Secret access key'
required: false
default: ''
rerun_flaky:
description: 'Whether to rerun flaky tests'
required: false
default: 'false'
runs:
using: "composite"
@@ -105,7 +101,6 @@ runs:
COMPATIBILITY_SNAPSHOT_DIR: /tmp/compatibility_snapshot_pg14
ALLOW_BACKWARD_COMPATIBILITY_BREAKAGE: contains(github.event.pull_request.labels.*.name, 'backward compatibility breakage')
ALLOW_FORWARD_COMPATIBILITY_BREAKAGE: contains(github.event.pull_request.labels.*.name, 'forward compatibility breakage')
RERUN_FLAKY: ${{ inputs.rerun_flaky }}
shell: bash -euxo pipefail {0}
run: |
# PLATFORM will be embedded in the perf test report
@@ -148,13 +143,6 @@ runs:
EXTRA_PARAMS="--out-dir $PERF_REPORT_DIR $EXTRA_PARAMS"
fi
if [ "${RERUN_FLAKY}" == "true" ]; then
mkdir -p $TEST_OUTPUT
poetry run ./scripts/flaky_tests.py "${TEST_RESULT_CONNSTR}" --days 10 --output "$TEST_OUTPUT/flaky.json"
EXTRA_PARAMS="--flaky-tests-json $TEST_OUTPUT/flaky.json $EXTRA_PARAMS"
fi
if [[ "${{ inputs.build_type }}" == "debug" ]]; then
cov_prefix=(scripts/coverage "--profraw-prefix=$GITHUB_JOB" --dir=/tmp/coverage run)
elif [[ "${{ inputs.build_type }}" == "release" ]]; then

View File

@@ -3,8 +3,6 @@
# fetch params from meta-data service
INSTANCE_ID=$(curl -s http://169.254.169.254/latest/meta-data/instance-id)
AZ_ID=$(curl -s http://169.254.169.254/latest/meta-data/placement/availability-zone)
INSTANCE_TYPE=$(curl -s http://169.254.169.254/latest/meta-data/instance-type)
DISK_SIZE=$(df -B1 /storage | tail -1 | awk '{print $2}')
# store fqdn hostname in var
HOST=$(hostname -f)
@@ -20,9 +18,7 @@ cat <<EOF | tee /tmp/payload
"http_host": "${HOST}",
"http_port": 9898,
"active": false,
"availability_zone_id": "${AZ_ID}",
"disk_size": ${DISK_SIZE},
"instance_type": "${INSTANCE_TYPE}"
"availability_zone_id": "${AZ_ID}"
}
EOF

View File

@@ -335,9 +335,6 @@ jobs:
real_s3_region: us-west-2
real_s3_access_key_id: "${{ secrets.AWS_ACCESS_KEY_ID_CI_TESTS_S3 }}"
real_s3_secret_access_key: "${{ secrets.AWS_SECRET_ACCESS_KEY_CI_TESTS_S3 }}"
rerun_flaky: true
env:
TEST_RESULT_CONNSTR: ${{ secrets.REGRESS_TEST_RESULT_CONNSTR }}
- name: Merge and upload coverage data
if: matrix.build_type == 'debug'
@@ -374,90 +371,42 @@ jobs:
# XXX: no coverage data handling here, since benchmarks are run on release builds,
# while coverage is currently collected for the debug ones
create-test-report:
merge-allure-report:
runs-on: [ self-hosted, gen3, small ]
container:
image: 369495373322.dkr.ecr.eu-central-1.amazonaws.com/rust:pinned
options: --init
needs: [ regress-tests, benchmarks ]
if: ${{ !cancelled() }}
strategy:
fail-fast: false
matrix:
build_type: [ debug, release ]
steps:
- uses: actions/checkout@v3
- name: Checkout
uses: actions/checkout@v3
with:
submodules: false
- name: Create Allure report (debug)
if: ${{ !cancelled() }}
id: create-allure-report-debug
- name: Create Allure report
id: create-allure-report
uses: ./.github/actions/allure-report
with:
action: generate
build_type: debug
- name: Create Allure report (release)
if: ${{ !cancelled() }}
id: create-allure-report-release
uses: ./.github/actions/allure-report
with:
action: generate
build_type: release
- uses: actions/github-script@v6
if: >
!cancelled() &&
github.event_name == 'pull_request' && (
steps.create-allure-report-debug.outputs.report-url ||
steps.create-allure-report-release.outputs.report-url
)
with:
# Retry script for 5XX server errors: https://github.com/actions/github-script#retries
retries: 5
script: |
const reports = [{
buildType: "debug",
reportUrl: "${{ steps.create-allure-report-debug.outputs.report-url }}",
jsonUrl: "${{ steps.create-allure-report-debug.outputs.report-json-url }}",
}, {
buildType: "release",
reportUrl: "${{ steps.create-allure-report-release.outputs.report-url }}",
jsonUrl: "${{ steps.create-allure-report-release.outputs.report-json-url }}",
}]
const script = require("./scripts/pr-comment-test-report.js")
await script({
github,
context,
fetch,
reports,
})
build_type: ${{ matrix.build_type }}
- name: Store Allure test stat in the DB
if: >
!cancelled() && (
steps.create-allure-report-debug.outputs.report-url ||
steps.create-allure-report-release.outputs.report-url
)
if: ${{ steps.create-allure-report.outputs.report-url }}
env:
BUILD_TYPE: ${{ matrix.build_type }}
SHA: ${{ github.event.pull_request.head.sha || github.sha }}
REPORT_JSON_URL_DEBUG: ${{ steps.create-allure-report-debug.outputs.report-json-url }}
REPORT_JSON_URL_RELEASE: ${{ steps.create-allure-report-release.outputs.report-json-url }}
REPORT_URL: ${{ steps.create-allure-report.outputs.report-url }}
TEST_RESULT_CONNSTR: ${{ secrets.REGRESS_TEST_RESULT_CONNSTR }}
run: |
curl --fail --output suites.json ${REPORT_URL%/index.html}/data/suites.json
./scripts/pysync
for report_url in $REPORT_JSON_URL_DEBUG $REPORT_JSON_URL_RELEASE; do
if [ -z "$report_url" ]; then
continue
fi
if [[ "$report_url" == "$REPORT_JSON_URL_DEBUG" ]]; then
BUILD_TYPE=debug
else
BUILD_TYPE=release
fi
curl --fail --output suites.json "${report_url}"
DATABASE_URL="$TEST_RESULT_CONNSTR" poetry run python3 scripts/ingest_regress_test_result.py --revision ${SHA} --reference ${GITHUB_REF} --build-type ${BUILD_TYPE} --ingest suites.json
done
DATABASE_URL="$TEST_RESULT_CONNSTR" poetry run python3 scripts/ingest_regress_test_result.py --revision ${SHA} --reference ${GITHUB_REF} --build-type ${BUILD_TYPE} --ingest suites.json
coverage-report:
runs-on: [ self-hosted, gen3, small ]

268
Cargo.lock generated
View File

@@ -2,6 +2,37 @@
# It is not intended for manual editing.
version = 3
[[package]]
name = "abnf"
version = "0.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "33741baa462d86e43fdec5e8ffca7c6ac82847ad06cbfb382c1bdbf527de9e6b"
dependencies = [
"abnf-core",
"nom",
]
[[package]]
name = "abnf-core"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c44e09c43ae1c368fb91a03a566472d0087c26cf7e1b9e8e289c14ede681dd7d"
dependencies = [
"nom",
]
[[package]]
name = "abnf_to_pest"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "939d59666dd9a7964a3a5312b9d24c9c107630752ee64f2dd5038189a23fe331"
dependencies = [
"abnf",
"indexmap",
"itertools",
"pretty",
]
[[package]]
name = "addr2line"
version = "0.19.0"
@@ -63,6 +94,15 @@ version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299"
[[package]]
name = "annotate-snippets"
version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c3b9d411ecbaf79885c6df4d75fff75858d5995ff25385657a28af47e82f9c36"
dependencies = [
"unicode-width",
]
[[package]]
name = "anyhow"
version = "1.0.68"
@@ -81,6 +121,12 @@ dependencies = [
"static_assertions",
]
[[package]]
name = "arrayvec"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "23b62fc65de8e4e7f52534fb52b0f3ed04746ae267519eef2a83941e8085068b"
[[package]]
name = "asn1-rs"
version = "0.5.1"
@@ -737,7 +783,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "213030a2b5a4e0c0892b6652260cf6ccac84827b83a85a534e178e3906c4cf1b"
dependencies = [
"ciborium-io",
"half",
"half 1.8.2",
]
[[package]]
@@ -1084,6 +1130,12 @@ dependencies = [
"winapi",
]
[[package]]
name = "crunchy"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7"
[[package]]
name = "crypto-common"
version = "0.1.6"
@@ -1216,6 +1268,42 @@ dependencies = [
"rusticata-macros",
]
[[package]]
name = "dhall"
version = "0.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec26264de25a8e3642fbb37abb24a6c6be9e19795444e6cf1bb88be5c2d55cc7"
dependencies = [
"abnf_to_pest",
"annotate-snippets",
"elsa",
"half 2.2.1",
"hex",
"home",
"itertools",
"lazy_static",
"minicbor",
"once_cell",
"percent-encoding",
"pest",
"pest_consume",
"pest_generator",
"quote",
"sha2",
"url",
]
[[package]]
name = "dhall_proc_macros"
version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "efcdb228bf802b21cd843e5ac3959b6255966238e5ec06d2e4bc6b9935475653"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "digest"
version = "0.10.6"
@@ -1238,12 +1326,27 @@ dependencies = [
"syn",
]
[[package]]
name = "doc-comment"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fea41bba32d969b513997752735605054bc0dfa92b4c56bf1189f2e174be7a10"
[[package]]
name = "either"
version = "1.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7fcaabb2fef8c910e7f4c7ce9f67a1283a1715879a7c230ca9d6d1ae31f16d91"
[[package]]
name = "elsa"
version = "1.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f74077c3c3aedb99a2683919698285596662518ea13e5eedcf8bdd43b0d0453b"
dependencies = [
"stable_deref_trait",
]
[[package]]
name = "encoding_rs"
version = "0.8.32"
@@ -1556,6 +1659,19 @@ version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b"
[[package]]
name = "globset"
version = "0.4.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "029d74589adefde59de1a0c4f4732695c32805624aec7b68d91503d4dba79afc"
dependencies = [
"aho-corasick",
"bstr",
"fnv",
"log",
"regex",
]
[[package]]
name = "h2"
version = "0.3.15"
@@ -1581,6 +1697,15 @@ version = "1.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eabb4a44450da02c90444cf74558da904edde8fb4e9035a9a6a4e15445af0bd7"
[[package]]
name = "half"
version = "2.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "02b4af3693f1b705df946e9fe5631932443781d0aabb423b62fcd4d73f6d2fd0"
dependencies = [
"crunchy",
]
[[package]]
name = "hash32"
version = "0.3.1"
@@ -1677,6 +1802,15 @@ dependencies = [
"digest",
]
[[package]]
name = "home"
version = "0.5.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "747309b4b440c06d57b0b25f2aee03ee9b5e5397d288c60e21fc709bb98a7408"
dependencies = [
"winapi",
]
[[package]]
name = "hostname"
version = "0.3.1"
@@ -2123,6 +2257,27 @@ dependencies = [
"unicase",
]
[[package]]
name = "minicbor"
version = "0.18.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2a20020e8e2d1881d8736f64011bb5ff99f1db9947ce3089706945c8915695cb"
dependencies = [
"half 1.8.2",
"minicbor-derive",
]
[[package]]
name = "minicbor-derive"
version = "0.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8608fb1c805b5b6b3d5ab7bd95c40c396df622b64d77b2d621a5eae1eed050ee"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "minimal-lexical"
version = "0.2.1"
@@ -2551,6 +2706,72 @@ version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "478c572c3d73181ff3c2539045f6eb99e5491218eae919370993b890cdbdd98e"
[[package]]
name = "pest"
version = "2.5.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8cbd939b234e95d72bc393d51788aec68aeeb5d51e748ca08ff3aad58cb722f7"
dependencies = [
"thiserror",
"ucd-trie",
]
[[package]]
name = "pest_consume"
version = "1.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "79447402d15d18e7142e14c72f2e63fa3d155be1bc5b70b3ccbb610ac55f536b"
dependencies = [
"pest",
"pest_consume_macros",
"pest_derive",
]
[[package]]
name = "pest_consume_macros"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9d8630a7a899cb344ec1c16ba0a6b24240029af34bdc0a21f84e411d7f793f29"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "pest_derive"
version = "2.5.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a81186863f3d0a27340815be8f2078dd8050b14cd71913db9fbda795e5f707d7"
dependencies = [
"pest",
"pest_generator",
]
[[package]]
name = "pest_generator"
version = "2.5.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "75a1ef20bf3193c15ac345acb32e26b3dc3223aff4d77ae4fc5359567683796b"
dependencies = [
"pest",
"pest_meta",
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "pest_meta"
version = "2.5.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5e3b284b1f13a20dc5ebc90aff59a51b8d7137c221131b52a7260c08cbc1cc80"
dependencies = [
"once_cell",
"pest",
"sha2",
]
[[package]]
name = "petgraph"
version = "0.6.2"
@@ -2761,6 +2982,18 @@ dependencies = [
"workspace_hack",
]
[[package]]
name = "pretty"
version = "0.11.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "83f3aa1e3ca87d3b124db7461265ac176b40c277f37e503eaa29c9c75c037846"
dependencies = [
"arrayvec",
"log",
"typed-arena",
"unicode-segmentation",
]
[[package]]
name = "prettyplease"
version = "0.1.23"
@@ -2909,6 +3142,7 @@ dependencies = [
"consumption_metrics",
"futures",
"git-version",
"globset",
"hashbrown 0.13.2",
"hashlink",
"hex",
@@ -2939,6 +3173,7 @@ dependencies = [
"rustls-pemfile",
"scopeguard",
"serde",
"serde_dhall",
"serde_json",
"sha2",
"socket2",
@@ -3546,6 +3781,19 @@ dependencies = [
"syn",
]
[[package]]
name = "serde_dhall"
version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "655a5c686ad80aef90d2e6bfea3715778623c9a659017c8346bc97eb58f9b27d"
dependencies = [
"dhall",
"dhall_proc_macros",
"doc-comment",
"serde",
"url",
]
[[package]]
name = "serde_json"
version = "1.0.91"
@@ -4433,12 +4681,24 @@ dependencies = [
"utf-8",
]
[[package]]
name = "typed-arena"
version = "2.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6af6ae20167a9ece4bcb41af5b80f8a1f1df981f6391189ce00fd257af04126a"
[[package]]
name = "typenum"
version = "1.16.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba"
[[package]]
name = "ucd-trie"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9e79c4d996edb816c91e4308506774452e55e95c3c9de07b6729e17e15a5ef81"
[[package]]
name = "uname"
version = "0.1.1"
@@ -4478,6 +4738,12 @@ dependencies = [
"tinyvec",
]
[[package]]
name = "unicode-segmentation"
version = "1.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1dd624098567895118886609431a7c3b8f516e41d30e0643f03d94592a147e36"
[[package]]
name = "unicode-width"
version = "0.1.10"

View File

@@ -47,6 +47,7 @@ futures = "0.3"
futures-core = "0.3"
futures-util = "0.3"
git-version = "0.3"
globset = "0.4.10"
hashbrown = "0.13"
hashlink = "0.8.1"
hex = "0.4"
@@ -87,6 +88,7 @@ rustls-split = "0.3"
scopeguard = "1.1"
sentry = { version = "0.29", default-features = false, features = ["backtrace", "contexts", "panic", "rustls", "reqwest" ] }
serde = { version = "1.0", features = ["derive"] }
serde_dhall = { version = "0.12.1", default_features = false }
serde_json = "1"
serde_with = "2.0"
sha2 = "0.10.2"

View File

@@ -38,7 +38,6 @@ RUN cd postgres && \
echo 'trusted = true' >> /usr/local/pgsql/share/extension/insert_username.control && \
echo 'trusted = true' >> /usr/local/pgsql/share/extension/intagg.control && \
echo 'trusted = true' >> /usr/local/pgsql/share/extension/moddatetime.control && \
echo 'trusted = true' >> /usr/local/pgsql/share/extension/pg_stat_statements.control && \
echo 'trusted = true' >> /usr/local/pgsql/share/extension/pgrowlocks.control && \
echo 'trusted = true' >> /usr/local/pgsql/share/extension/pgstattuple.control && \
echo 'trusted = true' >> /usr/local/pgsql/share/extension/refint.control && \

View File

@@ -78,6 +78,9 @@ impl RemotePath {
/// providing basic CRUD operations for storage files.
#[async_trait::async_trait]
pub trait RemoteStorage: Send + Sync + 'static {
/// Lists all items the storage has right now.
async fn list(&self) -> anyhow::Result<Vec<RemotePath>>;
/// Lists all top level subdirectories for a given prefix
/// Note: here we assume that if the prefix is passed it was obtained via remote_object_id
/// which already takes into account any kind of global prefix (prefix_in_bucket for S3 or storage_root for LocalFS)

View File

@@ -73,8 +73,10 @@ impl LocalFs {
Ok(None)
}
}
}
#[cfg(test)]
#[async_trait::async_trait]
impl RemoteStorage for LocalFs {
async fn list(&self) -> anyhow::Result<Vec<RemotePath>> {
Ok(get_all_files(&self.storage_root, true)
.await?
@@ -89,10 +91,7 @@ impl LocalFs {
})
.collect())
}
}
#[async_trait::async_trait]
impl RemoteStorage for LocalFs {
async fn list_prefixes(
&self,
prefix: Option<&RemotePath>,

View File

@@ -275,6 +275,50 @@ impl<S: AsyncRead> AsyncRead for RatelimitedAsyncRead<S> {
#[async_trait::async_trait]
impl RemoteStorage for S3Bucket {
async fn list(&self) -> anyhow::Result<Vec<RemotePath>> {
let mut document_keys = Vec::new();
let mut continuation_token = None;
loop {
let _guard = self
.concurrency_limiter
.acquire()
.await
.context("Concurrency limiter semaphore got closed during S3 list")?;
metrics::inc_list_objects();
let fetch_response = self
.client
.list_objects_v2()
.bucket(self.bucket_name.clone())
.set_prefix(self.prefix_in_bucket.clone())
.delimiter(REMOTE_STORAGE_PREFIX_SEPARATOR.to_string())
.set_continuation_token(continuation_token)
.set_max_keys(self.max_keys_per_list_response)
.send()
.await
.map_err(|e| {
metrics::inc_list_objects_fail();
e
})?;
document_keys.extend(
fetch_response
.contents
.unwrap_or_default()
.into_iter()
.filter_map(|o| Some(self.s3_object_to_relative_path(o.key()?))),
);
match fetch_response.next_continuation_token {
Some(new_token) => continuation_token = Some(new_token),
None => break,
}
}
Ok(document_keys)
}
/// See the doc for `RemoteStorage::list_prefixes`
/// Note: it wont include empty "directories"
async fn list_prefixes(

View File

@@ -20,6 +20,7 @@ pub struct UnreliableWrapper {
/// Used to identify retries of different unique operation.
#[derive(Debug, Hash, Eq, PartialEq)]
enum RemoteOp {
List,
ListPrefixes(Option<RemotePath>),
Upload(RemotePath),
Download(RemotePath),
@@ -74,6 +75,12 @@ impl UnreliableWrapper {
#[async_trait::async_trait]
impl RemoteStorage for UnreliableWrapper {
/// Lists all items the storage has right now.
async fn list(&self) -> anyhow::Result<Vec<RemotePath>> {
self.attempt(RemoteOp::List)?;
self.inner.list().await
}
async fn list_prefixes(
&self,
prefix: Option<&RemotePath>,

View File

@@ -11,14 +11,6 @@ use serde::{Deserialize, Serialize};
pub struct Percent(#[serde(deserialize_with = "deserialize_pct_0_to_100")] u8);
impl Percent {
pub const fn new(pct: u8) -> Option<Self> {
if pct <= 100 {
Some(Percent(pct))
} else {
None
}
}
pub fn get(&self) -> u8 {
self.0
}

View File

@@ -639,7 +639,7 @@ mod filesystem_level_usage {
),
(
"max_usage_pct",
usage_pct >= self.config.max_usage_pct.get() as u64,
usage_pct > self.config.max_usage_pct.get() as u64,
),
];
@@ -686,43 +686,4 @@ mod filesystem_level_usage {
avail_bytes,
})
}
#[test]
fn max_usage_pct_pressure() {
use super::Usage as _;
use std::time::Duration;
use utils::serde_percent::Percent;
let mut usage = Usage {
config: &DiskUsageEvictionTaskConfig {
max_usage_pct: Percent::new(85).unwrap(),
min_avail_bytes: 0,
period: Duration::MAX,
#[cfg(feature = "testing")]
mock_statvfs: None,
},
total_bytes: 100_000,
avail_bytes: 0,
};
assert!(usage.has_pressure(), "expected pressure at 100%");
usage.add_available_bytes(14_000);
assert!(usage.has_pressure(), "expected pressure at 86%");
usage.add_available_bytes(999);
assert!(usage.has_pressure(), "expected pressure at 85.001%");
usage.add_available_bytes(1);
assert!(usage.has_pressure(), "expected pressure at precisely 85%");
usage.add_available_bytes(1);
assert!(!usage.has_pressure(), "no pressure at 84.999%");
usage.add_available_bytes(999);
assert!(!usage.has_pressure(), "no pressure at 84%");
usage.add_available_bytes(16_000);
assert!(!usage.has_pressure());
}
}

38
poetry.lock generated
View File

@@ -1,4 +1,4 @@
# This file is automatically @generated by Poetry 1.4.1 and should not be changed by hand.
# This file is automatically @generated by Poetry 1.4.0 and should not be changed by hand.
[[package]]
name = "aiohttp"
@@ -79,35 +79,37 @@ sa = ["sqlalchemy[postgresql-psycopg2binary] (>=1.3,<1.5)"]
[[package]]
name = "allure-pytest"
version = "2.13.1"
version = "2.10.0"
description = "Allure pytest integration"
category = "main"
optional = false
python-versions = "*"
files = [
{file = "allure-pytest-2.13.1.tar.gz", hash = "sha256:68d69456eeb65af4061ec06a80bc941163b0616e8216554d36b070a6bf070e08"},
{file = "allure_pytest-2.13.1-py3-none-any.whl", hash = "sha256:a8de2fc3b3effe2d8f98801646920de3f055b779710f4c806dbee7c613c24633"},
{file = "allure-pytest-2.10.0.tar.gz", hash = "sha256:3b2ab67629f4cbd8617abd817d2b22292c6eb7efd5584f992d1af8143aea6ee7"},
{file = "allure_pytest-2.10.0-py3-none-any.whl", hash = "sha256:08274096594758447db54c3b2c382526ee04f1fe12119cdaee92d2d93c84b530"},
]
[package.dependencies]
allure-python-commons = "2.13.1"
allure-python-commons = "2.10.0"
pytest = ">=4.5.0"
six = ">=1.9.0"
[[package]]
name = "allure-python-commons"
version = "2.13.1"
version = "2.10.0"
description = "Common module for integrate allure with python-based frameworks"
category = "main"
optional = false
python-versions = ">=3.6"
python-versions = ">=3.5"
files = [
{file = "allure-python-commons-2.13.1.tar.gz", hash = "sha256:3fc13e1da8ebb23f9ab5c9c72ad04595023cdd5078dbb8604939997faebed5cb"},
{file = "allure_python_commons-2.13.1-py3-none-any.whl", hash = "sha256:d08e04867bddf44fef55def3d67f4bc25af58a1bf9fcffcf4ec3331f7f2ef0d0"},
{file = "allure-python-commons-2.10.0.tar.gz", hash = "sha256:d4d31344b0f0037a4a11e16b91b28cf0eeb23ffa0e50c27fcfc6aabe72212d3c"},
{file = "allure_python_commons-2.10.0-py3-none-any.whl", hash = "sha256:2a717e8ca8d296bf89cd57f38fc3c21893bd7ea8cd02a6ae5420e6d1a6eda5d0"},
]
[package.dependencies]
attrs = ">=16.0.0"
pluggy = ">=0.4.0"
six = ">=1.9.0"
[[package]]
name = "async-timeout"
@@ -1930,22 +1932,6 @@ pytest = [
{version = ">=6.2.4", markers = "python_version >= \"3.10\""},
]
[[package]]
name = "pytest-rerunfailures"
version = "11.1.2"
description = "pytest plugin to re-run tests to eliminate flaky failures"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "pytest-rerunfailures-11.1.2.tar.gz", hash = "sha256:55611661e873f1cafa384c82f08d07883954f4b76435f4b8a5b470c1954573de"},
{file = "pytest_rerunfailures-11.1.2-py3-none-any.whl", hash = "sha256:d21fe2e46d9774f8ad95f1aa799544ae95cac3a223477af94aa985adfae92b7e"},
]
[package.dependencies]
packaging = ">=17.1"
pytest = ">=5.3"
[[package]]
name = "pytest-timeout"
version = "2.1.0"
@@ -2611,4 +2597,4 @@ testing = ["func-timeout", "jaraco.itertools", "pytest (>=6)", "pytest-black (>=
[metadata]
lock-version = "2.0"
python-versions = "^3.9"
content-hash = "b689ffd6eae32b966f1744b5ac3343fe0dd26b31ee1f50e13daf5045ee0623e1"
content-hash = "2515a9320c2960076012fbc036fb33c4f6a23515c8d143785931dc18c6722d91"

View File

@@ -16,6 +16,7 @@ clap.workspace = true
consumption_metrics.workspace = true
futures.workspace = true
git-version.workspace = true
globset.workspace = true
hashbrown.workspace = true
hashlink.workspace = true
hex.workspace = true
@@ -44,6 +45,7 @@ rustls-pemfile.workspace = true
rustls.workspace = true
scopeguard.workspace = true
serde.workspace = true
serde_dhall.workspace = true
serde_json.workspace = true
sha2.workspace = true
socket2.workspace = true

0
proxy/config/README.md Normal file
View File

View File

@@ -0,0 +1,77 @@
Certificate:
Data:
Version: 3 (0x2)
Serial Number:
67:2b:fc:80:27:9f:65:dd:42:d7:ef:a8:0a:fe:bd:d1:a8:2d:c8:da
Signature Algorithm: sha256WithRSAEncryption
Issuer: CN = *.foo.bar.localhost
Validity
Not Before: Mar 30 12:39:55 2023 GMT
Not After : Sep 3 12:39:55 2202 GMT
Subject: CN = *.foo.bar.localhost
Subject Public Key Info:
Public Key Algorithm: rsaEncryption
Public-Key: (2048 bit)
Modulus:
00:a3:67:45:c1:97:47:83:a1:1b:34:6a:a5:fa:1a:
0c:d7:b9:4e:ef:bd:03:8f:64:bf:e3:ca:51:d8:22:
1e:8b:52:71:09:4e:e3:43:2f:92:45:ea:61:86:06:
fe:49:23:c4:18:a7:ef:4c:81:77:8d:ce:a5:1b:80:
ad:b0:d1:19:71:68:9e:b7:53:6e:d4:9f:d7:ff:d9:
c0:7a:92:8e:04:e9:2b:a4:df:b2:e4:a8:ae:28:da:
c8:5a:f2:d0:b6:98:e3:c4:2d:3a:c7:c3:07:b6:32:
15:0d:f9:e2:05:77:32:b6:d7:e3:64:b5:8c:c0:83:
32:25:7d:7f:ad:88:39:25:68:3f:0f:48:4d:60:67:
b9:47:ad:bd:6d:93:73:5c:78:41:d7:db:fa:e9:bf:
6b:9a:6b:e0:66:c6:90:3c:da:fb:85:2c:45:32:6c:
0f:18:66:6e:42:f7:0f:93:35:4f:3e:d1:1f:a8:fb:
18:75:87:19:9a:3a:af:28:28:73:45:9a:87:89:b2:
a6:33:1b:25:83:69:9e:75:8c:06:d6:f3:2a:b2:bc:
52:64:27:8d:ee:ec:50:88:28:5c:86:6d:8a:92:50:
00:10:dd:08:42:7c:0d:5a:f8:2b:a2:d6:df:23:0d:
5f:a8:da:c6:ce:d5:c9:f6:10:a4:de:62:0c:9b:29:
ca:af
Exponent: 65537 (0x10001)
X509v3 extensions:
X509v3 Subject Key Identifier:
FE:74:48:82:19:2C:85:19:EB:55:37:8A:70:DF:94:2C:FA:6B:A9:6B
X509v3 Authority Key Identifier:
FE:74:48:82:19:2C:85:19:EB:55:37:8A:70:DF:94:2C:FA:6B:A9:6B
X509v3 Basic Constraints: critical
CA:TRUE
Signature Algorithm: sha256WithRSAEncryption
Signature Value:
08:2b:2a:bc:b7:0f:3e:ec:96:b0:78:87:12:a5:f1:d7:0e:96:
08:65:34:7d:0a:9b:3d:bd:df:dc:de:3f:5e:0f:57:75:75:a3:
fd:a2:68:29:80:5b:ce:1a:ea:0f:7b:96:ab:ea:92:d0:da:07:
61:28:32:71:37:d1:3c:6a:ca:e8:f9:9b:80:49:c5:16:40:cc:
8d:50:8c:4f:4e:6d:73:7b:3a:55:6b:11:76:e3:68:fd:6f:a0:
c8:06:f9:a1:af:28:4d:b9:c5:0d:fd:2c:98:61:7d:22:b6:87:
43:2b:62:fc:25:9e:fb:f4:09:24:c1:3c:7f:c7:e8:04:b4:c5:
5a:4b:4e:17:5d:7f:38:f1:d4:35:0c:82:bf:20:46:c7:f4:96:
8f:12:94:c4:ee:92:e0:5d:09:45:de:a1:40:e5:b4:34:2f:11:
fe:72:5f:81:a5:11:24:a5:04:98:e5:07:59:dc:d8:dc:7b:f6:
12:ba:8b:d3:cf:dd:de:06:84:23:e7:b3:29:b2:8f:b1:6b:c3:
71:ee:da:bc:9e:b5:62:a6:68:cb:ea:49:19:34:6c:29:be:ce:
6d:3b:7a:59:28:59:67:83:e9:6d:37:06:fd:29:f7:ce:fc:fc:
72:de:23:f5:2b:f6:dc:d2:82:3e:45:bb:e1:ce:14:d7:85:d5:
ec:3b:1c:3c
-----BEGIN CERTIFICATE-----
MIIDHzCCAgegAwIBAgIUZyv8gCefZd1C1++oCv690agtyNowDQYJKoZIhvcNAQEL
BQAwHjEcMBoGA1UEAwwTKi5mb28uYmFyLmxvY2FsaG9zdDAgFw0yMzAzMzAxMjM5
NTVaGA8yMjAyMDkwMzEyMzk1NVowHjEcMBoGA1UEAwwTKi5mb28uYmFyLmxvY2Fs
aG9zdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKNnRcGXR4OhGzRq
pfoaDNe5Tu+9A49kv+PKUdgiHotScQlO40MvkkXqYYYG/kkjxBin70yBd43OpRuA
rbDRGXFonrdTbtSf1//ZwHqSjgTpK6TfsuSorijayFry0LaY48QtOsfDB7YyFQ35
4gV3MrbX42S1jMCDMiV9f62IOSVoPw9ITWBnuUetvW2Tc1x4Qdfb+um/a5pr4GbG
kDza+4UsRTJsDxhmbkL3D5M1Tz7RH6j7GHWHGZo6rygoc0Wah4mypjMbJYNpnnWM
BtbzKrK8UmQnje7sUIgoXIZtipJQABDdCEJ8DVr4K6LW3yMNX6jaxs7VyfYQpN5i
DJspyq8CAwEAAaNTMFEwHQYDVR0OBBYEFP50SIIZLIUZ61U3inDflCz6a6lrMB8G
A1UdIwQYMBaAFP50SIIZLIUZ61U3inDflCz6a6lrMA8GA1UdEwEB/wQFMAMBAf8w
DQYJKoZIhvcNAQELBQADggEBAAgrKry3Dz7slrB4hxKl8dcOlghlNH0Kmz2939ze
P14PV3V1o/2iaCmAW84a6g97lqvqktDaB2EoMnE30Txqyuj5m4BJxRZAzI1QjE9O
bXN7OlVrEXbjaP1voMgG+aGvKE25xQ39LJhhfSK2h0MrYvwlnvv0CSTBPH/H6AS0
xVpLThddfzjx1DUMgr8gRsf0lo8SlMTukuBdCUXeoUDltDQvEf5yX4GlESSlBJjl
B1nc2Nx79hK6i9PP3d4GhCPnsymyj7Frw3Hu2ryetWKmaMvqSRk0bCm+zm07elko
WWeD6W03Bv0p9878/HLeI/Ur9tzSgj5Fu+HOFNeF1ew7HDw=
-----END CERTIFICATE-----

View File

@@ -0,0 +1,5 @@
{
server_name = "*.foo.bar.localhost",
certificate = ./server.crt as Text,
private_key = ./server.key as Text,
}

View File

@@ -0,0 +1,28 @@
-----BEGIN PRIVATE KEY-----
MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQCjZ0XBl0eDoRs0
aqX6GgzXuU7vvQOPZL/jylHYIh6LUnEJTuNDL5JF6mGGBv5JI8QYp+9MgXeNzqUb
gK2w0RlxaJ63U27Un9f/2cB6ko4E6Suk37LkqK4o2sha8tC2mOPELTrHwwe2MhUN
+eIFdzK21+NktYzAgzIlfX+tiDklaD8PSE1gZ7lHrb1tk3NceEHX2/rpv2uaa+Bm
xpA82vuFLEUybA8YZm5C9w+TNU8+0R+o+xh1hxmaOq8oKHNFmoeJsqYzGyWDaZ51
jAbW8yqyvFJkJ43u7FCIKFyGbYqSUAAQ3QhCfA1a+Cui1t8jDV+o2sbO1cn2EKTe
YgybKcqvAgMBAAECggEACcd9ZwqPzcKxyXiCgSmOYn53kkmjaIzCjKQaV/z+wkNi
6FFY1UoH6oqOX3lLPjoOP92COy6RQjUtMAfT1Cu1L8BLE2uLgt0jjgGVs+lkMKYM
pfwNDXD6pjQBOhjHrxcO7XDL0JJWcVCBAMp76qMb1D+u+poSqg1rcqrNeZVO3s6n
ot4aejb+hDEB8t4ytpDnqULaPvnDmlc5WvVpS8qbGTRiq8DSe5RzEQZp7WHR9Una
0o/rQPu+RiH2+7CesQugg9bc3iaW83DbJjLuODU/4Au/aU/7I0giV8T89UIvtXDl
JzyVDRJNr1qPabZ/H2CFRkfHKovNRpJDEX48VuSR3QKBgQDmr61HA6rRh6zmmehd
V1mroKb9UCR/edBklBSsO2kvWMHPtFEwlszebydcrJa2GwbE37NjKZwkEp1Q/mIk
yX2mjd629j62vzjOejFrW22YOX2LN2cK2Ns9c5T0qnX7ttxy8lw3dE7uU9Y42X8E
Wxm5yIiXImgxIaxCpceR4lzdqwKBgQC1VYUSZcxXIRNjyd98vcD7Ai3zTai4wO3k
WC+Myu2DZr1ZhmY2N0M5b5UUsTL6yOt9fKgqMz3Ww21/zweXx91ts6XmygXIj5ZA
7TyQxPE/PGzG6Olg5o00L//68P9jSBz9HgRWdRtxyJ8NWOUTTGpqA5dKjEj7tia/
E/QcErSbDQKBgAgNug7wodYO1op2dRZNJmRHh4zwb1XD+vKH+PDKYjG09484zFzV
5vEdEFK788cHyoS1Cp47pafcvoFFYEfIgQp/iXb5wda/dkw/F9qXpovZ9fgWRxKp
332Vu22PRe8zwx6AN5f4B4lqg+AYN8b/JzbFOX+NQ/XzJwBsqTr+nB9hAoGBAIch
McdiAQK07UQhvd+xcEwddayoJKF5dE4DwXuEBbc0Ksq6MxUX3YrBsjD3U+w7KfIb
oR3BjcWrYMArwZbEJCiKBYmU5vZsuiWsJMQlXzomh1E7ZB8H8BYB5xpT2Z1cse2W
Htlm74q9XHmP0zWsbmiOQIIXRJP/S6R89B6vedNJAoGBAJOjRfwepnFpIVXG0waS
S1zavMQZvDsJcvNhedyi34ui5XXi79w+uc13xoFTilhG8DS+tOnw/LsDTP8NeLKv
yENcF/zSHuC4GJJjxoJ+SPlaW+mlofsoCT7zRGRVG27xS6jOwo5/7fYg0aELsq2o
oS/TVaWFJKVtJW64vHdGzMnx
-----END PRIVATE KEY-----

View File

@@ -0,0 +1,77 @@
Certificate:
Data:
Version: 3 (0x2)
Serial Number:
1b:91:d6:a3:a3:1b:c1:47:b3:76:16:a2:43:64:3b:63:61:0d:0c:c8
Signature Algorithm: sha256WithRSAEncryption
Issuer: CN = *.neon.localhost
Validity
Not Before: Mar 30 13:19:00 2023 GMT
Not After : Sep 3 13:19:00 2202 GMT
Subject: CN = *.neon.localhost
Subject Public Key Info:
Public Key Algorithm: rsaEncryption
Public-Key: (2048 bit)
Modulus:
00:ab:27:17:8b:71:a5:08:65:ec:46:58:69:3d:7d:
b7:91:34:33:3a:50:5c:27:91:34:70:fe:2c:81:fc:
aa:f2:32:60:b3:49:ca:c1:c8:fb:76:73:97:ac:e9:
ff:20:4a:73:48:62:fa:65:d3:81:65:1b:cc:78:00:
6d:a8:32:8b:ff:61:3c:8b:a2:23:61:de:e3:b4:5f:
cd:ce:7e:ce:05:49:5b:64:81:42:74:81:49:4a:5c:
3a:f0:d9:40:2d:42:7d:ef:db:b9:d3:54:a5:11:52:
0b:23:65:fd:c1:5e:58:f7:98:9c:ba:3d:9a:f9:a2:
50:0c:c4:c8:bf:63:66:aa:e3:29:fe:40:ae:13:8c:
18:af:20:24:41:63:3e:1d:af:cd:91:32:fa:b9:26:
96:fb:35:12:1c:62:58:93:33:d2:2c:76:08:f9:c4:
07:ef:fc:6f:eb:ea:c8:81:5d:d8:73:0a:05:47:79:
52:b3:24:34:08:c3:b6:a0:aa:af:de:8b:62:f9:6f:
3a:8f:eb:07:85:ef:2b:de:f7:21:69:7a:63:17:27:
4a:88:b5:4c:e7:52:73:09:10:c6:ca:eb:f0:c8:31:
cc:d4:59:d6:64:82:ac:ae:96:69:18:5e:19:17:5d:
9b:86:39:36:a1:9f:90:34:45:73:9b:43:a2:b0:d3:
8b:e1
Exponent: 65537 (0x10001)
X509v3 extensions:
X509v3 Subject Key Identifier:
E3:8E:E8:BF:5E:72:36:0A:6D:7F:BE:AF:68:38:A7:CE:71:15:CE:14
X509v3 Authority Key Identifier:
E3:8E:E8:BF:5E:72:36:0A:6D:7F:BE:AF:68:38:A7:CE:71:15:CE:14
X509v3 Basic Constraints: critical
CA:TRUE
Signature Algorithm: sha256WithRSAEncryption
Signature Value:
69:23:54:9d:eb:4c:57:f5:cf:8d:5c:d3:f8:2e:65:0b:f1:b8:
8e:a8:ef:67:dd:89:96:8d:df:f1:9b:36:3a:49:0d:55:dc:07:
eb:c5:e7:c8:73:12:02:6a:02:d3:ca:92:63:f0:61:4b:8a:2a:
32:c2:23:c2:53:ee:33:6b:eb:9f:e5:f4:df:78:55:bf:d5:86:
1e:e5:a8:90:e2:df:d7:c2:b8:63:27:a7:2d:ba:43:34:d3:45:
e0:94:53:0a:26:fb:66:ac:c3:96:76:c3:45:a1:ae:d6:30:e0:
b6:c0:a6:d1:8e:51:c7:56:fb:ed:5c:04:a2:66:b9:74:c6:6d:
ef:1e:9e:9a:58:7b:fc:e0:1c:94:fc:17:df:5b:70:e7:cd:f9:
22:49:3d:59:83:8e:c3:bf:bc:3b:39:68:9e:5a:34:88:1a:61:
f7:53:ac:86:de:76:85:75:f6:b7:86:3f:20:4b:98:63:97:03:
8b:29:37:32:2c:c1:9a:65:a2:58:17:f2:7b:79:e7:ee:6a:33:
5b:d0:bd:af:04:dd:02:43:98:a7:e9:0f:35:cb:c0:9d:a6:95:
bf:98:57:4d:cf:b8:a9:bb:de:0c:4d:51:93:df:62:f6:20:bf:
61:27:7d:2c:be:14:48:5d:d1:75:f9:cb:d9:b3:0a:2b:de:ea:
2a:4b:9e:c0
-----BEGIN CERTIFICATE-----
MIIDGTCCAgGgAwIBAgIUG5HWo6MbwUezdhaiQ2Q7Y2ENDMgwDQYJKoZIhvcNAQEL
BQAwGzEZMBcGA1UEAwwQKi5uZW9uLmxvY2FsaG9zdDAgFw0yMzAzMzAxMzE5MDBa
GA8yMjAyMDkwMzEzMTkwMFowGzEZMBcGA1UEAwwQKi5uZW9uLmxvY2FsaG9zdDCC
ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKsnF4txpQhl7EZYaT19t5E0
MzpQXCeRNHD+LIH8qvIyYLNJysHI+3Zzl6zp/yBKc0hi+mXTgWUbzHgAbagyi/9h
PIuiI2He47Rfzc5+zgVJW2SBQnSBSUpcOvDZQC1Cfe/budNUpRFSCyNl/cFeWPeY
nLo9mvmiUAzEyL9jZqrjKf5ArhOMGK8gJEFjPh2vzZEy+rkmlvs1EhxiWJMz0ix2
CPnEB+/8b+vqyIFd2HMKBUd5UrMkNAjDtqCqr96LYvlvOo/rB4XvK973IWl6Yxcn
Soi1TOdScwkQxsrr8MgxzNRZ1mSCrK6WaRheGRddm4Y5NqGfkDRFc5tDorDTi+EC
AwEAAaNTMFEwHQYDVR0OBBYEFOOO6L9ecjYKbX++r2g4p85xFc4UMB8GA1UdIwQY
MBaAFOOO6L9ecjYKbX++r2g4p85xFc4UMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZI
hvcNAQELBQADggEBAGkjVJ3rTFf1z41c0/guZQvxuI6o72fdiZaN3/GbNjpJDVXc
B+vF58hzEgJqAtPKkmPwYUuKKjLCI8JT7jNr65/l9N94Vb/Vhh7lqJDi39fCuGMn
py26QzTTReCUUwom+2asw5Z2w0WhrtYw4LbAptGOUcdW++1cBKJmuXTGbe8enppY
e/zgHJT8F99bcOfN+SJJPVmDjsO/vDs5aJ5aNIgaYfdTrIbedoV19reGPyBLmGOX
A4spNzIswZplolgX8nt55+5qM1vQva8E3QJDmKfpDzXLwJ2mlb+YV03PuKm73gxN
UZPfYvYgv2EnfSy+FEhd0XX5y9mzCive6ipLnsA=
-----END CERTIFICATE-----

View File

@@ -0,0 +1,5 @@
{
server_name = "*.neon.localhost",
certificate = ./server.crt as Text,
private_key = ./server.key as Text,
}

View File

@@ -0,0 +1,28 @@
-----BEGIN PRIVATE KEY-----
MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQCrJxeLcaUIZexG
WGk9fbeRNDM6UFwnkTRw/iyB/KryMmCzScrByPt2c5es6f8gSnNIYvpl04FlG8x4
AG2oMov/YTyLoiNh3uO0X83Ofs4FSVtkgUJ0gUlKXDrw2UAtQn3v27nTVKURUgsj
Zf3BXlj3mJy6PZr5olAMxMi/Y2aq4yn+QK4TjBivICRBYz4dr82RMvq5Jpb7NRIc
YliTM9Isdgj5xAfv/G/r6siBXdhzCgVHeVKzJDQIw7agqq/ei2L5bzqP6weF7yve
9yFpemMXJ0qItUznUnMJEMbK6/DIMczUWdZkgqyulmkYXhkXXZuGOTahn5A0RXOb
Q6Kw04vhAgMBAAECggEADrLZw8urwlqL0x1uqa2X9pbGu4BWmb36Wfs03qj7aWId
ieg8IIOz4jVagQAg5/4Hg9+e5OB9jAMPfqgoGA+B6cRzta45XwNhsjD0H4LRC1qE
cyTityy58EfsIUPBzjaX/Yx08LWj7iaJ9wKgVgYAmqr28suto+NmVTe6jIKV46EL
bWmnU0dySOa43ukdhkvQN+FG3hL4iIl+mZ5aTVY8dz885sxYdrKOyrzMREAvHFW6
m01fWwgbdiMfR2Gu2ZWmvom4+PiE8EES8/Cpct4/E27SFLr3pdB+voIBSh6kotF9
w0dNqnK1dyIC89gxhcH/PO4rC6uKPM68ZezBsqHZ3QKBgQDMB54bMbcVNK/92nRV
xtM8sk567oAeDwL7VcMq35vmwZU1OcjPg/QswIIZNIx46SXO8a668Wn6OLZYq4dR
FGBWpsMHbQSEdyurYY2bqi5tK5dnKiuCqNmTTtxPA2QgC+PdDcTlJ2FI/RmCeODM
GUcKJd0FyR5BNy8TPM31kFs7zwKBgQDWv6A4cpKzzfY7hf/iOcgmgyipPfXBAdCE
6w6HAEU5JKJDtxIC3roosxOVbrqCMGqPWKCkQvzOr7d9Ok5fYk4WF+qM67xxdzHa
KzmE2+PKDsWBejxjnIMBtDKBkWhOU5/bg/HLDv4RPNwn7f0MjxFpxzYuq3q/dIPN
TcZthbU5TwKBgQCJj1FAEILZ304RH2p0MrtVHvre01K58XEXN7mAfIbGTBpnanBD
yTmlup18lPtowfjlz/j4va+wLvByVCPFvLE/euvfY9c54Icm43zwSQtIO622tq3j
SCh5sx/CfgzRtnKJJbFstuJWrZ63YvxdX2WQJ/se3Xxyh9xLYiGSwSNh7QKBgDFO
/rL3W8f9WrSAKCkBq3tsUkHKAEu45vAeKM/GuB5O0xNJTdFq4sPFmpGNQzXxeAZC
C2CsIPA0WKVgZe5w3A0moKyK1FIZVFEL68Ed3Efg7Gi2cHdO0KXrgk1N3e1eNi5p
NXOylZPPrZ1df+UKVK09GKvOo/iiAEF7wjwTn3DxAoGBAK1iTDV+HrGyxCi2PcoK
yyFCB2QEFw5vvMMMu5lvRjaI9r+igEg1Y8DWhXpUb0hsXnTV192dwhqmB+NYn7Yz
xwlFeKolv+j+5H+IQ4vmEleOlaBLGBH/lAdCcJ0bGcKHHRP+chF6En2tOTk45Gai
4gsDafbyi89fJ/5EoLGRYMe/
-----END PRIVATE KEY-----

View File

@@ -0,0 +1,7 @@
let Server = { server_name : Text, certificate : Text, private_key : Text }
let servers
: List Server
= [ ./foo.bar.localhost/server.dhall, ./neon.localhost/server.dhall ]
in servers

View File

@@ -53,7 +53,7 @@ pub async fn password_hack(
.await?;
info!(project = &payload.project, "received missing parameter");
creds.project = Some(payload.project);
creds.project = Some(payload.project.into());
let mut node = api.wake_compute(extra, creds).await?;
node.config.password(payload.password);

View File

@@ -2,7 +2,7 @@
use crate::error::UserFacingError;
use pq_proto::StartupMessageParams;
use std::collections::HashSet;
use std::borrow::Cow;
use thiserror::Error;
use tracing::info;
@@ -18,12 +18,6 @@ pub enum ClientCredsParseError {
)]
InconsistentProjectNames { domain: String, option: String },
#[error(
"Common name inferred from SNI ('{}') is not known",
.cn,
)]
UnknownCommonName { cn: String },
#[error("Project name ('{0}') must contain only alphanumeric characters and hyphen.")]
MalformedProjectName(String),
}
@@ -36,7 +30,7 @@ impl UserFacingError for ClientCredsParseError {}
pub struct ClientCredentials<'a> {
pub user: &'a str,
// TODO: this is a severe misnomer! We should think of a new name ASAP.
pub project: Option<String>,
pub project: Option<Cow<'a, str>>,
}
impl ClientCredentials<'_> {
@@ -50,7 +44,6 @@ impl<'a> ClientCredentials<'a> {
pub fn parse(
params: &'a StartupMessageParams,
sni: Option<&str>,
common_names: Option<HashSet<String>>,
) -> Result<Self, ClientCredsParseError> {
use ClientCredsParseError::*;
@@ -59,43 +52,29 @@ impl<'a> ClientCredentials<'a> {
let user = get_param("user")?;
// Project name might be passed via PG's command-line options.
let project_option = params
.options_raw()
.and_then(|mut options| options.find_map(|opt| opt.strip_prefix("project=")))
.map(|name| name.to_string());
let project_option = params.options_raw().and_then(|mut options| {
options
.find_map(|opt| opt.strip_prefix("project="))
.map(Cow::Borrowed)
});
let project_from_domain = if let Some(sni_str) = sni {
if let Some(cn) = common_names {
let common_name_from_sni = sni_str.split_once('.').map(|(_, domain)| domain);
// Alternative project name is in fact a subdomain from SNI.
let project_domain = sni.and_then(|sni| {
let (domain, _) = sni.split_once('.')?;
Some(Cow::from(domain.to_owned()))
});
let project = common_name_from_sni
.and_then(|domain| {
if cn.contains(domain) {
subdomain_from_sni(sni_str, domain)
} else {
None
}
})
.ok_or_else(|| UnknownCommonName {
cn: common_name_from_sni.unwrap_or("").into(),
})?;
Some(project)
} else {
None
}
} else {
None
};
let project = match (project_option, project_from_domain) {
let project = match (project_option, project_domain) {
// Invariant: if we have both project name variants, they should match.
(Some(option), Some(domain)) if option != domain => {
Some(Err(InconsistentProjectNames { domain, option }))
Some(Err(InconsistentProjectNames {
domain: domain.into(),
option: option.into(),
}))
}
// Invariant: project name may not contain certain characters.
(a, b) => a.or(b).map(|name| match project_name_valid(&name) {
false => Err(MalformedProjectName(name)),
false => Err(MalformedProjectName(name.into())),
true => Ok(name),
}),
}
@@ -111,12 +90,6 @@ fn project_name_valid(name: &str) -> bool {
name.chars().all(|c| c.is_alphanumeric() || c == '-')
}
fn subdomain_from_sni(sni: &str, common_name: &str) -> Option<String> {
sni.strip_suffix(common_name)?
.strip_suffix('.')
.map(str::to_owned)
}
#[cfg(test)]
mod tests {
use super::*;
@@ -127,7 +100,7 @@ mod tests {
// According to postgresql, only `user` should be required.
let options = StartupMessageParams::new([("user", "john_doe")]);
let creds = ClientCredentials::parse(&options, None, None)?;
let creds = ClientCredentials::parse(&options, None)?;
assert_eq!(creds.user, "john_doe");
assert_eq!(creds.project, None);
@@ -142,7 +115,7 @@ mod tests {
("foo", "bar"), // should be ignored
]);
let creds = ClientCredentials::parse(&options, None, None)?;
let creds = ClientCredentials::parse(&options, None)?;
assert_eq!(creds.user, "john_doe");
assert_eq!(creds.project, None);
@@ -154,9 +127,8 @@ mod tests {
let options = StartupMessageParams::new([("user", "john_doe")]);
let sni = Some("foo.localhost");
let common_names = Some(["localhost".into()].into());
let creds = ClientCredentials::parse(&options, sni, common_names)?;
let creds = ClientCredentials::parse(&options, sni)?;
assert_eq!(creds.user, "john_doe");
assert_eq!(creds.project.as_deref(), Some("foo"));
@@ -170,7 +142,7 @@ mod tests {
("options", "-ckey=1 project=bar -c geqo=off"),
]);
let creds = ClientCredentials::parse(&options, None, None)?;
let creds = ClientCredentials::parse(&options, None)?;
assert_eq!(creds.user, "john_doe");
assert_eq!(creds.project.as_deref(), Some("bar"));
@@ -182,41 +154,22 @@ mod tests {
let options = StartupMessageParams::new([("user", "john_doe"), ("options", "project=baz")]);
let sni = Some("baz.localhost");
let common_names = Some(["localhost".into()].into());
let creds = ClientCredentials::parse(&options, sni, common_names)?;
let creds = ClientCredentials::parse(&options, sni)?;
assert_eq!(creds.user, "john_doe");
assert_eq!(creds.project.as_deref(), Some("baz"));
Ok(())
}
#[test]
fn parse_multi_common_names() -> anyhow::Result<()> {
let options = StartupMessageParams::new([("user", "john_doe")]);
let common_names = Some(["a.com".into(), "b.com".into()].into());
let sni = Some("p1.a.com");
let creds = ClientCredentials::parse(&options, sni, common_names)?;
assert_eq!(creds.project.as_deref(), Some("p1"));
let common_names = Some(["a.com".into(), "b.com".into()].into());
let sni = Some("p1.b.com");
let creds = ClientCredentials::parse(&options, sni, common_names)?;
assert_eq!(creds.project.as_deref(), Some("p1"));
Ok(())
}
#[test]
fn parse_projects_different() {
let options =
StartupMessageParams::new([("user", "john_doe"), ("options", "project=first")]);
let sni = Some("second.localhost");
let common_names = Some(["localhost".into()].into());
let err = ClientCredentials::parse(&options, sni, common_names).expect_err("should fail");
let err = ClientCredentials::parse(&options, sni).expect_err("should fail");
match err {
InconsistentProjectNames { domain, option } => {
assert_eq!(option, "first");
@@ -225,20 +178,4 @@ mod tests {
_ => panic!("bad error: {err:?}"),
}
}
#[test]
fn parse_inconsistent_sni() {
let options = StartupMessageParams::new([("user", "john_doe")]);
let sni = Some("project.localhost");
let common_names = Some(["example.com".into()].into());
let err = ClientCredentials::parse(&options, sni, common_names).expect_err("should fail");
match err {
UnknownCommonName { cn } => {
assert_eq!(cn, "localhost");
}
_ => panic!("bad error: {err:?}"),
}
}
}

312
proxy/src/certs.rs Normal file
View File

@@ -0,0 +1,312 @@
use rustls::{
server::{ClientHello, ResolvesServerCert},
sign::CertifiedKey,
};
use std::{collections::BTreeMap, io, ops::Bound, sync::Arc};
use tracing::{info, warn};
/// App-level configuration structs for TLS certificates.
pub mod config {
use super::*;
use serde::{de, Deserialize};
use std::path::Path;
/// Collection of TLS-related configurations of virtual proxy servers.
#[derive(Debug, Default, Clone, Deserialize)]
#[serde(transparent)]
pub struct TlsServers(pub Vec<TlsServer>);
impl TlsServers {
/// Load [`Self`] config from a file.
pub fn from_config_file(path: impl AsRef<Path>) -> anyhow::Result<Self> {
info!(path = %path.as_ref().display(), "loading TLS servers config file");
let config = serde_dhall::from_file(path).parse()?;
Ok(config)
}
}
/// This lets us merge multiple configs into one (semigroup).
impl FromIterator<Self> for TlsServers {
fn from_iter<T: IntoIterator<Item = Self>>(iter: T) -> Self {
Self(iter.into_iter().flat_map(|xs| xs.0).collect())
}
}
/// Helps deserialize certificate chain from a string.
#[derive(Debug, Clone, Deserialize)]
#[serde(transparent)]
pub struct TlsCert(
/// The wrapped rustls certificate.
#[serde(deserialize_with = "deserialize_certs")]
pub Vec<rustls::Certificate>,
);
fn deserialize_certs<'de, D>(des: D) -> Result<Vec<rustls::Certificate>, D::Error>
where
D: serde::Deserializer<'de>,
{
let text = String::deserialize(des)?;
parse_certs(&mut text.as_bytes()).map_err(de::Error::custom)
}
/// Helps deserialize private key from a string.
#[derive(Debug, Clone, Deserialize)]
#[serde(transparent)]
pub struct TlsKey(
/// The wrapped rustls private key.
#[serde(deserialize_with = "deserialize_key")]
pub rustls::PrivateKey,
);
fn deserialize_key<'de, D>(des: D) -> Result<rustls::PrivateKey, D::Error>
where
D: serde::Deserializer<'de>,
{
let text = String::deserialize(des)?;
parse_key(&mut text.as_bytes()).map_err(de::Error::custom)
}
/// Represents TLS config of a single virtual proxy server.
#[derive(Debug, Clone, Deserialize)]
pub struct TlsServer {
/// Proxy server's certificate chain.
pub certificate: TlsCert,
/// Proxy server's private key.
pub private_key: TlsKey,
}
impl TlsServer {
pub fn into_certified_key(
self,
) -> Result<rustls::sign::CertifiedKey, rustls::sign::SignError> {
Ok(rustls::sign::CertifiedKey::new(
self.certificate.0,
rustls::sign::any_supported_type(&self.private_key.0)?,
))
}
}
}
/// Parse TLS certificate chain from a byte buffer.
fn parse_certs(buf: &mut impl io::BufRead) -> io::Result<Vec<rustls::Certificate>> {
let chain = rustls_pemfile::certs(buf)?
.into_iter()
.map(rustls::Certificate)
.collect();
Ok(chain)
}
/// Parse exactly one TLS private key from a byte buffer.
fn parse_key(buf: &mut impl io::BufRead) -> io::Result<rustls::PrivateKey> {
let mut keys = rustls_pemfile::pkcs8_private_keys(buf)?;
// We expect to see only 1 key.
if keys.len() != 1 {
return Err(io::Error::new(
io::ErrorKind::Other,
"there should be exactly one TLS key in buffer",
));
}
Ok(rustls::PrivateKey(keys.pop().unwrap()))
}
/// Extract domain names from a certificate: first CN, then SANs.
/// Further reading: <https://www.rfc-editor.org/rfc/rfc4985>.
fn certificate_names(cert: &rustls::Certificate) -> anyhow::Result<Vec<String>> {
use x509_parser::{extensions::GeneralName, x509::AttributeTypeAndValue};
let get_dns_name = |gn: &GeneralName| match gn {
GeneralName::DNSName(name) => Some(name.to_string()),
_other => None,
};
let get_common_name = |attr: &AttributeTypeAndValue| {
// There really shouldn't be anything but string here.
attr.attr_value().as_string().expect("bad CN attribute")
};
let (rest, cert) = x509_parser::parse_x509_certificate(cert.0.as_ref())?;
anyhow::ensure!(rest.is_empty(), "excessive bytes in DER certificate");
// Extract CN, Common Name.
let mut names: Vec<String> = cert
.subject()
.iter_common_name()
.map(get_common_name)
.collect();
// Now append SANs, Subject Alternative Names, if any.
if let Some(extension) = cert.subject_alternative_name()? {
let alt_names = &extension.value.general_names;
names.extend(alt_names.iter().filter_map(get_dns_name));
}
Ok(names)
}
#[derive(Debug)]
struct GlobMapBuilder<V> {
builder: globset::GlobSetBuilder,
values: BTreeMap<usize, V>,
}
impl<V> GlobMapBuilder<V> {
fn new() -> Self {
Self {
builder: globset::GlobSetBuilder::new(),
values: Default::default(),
}
}
fn add(&mut self, globs: impl IntoIterator<Item = globset::Glob>, value: V) -> &mut Self {
let mut cnt = 0;
for glob in globs {
self.builder.add(glob);
cnt += 1;
}
if cnt > 0 {
let offset = self.values.last_key_value().map(|(k, _)| *k).unwrap_or(0);
self.values.insert(offset + cnt, value);
}
self
}
fn build(self) -> Result<GlobMap<V>, globset::Error> {
Ok(GlobMap {
set: self.builder.build()?,
values: self.values,
})
}
}
/// Maps a set of matching globs to an arbitrary value.
/// See the tests below in case this description doesn't help.
#[derive(Debug)]
struct GlobMap<V> {
/// An ordered set of all loaded globs.
set: globset::GlobSet,
/// Store single value per range of globs.
values: BTreeMap<usize, V>,
}
impl<V> GlobMap<V> {
fn query(&self, text: &str) -> Vec<&V> {
let indices = self.set.matches(text);
let mut res = Vec::with_capacity(indices.len());
for i in indices {
let mut range = self.values.range((Bound::Excluded(i), Bound::Unbounded));
let (_, value) = range.next().expect("invariant: entry must exist");
res.push(value);
}
res
}
}
pub struct CertResolverEntry {
pub raw: Arc<rustls::sign::CertifiedKey>,
pub names: Vec<String>,
}
pub struct CertResolver {
storage: GlobMap<CertResolverEntry>,
}
impl CertResolver {
pub fn new(config: config::TlsServers) -> anyhow::Result<Self> {
let mut builder = GlobMapBuilder::new();
for server in config.0 {
let Some(cert) = server.certificate.0.first() else {
warn!("found empty certificate, skipping");
continue;
};
let names = certificate_names(cert)?;
let globs = names
.iter()
.map(|s| globset::Glob::new(s))
.collect::<Result<Vec<_>, _>>()?;
info!(?names, "loading TLS certificate");
let entry = CertResolverEntry {
raw: Arc::new(server.into_certified_key()?),
names,
};
builder.add(globs, entry);
}
Ok(Self {
storage: builder.build()?,
})
}
#[tracing::instrument(name = "resolve_tls_cert", fields(%server_name), skip_all)]
pub fn resolve_raw(&self, server_name: &str) -> Option<&CertResolverEntry> {
info!("trying to resolve TLS certificate");
let entries = self.storage.query(server_name);
info!("found {} matching entries", entries.len());
entries.first().copied()
}
}
impl ResolvesServerCert for CertResolver {
fn resolve(&self, message: ClientHello) -> Option<Arc<CertifiedKey>> {
let name = message.server_name()?;
self.resolve_raw(name).map(|entry| entry.raw.clone())
}
}
#[cfg(test)]
mod tests {
use super::*;
use globset::Glob;
#[test]
fn check_glob_map_basic() -> anyhow::Result<()> {
let mut builder = GlobMapBuilder::new();
builder
.add([Glob::new("*.localhost")?], 0)
.add([Glob::new("bar.localhost")?], 1)
.add([Glob::new("*.foo.localhost")?], 2);
let map = builder.build()?;
assert!(map.query("random").is_empty());
assert!(map.query("localhost").is_empty());
assert_eq!(map.query("foo.localhost"), [&0]);
assert_eq!(map.query("bar.localhost"), [&0, &1]);
assert_eq!(map.query("project.foo.localhost"), [&0, &2]);
Ok(())
}
#[test]
fn check_glob_map() -> anyhow::Result<()> {
let mut builder = GlobMapBuilder::new();
builder
.add(
[
Glob::new("*.neon.tech")?,
Glob::new("*.neon.internal.tech")?,
],
"neon",
)
.add([Glob::new("*.localhost")?], "mock");
let map = builder.build()?;
assert!(map.query("random").is_empty());
assert!(map.query("localhost").is_empty());
assert_eq!(map.query("ep-1.neon.tech"), [&"neon"]);
assert_eq!(map.query("ep-1.neon.internal.tech"), [&"neon"]);
assert_eq!(map.query("ep-1.foo.localhost"), [&"mock"]);
Ok(())
}
}

View File

@@ -1,12 +1,6 @@
use crate::auth;
use anyhow::{bail, ensure, Context, Ok};
use rustls::sign;
use std::{
collections::{HashMap, HashSet},
str::FromStr,
sync::Arc,
time::Duration,
};
use crate::{auth, certs};
use anyhow::{bail, Context};
use std::{str::FromStr, sync::Arc, time::Duration};
pub struct ProxyConfig {
pub tls_config: Option<TlsConfig>,
@@ -22,7 +16,6 @@ pub struct MetricCollectionConfig {
pub struct TlsConfig {
pub config: Arc<rustls::ServerConfig>,
pub common_names: Option<HashSet<String>>,
}
impl TlsConfig {
@@ -31,138 +24,21 @@ impl TlsConfig {
}
}
/// Configure TLS for the main endpoint.
pub fn configure_tls(
key_path: &str,
cert_path: &str,
certs_dir: Option<&String>,
) -> anyhow::Result<TlsConfig> {
let mut cert_resolver = CertResolver::new();
impl TlsConfig {
pub fn new(resolver: certs::CertResolver) -> anyhow::Result<Self> {
let rustls_config = rustls::ServerConfig::builder()
.with_safe_default_cipher_suites()
.with_safe_default_kx_groups()
// allow TLS 1.2 to be compatible with older client libraries
.with_protocol_versions(&[&rustls::version::TLS13, &rustls::version::TLS12])?
.with_no_client_auth()
.with_cert_resolver(Arc::new(resolver));
// add default certificate
cert_resolver.add_cert(key_path, cert_path)?;
// add extra certificates
if let Some(certs_dir) = certs_dir {
for entry in std::fs::read_dir(certs_dir)? {
let entry = entry?;
let path = entry.path();
if path.is_dir() {
// file names aligned with default cert-manager names
let key_path = path.join("tls.key");
let cert_path = path.join("tls.crt");
if key_path.exists() && cert_path.exists() {
cert_resolver
.add_cert(&key_path.to_string_lossy(), &cert_path.to_string_lossy())?;
}
}
}
}
let common_names = cert_resolver.get_common_names();
let config = rustls::ServerConfig::builder()
.with_safe_default_cipher_suites()
.with_safe_default_kx_groups()
// allow TLS 1.2 to be compatible with older client libraries
.with_protocol_versions(&[&rustls::version::TLS13, &rustls::version::TLS12])?
.with_no_client_auth()
.with_cert_resolver(Arc::new(cert_resolver))
.into();
Ok(TlsConfig {
config,
common_names: Some(common_names),
})
}
struct CertResolver {
certs: HashMap<String, Arc<rustls::sign::CertifiedKey>>,
}
impl CertResolver {
fn new() -> Self {
Self {
certs: HashMap::new(),
}
}
fn add_cert(&mut self, key_path: &str, cert_path: &str) -> anyhow::Result<()> {
let priv_key = {
let key_bytes = std::fs::read(key_path).context("TLS key file")?;
let mut keys = rustls_pemfile::pkcs8_private_keys(&mut &key_bytes[..])
.context(format!("Failed to read TLS keys at '{key_path}'"))?;
ensure!(keys.len() == 1, "keys.len() = {} (should be 1)", keys.len());
keys.pop().map(rustls::PrivateKey).unwrap()
let config = TlsConfig {
config: Arc::new(rustls_config),
};
let key = sign::any_supported_type(&priv_key).context("invalid private key")?;
let cert_chain_bytes = std::fs::read(cert_path)
.context(format!("Failed to read TLS cert file at '{cert_path}.'"))?;
let cert_chain = {
rustls_pemfile::certs(&mut &cert_chain_bytes[..])
.context(format!(
"Failed to read TLS certificate chain from bytes from file at '{cert_path}'."
))?
.into_iter()
.map(rustls::Certificate)
.collect()
};
let common_name = {
let pem = x509_parser::pem::parse_x509_pem(&cert_chain_bytes)
.context(format!(
"Failed to parse PEM object from bytes from file at '{cert_path}'."
))?
.1;
let common_name = pem.parse_x509()?.subject().to_string();
common_name.strip_prefix("CN=*.").map(|s| s.to_string())
}
.context(format!(
"Failed to parse common name from certificate at '{cert_path}'."
))?;
self.certs.insert(
common_name,
Arc::new(rustls::sign::CertifiedKey::new(cert_chain, key)),
);
Ok(())
}
fn get_common_names(&self) -> HashSet<String> {
self.certs.keys().map(|s| s.to_string()).collect()
}
}
impl rustls::server::ResolvesServerCert for CertResolver {
fn resolve(
&self,
_client_hello: rustls::server::ClientHello,
) -> Option<Arc<rustls::sign::CertifiedKey>> {
// loop here and cut off more and more subdomains until we find
// a match to get a proper wildcard support. OTOH, we now do not
// use nested domains, so keep this simple for now.
//
// With the current coding foo.com will match *.foo.com and that
// repeats behavior of the old code.
if let Some(mut sni_name) = _client_hello.server_name() {
loop {
if let Some(cert) = self.certs.get(sni_name) {
return Some(cert.clone());
}
if let Some((_, rest)) = sni_name.split_once('.') {
sni_name = rest;
} else {
return None;
}
}
} else {
None
}
Ok(config)
}
}

View File

@@ -7,6 +7,7 @@
mod auth;
mod cache;
mod cancellation;
mod certs;
mod compute;
mod config;
mod console;
@@ -23,10 +24,12 @@ mod url;
mod waiters;
use anyhow::{bail, Context};
use auth::BackendType;
use certs::config::TlsServers;
use clap::{self, Arg};
use config::ProxyConfig;
use config::{MetricCollectionConfig, ProxyConfig, TlsConfig};
use futures::FutureExt;
use std::{borrow::Cow, future::Future, net::SocketAddr};
use std::{borrow::Cow, future::Future, net::SocketAddr, path::PathBuf};
use tokio::{net::TcpListener, task::JoinError};
use tracing::{info, warn};
use utils::{project_git_version, sentry_init::init_sentry};
@@ -126,27 +129,34 @@ async fn handle_signals() -> anyhow::Result<()> {
}
}
/// ProxyConfig is created at proxy startup, and lives forever.
fn build_config(args: &clap::ArgMatches) -> anyhow::Result<&'static ProxyConfig> {
let tls_config = match (
args.get_one::<String>("tls-key"),
args.get_one::<String>("tls-cert"),
) {
(Some(key_path), Some(cert_path)) => Some(config::configure_tls(
key_path,
cert_path,
args.get_one::<String>("certs-dir"),
)?),
(None, None) => None,
fn build_tls_config(args: &clap::ArgMatches) -> anyhow::Result<Option<TlsConfig>> {
let tls_config = args.get_one::<PathBuf>("tls-config");
let main = tls_config.map(TlsServers::from_config_file).transpose()?;
let tls_cert = args.get_one::<PathBuf>("tls-cert");
let tls_key = args.get_one::<PathBuf>("tls-key");
let _aux = match (tls_cert, tls_key) {
(Some(_key), Some(_cert)) => todo!("implement legacy TLS setup"),
(None, None) => None::<()>,
_ => bail!("either both or neither tls-key and tls-cert must be specified"),
};
let metric_collection = match (
args.get_one::<String>("metric-collection-endpoint"),
args.get_one::<String>("metric-collection-interval"),
) {
// TODO: first merge `main` and `_aux` into one.
main.map(|servers| {
let resolver = certs::CertResolver::new(servers)?;
TlsConfig::new(resolver)
})
.transpose()
}
fn build_metrics_config(args: &clap::ArgMatches) -> anyhow::Result<Option<MetricCollectionConfig>> {
let endpoint = args.get_one::<String>("metric-collection-endpoint");
let interval = args.get_one::<String>("metric-collection-interval");
let config = match (endpoint, interval) {
(Some(endpoint), Some(interval)) => Some(config::MetricCollectionConfig {
endpoint: endpoint.parse()?,
endpoint: endpoint.parse().context("bad metrics endpoint")?,
interval: humantime::parse_duration(interval)?,
}),
(None, None) => None,
@@ -156,7 +166,11 @@ fn build_config(args: &clap::ArgMatches) -> anyhow::Result<&'static ProxyConfig>
),
};
let auth_backend = match args.get_one::<String>("auth-backend").unwrap().as_str() {
Ok(config)
}
fn build_auth_config(args: &clap::ArgMatches) -> anyhow::Result<BackendType<'static, ()>> {
let config = match args.get_one::<String>("auth-backend").unwrap().as_str() {
"console" => {
let config::CacheOptions { size, ttl } = args
.get_one::<String>("wake-compute-cache")
@@ -186,10 +200,15 @@ fn build_config(args: &clap::ArgMatches) -> anyhow::Result<&'static ProxyConfig>
other => bail!("unsupported auth backend: {other}"),
};
Ok(config)
}
/// ProxyConfig is created at proxy startup, and lives forever.
fn build_config(args: &clap::ArgMatches) -> anyhow::Result<&'static ProxyConfig> {
let config = Box::leak(Box::new(ProxyConfig {
tls_config,
auth_backend,
metric_collection,
tls_config: build_tls_config(args)?,
auth_backend: build_auth_config(args)?,
metric_collection: build_metrics_config(args)?,
}));
Ok(config)
@@ -249,20 +268,22 @@ fn cli() -> clap::Command {
.short('k')
.long("tls-key")
.alias("ssl-key") // backwards compatibility
.help("path to TLS key for client postgres connections"),
.help("path to TLS key for client postgres connections")
.value_parser(clap::builder::PathBufValueParser::new()),
)
.arg(
Arg::new("tls-cert")
.short('c')
.long("tls-cert")
.alias("ssl-cert") // backwards compatibility
.help("path to TLS cert for client postgres connections"),
.help("path to TLS cert for client postgres connections")
.value_parser(clap::builder::PathBufValueParser::new()),
)
// tls-key and tls-cert are for backwards compatibility, we can put all certs in one dir
.arg(
Arg::new("certs-dir")
.long("certs-dir")
.help("path to directory with TLS certificates for client postgres connections"),
Arg::new("tls-config")
.long("tls-config")
.help("path to the TLS config file (example: config/servers.dhall)")
.value_parser(clap::builder::PathBufValueParser::new()),
)
.arg(
Arg::new("metric-collection-endpoint")

View File

@@ -112,7 +112,6 @@ pub async fn handle_ws_client(
NUM_CONNECTIONS_CLOSED_COUNTER.inc();
}
let tls = config.tls_config.as_ref();
let hostname = hostname.as_deref();
// TLS is None here, because the connection is already encrypted.
@@ -124,11 +123,10 @@ pub async fn handle_ws_client(
// Extract credentials which we're going to use for auth.
let creds = {
let common_names = tls.and_then(|tls| tls.common_names.clone());
let result = config
.auth_backend
.as_ref()
.map(|_| auth::ClientCredentials::parse(&params, hostname, common_names))
.map(|_| auth::ClientCredentials::parse(&params, hostname))
.transpose();
async { result }.or_else(|e| stream.throw_error(e)).await?
@@ -163,11 +161,10 @@ async fn handle_client(
// Extract credentials which we're going to use for auth.
let creds = {
let sni = stream.get_ref().sni_hostname();
let common_names = tls.and_then(|tls| tls.common_names.clone());
let result = config
.auth_backend
.as_ref()
.map(|_| auth::ClientCredentials::parse(&params, sni, common_names))
.map(|_| auth::ClientCredentials::parse(&params, sni))
.transpose();
async { result }.or_else(|e| stream.throw_error(e)).await?

View File

@@ -41,10 +41,7 @@ impl ClientConfig<'_> {
}
/// Generate TLS certificates and build rustls configs for client and server.
fn generate_tls_config<'a>(
hostname: &'a str,
common_name: &'a str,
) -> anyhow::Result<(ClientConfig<'a>, TlsConfig)> {
fn generate_tls_config(hostname: &str) -> anyhow::Result<(ClientConfig<'_>, TlsConfig)> {
let (ca, cert, key) = generate_certs(hostname)?;
let tls_config = {
@@ -54,12 +51,7 @@ fn generate_tls_config<'a>(
.with_single_cert(vec![cert], key)?
.into();
let common_names = Some([common_name.to_owned()].iter().cloned().collect());
TlsConfig {
config,
common_names,
}
TlsConfig { config }
};
let client_config = {
@@ -152,7 +144,7 @@ async fn dummy_proxy(
async fn handshake_tls_is_enforced_by_proxy() -> anyhow::Result<()> {
let (client, server) = tokio::io::duplex(1024);
let (_, server_config) = generate_tls_config("generic-project-name.localhost", "localhost")?;
let (_, server_config) = generate_tls_config("generic-project-name.localhost")?;
let proxy = tokio::spawn(dummy_proxy(client, Some(server_config), NoAuth));
let client_err = tokio_postgres::Config::new()
@@ -180,8 +172,7 @@ async fn handshake_tls_is_enforced_by_proxy() -> anyhow::Result<()> {
async fn handshake_tls() -> anyhow::Result<()> {
let (client, server) = tokio::io::duplex(1024);
let (client_config, server_config) =
generate_tls_config("generic-project-name.localhost", "localhost")?;
let (client_config, server_config) = generate_tls_config("generic-project-name.localhost")?;
let proxy = tokio::spawn(dummy_proxy(client, Some(server_config), NoAuth));
let (_client, _conn) = tokio_postgres::Config::new()
@@ -239,8 +230,7 @@ async fn keepalive_is_inherited() -> anyhow::Result<()> {
async fn scram_auth_good(#[case] password: &str) -> anyhow::Result<()> {
let (client, server) = tokio::io::duplex(1024);
let (client_config, server_config) =
generate_tls_config("generic-project-name.localhost", "localhost")?;
let (client_config, server_config) = generate_tls_config("generic-project-name.localhost")?;
let proxy = tokio::spawn(dummy_proxy(
client,
Some(server_config),
@@ -262,8 +252,7 @@ async fn scram_auth_good(#[case] password: &str) -> anyhow::Result<()> {
async fn scram_auth_mock() -> anyhow::Result<()> {
let (client, server) = tokio::io::duplex(1024);
let (client_config, server_config) =
generate_tls_config("generic-project-name.localhost", "localhost")?;
let (client_config, server_config) = generate_tls_config("generic-project-name.localhost")?;
let proxy = tokio::spawn(dummy_proxy(
client,
Some(server_config),

View File

@@ -26,7 +26,7 @@ prometheus-client = "^0.14.1"
pytest-timeout = "^2.1.0"
Werkzeug = "^2.2.3"
pytest-order = "^1.0.1"
allure-pytest = "^2.13.1"
allure-pytest = "^2.10.0"
pytest-asyncio = "^0.19.0"
toml = "^0.10.2"
psutil = "^5.9.4"
@@ -34,7 +34,6 @@ types-psutil = "^5.9.5.4"
types-toml = "^0.10.8"
pytest-httpserver = "^1.0.6"
aiohttp = "3.7.4"
pytest-rerunfailures = "^11.1.2"
[tool.poetry.group.dev.dependencies]
black = "^23.1.0"
@@ -70,9 +69,6 @@ strict = true
module = [
"asyncpg.*",
"pg8000.*",
"allure.*",
"allure_commons.*",
"allure_pytest.*",
]
ignore_missing_imports = true

View File

@@ -674,8 +674,7 @@ impl Timeline {
bail!(TimelineError::Cancelled(self.ttid));
}
let mut state = self.write_shared_state();
state.sk.inmem.backup_lsn = max(state.sk.inmem.backup_lsn, backup_lsn);
self.write_shared_state().sk.inmem.backup_lsn = backup_lsn;
// we should check whether to shut down offloader, but this will be done
// soon by peer communication anyway.
Ok(())

View File

@@ -322,20 +322,25 @@ impl WalBackupTask {
continue;
}
if let Err(e) = backup_lsn_range(
match backup_lsn_range(
backup_lsn,
commit_lsn,
self.wal_seg_size,
&self.timeline_dir,
&self.workspace_dir,
&self.timeline,
)
.await
{
if e.to_string().contains("set_wal_backup_lsn") {
error!("failed to set wal_backup_lsn: {e:#}");
return;
} else {
Ok(backup_lsn_result) => {
backup_lsn = backup_lsn_result;
let res = self.timeline.set_wal_backup_lsn(backup_lsn_result);
if let Err(e) = res {
error!("failed to set wal_backup_lsn: {}", e);
return;
}
retry_attempt = 0;
}
Err(e) => {
error!(
"failed while offloading range {}-{}: {:?}",
backup_lsn, commit_lsn, e
@@ -354,19 +359,15 @@ pub async fn backup_lsn_range(
wal_seg_size: usize,
timeline_dir: &Path,
workspace_dir: &Path,
timeline: &Arc<Timeline>,
) -> Result<()> {
let mut backup_lsn;
) -> Result<Lsn> {
let mut res = start_lsn;
let segments = get_segments(start_lsn, end_lsn, wal_seg_size);
for s in &segments {
backup_single_segment(s, timeline_dir, workspace_dir)
.await
.with_context(|| format!("offloading segno {}", s.seg_no))?;
backup_lsn = s.end_lsn;
// error is possible iff timeline was canceled
timeline
.set_wal_backup_lsn(backup_lsn)
.context("set_wal_backup_lsn")?;
res = s.end_lsn;
}
info!(
"offloaded segnos {:?} up to {}, previous backup_lsn {}",
@@ -374,8 +375,7 @@ pub async fn backup_lsn_range(
end_lsn,
start_lsn,
);
Ok(())
Ok(res)
}
async fn backup_single_segment(

View File

@@ -1,87 +0,0 @@
#! /usr/bin/env python3
import argparse
import json
import logging
from collections import defaultdict
from typing import DefaultDict, Dict
import psycopg2
import psycopg2.extras
# We call a test "flaky" if it failed at least once on the main branch in the last N days (N is the --days argument, default 10).
FLAKY_TESTS_QUERY = """
SELECT
DISTINCT parent_suite, suite, test
FROM
(
SELECT
revision,
jsonb_array_elements(data -> 'children') -> 'name' as parent_suite,
jsonb_array_elements(jsonb_array_elements(data -> 'children') -> 'children') -> 'name' as suite,
jsonb_array_elements(jsonb_array_elements(jsonb_array_elements(data -> 'children') -> 'children') -> 'children') -> 'name' as test,
jsonb_array_elements(jsonb_array_elements(jsonb_array_elements(data -> 'children') -> 'children') -> 'children') -> 'status' as status,
to_timestamp((jsonb_array_elements(jsonb_array_elements(jsonb_array_elements(data -> 'children') -> 'children') -> 'children') -> 'time' -> 'start')::bigint / 1000)::date as timestamp
FROM
regress_test_results
WHERE
reference = 'refs/heads/main'
) data
WHERE
timestamp > CURRENT_DATE - INTERVAL '%s' day
AND status::text IN ('"failed"', '"broken"')
;
"""
def main(args: argparse.Namespace):
    """Query the test-results database for flaky tests and dump them as JSON.

    The output maps parent_suite -> suite -> test_name -> True, which is the
    shape consumed by the pytest flaky-rerun plugin.
    """
    # Nested defaultdicts so we can assign res[a][b][c] without setdefault noise.
    flaky: DefaultDict[str, DefaultDict[str, Dict[str, bool]]] = defaultdict(
        lambda: defaultdict(dict)
    )

    logging.info("connecting to the database...")
    with psycopg2.connect(args.connstr, connect_timeout=10) as conn, conn.cursor(
        cursor_factory=psycopg2.extras.DictCursor
    ) as cur:
        logging.info("fetching flaky tests...")
        cur.execute(FLAKY_TESTS_QUERY, (args.days,))
        rows = cur.fetchall()

    for row in rows:
        logging.info(f"\t{row['parent_suite'].replace('.', '/')}/{row['suite']}.py::{row['test']}")
        flaky[row["parent_suite"]][row["suite"]][row["test"]] = True

    logging.info(f"saving results to {args.output.name}")
    json.dump(flaky, args.output, indent=2)
if __name__ == "__main__":
    # CLI entry point: parse arguments, configure bare-message logging, run main().
    parser = argparse.ArgumentParser(description="Detect flaky tests in the last N days")
    parser.add_argument(
        "--output",
        # FileType("w") opens the file at parse time; main() writes JSON into it.
        type=argparse.FileType("w"),
        default="flaky.json",
        help="path to output json file (default: flaky.json)",
    )
    parser.add_argument(
        "--days",
        required=False,
        default=10,
        type=int,
        help="how many days to look back for flaky tests (default: 10)",
    )
    parser.add_argument(
        # Positional: passed straight to psycopg2.connect in main().
        "connstr",
        help="connection string to the test results database",
    )
    args = parser.parse_args()

    level = logging.INFO
    # Message-only format: output is meant to be human-readable progress lines.
    logging.basicConfig(
        format="%(message)s",
        level=level,
    )

    main(args)

View File

@@ -1,125 +0,0 @@
//
// The script parses Allure reports and posts a comment with a summary of the test results to the PR.
// It accepts an array of items and creates a comment with a summary for each one (for "release" and "debug", together or separately if any of them failed to be generated).
//
// The comment is updated on each run with the latest results.
//
// It is designed to be used with actions/github-script from GitHub Workflows:
// - uses: actions/github-script@v6
// with:
// script: |
// const script = require("./scripts/pr-comment-test-report.js")
// await script({
// github,
// context,
// fetch,
// reports: [{...}, ...], // each report is expected to have "buildType", "reportUrl", and "jsonUrl" properties
// })
//
module.exports = async ({ github, context, fetch, reports }) => {
// Marker to find the comment in the subsequent runs
const startMarker = `<!--AUTOMATIC COMMENT START #${context.payload.number}-->`
// GitHub bot id taken from (https://api.github.com/users/github-actions[bot])
const githubActionsBotId = 41898282
// The latest commit in the PR URL
const commitUrl = `${context.serverUrl}/${context.repo.owner}/${context.repo.repo}/pull/${context.payload.number}/commits/${context.payload.pull_request.head.sha}`
// Commend body itself
let commentBody = `${startMarker}\n### Test results for ${commitUrl}:\n___\n`
// Common parameters for GitHub API requests
const ownerRepoParams = {
owner: context.repo.owner,
repo: context.repo.repo,
}
for (const report of reports) {
const {buildType, reportUrl, jsonUrl} = report
if (!reportUrl || !jsonUrl) {
console.warn(`"reportUrl" or "jsonUrl" aren't set for ${buildType} build`)
continue
}
const suites = await (await fetch(jsonUrl)).json()
// Allure distinguishes "failed" (with an assertion error) and "broken" (with any other error) tests.
// For this report it's ok to treat them in the same way (as failed).
failedTests = []
passedTests = []
skippedTests = []
retriedTests = []
retriedStatusChangedTests = []
for (const parentSuite of suites.children) {
for (const suite of parentSuite.children) {
for (const test of suite.children) {
pytestName = `${parentSuite.name.replace(".", "/")}/${suite.name}.py::${test.name}`
test.pytestName = pytestName
if (test.status === "passed") {
passedTests.push(test);
} else if (test.status === "failed" || test.status === "broken") {
failedTests.push(test);
} else if (test.status === "skipped") {
skippedTests.push(test);
}
if (test.retriesCount > 0) {
retriedTests.push(test);
if (test.retriedStatusChangedTests) {
retriedStatusChangedTests.push(test);
}
}
}
}
}
const totalTestsCount = failedTests.length + passedTests.length + skippedTests.length
commentBody += `#### ${buildType} build: ${totalTestsCount} tests run: ${passedTests.length} passed, ${failedTests.length} failed, ${skippedTests.length} ([full report](${reportUrl}))\n`
if (failedTests.length > 0) {
commentBody += `Failed tests:\n`
for (const test of failedTests) {
const allureLink = `${reportUrl}#suites/${test.parentUid}/${test.uid}`
commentBody += `- [\`${test.pytestName}\`](${allureLink})`
if (test.retriesCount > 0) {
commentBody += ` (ran [${test.retriesCount + 1} times](${allureLink}/retries))`
}
commentBody += "\n"
}
commentBody += "\n"
}
if (retriedStatusChangedTests > 0) {
commentBody += `Flaky tests:\n`
for (const test of retriedStatusChangedTests) {
const status = test.status === "passed" ? ":white_check_mark:" : ":x:"
commentBody += `- ${status} [\`${test.pytestName}\`](${reportUrl}#suites/${test.parentUid}/${test.uid}/retries)\n`
}
commentBody += "\n"
}
commentBody += "___\n"
}
const { data: comments } = await github.rest.issues.listComments({
issue_number: context.payload.number,
...ownerRepoParams,
})
const comment = comments.find(comment => comment.user.id === githubActionsBotId && comment.body.startsWith(startMarker))
if (comment) {
await github.rest.issues.updateComment({
comment_id: comment.id,
body: commentBody,
...ownerRepoParams,
})
} else {
await github.rest.issues.createComment({
issue_number: context.payload.number,
body: commentBody,
...ownerRepoParams,
})
}
}

View File

@@ -4,5 +4,4 @@ pytest_plugins = (
"fixtures.pg_stats",
"fixtures.compare_fixtures",
"fixtures.slow",
"fixtures.flaky",
)

View File

@@ -1,58 +0,0 @@
import json
from pathlib import Path
from typing import List
import pytest
from _pytest.config import Config
from _pytest.config.argparsing import Parser
from allure_commons.types import LabelType
from allure_pytest.utils import allure_name, allure_suite_labels
from fixtures.log_helper import log
"""
The plugin reruns flaky tests.
It uses `pytest.mark.flaky` provided by `pytest-rerunfailures` plugin and flaky tests detected by `scripts/flaky_tests.py`
Note: the logic of getting flaky tests is extracted to a separate script to avoid running it for each of N xdist workers
"""
def pytest_addoption(parser: Parser):
    """Register the CLI option pointing at the flaky-tests JSON file."""
    # The value is converted to a pathlib.Path so the plugin can call .exists()/.read_text().
    parser.addoption(
        "--flaky-tests-json",
        type=Path,
        action="store",
        help="Path to json file with flaky tests generated by scripts/flaky_tests.py",
    )
def pytest_collection_modifyitems(config: Config, items: List[pytest.Item]):
    """Mark tests listed in the flaky-tests JSON with `pytest.mark.flaky` so they get rerun."""
    flaky_json = config.getoption("--flaky-tests-json")
    if not flaky_json:
        return

    # Any error with getting flaky tests aren't critical, so just do not rerun any tests
    if not flaky_json.exists():
        return

    content = flaky_json.read_text()
    try:
        flaky_tests = json.loads(content)
    except ValueError:
        log.error(f"Can't parse {content} as json")
        return

    for item in items:
        # Use the same logic for constructing test name as Allure does (we store allure-provided data in DB)
        # Ref https://github.com/allure-framework/allure-python/blob/2.13.1/allure-pytest/src/listener.py#L98-L100
        labels = dict(allure_suite_labels(item))
        parent_suite = str(labels.get(LabelType.PARENT_SUITE))
        suite = str(labels.get(LabelType.SUITE))
        call_params = item.callspec.params if hasattr(item, "callspec") else {}
        test_name = allure_name(item, call_params)

        if flaky_tests.get(parent_suite, {}).get(suite, {}).get(test_name, False):
            # Rerun 3 times = 1 original run + 2 reruns
            log.info(f"Marking {item.nodeid} as flaky. It will be rerun up to 3 times")
            item.add_marker(pytest.mark.flaky(reruns=2))

View File

@@ -17,7 +17,6 @@ import uuid
from collections import defaultdict
from contextlib import closing, contextmanager
from dataclasses import dataclass, field
from datetime import datetime
from enum import Flag, auto
from functools import cached_property
from itertools import chain, product
@@ -49,7 +48,6 @@ from fixtures.types import Lsn, TenantId, TimelineId
from fixtures.utils import (
ATTACHMENT_NAME_REGEX,
Fn,
allure_add_grafana_links,
allure_attach_from_dir,
get_self_dir,
subprocess_capture,
@@ -2438,16 +2436,10 @@ def remote_pg(
connstr = os.getenv("BENCHMARK_CONNSTR")
if connstr is None:
raise ValueError("no connstr provided, use BENCHMARK_CONNSTR environment variable")
start_ms = int(datetime.utcnow().timestamp() * 1000)
with RemotePostgres(pg_bin, connstr) as remote_pg:
yield remote_pg
end_ms = int(datetime.utcnow().timestamp() * 1000)
host = parse_dsn(connstr).get("host", "")
if host.endswith(".neon.build"):
# Add 10s margin to the start and end times
allure_add_grafana_links(host, start_ms - 10_000, end_ms + 10_000)
class PSQL:
"""

View File

@@ -1,5 +1,4 @@
import contextlib
import json
import os
import re
import subprocess
@@ -7,9 +6,8 @@ import tarfile
import time
from pathlib import Path
from typing import Any, Callable, Dict, List, Tuple, TypeVar
from urllib.parse import urlencode
import allure
import allure # type: ignore
from psycopg2.extensions import cursor
from fixtures.log_helper import log
@@ -186,46 +184,6 @@ def allure_attach_from_dir(dir: Path):
allure.attach.file(source, name, attachment_type, extension)
DATASOURCE_ID = "xHHYY0dVz"
def allure_add_grafana_links(host: str, start_ms: int, end_ms: int):
    """Add links to server logs in Grafana to Allure report"""
    # We expect host to be in format like ep-divine-night-159320.us-east-2.aws.neon.build
    endpoint_id, region_id, _ = host.split(".", 2)

    log_queries = {
        "compute logs": f'{{app="compute-node-{endpoint_id}", neon_region="{region_id}"}}',
        "k8s events": f'{{job="integrations/kubernetes/eventhandler"}} |~ "name=compute-node-{endpoint_id}-"',
        "console logs": f'{{neon_service="console", neon_region="{region_id}"}} | json | endpoint_id = "{endpoint_id}"',
        "proxy logs": f'{{neon_service="proxy-scram", neon_region="{region_id}"}}',
    }

    for link_name, expr in log_queries.items():
        # Build the Grafana Explore URL state fresh for each query.
        explore_state: Dict[str, Any] = {
            "datasource": DATASOURCE_ID,
            "queries": [
                {
                    "expr": expr,
                    "refId": "A",
                    "datasource": {"type": "loki", "uid": DATASOURCE_ID},
                    "editorMode": "code",
                    "queryType": "range",
                }
            ],
            "range": {
                "from": str(start_ms),
                "to": str(end_ms),
            },
        }
        query_string = urlencode({"orgId": 1, "left": json.dumps(explore_state)})
        link = f"https://neonprod.grafana.net/explore?{query_string}"
        allure.dynamic.link(link, name=link_name)
        log.info(f"{link_name}: {link}")
def start_in_background(
command: list[str], cwd: Path, log_file_name: str, is_started: Fn
) -> subprocess.Popen[bytes]: