Mirror of https://github.com/neondatabase/neon.git, synced 2026-01-31 17:20:37 +00:00

Compare commits (15 commits): conrad/pro ... image_laye
| Author | SHA1 | Date |
|---|---|---|
|  | 70d1086e0f |  |
|  | 5a8e8baf9f |  |
|  | 57a4119a7b |  |
|  | aaef3789b0 |  |
|  | 0b57e0b8f2 |  |
|  | 485ecbaf8f |  |
|  | 0bcbce197a |  |
|  | 19d59e58d2 |  |
|  | ce65d13dbd |  |
|  | 18fefff026 |  |
|  | 2a69861896 |  |
|  | 98375b3896 |  |
|  | 8c60359ae5 |  |
|  | 8c7136b057 |  |
|  | 0df6c41eaa |  |
@@ -23,33 +1,10 @@ platforms = [
 ]
 
 [final-excludes]
-workspace-members = [
-    # vm_monitor benefits from the same Cargo.lock as the rest of our artifacts, but
-    # it is built primarly in separate repo neondatabase/autoscaling and thus is excluded
-    # from depending on workspace-hack because most of the dependencies are not used.
-    "vm_monitor",
-
-    # All of these exist in libs and are not usually built independently.
-    # Putting workspace hack there adds a bottleneck for cargo builds.
-    "compute_api",
-    "consumption_metrics",
-    "desim",
-    "metrics",
-    "pageserver_api",
-    "postgres_backend",
-    "postgres_connection",
-    "postgres_ffi",
-    "pq_proto",
-    "remote_storage",
-    "safekeeper_api",
-    "tenant_size_model",
-    "tracing-utils",
-    "utils",
-    "wal_craft",
-    "walproposer",
-    "postgres-protocol2",
-    "postgres-types2",
-    "tokio-postgres2",
-]
+# vm_monitor benefits from the same Cargo.lock as the rest of our artifacts, but
+# it is built primarly in separate repo neondatabase/autoscaling and thus is excluded
+# from depending on workspace-hack because most of the dependencies are not used.
+workspace-members = ["vm_monitor"]
 
 # Write out exact versions rather than a semver range. (Defaults to false.)
 # exact-versions = true
@@ -1,2 +1,2 @@
 [profile.default]
-slow-timeout = { period = "60s", terminate-after = 3 }
+slow-timeout = { period = "20s", terminate-after = 3 }
@@ -5,22 +5,23 @@
 !Cargo.toml
 !Makefile
 !rust-toolchain.toml
+!scripts/combine_control_files.py
 !scripts/ninstall.sh
-!docker-compose/run-tests.sh
+!vm-cgconfig.conf
 
 # Directories
 !.cargo/
 !.config/
-!compute/
 !compute_tools/
 !control_plane/
 !libs/
+!neon_local/
 !pageserver/
 !pgxn/
 !proxy/
-!storage_scrubber/
+!s3_scrubber/
 !safekeeper/
 !storage_broker/
-!storage_controller/
+!trace/
 !vendor/postgres-*/
 !workspace_hack/
.gitattributes (2 changes)
@@ -1,2 +0,0 @@
-# allows for nicer hunk headers with git show
-*.rs diff=rust
.github/ISSUE_TEMPLATE/config.yml (6 changes)
@@ -1,6 +0,0 @@
-
-blank_issues_enabled: true
-contact_links:
-  - name: Feature request
-    url: https://console.neon.tech/app/projects?modal=feedback
-    about: For feature requests in the Neon product, please submit via the feedback form on `https://console.neon.tech`
.github/actionlint.yml (17 changes)
@@ -1,23 +1,14 @@
 self-hosted-runner:
   labels:
     - arm64
+    - dev
+    - gen3
     - large
-    - large-arm64
+    # Remove `macos-14` from the list after https://github.com/rhysd/actionlint/pull/392 is merged.
+    - macos-14
     - small
-    - small-arm64
     - us-east-2
 config-variables:
-  - AZURE_DEV_CLIENT_ID
-  - AZURE_DEV_REGISTRY_NAME
-  - AZURE_DEV_SUBSCRIPTION_ID
-  - AZURE_PROD_CLIENT_ID
-  - AZURE_PROD_REGISTRY_NAME
-  - AZURE_PROD_SUBSCRIPTION_ID
-  - AZURE_TENANT_ID
-  - BENCHMARK_PROJECT_ID_PUB
-  - BENCHMARK_PROJECT_ID_SUB
   - REMOTE_STORAGE_AZURE_CONTAINER
   - REMOTE_STORAGE_AZURE_REGION
   - SLACK_UPCOMING_RELEASE_CHANNEL_ID
-  - DEV_AWS_OIDC_ROLE_ARN
-  - BENCHMARK_INGEST_TARGET_PROJECTID
@@ -7,10 +7,6 @@ inputs:
     type: boolean
     required: false
    default: false
-  aws_oicd_role_arn:
-    description: 'the OIDC role arn to (re-)acquire for allure report upload - if not set call must acquire OIDC role'
-    required: false
-    default: ''
 
 outputs:
   base-url:
@@ -43,8 +39,7 @@ runs:
         PR_NUMBER=$(jq --raw-output .pull_request.number "$GITHUB_EVENT_PATH" || true)
         if [ "${PR_NUMBER}" != "null" ]; then
           BRANCH_OR_PR=pr-${PR_NUMBER}
-        elif [ "${GITHUB_REF_NAME}" = "main" ] || [ "${GITHUB_REF_NAME}" = "release" ] || \
-             [ "${GITHUB_REF_NAME}" = "release-proxy" ] || [ "${GITHUB_REF_NAME}" = "release-compute" ]; then
+        elif [ "${GITHUB_REF_NAME}" = "main" ] || [ "${GITHUB_REF_NAME}" = "release" ] || [ "${GITHUB_REF_NAME}" = "release-proxy" ]; then
           # Shortcut for special branches
           BRANCH_OR_PR=${GITHUB_REF_NAME}
         else
@@ -84,14 +79,6 @@ runs:
       ALLURE_VERSION: 2.27.0
       ALLURE_ZIP_SHA256: b071858fb2fa542c65d8f152c5c40d26267b2dfb74df1f1608a589ecca38e777
 
-    - name: (Re-)configure AWS credentials # necessary to upload reports to S3 after a long-running test
-      if: ${{ !cancelled() && (inputs.aws_oicd_role_arn != '') }}
-      uses: aws-actions/configure-aws-credentials@v4
-      with:
-        aws-region: eu-central-1
-        role-to-assume: ${{ inputs.aws_oicd_role_arn }}
-        role-duration-seconds: 3600 # 1 hour should be more than enough to upload report
-
     # Potentially we could have several running build for the same key (for example, for the main branch), so we use improvised lock for this
     - name: Acquire lock
       shell: bash -euxo pipefail {0}
@@ -163,7 +150,7 @@ runs:
 
         # Use aws s3 cp (instead of aws s3 sync) to keep files from previous runs to make old URLs work,
         # and to keep files on the host to upload them to the database
-        time s5cmd --log error cp "${WORKDIR}/report/*" "s3://${BUCKET}/${REPORT_PREFIX}/${GITHUB_RUN_ID}/"
+        time aws s3 cp --recursive --only-show-errors "${WORKDIR}/report" "s3://${BUCKET}/${REPORT_PREFIX}/${GITHUB_RUN_ID}"
 
         # Generate redirect
         cat <<EOF > ${WORKDIR}/index.html
@@ -196,7 +183,7 @@ runs:
       uses: actions/cache@v4
       with:
         path: ~/.cache/pypoetry/virtualenvs
-        key: v2-${{ runner.os }}-${{ runner.arch }}-python-deps-bookworm-${{ hashFiles('poetry.lock') }}
+        key: v2-${{ runner.os }}-python-deps-${{ hashFiles('poetry.lock') }}
 
     - name: Store Allure test stat in the DB (new)
       if: ${{ !cancelled() && inputs.store-test-results-into-db == 'true' }}
@@ -234,8 +221,6 @@ runs:
         REPORT_URL: ${{ steps.generate-report.outputs.report-url }}
         COMMIT_SHA: ${{ github.event.pull_request.head.sha || github.sha }}
       with:
-        # Retry script for 5XX server errors: https://github.com/actions/github-script#retries
-        retries: 5
         script: |
           const { REPORT_URL, COMMIT_SHA } = process.env
 
.github/actions/allure-report-store/action.yml (15 changes)
@@ -8,10 +8,6 @@ inputs:
   unique-key:
     description: 'string to distinguish different results in the same run'
     required: true
-  aws_oicd_role_arn:
-    description: 'the OIDC role arn to (re-)acquire for allure report upload - if not set call must acquire OIDC role'
-    required: false
-    default: ''
 
 runs:
   using: "composite"
@@ -23,8 +19,7 @@ runs:
         PR_NUMBER=$(jq --raw-output .pull_request.number "$GITHUB_EVENT_PATH" || true)
         if [ "${PR_NUMBER}" != "null" ]; then
           BRANCH_OR_PR=pr-${PR_NUMBER}
-        elif [ "${GITHUB_REF_NAME}" = "main" ] || [ "${GITHUB_REF_NAME}" = "release" ] || \
-             [ "${GITHUB_REF_NAME}" = "release-proxy" ] || [ "${GITHUB_REF_NAME}" = "release-compute" ]; then
+        elif [ "${GITHUB_REF_NAME}" = "main" ] || [ "${GITHUB_REF_NAME}" = "release" ] || [ "${GITHUB_REF_NAME}" = "release-proxy" ]; then
           # Shortcut for special branches
           BRANCH_OR_PR=${GITHUB_REF_NAME}
         else
@@ -36,14 +31,6 @@ runs:
       env:
         REPORT_DIR: ${{ inputs.report-dir }}
 
-    - name: (Re-)configure AWS credentials # necessary to upload reports to S3 after a long-running test
-      if: ${{ !cancelled() && (inputs.aws_oicd_role_arn != '') }}
-      uses: aws-actions/configure-aws-credentials@v4
-      with:
-        aws-region: eu-central-1
-        role-to-assume: ${{ inputs.aws_oicd_role_arn }}
-        role-duration-seconds: 3600 # 1 hour should be more than enough to upload report
-
     - name: Upload test results
       shell: bash -euxo pipefail {0}
       run: |
.github/actions/download/action.yml (2 changes)
@@ -26,7 +26,7 @@ runs:
         TARGET: ${{ inputs.path }}
         ARCHIVE: /tmp/downloads/${{ inputs.name }}.tar.zst
         SKIP_IF_DOES_NOT_EXIST: ${{ inputs.skip-if-does-not-exist }}
-        PREFIX: artifacts/${{ inputs.prefix || format('{0}/{1}/{2}', github.event.pull_request.head.sha || github.sha, github.run_id, github.run_attempt) }}
+        PREFIX: artifacts/${{ inputs.prefix || format('{0}/{1}', github.run_id, github.run_attempt) }}
       run: |
         BUCKET=neon-github-public-dev
         FILENAME=$(basename $ARCHIVE)
@@ -3,14 +3,14 @@ description: 'Create Branch using API'
 
 inputs:
   api_key:
-    description: 'Neon API key'
+    desctiption: 'Neon API key'
     required: true
   project_id:
-    description: 'ID of the Project to create Branch in'
+    desctiption: 'ID of the Project to create Branch in'
     required: true
   api_host:
-    description: 'Neon API host'
-    default: console-stage.neon.build
+    desctiption: 'Neon API host'
+    default: console.stage.neon.tech
 outputs:
   dsn:
     description: 'Created Branch DSN (for main database)'
.github/actions/neon-branch-delete/action.yml (10 changes)
@@ -3,17 +3,17 @@ description: 'Delete Branch using API'
 
 inputs:
   api_key:
-    description: 'Neon API key'
+    desctiption: 'Neon API key'
     required: true
   project_id:
-    description: 'ID of the Project which should be deleted'
+    desctiption: 'ID of the Project which should be deleted'
     required: true
   branch_id:
-    description: 'ID of the branch to delete'
+    desctiption: 'ID of the branch to delete'
     required: true
   api_host:
-    description: 'Neon API host'
-    default: console-stage.neon.build
+    desctiption: 'Neon API host'
+    default: console.stage.neon.tech
 
 runs:
   using: "composite"
.github/actions/neon-project-create/action.yml (24 changes)
@@ -3,19 +3,22 @@ description: 'Create Neon Project using API'
 
 inputs:
   api_key:
-    description: 'Neon API key'
+    desctiption: 'Neon API key'
     required: true
   region_id:
-    description: 'Region ID, if not set the project will be created in the default region'
+    desctiption: 'Region ID, if not set the project will be created in the default region'
     default: aws-us-east-2
   postgres_version:
-    description: 'Postgres version; default is 16'
-    default: '16'
+    desctiption: 'Postgres version; default is 15'
+    default: 15
   api_host:
-    description: 'Neon API host'
-    default: console-stage.neon.build
+    desctiption: 'Neon API host'
+    default: console.stage.neon.tech
+  provisioner:
+    desctiption: 'k8s-pod or k8s-neonvm'
+    default: 'k8s-pod'
   compute_units:
-    description: '[Min, Max] compute units'
+    desctiption: '[Min, Max] compute units; Min and Max are used for k8s-neonvm with autoscaling, for k8s-pod values Min and Max should be equal'
     default: '[1, 1]'
 
 outputs:
@@ -34,6 +37,10 @@ runs:
       # A shell without `set -x` to not to expose password/dsn in logs
       shell: bash -euo pipefail {0}
       run: |
+        if [ "${PROVISIONER}" == "k8s-pod" ] && [ "${MIN_CU}" != "${MAX_CU}" ]; then
+          echo >&2 "For k8s-pod provisioner MIN_CU should be equal to MAX_CU"
+        fi
+
         project=$(curl \
           "https://${API_HOST}/api/v2/projects" \
           --fail \
@@ -45,7 +52,7 @@ runs:
             \"name\": \"Created by actions/neon-project-create; GITHUB_RUN_ID=${GITHUB_RUN_ID}\",
             \"pg_version\": ${POSTGRES_VERSION},
             \"region_id\": \"${REGION_ID}\",
-            \"provisioner\": \"k8s-neonvm\",
+            \"provisioner\": \"${PROVISIONER}\",
             \"autoscaling_limit_min_cu\": ${MIN_CU},
             \"autoscaling_limit_max_cu\": ${MAX_CU},
             \"settings\": { }
@@ -68,5 +75,6 @@ runs:
         API_KEY: ${{ inputs.api_key }}
         REGION_ID: ${{ inputs.region_id }}
         POSTGRES_VERSION: ${{ inputs.postgres_version }}
+        PROVISIONER: ${{ inputs.provisioner }}
         MIN_CU: ${{ fromJSON(inputs.compute_units)[0] }}
         MAX_CU: ${{ fromJSON(inputs.compute_units)[1] }}
@@ -3,14 +3,14 @@ description: 'Delete Neon Project using API'
 
 inputs:
   api_key:
-    description: 'Neon API key'
+    desctiption: 'Neon API key'
     required: true
   project_id:
-    description: 'ID of the Project to delete'
+    desctiption: 'ID of the Project to delete'
     required: true
   api_host:
-    description: 'Neon API host'
-    default: console-stage.neon.build
+    desctiption: 'Neon API host'
+    default: console.stage.neon.tech
 
 runs:
   using: "composite"
.github/actions/run-python-test-set/action.yml (72 changes)
@@ -36,22 +36,18 @@ inputs:
     description: 'Region name for real s3 tests'
     required: false
     default: ''
-  rerun_failed:
-    description: 'Whether to rerun failed tests'
+  rerun_flaky:
+    description: 'Whether to rerun flaky tests'
     required: false
     default: 'false'
   pg_version:
     description: 'Postgres version to use for tests'
     required: false
-    default: 'v16'
+    default: 'v14'
   benchmark_durations:
     description: 'benchmark durations JSON'
     required: false
     default: '{}'
-  aws_oicd_role_arn:
-    description: 'the OIDC role arn to (re-)acquire for allure report upload - if not set call must acquire OIDC role'
-    required: false
-    default: ''
 
 runs:
   using: "composite"
@@ -60,14 +56,14 @@ runs:
       if: inputs.build_type != 'remote'
       uses: ./.github/actions/download
      with:
-        name: neon-${{ runner.os }}-${{ runner.arch }}-${{ inputs.build_type }}-artifact
+        name: neon-${{ runner.os }}-${{ inputs.build_type }}-artifact
        path: /tmp/neon
 
    - name: Download Neon binaries for the previous release
      if: inputs.build_type != 'remote'
      uses: ./.github/actions/download
      with:
-        name: neon-${{ runner.os }}-${{ runner.arch }}-${{ inputs.build_type }}-artifact
+        name: neon-${{ runner.os }}-${{ inputs.build_type }}-artifact
        path: /tmp/neon-previous
        prefix: latest
 
@@ -75,7 +71,7 @@ runs:
      if: inputs.build_type != 'remote'
      uses: ./.github/actions/download
      with:
-        name: compatibility-snapshot-${{ runner.arch }}-${{ inputs.build_type }}-pg${{ inputs.pg_version }}
+        name: compatibility-snapshot-${{ inputs.build_type }}-pg${{ inputs.pg_version }}
        path: /tmp/compatibility_snapshot_pg${{ inputs.pg_version }}
        prefix: latest
      # The lack of compatibility snapshot (for example, for the new Postgres version)
@@ -87,12 +83,13 @@ runs:
      uses: actions/checkout@v4
      with:
        submodules: true
+        fetch-depth: 1
 
    - name: Cache poetry deps
      uses: actions/cache@v4
      with:
        path: ~/.cache/pypoetry/virtualenvs
-        key: v2-${{ runner.os }}-${{ runner.arch }}-python-deps-bookworm-${{ hashFiles('poetry.lock') }}
+        key: v2-${{ runner.os }}-python-deps-${{ hashFiles('poetry.lock') }}
 
    - name: Install Python deps
      shell: bash -euxo pipefail {0}
@@ -108,7 +105,7 @@ runs:
        COMPATIBILITY_SNAPSHOT_DIR: /tmp/compatibility_snapshot_pg${{ inputs.pg_version }}
        ALLOW_BACKWARD_COMPATIBILITY_BREAKAGE: contains(github.event.pull_request.labels.*.name, 'backward compatibility breakage')
        ALLOW_FORWARD_COMPATIBILITY_BREAKAGE: contains(github.event.pull_request.labels.*.name, 'forward compatibility breakage')
-        RERUN_FAILED: ${{ inputs.rerun_failed }}
+        RERUN_FLAKY: ${{ inputs.rerun_flaky }}
        PG_VERSION: ${{ inputs.pg_version }}
      shell: bash -euxo pipefail {0}
      run: |
@@ -117,8 +114,6 @@ runs:
        export PLATFORM=${PLATFORM:-github-actions-selfhosted}
        export POSTGRES_DISTRIB_DIR=${POSTGRES_DISTRIB_DIR:-/tmp/neon/pg_install}
        export DEFAULT_PG_VERSION=${PG_VERSION#v}
-        export LD_LIBRARY_PATH=${POSTGRES_DISTRIB_DIR}/v${DEFAULT_PG_VERSION}/lib
-        export BENCHMARK_CONNSTR=${BENCHMARK_CONNSTR:-}
 
        if [ "${BUILD_TYPE}" = "remote" ]; then
          export REMOTE_ENV=1
@@ -134,8 +129,8 @@ runs:
          exit 1
        fi
        if [[ "${{ inputs.run_in_parallel }}" == "true" ]]; then
-          # -n sets the number of parallel processes that pytest-xdist will run
-          EXTRA_PARAMS="-n12 $EXTRA_PARAMS"
+          # -n16 uses sixteen processes to run tests via pytest-xdist
+          EXTRA_PARAMS="-n16 $EXTRA_PARAMS"
 
          # --dist=loadgroup points tests marked with @pytest.mark.xdist_group
          # to the same worker to make @pytest.mark.order work with xdist
@@ -154,8 +149,15 @@ runs:
          EXTRA_PARAMS="--out-dir $PERF_REPORT_DIR $EXTRA_PARAMS"
        fi
 
-        if [ "${RERUN_FAILED}" == "true" ]; then
-          EXTRA_PARAMS="--reruns 2 $EXTRA_PARAMS"
+        if [ "${RERUN_FLAKY}" == "true" ]; then
+          mkdir -p $TEST_OUTPUT
+          poetry run ./scripts/flaky_tests.py "${TEST_RESULT_CONNSTR}" \
+            --days 7 \
+            --output "$TEST_OUTPUT/flaky.json" \
+            --pg-version "${DEFAULT_PG_VERSION}" \
+            --build-type "${BUILD_TYPE}"
+
+          EXTRA_PARAMS="--flaky-tests-json $TEST_OUTPUT/flaky.json $EXTRA_PARAMS"
        fi
 
        # We use pytest-split plugin to run benchmarks in parallel on different CI runners
@@ -166,28 +168,23 @@ runs:
          EXTRA_PARAMS="--durations-path $TEST_OUTPUT/benchmark_durations.json $EXTRA_PARAMS"
        fi
 
-        if [[ $BUILD_TYPE == "debug" && $RUNNER_ARCH == 'X64' ]]; then
+        if [[ "${{ inputs.build_type }}" == "debug" ]]; then
          cov_prefix=(scripts/coverage "--profraw-prefix=$GITHUB_JOB" --dir=/tmp/coverage run)
+        elif [[ "${{ inputs.build_type }}" == "release" ]]; then
+          cov_prefix=()
        else
          cov_prefix=()
        fi
 
        # Wake up the cluster if we use remote neon instance
        if [ "${{ inputs.build_type }}" = "remote" ] && [ -n "${BENCHMARK_CONNSTR}" ]; then
-          QUERIES=("SELECT version()")
-          if [[ "${PLATFORM}" = "neon"* ]]; then
-            QUERIES+=("SHOW neon.tenant_id")
-            QUERIES+=("SHOW neon.timeline_id")
-          fi
-
-          for q in "${QUERIES[@]}"; do
-            ${POSTGRES_DISTRIB_DIR}/v${DEFAULT_PG_VERSION}/bin/psql ${BENCHMARK_CONNSTR} -c "${q}"
-          done
+          ${POSTGRES_DISTRIB_DIR}/v${DEFAULT_PG_VERSION}/bin/psql ${BENCHMARK_CONNSTR} -c "SELECT version();"
        fi
 
        # Run the tests.
        #
-        # --alluredir saves test results in Allure format (in a specified directory)
+        # The junit.xml file allows CI tools to display more fine-grained test information
+        # in its "Tests" tab in the results page.
        # --verbose prints name of each test (helpful when there are
        #   multiple tests in one file)
        # -rA prints summary in the end
@@ -196,6 +193,7 @@ runs:
        #
        mkdir -p $TEST_OUTPUT/allure/results
        "${cov_prefix[@]}" ./scripts/pytest \
+          --junitxml=$TEST_OUTPUT/junit.xml \
          --alluredir=$TEST_OUTPUT/allure/results \
          --tb=short \
          --verbose \
@@ -208,24 +206,14 @@ runs:
        fi
 
    - name: Upload compatibility snapshot
-      # Note, that we use `github.base_ref` which is a target branch for a PR
-      if: github.event_name == 'pull_request' && github.base_ref == 'release'
+      if: github.ref_name == 'release'
      uses: ./.github/actions/upload
      with:
-        name: compatibility-snapshot-${{ runner.arch }}-${{ inputs.build_type }}-pg${{ inputs.pg_version }}
+        name: compatibility-snapshot-${{ inputs.build_type }}-pg${{ inputs.pg_version }}-${{ github.run_id }}
        # Directory is created by test_compatibility.py::test_create_snapshot, keep the path in sync with the test
        path: /tmp/test_output/compatibility_snapshot_pg${{ inputs.pg_version }}/
-        # The lack of compatibility snapshot shouldn't fail the job
-        # (for example if we didn't run the test for non build-and-test workflow)
-        skip-if-does-not-exist: true
+        prefix: latest
 
-    - name: (Re-)configure AWS credentials # necessary to upload reports to S3 after a long-running test
-      if: ${{ !cancelled() && (inputs.aws_oicd_role_arn != '') }}
-      uses: aws-actions/configure-aws-credentials@v4
-      with:
-        aws-region: eu-central-1
-        role-to-assume: ${{ inputs.aws_oicd_role_arn }}
-        role-duration-seconds: 3600 # 1 hour should be more than enough to upload report
    - name: Upload test results
      if: ${{ !cancelled() }}
      uses: ./.github/actions/allure-report-store
.github/actions/upload/action.yml (22 changes)
@@ -7,24 +7,18 @@ inputs:
   path:
     description: "A directory or file to upload"
     required: true
-  skip-if-does-not-exist:
-    description: "Allow to skip if path doesn't exist, fail otherwise"
-    default: false
-    required: false
   prefix:
-    description: "S3 prefix. Default is '${GITHUB_SHA}/${GITHUB_RUN_ID}/${GITHUB_RUN_ATTEMPT}'"
+    description: "S3 prefix. Default is '${GITHUB_RUN_ID}/${GITHUB_RUN_ATTEMPT}'"
     required: false
 
 runs:
   using: "composite"
   steps:
     - name: Prepare artifact
-      id: prepare-artifact
       shell: bash -euxo pipefail {0}
       env:
         SOURCE: ${{ inputs.path }}
         ARCHIVE: /tmp/uploads/${{ inputs.name }}.tar.zst
-        SKIP_IF_DOES_NOT_EXIST: ${{ inputs.skip-if-does-not-exist }}
       run: |
         mkdir -p $(dirname $ARCHIVE)
 
@@ -39,27 +33,19 @@ runs:
         elif [ -f ${SOURCE} ]; then
           time tar -cf ${ARCHIVE} --zstd ${SOURCE}
         elif ! ls ${SOURCE} > /dev/null 2>&1; then
-          if [ "${SKIP_IF_DOES_NOT_EXIST}" = "true" ]; then
-            echo 'SKIPPED=true' >> $GITHUB_OUTPUT
-            exit 0
-          else
-            echo >&2 "${SOURCE} does not exist"
-            exit 2
-          fi
+          echo >&2 "${SOURCE} does not exist"
+          exit 2
         else
           echo >&2 "${SOURCE} is neither a directory nor a file, do not know how to handle it"
           exit 3
         fi
 
-        echo 'SKIPPED=false' >> $GITHUB_OUTPUT
-
     - name: Upload artifact
-      if: ${{ steps.prepare-artifact.outputs.SKIPPED == 'false' }}
       shell: bash -euxo pipefail {0}
       env:
         SOURCE: ${{ inputs.path }}
         ARCHIVE: /tmp/uploads/${{ inputs.name }}.tar.zst
-        PREFIX: artifacts/${{ inputs.prefix || format('{0}/{1}/{2}', github.event.pull_request.head.sha || github.sha, github.run_id , github.run_attempt) }}
+        PREFIX: artifacts/${{ inputs.prefix || format('{0}/{1}', github.run_id, github.run_attempt) }}
       run: |
         BUCKET=neon-github-public-dev
         FILENAME=$(basename $ARCHIVE)
.github/pull_request_template.md (11 changes)
@@ -1,3 +1,14 @@
 ## Problem
 
 ## Summary of changes
+
+## Checklist before requesting a review
+
+- [ ] I have performed a self-review of my code.
+- [ ] If it is a core feature, I have added thorough tests.
+- [ ] Do we need to implement analytics? if so did you add the relevant metrics to the dashboard?
+- [ ] If this PR requires public announcement, mark it with /release-notes label and add several sentences in this section.
+
+## Checklist before merging
+
+- [ ] Do not forget to reformat commit message to not include the above checklist
.github/workflows/_benchmarking_preparation.yml (168 changes)
@@ -1,168 +0,0 @@
-name: Prepare benchmarking databases by restoring dumps
-
-on:
-  workflow_call:
-    # no inputs needed
-
-defaults:
-  run:
-    shell: bash -euxo pipefail {0}
-
-jobs:
-  setup-databases:
-    permissions:
-      contents: write
-      statuses: write
-      id-token: write # aws-actions/configure-aws-credentials
-    strategy:
-      fail-fast: false
-      matrix:
-        platform: [ aws-rds-postgres, aws-aurora-serverless-v2-postgres, neon ]
-        database: [ clickbench, tpch, userexample ]
-
-    env:
-      LD_LIBRARY_PATH: /tmp/neon/pg_install/v16/lib
-      PLATFORM: ${{ matrix.platform }}
-      PG_BINARIES: /tmp/neon/pg_install/v16/bin
-
-    runs-on: [ self-hosted, us-east-2, x64 ]
-    container:
-      image: neondatabase/build-tools:pinned-bookworm
-      credentials:
-        username: ${{ secrets.NEON_DOCKERHUB_USERNAME }}
-        password: ${{ secrets.NEON_DOCKERHUB_PASSWORD }}
-      options: --init
-
-    steps:
-      - name: Set up Connection String
-        id: set-up-prep-connstr
-        run: |
-          case "${PLATFORM}" in
-            neon)
-              CONNSTR=${{ secrets.BENCHMARK_CAPTEST_CONNSTR }}
-              ;;
-            aws-rds-postgres)
-              CONNSTR=${{ secrets.BENCHMARK_RDS_POSTGRES_CONNSTR }}
-              ;;
-            aws-aurora-serverless-v2-postgres)
-              CONNSTR=${{ secrets.BENCHMARK_RDS_AURORA_CONNSTR }}
-              ;;
-            *)
-              echo >&2 "Unknown PLATFORM=${PLATFORM}"
-              exit 1
-              ;;
-          esac
-
-          echo "connstr=${CONNSTR}" >> $GITHUB_OUTPUT
-
-      - uses: actions/checkout@v4
-
-      - name: Configure AWS credentials
-        uses: aws-actions/configure-aws-credentials@v4
-        with:
-          aws-region: eu-central-1
-          role-to-assume: ${{ vars.DEV_AWS_OIDC_ROLE_ARN }}
-          role-duration-seconds: 18000 # 5 hours
-
-      - name: Download Neon artifact
-        uses: ./.github/actions/download
-        with:
-          name: neon-${{ runner.os }}-${{ runner.arch }}-release-artifact
-          path: /tmp/neon/
-          prefix: latest
-
-      # we create a table that has one row for each database that we want to restore with the status whether the restore is done
-      - name: Create benchmark_restore_status table if it does not exist
-        env:
-          BENCHMARK_CONNSTR: ${{ steps.set-up-prep-connstr.outputs.connstr }}
-          DATABASE_NAME: ${{ matrix.database }}
-        # to avoid a race condition of multiple jobs trying to create the table at the same time,
-        # we use an advisory lock
-        run: |
-          ${PG_BINARIES}/psql "${{ env.BENCHMARK_CONNSTR }}" -c "
-            SELECT pg_advisory_lock(4711);
-            CREATE TABLE IF NOT EXISTS benchmark_restore_status (
-              databasename text primary key,
-              restore_done boolean
-            );
-            SELECT pg_advisory_unlock(4711);
-          "
-
-      - name: Check if restore is already done
-        id: check-restore-done
-        env:
-          BENCHMARK_CONNSTR: ${{ steps.set-up-prep-connstr.outputs.connstr }}
-          DATABASE_NAME: ${{ matrix.database }}
-        run: |
-          skip=false
-          if ${PG_BINARIES}/psql "${{ env.BENCHMARK_CONNSTR }}" -tAc "SELECT 1 FROM benchmark_restore_status WHERE databasename='${{ env.DATABASE_NAME }}' AND restore_done=true;" | grep -q 1; then
-            echo "Restore already done for database ${{ env.DATABASE_NAME }} on platform ${{ env.PLATFORM }}. Skipping this database."
-            skip=true
-          fi
-          echo "skip=${skip}" | tee -a $GITHUB_OUTPUT
-
-      - name: Check and create database if it does not exist
-        if: steps.check-restore-done.outputs.skip != 'true'
-        env:
-          BENCHMARK_CONNSTR: ${{ steps.set-up-prep-connstr.outputs.connstr }}
-          DATABASE_NAME: ${{ matrix.database }}
-        run: |
-          DB_EXISTS=$(${PG_BINARIES}/psql "${{ env.BENCHMARK_CONNSTR }}" -tAc "SELECT 1 FROM pg_database WHERE datname='${{ env.DATABASE_NAME }}'")
-          if [ "$DB_EXISTS" != "1" ]; then
-            echo "Database ${{ env.DATABASE_NAME }} does not exist. Creating it..."
-            ${PG_BINARIES}/psql "${{ env.BENCHMARK_CONNSTR }}" -c "CREATE DATABASE \"${{ env.DATABASE_NAME }}\";"
-          else
-            echo "Database ${{ env.DATABASE_NAME }} already exists."
-          fi
-
-      - name: Download dump from S3 to /tmp/dumps
-        if: steps.check-restore-done.outputs.skip != 'true'
-        env:
-          DATABASE_NAME: ${{ matrix.database }}
-        run: |
-          mkdir -p /tmp/dumps
-          aws s3 cp s3://neon-github-dev/performance/pgdumps/$DATABASE_NAME/$DATABASE_NAME.pg_dump /tmp/dumps/
-
-      - name: Replace database name in connection string
-        if: steps.check-restore-done.outputs.skip != 'true'
-        id: replace-dbname
-        env:
-          DATABASE_NAME: ${{ matrix.database }}
-          BENCHMARK_CONNSTR: ${{ steps.set-up-prep-connstr.outputs.connstr }}
-        run: |
-          # Extract the part before the database name
-          base_connstr="${BENCHMARK_CONNSTR%/*}"
-          # Extract the query parameters (if any) after the database name
-          query_params="${BENCHMARK_CONNSTR#*\?}"
-          # Reconstruct the new connection string
-          if [ "$query_params" != "$BENCHMARK_CONNSTR" ]; then
-            new_connstr="${base_connstr}/${DATABASE_NAME}?${query_params}"
-          else
-            new_connstr="${base_connstr}/${DATABASE_NAME}"
-          fi
-          echo "database_connstr=${new_connstr}" >> $GITHUB_OUTPUT
-
-      - name: Restore dump
-        if: steps.check-restore-done.outputs.skip != 'true'
-        env:
-          DATABASE_NAME: ${{ matrix.database }}
-          DATABASE_CONNSTR: ${{ steps.replace-dbname.outputs.database_connstr }}
-          # the following works only with larger computes:
-          # PGOPTIONS: "-c maintenance_work_mem=8388608 -c max_parallel_maintenance_workers=7"
-        # we add the || true because:
-        # the dumps were created with Neon and contain neon extensions that are not
-        # available in RDS, so we will always report an error, but we can ignore it
-        run: |
-          ${PG_BINARIES}/pg_restore --clean --if-exists --no-owner --jobs=4 \
-            -d "${DATABASE_CONNSTR}" /tmp/dumps/${DATABASE_NAME}.pg_dump || true
-
-      - name: Update benchmark_restore_status table
-        if: steps.check-restore-done.outputs.skip != 'true'
-        env:
-          BENCHMARK_CONNSTR: ${{ steps.set-up-prep-connstr.outputs.connstr }}
-          DATABASE_NAME: ${{ matrix.database }}
-        run: |
-          ${PG_BINARIES}/psql "${{ env.BENCHMARK_CONNSTR }}" -c "
-            INSERT INTO benchmark_restore_status (databasename, restore_done) VALUES ('${{ env.DATABASE_NAME }}', true)
-            ON CONFLICT (databasename) DO UPDATE SET restore_done = true;
-          "
.github/workflows/_build-and-test-locally.yml (311 changes)
@@ -1,311 +0,0 @@
-name: Build and Test Locally
-
-on:
-  workflow_call:
-    inputs:
-      arch:
-        description: 'x64 or arm64'
-        required: true
-        type: string
-      build-tag:
-        description: 'build tag'
-        required: true
-        type: string
-      build-tools-image:
-        description: 'build-tools image'
-        required: true
-        type: string
-      build-type:
-        description: 'debug or release'
-        required: true
-        type: string
-      test-cfg:
-        description: 'a json object of postgres versions and lfc states to run regression tests on'
-        required: true
-        type: string
-
-defaults:
-  run:
-    shell: bash -euxo pipefail {0}
-
-env:
-  RUST_BACKTRACE: 1
-  COPT: '-Werror'
-  AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_DEV }}
-  AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_KEY_DEV }}
-
-jobs:
-  build-neon:
-    runs-on: ${{ fromJson(format('["self-hosted", "{0}"]', inputs.arch == 'arm64' && 'large-arm64' || 'large')) }}
-    container:
-      image: ${{ inputs.build-tools-image }}
-      credentials:
-        username: ${{ secrets.NEON_DOCKERHUB_USERNAME }}
-        password: ${{ secrets.NEON_DOCKERHUB_PASSWORD }}
-      # Raise locked memory limit for tokio-epoll-uring.
-      # On 5.10 LTS kernels < 5.10.162 (and generally mainline kernels < 5.12),
-      # io_uring will account the memory of the CQ and SQ as locked.
-      # More details: https://github.com/neondatabase/neon/issues/6373#issuecomment-1905814391
-      options: --init --shm-size=512mb --ulimit memlock=67108864:67108864
-    env:
-      BUILD_TYPE: ${{ inputs.build-type }}
-      GIT_VERSION: ${{ github.event.pull_request.head.sha || github.sha }}
-      BUILD_TAG: ${{ inputs.build-tag }}
-
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          submodules: true
-
-      - name: Set pg 14 revision for caching
-        id: pg_v14_rev
-        run: echo pg_rev=$(git rev-parse HEAD:vendor/postgres-v14) >> $GITHUB_OUTPUT
-
-      - name: Set pg 15 revision for caching
-        id: pg_v15_rev
-        run: echo pg_rev=$(git rev-parse HEAD:vendor/postgres-v15) >> $GITHUB_OUTPUT
-
-      - name: Set pg 16 revision for caching
-        id: pg_v16_rev
-        run: echo pg_rev=$(git rev-parse HEAD:vendor/postgres-v16) >> $GITHUB_OUTPUT
-
-      - name: Set pg 17 revision for caching
-        id: pg_v17_rev
-        run: echo pg_rev=$(git rev-parse HEAD:vendor/postgres-v17) >> $GITHUB_OUTPUT
-
-      # Set some environment variables used by all the steps.
-      #
-      # CARGO_FLAGS is extra options to pass to "cargo build", "cargo test" etc.
-      # It also includes --features, if any
-      #
-      # CARGO_FEATURES is passed to "cargo metadata". It is separate from CARGO_FLAGS,
-      # because "cargo metadata" doesn't accept --release or --debug options
-      #
-      # We run tests with addtional features, that are turned off by default (e.g. in release builds), see
-      # corresponding Cargo.toml files for their descriptions.
-      - name: Set env variables
-        env:
-          ARCH: ${{ inputs.arch }}
-        run: |
-          CARGO_FEATURES="--features testing"
-          if [[ $BUILD_TYPE == "debug" && $ARCH == 'x64' ]]; then
-            cov_prefix="scripts/coverage --profraw-prefix=$GITHUB_JOB --dir=/tmp/coverage run"
-            CARGO_FLAGS="--locked"
-          elif [[ $BUILD_TYPE == "debug" ]]; then
-            cov_prefix=""
-            CARGO_FLAGS="--locked"
-          elif [[ $BUILD_TYPE == "release" ]]; then
-            cov_prefix=""
-            CARGO_FLAGS="--locked --release"
-          fi
-          {
-            echo "cov_prefix=${cov_prefix}"
-            echo "CARGO_FEATURES=${CARGO_FEATURES}"
-            echo "CARGO_FLAGS=${CARGO_FLAGS}"
-            echo "CARGO_HOME=${GITHUB_WORKSPACE}/.cargo"
-          } >> $GITHUB_ENV
-
-      - name: Cache postgres v14 build
-        id: cache_pg_14
-        uses: actions/cache@v4
-        with:
-          path: pg_install/v14
-          key: v1-${{ runner.os }}-${{ runner.arch }}-${{ inputs.build-type }}-pg-${{ steps.pg_v14_rev.outputs.pg_rev }}-bookworm-${{ hashFiles('Makefile', 'build-tools.Dockerfile') }}
-
-      - name: Cache postgres v15 build
-        id: cache_pg_15
-        uses: actions/cache@v4
-        with:
-          path: pg_install/v15
-          key: v1-${{ runner.os }}-${{ runner.arch }}-${{ inputs.build-type }}-pg-${{ steps.pg_v15_rev.outputs.pg_rev }}-bookworm-${{ hashFiles('Makefile', 'build-tools.Dockerfile') }}
-
-      - name: Cache postgres v16 build
-        id: cache_pg_16
-        uses: actions/cache@v4
-        with:
-          path: pg_install/v16
-          key: v1-${{ runner.os }}-${{ runner.arch }}-${{ inputs.build-type }}-pg-${{ steps.pg_v16_rev.outputs.pg_rev }}-bookworm-${{ hashFiles('Makefile', 'build-tools.Dockerfile') }}
-
-      - name: Cache postgres v17 build
-        id: cache_pg_17
-        uses: actions/cache@v4
-        with:
-          path: pg_install/v17
-          key: v1-${{ runner.os }}-${{ runner.arch }}-${{ inputs.build-type }}-pg-${{ steps.pg_v17_rev.outputs.pg_rev }}-bookworm-${{ hashFiles('Makefile', 'build-tools.Dockerfile') }}
-
-      - name: Build postgres v14
-        if: steps.cache_pg_14.outputs.cache-hit != 'true'
-        run: mold -run make postgres-v14 -j$(nproc)
-
-      - name: Build postgres v15
-        if: steps.cache_pg_15.outputs.cache-hit != 'true'
-        run: mold -run make postgres-v15 -j$(nproc)
-
-      - name: Build postgres v16
-        if: steps.cache_pg_16.outputs.cache-hit != 'true'
-        run: mold -run make postgres-v16 -j$(nproc)
-
-      - name: Build postgres v17
-        if: steps.cache_pg_17.outputs.cache-hit != 'true'
-        run: mold -run make postgres-v17 -j$(nproc)
-
-      - name: Build neon extensions
-        run: mold -run make neon-pg-ext -j$(nproc)
-
-      - name: Build walproposer-lib
-        run: mold -run make walproposer-lib -j$(nproc)
-
-      - name: Run cargo build
-        run: |
-          PQ_LIB_DIR=$(pwd)/pg_install/v16/lib
-          export PQ_LIB_DIR
-          ${cov_prefix} mold -run cargo build $CARGO_FLAGS $CARGO_FEATURES --bins --tests
-
-      # Do install *before* running rust tests because they might recompile the
-      # binaries with different features/flags.
-      - name: Install rust binaries
-        env:
-          ARCH: ${{ inputs.arch }}
-        run: |
-          # Install target binaries
-          mkdir -p /tmp/neon/bin/
-          binaries=$(
-            ${cov_prefix} cargo metadata $CARGO_FEATURES --format-version=1 --no-deps |
-            jq -r '.packages[].targets[] | select(.kind | index("bin")) | .name'
-          )
-          for bin in $binaries; do
-            SRC=target/$BUILD_TYPE/$bin
-            DST=/tmp/neon/bin/$bin
-            cp "$SRC" "$DST"
-          done
-
-          # Install test executables and write list of all binaries (for code coverage)
-          if [[ $BUILD_TYPE == "debug" && $ARCH == 'x64' ]]; then
-            # Keep bloated coverage data files away from the rest of the artifact
-            mkdir -p /tmp/coverage/
-
-            mkdir -p /tmp/neon/test_bin/
-
-            test_exe_paths=$(
-              ${cov_prefix} cargo test $CARGO_FLAGS $CARGO_FEATURES --message-format=json --no-run |
-              jq -r '.executable | select(. != null)'
-            )
-            for bin in $test_exe_paths; do
-              SRC=$bin
-              DST=/tmp/neon/test_bin/$(basename $bin)
-
-              # We don't need debug symbols for code coverage, so strip them out to make
-              # the artifact smaller.
-              strip "$SRC" -o "$DST"
-              echo "$DST" >> /tmp/coverage/binaries.list
-            done
-
-            for bin in $binaries; do
-              echo "/tmp/neon/bin/$bin" >> /tmp/coverage/binaries.list
-            done
-          fi
-
-      - name: Run rust tests
-        env:
-          NEXTEST_RETRIES: 3
-        run: |
-          PQ_LIB_DIR=$(pwd)/pg_install/v16/lib
-          export PQ_LIB_DIR
-          LD_LIBRARY_PATH=$(pwd)/pg_install/v17/lib
-          export LD_LIBRARY_PATH
-
-          #nextest does not yet support running doctests
-          ${cov_prefix} cargo test --doc $CARGO_FLAGS $CARGO_FEATURES
-
-          # run all non-pageserver tests
-          ${cov_prefix} cargo nextest run $CARGO_FLAGS $CARGO_FEATURES -E '!package(pageserver)'
-
-          # run pageserver tests with different settings
-          for io_engine in std-fs tokio-epoll-uring ; do
-            NEON_PAGESERVER_UNIT_TEST_VIRTUAL_FILE_IOENGINE=$io_engine ${cov_prefix} cargo nextest run $CARGO_FLAGS $CARGO_FEATURES -E 'package(pageserver)'
-          done
-
-          # Run separate tests for real S3
-          export ENABLE_REAL_S3_REMOTE_STORAGE=nonempty
-          export REMOTE_STORAGE_S3_BUCKET=neon-github-ci-tests
-          export REMOTE_STORAGE_S3_REGION=eu-central-1
-          ${cov_prefix} cargo nextest run $CARGO_FLAGS $CARGO_FEATURES -E 'package(remote_storage)' -E 'test(test_real_s3)'
-
-          # Run separate tests for real Azure Blob Storage
-          # XXX: replace region with `eu-central-1`-like region
-          export ENABLE_REAL_AZURE_REMOTE_STORAGE=y
-          export AZURE_STORAGE_ACCOUNT="${{ secrets.AZURE_STORAGE_ACCOUNT_DEV }}"
-          export AZURE_STORAGE_ACCESS_KEY="${{ secrets.AZURE_STORAGE_ACCESS_KEY_DEV }}"
-          export REMOTE_STORAGE_AZURE_CONTAINER="${{ vars.REMOTE_STORAGE_AZURE_CONTAINER }}"
-          export REMOTE_STORAGE_AZURE_REGION="${{ vars.REMOTE_STORAGE_AZURE_REGION }}"
-          ${cov_prefix} cargo nextest run $CARGO_FLAGS $CARGO_FEATURES -E 'package(remote_storage)' -E 'test(test_real_azure)'
-
-      - name: Install postgres binaries
-        run: |
-          # Use tar to copy files matching the pattern, preserving the paths in the destionation
-          tar c \
-            pg_install/v* \
-            pg_install/build/*/src/test/regress/*.so \
-            pg_install/build/*/src/test/regress/pg_regress \
-            pg_install/build/*/src/test/isolation/isolationtester \
-            pg_install/build/*/src/test/isolation/pg_isolation_regress \
-            | tar x -C /tmp/neon
-
-      - name: Upload Neon artifact
-        uses: ./.github/actions/upload
-        with:
-          name: neon-${{ runner.os }}-${{ runner.arch }}-${{ inputs.build-type }}-artifact
-          path: /tmp/neon
-
-      # XXX: keep this after the binaries.list is formed, so the coverage can properly work later
-      - name: Merge and upload coverage data
-        if: inputs.build-type == 'debug'
-        uses: ./.github/actions/save-coverage-data
-
-  regress-tests:
-    # Don't run regression tests on debug arm64 builds
-    if: inputs.build-type != 'debug' || inputs.arch != 'arm64'
-    needs: [ build-neon ]
-    runs-on: ${{ fromJson(format('["self-hosted", "{0}"]', inputs.arch == 'arm64' && 'large-arm64' || 'large')) }}
-    container:
-      image: ${{ inputs.build-tools-image }}
-      credentials:
-        username: ${{ secrets.NEON_DOCKERHUB_USERNAME }}
-        password: ${{ secrets.NEON_DOCKERHUB_PASSWORD }}
-      # for changed limits, see comments on `options:` earlier in this file
-      options: --init --shm-size=512mb --ulimit memlock=67108864:67108864
-    strategy:
-      fail-fast: false
-      matrix: ${{ fromJSON(format('{{"include":{0}}}', inputs.test-cfg)) }}
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          submodules: true
-
-      - name: Pytest regression tests
-        continue-on-error: ${{ matrix.lfc_state == 'with-lfc' }}
-        uses: ./.github/actions/run-python-test-set
-        timeout-minutes: 60
-        with:
-          build_type: ${{ inputs.build-type }}
-          test_selection: regress
-          needs_postgres_source: true
-          run_with_real_s3: true
-          real_s3_bucket: neon-github-ci-tests
-          real_s3_region: eu-central-1
-          rerun_failed: true
-          pg_version: ${{ matrix.pg_version }}
-        env:
-          TEST_RESULT_CONNSTR: ${{ secrets.REGRESS_TEST_RESULT_CONNSTR_NEW }}
-          CHECK_ONDISK_DATA_COMPATIBILITY: nonempty
-          BUILD_TAG: ${{ inputs.build-tag }}
-          PAGESERVER_VIRTUAL_FILE_IO_ENGINE: tokio-epoll-uring
-          USE_LFC: ${{ matrix.lfc_state == 'with-lfc' && 'true' || 'false' }}
-
-      # Temporary disable this step until we figure out why it's so flaky
-      # Ref https://github.com/neondatabase/neon/issues/4540
-      - name: Merge and upload coverage data
-        if: |
-          false &&
-          inputs.build-type == 'debug' && matrix.pg_version == 'v16'
-        uses: ./.github/actions/save-coverage-data
.github/workflows/_check-codestyle-python.yml (37 changes)
@@ -1,37 +0,0 @@
-name: Check Codestyle Python
-
-on:
-  workflow_call:
-    inputs:
-      build-tools-image:
-        description: 'build-tools image'
-        required: true
-        type: string
-
-defaults:
-  run:
-    shell: bash -euxo pipefail {0}
-
-jobs:
-  check-codestyle-python:
-    runs-on: [ self-hosted, small ]
-    container:
-      image: ${{ inputs.build-tools-image }}
-      credentials:
-        username: ${{ secrets.NEON_DOCKERHUB_USERNAME }}
-        password: ${{ secrets.NEON_DOCKERHUB_PASSWORD }}
-      options: --init
-
-    steps:
-      - uses: actions/checkout@v4
-
-      - uses: actions/cache@v4
-        with:
-          path: ~/.cache/pypoetry/virtualenvs
-          key: v2-${{ runner.os }}-${{ runner.arch }}-python-deps-bookworm-${{ hashFiles('poetry.lock') }}
-
-      - run: ./scripts/pysync
-
-      - run: poetry run ruff check .
-      - run: poetry run ruff format --check .
-      - run: poetry run mypy .
.github/workflows/_create-release-pr.yml (79 changes)
@@ -1,79 +0,0 @@
-name: Create Release PR
-
-on:
-  workflow_call:
-    inputs:
-      component-name:
-        description: 'Component name'
-        required: true
-        type: string
-      release-branch:
-        description: 'Release branch'
-        required: true
-        type: string
-    secrets:
-      ci-access-token:
-        description: 'CI access token'
-        required: true
-
-defaults:
-  run:
-    shell: bash -euo pipefail {0}
-
-jobs:
-  create-release-branch:
-    runs-on: ubuntu-22.04
-
-    permissions:
-      contents: write # for `git push`
-
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          ref: main
-
-      - name: Set variables
-        id: vars
-        env:
-          COMPONENT_NAME: ${{ inputs.component-name }}
-          RELEASE_BRANCH: ${{ inputs.release-branch }}
-        run: |
-          today=$(date +'%Y-%m-%d')
-          echo "title=${COMPONENT_NAME} release ${today}" | tee -a ${GITHUB_OUTPUT}
-          echo "rc-branch=rc/${RELEASE_BRANCH}/${today}" | tee -a ${GITHUB_OUTPUT}
-
-      - name: Configure git
-        run: |
-          git config user.name "github-actions[bot]"
-          git config user.email "41898282+github-actions[bot]@users.noreply.github.com"
-
-      - name: Create RC branch
-        env:
-          RC_BRANCH: ${{ steps.vars.outputs.rc-branch }}
-          TITLE: ${{ steps.vars.outputs.title }}
-        run: |
-          git checkout -b "${RC_BRANCH}"
-
-          # create an empty commit to distinguish workflow runs
-          # from other possible releases from the same commit
-          git commit --allow-empty -m "${TITLE}"
-
-          git push origin "${RC_BRANCH}"
-
-      - name: Create a PR into ${{ inputs.release-branch }}
-        env:
-          GH_TOKEN: ${{ secrets.ci-access-token }}
-          RC_BRANCH: ${{ steps.vars.outputs.rc-branch }}
-          RELEASE_BRANCH: ${{ inputs.release-branch }}
-          TITLE: ${{ steps.vars.outputs.title }}
-        run: |
-          cat << EOF > body.md
-          ## ${TITLE}
-
-          **Please merge this Pull Request using 'Create a merge commit' button**
-          EOF
-
-          gh pr create --title "${TITLE}" \
-            --body-file "body.md" \
-            --head "${RC_BRANCH}" \
-            --base "${RELEASE_BRANCH}"
56 .github/workflows/_push-to-acr.yml vendored
@@ -1,56 +0,0 @@
name: Push images to ACR
on:
  workflow_call:
    inputs:
      client_id:
        description: Client ID of Azure managed identity or Entra app
        required: true
        type: string
      image_tag:
        description: Tag for the container image
        required: true
        type: string
      images:
        description: Images to push
        required: true
        type: string
      registry_name:
        description: Name of the container registry
        required: true
        type: string
      subscription_id:
        description: Azure subscription ID
        required: true
        type: string
      tenant_id:
        description: Azure tenant ID
        required: true
        type: string

jobs:
  push-to-acr:
    runs-on: ubuntu-22.04
    permissions:
      contents: read # This is required for actions/checkout
      id-token: write # This is required for Azure Login to work.

    steps:
      - name: Azure login
        uses: azure/login@6c251865b4e6290e7b78be643ea2d005bc51f69a # @v2.1.1
        with:
          client-id: ${{ inputs.client_id }}
          subscription-id: ${{ inputs.subscription_id }}
          tenant-id: ${{ inputs.tenant_id }}

      - name: Login to ACR
        run: |
          az acr login --name=${{ inputs.registry_name }}

      - name: Copy docker images to ACR ${{ inputs.registry_name }}
        run: |
          images='${{ inputs.images }}'
          for image in ${images}; do
            docker buildx imagetools create \
              -t ${{ inputs.registry_name }}.azurecr.io/neondatabase/${image}:${{ inputs.image_tag }} \
              neondatabase/${image}:${{ inputs.image_tag }}
          done
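Since this is a reusable `workflow_call` workflow, it is consumed from another workflow as a job that supplies the inputs above. A minimal caller sketch, where the job name, tag expression, registry and identity values are placeholders assumed for illustration, not values taken from this repository:

  push-images-to-acr:
    uses: ./.github/workflows/_push-to-acr.yml
    with:
      client_id: ${{ vars.AZURE_CLIENT_ID }}          # assumed repository variable
      image_tag: ${{ needs.tag.outputs.build-tag }}   # assumed output of an earlier job
      images: neon compute-node-v16
      registry_name: exampleneonregistry
      subscription_id: ${{ vars.AZURE_SUBSCRIPTION_ID }}
      tenant_id: ${{ vars.AZURE_TENANT_ID }}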
15 .github/workflows/actionlint.yml vendored
@@ -24,7 +24,7 @@ jobs:

  actionlint:
    needs: [ check-permissions ]
-   runs-on: ubuntu-22.04
+   runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: reviewdog/action-actionlint@v1
@@ -36,16 +36,3 @@ jobs:
          fail_on_error: true
          filter_mode: nofilter
          level: error

-     - name: Disallow 'ubuntu-latest' runners
-       run: |
-         PAT='^\s*runs-on:.*-latest'
-         if grep -ERq $PAT .github/workflows; then
-           grep -ERl $PAT .github/workflows |\
-             while read -r f
-             do
-               l=$(grep -nE $PAT $f | awk -F: '{print $1}' | head -1)
-               echo "::error file=$f,line=$l::Please use 'ubuntu-22.04' instead of 'ubuntu-latest'"
-             done
-           exit 1
-         fi
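The step removed above enforces pinned runners by scanning the workflow files for `runs-on: ...-latest` and emitting a `::error file=...,line=...::` annotation for the first match in each offending file. Roughly the same check can be reproduced as a single standalone step; a sketch assuming GNU grep is available on the runner:

      - name: Check for unpinned runners (sketch)
        run: |
          # print every workflow line that requests a *-latest runner and fail if any exist
          grep -RnE '^\s*runs-on:.*-latest' .github/workflows && exit 1 || true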
60 .github/workflows/approved-for-ci-run.yml vendored
@@ -18,7 +18,6 @@ on:

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number }}
- cancel-in-progress: false

env:
  GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@@ -44,7 +43,7 @@ jobs:
      contains(fromJSON('["opened", "synchronize", "reopened", "closed"]'), github.event.action) &&
      contains(github.event.pull_request.labels.*.name, 'approved-for-ci-run')

-   runs-on: ubuntu-22.04
+   runs-on: ubuntu-latest

    steps:
      - run: gh pr --repo "${GITHUB_REPOSITORY}" edit "${PR_NUMBER}" --remove-label "approved-for-ci-run"
@@ -60,7 +59,7 @@ jobs:
      github.event.action == 'labeled' &&
      contains(github.event.pull_request.labels.*.name, 'approved-for-ci-run')

-   runs-on: ubuntu-22.04
+   runs-on: ubuntu-latest

    steps:
      - run: gh pr --repo "${GITHUB_REPOSITORY}" edit "${PR_NUMBER}" --remove-label "approved-for-ci-run"
@@ -69,41 +68,15 @@ jobs:
        with:
          ref: main
          token: ${{ secrets.CI_ACCESS_TOKEN }}

-     - name: Look for existing PR
-       id: get-pr
-       env:
-         GH_TOKEN: ${{ secrets.CI_ACCESS_TOKEN }}
-       run: |
-         ALREADY_CREATED="$(gh pr --repo ${GITHUB_REPOSITORY} list --head ${BRANCH} --base main --json number --jq '.[].number')"
-         echo "ALREADY_CREATED=${ALREADY_CREATED}" >> ${GITHUB_OUTPUT}
-
-     - name: Get changed labels
-       id: get-labels
-       if: steps.get-pr.outputs.ALREADY_CREATED != ''
-       env:
-         ALREADY_CREATED: ${{ steps.get-pr.outputs.ALREADY_CREATED }}
-         GH_TOKEN: ${{ secrets.CI_ACCESS_TOKEN }}
-       run: |
-         LABELS_TO_REMOVE=$(comm -23 <(gh pr --repo ${GITHUB_REPOSITORY} view ${ALREADY_CREATED} --json labels --jq '.labels.[].name'| ( grep -E '^run' || true ) | sort) \
-                                     <(gh pr --repo ${GITHUB_REPOSITORY} view ${PR_NUMBER} --json labels --jq '.labels.[].name' | ( grep -E '^run' || true ) | sort ) |\
-                            ( grep -v run-e2e-tests-in-draft || true ) | paste -sd , -)
-         LABELS_TO_ADD=$(comm -13 <(gh pr --repo ${GITHUB_REPOSITORY} view ${ALREADY_CREATED} --json labels --jq '.labels.[].name'| ( grep -E '^run' || true ) |sort) \
-                                  <(gh pr --repo ${GITHUB_REPOSITORY} view ${PR_NUMBER} --json labels --jq '.labels.[].name' | ( grep -E '^run' || true ) | sort ) |\
-                         paste -sd , -)
-         echo "LABELS_TO_ADD=${LABELS_TO_ADD}" >> ${GITHUB_OUTPUT}
-         echo "LABELS_TO_REMOVE=${LABELS_TO_REMOVE}" >> ${GITHUB_OUTPUT}

      - run: gh pr checkout "${PR_NUMBER}"

      - run: git checkout -b "${BRANCH}"

      - run: git push --force origin "${BRANCH}"
-       if: steps.get-pr.outputs.ALREADY_CREATED == ''

      - name: Create a Pull Request for CI run (if required)
-       if: steps.get-pr.outputs.ALREADY_CREATED == ''
-       env:
+       env:
          GH_TOKEN: ${{ secrets.CI_ACCESS_TOKEN }}
        run: |
          cat << EOF > body.md
@@ -114,33 +87,16 @@ jobs:
          Feel free to review/comment/discuss the original PR #${PR_NUMBER}.
          EOF

-         LABELS=$( (gh pr --repo "${GITHUB_REPOSITORY}" view ${PR_NUMBER} --json labels --jq '.labels.[].name'; echo run-e2e-tests-in-draft )| \
-           grep -E '^run' | paste -sd , -)
+         ALREADY_CREATED="$(gh pr --repo ${GITHUB_REPOSITORY} list --head ${BRANCH} --base main --json number --jq '.[].number')"
+         if [ -z "${ALREADY_CREATED}" ]; then
          gh pr --repo "${GITHUB_REPOSITORY}" create --title "CI run for PR #${PR_NUMBER}" \
                --body-file "body.md" \
                --head "${BRANCH}" \
                --base "main" \
-               --label ${LABELS} \
+               --label "run-e2e-tests-in-draft" \
                --draft
-     - name: Modify the existing pull request (if required)
-       if: steps.get-pr.outputs.ALREADY_CREATED != ''
-       env:
-         GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-         LABELS_TO_ADD: ${{ steps.get-labels.outputs.LABELS_TO_ADD }}
-         LABELS_TO_REMOVE: ${{ steps.get-labels.outputs.LABELS_TO_REMOVE }}
-         ALREADY_CREATED: ${{ steps.get-pr.outputs.ALREADY_CREATED }}
-       run: |
-         ADD_CMD=
-         REMOVE_CMD=
-         [ -z "${LABELS_TO_ADD}" ] || ADD_CMD="--add-label ${LABELS_TO_ADD}"
-         [ -z "${LABELS_TO_REMOVE}" ] || REMOVE_CMD="--remove-label ${LABELS_TO_REMOVE}"
-         if [ -n "${ADD_CMD}" ] || [ -n "${REMOVE_CMD}" ]; then
-           gh pr --repo "${GITHUB_REPOSITORY}" edit ${ALREADY_CREATED} ${ADD_CMD} ${REMOVE_CMD}
          fi

-     - run: git push --force origin "${BRANCH}"
-       if: steps.get-pr.outputs.ALREADY_CREATED != ''

  cleanup:
    # Close PRs and delete branchs if the original PR is closed.

@@ -152,7 +108,7 @@ jobs:
    github.event.action == 'closed' &&
    github.event.pull_request.head.repo.full_name != github.repository

-   runs-on: ubuntu-22.04
+   runs-on: ubuntu-latest

    steps:
      - name: Close PR and delete `ci-run/pr-${{ env.PR_NUMBER }}` branch
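The deleted 'Get changed labels' step above computes the label delta between the original PR and the already-created CI-run PR with `comm` over sorted lists: `comm -23` yields labels present only on the CI-run PR (to be removed), and `comm -13` yields labels present only on the original PR (to be added). A minimal sketch of the same idea as a standalone step, using two hypothetical files with one label per line:

      - name: Diff two label sets (sketch)
        run: |
          # old.txt: labels currently on the CI-run PR; new.txt: labels on the original PR (assumed inputs)
          # comm requires sorted input
          sort -o old.txt old.txt
          sort -o new.txt new.txt
          echo "to remove: $(comm -23 old.txt new.txt | paste -sd , -)"
          echo "to add: $(comm -13 old.txt new.txt | paste -sd , -)"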
580 .github/workflows/benchmarking.yml vendored
@@ -12,6 +12,7 @@ on:
    # │ │ │ ┌───────────── month (1 - 12 or JAN-DEC)
    # │ │ │ │ ┌───────────── day of the week (0 - 6 or SUN-SAT)
    - cron: '0 3 * * *' # run once a day, timezone is utc

  workflow_dispatch: # adds ability to run this manually
    inputs:
      region_id:
@@ -37,11 +38,6 @@ on:
        description: 'AWS-RDS and AWS-AURORA normally only run on Saturday. Set this to true to run them on every workflow_dispatch'
        required: false
        default: false
-     run_only_pgvector_tests:
-       type: boolean
-       description: 'Run pgvector tests but no other tests. If not set, all tests including pgvector tests will be run'
-       required: false
-       default: false

defaults:
  run:
@@ -54,55 +50,28 @@ concurrency:

jobs:
  bench:
-   if: ${{ github.event.inputs.run_only_pgvector_tests == 'false' || github.event.inputs.run_only_pgvector_tests == null }}
-   permissions:
-     contents: write
-     statuses: write
-     id-token: write # aws-actions/configure-aws-credentials
-   strategy:
-     fail-fast: false
-     matrix:
-       include:
-         - DEFAULT_PG_VERSION: 16
-           PLATFORM: "neon-staging"
-           region_id: ${{ github.event.inputs.region_id || 'aws-us-east-2' }}
-           RUNNER: [ self-hosted, us-east-2, x64 ]
-         - DEFAULT_PG_VERSION: 16
-           PLATFORM: "azure-staging"
-           region_id: 'azure-eastus2'
-           RUNNER: [ self-hosted, eastus2, x64 ]
    env:
      TEST_PG_BENCH_DURATIONS_MATRIX: "300"
      TEST_PG_BENCH_SCALES_MATRIX: "10,100"
      POSTGRES_DISTRIB_DIR: /tmp/neon/pg_install
-     DEFAULT_PG_VERSION: ${{ matrix.DEFAULT_PG_VERSION }}
+     DEFAULT_PG_VERSION: 14
      TEST_OUTPUT: /tmp/test_output
      BUILD_TYPE: remote
      SAVE_PERF_REPORT: ${{ github.event.inputs.save_perf_report || ( github.ref_name == 'main' ) }}
-     PLATFORM: ${{ matrix.PLATFORM }}
+     PLATFORM: "neon-staging"

-   runs-on: ${{ matrix.RUNNER }}
+   runs-on: [ self-hosted, us-east-2, x64 ]
    container:
-     image: neondatabase/build-tools:pinned-bookworm
-     credentials:
-       username: ${{ secrets.NEON_DOCKERHUB_USERNAME }}
-       password: ${{ secrets.NEON_DOCKERHUB_PASSWORD }}
+     image: 369495373322.dkr.ecr.eu-central-1.amazonaws.com/build-tools:pinned
      options: --init

    steps:
      - uses: actions/checkout@v4

-     - name: Configure AWS credentials # necessary on Azure runners
-       uses: aws-actions/configure-aws-credentials@v4
-       with:
-         aws-region: eu-central-1
-         role-to-assume: ${{ vars.DEV_AWS_OIDC_ROLE_ARN }}
-         role-duration-seconds: 18000 # 5 hours

      - name: Download Neon artifact
        uses: ./.github/actions/download
        with:
-         name: neon-${{ runner.os }}-${{ runner.arch }}-release-artifact
+         name: neon-${{ runner.os }}-release-artifact
          path: /tmp/neon/
          prefix: latest

@@ -110,7 +79,7 @@ jobs:
        id: create-neon-project
        uses: ./.github/actions/neon-project-create
        with:
-         region_id: ${{ matrix.region_id }}
+         region_id: ${{ github.event.inputs.region_id || 'aws-us-east-2' }}
          postgres_version: ${{ env.DEFAULT_PG_VERSION }}
          api_key: ${{ secrets.NEON_STAGING_API_KEY }}

@@ -121,20 +90,10 @@ jobs:
          test_selection: performance
          run_in_parallel: false
          save_perf_report: ${{ env.SAVE_PERF_REPORT }}
-         pg_version: ${{ env.DEFAULT_PG_VERSION }}
-         aws_oicd_role_arn: ${{ vars.DEV_AWS_OIDC_ROLE_ARN }}
          # Set --sparse-ordering option of pytest-order plugin
          # to ensure tests are running in order of appears in the file.
          # It's important for test_perf_pgbench.py::test_pgbench_remote_* tests
-         extra_params:
-           -m remote_cluster
-           --sparse-ordering
-           --timeout 14400
-           --ignore test_runner/performance/test_perf_olap.py
-           --ignore test_runner/performance/test_perf_pgvector_queries.py
-           --ignore test_runner/performance/test_logical_replication.py
-           --ignore test_runner/performance/test_physical_replication.py
-           --ignore test_runner/performance/test_perf_ingest_using_pgcopydb.py
+         extra_params: -m remote_cluster --sparse-ordering --timeout 5400 --ignore test_runner/performance/test_perf_olap.py
        env:
          BENCHMARK_CONNSTR: ${{ steps.create-neon-project.outputs.dsn }}
          VIP_VAP_ACCESS_TOKEN: "${{ secrets.VIP_VAP_ACCESS_TOKEN }}"
@@ -148,134 +107,30 @@ jobs:
          api_key: ${{ secrets.NEON_STAGING_API_KEY }}

      - name: Create Allure report
-       id: create-allure-report
        if: ${{ !cancelled() }}
        uses: ./.github/actions/allure-report-generate
-       with:
-         aws_oicd_role_arn: ${{ vars.DEV_AWS_OIDC_ROLE_ARN }}

      - name: Post to a Slack channel
        if: ${{ github.event.schedule && failure() }}
        uses: slackapi/slack-github-action@v1
        with:
-         channel-id: "C06KHQVQ7U3" # on-call-qa-staging-stream
+         channel-id: "C033QLM5P7D" # dev-staging-stream
-         slack-message: |
-           Periodic perf testing: ${{ job.status }}
-           <${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}|GitHub Run>
-           <${{ steps.create-allure-report.outputs.report-url }}|Allure report>
+         slack-message: "Periodic perf testing: ${{ job.status }}\n${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"
-       env:
-         SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}
-
- replication-tests:
-   if: ${{ github.event.inputs.run_only_pgvector_tests == 'false' || github.event.inputs.run_only_pgvector_tests == null }}
-   permissions:
-     contents: write
-     statuses: write
-     id-token: write # aws-actions/configure-aws-credentials
-   env:
-     POSTGRES_DISTRIB_DIR: /tmp/neon/pg_install
-     DEFAULT_PG_VERSION: 16
-     TEST_OUTPUT: /tmp/test_output
-     BUILD_TYPE: remote
-     SAVE_PERF_REPORT: ${{ github.event.inputs.save_perf_report || ( github.ref_name == 'main' ) }}
-     PLATFORM: "neon-staging"
-
-   runs-on: [ self-hosted, us-east-2, x64 ]
-   container:
-     image: neondatabase/build-tools:pinned-bookworm
-     credentials:
-       username: ${{ secrets.NEON_DOCKERHUB_USERNAME }}
-       password: ${{ secrets.NEON_DOCKERHUB_PASSWORD }}
-     options: --init
-
-   steps:
-     - uses: actions/checkout@v4
-
-     - name: Configure AWS credentials
-       uses: aws-actions/configure-aws-credentials@v4
-       with:
-         aws-region: eu-central-1
-         role-to-assume: ${{ vars.DEV_AWS_OIDC_ROLE_ARN }}
-         role-duration-seconds: 18000 # 5 hours
-
-     - name: Download Neon artifact
-       uses: ./.github/actions/download
-       with:
-         name: neon-${{ runner.os }}-${{ runner.arch }}-release-artifact
-         path: /tmp/neon/
-         prefix: latest
-
-     - name: Run Logical Replication benchmarks
-       uses: ./.github/actions/run-python-test-set
-       with:
-         build_type: ${{ env.BUILD_TYPE }}
-         test_selection: performance/test_logical_replication.py
-         run_in_parallel: false
-         save_perf_report: ${{ env.SAVE_PERF_REPORT }}
-         extra_params: -m remote_cluster --timeout 5400
-         pg_version: ${{ env.DEFAULT_PG_VERSION }}
-         aws_oicd_role_arn: ${{ vars.DEV_AWS_OIDC_ROLE_ARN }}
-       env:
-         VIP_VAP_ACCESS_TOKEN: "${{ secrets.VIP_VAP_ACCESS_TOKEN }}"
-         PERF_TEST_RESULT_CONNSTR: "${{ secrets.PERF_TEST_RESULT_CONNSTR }}"
-         NEON_API_KEY: ${{ secrets.NEON_STAGING_API_KEY }}
-         BENCHMARK_PROJECT_ID_PUB: ${{ vars.BENCHMARK_PROJECT_ID_PUB }}
-         BENCHMARK_PROJECT_ID_SUB: ${{ vars.BENCHMARK_PROJECT_ID_SUB }}
-
-     - name: Run Physical Replication benchmarks
-       uses: ./.github/actions/run-python-test-set
-       with:
-         build_type: ${{ env.BUILD_TYPE }}
-         test_selection: performance/test_physical_replication.py
-         run_in_parallel: false
-         save_perf_report: ${{ env.SAVE_PERF_REPORT }}
-         extra_params: -m remote_cluster --timeout 5400
-         pg_version: ${{ env.DEFAULT_PG_VERSION }}
-         aws_oicd_role_arn: ${{ vars.DEV_AWS_OIDC_ROLE_ARN }}
-       env:
-         VIP_VAP_ACCESS_TOKEN: "${{ secrets.VIP_VAP_ACCESS_TOKEN }}"
-         PERF_TEST_RESULT_CONNSTR: "${{ secrets.PERF_TEST_RESULT_CONNSTR }}"
-         NEON_API_KEY: ${{ secrets.NEON_STAGING_API_KEY }}
-
-     - name: Create Allure report
-       id: create-allure-report
-       if: ${{ !cancelled() }}
-       uses: ./.github/actions/allure-report-generate
-       with:
-         store-test-results-into-db: true
-         aws_oicd_role_arn: ${{ vars.DEV_AWS_OIDC_ROLE_ARN }}
-       env:
-         REGRESS_TEST_RESULT_CONNSTR_NEW: ${{ secrets.REGRESS_TEST_RESULT_CONNSTR_NEW }}
-
-     # Post both success and failure to the Slack channel
-     - name: Post to a Slack channel
-       if: ${{ github.event.schedule && !cancelled() }}
-       uses: slackapi/slack-github-action@v1
-       with:
-         channel-id: "C06T9AMNDQQ" # on-call-compute-staging-stream
-         slack-message: |
-           Periodic replication testing: ${{ job.status }}
-           <${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}|GitHub Run>
-           <${{ steps.create-allure-report.outputs.report-url }}|Allure report>
        env:
          SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}

  generate-matrices:
-   if: ${{ github.event.inputs.run_only_pgvector_tests == 'false' || github.event.inputs.run_only_pgvector_tests == null }}
    # Create matrices for the benchmarking jobs, so we run benchmarks on rds only once a week (on Saturday)
    #
    # Available platforms:
-   # - neonvm-captest-new: Freshly created project (1 CU)
+   # - neon-captest-new: Freshly created project (1 CU)
-   # - neonvm-captest-freetier: Use freetier-sized compute (0.25 CU)
+   # - neon-captest-freetier: Use freetier-sized compute (0.25 CU)
-   # - neonvm-captest-azure-new: Freshly created project (1 CU) in azure region
-   # - neonvm-captest-azure-freetier: Use freetier-sized compute (0.25 CU) in azure region
-   # - neonvm-captest-reuse: Reusing existing project
+   # - neon-captest-reuse: Reusing existing project
    # - rds-aurora: Aurora Postgres Serverless v2 with autoscaling from 0.5 to 2 ACUs
    # - rds-postgres: RDS Postgres db.m5.large instance (2 vCPU, 8 GiB) with gp3 EBS storage
    env:
      RUN_AWS_RDS_AND_AURORA: ${{ github.event.inputs.run_AWS_RDS_AND_AURORA || 'false' }}
-     DEFAULT_REGION_ID: ${{ github.event.inputs.region_id || 'aws-us-east-2' }}
-   runs-on: ubuntu-22.04
+   runs-on: ubuntu-latest
    outputs:
      pgbench-compare-matrix: ${{ steps.pgbench-compare-matrix.outputs.matrix }}
      olap-compare-matrix: ${{ steps.olap-compare-matrix.outputs.matrix }}
@@ -285,37 +140,22 @@ jobs:
      - name: Generate matrix for pgbench benchmark
        id: pgbench-compare-matrix
        run: |
-         region_id_default=${{ env.DEFAULT_REGION_ID }}
-         runner_default='["self-hosted", "us-east-2", "x64"]'
-         runner_azure='["self-hosted", "eastus2", "x64"]'
-         image_default="neondatabase/build-tools:pinned-bookworm"
          matrix='{
-           "pg_version" : [
-             16
-           ],
-           "region_id" : [
-             "'"$region_id_default"'"
-           ],
            "platform": [
-             "neonvm-captest-new",
+             "neon-captest-new",
-             "neonvm-captest-reuse",
+             "neon-captest-reuse",
              "neonvm-captest-new"
            ],
            "db_size": [ "10gb" ],
-           "runner": ['"$runner_default"'],
-           "image": [ "'"$image_default"'" ],
-           "include": [{ "pg_version": 16, "region_id": "'"$region_id_default"'", "platform": "neonvm-captest-freetier", "db_size": "3gb" ,"runner": '"$runner_default"', "image": "'"$image_default"'" },
-             { "pg_version": 16, "region_id": "'"$region_id_default"'", "platform": "neonvm-captest-new", "db_size": "10gb","runner": '"$runner_default"', "image": "'"$image_default"'" },
-             { "pg_version": 16, "region_id": "'"$region_id_default"'", "platform": "neonvm-captest-new", "db_size": "50gb","runner": '"$runner_default"', "image": "'"$image_default"'" },
-             { "pg_version": 16, "region_id": "azure-eastus2", "platform": "neonvm-azure-captest-freetier", "db_size": "3gb" ,"runner": '"$runner_azure"', "image": "neondatabase/build-tools:pinned-bookworm" },
-             { "pg_version": 16, "region_id": "azure-eastus2", "platform": "neonvm-azure-captest-new", "db_size": "10gb","runner": '"$runner_azure"', "image": "neondatabase/build-tools:pinned-bookworm" },
-             { "pg_version": 16, "region_id": "azure-eastus2", "platform": "neonvm-azure-captest-new", "db_size": "50gb","runner": '"$runner_azure"', "image": "neondatabase/build-tools:pinned-bookworm" },
-             { "pg_version": 16, "region_id": "'"$region_id_default"'", "platform": "neonvm-captest-sharding-reuse", "db_size": "50gb","runner": '"$runner_default"', "image": "'"$image_default"'" }]
+           "include": [{ "platform": "neon-captest-freetier", "db_size": "3gb" },
+                       { "platform": "neon-captest-new", "db_size": "50gb" },
+                       { "platform": "neonvm-captest-freetier", "db_size": "3gb" },
+                       { "platform": "neonvm-captest-new", "db_size": "50gb" }]
          }'

-         if [ "$(date +%A)" = "Saturday" ] || [ ${RUN_AWS_RDS_AND_AURORA} = "true" ]; then
+         if [ "$(date +%A)" = "Saturday" ]; then
-           matrix=$(echo "$matrix" | jq '.include += [{ "pg_version": 16, "region_id": "'"$region_id_default"'", "platform": "rds-postgres", "db_size": "10gb","runner": '"$runner_default"', "image": "'"$image_default"'" },
-             { "pg_version": 16, "region_id": "'"$region_id_default"'", "platform": "rds-aurora", "db_size": "10gb","runner": '"$runner_default"', "image": "'"$image_default"'" }]')
+           matrix=$(echo "$matrix" | jq '.include += [{ "platform": "rds-postgres", "db_size": "10gb"},
+             { "platform": "rds-aurora", "db_size": "50gb"}]')
          fi

          echo "matrix=$(echo "$matrix" | jq --compact-output '.')" >> $GITHUB_OUTPUT
@@ -325,13 +165,13 @@ jobs:
        run: |
          matrix='{
            "platform": [
-             "neonvm-captest-reuse"
+             "neon-captest-reuse"
            ]
          }'

          if [ "$(date +%A)" = "Saturday" ] || [ ${RUN_AWS_RDS_AND_AURORA} = "true" ]; then
            matrix=$(echo "$matrix" | jq '.include += [{ "platform": "rds-postgres" },
              { "platform": "rds-aurora" }]')
          fi

          echo "matrix=$(echo "$matrix" | jq --compact-output '.')" >> $GITHUB_OUTPUT
@@ -341,7 +181,7 @@ jobs:
        run: |
          matrix='{
            "platform": [
-             "neonvm-captest-reuse"
+             "neon-captest-reuse"
            ],
            "scale": [
              "10"
@@ -350,22 +190,13 @@ jobs:

          if [ "$(date +%A)" = "Saturday" ] || [ ${RUN_AWS_RDS_AND_AURORA} = "true" ]; then
            matrix=$(echo "$matrix" | jq '.include += [{ "platform": "rds-postgres", "scale": "10" },
              { "platform": "rds-aurora", "scale": "10" }]')
          fi

          echo "matrix=$(echo "$matrix" | jq --compact-output '.')" >> $GITHUB_OUTPUT

- prepare_AWS_RDS_databases:
-   uses: ./.github/workflows/_benchmarking_preparation.yml
-   secrets: inherit

  pgbench-compare:
-   if: ${{ github.event.inputs.run_only_pgvector_tests == 'false' || github.event.inputs.run_only_pgvector_tests == null }}
-   needs: [ generate-matrices, prepare_AWS_RDS_databases ]
-   permissions:
-     contents: write
-     statuses: write
-     id-token: write # aws-actions/configure-aws-credentials
+   needs: [ generate-matrices ]

    strategy:
      fail-fast: false
@@ -375,15 +206,15 @@ jobs:
      TEST_PG_BENCH_DURATIONS_MATRIX: "60m"
      TEST_PG_BENCH_SCALES_MATRIX: ${{ matrix.db_size }}
      POSTGRES_DISTRIB_DIR: /tmp/neon/pg_install
-     DEFAULT_PG_VERSION: ${{ matrix.pg_version }}
+     DEFAULT_PG_VERSION: 14
      TEST_OUTPUT: /tmp/test_output
      BUILD_TYPE: remote
      SAVE_PERF_REPORT: ${{ github.event.inputs.save_perf_report || ( github.ref_name == 'main' ) }}
      PLATFORM: ${{ matrix.platform }}

-   runs-on: ${{ matrix.runner }}
+   runs-on: [ self-hosted, us-east-2, x64 ]
    container:
-     image: ${{ matrix.image }}
+     image: 369495373322.dkr.ecr.eu-central-1.amazonaws.com/build-tools:pinned
      options: --init

    # Increase timeout to 8h, default timeout is 6h
@@ -392,41 +223,37 @@ jobs:
    steps:
      - uses: actions/checkout@v4

-     - name: Configure AWS credentials
-       uses: aws-actions/configure-aws-credentials@v4
-       with:
-         aws-region: eu-central-1
-         role-to-assume: ${{ vars.DEV_AWS_OIDC_ROLE_ARN }}
-         role-duration-seconds: 18000 # 5 hours

      - name: Download Neon artifact
        uses: ./.github/actions/download
        with:
-         name: neon-${{ runner.os }}-${{ runner.arch }}-release-artifact
+         name: neon-${{ runner.os }}-release-artifact
          path: /tmp/neon/
          prefix: latest

+     - name: Add Postgres binaries to PATH
+       run: |
+         ${POSTGRES_DISTRIB_DIR}/v${DEFAULT_PG_VERSION}/bin/pgbench --version
+         echo "${POSTGRES_DISTRIB_DIR}/v${DEFAULT_PG_VERSION}/bin" >> $GITHUB_PATH

      - name: Create Neon Project
-       if: contains(fromJson('["neonvm-captest-new", "neonvm-captest-freetier", "neonvm-azure-captest-freetier", "neonvm-azure-captest-new"]'), matrix.platform)
+       if: contains(fromJson('["neon-captest-new", "neon-captest-freetier", "neonvm-captest-new", "neonvm-captest-freetier"]'), matrix.platform)
        id: create-neon-project
        uses: ./.github/actions/neon-project-create
        with:
-         region_id: ${{ matrix.region_id }}
+         region_id: ${{ github.event.inputs.region_id || 'aws-us-east-2' }}
          postgres_version: ${{ env.DEFAULT_PG_VERSION }}
          api_key: ${{ secrets.NEON_STAGING_API_KEY }}
-         compute_units: ${{ (contains(matrix.platform, 'captest-freetier') && '[0.25, 0.25]') || '[1, 1]' }}
+         compute_units: ${{ (matrix.platform == 'neon-captest-freetier' && '[0.25, 0.25]') || '[1, 1]' }}
+         provisioner: ${{ (contains(matrix.platform, 'neonvm-') && 'k8s-neonvm') || 'k8s-pod' }}

      - name: Set up Connection String
        id: set-up-connstr
        run: |
          case "${PLATFORM}" in
-         neonvm-captest-reuse)
+         neon-captest-reuse)
            CONNSTR=${{ secrets.BENCHMARK_CAPTEST_CONNSTR }}
            ;;
-         neonvm-captest-sharding-reuse)
-           CONNSTR=${{ secrets.BENCHMARK_CAPTEST_SHARDING_CONNSTR }}
-           ;;
-         neonvm-captest-new | neonvm-captest-freetier | neonvm-azure-captest-new | neonvm-azure-captest-freetier)
+         neon-captest-new | neon-captest-freetier | neonvm-captest-new | neonvm-captest-freetier)
            CONNSTR=${{ steps.create-neon-project.outputs.dsn }}
            ;;
          rds-aurora)
@@ -443,6 +270,12 @@ jobs:

          echo "connstr=${CONNSTR}" >> $GITHUB_OUTPUT

+         QUERY="SELECT version();"
+         if [[ "${PLATFORM}" = "neon"* ]]; then
+           QUERY="${QUERY} SHOW neon.tenant_id; SHOW neon.timeline_id;"
+         fi
+         psql ${CONNSTR} -c "${QUERY}"

      - name: Benchmark init
        uses: ./.github/actions/run-python-test-set
        with:
@@ -451,8 +284,6 @@ jobs:
          run_in_parallel: false
          save_perf_report: ${{ env.SAVE_PERF_REPORT }}
          extra_params: -m remote_cluster --timeout 21600 -k test_pgbench_remote_init
-         pg_version: ${{ env.DEFAULT_PG_VERSION }}
-         aws_oicd_role_arn: ${{ vars.DEV_AWS_OIDC_ROLE_ARN }}
        env:
          BENCHMARK_CONNSTR: ${{ steps.set-up-connstr.outputs.connstr }}
          VIP_VAP_ACCESS_TOKEN: "${{ secrets.VIP_VAP_ACCESS_TOKEN }}"
@@ -466,8 +297,6 @@ jobs:
          run_in_parallel: false
          save_perf_report: ${{ env.SAVE_PERF_REPORT }}
          extra_params: -m remote_cluster --timeout 21600 -k test_pgbench_remote_simple_update
-         pg_version: ${{ env.DEFAULT_PG_VERSION }}
-         aws_oicd_role_arn: ${{ vars.DEV_AWS_OIDC_ROLE_ARN }}
        env:
          BENCHMARK_CONNSTR: ${{ steps.set-up-connstr.outputs.connstr }}
          VIP_VAP_ACCESS_TOKEN: "${{ secrets.VIP_VAP_ACCESS_TOKEN }}"
@@ -481,8 +310,6 @@ jobs:
          run_in_parallel: false
          save_perf_report: ${{ env.SAVE_PERF_REPORT }}
          extra_params: -m remote_cluster --timeout 21600 -k test_pgbench_remote_select_only
-         pg_version: ${{ env.DEFAULT_PG_VERSION }}
-         aws_oicd_role_arn: ${{ vars.DEV_AWS_OIDC_ROLE_ARN }}
        env:
          BENCHMARK_CONNSTR: ${{ steps.set-up-connstr.outputs.connstr }}
          VIP_VAP_ACCESS_TOKEN: "${{ secrets.VIP_VAP_ACCESS_TOKEN }}"
@@ -496,158 +323,15 @@ jobs:
          api_key: ${{ secrets.NEON_STAGING_API_KEY }}

      - name: Create Allure report
-       id: create-allure-report
        if: ${{ !cancelled() }}
        uses: ./.github/actions/allure-report-generate
-       with:
-         aws_oicd_role_arn: ${{ vars.DEV_AWS_OIDC_ROLE_ARN }}

      - name: Post to a Slack channel
        if: ${{ github.event.schedule && failure() }}
        uses: slackapi/slack-github-action@v1
        with:
-         channel-id: "C06KHQVQ7U3" # on-call-qa-staging-stream
+         channel-id: "C033QLM5P7D" # dev-staging-stream
-         slack-message: |
-           Periodic perf testing on ${{ matrix.platform }}: ${{ job.status }}
-           <${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}|GitHub Run>
-           <${{ steps.create-allure-report.outputs.report-url }}|Allure report>
+         slack-message: "Periodic perf testing ${{ matrix.platform }}: ${{ job.status }}\n${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"
-       env:
-         SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}
-
- pgbench-pgvector:
-   permissions:
-     contents: write
-     statuses: write
-     id-token: write # aws-actions/configure-aws-credentials
-   strategy:
-     fail-fast: false
-     matrix:
-       include:
-         - PLATFORM: "neonvm-captest-pgvector"
-           RUNNER: [ self-hosted, us-east-2, x64 ]
-         - PLATFORM: "azure-captest-pgvector"
-           RUNNER: [ self-hosted, eastus2, x64 ]
-
-   env:
-     TEST_PG_BENCH_DURATIONS_MATRIX: "15m"
-     TEST_PG_BENCH_SCALES_MATRIX: "1"
-     POSTGRES_DISTRIB_DIR: /tmp/neon/pg_install
-     DEFAULT_PG_VERSION: 16
-     TEST_OUTPUT: /tmp/test_output
-     BUILD_TYPE: remote
-
-     SAVE_PERF_REPORT: ${{ github.event.inputs.save_perf_report || ( github.ref_name == 'main' ) }}
-     PLATFORM: ${{ matrix.PLATFORM }}
-
-   runs-on: ${{ matrix.RUNNER }}
-   container:
-     image: neondatabase/build-tools:pinned-bookworm
-     credentials:
-       username: ${{ secrets.NEON_DOCKERHUB_USERNAME }}
-       password: ${{ secrets.NEON_DOCKERHUB_PASSWORD }}
-     options: --init
-
-   steps:
-     - uses: actions/checkout@v4
-
-     # until https://github.com/neondatabase/neon/issues/8275 is fixed we temporarily install postgresql-16
-     # instead of using Neon artifacts containing pgbench
-     - name: Install postgresql-16 where pytest expects it
-       run: |
-         # Just to make it easier to test things locally on macOS (with arm64)
-         arch=$(uname -m | sed 's/x86_64/amd64/g' | sed 's/aarch64/arm64/g')
-
-         cd /home/nonroot
-         wget -q "https://apt.postgresql.org/pub/repos/apt/pool/main/p/postgresql-17/libpq5_17.2-1.pgdg120+1_${arch}.deb"
-         wget -q "https://apt.postgresql.org/pub/repos/apt/pool/main/p/postgresql-16/postgresql-client-16_16.6-1.pgdg120+1_${arch}.deb"
-         wget -q "https://apt.postgresql.org/pub/repos/apt/pool/main/p/postgresql-16/postgresql-16_16.6-1.pgdg120+1_${arch}.deb"
-         dpkg -x libpq5_17.2-1.pgdg120+1_${arch}.deb pg
-         dpkg -x postgresql-16_16.6-1.pgdg120+1_${arch}.deb pg
-         dpkg -x postgresql-client-16_16.6-1.pgdg120+1_${arch}.deb pg
-
-         mkdir -p /tmp/neon/pg_install/v16/bin
-         ln -s /home/nonroot/pg/usr/lib/postgresql/16/bin/pgbench /tmp/neon/pg_install/v16/bin/pgbench
-         ln -s /home/nonroot/pg/usr/lib/postgresql/16/bin/psql /tmp/neon/pg_install/v16/bin/psql
-         ln -s /home/nonroot/pg/usr/lib/$(uname -m)-linux-gnu /tmp/neon/pg_install/v16/lib
-
-         LD_LIBRARY_PATH="/home/nonroot/pg/usr/lib/$(uname -m)-linux-gnu:${LD_LIBRARY_PATH:-}"
-         export LD_LIBRARY_PATH
-         echo "LD_LIBRARY_PATH=${LD_LIBRARY_PATH}" >> ${GITHUB_ENV}
-
-         /tmp/neon/pg_install/v16/bin/pgbench --version
-         /tmp/neon/pg_install/v16/bin/psql --version
-
-     - name: Set up Connection String
-       id: set-up-connstr
-       run: |
-         case "${PLATFORM}" in
-         neonvm-captest-pgvector)
-           CONNSTR=${{ secrets.BENCHMARK_PGVECTOR_CONNSTR }}
-           ;;
-         azure-captest-pgvector)
-           CONNSTR=${{ secrets.BENCHMARK_PGVECTOR_CONNSTR_AZURE }}
-           ;;
-         *)
-           echo >&2 "Unknown PLATFORM=${PLATFORM}"
-           exit 1
-           ;;
-         esac
-
-         echo "connstr=${CONNSTR}" >> $GITHUB_OUTPUT
-
-     - name: Configure AWS credentials
-       uses: aws-actions/configure-aws-credentials@v4
-       with:
-         aws-region: eu-central-1
-         role-to-assume: ${{ vars.DEV_AWS_OIDC_ROLE_ARN }}
-         role-duration-seconds: 18000 # 5 hours
-
-     - name: Benchmark pgvector hnsw indexing
-       uses: ./.github/actions/run-python-test-set
-       with:
-         build_type: ${{ env.BUILD_TYPE }}
-         test_selection: performance/test_perf_olap.py
-         run_in_parallel: false
-         save_perf_report: ${{ env.SAVE_PERF_REPORT }}
-         extra_params: -m remote_cluster --timeout 21600 -k test_pgvector_indexing
-         pg_version: ${{ env.DEFAULT_PG_VERSION }}
-         aws_oicd_role_arn: ${{ vars.DEV_AWS_OIDC_ROLE_ARN }}
-       env:
-         VIP_VAP_ACCESS_TOKEN: "${{ secrets.VIP_VAP_ACCESS_TOKEN }}"
-         PERF_TEST_RESULT_CONNSTR: "${{ secrets.PERF_TEST_RESULT_CONNSTR }}"
-         BENCHMARK_CONNSTR: ${{ steps.set-up-connstr.outputs.connstr }}
-
-     - name: Benchmark pgvector queries
-       uses: ./.github/actions/run-python-test-set
-       with:
-         build_type: ${{ env.BUILD_TYPE }}
-         test_selection: performance/test_perf_pgvector_queries.py
-         run_in_parallel: false
-         save_perf_report: ${{ env.SAVE_PERF_REPORT }}
-         extra_params: -m remote_cluster --timeout 21600
-         pg_version: ${{ env.DEFAULT_PG_VERSION }}
-         aws_oicd_role_arn: ${{ vars.DEV_AWS_OIDC_ROLE_ARN }}
-       env:
-         BENCHMARK_CONNSTR: ${{ steps.set-up-connstr.outputs.connstr }}
-         VIP_VAP_ACCESS_TOKEN: "${{ secrets.VIP_VAP_ACCESS_TOKEN }}"
-         PERF_TEST_RESULT_CONNSTR: "${{ secrets.PERF_TEST_RESULT_CONNSTR }}"
-
-     - name: Create Allure report
-       id: create-allure-report
-       if: ${{ !cancelled() }}
-       uses: ./.github/actions/allure-report-generate
-       with:
-         aws_oicd_role_arn: ${{ vars.DEV_AWS_OIDC_ROLE_ARN }}
-
-     - name: Post to a Slack channel
-       if: ${{ github.event.schedule && failure() }}
-       uses: slackapi/slack-github-action@v1
-       with:
-         channel-id: "C06KHQVQ7U3" # on-call-qa-staging-stream
-         slack-message: |
-           Periodic perf testing on ${{ env.PLATFORM }}: ${{ job.status }}
-           <${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}|GitHub Run>
-           <${{ steps.create-allure-report.outputs.report-url }}|Allure report>
        env:
          SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}

@@ -659,12 +343,8 @@ jobs:
    #
    # *_CLICKBENCH_CONNSTR: Genuine ClickBench DB with ~100M rows
    # *_CLICKBENCH_10M_CONNSTR: DB with the first 10M rows of ClickBench DB
-   if: ${{ !cancelled() && (github.event.inputs.run_only_pgvector_tests == 'false' || github.event.inputs.run_only_pgvector_tests == null) }}
+   if: ${{ !cancelled() }}
-   permissions:
-     contents: write
-     statuses: write
-     id-token: write # aws-actions/configure-aws-credentials
-   needs: [ generate-matrices, pgbench-compare, prepare_AWS_RDS_databases ]
+   needs: [ generate-matrices, pgbench-compare ]

    strategy:
      fail-fast: false
@@ -672,7 +352,7 @@ jobs:

    env:
      POSTGRES_DISTRIB_DIR: /tmp/neon/pg_install
-     DEFAULT_PG_VERSION: 16
+     DEFAULT_PG_VERSION: 14
      TEST_OUTPUT: /tmp/test_output
      TEST_OLAP_COLLECT_EXPLAIN: ${{ github.event.inputs.collect_olap_explain }}
      TEST_OLAP_COLLECT_PG_STAT_STATEMENTS: ${{ github.event.inputs.collect_pg_stat_statements }}
@@ -682,38 +362,29 @@ jobs:

    runs-on: [ self-hosted, us-east-2, x64 ]
    container:
-     image: neondatabase/build-tools:pinned-bookworm
-     credentials:
-       username: ${{ secrets.NEON_DOCKERHUB_USERNAME }}
-       password: ${{ secrets.NEON_DOCKERHUB_PASSWORD }}
+     image: 369495373322.dkr.ecr.eu-central-1.amazonaws.com/build-tools:pinned
      options: --init

-   # Increase timeout to 12h, default timeout is 6h
-   # we have regression in clickbench causing it to run 2-3x longer
-   timeout-minutes: 720

    steps:
      - uses: actions/checkout@v4

-     - name: Configure AWS credentials
-       uses: aws-actions/configure-aws-credentials@v4
-       with:
-         aws-region: eu-central-1
-         role-to-assume: ${{ vars.DEV_AWS_OIDC_ROLE_ARN }}
-         role-duration-seconds: 18000 # 5 hours

      - name: Download Neon artifact
        uses: ./.github/actions/download
        with:
-         name: neon-${{ runner.os }}-${{ runner.arch }}-release-artifact
+         name: neon-${{ runner.os }}-release-artifact
          path: /tmp/neon/
          prefix: latest

+     - name: Add Postgres binaries to PATH
+       run: |
+         ${POSTGRES_DISTRIB_DIR}/v${DEFAULT_PG_VERSION}/bin/pgbench --version
+         echo "${POSTGRES_DISTRIB_DIR}/v${DEFAULT_PG_VERSION}/bin" >> $GITHUB_PATH

      - name: Set up Connection String
        id: set-up-connstr
        run: |
          case "${PLATFORM}" in
-         neonvm-captest-reuse)
+         neon-captest-reuse)
            CONNSTR=${{ secrets.BENCHMARK_CAPTEST_CLICKBENCH_10M_CONNSTR }}
            ;;
          rds-aurora)
@@ -723,13 +394,19 @@ jobs:
            CONNSTR=${{ secrets.BENCHMARK_RDS_POSTGRES_CLICKBENCH_10M_CONNSTR }}
            ;;
          *)
-           echo >&2 "Unknown PLATFORM=${PLATFORM}. Allowed only 'neonvm-captest-reuse', 'rds-aurora', or 'rds-postgres'"
+           echo >&2 "Unknown PLATFORM=${PLATFORM}. Allowed only 'neon-captest-reuse', 'rds-aurora', or 'rds-postgres'"
            exit 1
            ;;
          esac

          echo "connstr=${CONNSTR}" >> $GITHUB_OUTPUT

+         QUERY="SELECT version();"
+         if [[ "${PLATFORM}" = "neon"* ]]; then
+           QUERY="${QUERY} SHOW neon.tenant_id; SHOW neon.timeline_id;"
+         fi
+         psql ${CONNSTR} -c "${QUERY}"

      - name: ClickBench benchmark
        uses: ./.github/actions/run-python-test-set
        with:
@@ -737,9 +414,7 @@ jobs:
          test_selection: performance/test_perf_olap.py
          run_in_parallel: false
          save_perf_report: ${{ env.SAVE_PERF_REPORT }}
-         extra_params: -m remote_cluster --timeout 43200 -k test_clickbench
+         extra_params: -m remote_cluster --timeout 21600 -k test_clickbench
-         pg_version: ${{ env.DEFAULT_PG_VERSION }}
-         aws_oicd_role_arn: ${{ vars.DEV_AWS_OIDC_ROLE_ARN }}
        env:
          VIP_VAP_ACCESS_TOKEN: "${{ secrets.VIP_VAP_ACCESS_TOKEN }}"
          PERF_TEST_RESULT_CONNSTR: "${{ secrets.PERF_TEST_RESULT_CONNSTR }}"
@@ -749,21 +424,15 @@ jobs:
          TEST_OLAP_SCALE: 10

      - name: Create Allure report
-       id: create-allure-report
        if: ${{ !cancelled() }}
        uses: ./.github/actions/allure-report-generate
-       with:
-         aws_oicd_role_arn: ${{ vars.DEV_AWS_OIDC_ROLE_ARN }}

      - name: Post to a Slack channel
        if: ${{ github.event.schedule && failure() }}
        uses: slackapi/slack-github-action@v1
        with:
-         channel-id: "C06KHQVQ7U3" # on-call-qa-staging-stream
+         channel-id: "C033QLM5P7D" # dev-staging-stream
-         slack-message: |
-           Periodic OLAP perf testing on ${{ matrix.platform }}: ${{ job.status }}
-           <${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}|GitHub Run>
-           <${{ steps.create-allure-report.outputs.report-url }}|Allure report>
+         slack-message: "Periodic OLAP perf testing ${{ matrix.platform }}: ${{ job.status }}\n${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"
        env:
          SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}

@@ -774,12 +443,8 @@ jobs:
    # We might change it after https://github.com/neondatabase/neon/issues/2900.
    #
    # *_TPCH_S10_CONNSTR: DB generated with scale factor 10 (~10 GB)
-   if: ${{ !cancelled() && (github.event.inputs.run_only_pgvector_tests == 'false' || github.event.inputs.run_only_pgvector_tests == null) }}
+   if: ${{ !cancelled() }}
-   permissions:
-     contents: write
-     statuses: write
-     id-token: write # aws-actions/configure-aws-credentials
-   needs: [ generate-matrices, clickbench-compare, prepare_AWS_RDS_databases ]
+   needs: [ generate-matrices, clickbench-compare ]

    strategy:
      fail-fast: false
@@ -787,7 +452,7 @@ jobs:

    env:
      POSTGRES_DISTRIB_DIR: /tmp/neon/pg_install
-     DEFAULT_PG_VERSION: 16
+     DEFAULT_PG_VERSION: 14
      TEST_OUTPUT: /tmp/test_output
      BUILD_TYPE: remote
      SAVE_PERF_REPORT: ${{ github.event.inputs.save_perf_report || ( github.ref_name == 'main' ) }}
@@ -796,43 +461,38 @@ jobs:

    runs-on: [ self-hosted, us-east-2, x64 ]
    container:
-     image: neondatabase/build-tools:pinned-bookworm
-     credentials:
-       username: ${{ secrets.NEON_DOCKERHUB_USERNAME }}
-       password: ${{ secrets.NEON_DOCKERHUB_PASSWORD }}
+     image: 369495373322.dkr.ecr.eu-central-1.amazonaws.com/build-tools:pinned
      options: --init

    steps:
      - uses: actions/checkout@v4

-     - name: Configure AWS credentials
-       uses: aws-actions/configure-aws-credentials@v4
-       with:
-         aws-region: eu-central-1
-         role-to-assume: ${{ vars.DEV_AWS_OIDC_ROLE_ARN }}
-         role-duration-seconds: 18000 # 5 hours

      - name: Download Neon artifact
        uses: ./.github/actions/download
        with:
-         name: neon-${{ runner.os }}-${{ runner.arch }}-release-artifact
+         name: neon-${{ runner.os }}-release-artifact
          path: /tmp/neon/
          prefix: latest

+     - name: Add Postgres binaries to PATH
+       run: |
+         ${POSTGRES_DISTRIB_DIR}/v${DEFAULT_PG_VERSION}/bin/pgbench --version
+         echo "${POSTGRES_DISTRIB_DIR}/v${DEFAULT_PG_VERSION}/bin" >> $GITHUB_PATH

      - name: Get Connstring Secret Name
        run: |
          case "${PLATFORM}" in
-         neonvm-captest-reuse)
+         neon-captest-reuse)
            ENV_PLATFORM=CAPTEST_TPCH
            ;;
          rds-aurora)
            ENV_PLATFORM=RDS_AURORA_TPCH
            ;;
          rds-postgres)
-           ENV_PLATFORM=RDS_POSTGRES_TPCH
+           ENV_PLATFORM=RDS_AURORA_TPCH
            ;;
          *)
-           echo >&2 "Unknown PLATFORM=${PLATFORM}. Allowed only 'neonvm-captest-reuse', 'rds-aurora', or 'rds-postgres'"
+           echo >&2 "Unknown PLATFORM=${PLATFORM}. Allowed only 'neon-captest-reuse', 'rds-aurora', or 'rds-postgres'"
            exit 1
            ;;
          esac
@@ -847,6 +507,12 @@ jobs:

          echo "connstr=${CONNSTR}" >> $GITHUB_OUTPUT

+         QUERY="SELECT version();"
+         if [[ "${PLATFORM}" = "neon"* ]]; then
+           QUERY="${QUERY} SHOW neon.tenant_id; SHOW neon.timeline_id;"
+         fi
+         psql ${CONNSTR} -c "${QUERY}"

      - name: Run TPC-H benchmark
        uses: ./.github/actions/run-python-test-set
        with:
@@ -855,8 +521,6 @@ jobs:
run_in_parallel: false
|
run_in_parallel: false
|
||||||
save_perf_report: ${{ env.SAVE_PERF_REPORT }}
|
save_perf_report: ${{ env.SAVE_PERF_REPORT }}
|
||||||
extra_params: -m remote_cluster --timeout 21600 -k test_tpch
|
extra_params: -m remote_cluster --timeout 21600 -k test_tpch
|
||||||
pg_version: ${{ env.DEFAULT_PG_VERSION }}
|
|
||||||
aws_oicd_role_arn: ${{ vars.DEV_AWS_OIDC_ROLE_ARN }}
|
|
||||||
env:
|
env:
|
||||||
VIP_VAP_ACCESS_TOKEN: "${{ secrets.VIP_VAP_ACCESS_TOKEN }}"
|
VIP_VAP_ACCESS_TOKEN: "${{ secrets.VIP_VAP_ACCESS_TOKEN }}"
|
||||||
PERF_TEST_RESULT_CONNSTR: "${{ secrets.PERF_TEST_RESULT_CONNSTR }}"
|
PERF_TEST_RESULT_CONNSTR: "${{ secrets.PERF_TEST_RESULT_CONNSTR }}"
|
||||||
@@ -864,31 +528,21 @@ jobs:
|
|||||||
TEST_OLAP_SCALE: ${{ matrix.scale }}
|
TEST_OLAP_SCALE: ${{ matrix.scale }}
|
||||||
|
|
||||||
- name: Create Allure report
|
- name: Create Allure report
|
||||||
id: create-allure-report
|
|
||||||
if: ${{ !cancelled() }}
|
if: ${{ !cancelled() }}
|
||||||
uses: ./.github/actions/allure-report-generate
|
uses: ./.github/actions/allure-report-generate
|
||||||
with:
|
|
||||||
aws_oicd_role_arn: ${{ vars.DEV_AWS_OIDC_ROLE_ARN }}
|
|
||||||
|
|
||||||
- name: Post to a Slack channel
|
- name: Post to a Slack channel
|
||||||
if: ${{ github.event.schedule && failure() }}
|
if: ${{ github.event.schedule && failure() }}
|
||||||
uses: slackapi/slack-github-action@v1
|
uses: slackapi/slack-github-action@v1
|
||||||
with:
|
with:
|
||||||
channel-id: "C06KHQVQ7U3" # on-call-qa-staging-stream
|
channel-id: "C033QLM5P7D" # dev-staging-stream
|
||||||
slack-message: |
|
slack-message: "Periodic TPC-H perf testing ${{ matrix.platform }}: ${{ job.status }}\n${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"
|
||||||
Periodic TPC-H perf testing on ${{ matrix.platform }}: ${{ job.status }}
|
|
||||||
<${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}|GitHub Run>
|
|
||||||
<${{ steps.create-allure-report.outputs.report-url }}|Allure report>
|
|
||||||
env:
|
env:
|
||||||
SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}
|
SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}
|
||||||
|
|
||||||
user-examples-compare:
|
user-examples-compare:
|
||||||
if: ${{ !cancelled() && (github.event.inputs.run_only_pgvector_tests == 'false' || github.event.inputs.run_only_pgvector_tests == null) }}
|
if: ${{ !cancelled() }}
|
||||||
permissions:
|
needs: [ generate-matrices, tpch-compare ]
|
||||||
contents: write
|
|
||||||
statuses: write
|
|
||||||
id-token: write # aws-actions/configure-aws-credentials
|
|
||||||
needs: [ generate-matrices, tpch-compare, prepare_AWS_RDS_databases ]
|
|
||||||
|
|
||||||
strategy:
|
strategy:
|
||||||
fail-fast: false
|
fail-fast: false
|
||||||
@@ -896,7 +550,7 @@ jobs:
|
|||||||
|
|
||||||
env:
|
env:
|
||||||
POSTGRES_DISTRIB_DIR: /tmp/neon/pg_install
|
POSTGRES_DISTRIB_DIR: /tmp/neon/pg_install
|
||||||
DEFAULT_PG_VERSION: 16
|
DEFAULT_PG_VERSION: 14
|
||||||
TEST_OUTPUT: /tmp/test_output
|
TEST_OUTPUT: /tmp/test_output
|
||||||
BUILD_TYPE: remote
|
BUILD_TYPE: remote
|
||||||
SAVE_PERF_REPORT: ${{ github.event.inputs.save_perf_report || ( github.ref_name == 'main' ) }}
|
SAVE_PERF_REPORT: ${{ github.event.inputs.save_perf_report || ( github.ref_name == 'main' ) }}
|
||||||
@@ -904,34 +558,29 @@ jobs:
|
|||||||
|
|
||||||
runs-on: [ self-hosted, us-east-2, x64 ]
|
runs-on: [ self-hosted, us-east-2, x64 ]
|
||||||
container:
|
container:
|
||||||
image: neondatabase/build-tools:pinned-bookworm
|
image: 369495373322.dkr.ecr.eu-central-1.amazonaws.com/build-tools:pinned
|
||||||
credentials:
|
|
||||||
username: ${{ secrets.NEON_DOCKERHUB_USERNAME }}
|
|
||||||
password: ${{ secrets.NEON_DOCKERHUB_PASSWORD }}
|
|
||||||
options: --init
|
options: --init
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Configure AWS credentials
|
|
||||||
uses: aws-actions/configure-aws-credentials@v4
|
|
||||||
with:
|
|
||||||
aws-region: eu-central-1
|
|
||||||
role-to-assume: ${{ vars.DEV_AWS_OIDC_ROLE_ARN }}
|
|
||||||
role-duration-seconds: 18000 # 5 hours
|
|
||||||
|
|
||||||
- name: Download Neon artifact
|
- name: Download Neon artifact
|
||||||
uses: ./.github/actions/download
|
uses: ./.github/actions/download
|
||||||
with:
|
with:
|
||||||
name: neon-${{ runner.os }}-${{ runner.arch }}-release-artifact
|
name: neon-${{ runner.os }}-release-artifact
|
||||||
path: /tmp/neon/
|
path: /tmp/neon/
|
||||||
prefix: latest
|
prefix: latest
|
||||||
|
|
||||||
|
- name: Add Postgres binaries to PATH
|
||||||
|
run: |
|
||||||
|
${POSTGRES_DISTRIB_DIR}/v${DEFAULT_PG_VERSION}/bin/pgbench --version
|
||||||
|
echo "${POSTGRES_DISTRIB_DIR}/v${DEFAULT_PG_VERSION}/bin" >> $GITHUB_PATH
|
||||||
|
|
||||||
- name: Set up Connection String
|
- name: Set up Connection String
|
||||||
id: set-up-connstr
|
id: set-up-connstr
|
||||||
run: |
|
run: |
|
||||||
case "${PLATFORM}" in
|
case "${PLATFORM}" in
|
||||||
neonvm-captest-reuse)
|
neon-captest-reuse)
|
||||||
CONNSTR=${{ secrets.BENCHMARK_USER_EXAMPLE_CAPTEST_CONNSTR }}
|
CONNSTR=${{ secrets.BENCHMARK_USER_EXAMPLE_CAPTEST_CONNSTR }}
|
||||||
;;
|
;;
|
||||||
rds-aurora)
|
rds-aurora)
|
||||||
@@ -941,13 +590,19 @@ jobs:
|
|||||||
CONNSTR=${{ secrets.BENCHMARK_USER_EXAMPLE_RDS_POSTGRES_CONNSTR }}
|
CONNSTR=${{ secrets.BENCHMARK_USER_EXAMPLE_RDS_POSTGRES_CONNSTR }}
|
||||||
;;
|
;;
|
||||||
*)
|
*)
|
||||||
echo >&2 "Unknown PLATFORM=${PLATFORM}. Allowed only 'neonvm-captest-reuse', 'rds-aurora', or 'rds-postgres'"
|
echo >&2 "Unknown PLATFORM=${PLATFORM}. Allowed only 'neon-captest-reuse', 'rds-aurora', or 'rds-postgres'"
|
||||||
exit 1
|
exit 1
|
||||||
;;
|
;;
|
||||||
esac
|
esac
|
||||||
|
|
||||||
echo "connstr=${CONNSTR}" >> $GITHUB_OUTPUT
|
echo "connstr=${CONNSTR}" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
|
QUERY="SELECT version();"
|
||||||
|
if [[ "${PLATFORM}" = "neon"* ]]; then
|
||||||
|
QUERY="${QUERY} SHOW neon.tenant_id; SHOW neon.timeline_id;"
|
||||||
|
fi
|
||||||
|
psql ${CONNSTR} -c "${QUERY}"
|
||||||
|
|
||||||
- name: Run user examples
|
- name: Run user examples
|
||||||
uses: ./.github/actions/run-python-test-set
|
uses: ./.github/actions/run-python-test-set
|
||||||
with:
|
with:
|
||||||
@@ -956,29 +611,20 @@ jobs:
|
|||||||
run_in_parallel: false
|
run_in_parallel: false
|
||||||
save_perf_report: ${{ env.SAVE_PERF_REPORT }}
|
save_perf_report: ${{ env.SAVE_PERF_REPORT }}
|
||||||
extra_params: -m remote_cluster --timeout 21600 -k test_user_examples
|
extra_params: -m remote_cluster --timeout 21600 -k test_user_examples
|
||||||
pg_version: ${{ env.DEFAULT_PG_VERSION }}
|
|
||||||
aws_oicd_role_arn: ${{ vars.DEV_AWS_OIDC_ROLE_ARN }}
|
|
||||||
env:
|
env:
|
||||||
VIP_VAP_ACCESS_TOKEN: "${{ secrets.VIP_VAP_ACCESS_TOKEN }}"
|
VIP_VAP_ACCESS_TOKEN: "${{ secrets.VIP_VAP_ACCESS_TOKEN }}"
|
||||||
PERF_TEST_RESULT_CONNSTR: "${{ secrets.PERF_TEST_RESULT_CONNSTR }}"
|
PERF_TEST_RESULT_CONNSTR: "${{ secrets.PERF_TEST_RESULT_CONNSTR }}"
|
||||||
BENCHMARK_CONNSTR: ${{ steps.set-up-connstr.outputs.connstr }}
|
BENCHMARK_CONNSTR: ${{ steps.set-up-connstr.outputs.connstr }}
|
||||||
|
|
||||||
- name: Create Allure report
|
- name: Create Allure report
|
||||||
id: create-allure-report
|
|
||||||
if: ${{ !cancelled() }}
|
if: ${{ !cancelled() }}
|
||||||
uses: ./.github/actions/allure-report-generate
|
uses: ./.github/actions/allure-report-generate
|
||||||
with:
|
|
||||||
aws_oicd_role_arn: ${{ vars.DEV_AWS_OIDC_ROLE_ARN }}
|
|
||||||
|
|
||||||
- name: Post to a Slack channel
|
- name: Post to a Slack channel
|
||||||
if: ${{ github.event.schedule && failure() }}
|
if: ${{ github.event.schedule && failure() }}
|
||||||
uses: slackapi/slack-github-action@v1
|
uses: slackapi/slack-github-action@v1
|
||||||
with:
|
with:
|
||||||
channel-id: "C06KHQVQ7U3" # on-call-qa-staging-stream
|
channel-id: "C033QLM5P7D" # dev-staging-stream
|
||||||
slack-message: |
|
slack-message: "Periodic User example perf testing ${{ matrix.platform }}: ${{ job.status }}\n${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"
|
||||||
Periodic TPC-H perf testing on ${{ matrix.platform }}: ${{ job.status }}
|
|
||||||
<${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}|GitHub Run>
|
|
||||||
<${{ steps.create-allure-report.outputs.report-url }}|Allure report>
|
|
||||||
|
|
||||||
env:
|
env:
|
||||||
SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}
|
SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}
|
||||||
|
.github/workflows/build-build-tools-image.yml (158 lines changed)

@@ -3,134 +3,94 @@ name: Build build-tools image
on:
  workflow_call:
    inputs:
-     archs:
-       description: "Json array of architectures to build"
-       # Default values are set in `check-image` job, `set-variables` step
-       type: string
-       required: false
-     debians:
-       description: "Json array of Debian versions to build"
-       # Default values are set in `check-image` job, `set-variables` step
-       type: string
-       required: false
+     image-tag:
+       description: "build-tools image tag"
+       required: true
+       type: string
    outputs:
      image-tag:
        description: "build-tools tag"
-       value: ${{ jobs.check-image.outputs.tag }}
+       value: ${{ inputs.image-tag }}
      image:
        description: "build-tools image"
-       value: neondatabase/build-tools:${{ jobs.check-image.outputs.tag }}
+       value: neondatabase/build-tools:${{ inputs.image-tag }}

defaults:
  run:
    shell: bash -euo pipefail {0}

- # The initial idea was to prevent the waste of resources by not re-building the `build-tools` image
- # for the same tag in parallel workflow runs, and queue them to be skipped once we have
- # the first image pushed to Docker registry, but GitHub's concurrency mechanism is not working as expected.
- # GitHub can't have more than 1 job in a queue and removes the previous one, it causes failures if the dependent jobs.
- #
- # Ref https://github.com/orgs/community/discussions/41518
- #
- # concurrency:
- #   group: build-build-tools-image-${{ inputs.image-tag }}
- #   cancel-in-progress: false
+ concurrency:
+   group: build-build-tools-image-${{ inputs.image-tag }}

# No permission for GITHUB_TOKEN by default; the **minimal required** set of permissions should be granted in each job.
permissions: {}

jobs:
  check-image:
-   runs-on: ubuntu-22.04
-   outputs:
-     archs: ${{ steps.set-variables.outputs.archs }}
-     debians: ${{ steps.set-variables.outputs.debians }}
-     tag: ${{ steps.set-variables.outputs.image-tag }}
-     everything: ${{ steps.set-more-variables.outputs.everything }}
-     found: ${{ steps.set-more-variables.outputs.found }}
-
-   steps:
-     - uses: actions/checkout@v4
-
-     - name: Set variables
-       id: set-variables
-       env:
-         ARCHS: ${{ inputs.archs || '["x64","arm64"]' }}
-         DEBIANS: ${{ inputs.debians || '["bullseye","bookworm"]' }}
-         IMAGE_TAG: |
-           ${{ hashFiles('build-tools.Dockerfile',
-                         '.github/workflows/build-build-tools-image.yml') }}
-       run: |
-         echo "archs=${ARCHS}" | tee -a ${GITHUB_OUTPUT}
-         echo "debians=${DEBIANS}" | tee -a ${GITHUB_OUTPUT}
-         echo "image-tag=${IMAGE_TAG}" | tee -a ${GITHUB_OUTPUT}
-
-     - name: Set more variables
-       id: set-more-variables
-       env:
-         IMAGE_TAG: ${{ steps.set-variables.outputs.image-tag }}
-         EVERYTHING: |
-           ${{ contains(fromJson(steps.set-variables.outputs.archs), 'x64') &&
-               contains(fromJson(steps.set-variables.outputs.archs), 'arm64') &&
-               contains(fromJson(steps.set-variables.outputs.debians), 'bullseye') &&
-               contains(fromJson(steps.set-variables.outputs.debians), 'bookworm') }}
-       run: |
-         if docker manifest inspect neondatabase/build-tools:${IMAGE_TAG}; then
-           found=true
-         else
-           found=false
-         fi
-
-         echo "everything=${EVERYTHING}" | tee -a ${GITHUB_OUTPUT}
-         echo "found=${found}" | tee -a ${GITHUB_OUTPUT}
+   uses: ./.github/workflows/check-build-tools-image.yml

+ # This job uses older version of GitHub Actions because it's run on gen2 runners, which don't support node 20 (for newer versions)
  build-image:
    needs: [ check-image ]
    if: needs.check-image.outputs.found == 'false'

    strategy:
      matrix:
-       arch: ${{ fromJson(needs.check-image.outputs.archs) }}
-       debian: ${{ fromJson(needs.check-image.outputs.debians) }}
+       arch: [ x64, arm64 ]

-   runs-on: ${{ fromJson(format('["self-hosted", "{0}"]', matrix.arch == 'arm64' && 'large-arm64' || 'large')) }}
+   runs-on: ${{ fromJson(format('["self-hosted", "dev", "{0}"]', matrix.arch)) }}

+   env:
+     IMAGE_TAG: ${{ inputs.image-tag }}

    steps:
-     - uses: actions/checkout@v4
+     - name: Check `input.tag` is correct
+       env:
+         INPUTS_IMAGE_TAG: ${{ inputs.image-tag }}
+         CHECK_IMAGE_TAG : ${{ needs.check-image.outputs.image-tag }}
+       run: |
+         if [ "${INPUTS_IMAGE_TAG}" != "${CHECK_IMAGE_TAG}" ]; then
+           echo "'inputs.image-tag' (${INPUTS_IMAGE_TAG}) does not match the tag of the latest build-tools image 'inputs.image-tag' (${CHECK_IMAGE_TAG})"
+           exit 1
+         fi

-     - uses: neondatabase/dev-actions/set-docker-config-dir@6094485bf440001c94a94a3f9e221e81ff6b6193
-     - uses: docker/setup-buildx-action@v3
-       with:
-         cache-binary: false
-
-     - uses: docker/login-action@v3
+     - uses: actions/checkout@v3
+
+     # Use custom DOCKER_CONFIG directory to avoid conflicts with default settings
+     # The default value is ~/.docker
+     - name: Set custom docker config directory
+       run: |
+         mkdir -p /tmp/.docker-custom
+         echo DOCKER_CONFIG=/tmp/.docker-custom >> $GITHUB_ENV
+
+     - uses: docker/setup-buildx-action@v2
+
+     - uses: docker/login-action@v2
        with:
          username: ${{ secrets.NEON_DOCKERHUB_USERNAME }}
          password: ${{ secrets.NEON_DOCKERHUB_PASSWORD }}

-     - uses: docker/login-action@v3
-       with:
-         registry: cache.neon.build
-         username: ${{ secrets.NEON_CI_DOCKERCACHE_USERNAME }}
-         password: ${{ secrets.NEON_CI_DOCKERCACHE_PASSWORD }}
-
-     - uses: docker/build-push-action@v6
+     - uses: docker/build-push-action@v4
        with:
-         file: build-tools.Dockerfile
          context: .
          provenance: false
          push: true
          pull: true
-         build-args: |
-           DEBIAN_VERSION=${{ matrix.debian }}
-         cache-from: type=registry,ref=cache.neon.build/build-tools:cache-${{ matrix.debian }}-${{ matrix.arch }}
-         cache-to: ${{ github.ref_name == 'main' && format('type=registry,ref=cache.neon.build/build-tools:cache-{0}-{1},mode=max', matrix.debian, matrix.arch) || '' }}
-         tags: |
-           neondatabase/build-tools:${{ needs.check-image.outputs.tag }}-${{ matrix.debian }}-${{ matrix.arch }}
+         file: Dockerfile.build-tools
+         cache-from: type=registry,ref=neondatabase/build-tools:cache-${{ matrix.arch }}
+         cache-to: type=registry,ref=neondatabase/build-tools:cache-${{ matrix.arch }},mode=max
+         tags: neondatabase/build-tools:${{ inputs.image-tag }}-${{ matrix.arch }}
+
+     - name: Remove custom docker config directory
+       run: |
+         rm -rf /tmp/.docker-custom

  merge-images:
-   needs: [ check-image, build-image ]
-   runs-on: ubuntu-22.04
+   needs: [ build-image ]
+   runs-on: ubuntu-latest

+   env:
+     IMAGE_TAG: ${{ inputs.image-tag }}

    steps:
      - uses: docker/login-action@v3

@@ -139,23 +99,7 @@ jobs:
          password: ${{ secrets.NEON_DOCKERHUB_PASSWORD }}

      - name: Create multi-arch image
-       env:
-         DEFAULT_DEBIAN_VERSION: bookworm
-         ARCHS: ${{ join(fromJson(needs.check-image.outputs.archs), ' ') }}
-         DEBIANS: ${{ join(fromJson(needs.check-image.outputs.debians), ' ') }}
-         EVERYTHING: ${{ needs.check-image.outputs.everything }}
-         IMAGE_TAG: ${{ needs.check-image.outputs.tag }}
        run: |
-         for debian in ${DEBIANS}; do
-           tags=("-t" "neondatabase/build-tools:${IMAGE_TAG}-${debian}")
-
-           if [ "${EVERYTHING}" == "true" ] && [ "${debian}" == "${DEFAULT_DEBIAN_VERSION}" ]; then
-             tags+=("-t" "neondatabase/build-tools:${IMAGE_TAG}")
-           fi
-
-           for arch in ${ARCHS}; do
-             tags+=("neondatabase/build-tools:${IMAGE_TAG}-${debian}-${arch}")
-           done
-
-           docker buildx imagetools create "${tags[@]}"
-         done
+         docker buildx imagetools create -t neondatabase/build-tools:${IMAGE_TAG} \
+           neondatabase/build-tools:${IMAGE_TAG}-x64 \
+           neondatabase/build-tools:${IMAGE_TAG}-arm64
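Note on the manifest-merge step above: `docker buildx imagetools create` stitches the per-architecture tags into a single multi-arch tag. A rough way to sanity-check the result locally (a sketch, not part of the workflow; the IMAGE_TAG value below is a placeholder for whatever tag the check-image job computed):

    # inspect the merged manifest list and confirm both architectures are present
    IMAGE_TAG=<some-tag>   # placeholder, e.g. the tag produced by check-image
    docker buildx imagetools inspect "neondatabase/build-tools:${IMAGE_TAG}" | grep -E 'Name|Platform'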
.github/workflows/build_and_test.yml (1166 lines changed)
(file diff suppressed because it is too large)
.github/workflows/check-build-tools-image.yml (new file, 58 lines)

@@ -0,0 +1,58 @@
name: Check build-tools image

on:
  workflow_call:
    outputs:
      image-tag:
        description: "build-tools image tag"
        value: ${{ jobs.check-image.outputs.tag }}
      found:
        description: "Whether the image is found in the registry"
        value: ${{ jobs.check-image.outputs.found }}

defaults:
  run:
    shell: bash -euo pipefail {0}

# No permission for GITHUB_TOKEN by default; the **minimal required** set of permissions should be granted in each job.
permissions: {}

jobs:
  check-image:
    runs-on: ubuntu-latest
    outputs:
      tag: ${{ steps.get-build-tools-tag.outputs.image-tag }}
      found: ${{ steps.check-image.outputs.found }}

    steps:
      - name: Get build-tools image tag for the current commit
        id: get-build-tools-tag
        env:
          COMMIT_SHA: ${{ github.event.pull_request.head.sha || github.sha }}
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          LAST_BUILD_TOOLS_SHA=$(
            gh api \
              -H "Accept: application/vnd.github+json" \
              -H "X-GitHub-Api-Version: 2022-11-28" \
              --method GET \
              --field path=Dockerfile.build-tools \
              --field sha=${COMMIT_SHA} \
              --field per_page=1 \
              --jq ".[0].sha" \
              "/repos/${GITHUB_REPOSITORY}/commits"
          )
          echo "image-tag=${LAST_BUILD_TOOLS_SHA}" | tee -a $GITHUB_OUTPUT

      - name: Check if such tag found in the registry
        id: check-image
        env:
          IMAGE_TAG: ${{ steps.get-build-tools-tag.outputs.image-tag }}
        run: |
          if docker manifest inspect neondatabase/build-tools:${IMAGE_TAG}; then
            found=true
          else
            found=false
          fi

          echo "found=${found}" | tee -a $GITHUB_OUTPUT
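The new workflow derives the image tag from the last commit that touched Dockerfile.build-tools and then probes Docker Hub for an image with that tag. Roughly the same check can be reproduced from a local clone (a sketch only, assuming an authenticated `gh` CLI and Docker; it is not part of the workflow itself):

    # tag = last commit that modified the build-tools Dockerfile
    IMAGE_TAG=$(gh api --method GET \
      --field path=Dockerfile.build-tools --field per_page=1 \
      --jq '.[0].sha' "/repos/neondatabase/neon/commits")
    # does the registry already have an image for that tag?
    if docker manifest inspect "neondatabase/build-tools:${IMAGE_TAG}" >/dev/null 2>&1; then
      echo "found: ${IMAGE_TAG}"
    else
      echo "not built yet: ${IMAGE_TAG}"
    fi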
.github/workflows/check-permissions.yml (2 lines changed)

@@ -16,7 +16,7 @@ permissions: {}

jobs:
  check-permissions:
-   runs-on: ubuntu-22.04
+   runs-on: ubuntu-latest
    steps:
      - name: Disallow CI runs on PRs from forks
        if: |
@@ -9,7 +9,7 @@ on:

jobs:
  cleanup:
-   runs-on: ubuntu-22.04
+   runs-on: ubuntu-latest
    steps:
      - name: Cleanup
        run: |
.github/workflows/cloud-regress.yml (file removed, 102 lines)

@@ -1,102 +0,0 @@
name: Cloud Regression Test

on:
  schedule:
    # * is a special character in YAML so you have to quote this string
    #        ┌───────────── minute (0 - 59)
    #        │ ┌───────────── hour (0 - 23)
    #        │ │ ┌───────────── day of the month (1 - 31)
    #        │ │ │ ┌───────────── month (1 - 12 or JAN-DEC)
    #        │ │ │ │ ┌───────────── day of the week (0 - 6 or SUN-SAT)
    - cron:  '45 1 * * *' # run once a day, timezone is utc
  workflow_dispatch: # adds ability to run this manually

defaults:
  run:
    shell: bash -euxo pipefail {0}

concurrency:
  # Allow only one workflow
  group: ${{ github.workflow }}
  cancel-in-progress: true

jobs:
  regress:
    env:
      POSTGRES_DISTRIB_DIR: /tmp/neon/pg_install
      DEFAULT_PG_VERSION: 16
      TEST_OUTPUT: /tmp/test_output
      BUILD_TYPE: remote
      AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_DEV }}
      AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_KEY_DEV }}

    runs-on: us-east-2
    container:
      image: neondatabase/build-tools:pinned-bookworm
      options: --init

    steps:
      - uses: actions/checkout@v4
        with:
          submodules: true

      - name: Patch the test
        run: |
          cd "vendor/postgres-v${DEFAULT_PG_VERSION}"
          patch -p1 < "../../compute/patches/cloud_regress_pg${DEFAULT_PG_VERSION}.patch"

      - name: Generate a random password
        id: pwgen
        run: |
          set +x
          DBPASS=$(dd if=/dev/random bs=48 count=1 2>/dev/null | base64)
          echo "::add-mask::${DBPASS//\//}"
          echo DBPASS="${DBPASS//\//}" >> "${GITHUB_OUTPUT}"

      - name: Change tests according to the generated password
        env:
          DBPASS: ${{ steps.pwgen.outputs.DBPASS }}
        run: |
          cd vendor/postgres-v"${DEFAULT_PG_VERSION}"/src/test/regress
          for fname in sql/*.sql expected/*.out; do
            sed -i.bak s/NEON_PASSWORD_PLACEHOLDER/"'${DBPASS}'"/ "${fname}"
          done
          for ph in $(grep NEON_MD5_PLACEHOLDER expected/password.out | awk '{print $3;}' | sort | uniq); do
            USER=$(echo "${ph}" | cut -c 22-)
            MD5=md5$(echo -n "${DBPASS}${USER}" | md5sum | awk '{print $1;}')
            sed -i.bak "s/${ph}/${MD5}/" expected/password.out
          done

      - name: Download Neon artifact
        uses: ./.github/actions/download
        with:
          name: neon-${{ runner.os }}-${{ runner.arch }}-release-artifact
          path: /tmp/neon/
          prefix: latest

      - name: Run the regression tests
        uses: ./.github/actions/run-python-test-set
        with:
          build_type: ${{ env.BUILD_TYPE }}
          test_selection: cloud_regress
          pg_version: ${{ env.DEFAULT_PG_VERSION }}
          extra_params: -m remote_cluster
        env:
          BENCHMARK_CONNSTR: ${{ secrets.PG_REGRESS_CONNSTR }}

      - name: Create Allure report
        id: create-allure-report
        if: ${{ !cancelled() }}
        uses: ./.github/actions/allure-report-generate

      - name: Post to a Slack channel
        if: ${{ github.event.schedule && failure() }}
        uses: slackapi/slack-github-action@v1
        with:
          channel-id: "C033QLM5P7D" # on-call-staging-stream
          slack-message: |
            Periodic pg_regress on staging: ${{ job.status }}
            <${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}|GitHub Run>
            <${{ steps.create-allure-report.outputs.report-url }}|Allure report>
        env:
          SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}
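The password-rewriting step in the removed workflow relies on how Postgres builds md5 password hashes: the stored value is the literal string "md5" followed by md5(password concatenated with the user name), and the placeholder carries the user name starting at character 22 (hence the `cut -c 22-`). A minimal sketch of the same hash computation in isolation, with made-up values for illustration only:

    DBPASS='example-password'   # assumed value, illustration only
    USER='regress_user'         # assumed value, illustration only
    echo "md5$(echo -n "${DBPASS}${USER}" | md5sum | awk '{print $1}')"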
.github/workflows/ingest_benchmark.yml (file removed, 160 lines)

@@ -1,160 +0,0 @@
name: benchmarking ingest

on:
  # uncomment to run on push for debugging your PR
  # push:
  #   branches: [ your branch ]
  schedule:
    # * is a special character in YAML so you have to quote this string
    #        ┌───────────── minute (0 - 59)
    #        │ ┌───────────── hour (0 - 23)
    #        │ │ ┌───────────── day of the month (1 - 31)
    #        │ │ │ ┌───────────── month (1 - 12 or JAN-DEC)
    #        │ │ │ │ ┌───────────── day of the week (0 - 6 or SUN-SAT)
    - cron:  '0 9 * * *' # run once a day, timezone is utc
  workflow_dispatch: # adds ability to run this manually

defaults:
  run:
    shell: bash -euxo pipefail {0}

concurrency:
  # Allow only one workflow globally because we need dedicated resources which only exist once
  group: ingest-bench-workflow
  cancel-in-progress: true

jobs:
  ingest:
    strategy:
      fail-fast: false # allow other variants to continue even if one fails
      matrix:
        target_project: [new_empty_project, large_existing_project]
    permissions:
      contents: write
      statuses: write
      id-token: write # aws-actions/configure-aws-credentials
    env:
      PG_CONFIG: /tmp/neon/pg_install/v16/bin/pg_config
      PSQL: /tmp/neon/pg_install/v16/bin/psql
      PG_16_LIB_PATH: /tmp/neon/pg_install/v16/lib
      PGCOPYDB: /pgcopydb/bin/pgcopydb
      PGCOPYDB_LIB_PATH: /pgcopydb/lib
    runs-on: [ self-hosted, us-east-2, x64 ]
    container:
      image: neondatabase/build-tools:pinned-bookworm
      credentials:
        username: ${{ secrets.NEON_DOCKERHUB_USERNAME }}
        password: ${{ secrets.NEON_DOCKERHUB_PASSWORD }}
      options: --init
    timeout-minutes: 1440

    steps:
      - uses: actions/checkout@v4

      - name: Configure AWS credentials # necessary to download artefacts
        uses: aws-actions/configure-aws-credentials@v4
        with:
          aws-region: eu-central-1
          role-to-assume: ${{ vars.DEV_AWS_OIDC_ROLE_ARN }}
          role-duration-seconds: 18000 # 5 hours is currently max associated with IAM role

      - name: Download Neon artifact
        uses: ./.github/actions/download
        with:
          name: neon-${{ runner.os }}-${{ runner.arch }}-release-artifact
          path: /tmp/neon/
          prefix: latest

      - name: Create Neon Project
        if: ${{ matrix.target_project == 'new_empty_project' }}
        id: create-neon-project-ingest-target
        uses: ./.github/actions/neon-project-create
        with:
          region_id: aws-us-east-2
          postgres_version: 16
          compute_units: '[7, 7]' # we want to test large compute here to avoid compute-side bottleneck
          api_key: ${{ secrets.NEON_STAGING_API_KEY }}

      - name: Initialize Neon project
        if: ${{ matrix.target_project == 'new_empty_project' }}
        env:
          BENCHMARK_INGEST_TARGET_CONNSTR: ${{ steps.create-neon-project-ingest-target.outputs.dsn }}
          NEW_PROJECT_ID: ${{ steps.create-neon-project-ingest-target.outputs.project_id }}
        run: |
          echo "Initializing Neon project with project_id: ${NEW_PROJECT_ID}"
          export LD_LIBRARY_PATH=${PG_16_LIB_PATH}
          ${PSQL} "${BENCHMARK_INGEST_TARGET_CONNSTR}" -c "CREATE EXTENSION IF NOT EXISTS neon; CREATE EXTENSION IF NOT EXISTS neon_utils;"
          echo "BENCHMARK_INGEST_TARGET_CONNSTR=${BENCHMARK_INGEST_TARGET_CONNSTR}" >> $GITHUB_ENV

      - name: Create Neon Branch for large tenant
        if: ${{ matrix.target_project == 'large_existing_project' }}
        id: create-neon-branch-ingest-target
        uses: ./.github/actions/neon-branch-create
        with:
          project_id: ${{ vars.BENCHMARK_INGEST_TARGET_PROJECTID }}
          api_key: ${{ secrets.NEON_STAGING_API_KEY }}

      - name: Initialize Neon project
        if: ${{ matrix.target_project == 'large_existing_project' }}
        env:
          BENCHMARK_INGEST_TARGET_CONNSTR: ${{ steps.create-neon-branch-ingest-target.outputs.dsn }}
          NEW_BRANCH_ID: ${{ steps.create-neon-branch-ingest-target.outputs.branch_id }}
        run: |
          echo "Initializing Neon branch with branch_id: ${NEW_BRANCH_ID}"
          export LD_LIBRARY_PATH=${PG_16_LIB_PATH}
          # Extract the part before the database name
          base_connstr="${BENCHMARK_INGEST_TARGET_CONNSTR%/*}"
          # Extract the query parameters (if any) after the database name
          query_params="${BENCHMARK_INGEST_TARGET_CONNSTR#*\?}"
          # Reconstruct the new connection string
          if [ "$query_params" != "$BENCHMARK_INGEST_TARGET_CONNSTR" ]; then
            new_connstr="${base_connstr}/neondb?${query_params}"
          else
            new_connstr="${base_connstr}/neondb"
          fi
          ${PSQL} "${new_connstr}" -c "drop database ludicrous;"
          ${PSQL} "${new_connstr}" -c "CREATE DATABASE ludicrous;"
          if [ "$query_params" != "$BENCHMARK_INGEST_TARGET_CONNSTR" ]; then
            BENCHMARK_INGEST_TARGET_CONNSTR="${base_connstr}/ludicrous?${query_params}"
          else
            BENCHMARK_INGEST_TARGET_CONNSTR="${base_connstr}/ludicrous"
          fi
          ${PSQL} "${BENCHMARK_INGEST_TARGET_CONNSTR}" -c "CREATE EXTENSION IF NOT EXISTS neon; CREATE EXTENSION IF NOT EXISTS neon_utils;"
          echo "BENCHMARK_INGEST_TARGET_CONNSTR=${BENCHMARK_INGEST_TARGET_CONNSTR}" >> $GITHUB_ENV

      - name: Invoke pgcopydb
        uses: ./.github/actions/run-python-test-set
        with:
          build_type: remote
          test_selection: performance/test_perf_ingest_using_pgcopydb.py
          run_in_parallel: false
          extra_params: -s -m remote_cluster --timeout 86400 -k test_ingest_performance_using_pgcopydb
          pg_version: v16
          save_perf_report: true
          aws_oicd_role_arn: ${{ vars.DEV_AWS_OIDC_ROLE_ARN }}
        env:
          BENCHMARK_INGEST_SOURCE_CONNSTR: ${{ secrets.BENCHMARK_INGEST_SOURCE_CONNSTR }}
          TARGET_PROJECT_TYPE: ${{ matrix.target_project }}
          # we report PLATFORM in zenbenchmark NeonBenchmarker perf database and want to distinguish between new project and large tenant
          PLATFORM: "${{ matrix.target_project }}-us-east-2-staging"
          PERF_TEST_RESULT_CONNSTR: "${{ secrets.PERF_TEST_RESULT_CONNSTR }}"

      - name: show tables sizes after ingest
        run: |
          export LD_LIBRARY_PATH=${PG_16_LIB_PATH}
          ${PSQL} "${BENCHMARK_INGEST_TARGET_CONNSTR}" -c "\dt+"

      - name: Delete Neon Project
        if: ${{ always() && matrix.target_project == 'new_empty_project' }}
        uses: ./.github/actions/neon-project-delete
        with:
          project_id: ${{ steps.create-neon-project-ingest-target.outputs.project_id }}
          api_key: ${{ secrets.NEON_STAGING_API_KEY }}

      - name: Delete Neon Branch for large tenant
        if: ${{ always() && matrix.target_project == 'large_existing_project' }}
        uses: ./.github/actions/neon-branch-delete
        with:
          project_id: ${{ vars.BENCHMARK_INGEST_TARGET_PROJECTID }}
          branch_id: ${{ steps.create-neon-branch-ingest-target.outputs.branch_id }}
          api_key: ${{ secrets.NEON_STAGING_API_KEY }}
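The branch-initialization step in the removed workflow swaps the database name inside a libpq connection URI while keeping any query parameters, using plain bash parameter expansion. The same trick in isolation (a sketch with a made-up connection string; the real one comes from the neon-branch-create action output):

    CONNSTR='postgres://user:pass@host/neondb?sslmode=require'   # illustrative value only
    base="${CONNSTR%/*}"        # everything before the database name
    params="${CONNSTR#*\?}"     # query parameters, if any
    if [ "$params" != "$CONNSTR" ]; then
      echo "${base}/ludicrous?${params}"
    else
      echo "${base}/ludicrous"
    fi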
.github/workflows/label-for-external-users.yml (file removed, 78 lines)

@@ -1,78 +0,0 @@
name: Add `external` label to issues and PRs created by external users

on:
  issues:
    types:
      - opened
  pull_request_target:
    types:
      - opened
  workflow_dispatch:
    inputs:
      github-actor:
        description: 'GitHub username. If empty, the username of the current user will be used'
        required: false

# No permission for GITHUB_TOKEN by default; the **minimal required** set of permissions should be granted in each job.
permissions: {}

env:
  LABEL: external

jobs:
  check-user:
    runs-on: ubuntu-22.04

    outputs:
      is-member: ${{ steps.check-user.outputs.is-member }}

    steps:
      - name: Check whether `${{ github.actor }}` is a member of `${{ github.repository_owner }}`
        id: check-user
        env:
          GH_TOKEN: ${{ secrets.CI_ACCESS_TOKEN }}
          ACTOR: ${{ inputs.github-actor || github.actor }}
        run: |
          expected_error="User does not exist or is not a member of the organization"
          output_file=output.txt

          for i in $(seq 1 10); do
            if gh api "/orgs/${GITHUB_REPOSITORY_OWNER}/members/${ACTOR}" \
                -H "Accept: application/vnd.github+json" \
                -H "X-GitHub-Api-Version: 2022-11-28" > ${output_file}; then

              is_member=true
              break
            elif grep -q "${expected_error}" ${output_file}; then
              is_member=false
              break
            elif [ $i -eq 10 ]; then
              title="Failed to get memmbership status for ${ACTOR}"
              message="The latest GitHub API error message: '$(cat ${output_file})'"
              echo "::error file=.github/workflows/label-for-external-users.yml,title=${title}::${message}"

              exit 1
            fi

            sleep 1
          done

          echo "is-member=${is_member}" | tee -a ${GITHUB_OUTPUT}

  add-label:
    if: needs.check-user.outputs.is-member == 'false'
    needs: [ check-user ]

    runs-on: ubuntu-22.04
    permissions:
      pull-requests: write # for `gh pr edit`
      issues: write # for `gh issue edit`

    steps:
      - name: Add `${{ env.LABEL }}` label
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          ITEM_NUMBER: ${{ github.event[github.event_name == 'pull_request_target' && 'pull_request' || 'issue'].number }}
          GH_CLI_COMMAND: ${{ github.event_name == 'pull_request_target' && 'pr' || 'issue' }}
        run: |
          gh ${GH_CLI_COMMAND} --repo ${GITHUB_REPOSITORY} edit --add-label=${LABEL} ${ITEM_NUMBER}
.github/workflows/neon_extra_builds.yml (238 lines changed)

@@ -26,9 +26,15 @@ jobs:
    with:
      github-event-name: ${{ github.event_name}}

- build-build-tools-image:
+ check-build-tools-image:
    needs: [ check-permissions ]
+   uses: ./.github/workflows/check-build-tools-image.yml

+ build-build-tools-image:
+   needs: [ check-build-tools-image ]
    uses: ./.github/workflows/build-build-tools-image.yml
+   with:
+     image-tag: ${{ needs.check-build-tools-image.outputs.image-tag }}
    secrets: inherit

  check-macos-build:

@@ -38,7 +44,7 @@ jobs:
      contains(github.event.pull_request.labels.*.name, 'run-extra-build-*') ||
      github.ref_name == 'main'
    timeout-minutes: 90
-   runs-on: macos-15
+   runs-on: macos-14

    env:
      # Use release build only, to have less debug info around

@@ -50,9 +56,10 @@ jobs:
        uses: actions/checkout@v4
        with:
          submodules: true
+         fetch-depth: 1

      - name: Install macOS postgres dependencies
-       run: brew install flex bison openssl protobuf icu4c
+       run: brew install flex bison openssl protobuf icu4c pkg-config

      - name: Set pg 14 revision for caching
        id: pg_v14_rev

@@ -66,10 +73,6 @@ jobs:
        id: pg_v16_rev
        run: echo pg_rev=$(git rev-parse HEAD:vendor/postgres-v16) >> $GITHUB_OUTPUT

-     - name: Set pg 17 revision for caching
-       id: pg_v17_rev
-       run: echo pg_rev=$(git rev-parse HEAD:vendor/postgres-v17) >> $GITHUB_OUTPUT

      - name: Cache postgres v14 build
        id: cache_pg_14
        uses: actions/cache@v4

@@ -91,13 +94,6 @@ jobs:
          path: pg_install/v16
          key: v1-${{ runner.os }}-${{ runner.arch }}-${{ env.BUILD_TYPE }}-pg-${{ steps.pg_v16_rev.outputs.pg_rev }}-${{ hashFiles('Makefile') }}

-     - name: Cache postgres v17 build
-       id: cache_pg_17
-       uses: actions/cache@v4
-       with:
-         path: pg_install/v17
-         key: v1-${{ runner.os }}-${{ runner.arch }}-${{ env.BUILD_TYPE }}-pg-${{ steps.pg_v17_rev.outputs.pg_rev }}-${{ hashFiles('Makefile') }}

      - name: Set extra env for macOS
        run: |
          echo 'LDFLAGS=-L/usr/local/opt/openssl@3/lib' >> $GITHUB_ENV

@@ -125,10 +121,6 @@ jobs:
        if: steps.cache_pg_16.outputs.cache-hit != 'true'
        run: make postgres-v16 -j$(sysctl -n hw.ncpu)

-     - name: Build postgres v17
-       if: steps.cache_pg_17.outputs.cache-hit != 'true'
-       run: make postgres-v17 -j$(sysctl -n hw.ncpu)

      - name: Build neon extensions
        run: make neon-pg-ext -j$(sysctl -n hw.ncpu)

@@ -141,15 +133,214 @@ jobs:
      - name: Check that no warnings are produced
        run: ./run_clippy.sh

+ check-linux-arm-build:
+   needs: [ check-permissions, build-build-tools-image ]
+   timeout-minutes: 90
+   runs-on: [ self-hosted, dev, arm64 ]
+
+   env:
+     # Use release build only, to have less debug info around
+     # Hence keeping target/ (and general cache size) smaller
+     BUILD_TYPE: release
+     CARGO_FEATURES: --features testing
+     CARGO_FLAGS: --release
+     AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_DEV }}
+     AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_KEY_DEV }}
+
+   container:
+     image: ${{ needs.build-build-tools-image.outputs.image }}
+     credentials:
+       username: ${{ secrets.NEON_DOCKERHUB_USERNAME }}
+       password: ${{ secrets.NEON_DOCKERHUB_PASSWORD }}
+     options: --init
+
+   steps:
+     - name: Fix git ownership
+       run: |
+         # Workaround for `fatal: detected dubious ownership in repository at ...`
+         #
+         # Use both ${{ github.workspace }} and ${GITHUB_WORKSPACE} because they're different on host and in containers
+         #   Ref https://github.com/actions/checkout/issues/785
+         #
+         git config --global --add safe.directory ${{ github.workspace }}
+         git config --global --add safe.directory ${GITHUB_WORKSPACE}
+         for r in 14 15 16; do
+           git config --global --add safe.directory "${{ github.workspace }}/vendor/postgres-v$r"
+           git config --global --add safe.directory "${GITHUB_WORKSPACE}/vendor/postgres-v$r"
+         done
+
+     - name: Checkout
+       uses: actions/checkout@v4
+       with:
+         submodules: true
+         fetch-depth: 1
+
+     - name: Set pg 14 revision for caching
+       id: pg_v14_rev
+       run: echo pg_rev=$(git rev-parse HEAD:vendor/postgres-v14) >> $GITHUB_OUTPUT
+
+     - name: Set pg 15 revision for caching
+       id: pg_v15_rev
+       run: echo pg_rev=$(git rev-parse HEAD:vendor/postgres-v15) >> $GITHUB_OUTPUT
+
+     - name: Set pg 16 revision for caching
+       id: pg_v16_rev
+       run: echo pg_rev=$(git rev-parse HEAD:vendor/postgres-v16) >> $GITHUB_OUTPUT
+
+     - name: Set env variables
+       run: |
+         echo "CARGO_HOME=${GITHUB_WORKSPACE}/.cargo" >> $GITHUB_ENV
+
+     - name: Cache postgres v14 build
+       id: cache_pg_14
+       uses: actions/cache@v4
+       with:
+         path: pg_install/v14
+         key: v1-${{ runner.os }}-${{ runner.arch }}-${{ env.BUILD_TYPE }}-pg-${{ steps.pg_v14_rev.outputs.pg_rev }}-${{ hashFiles('Makefile') }}
+
+     - name: Cache postgres v15 build
+       id: cache_pg_15
+       uses: actions/cache@v4
+       with:
+         path: pg_install/v15
+         key: v1-${{ runner.os }}-${{ runner.arch }}-${{ env.BUILD_TYPE }}-pg-${{ steps.pg_v15_rev.outputs.pg_rev }}-${{ hashFiles('Makefile') }}
+
+     - name: Cache postgres v16 build
+       id: cache_pg_16
+       uses: actions/cache@v4
+       with:
+         path: pg_install/v16
+         key: v1-${{ runner.os }}-${{ runner.arch }}-${{ env.BUILD_TYPE }}-pg-${{ steps.pg_v16_rev.outputs.pg_rev }}-${{ hashFiles('Makefile') }}
+
+     - name: Build postgres v14
+       if: steps.cache_pg_14.outputs.cache-hit != 'true'
+       run: mold -run make postgres-v14 -j$(nproc)
+
+     - name: Build postgres v15
+       if: steps.cache_pg_15.outputs.cache-hit != 'true'
+       run: mold -run make postgres-v15 -j$(nproc)
+
+     - name: Build postgres v16
+       if: steps.cache_pg_16.outputs.cache-hit != 'true'
+       run: mold -run make postgres-v16 -j$(nproc)
+
+     - name: Build neon extensions
+       run: mold -run make neon-pg-ext -j$(nproc)
+
+     - name: Build walproposer-lib
+       run: mold -run make walproposer-lib -j$(nproc)
+
+     - name: Run cargo build
+       run: |
+         mold -run cargo build --locked $CARGO_FLAGS $CARGO_FEATURES --bins --tests
+
+     - name: Run cargo test
+       env:
+         NEXTEST_RETRIES: 3
+       run: |
+         cargo nextest run $CARGO_FEATURES
+
+         # Run separate tests for real S3
+         export ENABLE_REAL_S3_REMOTE_STORAGE=nonempty
+         export REMOTE_STORAGE_S3_BUCKET=neon-github-ci-tests
+         export REMOTE_STORAGE_S3_REGION=eu-central-1
+         # Avoid `$CARGO_FEATURES` since there's no `testing` feature in the e2e tests now
+         cargo nextest run --package remote_storage --test test_real_s3
+
+         # Run separate tests for real Azure Blob Storage
+         # XXX: replace region with `eu-central-1`-like region
+         export ENABLE_REAL_AZURE_REMOTE_STORAGE=y
+         export AZURE_STORAGE_ACCOUNT="${{ secrets.AZURE_STORAGE_ACCOUNT_DEV }}"
+         export AZURE_STORAGE_ACCESS_KEY="${{ secrets.AZURE_STORAGE_ACCESS_KEY_DEV }}"
+         export REMOTE_STORAGE_AZURE_CONTAINER="${{ vars.REMOTE_STORAGE_AZURE_CONTAINER }}"
+         export REMOTE_STORAGE_AZURE_REGION="${{ vars.REMOTE_STORAGE_AZURE_REGION }}"
+         # Avoid `$CARGO_FEATURES` since there's no `testing` feature in the e2e tests now
+         cargo nextest run --package remote_storage --test test_real_azure
+
+ check-codestyle-rust-arm:
+   needs: [ check-permissions, build-build-tools-image ]
+   timeout-minutes: 90
+   runs-on: [ self-hosted, dev, arm64 ]
+
+   container:
+     image: ${{ needs.build-build-tools-image.outputs.image }}
+     credentials:
+       username: ${{ secrets.NEON_DOCKERHUB_USERNAME }}
+       password: ${{ secrets.NEON_DOCKERHUB_PASSWORD }}
+     options: --init
+
+   steps:
+     - name: Fix git ownership
+       run: |
+         # Workaround for `fatal: detected dubious ownership in repository at ...`
+         #
+         # Use both ${{ github.workspace }} and ${GITHUB_WORKSPACE} because they're different on host and in containers
+         #   Ref https://github.com/actions/checkout/issues/785
+         #
+         git config --global --add safe.directory ${{ github.workspace }}
+         git config --global --add safe.directory ${GITHUB_WORKSPACE}
+         for r in 14 15 16; do
+           git config --global --add safe.directory "${{ github.workspace }}/vendor/postgres-v$r"
+           git config --global --add safe.directory "${GITHUB_WORKSPACE}/vendor/postgres-v$r"
+         done
+
+     - name: Checkout
+       uses: actions/checkout@v4
+       with:
+         submodules: true
+         fetch-depth: 1
+
+     # Some of our rust modules use FFI and need those to be checked
+     - name: Get postgres headers
+       run: make postgres-headers -j$(nproc)
+
+     # cargo hack runs the given cargo subcommand (clippy in this case) for all feature combinations.
+     # This will catch compiler & clippy warnings in all feature combinations.
+     # TODO: use cargo hack for build and test as well, but, that's quite expensive.
+     # NB: keep clippy args in sync with ./run_clippy.sh
+     - run: |
+         CLIPPY_COMMON_ARGS="$( source .neon_clippy_args; echo "$CLIPPY_COMMON_ARGS")"
+         if [ "$CLIPPY_COMMON_ARGS" = "" ]; then
+           echo "No clippy args found in .neon_clippy_args"
+           exit 1
+         fi
+         echo "CLIPPY_COMMON_ARGS=${CLIPPY_COMMON_ARGS}" >> $GITHUB_ENV
+     - name: Run cargo clippy (debug)
+       run: cargo hack --feature-powerset clippy $CLIPPY_COMMON_ARGS
+     - name: Run cargo clippy (release)
+       run: cargo hack --feature-powerset clippy --release $CLIPPY_COMMON_ARGS
+
+     - name: Check documentation generation
+       run: cargo doc --workspace --no-deps --document-private-items
+       env:
+         RUSTDOCFLAGS: "-Dwarnings -Arustdoc::private_intra_doc_links"
+
+     # Use `${{ !cancelled() }}` to run quck tests after the longer clippy run
+     - name: Check formatting
+       if: ${{ !cancelled() }}
+       run: cargo fmt --all -- --check
+
+     # https://github.com/facebookincubator/cargo-guppy/tree/bec4e0eb29dcd1faac70b1b5360267fc02bf830e/tools/cargo-hakari#2-keep-the-workspace-hack-up-to-date-in-ci
+     - name: Check rust dependencies
+       if: ${{ !cancelled() }}
+       run: |
+         cargo hakari generate --diff  # workspace-hack Cargo.toml is up-to-date
+         cargo hakari manage-deps --dry-run  # all workspace crates depend on workspace-hack
+
+     # https://github.com/EmbarkStudios/cargo-deny
+     - name: Check rust licenses/bans/advisories/sources
+       if: ${{ !cancelled() }}
+       run: cargo deny check

  gather-rust-build-stats:
    needs: [ check-permissions, build-build-tools-image ]
    if: |
      contains(github.event.pull_request.labels.*.name, 'run-extra-build-stats') ||
      contains(github.event.pull_request.labels.*.name, 'run-extra-build-*') ||
      github.ref_name == 'main'
-   runs-on: [ self-hosted, large ]
+   runs-on: [ self-hosted, gen3, large ]
    container:
-     image: ${{ needs.build-build-tools-image.outputs.image }}-bookworm
+     image: ${{ needs.build-build-tools-image.outputs.image }}
      credentials:
        username: ${{ secrets.NEON_DOCKERHUB_USERNAME }}
        password: ${{ secrets.NEON_DOCKERHUB_PASSWORD }}

@@ -157,6 +348,8 @@ jobs:

    env:
      BUILD_TYPE: release
+     # remove the cachepot wrapper and build without crate caches
+     RUSTC_WRAPPER: ""
      # build with incremental compilation produce partial results
      # so do not attempt to cache this build, also disable the incremental compilation
      CARGO_INCREMENTAL: 0

@@ -166,6 +359,7 @@ jobs:
        uses: actions/checkout@v4
        with:
          submodules: true
+         fetch-depth: 1

      # Some of our rust modules use FFI and need those to be checked
      - name: Get postgres headers

@@ -175,7 +369,7 @@ jobs:
        run: make walproposer-lib -j$(nproc)

      - name: Produce the build stats
-       run: PQ_LIB_DIR=$(pwd)/pg_install/v17/lib cargo build --all --release --timings -j$(nproc)
+       run: cargo build --all --release --timings

      - name: Upload the build stats
        id: upload-stats

@@ -195,8 +389,6 @@ jobs:
        REPORT_URL: ${{ steps.upload-stats.outputs.report-url }}
        SHA: ${{ github.event.pull_request.head.sha || github.sha }}
      with:
-       # Retry script for 5XX server errors: https://github.com/actions/github-script#retries
-       retries: 5
        script: |
          const { REPORT_URL, SHA } = process.env
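The new ARM job runs the remote_storage integration tests against real S3 and Azure by exporting the ENABLE_REAL_* switches before invoking nextest. Locally, the S3 half reduces to roughly the following (a sketch; the bucket and region are the values used in the workflow, and valid AWS credentials are assumed to already be in the environment):

    export ENABLE_REAL_S3_REMOTE_STORAGE=nonempty
    export REMOTE_STORAGE_S3_BUCKET=neon-github-ci-tests
    export REMOTE_STORAGE_S3_REGION=eu-central-1
    cargo nextest run --package remote_storage --test test_real_s3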
155  .github/workflows/periodic_pagebench.yml
@@ -1,155 +0,0 @@
name: Periodic pagebench performance test on dedicated EC2 machine in eu-central-1 region

on:
schedule:
# * is a special character in YAML so you have to quote this string
# ┌───────────── minute (0 - 59)
# │ ┌───────────── hour (0 - 23)
# │ │ ┌───────────── day of the month (1 - 31)
# │ │ │ ┌───────────── month (1 - 12 or JAN-DEC)
# │ │ │ │ ┌───────────── day of the week (0 - 6 or SUN-SAT)
- cron: '0 18 * * *' # Runs at 6 PM UTC every day
workflow_dispatch: # Allows manual triggering of the workflow
inputs:
commit_hash:
type: string
description: 'The long neon repo commit hash for the system under test (pageserver) to be tested.'
required: false
default: ''

defaults:
run:
shell: bash -euo pipefail {0}

concurrency:
group: ${{ github.workflow }}
cancel-in-progress: false

jobs:
trigger_bench_on_ec2_machine_in_eu_central_1:
runs-on: [ self-hosted, small ]
container:
image: neondatabase/build-tools:pinned-bookworm
credentials:
username: ${{ secrets.NEON_DOCKERHUB_USERNAME }}
password: ${{ secrets.NEON_DOCKERHUB_PASSWORD }}
options: --init
timeout-minutes: 360 # Set the timeout to 6 hours
env:
API_KEY: ${{ secrets.PERIODIC_PAGEBENCH_EC2_RUNNER_API_KEY }}
RUN_ID: ${{ github.run_id }}
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_EC2_US_TEST_RUNNER_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY : ${{ secrets.AWS_EC2_US_TEST_RUNNER_ACCESS_KEY_SECRET }}
AWS_DEFAULT_REGION : "eu-central-1"
AWS_INSTANCE_ID : "i-02a59a3bf86bc7e74"
steps:
# we don't need the neon source code because we run everything remotely
# however we still need the local github actions to run the allure step below
- uses: actions/checkout@v4

- name: Show my own (github runner) external IP address - usefull for IP allowlisting
run: curl https://ifconfig.me

- name: Start EC2 instance and wait for the instance to boot up
run: |
aws ec2 start-instances --instance-ids $AWS_INSTANCE_ID
aws ec2 wait instance-running --instance-ids $AWS_INSTANCE_ID
sleep 60 # sleep some time to allow cloudinit and our API server to start up

- name: Determine public IP of the EC2 instance and set env variable EC2_MACHINE_URL_US
run: |
public_ip=$(aws ec2 describe-instances --instance-ids $AWS_INSTANCE_ID --query 'Reservations[*].Instances[*].PublicIpAddress' --output text)
echo "Public IP of the EC2 instance: $public_ip"
echo "EC2_MACHINE_URL_US=https://${public_ip}:8443" >> $GITHUB_ENV

- name: Determine commit hash
env:
INPUT_COMMIT_HASH: ${{ github.event.inputs.commit_hash }}
run: |
if [ -z "$INPUT_COMMIT_HASH" ]; then
echo "COMMIT_HASH=$(curl -s https://api.github.com/repos/neondatabase/neon/commits/main | jq -r '.sha')" >> $GITHUB_ENV
else
echo "COMMIT_HASH=$INPUT_COMMIT_HASH" >> $GITHUB_ENV
fi

- name: Start Bench with run_id
run: |
curl -k -X 'POST' \
"${EC2_MACHINE_URL_US}/start_test/${GITHUB_RUN_ID}" \
-H 'accept: application/json' \
-H 'Content-Type: application/json' \
-H "Authorization: Bearer $API_KEY" \
-d "{\"neonRepoCommitHash\": \"${COMMIT_HASH}\"}"

- name: Poll Test Status
id: poll_step
run: |
status=""
while [[ "$status" != "failure" && "$status" != "success" ]]; do
response=$(curl -k -X 'GET' \
"${EC2_MACHINE_URL_US}/test_status/${GITHUB_RUN_ID}" \
-H 'accept: application/json' \
-H "Authorization: Bearer $API_KEY")
echo "Response: $response"
set +x
status=$(echo $response | jq -r '.status')
echo "Test status: $status"
if [[ "$status" == "failure" ]]; then
echo "Test failed"
exit 1 # Fail the job step if status is failure
elif [[ "$status" == "success" || "$status" == "null" ]]; then
break
elif [[ "$status" == "too_many_runs" ]]; then
echo "Too many runs already running"
echo "too_many_runs=true" >> "$GITHUB_OUTPUT"
exit 1
fi

sleep 60 # Poll every 60 seconds
done

- name: Retrieve Test Logs
if: always() && steps.poll_step.outputs.too_many_runs != 'true'
run: |
curl -k -X 'GET' \
"${EC2_MACHINE_URL_US}/test_log/${GITHUB_RUN_ID}" \
-H 'accept: application/gzip' \
-H "Authorization: Bearer $API_KEY" \
--output "test_log_${GITHUB_RUN_ID}.gz"

- name: Unzip Test Log and Print it into this job's log
if: always() && steps.poll_step.outputs.too_many_runs != 'true'
run: |
gzip -d "test_log_${GITHUB_RUN_ID}.gz"
cat "test_log_${GITHUB_RUN_ID}"

- name: Create Allure report
env:
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_DEV }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_KEY_DEV }}
if: ${{ !cancelled() }}
uses: ./.github/actions/allure-report-generate

- name: Post to a Slack channel
if: ${{ github.event.schedule && failure() }}
uses: slackapi/slack-github-action@v1
with:
channel-id: "C06KHQVQ7U3" # on-call-qa-staging-stream
slack-message: "Periodic pagebench testing on dedicated hardware: ${{ job.status }}\n${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"
env:
SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}

- name: Cleanup Test Resources
if: always()
run: |
curl -k -X 'POST' \
"${EC2_MACHINE_URL_US}/cleanup_test/${GITHUB_RUN_ID}" \
-H 'accept: application/json' \
-H "Authorization: Bearer $API_KEY" \
-d ''

- name: Stop EC2 instance and wait for the instance to be stopped
if: always() && steps.poll_step.outputs.too_many_runs != 'true'
run: |
aws ec2 stop-instances --instance-ids $AWS_INSTANCE_ID
aws ec2 wait instance-stopped --instance-ids $AWS_INSTANCE_ID
205  .github/workflows/pg-clients.yml
@@ -1,205 +0,0 @@
name: Test Postgres client libraries

on:
schedule:
# * is a special character in YAML so you have to quote this string
# ┌───────────── minute (0 - 59)
# │ ┌───────────── hour (0 - 23)
# │ │ ┌───────────── day of the month (1 - 31)
# │ │ │ ┌───────────── month (1 - 12 or JAN-DEC)
# │ │ │ │ ┌───────────── day of the week (0 - 6 or SUN-SAT)
- cron: '23 02 * * *' # run once a day, timezone is utc
pull_request:
paths:
- '.github/workflows/pg-clients.yml'
- 'test_runner/pg_clients/**'
- 'test_runner/logical_repl/**'
- 'poetry.lock'
workflow_dispatch:

concurrency:
group: ${{ github.workflow }}-${{ github.ref_name }}
cancel-in-progress: ${{ github.event_name == 'pull_request' }}

defaults:
run:
shell: bash -euxo pipefail {0}

env:
DEFAULT_PG_VERSION: 16
PLATFORM: neon-captest-new
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_DEV }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_KEY_DEV }}
AWS_DEFAULT_REGION: eu-central-1

jobs:
check-permissions:
if: ${{ !contains(github.event.pull_request.labels.*.name, 'run-no-ci') }}
uses: ./.github/workflows/check-permissions.yml
with:
github-event-name: ${{ github.event_name }}

build-build-tools-image:
needs: [ check-permissions ]
uses: ./.github/workflows/build-build-tools-image.yml
secrets: inherit

test-logical-replication:
needs: [ build-build-tools-image ]
runs-on: ubuntu-22.04

container:
image: ${{ needs.build-build-tools-image.outputs.image }}-bookworm
credentials:
username: ${{ secrets.NEON_DOCKERHUB_USERNAME }}
password: ${{ secrets.NEON_DOCKERHUB_PASSWORD }}
options: --init --user root
services:
clickhouse:
image: clickhouse/clickhouse-server:24.6.3.64
ports:
- 9000:9000
- 8123:8123
zookeeper:
image: quay.io/debezium/zookeeper:2.7
ports:
- 2181:2181
kafka:
image: quay.io/debezium/kafka:2.7
env:
ZOOKEEPER_CONNECT: "zookeeper:2181"
KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:9092
KAFKA_BROKER_ID: 1
KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
KAFKA_JMX_PORT: 9991
ports:
- 9092:9092
debezium:
image: quay.io/debezium/connect:2.7
env:
BOOTSTRAP_SERVERS: kafka:9092
GROUP_ID: 1
CONFIG_STORAGE_TOPIC: debezium-config
OFFSET_STORAGE_TOPIC: debezium-offset
STATUS_STORAGE_TOPIC: debezium-status
DEBEZIUM_CONFIG_CONNECTOR_CLASS: io.debezium.connector.postgresql.PostgresConnector
ports:
- 8083:8083
steps:
- uses: actions/checkout@v4

- name: Download Neon artifact
uses: ./.github/actions/download
with:
name: neon-${{ runner.os }}-${{ runner.arch }}-release-artifact
path: /tmp/neon/
prefix: latest

- name: Create Neon Project
id: create-neon-project
uses: ./.github/actions/neon-project-create
with:
api_key: ${{ secrets.NEON_STAGING_API_KEY }}
postgres_version: ${{ env.DEFAULT_PG_VERSION }}

- name: Run tests
uses: ./.github/actions/run-python-test-set
with:
build_type: remote
test_selection: logical_repl
run_in_parallel: false
extra_params: -m remote_cluster
pg_version: ${{ env.DEFAULT_PG_VERSION }}
env:
BENCHMARK_CONNSTR: ${{ steps.create-neon-project.outputs.dsn }}

- name: Delete Neon Project
if: always()
uses: ./.github/actions/neon-project-delete
with:
project_id: ${{ steps.create-neon-project.outputs.project_id }}
api_key: ${{ secrets.NEON_STAGING_API_KEY }}

- name: Create Allure report
if: ${{ !cancelled() }}
id: create-allure-report
uses: ./.github/actions/allure-report-generate
with:
store-test-results-into-db: true
env:
REGRESS_TEST_RESULT_CONNSTR_NEW: ${{ secrets.REGRESS_TEST_RESULT_CONNSTR_NEW }}

- name: Post to a Slack channel
if: github.event.schedule && failure()
uses: slackapi/slack-github-action@v1
with:
channel-id: "C06KHQVQ7U3" # on-call-qa-staging-stream
slack-message: |
Testing the logical replication: <${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}|${{ job.status }}> (<${{ steps.create-allure-report.outputs.report-url }}|test report>)
env:
SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}

test-postgres-client-libs:
needs: [ build-build-tools-image ]
runs-on: ubuntu-22.04

container:
image: ${{ needs.build-build-tools-image.outputs.image }}-bookworm
credentials:
username: ${{ secrets.NEON_DOCKERHUB_USERNAME }}
password: ${{ secrets.NEON_DOCKERHUB_PASSWORD }}
options: --init --user root

steps:
- uses: actions/checkout@v4

- name: Download Neon artifact
uses: ./.github/actions/download
with:
name: neon-${{ runner.os }}-${{ runner.arch }}-release-artifact
path: /tmp/neon/
prefix: latest

- name: Create Neon Project
id: create-neon-project
uses: ./.github/actions/neon-project-create
with:
api_key: ${{ secrets.NEON_STAGING_API_KEY }}
postgres_version: ${{ env.DEFAULT_PG_VERSION }}

- name: Run tests
uses: ./.github/actions/run-python-test-set
with:
build_type: remote
test_selection: pg_clients
run_in_parallel: false
extra_params: -m remote_cluster
pg_version: ${{ env.DEFAULT_PG_VERSION }}
env:
BENCHMARK_CONNSTR: ${{ steps.create-neon-project.outputs.dsn }}

- name: Delete Neon Project
if: always()
uses: ./.github/actions/neon-project-delete
with:
project_id: ${{ steps.create-neon-project.outputs.project_id }}
api_key: ${{ secrets.NEON_STAGING_API_KEY }}

- name: Create Allure report
if: ${{ !cancelled() }}
id: create-allure-report
uses: ./.github/actions/allure-report-generate
with:
store-test-results-into-db: true
env:
REGRESS_TEST_RESULT_CONNSTR_NEW: ${{ secrets.REGRESS_TEST_RESULT_CONNSTR_NEW }}

- name: Post to a Slack channel
if: github.event.schedule && failure()
uses: slackapi/slack-github-action@v1
with:
channel-id: "C06KHQVQ7U3" # on-call-qa-staging-stream
slack-message: |
Testing Postgres clients: <${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}|${{ job.status }}> (<${{ steps.create-allure-report.outputs.report-url }}|test report>)
env:
SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}
98  .github/workflows/pg_clients.yml
@@ -0,0 +1,98 @@
name: Test Postgres client libraries

on:
schedule:
# * is a special character in YAML so you have to quote this string
# ┌───────────── minute (0 - 59)
# │ ┌───────────── hour (0 - 23)
# │ │ ┌───────────── day of the month (1 - 31)
# │ │ │ ┌───────────── month (1 - 12 or JAN-DEC)
# │ │ │ │ ┌───────────── day of the week (0 - 6 or SUN-SAT)
- cron: '23 02 * * *' # run once a day, timezone is utc

workflow_dispatch:

concurrency:
# Allow only one workflow per any non-`main` branch.
group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
cancel-in-progress: true

jobs:
test-postgres-client-libs:
# TODO: switch to gen2 runner, requires docker
runs-on: [ ubuntu-latest ]

env:
DEFAULT_PG_VERSION: 14
TEST_OUTPUT: /tmp/test_output

steps:
- name: Checkout
uses: actions/checkout@v4

- uses: actions/setup-python@v4
with:
python-version: 3.9

- name: Install Poetry
uses: snok/install-poetry@v1

- name: Cache poetry deps
uses: actions/cache@v4
with:
path: ~/.cache/pypoetry/virtualenvs
key: v2-${{ runner.os }}-python-deps-ubunutu-latest-${{ hashFiles('poetry.lock') }}

- name: Install Python deps
shell: bash -euxo pipefail {0}
run: ./scripts/pysync

- name: Create Neon Project
id: create-neon-project
uses: ./.github/actions/neon-project-create
with:
api_key: ${{ secrets.NEON_STAGING_API_KEY }}
postgres_version: ${{ env.DEFAULT_PG_VERSION }}

- name: Run pytest
env:
REMOTE_ENV: 1
BENCHMARK_CONNSTR: ${{ steps.create-neon-project.outputs.dsn }}
POSTGRES_DISTRIB_DIR: /tmp/neon/pg_install
shell: bash -euxo pipefail {0}
run: |
# Test framework expects we have psql binary;
# but since we don't really need it in this test, let's mock it
mkdir -p "$POSTGRES_DISTRIB_DIR/v${DEFAULT_PG_VERSION}/bin" && touch "$POSTGRES_DISTRIB_DIR/v${DEFAULT_PG_VERSION}/bin/psql";
./scripts/pytest \
--junitxml=$TEST_OUTPUT/junit.xml \
--tb=short \
--verbose \
-m "remote_cluster" \
-rA "test_runner/pg_clients"

- name: Delete Neon Project
if: ${{ always() }}
uses: ./.github/actions/neon-project-delete
with:
project_id: ${{ steps.create-neon-project.outputs.project_id }}
api_key: ${{ secrets.NEON_STAGING_API_KEY }}

# We use GitHub's action upload-artifact because `ubuntu-latest` doesn't have configured AWS CLI.
# It will be fixed after switching to gen2 runner
- name: Upload python test logs
if: always()
uses: actions/upload-artifact@v4
with:
retention-days: 7
name: python-test-pg_clients-${{ runner.os }}-stage-logs
path: ${{ env.TEST_OUTPUT }}

- name: Post to a Slack channel
if: ${{ github.event.schedule && failure() }}
uses: slackapi/slack-github-action@v1
with:
channel-id: "C033QLM5P7D" # dev-staging-stream
slack-message: "Testing Postgres clients: ${{ job.status }}\n${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"
env:
SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}
78  .github/workflows/pin-build-tools-image.yml
@@ -7,20 +7,12 @@ on:
description: 'Source tag'
required: true
type: string
force:
description: 'Force the image to be pinned'
default: false
type: boolean
workflow_call:
inputs:
from-tag:
description: 'Source tag'
required: true
type: string
force:
description: 'Force the image to be pinned'
default: false
type: boolean

defaults:
run:
@@ -28,20 +20,16 @@ defaults:

concurrency:
group: pin-build-tools-image-${{ inputs.from-tag }}
cancel-in-progress: false

# No permission for GITHUB_TOKEN by default; the **minimal required** set of permissions should be granted in each job.
permissions: {}

env:
FROM_TAG: ${{ inputs.from-tag }}
TO_TAG: pinned

jobs:
check-manifests:
tag-image:
runs-on: ubuntu-22.04
runs-on: ubuntu-latest
outputs:
skip: ${{ steps.check-manifests.outputs.skip }}
env:
FROM_TAG: ${{ inputs.from-tag }}
TO_TAG: pinned

steps:
- name: Check if we really need to pin the image
@@ -58,57 +46,27 @@ jobs:

echo "skip=${skip}" | tee -a $GITHUB_OUTPUT

tag-image:
needs: check-manifests

# use format(..) to catch both inputs.force = true AND inputs.force = 'true'
if: needs.check-manifests.outputs.skip == 'false' || format('{0}', inputs.force) == 'true'

runs-on: ubuntu-22.04

permissions:
id-token: write # for `azure/login`

steps:
- uses: docker/login-action@v3
if: steps.check-manifests.outputs.skip == 'false'
with:
username: ${{ secrets.NEON_DOCKERHUB_USERNAME }}
password: ${{ secrets.NEON_DOCKERHUB_PASSWORD }}

- name: Tag build-tools with `${{ env.TO_TAG }}` in Docker Hub
if: steps.check-manifests.outputs.skip == 'false'
run: |
docker buildx imagetools create -t neondatabase/build-tools:${TO_TAG} \
neondatabase/build-tools:${FROM_TAG}

- uses: docker/login-action@v3
if: steps.check-manifests.outputs.skip == 'false'
with:
registry: 369495373322.dkr.ecr.eu-central-1.amazonaws.com
username: ${{ secrets.AWS_ACCESS_KEY_DEV }}
password: ${{ secrets.AWS_SECRET_KEY_DEV }}

- name: Azure login
- name: Tag build-tools with `${{ env.TO_TAG }}` in ECR
uses: azure/login@6c251865b4e6290e7b78be643ea2d005bc51f69a # @v2.1.1
if: steps.check-manifests.outputs.skip == 'false'
with:
client-id: ${{ secrets.AZURE_DEV_CLIENT_ID }}
tenant-id: ${{ secrets.AZURE_TENANT_ID }}
subscription-id: ${{ secrets.AZURE_DEV_SUBSCRIPTION_ID }}

- name: Login to ACR
run: |
az acr login --name=neoneastus2
docker buildx imagetools create -t 369495373322.dkr.ecr.eu-central-1.amazonaws.com/build-tools:${TO_TAG} \
neondatabase/build-tools:${FROM_TAG}
- name: Tag build-tools with `${{ env.TO_TAG }}` in Docker Hub, ECR, and ACR
env:
DEFAULT_DEBIAN_VERSION: bookworm
run: |
for debian_version in bullseye bookworm; do
tags=()

tags+=("-t" "neondatabase/build-tools:${TO_TAG}-${debian_version}")
tags+=("-t" "369495373322.dkr.ecr.eu-central-1.amazonaws.com/build-tools:${TO_TAG}-${debian_version}")
tags+=("-t" "neoneastus2.azurecr.io/neondatabase/build-tools:${TO_TAG}-${debian_version}")

if [ "${debian_version}" == "${DEFAULT_DEBIAN_VERSION}" ]; then
tags+=("-t" "neondatabase/build-tools:${TO_TAG}")
tags+=("-t" "369495373322.dkr.ecr.eu-central-1.amazonaws.com/build-tools:${TO_TAG}")
tags+=("-t" "neoneastus2.azurecr.io/neondatabase/build-tools:${TO_TAG}")
fi

docker buildx imagetools create "${tags[@]}" \
neondatabase/build-tools:${FROM_TAG}-${debian_version}
done
95  .github/workflows/pre-merge-checks.yml
@@ -1,95 +0,0 @@
name: Pre-merge checks

on:
merge_group:
branches:
- main

defaults:
run:
shell: bash -euxo pipefail {0}

# No permission for GITHUB_TOKEN by default; the **minimal required** set of permissions should be granted in each job.
permissions: {}

jobs:
get-changed-files:
runs-on: ubuntu-22.04
outputs:
python-changed: ${{ steps.python-src.outputs.any_changed }}
steps:
- uses: actions/checkout@v4
- uses: tj-actions/changed-files@4edd678ac3f81e2dc578756871e4d00c19191daf # v45.0.4
id: python-src
with:
files: |
.github/workflows/_check-codestyle-python.yml
.github/workflows/build-build-tools-image.yml
.github/workflows/pre-merge-checks.yml
**/**.py
poetry.lock
pyproject.toml

- name: PRINT ALL CHANGED FILES FOR DEBUG PURPOSES
env:
PYTHON_CHANGED_FILES: ${{ steps.python-src.outputs.all_changed_files }}
run: |
echo "${PYTHON_CHANGED_FILES}"

build-build-tools-image:
if: needs.get-changed-files.outputs.python-changed == 'true'
needs: [ get-changed-files ]
uses: ./.github/workflows/build-build-tools-image.yml
with:
# Build only one combination to save time
archs: '["x64"]'
debians: '["bookworm"]'
secrets: inherit

check-codestyle-python:
if: needs.get-changed-files.outputs.python-changed == 'true'
needs: [ get-changed-files, build-build-tools-image ]
uses: ./.github/workflows/_check-codestyle-python.yml
with:
# `-bookworm-x64` suffix should match the combination in `build-build-tools-image`
build-tools-image: ${{ needs.build-build-tools-image.outputs.image }}-bookworm-x64
secrets: inherit

# To get items from the merge queue merged into main we need to satisfy "Status checks that are required".
# Currently we require 2 jobs (checks with exact name):
# - conclusion
# - neon-cloud-e2e
conclusion:
if: always()
permissions:
statuses: write # for `github.repos.createCommitStatus(...)`
needs:
- get-changed-files
- check-codestyle-python
runs-on: ubuntu-22.04
steps:
- name: Create fake `neon-cloud-e2e` check
uses: actions/github-script@v7
with:
# Retry script for 5XX server errors: https://github.com/actions/github-script#retries
retries: 5
script: |
const { repo, owner } = context.repo;
const targetUrl = `${context.serverUrl}/${owner}/${repo}/actions/runs/${context.runId}`;

await github.rest.repos.createCommitStatus({
owner: owner,
repo: repo,
sha: context.sha,
context: `neon-cloud-e2e`,
state: `success`,
target_url: targetUrl,
description: `fake check for merge queue`,
});

- name: Fail the job if any of the dependencies do not succeed or skipped
run: exit 1
if: |
(contains(needs.check-codestyle-python.result, 'skipped') && needs.get-changed-files.outputs.python-changed == 'true')
|| contains(needs.*.result, 'failure')
|| contains(needs.*.result, 'cancelled')
2  .github/workflows/release-notify.yml
@@ -19,7 +19,7 @@ on:

jobs:
notify:
runs-on: ubuntu-22.04
runs-on: [ ubuntu-latest ]

steps:
- uses: neondatabase/dev-actions/release-pr-notify@main
95  .github/workflows/release.yml
@@ -15,10 +15,6 @@ on:
type: boolean
description: 'Create Proxy release PR'
required: false
create-compute-release-branch:
type: boolean
description: 'Create Compute release PR'
required: false

# No permission for GITHUB_TOKEN by default; the **minimal required** set of permissions should be granted in each job.
permissions: {}
@@ -29,40 +25,79 @@ defaults:

jobs:
create-storage-release-branch:
if: ${{ github.event.schedule == '0 6 * * MON' || inputs.create-storage-release-branch }}
if: ${{ github.event.schedule == '0 6 * * MON' || format('{0}', inputs.create-storage-release-branch) == 'true' }}
runs-on: ubuntu-latest

permissions:
contents: write
contents: write # for `git push`

uses: ./.github/workflows/_create-release-pr.yml
steps:
with:
- name: Check out code
component-name: 'Storage'
uses: actions/checkout@v4
release-branch: 'release'
with:
secrets:
ref: main
ci-access-token: ${{ secrets.CI_ACCESS_TOKEN }}
- name: Set environment variables
run: |
echo "RELEASE_DATE=$(date +'%Y-%m-%d')" | tee -a $GITHUB_ENV
echo "RELEASE_BRANCH=rc/$(date +'%Y-%m-%d')" | tee -a $GITHUB_ENV

- name: Create release branch
run: git checkout -b $RELEASE_BRANCH

- name: Push new branch
run: git push origin $RELEASE_BRANCH

- name: Create pull request into release
env:
GH_TOKEN: ${{ secrets.CI_ACCESS_TOKEN }}
run: |
cat << EOF > body.md
## Release ${RELEASE_DATE}

**Please merge this Pull Request using 'Create a merge commit' button**
EOF

gh pr create --title "Release ${RELEASE_DATE}" \
--body-file "body.md" \
--head "${RELEASE_BRANCH}" \
--base "release"

create-proxy-release-branch:
if: ${{ github.event.schedule == '0 6 * * THU' || inputs.create-proxy-release-branch }}
if: ${{ github.event.schedule == '0 6 * * THU' || format('{0}', inputs.create-proxy-release-branch) == 'true' }}
runs-on: ubuntu-latest

permissions:
contents: write
contents: write # for `git push`

uses: ./.github/workflows/_create-release-pr.yml
steps:
with:
- name: Check out code
component-name: 'Proxy'
uses: actions/checkout@v4
release-branch: 'release-proxy'
with:
secrets:
ref: main
ci-access-token: ${{ secrets.CI_ACCESS_TOKEN }}

create-compute-release-branch:
- name: Set environment variables
if: inputs.create-compute-release-branch
run: |
echo "RELEASE_DATE=$(date +'%Y-%m-%d')" | tee -a $GITHUB_ENV
echo "RELEASE_BRANCH=rc/proxy/$(date +'%Y-%m-%d')" | tee -a $GITHUB_ENV

permissions:
- name: Create release branch
contents: write
run: git checkout -b $RELEASE_BRANCH

uses: ./.github/workflows/_create-release-pr.yml
- name: Push new branch
with:
run: git push origin $RELEASE_BRANCH
component-name: 'Compute'

release-branch: 'release-compute'
- name: Create pull request into release
secrets:
env:
ci-access-token: ${{ secrets.CI_ACCESS_TOKEN }}
GH_TOKEN: ${{ secrets.CI_ACCESS_TOKEN }}
run: |
cat << EOF > body.md
## Proxy release ${RELEASE_DATE}

**Please merge this Pull Request using 'Create a merge commit' button**
EOF

gh pr create --title "Proxy release ${RELEASE_DATE}" \
--body-file "body.md" \
--head "${RELEASE_BRANCH}" \
--base "release-proxy"
@@ -1,53 +0,0 @@
name: Report Workflow Stats Batch

on:
schedule:
- cron: '*/15 * * * *'
- cron: '25 0 * * *'
- cron: '25 1 * * 6'

jobs:
gh-workflow-stats-batch-2h:
name: GitHub Workflow Stats Batch 2 hours
if: github.event.schedule == '*/15 * * * *'
runs-on: ubuntu-22.04
permissions:
actions: read
steps:
- name: Export Workflow Run for the past 2 hours
uses: neondatabase/gh-workflow-stats-action@v0.2.1
with:
db_uri: ${{ secrets.GH_REPORT_STATS_DB_RW_CONNSTR }}
db_table: "gh_workflow_stats_neon"
gh_token: ${{ secrets.GITHUB_TOKEN }}
duration: '2h'

gh-workflow-stats-batch-48h:
name: GitHub Workflow Stats Batch 48 hours
if: github.event.schedule == '25 0 * * *'
runs-on: ubuntu-22.04
permissions:
actions: read
steps:
- name: Export Workflow Run for the past 48 hours
uses: neondatabase/gh-workflow-stats-action@v0.2.1
with:
db_uri: ${{ secrets.GH_REPORT_STATS_DB_RW_CONNSTR }}
db_table: "gh_workflow_stats_neon"
gh_token: ${{ secrets.GITHUB_TOKEN }}
duration: '48h'

gh-workflow-stats-batch-30d:
name: GitHub Workflow Stats Batch 30 days
if: github.event.schedule == '25 1 * * 6'
runs-on: ubuntu-22.04
permissions:
actions: read
steps:
- name: Export Workflow Run for the past 30 days
uses: neondatabase/gh-workflow-stats-action@v0.2.1
with:
db_uri: ${{ secrets.GH_REPORT_STATS_DB_RW_CONNSTR }}
db_table: "gh_workflow_stats_neon"
gh_token: ${{ secrets.GITHUB_TOKEN }}
duration: '720h'
137  .github/workflows/trigger-e2e-tests.yml
@@ -13,11 +13,13 @@ defaults:
env:
# A concurrency group that we use for e2e-tests runs, matches `concurrency.group` above with `github.repository` as a prefix
E2E_CONCURRENCY_GROUP: ${{ github.repository }}-e2e-tests-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_DEV }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_KEY_DEV }}

jobs:
cancel-previous-e2e-tests:
if: github.event_name == 'pull_request'
runs-on: ubuntu-22.04
runs-on: ubuntu-latest

steps:
- name: Cancel previous e2e-tests runs for this PR
@@ -29,13 +31,13 @@ jobs:
--field concurrency_group="${{ env.E2E_CONCURRENCY_GROUP }}"

tag:
runs-on: ubuntu-22.04
runs-on: [ ubuntu-latest ]
outputs:
build-tag: ${{ steps.build-tag.outputs.tag }}

steps:
# Need `fetch-depth: 0` to count the number of commits in the branch
- name: Checkout
- uses: actions/checkout@v4
uses: actions/checkout@v4
with:
fetch-depth: 0

@@ -51,8 +53,6 @@ jobs:
echo "tag=release-$(git rev-list --count HEAD)" | tee -a $GITHUB_OUTPUT
elif [[ "$GITHUB_REF_NAME" == "release-proxy" ]]; then
echo "tag=release-proxy-$(git rev-list --count HEAD)" >> $GITHUB_OUTPUT
elif [[ "$GITHUB_REF_NAME" == "release-compute" ]]; then
echo "tag=release-compute-$(git rev-list --count HEAD)" >> $GITHUB_OUTPUT
else
echo "GITHUB_REF_NAME (value '$GITHUB_REF_NAME') is not set to either 'main' or 'release'"
BUILD_AND_TEST_RUN_ID=$(gh run list -b $CURRENT_BRANCH -c $CURRENT_SHA -w 'Build and Test' -L 1 --json databaseId --jq '.[].databaseId')
@@ -62,93 +62,58 @@ jobs:

trigger-e2e-tests:
needs: [ tag ]
runs-on: ubuntu-22.04
runs-on: [ self-hosted, gen3, small ]
env:
EVENT_ACTION: ${{ github.event.action }}
GH_TOKEN: ${{ secrets.CI_ACCESS_TOKEN }}
TAG: ${{ needs.tag.outputs.build-tag }}
container:
image: 369495373322.dkr.ecr.eu-central-1.amazonaws.com/base:pinned
options: --init
steps:
- name: Wait for `promote-images` job to finish
- name: check if ecr image are present
# It's important to have a timeout here, the script in the step can run infinitely
timeout-minutes: 60
run: |
if [ "${GITHUB_EVENT_NAME}" != "pull_request" ] || [ "${EVENT_ACTION}" != "ready_for_review" ]; then
for REPO in neon compute-tools compute-node-v14 vm-compute-node-v14 compute-node-v15 vm-compute-node-v15 compute-node-v16 vm-compute-node-v16; do
exit 0
OUTPUT=$(aws ecr describe-images --repository-name ${REPO} --region eu-central-1 --query "imageDetails[?imageTags[?contains(@, '${TAG}')]]" --output text)
fi
if [ "$OUTPUT" == "" ]; then
echo "$REPO with image tag $TAG not found" >> $GITHUB_OUTPUT
# For PRs we use the run id as the tag
exit 1
BUILD_AND_TEST_RUN_ID=${TAG}
fi
while true; do
conclusion=$(gh run --repo ${GITHUB_REPOSITORY} view ${BUILD_AND_TEST_RUN_ID} --json jobs --jq '.jobs[] | select(.name == "promote-images") | .conclusion')
case "$conclusion" in
success)
break
;;
failure | cancelled | skipped)
echo "The 'promote-images' job didn't succeed: '${conclusion}'. Exiting..."
exit 1
;;
*)
echo "The 'promote-images' hasn't succeed yet. Waiting..."
sleep 60
;;
esac
done
done

- name: Set e2e-platforms
id: e2e-platforms
env:
PR_NUMBER: ${{ github.event.pull_request.number }}
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
# Default set of platforms to run e2e tests on
platforms='["docker", "k8s"]'

# If a PR changes anything that affects computes, add k8s-neonvm to the list of platforms.
# If the workflow run is not a pull request, add k8s-neonvm to the list.
if [ "$GITHUB_EVENT_NAME" == "pull_request" ]; then
for f in $(gh api "/repos/${GITHUB_REPOSITORY}/pulls/${PR_NUMBER}/files" --paginate --jq '.[].filename'); do
case "$f" in
# List of directories that contain code which affect compute images.
#
# This isn't exhaustive, just the paths that are most directly compute-related.
# For example, compute_ctl also depends on libs/utils, but we don't trigger
# an e2e run on that.
vendor/*|pgxn/*|compute_tools/*|libs/vm_monitor/*|compute/compute-node.Dockerfile)
platforms=$(echo "${platforms}" | jq --compact-output '. += ["k8s-neonvm"] | unique')
;;
*)
# no-op
;;
esac
done
else
platforms=$(echo "${platforms}" | jq --compact-output '. += ["k8s-neonvm"] | unique')
fi

echo "e2e-platforms=${platforms}" | tee -a $GITHUB_OUTPUT

- name: Set PR's status to pending and request a remote CI test
env:
E2E_PLATFORMS: ${{ steps.e2e-platforms.outputs.e2e-platforms }}
COMMIT_SHA: ${{ github.event.pull_request.head.sha || github.sha }}
GH_TOKEN: ${{ secrets.CI_ACCESS_TOKEN }}
run: |
REMOTE_REPO="${GITHUB_REPOSITORY_OWNER}/cloud"
# For pull requests, GH Actions set "github.sha" variable to point at a fake merge commit
# but we need to use a real sha of a latest commit in the PR's branch for the e2e job,
# to place a job run status update later.
COMMIT_SHA=${{ github.event.pull_request.head.sha }}
# For non-PR kinds of runs, the above will produce an empty variable, pick the original sha value for those
COMMIT_SHA=${COMMIT_SHA:-${{ github.sha }}}

gh api "/repos/${GITHUB_REPOSITORY}/statuses/${COMMIT_SHA}" \
REMOTE_REPO="${{ github.repository_owner }}/cloud"
--method POST \
--raw-field "state=pending" \
--raw-field "description=[$REMOTE_REPO] Remote CI job is about to start" \
--raw-field "context=neon-cloud-e2e"

gh workflow --repo ${REMOTE_REPO} \
curl -f -X POST \
run testing.yml \
https://api.github.com/repos/${{ github.repository }}/statuses/$COMMIT_SHA \
--ref "main" \
-H "Accept: application/vnd.github.v3+json" \
--raw-field "ci_job_name=neon-cloud-e2e" \
--user "${{ secrets.CI_ACCESS_TOKEN }}" \
--raw-field "commit_hash=$COMMIT_SHA" \
--data \
--raw-field "remote_repo=${GITHUB_REPOSITORY}" \
"{
--raw-field "storage_image_tag=${TAG}" \
\"state\": \"pending\",
--raw-field "compute_image_tag=${TAG}" \
\"context\": \"neon-cloud-e2e\",
--raw-field "concurrency_group=${E2E_CONCURRENCY_GROUP}" \
\"description\": \"[$REMOTE_REPO] Remote CI job is about to start\"
--raw-field "e2e-platforms=${E2E_PLATFORMS}"
}"

curl -f -X POST \
https://api.github.com/repos/$REMOTE_REPO/actions/workflows/testing.yml/dispatches \
-H "Accept: application/vnd.github.v3+json" \
--user "${{ secrets.CI_ACCESS_TOKEN }}" \
--data \
"{
\"ref\": \"main\",
\"inputs\": {
\"ci_job_name\": \"neon-cloud-e2e\",
\"commit_hash\": \"$COMMIT_SHA\",
\"remote_repo\": \"${{ github.repository }}\",
\"storage_image_tag\": \"${TAG}\",
\"compute_image_tag\": \"${TAG}\",
\"concurrency_group\": \"${{ env.E2E_CONCURRENCY_GROUP }}\"
}
}"
2  .gitignore
@@ -6,8 +6,6 @@ __pycache__/
test_output/
.vscode
.idea
*.swp
tags
neon.iml
/.neon
/integration_tests/.neon
4  .gitmodules
@@ -10,7 +10,3 @@
path = vendor/postgres-v16
url = https://github.com/neondatabase/postgres.git
branch = REL_16_STABLE_neon
[submodule "vendor/postgres-v17"]
path = vendor/postgres-v17
url = https://github.com/neondatabase/postgres.git
branch = REL_17_STABLE_neon
@@ -1,5 +1,4 @@
# * `-A unknown_lints` – do not warn about unknown lint suppressions
# that people with newer toolchains might use
# * `-D warnings` - fail on any warnings (`cargo` returns non-zero exit status)
# * `-D clippy::todo` - don't let `todo!()` slip into `main`
export CLIPPY_COMMON_ARGS="--locked --workspace --all-targets -- -A unknown_lints -D warnings"
export CLIPPY_COMMON_ARGS="--locked --workspace --all-targets -- -A unknown_lints -D warnings -D clippy::todo"
12  CODEOWNERS
@@ -1,16 +1,12 @@
/.github/ @neondatabase/developer-productivity
/compute_tools/ @neondatabase/control-plane @neondatabase/compute
/control_plane/attachment_service @neondatabase/storage
/libs/pageserver_api/ @neondatabase/storage
/libs/postgres_ffi/ @neondatabase/compute @neondatabase/storage
/libs/postgres_ffi/ @neondatabase/compute
/libs/proxy/ @neondatabase/proxy
/libs/remote_storage/ @neondatabase/storage
/libs/safekeeper_api/ @neondatabase/storage
/libs/safekeeper_api/ @neondatabase/safekeepers
/libs/vm_monitor/ @neondatabase/autoscaling
/pageserver/ @neondatabase/storage
/pgxn/ @neondatabase/compute
/pgxn/neon/ @neondatabase/compute @neondatabase/storage
/proxy/ @neondatabase/proxy
/safekeeper/ @neondatabase/storage
/safekeeper/ @neondatabase/safekeepers
/storage_controller @neondatabase/storage
/storage_scrubber @neondatabase/storage
/vendor/ @neondatabase/compute
3360  Cargo.lock
File diff suppressed because it is too large
187  Cargo.toml
@@ -3,7 +3,7 @@ resolver = "2"
members = [
"compute_tools",
"control_plane",
"control_plane/storcon_cli",
"control_plane/attachment_service",
"pageserver",
"pageserver/compaction",
"pageserver/ctl",
@@ -12,10 +12,9 @@ members = [
"proxy",
"safekeeper",
"storage_broker",
"storage_controller",
"s3_scrubber",
"storage_controller/client",
"storage_scrubber",
"workspace_hack",
"trace",
"libs/compute_api",
"libs/pageserver_api",
"libs/postgres_ffi",
@@ -33,11 +32,6 @@ members = [
"libs/postgres_ffi/wal_craft",
"libs/vm_monitor",
"libs/walproposer",
"libs/wal_decoder",
"libs/postgres_initdb",
"libs/proxy/postgres-protocol2",
"libs/proxy/postgres-types2",
"libs/proxy/tokio-postgres2",
]

[workspace.package]
@@ -46,215 +40,200 @@ license = "Apache-2.0"

## All dependency versions, used in the project
[workspace.dependencies]
ahash = "0.8"
anyhow = { version = "1.0", features = ["backtrace"] }
arc-swap = "1.6"
async-compression = { version = "0.4.0", features = ["tokio", "gzip", "zstd"] }
atomic-take = "1.1.0"
azure_core = "0.18"
azure_core = { version = "0.19", default-features = false, features = ["enable_reqwest_rustls", "hmac_rust"] }
azure_identity = "0.18"
azure_identity = { version = "0.19", default-features = false, features = ["enable_reqwest_rustls"] }
azure_storage = "0.18"
azure_storage = { version = "0.19", default-features = false, features = ["enable_reqwest_rustls"] }
azure_storage_blobs = "0.18"
azure_storage_blobs = { version = "0.19", default-features = false, features = ["enable_reqwest_rustls"] }
flate2 = "1.0.26"
async-stream = "0.3"
async-trait = "0.1"
aws-config = { version = "1.5", default-features = false, features=["rustls", "sso"] }
aws-config = { version = "1.1.4", default-features = false, features=["rustls"] }
aws-sdk-s3 = "1.52"
aws-sdk-s3 = "1.14"
aws-sdk-iam = "1.46.0"
aws-sdk-secretsmanager = { version = "1.14.0" }
aws-sdk-kms = "1.47.0"
aws-smithy-async = { version = "1.1.4", default-features = false, features=["rt-tokio"] }
aws-smithy-async = { version = "1.2.1", default-features = false, features=["rt-tokio"] }
aws-smithy-types = "1.1.4"
aws-smithy-types = "1.2"
aws-credential-types = "1.1.4"
aws-credential-types = "1.2.0"
axum = { version = "0.6.20", features = ["ws"] }
aws-sigv4 = { version = "1.2", features = ["sign-http"] }
aws-types = "1.3"
axum = { version = "0.7.5", features = ["ws"] }
base64 = "0.13.0"
bincode = "1.3"
bindgen = "0.70"
bindgen = "0.65"
bit_field = "0.10.2"
bstr = "1.0"
byteorder = "1.4"
bytes = "1.9"
bytes = "1.0"
camino = "1.1.6"
cfg-if = "1.0.0"
chrono = { version = "0.4", default-features = false, features = ["clock"] }
clap = { version = "4.0", features = ["derive", "env"] }
clap = { version = "4.0", features = ["derive"] }
comfy-table = "7.1"
comfy-table = "6.1"
const_format = "0.2"
crc32c = "0.6"
crossbeam-utils = "0.8.5"
dashmap = { version = "5.5.0", features = ["raw-api"] }
diatomic-waker = { version = "0.2.3" }
either = "1.8"
enum-map = "2.4.2"
enumset = "1.0.12"
fail = "0.5.0"
fallible-iterator = "0.2"
fs2 = "0.4.3"
framed-websockets = { version = "0.1.0", git = "https://github.com/neondatabase/framed-websockets" }
futures = "0.3"
futures-core = "0.3"
futures-util = "0.3"
git-version = "0.3"
hashbrown = "0.14"
hashbrown = "0.13"
hashlink = "0.9.1"
hashlink = "0.8.4"
hdrhistogram = "7.5.2"
hex = "0.4"
hex-literal = "0.4"
hmac = "0.12.1"
hostname = "0.4"
hostname = "0.3.1"
http = {version = "1.1.0", features = ["std"]}
http-types = { version = "2", default-features = false }
http-body-util = "0.1.2"
humantime = "2.1"
humantime-serde = "1.1.1"
hyper0 = { package = "hyper", version = "0.14" }
hyper = "0.14"
hyper = "1.4"
hyper-tungstenite = "0.11"
hyper-util = "0.1"
inotify = "0.10.2"
tokio-tungstenite = "0.21.0"
ipnet = "2.9.0"
indexmap = "2"
indoc = "2"
ipnet = "2.10.0"
itertools = "0.10"
itoa = "1.0.11"
jemalloc_pprof = "0.6"
jsonwebtoken = "9"
lasso = "0.7"
leaky-bucket = "1.0.1"
libc = "0.2"
lz4_flex = "0.11.1"
md5 = "0.7.0"
measured = { version = "0.0.22", features=["lasso"] }
memoffset = "0.8"
measured-process = { version = "0.0.22" }
native-tls = "0.2"
memoffset = "0.9"
nix = { version = "0.27", features = ["fs", "process", "socket", "signal", "poll"] }
nix = { version = "0.27", features = ["dir", "fs", "process", "socket", "signal", "poll"] }
notify = "6.0.0"
num_cpus = "1.15"
num-traits = "0.2.15"
once_cell = "1.13"
opentelemetry = "0.26"
opentelemetry = "0.20.0"
opentelemetry_sdk = "0.26"
opentelemetry-otlp = { version = "0.13.0", default_features=false, features = ["http-proto", "trace", "http", "reqwest-client"] }
opentelemetry-otlp = { version = "0.26", default-features=false, features = ["http-proto", "trace", "http", "reqwest-client"] }
opentelemetry-semantic-conventions = "0.12.0"
opentelemetry-semantic-conventions = "0.26"
parking_lot = "0.12"
parquet = { version = "53", default-features = false, features = ["zstd"] }
parquet = { version = "49.0.0", default-features = false, features = ["zstd"] }
parquet_derive = "53"
parquet_derive = "49.0.0"
pbkdf2 = { version = "0.12.1", features = ["simple", "std"] }
pin-project-lite = "0.2"
pprof = { version = "0.14", features = ["criterion", "flamegraph", "protobuf", "protobuf-codec"] }
procfs = "0.14"
procfs = "0.16"
prometheus = {version = "0.13", default_features=false, features = ["process"]} # removes protobuf dependency
prometheus = {version = "0.13", default-features=false, features = ["process"]} # removes protobuf dependency
prost = "0.11"
prost = "0.13"
rand = "0.8"
redis = { version = "0.25.2", features = ["tokio-rustls-comp", "keep-alive"] }
redis = { version = "0.24.0", features = ["tokio-rustls-comp", "keep-alive"] }
regex = "1.10.2"
reqwest = { version = "0.12", default-features = false, features = ["rustls-tls"] }
reqwest = { version = "0.11", default-features = false, features = ["rustls-tls"] }
reqwest-tracing = { version = "0.5", features = ["opentelemetry_0_26"] }
reqwest-tracing = { version = "0.4.7", features = ["opentelemetry_0_20"] }
reqwest-middleware = "0.4"
reqwest-middleware = "0.2.0"
reqwest-retry = "0.7"
reqwest-retry = "0.2.2"
routerify = "3"
rpds = "0.13"
rustc-hash = "1.1.0"
rustls = { version = "0.23.16", default-features = false }
rustls = "0.22"
rustls-pemfile = "2"
rustls-split = "0.3"
scopeguard = "1.1"
sysinfo = "0.29.2"
sd-notify = "0.4.1"
send-future = "0.1.0"
sentry = { version = "0.31", default-features = false, features = ["backtrace", "contexts", "panic", "rustls", "reqwest" ] }
sentry = { version = "0.32", default-features = false, features = ["backtrace", "contexts", "panic", "rustls", "reqwest" ] }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1"
serde_path_to_error = "0.1"
serde_with = { version = "2.0", features = [ "base64" ] }
serde_with = "2.0"
serde_assert = "0.5.0"
sha2 = "0.10.2"
signal-hook = "0.3"
smallvec = "1.11"
smol_str = { version = "0.2.0", features = ["serde"] }
socket2 = "0.5"
strum = "0.26"
strum = "0.24"
strum_macros = "0.26"
strum_macros = "0.24"
"subtle" = "2.5.0"
svg_fmt = "0.4.1"
svg_fmt = "0.4.3"
sync_wrapper = "0.1.2"
tar = "0.4"
test-context = "0.3"
task-local-extensions = "0.1.4"
test-context = "0.1"
thiserror = "1.0"
tikv-jemallocator = { version = "0.6", features = ["profiling", "stats", "unprefixed_malloc_on_supported_platforms"] }
tikv-jemallocator = "0.5"
tikv-jemalloc-ctl = { version = "0.6", features = ["stats"] }
tikv-jemalloc-ctl = "0.5"
tokio = { version = "1.17", features = ["macros"] }
tokio-epoll-uring = { git = "https://github.com/neondatabase/tokio-epoll-uring.git" , branch = "main" }
tokio-io-timeout = "1.2.0"
tokio-postgres-rustls = "0.12.0"
tokio-postgres-rustls = "0.11.0"
tokio-rustls = { version = "0.26.0", default-features = false, features = ["tls12", "ring"]}
tokio-rustls = "0.25"
tokio-stream = "0.1"
tokio-tar = "0.3"
tokio-util = { version = "0.7.10", features = ["io", "rt"] }
toml = "0.8"
toml = "0.7"
toml_edit = "0.22"
toml_edit = "0.19"
tonic = {version = "0.12.3", features = ["tls", "tls-roots"]}
tonic = {version = "0.9", features = ["tls", "tls-roots"]}
tower-service = "0.3.2"
tracing = "0.1"
tracing-error = "0.2"
tracing-error = "0.2.0"
tracing-opentelemetry = "0.27"
tracing-opentelemetry = "0.20.0"
tracing-subscriber = { version = "0.3", default-features = false, features = ["smallvec", "fmt", "tracing-log", "std", "env-filter", "json"] }
tracing-subscriber = { version = "0.3", default_features = false, features = ["smallvec", "fmt", "tracing-log", "std", "env-filter", "json"] }
try-lock = "0.2.5"
twox-hash = { version = "1.6.3", default-features = false }
typed-json = "0.1"
url = "2.2"
urlencoding = "2.1"
uuid = { version = "1.6.1", features = ["v4", "v7", "serde"] }
walkdir = "2.3.2"
rustls-native-certs = "0.8"
webpki-roots = "0.25"
x509-parser = "0.16"
x509-parser = "0.15"
whoami = "1.5.1"
zerocopy = { version = "0.7", features = ["derive"] }

## TODO replace this with tracing
env_logger = "0.10"
log = "0.4"

## Libraries from neondatabase/ git forks, ideally with changes to be upstreamed
postgres = { git = "https://github.com/neondatabase/rust-postgres.git", branch = "neon" }
postgres = { git = "https://github.com/neondatabase/rust-postgres.git", branch="neon" }
postgres-protocol = { git = "https://github.com/neondatabase/rust-postgres.git", branch = "neon" }
postgres-native-tls = { git = "https://github.com/neondatabase/rust-postgres.git", branch="neon" }
postgres-types = { git = "https://github.com/neondatabase/rust-postgres.git", branch = "neon" }
postgres-protocol = { git = "https://github.com/neondatabase/rust-postgres.git", branch="neon" }
|
||||||
tokio-postgres = { git = "https://github.com/neondatabase/rust-postgres.git", branch = "neon" }
|
postgres-types = { git = "https://github.com/neondatabase/rust-postgres.git", branch="neon" }
|
||||||
|
tokio-postgres = { git = "https://github.com/neondatabase/rust-postgres.git", branch="neon" }
|
||||||
|
|
||||||
|
## Other git libraries
|
||||||
|
heapless = { default-features=false, features=[], git = "https://github.com/japaric/heapless.git", rev = "644653bf3b831c6bb4963be2de24804acf5e5001" } # upstream release pending
|
||||||
|
|
||||||
## Local libraries
|
## Local libraries
|
||||||
compute_api = { version = "0.1", path = "./libs/compute_api/" }
|
compute_api = { version = "0.1", path = "./libs/compute_api/" }
|
||||||
consumption_metrics = { version = "0.1", path = "./libs/consumption_metrics/" }
|
consumption_metrics = { version = "0.1", path = "./libs/consumption_metrics/" }
|
||||||
metrics = { version = "0.1", path = "./libs/metrics/" }
|
metrics = { version = "0.1", path = "./libs/metrics/" }
|
||||||
pageserver = { path = "./pageserver" }
|
|
||||||
pageserver_api = { version = "0.1", path = "./libs/pageserver_api/" }
|
pageserver_api = { version = "0.1", path = "./libs/pageserver_api/" }
|
||||||
pageserver_client = { path = "./pageserver/client" }
|
pageserver_client = { path = "./pageserver/client" }
|
||||||
pageserver_compaction = { version = "0.1", path = "./pageserver/compaction/" }
|
pageserver_compaction = { version = "0.1", path = "./pageserver/compaction/" }
|
||||||
postgres_backend = { version = "0.1", path = "./libs/postgres_backend/" }
|
postgres_backend = { version = "0.1", path = "./libs/postgres_backend/" }
|
||||||
postgres_connection = { version = "0.1", path = "./libs/postgres_connection/" }
|
postgres_connection = { version = "0.1", path = "./libs/postgres_connection/" }
|
||||||
postgres_ffi = { version = "0.1", path = "./libs/postgres_ffi/" }
|
postgres_ffi = { version = "0.1", path = "./libs/postgres_ffi/" }
|
||||||
postgres_initdb = { path = "./libs/postgres_initdb" }
|
|
||||||
pq_proto = { version = "0.1", path = "./libs/pq_proto/" }
|
pq_proto = { version = "0.1", path = "./libs/pq_proto/" }
|
||||||
remote_storage = { version = "0.1", path = "./libs/remote_storage/" }
|
remote_storage = { version = "0.1", path = "./libs/remote_storage/" }
|
||||||
safekeeper_api = { version = "0.1", path = "./libs/safekeeper_api" }
|
safekeeper_api = { version = "0.1", path = "./libs/safekeeper_api" }
|
||||||
desim = { version = "0.1", path = "./libs/desim" }
|
desim = { version = "0.1", path = "./libs/desim" }
|
||||||
storage_broker = { version = "0.1", path = "./storage_broker/" } # Note: main broker code is inside the binary crate, so linking with the library shouldn't be heavy.
|
storage_broker = { version = "0.1", path = "./storage_broker/" } # Note: main broker code is inside the binary crate, so linking with the library shouldn't be heavy.
|
||||||
storage_controller_client = { path = "./storage_controller/client" }
|
|
||||||
tenant_size_model = { version = "0.1", path = "./libs/tenant_size_model/" }
|
tenant_size_model = { version = "0.1", path = "./libs/tenant_size_model/" }
|
||||||
tracing-utils = { version = "0.1", path = "./libs/tracing-utils/" }
|
tracing-utils = { version = "0.1", path = "./libs/tracing-utils/" }
|
||||||
utils = { version = "0.1", path = "./libs/utils/" }
|
utils = { version = "0.1", path = "./libs/utils/" }
|
||||||
vm_monitor = { version = "0.1", path = "./libs/vm_monitor/" }
|
vm_monitor = { version = "0.1", path = "./libs/vm_monitor/" }
|
||||||
walproposer = { version = "0.1", path = "./libs/walproposer/" }
|
walproposer = { version = "0.1", path = "./libs/walproposer/" }
|
||||||
wal_decoder = { version = "0.1", path = "./libs/wal_decoder" }
|
|
||||||
|
|
||||||
## Common library dependency
|
## Common library dependency
|
||||||
workspace_hack = { version = "0.1", path = "./workspace_hack/" }
|
workspace_hack = { version = "0.1", path = "./workspace_hack/" }
|
||||||
|
|
||||||
## Build dependencies
|
## Build dependencies
|
||||||
criterion = "0.5.1"
|
criterion = "0.5.1"
|
||||||
rcgen = "0.13"
|
rcgen = "0.12"
|
||||||
rstest = "0.18"
|
rstest = "0.18"
|
||||||
camino-tempfile = "1.0.2"
|
camino-tempfile = "1.0.2"
|
||||||
tonic-build = "0.12"
|
tonic-build = "0.9"
|
||||||
|
|
||||||
[patch.crates-io]
|
[patch.crates-io]
|
||||||
|
|
||||||
# Needed to get `tokio-postgres-rustls` to depend on our fork.
|
# This is only needed for proxy's tests.
|
||||||
tokio-postgres = { git = "https://github.com/neondatabase/rust-postgres.git", branch = "neon" }
|
# TODO: we should probably fork `tokio-postgres-rustls` instead.
|
||||||
|
tokio-postgres = { git = "https://github.com/neondatabase/rust-postgres.git", branch="neon" }
|
||||||
|
|
||||||
|
# bug fixes for UUID
|
||||||
|
parquet = { git = "https://github.com/neondatabase/arrow-rs", branch = "neon-fix-bugs" }
|
||||||
|
parquet_derive = { git = "https://github.com/neondatabase/arrow-rs", branch = "neon-fix-bugs" }
|
||||||
|
|
||||||
################# Binary contents sections
|
################# Binary contents sections
|
||||||
|
|
||||||
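A note on the `workspace_hack` entry kept on both sides above: it is the crate that cargo-hakari (installed via `cargo install cargo-hakari` in the build-tools image later in this compare) regenerates so that all workspace members agree on unified dependency features. A minimal sketch of the usual workflow, assuming the hakari configuration lives under `.config/hakari.toml` in this repository:

```sh
# Recompute workspace_hack's Cargo.toml after dependency changes
cargo hakari generate

# Add the workspace_hack dependency to any workspace member that is missing it
cargo hakari manage-deps

# CI-style check that the checked-in workspace_hack crate is still up to date
cargo hakari verify
```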
Dockerfile
@@ -5,10 +5,6 @@
 ARG REPOSITORY=neondatabase
 ARG IMAGE=build-tools
 ARG TAG=pinned
-ARG DEFAULT_PG_VERSION=17
-ARG STABLE_PG_VERSION=16
-ARG DEBIAN_VERSION=bookworm
-ARG DEBIAN_FLAVOR=${DEBIAN_VERSION}-slim

 # Build Postgres
 FROM $REPOSITORY/$IMAGE:$TAG AS pg-build
@@ -17,12 +13,11 @@ WORKDIR /home/nonroot
 COPY --chown=nonroot vendor/postgres-v14 vendor/postgres-v14
 COPY --chown=nonroot vendor/postgres-v15 vendor/postgres-v15
 COPY --chown=nonroot vendor/postgres-v16 vendor/postgres-v16
-COPY --chown=nonroot vendor/postgres-v17 vendor/postgres-v17
 COPY --chown=nonroot pgxn pgxn
 COPY --chown=nonroot Makefile Makefile
 COPY --chown=nonroot scripts/ninstall.sh scripts/ninstall.sh

-ENV BUILD_TYPE=release
+ENV BUILD_TYPE release
 RUN set -e \
 && mold -run make -j $(nproc) -s neon-pg-ext \
 && rm -rf pg_install/build \
@@ -33,19 +28,26 @@ FROM $REPOSITORY/$IMAGE:$TAG AS build
 WORKDIR /home/nonroot
 ARG GIT_VERSION=local
 ARG BUILD_TAG
-ARG STABLE_PG_VERSION
+# Enable https://github.com/paritytech/cachepot to cache Rust crates' compilation results in Docker builds.
+# Set up cachepot to use an AWS S3 bucket for cache results, to reuse it between `docker build` invocations.
+# cachepot falls back to local filesystem if S3 is misconfigured, not failing the build
+ARG RUSTC_WRAPPER=cachepot
+ENV AWS_REGION=eu-central-1
+ENV CACHEPOT_S3_KEY_PREFIX=cachepot
+ARG CACHEPOT_BUCKET=neon-github-dev
+#ARG AWS_ACCESS_KEY_ID
+#ARG AWS_SECRET_ACCESS_KEY

 COPY --from=pg-build /home/nonroot/pg_install/v14/include/postgresql/server pg_install/v14/include/postgresql/server
 COPY --from=pg-build /home/nonroot/pg_install/v15/include/postgresql/server pg_install/v15/include/postgresql/server
 COPY --from=pg-build /home/nonroot/pg_install/v16/include/postgresql/server pg_install/v16/include/postgresql/server
-COPY --from=pg-build /home/nonroot/pg_install/v17/include/postgresql/server pg_install/v17/include/postgresql/server
-COPY --from=pg-build /home/nonroot/pg_install/v16/lib pg_install/v16/lib
-COPY --from=pg-build /home/nonroot/pg_install/v17/lib pg_install/v17/lib
 COPY --chown=nonroot . .

-ARG ADDITIONAL_RUSTFLAGS
+# Show build caching stats to check if it was used in the end.
+# Has to be the part of the same RUN since cachepot daemon is killed in the end of this RUN, losing the compilation stats.
 RUN set -e \
-&& PQ_LIB_DIR=$(pwd)/pg_install/v${STABLE_PG_VERSION}/lib RUSTFLAGS="-Clinker=clang -Clink-arg=-fuse-ld=mold -Clink-arg=-Wl,--no-rosegment ${ADDITIONAL_RUSTFLAGS}" cargo build \
+&& RUSTFLAGS="-Clinker=clang -Clink-arg=-fuse-ld=mold -Clink-arg=-Wl,--no-rosegment" cargo build \
 --bin pg_sni_router \
 --bin pageserver \
 --bin pagectl \
@@ -54,13 +56,12 @@ RUN set -e \
 --bin storage_controller \
 --bin proxy \
 --bin neon_local \
---bin storage_scrubber \
---locked --release
+--locked --release \
+&& cachepot -s

 # Build final image
 #
-FROM debian:${DEBIAN_FLAVOR}
-ARG DEFAULT_PG_VERSION
+FROM debian:bullseye-slim
 WORKDIR /data

 RUN set -e \
@@ -68,6 +69,8 @@ RUN set -e \
 && apt install -y \
 libreadline-dev \
 libseccomp-dev \
+libicu67 \
+openssl \
 ca-certificates \
 && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* \
 && useradd -d /data neon \
@@ -81,35 +84,28 @@ COPY --from=build --chown=neon:neon /home/nonroot/target/release/storage_broker
 COPY --from=build --chown=neon:neon /home/nonroot/target/release/storage_controller /usr/local/bin
 COPY --from=build --chown=neon:neon /home/nonroot/target/release/proxy /usr/local/bin
 COPY --from=build --chown=neon:neon /home/nonroot/target/release/neon_local /usr/local/bin
-COPY --from=build --chown=neon:neon /home/nonroot/target/release/storage_scrubber /usr/local/bin

 COPY --from=pg-build /home/nonroot/pg_install/v14 /usr/local/v14/
 COPY --from=pg-build /home/nonroot/pg_install/v15 /usr/local/v15/
 COPY --from=pg-build /home/nonroot/pg_install/v16 /usr/local/v16/
-COPY --from=pg-build /home/nonroot/pg_install/v17 /usr/local/v17/
 COPY --from=pg-build /home/nonroot/postgres_install.tar.gz /data/

 # By default, pageserver uses `.neon/` working directory in WORKDIR, so create one and fill it with the dummy config.
 # Now, when `docker run ... pageserver` is run, it can start without errors, yet will have some default dummy values.
-RUN mkdir -p /data/.neon/ && \
-echo "id=1234" > "/data/.neon/identity.toml" && \
-echo "broker_endpoint='http://storage_broker:50051'\n" \
-"pg_distrib_dir='/usr/local/'\n" \
-"listen_pg_addr='0.0.0.0:6400'\n" \
-"listen_http_addr='0.0.0.0:9898'\n" \
-"availability_zone='local'\n" \
-> /data/.neon/pageserver.toml && \
-chown -R neon:neon /data/.neon
+RUN mkdir -p /data/.neon/ && chown -R neon:neon /data/.neon/ \
+&& /usr/local/bin/pageserver -D /data/.neon/ --init \
+-c "id=1234" \
+-c "broker_endpoint='http://storage_broker:50051'" \
+-c "pg_distrib_dir='/usr/local/'" \
+-c "listen_pg_addr='0.0.0.0:6400'" \
+-c "listen_http_addr='0.0.0.0:9898'"

 # When running a binary that links with libpq, default to using our most recent postgres version. Binaries
 # that want a particular postgres version will select it explicitly: this is just a default.
-ENV LD_LIBRARY_PATH=/usr/local/v${DEFAULT_PG_VERSION}/lib
+ENV LD_LIBRARY_PATH /usr/local/v16/lib


 VOLUME ["/data"]
 USER neon
 EXPOSE 6400
 EXPOSE 9898

-CMD ["/usr/local/bin/pageserver", "-D", "/data/.neon"]
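A hedged usage sketch for the image this Dockerfile builds (the `neon-local` tag is invented for illustration; the entrypoint path, dummy `/data/.neon` config and exposed ports are the ones set up in the hunks above):

```sh
# Build the image from the repository root
docker build -t neon-local .

# Start the pageserver against the dummy config baked into /data/.neon
docker run --rm -p 6400:6400 -p 9898:9898 neon-local /usr/local/bin/pageserver -D /data/.neon
```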
Dockerfile.build-tools (new file)
@@ -0,0 +1,166 @@
+FROM debian:bullseye-slim
+
+# Add nonroot user
+RUN useradd -ms /bin/bash nonroot -b /home
+SHELL ["/bin/bash", "-c"]
+
+# System deps
+RUN set -e \
+&& apt update \
+&& apt install -y \
+autoconf \
+automake \
+bison \
+build-essential \
+ca-certificates \
+cmake \
+curl \
+flex \
+git \
+gnupg \
+gzip \
+jq \
+libcurl4-openssl-dev \
+libbz2-dev \
+libffi-dev \
+liblzma-dev \
+libncurses5-dev \
+libncursesw5-dev \
+libpq-dev \
+libreadline-dev \
+libseccomp-dev \
+libsqlite3-dev \
+libssl-dev \
+libstdc++-10-dev \
+libtool \
+libxml2-dev \
+libxmlsec1-dev \
+libxxhash-dev \
+lsof \
+make \
+netcat \
+net-tools \
+openssh-client \
+parallel \
+pkg-config \
+unzip \
+wget \
+xz-utils \
+zlib1g-dev \
+zstd \
+&& rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
+
+# protobuf-compiler (protoc)
+ENV PROTOC_VERSION 25.1
+RUN curl -fsSL "https://github.com/protocolbuffers/protobuf/releases/download/v${PROTOC_VERSION}/protoc-${PROTOC_VERSION}-linux-$(uname -m | sed 's/aarch64/aarch_64/g').zip" -o "protoc.zip" \
+&& unzip -q protoc.zip -d protoc \
+&& mv protoc/bin/protoc /usr/local/bin/protoc \
+&& mv protoc/include/google /usr/local/include/google \
+&& rm -rf protoc.zip protoc
+
+# LLVM
+ENV LLVM_VERSION=17
+RUN curl -fsSL 'https://apt.llvm.org/llvm-snapshot.gpg.key' | apt-key add - \
+&& echo "deb http://apt.llvm.org/bullseye/ llvm-toolchain-bullseye-${LLVM_VERSION} main" > /etc/apt/sources.list.d/llvm.stable.list \
+&& apt update \
+&& apt install -y clang-${LLVM_VERSION} llvm-${LLVM_VERSION} \
+&& bash -c 'for f in /usr/bin/clang*-${LLVM_VERSION} /usr/bin/llvm*-${LLVM_VERSION}; do ln -s "${f}" "${f%-${LLVM_VERSION}}"; done' \
+&& rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
+
+# PostgreSQL 14
+RUN curl -fsSL 'https://www.postgresql.org/media/keys/ACCC4CF8.asc' | apt-key add - \
+&& echo 'deb http://apt.postgresql.org/pub/repos/apt bullseye-pgdg main' > /etc/apt/sources.list.d/pgdg.list \
+&& apt update \
+&& apt install -y postgresql-client-14 \
+&& rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
+
+# AWS CLI
+RUN curl "https://awscli.amazonaws.com/awscli-exe-linux-$(uname -m).zip" -o "awscliv2.zip" \
+&& unzip -q awscliv2.zip \
+&& ./aws/install \
+&& rm awscliv2.zip
+
+# Mold: A Modern Linker
+ENV MOLD_VERSION v2.4.0
+RUN set -e \
+&& git clone https://github.com/rui314/mold.git \
+&& mkdir mold/build \
+&& cd mold/build \
+&& git checkout ${MOLD_VERSION} \
+&& cmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_CXX_COMPILER=clang++ .. \
+&& cmake --build . -j $(nproc) \
+&& cmake --install . \
+&& cd .. \
+&& rm -rf mold
+
+# LCOV
+# Build lcov from a fork:
+# It includes several bug fixes on top on v2.0 release (https://github.com/linux-test-project/lcov/compare/v2.0...master)
+# And patches from us:
+# - Generates json file with code coverage summary (https://github.com/neondatabase/lcov/commit/426e7e7a22f669da54278e9b55e6d8caabd00af0.tar.gz)
+RUN for package in Capture::Tiny DateTime Devel::Cover Digest::MD5 File::Spec JSON::XS Memory::Process Time::HiRes JSON; do yes | perl -MCPAN -e "CPAN::Shell->notest('install', '$package')"; done \
+&& wget https://github.com/neondatabase/lcov/archive/426e7e7a22f669da54278e9b55e6d8caabd00af0.tar.gz -O lcov.tar.gz \
+&& echo "61a22a62e20908b8b9e27d890bd0ea31f567a7b9668065589266371dcbca0992 lcov.tar.gz" | sha256sum --check \
+&& mkdir -p lcov && tar -xzf lcov.tar.gz -C lcov --strip-components=1 \
+&& cd lcov \
+&& make install \
+&& rm -rf ../lcov.tar.gz
+
+# Switch to nonroot user
+USER nonroot:nonroot
+WORKDIR /home/nonroot
+
+# Python
+ENV PYTHON_VERSION=3.9.18 \
+PYENV_ROOT=/home/nonroot/.pyenv \
+PATH=/home/nonroot/.pyenv/shims:/home/nonroot/.pyenv/bin:/home/nonroot/.poetry/bin:$PATH
+RUN set -e \
+&& cd $HOME \
+&& curl -sSO https://raw.githubusercontent.com/pyenv/pyenv-installer/master/bin/pyenv-installer \
+&& chmod +x pyenv-installer \
+&& ./pyenv-installer \
+&& export PYENV_ROOT=/home/nonroot/.pyenv \
+&& export PATH="$PYENV_ROOT/bin:$PATH" \
+&& export PATH="$PYENV_ROOT/shims:$PATH" \
+&& pyenv install ${PYTHON_VERSION} \
+&& pyenv global ${PYTHON_VERSION} \
+&& python --version \
+&& pip install --upgrade pip \
+&& pip --version \
+&& pip install pipenv wheel poetry
+
+# Switch to nonroot user (again)
+USER nonroot:nonroot
+WORKDIR /home/nonroot
+
+# Rust
+# Please keep the version of llvm (installed above) in sync with rust llvm (`rustc --version --verbose | grep LLVM`)
+ENV RUSTC_VERSION=1.76.0
+ENV RUSTUP_HOME="/home/nonroot/.rustup"
+ENV PATH="/home/nonroot/.cargo/bin:${PATH}"
+RUN curl -sSO https://static.rust-lang.org/rustup/dist/$(uname -m)-unknown-linux-gnu/rustup-init && whoami && \
+chmod +x rustup-init && \
+./rustup-init -y --default-toolchain ${RUSTC_VERSION} && \
+rm rustup-init && \
+export PATH="$HOME/.cargo/bin:$PATH" && \
+. "$HOME/.cargo/env" && \
+cargo --version && rustup --version && \
+rustup component add llvm-tools-preview rustfmt clippy && \
+cargo install --git https://github.com/paritytech/cachepot && \
+cargo install rustfilt && \
+cargo install cargo-hakari && \
+cargo install cargo-deny && \
+cargo install cargo-hack && \
+cargo install cargo-nextest && \
+rm -rf /home/nonroot/.cargo/registry && \
+rm -rf /home/nonroot/.cargo/git
+ENV RUSTC_WRAPPER=cachepot
+
+# Show versions
+RUN whoami \
+&& python --version \
+&& pip --version \
+&& cargo --version --verbose \
+&& rustup --version --verbose \
+&& rustc --version --verbose \
+&& clang --version
File diff suppressed because it is too large

Makefile
@@ -3,9 +3,6 @@ ROOT_PROJECT_DIR := $(dir $(abspath $(lastword $(MAKEFILE_LIST))))
 # Where to install Postgres, default is ./pg_install, maybe useful for package managers
 POSTGRES_INSTALL_DIR ?= $(ROOT_PROJECT_DIR)/pg_install/

-OPENSSL_PREFIX_DIR := /usr/local/openssl
-ICU_PREFIX_DIR := /usr/local/icu
-
 #
 # We differentiate between release / debug build types using the BUILD_TYPE
 # environment variable.
@@ -23,32 +20,19 @@
 $(error Bad build type '$(BUILD_TYPE)', see Makefile for options)
 endif

-ifeq ($(shell test -e /home/nonroot/.docker_build && echo -n yes),yes)
-# Exclude static build openssl, icu for local build (MacOS, Linux)
-# Only keep for build type release and debug
-PG_CFLAGS += -I$(OPENSSL_PREFIX_DIR)/include
-PG_CONFIGURE_OPTS += --with-icu
-PG_CONFIGURE_OPTS += ICU_CFLAGS='-I/$(ICU_PREFIX_DIR)/include -DU_STATIC_IMPLEMENTATION'
-PG_CONFIGURE_OPTS += ICU_LIBS='-L$(ICU_PREFIX_DIR)/lib -L$(ICU_PREFIX_DIR)/lib64 -licui18n -licuuc -licudata -lstdc++ -Wl,-Bdynamic -lm'
-PG_CONFIGURE_OPTS += LDFLAGS='-L$(OPENSSL_PREFIX_DIR)/lib -L$(OPENSSL_PREFIX_DIR)/lib64 -L$(ICU_PREFIX_DIR)/lib -L$(ICU_PREFIX_DIR)/lib64 -Wl,-Bstatic -lssl -lcrypto -Wl,-Bdynamic -lrt -lm -ldl -lpthread'
-endif
-
 UNAME_S := $(shell uname -s)
 ifeq ($(UNAME_S),Linux)
 # Seccomp BPF is only available for Linux
 PG_CONFIGURE_OPTS += --with-libseccomp
 else ifeq ($(UNAME_S),Darwin)
-PG_CFLAGS += -DUSE_PREFETCH
-ifndef DISABLE_HOMEBREW
-# macOS with brew-installed openssl requires explicit paths
-# It can be configured with OPENSSL_PREFIX variable
-OPENSSL_PREFIX := $(shell brew --prefix openssl@3)
-PG_CONFIGURE_OPTS += --with-includes=$(OPENSSL_PREFIX)/include --with-libraries=$(OPENSSL_PREFIX)/lib
-PG_CONFIGURE_OPTS += PKG_CONFIG_PATH=$(shell brew --prefix icu4c)/lib/pkgconfig
-# macOS already has bison and flex in the system, but they are old and result in postgres-v14 target failure
-# brew formulae are keg-only and not symlinked into HOMEBREW_PREFIX, force their usage
-EXTRA_PATH_OVERRIDES += $(shell brew --prefix bison)/bin/:$(shell brew --prefix flex)/bin/:
-endif
+# macOS with brew-installed openssl requires explicit paths
+# It can be configured with OPENSSL_PREFIX variable
+OPENSSL_PREFIX ?= $(shell brew --prefix openssl@3)
+PG_CONFIGURE_OPTS += --with-includes=$(OPENSSL_PREFIX)/include --with-libraries=$(OPENSSL_PREFIX)/lib
+PG_CONFIGURE_OPTS += PKG_CONFIG_PATH=$(shell brew --prefix icu4c)/lib/pkgconfig
+# macOS already has bison and flex in the system, but they are old and result in postgres-v14 target failure
+# brew formulae are keg-only and not symlinked into HOMEBREW_PREFIX, force their usage
+EXTRA_PATH_OVERRIDES += $(shell brew --prefix bison)/bin/:$(shell brew --prefix flex)/bin/:
 endif

 # Use -C option so that when PostgreSQL "make install" installs the
@@ -70,8 +54,6 @@ CARGO_CMD_PREFIX += CARGO_TERM_PROGRESS_WHEN=never CI=1
 # Set PQ_LIB_DIR to make sure `storage_controller` get linked with bundled libpq (through diesel)
 CARGO_CMD_PREFIX += PQ_LIB_DIR=$(POSTGRES_INSTALL_DIR)/v16/lib

-CACHEDIR_TAG_CONTENTS := "Signature: 8a477f597d28d172789f06886806bc55"
-
 #
 # Top level Makefile to build Neon and PostgreSQL
 #
@@ -82,46 +64,32 @@ all: neon postgres neon-pg-ext
 #
 # The 'postgres_ffi' depends on the Postgres headers.
 .PHONY: neon
-neon: postgres-headers walproposer-lib cargo-target-dir
+neon: postgres-headers walproposer-lib
 +@echo "Compiling Neon"
 $(CARGO_CMD_PREFIX) cargo build $(CARGO_BUILD_FLAGS)
-.PHONY: cargo-target-dir
-cargo-target-dir:
-# https://github.com/rust-lang/cargo/issues/14281
-mkdir -p target
-test -e target/CACHEDIR.TAG || echo "$(CACHEDIR_TAG_CONTENTS)" > target/CACHEDIR.TAG

 ### PostgreSQL parts
 # Some rules are duplicated for Postgres v14 and 15. We may want to refactor
 # to avoid the duplication in the future, but it's tolerable for now.
 #
 $(POSTGRES_INSTALL_DIR)/build/%/config.status:

-mkdir -p $(POSTGRES_INSTALL_DIR)
-test -e $(POSTGRES_INSTALL_DIR)/CACHEDIR.TAG || echo "$(CACHEDIR_TAG_CONTENTS)" > $(POSTGRES_INSTALL_DIR)/CACHEDIR.TAG

 +@echo "Configuring Postgres $* build"
 @test -s $(ROOT_PROJECT_DIR)/vendor/postgres-$*/configure || { \
 echo "\nPostgres submodule not found in $(ROOT_PROJECT_DIR)/vendor/postgres-$*/, execute "; \
 echo "'git submodule update --init --recursive --depth 2 --progress .' in project root.\n"; \
 exit 1; }
 mkdir -p $(POSTGRES_INSTALL_DIR)/build/$*
+(cd $(POSTGRES_INSTALL_DIR)/build/$* && \
-VERSION=$*; \
+env PATH="$(EXTRA_PATH_OVERRIDES):$$PATH" $(ROOT_PROJECT_DIR)/vendor/postgres-$*/configure \
-EXTRA_VERSION=$$(cd $(ROOT_PROJECT_DIR)/vendor/postgres-$$VERSION && git rev-parse HEAD); \
-(cd $(POSTGRES_INSTALL_DIR)/build/$$VERSION && \
-env PATH="$(EXTRA_PATH_OVERRIDES):$$PATH" $(ROOT_PROJECT_DIR)/vendor/postgres-$$VERSION/configure \
 CFLAGS='$(PG_CFLAGS)' \
-$(PG_CONFIGURE_OPTS) --with-extra-version=" ($$EXTRA_VERSION)" \
+$(PG_CONFIGURE_OPTS) \
---prefix=$(abspath $(POSTGRES_INSTALL_DIR))/$$VERSION > configure.log)
+--prefix=$(abspath $(POSTGRES_INSTALL_DIR))/$* > configure.log)

 # nicer alias to run 'configure'
 # Note: I've been unable to use templates for this part of our configuration.
 # I'm not sure why it wouldn't work, but this is the only place (apart from
 # the "build-all-versions" entry points) where direct mention of PostgreSQL
 # versions is used.
-.PHONY: postgres-configure-v17
-postgres-configure-v17: $(POSTGRES_INSTALL_DIR)/build/v17/config.status
 .PHONY: postgres-configure-v16
 postgres-configure-v16: $(POSTGRES_INSTALL_DIR)/build/v16/config.status
 .PHONY: postgres-configure-v15
@@ -147,14 +115,10 @@ postgres-%: postgres-configure-% \
 $(MAKE) -C $(POSTGRES_INSTALL_DIR)/build/$*/contrib/pg_prewarm install
 +@echo "Compiling pg_buffercache $*"
 $(MAKE) -C $(POSTGRES_INSTALL_DIR)/build/$*/contrib/pg_buffercache install
-+@echo "Compiling pg_visibility $*"
-$(MAKE) -C $(POSTGRES_INSTALL_DIR)/build/$*/contrib/pg_visibility install
 +@echo "Compiling pageinspect $*"
 $(MAKE) -C $(POSTGRES_INSTALL_DIR)/build/$*/contrib/pageinspect install
 +@echo "Compiling amcheck $*"
 $(MAKE) -C $(POSTGRES_INSTALL_DIR)/build/$*/contrib/amcheck install
-+@echo "Compiling test_decoding $*"
-$(MAKE) -C $(POSTGRES_INSTALL_DIR)/build/$*/contrib/test_decoding install

 .PHONY: postgres-clean-%
 postgres-clean-%:
@@ -171,27 +135,27 @@ postgres-check-%: postgres-%
 neon-pg-ext-%: postgres-%
 +@echo "Compiling neon $*"
 mkdir -p $(POSTGRES_INSTALL_DIR)/build/neon-$*
-$(MAKE) PG_CONFIG=$(POSTGRES_INSTALL_DIR)/$*/bin/pg_config COPT='$(COPT)' \
+$(MAKE) PG_CONFIG=$(POSTGRES_INSTALL_DIR)/$*/bin/pg_config CFLAGS='$(PG_CFLAGS) $(COPT)' \
 -C $(POSTGRES_INSTALL_DIR)/build/neon-$* \
 -f $(ROOT_PROJECT_DIR)/pgxn/neon/Makefile install
 +@echo "Compiling neon_walredo $*"
 mkdir -p $(POSTGRES_INSTALL_DIR)/build/neon-walredo-$*
-$(MAKE) PG_CONFIG=$(POSTGRES_INSTALL_DIR)/$*/bin/pg_config COPT='$(COPT)' \
+$(MAKE) PG_CONFIG=$(POSTGRES_INSTALL_DIR)/$*/bin/pg_config CFLAGS='$(PG_CFLAGS) $(COPT)' \
 -C $(POSTGRES_INSTALL_DIR)/build/neon-walredo-$* \
 -f $(ROOT_PROJECT_DIR)/pgxn/neon_walredo/Makefile install
 +@echo "Compiling neon_rmgr $*"
 mkdir -p $(POSTGRES_INSTALL_DIR)/build/neon-rmgr-$*
-$(MAKE) PG_CONFIG=$(POSTGRES_INSTALL_DIR)/$*/bin/pg_config COPT='$(COPT)' \
+$(MAKE) PG_CONFIG=$(POSTGRES_INSTALL_DIR)/$*/bin/pg_config CFLAGS='$(PG_CFLAGS) $(COPT)' \
 -C $(POSTGRES_INSTALL_DIR)/build/neon-rmgr-$* \
 -f $(ROOT_PROJECT_DIR)/pgxn/neon_rmgr/Makefile install
 +@echo "Compiling neon_test_utils $*"
 mkdir -p $(POSTGRES_INSTALL_DIR)/build/neon-test-utils-$*
-$(MAKE) PG_CONFIG=$(POSTGRES_INSTALL_DIR)/$*/bin/pg_config COPT='$(COPT)' \
+$(MAKE) PG_CONFIG=$(POSTGRES_INSTALL_DIR)/$*/bin/pg_config CFLAGS='$(PG_CFLAGS) $(COPT)' \
 -C $(POSTGRES_INSTALL_DIR)/build/neon-test-utils-$* \
 -f $(ROOT_PROJECT_DIR)/pgxn/neon_test_utils/Makefile install
 +@echo "Compiling neon_utils $*"
 mkdir -p $(POSTGRES_INSTALL_DIR)/build/neon-utils-$*
-$(MAKE) PG_CONFIG=$(POSTGRES_INSTALL_DIR)/$*/bin/pg_config COPT='$(COPT)' \
+$(MAKE) PG_CONFIG=$(POSTGRES_INSTALL_DIR)/$*/bin/pg_config CFLAGS='$(PG_CFLAGS) $(COPT)' \
 -C $(POSTGRES_INSTALL_DIR)/build/neon-utils-$* \
 -f $(ROOT_PROJECT_DIR)/pgxn/neon_utils/Makefile install

@@ -220,31 +184,29 @@ neon-pg-clean-ext-%:
 # they depend on openssl and other libraries that are not included in our
 # Rust build.
 .PHONY: walproposer-lib
-walproposer-lib: neon-pg-ext-v17
+walproposer-lib: neon-pg-ext-v16
 +@echo "Compiling walproposer-lib"
 mkdir -p $(POSTGRES_INSTALL_DIR)/build/walproposer-lib
-$(MAKE) PG_CONFIG=$(POSTGRES_INSTALL_DIR)/v17/bin/pg_config COPT='$(COPT)' \
+$(MAKE) PG_CONFIG=$(POSTGRES_INSTALL_DIR)/v16/bin/pg_config CFLAGS='$(PG_CFLAGS) $(COPT)' \
 -C $(POSTGRES_INSTALL_DIR)/build/walproposer-lib \
 -f $(ROOT_PROJECT_DIR)/pgxn/neon/Makefile walproposer-lib
-cp $(POSTGRES_INSTALL_DIR)/v17/lib/libpgport.a $(POSTGRES_INSTALL_DIR)/build/walproposer-lib
+cp $(POSTGRES_INSTALL_DIR)/v16/lib/libpgport.a $(POSTGRES_INSTALL_DIR)/build/walproposer-lib
-cp $(POSTGRES_INSTALL_DIR)/v17/lib/libpgcommon.a $(POSTGRES_INSTALL_DIR)/build/walproposer-lib
+cp $(POSTGRES_INSTALL_DIR)/v16/lib/libpgcommon.a $(POSTGRES_INSTALL_DIR)/build/walproposer-lib
+ifeq ($(UNAME_S),Linux)
 $(AR) d $(POSTGRES_INSTALL_DIR)/build/walproposer-lib/libpgport.a \
 pg_strong_random.o
 $(AR) d $(POSTGRES_INSTALL_DIR)/build/walproposer-lib/libpgcommon.a \
-checksum_helper.o \
+pg_crc32c.o \
-cryptohash_openssl.o \
 hmac_openssl.o \
+cryptohash_openssl.o \
+scram-common.o \
 md5_common.o \
-parse_manifest.o \
+checksum_helper.o
-scram-common.o
-ifeq ($(UNAME_S),Linux)
-$(AR) d $(POSTGRES_INSTALL_DIR)/build/walproposer-lib/libpgcommon.a \
-pg_crc32c.o
 endif

 .PHONY: walproposer-lib-clean
 walproposer-lib-clean:
-$(MAKE) PG_CONFIG=$(POSTGRES_INSTALL_DIR)/v17/bin/pg_config \
+$(MAKE) PG_CONFIG=$(POSTGRES_INSTALL_DIR)/v16/bin/pg_config \
 -C $(POSTGRES_INSTALL_DIR)/build/walproposer-lib \
 -f $(ROOT_PROJECT_DIR)/pgxn/neon/Makefile clean

@@ -252,55 +214,48 @@ walproposer-lib-clean:
 neon-pg-ext: \
 neon-pg-ext-v14 \
 neon-pg-ext-v15 \
-neon-pg-ext-v16 \
-neon-pg-ext-v17
+neon-pg-ext-v16

 .PHONY: neon-pg-clean-ext
 neon-pg-clean-ext: \
 neon-pg-clean-ext-v14 \
 neon-pg-clean-ext-v15 \
-neon-pg-clean-ext-v16 \
-neon-pg-clean-ext-v17
+neon-pg-clean-ext-v16

 # shorthand to build all Postgres versions
 .PHONY: postgres
 postgres: \
 postgres-v14 \
 postgres-v15 \
-postgres-v16 \
-postgres-v17
+postgres-v16

 .PHONY: postgres-headers
 postgres-headers: \
 postgres-headers-v14 \
 postgres-headers-v15 \
-postgres-headers-v16 \
-postgres-headers-v17
+postgres-headers-v16

 .PHONY: postgres-clean
 postgres-clean: \
 postgres-clean-v14 \
 postgres-clean-v15 \
-postgres-clean-v16 \
-postgres-clean-v17
+postgres-clean-v16

 .PHONY: postgres-check
 postgres-check: \
 postgres-check-v14 \
 postgres-check-v15 \
-postgres-check-v16 \
-postgres-check-v17
+postgres-check-v16

 # This doesn't remove the effects of 'configure'.
 .PHONY: clean
 clean: postgres-clean neon-pg-clean-ext
-$(MAKE) -C compute clean
 $(CARGO_CMD_PREFIX) cargo clean

 # This removes everything
 .PHONY: distclean
 distclean:
-$(RM) -r $(POSTGRES_INSTALL_DIR)
+rm -rf $(POSTGRES_INSTALL_DIR)
 $(CARGO_CMD_PREFIX) cargo clean

 .PHONY: fmt
@@ -332,16 +287,16 @@ postgres-%-pgindent: postgres-%-pg-bsd-indent postgres-%-typedefs.list
 $(ROOT_PROJECT_DIR)/vendor/postgres-$*/src/tools/pgindent/pgindent --typedefs postgres-$*-typedefs-full.list \
 $(ROOT_PROJECT_DIR)/vendor/postgres-$*/src/ \
 --excludes $(ROOT_PROJECT_DIR)/vendor/postgres-$*/src/tools/pgindent/exclude_file_patterns
-$(RM) pg*.BAK
+rm -f pg*.BAK

 # Indent pxgn/neon.
-.PHONY: neon-pgindent
+.PHONY: pgindent
-neon-pgindent: postgres-v17-pg-bsd-indent neon-pg-ext-v17
+neon-pgindent: postgres-v16-pg-bsd-indent neon-pg-ext-v16
-$(MAKE) PG_CONFIG=$(POSTGRES_INSTALL_DIR)/v17/bin/pg_config COPT='$(COPT)' \
+$(MAKE) PG_CONFIG=$(POSTGRES_INSTALL_DIR)/v16/bin/pg_config CFLAGS='$(PG_CFLAGS) $(COPT)' \
-FIND_TYPEDEF=$(ROOT_PROJECT_DIR)/vendor/postgres-v17/src/tools/find_typedef \
+FIND_TYPEDEF=$(ROOT_PROJECT_DIR)/vendor/postgres-v16/src/tools/find_typedef \
-INDENT=$(POSTGRES_INSTALL_DIR)/build/v17/src/tools/pg_bsd_indent/pg_bsd_indent \
+INDENT=$(POSTGRES_INSTALL_DIR)/build/v16/src/tools/pg_bsd_indent/pg_bsd_indent \
-PGINDENT_SCRIPT=$(ROOT_PROJECT_DIR)/vendor/postgres-v17/src/tools/pgindent/pgindent \
+PGINDENT_SCRIPT=$(ROOT_PROJECT_DIR)/vendor/postgres-v16/src/tools/pgindent/pgindent \
--C $(POSTGRES_INSTALL_DIR)/build/neon-v17 \
+-C $(POSTGRES_INSTALL_DIR)/build/neon-v16 \
 -f $(ROOT_PROJECT_DIR)/pgxn/neon/Makefile pgindent

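A brief usage sketch of the Makefile targets touched above (target and variable names are taken from the hunks themselves; the flag choices are illustrative):

```sh
# Build Postgres (all vendored versions), the Neon extensions and the Rust binaries
BUILD_TYPE=release make -j"$(nproc)" -s all

# Rebuild the extensions for a single Postgres version
make neon-pg-ext-v16

# Build only the static walproposer library
make walproposer-lib

# Remove Postgres build output and cargo artifacts
make distclean
```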
README.md
@@ -1,6 +1,4 @@
 [](https://neon.tech)
-
-

 # Neon

@@ -31,7 +29,7 @@ See developer documentation in [SUMMARY.md](/docs/SUMMARY.md) for more informati
 ```bash
 apt install build-essential libtool libreadline-dev zlib1g-dev flex bison libseccomp-dev \
 libssl-dev clang pkg-config libpq-dev cmake postgresql-client protobuf-compiler \
-libprotobuf-dev libcurl4-openssl-dev openssl python3-poetry lsof libicu-dev
+libcurl4-openssl-dev openssl python3-poetry lsof libicu-dev
 ```
 * On Fedora, these packages are needed:
 ```bash
@@ -58,18 +56,12 @@ curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh
 1. Install XCode and dependencies
 ```
 xcode-select --install
-brew install protobuf openssl flex bison icu4c pkg-config m4
+brew install protobuf openssl flex bison icu4c pkg-config

 # add openssl to PATH, required for ed25519 keys generation in neon_local
 echo 'export PATH="$(brew --prefix openssl)/bin:$PATH"' >> ~/.zshrc
 ```

-If you get errors about missing `m4` you may have to install it manually:
-```
-brew install m4
-brew link --force m4
-```
-
 2. [Install Rust](https://www.rust-lang.org/tools/install)
 ```
 # recommended approach from https://www.rust-lang.org/tools/install
@@ -132,7 +124,7 @@ make -j`sysctl -n hw.logicalcpu` -s
 To run the `psql` client, install the `postgresql-client` package or modify `PATH` and `LD_LIBRARY_PATH` to include `pg_install/bin` and `pg_install/lib`, respectively.

 To run the integration tests or Python scripts (not required to use the code), install
-Python (3.11 or higher), and install the python3 packages using `./scripts/pysync` (requires [poetry>=1.8](https://python-poetry.org/)) in the project directory.
+Python (3.9 or higher), and install the python3 packages using `./scripts/pysync` (requires [poetry>=1.3](https://python-poetry.org/)) in the project directory.


 #### Running neon database
@@ -246,14 +238,6 @@ If you encounter errors during setting up the initial tenant, it's best to stop

 ## Running tests

-### Rust unit tests
-
-We are using [`cargo-nextest`](https://nexte.st/) to run the tests in Github Workflows.
-Some crates do not support running plain `cargo test` anymore, prefer `cargo nextest run` instead.
-You can install `cargo-nextest` with `cargo install cargo-nextest`.
-
-### Integration tests
-
 Ensure your dependencies are installed as described [here](https://github.com/neondatabase/neon#dependency-installation-notes).

 ```sh
@@ -268,7 +252,7 @@ By default, this runs both debug and release modes, and all supported postgres v
 testing locally, it is convenient to run just one set of permutations, like this:

 ```sh
-DEFAULT_PG_VERSION=16 BUILD_TYPE=release ./scripts/pytest
+DEFAULT_PG_VERSION=15 BUILD_TYPE=release ./scripts/pytest
 ```

 ## Flamegraphs
@@ -1,301 +0,0 @@
ARG DEBIAN_VERSION=bookworm
|
|
||||||
|
|
||||||
FROM debian:bookworm-slim AS pgcopydb_builder
|
|
||||||
ARG DEBIAN_VERSION
|
|
||||||
|
|
||||||
RUN if [ "${DEBIAN_VERSION}" = "bookworm" ]; then \
|
|
||||||
set -e && \
|
|
||||||
apt update && \
|
|
||||||
apt install -y --no-install-recommends \
|
|
||||||
ca-certificates wget gpg && \
|
|
||||||
wget -qO - https://www.postgresql.org/media/keys/ACCC4CF8.asc | gpg --dearmor -o /usr/share/keyrings/postgresql-keyring.gpg && \
|
|
||||||
echo "deb [signed-by=/usr/share/keyrings/postgresql-keyring.gpg] http://apt.postgresql.org/pub/repos/apt bookworm-pgdg main" > /etc/apt/sources.list.d/pgdg.list && \
|
|
||||||
apt-get update && \
|
|
||||||
apt install -y --no-install-recommends \
|
|
||||||
build-essential \
|
|
||||||
autotools-dev \
|
|
||||||
libedit-dev \
|
|
||||||
libgc-dev \
|
|
||||||
libpam0g-dev \
|
|
||||||
libreadline-dev \
|
|
||||||
libselinux1-dev \
|
|
||||||
libxslt1-dev \
|
|
||||||
libssl-dev \
|
|
||||||
libkrb5-dev \
|
|
||||||
zlib1g-dev \
|
|
||||||
liblz4-dev \
|
|
||||||
libpq5 \
|
|
||||||
libpq-dev \
|
|
||||||
libzstd-dev \
|
|
||||||
postgresql-16 \
|
|
||||||
postgresql-server-dev-16 \
|
|
||||||
postgresql-common \
|
|
||||||
python3-sphinx && \
|
|
||||||
wget -O /tmp/pgcopydb.tar.gz https://github.com/dimitri/pgcopydb/archive/refs/tags/v0.17.tar.gz && \
|
|
||||||
mkdir /tmp/pgcopydb && \
|
|
||||||
tar -xzf /tmp/pgcopydb.tar.gz -C /tmp/pgcopydb --strip-components=1 && \
|
|
||||||
cd /tmp/pgcopydb && \
|
|
||||||
make -s clean && \
|
|
||||||
make -s -j12 install && \
|
|
||||||
libpq_path=$(find /lib /usr/lib -name "libpq.so.5" | head -n 1) && \
|
|
||||||
mkdir -p /pgcopydb/lib && \
|
|
||||||
cp "$libpq_path" /pgcopydb/lib/; \
|
|
||||||
else \
|
|
||||||
# copy command below will fail if we don't have dummy files, so we create them for other debian versions
|
|
||||||
mkdir -p /usr/lib/postgresql/16/bin && touch /usr/lib/postgresql/16/bin/pgcopydb && \
|
|
||||||
mkdir -p mkdir -p /pgcopydb/lib && touch /pgcopydb/lib/libpq.so.5; \
|
|
||||||
fi
|
|
||||||
|
|
||||||
FROM debian:${DEBIAN_VERSION}-slim AS build_tools
|
|
||||||
ARG DEBIAN_VERSION
|
|
||||||
|
|
||||||
# Add nonroot user
|
|
||||||
RUN useradd -ms /bin/bash nonroot -b /home
|
|
||||||
SHELL ["/bin/bash", "-c"]
|
|
||||||
|
|
||||||
RUN mkdir -p /pgcopydb/bin && \
|
|
||||||
mkdir -p /pgcopydb/lib && \
|
|
||||||
chmod -R 755 /pgcopydb && \
|
|
||||||
chown -R nonroot:nonroot /pgcopydb
|
|
||||||
|
|
||||||
COPY --from=pgcopydb_builder /usr/lib/postgresql/16/bin/pgcopydb /pgcopydb/bin/pgcopydb
|
|
||||||
COPY --from=pgcopydb_builder /pgcopydb/lib/libpq.so.5 /pgcopydb/lib/libpq.so.5
|
|
||||||
|
|
||||||
# System deps
|
|
||||||
#
|
|
||||||
# 'gdb' is included so that we get backtraces of core dumps produced in
|
|
||||||
# regression tests
|
|
||||||
RUN set -e \
|
|
||||||
&& apt update \
|
|
||||||
&& apt install -y \
|
|
||||||
autoconf \
|
|
||||||
automake \
|
|
||||||
bison \
|
|
||||||
build-essential \
|
|
||||||
ca-certificates \
|
|
||||||
cmake \
|
|
||||||
curl \
|
|
||||||
flex \
|
|
||||||
gdb \
|
|
||||||
git \
|
|
||||||
gnupg \
|
|
||||||
gzip \
|
|
||||||
jq \
|
|
||||||
jsonnet \
|
|
||||||
libcurl4-openssl-dev \
|
|
||||||
libbz2-dev \
|
|
||||||
libffi-dev \
|
|
||||||
liblzma-dev \
|
|
||||||
libncurses5-dev \
|
|
||||||
libncursesw5-dev \
|
|
||||||
libreadline-dev \
|
|
||||||
libseccomp-dev \
|
|
||||||
libsqlite3-dev \
|
|
||||||
libssl-dev \
|
|
||||||
$([[ "${DEBIAN_VERSION}" = "bullseye" ]] && echo libstdc++-10-dev || echo libstdc++-11-dev) \
|
|
||||||
libtool \
|
|
||||||
libxml2-dev \
|
|
||||||
libxmlsec1-dev \
|
|
||||||
libxxhash-dev \
|
|
||||||
lsof \
|
|
||||||
make \
|
|
||||||
netcat-openbsd \
|
|
||||||
net-tools \
|
|
||||||
openssh-client \
parallel \
pkg-config \
unzip \
wget \
xz-utils \
zlib1g-dev \
zstd \
&& rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*

# sql_exporter

# Keep the version the same as in compute/compute-node.Dockerfile and
# test_runner/regress/test_compute_metrics.py.
ENV SQL_EXPORTER_VERSION=0.13.1
RUN curl -fsSL \
"https://github.com/burningalchemist/sql_exporter/releases/download/${SQL_EXPORTER_VERSION}/sql_exporter-${SQL_EXPORTER_VERSION}.linux-$(case "$(uname -m)" in x86_64) echo amd64;; aarch64) echo arm64;; esac).tar.gz" \
--output sql_exporter.tar.gz \
&& mkdir /tmp/sql_exporter \
&& tar xzvf sql_exporter.tar.gz -C /tmp/sql_exporter --strip-components=1 \
&& mv /tmp/sql_exporter/sql_exporter /usr/local/bin/sql_exporter

# protobuf-compiler (protoc)
ENV PROTOC_VERSION=25.1
RUN curl -fsSL "https://github.com/protocolbuffers/protobuf/releases/download/v${PROTOC_VERSION}/protoc-${PROTOC_VERSION}-linux-$(uname -m | sed 's/aarch64/aarch_64/g').zip" -o "protoc.zip" \
&& unzip -q protoc.zip -d protoc \
&& mv protoc/bin/protoc /usr/local/bin/protoc \
&& mv protoc/include/google /usr/local/include/google \
&& rm -rf protoc.zip protoc

# s5cmd
ENV S5CMD_VERSION=2.2.2
RUN curl -sL "https://github.com/peak/s5cmd/releases/download/v${S5CMD_VERSION}/s5cmd_${S5CMD_VERSION}_Linux-$(uname -m | sed 's/x86_64/64bit/g' | sed 's/aarch64/arm64/g').tar.gz" | tar zxvf - s5cmd \
&& chmod +x s5cmd \
&& mv s5cmd /usr/local/bin/s5cmd

# LLVM
ENV LLVM_VERSION=19
RUN curl -fsSL 'https://apt.llvm.org/llvm-snapshot.gpg.key' | apt-key add - \
&& echo "deb http://apt.llvm.org/${DEBIAN_VERSION}/ llvm-toolchain-${DEBIAN_VERSION}-${LLVM_VERSION} main" > /etc/apt/sources.list.d/llvm.stable.list \
&& apt update \
&& apt install -y clang-${LLVM_VERSION} llvm-${LLVM_VERSION} \
&& bash -c 'for f in /usr/bin/clang*-${LLVM_VERSION} /usr/bin/llvm*-${LLVM_VERSION}; do ln -s "${f}" "${f%-${LLVM_VERSION}}"; done' \
&& rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*

# Install docker
RUN curl -fsSL https://download.docker.com/linux/ubuntu/gpg | gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg \
&& echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/debian ${DEBIAN_VERSION} stable" > /etc/apt/sources.list.d/docker.list \
&& apt update \
&& apt install -y docker-ce docker-ce-cli \
&& rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*

# Configure sudo & docker
RUN usermod -aG sudo nonroot && \
echo '%sudo ALL=(ALL) NOPASSWD:ALL' >> /etc/sudoers && \
usermod -aG docker nonroot

# AWS CLI
RUN curl "https://awscli.amazonaws.com/awscli-exe-linux-$(uname -m).zip" -o "awscliv2.zip" \
&& unzip -q awscliv2.zip \
&& ./aws/install \
&& rm awscliv2.zip

# Mold: A Modern Linker
ENV MOLD_VERSION=v2.34.1
RUN set -e \
&& git clone https://github.com/rui314/mold.git \
&& mkdir mold/build \
&& cd mold/build \
&& git checkout ${MOLD_VERSION} \
&& cmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_CXX_COMPILER=clang++ .. \
&& cmake --build . -j $(nproc) \
&& cmake --install . \
&& cd .. \
&& rm -rf mold

# LCOV
# Build lcov from a fork:
# It includes several bug fixes on top of the v2.0 release (https://github.com/linux-test-project/lcov/compare/v2.0...master)
# And patches from us:
# - Generates a JSON file with the code coverage summary (https://github.com/neondatabase/lcov/commit/426e7e7a22f669da54278e9b55e6d8caabd00af0.tar.gz)
RUN for package in Capture::Tiny DateTime Devel::Cover Digest::MD5 File::Spec JSON::XS Memory::Process Time::HiRes JSON; do yes | perl -MCPAN -e "CPAN::Shell->notest('install', '$package')"; done \
&& wget https://github.com/neondatabase/lcov/archive/426e7e7a22f669da54278e9b55e6d8caabd00af0.tar.gz -O lcov.tar.gz \
&& echo "61a22a62e20908b8b9e27d890bd0ea31f567a7b9668065589266371dcbca0992 lcov.tar.gz" | sha256sum --check \
&& mkdir -p lcov && tar -xzf lcov.tar.gz -C lcov --strip-components=1 \
&& cd lcov \
&& make install \
&& rm -rf ../lcov.tar.gz

# Compile and install the static OpenSSL library
ENV OPENSSL_VERSION=1.1.1w
ENV OPENSSL_PREFIX=/usr/local/openssl
RUN wget -O /tmp/openssl-${OPENSSL_VERSION}.tar.gz https://www.openssl.org/source/openssl-${OPENSSL_VERSION}.tar.gz && \
echo "cf3098950cb4d853ad95c0841f1f9c6d3dc102dccfcacd521d93925208b76ac8 /tmp/openssl-${OPENSSL_VERSION}.tar.gz" | sha256sum --check && \
cd /tmp && \
tar xzvf /tmp/openssl-${OPENSSL_VERSION}.tar.gz && \
rm /tmp/openssl-${OPENSSL_VERSION}.tar.gz && \
cd /tmp/openssl-${OPENSSL_VERSION} && \
./config --prefix=${OPENSSL_PREFIX} -static --static no-shared -fPIC && \
make -j "$(nproc)" && \
make install && \
cd /tmp && \
rm -rf /tmp/openssl-${OPENSSL_VERSION}

# Use the same version of libicu as the compute nodes so that
# clusters created using initdb on pageserver can be used by computes.
#
# TODO: at this time, compute-node.Dockerfile uses the debian bullseye libicu
# package, which is 67.1. We're duplicating that knowledge here, and also, technically,
# Debian has a few patches on top of 67.1 that we're not adding here.
ENV ICU_VERSION=67.1
ENV ICU_PREFIX=/usr/local/icu

# Download and build static ICU
RUN wget -O /tmp/libicu-${ICU_VERSION}.tgz https://github.com/unicode-org/icu/releases/download/release-${ICU_VERSION//./-}/icu4c-${ICU_VERSION//./_}-src.tgz && \
echo "94a80cd6f251a53bd2a997f6f1b5ac6653fe791dfab66e1eb0227740fb86d5dc /tmp/libicu-${ICU_VERSION}.tgz" | sha256sum --check && \
mkdir /tmp/icu && \
pushd /tmp/icu && \
tar -xzf /tmp/libicu-${ICU_VERSION}.tgz && \
pushd icu/source && \
./configure --prefix=${ICU_PREFIX} --enable-static --enable-shared=no CXXFLAGS="-fPIC" CFLAGS="-fPIC" && \
make -j "$(nproc)" && \
make install && \
popd && \
rm -rf icu && \
rm -f /tmp/libicu-${ICU_VERSION}.tgz && \
popd

# Switch to nonroot user
USER nonroot:nonroot
WORKDIR /home/nonroot

# Python
ENV PYTHON_VERSION=3.11.10 \
PYENV_ROOT=/home/nonroot/.pyenv \
PATH=/home/nonroot/.pyenv/shims:/home/nonroot/.pyenv/bin:/home/nonroot/.poetry/bin:$PATH
RUN set -e \
&& cd $HOME \
&& curl -sSO https://raw.githubusercontent.com/pyenv/pyenv-installer/master/bin/pyenv-installer \
&& chmod +x pyenv-installer \
&& ./pyenv-installer \
&& export PYENV_ROOT=/home/nonroot/.pyenv \
&& export PATH="$PYENV_ROOT/bin:$PATH" \
&& export PATH="$PYENV_ROOT/shims:$PATH" \
&& pyenv install ${PYTHON_VERSION} \
&& pyenv global ${PYTHON_VERSION} \
&& python --version \
&& pip install --upgrade pip \
&& pip --version \
&& pip install pipenv wheel poetry

# Switch to nonroot user (again)
USER nonroot:nonroot
WORKDIR /home/nonroot

# Rust
# Please keep the version of llvm (installed above) in sync with rust llvm (`rustc --version --verbose | grep LLVM`)
ENV RUSTC_VERSION=1.83.0
ENV RUSTUP_HOME="/home/nonroot/.rustup"
ENV PATH="/home/nonroot/.cargo/bin:${PATH}"
ARG RUSTFILT_VERSION=0.2.1
ARG CARGO_HAKARI_VERSION=0.9.33
ARG CARGO_DENY_VERSION=0.16.2
ARG CARGO_HACK_VERSION=0.6.33
ARG CARGO_NEXTEST_VERSION=0.9.85
RUN curl -sSO https://static.rust-lang.org/rustup/dist/$(uname -m)-unknown-linux-gnu/rustup-init && whoami && \
chmod +x rustup-init && \
./rustup-init -y --default-toolchain ${RUSTC_VERSION} && \
rm rustup-init && \
export PATH="$HOME/.cargo/bin:$PATH" && \
. "$HOME/.cargo/env" && \
cargo --version && rustup --version && \
rustup component add llvm-tools rustfmt clippy && \
cargo install rustfilt --version ${RUSTFILT_VERSION} && \
cargo install cargo-hakari --version ${CARGO_HAKARI_VERSION} && \
cargo install cargo-deny --locked --version ${CARGO_DENY_VERSION} && \
cargo install cargo-hack --version ${CARGO_HACK_VERSION} && \
cargo install cargo-nextest --version ${CARGO_NEXTEST_VERSION} && \
rm -rf /home/nonroot/.cargo/registry && \
rm -rf /home/nonroot/.cargo/git

# Show versions
RUN whoami \
&& python --version \
&& pip --version \
&& cargo --version --verbose \
&& rustup --version --verbose \
&& rustc --version --verbose \
&& clang --version

RUN if [ "${DEBIAN_VERSION}" = "bookworm" ]; then \
LD_LIBRARY_PATH=/pgcopydb/lib /pgcopydb/bin/pgcopydb --version; \
else \
echo "pgcopydb is not available for ${DEBIAN_VERSION}"; \
fi

# Set the following flag so the Makefile can check whether it's running in Docker
RUN touch /home/nonroot/.docker_build
@@ -2,8 +2,6 @@ disallowed-methods = [
"tokio::task::block_in_place",
# Allow this for now, to deny it later once we stop using Handle::block_on completely
# "tokio::runtime::Handle::block_on",
# use tokio_epoll_uring_ext instead
"tokio_epoll_uring::thread_local_system",
]

disallowed-macros = [

5
compute/.gitignore
vendored
@@ -1,5 +0,0 @@
# sql_exporter config files generated from Jsonnet
etc/neon_collector.yml
etc/neon_collector_autoscaling.yml
etc/sql_exporter.yml
etc/sql_exporter_autoscaling.yml
@@ -1,50 +0,0 @@
jsonnet_files = $(wildcard \
etc/*.jsonnet \
etc/sql_exporter/*.libsonnet)

.PHONY: all
all: neon_collector.yml neon_collector_autoscaling.yml sql_exporter.yml sql_exporter_autoscaling.yml

neon_collector.yml: $(jsonnet_files)
JSONNET_PATH=jsonnet:etc jsonnet \
--output-file etc/$@ \
--ext-str pg_version=$(PG_VERSION) \
etc/neon_collector.jsonnet

neon_collector_autoscaling.yml: $(jsonnet_files)
JSONNET_PATH=jsonnet:etc jsonnet \
--output-file etc/$@ \
--ext-str pg_version=$(PG_VERSION) \
etc/neon_collector_autoscaling.jsonnet

sql_exporter.yml: $(jsonnet_files)
JSONNET_PATH=etc jsonnet \
--output-file etc/$@ \
--tla-str collector_name=neon_collector \
--tla-str collector_file=neon_collector.yml \
--tla-str 'connection_string=postgresql://cloud_admin@127.0.0.1:5432/postgres?sslmode=disable&application_name=sql_exporter' \
etc/sql_exporter.jsonnet

sql_exporter_autoscaling.yml: $(jsonnet_files)
JSONNET_PATH=etc jsonnet \
--output-file etc/$@ \
--tla-str collector_name=neon_collector_autoscaling \
--tla-str collector_file=neon_collector_autoscaling.yml \
--tla-str 'connection_string=postgresql://cloud_admin@127.0.0.1:5432/postgres?sslmode=disable&application_name=sql_exporter_autoscaling' \
etc/sql_exporter.jsonnet

.PHONY: clean
clean:
$(RM) \
etc/neon_collector.yml \
etc/neon_collector_autoscaling.yml \
etc/sql_exporter.yml \
etc/sql_exporter_autoscaling.yml

.PHONY: jsonnetfmt-test
jsonnetfmt-test:
jsonnetfmt --test $(jsonnet_files)

.PHONY: jsonnetfmt-format
jsonnetfmt-format:
jsonnetfmt --in-place $(jsonnet_files)
@@ -1,21 +0,0 @@
This directory contains files that are needed to build the compute
images, or included in the compute images.

compute-node.Dockerfile
To build the compute image

vm-image-spec.yaml
Instructions for vm-builder, to turn the compute-node image into
corresponding vm-compute-node image.

etc/
Configuration files included in /etc in the compute image

patches/
Some extensions need to be patched to work with Neon. This
directory contains such patches. They are applied to the extension
sources in compute-node.Dockerfile

In addition to these, postgres itself, the neon postgres extension,
and compute_ctl are built and copied into the compute image by
compute-node.Dockerfile.
@@ -1,17 +0,0 @@
# Compute Configuration

These files are the configuration files for various other pieces of software
that will be running in the compute alongside Postgres.

## `sql_exporter`

### Adding a `sql_exporter` Metric

We use `sql_exporter` to export various metrics from Postgres. In order to add
a metric, you will need to create two files: a `libsonnet` and a `sql` file. You
will then import the `libsonnet` file in one of the collector files, and the
`sql` file will be imported in the `libsonnet` file.

In the event your statistic is an LSN, you may want to cast it to a `float8`
because Prometheus only supports floats. It's probably fine because `float8` can
store integers from `-2^53` to `+2^53` exactly.
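To make the workflow described in that README concrete, here is a minimal sketch of what such a metric definition could look like. The metric name `compute_example_lsn`, the file names, and the query are hypothetical illustrations, not files from this commit; the structure simply mirrors the existing `*.libsonnet` definitions shown further below, and the query follows the LSN-to-`float8` cast used by `compute_current_lsn.sql`.

// etc/sql_exporter/compute_example_lsn.libsonnet (hypothetical)
{
  metric_name: 'compute_example_lsn',
  type: 'gauge',
  help: 'Example metric: current WAL insert location, cast to float8 for Prometheus',
  key_labels: null,
  values: [
    'compute_example_lsn',
  ],
  // The companion etc/sql_exporter/compute_example_lsn.sql (also hypothetical)
  // would contain a query such as:
  //   SELECT (pg_current_wal_lsn() - '0/0')::float8 AS compute_example_lsn;
  query: importstr 'sql_exporter/compute_example_lsn.sql',
}

The new entry would then be added to the metrics array of one of the collector files (neon_collector.jsonnet or neon_collector_autoscaling.jsonnet, shown below) so that the generated collector YAML picks it up.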
@@ -1,54 +0,0 @@
{
collector_name: 'neon_collector',
metrics: [
import 'sql_exporter/checkpoints_req.libsonnet',
import 'sql_exporter/checkpoints_timed.libsonnet',
import 'sql_exporter/compute_backpressure_throttling_seconds.libsonnet',
import 'sql_exporter/compute_current_lsn.libsonnet',
import 'sql_exporter/compute_logical_snapshot_files.libsonnet',
import 'sql_exporter/compute_logical_snapshots_bytes.libsonnet',
import 'sql_exporter/compute_max_connections.libsonnet',
import 'sql_exporter/compute_receive_lsn.libsonnet',
import 'sql_exporter/compute_subscriptions_count.libsonnet',
import 'sql_exporter/connection_counts.libsonnet',
import 'sql_exporter/db_total_size.libsonnet',
import 'sql_exporter/file_cache_read_wait_seconds_bucket.libsonnet',
import 'sql_exporter/file_cache_read_wait_seconds_count.libsonnet',
import 'sql_exporter/file_cache_read_wait_seconds_sum.libsonnet',
import 'sql_exporter/file_cache_write_wait_seconds_bucket.libsonnet',
import 'sql_exporter/file_cache_write_wait_seconds_count.libsonnet',
import 'sql_exporter/file_cache_write_wait_seconds_sum.libsonnet',
import 'sql_exporter/getpage_prefetch_discards_total.libsonnet',
import 'sql_exporter/getpage_prefetch_misses_total.libsonnet',
import 'sql_exporter/getpage_prefetch_requests_total.libsonnet',
import 'sql_exporter/getpage_prefetches_buffered.libsonnet',
import 'sql_exporter/getpage_sync_requests_total.libsonnet',
import 'sql_exporter/getpage_wait_seconds_bucket.libsonnet',
import 'sql_exporter/getpage_wait_seconds_count.libsonnet',
import 'sql_exporter/getpage_wait_seconds_sum.libsonnet',
import 'sql_exporter/lfc_approximate_working_set_size.libsonnet',
import 'sql_exporter/lfc_approximate_working_set_size_windows.libsonnet',
import 'sql_exporter/lfc_cache_size_limit.libsonnet',
import 'sql_exporter/lfc_hits.libsonnet',
import 'sql_exporter/lfc_misses.libsonnet',
import 'sql_exporter/lfc_used.libsonnet',
import 'sql_exporter/lfc_writes.libsonnet',
import 'sql_exporter/logical_slot_restart_lsn.libsonnet',
import 'sql_exporter/max_cluster_size.libsonnet',
import 'sql_exporter/pageserver_disconnects_total.libsonnet',
import 'sql_exporter/pageserver_requests_sent_total.libsonnet',
import 'sql_exporter/pageserver_send_flushes_total.libsonnet',
import 'sql_exporter/pageserver_open_requests.libsonnet',
import 'sql_exporter/pg_stats_userdb.libsonnet',
import 'sql_exporter/replication_delay_bytes.libsonnet',
import 'sql_exporter/replication_delay_seconds.libsonnet',
import 'sql_exporter/retained_wal.libsonnet',
import 'sql_exporter/wal_is_lost.libsonnet',
],
queries: [
{
query_name: 'neon_perf_counters',
query: importstr 'sql_exporter/neon_perf_counters.sql',
},
],
}
@@ -1,11 +0,0 @@
{
collector_name: 'neon_collector_autoscaling',
metrics: [
import 'sql_exporter/lfc_approximate_working_set_size_windows.autoscaling.libsonnet',
import 'sql_exporter/lfc_cache_size_limit.libsonnet',
import 'sql_exporter/lfc_hits.libsonnet',
import 'sql_exporter/lfc_misses.libsonnet',
import 'sql_exporter/lfc_used.libsonnet',
import 'sql_exporter/lfc_writes.libsonnet',
],
}
@@ -1,21 +0,0 @@
[databases]
;; pgbouncer propagates application_name (if it's specified) to the server, but some
;; clients don't set it. We set default application_name=pgbouncer to make it
;; easier to identify pgbouncer connections in Postgres. If client sets
;; application_name, it will be used instead.
*=host=localhost port=5432 auth_user=cloud_admin application_name=pgbouncer
[pgbouncer]
listen_port=6432
listen_addr=0.0.0.0
auth_type=scram-sha-256
auth_user=cloud_admin
auth_dbname=postgres
client_tls_sslmode=disable
server_tls_sslmode=disable
pool_mode=transaction
max_client_conn=10000
default_pool_size=64
max_prepared_statements=0
admin_users=postgres
unix_socket_dir=/tmp/
unix_socket_mode=0777
@@ -1,40 +0,0 @@
function(collector_name, collector_file, connection_string) {
// Configuration for sql_exporter for autoscaling-agent
// Global defaults.
global: {
// If scrape_timeout <= 0, no timeout is set unless Prometheus provides one. The default is 10s.
scrape_timeout: '10s',
// Subtracted from Prometheus' scrape_timeout to give us some headroom and prevent Prometheus from timing out first.
scrape_timeout_offset: '500ms',
// Minimum interval between collector runs: by default (0s) collectors are executed on every scrape.
min_interval: '0s',
// Maximum number of open connections to any one target. Metric queries will run concurrently on multiple connections,
// as will concurrent scrapes.
max_connections: 1,
// Maximum number of idle connections to any one target. Unless you use very long collection intervals, this should
// always be the same as max_connections.
max_idle_connections: 1,
// Maximum amount of time a connection may be reused. Expired connections may be closed lazily before reuse.
// If 0, connections are not closed due to a connection's age.
max_connection_lifetime: '5m',
},

// The target to monitor and the collectors to execute on it.
target: {
// Data source name always has a URI schema that matches the driver name. In some cases (e.g. MySQL)
// the schema gets dropped or replaced to match the driver expected DSN format.
data_source_name: connection_string,

// Collectors (referenced by name) to execute on the target.
// Glob patterns are supported (see <https://pkg.go.dev/path/filepath#Match> for syntax).
collectors: [
collector_name,
],
},

// Collector files specify a list of globs. One collector definition is read from each matching file.
// Glob patterns are supported (see <https://pkg.go.dev/path/filepath#Match> for syntax).
collector_files: [
collector_file,
],
}
@@ -1 +0,0 @@
SELECT num_requested AS checkpoints_req FROM pg_stat_checkpointer;
@@ -1,15 +0,0 @@
local neon = import 'neon.libsonnet';

local pg_stat_bgwriter = importstr 'sql_exporter/checkpoints_req.sql';
local pg_stat_checkpointer = importstr 'sql_exporter/checkpoints_req.17.sql';

{
metric_name: 'checkpoints_req',
type: 'gauge',
help: 'Number of requested checkpoints',
key_labels: null,
values: [
'checkpoints_req',
],
query: if neon.PG_MAJORVERSION_NUM < 17 then pg_stat_bgwriter else pg_stat_checkpointer,
}
@@ -1 +0,0 @@
SELECT checkpoints_req FROM pg_stat_bgwriter;
@@ -1 +0,0 @@
SELECT num_timed AS checkpoints_timed FROM pg_stat_checkpointer;
@@ -1,15 +0,0 @@
local neon = import 'neon.libsonnet';

local pg_stat_bgwriter = importstr 'sql_exporter/checkpoints_timed.sql';
local pg_stat_checkpointer = importstr 'sql_exporter/checkpoints_timed.17.sql';

{
metric_name: 'checkpoints_timed',
type: 'gauge',
help: 'Number of scheduled checkpoints',
key_labels: null,
values: [
'checkpoints_timed',
],
query: if neon.PG_MAJORVERSION_NUM < 17 then pg_stat_bgwriter else pg_stat_checkpointer,
}
@@ -1 +0,0 @@
SELECT checkpoints_timed FROM pg_stat_bgwriter;
@@ -1,10 +0,0 @@
{
metric_name: 'compute_backpressure_throttling_seconds',
type: 'gauge',
help: 'Time compute has spent throttled',
key_labels: null,
values: [
'throttled',
],
query: importstr 'sql_exporter/compute_backpressure_throttling_seconds.sql',
}
@@ -1 +0,0 @@
SELECT (neon.backpressure_throttling_time()::float8 / 1000000) AS throttled;
@@ -1,10 +0,0 @@
{
metric_name: 'compute_current_lsn',
type: 'gauge',
help: 'Current LSN of the database',
key_labels: null,
values: [
'lsn',
],
query: importstr 'sql_exporter/compute_current_lsn.sql',
}
@@ -1,4 +0,0 @@
SELECT CASE
WHEN pg_catalog.pg_is_in_recovery() THEN (pg_last_wal_replay_lsn() - '0/0')::FLOAT8
ELSE (pg_current_wal_lsn() - '0/0')::FLOAT8
END AS lsn;
@@ -1,12 +0,0 @@
{
metric_name: 'compute_logical_snapshot_files',
type: 'gauge',
help: 'Number of snapshot files in pg_logical/snapshot',
key_labels: [
'timeline_id',
],
values: [
'num_logical_snapshot_files',
],
query: importstr 'sql_exporter/compute_logical_snapshot_files.sql',
}
@@ -1,7 +0,0 @@
SELECT
(SELECT setting FROM pg_settings WHERE name = 'neon.timeline_id') AS timeline_id,
-- Postgres creates temporary snapshot files of the form %X-%X.snap.%d.tmp.
-- These temporary snapshot files are renamed to the actual snapshot files
-- after they are completely built. We only WAL-log the completely built
-- snapshot files
(SELECT COUNT(*) FROM pg_ls_dir('pg_logical/snapshots') AS name WHERE name LIKE '%.snap') AS num_logical_snapshot_files;
@@ -1,7 +0,0 @@
SELECT
(SELECT current_setting('neon.timeline_id')) AS timeline_id,
-- Postgres creates temporary snapshot files of the form %X-%X.snap.%d.tmp.
-- These temporary snapshot files are renamed to the actual snapshot files
-- after they are completely built. We only WAL-log the completely built
-- snapshot files
(SELECT COALESCE(sum(size), 0) FROM pg_ls_logicalsnapdir() WHERE name LIKE '%.snap') AS logical_snapshots_bytes;
@@ -1,17 +0,0 @@
local neon = import 'neon.libsonnet';

local pg_ls_logicalsnapdir = importstr 'sql_exporter/compute_logical_snapshots_bytes.15.sql';
local pg_ls_dir = importstr 'sql_exporter/compute_logical_snapshots_bytes.sql';

{
metric_name: 'compute_logical_snapshots_bytes',
type: 'gauge',
help: 'Size of the pg_logical/snapshots directory, not including temporary files',
key_labels: [
'timeline_id',
],
values: [
'logical_snapshots_bytes',
],
query: if neon.PG_MAJORVERSION_NUM < 15 then pg_ls_dir else pg_ls_logicalsnapdir,
}
@@ -1,9 +0,0 @@
SELECT
(SELECT setting FROM pg_settings WHERE name = 'neon.timeline_id') AS timeline_id,
-- Postgres creates temporary snapshot files of the form %X-%X.snap.%d.tmp.
-- These temporary snapshot files are renamed to the actual snapshot files
-- after they are completely built. We only WAL-log the completely built
-- snapshot files
(SELECT COALESCE(sum((pg_stat_file('pg_logical/snapshots/' || name, missing_ok => true)).size), 0)
FROM (SELECT * FROM pg_ls_dir('pg_logical/snapshots') WHERE pg_ls_dir LIKE '%.snap') AS name
) AS logical_snapshots_bytes;
@@ -1,10 +0,0 @@
{
metric_name: 'compute_max_connections',
type: 'gauge',
help: 'Max connections allowed for Postgres',
key_labels: null,
values: [
'max_connections',
],
query: importstr 'sql_exporter/compute_max_connections.sql',
}
@@ -1 +0,0 @@
SELECT current_setting('max_connections') as max_connections;
@@ -1,10 +0,0 @@
{
metric_name: 'compute_receive_lsn',
type: 'gauge',
help: 'Returns the last write-ahead log location that has been received and synced to disk by streaming replication',
key_labels: null,
values: [
'lsn',
],
query: importstr 'sql_exporter/compute_receive_lsn.sql',
}
@@ -1,4 +0,0 @@
SELECT CASE
WHEN pg_catalog.pg_is_in_recovery() THEN (pg_last_wal_receive_lsn() - '0/0')::FLOAT8
ELSE 0
END AS lsn;
@@ -1,12 +0,0 @@
{
metric_name: 'compute_subscriptions_count',
type: 'gauge',
help: 'Number of logical replication subscriptions grouped by enabled/disabled',
key_labels: [
'enabled',
],
values: [
'subscriptions_count',
],
query: importstr 'sql_exporter/compute_subscriptions_count.sql',
}
@@ -1 +0,0 @@
SELECT subenabled::text AS enabled, count(*) AS subscriptions_count FROM pg_subscription GROUP BY subenabled;
@@ -1,13 +0,0 @@
{
metric_name: 'connection_counts',
type: 'gauge',
help: 'Connection counts',
key_labels: [
'datname',
'state',
],
values: [
'count',
],
query: importstr 'sql_exporter/connection_counts.sql',
}
@@ -1 +0,0 @@
SELECT datname, state, count(*) AS count FROM pg_stat_activity WHERE state <> '' GROUP BY datname, state;
@@ -1,10 +0,0 @@
{
metric_name: 'db_total_size',
type: 'gauge',
help: 'Size of all databases',
key_labels: null,
values: [
'total',
],
query: importstr 'sql_exporter/db_total_size.sql',
}
@@ -1 +0,0 @@
SELECT sum(pg_database_size(datname)) AS total FROM pg_database;
@@ -1,12 +0,0 @@
{
metric_name: 'file_cache_read_wait_seconds_bucket',
type: 'counter',
help: 'Histogram buckets of LFC read operation latencies',
key_labels: [
'bucket_le',
],
values: [
'value',
],
query: importstr 'sql_exporter/file_cache_read_wait_seconds_bucket.sql',
}
@@ -1 +0,0 @@
SELECT bucket_le, value FROM neon.neon_perf_counters WHERE metric = 'file_cache_read_wait_seconds_bucket';
@@ -1,9 +0,0 @@
{
metric_name: 'file_cache_read_wait_seconds_count',
type: 'counter',
help: 'Number of read operations in LFC',
values: [
'file_cache_read_wait_seconds_count',
],
query_ref: 'neon_perf_counters',
}
@@ -1,9 +0,0 @@
{
metric_name: 'file_cache_read_wait_seconds_sum',
type: 'counter',
help: 'Time spent in LFC read operations',
values: [
'file_cache_read_wait_seconds_sum',
],
query_ref: 'neon_perf_counters',
}
@@ -1,12 +0,0 @@
{
metric_name: 'file_cache_write_wait_seconds_bucket',
type: 'counter',
help: 'Histogram buckets of LFC write operation latencies',
key_labels: [
'bucket_le',
],
values: [
'value',
],
query: importstr 'sql_exporter/file_cache_write_wait_seconds_bucket.sql',
}
@@ -1 +0,0 @@
SELECT bucket_le, value FROM neon.neon_perf_counters WHERE metric = 'file_cache_write_wait_seconds_bucket';
@@ -1,9 +0,0 @@
{
metric_name: 'file_cache_write_wait_seconds_count',
type: 'counter',
help: 'Number of write operations in LFC',
values: [
'file_cache_write_wait_seconds_count',
],
query_ref: 'neon_perf_counters',
}
@@ -1,9 +0,0 @@
{
metric_name: 'file_cache_write_wait_seconds_sum',
type: 'counter',
help: 'Time spent in LFC write operations',
values: [
'file_cache_write_wait_seconds_sum',
],
query_ref: 'neon_perf_counters',
}
@@ -1,9 +0,0 @@
{
metric_name: 'getpage_prefetch_discards_total',
type: 'counter',
help: 'Number of prefetch responses issued but not used',
values: [
'getpage_prefetch_discards_total',
],
query_ref: 'neon_perf_counters',
}
@@ -1,9 +0,0 @@
{
metric_name: 'getpage_prefetch_misses_total',
type: 'counter',
help: "Total number of readahead misses; consisting of either prefetches that don't satisfy the LSN bounds once the prefetch got read by the backend, or cases where somehow no readahead was issued for the read",
values: [
'getpage_prefetch_misses_total',
],
query_ref: 'neon_perf_counters',
}
@@ -1,9 +0,0 @@
{
metric_name: 'getpage_prefetch_requests_total',
type: 'counter',
help: 'Number of getpage issued for prefetching',
values: [
'getpage_prefetch_requests_total',
],
query_ref: 'neon_perf_counters',
}
Some files were not shown because too many files have changed in this diff.