Compare commits

...

9 Commits

Author SHA1 Message Date
Alexander Bayandin
cb5590373d Revert "Aurora only"
This reverts commit cf35950fe96e2342b5850d33faa2f3c2d55d3410.
2023-09-20 13:45:42 +01:00
Alexander Bayandin
254f0d8bca Aurora only 2023-09-20 13:45:42 +01:00
Alexander Bayandin
11065183d5 Add rds-aurora benchmark 2023-09-20 13:45:42 +01:00
Alexander Bayandin
b59722c482 [DO NOT MERGE]: run only for scale=65 2023-09-20 13:45:42 +01:00
Alexander Bayandin
14cef1afce Bigger timeout 2023-09-20 13:45:42 +01:00
Alexander Bayandin
b508d142f5 Nightly Benchmarks: add TPC-H with scale 300 2023-09-20 13:45:42 +01:00
Alexander Bayandin
959df3f6a5 [DO NOT MERGE] run workflow from the branch 2023-09-20 13:45:42 +01:00
Alexander Bayandin
d43a8333f4 [DO NOT MERGE] run only tpch test 2023-09-20 13:45:39 +01:00
Alexander Bayandin
bfe406b88b test_runner: add scale parameter to tpc-h tests 2023-09-20 13:43:15 +01:00

View File

@@ -2,8 +2,8 @@ name: Benchmarking
on:
# uncomment to run on push for debugging your PR
# push:
# branches: [ your branch ]
push:
branches: [ bayandin/bigger-tpc-h ]
schedule:
# * is a special character in YAML so you have to quote this string
# ┌───────────── minute (0 - 59)
@@ -34,75 +34,75 @@ concurrency:
cancel-in-progress: true
jobs:
bench:
env:
TEST_PG_BENCH_DURATIONS_MATRIX: "300"
TEST_PG_BENCH_SCALES_MATRIX: "10,100"
POSTGRES_DISTRIB_DIR: /tmp/neon/pg_install
DEFAULT_PG_VERSION: 14
TEST_OUTPUT: /tmp/test_output
BUILD_TYPE: remote
SAVE_PERF_REPORT: ${{ github.event.inputs.save_perf_report || ( github.ref_name == 'main' ) }}
PLATFORM: "neon-staging"
# bench:
# env:
# TEST_PG_BENCH_DURATIONS_MATRIX: "300"
# TEST_PG_BENCH_SCALES_MATRIX: "10,100"
# POSTGRES_DISTRIB_DIR: /tmp/neon/pg_install
# DEFAULT_PG_VERSION: 14
# TEST_OUTPUT: /tmp/test_output
# BUILD_TYPE: remote
# SAVE_PERF_REPORT: ${{ github.event.inputs.save_perf_report || ( github.ref_name == 'main' ) }}
# PLATFORM: "neon-staging"
runs-on: [ self-hosted, us-east-2, x64 ]
container:
image: 369495373322.dkr.ecr.eu-central-1.amazonaws.com/rust:pinned
options: --init
# runs-on: [ self-hosted, us-east-2, x64 ]
# container:
# image: 369495373322.dkr.ecr.eu-central-1.amazonaws.com/rust:pinned
# options: --init
steps:
- uses: actions/checkout@v3
# steps:
# - uses: actions/checkout@v3
- name: Download Neon artifact
uses: ./.github/actions/download
with:
name: neon-${{ runner.os }}-release-artifact
path: /tmp/neon/
prefix: latest
# - name: Download Neon artifact
# uses: ./.github/actions/download
# with:
# name: neon-${{ runner.os }}-release-artifact
# path: /tmp/neon/
# prefix: latest
- name: Create Neon Project
id: create-neon-project
uses: ./.github/actions/neon-project-create
with:
region_id: ${{ github.event.inputs.region_id || 'aws-us-east-2' }}
postgres_version: ${{ env.DEFAULT_PG_VERSION }}
api_key: ${{ secrets.NEON_STAGING_API_KEY }}
# - name: Create Neon Project
# id: create-neon-project
# uses: ./.github/actions/neon-project-create
# with:
# region_id: ${{ github.event.inputs.region_id || 'aws-us-east-2' }}
# postgres_version: ${{ env.DEFAULT_PG_VERSION }}
# api_key: ${{ secrets.NEON_STAGING_API_KEY }}
- name: Run benchmark
uses: ./.github/actions/run-python-test-set
with:
build_type: ${{ env.BUILD_TYPE }}
test_selection: performance
run_in_parallel: false
save_perf_report: ${{ env.SAVE_PERF_REPORT }}
# Set --sparse-ordering option of pytest-order plugin
# to ensure tests run in the order they appear in the file.
# It's important for test_perf_pgbench.py::test_pgbench_remote_* tests
extra_params: -m remote_cluster --sparse-ordering --timeout 5400 --ignore test_runner/performance/test_perf_olap.py
env:
BENCHMARK_CONNSTR: ${{ steps.create-neon-project.outputs.dsn }}
VIP_VAP_ACCESS_TOKEN: "${{ secrets.VIP_VAP_ACCESS_TOKEN }}"
PERF_TEST_RESULT_CONNSTR: "${{ secrets.PERF_TEST_RESULT_CONNSTR }}"
# - name: Run benchmark
# uses: ./.github/actions/run-python-test-set
# with:
# build_type: ${{ env.BUILD_TYPE }}
# test_selection: performance
# run_in_parallel: false
# save_perf_report: ${{ env.SAVE_PERF_REPORT }}
# # Set --sparse-ordering option of pytest-order plugin
# # to ensure tests run in the order they appear in the file.
# # It's important for test_perf_pgbench.py::test_pgbench_remote_* tests
# extra_params: -m remote_cluster --sparse-ordering --timeout 5400 --ignore test_runner/performance/test_perf_olap.py
# env:
# BENCHMARK_CONNSTR: ${{ steps.create-neon-project.outputs.dsn }}
# VIP_VAP_ACCESS_TOKEN: "${{ secrets.VIP_VAP_ACCESS_TOKEN }}"
# PERF_TEST_RESULT_CONNSTR: "${{ secrets.PERF_TEST_RESULT_CONNSTR }}"
- name: Delete Neon Project
if: ${{ always() }}
uses: ./.github/actions/neon-project-delete
with:
project_id: ${{ steps.create-neon-project.outputs.project_id }}
api_key: ${{ secrets.NEON_STAGING_API_KEY }}
# - name: Delete Neon Project
# if: ${{ always() }}
# uses: ./.github/actions/neon-project-delete
# with:
# project_id: ${{ steps.create-neon-project.outputs.project_id }}
# api_key: ${{ secrets.NEON_STAGING_API_KEY }}
- name: Create Allure report
if: ${{ !cancelled() }}
uses: ./.github/actions/allure-report-generate
# - name: Create Allure report
# if: ${{ !cancelled() }}
# uses: ./.github/actions/allure-report-generate
- name: Post to a Slack channel
if: ${{ github.event.schedule && failure() }}
uses: slackapi/slack-github-action@v1
with:
channel-id: "C033QLM5P7D" # dev-staging-stream
slack-message: "Periodic perf testing: ${{ job.status }}\n${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"
env:
SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}
# - name: Post to a Slack channel
# if: ${{ github.event.schedule && failure() }}
# uses: slackapi/slack-github-action@v1
# with:
# channel-id: "C033QLM5P7D" # dev-staging-stream
# slack-message: "Periodic perf testing: ${{ job.status }}\n${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"
# env:
# SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}
generate-matrices:
# Create matrices for the benchmarking jobs, so we run benchmarks on rds only once a week (on Saturday)
@@ -164,10 +164,11 @@ jobs:
run: |
matrix='{
"platform": [
"neon-captest-reuse"
"neon-captest-reuse",
"rds-aurora"
],
"scale": [
"10"
"65"
]
}'
@@ -178,242 +179,242 @@ jobs:
echo "matrix=$(echo "$matrix" | jq --compact-output '.')" >> $GITHUB_OUTPUT
pgbench-compare:
needs: [ generate-matrices ]
# pgbench-compare:
# needs: [ generate-matrices ]
strategy:
fail-fast: false
matrix: ${{fromJson(needs.generate-matrices.outputs.pgbench-compare-matrix)}}
# strategy:
# fail-fast: false
# matrix: ${{fromJson(needs.generate-matrices.outputs.pgbench-compare-matrix)}}
env:
TEST_PG_BENCH_DURATIONS_MATRIX: "60m"
TEST_PG_BENCH_SCALES_MATRIX: ${{ matrix.db_size }}
POSTGRES_DISTRIB_DIR: /tmp/neon/pg_install
DEFAULT_PG_VERSION: 14
TEST_OUTPUT: /tmp/test_output
BUILD_TYPE: remote
SAVE_PERF_REPORT: ${{ github.event.inputs.save_perf_report || ( github.ref_name == 'main' ) }}
PLATFORM: ${{ matrix.platform }}
# env:
# TEST_PG_BENCH_DURATIONS_MATRIX: "60m"
# TEST_PG_BENCH_SCALES_MATRIX: ${{ matrix.db_size }}
# POSTGRES_DISTRIB_DIR: /tmp/neon/pg_install
# DEFAULT_PG_VERSION: 14
# TEST_OUTPUT: /tmp/test_output
# BUILD_TYPE: remote
# SAVE_PERF_REPORT: ${{ github.event.inputs.save_perf_report || ( github.ref_name == 'main' ) }}
# PLATFORM: ${{ matrix.platform }}
runs-on: [ self-hosted, us-east-2, x64 ]
container:
image: 369495373322.dkr.ecr.eu-central-1.amazonaws.com/rust:pinned
options: --init
# runs-on: [ self-hosted, us-east-2, x64 ]
# container:
# image: 369495373322.dkr.ecr.eu-central-1.amazonaws.com/rust:pinned
# options: --init
# Increase timeout to 8h, default timeout is 6h
timeout-minutes: 480
# # Increase timeout to 8h, default timeout is 6h
# timeout-minutes: 480
steps:
- uses: actions/checkout@v3
# steps:
# - uses: actions/checkout@v3
- name: Download Neon artifact
uses: ./.github/actions/download
with:
name: neon-${{ runner.os }}-release-artifact
path: /tmp/neon/
prefix: latest
# - name: Download Neon artifact
# uses: ./.github/actions/download
# with:
# name: neon-${{ runner.os }}-release-artifact
# path: /tmp/neon/
# prefix: latest
- name: Add Postgres binaries to PATH
run: |
${POSTGRES_DISTRIB_DIR}/v${DEFAULT_PG_VERSION}/bin/pgbench --version
echo "${POSTGRES_DISTRIB_DIR}/v${DEFAULT_PG_VERSION}/bin" >> $GITHUB_PATH
# - name: Add Postgres binaries to PATH
# run: |
# ${POSTGRES_DISTRIB_DIR}/v${DEFAULT_PG_VERSION}/bin/pgbench --version
# echo "${POSTGRES_DISTRIB_DIR}/v${DEFAULT_PG_VERSION}/bin" >> $GITHUB_PATH
- name: Create Neon Project
if: contains(fromJson('["neon-captest-new", "neon-captest-freetier", "neonvm-captest-new", "neonvm-captest-freetier"]'), matrix.platform)
id: create-neon-project
uses: ./.github/actions/neon-project-create
with:
region_id: ${{ github.event.inputs.region_id || 'aws-us-east-2' }}
postgres_version: ${{ env.DEFAULT_PG_VERSION }}
api_key: ${{ secrets.NEON_STAGING_API_KEY }}
compute_units: ${{ (matrix.platform == 'neon-captest-freetier' && '[0.25, 0.25]') || '[1, 1]' }}
provisioner: ${{ (contains(matrix.platform, 'neonvm-') && 'k8s-neonvm') || 'k8s-pod' }}
# - name: Create Neon Project
# if: contains(fromJson('["neon-captest-new", "neon-captest-freetier", "neonvm-captest-new", "neonvm-captest-freetier"]'), matrix.platform)
# id: create-neon-project
# uses: ./.github/actions/neon-project-create
# with:
# region_id: ${{ github.event.inputs.region_id || 'aws-us-east-2' }}
# postgres_version: ${{ env.DEFAULT_PG_VERSION }}
# api_key: ${{ secrets.NEON_STAGING_API_KEY }}
# compute_units: ${{ (matrix.platform == 'neon-captest-freetier' && '[0.25, 0.25]') || '[1, 1]' }}
# provisioner: ${{ (contains(matrix.platform, 'neonvm-') && 'k8s-neonvm') || 'k8s-pod' }}
- name: Set up Connection String
id: set-up-connstr
run: |
case "${PLATFORM}" in
neon-captest-reuse)
CONNSTR=${{ secrets.BENCHMARK_CAPTEST_CONNSTR }}
;;
neon-captest-new | neon-captest-freetier | neonvm-captest-new | neonvm-captest-freetier)
CONNSTR=${{ steps.create-neon-project.outputs.dsn }}
;;
rds-aurora)
CONNSTR=${{ secrets.BENCHMARK_RDS_AURORA_CONNSTR }}
;;
rds-postgres)
CONNSTR=${{ secrets.BENCHMARK_RDS_POSTGRES_CONNSTR }}
;;
*)
echo >&2 "Unknown PLATFORM=${PLATFORM}"
exit 1
;;
esac
# - name: Set up Connection String
# id: set-up-connstr
# run: |
# case "${PLATFORM}" in
# neon-captest-reuse)
# CONNSTR=${{ secrets.BENCHMARK_CAPTEST_CONNSTR }}
# ;;
# neon-captest-new | neon-captest-freetier | neonvm-captest-new | neonvm-captest-freetier)
# CONNSTR=${{ steps.create-neon-project.outputs.dsn }}
# ;;
# rds-aurora)
# CONNSTR=${{ secrets.BENCHMARK_RDS_AURORA_CONNSTR }}
# ;;
# rds-postgres)
# CONNSTR=${{ secrets.BENCHMARK_RDS_POSTGRES_CONNSTR }}
# ;;
# *)
# echo >&2 "Unknown PLATFORM=${PLATFORM}"
# exit 1
# ;;
# esac
echo "connstr=${CONNSTR}" >> $GITHUB_OUTPUT
# echo "connstr=${CONNSTR}" >> $GITHUB_OUTPUT
QUERY="SELECT version();"
if [[ "${PLATFORM}" = "neon"* ]]; then
QUERY="${QUERY} SHOW neon.tenant_id; SHOW neon.timeline_id;"
fi
psql ${CONNSTR} -c "${QUERY}"
# QUERY="SELECT version();"
# if [[ "${PLATFORM}" = "neon"* ]]; then
# QUERY="${QUERY} SHOW neon.tenant_id; SHOW neon.timeline_id;"
# fi
# psql ${CONNSTR} -c "${QUERY}"
- name: Benchmark init
uses: ./.github/actions/run-python-test-set
with:
build_type: ${{ env.BUILD_TYPE }}
test_selection: performance
run_in_parallel: false
save_perf_report: ${{ env.SAVE_PERF_REPORT }}
extra_params: -m remote_cluster --timeout 21600 -k test_pgbench_remote_init
env:
BENCHMARK_CONNSTR: ${{ steps.set-up-connstr.outputs.connstr }}
VIP_VAP_ACCESS_TOKEN: "${{ secrets.VIP_VAP_ACCESS_TOKEN }}"
PERF_TEST_RESULT_CONNSTR: "${{ secrets.PERF_TEST_RESULT_CONNSTR }}"
# - name: Benchmark init
# uses: ./.github/actions/run-python-test-set
# with:
# build_type: ${{ env.BUILD_TYPE }}
# test_selection: performance
# run_in_parallel: false
# save_perf_report: ${{ env.SAVE_PERF_REPORT }}
# extra_params: -m remote_cluster --timeout 21600 -k test_pgbench_remote_init
# env:
# BENCHMARK_CONNSTR: ${{ steps.set-up-connstr.outputs.connstr }}
# VIP_VAP_ACCESS_TOKEN: "${{ secrets.VIP_VAP_ACCESS_TOKEN }}"
# PERF_TEST_RESULT_CONNSTR: "${{ secrets.PERF_TEST_RESULT_CONNSTR }}"
- name: Benchmark simple-update
uses: ./.github/actions/run-python-test-set
with:
build_type: ${{ env.BUILD_TYPE }}
test_selection: performance
run_in_parallel: false
save_perf_report: ${{ env.SAVE_PERF_REPORT }}
extra_params: -m remote_cluster --timeout 21600 -k test_pgbench_remote_simple_update
env:
BENCHMARK_CONNSTR: ${{ steps.set-up-connstr.outputs.connstr }}
VIP_VAP_ACCESS_TOKEN: "${{ secrets.VIP_VAP_ACCESS_TOKEN }}"
PERF_TEST_RESULT_CONNSTR: "${{ secrets.PERF_TEST_RESULT_CONNSTR }}"
# - name: Benchmark simple-update
# uses: ./.github/actions/run-python-test-set
# with:
# build_type: ${{ env.BUILD_TYPE }}
# test_selection: performance
# run_in_parallel: false
# save_perf_report: ${{ env.SAVE_PERF_REPORT }}
# extra_params: -m remote_cluster --timeout 21600 -k test_pgbench_remote_simple_update
# env:
# BENCHMARK_CONNSTR: ${{ steps.set-up-connstr.outputs.connstr }}
# VIP_VAP_ACCESS_TOKEN: "${{ secrets.VIP_VAP_ACCESS_TOKEN }}"
# PERF_TEST_RESULT_CONNSTR: "${{ secrets.PERF_TEST_RESULT_CONNSTR }}"
- name: Benchmark select-only
uses: ./.github/actions/run-python-test-set
with:
build_type: ${{ env.BUILD_TYPE }}
test_selection: performance
run_in_parallel: false
save_perf_report: ${{ env.SAVE_PERF_REPORT }}
extra_params: -m remote_cluster --timeout 21600 -k test_pgbench_remote_select_only
env:
BENCHMARK_CONNSTR: ${{ steps.set-up-connstr.outputs.connstr }}
VIP_VAP_ACCESS_TOKEN: "${{ secrets.VIP_VAP_ACCESS_TOKEN }}"
PERF_TEST_RESULT_CONNSTR: "${{ secrets.PERF_TEST_RESULT_CONNSTR }}"
# - name: Benchmark select-only
# uses: ./.github/actions/run-python-test-set
# with:
# build_type: ${{ env.BUILD_TYPE }}
# test_selection: performance
# run_in_parallel: false
# save_perf_report: ${{ env.SAVE_PERF_REPORT }}
# extra_params: -m remote_cluster --timeout 21600 -k test_pgbench_remote_select_only
# env:
# BENCHMARK_CONNSTR: ${{ steps.set-up-connstr.outputs.connstr }}
# VIP_VAP_ACCESS_TOKEN: "${{ secrets.VIP_VAP_ACCESS_TOKEN }}"
# PERF_TEST_RESULT_CONNSTR: "${{ secrets.PERF_TEST_RESULT_CONNSTR }}"
- name: Delete Neon Project
if: ${{ steps.create-neon-project.outputs.project_id && always() }}
uses: ./.github/actions/neon-project-delete
with:
project_id: ${{ steps.create-neon-project.outputs.project_id }}
api_key: ${{ secrets.NEON_STAGING_API_KEY }}
# - name: Delete Neon Project
# if: ${{ steps.create-neon-project.outputs.project_id && always() }}
# uses: ./.github/actions/neon-project-delete
# with:
# project_id: ${{ steps.create-neon-project.outputs.project_id }}
# api_key: ${{ secrets.NEON_STAGING_API_KEY }}
- name: Create Allure report
if: ${{ !cancelled() }}
uses: ./.github/actions/allure-report-generate
# - name: Create Allure report
# if: ${{ !cancelled() }}
# uses: ./.github/actions/allure-report-generate
- name: Post to a Slack channel
if: ${{ github.event.schedule && failure() }}
uses: slackapi/slack-github-action@v1
with:
channel-id: "C033QLM5P7D" # dev-staging-stream
slack-message: "Periodic perf testing ${{ matrix.platform }}: ${{ job.status }}\n${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"
env:
SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}
# - name: Post to a Slack channel
# if: ${{ github.event.schedule && failure() }}
# uses: slackapi/slack-github-action@v1
# with:
# channel-id: "C033QLM5P7D" # dev-staging-stream
# slack-message: "Periodic perf testing ${{ matrix.platform }}: ${{ job.status }}\n${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"
# env:
# SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}
clickbench-compare:
# ClickBench DB for rds-aurora and rds-postgres deployed to the same clusters
# we use for performance testing in pgbench-compare.
# Run this job only when pgbench-compare is finished to avoid the intersection.
# We might change it after https://github.com/neondatabase/neon/issues/2900.
#
# *_CLICKBENCH_CONNSTR: Genuine ClickBench DB with ~100M rows
# *_CLICKBENCH_10M_CONNSTR: DB with the first 10M rows of ClickBench DB
if: ${{ !cancelled() }}
needs: [ generate-matrices, pgbench-compare ]
# clickbench-compare:
# # ClickBench DB for rds-aurora and rds-postgres deployed to the same clusters
# # we use for performance testing in pgbench-compare.
# # Run this job only when pgbench-compare is finished to avoid the intersection.
# # We might change it after https://github.com/neondatabase/neon/issues/2900.
# #
# # *_CLICKBENCH_CONNSTR: Genuine ClickBench DB with ~100M rows
# # *_CLICKBENCH_10M_CONNSTR: DB with the first 10M rows of ClickBench DB
# if: ${{ !cancelled() }}
# needs: [ generate-matrices, pgbench-compare ]
strategy:
fail-fast: false
matrix: ${{ fromJson(needs.generate-matrices.outputs.olap-compare-matrix) }}
# strategy:
# fail-fast: false
# matrix: ${{ fromJson(needs.generate-matrices.outputs.olap-compare-matrix) }}
env:
POSTGRES_DISTRIB_DIR: /tmp/neon/pg_install
DEFAULT_PG_VERSION: 14
TEST_OUTPUT: /tmp/test_output
BUILD_TYPE: remote
SAVE_PERF_REPORT: ${{ github.event.inputs.save_perf_report || ( github.ref_name == 'main' ) }}
PLATFORM: ${{ matrix.platform }}
# env:
# POSTGRES_DISTRIB_DIR: /tmp/neon/pg_install
# DEFAULT_PG_VERSION: 14
# TEST_OUTPUT: /tmp/test_output
# BUILD_TYPE: remote
# SAVE_PERF_REPORT: ${{ github.event.inputs.save_perf_report || ( github.ref_name == 'main' ) }}
# PLATFORM: ${{ matrix.platform }}
runs-on: [ self-hosted, us-east-2, x64 ]
container:
image: 369495373322.dkr.ecr.eu-central-1.amazonaws.com/rust:pinned
options: --init
# runs-on: [ self-hosted, us-east-2, x64 ]
# container:
# image: 369495373322.dkr.ecr.eu-central-1.amazonaws.com/rust:pinned
# options: --init
steps:
- uses: actions/checkout@v3
# steps:
# - uses: actions/checkout@v3
- name: Download Neon artifact
uses: ./.github/actions/download
with:
name: neon-${{ runner.os }}-release-artifact
path: /tmp/neon/
prefix: latest
# - name: Download Neon artifact
# uses: ./.github/actions/download
# with:
# name: neon-${{ runner.os }}-release-artifact
# path: /tmp/neon/
# prefix: latest
- name: Add Postgres binaries to PATH
run: |
${POSTGRES_DISTRIB_DIR}/v${DEFAULT_PG_VERSION}/bin/pgbench --version
echo "${POSTGRES_DISTRIB_DIR}/v${DEFAULT_PG_VERSION}/bin" >> $GITHUB_PATH
# - name: Add Postgres binaries to PATH
# run: |
# ${POSTGRES_DISTRIB_DIR}/v${DEFAULT_PG_VERSION}/bin/pgbench --version
# echo "${POSTGRES_DISTRIB_DIR}/v${DEFAULT_PG_VERSION}/bin" >> $GITHUB_PATH
- name: Set up Connection String
id: set-up-connstr
run: |
case "${PLATFORM}" in
neon-captest-reuse)
CONNSTR=${{ secrets.BENCHMARK_CAPTEST_CLICKBENCH_10M_CONNSTR }}
;;
rds-aurora)
CONNSTR=${{ secrets.BENCHMARK_RDS_AURORA_CLICKBENCH_10M_CONNSTR }}
;;
rds-postgres)
CONNSTR=${{ secrets.BENCHMARK_RDS_POSTGRES_CLICKBENCH_10M_CONNSTR }}
;;
*)
echo >&2 "Unknown PLATFORM=${PLATFORM}. Allowed only 'neon-captest-reuse', 'rds-aurora', or 'rds-postgres'"
exit 1
;;
esac
# - name: Set up Connection String
# id: set-up-connstr
# run: |
# case "${PLATFORM}" in
# neon-captest-reuse)
# CONNSTR=${{ secrets.BENCHMARK_CAPTEST_CLICKBENCH_10M_CONNSTR }}
# ;;
# rds-aurora)
# CONNSTR=${{ secrets.BENCHMARK_RDS_AURORA_CLICKBENCH_10M_CONNSTR }}
# ;;
# rds-postgres)
# CONNSTR=${{ secrets.BENCHMARK_RDS_POSTGRES_CLICKBENCH_10M_CONNSTR }}
# ;;
# *)
# echo >&2 "Unknown PLATFORM=${PLATFORM}. Allowed only 'neon-captest-reuse', 'rds-aurora', or 'rds-postgres'"
# exit 1
# ;;
# esac
echo "connstr=${CONNSTR}" >> $GITHUB_OUTPUT
# echo "connstr=${CONNSTR}" >> $GITHUB_OUTPUT
QUERY="SELECT version();"
if [[ "${PLATFORM}" = "neon"* ]]; then
QUERY="${QUERY} SHOW neon.tenant_id; SHOW neon.timeline_id;"
fi
psql ${CONNSTR} -c "${QUERY}"
# QUERY="SELECT version();"
# if [[ "${PLATFORM}" = "neon"* ]]; then
# QUERY="${QUERY} SHOW neon.tenant_id; SHOW neon.timeline_id;"
# fi
# psql ${CONNSTR} -c "${QUERY}"
- name: ClickBench benchmark
uses: ./.github/actions/run-python-test-set
with:
build_type: ${{ env.BUILD_TYPE }}
test_selection: performance/test_perf_olap.py
run_in_parallel: false
save_perf_report: ${{ env.SAVE_PERF_REPORT }}
extra_params: -m remote_cluster --timeout 21600 -k test_clickbench
env:
VIP_VAP_ACCESS_TOKEN: "${{ secrets.VIP_VAP_ACCESS_TOKEN }}"
PERF_TEST_RESULT_CONNSTR: "${{ secrets.PERF_TEST_RESULT_CONNSTR }}"
BENCHMARK_CONNSTR: ${{ steps.set-up-connstr.outputs.connstr }}
TEST_OLAP_SCALE: 10
# - name: ClickBench benchmark
# uses: ./.github/actions/run-python-test-set
# with:
# build_type: ${{ env.BUILD_TYPE }}
# test_selection: performance/test_perf_olap.py
# run_in_parallel: false
# save_perf_report: ${{ env.SAVE_PERF_REPORT }}
# extra_params: -m remote_cluster --timeout 21600 -k test_clickbench
# env:
# VIP_VAP_ACCESS_TOKEN: "${{ secrets.VIP_VAP_ACCESS_TOKEN }}"
# PERF_TEST_RESULT_CONNSTR: "${{ secrets.PERF_TEST_RESULT_CONNSTR }}"
# BENCHMARK_CONNSTR: ${{ steps.set-up-connstr.outputs.connstr }}
# TEST_OLAP_SCALE: 10
- name: Create Allure report
if: ${{ !cancelled() }}
uses: ./.github/actions/allure-report-generate
# - name: Create Allure report
# if: ${{ !cancelled() }}
# uses: ./.github/actions/allure-report-generate
- name: Post to a Slack channel
if: ${{ github.event.schedule && failure() }}
uses: slackapi/slack-github-action@v1
with:
channel-id: "C033QLM5P7D" # dev-staging-stream
slack-message: "Periodic OLAP perf testing ${{ matrix.platform }}: ${{ job.status }}\n${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"
env:
SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}
# - name: Post to a Slack channel
# if: ${{ github.event.schedule && failure() }}
# uses: slackapi/slack-github-action@v1
# with:
# channel-id: "C033QLM5P7D" # dev-staging-stream
# slack-message: "Periodic OLAP perf testing ${{ matrix.platform }}: ${{ job.status }}\n${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"
# env:
# SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}
tpch-compare:
# TPC-H DB for rds-aurora and rds-postgres deployed to the same clusters
@@ -423,7 +424,7 @@ jobs:
#
# *_TPCH_S10_CONNSTR: DB generated with scale factor 10 (~10 GB)
if: ${{ !cancelled() }}
needs: [ generate-matrices, clickbench-compare ]
needs: [ generate-matrices ]
strategy:
fail-fast: false
@@ -443,6 +444,8 @@ jobs:
image: 369495373322.dkr.ecr.eu-central-1.amazonaws.com/rust:pinned
options: --init
timeout-minutes: 4320
steps:
- uses: actions/checkout@v3
@@ -492,6 +495,9 @@ jobs:
fi
psql ${CONNSTR} -c "${QUERY}"
# Print `tenant_id` and `timeline_id` to ease debugging
psql ${CONNSTR} -c "SHOW neon.tenant_id; SHOW neon.timeline_id;" || true
- name: Run TPC-H benchmark
uses: ./.github/actions/run-python-test-set
with:
@@ -519,91 +525,91 @@ jobs:
env:
SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}
user-examples-compare:
if: ${{ !cancelled() }}
needs: [ generate-matrices, tpch-compare ]
# user-examples-compare:
# if: ${{ !cancelled() }}
# needs: [ generate-matrices, tpch-compare ]
strategy:
fail-fast: false
matrix: ${{ fromJson(needs.generate-matrices.outputs.olap-compare-matrix) }}
# strategy:
# fail-fast: false
# matrix: ${{ fromJson(needs.generate-matrices.outputs.olap-compare-matrix) }}
env:
POSTGRES_DISTRIB_DIR: /tmp/neon/pg_install
DEFAULT_PG_VERSION: 14
TEST_OUTPUT: /tmp/test_output
BUILD_TYPE: remote
SAVE_PERF_REPORT: ${{ github.event.inputs.save_perf_report || ( github.ref_name == 'main' ) }}
PLATFORM: ${{ matrix.platform }}
# env:
# POSTGRES_DISTRIB_DIR: /tmp/neon/pg_install
# DEFAULT_PG_VERSION: 14
# TEST_OUTPUT: /tmp/test_output
# BUILD_TYPE: remote
# SAVE_PERF_REPORT: ${{ github.event.inputs.save_perf_report || ( github.ref_name == 'main' ) }}
# PLATFORM: ${{ matrix.platform }}
runs-on: [ self-hosted, us-east-2, x64 ]
container:
image: 369495373322.dkr.ecr.eu-central-1.amazonaws.com/rust:pinned
options: --init
# runs-on: [ self-hosted, us-east-2, x64 ]
# container:
# image: 369495373322.dkr.ecr.eu-central-1.amazonaws.com/rust:pinned
# options: --init
steps:
- uses: actions/checkout@v3
# steps:
# - uses: actions/checkout@v3
- name: Download Neon artifact
uses: ./.github/actions/download
with:
name: neon-${{ runner.os }}-release-artifact
path: /tmp/neon/
prefix: latest
# - name: Download Neon artifact
# uses: ./.github/actions/download
# with:
# name: neon-${{ runner.os }}-release-artifact
# path: /tmp/neon/
# prefix: latest
- name: Add Postgres binaries to PATH
run: |
${POSTGRES_DISTRIB_DIR}/v${DEFAULT_PG_VERSION}/bin/pgbench --version
echo "${POSTGRES_DISTRIB_DIR}/v${DEFAULT_PG_VERSION}/bin" >> $GITHUB_PATH
# - name: Add Postgres binaries to PATH
# run: |
# ${POSTGRES_DISTRIB_DIR}/v${DEFAULT_PG_VERSION}/bin/pgbench --version
# echo "${POSTGRES_DISTRIB_DIR}/v${DEFAULT_PG_VERSION}/bin" >> $GITHUB_PATH
- name: Set up Connection String
id: set-up-connstr
run: |
case "${PLATFORM}" in
neon-captest-reuse)
CONNSTR=${{ secrets.BENCHMARK_USER_EXAMPLE_CAPTEST_CONNSTR }}
;;
rds-aurora)
CONNSTR=${{ secrets.BENCHMARK_USER_EXAMPLE_RDS_AURORA_CONNSTR }}
;;
rds-postgres)
CONNSTR=${{ secrets.BENCHMARK_USER_EXAMPLE_RDS_POSTGRES_CONNSTR }}
;;
*)
echo >&2 "Unknown PLATFORM=${PLATFORM}. Allowed only 'neon-captest-reuse', 'rds-aurora', or 'rds-postgres'"
exit 1
;;
esac
# - name: Set up Connection String
# id: set-up-connstr
# run: |
# case "${PLATFORM}" in
# neon-captest-reuse)
# CONNSTR=${{ secrets.BENCHMARK_USER_EXAMPLE_CAPTEST_CONNSTR }}
# ;;
# rds-aurora)
# CONNSTR=${{ secrets.BENCHMARK_USER_EXAMPLE_RDS_AURORA_CONNSTR }}
# ;;
# rds-postgres)
# CONNSTR=${{ secrets.BENCHMARK_USER_EXAMPLE_RDS_POSTGRES_CONNSTR }}
# ;;
# *)
# echo >&2 "Unknown PLATFORM=${PLATFORM}. Allowed only 'neon-captest-reuse', 'rds-aurora', or 'rds-postgres'"
# exit 1
# ;;
# esac
echo "connstr=${CONNSTR}" >> $GITHUB_OUTPUT
# echo "connstr=${CONNSTR}" >> $GITHUB_OUTPUT
QUERY="SELECT version();"
if [[ "${PLATFORM}" = "neon"* ]]; then
QUERY="${QUERY} SHOW neon.tenant_id; SHOW neon.timeline_id;"
fi
psql ${CONNSTR} -c "${QUERY}"
# QUERY="SELECT version();"
# if [[ "${PLATFORM}" = "neon"* ]]; then
# QUERY="${QUERY} SHOW neon.tenant_id; SHOW neon.timeline_id;"
# fi
# psql ${CONNSTR} -c "${QUERY}"
- name: Run user examples
uses: ./.github/actions/run-python-test-set
with:
build_type: ${{ env.BUILD_TYPE }}
test_selection: performance/test_perf_olap.py
run_in_parallel: false
save_perf_report: ${{ env.SAVE_PERF_REPORT }}
extra_params: -m remote_cluster --timeout 21600 -k test_user_examples
env:
VIP_VAP_ACCESS_TOKEN: "${{ secrets.VIP_VAP_ACCESS_TOKEN }}"
PERF_TEST_RESULT_CONNSTR: "${{ secrets.PERF_TEST_RESULT_CONNSTR }}"
BENCHMARK_CONNSTR: ${{ steps.set-up-connstr.outputs.connstr }}
# - name: Run user examples
# uses: ./.github/actions/run-python-test-set
# with:
# build_type: ${{ env.BUILD_TYPE }}
# test_selection: performance/test_perf_olap.py
# run_in_parallel: false
# save_perf_report: ${{ env.SAVE_PERF_REPORT }}
# extra_params: -m remote_cluster --timeout 21600 -k test_user_examples
# env:
# VIP_VAP_ACCESS_TOKEN: "${{ secrets.VIP_VAP_ACCESS_TOKEN }}"
# PERF_TEST_RESULT_CONNSTR: "${{ secrets.PERF_TEST_RESULT_CONNSTR }}"
# BENCHMARK_CONNSTR: ${{ steps.set-up-connstr.outputs.connstr }}
- name: Create Allure report
if: ${{ !cancelled() }}
uses: ./.github/actions/allure-report-generate
# - name: Create Allure report
# if: ${{ !cancelled() }}
# uses: ./.github/actions/allure-report-generate
- name: Post to a Slack channel
if: ${{ github.event.schedule && failure() }}
uses: slackapi/slack-github-action@v1
with:
channel-id: "C033QLM5P7D" # dev-staging-stream
slack-message: "Periodic User example perf testing ${{ matrix.platform }}: ${{ job.status }}\n${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"
env:
SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}
# - name: Post to a Slack channel
# if: ${{ github.event.schedule && failure() }}
# uses: slackapi/slack-github-action@v1
# with:
# channel-id: "C033QLM5P7D" # dev-staging-stream
# slack-message: "Periodic User example perf testing ${{ matrix.platform }}: ${{ job.status }}\n${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"
# env:
# SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}