name: Benchmarking

on:
  # uncomment to run on push for debugging your PR
  # push:
  #   branches: [ your branch ]
  schedule:
    # * is a special character in YAML so you have to quote this string
    #        ┌───────────── minute (0 - 59)
    #        │ ┌───────────── hour (0 - 23)
    #        │ │ ┌───────────── day of the month (1 - 31)
    #        │ │ │ ┌───────────── month (1 - 12 or JAN-DEC)
    #        │ │ │ │ ┌───────────── day of the week (0 - 6 or SUN-SAT)
    - cron: '0 3 * * *' # run once a day; the timezone is UTC
  workflow_dispatch: # adds the ability to run this workflow manually
    inputs:
      environment:
        description: 'Environment to run remote tests on (dev or staging)'
        required: false
      region_id:
        description: 'Use a particular region. If not set, the default region will be used.'
        required: false
      save_perf_report:
        type: boolean
        description: 'Publish the perf report or not. If not set, the report is published only for the main branch.'
        required: false

defaults:
  run:
    shell: bash -euxo pipefail {0}

concurrency:
  # Allow only one workflow run at a time for any non-`main` branch;
  # runs on `main` are keyed by commit SHA, so they never cancel each other.
  group: ${{ github.workflow }}-${{ github.ref }}-${{ github.ref == 'refs/heads/main' && github.sha || 'anysha' }}
  cancel-in-progress: true
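# For a one-off manual run, the `workflow_dispatch` trigger above can be invoked
# from the GitHub CLI, e.g. (a sketch; the input values are examples, adjust as needed):
#   gh workflow run Benchmarking -f environment=staging -f save_perf_report=true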
jobs:
  bench:
    # This workflow runs on a self-hosted runner,
    # whose environment is quite different from the usual GitHub-hosted one.
    # Probably the most important difference is that it doesn't start from a clean workspace each time:
    # e.g. if you install system packages, they are not cleaned up, since you install them directly on the host machine,
    # not in a container or similar.
    # See the documentation for more info: https://docs.github.com/en/actions/hosting-your-own-runners/about-self-hosted-runners
    runs-on: [ self-hosted, zenith-benchmarker ]

    env:
      POSTGRES_DISTRIB_DIR: /usr/pgsql
      DEFAULT_PG_VERSION: 14

    steps:
      - name: Checkout zenith repo
        uses: actions/checkout@v3

      # actions/setup-python@v2 does not work correctly on self-hosted runners,
      # see https://github.com/actions/setup-python/issues/162
      # (and probably https://github.com/actions/setup-python/issues/162#issuecomment-865387976 in particular),
      # so the simplest solution is to use the already-installed system Python
      # and spin up virtualenvs for job runs.
      # Python 3.7.10 is already installed on the machine, so use it to install
      # poetry, and then use poetry's virtualenvs.
      - name: Install poetry & deps
        run: |
          python3 -m pip install --upgrade poetry wheel
          # since pip/poetry caches are reused, there shouldn't be any trouble with installing every time
          ./scripts/pysync

      - name: Show versions
        run: |
          echo Python
          python3 --version
          poetry run python3 --version
          echo Poetry
          poetry --version
          echo Pgbench
          ${POSTGRES_DISTRIB_DIR}/v${DEFAULT_PG_VERSION}/bin/pgbench --version

      - name: Create Neon Project
        id: create-neon-project
        uses: ./.github/actions/neon-project-create
        with:
          environment: ${{ github.event.inputs.environment || 'staging' }}
          api_key: ${{ ( github.event.inputs.environment || 'staging' ) == 'staging' && secrets.NEON_STAGING_API_KEY || secrets.NEON_CAPTEST_API_KEY }}

      - name: Run benchmark
        # pgbench is installed system-wide from the official repo
        # https://download.postgresql.org/pub/repos/yum/13/redhat/rhel-7-x86_64/
        # via
        #   sudo tee /etc/yum.repos.d/pgdg.repo <...

  # [gap in the source: the rest of the `bench` job and the header of the
  #  second, per-platform job (whose steps follow) are missing from this
  #  excerpt; the fragment below is the tail of a step in that second job]

      #   ...> $GITHUB_PATH

      - name: Create Neon Project
        if: matrix.platform != 'neon-captest-reuse'
        id: create-neon-project
        uses: ./.github/actions/neon-project-create
        with:
          environment: ${{ github.event.inputs.environment || 'dev' }}
          api_key: ${{ ( github.event.inputs.environment || 'dev' ) == 'staging' && secrets.NEON_STAGING_API_KEY || secrets.NEON_CAPTEST_API_KEY }}

      - name: Set up Connection String
        id: set-up-connstr
        run: |
          case "${PLATFORM}" in
            neon-captest-reuse)
              CONNSTR=${{ secrets.BENCHMARK_CAPTEST_CONNSTR }}
              ;;
            neon-captest-new | neon-captest-prefetch)
              CONNSTR=${{ steps.create-neon-project.outputs.dsn }}
              ;;
            rds-aurora)
              CONNSTR=${{ secrets.BENCHMARK_RDS_CONNSTR }}
              ;;
            *)
              echo >&2 "Unknown PLATFORM=${PLATFORM}. Only 'neon-captest-reuse', 'neon-captest-new', 'neon-captest-prefetch' and 'rds-aurora' are allowed."
              exit 1
              ;;
          esac

          echo "::set-output name=connstr::${CONNSTR}"

          psql ${CONNSTR} -c "SELECT version();"
        env:
          PLATFORM: ${{ matrix.platform }}
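      # Note: `::set-output` (used above) is deprecated on newer GitHub runners;
      # should this step ever need migrating, the equivalent would be the
      # following sketch (not applied here):
      #   echo "connstr=${CONNSTR}" >> $GITHUB_OUTPUT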
      - name: Set database options
        if: matrix.platform == 'neon-captest-prefetch'
        run: |
          psql ${BENCHMARK_CONNSTR} -c "ALTER DATABASE main SET enable_seqscan_prefetch=on"
          psql ${BENCHMARK_CONNSTR} -c "ALTER DATABASE main SET seqscan_prefetch_buffers=10"
        env:
          BENCHMARK_CONNSTR: ${{ steps.set-up-connstr.outputs.connstr }}

      - name: Benchmark init
        uses: ./.github/actions/run-python-test-set
        with:
          build_type: ${{ env.BUILD_TYPE }}
          test_selection: performance
          run_in_parallel: false
          save_perf_report: ${{ env.SAVE_PERF_REPORT }}
          extra_params: -m remote_cluster --timeout 21600 -k test_pgbench_remote_init
        env:
          PLATFORM: ${{ matrix.platform }}
          BENCHMARK_CONNSTR: ${{ steps.set-up-connstr.outputs.connstr }}
          VIP_VAP_ACCESS_TOKEN: "${{ secrets.VIP_VAP_ACCESS_TOKEN }}"
          PERF_TEST_RESULT_CONNSTR: "${{ secrets.PERF_TEST_RESULT_CONNSTR }}"

      - name: Benchmark simple-update
        uses: ./.github/actions/run-python-test-set
        with:
          build_type: ${{ env.BUILD_TYPE }}
          test_selection: performance
          run_in_parallel: false
          save_perf_report: ${{ env.SAVE_PERF_REPORT }}
          extra_params: -m remote_cluster --timeout 21600 -k test_pgbench_remote_simple_update
        env:
          PLATFORM: ${{ matrix.platform }}
          BENCHMARK_CONNSTR: ${{ steps.set-up-connstr.outputs.connstr }}
          VIP_VAP_ACCESS_TOKEN: "${{ secrets.VIP_VAP_ACCESS_TOKEN }}"
          PERF_TEST_RESULT_CONNSTR: "${{ secrets.PERF_TEST_RESULT_CONNSTR }}"

      - name: Benchmark select-only
        uses: ./.github/actions/run-python-test-set
        with:
          build_type: ${{ env.BUILD_TYPE }}
          test_selection: performance
          run_in_parallel: false
          save_perf_report: ${{ env.SAVE_PERF_REPORT }}
          extra_params: -m remote_cluster --timeout 21600 -k test_pgbench_remote_select_only
        env:
          PLATFORM: ${{ matrix.platform }}
          BENCHMARK_CONNSTR: ${{ steps.set-up-connstr.outputs.connstr }}
          VIP_VAP_ACCESS_TOKEN: "${{ secrets.VIP_VAP_ACCESS_TOKEN }}"
          PERF_TEST_RESULT_CONNSTR: "${{ secrets.PERF_TEST_RESULT_CONNSTR }}"

      - name: Create Allure report
        if: always()
        uses: ./.github/actions/allure-report
        with:
          action: generate
          build_type: ${{ env.BUILD_TYPE }}

      - name: Delete Neon Project
        if: ${{ matrix.platform != 'neon-captest-reuse' && always() }}
        uses: ./.github/actions/neon-project-delete
        with:
          environment: dev
          project_id: ${{ steps.create-neon-project.outputs.project_id }}
          api_key: ${{ secrets.NEON_CAPTEST_API_KEY }}

      - name: Post to a Slack channel
        if: ${{ github.event.schedule && failure() }}
        uses: slackapi/slack-github-action@v1
        with:
          channel-id: "C033QLM5P7D" # dev-staging-stream
          slack-message: "Periodic perf testing ${{ matrix.platform }}: ${{ job.status }}\n${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"
        env:
          SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}
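# A quick local sanity check of this workflow can be done with actionlint
# (https://github.com/rhysd/actionlint), e.g., assuming this file lives at
# .github/workflows/benchmarking.yml:
#   actionlint .github/workflows/benchmarking.yml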