proxy: subzero integration in auth-broker (embedded data-api) (#12474)

## Problem
We want the data-api to be served by the proxy directly instead of
relying on a third party to run a deployment for each project/endpoint.

## Summary of changes
With the changes below, the proxy (auth-broker) also becomes a
"rest-broker", which can be thought of as a multi-tenant data-api that
provides an automated REST API for all the databases in the region.

The core of the implementation (which leverages the subzero library) lives
in `proxy/src/serverless/rest.rs`; this is the only place with "new
logic".

---------

Co-authored-by: Ruslan Talpa <ruslan.talpa@databricks.com>
Co-authored-by: Alexander Bayandin <alexander@neon.tech>
Co-authored-by: Conrad Ludgate <conrad@neon.tech>
Ruslan Talpa authored on 2025-07-21 21:16:28 +03:00, committed by GitHub
parent 187170be47
commit 0dbe551802
30 changed files with 2073 additions and 70 deletions

View File

@@ -21,13 +21,14 @@ platforms = [
# "x86_64-apple-darwin",
# "x86_64-pc-windows-msvc",
]
[final-excludes]
workspace-members = [
# vm_monitor benefits from the same Cargo.lock as the rest of our artifacts, but
# it is built primarly in separate repo neondatabase/autoscaling and thus is excluded
# from depending on workspace-hack because most of the dependencies are not used.
"vm_monitor",
# subzero-core is a stub crate that should be excluded from workspace-hack
"subzero-core",
# All of these exist in libs and are not usually built independently.
# Putting workspace hack there adds a bottleneck for cargo builds.
"compute_api",

View File

@@ -0,0 +1,28 @@
name: 'Prepare current job for subzero'
description: >
Set git token to access `neondatabase/subzero` from cargo build,
and set `CARGO_NET_GIT_FETCH_WITH_CLI=true` env variable to use git CLI
inputs:
token:
description: 'GitHub token with access to neondatabase/subzero'
required: true
runs:
using: "composite"
steps:
- name: Set git token for neondatabase/subzero
uses: pyTooling/Actions/with-post-step@2307b526df64d55e95884e072e49aac2a00a9afa # v5.1.0
env:
SUBZERO_ACCESS_TOKEN: ${{ inputs.token }}
with:
main: |
git config --global url."https://x-access-token:${SUBZERO_ACCESS_TOKEN}@github.com/neondatabase/subzero".insteadOf "https://github.com/neondatabase/subzero"
cargo add -p proxy subzero-core --git https://github.com/neondatabase/subzero --rev 396264617e78e8be428682f87469bb25429af88a
post: |
git config --global --unset url."https://x-access-token:${SUBZERO_ACCESS_TOKEN}@github.com/neondatabase/subzero".insteadOf "https://github.com/neondatabase/subzero"
- name: Set `CARGO_NET_GIT_FETCH_WITH_CLI=true` env variable
shell: bash -euxo pipefail {0}
run: echo "CARGO_NET_GIT_FETCH_WITH_CLI=true" >> ${GITHUB_ENV}

View File

@@ -86,6 +86,10 @@ jobs:
with:
submodules: true
- uses: ./.github/actions/prepare-for-subzero
with:
token: ${{ secrets.CI_ACCESS_TOKEN }}
- name: Set pg 14 revision for caching
id: pg_v14_rev
run: echo pg_rev=$(git rev-parse HEAD:vendor/postgres-v14) >> $GITHUB_OUTPUT
@@ -116,7 +120,7 @@ jobs:
ARCH: ${{ inputs.arch }}
SANITIZERS: ${{ inputs.sanitizers }}
run: |
CARGO_FLAGS="--locked --features testing"
CARGO_FLAGS="--locked --features testing,rest_broker"
if [[ $BUILD_TYPE == "debug" && $ARCH == 'x64' ]]; then
cov_prefix="scripts/coverage --profraw-prefix=$GITHUB_JOB --dir=/tmp/coverage run"
CARGO_PROFILE=""

View File

@@ -46,6 +46,10 @@ jobs:
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
submodules: true
- uses: ./.github/actions/prepare-for-subzero
with:
token: ${{ secrets.CI_ACCESS_TOKEN }}
- name: Cache cargo deps
uses: tespkg/actions-cache@b7bf5fcc2f98a52ac6080eb0fd282c2f752074b1 # v1.8.0

View File

@@ -54,6 +54,10 @@ jobs:
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
submodules: true
- uses: ./.github/actions/prepare-for-subzero
with:
token: ${{ secrets.CI_ACCESS_TOKEN }}
- name: Install build dependencies
run: |

View File

@@ -632,6 +632,8 @@ jobs:
BUILD_TAG=${{ needs.meta.outputs.release-tag || needs.meta.outputs.build-tag }}
TAG=${{ needs.build-build-tools-image.outputs.image-tag }}-bookworm
DEBIAN_VERSION=bookworm
secrets: |
SUBZERO_ACCESS_TOKEN=${{ secrets.CI_ACCESS_TOKEN }}
provenance: false
push: true
pull: true

View File

@@ -72,6 +72,7 @@ jobs:
check-macos-build:
needs: [ check-permissions, files-changed ]
uses: ./.github/workflows/build-macos.yml
secrets: inherit
with:
pg_versions: ${{ needs.files-changed.outputs.postgres_changes }}
rebuild_rust_code: ${{ fromJSON(needs.files-changed.outputs.rebuild_rust_code) }}

.gitignore (vendored): 5 lines changed
View File

@@ -26,9 +26,14 @@ docker-compose/docker-compose-parallel.yml
*.o
*.so
*.Po
*.pid
# pgindent typedef lists
*.list
# Node
**/node_modules/
# various files for local testing
/proxy/.subzero
local_proxy.json

Cargo.lock (generated): 161 lines changed
View File

@@ -52,6 +52,12 @@ dependencies = [
"memchr",
]
[[package]]
name = "aliasable"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "250f629c0161ad8107cf89319e990051fae62832fd343083bea452d93e2205fd"
[[package]]
name = "aligned-vec"
version = "0.6.1"
@@ -490,7 +496,7 @@ dependencies = [
"hex",
"hmac",
"http 0.2.9",
"http 1.1.0",
"http 1.3.1",
"once_cell",
"p256 0.11.1",
"percent-encoding",
@@ -631,7 +637,7 @@ dependencies = [
"aws-smithy-types",
"bytes",
"http 0.2.9",
"http 1.1.0",
"http 1.3.1",
"pin-project-lite",
"tokio",
"tracing",
@@ -649,7 +655,7 @@ dependencies = [
"bytes-utils",
"futures-core",
"http 0.2.9",
"http 1.1.0",
"http 1.3.1",
"http-body 0.4.5",
"http-body 1.0.0",
"http-body-util",
@@ -698,7 +704,7 @@ dependencies = [
"bytes",
"form_urlencoded",
"futures-util",
"http 1.1.0",
"http 1.3.1",
"http-body 1.0.0",
"http-body-util",
"hyper 1.4.1",
@@ -732,7 +738,7 @@ checksum = "df1362f362fd16024ae199c1970ce98f9661bf5ef94b9808fee734bc3698b733"
dependencies = [
"bytes",
"futures-util",
"http 1.1.0",
"http 1.3.1",
"http-body 1.0.0",
"http-body-util",
"mime",
@@ -756,7 +762,7 @@ dependencies = [
"form_urlencoded",
"futures-util",
"headers",
"http 1.1.0",
"http 1.3.1",
"http-body 1.0.0",
"http-body-util",
"mime",
@@ -1090,7 +1096,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "975982cdb7ad6a142be15bdf84aea7ec6a9e5d4d797c004d43185b24cfe4e684"
dependencies = [
"clap",
"heck",
"heck 0.5.0",
"indexmap 2.9.0",
"log",
"proc-macro2",
@@ -1228,7 +1234,7 @@ version = "4.5.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4ac6a0c7b1a9e9a5186361f67dfa1b88213572f427fb9ab038efb2bd8c582dab"
dependencies = [
"heck",
"heck 0.5.0",
"proc-macro2",
"quote",
"syn 2.0.100",
@@ -1334,7 +1340,7 @@ dependencies = [
"flate2",
"futures",
"hostname-validator",
"http 1.1.0",
"http 1.3.1",
"indexmap 2.9.0",
"itertools 0.10.5",
"jsonwebtoken",
@@ -1969,7 +1975,7 @@ checksum = "0892a17df262a24294c382f0d5997571006e7a4348b4327557c4ff1cd4a8bccc"
dependencies = [
"darling",
"either",
"heck",
"heck 0.5.0",
"proc-macro2",
"quote",
"syn 2.0.100",
@@ -2661,7 +2667,7 @@ dependencies = [
"futures-core",
"futures-sink",
"futures-util",
"http 1.1.0",
"http 1.3.1",
"indexmap 2.9.0",
"slab",
"tokio",
@@ -2743,7 +2749,7 @@ dependencies = [
"base64 0.21.7",
"bytes",
"headers-core",
"http 1.1.0",
"http 1.3.1",
"httpdate",
"mime",
"sha1",
@@ -2755,9 +2761,15 @@ version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "54b4a22553d4242c49fddb9ba998a99962b5cc6f22cb5a3482bec22522403ce4"
dependencies = [
"http 1.1.0",
"http 1.3.1",
]
[[package]]
name = "heck"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8"
[[package]]
name = "heck"
version = "0.5.0"
@@ -2833,9 +2845,9 @@ dependencies = [
[[package]]
name = "http"
version = "1.1.0"
version = "1.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258"
checksum = "f4a85d31aea989eead29a3aaf9e1115a180df8282431156e533de47660892565"
dependencies = [
"bytes",
"fnv",
@@ -2860,7 +2872,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1cac85db508abc24a2e48553ba12a996e87244a0395ce011e62b37158745d643"
dependencies = [
"bytes",
"http 1.1.0",
"http 1.3.1",
]
[[package]]
@@ -2871,7 +2883,7 @@ checksum = "793429d76616a256bcb62c2a2ec2bed781c8307e797e2598c50010f2bee2544f"
dependencies = [
"bytes",
"futures-util",
"http 1.1.0",
"http 1.3.1",
"http-body 1.0.0",
"pin-project-lite",
]
@@ -2995,7 +3007,7 @@ dependencies = [
"futures-channel",
"futures-util",
"h2 0.4.4",
"http 1.1.0",
"http 1.3.1",
"http-body 1.0.0",
"httparse",
"httpdate",
@@ -3028,7 +3040,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a0bea761b46ae2b24eb4aef630d8d1c398157b6fc29e6350ecf090a0b70c952c"
dependencies = [
"futures-util",
"http 1.1.0",
"http 1.3.1",
"hyper 1.4.1",
"hyper-util",
"rustls 0.22.4",
@@ -3060,7 +3072,7 @@ dependencies = [
"bytes",
"futures-channel",
"futures-util",
"http 1.1.0",
"http 1.3.1",
"http-body 1.0.0",
"hyper 1.4.1",
"pin-project-lite",
@@ -3709,7 +3721,7 @@ version = "0.0.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b9e6777fc80a575f9503d908c8b498782a6c3ee88a06cb416dc3941401e43b94"
dependencies = [
"heck",
"heck 0.5.0",
"proc-macro2",
"quote",
"syn 2.0.100",
@@ -4160,7 +4172,7 @@ checksum = "10a8a7f5f6ba7c1b286c2fbca0454eaba116f63bbe69ed250b642d36fbb04d80"
dependencies = [
"async-trait",
"bytes",
"http 1.1.0",
"http 1.3.1",
"opentelemetry",
"reqwest",
]
@@ -4173,7 +4185,7 @@ checksum = "91cf61a1868dacc576bf2b2a1c3e9ab150af7272909e80085c3173384fe11f76"
dependencies = [
"async-trait",
"futures-core",
"http 1.1.0",
"http 1.3.1",
"opentelemetry",
"opentelemetry-http",
"opentelemetry-proto",
@@ -4252,6 +4264,30 @@ dependencies = [
"winapi",
]
[[package]]
name = "ouroboros"
version = "0.18.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e0f050db9c44b97a94723127e6be766ac5c340c48f2c4bb3ffa11713744be59"
dependencies = [
"aliasable",
"ouroboros_macro",
"static_assertions",
]
[[package]]
name = "ouroboros_macro"
version = "0.18.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3c7028bdd3d43083f6d8d4d5187680d0d3560d54df4cc9d752005268b41e64d0"
dependencies = [
"heck 0.4.1",
"proc-macro2",
"proc-macro2-diagnostics",
"quote",
"syn 2.0.100",
]
[[package]]
name = "outref"
version = "0.5.1"
@@ -4381,7 +4417,7 @@ dependencies = [
"hashlink",
"hex",
"hex-literal",
"http 1.1.0",
"http 1.3.1",
"http-utils",
"humantime",
"humantime-serde",
@@ -5148,6 +5184,19 @@ dependencies = [
"unicode-ident",
]
[[package]]
name = "proc-macro2-diagnostics"
version = "0.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "af066a9c399a26e020ada66a034357a868728e72cd426f3adcd35f80d88d88c8"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.100",
"version_check",
"yansi",
]
[[package]]
name = "procfs"
version = "0.16.0"
@@ -5217,7 +5266,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "22505a5c94da8e3b7c2996394d1c933236c4d743e81a410bcca4e6989fc066a4"
dependencies = [
"bytes",
"heck",
"heck 0.5.0",
"itertools 0.12.1",
"log",
"multimap",
@@ -5238,7 +5287,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0c1318b19085f08681016926435853bbf7858f9c082d0999b80550ff5d9abe15"
dependencies = [
"bytes",
"heck",
"heck 0.5.0",
"itertools 0.12.1",
"log",
"multimap",
@@ -5334,7 +5383,7 @@ dependencies = [
"hex",
"hmac",
"hostname",
"http 1.1.0",
"http 1.3.1",
"http-body-util",
"http-utils",
"humantime",
@@ -5354,6 +5403,7 @@ dependencies = [
"metrics",
"once_cell",
"opentelemetry",
"ouroboros",
"p256 0.13.2",
"papaya",
"parking_lot 0.12.1",
@@ -5390,6 +5440,7 @@ dependencies = [
"socket2",
"strum_macros",
"subtle",
"subzero-core",
"thiserror 1.0.69",
"tikv-jemalloc-ctl",
"tikv-jemallocator",
@@ -5705,14 +5756,14 @@ dependencies = [
[[package]]
name = "regex"
version = "1.10.2"
version = "1.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "380b951a9c5e80ddfd6136919eef32310721aa4aacd4889a8d39124b026ab343"
checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191"
dependencies = [
"aho-corasick",
"memchr",
"regex-automata 0.4.3",
"regex-syntax 0.8.2",
"regex-automata 0.4.9",
"regex-syntax 0.8.5",
]
[[package]]
@@ -5726,13 +5777,13 @@ dependencies = [
[[package]]
name = "regex-automata"
version = "0.4.3"
version = "0.4.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5f804c7828047e88b2d32e2d7fe5a105da8ee3264f01902f796c8e067dc2483f"
checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908"
dependencies = [
"aho-corasick",
"memchr",
"regex-syntax 0.8.2",
"regex-syntax 0.8.5",
]
[[package]]
@@ -5749,9 +5800,9 @@ checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1"
[[package]]
name = "regex-syntax"
version = "0.8.2"
version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f"
checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c"
[[package]]
name = "relative-path"
@@ -5821,7 +5872,7 @@ dependencies = [
"futures-channel",
"futures-core",
"futures-util",
"http 1.1.0",
"http 1.3.1",
"http-body 1.0.0",
"http-body-util",
"hyper 1.4.1",
@@ -5863,7 +5914,7 @@ checksum = "d1ccd3b55e711f91a9885a2fa6fbbb2e39db1776420b062efc058c6410f7e5e3"
dependencies = [
"anyhow",
"async-trait",
"http 1.1.0",
"http 1.3.1",
"reqwest",
"serde",
"thiserror 1.0.69",
@@ -5880,7 +5931,7 @@ dependencies = [
"async-trait",
"futures",
"getrandom 0.2.11",
"http 1.1.0",
"http 1.3.1",
"hyper 1.4.1",
"parking_lot 0.11.2",
"reqwest",
@@ -5901,7 +5952,7 @@ dependencies = [
"anyhow",
"async-trait",
"getrandom 0.2.11",
"http 1.1.0",
"http 1.3.1",
"matchit",
"opentelemetry",
"reqwest",
@@ -6260,7 +6311,7 @@ dependencies = [
"fail",
"futures",
"hex",
"http 1.1.0",
"http 1.3.1",
"http-utils",
"humantime",
"hyper 0.14.30",
@@ -7109,7 +7160,7 @@ version = "0.26.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4c6bee85a5a24955dc440386795aa378cd9cf82acd5f764469152d2270e581be"
dependencies = [
"heck",
"heck 0.5.0",
"proc-macro2",
"quote",
"rustversion",
@@ -7122,6 +7173,10 @@ version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc"
[[package]]
name = "subzero-core"
version = "3.0.1"
[[package]]
name = "svg_fmt"
version = "0.4.3"
@@ -7732,7 +7787,7 @@ dependencies = [
"async-trait",
"base64 0.22.1",
"bytes",
"http 1.1.0",
"http 1.3.1",
"http-body 1.0.0",
"http-body-util",
"percent-encoding",
@@ -7756,7 +7811,7 @@ dependencies = [
"bytes",
"flate2",
"h2 0.4.4",
"http 1.1.0",
"http 1.3.1",
"http-body 1.0.0",
"http-body-util",
"hyper 1.4.1",
@@ -7847,7 +7902,7 @@ dependencies = [
"base64 0.22.1",
"bitflags 2.8.0",
"bytes",
"http 1.1.0",
"http 1.3.1",
"http-body 1.0.0",
"mime",
"pin-project-lite",
@@ -7868,7 +7923,7 @@ name = "tower-otel"
version = "0.2.0"
source = "git+https://github.com/mattiapenati/tower-otel?rev=56a7321053bcb72443888257b622ba0d43a11fcd#56a7321053bcb72443888257b622ba0d43a11fcd"
dependencies = [
"http 1.1.0",
"http 1.3.1",
"opentelemetry",
"pin-project",
"tower-layer",
@@ -8049,7 +8104,7 @@ dependencies = [
"byteorder",
"bytes",
"data-encoding",
"http 1.1.0",
"http 1.3.1",
"httparse",
"log",
"rand 0.8.5",
@@ -8068,7 +8123,7 @@ dependencies = [
"byteorder",
"bytes",
"data-encoding",
"http 1.1.0",
"http 1.3.1",
"httparse",
"log",
"rand 0.8.5",
@@ -8857,8 +8912,8 @@ dependencies = [
"quote",
"rand 0.8.5",
"regex",
"regex-automata 0.4.3",
"regex-syntax 0.8.2",
"regex-automata 0.4.9",
"regex-syntax 0.8.5",
"reqwest",
"rustls 0.23.27",
"rustls-pki-types",
@@ -8954,6 +9009,12 @@ version = "0.13.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4d25c75bf9ea12c4040a97f829154768bbbce366287e2dc044af160cd79a13fd"
[[package]]
name = "yansi"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cfe53a6657fd280eaa890a3bc59152892ffa3e30101319d168b781ed6529b049"
[[package]]
name = "yasna"
version = "0.5.2"

View File

@@ -49,6 +49,7 @@ members = [
"libs/proxy/tokio-postgres2",
"endpoint_storage",
"pgxn/neon/communicator",
"proxy/subzero_core",
]
[workspace.package]

View File

@@ -63,7 +63,14 @@ WORKDIR /home/nonroot
COPY --chown=nonroot . .
RUN cargo chef prepare --recipe-path recipe.json
RUN --mount=type=secret,uid=1000,id=SUBZERO_ACCESS_TOKEN \
set -e \
&& if [ -s /run/secrets/SUBZERO_ACCESS_TOKEN ]; then \
export CARGO_NET_GIT_FETCH_WITH_CLI=true && \
git config --global url."https://$(cat /run/secrets/SUBZERO_ACCESS_TOKEN)@github.com/neondatabase/subzero".insteadOf "https://github.com/neondatabase/subzero" && \
cargo add -p proxy subzero-core --git https://github.com/neondatabase/subzero --rev 396264617e78e8be428682f87469bb25429af88a; \
fi \
&& cargo chef prepare --recipe-path recipe.json
# Main build image
FROM $REPOSITORY/$IMAGE:$TAG AS build
@@ -71,20 +78,33 @@ WORKDIR /home/nonroot
ARG GIT_VERSION=local
ARG BUILD_TAG
ARG ADDITIONAL_RUSTFLAGS=""
ENV CARGO_FEATURES="default"
# 3. Build cargo dependencies. Note that this step doesn't depend on anything else than
# `recipe.json`, so the layer can be reused as long as none of the dependencies change.
COPY --from=plan /home/nonroot/recipe.json recipe.json
RUN set -e \
RUN --mount=type=secret,uid=1000,id=SUBZERO_ACCESS_TOKEN \
set -e \
&& if [ -s /run/secrets/SUBZERO_ACCESS_TOKEN ]; then \
export CARGO_NET_GIT_FETCH_WITH_CLI=true && \
git config --global url."https://$(cat /run/secrets/SUBZERO_ACCESS_TOKEN)@github.com/neondatabase/subzero".insteadOf "https://github.com/neondatabase/subzero"; \
fi \
&& RUSTFLAGS="-Clinker=clang -Clink-arg=-fuse-ld=mold -Clink-arg=-Wl,--no-rosegment -Cforce-frame-pointers=yes ${ADDITIONAL_RUSTFLAGS}" cargo chef cook --locked --release --recipe-path recipe.json
# Perform the main build. We reuse the Postgres build artifacts from the intermediate 'pg-build'
# layer, and the cargo dependencies built in the previous step.
COPY --chown=nonroot --from=pg-build /home/nonroot/pg_install/ pg_install
COPY --chown=nonroot . .
COPY --chown=nonroot --from=plan /home/nonroot/proxy/Cargo.toml proxy/Cargo.toml
COPY --chown=nonroot --from=plan /home/nonroot/Cargo.lock Cargo.lock
RUN set -e \
RUN --mount=type=secret,uid=1000,id=SUBZERO_ACCESS_TOKEN \
set -e \
&& if [ -s /run/secrets/SUBZERO_ACCESS_TOKEN ]; then \
export CARGO_FEATURES="rest_broker"; \
fi \
&& RUSTFLAGS="-Clinker=clang -Clink-arg=-fuse-ld=mold -Clink-arg=-Wl,--no-rosegment -Cforce-frame-pointers=yes ${ADDITIONAL_RUSTFLAGS}" cargo build \
--features $CARGO_FEATURES \
--bin pg_sni_router \
--bin pageserver \
--bin pagectl \

View File

@@ -35,6 +35,7 @@ reason = "The paste crate is a build-only dependency with no runtime components.
# More documentation for the licenses section can be found here:
# https://embarkstudios.github.io/cargo-deny/checks/licenses/cfg.html
[licenses]
version = 2
allow = [
"0BSD",
"Apache-2.0",

View File

@@ -11,6 +11,9 @@ crate-type = ["staticlib"]
# 'testing' feature is currently unused in the communicator, but we accept it for convenience of
# calling build scripts, so that you can pass the same feature to all packages.
testing = []
# 'rest_broker' feature is currently unused in the communicator, but we accept it for convenience of
# calling build scripts, so that you can pass the same feature to all packages.
rest_broker = []
[dependencies]
neon-shmem.workspace = true

View File

@@ -7,6 +7,7 @@ license.workspace = true
[features]
default = []
testing = ["dep:tokio-postgres"]
rest_broker = ["dep:subzero-core", "dep:ouroboros"]
[dependencies]
ahash.workspace = true
@@ -105,6 +106,11 @@ uuid.workspace = true
x509-cert.workspace = true
redis.workspace = true
zerocopy.workspace = true
# uncomment this to use the real subzero-core crate
# subzero-core = { git = "https://github.com/neondatabase/subzero", rev = "396264617e78e8be428682f87469bb25429af88a", features = ["postgresql"], optional = true }
# this is a stub for the subzero-core crate
subzero-core = { path = "./subzero_core", features = ["postgresql"], optional = true}
ouroboros = { version = "0.18", optional = true }
# jwt stuff
jose-jwa = "0.1.2"

View File

@@ -178,16 +178,24 @@ Create a configuration file called `local_proxy.json` in the root of the repo (u
Start the local proxy:
```sh
cargo run --bin local_proxy -- \
--disable_pg_session_jwt true \
cargo run --bin local_proxy --features testing -- \
--disable-pg-session-jwt \
--http 0.0.0.0:7432
```
Start the auth broker:
Start the auth/rest broker:
Note: to enable the rest broker you need to replace the stub subzero-core crate with the real one.
```sh
LOGFMT=text OTEL_SDK_DISABLED=true cargo run --bin proxy --features testing -- \
cargo add -p proxy subzero-core --git https://github.com/neondatabase/subzero --rev 396264617e78e8be428682f87469bb25429af88a
```
```sh
LOGFMT=text OTEL_SDK_DISABLED=true cargo run --bin proxy --features testing,rest_broker -- \
-c server.crt -k server.key \
--is-auth-broker true \
--is-rest-broker true \
--wss 0.0.0.0:8080 \
--http 0.0.0.0:7002 \
--auth-backend local
@@ -205,3 +213,9 @@ curl -k "https://foo.local.neon.build:8080/sql" \
-H "neon-connection-string: postgresql://authenticator@foo.local.neon.build/database" \
-d '{"query":"select 1","params":[]}'
```
Make a rest request against the auth broker (rest broker):
```sh
curl -k "https://foo.local.neon.build:8080/database/rest/v1/items?select=id,name&id=eq.1" \
-H "Authorization: Bearer $NEON_JWT"
```

View File

@@ -20,6 +20,8 @@ use crate::auth::backend::jwt::JwkCache;
use crate::auth::backend::local::LocalBackend;
use crate::auth::{self};
use crate::cancellation::CancellationHandler;
#[cfg(feature = "rest_broker")]
use crate::config::RestConfig;
use crate::config::{
self, AuthenticationConfig, ComputeConfig, HttpConfig, ProxyConfig, RetryConfig,
refresh_config_loop,
@@ -276,6 +278,13 @@ fn build_config(args: &LocalProxyCliArgs) -> anyhow::Result<&'static ProxyConfig
accept_jwts: true,
console_redirect_confirmation_timeout: Duration::ZERO,
},
#[cfg(feature = "rest_broker")]
rest_config: RestConfig {
is_rest_broker: false,
db_schema_cache: None,
max_schema_size: 0,
hostname_prefix: String::new(),
},
proxy_protocol_v2: config::ProxyProtocolV2::Rejected,
handshake_timeout: Duration::from_secs(10),
wake_compute_retry_config: RetryConfig::parse(RetryConfig::WAKE_COMPUTE_DEFAULT_VALUES)?,

View File

@@ -31,6 +31,8 @@ use crate::auth::backend::local::LocalBackend;
use crate::auth::backend::{ConsoleRedirectBackend, MaybeOwned};
use crate::batch::BatchQueue;
use crate::cancellation::{CancellationHandler, CancellationProcessor};
#[cfg(feature = "rest_broker")]
use crate::config::RestConfig;
#[cfg(any(test, feature = "testing"))]
use crate::config::refresh_config_loop;
use crate::config::{
@@ -47,6 +49,8 @@ use crate::redis::{elasticache, notifications};
use crate::scram::threadpool::ThreadPool;
use crate::serverless::GlobalConnPoolOptions;
use crate::serverless::cancel_set::CancelSet;
#[cfg(feature = "rest_broker")]
use crate::serverless::rest::DbSchemaCache;
use crate::tls::client_config::compute_client_config_with_root_certs;
#[cfg(any(test, feature = "testing"))]
use crate::url::ApiUrl;
@@ -246,11 +250,23 @@ struct ProxyCliArgs {
/// if this is not local proxy, this toggles whether we accept Postgres REST requests
#[clap(long, default_value_t = false, value_parser = clap::builder::BoolishValueParser::new(), action = clap::ArgAction::Set)]
#[cfg(feature = "rest_broker")]
is_rest_broker: bool,
/// cache for `db_schema_cache` introspection (use `size=0` to disable)
#[clap(long, default_value = "size=1000,ttl=1h")]
#[cfg(feature = "rest_broker")]
db_schema_cache: String,
/// Maximum size allowed for schema in bytes
#[clap(long, default_value_t = 5 * 1024 * 1024)] // 5MB
#[cfg(feature = "rest_broker")]
max_schema_size: usize,
/// Hostname prefix to strip from request hostname to get database hostname
#[clap(long, default_value = "apirest.")]
#[cfg(feature = "rest_broker")]
hostname_prefix: String,
}
#[derive(clap::Args, Clone, Copy, Debug)]
@@ -517,6 +533,17 @@ pub async fn run() -> anyhow::Result<()> {
));
maintenance_tasks.spawn(control_plane::mgmt::task_main(mgmt_listener));
// add a task to flush the db_schema cache every 10 minutes
#[cfg(feature = "rest_broker")]
if let Some(db_schema_cache) = &config.rest_config.db_schema_cache {
maintenance_tasks.spawn(async move {
loop {
tokio::time::sleep(Duration::from_secs(600)).await;
db_schema_cache.flush();
}
});
}
if let Some(metrics_config) = &config.metric_collection {
// TODO: Add gc regardles of the metric collection being enabled.
maintenance_tasks.spawn(usage_metrics::task_main(metrics_config));
@@ -679,6 +706,30 @@ fn build_config(args: &ProxyCliArgs) -> anyhow::Result<&'static ProxyConfig> {
timeout: Duration::from_secs(2),
};
#[cfg(feature = "rest_broker")]
let rest_config = {
let db_schema_cache_config: CacheOptions = args.db_schema_cache.parse()?;
info!("Using DbSchemaCache with options={db_schema_cache_config:?}");
let db_schema_cache = if args.is_rest_broker {
Some(DbSchemaCache::new(
"db_schema_cache",
db_schema_cache_config.size,
db_schema_cache_config.ttl,
true,
))
} else {
None
};
RestConfig {
is_rest_broker: args.is_rest_broker,
db_schema_cache,
max_schema_size: args.max_schema_size,
hostname_prefix: args.hostname_prefix.clone(),
}
};
let config = ProxyConfig {
tls_config,
metric_collection,
@@ -691,6 +742,8 @@ fn build_config(args: &ProxyCliArgs) -> anyhow::Result<&'static ProxyConfig> {
connect_to_compute: compute_config,
#[cfg(feature = "testing")]
disable_pg_session_jwt: false,
#[cfg(feature = "rest_broker")]
rest_config,
};
let config = Box::leak(Box::new(config));

View File

@@ -204,6 +204,11 @@ impl<K: Hash + Eq + Clone, V: Clone> TimedLru<K, V> {
self.insert_raw_ttl(key, value, ttl, false);
}
#[cfg(feature = "rest_broker")]
pub(crate) fn insert(&self, key: K, value: V) {
self.insert_raw_ttl(key, value, self.ttl, self.update_ttl_on_retrieval);
}
pub(crate) fn insert_unit(&self, key: K, value: V) -> (Option<V>, Cached<&Self, ()>) {
let (_, old) = self.insert_raw(key.clone(), value);
@@ -214,6 +219,29 @@ impl<K: Hash + Eq + Clone, V: Clone> TimedLru<K, V> {
(old, cached)
}
#[cfg(feature = "rest_broker")]
pub(crate) fn flush(&self) {
let now = Instant::now();
let mut cache = self.cache.lock();
// Collect keys of expired entries first
let expired_keys: Vec<_> = cache
.iter()
.filter_map(|(key, entry)| {
if entry.expires_at <= now {
Some(key.clone())
} else {
None
}
})
.collect();
// Remove expired entries
for key in expired_keys {
cache.remove(&key);
}
}
}
impl<K: Hash + Eq, V: Clone> TimedLru<K, V> {

View File

@@ -22,6 +22,8 @@ use crate::rate_limiter::{RateLimitAlgorithm, RateLimiterConfig};
use crate::scram::threadpool::ThreadPool;
use crate::serverless::GlobalConnPoolOptions;
use crate::serverless::cancel_set::CancelSet;
#[cfg(feature = "rest_broker")]
use crate::serverless::rest::DbSchemaCache;
pub use crate::tls::server_config::{TlsConfig, configure_tls};
use crate::types::{Host, RoleName};
@@ -30,6 +32,8 @@ pub struct ProxyConfig {
pub metric_collection: Option<MetricCollectionConfig>,
pub http_config: HttpConfig,
pub authentication_config: AuthenticationConfig,
#[cfg(feature = "rest_broker")]
pub rest_config: RestConfig,
pub proxy_protocol_v2: ProxyProtocolV2,
pub handshake_timeout: Duration,
pub wake_compute_retry_config: RetryConfig,
@@ -80,6 +84,14 @@ pub struct AuthenticationConfig {
pub console_redirect_confirmation_timeout: tokio::time::Duration,
}
#[cfg(feature = "rest_broker")]
pub struct RestConfig {
pub is_rest_broker: bool,
pub db_schema_cache: Option<DbSchemaCache>,
pub max_schema_size: usize,
pub hostname_prefix: String,
}
#[derive(Debug)]
pub struct MetricBackupCollectionConfig {
pub remote_storage_config: Option<RemoteStorageConfig>,

View File

@@ -10,6 +10,7 @@ use super::connection_with_credentials_provider::ConnectionWithCredentialsProvid
use crate::cache::project_info::ProjectInfoCache;
use crate::intern::{AccountIdInt, EndpointIdInt, ProjectIdInt, RoleNameInt};
use crate::metrics::{Metrics, RedisErrors, RedisEventsCount};
use crate::util::deserialize_json_string;
const CPLANE_CHANNEL_NAME: &str = "neondb-proxy-ws-updates";
const RECONNECT_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(20);
@@ -121,15 +122,6 @@ struct InvalidateRole {
role_name: RoleNameInt,
}
fn deserialize_json_string<'de, D, T>(deserializer: D) -> Result<T, D::Error>
where
T: for<'de2> serde::Deserialize<'de2>,
D: serde::Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
serde_json::from_str(&s).map_err(<D::Error as serde::de::Error>::custom)
}
// https://github.com/serde-rs/serde/issues/1714
fn deserialize_unknown_topic<'de, D>(deserializer: D) -> Result<(), D::Error>
where

View File

@@ -11,6 +11,8 @@ mod http_conn_pool;
mod http_util;
mod json;
mod local_conn_pool;
#[cfg(feature = "rest_broker")]
pub mod rest;
mod sql_over_http;
mod websocket;
@@ -487,6 +489,42 @@ async fn request_handler(
.body(Empty::new().map_err(|x| match x {}).boxed())
.map_err(|e| ApiError::InternalServerError(e.into()))
} else {
json_response(StatusCode::BAD_REQUEST, "query is not supported")
#[cfg(feature = "rest_broker")]
{
if config.rest_config.is_rest_broker
// we are testing for the path to be /database_name/rest/...
&& request
.uri()
.path()
.split('/')
.nth(2)
.is_some_and(|part| part.starts_with("rest"))
{
let ctx =
RequestContext::new(session_id, conn_info, crate::metrics::Protocol::Http);
let span = ctx.span();
let testodrome_id = request
.headers()
.get("X-Neon-Query-ID")
.and_then(|value| value.to_str().ok())
.map(|s| s.to_string());
if let Some(query_id) = testodrome_id {
info!(parent: &span, "testodrome query ID: {query_id}");
ctx.set_testodrome_id(query_id.into());
}
rest::handle(config, ctx, request, backend, http_cancellation_token)
.instrument(span)
.await
} else {
json_response(StatusCode::BAD_REQUEST, "query is not supported")
}
}
#[cfg(not(feature = "rest_broker"))]
{
json_response(StatusCode::BAD_REQUEST, "query is not supported")
}
}
}

proxy/src/serverless/rest.rs (new file): 1165 lines

File diff suppressed because it is too large.
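
Since the diff of `proxy/src/serverless/rest.rs` is not shown, here is a rough sketch of two small pieces of plumbing it needs, based only on the configuration added elsewhere in this commit (`hostname_prefix`, `max_schema_size`). The function names are invented for this sketch and are not taken from rest.rs.

```rust
// Illustrative helpers only; names do not come from rest.rs.

/// Strip the configured prefix (default "apirest.") from the request hostname
/// to recover the database hostname, e.g.
/// "foo.apirest.c-2.local.neon.build" -> "foo.c-2.local.neon.build".
fn database_hostname(request_host: &str, prefix: &str) -> String {
    match request_host.split_once(prefix) {
        Some((head, tail)) => format!("{head}{tail}"),
        None => request_host.to_string(),
    }
}

/// Reject introspected schemas larger than the configured limit
/// (`--max-schema-size`, 5 MiB by default).
fn check_schema_size(schema_json: &str, max_schema_size: usize) -> Result<(), String> {
    if schema_json.len() > max_schema_size {
        return Err(format!(
            "schema is {} bytes, exceeds the limit of {max_schema_size} bytes",
            schema_json.len()
        ));
    }
    Ok(())
}

fn main() {
    assert_eq!(
        database_hostname("foo.apirest.c-2.local.neon.build", "apirest."),
        "foo.c-2.local.neon.build"
    );
    assert!(check_schema_size("{}", 5 * 1024 * 1024).is_ok());
}
```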

View File

@@ -64,7 +64,7 @@ enum Payload {
Batch(BatchQueryData),
}
static HEADER_VALUE_TRUE: HeaderValue = HeaderValue::from_static("true");
pub(super) const HEADER_VALUE_TRUE: HeaderValue = HeaderValue::from_static("true");
fn bytes_to_pg_text<'de, D>(deserializer: D) -> Result<Vec<Option<String>>, D::Error>
where

View File

@@ -20,3 +20,13 @@ pub async fn run_until<F1: Future, F2: Future>(
Either::Right((f2, _)) => Err(f2),
}
}
pub fn deserialize_json_string<'de, D, T>(deserializer: D) -> Result<T, D::Error>
where
T: for<'de2> serde::Deserialize<'de2>,
D: serde::Deserializer<'de>,
{
use serde::Deserialize;
let s = String::deserialize(deserializer)?;
serde_json::from_str(&s).map_err(<D::Error as serde::de::Error>::custom)
}
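
For context, this helper (moved out of the Redis notifications module in the diff above) parses a field whose JSON value is itself delivered as a string. A minimal usage sketch, with a made-up `Message`/`Payload` pair:

```rust
// Hypothetical example of wiring the helper into a serde struct; the
// Message/Payload types are made up, the helper body is copied from above.
use serde::Deserialize;

pub fn deserialize_json_string<'de, D, T>(deserializer: D) -> Result<T, D::Error>
where
    T: for<'de2> serde::Deserialize<'de2>,
    D: serde::Deserializer<'de>,
{
    let s = String::deserialize(deserializer)?;
    serde_json::from_str(&s).map_err(<D::Error as serde::de::Error>::custom)
}

#[derive(Deserialize, Debug)]
struct Payload {
    role_name: String,
}

#[derive(Deserialize, Debug)]
struct Message {
    // The payload arrives as a JSON-encoded string and is parsed into a
    // `Payload` by the helper above.
    #[serde(deserialize_with = "deserialize_json_string")]
    payload: Payload,
}

fn main() {
    let raw = r#"{"payload": "{\"role_name\":\"anon\"}"}"#;
    let msg: Message = serde_json::from_str(raw).unwrap();
    println!("{:?}", msg.payload); // Payload { role_name: "anon" }
}
```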

proxy/subzero_core/.gitignore (vendored, new file): 2 lines
View File

@@ -0,0 +1,2 @@
target
Cargo.lock

View File

@@ -0,0 +1,12 @@
# This is a stub for the subzero-core crate.
[package]
name = "subzero-core"
version = "3.0.1"
edition = "2024"
publish = false # "private"!
[features]
default = []
postgresql = []
[dependencies]

View File

@@ -0,0 +1 @@
// This is a stub for the subzero-core crate.

View File

@@ -4121,6 +4121,294 @@ class NeonAuthBroker:
self._popen.kill()
class NeonLocalProxy(LogUtils):
"""
An object managing a local_proxy instance for rest broker testing.
The local_proxy serves as a direct connection to VanillaPostgres.
"""
def __init__(
self,
neon_binpath: Path,
test_output_dir: Path,
http_port: int,
metrics_port: int,
vanilla_pg: VanillaPostgres,
config_path: Path | None = None,
):
self.neon_binpath = neon_binpath
self.test_output_dir = test_output_dir
self.http_port = http_port
self.metrics_port = metrics_port
self.vanilla_pg = vanilla_pg
self.config_path = config_path or (test_output_dir / "local_proxy.json")
self.host = "127.0.0.1"
self.running = False
self.logfile = test_output_dir / "local_proxy.log"
self._popen: subprocess.Popen[bytes] | None = None
super().__init__(logfile=self.logfile)
def start(self) -> Self:
assert self._popen is None
assert not self.running
# Ensure vanilla_pg is running
if not self.vanilla_pg.is_running():
self.vanilla_pg.start()
args = [
str(self.neon_binpath / "local_proxy"),
"--http",
f"{self.host}:{self.http_port}",
"--metrics",
f"{self.host}:{self.metrics_port}",
"--postgres",
f"127.0.0.1:{self.vanilla_pg.default_options['port']}",
"--config-path",
str(self.config_path),
"--disable-pg-session-jwt",
]
logfile = open(self.logfile, "w")
self._popen = subprocess.Popen(args, stdout=logfile, stderr=logfile)
self.running = True
self._wait_until_ready()
return self
def stop(self) -> Self:
if self._popen is not None and self.running:
self._popen.terminate()
try:
self._popen.wait(timeout=5)
except subprocess.TimeoutExpired:
log.warning("failed to gracefully terminate local_proxy; killing")
self._popen.kill()
self.running = False
return self
def get_binary_version(self) -> str:
"""Get the version string of the local_proxy binary"""
try:
result = subprocess.run(
[str(self.neon_binpath / "local_proxy"), "--version"],
capture_output=True,
text=True,
timeout=10,
)
return result.stdout.strip()
except (subprocess.TimeoutExpired, subprocess.CalledProcessError):
return ""
@backoff.on_exception(backoff.expo, requests.exceptions.RequestException, max_time=10)
def _wait_until_ready(self):
assert self._popen and self._popen.poll() is None, (
"Local proxy exited unexpectedly. Check test log."
)
requests.get(f"http://{self.host}:{self.http_port}/metrics")
def get_metrics(self) -> str:
response = requests.get(f"http://{self.host}:{self.metrics_port}/metrics")
return response.text
def assert_no_errors(self):
# Define allowed error patterns for local_proxy
allowed_errors = [
# Add patterns as needed
]
not_allowed = [
"error",
"panic",
"failed",
]
for na in not_allowed:
if na not in allowed_errors:
assert not self.log_contains(na), f"Found disallowed error pattern: {na}"
def __enter__(self) -> Self:
return self
def __exit__(
self,
exc_type: type[BaseException] | None,
exc_value: BaseException | None,
traceback: TracebackType | None,
):
self.stop()
class NeonRestBrokerProxy(LogUtils):
"""
An object managing a proxy instance configured as both auth broker and rest broker.
This is the main proxy binary with --is-auth-broker and --is-rest-broker flags.
"""
def __init__(
self,
neon_binpath: Path,
test_output_dir: Path,
wss_port: int,
http_port: int,
mgmt_port: int,
config_path: Path | None = None,
):
self.neon_binpath = neon_binpath
self.test_output_dir = test_output_dir
self.wss_port = wss_port
self.http_port = http_port
self.mgmt_port = mgmt_port
self.config_path = config_path or (test_output_dir / "rest_broker_proxy.json")
self.host = "127.0.0.1"
self.running = False
self.logfile = test_output_dir / "rest_broker_proxy.log"
self._popen: subprocess.Popen[Any] | None = None
def start(self) -> Self:
if self.running:
return self
# Generate self-signed TLS certificates
cert_path = self.test_output_dir / "server.crt"
key_path = self.test_output_dir / "server.key"
if not cert_path.exists() or not key_path.exists():
import subprocess
log.info("Generating self-signed TLS certificate for rest broker")
subprocess.run(
[
"openssl",
"req",
"-new",
"-x509",
"-days",
"365",
"-nodes",
"-text",
"-out",
str(cert_path),
"-keyout",
str(key_path),
"-subj",
"/CN=*.local.neon.build",
],
check=True,
)
log.info(
f"Starting rest broker proxy on WSS port {self.wss_port}, HTTP port {self.http_port}"
)
cmd = [
str(self.neon_binpath / "proxy"),
"-c",
str(cert_path),
"-k",
str(key_path),
"--is-auth-broker",
"true",
"--is-rest-broker",
"true",
"--wss",
f"{self.host}:{self.wss_port}",
"--http",
f"{self.host}:{self.http_port}",
"--mgmt",
f"{self.host}:{self.mgmt_port}",
"--auth-backend",
"local",
"--config-path",
str(self.config_path),
]
log.info(f"Starting rest broker proxy with command: {' '.join(cmd)}")
with open(self.logfile, "w") as logfile:
self._popen = subprocess.Popen(
cmd,
stdout=logfile,
stderr=subprocess.STDOUT,
cwd=self.test_output_dir,
env={
**os.environ,
"RUST_LOG": "info",
"LOGFMT": "text",
"OTEL_SDK_DISABLED": "true",
},
)
self.running = True
self._wait_until_ready()
return self
def stop(self) -> Self:
if not self.running:
return self
log.info("Stopping rest broker proxy")
if self._popen is not None:
self._popen.terminate()
try:
self._popen.wait(timeout=10)
except subprocess.TimeoutExpired:
log.warning("failed to gracefully terminate rest broker proxy; killing")
self._popen.kill()
self.running = False
return self
def get_binary_version(self) -> str:
cmd = [str(self.neon_binpath / "proxy"), "--version"]
res = subprocess.run(cmd, capture_output=True, text=True, check=True)
return res.stdout.strip()
@backoff.on_exception(backoff.expo, requests.exceptions.RequestException, max_time=10)
def _wait_until_ready(self):
# Check if the WSS port is ready using a simple HTTPS request
# REST API is served on the WSS port with HTTPS
requests.get(f"https://{self.host}:{self.wss_port}/", timeout=1, verify=False)
# Any response (even error) means the server is up - we just need to connect
def get_metrics(self) -> str:
# Metrics are still on the HTTP port
response = requests.get(f"http://{self.host}:{self.http_port}/metrics", timeout=5)
response.raise_for_status()
return response.text
def assert_no_errors(self):
# Define allowed error patterns for rest broker proxy
allowed_errors = [
"connection closed before message completed",
"connection reset by peer",
"broken pipe",
"client disconnected",
"Authentication failed",
"connection timed out",
"no connection available",
"Pool dropped",
]
with open(self.logfile) as f:
for line in f:
if "ERROR" in line or "FATAL" in line:
if not any(allowed in line for allowed in allowed_errors):
raise AssertionError(
f"Found error in rest broker proxy log: {line.strip()}"
)
def __enter__(self) -> Self:
return self
def __exit__(
self,
exc_type: type[BaseException] | None,
exc_value: BaseException | None,
traceback: TracebackType | None,
):
self.stop()
@pytest.fixture(scope="function")
def link_proxy(
port_distributor: PortDistributor, neon_binpath: Path, test_output_dir: Path
@@ -4203,6 +4491,81 @@ def static_proxy(
yield proxy
@pytest.fixture(scope="function")
def local_proxy(
vanilla_pg: VanillaPostgres,
port_distributor: PortDistributor,
neon_binpath: Path,
test_output_dir: Path,
) -> Iterator[NeonLocalProxy]:
"""Local proxy that connects directly to vanilla postgres for rest broker testing."""
# Start vanilla_pg without database bootstrapping
vanilla_pg.start()
http_port = port_distributor.get_port()
metrics_port = port_distributor.get_port()
with NeonLocalProxy(
neon_binpath=neon_binpath,
test_output_dir=test_output_dir,
http_port=http_port,
metrics_port=metrics_port,
vanilla_pg=vanilla_pg,
) as proxy:
proxy.start()
yield proxy
@pytest.fixture(scope="function")
def local_proxy_fixed_port(
vanilla_pg: VanillaPostgres,
neon_binpath: Path,
test_output_dir: Path,
) -> Iterator[NeonLocalProxy]:
"""Local proxy that connects directly to vanilla postgres on the hardcoded port 7432."""
# Start vanilla_pg without database bootstrapping
vanilla_pg.start()
# Use the hardcoded port that the rest broker proxy expects
http_port = 7432
metrics_port = 7433 # Use a different port for metrics
with NeonLocalProxy(
neon_binpath=neon_binpath,
test_output_dir=test_output_dir,
http_port=http_port,
metrics_port=metrics_port,
vanilla_pg=vanilla_pg,
) as proxy:
proxy.start()
yield proxy
@pytest.fixture(scope="function")
def rest_broker_proxy(
port_distributor: PortDistributor,
neon_binpath: Path,
test_output_dir: Path,
) -> Iterator[NeonRestBrokerProxy]:
"""Rest broker proxy that handles both auth broker and rest broker functionality."""
wss_port = port_distributor.get_port()
http_port = port_distributor.get_port()
mgmt_port = port_distributor.get_port()
with NeonRestBrokerProxy(
neon_binpath=neon_binpath,
test_output_dir=test_output_dir,
wss_port=wss_port,
http_port=http_port,
mgmt_port=mgmt_port,
) as proxy:
proxy.start()
yield proxy
@pytest.fixture(scope="function")
def neon_authorize_jwk() -> jwk.JWK:
kid = str(uuid.uuid4())

View File

@@ -741,3 +741,29 @@ def shared_buffers_for_max_cu(max_cu: float) -> str:
sharedBuffersMb = int(max(128, (1023 + maxBackends * 256) / 1024))
sharedBuffers = int(sharedBuffersMb * 1024 / 8)
return str(sharedBuffers)
def skip_if_proxy_lacks_rest_broker(reason: str = "proxy was built without 'rest_broker' feature"):
# Determine the binary path using the same logic as neon_binpath fixture
def has_rest_broker_feature():
# Find the neon binaries
if env_neon_bin := os.environ.get("NEON_BIN"):
binpath = Path(env_neon_bin)
else:
base_dir = Path(__file__).parents[2] # Same as BASE_DIR in paths.py
build_type = os.environ.get("BUILD_TYPE", "debug")
binpath = base_dir / "target" / build_type
proxy_bin = binpath / "proxy"
if not proxy_bin.exists():
return False
try:
cmd = [str(proxy_bin), "--help"]
result = subprocess.run(cmd, capture_output=True, text=True, check=True, timeout=10)
help_output = result.stdout
return "--is-rest-broker" in help_output
except (subprocess.CalledProcessError, subprocess.TimeoutExpired, FileNotFoundError):
return False
return pytest.mark.skipif(not has_rest_broker_feature(), reason=reason)

View File

@@ -0,0 +1,137 @@
import json
import signal
import time
import requests
from fixtures.utils import skip_if_proxy_lacks_rest_broker
from jwcrypto import jwt
@skip_if_proxy_lacks_rest_broker()
def test_rest_broker_happy(
local_proxy_fixed_port, rest_broker_proxy, vanilla_pg, neon_authorize_jwk, httpserver
):
"""Test REST API endpoint using local_proxy and rest_broker_proxy."""
# Use the fixed port local proxy
local_proxy = local_proxy_fixed_port
# Create the required roles for PostgREST authentication
vanilla_pg.safe_psql("CREATE ROLE authenticator LOGIN")
vanilla_pg.safe_psql("CREATE ROLE authenticated")
vanilla_pg.safe_psql("CREATE ROLE anon")
vanilla_pg.safe_psql("GRANT authenticated TO authenticator")
vanilla_pg.safe_psql("GRANT anon TO authenticator")
# Create the pgrst schema and configuration function required by the rest broker
vanilla_pg.safe_psql("CREATE SCHEMA IF NOT EXISTS pgrst")
vanilla_pg.safe_psql("""
CREATE OR REPLACE FUNCTION pgrst.pre_config()
RETURNS VOID AS $$
SELECT
set_config('pgrst.db_schemas', 'test', true)
, set_config('pgrst.db_aggregates_enabled', 'true', true)
, set_config('pgrst.db_anon_role', 'anon', true)
, set_config('pgrst.jwt_aud', '', true)
, set_config('pgrst.jwt_secret', '', true)
, set_config('pgrst.jwt_role_claim_key', '."role"', true)
$$ LANGUAGE SQL;
""")
vanilla_pg.safe_psql("GRANT USAGE ON SCHEMA pgrst TO authenticator")
vanilla_pg.safe_psql("GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA pgrst TO authenticator")
# Bootstrap the database with test data
vanilla_pg.safe_psql("CREATE SCHEMA IF NOT EXISTS test")
vanilla_pg.safe_psql("""
CREATE TABLE IF NOT EXISTS test.items (
id SERIAL PRIMARY KEY,
name TEXT NOT NULL
)
""")
vanilla_pg.safe_psql("INSERT INTO test.items (name) VALUES ('test_item')")
# Grant access to the test schema for the authenticated role
vanilla_pg.safe_psql("GRANT USAGE ON SCHEMA test TO authenticated")
vanilla_pg.safe_psql("GRANT SELECT ON ALL TABLES IN SCHEMA test TO authenticated")
# Set up HTTP server to serve JWKS (like static_auth_broker)
# Generate public key from the JWK
public_key = neon_authorize_jwk.export_public(as_dict=True)
# Set up the httpserver to serve the JWKS
httpserver.expect_request("/.well-known/jwks.json").respond_with_json({"keys": [public_key]})
# Create JWKS configuration for the rest broker proxy
jwks_config = {
"jwks": [
{
"id": "1",
"role_names": ["authenticator", "authenticated", "anon"],
"jwks_url": httpserver.url_for("/.well-known/jwks.json"),
"provider_name": "foo",
"jwt_audience": None,
}
]
}
# Write the JWKS config to the config file that rest_broker_proxy expects
config_file = rest_broker_proxy.config_path
with open(config_file, "w") as f:
json.dump(jwks_config, f)
# Write the same config to the local_proxy config file
local_config_file = local_proxy.config_path
with open(local_config_file, "w") as f:
json.dump(jwks_config, f)
# Signal both proxies to reload their config
if rest_broker_proxy._popen is not None:
rest_broker_proxy._popen.send_signal(signal.SIGHUP)
if local_proxy._popen is not None:
local_proxy._popen.send_signal(signal.SIGHUP)
# Wait a bit for config to reload
time.sleep(0.5)
# Generate a proper JWT token using the JWK (similar to test_auth_broker.py)
token = jwt.JWT(
header={"kid": neon_authorize_jwk.key_id, "alg": "RS256"},
claims={
"sub": "user",
"role": "authenticated", # role that's in role_names
"exp": 9999999999, # expires far in the future
"iat": 1000000000, # issued at
},
)
token.make_signed_token(neon_authorize_jwk)
# Debug: Print the JWT claims and config for troubleshooting
print(f"JWT claims: {token.claims}")
print(f"JWT header: {token.header}")
print(f"Config file contains: {jwks_config}")
print(f"Public key kid: {public_key.get('kid')}")
# Test REST API call - following SUBZERO.md pattern
# REST API is served on the WSS port with HTTPS and includes database name
# ep-purple-glitter-adqior4l-pooler.c-2.us-east-1.aws.neon.tech
url = f"https://foo.apirest.c-2.local.neon.build:{rest_broker_proxy.wss_port}/postgres/rest/v1/items"
response = requests.get(
url,
headers={
"Authorization": f"Bearer {token.serialize()}",
},
params={"id": "eq.1", "select": "name"},
verify=False, # Skip SSL verification for self-signed certs
)
print(f"Response status: {response.status_code}")
print(f"Response headers: {response.headers}")
print(f"Response body: {response.text}")
# For now, let's just check that we get some response
# We can refine the assertions once we see what the actual response looks like
assert response.status_code in [200] # Any response means the proxies are working
# check the response body
assert response.json() == [{"name": "test_item"}]