neon/pyproject.toml
Heikki Linnakangas 70fdd75c89 Introduce built-in Prometheus exporter to the Postgres extension
Currently, the exporter exposes the same LFC metrics that are exposed
by the "autoscaling" sql_exporter in the docker image. With this, we
can remove the dedicated sql_exporter instance. But that's left as a
TODO until this is rolled out to production and we have changed
autoscaling-agent to fetch the metrics from this new endpoint.

The exporter runs as a Postgres background worker process. This is
extracted from the Rust communicator rewrite project, which will use
the same worker process for much more, including handling the
communication with the pageservers. For now, though, it only handles
the metrics requests.

In the future, we will add more metrics, and perhaps even APIs to
control the running Postgres instance.

The exporter listens on a Unix Domain socket within the Postgres data
directory. A Unix Domain socket is a bit unconventional, but it has
some advantages:

- Permissions are taken care of. Only processes that can access the
  data directory, and therefore already have full access to the
  running Postgres instance, can connect to it.

- No need to allocate and manage a new port number for the listener.

It has some downsides too: it's not immediately accessible from the
outside world, and the APIs for working with Unix Domain sockets are
lower-level than those for TCP sockets (see the symlink hack in
`postgres_metrics_client.rs`, for example).

To expose the metrics from the local Unix Domain socket to the
autoscaling agent, introduce a new '/autoscaling_metrics' endpoint in
compute_ctl's HTTP server. Currently it merely forwards the request to
the Postgres instance, but we could add rate limiting and access
control there in the future.
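
A hedged sketch of how the agent or a test could scrape that endpoint
(the compute_ctl address and port are assumptions for illustration,
not taken from this patch), parsing the result with prometheus-client:

    import requests
    from prometheus_client.parser import text_string_to_metric_families

    # Assumed compute_ctl HTTP address; use whatever the deployment exposes.
    resp = requests.get("http://localhost:3080/autoscaling_metrics", timeout=5)
    resp.raise_for_status()

    for family in text_string_to_metric_families(resp.text):
        for sample in family.samples:
            print(sample.name, sample.labels, sample.value)
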
2025-07-15 11:50:31 +03:00

[tool.poetry]
description = ""
authors = []
package-mode = false

[tool.poetry.dependencies]
python = "^3.11"
pytest = "^7.4.4"
psycopg2-binary = "^2.9.10"
typing-extensions = "^4.12.2"
PyJWT = {version = "^2.1.0", extras = ["crypto"]}
requests = "^2.32.4"
pytest-xdist = "^3.3.1"
asyncpg = "^0.30.0"
aiopg = "^1.4.0"
Jinja2 = "^3.1.6"
types-requests = "^2.31.0.0"
types-psycopg2 = "^2.9.21.20241019"
boto3 = "^1.34.11"
boto3-stubs = {extras = ["s3", "kms"], version = "^1.26.16"}
moto = {extras = ["server"], version = "^5.0.6"}
backoff = "^2.2.1"
pytest-lazy-fixture = "^0.6.3"
prometheus-client = "^0.14.1"
pytest-timeout = "^2.3.1"
Werkzeug = "^3.0.6"
pytest-order = "^1.1.0"
allure-pytest = "^2.13.5"
pytest-asyncio = "^0.21.0"
toml = "^0.10.2"
psutil = "^5.9.4"
types-psutil = "^5.9.5.12"
types-toml = "^0.10.8.6"
pytest-httpserver = "^1.0.8"
aiohttp = "3.10.11"
pytest-rerunfailures = "^15.0"
types-pytest-lazy-fixture = "^0.6.3.3"
pytest-split = "^0.8.1"
zstandard = "^0.23.0"
httpx = {extras = ["http2"], version = "^0.26.0"}
pytest-repeat = "^0.9.3"
websockets = "^12.0"
clickhouse-connect = "^0.7.16"
kafka-python = "^2.0.2"
jwcrypto = "^1.5.6"
h2 = "^4.2.0"
types-jwcrypto = "^1.5.0.20240925"
pyyaml = "^6.0.2"
types-pyyaml = "^6.0.12.20240917"
testcontainers = "^4.9.0"
# Install a release candidate of `jsonnet`, as it supports Python 3.13
jsonnet = "^0.21.0-rc2"
requests-unixsocket = "^0.4.1"

[tool.poetry.group.dev.dependencies]
mypy = "==1.13.0"
ruff = "^0.11.2"

[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"

[tool.mypy]
exclude = [
    "^vendor/",
    "^target/",
    "test_runner/performance/pgvector/loaddata.py",
]
check_untyped_defs = true
# Help mypy find imports when running against a list of individual files.
# Without this line, mypy would behave differently on individual files than on the entire project.
mypy_path = "$MYPY_CONFIG_FILE_DIR:$MYPY_CONFIG_FILE_DIR/test_runner:$MYPY_CONFIG_FILE_DIR/test_runner/stubs"
disallow_incomplete_defs = false
disallow_untyped_calls = false
disallow_untyped_decorators = false
disallow_untyped_defs = false
strict = true

[[tool.mypy.overrides]]
module = [
    "_jsonnet.*",
    "asyncpg.*",
    "pg8000.*",
    "allure.*",
    "allure_commons.*",
    "allure_pytest.*",
    "kafka.*",
    "testcontainers.*",
]
ignore_missing_imports = true

[tool.ruff]
target-version = "py311"
extend-exclude = [
    "vendor/",
    "target/",
    "test_runner/stubs/", # Autogenerated by mypy's stubgen
]
line-length = 100 # this setting is guidance rather than a hard limit; it won't fail if a line can't be made shorter

[tool.ruff.lint]
ignore = [
    "E501", # Line too long, we don't want to be too strict about it
]
select = [
    "E", # pycodestyle
    "F", # Pyflakes
    "I", # isort
    "W", # pycodestyle
    "B", # bugbear
    "UP", # pyupgrade
    "TC", # flake8-type-checking
]