build: on windows (#2054)
* build on windows
* rebase develop
* fix: resolve PR comments
@@ -1,2 +1,3 @@
 [profile.default]
 slow-timeout = { period = "60s", terminate-after = 3, grace-period = "30s" }
+retries = { backoff = "exponential", count = 3, delay = "10s", jitter = true }
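The new `retries` table enables nextest's automatic retries with exponential backoff. A minimal sketch (not part of this commit) of the delay schedule those settings describe; real jitter is randomized by nextest itself:

    fn main() {
        let base = std::time::Duration::from_secs(10); // delay = "10s"
        for attempt in 0..3u32 {
            // Exponential backoff: 10s, 20s, 40s for retries 1 through 3.
            let delay = base * 2u32.pow(attempt);
            println!("retry {} after {:?}", attempt + 1, delay);
        }
    }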
.github/workflows/develop.yml (vendored, 102 changed lines)
@@ -70,52 +70,13 @@ jobs:
       - name: Run taplo
         run: taplo format --check

-  # Use coverage to run test.
-  # test:
-  #   name: Test Suite
-  #   if: github.event.pull_request.draft == false
-  #   runs-on: ubuntu-latest
-  #   timeout-minutes: 60
-  #   steps:
-  #     - uses: actions/checkout@v3
-  #     - name: Cache LLVM and Clang
-  #       id: cache-llvm
-  #       uses: actions/cache@v3
-  #       with:
-  #         path: ./llvm
-  #         key: llvm
-  #     - uses: arduino/setup-protoc@v1
-  #       with:
-  #         repo-token: ${{ secrets.GITHUB_TOKEN }}
-  #     - uses: KyleMayes/install-llvm-action@v1
-  #       with:
-  #         version: "14.0"
-  #         cached: ${{ steps.cache-llvm.outputs.cache-hit }}
-  #     - uses: dtolnay/rust-toolchain@master
-  #       with:
-  #         toolchain: ${{ env.RUST_TOOLCHAIN }}
-  #     - name: Rust Cache
-  #       uses: Swatinem/rust-cache@v2
-  #     - name: Cleanup disk
-  #       uses: curoky/cleanup-disk-action@v2.0
-  #       with:
-  #         retain: 'rust,llvm'
-  #     - name: Install latest nextest release
-  #       uses: taiki-e/install-action@nextest
-  #     - name: Run tests
-  #       run: cargo nextest run
-  #       env:
-  #         CARGO_BUILD_RUSTFLAGS: "-C link-arg=-fuse-ld=lld"
-  #         RUST_BACKTRACE: 1
-  #         GT_S3_BUCKET: ${{ secrets.S3_BUCKET }}
-  #         GT_S3_ACCESS_KEY_ID: ${{ secrets.S3_ACCESS_KEY_ID }}
-  #         GT_S3_ACCESS_KEY: ${{ secrets.S3_ACCESS_KEY }}
-  #         UNITTEST_LOG_DIR: "__unittest_logs"
-
   sqlness:
     name: Sqlness Test
     if: github.event.pull_request.draft == false
-    runs-on: ubuntu-latest-8-cores
+    runs-on: ${{ matrix.os }}
+    strategy:
+      matrix:
+        os: [ ubuntu-latest-8-cores, windows-latest ]
     timeout-minutes: 60
     steps:
       - uses: actions/checkout@v3
@@ -127,25 +88,14 @@ jobs:
           toolchain: ${{ env.RUST_TOOLCHAIN }}
       - name: Rust Cache
         uses: Swatinem/rust-cache@v2
-      - name: Run etcd
-        run: |
-          ETCD_VER=v3.5.7
-          DOWNLOAD_URL=https://github.com/etcd-io/etcd/releases/download
-          curl -L ${DOWNLOAD_URL}/${ETCD_VER}/etcd-${ETCD_VER}-linux-amd64.tar.gz -o /tmp/etcd-${ETCD_VER}-linux-amd64.tar.gz
-          mkdir -p /tmp/etcd-download
-          tar xzvf /tmp/etcd-${ETCD_VER}-linux-amd64.tar.gz -C /tmp/etcd-download --strip-components=1
-          rm -f /tmp/etcd-${ETCD_VER}-linux-amd64.tar.gz
-
-          sudo cp -a /tmp/etcd-download/etcd* /usr/local/bin/
-          nohup etcd >/tmp/etcd.log 2>&1 &
       - name: Run sqlness
-        run: cargo sqlness && ls /tmp
+        run: cargo sqlness
       - name: Upload sqlness logs
         if: always()
         uses: actions/upload-artifact@v3
         with:
           name: sqlness-logs
-          path: /tmp/greptime-*.log
+          path: ${{ runner.temp }}/greptime-*.log
           retention-days: 3

   fmt:
@@ -234,3 +184,43 @@ jobs:
           flags: rust
           fail_ci_if_error: false
           verbose: true

+  test-on-windows:
+    if: github.event.pull_request.draft == false
+    runs-on: windows-latest-8-cores
+    timeout-minutes: 60
+    steps:
+      - run: git config --global core.autocrlf false
+      - uses: actions/checkout@v3
+      - uses: arduino/setup-protoc@v1
+        with:
+          repo-token: ${{ secrets.GITHUB_TOKEN }}
+      - name: Install Rust toolchain
+        uses: dtolnay/rust-toolchain@master
+        with:
+          toolchain: ${{ env.RUST_TOOLCHAIN }}
+          components: llvm-tools-preview
+      - name: Rust Cache
+        uses: Swatinem/rust-cache@v2
+      - name: Install Cargo Nextest
+        uses: taiki-e/install-action@nextest
+      - name: Install Python
+        uses: actions/setup-python@v4
+        with:
+          python-version: '3.10'
+      - name: Install PyArrow Package
+        run: pip install pyarrow
+      - name: Install WSL distribution
+        uses: Vampire/setup-wsl@v2
+        with:
+          distribution: Ubuntu-22.04
+      - name: Running tests
+        run: cargo nextest run -F pyo3_backend,dashboard
+        env:
+          RUST_BACKTRACE: 1
+          CARGO_INCREMENTAL: 0
+          GT_S3_BUCKET: ${{ secrets.S3_BUCKET }}
+          GT_S3_ACCESS_KEY_ID: ${{ secrets.S3_ACCESS_KEY_ID }}
+          GT_S3_ACCESS_KEY: ${{ secrets.S3_ACCESS_KEY }}
+          GT_S3_REGION: ${{ secrets.S3_REGION }}
+          UNITTEST_LOG_DIR: "__unittest_logs"
Cargo.lock (generated, 84 changed lines)
@@ -1816,17 +1816,6 @@ dependencies = [
  "tokio",
 ]

-[[package]]
-name = "common-pprof"
-version = "0.3.2"
-dependencies = [
- "common-error",
- "pprof",
- "prost",
- "snafu",
- "tokio",
-]
-
 [[package]]
 name = "common-procedure"
 version = "0.3.2"
@@ -4943,9 +4932,9 @@ checksum = "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519"

 [[package]]
 name = "linux-raw-sys"
-version = "0.4.3"
+version = "0.4.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "09fc20d2ca12cb9f044c93e3bd6d32d523e6e2ec3db4f7b2939cd99026ecd3f0"
+checksum = "57bcfdad1b858c2db7c38303a6d2ad4dfaf5eb53dfeb0910128b2c26d6158503"

 [[package]]
 name = "lock_api"
@@ -5845,13 +5834,13 @@ dependencies = [

 [[package]]
 name = "num-derive"
-version = "0.3.3"
+version = "0.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "876a53fff98e03a936a674b29568b0e605f06b29372c2489ff4de23f1949743d"
+checksum = "9e6a0fd4f737c707bd9086cc16c925f294943eb62eb71499e9fd4cf71f8b9f4e"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 1.0.109",
+ "syn 2.0.28",
 ]

 [[package]]
@@ -7354,22 +7343,21 @@ dependencies = [
 [[package]]
 name = "raft-engine"
 version = "0.3.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "67b66e735395b7ff12f3ebbb4794006aecb365c4c9a82141279b58b227ac3a8b"
+source = "git+https://github.com/tikv/raft-engine.git?rev=2dcaf5beeea3d5de9ec9c7133a2451d00f508f52#2dcaf5beeea3d5de9ec9c7133a2451d00f508f52"
 dependencies = [
  "byteorder",
  "crc32fast",
  "crossbeam",
  "fail",
  "fs2",
- "hashbrown 0.12.3",
+ "hashbrown 0.14.0",
  "hex",
  "if_chain",
  "lazy_static",
  "libc",
  "log",
  "lz4-sys",
- "nix 0.25.1",
+ "nix 0.26.2",
  "num-derive",
  "num-traits",
  "parking_lot 0.12.1",
@@ -7377,10 +7365,11 @@ dependencies = [
  "prometheus-static-metric",
  "protobuf",
  "rayon",
+ "rhai",
  "scopeguard",
  "serde",
  "serde_repr",
- "strum 0.24.1",
+ "strum 0.25.0",
  "thiserror",
 ]

@@ -7683,6 +7672,32 @@ dependencies = [
  "bytemuck",
 ]

+[[package]]
+name = "rhai"
+version = "1.15.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4c2a11a646ef5d4e4a9d5cf80c7e4ecb20f9b1954292d5c5e6d6cbc8d33728ec"
+dependencies = [
+ "ahash 0.8.3",
+ "bitflags 1.3.2",
+ "instant",
+ "num-traits",
+ "rhai_codegen",
+ "smallvec",
+ "smartstring",
+]
+
+[[package]]
+name = "rhai_codegen"
+version = "1.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "db74e3fdd29d969a0ec1f8e79171a6f0f71d0429293656901db382d248c4c021"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 1.0.109",
+]
+
 [[package]]
 name = "ring"
 version = "0.16.20"
@@ -7970,7 +7985,7 @@ dependencies = [
 "bitflags 2.3.3",
 "errno 0.3.2",
 "libc",
- "linux-raw-sys 0.4.3",
+ "linux-raw-sys 0.4.5",
 "windows-sys 0.48.0",
 ]

@@ -8643,9 +8658,9 @@ checksum = "a3f0bf26fd526d2a95683cd0f87bf103b8539e2ca1ef48ce002d67aad59aa0b4"

 [[package]]
 name = "serde"
-version = "1.0.179"
+version = "1.0.180"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0a5bf42b8d227d4abf38a1ddb08602e229108a517cd4e5bb28f9c7eaafdce5c0"
+checksum = "0ea67f183f058fe88a4e3ec6e2788e003840893b91bac4559cabedd00863b3ed"
 dependencies = [
  "serde_derive",
 ]
@@ -8662,9 +8677,9 @@ dependencies = [

 [[package]]
 name = "serde_derive"
-version = "1.0.179"
+version = "1.0.180"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "741e124f5485c7e60c03b043f79f320bff3527f4bbf12cf3831750dc46a0ec2c"
+checksum = "24e744d7782b686ab3b73267ef05697159cc0e5abbed3f47f9933165e5219036"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -8794,7 +8809,6 @@ dependencies = [
  "common-grpc",
  "common-grpc-expr",
  "common-mem-prof",
- "common-pprof",
  "common-query",
  "common-recordbatch",
  "common-runtime",
@@ -8829,6 +8843,7 @@ dependencies = [
  "pgwire",
  "pin-project",
  "postgres-types",
+ "pprof",
  "promql-parser",
  "prost",
  "query",
@@ -9040,6 +9055,17 @@ version = "1.11.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "62bb4feee49fdd9f707ef802e22365a35de4b7b299de4763d44bfea899442ff9"

+[[package]]
+name = "smartstring"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3fb72c633efbaa2dd666986505016c32c3044395ceaf881518399d2f4127ee29"
+dependencies = [
+ "autocfg",
+ "static_assertions",
+ "version_check",
+]
+
 [[package]]
 name = "snafu"
 version = "0.7.5"
@@ -9795,9 +9821,9 @@ checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369"

 [[package]]
 name = "target-lexicon"
-version = "0.12.10"
+version = "0.12.11"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1d2faeef5759ab89935255b1a4cd98e0baf99d1085e37d36599c625dac49ae8e"
+checksum = "9d0e916b1148c8e263850e1ebcbd046f333e0683c724876bb0da63ea4373dc8a"

 [[package]]
 name = "temp-env"
@@ -18,7 +18,6 @@ members = [
     "src/common/meta",
     "src/common/procedure",
     "src/common/procedure-test",
-    "src/common/pprof",
     "src/common/query",
     "src/common/recordbatch",
     "src/common/runtime",
@@ -2,14 +2,14 @@

 # This script is used to download built dashboard assets from the "GreptimeTeam/dashboard" repository.

-set -e
+set -e -x

 declare -r SCRIPT_DIR=$(cd $(dirname ${0}) >/dev/null 2>&1 && pwd)
 declare -r ROOT_DIR=$(dirname ${SCRIPT_DIR})
 declare -r STATIC_DIR="$ROOT_DIR/src/servers/dashboard"
 OUT_DIR="${1:-$SCRIPT_DIR}"

-RELEASE_VERSION="$(cat $STATIC_DIR/VERSION)"
+RELEASE_VERSION="$(cat $STATIC_DIR/VERSION | tr -d '\t\r\n ')"

 echo "Downloading assets to dir: $OUT_DIR"
 cd $OUT_DIR
@@ -54,15 +54,19 @@ session = { workspace = true }
 snafu.workspace = true
 substrait = { workspace = true }
 table = { workspace = true }
-tikv-jemallocator = "0.5"
 tokio.workspace = true

+[target.'cfg(not(windows))'.dependencies]
+tikv-jemallocator = "0.5"
+
 [dev-dependencies]
 common-test-util = { workspace = true }
-rexpect = "0.5"
 serde.workspace = true
 temp-env = "0.3"
 toml.workspace = true

+[target.'cfg(not(windows))'.dev-dependencies]
+rexpect = "0.5"
+
 [build-dependencies]
 common-version = { workspace = true }
@@ -187,6 +187,7 @@ fn log_env_flags() {
     }
 }

+#[cfg(not(windows))]
 #[global_allocator]
 static ALLOC: tikv_jemallocator::Jemalloc = tikv_jemallocator::Jemalloc;

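The hunk above compiles the jemalloc global allocator out on Windows. A self-contained sketch of the same pattern (the main function and comments are illustrative, not from this commit):

    // jemalloc is registered as the global allocator only on non-Windows
    // targets; with no #[global_allocator] declared, Windows falls back to
    // Rust's default system allocator automatically.
    #[cfg(not(windows))]
    #[global_allocator]
    static ALLOC: tikv_jemallocator::Jemalloc = tikv_jemallocator::Jemalloc;

    fn main() {
        // Allocated by jemalloc on Unix-like targets, by the system allocator on Windows.
        let v = vec![1, 2, 3];
        println!("{v:?}");
    }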
@@ -209,15 +209,19 @@ impl DfRecordBatchEncoder for csv::Writer<SharedBuffer> {
 #[cfg(test)]
 mod tests {

+    use common_test_util::find_workspace_path;
+
     use super::*;
     use crate::file_format::{
         FileFormat, FORMAT_COMPRESSION_TYPE, FORMAT_DELIMITER, FORMAT_HAS_HEADER,
         FORMAT_SCHEMA_INFER_MAX_RECORD,
     };
-    use crate::test_util::{self, format_schema, test_store};
+    use crate::test_util::{format_schema, test_store};

     fn test_data_root() -> String {
-        test_util::get_data_dir("tests/csv").display().to_string()
+        find_workspace_path("/src/common/datasource/tests/csv")
+            .display()
+            .to_string()
     }

     #[tokio::test]
@@ -167,12 +167,16 @@ impl DfRecordBatchEncoder for json::Writer<SharedBuffer, LineDelimited> {

 #[cfg(test)]
 mod tests {
+    use common_test_util::find_workspace_path;
+
     use super::*;
     use crate::file_format::{FileFormat, FORMAT_COMPRESSION_TYPE, FORMAT_SCHEMA_INFER_MAX_RECORD};
-    use crate::test_util::{self, format_schema, test_store};
+    use crate::test_util::{format_schema, test_store};

     fn test_data_root() -> String {
-        test_util::get_data_dir("tests/json").display().to_string()
+        find_workspace_path("/src/common/datasource/tests/json")
+            .display()
+            .to_string()
     }

     #[tokio::test]
@@ -188,12 +188,16 @@ impl FileOpener for OrcOpener {

 #[cfg(test)]
 mod tests {
+    use common_test_util::find_workspace_path;
+
     use super::*;
     use crate::file_format::FileFormat;
-    use crate::test_util::{self, format_schema, test_store};
+    use crate::test_util::{format_schema, test_store};

     fn test_data_root() -> String {
-        test_util::get_data_dir("tests/orc").display().to_string()
+        find_workspace_path("/src/common/datasource/tests/orc")
+            .display()
+            .to_string()
     }

     #[tokio::test]
@@ -158,11 +158,13 @@ impl ArrowWriterCloser for ArrowWriter<SharedBuffer> {

 #[cfg(test)]
 mod tests {
+    use common_test_util::find_workspace_path;
+
     use super::*;
-    use crate::test_util::{self, format_schema, test_store};
+    use crate::test_util::{format_schema, test_store};

     fn test_data_root() -> String {
-        test_util::get_data_dir("tests/parquet")
+        find_workspace_path("/src/common/datasource/tests/parquet")
             .display()
             .to_string()
     }
@@ -17,6 +17,7 @@ use std::collections::HashMap;
 use std::sync::Arc;
 use std::vec;

+use common_test_util::find_workspace_path;
 use datafusion::assert_batches_eq;
 use datafusion::datasource::physical_plan::{FileOpener, FileScanConfig, FileStream, ParquetExec};
 use datafusion::execution::context::TaskContext;
@@ -71,7 +72,7 @@ async fn test_json_opener() {
         CompressionType::Uncompressed,
     );

-    let path = &test_util::get_data_dir("tests/json/basic.json")
+    let path = &find_workspace_path("/src/common/datasource/tests/json/basic.json")
         .display()
         .to_string();
     let tests = [
@@ -111,7 +112,7 @@ async fn test_csv_opener() {
     let store = test_store("/");

     let schema = test_basic_schema();
-    let path = &test_util::get_data_dir("tests/csv/basic.csv")
+    let path = &find_workspace_path("/src/common/datasource/tests/csv/basic.csv")
         .display()
         .to_string();
     let csv_conf = CsvConfigBuilder::default()
@@ -160,7 +161,7 @@ async fn test_parquet_exec() {

     let schema = test_basic_schema();

-    let path = &test_util::get_data_dir("tests/parquet/basic.parquet")
+    let path = &find_workspace_path("/src/common/datasource/tests/parquet/basic.parquet")
         .display()
         .to_string();
     let base_config = scan_config(schema.clone(), None, path);
@@ -196,13 +197,15 @@

 #[tokio::test]
 async fn test_orc_opener() {
-    let root = test_util::get_data_dir("tests/orc").display().to_string();
+    let root = find_workspace_path("/src/common/datasource/tests/orc")
+        .display()
+        .to_string();
     let store = test_store(&root);
     let schema = OrcFormat.infer_schema(&store, "test.orc").await.unwrap();
     let schema = Arc::new(schema);

     let orc_opener = OrcOpener::new(store.clone(), schema.clone(), None);
-    let path = &test_util::get_data_dir("/test.orc").display().to_string();
+    let path = "test.orc";

     let tests = [
         Test {
@@ -12,7 +12,6 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.

-use std::path::PathBuf;
 use std::sync::Arc;

 use arrow_schema::{DataType, Field, Schema, SchemaRef};
@@ -31,13 +30,6 @@ use crate::test_util;

 pub const TEST_BATCH_SIZE: usize = 100;

-pub fn get_data_dir(path: &str) -> PathBuf {
-    // https://doc.rust-lang.org/cargo/reference/environment-variables.html
-    let dir = env!("CARGO_MANIFEST_DIR");
-
-    PathBuf::from(dir).join(path)
-}
-
 pub fn format_schema(schema: Schema) -> Vec<String> {
     schema
         .fields()
@@ -78,6 +70,9 @@ pub fn test_basic_schema() -> SchemaRef {
 }

 pub fn scan_config(file_schema: SchemaRef, limit: Option<usize>, filename: &str) -> FileScanConfig {
+    // object_store only recognize the Unix style path, so make it happy.
+    let filename = &filename.replace('\\', "/");
+
     FileScanConfig {
         object_store_url: ObjectStoreUrl::parse("empty://").unwrap(), // won't be used
         file_schema,
@@ -124,12 +119,7 @@ pub async fn setup_stream_to_json_test(origin_path: &str, threshold: impl Fn(usi

     let written = tmp_store.read(&output_path).await.unwrap();
     let origin = store.read(origin_path).await.unwrap();
-    // ignores `\n`
-    assert_eq!(
-        String::from_utf8_lossy(&written).trim_end_matches('\n'),
-        String::from_utf8_lossy(&origin).trim_end_matches('\n'),
-    )
+    assert_eq_lines(written, origin);
 }

 pub async fn setup_stream_to_csv_test(origin_path: &str, threshold: impl Fn(usize) -> usize) {
@@ -166,10 +156,19 @@ pub async fn setup_stream_to_csv_test(origin_path: &str, threshold: impl Fn(usiz

     let written = tmp_store.read(&output_path).await.unwrap();
     let origin = store.read(origin_path).await.unwrap();
-    // ignores `\n`
-    assert_eq!(
-        String::from_utf8_lossy(&written).trim_end_matches('\n'),
-        String::from_utf8_lossy(&origin).trim_end_matches('\n'),
-    )
+    assert_eq_lines(written, origin);
+}
+
+// Ignore the CRLF difference across operating systems.
+fn assert_eq_lines(written: Vec<u8>, origin: Vec<u8>) {
+    assert_eq!(
+        String::from_utf8(written)
+            .unwrap()
+            .lines()
+            .collect::<Vec<_>>(),
+        String::from_utf8(origin)
+            .unwrap()
+            .lines()
+            .collect::<Vec<_>>(),
+    )
 }
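This helper exists because files written on Windows end lines with "\r\n" while the checked-in fixtures use "\n". A standalone illustration (not from this commit) of why a line-wise comparison is CRLF-insensitive: str::lines() strips both "\n" and "\r\n" terminators.

    fn main() {
        let unix = "a,b\n1,2\n";
        let windows = "a,b\r\n1,2\r\n";
        assert_ne!(unix, windows); // the raw strings differ...
        assert_eq!(
            unix.lines().collect::<Vec<_>>(),
            windows.lines().collect::<Vec<_>>(), // ...but their lines are equal
        );
        println!("line-wise equal");
    }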
@@ -12,50 +12,36 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.

+use common_test_util::find_workspace_path;
+
 use crate::test_util;

 #[tokio::test]
 async fn test_stream_to_json() {
+    let origin_path = &find_workspace_path("/src/common/datasource/tests/json/basic.json")
+        .display()
+        .to_string();
+
     // A small threshold
     // Triggers the flush each writes
-    test_util::setup_stream_to_json_test(
-        &test_util::get_data_dir("tests/json/basic.json")
-            .display()
-            .to_string(),
-        |size| size / 2,
-    )
-    .await;
+    test_util::setup_stream_to_json_test(origin_path, |size| size / 2).await;

     // A large threshold
     // Only triggers the flush at last
-    test_util::setup_stream_to_json_test(
-        &test_util::get_data_dir("tests/json/basic.json")
-            .display()
-            .to_string(),
-        |size| size * 2,
-    )
-    .await;
+    test_util::setup_stream_to_json_test(origin_path, |size| size * 2).await;
 }

 #[tokio::test]
 async fn test_stream_to_csv() {
+    let origin_path = &find_workspace_path("/src/common/datasource/tests/csv/basic.csv")
+        .display()
+        .to_string();
+
     // A small threshold
     // Triggers the flush each writes
-    test_util::setup_stream_to_csv_test(
-        &test_util::get_data_dir("tests/csv/basic.csv")
-            .display()
-            .to_string(),
-        |size| size / 2,
-    )
-    .await;
+    test_util::setup_stream_to_csv_test(origin_path, |size| size / 2).await;

     // A large threshold
     // Only triggers the flush at last
-    test_util::setup_stream_to_csv_test(
-        &test_util::get_data_dir("tests/csv/basic.csv")
-            .display()
-            .to_string(),
-        |size| size * 2,
-    )
-    .await;
+    test_util::setup_stream_to_csv_test(origin_path, |size| size * 2).await;
 }
@@ -66,6 +66,7 @@ mod tests {
         }
     }

+    #[cfg(not(windows))]
     #[test]
     fn test_parse_path_and_dir() {
         let parsed = Url::from_file_path("/to/path/file").unwrap();
@@ -75,6 +76,16 @@ mod tests {
         assert_eq!(parsed.path(), "/to/path/");
     }

+    #[cfg(windows)]
+    #[test]
+    fn test_parse_path_and_dir() {
+        let parsed = Url::from_file_path("C:\\to\\path\\file").unwrap();
+        assert_eq!(parsed.path(), "/C:/to/path/file");
+
+        let parsed = Url::from_directory_path("C:\\to\\path\\").unwrap();
+        assert_eq!(parsed.path(), "/C:/to/path/");
+    }
+
     #[test]
     fn test_find_dir_and_filename() {
         struct Test<'a> {
@@ -8,9 +8,11 @@ license.workspace = true
 common-error = { workspace = true }
 snafu.workspace = true
 tempfile = "3.4"
-tikv-jemalloc-ctl = { version = "0.5", features = ["use_std"] }
 tokio.workspace = true

-[dependencies.tikv-jemalloc-sys]
+[target.'cfg(not(windows))'.dependencies]
+tikv-jemalloc-ctl = { version = "0.5", features = ["use_std"] }
+
+[target.'cfg(not(windows))'.dependencies.tikv-jemalloc-sys]
 features = ["stats", "profiling", "unprefixed_malloc_on_supported_platforms"]
 version = "0.5"
@@ -13,51 +13,28 @@
 // limitations under the License.

 use std::any::Any;
-use std::path::PathBuf;

-use common_error::ext::ErrorExt;
+use common_error::ext::{BoxedError, ErrorExt};
 use common_error::status_code::StatusCode;
-use snafu::{Location, Snafu};
+use snafu::Snafu;

 pub type Result<T> = std::result::Result<T, Error>;

 #[derive(Debug, Snafu)]
 #[snafu(visibility(pub))]
 pub enum Error {
-    #[snafu(display("Failed to read OPT_PROF, source: {}", source))]
-    ReadOptProf { source: tikv_jemalloc_ctl::Error },
+    #[snafu(display("{source}"))]
+    Internal { source: BoxedError },

-    #[snafu(display("Memory profiling is not enabled"))]
-    ProfilingNotEnabled,
-
-    #[snafu(display("Failed to build temp file from given path: {:?}", path))]
-    BuildTempPath { path: PathBuf, location: Location },
-
-    #[snafu(display("Failed to open temp file: {}, source: {}", path, source))]
-    OpenTempFile {
-        path: String,
-        source: std::io::Error,
-    },
-
-    #[snafu(display(
-        "Failed to dump profiling data to temp file: {:?}, source: {}",
-        path,
-        source
-    ))]
-    DumpProfileData {
-        path: PathBuf,
-        source: tikv_jemalloc_ctl::Error,
-    },
+    #[snafu(display("Memory profiling is not supported"))]
+    ProfilingNotSupported,
 }

 impl ErrorExt for Error {
     fn status_code(&self) -> StatusCode {
         match self {
-            Error::ReadOptProf { .. } => StatusCode::Internal,
-            Error::ProfilingNotEnabled => StatusCode::InvalidArguments,
-            Error::BuildTempPath { .. } => StatusCode::Internal,
-            Error::OpenTempFile { .. } => StatusCode::StorageUnavailable,
-            Error::DumpProfileData { .. } => StatusCode::StorageUnavailable,
+            Error::Internal { source } => source.status_code(),
+            Error::ProfilingNotSupported => StatusCode::Unsupported,
         }
     }

src/common/mem-prof/src/jemalloc.rs (new file, 76 lines)
@@ -0,0 +1,76 @@
+// Copyright 2023 Greptime Team
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+mod error;
+
+use std::ffi::{c_char, CString};
+use std::path::PathBuf;
+
+use error::{
+    BuildTempPathSnafu, DumpProfileDataSnafu, OpenTempFileSnafu, ProfilingNotEnabledSnafu,
+    ReadOptProfSnafu,
+};
+use snafu::{ensure, ResultExt};
+use tokio::io::AsyncReadExt;
+
+use crate::error::Result;
+
+const PROF_DUMP: &[u8] = b"prof.dump\0";
+const OPT_PROF: &[u8] = b"opt.prof\0";
+
+pub async fn dump_profile() -> Result<Vec<u8>> {
+    ensure!(is_prof_enabled()?, ProfilingNotEnabledSnafu);
+    let tmp_path = tempfile::tempdir().map_err(|_| {
+        BuildTempPathSnafu {
+            path: std::env::temp_dir(),
+        }
+        .build()
+    })?;
+
+    let mut path_buf = PathBuf::from(tmp_path.path());
+    path_buf.push("greptimedb.hprof");
+
+    let path = path_buf
+        .to_str()
+        .ok_or_else(|| BuildTempPathSnafu { path: &path_buf }.build())?
+        .to_string();
+
+    let mut bytes = CString::new(path.as_str())
+        .map_err(|_| BuildTempPathSnafu { path: &path_buf }.build())?
+        .into_bytes_with_nul();
+
+    {
+        // #safety: we always expect a valid temp file path to write profiling data to.
+        let ptr = bytes.as_mut_ptr() as *mut c_char;
+        unsafe {
+            tikv_jemalloc_ctl::raw::write(PROF_DUMP, ptr)
+                .context(DumpProfileDataSnafu { path: path_buf })?
+        }
+    }
+
+    let mut f = tokio::fs::File::open(path.as_str())
+        .await
+        .context(OpenTempFileSnafu { path: &path })?;
+    let mut buf = vec![];
+    let _ = f
+        .read_to_end(&mut buf)
+        .await
+        .context(OpenTempFileSnafu { path })?;
+    Ok(buf)
+}
+
+fn is_prof_enabled() -> Result<bool> {
+    // safety: OPT_PROF variable, if present, is always a boolean value.
+    Ok(unsafe { tikv_jemalloc_ctl::raw::read::<bool>(OPT_PROF).context(ReadOptProfSnafu)? })
+}
src/common/mem-prof/src/jemalloc/error.rs (new file, 73 lines)
@@ -0,0 +1,73 @@
+// Copyright 2023 Greptime Team
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+use std::any::Any;
+use std::path::PathBuf;
+
+use common_error::ext::{BoxedError, ErrorExt};
+use common_error::status_code::StatusCode;
+use snafu::{Location, Snafu};
+
+#[derive(Debug, Snafu)]
+#[snafu(visibility(pub))]
+pub enum Error {
+    #[snafu(display("Failed to read OPT_PROF, source: {}", source))]
+    ReadOptProf { source: tikv_jemalloc_ctl::Error },
+
+    #[snafu(display("Memory profiling is not enabled"))]
+    ProfilingNotEnabled,
+
+    #[snafu(display("Failed to build temp file from given path: {:?}", path))]
+    BuildTempPath { path: PathBuf, location: Location },
+
+    #[snafu(display("Failed to open temp file: {}, source: {}", path, source))]
+    OpenTempFile {
+        path: String,
+        source: std::io::Error,
+    },
+
+    #[snafu(display(
+        "Failed to dump profiling data to temp file: {:?}, source: {}",
+        path,
+        source
+    ))]
+    DumpProfileData {
+        path: PathBuf,
+        source: tikv_jemalloc_ctl::Error,
+    },
+}
+
+impl ErrorExt for Error {
+    fn status_code(&self) -> StatusCode {
+        match self {
+            Error::ReadOptProf { .. } => StatusCode::Internal,
+            Error::ProfilingNotEnabled => StatusCode::InvalidArguments,
+            Error::BuildTempPath { .. } => StatusCode::Internal,
+            Error::OpenTempFile { .. } => StatusCode::StorageUnavailable,
+            Error::DumpProfileData { .. } => StatusCode::StorageUnavailable,
+        }
+    }
+
+    fn as_any(&self) -> &dyn Any {
+        self
+    }
+}
+
+impl From<Error> for crate::error::Error {
+    fn from(e: Error) -> Self {
+        Self::Internal {
+            source: BoxedError::new(e),
+        }
+    }
+}
@@ -14,62 +14,12 @@

 pub mod error;

-use std::ffi::{c_char, CString};
-use std::path::PathBuf;
-
-use snafu::{ensure, ResultExt};
-use tokio::io::AsyncReadExt;
-
-use crate::error::{
-    BuildTempPathSnafu, DumpProfileDataSnafu, OpenTempFileSnafu, ProfilingNotEnabledSnafu,
-    ReadOptProfSnafu,
-};
-
-const PROF_DUMP: &[u8] = b"prof.dump\0";
-const OPT_PROF: &[u8] = b"opt.prof\0";
+#[cfg(not(windows))]
+mod jemalloc;
+#[cfg(not(windows))]
+pub use jemalloc::dump_profile;

+#[cfg(windows)]
 pub async fn dump_profile() -> error::Result<Vec<u8>> {
-    ensure!(is_prof_enabled()?, ProfilingNotEnabledSnafu);
-    let tmp_path = tempfile::tempdir().map_err(|_| {
-        BuildTempPathSnafu {
-            path: std::env::temp_dir(),
-        }
-        .build()
-    })?;
-
-    let mut path_buf = PathBuf::from(tmp_path.path());
-    path_buf.push("greptimedb.hprof");
-
-    let path = path_buf
-        .to_str()
-        .ok_or_else(|| BuildTempPathSnafu { path: &path_buf }.build())?
-        .to_string();
-
-    let mut bytes = CString::new(path.as_str())
-        .map_err(|_| BuildTempPathSnafu { path: &path_buf }.build())?
-        .into_bytes_with_nul();
-
-    {
-        // #safety: we always expect a valid temp file path to write profiling data to.
-        let ptr = bytes.as_mut_ptr() as *mut c_char;
-        unsafe {
-            tikv_jemalloc_ctl::raw::write(PROF_DUMP, ptr)
-                .context(DumpProfileDataSnafu { path: path_buf })?
-        }
-    }
-
-    let mut f = tokio::fs::File::open(path.as_str())
-        .await
-        .context(OpenTempFileSnafu { path: &path })?;
-    let mut buf = vec![];
-    let _ = f
-        .read_to_end(&mut buf)
-        .await
-        .context(OpenTempFileSnafu { path })?;
-    Ok(buf)
-}
-
-fn is_prof_enabled() -> error::Result<bool> {
-    // safety: OPT_PROF variable, if present, is always a boolean value.
-    Ok(unsafe { tikv_jemalloc_ctl::raw::read::<bool>(OPT_PROF).context(ReadOptProfSnafu)? })
+    error::ProfilingNotSupportedSnafu.fail()
 }
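With this split, callers see one `dump_profile` signature on every platform; Windows simply returns the ProfilingNotSupported error. A hypothetical caller, assuming the crate is imported as `common_mem_prof` (sketch only, not from this commit):

    async fn handle_mem_prof() -> String {
        match common_mem_prof::dump_profile().await {
            Ok(bytes) => format!("dumped {} bytes of profile data", bytes.len()),
            // On Windows this is always the ProfilingNotSupported error.
            Err(e) => format!("profiling unavailable: {e}"),
        }
    }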
@@ -1,16 +0,0 @@
-[package]
-name = "common-pprof"
-version.workspace = true
-edition.workspace = true
-license.workspace = true
-
-[dependencies]
-common-error = { workspace = true }
-pprof = { version = "0.11", features = [
-    "flamegraph",
-    "prost-codec",
-    "protobuf",
-] }
-prost.workspace = true
-snafu.workspace = true
-tokio.workspace = true
@@ -12,5 +12,48 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.

+#![feature(lazy_cell)]
+
+use std::path::{Path, PathBuf};
+use std::process::Command;
+use std::sync::LazyLock;
+
 pub mod ports;
 pub mod temp_dir;
+
+// Rust is working on an env possibly named `CARGO_WORKSPACE_DIR` to find the root path to the
+// workspace, see https://github.com/rust-lang/cargo/issues/3946.
+// Until then, use this verbose way.
+static WORKSPACE_ROOT: LazyLock<PathBuf> = LazyLock::new(|| {
+    let output = Command::new(env!("CARGO"))
+        .args(["locate-project", "--workspace", "--message-format=plain"])
+        .output()
+        .unwrap()
+        .stdout;
+    let cargo_path = Path::new(std::str::from_utf8(&output).unwrap().trim());
+    cargo_path.parent().unwrap().to_path_buf()
+});
+
+/// Find the absolute path to a file or a directory in the workspace.
+/// The input `path` should be the relative path of the file or directory from workspace root.
+///
+/// For example, if the greptimedb project is placed under directory "/foo/bar/greptimedb/",
+/// and this function is invoked with path = "/src/common/test-util/src/lib.rs", you will get the
+/// absolute path to this file.
+///
+/// The return value is [PathBuf]. This is to adapt the Windows file system's style.
+/// However, the input argument is Unix style, this is to give user the most convenience.
+pub fn find_workspace_path(path: &str) -> PathBuf {
+    let mut buf = WORKSPACE_ROOT.clone();
+
+    // Manually "canonicalize" to avoid annoy Windows specific "\\?" path prefix.
+    path.split('/').for_each(|x| {
+        if x == ".." {
+            buf.pop();
+        } else if x != "." {
+            buf = buf.join(x);
+        }
+    });
+
+    buf
+}
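A hypothetical usage of the new helper (the fixture path is an example taken from this diff): tests resolve data files from the workspace root instead of CARGO_MANIFEST_DIR, and the manual segment folding avoids the "\\?\" prefix that std::fs::canonicalize adds on Windows.

    use common_test_util::find_workspace_path;

    fn csv_fixture() -> String {
        // Unix-style input, platform-native PathBuf output.
        find_workspace_path("/src/common/datasource/tests/csv/basic.csv")
            .display()
            .to_string()
    }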
@@ -15,10 +15,11 @@
 use std::fmt::{Display, Formatter};
 use std::str::FromStr;

-use chrono::{Local, LocalResult, NaiveDateTime, TimeZone};
+use chrono::{LocalResult, NaiveDateTime};
 use serde::{Deserialize, Serialize};

 use crate::error::{Error, InvalidDateStrSnafu, Result};
+use crate::util::{format_utc_datetime, local_datetime_to_utc};

 const DATETIME_FORMAT: &str = "%F %T";
 const DATETIME_FORMAT_WITH_TZ: &str = "%F %T%z";
@@ -35,9 +36,7 @@ impl Display for DateTime {
         write!(
             f,
             "{}",
-            Local {}
-                .from_utc_datetime(&abs_time)
-                .format(DATETIME_FORMAT_WITH_TZ)
+            format_utc_datetime(&abs_time, DATETIME_FORMAT_WITH_TZ)
         )
     } else {
         write!(f, "DateTime({})", self.0)
@@ -56,13 +55,12 @@ impl FromStr for DateTime {

     fn from_str(s: &str) -> Result<Self> {
         let s = s.trim();
-        let local = Local {};
         let timestamp = if let Ok(d) = NaiveDateTime::parse_from_str(s, DATETIME_FORMAT) {
-            match local.from_local_datetime(&d) {
+            match local_datetime_to_utc(&d) {
                 LocalResult::None => {
                     return InvalidDateStrSnafu { raw: s }.fail();
                 }
-                LocalResult::Single(d) | LocalResult::Ambiguous(d, _) => d.naive_utc().timestamp(),
+                LocalResult::Single(d) | LocalResult::Ambiguous(d, _) => d.timestamp(),
             }
         } else if let Ok(v) = chrono::DateTime::parse_from_str(s, DATETIME_FORMAT_WITH_TZ) {
             v.timestamp()
@@ -15,12 +15,12 @@
 use std::cmp::Ordering;
 use std::hash::{Hash, Hasher};

-use chrono::offset::Local;
 use chrono::{NaiveDateTime, NaiveTime, TimeZone as ChronoTimeZone, Utc};
 use serde::{Deserialize, Serialize};

 use crate::timestamp::TimeUnit;
 use crate::timezone::TimeZone;
+use crate::util::format_utc_datetime;

 /// Time value, represents the elapsed time since midnight in the unit of `TimeUnit`.
 #[derive(Debug, Clone, Default, Copy, Serialize, Deserialize)]
@@ -119,10 +119,7 @@ impl Time {
             Some(TimeZone::Named(tz)) => {
                 format!("{}", tz.from_utc_datetime(&datetime).format(pattern))
             }
-            None => {
-                let local = Local {};
-                format!("{}", local.from_utc_datetime(&datetime).format(pattern))
-            }
+            None => format_utc_datetime(&datetime, pattern),
         }
     } else {
         format!("[Time{}: {}]", self.unit, self.value)
@@ -20,7 +20,6 @@ use std::str::FromStr;
 use std::time::Duration;

 use arrow::datatypes::TimeUnit as ArrowTimeUnit;
-use chrono::offset::Local;
 use chrono::{DateTime, LocalResult, NaiveDateTime, TimeZone as ChronoTimeZone, Utc};
 use serde::{Deserialize, Serialize};
 use snafu::{OptionExt, ResultExt};
@@ -28,7 +27,7 @@ use snafu::{OptionExt, ResultExt};
 use crate::error;
 use crate::error::{ArithmeticOverflowSnafu, Error, ParseTimestampSnafu, TimestampOverflowSnafu};
 use crate::timezone::TimeZone;
-use crate::util::div_ceil;
+use crate::util::{div_ceil, format_utc_datetime, local_datetime_to_utc};

 #[derive(Debug, Clone, Default, Copy, Serialize, Deserialize)]
 pub struct Timestamp {
@@ -195,10 +194,7 @@ impl Timestamp {
             Some(TimeZone::Named(tz)) => {
                 format!("{}", tz.from_utc_datetime(&v).format(pattern))
             }
-            None => {
-                let local = Local {};
-                format!("{}", local.from_utc_datetime(&v).format(pattern))
-            }
+            None => format_utc_datetime(&v, pattern),
         }
     } else {
         format!("[Timestamp{}: {}]", self.unit, self.value)
@@ -265,18 +261,11 @@ fn naive_datetime_to_timestamp(
     s: &str,
     datetime: NaiveDateTime,
 ) -> crate::error::Result<Timestamp> {
-    let l = Local {};
-
-    match l.from_local_datetime(&datetime) {
+    match local_datetime_to_utc(&datetime) {
         LocalResult::None => ParseTimestampSnafu { raw: s }.fail(),
-        LocalResult::Single(local_datetime) => Ok(Timestamp::new(
-            local_datetime.with_timezone(&Utc).timestamp_nanos(),
-            TimeUnit::Nanosecond,
-        )),
-        LocalResult::Ambiguous(local_datetime, _) => Ok(Timestamp::new(
-            local_datetime.with_timezone(&Utc).timestamp_nanos(),
-            TimeUnit::Nanosecond,
-        )),
+        LocalResult::Single(utc) | LocalResult::Ambiguous(utc, _) => {
+            Ok(Timestamp::new(utc.timestamp_nanos(), TimeUnit::Nanosecond))
+        }
     }
 }

@@ -421,7 +410,7 @@ impl Hash for Timestamp {
 mod tests {
     use std::collections::hash_map::DefaultHasher;

-    use chrono::Offset;
+    use chrono::{Local, Offset};
     use rand::Rng;
     use serde_json::Value;

@@ -15,13 +15,14 @@
 use std::fmt::Display;
 use std::str::FromStr;

-use chrono::{FixedOffset, Local};
+use chrono::{FixedOffset, Local, Offset};
 use chrono_tz::Tz;
 use snafu::{OptionExt, ResultExt};

 use crate::error::{
     InvalidTimeZoneOffsetSnafu, ParseOffsetStrSnafu, ParseTimeZoneNameSnafu, Result,
 };
+use crate::util::find_tz_from_env;

 #[derive(Debug, Clone, PartialEq, Eq)]
 pub enum TimeZone {
@@ -87,7 +88,11 @@ impl Display for TimeZone {

 #[inline]
 pub fn system_time_zone_name() -> String {
-    Local::now().offset().to_string()
+    if let Some(tz) = find_tz_from_env() {
+        Local::now().with_timezone(&tz).offset().fix().to_string()
+    } else {
+        Local::now().offset().to_string()
+    }
 }

 #[cfg(test)]
@@ -12,6 +12,38 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.

+use std::str::FromStr;
+
+use chrono::offset::Local;
+use chrono::{LocalResult, NaiveDateTime, TimeZone};
+use chrono_tz::Tz;
+
+pub fn format_utc_datetime(utc: &NaiveDateTime, pattern: &str) -> String {
+    if let Some(tz) = find_tz_from_env() {
+        format!("{}", tz.from_utc_datetime(utc).format(pattern))
+    } else {
+        format!("{}", Local.from_utc_datetime(utc).format(pattern))
+    }
+}
+
+pub fn local_datetime_to_utc(local: &NaiveDateTime) -> LocalResult<NaiveDateTime> {
+    if let Some(tz) = find_tz_from_env() {
+        tz.from_local_datetime(local).map(|x| x.naive_utc())
+    } else {
+        Local.from_local_datetime(local).map(|x| x.naive_utc())
+    }
+}
+
+pub fn find_tz_from_env() -> Option<Tz> {
+    // Windows does not support "TZ" env variable, which is used in the `Local` timezone under Unix.
+    // However, we are used to set "TZ" env as the default timezone without actually providing a
+    // timezone argument (especially in tests), and it's very convenient to do so, we decide to make
+    // it work under Windows as well.
+    std::env::var("TZ")
+        .ok()
+        .and_then(|tz| Tz::from_str(&tz).ok())
+}
+
 /// Returns the time duration since UNIX_EPOCH in milliseconds.
 pub fn current_time_millis() -> i64 {
     chrono::Utc::now().timestamp_millis()
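A small sketch (not from this commit; it assumes the module is reachable as `common_time::util`) of the behavior these helpers enable: setting the TZ variable picks the timezone on every platform, including Windows, where chrono's `Local` ignores TZ.

    use common_time::util::find_tz_from_env;

    fn main() {
        std::env::set_var("TZ", "Asia/Shanghai");
        let tz = find_tz_from_env().expect("TZ should parse as an IANA timezone name");
        println!("resolved timezone: {tz}");
    }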
@@ -22,7 +22,7 @@ common-telemetry = { workspace = true }
 futures-util.workspace = true
 futures.workspace = true
 protobuf = { version = "2", features = ["bytes"] }
-raft-engine = "0.3"
+raft-engine = { git = "https://github.com/tikv/raft-engine.git", rev = "2dcaf5beeea3d5de9ec9c7133a2451d00f508f52" }
 snafu = { version = "0.7", features = ["backtraces"] }
 store-api = { workspace = true }
 tokio-util.workspace = true
@@ -12,14 +12,16 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
+use std::path::Path;
+
 use crate::raft_engine::log_store::RaftEngineLogStore;
 use crate::LogConfig;
 
 /// Create a write log for the provided path, used for test.
-pub async fn create_tmp_local_file_log_store(path: &str) -> RaftEngineLogStore {
+pub async fn create_tmp_local_file_log_store<P: AsRef<Path>>(path: P) -> RaftEngineLogStore {
     let cfg = LogConfig {
         file_size: 128 * 1024,
-        log_file_dir: path.to_string(),
+        log_file_dir: path.as_ref().display().to_string(),
         ..Default::default()
     };
     RaftEngineLogStore::try_new(cfg).await.unwrap()
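The switch to `AsRef<Path>` above lets callers pass `&str`, `String`, `&Path`, or `PathBuf` without converting at every call site, which matters once test paths come from `Path::join` instead of string concatenation. A minimal standalone sketch of the same pattern (hypothetical helper name, not part of this commit):

use std::path::Path;

// Hypothetical demo of the generic-parameter pattern used above.
fn log_dir_string<P: AsRef<Path>>(path: P) -> String {
    path.as_ref().display().to_string()
}

fn main() {
    // All of these compile against the single generic signature.
    let a = log_dir_string("wal");
    let b = log_dir_string(String::from("wal"));
    let c = log_dir_string(std::env::temp_dir().join("wal"));
    println!("{a} {b} {c}");
}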
@@ -88,7 +88,7 @@ impl MetaSrvOptions {
 // Options for datanode.
 #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Default)]
 pub struct DatanodeOptions {
-    client_options: DatanodeClientOptions,
+    pub client_options: DatanodeClientOptions,
 }
 
 // Options for datanode client.
@@ -23,7 +23,6 @@ use datatypes::schema::ColumnSchema;
 use log_store::raft_engine::log_store::RaftEngineLogStore;
 use log_store::test_util::log_store_util;
 use object_store::services::Fs;
-use object_store::util::join_dir;
 use object_store::ObjectStore;
 use store_api::storage::RegionId;
 
@@ -79,9 +78,9 @@ impl TestEnv {
     }
 
     async fn create_log_and_object_store(&self) -> (RaftEngineLogStore, ObjectStore) {
-        let data_home = self.data_home.path().to_str().unwrap();
-        let wal_path = join_dir(data_home, "wal");
-        let data_path = join_dir(data_home, "data");
+        let data_home = self.data_home.path();
+        let wal_path = data_home.join("wal");
+        let data_path = data_home.join("data").as_path().display().to_string();
 
         let log_store = log_store_util::create_tmp_local_file_log_store(&wal_path).await;
         let mut builder = Fs::default();
@@ -99,13 +98,20 @@ impl TestEnv {
         checkpoint_interval: u64,
         initial_metadata: Option<RegionMetadataRef>,
     ) -> Result<Option<RegionManifestManager>> {
-        let data_home = self.data_home.path().to_str().unwrap();
-        let manifest_dir = join_dir(data_home, "manifest");
+        let data_home = self.data_home.path();
+        let manifest_dir = data_home.join("manifest").as_path().display().to_string();
 
         let mut builder = Fs::default();
         builder.root(&manifest_dir);
         let object_store = ObjectStore::new(builder).unwrap().finish();
 
+        // The "manifest_dir" here should be the relative path from the `object_store`'s root.
+        // Otherwise OpenDAL's list operation would fail with "StripPrefixError". This is
+        // because the `object_store`'s root path is "canonicalize"d; and on Windows,
+        // canonicalizing a path prepends "\\?\" to it. This behavior makes the previously
+        // happen-to-be-working list on an absolute path fail on Windows.
+        let manifest_dir = "/".to_string();
+
         let manifest_opts = RegionManifestOptions {
             manifest_dir,
             object_store,
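For reference on the "\\?\" quirk described in the comment above: on Windows, `std::fs::canonicalize` returns an extended-length path, which is why prefix-stripping against the original, un-canonicalized root fails. A minimal standalone sketch (illustrative only, not part of this commit):

use std::fs;
use std::path::PathBuf;

fn main() -> std::io::Result<()> {
    let dir: PathBuf = std::env::temp_dir().join("canon_demo");
    fs::create_dir_all(&dir)?;
    // On Windows this typically prints something like \\?\C:\Users\...\Temp\canon_demo,
    // so naive prefix matching against the un-canonicalized root no longer holds.
    println!("{}", dir.canonicalize()?.display());
    Ok(())
}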
@@ -13,6 +13,7 @@
 // limitations under the License.
 
 use std::any::Any;
+use std::time::Duration;
 
 use common_error::ext::{BoxedError, ErrorExt};
 use common_error::status_code::StatusCode;
@@ -234,6 +235,12 @@ pub enum Error {
         location
     ))]
     TimeIndexNotFound { table: String, location: Location },
+
+    #[snafu(display("Failed to add duration '{:?}' to SystemTime, overflowed", duration))]
+    AddSystemTimeOverflow {
+        duration: Duration,
+        location: Location,
+    },
 }
 
 impl ErrorExt for Error {
@@ -253,7 +260,8 @@ impl ErrorExt for Error {
             | ParseFloat { .. }
             | MissingRequiredField { .. }
             | BuildRegex { .. }
-            | ConvertSchema { .. } => StatusCode::InvalidArguments,
+            | ConvertSchema { .. }
+            | AddSystemTimeOverflow { .. } => StatusCode::InvalidArguments,
 
             BuildBackend { .. } | ListObjects { .. } => StatusCode::StorageUnavailable,
             EncodeSubstraitLogicalPlan { source, .. } => source.status_code(),
@@ -24,14 +24,14 @@ use common_telemetry::timer;
 use promql_parser::parser::ast::{Extension as NodeExtension, ExtensionExpr};
 use promql_parser::parser::Expr::Extension;
 use promql_parser::parser::{EvalStmt, Expr, ValueType};
-use snafu::ResultExt;
+use snafu::{OptionExt, ResultExt};
 use sql::dialect::GreptimeDbDialect;
 use sql::parser::ParserContext;
 use sql::statements::statement::Statement;
 
 use crate::error::{
-    MultipleStatementsSnafu, ParseFloatSnafu, ParseTimestampSnafu, QueryParseSnafu, Result,
-    UnimplementedSnafu,
+    AddSystemTimeOverflowSnafu, MultipleStatementsSnafu, ParseFloatSnafu, ParseTimestampSnafu,
+    QueryParseSnafu, Result, UnimplementedSnafu,
 };
 use crate::metrics::{METRIC_PARSE_PROMQL_ELAPSED, METRIC_PARSE_SQL_ELAPSED};
 
@@ -177,24 +177,17 @@ impl QueryLanguageParser {
         }
 
         // try float format
-        timestamp
+        let secs = timestamp
             .parse::<f64>()
             .context(ParseFloatSnafu { raw: timestamp })
-            .map(|float| {
-                let duration = Duration::from_secs_f64(float);
-                SystemTime::UNIX_EPOCH
-                    .checked_add(duration)
-                    .unwrap_or(max_system_timestamp())
-            })
             // also report rfc3339 error if float parsing fails
-            .map_err(|_| rfc3339_result.unwrap_err())
-    }
-}
-
-fn max_system_timestamp() -> SystemTime {
-    SystemTime::UNIX_EPOCH
-        .checked_add(Duration::from_secs(std::i64::MAX as u64))
-        .unwrap()
+            .map_err(|_| rfc3339_result.unwrap_err())?;
+
+        let duration = Duration::from_secs_f64(secs);
+        SystemTime::UNIX_EPOCH
+            .checked_add(duration)
+            .context(AddSystemTimeOverflowSnafu { duration })
+    }
 }
 
 macro_rules! define_node_ast_extension {
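The rewrite above replaces a silent clamp to a sentinel timestamp with an explicit overflow error: `checked_add` returns `Option`, and snafu's `OptionExt::context` turns the `None` into a typed error. A minimal standalone sketch of the same pattern, with hypothetical error names (not the crate's real ones):

use std::time::{Duration, SystemTime};

use snafu::{OptionExt, Snafu};

#[derive(Debug, Snafu)]
#[snafu(display("Failed to add duration '{:?}' to SystemTime, overflowed", duration))]
struct AddSystemTimeOverflow {
    duration: Duration,
}

fn epoch_plus(secs: f64) -> Result<SystemTime, AddSystemTimeOverflow> {
    let duration = Duration::from_secs_f64(secs);
    // `checked_add` yields None on overflow; `context` maps that to the error.
    SystemTime::UNIX_EPOCH
        .checked_add(duration)
        .context(AddSystemTimeOverflowSnafu { duration })
}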
@@ -291,11 +284,6 @@ mod test {
             ),
             ("0.000", SystemTime::UNIX_EPOCH),
             ("00", SystemTime::UNIX_EPOCH),
-            (
-                // i64::MAX + 1
-                "9223372036854775808.000",
-                max_system_timestamp(),
-            ),
             (
                 "2015-07-01T20:10:51.781Z",
                 SystemTime::UNIX_EPOCH
@@ -320,6 +308,14 @@ mod test {
             // assert difference < 0.1 second
             assert!(result.abs_diff(expected) < 100);
         }
+
+        // i64::MAX + 1
+        let timestamp = "9223372036854775808.000";
+        let result = QueryLanguageParser::parse_promql_timestamp(timestamp);
+        assert_eq!(
+            result.unwrap_err().to_string(),
+            "Failed to add duration '9223372036854775808s' to SystemTime, overflowed"
+        );
     }
 
     #[test]
@@ -331,6 +327,7 @@ mod test {
             step: "1d".to_string(),
         };
 
+        #[cfg(not(windows))]
         let expected = String::from(
             "\
             Promql(EvalStmt { \
@@ -346,6 +343,22 @@ mod test {
             })",
         );
+
+        // Windows has different debug output for SystemTime.
+        #[cfg(windows)]
+        let expected = String::from(
+            "\
+            Promql(EvalStmt { \
+            expr: VectorSelector(VectorSelector { \
+            name: Some(\"http_request\"), \
+            matchers: Matchers { matchers: [] }, \
+            offset: None, at: None }), \
+            start: SystemTime { intervals: 132892460400000000 }, \
+            end: SystemTime { intervals: 133207820400000000 }, \
+            interval: 86400s, \
+            lookback_delta: 300s \
+            })",
+        );
 
         let result = QueryLanguageParser::parse_promql(&promql).unwrap();
         assert_eq!(format!("{result:?}"), expected);
     }
@@ -334,7 +334,7 @@ impl PyVector {
             let ret = Self::from(ret).into_py(py);
             Ok(ret)
         } else if let Ok(slice) = needle.downcast::<PySlice>(py) {
-            let indices = slice.indices(self.len() as i64)?;
+            let indices = slice.indices(self.len() as _)?;
             let (start, stop, step, _slicelength) = (
                 indices.start,
                 indices.stop,
@@ -7,7 +7,7 @@ license.workspace = true
 [features]
 dashboard = []
 mem-prof = ["dep:common-mem-prof"]
-pprof = ["dep:common-pprof"]
+pprof = ["dep:pprof"]
 
 [dependencies]
 aide = { version = "0.9", features = ["axum"] }
@@ -26,7 +26,6 @@ common-error = { workspace = true }
 common-grpc = { workspace = true }
 common-grpc-expr = { workspace = true }
 common-mem-prof = { workspace = true, optional = true }
-common-pprof = { workspace = true, optional = true }
 common-query = { workspace = true }
 common-recordbatch = { workspace = true }
 common-runtime = { workspace = true }
@@ -61,6 +60,11 @@ parking_lot = "0.12"
 pgwire = "0.16"
 pin-project = "1.0"
 postgres-types = { version = "0.2", features = ["with-chrono-0_4"] }
+pprof = { version = "0.11", features = [
+    "flamegraph",
+    "prost-codec",
+    "protobuf",
+], optional = true }
 promql-parser = "0.1.1"
 prost.workspace = true
 query = { workspace = true }
@@ -80,7 +84,6 @@ snap = "1"
 sql = { workspace = true }
 strum = { version = "0.24", features = ["derive"] }
 table = { workspace = true }
-tikv-jemalloc-ctl = { version = "0.5", features = ["use_std"] }
 tokio-rustls = "0.24"
 tokio-stream = { version = "0.1", features = ["net"] }
 tokio.workspace = true
@@ -89,6 +92,9 @@ tonic.workspace = true
 tower = { version = "0.4", features = ["full"] }
 tower-http = { version = "0.3", features = ["full"] }
+
+[target.'cfg(not(windows))'.dependencies]
+tikv-jemalloc-ctl = { version = "0.5", features = ["use_std"] }
 
 [dev-dependencies]
 axum-test-helper = { git = "https://github.com/sunng87/axum-test-helper.git", branch = "patch-1" }
 catalog = { workspace = true, features = ["testing"] }
@@ -28,12 +28,41 @@ fn fetch_dashboard_assets() {
 
     let message = "Failed to fetch dashboard assets";
     let help = r#"
-You can manually execute './scripts/fetch-dashboard-assets.sh' to see why,
+You can manually execute "fetch-dashboard-assets.sh" to see why,
 or it's a network error, just try again or enable/disable some proxy."#;
 
+    // It's very unlikely that getting the current dir fails here, see `current_dir`'s docs.
+    let mut dir = std::env::current_dir().unwrap();
+    dir.pop();
+    dir.pop();
+    dir.push("scripts");
+
     let out_dir = std::env::var("OUT_DIR").unwrap();
-    let output = Command::new("./fetch-dashboard-assets.sh")
-        .arg(&out_dir)
-        .current_dir("../../scripts")
+
+    #[cfg(windows)]
+    let (program, args) = (
+        "bash",
+        [
+            format!(
+                "/mnt/{}/fetch-dashboard-assets.sh",
+                dir.display()
+                    .to_string()
+                    .to_lowercase()
+                    .replace(':', "")
+                    .replace('\\', "/")
+            ),
+            format!(
+                "/mnt/{}",
+                out_dir.to_lowercase().replace(':', "").replace('\\', "/")
+            ),
+        ],
+    );
+    #[cfg(not(windows))]
+    let (program, args) = ("./fetch-dashboard-assets.sh", [out_dir]);
+
+    let output = Command::new(program)
+        .args(args)
+        .current_dir(dir)
         .stdout(Stdio::piped())
         .spawn()
        .and_then(|p| p.wait_with_output());
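The `#[cfg(windows)]` branch above runs the shell script through WSL's `bash`, which sees Windows drives mounted under `/mnt/<drive-letter>`, so `C:\ws\scripts` becomes `/mnt/c/ws/scripts`. A minimal sketch of that mapping in isolation (hypothetical helper, same lowercase/replace steps as the build script):

// Hypothetical standalone demo of the Windows-path-to-WSL-path conversion.
fn to_wsl_path(win: &str) -> String {
    format!(
        "/mnt/{}",
        win.to_lowercase().replace(':', "").replace('\\', "/")
    )
}

fn main() {
    assert_eq!(to_wsl_path(r"C:\ws\scripts"), "/mnt/c/ws/scripts");
}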
@@ -297,11 +297,8 @@ pub enum Error {
     #[snafu(display("Failed to dump pprof data, source: {}", source))]
     DumpPprof { source: common_pprof::Error },
 
-    #[snafu(display("Failed to update jemalloc metrics, source: {source}, location: {location}"))]
-    UpdateJemallocMetrics {
-        source: tikv_jemalloc_ctl::Error,
-        location: Location,
-    },
+    #[snafu(display("{source}"))]
+    Metrics { source: BoxedError },
 
     #[snafu(display("DataFrame operation error, source: {source}, location: {location}"))]
     DataFrame {
@@ -418,7 +415,7 @@ impl ErrorExt for Error {
             #[cfg(feature = "pprof")]
             DumpPprof { source, .. } => source.status_code(),
 
-            UpdateJemallocMetrics { .. } => StatusCode::Internal,
+            Metrics { source } => source.status_code(),
 
             ConvertScalarValue { source, .. } => source.status_code(),
         }
@@ -21,14 +21,13 @@ use axum::extract::{Json, Query, State};
 use axum::response::{IntoResponse, Response};
 use axum::{Extension, Form};
 use common_error::status_code::StatusCode;
-use common_telemetry::{error, timer};
+use common_telemetry::timer;
 use query::parser::PromQuery;
 use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
 use session::context::UserInfo;
 
 use crate::http::{ApiState, GreptimeOptionsConfigState, JsonResponse};
-use crate::metrics::JEMALLOC_COLLECTOR;
 use crate::metrics_handler::MetricsHandler;
 
 #[derive(Debug, Default, Serialize, Deserialize, JsonSchema)]
@@ -141,9 +140,10 @@ pub async fn metrics(
     #[cfg(feature = "metrics-process")]
     crate::metrics::PROCESS_COLLECTOR.collect();
 
-    if let Some(c) = JEMALLOC_COLLECTOR.as_ref() {
+    #[cfg(not(windows))]
+    if let Some(c) = crate::metrics::jemalloc::JEMALLOC_COLLECTOR.as_ref() {
         if let Err(e) = c.update() {
-            error!(e; "Failed to update jemalloc metrics");
+            common_telemetry::error!(e; "Failed to update jemalloc metrics");
         }
     }
     state.render()
@@ -12,6 +12,9 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
+#[cfg(feature = "pprof")]
+mod nix;
+
 #[cfg(feature = "pprof")]
 pub mod handler {
     use std::num::NonZeroI32;
@@ -20,13 +23,13 @@ pub mod handler {
     use axum::extract::Query;
     use axum::http::StatusCode;
     use axum::response::IntoResponse;
-    use common_pprof::Profiling;
     use common_telemetry::logging;
     use schemars::JsonSchema;
     use serde::{Deserialize, Serialize};
     use snafu::ResultExt;
 
     use crate::error::{DumpPprofSnafu, Result};
+    use crate::http::pprof::nix::Profiling;
 
     /// Output format.
     #[derive(Debug, Serialize, Deserialize, JsonSchema)]
@@ -12,22 +12,16 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
+#[cfg(not(windows))]
+pub(crate) mod jemalloc;
+
 use std::task::{Context, Poll};
 use std::time::Instant;
 
-use common_telemetry::error;
 use hyper::Body;
-use metrics::gauge;
-use once_cell::sync::Lazy;
-use snafu::ResultExt;
-use tikv_jemalloc_ctl::stats::{allocated_mib, resident_mib};
-use tikv_jemalloc_ctl::{epoch, epoch_mib, stats};
 use tonic::body::BoxBody;
 use tower::{Layer, Service};
 
-use crate::error;
-use crate::error::UpdateJemallocMetricsSnafu;
-
 pub(crate) const METRIC_DB_LABEL: &str = "db";
 pub(crate) const METRIC_CODE_LABEL: &str = "code";
 pub(crate) const METRIC_TYPE_LABEL: &str = "type";
@@ -80,60 +74,15 @@ pub(crate) const METRIC_GRPC_REQUESTS_TOTAL: &str = "servers.grpc_requests_total";
 pub(crate) const METRIC_GRPC_REQUESTS_ELAPSED: &str = "servers.grpc_requests_elapsed";
 pub(crate) const METRIC_METHOD_LABEL: &str = "method";
 pub(crate) const METRIC_PATH_LABEL: &str = "path";
-pub(crate) const METRIC_JEMALLOC_RESIDENT: &str = "sys.jemalloc.resident";
-pub(crate) const METRIC_JEMALLOC_ALLOCATED: &str = "sys.jemalloc.allocated";
 
 /// Prometheus style process metrics collector.
 #[cfg(feature = "metrics-process")]
-pub(crate) static PROCESS_COLLECTOR: Lazy<metrics_process::Collector> = Lazy::new(|| {
-    let collector = metrics_process::Collector::default();
-    // Describe collector.
-    collector.describe();
-    collector
-});
-
-pub(crate) static JEMALLOC_COLLECTOR: Lazy<Option<JemallocCollector>> = Lazy::new(|| {
-    let collector = JemallocCollector::try_new()
-        .map_err(|e| {
-            error!(e; "Failed to retrieve jemalloc metrics");
-            e
-        })
-        .ok();
-    collector.map(|c| {
-        if let Err(e) = c.update() {
-            error!(e; "Failed to update jemalloc metrics");
-        };
-        c
-    })
-});
-
-pub(crate) struct JemallocCollector {
-    epoch: epoch_mib,
-    allocated: allocated_mib,
-    resident: resident_mib,
-}
-
-impl JemallocCollector {
-    pub(crate) fn try_new() -> error::Result<Self> {
-        let e = epoch::mib().context(UpdateJemallocMetricsSnafu)?;
-        let allocated = stats::allocated::mib().context(UpdateJemallocMetricsSnafu)?;
-        let resident = stats::resident::mib().context(UpdateJemallocMetricsSnafu)?;
-        Ok(Self {
-            epoch: e,
-            allocated,
-            resident,
-        })
-    }
-
-    pub(crate) fn update(&self) -> error::Result<()> {
-        let _ = self.epoch.advance().context(UpdateJemallocMetricsSnafu)?;
-        let allocated = self.allocated.read().context(UpdateJemallocMetricsSnafu)?;
-        let resident = self.resident.read().context(UpdateJemallocMetricsSnafu)?;
-        gauge!(METRIC_JEMALLOC_ALLOCATED, allocated as f64);
-        gauge!(METRIC_JEMALLOC_RESIDENT, resident as f64);
-        Ok(())
-    }
-}
+pub(crate) static PROCESS_COLLECTOR: once_cell::sync::Lazy<metrics_process::Collector> =
+    once_cell::sync::Lazy::new(|| {
+        let collector = metrics_process::Collector::default();
+        collector.describe();
+        collector
+    });
 
 // Based on https://github.com/hyperium/tonic/blob/master/examples/src/tower/server.rs
 // See https://github.com/hyperium/tonic/issues/242
69
src/servers/src/metrics/jemalloc.rs
Normal file
@@ -0,0 +1,69 @@
+// Copyright 2023 Greptime Team
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+mod error;
+
+use common_telemetry::error;
+use error::UpdateJemallocMetricsSnafu;
+use metrics::gauge;
+use once_cell::sync::Lazy;
+use snafu::ResultExt;
+use tikv_jemalloc_ctl::stats::{allocated_mib, resident_mib};
+use tikv_jemalloc_ctl::{epoch, epoch_mib, stats};
+
+pub(crate) const METRIC_JEMALLOC_RESIDENT: &str = "sys.jemalloc.resident";
+pub(crate) const METRIC_JEMALLOC_ALLOCATED: &str = "sys.jemalloc.allocated";
+
+pub(crate) static JEMALLOC_COLLECTOR: Lazy<Option<JemallocCollector>> = Lazy::new(|| {
+    let collector = JemallocCollector::try_new()
+        .map_err(|e| {
+            error!(e; "Failed to retrieve jemalloc metrics");
+            e
+        })
+        .ok();
+    collector.map(|c| {
+        if let Err(e) = c.update() {
+            error!(e; "Failed to update jemalloc metrics");
+        };
+        c
+    })
+});
+
+pub(crate) struct JemallocCollector {
+    epoch: epoch_mib,
+    allocated: allocated_mib,
+    resident: resident_mib,
+}
+
+impl JemallocCollector {
+    pub(crate) fn try_new() -> crate::error::Result<Self> {
+        let e = epoch::mib().context(UpdateJemallocMetricsSnafu)?;
+        let allocated = stats::allocated::mib().context(UpdateJemallocMetricsSnafu)?;
+        let resident = stats::resident::mib().context(UpdateJemallocMetricsSnafu)?;
+        Ok(Self {
+            epoch: e,
+            allocated,
+            resident,
+        })
+    }
+
+    pub(crate) fn update(&self) -> crate::error::Result<()> {
+        let _ = self.epoch.advance().context(UpdateJemallocMetricsSnafu)?;
+        let allocated = self.allocated.read().context(UpdateJemallocMetricsSnafu)?;
+        let resident = self.resident.read().context(UpdateJemallocMetricsSnafu)?;
+        gauge!(METRIC_JEMALLOC_ALLOCATED, allocated as f64);
+        gauge!(METRIC_JEMALLOC_RESIDENT, resident as f64);
+        Ok(())
+    }
+}
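One non-obvious detail in the collector above: jemalloc snapshots its statistics at epoch boundaries, so `tikv_jemalloc_ctl` readers must advance the epoch before each read to get fresh numbers, which is why `update` advances it first. A minimal standalone sketch of the same read pattern, without the metrics plumbing (assumes tikv-jemalloc-ctl 0.5 and a jemalloc global allocator):

use tikv_jemalloc_ctl::{epoch, stats};

fn main() -> Result<(), tikv_jemalloc_ctl::Error> {
    // Advance the epoch to refresh jemalloc's cached statistics.
    epoch::advance()?;
    let allocated = stats::allocated::read()?;
    let resident = stats::resident::read()?;
    println!("allocated: {allocated} bytes, resident: {resident} bytes");
    Ok(())
}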
49
src/servers/src/metrics/jemalloc/error.rs
Normal file
@@ -0,0 +1,49 @@
+// Copyright 2023 Greptime Team
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+use std::any::Any;
+
+use common_error::ext::{BoxedError, ErrorExt};
+use common_error::status_code::StatusCode;
+use snafu::{Location, Snafu};
+
+#[derive(Debug, Snafu)]
+#[snafu(visibility(pub))]
+pub enum Error {
+    #[snafu(display("Failed to update jemalloc metrics, source: {source}, location: {location}"))]
+    UpdateJemallocMetrics {
+        source: tikv_jemalloc_ctl::Error,
+        location: Location,
+    },
+}
+
+impl ErrorExt for Error {
+    fn status_code(&self) -> StatusCode {
+        match self {
+            Error::UpdateJemallocMetrics { .. } => StatusCode::Internal,
+        }
+    }
+
+    fn as_any(&self) -> &dyn Any {
+        self
+    }
+}
+
+impl From<Error> for crate::error::Error {
+    fn from(e: Error) -> Self {
+        Self::Metrics {
+            source: BoxedError::new(e),
+        }
+    }
+}
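The `From<Error>` impl above is what lets this module keep a private, platform-specific error type while still flowing into the crate-wide `Error::Metrics { source: BoxedError }` variant via `?`. A minimal standalone sketch of the boxing pattern, with hypothetical stand-in types (not the real common_error API):

use std::fmt;

// Hypothetical stand-ins for common_error's BoxedError and the crate-level error.
#[derive(Debug)]
struct BoxedError(Box<dyn std::error::Error + Send + Sync>);

#[derive(Debug)]
enum CrateError {
    Metrics { source: BoxedError },
}

#[derive(Debug)]
struct JemallocError;

impl fmt::Display for JemallocError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "Failed to update jemalloc metrics")
    }
}

impl std::error::Error for JemallocError {}

impl From<JemallocError> for CrateError {
    fn from(e: JemallocError) -> Self {
        // Boxing erases the concrete type, so the crate-level error needs no
        // cfg-gated dependency on the platform-specific module.
        CrateError::Metrics {
            source: BoxedError(Box::new(e)),
        }
    }
}

fn update() -> Result<(), CrateError> {
    let r: Result<(), JemallocError> = Err(JemallocError);
    r?; // `?` applies the From impl to convert the error type.
    Ok(())
}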
@@ -199,8 +199,7 @@ async fn test_shutdown_mysql_server() -> Result<()> {
     for handle in join_handles.iter_mut() {
         let result = handle.await.unwrap();
         assert!(result.is_err());
-        let error = result.unwrap_err().to_string();
-        assert!(error.contains("Connection refused") || error.contains("Connection reset by peer"));
+        assert!(result.unwrap_err().is_fatal());
     }
     Ok(())
 }
@@ -378,7 +378,7 @@ mod tests {
         assert_eq!(expected, actions);
     }
 
-    #[tokio::test]
+    #[tokio::test(flavor = "multi_thread")]
     async fn test_fs_region_manifest_checkpoint_compress() {
         let duration = Duration::from_millis(50);
         let manifest = new_fs_manifest(true, Some(duration)).await;
@@ -20,7 +20,7 @@ use api::v1::meta::Role;
 use client::client_manager::DatanodeClients;
 use client::Client;
 use common_base::Plugins;
-use common_grpc::channel_manager::ChannelManager;
+use common_grpc::channel_manager::{ChannelConfig, ChannelManager};
 use common_meta::peer::Peer;
 use common_meta::DatanodeId;
 use common_runtime::Builder as RuntimeBuilder;
@@ -88,7 +88,8 @@ impl GreptimeDbClusterBuilder {
     pub async fn build(self) -> GreptimeDbCluster {
         let datanodes = self.datanodes.unwrap_or(4);
 
-        let datanode_clients = Arc::new(DatanodeClients::default());
+        let channel_config = ChannelConfig::new().timeout(Duration::from_secs(20));
+        let datanode_clients = Arc::new(DatanodeClients::new(channel_config));
 
         let meta_srv = self.build_metasrv(datanode_clients.clone()).await;
 
@@ -29,8 +29,8 @@ use session::context::{QueryContext, QueryContextRef};
 
 use crate::test_util::check_output_stream;
 use crate::tests::test_util::{
-    both_instances_cases, check_unordered_output_stream, distributed, get_data_dir, standalone,
-    standalone_instance_case, MockInstance,
+    both_instances_cases, check_unordered_output_stream, distributed, find_testing_resource,
+    standalone, standalone_instance_case, MockInstance,
 };
 
 #[apply(both_instances_cases)]
@@ -518,12 +518,7 @@ async fn test_execute_external_create_without_ts_type(instance: Arc<dyn MockInstance>) {
 async fn test_execute_query_external_table_parquet(instance: Arc<dyn MockInstance>) {
     let instance = instance.frontend();
     let format = "parquet";
-    let location = get_data_dir("../tests/data/parquet/various_type.parquet")
-        .canonicalize()
-        .unwrap()
-        .display()
-        .to_string();
+    let location = find_testing_resource("/tests/data/parquet/various_type.parquet");
 
     let table_name = "various_type_parquet";
 
     let output = execute_sql(
@@ -588,12 +583,7 @@ async fn test_execute_query_external_table_parquet(instance: Arc<dyn MockInstance>) {
 async fn test_execute_query_external_table_orc(instance: Arc<dyn MockInstance>) {
     let instance = instance.frontend();
     let format = "orc";
-    let location = get_data_dir("../src/common/datasource/tests/orc/test.orc")
-        .canonicalize()
-        .unwrap()
-        .display()
-        .to_string();
+    let location = find_testing_resource("/src/common/datasource/tests/orc/test.orc");
 
     let table_name = "various_type_orc";
 
     let output = execute_sql(
@@ -668,12 +658,7 @@ async fn test_execute_query_external_table_orc(instance: Arc<dyn MockInstance>) {
 async fn test_execute_query_external_table_csv(instance: Arc<dyn MockInstance>) {
     let instance = instance.frontend();
     let format = "csv";
-    let location = get_data_dir("../tests/data/csv/various_type.csv")
-        .canonicalize()
-        .unwrap()
-        .display()
-        .to_string();
+    let location = find_testing_resource("/tests/data/csv/various_type.csv");
 
     let table_name = "various_type_csv";
 
     let output = execute_sql(
@@ -719,12 +704,7 @@ async fn test_execute_query_external_table_csv(instance: Arc<dyn MockInstance>) {
 async fn test_execute_query_external_table_json(instance: Arc<dyn MockInstance>) {
     let instance = instance.frontend();
     let format = "json";
-    let location = get_data_dir("../tests/data/json/various_type.json")
-        .canonicalize()
-        .unwrap()
-        .display()
-        .to_string();
+    let location = find_testing_resource("/tests/data/json/various_type.json");
 
     let table_name = "various_type_json";
 
     let output = execute_sql(
@@ -776,12 +756,7 @@ async fn test_execute_query_external_table_json(instance: Arc<dyn MockInstance>) {
 async fn test_execute_query_external_table_json_with_schame(instance: Arc<dyn MockInstance>) {
     let instance = instance.frontend();
     let format = "json";
-    let location = get_data_dir("../tests/data/json/various_type.json")
-        .canonicalize()
-        .unwrap()
-        .display()
-        .to_string();
+    let location = find_testing_resource("/tests/data/json/various_type.json");
 
     let table_name = "various_type_json_with_schema";
 
     let output = execute_sql(
@@ -1339,11 +1314,7 @@ async fn test_execute_copy_from_orc(instance: Arc<dyn MockInstance>) {
     )
     .await, Output::AffectedRows(0)));
 
-    let filepath = get_data_dir("../src/common/datasource/tests/orc/test.orc")
-        .canonicalize()
-        .unwrap()
-        .display()
-        .to_string();
+    let filepath = find_testing_resource("/src/common/datasource/tests/orc/test.orc");
 
     let output = execute_sql(
         &instance,
@@ -1376,11 +1347,8 @@ async fn test_cast_type_issue_1594(instance: Arc<dyn MockInstance>) {
         "create table tsbs_cpu(hostname STRING, environment STRING, usage_user DOUBLE, usage_system DOUBLE, usage_idle DOUBLE, usage_nice DOUBLE, usage_iowait DOUBLE, usage_irq DOUBLE, usage_softirq DOUBLE, usage_steal DOUBLE, usage_guest DOUBLE, usage_guest_nice DOUBLE, ts TIMESTAMP TIME INDEX, PRIMARY KEY(hostname));",
     )
     .await, Output::AffectedRows(0)));
-    let filepath = get_data_dir("../src/common/datasource/tests/csv/type_cast.csv")
-        .canonicalize()
-        .unwrap()
-        .display()
-        .to_string();
+    let filepath = find_testing_resource("/src/common/datasource/tests/csv/type_cast.csv");
 
     let output = execute_sql(
         &instance,
@@ -12,11 +12,11 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
-use std::path::PathBuf;
 use std::sync::Arc;
 
 use common_query::Output;
 use common_recordbatch::util;
+use common_test_util::find_workspace_path;
 use frontend::instance::Instance;
 use rstest_reuse::{self, template};
 
@@ -110,8 +110,20 @@ pub(crate) async fn check_unordered_output_stream(output: Output, expected: &str) {
     assert_eq!(pretty_print, expected);
 }
 
-pub fn get_data_dir(path: &str) -> PathBuf {
-    let dir = env!("CARGO_MANIFEST_DIR");
-    PathBuf::from(dir).join(path)
+/// Find the testing file resource under workspace root to be used in object store.
+pub fn find_testing_resource(path: &str) -> String {
+    let p = find_workspace_path(path).display().to_string();
+
+    #[cfg(windows)]
+    let p = {
+        // We need a unix style path even on Windows, because the path is used in object-store
+        // and must be delimited with '/'. Inside the object-store, it will be converted to the
+        // file system path needed in the end.
+        let p = p.replace('\\', "/");
+
+        // Prepend a '/' to indicate it's a file system path when parsed as an object-store url on Windows.
+        format!("/{p}")
+    };
+
+    p
 }
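Concretely, the Windows branch above turns a path like `C:\ws\tests\data\x.csv` into `/C:/ws/tests/data/x.csv`, which object-store style URL parsing accepts as a plain file-system path. A minimal sketch of the transform in isolation (hypothetical helper name):

// Hypothetical standalone demo of the normalization done above on Windows.
fn normalize_for_object_store(win_path: &str) -> String {
    let p = win_path.replace('\\', "/");
    format!("/{p}")
}

fn main() {
    assert_eq!(
        normalize_for_object_store(r"C:\ws\tests\data\x.csv"),
        "/C:/ws/tests/data/x.csv"
    );
}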
@@ -12,6 +12,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
+use std::collections::BTreeMap;
 use std::sync::Arc;
 
 use axum::http::StatusCode;
@@ -411,13 +412,22 @@ pub async fn test_prom_http_api(store_type: StorageType) {
     assert_eq!(res.status(), StatusCode::OK);
     let body = serde_json::from_str::<PrometheusJsonResponse>(&res.text().await).unwrap();
     assert_eq!(body.status, "success");
-    assert_eq!(
-        body.data,
-        serde_json::from_value::<PrometheusResponse>(json!(
-            [{"__name__" : "demo","ts":"1970-01-01 00:00:00+0000","cpu":"1.1","host":"host1","memory":"2.2"}]
-        ))
-        .unwrap()
-    );
+    let PrometheusResponse::Series(mut series) = body.data else {
+        unreachable!()
+    };
+    let actual = series
+        .remove(0)
+        .into_iter()
+        .collect::<BTreeMap<String, String>>();
+    let expected = BTreeMap::from([
+        ("__name__".to_string(), "demo".to_string()),
+        ("ts".to_string(), "1970-01-01 00:00:00+0000".to_string()),
+        ("cpu".to_string(), "1.1".to_string()),
+        ("host".to_string(), "host1".to_string()),
+        ("memory".to_string(), "2.2".to_string()),
+    ]);
+    assert_eq!(actual, expected);
 
     let res = client
         .post("/api/v1/series?match[]=up&match[]=down")
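The rewrite above compares labels through a `BTreeMap` instead of a serialized JSON value, so the assertion no longer depends on the platform-varying order in which label pairs are produced. A minimal standalone sketch of the idea:

use std::collections::BTreeMap;

fn main() {
    // Two label sets with the same pairs in different order.
    let a = vec![("host", "host1"), ("cpu", "1.1")];
    let b = vec![("cpu", "1.1"), ("host", "host1")];

    // Collecting into a BTreeMap sorts by key, making comparison order-insensitive.
    let a: BTreeMap<_, _> = a.into_iter().collect();
    let b: BTreeMap<_, _> = b.into_iter().collect();
    assert_eq!(a, b);
}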
@@ -36,13 +36,11 @@ use crate::util;
 
 const METASRV_ADDR: &str = "127.0.0.1:3002";
 const SERVER_ADDR: &str = "127.0.0.1:4001";
-const STANDALONE_LOG_FILE: &str = "/tmp/greptime-sqlness-standalone.log";
-const METASRV_LOG_FILE: &str = "/tmp/greptime-sqlness-metasrv.log";
-const FRONTEND_LOG_FILE: &str = "/tmp/greptime-sqlness-frontend.log";
 
 const DEFAULT_LOG_LEVEL: &str = "--log-level=debug,hyper=warn,tower=warn,datafusion=warn,reqwest=warn,sqlparser=warn,h2=info,opendal=info";
 
-pub struct Env {}
+pub struct Env {
+    data_home: PathBuf,
+}
 
 #[allow(clippy::print_stdout)]
 #[async_trait]
@@ -51,8 +49,8 @@ impl EnvController for Env {
 
     async fn start(&self, mode: &str, _config: Option<&Path>) -> Self::DB {
         match mode {
-            "standalone" => Self::start_standalone().await,
-            "distributed" => Self::start_distributed().await,
+            "standalone" => self.start_standalone().await,
+            "distributed" => self.start_distributed().await,
             _ => panic!("Unexpected mode: {mode}"),
         }
     }
@@ -65,14 +63,16 @@ impl EnvController for Env {
 
 #[allow(clippy::print_stdout)]
 impl Env {
-    pub async fn start_standalone() -> GreptimeDB {
+    pub fn new(data_home: PathBuf) -> Self {
+        Self { data_home }
+    }
+
+    async fn start_standalone(&self) -> GreptimeDB {
         Self::build_db().await;
 
         let db_ctx = GreptimeDBContext::new();
 
-        let server_process = Self::start_server("standalone", &db_ctx, true).await;
+        let server_process = self.start_server("standalone", &db_ctx, true).await;
 
-        println!("Started, going to test. Log will be write to {STANDALONE_LOG_FILE}");
-
         let client = Client::with_urls(vec![SERVER_ADDR]);
         let db = DB::new(DEFAULT_CATALOG_NAME, DEFAULT_SCHEMA_NAME, client);
@@ -84,22 +84,23 @@ impl Env {
             client: TokioMutex::new(db),
             ctx: db_ctx,
             is_standalone: true,
+            env: Env::new(self.data_home.clone()),
         }
     }
 
-    pub async fn start_distributed() -> GreptimeDB {
+    async fn start_distributed(&self) -> GreptimeDB {
         Self::build_db().await;
 
         let db_ctx = GreptimeDBContext::new();
 
         // start a distributed GreptimeDB
-        let meta_server = Env::start_server("metasrv", &db_ctx, true).await;
+        let meta_server = self.start_server("metasrv", &db_ctx, true).await;
 
-        let datanode_1 = Env::start_server("datanode", &db_ctx, true).await;
-        let datanode_2 = Env::start_server("datanode", &db_ctx, true).await;
-        let datanode_3 = Env::start_server("datanode", &db_ctx, true).await;
+        let datanode_1 = self.start_server("datanode", &db_ctx, true).await;
+        let datanode_2 = self.start_server("datanode", &db_ctx, true).await;
+        let datanode_3 = self.start_server("datanode", &db_ctx, true).await;
 
-        let frontend = Env::start_server("frontend", &db_ctx, true).await;
+        let frontend = self.start_server("frontend", &db_ctx, true).await;
 
         let client = Client::with_urls(vec![SERVER_ADDR]);
         let db = DB::new(DEFAULT_CATALOG_NAME, DEFAULT_SCHEMA_NAME, client);
@@ -111,6 +112,7 @@ impl Env {
             client: TokioMutex::new(db),
             ctx: db_ctx,
             is_standalone: false,
+            env: Env::new(self.data_home.clone()),
         }
     }
 
@@ -120,6 +122,7 @@ impl Env {
     }
 
     async fn start_server(
+        &self,
         subcommand: &str,
         db_ctx: &GreptimeDBContext,
         truncate_log: bool,
@@ -127,17 +130,15 @@ impl Env {
         let log_file_name = match subcommand {
             "datanode" => {
                 db_ctx.incr_datanode_id();
-                format!(
-                    "/tmp/greptime-sqlness-datanode-{}.log",
-                    db_ctx.datanode_id()
-                )
+                format!("greptime-sqlness-datanode-{}.log", db_ctx.datanode_id())
             }
-            "frontend" => FRONTEND_LOG_FILE.to_string(),
-            "metasrv" => METASRV_LOG_FILE.to_string(),
-            "standalone" => STANDALONE_LOG_FILE.to_string(),
+            "frontend" => "greptime-sqlness-frontend.log".to_string(),
+            "metasrv" => "greptime-sqlness-metasrv.log".to_string(),
+            "standalone" => "greptime-sqlness-standalone.log".to_string(),
             _ => panic!("Unexpected subcommand: {subcommand}"),
         };
+        let log_file_name = self.data_home.join(log_file_name).display().to_string();
 
         let log_file = OpenOptions::new()
             .create(true)
             .write(true)
@@ -146,15 +147,15 @@ impl Env {
             .unwrap();
 
         let (args, check_ip_addr) = match subcommand {
-            "datanode" => Self::datanode_start_args(db_ctx),
+            "datanode" => self.datanode_start_args(db_ctx),
             "standalone" => {
                 let args = vec![
                     DEFAULT_LOG_LEVEL.to_string(),
                     subcommand.to_string(),
                     "start".to_string(),
                     "-c".to_string(),
-                    Self::generate_config_file(subcommand, db_ctx),
-                    "--http-addr=0.0.0.0:5001".to_string(),
+                    self.generate_config_file(subcommand, db_ctx),
+                    "--http-addr=127.0.0.1:5001".to_string(),
                 ];
                 (args, SERVER_ADDR.to_string())
             }
@@ -163,8 +164,8 @@ impl Env {
                     DEFAULT_LOG_LEVEL.to_string(),
                     subcommand.to_string(),
                     "start".to_string(),
-                    "--metasrv-addr=0.0.0.0:3002".to_string(),
-                    "--http-addr=0.0.0.0:5003".to_string(),
+                    "--metasrv-addr=127.0.0.1:3002".to_string(),
+                    "--http-addr=127.0.0.1:5003".to_string(),
                 ];
                 (args, SERVER_ADDR.to_string())
             }
@@ -174,7 +175,7 @@ impl Env {
                     subcommand.to_string(),
                     "start".to_string(),
                     "--use-memory-store".to_string(),
-                    "--http-addr=0.0.0.0:5001".to_string(),
+                    "--http-addr=127.0.0.1:5001".to_string(),
                     "--disable-region-failover".to_string(),
                 ];
                 (args, METASRV_ADDR.to_string())
@@ -189,7 +190,12 @@ impl Env {
             );
         }
 
-        let mut process = Command::new("./greptime")
+        #[cfg(not(windows))]
+        let program = "./greptime";
+        #[cfg(windows)]
+        let program = "greptime.exe";
+
+        let mut process = Command::new(program)
             .current_dir(util::get_binary_dir("debug"))
             .args(args)
             .stdout(log_file)
@@ -206,32 +212,31 @@ impl Env {
         process
     }
 
-    fn datanode_start_args(db_ctx: &GreptimeDBContext) -> (Vec<String>, String) {
+    fn datanode_start_args(&self, db_ctx: &GreptimeDBContext) -> (Vec<String>, String) {
         let id = db_ctx.datanode_id();
 
+        let data_home = self
+            .data_home
+            .join(format!("greptimedb_datanode_{}_{id}", db_ctx.time));
+        let wal_dir = data_home.join("wal").display().to_string();
+
         let subcommand = "datanode";
         let mut args = vec![
             DEFAULT_LOG_LEVEL.to_string(),
             subcommand.to_string(),
             "start".to_string(),
         ];
-        args.push(format!("--rpc-addr=0.0.0.0:410{id}"));
-        args.push(format!("--http-addr=0.0.0.0:430{id}"));
-        args.push(format!(
-            "--data-home=/tmp/greptimedb_datanode_{}",
-            db_ctx.time
-        ));
-        args.push(format!(
-            "--wal-dir=/tmp/greptimedb_datanode_{}_{id}/wal",
-            db_ctx.time
-        ));
+        args.push(format!("--rpc-addr=127.0.0.1:410{id}"));
+        args.push(format!("--http-addr=127.0.0.1:430{id}"));
+        args.push(format!("--data-home={}", data_home.display()));
+        args.push(format!("--wal-dir={wal_dir}"));
        args.push(format!("--node-id={id}"));
-        args.push("--metasrv-addr=0.0.0.0:3002".to_string());
-        (args, format!("0.0.0.0:410{id}"))
+        args.push("--metasrv-addr=127.0.0.1:3002".to_string());
+        (args, format!("127.0.0.1:410{id}"))
     }
 
     /// stop and restart the server process
-    async fn restart_server(db: &GreptimeDB) {
+    async fn restart_server(&self, db: &GreptimeDB) {
        {
             let mut server_processes = db.server_processes.lock().unwrap();
             for server_process in server_processes.iter_mut() {
@@ -241,14 +246,14 @@ impl Env {
 
         // check if the server is distributed or standalone
         let new_server_processes = if db.is_standalone {
-            let new_server_process = Env::start_server("standalone", &db.ctx, false).await;
+            let new_server_process = self.start_server("standalone", &db.ctx, false).await;
             vec![new_server_process]
         } else {
             db.ctx.reset_datanode_id();
 
             let mut processes = vec![];
             for _ in 0..3 {
-                let new_server_process = Env::start_server("datanode", &db.ctx, false).await;
+                let new_server_process = self.start_server("datanode", &db.ctx, false).await;
                 processes.push(new_server_process);
             }
             processes
@@ -259,11 +264,13 @@ impl Env {
     }

     /// Generate config file to `/tmp/{subcommand}-{current_time}.toml`
-    fn generate_config_file(subcommand: &str, db_ctx: &GreptimeDBContext) -> String {
+    fn generate_config_file(&self, subcommand: &str, db_ctx: &GreptimeDBContext) -> String {
         let mut tt = TinyTemplate::new();

         let mut path = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
-        path.push(format!("../conf/{subcommand}-test.toml.template"));
+        path.pop();
+        path.push("conf");
+        path.push(format!("{subcommand}-test.toml.template"));
         let template = std::fs::read_to_string(path).unwrap();
         tt.add_template(subcommand, &template).unwrap();

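The path rewrite swaps one push of "../conf/..." for pop() plus component-wise push calls, presumably so the path is assembled from components and displayed with the native separator on every platform. A compile-anywhere sketch (the template file name is illustrative):

use std::path::PathBuf;

fn main() {
    // Navigate to the sibling conf/ directory structurally instead of
    // embedding "../" and "/" in a single string.
    let mut path = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
    path.pop(); // parent of the crate directory
    path.push("conf");
    path.push("standalone-test.toml.template"); // illustrative name
    println!("{}", path.display());
}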
@@ -274,15 +281,24 @@ impl Env {
             procedure_dir: String,
         }

-        let greptimedb_dir = format!("/tmp/greptimedb-{subcommand}-{}", db_ctx.time);
+        let data_home = self
+            .data_home
+            .join(format!("greptimedb-{subcommand}-{}", db_ctx.time));
+        std::fs::create_dir_all(data_home.as_path()).unwrap();
+
+        let wal_dir = data_home.join("wal").display().to_string();
+        let procedure_dir = data_home.join("procedure").display().to_string();
         let ctx = Context {
-            wal_dir: format!("{greptimedb_dir}/wal/"),
-            data_home: format!("{greptimedb_dir}/"),
-            procedure_dir: format!("{greptimedb_dir}/procedure/"),
+            wal_dir,
+            data_home: data_home.display().to_string(),
+            procedure_dir,
         };
         let rendered = tt.render(subcommand, &ctx).unwrap();

-        let conf_file = format!("/tmp/{subcommand}-{}.toml", db_ctx.time);
+        let conf_file = data_home
+            .join(format!("{subcommand}-{}.toml", db_ctx.time))
+            .display()
+            .to_string();
         println!("Generating {subcommand} config file in {conf_file}, full content:\n{rendered}");
         std::fs::write(&conf_file, rendered).unwrap();

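generate_config_file renders wal_dir, data_home, and procedure_dir into a TOML template, and the generated file itself now also lands under data_home instead of /tmp. A hedged sketch of the same TinyTemplate flow; the two-line template here is made up, while the real ones live in conf/*-test.toml.template:

use serde::Serialize;
use tinytemplate::TinyTemplate;

#[derive(Serialize)]
struct Context {
    wal_dir: String,
    data_home: String,
    procedure_dir: String,
}

fn main() {
    // Made-up template; the real templates carry full server config.
    let template = "data_home = '{data_home}'\nwal_dir = '{wal_dir}'";
    let mut tt = TinyTemplate::new();
    tt.add_template("standalone", template).unwrap();

    let data_home = std::env::temp_dir().join("greptimedb-standalone-0");
    std::fs::create_dir_all(&data_home).unwrap();

    let ctx = Context {
        wal_dir: data_home.join("wal").display().to_string(),
        data_home: data_home.display().to_string(),
        procedure_dir: data_home.join("procedure").display().to_string(),
    };
    println!("{}", tt.render("standalone", &ctx).unwrap());
}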
@@ -313,13 +329,14 @@ pub struct GreptimeDB {
     client: TokioMutex<DB>,
     ctx: GreptimeDBContext,
     is_standalone: bool,
+    env: Env,
 }

 #[async_trait]
 impl Database for GreptimeDB {
     async fn query(&self, ctx: QueryContext, query: String) -> Box<dyn Display> {
         if ctx.context.contains_key("restart") {
-            Env::restart_server(self).await;
+            self.env.restart_server(self).await;
         }

         let mut client = self.client.lock().await;
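GreptimeDB now carries the Env that spawned it, so a `restart` key in the query context can call back into instance methods instead of associated functions. A minimal sketch of that ownership shape, using stand-in types:

use std::path::PathBuf;

struct Env {
    data_home: PathBuf,
}

struct GreptimeDB {
    env: Env, // the environment that spawned this database handle
}

impl Env {
    fn restart_server(&self, _db: &GreptimeDB) {
        // would kill the old processes and start new ones under
        // self.data_home, as restart_server does in the diff above
        println!("restarting under {}", self.data_home.display());
    }
}

fn main() {
    let db = GreptimeDB {
        env: Env { data_home: std::env::temp_dir() },
    };
    db.env.restart_server(&db);
}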
@@ -27,6 +27,11 @@ async fn main() {
             "".to_string()
         };

+    #[cfg(windows)]
+    let data_home = std::env::temp_dir();
+    #[cfg(not(windows))]
+    let data_home = std::path::PathBuf::from("/tmp");
+
     let config = ConfigBuilder::default()
         .case_dir(util::get_case_dir())
         .fail_fast(true)
@@ -34,6 +39,6 @@ async fn main() {
         .follow_links(true)
         .build()
         .unwrap();
-    let runner = Runner::new(config, Env {});
+    let runner = Runner::new(config, Env::new(data_home));
     runner.run().await.unwrap();
 }
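The runner picks its root per platform: std::env::temp_dir() on Windows (which honors %TMP%) and the literal /tmp elsewhere. The same selection pattern in isolation:

fn main() {
    // cfg attributes on `let` select exactly one binding per target OS.
    #[cfg(windows)]
    let data_home = std::env::temp_dir(); // e.g. C:\Users\<you>\AppData\Local\Temp
    #[cfg(not(windows))]
    let data_home = std::path::PathBuf::from("/tmp");

    println!("sqlness data home: {}", data_home.display());
}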