Compare commits

...

9 Commits

Author SHA1 Message Date
Conrad Ludgate
ce6bbca8d7 make use of esc string opt 2025-07-18 09:59:38 +01:00
Conrad Ludgate
d6f4dc4949 provide EscapedStr type for faster string encoding 2025-07-18 09:59:38 +01:00
Conrad Ludgate
b8435190d1 make unicode handling cold 2025-07-18 09:59:38 +01:00
Conrad Ludgate
ff08c78489 rather than using bytes[start..i], we can slice the bytes instead and use bytes[..i]. this ends up optimising better 2025-07-18 09:59:35 +01:00
Conrad Ludgate
654be07090 the ESCAPE table contains the escape character we need to insert, so we don't need to roundtrip via the CharEscape enum.
We also don't need to return the 'rest' slice, we can just insert it into the vec

Lastly, we can simplify the hotloop by making write_char_escape cold and moving the vec write inside this fn
2025-07-18 09:56:56 +01:00
Conrad Ludgate
8ba106d832 move str values into str module 2025-07-18 09:46:00 +01:00
Conrad Ludgate
03522b3434 turn KeyEncoder into a marker trait 2025-07-18 09:44:55 +01:00
Conrad Ludgate
e2bd8e4c61 vendor some serialization tests from serde_json 2025-07-18 09:40:22 +01:00
Conrad Ludgate
44201814b9 add benchmark for json str escaping 2025-07-18 09:38:48 +01:00
11 changed files with 430 additions and 154 deletions

1
Cargo.lock generated
View File

@@ -3507,6 +3507,7 @@ dependencies = [
name = "json" name = "json"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"criterion",
"futures", "futures",
"itoa", "itoa",
"ryu", "ryu",

View File

@@ -9,7 +9,6 @@ regex.workspace = true
bytes.workspace = true bytes.workspace = true
anyhow.workspace = true anyhow.workspace = true
crc32c.workspace = true crc32c.workspace = true
criterion.workspace = true
once_cell.workspace = true once_cell.workspace = true
log.workspace = true log.workspace = true
memoffset.workspace = true memoffset.workspace = true
@@ -22,6 +21,7 @@ tracing.workspace = true
postgres_versioninfo.workspace = true postgres_versioninfo.workspace = true
[dev-dependencies] [dev-dependencies]
criterion.workspace = true
env_logger.workspace = true env_logger.workspace = true
postgres.workspace = true postgres.workspace = true

View File

@@ -10,3 +10,8 @@ itoa = "1"
[dev-dependencies] [dev-dependencies]
futures = "0.3" futures = "0.3"
criterion.workspace = true
[[bench]]
name = "escape"
harness = false

View File

@@ -0,0 +1,53 @@
use std::hint::black_box;
use criterion::{Bencher, Criterion, criterion_group, criterion_main};
/// Sample payload with one nested struct; only used to exercise the
/// `format_args!("{:?}", ..)` escaping path in the benchmarks below.
#[derive(Debug)]
#[allow(dead_code)] // fields are only read through the derived Debug impl
struct Foo {
    some_field: Bar,
    some_other_field: String,
}
/// Inner struct for [`Foo`]; carries the long string whose Debug rendering
/// (with embedded quotes) forces the escape path during the benchmark.
#[derive(Debug)]
#[allow(dead_code)] // fields are only read through the derived Debug impl
struct Bar {
    nested_fields: String,
    some_other_value: i32,
}
/// Criterion benchmark entry point for JSON string escaping.
///
/// Covers three encoder paths: a plain `&str` value ("small"), a
/// compile-time pre-escaped static string ("small_static" via `json::esc!`),
/// and a large `format_args!` payload driven through the `Debug` formatter
/// ("large_fmt"), whose embedded `\"` characters require escaping.
pub fn escape(c: &mut Criterion) {
    c.bench_function("small", |b| bench_json_encode_inner(b, "object_key"));
    c.bench_function("small_static", |b| {
        bench_json_encode_inner(b, json::esc!("object_key"));
    });
    c.bench_function("large_fmt", |b| {
        let value = Foo {
            some_field: Bar {
                // Fix: the original chained `.to_string().to_string()` —
                // the second call just cloned the freshly built String.
                nested_fields: "could not connect to database, control plane error \"foo bar\""
                    .to_string(),
                some_other_value: -1,
            },
            some_other_field: "error".to_string(),
        };
        bench_json_encode_inner(b, format_args!("{:?}", &value));
    });
}
// Register the escape benchmarks with criterion's harness (the bench target
// sets `harness = false`, so criterion_main! provides the entry point).
criterion_group!(benches, escape);
criterion_main!(benches);
/// Serializes `v` as a single JSON value into a reused buffer inside the
/// timed loop; the buffer is primed once beforehand so no allocation is
/// measured.
fn bench_json_encode_inner(b: &mut Bencher<'_>, v: impl json::ValueEncoder + Copy) {
    let mut buf = Vec::new();
    // Prime the buffer's capacity outside the timed section.
    json::ValueSer::new(&mut buf).value(black_box(v));
    b.iter(|| {
        buf.clear();
        json::ValueSer::new(&mut buf).value(black_box(v));
        black_box(buf.as_slice());
    });
}

View File

@@ -81,7 +81,8 @@ mod macros;
mod str; mod str;
mod value; mod value;
pub use value::{Null, ValueEncoder}; pub use str::EscapedStr;
pub use value::{KeyEncoder, Null, ValueEncoder};
#[must_use] #[must_use]
/// Serialize a single json value. /// Serialize a single json value.
@@ -164,7 +165,9 @@ impl<'buf> ObjectSer<'buf> {
/// Start a new object entry with the given string key, returning a [`ValueSer`] for the associated value. /// Start a new object entry with the given string key, returning a [`ValueSer`] for the associated value.
#[inline] #[inline]
pub fn key(&mut self, key: impl KeyEncoder) -> ValueSer<'_> { pub fn key(&mut self, key: impl KeyEncoder) -> ValueSer<'_> {
key.write_key(self) // we create a pseudo value to write the key into.
let start = self.start;
self.entry_inner(|buf| key.encode(ValueSer { buf, start }))
} }
/// Write an entry (key-value pair) to the object. /// Write an entry (key-value pair) to the object.
@@ -211,10 +214,6 @@ impl<'buf> ObjectSer<'buf> {
} }
} }
pub trait KeyEncoder {
fn write_key<'a>(self, obj: &'a mut ObjectSer) -> ValueSer<'a>;
}
#[must_use] #[must_use]
/// Serialize a json object. /// Serialize a json object.
pub struct ListSer<'buf> { pub struct ListSer<'buf> {
@@ -279,14 +278,14 @@ impl<'buf> ListSer<'buf> {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use crate::{Null, ValueSer}; use crate::{Null, ValueSer, esc};
#[test] #[test]
fn object() { fn object() {
let mut buf = vec![]; let mut buf = vec![];
let mut object = ValueSer::new(&mut buf).object(); let mut object = ValueSer::new(&mut buf).object();
object.entry("foo", "bar"); object.entry(esc!("foo"), "bar");
object.entry("baz", Null); object.entry(esc!("baz"), Null);
object.finish(); object.finish();
assert_eq!(buf, br#"{"foo":"bar","baz":null}"#); assert_eq!(buf, br#"{"foo":"bar","baz":null}"#);
@@ -307,8 +306,8 @@ mod tests {
fn object_macro() { fn object_macro() {
let res = crate::value_to_string!(|obj| { let res = crate::value_to_string!(|obj| {
crate::value_as_object!(|obj| { crate::value_as_object!(|obj| {
obj.entry("foo", "bar"); obj.entry(esc!("foo"), "bar");
obj.entry("baz", Null); obj.entry(esc!("baz"), Null);
}) })
}); });
@@ -364,7 +363,7 @@ mod tests {
let entry = obj.key("2"); let entry = obj.key("2");
let entry = { let entry = {
let mut nested_obj = entry.object(); let mut nested_obj = entry.object();
nested_obj.entry("foo", "bar"); nested_obj.entry(esc!("foo"), "bar");
nested_obj.rollback() nested_obj.rollback()
}; };

View File

@@ -84,3 +84,11 @@ macro_rules! value_as_list {
res res
}}; }};
} }
/// A helper macro that ensures, at compile time, that the provided string
/// literal does not need JSON escaping.
///
/// Expands to a `const` block around `EscapedStr::from_static`, so a literal
/// containing `"`, `\` or a control character fails the build (via the panic
/// in `from_static`) instead of at runtime.
#[macro_export]
macro_rules! esc {
    ($str:literal) => {
        const { $crate::EscapedStr::from_static($str) }
    };
}

View File

@@ -10,58 +10,98 @@
use std::fmt::{self, Write}; use std::fmt::{self, Write};
/// Represents a character escape code in a type-safe manner. use crate::{KeyEncoder, ValueEncoder, ValueSer};
pub enum CharEscape {
/// An escaped quote `"`
Quote,
/// An escaped reverse solidus `\`
ReverseSolidus,
// /// An escaped solidus `/`
// Solidus,
/// An escaped backspace character (usually escaped as `\b`)
Backspace,
/// An escaped form feed character (usually escaped as `\f`)
FormFeed,
/// An escaped line feed character (usually escaped as `\n`)
LineFeed,
/// An escaped carriage return character (usually escaped as `\r`)
CarriageReturn,
/// An escaped tab character (usually escaped as `\t`)
Tab,
/// An escaped ASCII plane control character (usually escaped as
/// `\u00XX` where `XX` are two hex characters)
AsciiControl(u8),
}
impl CharEscape { #[repr(transparent)]
#[inline] pub struct EscapedStr([u8]);
fn from_escape_table(escape: u8, byte: u8) -> CharEscape {
match escape { impl EscapedStr {
self::BB => CharEscape::Backspace, /// Assumes the string does not need escaping.
self::TT => CharEscape::Tab, ///
self::NN => CharEscape::LineFeed, /// # Panics
self::FF => CharEscape::FormFeed, ///
self::RR => CharEscape::CarriageReturn, /// This will panic if the string does need escaping.
self::QU => CharEscape::Quote, #[inline(always)]
self::BS => CharEscape::ReverseSolidus, pub const fn from_static(s: &'static str) -> &'static Self {
self::UU => CharEscape::AsciiControl(byte), let bytes = s.as_bytes();
_ => unreachable!(),
let mut i = 0;
while i < bytes.len() {
let byte = bytes[i];
if byte < 0x20 || byte == b'"' || byte == b'\\' {
panic!("the string needs escaping");
}
i += 1;
} }
// safety: this EscapedStr is transparent over [u8].
unsafe { std::mem::transmute::<&[u8], &EscapedStr>(bytes) }
}
/// Escapes the string eagerly.
pub fn escape(s: &str) -> Box<Self> {
let mut writer = Vec::with_capacity(s.len());
Collect { buf: &mut writer }
.write_str(s)
.expect("formatting should not error");
let bytes = writer.into_boxed_slice();
// safety: this EscapedStr is transparent over [u8].
unsafe { std::mem::transmute::<Box<[u8]>, Box<EscapedStr>>(bytes) }
} }
} }
pub(crate) fn format_escaped_str(writer: &mut Vec<u8>, value: &str) { impl KeyEncoder for &EscapedStr {}
impl ValueEncoder for &EscapedStr {
fn encode(self, v: crate::ValueSer<'_>) {
let buf = &mut *v.buf;
buf.reserve(2 + self.0.len());
buf.push(b'"');
buf.extend_from_slice(&self.0);
buf.push(b'"');
v.finish();
}
}
impl KeyEncoder for &str {}
impl ValueEncoder for &str {
#[inline]
fn encode(self, v: ValueSer<'_>) {
format_escaped_str(v.buf, self);
v.finish();
}
}
impl KeyEncoder for fmt::Arguments<'_> {}
impl ValueEncoder for fmt::Arguments<'_> {
#[inline]
fn encode(self, v: ValueSer<'_>) {
if let Some(s) = self.as_str() {
format_escaped_str(v.buf, s);
} else {
format_escaped_fmt(v.buf, self);
}
v.finish();
}
}
fn format_escaped_str(writer: &mut Vec<u8>, value: &str) {
writer.reserve(2 + value.len()); writer.reserve(2 + value.len());
writer.push(b'"'); writer.push(b'"');
let rest = format_escaped_str_contents(writer, value); format_escaped_str_contents(writer, value);
writer.extend_from_slice(rest);
writer.push(b'"'); writer.push(b'"');
} }
pub(crate) fn format_escaped_fmt(writer: &mut Vec<u8>, args: fmt::Arguments) { fn format_escaped_fmt(writer: &mut Vec<u8>, args: fmt::Arguments) {
writer.push(b'"'); writer.push(b'"');
Collect { buf: writer } Collect { buf: writer }
@@ -77,33 +117,36 @@ struct Collect<'buf> {
impl fmt::Write for Collect<'_> { impl fmt::Write for Collect<'_> {
fn write_str(&mut self, s: &str) -> fmt::Result { fn write_str(&mut self, s: &str) -> fmt::Result {
let last = format_escaped_str_contents(self.buf, s); format_escaped_str_contents(self.buf, s);
self.buf.extend(last);
Ok(()) Ok(())
} }
} }
// writes any escape sequences, and returns the suffix still needed to be written. // writes any escape sequences, and returns the suffix still needed to be written.
fn format_escaped_str_contents<'a>(writer: &mut Vec<u8>, value: &'a str) -> &'a [u8] { fn format_escaped_str_contents(writer: &mut Vec<u8>, value: &str) {
let bytes = value.as_bytes(); let mut bytes = value.as_bytes();
let mut start = 0; let mut i = 0;
while i < bytes.len() {
for (i, &byte) in bytes.iter().enumerate() { let byte = bytes[i];
let escape = ESCAPE[byte as usize]; let escape = ESCAPE[byte as usize];
i += 1;
if escape == 0 { if escape == 0 {
continue; continue;
} }
writer.extend_from_slice(&bytes[start..i]); // hitting an escape character is unlikely.
cold();
let char_escape = CharEscape::from_escape_table(escape, byte); let string_run;
write_char_escape(writer, char_escape); (string_run, bytes) = bytes.split_at(i);
i = 0;
start = i + 1; write_char_escape(writer, string_run);
} }
&bytes[start..] writer.extend_from_slice(bytes);
} }
const BB: u8 = b'b'; // \x08 const BB: u8 = b'b'; // \x08
@@ -138,29 +181,38 @@ static ESCAPE: [u8; 256] = [
__, __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, // F __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, // F
]; ];
fn write_char_escape(writer: &mut Vec<u8>, char_escape: CharEscape) { #[cold]
let s = match char_escape { fn cold() {}
CharEscape::Quote => b"\\\"",
CharEscape::ReverseSolidus => b"\\\\",
// CharEscape::Solidus => b"\\/",
CharEscape::Backspace => b"\\b",
CharEscape::FormFeed => b"\\f",
CharEscape::LineFeed => b"\\n",
CharEscape::CarriageReturn => b"\\r",
CharEscape::Tab => b"\\t",
CharEscape::AsciiControl(byte) => {
static HEX_DIGITS: [u8; 16] = *b"0123456789abcdef";
let bytes = &[
b'\\',
b'u',
b'0',
b'0',
HEX_DIGITS[(byte >> 4) as usize],
HEX_DIGITS[(byte & 0xF) as usize],
];
return writer.extend_from_slice(bytes);
}
};
fn write_char_escape(writer: &mut Vec<u8>, bytes: &[u8]) {
debug_assert!(
!bytes.is_empty(),
"caller guarantees that bytes is non empty"
);
let (&byte, string_run) = bytes.split_last().unwrap_or((&0, b""));
let escape = ESCAPE[byte as usize];
debug_assert_ne!(escape, 0, "caller guarantees that escape will be non-zero");
// the escape char from the escape table is the correct replacement
// character.
let mut bytes = [b'\\', escape, b'0', b'0', b'0', b'0'];
let mut s = &bytes[0..2];
// if the replacement character is 'u', then we need
// to write the unicode encoding
if escape == UU {
static HEX_DIGITS: [u8; 16] = *b"0123456789abcdef";
// we rarely encounter characters that must be escaped as unicode.
cold();
bytes[4] = HEX_DIGITS[(byte >> 4) as usize];
bytes[5] = HEX_DIGITS[(byte & 0xF) as usize];
s = &bytes;
}
writer.extend_from_slice(string_run);
writer.extend_from_slice(s); writer.extend_from_slice(s);
} }

View File

@@ -1,11 +1,12 @@
use core::fmt;
use std::collections::{BTreeMap, HashMap}; use std::collections::{BTreeMap, HashMap};
use crate::str::{format_escaped_fmt, format_escaped_str}; use crate::{ValueSer, value_as_list, value_as_object};
use crate::{KeyEncoder, ObjectSer, ValueSer, value_as_list, value_as_object};
/// Marker trait for values that are valid keys
pub trait KeyEncoder: ValueEncoder {}
/// Write a value to the underlying json representation. /// Write a value to the underlying json representation.
pub trait ValueEncoder { pub trait ValueEncoder: Sized {
fn encode(self, v: ValueSer<'_>); fn encode(self, v: ValueSer<'_>);
} }
@@ -24,23 +25,11 @@ impl<T: Copy + ValueEncoder> ValueEncoder for &T {
} }
} }
impl ValueEncoder for &str { impl KeyEncoder for String {}
impl ValueEncoder for String {
#[inline] #[inline]
fn encode(self, v: ValueSer<'_>) { fn encode(self, v: ValueSer<'_>) {
format_escaped_str(v.buf, self); self.as_str().encode(v);
v.finish();
}
}
impl ValueEncoder for fmt::Arguments<'_> {
#[inline]
fn encode(self, v: ValueSer<'_>) {
if let Some(s) = self.as_str() {
format_escaped_str(v.buf, s);
} else {
format_escaped_fmt(v.buf, self);
}
v.finish();
} }
} }
@@ -94,26 +83,8 @@ impl<T: ValueEncoder> ValueEncoder for Option<T> {
} }
} }
impl KeyEncoder for &str {
#[inline]
fn write_key<'a>(self, obj: &'a mut ObjectSer) -> ValueSer<'a> {
let obj = &mut *obj;
obj.entry_inner(|b| format_escaped_str(b, self))
}
}
impl KeyEncoder for fmt::Arguments<'_> {
#[inline]
fn write_key<'a>(self, obj: &'a mut ObjectSer) -> ValueSer<'a> {
if let Some(key) = self.as_str() {
obj.entry_inner(|b| format_escaped_str(b, key))
} else {
obj.entry_inner(|b| format_escaped_fmt(b, self))
}
}
}
/// Represents the JSON null value. /// Represents the JSON null value.
#[derive(Clone, Copy)]
pub struct Null; pub struct Null;
impl ValueEncoder for Null { impl ValueEncoder for Null {

View File

@@ -0,0 +1,180 @@
use json::Null;
use json::ValueEncoder;
use std::collections::BTreeMap;
// Builds a `BTreeMap` from `key => value` pairs (per the commit log, these
// tests are vendored from serde_json, which uses the same helper name).
macro_rules! treemap {
    () => {
        BTreeMap::new()
    };
    ($($k:expr => $v:expr),+ $(,)?) => {
        {
            let mut m = BTreeMap::new();
            $(
                m.insert($k, $v);
            )+
            m
        }
    };
}
/// Encodes each value to a JSON string and asserts it matches the expected
/// text exactly.
fn test_encode_ok<'a, T>(cases: impl IntoIterator<Item = (T, &'a str)>)
where
    T: ValueEncoder,
{
    for (value, expected) in cases {
        let encoded = json::value_to_string!(|v| value.encode(v));
        assert_eq!(&*encoded, expected);
    }
}
#[test]
fn test_write_null() {
    // `Null` is the marker type for the JSON `null` value.
    test_encode_ok([(Null, "null")]);
}
#[test]
fn test_write_u64() {
    // Pin the boundary value against the std Display rendering.
    let max = u64::MAX.to_string();
    test_encode_ok([(3u64, "3"), (u64::MAX, max.as_str())]);
}
#[test]
fn test_write_i64() {
    // Pin the boundary value against the std Display rendering.
    let min = i64::MIN.to_string();
    test_encode_ok([
        (3i64, "3"),
        (-2i64, "-2"),
        (-1234i64, "-1234"),
        (i64::MIN, min.as_str()),
    ]);
}
#[test]
fn test_write_f64() {
    // Includes extreme values to pin the float formatter's exact output.
    test_encode_ok([
        (3.0, "3.0"),
        (3.1, "3.1"),
        (-1.5, "-1.5"),
        (0.5, "0.5"),
        (f64::MIN, "-1.7976931348623157e308"),
        (f64::MAX, "1.7976931348623157e308"),
        (f64::EPSILON, "2.220446049250313e-16"),
    ]);
}
#[test]
fn test_write_str() {
    let tests = [
        // normal strings pass through unchanged (just quoted)
        ("", "\"\""),
        ("foo", "\"foo\""),
        // ascii escape chars.
        ("\"", "\"\\\"\""),
        ("\x08", "\"\\b\""),
        ("\n", "\"\\n\""),
        ("\r", "\"\\r\""),
        ("\t", "\"\\t\""),
        // other control characters use the \u00XX form
        ("\x07", "\"\\u0007\""),
        // unicode not escaped.
        ("\u{12ab}", "\"\u{12ab}\""),
        ("\u{AB12}", "\"\u{AB12}\""),
        ("\u{1F395}", "\"\u{1F395}\""),
    ];
    test_encode_ok(tests);
}
#[test]
fn test_write_bool() {
    test_encode_ok([(true, "true"), (false, "false")]);
}
#[test]
fn test_write_list() {
    // flat lists
    test_encode_ok([
        (vec![], "[]"),
        (vec![true], "[true]"),
        (vec![true, false], "[true,false]"),
    ]);
    // nested lists, rotating the non-empty entry through each position
    test_encode_ok([
        (vec![vec![], vec![], vec![]], "[[],[],[]]"),
        (vec![vec![1, 2, 3], vec![], vec![]], "[[1,2,3],[],[]]"),
        (vec![vec![], vec![1, 2, 3], vec![]], "[[],[1,2,3],[]]"),
        (vec![vec![], vec![], vec![1, 2, 3]], "[[],[],[1,2,3]]"),
    ]);
}
#[test]
fn test_write_object() {
    // flat objects: empty, single entry, two entries (BTreeMap key order)
    test_encode_ok([
        (treemap!(), "{}"),
        (treemap!("a".to_owned() => true), "{\"a\":true}"),
        (
            treemap!(
                "a".to_owned() => true,
                "b".to_owned() => false,
            ),
            "{\"a\":true,\"b\":false}",
        ),
    ]);
    // nested objects, rotating the populated entry through each position
    test_encode_ok([
        (
            treemap![
                "a".to_owned() => treemap![],
                "b".to_owned() => treemap![],
                "c".to_owned() => treemap![],
            ],
            "{\"a\":{},\"b\":{},\"c\":{}}",
        ),
        (
            treemap![
                "a".to_owned() => treemap![
                    "a".to_owned() => treemap!["a" => vec![1,2,3]],
                    "b".to_owned() => treemap![],
                    "c".to_owned() => treemap![],
                ],
                "b".to_owned() => treemap![],
                "c".to_owned() => treemap![],
            ],
            "{\"a\":{\"a\":{\"a\":[1,2,3]},\"b\":{},\"c\":{}},\"b\":{},\"c\":{}}",
        ),
        (
            treemap![
                "a".to_owned() => treemap![],
                "b".to_owned() => treemap![
                    "a".to_owned() => treemap!["a" => vec![1,2,3]],
                    "b".to_owned() => treemap![],
                    "c".to_owned() => treemap![],
                ],
                "c".to_owned() => treemap![],
            ],
            "{\"a\":{},\"b\":{\"a\":{\"a\":[1,2,3]},\"b\":{},\"c\":{}},\"c\":{}}",
        ),
        (
            treemap![
                "a".to_owned() => treemap![],
                "b".to_owned() => treemap![],
                "c".to_owned() => treemap![
                    "a".to_owned() => treemap!["a" => vec![1,2,3]],
                    "b".to_owned() => treemap![],
                    "c".to_owned() => treemap![],
                ],
            ],
            "{\"a\":{},\"b\":{},\"c\":{\"a\":{\"a\":[1,2,3]},\"b\":{},\"c\":{}}}",
        ),
    ]);
}
#[test]
fn test_write_option() {
    // None encodes as null; Some encodes as the inner value.
    test_encode_ok([(None, "null"), (Some("jodhpurs"), "\"jodhpurs\"")]);
    test_encode_ok([
        (None, "null"),
        (Some(vec!["foo", "bar"]), "[\"foo\",\"bar\"]"),
    ]);
}

View File

@@ -552,21 +552,23 @@ impl EventFormatter {
let serializer = json::ValueSer::new(&mut self.logline_buffer); let serializer = json::ValueSer::new(&mut self.logline_buffer);
json::value_as_object!(|serializer| { json::value_as_object!(|serializer| {
// Timestamp comes first, so raw lines can be sorted by timestamp. // Timestamp comes first, so raw lines can be sorted by timestamp.
serializer.entry("timestamp", &*timestamp); serializer.entry(json::esc!("timestamp"), &*timestamp);
// Level next. // Level next.
serializer.entry("level", meta.level().as_str()); serializer.entry(json::esc!("level"), meta.level().as_str());
// Message next. // Message next.
let mut message_extractor = let mut message_extractor = MessageFieldExtractor::new(
MessageFieldExtractor::new(serializer.key("message"), skipped_field_indices); serializer.key(json::esc!("message")),
skipped_field_indices,
);
event.record(&mut message_extractor); event.record(&mut message_extractor);
message_extractor.finish(); message_extractor.finish();
// Direct message fields. // Direct message fields.
{ {
let mut message_skipper = MessageFieldSkipper::new( let mut message_skipper = MessageFieldSkipper::new(
serializer.key("fields").object(), serializer.key(json::esc!("fields")).object(),
skipped_field_indices, skipped_field_indices,
); );
event.record(&mut message_skipper); event.record(&mut message_skipper);
@@ -579,7 +581,7 @@ impl EventFormatter {
let mut extracted = ExtractedSpanFields::new(extract_fields); let mut extracted = ExtractedSpanFields::new(extract_fields);
let spans = serializer.key("spans"); let spans = serializer.key(json::esc!("spans"));
json::value_as_object!(|spans| { json::value_as_object!(|spans| {
let parent_spans = ctx let parent_spans = ctx
.event_span(event) .event_span(event)
@@ -599,6 +601,8 @@ impl EventFormatter {
json::value_as_object!(|span_fields| { json::value_as_object!(|span_fields| {
for (field, value) in std::iter::zip(span.metadata().fields(), values) { for (field, value) in std::iter::zip(span.metadata().fields(), values) {
if let Some(value) = value { if let Some(value) = value {
// we don't use entry syntax here, as that's intended for literal keys only.
// the field name might need escaping, and entry would panic in that case.
span_fields.entry(field.name(), value); span_fields.entry(field.name(), value);
} }
} }
@@ -610,37 +614,37 @@ impl EventFormatter {
let pid = std::process::id(); let pid = std::process::id();
// Skip adding pid 1 to reduce noise for services running in containers. // Skip adding pid 1 to reduce noise for services running in containers.
if pid != 1 { if pid != 1 {
serializer.entry("process_id", pid); serializer.entry(json::esc!("process_id"), pid);
} }
THREAD_ID.with(|tid| serializer.entry("thread_id", tid)); THREAD_ID.with(|tid| serializer.entry(json::esc!("thread_id"), tid));
// TODO: tls cache? name could change // TODO: tls cache? name could change
if let Some(thread_name) = std::thread::current().name() if let Some(thread_name) = std::thread::current().name()
&& !thread_name.is_empty() && !thread_name.is_empty()
&& thread_name != "tokio-runtime-worker" && thread_name != "tokio-runtime-worker"
{ {
serializer.entry("thread_name", thread_name); serializer.entry(json::esc!("thread_name"), thread_name);
} }
if let Some(task_id) = tokio::task::try_id() { if let Some(task_id) = tokio::task::try_id() {
serializer.entry("task_id", format_args!("{task_id}")); serializer.entry(json::esc!("task_id"), format_args!("{task_id}"));
} }
serializer.entry("target", meta.target()); serializer.entry(json::esc!("target"), meta.target());
// Skip adding module if it's the same as target. // Skip adding module if it's the same as target.
if let Some(module) = meta.module_path() if let Some(module) = meta.module_path()
&& module != meta.target() && module != meta.target()
{ {
serializer.entry("module", module); serializer.entry(json::esc!("module"), module);
} }
if let Some(file) = meta.file() { if let Some(file) = meta.file() {
if let Some(line) = meta.line() { if let Some(line) = meta.line() {
serializer.entry("src", format_args!("{file}:{line}")); serializer.entry(json::esc!("src"), format_args!("{file}:{line}"));
} else { } else {
serializer.entry("src", file); serializer.entry(json::esc!("src"), file);
} }
} }
@@ -649,13 +653,16 @@ impl EventFormatter {
let otel_spanref = otel_context.span(); let otel_spanref = otel_context.span();
let span_context = otel_spanref.span_context(); let span_context = otel_spanref.span_context();
if span_context.is_valid() { if span_context.is_valid() {
serializer.entry("trace_id", format_args!("{}", span_context.trace_id())); serializer.entry(
json::esc!("trace_id"),
format_args!("{}", span_context.trace_id()),
);
} }
} }
if extracted.has_values() { if extracted.has_values() {
// TODO: add fields from event, too? // TODO: add fields from event, too?
let extract = serializer.key("extract"); let extract = serializer.key(json::esc!("extract"));
json::value_as_object!(|extract| { json::value_as_object!(|extract| {
for (key, value) in std::iter::zip(extracted.names, extracted.values) { for (key, value) in std::iter::zip(extracted.names, extracted.values) {
if let Some(value) = value { if let Some(value) = value {

View File

@@ -875,7 +875,7 @@ async fn query_batch_to_json(
headers: HttpHeaders, headers: HttpHeaders,
) -> Result<String, SqlOverHttpError> { ) -> Result<String, SqlOverHttpError> {
let json_output = json::value_to_string!(|obj| json::value_as_object!(|obj| { let json_output = json::value_to_string!(|obj| json::value_as_object!(|obj| {
let results = obj.key("results"); let results = obj.key(json::esc!("results"));
json::value_as_list!(|results| { json::value_as_list!(|results| {
query_batch(config, cancel, tx, queries, headers, results).await?; query_batch(config, cancel, tx, queries, headers, results).await?;
}); });
@@ -900,17 +900,17 @@ async fn query_to_json<T: GenericClient>(
.map_err(SqlOverHttpError::Postgres)?; .map_err(SqlOverHttpError::Postgres)?;
let query_acknowledged = Instant::now(); let query_acknowledged = Instant::now();
let mut json_fields = output.key("fields").list(); let mut json_fields = output.key(json::esc!("fields")).list();
for c in row_stream.statement.columns() { for c in row_stream.statement.columns() {
let json_field = json_fields.entry(); let json_field = json_fields.entry();
json::value_as_object!(|json_field| { json::value_as_object!(|json_field| {
json_field.entry("name", c.name()); json_field.entry(json::esc!("name"), c.name());
json_field.entry("dataTypeID", c.type_().oid()); json_field.entry(json::esc!("dataTypeID"), c.type_().oid());
json_field.entry("tableID", c.table_oid()); json_field.entry(json::esc!("tableID"), c.table_oid());
json_field.entry("columnID", c.column_id()); json_field.entry(json::esc!("columnID"), c.column_id());
json_field.entry("dataTypeSize", c.type_size()); json_field.entry(json::esc!("dataTypeSize"), c.type_size());
json_field.entry("dataTypeModifier", c.type_modifier()); json_field.entry(json::esc!("dataTypeModifier"), c.type_modifier());
json_field.entry("format", "text"); json_field.entry(json::esc!("format"), "text");
}); });
} }
json_fields.finish(); json_fields.finish();
@@ -922,7 +922,7 @@ async fn query_to_json<T: GenericClient>(
// around to get a command tag. Also check that the response is not too // around to get a command tag. Also check that the response is not too
// big. // big.
let mut rows = 0; let mut rows = 0;
let mut json_rows = output.key("rows").list(); let mut json_rows = output.key(json::esc!("rows")).list();
while let Some(row) = row_stream.next().await { while let Some(row) = row_stream.next().await {
let row = row.map_err(SqlOverHttpError::Postgres)?; let row = row.map_err(SqlOverHttpError::Postgres)?;
@@ -971,9 +971,9 @@ async fn query_to_json<T: GenericClient>(
"finished executing query" "finished executing query"
); );
output.entry("command", command_tag_name); output.entry(json::esc!("command"), command_tag_name);
output.entry("rowCount", command_tag_count); output.entry(json::esc!("rowCount"), command_tag_count);
output.entry("rowAsArray", array_mode); output.entry(json::esc!("rowAsArray"), array_mode);
output.finish(); output.finish();
Ok(ready) Ok(ready)