Mirror of https://github.com/GreptimeTeam/greptimedb.git; last synced 2026-01-07 22:02:56 +00:00.
* feat: implement compressed CSV/JSON export functionality - Add CompressedWriter for real-time compression during CSV/JSON export - Support GZIP, BZIP2, XZ, ZSTD compression formats - Remove LazyBufferedWriter dependency for simplified architecture - Implement Encoder -> Compressor -> FileWriter data flow - Add tests for compressed CSV/JSON export Signed-off-by: McKnight22 <tao.wang.22@outlook.com> * feat: implement compressed CSV/JSON export functionality - refactor and extend compressed_writer tests - add coverage for Bzip2 and Xz compression Signed-off-by: McKnight22 <tao.wang.22@outlook.com> * feat: implement compressed CSV/JSON export functionality - Switch to threshold-based chunked flushing - Avoid unnecessary writes on empty buffers - Replace direct write_all() calls with the new helper for consistency Signed-off-by: McKnight22 <tao.wang.22@outlook.com> * feat: implement compressed CSV/JSON import (COPY FROM) functionality - Add support for reading compressed CSV and JSON in COPY FROM - Support GZIP, BZIP2, XZ, ZSTD compression formats - Add tests for compressed CSV/JSON import Signed-off-by: McKnight22 <tao.wang.22@outlook.com> * feat: implement compressed CSV/JSON export/import functionality - Fix review comments Signed-off-by: McKnight22 <tao.wang.22@outlook.com> * feat: implement compressed CSV/JSON export/import functionality - Move temp_dir out of the loop Signed-off-by: McKnight22 <tao.wang.22@outlook.com> * feat: implement compressed CSV/JSON export/import functionality - Fix unreasonable locking logic Co-authored-by: jeremyhi <jiachun_feng@proton.me> Signed-off-by: McKnight22 <tao.wang.22@outlook.com> --------- Signed-off-by: McKnight22 <tao.wang.22@outlook.com> Co-authored-by: jeremyhi <jiachun_feng@proton.me>
56 lines
1.6 KiB
Plaintext
56 lines
1.6 KiB
Plaintext
-- Test compressed JSON export functionality
|
|
-- Fixture table shared by every export statement below; `ts` is the
-- mandatory TIME INDEX column of the table definition.
CREATE TABLE test_json_compression(
|
|
`id` UINT32,
|
|
`name` STRING,
|
|
`value` DOUBLE,
|
|
ts TIMESTAMP TIME INDEX
|
|
);
|
|
|
|
Affected Rows: 0
|
|
|
|
-- Insert test data
|
|
-- Five rows; ts values are epoch milliseconds starting at
-- 1640995200000 (2022-01-01 00:00:00 UTC), spaced one minute apart.
INSERT INTO test_json_compression(`id`, `name`, `value`, ts) VALUES
|
|
(1, 'Alice', 10.5, 1640995200000),
|
|
(2, 'Bob', 20.3, 1640995260000),
|
|
(3, 'Charlie', 30.7, 1640995320000),
|
|
(4, 'David', 40.1, 1640995380000),
|
|
(5, 'Eve', 50.9, 1640995440000);
|
|
|
|
Affected Rows: 5
|
|
|
|
-- Test uncompressed JSON export
|
|
-- Baseline case: no compression_type option, plain .json output file.
COPY test_json_compression TO '${SQLNESS_HOME}/export/test_json_uncompressed.json' WITH (format='json');
|
|
|
|
Affected Rows: 5
|
|
|
|
-- Test GZIP compressed JSON export
|
|
-- compression_type='gzip'; output filename carries the matching .gz suffix.
COPY test_json_compression TO '${SQLNESS_HOME}/export/test_json_gzip.json.gz' WITH (format='json', compression_type='gzip');
|
|
|
|
Affected Rows: 5
|
|
|
|
-- Test ZSTD compressed JSON export
|
|
-- compression_type='zstd'; output filename carries the matching .zst suffix.
COPY test_json_compression TO '${SQLNESS_HOME}/export/test_json_zstd.json.zst' WITH (format='json', compression_type='zstd');
|
|
|
|
Affected Rows: 5
|
|
|
|
-- Test BZIP2 compressed JSON export
|
|
-- compression_type='bzip2'; output filename carries the matching .bz2 suffix.
COPY test_json_compression TO '${SQLNESS_HOME}/export/test_json_bzip2.json.bz2' WITH (format='json', compression_type='bzip2');
|
|
|
|
Affected Rows: 5
|
|
|
|
-- Test XZ compressed JSON export
|
|
-- compression_type='xz'; output filename carries the matching .xz suffix.
COPY test_json_compression TO '${SQLNESS_HOME}/export/test_json_xz.json.xz' WITH (format='json', compression_type='xz');
|
|
|
|
Affected Rows: 5
|
|
|
|
-- Test compressed JSON with schema inference limit
|
|
-- NOTE(review): schema_infer_max_record is normally a read-side (COPY FROM)
-- option; this statement appears to check that it is accepted (or ignored)
-- on export alongside compression -- confirm the intended semantics.
COPY test_json_compression TO '${SQLNESS_HOME}/export/test_json_schema_limit.json.gz' WITH (format='json', compression_type='gzip', schema_infer_max_record=100);
|
|
|
|
Affected Rows: 5
|
|
|
|
-- Clean up
|
|
-- Drop the fixture table so repeated test runs start from a clean state.
DROP TABLE test_json_compression;
|
|
|
|
Affected Rows: 0
|
|
|