Created February 18, 2025 15:04
Benchmarking ZSTD throughput. Code mostly generated with Claude 3.5 Sonnet
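To build the gist as a standalone binary, a Cargo.toml along these lines should work; the package name and crate version below are assumptions, and rand is only needed if the commented-out RNG lines are restored. Run it with cargo run --release so the timings reflect an optimized build.

[package]
name = "zstd-bench"    # hypothetical package name
version = "0.1.0"
edition = "2021"

[dependencies]
zstd = "0.13"          # assumed version; any recent release exporting encode_all/decode_all works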
// use rand::Rng;
use std::time::Instant;
use zstd::{decode_all, stream::encode_all};

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Generate sample data: 64,000 u32 integers cycling through 0..1024.
    // (The rand-based path is commented out, so the data is repetitive and
    // compresses far better than random integers would.)
    let count: usize = 64_000;
    println!("Generating {} integers...", count);
    // let mut rng = rand::rng();
    let numbers: Vec<u32> = (0..1024).cycle().take(count).collect();

    // Convert the integers to little-endian bytes
    let original_bytes: Vec<u8> = numbers.iter().flat_map(|n| n.to_le_bytes()).collect();
    println!("Original data size: {} bytes", original_bytes.len());

    // Benchmark compression at zstd level 3
    let start = Instant::now();
    let compressed = encode_all(&original_bytes[..], 3)?;
    let compression_duration = start.elapsed();

    // Calculate compression ratio and speeds
    let compression_ratio = original_bytes.len() as f64 / compressed.len() as f64;
    let throughput_mb =
        (original_bytes.len() as f64 / 1_000_000.0) / compression_duration.as_secs_f64();
    let integers_per_second = count as f64 / compression_duration.as_secs_f64();

    // Print compression results
    println!("\nCompression Results:");
    println!("-------------------");
    println!("Compressed size: {} bytes", compressed.len());
    println!("Compression ratio: {:.2}x", compression_ratio);
    println!("Compression time: {:.2?}", compression_duration);
    println!("Throughput: {:.2} MB/s", throughput_mb);
    println!("Processing speed: {:.0} integers/s", integers_per_second);

    // Benchmark decompression
    let start = Instant::now();
    let decompressed = decode_all(&compressed[..])?;
    let decompression_duration = start.elapsed();

    // Calculate decompression speeds
    let decompression_throughput_mb =
        (decompressed.len() as f64 / 1_000_000.0) / decompression_duration.as_secs_f64();
    let decompression_integers_per_second = count as f64 / decompression_duration.as_secs_f64();

    // Print decompression results
    println!("\nDecompression Results:");
    println!("---------------------");
    println!("Decompression time: {:.2?}", decompression_duration);
    println!(
        "Decompression throughput: {:.2} MB/s",
        decompression_throughput_mb
    );
    println!(
        "Decompression speed: {:.0} integers/s",
        decompression_integers_per_second
    );

    // Verify the round trip
    if decompressed == original_bytes {
        println!("\nVerification: Successful - decompressed data matches original");
    } else {
        println!("\nVerification: FAILED - decompressed data does not match original!");
    }

    Ok(())
}
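The benchmark hard-codes compression level 3. To see how the level trades ratio against throughput on this data, a small sweep like the sketch below can replace the single measurement; the helper name sweep_levels and the chosen levels are arbitrary, and it reuses the same encode_all call as above.

use std::time::Instant;
use zstd::stream::encode_all;

// Sketch: measure ratio and throughput for a few zstd levels.
// Levels 1, 3, 9, 19 are arbitrary picks; higher levels compress better but run slower.
fn sweep_levels(original_bytes: &[u8]) -> Result<(), Box<dyn std::error::Error>> {
    for level in [1, 3, 9, 19] {
        let start = Instant::now();
        let compressed = encode_all(original_bytes, level)?;
        let elapsed = start.elapsed().as_secs_f64();
        println!(
            "level {:>2}: ratio {:.2}x, {:.2} MB/s",
            level,
            original_bytes.len() as f64 / compressed.len() as f64,
            (original_bytes.len() as f64 / 1_000_000.0) / elapsed
        );
    }
    Ok(())
}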
Results on M2 Max: