// benches/common/metrics.rs

use serde::{Deserialize, Serialize};

/// Custom metrics for benchmark results
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BenchmarkMetrics {
    pub operation: String,
    pub backend: String,
    pub dataset_size: usize,
    pub mean_ns: u64,
    pub median_ns: u64,
    pub p95_ns: u64,
    pub p99_ns: u64,
    pub std_dev_ns: u64,
    pub throughput_ops_sec: f64,
}

impl BenchmarkMetrics {
    pub fn new(operation: String, backend: String, dataset_size: usize) -> Self {
        Self {
            operation,
            backend,
            dataset_size,
            mean_ns: 0,
            median_ns: 0,
            p95_ns: 0,
            p99_ns: 0,
            std_dev_ns: 0,
            throughput_ops_sec: 0.0,
        }
    }

    /// Convert to CSV row format
    pub fn to_csv_row(&self) -> String {
        format!(
            "{},{},{},{},{},{},{},{},{:.2}",
            self.backend,
            self.operation,
            self.dataset_size,
            self.mean_ns,
            self.median_ns,
            self.p95_ns,
            self.p99_ns,
            self.std_dev_ns,
            self.throughput_ops_sec
        )
    }

    /// Get CSV header
    pub fn csv_header() -> String {
        "backend,operation,dataset_size,mean_ns,median_ns,p95_ns,p99_ns,std_dev_ns,throughput_ops_sec"
            .to_string()
    }

    /// Convert to JSON
    pub fn to_json(&self) -> serde_json::Value {
        serde_json::json!({
            "backend": self.backend,
            "operation": self.operation,
            "dataset_size": self.dataset_size,
            "metrics": {
                "mean_ns": self.mean_ns,
                "median_ns": self.median_ns,
                "p95_ns": self.p95_ns,
                "p99_ns": self.p99_ns,
                "std_dev_ns": self.std_dev_ns,
                "throughput_ops_sec": self.throughput_ops_sec
            }
        })
    }

    /// Calculate throughput (ops/sec) from the mean latency in nanoseconds
    pub fn calculate_throughput(&mut self) {
        if self.mean_ns > 0 {
            self.throughput_ops_sec = 1_000_000_000.0 / self.mean_ns as f64;
        }
    }

    /// Format a duration in nanoseconds for display
    pub fn format_duration(nanos: u64) -> String {
        if nanos < 1_000 {
            format!("{} ns", nanos)
        } else if nanos < 1_000_000 {
            format!("{:.2} µs", nanos as f64 / 1_000.0)
        } else if nanos < 1_000_000_000 {
            format!("{:.2} ms", nanos as f64 / 1_000_000.0)
        } else {
            format!("{:.2} s", nanos as f64 / 1_000_000_000.0)
        }
    }

    /// Pretty print the metrics
    pub fn display(&self) -> String {
        format!(
            "{}/{} (n={}): mean={}, median={}, p95={}, p99={}, throughput={:.0} ops/sec",
            self.backend,
            self.operation,
            self.dataset_size,
            Self::format_duration(self.mean_ns),
            Self::format_duration(self.median_ns),
            Self::format_duration(self.p95_ns),
            Self::format_duration(self.p99_ns),
            self.throughput_ops_sec
        )
    }
}

/// Memory metrics for profiling
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MemoryMetrics {
    pub operation: String,
    pub backend: String,
    pub allocations: usize,
    pub peak_bytes: usize,
    pub avg_bytes_per_op: f64,
}

impl MemoryMetrics {
    pub fn new(operation: String, backend: String) -> Self {
        Self {
            operation,
            backend,
            allocations: 0,
            peak_bytes: 0,
            avg_bytes_per_op: 0.0,
        }
    }

    /// Convert to CSV row format
    pub fn to_csv_row(&self) -> String {
        format!(
            "{},{},{},{},{:.2}",
            self.backend, self.operation, self.allocations, self.peak_bytes, self.avg_bytes_per_op
        )
    }

    /// Get CSV header
    pub fn csv_header() -> String {
        "backend,operation,allocations,peak_bytes,avg_bytes_per_op".to_string()
    }

    /// Format a byte count for display
    pub fn format_bytes(bytes: usize) -> String {
        if bytes < 1024 {
            format!("{} B", bytes)
        } else if bytes < 1024 * 1024 {
            format!("{:.2} KB", bytes as f64 / 1024.0)
        } else if bytes < 1024 * 1024 * 1024 {
            format!("{:.2} MB", bytes as f64 / (1024.0 * 1024.0))
        } else {
            format!("{:.2} GB", bytes as f64 / (1024.0 * 1024.0 * 1024.0))
        }
    }

    /// Pretty print the metrics
    pub fn display(&self) -> String {
        format!(
            "{}/{}: {} allocations, peak={}, avg={}",
            self.backend,
            self.operation,
            self.allocations,
            Self::format_bytes(self.peak_bytes),
            Self::format_bytes(self.avg_bytes_per_op as usize)
        )
    }
}

/// Collection of benchmark results for comparison
#[derive(Debug, Default)]
pub struct BenchmarkResults {
    pub metrics: Vec<BenchmarkMetrics>,
    pub memory_metrics: Vec<MemoryMetrics>,
}

impl BenchmarkResults {
    pub fn new() -> Self {
        Self::default()
    }

    pub fn add_metric(&mut self, metric: BenchmarkMetrics) {
        self.metrics.push(metric);
    }

    pub fn add_memory_metric(&mut self, metric: MemoryMetrics) {
        self.memory_metrics.push(metric);
    }

    /// Export all metrics to CSV format
    pub fn to_csv(&self) -> String {
        let mut output = String::new();

        if !self.metrics.is_empty() {
            output.push_str(&BenchmarkMetrics::csv_header());
            output.push('\n');
            for metric in &self.metrics {
                output.push_str(&metric.to_csv_row());
                output.push('\n');
            }
        }

        if !self.memory_metrics.is_empty() {
            output.push('\n');
            output.push_str(&MemoryMetrics::csv_header());
            output.push('\n');
            for metric in &self.memory_metrics {
                output.push_str(&metric.to_csv_row());
                output.push('\n');
            }
        }

        output
    }

    /// Export all metrics to JSON format
    pub fn to_json(&self) -> serde_json::Value {
        serde_json::json!({
            "benchmarks": self.metrics.iter().map(|m| m.to_json()).collect::<Vec<_>>(),
            "memory": self.memory_metrics
        })
    }

    /// Save results to a CSV file
    pub fn save_csv(&self, path: &str) -> std::io::Result<()> {
        std::fs::write(path, self.to_csv())
    }

    /// Save results to a pretty-printed JSON file
    pub fn save_json(&self, path: &str) -> std::io::Result<()> {
        let json = serde_json::to_string_pretty(&self.to_json())?;
        std::fs::write(path, json)
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_metrics_creation() {
        let mut metric = BenchmarkMetrics::new("set".to_string(), "redb".to_string(), 1000);
        metric.mean_ns = 1_245;
        metric.calculate_throughput();
        assert!(metric.throughput_ops_sec > 0.0);
    }

    #[test]
    fn test_csv_export() {
        let mut results = BenchmarkResults::new();
        let mut metric = BenchmarkMetrics::new("set".to_string(), "redb".to_string(), 1000);
        metric.mean_ns = 1_245;
        metric.calculate_throughput();
        results.add_metric(metric);

        let csv = results.to_csv();
        assert!(csv.contains("backend,operation"));
        assert!(csv.contains("redb,set"));
    }

    #[test]
    fn test_duration_formatting() {
        assert_eq!(BenchmarkMetrics::format_duration(500), "500 ns");
        assert_eq!(BenchmarkMetrics::format_duration(1_500), "1.50 µs");
        assert_eq!(BenchmarkMetrics::format_duration(1_500_000), "1.50 ms");
    }

    #[test]
    fn test_bytes_formatting() {
        assert_eq!(MemoryMetrics::format_bytes(512), "512 B");
        assert_eq!(MemoryMetrics::format_bytes(2048), "2.00 KB");
        assert_eq!(MemoryMetrics::format_bytes(2_097_152), "2.00 MB");
    }
}
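
// --- Usage sketch (not part of the original module) ----------------------
// A minimal illustration of how these types might be wired into a bench
// harness: one BenchmarkMetrics per backend/operation pair, collected into
// BenchmarkResults and exported once at the end of the run. The "sled"
// backend name and the timing values are placeholders, not taken from the
// real benchmark suite.
#[cfg(test)]
mod usage_sketch {
    use super::*;

    #[test]
    fn collect_and_export() {
        let mut results = BenchmarkResults::new();

        for backend in ["redb", "sled"] {
            // Hypothetical timings; a real harness would fill these fields
            // from measured samples before calling calculate_throughput().
            let mut metric =
                BenchmarkMetrics::new("get".to_string(), backend.to_string(), 10_000);
            metric.mean_ns = 2_000;
            metric.calculate_throughput();
            results.add_metric(metric);
        }

        // Both export paths stay consistent: a header plus two rows in CSV,
        // and two entries in the "benchmarks" array in JSON.
        assert_eq!(results.to_csv().lines().count(), 3);
        assert_eq!(results.to_json()["benchmarks"].as_array().unwrap().len(), 2);
    }
}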