// benches/single_ops.rs
use criterion::{criterion_group, criterion_main, Criterion, BenchmarkId, BatchSize};
mod common;
use common::*;
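
// Shared helpers live in benches/common: BackendType/BenchmarkBackend wrap the
// storage backends under test, DataGenerator produces deterministic keys and
// values, and the setup_populated_backend* helpers build pre-filled datasets.
// Typically run with `cargo bench --bench single_ops`, assuming the bench target
// is declared with `harness = false` in Cargo.toml as Criterion expects.
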
/// Benchmark string SET operations
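/// Keys and values are built in the `iter_batched` setup closure, so only the
/// `set` call itself is timed. Each iteration writes a 100-byte value under a
/// key drawn at random from a 100,000-key space.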
fn bench_string_set(c: &mut Criterion) {
let mut group = c.benchmark_group("single_ops/strings/set");
for backend_type in BackendType::all() {
let backend = BenchmarkBackend::new(backend_type).expect("Failed to create backend");
let mut generator = DataGenerator::new(42);
group.bench_with_input(
BenchmarkId::new(backend.name(), "100bytes"),
&backend,
|b, backend| {
b.iter_batched(
|| {
let key = generator.generate_key("bench:key", rand::random::<usize>() % 100000);
let value = generator.generate_value(100);
(key, value)
},
|(key, value)| {
backend.storage.set(key, value).unwrap();
},
BatchSize::SmallInput
);
}
);
}
group.finish();
}
/// Benchmark string GET operations
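/// Runs against a backend pre-populated with 10,000 keys of 100-byte values;
/// every lookup targets an existing key, so this measures the hit path.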
fn bench_string_get(c: &mut Criterion) {
let mut group = c.benchmark_group("single_ops/strings/get");
for backend_type in BackendType::all() {
// Pre-populate with 10K keys
let backend = setup_populated_backend(backend_type, 10_000, 100)
.expect("Failed to setup backend");
let generator = DataGenerator::new(42);
group.bench_with_input(
BenchmarkId::new(backend.name(), "100bytes"),
&backend,
|b, backend| {
b.iter_batched(
|| {
let key_id = rand::random::<usize>() % 10_000;
generator.generate_key("bench:key", key_id)
},
|key| {
backend.storage.get(&key).unwrap();
},
BatchSize::SmallInput
);
}
);
}
group.finish();
}
/// Benchmark string DEL operations
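/// DEL consumes the key, so each iteration creates a fresh backend holding
/// exactly one key inside the setup closure; that setup cost is excluded from
/// the timing, which covers only the `del` call.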
fn bench_string_del(c: &mut Criterion) {
let mut group = c.benchmark_group("single_ops/strings/del");
for backend_type in BackendType::all() {
group.bench_with_input(
BenchmarkId::new(backend_type.name(), "100bytes"),
&backend_type,
|b, &backend_type| {
b.iter_batched(
|| {
// Create fresh backend with one key for each iteration
let backend = BenchmarkBackend::new(backend_type).unwrap();
let mut generator = DataGenerator::new(42);
let key = generator.generate_key("bench:key", 0);
let value = generator.generate_value(100);
backend.storage.set(key.clone(), value).unwrap();
(backend, key)
},
|(backend, key)| {
backend.storage.del(key).unwrap();
},
BatchSize::SmallInput
);
}
);
}
group.finish();
}
/// Benchmark string EXISTS operations
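/// Uses the same 10,000-key populated setup as GET, so every probe hits an
/// existing key and only the positive path is measured.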
fn bench_string_exists(c: &mut Criterion) {
let mut group = c.benchmark_group("single_ops/strings/exists");
for backend_type in BackendType::all() {
let backend = setup_populated_backend(backend_type, 10_000, 100)
.expect("Failed to setup backend");
let generator = DataGenerator::new(42);
group.bench_with_input(
BenchmarkId::new(backend.name(), "100bytes"),
&backend,
|b, backend| {
b.iter_batched(
|| {
let key_id = rand::random::<usize>() % 10_000;
generator.generate_key("bench:key", key_id)
},
|key| {
backend.storage.exists(&key).unwrap();
},
BatchSize::SmallInput
);
}
);
}
group.finish();
}
/// Benchmark hash HSET operations
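/// Writes one 100-byte field value into one of 1,000 hash keys; the field name
/// is drawn from 100 candidates, so later iterations can overwrite earlier ones.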
fn bench_hash_hset(c: &mut Criterion) {
let mut group = c.benchmark_group("single_ops/hashes/hset");
for backend_type in BackendType::all() {
let backend = BenchmarkBackend::new(backend_type).expect("Failed to create backend");
let mut generator = DataGenerator::new(42);
group.bench_with_input(
BenchmarkId::new(backend.name(), "single_field"),
&backend,
|b, backend| {
b.iter_batched(
|| {
let key = generator.generate_key("bench:hash", rand::random::<usize>() % 1000);
let field = format!("field{}", rand::random::<usize>() % 100);
let value = generator.generate_value(100);
(key, field, value)
},
|(key, field, value)| {
backend.storage.hset(&key, vec![(field, value)]).unwrap();
},
BatchSize::SmallInput
);
}
);
}
group.finish();
}
/// Benchmark hash HGET operations
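/// Reads a single field from a backend pre-populated with 1,000 hashes of 10
/// fields each; key and field ranges match the populated data, so every lookup hits.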
fn bench_hash_hget(c: &mut Criterion) {
let mut group = c.benchmark_group("single_ops/hashes/hget");
for backend_type in BackendType::all() {
// Pre-populate with hashes
let backend = setup_populated_backend_hashes(backend_type, 1_000, 10, 100)
.expect("Failed to setup backend");
let generator = DataGenerator::new(42);
group.bench_with_input(
BenchmarkId::new(backend.name(), "single_field"),
&backend,
|b, backend| {
b.iter_batched(
|| {
let key = generator.generate_key("bench:hash", rand::random::<usize>() % 1_000);
let field = format!("field{}", rand::random::<usize>() % 10);
(key, field)
},
|(key, field)| {
backend.storage.hget(&key, &field).unwrap();
},
BatchSize::SmallInput
);
}
);
}
group.finish();
}
/// Benchmark hash HGETALL operations
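/// Retrieves all 10 fields of a randomly chosen pre-populated hash in a single call.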
fn bench_hash_hgetall(c: &mut Criterion) {
let mut group = c.benchmark_group("single_ops/hashes/hgetall");
for backend_type in BackendType::all() {
let backend = setup_populated_backend_hashes(backend_type, 1_000, 10, 100)
.expect("Failed to setup backend");
let generator = DataGenerator::new(42);
group.bench_with_input(
BenchmarkId::new(backend.name(), "10_fields"),
&backend,
|b, backend| {
b.iter_batched(
|| {
generator.generate_key("bench:hash", rand::random::<usize>() % 1_000)
},
|key| {
backend.storage.hgetall(&key).unwrap();
},
BatchSize::SmallInput
);
}
);
}
group.finish();
}
/// Benchmark hash HDEL operations
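/// As with DEL, the deleted field must exist, so each iteration rebuilds a
/// single 10-field hash in the setup closure; only the `hdel` call is timed.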
fn bench_hash_hdel(c: &mut Criterion) {
let mut group = c.benchmark_group("single_ops/hashes/hdel");
for backend_type in BackendType::all() {
group.bench_with_input(
BenchmarkId::new(backend_type.name(), "single_field"),
&backend_type,
|b, &backend_type| {
b.iter_batched(
|| {
let backend = setup_populated_backend_hashes(backend_type, 1, 10, 100).unwrap();
let generator = DataGenerator::new(42);
let key = generator.generate_key("bench:hash", 0);
let field = format!("field{}", rand::random::<usize>() % 10);
(backend, key, field)
},
|(backend, key, field)| {
backend.storage.hdel(&key, vec![field]).unwrap();
},
BatchSize::SmallInput
);
}
);
}
group.finish();
}
/// Benchmark hash HEXISTS operations
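/// Mirrors HGET's setup; every probed field exists, so only the positive path is measured.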
fn bench_hash_hexists(c: &mut Criterion) {
let mut group = c.benchmark_group("single_ops/hashes/hexists");
for backend_type in BackendType::all() {
let backend = setup_populated_backend_hashes(backend_type, 1_000, 10, 100)
.expect("Failed to setup backend");
let generator = DataGenerator::new(42);
group.bench_with_input(
BenchmarkId::new(backend.name(), "single_field"),
&backend,
|b, backend| {
b.iter_batched(
|| {
let key = generator.generate_key("bench:hash", rand::random::<usize>() % 1_000);
let field = format!("field{}", rand::random::<usize>() % 10);
(key, field)
},
|(key, field)| {
backend.storage.hexists(&key, &field).unwrap();
},
BatchSize::SmallInput
);
}
);
}
group.finish();
}
/// Benchmark list LPUSH operations
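/// Pushes a single 100-byte element onto one of 1,000 list keys; since nothing
/// is popped, the lists grow over the course of the run.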
fn bench_list_lpush(c: &mut Criterion) {
let mut group = c.benchmark_group("single_ops/lists/lpush");
for backend_type in BackendType::all() {
let backend = BenchmarkBackend::new(backend_type).expect("Failed to create backend");
let mut generator = DataGenerator::new(42);
group.bench_with_input(
BenchmarkId::new(backend.name(), "single_element"),
&backend,
|b, backend| {
b.iter_batched(
|| {
let key = generator.generate_key("bench:list", rand::random::<usize>() % 1000);
let element = generator.generate_value(100);
(key, element)
},
|(key, element)| {
backend.storage.lpush(&key, vec![element]).unwrap();
},
BatchSize::SmallInput
);
}
);
}
group.finish();
}
/// Benchmark list RPUSH operations
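/// Mirror of LPUSH, appending at the tail instead of the head.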
fn bench_list_rpush(c: &mut Criterion) {
let mut group = c.benchmark_group("single_ops/lists/rpush");
for backend_type in BackendType::all() {
let backend = BenchmarkBackend::new(backend_type).expect("Failed to create backend");
let mut generator = DataGenerator::new(42);
group.bench_with_input(
BenchmarkId::new(backend.name(), "single_element"),
&backend,
|b, backend| {
b.iter_batched(
|| {
let key = generator.generate_key("bench:list", rand::random::<usize>() % 1000);
let element = generator.generate_value(100);
(key, element)
},
|(key, element)| {
backend.storage.rpush(&key, vec![element]).unwrap();
},
BatchSize::SmallInput
);
}
);
}
group.finish();
}
/// Benchmark list LPOP operations
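/// Each iteration rebuilds a single pre-filled list (100 elements) in the setup
/// closure so the pop never hits an empty list; only the `lpop` call is timed.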
fn bench_list_lpop(c: &mut Criterion) {
let mut group = c.benchmark_group("single_ops/lists/lpop");
for backend_type in BackendType::all() {
group.bench_with_input(
BenchmarkId::new(backend_type.name(), "single_element"),
&backend_type,
|b, &backend_type| {
b.iter_batched(
|| {
let backend = setup_populated_backend_lists(backend_type, 1, 100, 100).unwrap();
let generator = DataGenerator::new(42);
let key = generator.generate_key("bench:list", 0);
(backend, key)
},
|(backend, key)| {
backend.storage.lpop(&key, 1).unwrap();
},
BatchSize::SmallInput
);
}
);
}
group.finish();
}
/// Benchmark list RPOP operations
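/// Mirror of LPOP, popping from the tail of a freshly rebuilt 100-element list.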
fn bench_list_rpop(c: &mut Criterion) {
let mut group = c.benchmark_group("single_ops/lists/rpop");
for backend_type in BackendType::all() {
group.bench_with_input(
BenchmarkId::new(backend_type.name(), "single_element"),
&backend_type,
|b, &backend_type| {
b.iter_batched(
|| {
let backend = setup_populated_backend_lists(backend_type, 1, 100, 100).unwrap();
let generator = DataGenerator::new(42);
let key = generator.generate_key("bench:list", 0);
(backend, key)
},
|(backend, key)| {
backend.storage.rpop(&key, 1).unwrap();
},
BatchSize::SmallInput
);
}
);
}
group.finish();
}
/// Benchmark list LRANGE operations
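/// Reads the first 10 elements (indices 0..=9) of a randomly chosen pre-populated list.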
fn bench_list_lrange(c: &mut Criterion) {
let mut group = c.benchmark_group("single_ops/lists/lrange");
for backend_type in BackendType::all() {
let backend = setup_populated_backend_lists(backend_type, 1_000, 100, 100)
.expect("Failed to setup backend");
let generator = DataGenerator::new(42);
group.bench_with_input(
BenchmarkId::new(backend.name(), "10_elements"),
&backend,
|b, backend| {
b.iter_batched(
|| {
generator.generate_key("bench:list", rand::random::<usize>() % 1_000)
},
|key| {
backend.storage.lrange(&key, 0, 9).unwrap();
},
BatchSize::SmallInput
);
}
);
}
group.finish();
}
criterion_group!(
    benches,
    bench_string_set,
    bench_string_get,
    bench_string_del,
    bench_string_exists,
    bench_hash_hset,
    bench_hash_hget,
    bench_hash_hgetall,
    bench_hash_hdel,
    bench_hash_hexists,
    bench_list_lpush,
    bench_list_rpush,
    bench_list_lpop,
    bench_list_rpop,
    bench_list_lrange,
);
criterion_main!(benches);