benches: Add support to generate flamegraphs when we profile our benchmarks
parent c64567ba9b
commit fc6ff4288e

3 changed files with 91 additions and 5 deletions
@@ -52,6 +52,7 @@ http = "0.2.3"
 matrix-sdk-test = { version = "0.2.0", path = "../matrix_sdk_test" }
 indoc = "1.0.3"
 criterion = { version = "0.3.4", features = ["async", "async_futures", "html_reports"] }
+pprof = { version = "0.4.2", features = ["flamegraph"] }

 [[bench]]
 name = "crypto_bench"
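The new pprof dependency (with its "flamegraph" feature) is what does the actual sampling and SVG rendering. As a rough standalone sketch, not part of this commit, of the API that the profiler added below relies on, using pprof outside of Criterion could look something like this (the main function, the dummy workload, and the output path are made up for illustration; the pprof calls mirror the ones in perf.rs):

    use std::fs::File;

    use pprof::ProfilerGuard;

    fn main() {
        // Sample the current process 100 times per second while the guard is alive.
        let guard = ProfilerGuard::new(100).unwrap();

        // ... the code to profile goes here; a dummy workload for illustration ...
        let sum: u64 = (0..10_000_000u64).map(|i| i.wrapping_mul(i)).sum();
        println!("{}", sum);

        // Turn the collected samples into a report and render it as an SVG flamegraph.
        let report = guard.report().build().expect("Can't build profiling report");
        let file = File::create("flamegraph.svg").expect("Can't create flamegraph.svg");
        report.flamegraph(file).expect("Error writing flamegraph");
    }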
@@ -1,9 +1,8 @@
+mod perf;
+
 use std::convert::TryFrom;

-use criterion::{
-    async_executor::FuturesExecutor, criterion_group, criterion_main, BatchSize, BenchmarkId,
-    Criterion, Throughput,
-};
+use criterion::{async_executor::FuturesExecutor, *};

 use futures::executor::block_on;
 use matrix_sdk_common::{
@@ -101,5 +100,13 @@ pub fn keys_claiming(c: &mut Criterion) {
     group.finish()
 }

-criterion_group!(benches, keys_query, keys_claiming);
+fn criterion() -> Criterion {
+    Criterion::default().with_profiler(perf::FlamegraphProfiler::new(100))
+}
+
+criterion_group! {
+    name = benches;
+    config = criterion();
+    targets = keys_query, keys_claiming
+}
 criterion_main!(benches);
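With this change the benchmark binary builds its Criterion instance through criterion(), so the custom FlamegraphProfiler from perf.rs (added below) is attached to every benchmark in the group; the 100 passed to FlamegraphProfiler::new is the sampling frequency forwarded to pprof's ProfilerGuard (samples per second).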
matrix_sdk_crypto/benches/perf.rs (new file, 78 lines)

@@ -0,0 +1,78 @@
//! This is a simple Criterion Profiler implementation using pprof.
//!
//! It's mostly a direct copy from here: https://www.jibbow.com/posts/criterion-flamegraphs/
use std::{fs::File, os::raw::c_int, path::Path};

use criterion::profiler::Profiler;
use pprof::ProfilerGuard;

/// Small custom profiler that can be used with Criterion to create a flamegraph for benchmarks.
/// Also see [the Criterion documentation on this][custom-profiler].
///
/// ## Example on how to enable the custom profiler:
///
/// ```
/// mod perf;
/// use perf::FlamegraphProfiler;
///
/// fn fibonacci_profiled(criterion: &mut Criterion) {
///     // Use the criterion struct as normal here.
/// }
///
/// fn custom() -> Criterion {
///     Criterion::default().with_profiler(FlamegraphProfiler::new(100))
/// }
///
/// criterion_group! {
///     name = benches;
///     config = custom();
///     targets = fibonacci_profiled
/// }
/// ```
///
/// The neat thing about this is that it will sample _only_ the benchmark, and not other stuff like
/// the setup process.
///
/// Further, it will only kick in if `--profile-time <time>` is passed to the benchmark binary.
/// A flamegraph will be created for each individual benchmark in its report directory under
/// `profile/flamegraph.svg`.
///
/// [custom-profiler]: https://bheisler.github.io/criterion.rs/book/user_guide/profiling.html#implementing-in-process-profiling-hooks
pub struct FlamegraphProfiler<'a> {
    frequency: c_int,
    active_profiler: Option<ProfilerGuard<'a>>,
}

impl<'a> FlamegraphProfiler<'a> {
    pub fn new(frequency: c_int) -> Self {
        FlamegraphProfiler {
            frequency,
            active_profiler: None,
        }
    }
}

impl<'a> Profiler for FlamegraphProfiler<'a> {
    fn start_profiling(&mut self, _benchmark_id: &str, _benchmark_dir: &Path) {
        self.active_profiler = Some(ProfilerGuard::new(self.frequency).unwrap());
    }

    fn stop_profiling(&mut self, _benchmark_id: &str, benchmark_dir: &Path) {
        std::fs::create_dir_all(benchmark_dir)
            .expect("Can't create a directory to store the benchmarking report");

        let flamegraph_path = benchmark_dir.join("flamegraph.svg");

        let flamegraph_file = File::create(&flamegraph_path)
            .expect("File system error while creating flamegraph.svg");

        if let Some(profiler) = self.active_profiler.take() {
            profiler
                .report()
                .build()
                .expect("Can't build profiling report")
                .flamegraph(flamegraph_file)
                .expect("Error writing flamegraph");
        }
    }
}
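With all of this in place, a flamegraph for a single benchmark should be obtainable with something along the lines of `cargo bench --bench crypto_bench -- --profile-time 10` (optionally followed by a benchmark name such as `keys_query` to filter). Per the doc comment above, the SVG then lands in that benchmark's Criterion report directory under `profile/flamegraph.svg`, typically somewhere below `target/criterion/`; the exact invocation isn't spelled out in this commit, so this is an assumption based on Criterion's standard CLI.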