[TASK] Extended the benchmarking tools to give information about how much time each part takes

2022-12-02 19:03:05 +01:00
parent b9c24407a3
commit 78a905f9de


@@ -3,16 +3,14 @@ use crate::day1::Day1;
use crate::day2::Day2;
use crate::day_solver::DaySolver;
#[macro_use] extern crate lazy_static;
mod util;
mod day1;
mod day_solver;
mod day2;
const MAX_DAY: u8 = 1;
const BENCHMARK_AMOUNT: u32 = 100;
const MAX_DAY: u8 = 2;
const DEFAULT_BENCHMARK_AMOUNT: u32 = 100;
fn build_day_solver(day: u8) -> Option<Box<dyn DaySolver>> {
match day {
@@ -22,25 +20,55 @@ fn build_day_solver(day: u8) -> Option<Box<dyn DaySolver>> {
}
}
fn solve(day: u8, silent: bool) {
fn bench<F, K>(mut f: F) -> (K, u128)
where F: FnMut() -> K {
let now = Instant::now();
let res = f();
(res, now.elapsed().as_micros())
}
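The new bench helper runs a closure, times it with Instant, and returns the closure's result together with the elapsed microseconds. A minimal call-site sketch follows; slow_sum and demo are made-up names used only for illustration and are not part of the commit.

// Sketch only: how the bench() helper above is meant to be used.
fn slow_sum() -> u64 { (0..1_000_000u64).sum() }

fn demo() {
    // bench() hands back both the computed value and the time it took to produce it.
    let (result, micros) = bench(|| slow_sum());
    println!("slow_sum() = {} in {} μs", result, micros);
}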
let solver = build_day_solver(day);
fn solve(day: u8, silent: bool) -> AocBenchResult {
let now = Instant::now();
let (solver, init_time) = bench(|| build_day_solver(day));
let part1_time: u128;
let part2_time: u128;
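// Assigned in the Some arm below; the None arm panics, so the compiler accepts the deferred initialization.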
match solver {
Some(mut s) => {
let part1 = s.solve_part1();
let (part1, pt1_time) = bench(|| s.solve_part1());
part1_time = pt1_time;
if !silent {
println!("Day {} Part 1: {}", day, part1);
}
let part2 = s.solve_part2();
let (part2, pt2_time) = bench(|| s.solve_part2());
part2_time = pt2_time;
if !silent {
println!("Day {} Part 1: {}", day, part2);
}
},
None => println!("This day is not yet implemented")
None => panic!("This day is not yet implemented")
}
return AocBenchResult {
init: init_time,
part1: part1_time,
part2: part2_time,
total: now.elapsed().as_micros()
}
}
#[derive(Copy, Clone)]
struct AocBenchResult {
init: u128,
part1: u128,
part2: u128,
total: u128
}
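With solve now returning an AocBenchResult, the per-phase timings can be read off directly at any call site. A small sketch; the function name report_day2 and the day number are arbitrary and not part of the commit.

// Sketch: run day 2 once, silently, and inspect each phase.
fn report_day2() {
    let r = solve(2, true);
    println!("init: {} μs, part 1: {} μs, part 2: {} μs, total: {} μs",
        r.init, r.part1, r.part2, r.total);
}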
fn main() {
let args: Vec<String> = std::env::args().collect();
@@ -49,49 +77,82 @@ fn main() {
let single_day = day_arg_idx.is_some();
let day = if single_day { args[day_arg_idx.unwrap() + 1].parse::<u8>().unwrap() } else { 0 };
let benchmark = args.contains(&String::from("--bench")) || args.contains(&String::from("-b"));
let benchmark_arg_idx_option = args.iter().position(|a| a == "--bench").or(
args.iter().position(|a| a == "-b"));
let mut bench_results: Vec<u128> = Vec::new();
let benchmark = benchmark_arg_idx_option.is_some();
let bench_amount: u32 = if let Some(benchmark_arg_idx) = benchmark_arg_idx_option {
args.get(benchmark_arg_idx + 1)
.map_or(DEFAULT_BENCHMARK_AMOUNT, |r| r.parse::<u32>().unwrap_or(DEFAULT_BENCHMARK_AMOUNT))
} else {
DEFAULT_BENCHMARK_AMOUNT
};
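The --bench/-b flag now accepts an optional round count; a missing or unparseable value falls back to DEFAULT_BENCHMARK_AMOUNT. The same fallback, pulled out into a hypothetical standalone helper purely for illustration:

// Hypothetical helper (not in the commit) mirroring the map_or/unwrap_or fallback above.
fn parse_bench_amount(arg: Option<&String>) -> u32 {
    arg.map_or(DEFAULT_BENCHMARK_AMOUNT, |a| a.parse::<u32>().unwrap_or(DEFAULT_BENCHMARK_AMOUNT))
}
// "--bench"      -> 100 (the default)
// "--bench 500"  -> 500
// "--bench oops" -> 100 (parse error falls back to the default)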
let mut bench_results: Vec<AocBenchResult> = Vec::new();
// This is essentially the warmup for the benchmark:
run_once(single_day, day, false, &mut bench_results);
let first_run_time = bench_results[0];
let first_run_bench = bench_results[0].to_owned();
let (_, total_time) = bench(||
if benchmark {
// Ignore the warmup run in the rest of the benchmark:
bench_results.clear();
for _ in 0..BENCHMARK_AMOUNT {
run_once(single_day, day, true,&mut bench_results);
}
}
let avg_runtime: u128 = bench_results.iter().sum::<u128>() / (bench_results.len() as u128);
for _ in 0..bench_amount {
run_once(single_day, day, true, &mut bench_results);
}
});
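// total_time, measured by the bench() wrapper above, spans the whole benchmark loop; the earlier warmup run happens outside of it.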
println!("Executed {} rounds; Execution took {} μs {}", BENCHMARK_AMOUNT, avg_runtime, if benchmark { "on average" } else { "" });
if benchmark {
bench_results.sort();
println!("Min: {} μs, Max: {} μs, Median: {} μs",
bench_results[0],
bench_results[bench_results.len() - 1],
bench_results[bench_results.len() / 2]
);
println!("First time took {} μs", first_run_time);
}
}
fn run_once(single_day: bool, day: u8, silent: bool, bench_results: &mut Vec<u128>) {
println!("Executed {} rounds;", bench_results.len());
print_bench_result(&bench_results, |b| b.total, "Execution");
print_bench_result(&bench_results, |b| b.init, "Initialization");
print_bench_result(&bench_results, |b| b.part1, "Part 1");
print_bench_result(&bench_results, |b| b.part2, "Part 2");
let now = Instant::now();
if single_day {
solve(day, silent);
println!("First time took {} μs (init {} μs, part 1: {} μs, part 2: {} μs", first_run_bench.total, first_run_bench.init, first_run_bench.part1, first_run_bench.part2);
println!("Total execute time (of the entire benchmark): {} μs ({} μs on average per round)", total_time, total_time / (DEFAULT_BENCHMARK_AMOUNT as u128));
} else {
solve_all(silent);
println!("Execution took {} μs (init {} μs, part 1: {} μs, part 2: {} μs", first_run_bench.total, first_run_bench.init, first_run_bench.part1, first_run_bench.part2);
}
bench_results.push(now.elapsed().as_micros());
}
fn solve_all(silent: bool) {
for day in 1..(MAX_DAY + 1) {
solve(day, silent);
}
fn print_bench_result<F>(bench_results: &Vec<AocBenchResult>, f: F, bench_part_description: &str)
where
F: FnMut(&AocBenchResult) -> u128 {
let mut benches: Vec<u128> = bench_results.iter().map(f).collect();
benches.sort();
let avg_runtime: u128 = benches.iter().sum::<u128>() / (bench_results.len() as u128);
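// benches is sorted ascending, so the first/last elements are the min/max and the middle element serves as the median.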
println!("{} took {} μs {} (Min: {} μs, Max: {} μs, Median: {} μs)", bench_part_description, avg_runtime, if bench_results.len() > 1 { "on average"} else {""},
benches[0], benches[benches.len() - 1], benches[benches.len() / 2])
}
fn run_once(single_day: bool, day: u8, silent: bool, bench_results: &mut Vec<AocBenchResult>) {
let bench_result = if single_day {
solve(day, silent)
} else {
solve_all(silent)
};
bench_results.push(bench_result);
}
fn solve_all(silent: bool) -> AocBenchResult {
let mut bench_results = Vec::new();
for day in 1..(MAX_DAY + 1) {
bench_results.push(solve(day, silent));
}
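// Aggregate across days: each field is the sum of the per-day timings, so `total` here is the summed per-day wall time rather than a fresh end-to-end measurement.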
return AocBenchResult {
init: bench_results.iter().map(|t| t.init).sum(),
part1: bench_results.iter().map(|t| t.part1).sum(),
part2: bench_results.iter().map(|t| t.part2).sum(),
total: bench_results.iter().map(|t| t.total).sum(),
}
}