use std::fs;
use std::path::PathBuf;

use crate::gcs;
use crate::types::benchmark_config::BenchmarkConfig;
use crate::utils::copy_dir_contents;

/// Prepares inputs for a benchmark.
/// If the benchmark needs inputs and a local input directory is provided,
/// it copies the contents from the local directory to the expected input location.
/// If the benchmark needs inputs and no local input directory is provided,
/// it downloads the inputs from GCS.
fn prepare_inputs(bench: &BenchmarkConfig, input_dir: Option<&str>) {
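    // Benchmarks that don't take external inputs need no preparation.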
    if !bench.needs_inputs() {
        return;
    }

    let benchmark_input_dir = PathBuf::from(
        bench.input_dir.as_ref().expect("benchmark needs inputs but input_dir is not set"),
    );

    // Create the input directory if it doesn't exist.
    fs::create_dir_all(&benchmark_input_dir).unwrap_or_else(|e| {
        panic!("Failed to create directory {}: {}", benchmark_input_dir.display(), e)
    });

    if let Some(local_dir) = input_dir {
        let local_path = PathBuf::from(local_dir);
        if !local_path.exists() {
            panic!("Input directory does not exist: {}", local_dir);
        }

        // Copy local directory contents to the benchmark input directory.
        copy_dir_contents(&local_path, &benchmark_input_dir);

        println!("Copied inputs from {} to {}", local_dir, benchmark_input_dir.display());
    } else {
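        // No local input directory was provided; fetch the inputs from GCS.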
        gcs::download_inputs(bench.name, &benchmark_input_dir);

        // `create_dir_all` above guarantees the directory exists, so verify that the
        // download actually produced something rather than merely that the path exists.
        let downloaded_any = fs::read_dir(&benchmark_input_dir)
            .map(|mut entries| entries.next().is_some())
            .unwrap_or(false);
        if !downloaded_any {
            panic!(
                "Failed to download inputs for {}: no files in {}",
                bench.name,
                benchmark_input_dir.display()
            );
        }
    }
}

/// Runs a single benchmark, panicking if it fails.
fn run_single_benchmark(bench: &BenchmarkConfig) {
    println!("Running: {}", bench.name);

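    // cmd_args is expected to hold the full cargo invocation for this benchmark
    // (for example a "bench" subcommand plus package or filter flags).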
    let output = std::process::Command::new("cargo")
        .args(bench.cmd_args)
        .output()
        .unwrap_or_else(|e| panic!("Failed to execute {}: {}", bench.name, e));

    if !output.status.success() {
        panic!("\nBenchmark {} failed:\n{}", bench.name, String::from_utf8_lossy(&output.stderr));
    }
}

/// Collects benchmark results from criterion output and saves them to the output directory.
fn save_benchmark_results(_bench: &BenchmarkConfig, output_dir: &str) {
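    // Note: this scans everything under target/criterion, so each call rewrites the
    // results of every benchmark that has run so far, not just the current one.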
    let criterion_base = PathBuf::from("target/criterion");
    let Ok(entries) = fs::read_dir(&criterion_base) else { return };

    // Collect all estimates files.
    for entry in entries.flatten() {
        let path = entry.path();
        if !path.is_dir() {
            continue;
        }

        // Criterion writes the latest run's statistics to `<name>/new/estimates.json`;
        // save a pretty-printed copy of each one found.
        let estimates_path = path.join("new/estimates.json");
        if let Ok(data) = fs::read_to_string(&estimates_path) {
            if let Ok(json) = serde_json::from_str::<serde_json::Value>(&data) {
                if let Ok(pretty) = serde_json::to_string_pretty(&json) {
                    let bench_name = path.file_name().unwrap().to_string_lossy();
                    let dest =
                        PathBuf::from(output_dir).join(format!("{}_estimates.json", bench_name));
                    if fs::write(&dest, pretty).is_ok() {
                        println!("Saved results: {}", dest.display());
                    }
                }
            }
        }
    }
}

/// Runs the given benchmarks, preparing their inputs (downloading from GCS if needed)
/// and saving the results to the output directory.
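///
/// A hypothetical invocation (the `FIB_BENCH`/`HASH_BENCH` configs and the directory
/// names below are illustrative only, not part of this crate):
///
/// ```ignore
/// let benches = [&FIB_BENCH, &HASH_BENCH];
/// // Use locally staged inputs instead of downloading them from GCS.
/// run_benchmarks(&benches, Some("local_inputs"), "bench_results");
/// ```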
pub fn run_benchmarks(benchmarks: &[&BenchmarkConfig], input_dir: Option<&str>, output_dir: &str) {
    // Prepare inputs.
    for bench in benchmarks {
        prepare_inputs(bench, input_dir);
    }

    // Create output directory.
    fs::create_dir_all(output_dir).unwrap_or_else(|e| panic!("Failed to create output dir: {}", e));

    // Run benchmarks.
    for bench in benchmarks {
        run_single_benchmark(bench);
        save_benchmark_results(bench, output_dir);
    }

    println!("\n✓ All benchmarks completed! Results saved to: {}", output_dir);
}