From 85fb82fcbbf7ee643f7dbc7e071347dfe4126304 Mon Sep 17 00:00:00 2001
From: Flakebi
Date: Mon, 14 Dec 2020 22:06:14 +0100
Subject: [PATCH] Save junit xml

---
 .gitignore     |   1 +
 Cargo.lock     |  30 ++++++
 Cargo.toml     |   1 +
 src/lib.rs     | 280 ++++++++++++++++++++++++-------------------
 src/main.rs    |  33 ++++--
 src/slog_pg.rs |  43 ++++++++
 src/summary.rs | 167 +++++++++++++++++++++++++++++
 7 files changed, 402 insertions(+), 153 deletions(-)
 create mode 100644 src/slog_pg.rs
 create mode 100644 src/summary.rs

diff --git a/.gitignore b/.gitignore
index 7288eed..ab88eba 100644
--- a/.gitignore
+++ b/.gitignore
@@ -2,3 +2,4 @@
 /fails
 /log.json
 /summary.csv
+/summary.xml
diff --git a/Cargo.lock b/Cargo.lock
index 273166d..10aa337 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -247,6 +247,7 @@ dependencies = [
  "futures",
  "genawaiter",
  "indicatif",
+ "junit-report",
  "num_cpus",
  "once_cell",
  "rand",
@@ -262,6 +263,17 @@ dependencies = [
  "tokio",
 ]
 
+[[package]]
+name = "derive-getters"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "16dc4e2517f08ca167440ccb11023c1308ee19a4022d7b03c0e652f971171869"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
 [[package]]
 name = "dirs"
 version = "2.0.2"
@@ -484,6 +496,18 @@ version = "0.4.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "dc6f3ad7b9d11a0c00842ff8de1b60ee58661048eb8049ed33c73594f359d7e6"
 
+[[package]]
+name = "junit-report"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e4e00ad2de771fc4988af88b02cbd618c08c17920208c35c4bbfe67ccfab31eb"
+dependencies = [
+ "chrono",
+ "derive-getters",
+ "thiserror",
+ "xml-rs",
+]
+
 [[package]]
 name = "lazy_static"
 version = "1.4.0"
@@ -1362,3 +1386,9 @@ name = "winapi-x86_64-pc-windows-gnu"
 version = "0.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
+
+[[package]]
+name = "xml-rs"
+version = "0.8.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b07db065a5cf61a7e4ba64f29e67db906fb1787316516c4e6e5ff0fea1efcd8a"
diff --git a/Cargo.toml b/Cargo.toml
index cc8b298..4626f37 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -27,6 +27,7 @@ once_cell = "1"
 rand = "0.7"
 serde = { version = "1", features = ["derive"] }
 serde_json = "1"
+junit-report = "0.4"
 slog = { version = "2", features = ["max_level_trace", "release_max_level_debug"] }
 slog-async = { version = "2", optional = true }
 slog-envlogger = { version = "2", optional = true }
diff --git a/src/lib.rs b/src/lib.rs
index d87e037..198c0ec 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -24,8 +24,17 @@ use tokio::prelude::*;
 use tokio::process::{Child, ChildStderr, ChildStdout, Command};
 use tokio::time::Sleep;
 
+pub mod slog_pg;
+pub mod summary;
+
+pub use summary::Summary;
+
 /// This many tests will be executed with a single deqp run.
 const BATCH_SIZE: usize = 1000;
+/// Name of the file where stderr is saved.
+const STDERR_FILE: &str = "stderr.txt";
+/// This many lines from stderr will be saved in the junit xml result file.
+const LAST_STDERR_LINES: usize = 5;
 
 static RESULT_VARIANTS: Lazy<HashMap<&'static str, TestResultType>> = Lazy::new(|| {
     let mut result_variants = HashMap::new();
@@ -38,6 +47,15 @@ static RESULT_VARIANTS: Lazy<HashMap<&'static str, TestResultType>> = Lazy::new(|| {
     result_variants
 });
 
+pub static PROGRESS_BAR: Lazy<ProgressBar> = Lazy::new(|| {
+    let bar = ProgressBar::new(1);
+    bar.set_style(
+        indicatif::ProgressStyle::default_bar().template("{wide_bar} job {pos}/{len}{msg} ({eta})"),
+    );
+    bar.enable_steady_tick(1000);
+    bar
+});
+
 #[derive(Clone, Debug)]
 #[cfg_attr(feature = "bin", derive(clap::Clap))]
 #[cfg_attr(feature = "bin", clap(version = clap::crate_version!(), author = clap::crate_authors!(),
@@ -51,9 +69,9 @@ pub struct Options {
     /// This can uncover bugs that are not detected normally.
     #[cfg_attr(feature = "bin", clap(long))]
     pub shuffle: bool,
-    /// Show progress bar.
-    #[cfg_attr(feature = "bin", clap(short, long))]
-    pub progress: bool,
+    /// Hide progress bar.
+    #[cfg_attr(feature = "bin", clap(short = 'p', long))]
+    pub no_progress: bool,
     /// Start of test range from test list.
     #[cfg_attr(feature = "bin", clap(long))]
     pub start: Option<usize>,
@@ -116,13 +134,13 @@ pub enum TestResultType {
     Flake(Box<TestResultType>),
 }
 
-#[derive(Debug, Deserialize, Serialize)]
+#[derive(Clone, Debug, Deserialize, Serialize)]
 pub struct TestResult {
     pub stdout: String,
     pub variant: TestResultType,
 }
 
-#[derive(Debug, Deserialize, Serialize)]
+#[derive(Clone, Debug, Deserialize, Serialize)]
 pub struct TestResultData<'a> {
     /// Name of the deqp test.
     pub name: &'a str,
@@ -212,18 +230,6 @@ pub enum RunTestListEvent<'a, 'list> {
     DeqpError(DeqpErrorWithOutput),
 }
 
-/// Lines of the `summary.csv` file.
-#[derive(Clone, Debug, Deserialize, Serialize)]
-pub struct SummaryEntry<'a> {
-    /// Name of the deqp test.
-    pub name: &'a str,
-    pub result: TestResultType,
-    /// Reference into the run log.
-    ///
-    /// References a [`TestResultEntry`], the reference is `None` if the test was not executed.
-    pub run_id: Option<u64>,
-}
-
 /// Struct for the `run_log.json` file. Every line in this file is such an entry.
 #[derive(Debug, Deserialize, Serialize)]
 pub enum RunLogEntry<'a> {
@@ -310,15 +316,6 @@ struct RunTestListState<'a, 'list, S: Stream> {
     fail_dir: Option<String>,
 }
 
-/// Failure when writing the summary file
-#[derive(Debug, Error)]
-pub enum WriteSummaryError {
-    #[error("Failed to write summary file: {0}")]
-    WriteFile(#[source] csv::Error),
-    #[error("Failed to open summary file: {0}")]
-    OpenFile(#[source] csv::Error),
-}
-
 mod serde_io_error {
     use serde::{Deserialize, Deserializer, Serialize, Serializer};
 
@@ -611,7 +608,7 @@ impl<'a, 'list, S: Stream> RunTestListState<'a, 'list, S> {
         if let Some(dir_name) = &self.fail_dir {
             let fail_dir = self.options.fail_dir.as_ref().unwrap().join(dir_name);
             // Save stderr
-            match std::fs::File::create(fail_dir.join("stderr.txt")) {
+            match std::fs::File::create(fail_dir.join(STDERR_FILE)) {
                 Ok(mut f) => {
                     if let Err(e) = f.write_all(stderr.as_bytes()) {
                         error!(self.logger, "Failed to write stderr file"; "error" => %e);
@@ -626,22 +623,24 @@ impl<'a, 'list, S: Stream> RunTestListState<'a, 'list, S> {
 
     fn get_missing(&self, count: usize) -> Vec<RunTestListEvent<'a, 'list>> {
         let start = self.last_finished.map(|i| i + 1).unwrap_or_default();
-        (0..count).map(|i| {
-            RunTestListEvent::TestResult(ReproducibleTestResultData {
-                data: TestResultData {
-                    name: self.tests[start + i],
-                    result: TestResult {
-                        stdout: String::new(),
-                        variant: TestResultType::Missing,
-                    },
-                    start: OffsetDateTime::now_utc(),
-                    duration: Duration::new(0, 0),
-                    fail_dir: None,
-                },
-                run_list: &self.tests[..start + i + 1],
-                args: &self.options.args,
-            })
-        }).collect()
+        (0..count)
+            .map(|i| {
+                RunTestListEvent::TestResult(ReproducibleTestResultData {
+                    data: TestResultData {
+                        name: self.tests[start + i],
+                        result: TestResult {
+                            stdout: String::new(),
+                            variant: TestResultType::Missing,
+                        },
+                        start: OffsetDateTime::now_utc(),
+                        duration: Duration::new(0, 0),
+                        fail_dir: None,
+                    },
+                    run_list: &self.tests[..start + i + 1],
+                    args: &self.options.args,
+                })
+            })
+            .collect()
     }
 
     fn handle_test_start(&mut self, name: &str) -> Vec<RunTestListEvent<'a, 'list>> {
@@ -699,37 +698,42 @@ impl<'a, 'list, S: Stream> RunTestListState<'a, 'list, S> {
         self.running = None;
         self.save_fail_dir_stderr(&stderr);
 
-        if let Err(e) = result {
-            if let Some(cur_test) = self.cur_test {
-                let duration = OffsetDateTime::now_utc() - cur_test.1;
-                if self.fail_dir.is_none() {
-                    self.create_fail_dir(self.tests[cur_test.0]);
-                    self.save_fail_dir_stderr(&stderr);
-                }
-
-                // Continue testing
-                let run_list = &self.tests[..cur_test.0 + 1];
-                self.tests = &self.tests[cur_test.0 + 1..];
-
-                vec![RunTestListEvent::TestResult(ReproducibleTestResultData {
-                    data: TestResultData {
-                        name: run_list[cur_test.0],
-                        result: TestResult {
-                            stdout,
-                            variant: if matches!(e, DeqpError::Timeout) {
-                                TestResultType::Timeout
-                            } else {
-                                TestResultType::Crash
-                            },
-                        },
-                        start: cur_test.1,
-                        duration,
-                        fail_dir: self.fail_dir.clone(),
-                    },
-                    run_list,
-                    args: &self.options.args,
-                })]
-            } else if let Some(last_finished) = self.last_finished {
+        if let Some(cur_test) = self.cur_test {
+            let duration = OffsetDateTime::now_utc() - cur_test.1;
+            if self.fail_dir.is_none() {
+                self.create_fail_dir(self.tests[cur_test.0]);
+                self.save_fail_dir_stderr(&stderr);
+            }
+
+            if result.is_ok() {
+                warn!(self.logger, "test not finished but deqp exited successfully, count as failure";
+                    "cur_test" => self.tests[cur_test.0], "started" => %cur_test.1);
+            }
+
+            // Continue testing
+            let run_list = &self.tests[..cur_test.0 + 1];
+            self.tests = &self.tests[cur_test.0 + 1..];
+
+            vec![RunTestListEvent::TestResult(ReproducibleTestResultData {
+                data: TestResultData {
+                    name: run_list[cur_test.0],
+                    result: TestResult {
+                        stdout,
+                        variant: if matches!(result, Err(DeqpError::Timeout)) {
+                            TestResultType::Timeout
+                        } else {
+                            TestResultType::Crash
+                        },
+                    },
+                    start: cur_test.1,
+                    duration,
+                    fail_dir: self.fail_dir.clone(),
+                },
+                run_list,
+                args: &self.options.args,
+            })]
+        } else if let Err(e) = result {
+            if let Some(last_finished) = self.last_finished {
                 // No current test executed, so probably some tests are failing
                 // Mark rest of tests as missing
                 let r = self.get_missing(self.tests.len() - last_finished - 1);
@@ -745,27 +749,6 @@ impl<'a, 'list, S: Stream> RunTestListState<'a, 'list, S> {
                     stderr,
                 })]
             }
-        } else if let Some(cur_test) = self.cur_test {
-            let duration = OffsetDateTime::now_utc() - cur_test.1;
-            warn!(self.logger, "test not finished but deqp exited successful, count as failure";
-                "cur_test" => self.tests[cur_test.0], "started" => %cur_test.1);
-            let run_list = &self.tests[..cur_test.0 + 1];
-            self.tests = &self.tests[cur_test.0 + 1..];
-            debug!(self.logger, "LEFT OVER @@@@@@"; "tests" => ?self.tests);
-            vec![RunTestListEvent::TestResult(ReproducibleTestResultData {
-                data: TestResultData {
-                    name: run_list[cur_test.0],
-                    result: TestResult {
-                        stdout,
-                        variant: TestResultType::Crash,
-                    },
-                    start: cur_test.1,
-                    duration,
-                    fail_dir: self.fail_dir.clone(),
-                },
-                run_list,
-                args: &self.options.args,
-            })]
         } else {
             let r = if let Some(last_finished) = self.last_finished {
                 // Mark rest of tests as missing
@@ -1133,39 +1116,11 @@ pub fn run_test_list<'a, 'list>(
     })
 }
 
-/// Write summary csv and xml file.
-pub fn write_summary<'a>(
-    tests: &[&'a str],
-    summary: &HashMap<&'a str, SummaryEntry>,
-    csv_file: Option<&Path>,
-    xml_file: Option<&Path>,
-) -> Result<(), WriteSummaryError> {
-    if let Some(file) = csv_file {
-        // Write summary
-        let mut writer = csv::Writer::from_path(file).map_err(WriteSummaryError::OpenFile)?;
-        for t in tests {
-            let r = summary.get(t).map(Cow::Borrowed).unwrap_or_else(|| {
-                Cow::Owned(SummaryEntry {
-                    name: t,
-                    result: TestResultType::NotRun,
-                    run_id: None,
-                })
-            });
-            writer.serialize(r).map_err(WriteSummaryError::WriteFile)?;
-        }
-    }
-    // TODO Write xml
-    if let Some(_file) = xml_file {
-        // Write only failures
-    }
-    Ok(())
-}
-
 pub async fn run_tests_parallel<'a>(
     logger: &'a Logger,
     tests: &'a [&'a str],
     // Map test names to summary entries
-    summary: &mut HashMap<&'a str, SummaryEntry<'a>>,
+    summary: &mut Summary<'a>,
     options: &'a RunOptions,
     log_file: Option<&'a Path>,
     job_count: usize,
@@ -1183,9 +1138,11 @@ pub async fn run_tests_parallel<'a>(
     let mut log_entry_id: u64 = 0;
     if let Some(pb) = progress_bar {
         pb.set_length(pending_jobs.len() as u64);
-        pb.set_prefix("Test job");
     }
 
+    let mut fails = 0;
+    let mut crashes = 0;
+
     let mut log = if let Some(log_file) = log_file {
         match std::fs::File::create(log_file) {
             Ok(r) => Some(r),
@@ -1227,25 +1184,50 @@ pub async fn run_tests_parallel<'a>(
             RunLogEntry::TestResult(res) => {
                 res.id = log_entry_id;
                 log_entry_id += 1;
-                match summary.entry(res.data.name) {
+                match summary.0.entry(res.data.name) {
                     Entry::Occupied(mut entry) => {
-                        let old_id = entry.get().run_id;
+                        let old_id = entry.get().0.run_id;
                         // Merge result variants
-                        let old = entry.get().result.clone();
+                        let old = entry.get().0.result.clone();
                         let new = res.data.result.variant.clone();
                         let (result, take_new) = old.merge(new);
-                        entry.insert(SummaryEntry {
+                        entry.get_mut().0 = summary::SummaryEntry {
                             name: res.data.name,
                             result,
                             run_id: if take_new { Some(res.id) } else { old_id },
-                        });
+                        };
+                        if take_new {
+                            entry.get_mut().1 = Some(res.data.clone());
+                        }
                     }
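+                    // First result for this test: count and print failures
+                    // before recording the entry.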
                     Entry::Vacant(entry) => {
-                        entry.insert(SummaryEntry {
-                            name: res.data.name,
-                            result: res.data.result.variant.clone(),
-                            run_id: Some(res.id),
-                        });
+                        if res.data.result.variant.is_failure() {
+                            if res.data.result.variant == TestResultType::Crash {
+                                crashes += 1;
+                            } else {
+                                fails += 1;
+                            }
+                            if let Some(pb) = progress_bar {
+                                pb.println(format!(
+                                    "{}: {:?}",
+                                    res.data.name, res.data.result.variant
+                                ));
+                                // Show fails and crashes on progress bar
+                                pb.set_message(&format!(
+                                    "; fails: {}, crashes: {}",
+                                    fails, crashes
+                                ));
+                                pb.tick();
+                            }
+                        }
+                        entry.insert((
+                            summary::SummaryEntry {
+                                name: res.data.name,
+                                result: res.data.result.variant.clone(),
+                                run_id: Some(res.id),
+                            },
+                            Some(res.data.clone()),
+                        ));
                     }
                 }
             }
@@ -1285,6 +1267,10 @@ pub async fn run_tests_parallel<'a>(
             }
         }
     }
+
+    if let Some(pb) = progress_bar {
+        pb.finish_and_clear();
+    }
 }
 
 #[cfg(test)]
@@ -1304,11 +1290,19 @@ mod tests {
     }
 
     async fn check_tests(args: &[&str], expected: &[(&str, TestResultType)]) -> Result<()> {
+        check_tests_with_summary(args, expected, |_| {}).await
+    }
+
+    async fn check_tests_with_summary<F: for<'a> FnOnce(Summary<'a>)>(
+        args: &[&str],
+        expected: &[(&str, TestResultType)],
+        check: F,
+    ) -> Result<()> {
         let logger = create_logger();
         let run_options = RunOptions {
             args: args.iter().map(|s| s.to_string()).collect(),
             capture_dumps: true,
-            timeout: std::time::Duration::from_secs(5),
+            timeout: std::time::Duration::from_secs(2),
             fail_dir: None,
         };
@@ -1317,7 +1311,7 @@ mod tests {
         let tests = parse_test_file(&test_file);
         assert_eq!(tests.len(), 18, "Test size does not match");
 
-        let mut summary = HashMap::new();
+        let mut summary = Summary::default();
         run_tests_parallel(
             &logger,
             &tests,
@@ -1330,18 +1324,20 @@ mod tests {
         .await;
 
         assert_eq!(
-            summary.len(),
+            summary.0.len(),
             expected.len(),
             "Result length does not match"
         );
         for (t, r) in expected {
-            if let Some(r2) = summary.get(t) {
-                assert_eq!(r2.result, *r, "Test result does not match for test {}", t);
+            if let Some(r2) = summary.0.get(t) {
+                assert_eq!(r2.0.result, *r, "Test result does not match for test {}", t);
             } else {
                 panic!("Test {} has no result but expected {:?}", t, r);
             }
         }
 
+        check(summary);
+
         Ok(())
     }
 
@@ -1387,11 +1383,7 @@ mod tests {
 
     #[tokio::test]
     async fn test_b() -> Result<()> {
-        check_tests(
-            &["test/test-runner.sh", "logs/b", "logs/b-err", "1"],
-            &[],
-        )
-        .await?;
+        check_tests(&["test/test-runner.sh", "logs/b", "logs/b-err", "1"], &[]).await?;
 
         Ok(())
     }
@@ -1511,11 +1503,15 @@ mod tests {
             ("dEQP-VK.fragment_shader_interlock.basic.discard.ssbo.shading_rate_unordered.4xaa.sample_shading.1024x1024", TestResultType::NotSupported),
         ];
 
-        // TODO Check bisection result
-
-        check_tests(
+        check_tests_with_summary(
             &["test/bisect-test-runner.sh", "dEQP-VK.tessellation.primitive_discard.triangles_fractional_odd_spacing_ccw_valid_levels", "logs/d", "/dev/null", "1", "logs/a", "dev/null", "0"],
             &expected,
+            |summary| {
+                // TODO Check bisection result with get_test_results returned by check_tests
+                let res = summary.0.get("dEQP-VK.tessellation.primitive_discard.triangles_fractional_even_spacing_cw_point_mode").unwrap();
+                assert_eq!(res.1.as_ref().unwrap().fail_dir, None);
+                assert_eq!(res.0.run_id, Some(25));
+            }
         )
         .await?;
diff --git a/src/main.rs b/src/main.rs
index 84d7957..d4f44c4 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -1,5 +1,3 @@
-use std::collections::HashMap;
-
 use anyhow::Result;
 use clap::Clap;
 use deqp_runner::*;
@@ -18,13 +16,19 @@ async fn main() -> Result<()> {
 async fn real_main() -> Result<()> {
     let mut options: Options = Options::parse();
 
-    let does_log = std::env::var("RUST_LOG").is_ok();
-    // We can have either logging or a progress bar, choose logging by default
-    if !does_log && !options.progress {
+    if std::env::var("RUST_LOG").is_err() {
         std::env::set_var("RUST_LOG", "info");
     }
 
-    let logger = {
+    let logger = if !options.no_progress {
+        let drain = slog_term::FullFormat::new(deqp_runner::slog_pg::ProgressBarDecorator)
+            .build()
+            .fuse();
+        let drain = slog_envlogger::new(drain).fuse();
+        let drain = slog_async::Async::new(drain).build().fuse();
+
+        slog::Logger::root(drain, o!())
+    } else {
         let decorator = slog_term::TermDecorator::new().build();
         let drain = slog_term::CompactFormat::new(decorator).build().fuse();
         let drain = slog_envlogger::new(drain).fuse();
@@ -67,14 +71,14 @@ async fn real_main() -> Result<()> {
         fail_dir: Some(options.failures),
     };
 
-    let progress_bar = if !does_log && options.progress {
-        Some(indicatif::ProgressBar::new(1))
+    let progress_bar = if !options.no_progress {
+        Some(&*PROGRESS_BAR)
     } else {
         None
     };
 
     let job_count = options.jobs.unwrap_or_else(num_cpus::get);
-    let mut summary = HashMap::new();
+    let mut summary = Summary::default();
     tokio::select! {
         _ = run_tests_parallel(
             &logger,
@@ -83,14 +87,21 @@ async fn real_main() -> Result<()> {
             &run_options,
             Some(&options.log),
             job_count,
-            progress_bar.as_ref(),
+            progress_bar,
         ) => {}
         _ = tokio::signal::ctrl_c() => {
             info!(logger, "Killed by sigint");
         }
     }
 
-    write_summary(&tests, &summary, Some(&options.csv_summary), Some(&options.xml_summary))?;
+    summary::write_summary(
+        &logger,
+        &tests,
+        &summary,
+        run_options.fail_dir.as_deref(),
+        Some(&options.csv_summary),
+        Some(&options.xml_summary),
+    )?;
 
     Ok(())
 }
diff --git a/src/slog_pg.rs b/src/slog_pg.rs
new file mode 100644
index 0000000..c4353dd
--- /dev/null
+++ b/src/slog_pg.rs
@@ -0,0 +1,43 @@
+//! A slog term decorator that prints log messages to a progress bar.
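+//!
+//! Each log record is formatted into an in-memory buffer and then forwarded
+//! to [`crate::PROGRESS_BAR`] via `ProgressBar::println`, so messages appear
+//! above the bar instead of tearing it apart.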
+
+use slog::{OwnedKVList, Record};
+use slog_term::{Decorator, RecordDecorator};
+
+pub struct ProgressBarDecorator;
+
+struct ProgressBarRecordDecorator<'a>(&'a mut Vec<u8>);
+
+impl Decorator for ProgressBarDecorator {
+    fn with_record<F: FnOnce(&mut dyn RecordDecorator) -> std::io::Result<()>>(
+        &self,
+        _record: &Record,
+        _logger_values: &OwnedKVList,
+        f: F,
+    ) -> std::io::Result<()> {
+        let mut rec = Vec::new();
+        f(&mut ProgressBarRecordDecorator(&mut rec))?;
+        crate::PROGRESS_BAR.println(String::from_utf8(rec).map_err(|e| {
+            std::io::Error::new(
+                std::io::ErrorKind::Other,
+                format!("Cannot convert log message to string: {}", e),
+            )
+        })?);
+        Ok(())
+    }
+}
+
+impl std::io::Write for ProgressBarRecordDecorator<'_> {
+    fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
+        self.0.write(buf)
+    }
+
+    fn flush(&mut self) -> std::io::Result<()> {
+        Ok(())
+    }
+}
+
+impl RecordDecorator for ProgressBarRecordDecorator<'_> {
+    fn reset(&mut self) -> std::io::Result<()> {
+        Ok(())
+    }
+}
diff --git a/src/summary.rs b/src/summary.rs
new file mode 100644
index 0000000..b407f6f
--- /dev/null
+++ b/src/summary.rs
@@ -0,0 +1,167 @@
+use std::borrow::Cow;
+use std::collections::{HashMap, VecDeque};
+use std::convert::TryInto;
+use std::fs::File;
+use std::io::{BufRead, BufReader};
+use std::path::Path;
+
+use serde::{Deserialize, Serialize};
+use slog::{warn, Logger};
+use thiserror::Error;
+
+use crate::{TestResultData, TestResultType};
+
+#[derive(Clone, Debug, Default)]
+pub struct Summary<'a>(pub HashMap<&'a str, (SummaryEntry<'a>, Option<TestResultData<'a>>)>);
+
+/// Failure when writing the summary file
+#[derive(Debug, Error)]
+pub enum WriteSummaryError {
+    #[error("Failed to write csv summary file: {0}")]
+    WriteCsvFile(#[source] csv::Error),
+    #[error("Failed to open csv summary file: {0}")]
+    OpenCsvFile(#[source] csv::Error),
+    #[error("Failed to write xml summary file: {0}")]
+    WriteXmlFile(String),
+    #[error("Failed to open xml summary file: {0}")]
+    OpenXmlFile(#[source] std::io::Error),
+}
+
+/// Lines of the `summary.csv` file.
+#[derive(Clone, Debug, Deserialize, Serialize)]
+pub struct SummaryEntry<'a> {
+    /// Name of the deqp test.
+    pub name: &'a str,
+    pub result: TestResultType,
+    /// Reference into the run log.
+    ///
+    /// References a [`TestResultEntry`]; the reference is `None` if the test was not executed.
+    pub run_id: Option<u64>,
+}
+
+/// Write summary csv and xml file.
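+///
+/// The csv file gets one line per entry in `tests` (tests without a result
+/// are written as `NotRun`). The xml file is a junit report that contains
+/// only failed, flaky and not-run tests; for failures with a fail directory,
+/// the last [`crate::LAST_STDERR_LINES`] lines of the saved stderr are
+/// attached as `<system-err>`.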
+pub fn write_summary<'a>(
+    logger: &Logger,
+    tests: &[&'a str],
+    summary: &Summary,
+    fail_dir: Option<&Path>,
+    csv_file: Option<&Path>,
+    xml_file: Option<&Path>,
+) -> Result<(), WriteSummaryError> {
+    // Write csv
+    if let Some(file) = csv_file {
+        let mut writer = csv::Writer::from_path(file).map_err(WriteSummaryError::OpenCsvFile)?;
+        for t in tests {
+            let r = summary
+                .0
+                .get(t)
+                .map(|r| Cow::Borrowed(&r.0))
+                .unwrap_or_else(|| {
+                    Cow::Owned(SummaryEntry {
+                        name: t,
+                        result: TestResultType::NotRun,
+                        run_id: None,
+                    })
+                });
+            writer
+                .serialize(r)
+                .map_err(WriteSummaryError::WriteCsvFile)?;
+        }
+    }
+
+    // Write xml
+    if let Some(file) = xml_file {
+        use junit_report::TestCase;
+
+        // Write only failures, flakes and not-run tests
+        let ts = junit_report::TestSuite::new("CTS").add_testcases(tests.iter().filter_map(|t| {
+            if let Some(entry) = summary.0.get(t) {
+                if entry.0.result.is_failure() || matches!(entry.0.result, TestResultType::Flake(_))
+                {
+                    // Count flakes as success but report anyway
+                    if let Some(run) = &entry.1 {
+                        let mut test = if let TestResultType::Flake(res) = &entry.0.result {
+                            TestCase::success(
+                                t,
+                                junit_report::Duration::from_std(run.duration.try_into().unwrap())
+                                    .unwrap(),
+                            )
+                            .set_system_out(&format!("{}\nFlake({:?})", run.result.stdout, res))
+                        } else {
+                            TestCase::failure(
+                                t,
+                                junit_report::Duration::from_std(run.duration.try_into().unwrap())
+                                    .unwrap(),
+                                &format!("{:?}", entry.0.result),
+                                "",
+                            )
+                            .set_system_out(&run.result.stdout)
+                        };
+
+                        // Read stderr from failure dir
+                        if let (Some(fail_dir), Some(run_dir)) = (fail_dir, &run.fail_dir) {
+                            let path = fail_dir.join(run_dir).join(crate::STDERR_FILE);
+                            match File::open(&path) {
+                                Ok(f) => {
+                                    let mut last_lines =
+                                        VecDeque::with_capacity(crate::LAST_STDERR_LINES);
+                                    for l in BufReader::new(f).lines() {
+                                        let l = match l {
+                                            Ok(l) => l,
+                                            Err(e) => {
+                                                warn!(logger, "Failed to read stderr file";
+                                                    "path" => ?path, "error" => %e);
+                                                break;
+                                            }
+                                        };
+                                        if last_lines.len() >= crate::LAST_STDERR_LINES {
+                                            last_lines.pop_front();
+                                        }
+                                        last_lines.push_back(l);
+                                    }
+
+                                    let mut last_lines_str = String::new();
+                                    for l in last_lines {
+                                        if !last_lines_str.is_empty() {
+                                            last_lines_str.push('\n');
+                                        }
+                                        last_lines_str.push_str(&l);
+                                    }
+                                    test = test.set_system_err(&last_lines_str);
+                                }
+                                Err(e) => {
+                                    warn!(logger, "Failed to open stderr file"; "path" => ?path,
+                                        "error" => %e);
+                                }
+                            }
+                        }
+
+                        Some(test)
+                    } else {
+                        Some(TestCase::failure(
+                            t,
+                            junit_report::Duration::seconds(0),
+                            &format!("{:?}", entry.0.result),
+                            "",
+                        ))
+                    }
+                } else {
+                    None
+                }
+            } else {
+                Some(TestCase::error(
+                    t,
+                    junit_report::Duration::seconds(0),
+                    "NotRun",
+                    "",
+                ))
+            }
+        }));
+
+        let r = junit_report::Report::new().add_testsuite(ts);
+        let mut file = std::fs::File::create(file).map_err(WriteSummaryError::OpenXmlFile)?;
+        r.write_xml(&mut file)
+            .map_err(|e| WriteSummaryError::WriteXmlFile(e.to_string()))?;
+    }
+    Ok(())
+}
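+
+// A sketch of the xml this writes, for orientation (illustrative only; the
+// exact element and attribute layout is determined by the junit-report crate):
+//
+// <testsuites>
+//   <testsuite name="CTS" ...>
+//     <testcase name="dEQP-VK.some.test" time="...">
+//       <failure type="Crash" ... />
+//       <system-out>deqp stdout of the run</system-out>
+//       <system-err>last LAST_STDERR_LINES lines of stderr</system-err>
+//     </testcase>
+//   </testsuite>
+// </testsuites>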