|
| 1 | +//! Module providing interface for running tests in the console. |
| 2 | +
|
| 3 | +use std::fs::File; |
| 4 | +use std::io::prelude::*; |
| 5 | +use std::io; |
| 6 | + |
| 7 | +use term; |
| 8 | + |
| 9 | +use super::{ |
| 10 | + helpers::{ |
| 11 | + concurrency::get_concurrency, |
| 12 | + metrics::MetricMap, |
| 13 | + }, |
| 14 | + types::{TestDesc, TestDescAndFn, NamePadding}, |
| 15 | + options::{Options, OutputFormat}, |
| 16 | + bench::fmt_bench_samples, |
| 17 | + test_result::TestResult, |
| 18 | + time::TestExecTime, |
| 19 | + cli::TestOpts, |
| 20 | + event::TestEvent, |
| 21 | + run_tests, |
| 22 | + filter_tests, |
| 23 | +}; |
| 24 | + |
/// Where console output is sent: a formatting-capable terminal (via the
/// `term` crate) or a plain raw writer such as ordinary stdout.
pub enum OutputLocation<T> {
    /// A terminal that supports `term` formatting (color, attributes).
    Pretty(Box<term::StdoutTerminal>),
    /// Any plain writer with no terminal capabilities.
    Raw(T),
}
| 29 | + |
| 30 | +impl<T: Write> Write for OutputLocation<T> { |
| 31 | + fn write(&mut self, buf: &[u8]) -> io::Result<usize> { |
| 32 | + match *self { |
| 33 | + OutputLocation::Pretty(ref mut term) => term.write(buf), |
| 34 | + OutputLocation::Raw(ref mut stdout) => stdout.write(buf), |
| 35 | + } |
| 36 | + } |
| 37 | + |
| 38 | + fn flush(&mut self) -> io::Result<()> { |
| 39 | + match *self { |
| 40 | + OutputLocation::Pretty(ref mut term) => term.flush(), |
| 41 | + OutputLocation::Raw(ref mut stdout) => stdout.flush(), |
| 42 | + } |
| 43 | + } |
| 44 | +} |
| 45 | + |
| 46 | +use crate::formatters::{JsonFormatter, OutputFormatter, PrettyFormatter, TerseFormatter}; |
| 47 | + |
/// Mutable bookkeeping for one console test run: result counters, the
/// collected failure outputs, benchmark metrics, and an optional logfile.
pub struct ConsoleTestState {
    /// Logfile to mirror results into, when `opts.logfile` was set.
    pub log_out: Option<File>,
    /// Number of tests selected to run (set from the filtered-test event).
    pub total: usize,
    /// Count of tests that passed.
    pub passed: usize,
    /// Count of tests that failed (including timed-out and message failures).
    pub failed: usize,
    /// Count of ignored tests.
    pub ignored: usize,
    /// Count of tests that failed but were allowed to fail.
    pub allowed_fail: usize,
    /// Number of tests excluded by the filters.
    pub filtered_out: usize,
    /// Count of benchmarks that were measured.
    pub measured: usize,
    /// Accumulated benchmark metrics, keyed by test name.
    pub metrics: MetricMap,
    /// Failed tests together with their captured output.
    pub failures: Vec<(TestDesc, Vec<u8>)>,
    /// Passed tests together with their captured output.
    pub not_failures: Vec<(TestDesc, Vec<u8>)>,
    /// Tests that failed by exceeding the time limit, with captured output.
    pub time_failures: Vec<(TestDesc, Vec<u8>)>,
    /// Display/run options copied from the parsed CLI options.
    pub options: Options,
}
| 63 | + |
| 64 | +impl ConsoleTestState { |
| 65 | + pub fn new(opts: &TestOpts) -> io::Result<ConsoleTestState> { |
| 66 | + let log_out = match opts.logfile { |
| 67 | + Some(ref path) => Some(File::create(path)?), |
| 68 | + None => None, |
| 69 | + }; |
| 70 | + |
| 71 | + Ok(ConsoleTestState { |
| 72 | + log_out, |
| 73 | + total: 0, |
| 74 | + passed: 0, |
| 75 | + failed: 0, |
| 76 | + ignored: 0, |
| 77 | + allowed_fail: 0, |
| 78 | + filtered_out: 0, |
| 79 | + measured: 0, |
| 80 | + metrics: MetricMap::new(), |
| 81 | + failures: Vec::new(), |
| 82 | + not_failures: Vec::new(), |
| 83 | + time_failures: Vec::new(), |
| 84 | + options: opts.options, |
| 85 | + }) |
| 86 | + } |
| 87 | + |
| 88 | + pub fn write_log<F, S>( |
| 89 | + &mut self, |
| 90 | + msg: F, |
| 91 | + ) -> io::Result<()> |
| 92 | + where |
| 93 | + S: AsRef<str>, |
| 94 | + F: FnOnce() -> S, |
| 95 | + { |
| 96 | + match self.log_out { |
| 97 | + None => Ok(()), |
| 98 | + Some(ref mut o) => { |
| 99 | + let msg = msg(); |
| 100 | + let msg = msg.as_ref(); |
| 101 | + o.write_all(msg.as_bytes()) |
| 102 | + }, |
| 103 | + } |
| 104 | + } |
| 105 | + |
| 106 | + pub fn write_log_result(&mut self,test: &TestDesc, |
| 107 | + result: &TestResult, |
| 108 | + exec_time: Option<&TestExecTime>, |
| 109 | + ) -> io::Result<()> { |
| 110 | + self.write_log(|| format!( |
| 111 | + "{} {}", |
| 112 | + match *result { |
| 113 | + TestResult::TrOk => "ok".to_owned(), |
| 114 | + TestResult::TrFailed => "failed".to_owned(), |
| 115 | + TestResult::TrFailedMsg(ref msg) => format!("failed: {}", msg), |
| 116 | + TestResult::TrIgnored => "ignored".to_owned(), |
| 117 | + TestResult::TrAllowedFail => "failed (allowed)".to_owned(), |
| 118 | + TestResult::TrBench(ref bs) => fmt_bench_samples(bs), |
| 119 | + TestResult::TrTimedFail => "failed (time limit exceeded)".to_owned(), |
| 120 | + }, |
| 121 | + test.name, |
| 122 | + ))?; |
| 123 | + if let Some(exec_time) = exec_time { |
| 124 | + self.write_log(|| format!(" <{}>", exec_time))?; |
| 125 | + } |
| 126 | + self.write_log(|| "\n") |
| 127 | + } |
| 128 | + |
| 129 | + fn current_test_count(&self) -> usize { |
| 130 | + self.passed + self.failed + self.ignored + self.measured + self.allowed_fail |
| 131 | + } |
| 132 | +} |
| 133 | + |
| 134 | +// List the tests to console, and optionally to logfile. Filters are honored. |
| 135 | +pub fn list_tests_console(opts: &TestOpts, tests: Vec<TestDescAndFn>) -> io::Result<()> { |
| 136 | + let mut output = match term::stdout() { |
| 137 | + None => OutputLocation::Raw(io::stdout()), |
| 138 | + Some(t) => OutputLocation::Pretty(t), |
| 139 | + }; |
| 140 | + |
| 141 | + let quiet = opts.format == OutputFormat::Terse; |
| 142 | + let mut st = ConsoleTestState::new(opts)?; |
| 143 | + |
| 144 | + let mut ntest = 0; |
| 145 | + let mut nbench = 0; |
| 146 | + |
| 147 | + for test in filter_tests(&opts, tests) { |
| 148 | + use crate::TestFn::*; |
| 149 | + |
| 150 | + let TestDescAndFn { |
| 151 | + desc: TestDesc { name, .. }, |
| 152 | + testfn, |
| 153 | + } = test; |
| 154 | + |
| 155 | + let fntype = match testfn { |
| 156 | + StaticTestFn(..) | DynTestFn(..) => { |
| 157 | + ntest += 1; |
| 158 | + "test" |
| 159 | + } |
| 160 | + StaticBenchFn(..) | DynBenchFn(..) => { |
| 161 | + nbench += 1; |
| 162 | + "benchmark" |
| 163 | + } |
| 164 | + }; |
| 165 | + |
| 166 | + writeln!(output, "{}: {}", name, fntype)?; |
| 167 | + st.write_log(|| format!("{} {}\n", fntype, name))?; |
| 168 | + } |
| 169 | + |
| 170 | + fn plural(count: u32, s: &str) -> String { |
| 171 | + match count { |
| 172 | + 1 => format!("{} {}", 1, s), |
| 173 | + n => format!("{} {}s", n, s), |
| 174 | + } |
| 175 | + } |
| 176 | + |
| 177 | + if !quiet { |
| 178 | + if ntest != 0 || nbench != 0 { |
| 179 | + writeln!(output, "")?; |
| 180 | + } |
| 181 | + |
| 182 | + writeln!( |
| 183 | + output, |
| 184 | + "{}, {}", |
| 185 | + plural(ntest, "test"), |
| 186 | + plural(nbench, "benchmark") |
| 187 | + )?; |
| 188 | + } |
| 189 | + |
| 190 | + Ok(()) |
| 191 | +} |
| 192 | + |
| 193 | +// A simple console test runner |
| 194 | +pub fn run_tests_console(opts: &TestOpts, tests: Vec<TestDescAndFn>) -> io::Result<bool> { |
| 195 | + fn callback( |
| 196 | + event: &TestEvent, |
| 197 | + st: &mut ConsoleTestState, |
| 198 | + out: &mut dyn OutputFormatter, |
| 199 | + ) -> io::Result<()> { |
| 200 | + match (*event).clone() { |
| 201 | + TestEvent::TeFiltered(ref filtered_tests) => { |
| 202 | + st.total = filtered_tests.len(); |
| 203 | + out.write_run_start(filtered_tests.len()) |
| 204 | + } |
| 205 | + TestEvent::TeFilteredOut(filtered_out) => Ok(st.filtered_out = filtered_out), |
| 206 | + TestEvent::TeWait(ref test) => out.write_test_start(test), |
| 207 | + TestEvent::TeTimeout(ref test) => out.write_timeout(test), |
| 208 | + TestEvent::TeResult(test, result, exec_time, stdout) => { |
| 209 | + st.write_log_result(&test, &result, exec_time.as_ref())?; |
| 210 | + out.write_result(&test, &result, exec_time.as_ref(), &*stdout, &st)?; |
| 211 | + match result { |
| 212 | + TestResult::TrOk => { |
| 213 | + st.passed += 1; |
| 214 | + st.not_failures.push((test, stdout)); |
| 215 | + } |
| 216 | + TestResult::TrIgnored => st.ignored += 1, |
| 217 | + TestResult::TrAllowedFail => st.allowed_fail += 1, |
| 218 | + TestResult::TrBench(bs) => { |
| 219 | + st.metrics.insert_metric( |
| 220 | + test.name.as_slice(), |
| 221 | + bs.ns_iter_summ.median, |
| 222 | + bs.ns_iter_summ.max - bs.ns_iter_summ.min, |
| 223 | + ); |
| 224 | + st.measured += 1 |
| 225 | + } |
| 226 | + TestResult::TrFailed => { |
| 227 | + st.failed += 1; |
| 228 | + st.failures.push((test, stdout)); |
| 229 | + } |
| 230 | + TestResult::TrFailedMsg(msg) => { |
| 231 | + st.failed += 1; |
| 232 | + let mut stdout = stdout; |
| 233 | + stdout.extend_from_slice(format!("note: {}", msg).as_bytes()); |
| 234 | + st.failures.push((test, stdout)); |
| 235 | + } |
| 236 | + TestResult::TrTimedFail => { |
| 237 | + st.failed += 1; |
| 238 | + st.time_failures.push((test, stdout)); |
| 239 | + } |
| 240 | + } |
| 241 | + Ok(()) |
| 242 | + } |
| 243 | + } |
| 244 | + } |
| 245 | + |
| 246 | + let output = match term::stdout() { |
| 247 | + None => OutputLocation::Raw(io::stdout()), |
| 248 | + Some(t) => OutputLocation::Pretty(t), |
| 249 | + }; |
| 250 | + |
| 251 | + let max_name_len = tests |
| 252 | + .iter() |
| 253 | + .max_by_key(|t| len_if_padded(*t)) |
| 254 | + .map(|t| t.desc.name.as_slice().len()) |
| 255 | + .unwrap_or(0); |
| 256 | + |
| 257 | + let is_multithreaded = opts.test_threads.unwrap_or_else(get_concurrency) > 1; |
| 258 | + |
| 259 | + let mut out: Box<dyn OutputFormatter> = match opts.format { |
| 260 | + OutputFormat::Pretty => Box::new(PrettyFormatter::new( |
| 261 | + output, |
| 262 | + opts.use_color(), |
| 263 | + max_name_len, |
| 264 | + is_multithreaded, |
| 265 | + opts.time_options, |
| 266 | + )), |
| 267 | + OutputFormat::Terse => Box::new(TerseFormatter::new( |
| 268 | + output, |
| 269 | + opts.use_color(), |
| 270 | + max_name_len, |
| 271 | + is_multithreaded, |
| 272 | + )), |
| 273 | + OutputFormat::Json => Box::new(JsonFormatter::new(output)), |
| 274 | + }; |
| 275 | + let mut st = ConsoleTestState::new(opts)?; |
| 276 | + fn len_if_padded(t: &TestDescAndFn) -> usize { |
| 277 | + match t.testfn.padding() { |
| 278 | + NamePadding::PadNone => 0, |
| 279 | + NamePadding::PadOnRight => t.desc.name.as_slice().len(), |
| 280 | + } |
| 281 | + } |
| 282 | + |
| 283 | + run_tests(opts, tests, |x| callback(&x, &mut st, &mut *out))?; |
| 284 | + |
| 285 | + assert!(st.current_test_count() == st.total); |
| 286 | + |
| 287 | + return out.write_run_finish(&st); |
| 288 | +} |
0 commit comments