
Commit 9a85380

Refactor to avoid duplication of the log entry creation
1 parent 2a48044 commit 9a85380

File tree: 1 file changed, 89 additions and 85 deletions

src/report/archives.rs

Lines changed: 89 additions & 85 deletions
@@ -2,7 +2,7 @@ use crate::config::Config;
 use crate::crates::Crate;
 use crate::experiments::Experiment;
 use crate::prelude::*;
-use crate::report::{compare, ReportWriter};
+use crate::report::{compare, Comparison, ReportWriter};
 use crate::results::{EncodedLog, EncodingType, ReadResults};
 use flate2::{write::GzEncoder, Compression};
 use indexmap::IndexMap;
@@ -14,6 +14,84 @@ pub struct Archive {
     path: String,
 }
 
+struct LogEntry {
+    path: String,
+    comparison: Comparison,
+    log_bytes: Vec<u8>,
+}
+
+impl LogEntry {
+    fn header(&self) -> TarHeader {
+        let mut header = TarHeader::new_gnu();
+        header.set_size(self.log_bytes.len() as u64);
+        header.set_mode(0o644);
+        header.set_cksum();
+        header
+    }
+}
+
+fn iterate<'a, DB: ReadResults + 'a>(
+    db: &'a DB,
+    ex: &'a Experiment,
+    crates: &'a [Crate],
+    config: &'a Config,
+) -> impl Iterator<Item = Fallible<LogEntry>> + 'a {
+    let mut iter = crates
+        .iter()
+        .filter(move |krate| !config.should_skip(krate))
+        .map(move |krate| -> Fallible<Vec<LogEntry>> {
+            let res1 = db.load_test_result(ex, &ex.toolchains[0], krate)?;
+            let res2 = db.load_test_result(ex, &ex.toolchains[1], krate)?;
+            let comparison = compare(config, krate, res1.as_ref(), res2.as_ref());
+
+            ex.toolchains
+                .iter()
+                .filter_map(move |tc| {
+                    let log = db
+                        .load_log(ex, tc, krate)
+                        .and_then(|c| c.ok_or_else(|| err_msg("missing logs")))
+                        .with_context(|_| format!("failed to read log of {} on {}", krate, tc));
+
+                    let log_bytes: EncodedLog = match log {
+                        Ok(l) => l,
+                        Err(e) => {
+                            crate::utils::report_failure(&e);
+                            return None;
+                        }
+                    };
+
+                    let log_bytes = match log_bytes.to_plain() {
+                        Ok(it) => it,
+                        Err(err) => return Some(Err(err)),
+                    };
+
+                    let path = format!(
+                        "{}/{}/{}.txt",
+                        comparison,
+                        krate.id(),
+                        tc.to_path_component(),
+                    );
+                    Some(Ok(LogEntry {
+                        path,
+                        comparison,
+                        log_bytes,
+                    }))
+                })
+                .collect()
+        });
+
+    let mut in_progress = vec![].into_iter();
+    std::iter::from_fn(move || loop {
+        if let Some(next) = in_progress.next() {
+            return Some(Ok(next));
+        }
+        match iter.next()? {
+            Ok(list) => in_progress = list.into_iter(),
+            Err(err) => return Some(Err(err)),
+        }
+    })
+}
+
 fn write_all_archive<DB: ReadResults, W: ReportWriter>(
     db: &DB,
     ex: &Experiment,
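
Note: the `iterate` helper added above flattens each crate's `Fallible<Vec<LogEntry>>` into a single stream of `Fallible<LogEntry>` with a hand-rolled `std::iter::from_fn` loop, so a failure while loading one crate surfaces as a single `Err` item instead of ending the whole iteration. A minimal standalone sketch of that flattening idiom, using generic placeholder types and a hypothetical `flatten_results` name rather than the crater-specific types in the diff:

// Sketch only: flatten_results is illustrative, not part of this commit.
// It turns an iterator of Result<Vec<T>, E> into an iterator of Result<T, E>,
// yielding every item of a successful batch and forwarding each error as one item.
fn flatten_results<T, E>(
    mut outer: impl Iterator<Item = Result<Vec<T>, E>>,
) -> impl Iterator<Item = Result<T, E>> {
    let mut in_progress = vec![].into_iter();
    std::iter::from_fn(move || loop {
        // Drain the batch currently being iterated over.
        if let Some(next) = in_progress.next() {
            return Some(Ok(next));
        }
        // Otherwise pull the next batch; `?` ends the stream once `outer` is exhausted.
        match outer.next()? {
            Ok(list) => in_progress = list.into_iter(),
            Err(err) => return Some(Err(err)),
        }
    })
}

fn main() {
    let batches = vec![Ok::<_, String>(vec![1, 2]), Err("boom".to_string()), Ok(vec![3])];
    let flat: Vec<_> = flatten_results(batches.into_iter()).collect();
    assert_eq!(flat, vec![Ok(1), Ok(2), Err("boom".to_string()), Ok(3)]);
}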
@@ -23,46 +101,10 @@ fn write_all_archive<DB: ReadResults, W: ReportWriter>(
 ) -> Fallible<Archive> {
     for i in 1..=RETRIES {
         let mut all = TarBuilder::new(GzEncoder::new(Vec::new(), Compression::default()));
-        for krate in crates {
-            if config.should_skip(krate) {
-                continue;
-            }
-
-            let res1 = db.load_test_result(ex, &ex.toolchains[0], krate)?;
-            let res2 = db.load_test_result(ex, &ex.toolchains[1], krate)?;
-            let comparison = compare(config, krate, res1.as_ref(), res2.as_ref());
-
-            for tc in &ex.toolchains {
-                let log = db
-                    .load_log(ex, tc, krate)
-                    .and_then(|c| c.ok_or_else(|| err_msg("missing logs")))
-                    .with_context(|_| format!("failed to read log of {} on {}", krate, tc));
-
-                let log_bytes: EncodedLog = match log {
-                    Ok(l) => l,
-                    Err(e) => {
-                        crate::utils::report_failure(&e);
-                        continue;
-                    }
-                };
-
-                let log_bytes = log_bytes.to_plain()?;
-                let log_bytes = log_bytes.as_slice();
-
-                let path = format!(
-                    "{}/{}/{}.txt",
-                    comparison,
-                    krate.id(),
-                    tc.to_path_component(),
-                );
-
-                let mut header = TarHeader::new_gnu();
-                header.set_size(log_bytes.len() as u64);
-                header.set_mode(0o644);
-                header.set_cksum();
-
-                all.append_data(&mut header, &path, log_bytes)?;
-            }
+        for entry in iterate(db, ex, crates, config) {
+            let entry = entry?;
+            let mut header = entry.header();
+            all.append_data(&mut header, &entry.path, &entry.log_bytes[..])?;
         }
 
         let data = all.into_inner()?.finish()?;
@@ -109,51 +151,13 @@ pub fn write_logs_archives<DB: ReadResults, W: ReportWriter>(
 
     archives.push(write_all_archive(db, ex, crates, dest, config)?);
 
-    for krate in crates {
-        if config.should_skip(krate) {
-            continue;
-        }
-
-        let res1 = db.load_test_result(ex, &ex.toolchains[0], krate)?;
-        let res2 = db.load_test_result(ex, &ex.toolchains[1], krate)?;
-        let comparison = compare(config, krate, res1.as_ref(), res2.as_ref());
-
-        for tc in &ex.toolchains {
-            let log = db
-                .load_log(ex, tc, krate)
-                .and_then(|c| c.ok_or_else(|| err_msg("missing logs")))
-                .with_context(|_| format!("failed to read log of {} on {}", krate, tc));
+    for entry in iterate(db, ex, crates, config) {
+        let entry = entry?;
 
-            let log_bytes: EncodedLog = match log {
-                Ok(l) => l,
-                Err(e) => {
-                    crate::utils::report_failure(&e);
-                    continue;
-                }
-            };
-
-            let log_bytes = log_bytes.to_plain()?;
-            let log_bytes = log_bytes.as_slice();
-
-            let path = format!(
-                "{}/{}/{}.txt",
-                comparison,
-                krate.id(),
-                tc.to_path_component(),
-            );
-
-            let mut header = TarHeader::new_gnu();
-            header.set_size(log_bytes.len() as u64);
-            header.set_mode(0o644);
-            header.set_cksum();
-
-            by_comparison
-                .entry(comparison)
-                .or_insert_with(|| {
-                    TarBuilder::new(GzEncoder::new(Vec::new(), Compression::default()))
-                })
-                .append_data(&mut header, &path, log_bytes)?;
-        }
+        by_comparison
+            .entry(entry.comparison)
+            .or_insert_with(|| TarBuilder::new(GzEncoder::new(Vec::new(), Compression::default())))
+            .append_data(&mut entry.header(), &entry.path, &entry.log_bytes[..])?;
     }
 
     for (comparison, archive) in by_comparison.drain(..) {
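
Note: after this refactor both call sites share the same archive flow: append each entry to a tar archive wrapped in an in-memory gzip encoder, using the header built by `LogEntry::header()`, then finish the tar layer (`into_inner`) and the gzip layer (`finish`) to obtain the final bytes; `write_logs_archives` additionally keeps one builder per `Comparison` in an `IndexMap`. A rough self-contained sketch of that grouping flow, with plain `String` keys and an illustrative `group_into_archives` name instead of the crater types:

// Sketch only: group_into_archives is illustrative, not code from this commit.
// It mirrors the per-comparison grouping in write_logs_archives with String keys.
use flate2::{write::GzEncoder, Compression};
use indexmap::IndexMap;
use tar::{Builder as TarBuilder, Header as TarHeader};

fn group_into_archives(
    entries: &[(String, String, Vec<u8>)], // (key, path inside the archive, file contents)
) -> std::io::Result<Vec<(String, Vec<u8>)>> {
    let mut by_key: IndexMap<String, TarBuilder<GzEncoder<Vec<u8>>>> = IndexMap::new();
    for (key, path, bytes) in entries {
        // Same header fields LogEntry::header() sets: size, mode 0o644, checksum.
        let mut header = TarHeader::new_gnu();
        header.set_size(bytes.len() as u64);
        header.set_mode(0o644);
        header.set_cksum();
        by_key
            .entry(key.clone())
            // Lazily create one gzipped tar builder per key, as the diff does per Comparison.
            .or_insert_with(|| TarBuilder::new(GzEncoder::new(Vec::new(), Compression::default())))
            .append_data(&mut header, path, &bytes[..])?;
    }
    // Finish the tar layer, then the gzip layer, for each per-key archive.
    let mut archives = Vec::new();
    for (key, builder) in by_key.drain(..) {
        archives.push((key, builder.into_inner()?.finish()?));
    }
    Ok(archives)
}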

0 commit comments
