Commit c5e3f17

Optimize removing unit_deps.
Parent: a58b0c5

File tree: 1 file changed (+10, -5 lines)


src/cargo/ops/cargo_compile.rs

Lines changed: 10 additions & 5 deletions
```diff
@@ -1497,18 +1497,19 @@ fn remove_duplicate_doc(build_config: &BuildConfig, unit_graph: &mut UnitGraph)
                 .push(unit.clone());
         }
     }
+    // Keep track of units to remove so that they can be efficiently removed
+    // from the unit_deps.
+    let mut removed_units: HashSet<Unit> = HashSet::new();
     let mut remove = |units: Vec<Unit>, reason: &str| {
-        for unit in &units {
+        for unit in units {
             log::debug!(
                 "removing duplicate doc due to {} for package {} target `{}`",
                 reason,
                 unit.pkg,
                 unit.target.name()
             );
-            unit_graph.remove(unit);
-        }
-        for unit_deps in unit_graph.values_mut() {
-            unit_deps.retain(|unit_dep| !units.iter().any(|unit| *unit == unit_dep.unit));
+            unit_graph.remove(&unit);
+            removed_units.insert(unit);
         }
     };
     // Iterate over the duplicates and try to remove them from unit_graph.
@@ -1566,4 +1567,8 @@ fn remove_duplicate_doc(build_config: &BuildConfig, unit_graph: &mut UnitGraph)
         // Are there other heuristics to remove duplicates that would make
         // sense? Maybe prefer path sources over all others?
     }
+    // Also remove units from the unit_deps so there aren't any dangling edges.
+    for unit_deps in unit_graph.values_mut() {
+        unit_deps.retain(|unit_dep| !removed_units.contains(&unit_dep.unit));
+    }
 }
```
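The optimization: the old `remove` closure deleted each duplicate doc unit and then rescanned every dependency list on every call, testing each edge against the removed batch with a linear `iter().any(...)`. The new version records removed units in a `HashSet` and performs a single `retain` pass over all dependency lists after all duplicates have been processed, so each edge is checked exactly once with an O(1) hash lookup. Below is a minimal, self-contained sketch of the same pattern, not the actual cargo code: `Unit` and `UnitGraph` are hypothetical stand-ins (`String` keys in a plain `HashMap`) for cargo's real types.

```rust
use std::collections::{HashMap, HashSet};

// Hypothetical stand-ins for cargo's `Unit` and `UnitGraph` types.
type Unit = String;
type UnitGraph = HashMap<Unit, Vec<Unit>>;

fn main() {
    let mut unit_graph: UnitGraph = HashMap::new();
    unit_graph.insert("a".into(), vec!["b".into(), "c".into()]);
    unit_graph.insert("b".into(), vec!["c".into()]);
    unit_graph.insert("c".into(), vec![]);

    // Track removals in a set instead of cleaning up edges immediately.
    let mut removed_units: HashSet<Unit> = HashSet::new();
    let mut remove = |units: Vec<Unit>| {
        for unit in units {
            unit_graph.remove(&unit);
            removed_units.insert(unit);
        }
    };
    remove(vec!["c".into()]);

    // A single retain pass at the end drops every dangling edge with
    // O(1) membership checks, rather than one full scan per removal.
    for unit_deps in unit_graph.values_mut() {
        unit_deps.retain(|unit_dep| !removed_units.contains(unit_dep));
    }

    assert_eq!(unit_graph["a"], vec!["b".to_string()]);
    assert!(unit_graph["b"].is_empty());
}
```

The trade-off is a little extra memory for the set in exchange for turning edge cleanup from roughly O(removals x edges) into a single O(edges) pass.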
