Skip to content

Commit 73a1b76

Browse files
committed
Auto merge of #14377 - mo8it:context, r=weihanglo
Use `context` instead of `with_context`. Replace `.with_context(|| "…")` with `.context("…")` to avoid calling a trivial closure. It is also shorter :) On the other hand, use `.with_context(|| format!(…))` instead of `.context(format!(…))` to avoid an unneeded string allocation.
2 parents b66cad8 + af04e54 commit 73a1b76

File tree

22 files changed

+63
-68
lines changed

22 files changed

+63
-68
lines changed

crates/rustfix/tests/parse_and_replace.rs

Lines changed: 7 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -164,8 +164,8 @@ fn test_rustfix_with_file<P: AsRef<Path>>(file: P, mode: &str) -> Result<(), Err
164164

165165
debug!("next up: {:?}", file);
166166
let code = fs::read_to_string(file)?;
167-
let errors =
168-
compile_and_get_json_errors(file).context(format!("could compile {}", file.display()))?;
167+
let errors = compile_and_get_json_errors(file)
168+
.with_context(|| format!("could not compile {}", file.display()))?;
169169
let suggestions =
170170
rustfix::get_suggestions_from_json(&errors, &HashSet::new(), filter_suggestions)
171171
.context("could not load suggestions")?;
@@ -175,10 +175,8 @@ fn test_rustfix_with_file<P: AsRef<Path>>(file: P, mode: &str) -> Result<(), Err
175175
}
176176

177177
if std::env::var(settings::CHECK_JSON).is_ok() {
178-
let expected_json = fs::read_to_string(&json_file).context(format!(
179-
"could not load json fixtures for {}",
180-
file.display()
181-
))?;
178+
let expected_json = fs::read_to_string(&json_file)
179+
.with_context(|| format!("could not load json fixtures for {}", file.display()))?;
182180
let expected_suggestions =
183181
rustfix::get_suggestions_from_json(&expected_json, &HashSet::new(), filter_suggestions)
184182
.context("could not load expected suggestions")?;
@@ -194,7 +192,7 @@ fn test_rustfix_with_file<P: AsRef<Path>>(file: P, mode: &str) -> Result<(), Err
194192
}
195193

196194
let fixed = apply_suggestions(&code, &suggestions)
197-
.context(format!("could not apply suggestions to {}", file.display()))?
195+
.with_context(|| format!("could not apply suggestions to {}", file.display()))?
198196
.replace('\r', "");
199197

200198
if std::env::var(settings::RECORD_FIXED_RUST).is_ok() {
@@ -209,7 +207,7 @@ fn test_rustfix_with_file<P: AsRef<Path>>(file: P, mode: &str) -> Result<(), Err
209207
}
210208

211209
let expected_fixed = fs::read_to_string(&fixed_file)
212-
.context(format!("could read fixed file for {}", file.display()))?
210+
.with_context(|| format!("could read fixed file for {}", file.display()))?
213211
.replace('\r', "");
214212
ensure!(
215213
fixed.trim() == expected_fixed.trim(),
@@ -236,7 +234,7 @@ fn get_fixture_files(p: &str) -> Result<Vec<PathBuf>, Error> {
236234

237235
fn assert_fixtures(dir: &str, mode: &str) {
238236
let files = get_fixture_files(dir)
239-
.context(format!("couldn't load dir `{}`", dir))
237+
.with_context(|| format!("couldn't load dir `{dir}`"))
240238
.unwrap();
241239
let mut failures = 0;
242240

src/cargo/core/compiler/build_runner/mod.rs

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -100,8 +100,8 @@ impl<'a, 'gctx> BuildRunner<'a, 'gctx> {
100100
let jobserver = match bcx.gctx.jobserver_from_env() {
101101
Some(c) => c.clone(),
102102
None => {
103-
let client = Client::new(bcx.jobs() as usize)
104-
.with_context(|| "failed to create jobserver")?;
103+
let client =
104+
Client::new(bcx.jobs() as usize).context("failed to create jobserver")?;
105105
client.acquire_raw()?;
106106
client
107107
}
@@ -354,11 +354,11 @@ impl<'a, 'gctx> BuildRunner<'a, 'gctx> {
354354
.unwrap()
355355
.host
356356
.prepare()
357-
.with_context(|| "couldn't prepare build directories")?;
357+
.context("couldn't prepare build directories")?;
358358
for target in self.files.as_mut().unwrap().target.values_mut() {
359359
target
360360
.prepare()
361-
.with_context(|| "couldn't prepare build directories")?;
361+
.context("couldn't prepare build directories")?;
362362
}
363363

364364
let files = self.files.as_ref().unwrap();

src/cargo/core/compiler/custom_build.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -431,7 +431,7 @@ fn build_work(build_runner: &mut BuildRunner<'_, '_>, unit: &Unit) -> CargoResul
431431
// If we have an old build directory, then just move it into place,
432432
// otherwise create it!
433433
paths::create_dir_all(&script_out_dir)
434-
.with_context(|| "failed to create script output directory for build command")?;
434+
.context("failed to create script output directory for build command")?;
435435

436436
// For all our native lib dependencies, pick up their metadata to pass
437437
// along to this custom build command. We're also careful to augment our

src/cargo/core/compiler/future_incompat.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -211,9 +211,9 @@ impl OnDiskReports {
211211
report_file
212212
.file()
213213
.read_to_string(&mut file_contents)
214-
.with_context(|| "failed to read report")?;
214+
.context("failed to read report")?;
215215
let on_disk_reports: OnDiskReports =
216-
serde_json::from_str(&file_contents).with_context(|| "failed to load report")?;
216+
serde_json::from_str(&file_contents).context("failed to load report")?;
217217
if on_disk_reports.version != ON_DISK_VERSION {
218218
bail!("unable to read reports; reports were saved from a future version of Cargo");
219219
}

src/cargo/core/compiler/job_queue/mod.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -513,7 +513,7 @@ impl<'gctx> JobQueue<'gctx> {
513513
.into_helper_thread(move |token| {
514514
messages.push(Message::Token(token));
515515
})
516-
.with_context(|| "failed to create helper thread for jobserver management")?;
516+
.context("failed to create helper thread for jobserver management")?;
517517

518518
// Create a helper thread to manage the diagnostics for rustfix if
519519
// necessary.
@@ -700,7 +700,7 @@ impl<'gctx> DrainState<'gctx> {
700700
.push(FutureIncompatReportPackage { package_id, items });
701701
}
702702
Message::Token(acquired_token) => {
703-
let token = acquired_token.with_context(|| "failed to acquire jobserver token")?;
703+
let token = acquired_token.context("failed to acquire jobserver token")?;
704704
self.tokens.push(token);
705705
}
706706
}

src/cargo/core/compiler/timings.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -299,7 +299,7 @@ impl<'gctx> Timings<'gctx> {
299299
.sort_unstable_by(|a, b| a.start.partial_cmp(&b.start).unwrap());
300300
if self.report_html {
301301
self.report_html(build_runner, error)
302-
.with_context(|| "failed to save timing report")?;
302+
.context("failed to save timing report")?;
303303
}
304304
Ok(())
305305
}

src/cargo/core/global_cache_tracker.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -543,7 +543,7 @@ impl GlobalCacheTracker {
543543
/// Deletes files from the global cache based on the given options.
544544
pub fn clean(&mut self, clean_ctx: &mut CleanContext<'_>, gc_opts: &GcOpts) -> CargoResult<()> {
545545
self.clean_inner(clean_ctx, gc_opts)
546-
.with_context(|| "failed to clean entries from the global cache")
546+
.context("failed to clean entries from the global cache")
547547
}
548548

549549
#[tracing::instrument(skip_all)]
@@ -575,7 +575,7 @@ impl GlobalCacheTracker {
575575
gc_opts.is_download_cache_size_set(),
576576
&mut delete_paths,
577577
)
578-
.with_context(|| "failed to sync tracking database")?
578+
.context("failed to sync tracking database")?
579579
}
580580
if let Some(max_age) = gc_opts.max_index_age {
581581
let max_age = now - max_age.as_secs();

src/cargo/core/package.rs

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -393,7 +393,7 @@ impl<'gctx> PackageSet<'gctx> {
393393
let multiplexing = gctx.http_config()?.multiplexing.unwrap_or(true);
394394
multi
395395
.pipelining(false, multiplexing)
396-
.with_context(|| "failed to enable multiplexing/pipelining in curl")?;
396+
.context("failed to enable multiplexing/pipelining in curl")?;
397397

398398
// let's not flood crates.io with connections
399399
multi.set_max_host_connections(2)?;
@@ -681,7 +681,7 @@ impl<'a, 'gctx> Downloads<'a, 'gctx> {
681681
.ok_or_else(|| internal(format!("couldn't find source for `{}`", id)))?;
682682
let pkg = source
683683
.download(id)
684-
.with_context(|| "unable to get packages from source")?;
684+
.context("unable to get packages from source")?;
685685
let (url, descriptor, authorization) = match pkg {
686686
MaybePackage::Ready(pkg) => {
687687
debug!("{} doesn't need a download", id);
@@ -951,7 +951,7 @@ impl<'a, 'gctx> Downloads<'a, 'gctx> {
951951
self.set
952952
.multi
953953
.perform()
954-
.with_context(|| "failed to perform http requests")
954+
.context("failed to perform http requests")
955955
})?;
956956
debug!(target: "network", "handles remaining: {}", n);
957957
let results = &mut self.results;
@@ -981,7 +981,7 @@ impl<'a, 'gctx> Downloads<'a, 'gctx> {
981981
self.set
982982
.multi
983983
.wait(&mut [], timeout)
984-
.with_context(|| "failed to wait on curl `Multi`")?;
984+
.context("failed to wait on curl `Multi`")?;
985985
}
986986
}
987987
}

src/cargo/core/registry.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -444,8 +444,8 @@ impl<'gctx> PackageRegistry<'gctx> {
444444
patches must point to different sources",
445445
dep.package_name(),
446446
url
447-
))
448-
.context(format!("failed to resolve patches for `{}`", url));
447+
)
448+
.context(format!("failed to resolve patches for `{}`", url)));
449449
}
450450
unlocked_summaries.push(summary);
451451
}

src/cargo/ops/cargo_add/mod.rs

Lines changed: 3 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1085,10 +1085,9 @@ fn find_workspace_dep(toml_key: &str, root_manifest: &Path) -> CargoResult<Depen
10851085
.context("could not find `dependencies` table in `workspace`")?
10861086
.as_table_like()
10871087
.context("could not make `dependencies` into a table")?;
1088-
let dep_item = dependencies.get(toml_key).context(format!(
1089-
"could not find {} in `workspace.dependencies`",
1090-
toml_key
1091-
))?;
1088+
let dep_item = dependencies
1089+
.get(toml_key)
1090+
.with_context(|| format!("could not find {toml_key} in `workspace.dependencies`"))?;
10921091
Dependency::from_toml(root_manifest.parent().unwrap(), toml_key, dep_item)
10931092
}
10941093

0 commit comments

Comments (0)