Skip to content

Commit 27932ea

Browse files
committed
publish-lockfile: Always check Cargo.lock is up-to-date.
This changes it so that `cargo package` will make sure the Cargo.lock file is in sync with the Cargo.toml that is generated during packaging. This has several points:

- This makes the Cargo.lock more accurately reflect what would be locked if a user runs `cargo install` on the resulting package.
- In a workspace, this removes irrelevant packages from the lock file.
- This handles `[patch]` dependencies and dual-source dependencies (like path/version).
- Warnings are generated for any differences in the lock file compared to the original.

This has a significant change in how `cargo package` works. It now unconditionally copies the package to `target/package`. Previously this was only done during the verification step. This is necessary to run the resolver against the new `Cargo.toml` that gets generated.
1 parent ebb5764 commit 27932ea

File tree

2 files changed

+347
-99
lines changed

2 files changed

+347
-99
lines changed

src/cargo/ops/cargo_package.rs

Lines changed: 145 additions & 34 deletions
Original file line number | Diff line number | Diff line change
@@ -1,18 +1,18 @@
1-
use std::collections::HashMap;
1+
use std::collections::{BTreeSet, HashMap};
22
use std::fs::{self, File};
33
use std::io::prelude::*;
44
use std::io::SeekFrom;
55
use std::path::{self, Path, PathBuf};
66
use std::sync::Arc;
77

8-
use flate2::read::GzDecoder;
98
use flate2::{Compression, GzBuilder};
109
use log::debug;
1110
use serde_json::{self, json};
12-
use tar::{Archive, Builder, EntryType, Header};
11+
use tar::{Builder, EntryType, Header};
1312

1413
use crate::core::compiler::{BuildConfig, CompileMode, DefaultExecutor, Executor};
15-
use crate::core::{Package, Source, SourceId, Workspace};
14+
use crate::core::resolver::Method;
15+
use crate::core::{Package, PackageId, PackageIdSpec, Resolve, Source, SourceId, Workspace};
1616
use crate::ops;
1717
use crate::sources::PathSource;
1818
use crate::util::errors::{CargoResult, CargoResultExt};
@@ -35,7 +35,6 @@ pub struct PackageOpts<'cfg> {
3535
static VCS_INFO_FILE: &'static str = ".cargo_vcs_info.json";
3636

3737
pub fn package(ws: &Workspace<'_>, opts: &PackageOpts<'_>) -> CargoResult<Option<FileLock>> {
38-
ops::resolve_ws(ws)?;
3938
let pkg = ws.current()?;
4039
let config = ws.config();
4140

@@ -100,7 +99,7 @@ pub fn package(ws: &Workspace<'_>, opts: &PackageOpts<'_>) -> CargoResult<Option
10099
.shell()
101100
.status("Packaging", pkg.package_id().to_string())?;
102101
dst.file().set_len(0)?;
103-
tar(ws, &src_files, vcs_info.as_ref(), dst.file(), &filename)
102+
tar(ws, &src_files, vcs_info.as_ref(), &dst, &filename)
104103
.chain_err(|| failure::format_err!("failed to prepare local package for uploading"))?;
105104
if opts.verify {
106105
dst.seek(SeekFrom::Start(0))?;
@@ -281,35 +280,43 @@ fn tar(
281280
ws: &Workspace<'_>,
282281
src_files: &[PathBuf],
283282
vcs_info: Option<&serde_json::Value>,
284-
dst: &File,
283+
dst: &FileLock,
285284
filename: &str,
286285
) -> CargoResult<()> {
287286
// Prepare the encoder and its header.
288287
let filename = Path::new(filename);
289288
let encoder = GzBuilder::new()
290289
.filename(util::path2bytes(filename)?)
291-
.write(dst, Compression::best());
290+
.write(dst.file(), Compression::best());
292291

293292
// Put all package files into a compressed archive.
294293
let mut ar = Builder::new(encoder);
295294
let pkg = ws.current()?;
296295
let config = ws.config();
297296
let root = pkg.root();
298-
for file in src_files.iter() {
299-
let relative = file.strip_prefix(root)?;
297+
// While creating the tar file, also copy to the output directory.
298+
let dest_copy_root = dst
299+
.parent()
300+
.join(format!("{}-{}", pkg.name(), pkg.version()));
301+
if dest_copy_root.exists() {
302+
paths::remove_dir_all(&dest_copy_root)?;
303+
}
304+
fs::create_dir_all(&dest_copy_root)?;
305+
for src_file in src_files {
306+
let relative = src_file.strip_prefix(root)?;
300307
check_filename(relative)?;
301-
let relative = relative.to_str().ok_or_else(|| {
308+
let relative_str = relative.to_str().ok_or_else(|| {
302309
failure::format_err!("non-utf8 path in source directory: {}", relative.display())
303310
})?;
304311
config
305312
.shell()
306-
.verbose(|shell| shell.status("Archiving", &relative))?;
313+
.verbose(|shell| shell.status("Archiving", &relative_str))?;
307314
let path = format!(
308315
"{}-{}{}{}",
309316
pkg.name(),
310317
pkg.version(),
311318
path::MAIN_SEPARATOR,
312-
relative
319+
relative_str
313320
);
314321

315322
// The `tar::Builder` type by default will build GNU archives, but
@@ -333,20 +340,21 @@ fn tar(
333340
let mut header = Header::new_ustar();
334341
header
335342
.set_path(&path)
336-
.chain_err(|| format!("failed to add to archive: `{}`", relative))?;
337-
let mut file = File::open(file)
338-
.chain_err(|| format!("failed to open for archiving: `{}`", file.display()))?;
343+
.chain_err(|| format!("failed to add to archive: `{}`", relative_str))?;
344+
let mut file = File::open(src_file)
345+
.chain_err(|| format!("failed to open for archiving: `{}`", src_file.display()))?;
339346
let metadata = file
340347
.metadata()
341-
.chain_err(|| format!("could not learn metadata for: `{}`", relative))?;
348+
.chain_err(|| format!("could not learn metadata for: `{}`", relative_str))?;
342349
header.set_metadata(&metadata);
343350

344-
if relative == "Cargo.toml" {
351+
if relative_str == "Cargo.toml" {
345352
let orig = Path::new(&path).with_file_name("Cargo.toml.orig");
346353
header.set_path(&orig)?;
347354
header.set_cksum();
348-
ar.append(&header, &mut file)
349-
.chain_err(|| internal(format!("could not archive source file `{}`", relative)))?;
355+
ar.append(&header, &mut file).chain_err(|| {
356+
internal(format!("could not archive source file `{}`", relative_str))
357+
})?;
350358

351359
let mut header = Header::new_ustar();
352360
let toml = pkg.to_registry_toml(ws.config())?;
@@ -355,12 +363,22 @@ fn tar(
355363
header.set_mode(0o644);
356364
header.set_size(toml.len() as u64);
357365
header.set_cksum();
358-
ar.append(&header, toml.as_bytes())
359-
.chain_err(|| internal(format!("could not archive source file `{}`", relative)))?;
366+
ar.append(&header, toml.as_bytes()).chain_err(|| {
367+
internal(format!("could not archive source file `{}`", relative_str))
368+
})?;
369+
fs::write(dest_copy_root.join(relative), toml)?;
370+
fs::copy(src_file, dest_copy_root.join("Cargo.toml.orig"))?;
360371
} else {
361372
header.set_cksum();
362-
ar.append(&header, &mut file)
363-
.chain_err(|| internal(format!("could not archive source file `{}`", relative)))?;
373+
ar.append(&header, &mut file).chain_err(|| {
374+
internal(format!("could not archive source file `{}`", relative_str))
375+
})?;
376+
let dest = dest_copy_root.join(relative);
377+
let parent = dest.parent().unwrap();
378+
if !parent.exists() {
379+
fs::create_dir_all(parent)?;
380+
}
381+
fs::copy(src_file, dest)?;
364382
}
365383
}
366384

@@ -394,7 +412,27 @@ fn tar(
394412
}
395413

396414
if include_lockfile(pkg) {
397-
let toml = paths::read(&ws.root().join("Cargo.lock"))?;
415+
let orig_lock_path = ws.root().join("Cargo.lock");
416+
let new_lock_path = dest_copy_root.join("Cargo.lock");
417+
if orig_lock_path.exists() {
418+
fs::copy(&orig_lock_path, &new_lock_path)?;
419+
}
420+
421+
// Regenerate Cargo.lock using the old one as a guide.
422+
let orig_resolve = ops::load_pkg_lockfile(ws)?;
423+
let id = SourceId::for_path(&dest_copy_root)?;
424+
let mut src = PathSource::new(&dest_copy_root, id, ws.config());
425+
let new_pkg = src.root_package()?;
426+
let specs = vec![PackageIdSpec::from_package_id(new_pkg.package_id())];
427+
let tmp_ws = Workspace::ephemeral(new_pkg, config, None, true)?;
428+
let new_resolve = ops::resolve_ws_with_method(&tmp_ws, None, Method::Everything, &specs)?.1;
429+
// resolve_ws_with_method won't save for ephemeral, do it manually.
430+
ops::write_pkg_lockfile(&tmp_ws, &new_resolve)?;
431+
if let Some(orig_resolve) = orig_resolve {
432+
compare_resolve(config, tmp_ws.current()?, &orig_resolve, &new_resolve)?;
433+
}
434+
435+
let toml = paths::read(&new_lock_path)?;
398436
let path = format!(
399437
"{}-{}{}Cargo.lock",
400438
pkg.name(),
@@ -416,24 +454,97 @@ fn tar(
416454
Ok(())
417455
}
418456

457+
/// Generate warnings when packaging Cargo.lock and the resolve has changed.
458+
fn compare_resolve(
459+
config: &Config,
460+
current_pkg: &Package,
461+
orig_resolve: &Resolve,
462+
new_resolve: &Resolve,
463+
) -> CargoResult<()> {
464+
let new_set: BTreeSet<PackageId> = new_resolve.iter().collect();
465+
let orig_set: BTreeSet<PackageId> = orig_resolve.iter().collect();
466+
let added = new_set.difference(&orig_set);
467+
// Removed entries are ignored, this is used to quickly find hints for why
468+
// an entry changed.
469+
let removed: Vec<&PackageId> = orig_set.difference(&new_set).collect();
470+
for pkg_id in added {
471+
if pkg_id.name() == current_pkg.name() && pkg_id.version() == current_pkg.version() {
472+
// Skip the package that is being created, since its SourceId
473+
// (directory) changes.
474+
continue;
475+
}
476+
// Check for candidates where the source has changed (such as [patch]
477+
// or a dependency with multiple sources like path/version).
478+
let removed_candidates: Vec<&PackageId> = removed
479+
.iter()
480+
.filter(|orig_pkg_id| {
481+
orig_pkg_id.name() == pkg_id.name() && orig_pkg_id.version() == pkg_id.version()
482+
})
483+
.cloned()
484+
.collect();
485+
let extra = match removed_candidates.len() {
486+
0 => {
487+
// This can happen if the original was out of date.
488+
let previous_versions: Vec<&PackageId> = removed
489+
.iter()
490+
.filter(|orig_pkg_id| orig_pkg_id.name() == pkg_id.name())
491+
.cloned()
492+
.collect();
493+
match previous_versions.len() {
494+
0 => String::new(),
495+
1 => format!(
496+
", previous version was `{}`",
497+
previous_versions[0].version()
498+
),
499+
_ => format!(
500+
", previous versions were: {}",
501+
previous_versions
502+
.iter()
503+
.map(|pkg_id| format!("`{}`", pkg_id.version()))
504+
.collect::<Vec<_>>()
505+
.join(", ")
506+
),
507+
}
508+
}
509+
1 => {
510+
// This can happen for multi-sourced dependencies like
511+
// `{path="...", version="..."}` or `[patch]` replacement.
512+
// `[replace]` is not captured in Cargo.lock.
513+
format!(
514+
", was originally sourced from `{}`",
515+
removed_candidates[0].source_id()
516+
)
517+
}
518+
_ => {
519+
// I don't know if there is a way to actually trigger this,
520+
// but handle it just in case.
521+
let comma_list = removed_candidates
522+
.iter()
523+
.map(|pkg_id| format!("`{}`", pkg_id.source_id()))
524+
.collect::<Vec<_>>()
525+
.join(", ");
526+
format!(
527+
", was originally sourced from one of these sources: {}",
528+
comma_list
529+
)
530+
}
531+
};
532+
config
533+
.shell()
534+
.warn(format!("package `{}` added to Cargo.lock{}", pkg_id, extra))?;
535+
}
536+
Ok(())
537+
}
538+
419539
fn run_verify(ws: &Workspace<'_>, tar: &FileLock, opts: &PackageOpts<'_>) -> CargoResult<()> {
420540
let config = ws.config();
421541
let pkg = ws.current()?;
422542

423543
config.shell().status("Verifying", pkg)?;
424544

425-
let f = GzDecoder::new(tar.file());
426545
let dst = tar
427546
.parent()
428547
.join(&format!("{}-{}", pkg.name(), pkg.version()));
429-
if dst.exists() {
430-
paths::remove_dir_all(&dst)?;
431-
}
432-
let mut archive = Archive::new(f);
433-
// We don't need to set the Modified Time, as it's not relevant to verification
434-
// and it errors on filesystems that don't support setting a modified timestamp
435-
archive.set_preserve_mtime(false);
436-
archive.unpack(dst.parent().unwrap())?;
437548

438549
// Manufacture an ephemeral workspace to ensure that even if the top-level
439550
// package has a workspace we can still build our new crate.

0 commit comments

Comments (0)