1use std::collections::BTreeMap;
2use std::collections::BTreeSet;
3use std::collections::HashMap;
4use std::fs::File;
5use std::io::SeekFrom;
6use std::io::prelude::*;
7use std::path::{Path, PathBuf};
8use std::task::Poll;
9
10use crate::core::PackageIdSpecQuery;
11use crate::core::Shell;
12use crate::core::Verbosity;
13use crate::core::Workspace;
14use crate::core::dependency::DepKind;
15use crate::core::manifest::Target;
16use crate::core::resolver::CliFeatures;
17use crate::core::resolver::HasDevUnits;
18use crate::core::{Package, PackageId, PackageSet, Resolve, SourceId};
19use crate::ops::lockfile::LOCKFILE_NAME;
20use crate::ops::registry::{RegistryOrIndex, infer_registry};
21use crate::sources::path::PathEntry;
22use crate::sources::{CRATES_IO_REGISTRY, PathSource};
23use crate::util::FileLock;
24use crate::util::Filesystem;
25use crate::util::GlobalContext;
26use crate::util::Graph;
27use crate::util::HumanBytes;
28use crate::util::cache_lock::CacheLockMode;
29use crate::util::context::JobsConfig;
30use crate::util::errors::CargoResult;
31use crate::util::errors::ManifestError;
32use crate::util::restricted_names;
33use crate::util::toml::prepare_for_publish;
34use crate::{drop_println, ops};
35use annotate_snippets::Level;
36use anyhow::{Context as _, bail};
37use cargo_util::paths;
38use cargo_util_schemas::index::{IndexPackage, RegistryDependency};
39use cargo_util_schemas::messages;
40use flate2::{Compression, GzBuilder};
41use tar::{Builder, EntryType, Header, HeaderMode};
42use tracing::debug;
43use unicase::Ascii as UncasedAscii;
44
45mod vcs;
46mod verify;
47
/// Message format for the output of `cargo package --list`.
#[derive(Debug, Clone)]
pub enum PackageMessageFormat {
    /// Plain text: one archive-relative file path per line.
    Human,
    /// A machine-readable JSON message per package (see [`messages::PackageList`]).
    Json,
}
56
impl PackageMessageFormat {
    /// The set of names a [`PackageMessageFormat`] can be parsed from
    /// (kept in sync with the `FromStr` impl below).
    pub const POSSIBLE_VALUES: [&str; 2] = ["human", "json"];

    /// The format used when the caller does not specify one.
    pub const DEFAULT: &str = "human";
}
62
63impl std::str::FromStr for PackageMessageFormat {
64 type Err = anyhow::Error;
65
66 fn from_str(s: &str) -> Result<PackageMessageFormat, anyhow::Error> {
67 match s {
68 "human" => Ok(PackageMessageFormat::Human),
69 "json" => Ok(PackageMessageFormat::Json),
70 f => bail!("unknown message format `{f}`"),
71 }
72 }
73}
74
/// Compiled options for packaging one or more workspace members into
/// `.crate` tarballs.
#[derive(Clone)]
pub struct PackageOpts<'gctx> {
    pub gctx: &'gctx GlobalContext,
    /// Only print the files that would be included in the archive, without
    /// actually creating it.
    pub list: bool,
    /// Output format used for the `list` mode.
    pub fmt: PackageMessageFormat,
    /// Whether to warn about missing manifest metadata (description,
    /// license, ...); see `check_metadata`.
    pub check_metadata: bool,
    /// Allow packaging with a dirty VCS checkout.
    /// NOTE(review): consumed by the `vcs` submodule, not visible here — confirm there.
    pub allow_dirty: bool,
    /// Whether to generate and include a `Cargo.lock` in the archive.
    pub include_lockfile: bool,
    /// Build the packaged crate from the tarball to verify it is buildable.
    pub verify: bool,
    /// Job count forwarded to the verification build (presumably; used by
    /// the `verify` submodule).
    pub jobs: Option<JobsConfig>,
    pub keep_going: bool,
    /// Which workspace members to package.
    pub to_package: ops::Packages,
    pub targets: Vec<String>,
    pub cli_features: CliFeatures,
    /// Registry (or index URL) the packages are intended for; affects
    /// `package.publish` validation and metadata warnings.
    pub reg_or_index: Option<ops::RegistryOrIndex>,
    pub dry_run: bool,
}
104
/// Archive name under which the original (pre-normalization) manifest is stored.
const ORIGINAL_MANIFEST_FILE: &str = "Cargo.toml.orig";
/// Archive name of the generated file recording the VCS state at packaging time.
const VCS_INFO_FILE: &str = ".cargo_vcs_info.json";
107
/// A single file destined for the package archive.
struct ArchiveFile {
    /// The archive-relative path of the file (without the top-level
    /// `<name>-<version>` directory that `tar` prepends).
    rel_path: PathBuf,
    /// `rel_path` as a UTF-8 string, for display and status messages.
    rel_str: String,
    /// Where the file's bytes come from.
    contents: FileContents,
}
117
enum FileContents {
    /// Absolute path to an existing file on disk whose contents are copied
    /// into the archive verbatim.
    OnDisk(PathBuf),
    /// Contents are generated at archiving time (see [`GeneratedFile`]).
    Generated(GeneratedFile),
}
124
enum GeneratedFile {
    /// The normalized `Cargo.toml`; the path points at the original
    /// on-disk manifest it is derived from.
    Manifest(PathBuf),
    /// The generated `Cargo.lock`; `Some` holds the workspace lockfile path
    /// when one exists on disk, `None` when the lockfile is built from scratch.
    Lockfile(Option<PathBuf>),
    /// The `.cargo_vcs_info.json` contents, serialized as pretty JSON.
    VcsInfo(vcs::VcsInfo),
}
137
138#[tracing::instrument(skip_all)]
140fn create_package(
141 ws: &Workspace<'_>,
142 opts: &PackageOpts<'_>,
143 pkg: &Package,
144 ar_files: Vec<ArchiveFile>,
145 local_reg: Option<&TmpRegistry<'_>>,
146) -> CargoResult<FileLock> {
147 let gctx = ws.gctx();
148 let filecount = ar_files.len();
149
150 for dep in pkg.dependencies() {
152 super::check_dep_has_version(dep, false).map_err(|err| {
153 ManifestError::new(
154 err.context(format!(
155 "failed to verify manifest at `{}`",
156 pkg.manifest_path().display()
157 )),
158 pkg.manifest_path().into(),
159 )
160 })?;
161 }
162
163 let filename = pkg.package_id().tarball_name();
164 let build_dir = ws.build_dir();
165 paths::create_dir_all_excluded_from_backups_atomic(build_dir.as_path_unlocked())?;
166 let dir = build_dir.join("package").join("tmp-crate");
167 let dst = dir.open_rw_exclusive_create(&filename, gctx, "package scratch space")?;
168
169 gctx.shell()
174 .status("Packaging", pkg.package_id().to_string())?;
175 dst.file().set_len(0)?;
176 let uncompressed_size = tar(ws, opts, pkg, local_reg, ar_files, dst.file(), &filename)
177 .context("failed to prepare local package for uploading")?;
178
179 let dst_metadata = dst
180 .file()
181 .metadata()
182 .with_context(|| format!("could not learn metadata for: `{}`", dst.path().display()))?;
183 let compressed_size = dst_metadata.len();
184
185 let uncompressed = HumanBytes(uncompressed_size);
186 let compressed = HumanBytes(compressed_size);
187
188 let message = format!("{filecount} files, {uncompressed:.1} ({compressed:.1} compressed)");
189 drop(gctx.shell().status("Packaged", message));
191
192 return Ok(dst);
193}
194
/// Packages the workspace members selected by `opts` and uplifts the finished
/// tarballs into `target/package`, returning a lock on each final archive.
pub fn package(ws: &Workspace<'_>, opts: &PackageOpts<'_>) -> CargoResult<Vec<FileLock>> {
    let specs = &opts.to_package.to_package_id_specs(ws)?;
    // When explicit `-p` specs were given, validate that each one matches a
    // workspace member before doing any work.
    if let ops::Packages::Packages(_) = opts.to_package {
        for spec in specs.iter() {
            let member_ids = ws.members().map(|p| p.package_id());
            spec.query(member_ids)?;
        }
    }
    let mut pkgs = ws.members_with_features(specs, &opts.cli_features)?;

    // Keep only the members the specs actually selected.
    pkgs.retain(|(pkg, _feats)| specs.iter().any(|spec| spec.matches(pkg.package_id())));

    let packaged = do_package(ws, opts, pkgs)?;

    // Copy each archive out of the build scratch space into the
    // `target/package` artifact directory.
    let mut result = Vec::new();
    let target_dir = ws.target_dir();
    paths::create_dir_all_excluded_from_backups_atomic(target_dir.as_path_unlocked())?;
    let artifact_dir = target_dir.join("package");
    for (pkg, _, src) in packaged {
        let filename = pkg.package_id().tarball_name();
        let dst = artifact_dir.open_rw_exclusive_create(filename, ws.gctx(), "uplifted package")?;
        // Rewind the scratch file before copying its full contents.
        src.file().seek(SeekFrom::Start(0))?;
        std::io::copy(&mut src.file(), &mut dst.file())?;
        result.push(dst);
    }

    Ok(result)
}
231
232pub(crate) fn package_with_dep_graph(
238 ws: &Workspace<'_>,
239 opts: &PackageOpts<'_>,
240 pkgs: Vec<(&Package, CliFeatures)>,
241) -> CargoResult<LocalDependencies<(CliFeatures, FileLock)>> {
242 let output = do_package(ws, opts, pkgs)?;
243
244 Ok(local_deps(output.into_iter().map(
245 |(pkg, opts, tarball)| (pkg, (opts.cli_features, tarball)),
246 )))
247}
248
/// Packages (or, with `opts.list`, merely lists the contents of) each package
/// in `pkgs`, in dependency order, returning the produced tarballs.
///
/// Path dependencies between the packaged crates are resolved through a
/// temporary local registry overlay so lockfile generation and verification
/// can see not-yet-published versions.
fn do_package<'a>(
    ws: &Workspace<'_>,
    opts: &PackageOpts<'a>,
    pkgs: Vec<(&Package, CliFeatures)>,
) -> CargoResult<Vec<(Package, PackageOpts<'a>, FileLock)>> {
    // If a lockfile will be shipped, make sure the workspace one is up to
    // date before packaging starts.
    if ws
        .lock_root()
        .as_path_unlocked()
        .join(LOCKFILE_NAME)
        .exists()
        && opts.include_lockfile
    {
        let dry_run = false;
        let _ = ops::resolve_ws(ws, dry_run)?;
    }

    let deps = local_deps(pkgs.iter().map(|(p, f)| ((*p).clone(), f.clone())));
    let just_pkgs: Vec<_> = pkgs.iter().map(|p| p.0).collect();

    // A local registry is only needed when inter-package path deps must be
    // resolvable (for the lockfile or verification build), or when an
    // explicit registry needs validating against `package.publish`.
    let needs_local_reg = deps.has_dependencies() && (opts.include_lockfile || opts.verify);
    let verify_registry_allow_list = opts.reg_or_index.is_some();
    let mut local_reg = if !opts.list && (needs_local_reg || verify_registry_allow_list) {
        let sid = get_registry(ws.gctx(), &just_pkgs, opts.reg_or_index.clone())?;
        debug!("packaging for registry {}", sid);
        let reg_dir = ws.build_dir().join("package").join("tmp-registry");
        let local_reg = TmpRegistry::new(ws.gctx(), reg_dir, sid)?;
        Some(local_reg)
    } else {
        None
    };

    // Package in dependency order so each crate's path deps are already in
    // the local registry when it is packaged.
    let sorted_pkgs = deps.sort();
    let mut outputs: Vec<(Package, PackageOpts<'_>, FileLock)> = Vec::new();
    for (pkg, cli_features) in sorted_pkgs {
        // Per-package options: only this package, with its own feature set.
        let opts = PackageOpts {
            cli_features: cli_features.clone(),
            to_package: ops::Packages::Default,
            ..opts.clone()
        };
        let ar_files = prepare_archive(ws, &pkg, &opts)?;

        if opts.list {
            match opts.fmt {
                PackageMessageFormat::Human => {
                    for ar_file in &ar_files {
                        drop_println!(ws.gctx(), "{}", ar_file.rel_str);
                    }
                }
                PackageMessageFormat::Json => {
                    let message = messages::PackageList {
                        id: pkg.package_id().to_spec(),
                        files: BTreeMap::from_iter(ar_files.into_iter().map(|f| {
                            let file = match f.contents {
                                FileContents::OnDisk(path) => messages::PackageFile::Copy { path },
                                FileContents::Generated(
                                    GeneratedFile::Manifest(path)
                                    | GeneratedFile::Lockfile(Some(path)),
                                ) => messages::PackageFile::Generate { path: Some(path) },
                                FileContents::Generated(
                                    GeneratedFile::VcsInfo(_) | GeneratedFile::Lockfile(None),
                                ) => messages::PackageFile::Generate { path: None },
                            };
                            (f.rel_path, file)
                        })),
                    };
                    let _ = ws.gctx().shell().print_json(&message);
                }
            }
        } else {
            let tarball = create_package(ws, &opts, &pkg, ar_files, local_reg.as_ref())?;
            if let Some(local_reg) = local_reg.as_mut() {
                // `publish = []` means "never publish"; don't overlay such
                // packages in the temporary registry.
                if pkg.publish() != &Some(Vec::new()) {
                    local_reg.add_package(ws, &pkg, &tarball)?;
                }
            }
            outputs.push((pkg, opts, tarball));
        }
    }

    // Verify after all tarballs exist, so verification builds can resolve
    // path deps through the local registry.
    if opts.verify {
        for (pkg, opts, tarball) in &outputs {
            verify::run_verify(ws, pkg, tarball, local_reg.as_ref(), opts)
                .context("failed to verify package tarball")?
        }
    }

    Ok(outputs)
}
352
353fn get_registry(
360 gctx: &GlobalContext,
361 pkgs: &[&Package],
362 reg_or_index: Option<RegistryOrIndex>,
363) -> CargoResult<SourceId> {
364 let reg_or_index = match reg_or_index.clone() {
365 Some(r) => Some(r),
366 None => infer_registry(pkgs)?,
367 };
368
369 let reg = reg_or_index
371 .clone()
372 .unwrap_or_else(|| RegistryOrIndex::Registry(CRATES_IO_REGISTRY.to_owned()));
373 if let RegistryOrIndex::Registry(reg_name) = reg {
374 for pkg in pkgs {
375 if let Some(allowed) = pkg.publish().as_ref() {
376 if !allowed.is_empty() && !allowed.iter().any(|a| a == ®_name) {
380 bail!(
381 "`{}` cannot be packaged.\n\
382 The registry `{}` is not listed in the `package.publish` value in Cargo.toml.",
383 pkg.name(),
384 reg_name
385 );
386 }
387 }
388 }
389 }
390 Ok(ops::registry::get_source_id(gctx, reg_or_index.as_ref())?.replacement)
391}
392
/// The packages of a workspace being packaged, plus the dependency graph
/// between them (restricted to path dependencies within the set).
#[derive(Clone, Debug, Default)]
pub(crate) struct LocalDependencies<T> {
    /// Each package keyed by its id, carrying a caller-supplied payload `T`.
    pub packages: HashMap<PackageId, (Package, T)>,
    /// Edges point from a package to its in-set path dependencies.
    pub graph: Graph<PackageId, ()>,
}
399
400impl<T: Clone> LocalDependencies<T> {
401 pub fn sort(&self) -> Vec<(Package, T)> {
402 self.graph
403 .sort()
404 .into_iter()
405 .map(|name| self.packages[&name].clone())
406 .collect()
407 }
408
409 pub fn has_dependencies(&self) -> bool {
410 self.graph
411 .iter()
412 .any(|node| self.graph.edges(node).next().is_some())
413 }
414}
415
416fn local_deps<T>(packages: impl Iterator<Item = (Package, T)>) -> LocalDependencies<T> {
421 let packages: HashMap<PackageId, (Package, T)> = packages
422 .map(|(pkg, payload)| (pkg.package_id(), (pkg, payload)))
423 .collect();
424
425 let source_to_pkg: HashMap<_, _> = packages
430 .keys()
431 .map(|pkg_id| (pkg_id.source_id(), *pkg_id))
432 .collect();
433
434 let mut graph = Graph::new();
435 for (pkg, _payload) in packages.values() {
436 graph.add(pkg.package_id());
437 for dep in pkg.dependencies() {
438 if !dep.source_id().is_path() {
440 continue;
441 }
442
443 if dep.kind() == DepKind::Development && !dep.specified_req() {
446 continue;
447 };
448
449 if dep.source_id() == pkg.package_id().source_id() {
451 continue;
452 }
453
454 if let Some(dep_pkg) = source_to_pkg.get(&dep.source_id()) {
455 graph.link(pkg.package_id(), *dep_pkg);
456 }
457 }
458 }
459
460 LocalDependencies { packages, graph }
461}
462
/// Performs pre-archiving checks and computes the list of files to package
/// for `pkg`.
#[tracing::instrument(skip_all)]
fn prepare_archive(
    ws: &Workspace<'_>,
    pkg: &Package,
    opts: &PackageOpts<'_>,
) -> CargoResult<Vec<ArchiveFile>> {
    let gctx = ws.gctx();
    let mut src = PathSource::new(pkg.root(), pkg.package_id().source_id(), gctx);
    src.load()?;

    if opts.check_metadata {
        check_metadata(pkg, opts.reg_or_index.as_ref(), gctx)?;
    }

    // When both are set, `package.include` takes precedence.
    if !pkg.manifest().exclude().is_empty() && !pkg.manifest().include().is_empty() {
        gctx.shell().warn(
            "both package.include and package.exclude are specified; \
             the exclude list will be ignored",
        )?;
    }
    let src_files = src.list_files(pkg)?;

    // Check repository state (e.g. dirty files) and capture VCS metadata for
    // `.cargo_vcs_info.json`.
    let vcs_info = vcs::check_repo_state(pkg, &src_files, ws, &opts)?;
    build_ar_list(ws, pkg, src_files, vcs_info, opts.include_lockfile)
}
490
/// Builds the final list of [`ArchiveFile`]s from the on-disk source files,
/// adding the generated `Cargo.toml`/`Cargo.toml.orig`, optional `Cargo.lock`
/// and `.cargo_vcs_info.json`, plus out-of-tree license/readme files.
///
/// Entries are de-duplicated case-insensitively (for case-insensitive file
/// systems) and returned sorted by relative path.
#[tracing::instrument(skip_all)]
fn build_ar_list(
    ws: &Workspace<'_>,
    pkg: &Package,
    src_files: Vec<PathEntry>,
    vcs_info: Option<vcs::VcsInfo>,
    include_lockfile: bool,
) -> CargoResult<Vec<ArchiveFile>> {
    // Keyed case-insensitively so differently-cased duplicates collapse.
    let mut result = HashMap::new();
    let root = pkg.root();
    for src_file in &src_files {
        let rel_path = src_file.strip_prefix(&root)?;
        check_filename(rel_path, &mut ws.gctx().shell())?;
        let rel_str = rel_path.to_str().ok_or_else(|| {
            anyhow::format_err!("non-utf8 path in source directory: {}", rel_path.display())
        })?;
        match rel_str {
            // An on-disk Cargo.lock is ignored; a fresh one is generated below.
            "Cargo.lock" => continue,
            // These names are reserved for files cargo itself generates.
            VCS_INFO_FILE | ORIGINAL_MANIFEST_FILE => anyhow::bail!(
                "invalid inclusion of reserved file name {} in package source",
                rel_str
            ),
            _ => {
                result
                    .entry(UncasedAscii::new(rel_str))
                    .or_insert_with(Vec::new)
                    .push(ArchiveFile {
                        rel_path: rel_path.to_owned(),
                        rel_str: rel_str.to_owned(),
                        contents: FileContents::OnDisk(src_file.to_path_buf()),
                    });
            }
        }
    }

    // Replace the on-disk manifest (whatever its case) with the normalized,
    // generated `Cargo.toml`, keeping the original as `Cargo.toml.orig`.
    if result.remove(&UncasedAscii::new("Cargo.toml")).is_some() {
        result
            .entry(UncasedAscii::new(ORIGINAL_MANIFEST_FILE))
            .or_insert_with(Vec::new)
            .push(ArchiveFile {
                rel_path: PathBuf::from(ORIGINAL_MANIFEST_FILE),
                rel_str: ORIGINAL_MANIFEST_FILE.to_string(),
                contents: FileContents::OnDisk(pkg.manifest_path().to_owned()),
            });
        result
            .entry(UncasedAscii::new("Cargo.toml"))
            .or_insert_with(Vec::new)
            .push(ArchiveFile {
                rel_path: PathBuf::from("Cargo.toml"),
                rel_str: "Cargo.toml".to_string(),
                contents: FileContents::Generated(GeneratedFile::Manifest(
                    pkg.manifest_path().to_owned(),
                )),
            });
    } else {
        ws.gctx().shell().warn(&format!(
            "no `Cargo.toml` file found when packaging `{}` (note the case of the file name).",
            pkg.name()
        ))?;
    }

    if include_lockfile {
        // A lockfile is always generated; record the workspace lockfile path
        // only when one actually exists on disk.
        let lockfile_path = ws.lock_root().as_path_unlocked().join(LOCKFILE_NAME);
        let lockfile_path = lockfile_path.exists().then_some(lockfile_path);
        let rel_str = "Cargo.lock";
        result
            .entry(UncasedAscii::new(rel_str))
            .or_insert_with(Vec::new)
            .push(ArchiveFile {
                rel_path: PathBuf::from(rel_str),
                rel_str: rel_str.to_string(),
                contents: FileContents::Generated(GeneratedFile::Lockfile(lockfile_path)),
            });
    }

    if let Some(vcs_info) = vcs_info {
        let rel_str = VCS_INFO_FILE;
        result
            .entry(UncasedAscii::new(rel_str))
            .or_insert_with(Vec::new)
            .push(ArchiveFile {
                rel_path: PathBuf::from(rel_str),
                rel_str: rel_str.to_string(),
                contents: FileContents::Generated(GeneratedFile::VcsInfo(vcs_info)),
            });
    }

    // Collects error messages for missing license-file/readme entries so all
    // problems are reported at once.
    let mut invalid_manifest_field: Vec<String> = vec![];

    let mut result = result.into_values().flatten().collect();
    if let Some(license_file) = &pkg.manifest().metadata().license_file {
        let license_path = Path::new(license_file);
        let abs_file_path = paths::normalize_path(&pkg.root().join(license_path));
        if abs_file_path.is_file() {
            check_for_file_and_add(
                "license-file",
                license_path,
                abs_file_path,
                pkg,
                &mut result,
                ws,
            )?;
        } else {
            error_on_nonexistent_file(
                &pkg,
                &license_path,
                "license-file",
                &mut invalid_manifest_field,
            );
        }
    }
    if let Some(readme) = &pkg.manifest().metadata().readme {
        let readme_path = Path::new(readme);
        let abs_file_path = paths::normalize_path(&pkg.root().join(readme_path));
        if abs_file_path.is_file() {
            check_for_file_and_add("readme", readme_path, abs_file_path, pkg, &mut result, ws)?;
        } else {
            error_on_nonexistent_file(&pkg, &readme_path, "readme", &mut invalid_manifest_field);
        }
    }

    if !invalid_manifest_field.is_empty() {
        return Err(anyhow::anyhow!(invalid_manifest_field.join("\n")));
    }

    // A build script outside the package root (or missing) cannot be packaged
    // meaningfully; reject it.
    for t in pkg
        .manifest()
        .targets()
        .iter()
        .filter(|t| t.is_custom_build())
    {
        if let Some(custom_build_path) = t.src_path().path() {
            let abs_custom_build_path = paths::normalize_path(&pkg.root().join(custom_build_path));
            if !abs_custom_build_path.is_file() || !abs_custom_build_path.starts_with(pkg.root()) {
                error_custom_build_file_not_in_package(pkg, &abs_custom_build_path, t)?;
            }
        }
    }

    result.sort_unstable_by(|a, b| a.rel_path.cmp(&b.rel_path));

    Ok(result)
}
637
/// Adds a license-file/readme at `abs_file_path` to `result` unless it is
/// already listed.
///
/// If the path lies outside the package root, the file is archived at the
/// package root under its bare file name — unless a file of that name is
/// already in the list, in which case only a warning is printed.
fn check_for_file_and_add(
    label: &str,
    file_path: &Path,
    abs_file_path: PathBuf,
    pkg: &Package,
    result: &mut Vec<ArchiveFile>,
    ws: &Workspace<'_>,
) -> CargoResult<()> {
    match abs_file_path.strip_prefix(&pkg.root()) {
        Ok(rel_file_path) => {
            // The file is inside the package; add it if not already present.
            if !result.iter().any(|ar| ar.rel_path == rel_file_path) {
                result.push(ArchiveFile {
                    rel_path: rel_file_path.to_path_buf(),
                    rel_str: rel_file_path
                        .to_str()
                        .expect("everything was utf8")
                        .to_string(),
                    contents: FileContents::OnDisk(abs_file_path),
                })
            }
        }
        Err(_) => {
            // The file points outside the package root; `file_name()` is safe
            // to unwrap because `abs_file_path.is_file()` held at the caller.
            let file_name = file_path.file_name().unwrap();
            if result.iter().any(|ar| ar.rel_path == file_name) {
                ws.gctx().shell().warn(&format!(
                    "{} `{}` appears to be a path outside of the package, \
                     but there is already a file named `{}` in the root of the package. \
                     The archived crate will contain the copy in the root of the package. \
                     Update the {} to point to the path relative \
                     to the root of the package to remove this warning.",
                    label,
                    file_path.display(),
                    file_name.to_str().unwrap(),
                    label,
                ))?;
            } else {
                result.push(ArchiveFile {
                    rel_path: PathBuf::from(file_name),
                    rel_str: file_name.to_str().unwrap().to_string(),
                    contents: FileContents::OnDisk(abs_file_path),
                })
            }
        }
    }
    Ok(())
}
685
686fn error_on_nonexistent_file(
687 pkg: &Package,
688 path: &Path,
689 manifest_key_name: &'static str,
690 invalid: &mut Vec<String>,
691) {
692 let rel_msg = if path.is_absolute() {
693 "".to_string()
694 } else {
695 format!(" (relative to `{}`)", pkg.root().display())
696 };
697
698 let msg = format!(
699 "{manifest_key_name} `{}` does not appear to exist{}.\n\
700 Please update the {manifest_key_name} setting in the manifest at `{}`.",
701 path.display(),
702 rel_msg,
703 pkg.manifest_path().display()
704 );
705
706 invalid.push(msg);
707}
708
709fn error_custom_build_file_not_in_package(
710 pkg: &Package,
711 path: &Path,
712 target: &Target,
713) -> CargoResult<Vec<ArchiveFile>> {
714 let tip = {
715 let description_name = target.description_named();
716 if path.is_file() {
717 format!(
718 "the source file of {description_name} doesn't appear to be a path inside of the package.\n\
719 It is at `{}`, whereas the root the package is `{}`.\n",
720 path.display(),
721 pkg.root().display()
722 )
723 } else {
724 format!("the source file of {description_name} doesn't appear to exist.\n",)
725 }
726 };
727 let msg = format!(
728 "{}\
729 This may cause issue during packaging, as modules resolution and resources included via macros are often relative to the path of source files.\n\
730 Please update the `build` setting in the manifest at `{}` and point to a path inside the root of the package.",
731 tip,
732 pkg.manifest_path().display()
733 );
734 anyhow::bail!(msg)
735}
736
/// Generates the contents of the `Cargo.lock` to ship inside the package,
/// by re-resolving an ephemeral workspace containing only `publish_pkg`
/// (with the local registry overlay, when present, standing in for the
/// not-yet-published sibling crates).
fn build_lock(
    ws: &Workspace<'_>,
    opts: &PackageOpts<'_>,
    publish_pkg: &Package,
    local_reg: Option<&TmpRegistry<'_>>,
) -> CargoResult<String> {
    let gctx = ws.gctx();
    let mut orig_resolve = ops::load_pkg_lockfile(ws)?;

    let mut tmp_ws = Workspace::ephemeral(publish_pkg.clone(), ws.gctx(), None, true)?;

    if let Some(local_reg) = local_reg {
        // Resolve path deps against the temporary registry overlay.
        tmp_ws.add_local_overlay(
            local_reg.upstream,
            local_reg.root.as_path_unlocked().to_owned(),
        );
        if opts.dry_run {
            // In a dry run the sibling crates were never uploaded, so inject
            // their locally computed checksums into the original resolve to
            // keep it usable as a baseline.
            if let Some(orig_resolve) = orig_resolve.as_mut() {
                let upstream_in_lock = if local_reg.upstream.is_crates_io() {
                    SourceId::crates_io(gctx)?
                } else {
                    local_reg.upstream
                };
                for (p, s) in local_reg.checksums() {
                    orig_resolve.set_checksum(p.with_source_id(upstream_in_lock), s.to_owned());
                }
            }
        }
    }
    let mut tmp_reg = tmp_ws.package_registry()?;

    // Regenerate the lockfile, preserving as much of the original resolve
    // as possible.
    let mut new_resolve = ops::resolve_with_previous(
        &mut tmp_reg,
        &tmp_ws,
        &CliFeatures::new_all(true),
        HasDevUnits::Yes,
        orig_resolve.as_ref(),
        None,
        &[],
        true,
    )?;

    let pkg_set = ops::get_resolved_packages(&new_resolve, tmp_reg)?;

    // In verbose mode, explain any differences from the original lockfile.
    if let Some(orig_resolve) = orig_resolve {
        compare_resolve(gctx, tmp_ws.current()?, &orig_resolve, &new_resolve)?;
    }
    check_yanked(
        gctx,
        &pkg_set,
        &new_resolve,
        "consider updating to a version that is not yanked",
    )?;

    ops::resolve_to_string(&tmp_ws, &mut new_resolve)
}
797
/// Warns about manifest metadata that is missing but recommended for
/// publishing (description, license, documentation links, ...).
///
/// The warning is only emitted for crates.io (explicitly named or when no
/// registry was given); custom indexes are assumed not to care.
fn check_metadata(
    pkg: &Package,
    reg_or_index: Option<&RegistryOrIndex>,
    gctx: &GlobalContext,
) -> CargoResult<()> {
    let md = pkg.manifest().metadata();

    let mut missing = vec![];

    // Each comma-separated group is "missing" only when every `||`-joined
    // field in it is absent or empty; all field names in a missing group are
    // reported (with `_` shown as `-`, matching the TOML key spelling).
    macro_rules! lacking {
        ($( $($field: ident)||* ),*) => {{
            $(
                if $(md.$field.as_ref().map_or(true, |s| s.is_empty()))&&* {
                    $(missing.push(stringify!($field).replace("_", "-"));)*
                }
            )*
        }}
    }
    lacking!(
        description,
        license || license_file,
        documentation || homepage || repository
    );

    if !missing.is_empty() {
        let should_warn = match reg_or_index {
            Some(RegistryOrIndex::Registry(reg_name)) => reg_name == CRATES_IO_REGISTRY,
            None => true,
            Some(RegistryOrIndex::Index(_)) => false,
        };

        if should_warn {
            // Join the field names as "a, b or c".
            let mut things = missing[..missing.len() - 1].join(", ");
            if !things.is_empty() {
                things.push_str(" or ");
            }
            things.push_str(missing.last().unwrap());

            gctx.shell().print_report(&[
                Level::WARNING.secondary_title(format!("manifest has no {things}"))
                .element(Level::NOTE.message("see https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info"))
            ],
            false
            )?
        }
    }

    Ok(())
}
852
/// Writes all `ar_files` for `pkg` into `dst` as a gzip-compressed tar
/// archive rooted at `<name>-<version>/`, returning the total uncompressed
/// size of the archived contents.
fn tar(
    ws: &Workspace<'_>,
    opts: &PackageOpts<'_>,
    pkg: &Package,
    local_reg: Option<&TmpRegistry<'_>>,
    ar_files: Vec<ArchiveFile>,
    dst: &File,
    filename: &str,
) -> CargoResult<u64> {
    // Prepare the encoder and its header.
    let filename = Path::new(filename);
    let encoder = GzBuilder::new()
        .filename(paths::path2bytes(filename)?)
        .write(dst, Compression::best());

    // Put all package files into a compressed archive.
    let mut ar = Builder::new(encoder);
    // Sparse-file entries are disabled for this archive.
    ar.sparse(false);
    let gctx = ws.gctx();

    // All entries live under a `<name>-<version>/` prefix.
    let base_name = format!("{}-{}", pkg.name(), pkg.version());
    let base_path = Path::new(&base_name);
    let included = ar_files
        .iter()
        .map(|ar_file| ar_file.rel_path.clone())
        .collect::<Vec<_>>();
    // The normalized manifest is derived from this publish-ready copy.
    let publish_pkg = prepare_for_publish(pkg, ws, Some(&included))?;

    let mut uncompressed_size = 0;
    for ar_file in ar_files {
        let ArchiveFile {
            rel_path,
            rel_str,
            contents,
        } = ar_file;
        let ar_path = base_path.join(&rel_path);
        gctx.shell()
            .verbose(|shell| shell.status("Archiving", &rel_str))?;
        let mut header = Header::new_gnu();
        match contents {
            FileContents::OnDisk(disk_path) => {
                let mut file = File::open(&disk_path).with_context(|| {
                    format!("failed to open for archiving: `{}`", disk_path.display())
                })?;
                let metadata = file.metadata().with_context(|| {
                    format!("could not learn metadata for: `{}`", disk_path.display())
                })?;
                // Deterministic mode normalizes metadata (e.g. timestamps) so
                // archives are reproducible across machines.
                header.set_metadata_in_mode(&metadata, HeaderMode::Deterministic);
                header.set_cksum();
                ar.append_data(&mut header, &ar_path, &mut file)
                    .with_context(|| {
                        format!("could not archive source file `{}`", disk_path.display())
                    })?;
                uncompressed_size += metadata.len() as u64;
            }
            FileContents::Generated(generated_kind) => {
                let contents = match generated_kind {
                    GeneratedFile::Manifest(_) => {
                        publish_pkg.manifest().to_normalized_contents()?
                    }
                    GeneratedFile::Lockfile(_) => build_lock(ws, opts, &publish_pkg, local_reg)?,
                    GeneratedFile::VcsInfo(ref s) => serde_json::to_string_pretty(s)?,
                };
                header.set_entry_type(EntryType::file());
                header.set_mode(0o644);
                header.set_size(contents.len() as u64);
                // Fixed arbitrary timestamp so generated entries are
                // byte-for-byte reproducible.
                header.set_mtime(1153704088);
                header.set_cksum();
                ar.append_data(&mut header, &ar_path, contents.as_bytes())
                    .with_context(|| format!("could not archive source file `{}`", rel_str))?;
                uncompressed_size += contents.len() as u64;
            }
        }
    }

    let encoder = ar.into_inner()?;
    // Flush the gzip trailer before returning.
    encoder.finish()?;
    Ok(uncompressed_size)
}
940
/// In verbose mode, prints a note for every package that appears in the
/// packaged `Cargo.lock` but not in the workspace's original lockfile,
/// explaining (when possible) which removed entry it replaced.
fn compare_resolve(
    gctx: &GlobalContext,
    current_pkg: &Package,
    orig_resolve: &Resolve,
    new_resolve: &Resolve,
) -> CargoResult<()> {
    // Only do this work when the user asked for verbose output.
    if gctx.shell().verbosity() != Verbosity::Verbose {
        return Ok(());
    }
    let new_set: BTreeSet<PackageId> = new_resolve.iter().collect();
    let orig_set: BTreeSet<PackageId> = orig_resolve.iter().collect();
    let added = new_set.difference(&orig_set);
    // Removed entries are only used below to explain where additions came from.
    let removed: Vec<&PackageId> = orig_set.difference(&new_set).collect();
    for pkg_id in added {
        // The package being packaged always differs (its source changes), so
        // it is not interesting to report.
        if pkg_id.name() == current_pkg.name() && pkg_id.version() == current_pkg.version() {
            continue;
        }
        // Entries removed with the same name and version — i.e. the same
        // package from a different source.
        let removed_candidates: Vec<&PackageId> = removed
            .iter()
            .filter(|orig_pkg_id| {
                orig_pkg_id.name() == pkg_id.name() && orig_pkg_id.version() == pkg_id.version()
            })
            .cloned()
            .collect();
        let extra = match removed_candidates.len() {
            0 => {
                // No same-version match; fall back to other versions of the
                // same name.
                let previous_versions: Vec<&PackageId> = removed
                    .iter()
                    .filter(|orig_pkg_id| orig_pkg_id.name() == pkg_id.name())
                    .cloned()
                    .collect();
                match previous_versions.len() {
                    0 => String::new(),
                    1 => format!(
                        ", previous version was `{}`",
                        previous_versions[0].version()
                    ),
                    _ => format!(
                        ", previous versions were: {}",
                        previous_versions
                            .iter()
                            .map(|pkg_id| format!("`{}`", pkg_id.version()))
                            .collect::<Vec<_>>()
                            .join(", ")
                    ),
                }
            }
            1 => {
                // Same name and version, different source.
                format!(
                    ", was originally sourced from `{}`",
                    removed_candidates[0].source_id()
                )
            }
            _ => {
                // Multiple previous sources carried this name and version.
                let comma_list = removed_candidates
                    .iter()
                    .map(|pkg_id| format!("`{}`", pkg_id.source_id()))
                    .collect::<Vec<_>>()
                    .join(", ");
                format!(
                    ", was originally sourced from one of these sources: {}",
                    comma_list
                )
            }
        };
        let msg = format!(
            "package `{}` added to the packaged Cargo.lock file{}",
            pkg_id, extra
        );
        gctx.shell().note(msg)?;
    }
    Ok(())
}
1027
/// Warns for every package in `resolve` that has been yanked from its
/// registry, appending `hint` to each warning.
pub fn check_yanked(
    gctx: &GlobalContext,
    pkg_set: &PackageSet<'_>,
    resolve: &Resolve,
    hint: &str,
) -> CargoResult<()> {
    // Hold the download lock while querying sources below.
    let _lock = gctx.acquire_package_cache_lock(CacheLockMode::DownloadExclusive)?;

    let mut sources = pkg_set.sources_mut();
    let mut pending: Vec<PackageId> = resolve.iter().collect();
    let mut results = Vec::new();
    for (_id, source) in sources.sources_mut() {
        source.invalidate_cache();
    }
    // `is_yanked` may return `Pending`; drive each source with
    // `block_until_ready` and retry until every query resolves.
    while !pending.is_empty() {
        pending.retain(|pkg_id| {
            if let Some(source) = sources.get_mut(pkg_id.source_id()) {
                match source.is_yanked(*pkg_id) {
                    Poll::Ready(result) => results.push((*pkg_id, result)),
                    Poll::Pending => return true,
                }
            }
            // Packages without a matching source are dropped from `pending`.
            false
        });
        for (_id, source) in sources.sources_mut() {
            source.block_until_ready()?;
        }
    }

    for (pkg_id, is_yanked) in results {
        if is_yanked? {
            gctx.shell().warn(format!(
                "package `{}` in Cargo.lock is yanked in registry `{}`, {}",
                pkg_id,
                pkg_id.source_id().display_registry_name(),
                hint
            ))?;
        }
    }
    Ok(())
}
1071
1072fn check_filename(file: &Path, shell: &mut Shell) -> CargoResult<()> {
1079 let Some(name) = file.file_name() else {
1080 return Ok(());
1081 };
1082 let Some(name) = name.to_str() else {
1083 anyhow::bail!(
1084 "path does not have a unicode filename which may not unpack \
1085 on all platforms: {}",
1086 file.display()
1087 )
1088 };
1089 let bad_chars = ['/', '\\', '<', '>', ':', '"', '|', '?', '*'];
1090 if let Some(c) = bad_chars.iter().find(|c| name.contains(**c)) {
1091 anyhow::bail!(
1092 "cannot package a filename with a special character `{}`: {}",
1093 c,
1094 file.display()
1095 )
1096 }
1097 if restricted_names::is_windows_reserved_path(file) {
1098 shell.warn(format!(
1099 "file {} is a reserved Windows filename, \
1100 it will not work on Windows platforms",
1101 file.display()
1102 ))?;
1103 }
1104 Ok(())
1105}
1106
/// A temporary local registry used to overlay freshly packaged (but not yet
/// published) crates on top of an upstream registry, so lockfile generation
/// and verification builds can resolve them.
struct TmpRegistry<'a> {
    gctx: &'a GlobalContext,
    /// The upstream registry this temporary registry overlays.
    upstream: SourceId,
    /// Root directory of the temporary registry (tarballs + index).
    root: Filesystem,
    /// SHA-256 checksums (hex) of the tarballs added, keyed by package id.
    checksums: HashMap<PackageId, String>,
    /// Exclusive lock over `root`, held for the registry's lifetime.
    _lock: FileLock,
}
1117
1118impl<'a> TmpRegistry<'a> {
1119 fn new(gctx: &'a GlobalContext, root: Filesystem, upstream: SourceId) -> CargoResult<Self> {
1120 root.create_dir()?;
1121 let _lock = root.open_rw_exclusive_create(".cargo-lock", gctx, "temporary registry")?;
1122 let slf = Self {
1123 gctx,
1124 root,
1125 upstream,
1126 checksums: HashMap::new(),
1127 _lock,
1128 };
1129 let index_path = slf.index_path().into_path_unlocked();
1131 if index_path.exists() {
1132 paths::remove_dir_all(index_path)?;
1133 }
1134 slf.index_path().create_dir()?;
1135 Ok(slf)
1136 }
1137
1138 fn index_path(&self) -> Filesystem {
1139 self.root.join("index")
1140 }
1141
1142 fn add_package(
1143 &mut self,
1144 ws: &Workspace<'_>,
1145 package: &Package,
1146 tar: &FileLock,
1147 ) -> CargoResult<()> {
1148 debug!(
1149 "adding package {}@{} to local overlay at {}",
1150 package.name(),
1151 package.version(),
1152 self.root.as_path_unlocked().display()
1153 );
1154 {
1155 let mut tar_copy = self.root.open_rw_exclusive_create(
1156 package.package_id().tarball_name(),
1157 self.gctx,
1158 "temporary package registry",
1159 )?;
1160 tar.file().seek(SeekFrom::Start(0))?;
1161 std::io::copy(&mut tar.file(), &mut tar_copy)?;
1162 tar_copy.flush()?;
1163 }
1164
1165 let new_crate = super::registry::prepare_transmit(self.gctx, ws, package, self.upstream)?;
1166
1167 tar.file().seek(SeekFrom::Start(0))?;
1168 let cksum = cargo_util::Sha256::new()
1169 .update_file(tar.file())?
1170 .finish_hex();
1171
1172 self.checksums.insert(package.package_id(), cksum.clone());
1173
1174 let deps: Vec<_> = new_crate
1175 .deps
1176 .into_iter()
1177 .map(|dep| {
1178 let name = dep
1179 .explicit_name_in_toml
1180 .clone()
1181 .unwrap_or_else(|| dep.name.clone())
1182 .into();
1183 let package = dep
1184 .explicit_name_in_toml
1185 .as_ref()
1186 .map(|_| dep.name.clone().into());
1187 RegistryDependency {
1188 name: name,
1189 req: dep.version_req.into(),
1190 features: dep.features.into_iter().map(|x| x.into()).collect(),
1191 optional: dep.optional,
1192 default_features: dep.default_features,
1193 target: dep.target.map(|x| x.into()),
1194 kind: Some(dep.kind.into()),
1195 registry: dep.registry.map(|x| x.into()),
1196 package: package,
1197 public: None,
1198 artifact: dep
1199 .artifact
1200 .map(|xs| xs.into_iter().map(|x| x.into()).collect()),
1201 bindep_target: dep.bindep_target.map(|x| x.into()),
1202 lib: dep.lib,
1203 }
1204 })
1205 .collect();
1206
1207 let index_line = serde_json::to_string(&IndexPackage {
1208 name: new_crate.name.into(),
1209 vers: package.version().clone(),
1210 deps,
1211 features: new_crate
1212 .features
1213 .into_iter()
1214 .map(|(k, v)| (k.into(), v.into_iter().map(|x| x.into()).collect()))
1215 .collect(),
1216 features2: None,
1217 cksum,
1218 yanked: None,
1219 links: new_crate.links.map(|x| x.into()),
1220 rust_version: None,
1221 pubtime: None,
1222 v: Some(2),
1223 })?;
1224
1225 let file =
1226 cargo_util::registry::make_dep_path(&package.name().as_str().to_lowercase(), false);
1227 let mut dst = self.index_path().open_rw_exclusive_create(
1228 file,
1229 self.gctx,
1230 "temporary package registry",
1231 )?;
1232 dst.write_all(index_line.as_bytes())?;
1233 Ok(())
1234 }
1235
1236 fn checksums(&self) -> impl Iterator<Item = (PackageId, &str)> {
1237 self.checksums.iter().map(|(p, s)| (*p, s.as_str()))
1238 }
1239}