1use std::collections::BTreeMap;
2use std::collections::BTreeSet;
3use std::collections::HashMap;
4use std::fs::File;
5use std::io::SeekFrom;
6use std::io::prelude::*;
7use std::path::{Path, PathBuf};
8use std::task::Poll;
9
10use crate::core::PackageIdSpecQuery;
11use crate::core::Shell;
12use crate::core::Verbosity;
13use crate::core::Workspace;
14use crate::core::dependency::DepKind;
15use crate::core::manifest::Target;
16use crate::core::resolver::CliFeatures;
17use crate::core::resolver::HasDevUnits;
18use crate::core::{Package, PackageId, PackageSet, Resolve, SourceId};
19use crate::ops::lockfile::LOCKFILE_NAME;
20use crate::ops::registry::{RegistryOrIndex, infer_registry};
21use crate::sources::path::PathEntry;
22use crate::sources::{CRATES_IO_REGISTRY, PathSource};
23use crate::util::FileLock;
24use crate::util::Filesystem;
25use crate::util::GlobalContext;
26use crate::util::Graph;
27use crate::util::HumanBytes;
28use crate::util::cache_lock::CacheLockMode;
29use crate::util::context::JobsConfig;
30use crate::util::errors::CargoResult;
31use crate::util::errors::ManifestError;
32use crate::util::restricted_names;
33use crate::util::toml::prepare_for_publish;
34use crate::{drop_println, ops};
35use annotate_snippets::Level;
36use anyhow::{Context as _, bail};
37use cargo_util::paths;
38use cargo_util_schemas::index::{IndexPackage, RegistryDependency};
39use cargo_util_schemas::messages;
40use flate2::{Compression, GzBuilder};
41use tar::{Builder, EntryType, Header, HeaderMode};
42use tracing::debug;
43use unicase::Ascii as UncasedAscii;
44
45mod vcs;
46mod verify;
47
/// Output format for `cargo package --list`.
#[derive(Debug, Clone)]
pub enum PackageMessageFormat {
    /// One file path per line, for human consumption.
    Human,
    /// A machine-readable JSON message per package.
    Json,
}
56
impl PackageMessageFormat {
    /// The set of accepted values for the message-format option.
    pub const POSSIBLE_VALUES: [&str; 2] = ["human", "json"];

    /// Format used when no explicit value is given.
    pub const DEFAULT: &str = "human";
}
62
63impl std::str::FromStr for PackageMessageFormat {
64 type Err = anyhow::Error;
65
66 fn from_str(s: &str) -> Result<PackageMessageFormat, anyhow::Error> {
67 match s {
68 "human" => Ok(PackageMessageFormat::Human),
69 "json" => Ok(PackageMessageFormat::Json),
70 f => bail!("unknown message format `{f}`"),
71 }
72 }
73}
74
/// All options for the `cargo package` operation.
#[derive(Clone)]
pub struct PackageOpts<'gctx> {
    pub gctx: &'gctx GlobalContext,
    /// Only print the files that would be archived instead of packaging.
    pub list: bool,
    /// Output format used when `list` is set.
    pub fmt: PackageMessageFormat,
    /// Warn when human-facing metadata (description, license, ...) is missing.
    pub check_metadata: bool,
    /// NOTE(review): passed through to `vcs::check_repo_state` via these opts;
    /// presumably skips the dirty-working-tree check — confirm in `vcs`.
    pub allow_dirty: bool,
    /// Include a generated `Cargo.lock` in the archive.
    pub include_lockfile: bool,
    /// Build the packaged tarball afterwards to verify it works.
    pub verify: bool,
    pub jobs: Option<JobsConfig>,
    pub keep_going: bool,
    /// Which workspace members to package.
    pub to_package: ops::Packages,
    pub targets: Vec<String>,
    pub cli_features: CliFeatures,
    /// Destination registry (or index URL), if any was specified.
    pub reg_or_index: Option<ops::RegistryOrIndex>,
    pub dry_run: bool,
}
104
/// Archive name under which the original (pre-normalization) manifest is kept.
const ORIGINAL_MANIFEST_FILE: &str = "Cargo.toml.orig";
/// Name of the generated file that records the VCS state at packaging time.
const VCS_INFO_FILE: &str = ".cargo_vcs_info.json";
107
/// One entry destined for the `.crate` archive.
struct ArchiveFile {
    /// Path within the archive, not counting the `<name>-<version>` prefix
    /// that `tar()` prepends to every entry.
    rel_path: PathBuf,
    /// `rel_path` as a string; construction guarantees it is valid UTF-8.
    rel_str: String,
    /// Where the entry's bytes come from.
    contents: FileContents,
}
117
/// Source of an archive entry's contents.
enum FileContents {
    /// Copy the file at this on-disk path verbatim.
    OnDisk(PathBuf),
    /// Contents are synthesized at packaging time.
    Generated(GeneratedFile),
}
124
/// The kinds of files generated while packaging.
enum GeneratedFile {
    /// Normalized `Cargo.toml`; the payload is the path of the original
    /// manifest it was derived from.
    Manifest(PathBuf),
    /// Generated `Cargo.lock`; the payload is the path of the workspace
    /// lockfile it was based on, when one existed.
    Lockfile(Option<PathBuf>),
    /// `.cargo_vcs_info.json`, serialized from the captured VCS state.
    VcsInfo(vcs::VcsInfo),
}
137
138#[tracing::instrument(skip_all)]
140fn create_package(
141 ws: &Workspace<'_>,
142 opts: &PackageOpts<'_>,
143 pkg: &Package,
144 ar_files: Vec<ArchiveFile>,
145 local_reg: Option<&TmpRegistry<'_>>,
146) -> CargoResult<FileLock> {
147 let gctx = ws.gctx();
148 let filecount = ar_files.len();
149
150 for dep in pkg.dependencies() {
152 super::check_dep_has_version(dep, false).map_err(|err| {
153 ManifestError::new(
154 err.context(format!(
155 "failed to verify manifest at `{}`",
156 pkg.manifest_path().display()
157 )),
158 pkg.manifest_path().into(),
159 )
160 })?;
161 }
162
163 let filename = pkg.package_id().tarball_name();
164 let build_dir = ws.build_dir();
165 paths::create_dir_all_excluded_from_backups_atomic(build_dir.as_path_unlocked())?;
166 let dir = build_dir.join("package").join("tmp-crate");
167 let dst = dir.open_rw_exclusive_create(&filename, gctx, "package scratch space")?;
168
169 gctx.shell()
174 .status("Packaging", pkg.package_id().to_string())?;
175 dst.file().set_len(0)?;
176 let uncompressed_size = tar(ws, opts, pkg, local_reg, ar_files, dst.file(), &filename)
177 .context("failed to prepare local package for uploading")?;
178
179 let dst_metadata = dst
180 .file()
181 .metadata()
182 .with_context(|| format!("could not learn metadata for: `{}`", dst.path().display()))?;
183 let compressed_size = dst_metadata.len();
184
185 let uncompressed = HumanBytes(uncompressed_size);
186 let compressed = HumanBytes(compressed_size);
187
188 let message = format!("{filecount} files, {uncompressed:.1} ({compressed:.1} compressed)");
189 drop(gctx.shell().status("Packaged", message));
191
192 return Ok(dst);
193}
194
/// Packages the selected workspace members into `.crate` files and uplifts
/// them from the scratch space into `target/package`, returning one open
/// lock per produced tarball.
pub fn package(ws: &Workspace<'_>, opts: &PackageOpts<'_>) -> CargoResult<Vec<FileLock>> {
    let specs = &opts.to_package.to_package_id_specs(ws)?;
    // When `-p` specs were given explicitly, fail early if any spec does not
    // match a workspace member.
    if let ops::Packages::Packages(_) = opts.to_package {
        for spec in specs.iter() {
            let member_ids = ws.members().map(|p| p.package_id());
            spec.query(member_ids)?;
        }
    }
    let mut pkgs = ws.members_with_features(specs, &opts.cli_features)?;

    // Keep only the members actually selected by the specs.
    pkgs.retain(|(pkg, _feats)| specs.iter().any(|spec| spec.matches(pkg.package_id())));

    let packaged = do_package(ws, opts, pkgs)?;

    let mut result = Vec::new();
    // Copy each finished tarball into `target/package`.
    let target_dir = ws.target_dir();
    paths::create_dir_all_excluded_from_backups_atomic(target_dir.as_path_unlocked())?;
    let artifact_dir = target_dir.join("package");
    for (pkg, _, src) in packaged {
        let filename = pkg.package_id().tarball_name();
        let dst = artifact_dir.open_rw_exclusive_create(filename, ws.gctx(), "uplifted package")?;
        // Rewind the scratch file before copying its full contents.
        src.file().seek(SeekFrom::Start(0))?;
        std::io::copy(&mut src.file(), &mut dst.file())?;
        result.push(dst);
    }

    Ok(result)
}
231
232pub(crate) fn package_with_dep_graph(
238 ws: &Workspace<'_>,
239 opts: &PackageOpts<'_>,
240 pkgs: Vec<(&Package, CliFeatures)>,
241) -> CargoResult<LocalDependencies<(CliFeatures, FileLock)>> {
242 let output = do_package(ws, opts, pkgs)?;
243
244 Ok(local_deps(output.into_iter().map(
245 |(pkg, opts, tarball)| (pkg, (opts.cli_features, tarball)),
246 )))
247}
248
/// Packages each of `pkgs` in local-dependency order (dependencies first),
/// optionally just listing files or verifying the results, and returns the
/// produced tarballs still sitting in the scratch space.
fn do_package<'a>(
    ws: &Workspace<'_>,
    opts: &PackageOpts<'a>,
    pkgs: Vec<(&Package, CliFeatures)>,
) -> CargoResult<Vec<(Package, PackageOpts<'a>, FileLock)>> {
    // If a lockfile will be shipped, resolve the workspace first so the
    // on-disk lockfile is up to date.
    if ws
        .lock_root()
        .as_path_unlocked()
        .join(LOCKFILE_NAME)
        .exists()
        && opts.include_lockfile
    {
        let dry_run = false;
        let _ = ops::resolve_ws(ws, dry_run)?;
    }

    let deps = local_deps(pkgs.iter().map(|(p, f)| ((*p).clone(), f.clone())));
    let just_pkgs: Vec<_> = pkgs.iter().map(|p| p.0).collect();

    // A temporary registry overlay is only needed when local path
    // dependencies must resolve against a registry (lockfile/verify), or a
    // registry was requested explicitly.
    let mut local_reg = {
        let sid = if (deps.has_dependencies() && (opts.include_lockfile || opts.verify))
            || opts.reg_or_index.is_some()
        {
            let sid = get_registry(ws.gctx(), &just_pkgs, opts.reg_or_index.clone())?;
            debug!("packaging for registry {}", sid);
            Some(sid)
        } else {
            None
        };
        let reg_dir = ws.build_dir().join("package").join("tmp-registry");
        sid.map(|sid| TmpRegistry::new(ws.gctx(), reg_dir, sid))
            .transpose()?
    };

    // Packages are built dependency-first so each one's local deps are
    // already present in the overlay registry when it is packaged.
    let sorted_pkgs = deps.sort();
    let mut outputs: Vec<(Package, PackageOpts<'_>, FileLock)> = Vec::new();
    for (pkg, cli_features) in sorted_pkgs {
        let opts = PackageOpts {
            cli_features: cli_features.clone(),
            to_package: ops::Packages::Default,
            ..opts.clone()
        };
        let ar_files = prepare_archive(ws, &pkg, &opts)?;

        if opts.list {
            match opts.fmt {
                PackageMessageFormat::Human => {
                    // One relative path per line.
                    for ar_file in &ar_files {
                        drop_println!(ws.gctx(), "{}", ar_file.rel_str);
                    }
                }
                PackageMessageFormat::Json => {
                    let message = messages::PackageList {
                        id: pkg.package_id().to_spec(),
                        files: BTreeMap::from_iter(ar_files.into_iter().map(|f| {
                            let file = match f.contents {
                                FileContents::OnDisk(path) => messages::PackageFile::Copy { path },
                                FileContents::Generated(
                                    GeneratedFile::Manifest(path)
                                    | GeneratedFile::Lockfile(Some(path)),
                                ) => messages::PackageFile::Generate { path: Some(path) },
                                FileContents::Generated(
                                    GeneratedFile::VcsInfo(_) | GeneratedFile::Lockfile(None),
                                ) => messages::PackageFile::Generate { path: None },
                            };
                            (f.rel_path, file)
                        })),
                    };
                    let _ = ws.gctx().shell().print_json(&message);
                }
            }
        } else {
            let tarball = create_package(ws, &opts, &pkg, ar_files, local_reg.as_ref())?;
            if let Some(local_reg) = local_reg.as_mut() {
                // `publish = []` disallows publishing anywhere; skip adding
                // such packages to the overlay registry.
                if pkg.publish() != &Some(Vec::new()) {
                    local_reg.add_package(ws, &pkg, &tarball)?;
                }
            }
            outputs.push((pkg, opts, tarball));
        }
    }

    // Verify every produced tarball after all of them are built, so that
    // inter-dependent packages can be found in the overlay registry.
    if opts.verify {
        for (pkg, opts, tarball) in &outputs {
            verify::run_verify(ws, pkg, tarball, local_reg.as_ref(), opts)
                .context("failed to verify package tarball")?
        }
    }

    Ok(outputs)
}
353
354fn get_registry(
361 gctx: &GlobalContext,
362 pkgs: &[&Package],
363 reg_or_index: Option<RegistryOrIndex>,
364) -> CargoResult<SourceId> {
365 let reg_or_index = match reg_or_index.clone() {
366 Some(r) => Some(r),
367 None => infer_registry(pkgs)?,
368 };
369
370 let reg = reg_or_index
372 .clone()
373 .unwrap_or_else(|| RegistryOrIndex::Registry(CRATES_IO_REGISTRY.to_owned()));
374 if let RegistryOrIndex::Registry(reg_name) = reg {
375 for pkg in pkgs {
376 if let Some(allowed) = pkg.publish().as_ref() {
377 if !allowed.is_empty() && !allowed.iter().any(|a| a == ®_name) {
381 bail!(
382 "`{}` cannot be packaged.\n\
383 The registry `{}` is not listed in the `package.publish` value in Cargo.toml.",
384 pkg.name(),
385 reg_name
386 );
387 }
388 }
389 }
390 }
391 Ok(ops::registry::get_source_id(gctx, reg_or_index.as_ref())?.replacement)
392}
393
/// The dependency graph of the packages in the workspace that are being
/// packaged, with a payload `T` attached to each package.
#[derive(Clone, Debug, Default)]
pub(crate) struct LocalDependencies<T> {
    /// Each local package and its payload, keyed by package id.
    pub packages: HashMap<PackageId, (Package, T)>,
    /// Edges point from a package to its local (path) dependencies.
    pub graph: Graph<PackageId, ()>,
}
400
401impl<T: Clone> LocalDependencies<T> {
402 pub fn sort(&self) -> Vec<(Package, T)> {
403 self.graph
404 .sort()
405 .into_iter()
406 .map(|name| self.packages[&name].clone())
407 .collect()
408 }
409
410 pub fn has_dependencies(&self) -> bool {
411 self.graph
412 .iter()
413 .any(|node| self.graph.edges(node).next().is_some())
414 }
415}
416
417fn local_deps<T>(packages: impl Iterator<Item = (Package, T)>) -> LocalDependencies<T> {
422 let packages: HashMap<PackageId, (Package, T)> = packages
423 .map(|(pkg, payload)| (pkg.package_id(), (pkg, payload)))
424 .collect();
425
426 let source_to_pkg: HashMap<_, _> = packages
431 .keys()
432 .map(|pkg_id| (pkg_id.source_id(), *pkg_id))
433 .collect();
434
435 let mut graph = Graph::new();
436 for (pkg, _payload) in packages.values() {
437 graph.add(pkg.package_id());
438 for dep in pkg.dependencies() {
439 if !dep.source_id().is_path() {
441 continue;
442 }
443
444 if dep.kind() == DepKind::Development && !dep.specified_req() {
447 continue;
448 };
449
450 if dep.source_id() == pkg.package_id().source_id() {
452 continue;
453 }
454
455 if let Some(dep_pkg) = source_to_pkg.get(&dep.source_id()) {
456 graph.link(pkg.package_id(), *dep_pkg);
457 }
458 }
459 }
460
461 LocalDependencies { packages, graph }
462}
463
/// Runs pre-archive checks for `pkg` and assembles the list of files that
/// will go into its `.crate` archive.
#[tracing::instrument(skip_all)]
fn prepare_archive(
    ws: &Workspace<'_>,
    pkg: &Package,
    opts: &PackageOpts<'_>,
) -> CargoResult<Vec<ArchiveFile>> {
    let gctx = ws.gctx();
    let mut src = PathSource::new(pkg.root(), pkg.package_id().source_id(), gctx);
    src.load()?;

    if opts.check_metadata {
        check_metadata(pkg, opts.reg_or_index.as_ref(), gctx)?;
    }

    // `include` wins over `exclude` when both are present; warn about it.
    if !pkg.manifest().exclude().is_empty() && !pkg.manifest().include().is_empty() {
        gctx.shell().warn(
            "both package.include and package.exclude are specified; \
             the exclude list will be ignored",
        )?;
    }
    let src_files = src.list_files(pkg)?;

    // Capture the VCS (repository) state for `.cargo_vcs_info.json`.
    let vcs_info = vcs::check_repo_state(pkg, &src_files, ws, &opts)?;

    build_ar_list(ws, pkg, src_files, vcs_info, opts.include_lockfile)
}
491
/// Builds the final list of [`ArchiveFile`]s from the source file listing,
/// adding the generated manifest/lockfile/VCS-info entries and validating
/// `license-file`, `readme` and custom build script paths.
#[tracing::instrument(skip_all)]
fn build_ar_list(
    ws: &Workspace<'_>,
    pkg: &Package,
    src_files: Vec<PathEntry>,
    vcs_info: Option<vcs::VcsInfo>,
    include_lockfile: bool,
) -> CargoResult<Vec<ArchiveFile>> {
    // Keyed case-insensitively so that files differing only in case land in
    // the same bucket.
    let mut result = HashMap::new();
    let root = pkg.root();
    for src_file in &src_files {
        let rel_path = src_file.strip_prefix(&root)?;
        check_filename(rel_path, &mut ws.gctx().shell())?;
        let rel_str = rel_path.to_str().ok_or_else(|| {
            anyhow::format_err!("non-utf8 path in source directory: {}", rel_path.display())
        })?;
        match rel_str {
            // The shipped Cargo.lock is always generated separately below.
            "Cargo.lock" => continue,
            // These names are reserved for files cargo itself generates.
            VCS_INFO_FILE | ORIGINAL_MANIFEST_FILE => anyhow::bail!(
                "invalid inclusion of reserved file name {} in package source",
                rel_str
            ),
            _ => {
                result
                    .entry(UncasedAscii::new(rel_str))
                    .or_insert_with(Vec::new)
                    .push(ArchiveFile {
                        rel_path: rel_path.to_owned(),
                        rel_str: rel_str.to_owned(),
                        contents: FileContents::OnDisk(src_file.to_path_buf()),
                    });
            }
        }
    }

    // Replace the on-disk Cargo.toml with the normalized one, archiving the
    // original under `Cargo.toml.orig`.
    if result.remove(&UncasedAscii::new("Cargo.toml")).is_some() {
        result
            .entry(UncasedAscii::new(ORIGINAL_MANIFEST_FILE))
            .or_insert_with(Vec::new)
            .push(ArchiveFile {
                rel_path: PathBuf::from(ORIGINAL_MANIFEST_FILE),
                rel_str: ORIGINAL_MANIFEST_FILE.to_string(),
                contents: FileContents::OnDisk(pkg.manifest_path().to_owned()),
            });
        result
            .entry(UncasedAscii::new("Cargo.toml"))
            .or_insert_with(Vec::new)
            .push(ArchiveFile {
                rel_path: PathBuf::from("Cargo.toml"),
                rel_str: "Cargo.toml".to_string(),
                contents: FileContents::Generated(GeneratedFile::Manifest(
                    pkg.manifest_path().to_owned(),
                )),
            });
    } else {
        ws.gctx().shell().warn(&format!(
            "no `Cargo.toml` file found when packaging `{}` (note the case of the file name).",
            pkg.name()
        ))?;
    }

    if include_lockfile {
        // `None` path means a lockfile will be generated from scratch.
        let lockfile_path = ws.lock_root().as_path_unlocked().join(LOCKFILE_NAME);
        let lockfile_path = lockfile_path.exists().then_some(lockfile_path);
        let rel_str = "Cargo.lock";
        result
            .entry(UncasedAscii::new(rel_str))
            .or_insert_with(Vec::new)
            .push(ArchiveFile {
                rel_path: PathBuf::from(rel_str),
                rel_str: rel_str.to_string(),
                contents: FileContents::Generated(GeneratedFile::Lockfile(lockfile_path)),
            });
    }

    if let Some(vcs_info) = vcs_info {
        let rel_str = VCS_INFO_FILE;
        result
            .entry(UncasedAscii::new(rel_str))
            .or_insert_with(Vec::new)
            .push(ArchiveFile {
                rel_path: PathBuf::from(rel_str),
                rel_str: rel_str.to_string(),
                contents: FileContents::Generated(GeneratedFile::VcsInfo(vcs_info)),
            });
    }

    // Collects all `license-file`/`readme` problems so they can be reported
    // together in one error.
    let mut invalid_manifest_field: Vec<String> = vec![];

    let mut result = result.into_values().flatten().collect();
    if let Some(license_file) = &pkg.manifest().metadata().license_file {
        let license_path = Path::new(license_file);
        let abs_file_path = paths::normalize_path(&pkg.root().join(license_path));
        if abs_file_path.is_file() {
            check_for_file_and_add(
                "license-file",
                license_path,
                abs_file_path,
                pkg,
                &mut result,
                ws,
            )?;
        } else {
            error_on_nonexistent_file(
                &pkg,
                &license_path,
                "license-file",
                &mut invalid_manifest_field,
            );
        }
    }
    if let Some(readme) = &pkg.manifest().metadata().readme {
        let readme_path = Path::new(readme);
        let abs_file_path = paths::normalize_path(&pkg.root().join(readme_path));
        if abs_file_path.is_file() {
            check_for_file_and_add("readme", readme_path, abs_file_path, pkg, &mut result, ws)?;
        } else {
            error_on_nonexistent_file(&pkg, &readme_path, "readme", &mut invalid_manifest_field);
        }
    }

    if !invalid_manifest_field.is_empty() {
        return Err(anyhow::anyhow!(invalid_manifest_field.join("\n")));
    }

    // A build script must exist and live inside the package root.
    for t in pkg
        .manifest()
        .targets()
        .iter()
        .filter(|t| t.is_custom_build())
    {
        if let Some(custom_build_path) = t.src_path().path() {
            let abs_custom_build_path = paths::normalize_path(&pkg.root().join(custom_build_path));
            if !abs_custom_build_path.is_file() || !abs_custom_build_path.starts_with(pkg.root()) {
                error_custom_build_file_not_in_package(pkg, &abs_custom_build_path, t)?;
            }
        }
    }

    // Deterministic archive ordering.
    result.sort_unstable_by(|a, b| a.rel_path.cmp(&b.rel_path));

    Ok(result)
}
638
/// Adds a manifest-referenced file (`license-file` or `readme`, as named by
/// `label`) to `result` if it is not already listed.
///
/// Files outside the package root are copied into the archive root; if a
/// same-named file already exists there, a warning is issued instead.
fn check_for_file_and_add(
    label: &str,
    file_path: &Path,
    abs_file_path: PathBuf,
    pkg: &Package,
    result: &mut Vec<ArchiveFile>,
    ws: &Workspace<'_>,
) -> CargoResult<()> {
    match abs_file_path.strip_prefix(&pkg.root()) {
        Ok(rel_file_path) => {
            // Inside the package root: add unless already present.
            if !result.iter().any(|ar| ar.rel_path == rel_file_path) {
                result.push(ArchiveFile {
                    rel_path: rel_file_path.to_path_buf(),
                    rel_str: rel_file_path
                        .to_str()
                        .expect("everything was utf8")
                        .to_string(),
                    contents: FileContents::OnDisk(abs_file_path),
                })
            }
        }
        Err(_) => {
            // Outside the package root.
            let file_name = file_path.file_name().unwrap();
            if result.iter().any(|ar| ar.rel_path == file_name) {
                // A root-level file with this name already exists; keep it
                // and warn rather than shadow it.
                ws.gctx().shell().warn(&format!(
                    "{} `{}` appears to be a path outside of the package, \
                     but there is already a file named `{}` in the root of the package. \
                     The archived crate will contain the copy in the root of the package. \
                     Update the {} to point to the path relative \
                     to the root of the package to remove this warning.",
                    label,
                    file_path.display(),
                    file_name.to_str().unwrap(),
                    label,
                ))?;
            } else {
                // Place a copy of the outside file at the archive root.
                result.push(ArchiveFile {
                    rel_path: PathBuf::from(file_name),
                    rel_str: file_name.to_str().unwrap().to_string(),
                    contents: FileContents::OnDisk(abs_file_path),
                })
            }
        }
    }
    Ok(())
}
686
687fn error_on_nonexistent_file(
688 pkg: &Package,
689 path: &Path,
690 manifest_key_name: &'static str,
691 invalid: &mut Vec<String>,
692) {
693 let rel_msg = if path.is_absolute() {
694 "".to_string()
695 } else {
696 format!(" (relative to `{}`)", pkg.root().display())
697 };
698
699 let msg = format!(
700 "{manifest_key_name} `{}` does not appear to exist{}.\n\
701 Please update the {manifest_key_name} setting in the manifest at `{}`.",
702 path.display(),
703 rel_msg,
704 pkg.manifest_path().display()
705 );
706
707 invalid.push(msg);
708}
709
/// Bails with an explanation when a custom build script either does not
/// exist or lies outside the package root.
fn error_custom_build_file_not_in_package(
    pkg: &Package,
    path: &Path,
    target: &Target,
) -> CargoResult<Vec<ArchiveFile>> {
    // Tailor the first line to whichever problem applies.
    let tip = {
        let description_name = target.description_named();
        if path.is_file() {
            format!(
                "the source file of {description_name} doesn't appear to be a path inside of the package.\n\
                 It is at `{}`, whereas the root the package is `{}`.\n",
                path.display(),
                pkg.root().display()
            )
        } else {
            format!("the source file of {description_name} doesn't appear to exist.\n",)
        }
    };
    let msg = format!(
        "{}\
         This may cause issue during packaging, as modules resolution and resources included via macros are often relative to the path of source files.\n\
         Please update the `build` setting in the manifest at `{}` and point to a path inside the root of the package.",
        tip,
        pkg.manifest_path().display()
    );
    anyhow::bail!(msg)
}
737
/// Generates the contents of the `Cargo.lock` that is shipped inside the
/// package, by re-resolving an ephemeral workspace built from `publish_pkg`.
///
/// When `local_reg` is present it is layered over the upstream registry so
/// that not-yet-published local dependencies can resolve.
fn build_lock(
    ws: &Workspace<'_>,
    opts: &PackageOpts<'_>,
    publish_pkg: &Package,
    local_reg: Option<&TmpRegistry<'_>>,
) -> CargoResult<String> {
    let gctx = ws.gctx();
    // The workspace's existing lockfile, if any, seeds the new resolve.
    let mut orig_resolve = ops::load_pkg_lockfile(ws)?;

    let mut tmp_ws = Workspace::ephemeral(publish_pkg.clone(), ws.gctx(), None, true)?;

    if let Some(local_reg) = local_reg {
        // Overlay the temporary registry on top of the upstream source.
        tmp_ws.add_local_overlay(
            local_reg.upstream,
            local_reg.root.as_path_unlocked().to_owned(),
        );
        if opts.dry_run {
            // In dry-run mode the local packages were never uploaded, so
            // inject their checksums into the original resolve under the
            // upstream source id to keep the lockfile consistent.
            if let Some(orig_resolve) = orig_resolve.as_mut() {
                let upstream_in_lock = if local_reg.upstream.is_crates_io() {
                    SourceId::crates_io(gctx)?
                } else {
                    local_reg.upstream
                };
                for (p, s) in local_reg.checksums() {
                    orig_resolve.set_checksum(p.with_source_id(upstream_in_lock), s.to_owned());
                }
            }
        }
    }
    let mut tmp_reg = tmp_ws.package_registry()?;

    let mut new_resolve = ops::resolve_with_previous(
        &mut tmp_reg,
        &tmp_ws,
        &CliFeatures::new_all(true),
        HasDevUnits::Yes,
        orig_resolve.as_ref(),
        None,
        &[],
        true,
    )?;

    let pkg_set = ops::get_resolved_packages(&new_resolve, tmp_reg)?;

    // Report (in verbose mode) how the packaged lockfile differs from the
    // workspace's, and warn about yanked entries.
    if let Some(orig_resolve) = orig_resolve {
        compare_resolve(gctx, tmp_ws.current()?, &orig_resolve, &new_resolve)?;
    }
    check_yanked(
        gctx,
        &pkg_set,
        &new_resolve,
        "consider updating to a version that is not yanked",
    )?;

    ops::resolve_to_string(&tmp_ws, &mut new_resolve)
}
798
/// Warns when the manifest lacks the human-facing metadata fields
/// (description, license, documentation/homepage/repository).
fn check_metadata(
    pkg: &Package,
    reg_or_index: Option<&RegistryOrIndex>,
    gctx: &GlobalContext,
) -> CargoResult<()> {
    let md = pkg.manifest().metadata();

    let mut missing = vec![];

    // `lacking!(a || b)` pushes *all* listed field names when *every* one of
    // them is `None` or an empty string.
    macro_rules! lacking {
        ($( $($field: ident)||* ),*) => {{
            $(
                if $(md.$field.as_ref().map_or(true, |s| s.is_empty()))&&* {
                    $(missing.push(stringify!($field).replace("_", "-"));)*
                }
            )*
        }}
    }
    lacking!(
        description,
        license || license_file,
        documentation || homepage || repository
    );

    if !missing.is_empty() {
        // Only warn for crates.io (explicitly named or the default when no
        // registry was given); custom index URLs are exempt.
        let should_warn = match reg_or_index {
            Some(RegistryOrIndex::Registry(reg_name)) => reg_name == CRATES_IO_REGISTRY,
            None => true,
            Some(RegistryOrIndex::Index(_)) => false,
        };

        if should_warn {
            // Join the field names as "a, b or c" for the message.
            let mut things = missing[..missing.len() - 1].join(", ");
            if !things.is_empty() {
                things.push_str(" or ");
            }
            things.push_str(missing.last().unwrap());

            gctx.shell().print_report(&[
                Level::WARNING.secondary_title(format!("manifest has no {things}"))
                .element(Level::NOTE.message("see https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info"))
            ],
            false
            )?
        }
    }

    Ok(())
}
853
/// Compresses the [`ArchiveFile`]s into a gzipped tar archive written to
/// `dst`, returning the total uncompressed size of the archived contents.
fn tar(
    ws: &Workspace<'_>,
    opts: &PackageOpts<'_>,
    pkg: &Package,
    local_reg: Option<&TmpRegistry<'_>>,
    ar_files: Vec<ArchiveFile>,
    dst: &File,
    filename: &str,
) -> CargoResult<u64> {
    // Record the tarball name in the gzip header.
    let filename = Path::new(filename);
    let encoder = GzBuilder::new()
        .filename(paths::path2bytes(filename)?)
        .write(dst, Compression::best());

    let mut ar = Builder::new(encoder);
    ar.sparse(false);
    let gctx = ws.gctx();

    // All entries live under a `<name>-<version>/` prefix.
    let base_name = format!("{}-{}", pkg.name(), pkg.version());
    let base_path = Path::new(&base_name);
    let included = ar_files
        .iter()
        .map(|ar_file| ar_file.rel_path.clone())
        .collect::<Vec<_>>();
    // The normalized manifest is derived from the actual set of included files.
    let publish_pkg = prepare_for_publish(pkg, ws, Some(&included))?;

    let mut uncompressed_size = 0;
    for ar_file in ar_files {
        let ArchiveFile {
            rel_path,
            rel_str,
            contents,
        } = ar_file;
        let ar_path = base_path.join(&rel_path);
        gctx.shell()
            .verbose(|shell| shell.status("Archiving", &rel_str))?;
        let mut header = Header::new_gnu();
        match contents {
            FileContents::OnDisk(disk_path) => {
                let mut file = File::open(&disk_path).with_context(|| {
                    format!("failed to open for archiving: `{}`", disk_path.display())
                })?;
                let metadata = file.metadata().with_context(|| {
                    format!("could not learn metadata for: `{}`", disk_path.display())
                })?;
                // Deterministic mode normalizes ownership/timestamps.
                header.set_metadata_in_mode(&metadata, HeaderMode::Deterministic);
                header.set_cksum();
                ar.append_data(&mut header, &ar_path, &mut file)
                    .with_context(|| {
                        format!("could not archive source file `{}`", disk_path.display())
                    })?;
                uncompressed_size += metadata.len() as u64;
            }
            FileContents::Generated(generated_kind) => {
                let contents = match generated_kind {
                    GeneratedFile::Manifest(_) => {
                        publish_pkg.manifest().to_normalized_contents()?
                    }
                    GeneratedFile::Lockfile(_) => build_lock(ws, opts, &publish_pkg, local_reg)?,
                    GeneratedFile::VcsInfo(ref s) => serde_json::to_string_pretty(s)?,
                };
                header.set_entry_type(EntryType::file());
                header.set_mode(0o644);
                header.set_size(contents.len() as u64);
                // Fixed mtime so the generated entries are reproducible —
                // presumably matching the `tar` crate's deterministic mode;
                // confirm against `set_metadata_in_mode`.
                header.set_mtime(1153704088);
                header.set_cksum();
                ar.append_data(&mut header, &ar_path, contents.as_bytes())
                    .with_context(|| format!("could not archive source file `{}`", rel_str))?;
                uncompressed_size += contents.len() as u64;
            }
        }
    }

    let encoder = ar.into_inner()?;
    encoder.finish()?;
    Ok(uncompressed_size)
}
941
/// In verbose mode, prints a note for every package that appears in the
/// packaged `Cargo.lock` but not in the workspace's original one, with a
/// best-effort explanation of where it came from.
fn compare_resolve(
    gctx: &GlobalContext,
    current_pkg: &Package,
    orig_resolve: &Resolve,
    new_resolve: &Resolve,
) -> CargoResult<()> {
    if gctx.shell().verbosity() != Verbosity::Verbose {
        return Ok(());
    }
    let new_set: BTreeSet<PackageId> = new_resolve.iter().collect();
    let orig_set: BTreeSet<PackageId> = orig_resolve.iter().collect();
    let added = new_set.difference(&orig_set);
    // Entries only in the original resolve; used to explain the additions.
    let removed: Vec<&PackageId> = orig_set.difference(&new_set).collect();
    for pkg_id in added {
        // Skip the package currently being packaged itself.
        if pkg_id.name() == current_pkg.name() && pkg_id.version() == current_pkg.version() {
            continue;
        }
        // Entries with the same name and version that disappeared — i.e. the
        // same package from a different source.
        let removed_candidates: Vec<&PackageId> = removed
            .iter()
            .filter(|orig_pkg_id| {
                orig_pkg_id.name() == pkg_id.name() && orig_pkg_id.version() == pkg_id.version()
            })
            .cloned()
            .collect();
        let extra = match removed_candidates.len() {
            0 => {
                // No same-version match; mention prior versions if any.
                let previous_versions: Vec<&PackageId> = removed
                    .iter()
                    .filter(|orig_pkg_id| orig_pkg_id.name() == pkg_id.name())
                    .cloned()
                    .collect();
                match previous_versions.len() {
                    0 => String::new(),
                    1 => format!(
                        ", previous version was `{}`",
                        previous_versions[0].version()
                    ),
                    _ => format!(
                        ", previous versions were: {}",
                        previous_versions
                            .iter()
                            .map(|pkg_id| format!("`{}`", pkg_id.version()))
                            .collect::<Vec<_>>()
                            .join(", ")
                    ),
                }
            }
            1 => {
                // Same name/version, different source.
                format!(
                    ", was originally sourced from `{}`",
                    removed_candidates[0].source_id()
                )
            }
            _ => {
                // Multiple candidate sources for the same name/version.
                let comma_list = removed_candidates
                    .iter()
                    .map(|pkg_id| format!("`{}`", pkg_id.source_id()))
                    .collect::<Vec<_>>()
                    .join(", ");
                format!(
                    ", was originally sourced from one of these sources: {}",
                    comma_list
                )
            }
        };
        let msg = format!(
            "package `{}` added to the packaged Cargo.lock file{}",
            pkg_id, extra
        );
        gctx.shell().note(msg)?;
    }
    Ok(())
}
1028
/// Warns about every package in `resolve` that its registry reports as
/// yanked, appending `hint` to each warning.
pub fn check_yanked(
    gctx: &GlobalContext,
    pkg_set: &PackageSet<'_>,
    resolve: &Resolve,
    hint: &str,
) -> CargoResult<()> {
    // Querying yanked status goes through the package cache.
    let _lock = gctx.acquire_package_cache_lock(CacheLockMode::DownloadExclusive)?;

    let mut sources = pkg_set.sources_mut();
    let mut pending: Vec<PackageId> = resolve.iter().collect();
    let mut results = Vec::new();
    for (_id, source) in sources.sources_mut() {
        source.invalidate_cache();
    }
    // Poll every source until each package id yields a yanked result;
    // ids whose query is still pending are retried after blocking.
    while !pending.is_empty() {
        pending.retain(|pkg_id| {
            if let Some(source) = sources.get_mut(pkg_id.source_id()) {
                match source.is_yanked(*pkg_id) {
                    Poll::Ready(result) => results.push((*pkg_id, result)),
                    Poll::Pending => return true,
                }
            }
            false
        });
        for (_id, source) in sources.sources_mut() {
            source.block_until_ready()?;
        }
    }

    for (pkg_id, is_yanked) in results {
        if is_yanked? {
            gctx.shell().warn(format!(
                "package `{}` in Cargo.lock is yanked in registry `{}`, {}",
                pkg_id,
                pkg_id.source_id().display_registry_name(),
                hint
            ))?;
        }
    }
    Ok(())
}
1072
1073fn check_filename(file: &Path, shell: &mut Shell) -> CargoResult<()> {
1080 let Some(name) = file.file_name() else {
1081 return Ok(());
1082 };
1083 let Some(name) = name.to_str() else {
1084 anyhow::bail!(
1085 "path does not have a unicode filename which may not unpack \
1086 on all platforms: {}",
1087 file.display()
1088 )
1089 };
1090 let bad_chars = ['/', '\\', '<', '>', ':', '"', '|', '?', '*'];
1091 if let Some(c) = bad_chars.iter().find(|c| name.contains(**c)) {
1092 anyhow::bail!(
1093 "cannot package a filename with a special character `{}`: {}",
1094 c,
1095 file.display()
1096 )
1097 }
1098 if restricted_names::is_windows_reserved_path(file) {
1099 shell.warn(format!(
1100 "file {} is a reserved Windows filename, \
1101 it will not work on Windows platforms",
1102 file.display()
1103 ))?;
1104 }
1105 Ok(())
1106}
1107
/// A temporary local registry that is overlaid on the upstream registry so
/// that local packages can be resolved before they are published.
struct TmpRegistry<'a> {
    gctx: &'a GlobalContext,
    /// The real registry this overlay stands in for.
    upstream: SourceId,
    /// Root directory of the temporary registry.
    root: Filesystem,
    /// Sha256 (hex) of each package tarball added so far.
    checksums: HashMap<PackageId, String>,
    /// Held for the registry's lifetime to keep the directory locked.
    _lock: FileLock,
}
1118
1119impl<'a> TmpRegistry<'a> {
1120 fn new(gctx: &'a GlobalContext, root: Filesystem, upstream: SourceId) -> CargoResult<Self> {
1121 root.create_dir()?;
1122 let _lock = root.open_rw_exclusive_create(".cargo-lock", gctx, "temporary registry")?;
1123 let slf = Self {
1124 gctx,
1125 root,
1126 upstream,
1127 checksums: HashMap::new(),
1128 _lock,
1129 };
1130 let index_path = slf.index_path().into_path_unlocked();
1132 if index_path.exists() {
1133 paths::remove_dir_all(index_path)?;
1134 }
1135 slf.index_path().create_dir()?;
1136 Ok(slf)
1137 }
1138
1139 fn index_path(&self) -> Filesystem {
1140 self.root.join("index")
1141 }
1142
1143 fn add_package(
1144 &mut self,
1145 ws: &Workspace<'_>,
1146 package: &Package,
1147 tar: &FileLock,
1148 ) -> CargoResult<()> {
1149 debug!(
1150 "adding package {}@{} to local overlay at {}",
1151 package.name(),
1152 package.version(),
1153 self.root.as_path_unlocked().display()
1154 );
1155 {
1156 let mut tar_copy = self.root.open_rw_exclusive_create(
1157 package.package_id().tarball_name(),
1158 self.gctx,
1159 "temporary package registry",
1160 )?;
1161 tar.file().seek(SeekFrom::Start(0))?;
1162 std::io::copy(&mut tar.file(), &mut tar_copy)?;
1163 tar_copy.flush()?;
1164 }
1165
1166 let new_crate = super::registry::prepare_transmit(self.gctx, ws, package, self.upstream)?;
1167
1168 tar.file().seek(SeekFrom::Start(0))?;
1169 let cksum = cargo_util::Sha256::new()
1170 .update_file(tar.file())?
1171 .finish_hex();
1172
1173 self.checksums.insert(package.package_id(), cksum.clone());
1174
1175 let deps: Vec<_> = new_crate
1176 .deps
1177 .into_iter()
1178 .map(|dep| {
1179 let name = dep
1180 .explicit_name_in_toml
1181 .clone()
1182 .unwrap_or_else(|| dep.name.clone())
1183 .into();
1184 let package = dep
1185 .explicit_name_in_toml
1186 .as_ref()
1187 .map(|_| dep.name.clone().into());
1188 RegistryDependency {
1189 name: name,
1190 req: dep.version_req.into(),
1191 features: dep.features.into_iter().map(|x| x.into()).collect(),
1192 optional: dep.optional,
1193 default_features: dep.default_features,
1194 target: dep.target.map(|x| x.into()),
1195 kind: Some(dep.kind.into()),
1196 registry: dep.registry.map(|x| x.into()),
1197 package: package,
1198 public: None,
1199 artifact: dep
1200 .artifact
1201 .map(|xs| xs.into_iter().map(|x| x.into()).collect()),
1202 bindep_target: dep.bindep_target.map(|x| x.into()),
1203 lib: dep.lib,
1204 }
1205 })
1206 .collect();
1207
1208 let index_line = serde_json::to_string(&IndexPackage {
1209 name: new_crate.name.into(),
1210 vers: package.version().clone(),
1211 deps,
1212 features: new_crate
1213 .features
1214 .into_iter()
1215 .map(|(k, v)| (k.into(), v.into_iter().map(|x| x.into()).collect()))
1216 .collect(),
1217 features2: None,
1218 cksum,
1219 yanked: None,
1220 links: new_crate.links.map(|x| x.into()),
1221 rust_version: None,
1222 pubtime: None,
1223 v: Some(2),
1224 })?;
1225
1226 let file =
1227 cargo_util::registry::make_dep_path(&package.name().as_str().to_lowercase(), false);
1228 let mut dst = self.index_path().open_rw_exclusive_create(
1229 file,
1230 self.gctx,
1231 "temporary package registry",
1232 )?;
1233 dst.write_all(index_line.as_bytes())?;
1234 Ok(())
1235 }
1236
1237 fn checksums(&self) -> impl Iterator<Item = (PackageId, &str)> {
1238 self.checksums.iter().map(|(p, s)| (*p, s.as_str()))
1239 }
1240}