1use std::collections::BTreeMap;
2use std::collections::BTreeSet;
3use std::collections::HashMap;
4use std::fs::File;
5use std::io::SeekFrom;
6use std::io::prelude::*;
7use std::path::{Path, PathBuf};
8use std::task::Poll;
9
10use crate::core::PackageIdSpecQuery;
11use crate::core::Shell;
12use crate::core::Verbosity;
13use crate::core::Workspace;
14use crate::core::dependency::DepKind;
15use crate::core::manifest::Target;
16use crate::core::resolver::CliFeatures;
17use crate::core::resolver::HasDevUnits;
18use crate::core::{Package, PackageId, PackageSet, Resolve, SourceId};
19use crate::ops::lockfile::LOCKFILE_NAME;
20use crate::ops::registry::{RegistryOrIndex, infer_registry};
21use crate::sources::path::PathEntry;
22use crate::sources::{CRATES_IO_REGISTRY, PathSource};
23use crate::util::FileLock;
24use crate::util::Filesystem;
25use crate::util::GlobalContext;
26use crate::util::Graph;
27use crate::util::HumanBytes;
28use crate::util::cache_lock::CacheLockMode;
29use crate::util::context::JobsConfig;
30use crate::util::errors::CargoResult;
31use crate::util::errors::ManifestError;
32use crate::util::restricted_names;
33use crate::util::toml::prepare_for_publish;
34use crate::{drop_println, ops};
35use anyhow::{Context as _, bail};
36use cargo_util::paths;
37use cargo_util_schemas::index::{IndexPackage, RegistryDependency};
38use cargo_util_schemas::messages;
39use flate2::{Compression, GzBuilder};
40use tar::{Builder, EntryType, Header, HeaderMode};
41use tracing::debug;
42use unicase::Ascii as UncasedAscii;
43
44mod vcs;
45mod verify;
46
/// Output format used when listing package contents (`opts.list`).
#[derive(Debug, Clone)]
pub enum PackageMessageFormat {
    /// Plain file listing, one relative path per line.
    Human,
    /// Machine-readable JSON message (see `messages::PackageList`).
    Json,
}
55
impl PackageMessageFormat {
    /// All string values accepted by the `FromStr` impl below.
    pub const POSSIBLE_VALUES: [&str; 2] = ["human", "json"];

    /// Format used when the caller does not specify one.
    pub const DEFAULT: &str = "human";
}
61
62impl std::str::FromStr for PackageMessageFormat {
63 type Err = anyhow::Error;
64
65 fn from_str(s: &str) -> Result<PackageMessageFormat, anyhow::Error> {
66 match s {
67 "human" => Ok(PackageMessageFormat::Human),
68 "json" => Ok(PackageMessageFormat::Json),
69 f => bail!("unknown message format `{f}`"),
70 }
71 }
72}
73
/// Options for the `package` operation.
#[derive(Clone)]
pub struct PackageOpts<'gctx> {
    /// Global cargo context (shell, config, directories).
    pub gctx: &'gctx GlobalContext,
    /// Only list the files that would be packaged instead of building the tarball.
    pub list: bool,
    /// Output format for the `list` mode.
    pub fmt: PackageMessageFormat,
    /// Warn about missing manifest metadata (description, license, ...).
    pub check_metadata: bool,
    /// Passed through to the VCS state check (see the `vcs` module).
    pub allow_dirty: bool,
    /// Generate and include a `Cargo.lock` in the archive.
    pub include_lockfile: bool,
    /// Build the packaged crate afterwards to verify it (see the `verify` module).
    pub verify: bool,
    /// Number of parallel jobs for the verify build.
    pub jobs: Option<JobsConfig>,
    /// Keep going on build failure during verification.
    pub keep_going: bool,
    /// Which workspace members to package.
    pub to_package: ops::Packages,
    /// Target platforms for the verify build.
    pub targets: Vec<String>,
    /// Feature selection applied when packaging/verifying.
    pub cli_features: CliFeatures,
    /// Explicit registry or index to package for; inferred when `None`.
    pub reg_or_index: Option<ops::RegistryOrIndex>,
    /// Whether this is a dry-run (affects checksum overlay in `build_lock`).
    pub dry_run: bool,
}
103
/// Archive name under which the original (pre-normalization) `Cargo.toml` is stored.
const ORIGINAL_MANIFEST_FILE: &str = "Cargo.toml.orig";
/// Archive name of the generated file recording VCS state at packaging time.
const VCS_INFO_FILE: &str = ".cargo_vcs_info.json";
106
/// A single file destined for the package archive.
struct ArchiveFile {
    /// Path of the file within the archive, relative to the package root.
    rel_path: PathBuf,
    /// `rel_path` as a UTF-8 string (paths are validated to be UTF-8).
    rel_str: String,
    /// Where the file's contents come from.
    contents: FileContents,
}
116
/// Source of an archived file's bytes.
enum FileContents {
    /// Copied verbatim from this on-disk path.
    OnDisk(PathBuf),
    /// Generated by cargo at packaging time.
    Generated(GeneratedFile),
}
123
/// Kinds of files cargo generates rather than copies.
enum GeneratedFile {
    /// Normalized `Cargo.toml`, generated from the manifest at this path.
    Manifest(PathBuf),
    /// `Cargo.lock`, re-resolved for the packaged crate. `Some(path)` when a
    /// workspace lockfile existed on disk (see `build_ar_list`).
    Lockfile(Option<PathBuf>),
    /// The `.cargo_vcs_info.json` contents.
    VcsInfo(vcs::VcsInfo),
}
136
137#[tracing::instrument(skip_all)]
139fn create_package(
140 ws: &Workspace<'_>,
141 opts: &PackageOpts<'_>,
142 pkg: &Package,
143 ar_files: Vec<ArchiveFile>,
144 local_reg: Option<&TmpRegistry<'_>>,
145) -> CargoResult<FileLock> {
146 let gctx = ws.gctx();
147 let filecount = ar_files.len();
148
149 for dep in pkg.dependencies() {
151 super::check_dep_has_version(dep, false).map_err(|err| {
152 ManifestError::new(
153 err.context(format!(
154 "failed to verify manifest at `{}`",
155 pkg.manifest_path().display()
156 )),
157 pkg.manifest_path().into(),
158 )
159 })?;
160 }
161
162 let filename = pkg.package_id().tarball_name();
163 let dir = ws.build_dir().join("package");
164 let mut dst = {
165 let tmp = format!(".{}", filename);
166 dir.open_rw_exclusive_create(&tmp, gctx, "package scratch space")?
167 };
168
169 gctx.shell()
174 .status("Packaging", pkg.package_id().to_string())?;
175 dst.file().set_len(0)?;
176 let uncompressed_size = tar(ws, opts, pkg, local_reg, ar_files, dst.file(), &filename)
177 .context("failed to prepare local package for uploading")?;
178
179 dst.seek(SeekFrom::Start(0))?;
180 let dst_path = dst.parent().join(&filename);
181 dst.rename(&dst_path)?;
182
183 let dst_metadata = dst
184 .file()
185 .metadata()
186 .with_context(|| format!("could not learn metadata for: `{}`", dst_path.display()))?;
187 let compressed_size = dst_metadata.len();
188
189 let uncompressed = HumanBytes(uncompressed_size);
190 let compressed = HumanBytes(compressed_size);
191
192 let message = format!("{filecount} files, {uncompressed:.1} ({compressed:.1} compressed)");
193 drop(gctx.shell().status("Packaged", message));
195
196 return Ok(dst);
197}
198
/// Packages the selected workspace members and returns locks on the
/// resulting `.crate` files.
pub fn package(ws: &Workspace<'_>, opts: &PackageOpts<'_>) -> CargoResult<Vec<FileLock>> {
    let specs = &opts.to_package.to_package_id_specs(ws)?;
    // If explicit packages were requested, make sure every spec actually
    // matches a workspace member before doing any work.
    if let ops::Packages::Packages(_) = opts.to_package {
        for spec in specs.iter() {
            let member_ids = ws.members().map(|p| p.package_id());
            spec.query(member_ids)?;
        }
    }
    let mut pkgs = ws.members_with_features(specs, &opts.cli_features)?;

    // Keep only the members selected by the specs.
    pkgs.retain(|(pkg, _feats)| specs.iter().any(|spec| spec.matches(pkg.package_id())));

    let packaged = do_package(ws, opts, pkgs)?;

    let mut result = Vec::new();
    let target_dir = ws.target_dir();
    let build_dir = ws.build_dir();
    if target_dir == build_dir {
        // Tarballs were already written to the shared directory.
        result.extend(packaged.into_iter().map(|(_, _, src)| src));
    } else {
        // Uplift each tarball from the build dir into `<target>/package`.
        let artifact_dir = target_dir.join("package");
        for (pkg, _, src) in packaged {
            let filename = pkg.package_id().tarball_name();
            let dst =
                artifact_dir.open_rw_exclusive_create(filename, ws.gctx(), "uplifted package")?;
            // Rewind the source before copying: earlier steps left the
            // cursor somewhere past the start.
            src.file().seek(SeekFrom::Start(0))?;
            std::io::copy(&mut src.file(), &mut dst.file())?;
            result.push(dst);
        }
    }

    Ok(result)
}
240
241pub(crate) fn package_with_dep_graph(
247 ws: &Workspace<'_>,
248 opts: &PackageOpts<'_>,
249 pkgs: Vec<(&Package, CliFeatures)>,
250) -> CargoResult<LocalDependencies<(CliFeatures, FileLock)>> {
251 let output = do_package(ws, opts, pkgs)?;
252
253 Ok(local_deps(output.into_iter().map(
254 |(pkg, opts, tarball)| (pkg, (opts.cli_features, tarball)),
255 )))
256}
257
/// Core packaging loop: prepares the archive file list for each package (in
/// dependency order), then either lists the files or creates the tarballs,
/// optionally registering them in a temporary registry and verifying them.
fn do_package<'a>(
    ws: &Workspace<'_>,
    opts: &PackageOpts<'a>,
    pkgs: Vec<(&Package, CliFeatures)>,
) -> CargoResult<Vec<(Package, PackageOpts<'a>, FileLock)>> {
    // If a workspace lockfile exists and will be included, resolve the
    // workspace up front so the lockfile on disk is up to date.
    if ws
        .lock_root()
        .as_path_unlocked()
        .join(LOCKFILE_NAME)
        .exists()
        && opts.include_lockfile
    {
        let dry_run = false;
        let _ = ops::resolve_ws(ws, dry_run)?;
    }

    let deps = local_deps(pkgs.iter().map(|(p, f)| ((*p).clone(), f.clone())));
    let just_pkgs: Vec<_> = pkgs.iter().map(|p| p.0).collect();

    // A temporary registry overlay is only needed when inter-package path
    // dependencies must be resolvable (lockfile/verify), or when an explicit
    // registry/index was requested.
    let mut local_reg = {
        let sid = if (deps.has_dependencies() && (opts.include_lockfile || opts.verify))
            || opts.reg_or_index.is_some()
        {
            let sid = get_registry(ws.gctx(), &just_pkgs, opts.reg_or_index.clone())?;
            debug!("packaging for registry {}", sid);
            Some(sid)
        } else {
            None
        };
        let reg_dir = ws.build_dir().join("package").join("tmp-registry");
        sid.map(|sid| TmpRegistry::new(ws.gctx(), reg_dir, sid))
            .transpose()?
    };

    // Package in dependency order so each package's local deps are already
    // present in the temporary registry when it gets packaged.
    let sorted_pkgs = deps.sort();
    let mut outputs: Vec<(Package, PackageOpts<'_>, FileLock)> = Vec::new();
    for (pkg, cli_features) in sorted_pkgs {
        // Per-package options: pin this package's feature selection.
        let opts = PackageOpts {
            cli_features: cli_features.clone(),
            to_package: ops::Packages::Default,
            ..opts.clone()
        };
        let ar_files = prepare_archive(ws, &pkg, &opts)?;

        if opts.list {
            match opts.fmt {
                PackageMessageFormat::Human => {
                    for ar_file in &ar_files {
                        drop_println!(ws.gctx(), "{}", ar_file.rel_str);
                    }
                }
                PackageMessageFormat::Json => {
                    let message = messages::PackageList {
                        id: pkg.package_id().to_spec(),
                        files: BTreeMap::from_iter(ar_files.into_iter().map(|f| {
                            let file = match f.contents {
                                FileContents::OnDisk(path) => messages::PackageFile::Copy { path },
                                FileContents::Generated(
                                    GeneratedFile::Manifest(path)
                                    | GeneratedFile::Lockfile(Some(path)),
                                ) => messages::PackageFile::Generate { path: Some(path) },
                                FileContents::Generated(
                                    GeneratedFile::VcsInfo(_) | GeneratedFile::Lockfile(None),
                                ) => messages::PackageFile::Generate { path: None },
                            };
                            (f.rel_path, file)
                        })),
                    };
                    let _ = ws.gctx().shell().print_json(&message);
                }
            }
        } else {
            let tarball = create_package(ws, &opts, &pkg, ar_files, local_reg.as_ref())?;
            if let Some(local_reg) = local_reg.as_mut() {
                // `publish = []` means "never publish"; such packages are
                // not added to the overlay registry.
                if pkg.publish() != &Some(Vec::new()) {
                    local_reg.add_package(ws, &pkg, &tarball)?;
                }
            }
            outputs.push((pkg, opts, tarball));
        }
    }

    // Verify after all packages are created so verification can see every
    // local dependency in the temporary registry.
    if opts.verify {
        for (pkg, opts, tarball) in &outputs {
            verify::run_verify(ws, pkg, tarball, local_reg.as_ref(), opts)
                .context("failed to verify package tarball")?
        }
    }

    Ok(outputs)
}
362
363fn get_registry(
370 gctx: &GlobalContext,
371 pkgs: &[&Package],
372 reg_or_index: Option<RegistryOrIndex>,
373) -> CargoResult<SourceId> {
374 let reg_or_index = match reg_or_index.clone() {
375 Some(r) => Some(r),
376 None => infer_registry(pkgs)?,
377 };
378
379 let reg = reg_or_index
381 .clone()
382 .unwrap_or_else(|| RegistryOrIndex::Registry(CRATES_IO_REGISTRY.to_owned()));
383 if let RegistryOrIndex::Registry(reg_name) = reg {
384 for pkg in pkgs {
385 if let Some(allowed) = pkg.publish().as_ref() {
386 if !allowed.is_empty() && !allowed.iter().any(|a| a == ®_name) {
390 bail!(
391 "`{}` cannot be packaged.\n\
392 The registry `{}` is not listed in the `package.publish` value in Cargo.toml.",
393 pkg.name(),
394 reg_name
395 );
396 }
397 }
398 }
399 }
400 Ok(ops::registry::get_source_id(gctx, reg_or_index.as_ref())?.replacement)
401}
402
/// A set of local packages together with the graph of path dependencies
/// among them; `T` is an arbitrary per-package payload.
#[derive(Clone, Debug, Default)]
pub(crate) struct LocalDependencies<T> {
    /// Each package keyed by id, paired with its payload.
    pub packages: HashMap<PackageId, (Package, T)>,
    /// Dependency edges between the packages above (built by `local_deps`).
    pub graph: Graph<PackageId, ()>,
}
409
410impl<T: Clone> LocalDependencies<T> {
411 pub fn sort(&self) -> Vec<(Package, T)> {
412 self.graph
413 .sort()
414 .into_iter()
415 .map(|name| self.packages[&name].clone())
416 .collect()
417 }
418
419 pub fn has_dependencies(&self) -> bool {
420 self.graph
421 .iter()
422 .any(|node| self.graph.edges(node).next().is_some())
423 }
424}
425
426fn local_deps<T>(packages: impl Iterator<Item = (Package, T)>) -> LocalDependencies<T> {
431 let packages: HashMap<PackageId, (Package, T)> = packages
432 .map(|(pkg, payload)| (pkg.package_id(), (pkg, payload)))
433 .collect();
434
435 let source_to_pkg: HashMap<_, _> = packages
440 .keys()
441 .map(|pkg_id| (pkg_id.source_id(), *pkg_id))
442 .collect();
443
444 let mut graph = Graph::new();
445 for (pkg, _payload) in packages.values() {
446 graph.add(pkg.package_id());
447 for dep in pkg.dependencies() {
448 if !dep.source_id().is_path() {
450 continue;
451 }
452
453 if dep.kind() == DepKind::Development && !dep.specified_req() {
456 continue;
457 };
458
459 if dep.source_id() == pkg.package_id().source_id() {
461 continue;
462 }
463
464 if let Some(dep_pkg) = source_to_pkg.get(&dep.source_id()) {
465 graph.link(pkg.package_id(), *dep_pkg);
466 }
467 }
468 }
469
470 LocalDependencies { packages, graph }
471}
472
473#[tracing::instrument(skip_all)]
475fn prepare_archive(
476 ws: &Workspace<'_>,
477 pkg: &Package,
478 opts: &PackageOpts<'_>,
479) -> CargoResult<Vec<ArchiveFile>> {
480 let gctx = ws.gctx();
481 let mut src = PathSource::new(pkg.root(), pkg.package_id().source_id(), gctx);
482 src.load()?;
483
484 if opts.check_metadata {
485 check_metadata(pkg, gctx)?;
486 }
487
488 if !pkg.manifest().exclude().is_empty() && !pkg.manifest().include().is_empty() {
489 gctx.shell().warn(
490 "both package.include and package.exclude are specified; \
491 the exclude list will be ignored",
492 )?;
493 }
494 let src_files = src.list_files(pkg)?;
495
496 let vcs_info = vcs::check_repo_state(pkg, &src_files, ws, &opts)?;
498 build_ar_list(ws, pkg, src_files, vcs_info, opts.include_lockfile)
499}
500
/// Builds the final list of [`ArchiveFile`]s: the on-disk sources plus the
/// generated `Cargo.toml`/`Cargo.toml.orig`, optional `Cargo.lock`, VCS
/// info, and any `license-file`/`readme` referenced from the manifest.
///
/// Files are keyed case-insensitively so case-colliding duplicates end up
/// in the same bucket rather than silently overwriting each other.
#[tracing::instrument(skip_all)]
fn build_ar_list(
    ws: &Workspace<'_>,
    pkg: &Package,
    src_files: Vec<PathEntry>,
    vcs_info: Option<vcs::VcsInfo>,
    include_lockfile: bool,
) -> CargoResult<Vec<ArchiveFile>> {
    // Case-insensitive map from relative path to the archive entries for it.
    let mut result = HashMap::new();
    let root = pkg.root();
    for src_file in &src_files {
        let rel_path = src_file.strip_prefix(&root)?;
        check_filename(rel_path, &mut ws.gctx().shell())?;
        let rel_str = rel_path.to_str().ok_or_else(|| {
            anyhow::format_err!("non-utf8 path in source directory: {}", rel_path.display())
        })?;
        match rel_str {
            // The on-disk lockfile is never copied; a fresh one may be
            // generated below instead.
            "Cargo.lock" => continue,
            // These names are generated by cargo itself; a source file with
            // the same name would collide with them.
            VCS_INFO_FILE | ORIGINAL_MANIFEST_FILE => anyhow::bail!(
                "invalid inclusion of reserved file name {} in package source",
                rel_str
            ),
            _ => {
                result
                    .entry(UncasedAscii::new(rel_str))
                    .or_insert_with(Vec::new)
                    .push(ArchiveFile {
                        rel_path: rel_path.to_owned(),
                        rel_str: rel_str.to_owned(),
                        contents: FileContents::OnDisk(src_file.to_path_buf()),
                    });
            }
        }
    }

    // Replace the on-disk manifest with a generated, normalized one and
    // keep the original under `Cargo.toml.orig`.
    if result.remove(&UncasedAscii::new("Cargo.toml")).is_some() {
        result
            .entry(UncasedAscii::new(ORIGINAL_MANIFEST_FILE))
            .or_insert_with(Vec::new)
            .push(ArchiveFile {
                rel_path: PathBuf::from(ORIGINAL_MANIFEST_FILE),
                rel_str: ORIGINAL_MANIFEST_FILE.to_string(),
                contents: FileContents::OnDisk(pkg.manifest_path().to_owned()),
            });
        result
            .entry(UncasedAscii::new("Cargo.toml"))
            .or_insert_with(Vec::new)
            .push(ArchiveFile {
                rel_path: PathBuf::from("Cargo.toml"),
                rel_str: "Cargo.toml".to_string(),
                contents: FileContents::Generated(GeneratedFile::Manifest(
                    pkg.manifest_path().to_owned(),
                )),
            });
    } else {
        ws.gctx().shell().warn(&format!(
            "no `Cargo.toml` file found when packaging `{}` (note the case of the file name).",
            pkg.name()
        ))?;
    }

    if include_lockfile {
        // `Some(path)` when a workspace lockfile exists to base the
        // generated one on; `None` means it is resolved from scratch.
        let lockfile_path = ws.lock_root().as_path_unlocked().join(LOCKFILE_NAME);
        let lockfile_path = lockfile_path.exists().then_some(lockfile_path);
        let rel_str = "Cargo.lock";
        result
            .entry(UncasedAscii::new(rel_str))
            .or_insert_with(Vec::new)
            .push(ArchiveFile {
                rel_path: PathBuf::from(rel_str),
                rel_str: rel_str.to_string(),
                contents: FileContents::Generated(GeneratedFile::Lockfile(lockfile_path)),
            });
    }

    if let Some(vcs_info) = vcs_info {
        let rel_str = VCS_INFO_FILE;
        result
            .entry(UncasedAscii::new(rel_str))
            .or_insert_with(Vec::new)
            .push(ArchiveFile {
                rel_path: PathBuf::from(rel_str),
                rel_str: rel_str.to_string(),
                contents: FileContents::Generated(GeneratedFile::VcsInfo(vcs_info)),
            });
    }

    // Collected error messages for nonexistent license-file/readme paths;
    // reported together at the end.
    let mut invalid_manifest_field: Vec<String> = vec![];

    let mut result = result.into_values().flatten().collect();
    if let Some(license_file) = &pkg.manifest().metadata().license_file {
        let license_path = Path::new(license_file);
        let abs_file_path = paths::normalize_path(&pkg.root().join(license_path));
        if abs_file_path.is_file() {
            check_for_file_and_add(
                "license-file",
                license_path,
                abs_file_path,
                pkg,
                &mut result,
                ws,
            )?;
        } else {
            error_on_nonexistent_file(
                &pkg,
                &license_path,
                "license-file",
                &mut invalid_manifest_field,
            );
        }
    }
    if let Some(readme) = &pkg.manifest().metadata().readme {
        let readme_path = Path::new(readme);
        let abs_file_path = paths::normalize_path(&pkg.root().join(readme_path));
        if abs_file_path.is_file() {
            check_for_file_and_add("readme", readme_path, abs_file_path, pkg, &mut result, ws)?;
        } else {
            error_on_nonexistent_file(&pkg, &readme_path, "readme", &mut invalid_manifest_field);
        }
    }

    if !invalid_manifest_field.is_empty() {
        return Err(anyhow::anyhow!(invalid_manifest_field.join("\n")));
    }

    // A build script outside the package root cannot be archived correctly.
    for t in pkg
        .manifest()
        .targets()
        .iter()
        .filter(|t| t.is_custom_build())
    {
        if let Some(custom_build_path) = t.src_path().path() {
            let abs_custom_build_path = paths::normalize_path(&pkg.root().join(custom_build_path));
            if !abs_custom_build_path.is_file() || !abs_custom_build_path.starts_with(pkg.root()) {
                error_custom_build_file_not_in_package(pkg, &abs_custom_build_path, t)?;
            }
        }
    }

    // Deterministic archive ordering.
    result.sort_unstable_by(|a, b| a.rel_path.cmp(&b.rel_path));

    Ok(result)
}
647
/// Ensures the manifest-referenced file (`license-file` or `readme`, named
/// by `label`) ends up in the archive, adding it if the normal file listing
/// didn't already pick it up.
fn check_for_file_and_add(
    label: &str,
    file_path: &Path,
    abs_file_path: PathBuf,
    pkg: &Package,
    result: &mut Vec<ArchiveFile>,
    ws: &Workspace<'_>,
) -> CargoResult<()> {
    match abs_file_path.strip_prefix(&pkg.root()) {
        // File lives inside the package: add it (at its real relative path)
        // unless it was already listed.
        Ok(rel_file_path) => {
            if !result.iter().any(|ar| ar.rel_path == rel_file_path) {
                result.push(ArchiveFile {
                    rel_path: rel_file_path.to_path_buf(),
                    rel_str: rel_file_path
                        .to_str()
                        .expect("everything was utf8")
                        .to_string(),
                    contents: FileContents::OnDisk(abs_file_path),
                })
            }
        }
        // File lives outside the package: archive it under its bare file
        // name at the package root, unless a root file with that name
        // already exists (in which case warn and keep the existing one).
        Err(_) => {
            let file_name = file_path.file_name().unwrap();
            if result.iter().any(|ar| ar.rel_path == file_name) {
                ws.gctx().shell().warn(&format!(
                    "{} `{}` appears to be a path outside of the package, \
                     but there is already a file named `{}` in the root of the package. \
                     The archived crate will contain the copy in the root of the package. \
                     Update the {} to point to the path relative \
                     to the root of the package to remove this warning.",
                    label,
                    file_path.display(),
                    file_name.to_str().unwrap(),
                    label,
                ))?;
            } else {
                result.push(ArchiveFile {
                    rel_path: PathBuf::from(file_name),
                    rel_str: file_name.to_str().unwrap().to_string(),
                    contents: FileContents::OnDisk(abs_file_path),
                })
            }
        }
    }
    Ok(())
}
695
696fn error_on_nonexistent_file(
697 pkg: &Package,
698 path: &Path,
699 manifest_key_name: &'static str,
700 invalid: &mut Vec<String>,
701) {
702 let rel_msg = if path.is_absolute() {
703 "".to_string()
704 } else {
705 format!(" (relative to `{}`)", pkg.root().display())
706 };
707
708 let msg = format!(
709 "{manifest_key_name} `{}` does not appear to exist{}.\n\
710 Please update the {manifest_key_name} setting in the manifest at `{}`.",
711 path.display(),
712 rel_msg,
713 pkg.manifest_path().display()
714 );
715
716 invalid.push(msg);
717}
718
719fn error_custom_build_file_not_in_package(
720 pkg: &Package,
721 path: &Path,
722 target: &Target,
723) -> CargoResult<Vec<ArchiveFile>> {
724 let tip = {
725 let description_name = target.description_named();
726 if path.is_file() {
727 format!(
728 "the source file of {description_name} doesn't appear to be a path inside of the package.\n\
729 It is at `{}`, whereas the root the package is `{}`.\n",
730 path.display(),
731 pkg.root().display()
732 )
733 } else {
734 format!("the source file of {description_name} doesn't appear to exist.\n",)
735 }
736 };
737 let msg = format!(
738 "{}\
739 This may cause issue during packaging, as modules resolution and resources included via macros are often relative to the path of source files.\n\
740 Please update the `build` setting in the manifest at `{}` and point to a path inside the root of the package.",
741 tip,
742 pkg.manifest_path().display()
743 );
744 anyhow::bail!(msg)
745}
746
/// Generates the contents of the `Cargo.lock` to embed in the packaged
/// crate, resolving `publish_pkg` in an ephemeral workspace (optionally
/// overlaid with the temporary registry of locally packaged deps).
fn build_lock(
    ws: &Workspace<'_>,
    opts: &PackageOpts<'_>,
    publish_pkg: &Package,
    local_reg: Option<&TmpRegistry<'_>>,
) -> CargoResult<String> {
    let gctx = ws.gctx();
    let mut orig_resolve = ops::load_pkg_lockfile(ws)?;

    // Resolve in a one-package ephemeral workspace built from the
    // publish-ready manifest.
    let mut tmp_ws = Workspace::ephemeral(publish_pkg.clone(), ws.gctx(), None, true)?;

    if let Some(local_reg) = local_reg {
        // Make locally packaged dependencies visible to the resolver.
        tmp_ws.add_local_overlay(
            local_reg.upstream,
            local_reg.root.as_path_unlocked().to_owned(),
        );
        if opts.dry_run {
            // On a dry run, seed the original resolve with the overlay's
            // checksums so the generated lockfile matches what a real
            // publish would produce.
            if let Some(orig_resolve) = orig_resolve.as_mut() {
                let upstream_in_lock = if local_reg.upstream.is_crates_io() {
                    SourceId::crates_io(gctx)?
                } else {
                    local_reg.upstream
                };
                for (p, s) in local_reg.checksums() {
                    orig_resolve.set_checksum(p.with_source_id(upstream_in_lock), s.to_owned());
                }
            }
        }
    }
    let mut tmp_reg = tmp_ws.package_registry()?;

    // Re-resolve, using the original lockfile (if any) to keep versions
    // stable where possible.
    let mut new_resolve = ops::resolve_with_previous(
        &mut tmp_reg,
        &tmp_ws,
        &CliFeatures::new_all(true),
        HasDevUnits::Yes,
        orig_resolve.as_ref(),
        None,
        &[],
        true,
    )?;

    let pkg_set = ops::get_resolved_packages(&new_resolve, tmp_reg)?;

    // Report (in verbose mode) any differences from the original lockfile.
    if let Some(orig_resolve) = orig_resolve {
        compare_resolve(gctx, tmp_ws.current()?, &orig_resolve, &new_resolve)?;
    }
    check_yanked(
        gctx,
        &pkg_set,
        &new_resolve,
        "consider updating to a version that is not yanked",
    )?;

    ops::resolve_to_string(&tmp_ws, &mut new_resolve)
}
807
/// Warns when commonly expected manifest metadata fields are missing or
/// empty (description, license/license-file, documentation/homepage/repository).
fn check_metadata(pkg: &Package, gctx: &GlobalContext) -> CargoResult<()> {
    let md = pkg.manifest().metadata();

    let mut missing = vec![];

    // For each `||`-group, push all of its field names when every field in
    // the group is missing or empty.
    macro_rules! lacking {
        ($( $($field: ident)||* ),*) => {{
            $(
                if $(md.$field.as_ref().map_or(true, |s| s.is_empty()))&&* {
                    $(missing.push(stringify!($field).replace("_", "-"));)*
                }
            )*
        }}
    }
    lacking!(
        description,
        license || license_file,
        documentation || homepage || repository
    );

    if !missing.is_empty() {
        // Join as "a, b or c" for the warning text.
        let mut things = missing[..missing.len() - 1].join(", ");
        if !things.is_empty() {
            things.push_str(" or ");
        }
        things.push_str(missing.last().unwrap());

        gctx.shell().warn(&format!(
            "manifest has no {things}.\n\
            See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info.",
            things = things
        ))?
    }

    Ok(())
}
848
849fn tar(
853 ws: &Workspace<'_>,
854 opts: &PackageOpts<'_>,
855 pkg: &Package,
856 local_reg: Option<&TmpRegistry<'_>>,
857 ar_files: Vec<ArchiveFile>,
858 dst: &File,
859 filename: &str,
860) -> CargoResult<u64> {
861 let filename = Path::new(filename);
863 let encoder = GzBuilder::new()
864 .filename(paths::path2bytes(filename)?)
865 .write(dst, Compression::best());
866
867 let mut ar = Builder::new(encoder);
869 ar.sparse(false);
870 let gctx = ws.gctx();
871
872 let base_name = format!("{}-{}", pkg.name(), pkg.version());
873 let base_path = Path::new(&base_name);
874 let included = ar_files
875 .iter()
876 .map(|ar_file| ar_file.rel_path.clone())
877 .collect::<Vec<_>>();
878 let publish_pkg = prepare_for_publish(pkg, ws, Some(&included))?;
879
880 let mut uncompressed_size = 0;
881 for ar_file in ar_files {
882 let ArchiveFile {
883 rel_path,
884 rel_str,
885 contents,
886 } = ar_file;
887 let ar_path = base_path.join(&rel_path);
888 gctx.shell()
889 .verbose(|shell| shell.status("Archiving", &rel_str))?;
890 let mut header = Header::new_gnu();
891 match contents {
892 FileContents::OnDisk(disk_path) => {
893 let mut file = File::open(&disk_path).with_context(|| {
894 format!("failed to open for archiving: `{}`", disk_path.display())
895 })?;
896 let metadata = file.metadata().with_context(|| {
897 format!("could not learn metadata for: `{}`", disk_path.display())
898 })?;
899 header.set_metadata_in_mode(&metadata, HeaderMode::Deterministic);
900 header.set_cksum();
901 ar.append_data(&mut header, &ar_path, &mut file)
902 .with_context(|| {
903 format!("could not archive source file `{}`", disk_path.display())
904 })?;
905 uncompressed_size += metadata.len() as u64;
906 }
907 FileContents::Generated(generated_kind) => {
908 let contents = match generated_kind {
909 GeneratedFile::Manifest(_) => {
910 publish_pkg.manifest().to_normalized_contents()?
911 }
912 GeneratedFile::Lockfile(_) => build_lock(ws, opts, &publish_pkg, local_reg)?,
913 GeneratedFile::VcsInfo(ref s) => serde_json::to_string_pretty(s)?,
914 };
915 header.set_entry_type(EntryType::file());
916 header.set_mode(0o644);
917 header.set_size(contents.len() as u64);
918 header.set_mtime(1);
920 header.set_cksum();
921 ar.append_data(&mut header, &ar_path, contents.as_bytes())
922 .with_context(|| format!("could not archive source file `{}`", rel_str))?;
923 uncompressed_size += contents.len() as u64;
924 }
925 }
926 }
927
928 let encoder = ar.into_inner()?;
929 encoder.finish()?;
930 Ok(uncompressed_size)
931}
932
/// In verbose mode, notes every package that the freshly generated lockfile
/// adds relative to the original lockfile, with a hint about where the
/// package came from before (different source or different version).
fn compare_resolve(
    gctx: &GlobalContext,
    current_pkg: &Package,
    orig_resolve: &Resolve,
    new_resolve: &Resolve,
) -> CargoResult<()> {
    // This output is diagnostic only; skip all the work unless verbose.
    if gctx.shell().verbosity() != Verbosity::Verbose {
        return Ok(());
    }
    let new_set: BTreeSet<PackageId> = new_resolve.iter().collect();
    let orig_set: BTreeSet<PackageId> = orig_resolve.iter().collect();
    let added = new_set.difference(&orig_set);
    let removed: Vec<&PackageId> = orig_set.difference(&new_set).collect();
    for pkg_id in added {
        // The package being packaged itself always differs; not interesting.
        if pkg_id.name() == current_pkg.name() && pkg_id.version() == current_pkg.version() {
            continue;
        }
        // Removed entries with the same name and version: the package moved
        // to a different source.
        let removed_candidates: Vec<&PackageId> = removed
            .iter()
            .filter(|orig_pkg_id| {
                orig_pkg_id.name() == pkg_id.name() && orig_pkg_id.version() == pkg_id.version()
            })
            .cloned()
            .collect();
        let extra = match removed_candidates.len() {
            0 => {
                // Same name at a different version: report the previous
                // version(s) instead.
                let previous_versions: Vec<&PackageId> = removed
                    .iter()
                    .filter(|orig_pkg_id| orig_pkg_id.name() == pkg_id.name())
                    .cloned()
                    .collect();
                match previous_versions.len() {
                    0 => String::new(),
                    1 => format!(
                        ", previous version was `{}`",
                        previous_versions[0].version()
                    ),
                    _ => format!(
                        ", previous versions were: {}",
                        previous_versions
                            .iter()
                            .map(|pkg_id| format!("`{}`", pkg_id.version()))
                            .collect::<Vec<_>>()
                            .join(", ")
                    ),
                }
            }
            1 => {
                format!(
                    ", was originally sourced from `{}`",
                    removed_candidates[0].source_id()
                )
            }
            _ => {
                let comma_list = removed_candidates
                    .iter()
                    .map(|pkg_id| format!("`{}`", pkg_id.source_id()))
                    .collect::<Vec<_>>()
                    .join(", ");
                format!(
                    ", was originally sourced from one of these sources: {}",
                    comma_list
                )
            }
        };
        let msg = format!(
            "package `{}` added to the packaged Cargo.lock file{}",
            pkg_id, extra
        );
        gctx.shell().note(msg)?;
    }
    Ok(())
}
1019
/// Warns about any package in `resolve` that has been yanked from its
/// registry, appending `hint` to each warning.
pub fn check_yanked(
    gctx: &GlobalContext,
    pkg_set: &PackageSet<'_>,
    resolve: &Resolve,
    hint: &str,
) -> CargoResult<()> {
    // Querying yanked status touches the package cache; hold the lock for
    // the duration.
    let _lock = gctx.acquire_package_cache_lock(CacheLockMode::DownloadExclusive)?;

    let mut sources = pkg_set.sources_mut();
    let mut pending: Vec<PackageId> = resolve.iter().collect();
    let mut results = Vec::new();
    // Force fresh index data rather than possibly-stale cached answers.
    for (_id, source) in sources.sources_mut() {
        source.invalidate_cache();
    }
    // `is_yanked` may return `Pending`; keep polling, blocking each source
    // between rounds, until every package has produced an answer.
    while !pending.is_empty() {
        pending.retain(|pkg_id| {
            if let Some(source) = sources.get_mut(pkg_id.source_id()) {
                match source.is_yanked(*pkg_id) {
                    Poll::Ready(result) => results.push((*pkg_id, result)),
                    Poll::Pending => return true,
                }
            }
            false
        });
        for (_id, source) in sources.sources_mut() {
            source.block_until_ready()?;
        }
    }

    for (pkg_id, is_yanked) in results {
        if is_yanked? {
            gctx.shell().warn(format!(
                "package `{}` in Cargo.lock is yanked in registry `{}`, {}",
                pkg_id,
                pkg_id.source_id().display_registry_name(),
                hint
            ))?;
        }
    }
    Ok(())
}
1063
1064fn check_filename(file: &Path, shell: &mut Shell) -> CargoResult<()> {
1071 let Some(name) = file.file_name() else {
1072 return Ok(());
1073 };
1074 let Some(name) = name.to_str() else {
1075 anyhow::bail!(
1076 "path does not have a unicode filename which may not unpack \
1077 on all platforms: {}",
1078 file.display()
1079 )
1080 };
1081 let bad_chars = ['/', '\\', '<', '>', ':', '"', '|', '?', '*'];
1082 if let Some(c) = bad_chars.iter().find(|c| name.contains(**c)) {
1083 anyhow::bail!(
1084 "cannot package a filename with a special character `{}`: {}",
1085 c,
1086 file.display()
1087 )
1088 }
1089 if restricted_names::is_windows_reserved_path(file) {
1090 shell.warn(format!(
1091 "file {} is a reserved Windows filename, \
1092 it will not work on Windows platforms",
1093 file.display()
1094 ))?;
1095 }
1096 Ok(())
1097}
1098
/// A temporary on-disk registry (tarballs + index) used as a local overlay
/// over an upstream registry while packaging/verifying path dependencies.
struct TmpRegistry<'a> {
    gctx: &'a GlobalContext,
    /// The upstream registry this temporary registry overlays.
    upstream: SourceId,
    /// Root directory holding the tarballs and the `index` subdirectory.
    root: Filesystem,
    /// Sha256 hex checksums of the tarballs added so far, by package id.
    checksums: HashMap<PackageId, String>,
    /// Held for the registry's lifetime to guard `root` against concurrent use.
    _lock: FileLock,
}
1109
1110impl<'a> TmpRegistry<'a> {
1111 fn new(gctx: &'a GlobalContext, root: Filesystem, upstream: SourceId) -> CargoResult<Self> {
1112 root.create_dir()?;
1113 let _lock = root.open_rw_exclusive_create(".cargo-lock", gctx, "temporary registry")?;
1114 let slf = Self {
1115 gctx,
1116 root,
1117 upstream,
1118 checksums: HashMap::new(),
1119 _lock,
1120 };
1121 let index_path = slf.index_path().into_path_unlocked();
1123 if index_path.exists() {
1124 paths::remove_dir_all(index_path)?;
1125 }
1126 slf.index_path().create_dir()?;
1127 Ok(slf)
1128 }
1129
1130 fn index_path(&self) -> Filesystem {
1131 self.root.join("index")
1132 }
1133
1134 fn add_package(
1135 &mut self,
1136 ws: &Workspace<'_>,
1137 package: &Package,
1138 tar: &FileLock,
1139 ) -> CargoResult<()> {
1140 debug!(
1141 "adding package {}@{} to local overlay at {}",
1142 package.name(),
1143 package.version(),
1144 self.root.as_path_unlocked().display()
1145 );
1146 {
1147 let mut tar_copy = self.root.open_rw_exclusive_create(
1148 package.package_id().tarball_name(),
1149 self.gctx,
1150 "temporary package registry",
1151 )?;
1152 tar.file().seek(SeekFrom::Start(0))?;
1153 std::io::copy(&mut tar.file(), &mut tar_copy)?;
1154 tar_copy.flush()?;
1155 }
1156
1157 let new_crate = super::registry::prepare_transmit(self.gctx, ws, package, self.upstream)?;
1158
1159 tar.file().seek(SeekFrom::Start(0))?;
1160 let cksum = cargo_util::Sha256::new()
1161 .update_file(tar.file())?
1162 .finish_hex();
1163
1164 self.checksums.insert(package.package_id(), cksum.clone());
1165
1166 let deps: Vec<_> = new_crate
1167 .deps
1168 .into_iter()
1169 .map(|dep| {
1170 let name = dep
1171 .explicit_name_in_toml
1172 .clone()
1173 .unwrap_or_else(|| dep.name.clone())
1174 .into();
1175 let package = dep
1176 .explicit_name_in_toml
1177 .as_ref()
1178 .map(|_| dep.name.clone().into());
1179 RegistryDependency {
1180 name: name,
1181 req: dep.version_req.into(),
1182 features: dep.features.into_iter().map(|x| x.into()).collect(),
1183 optional: dep.optional,
1184 default_features: dep.default_features,
1185 target: dep.target.map(|x| x.into()),
1186 kind: Some(dep.kind.into()),
1187 registry: dep.registry.map(|x| x.into()),
1188 package: package,
1189 public: None,
1190 artifact: dep
1191 .artifact
1192 .map(|xs| xs.into_iter().map(|x| x.into()).collect()),
1193 bindep_target: dep.bindep_target.map(|x| x.into()),
1194 lib: dep.lib,
1195 }
1196 })
1197 .collect();
1198
1199 let index_line = serde_json::to_string(&IndexPackage {
1200 name: new_crate.name.into(),
1201 vers: package.version().clone(),
1202 deps,
1203 features: new_crate
1204 .features
1205 .into_iter()
1206 .map(|(k, v)| (k.into(), v.into_iter().map(|x| x.into()).collect()))
1207 .collect(),
1208 features2: None,
1209 cksum,
1210 yanked: None,
1211 links: new_crate.links.map(|x| x.into()),
1212 rust_version: None,
1213 v: Some(2),
1214 })?;
1215
1216 let file =
1217 cargo_util::registry::make_dep_path(&package.name().as_str().to_lowercase(), false);
1218 let mut dst = self.index_path().open_rw_exclusive_create(
1219 file,
1220 self.gctx,
1221 "temporary package registry",
1222 )?;
1223 dst.write_all(index_line.as_bytes())?;
1224 Ok(())
1225 }
1226
1227 fn checksums(&self) -> impl Iterator<Item = (PackageId, &str)> {
1228 self.checksums.iter().map(|(p, s)| (*p, s.as_str()))
1229 }
1230}