1use std::collections::BTreeMap;
2use std::collections::BTreeSet;
3use std::collections::HashMap;
4use std::fs::File;
5use std::io::SeekFrom;
6use std::io::prelude::*;
7use std::path::{Path, PathBuf};
8use std::task::Poll;
9
10use crate::core::PackageIdSpecQuery;
11use crate::core::Shell;
12use crate::core::Verbosity;
13use crate::core::Workspace;
14use crate::core::dependency::DepKind;
15use crate::core::manifest::Target;
16use crate::core::resolver::CliFeatures;
17use crate::core::resolver::HasDevUnits;
18use crate::core::{Package, PackageId, PackageSet, Resolve, SourceId};
19use crate::ops::lockfile::LOCKFILE_NAME;
20use crate::ops::registry::{RegistryOrIndex, infer_registry};
21use crate::sources::path::PathEntry;
22use crate::sources::{CRATES_IO_REGISTRY, PathSource};
23use crate::util::FileLock;
24use crate::util::Filesystem;
25use crate::util::GlobalContext;
26use crate::util::Graph;
27use crate::util::HumanBytes;
28use crate::util::cache_lock::CacheLockMode;
29use crate::util::context::JobsConfig;
30use crate::util::errors::CargoResult;
31use crate::util::errors::ManifestError;
32use crate::util::restricted_names;
33use crate::util::toml::prepare_for_publish;
34use crate::{drop_println, ops};
35use annotate_snippets::Level;
36use anyhow::{Context as _, bail};
37use cargo_util::paths;
38use cargo_util_schemas::index::{IndexPackage, RegistryDependency};
39use cargo_util_schemas::messages;
40use flate2::{Compression, GzBuilder};
41use tar::{Builder, EntryType, Header, HeaderMode};
42use tracing::debug;
43use unicase::Ascii as UncasedAscii;
44
45mod vcs;
46mod verify;
47
/// Output format for `cargo package --list`.
#[derive(Debug, Clone)]
pub enum PackageMessageFormat {
    /// One file path per line, for human consumption.
    Human,
    /// Machine-readable JSON message per package.
    Json,
}
56
impl PackageMessageFormat {
    /// All accepted values for the `--message-format` flag.
    pub const POSSIBLE_VALUES: [&str; 2] = ["human", "json"];

    /// Value used when `--message-format` is not specified.
    pub const DEFAULT: &str = "human";
}
62
63impl std::str::FromStr for PackageMessageFormat {
64 type Err = anyhow::Error;
65
66 fn from_str(s: &str) -> Result<PackageMessageFormat, anyhow::Error> {
67 match s {
68 "human" => Ok(PackageMessageFormat::Human),
69 "json" => Ok(PackageMessageFormat::Json),
70 f => bail!("unknown message format `{f}`"),
71 }
72 }
73}
74
/// Options for `cargo package` (and the packaging half of `cargo publish`).
#[derive(Clone)]
pub struct PackageOpts<'gctx> {
    pub gctx: &'gctx GlobalContext,
    /// Only list the files that would be packaged; do not build archives.
    pub list: bool,
    /// Output format used together with `list`.
    pub fmt: PackageMessageFormat,
    /// Warn about missing `[package]` metadata fields (see `check_metadata`).
    pub check_metadata: bool,
    /// Tolerate a dirty VCS working directory (consumed by `vcs::check_repo_state`).
    pub allow_dirty: bool,
    /// Include a `Cargo.lock` file in the archive.
    pub include_lockfile: bool,
    /// Build the packaged crate afterwards to verify it (see `verify::run_verify`).
    pub verify: bool,
    pub jobs: Option<JobsConfig>,
    pub keep_going: bool,
    /// Which workspace packages to package.
    pub to_package: ops::Packages,
    pub targets: Vec<String>,
    pub cli_features: CliFeatures,
    /// Registry (or index URL) the package is intended for, if any.
    pub reg_or_index: Option<ops::RegistryOrIndex>,
    pub dry_run: bool,
}
104
/// Archive name under which the original (pre-normalization) manifest is stored.
const ORIGINAL_MANIFEST_FILE: &str = "Cargo.toml.orig";
/// Name of the generated file recording the VCS state at packaging time.
const VCS_INFO_FILE: &str = ".cargo_vcs_info.json";
107
/// A single entry destined for the `.crate` tarball.
struct ArchiveFile {
    /// Path of the entry inside the archive, relative to the package root.
    rel_path: PathBuf,
    /// `rel_path` as a UTF-8 string, for display and messages.
    rel_str: String,
    /// Where the entry's bytes come from.
    contents: FileContents,
}
117
/// Source of an archive entry's contents.
enum FileContents {
    /// Copy the file verbatim from this on-disk path.
    OnDisk(PathBuf),
    /// Produce the contents at archive time.
    Generated(GeneratedFile),
}
124
/// Files whose contents are generated during packaging (see `tar`).
enum GeneratedFile {
    /// Normalized `Cargo.toml`, generated from the manifest at this path.
    Manifest(PathBuf),
    /// `Cargo.lock`; `Some` points at the workspace lockfile, `None` means it
    /// is regenerated from scratch.
    Lockfile(Option<PathBuf>),
    /// `.cargo_vcs_info.json` describing the VCS state.
    VcsInfo(vcs::VcsInfo),
}
137
/// Builds the `.crate` tarball for `pkg` from the prepared `ar_files` list and
/// returns a lock on the finished file in `<build-dir>/package/`.
#[tracing::instrument(skip_all)]
fn create_package(
    ws: &Workspace<'_>,
    opts: &PackageOpts<'_>,
    pkg: &Package,
    ar_files: Vec<ArchiveFile>,
    local_reg: Option<&TmpRegistry<'_>>,
) -> CargoResult<FileLock> {
    let gctx = ws.gctx();
    let filecount = ar_files.len();

    // Packaged crates must give a version for every dependency so they can be
    // resolved once published.
    for dep in pkg.dependencies() {
        super::check_dep_has_version(dep, false).map_err(|err| {
            ManifestError::new(
                err.context(format!(
                    "failed to verify manifest at `{}`",
                    pkg.manifest_path().display()
                )),
                pkg.manifest_path().into(),
            )
        })?;
    }

    let filename = pkg.package_id().tarball_name();
    let dir = ws.build_dir().join("package");
    // Write into a dot-prefixed scratch file first and rename into place
    // below, so a partially-written tarball never appears under its final name.
    let mut dst = {
        let tmp = format!(".{}", filename);
        dir.open_rw_exclusive_create(&tmp, gctx, "package scratch space")?
    };

    gctx.shell()
        .status("Packaging", pkg.package_id().to_string())?;
    // Truncate in case the scratch file survives from a previous run.
    dst.file().set_len(0)?;
    let uncompressed_size = tar(ws, opts, pkg, local_reg, ar_files, dst.file(), &filename)
        .context("failed to prepare local package for uploading")?;

    dst.seek(SeekFrom::Start(0))?;
    let dst_path = dst.parent().join(&filename);
    dst.rename(&dst_path)?;

    let dst_metadata = dst
        .file()
        .metadata()
        .with_context(|| format!("could not learn metadata for: `{}`", dst_path.display()))?;
    let compressed_size = dst_metadata.len();

    let uncompressed = HumanBytes(uncompressed_size);
    let compressed = HumanBytes(compressed_size);

    let message = format!("{filecount} files, {uncompressed:.1} ({compressed:.1} compressed)");
    // The summary is purely informational; ignore shell errors here.
    drop(gctx.shell().status("Packaged", message));

    return Ok(dst);
}
199
/// Packages the selected workspace members and returns locks on the produced
/// `.crate` files, uplifted into `<target-dir>/package/` when the target and
/// build directories differ.
pub fn package(ws: &Workspace<'_>, opts: &PackageOpts<'_>) -> CargoResult<Vec<FileLock>> {
    let specs = &opts.to_package.to_package_id_specs(ws)?;
    // For explicit `-p` selections, error early on specs that match no
    // workspace member.
    if let ops::Packages::Packages(_) = opts.to_package {
        for spec in specs.iter() {
            let member_ids = ws.members().map(|p| p.package_id());
            spec.query(member_ids)?;
        }
    }
    let mut pkgs = ws.members_with_features(specs, &opts.cli_features)?;

    pkgs.retain(|(pkg, _feats)| specs.iter().any(|spec| spec.matches(pkg.package_id())));

    let packaged = do_package(ws, opts, pkgs)?;

    let mut result = Vec::new();
    let target_dir = ws.target_dir();
    let build_dir = ws.build_dir();
    if target_dir == build_dir {
        result.extend(packaged.into_iter().map(|(_, _, src)| src));
    } else {
        // Tarballs were built in the build dir; copy ("uplift") them into the
        // user-visible artifact directory.
        let artifact_dir = target_dir.join("package");
        for (pkg, _, src) in packaged {
            let filename = pkg.package_id().tarball_name();
            let dst =
                artifact_dir.open_rw_exclusive_create(filename, ws.gctx(), "uplifted package")?;
            src.file().seek(SeekFrom::Start(0))?;
            std::io::copy(&mut src.file(), &mut dst.file())?;
            result.push(dst);
        }
    }

    Ok(result)
}
241
242pub(crate) fn package_with_dep_graph(
248 ws: &Workspace<'_>,
249 opts: &PackageOpts<'_>,
250 pkgs: Vec<(&Package, CliFeatures)>,
251) -> CargoResult<LocalDependencies<(CliFeatures, FileLock)>> {
252 let output = do_package(ws, opts, pkgs)?;
253
254 Ok(local_deps(output.into_iter().map(
255 |(pkg, opts, tarball)| (pkg, (opts.cli_features, tarball)),
256 )))
257}
258
/// Core packaging loop: prepares the file list for each package (in local
/// dependency order), then either lists the files or builds the tarballs,
/// optionally verifying them at the end.
fn do_package<'a>(
    ws: &Workspace<'_>,
    opts: &PackageOpts<'a>,
    pkgs: Vec<(&Package, CliFeatures)>,
) -> CargoResult<Vec<(Package, PackageOpts<'a>, FileLock)>> {
    // Make sure the workspace lockfile is up to date before copying it into
    // any archive.
    if ws
        .lock_root()
        .as_path_unlocked()
        .join(LOCKFILE_NAME)
        .exists()
        && opts.include_lockfile
    {
        let dry_run = false;
        let _ = ops::resolve_ws(ws, dry_run)?;
    }

    let deps = local_deps(pkgs.iter().map(|(p, f)| ((*p).clone(), f.clone())));
    let just_pkgs: Vec<_> = pkgs.iter().map(|p| p.0).collect();

    // A temporary registry overlay is only needed when inter-dependent local
    // packages must resolve against each other, or when an explicit registry
    // was requested.
    let mut local_reg = {
        let sid = if (deps.has_dependencies() && (opts.include_lockfile || opts.verify))
            || opts.reg_or_index.is_some()
        {
            let sid = get_registry(ws.gctx(), &just_pkgs, opts.reg_or_index.clone())?;
            debug!("packaging for registry {}", sid);
            Some(sid)
        } else {
            None
        };
        let reg_dir = ws.build_dir().join("package").join("tmp-registry");
        sid.map(|sid| TmpRegistry::new(ws.gctx(), reg_dir, sid))
            .transpose()?
    };

    // Dependencies first, so each package can resolve against the already
    // packaged ones in the overlay.
    let sorted_pkgs = deps.sort();
    let mut outputs: Vec<(Package, PackageOpts<'_>, FileLock)> = Vec::new();
    for (pkg, cli_features) in sorted_pkgs {
        let opts = PackageOpts {
            cli_features: cli_features.clone(),
            to_package: ops::Packages::Default,
            ..opts.clone()
        };
        let ar_files = prepare_archive(ws, &pkg, &opts)?;

        if opts.list {
            match opts.fmt {
                PackageMessageFormat::Human => {
                    for ar_file in &ar_files {
                        drop_println!(ws.gctx(), "{}", ar_file.rel_str);
                    }
                }
                PackageMessageFormat::Json => {
                    let message = messages::PackageList {
                        id: pkg.package_id().to_spec(),
                        files: BTreeMap::from_iter(ar_files.into_iter().map(|f| {
                            let file = match f.contents {
                                FileContents::OnDisk(path) => messages::PackageFile::Copy { path },
                                FileContents::Generated(
                                    GeneratedFile::Manifest(path)
                                    | GeneratedFile::Lockfile(Some(path)),
                                ) => messages::PackageFile::Generate { path: Some(path) },
                                FileContents::Generated(
                                    GeneratedFile::VcsInfo(_) | GeneratedFile::Lockfile(None),
                                ) => messages::PackageFile::Generate { path: None },
                            };
                            (f.rel_path, file)
                        })),
                    };
                    let _ = ws.gctx().shell().print_json(&message);
                }
            }
        } else {
            let tarball = create_package(ws, &opts, &pkg, ar_files, local_reg.as_ref())?;
            if let Some(local_reg) = local_reg.as_mut() {
                // `publish = []` means publishing is disabled; keep such
                // packages out of the overlay registry.
                if pkg.publish() != &Some(Vec::new()) {
                    local_reg.add_package(ws, &pkg, &tarball)?;
                }
            }
            outputs.push((pkg, opts, tarball));
        }
    }

    // Verify after everything is packaged so verification can resolve against
    // the complete overlay.
    if opts.verify {
        for (pkg, opts, tarball) in &outputs {
            verify::run_verify(ws, pkg, tarball, local_reg.as_ref(), opts)
                .context("failed to verify package tarball")?
        }
    }

    Ok(outputs)
}
363
364fn get_registry(
371 gctx: &GlobalContext,
372 pkgs: &[&Package],
373 reg_or_index: Option<RegistryOrIndex>,
374) -> CargoResult<SourceId> {
375 let reg_or_index = match reg_or_index.clone() {
376 Some(r) => Some(r),
377 None => infer_registry(pkgs)?,
378 };
379
380 let reg = reg_or_index
382 .clone()
383 .unwrap_or_else(|| RegistryOrIndex::Registry(CRATES_IO_REGISTRY.to_owned()));
384 if let RegistryOrIndex::Registry(reg_name) = reg {
385 for pkg in pkgs {
386 if let Some(allowed) = pkg.publish().as_ref() {
387 if !allowed.is_empty() && !allowed.iter().any(|a| a == ®_name) {
391 bail!(
392 "`{}` cannot be packaged.\n\
393 The registry `{}` is not listed in the `package.publish` value in Cargo.toml.",
394 pkg.name(),
395 reg_name
396 );
397 }
398 }
399 }
400 }
401 Ok(ops::registry::get_source_id(gctx, reg_or_index.as_ref())?.replacement)
402}
403
/// The packages being packaged together with their in-workspace ("local")
/// dependency graph; `T` is an arbitrary per-package payload.
#[derive(Clone, Debug, Default)]
pub(crate) struct LocalDependencies<T> {
    pub packages: HashMap<PackageId, (Package, T)>,
    /// Edges point from a package to its local (path) dependencies.
    pub graph: Graph<PackageId, ()>,
}
410
411impl<T: Clone> LocalDependencies<T> {
412 pub fn sort(&self) -> Vec<(Package, T)> {
413 self.graph
414 .sort()
415 .into_iter()
416 .map(|name| self.packages[&name].clone())
417 .collect()
418 }
419
420 pub fn has_dependencies(&self) -> bool {
421 self.graph
422 .iter()
423 .any(|node| self.graph.edges(node).next().is_some())
424 }
425}
426
427fn local_deps<T>(packages: impl Iterator<Item = (Package, T)>) -> LocalDependencies<T> {
432 let packages: HashMap<PackageId, (Package, T)> = packages
433 .map(|(pkg, payload)| (pkg.package_id(), (pkg, payload)))
434 .collect();
435
436 let source_to_pkg: HashMap<_, _> = packages
441 .keys()
442 .map(|pkg_id| (pkg_id.source_id(), *pkg_id))
443 .collect();
444
445 let mut graph = Graph::new();
446 for (pkg, _payload) in packages.values() {
447 graph.add(pkg.package_id());
448 for dep in pkg.dependencies() {
449 if !dep.source_id().is_path() {
451 continue;
452 }
453
454 if dep.kind() == DepKind::Development && !dep.specified_req() {
457 continue;
458 };
459
460 if dep.source_id() == pkg.package_id().source_id() {
462 continue;
463 }
464
465 if let Some(dep_pkg) = source_to_pkg.get(&dep.source_id()) {
466 graph.link(pkg.package_id(), *dep_pkg);
467 }
468 }
469 }
470
471 LocalDependencies { packages, graph }
472}
473
/// Loads the package's source files, runs pre-archive checks, and produces the
/// list of entries that will go into the tarball.
#[tracing::instrument(skip_all)]
fn prepare_archive(
    ws: &Workspace<'_>,
    pkg: &Package,
    opts: &PackageOpts<'_>,
) -> CargoResult<Vec<ArchiveFile>> {
    let gctx = ws.gctx();
    let mut src = PathSource::new(pkg.root(), pkg.package_id().source_id(), gctx);
    src.load()?;

    if opts.check_metadata {
        check_metadata(pkg, gctx)?;
    }

    // `package.include` takes precedence over `package.exclude` when both are
    // present.
    if !pkg.manifest().exclude().is_empty() && !pkg.manifest().include().is_empty() {
        gctx.shell().warn(
            "both package.include and package.exclude are specified; \
             the exclude list will be ignored",
        )?;
    }
    let src_files = src.list_files(pkg)?;

    // Record (or reject, depending on `allow_dirty`) the VCS state.
    let vcs_info = vcs::check_repo_state(pkg, &src_files, ws, &opts)?;

    build_ar_list(ws, pkg, src_files, vcs_info, opts.include_lockfile)
}
501
/// Builds the full list of archive entries: the on-disk source files plus the
/// generated `Cargo.toml`, `Cargo.toml.orig`, `Cargo.lock`, and
/// `.cargo_vcs_info.json` entries, sorted by path.
#[tracing::instrument(skip_all)]
fn build_ar_list(
    ws: &Workspace<'_>,
    pkg: &Package,
    src_files: Vec<PathEntry>,
    vcs_info: Option<vcs::VcsInfo>,
    include_lockfile: bool,
) -> CargoResult<Vec<ArchiveFile>> {
    // Keyed case-insensitively so files differing only by case (which would
    // collide on case-insensitive filesystems) land in the same bucket.
    let mut result = HashMap::new();
    let root = pkg.root();
    for src_file in &src_files {
        let rel_path = src_file.strip_prefix(&root)?;
        check_filename(rel_path, &mut ws.gctx().shell())?;
        let rel_str = rel_path.to_str().ok_or_else(|| {
            anyhow::format_err!("non-utf8 path in source directory: {}", rel_path.display())
        })?;
        match rel_str {
            // The lockfile is re-generated below when requested.
            "Cargo.lock" => continue,
            VCS_INFO_FILE | ORIGINAL_MANIFEST_FILE => anyhow::bail!(
                "invalid inclusion of reserved file name {} in package source",
                rel_str
            ),
            _ => {
                result
                    .entry(UncasedAscii::new(rel_str))
                    .or_insert_with(Vec::new)
                    .push(ArchiveFile {
                        rel_path: rel_path.to_owned(),
                        rel_str: rel_str.to_owned(),
                        contents: FileContents::OnDisk(src_file.to_path_buf()),
                    });
            }
        }
    }

    // Archive the original manifest as `Cargo.toml.orig` and a normalized,
    // generated manifest as `Cargo.toml`.
    if result.remove(&UncasedAscii::new("Cargo.toml")).is_some() {
        result
            .entry(UncasedAscii::new(ORIGINAL_MANIFEST_FILE))
            .or_insert_with(Vec::new)
            .push(ArchiveFile {
                rel_path: PathBuf::from(ORIGINAL_MANIFEST_FILE),
                rel_str: ORIGINAL_MANIFEST_FILE.to_string(),
                contents: FileContents::OnDisk(pkg.manifest_path().to_owned()),
            });
        result
            .entry(UncasedAscii::new("Cargo.toml"))
            .or_insert_with(Vec::new)
            .push(ArchiveFile {
                rel_path: PathBuf::from("Cargo.toml"),
                rel_str: "Cargo.toml".to_string(),
                contents: FileContents::Generated(GeneratedFile::Manifest(
                    pkg.manifest_path().to_owned(),
                )),
            });
    } else {
        ws.gctx().shell().warn(&format!(
            "no `Cargo.toml` file found when packaging `{}` (note the case of the file name).",
            pkg.name()
        ))?;
    }

    if include_lockfile {
        // `None` here means the lockfile will be generated from scratch
        // rather than based on the workspace lockfile.
        let lockfile_path = ws.lock_root().as_path_unlocked().join(LOCKFILE_NAME);
        let lockfile_path = lockfile_path.exists().then_some(lockfile_path);
        let rel_str = "Cargo.lock";
        result
            .entry(UncasedAscii::new(rel_str))
            .or_insert_with(Vec::new)
            .push(ArchiveFile {
                rel_path: PathBuf::from(rel_str),
                rel_str: rel_str.to_string(),
                contents: FileContents::Generated(GeneratedFile::Lockfile(lockfile_path)),
            });
    }

    if let Some(vcs_info) = vcs_info {
        let rel_str = VCS_INFO_FILE;
        result
            .entry(UncasedAscii::new(rel_str))
            .or_insert_with(Vec::new)
            .push(ArchiveFile {
                rel_path: PathBuf::from(rel_str),
                rel_str: rel_str.to_string(),
                contents: FileContents::Generated(GeneratedFile::VcsInfo(vcs_info)),
            });
    }

    // Collected error messages for manifest fields pointing at missing files.
    let mut invalid_manifest_field: Vec<String> = vec![];

    let mut result = result.into_values().flatten().collect();
    if let Some(license_file) = &pkg.manifest().metadata().license_file {
        let license_path = Path::new(license_file);
        let abs_file_path = paths::normalize_path(&pkg.root().join(license_path));
        if abs_file_path.is_file() {
            check_for_file_and_add(
                "license-file",
                license_path,
                abs_file_path,
                pkg,
                &mut result,
                ws,
            )?;
        } else {
            error_on_nonexistent_file(
                &pkg,
                &license_path,
                "license-file",
                &mut invalid_manifest_field,
            );
        }
    }
    if let Some(readme) = &pkg.manifest().metadata().readme {
        let readme_path = Path::new(readme);
        let abs_file_path = paths::normalize_path(&pkg.root().join(readme_path));
        if abs_file_path.is_file() {
            check_for_file_and_add("readme", readme_path, abs_file_path, pkg, &mut result, ws)?;
        } else {
            error_on_nonexistent_file(&pkg, &readme_path, "readme", &mut invalid_manifest_field);
        }
    }

    if !invalid_manifest_field.is_empty() {
        return Err(anyhow::anyhow!(invalid_manifest_field.join("\n")));
    }

    // A build script outside the package root cannot be packaged faithfully;
    // reject it.
    for t in pkg
        .manifest()
        .targets()
        .iter()
        .filter(|t| t.is_custom_build())
    {
        if let Some(custom_build_path) = t.src_path().path() {
            let abs_custom_build_path = paths::normalize_path(&pkg.root().join(custom_build_path));
            if !abs_custom_build_path.is_file() || !abs_custom_build_path.starts_with(pkg.root()) {
                error_custom_build_file_not_in_package(pkg, &abs_custom_build_path, t)?;
            }
        }
    }

    // Deterministic ordering of archive entries.
    result.sort_unstable_by(|a, b| a.rel_path.cmp(&b.rel_path));

    Ok(result)
}
648
/// Ensures the file named by a manifest field (`license-file` or `readme`) is
/// included in the archive list, adding it if it is not already present.
fn check_for_file_and_add(
    label: &str,
    file_path: &Path,
    abs_file_path: PathBuf,
    pkg: &Package,
    result: &mut Vec<ArchiveFile>,
    ws: &Workspace<'_>,
) -> CargoResult<()> {
    match abs_file_path.strip_prefix(&pkg.root()) {
        Ok(rel_file_path) => {
            // File lives inside the package: add it unless already listed.
            if !result.iter().any(|ar| ar.rel_path == rel_file_path) {
                result.push(ArchiveFile {
                    rel_path: rel_file_path.to_path_buf(),
                    rel_str: rel_file_path
                        .to_str()
                        .expect("everything was utf8")
                        .to_string(),
                    contents: FileContents::OnDisk(abs_file_path),
                })
            }
        }
        Err(_) => {
            // File lives outside the package: copy it into the archive root,
            // unless a file of the same name already exists there.
            let file_name = file_path.file_name().unwrap();
            if result.iter().any(|ar| ar.rel_path == file_name) {
                ws.gctx().shell().warn(&format!(
                    "{} `{}` appears to be a path outside of the package, \
                     but there is already a file named `{}` in the root of the package. \
                     The archived crate will contain the copy in the root of the package. \
                     Update the {} to point to the path relative \
                     to the root of the package to remove this warning.",
                    label,
                    file_path.display(),
                    file_name.to_str().unwrap(),
                    label,
                ))?;
            } else {
                result.push(ArchiveFile {
                    rel_path: PathBuf::from(file_name),
                    rel_str: file_name.to_str().unwrap().to_string(),
                    contents: FileContents::OnDisk(abs_file_path),
                })
            }
        }
    }
    Ok(())
}
696
697fn error_on_nonexistent_file(
698 pkg: &Package,
699 path: &Path,
700 manifest_key_name: &'static str,
701 invalid: &mut Vec<String>,
702) {
703 let rel_msg = if path.is_absolute() {
704 "".to_string()
705 } else {
706 format!(" (relative to `{}`)", pkg.root().display())
707 };
708
709 let msg = format!(
710 "{manifest_key_name} `{}` does not appear to exist{}.\n\
711 Please update the {manifest_key_name} setting in the manifest at `{}`.",
712 path.display(),
713 rel_msg,
714 pkg.manifest_path().display()
715 );
716
717 invalid.push(msg);
718}
719
720fn error_custom_build_file_not_in_package(
721 pkg: &Package,
722 path: &Path,
723 target: &Target,
724) -> CargoResult<Vec<ArchiveFile>> {
725 let tip = {
726 let description_name = target.description_named();
727 if path.is_file() {
728 format!(
729 "the source file of {description_name} doesn't appear to be a path inside of the package.\n\
730 It is at `{}`, whereas the root the package is `{}`.\n",
731 path.display(),
732 pkg.root().display()
733 )
734 } else {
735 format!("the source file of {description_name} doesn't appear to exist.\n",)
736 }
737 };
738 let msg = format!(
739 "{}\
740 This may cause issue during packaging, as modules resolution and resources included via macros are often relative to the path of source files.\n\
741 Please update the `build` setting in the manifest at `{}` and point to a path inside the root of the package.",
742 tip,
743 pkg.manifest_path().display()
744 );
745 anyhow::bail!(msg)
746}
747
/// Generates the `Cargo.lock` contents for the publish-ready copy of the
/// package, resolving in an ephemeral workspace (optionally against the
/// temporary registry overlay for not-yet-published local dependencies).
fn build_lock(
    ws: &Workspace<'_>,
    opts: &PackageOpts<'_>,
    publish_pkg: &Package,
    local_reg: Option<&TmpRegistry<'_>>,
) -> CargoResult<String> {
    let gctx = ws.gctx();
    let mut orig_resolve = ops::load_pkg_lockfile(ws)?;

    // Resolve in an ephemeral workspace containing only the publish copy.
    let mut tmp_ws = Workspace::ephemeral(publish_pkg.clone(), ws.gctx(), None, true)?;

    if let Some(local_reg) = local_reg {
        // Serve just-packaged local dependencies from the overlay registry.
        tmp_ws.add_local_overlay(
            local_reg.upstream,
            local_reg.root.as_path_unlocked().to_owned(),
        );
        if opts.dry_run {
            if let Some(orig_resolve) = orig_resolve.as_mut() {
                let upstream_in_lock = if local_reg.upstream.is_crates_io() {
                    SourceId::crates_io(gctx)?
                } else {
                    local_reg.upstream
                };
                // In a dry run nothing is uploaded, so patch the locally
                // computed checksums into the previous resolve.
                for (p, s) in local_reg.checksums() {
                    orig_resolve.set_checksum(p.with_source_id(upstream_in_lock), s.to_owned());
                }
            }
        }
    }
    let mut tmp_reg = tmp_ws.package_registry()?;

    let mut new_resolve = ops::resolve_with_previous(
        &mut tmp_reg,
        &tmp_ws,
        &CliFeatures::new_all(true),
        HasDevUnits::Yes,
        orig_resolve.as_ref(),
        None,
        &[],
        true,
    )?;

    let pkg_set = ops::get_resolved_packages(&new_resolve, tmp_reg)?;

    if let Some(orig_resolve) = orig_resolve {
        // In verbose mode, report packages that changed vs. the old lockfile.
        compare_resolve(gctx, tmp_ws.current()?, &orig_resolve, &new_resolve)?;
    }
    check_yanked(
        gctx,
        &pkg_set,
        &new_resolve,
        "consider updating to a version that is not yanked",
    )?;

    ops::resolve_to_string(&tmp_ws, &mut new_resolve)
}
808
/// Warns when commonly expected `[package]` metadata fields are missing.
fn check_metadata(pkg: &Package, gctx: &GlobalContext) -> CargoResult<()> {
    let md = pkg.manifest().metadata();

    let mut missing = vec![];

    // Records every field of a group as missing when all fields in the group
    // (`||`-separated alternatives) are absent or empty.
    macro_rules! lacking {
        ($( $($field: ident)||* ),*) => {{
            $(
                if $(md.$field.as_ref().map_or(true, |s| s.is_empty()))&&* {
                    $(missing.push(stringify!($field).replace("_", "-"));)*
                }
            )*
        }}
    }
    lacking!(
        description,
        license || license_file,
        documentation || homepage || repository
    );

    if !missing.is_empty() {
        // Join the names as "a, b or c".
        let mut things = missing[..missing.len() - 1].join(", ");
        if !things.is_empty() {
            things.push_str(" or ");
        }
        things.push_str(missing.last().unwrap());

        gctx.shell().print_report(&[
            Level::WARNING.secondary_title(format!("manifest has no {things}"))
                .element(Level::NOTE.message("see https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info"))
            ],
            false
        )?
    }

    Ok(())
}
850
/// Writes all `ar_files` into a gzip-compressed tar stream at `dst` and
/// returns the total uncompressed size of the archived contents.
fn tar(
    ws: &Workspace<'_>,
    opts: &PackageOpts<'_>,
    pkg: &Package,
    local_reg: Option<&TmpRegistry<'_>>,
    ar_files: Vec<ArchiveFile>,
    dst: &File,
    filename: &str,
) -> CargoResult<u64> {
    let filename = Path::new(filename);
    let encoder = GzBuilder::new()
        .filename(paths::path2bytes(filename)?)
        .write(dst, Compression::best());

    let mut ar = Builder::new(encoder);
    // Sparse entries would complicate consumers; write dense data.
    ar.sparse(false);
    let gctx = ws.gctx();

    // All entries are placed under a `<name>-<version>/` prefix.
    let base_name = format!("{}-{}", pkg.name(), pkg.version());
    let base_path = Path::new(&base_name);
    let included = ar_files
        .iter()
        .map(|ar_file| ar_file.rel_path.clone())
        .collect::<Vec<_>>();
    let publish_pkg = prepare_for_publish(pkg, ws, Some(&included))?;

    let mut uncompressed_size = 0;
    for ar_file in ar_files {
        let ArchiveFile {
            rel_path,
            rel_str,
            contents,
        } = ar_file;
        let ar_path = base_path.join(&rel_path);
        gctx.shell()
            .verbose(|shell| shell.status("Archiving", &rel_str))?;
        let mut header = Header::new_gnu();
        match contents {
            FileContents::OnDisk(disk_path) => {
                let mut file = File::open(&disk_path).with_context(|| {
                    format!("failed to open for archiving: `{}`", disk_path.display())
                })?;
                let metadata = file.metadata().with_context(|| {
                    format!("could not learn metadata for: `{}`", disk_path.display())
                })?;
                // Deterministic mode normalizes ownership/timestamps so the
                // archive is reproducible.
                header.set_metadata_in_mode(&metadata, HeaderMode::Deterministic);
                header.set_cksum();
                ar.append_data(&mut header, &ar_path, &mut file)
                    .with_context(|| {
                        format!("could not archive source file `{}`", disk_path.display())
                    })?;
                uncompressed_size += metadata.len() as u64;
            }
            FileContents::Generated(generated_kind) => {
                let contents = match generated_kind {
                    GeneratedFile::Manifest(_) => {
                        publish_pkg.manifest().to_normalized_contents()?
                    }
                    GeneratedFile::Lockfile(_) => build_lock(ws, opts, &publish_pkg, local_reg)?,
                    GeneratedFile::VcsInfo(ref s) => serde_json::to_string_pretty(s)?,
                };
                header.set_entry_type(EntryType::file());
                header.set_mode(0o644);
                header.set_size(contents.len() as u64);
                // A fixed, non-zero mtime keeps the archive reproducible.
                header.set_mtime(1);
                header.set_cksum();
                ar.append_data(&mut header, &ar_path, contents.as_bytes())
                    .with_context(|| format!("could not archive source file `{}`", rel_str))?;
                uncompressed_size += contents.len() as u64;
            }
        }
    }

    let encoder = ar.into_inner()?;
    encoder.finish()?;
    Ok(uncompressed_size)
}
934
/// In verbose mode, notes every package in the newly generated lockfile that
/// was not present in the workspace's previous lockfile, describing (when
/// possible) which version or source it replaced.
fn compare_resolve(
    gctx: &GlobalContext,
    current_pkg: &Package,
    orig_resolve: &Resolve,
    new_resolve: &Resolve,
) -> CargoResult<()> {
    if gctx.shell().verbosity() != Verbosity::Verbose {
        return Ok(());
    }
    let new_set: BTreeSet<PackageId> = new_resolve.iter().collect();
    let orig_set: BTreeSet<PackageId> = orig_resolve.iter().collect();
    let added = new_set.difference(&orig_set);
    let removed: Vec<&PackageId> = orig_set.difference(&new_set).collect();
    for pkg_id in added {
        // The package being packaged always differs (path vs. registry
        // source); skip it.
        if pkg_id.name() == current_pkg.name() && pkg_id.version() == current_pkg.version() {
            continue;
        }
        // Entries removed with the same name and version: the source changed.
        let removed_candidates: Vec<&PackageId> = removed
            .iter()
            .filter(|orig_pkg_id| {
                orig_pkg_id.name() == pkg_id.name() && orig_pkg_id.version() == pkg_id.version()
            })
            .cloned()
            .collect();
        let extra = match removed_candidates.len() {
            0 => {
                // No same-version removal: report the version(s) it replaced.
                let previous_versions: Vec<&PackageId> = removed
                    .iter()
                    .filter(|orig_pkg_id| orig_pkg_id.name() == pkg_id.name())
                    .cloned()
                    .collect();
                match previous_versions.len() {
                    0 => String::new(),
                    1 => format!(
                        ", previous version was `{}`",
                        previous_versions[0].version()
                    ),
                    _ => format!(
                        ", previous versions were: {}",
                        previous_versions
                            .iter()
                            .map(|pkg_id| format!("`{}`", pkg_id.version()))
                            .collect::<Vec<_>>()
                            .join(", ")
                    ),
                }
            }
            1 => {
                format!(
                    ", was originally sourced from `{}`",
                    removed_candidates[0].source_id()
                )
            }
            _ => {
                let comma_list = removed_candidates
                    .iter()
                    .map(|pkg_id| format!("`{}`", pkg_id.source_id()))
                    .collect::<Vec<_>>()
                    .join(", ");
                format!(
                    ", was originally sourced from one of these sources: {}",
                    comma_list
                )
            }
        };
        let msg = format!(
            "package `{}` added to the packaged Cargo.lock file{}",
            pkg_id, extra
        );
        gctx.shell().note(msg)?;
    }
    Ok(())
}
1021
/// Warns about every package in `resolve` that has been yanked from its
/// registry, appending `hint` to each warning.
pub fn check_yanked(
    gctx: &GlobalContext,
    pkg_set: &PackageSet<'_>,
    resolve: &Resolve,
    hint: &str,
) -> CargoResult<()> {
    // Querying yanked status may touch the package cache.
    let _lock = gctx.acquire_package_cache_lock(CacheLockMode::DownloadExclusive)?;

    let mut sources = pkg_set.sources_mut();
    let mut pending: Vec<PackageId> = resolve.iter().collect();
    let mut results = Vec::new();
    for (_id, source) in sources.sources_mut() {
        source.invalidate_cache();
    }
    // Sources answer asynchronously: poll every pending package, then block
    // until the sources make progress, repeating until all queries resolve.
    while !pending.is_empty() {
        pending.retain(|pkg_id| {
            if let Some(source) = sources.get_mut(pkg_id.source_id()) {
                match source.is_yanked(*pkg_id) {
                    Poll::Ready(result) => results.push((*pkg_id, result)),
                    Poll::Pending => return true,
                }
            }
            false
        });
        for (_id, source) in sources.sources_mut() {
            source.block_until_ready()?;
        }
    }

    for (pkg_id, is_yanked) in results {
        if is_yanked? {
            gctx.shell().warn(format!(
                "package `{}` in Cargo.lock is yanked in registry `{}`, {}",
                pkg_id,
                pkg_id.source_id().display_registry_name(),
                hint
            ))?;
        }
    }
    Ok(())
}
1065
1066fn check_filename(file: &Path, shell: &mut Shell) -> CargoResult<()> {
1073 let Some(name) = file.file_name() else {
1074 return Ok(());
1075 };
1076 let Some(name) = name.to_str() else {
1077 anyhow::bail!(
1078 "path does not have a unicode filename which may not unpack \
1079 on all platforms: {}",
1080 file.display()
1081 )
1082 };
1083 let bad_chars = ['/', '\\', '<', '>', ':', '"', '|', '?', '*'];
1084 if let Some(c) = bad_chars.iter().find(|c| name.contains(**c)) {
1085 anyhow::bail!(
1086 "cannot package a filename with a special character `{}`: {}",
1087 c,
1088 file.display()
1089 )
1090 }
1091 if restricted_names::is_windows_reserved_path(file) {
1092 shell.warn(format!(
1093 "file {} is a reserved Windows filename, \
1094 it will not work on Windows platforms",
1095 file.display()
1096 ))?;
1097 }
1098 Ok(())
1099}
1100
/// A temporary local registry overlaying `upstream`, used so just-packaged
/// (not yet published) crates can be resolved during lockfile generation and
/// verification.
struct TmpRegistry<'a> {
    gctx: &'a GlobalContext,
    /// The registry this overlay stands in for.
    upstream: SourceId,
    /// Root directory of the overlay: tarballs plus an `index/` subdirectory.
    root: Filesystem,
    /// SHA-256 checksums of the tarballs added so far, keyed by package id.
    checksums: HashMap<PackageId, String>,
    /// Held for the registry's lifetime to keep the directory exclusive.
    _lock: FileLock,
}
1111
1112impl<'a> TmpRegistry<'a> {
1113 fn new(gctx: &'a GlobalContext, root: Filesystem, upstream: SourceId) -> CargoResult<Self> {
1114 root.create_dir()?;
1115 let _lock = root.open_rw_exclusive_create(".cargo-lock", gctx, "temporary registry")?;
1116 let slf = Self {
1117 gctx,
1118 root,
1119 upstream,
1120 checksums: HashMap::new(),
1121 _lock,
1122 };
1123 let index_path = slf.index_path().into_path_unlocked();
1125 if index_path.exists() {
1126 paths::remove_dir_all(index_path)?;
1127 }
1128 slf.index_path().create_dir()?;
1129 Ok(slf)
1130 }
1131
1132 fn index_path(&self) -> Filesystem {
1133 self.root.join("index")
1134 }
1135
1136 fn add_package(
1137 &mut self,
1138 ws: &Workspace<'_>,
1139 package: &Package,
1140 tar: &FileLock,
1141 ) -> CargoResult<()> {
1142 debug!(
1143 "adding package {}@{} to local overlay at {}",
1144 package.name(),
1145 package.version(),
1146 self.root.as_path_unlocked().display()
1147 );
1148 {
1149 let mut tar_copy = self.root.open_rw_exclusive_create(
1150 package.package_id().tarball_name(),
1151 self.gctx,
1152 "temporary package registry",
1153 )?;
1154 tar.file().seek(SeekFrom::Start(0))?;
1155 std::io::copy(&mut tar.file(), &mut tar_copy)?;
1156 tar_copy.flush()?;
1157 }
1158
1159 let new_crate = super::registry::prepare_transmit(self.gctx, ws, package, self.upstream)?;
1160
1161 tar.file().seek(SeekFrom::Start(0))?;
1162 let cksum = cargo_util::Sha256::new()
1163 .update_file(tar.file())?
1164 .finish_hex();
1165
1166 self.checksums.insert(package.package_id(), cksum.clone());
1167
1168 let deps: Vec<_> = new_crate
1169 .deps
1170 .into_iter()
1171 .map(|dep| {
1172 let name = dep
1173 .explicit_name_in_toml
1174 .clone()
1175 .unwrap_or_else(|| dep.name.clone())
1176 .into();
1177 let package = dep
1178 .explicit_name_in_toml
1179 .as_ref()
1180 .map(|_| dep.name.clone().into());
1181 RegistryDependency {
1182 name: name,
1183 req: dep.version_req.into(),
1184 features: dep.features.into_iter().map(|x| x.into()).collect(),
1185 optional: dep.optional,
1186 default_features: dep.default_features,
1187 target: dep.target.map(|x| x.into()),
1188 kind: Some(dep.kind.into()),
1189 registry: dep.registry.map(|x| x.into()),
1190 package: package,
1191 public: None,
1192 artifact: dep
1193 .artifact
1194 .map(|xs| xs.into_iter().map(|x| x.into()).collect()),
1195 bindep_target: dep.bindep_target.map(|x| x.into()),
1196 lib: dep.lib,
1197 }
1198 })
1199 .collect();
1200
1201 let index_line = serde_json::to_string(&IndexPackage {
1202 name: new_crate.name.into(),
1203 vers: package.version().clone(),
1204 deps,
1205 features: new_crate
1206 .features
1207 .into_iter()
1208 .map(|(k, v)| (k.into(), v.into_iter().map(|x| x.into()).collect()))
1209 .collect(),
1210 features2: None,
1211 cksum,
1212 yanked: None,
1213 links: new_crate.links.map(|x| x.into()),
1214 rust_version: None,
1215 v: Some(2),
1216 })?;
1217
1218 let file =
1219 cargo_util::registry::make_dep_path(&package.name().as_str().to_lowercase(), false);
1220 let mut dst = self.index_path().open_rw_exclusive_create(
1221 file,
1222 self.gctx,
1223 "temporary package registry",
1224 )?;
1225 dst.write_all(index_line.as_bytes())?;
1226 Ok(())
1227 }
1228
1229 fn checksums(&self) -> impl Iterator<Item = (PackageId, &str)> {
1230 self.checksums.iter().map(|(p, s)| (*p, s.as_str()))
1231 }
1232}