1use std::collections::BTreeMap;
2use std::collections::BTreeSet;
3use std::collections::HashMap;
4use std::fs::{self, File};
5use std::io::SeekFrom;
6use std::io::prelude::*;
7use std::path::{Path, PathBuf};
8use std::task::Poll;
9
10use crate::core::PackageIdSpecQuery;
11use crate::core::Shell;
12use crate::core::Verbosity;
13use crate::core::Workspace;
14use crate::core::dependency::DepKind;
15use crate::core::manifest::Target;
16use crate::core::resolver::CliFeatures;
17use crate::core::resolver::HasDevUnits;
18use crate::core::{Package, PackageId, PackageSet, Resolve, SourceId};
19use crate::ops::lockfile::LOCKFILE_NAME;
20use crate::ops::registry::{RegistryOrIndex, infer_registry};
21use crate::sources::path::PathEntry;
22use crate::sources::{CRATES_IO_REGISTRY, PathSource};
23use crate::util::FileLock;
24use crate::util::Filesystem;
25use crate::util::GlobalContext;
26use crate::util::Graph;
27use crate::util::HumanBytes;
28use crate::util::cache_lock::CacheLockMode;
29use crate::util::context::JobsConfig;
30use crate::util::errors::CargoResult;
31use crate::util::errors::ManifestError;
32use crate::util::restricted_names;
33use crate::util::toml::prepare_for_publish;
34use crate::{drop_println, ops};
35use anyhow::{Context as _, bail};
36use cargo_util::paths;
37use cargo_util_schemas::index::{IndexPackage, RegistryDependency};
38use cargo_util_schemas::messages;
39use flate2::{Compression, GzBuilder};
40use tar::{Builder, EntryType, Header, HeaderMode};
41use tracing::debug;
42use unicase::Ascii as UncasedAscii;
43
44mod vcs;
45mod verify;
46
/// Output format for messages emitted while packaging (e.g. `--list`).
#[derive(Debug, Clone)]
pub enum PackageMessageFormat {
    /// Plain text for human consumption.
    Human,
    /// Machine-readable JSON.
    Json,
}
55
impl PackageMessageFormat {
    /// Values accepted by [`PackageMessageFormat::from_str`].
    pub const POSSIBLE_VALUES: [&str; 2] = ["human", "json"];

    /// Format used when none is specified on the command line.
    pub const DEFAULT: &str = "human";
}
61
62impl std::str::FromStr for PackageMessageFormat {
63 type Err = anyhow::Error;
64
65 fn from_str(s: &str) -> Result<PackageMessageFormat, anyhow::Error> {
66 match s {
67 "human" => Ok(PackageMessageFormat::Human),
68 "json" => Ok(PackageMessageFormat::Json),
69 f => bail!("unknown message format `{f}`"),
70 }
71 }
72}
73
/// Options controlling a `cargo package` / `cargo publish` packaging run.
#[derive(Clone)]
pub struct PackageOpts<'gctx> {
    pub gctx: &'gctx GlobalContext,
    /// Only list the files that would be archived instead of building a tarball.
    pub list: bool,
    /// Output format used when `list` is set.
    pub fmt: PackageMessageFormat,
    /// Warn when recommended manifest metadata (description, license, ...) is missing.
    pub check_metadata: bool,
    /// Proceed even if the VCS working directory is dirty
    /// (consumed by `vcs::check_repo_state` — see that module).
    pub allow_dirty: bool,
    /// Include a `Cargo.lock` file inside the archive.
    pub include_lockfile: bool,
    /// Build the packaged crate afterwards to verify the tarball.
    pub verify: bool,
    pub jobs: Option<JobsConfig>,
    pub keep_going: bool,
    /// Selection of which workspace packages to package.
    pub to_package: ops::Packages,
    pub targets: Vec<String>,
    pub cli_features: CliFeatures,
    /// Registry (or index URL) the package is being prepared for.
    pub reg_or_index: Option<ops::RegistryOrIndex>,
    /// Don't persist side effects beyond what's needed to produce the tarball.
    pub dry_run: bool,
}
103
/// Archive name under which the original (pre-normalization) manifest is stored.
const ORIGINAL_MANIFEST_FILE: &str = "Cargo.toml.orig";
/// Archive name of the generated file recording the VCS state at packaging time.
const VCS_INFO_FILE: &str = ".cargo_vcs_info.json";
106
/// A single file destined for the package archive.
struct ArchiveFile {
    /// The file's path within the archive, relative to the package root.
    rel_path: PathBuf,
    /// Same path as `rel_path`, pre-validated to be UTF-8.
    rel_str: String,
    /// Where the file's bytes come from.
    contents: FileContents,
}
116
/// Source of an archived file's contents.
enum FileContents {
    /// Copied verbatim from this on-disk path.
    OnDisk(PathBuf),
    /// Generated by cargo at packaging time.
    Generated(GeneratedFile),
}
123
/// Files that cargo generates (rather than copies) into the archive.
enum GeneratedFile {
    /// The normalized `Cargo.toml`; the path points at the original manifest
    /// it is derived from.
    Manifest(PathBuf),
    /// `Cargo.lock`; the path is the pre-existing workspace lockfile, `None`
    /// if none existed on disk.
    Lockfile(Option<PathBuf>),
    /// `.cargo_vcs_info.json` describing the repository state.
    VcsInfo(vcs::VcsInfo),
}
136
137#[tracing::instrument(skip_all)]
139fn create_package(
140 ws: &Workspace<'_>,
141 opts: &PackageOpts<'_>,
142 pkg: &Package,
143 ar_files: Vec<ArchiveFile>,
144 local_reg: Option<&TmpRegistry<'_>>,
145) -> CargoResult<FileLock> {
146 let gctx = ws.gctx();
147 let filecount = ar_files.len();
148
149 for dep in pkg.dependencies() {
151 super::check_dep_has_version(dep, false).map_err(|err| {
152 ManifestError::new(
153 err.context(format!(
154 "failed to verify manifest at `{}`",
155 pkg.manifest_path().display()
156 )),
157 pkg.manifest_path().into(),
158 )
159 })?;
160 }
161
162 let filename = pkg.package_id().tarball_name();
163 let dir = ws.build_dir().join("package");
164 let mut dst = {
165 let tmp = format!(".{}", filename);
166 dir.open_rw_exclusive_create(&tmp, gctx, "package scratch space")?
167 };
168
169 gctx.shell()
174 .status("Packaging", pkg.package_id().to_string())?;
175 dst.file().set_len(0)?;
176 let uncompressed_size = tar(ws, opts, pkg, local_reg, ar_files, dst.file(), &filename)
177 .context("failed to prepare local package for uploading")?;
178
179 dst.seek(SeekFrom::Start(0))?;
180 let src_path = dst.path();
181 let dst_path = dst.parent().join(&filename);
182 fs::rename(&src_path, &dst_path)
183 .context("failed to move temporary tarball into final location")?;
184
185 let dst_metadata = dst
186 .file()
187 .metadata()
188 .with_context(|| format!("could not learn metadata for: `{}`", dst_path.display()))?;
189 let compressed_size = dst_metadata.len();
190
191 let uncompressed = HumanBytes(uncompressed_size);
192 let compressed = HumanBytes(compressed_size);
193
194 let message = format!("{filecount} files, {uncompressed:.1} ({compressed:.1} compressed)");
195 drop(gctx.shell().status("Packaged", message));
197
198 return Ok(dst);
199}
200
/// Packages the requested workspace members and returns a lock on each
/// produced `.crate` file, uplifted into `<target-dir>/package`.
pub fn package(ws: &Workspace<'_>, opts: &PackageOpts<'_>) -> CargoResult<Vec<FileLock>> {
    let specs = &opts.to_package.to_package_id_specs(ws)?;
    // If explicit `-p` specs were given, error out early on any spec that
    // matches no workspace member.
    if let ops::Packages::Packages(_) = opts.to_package {
        for spec in specs.iter() {
            let member_ids = ws.members().map(|p| p.package_id());
            spec.query(member_ids)?;
        }
    }
    let mut pkgs = ws.members_with_features(specs, &opts.cli_features)?;

    // Keep only the members actually selected by the specs.
    pkgs.retain(|(pkg, _feats)| specs.iter().any(|spec| spec.matches(pkg.package_id())));

    let packaged = do_package(ws, opts, pkgs)?;

    let mut result = Vec::new();
    let target_dir = ws.target_dir();
    let build_dir = ws.build_dir();
    if target_dir == build_dir {
        // Tarballs were already created in their final location.
        result.extend(packaged.into_iter().map(|(_, _, src)| src));
    } else {
        // Separate build dir: copy (uplift) each tarball into
        // `<target-dir>/package`.
        let artifact_dir = target_dir.join("package");
        for (pkg, _, src) in packaged {
            let filename = pkg.package_id().tarball_name();
            let dst =
                artifact_dir.open_rw_exclusive_create(filename, ws.gctx(), "uplifted package")?;
            // Rewind first — the source file's cursor is at EOF after packaging.
            src.file().seek(SeekFrom::Start(0))?;
            std::io::copy(&mut src.file(), &mut dst.file())?;
            result.push(dst);
        }
    }

    Ok(result)
}
242
243pub(crate) fn package_with_dep_graph(
249 ws: &Workspace<'_>,
250 opts: &PackageOpts<'_>,
251 pkgs: Vec<(&Package, CliFeatures)>,
252) -> CargoResult<LocalDependencies<(CliFeatures, FileLock)>> {
253 let output = do_package(ws, opts, pkgs)?;
254
255 Ok(local_deps(output.into_iter().map(
256 |(pkg, opts, tarball)| (pkg, (opts.cli_features, tarball)),
257 )))
258}
259
/// Core packaging loop: packages (or lists) every package in `pkgs`,
/// returning `(package, effective options, tarball)` for each one built.
fn do_package<'a>(
    ws: &Workspace<'_>,
    opts: &PackageOpts<'a>,
    pkgs: Vec<(&Package, CliFeatures)>,
) -> CargoResult<Vec<(Package, PackageOpts<'a>, FileLock)>> {
    // If a lockfile exists and will be shipped, resolve the workspace first
    // so the lockfile on disk is up to date.
    if ws
        .lock_root()
        .as_path_unlocked()
        .join(LOCKFILE_NAME)
        .exists()
        && opts.include_lockfile
    {
        let dry_run = false;
        let _ = ops::resolve_ws(ws, dry_run)?;
    }

    // Graph of local (path) dependencies between the packages being packaged.
    let deps = local_deps(pkgs.iter().map(|(p, f)| ((*p).clone(), f.clone())));
    let just_pkgs: Vec<_> = pkgs.iter().map(|p| p.0).collect();

    // A temporary overlay registry is only needed when the packages depend on
    // each other (and we must lock/verify against the fresh tarballs), or when
    // an explicit registry/index was requested.
    let mut local_reg = {
        let sid = if (deps.has_dependencies() && (opts.include_lockfile || opts.verify))
            || opts.reg_or_index.is_some()
        {
            let sid = get_registry(ws.gctx(), &just_pkgs, opts.reg_or_index.clone())?;
            debug!("packaging for registry {}", sid);
            Some(sid)
        } else {
            None
        };
        let reg_dir = ws.build_dir().join("package").join("tmp-registry");
        sid.map(|sid| TmpRegistry::new(ws.gctx(), reg_dir, sid))
            .transpose()?
    };

    // Process in dependency order so each package's local dependencies are
    // already present in the overlay registry when it is packaged.
    let sorted_pkgs = deps.sort();
    let mut outputs: Vec<(Package, PackageOpts<'_>, FileLock)> = Vec::new();
    for (pkg, cli_features) in sorted_pkgs {
        // Each package gets its own feature selection; `to_package` is reset
        // since we're operating on exactly this package now.
        let opts = PackageOpts {
            cli_features: cli_features.clone(),
            to_package: ops::Packages::Default,
            ..opts.clone()
        };
        let ar_files = prepare_archive(ws, &pkg, &opts)?;

        if opts.list {
            // `--list` mode: report the archive contents, build nothing.
            match opts.fmt {
                PackageMessageFormat::Human => {
                    for ar_file in &ar_files {
                        drop_println!(ws.gctx(), "{}", ar_file.rel_str);
                    }
                }
                PackageMessageFormat::Json => {
                    let message = messages::PackageList {
                        id: pkg.package_id().to_spec(),
                        files: BTreeMap::from_iter(ar_files.into_iter().map(|f| {
                            let file = match f.contents {
                                FileContents::OnDisk(path) => messages::PackageFile::Copy { path },
                                FileContents::Generated(
                                    GeneratedFile::Manifest(path)
                                    | GeneratedFile::Lockfile(Some(path)),
                                ) => messages::PackageFile::Generate { path: Some(path) },
                                FileContents::Generated(
                                    GeneratedFile::VcsInfo(_) | GeneratedFile::Lockfile(None),
                                ) => messages::PackageFile::Generate { path: None },
                            };
                            (f.rel_path, file)
                        })),
                    };
                    let _ = ws.gctx().shell().print_json(&message);
                }
            }
        } else {
            let tarball = create_package(ws, &opts, &pkg, ar_files, local_reg.as_ref())?;
            if let Some(local_reg) = local_reg.as_mut() {
                // `publish = []` marks a package as unpublishable; keep it
                // out of the overlay registry.
                if pkg.publish() != &Some(Vec::new()) {
                    local_reg.add_package(ws, &pkg, &tarball)?;
                }
            }
            outputs.push((pkg, opts, tarball));
        }
    }

    // Verify after everything is packaged so local dependencies can be
    // resolved from the overlay registry.
    if opts.verify {
        for (pkg, opts, tarball) in &outputs {
            verify::run_verify(ws, pkg, tarball, local_reg.as_ref(), opts)
                .context("failed to verify package tarball")?
        }
    }

    Ok(outputs)
}
364
365fn get_registry(
372 gctx: &GlobalContext,
373 pkgs: &[&Package],
374 reg_or_index: Option<RegistryOrIndex>,
375) -> CargoResult<SourceId> {
376 let reg_or_index = match reg_or_index.clone() {
377 Some(r) => Some(r),
378 None => infer_registry(pkgs)?,
379 };
380
381 let reg = reg_or_index
383 .clone()
384 .unwrap_or_else(|| RegistryOrIndex::Registry(CRATES_IO_REGISTRY.to_owned()));
385 if let RegistryOrIndex::Registry(reg_name) = reg {
386 for pkg in pkgs {
387 if let Some(allowed) = pkg.publish().as_ref() {
388 if !allowed.is_empty() && !allowed.iter().any(|a| a == ®_name) {
392 bail!(
393 "`{}` cannot be packaged.\n\
394 The registry `{}` is not listed in the `package.publish` value in Cargo.toml.",
395 pkg.name(),
396 reg_name
397 );
398 }
399 }
400 }
401 }
402 Ok(ops::registry::get_source_id(gctx, reg_or_index.as_ref())?.replacement)
403}
404
/// A set of packages along with the local dependency graph between them.
#[derive(Clone, Debug, Default)]
pub(crate) struct LocalDependencies<T> {
    /// Each package keyed by its id, paired with a caller-supplied payload.
    pub packages: HashMap<PackageId, (Package, T)>,
    /// Edges go from a package to the local packages it depends on
    /// (built by `local_deps`).
    pub graph: Graph<PackageId, ()>,
}
411
412impl<T: Clone> LocalDependencies<T> {
413 pub fn sort(&self) -> Vec<(Package, T)> {
414 self.graph
415 .sort()
416 .into_iter()
417 .map(|name| self.packages[&name].clone())
418 .collect()
419 }
420
421 pub fn has_dependencies(&self) -> bool {
422 self.graph
423 .iter()
424 .any(|node| self.graph.edges(node).next().is_some())
425 }
426}
427
/// Builds the graph of local dependencies between the given packages.
///
/// Only path dependencies that point at another package in the input set
/// create edges; registry dependencies are ignored.
fn local_deps<T>(packages: impl Iterator<Item = (Package, T)>) -> LocalDependencies<T> {
    let packages: HashMap<PackageId, (Package, T)> = packages
        .map(|(pkg, payload)| (pkg.package_id(), (pkg, payload)))
        .collect();

    // Map each package's source id (its path, for local packages) back to its
    // package id, so a path dependency can be matched to the member it names.
    let source_to_pkg: HashMap<_, _> = packages
        .keys()
        .map(|pkg_id| (pkg_id.source_id(), *pkg_id))
        .collect();

    let mut graph = Graph::new();
    for (pkg, _payload) in packages.values() {
        graph.add(pkg.package_id());
        for dep in pkg.dependencies() {
            // Only path dependencies can refer to other local packages.
            if !dep.source_id().is_path() {
                continue;
            }

            // Dev-dependencies without a version requirement are presumably
            // stripped from the published manifest, so they don't constrain
            // packaging order — NOTE(review): confirm against prepare_for_publish.
            if dep.kind() == DepKind::Development && !dep.specified_req() {
                continue;
            };

            // Skip self-references.
            if dep.source_id() == pkg.package_id().source_id() {
                continue;
            }

            if let Some(dep_pkg) = source_to_pkg.get(&dep.source_id()) {
                graph.link(pkg.package_id(), *dep_pkg);
            }
        }
    }

    LocalDependencies { packages, graph }
}
474
475#[tracing::instrument(skip_all)]
477fn prepare_archive(
478 ws: &Workspace<'_>,
479 pkg: &Package,
480 opts: &PackageOpts<'_>,
481) -> CargoResult<Vec<ArchiveFile>> {
482 let gctx = ws.gctx();
483 let mut src = PathSource::new(pkg.root(), pkg.package_id().source_id(), gctx);
484 src.load()?;
485
486 if opts.check_metadata {
487 check_metadata(pkg, gctx)?;
488 }
489
490 if !pkg.manifest().exclude().is_empty() && !pkg.manifest().include().is_empty() {
491 gctx.shell().warn(
492 "both package.include and package.exclude are specified; \
493 the exclude list will be ignored",
494 )?;
495 }
496 let src_files = src.list_files(pkg)?;
497
498 let vcs_info = vcs::check_repo_state(pkg, &src_files, ws, &opts)?;
500 build_ar_list(ws, pkg, src_files, vcs_info, opts.include_lockfile)
501}
502
/// Builds the full list of files to include in the package archive.
///
/// Validates every on-disk file name, relocates the real `Cargo.toml` to
/// `Cargo.toml.orig` alongside a generated normalized manifest, appends the
/// generated lockfile/VCS-info entries, and pulls in out-of-tree
/// `license-file`/`readme` files.
#[tracing::instrument(skip_all)]
fn build_ar_list(
    ws: &Workspace<'_>,
    pkg: &Package,
    src_files: Vec<PathEntry>,
    vcs_info: Option<vcs::VcsInfo>,
    include_lockfile: bool,
) -> CargoResult<Vec<ArchiveFile>> {
    // Keyed case-insensitively so names differing only by case share a bucket
    // (matters on case-insensitive filesystems).
    let mut result = HashMap::new();
    let root = pkg.root();
    for src_file in &src_files {
        let rel_path = src_file.strip_prefix(&root)?;
        check_filename(rel_path, &mut ws.gctx().shell())?;
        let rel_str = rel_path.to_str().ok_or_else(|| {
            anyhow::format_err!("non-utf8 path in source directory: {}", rel_path.display())
        })?;
        match rel_str {
            // Never copy the on-disk lockfile; it is regenerated below.
            "Cargo.lock" => continue,
            // These names are reserved for files cargo itself generates.
            VCS_INFO_FILE | ORIGINAL_MANIFEST_FILE => anyhow::bail!(
                "invalid inclusion of reserved file name {} in package source",
                rel_str
            ),
            _ => {
                result
                    .entry(UncasedAscii::new(rel_str))
                    .or_insert_with(Vec::new)
                    .push(ArchiveFile {
                        rel_path: rel_path.to_owned(),
                        rel_str: rel_str.to_owned(),
                        contents: FileContents::OnDisk(src_file.to_path_buf()),
                    });
            }
        }
    }

    // Ship the original manifest as `Cargo.toml.orig` and put a generated,
    // normalized `Cargo.toml` in its place.
    if result.remove(&UncasedAscii::new("Cargo.toml")).is_some() {
        result
            .entry(UncasedAscii::new(ORIGINAL_MANIFEST_FILE))
            .or_insert_with(Vec::new)
            .push(ArchiveFile {
                rel_path: PathBuf::from(ORIGINAL_MANIFEST_FILE),
                rel_str: ORIGINAL_MANIFEST_FILE.to_string(),
                contents: FileContents::OnDisk(pkg.manifest_path().to_owned()),
            });
        result
            .entry(UncasedAscii::new("Cargo.toml"))
            .or_insert_with(Vec::new)
            .push(ArchiveFile {
                rel_path: PathBuf::from("Cargo.toml"),
                rel_str: "Cargo.toml".to_string(),
                contents: FileContents::Generated(GeneratedFile::Manifest(
                    pkg.manifest_path().to_owned(),
                )),
            });
    } else {
        ws.gctx().shell().warn(&format!(
            "no `Cargo.toml` file found when packaging `{}` (note the case of the file name).",
            pkg.name()
        ))?;
    }

    if include_lockfile {
        // Record the existing lockfile path (if any); the shipped lockfile is
        // generated either way.
        let lockfile_path = ws.lock_root().as_path_unlocked().join(LOCKFILE_NAME);
        let lockfile_path = lockfile_path.exists().then_some(lockfile_path);
        let rel_str = "Cargo.lock";
        result
            .entry(UncasedAscii::new(rel_str))
            .or_insert_with(Vec::new)
            .push(ArchiveFile {
                rel_path: PathBuf::from(rel_str),
                rel_str: rel_str.to_string(),
                contents: FileContents::Generated(GeneratedFile::Lockfile(lockfile_path)),
            });
    }

    if let Some(vcs_info) = vcs_info {
        let rel_str = VCS_INFO_FILE;
        result
            .entry(UncasedAscii::new(rel_str))
            .or_insert_with(Vec::new)
            .push(ArchiveFile {
                rel_path: PathBuf::from(rel_str),
                rel_str: rel_str.to_string(),
                contents: FileContents::Generated(GeneratedFile::VcsInfo(vcs_info)),
            });
    }

    // Collected error messages for `license-file`/`readme` entries that point
    // at nonexistent files; reported together at the end.
    let mut invalid_manifest_field: Vec<String> = vec![];

    // Flatten the case-insensitive buckets back into a plain list.
    let mut result = result.into_values().flatten().collect();
    if let Some(license_file) = &pkg.manifest().metadata().license_file {
        let license_path = Path::new(license_file);
        let abs_file_path = paths::normalize_path(&pkg.root().join(license_path));
        if abs_file_path.is_file() {
            check_for_file_and_add(
                "license-file",
                license_path,
                abs_file_path,
                pkg,
                &mut result,
                ws,
            )?;
        } else {
            error_on_nonexistent_file(
                &pkg,
                &license_path,
                "license-file",
                &mut invalid_manifest_field,
            );
        }
    }
    if let Some(readme) = &pkg.manifest().metadata().readme {
        let readme_path = Path::new(readme);
        let abs_file_path = paths::normalize_path(&pkg.root().join(readme_path));
        if abs_file_path.is_file() {
            check_for_file_and_add("readme", readme_path, abs_file_path, pkg, &mut result, ws)?;
        } else {
            error_on_nonexistent_file(&pkg, &readme_path, "readme", &mut invalid_manifest_field);
        }
    }

    if !invalid_manifest_field.is_empty() {
        return Err(anyhow::anyhow!(invalid_manifest_field.join("\n")));
    }

    // A build script outside the package root (or missing entirely) cannot be
    // archived correctly; error out early.
    for t in pkg
        .manifest()
        .targets()
        .iter()
        .filter(|t| t.is_custom_build())
    {
        if let Some(custom_build_path) = t.src_path().path() {
            let abs_custom_build_path = paths::normalize_path(&pkg.root().join(custom_build_path));
            if !abs_custom_build_path.is_file() || !abs_custom_build_path.starts_with(pkg.root()) {
                error_custom_build_file_not_in_package(pkg, &abs_custom_build_path, t)?;
            }
        }
    }

    // Deterministic archive ordering.
    result.sort_unstable_by(|a, b| a.rel_path.cmp(&b.rel_path));

    Ok(result)
}
649
/// Ensures the `license-file`/`readme` at `abs_file_path` ends up in the
/// archive list.
///
/// Files inside the package root are added at their real relative path
/// (unless already present); files outside the root are planted in the
/// archive root under their file name, unless a same-named root file already
/// exists, in which case only a warning is emitted.
fn check_for_file_and_add(
    label: &str,
    file_path: &Path,
    abs_file_path: PathBuf,
    pkg: &Package,
    result: &mut Vec<ArchiveFile>,
    ws: &Workspace<'_>,
) -> CargoResult<()> {
    match abs_file_path.strip_prefix(&pkg.root()) {
        Ok(rel_file_path) => {
            // Inside the package: only add if the file walk didn't already
            // pick it up.
            if !result.iter().any(|ar| ar.rel_path == rel_file_path) {
                result.push(ArchiveFile {
                    rel_path: rel_file_path.to_path_buf(),
                    rel_str: rel_file_path
                        .to_str()
                        .expect("everything was utf8")
                        .to_string(),
                    contents: FileContents::OnDisk(abs_file_path),
                })
            }
        }
        Err(_) => {
            // Outside the package root: copy it into the archive root.
            let file_name = file_path.file_name().unwrap();
            if result.iter().any(|ar| ar.rel_path == file_name) {
                ws.gctx().shell().warn(&format!(
                    "{} `{}` appears to be a path outside of the package, \
                    but there is already a file named `{}` in the root of the package. \
                    The archived crate will contain the copy in the root of the package. \
                    Update the {} to point to the path relative \
                    to the root of the package to remove this warning.",
                    label,
                    file_path.display(),
                    file_name.to_str().unwrap(),
                    label,
                ))?;
            } else {
                result.push(ArchiveFile {
                    rel_path: PathBuf::from(file_name),
                    rel_str: file_name.to_str().unwrap().to_string(),
                    contents: FileContents::OnDisk(abs_file_path),
                })
            }
        }
    }
    Ok(())
}
697
698fn error_on_nonexistent_file(
699 pkg: &Package,
700 path: &Path,
701 manifest_key_name: &'static str,
702 invalid: &mut Vec<String>,
703) {
704 let rel_msg = if path.is_absolute() {
705 "".to_string()
706 } else {
707 format!(" (relative to `{}`)", pkg.root().display())
708 };
709
710 let msg = format!(
711 "{manifest_key_name} `{}` does not appear to exist{}.\n\
712 Please update the {manifest_key_name} setting in the manifest at `{}`.",
713 path.display(),
714 rel_msg,
715 pkg.manifest_path().display()
716 );
717
718 invalid.push(msg);
719}
720
721fn error_custom_build_file_not_in_package(
722 pkg: &Package,
723 path: &Path,
724 target: &Target,
725) -> CargoResult<Vec<ArchiveFile>> {
726 let tip = {
727 let description_name = target.description_named();
728 if path.is_file() {
729 format!(
730 "the source file of {description_name} doesn't appear to be a path inside of the package.\n\
731 It is at `{}`, whereas the root the package is `{}`.\n",
732 path.display(),
733 pkg.root().display()
734 )
735 } else {
736 format!("the source file of {description_name} doesn't appear to exist.\n",)
737 }
738 };
739 let msg = format!(
740 "{}\
741 This may cause issue during packaging, as modules resolution and resources included via macros are often relative to the path of source files.\n\
742 Please update the `build` setting in the manifest at `{}` and point to a path inside the root of the package.",
743 tip,
744 pkg.manifest_path().display()
745 );
746 anyhow::bail!(msg)
747}
748
/// Computes the `Cargo.lock` text to ship inside the package tarball.
///
/// Resolves an ephemeral workspace containing only `publish_pkg`, seeded with
/// the original workspace lockfile so versions stay as stable as possible,
/// and returns the serialized lockfile.
fn build_lock(
    ws: &Workspace<'_>,
    opts: &PackageOpts<'_>,
    publish_pkg: &Package,
    local_reg: Option<&TmpRegistry<'_>>,
) -> CargoResult<String> {
    let gctx = ws.gctx();
    let mut orig_resolve = ops::load_pkg_lockfile(ws)?;

    let mut tmp_ws = Workspace::ephemeral(publish_pkg.clone(), ws.gctx(), None, true)?;

    if let Some(local_reg) = local_reg {
        // Make just-packaged local dependencies resolvable through the
        // overlay registry.
        tmp_ws.add_local_overlay(
            local_reg.upstream,
            local_reg.root.as_path_unlocked().to_owned(),
        );
        if opts.dry_run {
            // Dry run: the overlay packages were never uploaded upstream, so
            // inject their checksums into the original resolve under the
            // upstream source id to keep checksum data consistent.
            if let Some(orig_resolve) = orig_resolve.as_mut() {
                let upstream_in_lock = if local_reg.upstream.is_crates_io() {
                    SourceId::crates_io(gctx)?
                } else {
                    local_reg.upstream
                };
                for (p, s) in local_reg.checksums() {
                    orig_resolve.set_checksum(p.with_source_id(upstream_in_lock), s.to_owned());
                }
            }
        }
    }
    let mut tmp_reg = tmp_ws.package_registry()?;

    // Resolve with all features and dev-units so the lockfile covers
    // everything a consumer of the package might build.
    let mut new_resolve = ops::resolve_with_previous(
        &mut tmp_reg,
        &tmp_ws,
        &CliFeatures::new_all(true),
        HasDevUnits::Yes,
        orig_resolve.as_ref(),
        None,
        &[],
        true,
    )?;

    let pkg_set = ops::get_resolved_packages(&new_resolve, tmp_reg)?;

    // Report (in verbose mode) packages that changed vs. the original lock,
    // and warn about yanked versions ending up in the shipped lockfile.
    if let Some(orig_resolve) = orig_resolve {
        compare_resolve(gctx, tmp_ws.current()?, &orig_resolve, &new_resolve)?;
    }
    check_yanked(
        gctx,
        &pkg_set,
        &new_resolve,
        "consider updating to a version that is not yanked",
    )?;

    ops::resolve_to_string(&tmp_ws, &mut new_resolve)
}
809
/// Warns if recommended manifest metadata is missing: a description, a
/// license (or license file), and at least one of
/// documentation/homepage/repository.
fn check_metadata(pkg: &Package, gctx: &GlobalContext) -> CargoResult<()> {
    let md = pkg.manifest().metadata();

    let mut missing = vec![];

    // Each `a || b` group fires only when *every* field in the group is
    // absent or empty; all of the group's names are then reported (with `_`
    // swapped for `-` to match manifest key spelling).
    macro_rules! lacking {
        ($( $($field: ident)||* ),*) => {{
            $(
                if $(md.$field.as_ref().map_or(true, |s| s.is_empty()))&&* {
                    $(missing.push(stringify!($field).replace("_", "-"));)*
                }
            )*
        }}
    }
    lacking!(
        description,
        license || license_file,
        documentation || homepage || repository
    );

    if !missing.is_empty() {
        // Join as "a, b or c" for the warning text.
        let mut things = missing[..missing.len() - 1].join(", ");
        if !things.is_empty() {
            things.push_str(" or ");
        }
        things.push_str(missing.last().unwrap());

        gctx.shell().warn(&format!(
            "manifest has no {things}.\n\
            See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info.",
            things = things
        ))?
    }

    Ok(())
}
850
851fn tar(
855 ws: &Workspace<'_>,
856 opts: &PackageOpts<'_>,
857 pkg: &Package,
858 local_reg: Option<&TmpRegistry<'_>>,
859 ar_files: Vec<ArchiveFile>,
860 dst: &File,
861 filename: &str,
862) -> CargoResult<u64> {
863 let filename = Path::new(filename);
865 let encoder = GzBuilder::new()
866 .filename(paths::path2bytes(filename)?)
867 .write(dst, Compression::best());
868
869 let mut ar = Builder::new(encoder);
871 ar.sparse(false);
872 let gctx = ws.gctx();
873
874 let base_name = format!("{}-{}", pkg.name(), pkg.version());
875 let base_path = Path::new(&base_name);
876 let included = ar_files
877 .iter()
878 .map(|ar_file| ar_file.rel_path.clone())
879 .collect::<Vec<_>>();
880 let publish_pkg = prepare_for_publish(pkg, ws, Some(&included))?;
881
882 let mut uncompressed_size = 0;
883 for ar_file in ar_files {
884 let ArchiveFile {
885 rel_path,
886 rel_str,
887 contents,
888 } = ar_file;
889 let ar_path = base_path.join(&rel_path);
890 gctx.shell()
891 .verbose(|shell| shell.status("Archiving", &rel_str))?;
892 let mut header = Header::new_gnu();
893 match contents {
894 FileContents::OnDisk(disk_path) => {
895 let mut file = File::open(&disk_path).with_context(|| {
896 format!("failed to open for archiving: `{}`", disk_path.display())
897 })?;
898 let metadata = file.metadata().with_context(|| {
899 format!("could not learn metadata for: `{}`", disk_path.display())
900 })?;
901 header.set_metadata_in_mode(&metadata, HeaderMode::Deterministic);
902 header.set_cksum();
903 ar.append_data(&mut header, &ar_path, &mut file)
904 .with_context(|| {
905 format!("could not archive source file `{}`", disk_path.display())
906 })?;
907 uncompressed_size += metadata.len() as u64;
908 }
909 FileContents::Generated(generated_kind) => {
910 let contents = match generated_kind {
911 GeneratedFile::Manifest(_) => {
912 publish_pkg.manifest().to_normalized_contents()?
913 }
914 GeneratedFile::Lockfile(_) => build_lock(ws, opts, &publish_pkg, local_reg)?,
915 GeneratedFile::VcsInfo(ref s) => serde_json::to_string_pretty(s)?,
916 };
917 header.set_entry_type(EntryType::file());
918 header.set_mode(0o644);
919 header.set_size(contents.len() as u64);
920 header.set_mtime(1);
922 header.set_cksum();
923 ar.append_data(&mut header, &ar_path, contents.as_bytes())
924 .with_context(|| format!("could not archive source file `{}`", rel_str))?;
925 uncompressed_size += contents.len() as u64;
926 }
927 }
928 }
929
930 let encoder = ar.into_inner()?;
931 encoder.finish()?;
932 Ok(uncompressed_size)
933}
934
/// In verbose mode, notes every package present in the newly generated
/// lockfile but absent from the original one, with a hint about where it
/// previously came from (different version or different source).
fn compare_resolve(
    gctx: &GlobalContext,
    current_pkg: &Package,
    orig_resolve: &Resolve,
    new_resolve: &Resolve,
) -> CargoResult<()> {
    if gctx.shell().verbosity() != Verbosity::Verbose {
        return Ok(());
    }
    let new_set: BTreeSet<PackageId> = new_resolve.iter().collect();
    let orig_set: BTreeSet<PackageId> = orig_resolve.iter().collect();
    let added = new_set.difference(&orig_set);
    let removed: Vec<&PackageId> = orig_set.difference(&new_set).collect();
    for pkg_id in added {
        // The package being packaged naturally differs; skip it.
        if pkg_id.name() == current_pkg.name() && pkg_id.version() == current_pkg.version() {
            continue;
        }
        // Removed entries with the same name+version mean the package merely
        // changed source.
        let removed_candidates: Vec<&PackageId> = removed
            .iter()
            .filter(|orig_pkg_id| {
                orig_pkg_id.name() == pkg_id.name() && orig_pkg_id.version() == pkg_id.version()
            })
            .cloned()
            .collect();
        let extra = match removed_candidates.len() {
            0 => {
                // No same-version entry: report any prior version(s) instead.
                let previous_versions: Vec<&PackageId> = removed
                    .iter()
                    .filter(|orig_pkg_id| orig_pkg_id.name() == pkg_id.name())
                    .cloned()
                    .collect();
                match previous_versions.len() {
                    0 => String::new(),
                    1 => format!(
                        ", previous version was `{}`",
                        previous_versions[0].version()
                    ),
                    _ => format!(
                        ", previous versions were: {}",
                        previous_versions
                            .iter()
                            .map(|pkg_id| format!("`{}`", pkg_id.version()))
                            .collect::<Vec<_>>()
                            .join(", ")
                    ),
                }
            }
            1 => {
                format!(
                    ", was originally sourced from `{}`",
                    removed_candidates[0].source_id()
                )
            }
            _ => {
                let comma_list = removed_candidates
                    .iter()
                    .map(|pkg_id| format!("`{}`", pkg_id.source_id()))
                    .collect::<Vec<_>>()
                    .join(", ");
                format!(
                    ", was originally sourced from one of these sources: {}",
                    comma_list
                )
            }
        };
        let msg = format!(
            "package `{}` added to the packaged Cargo.lock file{}",
            pkg_id, extra
        );
        gctx.shell().note(msg)?;
    }
    Ok(())
}
1021
/// Warns about resolved packages that have been yanked from their registry,
/// appending `hint` to each warning.
pub fn check_yanked(
    gctx: &GlobalContext,
    pkg_set: &PackageSet<'_>,
    resolve: &Resolve,
    hint: &str,
) -> CargoResult<()> {
    // Yank queries go through the package cache; hold the lock for the
    // duration.
    let _lock = gctx.acquire_package_cache_lock(CacheLockMode::DownloadExclusive)?;

    let mut sources = pkg_set.sources_mut();
    let mut pending: Vec<PackageId> = resolve.iter().collect();
    let mut results = Vec::new();
    // Drop possibly stale cached index data before querying.
    for (_id, source) in sources.sources_mut() {
        source.invalidate_cache();
    }
    // `is_yanked` is non-blocking: poll every pending package, then block
    // until the sources make progress, until all queries have resolved.
    while !pending.is_empty() {
        pending.retain(|pkg_id| {
            if let Some(source) = sources.get_mut(pkg_id.source_id()) {
                match source.is_yanked(*pkg_id) {
                    Poll::Ready(result) => results.push((*pkg_id, result)),
                    Poll::Pending => return true,
                }
            }
            false
        });
        for (_id, source) in sources.sources_mut() {
            source.block_until_ready()?;
        }
    }

    // Per-package errors are deferred until here so every query completes.
    for (pkg_id, is_yanked) in results {
        if is_yanked? {
            gctx.shell().warn(format!(
                "package `{}` in Cargo.lock is yanked in registry `{}`, {}",
                pkg_id,
                pkg_id.source_id().display_registry_name(),
                hint
            ))?;
        }
    }
    Ok(())
}
1065
1066fn check_filename(file: &Path, shell: &mut Shell) -> CargoResult<()> {
1073 let Some(name) = file.file_name() else {
1074 return Ok(());
1075 };
1076 let Some(name) = name.to_str() else {
1077 anyhow::bail!(
1078 "path does not have a unicode filename which may not unpack \
1079 on all platforms: {}",
1080 file.display()
1081 )
1082 };
1083 let bad_chars = ['/', '\\', '<', '>', ':', '"', '|', '?', '*'];
1084 if let Some(c) = bad_chars.iter().find(|c| name.contains(**c)) {
1085 anyhow::bail!(
1086 "cannot package a filename with a special character `{}`: {}",
1087 c,
1088 file.display()
1089 )
1090 }
1091 if restricted_names::is_windows_reserved_path(file) {
1092 shell.warn(format!(
1093 "file {} is a reserved Windows filename, \
1094 it will not work on Windows platforms",
1095 file.display()
1096 ))?;
1097 }
1098 Ok(())
1099}
1100
/// An on-disk registry overlay holding freshly packaged crates so that later
/// packages in the same run can resolve against them.
struct TmpRegistry<'a> {
    gctx: &'a GlobalContext,
    /// The real registry this overlay stands in for.
    upstream: SourceId,
    /// Root directory of the overlay (tarballs plus an `index/` subdirectory).
    root: Filesystem,
    /// Sha256 hex digest of each added tarball, keyed by package id.
    checksums: HashMap<PackageId, String>,
    /// Exclusive lock on `root`, held for the registry's lifetime.
    _lock: FileLock,
}
1111
1112impl<'a> TmpRegistry<'a> {
1113 fn new(gctx: &'a GlobalContext, root: Filesystem, upstream: SourceId) -> CargoResult<Self> {
1114 root.create_dir()?;
1115 let _lock = root.open_rw_exclusive_create(".cargo-lock", gctx, "temporary registry")?;
1116 let slf = Self {
1117 gctx,
1118 root,
1119 upstream,
1120 checksums: HashMap::new(),
1121 _lock,
1122 };
1123 let index_path = slf.index_path().into_path_unlocked();
1125 if index_path.exists() {
1126 paths::remove_dir_all(index_path)?;
1127 }
1128 slf.index_path().create_dir()?;
1129 Ok(slf)
1130 }
1131
1132 fn index_path(&self) -> Filesystem {
1133 self.root.join("index")
1134 }
1135
1136 fn add_package(
1137 &mut self,
1138 ws: &Workspace<'_>,
1139 package: &Package,
1140 tar: &FileLock,
1141 ) -> CargoResult<()> {
1142 debug!(
1143 "adding package {}@{} to local overlay at {}",
1144 package.name(),
1145 package.version(),
1146 self.root.as_path_unlocked().display()
1147 );
1148 {
1149 let mut tar_copy = self.root.open_rw_exclusive_create(
1150 package.package_id().tarball_name(),
1151 self.gctx,
1152 "temporary package registry",
1153 )?;
1154 tar.file().seek(SeekFrom::Start(0))?;
1155 std::io::copy(&mut tar.file(), &mut tar_copy)?;
1156 tar_copy.flush()?;
1157 }
1158
1159 let new_crate = super::registry::prepare_transmit(self.gctx, ws, package, self.upstream)?;
1160
1161 tar.file().seek(SeekFrom::Start(0))?;
1162 let cksum = cargo_util::Sha256::new()
1163 .update_file(tar.file())?
1164 .finish_hex();
1165
1166 self.checksums.insert(package.package_id(), cksum.clone());
1167
1168 let deps: Vec<_> = new_crate
1169 .deps
1170 .into_iter()
1171 .map(|dep| {
1172 let name = dep
1173 .explicit_name_in_toml
1174 .clone()
1175 .unwrap_or_else(|| dep.name.clone())
1176 .into();
1177 let package = dep
1178 .explicit_name_in_toml
1179 .as_ref()
1180 .map(|_| dep.name.clone().into());
1181 RegistryDependency {
1182 name: name,
1183 req: dep.version_req.into(),
1184 features: dep.features.into_iter().map(|x| x.into()).collect(),
1185 optional: dep.optional,
1186 default_features: dep.default_features,
1187 target: dep.target.map(|x| x.into()),
1188 kind: Some(dep.kind.into()),
1189 registry: dep.registry.map(|x| x.into()),
1190 package: package,
1191 public: None,
1192 artifact: dep
1193 .artifact
1194 .map(|xs| xs.into_iter().map(|x| x.into()).collect()),
1195 bindep_target: dep.bindep_target.map(|x| x.into()),
1196 lib: dep.lib,
1197 }
1198 })
1199 .collect();
1200
1201 let index_line = serde_json::to_string(&IndexPackage {
1202 name: new_crate.name.into(),
1203 vers: package.version().clone(),
1204 deps,
1205 features: new_crate
1206 .features
1207 .into_iter()
1208 .map(|(k, v)| (k.into(), v.into_iter().map(|x| x.into()).collect()))
1209 .collect(),
1210 features2: None,
1211 cksum,
1212 yanked: None,
1213 links: new_crate.links.map(|x| x.into()),
1214 rust_version: None,
1215 v: Some(2),
1216 })?;
1217
1218 let file =
1219 cargo_util::registry::make_dep_path(&package.name().as_str().to_lowercase(), false);
1220 let mut dst = self.index_path().open_rw_exclusive_create(
1221 file,
1222 self.gctx,
1223 "temporary package registry",
1224 )?;
1225 dst.write_all(index_line.as_bytes())?;
1226 Ok(())
1227 }
1228
1229 fn checksums(&self) -> impl Iterator<Item = (PackageId, &str)> {
1230 self.checksums.iter().map(|(p, s)| (*p, s.as_str()))
1231 }
1232}