1use std::collections::BTreeMap;
2use std::collections::BTreeSet;
3use std::collections::HashMap;
4use std::fs::File;
5use std::io::SeekFrom;
6use std::io::prelude::*;
7use std::path::{Path, PathBuf};
8use std::task::Poll;
9
10use crate::core::PackageIdSpecQuery;
11use crate::core::Shell;
12use crate::core::Verbosity;
13use crate::core::Workspace;
14use crate::core::dependency::DepKind;
15use crate::core::manifest::Target;
16use crate::core::resolver::CliFeatures;
17use crate::core::resolver::HasDevUnits;
18use crate::core::{Package, PackageId, PackageSet, Resolve, SourceId};
19use crate::ops::lockfile::LOCKFILE_NAME;
20use crate::ops::registry::{RegistryOrIndex, infer_registry};
21use crate::sources::path::PathEntry;
22use crate::sources::{CRATES_IO_REGISTRY, PathSource};
23use crate::util::FileLock;
24use crate::util::Filesystem;
25use crate::util::GlobalContext;
26use crate::util::Graph;
27use crate::util::HumanBytes;
28use crate::util::cache_lock::CacheLockMode;
29use crate::util::context::JobsConfig;
30use crate::util::errors::CargoResult;
31use crate::util::errors::ManifestError;
32use crate::util::restricted_names;
33use crate::util::toml::prepare_for_publish;
34use crate::{drop_println, ops};
35use annotate_snippets::Level;
36use anyhow::{Context as _, bail};
37use cargo_util::paths;
38use cargo_util_schemas::index::{IndexPackage, RegistryDependency};
39use cargo_util_schemas::messages;
40use flate2::{Compression, GzBuilder};
41use tar::{Builder, EntryType, Header, HeaderMode};
42use tracing::debug;
43use unicase::Ascii as UncasedAscii;
44
45mod vcs;
46mod verify;
47
/// Output format for `cargo package --list`.
#[derive(Debug, Clone)]
pub enum PackageMessageFormat {
    /// Plain listing of file paths, one per line.
    Human,
    /// Machine-readable JSON report.
    Json,
}
56
impl PackageMessageFormat {
    /// All values accepted for the message-format flag.
    pub const POSSIBLE_VALUES: [&str; 2] = ["human", "json"];

    /// Format used when no flag is given.
    pub const DEFAULT: &str = "human";
}
62
63impl std::str::FromStr for PackageMessageFormat {
64 type Err = anyhow::Error;
65
66 fn from_str(s: &str) -> Result<PackageMessageFormat, anyhow::Error> {
67 match s {
68 "human" => Ok(PackageMessageFormat::Human),
69 "json" => Ok(PackageMessageFormat::Json),
70 f => bail!("unknown message format `{f}`"),
71 }
72 }
73}
74
/// Options controlling a `cargo package` invocation.
#[derive(Clone)]
pub struct PackageOpts<'gctx> {
    pub gctx: &'gctx GlobalContext,
    /// Only list the files that would be packaged instead of building a tarball.
    pub list: bool,
    /// Output format used when `list` is set.
    pub fmt: PackageMessageFormat,
    /// Check for recommended manifest metadata (description, license, ...).
    pub check_metadata: bool,
    /// Allow packaging even with a dirty VCS working directory.
    pub allow_dirty: bool,
    /// Include a `Cargo.lock` in the produced tarball.
    pub include_lockfile: bool,
    /// Build the packaged crate afterwards to verify the tarball is usable.
    pub verify: bool,
    pub jobs: Option<JobsConfig>,
    pub keep_going: bool,
    /// Which workspace packages to package.
    pub to_package: ops::Packages,
    pub targets: Vec<String>,
    pub cli_features: CliFeatures,
    /// Registry or index the package is intended for, if specified.
    pub reg_or_index: Option<ops::RegistryOrIndex>,
    pub dry_run: bool,
}
104
/// Archive name under which the original (pre-normalization) manifest is stored.
const ORIGINAL_MANIFEST_FILE: &str = "Cargo.toml.orig";
/// Archive name of the generated VCS metadata file.
const VCS_INFO_FILE: &str = ".cargo_vcs_info.json";
107
/// A single file destined for the package archive.
struct ArchiveFile {
    /// Path of the file inside the archive, relative to the package root.
    rel_path: PathBuf,
    /// `rel_path` as a UTF-8 string (validated when the list is built).
    rel_str: String,
    /// Where this file's bytes come from.
    contents: FileContents,
}
117
/// Source of an archived file's contents.
enum FileContents {
    /// Copy the file verbatim from this on-disk path.
    OnDisk(PathBuf),
    /// Contents are produced at archive time.
    Generated(GeneratedFile),
}
124
/// Kinds of files whose contents are generated while archiving.
enum GeneratedFile {
    /// Normalized `Cargo.toml`; the path points at the original manifest.
    Manifest(PathBuf),
    /// `Cargo.lock`; `Some` carries the path of an existing workspace
    /// lockfile, `None` means one is resolved from scratch.
    Lockfile(Option<PathBuf>),
    /// `.cargo_vcs_info.json` describing the VCS state.
    VcsInfo(vcs::VcsInfo),
}
137
138#[tracing::instrument(skip_all)]
140fn create_package(
141 ws: &Workspace<'_>,
142 opts: &PackageOpts<'_>,
143 pkg: &Package,
144 ar_files: Vec<ArchiveFile>,
145 local_reg: Option<&TmpRegistry<'_>>,
146) -> CargoResult<FileLock> {
147 let gctx = ws.gctx();
148 let filecount = ar_files.len();
149
150 for dep in pkg.dependencies() {
152 super::check_dep_has_version(dep, false).map_err(|err| {
153 ManifestError::new(
154 err.context(format!(
155 "failed to verify manifest at `{}`",
156 pkg.manifest_path().display()
157 )),
158 pkg.manifest_path().into(),
159 )
160 })?;
161 }
162
163 let filename = pkg.package_id().tarball_name();
164 let build_dir = ws.build_dir();
165 paths::create_dir_all_excluded_from_backups_atomic(build_dir.as_path_unlocked())?;
166 let dir = build_dir.join("package");
167 let mut dst = {
168 let tmp = format!(".{}", filename);
169 dir.open_rw_exclusive_create(&tmp, gctx, "package scratch space")?
170 };
171
172 gctx.shell()
177 .status("Packaging", pkg.package_id().to_string())?;
178 dst.file().set_len(0)?;
179 let uncompressed_size = tar(ws, opts, pkg, local_reg, ar_files, dst.file(), &filename)
180 .context("failed to prepare local package for uploading")?;
181
182 dst.seek(SeekFrom::Start(0))?;
183 let dst_path = dst.parent().join(&filename);
184 dst.rename(&dst_path)?;
185
186 let dst_metadata = dst
187 .file()
188 .metadata()
189 .with_context(|| format!("could not learn metadata for: `{}`", dst_path.display()))?;
190 let compressed_size = dst_metadata.len();
191
192 let uncompressed = HumanBytes(uncompressed_size);
193 let compressed = HumanBytes(compressed_size);
194
195 let message = format!("{filecount} files, {uncompressed:.1} ({compressed:.1} compressed)");
196 drop(gctx.shell().status("Packaged", message));
198
199 return Ok(dst);
200}
201
/// Packages the selected workspace members and returns a lock on each
/// produced `.crate` file.
///
/// Tarballs are created in the build directory; when the target directory
/// differs from the build directory, each tarball is additionally copied
/// ("uplifted") into `target/package` and locks on those copies are
/// returned instead.
pub fn package(ws: &Workspace<'_>, opts: &PackageOpts<'_>) -> CargoResult<Vec<FileLock>> {
    let specs = &opts.to_package.to_package_id_specs(ws)?;
    // When explicit packages were requested, verify each spec actually
    // matches a workspace member before doing any work.
    if let ops::Packages::Packages(_) = opts.to_package {
        for spec in specs.iter() {
            let member_ids = ws.members().map(|p| p.package_id());
            spec.query(member_ids)?;
        }
    }
    let mut pkgs = ws.members_with_features(specs, &opts.cli_features)?;

    // Keep only the members selected by the specs.
    pkgs.retain(|(pkg, _feats)| specs.iter().any(|spec| spec.matches(pkg.package_id())));

    let packaged = do_package(ws, opts, pkgs)?;

    let mut result = Vec::new();
    let target_dir = ws.target_dir();
    let build_dir = ws.build_dir();
    if target_dir == build_dir {
        // Tarballs were already written where callers expect them.
        result.extend(packaged.into_iter().map(|(_, _, src)| src));
    } else {
        // Uplift each tarball from the build dir into `target/package`.
        paths::create_dir_all_excluded_from_backups_atomic(target_dir.as_path_unlocked())?;
        let artifact_dir = target_dir.join("package");
        for (pkg, _, src) in packaged {
            let filename = pkg.package_id().tarball_name();
            let dst =
                artifact_dir.open_rw_exclusive_create(filename, ws.gctx(), "uplifted package")?;
            src.file().seek(SeekFrom::Start(0))?;
            std::io::copy(&mut src.file(), &mut dst.file())?;
            result.push(dst);
        }
    }

    Ok(result)
}
244
245pub(crate) fn package_with_dep_graph(
251 ws: &Workspace<'_>,
252 opts: &PackageOpts<'_>,
253 pkgs: Vec<(&Package, CliFeatures)>,
254) -> CargoResult<LocalDependencies<(CliFeatures, FileLock)>> {
255 let output = do_package(ws, opts, pkgs)?;
256
257 Ok(local_deps(output.into_iter().map(
258 |(pkg, opts, tarball)| (pkg, (opts.cli_features, tarball)),
259 )))
260}
261
/// Core packaging loop shared by `package` and `package_with_dep_graph`.
///
/// Builds (or lists) each requested package in local-dependency order,
/// optionally registering each result in a temporary local registry so
/// later packages can resolve against the freshly packaged copies, and
/// optionally verifying each tarball afterwards.
fn do_package<'a>(
    ws: &Workspace<'_>,
    opts: &PackageOpts<'a>,
    pkgs: Vec<(&Package, CliFeatures)>,
) -> CargoResult<Vec<(Package, PackageOpts<'a>, FileLock)>> {
    // If a workspace lockfile exists and one will be packaged, resolve the
    // workspace first so the lockfile is up to date.
    if ws
        .lock_root()
        .as_path_unlocked()
        .join(LOCKFILE_NAME)
        .exists()
        && opts.include_lockfile
    {
        let dry_run = false;
        let _ = ops::resolve_ws(ws, dry_run)?;
    }

    let deps = local_deps(pkgs.iter().map(|(p, f)| ((*p).clone(), f.clone())));
    let just_pkgs: Vec<_> = pkgs.iter().map(|p| p.0).collect();

    // A temporary registry is only needed when packages depend on each
    // other (so lockfile generation / verification can see the freshly
    // packaged dependencies) or when a registry was explicitly requested.
    let mut local_reg = {
        let sid = if (deps.has_dependencies() && (opts.include_lockfile || opts.verify))
            || opts.reg_or_index.is_some()
        {
            let sid = get_registry(ws.gctx(), &just_pkgs, opts.reg_or_index.clone())?;
            debug!("packaging for registry {}", sid);
            Some(sid)
        } else {
            None
        };
        let reg_dir = ws.build_dir().join("package").join("tmp-registry");
        sid.map(|sid| TmpRegistry::new(ws.gctx(), reg_dir, sid))
            .transpose()?
    };

    // Package in dependency order so each package's local dependencies are
    // already present in the temporary registry.
    let sorted_pkgs = deps.sort();
    let mut outputs: Vec<(Package, PackageOpts<'_>, FileLock)> = Vec::new();
    for (pkg, cli_features) in sorted_pkgs {
        // Each package uses its own feature selection.
        let opts = PackageOpts {
            cli_features: cli_features.clone(),
            to_package: ops::Packages::Default,
            ..opts.clone()
        };
        let ar_files = prepare_archive(ws, &pkg, &opts)?;

        if opts.list {
            match opts.fmt {
                PackageMessageFormat::Human => {
                    // One path per line.
                    for ar_file in &ar_files {
                        drop_println!(ws.gctx(), "{}", ar_file.rel_str);
                    }
                }
                PackageMessageFormat::Json => {
                    let message = messages::PackageList {
                        id: pkg.package_id().to_spec(),
                        files: BTreeMap::from_iter(ar_files.into_iter().map(|f| {
                            let file = match f.contents {
                                FileContents::OnDisk(path) => messages::PackageFile::Copy { path },
                                FileContents::Generated(
                                    GeneratedFile::Manifest(path)
                                    | GeneratedFile::Lockfile(Some(path)),
                                ) => messages::PackageFile::Generate { path: Some(path) },
                                FileContents::Generated(
                                    GeneratedFile::VcsInfo(_) | GeneratedFile::Lockfile(None),
                                ) => messages::PackageFile::Generate { path: None },
                            };
                            (f.rel_path, file)
                        })),
                    };
                    let _ = ws.gctx().shell().print_json(&message);
                }
            }
        } else {
            let tarball = create_package(ws, &opts, &pkg, ar_files, local_reg.as_ref())?;
            if let Some(local_reg) = local_reg.as_mut() {
                // `publish = []` marks a package unpublishable; don't
                // register those in the temporary registry.
                if pkg.publish() != &Some(Vec::new()) {
                    local_reg.add_package(ws, &pkg, &tarball)?;
                }
            }
            outputs.push((pkg, opts, tarball));
        }
    }

    // Verification happens after all tarballs exist, so that a package's
    // local dependencies are available in the temporary registry.
    if opts.verify {
        for (pkg, opts, tarball) in &outputs {
            verify::run_verify(ws, pkg, tarball, local_reg.as_ref(), opts)
                .context("failed to verify package tarball")?
        }
    }

    Ok(outputs)
}
366
367fn get_registry(
374 gctx: &GlobalContext,
375 pkgs: &[&Package],
376 reg_or_index: Option<RegistryOrIndex>,
377) -> CargoResult<SourceId> {
378 let reg_or_index = match reg_or_index.clone() {
379 Some(r) => Some(r),
380 None => infer_registry(pkgs)?,
381 };
382
383 let reg = reg_or_index
385 .clone()
386 .unwrap_or_else(|| RegistryOrIndex::Registry(CRATES_IO_REGISTRY.to_owned()));
387 if let RegistryOrIndex::Registry(reg_name) = reg {
388 for pkg in pkgs {
389 if let Some(allowed) = pkg.publish().as_ref() {
390 if !allowed.is_empty() && !allowed.iter().any(|a| a == ®_name) {
394 bail!(
395 "`{}` cannot be packaged.\n\
396 The registry `{}` is not listed in the `package.publish` value in Cargo.toml.",
397 pkg.name(),
398 reg_name
399 );
400 }
401 }
402 }
403 }
404 Ok(ops::registry::get_source_id(gctx, reg_or_index.as_ref())?.replacement)
405}
406
/// The graph of local path dependencies among a set of packages, with an
/// arbitrary payload `T` attached to each package.
#[derive(Clone, Debug, Default)]
pub(crate) struct LocalDependencies<T> {
    /// Every package in the set, keyed by id, with its payload.
    pub packages: HashMap<PackageId, (Package, T)>,
    /// Edges point from a package to its local dependencies.
    pub graph: Graph<PackageId, ()>,
}
413
414impl<T: Clone> LocalDependencies<T> {
415 pub fn sort(&self) -> Vec<(Package, T)> {
416 self.graph
417 .sort()
418 .into_iter()
419 .map(|name| self.packages[&name].clone())
420 .collect()
421 }
422
423 pub fn has_dependencies(&self) -> bool {
424 self.graph
425 .iter()
426 .any(|node| self.graph.edges(node).next().is_some())
427 }
428}
429
/// Builds the graph of path dependencies among `packages`, attaching the
/// given payload to each package.
///
/// Only dependencies that point at another package in the set become edges.
fn local_deps<T>(packages: impl Iterator<Item = (Package, T)>) -> LocalDependencies<T> {
    let packages: HashMap<PackageId, (Package, T)> = packages
        .map(|(pkg, payload)| (pkg.package_id(), (pkg, payload)))
        .collect();

    // Dependencies carry source ids rather than package ids, so index our
    // packages by source id for the edge lookup below.
    let source_to_pkg: HashMap<_, _> = packages
        .keys()
        .map(|pkg_id| (pkg_id.source_id(), *pkg_id))
        .collect();

    let mut graph = Graph::new();
    for (pkg, _payload) in packages.values() {
        graph.add(pkg.package_id());
        for dep in pkg.dependencies() {
            // Only path dependencies can be local to this set.
            if !dep.source_id().is_path() {
                continue;
            }

            // Dev-dependencies without a version requirement are skipped —
            // presumably because they are dropped when publishing, so they
            // don't constrain packaging order (TODO confirm).
            if dep.kind() == DepKind::Development && !dep.specified_req() {
                continue;
            };

            // Skip a package depending on itself (same source id).
            if dep.source_id() == pkg.package_id().source_id() {
                continue;
            }

            if let Some(dep_pkg) = source_to_pkg.get(&dep.source_id()) {
                graph.link(pkg.package_id(), *dep_pkg);
            }
        }
    }

    LocalDependencies { packages, graph }
}
476
477#[tracing::instrument(skip_all)]
479fn prepare_archive(
480 ws: &Workspace<'_>,
481 pkg: &Package,
482 opts: &PackageOpts<'_>,
483) -> CargoResult<Vec<ArchiveFile>> {
484 let gctx = ws.gctx();
485 let mut src = PathSource::new(pkg.root(), pkg.package_id().source_id(), gctx);
486 src.load()?;
487
488 if opts.check_metadata {
489 check_metadata(pkg, opts.reg_or_index.as_ref(), gctx)?;
490 }
491
492 if !pkg.manifest().exclude().is_empty() && !pkg.manifest().include().is_empty() {
493 gctx.shell().warn(
494 "both package.include and package.exclude are specified; \
495 the exclude list will be ignored",
496 )?;
497 }
498 let src_files = src.list_files(pkg)?;
499
500 let vcs_info = vcs::check_repo_state(pkg, &src_files, ws, &opts)?;
502 build_ar_list(ws, pkg, src_files, vcs_info, opts.include_lockfile)
503}
504
/// Builds the final list of archive entries for `pkg` from the walked
/// source files plus the generated `Cargo.toml`, `Cargo.toml.orig`,
/// `Cargo.lock`, and `.cargo_vcs_info.json` entries; validates
/// `license-file`/`readme` manifest paths and custom build-script paths.
#[tracing::instrument(skip_all)]
fn build_ar_list(
    ws: &Workspace<'_>,
    pkg: &Package,
    src_files: Vec<PathEntry>,
    vcs_info: Option<vcs::VcsInfo>,
    include_lockfile: bool,
) -> CargoResult<Vec<ArchiveFile>> {
    // Keyed case-insensitively so the `Cargo.toml` handling below also
    // matches differently-cased variants of the name.
    let mut result = HashMap::new();
    let root = pkg.root();
    for src_file in &src_files {
        let rel_path = src_file.strip_prefix(&root)?;
        check_filename(rel_path, &mut ws.gctx().shell())?;
        let rel_str = rel_path.to_str().ok_or_else(|| {
            anyhow::format_err!("non-utf8 path in source directory: {}", rel_path.display())
        })?;
        match rel_str {
            // Never copied verbatim; a generated lockfile is added below
            // when requested.
            "Cargo.lock" => continue,
            // These names are reserved for files this function generates.
            VCS_INFO_FILE | ORIGINAL_MANIFEST_FILE => anyhow::bail!(
                "invalid inclusion of reserved file name {} in package source",
                rel_str
            ),
            _ => {
                result
                    .entry(UncasedAscii::new(rel_str))
                    .or_insert_with(Vec::new)
                    .push(ArchiveFile {
                        rel_path: rel_path.to_owned(),
                        rel_str: rel_str.to_owned(),
                        contents: FileContents::OnDisk(src_file.to_path_buf()),
                    });
            }
        }
    }

    // The original manifest is archived as `Cargo.toml.orig` and a
    // normalized version is generated under `Cargo.toml`.
    if result.remove(&UncasedAscii::new("Cargo.toml")).is_some() {
        result
            .entry(UncasedAscii::new(ORIGINAL_MANIFEST_FILE))
            .or_insert_with(Vec::new)
            .push(ArchiveFile {
                rel_path: PathBuf::from(ORIGINAL_MANIFEST_FILE),
                rel_str: ORIGINAL_MANIFEST_FILE.to_string(),
                contents: FileContents::OnDisk(pkg.manifest_path().to_owned()),
            });
        result
            .entry(UncasedAscii::new("Cargo.toml"))
            .or_insert_with(Vec::new)
            .push(ArchiveFile {
                rel_path: PathBuf::from("Cargo.toml"),
                rel_str: "Cargo.toml".to_string(),
                contents: FileContents::Generated(GeneratedFile::Manifest(
                    pkg.manifest_path().to_owned(),
                )),
            });
    } else {
        ws.gctx().shell().warn(&format!(
            "no `Cargo.toml` file found when packaging `{}` (note the case of the file name).",
            pkg.name()
        ))?;
    }

    if include_lockfile {
        // A lockfile is generated at archive time; an existing workspace
        // lockfile (if present) is passed along as the starting point.
        let lockfile_path = ws.lock_root().as_path_unlocked().join(LOCKFILE_NAME);
        let lockfile_path = lockfile_path.exists().then_some(lockfile_path);
        let rel_str = "Cargo.lock";
        result
            .entry(UncasedAscii::new(rel_str))
            .or_insert_with(Vec::new)
            .push(ArchiveFile {
                rel_path: PathBuf::from(rel_str),
                rel_str: rel_str.to_string(),
                contents: FileContents::Generated(GeneratedFile::Lockfile(lockfile_path)),
            });
    }

    if let Some(vcs_info) = vcs_info {
        let rel_str = VCS_INFO_FILE;
        result
            .entry(UncasedAscii::new(rel_str))
            .or_insert_with(Vec::new)
            .push(ArchiveFile {
                rel_path: PathBuf::from(rel_str),
                rel_str: rel_str.to_string(),
                contents: FileContents::Generated(GeneratedFile::VcsInfo(vcs_info)),
            });
    }

    // Collected error messages for bad `license-file`/`readme` entries;
    // reported together at the end.
    let mut invalid_manifest_field: Vec<String> = vec![];

    let mut result = result.into_values().flatten().collect();
    if let Some(license_file) = &pkg.manifest().metadata().license_file {
        let license_path = Path::new(license_file);
        let abs_file_path = paths::normalize_path(&pkg.root().join(license_path));
        if abs_file_path.is_file() {
            check_for_file_and_add(
                "license-file",
                license_path,
                abs_file_path,
                pkg,
                &mut result,
                ws,
            )?;
        } else {
            error_on_nonexistent_file(
                &pkg,
                &license_path,
                "license-file",
                &mut invalid_manifest_field,
            );
        }
    }
    if let Some(readme) = &pkg.manifest().metadata().readme {
        let readme_path = Path::new(readme);
        let abs_file_path = paths::normalize_path(&pkg.root().join(readme_path));
        if abs_file_path.is_file() {
            check_for_file_and_add("readme", readme_path, abs_file_path, pkg, &mut result, ws)?;
        } else {
            error_on_nonexistent_file(&pkg, &readme_path, "readme", &mut invalid_manifest_field);
        }
    }

    if !invalid_manifest_field.is_empty() {
        return Err(anyhow::anyhow!(invalid_manifest_field.join("\n")));
    }

    // A build script that is missing or lies outside the package root
    // cannot be packaged; error out early.
    for t in pkg
        .manifest()
        .targets()
        .iter()
        .filter(|t| t.is_custom_build())
    {
        if let Some(custom_build_path) = t.src_path().path() {
            let abs_custom_build_path = paths::normalize_path(&pkg.root().join(custom_build_path));
            if !abs_custom_build_path.is_file() || !abs_custom_build_path.starts_with(pkg.root()) {
                error_custom_build_file_not_in_package(pkg, &abs_custom_build_path, t)?;
            }
        }
    }

    // Deterministic ordering of entries in the archive.
    result.sort_unstable_by(|a, b| a.rel_path.cmp(&b.rel_path));

    Ok(result)
}
651
/// Adds the file named by a `license-file`/`readme` manifest entry (`label`)
/// to `result` if it isn't already present.
///
/// When the file lies outside the package root it is archived under its
/// bare file name in the package root; if a file with that name already
/// exists in the package, the existing file wins and a warning is emitted.
fn check_for_file_and_add(
    label: &str,
    file_path: &Path,
    abs_file_path: PathBuf,
    pkg: &Package,
    result: &mut Vec<ArchiveFile>,
    ws: &Workspace<'_>,
) -> CargoResult<()> {
    match abs_file_path.strip_prefix(&pkg.root()) {
        Ok(rel_file_path) => {
            // Inside the package root: include it unless the file walk
            // already picked it up.
            if !result.iter().any(|ar| ar.rel_path == rel_file_path) {
                result.push(ArchiveFile {
                    rel_path: rel_file_path.to_path_buf(),
                    rel_str: rel_file_path
                        .to_str()
                        .expect("everything was utf8")
                        .to_string(),
                    contents: FileContents::OnDisk(abs_file_path),
                })
            }
        }
        Err(_) => {
            // Outside the package root: archive it under its bare file
            // name (the caller has already verified the file exists).
            let file_name = file_path.file_name().unwrap();
            if result.iter().any(|ar| ar.rel_path == file_name) {
                ws.gctx().shell().warn(&format!(
                    "{} `{}` appears to be a path outside of the package, \
                     but there is already a file named `{}` in the root of the package. \
                     The archived crate will contain the copy in the root of the package. \
                     Update the {} to point to the path relative \
                     to the root of the package to remove this warning.",
                    label,
                    file_path.display(),
                    file_name.to_str().unwrap(),
                    label,
                ))?;
            } else {
                result.push(ArchiveFile {
                    rel_path: PathBuf::from(file_name),
                    rel_str: file_name.to_str().unwrap().to_string(),
                    contents: FileContents::OnDisk(abs_file_path),
                })
            }
        }
    }
    Ok(())
}
699
700fn error_on_nonexistent_file(
701 pkg: &Package,
702 path: &Path,
703 manifest_key_name: &'static str,
704 invalid: &mut Vec<String>,
705) {
706 let rel_msg = if path.is_absolute() {
707 "".to_string()
708 } else {
709 format!(" (relative to `{}`)", pkg.root().display())
710 };
711
712 let msg = format!(
713 "{manifest_key_name} `{}` does not appear to exist{}.\n\
714 Please update the {manifest_key_name} setting in the manifest at `{}`.",
715 path.display(),
716 rel_msg,
717 pkg.manifest_path().display()
718 );
719
720 invalid.push(msg);
721}
722
723fn error_custom_build_file_not_in_package(
724 pkg: &Package,
725 path: &Path,
726 target: &Target,
727) -> CargoResult<Vec<ArchiveFile>> {
728 let tip = {
729 let description_name = target.description_named();
730 if path.is_file() {
731 format!(
732 "the source file of {description_name} doesn't appear to be a path inside of the package.\n\
733 It is at `{}`, whereas the root the package is `{}`.\n",
734 path.display(),
735 pkg.root().display()
736 )
737 } else {
738 format!("the source file of {description_name} doesn't appear to exist.\n",)
739 }
740 };
741 let msg = format!(
742 "{}\
743 This may cause issue during packaging, as modules resolution and resources included via macros are often relative to the path of source files.\n\
744 Please update the `build` setting in the manifest at `{}` and point to a path inside the root of the package.",
745 tip,
746 pkg.manifest_path().display()
747 );
748 anyhow::bail!(msg)
749}
750
/// Generates the `Cargo.lock` contents to ship inside the package for
/// `publish_pkg` by resolving an ephemeral workspace consisting of just
/// that package.
fn build_lock(
    ws: &Workspace<'_>,
    opts: &PackageOpts<'_>,
    publish_pkg: &Package,
    local_reg: Option<&TmpRegistry<'_>>,
) -> CargoResult<String> {
    let gctx = ws.gctx();
    let mut orig_resolve = ops::load_pkg_lockfile(ws)?;

    let mut tmp_ws = Workspace::ephemeral(publish_pkg.clone(), ws.gctx(), None, true)?;

    // With a temporary registry of freshly packaged crates, overlay it on
    // the upstream registry so sibling packages resolve to those copies.
    if let Some(local_reg) = local_reg {
        tmp_ws.add_local_overlay(
            local_reg.upstream,
            local_reg.root.as_path_unlocked().to_owned(),
        );
        if opts.dry_run {
            // NOTE(review): in dry-run mode the freshly packaged crates'
            // checksums are patched into the original resolve — presumably
            // because nothing was actually uploaded upstream; confirm.
            if let Some(orig_resolve) = orig_resolve.as_mut() {
                let upstream_in_lock = if local_reg.upstream.is_crates_io() {
                    SourceId::crates_io(gctx)?
                } else {
                    local_reg.upstream
                };
                for (p, s) in local_reg.checksums() {
                    orig_resolve.set_checksum(p.with_source_id(upstream_in_lock), s.to_owned());
                }
            }
        }
    }
    let mut tmp_reg = tmp_ws.package_registry()?;

    // Resolve with all features and dev units enabled, seeded by the
    // original workspace resolve when available.
    let mut new_resolve = ops::resolve_with_previous(
        &mut tmp_reg,
        &tmp_ws,
        &CliFeatures::new_all(true),
        HasDevUnits::Yes,
        orig_resolve.as_ref(),
        None,
        &[],
        true,
    )?;

    let pkg_set = ops::get_resolved_packages(&new_resolve, tmp_reg)?;

    // In verbose mode, note differences from the workspace lockfile.
    if let Some(orig_resolve) = orig_resolve {
        compare_resolve(gctx, tmp_ws.current()?, &orig_resolve, &new_resolve)?;
    }
    check_yanked(
        gctx,
        &pkg_set,
        &new_resolve,
        "consider updating to a version that is not yanked",
    )?;

    ops::resolve_to_string(&tmp_ws, &mut new_resolve)
}
811
/// Warns when recommended package metadata (description, license,
/// documentation/homepage/repository) is missing from the manifest.
///
/// The warning is only emitted for crates.io (explicitly named or assumed
/// by default); named custom registries and bare indexes are exempt.
fn check_metadata(
    pkg: &Package,
    reg_or_index: Option<&RegistryOrIndex>,
    gctx: &GlobalContext,
) -> CargoResult<()> {
    let md = pkg.manifest().metadata();

    let mut missing = vec![];

    // `lacking!(a, b || c)` records each group whose members are all unset
    // or empty; a `||` group is satisfied by any one member being present.
    macro_rules! lacking {
        ($( $($field: ident)||* ),*) => {{
            $(
                if $(md.$field.as_ref().map_or(true, |s| s.is_empty()))&&* {
                    $(missing.push(stringify!($field).replace("_", "-"));)*
                }
            )*
        }}
    }
    lacking!(
        description,
        license || license_file,
        documentation || homepage || repository
    );

    if !missing.is_empty() {
        let should_warn = match reg_or_index {
            Some(RegistryOrIndex::Registry(reg_name)) => reg_name == CRATES_IO_REGISTRY,
            None => true,
            Some(RegistryOrIndex::Index(_)) => false,
        };

        if should_warn {
            // Join the missing fields as "a, b or c".
            let mut things = missing[..missing.len() - 1].join(", ");
            if !things.is_empty() {
                things.push_str(" or ");
            }
            things.push_str(missing.last().unwrap());

            gctx.shell().print_report(&[
                Level::WARNING.secondary_title(format!("manifest has no {things}"))
                .element(Level::NOTE.message("see https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info"))
            ],
            false
            )?
        }
    }

    Ok(())
}
866
867fn tar(
871 ws: &Workspace<'_>,
872 opts: &PackageOpts<'_>,
873 pkg: &Package,
874 local_reg: Option<&TmpRegistry<'_>>,
875 ar_files: Vec<ArchiveFile>,
876 dst: &File,
877 filename: &str,
878) -> CargoResult<u64> {
879 let filename = Path::new(filename);
881 let encoder = GzBuilder::new()
882 .filename(paths::path2bytes(filename)?)
883 .write(dst, Compression::best());
884
885 let mut ar = Builder::new(encoder);
887 ar.sparse(false);
888 let gctx = ws.gctx();
889
890 let base_name = format!("{}-{}", pkg.name(), pkg.version());
891 let base_path = Path::new(&base_name);
892 let included = ar_files
893 .iter()
894 .map(|ar_file| ar_file.rel_path.clone())
895 .collect::<Vec<_>>();
896 let publish_pkg = prepare_for_publish(pkg, ws, Some(&included))?;
897
898 let mut uncompressed_size = 0;
899 for ar_file in ar_files {
900 let ArchiveFile {
901 rel_path,
902 rel_str,
903 contents,
904 } = ar_file;
905 let ar_path = base_path.join(&rel_path);
906 gctx.shell()
907 .verbose(|shell| shell.status("Archiving", &rel_str))?;
908 let mut header = Header::new_gnu();
909 match contents {
910 FileContents::OnDisk(disk_path) => {
911 let mut file = File::open(&disk_path).with_context(|| {
912 format!("failed to open for archiving: `{}`", disk_path.display())
913 })?;
914 let metadata = file.metadata().with_context(|| {
915 format!("could not learn metadata for: `{}`", disk_path.display())
916 })?;
917 header.set_metadata_in_mode(&metadata, HeaderMode::Deterministic);
918 header.set_cksum();
919 ar.append_data(&mut header, &ar_path, &mut file)
920 .with_context(|| {
921 format!("could not archive source file `{}`", disk_path.display())
922 })?;
923 uncompressed_size += metadata.len() as u64;
924 }
925 FileContents::Generated(generated_kind) => {
926 let contents = match generated_kind {
927 GeneratedFile::Manifest(_) => {
928 publish_pkg.manifest().to_normalized_contents()?
929 }
930 GeneratedFile::Lockfile(_) => build_lock(ws, opts, &publish_pkg, local_reg)?,
931 GeneratedFile::VcsInfo(ref s) => serde_json::to_string_pretty(s)?,
932 };
933 header.set_entry_type(EntryType::file());
934 header.set_mode(0o644);
935 header.set_size(contents.len() as u64);
936 header.set_mtime(1153704088);
942 header.set_cksum();
943 ar.append_data(&mut header, &ar_path, contents.as_bytes())
944 .with_context(|| format!("could not archive source file `{}`", rel_str))?;
945 uncompressed_size += contents.len() as u64;
946 }
947 }
948 }
949
950 let encoder = ar.into_inner()?;
951 encoder.finish()?;
952 Ok(uncompressed_size)
953}
954
/// In verbose mode, prints a note for every package that appears in the
/// newly generated lockfile but not in the original one, including hints
/// about previous versions or sources when available.
fn compare_resolve(
    gctx: &GlobalContext,
    current_pkg: &Package,
    orig_resolve: &Resolve,
    new_resolve: &Resolve,
) -> CargoResult<()> {
    // This diagnostic output is only shown at verbose verbosity.
    if gctx.shell().verbosity() != Verbosity::Verbose {
        return Ok(());
    }
    let new_set: BTreeSet<PackageId> = new_resolve.iter().collect();
    let orig_set: BTreeSet<PackageId> = orig_resolve.iter().collect();
    let added = new_set.difference(&orig_set);
    // Entries present only in the original resolve, used below to explain
    // where each added entry may have come from.
    let removed: Vec<&PackageId> = orig_set.difference(&new_set).collect();
    for pkg_id in added {
        // The package being packaged itself is expected to differ; skip it.
        if pkg_id.name() == current_pkg.name() && pkg_id.version() == current_pkg.version() {
            continue;
        }
        // Same name and version removed => likely a source change.
        let removed_candidates: Vec<&PackageId> = removed
            .iter()
            .filter(|orig_pkg_id| {
                orig_pkg_id.name() == pkg_id.name() && orig_pkg_id.version() == pkg_id.version()
            })
            .cloned()
            .collect();
        let extra = match removed_candidates.len() {
            0 => {
                // No same-version match; report any previous versions of
                // the same package name.
                let previous_versions: Vec<&PackageId> = removed
                    .iter()
                    .filter(|orig_pkg_id| orig_pkg_id.name() == pkg_id.name())
                    .cloned()
                    .collect();
                match previous_versions.len() {
                    0 => String::new(),
                    1 => format!(
                        ", previous version was `{}`",
                        previous_versions[0].version()
                    ),
                    _ => format!(
                        ", previous versions were: {}",
                        previous_versions
                            .iter()
                            .map(|pkg_id| format!("`{}`", pkg_id.version()))
                            .collect::<Vec<_>>()
                            .join(", ")
                    ),
                }
            }
            1 => {
                format!(
                    ", was originally sourced from `{}`",
                    removed_candidates[0].source_id()
                )
            }
            _ => {
                // Multiple original sources for the same name@version.
                let comma_list = removed_candidates
                    .iter()
                    .map(|pkg_id| format!("`{}`", pkg_id.source_id()))
                    .collect::<Vec<_>>()
                    .join(", ");
                format!(
                    ", was originally sourced from one of these sources: {}",
                    comma_list
                )
            }
        };
        let msg = format!(
            "package `{}` added to the packaged Cargo.lock file{}",
            pkg_id, extra
        );
        gctx.shell().note(msg)?;
    }
    Ok(())
}
1041
/// Warns about any package in `resolve` that is yanked in its registry,
/// appending `hint` to each warning.
///
/// Yank status is queried via `Poll`-based source APIs: pending queries are
/// retried after blocking on the sources until every result is ready.
pub fn check_yanked(
    gctx: &GlobalContext,
    pkg_set: &PackageSet<'_>,
    resolve: &Resolve,
    hint: &str,
) -> CargoResult<()> {
    // Querying sources requires holding the package cache lock.
    let _lock = gctx.acquire_package_cache_lock(CacheLockMode::DownloadExclusive)?;

    let mut sources = pkg_set.sources_mut();
    let mut pending: Vec<PackageId> = resolve.iter().collect();
    let mut results = Vec::new();
    // Invalidate source caches so fresh yank information is fetched.
    for (_id, source) in sources.sources_mut() {
        source.invalidate_cache();
    }
    // Drain `pending`: ready results are collected, not-ready ids are kept,
    // then every source is blocked on before the next pass.
    while !pending.is_empty() {
        pending.retain(|pkg_id| {
            if let Some(source) = sources.get_mut(pkg_id.source_id()) {
                match source.is_yanked(*pkg_id) {
                    Poll::Ready(result) => results.push((*pkg_id, result)),
                    Poll::Pending => return true,
                }
            }
            false
        });
        for (_id, source) in sources.sources_mut() {
            source.block_until_ready()?;
        }
    }

    for (pkg_id, is_yanked) in results {
        if is_yanked? {
            gctx.shell().warn(format!(
                "package `{}` in Cargo.lock is yanked in registry `{}`, {}",
                pkg_id,
                pkg_id.source_id().display_registry_name(),
                hint
            ))?;
        }
    }
    Ok(())
}
1085
1086fn check_filename(file: &Path, shell: &mut Shell) -> CargoResult<()> {
1093 let Some(name) = file.file_name() else {
1094 return Ok(());
1095 };
1096 let Some(name) = name.to_str() else {
1097 anyhow::bail!(
1098 "path does not have a unicode filename which may not unpack \
1099 on all platforms: {}",
1100 file.display()
1101 )
1102 };
1103 let bad_chars = ['/', '\\', '<', '>', ':', '"', '|', '?', '*'];
1104 if let Some(c) = bad_chars.iter().find(|c| name.contains(**c)) {
1105 anyhow::bail!(
1106 "cannot package a filename with a special character `{}`: {}",
1107 c,
1108 file.display()
1109 )
1110 }
1111 if restricted_names::is_windows_reserved_path(file) {
1112 shell.warn(format!(
1113 "file {} is a reserved Windows filename, \
1114 it will not work on Windows platforms",
1115 file.display()
1116 ))?;
1117 }
1118 Ok(())
1119}
1120
/// A temporary on-disk registry used to overlay freshly packaged crates on
/// top of an upstream registry while packaging their dependents.
struct TmpRegistry<'a> {
    gctx: &'a GlobalContext,
    /// The real registry this temporary one stands in for.
    upstream: SourceId,
    /// Root directory holding the tarballs and the `index/` subdirectory.
    root: Filesystem,
    /// SHA-256 hex checksum of each added package's tarball.
    checksums: HashMap<PackageId, String>,
    /// Held for the registry's lifetime to guard `root`.
    _lock: FileLock,
}
1131
1132impl<'a> TmpRegistry<'a> {
1133 fn new(gctx: &'a GlobalContext, root: Filesystem, upstream: SourceId) -> CargoResult<Self> {
1134 root.create_dir()?;
1135 let _lock = root.open_rw_exclusive_create(".cargo-lock", gctx, "temporary registry")?;
1136 let slf = Self {
1137 gctx,
1138 root,
1139 upstream,
1140 checksums: HashMap::new(),
1141 _lock,
1142 };
1143 let index_path = slf.index_path().into_path_unlocked();
1145 if index_path.exists() {
1146 paths::remove_dir_all(index_path)?;
1147 }
1148 slf.index_path().create_dir()?;
1149 Ok(slf)
1150 }
1151
1152 fn index_path(&self) -> Filesystem {
1153 self.root.join("index")
1154 }
1155
1156 fn add_package(
1157 &mut self,
1158 ws: &Workspace<'_>,
1159 package: &Package,
1160 tar: &FileLock,
1161 ) -> CargoResult<()> {
1162 debug!(
1163 "adding package {}@{} to local overlay at {}",
1164 package.name(),
1165 package.version(),
1166 self.root.as_path_unlocked().display()
1167 );
1168 {
1169 let mut tar_copy = self.root.open_rw_exclusive_create(
1170 package.package_id().tarball_name(),
1171 self.gctx,
1172 "temporary package registry",
1173 )?;
1174 tar.file().seek(SeekFrom::Start(0))?;
1175 std::io::copy(&mut tar.file(), &mut tar_copy)?;
1176 tar_copy.flush()?;
1177 }
1178
1179 let new_crate = super::registry::prepare_transmit(self.gctx, ws, package, self.upstream)?;
1180
1181 tar.file().seek(SeekFrom::Start(0))?;
1182 let cksum = cargo_util::Sha256::new()
1183 .update_file(tar.file())?
1184 .finish_hex();
1185
1186 self.checksums.insert(package.package_id(), cksum.clone());
1187
1188 let deps: Vec<_> = new_crate
1189 .deps
1190 .into_iter()
1191 .map(|dep| {
1192 let name = dep
1193 .explicit_name_in_toml
1194 .clone()
1195 .unwrap_or_else(|| dep.name.clone())
1196 .into();
1197 let package = dep
1198 .explicit_name_in_toml
1199 .as_ref()
1200 .map(|_| dep.name.clone().into());
1201 RegistryDependency {
1202 name: name,
1203 req: dep.version_req.into(),
1204 features: dep.features.into_iter().map(|x| x.into()).collect(),
1205 optional: dep.optional,
1206 default_features: dep.default_features,
1207 target: dep.target.map(|x| x.into()),
1208 kind: Some(dep.kind.into()),
1209 registry: dep.registry.map(|x| x.into()),
1210 package: package,
1211 public: None,
1212 artifact: dep
1213 .artifact
1214 .map(|xs| xs.into_iter().map(|x| x.into()).collect()),
1215 bindep_target: dep.bindep_target.map(|x| x.into()),
1216 lib: dep.lib,
1217 }
1218 })
1219 .collect();
1220
1221 let index_line = serde_json::to_string(&IndexPackage {
1222 name: new_crate.name.into(),
1223 vers: package.version().clone(),
1224 deps,
1225 features: new_crate
1226 .features
1227 .into_iter()
1228 .map(|(k, v)| (k.into(), v.into_iter().map(|x| x.into()).collect()))
1229 .collect(),
1230 features2: None,
1231 cksum,
1232 yanked: None,
1233 links: new_crate.links.map(|x| x.into()),
1234 rust_version: None,
1235 pubtime: None,
1236 v: Some(2),
1237 })?;
1238
1239 let file =
1240 cargo_util::registry::make_dep_path(&package.name().as_str().to_lowercase(), false);
1241 let mut dst = self.index_path().open_rw_exclusive_create(
1242 file,
1243 self.gctx,
1244 "temporary package registry",
1245 )?;
1246 dst.write_all(index_line.as_bytes())?;
1247 Ok(())
1248 }
1249
1250 fn checksums(&self) -> impl Iterator<Item = (PackageId, &str)> {
1251 self.checksums.iter().map(|(p, s)| (*p, s.as_str()))
1252 }
1253}