use std::collections::{BTreeSet, HashMap};
use std::fs::{self, File};
use std::io::prelude::*;
use std::io::SeekFrom;
use std::path::{Path, PathBuf};
use std::task::Poll;

use crate::core::dependency::DepKind;
use crate::core::manifest::Target;
use crate::core::resolver::CliFeatures;
use crate::core::resolver::HasDevUnits;
use crate::core::PackageIdSpecQuery;
use crate::core::Shell;
use crate::core::Verbosity;
use crate::core::Workspace;
use crate::core::{Package, PackageId, PackageSet, Resolve, SourceId};
use crate::ops::lockfile::LOCKFILE_NAME;
use crate::ops::registry::{infer_registry, RegistryOrIndex};
use crate::sources::path::PathEntry;
use crate::sources::registry::index::{IndexPackage, RegistryDependency};
use crate::sources::{PathSource, CRATES_IO_REGISTRY};
use crate::util::cache_lock::CacheLockMode;
use crate::util::context::JobsConfig;
use crate::util::errors::CargoResult;
use crate::util::restricted_names;
use crate::util::toml::prepare_for_publish;
use crate::util::FileLock;
use crate::util::Filesystem;
use crate::util::GlobalContext;
use crate::util::Graph;
use crate::util::HumanBytes;
use crate::{drop_println, ops};
use anyhow::{bail, Context as _};
use cargo_util::paths;
use flate2::{Compression, GzBuilder};
use tar::{Builder, EntryType, Header, HeaderMode};
use tracing::debug;
use unicase::Ascii as UncasedAscii;

mod vcs;
mod verify;

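/// Options for the `package` operation.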
#[derive(Clone)]
pub struct PackageOpts<'gctx> {
    pub gctx: &'gctx GlobalContext,
    pub list: bool,
    pub check_metadata: bool,
    pub allow_dirty: bool,
    pub include_lockfile: bool,
    pub verify: bool,
    pub jobs: Option<JobsConfig>,
    pub keep_going: bool,
    pub to_package: ops::Packages,
    pub targets: Vec<String>,
    pub cli_features: CliFeatures,
    pub reg_or_index: Option<ops::RegistryOrIndex>,
}

const ORIGINAL_MANIFEST_FILE: &str = "Cargo.toml.orig";
const VCS_INFO_FILE: &str = ".cargo_vcs_info.json";

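/// A single file to be added to the generated `.crate` tarball.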
struct ArchiveFile {
    rel_path: PathBuf,
    rel_str: String,
    contents: FileContents,
}

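/// The source of an [`ArchiveFile`]'s contents: either a file already on
/// disk, or one generated by Cargo while packaging.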
enum FileContents {
    OnDisk(PathBuf),
    Generated(GeneratedFile),
}

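/// A file whose contents are generated at packaging time rather than copied
/// from the package source directory.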
enum GeneratedFile {
    Manifest,
    Lockfile,
    VcsInfo(vcs::VcsInfo),
}

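/// Assembles the `.crate` tarball from the given list of archive files and
/// places it under `target/package`, returning a lock on the finished file.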
#[tracing::instrument(skip_all)]
fn create_package(
    ws: &Workspace<'_>,
    pkg: &Package,
    ar_files: Vec<ArchiveFile>,
    local_reg: Option<&TmpRegistry<'_>>,
) -> CargoResult<FileLock> {
    let gctx = ws.gctx();
    let filecount = ar_files.len();

    for dep in pkg.dependencies() {
        super::check_dep_has_version(dep, false)?;
    }

    let filename = pkg.package_id().tarball_name();
    let dir = ws.target_dir().join("package");
    let mut dst = {
        let tmp = format!(".{}", filename);
        dir.open_rw_exclusive_create(&tmp, gctx, "package scratch space")?
    };

    gctx.shell()
        .status("Packaging", pkg.package_id().to_string())?;
    dst.file().set_len(0)?;
    let uncompressed_size = tar(ws, pkg, local_reg, ar_files, dst.file(), &filename)
        .context("failed to prepare local package for uploading")?;

    dst.seek(SeekFrom::Start(0))?;
    let src_path = dst.path();
    let dst_path = dst.parent().join(&filename);
    fs::rename(&src_path, &dst_path)
        .context("failed to move temporary tarball into final location")?;

    let dst_metadata = dst
        .file()
        .metadata()
        .with_context(|| format!("could not learn metadata for: `{}`", dst_path.display()))?;
    let compressed_size = dst_metadata.len();

    let uncompressed = HumanBytes(uncompressed_size);
    let compressed = HumanBytes(compressed_size);

    let message = format!("{filecount} files, {uncompressed:.1} ({compressed:.1} compressed)");
    drop(gctx.shell().status("Packaged", message));

    return Ok(dst);
}

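/// Packages the selected workspace members into `.crate` files, returning
/// locks on the generated tarballs. When `opts.list` is set nothing is
/// written and the returned list is empty.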
pub fn package(ws: &Workspace<'_>, opts: &PackageOpts<'_>) -> CargoResult<Vec<FileLock>> {
    let specs = &opts.to_package.to_package_id_specs(ws)?;
    if let ops::Packages::Packages(_) = opts.to_package {
        for spec in specs.iter() {
            let member_ids = ws.members().map(|p| p.package_id());
            spec.query(member_ids)?;
        }
    }
    let mut pkgs = ws.members_with_features(specs, &opts.cli_features)?;

    pkgs.retain(|(pkg, _feats)| specs.iter().any(|spec| spec.matches(pkg.package_id())));

    Ok(do_package(ws, opts, pkgs)?
        .into_iter()
        .map(|x| x.2)
        .collect())
}

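/// Like [`package`], but also returns the graph of local inter-package
/// dependencies, so callers can process the generated tarballs in dependency
/// order.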
pub(crate) fn package_with_dep_graph(
    ws: &Workspace<'_>,
    opts: &PackageOpts<'_>,
    pkgs: Vec<(&Package, CliFeatures)>,
) -> CargoResult<LocalDependencies<(CliFeatures, FileLock)>> {
    let output = do_package(ws, opts, pkgs)?;

    Ok(local_deps(output.into_iter().map(
        |(pkg, opts, tarball)| (pkg, (opts.cli_features, tarball)),
    )))
}

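/// Shared implementation of [`package`] and [`package_with_dep_graph`]:
/// prepares the file list for each selected package and, unless `opts.list`
/// is set, writes the tarballs (verifying them afterwards if requested).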
fn do_package<'a>(
    ws: &Workspace<'_>,
    opts: &PackageOpts<'a>,
    pkgs: Vec<(&Package, CliFeatures)>,
) -> CargoResult<Vec<(Package, PackageOpts<'a>, FileLock)>> {
    if ws
        .lock_root()
        .as_path_unlocked()
        .join(LOCKFILE_NAME)
        .exists()
        && opts.include_lockfile
    {
        let dry_run = false;
        let _ = ops::resolve_ws(ws, dry_run)?;
    }

    let deps = local_deps(pkgs.iter().map(|(p, f)| ((*p).clone(), f.clone())));
    let just_pkgs: Vec<_> = pkgs.iter().map(|p| p.0).collect();

    let sid = if deps.has_no_dependencies() && opts.reg_or_index.is_none() {
        None
    } else {
        let sid = get_registry(ws.gctx(), &just_pkgs, opts.reg_or_index.clone())?;
        debug!("packaging for registry {}", sid);
        Some(sid)
    };

    let mut local_reg = if ws.gctx().cli_unstable().package_workspace {
        let reg_dir = ws.build_dir().join("package").join("tmp-registry");
        sid.map(|sid| TmpRegistry::new(ws.gctx(), reg_dir, sid))
            .transpose()?
    } else {
        None
    };

    let sorted_pkgs = deps.sort();
    let mut outputs: Vec<(Package, PackageOpts<'_>, FileLock)> = Vec::new();
    for (pkg, cli_features) in sorted_pkgs {
        let opts = PackageOpts {
            cli_features: cli_features.clone(),
            to_package: ops::Packages::Default,
            ..opts.clone()
        };
        let ar_files = prepare_archive(ws, &pkg, &opts)?;

        if opts.list {
            for ar_file in &ar_files {
                drop_println!(ws.gctx(), "{}", ar_file.rel_str);
            }
        } else {
            let tarball = create_package(ws, &pkg, ar_files, local_reg.as_ref())?;
            if let Some(local_reg) = local_reg.as_mut() {
                if pkg.publish() != &Some(Vec::new()) {
                    local_reg.add_package(ws, &pkg, &tarball)?;
                }
            }
            outputs.push((pkg, opts, tarball));
        }
    }

    if opts.verify {
        for (pkg, opts, tarball) in &outputs {
            verify::run_verify(ws, pkg, tarball, local_reg.as_ref(), opts)
                .context("failed to verify package tarball")?
        }
    }

    Ok(outputs)
}

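/// Returns the registry [`SourceId`] the packages will be published to,
/// either the explicitly requested registry/index or one inferred from the
/// packages' `package.publish` settings, and errors if any package does not
/// allow publishing to that registry.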
fn get_registry(
    gctx: &GlobalContext,
    pkgs: &[&Package],
    reg_or_index: Option<RegistryOrIndex>,
) -> CargoResult<SourceId> {
    let reg_or_index = match reg_or_index.clone() {
        Some(r) => Some(r),
        None => infer_registry(pkgs)?,
    };

    let reg = reg_or_index
        .clone()
        .unwrap_or_else(|| RegistryOrIndex::Registry(CRATES_IO_REGISTRY.to_owned()));
    if let RegistryOrIndex::Registry(reg_name) = reg {
        for pkg in pkgs {
            if let Some(allowed) = pkg.publish().as_ref() {
                if !allowed.is_empty() && !allowed.iter().any(|a| a == &reg_name) {
                    bail!(
                        "`{}` cannot be packaged.\n\
                         The registry `{}` is not listed in the `package.publish` value in Cargo.toml.",
                        pkg.name(),
                        reg_name
                    );
                }
            }
        }
    }
    Ok(ops::registry::get_source_id(gctx, reg_or_index.as_ref())?.replacement)
}

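/// The path-dependency graph of the packages being packaged, with an
/// arbitrary payload `T` attached to each package.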
#[derive(Clone, Debug, Default)]
pub(crate) struct LocalDependencies<T> {
    pub packages: HashMap<PackageId, (Package, T)>,
    pub graph: Graph<PackageId, ()>,
}

impl<T: Clone> LocalDependencies<T> {
    pub fn sort(&self) -> Vec<(Package, T)> {
        self.graph
            .sort()
            .into_iter()
            .map(|name| self.packages[&name].clone())
            .collect()
    }

    pub fn has_no_dependencies(&self) -> bool {
        self.graph
            .iter()
            .all(|node| self.graph.edges(node).next().is_none())
    }
}

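/// Builds the [`LocalDependencies`] graph for the given packages, linking one
/// package to another only through non-dev `path` dependencies that are
/// themselves part of the set being packaged.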
fn local_deps<T>(packages: impl Iterator<Item = (Package, T)>) -> LocalDependencies<T> {
    let packages: HashMap<PackageId, (Package, T)> = packages
        .map(|(pkg, payload)| (pkg.package_id(), (pkg, payload)))
        .collect();

    let source_to_pkg: HashMap<_, _> = packages
        .keys()
        .map(|pkg_id| (pkg_id.source_id(), *pkg_id))
        .collect();

    let mut graph = Graph::new();
    for (pkg, _payload) in packages.values() {
        graph.add(pkg.package_id());
        for dep in pkg.dependencies() {
            if dep.kind() == DepKind::Development || !dep.source_id().is_path() {
                continue;
            };

            if let Some(dep_pkg) = source_to_pkg.get(&dep.source_id()) {
                graph.link(pkg.package_id(), *dep_pkg);
            }
        }
    }

    LocalDependencies { packages, graph }
}

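/// Performs pre-archiving checks and builds the list of files to include in
/// the archive, without reading their contents.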
#[tracing::instrument(skip_all)]
fn prepare_archive(
    ws: &Workspace<'_>,
    pkg: &Package,
    opts: &PackageOpts<'_>,
) -> CargoResult<Vec<ArchiveFile>> {
    let gctx = ws.gctx();
    let mut src = PathSource::new(pkg.root(), pkg.package_id().source_id(), gctx);
    src.load()?;

    if opts.check_metadata {
        check_metadata(pkg, gctx)?;
    }

    if !pkg.manifest().exclude().is_empty() && !pkg.manifest().include().is_empty() {
        gctx.shell().warn(
            "both package.include and package.exclude are specified; \
             the exclude list will be ignored",
        )?;
    }
    let src_files = src.list_files(pkg)?;

    let vcs_info = vcs::check_repo_state(pkg, &src_files, ws, &opts)?;

    build_ar_list(ws, pkg, src_files, vcs_info, opts.include_lockfile)
}

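/// Builds the final list of files to archive, adding the generated files
/// (`Cargo.toml`, `Cargo.toml.orig`, optionally `Cargo.lock` and
/// `.cargo_vcs_info.json`) and validating the `license-file`, `readme`, and
/// custom build script paths.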
#[tracing::instrument(skip_all)]
fn build_ar_list(
    ws: &Workspace<'_>,
    pkg: &Package,
    src_files: Vec<PathEntry>,
    vcs_info: Option<vcs::VcsInfo>,
    include_lockfile: bool,
) -> CargoResult<Vec<ArchiveFile>> {
    let mut result = HashMap::new();
    let root = pkg.root();
    for src_file in &src_files {
        let rel_path = src_file.strip_prefix(&root)?;
        check_filename(rel_path, &mut ws.gctx().shell())?;
        let rel_str = rel_path.to_str().ok_or_else(|| {
            anyhow::format_err!("non-utf8 path in source directory: {}", rel_path.display())
        })?;
        match rel_str {
            "Cargo.lock" => continue,
            VCS_INFO_FILE | ORIGINAL_MANIFEST_FILE => anyhow::bail!(
                "invalid inclusion of reserved file name {} in package source",
                rel_str
            ),
            _ => {
                result
                    .entry(UncasedAscii::new(rel_str))
                    .or_insert_with(Vec::new)
                    .push(ArchiveFile {
                        rel_path: rel_path.to_owned(),
                        rel_str: rel_str.to_owned(),
                        contents: FileContents::OnDisk(src_file.to_path_buf()),
                    });
            }
        }
    }

    if result.remove(&UncasedAscii::new("Cargo.toml")).is_some() {
        result
            .entry(UncasedAscii::new(ORIGINAL_MANIFEST_FILE))
            .or_insert_with(Vec::new)
            .push(ArchiveFile {
                rel_path: PathBuf::from(ORIGINAL_MANIFEST_FILE),
                rel_str: ORIGINAL_MANIFEST_FILE.to_string(),
                contents: FileContents::OnDisk(pkg.manifest_path().to_owned()),
            });
        result
            .entry(UncasedAscii::new("Cargo.toml"))
            .or_insert_with(Vec::new)
            .push(ArchiveFile {
                rel_path: PathBuf::from("Cargo.toml"),
                rel_str: "Cargo.toml".to_string(),
                contents: FileContents::Generated(GeneratedFile::Manifest),
            });
    } else {
        ws.gctx().shell().warn(&format!(
            "no `Cargo.toml` file found when packaging `{}` (note the case of the file name).",
            pkg.name()
        ))?;
    }

    if include_lockfile {
        let rel_str = "Cargo.lock";
        result
            .entry(UncasedAscii::new(rel_str))
            .or_insert_with(Vec::new)
            .push(ArchiveFile {
                rel_path: PathBuf::from(rel_str),
                rel_str: rel_str.to_string(),
                contents: FileContents::Generated(GeneratedFile::Lockfile),
            });
    }

    if let Some(vcs_info) = vcs_info {
        let rel_str = VCS_INFO_FILE;
        result
            .entry(UncasedAscii::new(rel_str))
            .or_insert_with(Vec::new)
            .push(ArchiveFile {
                rel_path: PathBuf::from(rel_str),
                rel_str: rel_str.to_string(),
                contents: FileContents::Generated(GeneratedFile::VcsInfo(vcs_info)),
            });
    }

    let mut invalid_manifest_field: Vec<String> = vec![];

    let mut result = result.into_values().flatten().collect();
    if let Some(license_file) = &pkg.manifest().metadata().license_file {
        let license_path = Path::new(license_file);
        let abs_file_path = paths::normalize_path(&pkg.root().join(license_path));
        if abs_file_path.is_file() {
            check_for_file_and_add(
                "license-file",
                license_path,
                abs_file_path,
                pkg,
                &mut result,
                ws,
            )?;
        } else {
            error_on_nonexistent_file(
                &pkg,
                &license_path,
                "license-file",
                &mut invalid_manifest_field,
            );
        }
    }
    if let Some(readme) = &pkg.manifest().metadata().readme {
        let readme_path = Path::new(readme);
        let abs_file_path = paths::normalize_path(&pkg.root().join(readme_path));
        if abs_file_path.is_file() {
            check_for_file_and_add("readme", readme_path, abs_file_path, pkg, &mut result, ws)?;
        } else {
            error_on_nonexistent_file(&pkg, &readme_path, "readme", &mut invalid_manifest_field);
        }
    }

    if !invalid_manifest_field.is_empty() {
        return Err(anyhow::anyhow!(invalid_manifest_field.join("\n")));
    }

    for t in pkg
        .manifest()
        .targets()
        .iter()
        .filter(|t| t.is_custom_build())
    {
        if let Some(custom_build_path) = t.src_path().path() {
            let abs_custom_build_path =
                paths::normalize_path(&pkg.root().join(custom_build_path));
            if !abs_custom_build_path.is_file() || !abs_custom_build_path.starts_with(pkg.root())
            {
                error_custom_build_file_not_in_package(pkg, &abs_custom_build_path, t)?;
            }
        }
    }

    result.sort_unstable_by(|a, b| a.rel_path.cmp(&b.rel_path));

    Ok(result)
}

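/// Adds the named file (the `license-file` or `readme`) to the archive list
/// if it is not already included, warning when the file lives outside the
/// package root but a file with the same name already exists at the root.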
fn check_for_file_and_add(
    label: &str,
    file_path: &Path,
    abs_file_path: PathBuf,
    pkg: &Package,
    result: &mut Vec<ArchiveFile>,
    ws: &Workspace<'_>,
) -> CargoResult<()> {
    match abs_file_path.strip_prefix(&pkg.root()) {
        Ok(rel_file_path) => {
            if !result.iter().any(|ar| ar.rel_path == rel_file_path) {
                result.push(ArchiveFile {
                    rel_path: rel_file_path.to_path_buf(),
                    rel_str: rel_file_path
                        .to_str()
                        .expect("everything was utf8")
                        .to_string(),
                    contents: FileContents::OnDisk(abs_file_path),
                })
            }
        }
        Err(_) => {
            let file_name = file_path.file_name().unwrap();
            if result.iter().any(|ar| ar.rel_path == file_name) {
                ws.gctx().shell().warn(&format!(
                    "{} `{}` appears to be a path outside of the package, \
                     but there is already a file named `{}` in the root of the package. \
                     The archived crate will contain the copy in the root of the package. \
                     Update the {} to point to the path relative \
                     to the root of the package to remove this warning.",
                    label,
                    file_path.display(),
                    file_name.to_str().unwrap(),
                    label,
                ))?;
            } else {
                result.push(ArchiveFile {
                    rel_path: PathBuf::from(file_name),
                    rel_str: file_name.to_str().unwrap().to_string(),
                    contents: FileContents::OnDisk(abs_file_path),
                })
            }
        }
    }
    Ok(())
}

fn error_on_nonexistent_file(
    pkg: &Package,
    path: &Path,
    manifest_key_name: &'static str,
    invalid: &mut Vec<String>,
) {
    let rel_msg = if path.is_absolute() {
        "".to_string()
    } else {
        format!(" (relative to `{}`)", pkg.root().display())
    };

    let msg = format!(
        "{manifest_key_name} `{}` does not appear to exist{}.\n\
         Please update the {manifest_key_name} setting in the manifest at `{}`.",
        path.display(),
        rel_msg,
        pkg.manifest_path().display()
    );

    invalid.push(msg);
}

fn error_custom_build_file_not_in_package(
    pkg: &Package,
    path: &Path,
    target: &Target,
) -> CargoResult<Vec<ArchiveFile>> {
    let tip = {
        let description_name = target.description_named();
        if path.is_file() {
            format!("the source file of {description_name} doesn't appear to be a path inside of the package.\n\
            It is at `{}`, whereas the root of the package is `{}`.\n",
            path.display(), pkg.root().display()
            )
        } else {
            format!("the source file of {description_name} doesn't appear to exist.\n",)
        }
    };
    let msg = format!(
        "{}\
        This may cause issues during packaging, as module resolution and resources included via macros are often relative to the path of source files.\n\
        Please update the `build` setting in the manifest at `{}` and point to a path inside the root of the package.",
        tip, pkg.manifest_path().display()
    );
    anyhow::bail!(msg)
}

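/// Resolves a fresh `Cargo.lock` for the package being published, in an
/// ephemeral workspace that is optionally overlaid with the temporary local
/// registry, warning about differences from the original lockfile and about
/// yanked dependencies, and returns it serialized to a string.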
fn build_lock(
    ws: &Workspace<'_>,
    publish_pkg: &Package,
    local_reg: Option<&TmpRegistry<'_>>,
) -> CargoResult<String> {
    let gctx = ws.gctx();
    let orig_resolve = ops::load_pkg_lockfile(ws)?;

    let mut tmp_ws = Workspace::ephemeral(publish_pkg.clone(), ws.gctx(), None, true)?;

    if let Some(local_reg) = local_reg {
        tmp_ws.add_local_overlay(
            local_reg.upstream,
            local_reg.root.as_path_unlocked().to_owned(),
        );
    }
    let mut tmp_reg = tmp_ws.package_registry()?;

    let mut new_resolve = ops::resolve_with_previous(
        &mut tmp_reg,
        &tmp_ws,
        &CliFeatures::new_all(true),
        HasDevUnits::Yes,
        orig_resolve.as_ref(),
        None,
        &[],
        true,
    )?;

    let pkg_set = ops::get_resolved_packages(&new_resolve, tmp_reg)?;

    if let Some(orig_resolve) = orig_resolve {
        compare_resolve(gctx, tmp_ws.current()?, &orig_resolve, &new_resolve)?;
    }
    check_yanked(
        gctx,
        &pkg_set,
        &new_resolve,
        "consider updating to a version that is not yanked",
    )?;

    ops::resolve_to_string(&tmp_ws, &mut new_resolve)
}

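/// Warns if important manifest metadata (description, license, documentation,
/// homepage, or repository) is missing.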
fn check_metadata(pkg: &Package, gctx: &GlobalContext) -> CargoResult<()> {
    let md = pkg.manifest().metadata();

    let mut missing = vec![];

    macro_rules! lacking {
        ($( $($field: ident)||* ),*) => {{
            $(
                if $(md.$field.as_ref().map_or(true, |s| s.is_empty()))&&* {
                    $(missing.push(stringify!($field).replace("_", "-"));)*
                }
            )*
        }}
    }
    lacking!(
        description,
        license || license_file,
        documentation || homepage || repository
    );

    if !missing.is_empty() {
        let mut things = missing[..missing.len() - 1].join(", ");
        if !things.is_empty() {
            things.push_str(" or ");
        }
        things.push_str(missing.last().unwrap());

        gctx.shell().warn(&format!(
            "manifest has no {things}.\n\
             See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info.",
            things = things
        ))?
    }

    Ok(())
}

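/// Writes the archive files into a gzip-compressed tarball at `dst`,
/// generating `Cargo.toml`, `Cargo.lock`, and the VCS info file on the fly,
/// and returns the total uncompressed size of the archived contents.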
fn tar(
    ws: &Workspace<'_>,
    pkg: &Package,
    local_reg: Option<&TmpRegistry<'_>>,
    ar_files: Vec<ArchiveFile>,
    dst: &File,
    filename: &str,
) -> CargoResult<u64> {
    let filename = Path::new(filename);
    let encoder = GzBuilder::new()
        .filename(paths::path2bytes(filename)?)
        .write(dst, Compression::best());

    let mut ar = Builder::new(encoder);
    ar.sparse(false);
    let gctx = ws.gctx();

    let base_name = format!("{}-{}", pkg.name(), pkg.version());
    let base_path = Path::new(&base_name);
    let included = ar_files
        .iter()
        .map(|ar_file| ar_file.rel_path.clone())
        .collect::<Vec<_>>();
    let publish_pkg = prepare_for_publish(pkg, ws, Some(&included))?;

    let mut uncompressed_size = 0;
    for ar_file in ar_files {
        let ArchiveFile {
            rel_path,
            rel_str,
            contents,
        } = ar_file;
        let ar_path = base_path.join(&rel_path);
        gctx.shell()
            .verbose(|shell| shell.status("Archiving", &rel_str))?;
        let mut header = Header::new_gnu();
        match contents {
            FileContents::OnDisk(disk_path) => {
                let mut file = File::open(&disk_path).with_context(|| {
                    format!("failed to open for archiving: `{}`", disk_path.display())
                })?;
                let metadata = file.metadata().with_context(|| {
                    format!("could not learn metadata for: `{}`", disk_path.display())
                })?;
                header.set_metadata_in_mode(&metadata, HeaderMode::Deterministic);
                header.set_cksum();
                ar.append_data(&mut header, &ar_path, &mut file)
                    .with_context(|| {
                        format!("could not archive source file `{}`", disk_path.display())
                    })?;
                uncompressed_size += metadata.len() as u64;
            }
            FileContents::Generated(generated_kind) => {
                let contents = match generated_kind {
                    GeneratedFile::Manifest => publish_pkg.manifest().to_normalized_contents()?,
                    GeneratedFile::Lockfile => build_lock(ws, &publish_pkg, local_reg)?,
                    GeneratedFile::VcsInfo(ref s) => serde_json::to_string_pretty(s)?,
                };
                header.set_entry_type(EntryType::file());
                header.set_mode(0o644);
                header.set_size(contents.len() as u64);
                header.set_mtime(1);
                header.set_cksum();
                ar.append_data(&mut header, &ar_path, contents.as_bytes())
                    .with_context(|| format!("could not archive source file `{}`", rel_str))?;
                uncompressed_size += contents.len() as u64;
            }
        }
    }

    let encoder = ar.into_inner()?;
    encoder.finish()?;
    Ok(uncompressed_size)
}

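/// In verbose mode, prints a note for every package that appears in the new
/// lockfile but not in the original one, along with where it may have come
/// from.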
fn compare_resolve(
    gctx: &GlobalContext,
    current_pkg: &Package,
    orig_resolve: &Resolve,
    new_resolve: &Resolve,
) -> CargoResult<()> {
    if gctx.shell().verbosity() != Verbosity::Verbose {
        return Ok(());
    }
    let new_set: BTreeSet<PackageId> = new_resolve.iter().collect();
    let orig_set: BTreeSet<PackageId> = orig_resolve.iter().collect();
    let added = new_set.difference(&orig_set);
    let removed: Vec<&PackageId> = orig_set.difference(&new_set).collect();
    for pkg_id in added {
        if pkg_id.name() == current_pkg.name() && pkg_id.version() == current_pkg.version() {
            continue;
        }
        let removed_candidates: Vec<&PackageId> = removed
            .iter()
            .filter(|orig_pkg_id| {
                orig_pkg_id.name() == pkg_id.name() && orig_pkg_id.version() == pkg_id.version()
            })
            .cloned()
            .collect();
        let extra = match removed_candidates.len() {
            0 => {
                let previous_versions: Vec<&PackageId> = removed
                    .iter()
                    .filter(|orig_pkg_id| orig_pkg_id.name() == pkg_id.name())
                    .cloned()
                    .collect();
                match previous_versions.len() {
                    0 => String::new(),
                    1 => format!(
                        ", previous version was `{}`",
                        previous_versions[0].version()
                    ),
                    _ => format!(
                        ", previous versions were: {}",
                        previous_versions
                            .iter()
                            .map(|pkg_id| format!("`{}`", pkg_id.version()))
                            .collect::<Vec<_>>()
                            .join(", ")
                    ),
                }
            }
            1 => {
                format!(
                    ", was originally sourced from `{}`",
                    removed_candidates[0].source_id()
                )
            }
            _ => {
                let comma_list = removed_candidates
                    .iter()
                    .map(|pkg_id| format!("`{}`", pkg_id.source_id()))
                    .collect::<Vec<_>>()
                    .join(", ");
                format!(
                    ", was originally sourced from one of these sources: {}",
                    comma_list
                )
            }
        };
        let msg = format!(
            "package `{}` added to the packaged Cargo.lock file{}",
            pkg_id, extra
        );
        gctx.shell().note(msg)?;
    }
    Ok(())
}

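/// Checks every package in the resolve against its registry and warns about
/// any that have been yanked, appending `hint` to the warning.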
pub fn check_yanked(
    gctx: &GlobalContext,
    pkg_set: &PackageSet<'_>,
    resolve: &Resolve,
    hint: &str,
) -> CargoResult<()> {
    let _lock = gctx.acquire_package_cache_lock(CacheLockMode::DownloadExclusive)?;

    let mut sources = pkg_set.sources_mut();
    let mut pending: Vec<PackageId> = resolve.iter().collect();
    let mut results = Vec::new();
    for (_id, source) in sources.sources_mut() {
        source.invalidate_cache();
    }
    while !pending.is_empty() {
        pending.retain(|pkg_id| {
            if let Some(source) = sources.get_mut(pkg_id.source_id()) {
                match source.is_yanked(*pkg_id) {
                    Poll::Ready(result) => results.push((*pkg_id, result)),
                    Poll::Pending => return true,
                }
            }
            false
        });
        for (_id, source) in sources.sources_mut() {
            source.block_until_ready()?;
        }
    }

    for (pkg_id, is_yanked) in results {
        if is_yanked? {
            gctx.shell().warn(format!(
                "package `{}` in Cargo.lock is yanked in registry `{}`, {}",
                pkg_id,
                pkg_id.source_id().display_registry_name(),
                hint
            ))?;
        }
    }
    Ok(())
}

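/// Checks that a filename can be packaged safely: it must be valid Unicode
/// and must not contain characters that are special on some platforms; a
/// warning is issued for reserved Windows filenames.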
fn check_filename(file: &Path, shell: &mut Shell) -> CargoResult<()> {
    let Some(name) = file.file_name() else {
        return Ok(());
    };
    let Some(name) = name.to_str() else {
        anyhow::bail!(
            "path does not have a unicode filename which may not unpack \
             on all platforms: {}",
            file.display()
        )
    };
    let bad_chars = ['/', '\\', '<', '>', ':', '"', '|', '?', '*'];
    if let Some(c) = bad_chars.iter().find(|c| name.contains(**c)) {
        anyhow::bail!(
            "cannot package a filename with a special character `{}`: {}",
            c,
            file.display()
        )
    }
    if restricted_names::is_windows_reserved_path(file) {
        shell.warn(format!(
            "file {} is a reserved Windows filename, \
             it will not work on Windows platforms",
            file.display()
        ))?;
    }
    Ok(())
}

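/// A temporary local registry (an index plus `.crate` files on disk) that is
/// overlaid on the upstream registry so that workspace members being packaged
/// together can be resolved before they are actually published.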
struct TmpRegistry<'a> {
    gctx: &'a GlobalContext,
    upstream: SourceId,
    root: Filesystem,
    _lock: FileLock,
}

impl<'a> TmpRegistry<'a> {
    fn new(gctx: &'a GlobalContext, root: Filesystem, upstream: SourceId) -> CargoResult<Self> {
        root.create_dir()?;
        let _lock = root.open_rw_exclusive_create(".cargo-lock", gctx, "temporary registry")?;
        let slf = Self {
            gctx,
            root,
            upstream,
            _lock,
        };
        let index_path = slf.index_path().into_path_unlocked();
        if index_path.exists() {
            paths::remove_dir_all(index_path)?;
        }
        slf.index_path().create_dir()?;
        Ok(slf)
    }

    fn index_path(&self) -> Filesystem {
        self.root.join("index")
    }

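    /// Copies the crate tarball into the temporary registry and writes an
    /// index entry for it, so later packages in the workspace can resolve
    /// against it.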
    fn add_package(
        &mut self,
        ws: &Workspace<'_>,
        package: &Package,
        tar: &FileLock,
    ) -> CargoResult<()> {
        debug!(
            "adding package {}@{} to local overlay at {}",
            package.name(),
            package.version(),
            self.root.as_path_unlocked().display()
        );
        {
            let mut tar_copy = self.root.open_rw_exclusive_create(
                package.package_id().tarball_name(),
                self.gctx,
                "temporary package registry",
            )?;
            tar.file().seek(SeekFrom::Start(0))?;
            std::io::copy(&mut tar.file(), &mut tar_copy)?;
            tar_copy.flush()?;
        }

        let new_crate = super::registry::prepare_transmit(self.gctx, ws, package, self.upstream)?;

        tar.file().seek(SeekFrom::Start(0))?;
        let cksum = cargo_util::Sha256::new()
            .update_file(tar.file())?
            .finish_hex();

        let deps: Vec<_> = new_crate
            .deps
            .into_iter()
            .map(|dep| {
                let name = dep
                    .explicit_name_in_toml
                    .clone()
                    .unwrap_or_else(|| dep.name.clone())
                    .into();
                let package = dep
                    .explicit_name_in_toml
                    .as_ref()
                    .map(|_| dep.name.clone().into());
                RegistryDependency {
                    name,
                    req: dep.version_req.into(),
                    features: dep.features.into_iter().map(|x| x.into()).collect(),
                    optional: dep.optional,
                    default_features: dep.default_features,
                    target: dep.target.map(|x| x.into()),
                    kind: Some(dep.kind.into()),
                    registry: dep.registry.map(|x| x.into()),
                    package,
                    public: None,
                    artifact: dep
                        .artifact
                        .map(|xs| xs.into_iter().map(|x| x.into()).collect()),
                    bindep_target: dep.bindep_target.map(|x| x.into()),
                    lib: dep.lib,
                }
            })
            .collect();

        let index_line = serde_json::to_string(&IndexPackage {
            name: new_crate.name.into(),
            vers: package.version().clone(),
            deps,
            features: new_crate
                .features
                .into_iter()
                .map(|(k, v)| (k.into(), v.into_iter().map(|x| x.into()).collect()))
                .collect(),
            features2: None,
            cksum,
            yanked: None,
            links: new_crate.links.map(|x| x.into()),
            rust_version: None,
            v: Some(2),
        })?;

        let file =
            cargo_util::registry::make_dep_path(&package.name().as_str().to_lowercase(), false);
        let mut dst = self.index_path().open_rw_exclusive_create(
            file,
            self.gctx,
            "temporary package registry",
        )?;
        dst.write_all(index_line.as_bytes())?;
        Ok(())
    }
}