use std::collections::{BTreeSet, HashMap};
use std::fs::{self, File};
use std::io::prelude::*;
use std::io::SeekFrom;
use std::path::{Path, PathBuf};
use std::task::Poll;

use crate::core::dependency::DepKind;
use crate::core::manifest::Target;
use crate::core::resolver::CliFeatures;
use crate::core::resolver::HasDevUnits;
use crate::core::PackageIdSpecQuery;
use crate::core::Shell;
use crate::core::Verbosity;
use crate::core::Workspace;
use crate::core::{Package, PackageId, PackageSet, Resolve, SourceId};
use crate::ops::lockfile::LOCKFILE_NAME;
use crate::ops::registry::{infer_registry, RegistryOrIndex};
use crate::sources::path::PathEntry;
use crate::sources::registry::index::{IndexPackage, RegistryDependency};
use crate::sources::{PathSource, CRATES_IO_REGISTRY};
use crate::util::cache_lock::CacheLockMode;
use crate::util::context::JobsConfig;
use crate::util::errors::CargoResult;
use crate::util::human_readable_bytes;
use crate::util::restricted_names;
use crate::util::toml::prepare_for_publish;
use crate::util::FileLock;
use crate::util::Filesystem;
use crate::util::GlobalContext;
use crate::util::Graph;
use crate::{drop_println, ops};
use anyhow::{bail, Context as _};
use cargo_util::paths;
use flate2::{Compression, GzBuilder};
use tar::{Builder, EntryType, Header, HeaderMode};
use tracing::debug;
use unicase::Ascii as UncasedAscii;

mod vcs;
mod verify;

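/// Options for assembling a package into a distributable `.crate` tarball.
///
/// `to_package` selects which workspace members to package, `cli_features`
/// carries the feature selection from the command line, and `reg_or_index`
/// names the registry or index the package is being prepared for, if any.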
#[derive(Clone)]
pub struct PackageOpts<'gctx> {
    pub gctx: &'gctx GlobalContext,
    pub list: bool,
    pub check_metadata: bool,
    pub allow_dirty: bool,
    pub verify: bool,
    pub jobs: Option<JobsConfig>,
    pub keep_going: bool,
    pub to_package: ops::Packages,
    pub targets: Vec<String>,
    pub cli_features: CliFeatures,
    pub reg_or_index: Option<ops::RegistryOrIndex>,
}

const ORIGINAL_MANIFEST_FILE: &str = "Cargo.toml.orig";
const VCS_INFO_FILE: &str = ".cargo_vcs_info.json";

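/// A single file to be added to the package archive: its path relative to the
/// package root (as both a `PathBuf` and a display string) and where its
/// contents come from.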
struct ArchiveFile {
    rel_path: PathBuf,
    rel_str: String,
    contents: FileContents,
}

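/// The contents of an archive entry: either copied from a file on disk or
/// generated by Cargo while packaging.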
enum FileContents {
    OnDisk(PathBuf),
    Generated(GeneratedFile),
}

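/// Files that Cargo generates (or rewrites) while packaging rather than
/// copying from the source directory: the normalized `Cargo.toml`, the
/// `Cargo.lock`, and the `.cargo_vcs_info.json` metadata file.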
enum GeneratedFile {
    Manifest,
    Lockfile,
    VcsInfo(vcs::VcsInfo),
}

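/// Builds the `.crate` tarball for `pkg` from the prepared list of archive
/// files and returns a lock on the finished file in `target/package`.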
#[tracing::instrument(skip_all)]
fn create_package(
    ws: &Workspace<'_>,
    pkg: &Package,
    ar_files: Vec<ArchiveFile>,
    local_reg: Option<&TmpRegistry<'_>>,
) -> CargoResult<FileLock> {
    let gctx = ws.gctx();
    let filecount = ar_files.len();

    for dep in pkg.dependencies() {
        super::check_dep_has_version(dep, false)?;
    }

    let filename = pkg.package_id().tarball_name();
    let dir = ws.target_dir().join("package");

    // Write into a dot-prefixed scratch file and only rename it into place
    // once the tarball has been fully written.
    let mut dst = {
        let tmp = format!(".{}", filename);
        dir.open_rw_exclusive_create(&tmp, gctx, "package scratch space")?
    };

    gctx.shell()
        .status("Packaging", pkg.package_id().to_string())?;
    dst.file().set_len(0)?;
    let uncompressed_size = tar(ws, pkg, local_reg, ar_files, dst.file(), &filename)
        .context("failed to prepare local package for uploading")?;

    dst.seek(SeekFrom::Start(0))?;
    let src_path = dst.path();
    let dst_path = dst.parent().join(&filename);
    fs::rename(&src_path, &dst_path)
        .context("failed to move temporary tarball into final location")?;

    let dst_metadata = dst
        .file()
        .metadata()
        .with_context(|| format!("could not learn metadata for: `{}`", dst_path.display()))?;
    let compressed_size = dst_metadata.len();

    let uncompressed = human_readable_bytes(uncompressed_size);
    let compressed = human_readable_bytes(compressed_size);

    let message = format!(
        "{} files, {:.1}{} ({:.1}{} compressed)",
        filecount, uncompressed.0, uncompressed.1, compressed.0, compressed.1,
    );
    drop(gctx.shell().status("Packaged", message));

    Ok(dst)
}

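/// Packages the selected workspace members into `.crate` tarballs under
/// `target/package`, returning a lock on each generated file (or an empty
/// list when `opts.list` only prints the archive contents).
///
/// Illustrative sketch of a call site (not a doctest; the workspace
/// construction and `manifest_path` here are assumptions for the example):
///
/// ```ignore
/// let ws = Workspace::new(&manifest_path, &gctx)?;
/// let opts = PackageOpts {
///     gctx: &gctx,
///     list: false,
///     check_metadata: true,
///     allow_dirty: false,
///     verify: true,
///     jobs: None,
///     keep_going: false,
///     to_package: ops::Packages::Default,
///     targets: Vec::new(),
///     cli_features: CliFeatures::new_all(false),
///     reg_or_index: None,
/// };
/// let tarballs = package(&ws, &opts)?;
/// ```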
pub fn package(ws: &Workspace<'_>, opts: &PackageOpts<'_>) -> CargoResult<Vec<FileLock>> {
    let specs = &opts.to_package.to_package_id_specs(ws)?;
    // If `-p` was used, every spec must match at least one workspace member.
    if let ops::Packages::Packages(_) = opts.to_package {
        for spec in specs.iter() {
            let member_ids = ws.members().map(|p| p.package_id());
            spec.query(member_ids)?;
        }
    }
    let mut pkgs = ws.members_with_features(specs, &opts.cli_features)?;

    pkgs.retain(|(pkg, _feats)| specs.iter().any(|spec| spec.matches(pkg.package_id())));

    Ok(do_package(ws, opts, pkgs)?
        .into_iter()
        .map(|x| x.2)
        .collect())
}

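/// Packages the given workspace members and returns the generated tarballs
/// together with the local path-dependency graph between them.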
pub(crate) fn package_with_dep_graph(
    ws: &Workspace<'_>,
    opts: &PackageOpts<'_>,
    pkgs: Vec<(&Package, CliFeatures)>,
) -> CargoResult<LocalDependencies<(CliFeatures, FileLock)>> {
    let output = do_package(ws, opts, pkgs)?;

    Ok(local_deps(output.into_iter().map(
        |(pkg, opts, tarball)| (pkg, (opts.cli_features, tarball)),
    )))
}

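/// Packages each selected member in dependency order, optionally registering
/// each result in a temporary local registry so later members (and the
/// verification builds) can resolve already-packaged path dependencies, and
/// optionally verifying every tarball.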
fn do_package<'a>(
    ws: &Workspace<'_>,
    opts: &PackageOpts<'a>,
    pkgs: Vec<(&Package, CliFeatures)>,
) -> CargoResult<Vec<(Package, PackageOpts<'a>, FileLock)>> {
    // If a lockfile already exists, make sure it is up to date before
    // packaging anything.
    if ws
        .lock_root()
        .as_path_unlocked()
        .join(LOCKFILE_NAME)
        .exists()
    {
        let dry_run = false;
        let _ = ops::resolve_ws(ws, dry_run)?;
    }

    let deps = local_deps(pkgs.iter().map(|(p, f)| ((*p).clone(), f.clone())));
    let just_pkgs: Vec<_> = pkgs.iter().map(|p| p.0).collect();

    // A registry `SourceId` is only needed when there are local path
    // dependencies or when a registry/index was requested explicitly.
    let sid = if deps.has_no_dependencies() && opts.reg_or_index.is_none() {
        None
    } else {
        let sid = get_registry(ws.gctx(), &just_pkgs, opts.reg_or_index.clone())?;
        debug!("packaging for registry {}", sid);
        Some(sid)
    };

    let mut local_reg = if ws.gctx().cli_unstable().package_workspace {
        let reg_dir = ws.target_dir().join("package").join("tmp-registry");
        sid.map(|sid| TmpRegistry::new(ws.gctx(), reg_dir, sid))
            .transpose()?
    } else {
        None
    };

    // Package members in dependency order so that earlier tarballs are
    // already in the temporary registry when later members are resolved.
    let sorted_pkgs = deps.sort();
    let mut outputs: Vec<(Package, PackageOpts<'_>, FileLock)> = Vec::new();
    for (pkg, cli_features) in sorted_pkgs {
        let opts = PackageOpts {
            cli_features: cli_features.clone(),
            to_package: ops::Packages::Default,
            ..opts.clone()
        };
        let ar_files = prepare_archive(ws, &pkg, &opts)?;

        if opts.list {
            for ar_file in &ar_files {
                drop_println!(ws.gctx(), "{}", ar_file.rel_str);
            }
        } else {
            let tarball = create_package(ws, &pkg, ar_files, local_reg.as_ref())?;
            if let Some(local_reg) = local_reg.as_mut() {
                // `publish = []` marks the package as unpublishable; don't add
                // it to the temporary registry.
                if pkg.publish() != &Some(Vec::new()) {
                    local_reg.add_package(ws, &pkg, &tarball)?;
                }
            }
            outputs.push((pkg, opts, tarball));
        }
    }

    // Verify the tarballs after all members have been packaged, so the
    // temporary registry already contains every local dependency.
    if opts.verify {
        for (pkg, opts, tarball) in &outputs {
            verify::run_verify(ws, pkg, tarball, local_reg.as_ref(), opts)
                .context("failed to verify package tarball")?
        }
    }

    Ok(outputs)
}

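/// Determines the registry `SourceId` the packages are being prepared for,
/// either from an explicit registry/index argument or by inferring one from
/// the packages, and rejects a registry that is not allowed by a package's
/// `package.publish` list.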
fn get_registry(
    gctx: &GlobalContext,
    pkgs: &[&Package],
    reg_or_index: Option<RegistryOrIndex>,
) -> CargoResult<SourceId> {
    let reg_or_index = match reg_or_index.clone() {
        Some(r) => Some(r),
        None => infer_registry(pkgs)?,
    };

    let reg = reg_or_index
        .clone()
        .unwrap_or_else(|| RegistryOrIndex::Registry(CRATES_IO_REGISTRY.to_owned()));
    if let RegistryOrIndex::Registry(reg_name) = reg {
        for pkg in pkgs {
            if let Some(allowed) = pkg.publish().as_ref() {
                if !allowed.is_empty() && !allowed.iter().any(|a| a == &reg_name) {
                    bail!(
                        "`{}` cannot be packaged.\n\
                         The registry `{}` is not listed in the `package.publish` value in Cargo.toml.",
                        pkg.name(),
                        reg_name
                    );
                }
            }
        }
    }
    Ok(ops::registry::get_source_id(gctx, reg_or_index.as_ref())?.replacement)
}

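/// The local path dependencies between the packages being packaged, together
/// with a per-package payload `T` (for example the `FileLock` of a generated
/// tarball).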
#[derive(Clone, Debug, Default)]
pub(crate) struct LocalDependencies<T> {
    pub packages: HashMap<PackageId, (Package, T)>,
    pub graph: Graph<PackageId, ()>,
}

impl<T: Clone> LocalDependencies<T> {
    pub fn sort(&self) -> Vec<(Package, T)> {
        self.graph
            .sort()
            .into_iter()
            .map(|name| self.packages[&name].clone())
            .collect()
    }

    pub fn has_no_dependencies(&self) -> bool {
        self.graph
            .iter()
            .all(|node| self.graph.edges(node).next().is_none())
    }
}

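/// Builds the graph of local path dependencies between the given packages,
/// ignoring dev-dependencies and dependencies that don't come from a path
/// source.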
fn local_deps<T>(packages: impl Iterator<Item = (Package, T)>) -> LocalDependencies<T> {
    let packages: HashMap<PackageId, (Package, T)> = packages
        .map(|(pkg, payload)| (pkg.package_id(), (pkg, payload)))
        .collect();

    // A dependency names a source rather than a package, so map each path
    // source id back to the package that lives there.
    let source_to_pkg: HashMap<_, _> = packages
        .keys()
        .map(|pkg_id| (pkg_id.source_id(), *pkg_id))
        .collect();

    let mut graph = Graph::new();
    for (pkg, _payload) in packages.values() {
        graph.add(pkg.package_id());
        for dep in pkg.dependencies() {
            if dep.kind() == DepKind::Development || !dep.source_id().is_path() {
                continue;
            };

            if let Some(dep_pkg) = source_to_pkg.get(&dep.source_id()) {
                graph.link(pkg.package_id(), *dep_pkg);
            }
        }
    }

    LocalDependencies { packages, graph }
}

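/// Collects the list of files to include in the archive for `pkg`: the files
/// tracked by its path source plus the generated manifest, lockfile, and VCS
/// info entries.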
#[tracing::instrument(skip_all)]
fn prepare_archive(
    ws: &Workspace<'_>,
    pkg: &Package,
    opts: &PackageOpts<'_>,
) -> CargoResult<Vec<ArchiveFile>> {
    let gctx = ws.gctx();
    let mut src = PathSource::new(pkg.root(), pkg.package_id().source_id(), gctx);
    src.load()?;

    if opts.check_metadata {
        check_metadata(pkg, gctx)?;
    }

    if !pkg.manifest().exclude().is_empty() && !pkg.manifest().include().is_empty() {
        gctx.shell().warn(
            "both package.include and package.exclude are specified; \
             the exclude list will be ignored",
        )?;
    }
    let src_files = src.list_files(pkg)?;

    let vcs_info = vcs::check_repo_state(pkg, &src_files, gctx, &opts)?;

    build_ar_list(ws, pkg, src_files, vcs_info)
}

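/// Turns the list of source files into the final archive file list: filters
/// out `Cargo.lock`, rejects reserved file names, swaps in the generated
/// `Cargo.toml`/`Cargo.toml.orig` pair, appends the generated lockfile and
/// VCS info, and validates the license file, readme, and build-script paths
/// declared in the manifest.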
#[tracing::instrument(skip_all)]
fn build_ar_list(
    ws: &Workspace<'_>,
    pkg: &Package,
    src_files: Vec<PathEntry>,
    vcs_info: Option<vcs::VcsInfo>,
) -> CargoResult<Vec<ArchiveFile>> {
    let mut result = HashMap::new();
    let root = pkg.root();
    for src_file in &src_files {
        let rel_path = src_file.strip_prefix(&root)?;
        check_filename(rel_path, &mut ws.gctx().shell())?;
        let rel_str = rel_path.to_str().ok_or_else(|| {
            anyhow::format_err!("non-utf8 path in source directory: {}", rel_path.display())
        })?;
        match rel_str {
            "Cargo.lock" => continue,
            VCS_INFO_FILE | ORIGINAL_MANIFEST_FILE => anyhow::bail!(
                "invalid inclusion of reserved file name {} in package source",
                rel_str
            ),
            _ => {
                result
                    .entry(UncasedAscii::new(rel_str))
                    .or_insert_with(Vec::new)
                    .push(ArchiveFile {
                        rel_path: rel_path.to_owned(),
                        rel_str: rel_str.to_owned(),
                        contents: FileContents::OnDisk(src_file.to_path_buf()),
                    });
            }
        }
    }

    // The original manifest is preserved as `Cargo.toml.orig`, and a
    // normalized `Cargo.toml` is generated in its place.
    if result.remove(&UncasedAscii::new("Cargo.toml")).is_some() {
        result
            .entry(UncasedAscii::new(ORIGINAL_MANIFEST_FILE))
            .or_insert_with(Vec::new)
            .push(ArchiveFile {
                rel_path: PathBuf::from(ORIGINAL_MANIFEST_FILE),
                rel_str: ORIGINAL_MANIFEST_FILE.to_string(),
                contents: FileContents::OnDisk(pkg.manifest_path().to_owned()),
            });
        result
            .entry(UncasedAscii::new("Cargo.toml"))
            .or_insert_with(Vec::new)
            .push(ArchiveFile {
                rel_path: PathBuf::from("Cargo.toml"),
                rel_str: "Cargo.toml".to_string(),
                contents: FileContents::Generated(GeneratedFile::Manifest),
            });
    } else {
        ws.gctx().shell().warn(&format!(
            "no `Cargo.toml` file found when packaging `{}` (note the case of the file name).",
            pkg.name()
        ))?;
    }

    // A generated `Cargo.lock` is always included in the archive.
    let rel_str = "Cargo.lock";
    result
        .entry(UncasedAscii::new(rel_str))
        .or_insert_with(Vec::new)
        .push(ArchiveFile {
            rel_path: PathBuf::from(rel_str),
            rel_str: rel_str.to_string(),
            contents: FileContents::Generated(GeneratedFile::Lockfile),
        });

    if let Some(vcs_info) = vcs_info {
        let rel_str = VCS_INFO_FILE;
        result
            .entry(UncasedAscii::new(rel_str))
            .or_insert_with(Vec::new)
            .push(ArchiveFile {
                rel_path: PathBuf::from(rel_str),
                rel_str: rel_str.to_string(),
                contents: FileContents::Generated(GeneratedFile::VcsInfo(vcs_info)),
            });
    }

    let mut invalid_manifest_field: Vec<String> = vec![];

    let mut result = result.into_values().flatten().collect();
    if let Some(license_file) = &pkg.manifest().metadata().license_file {
        let license_path = Path::new(license_file);
        let abs_file_path = paths::normalize_path(&pkg.root().join(license_path));
        if abs_file_path.is_file() {
            check_for_file_and_add(
                "license-file",
                license_path,
                abs_file_path,
                pkg,
                &mut result,
                ws,
            )?;
        } else {
            error_on_nonexistent_file(
                &pkg,
                &license_path,
                "license-file",
                &mut invalid_manifest_field,
            );
        }
    }
    if let Some(readme) = &pkg.manifest().metadata().readme {
        let readme_path = Path::new(readme);
        let abs_file_path = paths::normalize_path(&pkg.root().join(readme_path));
        if abs_file_path.is_file() {
            check_for_file_and_add("readme", readme_path, abs_file_path, pkg, &mut result, ws)?;
        } else {
            error_on_nonexistent_file(&pkg, &readme_path, "readme", &mut invalid_manifest_field);
        }
    }

    if !invalid_manifest_field.is_empty() {
        return Err(anyhow::anyhow!(invalid_manifest_field.join("\n")));
    }

    for t in pkg
        .manifest()
        .targets()
        .iter()
        .filter(|t| t.is_custom_build())
    {
        if let Some(custom_build_path) = t.src_path().path() {
            let abs_custom_build_path =
                paths::normalize_path(&pkg.root().join(custom_build_path));
            if !abs_custom_build_path.is_file() || !abs_custom_build_path.starts_with(pkg.root())
            {
                error_custom_build_file_not_in_package(pkg, &abs_custom_build_path, t)?;
            }
        }
    }

    result.sort_unstable_by(|a, b| a.rel_path.cmp(&b.rel_path));

    Ok(result)
}

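/// Adds the license file or readme named in the manifest to the archive list
/// if it isn't already included. A file that lives outside the package root
/// is placed at the root of the archive, unless a file with the same name is
/// already there, in which case only a warning is emitted.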
fn check_for_file_and_add(
    label: &str,
    file_path: &Path,
    abs_file_path: PathBuf,
    pkg: &Package,
    result: &mut Vec<ArchiveFile>,
    ws: &Workspace<'_>,
) -> CargoResult<()> {
    match abs_file_path.strip_prefix(&pkg.root()) {
        Ok(rel_file_path) => {
            if !result.iter().any(|ar| ar.rel_path == rel_file_path) {
                result.push(ArchiveFile {
                    rel_path: rel_file_path.to_path_buf(),
                    rel_str: rel_file_path
                        .to_str()
                        .expect("everything was utf8")
                        .to_string(),
                    contents: FileContents::OnDisk(abs_file_path),
                })
            }
        }
        Err(_) => {
            let file_name = file_path.file_name().unwrap();
            if result.iter().any(|ar| ar.rel_path == file_name) {
                ws.gctx().shell().warn(&format!(
                    "{} `{}` appears to be a path outside of the package, \
                     but there is already a file named `{}` in the root of the package. \
                     The archived crate will contain the copy in the root of the package. \
                     Update the {} to point to the path relative \
                     to the root of the package to remove this warning.",
                    label,
                    file_path.display(),
                    file_name.to_str().unwrap(),
                    label,
                ))?;
            } else {
                result.push(ArchiveFile {
                    rel_path: PathBuf::from(file_name),
                    rel_str: file_name.to_str().unwrap().to_string(),
                    contents: FileContents::OnDisk(abs_file_path),
                })
            }
        }
    }
    Ok(())
}

fn error_on_nonexistent_file(
    pkg: &Package,
    path: &Path,
    manifest_key_name: &'static str,
    invalid: &mut Vec<String>,
) {
    let rel_msg = if path.is_absolute() {
        "".to_string()
    } else {
        format!(" (relative to `{}`)", pkg.root().display())
    };

    let msg = format!(
        "{manifest_key_name} `{}` does not appear to exist{}.\n\
         Please update the {manifest_key_name} setting in the manifest at `{}`.",
        path.display(),
        rel_msg,
        pkg.manifest_path().display()
    );

    invalid.push(msg);
}

fn error_custom_build_file_not_in_package(
    pkg: &Package,
    path: &Path,
    target: &Target,
) -> CargoResult<Vec<ArchiveFile>> {
    let tip = {
        let description_name = target.description_named();
        if path.is_file() {
            format!(
                "the source file of {description_name} doesn't appear to be a path inside of the package.\n\
                 It is at `{}`, whereas the root of the package is `{}`.\n",
                path.display(),
                pkg.root().display()
            )
        } else {
            format!("the source file of {description_name} doesn't appear to exist.\n")
        }
    };
    let msg = format!(
        "{}\
         This may cause issues during packaging, as module resolution and resources included via macros are often relative to the path of source files.\n\
         Please update the `build` setting in the manifest at `{}` and point to a path inside the root of the package.",
        tip,
        pkg.manifest_path().display()
    );
    anyhow::bail!(msg)
}

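/// Re-resolves dependencies for the package being published (against the
/// temporary local registry overlay, if any) and renders the `Cargo.lock`
/// embedded in the archive, noting differences from the workspace lockfile
/// and warning about yanked dependencies.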
fn build_lock(
    ws: &Workspace<'_>,
    publish_pkg: &Package,
    local_reg: Option<&TmpRegistry<'_>>,
) -> CargoResult<String> {
    let gctx = ws.gctx();
    let orig_resolve = ops::load_pkg_lockfile(ws)?;

    let mut tmp_ws = Workspace::ephemeral(publish_pkg.clone(), ws.gctx(), None, true)?;

    // When packaging a workspace, the temporary registry holds the members
    // that have already been packaged; overlay it on the upstream registry so
    // the resolver can see them.
    if let Some(local_reg) = local_reg {
        tmp_ws.add_local_overlay(
            local_reg.upstream,
            local_reg.root.as_path_unlocked().to_owned(),
        );
    }
    let mut tmp_reg = tmp_ws.package_registry()?;

    let mut new_resolve = ops::resolve_with_previous(
        &mut tmp_reg,
        &tmp_ws,
        &CliFeatures::new_all(true),
        HasDevUnits::Yes,
        orig_resolve.as_ref(),
        None,
        &[],
        true,
    )?;

    let pkg_set = ops::get_resolved_packages(&new_resolve, tmp_reg)?;

    if let Some(orig_resolve) = orig_resolve {
        compare_resolve(gctx, tmp_ws.current()?, &orig_resolve, &new_resolve)?;
    }
    check_yanked(
        gctx,
        &pkg_set,
        &new_resolve,
        "consider updating to a version that is not yanked",
    )?;

    ops::resolve_to_string(&tmp_ws, &mut new_resolve)
}

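/// Warns if the manifest is missing metadata that is usually wanted when
/// publishing: a description, a license (or license file), and at least one
/// of documentation, homepage, or repository.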
fn check_metadata(pkg: &Package, gctx: &GlobalContext) -> CargoResult<()> {
    let md = pkg.manifest().metadata();

    let mut missing = vec![];

    // Fields joined with `||` are alternatives: the group is only reported as
    // missing when every field in it is absent or empty.
    macro_rules! lacking {
        ($( $($field: ident)||* ),*) => {{
            $(
                if $(md.$field.as_ref().map_or(true, |s| s.is_empty()))&&* {
                    $(missing.push(stringify!($field).replace("_", "-"));)*
                }
            )*
        }}
    }
    lacking!(
        description,
        license || license_file,
        documentation || homepage || repository
    );

    if !missing.is_empty() {
        let mut things = missing[..missing.len() - 1].join(", ");
        if !things.is_empty() {
            things.push_str(" or ");
        }
        things.push_str(missing.last().unwrap());

        gctx.shell().warn(&format!(
            "manifest has no {things}.\n\
             See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info.",
            things = things
        ))?
    }

    Ok(())
}

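/// Writes the gzip-compressed tar archive for `pkg` into `dst`, placing every
/// entry under a `<name>-<version>/` prefix, and returns the total
/// uncompressed size of the archived contents.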
fn tar(
    ws: &Workspace<'_>,
    pkg: &Package,
    local_reg: Option<&TmpRegistry<'_>>,
    ar_files: Vec<ArchiveFile>,
    dst: &File,
    filename: &str,
) -> CargoResult<u64> {
    let filename = Path::new(filename);
    let encoder = GzBuilder::new()
        .filename(paths::path2bytes(filename)?)
        .write(dst, Compression::best());

    let mut ar = Builder::new(encoder);
    ar.sparse(false);
    let gctx = ws.gctx();

    let base_name = format!("{}-{}", pkg.name(), pkg.version());
    let base_path = Path::new(&base_name);
    let included = ar_files
        .iter()
        .map(|ar_file| ar_file.rel_path.clone())
        .collect::<Vec<_>>();
    // Normalize the manifest for publishing, restricted to the files that are
    // actually included in the archive.
    let publish_pkg = prepare_for_publish(pkg, ws, Some(&included))?;

    let mut uncompressed_size = 0;
    for ar_file in ar_files {
        let ArchiveFile {
            rel_path,
            rel_str,
            contents,
        } = ar_file;
        let ar_path = base_path.join(&rel_path);
        gctx.shell()
            .verbose(|shell| shell.status("Archiving", &rel_str))?;
        let mut header = Header::new_gnu();
        match contents {
            FileContents::OnDisk(disk_path) => {
                let mut file = File::open(&disk_path).with_context(|| {
                    format!("failed to open for archiving: `{}`", disk_path.display())
                })?;
                let metadata = file.metadata().with_context(|| {
                    format!("could not learn metadata for: `{}`", disk_path.display())
                })?;
                header.set_metadata_in_mode(&metadata, HeaderMode::Deterministic);
                header.set_cksum();
                ar.append_data(&mut header, &ar_path, &mut file)
                    .with_context(|| {
                        format!("could not archive source file `{}`", disk_path.display())
                    })?;
                uncompressed_size += metadata.len() as u64;
            }
            FileContents::Generated(generated_kind) => {
                let contents = match generated_kind {
                    GeneratedFile::Manifest => publish_pkg.manifest().to_normalized_contents()?,
                    GeneratedFile::Lockfile => build_lock(ws, &publish_pkg, local_reg)?,
                    GeneratedFile::VcsInfo(ref s) => serde_json::to_string_pretty(s)?,
                };
                header.set_entry_type(EntryType::file());
                header.set_mode(0o644);
                header.set_size(contents.len() as u64);
                // A fixed, non-zero mtime keeps the generated entries
                // deterministic across runs.
                header.set_mtime(1);
                header.set_cksum();
                ar.append_data(&mut header, &ar_path, contents.as_bytes())
                    .with_context(|| format!("could not archive source file `{}`", rel_str))?;
                uncompressed_size += contents.len() as u64;
            }
        }
    }

    let encoder = ar.into_inner()?;
    encoder.finish()?;
    Ok(uncompressed_size)
}

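/// In verbose mode, notes any packages that appear in the newly generated
/// `Cargo.lock` but not in the workspace's original lockfile, along with a
/// hint about which version or source they previously came from.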
fn compare_resolve(
    gctx: &GlobalContext,
    current_pkg: &Package,
    orig_resolve: &Resolve,
    new_resolve: &Resolve,
) -> CargoResult<()> {
    if gctx.shell().verbosity() != Verbosity::Verbose {
        return Ok(());
    }
    let new_set: BTreeSet<PackageId> = new_resolve.iter().collect();
    let orig_set: BTreeSet<PackageId> = orig_resolve.iter().collect();
    let added = new_set.difference(&orig_set);
    let removed: Vec<&PackageId> = orig_set.difference(&new_set).collect();
    for pkg_id in added {
        if pkg_id.name() == current_pkg.name() && pkg_id.version() == current_pkg.version() {
            continue;
        }
        let removed_candidates: Vec<&PackageId> = removed
            .iter()
            .filter(|orig_pkg_id| {
                orig_pkg_id.name() == pkg_id.name() && orig_pkg_id.version() == pkg_id.version()
            })
            .cloned()
            .collect();
        let extra = match removed_candidates.len() {
            0 => {
                let previous_versions: Vec<&PackageId> = removed
                    .iter()
                    .filter(|orig_pkg_id| orig_pkg_id.name() == pkg_id.name())
                    .cloned()
                    .collect();
                match previous_versions.len() {
                    0 => String::new(),
                    1 => format!(
                        ", previous version was `{}`",
                        previous_versions[0].version()
                    ),
                    _ => format!(
                        ", previous versions were: {}",
                        previous_versions
                            .iter()
                            .map(|pkg_id| format!("`{}`", pkg_id.version()))
                            .collect::<Vec<_>>()
                            .join(", ")
                    ),
                }
            }
            1 => {
                format!(
                    ", was originally sourced from `{}`",
                    removed_candidates[0].source_id()
                )
            }
            _ => {
                let comma_list = removed_candidates
                    .iter()
                    .map(|pkg_id| format!("`{}`", pkg_id.source_id()))
                    .collect::<Vec<_>>()
                    .join(", ");
                format!(
                    ", was originally sourced from one of these sources: {}",
                    comma_list
                )
            }
        };
        let msg = format!(
            "package `{}` added to the packaged Cargo.lock file{}",
            pkg_id, extra
        );
        gctx.shell().note(msg)?;
    }
    Ok(())
}

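/// Warns about any package in the resolve graph that has been yanked from its
/// registry, appending `hint` to each warning.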
pub fn check_yanked(
    gctx: &GlobalContext,
    pkg_set: &PackageSet<'_>,
    resolve: &Resolve,
    hint: &str,
) -> CargoResult<()> {
    let _lock = gctx.acquire_package_cache_lock(CacheLockMode::DownloadExclusive)?;

    let mut sources = pkg_set.sources_mut();
    let mut pending: Vec<PackageId> = resolve.iter().collect();
    let mut results = Vec::new();
    for (_id, source) in sources.sources_mut() {
        source.invalidate_cache();
    }
    // Sources answer `is_yanked` asynchronously; keep polling until every
    // query has resolved.
    while !pending.is_empty() {
        pending.retain(|pkg_id| {
            if let Some(source) = sources.get_mut(pkg_id.source_id()) {
                match source.is_yanked(*pkg_id) {
                    Poll::Ready(result) => results.push((*pkg_id, result)),
                    Poll::Pending => return true,
                }
            }
            false
        });
        for (_id, source) in sources.sources_mut() {
            source.block_until_ready()?;
        }
    }

    for (pkg_id, is_yanked) in results {
        if is_yanked? {
            gctx.shell().warn(format!(
                "package `{}` in Cargo.lock is yanked in registry `{}`, {}",
                pkg_id,
                pkg_id.source_id().display_registry_name(),
                hint
            ))?;
        }
    }
    Ok(())
}

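/// Rejects file names that would not survive packing or unpacking: non-UTF-8
/// names and names containing characters that are special on some platforms.
/// Reserved Windows file names only produce a warning.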
fn check_filename(file: &Path, shell: &mut Shell) -> CargoResult<()> {
    let Some(name) = file.file_name() else {
        return Ok(());
    };
    let Some(name) = name.to_str() else {
        anyhow::bail!(
            "path does not have a unicode filename which may not unpack \
             on all platforms: {}",
            file.display()
        )
    };
    let bad_chars = ['/', '\\', '<', '>', ':', '"', '|', '?', '*'];
    if let Some(c) = bad_chars.iter().find(|c| name.contains(**c)) {
        anyhow::bail!(
            "cannot package a filename with a special character `{}`: {}",
            c,
            file.display()
        )
    }
    if restricted_names::is_windows_reserved_path(file) {
        shell.warn(format!(
            "file {} is a reserved Windows filename, \
             it will not work on Windows platforms",
            file.display()
        ))?;
    }
    Ok(())
}

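/// A temporary local registry, used as an overlay over the `upstream`
/// registry. Freshly packaged workspace members are added to it so that other
/// members (and their verification builds) can resolve dependencies on them
/// before they are actually published.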
struct TmpRegistry<'a> {
    gctx: &'a GlobalContext,
    upstream: SourceId,
    root: Filesystem,
    _lock: FileLock,
}

impl<'a> TmpRegistry<'a> {
    fn new(gctx: &'a GlobalContext, root: Filesystem, upstream: SourceId) -> CargoResult<Self> {
        root.create_dir()?;
        let _lock = root.open_rw_exclusive_create(".cargo-lock", gctx, "temporary registry")?;
        let slf = Self {
            gctx,
            root,
            upstream,
            _lock,
        };
        // Start with a clean index so stale entries from previous runs don't
        // leak into this packaging session.
        let index_path = slf.index_path().into_path_unlocked();
        if index_path.exists() {
            paths::remove_dir_all(index_path)?;
        }
        slf.index_path().create_dir()?;
        Ok(slf)
    }

    fn index_path(&self) -> Filesystem {
        self.root.join("index")
    }

    fn add_package(
        &mut self,
        ws: &Workspace<'_>,
        package: &Package,
        tar: &FileLock,
    ) -> CargoResult<()> {
        debug!(
            "adding package {}@{} to local overlay at {}",
            package.name(),
            package.version(),
            self.root.as_path_unlocked().display()
        );
        {
            // Copy the finished tarball into the registry root so it can be
            // served as the `.crate` download.
            let mut tar_copy = self.root.open_rw_exclusive_create(
                package.package_id().tarball_name(),
                self.gctx,
                "temporary package registry",
            )?;
            tar.file().seek(SeekFrom::Start(0))?;
            std::io::copy(&mut tar.file(), &mut tar_copy)?;
            tar_copy.flush()?;
        }

        let new_crate = super::registry::prepare_transmit(self.gctx, ws, package, self.upstream)?;

        tar.file().seek(SeekFrom::Start(0))?;
        let cksum = cargo_util::Sha256::new()
            .update_file(tar.file())?
            .finish_hex();

        let deps: Vec<_> = new_crate
            .deps
            .into_iter()
            .map(|dep| RegistryDependency {
                name: dep.name.into(),
                req: dep.version_req.into(),
                features: dep.features.into_iter().map(|x| x.into()).collect(),
                optional: dep.optional,
                default_features: dep.default_features,
                target: dep.target.map(|x| x.into()),
                kind: Some(dep.kind.into()),
                registry: dep.registry.map(|x| x.into()),
                package: None,
                public: None,
                artifact: dep
                    .artifact
                    .map(|xs| xs.into_iter().map(|x| x.into()).collect()),
                bindep_target: dep.bindep_target.map(|x| x.into()),
                lib: dep.lib,
            })
            .collect();

        let index_line = serde_json::to_string(&IndexPackage {
            name: new_crate.name.into(),
            vers: package.version().clone(),
            deps,
            features: new_crate
                .features
                .into_iter()
                .map(|(k, v)| (k.into(), v.into_iter().map(|x| x.into()).collect()))
                .collect(),
            features2: None,
            cksum,
            yanked: None,
            links: new_crate.links.map(|x| x.into()),
            rust_version: None,
            v: Some(2),
        })?;

        // Write the index entry at the standard registry index path for this
        // package name.
        let file = cargo_util::registry::make_dep_path(package.name().as_str(), false);
        let mut dst = self.index_path().open_rw_exclusive_create(
            file,
            self.gctx,
            "temporary package registry",
        )?;
        dst.write_all(index_line.as_bytes())?;
        Ok(())
    }
}