use std::collections::{BTreeSet, HashMap, HashSet};
use std::ffi::OsString;
use std::io::Write;
use std::path::{Path, PathBuf};
use std::process::{self, ExitStatus, Output};
use std::{env, fs, str};

use anyhow::{bail, Context as _};
use cargo_util::{exit_status_to_string, is_simple_exit_code, paths, ProcessBuilder};
use cargo_util_schemas::manifest::TomlManifest;
use rustfix::diagnostics::Diagnostic;
use rustfix::CodeFix;
use semver::Version;
use tracing::{debug, trace, warn};

use crate::core::compiler::CompileKind;
use crate::core::compiler::RustcTargetData;
use crate::core::resolver::features::{DiffMap, FeatureOpts, FeatureResolver, FeaturesFor};
use crate::core::resolver::{HasDevUnits, Resolve, ResolveBehavior};
use crate::core::PackageIdSpecQuery as _;
use crate::core::{Edition, MaybePackage, Package, PackageId, Workspace};
use crate::ops::resolve::WorkspaceResolve;
use crate::ops::{self, CompileOptions};
use crate::util::diagnostic_server::{Message, RustfixDiagnosticServer};
use crate::util::errors::CargoResult;
use crate::util::GlobalContext;
use crate::util::{existing_vcs_repo, LockServer, LockServerClient};
use crate::{drop_eprint, drop_eprintln};
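
// Internal environment variables used by the `cargo fix` driver to pass its
// settings (and the address of the fix lock server) down to the rustc
// wrapper processes it spawns; see `fix_exec_rustc` and `FixArgs::from_args`.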
const FIX_ENV_INTERNAL: &str = "__CARGO_FIX_PLZ";
const BROKEN_CODE_ENV_INTERNAL: &str = "__CARGO_FIX_BROKEN_CODE";
const EDITION_ENV_INTERNAL: &str = "__CARGO_FIX_EDITION";
const IDIOMS_ENV_INTERNAL: &str = "__CARGO_FIX_IDIOMS";
const SYSROOT_INTERNAL: &str = "__CARGO_FIX_RUST_SRC";
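
/// Options for the `cargo fix` command.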
pub struct FixOptions {
pub edition: bool,
pub idioms: bool,
pub compile_opts: CompileOptions,
pub allow_dirty: bool,
pub allow_no_vcs: bool,
pub allow_staged: bool,
pub broken_code: bool,
pub requested_lockfile_path: Option<PathBuf>,
}
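
/// Entry point for `cargo fix`.
///
/// This sets Cargo itself up as the rustc wrapper for all primary units:
/// `env::current_exe()` is registered as `primary_unit_rustc`, the fix
/// settings and the [`LockServer`] address are exported through the
/// `__CARGO_FIX_*` environment variables, and then a normal compilation is
/// run. Each wrapped rustc invocation is handled by [`fix_exec_rustc`].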
pub fn fix(
gctx: &GlobalContext,
original_ws: &Workspace<'_>,
root_manifest: &Path,
opts: &mut FixOptions,
) -> CargoResult<()> {
check_version_control(gctx, opts)?;
let mut target_data =
RustcTargetData::new(original_ws, &opts.compile_opts.build_config.requested_kinds)?;
if opts.edition {
let specs = opts.compile_opts.spec.to_package_id_specs(&original_ws)?;
let members: Vec<&Package> = original_ws
.members()
.filter(|m| specs.iter().any(|spec| spec.matches(m.package_id())))
.collect();
migrate_manifests(original_ws, &members)?;
check_resolver_change(&original_ws, &mut target_data, opts)?;
}
let mut ws = Workspace::new(&root_manifest, gctx)?;
ws.set_resolve_honors_rust_version(Some(original_ws.resolve_honors_rust_version()));
ws.set_requested_lockfile_path(opts.requested_lockfile_path.clone());
let lock_server = LockServer::new()?;
let mut wrapper = ProcessBuilder::new(env::current_exe()?);
wrapper.env(FIX_ENV_INTERNAL, lock_server.addr().to_string());
let _started = lock_server.start()?;
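    // Force a full rebuild so the wrapper (and thus rustfix) runs on every
    // unit, even ones that are currently fresh.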
opts.compile_opts.build_config.force_rebuild = true;
if opts.broken_code {
wrapper.env(BROKEN_CODE_ENV_INTERNAL, "1");
}
if opts.edition {
wrapper.env(EDITION_ENV_INTERNAL, "1");
}
if opts.idioms {
wrapper.env(IDIOMS_ENV_INTERNAL, "1");
}
let sysroot = &target_data.info(CompileKind::Host).sysroot;
if sysroot.is_dir() {
wrapper.env(SYSROOT_INTERNAL, sysroot);
}
*opts
.compile_opts
.build_config
.rustfix_diagnostic_server
.borrow_mut() = Some(RustfixDiagnosticServer::new()?);
if let Some(server) = opts
.compile_opts
.build_config
.rustfix_diagnostic_server
.borrow()
.as_ref()
{
server.configure(&mut wrapper);
}
let rustc = ws.gctx().load_global_rustc(Some(&ws))?;
wrapper.arg(&rustc.path);
wrapper.retry_with_argfile(true);
opts.compile_opts.build_config.primary_unit_rustc = Some(wrapper);
ops::compile(&ws, &opts.compile_opts)?;
Ok(())
}
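
/// Refuses to run `cargo fix` when it could clobber uncommitted work.
///
/// Errors out if no VCS repository is found (unless `--allow-no-vcs` was
/// passed), or if the git working tree has dirty or staged files (unless
/// `--allow-dirty` / `--allow-staged` were passed), listing the offending
/// files in the error message.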
fn check_version_control(gctx: &GlobalContext, opts: &FixOptions) -> CargoResult<()> {
if opts.allow_no_vcs {
return Ok(());
}
if !existing_vcs_repo(gctx.cwd(), gctx.cwd()) {
bail!(
"no VCS found for this package and `cargo fix` can potentially \
perform destructive changes; if you'd like to suppress this \
error pass `--allow-no-vcs`"
)
}
if opts.allow_dirty && opts.allow_staged {
return Ok(());
}
let mut dirty_files = Vec::new();
let mut staged_files = Vec::new();
if let Ok(repo) = git2::Repository::discover(gctx.cwd()) {
let mut repo_opts = git2::StatusOptions::new();
repo_opts.include_ignored(false);
repo_opts.include_untracked(true);
for status in repo.statuses(Some(&mut repo_opts))?.iter() {
if let Some(path) = status.path() {
match status.status() {
git2::Status::CURRENT => (),
git2::Status::INDEX_NEW
| git2::Status::INDEX_MODIFIED
| git2::Status::INDEX_DELETED
| git2::Status::INDEX_RENAMED
| git2::Status::INDEX_TYPECHANGE => {
if !opts.allow_staged {
staged_files.push(path.to_string())
}
}
_ => {
if !opts.allow_dirty {
dirty_files.push(path.to_string())
}
}
};
}
}
}
if dirty_files.is_empty() && staged_files.is_empty() {
return Ok(());
}
let mut files_list = String::new();
for file in dirty_files {
files_list.push_str(" * ");
files_list.push_str(&file);
files_list.push_str(" (dirty)\n");
}
for file in staged_files {
files_list.push_str(" * ");
files_list.push_str(&file);
files_list.push_str(" (staged)\n");
}
bail!(
"the working directory of this package has uncommitted changes, and \
`cargo fix` can potentially perform destructive changes; if you'd \
like to suppress this error pass `--allow-dirty`, `--allow-staged`, \
or commit the changes to these files:\n\
\n\
{}\n\
",
files_list
);
}
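
/// Rewrites the manifests of the selected packages in preparation for
/// migrating them to the next edition.
///
/// Only the 2024-and-later migration currently touches the manifest: legacy
/// underscore keys (`default_features`, `dev_dependencies`,
/// `build_dependencies`, `crate_type`, `proc_macro`, plus the old `[project]`
/// table) are renamed to their current dashed forms, and
/// `default-features = false` entries that were previously ignored on
/// dependencies inheriting a workspace dependency are removed.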
fn migrate_manifests(ws: &Workspace<'_>, pkgs: &[&Package]) -> CargoResult<()> {
for pkg in pkgs {
let existing_edition = pkg.manifest().edition();
let prepare_for_edition = existing_edition.saturating_next();
if existing_edition == prepare_for_edition
|| (!prepare_for_edition.is_stable() && !ws.gctx().nightly_features_allowed)
{
continue;
}
let file = pkg.manifest_path();
let file = file.strip_prefix(ws.root()).unwrap_or(file);
let file = file.display();
ws.gctx().shell().status(
"Migrating",
format!("{file} from {existing_edition} edition to {prepare_for_edition}"),
)?;
let ws_original_toml = match ws.root_maybe() {
MaybePackage::Package(package) => package.manifest().original_toml(),
MaybePackage::Virtual(manifest) => manifest.original_toml(),
};
if Edition::Edition2024 <= prepare_for_edition {
let mut document = pkg.manifest().document().clone().into_mut();
let mut fixes = 0;
let root = document.as_table_mut();
if let Some(workspace) = root
.get_mut("workspace")
.and_then(|t| t.as_table_like_mut())
{
fixes += rename_dep_fields_2024(workspace, "dependencies");
}
fixes += rename_table(root, "project", "package");
if let Some(target) = root.get_mut("lib").and_then(|t| t.as_table_like_mut()) {
fixes += rename_target_fields_2024(target);
}
fixes += rename_array_of_target_fields_2024(root, "bin");
fixes += rename_array_of_target_fields_2024(root, "example");
fixes += rename_array_of_target_fields_2024(root, "test");
fixes += rename_array_of_target_fields_2024(root, "bench");
fixes += rename_dep_fields_2024(root, "dependencies");
fixes += remove_ignored_default_features_2024(root, "dependencies", ws_original_toml);
fixes += rename_table(root, "dev_dependencies", "dev-dependencies");
fixes += rename_dep_fields_2024(root, "dev-dependencies");
fixes +=
remove_ignored_default_features_2024(root, "dev-dependencies", ws_original_toml);
fixes += rename_table(root, "build_dependencies", "build-dependencies");
fixes += rename_dep_fields_2024(root, "build-dependencies");
fixes +=
remove_ignored_default_features_2024(root, "build-dependencies", ws_original_toml);
for target in root
.get_mut("target")
.and_then(|t| t.as_table_like_mut())
.iter_mut()
.flat_map(|t| t.iter_mut())
.filter_map(|(_k, t)| t.as_table_like_mut())
{
fixes += rename_dep_fields_2024(target, "dependencies");
fixes +=
remove_ignored_default_features_2024(target, "dependencies", ws_original_toml);
fixes += rename_table(target, "dev_dependencies", "dev-dependencies");
fixes += rename_dep_fields_2024(target, "dev-dependencies");
fixes += remove_ignored_default_features_2024(
target,
"dev-dependencies",
ws_original_toml,
);
fixes += rename_table(target, "build_dependencies", "build-dependencies");
fixes += rename_dep_fields_2024(target, "build-dependencies");
fixes += remove_ignored_default_features_2024(
target,
"build-dependencies",
ws_original_toml,
);
}
if 0 < fixes {
let verb = if fixes == 1 { "fix" } else { "fixes" };
let msg = format!("{file} ({fixes} {verb})");
ws.gctx().shell().status("Fixed", msg)?;
let s = document.to_string();
let new_contents_bytes = s.as_bytes();
cargo_util::paths::write_atomic(pkg.manifest_path(), new_contents_bytes)?;
}
}
}
Ok(())
}
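
/// Renames `default_features` to `default-features` in every dependency
/// table under `parent[dep_kind]`, returning the number of fixes made.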
fn rename_dep_fields_2024(parent: &mut dyn toml_edit::TableLike, dep_kind: &str) -> usize {
let mut fixes = 0;
for target in parent
.get_mut(dep_kind)
.and_then(|t| t.as_table_like_mut())
.iter_mut()
.flat_map(|t| t.iter_mut())
.filter_map(|(_k, t)| t.as_table_like_mut())
{
fixes += rename_table(target, "default_features", "default-features");
}
fixes
}
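
/// Removes `default-features = false` from dependencies that inherit a
/// workspace dependency (`workspace = true`) whenever the workspace
/// dependency itself does not disable default features, since that local
/// override has no effect. Returns the number of fixes made.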
fn remove_ignored_default_features_2024(
parent: &mut dyn toml_edit::TableLike,
dep_kind: &str,
ws_original_toml: &TomlManifest,
) -> usize {
let mut fixes = 0;
for (name_in_toml, target) in parent
.get_mut(dep_kind)
.and_then(|t| t.as_table_like_mut())
.iter_mut()
.flat_map(|t| t.iter_mut())
.filter_map(|(k, t)| t.as_table_like_mut().map(|t| (k, t)))
{
let name_in_toml: &str = &name_in_toml;
let ws_deps = ws_original_toml
.workspace
.as_ref()
.and_then(|ws| ws.dependencies.as_ref());
if let Some(ws_dep) = ws_deps.and_then(|ws_deps| ws_deps.get(name_in_toml)) {
if ws_dep.default_features() == Some(false) {
continue;
}
}
if target
.get("workspace")
.and_then(|i| i.as_value())
.and_then(|i| i.as_bool())
== Some(true)
&& target
.get("default-features")
.and_then(|i| i.as_value())
.and_then(|i| i.as_bool())
== Some(false)
{
target.remove("default-features");
fixes += 1;
}
}
fixes
}
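
/// Applies [`rename_target_fields_2024`] to every table in the array of
/// tables named `kind` (e.g. `[[bin]]`, `[[example]]`).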
fn rename_array_of_target_fields_2024(root: &mut dyn toml_edit::TableLike, kind: &str) -> usize {
let mut fixes = 0;
for target in root
.get_mut(kind)
.and_then(|t| t.as_array_of_tables_mut())
.iter_mut()
.flat_map(|t| t.iter_mut())
{
fixes += rename_target_fields_2024(target);
}
fixes
}
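
/// Renames `crate_type`/`proc_macro` to `crate-type`/`proc-macro` in a
/// target table.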
fn rename_target_fields_2024(target: &mut dyn toml_edit::TableLike) -> usize {
let mut fixes = 0;
fixes += rename_table(target, "crate_type", "crate-type");
fixes += rename_table(target, "proc_macro", "proc-macro");
fixes
}
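
/// Renames the key `old` to `new` in `parent`, preserving the original key's
/// decor (surrounding whitespace and comments). If `new` already exists, the
/// old entry is simply dropped. Returns 1 if `old` was present, 0 otherwise.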
fn rename_table(parent: &mut dyn toml_edit::TableLike, old: &str, new: &str) -> usize {
let Some(old_key) = parent.key(old).cloned() else {
return 0;
};
let project = parent.remove(old).expect("returned early");
if !parent.contains_key(new) {
parent.insert(new, project);
let mut new_key = parent.key_mut(new).expect("just inserted");
*new_key.dotted_decor_mut() = old_key.dotted_decor().clone();
*new_key.leaf_decor_mut() = old_key.leaf_decor().clone();
}
1
}
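
/// For a 2018 -> 2021 edition migration of a root package that does not set
/// `resolver` explicitly, reports how switching to the version 2 feature
/// resolver would change feature unification.
///
/// The workspace is resolved with and without dev-dependencies under both
/// resolver behaviors, and any features that would no longer be enabled for a
/// package are printed for the user.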
fn check_resolver_change<'gctx>(
ws: &Workspace<'gctx>,
target_data: &mut RustcTargetData<'gctx>,
opts: &FixOptions,
) -> CargoResult<()> {
let root = ws.root_maybe();
match root {
MaybePackage::Package(root_pkg) => {
if root_pkg.manifest().resolve_behavior().is_some() {
return Ok(());
}
let pkgs = opts.compile_opts.spec.get_packages(ws)?;
if !pkgs.iter().any(|&pkg| pkg == root_pkg) {
return Ok(());
}
if root_pkg.manifest().edition() != Edition::Edition2018 {
return Ok(());
}
}
MaybePackage::Virtual(_vm) => {
return Ok(());
}
}
assert_eq!(ws.resolve_behavior(), ResolveBehavior::V1);
let specs = opts.compile_opts.spec.to_package_id_specs(ws)?;
let mut resolve_differences = |has_dev_units| -> CargoResult<(WorkspaceResolve<'_>, DiffMap)> {
let dry_run = false;
let ws_resolve = ops::resolve_ws_with_opts(
ws,
target_data,
&opts.compile_opts.build_config.requested_kinds,
&opts.compile_opts.cli_features,
&specs,
has_dev_units,
crate::core::resolver::features::ForceAllTargets::No,
dry_run,
)?;
let feature_opts = FeatureOpts::new_behavior(ResolveBehavior::V2, has_dev_units);
let v2_features = FeatureResolver::resolve(
ws,
target_data,
&ws_resolve.targeted_resolve,
&ws_resolve.pkg_set,
&opts.compile_opts.cli_features,
&specs,
&opts.compile_opts.build_config.requested_kinds,
feature_opts,
)?;
let diffs = v2_features.compare_legacy(&ws_resolve.resolved_features);
Ok((ws_resolve, diffs))
};
let (_, without_dev_diffs) = resolve_differences(HasDevUnits::No)?;
let (ws_resolve, mut with_dev_diffs) = resolve_differences(HasDevUnits::Yes)?;
if without_dev_diffs.is_empty() && with_dev_diffs.is_empty() {
return Ok(());
}
with_dev_diffs.retain(|k, vals| without_dev_diffs.get(k) != Some(vals));
let gctx = ws.gctx();
gctx.shell().note(
"Switching to Edition 2021 will enable the use of the version 2 feature resolver in Cargo.",
)?;
drop_eprintln!(
gctx,
"This may cause some dependencies to be built with fewer features enabled than previously."
);
drop_eprintln!(
gctx,
"More information about the resolver changes may be found \
at https://doc.rust-lang.org/nightly/edition-guide/rust-2021/default-cargo-resolver.html"
);
drop_eprintln!(
gctx,
"When building the following dependencies, \
the given features will no longer be used:\n"
);
let show_diffs = |differences: DiffMap| {
for ((pkg_id, features_for), removed) in differences {
drop_eprint!(gctx, " {}", pkg_id);
if let FeaturesFor::HostDep = features_for {
drop_eprint!(gctx, " (as host dependency)");
}
drop_eprint!(gctx, " removed features: ");
let joined: Vec<_> = removed.iter().map(|s| s.as_str()).collect();
drop_eprintln!(gctx, "{}", joined.join(", "));
}
drop_eprint!(gctx, "\n");
};
if !without_dev_diffs.is_empty() {
show_diffs(without_dev_diffs);
}
if !with_dev_diffs.is_empty() {
drop_eprintln!(
gctx,
"The following differences only apply when building with dev-dependencies:\n"
);
show_diffs(with_dev_diffs);
}
report_maybe_diesel(gctx, &ws_resolve.targeted_resolve)?;
Ok(())
}
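
/// Emits a note when the resolved graph contains `diesel` older than 1.4.8
/// together with `diesel_migrations` 1.x or earlier, a combination known to
/// fail to build once the version 2 resolver stops unifying their features.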
fn report_maybe_diesel(gctx: &GlobalContext, resolve: &Resolve) -> CargoResult<()> {
fn is_broken_diesel(pid: PackageId) -> bool {
pid.name() == "diesel" && pid.version() < &Version::new(1, 4, 8)
}
fn is_broken_diesel_migration(pid: PackageId) -> bool {
pid.name() == "diesel_migrations" && pid.version().major <= 1
}
if resolve.iter().any(is_broken_diesel) && resolve.iter().any(is_broken_diesel_migration) {
gctx.shell().note(
"\
This project appears to use both diesel and diesel_migrations. These packages have
a known issue where the build may fail due to the version 2 resolver preventing
feature unification between those two packages. Please update to at least diesel 1.4.8
to prevent this issue from happening.
",
)?;
}
Ok(())
}
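
/// Returns the address of the fix proxy's [`LockServer`] if this process was
/// spawned as the `cargo fix` rustc wrapper (i.e. [`FIX_ENV_INTERNAL`] is
/// set).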
pub fn fix_get_proxy_lock_addr() -> Option<String> {
#[allow(clippy::disallowed_methods)]
env::var(FIX_ENV_INTERNAL).ok()
}
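
/// Runs rustc on behalf of `cargo fix` when this process is acting as the
/// rustc wrapper.
///
/// Fixes are applied via [`rustfix_crate`]. If the build of the fixed code
/// fails, the touched files are reverted to their original contents (unless
/// [`BROKEN_CODE_ENV_INTERNAL`] is set) and the failure is reported back to
/// the parent `cargo fix` process before exiting with rustc's status.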
pub fn fix_exec_rustc(gctx: &GlobalContext, lock_addr: &str) -> CargoResult<()> {
let args = FixArgs::get()?;
trace!("cargo-fix as rustc got file {:?}", args.file);
let workspace_rustc = gctx
.get_env("RUSTC_WORKSPACE_WRAPPER")
.map(PathBuf::from)
.ok();
let mut rustc = ProcessBuilder::new(&args.rustc).wrapped(workspace_rustc.as_ref());
rustc.retry_with_argfile(true);
rustc.env_remove(FIX_ENV_INTERNAL);
args.apply(&mut rustc);
if let Some(client) = gctx.jobserver_from_env() {
rustc.inherit_jobserver(client);
}
trace!("start rustfixing {:?}", args.file);
let fixes = rustfix_crate(&lock_addr, &rustc, &args.file, &args, gctx)?;
if fixes.last_output.status.success() {
for (path, file) in fixes.files.iter() {
Message::Fixed {
file: path.clone(),
fixes: file.fixes_applied,
}
.post(gctx)?;
}
emit_output(&fixes.last_output)?;
return Ok(());
}
let allow_broken_code = gctx.get_env_os(BROKEN_CODE_ENV_INTERNAL).is_some();
if !allow_broken_code {
for (path, file) in fixes.files.iter() {
debug!("reverting {:?} due to errors", path);
paths::write(path, &file.original_code)?;
}
}
if fixes.files.is_empty() {
emit_output(&fixes.last_output)?;
exit_with(fixes.last_output.status);
} else {
let krate = {
let mut iter = rustc.get_args();
let mut krate = None;
while let Some(arg) = iter.next() {
if arg == "--crate-name" {
krate = iter.next().and_then(|s| s.to_owned().into_string().ok());
}
}
krate
};
log_failed_fix(
gctx,
krate,
&fixes.last_output.stderr,
fixes.last_output.status,
)?;
emit_output(&fixes.first_output)?;
exit_with(fixes.first_output.status);
}
}
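
/// Forwards the captured stderr/stdout of a rustc invocation to our own
/// streams.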
fn emit_output(output: &Output) -> CargoResult<()> {
std::io::stderr().write_all(&output.stderr)?;
std::io::stdout().write_all(&output.stdout)?;
Ok(())
}
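
/// Accumulated result of fixing one crate: per-file fix state plus the rustc
/// output from the first and last invocations.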
struct FixedCrate {
files: HashMap<String, FixedFile>,
first_output: Output,
last_output: Output,
}
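
/// Per-file state tracked while applying fixes.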
#[derive(Debug)]
struct FixedFile {
errors_applying_fixes: Vec<String>,
fixes_applied: u32,
original_code: String,
}
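
/// Repeatedly compiles `filename` with `rustc` and applies the suggested
/// fixes until no further progress is made (or `CARGO_FIX_MAX_RETRIES`,
/// defaulting to 4, is exhausted). If the last pass changed anything, rustc
/// is run once more to gather the final diagnostics.
///
/// A [`LockServerClient`] lock serializes the fix phase across concurrent
/// wrapper processes, since different compilation units may share source
/// files.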
fn rustfix_crate(
lock_addr: &str,
rustc: &ProcessBuilder,
filename: &Path,
args: &FixArgs,
gctx: &GlobalContext,
) -> CargoResult<FixedCrate> {
let _lock = LockServerClient::lock(&lock_addr.parse()?, "global")?;
let mut files = HashMap::new();
if !args.can_run_rustfix(gctx)? {
debug!("can't fix {filename:?}, running rustc: {rustc}");
let last_output = rustc.output()?;
let fixes = FixedCrate {
files,
first_output: last_output.clone(),
last_output,
};
return Ok(fixes);
}
let max_iterations = gctx
.get_env("CARGO_FIX_MAX_RETRIES")
.ok()
.and_then(|n| n.parse().ok())
.unwrap_or(4);
let mut last_output;
let mut last_made_changes;
let mut first_output = None;
let mut current_iteration = 0;
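    // Keep re-running rustc and applying suggestions as long as some fixes
    // failed to apply but the previous pass still made changes; a later pass
    // may succeed once the earlier fixes have landed.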
loop {
for file in files.values_mut() {
file.errors_applying_fixes.clear();
}
(last_output, last_made_changes) =
rustfix_and_fix(&mut files, rustc, filename, args, gctx)?;
if current_iteration == 0 {
first_output = Some(last_output.clone());
}
let mut progress_yet_to_be_made = false;
for (path, file) in files.iter_mut() {
if file.errors_applying_fixes.is_empty() {
continue;
}
debug!("had rustfix apply errors in {path:?} {file:?}");
if last_made_changes {
progress_yet_to_be_made = true;
}
}
if !progress_yet_to_be_made {
break;
}
current_iteration += 1;
if current_iteration >= max_iterations {
break;
}
}
if last_made_changes {
debug!("calling rustc one last time for final results: {rustc}");
last_output = rustc.output()?;
}
for (path, file) in files.iter_mut() {
for error in file.errors_applying_fixes.drain(..) {
Message::ReplaceFailed {
file: path.clone(),
message: error,
}
.post(gctx)?;
}
}
Ok(FixedCrate {
files,
first_output: first_output.expect("at least one iteration"),
last_output,
})
}
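
/// Runs `rustc` once, parses the JSON diagnostics from its stderr, and
/// applies the collected suggestions with `rustfix`.
///
/// Suggestions are skipped if they touch files under the Cargo home or the
/// sysroot, or if a single suggestion spans multiple files. Returns the rustc
/// output together with whether any file was actually modified.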
fn rustfix_and_fix(
files: &mut HashMap<String, FixedFile>,
rustc: &ProcessBuilder,
filename: &Path,
args: &FixArgs,
gctx: &GlobalContext,
) -> CargoResult<(Output, bool)> {
let only = HashSet::new();
debug!("calling rustc to collect suggestions and validate previous fixes: {rustc}");
let output = rustc.output()?;
if !output.status.success() && gctx.get_env_os(BROKEN_CODE_ENV_INTERNAL).is_none() {
debug!(
"rustfixing `{:?}` failed, rustc exited with {:?}",
filename,
output.status.code()
);
return Ok((output, false));
}
let fix_mode = gctx
.get_env_os("__CARGO_FIX_YOLO")
.map(|_| rustfix::Filter::Everything)
.unwrap_or(rustfix::Filter::MachineApplicableOnly);
let stderr = str::from_utf8(&output.stderr).context("failed to parse rustc stderr as UTF-8")?;
let suggestions = stderr
.lines()
.filter(|x| !x.is_empty())
.inspect(|y| trace!("line: {}", y))
.filter_map(|line| serde_json::from_str::<Diagnostic>(line).ok())
.filter_map(|diag| rustfix::collect_suggestions(&diag, &only, fix_mode));
let mut file_map = HashMap::new();
let mut num_suggestion = 0;
let home_path = gctx.home().as_path_unlocked();
for suggestion in suggestions {
trace!("suggestion");
let file_names = suggestion
.solutions
.iter()
.flat_map(|s| s.replacements.iter())
.map(|r| &r.snippet.file_name);
let file_name = if let Some(file_name) = file_names.clone().next() {
file_name.clone()
} else {
trace!("rejecting as it has no solutions {:?}", suggestion);
continue;
};
let file_path = Path::new(&file_name);
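        // Do not rewrite files that live under the Cargo home (e.g. the
        // registry cache).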
if file_path.starts_with(home_path) {
continue;
}
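        // Likewise, leave sysroot sources (e.g. the standard library) alone.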
if let Some(sysroot) = args.sysroot.as_deref() {
if file_path.starts_with(sysroot) {
continue;
}
}
if !file_names.clone().all(|f| f == &file_name) {
trace!("rejecting as it changes multiple files: {:?}", suggestion);
continue;
}
trace!("adding suggestion for {:?}: {:?}", file_name, suggestion);
file_map
.entry(file_name)
.or_insert_with(Vec::new)
.push(suggestion);
num_suggestion += 1;
}
debug!(
"collected {} suggestions for `{}`",
num_suggestion,
filename.display(),
);
let mut made_changes = false;
for (file, suggestions) in file_map {
let code = match paths::read(file.as_ref()) {
Ok(s) => s,
Err(e) => {
warn!("failed to read `{}`: {}", file, e);
continue;
}
};
let num_suggestions = suggestions.len();
debug!("applying {} fixes to {}", num_suggestions, file);
let fixed_file = files.entry(file.clone()).or_insert_with(|| FixedFile {
errors_applying_fixes: Vec::new(),
fixes_applied: 0,
original_code: code.clone(),
});
let mut fixed = CodeFix::new(&code);
for suggestion in suggestions.iter().rev() {
match fixed.apply(suggestion) {
Ok(()) => fixed_file.fixes_applied += 1,
Err(rustfix::Error::AlreadyReplaced {
is_identical: true, ..
}) => continue,
Err(e) => fixed_file.errors_applying_fixes.push(e.to_string()),
}
}
if fixed.modified() {
made_changes = true;
let new_code = fixed.finish()?;
paths::write(&file, new_code)?;
}
}
Ok((output, made_changes))
}
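
/// Exits the process, mirroring the child rustc's exit status (and reporting
/// termination by signal on Unix).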
fn exit_with(status: ExitStatus) -> ! {
#[cfg(unix)]
{
use std::os::unix::prelude::*;
if let Some(signal) = status.signal() {
drop(writeln!(
std::io::stderr().lock(),
"child failed with signal `{}`",
signal
));
process::exit(2);
}
}
process::exit(status.code().unwrap_or(3));
}
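
/// Reports a failed fix attempt back to the parent `cargo fix` process: the
/// affected files, the rendered rustc errors, and any abnormal exit status.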
fn log_failed_fix(
gctx: &GlobalContext,
krate: Option<String>,
stderr: &[u8],
status: ExitStatus,
) -> CargoResult<()> {
let stderr = str::from_utf8(stderr).context("failed to parse rustc stderr as utf-8")?;
let diagnostics = stderr
.lines()
.filter(|x| !x.is_empty())
.filter_map(|line| serde_json::from_str::<Diagnostic>(line).ok());
let mut files = BTreeSet::new();
let mut errors = Vec::new();
for diagnostic in diagnostics {
errors.push(diagnostic.rendered.unwrap_or(diagnostic.message));
for span in diagnostic.spans.into_iter() {
files.insert(span.file_name);
}
}
errors.extend(
stderr
.lines()
.filter(|x| !x.starts_with('{'))
.map(|x| x.to_string()),
);
let files = files.into_iter().collect();
let abnormal_exit = if status.code().map_or(false, is_simple_exit_code) {
None
} else {
Some(exit_status_to_string(status))
};
Message::FixFailed {
files,
krate,
errors,
abnormal_exit,
}
.post(gctx)?;
Ok(())
}
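
/// The rustc invocation that `cargo fix` is wrapping, parsed from this
/// process's own arguments (or from an `@path` argfile).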
struct FixArgs {
file: PathBuf,
prepare_for_edition: Option<Edition>,
idioms: bool,
enabled_edition: Option<Edition>,
other: Vec<OsString>,
rustc: PathBuf,
sysroot: Option<PathBuf>,
}
impl FixArgs {
fn get() -> CargoResult<FixArgs> {
Self::from_args(env::args_os())
}
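    /// Parses the wrapper's command line: the first argument is either the
    /// path to rustc or a single `@path` argfile whose first line is the
    /// rustc path and whose remaining lines are the real arguments.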
fn from_args(argv: impl IntoIterator<Item = OsString>) -> CargoResult<Self> {
let mut argv = argv.into_iter();
let mut rustc = argv
.nth(1)
.map(PathBuf::from)
.ok_or_else(|| anyhow::anyhow!("expected rustc or `@path` as first argument"))?;
let mut file = None;
let mut enabled_edition = None;
let mut other = Vec::new();
let mut handle_arg = |arg: OsString| -> CargoResult<()> {
let path = PathBuf::from(arg);
if path.extension().and_then(|s| s.to_str()) == Some("rs") && path.exists() {
file = Some(path);
return Ok(());
}
if let Some(s) = path.to_str() {
if let Some(edition) = s.strip_prefix("--edition=") {
enabled_edition = Some(edition.parse()?);
return Ok(());
}
}
other.push(path.into());
Ok(())
};
if let Some(argfile_path) = rustc.to_str().unwrap_or_default().strip_prefix("@") {
if argv.next().is_some() {
bail!("argfile `@path` cannot be combined with other arguments");
}
let contents = fs::read_to_string(argfile_path)
.with_context(|| format!("failed to read argfile at `{argfile_path}`"))?;
let mut iter = contents.lines().map(OsString::from);
rustc = iter
.next()
.map(PathBuf::from)
.ok_or_else(|| anyhow::anyhow!("expected rustc as first argument"))?;
for arg in iter {
handle_arg(arg)?;
}
} else {
for arg in argv {
handle_arg(arg)?;
}
}
let file = file.ok_or_else(|| anyhow::anyhow!("could not find .rs file in rustc args"))?;
#[allow(clippy::disallowed_methods)]
let idioms = env::var(IDIOMS_ENV_INTERNAL).is_ok();
#[allow(clippy::disallowed_methods)]
let prepare_for_edition = env::var(EDITION_ENV_INTERNAL).ok().map(|_| {
enabled_edition
.unwrap_or(Edition::Edition2015)
.saturating_next()
});
#[allow(clippy::disallowed_methods)]
let sysroot = env::var_os(SYSROOT_INTERNAL).map(PathBuf::from);
Ok(FixArgs {
file,
prepare_for_edition,
idioms,
enabled_edition,
other,
rustc,
sysroot,
})
}
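    /// Applies these arguments to the real rustc invocation, capping lints
    /// and enabling the relevant edition, idiom, and compatibility lints.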
fn apply(&self, cmd: &mut ProcessBuilder) {
cmd.arg(&self.file);
cmd.args(&self.other);
if self.prepare_for_edition.is_some() {
cmd.arg("--cap-lints=allow");
} else {
cmd.arg("--cap-lints=warn");
}
if let Some(edition) = self.enabled_edition {
cmd.arg("--edition").arg(edition.to_string());
if self.idioms && edition.supports_idiom_lint() {
cmd.arg(format!("-Wrust-{}-idioms", edition));
}
}
if let Some(edition) = self.prepare_for_edition {
if edition.supports_compat_lint() {
cmd.arg("--force-warn")
.arg(format!("rust-{}-compatibility", edition));
}
}
}
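    /// Reports to the user what is about to happen (fixing vs. migrating) and
    /// returns whether rustfix should run; migrating to an unstable edition
    /// on a stable release is the one case that is refused.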
fn can_run_rustfix(&self, gctx: &GlobalContext) -> CargoResult<bool> {
let Some(to_edition) = self.prepare_for_edition else {
return Message::Fixing {
file: self.file.display().to_string(),
}
.post(gctx)
.and(Ok(true));
};
if !to_edition.is_stable() && !gctx.nightly_features_allowed {
let message = format!(
"`{file}` is on the latest edition, but trying to \
migrate to edition {to_edition}.\n\
Edition {to_edition} is unstable and not allowed in \
this release, consider trying the nightly release channel.",
file = self.file.display(),
to_edition = to_edition
);
return Message::EditionAlreadyEnabled {
message,
edition: to_edition.previous().unwrap(),
}
.post(gctx)
            .and(Ok(false));
        }
let from_edition = self.enabled_edition.unwrap_or(Edition::Edition2015);
if from_edition == to_edition {
let message = format!(
"`{}` is already on the latest edition ({}), \
unable to migrate further",
self.file.display(),
to_edition
);
Message::EditionAlreadyEnabled {
message,
edition: to_edition,
}
.post(gctx)
} else {
Message::Migrating {
file: self.file.display().to_string(),
from_edition,
to_edition,
}
.post(gctx)
}
.and(Ok(true))
}
}
#[cfg(test)]
mod tests {
use super::FixArgs;
use std::ffi::OsString;
use std::io::Write as _;
use std::path::PathBuf;
#[test]
fn get_fix_args_from_argfile() {
let mut temp = tempfile::Builder::new().tempfile().unwrap();
let main_rs = tempfile::Builder::new().suffix(".rs").tempfile().unwrap();
let content = format!("/path/to/rustc\n{}\nfoobar\n", main_rs.path().display());
temp.write_all(content.as_bytes()).unwrap();
let argfile = format!("@{}", temp.path().display());
let args = ["cargo", &argfile];
let fix_args = FixArgs::from_args(args.map(|x| x.into())).unwrap();
assert_eq!(fix_args.rustc, PathBuf::from("/path/to/rustc"));
assert_eq!(fix_args.file, main_rs.path());
assert_eq!(fix_args.other, vec![OsString::from("foobar")]);
}
#[test]
fn get_fix_args_from_argfile_with_extra_arg() {
let mut temp = tempfile::Builder::new().tempfile().unwrap();
let main_rs = tempfile::Builder::new().suffix(".rs").tempfile().unwrap();
let content = format!("/path/to/rustc\n{}\nfoobar\n", main_rs.path().display());
temp.write_all(content.as_bytes()).unwrap();
let argfile = format!("@{}", temp.path().display());
let args = ["cargo", &argfile, "boo!"];
match FixArgs::from_args(args.map(|x| x.into())) {
Err(e) => assert_eq!(
e.to_string(),
"argfile `@path` cannot be combined with other arguments"
),
Ok(_) => panic!("should fail"),
}
}
}