use std::env;
use std::error::Error;
use std::ffi::OsString;
use std::fs::{self, File};
use std::io::{self, Write};
use std::path::{Path, PathBuf};

use ar_archive_writer::{
write_archive_to_stream, ArchiveKind, COFFShortExport, MachineTypes, NewArchiveMember,
};
pub use ar_archive_writer::{ObjectReader, DEFAULT_OBJECT_READER};
use object::read::archive::ArchiveFile;
use object::read::macho::FatArch;
use rustc_data_structures::fx::FxIndexSet;
use rustc_data_structures::memmap::Mmap;
use rustc_session::Session;
use rustc_span::symbol::Symbol;
use tempfile::Builder as TempFileBuilder;
use tracing::trace;

use super::metadata::search_for_section;
use crate::common;
pub use crate::errors::{ArchiveBuildFailure, ExtractBundledLibsError, UnknownArchiveKind};
use crate::errors::{
DlltoolFailImportLibrary, ErrorCallingDllTool, ErrorCreatingImportLibrary, ErrorWritingDEFFile,
};
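
/// Creates [`ArchiveBuilder`]s for the current codegen backend and provides archive-related
/// helpers: building DLL import libraries and extracting bundled native libraries from rlibs.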
pub trait ArchiveBuilderBuilder {
fn new_archive_builder<'a>(&self, sess: &'a Session) -> Box<dyn ArchiveBuilder + 'a>;
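
    /// Creates a DLL import library for `lib_name` at `output_path` from the given
    /// `(import name, ordinal)` pairs. MinGW toolchains shell out to an external `dlltool`;
    /// all other Windows toolchains go through `ar_archive_writer::write_import_library`.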
fn create_dll_import_lib(
&self,
sess: &Session,
lib_name: &str,
import_name_and_ordinal_vector: Vec<(String, Option<u16>)>,
output_path: &Path,
) {
if common::is_mingw_gnu_toolchain(&sess.target) {
create_mingw_dll_import_lib(
sess,
lib_name,
import_name_and_ordinal_vector,
output_path,
);
} else {
trace!("creating import library");
trace!(" dll_name {:#?}", lib_name);
trace!(" output_path {}", output_path.display());
trace!(
" import names: {}",
import_name_and_ordinal_vector
.iter()
.map(|(name, _ordinal)| name.clone())
.collect::<Vec<_>>()
.join(", "),
);
let mut file = match fs::File::create_new(&output_path) {
Ok(file) => file,
Err(error) => sess
.dcx()
.emit_fatal(ErrorCreatingImportLibrary { lib_name, error: error.to_string() }),
};
let exports = import_name_and_ordinal_vector
.iter()
.map(|(name, ordinal)| COFFShortExport {
name: name.to_string(),
ext_name: None,
symbol_name: None,
alias_target: None,
ordinal: ordinal.unwrap_or(0),
noname: ordinal.is_some(),
data: false,
private: false,
constant: false,
})
.collect::<Vec<_>>();
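            // Translate the Rust target architecture into the COFF machine type expected by
            // the import library writer.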
let machine = match &*sess.target.arch {
"x86_64" => MachineTypes::AMD64,
"x86" => MachineTypes::I386,
"aarch64" => MachineTypes::ARM64,
"arm64ec" => MachineTypes::ARM64EC,
"arm" => MachineTypes::ARMNT,
cpu => panic!("unsupported cpu type {cpu}"),
};
if let Err(error) = ar_archive_writer::write_import_library(
&mut file,
lib_name,
&exports,
machine,
!sess.target.is_like_msvc,
true,
) {
sess.dcx()
.emit_fatal(ErrorCreatingImportLibrary { lib_name, error: error.to_string() });
}
}
}
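
    /// Extracts the bundled native libraries listed in `bundled_lib_file_names` from the
    /// `.bundled_lib` sections of the archive at `rlib` into `outdir`.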
fn extract_bundled_libs<'a>(
&'a self,
rlib: &'a Path,
outdir: &Path,
bundled_lib_file_names: &FxIndexSet<Symbol>,
) -> Result<(), ExtractBundledLibsError<'a>> {
let archive_map = unsafe {
Mmap::map(
File::open(rlib)
.map_err(|e| ExtractBundledLibsError::OpenFile { rlib, error: Box::new(e) })?,
)
.map_err(|e| ExtractBundledLibsError::MmapFile { rlib, error: Box::new(e) })?
};
let archive = ArchiveFile::parse(&*archive_map)
.map_err(|e| ExtractBundledLibsError::ParseArchive { rlib, error: Box::new(e) })?;
for entry in archive.members() {
let entry = entry
.map_err(|e| ExtractBundledLibsError::ReadEntry { rlib, error: Box::new(e) })?;
let data = entry
.data(&*archive_map)
.map_err(|e| ExtractBundledLibsError::ArchiveMember { rlib, error: Box::new(e) })?;
let name = std::str::from_utf8(entry.name())
.map_err(|e| ExtractBundledLibsError::ConvertName { rlib, error: Box::new(e) })?;
            if !bundled_lib_file_names.contains(&Symbol::intern(name)) {
                continue; // Only extract the bundled native libraries requested by the caller.
            }
let data = search_for_section(rlib, data, ".bundled_lib").map_err(|e| {
ExtractBundledLibsError::ExtractSection { rlib, error: Box::<dyn Error>::from(e) }
})?;
std::fs::write(&outdir.join(&name), data)
.map_err(|e| ExtractBundledLibsError::WriteFile { rlib, error: Box::new(e) })?;
}
Ok(())
}
}
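
/// Creates a DLL import library for MinGW (windows-gnu) toolchains by writing a `.def`
/// file next to `output_path` and invoking an external `dlltool` on it.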
pub fn create_mingw_dll_import_lib(
sess: &Session,
lib_name: &str,
import_name_and_ordinal_vector: Vec<(String, Option<u16>)>,
output_path: &Path,
) {
let def_file_path = output_path.with_extension("def");
let def_file_content = format!(
"EXPORTS\n{}",
import_name_and_ordinal_vector
.into_iter()
.map(|(name, ordinal)| {
match ordinal {
Some(n) => format!("{name} @{n} NONAME"),
None => name,
}
})
.collect::<Vec<String>>()
.join("\n")
);
match std::fs::write(&def_file_path, def_file_content) {
Ok(_) => {}
Err(e) => {
sess.dcx().emit_fatal(ErrorWritingDEFFile { error: e });
}
};
let dlltool = find_binutils_dlltool(sess);
let temp_prefix = {
let mut path = PathBuf::from(&output_path);
path.pop();
path.push(lib_name);
path
};
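    // The machine (`-m`) and bitness flag (`-f`) handed to dlltool for the target architecture.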
let (dlltool_target_arch, dlltool_target_bitness) = match sess.target.arch.as_ref() {
"x86_64" => ("i386:x86-64", "--64"),
"x86" => ("i386", "--32"),
"aarch64" => ("arm64", "--64"),
"arm" => ("arm", "--32"),
_ => panic!("unsupported arch {}", sess.target.arch),
};
let mut dlltool_cmd = std::process::Command::new(&dlltool);
dlltool_cmd
.arg("-d")
.arg(def_file_path)
.arg("-D")
.arg(lib_name)
.arg("-l")
.arg(&output_path)
.arg("-m")
.arg(dlltool_target_arch)
.arg("-f")
.arg(dlltool_target_bitness)
.arg("--no-leading-underscore")
.arg("--temp-prefix")
.arg(temp_prefix);
match dlltool_cmd.output() {
Err(e) => {
sess.dcx().emit_fatal(ErrorCallingDllTool {
dlltool_path: dlltool.to_string_lossy(),
error: e,
});
}
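        // The exit status is not inspected: dlltool does not reliably report failures
        // through it, so any output on stderr is treated as an error.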
Ok(output) if !output.stderr.is_empty() => {
sess.dcx().emit_fatal(DlltoolFailImportLibrary {
dlltool_path: dlltool.to_string_lossy(),
dlltool_args: dlltool_cmd
.get_args()
.map(|arg| arg.to_string_lossy())
.collect::<Vec<_>>()
.join(" "),
stdout: String::from_utf8_lossy(&output.stdout),
stderr: String::from_utf8_lossy(&output.stderr),
})
}
_ => {}
}
}
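
/// Picks the `dlltool` executable to run: the path configured in the session options if
/// present, otherwise a (possibly target-prefixed) tool name looked up on `PATH`.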
fn find_binutils_dlltool(sess: &Session) -> OsString {
assert!(sess.target.options.is_like_windows && !sess.target.options.is_like_msvc);
if let Some(dlltool_path) = &sess.opts.cg.dlltool {
return dlltool_path.clone().into_os_string();
}
let tool_name: OsString = if sess.host.options.is_like_windows {
"dlltool.exe"
} else {
match sess.target.arch.as_ref() {
"x86_64" => "x86_64-w64-mingw32-dlltool",
"x86" => "i686-w64-mingw32-dlltool",
"aarch64" => "aarch64-w64-mingw32-dlltool",
_ => "dlltool",
}
}
.into();
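    // Look for the tool on `PATH`; if it cannot be found there, fall back to the bare tool
    // name and let process spawning report the error later.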
for dir in env::split_paths(&env::var_os("PATH").unwrap_or_default()) {
let full_path = dir.join(&tool_name);
if full_path.is_file() {
return full_path.into_os_string();
}
}
tool_name
}
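
/// Builds a static archive from individual object files and members of existing archives.
///
/// `build` consumes the builder and returns whether any members were written. A sketch of
/// typical use (illustrative; `sess`, the `builder_builder`, and the paths are assumed to
/// exist):
///
/// ```ignore (illustrative)
/// let mut builder = builder_builder.new_archive_builder(sess);
/// builder.add_file(Path::new("foo.o"));
/// builder.add_archive(Path::new("libbar.a"), Box::new(|_| false))?;
/// let wrote_members = builder.build(Path::new("libcombined.a"));
/// ```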
pub trait ArchiveBuilder {
fn add_file(&mut self, path: &Path);
fn add_archive(
&mut self,
archive: &Path,
skip: Box<dyn FnMut(&str) -> bool + 'static>,
) -> io::Result<()>;
fn build(self: Box<Self>, output: &Path) -> bool;
}
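
/// An [`ArchiveBuilder`] implemented on top of `ar_archive_writer`.
///
/// Source archives are memory-mapped and kept alive in `src_archives`, so members taken
/// from them can be copied into the output without re-reading the files. It is typically
/// constructed as `ArArchiveBuilder::new(sess, &DEFAULT_OBJECT_READER)`.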
#[must_use = "must call build() to finish building the archive"]
pub struct ArArchiveBuilder<'a> {
sess: &'a Session,
object_reader: &'static ObjectReader,
src_archives: Vec<(PathBuf, Mmap)>,
entries: Vec<(Vec<u8>, ArchiveEntry)>,
}
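
/// A pending archive member: either a byte range inside one of the mmapped source archives
/// (`FromArchive`) or a standalone file that is read when the archive is built.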
#[derive(Debug)]
enum ArchiveEntry {
FromArchive { archive_index: usize, file_range: (u64, u64) },
File(PathBuf),
}
impl<'a> ArArchiveBuilder<'a> {
pub fn new(sess: &'a Session, object_reader: &'static ObjectReader) -> ArArchiveBuilder<'a> {
ArArchiveBuilder { sess, object_reader, src_archives: vec![], entries: vec![] }
}
}
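
/// If `archs` contains a slice matching `target_arch`, writes that slice to a persisted
/// temporary file (named after `archive_path`) and returns its path; otherwise returns
/// `Ok(None)`.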
fn try_filter_fat_archs(
archs: &[impl FatArch],
target_arch: object::Architecture,
archive_path: &Path,
archive_map_data: &[u8],
) -> io::Result<Option<PathBuf>> {
let desired = match archs.iter().find(|a| a.architecture() == target_arch) {
Some(a) => a,
None => return Ok(None),
};
let (mut new_f, extracted_path) = tempfile::Builder::new()
.suffix(archive_path.file_name().unwrap())
.tempfile()?
.keep()
.unwrap();
new_f.write_all(
desired.data(archive_map_data).map_err(|e| io::Error::new(io::ErrorKind::Other, e))?,
)?;
Ok(Some(extracted_path))
}
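
/// Extracts the slice matching the current target architecture from a Mach-O universal
/// ("fat") file so it can be consumed as an ordinary archive. Returns `Ok(None)` if the
/// file is not a fat file or the target architecture is not one of aarch64/x86_64.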
pub fn try_extract_macho_fat_archive(
sess: &Session,
archive_path: &Path,
) -> io::Result<Option<PathBuf>> {
let archive_map = unsafe { Mmap::map(File::open(&archive_path)?)? };
let target_arch = match sess.target.arch.as_ref() {
"aarch64" => object::Architecture::Aarch64,
"x86_64" => object::Architecture::X86_64,
_ => return Ok(None),
};
if let Ok(h) = object::read::macho::MachOFatFile32::parse(&*archive_map) {
let archs = h.arches();
try_filter_fat_archs(archs, target_arch, archive_path, &*archive_map)
} else if let Ok(h) = object::read::macho::MachOFatFile64::parse(&*archive_map) {
let archs = h.arches();
try_filter_fat_archs(archs, target_arch, archive_path, &*archive_map)
} else {
Ok(None)
}
}
impl<'a> ArchiveBuilder for ArArchiveBuilder<'a> {
fn add_archive(
&mut self,
archive_path: &Path,
mut skip: Box<dyn FnMut(&str) -> bool + 'static>,
) -> io::Result<()> {
let mut archive_path = archive_path.to_path_buf();
if self.sess.target.llvm_target.contains("-apple-macosx") {
if let Some(new_archive_path) = try_extract_macho_fat_archive(self.sess, &archive_path)?
{
archive_path = new_archive_path
}
}
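        // Skip archives that were already added.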
if self.src_archives.iter().any(|archive| archive.0 == archive_path) {
return Ok(());
}
let archive_map = unsafe { Mmap::map(File::open(&archive_path)?)? };
let archive = ArchiveFile::parse(&*archive_map)
.map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err))?;
let archive_index = self.src_archives.len();
for entry in archive.members() {
let entry = entry.map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err))?;
let file_name = String::from_utf8(entry.name().to_vec())
.map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err))?;
if !skip(&file_name) {
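                // Thin archive members only record a path, so resolve it relative to the
                // directory containing the archive.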
if entry.is_thin() {
let member_path = archive_path.parent().unwrap().join(Path::new(&file_name));
self.entries.push((file_name.into_bytes(), ArchiveEntry::File(member_path)));
} else {
self.entries.push((
file_name.into_bytes(),
ArchiveEntry::FromArchive { archive_index, file_range: entry.file_range() },
));
}
}
}
self.src_archives.push((archive_path, archive_map));
Ok(())
}
fn add_file(&mut self, file: &Path) {
self.entries.push((
file.file_name().unwrap().to_str().unwrap().to_string().into_bytes(),
ArchiveEntry::File(file.to_owned()),
));
}
fn build(self: Box<Self>, output: &Path) -> bool {
let sess = self.sess;
match self.build_inner(output) {
Ok(any_members) => any_members,
Err(error) => {
sess.dcx().emit_fatal(ArchiveBuildFailure { path: output.to_owned(), error })
}
}
}
}
impl<'a> ArArchiveBuilder<'a> {
fn build_inner(self, output: &Path) -> io::Result<bool> {
let archive_kind = match &*self.sess.target.archive_format {
"gnu" => ArchiveKind::Gnu,
"bsd" => ArchiveKind::Bsd,
"darwin" => ArchiveKind::Darwin,
"coff" => ArchiveKind::Coff,
"aix_big" => ArchiveKind::AixBig,
kind => {
self.sess.dcx().emit_fatal(UnknownArchiveKind { kind });
}
};
let mut entries = Vec::new();
for (entry_name, entry) in self.entries {
let data =
match entry {
ArchiveEntry::FromArchive { archive_index, file_range } => {
let src_archive = &self.src_archives[archive_index];
let data = &src_archive.1
[file_range.0 as usize..file_range.0 as usize + file_range.1 as usize];
Box::new(data) as Box<dyn AsRef<[u8]>>
}
ArchiveEntry::File(file) => unsafe {
Box::new(
Mmap::map(File::open(file).map_err(|err| {
io_error_context("failed to open object file", err)
})?)
.map_err(|err| io_error_context("failed to map object file", err))?,
) as Box<dyn AsRef<[u8]>>
},
};
entries.push(NewArchiveMember {
buf: data,
object_reader: self.object_reader,
member_name: String::from_utf8(entry_name).unwrap(),
mtime: 0,
uid: 0,
gid: 0,
perms: 0o644,
})
}
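
        // Write the archive into a temporary directory next to `output` and rename it into
        // place afterwards, so readers never observe a partially written archive.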
let archive_tmpdir = TempFileBuilder::new()
.suffix(".temp-archive")
.tempdir_in(output.parent().unwrap_or_else(|| Path::new("")))
.map_err(|err| {
io_error_context("couldn't create a directory for the temp file", err)
})?;
let archive_tmpfile_path = archive_tmpdir.path().join("tmp.a");
let mut archive_tmpfile = File::create_new(&archive_tmpfile_path)
.map_err(|err| io_error_context("couldn't create the temp file", err))?;
write_archive_to_stream(
&mut archive_tmpfile,
&entries,
archive_kind,
false,
self.sess.target.arch == "arm64ec",
)?;
let any_entries = !entries.is_empty();
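        // Release the member buffers and the source archive mappings before the rename; an
        // open mapping could otherwise prevent replacing an input archive with the output
        // (notably on Windows).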
drop(entries);
drop(self.src_archives);
fs::rename(archive_tmpfile_path, output)
.map_err(|err| io_error_context("failed to rename archive file", err))?;
archive_tmpdir
.close()
.map_err(|err| io_error_context("failed to remove temporary directory", err))?;
Ok(any_entries)
}
}
fn io_error_context(context: &str, err: io::Error) -> io::Error {
io::Error::new(io::ErrorKind::Other, format!("{context}: {err}"))
}