diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 112a646e2..10b22f678 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -29,7 +29,7 @@ jobs: python-version: "3.9" - uses: actions/setup-python@v2 with: - python-version: "3.10" + python-version: "3.10.0" - name: Install cffi and virtualenv run: pip install cffi virtualenv - uses: actions-rs/toolchain@v1 diff --git a/Cargo.lock b/Cargo.lock index 05d5ec6d1..1264ed8ff 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1020,6 +1020,17 @@ version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" +[[package]] +name = "lddtree" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f5d1501bcbb2572cd5aaec64307faf409b52f25c0d8adbaeaa8afe01387f467" +dependencies = [ + "fs-err", + "glob", + "goblin", +] + [[package]] name = "libc" version = "0.2.112" @@ -1074,6 +1085,7 @@ dependencies = [ "ignore", "indoc", "keyring", + "lddtree", "minijinja", "once_cell", "platform-info", diff --git a/Cargo.toml b/Cargo.toml index 181a65f4b..96fab0508 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -57,6 +57,7 @@ ignore = "0.4.18" dialoguer = "0.9.0" console = "0.15.0" minijinja = "0.8.2" +lddtree = "0.1.4" [dev-dependencies] indoc = "1.0.3" diff --git a/Changelog.md b/Changelog.md index 889cd4b6d..7c176e3d1 100644 --- a/Changelog.md +++ b/Changelog.md @@ -12,6 +12,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 * Fix undefined auditwheel policy panic in [#740](https://github.com/PyO3/maturin/pull/740) * Fix sdist upload for packages where the pkgname contains multiple underscores in [#741](https://github.com/PyO3/maturin/pull/741) * Add `Cargo.lock` to sdist when `--locked` or `--frozen` specified in [#749](https://github.com/PyO3/maturin/pull/749) +* Implement auditwheel repair with patchelf in 
[#742](https://github.com/PyO3/maturin/pull/742) ## [0.12.4] - 2021-12-06 diff --git a/src/auditwheel/audit.rs b/src/auditwheel/audit.rs index c936e76b1..def57de55 100644 --- a/src/auditwheel/audit.rs +++ b/src/auditwheel/audit.rs @@ -33,8 +33,18 @@ pub enum AuditWheelError { #[error( "Your library is not {0} compliant because it links the following forbidden libraries: {1:?}", )] - PlatformTagValidationError(Policy, Vec), - /// The elf file isn't manylinux/musllinux compatible. Contains unsupported architecture + LinksForbiddenLibrariesError(Policy, Vec), + /// The elf file isn't manylinux/musllinux compatible. Contains the list of offending + /// libraries. + #[error( "Your library is not {0} compliant because of the presence of too-recent versioned symbols: {1:?}. Consider building in a manylinux docker container", )] + VersionedSymbolTooNewError(Policy, Vec), + /// The elf file isn't manylinux/musllinux compatible. Contains the list of offending + /// libraries with black-listed symbols. + #[error("Your library is not {0} compliant because it depends on black-listed symbols: {1:?}")] + BlackListedSymbolsError(Policy, Vec), + /// The elf file isn't manylinux/musllinux compatible. 
Contains unsupported architecture #[error("Your library is not {0} compliant because it has unsupported architecture: {1}")] UnsupportedArchitecture(Policy, String), /// This platform tag isn't defined by auditwheel yet @@ -43,15 +53,15 @@ pub enum AuditWheelError { } #[derive(Clone, Debug)] -struct VersionedLibrary { +pub struct VersionedLibrary { /// library name - name: String, + pub name: String, /// versions needed versions: HashSet, } /// Find required dynamic linked libraries with version information -fn find_versioned_libraries(elf: &Elf) -> Result, AuditWheelError> { +pub fn find_versioned_libraries(elf: &Elf) -> Vec { let mut symbols = Vec::new(); if let Some(verneed) = &elf.verneed { for need_file in verneed.iter() { @@ -73,7 +83,7 @@ fn find_versioned_libraries(elf: &Elf) -> Result, AuditWhe } } } - Ok(symbols) + symbols } /// Find incompliant symbols from symbol versions @@ -107,6 +117,7 @@ fn policy_is_satisfied( AuditWheelError::UnsupportedArchitecture(policy.clone(), arch.to_string()) })?; let mut offending_libs = HashSet::new(); + let mut offending_versioned_syms = HashSet::new(); let mut offending_blacklist_syms = HashMap::new(); let undef_symbols: HashSet = elf .dynsyms @@ -173,26 +184,37 @@ fn policy_is_satisfied( offending_symbols.join(", ") ) }; - offending_libs.insert(offender); + offending_versioned_syms.insert(offender); } } } - // Checks if we can give a more helpful error message - let is_libpython = Regex::new(r"^libpython3\.\d+\.so\.\d+\.\d+$").unwrap(); - let mut offenders: Vec = offending_libs.into_iter().collect(); - for (lib, syms) in offending_blacklist_syms { - offenders.push(format!( - "{} offending black-listed symbols: {}", - lib, - syms.join(", ") + // Check for black-listed symbols + if !offending_blacklist_syms.is_empty() { + let offenders = offending_blacklist_syms + .into_iter() + .map(|(lib, syms)| format!("{}: {}", lib, syms.join(", "))) + .collect(); + return Err(AuditWheelError::BlackListedSymbolsError( + 
policy.clone(), + offenders, + )); + } + // Check for too-recent versioned symbols + if !offending_versioned_syms.is_empty() { + return Err(AuditWheelError::VersionedSymbolTooNewError( + policy.clone(), + offending_versioned_syms.into_iter().collect(), )); } + // Check for libpython and forbidden libraries + let is_libpython = Regex::new(r"^libpython3\.\d+\.so\.\d+\.\d+$").unwrap(); + let offenders: Vec = offending_libs.into_iter().collect(); match offenders.as_slice() { [] => Ok(()), [lib] if is_libpython.is_match(lib) => { Err(AuditWheelError::LinksLibPythonError(lib.clone())) } - offenders => Err(AuditWheelError::PlatformTagValidationError( + offenders => Err(AuditWheelError::LinksForbiddenLibrariesError( policy.clone(), offenders.to_vec(), )), @@ -218,8 +240,9 @@ fn get_default_platform_policies() -> Vec { /// An reimplementation of auditwheel, which checks elf files for /// manylinux/musllinux compliance. /// -/// If `platform_tag`, is None, it returns the the highest matching manylinux/musllinux policy, or `linux` -/// if nothing else matches. It will error for bogus cases, e.g. if libpython is linked. +/// If `platform_tag` is None, it returns the highest matching manylinux/musllinux policy +/// and whether we need to repair with patchelf, or `linux` if nothing else matches. +/// It will error for bogus cases, e.g. if libpython is linked. /// /// If a specific manylinux/musllinux version is given, compliance is checked and a warning printed if /// a higher version would be possible. 
@@ -229,10 +252,11 @@ pub fn auditwheel_rs( path: &Path, target: &Target, platform_tag: Option, -) -> Result { +) -> Result<(Policy, bool), AuditWheelError> { if !target.is_linux() || platform_tag == Some(PlatformTag::Linux) { - return Ok(Policy::default()); + return Ok((Policy::default(), false)); } + let cross_compiling = target.cross_compiling(); let arch = target.target_arch().to_string(); let mut file = File::open(path).map_err(AuditWheelError::IoError)?; let mut buffer = Vec::new(); @@ -241,7 +265,7 @@ pub fn auditwheel_rs( let elf = Elf::parse(&buffer).map_err(AuditWheelError::GoblinError)?; // This returns essentially the same as ldd let deps: Vec = elf.libraries.iter().map(ToString::to_string).collect(); - let versioned_libraries = find_versioned_libraries(&elf)?; + let versioned_libraries = find_versioned_libraries(&elf); // Find the highest possible policy, if any let platform_policies = match platform_tag { @@ -267,15 +291,28 @@ pub fn auditwheel_rs( Some(PlatformTag::Linux) => unreachable!(), }; let mut highest_policy = None; + let mut should_repair = false; for policy in platform_policies.iter() { let result = policy_is_satisfied(policy, &elf, &arch, &deps, &versioned_libraries); match result { Ok(_) => { highest_policy = Some(policy.clone()); + should_repair = false; break; } + Err(err @ AuditWheelError::LinksForbiddenLibrariesError(..)) => { + // TODO: support repair for cross compiled wheels + if !cross_compiling { + highest_policy = Some(policy.clone()); + should_repair = true; + break; + } else { + return Err(err); + } + } + Err(AuditWheelError::VersionedSymbolTooNewError(..)) + | Err(AuditWheelError::BlackListedSymbolsError(..)) // UnsupportedArchitecture happens when trying 2010 with aarch64 - Err(AuditWheelError::PlatformTagValidationError(_, _)) | Err(AuditWheelError::UnsupportedArchitecture(..)) => continue, // If there was an error parsing the symbols or libpython was linked, // we error no matter what the requested policy was @@ -283,7 
+320,7 @@ pub fn auditwheel_rs( } } - if let Some(platform_tag) = platform_tag { + let policy = if let Some(platform_tag) = platform_tag { let tag = platform_tag.to_string(); let mut policy = Policy::from_name(&tag).ok_or(AuditWheelError::UndefinedPolicy(tag))?; policy.fixup_musl_libc_so_name(target.target_arch()); @@ -299,7 +336,19 @@ pub fn auditwheel_rs( } match policy_is_satisfied(&policy, &elf, &arch, &deps, &versioned_libraries) { - Ok(_) => Ok(policy), + Ok(_) => { + should_repair = false; + Ok(policy) + } + Err(err @ AuditWheelError::LinksForbiddenLibrariesError(..)) => { + // TODO: support repair for cross compiled wheels + if !cross_compiling { + should_repair = true; + Ok(policy) + } else { + Err(err) + } + } Err(err) => Err(err), } } else if let Some(policy) = highest_policy { @@ -312,5 +361,6 @@ pub fn auditwheel_rs( // Fallback to linux Ok(Policy::default()) - } + }?; + Ok((policy, should_repair)) } diff --git a/src/auditwheel/mod.rs b/src/auditwheel/mod.rs index df586752f..e79bdbd6a 100644 --- a/src/auditwheel/mod.rs +++ b/src/auditwheel/mod.rs @@ -1,8 +1,11 @@ mod audit; mod musllinux; +pub mod patchelf; mod platform_tag; mod policy; +mod repair; -pub use self::audit::*; +pub use audit::*; pub use platform_tag::PlatformTag; pub use policy::{Policy, MANYLINUX_POLICIES, MUSLLINUX_POLICIES}; +pub use repair::{get_external_libs, hash_file}; diff --git a/src/auditwheel/patchelf.rs b/src/auditwheel/patchelf.rs new file mode 100644 index 000000000..44c31967f --- /dev/null +++ b/src/auditwheel/patchelf.rs @@ -0,0 +1,96 @@ +use anyhow::{bail, Context, Result}; +use std::ffi::OsStr; +use std::path::Path; +use std::process::Command; + +/// Replace a declared dependency on a dynamic library with another one (`DT_NEEDED`) +pub fn replace_needed>( + file: impl AsRef, + old_lib: &str, + new_lib: &S, +) -> Result<()> { + let mut cmd = Command::new("patchelf"); + cmd.arg("--replace-needed") + .arg(old_lib) + .arg(new_lib) + .arg(file.as_ref()); + let output = cmd + 
.output() .context("Failed to execute 'patchelf', did you install it?")?; + if !output.status.success() { + bail!( + "patchelf --replace-needed failed: {}", + String::from_utf8_lossy(&output.stderr) + ); + } + Ok(()) +} + +/// Change `SONAME` of a dynamic library +pub fn set_soname>(file: impl AsRef, soname: &S) -> Result<()> { + let mut cmd = Command::new("patchelf"); + cmd.arg("--set-soname").arg(soname).arg(file.as_ref()); + let output = cmd + .output() + .context("Failed to execute 'patchelf', did you install it?")?; + if !output.status.success() { + bail!( + "patchelf --set-soname failed: {}", + String::from_utf8_lossy(&output.stderr) + ); + } + Ok(()) +} + +/// Remove a `RPATH` from executables and libraries +pub fn remove_rpath(file: impl AsRef) -> Result<()> { + let mut cmd = Command::new("patchelf"); + cmd.arg("--remove-rpath").arg(file.as_ref()); + let output = cmd + .output() + .context("Failed to execute 'patchelf', did you install it?")?; + if !output.status.success() { + bail!( + "patchelf --remove-rpath failed: {}", + String::from_utf8_lossy(&output.stderr) + ); + } + Ok(()) +} + +/// Change the `RPATH` of executables and libraries +pub fn set_rpath>(file: impl AsRef, rpath: &S) -> Result<()> { + remove_rpath(&file)?; + let mut cmd = Command::new("patchelf"); + cmd.arg("--force-rpath") + .arg("--set-rpath") + .arg(rpath) + .arg(file.as_ref()); + let output = cmd + .output() + .context("Failed to execute 'patchelf', did you install it?")?; + if !output.status.success() { + bail!( + "patchelf --set-rpath failed: {}", + String::from_utf8_lossy(&output.stderr) + ); + } + Ok(()) +} + +/// Get the `RPATH` of executables and libraries +pub fn get_rpath(file: impl AsRef) -> Result { + let mut cmd = Command::new("patchelf"); + cmd.arg("--print-rpath").arg(file.as_ref()); + let output = cmd + .output() + .context("Failed to execute 'patchelf', did you install it?")?; + if !output.status.success() { + bail!( + "patchelf --print-rpath failed: {}", + 
String::from_utf8_lossy(&output.stderr) + ); + } + let rpath = String::from_utf8(output.stdout)?; + Ok(rpath.trim().to_string()) +} diff --git a/src/auditwheel/repair.rs b/src/auditwheel/repair.rs new file mode 100644 index 000000000..db8c9082f --- /dev/null +++ b/src/auditwheel/repair.rs @@ -0,0 +1,40 @@ +use super::audit::AuditWheelError; +use crate::auditwheel::Policy; +use anyhow::Result; +use fs_err as fs; +use lddtree::DependencyAnalyzer; +use sha2::{Digest, Sha256}; +use std::io; +use std::path::Path; + +pub fn get_external_libs( + artifact: impl AsRef, + policy: &Policy, +) -> Result, AuditWheelError> { + let dep_analyzer = DependencyAnalyzer::new(); + let deps = dep_analyzer.analyze(artifact).unwrap(); + let mut ext_libs = Vec::new(); + for (name, lib) in deps.libraries { + // Skip dynamic linker/loader and white-listed libs + if name.starts_with("ld-linux") + || name == "ld64.so.2" + || name == "ld64.so.1" + // musl libc, eg: libc.musl-aarch64.so.1 + || name.starts_with("libc.") + || policy.lib_whitelist.contains(&name) + { + continue; + } + ext_libs.push(lib); + } + Ok(ext_libs) +} + +/// Calculate the sha256 of a file +pub fn hash_file(path: impl AsRef) -> Result { + let mut file = fs::File::open(path.as_ref()).map_err(AuditWheelError::IoError)?; + let mut hasher = Sha256::new(); + io::copy(&mut file, &mut hasher).map_err(AuditWheelError::IoError)?; + let hex = format!("{:x}", hasher.finalize()); + Ok(hex) +} diff --git a/src/build_context.rs b/src/build_context.rs index feb1233a7..f85a3b613 100644 --- a/src/build_context.rs +++ b/src/build_context.rs @@ -1,21 +1,19 @@ -use crate::auditwheel::auditwheel_rs; -use crate::auditwheel::PlatformTag; -use crate::auditwheel::Policy; -use crate::compile; +use crate::auditwheel::{ + auditwheel_rs, get_external_libs, hash_file, patchelf, PlatformTag, Policy, +}; use crate::compile::warn_missing_py_init; -use crate::module_writer::write_python_part; -use crate::module_writer::WheelWriter; -use 
crate::module_writer::{write_bin, write_bindings_module, write_cffi_module}; +use crate::module_writer::{ + write_bin, write_bindings_module, write_cffi_module, write_python_part, WheelWriter, +}; use crate::python_interpreter::InterpreterKind; use crate::source_distribution::source_distribution; -use crate::Metadata21; -use crate::PyProjectToml; -use crate::PythonInterpreter; -use crate::Target; +use crate::{compile, Metadata21, ModuleWriter, PyProjectToml, PythonInterpreter, Target}; use anyhow::{anyhow, bail, Context, Result}; use cargo_metadata::Metadata; use fs_err as fs; +use lddtree::Library; use std::borrow::Cow; +use std::collections::HashMap; use std::path::{Path, PathBuf}; /// The way the rust code is used in the wheel @@ -256,23 +254,105 @@ impl BuildContext { python_interpreter: Option<&PythonInterpreter>, artifact: &Path, platform_tag: Option, - ) -> Result { + ) -> Result<(Policy, Vec)> { if self.skip_auditwheel { - return Ok(Policy::default()); + return Ok((Policy::default(), Vec::new())); } let target = python_interpreter .map(|x| &x.target) .unwrap_or(&self.target); - let policy = auditwheel_rs(artifact, target, platform_tag).context( - if let Some(platform_tag) = platform_tag { - format!("Error ensuring {} compliance", platform_tag) + let (policy, should_repair) = + auditwheel_rs(artifact, target, platform_tag).with_context(|| { + if let Some(platform_tag) = platform_tag { + format!("Error ensuring {} compliance", platform_tag) + } else { + "Error checking for manylinux/musllinux compliance".to_string() + } + })?; + let external_libs = if should_repair && !self.editable { + get_external_libs(&artifact, &policy).with_context(|| { + if let Some(platform_tag) = platform_tag { + format!("Error repairing wheel for {} compliance", platform_tag) + } else { + "Error repairing wheel for manylinux/musllinux compliance".to_string() + } + })? 
+ } else { + Vec::new() + }; + Ok((policy, external_libs)) + } + + fn add_external_libs( + &self, + writer: &mut WheelWriter, + artifact: &Path, + ext_libs: &[Library], + ) -> Result<()> { + if ext_libs.is_empty() { + return Ok(()); + } + // Put external libs to ${module_name}.libs directory + // See https://github.com/pypa/auditwheel/issues/89 + let libs_dir = PathBuf::from(format!("{}.libs", self.module_name)); + writer.add_directory(&libs_dir)?; + + let temp_dir = tempfile::tempdir()?; + let mut soname_map = HashMap::new(); + for lib in ext_libs { + let lib_path = lib.realpath.clone().with_context(|| { + format!( + "Cannot repair wheel, because required library {} could not be located.", + lib.path.display() + ) + })?; + let short_hash = &hash_file(&lib_path)?[..8]; + let (file_stem, file_ext) = lib.name.split_once('.').unwrap(); + let new_soname = if !file_stem.ends_with(&format!("-{}", short_hash)) { + format!("{}-{}.{}", file_stem, short_hash, file_ext) } else { - "Error checking for manylinux/musllinux compliance".to_string() - }, - )?; - Ok(policy) + format!("{}.{}", file_stem, file_ext) + }; + let dest_path = temp_dir.path().join(&new_soname); + fs::copy(&lib_path, &dest_path)?; + patchelf::set_soname(&dest_path, &new_soname)?; + if !lib.rpath.is_empty() || !lib.runpath.is_empty() { + patchelf::set_rpath(&dest_path, &libs_dir)?; + } + soname_map.insert( + lib.name.clone(), + (new_soname.clone(), dest_path.clone(), lib.needed.clone()), + ); + + patchelf::replace_needed(artifact, &lib.name, &new_soname)?; + } + + // we grafted in a bunch of libraries and modified their sonames, but + // they may have internal dependencies (DT_NEEDED) on one another, so + // we need to update those records so each now knows about the new + // name of the other. 
+ for (new_soname, path, needed) in soname_map.values() { + for n in needed { + if soname_map.contains_key(n) { + patchelf::replace_needed(path, n, &soname_map[n].0)?; + } + } + writer.add_file_with_permissions(libs_dir.join(new_soname), path, 0o755)?; + } + + // Currently artifact .so file always resides at ${module_name}/${module_name}.so + let artifact_dir = Path::new(&self.module_name); + let old_rpaths = patchelf::get_rpath(artifact)?; + // TODO: clean existing rpath entries if it's not pointed to a location within the wheel + // See https://github.com/pypa/auditwheel/blob/353c24250d66951d5ac7e60b97471a6da76c123f/src/auditwheel/repair.py#L160 + let mut new_rpaths: Vec<&str> = old_rpaths.split(':').collect(); + let new_rpath = Path::new("$ORIGIN").join(relpath(&libs_dir, artifact_dir)); + new_rpaths.push(new_rpath.to_str().unwrap()); + let new_rpath = new_rpaths.join(":"); + patchelf::set_rpath(artifact, &new_rpath)?; + Ok(()) } fn add_pth(&self, writer: &mut WheelWriter) -> Result<()> { @@ -286,6 +366,7 @@ impl BuildContext { &self, artifact: &Path, platform_tag: PlatformTag, + ext_libs: &[Library], major: u8, min_minor: u8, ) -> Result { @@ -295,6 +376,7 @@ impl BuildContext { let tag = format!("cp{}{}-abi3-{}", major, min_minor, platform); let mut writer = WheelWriter::new(&tag, &self.out, &self.metadata21, &[tag.clone()])?; + self.add_external_libs(&mut writer, artifact, ext_libs)?; write_bindings_module( &mut writer, @@ -329,9 +411,15 @@ impl BuildContext { python_interpreter, Some(self.project_layout.extension_name()), )?; - let policy = self.auditwheel(python_interpreter, &artifact, self.platform_tag)?; - let (wheel_path, tag) = - self.write_binding_wheel_abi3(&artifact, policy.platform_tag(), major, min_minor)?; + let (policy, external_libs) = + self.auditwheel(python_interpreter, &artifact, self.platform_tag)?; + let (wheel_path, tag) = self.write_binding_wheel_abi3( + &artifact, + policy.platform_tag(), + &external_libs, + major, + min_minor, + )?; 
println!( "📦 Built wheel for abi3 Python ≥ {}.{} to {}", @@ -349,10 +437,12 @@ impl BuildContext { python_interpreter: &PythonInterpreter, artifact: &Path, platform_tag: PlatformTag, + ext_libs: &[Library], ) -> Result { let tag = python_interpreter.get_tag(platform_tag, self.universal2)?; let mut writer = WheelWriter::new(&tag, &self.out, &self.metadata21, &[tag.clone()])?; + self.add_external_libs(&mut writer, artifact, ext_libs)?; write_bindings_module( &mut writer, @@ -391,9 +481,14 @@ impl BuildContext { Some(python_interpreter), Some(self.project_layout.extension_name()), )?; - let policy = self.auditwheel(Some(python_interpreter), &artifact, self.platform_tag)?; - let (wheel_path, tag) = - self.write_binding_wheel(python_interpreter, &artifact, policy.platform_tag())?; + let (policy, external_libs) = + self.auditwheel(Some(python_interpreter), &artifact, self.platform_tag)?; + let (wheel_path, tag) = self.write_binding_wheel( + python_interpreter, + &artifact, + policy.platform_tag(), + &external_libs, + )?; println!( "📦 Built wheel for {} {}.{}{} to {}", python_interpreter.interpreter_kind, @@ -409,8 +504,7 @@ impl BuildContext { Ok(wheels) } - /// Runs cargo build, extracts the cdylib from the output, runs auditwheel and returns the - /// artifact + /// Runs cargo build, extracts the cdylib from the output and returns the path to it /// /// The module name is used to warn about missing a `PyInit_` function for /// bindings modules. 
@@ -434,19 +528,29 @@ impl BuildContext { .context("Failed to parse the native library")?; } - Ok(artifact) + if self.editable || self.skip_auditwheel { + return Ok(artifact); + } + // auditwheel repair will edit the file, so we need to copy it to avoid errors in reruns + let maturin_build = artifact.parent().unwrap().join("maturin"); + fs::create_dir_all(&maturin_build)?; + let new_artifact = maturin_build.join(artifact.file_name().unwrap()); + fs::copy(&artifact, &new_artifact)?; + Ok(new_artifact) } fn write_cffi_wheel( &self, artifact: &Path, platform_tag: PlatformTag, + ext_libs: &[Library], ) -> Result { let (tag, tags) = self .target .get_universal_tags(platform_tag, self.universal2)?; let mut writer = WheelWriter::new(&tag, &self.out, &self.metadata21, &tags)?; + self.add_external_libs(&mut writer, artifact, ext_libs)?; write_cffi_module( &mut writer, @@ -468,8 +572,9 @@ impl BuildContext { pub fn build_cffi_wheel(&self) -> Result> { let mut wheels = Vec::new(); let artifact = self.compile_cdylib(None, None)?; - let policy = self.auditwheel(None, &artifact, self.platform_tag)?; - let (wheel_path, tag) = self.write_cffi_wheel(&artifact, policy.platform_tag())?; + let (policy, external_libs) = self.auditwheel(None, &artifact, self.platform_tag)?; + let (wheel_path, tag) = + self.write_cffi_wheel(&artifact, policy.platform_tag(), &external_libs)?; // Warn if cffi isn't specified in the requirements if !self @@ -494,6 +599,7 @@ impl BuildContext { &self, artifact: &Path, platform_tag: PlatformTag, + ext_libs: &[Library], ) -> Result { let (tag, tags) = self .target @@ -524,6 +630,8 @@ impl BuildContext { let bin_name = artifact .file_name() .expect("Couldn't get the filename from the binary produced by cargo"); + self.add_external_libs(&mut writer, artifact, ext_libs)?; + write_bin(&mut writer, artifact, &self.metadata21, bin_name)?; self.add_pth(&mut writer)?; @@ -545,12 +653,55 @@ impl BuildContext { .cloned() .ok_or_else(|| anyhow!("Cargo didn't build a 
binary"))?; - let policy = self.auditwheel(None, &artifact, self.platform_tag)?; + let (policy, external_libs) = self.auditwheel(None, &artifact, self.platform_tag)?; - let (wheel_path, tag) = self.write_bin_wheel(&artifact, policy.platform_tag())?; + let (wheel_path, tag) = + self.write_bin_wheel(&artifact, policy.platform_tag(), &external_libs)?; println!("📦 Built wheel to {}", wheel_path.display()); wheels.push((wheel_path, tag)); Ok(wheels) } } + +fn relpath(to: &Path, from: &Path) -> PathBuf { + let mut suffix_pos = 0; + for (f, t) in from.components().zip(to.components()) { + if f == t { + suffix_pos += 1; + } else { + break; + } + } + let mut result = PathBuf::new(); + from.components() + .skip(suffix_pos) + .map(|_| result.push("..")) + .last(); + to.components() + .skip(suffix_pos) + .map(|x| result.push(x.as_os_str())) + .last(); + result +} + +#[cfg(test)] +mod test { + use super::relpath; + use std::path::Path; + + #[test] + fn test_relpath() { + let cases = [ + ("", "", ""), + ("/", "/usr", ".."), + ("/", "/usr/lib", "../.."), + ]; + for (from, to, expected) in cases { + let from = Path::new(from); + let to = Path::new(to); + let result = relpath(from, to); + assert_eq!(result, Path::new(expected)); + } + } +}