Compare commits

..

8 Commits

Author SHA1 Message Date
d852ee43fe perf(precomputation): cleaned up allocations
recovered about 5% execution time
2025-12-02 13:09:19 -05:00
ed2c1d5816 build(.gitignore): .whl added
.whl files added to gitignore so large precompiled wheel folders are not
accidentally committed
2025-12-02 10:04:42 -05:00
8a22496398 fix(wheels): Repair wheel macos
Script to repair RPATH issues in wheels on macos
2025-12-02 10:04:00 -05:00
Emily Boudreaux
e0a05bbd1a build(cross): macOS cross compilation
macos cross compilation now works. macos binaries can be compiled on
linux with osxcross installed and built
2025-12-01 13:28:25 -05:00
Emily Boudreaux
e260c7b02c fix(build): fortran
Fortran language only added if build-fortran enabled
2025-12-01 09:59:43 -05:00
Emily Boudreaux
b06b622c06 build(cross): working on cross compilation
We want to be able to build all targets on one machine, began
implementing cross compilation
2025-12-01 09:59:22 -05:00
dac09ae24c build(fortran): added check for fortran
fortran tests only build when fortran build option is enabled
2025-11-30 10:19:56 -05:00
4fe242a5ef build(meson): added checks for header compatibility 2025-11-30 10:16:44 -05:00
23 changed files with 465 additions and 98 deletions

4
.gitignore vendored
View File

@@ -119,3 +119,7 @@ meson-boost-test/
*.json
*.xml
*_pynucastro_network.py
cross/python_includes
*.whl

View File

@@ -1,7 +1,8 @@
cmake = import('cmake')
subdir('fourdst')
subdir('python')
subdir('fourdst')
subdir('sundials')
subdir('cppad')

View File

@@ -0,0 +1,16 @@
# Locate the Python installation used to build the gridfire extension module.
py_installation = import('python').find_installation('python3', pure: false)
# When cross compiling for macOS the build machine's Python headers do not
# match the host, so use the pre-downloaded headers stored under
# cross/python_includes for the version selected by 'python-target-version'.
if meson.is_cross_build() and host_machine.system() == 'darwin'
py_ver = get_option('python-target-version')
message('Cross build on Darwin, using python version ' + py_ver)
# Header-only dependency pointing at the extracted python.org headers.
py_inc_dir = include_directories('../../cross/python_includes/python-' + py_ver + '/include/python' + py_ver)
py_dep = declare_dependency(include_directories: py_inc_dir)
py_module_prefix = ''
# NOTE(review): 'py_module_suffic' looks like a typo for 'py_module_suffix' —
# confirm whether any other meson.build file references this spelling before renaming.
py_module_suffic = 'so'
# Make every later lookup of the 'python3' dependency resolve to these headers.
meson.override_dependency('python3', py_dep)
else
# Native build: take includes and link flags straight from the found installation.
py_dep = py_installation.dependency()
py_module_prefix = ''
py_module_suffic = 'so'
endif

View File

@@ -4,8 +4,10 @@ cvode_cmake_options = cmake.subproject_options()
cvode_cmake_options.add_cmake_defines({
'CMAKE_CXX_FLAGS' : '-Wno-deprecated-declarations',
'CMAKE_C_FLAGS' : '-Wno-deprecated-declarations',
'BUILD_SHARED_LIBS' : 'ON',
'BUILD_STATIC_LIBS' : 'OFF',
'BUILD_SHARED_LIBS' : 'OFF',
'BUILD_STATIC_LIBS' : 'ON',
'EXAMPLES_ENABLE_C': 'OFF',
'CMAKE_POSITION_INDEPENDENT_CODE': true
})
@@ -19,29 +21,56 @@ cvode_sp = cmake.subproject(
options: cvode_cmake_options,
)
# For the core SUNDIALS library (SUNContext, etc.)
sundials_core_dep = cvode_sp.dependency('sundials_core_shared')
sundials_core_tgt = cvode_sp.target('sundials_core_static')
sundials_cvode_tgt = cvode_sp.target('sundials_cvode_static')
sundials_nvecserial_tgt = cvode_sp.target('sundials_nvecserial_static')
sundials_sunmatrixdense_tgt = cvode_sp.target('sundials_sunmatrixdense_static')
sundials_sunlinsoldense_tgt = cvode_sp.target('sundials_sunlinsoldense_static')
# For the CVODE integrator library
sundials_cvode_dep = cvode_sp.dependency('sundials_cvode_shared')
cvode_objs = [
sundials_core_tgt.extract_all_objects(recursive: true),
sundials_cvode_tgt.extract_all_objects(recursive: true),
sundials_nvecserial_tgt.extract_all_objects(recursive: true),
sundials_sunmatrixdense_tgt.extract_all_objects(recursive: true),
sundials_sunlinsoldense_tgt.extract_all_objects(recursive: true),
]
# For the serial NVector library
sundials_nvecserial_dep = cvode_sp.dependency('sundials_nvecserial_shared')
sundials_core_includes = cvode_sp.include_directories('sundials_core_static')
sundials_cvode_includes = cvode_sp.include_directories('sundials_cvode_static')
sundials_nvecserial_includes = cvode_sp.include_directories('sundials_nvecserial_static')
sundials_sunmatrixdense_includes = cvode_sp.include_directories('sundials_sunmatrixdense_static')
sundials_sunlinsoldense_includes = cvode_sp.include_directories('sundials_sunlinsoldense_static')
# For the dense matrix library
sundials_sunmatrixdense_dep = cvode_sp.dependency('sundials_sunmatrixdense_shared')
cvode_includes = [
sundials_core_includes,
sundials_cvode_includes,
sundials_nvecserial_includes,
sundials_sunmatrixdense_includes,
sundials_sunlinsoldense_includes
]
# For the dense linear solver library
sundials_sunlinsoldense_dep = cvode_sp.dependency('sundials_sunlinsoldense_shared')
cvode_dep = declare_dependency(
dependencies: [
sundials_core_dep,
sundials_cvode_dep,
sundials_nvecserial_dep,
sundials_sunmatrixdense_dep,
sundials_sunlinsoldense_dep,
],
empty_cvode_file = configure_file(
output: 'cvode_dummy_ar.cpp',
command: ['echo'],
capture: true
)
libcvode_static = static_library(
'cvode-static',
empty_cvode_file,
objects: cvode_objs,
include_directories: cvode_includes,
pic: true,
install: false
)
cvode_dep = declare_dependency(
link_with: libcvode_static,
include_directories: cvode_includes,
)

View File

@@ -5,8 +5,10 @@ kinsol_cmake_options = cmake.subproject_options()
kinsol_cmake_options.add_cmake_defines({
'CMAKE_CXX_FLAGS' : '-Wno-deprecated-declarations',
'CMAKE_C_FLAGS' : '-Wno-deprecated-declarations',
'BUILD_SHARED_LIBS' : 'ON',
'BUILD_STATIC_LIBS' : 'OFF',
'BUILD_SHARED_LIBS' : 'OFF',
'BUILD_STATIC_LIBS' : 'ON',
'EXAMPLES_ENABLE_C' : 'OFF',
'CMAKE_POSITION_INDEPENDENT_CODE': true
})
kinsol_cmake_options.add_cmake_defines({
@@ -19,11 +21,31 @@ kinsol_sp = cmake.subproject(
options: kinsol_cmake_options,
)
sundials_kinsol_shared = kinsol_sp.dependency('sundials_kinsol_shared')
sundials_kinsol_static_tgt = kinsol_sp.target('sundials_kinsol_obj_static')
kinsol_includes = kinsol_sp.include_directories('sundials_kinsol_obj_static')
kinsol_dep = declare_dependency(
dependencies: [
sundials_kinsol_shared,
]
kinsol_objs = [sundials_kinsol_static_tgt.extract_all_objects(recursive: false)]
empty_kinsol_file = configure_file(
output: 'kinsol_dummy_ar.cpp',
command: ['echo'],
capture: true
)
libkinsol_static = static_library(
'kinsol_static',
empty_kinsol_file,
objects: kinsol_objs,
include_directories: kinsol_includes,
pic: true,
install: false
)
kinsol_dep = declare_dependency(
link_with: libkinsol_static,
include_directories: kinsol_includes
)

View File

@@ -78,7 +78,7 @@ def fix_rpaths(binary_path):
def main():
if len(sys.argv) != 2:
print(f"--- Error: Expected one argument (path to .so file), got {sys.argv}", file=sys.stderr)
print(f"--- Error: Expected one argument (path to .dylib/.so file), got {sys.argv}", file=sys.stderr)
sys.exit(1)
# Get the file path directly from the command line argument

View File

@@ -1,7 +1,6 @@
# --- Python Extension Setup ---
py_installation = import('python').find_installation('python3', pure: false)
gridfire_py_deps = [
py_dep,
pybind11_dep,
const_dep,
config_dep,
@@ -9,9 +8,7 @@ gridfire_py_deps = [
gridfire_dep
]
py_mod = py_installation.extension_module(
'_gridfire', # Name of the generated .so/.pyd file (without extension)
sources: [
py_sources = [
meson.project_source_root() + '/src/python/bindings.cpp',
meson.project_source_root() + '/src/python/types/bindings.cpp',
meson.project_source_root() + '/src/python/partition/bindings.cpp',
@@ -29,11 +26,28 @@ py_mod = py_installation.extension_module(
meson.project_source_root() + '/src/python/policy/bindings.cpp',
meson.project_source_root() + '/src/python/policy/trampoline/py_policy.cpp',
meson.project_source_root() + '/src/python/utils/bindings.cpp',
],
]
if meson.is_cross_build() and host_machine.system() == 'darwin'
py_mod = shared_module(
'_gridfire',
sources: py_sources,
dependencies: gridfire_py_deps,
name_prefix: '',
name_suffix: 'so',
install: true,
install_dir: py_installation.get_install_dir() + '/gridfire'
)
else
py_mod = py_installation.extension_module(
'_gridfire', # Name of the generated .so/.pyd file (without extension)
sources: py_sources,
dependencies : gridfire_py_deps,
install : true,
subdir: 'gridfire',
)
)
endif
py_installation.install_sources(

19
cross/macos_arm64.ini Normal file
View File

@@ -0,0 +1,19 @@
# Meson cross file: build macOS arm64 binaries on Linux with osxcross.
[binaries]
c = 'arm64-apple-darwin25-clang'
cpp = 'arm64-apple-darwin25-clang++'
ar = 'arm64-apple-darwin25-ar'
strip = 'arm64-apple-darwin25-strip'
pkg-config = 'pkg-config'
# Deliberately a no-op so archive index rebuilding is skipped.
ranlib = '/usr/bin/true'

[host_machine]
system = 'darwin'
cpu_family = 'aarch64'
cpu = 'arm64'
endian = 'little'

[built-in options]
# Use the canonical '-mmacosx-version-min' spelling consistently for both C
# and C++ (the original mixed in '-mmacos-version-min' for the cpp entries,
# a newer alias that older clang releases do not recognise).
c_args = ['-mmacosx-version-min=15.0']
cpp_args = ['-mmacosx-version-min=15.0']
c_link_args = ['-mmacosx-version-min=15.0']
cpp_link_args = ['-mmacosx-version-min=15.0']

View File

@@ -18,7 +18,7 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# *********************************************************************** #
project('GridFire', ['c', 'cpp', 'fortran'], version: 'v0.7.4_rc2', default_options: ['cpp_std=c++23'], meson_version: '>=1.5.0')
project('GridFire', ['c', 'cpp'], version: 'v0.7.4_rc2', default_options: ['cpp_std=c++23'], meson_version: '>=1.5.0')
if get_option('build-python')
add_project_arguments('-fvisibility=hidden', language: 'cpp')
@@ -27,30 +27,31 @@ else
endif
message('Found CXX compiler: ' + meson.get_compiler('cpp').get_id())
message('Found FORTRAN compiler: ' + meson.get_compiler('fortran').get_id())
message('C++ standard set to: ' + get_option('cpp_std'))
message('Fortran standard set to: ' + get_option('fortran_std'))
cppc = meson.get_compiler('cpp')
cc = meson.get_compiler('c')
if meson.get_compiler('cpp').get_id() == 'clang'
# We disable these because of CppAD
if cppc.get_id() == 'clang'
message('disabling bitwise-instead-of-logical warnings for clang')
add_project_arguments('-Wno-bitwise-instead-of-logical', language: 'cpp')
endif
if meson.get_compiler('cpp').get_id() == 'gcc'
# We disable these because boost's notes about ABI changes from C++11 -> C++17 make the build too noisy
if cppc.get_id() == 'gcc'
message('disabling psabi warnings for gcc')
add_project_arguments('-Wno-psabi', language: 'cpp')
if (meson.get_compiler('cpp').version().version_compare('<14.0'))
error('g++ version must be at least 14.0, found ' + meson.get_compiler('cpp').version())
if (cppc.version().version_compare('<14.0'))
error('g++ version must be at least 14.0, found ' + cppc.version())
endif
endif
build_fortran = get_option('build-fortran')
if (build_fortran)
add_languages('fortran', native: true)
message('Found FORTRAN compiler: ' + meson.get_compiler('fortran').get_id())
message('Fortran standard set to: ' + get_option('fortran_std'))
message('Building fortran module (gridfire_mod.mod)')
fc = meson.get_compiler('fortran')
if not get_option('unsafe-fortran')
@@ -58,12 +59,25 @@ if (build_fortran)
error('The only supported fortran compiler for GridFire is gfortran (version >= 14.0), found ' + fc + '. GridFire has not been tested with any other compilers. You can disable this check with the -Dunsafe-fortran=true flag to try other compilers')
endif
endif
if (meson.get_compiler('fortran').version().version_compare('<14.0'))
error('gfortran version must be at least 14.0, found ' + meson.get_compiler('fortran').version())
if (fc.version().version_compare('<14.0'))
error('gfortran version must be at least 14.0, found ' + fc.version())
endif
endif
if not cppc.has_header('print')
error('C++ standard library header <print> not found. Please ensure your compiler and standard library supports C++23. We have already validated your compiler version so this is likely an issue with your standard library installation.')
endif
if not cppc.has_header('format')
error('C++ standard library header <format> not found. Please ensure your compiler and standard library supports C++23. We have already validated your compiler version so this is likely an issue with your standard library installation.')
endif
ignore_unused_args = '-Wno-unused-command-line-argument'
add_global_arguments(ignore_unused_args, language: 'cpp')
add_global_arguments(ignore_unused_args, language: 'c')
# For Eigen

View File

@@ -5,3 +5,4 @@ option('build-tests', type: 'boolean', value: true, description: 'build the test
option('build-fortran', type: 'boolean', value: false, description: 'build fortran module support')
option('unsafe-fortran', type: 'boolean', value: false, description: 'Allow untested fortran compilers (compilers other than gfortran)')
option('unity-safe', type: 'boolean', value: false, description: 'Enable safe unity builds for better compatibility across different compilers and platforms')
option('python-target-version', type: 'string', value: '3.13', description: 'Target version for python compilation, only used for cross compilation')

View File

@@ -53,7 +53,7 @@ namespace gridfire::engine {
struct StepDerivatives {
std::map<fourdst::atomic::Species, T> dydt{}; ///< Derivatives of abundances (dY/dt for each species).
T nuclearEnergyGenerationRate = T(0.0); ///< Specific energy generation rate (e.g., erg/g/s).
std::map<fourdst::atomic::Species, std::unordered_map<std::string, T>> reactionContributions{};
std::optional<std::map<fourdst::atomic::Species, std::unordered_map<std::string, T>>> reactionContributions = std::nullopt;
T neutrinoEnergyLossRate = T(0.0); // (erg/g/s)
T totalNeutrinoFlux = T(0.0); // (neutrinos/g/s)

View File

@@ -753,6 +753,14 @@ namespace gridfire::engine {
[[nodiscard]]
SpeciesStatus getSpeciesStatus(const fourdst::atomic::Species &species) const override;
[[nodiscard]] bool get_store_intermediate_reaction_contributions() const {
return m_store_intermediate_reaction_contributions;
}
void set_store_intermediate_reaction_contributions(const bool value) {
m_store_intermediate_reaction_contributions = value;
}
private:
struct PrecomputedReaction {
@@ -879,6 +887,7 @@ namespace gridfire::engine {
bool m_usePrecomputation = true; ///< Flag to enable or disable using precomputed reactions for efficiency. Mathematically, this should not change the results. Generally end users should not need to change this.
bool m_useReverseReactions = true; ///< Flag to enable or disable reverse reactions. If false, only forward reactions are considered.
bool m_store_intermediate_reaction_contributions = false; ///< Flag to enable or disable storing intermediate reaction contributions for debugging.
BuildDepthType m_depth;
@@ -1207,7 +1216,10 @@ namespace gridfire::engine {
const T nu_ij = static_cast<T>(reaction.stoichiometry(species));
const T dydt_increment = threshold_flag * molarReactionFlow * nu_ij;
dydt_vec[speciesIdx] += dydt_increment;
result.reactionContributions[species][std::string(reaction.id())] = dydt_increment;
if (m_store_intermediate_reaction_contributions) {
result.reactionContributions.value()[species][std::string(reaction.id())] = dydt_increment;
}
}
}

View File

@@ -237,7 +237,7 @@ namespace gridfire::solver {
};
struct CVODERHSOutputData {
std::map<fourdst::atomic::Species, std::unordered_map<std::string, double>> reaction_contribution_map;
std::optional<std::map<fourdst::atomic::Species, std::unordered_map<std::string, double>>> reaction_contribution_map;
double neutrino_energy_loss_rate;
double total_neutrino_flux;
};

View File

@@ -684,7 +684,7 @@ namespace gridfire::engine {
// --- Efficient lookup of only the active reactions ---
uint64_t reactionHash = utils::hash_reaction(*reaction);
const size_t reactionIndex = m_precomputedReactionIndexMap.at(reactionHash);
PrecomputedReaction precomputedReaction = m_precomputedReactions[reactionIndex];
const PrecomputedReaction& precomputedReaction = m_precomputedReactions[reactionIndex];
// --- Forward abundance product ---
double forwardAbundanceProduct = 1.0;
@@ -697,12 +697,12 @@ namespace gridfire::engine {
forwardAbundanceProduct = 0.0;
break; // No need to continue if one of the reactants has zero abundance
}
double factor = std::pow(comp.getMolarAbundance(reactant), power);
const double factor = std::pow(comp.getMolarAbundance(reactant), power);
if (!std::isfinite(factor)) {
LOG_CRITICAL(m_logger, "Non-finite factor encountered in forward abundance product for reaction '{}'. Check input abundances for validity.", reaction->id());
throw exceptions::BadRHSEngineError("Non-finite factor encountered in forward abundance product.");
}
forwardAbundanceProduct *= std::pow(comp.getMolarAbundance(reactant), power);
forwardAbundanceProduct *= factor;
}
const double bare_rate = bare_rates.at(reactionCounter);
@@ -764,8 +764,8 @@ namespace gridfire::engine {
default: ;
}
double local_neutrino_loss = molarReactionFlows.back() * q_abs * neutrino_loss_fraction * m_constants.Na * m_constants.MeV_to_erg;
double local_neutrino_flux = molarReactionFlows.back() * m_constants.Na;
const double local_neutrino_loss = molarReactionFlows.back() * q_abs * neutrino_loss_fraction * m_constants.Na * m_constants.MeV_to_erg;
const double local_neutrino_flux = molarReactionFlows.back() * m_constants.Na;
result.totalNeutrinoFlux += local_neutrino_flux;
result.neutrinoEnergyLossRate += local_neutrino_loss;
@@ -782,7 +782,7 @@ namespace gridfire::engine {
reactionCounter = 0;
for (const auto& reaction: activeReactions) {
size_t j = m_precomputedReactionIndexMap.at(utils::hash_reaction(*reaction));
const size_t j = m_precomputedReactionIndexMap.at(utils::hash_reaction(*reaction));
const auto& precomp = m_precomputedReactions[j];
const double R_j = molarReactionFlows[reactionCounter];
@@ -793,9 +793,12 @@ namespace gridfire::engine {
const int stoichiometricCoefficient = precomp.stoichiometric_coefficients[i];
// Update the derivative for this species
double dydt_increment = static_cast<double>(stoichiometricCoefficient) * R_j;
const double dydt_increment = static_cast<double>(stoichiometricCoefficient) * R_j;
result.dydt.at(species) += dydt_increment;
result.reactionContributions[species][std::string(reaction->id())] = dydt_increment;
if (m_store_intermediate_reaction_contributions) {
result.reactionContributions.value()[species][std::string(reaction->id())] = dydt_increment;
}
}
reactionCounter++;
}

View File

@@ -51,6 +51,7 @@ libgridfire = library('gridfire',
gridfire_sources,
include_directories: include_directories('include'),
dependencies: gridfire_build_dependencies,
objects: [cvode_objs, kinsol_objs],
install : true)
gridfire_dep = declare_dependency(

View File

@@ -1,2 +1,5 @@
subdir('C')
subdir('fortran')
if get_option('build-fortran')
subdir('fortran')
endif

View File

@@ -4,7 +4,6 @@
#include "gridfire/gridfire.h"
#include "fourdst/composition/composition.h"
#include "fourdst/plugin/bundle/bundle.h"
#include "fourdst/logging/logging.h"
#include "fourdst/atomic/species.h"
#include "fourdst/composition/utils.h"

View File

@@ -1,5 +1,5 @@
executable(
'graphnet_sandbox',
'main.cpp',
dependencies: [gridfire_dep, composition_dep, plugin_dep, cli11_dep],
dependencies: [gridfire_dep, composition_dep, cli11_dep],
)

View File

@@ -0,0 +1,132 @@
#!/bin/bash
# Download official python.org macOS installer packages and extract only the
# CPython headers into cross/python_includes/python-X.Y/include/pythonX.Y,
# for use when cross compiling the Python extension module for macOS.

# --- Configuration ---
PYTHON_VERSIONS=("3.8.10" "3.9.13" "3.10.11" "3.11.9" "3.12.3" "3.13.0" "3.14.0")
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
BASE_OUTPUT_DIR="$SCRIPT_DIR/../../cross/python_includes"

# --- OS Detection ---
OS="$(uname -s)"
echo "Detected OS: $OS"

# --- Dependency Check ---
# 7z is only needed on Linux; macOS uses the native pkgutil instead.
check_dependencies() {
    if [ "$OS" == "Linux" ]; then
        if ! command -v 7z &> /dev/null; then
            echo "Error: '7z' (p7zip-full) is required on Linux."
            exit 1
        fi
        if ! command -v cpio &> /dev/null; then
            echo "Error: 'cpio' is required."
            exit 1
        fi
    fi
}

# Pull include/pythonX.Y out of an expanded package payload into
# "$extract_root/root". Shared by the Darwin and Linux extraction paths
# (the original duplicated this logic in both branches).
extract_payload_headers() {
    local payload_path="$1"
    local extract_root="$2"
    local major_ver="$3"

    if [ ! -f "$payload_path" ]; then
        echo " -> Error: Could not find Payload in package."
        return 1
    fi
    mkdir -p "$extract_root/root"
    pushd "$extract_root/root" > /dev/null
    # Payload is a gzip'd cpio archive; extract just the header directory.
    gunzip -c "$payload_path" | cpio -id "*include/python${major_ver}/*" 2>/dev/null
    popd > /dev/null
}

# --- Extraction Logic (OS Specific) ---
extract_pkg() {
    local pkg_file="$1"
    local extract_root="$2"
    local major_ver="$3" # e.g., 3.11

    echo " -> Extracting..."
    if [ "$OS" == "Darwin" ]; then
        # pkgutil is the native tool for expanding flat .pkg installers.
        pkgutil --expand "$pkg_file" "$extract_root/expanded"
    else
        # On Linux, 7z can unpack the xar-based .pkg container.
        7z x "$pkg_file" -o"$extract_root/expanded" -y > /dev/null
    fi
    extract_payload_headers "$extract_root/expanded/Python_Framework.pkg/Payload" "$extract_root" "$major_ver"
}

check_dependencies
mkdir -p "$BASE_OUTPUT_DIR"

for FULL_VER in "${PYTHON_VERSIONS[@]}"; do
    MAJOR_VER=$(echo "$FULL_VER" | cut -d. -f1,2)
    TARGET_DIR="$BASE_OUTPUT_DIR/python-$MAJOR_VER"
    TEMP_DIR="$BASE_OUTPUT_DIR/tmp_$FULL_VER"
    # python.org changed the installer naming scheme; 3.8 still uses the
    # 'macosx10.9' suffix.
    PKG_NAME="python-${FULL_VER}-macos11.pkg"
    if [[ "$MAJOR_VER" == "3.8" ]]; then
        PKG_NAME="python-${FULL_VER}-macosx10.9.pkg"
    fi
    DOWNLOAD_URL="https://www.python.org/ftp/python/${FULL_VER}/$PKG_NAME"

    echo "Processing Python $FULL_VER..."

    # Skip versions whose headers were already extracted. TARGET_DIR is quoted
    # inside the command substitution so paths with spaces do not break the
    # emptiness test (the original left it unquoted).
    if [ -d "$TARGET_DIR" ] && [ "$(ls -A "$TARGET_DIR")" ]; then
        echo " -> Headers already exist in $TARGET_DIR. Skipping."
        continue
    fi

    mkdir -p "$TEMP_DIR"
    echo " -> Downloading from $DOWNLOAD_URL"
    curl -L -s -o "$TEMP_DIR/python.pkg" "$DOWNLOAD_URL"
    if [ $? -ne 0 ]; then
        echo " -> Download failed! Check version number or internet connection."
        rm -rf "$TEMP_DIR"
        continue
    fi

    # 2. Extract
    extract_pkg "$TEMP_DIR/python.pkg" "$TEMP_DIR" "$MAJOR_VER"

    # 3. Move Headers to Final Location
    # The cpio extraction usually results in: ./Versions/X.Y/include/pythonX.Y
    # We want to move that specific include folder to our target dir
    FOUND_HEADERS=$(find "$TEMP_DIR/root" -type d -path "*/include/python${MAJOR_VER}" | head -n 1)
    if [ -n "$FOUND_HEADERS" ]; then
        echo " -> Found headers at: $FOUND_HEADERS"
        # Move the content to the final destination
        # We want the folder to be .../python-3.11/include/python3.11
        mkdir -p "$TARGET_DIR/include"
        mv "$FOUND_HEADERS" "$TARGET_DIR/include/"
        # Verify pyconfig.h exists (sanity check)
        if [ -f "$TARGET_DIR/include/python${MAJOR_VER}/pyconfig.h" ]; then
            echo " -> Success: Headers installed to $TARGET_DIR"
        else
            echo " -> Warning: Header move seemed successful, but pyconfig.h is missing."
        fi
    else
        echo " -> Error: Could not locate header files after extraction."
    fi

    # 4. Cleanup
    rm -rf "$TEMP_DIR"
    echo "---------------------------------------------------"
done

echo "Done. All headers stored in $BASE_OUTPUT_DIR"

View File

@@ -1,6 +1,7 @@
#!/usr/bin/env bash
set -euo pipefail
# 1. Validation
if [[ $(uname -m) != "arm64" ]]; then
echo "Error: This script is intended to run on an Apple Silicon (arm64) Mac."
exit 1
@@ -11,11 +12,12 @@ if [[ $# -ne 1 ]]; then
exit 1
fi
# --- Initial Setup ---
# 2. Setup Directories
REPO_URL="$1"
WORK_DIR="$(pwd)"
WHEEL_DIR="${WORK_DIR}/wheels_macos_aarch64_tmp"
FINAL_WHEEL_DIR="${WORK_DIR}/wheels_macos_aarch64"
RPATH_SCRIPT="${WORK_DIR}/../../build-python/fix_rpaths.py" # Assumes script is in this location relative to execution
echo "➤ Creating wheel output directories"
mkdir -p "${WHEEL_DIR}"
@@ -26,10 +28,22 @@ echo "➤ Cloning ${REPO_URL} → ${TMPDIR}/project"
git clone --depth 1 "${REPO_URL}" "${TMPDIR}/project"
cd "${TMPDIR}/project"
# --- macOS Build Configuration ---
# 3. Build Configuration
export MACOSX_DEPLOYMENT_TARGET=15.0
# Meson options passed to pip via config-settings
# Note: We use an array to keep the command clean
MESON_ARGS=(
"-Csetup-args=-Dunity=off"
"-Csetup-args=-Dbuild-python=true"
"-Csetup-args=-Dbuild-fortran=false"
"-Csetup-args=-Dbuild-tests=false"
"-Csetup-args=-Dpkg-config=false"
"-Csetup-args=-Dunity-safe=true"
)
PYTHON_VERSIONS=("3.8.20" "3.9.23" "3.10.18" "3.11.13" "3.12.11" "3.13.5" "3.13.5t" "3.14.0rc1" "3.14.0rc1t" 'pypy3.10-7.3.19' "pypy3.11-7.3.20")
PYTHON_VERSIONS=("3.9.23" "3.10.18" "3.11.13" "3.12.11" "3.13.5" "3.13.5t" "3.14.0rc1" "3.14.0rc1t" 'pypy3.10-7.3.19' "pypy3.11-7.3.20")
if ! command -v pyenv &> /dev/null; then
echo "Error: pyenv not found. Please install it to manage Python versions."
@@ -37,55 +51,48 @@ if ! command -v pyenv &> /dev/null; then
fi
eval "$(pyenv init -)"
# 4. Build Loop
for PY_VERSION in "${PYTHON_VERSIONS[@]}"; do
(
set -e
if ! pyenv versions --bare --filter="${PY_VERSION}." &>/dev/null; then
echo "⚠️ Python version matching '${PY_VERSION}.*' not found by pyenv. Skipping."
# Check if version exists in pyenv
if ! pyenv versions --bare --filter="${PY_VERSION}" &>/dev/null; then
echo "⚠️ Python version matching '${PY_VERSION}' not found by pyenv. Skipping."
continue
fi
pyenv shell "${PY_VERSION}"
PY="$(pyenv which python)"
echo "➤ Building for $($PY --version) on macOS arm64 (target: ${MACOSX_DEPLOYMENT_TARGET})"
echo "----------------------------------------------------------------"
echo "➤ Building for $($PY --version) on macOS arm64"
echo "----------------------------------------------------------------"
# Install build deps explicitly so we can skip build isolation
"$PY" -m pip install --upgrade pip setuptools wheel meson meson-python delocate
CC=clang CXX=clang++ "$PY" -m pip wheel . \
# PERF: --no-build-isolation prevents creating a fresh venv and reinstalling meson/ninja
# for every single build, saving significant I/O and network time.
CC="ccache clang" CXX="ccache clang++" "$PY" -m pip wheel . \
--no-build-isolation \
"${MESON_ARGS[@]}" \
-w "${WHEEL_DIR}" -vv
echo "➤ Sanitizing RPATHs before delocation..."
# We expect exactly one new wheel in the tmp dir per iteration
CURRENT_WHEEL=$(find "${WHEEL_DIR}" -name "*.whl" | head -n 1)
if [ -f "$CURRENT_WHEEL" ]; then
"$PY" -m wheel unpack "$CURRENT_WHEEL" -d "${WHEEL_DIR}/unpacked"
UNPACKED_ROOT=$(find "${WHEEL_DIR}/unpacked" -mindepth 1 -maxdepth 1 -type d)
find "$UNPACKED_ROOT" -name "*.so" | while read -r SO_FILE; do
echo " Processing: $SO_FILE"
"$PY" "../../build-python/fix_rpaths.py" "$SO_FILE"
done
"$PY" -m wheel pack "$UNPACKED_ROOT" -d "${WHEEL_DIR}"
rm -rf "${WHEEL_DIR}/unpacked"
else
echo "Error: No wheel found to sanitize!"
exit 1
fi
echo "➤ Repairing wheel(s) with delocate"
delocate-wheel -w "${FINAL_WHEEL_DIR}" "${WHEEL_DIR}"/*.whl
rm "${WHEEL_DIR}"/*.whl
echo "➤ Repairing wheel with delocate"
# Delocate moves the repaired wheel to FINAL_WHEEL_DIR
delocate-wheel -w "${FINAL_WHEEL_DIR}" "$CURRENT_WHEEL"
# Clean up the intermediate wheel from this iteration so it doesn't confuse the next
rm "$CURRENT_WHEEL"
)
done
# Cleanup
rm -rf "${TMPDIR}"
rm -rf "${WHEEL_DIR}"
echo "✅ All builds complete. Artifacts in ${FINAL_WHEEL_DIR}"

View File

@@ -0,0 +1,90 @@
#!/bin/zsh
set -e

# Temporary workaround: strip duplicate RPATH entries from the extension
# modules inside each wheel in a directory, then re-run delocate.
#
# zsh aborts on an unmatched glob by default, which with 'set -e' would kill
# the script before the "no wheels found" message could ever print; enable
# null_glob so an empty directory can be detected and reported instead.
setopt null_glob

# Color codes for output
RED='\033[0;31m'
YELLOW='\033[1;33m'
GREEN='\033[0;32m'
NC='\033[0m' # No Color

# Run the python RPATH repair helper on one shared library; abort on failure.
function fix_file_rpaths() {
    local file_path="$1"
    echo -e "${YELLOW}Fixing RPATHs in file: $file_path...${NC}"
    python3 "$FIX_RPATH_SCRIPT" "$file_path"
    if [ $? -ne 0 ]; then
        echo -e "${RED}Error: RPATH fix script failed for file: $file_path${NC}"
        exit 1
    fi
    echo -e "${GREEN}RPATHs fixed for file: $file_path${NC}"
}
# NOTE: the original 'export -f fix_file_rpaths' was removed: '-f' is a
# bash-only export flag that zsh rejects, and the function is only invoked
# from this shell, so no export is needed.

echo -e "${YELLOW}"
echo "========================================================================="
echo " TEMPORARY WHEEL REPAIR WORKAROUND"
echo "========================================================================="
echo -e "${NC}"
echo ""
echo -e "${YELLOW}WARNING:${NC} This script applies a temporary patch to fix"
echo "a known issue with meson-python that causes duplicate RPATH entries in"
echo "built Python wheels on macOS, preventing module imports."
echo ""
echo "This workaround will:"
echo " 1. Unzip the wheel file"
echo " 2. Locate the extension modules"
echo " 3. Remove duplicate RPATH entries using install_name_tool"
echo " 4. Resign the wheel if necessary"
echo " 5. Repackage the wheel file"
echo ""

FIX_RPATH_SCRIPT="../../build-python/fix_rpaths.py"

# get the wheel directory to scan through
WHEEL_DIR="$1"
if [ -z "$WHEEL_DIR" ]; then
    echo -e "${RED}Error: No wheel directory specified.${NC}"
    echo "Usage: $0 /path/to/wheel_directory"
    exit 1
fi

REPAIRED_WHEELS_DIR="repaired_wheels"
mkdir -p "$REPAIRED_WHEELS_DIR"
REPAIRED_DELOCATED_WHEELS_DIR="${REPAIRED_WHEELS_DIR}/delocated"

# Collect wheels up front; under null_glob an unmatched pattern expands to
# nothing, so an empty array means there is nothing to repair.
WHEELS=("$WHEEL_DIR"/*.whl)
if [ ${#WHEELS[@]} -eq 0 ]; then
    echo -e "${YELLOW}No wheel files found in directory: $WHEEL_DIR${NC}"
    exit 0
fi

# Scan all files ending in .whl and not starting with a dot
for WHEEL_PATH in "${WHEELS[@]}"; do
    echo ""
    echo -e "${GREEN}Processing wheel: $WHEEL_PATH${NC}"
    WHEEL_NAME=$(basename "$WHEEL_PATH")
    TEMP_DIR=$(mktemp -d)

    echo -e "${GREEN}Step 1: Unzipping wheel...${NC}"
    python -m wheel unpack "$WHEEL_PATH" -d "$TEMP_DIR"

    echo -e "${GREEN}Step 2: Locating extension modules...${NC}"
    while IFS= read -r -d '' so_file; do
        echo "Found library: $so_file"
        fix_file_rpaths "$so_file"
    done < <(find "$TEMP_DIR" -name "*.so" -print0)

    echo -e "${GREEN}Step 4: Repackaging wheel...${NC}"
    # Locate the single unpacked wheel root instead of hard-coding the
    # project version (the original assumed 'gridfire-0.7.4rc2', which breaks
    # on every version bump).
    UNPACKED_ROOT=$(find "$TEMP_DIR" -mindepth 1 -maxdepth 1 -type d | head -n 1)
    python -m wheel pack "$UNPACKED_ROOT" -d "$REPAIRED_WHEELS_DIR"
    REPAIRED_WHEEL_PATH="${REPAIRED_WHEELS_DIR}/${WHEEL_NAME}"

    echo -e "${GREEN}Step 5: Delocating wheel...${NC}"
    # Ensure delocate is installed
    pip install delocate
    delocate-wheel -w "$REPAIRED_DELOCATED_WHEELS_DIR" "$REPAIRED_WHEEL_PATH"
    echo -e "${GREEN}Repaired wheel saved to: ${REPAIRED_DELOCATED_WHEELS_DIR}/${WHEEL_NAME}${NC}"

    # Clean up temporary directory
    rm -rf "$TEMP_DIR"
done