Update scikit-build branch #1403

name: Publish sdist and wheels for macOS, manylinux, and Windows; publish to PyPI on release
on: [pull_request, push]
env:
apt_options: -o Acquire::Retries=3
CIBW_BUILD_VERBOSITY: 1
CIBW_BUILD: 'cp*'
CIBW_SKIP: 'cp35-* cp36-* cp37-* cp38-* *-manylinux_i686 *-musllinux_* *-win32'
CIBW_BEFORE_TEST: pip install -r {project}/tests/requirement_tests.txt
CIBW_TEST_COMMAND: pytest -s -v {project}/tests
UNIXY_HDF5_VERSION: 1.14.3
WINDOWS_HDF5: 1.14.2
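# The CIBW_* settings above restrict cibuildwheel to 64-bit CPython 3.9+ wheels on
# glibc Linux, macOS, and Windows. To preview which build identifiers that selection
# yields (a sketch, assuming cibuildwheel is installed in the current environment):
#   CIBW_BUILD='cp*' CIBW_SKIP='cp35-* cp36-* cp37-* cp38-* *-manylinux_i686 *-musllinux_* *-win32' \
#     python -m cibuildwheel --print-build-identifiers --platform linux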
jobs:
build_wheels:
name: Build wheels on ${{ matrix.os }}
if: github.event_name == 'push' || github.event.pull_request.head.repo.full_name != github.repository
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ubuntu-20.04, macos-12, windows-2022]
steps:
- uses: actions/checkout@v4
with:
submodules: 'true'
- name: Install Python
uses: actions/setup-python@v5
with:
python-version: '3.11'
- name: Install cibuildwheel
run: |
python -m pip install cibuildwheel
- name: Cache HDF5 On Linux/macOS
if: runner.os == 'Linux' || runner.os == 'macOS'
uses: actions/cache@v4
id: cache-hdf5-posix
env:
cache-name: cache-hdf5-posix
with:
path: src-cache/
key: ${{ runner.os }}-build-${{ env.cache-name }}
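# actions/cache restores src-cache/ into the workspace before the build steps and saves it
# again in a post-job step, so ci/hdf5-build.sh can presumably reuse a previously compiled
# HDF5 instead of rebuilding it. Unlike the Windows cache below, this key does not embed
# UNIXY_HDF5_VERSION, so bumping that version does not by itself invalidate the cache; a
# version-aware key would look like this (hypothetical, not what the workflow uses):
#   key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ env.UNIXY_HDF5_VERSION }}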
- name: Build wheels on Linux
if: runner.os == 'Linux'
env:
CIBW_ENVIRONMENT_PASS: "STATIC_HDF5 CMAKE_PREFIX_PATH"
CIBW_BEFORE_BUILD: |
# CMake complains if the dependencies come from within the same tree
# as the source, so we'll just pretend they are elsewhere
mkdir -p /host/home/runner/work/src-cache
ln -s /host/home/runner/work/src-cache /opt/hdf5-static
bash ci/hdf5-build.sh /opt/hdf5-static
run: |
# used by setup.py to decide whether to have `FindHDF5` use static HDF5 libraries
export STATIC_HDF5=True
export CMAKE_PREFIX_PATH=/opt/hdf5-static/install-x86_64/install/
CIBW_MANYLINUX_X86_64_IMAGE=manylinux2014 python -m cibuildwheel --output-dir dist
CIBW_MANYLINUX_X86_64_IMAGE=manylinux_2_28 python -m cibuildwheel --output-dir dist
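# Running cibuildwheel twice against two different images produces both manylinux2014
# (glibc 2.17) and manylinux_2_28 tagged wheels in dist/. A quick local check of what was
# actually produced (a sketch; it just lists the wheel filenames, whose tags encode the
# Python version and platform):
#   ls dist/*.whl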
- name: Build wheels on macOS
if: runner.os == 'macOS'
env:
CIBW_ENVIRONMENT_PASS: "STATIC_HDF5 CMAKE_PREFIX_PATH CMAKE_OSX_ARCHITECTURES MACOSX_DEPLOYMENT_TARGET"
CIBW_BEFORE_BUILD: |
# CMake complains if the dependencies come from within the same tree
# as the source, so we'll just pretend they are elsewhere
mkdir -p $PWD/src-cache
ln -s $PWD/src-cache /Users/runner/work/src-cache
bash ci/hdf5-build.sh /Users/runner/work/src-cache
run: |
# used by setup.py to decide whether to have `FindHDF5` use static HDF5 libraries
export STATIC_HDF5=True
# x86_64 macOS allows cross-compilation; first we build the arm64 wheels,
# targeting macOS 11.0 only, and store the compiled code in the cache...
export CIBW_ARCHS_MACOS="arm64"
export CMAKE_OSX_ARCHITECTURES="arm64"
export MACOSX_DEPLOYMENT_TARGET="11.0"
export CMAKE_PREFIX_PATH=/Users/runner/work/src-cache/install-$CIBW_ARCHS_MACOS/install
python -m cibuildwheel --output-dir dist
# ...and now we build the x86_64 wheels with the default deployment target
export CIBW_ARCHS_MACOS="x86_64"
export CMAKE_OSX_ARCHITECTURES="x86_64"
unset MACOSX_DEPLOYMENT_TARGET
export CMAKE_PREFIX_PATH=/Users/runner/work/src-cache/install-$CIBW_ARCHS_MACOS/install
python -m cibuildwheel --output-dir dist
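# Cross-compiling the arm64 wheels needs MACOSX_DEPLOYMENT_TARGET=11.0, since macOS 11 is
# the first release that runs on Apple Silicon; CMAKE_PREFIX_PATH points at the
# architecture-specific HDF5 install that ci/hdf5-build.sh left in the cache directory.
# To confirm a built extension really is single-arch arm64 (a sketch with a hypothetical
# path to a module unpacked from one of the wheels):
#   lipo -archs <unpacked-wheel>/some_module.so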
- name: Cache HDF5 On Windows
if: runner.os == 'Windows'
id: cache-hdf5-windows
uses: actions/cache@v4
env:
cache-name: cache-hdf5-windows
with:
path: C:\cache\hdf5\${{ env.WINDOWS_HDF5 }}
key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ env.WINDOWS_HDF5 }}
- name: Fill Windows Cache
if: runner.os == 'Windows' && steps.cache-hdf5-windows.outputs.cache-hit != 'true'
continue-on-error: false
shell: cmd
run: |
mkdir C:\cache\hdf5\${{ env.WINDOWS_HDF5 }}
curl -L -o C:\cache\hdf5\${{ env.WINDOWS_HDF5 }}\hdf5.zip https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-1.14/hdf5-1.14.2/bin/windows/hdf5-1.14.2-Std-win10_64-vs16.zip
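# The download URL pins the same 1.14.2 release as WINDOWS_HDF5, so the cached zip and the
# version used below stay in sync. If the download ever needs verifying by hand (a sketch
# using a stock Windows tool; no expected hash is recorded in this workflow):
#   certutil -hashfile C:\cache\hdf5\1.14.2\hdf5.zip SHA256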
- name: Install the MSVC v141 toolset on Windows
if: runner.os == 'Windows'
# the v141 toolchain is used, so that fewer vc_redist installs are required
# some of the information is available here:
# https://docs.microsoft.com/en-us/cpp/build/building-on-the-command-line
# but tracking down the 14.1 magic number (and especially newer ones) isn't always easy
# this has a partial table:
# https://devblogs.microsoft.com/cppblog/side-by-side-minor-version-msvc-toolsets-in-visual-studio-2017/
run: |
# the v141 toolset was removed from the runner image; add it back, see: https://github.com/actions/runner-images/issues/9701
Set-Location "C:\Program Files (x86)\Microsoft Visual Studio\Installer\"
$InstallPath = "C:\Program Files\Microsoft Visual Studio\2022\Enterprise"
$componentsToAdd= @(
"Microsoft.VisualStudio.Component.VC.v141.x86.x64"
"Microsoft.VisualStudio.Component.VC.v141.x86.x64.Spectre"
)
[string]$workloadArgs = $componentsToAdd | ForEach-Object {" --add " + $_}
$Arguments = ('/c', "vs_installer.exe", 'modify', '--installPath', "`"$InstallPath`"",$workloadArgs, '--quiet', '--norestart', '--nocache')
# should be run twice
$process = Start-Process -FilePath cmd.exe -ArgumentList $Arguments -Wait -PassThru -WindowStyle Hidden
$process = Start-Process -FilePath cmd.exe -ArgumentList $Arguments -Wait -PassThru -WindowStyle Hidden
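# Driving the Visual Studio installer through cmd.exe keeps the step non-interactive; it is
# invoked twice per the comment above, presumably because a single pass does not always
# apply the added components. To check afterwards that the v141 toolset really landed
# (a sketch, using vswhere from the same Installer directory):
#   .\vswhere.exe -products * -requires Microsoft.VisualStudio.Component.VC.v141.x86.x64 -property installationPath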
- name: Build wheels on Windows
if: runner.os == 'Windows'
shell: cmd
run: |
C:\windows\system32\tar.exe xf C:\cache\hdf5\${{ env.WINDOWS_HDF5 }}\hdf5.zip
start /wait msiexec /a "%cd%\hdf\HDF5-${{ env.WINDOWS_HDF5 }}-win64.msi" /qn TARGETDIR="c:\hdf5\"
call "C:\Program Files\Microsoft Visual Studio\2022\Enterprise\VC\Auxiliary\Build\vcvars64.bat" -vcvars_ver=14.1
set HDF5_DIR=C:\hdf5\HDF_Group\HDF5\${{ env.WINDOWS_HDF5 }}\cmake
python -m cibuildwheel --output-dir dist
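# msiexec /a performs an administrative (extract-only) install of the HDF5 MSI into c:\hdf5,
# vcvars64.bat -vcvars_ver=14.1 selects the v141 toolset, and HDF5_DIR tells the build where
# the extracted HDF5 CMake package files live. A rough, hypothetical equivalent of what each
# wheel build configures inside cibuildwheel (a sketch; the real options come from the
# project's setup.py / scikit-build configuration):
#   cmake -S . -B build -DHDF5_DIR=C:\hdf5\HDF_Group\HDF5\1.14.2\cmake
#   cmake --build build --config Release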
- name: Store wheel as artifact
uses: actions/upload-artifact@v4
with:
name: dist-wheels-${{ matrix.os }}
path: dist/*.whl
build_sdist:
name: Build sdist
if: github.event_name == 'push' || github.event.pull_request.head.repo.full_name != github.repository
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
submodules: 'true'
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: '3.10'
- name: Install Dependencies
run:
python -m pip install build
- name: Build a source tarball
run:
python -m build --sdist
- name: Test tarball
run: |
sudo apt-get ${{env.apt_options}} update -y
sudo apt-get ${{env.apt_options}} install -y libhdf5-dev
pip install -r tests/requirement_tests.txt
pip install dist/*
pytest -s -v tests
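# The sdist check installs the distro HDF5 headers and builds the package from the tarball
# with pip, so it exercises the source-build path rather than a prebuilt wheel. Roughly the
# same check can be run locally (a sketch, assuming a Debian/Ubuntu machine and a fresh
# virtual environment):
#   sudo apt-get install -y libhdf5-dev
#   python -m build --sdist && pip install dist/*.tar.gz && pytest -s -v tests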
- name: Store sdist as artifact
uses: actions/upload-artifact@v4
with:
name: dist-sdist
path: dist/*.tar.gz
upload_artifacts:
name: Upload wheels to PyPI
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags')
runs-on: ubuntu-latest
needs: [build_wheels, build_sdist]
steps:
- name: Download artifacts produced during the build_wheels and build_sdist jobs
uses: actions/download-artifact@v4
with:
pattern: dist-*
path: dist/
merge-multiple: true
- name: Display structure of downloaded files
run: ls -R
working-directory: dist
- name: Publish package to PyPI
uses: pypa/gh-action-pypi-publish@release/v1
with:
user: __token__
password: ${{ secrets.PYPI_PASSWORD }}
packages_dir: dist/
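# Publishing only fires on tag pushes (the startsWith(github.ref, 'refs/tags') guard) and
# authenticates with the PYPI_PASSWORD secret as an API token. A release would therefore be
# cut with something like (hypothetical tag name):
#   git tag v1.2.3
#   git push origin v1.2.3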