diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml
index 5b450d6bf..e2994099d 100644
--- a/.github/workflows/ci-cd.yml
+++ b/.github/workflows/ci-cd.yml
@@ -675,83 +675,28 @@ jobs:
         !fromJSON(needs.pre-setup.outputs.is-untagged-devel)
         && !fromJSON(needs.pre-setup.outputs.release-requested)
       ) && 'test' || ''
-      }}] manylinux
+      }}] manylinux [tested]
     needs:
     - build-src
     - pre-setup  # transitive, for accessing settings
-    strategy:
-      matrix:
-        manylinux-python-target:
-        # NOTE: Must be from this list:
-        # NOTE: $ podman run -it --rm \
-        # NOTE:     quay.io/pypa/manylinux2014_x86_64 \
-        # NOTE:     ls -1 /opt/python
-        - cp38-cp38
-        - cp39-cp39
-        - cp310-cp310
-        - cp311-cp311
-        - cp312-cp312
-        manylinux-year-target:
-        - 2014
-        - _2_24
-        - _2_28
-        manylinux-image-target:
-        # NOTE: Keep in sync with `build-manylinux-container-images.yml`.
-        # NOTE: Ordered from "heavy" to "fast".
-        - arch: x86_64
-          qemu_arch: amd64
-        include:
-        # NOTE: manylinux2010 only on x86_64
-        - manylinux-python-target: cp38-cp38
-          manylinux-image-target:
-            arch: x86_64
-            qemu_arch: amd64
-          manylinux-year-target: 2010
-        - manylinux-python-target: cp39-cp39
-          manylinux-image-target:
-            arch: x86_64
-            qemu_arch: amd64
-          manylinux-year-target: 2010
-        - manylinux-python-target: cp310-cp310
-          manylinux-image-target:
-            arch: x86_64
-            qemu_arch: amd64
-          manylinux-year-target: 2010
-        # NOTE: manylinux1 caps out at Python 3.9
-        - manylinux-python-target: cp38-cp38
-          manylinux-image-target:
-            arch: x86_64
-            qemu_arch: amd64
-          manylinux-year-target: 1
-        - manylinux-python-target: cp39-cp39
-          manylinux-image-target:
-            arch: x86_64
-            qemu_arch: amd64
-          manylinux-year-target: 1
-        exclude:
-        # NOTE: cp312-cp312 unavailable before _2_28
-        - manylinux-python-target: cp312-cp312
-          manylinux-year-target: 2014
-        - manylinux-python-target: cp312-cp312
-          manylinux-year-target: _2_24
-
-    uses: ./.github/workflows/reusable-build-wheel.yml
+    uses: ./.github/workflows/reusable-cibuildwheel.yml
     with:
-      # qemu: ${{ matrix.qemu }}
+      os: ubuntu-latest
+      wheel-tags-to-skip: >-
+        *_i686
+        *-musllinux_*
+        *-*linux_{aarch64,ppc64le,s390x}
+        pp*
       source-tarball-name: >-
        ${{ needs.pre-setup.outputs.sdist-artifact-name }}
      dists-artifact-name: >-
        ${{ needs.pre-setup.outputs.dists-artifact-name }}
-      wheel-artifact-name: ${{ needs.pre-setup.outputs.wheel-artifact-name }}
-      cache-key-files: ${{ needs.pre-setup.outputs.cache-key-files }}
-      release-requested: >-
-        ${{ needs.pre-setup.outputs.release-requested }}
-      manylinux-python-target: ${{ matrix.manylinux-python-target }}
-      manylinux-year-target: ${{ matrix.manylinux-year-target }}
-      manylinux-image-target-arch: >-
-        ${{ matrix.manylinux-image-target.arch }}
-      manylinux-image-target-qemu-arch: >-
-        ${{ matrix.manylinux-image-target.qemu_arch }}
+      cython-tracing: >-  # Cython line tracing for coverage collection
+        ${{
+          fromJSON(needs.pre-setup.outputs.profiling-enabled)
+          && 'true'
+          || 'false'
+        }}

   build-bin-manylinux-odd-arches:
     name: >-
@@ -767,57 +712,29 @@ jobs:
         !fromJSON(needs.pre-setup.outputs.is-untagged-devel)
         && !fromJSON(needs.pre-setup.outputs.release-requested)
       ) && 'test' || ''
-      }}] manylinux
+      }}] manylinux [odd]
     needs:
     - build-src
     - pre-setup  # transitive, for accessing settings
-    strategy:
-      matrix:
-        manylinux-python-target:
-        # NOTE: Must be from this list:
-        # NOTE: $ podman run -it --rm \
-        # NOTE:     quay.io/pypa/manylinux2014_x86_64 \
-        # NOTE:     ls -1 /opt/python
-        - cp38-cp38
-        - cp39-cp39
-        - cp310-cp310
-        - cp311-cp311
-        - cp312-cp312
-        manylinux-year-target:
-        - 2014
-        - _2_24
-        - _2_28
-        manylinux-image-target:
-        # NOTE: Keep in sync with `build-manylinux-container-images.yml`.
-        # NOTE: Ordered from "heavy" to "fast".
-        - arch: aarch64
-          qemu_arch: arm64
-        - arch: s390x
-        - arch: ppc64le
-        exclude:
-        # NOTE: cp312-cp312 unavailable before _2_28
-        - manylinux-python-target: cp312-cp312
-          manylinux-year-target: 2014
-        - manylinux-python-target: cp312-cp312
-          manylinux-year-target: _2_24
-
-    uses: ./.github/workflows/reusable-build-wheel.yml
+    uses: ./.github/workflows/reusable-cibuildwheel.yml
     with:
-      # qemu: ${{ matrix.qemu }}
+      os: ubuntu-latest
+      wheel-tags-to-skip: >-
+        *_i686
+        *-musllinux_*
+        *-*linux_x86_64
+        pp*
       source-tarball-name: >-
        ${{ needs.pre-setup.outputs.sdist-artifact-name }}
      dists-artifact-name: >-
        ${{ needs.pre-setup.outputs.dists-artifact-name }}
-      wheel-artifact-name: ${{ needs.pre-setup.outputs.wheel-artifact-name }}
-      cache-key-files: ${{ needs.pre-setup.outputs.cache-key-files }}
-      release-requested: >-
-        ${{ needs.pre-setup.outputs.release-requested }}
-      manylinux-python-target: ${{ matrix.manylinux-python-target }}
-      manylinux-year-target: ${{ matrix.manylinux-year-target }}
-      manylinux-image-target-arch: >-
-        ${{ matrix.manylinux-image-target.arch }}
-      manylinux-image-target-qemu-arch: >-
-        ${{ matrix.manylinux-image-target.qemu_arch }}
+      qemu: all
+      cython-tracing: >-  # Cython line tracing for coverage collection
+        ${{
+          fromJSON(needs.pre-setup.outputs.profiling-enabled)
+          && 'true'
+          || 'false'
+        }}

   build-src:
     name: >-
diff --git a/.github/workflows/reusable-build-wheel.yml b/.github/workflows/reusable-build-wheel.yml
deleted file mode 100644
index d47548a2a..000000000
--- a/.github/workflows/reusable-build-wheel.yml
+++ /dev/null
@@ -1,223 +0,0 @@
----
-
-name: ♲ 👷 Build wheel 🛞📦
-
-on:  # yamllint disable-line rule:truthy
-  workflow_call:
-    inputs:
-      dists-artifact-name:
-        description: Workflow artifact name containing dists
-        required: true
-        type: string
-      cython-tracing:
-        description: Whether to build Cython modules with line tracing
-        default: '0'
-        required: false
-        type: string
-      # os:
-      #   description: VM OS to use, without version suffix
-      #   default: ubuntu
-      #   required: false
-      #   type: string
-      # qemu:
-      #   description: Emulated QEMU architecture
-      #   default: ''
-      #   required: false
-      #   type: string
-      source-tarball-name:
-        description: Sdist filename wildcard
-        required: true
-        type: string
-      # wheel-tags-to-skip:
-      #   description: Wheel tags to skip building
-      #   default: ''
-      #   required: false
-      #   type: string
-      manylinux-python-target:
-        description: A Python version present in a manylinux container image
-        required: true
-        type: string
-      manylinux-year-target:
-        description: Manylinux container image name suffix
-        required: true
-        type: string
-      manylinux-image-target-arch:
-        description: Architecture of a wheel to be made
-        required: true
-        type: string
-      manylinux-image-target-qemu-arch:
-        description: QEMU-emulated architecture for the wheel
-        required: true
-        type: string
-      cache-key-files:
-        description: Dependency files cache
-        required: true
-        type: string
-      release-requested:
-        description: Flag whether this is CI run is a release request
-        default: 'false'
-        required: false
-        type: string
-      wheel-artifact-name:
-        description: Wheel filename wildcard
-        default: ''
-        required: false
-        type: string
-
-env:
-  FORCE_COLOR: "1"  # Make tools pretty.
-  PIP_DISABLE_PIP_VERSION_CHECK: "1"
-  PIP_NO_PYTHON_VERSION_WARNING: "1"
-  TOX_VERSION: tox < 4.12
-
-jobs:
-
-  build-wheel:
-    name: >-
-      ${{ inputs.manylinux-year-target }}-${{
-        inputs.manylinux-image-target-arch }}
-      🐍 ${{ inputs.manylinux-python-target }}
-    runs-on: ubuntu-latest
-
-    env:
-      ANSIBLE_PYLIBSSH_CYTHON_TRACING: ${{ inputs.cython-tracing }}
-      DOCKER_EXECUTABLE: podman
-      QEMU_ARCH: >-
-        ${{
-          inputs.manylinux-image-target.qemu-arch
-          || inputs.manylinux-image-target-arch
-        }}
-      TOXENV: >-
-        build-dists-manylinux${{ inputs.manylinux-year-target
-        }}-${{ inputs.manylinux-image-target-arch }},metadata-validation
-
-    steps:
-    - name: Compute GHA artifact name ending
-      id: gha-artifact-name
-      run: |
-        from hashlib import sha512
-        from os import environ
-        from pathlib import Path
-
-        FILE_APPEND_MODE = 'a'
-
-        inputs_json_str = """${{ toJSON(inputs) }}"""
-
-        hash = sha512(inputs_json_str.encode()).hexdigest()
-
-        with Path(environ['GITHUB_OUTPUT']).open(
-            mode=FILE_APPEND_MODE,
-        ) as outputs_file:
-            print(f'hash={hash}', file=outputs_file)
-      shell: python
-
-    - name: Switch to using Python 3.11 by default
-      uses: actions/setup-python@v5.3.0
-      with:
-        python-version: 3.11
-
-    - name: Retrieve the project source from an sdist inside the GHA artifact
-      uses: re-actors/checkout-python-sdist@release/v2
-      with:
-        source-tarball-name: ${{ inputs.source-tarball-name }}
-        workflow-artifact-name: ${{ inputs.dists-artifact-name }}
-
-    - name: >-
-        Calculate Python interpreter version hash value
-        for use in the cache key
-      id: calc-cache-key-py
-      run: |
-        from hashlib import sha512
-        from os import environ
-        from pathlib import Path
-        from sys import version
-
-        FILE_APPEND_MODE = 'a'
-
-        hash = sha512(version.encode()).hexdigest()
-
-        with Path(environ['GITHUB_OUTPUT']).open(
-            mode=FILE_APPEND_MODE,
-        ) as outputs_file:
-            print(f'py-hash-key={hash}', file=outputs_file)
-      shell: python
-
-    - name: Set up pip cache
-      uses: actions/cache@v4
-      with:
-        path: >-
-          ${{
-            runner.os == 'Linux'
-            && '~/.cache/pip'
-            || '~/Library/Caches/pip'
-          }}
-        key: >-
-          ${{ runner.os }}-pip-${{
-            steps.calc-cache-key-py.outputs.py-hash-key }}-${{
-            inputs.cache-key-files }}
-        restore-keys: |
-          ${{ runner.os }}-pip-${{
-            steps.calc-cache-key-py.outputs.py-hash-key
-          }}-
-          ${{ runner.os }}-pip-
-          ${{ runner.os }}-
-
-    - name: Install tox
-      run: >-
-        python -m
-        pip install
-        --user
-        '${{ env.TOX_VERSION }}'
-
-    - name: Pre-populate the tox env
-      run: >-
-        python -m
-        tox
-        --parallel auto
-        --parallel-live
-        --skip-missing-interpreters false
-        --notest
-
-    - name: >-
-        Set up QEMU ${{ env.QEMU_ARCH }} arch emulation
-        with Podman
-      if: env.QEMU_ARCH != 'amd64'
-      run: >-
-        sudo podman run
-        --rm --privileged
-        multiarch/qemu-user-static
-        --reset -p yes
-
-    - name: >-
-        Build ${{ inputs.manylinux-python-target }} dist
-        and verify wheel metadata
-      run: >-
-        python -m
-        tox
-        --parallel auto
-        --parallel-live
-        --skip-missing-interpreters false
-        --skip-pkg-install
-        --
-        ${{ inputs.manylinux-python-target }}
-
-    - name: Verify that the artifacts with expected names got created
-      run: >-
-        ls -1
-        dist/${{ inputs.wheel-artifact-name }}
-
-    - name: Store ${{ inputs.manylinux-python-target }} binary wheel
-      uses: actions/upload-artifact@v4
-      with:
-        name: ${{ inputs.dists-artifact-name }}-
-          ${{ inputs.manylinux-year-target }}-
-          ${{ inputs.manylinux-image-target-arch }}-
-          ${{ inputs.manylinux-image-target-qemu-arch }}-
-          ${{ inputs.manylinux-python-target }}-
-          ${{ steps.gha-artifact-name.outputs.hash }}
-        # NOTE: Exact expected file names are specified here
-        # NOTE: as a safety measure — if anything weird ends
-        # NOTE: up being in this dir or not all dists will be
-        # NOTE: produced, this will fail the workflow.
-        path: |
-          dist/${{ inputs.wheel-artifact-name }}
-        retention-days: >-
-          ${{ fromJSON(inputs.release-requested) && 7 || 4 }}
-
-...
diff --git a/.github/workflows/reusable-cibuildwheel.yml b/.github/workflows/reusable-cibuildwheel.yml
index 3007a6319..7672a8150 100644
--- a/.github/workflows/reusable-cibuildwheel.yml
+++ b/.github/workflows/reusable-cibuildwheel.yml
@@ -53,7 +53,7 @@ jobs:
       || format('Build wheels on {0} {1}', inputs.os, inputs.qemu)
       }}
     runs-on: ${{ inputs.os }}
-    timeout-minutes: ${{ inputs.qemu && 60 || 8 }}
+    timeout-minutes: ${{ inputs.qemu && 70 || 8 }}
     steps:
     - name: Compute GHA artifact name ending
       id: gha-artifact-name
diff --git a/build-scripts/build-all-manylinux-wheels.sh b/build-scripts/build-all-manylinux-wheels.sh
deleted file mode 100755
index ba05f3877..000000000
--- a/build-scripts/build-all-manylinux-wheels.sh
+++ /dev/null
@@ -1,41 +0,0 @@
-#! /usr/bin/env bash
-if [ -n "$DEBUG" ]
-then
-    set -x
-fi
-
-set -Eeuo pipefail
-
-manylinux1_image_prefix="quay.io/pypa/manylinux1_"
-manylinux1_image_prefix="pyca/cryptography-manylinux1:"
-manylinux1_image_prefix="ghcr.io/pyca/cryptography-manylinux1:"
-#manylinux1_image_prefix="quay.io/pypa/manylinux2010_"
-#manylinux1_image_prefix="pyca/cryptography-manylinux2010:"
-dock_ext_args=""
-declare -A docker_pull_pids=()  # This syntax requires at least bash v4
-
-for arch in x86_64
-do
-    #docker pull "${manylinux1_image_prefix}${arch}" &
-    podman pull "${manylinux1_image_prefix}${arch}" &
-    docker_pull_pids[$arch]=$!
-done
-
-for arch in x86_64
-do
-    echo
-    echo
-    arch_pull_pid=${docker_pull_pids[$arch]}
-    echo Waiting for docker pull PID $arch_pull_pid to complete downloading container for $arch arch...
-    wait $arch_pull_pid  # await for docker image for current arch to be pulled from hub
-    [ $arch == "i686" ] && dock_ext_args="linux32"
-
-    echo Building wheel for $arch arch
-    #docker run --rm -v `pwd`:/io "${manylinux1_image_prefix}${arch}" $dock_ext_args /io/build-scripts/build-manylinux-wheels.sh "$LIBSSH_VERSION" &
-    podman run --rm -v `pwd`:/io "${manylinux1_image_prefix}${arch}" $dock_ext_args /io/build-scripts/build-manylinux-wheels.sh &
-
-    dock_ext_args=""  # Reset docker args, just in case
-done
-wait
-
-set +u
diff --git a/build-scripts/build-manylinux-wheels.sh b/build-scripts/build-manylinux-wheels.sh
deleted file mode 100755
index 22acf407e..000000000
--- a/build-scripts/build-manylinux-wheels.sh
+++ /dev/null
@@ -1,247 +0,0 @@
-#!/usr/bin/env bash
-
-DEBUG=$DEBUG
-
-if [ -n "$DEBUG" ]
-then
-    set -x
-fi
-
-MANYLINUX_TARGET="${1}"
-PYTHON_TARGET="${2}"
-
-set -Eeuo pipefail
-
-THIS_SCRIPT_DIR_PATH=$(dirname "$(readlink -m $(type -p "${0}"))")
-IMAGE_SCRIPTS_DIR_PATH="${THIS_SCRIPT_DIR_PATH}/manylinux-container-image"
-
-source "${IMAGE_SCRIPTS_DIR_PATH}/get-static-deps-dir.sh"
-source "${IMAGE_SCRIPTS_DIR_PATH}/activate-userspace-tools.sh"
-
-SRC_DIR=/io
-GIT_DIR="${SRC_DIR}/.git"
-PERM_REF_HOST_FILE="${SRC_DIR}/setup.cfg"
-PEP517_CONFIG_FILE="${SRC_DIR}/pyproject.toml"
-DIST_NAME="$(cat "${PERM_REF_HOST_FILE}" | grep '^name = ' | awk '{print$3}' | sed s/-/_/)"
-IMPORTABLE_PKG="$(ls --ignore='*.egg-info' "${SRC_DIR}/src/")"  # must contain only one dir
-
->&2 echo Verifying that $IMPORTABLE_PKG can be the target package...
->/dev/null stat ${SRC_DIR}/src/${IMPORTABLE_PKG}/*.p{y,yx,xd}
-
-PYTHONS="$(ls -1 --ignore=cp34-cp34m /opt/python/ | sort -r)"
-if [ -n "${PYTHON_TARGET}" ]
-then
-    if &>/dev/null grep -ow "^${PYTHON_TARGET}$" <<<"$PYTHONS"
-    then
-        >&2 echo Using the target Python requested \
-            by the second argument ${PYTHON_TARGET}
-        PYTHONS="${PYTHON_TARGET}"
-    else
-        >&2 echo Invalid Python target requested \
-            by the second CLI argument ${PYTHON_TARGET}
-        exit 1
-    fi
-else
-    >&2 echo Using all Python targets found in this env
-fi
-
->&2 echo Selected Python targets for this build run:
-echo "${PYTHONS}" | >&2 tr ' ' '\n'
-
-
-MANYLINUX_TAG="$(
-    /opt/python/cp39-cp39/bin/python \
-    "${IMAGE_SCRIPTS_DIR_PATH}/manylinux_mapping.py" \
-    "${MANYLINUX_TARGET}"
-)"
-
-
-# Avoid creation of __pycache__/*.py[c|o]
-export PYTHONDONTWRITEBYTECODE=1
-
-import_userspace_tools
-
-PIP_GLOBAL_ARGS=
-if [ -n "$DEBUG" ]
-then
-    PIP_GLOBAL_ARGS=-vv
-fi
-GIT_GLOBAL_ARGS="--git-dir=${GIT_DIR} --work-tree=${SRC_DIR}"
-TESTS_SRC_DIR="${SRC_DIR}/tests"
-BUILD_DIR=$(mktemp -d "/tmp/${DIST_NAME}-${MANYLINUX_TAG}-build.XXXXXXXXXX")
-TESTS_DIR="${BUILD_DIR}/tests"
-STATIC_DEPS_PREFIX="$(get_static_deps_dir)"
-
-ORIG_WHEEL_DIR="${BUILD_DIR}/original-wheelhouse"
-WHEEL_DEP_DIR="${BUILD_DIR}/deps-wheelhouse"
-MANYLINUX_DIR="${BUILD_DIR}/manylinux-wheelhouse"
-WHEELHOUSE_DIR="${SRC_DIR}/dist"
-UNPACKED_WHEELS_DIR="${BUILD_DIR}/unpacked-wheels"
-VENVS_DIR="${BUILD_DIR}/venvs"
-ISOLATED_SRC_DIRS="${BUILD_DIR}/src"
-
-# NOTE: `LDFLAGS` is necessary for the C-extension build's linker to
-# NOTE: locate the symbols in the libssh shared object files.
-# NOTE: Otherwise, the error is:
-#
-# gcc -pthread -shared -lssh -I/opt/manylinux-static-deps.PPkLKziXI7/include -DCYTHON_TRACE=1 -DCYTHON_TRACE_NOGIL=1 /tmp/pip-req-build-4h841og7/src/tmpy3l03tmj/tmp/pip-req-build-4h841og7/src/pylibsshext/session.o -lssh -o build/lib.linux-x86_64-3.9/pylibsshext/session.cpython-39-x86_64-linux-gnu.so
-# /opt/rh/devtoolset-2/root/usr/libexec/gcc/x86_64-CentOS-linux/4.8.2/ld: cannot find -lssh
-# /opt/rh/devtoolset-2/root/usr/libexec/gcc/x86_64-CentOS-linux/4.8.2/ld: cannot find -lssh
-# collect2: error: ld returned 1 exit status
-# error: command '/opt/rh/devtoolset-2/root/usr/bin/gcc' failed with exit code 1
-# ----------------------------------------
-# ERROR: Failed building wheel for ansible-pylibssh
-# Failed to build ansible-pylibssh
-# ERROR: Failed to build one or more wheels
-export LDFLAGS="'-L${STATIC_DEPS_PREFIX}/lib64' '-L${STATIC_DEPS_PREFIX}/lib'"
-
-# NOTE: `LD_LIBRARY_PATH` is necessary so that `auditwheel repair` could locate `libssh.so.4`
-export LD_LIBRARY_PATH="${STATIC_DEPS_PREFIX}/lib64:${STATIC_DEPS_PREFIX}/lib:$LD_LIBRARY_PATH"
-
-ARCH=`uname -m`
-
->&2 echo
->&2 echo
->&2 echo ===============================================
->&2 echo Copying the source repo to temporary locations:
->&2 echo ===============================================
->&2 echo
-for PY in $PYTHONS; do
-    >&2 echo Creating "${ISOLATED_SRC_DIRS}/${PY}"...
-    if [ -d "${GIT_DIR}" ]
-    then
-        # NOTE: Allow `setuptools-scm` grab the version from Git
-        git ${GIT_GLOBAL_ARGS} worktree add --detach \
-            "${ISOLATED_SRC_DIRS}/${PY}"
-        cp -v "${PEP517_CONFIG_FILE}" "${ISOLATED_SRC_DIRS}/${PY}"/
-    else
-        # NOTE: Rely on `.git_archival.txt` for versioning
-        mkdir -pv "${ISOLATED_SRC_DIRS}/"
-        cp -a "${SRC_DIR}" "${ISOLATED_SRC_DIRS}/${PY}"
-    fi
-done
-
->&2 echo
->&2 echo
->&2 echo ================
->&2 echo Building wheels:
->&2 echo ================
->&2 echo
-export CFLAGS="'-I${STATIC_DEPS_PREFIX}/include'"
-for PY in $PYTHONS; do
-    PIP_BIN="/opt/python/${PY}/bin/pip"
-    >&2 echo Using "${PIP_BIN}"...
-    ${PIP_BIN} install -U 'pip >= 20' wheel ${PIP_GLOBAL_ARGS}
-    ${PIP_BIN} wheel "${ISOLATED_SRC_DIRS}/${PY}" -w "${ORIG_WHEEL_DIR}" ${PIP_GLOBAL_ARGS}
-done
-
->&2 echo
->&2 echo
->&2 echo ================
->&2 echo Reparing wheels:
->&2 echo ================
->&2 echo
-# Bundle external shared libraries into the wheels
-for PY in $PYTHONS; do
-    for whl in ${ORIG_WHEEL_DIR}/${DIST_NAME}-*-${PY}-linux_${ARCH}.whl; do
-        >&2 echo Reparing "${whl}" for "${MANYLINUX_TAG}"...
-        auditwheel repair --only-plat --plat "${MANYLINUX_TARGET}" "${whl}" -w ${MANYLINUX_DIR}
-    done
-done
-
-# Download deps
->&2 echo
->&2 echo
->&2 echo =========================
->&2 echo Downloading dependencies:
->&2 echo =========================
->&2 echo
-for PY in $PYTHONS; do
-    for WHEEL_FILE in `ls ${MANYLINUX_DIR}/${DIST_NAME}-*-${PY}-${MANYLINUX_TAG}.whl`; do
-        PIP_BIN="/opt/python/${PY}/bin/pip"
-        >&2 echo Downloading ${WHEEL_FILE} deps using ${PIP_BIN}...
-        ${PIP_BIN} download -d "${WHEEL_DEP_DIR}" "${WHEEL_FILE}" ${PIP_GLOBAL_ARGS}
-    done
-done
-
->&2 echo
->&2 echo ===================
->&2 echo Creating test venvs
->&2 echo ===================
->&2 echo
-for PY in $PYTHONS; do
-    VENV_NAME="${PY}-${MANYLINUX_TAG}"
-    VENV_PATH="${VENVS_DIR}/${VENV_NAME}"
-    VENV_BIN="/opt/python/${PY}/bin/virtualenv"
-
-    >&2 echo
-    >&2 echo Creating a venv at ${VENV_PATH}...
-    ${VENV_BIN} "${VENV_PATH}"
-done
-
-# Install packages
->&2 echo
->&2 echo
->&2 echo ============================
->&2 echo Testing wheels installation:
->&2 echo ============================
->&2 echo
-for PY in $PYTHONS; do
-    VENV_NAME="${PY}-${MANYLINUX_TAG}"
-    VENV_PATH="${VENVS_DIR}/${VENV_NAME}"
-    PIP_BIN="${VENV_PATH}/bin/pip"
-    >&2 echo Using ${PIP_BIN}...
-    ${PIP_BIN} install --no-compile "${DIST_NAME}" --no-index -f "${MANYLINUX_DIR}/" ${PIP_GLOBAL_ARGS}
-done
-
->&2 echo
->&2 echo ==============
->&2 echo WHEEL ANALYSIS
->&2 echo ==============
->&2 echo
-for PY in $PYTHONS; do
-    WHEEL_BIN="/opt/python/${PY}/bin/wheel"
-    PLAT_TAG="${PY}-${MANYLINUX_TAG}"
-    UNPACKED_DIR=${UNPACKED_WHEELS_DIR}/${PLAT_TAG}
-    WHEEL_FILE=`ls ${MANYLINUX_DIR}/${DIST_NAME}-*-${PLAT_TAG}.whl`
-    >&2 echo
-    >&2 echo Analysing ${WHEEL_FILE}...
-    auditwheel show "${WHEEL_FILE}"
-    ${WHEEL_BIN} unpack -d "${UNPACKED_DIR}" "${WHEEL_FILE}"
-    # chmod avoids ldd warning about files being non-executable:
-    chmod +x "${UNPACKED_DIR}"/${DIST_NAME}-*/{${DIST_NAME}.libs/*.so.*,${IMPORTABLE_PKG}/*.so}
-    >&2 echo Verifying that all links in '`*.so`' files of ${WHEEL_FILE} exist...
-    ! ldd "${UNPACKED_DIR}"/${DIST_NAME}-*/{${DIST_NAME}.libs/*.so.*,${IMPORTABLE_PKG}/*.so} | grep '=> not found'
-done
-
->&2 echo
->&2 echo
->&2 echo ===================================
->&2 echo Running smoke tests against wheels:
->&2 echo ===================================
->&2 echo
-cp -vr "${TESTS_SRC_DIR}" "${TESTS_DIR}"
-sed \
-    's#\s\+--cov.*##;s#\s\+--no-cov-on-fail.*##;s#\s\+-p\spytest_cov.*##' \
-    "${SRC_DIR}/pytest.ini" > "${TESTS_DIR}/pytest.ini"
-pushd "${TESTS_DIR}"
-for PY_BIN in `ls ${VENVS_DIR}/*/bin/python`; do
-    $PY_BIN -B -m pip install --no-compile pytest pytest-forked pytest-xdist ${PIP_GLOBAL_ARGS}
-    $PY_BIN -B -m pytest -m smoke "${TESTS_DIR}"
-done
-popd
-
->&2 echo
->&2 echo
->&2 echo ==================
->&2 echo SELF-TEST COMPLETE
->&2 echo ==================
->&2 echo
-
->&2 echo Copying built manylinux wheels back to the host...
-chown -R --reference="${PERM_REF_HOST_FILE}" "${MANYLINUX_DIR}"/*
-mkdir -pv "${WHEELHOUSE_DIR}"
-chown --reference="${PERM_REF_HOST_FILE}" "${WHEELHOUSE_DIR}"
-cp -av "${MANYLINUX_DIR}"/"${DIST_NAME}"-*-${MANYLINUX_TAG}.whl "${WHEELHOUSE_DIR}/"
->&2 echo Final OS-specific wheels for ${DIST_NAME}:
-ls -l ${WHEELHOUSE_DIR}
diff --git a/docs/changelog-fragments/562.breaking.rst b/docs/changelog-fragments/562.breaking.rst
new file mode 100644
index 000000000..5deced1db
--- /dev/null
+++ b/docs/changelog-fragments/562.breaking.rst
@@ -0,0 +1,4 @@
+PyPI no longer ships year-versioned manylinux wheels. One may
+have to update their version of pip to pick up the new ones.
+
+-- by :user:`webknjaz`
diff --git a/docs/changelog-fragments/562.contrib.rst b/docs/changelog-fragments/562.contrib.rst
new file mode 100644
index 000000000..b7ab0b339
--- /dev/null
+++ b/docs/changelog-fragments/562.contrib.rst
@@ -0,0 +1,4 @@
+Manylinux wheels are no longer built using custom shell scripts.
+Instead, this is delegated to the ``cibuildwheel`` tool.
+
+-- by :user:`webknjaz`
diff --git a/docs/changelog-fragments/562.packaging.rst b/docs/changelog-fragments/562.packaging.rst
new file mode 100644
index 000000000..079f0a2b0
--- /dev/null
+++ b/docs/changelog-fragments/562.packaging.rst
@@ -0,0 +1,2 @@
+PyPI now only ships :pep:`600`-compatible manylinux wheels
+-- by :user:`webknjaz`.
diff --git a/docs/installation_guide.rst b/docs/installation_guide.rst
index 5f1cd0042..72e42e951 100644
--- a/docs/installation_guide.rst
+++ b/docs/installation_guide.rst
@@ -128,10 +128,7 @@ compatible wheels, use:

 .. code-block:: shell-session

-    [dir:pylibssh] $ tox -e build-dists-manylinux1-x86_64  # with Docker
-
-    [dir:pylibssh] $ # or with Podman
-    [dir:pylibssh] $ DOCKER_EXECUTABLE=podman tox -e build-dists-manylinux1-x86_64
+    [dir:pylibssh] $ tox r -e cibuildwheel

 .. seealso::
diff --git a/pyproject.toml b/pyproject.toml
index 9ccc99803..507311cfa 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -142,6 +142,7 @@ profile = "True"

 [tool.cibuildwheel]
 build-frontend = "build"
+# container-engine = "podman"  # FIXME?
 test-requires = "pytest pytest-cov pytest-xdist pytest-forked"
 test-command = "python -Im pytest -m smoke --no-cov {project}/tests"
 skip = "pp*"
@@ -158,6 +159,28 @@ PIP_NO_WARN_SCRIPT_LOCATION = "1"
 PRE_COMMIT_COLOR = "always"
 PY_COLORS = "1"

+[tool.cibuildwheel.linux]
+manylinux-aarch64-image = "ghcr.io/ansible/pylibssh-manylinux_2_28_aarch64:libssh-v0.9.6"
+manylinux-ppc64le-image = "ghcr.io/ansible/pylibssh-manylinux_2_28_ppc64le:libssh-v0.9.6"
+manylinux-s390x-image = "ghcr.io/ansible/pylibssh-manylinux_2_28_s390x:libssh-v0.9.6"
+manylinux-x86_64-image = "ghcr.io/ansible/pylibssh-manylinux_2_28_x86_64:libssh-v0.9.6"
+skip = [
+    "*-musllinux_*",  # FIXME: musllinux needs us to provide containers with pre-built libssh
+    "pp*",  # FIXME: we don't ship these currently but could
+
+    # The support for 32-bit wheels has been dropped long ago:
+    "*-manylinux_i686",
+    "*-musllinux_i686",
+]
+
+[tool.cibuildwheel.linux.environment]
+STATIC_DEPS_DIR = "$(cat /root/.static-deps-path)"
+# LDFLAGS = "'-L${STATIC_DEPS_PREFIX}/lib64' '-L${STATIC_DEPS_PREFIX}/lib'"
+# LD_LIBRARY_PATH = "${STATIC_DEPS_PREFIX}/lib64:${STATIC_DEPS_PREFIX}/lib:${LD_LIBRARY_PATH}"
+CFLAGS = "'-I$(cat /root/.static-deps-path)/include'"
+LDFLAGS = "'-L$(cat /root/.static-deps-path)/lib64' '-L$(cat /root/.static-deps-path)/lib'"
+LD_LIBRARY_PATH = "$(cat /root/.static-deps-path)/lib64:$(cat /root/.static-deps-path)/lib:${LD_LIBRARY_PATH}"
+
 [tool.cibuildwheel.macos]
 before-build = [
     "brew install libssh",  # @0.9.4 # pinning the version does not work
diff --git a/tox.ini b/tox.ini
index 3f66882a7..8003de8ed 100644
--- a/tox.ini
+++ b/tox.ini
@@ -230,7 +230,7 @@ description =
   Verify that dists under the dist/ dir have valid metadata
 depends =
   build-dists
-  build-dists-manylinux{1,2010,2014,_2_24,_2_28}-{x86_64,aarch64,ppc64le,s390x}
+  cibuildwheel
   build-wheels-pip
   delocate-macos-wheels
 deps =
@@ -245,44 +245,17 @@ commands =
   twine check --strict {env:PEP517_OUT_DIR}/*

-[testenv:build-dists-manylinux{1,2010,2014,_2_24,_2_28}-{x86_64,aarch64,ppc64le,s390x}]
-allowlist_externals =
-  {env:DOCKER_EXECUTABLE:docker}
-basepython = python3
+[testenv:cibuildwheel]
 description =
-  Build manylinux wheels in a container and put them into the dists/ folder
+  Build publishable wheel dists using `cibuildwheel` and
+  put them into the dists/ folder
 commands =
-  # NOTE: `-t` shouldn't be used here because GitHub Actions
-  # NOTE: workflows don't have TTY
-  {env:DOCKER_EXECUTABLE:docker} run \
-    -i --rm \
-    -v {toxinidir}:/io \
-    -e ANSIBLE_PYLIBSSH_CYTHON_TRACING \
-    ghcr.io/ansible/pylibssh-manylinux{env:MANYLINUX_VERSION_TAG}_{env:MANYLINUX_ARCH_TAG}:libssh-v{env:LIBSSH_VERSION:0.9.6} \
-    /io/build-scripts/build-manylinux-wheels.sh \
-    "manylinux{env:MANYLINUX_VERSION_TAG}_{env:MANYLINUX_ARCH_TAG}" \
-    {posargs:}
+  {envpython} -Im cibuildwheel {posargs}
 deps =
-isolated_build = true
-passenv =
-  # alternatively set `DOCKER_EXECUTABLE=podman` outside the container
-  DOCKER_EXECUTABLE
-  HOME
-  LIBSSH_VERSION
-setenv =
-  {[testenv]setenv}
-
-  x86_64: MANYLINUX_ARCH_TAG = x86_64
-  aarch64: MANYLINUX_ARCH_TAG = aarch64
-  ppc64le: MANYLINUX_ARCH_TAG = ppc64le
-  s390x: MANYLINUX_ARCH_TAG = s390x
-
-  manylinux1: MANYLINUX_VERSION_TAG = 1
-  manylinux2010: MANYLINUX_VERSION_TAG = 2010
-  manylinux2014: MANYLINUX_VERSION_TAG = 2014
-  manylinux_2_24: MANYLINUX_VERSION_TAG = _2_24
-  manylinux_2_28: MANYLINUX_VERSION_TAG = _2_28
-skip_install = true
+  cibuildwheel
+package = skip
+set_env =
+  CIBW_CONFIG_SETTINGS = with-cython-tracing={env:ANSIBLE_PYLIBSSH_CYTHON_TRACING:true}

 [testenv:lint]
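
Usage sketch, not part of the patch: with these changes, local manylinux wheel builds are expected to go through the new tox environment rather than the deleted build scripts. The invocation below is an assumption based on the tox.ini and docs/installation_guide.rst hunks above, and the output location depends on how cibuildwheel is pointed at an output directory.

    [dir:pylibssh] $ python -m pip install tox    # assumed prerequisite
    [dir:pylibssh] $ tox r -e cibuildwheel

Per the [tool.cibuildwheel.linux] settings added to pyproject.toml, this pulls the ghcr.io/ansible/pylibssh-manylinux_2_28_* images and runs the smoke tests (pytest -m smoke) against each repaired wheel.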