diff --git a/.github/workflows/publish.yaml b/.github/workflows/publish.yaml index 75d70c7f32..147a671252 100644 --- a/.github/workflows/publish.yaml +++ b/.github/workflows/publish.yaml @@ -2,6 +2,9 @@ name: CI on: pull_request: + push: + branches: + - "feature/**" jobs: publish: @@ -38,11 +41,22 @@ jobs: # Make sure it is installable. sudo snap install --dangerous --classic ${{ steps.build.outputs.snap }} - - if: steps.decisions.outputs.PUBLISH == 'true' && github.event_name == 'pull_request' - # Use this until snapcore/action-publish#27 it is merged. - uses: sergiusens/action-publish@master + - name: Get branch name + id: vars + run: | + if [[ "${{ github.event_name }}" == "pull_request" ]] + then + echo "branch=pr-${{ github.event.number }}" >> "$GITHUB_OUTPUT" + else + branch=$(echo ${GITHUB_REF#refs/*/} | sed -e 's|feature/\(.*\)|\1|') + echo "branch=$branch" >> "$GITHUB_OUTPUT" + fi + + - name: Publish feature branch to edge/${{ steps.vars.outputs.branch }} + if: steps.decisions.outputs.PUBLISH == 'true' + uses: snapcore/action-publish@v1 env: SNAPCRAFT_STORE_CREDENTIALS: ${{ secrets.SNAPCRAFT_STORE_CREDENTIALS }} with: snap: ${{ steps.build.outputs.snap }} - release: edge/pr-${{ github.event.number }} + release: edge/${{ steps.vars.outputs.branch }} diff --git a/.github/workflows/spread-scheduled.yaml b/.github/workflows/spread-scheduled.yaml index 3f7dbb146d..787ec0911d 100644 --- a/.github/workflows/spread-scheduled.yaml +++ b/.github/workflows/spread-scheduled.yaml @@ -3,7 +3,6 @@ on: schedule: # At 03:00 on Wednesday and Sunday. - cron: "0 3 * * WED,SUN" - workflow_dispatch: jobs: @@ -51,3 +50,29 @@ jobs: - name: Kernel plugin test run: | spread google:ubuntu-22.04-64:tests/spread/plugins/${{ matrix.type }}/kernel + + remote-build: + runs-on: self-hosted + needs: [snap-build] + strategy: + fail-fast: false + steps: + - name: Cleanup job workspace + run: | + rm -rf "${{ github.workspace }}" + mkdir "${{ github.workspace }}" + - name: Checkout snapcraft + uses: actions/checkout@v3 + with: + fetch-depth: 0 + submodules: true + - name: Download snap artifact + uses: actions/download-artifact@v3 + with: + name: snap + path: tests + - name: remote-build test + env: + LAUNCHPAD_TOKEN: "${{ secrets.LAUNCHPAD_TOKEN }}" + run: | + spread google:ubuntu-20.04-64:tests/spread/general/remote-build diff --git a/.github/workflows/tox.yaml b/.github/workflows/tox.yaml index 668ca3f02b..1be033611d 100644 --- a/.github/workflows/tox.yaml +++ b/.github/workflows/tox.yaml @@ -37,25 +37,23 @@ jobs: python -m pip install 'tox<5.0' tox-gh echo "::endgroup::" echo "::group::Create virtual environments for linting processes." - tox run -m lint build-docs --notest + tox run --colored yes -m lint build-docs --notest echo "::endgroup::" echo "::group::Build docs." - tox run -e build-docs + tox run --colored yes -e build-docs echo "::endgroup::" echo "::group::Wait for snap to complete" snap watch --last=install echo "::endgroup::" - name: Run Linters - run: tox run --skip-pkg-install -m lint + run: tox run --colored yes --skip-pkg-install -m lint tests: strategy: fail-fast: false # Run all the tests to their conclusions. 
matrix: - platform: [ubuntu-20.04, ubuntu-22.04] - python_version: ["3.8", "3.10"] + platform: [ubuntu-22.04] + python_version: ["3.10"] include: - - python_version: "3.8" - tox_python: py38 - python_version: "3.10" tox_python: py310 runs-on: ${{ matrix.platform }} @@ -80,9 +78,9 @@ jobs: echo "::endgroup::" mkdir -p results - name: Setup Tox environments - run: tox run-parallel --parallel auto --parallel-no-spinner --parallel-live -e test-${{ matrix.tox_python }},test-legacy-${{ matrix.tox_python }} --notest + run: tox run-parallel --parallel auto --parallel-no-spinner --parallel-live --colored yes -e test-${{ matrix.tox_python }},test-legacy-${{ matrix.tox_python }} --notest - name: Test with tox - run: tox run --skip-pkg-install --result-json results/tox-${{ matrix.platform }}.json -e test-${{ matrix.tox_python }},test-legacy-${{ matrix.tox_python }} + run: tox run --skip-pkg-install --result-json results/tox-${{ matrix.platform }}.json --colored yes -e test-${{ matrix.tox_python }},test-legacy-${{ matrix.tox_python }} - name: Upload code coverage uses: codecov/codecov-action@v3 with: diff --git a/Makefile b/Makefile index c8c3a1dd47..f24e3b9bc1 100644 --- a/Makefile +++ b/Makefile @@ -48,11 +48,11 @@ test-shellcheck: .PHONY: test-legacy-units test-legacy-units: - tox run -e py38-legacy + tox run -e test-legacy-py310 .PHONY: test-units test-units: test-legacy-units - tox run -e py38-unit + tox run -e test-py310 .PHONY: tests tests: tests-static test-units diff --git a/TESTING.md b/TESTING.md index 6683153d07..d554b30b44 100644 --- a/TESTING.md +++ b/TESTING.md @@ -78,13 +78,13 @@ For example: * To run only the unit tests for the plugins: ``` - pytest tests/unit/plugins + pytest tests/unit/parts/plugins ``` * To run only the integration tests for the store: ``` - pytest tests/integration/store + pytest tests/unit/store ``` The snaps tests script has more complex arguments. For an explanation of them, run: diff --git a/appveyor.yml b/appveyor.yml index 2dd633fc06..d6a5083206 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -7,7 +7,7 @@ environment: TIMESTAMP_SERVICE: http://timestamp.digicert.com matrix: - - PYTHON: C:\Python38-x64 + - PYTHON: C:\Python310-x64 cache: - '%LOCALAPPDATA%\pip\Cache\http' diff --git a/docs/.sphinx/pinned-requirements.txt b/docs/.sphinx/pinned-requirements.txt index 702d632667..5002d6ecb8 100644 --- a/docs/.sphinx/pinned-requirements.txt +++ b/docs/.sphinx/pinned-requirements.txt @@ -31,4 +31,4 @@ sphinxcontrib-jsmath==1.0.1 sphinxcontrib-qthelp==1.0.3 sphinxcontrib-serializinghtml==1.1.5 tornado==6.3.3 -urllib3==1.26.17 +urllib3==1.26.18 diff --git a/docs/howto/architectures.rst b/docs/howto/architectures.rst index 8ca6451f99..9d99c64c5d 100644 --- a/docs/howto/architectures.rst +++ b/docs/howto/architectures.rst @@ -35,13 +35,6 @@ following snippet snippet will produce the same result: architectures: - build-on: [amd64] -The shorthand format will also produce the same result: - -.. code-block:: yaml - - architectures: - - amd64 - core20 ^^^^^^ @@ -97,12 +90,6 @@ following snippet snippet will produce the same result: - build-on: [amd64] - build-on: [arm64] -The shorthand format will also produce the same result: - -.. code-block:: yaml - - architectures: [amd64, arm64] - core20 ^^^^^^ diff --git a/docs/reference/architectures.rst b/docs/reference/architectures.rst index fef00ad2aa..9a70e65fc3 100644 --- a/docs/reference/architectures.rst +++ b/docs/reference/architectures.rst @@ -71,11 +71,20 @@ use the ``all`` keyword. 
The same architecture cannot be defined in multiple ``build-for`` entries. +core20 +^^^^^^ + +The above syntax and rules for ``core22`` apply for ``core20`` except that +``run-on`` is used in place of ``build-for``. Additionally, ``core20`` supports +multiple architectures in the ``run-on`` field, which will create +multi-architecture snaps. + Shorthand format """""""""""""""" -As an alternative to the explicit format described above, a shorthand format -can be used for simple ``build-on/build-for`` pairs. The following shorthand: +As an alternative to the explicit format described above, ``core20`` snaps +support a shorthand format can be used for simple ``build-on/run-on`` +pairs. The following shorthand: .. code-block:: yaml @@ -87,19 +96,12 @@ is equivalent to: architectures: - build-on: [amd64] - build-for: [amd64] + run-on: [amd64] - build-on: [arm64] - build-for: [arm64] + run-on: [arm64] The explicit and shorthand format cannot be mixed. -core20 -^^^^^^ - -The above syntax and rules for ``core22`` apply for ``core20`` except that -``run-on`` is used in place of ``build-for``. Additionally, ``core20`` supports -multiple architectures in the ``run-on`` field, which will create -multi-architecture snaps. Project variables ----------------- diff --git a/docs/requirements.txt b/docs/requirements.txt index 204b6138b6..9c0805c895 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -2,10 +2,10 @@ attrs==23.1.0 catkin-pkg==0.5.2 click==8.1.3 craft-archives==1.1.3 -craft-cli==2.1.0 +craft-cli==2.4.0 craft-grammar==1.1.1 craft-parts==1.25.1 -craft-providers==1.15.0 +craft-providers==1.19.2 craft-store==2.4.0 Deprecated==1.2.13 distro==1.8.0 diff --git a/pyproject.toml b/pyproject.toml index df45608a51..53c4391636 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -8,7 +8,7 @@ extend-exclude = ''' ''' # Targeting future versions as well so we don't have black reformatting code # en masse later. -target_version = ["py38", "py310", "py311"] +target_version = ["py310", "py311"] [tool.isort] # black-compatible isort configuration @@ -48,7 +48,7 @@ load-plugins = "pylint_fixme_info,pylint_pytest" min-similarity-lines=10 [tool.mypy] -python_version = 3.8 +python_version = "3.10" ignore_missing_imports = true follow_imports = "silent" exclude = [ @@ -65,16 +65,17 @@ plugins = [ [tool.pyright] include = ["snapcraft", "tests"] -exclude = ["tests/legacy", "tests/spread", "build"] -pythonVersion = "3.8" +exclude = ["build", "tests/legacy", "tests/spread"] +pythonVersion = "3.10" [tool.pytest.ini_options] -minversion = 7.0 +minversion = "7.0" required_plugins = ["pytest-cov>=4.0", "pytest-mock>=3.10", "pytest-subprocess>=1.4"] addopts = ["--cov=snapcraft"] # Most of this ruff configuration comes from craft-parts [tool.ruff] +target-version = "py310" line-length = 88 extend-exclude = [ "docs", @@ -149,9 +150,17 @@ ignore = [ "C405", "C408", "C414", "Q000", # 2 single-quoted strings - probably accidental "RET504", "RET506", # Return value related. - + "PLR2004", # Magic values - widely used + "PLC1901", # Checking for empty string vs. falsey - many of these + "S603", # Untrusted input for subprocess calls + "S604", # shell=True parameter to a function + "S607", # Partial executable path for subprocess calls ] +[tool.ruff.pylint] +max-args = 6 +max-branches = 16 + [tool.ruff.per-file-ignores] "tests/**.py" = [ "D", # Ignore docstring rules in tests @@ -162,6 +171,11 @@ ignore = [ "S105", # Allow Possible hardcoded password. "S106", # Allow Possible hardcoded password. 
"S108", # Allow Probable insecure usage of temporary file or directory. + "PLR0913", # Allow many arguments to tests +] +"tests/unit/parts/plugins/test_kernel.py" = [ + "E101", # Mixed tabs and spaces. Ruff gets confused by tabs in multiline strings + "W191", # Indentation contains tabs - another Ruff false positive ] "__init__.py" = ["I001"] # Imports in __init__ filesare allowed to be out of order diff --git a/requirements-devel.txt b/requirements-devel.txt index e801f08f0c..89fe3f1a0a 100644 --- a/requirements-devel.txt +++ b/requirements-devel.txt @@ -12,10 +12,10 @@ codespell==2.2.4 colorama==0.4.6 coverage==7.2.5 craft-archives==1.1.3 -craft-cli==2.1.0 +craft-cli==2.4.0 craft-grammar==1.1.1 craft-parts==1.25.1 -craft-providers==1.15.0 +craft-providers==1.19.2 craft-store==2.4.0 cryptography==41.0.4 Deprecated==1.2.13 @@ -95,7 +95,7 @@ raven==6.10.0 requests==2.31.0 requests-toolbelt==1.0.0 requests-unixsocket==0.3.0 -ruff==0.0.220 +ruff==0.1.1 SecretStorage==3.3.3 simplejson==3.19.1 six==1.16.0 @@ -117,7 +117,7 @@ types-setuptools==67.7.0.2 types-tabulate==0.9.0.2 types-urllib3==1.26.25.13 typing_extensions==4.5.0 -urllib3==1.26.17 +urllib3==1.26.18 venusian==3.0.0 virtualenv==20.23.0 wadllib==1.3.6 diff --git a/requirements.txt b/requirements.txt index 6ba42e109e..7a0fe14a23 100644 --- a/requirements.txt +++ b/requirements.txt @@ -6,10 +6,10 @@ chardet==5.1.0 charset-normalizer==3.1.0 click==8.1.3 craft-archives==1.1.3 -craft-cli==2.1.0 +craft-cli==2.4.0 craft-grammar==1.1.1 craft-parts==1.25.1 -craft-providers==1.15.0 +craft-providers==1.19.2 craft-store==2.4.0 cryptography==41.0.4 Deprecated==1.2.13 @@ -67,7 +67,7 @@ toml==0.10.2 types-Deprecated==1.2.9.2 types-PyYAML==6.0.12.9 typing_extensions==4.5.0 -urllib3==1.26.17 +urllib3==1.26.18 wadllib==1.3.6 wrapt==1.15.0 ws4py==0.5.1 diff --git a/setup.py b/setup.py index b8f20bf9cb..054dc591b5 100755 --- a/setup.py +++ b/setup.py @@ -47,7 +47,7 @@ def recursive_data_files(directory, install_directory): "Natural Language :: English", "Programming Language :: Python", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.10", "Topic :: Software Development :: Build Tools", "Topic :: System :: Software Distribution", ] @@ -82,7 +82,7 @@ def recursive_data_files(directory, install_directory): "pytest-cov", "pytest-mock", "pytest-subprocess", - "ruff==0.0.220", + "ruff~=0.1.1", "tox>=4.5", "types-PyYAML", "types-requests", diff --git a/snap/snapcraft.yaml b/snap/snapcraft.yaml index b030615d69..33e0e50b49 100644 --- a/snap/snapcraft.yaml +++ b/snap/snapcraft.yaml @@ -135,6 +135,16 @@ parts: sed -i "${SNAPCRAFT_PART_INSTALL}/usr/lib/python3.10/site.py" \ -e 's/^ENABLE_USER_SITE = None$/ENABLE_USER_SITE = False/' + libgit2: + source: https://github.com/libgit2/libgit2/archive/refs/tags/v1.7.1.tar.gz + source-checksum: sha256/17d2b292f21be3892b704dddff29327b3564f96099a1c53b00edc23160c71327 + plugin: cmake + cmake-parameters: + - -DCMAKE_INSTALL_PREFIX=/usr + build-attributes: + - enable-patchelf + prime: + - -usr/include snapcraft: source: . plugin: python @@ -159,8 +169,8 @@ parts: - enable-patchelf build-environment: # Build PyNaCl from source since the wheel files interact - # strangely with classic snaps. - - "PIP_NO_BINARY": "PyNaCl" + # strangely with classic snaps. Well, build it all from source. + - "PIP_NO_BINARY": ":all" # Use base image's libsodium for PyNaCl. 
- "SODIUM_INSTALL": "system" - "CFLAGS": "$(pkg-config python-3.10 yaml-0.1 --cflags)" @@ -176,4 +186,13 @@ parts: # The new implementation still requires this. ln -sf ../usr/bin/python3.10 $SNAPCRAFT_PART_INSTALL/bin/python3 - after: [snapcraft-libs] + after: [snapcraft-libs, libgit2] + + chisel: + plugin: go + source: https://github.com/canonical/chisel.git + source-commit: bd27f8700cd7d2a6b4e0df6b10c3761c83a70485 + build-snaps: + - go/1.18/stable + organize: + bin/chisel: libexec/snapcraft/chisel diff --git a/snapcraft/cli.py b/snapcraft/cli.py index 2ca423e63f..829c76ec68 100644 --- a/snapcraft/cli.py +++ b/snapcraft/cli.py @@ -32,6 +32,7 @@ import snapcraft_legacy from snapcraft import __version__, errors, store, utils from snapcraft.parts import plugins +from snapcraft.remote import RemoteBuildError from snapcraft_legacy.cli import legacy from . import commands @@ -243,7 +244,8 @@ def _emit_error(error, cause=None): emit.error(error) -def run(): # noqa: C901 +# pylint: disable-next=too-many-statements +def run(): # noqa: C901 (complex-structure) """Run the CLI.""" dispatcher = get_dispatcher() retcode = 1 @@ -298,6 +300,9 @@ def run(): # noqa: C901 except errors.LinterError as err: emit.error(craft_cli.errors.CraftError(f"linter error: {err}")) retcode = err.exit_code + except RemoteBuildError as err: + emit.error(craft_cli.errors.CraftError(f"remote-build error: {err}")) + retcode = 1 except errors.SnapcraftError as err: _emit_error(err) retcode = 1 diff --git a/snapcraft/commands/extensions.py b/snapcraft/commands/extensions.py index 3332192306..0e9d30050a 100644 --- a/snapcraft/commands/extensions.py +++ b/snapcraft/commands/extensions.py @@ -79,9 +79,9 @@ def run(self, parsed_args): ) # Extensions from snapcraft_legacy. - for extension_name in supported_extension_names(): - extension_class = find_extension(extension_name) - extension_name = extension_name.replace("_", "-") + for _extension_name in supported_extension_names(): + extension_class = find_extension(_extension_name) + extension_name = _extension_name.replace("_", "-") extension_bases = list(extension_class.get_supported_bases()) if extension_name in extension_presentation: extension_presentation[extension_name].bases += extension_bases diff --git a/snapcraft/commands/remote.py b/snapcraft/commands/remote.py index 61c765a38a..8eb8cf5f9e 100644 --- a/snapcraft/commands/remote.py +++ b/snapcraft/commands/remote.py @@ -21,7 +21,7 @@ import textwrap from enum import Enum from pathlib import Path -from typing import Optional +from typing import List, Optional from craft_cli import BaseCommand, emit from craft_cli.helptexts import HIDDEN @@ -30,9 +30,8 @@ from snapcraft.errors import MaintenanceBase, SnapcraftError from snapcraft.legacy_cli import run_legacy from snapcraft.parts import yaml_utils -from snapcraft.remote import get_build_id, is_repo -from snapcraft.utils import confirm_with_user, humanize_list -from snapcraft_legacy.internal.remote_build.errors import AcceptPublicUploadError +from snapcraft.remote import AcceptPublicUploadError, RemoteBuilder, is_repo +from snapcraft.utils import confirm_with_user, get_host_architecture, humanize_list _CONFIRMATION_PROMPT = ( "All data sent to remote builders will be publicly available. " @@ -70,7 +69,13 @@ class RemoteBuildCommand(BaseCommand): option, followed by the build number informed when the remote build was originally dispatched. The current state of the remote build for each architecture can be checked using the - --status option.""" + --status option. 
+ + To set a timeout on the remote-build command, use the option + ``--launchpad-timeout=``. The timeout is local, so the build on + launchpad will continue even if the local instance of snapcraft is + interrupted or times out. + """ ) @overrides @@ -102,6 +107,13 @@ def fill_parser(self, parser: argparse.ArgumentParser) -> None: action="store_true", help="acknowledge that uploaded code will be publicly available.", ) + parser.add_argument( + "--launchpad-timeout", + type=int, + default=0, + metavar="", + help="Time in seconds to wait for launchpad to build.", + ) @overrides def run(self, parsed_args: argparse.Namespace) -> None: @@ -135,7 +147,12 @@ def run(self, parsed_args: argparse.Namespace) -> None: self._snapcraft_yaml = yaml_utils.get_snap_project().project_file self._parsed_args = parsed_args # pylint: enable=attribute-defined-outside-init - base = self._get_effective_base() + try: + base = self._get_effective_base() + except MaintenanceBase as base_err: + base = base_err.base + emit.progress(_get_esm_warning_for_base(base), permanent=True) + self._run_new_or_fallback_remote_build(base) def _run_new_or_fallback_remote_build(self, base: str) -> None: @@ -154,7 +171,7 @@ def _run_new_or_fallback_remote_build(self, base: str) -> None: """ # bases newer than core22 must use the new remote-build if base in yaml_utils.CURRENT_BASES - {"core22"}: - emit.debug("Running new remote-build because base is newer than core22.") + emit.debug("Running new remote-build because base is newer than core22") self._run_new_remote_build() return @@ -163,7 +180,7 @@ def _run_new_or_fallback_remote_build(self, base: str) -> None: if strategy == _Strategies.DISABLE_FALLBACK: emit.debug( "Running new remote-build because environment variable " - f"{_STRATEGY_ENVVAR!r} is {_Strategies.DISABLE_FALLBACK.value!r}." + f"{_STRATEGY_ENVVAR!r} is {_Strategies.DISABLE_FALLBACK.value!r}" ) self._run_new_remote_build() return @@ -171,19 +188,19 @@ def _run_new_or_fallback_remote_build(self, base: str) -> None: if strategy == _Strategies.FORCE_FALLBACK: emit.debug( "Running fallback remote-build because environment variable " - f"{_STRATEGY_ENVVAR!r} is {_Strategies.FORCE_FALLBACK.value!r}." + f"{_STRATEGY_ENVVAR!r} is {_Strategies.FORCE_FALLBACK.value!r}" ) run_legacy() return if is_repo(Path().absolute()): emit.debug( - "Running new remote-build because project is in a git repository." + "Running new remote-build because project is in a git repository" ) self._run_new_remote_build() return - emit.debug("Running fallback remote-build.") + emit.debug("Running fallback remote-build") run_legacy() def _get_project_name(self) -> str: @@ -201,7 +218,7 @@ def _get_project_name(self) -> str: if project_name: emit.debug( f"Using project name {project_name!r} from " - f"{str(self._snapcraft_yaml)!r}." 
+ f"{str(self._snapcraft_yaml)!r}" ) return project_name @@ -211,23 +228,59 @@ def _get_project_name(self) -> str: def _run_new_remote_build(self) -> None: """Run new remote-build code.""" - # the build-id will be passed to the new remote-build code as part of #4323 - if self._parsed_args.build_id: - build_id = self._parsed_args.build_id - emit.debug(f"Using build ID {build_id!r} passed as a parameter.") + emit.progress("Setting up launchpad environment") + remote_builder = RemoteBuilder( + app_name="snapcraft", + build_id=self._parsed_args.build_id, + project_name=self._get_project_name(), + architectures=self._determine_architectures(), + project_dir=Path(), + timeout=self._parsed_args.launchpad_timeout, + ) + + if self._parsed_args.status: + remote_builder.print_status() + return + + emit.progress("Looking for existing build") + has_outstanding_build = remote_builder.has_outstanding_build() + if self._parsed_args.recover and not has_outstanding_build: + emit.progress("No build found", permanent=True) + return + + if has_outstanding_build: + emit.progress("Found existing build", permanent=True) + remote_builder.print_status() + + # If recovery specified, monitor build and exit. + if self._parsed_args.recover or confirm_with_user( + "Do you wish to recover this build?", default=True + ): + emit.progress("Building") + remote_builder.monitor_build() + emit.progress("Cleaning") + remote_builder.clean_build() + emit.progress("Build completed", permanent=True) + return + + # Otherwise clean running build before we start a new one. + emit.progress("Cleaning existing build") + remote_builder.clean_build() else: - build_id = get_build_id( - app_name="snapcraft", - project_name=self._get_project_name(), - project_path=Path(), - ) - emit.debug(f"Using computed build ID {build_id!r}.") + emit.progress("No existing build found", permanent=True) - # TODO: use new remote-build code (#4323) - emit.debug( - "Running fallback remote-build because new remote-build is not available." + emit.progress( + "If interrupted, resume with: 'snapcraft remote-build --recover " + f"--build-id {remote_builder.build_id}'", + permanent=True, ) - run_legacy() + emit.progress("Starting build") + remote_builder.start_build() + emit.progress("Building") + remote_builder.monitor_build() + emit.progress("Cleaning") + remote_builder.clean_build() + emit.progress("Build completed", permanent=True) def _get_build_strategy(self) -> Optional[_Strategies]: """Get the build strategy from the envvar `SNAPCRAFT_REMOTE_BUILD_STRATEGY`. @@ -268,7 +321,7 @@ def _get_effective_base(self) -> str: f"Could not determine base from {str(self._snapcraft_yaml)!r}." ) - emit.debug(f"Got base {base!r} from {str(self._snapcraft_yaml)!r}.") + emit.debug(f"Got base {base!r} from {str(self._snapcraft_yaml)!r}") if base in yaml_utils.ESM_BASES: raise MaintenanceBase(base) @@ -279,3 +332,72 @@ def _get_effective_base(self) -> str: ) return base + + def _get_project_build_on_architectures(self) -> List[str]: + """Get a list of build-on architectures from the project's snapcraft.yaml. + + :returns: A list of architectures. 
+ """ + with open(self._snapcraft_yaml, encoding="utf-8") as file: + data = yaml_utils.safe_load(file) + + project_archs = data.get("architectures") + + archs = [] + if project_archs: + for item in project_archs: + if "build-on" in item: + new_arch = item["build-on"] + if isinstance(new_arch, list): + archs.extend(new_arch) + else: + archs.append(new_arch) + + return archs + + def _determine_architectures(self) -> List[str]: + """Determine architectures to build for. + + The build architectures can be set via the `--build-on` parameter or determined + from the build-on architectures listed in the project's snapcraft.yaml. + + :returns: A list of architectures. + + :raises SnapcraftError: If `--build-on` was provided and architectures are + defined in the project's snapcraft.yaml. + """ + project_architectures = self._get_project_build_on_architectures() + if project_architectures and self._parsed_args.build_for: + raise SnapcraftError( + "Cannot use `--build-on` because architectures are already defined in " + "snapcraft.yaml." + ) + + if project_architectures: + archs = project_architectures + elif self._parsed_args.build_for: + archs = self._parsed_args.build_for + else: + # default to typical snapcraft behavior (build for host) + archs = [get_host_architecture()] + + return archs + + +def _get_esm_warning_for_base(base: str) -> str: + """Return a warning appropriate for the base under ESM.""" + channel: Optional[str] = None + match base: + case "core": + channel = "4.x" + version = "4" + case "core18": + channel = "7.x" + version = "7" + case _: + raise RuntimeError(f"Unmatched base {base!r}") + + return ( + f"WARNING: base {base!r} was last supported on Snapcraft {version} available " + f"on the {channel!r} channel." + ) diff --git a/snapcraft/elf/_elf_file.py b/snapcraft/elf/_elf_file.py index 8c674127fe..0dd4bc429f 100644 --- a/snapcraft/elf/_elf_file.py +++ b/snapcraft/elf/_elf_file.py @@ -109,7 +109,7 @@ class _Library: :param soname_cache: The soname cache manager. 
""" - def __init__( + def __init__( # noqa PLR0913 self, *, soname: str, @@ -322,7 +322,9 @@ def is_linker_compatible(self, *, linker_version: str) -> bool: """Determine if the linker will work given the required glibc version.""" version_required = self.get_required_glibc() # TODO: pkg_resources is deprecated in setuptools>66 (CRAFT-1598) - is_compatible = parse_version(version_required) <= parse_version(linker_version) + parsed_version_required = parse_version(version_required) # type: ignore + parsed_linker_version = parse_version(linker_version) # type: ignore + is_compatible = parsed_version_required <= parsed_linker_version emit.debug( f"Check if linker {linker_version!r} works with GLIBC_{version_required} " f"required by {str(self.path)!r}: {is_compatible}" @@ -336,12 +338,12 @@ def get_required_glibc(self) -> str: version_required = "" for lib in self.needed.values(): - for version in lib.versions: - if not version.startswith("GLIBC_"): + for _version in lib.versions: + if not _version.startswith("GLIBC_"): continue - version = version[6:] + version = _version[6:] # TODO: pkg_resources is deprecated in setuptools>66 (CRAFT-1598) - if parse_version(version) > parse_version(version_required): + if parse_version(version) > parse_version(version_required): # type: ignore version_required = version self._required_glibc = version_required diff --git a/snapcraft/errors.py b/snapcraft/errors.py index 18ae86ff93..37148a05ea 100644 --- a/snapcraft/errors.py +++ b/snapcraft/errors.py @@ -138,6 +138,7 @@ def __init__(self, base: str) -> None: resolution=resolution, docs_url="https://snapcraft.io/docs/base-snaps", ) + self.base = base class StoreCredentialsUnauthorizedError(SnapcraftError): diff --git a/snapcraft/extensions/gnome.py b/snapcraft/extensions/gnome.py index a82f48a3e4..ea0a8e8adc 100644 --- a/snapcraft/extensions/gnome.py +++ b/snapcraft/extensions/gnome.py @@ -51,7 +51,7 @@ class GNOME(Extension): - GTK3 Themes. - Common Icon Themes. - Common Sound Themes. - - The GNOME runtime libraries and utilities corresponding to 3.38. + - The GNOME runtime libraries and utilities corresponding to versions from 3.38 to 45. For easier desktop integration, it also configures each application entry with these additional plugs: @@ -155,8 +155,11 @@ def get_root_snippet(self) -> Dict[str, Any]: } }, "layout": { - "/usr/lib/$SNAPCRAFT_ARCH_TRIPLET/webkit2gtk-4.0": { - "bind": "$SNAP/gnome-platform/usr/lib/$SNAPCRAFT_ARCH_TRIPLET/webkit2gtk-4.0" + "/usr/lib/$CRAFT_ARCH_TRIPLET/webkit2gtk-4.0": { + "bind": "$SNAP/gnome-platform/usr/lib/$CRAFT_ARCH_TRIPLET/webkit2gtk-4.0" + }, + "/usr/lib/$CRAFT_ARCH_TRIPLET/webkit2gtk-4.1": { + "bind": "$SNAP/gnome-platform/usr/lib/$CRAFT_ARCH_TRIPLET/webkit2gtk-4.1" }, "/usr/share/xml/iso-codes": { "bind": "$SNAP/gnome-platform/usr/share/xml/iso-codes" diff --git a/snapcraft/legacy_cli.py b/snapcraft/legacy_cli.py index 335688817d..29c66da77a 100644 --- a/snapcraft/legacy_cli.py +++ b/snapcraft/legacy_cli.py @@ -25,7 +25,7 @@ import snapcraft_legacy from snapcraft_legacy.cli import legacy -_LIB_NAMES = ("craft_parts", "craft_providers", "craft_store") +_LIB_NAMES = ("craft_parts", "craft_providers", "craft_store", "snapcraft.remote") _ORIGINAL_LIB_NAME_LOG_LEVEL: Dict[str, int] = {} diff --git a/snapcraft/linters/library_linter.py b/snapcraft/linters/library_linter.py index a6c5976919..5c84ad3501 100644 --- a/snapcraft/linters/library_linter.py +++ b/snapcraft/linters/library_linter.py @@ -15,6 +15,8 @@ # along with this program. If not, see . 
"""Library linter implementation.""" +import re +import subprocess from pathlib import Path from typing import List, Set @@ -24,12 +26,16 @@ from snapcraft.elf import ElfFile, SonameCache, elf_utils from snapcraft.elf import errors as elf_errors -from .base import Linter, LinterIssue, LinterResult +from .base import Linter, LinterIssue, LinterResult, Optional class LibraryLinter(Linter): """Linter for dynamic library availability in snap.""" + def __init__(self, *args, **kwargs) -> None: + super().__init__(*args, **kwargs) + self._ld_config_cache: dict[str, Path] = {} + @staticmethod def get_categories() -> List[str]: """Get the specific sub-categories that can be filtered against.""" @@ -52,6 +58,8 @@ def run(self) -> List[LinterIssue]: all_libraries: Set[Path] = set() used_libraries: Set[Path] = set() + self._generate_ld_config_cache() + for elf_file in elf_files: # Skip linting files listed in the ignore list for the main "library" # filter. @@ -101,6 +109,54 @@ def run(self) -> List[LinterIssue]: return issues + def _generate_ld_config_cache(self) -> None: + """Generate a cache of ldconfig output that maps library names to paths.""" + # Match lines like: + # libcurl.so.4 (libc6,x86-64) => /lib/x86_64-linux-gnu/libcurl.so.4 + # Ignored any architecture in it, may be a problem in the future? + ld_regex = re.compile(r"^\s*(\S+)\s+\(.*\)\s+=>\s+(\S+)$") + + try: + output = subprocess.run( + ["ldconfig", "-N", "-p"], + check=True, + stdout=subprocess.PIPE, + ) + except subprocess.CalledProcessError: + return + + for line in output.stdout.decode("UTF-8").splitlines(): + match = ld_regex.match(line) + if match: + self._ld_config_cache[match.group(1)] = Path(match.group(2)) + + def _find_deb_package(self, library_name: str) -> Optional[str]: + """Find the deb package that provides a library. + + :param library_name: The filename of the library to find. + + :returns: the corresponding deb package name, or None if the library + is not provided by any system package. + """ + if library_name in self._ld_config_cache: + # Must be resolved to an absolute path for dpkg to find it + library_absolute_path = self._ld_config_cache[library_name].resolve() + try: + output = subprocess.run( + ["dpkg", "-S", library_absolute_path.as_posix()], + check=True, + stdout=subprocess.PIPE, + ) + except subprocess.CalledProcessError: + # If the specified file doesn't belong to any package, the + # call will trigger an exception. + return None + except FileNotFoundError: + # In case that dpkg isn't available + return None + return output.stdout.decode("UTF-8").split(":", maxsplit=1)[0] + return None + def _check_dependencies_satisfied( self, elf_file: ElfFile, @@ -132,11 +188,15 @@ def _check_dependencies_satisfied( if path in dependency.parents: break else: + deb_package = self._find_deb_package(dependency.name) + message = f"missing dependency {dependency.name!r}." 
+ if deb_package: + message += f" (provided by '{deb_package}')" issue = LinterIssue( name=self._name, result=LinterResult.WARNING, filename=str(elf_file.path), - text=f"missing dependency {dependency.name!r}.", + text=message, url="https://snapcraft.io/docs/linters-library", ) issues.append(issue) diff --git a/snapcraft/meta/appstream.py b/snapcraft/meta/appstream.py index f89fe1e1ae..de4074b5dc 100644 --- a/snapcraft/meta/appstream.py +++ b/snapcraft/meta/appstream.py @@ -137,7 +137,7 @@ def _get_transformed_dom(path: str): def _get_dom(path: str) -> lxml.etree.ElementTree: try: - return lxml.etree.parse(path) + return lxml.etree.parse(path) # noqa S320 except OSError as err: raise errors.SnapcraftError(str(err)) from err except lxml.etree.ParseError as err: @@ -145,7 +145,7 @@ def _get_dom(path: str) -> lxml.etree.ElementTree: def _get_xslt(): - xslt = lxml.etree.parse(StringIO(_XSLT)) + xslt = lxml.etree.parse(StringIO(_XSLT)) # noqa S320 return lxml.etree.XSLT(xslt) diff --git a/snapcraft/meta/manifest.py b/snapcraft/meta/manifest.py index cf6094aafc..4ae5925cd0 100644 --- a/snapcraft/meta/manifest.py +++ b/snapcraft/meta/manifest.py @@ -67,7 +67,7 @@ class Config: # pylint: disable=too-few-public-methods alias_generator = lambda s: s.replace("_", "-") # noqa: E731 -def write( +def write( # noqa PLR0913 project: Project, prime_dir: Path, *, diff --git a/snapcraft/meta/snap_yaml.py b/snapcraft/meta/snap_yaml.py index 80290e8a55..8c526db39f 100644 --- a/snapcraft/meta/snap_yaml.py +++ b/snapcraft/meta/snap_yaml.py @@ -166,9 +166,9 @@ def get_content_dirs(self, installed_path: Path) -> Set[Path]: """Obtain the slot's content directories.""" content_dirs: Set[Path] = set() - for path in self.read + self.write: + for path_ in self.read + self.write: # Strip leading "$SNAP" and "/". - path = re.sub(r"^\$SNAP", "", path) + path = re.sub(r"^\$SNAP", "", path_) path = re.sub(r"^/", "", path) path = re.sub(r"^./", "", path) content_dirs.add(installed_path / path) diff --git a/snapcraft/pack.py b/snapcraft/pack.py index 466ebfd214..17bf38fec4 100644 --- a/snapcraft/pack.py +++ b/snapcraft/pack.py @@ -35,10 +35,13 @@ def _verify_snap(directory: Path) -> None: universal_newlines=True, ) except subprocess.CalledProcessError as err: - msg = f"Cannot pack snap file: {err!s}" + stderr = None if err.stderr: - msg += f" ({err.stderr.strip()!s})" - raise errors.SnapcraftError(msg) + stderr = err.stderr.strip() + msg = f"Cannot pack snap: {stderr!s}" + else: + msg = "Cannot pack snap" + raise errors.SnapcraftError(msg, details=f"{err!s}") from err def _get_directory(output: Optional[str]) -> Path: @@ -135,10 +138,11 @@ def pack_snap( command, capture_output=True, check=True, universal_newlines=True ) except subprocess.CalledProcessError as err: - msg = f"Cannot pack snap file: {err!s}" + msg = f"{err!s}" + details = None if err.stderr: - msg += f" ({err.stderr.strip()!s})" - raise errors.SnapcraftError(msg) + details = err.stderr.strip() + raise errors.SnapcraftError(msg, details=details) from err snap_filename = Path(str(proc.stdout).partition(":")[2].strip()).name return snap_filename diff --git a/snapcraft/parts/grammar.py b/snapcraft/parts/grammar.py index af20e61abf..78efa6ba3a 100644 --- a/snapcraft/parts/grammar.py +++ b/snapcraft/parts/grammar.py @@ -64,12 +64,12 @@ def process_parts( :param yaml_data: unprocessed snapcraft.yaml. :returns: process snapcraft.yaml. 
""" + + def self_check(value: Any) -> bool: + return value == value # pylint: disable=comparison-with-itself # noqa PLR0124 + # TODO: make checker optional in craft-grammar. - processor = GrammarProcessor( - arch=arch, - target_arch=target_arch, - checker=lambda x: x == x, # pylint: disable=comparison-with-itself - ) + processor = GrammarProcessor(arch=arch, target_arch=target_arch, checker=self_check) for part_name in parts_yaml_data: parts_yaml_data[part_name] = process_part( diff --git a/snapcraft/parts/lifecycle.py b/snapcraft/parts/lifecycle.py index b3c7a57c52..d470a0bb8a 100644 --- a/snapcraft/parts/lifecycle.py +++ b/snapcraft/parts/lifecycle.py @@ -112,7 +112,7 @@ def run(command_name: str, parsed_args: "argparse.Namespace") -> None: ) -def _run_command( +def _run_command( # noqa PLR0913 # pylint: disable=too-many-branches, too-many-statements command_name: str, *, project: Project, @@ -210,6 +210,11 @@ def _run_command( emit.progress(msg, permanent=True) launch_shell() raise errors.SnapcraftError(msg) from err + except errors.SnapcraftError as err: + if parsed_args.debug: + emit.progress(str(err), permanent=True) + launch_shell() + raise except Exception as err: if parsed_args.debug: emit.progress(str(err), permanent=True) @@ -217,7 +222,7 @@ def _run_command( raise errors.SnapcraftError(str(err)) from err -def _run_lifecycle_and_pack( +def _run_lifecycle_and_pack( # noqa PLR0913 lifecycle: PartsLifecycle, *, command_name: str, @@ -368,7 +373,7 @@ def _clean_provider(project: Project, parsed_args: "argparse.Namespace") -> None # pylint: disable-next=too-many-branches, too-many-statements -def _run_in_provider( +def _run_in_provider( # noqa PLR0915 project: Project, command_name: str, parsed_args: "argparse.Namespace" ) -> None: """Pack image in provider instance.""" @@ -465,7 +470,8 @@ def _run_in_provider( except subprocess.CalledProcessError as err: raise errors.SnapcraftError( f"Failed to execute {command_name} in instance.", - details=( + details=err.stderr.strip() if err.stderr else None, + resolution=( "Run the same command again with --debug to shell into " "the environment if you wish to introspect this failure." ), diff --git a/snapcraft/parts/parts.py b/snapcraft/parts/parts.py index 24be7f1baf..9e66f377cd 100644 --- a/snapcraft/parts/parts.py +++ b/snapcraft/parts/parts.py @@ -18,12 +18,13 @@ import pathlib import subprocess +import types from typing import Any, Dict, List, Optional, Set import craft_parts from craft_archives import repo from craft_cli import emit -from craft_parts import ActionType, Part, ProjectDirs, Step +from craft_parts import Action, ActionType, Part, ProjectDirs, Step from craft_parts.packages import Repository from xdg import BaseDirectory # type: ignore @@ -52,7 +53,7 @@ class PartsLifecycle: :raises PartsLifecycleError: On error initializing the parts lifecycle. 
""" - def __init__( + def __init__( # noqa PLR0913 self, all_parts: Dict[str, Any], *, @@ -171,13 +172,11 @@ def run( self._install_package_repositories() - emit.progress("Executing parts lifecycle...") - with self._lcm.action_executor() as aex: for action in actions: # Workaround until canonical/craft-parts#540 is fixed if action.step == target_step and rerun_step: - action = craft_parts.Action( + action = craft_parts.Action( # noqa PLW2901 part_name=action.part_name, step=action.step, action_type=ActionType.RERUN, @@ -185,16 +184,13 @@ def run( project_vars=action.project_vars, properties=action.properties, ) - message = _action_message(action) - emit.progress(f"Executing parts lifecycle: {message}") - with emit.open_stream("Executing action") as stream: + message = _get_parts_action_message(action) + with emit.open_stream(message) as stream: aex.execute(action, stdout=stream, stderr=stream) - emit.progress(f"Executed: {message}", permanent=True) if shell_after: launch_shell() - emit.progress("Executed parts lifecycle", permanent=True) except RuntimeError as err: raise RuntimeError(f"Parts processing internal error: {err}") from err except OSError as err: @@ -298,42 +294,54 @@ def launch_shell(*, cwd: Optional[pathlib.Path] = None) -> None: subprocess.run(["bash"], check=False, cwd=cwd) -def _action_message(action: craft_parts.Action) -> str: - msg = { - Step.PULL: { - ActionType.RUN: "pull", - ActionType.RERUN: "repull", - ActionType.SKIP: "skip pull", - ActionType.UPDATE: "update sources for", - }, - Step.OVERLAY: { - ActionType.RUN: "overlay", - ActionType.RERUN: "re-overlay", - ActionType.SKIP: "skip overlay", - ActionType.UPDATE: "update overlay for", - ActionType.REAPPLY: "reapply", - }, - Step.BUILD: { - ActionType.RUN: "build", - ActionType.RERUN: "rebuild", - ActionType.SKIP: "skip build", - ActionType.UPDATE: "update build for", - }, - Step.STAGE: { - ActionType.RUN: "stage", - ActionType.RERUN: "restage", - ActionType.SKIP: "skip stage", - }, - Step.PRIME: { - ActionType.RUN: "prime", - ActionType.RERUN: "re-prime", - ActionType.SKIP: "skip prime", - }, +ACTION_MESSAGES = types.MappingProxyType( + { + Step.PULL: types.MappingProxyType( + { + ActionType.RUN: "Pulling", + ActionType.RERUN: "Repulling", + ActionType.SKIP: "Skipping pull for", + ActionType.UPDATE: "Updating sources for", + } + ), + Step.OVERLAY: types.MappingProxyType( + { + ActionType.RUN: "Overlaying", + ActionType.RERUN: "Re-overlaying", + ActionType.SKIP: "Skipping overlay for", + ActionType.UPDATE: "Updating overlay for", + ActionType.REAPPLY: "Reapplying", + } + ), + Step.BUILD: types.MappingProxyType( + { + ActionType.RUN: "Building", + ActionType.RERUN: "Rebuilding", + ActionType.SKIP: "Skipping build for", + ActionType.UPDATE: "Updating build for", + } + ), + Step.STAGE: types.MappingProxyType( + { + ActionType.RUN: "Staging", + ActionType.RERUN: "Restaging", + ActionType.SKIP: "Skipping stage for", + } + ), + Step.PRIME: types.MappingProxyType( + { + ActionType.RUN: "Priming", + ActionType.RERUN: "Repriming", + ActionType.SKIP: "Skipping prime for", + } + ), } +) - message = f"{msg[action.step][action.action_type]} {action.part_name}" +def _get_parts_action_message(action: Action) -> str: + """Get a user-readable message for a particular craft-parts action.""" + message = f"{ACTION_MESSAGES[action.step][action.action_type]} {action.part_name}" if action.reason: - message += f" ({action.reason})" - + return message + f" ({action.reason})" return message diff --git a/snapcraft/parts/plugins/_ros.py 
b/snapcraft/parts/plugins/_ros.py index 2379e19197..0340883c33 100644 --- a/snapcraft/parts/plugins/_ros.py +++ b/snapcraft/parts/plugins/_ros.py @@ -68,7 +68,7 @@ def _parse_rosdep_resolve_dependencies( dependencies: Dict[str, Set[str]] = {} dependency_set = None for line in lines: - line = line.strip() + line = line.strip() # noqa PLW2901 if line.startswith("#"): key = line.strip("# ") dependencies[key] = set() @@ -223,7 +223,7 @@ def _get_stage_runtime_dependencies_commands(self) -> List[str]: @overrides def get_build_commands(self) -> List[str]: return ( - [ + [ # noqa S608 (false positive on SQL injection) "if [ ! -f /etc/ros/rosdep/sources.list.d/20-default.list ]; then", # Preserve http(s)_proxy env var in root for remote-build proxy since rosdep # doesn't support proxy @@ -317,7 +317,7 @@ def get_installed_dependencies(installed_packages_path: str) -> Set[str]: @click.option("--target-arch", envvar="CRAFT_TARGET_ARCH", required=True) @click.option("--stage-cache-dir", required=True) @click.option("--base", required=True) -def stage_runtime_dependencies( +def stage_runtime_dependencies( # noqa: PLR0913 (too many arguments) part_src: str, part_install: str, ros_version: str, @@ -336,7 +336,7 @@ def stage_runtime_dependencies( catkin_packages.find_packages(part_install).values(), ) for pkg in catkin_packages.find_packages(part_src).values(): - pkg = cast(catkin_pkg.package.Package, pkg) + pkg = cast(catkin_pkg.package.Package, pkg) # noqa PLW2901 # Evaluate the conditions of all dependencies pkg.evaluate_conditions( { diff --git a/snapcraft/providers.py b/snapcraft/providers.py index 8fc682d31e..268904ef3e 100644 --- a/snapcraft/providers.py +++ b/snapcraft/providers.py @@ -199,7 +199,13 @@ def get_base_configuration( compatibility_tag=f"snapcraft-{bases.BuilddBase.compatibility_tag}.0", environment=environment, hostname=instance_name, - snaps=[Snap(name=snap_name, channel=snap_channel, classic=True)], + snaps=[ + Snap( + name=snap_name, + channel=snap_channel, + classic=True, + ) + ], # Requirement for apt gpg and version:git packages=["gnupg", "dirmngr", "git"], ) diff --git a/snapcraft/remote/__init__.py b/snapcraft/remote/__init__.py index b3e53455af..1fd5f8125c 100644 --- a/snapcraft/remote/__init__.py +++ b/snapcraft/remote/__init__.py @@ -16,14 +16,34 @@ """Remote-build and related utilities.""" -from .errors import GitError, RemoteBuildError +from .errors import ( + AcceptPublicUploadError, + GitError, + LaunchpadHttpsError, + RemoteBuildError, + RemoteBuildTimeoutError, + UnsupportedArchitectureError, +) from .git import GitRepo, is_repo -from .utils import get_build_id +from .launchpad import LaunchpadClient +from .remote_builder import RemoteBuilder +from .utils import get_build_id, humanize_list, rmtree, validate_architectures +from .worktree import WorkTree __all__ = [ "get_build_id", + "humanize_list", "is_repo", + "rmtree", + "validate_architectures", + "AcceptPublicUploadError", "GitError", "GitRepo", + "LaunchpadClient", + "LaunchpadHttpsError", + "RemoteBuilder", "RemoteBuildError", + "RemoteBuildTimeoutError", + "UnsupportedArchitectureError", + "WorkTree", ] diff --git a/snapcraft/remote/errors.py b/snapcraft/remote/errors.py index 2b673e11dd..c8d67d46fc 100644 --- a/snapcraft/remote/errors.py +++ b/snapcraft/remote/errors.py @@ -17,7 +17,7 @@ """Remote build errors.""" from dataclasses import dataclass -from typing import Optional +from typing import List, Optional @dataclass(repr=True) @@ -50,3 +50,55 @@ def __init__(self, message: str) -> None: details = message 
super().__init__(brief=brief, details=details) + + +class RemoteBuildTimeoutError(RemoteBuildError): + """Remote-build timed out.""" + + def __init__(self, recovery_command: str) -> None: + brief = "Remote build command timed out." + details = ( + "Build may still be running on Launchpad and can be recovered " + f"with {recovery_command!r}." + ) + + super().__init__(brief=brief, details=details) + + +class LaunchpadHttpsError(RemoteBuildError): + """Launchpad connectivity error.""" + + def __init__(self) -> None: + brief = "Failed to connect to Launchpad API service." + details = "Verify connectivity to https://api.launchpad.net and retry build." + + super().__init__(brief=brief, details=details) + + +class UnsupportedArchitectureError(RemoteBuildError): + """Unsupported architecture error.""" + + def __init__(self, architectures: List[str]) -> None: + brief = "Architecture not supported by the remote builder." + details = ( + "The following architectures are not supported by the remote builder: " + f"{architectures}.\nPlease remove them from the " + "architecture list and try again." + ) + + super().__init__(brief=brief, details=details) + + +class AcceptPublicUploadError(RemoteBuildError): + """Accept public upload error.""" + + def __init__(self) -> None: + brief = "Cannot upload data to build servers." + details = ( + "Remote build needs explicit acknowledgement that data sent to build " + "servers is public.\n" + "In non-interactive runs, please use the option " + "`--launchpad-accept-public-upload`." + ) + + super().__init__(brief=brief, details=details) diff --git a/snapcraft/remote/git.py b/snapcraft/remote/git.py index d48500bd76..f6b67b8e35 100644 --- a/snapcraft/remote/git.py +++ b/snapcraft/remote/git.py @@ -18,6 +18,7 @@ import logging from pathlib import Path +from typing import Optional import pygit2 @@ -136,7 +137,7 @@ def _init_repo(self) -> None: :raises GitError: if the repo cannot be initialized """ - logger.debug("Initializing git repository in {str(self.path)!r}") + logger.debug("Initializing git repository in %r", str(self.path)) try: pygit2.init_repository(self.path) @@ -145,27 +146,40 @@ def _init_repo(self) -> None: f"Could not initialize a git repository in {str(self.path)!r}." ) from error - def push_url(self, remote_url: str, remote_branch: str, ref: str = "HEAD") -> None: + def push_url( + self, + remote_url: str, + remote_branch: str, + ref: str = "HEAD", + token: Optional[str] = None, + ) -> None: """Push a reference to a branch on a remote url. :param remote_url: the remote repo URL to push to :param remote_branch: the branch on the remote to push to :param ref: name of shorthand ref to push (i.e. a branch, tag, or `HEAD`) + :param token: token in the url to hide in logs and errors :raises GitError: if the ref cannot be resolved or pushed """ resolved_ref = self._resolve_ref(ref) refspec = f"{resolved_ref}:refs/heads/{remote_branch}" + # hide secret tokens embedded in a url + if token: + stripped_url = remote_url.replace(token, "") + else: + stripped_url = remote_url + logger.debug( - "Pushing %r to remote %r with refspec %r.", ref, remote_url, refspec + "Pushing %r to remote %r with refspec %r.", ref, stripped_url, refspec ) try: self._repo.remotes.create_anonymous(remote_url).push([refspec]) except pygit2.GitError as error: raise GitError( - f"Could not push {ref!r} to {remote_url!r} with refspec {refspec!r} " + f"Could not push {ref!r} to {stripped_url!r} with refspec {refspec!r} " f"for the git repository in {str(self.path)!r}." 
) from error diff --git a/snapcraft/remote/launchpad.py b/snapcraft/remote/launchpad.py new file mode 100644 index 0000000000..b5451b41e5 --- /dev/null +++ b/snapcraft/remote/launchpad.py @@ -0,0 +1,487 @@ +# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*- +# +# Copyright (C) 2019, 2023 Canonical Ltd +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 3 as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . + + +"""Class to manage remote builds on Launchpad.""" + +import gzip +import logging +import shutil +import time +from datetime import datetime, timedelta, timezone +from pathlib import Path +from typing import Any, Dict, List, Optional, Sequence, cast +from urllib.parse import unquote, urlsplit + +import requests +from launchpadlib.launchpad import Launchpad +from lazr import restfulclient +from lazr.restfulclient.resource import Entry +from xdg import BaseDirectory + +from . import GitRepo, errors + +_LP_POLL_INTERVAL = 30 +_LP_SUCCESS_STATUS = "Successfully built" +_LP_FAIL_STATUS = "Failed to build" + +logger = logging.getLogger(__name__) + + +def _is_build_pending(build: Dict[str, Any]) -> bool: + """Check if build is pending. + + Possible values: + - Needs building + - Successfully built + - Failed to build + - Dependency wait + - Chroot problem + - Build for superseded Source + - Currently building + - Failed to upload + - Uploading build + - Cancelling build + - Cancelled build + """ + if _is_build_status_success(build) or _is_build_status_failure(build): + return False + + return True + + +def _is_build_status_success(build: Dict[str, Any]) -> bool: + build_state = build["buildstate"] + return build_state == "Successfully built" + + +def _is_build_status_failure(build: Dict[str, Any]) -> bool: + build_state = build["buildstate"] + return build_state in ["Failed to build", "Cancelled build"] + + +def _get_url_basename(url: str): + path = urlsplit(url).path + return unquote(path).split("/")[-1] + + +class LaunchpadClient: + """Launchpad remote builder operations. + + :param app_name: Name of the application. + :param build_id: Unique identifier for the build. + :param project_name: Name of the project. + :param architectures: List of architectures to build on. + :param timeout: Time in seconds to wait for the build to complete. 
+ """ + + def __init__( + self, + *, + app_name: str, + build_id: str, + project_name: str, + architectures: Sequence[str], + timeout: int = 0, + ) -> None: + self._app_name = app_name + + self._cache_dir = self._create_cache_directory() + self._data_dir = self._create_data_directory() + self._credentials = self._data_dir / "credentials" + + self.architectures = architectures + self._build_id = build_id + self._lp_name = build_id + self._project_name = project_name + + self._lp: Launchpad = self._login() + self.user = self._lp.me.name # type: ignore + + # calculate deadline from the timeout + if timeout > 0: + self._deadline = int(time.time()) + timeout + else: + self._deadline = 0 + + @property + def architectures(self) -> Sequence[str]: + """Get architectures.""" + return self._architectures + + @architectures.setter + def architectures(self, architectures: Sequence[str]) -> None: + self._lp_processors: Optional[Sequence[str]] = None + + if architectures: + self._lp_processors = ["/+processors/" + a for a in architectures] + + self._architectures = architectures + + @property + def user(self) -> str: + """Get the launchpad user.""" + return self._lp_user + + @user.setter + def user(self, user: str) -> None: + self._lp_user = user + self._lp_owner = f"/~{user}" + + def _check_timeout_deadline(self) -> None: + if self._deadline <= 0: + return + + if int(time.time()) >= self._deadline: + raise errors.RemoteBuildTimeoutError( + recovery_command=( + f"{self._app_name} remote-build --recover --build-id {self._build_id}" + ) + ) + + def _create_data_directory(self) -> Path: + data_dir = Path( + BaseDirectory.save_data_path(self._app_name, "provider", "launchpad") + ) + data_dir.mkdir(mode=0o700, exist_ok=True) + return data_dir + + def _create_cache_directory(self) -> Path: + cache_dir = Path( + BaseDirectory.save_cache_path(self._app_name, "provider", "launchpad") + ) + cache_dir.mkdir(mode=0o700, exist_ok=True) + return cache_dir + + def _fetch_artifacts(self, snap: Optional[Entry]) -> None: + """Fetch build arftifacts (logs and snaps).""" + builds = self._get_builds(snap) + + logger.info("Downloading artifacts...") + for build in builds: + self._download_build_artifacts(build) + self._download_log(build) + + def _get_builds_collection_entry(self, snap: Optional[Entry]) -> Optional[Entry]: + logger.debug("Fetching builds collection information from Launchpad...") + if snap: + url = cast(str, snap.builds_collection_link) + return self._lp_load_url(url) + return None + + def _get_builds(self, snap: Optional[Entry]) -> List[Dict[str, Any]]: + builds_collection = self._get_builds_collection_entry(snap) + if builds_collection is None: + return [] + + return cast(List[Dict[str, Any]], builds_collection.entries) + + def _get_snap(self) -> Optional[Entry]: + try: + return self._lp.snaps.getByName( # type: ignore + name=self._lp_name, owner=self._lp_owner + ) + except restfulclient.errors.NotFound: # type: ignore + return None + + def _issue_build_request(self, snap: Entry) -> Entry: + dist = self._lp.distributions["ubuntu"] # type: ignore + archive = dist.main_archive + return snap.requestBuilds( # type: ignore + archive=archive, + pocket="Updates", + ) + + def _lp_load_url(self, url: str) -> Entry: + """Load Launchpad url with a retry in case the connection is lost.""" + try: + return self._lp.load(url) + except ConnectionResetError: + self._lp = self._login() + return self._lp.load(url) + + def _wait_for_build_request_acceptance(self, build_request: Entry) -> None: + # Not to be confused with the 
actual build(s), this is + # ensuring that Launchpad accepts the build request. + while build_request.status == "Pending": + # Check to see if we've run out of time. + self._check_timeout_deadline() + + logger.info("Waiting on Launchpad build request...") + logger.debug( + "status=%s error=%s", build_request.status, build_request.error_message + ) + + time.sleep(1) + + # Refresh status. + build_request.lp_refresh() + + if build_request.status == "Failed": + # Build request failed. + self.cleanup() + raise errors.RemoteBuildError(cast(str, build_request.error_message)) + + if build_request.status != "Completed": + # Shouldn't end up here. + self.cleanup() + raise errors.RemoteBuildError( + f"Unknown builder error - reported status: {build_request.status}" + ) + + if not build_request.builds.entries: # type: ignore + # Shouldn't end up here either. + self.cleanup() + raise errors.RemoteBuildError( + "Unknown builder error - no build entries found." + ) + + build_number = _get_url_basename(cast(str, build_request.self_link)) + logger.info("Build request accepted: %s", build_number) + + def _login(self) -> Launchpad: + """Login to launchpad.""" + try: + return Launchpad.login_with( + f"{self._app_name} remote-build", + "production", + self._cache_dir, + credentials_file=str(self._credentials), + version="devel", + ) + except (ConnectionRefusedError, TimeoutError) as error: + raise errors.LaunchpadHttpsError() from error + + def get_git_repo_path(self) -> str: + """Get path to the git repository.""" + return f"~{self._lp_user}/+git/{self._lp_name}" + + def get_git_https_url(self, token: Optional[str] = None) -> str: + """Get url for launchpad repository.""" + if token: + return ( + f"https://{self._lp_user}:{token}@git.launchpad.net/" + f"~{self._lp_user}/+git/{self._lp_name}/" + ) + + return ( + f"https://{self._lp_user}@git.launchpad.net/" + f"~{self._lp_user}/+git/{self._lp_name}/" + ) + + def _create_git_repository(self, force=False) -> Entry: + """Create git repository.""" + if force: + self._delete_git_repository() + + logger.info( + "creating git repo: name=%s, owner=%s, target=%s", + self._lp_name, + self._lp_owner, + self._lp_owner, + ) + return self._lp.git_repositories.new( # type: ignore + name=self._lp_name, owner=self._lp_owner, target=self._lp_owner + ) + + def _delete_git_repository(self) -> None: + """Delete git repository.""" + git_path = self.get_git_repo_path() + git_repo = self._lp.git_repositories.getByPath(path=git_path) # type: ignore + + # git_repositories.getByPath returns None if git repo does not exist. + if git_repo is None: + return + + logger.info("Deleting source repository from Launchpad...") + git_repo.lp_delete() + + def _create_snap(self, force=False) -> Entry: + """Create a snap recipe. 
Use force=true to replace existing snap.""" + git_url = self.get_git_https_url() + + if force: + self._delete_snap() + + optional_kwargs = {} + if self._lp_processors: + optional_kwargs["processors"] = self._lp_processors + + logger.info("Registering snap job on Launchpad...") + logger.debug( + "url=https://launchpad.net/%s/+snap/%s", self._lp_owner, self._lp_name + ) + + return self._lp.snaps.new( # type: ignore + name=self._lp_name, + owner=self._lp_owner, + git_repository_url=git_url, + git_path="main", + auto_build=False, + auto_build_archive="/ubuntu/+archive/primary", + auto_build_pocket="Updates", + **optional_kwargs, + ) + + def _delete_snap(self) -> None: + """Remove snap info and all associated files.""" + snap = self._get_snap() + if snap is None: + return + + logger.info("Removing snap job from Launchpad...") + snap.lp_delete() + + def cleanup(self) -> None: + """Delete snap and git repository from launchpad.""" + self._delete_snap() + self._delete_git_repository() + + def start_build(self) -> None: + """Start build with specified timeout (time.time() in seconds).""" + snap = self._create_snap(force=True) + + logger.info("Issuing build request on Launchpad...") + build_request = self._issue_build_request(snap) + self._wait_for_build_request_acceptance(build_request) + + def monitor_build(self, interval: int = _LP_POLL_INTERVAL) -> None: + """Check build progress, and download artifacts when ready.""" + snap = self._get_snap() + + while True: + # Check to see if we've run out of time. + self._check_timeout_deadline() + + builds = self._get_builds(snap) + pending = False + statuses = [] + for build in builds: + state = build["buildstate"] + arch = build["arch_tag"] + statuses.append(f"{arch}: {state}") + + if _is_build_pending(build): + pending = True + + logger.info(", ".join(statuses)) + + if pending is False: + break + + time.sleep(interval) + + # Build is complete - download build artifacts. + self._fetch_artifacts(snap) + + def get_build_status(self) -> Dict[str, str]: + """Get status of builds.""" + snap = self._get_snap() + builds = self._get_builds(snap) + build_status: Dict[str, str] = {} + for build in builds: + state = build["buildstate"] + arch = build["arch_tag"] + build_status[arch] = state + + return build_status + + def _get_logfile_name(self, arch: str) -> str: + index = 0 + base_name = f"{self._project_name}_{arch}" + log_name = f"{base_name}.txt" + + while Path(log_name).is_file(): + index += 1 + log_name = f"{base_name}.{index}.txt" + + return log_name + + def _download_log(self, build: Dict[str, Any]) -> None: + url = build["build_log_url"] + arch = build["arch_tag"] + if url is None: + logger.info("No build log available for %r.", arch) + else: + log_name = self._get_logfile_name(arch) + self._download_file(url=url, dst=log_name, gunzip=True) + logger.info("Build log available at %r.", log_name) + + if _is_build_status_failure(build): + logger.error("Build failed for arch %r.", arch) + + def _download_file(self, *, url: str, dst: str, gunzip: bool = False) -> None: + # TODO: consolidate with, and use indicators.download_requests_stream + logger.info("Downloading: %s", url) + try: + with requests.get(url, stream=True, timeout=3600) as response: + # Wrap response with gzipfile if gunzip is requested. 
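+                # Only the log downloads in _download_log pass gunzip=True; those
+                # Launchpad log files are served gzip-compressed, so the raw HTTP
+                # stream is unpacked on the fly before being written to dst.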
+ stream = response.raw + if gunzip: + stream = gzip.GzipFile(fileobj=stream) + with open(dst, "wb") as f_dst: + shutil.copyfileobj(stream, f_dst) + response.raise_for_status() + except requests.exceptions.RequestException as error: + logger.error("Error downloading %s: %s", url, str(error)) + + def _download_build_artifacts(self, build: Dict[str, Any]) -> None: + arch = build["arch_tag"] + snap_build = self._lp_load_url(build["self_link"]) + urls = snap_build.getFileUrls() # type: ignore + + if not urls: + logger.error("Snap file not available for arch %r.", arch) + return + + for url in urls: + file_name = _get_url_basename(url) + + self._download_file(url=url, dst=file_name) + + if file_name.endswith(".snap"): + logger.info("Snapped %s", file_name) + else: + logger.info("Fetched %s", file_name) + + def has_outstanding_build(self) -> bool: + """Check if there is an existing build configured on Launchpad.""" + snap = self._get_snap() + return snap is not None + + def push_source_tree(self, repo_dir: Path) -> None: + """Push source tree to Launchpad.""" + lp_repo = self._create_git_repository(force=True) + # This token will only be used once, immediately after issuing it, + # so it can have a short expiry time. It's not a problem if it + # expires before the build completes, or even before the push + # completes. + date_expires = datetime.now(timezone.utc) + timedelta(minutes=1) + token = lp_repo.issueAccessToken( # type: ignore + description=f"{self._app_name} remote-build for {self._build_id}", + scopes=["repository:push"], + date_expires=date_expires.isoformat(), + ) + + url = self.get_git_https_url(token=token) + stripped_url = self.get_git_https_url( + token="" # noqa: S106 (hardcoded-password) + ) + + logger.info("Sending build data to Launchpad: %s", stripped_url) + + repo = GitRepo(repo_dir) + repo.push_url(url, "main", "HEAD", token) diff --git a/snapcraft/remote/remote_builder.py b/snapcraft/remote/remote_builder.py new file mode 100644 index 0000000000..c19d0093a0 --- /dev/null +++ b/snapcraft/remote/remote_builder.py @@ -0,0 +1,135 @@ +# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*- +# +# Copyright 2023 Canonical Ltd. +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 3 as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . + +"""Manager for creating, monitoring, and cleaning remote builds.""" + +import logging +from pathlib import Path +from typing import List, Optional + +from .launchpad import LaunchpadClient +from .utils import get_build_id, humanize_list, validate_architectures +from .worktree import WorkTree + +logger = logging.getLogger(__name__) + + +class RemoteBuilder: + """Remote builder class. + + :param app_name: Name of the application. + :param build_id: Unique identifier for the build. + :param project_name: Name of the project. + :param architectures: List of architectures to build on. + :param project_dir: Path of the project. + :param timeout: Time in seconds to wait for the build to complete. + + :raises UnsupportedArchitectureError: if any architecture is not supported + for remote building. 
+ :raises LaunchpadHttpsError: If a connection to Launchpad cannot be established. + """ + + def __init__( # noqa: PLR0913 pylint: disable=too-many-arguments + self, + app_name: str, + build_id: Optional[str], + project_name: str, + architectures: List[str], + project_dir: Path, + timeout: int, + ): + self._app_name = app_name + self._project_name = project_name + self._project_dir = project_dir + + if build_id: + self._build_id = build_id + else: + self._build_id = get_build_id( + app_name=self._app_name, + project_name=self._project_name, + project_path=self._project_dir, + ) + + validate_architectures(architectures) + self._architectures = architectures + + self._worktree = WorkTree( + app_name=self._app_name, + build_id=self._build_id, + project_dir=self._project_dir, + ) + + logger.debug("Setting up launchpad environment.") + + self._lpc = LaunchpadClient( + app_name=self._app_name, + build_id=self._build_id, + project_name=self._project_name, + architectures=self._architectures, + timeout=timeout, + ) + + @property + def build_id(self) -> str: + """Get the build id.""" + return self._build_id + + def print_status(self) -> None: + """Print the status of a remote build in Launchpad.""" + if self._lpc.has_outstanding_build(): + build_status = self._lpc.get_build_status() + for arch, status in build_status.items(): + logger.info("Build status for arch %s: %s", arch, status) + else: + logger.info("No build found.") + + def has_outstanding_build(self) -> bool: + """Check if there is an existing build on Launchpad. + + :returns: True if there is an existing (incomplete) build on Launchpad. + """ + return self._lpc.has_outstanding_build() + + def monitor_build(self) -> None: + """Monitor and periodically log the status of a remote build in Launchpad.""" + logger.info( + "Building snap package for %s. This may take some time to finish.", + humanize_list(self._lpc.architectures, "and", "{}"), + ) + + logger.info("Building...") + self._lpc.monitor_build() + + logger.info("Build complete.") + + def clean_build(self) -> None: + """Clean the cache and Launchpad build.""" + logger.info("Cleaning existing builds and artefacts.") + self._lpc.cleanup() + self._worktree.clean_cache() + + def start_build(self) -> None: + """Start a build in Launchpad. + + A local copy of the project is created and pushed to Launchpad via git. + """ + self._worktree.init_repo() + + logger.debug("Cached project at %s", self._worktree.repo_dir) + self._lpc.push_source_tree(repo_dir=self._worktree.repo_dir) + + self._lpc.start_build() diff --git a/snapcraft/remote/utils.py b/snapcraft/remote/utils.py index 308a01f829..f4140b16ca 100644 --- a/snapcraft/remote/utils.py +++ b/snapcraft/remote/utils.py @@ -16,16 +16,38 @@ """Remote build utilities.""" +import shutil +import stat from functools import partial from hashlib import md5 from pathlib import Path -from typing import List +from typing import Iterable, List + +from .errors import UnsupportedArchitectureError + +_SUPPORTED_ARCHS = ["amd64", "arm64", "armhf", "i386", "ppc64el", "s390x"] + + +def validate_architectures(architectures: List[str]) -> None: + """Validate that architectures are supported for remote building. + + :param architectures: list of architectures to validate + + :raises UnsupportedArchitectureError: if any architecture in the list in not + supported for remote building. 
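+
+    Example (illustrative)::
+
+        validate_architectures(["amd64", "arm64"])    # supported, returns None
+        validate_architectures(["amd64", "riscv64"])  # raises UnsupportedArchitectureError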
+ """ + unsupported_archs = [] + for arch in architectures: + if arch not in _SUPPORTED_ARCHS: + unsupported_archs.append(arch) + if unsupported_archs: + raise UnsupportedArchitectureError(architectures=unsupported_archs) def get_build_id(app_name: str, project_name: str, project_path: Path) -> str: """Get the build id for a project. - The build id is formatted as `snapcraft--`. + The build id is formatted as `--`. The hash is a hash of all files in the project directory. :param app_name: Name of the application. @@ -76,3 +98,57 @@ def _compute_hash(directory: Path) -> str: all_hashes = "".join(hashes).encode() return md5(all_hashes).hexdigest() # noqa: S324 (insecure-hash-function) + + +def humanize_list( + items: Iterable[str], + conjunction: str, + item_format: str = "{!r}", + sort: bool = True, +) -> str: + """Format a list into a human-readable string. + + :param items: list to humanize. + :param conjunction: the conjunction used to join the final element to + the rest of the list (e.g. 'and'). + :param item_format: format string to use per item. + :param sort: if true, sort the list. + """ + if not items: + return "" + + quoted_items = [item_format.format(item) for item in items] + + if sort: + quoted_items = sorted(quoted_items) + + if len(quoted_items) == 1: + return quoted_items[0] + + humanized = ", ".join(quoted_items[:-1]) + + if len(quoted_items) > 2: + humanized += "," + + return f"{humanized} {conjunction} {quoted_items[-1]}" + + +def rmtree(directory: Path) -> None: + """Cross-platform rmtree implementation. + + :param directory: Directory to remove. + """ + shutil.rmtree(str(directory.resolve()), onerror=_remove_readonly) + + +def _remove_readonly(func, filepath, _): + """Shutil onerror function to make read-only files writable. + + Try setting file to writeable if error occurs during rmtree. Known to be required + on Windows where file is not writeable, but it is owned by the user (who can + set file permissions). + + :param filepath: filepath to make writable + """ + Path(filepath).chmod(stat.S_IWRITE) + func(filepath) diff --git a/snapcraft/remote/worktree.py b/snapcraft/remote/worktree.py new file mode 100644 index 0000000000..480bbbf04e --- /dev/null +++ b/snapcraft/remote/worktree.py @@ -0,0 +1,67 @@ +# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*- +# +# Copyright (C) 2019, 2023 Canonical Ltd +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 3 as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . + +"""Manages trees for remote builds.""" + +from pathlib import Path +from shutil import copytree + +from xdg import BaseDirectory + +from .git import GitRepo +from .utils import rmtree + + +class WorkTree: + """Class to manage trees for remote builds. + + :param app_name: Name of the application. + :param build_id: Unique identifier for the build. + :param project_dir: Path to project directory. 
+ """ + + def __init__(self, app_name: str, build_id: str, project_dir: Path) -> None: + self._project_dir = project_dir + self._base_dir = Path( + BaseDirectory.save_cache_path(app_name, "remote-build", build_id) + ) + self._repo_dir = self._base_dir / "repo" + + @property + def repo_dir(self) -> Path: + """Get path the cached repository.""" + return self._repo_dir + + def init_repo(self) -> None: + """Initialize a clean repo.""" + if self._repo_dir.exists(): + rmtree(self._repo_dir) + + copytree(self._project_dir, self._repo_dir) + + self._gitify_repository() + + def _gitify_repository(self) -> None: + """Git-ify source repository tree.""" + repo = GitRepo(self._repo_dir) + if not repo.is_clean(): + repo.add_all() + repo.commit() + + def clean_cache(self): + """Clean the cache.""" + if self._base_dir.exists(): + rmtree(self._base_dir) diff --git a/snapcraft/store/_legacy_account.py b/snapcraft/store/_legacy_account.py index c910329124..7f877cb84c 100644 --- a/snapcraft/store/_legacy_account.py +++ b/snapcraft/store/_legacy_account.py @@ -154,7 +154,7 @@ def store_credentials(cls, config_content) -> None: cls.CONFIG_PATH.write_text(config_content) @overrides - def __init__( + def __init__( # noqa PLR0913 self, *, base_url: str, diff --git a/snapcraft/utils.py b/snapcraft/utils.py index 060f008916..84a5cd22e7 100644 --- a/snapcraft/utils.py +++ b/snapcraft/utils.py @@ -100,7 +100,7 @@ def get_os_platform( else: os_release = {} for line in lines: - line = line.strip() + line = line.strip() # noqa PLW2901 if not line or line.startswith("#") or "=" not in line: continue key, value = line.rstrip().split("=", 1) diff --git a/snapcraft_legacy/plugins/v2/python.py b/snapcraft_legacy/plugins/v2/python.py index 2eb4e21efd..30585d3a67 100644 --- a/snapcraft_legacy/plugins/v2/python.py +++ b/snapcraft_legacy/plugins/v2/python.py @@ -19,7 +19,7 @@ It can be used for python projects where you would want to do: - import python modules with a requirements.txt - - build a python project that has a setup.py + - build a python project that has a setup.py or pyproject.toml file - install packages straight from pip This plugin uses the common plugin keywords as well as those for "sources". @@ -140,7 +140,9 @@ def get_build_commands(self) -> List[str]: requirements_cmd = f"pip install {constraints} -U {requirements}" build_commands.append(requirements_cmd) - build_commands.append(f"[ -f setup.py ] && pip install {constraints} -U .") + build_commands.append( + f"[ -f setup.py -o -f pyproject.toml ] && pip install {constraints} -U ." + ) # Now fix shebangs. # TODO: replace with snapcraftctl once the two scripts are consolidated diff --git a/snapcraft_legacy/ruff.toml b/snapcraft_legacy/ruff.toml index e26460ba3b..a93c2ef9e1 100644 --- a/snapcraft_legacy/ruff.toml +++ b/snapcraft_legacy/ruff.toml @@ -16,5 +16,18 @@ select = [ "W", ] +[per-file-ignores] +"snapcraft_legacy/plugins/v2/_kernel_build.py" = [ + "E101", # Mixed tabs and spaces. Ruff gets confused by tabs in multiline strings + "W191", # Indentation contains tabs - another Ruff false positive +] +"tests/legacy/unit/plugins/v2/test_kernel.py" = [ + "E101", # Mixed tabs and spaces. 
Ruff gets confused by tabs in multiline strings + "W191", # Indentation contains tabs - another Ruff false positive +] +"tests/legacy/**.py" = [ + "E721", # Allowing type comparison +] + [mccabe] max-complexity = 10 diff --git a/spread.yaml b/spread.yaml index a15928f833..7e3777f055 100644 --- a/spread.yaml +++ b/spread.yaml @@ -67,7 +67,8 @@ backends: # SPREAD_SYSTEM has the following format here 'ubuntu-XX.YY-64' and gets # translated to an image XX.YY. image=$(echo $SPREAD_SYSTEM | sed -e 's/ubuntu-\(.*\)-64/\1/') - instance_name="spread-${image}" + spread_name=$(echo ${image} | sed -e 's/\.//g') + instance_name="spread-${spread_name}" fi if [ -z "${image}" ]; then @@ -96,7 +97,8 @@ backends: # SPREAD_SYSTEM has the following format here 'ubuntu-XX.YY-64' and gets # translated to an image XX.YY. image=$(echo $SPREAD_SYSTEM | sed -e 's/ubuntu-\(.*\)-64/\1/') - instance_name="spread-${image}" + spread_name=$(echo ${image} | sed -e 's/\.//g') + instance_name="spread-${spread_name}" fi if [ -z "${image}" ]; then @@ -227,6 +229,8 @@ prepare: | install_snapcraft pushd /snapcraft + git config --global user.email "you@example.com" + git config --global user.name "Your Name" git init git add . git commit -m "Testing Commit" @@ -282,6 +286,11 @@ suites: systems: - ubuntu-22.04* + tests/spread/core24/: + summary: core24 tests + systems: + - ubuntu-22.04* + # General, core suite tests/spread/general/: summary: tests of snapcraft core functionality @@ -300,6 +309,11 @@ suites: tests/spread/general/hooks/: summary: tests of snapcraft hook functionality + tests/spread/core-devel/: + summary: tests of devel base snaps + environment: + SNAPCRAFT_BUILD_ENVIRONMENT: "" + # General, core suite tests/spread/cross-compile/: summary: tests of supported cross-compile functionality diff --git a/tests/legacy/unit/meta/test_meta.py b/tests/legacy/unit/meta/test_meta.py index bf4735e4c7..97fe12a5f1 100644 --- a/tests/legacy/unit/meta/test_meta.py +++ b/tests/legacy/unit/meta/test_meta.py @@ -1575,7 +1575,7 @@ def make_snapcraft_project(self, common_id): source: . 
plugin: dump parse-info: ["1.metainfo.xml", "2.metainfo.xml"] - + """ ) diff --git a/tests/legacy/unit/plugins/v2/test_python.py b/tests/legacy/unit/plugins/v2/test_python.py index 438c324c91..f14a31d6f4 100644 --- a/tests/legacy/unit/plugins/v2/test_python.py +++ b/tests/legacy/unit/plugins/v2/test_python.py @@ -113,7 +113,7 @@ class Options: == [ '"${SNAPCRAFT_PYTHON_INTERPRETER}" -m venv ${SNAPCRAFT_PYTHON_VENV_ARGS} "${SNAPCRAFT_PART_INSTALL}"', 'SNAPCRAFT_PYTHON_VENV_INTERP_PATH="${SNAPCRAFT_PART_INSTALL}/bin/${SNAPCRAFT_PYTHON_INTERPRETER}"', - "[ -f setup.py ] && pip install -U .", + "[ -f setup.py -o -f pyproject.toml ] && pip install -U .", ] + _FIXUP_BUILD_COMMANDS ) @@ -134,7 +134,7 @@ class Options: 'SNAPCRAFT_PYTHON_VENV_INTERP_PATH="${SNAPCRAFT_PART_INSTALL}/bin/${SNAPCRAFT_PYTHON_INTERPRETER}"', "pip install -c 'constraints.txt' -U pip 'some-pkg; sys_platform != '\"'\"'win32'\"'\"''", "pip install -c 'constraints.txt' -U -r 'requirements.txt'", - "[ -f setup.py ] && pip install -c 'constraints.txt' -U .", + "[ -f setup.py -o -f pyproject.toml ] && pip install -c 'constraints.txt' -U .", ] + _FIXUP_BUILD_COMMANDS ) diff --git a/tests/spread/core-devel/basic/snap/snapcraft.yaml b/tests/spread/core-devel/basic/snap/snapcraft.yaml new file mode 100644 index 0000000000..0616761408 --- /dev/null +++ b/tests/spread/core-devel/basic/snap/snapcraft.yaml @@ -0,0 +1,16 @@ +name: build-base-devel +version: '1.0' +summary: build-base-devel +description: Build a base snap with a build-base of devel +confinement: strict + +type: base +build-base: devel + +# grade must be devel when build-base is devel +grade: devel + +parts: + build-base-devel: + plugin: nil + stage-packages: [base-files] \ No newline at end of file diff --git a/tests/spread/core-devel/basic/task.yaml b/tests/spread/core-devel/basic/task.yaml new file mode 100644 index 0000000000..9b8a3c62a3 --- /dev/null +++ b/tests/spread/core-devel/basic/task.yaml @@ -0,0 +1,19 @@ +summary: Test basic build for devel base + +environment: + SNAPCRAFT_BUILD_ENVIRONMENT: "" + +restore: | + cd "./snap" + snapcraft clean + rm -f ./*.snap + snap remove build-base-devel + +execute: | + cd "./snap" + + snapcraft pack + + snap install --dangerous ./*.snap + + grep -i "devel" /snap/build-base-devel/current/etc/os-release || { echo "Devel image not found" ; exit 1; } diff --git a/tests/spread/core22/chisel-base/snapcraft.yaml b/tests/spread/core22/chisel-base/snapcraft.yaml new file mode 100644 index 0000000000..26adcca210 --- /dev/null +++ b/tests/spread/core22/chisel-base/snapcraft.yaml @@ -0,0 +1,19 @@ +name: core2x +type: base +build-base: core22 +summary: base using chisel from core22 assets +description: | + Test creating a base using a stable release of chisel +version: 0+git +confinement: strict + +parts: + base: + plugin: nil + stage-packages: + - base-files_etc + - base-files_bin + - base-files_lib + - base-files_tmp + - base-files_var + - base-files_home diff --git a/tests/spread/core22/chisel-base/task.yaml b/tests/spread/core22/chisel-base/task.yaml new file mode 100644 index 0000000000..d38dade27d --- /dev/null +++ b/tests/spread/core22/chisel-base/task.yaml @@ -0,0 +1,17 @@ +summary: Build a simple snap + +environment: + SNAPCRAFT_BUILD_ENVIRONMENT: "" + +restore: | + snapcraft clean + rm -f ./*.snap + +execute: | + snapcraft pack + snap install core2x_*.snap --dangerous + + # verify that the chisel packages made it to the base + [ -d /snap/core2x/current/bin ] || exit 1 + [ -d /snap/core2x/current/lib ] || exit 1 + [ -d 
/snap/core2x/current/var ] || exit 1 diff --git a/tests/spread/core22/clean/task.yaml b/tests/spread/core22/clean/task.yaml index b179da7bb0..dd1fec6733 100644 --- a/tests/spread/core22/clean/task.yaml +++ b/tests/spread/core22/clean/task.yaml @@ -30,14 +30,14 @@ execute: | snapcraft pack 2>&1 | tee output.txt - grep "Executing parts lifecycle: pull part1" < output.txt - grep "Executing parts lifecycle: skip pull part2 (already ran)" < output.txt - grep "Executing parts lifecycle: build part1" < output.txt - grep "Executing parts lifecycle: skip build part2 (already ran)" < output.txt - grep "Executing parts lifecycle: stage part1" < output.txt - grep "Executing parts lifecycle: skip stage part2 (already ran)" < output.txt - grep "Executing parts lifecycle: prime part1" < output.txt - grep "Executing parts lifecycle: skip prime part2 (already ran)" < output.txt + grep "Pulling part1" < output.txt + grep "Skipping pull for part2 (already ran)" < output.txt + grep "Building part1" < output.txt + grep "Skipping build for part2 (already ran)" < output.txt + grep "Staging part1" < output.txt + grep "Skipping stage for part2 (already ran)" < output.txt + grep "Priming part1" < output.txt + grep "Skip prime for part2 (already ran)" < output.txt snapcraft clean if lxc --project=snapcraft list | grep snapcraft-clean; then diff --git a/tests/spread/core22/linters/library-missing/expected_linter_output.txt b/tests/spread/core22/linters/library-missing/expected_linter_output.txt index fc910046c6..1b23411f5d 100644 --- a/tests/spread/core22/linters/library-missing/expected_linter_output.txt +++ b/tests/spread/core22/linters/library-missing/expected_linter_output.txt @@ -2,6 +2,6 @@ Running linters... Running linter: classic Running linter: library Lint warnings: -- library: linter-test: missing dependency 'libcaca.so.0'. (https://snapcraft.io/docs/linters-library) -- library: linter-test: missing dependency 'libslang.so.2'. (https://snapcraft.io/docs/linters-library) +- library: linter-test: missing dependency 'libcaca.so.0'. (provided by 'libcaca0') (https://snapcraft.io/docs/linters-library) +- library: linter-test: missing dependency 'libslang.so.2'. (provided by 'libslang2') (https://snapcraft.io/docs/linters-library) Creating snap package... diff --git a/tests/spread/core24/simple-snap/snap/Makefile b/tests/spread/core24/simple-snap/snap/Makefile new file mode 100644 index 0000000000..bec21e786e --- /dev/null +++ b/tests/spread/core24/simple-snap/snap/Makefile @@ -0,0 +1,11 @@ +# -*- Mode: Makefile; indent-tabs-mode:t; tab-width: 4 -*- +.PHONY: all + +all: hello + +install: hello + install -d $(DESTDIR)/bin/ + install -D $^ $(DESTDIR)/bin/ + +hello: hello.c + $(CC) hello.c -o hello -lcurl diff --git a/tests/spread/core24/simple-snap/snap/hello.c b/tests/spread/core24/simple-snap/snap/hello.c new file mode 100644 index 0000000000..0980d5a387 --- /dev/null +++ b/tests/spread/core24/simple-snap/snap/hello.c @@ -0,0 +1,6 @@ +#include +#include + +int main() { + curl_global_init(CURL_GLOBAL_DEFAULT); +} diff --git a/tests/spread/core24/simple-snap/snap/snapcraft.yaml b/tests/spread/core24/simple-snap/snap/snapcraft.yaml new file mode 100644 index 0000000000..6737b7bc49 --- /dev/null +++ b/tests/spread/core24/simple-snap/snap/snapcraft.yaml @@ -0,0 +1,25 @@ +name: simple-snap +version: "1.0" +summary: Build a simple confined snap +description: | + Build a simple confined snap to test the build process. 
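+
+# The 'devel' build-base targets the in-development base; as with the core-devel
+# test above, grade must be 'devel' whenever build-base is 'devel'.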
+ +base: core24 +build-base: devel + +grade: devel +confinement: strict + +apps: + hello: + command: bin/hello + +parts: + hello: + source: . + plugin: make + build-packages: + - gcc + - libc-dev + - libcurl4-openssl-dev + stage-packages: [libcurl4] \ No newline at end of file diff --git a/tests/spread/core24/simple-snap/task.yaml b/tests/spread/core24/simple-snap/task.yaml new file mode 100644 index 0000000000..b38e204007 --- /dev/null +++ b/tests/spread/core24/simple-snap/task.yaml @@ -0,0 +1,16 @@ +summary: Build a simple snap + +environment: + SNAPCRAFT_BUILD_ENVIRONMENT: "" + +restore: | + snap remove simple-snap + snapcraft clean + rm -f ./*.snap + +execute: | + cd "./snap" + snapcraft pack + snap install --edge core24 + snap install --dangerous ./*.snap + /snap/bin/simple-snap.hello diff --git a/tests/spread/general/remote-build/task.yaml b/tests/spread/general/remote-build/task.yaml new file mode 100644 index 0000000000..5f3d0ac5ca --- /dev/null +++ b/tests/spread/general/remote-build/task.yaml @@ -0,0 +1,30 @@ +summary: Test the remote builder +manual: true +kill-timeout: 180m +systems: + - -ubuntu-22.04 + - -ubuntu-22.04-64 + - -ubuntu-22.04-amd64 + +environment: + LAUNCHPAD_TOKEN: "$(HOST: echo ${LAUNCHPAD_TOKEN})" + STRATEGY/DISABLE_FALLBACK: "disable-fallback" + STRATEGY/FORCE_FALLBACK: "force-fallback" + +prepare: | + if [[ -z "$LAUNCHPAD_TOKEN" ]]; then + echo "No credentials set in env LAUNCHPAD_TOKEN" + exit 1 + fi + + snapcraft init + mkdir -p ~/.local/share/snapcraft/provider/launchpad/ + echo -e "$LAUNCHPAD_TOKEN" >> ~/.local/share/snapcraft/provider/launchpad/credentials + +restore: | + rm -f ./*.snap ./*.txt + +execute: | + export SNAPCRAFT_REMOTE_BUILD_STRATEGY="$STRATEGY" + + snapcraft remote-build --launchpad-accept-public-upload diff --git a/tests/spread/plugins/v2/build-and-run-hello/task.yaml b/tests/spread/plugins/v2/build-and-run-hello/task.yaml index 083e9315e4..7d43254175 100644 --- a/tests/spread/plugins/v2/build-and-run-hello/task.yaml +++ b/tests/spread/plugins/v2/build-and-run-hello/task.yaml @@ -16,6 +16,7 @@ environment: SNAP/python: python-hello SNAP/python_multiple_parts: python-hello-multiple-parts SNAP/python_multiple_parts_staged: python-hello-multiple-parts-staged + SNAP/python_pyproject: python-hello-pyproject SNAP/python_staged: python-hello-staged-python SNAP/python_with_stage_package_in_base: python-with-stage-package-in-base SNAP/python_with_python_package_dep: python-hello-with-python-package-dep diff --git a/tests/spread/plugins/v2/snaps/python-hello-pyproject/hello b/tests/spread/plugins/v2/snaps/python-hello-pyproject/hello new file mode 100644 index 0000000000..e3095b2229 --- /dev/null +++ b/tests/spread/plugins/v2/snaps/python-hello-pyproject/hello @@ -0,0 +1,2 @@ +def main(): + print("hello world") diff --git a/tests/spread/plugins/v2/snaps/python-hello-pyproject/pyproject.toml b/tests/spread/plugins/v2/snaps/python-hello-pyproject/pyproject.toml new file mode 100644 index 0000000000..6149544990 --- /dev/null +++ b/tests/spread/plugins/v2/snaps/python-hello-pyproject/pyproject.toml @@ -0,0 +1,11 @@ +[build-system] +requires = ["setuptools"] +build-backend = "setuptools.build_meta" + +[project] +name = "python_hello_pyproject" +description = "A simple hello world in python" +version = "0.0.1" + +[project.scripts] +python-hello-pyproject = "pythonhellopyproject:main" diff --git a/tests/spread/plugins/v2/snaps/python-hello-pyproject/snap/snapcraft.yaml b/tests/spread/plugins/v2/snaps/python-hello-pyproject/snap/snapcraft.yaml new file 
mode 100644 index 0000000000..951d20d932 --- /dev/null +++ b/tests/spread/plugins/v2/snaps/python-hello-pyproject/snap/snapcraft.yaml @@ -0,0 +1,18 @@ +name: python-hello-pyproject +version: "1.0" +summary: hello world +description: A simple hello world in python using a pyproject.toml. +grade: devel +base: core20 +confinement: strict + +apps: + python-hello-pyproject: + command: bin/python-hello-pyproject + +parts: + hello: + source: . + plugin: python + python-packages: + - pip==20.0.2 diff --git a/tests/spread/plugins/v2/snaps/python-hello-pyproject/src/pythonhellopyproject/__init__.py b/tests/spread/plugins/v2/snaps/python-hello-pyproject/src/pythonhellopyproject/__init__.py new file mode 120000 index 0000000000..9ad7f2b392 --- /dev/null +++ b/tests/spread/plugins/v2/snaps/python-hello-pyproject/src/pythonhellopyproject/__init__.py @@ -0,0 +1 @@ +../../hello \ No newline at end of file diff --git a/tests/unit/cli/test_exit.py b/tests/unit/cli/test_exit.py index e2a853834d..36bae5fb04 100644 --- a/tests/unit/cli/test_exit.py +++ b/tests/unit/cli/test_exit.py @@ -24,6 +24,7 @@ from craft_providers import ProviderError from snapcraft import cli +from snapcraft.remote import RemoteBuildError def test_no_keyring_error(capsys, mocker): @@ -74,6 +75,24 @@ def test_craft_providers_error(capsys, mocker): assert stderr[2].startswith("test resolution") +def test_remote_build_error(capsys, mocker): + """Catch remote-build errors.""" + mocker.patch.object(sys, "argv", ["cmd", "pull"]) + mocker.patch.object(sys.stdin, "isatty", return_value=True) + mocker.patch( + "snapcraft.commands.lifecycle.PullCommand.run", + side_effect=RemoteBuildError(brief="test brief", details="test details"), + ) + + cli.run() + + stderr = capsys.readouterr().err.splitlines() + + # Simple verification that our expected message is being printed + assert stderr[0].startswith("remote-build error: test brief") + assert stderr[1].startswith("test details") + + @pytest.mark.parametrize("is_managed,report_errors", [(True, False), (False, True)]) def test_emit_error(emitter, mocker, is_managed, report_errors): mocker.patch("snapcraft.utils.is_managed_mode", return_value=is_managed) diff --git a/tests/unit/commands/test_remote.py b/tests/unit/commands/test_remote.py index f03ee82e0e..c8c3688f7c 100644 --- a/tests/unit/commands/test_remote.py +++ b/tests/unit/commands/test_remote.py @@ -18,6 +18,7 @@ import sys from pathlib import Path +from unittest.mock import ANY, call import pytest from yaml import safe_dump @@ -25,7 +26,6 @@ from snapcraft import cli from snapcraft.parts.yaml_utils import CURRENT_BASES, ESM_BASES, LEGACY_BASES from snapcraft.remote import GitRepo -from snapcraft_legacy.internal.remote_build.errors import AcceptPublicUploadError # remote-build control logic may check if the working dir is a git repo, # so execute all tests inside a test directory @@ -44,7 +44,7 @@ def use_new_remote_build(monkeypatch): monkeypatch.setenv("SNAPCRAFT_REMOTE_BUILD_STRATEGY", "disable-fallback") -@pytest.fixture +@pytest.fixture() def fake_sudo(monkeypatch): monkeypatch.setenv("SUDO_USER", "fake") monkeypatch.setattr("os.geteuid", lambda: 0) @@ -56,13 +56,20 @@ def mock_argv(mocker): return mocker.patch.object(sys, "argv", ["snapcraft", "remote-build"]) -@pytest.fixture +@pytest.fixture() def mock_confirm(mocker): return mocker.patch( "snapcraft.commands.remote.confirm_with_user", return_value=True ) +@pytest.fixture() +def mock_remote_builder(mocker): + _mock_remote_builder = mocker.patch("snapcraft.commands.remote.RemoteBuilder") 
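+    # Default to "no outstanding build" so tests exercise the clean-start path;
+    # individual tests flip has_outstanding_build to True to cover recovery.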
+ _mock_remote_builder.return_value.has_outstanding_build.return_value = False + return _mock_remote_builder + + @pytest.fixture() def mock_run_new_or_fallback_remote_build(mocker): return mocker.patch( @@ -70,6 +77,13 @@ def mock_run_new_or_fallback_remote_build(mocker): ) +@pytest.fixture() +def mock_run_new_remote_build(mocker): + return mocker.patch( + "snapcraft.commands.remote.RemoteBuildCommand._run_new_remote_build" + ) + + @pytest.fixture() def mock_run_legacy(mocker): return mocker.patch("snapcraft.commands.remote.run_legacy") @@ -102,19 +116,19 @@ def test_command_user_confirms_upload( ) @pytest.mark.usefixtures("create_snapcraft_yaml", "mock_argv") def test_command_user_denies_upload( - mock_confirm, mock_run_new_or_fallback_remote_build + capsys, mock_confirm, mock_run_new_or_fallback_remote_build ): """Raise an error if the user denies the upload prompt.""" mock_confirm.return_value = False - with pytest.raises(AcceptPublicUploadError): - cli.run() + cli.run() - mock_confirm.assert_called_once_with( - "All data sent to remote builders will be publicly available. " - "Are you sure you want to continue?" - ) - mock_run_new_or_fallback_remote_build.assert_not_called() + _, err = capsys.readouterr() + assert ( + "Cannot upload data to build servers.\n" + "Remote build needs explicit acknowledgement " + "that data sent to build servers is public." + ) in err @pytest.mark.parametrize( @@ -181,6 +195,50 @@ def test_cannot_load_snapcraft_yaml(capsys): ) +@pytest.mark.parametrize( + "create_snapcraft_yaml", CURRENT_BASES | LEGACY_BASES, indirect=True +) +@pytest.mark.usefixtures( + "create_snapcraft_yaml", "mock_confirm", "use_new_remote_build", "mock_argv" +) +def test_launchpad_timeout_default(mock_remote_builder): + """Use the default timeout `0` when `--launchpad-timeout` is not provided.""" + cli.run() + + mock_remote_builder.assert_called_with( + app_name="snapcraft", + build_id=None, + project_name="mytest", + architectures=ANY, + project_dir=Path(), + timeout=0, + ) + + +@pytest.mark.parametrize( + "create_snapcraft_yaml", CURRENT_BASES | LEGACY_BASES, indirect=True +) +@pytest.mark.usefixtures( + "create_snapcraft_yaml", "mock_confirm", "use_new_remote_build" +) +def test_launchpad_timeout(mocker, mock_remote_builder): + """Pass the `--launchpad-timeout` to the remote builder.""" + mocker.patch.object( + sys, "argv", ["snapcraft", "remote-build", "--launchpad-timeout", "100"] + ) + + cli.run() + + mock_remote_builder.assert_called_with( + app_name="snapcraft", + build_id=None, + project_name="mytest", + architectures=ANY, + project_dir=Path(), + timeout=100, + ) + + ################################ # Snapcraft project base tests # ################################ @@ -220,7 +278,7 @@ def test_get_effective_base_with_build_base( @pytest.mark.usefixtures("mock_argv", "mock_confirm") -@pytest.mark.parametrize("base", CURRENT_BASES | LEGACY_BASES) +@pytest.mark.parametrize("base", CURRENT_BASES | LEGACY_BASES | ESM_BASES) def test_get_effective_base_type( base, snapcraft_yaml, mock_run_new_or_fallback_remote_build ): @@ -255,15 +313,37 @@ def test_get_effective_base_none(capsys, snapcraft_yaml): @pytest.mark.usefixtures("mock_argv", "mock_confirm") -@pytest.mark.parametrize("base", ESM_BASES) -def test_get_effective_base_esm(base, capsys, snapcraft_yaml): - """Raise an error if an ESM base is used.""" - snapcraft_yaml(base=base) +def test_get_effective_base_core_esm_warning( + emitter, snapcraft_yaml, mock_run_new_or_fallback_remote_build +): + """Warn if core, an ESM base, is 
used.""" + snapcraft_yaml(base="core") cli.run() - _, err = capsys.readouterr() - assert f"{base!r} is not supported on this version of Snapcraft." in err + mock_run_new_or_fallback_remote_build.assert_called_once_with("core") + emitter.assert_progress( + "WARNING: base 'core' was last supported on Snapcraft 4 available on the " + "'4.x' channel.", + permanent=True, + ) + + +@pytest.mark.usefixtures("mock_argv", "mock_confirm") +def test_get_effective_base_core18_esm_warning( + emitter, snapcraft_yaml, mock_run_new_or_fallback_remote_build +): + """Warn if core18, an ESM base, is used.""" + snapcraft_yaml(base="core18") + + cli.run() + + mock_run_new_or_fallback_remote_build.assert_called_once_with("core18") + emitter.assert_progress( + "WARNING: base 'core18' was last supported on Snapcraft 7 available on the " + "'7.x' channel.", + permanent=True, + ) ####################### @@ -275,15 +355,12 @@ def test_get_effective_base_esm(base, capsys, snapcraft_yaml): "create_snapcraft_yaml", CURRENT_BASES - {"core22"}, indirect=True ) @pytest.mark.usefixtures("create_snapcraft_yaml", "mock_confirm", "mock_argv") -def test_run_newer_than_core_22(emitter, mock_run_legacy): +def test_run_newer_than_core_22(emitter, mock_run_new_remote_build): """Bases newer than core22 must use new remote-build.""" cli.run() - # this should fail when new remote-build code is used (#4323) - mock_run_legacy.assert_called_once() - emitter.assert_debug( - "Running fallback remote-build because new remote-build is not available." - ) + mock_run_new_remote_build.assert_called_once() + emitter.assert_debug("Running new remote-build because base is newer than core22") @pytest.mark.parametrize( @@ -295,7 +372,7 @@ def test_run_core22_and_older(emitter, mock_run_legacy): cli.run() mock_run_legacy.assert_called_once() - emitter.assert_debug("Running fallback remote-build.") + emitter.assert_debug("Running fallback remote-build") @pytest.mark.parametrize( @@ -305,7 +382,9 @@ def test_run_core22_and_older(emitter, mock_run_legacy): "envvar", ["force-fallback", "disable-fallback", "badvalue", None] ) @pytest.mark.usefixtures("create_snapcraft_yaml", "mock_confirm", "mock_argv") -def test_run_envvar_newer_than_core22(envvar, emitter, mock_run_legacy, monkeypatch): +def test_run_envvar_newer_than_core22( + envvar, emitter, mock_run_new_remote_build, monkeypatch +): """Bases newer than core22 run new remote-build regardless of envvar.""" if envvar: monkeypatch.setenv("SNAPCRAFT_REMOTE_BUILD_STRATEGY", envvar) @@ -314,26 +393,24 @@ def test_run_envvar_newer_than_core22(envvar, emitter, mock_run_legacy, monkeypa cli.run() - mock_run_legacy.assert_called_once() - emitter.assert_debug( - "Running fallback remote-build because new remote-build is not available." 
- ) + mock_run_new_remote_build.assert_called_once() + emitter.assert_debug("Running new remote-build because base is newer than core22") @pytest.mark.parametrize( "create_snapcraft_yaml", LEGACY_BASES | {"core22"}, indirect=True ) @pytest.mark.usefixtures("create_snapcraft_yaml", "mock_confirm", "mock_argv") -def test_run_envvar_disable_fallback(emitter, mock_run_legacy, monkeypatch): +def test_run_envvar_disable_fallback(emitter, mock_run_new_remote_build, monkeypatch): """core22 and older bases run new remote-build if envvar is `disable-fallback`.""" monkeypatch.setenv("SNAPCRAFT_REMOTE_BUILD_STRATEGY", "disable-fallback") cli.run() - mock_run_legacy.assert_called_once() + mock_run_new_remote_build.assert_called_once() emitter.assert_debug( "Running new remote-build because environment variable " - "'SNAPCRAFT_REMOTE_BUILD_STRATEGY' is 'disable-fallback'." + "'SNAPCRAFT_REMOTE_BUILD_STRATEGY' is 'disable-fallback'" ) @@ -350,7 +427,7 @@ def test_run_envvar_force_fallback(emitter, mock_run_legacy, monkeypatch): mock_run_legacy.assert_called_once() emitter.assert_debug( "Running fallback remote-build because environment variable " - "'SNAPCRAFT_REMOTE_BUILD_STRATEGY' is 'force-fallback'." + "'SNAPCRAFT_REMOTE_BUILD_STRATEGY' is 'force-fallback'" ) @@ -365,7 +442,7 @@ def test_run_envvar_force_fallback_unset(emitter, mock_run_legacy, monkeypatch): cli.run() mock_run_legacy.assert_called_once() - emitter.assert_debug("Running fallback remote-build.") + emitter.assert_debug("Running fallback remote-build") @pytest.mark.parametrize( @@ -379,7 +456,7 @@ def test_run_envvar_force_fallback_empty(emitter, mock_run_legacy, monkeypatch): cli.run() mock_run_legacy.assert_called_once() - emitter.assert_debug("Running fallback remote-build.") + emitter.assert_debug("Running fallback remote-build") @pytest.mark.parametrize( @@ -396,7 +473,7 @@ def test_run_envvar_invalid(capsys, emitter, mock_run_legacy, monkeypatch): assert ( "Unknown value 'badvalue' in environment variable " "'SNAPCRAFT_REMOTE_BUILD_STRATEGY'. Valid values are 'disable-fallback' and " - "'force-fallback'." + "'force-fallback'" ) in err @@ -404,17 +481,16 @@ def test_run_envvar_invalid(capsys, emitter, mock_run_legacy, monkeypatch): "create_snapcraft_yaml", LEGACY_BASES | {"core22"}, indirect=True ) @pytest.mark.usefixtures("create_snapcraft_yaml", "mock_confirm", "mock_argv") -def test_run_in_repo(emitter, mock_run_legacy, new_dir): +def test_run_in_repo(emitter, mock_run_new_remote_build, new_dir): """core22 and older bases run new remote-build if in a git repo.""" # initialize a git repo GitRepo(new_dir) cli.run() - # this should fail when new remote-build code is used (#4323) - mock_run_legacy.assert_called_once() + mock_run_new_remote_build.assert_called_once() emitter.assert_debug( - "Running new remote-build because project is in a git repository." 
+ "Running new remote-build because project is in a git repository" ) @@ -427,75 +503,212 @@ def test_run_not_in_repo(emitter, mock_run_legacy): cli.run() mock_run_legacy.assert_called_once() - emitter.assert_debug("Running fallback remote-build.") + emitter.assert_debug("Running fallback remote-build") @pytest.mark.parametrize( "create_snapcraft_yaml", CURRENT_BASES - {"core22"}, indirect=True ) @pytest.mark.usefixtures("create_snapcraft_yaml", "mock_confirm", "mock_argv") -def test_run_in_repo_newer_than_core22(emitter, mock_run_legacy, monkeypatch, new_dir): +def test_run_in_repo_newer_than_core22( + emitter, mock_run_new_remote_build, monkeypatch, new_dir +): """Bases newer than core22 run new remote-build regardless of being in a repo.""" # initialize a git repo GitRepo(new_dir) cli.run() - # this should fail when new remote-build code is used (#4323) - mock_run_legacy.assert_called_once() - emitter.assert_debug( - "Running fallback remote-build because new remote-build is not available." + mock_run_new_remote_build.assert_called_once() + emitter.assert_debug("Running new remote-build because base is newer than core22") + + +###################### +# Architecture tests # +###################### + + +@pytest.mark.parametrize("base", CURRENT_BASES | LEGACY_BASES) +@pytest.mark.parametrize( + ["archs", "expected_archs"], + [ + # single arch as scalar + ([{"build-on": "arm64", "build-for": "arm64"}], ["arm64"]), + # single arch as list + ([{"build-on": ["arm64"], "build-for": ["arm64"]}], ["arm64"]), + # no build-for as scalar + ([{"build-on": "arm64"}], ["arm64"]), + # no build-for as list + ([{"build-on": ["arm64"]}], ["arm64"]), + # multiple archs as scalars + ( + [ + {"build-on": "amd64", "build-for": "amd64"}, + {"build-on": "arm64", "build-for": "arm64"}, + ], + ["amd64", "arm64"], + ), + # multiple archs as lists + ( + [ + {"build-on": ["amd64"], "build-for": ["amd64"]}, + {"build-on": ["arm64"], "build-for": ["arm64"]}, + ], + ["amd64", "arm64"], + ), + # multiple build-ons + ( + [ + {"build-on": ["amd64", "arm64"], "build-for": "amd64"}, + {"build-on": ["armhf", "powerpc"], "build-for": "arm64"}, + ], + ["amd64", "arm64", "armhf", "powerpc"], + ), + ], +) +@pytest.mark.usefixtures("mock_argv", "mock_confirm", "use_new_remote_build") +def test_determine_architectures_from_snapcraft_yaml( + archs, expected_archs, base, snapcraft_yaml, mock_remote_builder +): + """Parse `build-on` architectures from a snapcraft.yaml file.""" + snapcraft_yaml(base=base, architectures=archs) + + cli.run() + + mock_remote_builder.assert_called_with( + app_name="snapcraft", + build_id=None, + project_name="mytest", + architectures=expected_archs, + project_dir=Path(), + timeout=0, + ) + + +@pytest.mark.parametrize( + "create_snapcraft_yaml", CURRENT_BASES | LEGACY_BASES, indirect=True +) +@pytest.mark.usefixtures( + "create_snapcraft_yaml", "mock_argv", "mock_confirm", "use_new_remote_build" +) +def test_determine_architectures_host_arch(mocker, mock_remote_builder): + """Use host architecture if not defined in the snapcraft.yaml.""" + mocker.patch( + "snapcraft.commands.remote.get_host_architecture", return_value="arm64" + ) + + cli.run() + + mock_remote_builder.assert_called_with( + app_name="snapcraft", + build_id=None, + project_name="mytest", + architectures=["arm64"], + project_dir=Path(), + timeout=0, ) +@pytest.mark.parametrize( + ("args", "expected_archs"), + [ + (["--build-for", "amd64"], ["amd64"]), + (["--build-for", "amd64", "arm64"], ["amd64", "arm64"]), + # launchpad will accept and 
ignore duplicates + (["--build-for", "amd64", "amd64"], ["amd64", "amd64"]), + ], +) +@pytest.mark.parametrize( + "create_snapcraft_yaml", CURRENT_BASES | LEGACY_BASES, indirect=True +) +@pytest.mark.usefixtures( + "create_snapcraft_yaml", "mock_confirm", "use_new_remote_build" +) +def test_determine_architectures_provided_by_user( + args, expected_archs, mocker, mock_remote_builder +): + """Use architectures provided by the user.""" + mocker.patch.object(sys, "argv", ["snapcraft", "remote-build"] + args) + + cli.run() + + mock_remote_builder.assert_called_with( + app_name="snapcraft", + build_id=None, + project_name="mytest", + architectures=expected_archs, + project_dir=Path(), + timeout=0, + ) + + +@pytest.mark.parametrize("base", CURRENT_BASES | LEGACY_BASES) +@pytest.mark.usefixtures("mock_confirm", "use_new_remote_build") +def test_determine_architectures_error(base, capsys, snapcraft_yaml, mocker): + """Error if `--build-for` is provided and archs are in the snapcraft.yaml.""" + mocker.patch.object( + sys, "argv", ["snapcraft", "remote-build", "--build-for", "amd64"] + ) + snapcraft_yaml( + base=base, architectures=[{"build-on": "arm64", "build-for": "arm64"}] + ) + + cli.run() + + _, err = capsys.readouterr() + assert ( + "Cannot use `--build-on` because architectures are already defined in " + "snapcraft.yaml." + ) in err + + ################## # Build id tests # ################## -# The build-id is not currently used, so these unit tests test the log output. -# When #4323 is complete, these tests can be rewritten to verify the build-id passed -# to the new remote-build code. - @pytest.mark.parametrize( "create_snapcraft_yaml", CURRENT_BASES | LEGACY_BASES, indirect=True ) @pytest.mark.usefixtures( - "create_snapcraft_yaml", - "mock_confirm", - "mock_argv", - "mock_run_legacy", - "use_new_remote_build", + "create_snapcraft_yaml", "mock_confirm", "use_new_remote_build" ) -def test_build_id_provided(emitter, mocker): - """Use the build id provided as an argument.""" +def test_build_id_provided(mocker, mock_remote_builder): + """Pass the build id provided as an argument.""" mocker.patch.object( sys, "argv", ["snapcraft", "remote-build", "--build-id", "test-build-id"] ) cli.run() - emitter.assert_debug("Using build ID 'test-build-id' passed as a parameter.") + mock_remote_builder.assert_called_with( + app_name="snapcraft", + build_id="test-build-id", + project_name="mytest", + architectures=ANY, + project_dir=Path(), + timeout=0, + ) @pytest.mark.parametrize( "create_snapcraft_yaml", CURRENT_BASES | LEGACY_BASES, indirect=True ) @pytest.mark.usefixtures( - "create_snapcraft_yaml", - "mock_confirm", - "mock_argv", - "mock_run_legacy", - "use_new_remote_build", + "create_snapcraft_yaml", "mock_confirm", "mock_argv", "use_new_remote_build" ) -def test_build_id_computed(emitter): - """Compute the build id.""" +def test_build_id_not_provided(mock_remote_builder): + """Pass `None` for the build id if it is not provided as an argument.""" + cli.run() - # The create_snapcraft_yaml fixture uses the project name 'mytest'. - # Look for an md5 hash (a 32 character lowercase hex string). 
- emitter.assert_debug( - "Using computed build ID 'snapcraft-mytest-[0-9a-f]{32}'.", regex=True + mock_remote_builder.assert_called_with( + app_name="snapcraft", + build_id=None, + project_name="mytest", + architectures=ANY, + project_dir=Path(), + timeout=0, ) @@ -523,3 +736,119 @@ def test_build_id_no_project_name_error(base, capsys): _, err = capsys.readouterr() assert "Could not get project name from 'snapcraft.yaml'." in err + + +######################## +# Remote builder tests # +######################## + + +@pytest.mark.parametrize( + "create_snapcraft_yaml", CURRENT_BASES | LEGACY_BASES, indirect=True +) +@pytest.mark.usefixtures( + "create_snapcraft_yaml", "mock_confirm", "use_new_remote_build" +) +def test_status(mocker, mock_remote_builder): + """Print the status when `--status` is provided.""" + mocker.patch.object(sys, "argv", ["snapcraft", "remote-build", "--status"]) + + cli.run() + + assert mock_remote_builder.mock_calls[-1] == call().print_status() + + +@pytest.mark.parametrize( + "create_snapcraft_yaml", CURRENT_BASES | LEGACY_BASES, indirect=True +) +@pytest.mark.usefixtures( + "create_snapcraft_yaml", + "mock_confirm", + "mock_remote_builder", + "use_new_remote_build", +) +def test_recover_no_build(emitter, mocker): + """Warn if no build is found when `--recover` is provided.""" + mocker.patch.object(sys, "argv", ["snapcraft", "remote-build", "--recover"]) + + cli.run() + + emitter.assert_progress("No build found", permanent=True) + + +@pytest.mark.parametrize( + "create_snapcraft_yaml", CURRENT_BASES | LEGACY_BASES, indirect=True +) +@pytest.mark.usefixtures( + "create_snapcraft_yaml", "mock_confirm", "use_new_remote_build" +) +def test_recover_build(emitter, mocker, mock_remote_builder): + """Recover a build when `--recover` is provided.""" + mocker.patch.object(sys, "argv", ["snapcraft", "remote-build", "--recover"]) + mock_remote_builder.return_value.has_outstanding_build.return_value = True + + cli.run() + + assert mock_remote_builder.mock_calls[-3:] == [ + call().print_status(), + call().monitor_build(), + call().clean_build(), + ] + + +@pytest.mark.parametrize( + "create_snapcraft_yaml", CURRENT_BASES | LEGACY_BASES, indirect=True +) +@pytest.mark.usefixtures( + "create_snapcraft_yaml", "mock_argv", "mock_confirm", "use_new_remote_build" +) +def test_recover_build_user_confirms(emitter, mocker, mock_remote_builder): + """Recover a build when a user confirms.""" + mock_remote_builder.return_value.has_outstanding_build.return_value = True + + cli.run() + + assert mock_remote_builder.mock_calls[-3:] == [ + call().print_status(), + call().monitor_build(), + call().clean_build(), + ] + + +@pytest.mark.parametrize( + "create_snapcraft_yaml", CURRENT_BASES | LEGACY_BASES, indirect=True +) +@pytest.mark.usefixtures("create_snapcraft_yaml", "mock_argv", "use_new_remote_build") +def test_recover_build_user_denies(emitter, mocker, mock_remote_builder): + """Clean and start a new build when a user denies to recover an existing build.""" + mocker.patch( + # confirm data upload, deny build recovery + "snapcraft.commands.remote.confirm_with_user", + side_effect=[True, False], + ) + mock_remote_builder.return_value.has_outstanding_build.return_value = True + + cli.run() + + assert mock_remote_builder.mock_calls[-3:] == [ + call().start_build(), + call().monitor_build(), + call().clean_build(), + ] + + +@pytest.mark.parametrize( + "create_snapcraft_yaml", CURRENT_BASES | LEGACY_BASES, indirect=True +) +@pytest.mark.usefixtures( + "create_snapcraft_yaml", "mock_argv", 
"mock_confirm", "use_new_remote_build" +) +def test_remote_build(emitter, mocker, mock_remote_builder): + """Clean and start a new build.""" + cli.run() + + assert mock_remote_builder.mock_calls[-3:] == [ + call().start_build(), + call().monitor_build(), + call().clean_build(), + ] diff --git a/tests/unit/elf/conftest.py b/tests/unit/elf/conftest.py index b0236d9ff6..aab4dd4a58 100644 --- a/tests/unit/elf/conftest.py +++ b/tests/unit/elf/conftest.py @@ -93,7 +93,9 @@ def fake_tools(new_dir, monkeypatch): monkeypatch.setenv("PATH", f"{bin_path!s}:{os.getenv('PATH')}") -def _fake_elffile_extract_attributes(self): # pylint: disable=too-many-statements +def _fake_elffile_extract_attributes( # noqa: PLR0915 + self, +): # pylint: disable=too-many-statements """Mock method definition for ElfFile._extract_attributes().""" name = self.path.name diff --git a/tests/unit/linters/test_library_linter.py b/tests/unit/linters/test_library_linter.py index 57fcca3fe9..683439e0bb 100644 --- a/tests/unit/linters/test_library_linter.py +++ b/tests/unit/linters/test_library_linter.py @@ -285,3 +285,93 @@ def test_is_library_path_directory(mocker): result = linter._is_library_path(path=Path("/test/dir")) assert not result + + +def test_ld_config_cache(fake_process): + """Check that the ldconfig cache is generated correctly.""" + fake_process.register_subprocess( + ["ldconfig", "-N", "-p"], + stdout=b"""\ + 1223 libs found in cache `/etc/ld.so.cache' + libcurl.so.4 (libc6,x86-64) => /lib/x86_64-linux-gnu/libcurl.so.4 + libcurl.so (libc6,x86-64) => /lib/x86_64-linux-gnu/libcurl.so + libcrypto.so.3 (libc6,x86-64) => /lib/x86_64-linux-gnu/libcrypto.so.3 + libcrypto.so (libc6,x86-64) => /lib/x86_64-linux-gnu/libcrypto.so + libcrypt.so.1 (libc6,x86-64) => /lib/x86_64-linux-gnu/libcrypt.so.1 + libcrypt.so (libc6,x86-64) => /lib/x86_64-linux-gnu/libcrypt.so + Cache generated by: ldconfig (Ubuntu GLIBC 2.38-1ubuntu6) stable release version 2.38 + """, + returncode=0, + ) + + linter = LibraryLinter(name="library", snap_metadata=Mock(), lint=None) + + linter._generate_ld_config_cache() + + assert linter._ld_config_cache == { + "libcurl.so.4": Path("/lib/x86_64-linux-gnu/libcurl.so.4"), + "libcurl.so": Path("/lib/x86_64-linux-gnu/libcurl.so"), + "libcrypto.so.3": Path("/lib/x86_64-linux-gnu/libcrypto.so.3"), + "libcrypto.so": Path("/lib/x86_64-linux-gnu/libcrypto.so"), + "libcrypt.so.1": Path("/lib/x86_64-linux-gnu/libcrypt.so.1"), + "libcrypt.so": Path("/lib/x86_64-linux-gnu/libcrypt.so"), + } + + +def test_find_deb_package(mocker, fake_process): + """Sarching a system package that includes a library file""" + mocker.patch( + "snapcraft.linters.library_linter.LibraryLinter._generate_ld_config_cache" + ) + + fake_process.register_subprocess( + ["dpkg", "-S", "/usr/lib/x86_64-linux-gnu/libcurl.so.4"], + stdout=b"libcurl4:amd64: /usr/lib/x86_64-linux-gnu/libcurl.so.4", + ) + + linter = LibraryLinter(name="library", snap_metadata=Mock(), lint=None) + linter._ld_config_cache = { + "libcurl.so.4": Path("/lib/x86_64-linux-gnu/libcurl.so.4"), + "libcurl.so": Path("/lib/x86_64-linux-gnu/libcurl.so"), + "libcrypto.so.3": Path("/lib/x86_64-linux-gnu/libcrypto.so.3"), + "libcrypto.so": Path("/lib/x86_64-linux-gnu/libcrypto.so"), + "libcrypt.so.1": Path("/lib/x86_64-linux-gnu/libcrypt.so.1"), + "libcrypt.so": Path("/lib/x86_64-linux-gnu/libcrypt.so"), + } + + mocker.patch("pathlib.Path.resolve").return_value = Path( + "/usr/lib/x86_64-linux-gnu/libcurl.so.4" + ) + result = linter._find_deb_package("libcurl.so.4") + assert result == 
"libcurl4" + + +def test_find_deb_package_no_available(mocker, fake_process): + """Sarching a system package that includes a library file but not found""" + mocker.patch( + "snapcraft.linters.library_linter.LibraryLinter._generate_ld_config_cache" + ) + + fake_process.register_subprocess( + ["dpkg", "-S", "/usr/lib/x86_64-linux-gnu/libcurl.so.4"], + stdout=b"dpkg-query: no path found matching pattern /usr/lib/x86_64-linux-gnu/libcurl.so.4", + returncode=1, + ) + + linter = LibraryLinter(name="library", snap_metadata=Mock(), lint=None) + linter._ld_config_cache = { + "libcurl.so.4": Path("/lib/x86_64-linux-gnu/libcurl.so.4"), + "libcurl.so": Path("/lib/x86_64-linux-gnu/libcurl.so"), + "libcrypto.so.3": Path("/lib/x86_64-linux-gnu/libcrypto.so.3"), + "libcrypto.so": Path("/lib/x86_64-linux-gnu/libcrypto.so"), + "libcrypt.so.1": Path("/lib/x86_64-linux-gnu/libcrypt.so.1"), + "libcrypt.so": Path("/lib/x86_64-linux-gnu/libcrypt.so"), + } + + mocker.patch("pathlib.Path.resolve").return_value = Path( + "/usr/lib/x86_64-linux-gnu/libcurl.so.4" + ) + + result = linter._find_deb_package("libcurl.so.4") + + assert not result diff --git a/tests/unit/parts/test_grammar.py b/tests/unit/parts/test_grammar.py index 856d2bfd20..6e90a48b01 100644 --- a/tests/unit/parts/test_grammar.py +++ b/tests/unit/parts/test_grammar.py @@ -26,7 +26,7 @@ _PROCESSOR = GrammarProcessor( arch="amd64", target_arch="amd64", - checker=lambda x: x == x, # pylint: disable=comparison-with-itself + checker=lambda x: x == x, # pylint: disable=comparison-with-itself # noqa PLR0124 ) GrammarEntry = namedtuple("GrammarEntry", ["value", "expected"]) diff --git a/tests/unit/parts/test_parts.py b/tests/unit/parts/test_parts.py index 1a2719cd95..d6000c1af1 100644 --- a/tests/unit/parts/test_parts.py +++ b/tests/unit/parts/test_parts.py @@ -75,7 +75,6 @@ def test_parts_lifecycle_run(mocker, parts_data, step_name, new_dir, emitter): project_base="core22", ) ] - emitter.assert_progress(f"Executing parts lifecycle: {step_name} p1") def test_parts_lifecycle_run_bad_step(parts_data, new_dir): diff --git a/tests/unit/remote/test_errors.py b/tests/unit/remote/test_errors.py index 1c44538fde..9420decaa9 100644 --- a/tests/unit/remote/test_errors.py +++ b/tests/unit/remote/test_errors.py @@ -28,3 +28,95 @@ def test_git_error(): ) assert error.brief == "Git operation failed." assert error.details == "Error details." + + +def test_remote_build_timeout_error(): + """Test RemoteBuildTimeoutError.""" + error = errors.RemoteBuildTimeoutError( + recovery_command="craftapp remote-build --recover --build-id test-id" + ) + + assert str(error) == ( + "Remote build command timed out.\nBuild may still be running on Launchpad and " + "can be recovered with 'craftapp remote-build --recover --build-id test-id'." + ) + assert repr(error) == ( + "RemoteBuildTimeoutError(brief='Remote build command timed out.', " + 'details="Build may still be running on Launchpad and can be recovered with ' + "'craftapp remote-build --recover --build-id test-id'.\")" + ) + assert error.brief == "Remote build command timed out." + assert error.details == ( + "Build may still be running on Launchpad and can be recovered with " + "'craftapp remote-build --recover --build-id test-id'." + ) + + +def test_launchpad_https_error(): + """Test LaunchpadHttpsError.""" + error = errors.LaunchpadHttpsError() + + assert str(error) == ( + "Failed to connect to Launchpad API service.\n" + "Verify connectivity to https://api.launchpad.net and retry build." 
+ ) + assert repr(error) == ( + "LaunchpadHttpsError(brief='Failed to connect to Launchpad API service.', " + "details='Verify connectivity to https://api.launchpad.net and retry build.')" + ) + + assert error.brief == "Failed to connect to Launchpad API service." + assert error.details == ( + "Verify connectivity to https://api.launchpad.net and retry build." + ) + + +def test_unsupported_architecture_error(): + """Test UnsupportedArchitectureError.""" + error = errors.UnsupportedArchitectureError(architectures=["amd64", "arm64"]) + + assert str(error) == ( + "Architecture not supported by the remote builder.\nThe following " + "architectures are not supported by the remote builder: ['amd64', 'arm64'].\n" + "Please remove them from the architecture list and try again." + ) + assert repr(error) == ( + "UnsupportedArchitectureError(brief='Architecture not supported by the remote " + "builder.', details=\"The following architectures are not supported by the " + "remote builder: ['amd64', 'arm64'].\\nPlease remove them from the " + 'architecture list and try again.")' + ) + + assert error.brief == "Architecture not supported by the remote builder." + assert error.details == ( + "The following architectures are not supported by the remote builder: " + "['amd64', 'arm64'].\nPlease remove them from the architecture list and " + "try again." + ) + + +def test_accept_public_upload_error(): + """Test AcceptPublicUploadError.""" + error = errors.AcceptPublicUploadError() + + assert str(error) == ( + "Cannot upload data to build servers.\nRemote build needs explicit " + "acknowledgement that data sent to build servers is public.\n" + "In non-interactive runs, please use the option " + "`--launchpad-accept-public-upload`." + ) + assert repr(error) == ( + "AcceptPublicUploadError(brief='Cannot upload data to build servers.', " + "details='Remote build needs explicit acknowledgement that data sent to build " + "servers is public.\\n" + "In non-interactive runs, please use the option " + "`--launchpad-accept-public-upload`.')" + ) + + assert error.brief == "Cannot upload data to build servers." + assert error.details == ( + "Remote build needs explicit acknowledgement that data sent to build servers " + "is public.\n" + "In non-interactive runs, please use the option " + "`--launchpad-accept-public-upload`." + ) diff --git a/tests/unit/remote/test_git.py b/tests/unit/remote/test_git.py index b1f55bee5d..ebff27e1d6 100644 --- a/tests/unit/remote/test_git.py +++ b/tests/unit/remote/test_git.py @@ -19,6 +19,7 @@ import re from pathlib import Path +from unittest.mock import ANY import pygit2 import pytest @@ -358,6 +359,53 @@ def test_push_url_refspec_unknown_ref(new_dir): ) +@pytest.mark.parametrize( + ("url", "expected_url"), + [ + # no-op if token is not in url + ("fake-url", "fake-url"), + # hide single occurrence of the token + ("fake-url/test-token", "fake-url/"), + # hide multiple occurrences of the token + ("fake-url/test-token/test-token", "fake-url//"), + ], +) +def test_push_url_hide_token(url, expected_url, mocker, new_dir): + """Hide the token in the log and error output.""" + mock_logs = mocker.patch("logging.Logger.debug") + + repo = GitRepo(new_dir) + (repo.path / "test-file").touch() + repo.add_all() + repo.commit() + expected_error_details = ( + f"Could not push 'HEAD' to {expected_url!r} with refspec " + "'.*:refs/heads/test-branch' for the git repository " + f"in {str(new_dir)!r}." 
+ ) + + with pytest.raises(GitError) as raised: + repo.push_url( + remote_url=url, + remote_branch="test-branch", + token="test-token", + ) + + # token should be hidden in the log output + mock_logs.assert_called_with( + # The last argument is the refspec `.*:refs/heads/test-branch`, which can only + # be asserted with regex. It is not relevant to this test, so `ANY` is used. + "Pushing %r to remote %r with refspec %r.", + "HEAD", + expected_url, + ANY, + ) + + # token should be hidden in the error message + assert raised.value.details is not None + assert re.match(expected_error_details, raised.value.details) + + def test_push_url_refspec_git_error(mocker, new_dir): """Raise an error if git fails when looking for a refspec.""" mocker.patch( diff --git a/tests/unit/remote/test_launchpad.py b/tests/unit/remote/test_launchpad.py new file mode 100644 index 0000000000..42a549124f --- /dev/null +++ b/tests/unit/remote/test_launchpad.py @@ -0,0 +1,543 @@ +# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*- +# +# Copyright (C) 2019, 2023 Canonical Ltd +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 3 as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . + +from datetime import datetime, timedelta, timezone +from pathlib import Path +from typing import Optional +from unittest.mock import ANY, MagicMock, Mock, call, patch + +import pytest + +from snapcraft.remote import LaunchpadClient, errors + + +class FakeLaunchpadObject: + """Mimic behavior of many launchpad objects.""" + + def __init__(self): + pass + + def __setitem__(self, key, value): + self.__setattr__(key, value) + + def __getitem__(self, key): + return self.__getattribute__(key) + + +class BuildImpl(FakeLaunchpadObject): + """Fake build implementation.""" + + def __init__(self, fake_arch="i386"): + self._fake_arch = fake_arch + self.getFileUrls_mock = Mock( + return_value=[f"url_for/snap_file_{self._fake_arch}.snap"] + ) + + def getFileUrls(self, *args, **kw): + return self.getFileUrls_mock(*args, **kw) + + +class SnapBuildEntryImpl(FakeLaunchpadObject): + """Fake snap build entry.""" + + def __init__( + self, + arch_tag="", + buildstate="", + self_link="", + build_log_url: Optional[str] = "", + ): + self.arch_tag = arch_tag + self.buildstate = buildstate + self.self_link = self_link + self.build_log_url = build_log_url + + +class SnapBuildsImpl(FakeLaunchpadObject): + """Fake snap builds.""" + + def __init__(self): + self.entries = [ + SnapBuildEntryImpl( + arch_tag="i386", + buildstate="Successfully built", + self_link="http://build_self_link_1", + build_log_url="url_for/build_log_file_1", + ), + SnapBuildEntryImpl( + arch_tag="amd64", + buildstate="Failed to build", + self_link="http://build_self_link_2", + build_log_url="url_for/build_log_file_2", + ), + SnapBuildEntryImpl( + arch_tag="arm64", + buildstate="Failed to build", + self_link="http://build_self_link_2", + build_log_url=None, + ), + ] + + +class SnapBuildReqImpl(FakeLaunchpadObject): + """Fake snap build requests.""" + + def __init__( + self, + status="Completed", + error_message="", + self_link="http://request_self_link/1234", + 
builds_collection_link="http://builds_collection_link", + ): + self.status = status + self.error_message = error_message + self.self_link = self_link + self.builds_collection_link = builds_collection_link + self.builds = SnapBuildsImpl() + + def lp_refresh(self): + pass + + +class SnapImpl(FakeLaunchpadObject): + """Fake snap.""" + + def __init__(self, builds_collection_link="http://builds_collection_link"): + self._req = SnapBuildReqImpl() + self.lp_delete_mock = Mock() + self.requestBuilds_mock = Mock(return_value=self._req) + self.builds_collection_link = builds_collection_link + + def lp_delete(self, *args, **kw): + return self.lp_delete_mock(*args, **kw) + + def requestBuilds(self, *args, **kw): + return self.requestBuilds_mock(*args, **kw) + + +class SnapsImpl(FakeLaunchpadObject): + """Fake snaps.""" + + def __init__(self): + self._snap = SnapImpl() + self.getByName_mock = Mock(return_value=self._snap) + self.new_mock = Mock(return_value=self._snap) + + def getByName(self, *args, **kw): + return self.getByName_mock(*args, **kw) + + def new(self, *args, **kw): + return self.new_mock(*args, **kw) + + +class GitImpl(FakeLaunchpadObject): + """Fake git.""" + + def __init__(self): + self.issueAccessToken_mock = Mock(return_value="access-token") + self.lp_delete_mock = Mock() + + def issueAccessToken(self, *args, **kw): + return self.issueAccessToken_mock(*args, **kw) + + def lp_delete(self, *args, **kw): + return self.lp_delete_mock(*args, **kw) + + +class GitRepositoriesImpl(FakeLaunchpadObject): + """Fake git repositories.""" + + def __init__(self): + self._git = GitImpl() + self.new_mock = Mock(return_value=self._git) + self.getByPath_mock = Mock(return_value=self._git) + + def getByPath(self, *args, **kw): + return self.getByPath_mock(*args, **kw) + + def new(self, *args, **kw): + return self.new_mock(*args, **kw) + + +class DistImpl(FakeLaunchpadObject): + """Fake distributions.""" + + def __init__(self): + self.main_archive = "main_archive" + + +class MeImpl(FakeLaunchpadObject): + """Fake 'me' object.""" + + def __init__(self): + self.name = "user" + + +class LaunchpadImpl(FakeLaunchpadObject): + """Fake implementation of the Launchpad object.""" + + def __init__(self): + self._login_mock = Mock() + self._load_mock = Mock() + self._rbi = SnapBuildReqImpl() + + self.git_repositories = GitRepositoriesImpl() + self.snaps = SnapsImpl() + self.people = {"user": "/~user"} + self.distributions = {"ubuntu": DistImpl()} + self.rbi = self._rbi + self.me = MeImpl() + + def load(self, url: str, *args, **kw): + self._load_mock(url, *args, **kw) + if "/+build-request/" in url: + return self._rbi + if "http://build_self_link_1" in url: + return BuildImpl(fake_arch="i386") + if "http://build_self_link_2" in url: + return BuildImpl(fake_arch="amd64") + return self._rbi.builds + + +@pytest.fixture() +def mock_git_repo(mocker): + """Returns a mocked GitRepo.""" + return mocker.patch("snapcraft.remote.launchpad.GitRepo") + + +@pytest.fixture() +def mock_login_with(mocker): + """Mock for launchpadlib's `login_with()`.""" + lp = LaunchpadImpl() + return mocker.patch("launchpadlib.launchpad.Launchpad.login_with", return_value=lp) + + +@pytest.fixture() +def launchpad_client(mock_login_with): + """Returns a LaunchpadClient object.""" + return LaunchpadClient( + app_name="test-app", + build_id="id", + project_name="test-project", + architectures=[], + ) + + +def test_login(mock_login_with): + lpc = LaunchpadClient( + app_name="test-app", + build_id="id", + project_name="test-project", + architectures=[], 
+ ) + + assert lpc.user == "user" + + assert mock_login_with.called_with( + "test-app remote-build", + "production", + ANY, + credentials_file=ANY, + version="devel", + ) + + +@pytest.mark.parametrize("error", [ConnectionRefusedError, TimeoutError]) +def test_login_connection_issues(error, mock_login_with): + mock_login_with.side_effect = error + + with pytest.raises(errors.LaunchpadHttpsError): + LaunchpadClient( + app_name="test-app", + build_id="id", + project_name="test-project", + architectures=[], + ) + + mock_login_with.assert_called() + + +def test_load_connection_refused(launchpad_client, mock_login_with): + """ConnectionRefusedError should surface.""" + launchpad_client._lp._load_mock.side_effect = ConnectionRefusedError + + with pytest.raises(ConnectionRefusedError): + launchpad_client._lp_load_url("foo") + + mock_login_with.assert_called() + + +def test_load_connection_reset_once(launchpad_client, mock_login_with): + """Load URL should work OK after single connection reset.""" + launchpad_client._lp._load_mock.side_effect = [ConnectionResetError, None] + launchpad_client._lp_load_url(url="foo") + + mock_login_with.assert_called() + + +def test_load_connection_reset_twice(launchpad_client, mock_login_with): + """Load URL should fail with two connection resets.""" + launchpad_client._lp._load_mock.side_effect = [ + ConnectionResetError, + ConnectionResetError, + ] + + with pytest.raises(ConnectionResetError): + launchpad_client._lp_load_url("foo") + + mock_login_with.assert_called() + + +def test_create_snap(launchpad_client): + launchpad_client._create_snap() + launchpad_client._lp.snaps.new_mock.assert_called_with( + auto_build=False, + auto_build_archive="/ubuntu/+archive/primary", + auto_build_pocket="Updates", + git_path="main", + git_repository_url="https://user@git.launchpad.net/~user/+git/id/", + name="id", + owner="/~user", + ) + + +def test_create_snap_with_archs(launchpad_client): + launchpad_client.architectures = ["arch1", "arch2"] + launchpad_client._create_snap() + launchpad_client._lp.snaps.new_mock.assert_called_with( + auto_build=False, + auto_build_archive="/ubuntu/+archive/primary", + auto_build_pocket="Updates", + git_path="main", + git_repository_url="https://user@git.launchpad.net/~user/+git/id/", + name="id", + owner="/~user", + processors=["/+processors/arch1", "/+processors/arch2"], + ) + + +def test_delete_snap(launchpad_client): + launchpad_client._delete_snap() + launchpad_client._lp.snaps.getByName_mock.assert_called_with( + name="id", owner="/~user" + ) + + +def test_start_build(launchpad_client): + launchpad_client.start_build() + + +def test_start_build_error(mocker, launchpad_client): + mocker.patch( + "tests.unit.remote.test_launchpad.SnapImpl.requestBuilds", + return_value=SnapBuildReqImpl( + status="Failed", error_message="snapcraft.yaml not found..." + ), + ) + with pytest.raises(errors.RemoteBuildError) as raised: + launchpad_client.start_build() + + assert str(raised.value) == "snapcraft.yaml not found..." 
+ + +def test_start_build_deadline_not_reached(mock_login_with, mocker): + """Do not raise an error if the deadline has not been reached.""" + + def lp_refresh(self): + """Update the status from Pending to Completed when refreshed.""" + self.status = "Completed" + + mocker.patch( + "tests.unit.remote.test_launchpad.SnapImpl.requestBuilds", + return_value=SnapBuildReqImpl(status="Pending", error_message=""), + ) + mocker.patch.object(SnapBuildReqImpl, "lp_refresh", lp_refresh) + mocker.patch("time.time", return_value=500) + + lpc = LaunchpadClient( + app_name="test-app", + build_id="id", + project_name="test-project", + architectures=[], + timeout=100, + ) + + lpc.start_build() + + +def test_start_build_timeout_error(mock_login_with, mocker): + """Raise an error if the build times out.""" + mocker.patch( + "tests.unit.remote.test_launchpad.SnapImpl.requestBuilds", + return_value=SnapBuildReqImpl(status="Pending", error_message=""), + ) + mocker.patch("time.time", return_value=500) + lpc = LaunchpadClient( + app_name="test-app", + build_id="id", + project_name="test-project", + architectures=[], + timeout=100, + ) + # advance 1 second past deadline + mocker.patch("time.time", return_value=601) + + with pytest.raises(errors.RemoteBuildTimeoutError) as raised: + lpc.start_build() + + assert str(raised.value) == ( + "Remote build command timed out.\nBuild may still be running on Launchpad and " + "can be recovered with 'test-app remote-build --recover --build-id id'." + ) + + +def test_issue_build_request_defaults(launchpad_client): + fake_snap = MagicMock() + + launchpad_client._issue_build_request(fake_snap) + + assert fake_snap.mock_calls == [ + call.requestBuilds( + archive="main_archive", + pocket="Updates", + ) + ] + + +@patch("snapcraft.remote.LaunchpadClient._download_file") +def test_monitor_build(mock_download_file, new_dir, launchpad_client): + Path("test-project_i386.txt", encoding="utf-8").touch() + Path("test-project_i386.1.txt", encoding="utf-8").touch() + + launchpad_client.start_build() + launchpad_client.monitor_build(interval=0) + + assert mock_download_file.mock_calls == [ + call(url="url_for/snap_file_i386.snap", dst="snap_file_i386.snap"), + call( + url="url_for/build_log_file_1", dst="test-project_i386.2.txt", gunzip=True + ), + call(url="url_for/snap_file_amd64.snap", dst="snap_file_amd64.snap"), + call(url="url_for/build_log_file_2", dst="test-project_amd64.txt", gunzip=True), + call(url="url_for/snap_file_amd64.snap", dst="snap_file_amd64.snap"), + ] + + +@patch("snapcraft.remote.LaunchpadClient._download_file") +@patch("logging.Logger.error") +def test_monitor_build_error(mock_log, mock_download_file, mocker, launchpad_client): + mocker.patch( + "tests.unit.remote.test_launchpad.BuildImpl.getFileUrls", return_value=[] + ) + launchpad_client.start_build() + launchpad_client.monitor_build(interval=0) + + assert mock_download_file.mock_calls == [ + call(url="url_for/build_log_file_1", dst="test-project_i386.txt", gunzip=True), + call(url="url_for/build_log_file_2", dst="test-project_amd64.txt", gunzip=True), + ] + + assert mock_log.mock_calls == [ + call("Snap file not available for arch %r.", "i386"), + call("Snap file not available for arch %r.", "amd64"), + call("Build failed for arch %r.", "amd64"), + call("Snap file not available for arch %r.", "arm64"), + call("Build failed for arch %r.", "arm64"), + ] + + +def test_monitor_build_deadline_not_reached(mock_login_with, mocker): + """Do not raise an error if the deadline has not been reached.""" + 
mocker.patch("snapcraft.remote.LaunchpadClient._download_file") + lpc = LaunchpadClient( + app_name="test-app", + build_id="id", + project_name="test-project", + architectures=[], + timeout=100, + ) + + lpc.start_build() + lpc.monitor_build(interval=0) + + +def test_monitor_build_timeout_error(mock_login_with, mocker): + """Raise an error if the build times out.""" + mocker.patch("snapcraft.remote.LaunchpadClient._download_file") + mocker.patch("time.time", return_value=500) + lpc = LaunchpadClient( + app_name="test-app", + build_id="id", + project_name="test-project", + architectures=[], + timeout=100, + ) + # advance 1 second past deadline + mocker.patch("time.time", return_value=601) + + lpc.start_build() + + with pytest.raises(errors.RemoteBuildTimeoutError) as raised: + lpc.monitor_build(interval=0) + + assert str(raised.value) == ( + "Remote build command timed out.\nBuild may still be running on Launchpad and " + "can be recovered with 'test-app remote-build --recover --build-id id'." + ) + + +def test_get_build_status(launchpad_client): + launchpad_client.start_build() + build_status = launchpad_client.get_build_status() + + assert build_status == { + "amd64": "Failed to build", + "arm64": "Failed to build", + "i386": "Successfully built", + } + + +def test_push_source_tree(new_dir, mock_git_repo, launchpad_client): + now = datetime.now(timezone.utc) + + with patch("snapcraft.remote.launchpad.datetime") as mock_datetime: + mock_datetime.now = lambda tz: now + launchpad_client.push_source_tree(Path()) + + launchpad_client._lp.git_repositories._git.issueAccessToken_mock.assert_called_once_with( + description="test-app remote-build for id", + scopes=["repository:push"], + date_expires=(now + timedelta(minutes=1)).isoformat(), + ) + + mock_git_repo.assert_has_calls( + [ + call(Path()), + call().push_url( + "https://user:access-token@git.launchpad.net/~user/+git/id/", + "main", + "HEAD", + "access-token", + ), + ] + ) + + +def test_push_source_tree_error(new_dir, mock_git_repo, launchpad_client): + mock_git_repo.return_value.push_url.side_effect = errors.GitError("test error") + + with pytest.raises(errors.GitError): + launchpad_client.push_source_tree(Path()) diff --git a/tests/unit/remote/test_remote_builder.py b/tests/unit/remote/test_remote_builder.py new file mode 100644 index 0000000000..a78451174f --- /dev/null +++ b/tests/unit/remote/test_remote_builder.py @@ -0,0 +1,192 @@ +# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*- +# +# Copyright 2023 Canonical Ltd. +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 3 as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . 
+ +"""Remote builder tests.""" + +import re +from pathlib import Path +from unittest.mock import call, patch + +import pytest + +from snapcraft.remote import RemoteBuilder, UnsupportedArchitectureError +from snapcraft.remote.utils import _SUPPORTED_ARCHS + + +@pytest.fixture(autouse=True) +def mock_launchpad_client(mocker): + """Returns a mocked LaunchpadClient.""" + _mock_launchpad_client = mocker.patch( + "snapcraft.remote.remote_builder.LaunchpadClient" + ) + _mock_launchpad_client.return_value.has_outstanding_build.return_value = False + _mock_launchpad_client.return_value.architectures = ["amd64"] + return _mock_launchpad_client + + +@pytest.fixture(autouse=True) +def mock_worktree(mocker): + """Returns a mocked WorkTree.""" + return mocker.patch("snapcraft.remote.remote_builder.WorkTree") + + +@pytest.fixture() +def fake_remote_builder(new_dir, mock_launchpad_client, mock_worktree): + """Returns a fake RemoteBuilder.""" + return RemoteBuilder( + app_name="test-app", + build_id="test-build-id", + project_name="test-project", + architectures=["amd64"], + project_dir=Path(), + timeout=0, + ) + + +def test_remote_builder_init(mock_launchpad_client, mock_worktree): + """Verify remote builder is properly initialized.""" + RemoteBuilder( + app_name="test-app", + build_id="test-build-id", + project_name="test-project", + architectures=["amd64"], + project_dir=Path(), + timeout=10, + ) + + assert mock_launchpad_client.mock_calls == [ + call( + app_name="test-app", + build_id="test-build-id", + project_name="test-project", + architectures=["amd64"], + timeout=10, + ) + ] + assert mock_worktree.mock_calls == [ + call(app_name="test-app", build_id="test-build-id", project_dir=Path()) + ] + + +@pytest.mark.usefixtures("new_dir") +def test_build_id_computed(): + """Compute a build id if it is not provided.""" + remote_builder = RemoteBuilder( + app_name="test-app", + build_id=None, + project_name="test-project", + architectures=["amd64"], + project_dir=Path(), + timeout=0, + ) + + assert re.match("test-app-test-project-[0-9a-f]{32}", remote_builder.build_id) + + +@pytest.mark.parametrize("archs", (["amd64"], _SUPPORTED_ARCHS)) +def test_validate_architectures_supported(archs): + """Supported architectures should not raise an error.""" + RemoteBuilder( + app_name="test-app", + build_id="test-build-id", + project_name="test-project", + architectures=archs, + project_dir=Path(), + timeout=0, + ) + + +@pytest.mark.parametrize( + "archs", + [ + # unsupported + ["bad"], + # supported and unsupported + ["amd64", "bad"], + # multiple supported and unsupported + ["bad", "amd64", "bad2", "arm64"], + ], +) +def test_validate_architectures_unsupported(archs): + """Raise an error for unsupported architectures.""" + with pytest.raises(UnsupportedArchitectureError): + RemoteBuilder( + app_name="test-app", + build_id="test-build-id", + project_name="test-project", + architectures=archs, + project_dir=Path(), + timeout=0, + ) + + +@patch("logging.Logger.info") +def test_print_status_builds_found( + mock_log, mock_launchpad_client, fake_remote_builder +): + """Print the status of a remote build.""" + mock_launchpad_client.return_value.has_outstanding_build.return_value = True + mock_launchpad_client.return_value.get_build_status.return_value = { + "amd64": "Needs building", + "arm64": "Currently building", + } + + fake_remote_builder.print_status() + + assert mock_log.mock_calls == [ + call("Build status for arch %s: %s", "amd64", "Needs building"), + call("Build status for arch %s: %s", "arm64", "Currently 
building"), + ] + + +@patch("logging.Logger.info") +def test_print_status_no_build_found(mock_log, fake_remote_builder): + """Print the status of a remote build.""" + fake_remote_builder.print_status() + + assert mock_log.mock_calls == [call("No build found.")] + + +@pytest.mark.parametrize("has_builds", (True, False)) +def test_has_outstanding_build(has_builds, fake_remote_builder, mock_launchpad_client): + """Check for outstanding builds.""" + mock_launchpad_client.return_value.has_outstanding_build.return_value = has_builds + + assert fake_remote_builder.has_outstanding_build() == has_builds + + +def test_monitor_build(fake_remote_builder, mock_launchpad_client): + """Monitor a build.""" + fake_remote_builder.monitor_build() + + mock_launchpad_client.return_value.monitor_build.assert_called_once() + + +def test_clean_build(fake_remote_builder, mock_launchpad_client, mock_worktree): + """Clean a build.""" + fake_remote_builder.clean_build() + + mock_launchpad_client.return_value.cleanup.assert_called_once() + mock_worktree.return_value.clean_cache.assert_called_once() + + +def test_start_build(fake_remote_builder, mock_launchpad_client, mock_worktree): + """Start a build.""" + fake_remote_builder.start_build() + + mock_worktree.return_value.init_repo.assert_called_once() + mock_launchpad_client.return_value.push_source_tree.assert_called_once() + mock_launchpad_client.return_value.start_build.assert_called_once() diff --git a/tests/unit/remote/test_utils.py b/tests/unit/remote/test_utils.py index 83b47665c2..18068267e8 100644 --- a/tests/unit/remote/test_utils.py +++ b/tests/unit/remote/test_utils.py @@ -21,7 +21,93 @@ import pytest -from snapcraft.remote import get_build_id +from snapcraft.remote import ( + UnsupportedArchitectureError, + get_build_id, + humanize_list, + rmtree, + validate_architectures, +) +from snapcraft.remote.utils import _SUPPORTED_ARCHS + +############################### +# validate architecture tests # +############################### + + +@pytest.mark.parametrize(("archs"), [["amd64"], _SUPPORTED_ARCHS]) +def test_validate_architectures(archs): + """Validate architectures.""" + assert validate_architectures(archs) is None + + +@pytest.mark.parametrize( + ("archs", "expected_archs"), + [ + # invalid arch + (["unknown"], ["unknown"]), + # valid and invalid archs + (["amd64", "unknown"], ["unknown"]), + # multiple invalid archs + (["unknown1", "unknown2"], ["unknown1", "unknown2"]), + # multiple valid and invalid archs + (["unknown1", "unknown2"], ["unknown1", "unknown2"]), + ], +) +def test_validate_architectures_error(archs, expected_archs): + """Raise an error if an unsupported architecture is passed.""" + with pytest.raises(UnsupportedArchitectureError) as raised: + validate_architectures(archs) + + assert ( + "The following architectures are not supported by the remote builder: " + f"{expected_archs}" + ) in str(raised.value) + + +################# +# Humanize List # +################# + + +@pytest.mark.parametrize( + "items,conjunction,expected", + ( + ([], "and", ""), + (["foo"], "and", "'foo'"), + (["foo", "bar"], "and", "'bar' and 'foo'"), + (["foo", "bar", "baz"], "and", "'bar', 'baz', and 'foo'"), + (["foo", "bar", "baz", "qux"], "and", "'bar', 'baz', 'foo', and 'qux'"), + ([], "or", ""), + (["foo"], "or", "'foo'"), + (["foo", "bar"], "or", "'bar' or 'foo'"), + (["foo", "bar", "baz"], "or", "'bar', 'baz', or 'foo'"), + (["foo", "bar", "baz", "qux"], "or", "'bar', 'baz', 'foo', or 'qux'"), + ), +) +def test_humanize_list(items, conjunction, expected): + 
"""Test humanize_list.""" + assert humanize_list(items, conjunction) == expected + + +def test_humanize_list_sorted(): + """Verify `sort` parameter.""" + input_list = ["z", "a", "m test", "1"] + + # unsorted list is in the same order as the original list + expected_list_unsorted = "'z', 'a', 'm test', and '1'" + + # sorted list is sorted alphanumerically + expected_list_sorted = "'1', 'a', 'm test', and 'z'" + + assert humanize_list(input_list, "and") == expected_list_sorted + assert humanize_list(input_list, "and", sort=True) == expected_list_sorted + assert humanize_list(input_list, "and", sort=False) == expected_list_unsorted + + +################## +# build id tests # +################## @pytest.mark.usefixtures("new_dir") @@ -103,3 +189,36 @@ def test_get_build_id_directory_is_not_a_directory_error(): f"Could not compute hash because {str(Path('regular-file').absolute())} " "is not a directory." ) + + +################ +# rmtree tests # +################ + + +@pytest.fixture() +def stub_directory_tree(new_dir): + """Creates a tree of directories and files.""" + root_dir = Path("root-dir") + (root_dir / "dir1/dir2").mkdir(parents=True, exist_ok=True) + (root_dir / "dir3").mkdir(parents=True, exist_ok=True) + (root_dir / "file1").touch() + (root_dir / "dir1/file2").touch() + (root_dir / "dir1/dir2/file3").touch() + return root_dir + + +def test_rmtree(stub_directory_tree): + """Remove a directory tree.""" + rmtree(stub_directory_tree) + + assert not Path(stub_directory_tree).exists() + + +def test_rmtree_readonly(stub_directory_tree): + """Remove a directory tree that contains a read-only file.""" + (stub_directory_tree / "read-only-file").touch(mode=0o444) + + rmtree(stub_directory_tree) + + assert not Path(stub_directory_tree).exists() diff --git a/tests/unit/remote/test_worktree.py b/tests/unit/remote/test_worktree.py new file mode 100644 index 0000000000..ebf0068d8c --- /dev/null +++ b/tests/unit/remote/test_worktree.py @@ -0,0 +1,91 @@ +# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*- +# +# Copyright (C) 2023 Canonical Ltd +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 3 as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . 
+
+"""Unit tests for the worktree module."""
+
+from pathlib import Path
+from unittest.mock import call
+
+import pytest
+
+from snapcraft.remote import WorkTree
+
+
+@pytest.fixture(autouse=True)
+def mock_git_repo(mocker):
+    """Returns a mocked GitRepo."""
+    return mocker.patch("snapcraft.remote.worktree.GitRepo")
+
+
+@pytest.fixture(autouse=True)
+def mock_base_directory(mocker, new_dir):
+    """Returns a mocked `xdg.BaseDirectory`."""
+    _mock_base_directory = mocker.patch("snapcraft.remote.worktree.BaseDirectory")
+    _mock_base_directory.save_cache_path.return_value = new_dir
+    return _mock_base_directory
+
+
+@pytest.fixture(autouse=True)
+def mock_copytree(mocker):
+    """Returns a mocked `shutil.copytree()`."""
+    return mocker.patch("snapcraft.remote.worktree.copytree")
+
+
+def test_worktree_init_clean(
+    mock_base_directory, mock_copytree, mock_git_repo, new_dir
+):
+    """Test initialization of a WorkTree with a clean git repository."""
+    mock_git_repo.return_value.is_clean.return_value = True
+
+    worktree = WorkTree(app_name="test-app", build_id="test-id", project_dir=Path())
+    worktree.init_repo()
+
+    assert isinstance(worktree, WorkTree)
+    mock_base_directory.save_cache_path.assert_called_with(
+        "test-app", "remote-build", "test-id"
+    )
+    assert mock_git_repo.mock_calls == [
+        call(Path().resolve() / "repo"),
+        call().is_clean(),
+    ]
+
+
+def test_worktree_init_dirty(
+    mock_base_directory, mock_copytree, mock_git_repo, new_dir
+):
+    """Test initialization of a WorkTree with a dirty git repository."""
+    mock_git_repo.return_value.is_clean.return_value = False
+
+    worktree = WorkTree(app_name="test-app", build_id="test-id", project_dir=Path())
+    worktree.init_repo()
+
+    assert isinstance(worktree, WorkTree)
+    mock_base_directory.save_cache_path.assert_called_with(
+        "test-app", "remote-build", "test-id"
+    )
+    assert mock_git_repo.mock_calls == [
+        call(Path().resolve() / "repo"),
+        call().is_clean(),
+        call().add_all(),
+        call().commit(),
+    ]
+
+
+def test_worktree_repo_dir(new_dir):
+    """Verify the `repo_dir` property."""
+    worktree = WorkTree(app_name="test-app", build_id="test-id", project_dir=Path())
+
+    assert worktree.repo_dir == Path().resolve() / "repo"
diff --git a/tests/unit/test_pack.py b/tests/unit/test_pack.py
index 8a729b1426..38920a5d2a 100644
--- a/tests/unit/test_pack.py
+++ b/tests/unit/test_pack.py
@@ -14,7 +14,6 @@
 # You should have received a copy of the GNU General Public License
 # along with this program.  If not, see <http://www.gnu.org/licenses/>.

-import subprocess
 from unittest.mock import call

 import pytest
@@ -283,11 +282,22 @@ def test_pack_snap_use_output_name_over_name_version_arch(mocker, new_dir):
     ]


-def test_pack_snap_error(mocker, new_dir):
-    mocker.patch("subprocess.run", side_effect=subprocess.CalledProcessError(42, "cmd"))
+def test_pack_snap_error(mocker, new_dir, fake_process):
+    fake_process.register_subprocess(
+        ["snap", "pack", "--check-skeleton", str(new_dir)],
+        stdout=b"xxxx",
+        stderr=b'error: cannot validate snap "pack-error": '
+        b'invalid definition of application "pack-error": '
+        b"app description field 'command' contains illegal "
+        b"\"pack-error foo=bar\" (legal: '^[A-Za-z0-9/. _#:$-]*$')",
+        returncode=1,
+    )

     with pytest.raises(errors.SnapcraftError) as raised:
         pack.pack_snap(new_dir, output=str(new_dir))

     assert str(raised.value) == (
-        "Cannot pack snap file: Command 'cmd' returned non-zero exit status 42."
+ """Cannot pack snap: error: cannot validate snap "pack-error": """ + """invalid definition of application "pack-error": """ + """app description field 'command' contains illegal "pack-error foo=bar" """ + """(legal: '^[A-Za-z0-9/. _#:$-]*$')""" ) diff --git a/tests/unit/test_projects.py b/tests/unit/test_projects.py index 627400836f..5a2a9b5758 100644 --- a/tests/unit/test_projects.py +++ b/tests/unit/test_projects.py @@ -361,7 +361,7 @@ def test_project_grade_assignment(self, grade, project_yaml_data): else: error = ".*unexpected value; permitted: 'stable', 'devel'" with pytest.raises(pydantic.ValidationError, match=error): - project.grade = grade + project.grade = grade # type: ignore def test_project_summary_valid(self, project_yaml_data): summary = "x" * 78 diff --git a/tools/brew_install_from_source.py b/tools/brew_install_from_source.py index 22ca41d371..95feb6ab4f 100755 --- a/tools/brew_install_from_source.py +++ b/tools/brew_install_from_source.py @@ -51,7 +51,7 @@ def download_snapcraft_source(dest_dir): os.environ.get("TRAVIS_PULL_REQUEST_BRANCH") or "master", ) print("Downloading branch source from {}".format(branch_source)) - urllib.request.urlretrieve(branch_source, dest_file) + urllib.request.urlretrieve(branch_source, dest_file) # noqa S310 return dest_file @@ -68,7 +68,7 @@ def download_brew_formula(destination_path): "https://raw.githubusercontent.com/Homebrew/homebrew-core/master/" "Formula/snapcraft.rb" ) - urllib.request.urlretrieve(brew_formula_url, destination_path) + urllib.request.urlretrieve(brew_formula_url, destination_path) # noqa S310 def patch_brew_formula_source( diff --git a/tools/environment-setup.sh b/tools/environment-setup.sh index 24abffb7a0..dfe52a4477 100755 --- a/tools/environment-setup.sh +++ b/tools/environment-setup.sh @@ -10,7 +10,7 @@ fi # Create the container. if ! lxc info snapcraft-dev >/dev/null 2>&1; then - lxc init ubuntu:20.04 snapcraft-dev + lxc init ubuntu:22.04 snapcraft-dev fi if ! lxc config get snapcraft-dev raw.idmap | grep -q "both $UID 1000"; then lxc config set snapcraft-dev raw.idmap "both $UID 1000" diff --git a/tools/version.py b/tools/version.py index b8630b5f4e..672d508a92 100755 --- a/tools/version.py +++ b/tools/version.py @@ -34,8 +34,10 @@ def determine_version(): subprocess.run( ["git", "describe", "--always", "--long"], stdout=subprocess.PIPE, + check=False, + text=True, ) - .stdout.decode() + .stdout .strip() ) diff --git a/tox.ini b/tox.ini index efecf47a67..e0a4d9bf89 100644 --- a/tox.ini +++ b/tox.ini @@ -15,8 +15,8 @@ [tox] env_list = # Environments to run when called with no parameters. lint-{black,ruff,isort,mypy,pylint,pyright,shellcheck,codespell,yaml} - test-py38 - test-legacy-py38 + test-py310 + test-legacy-py310 minversion = 4.5 # Tox will use these requirements to bootstrap a venv if necessary. # tox-igore-env-name-mismatch allows us to have one virtualenv for all linting. @@ -27,6 +27,8 @@ minversion = 4.5 requires = # renovate: datasource=pypi tox-ignore-env-name-mismatch==0.2.0.post2 + # renovate: datasource=pypi + tox-gh==1.3.1 # Allow tox to access the user's $TMPDIR environment variable if set. # This workaround is required to avoid circular dependencies for TMPDIR, # since tox will otherwise attempt to use the environment's TMPDIR variable. 
@@ -48,18 +50,18 @@ package = wheel allowlist_externals = mkdir commands_pre = mkdir -p results -[testenv:test-{py38,py39,py310,py311,py312}] # Configuration for all tests using pytest +[testenv:test-{py39,py310,py311,py312}] # Configuration for all tests using pytest base = testenv, test description = Run unit tests with pytest labels = - py38, py310, py311: tests, unit-tests + py310, py311: tests, unit-tests commands = pytest {tty:--color=yes} --cov=snapcraft --cov-report=xml:results/coverage-{env_name}.xml --junit-xml=results/test-results-{env_name}.xml {posargs:tests/unit} -[testenv:test-legacy-{py38,py39,py310,py311,py312}] +[testenv:test-legacy-{py39,py310,py311,py312}] base = testenv, test description = Run legacy tests with pytest labels = - py38, py310, py311: tests, integration-tests + py310, py311: tests, integration-tests commands = pytest {tty:--color=yes} --cov=snapcraft_legacy --cov-report=xml:results/coverage-{env_name}.xml --junit-xml=results/test-results-{env_name}.xml {posargs:tests/legacy} [testenv:test-noreq] @@ -143,9 +145,8 @@ base = testenv, lint labels = format commands = black: black {tty:--color} {posargs} . - ruff: - ruff --fix --respect-gitignore setup.py snapcraft tests tools - ruff --fix --config snapcraft_legacy/ruff.toml snapcraft_legacy tests/legacy + ruff: ruff --fix --respect-gitignore setup.py snapcraft tests tools + ruff: ruff --fix --config snapcraft_legacy/ruff.toml snapcraft_legacy tests/legacy codespell: codespell --toml {tox_root}/pyproject.toml --write-changes {posargs} [docs] # Sphinx documentation configuration