diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 00000000000000..dc2137f024b9fc --- /dev/null +++ b/.dockerignore @@ -0,0 +1,3 @@ +* +!install_build_dependencies.sh +!scripts/install_dependencies/install_openvino_dependencies.sh diff --git a/.github/actions/handle_docker/action.yml b/.github/actions/handle_docker/action.yml new file mode 100644 index 00000000000000..a0aa99c1442d5b --- /dev/null +++ b/.github/actions/handle_docker/action.yml @@ -0,0 +1,72 @@ +name: 'Handle Docker images' +description: 'Builds, tags and pushes a given Docker image when needed' +inputs: + images: + description: 'Image names (registry name + namespace + base name)' + required: true + registry: + description: 'Docker registry' + required: true + dockerfiles_root_dir: + description: 'Path to dockerfiles root dir relative to repository root' + required: true + push: + description: 'Push built images to registry' + required: false + default: 'true' + changed_components: + description: 'Components changed by a pull request' + required: true + +outputs: + images: + description: "Images to use in workflow" + value: ${{ steps.handle_images.outputs.images }} + +runs: + using: 'composite' + steps: + - name: Checkout head + uses: actions/checkout@v4 + + - name: Checkout base + uses: actions/checkout@v4 + with: + ref: ${{ github.base_ref || github.event.merge_group.base_ref }} + sparse-checkout: ${{ inputs.dockerfiles_root_dir }}/docker_tag + path: base + + - name: Install Python dependencies + uses: py-actions/py-dependency-install@v4 + with: + path: "${{ github.action_path }}/requirements.txt" + update-setuptools: "false" + update-wheel: "false" + + - name: Set up Docker Buildx + id: buildx + uses: docker/setup-buildx-action@v3 + + - name: Handle docker images + id: handle_images + shell: bash + run: | + images=$(echo "${{ inputs.images }}" | tr '\n' ',' | sed 's/,*$//') + pr="${{ github.event.pull_request.number }}" + + python3 
.github/actions/handle_docker/get_images_to_build.py \ + -d "${{ inputs.dockerfiles_root_dir }}" \ + -r "${{ inputs.registry }}" \ + --images "$images" \ + --head_tag_file "${{ inputs.dockerfiles_root_dir }}/docker_tag" \ + --base_tag_file "base/${{ inputs.dockerfiles_root_dir }}/docker_tag" \ + --docker_env_changed "${{ fromJSON(inputs.changed_components).docker_env }}" \ + --dockerfiles_changed "${{ fromJSON(inputs.changed_components).dockerfiles }}" \ + --docker_builder "${{ steps.buildx.outputs.name}}" \ + --repo "${{ github.repository }}" \ + --ref_name "${{ github.ref_name }}" \ + $([[ -n $pr ]] && echo "--pr $pr" || echo '-s ${{ github.sha }}') \ + $([[ "${{ inputs.push }}" == "true" ]] && echo "--push" || echo '') + env: + GITHUB_TOKEN: ${{ github.token }} + diff --git a/.github/actions/handle_docker/get_images_to_build.py b/.github/actions/handle_docker/get_images_to_build.py new file mode 100644 index 00000000000000..33fcfa1d839c9d --- /dev/null +++ b/.github/actions/handle_docker/get_images_to_build.py @@ -0,0 +1,126 @@ +import argparse +import json +import re +import sys + +from distutils.util import strtobool +from helpers import * +from images_api import * + + +def parse_args(): + parser = argparse.ArgumentParser(description='Returns list of Docker images to build for a given workflow') + parser.add_argument('-i', '--images', required=True, help='Comma-separated docker images') + parser.add_argument('-d', '--dockerfiles_root', required=True, help='Path to dockerfiles') + parser.add_argument('-r', '--registry', required=True, help='Docker registry name') + parser.add_argument('-s', '--commit', required=False, help='Commit SHA. 
If not set, --pr is used') + parser.add_argument('-b', '--docker_builder', required=False, help='Docker buildx builder name') + parser.add_argument('--pr', type=int, required=False, help='PR number, if event is pull_request') + parser.add_argument('--head_tag_file', default='.github/dockerfiles/docker_tag', help='Head docker tag file path') + parser.add_argument('--base_tag_file', default=None, required=False, help='Base docker tag file path') + parser.add_argument('--ref_name', required=False, default='', help='GitHub ref name') + parser.add_argument('--repo', default='openvinotoolkit/openvino', help='GitHub repository') + parser.add_argument('--docker_env_changed', type=lambda x: bool(strtobool(x)), default=True, + help='Whether PR changes docker env') + parser.add_argument('--dockerfiles_changed', type=lambda x: bool(strtobool(x)), default=True, + help='Whether PR changes dockerfiles') + parser.add_argument('--action_path', default='.github/actions/handle_docker', help='Path to this GitHub action') + parser.add_argument('--push', action='store_true', required=False, help='Whether to push images to registry') + parser.add_argument('--dry_run', action='store_true', required=False, help='Dry run') + args = parser.parse_args() + return args + + +def main(): + init_logger() + logger = logging.getLogger(__name__) + args = parse_args() + for arg, value in sorted(vars(args).items()): + logger.info(f"Argument {arg}: {value}") + + head_tag = Path(args.head_tag_file).read_text().strip() + + base_tag_exists = args.base_tag_file and Path(args.base_tag_file).exists() + base_tag = Path(args.base_tag_file).read_text().strip() if base_tag_exists else None + + all_dockerfiles = Path(args.dockerfiles_root).rglob('**/*/Dockerfile') + + images = ImagesHandler(args.dry_run) + for image in all_dockerfiles: + images.add_from_dockerfile(image, args.dockerfiles_root, args.registry, head_tag, base_tag) + + requested_images = set(args.images.split(',')) + skip_workflow = False + 
missing_only = False + + merge_queue_target_branch = next(iter(re.findall(f'^gh-readonly-queue/(.*)/', args.ref_name)), None) + + if args.pr: + environment_affected = args.docker_env_changed or args.dockerfiles_changed + if environment_affected: + expected_tag = f'pr-{args.pr}' + + if head_tag != expected_tag: + logger.error(f"Please update docker tag in {args.head_tag_file} to {expected_tag}") + sys.exit(1) + + elif merge_queue_target_branch: + environment_affected = head_tag != base_tag + if environment_affected: + logger.info(f"Environment is affected by PR(s) in merge group") + else: + environment_affected = False + + if environment_affected: + changeset = get_changeset(args.repo, args.pr, merge_queue_target_branch, args.commit) + changed_dockerfiles = [p for p in changeset if p.startswith(args.dockerfiles_root) and p.endswith('Dockerfile')] + + if args.docker_env_changed: + logger.info(f"Common docker environment is modified, will build all requested images") + changed_images = requested_images + else: + logger.info(f"Common docker environment is not modified, will build only changed and missing images") + changed_images = set([name_from_dockerfile(d, args.dockerfiles_root) for d in changed_dockerfiles]) + + unchanged_images = requested_images - changed_images + unchanged_with_no_base = images.get_missing(unchanged_images, base=True) + + if unchanged_with_no_base: + logger.info("The following images were unchanged, but will be built anyway since the base for them " + f"is missing in registry: {unchanged_with_no_base}") + + images_to_tag = unchanged_images.difference(unchanged_with_no_base) + images_to_build = requested_images.intersection(changed_images).union(unchanged_with_no_base) + + only_dockerfiles_changed = len(changeset) == len(changed_dockerfiles) + if only_dockerfiles_changed and not images_to_build: + skip_workflow = True + else: + logger.info(f"Environment is not affected, will build only missing images, if any") + images_to_build = 
requested_images + images_to_tag = [] + missing_only = True + + if not images_to_build: + logger.info(f"No images to build, will return the list of pre-built images with a new tag") + + built_images = images.build(images_to_build, missing_only, args.push, args.docker_builder) + if not built_images: + logger.info(f"No images were built, a new tag will be applied to a pre-built base image if needed") + + # When a custom builder is used, it allows to push the image automatically once built. Otherwise, pushing manually + if args.push and not args.docker_builder: + images.push(images_to_build, missing_only) + + if environment_affected and base_tag: + images.tag(images_to_tag) + + images_output = images_to_output(images.get(requested_images)) + set_github_output("images", json.dumps(images_output)) + + if skip_workflow: + logger.info(f"Docker image changes are irrelevant for current workflow, workflow may be skipped") + set_github_output("skip_workflow", str(skip_workflow)) + + +main() diff --git a/.github/actions/handle_docker/helpers.py b/.github/actions/handle_docker/helpers.py new file mode 100644 index 00000000000000..37e865803b533d --- /dev/null +++ b/.github/actions/handle_docker/helpers.py @@ -0,0 +1,72 @@ +import logging +import os +import subprocess +from ghapi.all import GhApi +from pathlib import Path + + +def init_logger(): + logging.basicConfig(level=logging.INFO, + format='%(asctime)s %(name)-15s %(levelname)-8s %(message)s', + datefmt='%m-%d-%Y %H:%M:%S') + + +def set_github_output(name: str, value: str, github_output_var_name: str = 'GITHUB_OUTPUT'): + """Sets output variable for a GitHub Action""" + logger = logging.getLogger(__name__) + # In an environment variable "GITHUB_OUTPUT" GHA stores path to a file to write outputs to + with open(os.environ.get(github_output_var_name), 'a+') as file: + logger.info(f"Add {name}={value} to {github_output_var_name}") + print(f'{name}={value}', file=file) + + +def images_to_output(images: list): + images_output = 
{} + for image in images: + image_name, os_name = image.name.split('/', 1) + if image_name not in images_output: + images_output[image_name] = {} + + images_output[image_name][os_name] = image.ref() + + return images_output + + +def get_changeset(repo: str, pr: str, target_branch: str, commit_sha: str): + """Returns changeset either from PR or commit""" + owner, repository = repo.split('/') + gh_api = GhApi(owner=owner, repo=repository, token=os.getenv("GITHUB_TOKEN")) + if pr: + changed_files = gh_api.pulls.list_files(pr) + elif target_branch: + target_branch_head_commit = gh_api.repos.get_branch(target_branch).commit.sha + changed_files = gh_api.repos.compare_commits(f'{target_branch_head_commit}...{commit_sha}').get('files', []) + else: + raise ValueError(f'Either "pr" or "target_branch" parameter must be non-empty') + return set([f.filename for f in changed_files]) + + +def run(cmd: str, dry_run: bool = False, fail_on_error: bool = True): + logger = logging.getLogger('run') + logger.info(cmd) + + if dry_run: + return 0, '' + + with subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True) as proc: + for line in proc.stdout: + logger.info(line.strip()) + + proc.communicate() + if proc.returncode != 0: + msg = f"Command '{cmd}' returned non-zero exit status {proc.returncode}" + if fail_on_error: + raise RuntimeError(msg) + + logger.warning(msg) + return proc.returncode + + +def name_from_dockerfile(dockerfile: str | Path, dockerfiles_root: str | Path) -> str: + image_name = str(Path(dockerfile).relative_to(dockerfiles_root).parent.as_posix()) + return image_name diff --git a/.github/actions/handle_docker/images_api.py b/.github/actions/handle_docker/images_api.py new file mode 100644 index 00000000000000..4fbcb8c6df35af --- /dev/null +++ b/.github/actions/handle_docker/images_api.py @@ -0,0 +1,134 @@ +import logging +import os +import subprocess +from pathlib import Path +from typing import Iterable + +from helpers import run, 
name_from_dockerfile + + +class Image: + def __init__(self, name: str, dockerfile: Path, registry: str): + self.logger = logging.getLogger(self.__class__.__name__) + self.name = name + self.dockerfile = dockerfile + self.registry = registry + self.tag = 'latest' + self.base_tag = None + + def __str__(self): + return self.name + + def __eq__(self, img): + return img.name == self.name if img else False + + def with_tag(self, tag: str): + self.tag = tag + return self + + def with_base_tag(self, tag: str): + self.base_tag = tag + return self + + def ref(self): + return f"{self.registry}/{self.name}:{self.tag}" + + def base_ref(self): + if not self.base_tag: + return None + return f"{self.registry}/{self.name}:{self.base_tag}" + + def push(self, dry: bool = False): + cmd = f"docker push {self.ref()} " + run(cmd, dry) + + def build(self, dry: bool = False, push: bool = True, docker_builder: str = None, import_cache: bool = True, + export_cache: bool = True): + cache_cmd = "" + if import_cache: + cache_cmd += f"--cache-from type=registry,ref={self.ref()}-cache " + if self.base_tag: + cache_cmd += f"--cache-from type=registry,ref={self.base_ref()}-cache " + + if export_cache: + cache_cmd += f"--cache-to type=registry,ref={self.ref()}-cache,mode=max " + + build_cmd = f"docker buildx build --builder={docker_builder}" if docker_builder else "docker build" + push_cmd = f"--push" if push else "" + + cmd = f"{build_cmd} " \ + f"--file {self.dockerfile} " \ + f"--tag {self.ref()} " \ + f"{cache_cmd} " \ + f"{push_cmd} " \ + "." 
+ + run(cmd, dry) + + def tag_base(self, dry: bool = False): + if not self.base_tag: + raise AttributeError("Tag for base image is not specified") + + cmd = f"docker buildx imagetools create -t {self.ref()} {self.base_ref()}" + + run(cmd, dry) + + def is_missing(self, dry: bool = False, base: bool = False) -> bool: + image = self.base_ref() if base else self.ref() + if base and not image: + self.logger.warning(f"Base ref for image {self.ref()} is missing") + return True + + cmd = f"docker manifest inspect {image}" + is_missing = False + + self.logger.info(cmd) + if not dry: + try: + subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True, text=True) + except subprocess.CalledProcessError: + self.logger.warning(f"{image} is missing in registry") + is_missing = True + + return is_missing + + +# Making it a class, so it's a little easier to switch to a tree structure for building inherited images if we want +class ImagesHandler: + def __init__(self, dry_run: bool = False): + self.logger = logging.getLogger(self.__class__.__name__) + self.images = dict() + self.dry_run = dry_run + + def add_from_dockerfile(self, dockerfile: str | Path, dockerfiles_root: str | Path, registry: str, tag: str, + base_tag: str = None): + image_name = name_from_dockerfile(dockerfile, dockerfiles_root) + image = Image(image_name, Path(dockerfile), registry).with_tag(tag).with_base_tag(base_tag) + self.add(image) + + def add(self, image: Image): + self.images[image.name] = image + + def get(self, image_names: Iterable = None) -> list: + images = [self.images[name] for name in image_names] if image_names is not None else self.images.values() + return images + + def get_missing(self, image_names: Iterable = None, base: bool = False) -> list: + missing_images = [image.name for image in self.get(image_names) if image.is_missing(self.dry_run, base)] + return missing_images + + def build(self, image_names: Iterable = None, missing_only: bool = False, push: bool = True, builder: str = None, 
+ import_cache: bool = True, export_cache: bool = True): + to_build = self.get(self.get_missing(image_names)) if missing_only else self.get(image_names) + for image in to_build: + image.build(self.dry_run, push, builder, import_cache, export_cache) + return to_build + + def push(self, image_names: Iterable = None, missing_only: bool = False): + to_push = self.get(self.get_missing(image_names)) if missing_only else self.get(image_names) + for image in to_push: + image.push(self.dry_run) + + def tag(self, image_names: Iterable = None): + for image in self.get(image_names): + image.tag_base(self.dry_run) diff --git a/.github/actions/handle_docker/requirements.txt b/.github/actions/handle_docker/requirements.txt new file mode 100644 index 00000000000000..0684774fbe07ec --- /dev/null +++ b/.github/actions/handle_docker/requirements.txt @@ -0,0 +1 @@ +ghapi~=1.0.5 diff --git a/.github/actions/smart-ci/action.yml b/.github/actions/smart-ci/action.yml index ebe420d4ef301a..007db90d13251b 100644 --- a/.github/actions/smart-ci/action.yml +++ b/.github/actions/smart-ci/action.yml @@ -51,6 +51,9 @@ outputs: affected_components: description: "Affected components to run validation for and their validation scope" value: ${{ steps.smart_ci.outputs.affected_components }} + changed_components: + description: "Actually changed components (for push events everything is marked as changed)" + value: ${{ steps.smart_ci.outputs.changed_components }} skip_workflow: description: "Whether the workflow should be run with Smart CI rules applied or skipped completely" value: ${{ steps.smart_ci.outputs.skip_workflow }} diff --git a/.github/actions/smart-ci/smart_ci.py b/.github/actions/smart-ci/smart_ci.py index ae6786d9882bad..42ed8486ecdcd8 100644 --- a/.github/actions/smart-ci/smart_ci.py +++ b/.github/actions/smart-ci/smart_ci.py @@ -212,8 +212,8 @@ def main(): # In post-commits - validate all components regardless of changeset # In pre-commits - validate only changed components with their 
dependencies all_defined_components = components_config.keys() - changed_component_names = set(all_defined_components) if run_full_scope else \ - get_changed_component_names(pr, all_possible_components, args.pattern) + changed_by_pr = get_changed_component_names(pr, all_possible_components, args.pattern) if pr else None + changed_component_names = set(all_defined_components) if run_full_scope else changed_by_pr logger.info(f"changed_component_names: {changed_component_names}") @@ -244,6 +244,12 @@ def main(): affected_components_output = {name: {s: True for s in scope} for name, scope in affected_components.items()} set_github_output("affected_components", json.dumps(affected_components_output)) + # Components actually changed by a pull request are marked as True (if event is PR; + # otherwise all components considered changed). + changed_components_output = {name: True if not pr or name in changed_by_pr else False + for name in all_possible_components} + set_github_output("changed_components", json.dumps(changed_components_output)) + if __name__ == '__main__': main() diff --git a/.github/dockerfiles/docker_tag b/.github/dockerfiles/docker_tag new file mode 100644 index 00000000000000..d3ad10a3a0f206 --- /dev/null +++ b/.github/dockerfiles/docker_tag @@ -0,0 +1 @@ +pr-23064 \ No newline at end of file diff --git a/.github/dockerfiles/ov_build/ubuntu_20_04_arm64/Dockerfile b/.github/dockerfiles/ov_build/ubuntu_20_04_arm64/Dockerfile new file mode 100644 index 00000000000000..8b1f8a80b35b3c --- /dev/null +++ b/.github/dockerfiles/ov_build/ubuntu_20_04_arm64/Dockerfile @@ -0,0 +1,72 @@ +FROM openvinogithubactions.azurecr.io/dockerhub/ubuntu:20.04 + +USER root + +# APT configuration +RUN echo 'Acquire::Retries "10";' > /etc/apt/apt.conf && \ + echo 'APT::Get::Assume-Yes "true";' >> /etc/apt/apt.conf && \ + echo 'APT::Get::Fix-Broken "true";' >> /etc/apt/apt.conf && \ + echo 'APT::Get::no-install-recommends "true";' >> /etc/apt/apt.conf + +ENV 
DEBIAN_FRONTEND="noninteractive" \ + TZ="Europe/London" + +RUN apt-get update && \ + apt-get install software-properties-common && \ + add-apt-repository --yes --no-update ppa:git-core/ppa && \ + add-apt-repository --yes --no-update ppa:deadsnakes/ppa && \ + apt-get update && \ + apt-get install \ + curl \ + git \ + ca-certificates \ + gpg-agent \ + tzdata \ + # Pythons + python3.8-dev \ + python3.8-venv \ + python3.8-distutils \ + python3.11-dev \ + python3.11-venv \ + python3.11-distutils \ + # For Java API + default-jdk \ + # Compiler + gcc-10 \ + g++-10 \ + && \ + rm -rf /var/lib/apt/lists/* + +# Install build dependencies +ADD install_build_dependencies.sh /install_build_dependencies.sh +RUN chmod +x /install_build_dependencies.sh && \ + /install_build_dependencies.sh && \ + rm -rf /var/lib/apt/lists/* + +# Set gcc-10 as a default compiler +RUN update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-10 30 && \ + update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-10 30 + +# Install sscache +ARG SCCACHE_VERSION="v0.7.5" +ENV SCCACHE_HOME="/opt/sccache" \ + SCCACHE_PATH="/opt/sccache/sccache" + +RUN mkdir ${SCCACHE_HOME} && cd ${SCCACHE_HOME} && \ + SCCACHE_ARCHIVE="sccache-${SCCACHE_VERSION}-aarch64-unknown-linux-musl.tar.gz" && \ + curl -SLO https://github.com/mozilla/sccache/releases/download/${SCCACHE_VERSION}/${SCCACHE_ARCHIVE} && \ + tar -xzf ${SCCACHE_ARCHIVE} --strip-components=1 && rm ${SCCACHE_ARCHIVE} + +# Setup pip +ENV PIP_VERSION="24.0" +RUN curl https://bootstrap.pypa.io/get-pip.py -o get-pip.py && \ + python3.8 get-pip.py --no-cache-dir pip==${PIP_VERSION} && \ + python3.11 get-pip.py --no-cache-dir pip==${PIP_VERSION} && \ + rm -f get-pip.py + +# Use Python 3.11 as default instead of Python 3.8 +# Using venv here 'cause other methods to switch the default Python on Ubuntu 20 break both system and wheels build +RUN python3.11 -m venv venv +ENV PATH="/venv/bin:$SCCACHE_HOME:$PATH" + +ENV 
PIP_CACHE_DIR=/mount/caches/pip/linux/${PIP_VERSION} diff --git a/.github/dockerfiles/ov_test/ubuntu_20_04_arm64/Dockerfile b/.github/dockerfiles/ov_test/ubuntu_20_04_arm64/Dockerfile new file mode 100644 index 00000000000000..5d4483d498f69a --- /dev/null +++ b/.github/dockerfiles/ov_test/ubuntu_20_04_arm64/Dockerfile @@ -0,0 +1,49 @@ +FROM openvinogithubactions.azurecr.io/dockerhub/ubuntu:20.04 + +USER root + +# APT configuration +RUN echo 'Acquire::Retries "10";' > /etc/apt/apt.conf && \ + echo 'APT::Get::Assume-Yes "true";' >> /etc/apt/apt.conf && \ + echo 'APT::Get::Fix-Broken "true";' >> /etc/apt/apt.conf && \ + echo 'APT::Get::no-install-recommends "true";' >> /etc/apt/apt.conf + +ENV DEBIAN_FRONTEND="noninteractive" \ + TZ="Europe/London" + +RUN apt-get update && \ + apt-get install software-properties-common && \ + add-apt-repository --yes --no-update ppa:git-core/ppa && \ + add-apt-repository --yes --no-update ppa:deadsnakes/ppa && \ + apt-get update && \ + apt-get install \ + curl \ + git \ + ca-certificates \ + gpg-agent \ + tzdata \ + # Python + python3.11-dev \ + python3.11-venv \ + python3.11-distutils \ + && \ + rm -rf /var/lib/apt/lists/* + +# Install openvino dependencies +ADD scripts/install_dependencies/install_openvino_dependencies.sh /install_openvino_dependencies.sh +RUN chmod +x /install_openvino_dependencies.sh && \ + /install_openvino_dependencies.sh && \ + rm -rf /var/lib/apt/lists/* + +# Setup pip +ENV PIP_VERSION="24.0" +RUN curl https://bootstrap.pypa.io/get-pip.py -o get-pip.py && \ + python3.11 get-pip.py --no-cache-dir pip==${PIP_VERSION} && \ + rm -f get-pip.py + +# Use Python 3.11 as default instead of Python 3.8 +# Using venv here 'cause other methods to switch the default Python on Ubuntu 20 break both system and wheels build +RUN python3.11 -m venv venv +ENV PATH="/venv/bin:$PATH" + +ENV PIP_CACHE_DIR=/mount/caches/pip/linux/${PIP_VERSION} diff --git a/.github/workflows/job_python_unit_tests.yml 
b/.github/workflows/job_python_unit_tests.yml index 3c531041d59386..4efd95d512282d 100644 --- a/.github/workflows/job_python_unit_tests.yml +++ b/.github/workflows/job_python_unit_tests.yml @@ -80,10 +80,10 @@ jobs: sparse-checkout: | .github/actions/setup_python/action.yml sparse-checkout-cone-mode: false - path: 'openvino' + path: 'action_root' - name: Setup Python ${{ env.PYTHON_VERSION }} - uses: ./openvino/.github/actions/setup_python + uses: ./action_root/.github/actions/setup_python with: version: ${{ env.PYTHON_VERSION }} pip-cache-path: ${{ runner.os == 'Linux' && env.PIP_CACHE_PATH || '' }} @@ -277,7 +277,7 @@ jobs: if: runner.os != 'macOS' uses: actions/checkout@v4 with: - sparse-checkout: openvino/docs/snippets + sparse-checkout: docs/snippets path: ${{ env.OPENVINO_REPO }} submodules: 'false' diff --git a/.github/workflows/linux_arm64.yml b/.github/workflows/linux_arm64.yml index 3f32023bee07e4..f4f3c358f01bd7 100644 --- a/.github/workflows/linux_arm64.yml +++ b/.github/workflows/linux_arm64.yml @@ -22,6 +22,7 @@ jobs: runs-on: ubuntu-latest outputs: affected_components: "${{ steps.smart_ci.outputs.affected_components }}" + changed_components: "${{ steps.smart_ci.outputs.changed_components }}" skip_workflow: "${{ steps.smart_ci.outputs.skip_workflow }}" steps: - name: checkout action @@ -47,20 +48,42 @@ jobs: echo "${{ toJSON(steps.smart_ci.outputs.affected_components) }}" shell: bash - Build: + Docker: needs: Smart_CI + runs-on: aks-linux-16-cores-arm-docker-build + container: + image: openvinogithubactions.azurecr.io/docker_build:0.2 + volumes: + - /mount:/mount + outputs: + images: "${{ steps.handle_docker.outputs.images }}" + steps: + - name: Checkout + uses: actions/checkout@v4 + + - uses: ./.github/actions/handle_docker + id: handle_docker + with: + images: | + ov_build/ubuntu_20_04_arm64 + ov_test/ubuntu_20_04_arm64 + registry: 'openvinogithubactions.azurecr.io' + dockerfiles_root_dir: '.github/dockerfiles' + changed_components: ${{ 
needs.Smart_CI.outputs.changed_components }} + + Build: + needs: [ Smart_CI, Docker ] timeout-minutes: 150 defaults: run: shell: bash runs-on: 'aks-linux-16-cores-arm' container: - image: openvinogithubactions.azurecr.io/dockerhub/ubuntu:20.04 + image: ${{ fromJSON(needs.docker.outputs.images).ov_build.ubuntu_20_04_arm64 }} volumes: - /mount:/mount options: -e SCCACHE_AZURE_BLOB_CONTAINER -e SCCACHE_AZURE_CONNECTION_STRING env: - DEBIAN_FRONTEND: noninteractive # to prevent apt-get from waiting user input CMAKE_BUILD_TYPE: 'Release' CMAKE_GENERATOR: 'Ninja Multi-Config' CMAKE_CXX_COMPILER_LAUNCHER: sccache @@ -82,12 +105,6 @@ jobs: if: "!needs.smart_ci.outputs.skip_workflow" steps: - - name: Set apt retries - run: echo 'Acquire::Retries "10";' > /etc/apt/apt.conf.d/80-retries - - - name: Install git - run: apt-get update && apt-get install --assume-yes --no-install-recommends git ca-certificates - - name: Clone OpenVINO uses: actions/checkout@v4 with: @@ -114,33 +131,6 @@ jobs: # Dependencies # - - name: Install build dependencies - run: | - bash ${OPENVINO_REPO}/install_build_dependencies.sh - - # default-jdk - Java API - apt install --assume-yes --no-install-recommends default-jdk gcc-10 g++-10 - - # Set gcc-10 as a default one - update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-10 30 - update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-10 30 - - # For building the latest h5py - apt install --assume-yes --no-install-recommends libhdf5-dev - - - name: Install sccache - uses: mozilla-actions/sccache-action@v0.0.4 - with: - version: "v0.7.5" - - - name: Setup Python ${{ env.PYTHON_VERSION }} - uses: ./openvino/.github/actions/setup_python - with: - version: ${{ env.PYTHON_VERSION }} - pip-cache-path: ${{ env.PIP_CACHE_PATH }} - should-setup-pip-paths: 'true' - show-cache-info: 'true' - - name: Install python dependencies run: | # For Python API: build and wheel packaging @@ -314,22 +304,22 @@ jobs: image: 
'openvinogithubactions.azurecr.io/dockerhub/ubuntu:20.04' Samples: - needs: [ Build, Smart_CI ] + needs: [ Build, Docker, Smart_CI ] if: fromJSON(needs.smart_ci.outputs.affected_components).samples uses: ./.github/workflows/job_samples_tests.yml with: runner: 'aks-linux-16-cores-arm' - image: 'openvinogithubactions.azurecr.io/dockerhub/ubuntu:20.04' + image: ${{ fromJSON(needs.docker.outputs.images).ov_test.ubuntu_20_04_arm64 }} affected-components: ${{ needs.smart_ci.outputs.affected_components }} JS_API: name: OpenVINO JS API - needs: [ Build, Smart_CI ] + needs: [ Build, Docker, Smart_CI ] if: fromJSON(needs.smart_ci.outputs.affected_components).JS_API uses: ./.github/workflows/job_openvino_js.yml with: runner: 'aks-linux-16-cores-arm' - container: '{"image": "openvinogithubactions.azurecr.io/dockerhub/ubuntu:20.04"}' + container: '{"image": "${{ fromJSON(needs.docker.outputs.images).ov_build.ubuntu_20_04_arm64 }}"}' ONNX_Runtime: name: ONNX Runtime Integration @@ -344,73 +334,73 @@ jobs: Openvino_tokenizers: name: OpenVINO tokenizers extension - needs: [ Build, Smart_CI ] + needs: [ Build, Docker, Smart_CI ] uses: ./.github/workflows/job_tokenizers.yml with: runner: 'aks-linux-16-cores-arm' shell: bash - container: '{"image": "openvinogithubactions.azurecr.io/dockerhub/ubuntu:20.04", "volumes": ["/mount:/mount"]}' + container: '{"image": "${{ fromJSON(needs.docker.outputs.images).ov_build.ubuntu_20_04_arm64 }}", "volumes": ["/mount:/mount"]}' affected-components: ${{ needs.smart_ci.outputs.affected_components }} if: fromJSON(needs.smart_ci.outputs.affected_components).TOKENIZERS CXX_Unit_Tests: name: C++ unit tests - needs: [ Build, Smart_CI ] + needs: [ Build, Docker, Smart_CI ] uses: ./.github/workflows/job_cxx_unit_tests.yml with: runner: 'aks-linux-16-cores-arm' - image: 'openvinogithubactions.azurecr.io/dockerhub/ubuntu:20.04' + image: ${{ fromJSON(needs.docker.outputs.images).ov_test.ubuntu_20_04_arm64 }} affected-components: ${{ 
needs.smart_ci.outputs.affected_components }} Python_Unit_Tests: name: Python unit tests - needs: [ Build, Smart_CI ] + needs: [ Build, Docker, Smart_CI ] uses: ./.github/workflows/job_python_unit_tests.yml with: runner: 'aks-linux-16-cores-arm' - container: '{"image": "openvinogithubactions.azurecr.io/dockerhub/ubuntu:20.04", "volumes": ["/mount:/mount"]}' + container: '{"image": "${{ fromJSON(needs.docker.outputs.images).ov_test.ubuntu_20_04_arm64 }}", "volumes": ["/mount:/mount"]}' affected-components: ${{ needs.smart_ci.outputs.affected_components }} TensorFlow_Layer_Tests: name: TensorFlow Layer Tests - needs: [ Build, Smart_CI, Openvino_tokenizers ] + needs: [ Build, Docker, Smart_CI, Openvino_tokenizers ] uses: ./.github/workflows/job_tensorflow_layer_tests.yml with: runner: 'aks-linux-16-cores-arm' shell: bash - container: '{"image": "openvinogithubactions.azurecr.io/dockerhub/ubuntu:20.04", "volumes": ["/mount:/mount"]}' + container: '{"image": "${{ fromJSON(needs.docker.outputs.images).ov_test.ubuntu_20_04_arm64 }}", "volumes": ["/mount:/mount"]}' affected-components: ${{ needs.smart_ci.outputs.affected_components }} CPU_Functional_Tests: name: CPU functional tests if: fromJSON(needs.smart_ci.outputs.affected_components).CPU.test - needs: [ Build, Smart_CI ] + needs: [ Build, Docker, Smart_CI ] uses: ./.github/workflows/job_cpu_functional_tests.yml with: runner: 'aks-linux-16-cores-arm' - image: 'openvinogithubactions.azurecr.io/dockerhub/ubuntu:20.04' + image: ${{ fromJSON(needs.docker.outputs.images).ov_test.ubuntu_20_04_arm64 }} TensorFlow_Models_Tests: name: TensorFlow Models tests if: ${{ 'false' }} # TODO: Enable once the dependencies are ready for arm (no tensorflow-text available for arm from PyPI) # if: fromJSON(needs.smart_ci.outputs.affected_components).TF_FE.test || # fromJSON(needs.smart_ci.outputs.affected_components).TFL_FE.test - needs: [ Build, Smart_CI, Openvino_tokenizers] + needs: [ Build, Docker, Smart_CI, Openvino_tokenizers] uses: 
./.github/workflows/job_tensorflow_models_tests.yml with: runner: 'aks-linux-16-cores-arm' - container: '{"image": "openvinogithubactions.azurecr.io/dockerhub/ubuntu:20.04"}' + container: '{"image": "${{ fromJSON(needs.docker.outputs.images).ov_test.ubuntu_20_04_arm64 }}"}' model_scope: 'precommit' PyTorch_Models_Tests: name: PyTorch Models tests if: ${{ 'false' }} # TODO: Enable once the dependencies are ready for arm (no tensorflow-text available for arm from PyPI) # if: fromJSON(needs.smart_ci.outputs.affected_components).PyTorch_FE.test - needs: [ Build, Smart_CI ] + needs: [ Build, Docker, Smart_CI ] uses: ./.github/workflows/job_pytorch_models_tests.yml with: runner: 'aks-linux-16-cores-arm' - container: '{"image": "openvinogithubactions.azurecr.io/dockerhub/ubuntu:20.04"}' + container: '{"image": "${{ fromJSON(needs.docker.outputs.images).ov_test.ubuntu_20_04_arm64 }}"}' event: ${{ github.event_name }} Overall_Status: