Skip to content

Commit

Permalink
Merge branch 'master' into onnx_string
Browse files Browse the repository at this point in the history
  • Loading branch information
gkrivor authored Jun 14, 2024
2 parents a0349ee + 7b51990 commit 0baa27a
Show file tree
Hide file tree
Showing 3,539 changed files with 210,891 additions and 210,826 deletions.
The diff you're trying to view is too large. We only load the first 3000 changed files.
3 changes: 3 additions & 0 deletions .dockerignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
*
!install_build_dependencies.sh
!scripts/install_dependencies/install_openvino_dependencies.sh
2 changes: 1 addition & 1 deletion .github/ISSUE_TEMPLATE/build.yml
Original file line number Diff line number Diff line change
Expand Up @@ -72,7 +72,7 @@ body:
- type: textarea
id: build_script
attributes:
label: Build scrip or step-by-step to reproduce
label: Build script or step-by-step to reproduce
description: How can we reproduce your issue?
placeholder: Please provide detailed instructions on how to reproduce the issue
validations:
Expand Down
2 changes: 1 addition & 1 deletion .github/actions/cache/dist/save-only/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -33258,7 +33258,7 @@ async function save() {

// remote cache directory may not be created yet
if (!(await checkFileExists(cacheRemotePath))) {
await fs.mkdir(cacheRemotePath);
await fs.mkdir(cacheRemotePath, { recursive: true });
}

core.info('Copying cache...');
Expand Down
2 changes: 1 addition & 1 deletion .github/actions/cache/dist/save/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -33258,7 +33258,7 @@ async function save() {

// remote cache directory may not be created yet
if (!(await checkFileExists(cacheRemotePath))) {
await fs.mkdir(cacheRemotePath);
await fs.mkdir(cacheRemotePath, { recursive: true });
}

core.info('Copying cache...');
Expand Down
2 changes: 1 addition & 1 deletion .github/actions/cache/src/saveImpl.js
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,7 @@ async function save() {

// remote cache directory may not be created yet
if (!(await checkFileExists(cacheRemotePath))) {
await fs.mkdir(cacheRemotePath);
await fs.mkdir(cacheRemotePath, { recursive: true });
}

core.info('Copying cache...');
Expand Down
72 changes: 72 additions & 0 deletions .github/actions/handle_docker/action.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,72 @@
name: 'Handle Docker images'
description: 'Builds, tags and pushes a given Docker image when needed'
inputs:
  images:
    description: 'Image names (registry name + namespace + base name)'
    required: true
  registry:
    description: 'Docker registry'
    required: true
  dockerfiles_root_dir:
    description: 'Path to dockerfiles root dir relative to repository root'
    required: true
  push:
    description: 'Push built images to registry'
    required: false
    default: 'true'
  changed_components:
    description: 'Components changed by a pull request'
    required: true

outputs:
  images:
    description: "Images to use in workflow"
    value: ${{ steps.handle_images.outputs.images }}

runs:
  using: 'composite'
  steps:
    - name: Checkout head
      uses: actions/checkout@v4

    # The base revision is only needed for its docker_tag file, so do a
    # sparse checkout into a separate "base" directory.
    - name: Checkout base
      uses: actions/checkout@v4
      with:
        ref: ${{ github.base_ref || github.event.merge_group.base_ref }}
        sparse-checkout: ${{ inputs.dockerfiles_root_dir }}/docker_tag
        path: base

    - name: Install Python dependencies
      uses: py-actions/py-dependency-install@v4
      with:
        path: "${{ github.action_path }}/requirements.txt"
        update-setuptools: "false"
        update-wheel: "false"

    - name: Set up Docker Buildx
      id: buildx
      uses: docker/setup-buildx-action@v3

    - name: Handle docker images
      id: handle_images
      shell: bash
      run: |
        # Normalize the multi-line "images" input into a comma-separated list
        images=$(echo "${{ inputs.images }}" | tr '\n' ',' | sed 's/,*$//')
        pr="${{ github.event.pull_request.number }}"
        python3 .github/actions/handle_docker/get_images_to_build.py \
        -d "${{ inputs.dockerfiles_root_dir }}" \
        -r "${{ inputs.registry }}" \
        --images "$images" \
        --head_tag_file "${{ inputs.dockerfiles_root_dir }}/docker_tag" \
        --base_tag_file "base/${{ inputs.dockerfiles_root_dir }}/docker_tag" \
        --docker_env_changed "${{ fromJSON(inputs.changed_components).docker_env }}" \
        --dockerfiles_changed "${{ fromJSON(inputs.changed_components).dockerfiles }}" \
        --docker_builder "${{ steps.buildx.outputs.name }}" \
        --repo "${{ github.repository }}" \
        --ref_name "${{ github.ref_name }}" \
        $([[ -n $pr ]] && echo "--pr $pr" || echo '-s ${{ github.sha }}') \
        $([[ "${{ inputs.push }}" == "true" ]] && echo "--push" || echo '')
      env:
        GITHUB_TOKEN: ${{ github.token }}

129 changes: 129 additions & 0 deletions .github/actions/handle_docker/get_images_to_build.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,129 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

import argparse
import json
import re
import sys

from distutils.util import strtobool
from helpers import *
from images_api import *


def parse_args():
    """Parse command-line arguments for the Docker image selection script.

    Returns:
        argparse.Namespace: Parsed arguments (see individual help strings).
    """
    def _str_to_bool(value: str) -> bool:
        # Local replacement for distutils.util.strtobool: distutils is
        # deprecated (PEP 632) and removed from the stdlib in Python 3.12.
        # Accepts the same spellings strtobool did.
        normalized = value.strip().lower()
        if normalized in ('y', 'yes', 't', 'true', 'on', '1'):
            return True
        if normalized in ('n', 'no', 'f', 'false', 'off', '0'):
            return False
        raise argparse.ArgumentTypeError(f"Invalid boolean value: {value!r}")

    parser = argparse.ArgumentParser(description='Returns list of Docker images to build for a given workflow')
    parser.add_argument('-i', '--images', required=True, help='Comma-separated docker images')
    parser.add_argument('-d', '--dockerfiles_root', required=True, help='Path to dockerfiles')
    parser.add_argument('-r', '--registry', required=True, help='Docker registry name')
    parser.add_argument('-s', '--commit', required=False, help='Commit SHA. If not set, --pr is used')
    parser.add_argument('-b', '--docker_builder', required=False, help='Docker buildx builder name')
    parser.add_argument('--pr', type=int, required=False, help='PR number, if event is pull_request')
    parser.add_argument('--head_tag_file', default='.github/dockerfiles/docker_tag', help='Head docker tag file path')
    parser.add_argument('--base_tag_file', default=None, required=False, help='Base docker tag file path')
    parser.add_argument('--ref_name', required=False, default='', help='GitHub ref name')
    parser.add_argument('--repo', default='openvinotoolkit/openvino', help='GitHub repository')
    parser.add_argument('--docker_env_changed', type=_str_to_bool, default=True,
                        help='Whether PR changes docker env')
    parser.add_argument('--dockerfiles_changed', type=_str_to_bool, default=True,
                        help='Whether PR changes dockerfiles')
    parser.add_argument('--action_path', default='.github/actions/handle_docker', help='Path to this GitHub action')
    parser.add_argument('--push', action='store_true', required=False, help='Whether to push images to registry')
    parser.add_argument('--dry_run', action='store_true', required=False, help='Dry run')
    args = parser.parse_args()
    return args


def main():
    """Determine which Docker images to build, tag or reuse for a workflow run.

    Emits GitHub Action outputs:
      - "images": JSON mapping {image_name: {os_name: image_reference}}
      - "skip_workflow": set when the changes are irrelevant to this workflow

    Exits with code 1 when a PR modifies the docker environment but the head
    docker tag file was not updated to the expected "pr-<number>" value.
    """
    init_logger()
    logger = logging.getLogger(__name__)
    args = parse_args()
    for arg, value in sorted(vars(args).items()):
        logger.info(f"Argument {arg}: {value}")

    # Tags recorded in the head / base revisions' docker_tag files.
    head_tag = Path(args.head_tag_file).read_text().strip()

    base_tag_exists = args.base_tag_file and Path(args.base_tag_file).exists()
    base_tag = Path(args.base_tag_file).read_text().strip() if base_tag_exists else None

    all_dockerfiles = Path(args.dockerfiles_root).rglob('**/*/Dockerfile')

    images = ImagesHandler(args.dry_run)
    for image in all_dockerfiles:
        images.add_from_dockerfile(image, args.dockerfiles_root, args.registry, head_tag, base_tag)

    requested_images = set(args.images.split(','))
    skip_workflow = False
    missing_only = False

    # Merge-queue refs look like "gh-readonly-queue/<target branch>/...".
    merge_queue_target_branch = next(iter(re.findall(f'^gh-readonly-queue/(.*)/', args.ref_name)), None)

    if args.pr:
        environment_affected = args.docker_env_changed or args.dockerfiles_changed
        if environment_affected:
            expected_tag = f'pr-{args.pr}'

            if head_tag != expected_tag:
                logger.error(f"Please update docker tag in {args.head_tag_file} to {expected_tag}")
                sys.exit(1)

    elif merge_queue_target_branch:
        environment_affected = head_tag != base_tag
        if environment_affected:
            logger.info(f"Environment is affected by PR(s) in merge group")
    else:
        environment_affected = False

    if environment_affected:
        changeset = get_changeset(args.repo, args.pr, merge_queue_target_branch, args.commit)
        changed_dockerfiles = [p for p in changeset if p.startswith(args.dockerfiles_root) and p.endswith('Dockerfile')]

        if args.docker_env_changed:
            logger.info(f"Common docker environment is modified, will build all requested images")
            changed_images = requested_images
        else:
            logger.info(f"Common docker environment is not modified, will build only changed and missing images")
            changed_images = set([name_from_dockerfile(d, args.dockerfiles_root) for d in changed_dockerfiles])

        unchanged_images = requested_images - changed_images
        # Unchanged images whose base is absent from the registry must still be built.
        unchanged_with_no_base = images.get_missing(unchanged_images, base=True)

        if unchanged_with_no_base:
            logger.info("The following images were unchanged, but will be built anyway since the base for them "
                        f"is missing in registry: {unchanged_with_no_base}")

        images_to_tag = unchanged_images.difference(unchanged_with_no_base)
        images_to_build = requested_images.intersection(changed_images).union(unchanged_with_no_base)

        only_dockerfiles_changed = len(changeset) == len(changed_dockerfiles)
        if only_dockerfiles_changed and not images_to_build:
            skip_workflow = True
    else:
        logger.info(f"Environment is not affected, will build only missing images, if any")
        images_to_build = requested_images
        images_to_tag = []
        missing_only = True

    if not images_to_build:
        logger.info(f"No images to build, will return the list of pre-built images with a new tag")

    built_images = images.build(images_to_build, missing_only, args.push, args.docker_builder)
    if not built_images:
        logger.info(f"No images were built, a new tag will be applied to a pre-built base image if needed")

    # When a custom builder is used, it allows to push the image automatically once built. Otherwise, pushing manually
    if args.push and not args.docker_builder:
        images.push(images_to_build, missing_only)

    if environment_affected and base_tag:
        images.tag(images_to_tag)

    images_output = images_to_output(images.get(requested_images))
    set_github_output("images", json.dumps(images_output))

    if skip_workflow:
        logger.info(f"Docker image changes are irrelevant for current workflow, workflow may be skipped")
    set_github_output("skip_workflow", str(skip_workflow))


# Guard the entry point: previously main() ran unconditionally on import,
# which breaks importing this module (e.g. from tests or sibling tooling).
if __name__ == '__main__':
    main()
75 changes: 75 additions & 0 deletions .github/actions/handle_docker/helpers.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,75 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

import logging
import os
import subprocess
from ghapi.all import GhApi
from pathlib import Path


def init_logger():
    """Configure root logging at INFO level with a timestamped, aligned format."""
    message_format = '%(asctime)s %(name)-15s %(levelname)-8s %(message)s'
    timestamp_format = '%m-%d-%Y %H:%M:%S'
    logging.basicConfig(level=logging.INFO, format=message_format, datefmt=timestamp_format)


def set_github_output(name: str, value: str, github_output_var_name: str = 'GITHUB_OUTPUT'):
    """Append a name=value pair to the GitHub Actions output file.

    GitHub Actions stores the path of the per-step output file in the
    environment variable named by *github_output_var_name*.
    """
    logger = logging.getLogger(__name__)
    output_file_path = os.environ.get(github_output_var_name)
    with open(output_file_path, 'a+') as output_file:
        logger.info(f"Add {name}={value} to {github_output_var_name}")
        output_file.write(f'{name}={value}\n')


def images_to_output(images: list):
    """Group image references into a {base_name: {os_name: reference}} mapping.

    Each image's name is expected to be "<base_name>/<os_name>" (split on the
    first "/"); the value stored is the image's full reference.
    """
    grouped = {}
    for image in images:
        base_name, os_name = image.name.split('/', 1)
        grouped.setdefault(base_name, {})[os_name] = image.ref()
    return grouped


def get_changeset(repo: str, pr: str, target_branch: str, commit_sha: str):
    """Return the set of file paths changed either by a PR or by a commit.

    If *pr* is set, the PR's file list is used; otherwise the commit is
    compared against the head of *target_branch*.
    """
    owner, repository = repo.split('/')
    gh_api = GhApi(owner=owner, repo=repository, token=os.getenv("GITHUB_TOKEN"))

    if pr:
        changed_files = gh_api.pulls.list_files(pr)
    elif target_branch:
        branch_head_sha = gh_api.repos.get_branch(target_branch).commit.sha
        comparison = gh_api.repos.compare_commits(f'{branch_head_sha}...{commit_sha}')
        changed_files = comparison.get('files', [])
    else:
        raise ValueError('Either "pr" or "target_branch" parameter must be non-empty')

    return {changed.filename for changed in changed_files}


def run(cmd: str, dry_run: bool = False, fail_on_error: bool = True):
    """Run a shell command, streaming its merged stdout/stderr to the log.

    Args:
        cmd: Shell command line (executed with shell=True).
        dry_run: If True, only log the command and return 0 without running it.
        fail_on_error: If True, raise on a non-zero exit code; otherwise log
            a warning and return the code.

    Returns:
        int: The command's exit code (0 for a dry run).

    Raises:
        RuntimeError: If the command exits non-zero and fail_on_error is True.
    """
    logger = logging.getLogger('run')
    logger.info(cmd)

    if dry_run:
        # Fix: previously returned the tuple (0, ''), unlike the int returned
        # below; the tuple is truthy, so callers testing "if run(...)" would
        # misinterpret a dry run as a failure. Return a plain exit code.
        return 0

    with subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True) as proc:
        for line in proc.stdout:
            logger.info(line.strip())

        proc.communicate()
        if proc.returncode != 0:
            msg = f"Command '{cmd}' returned non-zero exit status {proc.returncode}"
            if fail_on_error:
                raise RuntimeError(msg)

            logger.warning(msg)
        return proc.returncode


def name_from_dockerfile(dockerfile: str | Path, dockerfiles_root: str | Path) -> str:
image_name = str(Path(dockerfile).relative_to(dockerfiles_root).parent.as_posix())
return image_name
Loading

0 comments on commit 0baa27a

Please sign in to comment.