diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 0000000..c748f7d --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1 @@ +* @casillas2/Developers \ No newline at end of file diff --git a/.github/ISSUE_TEMPLATE/bug.yml b/.github/ISSUE_TEMPLATE/bug.yml new file mode 100644 index 0000000..dd351d8 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug.yml @@ -0,0 +1,33 @@ +name: "\U0001F41B Bug Report" +description: Report a bug +title: "Bug: TITLE" +labels: ["bug"] +body: + - type: textarea + id: expected_behaviour + attributes: + label: Expected Behaviour + validations: + required: true + + - type: textarea + id: current_behaviour + attributes: + label: Current Behaviour + validations: + required: true + + - type: textarea + id: reproduction_steps + attributes: + label: Reproduction Steps + validations: + required: true + + - type: textarea + id: code_snippet + attributes: + label: Code Snippet + validations: + required: true + diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml new file mode 100644 index 0000000..ec4bb38 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -0,0 +1 @@ +blank_issues_enabled: false \ No newline at end of file diff --git a/.github/ISSUE_TEMPLATE/doc.yml b/.github/ISSUE_TEMPLATE/doc.yml new file mode 100644 index 0000000..883623b --- /dev/null +++ b/.github/ISSUE_TEMPLATE/doc.yml @@ -0,0 +1,13 @@ + +name: "📕 Documentation Issue" +description: Issue in the documentation +title: "Docs: TITLE" +labels: ["documentation"] +body: + - type: textarea + id: documentation_issue + attributes: + label: Documentation Issue + description: Describe the issue + validations: + required: true \ No newline at end of file diff --git a/.github/ISSUE_TEMPLATE/feature_request.yml b/.github/ISSUE_TEMPLATE/feature_request.yml new file mode 100644 index 0000000..ed7f957 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.yml @@ -0,0 +1,17 @@ +name: "\U0001F680 Feature Request" +description: Request a new feature +title: "Feature request: TITLE" +labels: ["feature"] +body: + - type: textarea + id: use_case + attributes: + label: Use Case + validations: + required: true + - type: textarea + id: proposed_solution + attributes: + label: Proposed Solution + validations: + required: true diff --git a/.github/ISSUE_TEMPLATE/maintenance.yml b/.github/ISSUE_TEMPLATE/maintenance.yml new file mode 100644 index 0000000..a0f98ff --- /dev/null +++ b/.github/ISSUE_TEMPLATE/maintenance.yml @@ -0,0 +1,17 @@ +name: "🛠️ Maintenance" +description: Some type of improvement +title: "Maintenance: TITLE" +labels: ["maintenance"] +body: + - type: textarea + id: description + attributes: + label: Description + validations: + required: true + - type: textarea + id: solution + attributes: + label: Solution + validations: + required: true diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 0000000..e41006d --- /dev/null +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,11 @@ +### What was the problem/requirement? (What/Why) + +### What was the solution? (How) + +### What is the impact of this change? + +### How was this change tested? + +### Was this change documented? + +### Is this a breaking change?
\ No newline at end of file diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000..a8325ac --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,17 @@ +# To get started with Dependabot version updates, you'll need to specify which +# package ecosystems to update and where the package manifests are located. +# Please see the documentation for all configuration options: +# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates + +version: 2 +updates: + - package-ecosystem: "pip" + directory: "/" # Location of package manifests + schedule: + interval: "daily" + commit-message: + prefix: "chore(deps):" + - package-ecosystem: "github-actions" + directory: "/" # Location of package manifests + schedule: + interval: "daily" \ No newline at end of file diff --git a/.github/workflows/auto_approve.yml b/.github/workflows/auto_approve.yml new file mode 100644 index 0000000..5fbbef7 --- /dev/null +++ b/.github/workflows/auto_approve.yml @@ -0,0 +1,21 @@ +name: Dependabot auto-approve +on: pull_request + +permissions: + pull-requests: write + +jobs: + dependabot: + runs-on: ubuntu-latest + if: ${{ github.actor == 'dependabot[bot]' }} + steps: + - name: Dependabot metadata + id: metadata + uses: dependabot/fetch-metadata@v1 + with: + github-token: "${{ secrets.GITHUB_TOKEN }}" + - name: Approve a PR + run: gh pr review --approve "$PR_URL" + env: + PR_URL: ${{ github.event.pull_request.html_url }} + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} \ No newline at end of file diff --git a/.github/workflows/code_quality.yml b/.github/workflows/code_quality.yml new file mode 100644 index 0000000..58fcdc8 --- /dev/null +++ b/.github/workflows/code_quality.yml @@ -0,0 +1,12 @@ +name: Code Quality + +on: + pull_request: + branches: [ mainline ] + +jobs: + Test: + name: Code Quality + uses: ./.github/workflows/reuse_python_build.yml + secrets: inherit + diff --git a/.github/workflows/publish_release.yml b/.github/workflows/publish_release.yml new file mode 100644 index 0000000..2873f96 --- /dev/null +++ b/.github/workflows/publish_release.yml @@ -0,0 +1,21 @@ +name: Release Publish +on: + workflow_dispatch: + +jobs: + Publish: + runs-on: ubuntu-latest + environment: release + permissions: + id-token: write + steps: + - name: Configure AWS credentials + uses: aws-actions/configure-aws-credentials@v2 + with: + role-to-assume: ${{ secrets.AWS_CODEBUILD_RELEASE_PUBLISH_ROLE }} + aws-region: us-west-2 + - name: Run CodeBuild + uses: aws-actions/aws-codebuild-run-build@v1 + with: + project-name: deadline-cloud-test-fixtures-Publish + hide-cloudwatch-logs: true diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 0000000..4ae0cb4 --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,99 @@ +name: Release + +on: + workflow_dispatch: + inputs: + version_to_publish: + description: "Version to be released" + required: false + +jobs: + TestMainline: + name: Test Mainline + uses: ./.github/workflows/reuse_python_build.yml + with: + branch: mainline + secrets: inherit + + Merge: + needs: TestMainline + runs-on: ubuntu-latest + permissions: + contents: write + steps: + - uses: actions/checkout@v3 + with: + ref: release + fetch-depth: 0 + token: ${{ secrets.CI_TOKEN }} + - name: Set Git config + run: | + git config --local user.email "client-software-ci@amazon.com" + git config --local user.name "client-software-ci" + - name: Update Release + run: git merge --ff-only origin/mainline -v + - 
name: Push new release + if: ${{ inputs.version_to_publish }} + run: | + git tag -a ${{ inputs.version_to_publish }} -m "Release ${{ inputs.version_to_publish }}" + git push origin release ${{ inputs.version_to_publish }} + - name: Push post release + if: ${{ !inputs.version_to_publish }} + run: git push origin release + + TestRelease: + needs: Merge + name: Test Release + uses: ./.github/workflows/reuse_python_build.yml + with: + branch: release + secrets: inherit + + PublishMirror: + needs: TestRelease + runs-on: ubuntu-latest + permissions: + id-token: write + contents: read + env: + CODEARTIFACT_REGION: "us-west-2" + CODEARTIFACT_DOMAIN: ${{ secrets.CODEARTIFACT_DOMAIN }} + CODEARTIFACT_ACCOUNT_ID: ${{ secrets.CODEARTIFACT_ACCOUNT_ID }} + CODEARTIFACT_REPOSITORY: ${{ secrets.CODEARTIFACT_REPOSITORY }} + CUSTOMER_REPOSITORY: ${{ secrets.CUSTOMER_REPOSITORY }} + steps: + - name: "Checkout" + uses: actions/checkout@v3 + with: + ref: release + fetch-depth: 0 + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: '3.9' + - name: Configure AWS credentials + uses: aws-actions/configure-aws-credentials@v2 + with: + role-to-assume: ${{ secrets.AWS_CODEARTIFACT_ROLE }} + aws-region: us-west-2 + - name: Install dependencies + run: | + CODEARTIFACT_AUTH_TOKEN=$(aws codeartifact get-authorization-token --domain ${{ secrets.CODEARTIFACT_DOMAIN }} --domain-owner ${{ secrets.CODEARTIFACT_ACCOUNT_ID }} --query authorizationToken --output text --region us-west-2) + echo "::add-mask::$CODEARTIFACT_AUTH_TOKEN" + echo CODEARTIFACT_AUTH_TOKEN=$CODEARTIFACT_AUTH_TOKEN >> $GITHUB_ENV + pip install --upgrade hatch + pip install --upgrade twine + - name: Run Build + run: hatch build + - name: Publish + run: | + export TWINE_USERNAME=aws + export TWINE_PASSWORD=`aws codeartifact get-authorization-token --domain ${{ secrets.CODEARTIFACT_DOMAIN }} --domain-owner ${{ secrets.CODEARTIFACT_ACCOUNT_ID }} --query authorizationToken --output text` + export TWINE_REPOSITORY_URL=`aws codeartifact get-repository-endpoint --domain ${{ secrets.CODEARTIFACT_DOMAIN }} --domain-owner ${{ secrets.CODEARTIFACT_ACCOUNT_ID }} --repository ${{ secrets.CODEARTIFACT_REPOSITORY }} --format pypi --query repositoryEndpoint --output text` + twine upload dist/* + - name: Publish to Customer CodeArtifact + run: | + export TWINE_USERNAME=aws + export TWINE_PASSWORD=`aws codeartifact get-authorization-token --domain ${{ secrets.CODEARTIFACT_DOMAIN }} --domain-owner ${{ secrets.CODEARTIFACT_ACCOUNT_ID }} --query authorizationToken --output text` + export TWINE_REPOSITORY_URL=`aws codeartifact get-repository-endpoint --domain ${{ secrets.CODEARTIFACT_DOMAIN }} --domain-owner ${{ secrets.CODEARTIFACT_ACCOUNT_ID }} --repository ${{ secrets.CUSTOMER_REPOSITORY }} --format pypi --query repositoryEndpoint --output text` + twine upload dist/* diff --git a/.github/workflows/reuse_python_build.yml b/.github/workflows/reuse_python_build.yml new file mode 100644 index 0000000..9d8f4ee --- /dev/null +++ b/.github/workflows/reuse_python_build.yml @@ -0,0 +1,60 @@ +name: Python Build + +on: + workflow_call: + inputs: + branch: + required: false + type: string + +jobs: + Python: + runs-on: ubuntu-latest + permissions: + id-token: write + contents: read + strategy: + matrix: + python-version: ['3.9', '3.10', '3.11'] + env: + PYTHON: ${{ matrix.python-version }} + CODEARTIFACT_REGION: "us-west-2" + CODEARTIFACT_DOMAIN: ${{ secrets.CODEARTIFACT_DOMAIN }} + CODEARTIFACT_ACCOUNT_ID: ${{ secrets.CODEARTIFACT_ACCOUNT_ID }} + 
CODEARTIFACT_REPOSITORY: ${{ secrets.CODEARTIFACT_REPOSITORY }} + steps: + - uses: actions/checkout@v3 + if: ${{ !inputs.branch }} + + - uses: actions/checkout@v3 + if: ${{ inputs.branch }} + with: + ref: ${{ inputs.branch }} + fetch-depth: 0 + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + + - name: Configure AWS credentials + uses: aws-actions/configure-aws-credentials@v2 + with: + role-to-assume: ${{ secrets.AWS_CODEARTIFACT_ROLE }} + aws-region: us-west-2 + + - name: Install Hatch + run: | + CODEARTIFACT_AUTH_TOKEN=$(aws codeartifact get-authorization-token --domain ${{ secrets.CODEARTIFACT_DOMAIN }} --domain-owner ${{ secrets.CODEARTIFACT_ACCOUNT_ID }} --query authorizationToken --output text --region us-west-2) + echo "::add-mask::$CODEARTIFACT_AUTH_TOKEN" + echo CODEARTIFACT_AUTH_TOKEN=$CODEARTIFACT_AUTH_TOKEN >> $GITHUB_ENV + pip install --upgrade hatch + + - name: Run Linting + run: hatch run lint + + - name: Run Build + run: hatch build + + - name: Run Tests + run: hatch run test diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..63583e2 --- /dev/null +++ b/.gitignore @@ -0,0 +1,10 @@ +/dist +*.egg-info/ +__pycache__/ +.coverage +.tox +.venv +/htmlcov +.vscode +build +*_version.py \ No newline at end of file diff --git a/README.md b/README.md index 847260c..3549b04 100644 --- a/README.md +++ b/README.md @@ -1,11 +1,33 @@ -## My Project +## Deadline Test Scaffolding -TODO: Fill this README out! +# Build / Test / Release -Be sure to: +## Setup Code Artifact -* Change the title in this README -* Edit your repository description on GitHub +## Build the package +``` +hatch build +``` + +## Run tests +``` +hatch run test +``` + +## Run linting +``` +hatch run lint +``` + +## Run formatting +``` +hatch run fmt +``` + +## Run tests for all supported Python versions
+``` +hatch run all:test +``` ## Security diff --git a/hatch.toml b/hatch.toml new file mode 100644 index 0000000..9f3896b --- /dev/null +++ b/hatch.toml @@ -0,0 +1,35 @@ +[envs.default] +pre-install-commands = [ + "pip install -r requirements-testing.txt" +] + +[envs.default.scripts] +sync = "pip install -r requirements-testing.txt" +test = "pytest --cov-config pyproject.toml {args:test/unit}" +typing = "mypy {args:src test}" +style = [ + "ruff {args:.}", + "black --check --diff {args:.}", +] +fmt = [ + "black {args:.}", + "style", +] +lint = [ + "style", + "typing", +] + +[[envs.all.matrix]] +python = ["3.9", "3.10", "3.11"] + +[envs.default.env-vars] +PIP_INDEX_URL="https://aws:{env:CODEARTIFACT_AUTH_TOKEN}@{env:CODEARTIFACT_DOMAIN}-{env:CODEARTIFACT_ACCOUNT_ID}.d.codeartifact.{env:CODEARTIFACT_REGION}.amazonaws.com/pypi/{env:CODEARTIFACT_REPOSITORY}/simple/" +SKIP_BOOTSTRAP_TEST_RESOURCES="True" + +[envs.codebuild.scripts] +build = "hatch build" + +[envs.codebuild.env-vars] +PIP_INDEX_URL="" +SKIP_BOOTSTRAP_TEST_RESOURCES="True" diff --git a/hatch_version_hook.py b/hatch_version_hook.py new file mode 100644 index 0000000..27c1255 --- /dev/null +++ b/hatch_version_hook.py @@ -0,0 +1,160 @@ +import logging +import os +import shutil +import sys + +from dataclasses import dataclass +from hatchling.builders.hooks.plugin.interface import BuildHookInterface +from typing import Any, Optional + + +_logger = logging.Logger(__name__, logging.INFO) +_stdout_handler = logging.StreamHandler(sys.stdout) +_stdout_handler.addFilter(lambda record: record.levelno <= logging.INFO) +_stderr_handler = logging.StreamHandler(sys.stderr) +_stderr_handler.addFilter(lambda record: record.levelno > logging.INFO) +_logger.addHandler(_stdout_handler) +_logger.addHandler(_stderr_handler) + + +@dataclass +class CopyConfig: + sources: list[str] + destinations: list[str] + + +class CustomBuildHookException(Exception): + pass + + +class CustomBuildHook(BuildHookInterface): + """ + A Hatch build hook that is pulled in automatically by Hatch's "custom" hook support + See: https://hatch.pypa.io/1.6/plugins/build-hook/custom/ + This build hook copies files from one location (sources) to another (destinations). + Config options: + - `log_level (str)`: The logging level. Any value accepted by logging.Logger.setLevel is allowed. Default is INFO. + - `copy_map (list[dict])`: A list of mappings of files to copy and the destinations to copy them into. In TOML files, + this is expressed as an array of tables. 
See https://toml.io/en/v1.0.0#array-of-tables + Example TOML config: + ``` + [tool.hatch.build.hooks.custom] + path = "hatch_version_hook.py" + log_level = "DEBUG" + [[tool.hatch.build.hooks.custom.copy_map]] + sources = [ + "_version.py", + ] + destinations = [ + "src/openjobio", + "src/openjobio_adaptor_runtime", + "src/openjobio_adaptor_runtime_client", + ] + [[tool.hatch.build.hooks.custom.copy_map]] + sources = [ + "something_the_tests_need.py", + "something_else_the_tests_need.ini", + ] + destinations = [ + "test/openjobio", + "test/openjobio_adaptor_runtime", + "test/openjobio_adaptor_runtime_client", + ] + ``` + """ + + REQUIRED_OPTS = [ + "copy_map", + ] + + def initialize(self, version: str, build_data: dict[str, Any]) -> None: + if not self._prepare(): + return + + for copy_cfg in self.copy_map: + _logger.info(f"Copying {copy_cfg.sources} to {copy_cfg.destinations}") + for destination in copy_cfg.destinations: + for source in copy_cfg.sources: + copy_func = shutil.copy if os.path.isfile(os.path.join(self.root, source)) else shutil.copytree + copy_func( + os.path.join(self.root, source), + os.path.join(self.root, destination), + ) + _logger.info("Copy complete") + + def clean(self, versions: list[str]) -> None: + if not self._prepare(): + return + + for copy_cfg in self.copy_map: + _logger.info(f"Cleaning {copy_cfg.sources} from {copy_cfg.destinations}") + cleaned_count = 0 + for destination in copy_cfg.destinations: + for source in copy_cfg.sources: + source_path = os.path.join(self.root, destination, source) + remove_func = os.remove if os.path.isfile(source_path) else os.rmdir + try: + remove_func(source_path) + except FileNotFoundError: + _logger.debug(f"Skipping {source_path} because it does not exist...") + else: + cleaned_count += 1 + _logger.info(f"Cleaned {cleaned_count} items") + + def _prepare(self) -> bool: + missing_required_opts = [ + opt for opt in self.REQUIRED_OPTS if opt not in self.config or not self.config[opt] + ] + if missing_required_opts: + _logger.warning( + f"Required options {missing_required_opts} are missing or empty. " + "Continuing without copying sources to destinations..." + ) + return False + + log_level = self.config.get("log_level") + if log_level: + _logger.setLevel(log_level) + + return True + + @property + def copy_map(self) -> Optional[list[CopyConfig]]: + raw_copy_map: list[dict] = self.config.get("copy_map") + if not raw_copy_map: + return None + + if not ( + isinstance(raw_copy_map, list) + and all(isinstance(copy_cfg, dict) for copy_cfg in raw_copy_map) + ): + raise CustomBuildHookException( + f'"copy_map" config option is not a valid type. Expected list[dict], but got {raw_copy_map}' + ) + + def verify_list_of_file_paths(file_paths: Any, config_name: str): + if not (isinstance(file_paths, list) and all(isinstance(fp, str) for fp in file_paths)): + raise CustomBuildHookException( + f'"{config_name}" config option is not a valid type. 
Expected list[str], but got {file_paths}' + ) + + missing_paths = [ + fp for fp in file_paths if not os.path.exists(os.path.join(self.root, fp)) + ] + if len(missing_paths) > 0: + raise CustomBuildHookException( + f'"{config_name}" config option contains some file paths that do not exist: {missing_paths}' + ) + + copy_map: list[CopyConfig] = [] + for copy_cfg in raw_copy_map: + destinations: list[str] = copy_cfg.get("destinations") + verify_list_of_file_paths(destinations, "destinations") + + sources: list[str] = copy_cfg.get("sources") + verify_list_of_file_paths(sources, "sources") + + copy_map.append(CopyConfig(sources, destinations)) + + return copy_map diff --git a/pipeline/CODEOWNERS b/pipeline/CODEOWNERS new file mode 100644 index 0000000..02f91fd --- /dev/null +++ b/pipeline/CODEOWNERS @@ -0,0 +1 @@ +* @casillas2/Admin \ No newline at end of file diff --git a/pipeline/build.sh b/pipeline/build.sh new file mode 100755 index 0000000..e8014f3 --- /dev/null +++ b/pipeline/build.sh @@ -0,0 +1,10 @@ +#!/bin/sh +# Set the -e option +set -e + +pip install --upgrade pip +pip install --upgrade hatch +pip install --upgrade twine +hatch run codebuild:lint +hatch run codebuild:test +hatch run codebuild:build \ No newline at end of file diff --git a/pipeline/e2e.sh b/pipeline/e2e.sh new file mode 100755 index 0000000..ed3d91d --- /dev/null +++ b/pipeline/e2e.sh @@ -0,0 +1,5 @@ +#!/bin/sh +# Set the -e option +set -e + +./pipeline/build.sh \ No newline at end of file diff --git a/pipeline/integ.sh b/pipeline/integ.sh new file mode 100755 index 0000000..ed3d91d --- /dev/null +++ b/pipeline/integ.sh @@ -0,0 +1,5 @@ +#!/bin/sh +# Set the -e option +set -e + +./pipeline/build.sh \ No newline at end of file diff --git a/pipeline/publish.sh b/pipeline/publish.sh new file mode 100755 index 0000000..9e9c1b5 --- /dev/null +++ b/pipeline/publish.sh @@ -0,0 +1,6 @@ +#!/bin/sh +# Set the -e option +set -e + +./pipeline/build.sh +twine upload --repository codeartifact dist/* --verbose \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml new file mode 100755 index 0000000..d300691 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,111 @@ +[build-system] +requires = ["hatchling", "hatch-vcs"] +build-backend = "hatchling.build" + +[project] +name = "deadline-cloud-test-fixtures" +dynamic = ["version"] +requires-python = ">=3.7" + +dependencies = [ + "boto3 ~= 1.26", +] + +[project.entry-points.pytest11] +deadline_test_scaffolding = "deadline_test_scaffolding.fixtures" + +[tool.hatch.build] +artifacts = [ + "*_version.py", +] + +[tool.hatch.version] +source = "vcs" + +[tool.hatch.version.raw-options] +version_scheme = "post-release" + +[tool.hatch.build.hooks.vcs] +version-file = "_version.py" + +[tool.hatch.build.hooks.custom] +path = "hatch_version_hook.py" + +[[tool.hatch.build.hooks.custom.copy_map]] +sources = [ + "_version.py", +] +destinations = [ + "src/deadline_test_scaffolding", +] + +[tool.hatch.build.targets.sdist] +include = [ + "src/*", + "hatch_version_hook.py", +] + +[tool.hatch.build.targets.wheel] +packages = [ + "src/deadline_test_scaffolding" +] + +[tool.mypy] +check_untyped_defs = true +show_error_codes = true +pretty = true +files = [ "src/**/*.py" ] + +[[tool.mypy.overrides]] +module = [ + "boto3", + "botocore.*" +] +ignore_missing_imports = true + +[tool.ruff] +ignore = [ + "E501", +] +line-length = 100 + +[tool.ruff.isort] +known-first-party = [ + "deadline_test_scaffolding" +] + +[tool.black] +line-length = 100 + +[tool.pytest.ini_options] +xfail_strict = true +addopts = [ 
+ "--durations=5", + "--cov=src/deadline_test_scaffolding", + "--color=yes", + "--cov-report=html:build/coverage", + "--cov-report=xml:build/coverage/coverage.xml", + "--cov-report=term-missing", + "--numprocesses=auto", +] +testpaths = [ "test" ] +looponfailroots = [ + "src", + "test", +] +# looponfailroots is deprecated, this removes the deprecation from the test output +filterwarnings = [ + "ignore::DeprecationWarning" +] + +[tool.coverage.run] +source_pkgs = [ "deadline_test_scaffolding" ] + + +[tool.coverage.paths] +source = [ + "src/" +] + +[tool.coverage.report] +show_missing = true \ No newline at end of file diff --git a/requirements-testing.txt b/requirements-testing.txt new file mode 100644 index 0000000..6c34d78 --- /dev/null +++ b/requirements-testing.txt @@ -0,0 +1,9 @@ +coverage[toml] ~= 7.3 +pytest ~= 7.4 +pytest-cov ~= 4.1 +pytest-timeout ~= 2.1 +pytest-xdist ~= 3.3 +black ~= 23.7 +mypy == 1.5.0 +ruff ~= 0.0.284 +twine ~= 4.0 \ No newline at end of file diff --git a/scripts/add_copyright_headers.sh b/scripts/add_copyright_headers.sh new file mode 100755 index 0000000..df92935 --- /dev/null +++ b/scripts/add_copyright_headers.sh @@ -0,0 +1,75 @@ +#!/bin/bash +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. + +if [ $# -eq 0 ]; then + echo "Usage: add-copyright-headers ..." >&2 + exit 1 +fi + +for file in "$@"; do + if ! head -1 | grep 'Copyright ' "$file" >/dev/null; then + case $file in + *.java) + CONTENT=$(cat "$file") + cat > "$file" </dev/null; then + CONTENT=$(tail -n +2 "$file") + cat > "$file" < +$CONTENT +EOF + else + CONTENT=$(cat "$file") + cat > "$file" < +$CONTENT +EOF + fi + ;; + *.py) + CONTENT=$(cat "$file") + cat > "$file" < "$file" < "$file" < "$file" <&2 + exit 1 + ;; + esac + fi +done \ No newline at end of file diff --git a/scripts/publish.sh b/scripts/publish.sh new file mode 100755 index 0000000..1adb6b0 --- /dev/null +++ b/scripts/publish.sh @@ -0,0 +1,14 @@ +#!/bin/bash +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. + +set -euo pipefail + +hatch clean +hatch run lint +hatch run test:test +hatch build + +export TWINE_USERNAME=aws +export TWINE_PASSWORD=`aws codeartifact get-authorization-token --domain $CODEARTIFACT_DOMAIN --domain-owner $CODEARTIFACT_ACCOUNT_ID --query authorizationToken --output text` +export TWINE_REPOSITORY_URL=`aws codeartifact get-repository-endpoint --domain $CODEARTIFACT_DOMAIN --domain-owner $CODEARTIFACT_ACCOUNT_ID --repository $CODEARTIFACT_REPOSITORY --format pypi --query repositoryEndpoint --output text` +twine upload dist/* diff --git a/src/deadline_test_scaffolding/__init__.py b/src/deadline_test_scaffolding/__init__.py new file mode 100644 index 0000000..9a05c03 --- /dev/null +++ b/src/deadline_test_scaffolding/__init__.py @@ -0,0 +1,17 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+from .deadline_manager import DeadlineManager, DeadlineClient +from .deadline_stub import StubDeadlineClient +from .fixtures import deadline_manager_fixture, deadline_scaffolding, create_worker_agent +from .job_attachment_manager import JobAttachmentManager +from ._version import __version__ as version # noqa + +__all__ = [ + "DeadlineManager", + "DeadlineClient", + "JobAttachmentManager", + "deadline_manager_fixture", + "deadline_scaffolding", + "StubDeadlineClient", + "version", + "create_worker_agent", +] diff --git a/src/deadline_test_scaffolding/cf_templates/job_attachments.yaml b/src/deadline_test_scaffolding/cf_templates/job_attachments.yaml new file mode 100644 index 0000000..a8faba2 --- /dev/null +++ b/src/deadline_test_scaffolding/cf_templates/job_attachments.yaml @@ -0,0 +1,42 @@ +AWSTemplateFormatVersion: 2010-09-09 +Parameters: + BucketName: + Type: String +Resources: + JobAttachmentBucket: + Type: AWS::S3::Bucket + Properties: + BucketName: !Ref BucketName + BucketEncryption: + ServerSideEncryptionConfiguration: + - ServerSideEncryptionByDefault: + SSEAlgorithm: AES256 + PublicAccessBlockConfiguration: + BlockPublicAcls: true + BlockPublicPolicy: true + IgnorePublicAcls: true + RestrictPublicBuckets: true + UpdateReplacePolicy: Delete + DeletionPolicy: Delete + # Deny all non-https traffic + JobAttachmentBucketPolicy: + Type: AWS::S3::BucketPolicy + Properties: + Bucket: + Ref: JobAttachmentBucket + PolicyDocument: + Statement: + - Action: s3:* + Condition: + Bool: + aws:SecureTransport: "false" + Effect: Deny + Principal: + AWS: "*" + Resource: + - !GetAtt JobAttachmentBucket.Arn + - !Join + - "" + - - !GetAtt JobAttachmentBucket.Arn + - /* + Version: "2012-10-17" diff --git a/src/deadline_test_scaffolding/constants.py b/src/deadline_test_scaffolding/constants.py new file mode 100644 index 0000000..d3186fd --- /dev/null +++ b/src/deadline_test_scaffolding/constants.py @@ -0,0 +1,47 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
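+# Shared constants for the test scaffolding. Most values honour environment
+# variable overrides (e.g. STAGE, JOB_ATTACHMENTS_BUCKET_NAME) so tests can be
+# retargeted per stage without code changes.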
+import os + +STAGE = os.environ.get("STAGE", "Prod") + +BOOTSTRAP_CLOUDFORMATION_STACK_NAME = f"TestScaffoldingStack{STAGE}" + +# Role Names +DEADLINE_WORKER_BOOTSTRAP_ROLE = f"DeadlineWorkerBootstrapRole{STAGE}" +DEADLINE_WORKER_BOOSTRAP_INSTANCE_PROFILE_NAME = f"DeadlineWorkerBootstrapInstanceProfile{STAGE}" +DEADLINE_WORKER_ROLE = f"DeadlineWorkerTestRole{STAGE}" +DEADLINE_QUEUE_SESSION_ROLE = f"DeadlineScaffoldingQueueSessionRole{STAGE}" + +# Job Attachments +JOB_ATTACHMENTS_BUCKET_RESOURCE = "ScaffoldingJobAttachmentsBucket" +JOB_ATTACHMENTS_BUCKET_NAME = os.environ.get( + "JOB_ATTACHMENTS_BUCKET_NAME", "scaffolding-job-attachments-bucket" +) +JOB_ATTACHMENTS_BUCKET_POLICY_RESOURCE = f"JobAttachmentsPolicy{STAGE}" +JOB_ATTACHMENTS_ROOT_PREFIX = "root" + +# Worker Agent Configurations +DEFAULT_CMF_CONFIG = { + "customerManaged": { + "autoScalingConfiguration": { + "mode": "NO_SCALING", + "maxFleetSize": 1, + }, + "workerRequirements": { + "vCpuCount": {"min": 1}, + "memoryMiB": {"min": 1024}, + "osFamily": "linux", + "cpuArchitectureType": "x86_64", + }, + } +} + +# Service Principals +CREDENTIAL_VENDING_PRINCIPAL = os.environ.get( + "CREDENTIAL_VENDING_PRINCIPAL", "credential-vending.deadline-closed-beta.amazonaws.com" +) + +# Temporary constants +DEADLINE_SERVICE_MODEL_BUCKET = os.environ.get("DEADLINE_SERVICE_MODEL_BUCKET", "") +CODEARTIFACT_DOMAIN = os.environ.get("CODEARTIFACT_DOMAIN", "") +CODEARTIFACT_ACCOUNT_ID = os.environ.get("CODEARTIFACT_ACCOUNT_ID", "") +CODEARTIFACT_REPOSITORY = os.environ.get("CODEARTIFACT_REPOSITORY", "") diff --git a/src/deadline_test_scaffolding/deadline_manager.py b/src/deadline_test_scaffolding/deadline_manager.py new file mode 100644 index 0000000..d46be13 --- /dev/null +++ b/src/deadline_test_scaffolding/deadline_manager.py @@ -0,0 +1,571 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +from __future__ import annotations + +import os +import posixpath +import sys +import tempfile +import uuid +from time import sleep +from typing import Any, Callable, Dict, List, Optional + +import boto3 +from botocore.client import BaseClient +from botocore.exceptions import ClientError +from botocore.loaders import Loader +from botocore.model import ServiceModel, OperationModel + +from .constants import ( + JOB_ATTACHMENTS_ROOT_PREFIX, + DEFAULT_CMF_CONFIG, +) + + +class DeadlineManager: + """This class is responsible for setting up and tearing down the required components + for the tests to be run.""" + + deadline_service_model_bucket: Optional[str] = None + deadline_endpoint: Optional[str] = None + + kms_client: BaseClient + kms_key_metadata: Optional[Dict[str, Any]] + + deadline_client: DeadlineClient + farm_id: Optional[str] + queue_id: Optional[str] + fleet_id: Optional[str] + job_attachment_bucket: Optional[str] + additional_queues: list[dict[str, Any]] + deadline_model_dir: Optional[tempfile.TemporaryDirectory] = None + + MOCKED_SERVICE_VERSION = "2020-08-21" + + def __init__(self, should_add_deadline_models: bool = False) -> None: + """ + Initialize the DeadlineManager. + """ + self.deadline_service_model_bucket = os.getenv("DEADLINE_SERVICE_MODEL_BUCKET") + self.deadline_endpoint = os.getenv("DEADLINE_ENDPOINT") + + # Installing the deadline service models. 
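+ # get_deadline_models() downloads the service-2.json model file and points
+ # AWS_DATA_PATH at it so that boto3 can construct a "deadline" client even
+ # when botocore does not yet bundle the service model.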
+ if should_add_deadline_models: + self.get_deadline_models() + + self.deadline_client = self._get_deadline_client(self.deadline_endpoint) + + # Create the KMS client + self.kms_client = boto3.client("kms") + + self.farm_id: Optional[str] = None + self.queue_id: Optional[str] = None + self.fleet_id: Optional[str] = None + self.additional_queues: list[dict[str, Any]] = [] + self.kms_key_metadata: Optional[dict[str, Any]] = None + + def get_deadline_models(self): + """ + Download and install the deadline service model so that we can create a + deadline boto3 client. + """ + if self.deadline_service_model_bucket is None: + raise ValueError( + "Environment variable DEADLINE_SERVICE_MODEL_BUCKET is not set. " + "Unable to get deadline service model." + ) + + # Create the S3 client + s3_client: BaseClient = boto3.client("s3") + + # Create a temp directory to store the model file + self.deadline_model_dir = tempfile.TemporaryDirectory() + service_model_dir = posixpath.join( + self.deadline_model_dir.name, "deadline", self.MOCKED_SERVICE_VERSION + ) + os.makedirs(service_model_dir) + + # Downloading the deadline models. + s3_client.download_file( + self.deadline_service_model_bucket, + "service-2.json", + posixpath.join(service_model_dir, "service-2.json"), + ) + os.environ["AWS_DATA_PATH"] = self.deadline_model_dir.name + + def create_scaffolding( + self, + worker_role_arn: str, + job_attachments_bucket: str, + farm_name: str = uuid.uuid4().hex, + queue_name: str = uuid.uuid4().hex, + fleet_name: str = uuid.uuid4().hex, + ) -> None: + self.create_kms_key() + self.create_farm(farm_name) + self.create_queue(queue_name) + self.add_job_attachments_bucket(job_attachments_bucket) + self.create_fleet(fleet_name, worker_role_arn) + self.queue_fleet_association() + + def create_kms_key(self) -> None: + try: + response: Dict[str, Any] = self.kms_client.create_key( + Description="The KMS key used for testing, created by the " + "DeadlineClientSoftwareTestScaffolding.", + Tags=[{"TagKey": "Name", "TagValue": "DeadlineClientSoftwareTestScaffolding"}], + ) + except ClientError as e: + print("Failed to create CMK.", file=sys.stderr) + print(f"The following exception was raised: {e}", file=sys.stderr) + raise + else: + self.kms_key_metadata = response["KeyMetadata"] + + # We should always get metadata when successful; this check is for mypy. + if self.kms_key_metadata: # pragma: no cover + print(f"Created CMK with id = {self.kms_key_metadata['KeyId']}") + self.kms_client.enable_key(KeyId=self.kms_key_metadata["KeyId"]) + print(f"Enabled CMK with id = {self.kms_key_metadata['KeyId']}") + + def delete_kms_key(self) -> None: + if ( + not hasattr(self, "kms_key_metadata") + or self.kms_key_metadata is None + or "KeyId" not in self.kms_key_metadata + ): + raise Exception("ERROR: Attempting to delete a KMS key when none was created!") + + try: + # KMS keys by default are deleted in 30 days (this is their pending window). + # 7 days is the fastest we can clean them up. 
+ pending_window = 7 + self.kms_client.schedule_key_deletion( + KeyId=self.kms_key_metadata["KeyId"], PendingWindowInDays=pending_window + ) + except ClientError as e: + print( + "Failed to schedule the deletion of CMK with id = " + f"{self.kms_key_metadata['KeyId']}", + file=sys.stderr, + ) + print(f"The following error was raised: {e}", file=sys.stderr) + raise + else: + print(f"Scheduled deletion of CMK with id = {self.kms_key_metadata['KeyId']}") + self.kms_key_metadata = None + + def create_farm(self, farm_name: str) -> None: + if ( + not hasattr(self, "kms_key_metadata") + or self.kms_key_metadata is None + or "Arn" not in self.kms_key_metadata + ): + raise Exception("ERROR: Attempting to create a farm without having created a CMK.") + + try: + response = self.deadline_client.create_farm( + displayName=farm_name, kmsKeyArn=self.kms_key_metadata["Arn"] + ) + except ClientError as e: + print("Failed to create a farm.", file=sys.stderr) + print(f"The following exception was raised: {e}", file=sys.stderr) + raise + else: + self.farm_id = response["farmId"] + print(f"Successfully created farm with id = {self.farm_id}") + + def delete_farm(self) -> None: + if not hasattr(self, "farm_id") or not self.farm_id: + raise Exception("ERROR: Attempting to delete a farm without having created one.") + + try: + self.deadline_client.delete_farm(farmId=self.farm_id) + except ClientError as e: + print(f"Failed to delete farm with id = {self.farm_id}.", file=sys.stderr) + print(f"The following exception was raised: {e}", file=sys.stderr) + raise + else: + print(f"Successfully deleted farm with id = {self.farm_id}") + self.farm_id = None + + # TODO: Add support for queue users with jobsRunAs + def create_queue(self, queue_name: str) -> None: + if not hasattr(self, "farm_id") or self.farm_id is None: + raise Exception( + "ERROR: Attempting to create a queue without having created a farm!" + ) + + try: + response = self.deadline_client.create_queue( + displayName=queue_name, + farmId=self.farm_id, + ) + except ClientError as e: + print(f"Failed to create queue with displayName = {queue_name}.", file=sys.stderr) + print(f"The following exception was raised: {e}", file=sys.stderr) + raise + else: + self.queue_id = response["queueId"] + print(f"Successfully created queue with id = {self.queue_id}") + + def add_job_attachments_bucket(self, job_attachments_bucket: str): + """Add a job attachments bucket to the queue""" + self.deadline_client.update_queue( + queueId=self.queue_id, + farmId=self.farm_id, + jobAttachmentSettings={ + "s3BucketName": job_attachments_bucket, + "rootPrefix": JOB_ATTACHMENTS_ROOT_PREFIX, + }, + ) + + def create_additional_queue(self, **kwargs) -> Dict[str, Any]: + """Create and add another queue to the deadline manager""" + input = {"farmId": self.farm_id} + input.update(kwargs) + response = self.deadline_client.create_queue(**input) + response = self.deadline_client.get_queue( + farmId=input["farmId"], queueId=response["queueId"] + ) + self.additional_queues.append(response) + return response + + def delete_queue(self) -> None: + if not hasattr(self, "farm_id") or not self.farm_id: + raise Exception( + "ERROR: Attempting to delete a queue without having created a farm!" 
+ ) + + if not hasattr(self, "queue_id") or not self.queue_id: + raise Exception("ERROR: Attempting to delete a queue without having created one!") + + try: + self.deadline_client.delete_queue(queueId=self.queue_id, farmId=self.farm_id) + except ClientError as e: + print(f"Failed to delete queue with id = {self.queue_id}.", file=sys.stderr) + print(f"The following exception was raised: {e}", file=sys.stderr) + raise + else: + print(f"Successfully deleted queue with id = {self.queue_id}") + self.queue_id = None + + def delete_additional_queues(self) -> None: + """Delete all additional queues that have been added.""" + for queue in self.additional_queues: + try: + self.deadline_client.delete_queue(farmId=queue["farmId"], queueId=queue["queueId"]) + except Exception as e: + print(f"delete queue exception {str(e)}") + continue + + def create_fleet(self, fleet_name: str, worker_role_arn: str) -> None: + if not hasattr(self, "farm_id") or not self.farm_id: + raise Exception( + "ERROR: Attempting to create a fleet without having created a farm!" + ) + try: + response = self.deadline_client.create_fleet( + farmId=self.farm_id, + displayName=fleet_name, + roleArn=worker_role_arn, + configuration=DEFAULT_CMF_CONFIG, + ) + except ClientError as e: + print(f"Failed to create fleet with displayName = {fleet_name}.", file=sys.stderr) + print(f"The following exception was raised: {e}", file=sys.stderr) + raise + else: + self.fleet_id = response["fleetId"] + self.wait_for_desired_fleet_status( + desired_status="ACTIVE", allowed_status=["ACTIVE", "CREATE_IN_PROGRESS"] + ) + print(f"Successfully created a fleet with id = {self.fleet_id}") + + # Temporary until we have waiters + def wait_for_desired_fleet_status(self, desired_status: str, allowed_status: List[str]) -> Any: + max_retries = 10 + fleet_status = None + retry_count = 0 + while fleet_status != desired_status and retry_count < max_retries: + response = self.deadline_client.get_fleet(fleetId=self.fleet_id, farmId=self.farm_id) + + fleet_status = response["status"] + + if fleet_status not in allowed_status: + raise ValueError( + f"fleet entered an invalid status ({fleet_status}) while " + f"waiting for the desired status: {desired_status}." + ) + + if fleet_status == desired_status: + return response + + print(f"Fleet status: {fleet_status}\nChecking again...") + retry_count += 1 + sleep(10) + + raise ValueError( + f"Timed out waiting for fleet status to reach the desired status {desired_status}." 
+ ) + + def queue_fleet_association(self) -> None: + if not hasattr(self, "farm_id") or not self.farm_id: + raise Exception("ERROR: Attempting to queue a fleet without having created a farm!") + + if not hasattr(self, "queue_id") or not self.queue_id: + raise Exception("ERROR: Attempting to queue a fleet without having created a queue!") + + if not hasattr(self, "fleet_id") or not self.fleet_id: + raise Exception("ERROR: Attempting to queue a fleet without having created one!") + + try: + self.deadline_client.create_queue_fleet_association( + farmId=self.farm_id, queueId=self.queue_id, fleetId=self.fleet_id + ) + except ClientError as e: + print(f"Failed to associate fleet with id = {self.fleet_id}.", file=sys.stderr) + print(f"The following exception was raised: {e}", file=sys.stderr) + raise + else: + print(f"Successfully queued fleet with id = {self.fleet_id}") + + # Temporary until we have waiters + def stop_queue_fleet_associations_and_wait(self) -> Any: + self.deadline_client.update_queue_fleet_association( + farmId=self.farm_id, + queueId=self.queue_id, + fleetId=self.fleet_id, + status="CANCEL_WORK", + ) + max_retries = 10 + retry_count = 0 + qfa_status = None + allowed_status = ["STOPPED", "CANCEL_WORK"] + while qfa_status != "STOPPED" and retry_count < max_retries: + response = self.deadline_client.get_queue_fleet_association( + farmId=self.farm_id, queueId=self.queue_id, fleetId=self.fleet_id + ) + + qfa_status = response["status"] + + if qfa_status not in allowed_status: + raise ValueError( + f"Association entered an invalid status ({qfa_status}) while " + f"waiting for the desired status: STOPPED" + ) + + if qfa_status == "STOPPED": + return response + + print(f"Queue Fleet Association: {qfa_status}\nChecking again...") + retry_count += 1 + sleep(10) + raise ValueError("Timed out waiting for association to reach a STOPPED status.") + + def delete_fleet(self) -> None: + if not hasattr(self, "farm_id") or not self.farm_id: + raise Exception( + "ERROR: Attempting to delete a fleet without having created a farm!" + ) + + if not hasattr(self, "fleet_id") or not self.fleet_id: + raise Exception("ERROR: Attempting to delete a fleet when none was created!") + + try: + # Delete queue fleet association. + self.stop_queue_fleet_associations_and_wait() + self.deadline_client.delete_queue_fleet_association( + farmId=self.farm_id, queueId=self.queue_id, fleetId=self.fleet_id + ) + # Deleting the fleet. + self.deadline_client.delete_fleet(farmId=self.farm_id, fleetId=self.fleet_id) + except ClientError as e: + print( + f"ERROR: Failed to delete fleet with id = {self.fleet_id}", file=sys.stderr + ) + print(f"The following exception was raised: {e}", file=sys.stderr) + raise + else: + print(f"Successfully deleted fleet with id = {self.fleet_id}") + self.fleet_id = None + + def cleanup_scaffolding(self) -> None: + # Only deleting the fleet if we have a fleet. + if hasattr(self, "fleet_id") and self.fleet_id: + self.delete_fleet() + + if hasattr(self, "farm_id") and self.farm_id: + # Only deleting the queue if we have a queue. + if hasattr(self, "queue_id") and self.queue_id: + self.delete_queue() + + self.delete_farm() + + # Only deleting the kms key if we have a kms key. 
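+ # (Note: delete_kms_key only schedules deletion; KMS keeps the key in a
+ # 7-day pending window before it is removed.)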
+ if hasattr(self, "kms_key_metadata") and self.kms_key_metadata: + self.delete_kms_key() + + def _get_deadline_client(self, deadline_endpoint: Optional[str]) -> DeadlineClient: + """Create a DeadlineClient shim layer over an actual boto client""" + self.session = boto3.Session() + real_deadline_client = self.session.client( + "deadline", + endpoint_url=deadline_endpoint, + ) + + return DeadlineClient(real_deadline_client) + + +class DeadlineClient: + """ + A shim layer for boto Deadline client. This class will check if a method exists on the real + boto3 Deadline client and call it if it exists. If it doesn't exist, an AttributeError will be raised. + """ + + _real_client: Any + + def __init__(self, real_client: Any) -> None: + self._real_client = real_client + + def create_farm(self, *args, **kwargs) -> Any: + create_farm_input_members = self._get_deadline_api_input_shape("CreateFarm") + if "displayName" not in create_farm_input_members and "name" in create_farm_input_members: + kwargs["name"] = kwargs.pop("displayName") + return self._real_client.create_farm(*args, **kwargs) + + def create_fleet(self, *args, **kwargs) -> Any: + create_fleet_input_members = self._get_deadline_api_input_shape("CreateFleet") + if "displayName" not in create_fleet_input_members and "name" in create_fleet_input_members: + kwargs["name"] = kwargs.pop("displayName") + if ( + "roleArn" not in create_fleet_input_members + and "workeRoleArn" in create_fleet_input_members + ): + kwargs["workerRoleArn"] = kwargs.pop("roleArn") + return self._real_client.create_fleet(*args, **kwargs) + + def get_fleet(self, *args, **kwargs) -> Any: + response = self._real_client.get_fleet(*args, **kwargs) + if "name" in response and "displayName" not in response: + response["displayName"] = response["name"] + del response["name"] + if "state" in response and "status" not in response: + response["status"] = response["state"] + del response["state"] + if "type" in response: + del response["type"] + return response + + def get_queue_fleet_association(self, *args, **kwargs) -> Any: + response = self._real_client.get_queue_fleet_association(*args, **kwargs) + if "state" in response and "status" not in response: + response["status"] = response["state"] + del response["state"] + return response + + def create_queue(self, *args, **kwargs) -> Any: + create_queue_input_members = self._get_deadline_api_input_shape("CreateQueue") + if "displayName" not in create_queue_input_members and "name" in create_queue_input_members: + kwargs["name"] = kwargs.pop("displayName") + return self._real_client.create_queue(*args, **kwargs) + + def create_queue_fleet_association(self, *args, **kwargs) -> Any: + create_queue_fleet_association_method_name: Optional[str] + create_queue_fleet_association_method: Optional[str] + + for create_queue_fleet_association_method_name in ( + "put_queue_fleet_association", + "create_queue_fleet_association", + ): + create_queue_fleet_association_method = getattr( + self._real_client, create_queue_fleet_association_method_name, None + ) + if create_queue_fleet_association_method: + break + else: + create_queue_fleet_association_method = None + + # mypy complains about they kwargs type + return create_queue_fleet_association_method(*args, **kwargs) # type: ignore + + def create_job(self, *args, **kwargs) -> Any: + create_job_input_members = self._get_deadline_api_input_shape("CreateJob") + # revert to old parameter names if old service model is used + if "maxRetriesPerTask" in kwargs: + if "maxErrorsPerTask" in 
create_job_input_members: + kwargs["maxErrorsPerTask"] = kwargs.pop("maxRetriesPerTask") + if "template" in kwargs: + if "jobTemplate" in create_job_input_members: + kwargs["jobTemplate"] = kwargs.pop("template") + kwargs["jobTemplateType"] = kwargs.pop("templateType") + if "parameters" in kwargs: + kwargs["jobParameters"] = kwargs.pop("parameters") + if "targetTaskRunStatus" in kwargs: + if "initialState" in create_job_input_members: + kwargs["initialState"] = kwargs.pop("targetTaskRunStatus") + if "priority" not in kwargs: + kwargs["priority"] = 50 + return self._real_client.create_job(*args, **kwargs) + + def update_queue_fleet_association(self, *args, **kwargs) -> Any: + update_queue_fleet_association_method_name: Optional[str] + update_queue_fleet_association_method: Optional[Callable] + + for update_queue_fleet_association_method_name in ( + "update_queue_fleet_association", + "update_queue_fleet_association_state", + ): + update_queue_fleet_association_method = getattr( + self._real_client, update_queue_fleet_association_method_name, None + ) + if update_queue_fleet_association_method: + break + else: + update_queue_fleet_association_method = None + + if update_queue_fleet_association_method_name == "update_queue_fleet_association": + # mypy complains about the kwargs type + return update_queue_fleet_association_method(*args, **kwargs) # type: ignore + + if update_queue_fleet_association_method_name == "update_queue_fleet_association_state": + kwargs["state"] = kwargs.pop("status") + # mypy complains about the kwargs type + return update_queue_fleet_association_method(*args, **kwargs) # type: ignore + + def _get_deadline_api_input_shape(self, api_name: str) -> dict[str, Any]: + """ + Given a string name of an API e.g. CreateJob, returns the shape of the + inputs to that API. + """ + api_model = self._get_deadline_api_model(api_name) + if api_model: + return api_model.input_shape.members + return {} + + def _get_deadline_api_model(self, api_name: str) -> Optional[OperationModel]: + """ + Given a string name of an API e.g. CreateJob, returns the OperationModel + for that API from the service model. + """ + data_model_path = os.getenv("AWS_DATA_PATH") + loader = Loader(extra_search_paths=[data_model_path] if data_model_path is not None else []) + deadline_service_description = loader.load_service_model("deadline", "service-2") + deadline_service_model = ServiceModel(deadline_service_description, service_name="deadline") + return OperationModel( + deadline_service_description["operations"][api_name], deadline_service_model + ) + + def __getattr__(self, __name: str) -> Any: + """ + Respond to unknown method calls by calling the underlying _real_client. + If the underlying _real_client does not have a given method, an AttributeError + will be raised. + Note that __getattr__ is only called if the attribute cannot otherwise be found, + so if this class already has the called method defined, __getattr__ will not be called. + This is in opposition to __getattribute__ which is called by default. + """ + + def method(*args, **kwargs): + return getattr(self._real_client, __name)(*args, **kwargs) + + return method diff --git a/src/deadline_test_scaffolding/deadline_stub.py b/src/deadline_test_scaffolding/deadline_stub.py new file mode 100644 index 0000000..c75bb6b --- /dev/null +++ b/src/deadline_test_scaffolding/deadline_stub.py @@ -0,0 +1,94 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
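+
+# A minimal sketch of how this stub can be wired into a unit test (the display
+# names below are illustrative only):
+#
+#     farm = FarmInfo(displayName="test-farm")
+#     queue = QueueInfo(displayName="test-queue", farmId=farm.farmId)
+#     client = StubDeadlineClient(farm=farm, queue=queue)
+#     assert client.list_farms()["farms"][0]["displayName"] == "test-farm"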
+ +import dataclasses +from dataclasses import dataclass +from typing import Optional +from deadline_test_scaffolding.constants import JOB_ATTACHMENTS_ROOT_PREFIX + +from botocore.exceptions import ClientError as OriginalClientError + + +class ClientError(OriginalClientError): + def __init__(self, errmsg, operation_name): + super().__init__( + error_response={"Error": {"Message": errmsg}}, + operation_name=operation_name, + ) + + +@dataclass +class FarmInfo: + """ + Dataclass used to build list-farm responses + """ + + displayName: str + farmId: str = "farm-01234567890123456789012345678901" # pylint: disable=invalid-name + status: str = "ACTIVE" + + +@dataclass +class QueueInfo: + """ + Dataclass used to build list-queues responses + """ + + displayName: str + queueId: str = "queue-01234567890123456789012345678901" # pylint: disable=invalid-name + status: str = "ACTIVE" + farmId: str = "farm-01234567890123456789012345678901" # pylint: disable=invalid-name + + +@dataclass +class JobInfo: + jobId: str + farmId: str + queueId: str + template: str + templateType: str + priority: str + attachments: dict + + +@dataclass +class StubDeadlineClient: + """ + Stub implementation of the Deadline client generated by botocore. + """ + + farm: FarmInfo + queue: QueueInfo + job: Optional[JobInfo] = None + job_attachments_bucket_name: Optional[str] = None + + def create_job(self, **kwargs) -> dict: + self.job = JobInfo(jobId="job-123", **kwargs) + return { + "jobId": self.job.jobId, + "state": "CREATING", + } + + def get_queue(self, *, farmId: str, queueId: str) -> dict: + if farmId != self.farm.farmId: + raise ClientError( + f"Wrong farm ID. Expected {self.farm.farmId}, got {farmId}", "GetQueue" + ) + if queueId != self.queue.queueId: + raise ClientError( + f"Wrong queue ID. Expected {self.queue.queueId}, got {queueId}", "GetQueue" + ) + + return { + **dataclasses.asdict(self.queue), + "fleets": [], + "jobAttachmentSettings": { + "s3BucketName": self.job_attachments_bucket_name, + "rootPrefix": JOB_ATTACHMENTS_ROOT_PREFIX, + }, + } + + def list_queues(self, *, farmId: str) -> dict: + return {"queues": [dataclasses.asdict(self.queue)] if farmId == self.queue.farmId else []} + + def list_farms(self, *, isMemberOf: bool = False) -> dict: + return {"farms": [dataclasses.asdict(self.farm)]} diff --git a/src/deadline_test_scaffolding/fixtures.py b/src/deadline_test_scaffolding/fixtures.py new file mode 100644 index 0000000..ccf7378 --- /dev/null +++ b/src/deadline_test_scaffolding/fixtures.py @@ -0,0 +1,335 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
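+
+# These fixtures are exposed to pytest via the "pytest11" entry point in
+# pyproject.toml, so depending packages can request them by name. A
+# hypothetical consuming test might look like:
+#
+#     def test_farm_created(deadline_scaffolding):
+#         assert deadline_scaffolding.farm_id is not None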
+ +import botocore +import boto3 +import os +import time +import pytest +import json +from typing import Any, Callable, Generator, Dict, Optional, Type +from types import TracebackType + +from .deadline_manager import DeadlineManager +from .job_attachment_manager import JobAttachmentManager +from .utils import ( + generate_worker_role_cfn_template, + generate_boostrap_worker_role_cfn_template, + generate_boostrap_instance_profile_cfn_template, + generate_queue_session_role, + generate_job_attachments_bucket, + generate_job_attachments_bucket_policy, +) + +from .constants import ( + DEADLINE_WORKER_ROLE, + DEADLINE_WORKER_BOOTSTRAP_ROLE, + DEADLINE_WORKER_BOOSTRAP_INSTANCE_PROFILE_NAME, + DEADLINE_QUEUE_SESSION_ROLE, + DEADLINE_SERVICE_MODEL_BUCKET, + CODEARTIFACT_DOMAIN, + CODEARTIFACT_ACCOUNT_ID, + CODEARTIFACT_REPOSITORY, + JOB_ATTACHMENTS_BUCKET_NAME, + JOB_ATTACHMENTS_BUCKET_RESOURCE, + JOB_ATTACHMENTS_BUCKET_POLICY_RESOURCE, + BOOTSTRAP_CLOUDFORMATION_STACK_NAME, + STAGE, +) + +AMI_ID = os.environ.get("AMI_ID", "") +SUBNET_ID = os.environ.get("SUBNET_ID", "") +SECURITY_GROUP_ID = os.environ.get("SECURITY_GROUP_ID", "") + + +@pytest.fixture(scope="session") +def stage() -> str: + if os.getenv("LOCAL_DEVELOPMENT", "false").lower() == "true": + return "dev" + else: + return os.environ["STAGE"] + + +@pytest.fixture(scope="session") +def account_id() -> str: + return os.environ["SERVICE_ACCOUNT_ID"] + + +# Boto client fixtures +@pytest.fixture(scope="session") +def session() -> boto3.Session: + return boto3.Session() + + +@pytest.fixture(scope="session") +def iam_client(session: boto3.Session) -> botocore.client.BaseClient: + return session.client("iam") + + +@pytest.fixture(scope="session") +def ec2_client(session: boto3.Session) -> botocore.client.BaseClient: + return session.client("ec2") + + +@pytest.fixture(scope="session") +def ssm_client(session: boto3.Session) -> botocore.client.BaseClient: + return session.client("ssm") + + +@pytest.fixture(scope="session") +def cfn_client(session: boto3.Session) -> botocore.client.BaseClient: + return session.client("cloudformation") + + +# Bootstrap persistent resources +@pytest.fixture( + scope="session", autouse=os.environ.get("SKIP_BOOTSTRAP_TEST_RESOURCES", "False") != "True" +) +def bootstrap_test_resources(cfn_client: botocore.client.BaseClient) -> None: + # All required resources are created using a CloudFormation stack + cfn_template: dict[str, Any] = { + "AWSTemplateFormatVersion": "2010-09-09", + "Description": "Stack created by deadline-cloud-test-fixtures", + "Resources": { + # A role for use by the Worker Agent after being bootstrapped + DEADLINE_WORKER_ROLE: generate_worker_role_cfn_template(), + DEADLINE_WORKER_BOOTSTRAP_ROLE: generate_boostrap_worker_role_cfn_template(), + DEADLINE_QUEUE_SESSION_ROLE: generate_queue_session_role(), + DEADLINE_WORKER_BOOSTRAP_INSTANCE_PROFILE_NAME: generate_boostrap_instance_profile_cfn_template(), + JOB_ATTACHMENTS_BUCKET_RESOURCE: generate_job_attachments_bucket(), + JOB_ATTACHMENTS_BUCKET_POLICY_RESOURCE: generate_job_attachments_bucket_policy(), + }, + } + stack_name = BOOTSTRAP_CLOUDFORMATION_STACK_NAME + update_or_create_cfn_stack(cfn_client, stack_name, cfn_template) + + +# Create or update the bootstrap stack +def update_or_create_cfn_stack( + cfn_client: botocore.client.BaseClient, stack_name: str, cfn_template: Dict[str, Any] +) -> None: + try: + cfn_client.update_stack( + StackName=stack_name, + TemplateBody=json.dumps(cfn_template), + Capabilities=["CAPABILITY_NAMED_IAM"], + ) + waiter = 
cfn_client.get_waiter("stack_update_complete") + waiter.wait(StackName=stack_name) + except cfn_client.exceptions.ClientError as e: + if e.response["Error"]["Message"] != "No updates are to be performed.": + cfn_client.create_stack( + StackName=stack_name, + TemplateBody=json.dumps(cfn_template), + Capabilities=["CAPABILITY_NAMED_IAM"], + OnFailure="DELETE", + EnableTerminationProtection=False, + ) + waiter = cfn_client.get_waiter("stack_create_complete") + waiter.wait(StackName=stack_name) + + +@pytest.fixture(scope="session") +def deadline_manager_fixture(): + deadline_manager_fixture = DeadlineManager(should_add_deadline_models=True) + yield deadline_manager_fixture + + +# get the worker role arn +@pytest.fixture(scope="session") +def worker_role_arn(iam_client: botocore.client.BaseClient) -> str: + response = iam_client.get_role(RoleName=DEADLINE_WORKER_ROLE) + return response["Role"]["Arn"] + + +@pytest.fixture(scope="session") +def deadline_scaffolding( + deadline_manager_fixture: DeadlineManager, worker_role_arn: str +) -> Generator[Any, None, None]: + deadline_manager_fixture.create_scaffolding(worker_role_arn, JOB_ATTACHMENTS_BUCKET_NAME) + + yield deadline_manager_fixture + + deadline_manager_fixture.cleanup_scaffolding() + + +@pytest.fixture(scope="session") +def launch_instance(ec2_client: botocore.client.BaseClient) -> Generator[Any, None, None]: + with _InstanceLauncher( + ec2_client, + AMI_ID, + SUBNET_ID, + SECURITY_GROUP_ID, + DEADLINE_WORKER_BOOSTRAP_INSTANCE_PROFILE_NAME, + ) as instance_id: + yield instance_id + + +@pytest.fixture(scope="session") +def create_worker_agent( + deadline_scaffolding, launch_instance: str, send_ssm_command: Callable +) -> Generator[Any, None, None]: + def configure_worker_agent_func() -> Dict: + """Creates a Deadline Farm, starts an instance and configures and starts a Worker Agent.""" + assert deadline_scaffolding + assert launch_instance + + configuration_command_response = send_ssm_command( + launch_instance, + ( + f"adduser -r -m agentuser && \n" + f"adduser -r -m jobuser && \n" + f"usermod -a -G jobuser agentuser && \n" + f"chmod 770 /home/jobuser && \n" + f"touch /etc/sudoers.d/deadline-worker-job-user && \n" + f'echo "agentuser ALL=(jobuser) NOPASSWD:ALL" | sudo tee /etc/sudoers.d/deadline-worker-job-user && \n' + f"python3.9 -m venv /opt/deadline/worker && \n" + f"source /opt/deadline/worker/bin/activate && \n" + f"pip install --upgrade pip && \n" + f"touch /opt/deadline/worker/pip.conf && \n" + # TODO: Remove when pypi is available + f"aws codeartifact login --tool pip --domain {CODEARTIFACT_DOMAIN} --domain-owner {CODEARTIFACT_ACCOUNT_ID} --repository {CODEARTIFACT_REPOSITORY} && \n" + f"aws s3 cp s3://{DEADLINE_SERVICE_MODEL_BUCKET}/service-2.json /tmp/deadline-beta-2020-08-21.json && \n" + f"chmod +r /tmp/deadline-beta-2020-08-21.json && \n" + f"sudo -u agentuser aws configure add-model --service-model file:///tmp/deadline-beta-2020-08-21.json --service-name deadline && \n" + f"mkdir /var/lib/deadline /var/log/amazon/deadline/ && \n" + f"chown agentuser:agentuser /var/lib/deadline /var/log/amazon/deadline/ && \n" + f"pip install deadline-worker-agent && \n" + f"sudo -u agentuser /opt/deadline/worker/bin/deadline_worker_agent --help" + ), + ) + + return configuration_command_response + + def start_worker_agent_func() -> Dict: + start_command_response = send_ssm_command( + launch_instance, + ( + f"nohup sudo -E AWS_DEFAULT_REGION=us-west-2 -u agentuser /opt/deadline/worker/bin/deadline_worker_agent --farm-id 
{deadline_scaffolding.farm_id} --fleet-id {deadline_scaffolding.fleet_id} --allow-instance-profile >/dev/null 2>&1 &" + ), + ) + + return start_command_response + + configuration_result = configure_worker_agent_func() + + assert configuration_result["ResponseCode"] == 0 + + run_worker = start_worker_agent_func() + + assert run_worker["ResponseCode"] == 0 + + yield run_worker + + +@pytest.fixture(scope="session") +def send_ssm_command(ssm_client: botocore.client.BaseClient) -> Callable: + def send_ssm_command_func(instance_id: str, command: str) -> Dict: + """Helper function to send single commands via SSM to a shell on a launched EC2 instance. Once the command has fully + finished the result of the invocation is returned. + """ + ssm_waiter = ssm_client.get_waiter("command_executed") + + # To successfully send an SSM Command to an instance the instance must: + # 1) Be in RUNNING state; + # 2) Have the AWS Systems Manager (SSM) Agent running; and + # 3) Have had enough time for the SSM Agent to connect to System's Manager + # + # If we send an SSM command then we will get an InvalidInstanceId error + # if the instance isn't in that state. + NUM_RETRIES = 10 + SLEEP_INTERVAL_S = 5 + for i in range(0, NUM_RETRIES): + try: + send_command_response = ssm_client.send_command( + InstanceIds=[instance_id], + DocumentName="AWS-RunShellScript", + Parameters={"commands": [command]}, + ) + # Successfully sent. Bail out of the loop. + break + except botocore.exceptions.ClientError as error: + error_code = error.response["Error"]["Code"] + if error_code == "InvalidInstanceId" and i < NUM_RETRIES - 1: + time.sleep(SLEEP_INTERVAL_S) + continue + raise + + command_id = send_command_response["Command"]["CommandId"] + + ssm_waiter.wait(InstanceId=instance_id, CommandId=command_id) + ssm_command_result = ssm_client.get_command_invocation( + InstanceId=instance_id, CommandId=command_id + ) + + return ssm_command_result + + return send_ssm_command_func + + +@pytest.fixture(scope="session") +def job_attachment_manager_fixture(stage: str, account_id: str): + job_attachment_manager = JobAttachmentManager(stage, account_id) + yield job_attachment_manager + + +@pytest.fixture(scope="session") +def deploy_job_attachment_resources(job_attachment_manager_fixture: JobAttachmentManager): + job_attachment_manager_fixture.deploy_resources() + yield job_attachment_manager_fixture + job_attachment_manager_fixture.cleanup_resources() + + +class _InstanceLauncher: + ami_id: str + subnet_id: str + security_group_id: str + instance_profile_name: str + instance_id: str + ec2_client: botocore.client.BaseClient + + def __init__( + self, + ec2_client: botocore.client.BaseClient, + ami_id: str, + subnet_id: str, + security_group_id: str, + instance_profile_name: str, + ) -> None: + self.ec2_client = ec2_client + self.ami_id = ami_id + self.subnet_id = subnet_id + self.security_group_id = security_group_id + self.instance_profile_name = instance_profile_name + + def __enter__(self) -> str: + instance_running_waiter = self.ec2_client.get_waiter("instance_status_ok") + + run_instance_response = self.ec2_client.run_instances( + MinCount=1, + MaxCount=1, + ImageId=self.ami_id, + InstanceType="t3.micro", + IamInstanceProfile={"Name": self.instance_profile_name}, + SubnetId=self.subnet_id, + SecurityGroupIds=[self.security_group_id], + MetadataOptions={"HttpTokens": "required", "HttpEndpoint": "enabled"}, + TagSpecifications=[ + { + "ResourceType": "instance", + "Tags": [{"Key": "InstanceIdentification", "Value": f"TestScaffolding{STAGE}"}], 
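+                    # The stage-suffixed tag above makes instances launched by these
+                    # tests easy to identify (and clean up) in the test account.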
+                }
+            ],
+        )
+
+        self.instance_id = run_instance_response["Instances"][0]["InstanceId"]
+
+        instance_running_waiter.wait(InstanceIds=[self.instance_id])
+        return self.instance_id
+
+    def __exit__(
+        self,
+        exc_type: Optional[Type[BaseException]],
+        exc_value: Optional[BaseException],
+        traceback: Optional[TracebackType],
+    ) -> None:
+        self.ec2_client.terminate_instances(InstanceIds=[self.instance_id])
diff --git a/src/deadline_test_scaffolding/job_attachment_manager.py b/src/deadline_test_scaffolding/job_attachment_manager.py
new file mode 100644
index 0000000..b0757c4
--- /dev/null
+++ b/src/deadline_test_scaffolding/job_attachment_manager.py
@@ -0,0 +1,122 @@
+# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+
+import pathlib
+
+import boto3
+from botocore.exceptions import ClientError, WaiterError
+
+from deadline_test_scaffolding.deadline_manager import DeadlineManager
+
+
+class JobAttachmentManager:
+    """
+    Responsible for setting up and tearing down job attachment test resources.
+    """
+
+    RESOURCE_CF_TEMPLATE_LOCATION = pathlib.Path(
+        pathlib.Path(__file__).parent / "cf_templates" / "job_attachments.yaml"
+    )
+
+    def __init__(self, stage: str, account_id: str):
+        cloudformation = boto3.resource("cloudformation")
+        s3 = boto3.resource("s3")
+        self.stack = cloudformation.Stack("JobAttachmentIntegTest")
+        self.deadline_manager = DeadlineManager(should_add_deadline_models=True)
+        self.bucket = s3.Bucket(f"job-attachment-integ-test-{stage.lower()}-{account_id}")
+
+    def deploy_resources(self):
+        """
+        Deploy all of the resources needed for job attachment integration tests.
+        """
+        try:
+            self.deadline_manager.create_kms_key()
+            self.deadline_manager.create_farm("job_attachments_test_farm")
+            self.deadline_manager.create_queue("job_attachments_test_queue")
+            self.deploy_stack()
+        except (ClientError, WaiterError):
+            # If anything goes wrong, roll back
+            self.cleanup_resources()
+            raise
+
+    def _create_stack(self, template_body: str):
+        try:
+            # The stack resource doesn't have an action for creating the stack,
+            # only updating it. So we need to go through the client.
+            self.stack.meta.client.create_stack(
+                StackName=self.stack.name,
+                TemplateBody=template_body,
+                OnFailure="DELETE",
+                EnableTerminationProtection=False,
+                Parameters=[
+                    {
+                        "ParameterKey": "BucketName",
+                        "ParameterValue": self.bucket.name,
+                    },
+                ],
+            )
+        except ClientError as e:
+            # Sometimes the CloudFormation create-stack waiter releases even if the stack
+            # isn't in CREATE_COMPLETE, so we catch the "already exists" error here and move on.
+            if e.response["Error"]["Message"] != f"Stack [{self.stack.name}] already exists":
+                raise
+
+        waiter = self.stack.meta.client.get_waiter("stack_create_complete")
+        waiter.wait(
+            StackName=self.stack.name,
+        )
+
+    def deploy_stack(self):
+        """
+        Deploy the job attachment test stack to the test account. If the stack already
+        exists, update it; if it doesn't exist, create it.
+
+        The stack is kept around between test runs to reduce subsequent test times.
+        """
+        with open(self.RESOURCE_CF_TEMPLATE_LOCATION) as f:
+            template_body = f.read()
+
+        try:
+            self.stack.update(
+                TemplateBody=template_body,
+                Parameters=[
+                    {
+                        "ParameterKey": "BucketName",
+                        "ParameterValue": self.bucket.name,
+                    },
+                ],
+            )
+            waiter = self.stack.meta.client.get_waiter("stack_update_complete")
+            waiter.wait(StackName=self.stack.name)
+        except ClientError as e:
+            if (
+                "is in CREATE_IN_PROGRESS state and can not be updated."
+ in e.response["Error"]["Message"] + ): + waiter = self.stack.meta.client.get_waiter("stack_create_complete") + waiter.wait(StackName=self.stack.name) + + elif e.response["Error"]["Message"] != "No updates are to be performed.": + self._create_stack(template_body) + + def empty_bucket(self): + """ + Empty the bucket between session runs + """ + try: + self.bucket.objects.all().delete() + except ClientError as e: + if e.response["Error"]["Message"] != "The specified bucket does not exist": + raise + + def cleanup_resources(self): + """ + Cleanup all of the resources that the test used, except for the stack. + """ + self.deadline_manager.delete_additional_queues() + if self.deadline_manager.queue_id: + self.deadline_manager.delete_queue() + if self.deadline_manager.farm_id: + self.deadline_manager.delete_farm() + if self.deadline_manager.kms_key_metadata: + self.deadline_manager.delete_kms_key() + self.empty_bucket() diff --git a/src/deadline_test_scaffolding/py.typed b/src/deadline_test_scaffolding/py.typed new file mode 100644 index 0000000..7ef2116 --- /dev/null +++ b/src/deadline_test_scaffolding/py.typed @@ -0,0 +1 @@ +# Marker file that indicates this package supports typing diff --git a/src/deadline_test_scaffolding/utils.py b/src/deadline_test_scaffolding/utils.py new file mode 100644 index 0000000..b903b63 --- /dev/null +++ b/src/deadline_test_scaffolding/utils.py @@ -0,0 +1,270 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. + +from .constants import ( + DEADLINE_WORKER_ROLE, + DEADLINE_WORKER_BOOTSTRAP_ROLE, + DEADLINE_WORKER_BOOSTRAP_INSTANCE_PROFILE_NAME, + JOB_ATTACHMENTS_BUCKET_NAME, + JOB_ATTACHMENTS_BUCKET_RESOURCE, + DEADLINE_SERVICE_MODEL_BUCKET, + CODEARTIFACT_DOMAIN, + CODEARTIFACT_ACCOUNT_ID, + CODEARTIFACT_REPOSITORY, + DEADLINE_QUEUE_SESSION_ROLE, + CREDENTIAL_VENDING_PRINCIPAL, +) + +from typing import Any, Dict + + +# IAM Roles +def generate_boostrap_worker_role_cfn_template() -> Dict[str, Any]: + cfn_template = { + "Type": "AWS::IAM::Role", + "Properties": { + "RoleName": DEADLINE_WORKER_BOOTSTRAP_ROLE, + "Description": DEADLINE_WORKER_BOOTSTRAP_ROLE, + "AssumeRolePolicyDocument": { + "Version": "2012-10-17", + "Statement": [ + { + "Effect": "Allow", + "Principal": {"Service": "ec2.amazonaws.com"}, + "Action": "sts:AssumeRole", + } + ], + }, + "Policies": [ + { + "PolicyName": f"{DEADLINE_WORKER_BOOTSTRAP_ROLE}Policy", + "PolicyDocument": { + "Version": "2012-10-17", + "Statement": [ + # Allows the worker to bootstrap itself and grab the correct credentials + { + "Effect": "Allow", + "Action": [ + "deadline:CreateWorker", + "deadline:GetWorkerIamCredentials", + "deadline:AssumeFleetRoleForWorker", + ], + "Resource": "*", + }, + # Allows the worker to download service model + { + "Action": ["s3:GetObject", "s3:HeadObject"], + "Resource": [ + f"arn:aws:s3:::{DEADLINE_SERVICE_MODEL_BUCKET}/service-2.json" + ], + "Effect": "Allow", + }, + # Allows access to code artifact + { + "Action": ["codeartifact:GetAuthorizationToken"], + "Resource": [ + f"arn:aws:codeartifact:us-west-2:{CODEARTIFACT_ACCOUNT_ID}:domain/{CODEARTIFACT_DOMAIN}" + ], + "Effect": "Allow", + }, + { + "Action": ["sts:GetServiceBearerToken"], + "Resource": "*", + "Effect": "Allow", + }, + { + "Action": [ + "codeartifact:ReadFromRepository", + "codeartifact:GetRepositoryEndpoint", + ], + "Resource": [ + f"arn:aws:codeartifact:us-west-2:{CODEARTIFACT_ACCOUNT_ID}:repository/{CODEARTIFACT_DOMAIN}/{CODEARTIFACT_REPOSITORY}" + ], + "Effect": "Allow", + }, + ], + }, + }, + ], + 
"ManagedPolicyArns": ["arn:aws:iam::aws:policy/AmazonSSMManagedInstanceCore"], + }, + } + return cfn_template + + +def generate_boostrap_instance_profile_cfn_template() -> Dict[str, Any]: + cfn_template = { + "Type": "AWS::IAM::InstanceProfile", + "Properties": { + "InstanceProfileName": DEADLINE_WORKER_BOOSTRAP_INSTANCE_PROFILE_NAME, + "Roles": [ + {"Ref": DEADLINE_WORKER_BOOTSTRAP_ROLE}, + ], + }, + } + return cfn_template + + +def generate_worker_role_cfn_template() -> Dict[str, Any]: + """This role matches the worker role of the closed-beta console""" + cfn_template = { + "Type": "AWS::IAM::Role", + "Properties": { + "RoleName": DEADLINE_WORKER_ROLE, + "Description": DEADLINE_WORKER_ROLE, + "AssumeRolePolicyDocument": { + "Version": "2012-10-17", + "Statement": [ + { + "Effect": "Allow", + "Principal": {"Service": CREDENTIAL_VENDING_PRINCIPAL}, + "Action": "sts:AssumeRole", + } + ], + }, + "Policies": [ + { + "PolicyName": f"{DEADLINE_WORKER_ROLE}Policy", + "PolicyDocument": { + "Version": "2012-10-17", + "Statement": [ + { + "Effect": "Allow", + "Action": [ + "deadline:DeleteWorker", + "deadline:BatchGetJobEntity", + "deadline:AssumeQueueRoleForWorker", + "deadline:AssumeFleetRoleForWorker", + "deadline:UpdateWorkerSchedule", + "deadline:UpdateWorker", + ], + "Resource": "*", + }, + { + "Effect": "Allow", + "Action": [ + # Allows the read permissions via credentials + "logs:GetLogEvents", + # Allows the worker to push logs to CWL + "logs:PutLogEvents", + # Allows the worker to pass credentials to create log streams + "logs:CreateLogStream", + ], + "Resource": "arn:aws:logs:*:*:*:/aws/deadline/*", + }, + { + # For uploading logs and synchronizing job attachments + # Equivalent actions to CDK's Bucket.grantReadWrite for "deadline-" buckets + "Effect": "Allow", + "Action": [ + "s3:GetObject*", + "s3:GetBucket*", + "s3:List*", + "s3:DeleteObject*", + "s3:PutObject", + "s3:PutObjectLegalHold", + "s3:PutObjectRetention", + "s3:PutObjectTagging", + "s3:PutObjectVersionTagging", + "s3:Abort*", + ], + "Resource": ["arn:aws:s3:::deadline-*"], + }, + ], + }, + }, + ], + }, + } + return cfn_template + + +def generate_queue_session_role() -> Dict[str, Any]: + cfn_template = { + "Type": "AWS::IAM::Role", + "Properties": { + "RoleName": DEADLINE_QUEUE_SESSION_ROLE, + "Description": DEADLINE_QUEUE_SESSION_ROLE, + "AssumeRolePolicyDocument": { + "Version": "2012-10-17", + "Statement": [ + { + "Effect": "Allow", + "Principal": {"Service": CREDENTIAL_VENDING_PRINCIPAL}, + "Action": "sts:AssumeRole", + } + ], + }, + "Policies": [ + { + "PolicyName": f"{DEADLINE_QUEUE_SESSION_ROLE}Policy", + "PolicyDocument": { + "Version": "2012-10-17", + "Statement": [ + { + "Effect": "Allow", + "Action": [ + "s3:GetObject", + "s3:PutObject", + "s3:ListBucket", + "s3:GetBucketLocation", + ], + "Resource": [ + f"arn:aws:s3:::{JOB_ATTACHMENTS_BUCKET_NAME}" + f"arn:aws:s3:::{JOB_ATTACHMENTS_BUCKET_NAME}/*" + ], + } + ], + }, + } + ], + }, + } + + return cfn_template + + +# Job Attachments Bucket +def generate_job_attachments_bucket() -> Dict[str, Any]: + cfn_template = { + "Type": "AWS::S3::Bucket", + "Properties": { + "BucketName": JOB_ATTACHMENTS_BUCKET_NAME, + "BucketEncryption": { + "ServerSideEncryptionConfiguration": [ + {"ServerSideEncryptionByDefault": {"SSEAlgorithm": "AES256"}} + ] + }, + "PublicAccessBlockConfiguration": { + "BlockPublicAcls": True, + "BlockPublicPolicy": True, + "IgnorePublicAcls": True, + "RestrictPublicBuckets": True, + }, + }, + "UpdateReplacePolicy": "Delete", + "DeletionPolicy": "Delete", 
+    }
+
+    return cfn_template
+
+
+def generate_job_attachments_bucket_policy() -> Dict[str, Any]:
+    cfn_template = {
+        "Type": "AWS::S3::BucketPolicy",
+        "Properties": {
+            "Bucket": {"Ref": JOB_ATTACHMENTS_BUCKET_RESOURCE},
+            "PolicyDocument": {
+                "Statement": [
+                    {
+                        "Action": "s3:*",
+                        "Effect": "Deny",
+                        "Principal": "*",
+                        "Resource": f"arn:aws:s3:::{JOB_ATTACHMENTS_BUCKET_NAME}/*",
+                        "Condition": {"Bool": {"aws:SecureTransport": "false"}},
+                    }
+                ]
+            },
+        },
+    }
+
+    return cfn_template
diff --git a/test/test_copyright_headers.py b/test/test_copyright_headers.py
new file mode 100644
index 0000000..40564b6
--- /dev/null
+++ b/test/test_copyright_headers.py
@@ -0,0 +1,70 @@
+# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+
+import re
+from pathlib import Path
+
+# For distributed open source and proprietary code, we must include a copyright header in every source file:
+_copyright_header_re = re.compile(
+    r"Copyright Amazon\.com, Inc\. or its affiliates\. All Rights Reserved\.", re.IGNORECASE
+)
+_generated_by_scm = re.compile(r"# file generated by setuptools_scm", re.IGNORECASE)
+
+
+def _check_file(filename: Path) -> None:
+    with open(filename) as infile:
+        lines_read = 0
+        for line in infile:
+            if _copyright_header_re.search(line):
+                return  # success
+            lines_read += 1
+            if lines_read > 10:
+                raise Exception(
+                    f"Could not find a valid Amazon.com copyright header in the top of {filename}."
+                    " Please add one."
+                )
+        else:
+            # __init__.py files are usually empty, this is to catch that.
+            raise Exception(
+                f"Could not find a valid Amazon.com copyright header in the top of {filename}."
+                " Please add one."
+            )
+
+
+def _is_version_file(filename: Path) -> bool:
+    if filename.name != "_version.py":
+        return False
+    with open(filename) as infile:
+        lines_read = 0
+        for line in infile:
+            if _generated_by_scm.search(line):
+                return True
+            lines_read += 1
+            if lines_read > 10:
+                break
+    return False
+
+
+def test_copyright_headers():
+    """Verifies every .py and .sh file has an Amazon copyright header."""
+    root_project_dir = Path(__file__)
+    # The root of the project is the directory that contains the test directory.
+    while not (root_project_dir / "test").exists():
+        root_project_dir = root_project_dir.parent
+    # Choose only a few top-level directories to test.
+    # That way we don't snag any virtual envs a developer might create, at the risk of missing
+    # some top-level .py files.
+    top_level_dirs = ["src", "test", "scripts"]
+    file_count = 0
+    for top_level_dir in top_level_dirs:
+        for glob_pattern in ("**/*.py", "**/*.sh"):
+            for path in Path(root_project_dir / top_level_dir).glob(glob_pattern):
+                print(path)
+                if not _is_version_file(path):
+                    _check_file(path)
+                    file_count += 1
+
+    print(f"test_copyright_headers checked {file_count} files successfully.")
+
+
+if __name__ == "__main__":
+    test_copyright_headers()
diff --git a/test/unit/conftest.py b/test/unit/conftest.py
new file mode 100644
index 0000000..77dc4cc
--- /dev/null
+++ b/test/unit/conftest.py
@@ -0,0 +1,26 @@
+# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
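+
+# Shared fixtures for the unit tests in this directory. A minimal usage sketch
+# (a hypothetical test, not part of this module) of how they combine:
+#
+#     def test_example(boto_config, mock_get_deadline_models):
+#         manager = DeadlineManager()  # only ever sees the fake credentials below
+#
+# boto_config patches os.environ with dummy AWS credentials and a fixed region
+# so that no unit test can accidentally reach a real account.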
+ +from typing import Generator +from unittest import mock +from unittest.mock import patch + +import pytest + +from deadline_test_scaffolding import DeadlineManager + + +@pytest.fixture() +def mock_get_deadline_models(): + with mock.patch.object(DeadlineManager, "get_deadline_models") as mocked_get_deadline_models: + yield mocked_get_deadline_models + + +@pytest.fixture(scope="function") +def boto_config() -> Generator[None, None, None]: + updated_environment = { + "AWS_ACCESS_KEY_ID": "ACCESSKEY", + "AWS_SECRET_ACCESS_KEY": "wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY", + "AWS_DEFAULT_REGION": "us-west-2", + } + with patch.dict("os.environ", updated_environment): + yield diff --git a/test/unit/shared_constants.py b/test/unit/shared_constants.py new file mode 100644 index 0000000..94f498d --- /dev/null +++ b/test/unit/shared_constants.py @@ -0,0 +1,25 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. + +MOCK_FARM_ID = "farm-0123456789abcdefabcdefabcdefabcd" +MOCK_FARM_NAME = "fake_farm_name" +MOCK_FLEET_ID = "fleet-0123456789abcdefabcdefabcdefabcd" +MOCK_FLEET_NAME = "fake_fleet_name" +MOCK_QUEUE_ID = "queue-0123456789abcdefabcdefabcdefabcd" +MOCK_QUEUE_NAME = "fake_queue_name" +MOCK_WORKER_ROLE_ARN = "fake_worker_role_arn" +MOCK_JOB_ATTACHMENTS_BUCKET_NAME = "fake_job_attachments_bucket_name" + +MOCK_DEFAULT_CMF_CONFIG = { + "customerManaged": { + "autoScalingConfiguration": { + "mode": "NO_SCALING", + "maxFleetSize": 1, + }, + "workerRequirements": { + "vCpuCount": {"min": 1}, + "memoryMiB": {"min": 1024}, + "osFamily": "linux", + "cpuArchitectureType": "x86_64", + }, + } +} diff --git a/test/unit/test_deadline_manager.py b/test/unit/test_deadline_manager.py new file mode 100644 index 0000000..3c860f4 --- /dev/null +++ b/test/unit/test_deadline_manager.py @@ -0,0 +1,571 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. + +from __future__ import annotations + +import os +from typing import Any +from unittest import mock + +import pytest +from botocore.exceptions import ClientError + +from deadline_test_scaffolding import DeadlineManager + +from shared_constants import ( + MOCK_FARM_ID, + MOCK_FARM_NAME, + MOCK_FLEET_ID, + MOCK_FLEET_NAME, + MOCK_QUEUE_ID, + MOCK_QUEUE_NAME, + MOCK_DEFAULT_CMF_CONFIG, +) + + +class TestDeadlineManager: + @pytest.fixture(autouse=True) + def setup_test(self, mock_get_deadline_models): + pass + + @pytest.fixture(scope="function") + def mock_deadline_manager(self) -> DeadlineManager: + """ + Returns a DeadlineManager where any boto3 clients are mocked, including + the deadline_client that is part of the DeadlineManager. 
+        """
+        with mock.patch.object(DeadlineManager, "_get_deadline_client"), mock.patch(
+            "deadline_test_scaffolding.deadline_manager.boto3.client"
+        ):
+            return DeadlineManager()
+
+    ids = [
+        pytest.param(None, None, None, None, id="NoKMSKey"),
+        pytest.param({"KeyId": "FakeKMSKeyID"}, None, None, None, id="KMSKeyNoFarm"),
+        pytest.param({"KeyId": "FakeKMSKeyID"}, MOCK_FARM_ID, None, None, id="KMSKeyFarmNoFleet"),
+        pytest.param(
+            {"KeyId": "FakeKMSKeyID"},
+            MOCK_FARM_ID,
+            MOCK_FLEET_ID,
+            None,
+            id="KMSKeyFarmFleetNoQueue",
+        ),
+        pytest.param(
+            {"KeyId": "FakeKMSKeyID"},
+            MOCK_FARM_ID,
+            MOCK_FLEET_ID,
+            MOCK_QUEUE_ID,
+            id="KMSKeyFarmFleetQueue",
+        ),
+        pytest.param(
+            {"KeyId": "FakeKMSKeyID"},
+            MOCK_FARM_ID,
+            None,
+            MOCK_QUEUE_ID,
+            id="KMSKeyFarmQueueNoFleet",
+        ),
+    ]
+
+    @mock.patch.object(DeadlineManager, "create_fleet")
+    @mock.patch.object(DeadlineManager, "create_queue")
+    @mock.patch.object(DeadlineManager, "create_farm")
+    @mock.patch.object(DeadlineManager, "create_kms_key")
+    @mock.patch.object(DeadlineManager, "add_job_attachments_bucket")
+    @mock.patch.object(DeadlineManager, "queue_fleet_association")
+    def test_create_scaffolding(
+        self,
+        # mock.patch.object decorators inject mocks bottom-up, so the parameter
+        # order below mirrors the decorator list read from the bottom.
+        mocked_queue_fleet_association: mock.Mock,
+        mocked_add_job_attachments_bucket: mock.Mock,
+        mocked_create_kms_key: mock.Mock,
+        mocked_create_farm: mock.Mock,
+        mocked_create_queue: mock.Mock,
+        mocked_create_fleet: mock.Mock,
+        mock_deadline_manager: DeadlineManager,
+    ) -> None:
+        # GIVEN
+        mock_deadline_manager.farm_id = MOCK_FARM_ID
+        mock_deadline_manager.fleet_id = MOCK_FLEET_ID
+        mock_deadline_manager.queue_id = MOCK_QUEUE_ID
+        worker_role_arn = "fake_worker_role"
+        job_attachments_bucket = "fake_job_attachments_bucket"
+
+        # WHEN
+        mock_deadline_manager.create_scaffolding(worker_role_arn, job_attachments_bucket)
+
+        # THEN
+        mocked_create_kms_key.assert_called_once()
+        mocked_create_farm.assert_called_once()
+        mocked_create_queue.assert_called_once()
+        mocked_add_job_attachments_bucket.assert_called_once()
+        mocked_create_fleet.assert_called_once()
+        mocked_queue_fleet_association.assert_called_once()
+
+    @mock.patch.object(DeadlineManager, "delete_fleet")
+    @mock.patch.object(DeadlineManager, "delete_queue")
+    @mock.patch.object(DeadlineManager, "delete_farm")
+    @mock.patch.object(DeadlineManager, "delete_kms_key")
+    @pytest.mark.parametrize("kms_key_metadata, farm_id, fleet_id, queue_id", ids)
+    def test_cleanup_scaffolding(
+        self,
+        mocked_delete_kms_key: mock.Mock,
+        mocked_delete_farm: mock.Mock,
+        mocked_delete_queue: mock.Mock,
+        mocked_delete_fleet: mock.Mock,
+        kms_key_metadata: dict[str, Any] | None,
+        farm_id: str | None,
+        fleet_id: str | None,
+        queue_id: str | None,
+        mock_deadline_manager: DeadlineManager,
+    ) -> None:
+        # GIVEN
+        mock_deadline_manager.kms_key_metadata = kms_key_metadata
+        mock_deadline_manager.farm_id = farm_id
+        mock_deadline_manager.fleet_id = fleet_id
+        mock_deadline_manager.queue_id = queue_id
+
+        # WHEN
+        mock_deadline_manager.cleanup_scaffolding()
+
+        # THEN
+        if fleet_id:
+            mocked_delete_fleet.assert_called_once()
+
+        if queue_id:
+            mocked_delete_queue.assert_called_once()
+
+        if farm_id:
+            mocked_delete_farm.assert_called_once()
+
+        if kms_key_metadata:
+            mocked_delete_kms_key.assert_called_once()
+
+    def test_create_kms_key(self, mock_deadline_manager: DeadlineManager) -> None:
+        # GIVEN
+        fake_kms_metadata = {"KeyMetadata": {"KeyId": "Foo"}}
+        mock_deadline_manager.kms_client.create_key.return_value = fake_kms_metadata
+
+        # WHEN
+        mock_deadline_manager.create_kms_key()
+
+        # THEN
+        
mock_deadline_manager.kms_client.create_key.assert_called_once_with( + Description="The KMS used for testing created by the " + "DeadlineClientSoftwareTestScaffolding.", + Tags=[{"TagKey": "Name", "TagValue": "DeadlineClientSoftwareTestScaffolding"}], + ) + + assert mock_deadline_manager.kms_key_metadata == fake_kms_metadata["KeyMetadata"] + + mock_deadline_manager.kms_client.enable_key.assert_called_once_with( + KeyId=fake_kms_metadata["KeyMetadata"]["KeyId"] + ) + + def test_delete_kms_key(self, mock_deadline_manager: DeadlineManager) -> None: + # GIVEN + fake_kms_metadata = {"KeyId": "Foo"} + mock_deadline_manager.kms_key_metadata = fake_kms_metadata + + # WHEN + mock_deadline_manager.delete_kms_key() + + # THEN + mock_deadline_manager.kms_client.schedule_key_deletion.assert_called_once_with( + KeyId=fake_kms_metadata["KeyId"], PendingWindowInDays=7 + ) + + assert mock_deadline_manager.kms_key_metadata is None + + key_metadatas = [ + pytest.param(None, id="NoMetadata"), + pytest.param({"Foo": "Bar"}, id="NoKeyInMetadata"), + ] + + @pytest.mark.parametrize("key_metadatas", key_metadatas) + def test_delete_kms_key_no_key( + self, + key_metadatas: dict[str, Any] | None, + mock_deadline_manager: DeadlineManager, + ) -> None: + # GIVEN + mock_deadline_manager.kms_key_metadata = key_metadatas + + # WHEN / THEN + with pytest.raises(Exception): + mock_deadline_manager.delete_kms_key() + + assert not mock_deadline_manager.kms_client.schedule_key_deletion.called + + def test_create_farm(self, mock_deadline_manager: DeadlineManager) -> None: + # GIVEN + fake_kms_metadata = {"Arn": "fake_kms_arn"} + + mock_deadline_manager.kms_key_metadata = fake_kms_metadata + mock_deadline_manager.deadline_client.create_farm.return_value = {"farmId": MOCK_FARM_ID} # type: ignore[attr-defined] + + # WHEN + mock_deadline_manager.create_farm(MOCK_FARM_NAME) + + # THEN + mock_deadline_manager.deadline_client.create_farm.assert_called_once_with( # type: ignore[attr-defined] # noqa + displayName=MOCK_FARM_NAME, kmsKeyArn=fake_kms_metadata["Arn"] + ) + assert mock_deadline_manager.farm_id == MOCK_FARM_ID + + key_metadatas = [ + pytest.param(None, id="NoMetadata"), + pytest.param({"Foo": "Bar"}, id="NoKeyInMetadata"), + ] + + @pytest.mark.parametrize("key_metadatas", key_metadatas) + def test_create_farm_kms_not_valid( + self, + key_metadatas: dict[str, Any] | None, + mock_deadline_manager: DeadlineManager, + ) -> None: + # GIVEN + mock_deadline_manager.kms_key_metadata = key_metadatas + + # WHEN / THEN + with pytest.raises(Exception): + mock_deadline_manager.create_farm(MOCK_FARM_NAME) + + assert not mock_deadline_manager.deadline_client.create_farm.called # type: ignore[attr-defined] # noqa + assert mock_deadline_manager.farm_id is None + + def test_delete_farm(self, mock_deadline_manager: DeadlineManager) -> None: + # GIVEN + mock_deadline_manager.farm_id = MOCK_FARM_ID + + # WHEN + mock_deadline_manager.delete_farm() + + # THEN + mock_deadline_manager.deadline_client.delete_farm.assert_called_once_with( + farmId=MOCK_FARM_ID + ) + + assert mock_deadline_manager.farm_id is None + + def test_delete_farm_not_created(self, mock_deadline_manager: DeadlineManager) -> None: + # GIVEN + # mock_deadline_manager fixture + # WHEN / THEN + with pytest.raises(Exception): + mock_deadline_manager.delete_farm() + + # THEN + assert not mock_deadline_manager.deadline_client.delete_farm.called + + def test_create_queue(self, mock_deadline_manager: DeadlineManager) -> None: + # GIVEN + mock_deadline_manager.farm_id = MOCK_FARM_ID + 
mock_deadline_manager.deadline_client.create_queue.return_value = {"queueId": MOCK_QUEUE_ID} # type: ignore[attr-defined] + + # WHEN + mock_deadline_manager.create_queue(MOCK_QUEUE_NAME) + + # THEN + mock_deadline_manager.deadline_client.create_queue.assert_called_once_with( # type: ignore[attr-defined] + displayName=MOCK_QUEUE_NAME, + farmId=MOCK_FARM_ID, + ) + + assert mock_deadline_manager.queue_id == MOCK_QUEUE_ID + + def test_create_queue_no_farm(self, mock_deadline_manager: DeadlineManager) -> None: + # GIVEN + mock_deadline_manager.deadline_client.create_queue.return_value = {"queueId": MOCK_QUEUE_ID} # type: ignore[attr-defined] + + # WHEN + with pytest.raises(Exception): + mock_deadline_manager.create_queue(MOCK_QUEUE_NAME) + + # THEN + assert not mock_deadline_manager.deadline_client.create_queue.called # type: ignore[attr-defined] + + assert mock_deadline_manager.queue_id is None + + def test_delete_queue(self, mock_deadline_manager: DeadlineManager) -> None: + # GIVEN + mock_deadline_manager.queue_id = MOCK_QUEUE_ID + mock_deadline_manager.farm_id = MOCK_FARM_ID + + # WHEN + mock_deadline_manager.delete_queue() + + # THEN + mock_deadline_manager.deadline_client.delete_queue.assert_called_once_with( + queueId=MOCK_QUEUE_ID, farmId=MOCK_FARM_ID + ) + + assert mock_deadline_manager.queue_id is None + + farm_queue_ids = [ + pytest.param(MOCK_QUEUE_ID, None, id="NoFarmId"), + pytest.param(None, MOCK_FARM_ID, id="NoQueueId"), + ] + + @pytest.mark.parametrize("fake_queue_id, fake_farm_id", farm_queue_ids) + def test_delete_queue_no_farm_queue( + self, + fake_queue_id: str | None, + fake_farm_id: str | None, + mock_deadline_manager: DeadlineManager, + ) -> None: + # GIVEN + mock_deadline_manager.queue_id = fake_queue_id + mock_deadline_manager.farm_id = fake_farm_id + + # WHEN / THEN + with pytest.raises(Exception): + mock_deadline_manager.delete_queue() + + assert not mock_deadline_manager.deadline_client.delete_queue.called + + def test_create_fleet(self, mock_deadline_manager: DeadlineManager) -> None: + # GIVEN + mock_deadline_manager.farm_id = MOCK_FARM_ID + fake_worker_role_arn = "fake_worker_role_arn" + mock_deadline_manager.deadline_client.create_fleet.return_value = {"fleetId": MOCK_FLEET_ID} # type: ignore[attr-defined] + mock_deadline_manager.deadline_client.get_fleet.return_value = {"status": "ACTIVE"} # type: ignore[attr-defined] + + # WHEN + mock_deadline_manager.create_fleet(MOCK_FLEET_NAME, fake_worker_role_arn) + + # THEN + mock_deadline_manager.deadline_client.create_fleet.assert_called_once_with( # type: ignore[attr-defined] + farmId=MOCK_FARM_ID, + displayName=MOCK_FLEET_NAME, + roleArn=fake_worker_role_arn, + configuration=MOCK_DEFAULT_CMF_CONFIG, + ) + + assert mock_deadline_manager.fleet_id == MOCK_FLEET_ID + + def test_create_fleet_no_farm(self, mock_deadline_manager: DeadlineManager) -> None: + # GIVEN + # mock_deadline_manager fixture + worker_role_arn = "fake_worker_role_arn" + + # WHEN / THEN + with pytest.raises(Exception): + mock_deadline_manager.create_fleet(MOCK_FLEET_NAME, worker_role_arn) + + assert not mock_deadline_manager.deadline_client.create_fleet.called # type: ignore[attr-defined] + assert mock_deadline_manager.fleet_id is None + + def test_delete_fleet(self, mock_deadline_manager: DeadlineManager) -> None: + # GIVEN + mock_deadline_manager.farm_id = MOCK_FARM_ID + mock_deadline_manager.fleet_id = MOCK_FLEET_ID + mock_deadline_manager.deadline_client.get_queue_fleet_association.return_value = {"status": "STOPPED"} # type: ignore[attr-defined] + 
mock_deadline_manager.deadline_client.get_fleet.return_value = {"status": "DELETED"} # type: ignore[attr-defined] + + # WHEN + mock_deadline_manager.delete_fleet() + + # THEN + mock_deadline_manager.deadline_client.delete_fleet.assert_called_once_with( + farmId=MOCK_FARM_ID, fleetId=MOCK_FLEET_ID + ) + + assert mock_deadline_manager.fleet_id is None + + farm_queue_ids = [ + pytest.param(MOCK_FARM_ID, None, id="NoFleetId"), + pytest.param(None, MOCK_FLEET_ID, id="NoFarmId"), + ] + + # Create a test for test_delete_fleet + + @pytest.mark.parametrize("fake_farm_id, fake_fleet_id", farm_queue_ids) + def test_delete_fleet_no_farm_fleet( + self, + fake_farm_id: str | None, + fake_fleet_id: str | None, + mock_deadline_manager: DeadlineManager, + ) -> None: + # GIVEN + mock_deadline_manager.farm_id = fake_farm_id + mock_deadline_manager.fleet_id = fake_fleet_id + + # WHEN / THEN + with pytest.raises(Exception): + mock_deadline_manager.delete_fleet() + + farm_queue_ids = [ + pytest.param( + "kms_client", + {}, + "create_key", + "create_kms_key", + [], + "kms_key_metadata", + id="FailedCreateKMSKey", + ), + pytest.param( + "kms_client", + {"kms_key_metadata": {"KeyId": "TestKeyId"}}, + "schedule_key_deletion", + "delete_kms_key", + [], + None, + id="FailedDeleteKMSKey", + ), + pytest.param( + "deadline_client", + {"kms_key_metadata": {"Arn": "TestArn"}}, + "create_farm", + "create_farm", + ["TestFarm"], + "farm_id", + id="FailedCreateFarm", + ), + pytest.param( + "deadline_client", + {"farm_id": "fake_farm_id"}, + "delete_farm", + "delete_farm", + [], + None, + id="FailedDeleteFarm", + ), + pytest.param( + "deadline_client", + {"farm_id": "fake_farm_id"}, + "create_queue", + "create_queue", + ["TestQueue"], + "queue_id", + id="FailedCreateQueue", + ), + pytest.param( + "deadline_client", + {"farm_id": "fake_farm_id", "queue_id": "fake_queue_id"}, + "delete_queue", + "delete_queue", + [], + None, + id="FailedDeleteQueue", + ), + pytest.param( + "deadline_client", + {"farm_id": "fake_farm_id", "worker_role_arn": "fake_worker_role_arn"}, + "create_fleet", + "create_fleet", + ["TestFleet", "fake_worker_arn"], + "fleet_id", + id="FailedCreateFleet", + ), + pytest.param( + "deadline_client", + {"farm_id": "fake_farm_id", "fleet_id": "fake_fleet_id"}, + "get_queue_fleet_association", # This is the first boto call in delete fleet + "delete_fleet", + [], + None, + id="FailedDeleteFleet", + ), + ] + + @mock.patch("deadline_test_scaffolding.deadline_manager.boto3.Session") + @mock.patch("deadline_test_scaffolding.deadline_manager.boto3.client") + @pytest.mark.parametrize( + "client, bm_properties, client_function_name, manager_function_name, args," + "expected_parameter", + farm_queue_ids, + ) + def test_failure_with_boto( + self, + _: mock.Mock, + mocked_boto_session: mock.MagicMock, + client: str, + bm_properties: dict[str, Any], + client_function_name: str, + manager_function_name: str, + args: list[Any], + expected_parameter: str, + ) -> None: + """This test will confirm that when a ClientError is raised when we use the boto3 + clients for deadline and kms + + Args: + _ (mock.Mock): _description_ + client (str): _description_ + bm_properties (dict[str, Any]): _description_ + client_function_name (str): _description_ + manager_function_name (str): _description_ + args (list[Any]): _description_ + expected_parameter (str): _description_ + """ + + # GIVEN + mocked_function = mock.Mock( + side_effect=ClientError( + { + "Error": { + "Code": "TestException", + "Message": "This is a test exception to simulate an 
exception being " + "raised.", + } + }, + "TestException", + ) + ) + mocked_client = mock.Mock() + setattr(mocked_client, client_function_name, mocked_function) + + bm = DeadlineManager() + setattr(bm, client, mocked_client) + + for property, value in bm_properties.items(): + setattr(bm, property, value) + + # WHEN + with pytest.raises(ClientError): + manager_function = getattr(bm, manager_function_name) + manager_function(*args) + + # THEN + if expected_parameter: + assert getattr(bm, expected_parameter) is None + + +class TestDeadlineManagerAddModels: + """This class is here because the tests above are mocking out the add_deadline_models method + using a fixture.""" + + @mock.patch.dict(os.environ, {"DEADLINE_SERVICE_MODEL_BUCKET": "test-bucket"}) + @mock.patch("os.makedirs") + @mock.patch("tempfile.TemporaryDirectory") + @mock.patch("deadline_test_scaffolding.deadline_manager.boto3.Session") + @mock.patch("deadline_test_scaffolding.deadline_manager.boto3.client") + def test_get_deadline_models( + self, + mocked_boto_client: mock.MagicMock, + mocked_boto_session: mock.MagicMock, + mocked_temp_dir: mock.MagicMock, + mocked_mkdir: mock.MagicMock, + ): + # GIVEN + temp_path = "/tmp/test" + mocked_temp_dir.return_value.name = temp_path + deadline_endpoint = os.getenv("DEADLINE_ENDPOINT") + + # WHEN + manager = DeadlineManager(should_add_deadline_models=True) + + # THEN + mocked_boto_client.assert_any_call("s3") + mocked_temp_dir.assert_called_once() + mocked_mkdir.assert_called_once_with( + f"{temp_path}/deadline/{DeadlineManager.MOCKED_SERVICE_VERSION}" + ) + mocked_boto_client.return_value.download_file.assert_called_with( + "test-bucket", + "service-2.json", + f"{temp_path}/deadline/{DeadlineManager.MOCKED_SERVICE_VERSION}/service-2.json", + ) + mocked_boto_session.return_value.client.assert_called_with( + "deadline", endpoint_url=deadline_endpoint + ) + assert manager.deadline_model_dir is not None + assert manager.deadline_model_dir.name == temp_path diff --git a/test/unit/test_deadline_shim.py b/test/unit/test_deadline_shim.py new file mode 100644 index 0000000..5655328 --- /dev/null +++ b/test/unit/test_deadline_shim.py @@ -0,0 +1,255 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +import pytest +from unittest.mock import MagicMock, patch +from deadline_test_scaffolding import DeadlineClient +from shared_constants import MOCK_FARM_NAME, MOCK_FLEET_NAME, MOCK_QUEUE_NAME + + +class FakeClient: + def fake_deadline_client_has_this(self) -> str: + return "from fake client" + + def but_not_this(self) -> str: + return "from fake client" + + +class FakeDeadlineClient(DeadlineClient): + def fake_deadline_client_has_this(self) -> str: + return "from fake deadline client" + + +class TestDeadlineShim: + def test_deadline_client_pass_through(self) -> None: + """ + Confirm that DeadlineClient passes through unknown methods to the underlying client + but just executes known methods. 
+ """ + fake_client = FakeClient() + deadline_client = FakeDeadlineClient(fake_client) + + assert deadline_client.fake_deadline_client_has_this() == "from fake deadline client" + assert deadline_client.but_not_this() == "from fake client" + + @pytest.mark.parametrize( + "kwargs_input, name_in_model, kwargs_output", + [ + pytest.param( + {"displayName": MOCK_FARM_NAME}, + "name", + {"name": MOCK_FARM_NAME}, + id="DisplayNameInSubmissionNotModel", + ), + pytest.param( + {"displayName": MOCK_FARM_NAME}, + "displayName", + {"displayName": MOCK_FARM_NAME}, + id="DisplayNameInSubmissionAndModel", + ), + ], + ) + def test_create_farm_name_to_display_name( + self, kwargs_input, name_in_model, kwargs_output + ) -> None: + """ + create_farm will be updated so that name is renamed to displayName. Here we + make sure that the shim is doing its job of: + 1. Calling the underlying client method + 2. Replacing the appropriate key if needed + """ + fake_client = MagicMock() + deadline_client = DeadlineClient(fake_client) + + with patch.object(deadline_client, "_get_deadline_api_input_shape") as input_shape_mock: + input_shape_mock.return_value = {name_in_model: MOCK_FARM_NAME} + deadline_client.create_farm(**kwargs_input) + fake_client.create_farm.assert_called_once_with(**kwargs_output) + + @pytest.mark.parametrize( + "kwargs_input, name_in_model, kwargs_output", + [ + pytest.param( + {"displayName": MOCK_FLEET_NAME}, + "name", + {"name": MOCK_FLEET_NAME}, + id="DisplayNameInSubmissionNotModel", + ), + pytest.param( + {"displayName": MOCK_FLEET_NAME}, + "displayName", + {"displayName": MOCK_FLEET_NAME}, + id="DisplayNameInSubmissionAndModel", + ), + ], + ) + def test_create_fleet_name_to_display_name( + self, kwargs_input, name_in_model, kwargs_output + ) -> None: + """ + create_fleet will be updated so that name is renamed to displayName. + Here we make sure that the shim is doing its job of: + 1. Calling the underlying client method + 2. Replacing the appropriate key if needed + """ + fake_client = MagicMock() + deadline_client = DeadlineClient(fake_client) + + with patch.object(deadline_client, "_get_deadline_api_input_shape") as input_shape_mock: + input_shape_mock.return_value = {name_in_model: MOCK_FLEET_NAME} + deadline_client.create_fleet(**kwargs_input) + fake_client.create_fleet.assert_called_once_with(**kwargs_output) + + @pytest.mark.parametrize( + "kwargs_input, name_in_model, kwargs_output", + [ + pytest.param( + {"displayName": MOCK_QUEUE_NAME}, + "name", + {"name": MOCK_QUEUE_NAME}, + id="DisplayNameInSubmissionNotModel", + ), + pytest.param( + {"displayName": MOCK_QUEUE_NAME}, + "displayName", + {"displayName": MOCK_QUEUE_NAME}, + id="DisplayNameInSubmissionAndModel", + ), + ], + ) + def test_create_queue_name_to_display_name( + self, kwargs_input, name_in_model, kwargs_output + ) -> None: + """ + create_queue will be updated so that name is renamed to displayName. + Here we make sure that the shim is doing its job of: + 1. Calling the underlying client method + 2. 
Replacing the appropriate key if needed + """ + fake_client = MagicMock() + deadline_client = DeadlineClient(fake_client) + + with patch.object(deadline_client, "_get_deadline_api_input_shape") as input_shape_mock: + input_shape_mock.return_value = {name_in_model: MOCK_QUEUE_NAME} + deadline_client.create_queue(**kwargs_input) + fake_client.create_queue.assert_called_once_with(**kwargs_output) + + @pytest.mark.parametrize( + "kwargs_input, kwargs_output, names_in_model", + [ + pytest.param( + {"template": "", "templateType": "", "parameters": ""}, + {"template": "", "templateType": "", "parameters": ""}, + ["template", "templateType", "parameters"], + id="jobTemplate_NewAPI", + ), + pytest.param( + {"template": "", "templateType": "", "parameters": ""}, + { + "jobTemplate": "", + "jobTemplateType": "", + "jobParameters": "", + }, + [ + "jobTemplate", + "jobTemplateType", + "jobParameters", + ], + id="jobTemplate_OldAPI", + ), + pytest.param( + {"template": "", "templateType": "", "parameters": "", "initialState": ""}, + {"jobTemplate": "", "jobTemplateType": "", "jobParameters": "", "initialState": ""}, + ["jobTemplate", "jobTemplateType", "jobParameters", "initialState"], + id="jobTemplate_StateToState", + ), + pytest.param( + {"template": "", "templateType": "", "parameters": "", "targetTaskRunStatus": ""}, + { + "jobTemplate": "", + "jobTemplateType": "", + "jobParameters": "", + "initialState": "", + }, + ["jobTemplate", "jobTemplateType", "jobParameters", "initialState"], + id="jobTemplate_StatusToState", + ), + pytest.param( + {"template": "", "templateType": "", "parameters": "", "targetTaskRunStatus": ""}, + { + "jobTemplate": "", + "jobTemplateType": "", + "jobParameters": "", + "targetTaskRunStatus": "", + }, + ["jobTemplate", "jobTemplateType", "jobParameters", "targetTaskRunStatus"], + id="jobTemplate_StatusToStatus", + ), + ], + ) + def test_create_job_old_api_compatibility( + self, kwargs_input, kwargs_output, names_in_model + ) -> None: + """ + create_job will be updated so that template is renamed to + jobTemplate. Here we make sure that the shim is doing its job of: + 1. Calling the underlying client method + 2. Replacing the appropriate key + + """ + fake_client = MagicMock() + kwargs_output["priority"] = 50 + model: dict = {k: "" for k in names_in_model} + model["priority"] = 50 + deadline_client = DeadlineClient(fake_client) + with patch.object(deadline_client, "_get_deadline_api_input_shape") as input_shape_mock: + input_shape_mock.return_value = kwargs_output + deadline_client.create_job(**kwargs_input) + fake_client.create_job.assert_called_once_with(**kwargs_output) + + def test_get_fleet_name_state_to_displayname_status_remove_type(self) -> None: + """ + get_fleet will be updated such that "name" will be replaced with "displayName" + and "state" will be replaced with "status". "type" will be removed. + Here we make sure that the shim is doing it's job of: + 1. Calling the underlying client method + 2. 
Replacing the appropriate keys + """ + fake_client = MagicMock() + fake_client.get_fleet.return_value = { + "name": "fleet1", + "state": "ACTIVE", + "type": "CUSTOMER_MANAGER", + } + deadline_client = DeadlineClient(fake_client) + response = deadline_client.get_fleet("fleetid-somefleet") + + assert "name" not in response + assert "displayName" in response + assert "state" not in response + assert "status" in response + assert "type" not in response + assert response["displayName"] == "fleet1" + assert response["status"] == "ACTIVE" + fake_client.get_fleet.assert_called_once_with("fleetid-somefleet") + + def test_get_queue_fleet_association_state_to_status(self) -> None: + """ + get_queue_fleet_association will be updated such that "state" will be replaced with "status". + Here we make sure that the shim is doing it's job of: + 1. Calling the underlying client method + 2. Replacing the appropriate keys + """ + fake_client = MagicMock() + fake_client.get_queue_fleet_association.return_value = { + "state": "STOPPED", + } + deadline_client = DeadlineClient(fake_client) + response = deadline_client.get_queue_fleet_association( + "fake-farm-id", "fake-queue-id", "fake-fleet-id" + ) + + assert "state" not in response + assert "status" in response + assert response["status"] == "STOPPED" + fake_client.get_queue_fleet_association.assert_called_once_with( + "fake-farm-id", "fake-queue-id", "fake-fleet-id" + ) diff --git a/test/unit/test_job_attachment_manager.py b/test/unit/test_job_attachment_manager.py new file mode 100644 index 0000000..2289a81 --- /dev/null +++ b/test/unit/test_job_attachment_manager.py @@ -0,0 +1,374 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. + +from datetime import datetime +from unittest import mock + +import pytest +from botocore.exceptions import ClientError, WaiterError +from botocore.stub import ANY, Stubber + +from deadline_test_scaffolding import JobAttachmentManager + + +class TestJobAttachmentManager: + """ + Test suite for the job attachment manager + """ + + @pytest.fixture(autouse=True) + def setup_test(self, mock_get_deadline_models, boto_config): + with mock.patch("deadline_test_scaffolding.job_attachment_manager.DeadlineManager"): + self.job_attachment_manager = JobAttachmentManager( + stage="test", account_id="123456789101" + ) + yield + + def test_deploy_resources(self): + """ + Test that during the normal flow that the upgrade stack boto call is made. + """ + # WHEN + with Stubber(self.job_attachment_manager.stack.meta.client) as stubber: + stubber.add_response( + "update_stack", + { + "StackId": "arn:aws:cloudformation:us-west-2:123456789101:stack/" + "JobAttachmentIntegTest/abcdefgh-1234-ijkl-5678-mnopqrstuvwx" + }, + expected_params={ + "StackName": "JobAttachmentIntegTest", + "TemplateBody": ANY, + "Parameters": [ + { + "ParameterKey": "BucketName", + "ParameterValue": "job-attachment-integ-test-test-123456789101", + }, + ], + }, + ) + stubber.add_response( + "describe_stacks", + { + "Stacks": [ + { + "StackName": "JobAttachmentIntegTest", + "CreationTime": datetime(2015, 1, 1), + "StackStatus": "UPDATE_COMPLETE", + }, + ], + }, + expected_params={ + "StackName": "JobAttachmentIntegTest", + }, + ) + self.job_attachment_manager.deploy_resources() + + stubber.assert_no_pending_responses() + + @mock.patch( + "deadline_test_scaffolding.job_attachment_manager." 
"JobAttachmentManager.cleanup_resources" + ) + def test_deploy_resources_client_error( + self, + mocked_cleanup_resources: mock.MagicMock, + ): + """ + Test that if there's an issue deploying resources, the rest get cleaned up. + """ + # WHEN + with mock.patch.object( + self.job_attachment_manager.deadline_manager, + "create_kms_key", + side_effect=ClientError( + {"ErrorCode": "Oops", "Message": "Something went wrong"}, "create_kms_key" + ), + ), pytest.raises(ClientError): + self.job_attachment_manager.deploy_resources() + + mocked_cleanup_resources.assert_called_once() + + @mock.patch( + "deadline_test_scaffolding.job_attachment_manager." "JobAttachmentManager.cleanup_resources" + ) + def test_deploy_resources_waiter_error( + self, + mocked_cleanup_resources: mock.MagicMock, + ): + """ + Test that if there's an issue deploying resources, the rest get cleaned up. + But this time with a waiter error. + """ + # WHEN + with Stubber(self.job_attachment_manager.stack.meta.client) as stubber, pytest.raises( + WaiterError + ): + stubber.add_response( + "update_stack", + { + "StackId": "arn:aws:cloudformation:us-west-2:123456789101:stack/" + "JobAttachmentIntegTest/abcdefgh-1234-ijkl-5678-mnopqrstuvwx" + }, + expected_params={ + "StackName": "JobAttachmentIntegTest", + "TemplateBody": ANY, + "Parameters": [ + { + "ParameterKey": "BucketName", + "ParameterValue": "job-attachment-integ-test-test-123456789101", + }, + ], + }, + ) + stubber.add_client_error( + "describe_stacks", service_error_code="400", service_message="Oops" + ) + + self.job_attachment_manager.deploy_resources() + + stubber.assert_no_pending_responses() + + mocked_cleanup_resources.assert_called_once() + + def test_deploy_stack_update_while_create_in_progress(self): + """ + Test that if an attempt to update a stack when a stack is in the process of being created, + we wait for the stack to complete being created. + """ + # WHEN + with Stubber(self.job_attachment_manager.stack.meta.client) as stubber: + stubber.add_client_error( + "update_stack", + service_error_code="400", + service_message="JobAttachmentIntegTest is in CREATE_IN_PROGRESS " + "state and can not be updated.", + ) + stubber.add_response( + "describe_stacks", + { + "Stacks": [ + { + "StackName": "JobAttachmentIntegTest", + "CreationTime": datetime(2015, 1, 1), + "StackStatus": "CREATE_COMPLETE", + }, + ], + }, + expected_params={ + "StackName": "JobAttachmentIntegTest", + }, + ) + + self.job_attachment_manager.deploy_stack() + + stubber.assert_no_pending_responses() + + @mock.patch( + "deadline_test_scaffolding.job_attachment_manager." "JobAttachmentManager._create_stack" + ) + def test_deploy_stack_update_while_stack_doesnt_need_updating( + self, mocked__create_stack: mock.MagicMock + ): + """ + Test that if a stack already exists that doesn't need updating, nothing happens. + """ + # WHEN + with Stubber(self.job_attachment_manager.stack.meta.client) as stubber: + stubber.add_client_error( + "update_stack", + service_error_code="400", + service_message="No updates are to be performed.", + ) + + self.job_attachment_manager.deploy_stack() + + stubber.assert_no_pending_responses() + + mocked__create_stack.assert_not_called() + + def test_deploy_stack_stack_doesnt_exist(self): + """ + Test that if when updating the stack, that it gets created if it doesn't exist. 
+        """
+        # WHEN
+        with Stubber(self.job_attachment_manager.stack.meta.client) as stubber:
+            stubber.add_client_error(
+                "update_stack",
+                service_error_code="400",
+                service_message="The Stack JobAttachmentIntegTest doesn't exist",
+            )
+            stubber.add_response(
+                "create_stack",
+                {
+                    "StackId": "arn:aws:cloudformation:us-west-2:123456789101:stack/"
+                    "JobAttachmentIntegTest/abcdefgh-1234-ijkl-5678-mnopqrstuvwx"
+                },
+                expected_params={
+                    "StackName": "JobAttachmentIntegTest",
+                    "TemplateBody": ANY,
+                    "OnFailure": "DELETE",
+                    "EnableTerminationProtection": False,
+                    "Parameters": [
+                        {
+                            "ParameterKey": "BucketName",
+                            "ParameterValue": "job-attachment-integ-test-test-123456789101",
+                        },
+                    ],
+                },
+            )
+            stubber.add_response(
+                "describe_stacks",
+                {
+                    "Stacks": [
+                        {
+                            "StackName": "JobAttachmentIntegTest",
+                            "CreationTime": datetime(2015, 1, 1),
+                            "StackStatus": "CREATE_COMPLETE",
+                        },
+                    ],
+                },
+                expected_params={
+                    "StackName": "JobAttachmentIntegTest",
+                },
+            )
+
+            self.job_attachment_manager.deploy_stack()
+
+            stubber.assert_no_pending_responses()
+
+    def test_deploy_stack_stack_already_exists(self):
+        """
+        Test that if we try to create a stack when it already exists,
+        we wait for it to finish being created.
+        """
+        # WHEN
+        with Stubber(self.job_attachment_manager.stack.meta.client) as stubber:
+            stubber.add_client_error(
+                "update_stack",
+                service_error_code="400",
+                service_message="The Stack JobAttachmentIntegTest doesn't exist",
+            )
+            stubber.add_client_error(
+                "create_stack",
+                service_error_code="400",
+                service_message="Stack [JobAttachmentIntegTest] already exists",
+            )
+            stubber.add_response(
+                "describe_stacks",
+                {
+                    "Stacks": [
+                        {
+                            "StackName": "JobAttachmentIntegTest",
+                            "CreationTime": datetime(2015, 1, 1),
+                            "StackStatus": "CREATE_COMPLETE",
+                        },
+                    ],
+                },
+                expected_params={
+                    "StackName": "JobAttachmentIntegTest",
+                },
+            )
+
+            self.job_attachment_manager.deploy_stack()
+
+            stubber.assert_no_pending_responses()
+
+    def test_deploy_stack_other_client_error(self):
+        """
+        Test that when we create a stack, unhandled client errors get raised.
+        """
+        # WHEN
+        with Stubber(self.job_attachment_manager.stack.meta.client) as stubber, pytest.raises(
+            ClientError
+        ):
+            stubber.add_client_error(
+                "update_stack",
+                service_error_code="400",
+                service_message="The Stack JobAttachmentIntegTest doesn't exist",
+            )
+            stubber.add_client_error(
+                "create_stack",
+                service_error_code="400",
+                service_message="Oops",
+            )
+
+            self.job_attachment_manager.deploy_stack()
+
+            stubber.assert_no_pending_responses()
+
+    def test_empty_bucket_bucket_doesnt_exist(self):
+        """
+        If we try to empty a bucket that doesn't exist, make sure nothing happens.
+        """
+        # WHEN
+        with Stubber(self.job_attachment_manager.bucket.meta.client) as stubber:
+            stubber.add_client_error(
+                "list_objects",
+                service_error_code="400",
+                service_message="The specified bucket does not exist",
+            )
+
+            self.job_attachment_manager.empty_bucket()
+
+            stubber.assert_no_pending_responses()
+
+    def test_empty_bucket_any_other_error(self):
+        """
+        Test that unhandled client errors while emptying the bucket are raised.
+ """ + # WHEN + with Stubber(self.job_attachment_manager.bucket.meta.client) as stubber, pytest.raises( + ClientError + ): + stubber.add_client_error( + "list_objects", + service_error_code="400", + service_message="Ooops", + ) + + self.job_attachment_manager.empty_bucket() + + stubber.assert_no_pending_responses() + + def test_cleanup_resources(self): + """ + Test that all resources get cleaned up when they exist. + """ + self.job_attachment_manager.deadline_manager.farm_id = "farm-asdf" + self.job_attachment_manager.deadline_manager.kms_key_metadata = {"key_id": "aasdfkj"} + self.job_attachment_manager.deadline_manager.queue_id = "queue-asdfji" + + # WHEN + with Stubber(self.job_attachment_manager.bucket.meta.client) as stubber: + stubber.add_response( + "list_objects", + { + "Contents": [], + }, + ) + self.job_attachment_manager.cleanup_resources() + + stubber.assert_no_pending_responses() + + self.job_attachment_manager.deadline_manager.delete_farm.assert_called_once() # type: ignore[attr-defined] # noqa + self.job_attachment_manager.deadline_manager.delete_kms_key.assert_called_once() # type: ignore[attr-defined] # noqa + self.job_attachment_manager.deadline_manager.delete_queue.assert_called_once() # type: ignore[attr-defined] # noqa + + def test_cleanup_resources_no_resource_exist(self): + """ + Test that no deletion calls are made when resources don't exist. + """ + # WHEN + with Stubber(self.job_attachment_manager.bucket.meta.client) as stubber: + stubber.add_response( + "list_objects", + { + "Contents": [], + }, + ) + self.job_attachment_manager.cleanup_resources() + + stubber.assert_no_pending_responses() + + self.job_attachment_manager.deadline_manager.create_farm.assert_not_called() # type: ignore[attr-defined] # noqa + self.job_attachment_manager.deadline_manager.create_kms_key.assert_not_called() # type: ignore[attr-defined] # noqa + self.job_attachment_manager.deadline_manager.create_queue.assert_not_called() # type: ignore[attr-defined] # noqa
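+
+# Note on the Stubber pattern used throughout this module: botocore's Stubber
+# replays queued responses strictly in order, one per client call, so each test
+# enqueues exactly the sequence of CloudFormation/S3 calls it expects. A minimal
+# illustration (hypothetical, not part of the test suite):
+#
+#     client = boto3.client("s3")
+#     with Stubber(client) as stubber:
+#         stubber.add_response("list_objects", {"Contents": []})
+#         client.list_objects(Bucket="some-bucket")  # consumes the queued response
+#         stubber.assert_no_pending_responses()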