From 05a0dd9888943ce79c5337a9a1d7246ec35c6890 Mon Sep 17 00:00:00 2001
From: mkolasinski-splunk
Date: Fri, 14 Jul 2023 03:08:38 +0200
Subject: [PATCH 01/15] chore: initial commit

---
 .github/workflows/reusable-build-test-release.yml | 9 +++++++++
 1 file changed, 9 insertions(+)

diff --git a/.github/workflows/reusable-build-test-release.yml b/.github/workflows/reusable-build-test-release.yml
index 77a96ab7b..8e5b1f71d 100644
--- a/.github/workflows/reusable-build-test-release.yml
+++ b/.github/workflows/reusable-build-test-release.yml
@@ -1184,6 +1184,15 @@ jobs:
           name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} tests diag
           path: |
             ${{ needs.setup.outputs.directory-path }}/diag*
+      - name: print workflow name
+        if: always()
+        run: |
+          echo "workflow name: ${{ steps.run-tests.outputs.workflow-name }}"
+      - name: Check workflow cancel
+        id: cancel-workflow
+        if: cancelled()
+        run: |
+          argo stop -v -n workflows --argo-base-href '' ${{ steps.run-tests.outputs.workflow-name }}
 
   run-requirement-tests:
     if: |

From 6987c4cb9b0b8476c8dfa3d7c8c8fee41c1db7a7 Mon Sep 17 00:00:00 2001
From: mkolasinski-splunk
Date: Fri, 14 Jul 2023 03:24:24 +0200
Subject: [PATCH 02/15] chore: remove always condition for test job

---
 .github/workflows/reusable-build-test-release.yml | 1 -
 1 file changed, 1 deletion(-)

diff --git a/.github/workflows/reusable-build-test-release.yml b/.github/workflows/reusable-build-test-release.yml
index 8e5b1f71d..340f18ebc 100644
--- a/.github/workflows/reusable-build-test-release.yml
+++ b/.github/workflows/reusable-build-test-release.yml
@@ -976,7 +976,6 @@ jobs:
 
   run-knowledge-tests:
     if: |
-      always() &&
       needs.build.result == 'success' &&
       needs.test-inventory.outputs.knowledge == 'true' &&
       (needs.setup-workflow.outputs.execute-ko == 'Yes' || needs.setup-workflow.outputs.execute-labeled-knowledge == 'true')

From 02f3ccc02761dc4ff5bc9a63920936c0bda322e8 Mon Sep 17 00:00:00 2001
From: mkolasinski-splunk
Date: Fri, 14 Jul 2023 03:42:25 +0200
Subject: [PATCH 03/15] chore: rework conditionals for steps

---
 .../workflows/reusable-build-test-release.yml | 37 ++++++++++---------
 1 file changed, 19 insertions(+), 18 deletions(-)

diff --git a/.github/workflows/reusable-build-test-release.yml b/.github/workflows/reusable-build-test-release.yml
index 340f18ebc..d65f36f5a 100644
--- a/.github/workflows/reusable-build-test-release.yml
+++ b/.github/workflows/reusable-build-test-release.yml
@@ -1065,13 +1065,22 @@ jobs:
           k8s-manifests-branch: ${{ needs.setup.outputs.k8s-manifests-branch }}
       - name: Read secrets from AWS Secrets Manager again into environment variables in case credential rotation
         id: update-argo-token
-        if: always()
+        if: always() && !cancelled()
         run: |
           ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id ta-github-workflow-automation-token | jq -r '.SecretString')
           echo "argo-token=$ARGO_TOKEN" >> "$GITHUB_OUTPUT"
+      - name: print workflow name
+        if: always()
+        run: |
+          echo "workflow name: ${{ steps.run-tests.outputs.workflow-name }}"
+      - name: Check workflow cancel
+        id: cancel-workflow
+        if: cancelled()
+        run: |
+          argo stop -v -n workflows --argo-base-href '' ${{ steps.run-tests.outputs.workflow-name }}
       - name: Check if pod was deleted
         id: is-pod-deleted
-        if: always()
+        if: always() && !cancelled()
         shell: bash
         env:
           ARGO_TOKEN: ${{ steps.update-argo-token.outputs.argo-token }}
@@ -1085,7 +1094,7 @@
         shell: bash
         env:
           ARGO_TOKEN: ${{ steps.update-argo-token.outputs.argo-token }}
-        if: always()
+        if: always() &&
!cancelled() run: | set -o xtrace set +e @@ -1103,7 +1112,7 @@ jobs: env: ARGO_TOKEN: ${{ steps.update-argo-token.outputs.argo-token }} shell: bash - if: always() + if: always() && !cancelled() run: | set +e # shellcheck disable=SC2157 @@ -1121,13 +1130,13 @@ jobs: ARGO_STATUS=$(argo get "${WORKFLOW_NAME}" -n workflows -o json | jq -r '.status.phase') done - name: pull artifacts from s3 bucket - if: always() + if: always() && !cancelled() run: | echo "pulling artifacts" aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/artifacts-${{ steps.create-job-name.outputs.job-name }}/${{ steps.create-job-name.outputs.job-name }}.tgz ${{ needs.setup.outputs.directory-path }}/ tar -xf ${{ needs.setup.outputs.directory-path }}/${{ steps.create-job-name.outputs.job-name }}.tgz -C ${{ needs.setup.outputs.directory-path }} - name: pull logs from s3 bucket - if: always() + if: always() && !cancelled() run: | # shellcheck disable=SC2157 if [ -z "${{ steps.retry-wf.outputs.workflow-name }}" ]; then @@ -1139,13 +1148,13 @@ jobs: mkdir -p ${{ needs.setup.outputs.directory-path }}/argo-logs aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/${WORKFLOW_NAME}/ ${{ needs.setup.outputs.directory-path }}/argo-logs/ --recursive - uses: actions/upload-artifact@v3 - if: always() + if: always() && !cancelled() with: name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} tests artifacts path: | ${{ needs.setup.outputs.directory-path }}/test-results - uses: actions/upload-artifact@v3 - if: always() + if: always() && !cancelled() with: name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} tests logs path: | @@ -1167,7 +1176,7 @@ jobs: - name: Test Report id: test_report uses: dorny/test-reporter@v1 - if: always() + if: always() && !cancelled() with: name: splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} test report path: "${{ needs.setup.outputs.directory-path }}/test-results/*.xml" @@ -1183,15 +1192,7 @@ jobs: name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} tests diag path: | ${{ needs.setup.outputs.directory-path }}/diag* - - name: print workflow name - if: always() - run: | - echo "workflow name: ${{ steps.run-tests.outputs.workflow-name }}" - - name: Check workflow cancel - id: cancel-workflow - if: cancelled() - run: | - argo stop -v -n workflows --argo-base-href '' ${{ steps.run-tests.outputs.workflow-name }} + run-requirement-tests: if: | From 33ed3a81e9ec8d0ab18ecab1244ec3ed524b1266 Mon Sep 17 00:00:00 2001 From: mkolasinski-splunk Date: Fri, 14 Jul 2023 10:54:49 +0200 Subject: [PATCH 04/15] chore: move to staging --- .../workflows/reusable-build-test-release.yml | 30 +++++++++---------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/.github/workflows/reusable-build-test-release.yml b/.github/workflows/reusable-build-test-release.yml index d65f36f5a..206bfaeff 100644 --- a/.github/workflows/reusable-build-test-release.yml +++ b/.github/workflows/reusable-build-test-release.yml @@ -612,7 +612,7 @@ jobs: run: | echo "name=$(basename "${{ steps.slim.outputs.OUTPUT }}")" >> "$GITHUB_OUTPUT" basename "${{ steps.slim.outputs.OUTPUT }}" - aws s3 cp "${{ steps.slim.outputs.OUTPUT }}" s3://ta-production-artifacts/ta-apps/ + aws s3 cp "${{ steps.slim.outputs.OUTPUT }}" s3://ta-staging-artifacts/ta-apps/ - name: artifact-splunk-parts uses: actions/upload-artifact@v3 with: @@ -745,7 
+745,7 @@ jobs: uses: splunk/addonfactory-workflow-requirement-files-unit-tests@v1.4 with: input-files: tests/requirement_test/logs - - name: Archive production artifacts + - name: Archive staging artifacts if: always() uses: actions/upload-artifact@v3 with: @@ -924,16 +924,16 @@ jobs: JOB_NAME=$(echo "$ADDON_NAME" | tail -c 16)-$(echo "${GITHUB_SHA}" | tail -c 8)-TEST-TYPE-${GITHUB_RUN_ID} JOB_NAME=${JOB_NAME//[_.]/-} LABELS="addon-name=${ADDON_NAME}" - ADDON_UPLOAD_PATH="s3://ta-production-artifacts/ta-apps/${{ needs.build.outputs.buildname }}" + ADDON_UPLOAD_PATH="s3://ta-staging-artifacts/ta-apps/${{ needs.build.outputs.buildname }}" { - echo "argo-server=argo.wfe.splgdi.com:443" + echo "argo-server=argo.staging.wfe.splgdi.com:443" echo "argo-http1=true" echo "argo-secure=true" echo "argo-base-href=\'\'" echo "argo-namespace=workflows" echo "argo-workflow-tmpl-name=ta-workflow" echo "directory-path=/tmp" - echo "s3-bucket=ta-production-artifacts" + echo "s3-bucket=ta-staging-artifacts" echo "addon-name=\"$ADDON_NAME\"" echo "job-name=wf-$JOB_NAME" echo "labels=$LABELS" @@ -1022,7 +1022,7 @@ jobs: - name: Read secrets from AWS Secrets Manager into environment variables id: get-argo-token run: | - ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id ta-github-workflow-automation-token | jq -r '.SecretString') + ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id ta-staging-github-workflow-automation-token | jq -r '.SecretString') echo "argo-token=$ARGO_TOKEN" >> "$GITHUB_OUTPUT" - name: create job name id: create-job-name @@ -1067,7 +1067,7 @@ jobs: id: update-argo-token if: always() && !cancelled() run: | - ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id ta-github-workflow-automation-token | jq -r '.SecretString') + ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id ta-staging-github-workflow-automation-token | jq -r '.SecretString') echo "argo-token=$ARGO_TOKEN" >> "$GITHUB_OUTPUT" - name: print workflow name if: always() @@ -1242,7 +1242,7 @@ jobs: - name: Read secrets from AWS Secrets Manager into environment variables id: get-argo-token run: | - ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id ta-github-workflow-automation-token | jq -r '.SecretString') + ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id ta-staging-github-workflow-automation-token | jq -r '.SecretString') echo "argo-token=$ARGO_TOKEN" >> "$GITHUB_OUTPUT" - name: create job name id: create-job-name @@ -1434,7 +1434,7 @@ jobs: - name: Read secrets from AWS Secrets Manager into environment variables id: get-argo-token run: | - ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id ta-github-workflow-automation-token | jq -r '.SecretString') + ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id ta-staging-github-workflow-automation-token | jq -r '.SecretString') echo "argo-token=$ARGO_TOKEN" >> "$GITHUB_OUTPUT" - name: create job name id: create-job-name @@ -1479,7 +1479,7 @@ jobs: id: update-argo-token if: always() run: | - ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id ta-github-workflow-automation-token | jq -r '.SecretString') + ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id ta-staging-github-workflow-automation-token | jq -r '.SecretString') echo "argo-token=$ARGO_TOKEN" >> "$GITHUB_OUTPUT" - name: Check if pod was deleted id: is-pod-deleted @@ -1633,7 +1633,7 @@ jobs: - name: Read secrets from AWS Secrets Manager into environment variables id: get-argo-token run: | - ARGO_TOKEN=$(aws secretsmanager 
get-secret-value --secret-id ta-github-workflow-automation-token | jq -r '.SecretString') + ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id ta-staging-github-workflow-automation-token | jq -r '.SecretString') echo "argo-token=$ARGO_TOKEN" >> "$GITHUB_OUTPUT" - name: create job name id: create-job-name @@ -1690,7 +1690,7 @@ jobs: id: update-argo-token if: always() run: | - ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id ta-github-workflow-automation-token | jq -r '.SecretString') + ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id ta-staging-github-workflow-automation-token | jq -r '.SecretString') echo "argo-token=$ARGO_TOKEN" >> "$GITHUB_OUTPUT" - name: Check if pod was deleted id: is-pod-deleted @@ -1841,7 +1841,7 @@ jobs: - name: Read secrets from AWS Secrets Manager into environment variables id: get-argo-token run: | - ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id ta-github-workflow-automation-token | jq -r '.SecretString') + ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id ta-staging-github-workflow-automation-token | jq -r '.SecretString') echo "argo-token=$ARGO_TOKEN" >> "$GITHUB_OUTPUT" - name: create job name id: create-job-name @@ -2046,7 +2046,7 @@ jobs: - name: Read secrets from AWS Secrets Manager into environment variables id: get-argo-token run: | - ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id ta-github-workflow-automation-token | jq -r '.SecretString') + ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id ta-staging-github-workflow-automation-token | jq -r '.SecretString') echo "argo-token=$ARGO_TOKEN" >> "$GITHUB_OUTPUT" - name: create job name id: create-job-name @@ -2249,7 +2249,7 @@ jobs: - name: Read secrets from AWS Secrets Manager into environment variables id: get-argo-token run: | - ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id ta-github-workflow-automation-token | jq -r '.SecretString') + ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id ta-staging-github-workflow-automation-token | jq -r '.SecretString') echo "argo-token=$ARGO_TOKEN" >> "$GITHUB_OUTPUT" - name: create job name id: create-job-name From 21926402eb8dada4cd672f5ec66089715f257c1e Mon Sep 17 00:00:00 2001 From: mkolasinski-splunk Date: Fri, 14 Jul 2023 21:44:53 +0200 Subject: [PATCH 05/15] chore: use cancel-workflow to cancel job --- .github/workflows/reusable-build-test-release.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/reusable-build-test-release.yml b/.github/workflows/reusable-build-test-release.yml index 206bfaeff..84966e805 100644 --- a/.github/workflows/reusable-build-test-release.yml +++ b/.github/workflows/reusable-build-test-release.yml @@ -1077,7 +1077,7 @@ jobs: id: cancel-workflow if: cancelled() run: | - argo stop -v -n workflows --argo-base-href '' ${{ steps.run-tests.outputs.workflow-name }} + argo submit -v -o json --from wftmpl/cancel-workflow -n workflows -l workflows.argoproj.io/workflow-template=cancel-workflow --argo-base-href '' -p workflow-to-cancel=${{ steps.run-tests.outputs.workflow-name }} - name: Check if pod was deleted id: is-pod-deleted if: always() && !cancelled() From 97de2bcc18c9f34c3d3aa2897c13367a61a24231 Mon Sep 17 00:00:00 2001 From: mkolasinski-splunk Date: Sat, 15 Jul 2023 07:46:18 +0200 Subject: [PATCH 06/15] test: test commit with argo submit directly from gh wf --- .../workflows/reusable-build-test-release.yml | 19 ++----------------- 1 file changed, 2 insertions(+), 17 deletions(-) diff --git 
a/.github/workflows/reusable-build-test-release.yml b/.github/workflows/reusable-build-test-release.yml index 84966e805..49174e772 100644 --- a/.github/workflows/reusable-build-test-release.yml +++ b/.github/workflows/reusable-build-test-release.yml @@ -1046,23 +1046,8 @@ jobs: echo "Splunk password is available in LastPass shared folder: Shared Splunk - GDI - Lab Credentials under SPLUNK_DEPLOYMENT_PASSWORD" - name: run-tests id: run-tests - env: - ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} - uses: splunk/wfe-test-runner-action@v1.6 - with: - splunk: ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} - test-type: ${{ env.TEST_TYPE }} - test-args: "" - job-name: ${{ steps.create-job-name.outputs.job-name }} - labels: ${{ needs.setup.outputs.labels }} - workflow-tmpl-name: ${{ needs.setup.outputs.argo-workflow-tmpl-name }} - workflow-template-ns: ${{ needs.setup.outputs.argo-namespace }} - delay-destroy: ${{ needs.setup-workflow.outputs.delay-destroy-ko }} - addon-url: ${{ needs.setup.outputs.addon-upload-path }} - addon-name: ${{ needs.setup.outputs.addon-name }} - sc4s-version: ${{ matrix.sc4s.version }} - sc4s-docker-registry: ${{ matrix.sc4s.docker_registry }} - k8s-manifests-branch: ${{ needs.setup.outputs.k8s-manifests-branch }} + run: | + argo submit -v -o json --from wftmpl/ta-workflow -n workflows -l workflows.argoproj.io/workflow-template=ta-workflow --argo-base-href '' -p ci-repository-url=https://github.com/splunk/splunk-add-on-for-google-workspace.git -p ci-commit-sha=ci/job-cancel -p delay-destroy=No -p addon-url=s3://ta-staging-artifacts/ta-apps/Splunk_TA_Google_Workspace-0.477.5549584494.spl -p job-name=wf-oogle-workspace-7d6e830-knowledge-5549584494-eqmf -p splunk-version=9.0.5 -p test-type=knowledge -p k8s-manifests-branch=main -p pytest-args= -p 'addon-name="GOOGLE_WORKSPACE"' -p vendor-version=default -p sc4s-version=2.49.5 -p install-java=No -p sc4s-docker-registry=ghcr.io/splunk/splunk-connect-for-syslog/container2 -p os-name=ubuntu -p os-version=latest -l=addon-name=GOOGLE_WORKSPACE,test-type=knowledge,splunk-version=9.0.5 - name: Read secrets from AWS Secrets Manager again into environment variables in case credential rotation id: update-argo-token if: always() && !cancelled() From 5959172d28ba0f5bd3c7330987d8c76bb4b4820e Mon Sep 17 00:00:00 2001 From: mkolasinski-splunk Date: Sat, 15 Jul 2023 08:09:49 +0200 Subject: [PATCH 07/15] chore: try using wfe runner action for cancelling --- .../workflows/reusable-build-test-release.yml | 24 +++++++++++++++---- 1 file changed, 20 insertions(+), 4 deletions(-) diff --git a/.github/workflows/reusable-build-test-release.yml b/.github/workflows/reusable-build-test-release.yml index 49174e772..944c7053c 100644 --- a/.github/workflows/reusable-build-test-release.yml +++ b/.github/workflows/reusable-build-test-release.yml @@ -1046,8 +1046,23 @@ jobs: echo "Splunk password is available in LastPass shared folder: Shared Splunk - GDI - Lab Credentials under SPLUNK_DEPLOYMENT_PASSWORD" - name: run-tests id: run-tests - run: | - argo submit -v -o json --from wftmpl/ta-workflow -n workflows -l workflows.argoproj.io/workflow-template=ta-workflow --argo-base-href '' -p ci-repository-url=https://github.com/splunk/splunk-add-on-for-google-workspace.git -p ci-commit-sha=ci/job-cancel -p delay-destroy=No -p addon-url=s3://ta-staging-artifacts/ta-apps/Splunk_TA_Google_Workspace-0.477.5549584494.spl -p job-name=wf-oogle-workspace-7d6e830-knowledge-5549584494-eqmf -p splunk-version=9.0.5 -p test-type=knowledge -p 
k8s-manifests-branch=main -p pytest-args= -p 'addon-name="GOOGLE_WORKSPACE"' -p vendor-version=default -p sc4s-version=2.49.5 -p install-java=No -p sc4s-docker-registry=ghcr.io/splunk/splunk-connect-for-syslog/container2 -p os-name=ubuntu -p os-version=latest -l=addon-name=GOOGLE_WORKSPACE,test-type=knowledge,splunk-version=9.0.5 + env: + ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} + uses: splunk/wfe-test-runner-action@v1.6 + with: + splunk: ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} + test-type: ${{ env.TEST_TYPE }} + test-args: "" + job-name: ${{ steps.create-job-name.outputs.job-name }} + labels: ${{ needs.setup.outputs.labels }} + workflow-tmpl-name: ${{ needs.setup.outputs.argo-workflow-tmpl-name }} + workflow-template-ns: ${{ needs.setup.outputs.argo-namespace }} + delay-destroy: ${{ needs.setup-workflow.outputs.delay-destroy-ko }} + addon-url: ${{ needs.setup.outputs.addon-upload-path }} + addon-name: ${{ needs.setup.outputs.addon-name }} + sc4s-version: ${{ matrix.sc4s.version }} + sc4s-docker-registry: ${{ matrix.sc4s.docker_registry }} + k8s-manifests-branch: ${{ needs.setup.outputs.k8s-manifests-branch }} - name: Read secrets from AWS Secrets Manager again into environment variables in case credential rotation id: update-argo-token if: always() && !cancelled() @@ -1061,8 +1076,9 @@ jobs: - name: Check workflow cancel id: cancel-workflow if: cancelled() - run: | - argo submit -v -o json --from wftmpl/cancel-workflow -n workflows -l workflows.argoproj.io/workflow-template=cancel-workflow --argo-base-href '' -p workflow-to-cancel=${{ steps.run-tests.outputs.workflow-name }} + uses: splunk/wfe-test-runner-action@feat/cancel-workflow + with: + workflow-to-cancel: ${{ steps.run-tests.outputs.workflow-name }} - name: Check if pod was deleted id: is-pod-deleted if: always() && !cancelled() From 1f2fdcc266c2d1aa768519326a8211814c85724a Mon Sep 17 00:00:00 2001 From: mkolasinski-splunk Date: Sat, 15 Jul 2023 10:42:54 +0200 Subject: [PATCH 08/15] chore: add env ARGO_TOKEN to cancel workflow step --- .github/workflows/reusable-build-test-release.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/reusable-build-test-release.yml b/.github/workflows/reusable-build-test-release.yml index 944c7053c..8fbd9c79e 100644 --- a/.github/workflows/reusable-build-test-release.yml +++ b/.github/workflows/reusable-build-test-release.yml @@ -1075,6 +1075,8 @@ jobs: echo "workflow name: ${{ steps.run-tests.outputs.workflow-name }}" - name: Check workflow cancel id: cancel-workflow + env: + ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} if: cancelled() uses: splunk/wfe-test-runner-action@feat/cancel-workflow with: From 0034264c88d7a3eac689508b96e2832036487aa1 Mon Sep 17 00:00:00 2001 From: mkolasinski-splunk Date: Sat, 15 Jul 2023 10:56:12 +0200 Subject: [PATCH 09/15] chore: run argo submit directly from reusable workflow --- .github/workflows/reusable-build-test-release.yml | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/.github/workflows/reusable-build-test-release.yml b/.github/workflows/reusable-build-test-release.yml index 8fbd9c79e..224841a56 100644 --- a/.github/workflows/reusable-build-test-release.yml +++ b/.github/workflows/reusable-build-test-release.yml @@ -1078,9 +1078,8 @@ jobs: env: ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} if: cancelled() - uses: splunk/wfe-test-runner-action@feat/cancel-workflow - with: - workflow-to-cancel: ${{ steps.run-tests.outputs.workflow-name }} + run: | 
+ cancel_response=`argo submit -v -o json --from wftmpl/cancel-workflow -n workflows -l workflows.argoproj.io/workflow-template=cancel-workflow --argo-base-href '' -p workflow-to-cancel=${{ steps.run-tests.outputs.workflow-name }}` - name: Check if pod was deleted id: is-pod-deleted if: always() && !cancelled() From 29733b75161d1f2dfbb417e683ed99713259f7f9 Mon Sep 17 00:00:00 2001 From: mkolasinski-splunk Date: Thu, 20 Jul 2023 15:57:21 +0200 Subject: [PATCH 10/15] chore: replace always conditional with !cancelled --- .../workflows/reusable-build-test-release.yml | 141 +++++++++--------- .pre-commit-config.yaml | 2 +- 2 files changed, 72 insertions(+), 71 deletions(-) diff --git a/.github/workflows/reusable-build-test-release.yml b/.github/workflows/reusable-build-test-release.yml index 224841a56..fa640e88c 100644 --- a/.github/workflows/reusable-build-test-release.yml +++ b/.github/workflows/reusable-build-test-release.yml @@ -495,7 +495,7 @@ jobs: - run-unit-tests - fossa-scan if: | - always() && + !cancelled() && (needs.run-unit-tests.result == 'success' || needs.run-unit-tests.result == 'skipped') outputs: buildname: ${{ steps.buildupload.outputs.name }} @@ -597,12 +597,13 @@ jobs: with: name: artifact-openapi path: ${{ github.workspace }}/${{ steps.uccgen.outputs.OUTPUT }}/static/openapi.json + if: !cancelled() - name: artifact-splunk-base uses: actions/upload-artifact@v3 with: name: package-splunkbase path: ${{ steps.slim.outputs.OUTPUT }} - if: always() + if: !cancelled() - name: upload-build-to-s3 id: buildupload env: @@ -618,7 +619,7 @@ jobs: with: name: package-deployment path: build/package/deployment** - if: always() + if: !cancelled() build-311: runs-on: ubuntu-latest @@ -704,7 +705,7 @@ jobs: name: security-virustotal needs: build if: | - always() && + !cancelled() && needs.build.result == 'success' runs-on: ubuntu-latest steps: @@ -725,7 +726,7 @@ jobs: - build - test-inventory if: | - always() && + !cancelled() && needs.build.result == 'success' && needs.test-inventory.outputs.requirement_test == 'true' permissions: @@ -746,7 +747,7 @@ jobs: with: input-files: tests/requirement_test/logs - name: Archive staging artifacts - if: always() + if: !cancelled() uses: actions/upload-artifact@v3 with: name: test-results @@ -757,7 +758,7 @@ jobs: name: quality-appinspect-${{ matrix.tags }} needs: build if: | - always() && + !cancelled() && needs.build.result == 'success' runs-on: ubuntu-latest strategy: @@ -785,7 +786,7 @@ jobs: included_tags: ${{ matrix.tags }} result_file: appinspect_result_${{ matrix.tags }}.json - name: upload-appinspect-report - if: always() + if: !cancelled() uses: actions/upload-artifact@v3 with: name: appinspect_${{ matrix.tags }}_checks.json @@ -804,7 +805,7 @@ jobs: - security-virustotal - meta if: | - always() && + !cancelled() && needs.security-virustotal.result == 'success' && needs.meta.result == 'success' outputs: @@ -882,7 +883,7 @@ jobs: - build - test-inventory if: | - always() && + !cancelled() && needs.build.result == 'success' runs-on: ubuntu-latest outputs: @@ -1065,12 +1066,12 @@ jobs: k8s-manifests-branch: ${{ needs.setup.outputs.k8s-manifests-branch }} - name: Read secrets from AWS Secrets Manager again into environment variables in case credential rotation id: update-argo-token - if: always() && !cancelled() + if: !cancelled() && !cancelled() run: | ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id ta-staging-github-workflow-automation-token | jq -r '.SecretString') echo "argo-token=$ARGO_TOKEN" >> "$GITHUB_OUTPUT" - name: print 
workflow name - if: always() + if: !cancelled() run: | echo "workflow name: ${{ steps.run-tests.outputs.workflow-name }}" - name: Check workflow cancel @@ -1082,7 +1083,7 @@ jobs: cancel_response=`argo submit -v -o json --from wftmpl/cancel-workflow -n workflows -l workflows.argoproj.io/workflow-template=cancel-workflow --argo-base-href '' -p workflow-to-cancel=${{ steps.run-tests.outputs.workflow-name }}` - name: Check if pod was deleted id: is-pod-deleted - if: always() && !cancelled() + if: !cancelled() && !cancelled() shell: bash env: ARGO_TOKEN: ${{ steps.update-argo-token.outputs.argo-token }} @@ -1096,7 +1097,7 @@ jobs: shell: bash env: ARGO_TOKEN: ${{ steps.update-argo-token.outputs.argo-token }} - if: always() && !cancelled() + if: !cancelled() && !cancelled() run: | set -o xtrace set +e @@ -1114,7 +1115,7 @@ jobs: env: ARGO_TOKEN: ${{ steps.update-argo-token.outputs.argo-token }} shell: bash - if: always() && !cancelled() + if: !cancelled() && !cancelled() run: | set +e # shellcheck disable=SC2157 @@ -1132,13 +1133,13 @@ jobs: ARGO_STATUS=$(argo get "${WORKFLOW_NAME}" -n workflows -o json | jq -r '.status.phase') done - name: pull artifacts from s3 bucket - if: always() && !cancelled() + if: !cancelled() && !cancelled() run: | echo "pulling artifacts" aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/artifacts-${{ steps.create-job-name.outputs.job-name }}/${{ steps.create-job-name.outputs.job-name }}.tgz ${{ needs.setup.outputs.directory-path }}/ tar -xf ${{ needs.setup.outputs.directory-path }}/${{ steps.create-job-name.outputs.job-name }}.tgz -C ${{ needs.setup.outputs.directory-path }} - name: pull logs from s3 bucket - if: always() && !cancelled() + if: !cancelled() && !cancelled() run: | # shellcheck disable=SC2157 if [ -z "${{ steps.retry-wf.outputs.workflow-name }}" ]; then @@ -1150,13 +1151,13 @@ jobs: mkdir -p ${{ needs.setup.outputs.directory-path }}/argo-logs aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/${WORKFLOW_NAME}/ ${{ needs.setup.outputs.directory-path }}/argo-logs/ --recursive - uses: actions/upload-artifact@v3 - if: always() && !cancelled() + if: !cancelled() && !cancelled() with: name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} tests artifacts path: | ${{ needs.setup.outputs.directory-path }}/test-results - uses: actions/upload-artifact@v3 - if: always() && !cancelled() + if: !cancelled() && !cancelled() with: name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} tests logs path: | @@ -1178,7 +1179,7 @@ jobs: - name: Test Report id: test_report uses: dorny/test-reporter@v1 - if: always() && !cancelled() + if: !cancelled() && !cancelled() with: name: splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} test report path: "${{ needs.setup.outputs.directory-path }}/test-results/*.xml" @@ -1198,7 +1199,7 @@ jobs: run-requirement-tests: if: | - always() && + !cancelled() && needs.build.result == 'success' && needs.test-inventory.outputs.requirement_test == 'true' && (needs.setup-workflow.outputs.execute-requirement_test == 'Yes' || needs.setup-workflow.outputs.execute-labeled-requirement == 'true') @@ -1287,7 +1288,7 @@ jobs: k8s-manifests-branch: ${{ needs.setup.outputs.k8s-manifests-branch }} - name: Check if pod was deleted id: is-pod-deleted - if: always() + if: !cancelled() shell: bash env: ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} @@ -1301,7 +1302,7 @@ jobs: shell: bash 
env: ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} - if: always() + if: !cancelled() run: | set -o xtrace set +e @@ -1319,7 +1320,7 @@ jobs: env: ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} shell: bash - if: always() + if: !cancelled() run: | set +e # shellcheck disable=SC2157 @@ -1337,13 +1338,13 @@ jobs: ARGO_STATUS=$(argo get "${WORKFLOW_NAME}" -n workflows -o json | jq -r '.status.phase') done - name: pull artifacts from s3 bucket - if: always() + if: !cancelled() run: | echo "pulling artifacts" aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/artifacts-${{ steps.create-job-name.outputs.job-name }}/${{ steps.create-job-name.outputs.job-name }}.tgz ${{ needs.setup.outputs.directory-path }}/ tar -xf ${{ needs.setup.outputs.directory-path }}/${{ steps.create-job-name.outputs.job-name }}.tgz -C ${{ needs.setup.outputs.directory-path }} - name: pull logs from s3 bucket - if: always() + if: !cancelled() run: | # shellcheck disable=SC2157 if [ -z "${{ steps.retry-wf.outputs.workflow-name }}" ]; then @@ -1355,13 +1356,13 @@ jobs: mkdir -p ${{ needs.setup.outputs.directory-path }}/argo-logs aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/${WORKFLOW_NAME}/ ${{ needs.setup.outputs.directory-path }}/argo-logs/ --recursive - uses: actions/upload-artifact@v3 - if: always() + if: !cancelled() with: name: archive splunk ${{ matrix.splunk.version }} ${{ env.TEST_TYPE }} tests artifacts path: | ${{ needs.setup.outputs.directory-path }}/test-results - uses: actions/upload-artifact@v3 - if: always() + if: !cancelled() with: name: archive splunk ${{ matrix.splunk.version }} ${{ env.TEST_TYPE }} tests logs path: | @@ -1369,7 +1370,7 @@ jobs: - name: Test Report id: test_report uses: dorny/test-reporter@v1 - if: always() + if: !cancelled() with: name: splunk ${{ matrix.splunk.version }} ${{ env.TEST_TYPE }} test report path: "${{ needs.setup.outputs.directory-path }}/test-results/*.xml" @@ -1388,7 +1389,7 @@ jobs: run-ui-tests: if: | - always() && + !cancelled() && needs.build.result == 'success' && needs.test-inventory.outputs.ui == 'true' && (needs.setup-workflow.outputs.execute-ui == 'Yes' || needs.setup-workflow.outputs.execute-labeled-ui == 'true') @@ -1479,13 +1480,13 @@ jobs: k8s-manifests-branch: ${{ needs.setup.outputs.k8s-manifests-branch }} - name: Read secrets from AWS Secrets Manager again into environment variables in case credential rotation id: update-argo-token - if: always() + if: !cancelled() run: | ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id ta-staging-github-workflow-automation-token | jq -r '.SecretString') echo "argo-token=$ARGO_TOKEN" >> "$GITHUB_OUTPUT" - name: Check if pod was deleted id: is-pod-deleted - if: always() + if: !cancelled() shell: bash env: ARGO_TOKEN: ${{ steps.update-argo-token.outputs.argo-token }} @@ -1499,7 +1500,7 @@ jobs: shell: bash env: ARGO_TOKEN: ${{ steps.update-argo-token.outputs.argo-token }} - if: always() + if: !cancelled() run: | set -o xtrace set +e @@ -1516,7 +1517,7 @@ jobs: - name: check if workflow completed env: ARGO_TOKEN: ${{ steps.update-argo-token.outputs.argo-token }} - if: always() + if: !cancelled() shell: bash run: | set +e @@ -1535,13 +1536,13 @@ jobs: ARGO_STATUS=$(argo get "${WORKFLOW_NAME}" -n workflows -o json | jq -r '.status.phase') done - name: pull artifacts from s3 bucket - if: always() + if: !cancelled() run: | echo "pulling artifacts" aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/artifacts-${{ steps.create-job-name.outputs.job-name }}/${{ 
steps.create-job-name.outputs.job-name }}.tgz ${{ needs.setup.outputs.directory-path }}/ tar -xf ${{ needs.setup.outputs.directory-path }}/${{ steps.create-job-name.outputs.job-name }}.tgz -C ${{ needs.setup.outputs.directory-path }} - name: pull logs from s3 bucket - if: always() + if: !cancelled() run: | # shellcheck disable=SC2157 if [ -z "${{ steps.retry-wf.outputs.workflow-name }}" ]; then @@ -1553,13 +1554,13 @@ jobs: mkdir -p ${{ needs.setup.outputs.directory-path }}/argo-logs aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/${WORKFLOW_NAME}/ ${{ needs.setup.outputs.directory-path }}/argo-logs/ --recursive - uses: actions/upload-artifact@v3 - if: always() + if: !cancelled() with: name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} ${{ matrix.browser }} ${{ matrix.vendor-version.image }} tests artifacts path: | ${{ needs.setup.outputs.directory-path }}/test-results - uses: actions/upload-artifact@v3 - if: always() + if: !cancelled() with: name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} ${{ matrix.browser }} ${{ matrix.vendor-version.image }} tests logs path: | @@ -1567,7 +1568,7 @@ jobs: - name: Test Report id: test_report uses: dorny/test-reporter@v1 - if: always() + if: !cancelled() with: name: splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} ${{ matrix.browser }} ${{ matrix.vendor-version.image }} test report path: "${{ needs.setup.outputs.directory-path }}/test-results/*.xml" @@ -1586,7 +1587,7 @@ jobs: run-modinput-tests: if: | - always() && + !cancelled() && needs.build.result == 'success' && needs.test-inventory.outputs.modinput_functional == 'true' && (needs.setup-workflow.outputs.execute-modinput_functional == 'Yes' || needs.setup-workflow.outputs.execute-labeled-modinput == 'true') @@ -1690,13 +1691,13 @@ jobs: k8s-manifests-branch: ${{ needs.setup.outputs.k8s-manifests-branch }} - name: Read secrets from AWS Secrets Manager again into environment variables in case credential rotation id: update-argo-token - if: always() + if: !cancelled() run: | ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id ta-staging-github-workflow-automation-token | jq -r '.SecretString') echo "argo-token=$ARGO_TOKEN" >> "$GITHUB_OUTPUT" - name: Check if pod was deleted id: is-pod-deleted - if: always() + if: !cancelled() shell: bash env: ARGO_TOKEN: ${{ steps.update-argo-token.outputs.argo-token }} @@ -1710,7 +1711,7 @@ jobs: shell: bash env: ARGO_TOKEN: ${{ steps.update-argo-token.outputs.argo-token }} - if: always() + if: !cancelled() run: | set -o xtrace set +e @@ -1727,7 +1728,7 @@ jobs: - name: check if workflow completed env: ARGO_TOKEN: ${{ steps.update-argo-token.outputs.argo-token }} - if: always() + if: !cancelled() shell: bash run: | set +e @@ -1746,13 +1747,13 @@ jobs: ARGO_STATUS=$(argo get "${WORKFLOW_NAME}" -n workflows -o json | jq -r '.status.phase') done - name: pull artifacts from s3 bucket - if: always() + if: !cancelled() run: | echo "pulling artifacts" aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/artifacts-${{ steps.create-job-name.outputs.job-name }}/${{ steps.create-job-name.outputs.job-name }}.tgz ${{ needs.setup.outputs.directory-path }}/ tar -xf ${{ needs.setup.outputs.directory-path }}/${{ steps.create-job-name.outputs.job-name }}.tgz -C ${{ needs.setup.outputs.directory-path }} - name: pull logs from s3 bucket - if: always() + if: !cancelled() run: | # shellcheck disable=SC2157 if [ 
-z "${{ steps.retry-wf.outputs.workflow-name }}" ]; then @@ -1764,13 +1765,13 @@ jobs: mkdir -p ${{ needs.setup.outputs.directory-path }}/argo-logs aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/${WORKFLOW_NAME}/ ${{ needs.setup.outputs.directory-path }}/argo-logs/ --recursive - uses: actions/upload-artifact@v3 - if: always() + if: !cancelled() with: name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} ${{ matrix.vendor-version.image }} tests artifacts path: | ${{ needs.setup.outputs.directory-path }}/test-results - uses: actions/upload-artifact@v3 - if: always() + if: !cancelled() with: name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} ${{ matrix.vendor-version.image }} tests logs path: | @@ -1778,7 +1779,7 @@ jobs: - name: Test Report id: test_report uses: dorny/test-reporter@v1 - if: always() + if: !cancelled() with: name: splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} ${{ matrix.vendor-version.image }} test report path: "${{ needs.setup.outputs.directory-path }}/test-results/*.xml" @@ -1797,7 +1798,7 @@ jobs: run-scripted-input-tests-full-matrix: if: | - always() && + !cancelled() && needs.build.result == 'success' && needs.test-inventory.outputs.scripted_inputs == 'true' && ( github.base_ref == 'main' || github.ref_name == 'main' ) && (needs.setup-workflow.outputs.execute-scripted_inputs == 'Yes' || needs.setup-workflow.outputs.execute-labeled-scripted_inputs == 'true') @@ -1901,7 +1902,7 @@ jobs: k8s-manifests-branch: ${{ needs.setup.outputs.k8s-manifests-branch }} - name: Check if pod was deleted id: is-pod-deleted - if: always() + if: !cancelled() shell: bash env: ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} @@ -1915,7 +1916,7 @@ jobs: shell: bash env: ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} - if: always() + if: !cancelled() run: | set -o xtrace set +e @@ -1932,7 +1933,7 @@ jobs: - name: check if workflow completed env: ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} - if: always() + if: !cancelled() shell: bash run: | set +e @@ -1951,13 +1952,13 @@ jobs: ARGO_STATUS=$(argo get "${WORKFLOW_NAME}" -n workflows -o json | jq -r '.status.phase') done - name: pull artifacts from s3 bucket - if: always() + if: !cancelled() run: | echo "pulling artifacts" aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/artifacts-${{ steps.create-job-name.outputs.job-name }}/${{ steps.create-job-name.outputs.job-name }}.tgz ${{ needs.setup.outputs.directory-path }}/ tar -xf ${{ needs.setup.outputs.directory-path }}/${{ steps.create-job-name.outputs.job-name }}.tgz -C ${{ needs.setup.outputs.directory-path }} - name: pull logs from s3 bucket - if: always() + if: !cancelled() run: | # shellcheck disable=SC2157 if [ -z "${{ steps.retry-wf.outputs.workflow-name }}" ]; then @@ -1969,13 +1970,13 @@ jobs: mkdir -p ${{ needs.setup.outputs.directory-path }}/argo-logs aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/${WORKFLOW_NAME}/ ${{ needs.setup.outputs.directory-path }}/argo-logs/ --recursive - uses: actions/upload-artifact@v3 - if: always() + if: !cancelled() with: name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} ${{ matrix.vendor-version.image }} ${{ steps.os-name-version.outputs.os-name }} ${{ steps.os-name-version.outputs.os-version }} tests artifacts path: | ${{ needs.setup.outputs.directory-path }}/test-results - uses: 
actions/upload-artifact@v3 - if: always() + if: !cancelled() with: name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} ${{ matrix.vendor-version.image }} ${{ steps.os-name-version.outputs.os-name }} ${{ steps.os-name-version.outputs.os-version }} tests logs path: | @@ -1983,7 +1984,7 @@ jobs: - name: Test Report id: test_report uses: dorny/test-reporter@v1 - if: always() + if: !cancelled() with: name: splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} ${{ matrix.vendor-version.image }} ${{ steps.os-name-version.outputs.os-name }} ${{ steps.os-name-version.outputs.os-version }} test report path: "${{ needs.setup.outputs.directory-path }}/test-results/*.xml" @@ -2002,7 +2003,7 @@ jobs: run-scripted-input-tests-canary: if: | - always() && + !cancelled() && needs.build.result == 'success' && needs.test-inventory.outputs.scripted_inputs == 'true' && ( github.base_ref == 'develop' || github.ref_name == 'develop' ) && (needs.setup-workflow.outputs.execute-scripted_inputs == 'Yes' || needs.setup-workflow.outputs.execute-labeled-scripted_inputs == 'true') @@ -2105,7 +2106,7 @@ jobs: k8s-manifests-branch: ${{ needs.setup.outputs.k8s-manifests-branch }} - name: Check if pod was deleted id: is-pod-deleted - if: always() + if: !cancelled() shell: bash env: ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} @@ -2119,7 +2120,7 @@ jobs: shell: bash env: ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} - if: always() + if: !cancelled() run: | set -o xtrace set +e @@ -2136,7 +2137,7 @@ jobs: - name: check if workflow completed env: ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} - if: always() + if: !cancelled() shell: bash run: | set +e @@ -2155,13 +2156,13 @@ jobs: ARGO_STATUS=$(argo get "${WORKFLOW_NAME}" -n workflows -o json | jq -r '.status.phase') done - name: pull artifacts from s3 bucket - if: always() + if: !cancelled() run: | echo "pulling artifacts" aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/artifacts-${{ steps.create-job-name.outputs.job-name }}/${{ steps.create-job-name.outputs.job-name }}.tgz ${{ needs.setup.outputs.directory-path }}/ tar -xf ${{ needs.setup.outputs.directory-path }}/${{ steps.create-job-name.outputs.job-name }}.tgz -C ${{ needs.setup.outputs.directory-path }} - name: pull logs from s3 bucket - if: always() + if: !cancelled() run: | # shellcheck disable=SC2157 if [ -z "${{ steps.retry-wf.outputs.workflow-name }}" ]; then @@ -2173,13 +2174,13 @@ jobs: mkdir -p ${{ needs.setup.outputs.directory-path }}/argo-logs aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/${WORKFLOW_NAME}/ ${{ needs.setup.outputs.directory-path }}/argo-logs/ --recursive - uses: actions/upload-artifact@v3 - if: always() + if: !cancelled() with: name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} ${{ matrix.vendor-version.image }} ${{ steps.os-name-version.outputs.os-name }} ${{ steps.os-name-version.outputs.os-version }} tests artifacts path: | ${{ needs.setup.outputs.directory-path }}/test-results - uses: actions/upload-artifact@v3 - if: always() + if: !cancelled() with: name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} ${{ matrix.vendor-version.image }} ${{ steps.os-name-version.outputs.os-name }} ${{ steps.os-name-version.outputs.os-version }} tests logs path: | @@ -2187,7 +2188,7 @@ jobs: - name: Test Report id: test_report uses: 
dorny/test-reporter@v1 - if: always() + if: !cancelled() with: name: splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} ${{ matrix.vendor-version.image }} ${{ steps.os-name-version.outputs.os-name }} ${{ steps.os-name-version.outputs.os-version }} test report path: "${{ needs.setup.outputs.directory-path }}/test-results/*.xml" @@ -2206,7 +2207,7 @@ jobs: run-escu-tests: if: | - always() && + !cancelled() && needs.build.result == 'success' && needs.test-inventory.outputs.escu == 'true' && ( github.base_ref == 'main' || github.ref_name == 'main' || github.base_ref == 'develop' || github.ref_name == 'develop' ) && (needs.setup-workflow.outputs.execute-escu == 'Yes' || needs.setup-workflow.outputs.execute-labeled-escu == 'true') @@ -2419,7 +2420,7 @@ jobs: ${{ needs.setup.outputs.directory-path }}/diag* pre-publish: - if: always() + if: !cancelled() needs: - meta - compliance-copyrights @@ -2458,7 +2459,7 @@ jobs: exit 1 publish: - if: always() && needs.pre-publish.result == 'success' && github.event_name != 'pull_request' && github.event_name != 'schedule' + if: !cancelled() && needs.pre-publish.result == 'success' && github.event_name != 'pull_request' && github.event_name != 'schedule' needs: - pre-publish - run-escu-tests diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 10f00f054..e80be3ed8 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -4,7 +4,7 @@ repos: - id: actionlint name: actionlint entry: actionlint - args: [-ignore, 'property ".+" is not defined in object type', -ignore, 'receiver of object dereference "version" must be type of object but got "string"'] + args: [-ignore, 'property ".+" is not defined in object type', -ignore, 'receiver of object dereference "version" must be type of object but got "string"', --ignore, 'could not parse as YAML'] language: script types: ["yaml"] files: ^.github/workflows/ From e5d1911a550415178153b228e8edede4abe69a17 Mon Sep 17 00:00:00 2001 From: mkolasinski-splunk Date: Thu, 20 Jul 2023 16:02:09 +0200 Subject: [PATCH 11/15] chore: syntax fixes --- .../workflows/reusable-build-test-release.yml | 180 +++++++----------- .pre-commit-config.yaml | 2 +- 2 files changed, 72 insertions(+), 110 deletions(-) diff --git a/.github/workflows/reusable-build-test-release.yml b/.github/workflows/reusable-build-test-release.yml index fa640e88c..e85c70f21 100644 --- a/.github/workflows/reusable-build-test-release.yml +++ b/.github/workflows/reusable-build-test-release.yml @@ -494,9 +494,7 @@ jobs: - semgrep - run-unit-tests - fossa-scan - if: | - !cancelled() && - (needs.run-unit-tests.result == 'success' || needs.run-unit-tests.result == 'skipped') + if: ${{ !cancelled() && (needs.run-unit-tests.result == 'success' || needs.run-unit-tests.result == 'skipped') }} outputs: buildname: ${{ steps.buildupload.outputs.name }} permissions: @@ -597,13 +595,13 @@ jobs: with: name: artifact-openapi path: ${{ github.workspace }}/${{ steps.uccgen.outputs.OUTPUT }}/static/openapi.json - if: !cancelled() + if: ${{ !cancelled() }} - name: artifact-splunk-base uses: actions/upload-artifact@v3 with: name: package-splunkbase path: ${{ steps.slim.outputs.OUTPUT }} - if: !cancelled() + if: ${{ !cancelled() }} - name: upload-build-to-s3 id: buildupload env: @@ -619,7 +617,7 @@ jobs: with: name: package-deployment path: build/package/deployment** - if: !cancelled() + if: ${{ !cancelled() }} build-311: runs-on: ubuntu-latest @@ -704,9 +702,7 @@ jobs: continue-on-error: true name: security-virustotal 
needs: build - if: | - !cancelled() && - needs.build.result == 'success' + if: ${{ !cancelled() && needs.build.result == 'success' }} runs-on: ubuntu-latest steps: - uses: actions/download-artifact@v3 @@ -725,10 +721,7 @@ jobs: needs: - build - test-inventory - if: | - !cancelled() && - needs.build.result == 'success' && - needs.test-inventory.outputs.requirement_test == 'true' + if: ${{ !cancelled() && needs.build.result == 'success' && needs.test-inventory.outputs.requirement_test == 'true' }} permissions: actions: read deployments: read @@ -747,7 +740,7 @@ jobs: with: input-files: tests/requirement_test/logs - name: Archive staging artifacts - if: !cancelled() + if: ${{ !cancelled() }} uses: actions/upload-artifact@v3 with: name: test-results @@ -757,9 +750,7 @@ jobs: appinspect: name: quality-appinspect-${{ matrix.tags }} needs: build - if: | - !cancelled() && - needs.build.result == 'success' + if: ${{ !cancelled() && needs.build.result == 'success' }} runs-on: ubuntu-latest strategy: fail-fast: false @@ -786,7 +777,7 @@ jobs: included_tags: ${{ matrix.tags }} result_file: appinspect_result_${{ matrix.tags }}.json - name: upload-appinspect-report - if: !cancelled() + if: ${{ !cancelled() }} uses: actions/upload-artifact@v3 with: name: appinspect_${{ matrix.tags }}_checks.json @@ -804,10 +795,7 @@ jobs: needs: - security-virustotal - meta - if: | - !cancelled() && - needs.security-virustotal.result == 'success' && - needs.meta.result == 'success' + if: ${{ !cancelled() && needs.security-virustotal.result == 'success' && needs.meta.result == 'success' }} outputs: artifact: ${{ steps.artifactid.outputs.result }} permissions: @@ -882,9 +870,7 @@ jobs: needs: - build - test-inventory - if: | - !cancelled() && - needs.build.result == 'success' + if: ${{ !cancelled() && needs.build.result == 'success' }} runs-on: ubuntu-latest outputs: argo-server: ${{ steps.test-setup.outputs.argo-server }} @@ -1066,12 +1052,12 @@ jobs: k8s-manifests-branch: ${{ needs.setup.outputs.k8s-manifests-branch }} - name: Read secrets from AWS Secrets Manager again into environment variables in case credential rotation id: update-argo-token - if: !cancelled() && !cancelled() + if: ${{ !cancelled() }} run: | ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id ta-staging-github-workflow-automation-token | jq -r '.SecretString') echo "argo-token=$ARGO_TOKEN" >> "$GITHUB_OUTPUT" - name: print workflow name - if: !cancelled() + if: ${{ !cancelled() }} run: | echo "workflow name: ${{ steps.run-tests.outputs.workflow-name }}" - name: Check workflow cancel @@ -1083,7 +1069,7 @@ jobs: cancel_response=`argo submit -v -o json --from wftmpl/cancel-workflow -n workflows -l workflows.argoproj.io/workflow-template=cancel-workflow --argo-base-href '' -p workflow-to-cancel=${{ steps.run-tests.outputs.workflow-name }}` - name: Check if pod was deleted id: is-pod-deleted - if: !cancelled() && !cancelled() + if: ${{ !cancelled() }} shell: bash env: ARGO_TOKEN: ${{ steps.update-argo-token.outputs.argo-token }} @@ -1097,7 +1083,7 @@ jobs: shell: bash env: ARGO_TOKEN: ${{ steps.update-argo-token.outputs.argo-token }} - if: !cancelled() && !cancelled() + if: ${{ !cancelled() }} run: | set -o xtrace set +e @@ -1115,7 +1101,7 @@ jobs: env: ARGO_TOKEN: ${{ steps.update-argo-token.outputs.argo-token }} shell: bash - if: !cancelled() && !cancelled() + if: ${{ !cancelled() }} run: | set +e # shellcheck disable=SC2157 @@ -1133,13 +1119,13 @@ jobs: ARGO_STATUS=$(argo get "${WORKFLOW_NAME}" -n workflows -o json | jq -r '.status.phase') 
done - name: pull artifacts from s3 bucket - if: !cancelled() && !cancelled() + if: ${{ !cancelled() }} run: | echo "pulling artifacts" aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/artifacts-${{ steps.create-job-name.outputs.job-name }}/${{ steps.create-job-name.outputs.job-name }}.tgz ${{ needs.setup.outputs.directory-path }}/ tar -xf ${{ needs.setup.outputs.directory-path }}/${{ steps.create-job-name.outputs.job-name }}.tgz -C ${{ needs.setup.outputs.directory-path }} - name: pull logs from s3 bucket - if: !cancelled() && !cancelled() + if: ${{ !cancelled() }} run: | # shellcheck disable=SC2157 if [ -z "${{ steps.retry-wf.outputs.workflow-name }}" ]; then @@ -1151,13 +1137,13 @@ jobs: mkdir -p ${{ needs.setup.outputs.directory-path }}/argo-logs aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/${WORKFLOW_NAME}/ ${{ needs.setup.outputs.directory-path }}/argo-logs/ --recursive - uses: actions/upload-artifact@v3 - if: !cancelled() && !cancelled() + if: ${{ !cancelled() }} with: name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} tests artifacts path: | ${{ needs.setup.outputs.directory-path }}/test-results - uses: actions/upload-artifact@v3 - if: !cancelled() && !cancelled() + if: ${{ !cancelled() }} with: name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} tests logs path: | @@ -1179,7 +1165,7 @@ jobs: - name: Test Report id: test_report uses: dorny/test-reporter@v1 - if: !cancelled() && !cancelled() + if: ${{ !cancelled() }} with: name: splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} test report path: "${{ needs.setup.outputs.directory-path }}/test-results/*.xml" @@ -1198,11 +1184,7 @@ jobs: run-requirement-tests: - if: | - !cancelled() && - needs.build.result == 'success' && - needs.test-inventory.outputs.requirement_test == 'true' && - (needs.setup-workflow.outputs.execute-requirement_test == 'Yes' || needs.setup-workflow.outputs.execute-labeled-requirement == 'true') + if: ${{ !cancelled() && needs.build.result == 'success' && needs.test-inventory.outputs.requirement_test == 'true' && (needs.setup-workflow.outputs.execute-requirement_test == 'Yes' || needs.setup-workflow.outputs.execute-labeled-requirement == 'true') }} needs: - build - test-inventory @@ -1288,7 +1270,7 @@ jobs: k8s-manifests-branch: ${{ needs.setup.outputs.k8s-manifests-branch }} - name: Check if pod was deleted id: is-pod-deleted - if: !cancelled() + if: ${{ !cancelled() }} shell: bash env: ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} @@ -1302,7 +1284,7 @@ jobs: shell: bash env: ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} - if: !cancelled() + if: ${{ !cancelled() }} run: | set -o xtrace set +e @@ -1320,7 +1302,7 @@ jobs: env: ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} shell: bash - if: !cancelled() + if: ${{ !cancelled() }} run: | set +e # shellcheck disable=SC2157 @@ -1338,13 +1320,13 @@ jobs: ARGO_STATUS=$(argo get "${WORKFLOW_NAME}" -n workflows -o json | jq -r '.status.phase') done - name: pull artifacts from s3 bucket - if: !cancelled() + if: ${{ !cancelled() }} run: | echo "pulling artifacts" aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/artifacts-${{ steps.create-job-name.outputs.job-name }}/${{ steps.create-job-name.outputs.job-name }}.tgz ${{ needs.setup.outputs.directory-path }}/ tar -xf ${{ needs.setup.outputs.directory-path }}/${{ steps.create-job-name.outputs.job-name }}.tgz -C ${{ 
needs.setup.outputs.directory-path }} - name: pull logs from s3 bucket - if: !cancelled() + if: ${{ !cancelled() }} run: | # shellcheck disable=SC2157 if [ -z "${{ steps.retry-wf.outputs.workflow-name }}" ]; then @@ -1356,13 +1338,13 @@ jobs: mkdir -p ${{ needs.setup.outputs.directory-path }}/argo-logs aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/${WORKFLOW_NAME}/ ${{ needs.setup.outputs.directory-path }}/argo-logs/ --recursive - uses: actions/upload-artifact@v3 - if: !cancelled() + if: ${{ !cancelled() }} with: name: archive splunk ${{ matrix.splunk.version }} ${{ env.TEST_TYPE }} tests artifacts path: | ${{ needs.setup.outputs.directory-path }}/test-results - uses: actions/upload-artifact@v3 - if: !cancelled() + if: ${{ !cancelled() }} with: name: archive splunk ${{ matrix.splunk.version }} ${{ env.TEST_TYPE }} tests logs path: | @@ -1370,7 +1352,7 @@ jobs: - name: Test Report id: test_report uses: dorny/test-reporter@v1 - if: !cancelled() + if: ${{ !cancelled() }} with: name: splunk ${{ matrix.splunk.version }} ${{ env.TEST_TYPE }} test report path: "${{ needs.setup.outputs.directory-path }}/test-results/*.xml" @@ -1388,11 +1370,7 @@ jobs: ${{ needs.setup.outputs.directory-path }}/diag* run-ui-tests: - if: | - !cancelled() && - needs.build.result == 'success' && - needs.test-inventory.outputs.ui == 'true' && - (needs.setup-workflow.outputs.execute-ui == 'Yes' || needs.setup-workflow.outputs.execute-labeled-ui == 'true') + if: ${{ !cancelled() && needs.build.result == 'success' && needs.test-inventory.outputs.ui == 'true' && (needs.setup-workflow.outputs.execute-ui == 'Yes' || needs.setup-workflow.outputs.execute-labeled-ui == 'true') }} needs: - build - test-inventory @@ -1480,13 +1458,13 @@ jobs: k8s-manifests-branch: ${{ needs.setup.outputs.k8s-manifests-branch }} - name: Read secrets from AWS Secrets Manager again into environment variables in case credential rotation id: update-argo-token - if: !cancelled() + if: ${{ !cancelled() }} run: | ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id ta-staging-github-workflow-automation-token | jq -r '.SecretString') echo "argo-token=$ARGO_TOKEN" >> "$GITHUB_OUTPUT" - name: Check if pod was deleted id: is-pod-deleted - if: !cancelled() + if: ${{ !cancelled() }} shell: bash env: ARGO_TOKEN: ${{ steps.update-argo-token.outputs.argo-token }} @@ -1500,7 +1478,7 @@ jobs: shell: bash env: ARGO_TOKEN: ${{ steps.update-argo-token.outputs.argo-token }} - if: !cancelled() + if: ${{ !cancelled() }} run: | set -o xtrace set +e @@ -1517,7 +1495,7 @@ jobs: - name: check if workflow completed env: ARGO_TOKEN: ${{ steps.update-argo-token.outputs.argo-token }} - if: !cancelled() + if: ${{ !cancelled() }} shell: bash run: | set +e @@ -1536,13 +1514,13 @@ jobs: ARGO_STATUS=$(argo get "${WORKFLOW_NAME}" -n workflows -o json | jq -r '.status.phase') done - name: pull artifacts from s3 bucket - if: !cancelled() + if: ${{ !cancelled() }} run: | echo "pulling artifacts" aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/artifacts-${{ steps.create-job-name.outputs.job-name }}/${{ steps.create-job-name.outputs.job-name }}.tgz ${{ needs.setup.outputs.directory-path }}/ tar -xf ${{ needs.setup.outputs.directory-path }}/${{ steps.create-job-name.outputs.job-name }}.tgz -C ${{ needs.setup.outputs.directory-path }} - name: pull logs from s3 bucket - if: !cancelled() + if: ${{ !cancelled() }} run: | # shellcheck disable=SC2157 if [ -z "${{ steps.retry-wf.outputs.workflow-name }}" ]; then @@ -1554,13 +1532,13 @@ jobs: mkdir -p ${{ 
needs.setup.outputs.directory-path }}/argo-logs aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/${WORKFLOW_NAME}/ ${{ needs.setup.outputs.directory-path }}/argo-logs/ --recursive - uses: actions/upload-artifact@v3 - if: !cancelled() + if: ${{ !cancelled() }} with: name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} ${{ matrix.browser }} ${{ matrix.vendor-version.image }} tests artifacts path: | ${{ needs.setup.outputs.directory-path }}/test-results - uses: actions/upload-artifact@v3 - if: !cancelled() + if: ${{ !cancelled() }} with: name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} ${{ matrix.browser }} ${{ matrix.vendor-version.image }} tests logs path: | @@ -1568,7 +1546,7 @@ jobs: - name: Test Report id: test_report uses: dorny/test-reporter@v1 - if: !cancelled() + if: ${{ !cancelled() }} with: name: splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} ${{ matrix.browser }} ${{ matrix.vendor-version.image }} test report path: "${{ needs.setup.outputs.directory-path }}/test-results/*.xml" @@ -1586,11 +1564,7 @@ jobs: ${{ needs.setup.outputs.directory-path }}/diag* run-modinput-tests: - if: | - !cancelled() && - needs.build.result == 'success' && - needs.test-inventory.outputs.modinput_functional == 'true' && - (needs.setup-workflow.outputs.execute-modinput_functional == 'Yes' || needs.setup-workflow.outputs.execute-labeled-modinput == 'true') + if: ${{ !cancelled() && needs.build.result == 'success' && needs.test-inventory.outputs.modinput_functional == 'true' && (needs.setup-workflow.outputs.execute-modinput_functional == 'Yes' || needs.setup-workflow.outputs.execute-labeled-modinput == 'true') }} needs: - build - test-inventory @@ -1691,13 +1665,13 @@ jobs: k8s-manifests-branch: ${{ needs.setup.outputs.k8s-manifests-branch }} - name: Read secrets from AWS Secrets Manager again into environment variables in case credential rotation id: update-argo-token - if: !cancelled() + if: ${{ !cancelled() }} run: | ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id ta-staging-github-workflow-automation-token | jq -r '.SecretString') echo "argo-token=$ARGO_TOKEN" >> "$GITHUB_OUTPUT" - name: Check if pod was deleted id: is-pod-deleted - if: !cancelled() + if: ${{ !cancelled() }} shell: bash env: ARGO_TOKEN: ${{ steps.update-argo-token.outputs.argo-token }} @@ -1711,7 +1685,7 @@ jobs: shell: bash env: ARGO_TOKEN: ${{ steps.update-argo-token.outputs.argo-token }} - if: !cancelled() + if: ${{ !cancelled() }} run: | set -o xtrace set +e @@ -1728,7 +1702,7 @@ jobs: - name: check if workflow completed env: ARGO_TOKEN: ${{ steps.update-argo-token.outputs.argo-token }} - if: !cancelled() + if: ${{ !cancelled() }} shell: bash run: | set +e @@ -1747,13 +1721,13 @@ jobs: ARGO_STATUS=$(argo get "${WORKFLOW_NAME}" -n workflows -o json | jq -r '.status.phase') done - name: pull artifacts from s3 bucket - if: !cancelled() + if: ${{ !cancelled() }} run: | echo "pulling artifacts" aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/artifacts-${{ steps.create-job-name.outputs.job-name }}/${{ steps.create-job-name.outputs.job-name }}.tgz ${{ needs.setup.outputs.directory-path }}/ tar -xf ${{ needs.setup.outputs.directory-path }}/${{ steps.create-job-name.outputs.job-name }}.tgz -C ${{ needs.setup.outputs.directory-path }} - name: pull logs from s3 bucket - if: !cancelled() + if: ${{ !cancelled() }} run: | # shellcheck disable=SC2157 if [ -z 
"${{ steps.retry-wf.outputs.workflow-name }}" ]; then @@ -1765,13 +1739,13 @@ jobs: mkdir -p ${{ needs.setup.outputs.directory-path }}/argo-logs aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/${WORKFLOW_NAME}/ ${{ needs.setup.outputs.directory-path }}/argo-logs/ --recursive - uses: actions/upload-artifact@v3 - if: !cancelled() + if: ${{ !cancelled() }} with: name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} ${{ matrix.vendor-version.image }} tests artifacts path: | ${{ needs.setup.outputs.directory-path }}/test-results - uses: actions/upload-artifact@v3 - if: !cancelled() + if: ${{ !cancelled() }} with: name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} ${{ matrix.vendor-version.image }} tests logs path: | @@ -1779,7 +1753,7 @@ jobs: - name: Test Report id: test_report uses: dorny/test-reporter@v1 - if: !cancelled() + if: ${{ !cancelled() }} with: name: splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} ${{ matrix.vendor-version.image }} test report path: "${{ needs.setup.outputs.directory-path }}/test-results/*.xml" @@ -1797,11 +1771,7 @@ jobs: ${{ needs.setup.outputs.directory-path }}/diag* run-scripted-input-tests-full-matrix: - if: | - !cancelled() && - needs.build.result == 'success' && - needs.test-inventory.outputs.scripted_inputs == 'true' && - ( github.base_ref == 'main' || github.ref_name == 'main' ) && (needs.setup-workflow.outputs.execute-scripted_inputs == 'Yes' || needs.setup-workflow.outputs.execute-labeled-scripted_inputs == 'true') + if: ${{ !cancelled() && needs.build.result == 'success' && needs.test-inventory.outputs.scripted_inputs == 'true' && ( github.base_ref == 'main' || github.ref_name == 'main' ) && (needs.setup-workflow.outputs.execute-scripted_inputs == 'Yes' || needs.setup-workflow.outputs.execute-labeled-scripted_inputs == 'true') }} needs: - build - test-inventory @@ -1902,7 +1872,7 @@ jobs: k8s-manifests-branch: ${{ needs.setup.outputs.k8s-manifests-branch }} - name: Check if pod was deleted id: is-pod-deleted - if: !cancelled() + if: ${{ !cancelled() }} shell: bash env: ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} @@ -1916,7 +1886,7 @@ jobs: shell: bash env: ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} - if: !cancelled() + if: ${{ !cancelled() }} run: | set -o xtrace set +e @@ -1933,7 +1903,7 @@ jobs: - name: check if workflow completed env: ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} - if: !cancelled() + if: ${{ !cancelled() }} shell: bash run: | set +e @@ -1952,13 +1922,13 @@ jobs: ARGO_STATUS=$(argo get "${WORKFLOW_NAME}" -n workflows -o json | jq -r '.status.phase') done - name: pull artifacts from s3 bucket - if: !cancelled() + if: ${{ !cancelled() }} run: | echo "pulling artifacts" aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/artifacts-${{ steps.create-job-name.outputs.job-name }}/${{ steps.create-job-name.outputs.job-name }}.tgz ${{ needs.setup.outputs.directory-path }}/ tar -xf ${{ needs.setup.outputs.directory-path }}/${{ steps.create-job-name.outputs.job-name }}.tgz -C ${{ needs.setup.outputs.directory-path }} - name: pull logs from s3 bucket - if: !cancelled() + if: ${{ !cancelled() }} run: | # shellcheck disable=SC2157 if [ -z "${{ steps.retry-wf.outputs.workflow-name }}" ]; then @@ -1970,13 +1940,13 @@ jobs: mkdir -p ${{ needs.setup.outputs.directory-path }}/argo-logs aws s3 cp s3://${{ needs.setup.outputs.s3-bucket 
}}/${WORKFLOW_NAME}/ ${{ needs.setup.outputs.directory-path }}/argo-logs/ --recursive - uses: actions/upload-artifact@v3 - if: !cancelled() + if: ${{ !cancelled() }} with: name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} ${{ matrix.vendor-version.image }} ${{ steps.os-name-version.outputs.os-name }} ${{ steps.os-name-version.outputs.os-version }} tests artifacts path: | ${{ needs.setup.outputs.directory-path }}/test-results - uses: actions/upload-artifact@v3 - if: !cancelled() + if: ${{ !cancelled() }} with: name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} ${{ matrix.vendor-version.image }} ${{ steps.os-name-version.outputs.os-name }} ${{ steps.os-name-version.outputs.os-version }} tests logs path: | @@ -1984,7 +1954,7 @@ jobs: - name: Test Report id: test_report uses: dorny/test-reporter@v1 - if: !cancelled() + if: ${{ !cancelled() }} with: name: splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} ${{ matrix.vendor-version.image }} ${{ steps.os-name-version.outputs.os-name }} ${{ steps.os-name-version.outputs.os-version }} test report path: "${{ needs.setup.outputs.directory-path }}/test-results/*.xml" @@ -2002,11 +1972,7 @@ jobs: ${{ needs.setup.outputs.directory-path }}/diag* run-scripted-input-tests-canary: - if: | - !cancelled() && - needs.build.result == 'success' && - needs.test-inventory.outputs.scripted_inputs == 'true' && - ( github.base_ref == 'develop' || github.ref_name == 'develop' ) && (needs.setup-workflow.outputs.execute-scripted_inputs == 'Yes' || needs.setup-workflow.outputs.execute-labeled-scripted_inputs == 'true') + if: ${{ !cancelled() && needs.build.result == 'success' && needs.test-inventory.outputs.scripted_inputs == 'true' && ( github.base_ref == 'develop' || github.ref_name == 'develop' ) && (needs.setup-workflow.outputs.execute-scripted_inputs == 'Yes' || needs.setup-workflow.outputs.execute-labeled-scripted_inputs == 'true') }} needs: - build - test-inventory @@ -2106,7 +2072,7 @@ jobs: k8s-manifests-branch: ${{ needs.setup.outputs.k8s-manifests-branch }} - name: Check if pod was deleted id: is-pod-deleted - if: !cancelled() + if: ${{ !cancelled() }} shell: bash env: ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} @@ -2120,7 +2086,7 @@ jobs: shell: bash env: ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} - if: !cancelled() + if: ${{ !cancelled() }} run: | set -o xtrace set +e @@ -2137,7 +2103,7 @@ jobs: - name: check if workflow completed env: ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} - if: !cancelled() + if: ${{ !cancelled() }} shell: bash run: | set +e @@ -2156,13 +2122,13 @@ jobs: ARGO_STATUS=$(argo get "${WORKFLOW_NAME}" -n workflows -o json | jq -r '.status.phase') done - name: pull artifacts from s3 bucket - if: !cancelled() + if: ${{ !cancelled() }} run: | echo "pulling artifacts" aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/artifacts-${{ steps.create-job-name.outputs.job-name }}/${{ steps.create-job-name.outputs.job-name }}.tgz ${{ needs.setup.outputs.directory-path }}/ tar -xf ${{ needs.setup.outputs.directory-path }}/${{ steps.create-job-name.outputs.job-name }}.tgz -C ${{ needs.setup.outputs.directory-path }} - name: pull logs from s3 bucket - if: !cancelled() + if: ${{ !cancelled() }} run: | # shellcheck disable=SC2157 if [ -z "${{ steps.retry-wf.outputs.workflow-name }}" ]; then @@ -2174,13 +2140,13 @@ jobs: mkdir -p ${{ 
needs.setup.outputs.directory-path }}/argo-logs aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/${WORKFLOW_NAME}/ ${{ needs.setup.outputs.directory-path }}/argo-logs/ --recursive - uses: actions/upload-artifact@v3 - if: !cancelled() + if: ${{ !cancelled() }} with: name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} ${{ matrix.vendor-version.image }} ${{ steps.os-name-version.outputs.os-name }} ${{ steps.os-name-version.outputs.os-version }} tests artifacts path: | ${{ needs.setup.outputs.directory-path }}/test-results - uses: actions/upload-artifact@v3 - if: !cancelled() + if: ${{ !cancelled() }} with: name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} ${{ matrix.vendor-version.image }} ${{ steps.os-name-version.outputs.os-name }} ${{ steps.os-name-version.outputs.os-version }} tests logs path: | @@ -2188,7 +2154,7 @@ jobs: - name: Test Report id: test_report uses: dorny/test-reporter@v1 - if: !cancelled() + if: ${{ !cancelled() }} with: name: splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} ${{ matrix.vendor-version.image }} ${{ steps.os-name-version.outputs.os-name }} ${{ steps.os-name-version.outputs.os-version }} test report path: "${{ needs.setup.outputs.directory-path }}/test-results/*.xml" @@ -2206,11 +2172,7 @@ jobs: ${{ needs.setup.outputs.directory-path }}/diag* run-escu-tests: - if: | - !cancelled() && - needs.build.result == 'success' && - needs.test-inventory.outputs.escu == 'true' && - ( github.base_ref == 'main' || github.ref_name == 'main' || github.base_ref == 'develop' || github.ref_name == 'develop' ) && (needs.setup-workflow.outputs.execute-escu == 'Yes' || needs.setup-workflow.outputs.execute-labeled-escu == 'true') + if: ${{ !cancelled() && needs.build.result == 'success' && needs.test-inventory.outputs.escu == 'true' && ( github.base_ref == 'main' || github.ref_name == 'main' || github.base_ref == 'develop' || github.ref_name == 'develop' ) && (needs.setup-workflow.outputs.execute-escu == 'Yes' || needs.setup-workflow.outputs.execute-labeled-escu == 'true') }} needs: - build - test-inventory @@ -2420,7 +2382,7 @@ jobs: ${{ needs.setup.outputs.directory-path }}/diag* pre-publish: - if: !cancelled() + if: ${{ !cancelled() }} needs: - meta - compliance-copyrights @@ -2459,7 +2421,7 @@ jobs: exit 1 publish: - if: !cancelled() && needs.pre-publish.result == 'success' && github.event_name != 'pull_request' && github.event_name != 'schedule' + if: ${{ !cancelled() && needs.pre-publish.result == 'success' && github.event_name != 'pull_request' && github.event_name != 'schedule' }} needs: - pre-publish - run-escu-tests diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index e80be3ed8..10f00f054 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -4,7 +4,7 @@ repos: - id: actionlint name: actionlint entry: actionlint - args: [-ignore, 'property ".+" is not defined in object type', -ignore, 'receiver of object dereference "version" must be type of object but got "string"', --ignore, 'could not parse as YAML'] + args: [-ignore, 'property ".+" is not defined in object type', -ignore, 'receiver of object dereference "version" must be type of object but got "string"'] language: script types: ["yaml"] files: ^.github/workflows/ From 0522693a51380bb5c43e65f7d65b6db8bca4ea68 Mon Sep 17 00:00:00 2001 From: mkolasinski-splunk Date: Fri, 21 Jul 2023 08:36:23 +0200 Subject: [PATCH 12/15] 
chore: add cancel-workflow to all argo jobs --- .../workflows/reusable-build-test-release.yml | 60 ++++++++++++++----- 1 file changed, 45 insertions(+), 15 deletions(-) diff --git a/.github/workflows/reusable-build-test-release.yml b/.github/workflows/reusable-build-test-release.yml index e85c70f21..2f0cf213c 100644 --- a/.github/workflows/reusable-build-test-release.yml +++ b/.github/workflows/reusable-build-test-release.yml @@ -880,6 +880,7 @@ jobs: argo-href: "" argo-base-href: ${{ steps.test-setup.outputs.argo-base-href }} argo-workflow-tmpl-name: ${{ steps.test-setup.outputs.argo-workflow-tmpl-name }} + argo-cancel-workflow-tmpl-name: ${{ steps.test-setup.outputs.argo-cancel-workflow-tmpl-name }} k8s-manifests-branch: ${{ steps.test-setup.outputs.k8s-manifests-branch }} argo-namespace: ${{ steps.test-setup.outputs.argo-namespace }} addon-name: ${{ steps.test-setup.outputs.addon-name }} @@ -919,6 +920,7 @@ jobs: echo "argo-base-href=\'\'" echo "argo-namespace=workflows" echo "argo-workflow-tmpl-name=ta-workflow" + echo "argo-cancel-workflow-tmpl-name=cancel-workflow" echo "directory-path=/tmp" echo "s3-bucket=ta-staging-artifacts" echo "addon-name=\"$ADDON_NAME\"" @@ -962,10 +964,7 @@ jobs: aws s3 sync "${{ github.workspace }}/tmp/restapi_client/" "s3://ta-production-artifacts/ta-apps/$swagger_name/" --exclude "*" --include "README.md" --include "*swagger_client*" --only-show-errors run-knowledge-tests: - if: | - needs.build.result == 'success' && - needs.test-inventory.outputs.knowledge == 'true' && - (needs.setup-workflow.outputs.execute-ko == 'Yes' || needs.setup-workflow.outputs.execute-labeled-knowledge == 'true') + if: ${{ !cancelled() && needs.build.result == 'success' && needs.test-inventory.outputs.knowledge == 'true' && (needs.setup-workflow.outputs.execute-ko == 'Yes' || needs.setup-workflow.outputs.execute-labeled-knowledge == 'true') }} needs: - build - test-inventory @@ -1050,23 +1049,18 @@ jobs: sc4s-version: ${{ matrix.sc4s.version }} sc4s-docker-registry: ${{ matrix.sc4s.docker_registry }} k8s-manifests-branch: ${{ needs.setup.outputs.k8s-manifests-branch }} + - name: Check workflow cancel + env: + ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} + if: cancelled() + run: | + cancel_response=`argo submit -v -o json --from wftmpl/${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} -l workflows.argoproj.io/workflow-template=${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} --argo-base-href '' -p workflow-to-cancel=${{ steps.run-tests.outputs.workflow-name }}` - name: Read secrets from AWS Secrets Manager again into environment variables in case credential rotation id: update-argo-token if: ${{ !cancelled() }} run: | ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id ta-staging-github-workflow-automation-token | jq -r '.SecretString') echo "argo-token=$ARGO_TOKEN" >> "$GITHUB_OUTPUT" - - name: print workflow name - if: ${{ !cancelled() }} - run: | - echo "workflow name: ${{ steps.run-tests.outputs.workflow-name }}" - - name: Check workflow cancel - id: cancel-workflow - env: - ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} - if: cancelled() - run: | - cancel_response=`argo submit -v -o json --from wftmpl/cancel-workflow -n workflows -l workflows.argoproj.io/workflow-template=cancel-workflow --argo-base-href '' -p workflow-to-cancel=${{ steps.run-tests.outputs.workflow-name }}` - name: Check if pod was deleted id: is-pod-deleted if: ${{ !cancelled() }} @@ -1268,6 +1262,12 @@ jobs: sc4s-version: ${{ matrix.sc4s.version }} 
sc4s-docker-registry: ${{ matrix.sc4s.docker_registry }} k8s-manifests-branch: ${{ needs.setup.outputs.k8s-manifests-branch }} + - name: Check workflow cancel + env: + ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} + if: cancelled() + run: | + cancel_response=`argo submit -v -o json --from wftmpl/${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} -l workflows.argoproj.io/workflow-template=${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} --argo-base-href '' -p workflow-to-cancel=${{ steps.run-tests.outputs.workflow-name }}` - name: Check if pod was deleted id: is-pod-deleted if: ${{ !cancelled() }} @@ -1456,6 +1456,12 @@ jobs: vendor-version: ${{ matrix.vendor-version.image }} sc4s-version: "No" k8s-manifests-branch: ${{ needs.setup.outputs.k8s-manifests-branch }} + - name: Check workflow cancel + env: + ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} + if: cancelled() + run: | + cancel_response=`argo submit -v -o json --from wftmpl/${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} -l workflows.argoproj.io/workflow-template=${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} --argo-base-href '' -p workflow-to-cancel=${{ steps.run-tests.outputs.workflow-name }}` - name: Read secrets from AWS Secrets Manager again into environment variables in case credential rotation id: update-argo-token if: ${{ !cancelled() }} @@ -1663,6 +1669,12 @@ jobs: vendor-version: ${{ matrix.vendor-version.image }} sc4s-version: "No" k8s-manifests-branch: ${{ needs.setup.outputs.k8s-manifests-branch }} + - name: Check workflow cancel + env: + ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} + if: cancelled() + run: | + cancel_response=`argo submit -v -o json --from wftmpl/${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} -l workflows.argoproj.io/workflow-template=${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} --argo-base-href '' -p workflow-to-cancel=${{ steps.run-tests.outputs.workflow-name }}` - name: Read secrets from AWS Secrets Manager again into environment variables in case credential rotation id: update-argo-token if: ${{ !cancelled() }} @@ -1870,6 +1882,12 @@ jobs: os-name: ${{ steps.os-name-version.outputs.os-name }} os-version: ${{ steps.os-name-version.outputs.os-version }} k8s-manifests-branch: ${{ needs.setup.outputs.k8s-manifests-branch }} + - name: Check workflow cancel + env: + ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} + if: cancelled() + run: | + cancel_response=`argo submit -v -o json --from wftmpl/${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} -l workflows.argoproj.io/workflow-template=${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} --argo-base-href '' -p workflow-to-cancel=${{ steps.run-tests.outputs.workflow-name }}` - name: Check if pod was deleted id: is-pod-deleted if: ${{ !cancelled() }} @@ -2070,6 +2088,12 @@ jobs: os-name: ${{ steps.os-name-version.outputs.os-name }} os-version: ${{ steps.os-name-version.outputs.os-version }} k8s-manifests-branch: ${{ needs.setup.outputs.k8s-manifests-branch }} + - name: Check workflow cancel + env: + ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} + if: cancelled() + run: | + cancel_response=`argo submit -v -o json --from wftmpl/${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} -l workflows.argoproj.io/workflow-template=${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} --argo-base-href '' -p workflow-to-cancel=${{ steps.run-tests.outputs.workflow-name }}` - name: Check if pod was deleted id: is-pod-deleted if: 
${{ !cancelled() }} @@ -2273,6 +2297,12 @@ jobs: vendor-version: ${{ matrix.vendor-version.image }} sc4s-version: "No" k8s-manifests-branch: ${{ needs.setup.outputs.k8s-manifests-branch }} + - name: Check workflow cancel + env: + ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} + if: cancelled() + run: | + cancel_response=`argo submit -v -o json --from wftmpl/${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} -l workflows.argoproj.io/workflow-template=${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} --argo-base-href '' -p workflow-to-cancel=${{ steps.run-tests.outputs.workflow-name }}` - name: Check if pod was deleted id: is-pod-deleted if: ${{ steps.get-escu-detections.outputs.escu-test-run == 'true' }} From e86983f3b140dffc881532ef9a1b5daad71a22d4 Mon Sep 17 00:00:00 2001 From: mkolasinski-splunk Date: Fri, 21 Jul 2023 10:19:39 +0200 Subject: [PATCH 13/15] chore: change staging to prod and label output naming --- .../workflows/reusable-build-test-release.yml | 56 +++++++++---------- 1 file changed, 28 insertions(+), 28 deletions(-) diff --git a/.github/workflows/reusable-build-test-release.yml b/.github/workflows/reusable-build-test-release.yml index 2f0cf213c..cb07920f2 100644 --- a/.github/workflows/reusable-build-test-release.yml +++ b/.github/workflows/reusable-build-test-release.yml @@ -65,12 +65,12 @@ jobs: execute-modinput_functional: ${{ steps.delay-destroy-setup.outputs.execute-modinput_functional }} execute-scripted_inputs: ${{ steps.delay-destroy-setup.outputs.execute-scripted_inputs }} execute-requirement_test: ${{ steps.delay-destroy-setup.outputs.execute-requirement_test }} - execute-labeled-knowledge: ${{ steps.configure-tests-on-labels.outputs.execute_knowledge_labeled }} - execute-labeled-ui: ${{ steps.configure-tests-on-labels.outputs.execute_ui_labeled }} - execute-labeled-escu: ${{ steps.configure-tests-on-labels.outputs.execute_escu_labeled }} - execute-labeled-modinput: ${{ steps.configure-tests-on-labels.outputs.execute_modinput_functional_labeled }} - execute-labeled-scripted_inputs: ${{ steps.configure-tests-on-labels.outputs.execute_scripted_inputs_labeled }} - execute-labeled-requirement: ${{ steps.configure-tests-on-labels.outputs.execute_requirement_test_labeled }} + execute-knowledge-labeled: ${{ steps.configure-tests-on-labels.outputs.execute_knowledge_labeled }} + execute-ui-labeled: ${{ steps.configure-tests-on-labels.outputs.execute_ui_labeled }} + execute-escu-labeled: ${{ steps.configure-tests-on-labels.outputs.execute_escu_labeled }} + execute-modinput-labeled: ${{ steps.configure-tests-on-labels.outputs.execute_modinput_functional_labeled }} + execute-scripted_inputs-labeled: ${{ steps.configure-tests-on-labels.outputs.execute_scripted_inputs_labeled }} + execute-requirement-labeled: ${{ steps.configure-tests-on-labels.outputs.execute_requirement_test_labeled }} steps: - name: skip workflow if description is empty for labeled pr id: skip-workflow @@ -611,7 +611,7 @@ jobs: run: | echo "name=$(basename "${{ steps.slim.outputs.OUTPUT }}")" >> "$GITHUB_OUTPUT" basename "${{ steps.slim.outputs.OUTPUT }}" - aws s3 cp "${{ steps.slim.outputs.OUTPUT }}" s3://ta-staging-artifacts/ta-apps/ + aws s3 cp "${{ steps.slim.outputs.OUTPUT }}" s3://ta-production-artifacts/ta-apps/ - name: artifact-splunk-parts uses: actions/upload-artifact@v3 with: @@ -739,7 +739,7 @@ jobs: uses: splunk/addonfactory-workflow-requirement-files-unit-tests@v1.4 with: input-files: tests/requirement_test/logs - - name: Archive 
production artifacts if: ${{ !cancelled() }} uses: actions/upload-artifact@v3 with: @@ -912,9 +912,9 @@ jobs: JOB_NAME=$(echo "$ADDON_NAME" | tail -c 16)-$(echo "${GITHUB_SHA}" | tail -c 8)-TEST-TYPE-${GITHUB_RUN_ID} JOB_NAME=${JOB_NAME//[_.]/-} LABELS="addon-name=${ADDON_NAME}" - ADDON_UPLOAD_PATH="s3://ta-staging-artifacts/ta-apps/${{ needs.build.outputs.buildname }}" + ADDON_UPLOAD_PATH="s3://ta-production-artifacts/ta-apps/${{ needs.build.outputs.buildname }}" { - echo "argo-server=argo.staging.wfe.splgdi.com:443" + echo "argo-server=argo.wfe.splgdi.com:443" echo "argo-http1=true" echo "argo-secure=true" echo "argo-base-href=\'\'" @@ -922,7 +922,7 @@ jobs: echo "argo-workflow-tmpl-name=ta-workflow" echo "argo-cancel-workflow-tmpl-name=cancel-workflow" echo "directory-path=/tmp" - echo "s3-bucket=ta-staging-artifacts" + echo "s3-bucket=ta-production-artifacts" echo "addon-name=\"$ADDON_NAME\"" echo "job-name=wf-$JOB_NAME" echo "labels=$LABELS" @@ -964,7 +964,7 @@ jobs: aws s3 sync "${{ github.workspace }}/tmp/restapi_client/" "s3://ta-production-artifacts/ta-apps/$swagger_name/" --exclude "*" --include "README.md" --include "*swagger_client*" --only-show-errors run-knowledge-tests: - if: ${{ !cancelled() && needs.build.result == 'success' && needs.test-inventory.outputs.knowledge == 'true' && (needs.setup-workflow.outputs.execute-ko == 'Yes' || needs.setup-workflow.outputs.execute-labeled-knowledge == 'true') }} + if: ${{ !cancelled() && needs.build.result == 'success' && needs.test-inventory.outputs.knowledge == 'true' && (needs.setup-workflow.outputs.execute-ko == 'Yes' || needs.setup-workflow.outputs.execute-knowledge-labeled == 'true') }} needs: - build - test-inventory @@ -1008,7 +1008,7 @@ jobs: - name: Read secrets from AWS Secrets Manager into environment variables id: get-argo-token run: | - ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id ta-staging-github-workflow-automation-token | jq -r '.SecretString') + ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id ta-github-workflow-automation-token | jq -r '.SecretString') echo "argo-token=$ARGO_TOKEN" >> "$GITHUB_OUTPUT" - name: create job name id: create-job-name @@ -1059,7 +1059,7 @@ jobs: id: update-argo-token if: ${{ !cancelled() }} run: | - ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id ta-staging-github-workflow-automation-token | jq -r '.SecretString') + ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id ta-github-workflow-automation-token | jq -r '.SecretString') echo "argo-token=$ARGO_TOKEN" >> "$GITHUB_OUTPUT" - name: Check if pod was deleted id: is-pod-deleted @@ -1178,7 +1178,7 @@ jobs: run-requirement-tests: - if: ${{ !cancelled() && needs.build.result == 'success' && needs.test-inventory.outputs.requirement_test == 'true' && (needs.setup-workflow.outputs.execute-requirement_test == 'Yes' || needs.setup-workflow.outputs.execute-labeled-requirement == 'true') }} + if: ${{ !cancelled() && needs.build.result == 'success' && needs.test-inventory.outputs.requirement_test == 'true' && (needs.setup-workflow.outputs.execute-requirement_test == 'Yes' || needs.setup-workflow.outputs.execute-requirement-labeled == 'true') }} needs: - build - test-inventory @@ -1221,7 +1221,7 @@ jobs: - name: Read secrets from AWS Secrets Manager into environment variables id: get-argo-token run: | - ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id ta-staging-github-workflow-automation-token | jq -r '.SecretString') + ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id 
ta-github-workflow-automation-token | jq -r '.SecretString') echo "argo-token=$ARGO_TOKEN" >> "$GITHUB_OUTPUT" - name: create job name id: create-job-name @@ -1370,7 +1370,7 @@ jobs: ${{ needs.setup.outputs.directory-path }}/diag* run-ui-tests: - if: ${{ !cancelled() && needs.build.result == 'success' && needs.test-inventory.outputs.ui == 'true' && (needs.setup-workflow.outputs.execute-ui == 'Yes' || needs.setup-workflow.outputs.execute-labeled-ui == 'true') }} + if: ${{ !cancelled() && needs.build.result == 'success' && needs.test-inventory.outputs.ui == 'true' && (needs.setup-workflow.outputs.execute-ui == 'Yes' || needs.setup-workflow.outputs.execute-ui-labeled == 'true') }} needs: - build - test-inventory @@ -1415,7 +1415,7 @@ jobs: - name: Read secrets from AWS Secrets Manager into environment variables id: get-argo-token run: | - ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id ta-staging-github-workflow-automation-token | jq -r '.SecretString') + ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id ta-github-workflow-automation-token | jq -r '.SecretString') echo "argo-token=$ARGO_TOKEN" >> "$GITHUB_OUTPUT" - name: create job name id: create-job-name @@ -1466,7 +1466,7 @@ jobs: id: update-argo-token if: ${{ !cancelled() }} run: | - ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id ta-staging-github-workflow-automation-token | jq -r '.SecretString') + ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id ta-github-workflow-automation-token | jq -r '.SecretString') echo "argo-token=$ARGO_TOKEN" >> "$GITHUB_OUTPUT" - name: Check if pod was deleted id: is-pod-deleted @@ -1570,7 +1570,7 @@ jobs: ${{ needs.setup.outputs.directory-path }}/diag* run-modinput-tests: - if: ${{ !cancelled() && needs.build.result == 'success' && needs.test-inventory.outputs.modinput_functional == 'true' && (needs.setup-workflow.outputs.execute-modinput_functional == 'Yes' || needs.setup-workflow.outputs.execute-labeled-modinput == 'true') }} + if: ${{ !cancelled() && needs.build.result == 'success' && needs.test-inventory.outputs.modinput_functional == 'true' && (needs.setup-workflow.outputs.execute-modinput_functional == 'Yes' || needs.setup-workflow.outputs.execute-modinput-labeled == 'true') }} needs: - build - test-inventory @@ -1616,7 +1616,7 @@ jobs: - name: Read secrets from AWS Secrets Manager into environment variables id: get-argo-token run: | - ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id ta-staging-github-workflow-automation-token | jq -r '.SecretString') + ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id ta-github-workflow-automation-token | jq -r '.SecretString') echo "argo-token=$ARGO_TOKEN" >> "$GITHUB_OUTPUT" - name: create job name id: create-job-name @@ -1679,7 +1679,7 @@ jobs: id: update-argo-token if: ${{ !cancelled() }} run: | - ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id ta-staging-github-workflow-automation-token | jq -r '.SecretString') + ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id ta-github-workflow-automation-token | jq -r '.SecretString') echo "argo-token=$ARGO_TOKEN" >> "$GITHUB_OUTPUT" - name: Check if pod was deleted id: is-pod-deleted @@ -1783,7 +1783,7 @@ jobs: ${{ needs.setup.outputs.directory-path }}/diag* run-scripted-input-tests-full-matrix: - if: ${{ !cancelled() && needs.build.result == 'success' && needs.test-inventory.outputs.scripted_inputs == 'true' && ( github.base_ref == 'main' || github.ref_name == 'main' ) && (needs.setup-workflow.outputs.execute-scripted_inputs == 
'Yes' || needs.setup-workflow.outputs.execute-labeled-scripted_inputs == 'true') }} + if: ${{ !cancelled() && needs.build.result == 'success' && needs.test-inventory.outputs.scripted_inputs == 'true' && ( github.base_ref == 'main' || github.ref_name == 'main' ) && (needs.setup-workflow.outputs.execute-scripted_inputs == 'Yes' || needs.setup-workflow.outputs.execute-scripted_inputs-labeled == 'true') }} needs: - build - test-inventory @@ -1826,7 +1826,7 @@ jobs: - name: Read secrets from AWS Secrets Manager into environment variables id: get-argo-token run: | - ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id ta-staging-github-workflow-automation-token | jq -r '.SecretString') + ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id ta-github-workflow-automation-token | jq -r '.SecretString') echo "argo-token=$ARGO_TOKEN" >> "$GITHUB_OUTPUT" - name: create job name id: create-job-name @@ -1990,7 +1990,7 @@ jobs: ${{ needs.setup.outputs.directory-path }}/diag* run-scripted-input-tests-canary: - if: ${{ !cancelled() && needs.build.result == 'success' && needs.test-inventory.outputs.scripted_inputs == 'true' && ( github.base_ref == 'develop' || github.ref_name == 'develop' ) && (needs.setup-workflow.outputs.execute-scripted_inputs == 'Yes' || needs.setup-workflow.outputs.execute-labeled-scripted_inputs == 'true') }} + if: ${{ !cancelled() && needs.build.result == 'success' && needs.test-inventory.outputs.scripted_inputs == 'true' && ( github.base_ref == 'develop' || github.ref_name == 'develop' ) && (needs.setup-workflow.outputs.execute-scripted_inputs == 'Yes' || needs.setup-workflow.outputs.execute-scripted_inputs-labeled == 'true') }} needs: - build - test-inventory @@ -2033,7 +2033,7 @@ jobs: - name: Read secrets from AWS Secrets Manager into environment variables id: get-argo-token run: | - ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id ta-staging-github-workflow-automation-token | jq -r '.SecretString') + ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id ta-github-workflow-automation-token | jq -r '.SecretString') echo "argo-token=$ARGO_TOKEN" >> "$GITHUB_OUTPUT" - name: create job name id: create-job-name @@ -2196,7 +2196,7 @@ jobs: ${{ needs.setup.outputs.directory-path }}/diag* run-escu-tests: - if: ${{ !cancelled() && needs.build.result == 'success' && needs.test-inventory.outputs.escu == 'true' && ( github.base_ref == 'main' || github.ref_name == 'main' || github.base_ref == 'develop' || github.ref_name == 'develop' ) && (needs.setup-workflow.outputs.execute-escu == 'Yes' || needs.setup-workflow.outputs.execute-labeled-escu == 'true') }} + if: ${{ !cancelled() && needs.build.result == 'success' && needs.test-inventory.outputs.escu == 'true' && ( github.base_ref == 'main' || github.ref_name == 'main' || github.base_ref == 'develop' || github.ref_name == 'develop' ) && (needs.setup-workflow.outputs.execute-escu == 'Yes' || needs.setup-workflow.outputs.execute-escu-labeled == 'true') }} needs: - build - test-inventory @@ -2238,7 +2238,7 @@ jobs: - name: Read secrets from AWS Secrets Manager into environment variables id: get-argo-token run: | - ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id ta-staging-github-workflow-automation-token | jq -r '.SecretString') + ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id ta-github-workflow-automation-token | jq -r '.SecretString') echo "argo-token=$ARGO_TOKEN" >> "$GITHUB_OUTPUT" - name: create job name id: create-job-name From 3b2d206d52a15fe68cdce403e908ddcec44a6382 Mon Sep 17 
00:00:00 2001 From: mkolasinski-splunk Date: Mon, 24 Jul 2023 12:21:50 +0200 Subject: [PATCH 14/15] chore: add workflow concurrency and check workflow was stopped --- .../workflows/reusable-build-test-release.yml | 81 ++++++++++++++++--- 1 file changed, 70 insertions(+), 11 deletions(-) diff --git a/.github/workflows/reusable-build-test-release.yml b/.github/workflows/reusable-build-test-release.yml index cb07920f2..992deb4d2 100644 --- a/.github/workflows/reusable-build-test-release.yml +++ b/.github/workflows/reusable-build-test-release.yml @@ -48,6 +48,9 @@ on: permissions: contents: read packages: read +concurrency: + group: ${{ github.head_ref || github.ref_name }} + cancel-in-progress: true jobs: setup-workflow: runs-on: ubuntu-latest @@ -1049,12 +1052,20 @@ jobs: sc4s-version: ${{ matrix.sc4s.version }} sc4s-docker-registry: ${{ matrix.sc4s.docker_registry }} k8s-manifests-branch: ${{ needs.setup.outputs.k8s-manifests-branch }} - - name: Check workflow cancel + - name: Cancel workflow env: ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} if: cancelled() run: | cancel_response=`argo submit -v -o json --from wftmpl/${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} -l workflows.argoproj.io/workflow-template=${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} --argo-base-href '' -p workflow-to-cancel=${{ steps.run-tests.outputs.workflow-name }}` + cancel_workflow_name=$( echo "$cancel_response" |jq -r '.metadata.name' ) + cancel_logs=$(argo logs --follow "$cancel_workflow_name" -n workflows) + if echo "$cancel_logs" | grep -q "workflow ${{ steps.run-tests.outputs.workflow-name }} stopped"; then + echo "Workflow ${{ steps.run-tests.outputs.workflow-name }} stopped" + else + echo "Workflow ${{ steps.run-tests.outputs.workflow-name }} didn't stop" + exit 1 + fi - name: Read secrets from AWS Secrets Manager again into environment variables in case credential rotation id: update-argo-token if: ${{ !cancelled() }} @@ -1262,12 +1273,20 @@ jobs: sc4s-version: ${{ matrix.sc4s.version }} sc4s-docker-registry: ${{ matrix.sc4s.docker_registry }} k8s-manifests-branch: ${{ needs.setup.outputs.k8s-manifests-branch }} - - name: Check workflow cancel + - name: Cancel workflow env: ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} if: cancelled() run: | cancel_response=`argo submit -v -o json --from wftmpl/${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} -l workflows.argoproj.io/workflow-template=${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} --argo-base-href '' -p workflow-to-cancel=${{ steps.run-tests.outputs.workflow-name }}` + cancel_workflow_name=$( echo "$cancel_response" |jq -r '.metadata.name' ) + cancel_logs=$(argo logs --follow "$cancel_workflow_name" -n workflows) + if echo "$cancel_logs" | grep -q "workflow ${{ steps.run-tests.outputs.workflow-name }} stopped"; then + echo "Workflow ${{ steps.run-tests.outputs.workflow-name }} stopped" + else + echo "Workflow ${{ steps.run-tests.outputs.workflow-name }} didn't stop" + exit 1 + fi - name: Check if pod was deleted id: is-pod-deleted if: ${{ !cancelled() }} @@ -1456,12 +1475,20 @@ jobs: vendor-version: ${{ matrix.vendor-version.image }} sc4s-version: "No" k8s-manifests-branch: ${{ needs.setup.outputs.k8s-manifests-branch }} - - name: Check workflow cancel + - name: Cancel workflow env: ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} if: cancelled() run: | - cancel_response=`argo submit -v -o json --from wftmpl/${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} -l 
workflows.argoproj.io/workflow-template=${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} --argo-base-href '' -p workflow-to-cancel=${{ steps.run-tests.outputs.workflow-name }}` + cancel_response=`argo submit -v -o json --from wftmpl/${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} -l workflows.argoproj.io/workflow-template=${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} --argo-base-href '' -p workflow-to-cancel=${{ steps.run-tests.outputs.workflow-name }}` + cancel_workflow_name=$( echo "$cancel_response" |jq -r '.metadata.name' ) + cancel_logs=$(argo logs --follow "$cancel_workflow_name" -n workflows) + if echo "$cancel_logs" | grep -q "workflow ${{ steps.run-tests.outputs.workflow-name }} stopped"; then + echo "Workflow ${{ steps.run-tests.outputs.workflow-name }} stopped" + else + echo "Workflow ${{ steps.run-tests.outputs.workflow-name }} didn't stop" + exit 1 + fi - name: Read secrets from AWS Secrets Manager again into environment variables in case credential rotation id: update-argo-token if: ${{ !cancelled() }} @@ -1669,12 +1696,20 @@ jobs: vendor-version: ${{ matrix.vendor-version.image }} sc4s-version: "No" k8s-manifests-branch: ${{ needs.setup.outputs.k8s-manifests-branch }} - - name: Check workflow cancel + - name: Cancel workflow env: ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} if: cancelled() run: | cancel_response=`argo submit -v -o json --from wftmpl/${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} -l workflows.argoproj.io/workflow-template=${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} --argo-base-href '' -p workflow-to-cancel=${{ steps.run-tests.outputs.workflow-name }}` + cancel_workflow_name=$( echo "$cancel_response" |jq -r '.metadata.name' ) + cancel_logs=$(argo logs --follow "$cancel_workflow_name" -n workflows) + if echo "$cancel_logs" | grep -q "workflow ${{ steps.run-tests.outputs.workflow-name }} stopped"; then + echo "Workflow ${{ steps.run-tests.outputs.workflow-name }} stopped" + else + echo "Workflow ${{ steps.run-tests.outputs.workflow-name }} didn't stop" + exit 1 + fi - name: Read secrets from AWS Secrets Manager again into environment variables in case credential rotation id: update-argo-token if: ${{ !cancelled() }} @@ -1882,12 +1917,20 @@ jobs: os-name: ${{ steps.os-name-version.outputs.os-name }} os-version: ${{ steps.os-name-version.outputs.os-version }} k8s-manifests-branch: ${{ needs.setup.outputs.k8s-manifests-branch }} - - name: Check workflow cancel + - name: Cancel workflow env: ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} if: cancelled() run: | - cancel_response=`argo submit -v -o json --from wftmpl/${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} -l workflows.argoproj.io/workflow-template=${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} --argo-base-href '' -p workflow-to-cancel=${{ steps.run-tests.outputs.workflow-name }}` + cancel_response=`argo submit -v -o json --from wftmpl/${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} -l workflows.argoproj.io/workflow-template=${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} --argo-base-href '' -p workflow-to-cancel=${{ steps.run-tests.outputs.workflow-name }}` + cancel_workflow_name=$( echo "$cancel_response" |jq -r '.metadata.name' ) + cancel_logs=$(argo logs --follow "$cancel_workflow_name" -n workflows) + if echo "$cancel_logs" | grep -q "workflow ${{ steps.run-tests.outputs.workflow-name }} stopped"; then + echo "Workflow ${{ steps.run-tests.outputs.workflow-name }} stopped" + else + 
echo "Workflow ${{ steps.run-tests.outputs.workflow-name }} didn't stop" + exit 1 + fi - name: Check if pod was deleted id: is-pod-deleted if: ${{ !cancelled() }} @@ -2088,12 +2131,20 @@ jobs: os-name: ${{ steps.os-name-version.outputs.os-name }} os-version: ${{ steps.os-name-version.outputs.os-version }} k8s-manifests-branch: ${{ needs.setup.outputs.k8s-manifests-branch }} - - name: Check workflow cancel + - name: Cancel workflow env: ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} if: cancelled() run: | - cancel_response=`argo submit -v -o json --from wftmpl/${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} -l workflows.argoproj.io/workflow-template=${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} --argo-base-href '' -p workflow-to-cancel=${{ steps.run-tests.outputs.workflow-name }}` + cancel_response=`argo submit -v -o json --from wftmpl/${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} -l workflows.argoproj.io/workflow-template=${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} --argo-base-href '' -p workflow-to-cancel=${{ steps.run-tests.outputs.workflow-name }}` + cancel_workflow_name=$( echo "$cancel_response" |jq -r '.metadata.name' ) + cancel_logs=$(argo logs --follow "$cancel_workflow_name" -n workflows) + if echo "$cancel_logs" | grep -q "workflow ${{ steps.run-tests.outputs.workflow-name }} stopped"; then + echo "Workflow ${{ steps.run-tests.outputs.workflow-name }} stopped" + else + echo "Workflow ${{ steps.run-tests.outputs.workflow-name }} didn't stop" + exit 1 + fi - name: Check if pod was deleted id: is-pod-deleted if: ${{ !cancelled() }} @@ -2297,12 +2348,20 @@ jobs: vendor-version: ${{ matrix.vendor-version.image }} sc4s-version: "No" k8s-manifests-branch: ${{ needs.setup.outputs.k8s-manifests-branch }} - - name: Check workflow cancel + - name: Cancel workflow env: ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} if: cancelled() run: | - cancel_response=`argo submit -v -o json --from wftmpl/${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} -l workflows.argoproj.io/workflow-template=${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} --argo-base-href '' -p workflow-to-cancel=${{ steps.run-tests.outputs.workflow-name }}` + cancel_response=`argo submit -v -o json --from wftmpl/${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} -l workflows.argoproj.io/workflow-template=${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} --argo-base-href '' -p workflow-to-cancel=${{ steps.run-tests.outputs.workflow-name }}` + cancel_workflow_name=$( echo "$cancel_response" |jq -r '.metadata.name' ) + cancel_logs=$(argo logs --follow "$cancel_workflow_name" -n workflows) + if echo "$cancel_logs" | grep -q "workflow ${{ steps.run-tests.outputs.workflow-name }} stopped"; then + echo "Workflow ${{ steps.run-tests.outputs.workflow-name }} stopped" + else + echo "Workflow ${{ steps.run-tests.outputs.workflow-name }} didn't stop" + exit 1 + fi - name: Check if pod was deleted id: is-pod-deleted if: ${{ steps.get-escu-detections.outputs.escu-test-run == 'true' }} From 83cdf0a81a6d1cebfbff7cc4fe60e9e07b03d66d Mon Sep 17 00:00:00 2001 From: mkolasinski-splunk Date: Mon, 24 Jul 2023 15:35:50 +0200 Subject: [PATCH 15/15] chore: fix pre-commit --- .github/workflows/reusable-build-test-release.yml | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/workflows/reusable-build-test-release.yml b/.github/workflows/reusable-build-test-release.yml index 992deb4d2..69171d51f 100644 --- 
a/.github/workflows/reusable-build-test-release.yml +++ b/.github/workflows/reusable-build-test-release.yml @@ -1057,7 +1057,7 @@ jobs: ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} if: cancelled() run: | - cancel_response=`argo submit -v -o json --from wftmpl/${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} -l workflows.argoproj.io/workflow-template=${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} --argo-base-href '' -p workflow-to-cancel=${{ steps.run-tests.outputs.workflow-name }}` + cancel_response=$(argo submit -v -o json --from wftmpl/${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} -l workflows.argoproj.io/workflow-template=${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} --argo-base-href '' -p workflow-to-cancel=${{ steps.run-tests.outputs.workflow-name }}) cancel_workflow_name=$( echo "$cancel_response" |jq -r '.metadata.name' ) cancel_logs=$(argo logs --follow "$cancel_workflow_name" -n workflows) if echo "$cancel_logs" | grep -q "workflow ${{ steps.run-tests.outputs.workflow-name }} stopped"; then @@ -1278,7 +1278,7 @@ jobs: ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} if: cancelled() run: | - cancel_response=`argo submit -v -o json --from wftmpl/${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} -l workflows.argoproj.io/workflow-template=${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} --argo-base-href '' -p workflow-to-cancel=${{ steps.run-tests.outputs.workflow-name }}` + cancel_response=$(argo submit -v -o json --from wftmpl/${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} -l workflows.argoproj.io/workflow-template=${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} --argo-base-href '' -p workflow-to-cancel=${{ steps.run-tests.outputs.workflow-name }}) cancel_workflow_name=$( echo "$cancel_response" |jq -r '.metadata.name' ) cancel_logs=$(argo logs --follow "$cancel_workflow_name" -n workflows) if echo "$cancel_logs" | grep -q "workflow ${{ steps.run-tests.outputs.workflow-name }} stopped"; then @@ -1480,7 +1480,7 @@ jobs: ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} if: cancelled() run: | - cancel_response=`argo submit -v -o json --from wftmpl/${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} -l workflows.argoproj.io/workflow-template=${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} --argo-base-href '' -p workflow-to-cancel=${{ steps.run-tests.outputs.workflow-name }}` + cancel_response=$(argo submit -v -o json --from wftmpl/${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} -l workflows.argoproj.io/workflow-template=${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} --argo-base-href '' -p workflow-to-cancel=${{ steps.run-tests.outputs.workflow-name }}) cancel_workflow_name=$( echo "$cancel_response" |jq -r '.metadata.name' ) cancel_logs=$(argo logs --follow "$cancel_workflow_name" -n workflows) if echo "$cancel_logs" | grep -q "workflow ${{ steps.run-tests.outputs.workflow-name }} stopped"; then @@ -1701,7 +1701,7 @@ jobs: ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} if: cancelled() run: | - cancel_response=`argo submit -v -o json --from wftmpl/${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} -l workflows.argoproj.io/workflow-template=${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} --argo-base-href '' -p workflow-to-cancel=${{ steps.run-tests.outputs.workflow-name }}` + cancel_response=$(argo submit -v -o json --from wftmpl/${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} -l 
workflows.argoproj.io/workflow-template=${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} --argo-base-href '' -p workflow-to-cancel=${{ steps.run-tests.outputs.workflow-name }}) cancel_workflow_name=$( echo "$cancel_response" |jq -r '.metadata.name' ) cancel_logs=$(argo logs --follow "$cancel_workflow_name" -n workflows) if echo "$cancel_logs" | grep -q "workflow ${{ steps.run-tests.outputs.workflow-name }} stopped"; then @@ -1922,7 +1922,7 @@ jobs: ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} if: cancelled() run: | - cancel_response=`argo submit -v -o json --from wftmpl/${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} -l workflows.argoproj.io/workflow-template=${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} --argo-base-href '' -p workflow-to-cancel=${{ steps.run-tests.outputs.workflow-name }}` + cancel_response=$(argo submit -v -o json --from wftmpl/${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} -l workflows.argoproj.io/workflow-template=${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} --argo-base-href '' -p workflow-to-cancel=${{ steps.run-tests.outputs.workflow-name }}) cancel_workflow_name=$( echo "$cancel_response" |jq -r '.metadata.name' ) cancel_logs=$(argo logs --follow "$cancel_workflow_name" -n workflows) if echo "$cancel_logs" | grep -q "workflow ${{ steps.run-tests.outputs.workflow-name }} stopped"; then @@ -2136,7 +2136,7 @@ jobs: ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} if: cancelled() run: | - cancel_response=`argo submit -v -o json --from wftmpl/${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} -l workflows.argoproj.io/workflow-template=${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} --argo-base-href '' -p workflow-to-cancel=${{ steps.run-tests.outputs.workflow-name }}` + cancel_response=$(argo submit -v -o json --from wftmpl/${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} -l workflows.argoproj.io/workflow-template=${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} --argo-base-href '' -p workflow-to-cancel=${{ steps.run-tests.outputs.workflow-name }}) cancel_workflow_name=$( echo "$cancel_response" |jq -r '.metadata.name' ) cancel_logs=$(argo logs --follow "$cancel_workflow_name" -n workflows) if echo "$cancel_logs" | grep -q "workflow ${{ steps.run-tests.outputs.workflow-name }} stopped"; then @@ -2353,7 +2353,7 @@ jobs: ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} if: cancelled() run: | - cancel_response=`argo submit -v -o json --from wftmpl/${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} -l workflows.argoproj.io/workflow-template=${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} --argo-base-href '' -p workflow-to-cancel=${{ steps.run-tests.outputs.workflow-name }}` + cancel_response=$(argo submit -v -o json --from wftmpl/${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} -l workflows.argoproj.io/workflow-template=${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} --argo-base-href '' -p workflow-to-cancel=${{ steps.run-tests.outputs.workflow-name }}) cancel_workflow_name=$( echo "$cancel_response" |jq -r '.metadata.name' ) cancel_logs=$(argo logs --follow "$cancel_workflow_name" -n workflows) if echo "$cancel_logs" | grep -q "workflow ${{ steps.run-tests.outputs.workflow-name }} stopped"; then