#301 cloud front distribution for example data s3 bucket #117

Workflow file for this run

name: CI

on:
  pull_request:

jobs:

  run-unit-tests:
    name: Unit Tests
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Run shellcheck
        run: ./scripts/build/shellcheck.sh
      - name: Setup Python & Poetry Environment
        uses: exasol/python-toolbox/.github/actions/python-environment@0.13.0
        with:
          python-version: "3.10"
          poetry-version: "1.8.2"
      - name: Check Version Number
        run: poetry run python3 -u "./scripts/build/check_release.py"
      - name: Run Unit Tests
        run: >
          poetry run pytest
          --capture=no
          --override-ini=log_cli=true
          --override-ini=log_cli_level=INFO
          test/unit

  run-integration-tests:
    name: Integration Tests
    environment: AWS_CI_TESTS
    runs-on: ubuntu-latest
    needs: run-unit-tests
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Free disk space
        uses: jlumbroso/free-disk-space@main
        with:
          tool-cache: true
          large-packages: false
      - name: Free disk space by removing large directories
        run: |
          sudo rm -rf /usr/local/graalvm/
          sudo rm -rf /usr/local/.ghcup/
          sudo rm -rf /usr/local/share/powershell
          sudo rm -rf /usr/local/share/chromium
          sudo rm -rf /usr/local/lib/node_modules
          sudo rm -rf /opt/ghc
      - name: Show available disk space
        run: df -h
      - name: Setup Python & Poetry Environment
        uses: exasol/python-toolbox/.github/actions/python-environment@0.13.0
        with:
          python-version: "3.10"
          poetry-version: "1.8.2"
      - name: Run Integration Tests
        run: >
          poetry run pytest
          --capture=no
          --override-ini=log_cli=true
          --override-ini=log_cli_level=INFO
          test/integration
        env: # Set the secrets as env variables
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_ACCESS_KEY_SECRET }}
          AWS_DEFAULT_REGION: ${{ secrets.AWS_REGION }}

  approval-for-notebook-tests:
    name: Run Jupyter Notebook Tests?
    runs-on: ubuntu-latest
    steps:
      - name: Detect Running Notebook Tests
        run: true
    environment:
      approve-test-execution

  run-notebook-tests:
    name: Jupyter Notebook Tests
    environment: AWS_SAGEMAKER
    runs-on: ubuntu-latest
    needs: approval-for-notebook-tests
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Free disk space
        uses: jlumbroso/free-disk-space@main
        with:
          tool-cache: true
          large-packages: false
      - name: Free disk space by removing large directories
        run: |
          sudo rm -rf /usr/local/graalvm/
          sudo rm -rf /usr/local/.ghcup/
          sudo rm -rf /usr/local/share/powershell
          sudo rm -rf /usr/local/share/chromium
          sudo rm -rf /usr/local/lib/node_modules
          sudo rm -rf /opt/ghc
      - name: Show available disk space
        run: df -h
      - name: Setup Python & Poetry Environment
        uses: exasol/python-toolbox/.github/actions/python-environment@0.13.0
        with:
          python-version: "3.10"
          poetry-version: "1.8.2"
      - name: Run notebook tests
        run: >
          poetry run pytest
          --capture=no
          --override-ini=log_cli=true
          --override-ini=log_cli_level=INFO
          test/notebook_test_runner/test_notebooks_in_dss_docker_image.py
        env: # Set the secrets as env variables
          NBTEST_AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          NBTEST_AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_ACCESS_KEY_SECRET }}
          NBTEST_AWS_DEFAULT_REGION: ${{ secrets.AWS_REGION }}
          SAAS_HOST: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_HOST }}
          SAAS_ACCOUNT_ID: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_ACCOUNT_ID }}
          SAAS_PAT: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_PAT }}

  gate-2:
    name: Gate 2 - Allow Merge
    runs-on: ubuntu-latest
    needs: [ run-unit-tests, run-integration-tests, run-notebook-tests ]
    steps:
      - name: Branch Protection
        run: true