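# CI workflow for the Airflow DAGs repository: builds the Airflow image,
# starts the backing services, runs the pytest suite inside the built image,
# uploads coverage to Codecov and, on pushes to main, triggers a QA deploy.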
name: Build and Test
on:
  push:
    branches:
      - main
  pull_request_target:
    branches:
      - main
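# NOTE: pull_request_target runs in the context of the base repository and has
# access to its secrets; the "Checkout PR" step below therefore checks out the
# PR head SHA explicitly so the proposed changes are the ones under test.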
env:
  PYTHON_VERSION: 3.8.16
  AIRFLOW_HOME: /home/runner/work/airflow-dags/airflow-dags
  REGISTRY: registry.cern.ch
  IMAGE: cern-sis/airflow
  AIRFLOW__CORE__EXECUTOR: CeleryExecutor
  AIRFLOW__DATABASE__SQL_ALCHEMY_CONN: postgresql+psycopg2://airflow:airflow@localhost/airflow
  AIRFLOW__CELERY__RESULT_BACKEND: db+postgresql://airflow:airflow@localhost/airflow
  AIRFLOW__CELERY__BROKER_URL: redis://localhost:6379/0
  AIRFLOW__CORE__DAGS_ARE_PAUSED_AT_CREATION: "true"
  AIRFLOW__CORE__LOAD_EXAMPLES: "false"
  AIRFLOW__API__AUTH_BACKENDS: "airflow.api.auth.backend.basic_auth"
  IOP_FTP_HOST: "sftp"
  SPRINGER_FTP_HOST: "sftp"
  OUP_FTP_HOST: "ftp"
  S3_ENDPOINT: "http://localhost:9000"
  SPRINGER_FTP_PORT: 22
  IOP_FTP_PORT: 22
  AIRFLOW__SCHEDULER__ENABLE_HEALTH_CHECK: "true"
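# Single job: check out the code, start the backing services, build the image,
# run the test suite inside it, upload coverage and, on push, deploy to QA.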
jobs:
  build_test:
    name: Build and Test
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        if: ${{ github.event_name == 'push' }}
        uses: actions/checkout@v3
      - name: Checkout PR
        if: ${{ github.event_name == 'pull_request_target' }}
        uses: actions/checkout@v3
        with:
          ref: ${{ github.event.pull_request.head.sha }}
      - name: Install Python 3
        uses: actions/setup-python@v4
        with:
          python-version: ${{ env.PYTHON_VERSION }}
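      # Start the backing services (Postgres, Redis, SFTP, FTP and an
      # S3-compatible store) that the integration tests reach over localhost.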
      - name: Run services on docker compose
        run: docker compose up -d postgres redis sftp ftp s3 create_buckets
      - name: List services for IT Tests
        run: docker ps
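      # Build and push the image to the CERN Harbor registry; the digest
      # emitted by this step is reused by the test and deploy steps below.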
      - name: Build Image
        id: build
        uses: cern-sis/gh-workflows/.github/actions/docker-build@v6
        with:
          registry: ${{ env.REGISTRY }}
          image: ${{ env.IMAGE }}
          tags: type=ref,event=pr
          cache: false
          username: ${{ secrets.HARBOR_USERNAME }}
          password: ${{ secrets.HARBOR_PASSWORD }}
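      # Run the test suite inside the freshly built image. Host networking lets
      # the container reach the docker compose services on localhost, and the
      # bind mounts overlay the checked-out tests, DAGs, config and data.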
      - name: Run tests with pytest and generate report
        run: >
          docker run
          --name airflowdags
          --network=host
          -v "$(pwd)"/tests:/opt/airflow/tests
          -v "$(pwd)"/dags:/opt/airflow/dags
          -v "$(pwd)"/airflow.cfg:/opt/airflow/airflow.cfg
          -v "$(pwd)"/data:/opt/airflow/data
          $REGISTRY/$IMAGE@${{ steps.build.outputs.image-digest }}
          bash -c "airflow db init && pytest tests --cov=./ --cov-report=xml"
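      # pytest writes coverage.xml inside the container; copy it onto the
      # runner so the Codecov action can pick it up.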
      - name: Copy test coverage file to host machine
        run: docker cp airflowdags:/opt/airflow/coverage.xml .
      - name: Upload Coverage to Codecov
        uses: codecov/codecov-action@v3
        with:
          verbose: true
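      # On pushes to main, notify the cern-sis Kubernetes repository so QA is
      # rolled out with the newly built image digest.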
      - name: Deploy QA
        if: ${{ github.event_name == 'push' }}
        uses: cern-sis/gh-workflows/.github/actions/kubernetes-project-new-images@v6
        with:
          event-type: update
          images: ${{ env.REGISTRY }}/${{ env.IMAGE }}@${{ steps.build.outputs.image-digest }}
          token: ${{ secrets.PAT_FIRE_EVENTS_ON_CERN_SIS_KUBERNETES }}
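# Local reproduction (a sketch, assuming the repository's compose file defines
# the same services and a Dockerfile at the repo root builds the image; the
# "airflow-dags-test" tag below is an arbitrary local name):
#   docker compose up -d postgres redis sftp ftp s3 create_buckets
#   docker build -t airflow-dags-test .
#   docker run --network=host \
#     -v "$(pwd)"/tests:/opt/airflow/tests -v "$(pwd)"/dags:/opt/airflow/dags \
#     airflow-dags-test bash -c "airflow db init && pytest tests"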