diff --git a/.github/workflows/docker_build.yml b/.github/workflows/docker_build.yml
index bae2bc90..81c7902a 100644
--- a/.github/workflows/docker_build.yml
+++ b/.github/workflows/docker_build.yml
@@ -23,8 +23,7 @@ jobs:
       fail-fast: false
       matrix:
         # NOTE: Use a matrix (instead of dockerBuild.sh) for parallelism
-        dockerfile: [amazon, debian, fedora, redhat,
-                     amazon_pypy, debian_pypy, fedora_pypy, redhat_pypy]
+        dockerfile: [amazon, debian, fedora, redhat]
     runs-on: [ubuntu-latest]

     steps:
diff --git a/.github/workflows/docker_build_plugins.yml b/.github/workflows/docker_build_plugins.yml
index 76a1951f..adffb10d 100644
--- a/.github/workflows/docker_build_plugins.yml
+++ b/.github/workflows/docker_build_plugins.yml
@@ -6,6 +6,7 @@ on:
   schedule:
     # Run it every Tuesday at midnight.
     - cron: '0 0 * * 2'
+  workflow_dispatch:

 permissions:
   actions: read
diff --git a/.github/workflows/run_workflows.yml b/.github/workflows/run_workflows.yml
deleted file mode 100644
index b2011439..00000000
--- a/.github/workflows/run_workflows.yml
+++ /dev/null
@@ -1,244 +0,0 @@
-name: Run Workflows
-
-on:
-  workflow_dispatch:
-    # Have to declare parameters here for those that will be sent through 'workflow-dispatch'
-    # event in branch_dispatch.yml. Otherwise, there'll be github API errors:
-    # '"message": "Unexpected inputs provided: ...",'
-    inputs:
-      event_type:
-        description: An arbitrary string used to dispatch steps
-        required: true
-        type: string
-      commit_message:
-        description: The commit message
-        required: true
-        type: string
-      sender_repo:
-        description: The repository which initiated the workflow dispatch
-        required: true
-        type: string
-      sender_repo_owner:
-        description: The account name of the repository initiated the workflow dispatch
-        required: true
-        type: string
-      wic_owner:
-        description: The account name of the wic repository
-        required: true
-        type: string
-      wic_ref:
-        description: The branch name within the wic repository
-        required: true
-        type: string
-      mm-workflows_owner:
-        description: The account name of the mm-workflows repository
-        required: true
-        type: string
-      mm-workflows_ref:
-        description: The branch name within the mm-workflows repository
-        required: true
-        type: string
-      image-workflows_owner:
-        description: The account name of the image-workflows repository
-        required: true
-        type: string
-      image-workflows_ref:
-        description: The branch name within the image-workflows repository
-        required: true
-        type: string
-
-defaults:
-  run:
-    shell: bash -l {0} # Invoke bash in login mode, NOT interactive mode.
-    # This will cause bash to look for the startup file ~/.bash_profile, NOT ~/.bashrc
-    # This is important since conda init writes to ~/.bashrc
-
-permissions:
-  actions: read
-  contents: read
-  pull-requests: read
-
-jobs:
-  run_workflows:
-    # See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#concurrency
-    # This will prevent DOS attacks from people blasting the CI with rapid fire commits.
-    concurrency:
-      group: ${{ github.workflow }}-cwltool-${{ github.ref }}-${{ inputs.sender_repo }}-${{ inputs.mm-workflows_ref}}
-      cancel-in-progress: true
-    runs-on: [self-hosted, linux]
-
-    steps:
-    - name: Checkout sophios
-      uses: actions/checkout@v3
-      with:
-        repository: ${{ inputs.wic_owner }}/sophios
-        ref: ${{ inputs.wic_ref }}
-        path: sophios
-
-    - name: Checkout biobb_adapters
-      if: always()
-      uses: actions/checkout@v3
-      with:
-        # NOTE: temporarily hardcode sameeul & master because we can only
-        # have up to 10 input parameters for workflow_dispatch...
-        repository: vjaganat90/biobb_adapters
-        ref: master
-        path: biobb_adapters
-
-    - name: Checkout mm-workflows
-      if: always()
-      uses: actions/checkout@v3
-      with:
-        repository: ${{ inputs.mm-workflows_owner }}/mm-workflows
-        ref: ${{ inputs.mm-workflows_ref }}
-        path: mm-workflows
-
-    - name: Checkout image-workflows
-      if: always()
-      uses: actions/checkout@v3
-      with:
-        repository: ${{ inputs.image-workflows_owner }}/image-workflows
-        ref: ${{ inputs.image-workflows_ref }}
-        path: image-workflows
-
-    - name: Remove old global config
-      if: always()
-      run: rm -rf "/home/$(whoami)/wic/" && rm -rf "/home/$(whoami)/.toil/"
-      # For self-hosted runners, make sure we use new global config settings
-
-    # Using pypy increases performance / decreases cwltool runtime by about a minute.
-    - name: Append pypy to conda environment files
-      if: runner.os != 'Windows'
-      run: cd sophios/install/ && echo " - pypy" >> system_deps.yml
-
-    - name: Remove old mamba environment
-      if: always()
-      run: rm -rf "/home/$(whoami)/miniconda3/envs/wic_github_actions/"
-      # For self-hosted runners, make sure we install into a new mamba environment
-      # NOTE: Every time the github self-hosted runner executes, it sets "set -e" in ~/.bash_profile
-      # So if we rm -rf the old mamba environment without also removing the mamba init code in ~/.bash_profile
-      # (or removing the file altogether), then unless we immediately re-create the environment,
-      # (i.e. if we try to run any other commands between removing and re-creating the environment)
-      # we will get "EnvironmentNameNotFound: Could not find conda environment: wic_github_actions"
-      # and (again, due to "set -e") the workflow step will fail.
-
-    - name: Setup miniforge (linux, macos)
-      if: always()
-      uses: conda-incubator/setup-miniconda@v3.0.1
-      with:
-        miniforge-variant: Miniforge-pypy3
-        miniforge-version: 24.7.1-0
-        environment-file: sophios/install/system_deps.yml
-        activate-environment: wic_github_actions
-        channels: conda-forge
-        python-version: "3.9.*" # pypy is not yet compatible with 3.10 and 3.11
-
-    # - name: Docker pull
-    #   if: always()
-    #   run: cd mm-workflows/ && ./dockerPull.sh
-    #   # For self-hosted runners, make sure the docker cache is up-to-date.
-
-    - name: Install Workflow Inference Compiler
-      if: always()
-      run: cd sophios/ && pip install ".[all_except_runner_src]"
-
-    - name: Install Molecular Modeling Workflows
-      if: always()
-      # Also run mm-workflows command to generate
-      # mm-workflows/autogenerated/schemas/config_schemas.json
-      # NOTE: Use ".[test]" instead of ".[all_except_runner_src]"
-      # We do not want or need to install the workflow_deps extra.
-      # (Many of the packages conflict with pypy.)
-      run: cd mm-workflows/ && pip install ".[test]" && mm-workflows --generate_schemas
-
-    - name: Generate Sophios Python API Workflows (*.py -> *.wic)
-      if: always()
-      run: cd sophios/ && python -c 'import sophios; import sophios.plugins; sophios.plugins.blindly_execute_python_workflows()'
-
-    - name: Generate Sophios Validation Jsonschema
-      if: always()
-      run: cd sophios/ && sophios --generate_schemas
-
-    # Please read docs/validation.md#Property-Based-Testing
-    # This is essentially an integration test for all of the
-    # WIC Python API workflows as well as the WIC Python API itself.
-    - name: Validate Sophios Python API Workflows (*.py -> *.wic)
-      if: always()
-      run: cd sophios/ && python -c 'import sophios; import sophios.plugins; sophios.plugins.blindly_execute_python_workflows()'
-
-    - name: cwl-docker-extract (i.e. recursively docker pull)
-      if: always()
-      run: cd sophios/ && pytest tests/test_examples.py -k test_cwl_docker_extract
-      # For self-hosted runners, make sure the docker cache is up-to-date.
-
-    - name: PyTest Run Workflows
-      if: always()
-      # NOTE: Do NOT add coverage to PYPY CI runs https://github.com/tox-dev/tox/issues/2252
-      run: cd sophios/ && pytest tests/test_examples.py -k test_run_workflows_on_push --workers 8 --cwl_runner cwltool # --cov
-    - name: PyTest Run REST Core Tests
-      if: always()
-      # NOTE: Do NOT add coverage to PYPY CI runs https://github.com/tox-dev/tox/issues/2252
-      run: cd sophios/ && pytest tests/test_rest_core.py -k test_rest_core --cwl_runner cwltool
-    - name: PyTest Run REST WFB Tests
-      if: always()
-      # NOTE: Do NOT add coverage to PYPY CI runs https://github.com/tox-dev/tox/issues/2252
-      run: cd sophios/ && pytest tests/test_rest_wfb.py -k test_rest_wfb --cwl_runner cwltool
-    - name: PyTest Run ICT to CLT conversion Tests
-      if: always()
-      # NOTE: Do NOT add coverage to PYPY CI runs https://github.com/tox-dev/tox/issues/2252
-      run: cd sophios/ && pytest tests/test_ict_to_clt_conversion.py -k test_ict_to_clt
-    - name: PyTest Run update wfb payload Tests
-      if: always()
-      # NOTE: Do NOT add coverage to PYPY CI runs https://github.com/tox-dev/tox/issues/2252
-      run: cd sophios/ && pytest tests/test_fix_payload.py -k test_fix
-
-    # NOTE: The steps below are for repository_dispatch only. For all other steps, please insert above
-    # this comment.
-
-    # Need to store success value in environment variable, rather than using 'success()' in-line inside a run tag.
-    # Otherwise: "The workflow is not valid. ... Unrecognized function: 'success'."
-    # https://github.com/actions/runner/issues/834#issuecomment-889484016
-    - name: The workflow has succeeded
-      if: ${{ success() }}
-      run: |
-        echo 'workflow_success=true' >> "$GITHUB_ENV"
-
-    # It is not clear from the documentation, but the 'success()' and 'failure()' functions
-    # do not consider skipped steps. Specifically, the above 'success()' function will not
-    # affect the 'failure()' function here.
-    # https://docs.github.com/en/actions/learn-github-actions/expressions#status-check-function
-    - name: The workflow has failed
-      if: ${{ failure() }}
-      run: |
-        echo 'workflow_success=false' >> "$GITHUB_ENV"
-
-    # See token.md
-    - name: Generate a token
-      if: always()
-      id: generate_token
-      uses: tibdex/github-app-token@b62528385c34dbc9f38e5f4225ac829252d1ea92
-      with:
-        app_id: ${{ secrets.APP_ID }}
-        private_key: ${{ secrets.APP_PRIVATE_KEY }}
-
-    - name: Reply CI results to sender
-      # In case of failure, we still need to return the failure status to the original repository.
-      # Use 'always()' so this step runs even if there's a failure and use the bash if-statement
-      # to only run this step only if the repository_dispatch sends the signal.
-      # https://github.com/actions/runner/issues/834#issuecomment-907096707
-      # Use inputs.sender_repo to reply the original sender.
-      if: always()
-      uses: ./sophios/.github/my_actions/reply_sender/ # Must start with ./
-      with:
-        github_repository: ${{ github.repository }}
-        event_type: ${{ inputs.event_type }}
-        sender_repo: ${{ inputs.sender_repo }}
-        operating_system: self-hosted-linux
-        commit_message: ${{ inputs.commit_message }}
-        mm_workflows_ref: ${{ inputs.mm-workflows_ref }}
-        workflow_success: ${{ env.workflow_success }}
-        access_token: ${{ steps.generate_token.outputs.token }}
\ No newline at end of file
diff --git a/docker/Dockerfile_amazon_pypy b/docker/Dockerfile_amazon_pypy
deleted file mode 100644
index 2d4d8f32..00000000
--- a/docker/Dockerfile_amazon_pypy
+++ /dev/null
@@ -1,37 +0,0 @@
-# IMPORTANT: Use .. for the Build Context https://docs.docker.com/build/building/context/
-# i.e. `sudo docker build ... -f Dockerfile ..`
-FROM amazonlinux
-
-# Install conda / mamba
-RUN yum update -y && yum install -y wget
-
-RUN CONDA="Miniforge-pypy3-Linux-x86_64.sh" && \
-    wget --quiet https://github.com/conda-forge/miniforge/releases/latest/download/$CONDA && \
-    chmod +x $CONDA && \
-    ./$CONDA -b -p /Miniforge-pypy3 && \
-    rm -f $CONDA
-ENV PATH /Miniforge-pypy3/bin:$PATH
-
-# Install wic
-RUN yum install -y git
-
-COPY . /workflow-inference-compiler
-WORKDIR /workflow-inference-compiler
-
-#RUN conda create --name wic
-#RUN conda activate wic
-# The above command prints
-# CommandNotFoundError: Your shell has not been properly configured to use 'conda activate'.
-# It still prints that even if we run `conda init bash` first.
-# But this is a Docker image; we don't necessarily need to additionally isolate
-# wic within a conda environment. Let's just install it globally!
-RUN mamba env update --name base --file install/pypy.yml
-RUN mamba env update --name base --file install/system_deps.yml
-RUN mamba env update --name base --file install/pypy_docker_deps.yml
-RUN pip install -e ".[all_except_runner_src]"
-
-RUN mamba clean --all --yes
-RUN pip cache purge
-RUN yum clean all
-
-ADD docker/Dockerfile_amazon_pypy .
diff --git a/docker/Dockerfile_debian_pypy b/docker/Dockerfile_debian_pypy
deleted file mode 100644
index 251912ac..00000000
--- a/docker/Dockerfile_debian_pypy
+++ /dev/null
@@ -1,37 +0,0 @@
-# IMPORTANT: Use .. for the Build Context https://docs.docker.com/build/building/context/
-# i.e. `sudo docker build ... -f Dockerfile ..`
-FROM debian:stable-slim
-
-# Install conda / mamba
-RUN apt-get update -y && apt-get install -y wget
-
-RUN CONDA="Miniforge-pypy3-Linux-x86_64.sh" && \
-    wget --quiet https://github.com/conda-forge/miniforge/releases/latest/download/$CONDA && \
-    chmod +x $CONDA && \
-    ./$CONDA -b -p /Miniforge-pypy3 && \
-    rm -f $CONDA
-ENV PATH /Miniforge-pypy3/bin:$PATH
-
-# Install wic
-RUN apt-get install -y git
-
-COPY . /workflow-inference-compiler
-WORKDIR /workflow-inference-compiler
-
-#RUN conda create --name wic
-#RUN conda activate wic
-# The above command prints
-# CommandNotFoundError: Your shell has not been properly configured to use 'conda activate'.
-# It still prints that even if we run `conda init bash` first.
-# But this is a Docker image; we don't necessarily need to additionally isolate
-# wic within a conda environment. Let's just install it globally!
-RUN mamba env update --name base --file install/pypy.yml
-RUN mamba env update --name base --file install/system_deps.yml
-RUN mamba env update --name base --file install/pypy_docker_deps.yml
-RUN pip install -e ".[all_except_runner_src]"
-
-RUN mamba clean --all --yes
-RUN pip cache purge
-RUN apt-get clean
-
-ADD docker/Dockerfile_debian_pypy .
diff --git a/docker/Dockerfile_fedora_pypy b/docker/Dockerfile_fedora_pypy
deleted file mode 100644
index b67bad5c..00000000
--- a/docker/Dockerfile_fedora_pypy
+++ /dev/null
@@ -1,37 +0,0 @@
-# IMPORTANT: Use .. for the Build Context https://docs.docker.com/build/building/context/
-# i.e. `sudo docker build ... -f Dockerfile ..`
-FROM fedora
-
-# Install conda / mamba
-RUN yum update -y && yum install -y wget
-
-RUN CONDA="Miniforge-pypy3-Linux-x86_64.sh" && \
-    wget --quiet https://github.com/conda-forge/miniforge/releases/latest/download/$CONDA && \
-    chmod +x $CONDA && \
-    ./$CONDA -b -p /Miniforge-pypy3 && \
-    rm -f $CONDA
-ENV PATH /Miniforge-pypy3/bin:$PATH
-
-# Install wic
-RUN yum install -y git
-
-COPY . /workflow-inference-compiler
-WORKDIR /workflow-inference-compiler
-
-#RUN conda create --name wic
-#RUN conda activate wic
-# The above command prints
-# CommandNotFoundError: Your shell has not been properly configured to use 'conda activate'.
-# It still prints that even if we run `conda init bash` first.
-# But this is a Docker image; we don't necessarily need to additionally isolate
-# wic within a conda environment. Let's just install it globally!
-RUN mamba env update --name base --file install/pypy.yml
-RUN mamba env update --name base --file install/system_deps.yml
-RUN mamba env update --name base --file install/pypy_docker_deps.yml
-RUN pip install -e ".[all_except_runner_src]"
-
-RUN mamba clean --all --yes
-RUN pip cache purge
-RUN yum clean all
-
-ADD docker/Dockerfile_fedora_pypy .
diff --git a/docker/Dockerfile_redhat_pypy b/docker/Dockerfile_redhat_pypy
deleted file mode 100644
index 9e2a5556..00000000
--- a/docker/Dockerfile_redhat_pypy
+++ /dev/null
@@ -1,38 +0,0 @@
-# IMPORTANT: Use .. for the Build Context https://docs.docker.com/build/building/context/
-# i.e. `sudo docker build ... -f Dockerfile ..`
-FROM redhat/ubi9-minimal
-# NOTE: the package manager is microdnf, not dnf
-
-# Install conda / mamba
-RUN microdnf update -y && microdnf install -y wget
-
-RUN CONDA="Miniforge-pypy3-Linux-x86_64.sh" && \
-    wget --quiet https://github.com/conda-forge/miniforge/releases/latest/download/$CONDA && \
-    chmod +x $CONDA && \
-    ./$CONDA -b -p /Miniforge-pypy3 && \
-    rm -f $CONDA
-ENV PATH /Miniforge-pypy3/bin:$PATH
-
-# Install wic
-RUN microdnf install -y git
-
-COPY . /workflow-inference-compiler
-WORKDIR /workflow-inference-compiler
-
-#RUN conda create --name wic
-#RUN conda activate wic
-# The above command prints
-# CommandNotFoundError: Your shell has not been properly configured to use 'conda activate'.
-# It still prints that even if we run `conda init bash` first.
-# But this is a Docker image; we don't necessarily need to additionally isolate
-# wic within a conda environment. Let's just install it globally!
-RUN mamba env update --name base --file install/pypy.yml
-RUN mamba env update --name base --file install/system_deps.yml
-RUN mamba env update --name base --file install/pypy_docker_deps.yml
-RUN pip install -e ".[all_except_runner_src]"
-
-RUN mamba clean --all --yes
-RUN pip cache purge
-RUN microdnf clean all
-
-ADD docker/Dockerfile_redhat_pypy .
diff --git a/docker/Dockerfile_ubuntu_pypy b/docker/Dockerfile_ubuntu_pypy
deleted file mode 100644
index 27ab9c3e..00000000
--- a/docker/Dockerfile_ubuntu_pypy
+++ /dev/null
@@ -1,37 +0,0 @@
-# IMPORTANT: Use .. for the Build Context https://docs.docker.com/build/building/context/
-# i.e. `sudo docker build ... -f Dockerfile ..`
-FROM ubuntu
-
-# Install conda / mamba
-RUN apt-get update -y && apt-get install -y wget
-
-RUN CONDA="Miniforge-pypy3-Linux-x86_64.sh" && \
-    wget --quiet https://github.com/conda-forge/miniforge/releases/latest/download/$CONDA && \
-    chmod +x $CONDA && \
-    ./$CONDA -b -p /Miniforge-pypy3 && \
-    rm -f $CONDA
-ENV PATH /Miniforge-pypy3/bin:$PATH
-
-# Install wic
-RUN apt-get install -y git
-
-COPY . /workflow-inference-compiler
-WORKDIR /workflow-inference-compiler
-
-#RUN conda create --name wic
-#RUN conda activate wic
-# The above command prints
-# CommandNotFoundError: Your shell has not been properly configured to use 'conda activate'.
-# It still prints that even if we run `conda init bash` first.
-# But this is a Docker image; we don't necessarily need to additionally isolate
-# wic within a conda environment. Let's just install it globally!
-RUN mamba env update --name base --file install/pypy.yml
-RUN mamba env update --name base --file install/system_deps.yml
-RUN mamba env update --name base --file install/pypy_docker_deps.yml
-RUN pip install -e ".[all_except_runner_src]"
-
-RUN mamba clean --all --yes
-RUN pip cache purge
-RUN apt-get clean
-
-ADD docker/Dockerfile_ubuntu_pypy .
diff --git a/examples/scripts/Dockerfile_check_linear_fit b/examples/scripts/Dockerfile_check_linear_fit
index b611ff69..ec80bd96 100644
--- a/examples/scripts/Dockerfile_check_linear_fit
+++ b/examples/scripts/Dockerfile_check_linear_fit
@@ -1,4 +1,4 @@
-FROM condaforge/Miniforge-pypy3
+FROM condaforge/miniforge3

 RUN mamba install -c conda-forge numpy scipy

diff --git a/examples/scripts/Dockerfile_scatter_plot b/examples/scripts/Dockerfile_scatter_plot
index c3072ba5..521ae728 100644
--- a/examples/scripts/Dockerfile_scatter_plot
+++ b/examples/scripts/Dockerfile_scatter_plot
@@ -1,4 +1,4 @@
-FROM condaforge/Miniforge-pypy3
+FROM condaforge/miniforge3

 RUN mamba install -c conda-forge matplotlib

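
Side note on the docker_build_plugins.yml hunk above: adding `workflow_dispatch:` lets the plugin image builds be triggered manually in addition to the Tuesday cron schedule. A minimal sketch using the GitHub CLI, assuming you are authenticated via `gh auth login` and that the default branch is `master` (adjust `--ref` as needed):

    # Manually dispatch the plugins Docker build workflow
    gh workflow run docker_build_plugins.yml --ref master
    # Interactively pick and follow the run that was just started
    gh run watch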