Trigger score-submission: CI job on pull requests (workflow file for run #99)
name: Score submission
on:
  push:
  pull_request:
  workflow_call:
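    # The outputs below are exposed to any workflow that invokes this one
    # via workflow_call.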
    outputs:
      run-scorer:
        description: 'Whether the pr_config.json says to run the scorer.'
        value: ${{ jobs.read-pr-config.outputs.run-scorer }}
      submission-main:
        description: 'SUBMISSION_MAIN in the pr_config.json.'
        value: ${{ jobs.read-pr-config.outputs.submission-main }}
jobs:
  read-pr-config:
    name: Read pull request config
    runs-on: ubuntu-latest
    outputs:
      contributor: ${{ steps.save-env.outputs.contributor }} # GitHub username of the committer or pull request author
      run-scorer: ${{ steps.save-env.outputs.run-scorer }} # Boolean controlling whether the run-scorer job runs
      requirements-filename: ${{ steps.save-env.outputs.requirements-filename }} # filename of the submission's pip requirements file (usually requirements.txt)
      requirements-path: ${{ steps.save-env.outputs.requirements-path }} # path to the parent directory that contains requirements-filename
      submission-main: ${{ steps.save-env.outputs.submission-main }} # SUBMISSION_MAIN of the pr_config.json
      submission-main-filename: ${{ steps.save-env.outputs.submission-main-filename }} # filename of the file that run-scorer should execute
      submission-main-path: ${{ steps.save-env.outputs.submission-main-path }} # path to the parent directory that contains submission-main-filename
      test-sets-for-scoring: ${{ steps.save-env.outputs.test-sets-for-scoring }} # comma-separated list of test set names to use for scoring (e.g. case1,case2,case3a)
    steps:
      - name: Install Python 3.10
        uses: actions/setup-python@v4
        with:
          python-version: '3.10'
      - name: Checkout
        uses: actions/checkout@v3
      - name: Install ivcurves
        run: pip3 install .
      - name: Save pull request contributor username
        id: save-pr-author
        run: echo 'contributor=${{ github.event.pull_request.user.login }}' >> $GITHUB_OUTPUT
      - name: Save GitHub actor username
        id: save-github-actor
        if: ${{ steps.save-pr-author.outputs.contributor == '' }}
        run: echo 'contributor=${{ github.actor }}' >> $GITHUB_OUTPUT
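      # Exactly one of the two steps above sets a contributor output:
      # save-pr-author on pull request events, save-github-actor otherwise.
      # join(steps.*.outputs.contributor, '') below therefore resolves to
      # the single non-empty value.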
      - name: Set the contributor environment variable
        env:
          CONTRIBUTOR: ${{ join(steps.*.outputs.contributor, '') }}
        run: echo 'CONTRIBUTOR=${{ env.CONTRIBUTOR }}' >> ${{ github.env }}
      - name: Set pr_config.json entries as environment variables
        run: python3 .github/workflows/utils/print_json_as_env.py
             submissions/${{ env.CONTRIBUTOR }}/pr_config.json
             --validate-pr-config
             --split-path-variables
             >> ${{ github.env }}
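      # A plausible pr_config.json, inferred from the environment variables
      # consumed below (the authoritative schema is whatever
      # --validate-pr-config enforces, so treat these keys as assumptions):
      #   {
      #     "RUN_SCORER": true,
      #     "REQUIREMENTS": "submission/requirements.txt",
      #     "SUBMISSION_MAIN": "submission/submission_main.py",
      #     "TEST_SETS_FOR_SCORING": "case1,case2"
      #   }
      # --split-path-variables presumably derives the *_PATH and *_FILENAME
      # variables from each path-valued entry.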
      - name: Save scorer environment to job output
        id: save-env
        run: |
          echo 'contributor=${{ env.CONTRIBUTOR }}' >> $GITHUB_OUTPUT
          echo 'run-scorer=${{ env.RUN_SCORER }}' >> $GITHUB_OUTPUT
          echo 'requirements-filename=${{ env.REQUIREMENTS_FILENAME }}' >> $GITHUB_OUTPUT
          echo 'requirements-path=${{ env.REQUIREMENTS_PATH }}' >> $GITHUB_OUTPUT
          echo 'submission-main=${{ env.SUBMISSION_MAIN }}' >> $GITHUB_OUTPUT
          echo 'submission-main-filename=${{ env.SUBMISSION_MAIN_FILENAME }}' >> $GITHUB_OUTPUT
          echo 'submission-main-path=${{ env.SUBMISSION_MAIN_PATH }}' >> $GITHUB_OUTPUT
          echo 'test-sets-for-scoring=${{ env.TEST_SETS_FOR_SCORING }}' >> $GITHUB_OUTPUT
  run-scorer:
    name: Run scorer
    runs-on: ubuntu-latest
    needs: read-pr-config
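    # Job outputs cross job boundaries as strings, so the boolean from
    # pr_config.json is compared against the string 'true' here.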
    if: ${{ needs.read-pr-config.outputs.run-scorer == 'true' }}
    env:
      CONTRIBUTOR: ${{ needs.read-pr-config.outputs.contributor }}
      REQUIREMENTS_FILENAME: ${{ needs.read-pr-config.outputs.requirements-filename }}
      REQUIREMENTS_PATH: ${{ needs.read-pr-config.outputs.requirements-path }}
      SUBMISSION_MAIN_FILENAME: ${{ needs.read-pr-config.outputs.submission-main-filename }}
      SUBMISSION_MAIN_PATH: ${{ needs.read-pr-config.outputs.submission-main-path }}
      TEST_SETS_FOR_SCORING: ${{ needs.read-pr-config.outputs.test-sets-for-scoring }}
    steps:
      - name: Checkout
        uses: actions/checkout@v3
      - name: Install Python 3.10
        uses: actions/setup-python@v4
        with:
          python-version: '3.10'
      - name: Install ivcurves
        run: pip3 install .
      - name: Install submission requirements
        working-directory: submissions/${{ env.CONTRIBUTOR }}/${{ env.REQUIREMENTS_PATH }}
        run: pip3 install -r "${{ env.REQUIREMENTS_FILENAME }}"
      - name: Run submission
        working-directory: submissions/${{ env.CONTRIBUTOR }}/${{ env.SUBMISSION_MAIN_PATH }}
        env:
          SUBMISSION_TIMEOUT: 2 # minutes
          # timeout sends SIGTERM after SUBMISSION_TIMEOUT minutes.
          # If the submission is still running 1 second later, -k 1s sends SIGKILL.
        run: timeout -k 1s ${{ env.SUBMISSION_TIMEOUT }}m python3 "${{ env.SUBMISSION_MAIN_FILENAME }}"
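        # GNU timeout exits with status 124 when the time limit is hit,
        # which fails this step and skips the scoring steps below.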
      - name: Run scorer
        working-directory: submissions/${{ env.CONTRIBUTOR }}
        # This step fails if the submission produced no CSV output, or if its
        # CSV files contain very inaccurate results (no intersection can be
        # found between the true and fitted curves).
        run: python3 ../../ivcurves/compare_curves.py "${{ env.SUBMISSION_MAIN_PATH }}" --csv-output-path . --test-sets "${{ env.TEST_SETS_FOR_SCORING }}"
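        # --csv-output-path . writes overall_scores.csv into
        # submissions/${{ env.CONTRIBUTOR }}, which is where the artifact
        # upload step below picks it up.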
      - name: Validate scores
        run: python3 .github/workflows/utils/record_scores.py
             --overall-scores-path overall_scores.csv
             --no-save-database
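        # --no-save-database appears to make this a validation-only pass:
        # the scores are checked without updating the stored score database.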
      - name: Save scores to artifacts
        uses: actions/upload-artifact@v3
        with:
          name: overall_scores.csv
          path: submissions/${{ env.CONTRIBUTOR }}/overall_scores.csv
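# Sketch of a caller consuming the workflow_call outputs declared at the top.
# The file name score-submission.yml and the job names here are assumptions,
# not something this file defines:
#
#   jobs:
#     score:
#       uses: ./.github/workflows/score-submission.yml
#     report:
#       needs: score
#       if: needs.score.outputs.run-scorer == 'true'
#       runs-on: ubuntu-latest
#       steps:
#         - run: echo "Scored ${{ needs.score.outputs.submission-main }}"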