diff --git a/ci/actions/run_tests/Dockerfile b/.github/actions/run_tests/Dockerfile
similarity index 100%
rename from ci/actions/run_tests/Dockerfile
rename to .github/actions/run_tests/Dockerfile
diff --git a/.github/actions/run_tests/Dockerfile.gempak b/.github/actions/run_tests/Dockerfile.gempak
new file mode 100644
index 0000000000..5f9691115c
--- /dev/null
+++ b/.github/actions/run_tests/Dockerfile.gempak
@@ -0,0 +1,11 @@
+ARG METPLUS_ENV_TAG=metplus_base
+ARG METPLUS_IMG_TAG=develop
+
+FROM dtcenter/metplus-envs:${METPLUS_ENV_TAG} as env
+
+ARG METPLUS_IMG_TAG=develop
+FROM dtcenter/metplus-dev:${METPLUS_IMG_TAG}
+
+COPY --from=env /usr/lib/jvm/jre /usr/lib/jvm/jre/
+COPY --from=env /usr/share/javazi-1.8/tzdb.dat /usr/share/javazi-1.8/
+COPY --from=env /data/input/GempakToCF.jar /data/input/GempakToCF.jar
diff --git a/.github/actions/run_tests/Dockerfile.run b/.github/actions/run_tests/Dockerfile.run
new file mode 100644
index 0000000000..7a2772f6f1
--- /dev/null
+++ b/.github/actions/run_tests/Dockerfile.run
@@ -0,0 +1,11 @@
+ARG METPLUS_ENV_TAG=metplus_base
+ARG METPLUS_IMG_TAG=develop
+
+FROM dtcenter/metplus-envs:${METPLUS_ENV_TAG} as env
+
+ARG METPLUS_IMG_TAG=develop
+FROM dtcenter/metplus-dev:${METPLUS_IMG_TAG}
+
+COPY --from=env /usr/local/envs /usr/local/envs/
+
+COPY --from=env /usr/local/bin/conda /usr/local/bin/conda
diff --git a/ci/actions/run_tests/action.yml b/.github/actions/run_tests/action.yml
similarity index 66%
rename from ci/actions/run_tests/action.yml
rename to .github/actions/run_tests/action.yml
index 0073edb84d..902fe8d0b3 100644
--- a/ci/actions/run_tests/action.yml
+++ b/.github/actions/run_tests/action.yml
@@ -6,14 +6,9 @@ inputs:
   categories:
     description: 'Use case category or categories to run (separate by comma)'
     required: true
-  run_diff:
-    description: 'Obtain truth data and run diffing logic if true'
-    required: false
-    default: false
 
 runs:
   using: "docker"
   image: "Dockerfile"
   args:
     - ${{ inputs.categories }}
-    - ${{ inputs.run_diff }}
diff --git a/.github/actions/run_tests/entrypoint.sh b/.github/actions/run_tests/entrypoint.sh
new file mode 100644
index 0000000000..53692ab10f
--- /dev/null
+++ b/.github/actions/run_tests/entrypoint.sh
@@ -0,0 +1,92 @@
+#! /bin/bash
+
+# The repo source code is cloned to $RUNNER_WORKSPACE/$REPO_NAME
+# Set up the workspace path to that for easier access later
+REPO_NAME=$(basename $RUNNER_WORKSPACE)
+WS_PATH=$RUNNER_WORKSPACE/$REPO_NAME
+
+# set CI jobs directory variable to easily move it
+CI_JOBS_DIR=.github/jobs
+
+source ${GITHUB_WORKSPACE}/${CI_JOBS_DIR}/bash_functions.sh
+
+# get branch name for push or pull request events
+# add -pull_request if pull request event to keep them separate
+branch_name=`${GITHUB_WORKSPACE}/${CI_JOBS_DIR}/print_branch_name.py`
+if [ "$GITHUB_EVENT_NAME" == "pull_request" ]; then
+  branch_name=${branch_name}-pull_request
+fi
+
+# try to pull image from DockerHub
+DOCKERHUBTAG=dtcenter/metplus-dev:${branch_name}
+time_command docker pull $DOCKERHUBTAG
+
+# if unsuccessful (i.e. pull request from a fork)
+# then build image locally
+docker inspect --type=image $DOCKERHUBTAG > /dev/null
+if [ $? != 0 ]; then
+  # if docker pull fails, build locally
+  echo docker pull failed. Building Docker image locally...
+  ${GITHUB_WORKSPACE}/${CI_JOBS_DIR}/docker_setup.sh
+fi
+
+#
+# running unit tests (pytests)
+#
+if [ "$INPUT_CATEGORIES" == "pytests" ]; then
+  export METPLUS_ENV_TAG="pytest"
+  export METPLUS_IMG_TAG=${branch_name}
+  echo METPLUS_ENV_TAG=${METPLUS_ENV_TAG}
+  echo METPLUS_IMG_TAG=${METPLUS_IMG_TAG}
+
+  export RUN_TAG=metplus-run-env
+
+  # use BuildKit to build image
+  export DOCKER_BUILDKIT=1
+
+  start_seconds=$SECONDS
+
+  # build an image with the pytest conda env and the METplus branch image
+  # Note: adding --build-arg without any value tells docker to
+  # use the value from the local environment (export METPLUS_IMG_TAG)
+  time_command docker build -t $RUN_TAG \
+    --build-arg METPLUS_IMG_TAG \
+    --build-arg METPLUS_ENV_TAG \
+    -f .github/actions/run_tests/Dockerfile.run \
+    .
+
+  echo Running Pytests
+  command="export METPLUS_PYTEST_HOST=docker; cd internal_tests/pytests; /usr/local/envs/pytest/bin/pytest -vv --cov=../../metplus"
+  time_command docker run -v $WS_PATH:$GITHUB_WORKSPACE --workdir $GITHUB_WORKSPACE $RUN_TAG bash -c "$command"
+  exit $?
+fi
+
+#
+# running use case tests
+#
+
+# split apart use case category and subset list from input
+CATEGORIES=`echo $INPUT_CATEGORIES | awk -F: '{print $1}'`
+SUBSETLIST=`echo $INPUT_CATEGORIES | awk -F: '{print $2}'`
+
+# run all cases if no subset list specified
+if [ -z "${SUBSETLIST}" ]; then
+  SUBSETLIST="all"
+fi
+
+# get METviewer if used in any use cases
+all_requirements=`./${CI_JOBS_DIR}/get_requirements.py ${CATEGORIES} ${SUBSETLIST}`
+echo All requirements: $all_requirements
+NETWORK_ARG=""
+if [[ "$all_requirements" =~ .*"metviewer".* ]]; then
+  echo "Setting up METviewer"
+  ${GITHUB_WORKSPACE}/${CI_JOBS_DIR}/get_metviewer.sh
+  NETWORK_ARG=--network="container:mysql_mv"
+fi
+
+# export network arg so it can be read by setup_and_run_use_cases.py
+export NETWORK_ARG
+
+# call script to loop over use case groups to
+# get data volumes, set up run image, and run use cases
+./${CI_JOBS_DIR}/setup_and_run_use_cases.py ${CATEGORIES} ${SUBSETLIST}
diff --git a/.github/jobs/bash_functions.sh b/.github/jobs/bash_functions.sh
new file mode 100755
index 0000000000..c018add863
--- /dev/null
+++ b/.github/jobs/bash_functions.sh
@@ -0,0 +1,16 @@
+#! /bin/bash
+
+# utility function to run a command and log the time it took to run
+function time_command {
+  local start_seconds=$SECONDS
+  echo "RUNNING: $*"
+  "$@"
+  local error=$?
+
+  local duration=$(( SECONDS - start_seconds ))
+  echo "TIMING: Command took `printf '%02d' $(($duration / 60))`:`printf '%02d' $(($duration % 60))` (MM:SS): '$*'"
+  if [ ${error} -ne 0 ]; then
+    echo "ERROR: '$*' exited with status = ${error}"
+  fi
+  return $error
+}
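For reference, time_command wraps an arbitrary command: it echoes the command, runs it, reports the elapsed time as MM:SS, and preserves the command's exit status. A minimal sketch of sourcing and using it outside of CI (the image tag here is illustrative):

    #! /bin/bash
    source .github/jobs/bash_functions.sh

    # prints a RUNNING line, runs the pull, then prints a TIMING line
    time_command docker pull dtcenter/metplus-dev:develop

    # a failing command additionally prints an ERROR line and
    # returns its non-zero status, so it can be chained with ||
    time_command false || echo "exit status was preserved"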
diff --git a/.github/jobs/build_documentation.sh b/.github/jobs/build_documentation.sh
index 11394bda7d..3306facee0 100755
--- a/.github/jobs/build_documentation.sh
+++ b/.github/jobs/build_documentation.sh
@@ -22,6 +22,9 @@ if [ -s $warning_file ]; then
     cp -r ${DOCS_DIR}/_build/warnings.log artifact/doc_warnings.log
     cp artifact/doc_warnings.log artifact/documentation
     echo ERROR: Warnings/Errors found in documentation
+    echo Summary:
+    grep WARNING ${DOCS_DIR}/_build/warnings.log
+    grep ERROR ${DOCS_DIR}/_build/warnings.log
    echo Review this log file or download documentation_warnings.log artifact
    exit 1
 fi
diff --git a/.github/jobs/copy_error_logs.py b/.github/jobs/copy_error_logs.py
new file mode 100755
index 0000000000..9f3cb45c7f
--- /dev/null
+++ b/.github/jobs/copy_error_logs.py
@@ -0,0 +1,47 @@
+#! /usr/bin/env python3
+
+################################################################################
+# Used in GitHub Actions (in .github/workflows/testing.yml) to copy logs for
+# use cases that reported errors to another directory
+
+import os
+import sys
+import shutil
+
+def main(output_data_dir, error_logs_dir):
+    """! Copy log output to error log directory if any use case failed """
+    for use_case_dir in os.listdir(output_data_dir):
+        log_dir = os.path.join(output_data_dir,
+                               use_case_dir,
+                               'logs')
+        if not os.path.isdir(log_dir):
+            continue
+
+        # check if there are errors in the metplus.log file and
+        # only copy directory if there are any errors
+        metplus_log = os.path.join(log_dir, 'metplus.log')
+        found_errors = False
+        with open(metplus_log, 'r') as file_handle:
+            if 'ERROR:' in file_handle.read():
+                found_errors = True
+
+        if not found_errors:
+            continue
+
+        output_dir = os.path.join(error_logs_dir,
+                                  use_case_dir)
+        log_files = os.listdir(log_dir)
+        for log_file in log_files:
+            log_path = os.path.join(log_dir, log_file)
+            output_path = os.path.join(output_dir, log_file)
+            print(f"Copying {log_path} to {output_path}")
+            # create output directory if it doesn't exist
+            output_dir = os.path.dirname(output_path)
+            if not os.path.exists(output_dir):
+                os.makedirs(output_dir)
+            shutil.copyfile(log_path, output_path)
+
+if __name__ == '__main__':
+    output_data_dir = sys.argv[1]
+    error_logs_dir = sys.argv[2]
+    main(output_data_dir, error_logs_dir)
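copy_error_logs.py takes two positional arguments: the top-level output directory to scan and the destination directory for logs from failed use cases. A hedged example of running it by hand the same way the workflow does (paths are illustrative):

    # copy <output>/<use_case>/logs/* for every use case whose
    # metplus.log contains an 'ERROR:' line
    .github/jobs/copy_error_logs.py \
        $RUNNER_WORKSPACE/output \
        artifact/error_logs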
diff --git a/ci/jobs/create_output_data_volumes.sh b/.github/jobs/create_output_data_volumes.sh
similarity index 95%
rename from ci/jobs/create_output_data_volumes.sh
rename to .github/jobs/create_output_data_volumes.sh
index 533c00fa6f..c63b7a675c 100755
--- a/ci/jobs/create_output_data_volumes.sh
+++ b/.github/jobs/create_output_data_volumes.sh
@@ -9,7 +9,7 @@ if [ "$GITHUB_EVENT_NAME" == "pull_request" ]; then
     exit 0
 fi
 
-branch_name=`${GITHUB_WORKSPACE}/ci/jobs/print_branch_name.py`
+branch_name=`${GITHUB_WORKSPACE}/.github/jobs/print_branch_name.py`
 
 if [ "${branch_name: -4}" != "-ref" ]; then
     echo Branch ${branch_name} is not a reference branch, so skip this step
diff --git a/ci/jobs/docker_setup.sh b/.github/jobs/docker_setup.sh
similarity index 54%
rename from ci/jobs/docker_setup.sh
rename to .github/jobs/docker_setup.sh
index 0ae5dde212..acd97a1c38 100755
--- a/ci/jobs/docker_setup.sh
+++ b/.github/jobs/docker_setup.sh
@@ -5,71 +5,51 @@
 # used by the use case tests.
 # If GitHub Actions run is triggered by a fork that does not have
 # permissions to push Docker images to DockerHub, the script is
-# is also called (in ci/actions/run_tests/entrypoint.sh) to
+# also called (in .github/actions/run_tests/entrypoint.sh) to
 # build the Docker image to use for each use case test group
 
-branch_name=`${GITHUB_WORKSPACE}/ci/jobs/print_branch_name.py`
+source ${GITHUB_WORKSPACE}/.github/jobs/bash_functions.sh
+
+branch_name=`${GITHUB_WORKSPACE}/.github/jobs/print_branch_name.py`
 
 if [ "$GITHUB_EVENT_NAME" == "pull_request" ]; then
   branch_name=${branch_name}-pull_request
 fi
 
-#DOCKERHUB_TAG=dtcenter/metplus-dev:${DOCKER_IMAGE}
 DOCKERHUB_TAG=dtcenter/metplus-dev:${branch_name}
 
 echo Get Docker image: ${DOCKERHUB_TAG}
-echo 'doing docker build'
-# Note: adding --build-arg without any value tells docker to
-# use value from local environment (export DO_GIT_CLONE)
 
-echo Timing docker pull...
+# can't use time_command function for this command because it contains redirection
start_seconds=$SECONDS
 
 # pipe result to true because it will fail if image has not yet been built
 docker pull ${DOCKERHUB_TAG} &> /dev/null || true
 
 duration=$(( SECONDS - start_seconds ))
-echo TIMING docker_setup
-echo "Docker pull took $(($duration / 60)) minutes and $(($duration % 60)) seconds."
-
-echo Timing docker build with --cache-from...
-start_seconds=$SECONDS
+echo "TIMING: docker pull ${DOCKERHUB_TAG} took `printf '%02d' $(($duration / 60))`:`printf '%02d' $(($duration % 60))` (MM:SS)"
 
 # set DOCKERFILE_PATH that is used by docker hook script get_met_version
 export DOCKERFILE_PATH=${GITHUB_WORKSPACE}/ci/docker/Dockerfile
 
 MET_TAG=`${GITHUB_WORKSPACE}/ci/docker/hooks/get_met_version`
 
-echo Running docker build with MET_TAG=$MET_TAG
-docker build --pull --cache-from ${DOCKERHUB_TAG} \
+echo Setting DOCKER_BUILDKIT=1
+export DOCKER_BUILDKIT=1
+
+time_command docker build --pull --cache-from ${DOCKERHUB_TAG} \
 -t ${DOCKERHUB_TAG} \
---build-arg OBTAIN_SOURCE_CODE='copy' \
+--build-arg OBTAIN_SOURCE_CODE=copy \
 --build-arg MET_TAG=$MET_TAG \
 -f ${DOCKERFILE_PATH} ${GITHUB_WORKSPACE}
 
-duration=$(( SECONDS - start_seconds ))
-echo TIMING docker_setup
-echo "Docker build took $(($duration / 60)) minutes and $(($duration % 60)) seconds."
-echo
-
 # skip docker push if credentials are not set
 if [ -z ${DOCKER_USERNAME+x} ] || [ -z ${DOCKER_PASSWORD+x} ]; then
     echo "DockerHub credentials not set. Skipping docker push"
     exit 0
 fi
 
-echo Timing docker push...
-start_seconds=$SECONDS
-
 echo "$DOCKER_PASSWORD" | docker login --username "$DOCKER_USERNAME" --password-stdin
 
-docker push ${DOCKERHUB_TAG}
+time_command docker push ${DOCKERHUB_TAG}
 
-duration=$(( SECONDS - start_seconds ))
-echo TIMING docker_setup
-echo "Docker push took $(($duration / 60)) minutes and $(($duration % 60)) seconds."
-echo
-
-echo DOCKER IMAGES after DOCKER_SETUP
+echo Running docker images
 docker images
-echo
-
-echo 'done'
diff --git a/ci/jobs/docker_update_data_volumes.py b/.github/jobs/docker_update_data_volumes.py
similarity index 100%
rename from ci/jobs/docker_update_data_volumes.py
rename to .github/jobs/docker_update_data_volumes.py
diff --git a/ci/jobs/docker_utils.py b/.github/jobs/docker_utils.py
similarity index 100%
rename from ci/jobs/docker_utils.py
rename to .github/jobs/docker_utils.py
diff --git a/ci/jobs/get_artifact_name.sh b/.github/jobs/get_artifact_name.sh
similarity index 88%
rename from ci/jobs/get_artifact_name.sh
rename to .github/jobs/get_artifact_name.sh
index 8bddace867..518dd95206 100755
--- a/ci/jobs/get_artifact_name.sh
+++ b/.github/jobs/get_artifact_name.sh
@@ -1,7 +1,7 @@
 #! /bin/bash
 
 # Run by GitHub Actions (in .github/workflows/testing.yml and
-# ci/actions/run_tests/entrypoint.sh) to get properly
+# .github/actions/run_tests/entrypoint.sh) to get properly
 # formatted artifact name for use case output
 
 artifact_name=$1
diff --git a/ci/jobs/get_data_volumes.py b/.github/jobs/get_data_volumes.py
similarity index 77%
rename from ci/jobs/get_data_volumes.py
rename to .github/jobs/get_data_volumes.py
index be1ffbba25..d650398a8a 100755
--- a/ci/jobs/get_data_volumes.py
+++ b/.github/jobs/get_data_volumes.py
@@ -1,6 +1,6 @@
 #! /usr/bin/env python3
 
-# Run by GitHub Actions (in ci/actions/run_tests/entrypoint.sh)
+# Run by GitHub Actions (in .github/actions/run_tests/entrypoint.sh)
 # to obtain Docker data volumes for input and output data, create
 # an alias name for the volumes, and generate --volumes-from arguments
 # that are added to the Docker run command to make data available
@@ -13,23 +13,25 @@
 from docker_utils import docker_get_volumes_last_updated, get_branch_name
 from docker_utils import get_data_repo, DOCKERHUB_METPLUS_DATA_DEV
 
-sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__),
+def main(args):
+    # get METplus version
+    version_file = os.path.abspath(os.path.join(os.path.dirname(__file__),
                                                 os.pardir,
-                                                os.pardir,)))
-
-from metplus import __version__
-
-# METPLUS_VERSION should be set to develop or a release version, i.e. X.Y
-# if version is set to X.Y without -betaZ or -dev, use that version
-# otherwise use develop
-if len(__version__.split('-')) == 1:
-    # only get first 2 numbers from version, i.e. X.Y.Z will use X.Y
-    METPLUS_VERSION = '.'.join(__version__.split('.')[:2])
-
-else:
-    METPLUS_VERSION = 'develop'
+                                                os.pardir,
+                                                'metplus',
+                                                'VERSION'))
+    with open(version_file, 'r') as file_handle:
+        version = file_handle.read().strip()
+
+    # version should be set to develop or a release version, i.e. X.Y
+    # if version is set to X.Y without -betaZ or -dev, use that version
+    # otherwise use develop
+    if len(version.split('-')) == 1:
+        # only get first 2 numbers from version, i.e. X.Y.Z will use X.Y
+        metplus_version = '.'.join(version.split('.')[:2])
+    else:
+        metplus_version = 'develop'
 
-def main(args):
     volume_list = []
 
     # get the name of the current branch
@@ -44,7 +46,7 @@ def main(args):
 
     # if running development version, use metplus-data-dev
     # if released version, i.e. X.Y.Z, use metplus-data
-    data_repo = get_data_repo(METPLUS_VERSION)
+    data_repo = get_data_repo(metplus_version)
 
     if branch_name.startswith('main_v'):
         branch_name = branch_name[5:]
@@ -60,7 +62,7 @@ def main(args):
 
         # if getting all input data, set volume name to METplus version
         if model_app_name == 'all_metplus_data':
-            volume_name = METPLUS_VERSION
+            volume_name = metplus_version
 
         # requested data volume is output data
        # should match output-{pr_dest_branch}-use_cases_{dataset_id}
@@ -82,11 +84,11 @@ def main(args):
 
         # if using development version and branch data volume is available
         # use it, otherwise use develop version of data volume
-        elif (METPLUS_VERSION == 'develop' and
+        elif (metplus_version == 'develop' and
               f'{branch_name}-{model_app_name}' in available_volumes):
             volume_name = f'{branch_name}-{model_app_name}'
         else:
-            volume_name = f'{METPLUS_VERSION}-{model_app_name}'
+            volume_name = f'{metplus_version}-{model_app_name}'
 
         cmd = f'docker pull {repo_to_use}:{volume_name}'
         ret = subprocess.run(shlex.split(cmd), stdout=subprocess.DEVNULL)
@@ -110,6 +112,7 @@
 if __name__ == "__main__":
     # split up command line args that have commas before passing into main
     args = []
+
     for arg in sys.argv[1:]:
         args.extend(arg.split(','))
     out = main(args)
diff --git a/ci/jobs/python_requirements/get_metviewer.sh b/.github/jobs/get_metviewer.sh
similarity index 100%
rename from ci/jobs/python_requirements/get_metviewer.sh
rename to .github/jobs/get_metviewer.sh
diff --git a/ci/jobs/get_requirements.py b/.github/jobs/get_requirements.py
similarity index 94%
rename from ci/jobs/get_requirements.py
rename to .github/jobs/get_requirements.py
index 4b13b85603..d0b91cd717 100755
--- a/ci/jobs/get_requirements.py
+++ b/.github/jobs/get_requirements.py
@@ -1,6 +1,6 @@
 #! /usr/bin/env python3
 
-# Used in GitHub Actions (in ci/actions/run_tests/entrypoint.sh)
+# Used in GitHub Actions (in .github/actions/run_tests/entrypoint.sh)
 # to obtain list of requirements from use case group
 
 import os
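get_data_volumes.py (above) prints --volumes-from arguments that the run scripts splice into their docker run commands. A hand-rolled sketch of the same data volume pattern, assuming illustrative image, container, and path names:

    # pull a data volume image and create a stopped container from it
    docker pull dtcenter/metplus-data:develop-met_tool_wrapper
    docker create --name met_tool_wrapper \
        dtcenter/metplus-data:develop-met_tool_wrapper

    # mount that container's data volumes into the run container
    docker run --rm --volumes-from met_tool_wrapper \
        dtcenter/metplus-dev:develop ls /data/input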
diff --git a/.github/jobs/get_use_case_commands.py b/.github/jobs/get_use_case_commands.py
new file mode 100755
index 0000000000..2324e8cd62
--- /dev/null
+++ b/.github/jobs/get_use_case_commands.py
@@ -0,0 +1,189 @@
+#! /usr/bin/env python3
+
+# Script to obtain commands needed to run use case groups including
+# scripts or pip commands to obtain external Python dependencies
+# Run by GitHub Actions (in .github/jobs/run_use_cases.py) to run use case tests
+
+import sys
+import os
+
+# add METplus directory to sys path so the test suite can be found
+USE_CASES_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__),
+                                             os.pardir,
+                                             os.pardir))
+sys.path.insert(0, USE_CASES_DIR)
+
+from internal_tests.use_cases.metplus_use_case_suite import METplusUseCaseSuite
+from metplus.util.met_util import expand_int_string_to_list
+
+METPLUS_BASE_ENV = 'metplus_base'
+METPLUS_DOCKER_LOC = '/metplus/METplus'
+
+# keywords in requirements list that trigger obtaining METcalcpy and METplotpy
+PLOTCALC_KEYWORDS = [
+    'metplotpy',
+    'metcalcpy',
+    'spacetime',
+    'weatherregime',
+]
+
+def handle_automation_env(host_name, reqs, work_dir):
+    # if no env is specified, use metplus base environment
+    conda_env = METPLUS_BASE_ENV
+
+    # if requirement ending with _env is set, then
+    # use that version of python3 to run
+    use_env = [item for item in reqs if item.endswith('_env')]
+    if use_env:
+        conda_env = use_env[0].replace('_env', '')
+
+    # if not using docker (automation),
+    # return no setup commands and python embedding argument to command
+    if host_name != 'docker':
+        if 'py_embed' in reqs and conda_env != METPLUS_BASE_ENV:
+            return '', 'user_env_vars.MET_PYTHON_EXE=python3'
+        return '', ''
+
+    # start building commands to run before run_metplus.py in Docker
+    setup_env = 'source /etc/bashrc;'
+
+    # add conda bin to beginning of PATH
+    python_dir = os.path.join('/usr', 'local', 'envs',
+                              conda_env, 'bin')
+    python_path = os.path.join(python_dir, 'python3')
+    setup_env += f' export PATH={python_dir}:$PATH;'
+
+    # if py_embed listed in requirements and using a Python
+    # environment that differs from the MET env, set MET_PYTHON_EXE
+    if 'py_embed' in reqs and conda_env != METPLUS_BASE_ENV:
+        py_embed_arg = f'user_env_vars.MET_PYTHON_EXE={python_path} '
+    else:
+        py_embed_arg = ''
+
+    # if any metplotpy/metcalcpy keywords are in requirements list,
+    # add command to obtain and install METplotpy and METcalcpy
+    if any([item for item in PLOTCALC_KEYWORDS if item in str(reqs).lower()]):
+        setup_env += (
+            f'cd {METPLUS_DOCKER_LOC};'
+            f'{work_dir}/manage_externals/checkout_externals'
+            f' -e {work_dir}/.github/parm/Externals_metplotcalcpy.cfg;'
+            f'{python_path} -m pip install {METPLUS_DOCKER_LOC}/../METplotpy;'
+            f'{python_path} -m pip install {METPLUS_DOCKER_LOC}/../METcalcpy;'
+            'cd -;'
+        )
+
+    # if metdatadb is in requirements list, add command to obtain METdatadb
+    if 'metdatadb' in str(reqs).lower():
+        setup_env += (
+            f'cd {METPLUS_DOCKER_LOC};'
+            f'{work_dir}/manage_externals/checkout_externals'
+            f' -e {work_dir}/.github/parm/Externals_metdatadb.cfg;'
+            'cd -;'
+        )
+
+    # if gempak is in requirements list, add JRE bin to path for java
+    if 'gempak' in str(reqs).lower():
+        setup_env += 'export PATH=$PATH:/usr/lib/jvm/jre/bin;'
+
+    # if metplus is in requirements list,
+    # add top of METplus repo to PYTHONPATH so metplus can be imported
+    if 'metplus' in str(reqs).lower():
+        setup_env += f'export PYTHONPATH={METPLUS_DOCKER_LOC}:$PYTHONPATH;'
+
+    # list packages in python environment that will be used
+    if conda_env != 'gempak':
+        setup_env += (
+            f'echo Using environment: dtcenter/metplus-envs:{conda_env};'
+            f'echo cat /usr/local/envs/{conda_env}/environments.yml;'
+            f'echo ----------------------------------------;'
+            f'cat /usr/local/envs/{conda_env}/environments.yml;'
+            'echo ----------------------------------------;'
+        )
+
+    return setup_env, py_embed_arg
+
+def main(categories, subset_list, work_dir=None,
+         host_name=os.environ.get('HOST_NAME')):
+    all_commands = []
+
+    if work_dir is None:
+        work_dir = USE_CASES_DIR
+
+    test_suite = METplusUseCaseSuite()
+    test_suite.add_use_case_groups(categories, subset_list)
+
+    output_top_dir = os.environ.get('METPLUS_TEST_OUTPUT_BASE', '/data/output')
+
+    # use METPLUS_TEST_SETTINGS_CONF if set
+    test_settings_conf = os.environ.get('METPLUS_TEST_SETTINGS_CONF', '')
+    if not test_settings_conf and host_name == 'docker':
+        test_settings_conf = os.path.join(work_dir,
+                                          '.github',
+                                          'parm',
+                                          'test_settings.conf')
+
+    for group_name, use_cases_by_req in test_suite.category_groups.items():
+        for use_case_by_requirement in use_cases_by_req:
+            reqs = use_case_by_requirement.requirements
+
+            setup_env, py_embed_arg = handle_automation_env(host_name, reqs, work_dir)
+
+            use_case_cmds = []
+            for use_case in use_case_by_requirement.use_cases:
+                # add parm/use_cases path to config args if they are conf files
+                config_args = []
+                for config_arg in use_case.config_args:
+                    if config_arg.endswith('.conf'):
+                        config_arg = os.path.join(work_dir, 'parm',
+                                                  'use_cases',
+                                                  config_arg)
+
+                    config_args.append(config_arg)
+
+                output_base = os.path.join(output_top_dir, use_case.name)
+                use_case_cmd = (f"run_metplus.py"
+                                f" {' '.join(config_args)}"
+                                f" {py_embed_arg}{test_settings_conf}"
+                                f" config.OUTPUT_BASE={output_base}")
+                use_case_cmds.append(use_case_cmd)
+
+            # add commands to set up environment before use case commands
+            group_commands = f"{setup_env}{';'.join(use_case_cmds)}"
+            all_commands.append((group_commands, reqs))
+
+    return all_commands
+
+def handle_command_line_args():
+    # read command line arguments to determine which use cases to run
+    if len(sys.argv) < 2:
+        print("No use cases specified")
+        sys.exit(1)
+
+    # split up categories by & or ,
+    categories = sys.argv[1]
+
+    # get subset values if specified
+    if len(sys.argv) > 2:
+        if sys.argv[2] == 'all':
+            subset_list = None
+        else:
+            subset_list = expand_int_string_to_list(sys.argv[2])
+    else:
+        subset_list = None
+
+
+    # check if comparison flag should be set
+    if len(sys.argv) > 3:
+        do_comparison = True
+    else:
+        do_comparison = False
+
+    return categories, subset_list, do_comparison
+
+if __name__ == '__main__':
+    categories, subset_list, _ = handle_command_line_args()
+    all_commands = main(categories, subset_list)
+    for command, requirements in all_commands:
+        print(f"REQUIREMENTS: {','.join(requirements)}")
+        command_format = ';\\\n'.join(command.split(';'))
+        print(f"COMMAND:\n{command_format}\n")
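Run directly, get_use_case_commands.py only prints the environment setup and run_metplus.py commands for each group, which makes it useful for local debugging before anything is executed. A hedged example invocation (the category and index range mirror the use_case_groups.json entries shown later in this diff):

    # print the commands that would run the first two climate use cases
    .github/jobs/get_use_case_commands.py climate 0-1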
diff --git a/.github/jobs/get_use_cases_to_run.sh b/.github/jobs/get_use_cases_to_run.sh
new file mode 100755
index 0000000000..5e4cd9035b
--- /dev/null
+++ b/.github/jobs/get_use_cases_to_run.sh
@@ -0,0 +1,52 @@
+#! /bin/bash
+
+use_case_groups_filepath=.github/parm/use_case_groups.json
+
+# set matrix to string of an empty array in case no use cases will be run
+matrix="[]"
+
+run_use_cases=$1
+run_all_use_cases=$2
+run_unit_tests=$3
+
+echo Run use cases: $run_use_cases
+echo Run all use cases: $run_all_use_cases
+echo Run unit tests: $run_unit_tests
+
+# if running use cases, generate JQ filter to use
+if [ "$run_use_cases" == "true" ]; then
+  echo generate JQ filter to get use cases to run
+
+  # if only running new use cases, add to filter criteria
+  if [ "$run_all_use_cases" == "false" ]; then
+    echo Only run new use cases
+    matrix=$(jq '[.[] | select(.new == true) | (.category + ":" + .index_list)]' $use_case_groups_filepath)
+  else
+    echo Add all available use cases
+    matrix=$(jq '[.[] | (.category + ":" + .index_list)]' $use_case_groups_filepath)
+  fi
+
+fi
+
+# if unit tests will be run, add "pytests" to beginning of matrix list
+if [ "$run_unit_tests" == "true" ]; then
+  echo Adding unit tests to list to run
+
+  # if matrix is empty, set to an array that only includes pytests
+  if [ "$matrix" == "[]" ]; then
+    matrix="[\"pytests\"]"
+  # otherwise prepend item to list
+  else
+    matrix="[\"pytests\", ${matrix:1}"
+  fi
+fi
+
+echo Array of groups to run is: $matrix
+
+# if matrix is still empty, set run_some_tests to false and exit
+# so the rest of the workflow is skipped
+if [ "$matrix" == "[]" ]; then
+  echo No tests to run!
+  echo ::set-output name=run_some_tests::false
+  exit 0
+fi
+
+echo ::set-output name=run_some_tests::true
+echo ::set-output name=matrix::{\"categories\":$(echo $matrix)}
diff --git a/ci/jobs/print_branch_name.py b/.github/jobs/print_branch_name.py
similarity index 50%
rename from ci/jobs/print_branch_name.py
rename to .github/jobs/print_branch_name.py
index b3bfcd299a..422e40f917 100755
--- a/ci/jobs/print_branch_name.py
+++ b/.github/jobs/print_branch_name.py
@@ -1,8 +1,8 @@
 #! /usr/bin/env python3
 
 # Script to easily get branch name from docker_utils function
-# Run by GitHub Actions (in ci/actions/run_tests/entrypoint.sh,
-# ci/jobs/create_output_data_volumes.sh, and ci/jobs/docker_setup.sh)
+# Run by GitHub Actions (in .github/actions/run_tests/entrypoint.sh,
+# .github/jobs/create_output_data_volumes.sh, and .github/jobs/docker_setup.sh)
 
 from docker_utils import get_branch_name
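The jq filters above reduce each use_case_groups.json entry to a "category:index_list" string. A sketch of both filters against a small sample file (sample data, runnable anywhere jq is installed):

    cat > /tmp/groups.json << 'EOF'
    [
      {"category": "climate", "index_list": "0-1", "new": false},
      {"category": "s2s", "index_list": "4", "new": true}
    ]
    EOF

    # all groups -> ["climate:0-1", "s2s:4"]
    jq '[.[] | (.category + ":" + .index_list)]' /tmp/groups.json

    # only new groups -> ["s2s:4"]
    jq '[.[] | select(.new == true) | (.category + ":" + .index_list)]' /tmp/groups.json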
diff --git a/.github/jobs/run_diff_docker.py b/.github/jobs/run_diff_docker.py
new file mode 100755
index 0000000000..fee43a9d48
--- /dev/null
+++ b/.github/jobs/run_diff_docker.py
@@ -0,0 +1,105 @@
+#! /usr/bin/env python3
+
+# Used in GitHub Actions (in .github/actions/run_tests/entrypoint.sh)
+# to run difference tests that compare use case output to truth data
+# and copy any files that reported differences into a directory to
+# make them available in GitHub Actions artifacts for easy review
+
+import os
+import sys
+import subprocess
+import shlex
+import shutil
+
+GITHUB_WORKSPACE = os.environ.get('GITHUB_WORKSPACE')
+
+# add ci/util to sys path to get diff utility
+diff_util_dir = os.path.join(GITHUB_WORKSPACE,
+                             'ci',
+                             'util')
+sys.path.insert(0, diff_util_dir)
+from diff_util import compare_dir
+
+TRUTH_DIR = '/data/truth'
+OUTPUT_DIR = '/data/output'
+DIFF_DIR = '/data/diff'
+# DIFF_DIR = os.path.join(GITHUB_WORKSPACE,
+#                         'artifact',
+#                         'diff')
+
+def copy_diff_output(diff_files):
+    """! Loop through difference output and copy files
+    to a directory so they can be made available for comparison.
+    Files will be put into the same directory with _truth or
+    _output added before their file extension.
+
+    @param diff_files list of tuples containing truth file path
+    and file path of output that was just generated. Either tuple
+    value may be an empty string if the file was not found.
+    """
+    for truth_file, out_file, _, diff_file in diff_files:
+        if truth_file:
+            copy_to_diff_dir(truth_file,
+                             'truth')
+        if out_file:
+            copy_to_diff_dir(out_file,
+                             'output')
+        if diff_file:
+            copy_to_diff_dir(diff_file,
+                             'diff')
+
+def copy_to_diff_dir(file_path, data_type):
+    """! Generate output path based on input file path,
+    adding text based on data_type to the filename, then
+    copy input file to that output path.
+
+    @param file_path full path of file to copy
+    @param data_type data identifier, should be 'truth',
+    'output', or 'diff'
+    @returns True if success, False if there was a problem
+    copying the file
+    """
+    if data_type == 'truth':
+        data_dir = TRUTH_DIR
+    else:
+        data_dir = OUTPUT_DIR
+
+    # replace data dir with diff directory
+    diff_out = file_path.replace(data_dir, DIFF_DIR)
+
+    # add data type identifier to filename before extension
+    # if data is not difference output
+    if data_type == 'diff':
+        output_path = diff_out
+    else:
+        output_path, extension = os.path.splitext(diff_out)
+        output_path = f'{output_path}_{data_type}{extension}'
+
+    # create output directory if it doesn't exist
+    output_dir = os.path.dirname(output_path)
+    if not os.path.exists(output_dir):
+        os.makedirs(output_dir)
+
+    print(f'Copying {file_path} to\n{output_path}')
+    try:
+        shutil.copyfile(file_path, output_path)
+    except OSError as err:
+        print(f'Could not copy file. {err}')
+        return False
+
+    return True
+
+def main():
+    print('******************************')
+    print("Comparing output to truth data")
+    diff_files = compare_dir(TRUTH_DIR, OUTPUT_DIR,
+                             debug=True,
+                             save_diff=True)
+
+    # copy difference files into directory
+    # so it can be easily downloaded and compared
+    if diff_files:
+        copy_diff_output(diff_files)
+
+if __name__ == '__main__':
+    main()
diff --git a/ci/jobs/set_job_controls.sh b/.github/jobs/set_job_controls.sh
similarity index 89%
rename from ci/jobs/set_job_controls.sh
rename to .github/jobs/set_job_controls.sh
index b20dc13ff7..c8b241413b 100755
--- a/ci/jobs/set_job_controls.sh
+++ b/.github/jobs/set_job_controls.sh
@@ -96,3 +96,11 @@ echo run_all_use_cases=${run_all_use_cases} >> job_control_status
 echo run_diff=${run_diff} >> job_control_status
 echo Job Control Settings:
 cat job_control_status
+
+echo ::set-output name=run_get_image::$run_get_image
+echo ::set-output name=run_get_input_data::$run_get_input_data
+echo ::set-output name=run_diff::$run_diff
+echo ::set-output name=run_save_truth_data::$run_save_truth_data
+
+# get use cases to run
+.github/jobs/get_use_cases_to_run.sh $run_use_cases $run_all_use_cases $run_unit_tests
diff --git a/.github/jobs/setup_and_run_diff.py b/.github/jobs/setup_and_run_diff.py
new file mode 100755
index 0000000000..ff7acfcfdf
--- /dev/null
+++ b/.github/jobs/setup_and_run_diff.py
@@ -0,0 +1,76 @@
+#! /usr/bin/env python3
+
+import os
+import sys
+import subprocess
+import shlex
+
+ci_dir = os.path.join(os.environ.get('GITHUB_WORKSPACE'), '.github')
+sys.path.insert(0, ci_dir)
+
+from jobs import get_data_volumes
+
+CI_JOBS_DIR = '.github/jobs'
+
+RUNNER_WORKSPACE = os.environ.get('RUNNER_WORKSPACE')
+GITHUB_WORKSPACE = os.environ.get('GITHUB_WORKSPACE')
+
+REPO_NAME = os.path.basename(RUNNER_WORKSPACE)
+WS_PATH = os.path.join(RUNNER_WORKSPACE, REPO_NAME)
+print(f"WS_PATH is {WS_PATH}")
+print(f"GITHUB_WORKSPACE is {GITHUB_WORKSPACE}")
+
+INPUT_CATEGORIES = sys.argv[1]
+artifact_name = sys.argv[2]
+
+# get output data volumes
+print("Get Docker data volumes for output data")
+
+# use develop branch output data volumes if not a pull request (forced diff)
+if os.environ.get('GITHUB_EVENT_NAME') == "pull_request":
+    output_data_branch = os.environ.get('GITHUB_BASE_REF')
+else:
+    output_data_branch = 'develop'
+
+output_category = f"output-{output_data_branch}-{artifact_name}"
+
+VOLUMES_FROM = get_data_volumes.main([output_category])
+
+print(f"Output Volumes: {VOLUMES_FROM}")
+
+volume_mounts = [
+    f'-v {WS_PATH}:{GITHUB_WORKSPACE}',
+    f'-v {RUNNER_WORKSPACE}/output:/data/output',
+    f'-v {RUNNER_WORKSPACE}/diff:/data/diff',
+]
+
+mount_args = ' '.join(volume_mounts)
+
+# command to run inside Docker
+cmd = ('/usr/local/envs/diff/bin/python3 '
+       f'{GITHUB_WORKSPACE}/{CI_JOBS_DIR}/run_diff_docker.py')
+
+# run inside diff env: mount METplus code and output dir, volumes from output volumes
+docker_cmd = (f'docker run -e GITHUB_WORKSPACE {VOLUMES_FROM} '
+              f'{mount_args} dtcenter/metplus-envs:diff '
+              f'bash -c "{cmd}"')
+print(f'RUNNING: {docker_cmd}')
+try:
+    process = subprocess.Popen(shlex.split(docker_cmd),
+                               shell=False,
+                               encoding='utf-8',
+                               stdout=subprocess.PIPE,
+                               stderr=subprocess.STDOUT)
+    # Poll process.stdout to show stdout live
+    while True:
+        output = process.stdout.readline()
+        if process.poll() is not None:
+            break
+        if output:
+            print(output.strip())
+    rc = process.poll()
+    if rc:
+        raise subprocess.CalledProcessError(rc, docker_cmd)
+
+except subprocess.CalledProcessError as err:
+    print(f"ERROR: Command failed -- {err}")
+    sys.exit(1)
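setup_and_run_diff.py expects the use case group string and the formatted artifact name as positional arguments, and reads RUNNER_WORKSPACE, GITHUB_WORKSPACE, and GITHUB_EVENT_NAME from the environment. A hedged example of invoking it the way the workflow does (values are illustrative):

    # diff the output of one use case group against the truth data volume
    artifact_name=$(.github/jobs/get_artifact_name.sh met_tool_wrapper:0-54)
    .github/jobs/setup_and_run_diff.py met_tool_wrapper:0-54 $artifact_name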
diff --git a/.github/jobs/setup_and_run_use_cases.py b/.github/jobs/setup_and_run_use_cases.py
new file mode 100755
index 0000000000..e096365657
--- /dev/null
+++ b/.github/jobs/setup_and_run_use_cases.py
@@ -0,0 +1,147 @@
+#! /usr/bin/env python3
+
+################################################################################
+# Used in GitHub Actions (in .github/actions/run_tests/entrypoint.sh) to run use cases
+# For each use case group specified:
+#  - create input Docker data volumes and get --volumes-from arguments
+#  - build Docker image with conda environment and METplus branch image
+#  - run commands to run use cases
+
+import os
+import sys
+import subprocess
+import shlex
+import time
+
+import get_use_case_commands
+import get_data_volumes
+from docker_utils import get_branch_name
+
+runner_workspace = os.environ.get('RUNNER_WORKSPACE')
+github_workspace = os.environ.get('GITHUB_WORKSPACE')
+
+repo_name = os.path.basename(runner_workspace)
+ws_path = os.path.join(runner_workspace, repo_name)
+
+docker_data_dir = '/data'
+docker_output_dir = os.path.join(docker_data_dir, 'output')
+gha_output_dir = os.path.join(runner_workspace, 'output')
+
+def main():
+    categories, subset_list, _ = (
+        get_use_case_commands.handle_command_line_args()
+    )
+    categories_list = categories.split(',')
+    all_commands = (
+        get_use_case_commands.main(categories_list,
+                                   subset_list,
+                                   work_dir=os.environ.get('GITHUB_WORKSPACE'),
+                                   host_name='docker')
+    )
+
+    # get input data volumes
+    volumes_from = get_data_volumes.main(categories_list)
+    print(f"Input Volumes: {volumes_from}")
+
+    # build Docker image with conda environment and METplus branch image
+    branch_name = get_branch_name()
+    if os.environ.get('GITHUB_EVENT_NAME') == 'pull_request':
+        branch_name = f"{branch_name}-pull_request"
+
+    run_tag = 'metplus-run-env'
+    dockerfile_dir = os.path.join('.github', 'actions', 'run_tests')
+
+    # use BuildKit to build image
+    os.environ['DOCKER_BUILDKIT'] = '1'
+
+    volume_mounts = [
+        f"-v {runner_workspace}/output/mysql:/var/lib/mysql",
+        f"-v {gha_output_dir}:{docker_output_dir}",
+        f"-v {ws_path}:{github_workspace}",
+    ]
+
+    isOK = True
+    for cmd, requirements in all_commands:
+
+        # get environment image tag
+        use_env = [item for item in requirements if item.endswith('_env')]
+        if use_env:
+            env_tag = use_env[0].replace('_env', '')
+        else:
+            env_tag = 'metplus_base'
+
+        # get Dockerfile to use (gempak if using gempak)
+        if 'gempak' in str(requirements).lower():
+            dockerfile_name = 'Dockerfile.gempak'
+        else:
+            dockerfile_name = 'Dockerfile.run'
+
+        docker_build_cmd = (
+            f"docker build -t {run_tag} "
+            f"--build-arg METPLUS_IMG_TAG={branch_name} "
+            f"--build-arg METPLUS_ENV_TAG={env_tag} "
+            f"-f {dockerfile_dir}/{dockerfile_name} ."
+        )
+        print(f"Building Docker environment/branch image...\n"
+              f"Running: {docker_build_cmd}")
+        start_time = time.time()
+        try:
+            subprocess.run(shlex.split(docker_build_cmd), check=True)
+        except subprocess.CalledProcessError as err:
+            print(f"ERROR: Docker Build failed: {docker_build_cmd} -- {err}")
+            isOK = False
+            continue
+
+        end_time = time.time()
+        print("TIMING: Command took "
+              f"{time.strftime('%M:%S', time.gmtime(end_time - start_time))}"
+              f" (MM:SS): '{docker_build_cmd}'")
+
+        cmd_args = {'check': True,
+                    'encoding': 'utf-8',
+                    'capture_output': True,
+                    }
+        output = subprocess.run(shlex.split('docker ps -a'),
+                                **cmd_args).stdout.strip()
+        print(f"docker ps -a\n{output}")
+
+        full_cmd = (
+            f"docker run -e GITHUB_WORKSPACE "
+            f"{os.environ.get('NETWORK_ARG', '')} "
+            f"{' '.join(volume_mounts)} "
+            f"{volumes_from} --workdir {github_workspace} "
+            f'{run_tag} bash -c "{cmd}"')
+        print(f"RUNNING: {full_cmd}")
+        start_time = time.time()
+        try:
+            process = subprocess.Popen(shlex.split(full_cmd),
+                                       shell=False,
+                                       encoding='utf-8',
+                                       stdout=subprocess.PIPE,
+                                       stderr=subprocess.STDOUT)
+            # Poll process.stdout to show stdout live
+            while True:
+                output = process.stdout.readline()
+                if process.poll() is not None:
+                    break
+                if output:
+                    print(output.strip())
+            rc = process.poll()
+            if rc:
+                raise subprocess.CalledProcessError(rc, full_cmd)
+
+        except subprocess.CalledProcessError as err:
+            print(f"ERROR: Command failed -- {err}")
+            isOK = False
+
+        end_time = time.time()
+        print("TIMING: Command took "
+              f"{time.strftime('%M:%S', time.gmtime(end_time - start_time))}"
+              f" (MM:SS): '{full_cmd}'")
+
+    if not isOK:
+        print("ERROR: Some commands failed.")
+        sys.exit(1)
+
+if __name__ == '__main__':
+    main()
diff --git a/ci/parm/Externals_metdatadb.cfg b/.github/parm/Externals_metdatadb.cfg
similarity index 100%
rename from ci/parm/Externals_metdatadb.cfg
rename to .github/parm/Externals_metdatadb.cfg
diff --git a/ci/parm/Externals_metcalcpy.cfg b/.github/parm/Externals_metplotcalcpy.cfg
similarity index 57%
rename from ci/parm/Externals_metcalcpy.cfg
rename to .github/parm/Externals_metplotcalcpy.cfg
index 81a417fbad..fdf1383555 100644
--- a/ci/parm/Externals_metcalcpy.cfg
+++ b/.github/parm/Externals_metplotcalcpy.cfg
@@ -5,5 +5,12 @@ required = True
 repo_url = https://github.com/dtcenter/METcalcpy
 branch = develop
 
+[METplotpy]
+local_path = ../METplotpy
+protocol = git
+required = True
+repo_url = https://github.com/dtcenter/METplotpy
+branch = develop
+
 [externals_description]
 schema_version = 1.0.0
diff --git a/.github/parm/test_settings.conf b/.github/parm/test_settings.conf
new file mode 100644
index 0000000000..9363b2fba6
--- /dev/null
+++ b/.github/parm/test_settings.conf
@@ -0,0 +1,13 @@
+[config]
+LOG_LEVEL = DEBUG
+LOG_MET_OUTPUT_TO_METPLUS = no
+LOG_LINE_FORMAT = (%(filename)s) %(levelname)s: %(message)s
+LOG_ERR_LINE_FORMAT = {LOG_LINE_FORMAT}
+LOG_DEBUG_LINE_FORMAT = {LOG_LINE_FORMAT}
+LOG_INFO_LINE_FORMAT = {LOG_LINE_FORMAT}
+
+LOG_METPLUS = {LOG_DIR}/metplus.log
+LOG_TIMESTAMP_TEMPLATE =
+
+# also set path to GempakToCF.jar for GEMPAK use cases
+GEMPAKTOCF_JAR = /data/input/GempakToCF.jar
\ No newline at end of file
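Every generated use case command appends test_settings.conf so CI runs log at DEBUG level without timestamped log file names. A hedged sketch of one resulting run_metplus.py command (the use case config and output path are illustrative):

    run_metplus.py \
        parm/use_cases/met_tool_wrapper/GridStat/GridStat.conf \
        .github/parm/test_settings.conf \
        config.OUTPUT_BASE=/data/output/met_tool_wrapper-GridStat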
"new": false + }, + { + "category": "climate", + "index_list": "0-1", + "new": false + }, + { + "category": "convection_allowing_models", + "index_list": "0", + "new": false + }, + { + "category": "convection_allowing_models", + "index_list": "1", + "new": false + }, + { + "category": "convection_allowing_models", + "index_list": "2-6", + "new": false + }, + { + "category": "convection_allowing_models", + "index_list": "7", + "new": false + }, + { + "category": "convection_allowing_models", + "index_list": "8", + "new": false + }, + { + "category": "cryosphere", + "index_list": "0", + "new": false + }, + { + "category": "data_assimilation", + "index_list": "0", + "new": false + }, + { + "category": "marine_and_coastal", + "index_list": "0", + "new": false + }, + { + "category": "medium_range", + "index_list": "0", + "new": false + }, + { + "category": "medium_range", + "index_list": "1-2", + "new": false + }, + { + "category": "medium_range", + "index_list": "3-5", + "new": false + }, + { + "category": "medium_range", + "index_list": "6", + "new": false + }, + { + "category": "medium_range", + "index_list": "7-8", + "new": false + }, + { + "category": "precipitation", + "index_list": "0", + "new": false + }, + { + "category": "precipitation", + "index_list": "1", + "new": false + }, + { + "category": "precipitation", + "index_list": "2", + "new": false + }, + { + "category": "precipitation", + "index_list": "3-8", + "new": false + }, + { + "category": "s2s", + "index_list": "0", + "new": false + }, + { + "category": "s2s", + "index_list": "1-3", + "new": false + }, + { + "category": "s2s", + "index_list": "4", + "new": false + }, + { + "category": "s2s", + "index_list": "5", + "new": false + }, + { + "category": "s2s", + "index_list": "6", + "new": false + }, + { + "category": "space_weather", + "index_list": "0-1", + "new": false + }, + { + "category": "tc_and_extra_tc", + "index_list": "0-2", + "new": false + }, + { + "category": "tc_and_extra_tc", + "index_list": "3", + "new": false + } +] diff --git a/.github/workflows/testing.yml b/.github/workflows/testing.yml index 3792ad9859..31180d6528 100644 --- a/.github/workflows/testing.yml +++ b/.github/workflows/testing.yml @@ -16,6 +16,13 @@ jobs: job_control: name: Determine which jobs to run runs-on: ubuntu-latest + outputs: + matrix: ${{ steps.job_status.outputs.matrix }} + run_some_tests: ${{ steps.job_status.outputs.run_some_tests }} + run_get_image: ${{ steps.job_status.outputs.run_get_image }} + run_get_input_data: ${{ steps.job_status.outputs.run_get_input_data }} + run_diff: ${{ steps.job_status.outputs.run_diff }} + run_save_truth_data: ${{ steps.job_status.outputs.run_save_truth_data }} steps: - uses: actions/checkout@v2 - name: Print GitHub values for reference @@ -23,8 +30,8 @@ jobs: GITHUB_CONTEXT: ${{ toJson(github) }} run: echo "$GITHUB_CONTEXT" - name: Set job controls - id: status - run: ${GITHUB_WORKSPACE}/ci/jobs/set_job_controls.sh + id: job_status + run: .github/jobs/set_job_controls.sh env: commit_msg: ${{ github.event.head_commit.message }} - uses: actions/upload-artifact@v2 @@ -35,20 +42,14 @@ jobs: name: Docker Setup - Get METplus Image runs-on: ubuntu-latest needs: job_control + if: ${{ needs.job_control.outputs.run_get_image == 'true' }} steps: - - uses: actions/download-artifact@v2 - with: - name: job_control_status - - run: cat job_control_status >> $GITHUB_ENV - uses: actions/checkout@v2 - if: ${{ env.run_get_image == 'true' }} - uses: actions/setup-python@v2 - if: ${{ env.run_get_image == 'true' }} with: 
python-version: '3.6' - name: Get METplus Image - if: ${{ env.run_get_image == 'true' }} - run: ${GITHUB_WORKSPACE}/ci/jobs/docker_setup.sh + run: .github/jobs/docker_setup.sh env: DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }} @@ -56,162 +57,115 @@ jobs: name: Docker Setup - Update Data Volumes runs-on: ubuntu-latest needs: job_control + if: ${{ needs.job_control.outputs.run_get_input_data == 'true' }} steps: - - uses: actions/download-artifact@v2 - with: - name: job_control_status - - run: cat job_control_status >> $GITHUB_ENV - uses: actions/checkout@v2 - if: ${{ env.run_get_input_data == 'true' }} - uses: actions/setup-python@v2 - if: ${{ env.run_get_input_data == 'true' }} with: python-version: '3.6' - name: Install dependencies - if: ${{ env.run_get_input_data == 'true' }} run: python -m pip install --upgrade pip python-dateutil requests bs4 - name: Update Data Volumes - if: ${{ env.run_get_input_data == 'true' }} - run: ${GITHUB_WORKSPACE}/ci/jobs/docker_update_data_volumes.py + run: .github/jobs/docker_update_data_volumes.py env: DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }} - unit_tests: - name: Unit Tests - Pytest - runs-on: ubuntu-latest - needs: get_image - steps: - - uses: actions/download-artifact@v2 - with: - name: job_control_status - - run: cat job_control_status >> $GITHUB_ENV - - uses: actions/checkout@v2 - if: ${{ env.run_unit_tests == 'true' }} - - uses: ./ci/actions/run_tests - if: ${{ env.run_unit_tests == 'true' }} - with: - categories: pytests use_case_tests: name: Use Case Tests runs-on: ubuntu-latest - needs: [get_image, update_data_volumes] + needs: [get_image, update_data_volumes, job_control] + if: ${{ always() && needs.job_control.outputs.run_some_tests == 'true' }} strategy: fail-fast: false - matrix: - categories: - - "met_tool_wrapper:0-54" - - "air_quality_and_comp:0" - - "climate:0-1" - - "convection_allowing_models:0" - - "convection_allowing_models:1" - - "convection_allowing_models:2-6" - - "convection_allowing_models:7" - - "convection_allowing_models:8" - - "cryosphere:0" - - "data_assimilation:0" - - "marine_and_coastal:0" - - "medium_range:0" - - "medium_range:1-2" - - "medium_range:3-5" - - "medium_range:6" - - "medium_range:7-8" - - "precipitation:0" - - "precipitation:1" - - "precipitation:2" - - "precipitation:3-8" - - "s2s:0" - - "s2s:1-3" - - "s2s:4" - - "s2s:5" - - "s2s:6" - - "space_weather:0-1" - - "tc_and_extra_tc:0-2" - - "tc_and_extra_tc:3" + matrix: ${{fromJson(needs.job_control.outputs.matrix)}} steps: - - uses: actions/download-artifact@v2 - with: - name: job_control_status - - run: cat job_control_status >> $GITHUB_ENV - - name: Check if use case group should run or be skipped - run: | - if [ "${{ env.run_use_cases == 'true' && (endsWith(matrix.categories, ':NEW') || env.run_all_use_cases) }}" == "true" ]; then - run_this_case=true - else - run_this_case=false - fi - echo run_this_case=$run_this_case >> $GITHUB_ENV - echo Ends with NEW: ${{ endsWith(matrix.categories, ':NEW') }} - echo Run all: ${{ env.run_all_use_cases }} - name: Create directories for database run: | mkdir -p $RUNNER_WORKSPACE/mysql mkdir -p $RUNNER_WORKSPACE/output/metviewer chmod a+w $RUNNER_WORKSPACE/mysql chmod a+w $RUNNER_WORKSPACE/output/metviewer + - name: Create directory for artifacts + run: mkdir -p artifact - uses: actions/checkout@v2 - if: ${{ env.run_this_case == 'true' }} - - uses: ./ci/actions/run_tests + - name: Get artifact name + id: 
get-artifact-name + run: | + artifact_name=`.github/jobs/get_artifact_name.sh ${{ matrix.categories }}` + echo ::set-output name=artifact_name::${artifact_name} + - uses: ./.github/actions/run_tests id: run_tests - if: ${{ env.run_this_case == 'true' }} with: categories: ${{ matrix.categories }} - run_diff: ${{ env.run_diff }} - # copy output data to save as artifact - - name: Save output data - id: save-output - if: ${{ always() && steps.run_tests.conclusion != 'skipped' }} + # copy logs with errors to error_logs directory to save as artifact + - name: Save error logs + id: save-errors + if: ${{ always() && steps.run_tests.conclusion == 'failure' && matrix.categories != 'pytests' }} run: | - artifact_name=`${GITHUB_WORKSPACE}/ci/jobs/get_artifact_name.sh ${{matrix.categories}}` - mkdir -p artifact/${artifact_name} - cp -r ${GITHUB_WORKSPACE}/../output/* artifact/${artifact_name}/ - echo ::set-output name=artifact_name::${artifact_name} - if [ "$( ls -A ${GITHUB_WORKSPACE}/../diff)" ]; then - mkdir -p artifact/diff-${artifact_name} - cp -r ${GITHUB_WORKSPACE}/../diff/* artifact/diff-${artifact_name}/ - echo ::set-output name=upload_diff::true - else - echo ::set-output name=upload_diff::false - fi - if [ "$( ls -A ${GITHUB_WORKSPACE}/../error_logs)" ]; then - mkdir -p artifact/error_logs - cp -r ${GITHUB_WORKSPACE}/../error_logs/* artifact/error_logs + .github/jobs/copy_error_logs.py \ + ${RUNNER_WORKSPACE}/output \ + artifact/error_logs + if [ -d "artifact/error_logs" ]; then echo ::set-output name=upload_error_logs::true else echo ::set-output name=upload_error_logs::false fi + + # run difference testing + - name: Run difference tests + id: run-diff + if: ${{ needs.job_control.outputs.run_diff == 'true' && steps.run_tests.conclusion == 'success' && matrix.categories != 'pytests' }} + run: | + artifact_name=${{ steps.get-artifact-name.outputs.artifact_name }} + .github/jobs/setup_and_run_diff.py ${{ matrix.categories }} $artifact_name + if [ "$( ls -A ${RUNNER_WORKSPACE}/diff)" ]; then + echo ::set-output name=upload_diff::true + mkdir -p artifact/diff-${artifact_name} + cp -r ${RUNNER_WORKSPACE}/diff/* artifact/diff-${artifact_name} + exit 1 + else + echo ::set-output name=upload_diff::false + fi + + # copy output data to save as artifact + - name: Save output data + id: save-output + if: ${{ always() && steps.run_tests.conclusion != 'skipped' && matrix.categories != 'pytests' }} + run: | + artifact_name=${{ steps.get-artifact-name.outputs.artifact_name }} + mkdir -p artifact/${artifact_name} + cp -r ${RUNNER_WORKSPACE}/output/* artifact/${artifact_name}/ + - uses: actions/upload-artifact@v2 - if: ${{ always() && steps.run_tests.conclusion != 'skipped' }} + name: Upload output data artifact + if: ${{ always() && steps.run_tests.conclusion != 'skipped' && matrix.categories != 'pytests' }} with: - name: ${{ steps.save-output.outputs.artifact_name }} - path: artifact/${{ steps.save-output.outputs.artifact_name }} + name: ${{ steps.get-artifact-name.outputs.artifact_name }} + path: artifact/${{ steps.get-artifact-name.outputs.artifact_name }} - uses: actions/upload-artifact@v2 - if: ${{ always() && steps.save-output.outputs.upload_error_logs }} + name: Upload error logs artifact + if: ${{ always() && steps.save-errors.outputs.upload_error_logs }} with: name: error_logs path: artifact/error_logs if-no-files-found: ignore - uses: actions/upload-artifact@v2 - if: ${{ always() && steps.save-output.outputs.upload_diff }} + name: Upload difference data artifact + if: ${{ always() && 
steps.run-diff.outputs.upload_diff == 'true' }} with: - name: diff-${{ steps.save-output.outputs.artifact_name }} - path: artifact/diff-${{ steps.save-output.outputs.artifact_name }} + name: diff-${{ steps.get-artifact-name.outputs.artifact_name }} + path: artifact/diff-${{ steps.get-artifact-name.outputs.artifact_name }} if-no-files-found: ignore create_output_data_volumes: name: Create Output Docker Data Volumes runs-on: ubuntu-latest needs: [use_case_tests] + if: ${{ needs.job_control.outputs.run_save_truth_data == 'true' }} steps: - - uses: actions/download-artifact@v2 - with: - name: job_control_status - - run: cat job_control_status >> $GITHUB_ENV - uses: actions/checkout@v2 - if: ${{ env.run_save_truth_data == 'true' }} - uses: actions/download-artifact@v2 - if: ${{ env.run_save_truth_data == 'true' }} - - run: ci/jobs/create_output_data_volumes.sh - if: ${{ env.run_save_truth_data == 'true' }} + - run: .github/jobs/create_output_data_volumes.sh env: DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }} diff --git a/ci/actions/run_tests/entrypoint.sh b/ci/actions/run_tests/entrypoint.sh deleted file mode 100644 index 931b2ee0ec..0000000000 --- a/ci/actions/run_tests/entrypoint.sh +++ /dev/null @@ -1,104 +0,0 @@ -#! /bin/bash - -# The repo source code is cloned to $RUNNER_WORKSPACE/$REPO_NAME -# Setup the workspace path to that for easier access later -REPO_NAME=$(basename $RUNNER_WORKSPACE) -WS_PATH=$RUNNER_WORKSPACE/$REPO_NAME - -DOCKER_DATA_DIR=/data -DOCKER_OUTPUT_DIR=${DOCKER_DATA_DIR}/output -GHA_OUTPUT_DIR=$RUNNER_WORKSPACE/output - -DOCKER_DIFF_DIR=${DOCKER_DATA_DIR}/diff -GHA_DIFF_DIR=$RUNNER_WORKSPACE/diff - -DOCKER_ERROR_LOG_DIR=${DOCKER_DATA_DIR}/error_logs -GHA_ERROR_LOG_DIR=$RUNNER_WORKSPACE/error_logs - -# get use case category, subset list, and optional NEW tag from input -CATEGORIES=`echo $INPUT_CATEGORIES | awk -F: '{print $1}'` -SUBSETLIST=`echo $INPUT_CATEGORIES | awk -F: '{print $2}'` - -# run all cases if no subset list specified -if [ -z "${SUBSETLIST}" ]; then - SUBSETLIST="all" -fi - -branch_name=`${GITHUB_WORKSPACE}/ci/jobs/print_branch_name.py` -if [ "$GITHUB_EVENT_NAME" == "pull_request" ]; then - branch_name=${branch_name}-pull_request -fi -DOCKERHUBTAG=dtcenter/metplus-dev:${branch_name} - -echo "Pulling docker image: $DOCKERHUBTAG" -docker pull $DOCKERHUBTAG -docker inspect --type=image $DOCKERHUBTAG > /dev/null -if [ $? != 0 ]; then - # if docker pull fails, build locally - echo docker pull failed. Building Docker image locally... - ${GITHUB_WORKSPACE}/ci/jobs/docker_setup.sh -fi - -if [ "$INPUT_CATEGORIES" == "pytests" ]; then - echo Running Pytests - command="pip3 install pytest-cov; export METPLUS_PYTEST_HOST=docker; cd internal_tests/pytests; pytest --cov=../../metplus" - docker run -v $WS_PATH:$GITHUB_WORKSPACE --workdir $GITHUB_WORKSPACE $DOCKERHUBTAG bash -c "$command" - exit $? 
-fi - -# get METviewer if used in any use cases -all_requirements=`./ci/jobs/get_requirements.py ${CATEGORIES} ${SUBSETLIST}` -echo All requirements: $all_requirements -NETWORK_ARG="" -if [[ "$all_requirements" =~ .*"metviewer".* ]]; then - echo "Setting up METviewer" - ${GITHUB_WORKSPACE}/ci/jobs/python_requirements/get_metviewer.sh - NETWORK_ARG=--network="container:mysql_mv" -fi - -# install Pillow library needed for diff testing -# this will be replaced with better image diffing package used by METplotpy -pip_command="pip3 install Pillow; yum -y install poppler-utils; pip3 install pdf2image" - -# build command to run -command="./ci/jobs/run_use_cases.py ${CATEGORIES} ${SUBSETLIST}" - -# add input volumes to run command -# keep track of --volumes-from arguments to docker run command -echo "Get Docker data volumes for input data" -VOLUMES_FROM=`${GITHUB_WORKSPACE}/ci/jobs/get_data_volumes.py $CATEGORIES` - -echo Input: ${VOLUMES_FROM} -# get Docker data volumes for output data and run diffing logic -# if running a pull request into develop or main_v* branches, not -ref branches -if [ "${INPUT_RUN_DIFF}" == "true" ]; then - echo "Get Docker data volumes for output data" - - # use develop branch output data volumes if not a pull request (forced diff) - if [ "$GITHUB_EVENT_NAME" == "pull_request" ]; then - output_data_branch=${GITHUB_BASE_REF} - else - output_data_branch=develop - fi - - category=`${GITHUB_WORKSPACE}/ci/jobs/get_artifact_name.sh $INPUT_CATEGORIES` - output_category=output-${output_data_branch}-${category} - - echo Get output data volume: ${output_category} - OUT_VOLUMES_FROM=`${GITHUB_WORKSPACE}/ci/jobs/get_data_volumes.py $output_category` - - echo Output: ${OUT_VOLUMES_FROM} - VOLUMES_FROM=${VOLUMES_FROM}" "$OUT_VOLUMES_FROM - - # add 3rd argument to command to trigger difference testing - command=${command}" true" -fi - -echo VOLUMES_FROM: $VOLUMES_FROM - -echo docker ps: -docker ps -a - -echo "Run Docker container: $DOCKERHUBTAG" -echo docker run -e GITHUB_WORKSPACE $NETWORK_ARG -v $RUNNER_WORKSPACE/output/mysql:/var/lib/mysql -v $GHA_OUTPUT_DIR:$DOCKER_OUTPUT_DIR -v $GHA_DIFF_DIR:$DOCKER_DIFF_DIR -v $GHA_ERROR_LOG_DIR:$DOCKER_ERROR_LOG_DIR -v $WS_PATH:$GITHUB_WORKSPACE ${VOLUMES_FROM} --workdir $GITHUB_WORKSPACE $DOCKERHUBTAG bash -c "${pip_command};${command}" -docker run -e GITHUB_WORKSPACE $NETWORK_ARG -v $RUNNER_WORKSPACE/output/mysql:/var/lib/mysql -v $GHA_OUTPUT_DIR:$DOCKER_OUTPUT_DIR -v $GHA_DIFF_DIR:$DOCKER_DIFF_DIR -v $GHA_ERROR_LOG_DIR:$DOCKER_ERROR_LOG_DIR -v $WS_PATH:$GITHUB_WORKSPACE ${VOLUMES_FROM} --workdir $GITHUB_WORKSPACE $DOCKERHUBTAG bash -c "${pip_command};${command}" diff --git a/ci/docker/Dockerfile b/ci/docker/Dockerfile index 2b2a8917c1..9c3320dbad 100644 --- a/ci/docker/Dockerfile +++ b/ci/docker/Dockerfile @@ -55,12 +55,10 @@ RUN echo export PATH=$PATH:`pwd`/METplus/ush >> /etc/bashrc \ # Install Java 1.8.0 OpenJDK for GempakToCF.jar # Install pytest and netCDF4 python packages # Obtain GempakToCF.jar -RUN yum -y update \ - && yum -y install nco.x86_64 \ - && yum -y install java-1.8.0-openjdk \ - && python3 -m pip install pytest netCDF4 \ - && mkdir -p /data/input \ - && curl -L -o /data/input/GempakToCF.jar -O https://dtcenter.org/sites/default/files/community-code/metplus/utilities/GempakToCF.jar || true +#RUN yum -y update \ +# && yum -y install java-1.8.0-openjdk \ +# && mkdir -p /data/input \ +# && curl -L -o /data/input/GempakToCF.jar -O https://dtcenter.org/sites/default/files/community-code/metplus/utilities/GempakToCF.jar || true # 
diff --git a/ci/docker/Dockerfile b/ci/docker/Dockerfile
index 2b2a8917c1..9c3320dbad 100644
--- a/ci/docker/Dockerfile
+++ b/ci/docker/Dockerfile
@@ -55,12 +55,10 @@ RUN echo export PATH=$PATH:`pwd`/METplus/ush >> /etc/bashrc \
 # Install Java 1.8.0 OpenJDK for GempakToCF.jar
 # Install pytest and netCDF4 python packages
 # Obtain GempakToCF.jar
-RUN yum -y update \
-    && yum -y install nco.x86_64 \
-    && yum -y install java-1.8.0-openjdk \
-    && python3 -m pip install pytest netCDF4 \
-    && mkdir -p /data/input \
-    && curl -L -o /data/input/GempakToCF.jar -O https://dtcenter.org/sites/default/files/community-code/metplus/utilities/GempakToCF.jar || true
+#RUN yum -y update \
+#    && yum -y install java-1.8.0-openjdk \
+#    && mkdir -p /data/input \
+#    && curl -L -o /data/input/GempakToCF.jar -O https://dtcenter.org/sites/default/files/community-code/metplus/utilities/GempakToCF.jar || true
 
 # if source code was retrieved, set default config variables and install package
diff --git a/ci/docker/docker_env/Dockerfile b/ci/docker/docker_env/Dockerfile
new file mode 100644
index 0000000000..a5f2549a66
--- /dev/null
+++ b/ci/docker/docker_env/Dockerfile
@@ -0,0 +1,13 @@
+# Dockerfile to create conda environments used for use case tests
+
+ARG BASE_ENV=metplus_base
+FROM dtcenter/metplus-envs:${BASE_ENV}
+
+ARG ENV_NAME
+WORKDIR /scripts
+COPY scripts/${ENV_NAME}_env.sh .
+
+ARG BASE_ENV=metplus_base
+RUN ./${ENV_NAME}_env.sh ${BASE_ENV}
+
+RUN conda list --name ${ENV_NAME} > /usr/local/envs/${ENV_NAME}/environments.yml
diff --git a/ci/docker/docker_env/Dockerfile.gempak_env b/ci/docker/docker_env/Dockerfile.gempak_env
new file mode 100644
index 0000000000..87f52baacb
--- /dev/null
+++ b/ci/docker/docker_env/Dockerfile.gempak_env
@@ -0,0 +1,11 @@
+# Dockerfile to create conda environments used for use case tests
+
+ARG BASE_ENV=metplus_base
+FROM dtcenter/metplus-envs:${BASE_ENV}
+
+ARG ENV_NAME
+WORKDIR /scripts
+COPY scripts/${ENV_NAME}_env.sh .
+
+ARG BASE_ENV=metplus_base
+RUN ./${ENV_NAME}_env.sh ${BASE_ENV}
diff --git a/ci/docker/docker_env/Dockerfile.metplus_base b/ci/docker/docker_env/Dockerfile.metplus_base
new file mode 100644
index 0000000000..b57e28edf4
--- /dev/null
+++ b/ci/docker/docker_env/Dockerfile.metplus_base
@@ -0,0 +1,11 @@
+# Dockerfile to create conda environment used for use cases
+# that don't require any additional packages
+
+FROM conda/miniconda3-centos7
+
+# create conda environments
+RUN conda update -y -n base -c defaults conda \
+    && conda create -y --name metplus_base python=3.6.8 \
+    && conda install -y --name metplus_base -c conda-forge python-dateutil==2.8.1
+
+RUN conda list --name metplus_base > /usr/local/envs/metplus_base/environments.yml
diff --git a/ci/docker/docker_env/Dockerfile.py_embed_base b/ci/docker/docker_env/Dockerfile.py_embed_base
new file mode 100644
index 0000000000..fc3a41b028
--- /dev/null
+++ b/ci/docker/docker_env/Dockerfile.py_embed_base
@@ -0,0 +1,13 @@
+# Dockerfile to create conda environment used as base for
+# use cases that use python embedding but need additional
+# packages
+
+FROM conda/miniconda3-centos7
+
+# create conda environments
+RUN conda update -y -n base -c defaults conda \
+    && conda create -y --name py_embed_base python=3.6.8 \
+    && conda install -y --name py_embed_base -c conda-forge xarray==0.16.2 \
+    && conda install -y --name py_embed_base -c conda-forge netcdf4==1.5.6
+
+RUN conda list --name py_embed_base > /usr/local/envs/py_embed_base/environments.yml
diff --git a/ci/docker/docker_env/README.md b/ci/docker/docker_env/README.md
new file mode 100644
index 0000000000..0a284e76a5
--- /dev/null
+++ b/ci/docker/docker_env/README.md
@@ -0,0 +1,89 @@
+# Docker Conda Environments
+
+## Commands to create Docker images in dtcenter/metplus-envs
+
+Run from this directory (ci/docker/docker_env)
+
+### create metplus_base env (just dateutil)
+```
+docker build -t dtcenter/metplus-envs:metplus_base -f Dockerfile.metplus_base .
+docker push dtcenter/metplus-envs:metplus_base
+```
+
+### create py_embed_base env (all python embedding requirements: xarray and netcdf)
+```
+docker build -t dtcenter/metplus-envs:py_embed_base -f Dockerfile.py_embed_base .
+docker push dtcenter/metplus-envs:py_embed_base
+```
+docker push dtcenter/metplus-envs:h5py +``` + +### create gempak env from metplus_base using Dockerfile.gempak_env +``` +docker build -t dtcenter/metplus-envs:gempak --build-arg ENV_NAME=gempak -f ./Dockerfile.gempak_env . +docker push dtcenter/metplus-envs:gempak +``` + +### create metdatadb env from metplus_base +``` +docker build -t dtcenter/metplus-envs:metdatadb --build-arg ENV_NAME=metdatadb . +docker push dtcenter/metplus-envs:metdatadb +``` + +### create pygrib env from py_embed_base +``` +docker build -t dtcenter/metplus-envs:pygrib --build-arg BASE_ENV=py_embed_base --build-arg ENV_NAME=pygrib . +docker push dtcenter/metplus-envs:pygrib +``` + +### create netcdf4 env from metplus_base +``` +docker build -t dtcenter/metplus-envs:netcdf4 --build-arg ENV_NAME=netcdf4 . +docker push dtcenter/metplus-envs:netcdf4 +``` + +### create xesmf env from metplus_base +``` +docker build -t dtcenter/metplus-envs:xesmf --build-arg ENV_NAME=xesmf . +docker push dtcenter/metplus-envs:xesmf +``` + +### create spacetime env from metplus_base +``` +docker build -t dtcenter/metplus-envs:spacetime --build-arg ENV_NAME=spacetime . +docker push dtcenter/metplus-envs:spacetime +``` + +### create metplotpy env from metplus_base +``` +docker build -t dtcenter/metplus-envs:metplotpy --build-arg ENV_NAME=metplotpy . +docker push dtcenter/metplus-envs:metplotpy +``` + +### create pytest env from metplus_base +``` +docker build -t dtcenter/metplus-envs:pytest --build-arg ENV_NAME=pytest . +docker push dtcenter/metplus-envs:pytest +``` + +### create diff env from netcdf4 +``` +docker build -t dtcenter/metplus-envs:diff --build-arg BASE_ENV=netcdf4 --build-arg ENV_NAME=diff . +docker push dtcenter/metplus-envs:diff +``` + +### create cycloneplotter env from metplus_base +``` +docker build -t dtcenter/metplus-envs:cycloneplotter --build-arg ENV_NAME=cycloneplotter . +docker push dtcenter/metplus-envs:cycloneplotter +``` + +### create icecover env from py_embed_base +``` +docker build -t dtcenter/metplus-envs:icecover --build-arg BASE_ENV=py_embed_base --build-arg ENV_NAME=icecover . +docker push dtcenter/metplus-envs:icecover +``` diff --git a/ci/docker/docker_env/scripts/cycloneplotter_env.sh b/ci/docker/docker_env/scripts/cycloneplotter_env.sh new file mode 100755 index 0000000000..477f428a5c --- /dev/null +++ b/ci/docker/docker_env/scripts/cycloneplotter_env.sh @@ -0,0 +1,25 @@ +#! /bin/sh + +################################################################################ +# Environment: cycloneplotter +# Last Updated: 2021-06-09 (mccabe@ucar.edu) +# Notes: Adds packages needed to run CyclonePlotter wrapper +# Added pandas because it is used by tc_and_extra_tc use case +# Python Packages: +# cartopy==0.17.0 +# matplotlib==3.3.0 +# pandas==? +# +# Other Content: None +################################################################################ + +# Conda environment to create +ENV_NAME=cycloneplotter + +# Conda environment to use as base for new environment +BASE_ENV=$1 + +conda create -y --clone ${BASE_ENV} --name ${ENV_NAME} +conda install -y --name ${ENV_NAME} -c conda-forge cartopy==0.17.0 +conda install -y --name ${ENV_NAME} -c conda-forge matplotlib==3.3.0 +conda install -y --name ${ENV_NAME} -c conda-forge pandas diff --git a/ci/docker/docker_env/scripts/diff_env.sh b/ci/docker/docker_env/scripts/diff_env.sh new file mode 100755 index 0000000000..f74f430872 --- /dev/null +++ b/ci/docker/docker_env/scripts/diff_env.sh @@ -0,0 +1,27 @@ +#! 
/bin/sh + +################################################################################ +# Environment: diff +# Last Updated: 2021-06-08 (mccabe@ucar.edu) +# Notes: Adds packages needed to run difference tests to compare output to +# truth data. +# Python Packages: +# pillow==? +# pdf2image==? +# +# Other Content: +# poppler-utils +################################################################################ + +# Conda environment to create +ENV_NAME=diff + +# Conda environment to use as base for new environment +BASE_ENV=$1 + +conda create -y --clone ${BASE_ENV} --name ${ENV_NAME} +conda install -y --name ${ENV_NAME} -c conda-forge pillow + +yum -y install poppler-utils + +conda install -y --name ${ENV_NAME} -c conda-forge pdf2image diff --git a/ci/docker/docker_env/scripts/gempak_env.sh b/ci/docker/docker_env/scripts/gempak_env.sh new file mode 100755 index 0000000000..ceaa6e20a3 --- /dev/null +++ b/ci/docker/docker_env/scripts/gempak_env.sh @@ -0,0 +1,18 @@ +#! /bin/sh + +################################################################################ +# Environment: gempak +# Last Updated: 2021-06-22 (mccabe@ucar.edu) +# Notes: Installs Java and obtains GempakToCF.jar to convert GEMPAK +# files to NetCDF format. +# Python Packages: None +# +# Other Content: +# - Java 1.8.0 OpenJDK +# - GempakToCF.jar (downloaded from DTCenter web server) +################################################################################ + +yum -y update +yum -y install java-1.8.0-openjdk +mkdir -p /data/input +curl -L -o /data/input/GempakToCF.jar -O https://dtcenter.org/sites/default/files/community-code/metplus/utilities/GempakToCF.jar diff --git a/ci/docker/docker_env/scripts/h5py_env.sh b/ci/docker/docker_env/scripts/h5py_env.sh new file mode 100755 index 0000000000..54c220c9b8 --- /dev/null +++ b/ci/docker/docker_env/scripts/h5py_env.sh @@ -0,0 +1,20 @@ +#! /bin/sh + +################################################################################ +# Environment: h5py +# Last Updated: 2021-06-08 (mccabe@ucar.edu) +# Notes: Adds Python interface to the HDF5 binary format +# Python Packages: +# h5py==2.10.0 +# +# Other Content: None +################################################################################ + +# Conda environment to create +ENV_NAME=h5py + +# Conda environment to use as base for new environment +BASE_ENV=$1 + +conda create -y --clone ${BASE_ENV} --name ${ENV_NAME} +conda install -y --name ${ENV_NAME} -c conda-forge h5py==2.10.0 diff --git a/ci/docker/docker_env/scripts/icecover_env.sh b/ci/docker/docker_env/scripts/icecover_env.sh new file mode 100755 index 0000000000..eeeaff193a --- /dev/null +++ b/ci/docker/docker_env/scripts/icecover_env.sh @@ -0,0 +1,26 @@ +#! 
/bin/sh + +################################################################################ +# Environment: icecover +# Last Updated: 2021-06-29 (mccabe@ucar.edu) +# Notes: Adds Python packages required for ice cover use case +# Python Packages: +# xarray==0.18.2 +# pyresample==1.16.0 +# scikit-learn==0.23.2 +# pyproj==?  (the pinned pyproj==3.0.1 install is commented out below) +# +# Other Content: None +################################################################################ + +# Conda environment to create +ENV_NAME=icecover + +# Conda environment to use as base for new environment +BASE_ENV=$1 + +conda create -y --clone ${BASE_ENV} --name ${ENV_NAME} +conda install -y --name ${ENV_NAME} -c conda-forge xarray==0.18.2 +conda install -y --name ${ENV_NAME} -c conda-forge pyresample==1.16.0 +conda install -y --name ${ENV_NAME} -c conda-forge scikit-learn==0.23.2 +#conda install -y --name ${ENV_NAME} -c conda-forge pyproj==3.0.1 +conda install -y --name ${ENV_NAME} -c conda-forge pyproj diff --git a/ci/docker/docker_env/scripts/metdatadb_env.sh b/ci/docker/docker_env/scripts/metdatadb_env.sh new file mode 100755 index 0000000000..4a29a6981f --- /dev/null +++ b/ci/docker/docker_env/scripts/metdatadb_env.sh @@ -0,0 +1,25 @@ +#! /bin/sh + +################################################################################ +# Environment: metdatadb +# Last Updated: 2021-06-08 (mccabe@ucar.edu) +# Notes: Adds Python packages needed to run METdbLoad from METdatadb +# Python Packages: +# lxml==3.8.0 +# pymysql==1.0.2 +# pandas==1.1.4 +# +# Other Content: None +################################################################################ + +# Conda environment to create +ENV_NAME=metdatadb + +# Conda environment to use as base for new environment +BASE_ENV=$1 + +conda create -y --clone ${BASE_ENV} --name ${ENV_NAME} + +conda install -y --name ${ENV_NAME} -c conda-forge lxml==3.8.0 +conda install -y --name ${ENV_NAME} -c conda-forge pymysql==1.0.2 +conda install -y --name ${ENV_NAME} -c conda-forge pandas==1.1.4 diff --git a/ci/docker/docker_env/scripts/metplotpy_env.sh b/ci/docker/docker_env/scripts/metplotpy_env.sh new file mode 100755 index 0000000000..3bead781c1 --- /dev/null +++ b/ci/docker/docker_env/scripts/metplotpy_env.sh @@ -0,0 +1,44 @@ +#! /bin/sh + +################################################################################ +# Environment: metplotpy +# Last Updated: 2021-06-08 (mccabe@ucar.edu) +# Notes: Adds Python packages needed to run METplotpy and METcalcpy +# Uses pip to install kaleido because +# could not install via Conda (glibc conflict) +# Python Packages: +# matplotlib==3.3.0 +# scipy==1.5.1 +# plotly==4.9.0 +# pingouin==0.3.8 +# cartopy==0.18.0 +# eofs==1.3.0 +# cmocean==2.0 +# xarray==0.17 +# netcdf4==1.5.6 +# pyyaml==? 
+# python-kaleido==0.2.1 +# +# Other Content: None +################################################################################ + +# Conda environment to create +ENV_NAME=metplotpy + +# Conda environment to use as base for new environment +BASE_ENV=$1 + + +conda create -y --clone ${BASE_ENV} --name ${ENV_NAME} + +conda install -y --name ${ENV_NAME} -c conda-forge matplotlib==3.3.0 +conda install -y --name ${ENV_NAME} -c conda-forge scipy==1.5.1 +conda install -y --name ${ENV_NAME} -c conda-forge plotly==4.9.0 +conda install -y --name ${ENV_NAME} -c conda-forge pingouin==0.3.8 +conda install -y --name ${ENV_NAME} -c conda-forge cartopy==0.18.0 +conda install -y --name ${ENV_NAME} -c conda-forge eofs==1.3.0 +conda install -y --name ${ENV_NAME} -c conda-forge cmocean==2.0 +conda install -y --name ${ENV_NAME} -c conda-forge xarray==0.17 +conda install -y --name ${ENV_NAME} -c conda-forge netcdf4==1.5.6 +conda install -y --name ${ENV_NAME} -c conda-forge pyyaml +/usr/local/envs/${ENV_NAME}/bin/pip3 install kaleido==0.2.1 diff --git a/ci/docker/docker_env/scripts/netcdf4_env.sh b/ci/docker/docker_env/scripts/netcdf4_env.sh new file mode 100755 index 0000000000..43e4a98f48 --- /dev/null +++ b/ci/docker/docker_env/scripts/netcdf4_env.sh @@ -0,0 +1,20 @@ +#! /bin/sh + +################################################################################ +# Environment: netcdf4 +# Last Updated: 2021-06-08 (mccabe@ucar.edu) +# Notes: Adds NetCDF4 Python package +# Python Packages: +# netcdf4==1.5.6 +# +# Other Content: None +################################################################################ + +# Conda environment to create +ENV_NAME=netcdf4 + +# Conda environment to use as base for new environment +BASE_ENV=$1 + +conda create -y --clone ${BASE_ENV} --name ${ENV_NAME} +conda install -y --name ${ENV_NAME} -c conda-forge netcdf4==1.5.6 diff --git a/ci/docker/docker_env/scripts/pygrib_env.sh b/ci/docker/docker_env/scripts/pygrib_env.sh new file mode 100755 index 0000000000..7a74287b86 --- /dev/null +++ b/ci/docker/docker_env/scripts/pygrib_env.sh @@ -0,0 +1,24 @@ +#! /bin/sh + +################################################################################ +# Environment: pygrib +# Last Updated: 2021-06-18 (mccabe@ucar.edu) +# Notes: Adds Python packages needed to read GRIB data +# Python Packages: +# pygrib==2.0.2 +# metpy==1.0.1 +# +# Other Content: None +################################################################################ + +# Conda environment to create +ENV_NAME=pygrib + +# Conda environment to use as base for new environment +BASE_ENV=$1 + + +conda create -y --clone ${BASE_ENV} --name ${ENV_NAME} + +conda install -y --name ${ENV_NAME} -c conda-forge pygrib==2.0.2 +conda install -y --name ${ENV_NAME} -c conda-forge metpy==1.0.1 diff --git a/ci/docker/docker_env/scripts/pytest_env.sh b/ci/docker/docker_env/scripts/pytest_env.sh new file mode 100755 index 0000000000..e549f9f94d --- /dev/null +++ b/ci/docker/docker_env/scripts/pytest_env.sh @@ -0,0 +1,27 @@ +#! /bin/sh + +################################################################################ +# Environment: pytest +# Last Updated: 2021-06-08 (mccabe@ucar.edu) +# Notes: Adds pytest and pytest coverage packages to run unit tests +# Added pandas because plot_util test needs it +# Added netcdf4 because SeriesAnalysis test needs it +# Python Packages: +# pytest==? +# pytest-cov==? +# pandas==? 
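+# netcdf4==?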
+# +# Other Content: None +################################################################################ + +# Conda environment to create +ENV_NAME=pytest + +# Conda environment to use as base for new environment +BASE_ENV=$1 + +conda create -y --clone ${BASE_ENV} --name ${ENV_NAME} +conda install -y --name ${ENV_NAME} -c conda-forge pytest +conda install -y --name ${ENV_NAME} -c conda-forge pytest-cov +conda install -y --name ${ENV_NAME} -c conda-forge pandas +conda install -y --name ${ENV_NAME} -c conda-forge netcdf4 diff --git a/ci/docker/docker_env/scripts/spacetime_env.sh b/ci/docker/docker_env/scripts/spacetime_env.sh new file mode 100755 index 0000000000..d1da9ef93b --- /dev/null +++ b/ci/docker/docker_env/scripts/spacetime_env.sh @@ -0,0 +1,36 @@ +#! /bin/sh + +################################################################################ +# Environment: spacetime +# Last Updated: 2021-06-08 (mccabe@ucar.edu) +# Notes: Adds Python packages needed to generate coherence spectra (METplotpy) +# Python Packages: +# netCDF4==1.5.4 +# xarray==0.18.2 +# scipy==1.5.3 +# matplotlib==3.2.2 +# pyngl==1.6.1 +# pyyaml==5.3.1 +# +# Other Content: None +################################################################################ + +# Conda environment to create +ENV_NAME=spacetime + +# Conda environment to use as base for new environment +# Not used in this script because Python version differs from base version +BASE_ENV=$1 + + +conda create -y --name ${ENV_NAME} python=3.8 + +conda install -y --name ${ENV_NAME} -c conda-forge netCDF4==1.5.4 +conda install -y --name ${ENV_NAME} -c conda-forge xarray==0.18.2 +conda install -y --name ${ENV_NAME} -c conda-forge scipy==1.5.3 +conda install -y --name ${ENV_NAME} -c conda-forge matplotlib==3.2.2 +conda install -y --name ${ENV_NAME} -c conda-forge pyngl==1.6.1 +conda install -y --name ${ENV_NAME} -c conda-forge pyyaml==5.3.1 + +# tested using anaconda channel but changed to using conda-forge +#conda install -y --name ${ENV_NAME} -c anaconda pyyaml diff --git a/ci/docker/docker_env/scripts/weatherregime_env.sh b/ci/docker/docker_env/scripts/weatherregime_env.sh new file mode 100755 index 0000000000..bb2f7c81d3 --- /dev/null +++ b/ci/docker/docker_env/scripts/weatherregime_env.sh @@ -0,0 +1,38 @@ +#! /bin/sh + +################################################################################ +# Environment: weatherregime +# Last Updated: 2021-06-29 (mccabe@ucar.edu) +# Notes: Adds Python packages needed to run weather regime use case +# METplotpy and METcalcpy +# Uses pip to install kaleido because +# could not install via Conda (glibc conflict) +# Python Packages: +# matplotlib==3.3.0 +# scipy==1.5.1 +# plotly==4.9.0 +# pingouin==0.3.8 +# cartopy==0.18.0 +# eofs==1.3.0 +# cmocean==2.0 +# xarray==0.17 +# netcdf4==1.5.6 +# pyyaml==? 
+# python-kaleido==0.2.1 +# +# Other Content: None +################################################################################ + +# Conda environment to create +ENV_NAME=weatherregime + +# Conda environment to use as base for new environment +BASE_ENV=$1 + + +conda create -y --clone ${BASE_ENV} --name ${ENV_NAME} +conda install -y --name ${ENV_NAME} -c conda-forge scikit-learn==0.24.2 +#conda install -y --name ${ENV_NAME} -c conda-forge scipy==1.5.4 +conda install -y --name ${ENV_NAME} -c conda-forge eofs==1.4.0 +conda install -y --name ${ENV_NAME} -c conda-forge netcdf4==1.5.7 +#conda install -y --name ${ENV_NAME} -c conda-forge numpy==1.19.5 diff --git a/ci/docker/docker_env/scripts/xesmf_env.sh b/ci/docker/docker_env/scripts/xesmf_env.sh new file mode 100755 index 0000000000..44e5a084f9 --- /dev/null +++ b/ci/docker/docker_env/scripts/xesmf_env.sh @@ -0,0 +1,22 @@ +#! /bin/sh + +################################################################################ +# Environment: xesmf +# Last Updated: 2021-06-08 (mccabe@ucar.edu) +# Notes: Adds Python package to read Tripolar grids +# Python Packages: +# xesmf==0.3.0 +# +# Other Content: None +################################################################################ + +# Conda environment to create +ENV_NAME=xesmf + +# Conda environment to use as base for new environment +BASE_ENV=$1 + +conda create -y --clone ${BASE_ENV} --name ${ENV_NAME} +conda install -y --name ${ENV_NAME} -c conda-forge netcdf4 +conda install -y --name ${ENV_NAME} -c conda-forge xarray +conda install -y --name ${ENV_NAME} -c conda-forge xesmf diff --git a/ci/jobs/diff_output.py b/ci/jobs/diff_output.py deleted file mode 100755 index 969e474497..0000000000 --- a/ci/jobs/diff_output.py +++ /dev/null @@ -1,25 +0,0 @@ -#! /usr/bin/env python3 - -# Run by GitHub Actions (in ci/jobs/run_use_cases.py) to -# trigger difference tests. - -import sys -import os - -workspace = os.environ.get('GITHUB_WORKSPACE') - -util_dir = os.path.join(workspace, - 'ci', - 'util') -print(f"UTIL DIR is {util_dir}") -sys.path.insert(0, util_dir) - -import diff_util - -data_dir = os.path.abspath(os.path.join(workspace, - os.pardir)) -dir_a = os.path.join(data_dir, 'truth') -dir_b = os.path.join(data_dir, 'output') - -if not diff_util.compare_dir(dir_a, dir_b, debug=True): - sys.exit(1) diff --git a/ci/jobs/get_use_case_commands.py b/ci/jobs/get_use_case_commands.py deleted file mode 100755 index 761d88a060..0000000000 --- a/ci/jobs/get_use_case_commands.py +++ /dev/null @@ -1,104 +0,0 @@ -#! 
/usr/bin/env python3 - -# Script to obtain commands needed to run use case groups including -# scripts or pip commands to obtain external Python dependencies -# Run by GitHub Actions (in ci/jobs/run_use_cases.py) to run use case tests - -import sys -import os - -# add internal_tests/use_cases directory to path so the test suite can be found -USE_CASES_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), - os.pardir, - os.pardir)) -sys.path.insert(0, USE_CASES_DIR) - -from internal_tests.use_cases.metplus_use_case_suite import METplusUseCaseSuite -from metplus.util.met_util import expand_int_string_to_list - -def handle_requirements(requirements, work_dir): - requirement_args = [] - for requirement in requirements: - # don't obtain METviewer here because it has to be set up outside of - # docker container that runs the use cases - if requirement.lower() == 'metviewer': - continue - - # check if get_{requirement} script exists and use it if it does - script_path = os.path.join(work_dir, - 'ci', - 'jobs', - 'python_requirements', - f'get_{requirement.lower()}.sh') - print(f"Looking for script: {script_path}") - if os.path.exists(script_path): - print("Script found, using script to obtain dependencies") - requirement_args.append(script_path) - else: - # if script doesn't exist, use pip3 install to obtain package - print("Script does not exist. Using pip3 install to obtain depdencies") - requirement_args.append(f"pip3 install {requirement}") - - return requirement_args - -def main(categories, subset_list, work_dir=None, host_name='docker'): - all_commands = [] - - if work_dir is None: - work_dir = USE_CASES_DIR - - test_suite = METplusUseCaseSuite() - test_suite.add_use_case_groups(categories, subset_list) - - for group_name, use_cases_by_requirement in test_suite.category_groups.items(): - for use_case_by_requirement in use_cases_by_requirement: - requirement_args = handle_requirements(use_case_by_requirement.requirements, - work_dir) - all_use_case_args = [] - for use_case in use_case_by_requirement.use_cases: - use_case_args = f"--config {','.join(use_case.config_args)}" - all_use_case_args.append(use_case_args) - - all_use_case_args.append('--skip_output_check') - use_case_args = ' '.join(all_use_case_args) - cmd = (f'{work_dir}/internal_tests/use_cases/run_test_use_cases.sh ' - f'{host_name} {use_case_args}') - all_commands.append((cmd, requirement_args)) - - return sorted(all_commands, key = lambda x: x[1]) - -def handle_command_line_args(): - # read command line arguments to determine which use cases to run - if len(sys.argv) < 2: - print("No use cases specified") - sys.exit(1) - - # split up categories by & or , - categories = sys.argv[1] - - # get subset values if specified - if len(sys.argv) > 2: - if sys.argv[2] == 'all': - subset_list = None - else: - subset_list = expand_int_string_to_list(sys.argv[2]) - else: - subset_list = None - - - # check if comparison flag should be set - if len(sys.argv) > 3: - do_comparison = True - else: - do_comparison = False - - return categories, subset_list, do_comparison - -if __name__ == '__main__': - categories, subset_list, _ = handle_command_line_args() - all_commands = main(categories, subset_list) - for command, requirements in all_commands: - print(f"COMMAND:") - for req in requirements: - print(f'{req}') - print(f'{command}\n') diff --git a/ci/jobs/print_python_version.py b/ci/jobs/print_python_version.py deleted file mode 100755 index 5b6e382354..0000000000 --- a/ci/jobs/print_python_version.py +++ /dev/null @@ -1,15 +0,0 @@ -#! 
/usr/bin/env python3 - -# Script to easily obtain minimum python version requirement -# Used in GitHub Actions (in ci/jobs/python_requirements/get_miniconda.sh) - -import sys -import os - -sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), - os.pardir, - os.pardir))) - -from metplus import get_python_version - -print(get_python_version()) diff --git a/ci/jobs/python_requirements/get_cartopy.sh b/ci/jobs/python_requirements/get_cartopy.sh deleted file mode 100755 index e3ccc8cd18..0000000000 --- a/ci/jobs/python_requirements/get_cartopy.sh +++ /dev/null @@ -1,21 +0,0 @@ -#!/bin/bash - -#Shell script for installing cartopy -#Called from a docker run command inside "test_use_cases_met_tool_wrappers.sh" - -mkdir /cartopy -cd /cartopy -wget https://download.osgeo.org/proj/proj-4.9.1.tar.gz -tar zxf proj-4.9.1.tar.gz -cd proj-4.9.1 -./configure -make -make install -yum -y install geos -yum -y install geos-devel -pip3 install --upgrade cython numpy pyshp six -pip3 install shapely --no-binary shapely -pip3 install cartopy==0.18.0 - -#some cartopy functionality fails without scipy -pip3 install scipy diff --git a/ci/jobs/python_requirements/get_metcalcpy.sh b/ci/jobs/python_requirements/get_metcalcpy.sh deleted file mode 100755 index 6054ed17fc..0000000000 --- a/ci/jobs/python_requirements/get_metcalcpy.sh +++ /dev/null @@ -1,13 +0,0 @@ -#! /bin/bash - -pip3 install matplotlib -pip3 install scipy -pip3 install pingouin - -basedir=$(dirname "$0") -work_dir=$basedir/../../.. - -# run manage externals to obtain METcalcpy -${work_dir}/manage_externals/checkout_externals -e ${work_dir}/ci/parm/Externals_metcalcpy.cfg - -pip3 install ${work_dir}/../METcalcpy diff --git a/ci/jobs/python_requirements/get_metdatadb.sh b/ci/jobs/python_requirements/get_metdatadb.sh deleted file mode 100755 index 6aad83ee26..0000000000 --- a/ci/jobs/python_requirements/get_metdatadb.sh +++ /dev/null @@ -1,10 +0,0 @@ -#! /bin/bash - -pip3 install lxml -pip3 install PyMySQL - -basedir=$(dirname "$0") -work_dir=$basedir/../../.. - -# run manage externals to obtain METdatadb -${work_dir}/manage_externals/checkout_externals -e ${work_dir}/ci/parm/Externals_metdatadb.cfg diff --git a/ci/jobs/python_requirements/get_metplotpy.sh b/ci/jobs/python_requirements/get_metplotpy.sh deleted file mode 100755 index 1d2170504b..0000000000 --- a/ci/jobs/python_requirements/get_metplotpy.sh +++ /dev/null @@ -1,15 +0,0 @@ -#! /bin/bash - -pip3 install matplotlib -pip3 install scipy -pip3 install cmocean -pip3 install plotly==4.9.0 -pip3 install kaleido - -basedir=$(dirname "$0") -work_dir=$basedir/../../.. - -# run manage externals to obtain METcalcpy -${work_dir}/manage_externals/checkout_externals -e ${work_dir}/ci/parm/Externals_metplotpy.cfg - -pip3 install ${work_dir}/../METplotpy diff --git a/ci/jobs/python_requirements/get_miniconda.sh b/ci/jobs/python_requirements/get_miniconda.sh deleted file mode 100755 index c7a3d0bac8..0000000000 --- a/ci/jobs/python_requirements/get_miniconda.sh +++ /dev/null @@ -1,36 +0,0 @@ -#! 
/bin/bash - -script_dir=$(dirname "$0") - -if [ -z "${1+x}" ]; then - python_version=`${script_dir}/../print_python_version.py` -else - python_version=$1 -fi - -echo Creating Conda environment using $python_version - -# these are used to obtain version of MiniConda3 -# the version determines the default version of Python -# that is used, but earlier versions can be obtained -# there is no version of MiniConda available that -# matches the current Python version requirement of METplus -MINIC_PYTHON_VERS=38 -MINIC_VERS=4.8.3 - -echo Checking if Miniconda is installed -# check if conda is already available and exit if it is -conda --version &> /dev/null -if [ $? == 0 ]; then - echo Miniconda is already installed - exit 0 -fi - -echo Installing Miniconda -curl -sSL https://repo.continuum.io/miniconda/Miniconda3-py${MINIC_PYTHON_VERS}_${MINIC_VERS}-Linux-x86_64.sh -o /tmp/miniconda.sh - -bash /tmp/miniconda.sh -bfp /usr/local/ -rm -rf /tmp/miniconda.sh -conda install -y python=${python_version} -conda update conda -conda clean --all --yes diff --git a/ci/jobs/python_requirements/get_pygrib.sh b/ci/jobs/python_requirements/get_pygrib.sh deleted file mode 100755 index 6f1b63aa05..0000000000 --- a/ci/jobs/python_requirements/get_pygrib.sh +++ /dev/null @@ -1,9 +0,0 @@ -#!/bin/bash - -#shell script to install pygrib with dependencies - -yum -y install eccodes-devel -pip3 install numpy -pip3 install pyproj -pip3 install eccodes-python -pip3 install pygrib diff --git a/ci/jobs/python_requirements/get_spacetime.sh b/ci/jobs/python_requirements/get_spacetime.sh deleted file mode 100755 index 25287efe2f..0000000000 --- a/ci/jobs/python_requirements/get_spacetime.sh +++ /dev/null @@ -1,25 +0,0 @@ -#! /bin/bash - -script_dir=$(dirname "$0") - -work_dir=$script_dir/../../.. - -echo Installing environment for UserScript_obsPrecip_obsOnly_CrossSpectraPlot with conda - -# create a MiniConda environemtn using Python 3.8 -${script_dir}/get_miniconda.sh 3.8 - -conda install -y pip - -# run manage externals to obtain METcalcpy -${work_dir}/manage_externals/checkout_externals -e ${work_dir}/ci/parm/Externals_metplotpy.cfg -python3 -m pip install ${work_dir}/../METplotpy - -# run manage externals to obtain METcalcpy -${work_dir}/manage_externals/checkout_externals -e ${work_dir}/ci/parm/Externals_metcalcpy.cfg - -python3 -m pip install ${work_dir}/../METcalcpy - -# install required packages for use case -conda install -y -c conda-forge python-dateutil netCDF4 xarray scipy matplotlib pyngl -conda install -c anaconda pyyaml diff --git a/ci/jobs/python_requirements/get_xesmf.sh b/ci/jobs/python_requirements/get_xesmf.sh deleted file mode 100755 index e5820c9163..0000000000 --- a/ci/jobs/python_requirements/get_xesmf.sh +++ /dev/null @@ -1,8 +0,0 @@ -#! /bin/bash - -script_dir=$(dirname "$0") - -${script_dir}/get_miniconda.sh - -echo Installing xesmf with conda -conda install -c conda-forge dask netCDF4 xesmf diff --git a/ci/jobs/run_use_cases.py b/ci/jobs/run_use_cases.py deleted file mode 100755 index cf16096aaf..0000000000 --- a/ci/jobs/run_use_cases.py +++ /dev/null @@ -1,167 +0,0 @@ -#! 
/usr/bin/env python3 - -# Used in GitHub Actions (in ci/actions/run_tests/entrypoint.sh) -# to obtain and run commands to run use cases from group, -# execute difference tests if requested, copy error logs and/or -# files that reported differences into directory to make -# them available in GitHub Actions artifacts for easy review - -import os -import sys -import subprocess -import shlex -import shutil - -import get_use_case_commands - -# add ci/util to sys path to get diff utility -diff_util_dir = os.path.join(os.environ.get('GITHUB_WORKSPACE'), - 'ci', - 'util') -sys.path.insert(0, diff_util_dir) -from diff_util import compare_dir - -TRUTH_DIR = '/data/truth' -OUTPUT_DIR = '/data/output' -DIFF_DIR = '/data/diff' -ERROR_LOG_DIR = '/data/error_logs' - -def copy_error_logs(): - """! Copy log output to error log directory if any use case failed """ - use_case_dirs = os.listdir(OUTPUT_DIR) - for use_case_dir in use_case_dirs: - log_dir = os.path.join(OUTPUT_DIR, - use_case_dir, - 'logs') - if not os.path.isdir(log_dir): - continue - - # check if there are errors in the metplus.log file and - # only copy directory if there are any errors - metplus_log = os.path.join(log_dir, 'metplus.log') - found_errors = False - with open(metplus_log, 'r') as file_handle: - if 'ERROR:' in file_handle.read(): - found_errors = True - if not found_errors: - continue - - output_dir = os.path.join(ERROR_LOG_DIR, - use_case_dir) - log_files = os.listdir(log_dir) - for log_file in log_files: - log_path = os.path.join(log_dir, log_file) - output_path = os.path.join(output_dir, log_file) - print(f"Copying {log_path} to {output_path}") - # create output directory if it doesn't exist - output_dir = os.path.dirname(output_path) - if not os.path.exists(output_dir): - os.makedirs(output_dir) - shutil.copyfile(log_path, output_path) - -def copy_diff_output(diff_files): - """! Loop through difference output and copy files - to directory so it can be made available for comparison. - Files will be put into the same directory with _truth or - _output added before their file extension. - - @param diff_files list of tuples containing truth file path - and file path of output that was just generated. Either tuple - value may be an empty string if the file was not found. - """ - for truth_file, out_file, _, diff_file in diff_files: - if truth_file: - copy_to_diff_dir(truth_file, - 'truth') - if out_file: - copy_to_diff_dir(out_file, - 'output') - if diff_file: - copy_to_diff_dir(diff_file, - 'diff') - -def copy_to_diff_dir(file_path, data_type): - """! Generate output path based on input file path, - adding text based on data_type to the filename, then - copy input file to that output path. 
- - @param file_path full path of file to copy - @param data_type data identifier, should be 'truth' - or 'output' - @returns True if success, False if there was a problem - copying the file - """ - if data_type == 'truth': - data_dir = TRUTH_DIR - else: - data_dir = OUTPUT_DIR - - # replace data dir with diff directory - diff_out = file_path.replace(data_dir, DIFF_DIR) - - # add data type identifier to filename before extension - # if data is not difference output - if data_type == 'diff': - output_path = diff_out - else: - output_path, extension = os.path.splitext(diff_out) - output_path = f'{output_path}_{data_type}{extension}' - - # create output directory if it doesn't exist - output_dir = os.path.dirname(output_path) - if not os.path.exists(output_dir): - os.makedirs(output_dir) - - print(f'Copying {file_path} to\n{output_path}') - try: - shutil.copyfile(file_path, output_path) - except OSError as err: - print(f'Could not copy file. {err}') - return False - - return True - -def main(): - categories, subset_list, compare = ( - get_use_case_commands.handle_command_line_args() - ) - categories_list = categories.split(',') - all_commands = ( - get_use_case_commands.main(categories_list, - subset_list, - work_dir=os.environ.get('GITHUB_WORKSPACE')) - ) - - isOK = True - for cmd, reqs in all_commands: - if reqs: - reqs_fmt = f"{';'.join(reqs)};" - else: - reqs_fmt = '' - print(f'{reqs}\n{cmd}') - full_cmd = f"{reqs_fmt}pip3 freeze;{cmd}" - try: - subprocess.run(full_cmd, check=True, shell=True) - except subprocess.CalledProcessError as err: - print(f"ERROR: Command failed: {full_cmd} -- {err}") - isOK = False - copy_error_logs() - - if compare and isOK: - print('******************************') - print("Comparing output to truth data") - diff_files = compare_dir(TRUTH_DIR, OUTPUT_DIR, - debug=True, - save_diff=True) - if diff_files: - isOK = False - - # copy difference files into directory - # so it can be easily downloaded and compared - copy_diff_output(diff_files) - - if not isOK: - sys.exit(1) - -if __name__ == '__main__': - main() diff --git a/ci/parm/Externals_metplotpy.cfg b/ci/parm/Externals_metplotpy.cfg deleted file mode 100644 index ca57164148..0000000000 --- a/ci/parm/Externals_metplotpy.cfg +++ /dev/null @@ -1,9 +0,0 @@ -[METplotpy] -local_path = ../METplotpy -protocol = git -required = True -repo_url = https://github.com/dtcenter/METplotpy -branch = develop - -[externals_description] -schema_version = 1.0.0 diff --git a/ci/util/diff_util.py b/ci/util/diff_util.py index ca2012fe81..a2b0f9a53c 100644 --- a/ci/util/diff_util.py +++ b/ci/util/diff_util.py @@ -17,13 +17,7 @@ SKIP_EXTENSIONS = [ '.zip', '.png', -] - -PDF_EXTENSIONS = [ - '.pdf', -] - -UNSUPPORTED_EXTENSIONS = [ + '.gif', ] PDF_EXTENSIONS = [ @@ -51,7 +45,7 @@ def get_file_type(filepath): pass if file_extension in SKIP_EXTENSIONS: - return 'skip' + return f'skip {file_extension}' if file_extension in PDF_EXTENSIONS: return 'pdf' @@ -151,8 +145,8 @@ def compare_files(filepath_a, filepath_b, debug=False, dir_a=None, dir_b=None, return (filepath_a, '', 'file not found', '') file_type = get_file_type(filepath_a) - if file_type == 'skip': - print(f'Skipping') + if file_type.startswith('skip'): + print(f"Skipping {file_type.split(' ')[1]} file") return None if file_type.startswith('unsupported'): @@ -460,3 +454,10 @@ def nc_is_equal(file_a, file_b, fields=None, debug=False): return False return is_equal + +if __name__ == '__main__': + dir_a = sys.argv[1] + dir_b = sys.argv[2] + if len(sys.argv) > 3: + save_diff = True + 
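+    else:
+        # default when no third argument is given; without this branch, save_diff
+        # is undefined and the compare_dir call below raises a NameError
+        save_diff = False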
compare_dir(dir_a, dir_b, debug=True, save_diff=save_diff) diff --git a/docs/Contributors_Guide/add_use_case.rst b/docs/Contributors_Guide/add_use_case.rst index c3d600070c..529680c74a 100644 --- a/docs/Contributors_Guide/add_use_case.rst +++ b/docs/Contributors_Guide/add_use_case.rst @@ -1,12 +1,13 @@ +**************** Adding Use Cases -================ +**************** .. |metplus_data_dir| replace:: /d2/www/dtcenter/dfiles/code/METplus/METplus_Data .. |metplus_staging_dir| replace:: /d2/projects/METplus/METplus_Data_Staging .. |dtc_web_server| replace:: mohawk.rap.ucar.edu Work in a Feature Branch ------------------------- +======================== Test and develop new use cases in a GitHub feature branch. More information on this process can be found in the @@ -17,7 +18,7 @@ This branch will be the source of the pull request to merge the changes into the develop branch. Types of Use Cases ------------------- +================== * Use cases that involve a single MET tool/METplus wrapper will reside in the *METplus/parm/use_cases/met_tool_wrapper* directory. @@ -29,7 +30,7 @@ Types of Use Cases .. _use_case_categories: Use Case Categories -------------------- +=================== New MET tool wrapper use cases will be put in the repository under parm/use_cases/met_tool_wrapper/ where @@ -61,10 +62,10 @@ or are unsure which category is the most appropriate, please contact MET Help (met_help@ucar.edu). Use Case Content ----------------- +================ Configure New Use Case -^^^^^^^^^^^^^^^^^^^^^^ +---------------------- If creating a new MET tool wrapper use case, in the MET tool name sub-directory (parm/use_cases/met_tool_wrapper/), each @@ -107,11 +108,11 @@ configuration file name excluding the .conf suffix. .. figure:: figure/model_applications_subdir.png Use Case Rules -^^^^^^^^^^^^^^ +-------------- - The name of the use case files should conform to the guidelines listed above in Use Case Content. -- The use case METplus configuration file should not set any variables that +- The use case METplus configuration file should not **set** any variables that specific to the user's environment, such as INPUT_BASE, OUTPUT_BASE, and PARM_BASE, METPLUS_CONF, etc. - A limited number of run times should be processed so that they use case runs @@ -147,10 +148,10 @@ Use Case Rules .. _use_case_documentation: Document New Use Case -^^^^^^^^^^^^^^^^^^^^^ +--------------------- Create a New Model Applications Docs Directory -"""""""""""""""""""""""""""""""""""""""""""""" +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ **If the use case falls under an existing Model Applications category, you can skip this section.** @@ -178,7 +179,7 @@ category in the User's Guide > METplus Use Cases > page. Add Sphinx Documentation File -""""""""""""""""""""""""""""" +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ In the corresponding documentation MET tool name directory (**docs**/use_cases/met_tool_wrapper/) for a met_tool_wrappers @@ -224,7 +225,7 @@ use case OR category directory for a model_applications use case documentation. Accessing the Documentation -^^^^^^^^^^^^^^^^^^^^^^^^^^^ +--------------------------- It is important to ensure that the new use case files is displayed and the formatting looks correct. Prior to the release of METplus v4.0.0 contributors @@ -239,7 +240,7 @@ information. .. _manual-build: Build the Documentation Manually -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +-------------------------------- Build the documentation and ensure that the new use case file is displayed and the formatting looks correct. 
The Python packages sphinx, @@ -283,7 +284,8 @@ in your Python 3 environment:: .. _use_case_input_data: Input Data ----------- +========== + Sample input data needed to run the use case should be provided. Please try to limit your input data to the minimum that is needed to demonstrate your use case effectively. GRIB2 files can be pared down @@ -300,13 +302,13 @@ If the input data is in NetCDF format, the the file(s). Providing new data -^^^^^^^^^^^^^^^^^^ +------------------ Log into the computer where your input data resides -""""""""""""""""""""""""""""""""""""""""""""""""""" +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Switch to Bash -"""""""""""""" +^^^^^^^^^^^^^^ If you are using a shell other than bash, run "bash" to activate a bash shell. This will make the instructions you need to run on the DTC web server @@ -327,7 +329,7 @@ If you are unsure which shell you use, run the following command:: that you can copy and paste into the terminal. Download the template environment file -"""""""""""""""""""""""""""""""""""""" +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ This file is available on the DTC web server. You can use wget to download the file to your current working directory, or visit the URL in a browser and save @@ -338,7 +340,7 @@ it to your computer:: Or click this `link `_. Rename env file -""""""""""""""" +^^^^^^^^^^^^^^^ Rename this file to include your feature branch. For example, if your branch is feature_ABC_desc, then run:: @@ -346,7 +348,7 @@ is feature_ABC_desc, then run:: mv add_use_case_env.bash feature_ABC_desc_env.bash Change the values of the env file -""""""""""""""""""""""""""""""""" +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Open this file with your favorite editor and modify it to include the appropriate information for your use case. @@ -378,7 +380,7 @@ If the above is shown, then METPLUS_VERSION should be set to 4.0 exactly. Source the env file and check environment -""""""""""""""""""""""""""""""""""""""""" +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Source your environment file and verify that the variables are set correctly. If the source command fails, make sure you have switched to using @@ -392,7 +394,7 @@ bash:: file that you just sourced. Create sub-directories for input data -""""""""""""""""""""""""""""""""""""" +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Put new dataset into a directory that matches the use case directories, i.e. model_applications/${METPLUS_USE_CASE_CATEGORY}/${METPLUS_USE_CASE_NAME}. @@ -404,7 +406,7 @@ use case directory can be used to separate out different data sources if desired. Verify use case config file contains correct directory -"""""""""""""""""""""""""""""""""""""""""""""""""""""" +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Set directory paths in the use case config file relative to INPUT_BASE i.e {INPUT_BASE}/model_applications// where @@ -415,7 +417,7 @@ You can set {INPUT_BASE} to your local directory to test that the use case still runs properly. Create new data tarfile -""""""""""""""""""""""" +^^^^^^^^^^^^^^^^^^^^^^^ Create a tarfile on your development machine with the new dataset. Make sure the tarfile contains directories, i.e. @@ -441,7 +443,7 @@ model_applications// directory. 
For example:: model_applications/marine_and_coastal/PlotDataPlane_obsHYCOM_coordTripolar/weight_south.nc Copy files to DTC Web Server -"""""""""""""""""""""""""""" +^^^^^^^^^^^^^^^^^^^^^^^^^^^^ If you have access to the internal DTC web server, copy over the tarfile and the environment file to the staging directory: @@ -460,7 +462,7 @@ For an example on how to upload data to the ftp site see `MET Help Webpage `_. Adding new data to full sample data tarfile -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +------------------------------------------- If you are unable to access the DTC web server to upload data or if you do not have permission to use the met_test shared user account, someone from the @@ -471,7 +473,7 @@ member(s) you have been coordinating with for this work. If you are unsure who to contact, then please email MET Help (met_help@ucar.edu). Log into the DTC Web Server with SSH -"""""""""""""""""""""""""""""""""""" +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ The web server is only accessible if you are on the NCAR VPN. @@ -480,7 +482,7 @@ The web server is only accessible if you are on the NCAR VPN. ssh |dtc_web_server| Switch to the met_test user -""""""""""""""""""""""""""" +^^^^^^^^^^^^^^^^^^^^^^^^^^^ The commands must be run as the met_test user to write into the data directory:: @@ -490,7 +492,7 @@ directory:: If unable to run this command successfully, please contact a METplus developer. Setup the environment to run commands on web server -""""""""""""""""""""""""""""""""""""""""""""""""""" +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Change directory to the data staging dir, source the environment file you created, and make sure the environment @@ -503,7 +505,7 @@ variables are set properly. printenv | grep METPLUS\_ Create a feature branch directory in the tarfile directory -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""" +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ As the met_test user, create a new directory in the METplus_Data web directory named after the branch containing the changes for the new use case. @@ -514,7 +516,7 @@ On the DTC web server:: cd ${METPLUS_FEATURE_BRANCH} Copy the environment file into the feature branch directory -""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ This will make it easier for the person who will update the tarfiles for the next release to include the new data (right before the pull request is merged @@ -523,7 +525,7 @@ into the develop branch):: cp ${METPLUS_DATA_STAGING_DIR}/${METPLUS_USER_ENV_FILE} ${METPLUS_DATA_TARFILE_DIR}/${METPLUS_FEATURE_BRANCH} Check if the category tarfile exists already -"""""""""""""""""""""""""""""""""""""""""""" +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Check the symbolic link in the develop directory to determine latest tarball:: @@ -536,7 +538,7 @@ that no sample data tarfiles for the category are found in any of the release or develop directories. 
Add contents of existing tarfile to feature branch directory (if applicable) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ **If you have determined that there is an existing tarfile for the category (from the previous step)**, then untar the sample data tarball into @@ -546,7 +548,7 @@ step:: tar zxf ${METPLUS_TARFILE_TO_ADD_DATA} -C ${METPLUS_DATA_TARFILE_DIR}/${METPLUS_FEATURE_BRANCH} Create the new tarfile -"""""""""""""""""""""" +^^^^^^^^^^^^^^^^^^^^^^ Untar the new data tarball into the feature branch directory:: @@ -566,7 +568,7 @@ MET Tool Wrapper Use Case Example:: tar czf sample_data-${METPLUS_USE_CASE_CATEGORY}.tgz met_test Add volume_mount_directories file -""""""""""""""""""""""""""""""""" +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Copy the volume_mount_directories file from the develop directory into the branch directory. Update the entry for the new tarball if the mounting point @@ -578,13 +580,13 @@ climate:model_applications/climate:: cp ${METPLUS_DATA_TARFILE_DIR}/develop/volume_mount_directories ${METPLUS_DATA_TARFILE_DIR}/${METPLUS_FEATURE_BRANCH} Log out of DTC Web Server -""""""""""""""""""""""""" +^^^^^^^^^^^^^^^^^^^^^^^^^ The rest of the instructions are run on the machine where the use case was created and tested. Trigger Input Data Ingest -^^^^^^^^^^^^^^^^^^^^^^^^^ +------------------------- If working in the dtcenter/METplus repository, please skip this step. If working in a forked METplus repository, the newly added input data will not @@ -622,7 +624,7 @@ will be used in the final pull request. .. _add_use_case_to_test_suite: Add use case to the test suite -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +------------------------------ In the METplus repository, there is a text file that contains the list of all use cases:: @@ -643,7 +645,7 @@ The use cases can be defined using 3 different formats:: :: :::: - :::::: + :::::: **** @@ -679,116 +681,143 @@ conf variable override must be separated by a comma. Example:: The above example is named 'GridStat_multiple_config' and uses 3 .conf files. Use cases with only one configuration file can also use this format is desired. -**::::::** +**::::::** + +This format is used if there are additional dependencies required to run +the use case such as a different Python environment. + is a list of keywords separated by commas. + +Example:: + + 0::CyclonePlotter::met_tool_wrapper/CyclonePlotter/CyclonePlotter.conf,user_env_vars.MET_PYTHON_EXE=python3:: cycloneplotter_env + +See the next section for more information on valid values to supply as +dependencies. + +Dependencies +^^^^^^^^^^^^ + +Environments +"""""""""""" -This format is used if there are additional Python packages required to run -the use case. is a list of packages to install before running -the use case separated by commas. +The keywords that end with "_env" are Python environments created in Docker +images using Conda that can be used to run use cases. These images are stored +on DockerHub in dtcenter/metplus-envs and are named with a tag that corresponds +to the keyword without the "_env" suffix. +The environments were created using Docker commands via scripts that are found +in ci/docker/docker_env. 
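For example, the images are built and pushed from that directory with commands like the following, taken from its README (the netcdf4 environment is shown)::

    docker build -t dtcenter/metplus-envs:netcdf4 --build-arg ENV_NAME=netcdf4 .
    docker push dtcenter/metplus-envs:netcdf4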
Existing keywords that set up Conda environments used +for use cases are: + +* metplotpy_env +* spacetime_env +* xesmf_env +* netcdf4_env +* pygrib_env +* metdatadb_env +* h5py_env +* gempak_env Example:: - 8::TCStat_SeriesAnalysis_fcstGFS_obsGFS_FeatureRelative_SeriesByLead_PyEmbed_Multiple_Diagnostics:: model_applications/medium_range/TCStat_SeriesAnalysis_fcstGFS_obsGFS_FeatureRelative_SeriesByLead_PyEmbed_Multiple_Diagnostics.conf,user_env_vars.MET_PYTHON_EXE=python3::pygrib,metpy + spacetime_env -The above example is named -TCStat_SeriesAnalysis_fcstGFS_obsGFS_FeatureRelative_SeriesByLead_PyEmbed_Multiple_Diagnostics. -It uses a configuration file and sets the variable MET_PYTHON_EXE from the -user_env_vars config section to python3 (This is needed to run Python Embedding -use cases that contain additional Python depedencies). It also needs pygrib -and metpy Python packages to be installed before running. +The above example uses the Conda environment +in dtcenter/metplus-envs:**spacetime** to run a user script. +Note that only one dependency that contains the "_env" suffix can be supplied +to a given use case. -**Obtaining Python Packages** +The **gempak_env** is handled a little differently. It is used if +GempakToCF.jar is needed for a use case to convert GEMPAK data to NetCDF +format so it can be read by the MET tools. Instead of creating a Python +environment to use for the use case, this Docker image installs Java and +obtains the GempakToCF.jar file. When creating the Docker container to run +the use cases, the necessary Java files are copied over into the container +that runs the use cases so that the JAR file can be run by METplus wrappers. -Some Python packages can be installed simply by running -"pip3 install " while others require their own dependencies to be -installed as well. If pip3 is sufficient, then no additional action is -required. If not, then a bash script can be added to the ci/jobs directory to -handle the installation. The script should be named get_.sh where - is the name of the package in all lowercase. For example, if a use -case in all_use_cases.txt lists METcalcpy as a Python package dependency, then -the test will look for a script called ci/jobs/get_metcalpy.sh and call it if -it exists. If it does not exist, it will try to run "pip3 install metcalcpy" -which would fail (as of the time of writing this documentation). +Other Keywords +"""""""""""""" -Existing scripts currently include:: +Besides specifying Python environments, +there are additional keywords that can be used to set up the environment +to run a use case: - ci/jobs/get_cartopy.sh - ci/jobs/get_metcalpy.sh - ci/jobs/get_metplotpy.sh - ci/jobs/get_pygrib.sh - ci/jobs/get_xesmf.sh +* **py_embed** - Used if a different Python environment is required to + run a Python Embedding script. If this keyword is included with a Python + environment, then the MET_PYTHON_EXE environment variable will be set to + specify the version of Python3 that is included in that environment -**Using Conda** +Example:: -If Conda (Miniconda) is needed to install the package, then script should -contain a call to get_miniconda.sh. If Miniconda was already installed for -another package, the script is smart enough to skip that step. Here is an -example of a script that uses Conda to install a package:: + pygrib_env,py_embed - #! /bin/bash +In this example, the dtcenter/metplus-envs:**pygrib** environment is used to +run the use case. 
Since **py_embed** is also included, then the following will +be added to the call to run_metplus.py so that the Python embedding script +will use the **pygrib** environment to run:: - script_dir=$(dirname "$0") + user_env_vars.MET_PYTHON_EXE=/usr/local/envs/pygrib/bin/python3 - ${script_dir}/get_miniconda.sh +Please see the MET User's Guide for more information on how to use Python +Embedding. - echo Installing xesmf with conda - conda install -c conda-forge dask netCDF4 xesmf +* **metviewer** - Used if METviewer should be made available to the use case. + This is typically added for a METdbLoad use case that needs to populate a + database with MET output. + +* **metplus** - Used if a user script needs to call utility functions from the + metplus Python package. This keyword simply adds the METplus source code + directory to the PYTHONPATH so that the metplus.util functions can be + imported. Note that this keyword is not needed unless a different Python + environment is specified with a "_env" keyword. The version of Python that + is used to run typical use cases has already installed the METplus Python + package in its environment, so the package can be imported easily. + + +Creating New Python Environments +"""""""""""""""""""""""""""""""" + +**COMING SOON!** + +In METplus v4.0.0 and earlier, a list of Python packages were added to use +cases that required additional packages. These packages were either installed +with pip3 or using a script. This approach was very time consuming as some +packages take a very long time to install in Docker. The new approach involves +creating Docker images that use Conda to create a Python environment that can +run the use case. To see what is available in each of the existing Python +environments, refer to the comments in the scripts found in +**ci/docker/docker_env/scripts**. New environments must be added by a METplus +developer, so please contact MET Help if none of these environments contain the +package requirements needed to run a new use case. .. _add_new_category_to_test_runs: Add new category to test runs -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +----------------------------- -Add a new entry to the testing.yml file found in the .github/workflows directory -in the METplus repository. -Find the job in the testing.yml file named "use_case_tests" and add a new entry -in the "categories" list with the category of the new use case followed by a -colon, then the index of the use case from the list described in the -:ref:`add_use_case_to_test_suite` section, then another colon followed by -"NEW". 
For example, if the new use case -is found in the medium_range category with an index of 9, and the testing.yml file -has the following entries in the categories list:: - - use_case_tests: - name: Use Case Tests - runs-on: ubuntu-latest - if: "!contains(github.event.head_commit.message, 'ci-doc-only')" - needs: [get_image, update_data_volumes] - strategy: - fail-fast: false - matrix: - categories: - - "met_tool_wrapper:0-53" - - "air_quality_and_comp:0" - - "climate:0-1" - - "convection_allowing_models:0" - - "convection_allowing_models:1" - - "convection_allowing_models:2-6" - - "convection_allowing_models:7" - - "convection_allowing_models:8" - - "cryosphere:0" - - "data_assimilation:0" - - "marine_and_coastal:0" - - "medium_range:0" - - "medium_range:1-2" - - "medium_range:3-5" - - "medium_range:6" - - "medium_range:7-8" - - "precipitation:0" - - "precipitation:1" - - "precipitation:2" - - "precipitation:3-8" - - "s2s:0" - - "space_weather:0-1" - - "tc_and_extra_tc:0-2" - -then add the following to the list:: - - - "medium_range:9:NEW" + +The **.github/parm/use_case_groups.json** file in the METplus repository +contains a list of the use case groups to run together. +In METplus version 4.0.0 and earlier, this list was +found in the .github/workflows/testing.yml file. +Add a new entry to the list that includes the category of the new use case, +the list of indices that correspond to the index number described in the +:ref:`add_use_case_to_test_suite` section, +and set the "new" variable to true. + +Example:: + + { + "category": "climate", + "index_list": "2", + "new": true + } .. note:: - Make sure that the indentation matches the other lines and use spaces to - indent instead of tabs. + Make sure there is a comma after the curly braces for the item that comes + before the new item in the list. +This example adds a new use case group that contains the climate use case +with index 2 and is marked as a "new" use case. New use cases are added as a separate item to make reviewing the test results easier. A new use case will produce new output data that is not found in the "truth" data set which is compared the output of the use case runs to check @@ -802,26 +831,41 @@ or if it should remain in its own group. .. _subset_category: Subset Category into Multiple Tests -""""""""""""""""""""""""""""""""""" +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Use cases can be separated into multiple test jobs. -Add a colon (:), then define -the cases to run for the job. Use cases are numbered -starting with 0 and are in order of how they are found in the all_use_cases.txt -file. +In the "index_list" value, define the cases to run for the job. +Use cases are numbered starting with 0 and are in order of how they are +found in the all_use_cases.txt file. The argument supports a comma-separated list of numbers. Example:: - - "data_assimilation:0,2,4" - - "data_assimilation:1,3" + { + "category": "data_assimilation", + "index_list": "0,2,4", + "new": false + }, + { + "category": "data_assimilation", + "index_list": "1,3", + "new": false + }, The above example will run a job with data_assimilation use cases 0, 2, and 4, then another job with data_assimilation use cases 1 and 3. It also supports a range of numbers separated with a dash. 
Example:: - - "data_assimilation:0-3" - - "data_assimilation:4-5" + { + "category": "data_assimilation", + "index_list": "0-3", + "new": false + }, + { + "category": "data_assimilation", + "index_list": "4-5", + "new": false + }, The above example will run a job with data_assimilation 0, 1, 2, and 3, then another job with data_assimilation 4 and 5. @@ -829,15 +873,22 @@ another job with data_assimilation 4 and 5. You can also use a combination of commas and dashes to define the list of cases to run. Example:: - "data_assimilation:0-2,4" - ... - "data_assimilation:3" + { + "category": "data_assimilation", + "index_list": "0-2,4", + "new": false + }, + { + "category": "data_assimilation", + "index_list": "3", + "new": false + }, The above example will run data_assimilation 0, 1, 2, and 4 in one job, then data_assimilation 3 in another job. Monitoring Automated Tests -^^^^^^^^^^^^^^^^^^^^^^^^^^ +-------------------------- All of the use cases in the METplus repository are run via GitHub Actions to ensure @@ -864,7 +915,7 @@ Click on the text next to the icon (last commit message) to see more details. .. _verify-new-input-data-was-found: Verifying that new input data was found -""""""""""""""""""""""""""""""""""""""" +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ On the left side of the window there will be a list of jobs that are run. Click on the job titled "Docker Setup - Update Data Volumes" @@ -894,7 +945,7 @@ If this is the case and data should be found, repeat the instructions to stage the input data or contact MET Help (met_help@ucar.edu) for assistance. Verify that the use case ran successfully -""""""""""""""""""""""""""""""""""""""""" +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ You should verify that the use case was actually run by referring to the appropriate section under "Jobs" that starts @@ -903,7 +954,7 @@ filename in the log output by using the search box on the top right of the log output. Verify that the use case ran in a reasonable amount of time -""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Find the last successful run of the use case category job and compare the time it took to run to the run that includes the new use case. The time for the job @@ -918,7 +969,7 @@ new job for the new use case. See the :ref:`subset_category` section and the multiple medium_range jobs for an example. Create a Pull Request ---------------------- +===================== Create a pull request to merge the changes from your branch into the develop branch. More information on this process can be found in the @@ -926,10 +977,10 @@ branch. More information on this process can be found in the "Open a pull request using your browser." Pull Request Reviewer Instructions ----------------------------------- +================================== Update the develop data directory -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +--------------------------------- Once the person reviewing the pull request has verified that the new use case was run successfully using the new data, @@ -944,7 +995,7 @@ pull request is merged so that the develop branch will contain the new data. to be updated to handle this situation. Log into the DTC Web Server with SSH -"""""""""""""""""""""""""""""""""""" +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ The web server is only accessible if you are on the NCAR VPN. @@ -953,14 +1004,14 @@ The web server is only accessible if you are on the NCAR VPN. 
    ssh |dtc_web_server|

Switch to the met_test user
-"""""""""""""""""""""""""""
+^^^^^^^^^^^^^^^^^^^^^^^^^^^

Commands must run as the met_test user::

    runas met_test

Change directory to the METplus Data Directory
-""""""""""""""""""""""""""""""""""""""""""""""
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

.. parsed-literal::

@@ -972,7 +1023,7 @@
something like this::

    source feature_ABC_desc/feature_ABC_desc_env.sh

Compare the volume_mount_directories file
-"""""""""""""""""""""""""""""""""""""""""
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Compare the feature branch file to the develop directory file::

@@ -984,7 +1035,7 @@
copy the feature file into the develop directory::

    cp ${METPLUS_FEATURE_BRANCH}/volume_mount_directories develop/volume_mount_directories

Copy the data from the feature directory into the next version directory
-""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Make sure the paths are correct before copying.

@@ -1041,7 +1092,7 @@
MET Tool Wrapper Use Cases::

    tar czf sample_data-${METPLUS_USE_CASE_CATEGORY}-${METPLUS_VERSION}.tgz met_test

Update the link in the develop directory if needed
-""""""""""""""""""""""""""""""""""""""""""""""""""
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Check if the develop directory contains a symbolic link to an older version
of the tarfile. Note: These commands must be run together (no other commands in
@@ -1063,10 +1114,10 @@
Check that the link now points to the new tarfile that was just created::

    ls -lh sample_data-${METPLUS_USE_CASE_CATEGORY}.tgz

After the Pull Request is Approved
-----------------------------------
+==================================

Merge the pull request and ensure that all tests pass
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+-----------------------------------------------------

Merge the pull request on GitHub. Then go to the "Actions" tab and verify
that all of the GitHub Actions tests pass for the develop branch. A green
check mark
@@ -1079,7 +1130,7 @@
If the circle on the left side is yellow, then the run has not completed yet.

If everything ran smoothly, clean up the files on the web server.

Consider rearranging the use case groups
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+----------------------------------------

If another group of use cases in the same category exists, consider adding
the new use case to an existing group to speed up execution.
@@ -1103,7 +1154,7 @@
then combine the two list items into a single item::

    - "met_tool_wrapper:0-54"

Create a pull request from develop into develop-ref
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+---------------------------------------------------

The addition of a new use case results in new output data. When this happens,
the reference branch needs to be updated so that future pull requests will
@@ -1119,10 +1170,10 @@
and click the green "Create pull request" button to create the pull request.

.. figure:: figure/develop_into_develop-ref.png

Clean Up DTC Web Server
-^^^^^^^^^^^^^^^^^^^^^^^
+-----------------------

Remove the saved copy of the sample data tarfile
-""""""""""""""""""""""""""""""""""""""""""""""""
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Check if there are any "sav" files in the METplus version directory::

@@ -1133,7 +1184,7 @@
If there is more than one file with "sav" in the filename, make sure that the
file removed is the file that was created for this feature.
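If it is not obvious which "sav" file belongs to the feature, listing the
candidates with their modification times can help. A minimal sketch, assuming
the version directory path below is replaced with the actual METplus data
directory on the web server::

    #! /usr/bin/env python3
    # List "sav" tarfile copies, newest first, so the copy created for
    # this feature can be identified before anything is removed.
    # NOTE: the path below is a placeholder.
    from datetime import datetime
    from pathlib import Path

    version_dir = Path('/path/to/METplus_Data/vX.Y')

    for path in sorted(version_dir.glob('*sav*'),
                       key=lambda p: p.stat().st_mtime, reverse=True):
        mtime = datetime.fromtimestamp(path.stat().st_mtime)
        print(f'{mtime:%Y-%m-%d %H:%M}  {path.name}')

The newest entry is typically the copy saved for the current feature, but
confirm before removing it.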
Remove the feature branch data directory
-""""""""""""""""""""""""""""""""""""""""
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

If more development is needed for the feature branch, do not remove the
directory. If the work is complete, then remove the directory::

@@ -1142,7 +1193,7 @@

    rm -rf ${METPLUS_DATA_TARFILE_DIR}/${METPLUS_FEATURE_BRANCH}

Clean up the staging directory
-""""""""""""""""""""""""""""""
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Remove the tarfile and environment file from the staging directory::

diff --git a/docs/use_cases/met_tool_wrapper/ASCII2NC/ASCII2NC_python_embedding.py b/docs/use_cases/met_tool_wrapper/ASCII2NC/ASCII2NC_python_embedding.py
index afadf6f354..c5121f1f54 100644
--- a/docs/use_cases/met_tool_wrapper/ASCII2NC/ASCII2NC_python_embedding.py
+++ b/docs/use_cases/met_tool_wrapper/ASCII2NC/ASCII2NC_python_embedding.py
@@ -10,7 +10,8 @@
 # Scientific Objective
 # --------------------
 #
-# None. Simply converting file formats so point observations can be read by the MET tools.
+# Simply converting file formats so point observations can be read by the MET
+# tools through the use of a Python script.

 ##############################################################################
 # Datasets

diff --git a/docs/use_cases/met_tool_wrapper/ASCII2NC/ASCII2NC_python_embedding_user_py.py b/docs/use_cases/met_tool_wrapper/ASCII2NC/ASCII2NC_python_embedding_user_py.py
deleted file mode 100644
index e5bace92eb..0000000000
--- a/docs/use_cases/met_tool_wrapper/ASCII2NC/ASCII2NC_python_embedding_user_py.py
+++ /dev/null
@@ -1,123 +0,0 @@
-"""
-ASCII2NC: Using Python Embedding with MET_PYTHON_EXE
-==============================================================================
-
-met_tool_wrapper/ASCII2NC/ASCII2NC_python_embedding
-_user_py.conf
-
-"""
-##############################################################################
-# Scientific Objective
-# --------------------
-#
-# Simply converting file formats so point observations can be read by the MET tools through the use of a user-defined
-# Python script and running in either a different environment that the MET build or using a package not currently a
-# METplus dependency.
-
-
-##############################################################################
-# Datasets
-# --------
-#
-# | **Observations:** Precipitation accumulation observations in ASCII text files
-#
-# | **Location:** All of the input data required for this use case can be found in the met_test sample data tarball. Click here to the METplus releases page and download smaple data fore the appropriate release: https://github.com/dtcenter/METplus/releases
-# | This tarball should be unpacked into the directory that you will set the value of INPUT_BASE. See `Running METplus`_ section for more information.
-#
-# | **Data Source:** Unknown
-# |
-
-##############################################################################
-# METplus Components
-# ------------------
-#
-# This use case utilizes the METplus ASCII2NC wrapper to generate a command to run the MET tool ASCII2NC.
-
-##############################################################################
-# METplus Workflow
-# ----------------
-#
-# ASCII2NC is the only tool called in this example. It has one run time, but the time is not relevant because the files processed do not have any time information in the names.
-#
-##############################################################################
-# METplus Configuration
-# ---------------------
-#
-# METplus first loads all of the configuration files found in parm/metplus_config,
-# then it loads any configuration files passed to METplus via the command line
-# with the -c option, i.e. -c parm/use_cases/met_tool_wrapper/ASCII2NC/ASCII2NC_python_embedding_user_py.conf
-#
-# .. highlight:: bash
-# .. literalinclude:: ../../../../parm/use_cases/met_tool_wrapper/ASCII2NC/ASCII2NC_python_embedding_user_py.conf
-
-##############################################################################
-# MET Configuration
-# -----------------
-#
-# None. No MET configuration file for ASCII2NC is used in this case.
-#
-
-##############################################################################
-# Python Embedding
-# ----------------
-#
-# This use case calls a Python script to read the input data.
-# The Python script is stored in the MET repository: /path/to/MET/installation/share/met/python/read_ascii_point.py
-#
-# `read_ascii_point.py `_
-
-##############################################################################
-# Running METplus
-# ---------------
-#
-# This use case can be run two ways:
-#
-# 1) Passing in ASCII2NC_python_embedding_user_py.conf then a user-specific system configuration file::
-#
-#        run_metplus.py -c /path/to/METplus/parm/use_cases/met_tool_wrapper/ASCII2NC/ASCII2NC_python_embedding_user_py.conf -c /path/to/user_system.conf
-#
-# 2) Modifying the configurations in parm/metplus_config, then passing in ASCII2NC_python_embedding_user_py.conf::
-#
-#        run_metplus.py -c /path/to/METplus/parm/use_cases/met_tool_wrapper/ASCII2NC/ASCII2NC_python_embedding_user_py.conf
-#
-# The former method is recommended. Whether you add them to a user-specific configuration file or modify the metplus_config files, the following variables must be set correctly:
-#
-# * **INPUT_BASE** - Path to directory where sample data tarballs are unpacked (See Datasets section to obtain tarballs). This is not required to run METplus, but it is required to run the examples in parm/use_cases
-# * **OUTPUT_BASE** - Path where METplus output will be written. This must be in a location where you have write permissions
-# * **MET_INSTALL_DIR** - Path to location where MET is installed locally
-#
-# Example User Configuration File::
-#
-#   [dir]
-#   INPUT_BASE = /path/to/sample/input/data
-#   OUTPUT_BASE = /path/to/output/dir
-#   MET_INSTALL_DIR = /path/to/met-X.Y
-#
-# **NOTE:** All of these items must be found under the [dir] section.
-#
-
-##############################################################################
-# Expected Output
-# ---------------
-#
-# A successful run will output the following both to the screen and to the logfile::
-#
-#   INFO: METplus has successfully finished running.
-#
-# Refer to the value set for **OUTPUT_BASE** to find where the output data was generated.
-# Output for this use case will be found in met_tool_wrapper/ASCII2NC (relative to **OUTPUT_BASE**)
-# and will contain the following file:
-#
-# * ascii2nc_python_user.nc
-
-##############################################################################
-# Keywords
-# --------
-#
-# sphinx_gallery_thumbnail_path = '_static/met_tool_wrapper-ASCII2NC.png'
-#
-# .. note::
-#   `ASCII2NCToolUseCase `_,
-#   `PythonEmbeddingFileUseCase `_,
-#   `MET_PYTHON_EXEUseCase `_

diff --git a/internal_tests/use_cases/all_use_cases.txt b/internal_tests/use_cases/all_use_cases.txt
index 544c8352b7..2abbdf0ced 100644
--- a/internal_tests/use_cases/all_use_cases.txt
+++ b/internal_tests/use_cases/all_use_cases.txt
@@ -1,14 +1,14 @@
Category: met_tool_wrapper

-0::CyclonePlotter::met_tool_wrapper/CyclonePlotter/CyclonePlotter.conf,user_env_vars.MET_PYTHON_EXE=python3::cartopy,matplotlib
-1::PCPCombine_python_embedding:: met_tool_wrapper/PCPCombine/PCPCombine_python_embedding.conf,user_env_vars.MET_PYTHON_EXE=python3::h5py
+0::CyclonePlotter::met_tool_wrapper/CyclonePlotter/CyclonePlotter.conf,user_env_vars.MET_PYTHON_EXE=python3:: cycloneplotter_env
+1::PCPCombine_python_embedding:: met_tool_wrapper/PCPCombine/PCPCombine_python_embedding.conf:: h5py_env,py_embed
2::ASCII2NC:: met_tool_wrapper/ASCII2NC/ASCII2NC.conf
3::met_tool_wrapper/ASCII2NC/ASCII2NC_python_embedding.conf
-4::ASCII2NC_python_embedding_user_py:: met_tool_wrapper/ASCII2NC/ASCII2NC_python_embedding_user_py.conf
+4::ExtractTiles:: met_tool_wrapper/ExtractTiles/ExtractTiles.conf
5::PyEmbedIngest:: met_tool_wrapper/PyEmbedIngest/PyEmbedIngest.conf
6::EnsembleStat:: met_tool_wrapper/EnsembleStat/EnsembleStat.conf
7::EnsembleStat_python_embedding:: met_tool_wrapper/EnsembleStat/EnsembleStat_python_embedding.conf
8::Example:: met_tool_wrapper/Example/Example.conf
-9::GempakToCF:: met_tool_wrapper/GempakToCF/GempakToCF.conf
+9::GempakToCF:: met_tool_wrapper/GempakToCF/GempakToCF.conf:: gempak_env
10::GenVxMask:: met_tool_wrapper/GenVxMask/GenVxMask.conf
11::GenVxMask_multiple:: met_tool_wrapper/GenVxMask/GenVxMask_multiple.conf
12::GenVxMask_with_arguments:: met_tool_wrapper/GenVxMask/GenVxMask_with_arguments.conf
@@ -36,8 +36,8 @@ Category: met_tool_wrapper
34::RegridDataPlane_python_embedding:: met_tool_wrapper/RegridDataPlane/RegridDataPlane_python_embedding.conf
35::StatAnalysis:: met_tool_wrapper/StatAnalysis/StatAnalysis.conf
36::StatAnalysis_python_embedding:: met_tool_wrapper/StatAnalysis/StatAnalysis_python_embedding.conf
-37::SeriesAnalysis:: met_tool_wrapper/SeriesAnalysis/SeriesAnalysis.conf::netCDF4
-38::SeriesAnalysis_python_embedding:: met_tool_wrapper/SeriesAnalysis/SeriesAnalysis_python_embedding.conf::netCDF4
+37::SeriesAnalysis:: met_tool_wrapper/SeriesAnalysis/SeriesAnalysis.conf::netcdf4_env
+38::SeriesAnalysis_python_embedding:: met_tool_wrapper/SeriesAnalysis/SeriesAnalysis_python_embedding.conf:: netcdf4_env
39::TCGen:: met_tool_wrapper/TCGen/TCGen.conf
40::TCPairs_extra_tropical:: met_tool_wrapper/TCPairs/TCPairs_extra_tropical.conf
41::TCPairs_tropical:: met_tool_wrapper/TCPairs/TCPairs_tropical.conf
@@ -47,17 +47,16 @@ Category: met_tool_wrapper
45::met_tool_wrapper/PlotDataPlane/PlotDataPlane_grib1.conf
46::met_tool_wrapper/PlotDataPlane/PlotDataPlane_netcdf.conf
47::met_tool_wrapper/PlotDataPlane/PlotDataPlane_python_embedding.conf
-48::met_tool_wrapper/ExtractTiles/ExtractTiles.conf
-49::met_tool_wrapper/UserScript/UserScript_run_once.conf
-50::met_tool_wrapper/UserScript/UserScript_run_once_for_each.conf
-51::met_tool_wrapper/UserScript/UserScript_run_once_per_init.conf
-52::met_tool_wrapper/UserScript/UserScript_run_once_per_lead.conf
-53::met_tool_wrapper/UserScript/UserScript_run_once_per_valid.conf
-54::METdbLoad::met_tool_wrapper/METdbLoad/METdbLoad.conf::metdatadb,metviewer
+48::met_tool_wrapper/UserScript/UserScript_run_once.conf
+49::met_tool_wrapper/UserScript/UserScript_run_once_for_each.conf
+50::met_tool_wrapper/UserScript/UserScript_run_once_per_init.conf
+51::met_tool_wrapper/UserScript/UserScript_run_once_per_lead.conf
+52::met_tool_wrapper/UserScript/UserScript_run_once_per_valid.conf
+53::METdbLoad::met_tool_wrapper/METdbLoad/METdbLoad.conf:: metdatadb_env,metviewer

Category: air_quality_and_comp

-0::EnsembleStat_fcstICAP_obsMODIS_aod::model_applications/air_quality_and_comp/EnsembleStat_fcstICAP_obsMODIS_aod.conf,user_env_vars.MET_PYTHON_EXE=python3::netCDF4
+0::EnsembleStat_fcstICAP_obsMODIS_aod::model_applications/air_quality_and_comp/EnsembleStat_fcstICAP_obsMODIS_aod.conf

Category: climate

@@ -83,29 +82,29 @@ Category: cryosphere

Category: data_assimilation

-0::StatAnalysis_fcstHAFS_obsPrepBufr_JEDI_IODA_interface::model_applications/data_assimilation/StatAnalysis_fcstHAFS_obsPrepBufr_JEDI_IODA_interface.conf,user_env_vars.MET_PYTHON_EXE=python3::netCDF4
+0::StatAnalysis_fcstHAFS_obsPrepBufr_JEDI_IODA_interface::model_applications/data_assimilation/StatAnalysis_fcstHAFS_obsPrepBufr_JEDI_IODA_interface.conf

Category: marine_and_coastal

-0::PlotDataPlane_obsHYCOM_coordTripolar::model_applications/marine_and_coastal/PlotDataPlane_obsHYCOM_coordTripolar.conf,user_env_vars.MET_PYTHON_EXE=python3::xesmf
+0::PlotDataPlane_obsHYCOM_coordTripolar::model_applications/marine_and_coastal/PlotDataPlane_obsHYCOM_coordTripolar.conf:: xesmf_env, py_embed

Category: medium_range

0::PointStat_fcstGFS_obsNAM_Sfc_MultiField_PrepBufr:: model_applications/medium_range/PointStat_fcstGFS_obsNAM_Sfc_MultiField_PrepBufr.conf
-1::TCStat_SeriesAnalysis_fcstGFS_obsGFS_FeatureRelative_SeriesByInit:: model_applications/medium_range/TCStat_SeriesAnalysis_fcstGFS_obsGFS_FeatureRelative_SeriesByInit.conf::netCDF4
-2::TCStat_SeriesAnalysis_fcstGFS_obsGFS_FeatureRelative_SeriesByLead:: model_applications/medium_range/TCStat_SeriesAnalysis_fcstGFS_obsGFS_FeatureRelative_SeriesByLead.conf::netCDF4
+1::TCStat_SeriesAnalysis_fcstGFS_obsGFS_FeatureRelative_SeriesByInit:: model_applications/medium_range/TCStat_SeriesAnalysis_fcstGFS_obsGFS_FeatureRelative_SeriesByInit.conf:: netcdf4_env
+2::TCStat_SeriesAnalysis_fcstGFS_obsGFS_FeatureRelative_SeriesByLead:: model_applications/medium_range/TCStat_SeriesAnalysis_fcstGFS_obsGFS_FeatureRelative_SeriesByLead.conf:: netcdf4_env
3::GridStat_fcstGFS_obsGFS_climoNCEP_MultiField:: model_applications/medium_range/GridStat_fcstGFS_obsGFS_climoNCEP_MultiField.conf
4::GridStat_fcstGFS_obsGFS_Sfc_MultiField:: model_applications/medium_range/GridStat_fcstGFS_obsGFS_Sfc_MultiField.conf
-5::UserScript_fcstGEFS_Difficulty_Index::model_applications/medium_range/UserScript_fcstGEFS_Difficulty_Index.conf::METcalcpy,METplotpy
+5::UserScript_fcstGEFS_Difficulty_Index::model_applications/medium_range/UserScript_fcstGEFS_Difficulty_Index.conf:: metplotpy_env
6::model_applications/medium_range/PointStat_fcstGFS_obsGDAS_UpperAir_MultiField_PrepBufr.conf
-7::TCStat_SeriesAnalysis_fcstGFS_obsGFS_FeatureRelative_SeriesByLead_PyEmbed_IVT:: model_applications/medium_range/TCStat_SeriesAnalysis_fcstGFS_obsGFS_FeatureRelative_SeriesByLead_PyEmbed_IVT.conf,user_env_vars.MET_PYTHON_EXE=python3::pygrib,metpy,netCDF4
-8::TCStat_SeriesAnalysis_fcstGFS_obsGFS_FeatureRelative_SeriesByLead_PyEmbed_Multiple_Diagnostics:: model_applications/medium_range/TCStat_SeriesAnalysis_fcstGFS_obsGFS_FeatureRelative_SeriesByLead_PyEmbed_Multiple_Diagnostics.conf,user_env_vars.MET_PYTHON_EXE=python3::pygrib,metpy,netCDF4
+7::TCStat_SeriesAnalysis_fcstGFS_obsGFS_FeatureRelative_SeriesByLead_PyEmbed_IVT:: model_applications/medium_range/TCStat_SeriesAnalysis_fcstGFS_obsGFS_FeatureRelative_SeriesByLead_PyEmbed_IVT.conf:: pygrib_env,py_embed
+8::TCStat_SeriesAnalysis_fcstGFS_obsGFS_FeatureRelative_SeriesByLead_PyEmbed_Multiple_Diagnostics:: model_applications/medium_range/TCStat_SeriesAnalysis_fcstGFS_obsGFS_FeatureRelative_SeriesByLead_PyEmbed_Multiple_Diagnostics.conf:: pygrib_env,py_embed

Category: precipitation

0::EnsembleStat_fcstHRRRE_FcstOnly_NetCDF:: model_applications/precipitation/EnsembleStat_fcstHRRRE_FcstOnly_NetCDF.conf
-1::GridStat_fcstHREFmean_obsStgIV_Gempak:: model_applications/precipitation/GridStat_fcstHREFmean_obsStgIV_Gempak.conf
+1::GridStat_fcstHREFmean_obsStgIV_Gempak:: model_applications/precipitation/GridStat_fcstHREFmean_obsStgIV_Gempak.conf:: gempak_env
2::GridStat_fcstHREFmean_obsStgIV_NetCDF:: model_applications/precipitation/GridStat_fcstHREFmean_obsStgIV_NetCDF.conf
3::GridStat_fcstGFS_obsCCPA_GRIB:: model_applications/precipitation/GridStat_fcstGFS_obsCCPA_GRIB.conf
4::GridStat_fcstHRRR-TLE_obsStgIV_GRIB:: model_applications/precipitation/GridStat_fcstHRRR-TLE_obsStgIV_GRIB.conf
@@ -114,13 +113,13 @@ Category: precipitation

Category: s2s

-0::GridStat_SeriesAnalysis_fcstNMME_obsCPC_seasonal_forecast:: model_applications/s2s/GridStat_SeriesAnalysis_fcstNMME_obsCPC_seasonal_forecast.conf::netCDF4
-1::UserScript_fcstGFS_obsERA_Blocking:: model_applications/s2s/UserScript_fcstGFS_obsERA_Blocking.conf::cartopy,METplotpy,METcalcpy
-2::UserScript_obsERA_obsOnly_Blocking:: model_applications/s2s/UserScript_obsERA_obsOnly_Blocking.conf::cartopy,METplotpy,METcalcpy
-3::UserScript_obsERA_obsOnly_WeatherRegime:: model_applications/s2s/UserScript_obsERA_obsOnly_WeatherRegime.conf::cartopy,METplotpy,METcalcpy,eofs
-4::TCGen_fcstGFSO_obsBDECKS_GDF_TDF:: model_applications/s2s/TCGen_fcstGFSO_obsBDECKS_GDF_TDF.conf:: cartopy,METplotpy
-5::UserScript_obsPrecip_obsOnly_Hovmoeller:: model_applications/s2s/UserScript_obsPrecip_obsOnly_Hovmoeller.conf:: requests,psutil,pyyaml,plotly,METplotpy,METcalcpy
-6:: UserScript_obsPrecip_obsOnly_CrossSpectraPlot:: model_applications/s2s/UserScript_obsPrecip_obsOnly_CrossSpectraPlot.conf:: spacetime
+0::GridStat_SeriesAnalysis_fcstNMME_obsCPC_seasonal_forecast:: model_applications/s2s/GridStat_SeriesAnalysis_fcstNMME_obsCPC_seasonal_forecast.conf:: netcdf4_env
+1::UserScript_fcstGFS_obsERA_Blocking:: model_applications/s2s/UserScript_fcstGFS_obsERA_Blocking.conf:: metplotpy_env,metplus
+2::UserScript_obsERA_obsOnly_Blocking:: model_applications/s2s/UserScript_obsERA_obsOnly_Blocking.conf:: metplotpy_env,metplus
+3::UserScript_obsERA_obsOnly_WeatherRegime:: model_applications/s2s/UserScript_obsERA_obsOnly_WeatherRegime.conf:: weatherregime_env,metplus
+4::TCGen_fcstGFSO_obsBDECKS_GDF_TDF:: model_applications/s2s/TCGen_fcstGFSO_obsBDECKS_GDF_TDF.conf:: metplotpy_env,metplus
+5::UserScript_obsPrecip_obsOnly_Hovmoeller:: model_applications/s2s/UserScript_obsPrecip_obsOnly_Hovmoeller.conf:: metplotpy_env
+6:: UserScript_obsPrecip_obsOnly_CrossSpectraPlot:: model_applications/s2s/UserScript_obsPrecip_obsOnly_CrossSpectraPlot.conf:: spacetime_env

Category: space_weather

@@ -130,6 +129,6 @@ Category: space_weather
Category: tc_and_extra_tc

0::TCRMW_fcstGFS_fcstOnly_gonzalo:: model_applications/tc_and_extra_tc/TCRMW_fcstGFS_fcstOnly_gonzalo.conf
-1::CyclonePlotter_fcstGFS_obsGFS_OPC:: model_applications/tc_and_extra_tc/CyclonePlotter_fcstGFS_obsGFS_OPC.conf:: cartopy,matplotlib
+1::CyclonePlotter_fcstGFS_obsGFS_OPC:: model_applications/tc_and_extra_tc/CyclonePlotter_fcstGFS_obsGFS_OPC.conf:: cycloneplotter_env
2::UserScript_ASCII2NC_PointStat_fcstHAFS_obsFRD_NetCDF:: model_applications/tc_and_extra_tc/UserScript_ASCII2NC_PointStat_fcstHAFS_obsFRD_NetCDF.conf,model_applications/tc_and_extra_tc/UserScript_ASCII2NC_PointStat_fcstHAFS_obsFRD_NetCDF/ci_overrides.conf
-3::GridStat_fcstHAFS_obsTDR_NetCDF:: model_applications/tc_and_extra_tc/GridStat_fcstHAFS_obsTDR_NetCDF.conf::matplotlib
+3::GridStat_fcstHAFS_obsTDR_NetCDF:: model_applications/tc_and_extra_tc/GridStat_fcstHAFS_obsTDR_NetCDF.conf:: py_embed

diff --git a/metplus/util/met_util.py b/metplus/util/met_util.py
index 2c886d357d..f10d070892 100644
--- a/metplus/util/met_util.py
+++ b/metplus/util/met_util.py
@@ -2781,7 +2781,7 @@ def expand_int_string_to_list(int_string):
     """! Expand string into a list of integer values. Items are separated by
     commas. Items that are formatted X-Y will be expanded into each number
     from X to Y inclusive. If the string ends with +, then add a str '+'
-     to the end of the list. Used in ci/jobs/get_use_case_commands.py
+     to the end of the list. Used in .github/jobs/get_use_case_commands.py

     @param int_string String containing a comma-separated list of integers
     @returns List of integers and potentially '+' as the last item

diff --git a/parm/use_cases/met_tool_wrapper/ASCII2NC/ASCII2NC_python_embedding_user_py.conf b/parm/use_cases/met_tool_wrapper/ASCII2NC/ASCII2NC_python_embedding_user_py.conf
deleted file mode 100644
index 74f4a61e32..0000000000
--- a/parm/use_cases/met_tool_wrapper/ASCII2NC/ASCII2NC_python_embedding_user_py.conf
+++ /dev/null
@@ -1,114 +0,0 @@
-[config]
-## Configuration-related settings such as the process list, begin and end times, etc.
-
-# List of applications to run - only ASCII2NC for this case
-PROCESS_LIST = ASCII2NC
-
-# time looping - options are INIT, VALID, RETRO, and REALTIME
-# If set to INIT or RETRO:
-#   INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set
-# If set to VALID or REALTIME:
-#   VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set
-LOOP_BY = VALID
-
-# Format of VALID_BEG and VALID_END using % items
-# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc.
-# see www.strftime.org for more information
-# %Y%m%d%H expands to YYYYMMDDHH
-VALID_TIME_FMT = %Y%m%d%H
-
-# Start time for METplus run - must match VALID_TIME_FMT
-VALID_BEG = 2010010112
-
-# End time for METplus run - must match VALID_TIME_FMT
-VALID_END = 2010010112
-
-# Increment between METplus runs (in seconds if no units are specified)
-# Must be >= 60 seconds
-VALID_INCREMENT = 1M
-
-# List of forecast leads to process for each run time (init or valid)
-# If unset, defaults to 0 (don't loop through forecast leads
-LEAD_SEQ = 0
-
-# Order of loops to process data - Options are times, processes
-# Not relevant if only one item is in the PROCESS_LIST
-# times = run all wrappers in the PROCESS_LIST for a single run time, then
-#   increment the run time and run all wrappers again until all times have
-#   been evaluated.
-# processes = run the first wrapper in the PROCESS_LIST for all times
-#   specified, then repeat for the next item in the PROCESS_LIST until all
-#   wrappers have been run
-LOOP_ORDER = times
-
-# Verbosity of MET output - overrides LOG_VERBOSITY for ASCII2NC only
-#LOG_ASCII2NC_VERBOSITY = 1
-
-# MET Configuration file for ASCII2NC
-# References CONFIG_DIR from the [dir] section
-ASCII2NC_CONFIG_FILE =
-
-# If set to True, skip run if the output file determined by the output directory and
-# filename template already exists
-ASCII2NC_SKIP_IF_OUTPUT_EXISTS = False
-
-# Time relative to valid time (in seconds if no units are specified) to allow files to be considered
-# valid. Set both BEGIN and END to 0 to require the exact time in the filename
-# Not used in this example.
-ASCII2NC_FILE_WINDOW_BEGIN = 0
-ASCII2NC_FILE_WINDOW_END = 0
-
-# Time relative to each input file's valid time (in seconds if no units are specified) for data within the file to be
-# considered valid.
-ASCII2NC_WINDOW_BEGIN = 0
-ASCII2NC_WINDOW_END = 0
-
-# Value to pass with the -format argument to ascii2nc. See MET User's Guide for more information
-ASCII2NC_INPUT_FORMAT = python
-
-# Value to pass with the -mask_grid argument to ascii2nc. See MET User's Guide for more information
-ASCII2NC_MASK_GRID =
-
-# Value to pass with the -mask_poly argument to ascii2nc. See MET User's Guide for more information
-ASCII2NC_MASK_POLY =
-
-# Value to pass with the -mask_sid argument to ascii2nc. See MET User's Guide for more information
-ASCII2NC_MASK_SID =
-
-# For defining the time periods for summarization
-# False for no time summary, True otherwise
-# The rest of the ASCII2NC_TIME_SUMMARY variables are ignored if set to False
-# See the MET User's Guide section regarding ASCII2NC time summary options for more information.
-ASCII2NC_TIME_SUMMARY_FLAG = False
-ASCII2NC_TIME_SUMMARY_RAW_DATA = False
-ASCII2NC_TIME_SUMMARY_BEG = 000000
-ASCII2NC_TIME_SUMMARY_END = 235959
-ASCII2NC_TIME_SUMMARY_STEP = 300
-ASCII2NC_TIME_SUMMARY_WIDTH = 600
-ASCII2NC_TIME_SUMMARY_GRIB_CODES = 11, 204, 211
-ASCII2NC_TIME_SUMMARY_VAR_NAMES =
-ASCII2NC_TIME_SUMMARY_TYPES = min, max, range, mean, stdev, median, p80
-ASCII2NC_TIME_SUMMARY_VALID_FREQ = 0
-ASCII2NC_TIME_SUMMARY_VALID_THRESH = 0.0
-
-
-# End of [config] section and start of [dir] section
-[dir]
-# location of configuration files used by MET applications
-CONFIG_DIR={PARM_BASE}/met_config
-
-# Input/Output directories can be left empty if the corresponding template contains the full path to the files
-ASCII2NC_INPUT_DIR =
-ASCII2NC_OUTPUT_DIR =
-
-# End of [dir] section and start of [filename_templates] section
-[filename_templates]
-
-# Template to look for input to ASCII2NC relative to ASCII2NC_INPUT_DIR
-ASCII2NC_INPUT_TEMPLATE = "{MET_INSTALL_DIR}/share/met/python/read_ascii_point.py {INPUT_BASE}/met_test/data/sample_obs/ascii/sample_ascii_obs.txt"
-
-# Template to use to write output from ASCII2NC
-ASCII2NC_OUTPUT_TEMPLATE = {OUTPUT_BASE}/met_tool_wrapper/ASCII2NC/ascii2nc_python_user.nc
-
-[user_env_vars]
-MET_PYTHON_EXE = python3