diff --git a/.github/ISSUE_TEMPLATE/fix_file.md b/.github/ISSUE_TEMPLATE/fix_file.md new file mode 100644 index 0000000000..1e05f0c9df --- /dev/null +++ b/.github/ISSUE_TEMPLATE/fix_file.md @@ -0,0 +1,24 @@ +--- +name: Fix File Update +about: Use this template for adding, updating, or removing fix files from global dataset +title: +labels: Fix Files +assignees: + - KateFriedman-NOAA + - WalterKolczynski-NOAA + +--- + +**Description** + + + + + + +**Tasks** + +- [ ] Discuss needs with global-workflow developer assigned to request. +- [ ] Add/update/remove fix file(s) in fix sets on supported platforms (global-workflow assignee task). +- [ ] Update "Fix File Management" spreadsheet (https://docs.google.com/spreadsheets/d/1BeIvcz6TO3If4YCqkUK-oz_kGS9q2wTjwLS-BBemSEY/edit?usp=sharing). +- [ ] Make related workflow/component updates. diff --git a/.gitignore b/.gitignore index 79607e39fd..e73b9f2e05 100644 --- a/.gitignore +++ b/.gitignore @@ -129,6 +129,7 @@ ush/global_chgres.sh ush/global_chgres_driver.sh ush/global_cycle.sh ush/global_cycle_driver.sh +ush/jediinc2fv3.py ush/mkwfsgbl.sh ush/ufsda ush/wafs_blending.sh diff --git a/.readthedocs.yaml b/.readthedocs.yaml new file mode 100644 index 0000000000..fa854552e5 --- /dev/null +++ b/.readthedocs.yaml @@ -0,0 +1,21 @@ +# .readthedocs.yaml +# Read the Docs configuration file +# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details + +# Required +version: 2 + +# Set the version of Python and other tools you might need +build: + os: ubuntu-22.04 + tools: + python: "3.11" + +# Build documentation in the docs/ directory with Sphinx +sphinx: + configuration: docs/source/conf.py + +python: + install: + - requirements: docs/requirements.txt + system_packages: true diff --git a/.shellcheckrc b/.shellcheckrc index 1e31bd4600..6d540ba17f 100644 --- a/.shellcheckrc +++ b/.shellcheckrc @@ -11,3 +11,6 @@ disable=SC1090 # Disable non-existent binary disable=SC1091 + +# Disable -p -m only applies to deepest 
directory +disable=SC2174 diff --git a/Externals.cfg b/Externals.cfg index d1577a481b..24f921c578 100644 --- a/Externals.cfg +++ b/Externals.cfg @@ -1,7 +1,7 @@ # External sub-modules of global-workflow [UFS] -tag = c22aaad +tag = 2247060 local_path = sorc/ufs_model.fd repo_url = https://github.com/ufs-community/ufs-weather-model.git protocol = git @@ -15,7 +15,7 @@ protocol = git required = True [UFS-Utils] -hash = 5b67e4d +hash = 72a0471 local_path = sorc/ufs_utils.fd repo_url = https://github.com/ufs-community/UFS_UTILS.git protocol = git @@ -43,26 +43,19 @@ protocol = git required = False [GSI-Monitor] -hash = 8cf16de +hash = 45783e3 local_path = sorc/gsi_monitor.fd repo_url = https://github.com/NOAA-EMC/GSI-monitor.git protocol = git required = False [GDASApp] -hash = db2f998 +hash = aaf7caa local_path = sorc/gdas.cd repo_url = https://github.com/NOAA-EMC/GDASApp.git protocol = git required = False -[GLDAS] -tag = fd8ba62 -local_path = sorc/gldas.fd -repo_url = https://github.com/NOAA-EMC/GLDAS.git -protocol = git -required = False - [EMC-gfs_wafs] hash = 014a0b8 local_path = sorc/gfs_wafs.fd diff --git a/ci/cases/C96C48_hybatmDA.yaml b/ci/cases/C96C48_hybatmDA.yaml new file mode 100644 index 0000000000..9efce40900 --- /dev/null +++ b/ci/cases/C96C48_hybatmDA.yaml @@ -0,0 +1,15 @@ +experiment: + mode: cycled + +arguments: + app: ATM + resdet: 96 + resens: 48 + comrot: ${RUNTESTS}/${pslot}/COMROT + expdir: ${RUNTESTS}/${pslot}/EXPDIR + icsdir: ${ICSDIR_ROOT}/C96C48 + idate: 2021122018 + edate: 2021122200 + nens: 2 + gfs_cyc: 1 + start: cold diff --git a/ci/cases/C96_atm3DVar.yaml b/ci/cases/C96_atm3DVar.yaml new file mode 100644 index 0000000000..1648432e09 --- /dev/null +++ b/ci/cases/C96_atm3DVar.yaml @@ -0,0 +1,14 @@ +experiment: + mode: cycled + +arguments: + app: ATM + resdet: 96 + comrot: ${RUNTESTS}/${pslot}/COMROT + expdir: ${RUNTESTS}/${pslot}/EXPDIR + icsdir: ${ICSDIR_ROOT}/C96C48 + idate: 2021122018 + edate: 2021122100 + nens: 0 + gfs_cyc: 1 + start: 
cold diff --git a/ci/experiments/C96C48_hybatmDA.yaml b/ci/experiments/C96C48_hybatmDA.yaml deleted file mode 100644 index cf291e5081..0000000000 --- a/ci/experiments/C96C48_hybatmDA.yaml +++ /dev/null @@ -1,19 +0,0 @@ -environment: - HOMEgfs: ${HOMEGFS} # TODO - using the env var ${HOMEgfs} will cause - # the rocoto XML file to have HOMEgfs set to None -experiment: - mode: cycled - -arguments: - pslot: ${pslot} # TODO - same bug as above with HOMEgfs - app: ATM - resdet: 96 - resens: 48 - comrot: ${RUNTEST} - expdir: ${RUNTEST}/expdir - icsdir: ${ICSDIR_ROOT}/C96C48 - idate: 2021122018 - edate: 2021122200 - nens: 2 - gfs_cyc: 1 - start: cold diff --git a/ci/experiments/C96C48_hybatmDA_also.yaml b/ci/experiments/C96C48_hybatmDA_also.yaml deleted file mode 100644 index cf291e5081..0000000000 --- a/ci/experiments/C96C48_hybatmDA_also.yaml +++ /dev/null @@ -1,19 +0,0 @@ -environment: - HOMEgfs: ${HOMEGFS} # TODO - using the env var ${HOMEgfs} will cause - # the rocoto XML file to have HOMEgfs set to None -experiment: - mode: cycled - -arguments: - pslot: ${pslot} # TODO - same bug as above with HOMEgfs - app: ATM - resdet: 96 - resens: 48 - comrot: ${RUNTEST} - expdir: ${RUNTEST}/expdir - icsdir: ${ICSDIR_ROOT}/C96C48 - idate: 2021122018 - edate: 2021122200 - nens: 2 - gfs_cyc: 1 - start: cold diff --git a/ci/environments/hera.sh b/ci/platforms/hera.sh similarity index 57% rename from ci/environments/hera.sh rename to ci/platforms/hera.sh index 843b8b103b..35fe7bca91 100644 --- a/ci/environments/hera.sh +++ b/ci/platforms/hera.sh @@ -1,10 +1,7 @@ #!/usr/bin/bash export GFS_CI_ROOT=/scratch1/NCEPDEV/global/Terry.McGuinness/GFS_CI_ROOT -export GFS_MODULE_USE="${GFS_CI_ROOT}/global-workflow/modulefiles" export SLURM_ACCOUNT=fv3-cpu export SALLOC_ACCOUNT="${SLURM_ACCOUNT}" export SBATCH_ACCOUNT="${SLURM_ACCOUNT}" export SLURM_QOS=debug -export repo_url="https://github.com/NOAA-EMC/global-workflow.git" -#export 
repo_url="https://github.com/TerrenceMcGuinness-NOAA/global-workflow.git" export ICSDIR_ROOT="/scratch1/NCEPDEV/global/glopara/data/ICSDIR" diff --git a/ci/environments/orion.sh b/ci/platforms/orion.sh similarity index 65% rename from ci/environments/orion.sh rename to ci/platforms/orion.sh index 901d9488e9..7d69a3b276 100644 --- a/ci/environments/orion.sh +++ b/ci/platforms/orion.sh @@ -1,7 +1,7 @@ #!/usr/bin/bash -export GFS_CI_ROOT="TDB" #TODO -export GFS_MODULE_USE="${GFS_CI_ROOT}/global-workflow/modulefiles" +export GFS_CI_ROOT=/work2/noaa/global/mterry/GFS_CI_ROOT +export ICSDIR_ROOT=/work/noaa/global/glopara/data/ICSDIR export SLURM_ACCOUNT=fv3-cpu export SALLOC_ACCOUNT=${SLURM_ACCOUNT} export SBATCH_ACCOUNT=${SLURM_ACCOUNT} diff --git a/ci/scripts/check_ci.sh b/ci/scripts/check_ci.sh new file mode 100755 index 0000000000..aa48e9f894 --- /dev/null +++ b/ci/scripts/check_ci.sh @@ -0,0 +1,115 @@ +#!/bin/bash +set -eux +##################################################################################### +# +# Script description: BASH script for checking for cases in a given PR and +# running rocotostat on each to determine if the experiment has +# succeeded or faild. This script is intended +# to run from within a cron job in the CI Managers account +# Abstract TODO +##################################################################################### + +HOMEgfs="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." 
>/dev/null 2>&1 && pwd )" +scriptname=$(basename "${BASH_SOURCE[0]}") +echo "Begin ${scriptname} at $(date -u)" || true +export PS4='+ $(basename ${BASH_SOURCE})[${LINENO}]' + +GH=${HOME}/bin/gh +REPO_URL=${REPO_URL:-"https://github.com/NOAA-EMC/global-workflow.git"} + +######################################################################### +# Set up runtime environment varibles for accounts on supproted machines +######################################################################### + +source "${HOMEgfs}/ush/detect_machine.sh" +case ${MACHINE_ID} in + hera | orion) + echo "Running Automated Testing on ${MACHINE_ID}" + source "${HOMEgfs}/ci/platforms/${MACHINE_ID}.sh" + ;; + *) + echo "Unsupported platform. Exiting with error." + exit 1 + ;; +esac +set +x +source "${HOMEgfs}/ush/module-setup.sh" +module use "${HOMEgfs}/modulefiles" +module load "module_gwsetup.${MACHINE_ID}" +module list +set -x +rocotostat=$(which rocotostat) +if [[ -z ${rocotostat+x} ]]; then + echo "rocotostat not found on system" + exit 1 +else + echo "rocotostat being used from ${rocotostat}" +fi + +pr_list_file="open_pr_list" + +if [[ -s "${GFS_CI_ROOT}/${pr_list_file}" ]]; then + pr_list=$(cat "${GFS_CI_ROOT}/${pr_list_file}") +else + echo "no PRs to process .. exit" + exit 0 +fi + +############################################################# +# Loop throu all PRs in PR List and look for expirments in +# the RUNTESTS dir and for each one run runcotorun on them +############################################################# + +for pr in ${pr_list}; do + id=$("${GH}" pr view "${pr}" --repo "${REPO_URL}" --json id --jq '.id') + echo "Processing Pull Request #${pr} and looking for cases" + pr_dir="${GFS_CI_ROOT}/PR/${pr}" + + # If there is no RUNTESTS dir for this PR then cases have not been made yet + if [[ ! 
-d "${pr_dir}/RUNTESTS" ]]; then + continue + fi + num_cases=$(find "${pr_dir}/RUNTESTS" -mindepth 1 -maxdepth 1 -type d | wc -l) || true + + #Check for PR success when ${pr_dir}/RUNTESTS is void of subfolders + # since all successfull ones where previously removed + if [[ "${num_cases}" -eq 0 ]] && [[ -d "${pr_dir}/RUNTESTS" ]]; then + "${GH}" pr edit --repo "${REPO_URL}" "${pr}" --remove-label "CI-${MACHINE_ID^}-Running" --add-label "CI-${MACHINE_ID^}-Passed" + "${GH}" pr comment "${pr}" --repo "${REPO_URL}" --body-file "${GFS_CI_ROOT}/PR/${pr}/output_${id}" + sed -i "/${pr}/d" "${GFS_CI_ROOT}/${pr_list_file}" + # Completely remove the PR and its cloned repo on sucess of all cases + rm -Rf "${pr_dir}" + continue + fi + + for cases in "${pr_dir}/RUNTESTS/"*; do + pslot=$(basename "${cases}") + xml="${pr_dir}/RUNTESTS/${pslot}/EXPDIR/${pslot}/${pslot}.xml" + db="${pr_dir}/RUNTESTS/${pslot}/EXPDIR/${pslot}/${pslot}.db" + rocoto_stat_output=$("${rocotostat}" -w "${xml}" -d "${db}" -s | grep -v CYCLE) || true + num_cycles=$(echo "${rocoto_stat_output}" | wc -l) || true + num_done=$(echo "${rocoto_stat_output}" | grep -c Done) || true + num_succeeded=$("${rocotostat}" -w "${xml}" -d "${db}" -a | grep -c SUCCEEDED) || true + echo "${pslot} Total Cycles: ${num_cycles} number done: ${num_done}" || true + num_failed=$("${rocotostat}" -w "${xml}" -d "${db}" -a | grep -c -E 'FAIL|DEAD') || true + if [[ ${num_failed} -ne 0 ]]; then + { + echo "Experiment ${pslot} Terminated: *FAILED*" + echo "Experiment ${pslot} Terminated with ${num_failed} tasks failed at $(date)" || true + } >> "${GFS_CI_ROOT}/PR/${pr}/output_${id}" + "${GH}" pr edit --repo "${REPO_URL}" "${pr}" --remove-label "CI-${MACHINE_ID^}-Running" --add-label "CI-${MACHINE_ID^}-Failed" + "${GH}" pr comment "${pr}" --repo "${REPO_URL}" --body-file "${GFS_CI_ROOT}/PR/${pr}/output_${id}" + sed -i "/${pr}/d" "${GFS_CI_ROOT}/${pr_list_file}" + fi + if [[ "${num_done}" -eq "${num_cycles}" ]]; then + { + echo "Experiment 
${pslot} completed: *SUCCESS*" + echo "Experiment ${pslot} Completed at $(date)" || true + echo -n "with ${num_succeeded} successfully completed jobs" || true + } >> "${GFS_CI_ROOT}/PR/${pr}/output_${id}" + "${GH}" pr comment "${pr}" --repo "${REPO_URL}" --body-file "${GFS_CI_ROOT}/PR/${pr}/output_${id}" + #Remove Experment cases that completed successfully + rm -Rf "${pr_dir}/RUNTESTS/${pslot}" + fi + done +done diff --git a/ci/scripts/clone-build_ci.sh b/ci/scripts/clone-build_ci.sh index e6b5eb53ef..022cc44378 100755 --- a/ci/scripts/clone-build_ci.sh +++ b/ci/scripts/clone-build_ci.sh @@ -1,11 +1,6 @@ #!/bin/bash set -eux -################################################################# -# TODO using static build for GitHub CLI until fixed in HPC-Stack -################################################################# -GH=/home/Terry.McGuinness/bin/gh -repo_url=${repo_url:-"https://github.com/global-workflow.git"} ##################################################################### # Usage and arguments for specfifying cloned directgory ##################################################################### @@ -45,25 +40,14 @@ while getopts "p:d:o:h" opt; do esac done -#################################################################### -# start output file -{ - echo "Automated global-workflow Testing Results:" - echo "Machine: ${CI_HOST}" - echo '```' - echo "Start: $(date) on $(hostname)" || true - echo "---------------------------------------------------" -} >> "${outfile}" -###################################################################### - -cd "${repodir}" +cd "${repodir}" || exit 1 # clone copy of repo if [[ -d global-workflow ]]; then rm -Rf global-workflow fi -git clone "${repo_url}" -cd global-workflow +git clone "${REPO_URL}" +cd global-workflow || exit 1 pr_state=$(gh pr view "${PR}" --json state --jq '.state') if [[ "${pr_state}" != "OPEN" ]]; then @@ -73,34 +57,63 @@ if [[ "${pr_state}" != "OPEN" ]]; then fi # checkout pull request -"${GH}" pr 
checkout "${PR}" --repo "${repo_url}" +"${GH}" pr checkout "${PR}" --repo "${REPO_URL}" +HOMEgfs="${PWD}" +source "${HOMEgfs}/ush/detect_machine.sh" + +#################################################################### +# start output file +{ + echo "Automated global-workflow Testing Results:" + echo '```' + echo "Machine: ${MACHINE_ID^}" + echo "Start: $(date) on $(hostname)" || true + echo "---------------------------------------------------" +} >> "${outfile}" +###################################################################### # get commit hash commit=$(git log --pretty=format:'%h' -n 1) echo "${commit}" > "../commit" -# run build script -cd sorc +# run checkout script +cd sorc || exit 1 +set +e +./checkout.sh -c -g -u &>> log.checkout +checkout_status=$? +if [[ ${checkout_status} != 0 ]]; then + { + echo "Checkout: *FAILED*" + echo "Checkout: Failed at $(date)" || true + echo "Checkout: see output at ${PWD}/log.checkout" + } >> "${outfile}" + exit "${checkout_status}" +else + { + echo "Checkout: *SUCCESS*" + echo "Checkout: Completed at $(date)" || true + } >> "${outfile}" +fi + +# build full cycle +source "${HOMEgfs}/ush/module-setup.sh" export BUILD_JOBS=8 rm -rf log.build -./checkout.sh -g -c -# build full cycle -./build_all.sh -g &>> log.build - -# Validations +./build_all.sh &>> log.build build_status=$? 
-if [[ ${build_status} -eq 0 ]]; then -{ - echo "Build: *SUCCESS*" - echo "Build: Completed at $(date)" || true -} >> "${outfile}" + +if [[ ${build_status} != 0 ]]; then + { + echo "Build: *FAILED*" + echo "Build: Failed at $(date)" || true + echo "Build: see output at ${PWD}/log.build" + } >> "${outfile}" + exit "${build_status}" else -{ - echo "Build: *FAILED*" - echo "Build: Failed at $(date)" || true - echo "Build: see output at ${PWD}/log.build" -} - echo '```' >> "${outfile}" + { + echo "Build: *SUCCESS*" + echo "Build: Completed at $(date)" || true + } >> "${outfile}" fi ./link_workflow.sh diff --git a/ci/scripts/create_experiment.py b/ci/scripts/create_experiment.py index 6b946f3a4a..ce95714d48 100755 --- a/ci/scripts/create_experiment.py +++ b/ci/scripts/create_experiment.py @@ -10,14 +10,14 @@ ${HOMEgfs}/workflow/setup_expt.py ${HOMEgfs}/workflow/setup_xml.py -The yaml file are simply the argments for these two scripts. +The yaml file are simply the arguments for these two scripts. After this scripts runs these two the use will have an experiment ready for launching Output ------ -Functionally an experement is setup as a result running the two scripts discribed above -with an error code of 0 apon success. +Functionally an experiment is setup as a result running the two scripts described above +with an error code of 0 upon success. 
""" import sys @@ -30,8 +30,6 @@ from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter -from workflow.hosts import Host - logger = Logger(level='DEBUG', colored_log=True) @@ -47,12 +45,7 @@ def input_args(): Description ----------- - A full path to a YAML file with the following format with required sections: environment, experiment, arguments - - environment: - HOMEgfs: ${HOMEGFS} - used to pass the environment variable $HOMEGFS - of the path to the global-workflow repo being tested + A full path to a YAML file with the following format with required sections: experiment, arguments experiment: mode: @@ -60,7 +53,7 @@ def input_args(): arguments: holds all the remaining key values pairs for all requisite arguments documented for setup_expt.py - + Note: the argument pslot is derived from the basename of the yamlfile itself Returns ------- @@ -68,10 +61,10 @@ def input_args(): args: Namespace Namespace with the value of the file path to a yaml file from the key yaml - +:w """ - description = """Single agument as a yaml file containing the + description = """Single argument as a yaml file containing the key value pairs as arguments to setup_expt.py """ @@ -79,6 +72,7 @@ def input_args(): formatter_class=ArgumentDefaultsHelpFormatter) parser.add_argument('--yaml', help='yaml configuration file per experiment', type=str, required=True) + parser.add_argument('--dir', help='full path to top level of repo of global-workflow', type=str, required=True) args = parser.parse_args() return args @@ -87,17 +81,10 @@ def input_args(): if __name__ == '__main__': user_inputs = input_args() - - try: - host = Host() - logger.info(f'Running on HOST:{host.machine}') - except NotImplementedError: - logger.error(f'HOST:{socket.gethostname()} is not currently supported') - sys.exit(1) - setup_expt_args = YAMLFile(path=user_inputs.yaml) - HOMEgfs = setup_expt_args.environment.HOMEgfs + HOMEgfs = user_inputs.dir + pslot = Path(user_inputs.yaml).stem mode = 
setup_expt_args.experiment.mode setup_expt_cmd = Executable(Path.absolute(Path.joinpath(Path(HOMEgfs), 'workflow', 'setup_expt.py'))) @@ -107,11 +94,14 @@ def input_args(): setup_expt_cmd.add_default_arg(f'--{conf}') setup_expt_cmd.add_default_arg(str(value)) + setup_expt_cmd.add_default_arg('--pslot') + setup_expt_cmd.add_default_arg(pslot) + logger.info(f'Run command: {setup_expt_cmd.command}') setup_expt_cmd(output='stdout_expt', error='stderr_expt') setup_xml_cmd = Executable(Path.absolute(Path.joinpath(Path(HOMEgfs), 'workflow', 'setup_xml.py'))) - expdir = Path.absolute(Path.joinpath(Path(setup_expt_args.arguments.expdir), Path(setup_expt_args.arguments.pslot))) + expdir = Path.absolute(Path.joinpath(Path(setup_expt_args.arguments.expdir), Path(pslot))) setup_xml_cmd.add_default_arg(str(expdir)) logger.info(f'Run command: {setup_xml_cmd.command}') diff --git a/ci/scripts/driver.sh b/ci/scripts/driver.sh index 60634c3352..0bd90db36c 100755 --- a/ci/scripts/driver.sh +++ b/ci/scripts/driver.sh @@ -1,5 +1,6 @@ -#!/bin/bash --login -# +#!/bin/bash +set -eux + ##################################################################################### # # Script description: Top level driver script for checking PR @@ -7,7 +8,7 @@ # # Abstract: # -# This script uses GitHub CLI to check for Pull Requests with {machine}-CI tags on the +# This script uses GitHub CLI to check for Pull Requests with CI-Ready-${machine} tags on the # development branch for the global-workflow repo. It then stages tests directories per # PR number and calls clone-build_ci.sh to perform a clone and full build from $(HOMEgfs)/sorc # of the PR. 
It then is ready to run a suite of regression tests with various @@ -17,50 +18,26 @@ ################################################################# # TODO using static build for GitHub CLI until fixed in HPC-Stack ################################################################# -GH=/home/Terry.McGuinness/bin/gh -repo_url=${repo_url:-"https://github.com/NOAA-EMC/global-workflow.git"} +export GH=${HOME}/bin/gh +export REPO_URL=${REPO_URL:-"https://github.com/NOAA-EMC/global-workflow.git"} ################################################################ # Setup the reletive paths to scripts and PS4 for better logging ################################################################ -WF_ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." >/dev/null 2>&1 && pwd )" +HOMEgfs="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." >/dev/null 2>&1 && pwd )" scriptname=$(basename "${BASH_SOURCE[0]}") echo "Begin ${scriptname} at $(date -u)" || true export PS4='+ $(basename ${BASH_SOURCE})[${LINENO}]' - -usage() { - set +x - echo - echo "Usage: $0 -h" - echo - echo " -h display this message and quit" - echo - echo "This is top level script to run CI tests on the global-workflow repo" - if [[ -n "${TARGET+x}" ]]; then - echo "on the DEFAULT: ${TARGET} machine" - fi - echo - exit 0 -} - - ######################################################################### # Set up runtime environment varibles for accounts on supproted machines ######################################################################### -source "${WF_ROOT_DIR}/ush/detect_machine.sh" -if [[ "${MACHINE_ID}" != "UNKNOWN" ]]; then - TARGET="${MACHINE_ID}" -else - echo "Unsupported platform. Exiting with error." 
- exit 1 -fi - -case ${TARGET} in +source "${HOMEgfs}/ush/detect_machine.sh" +case ${MACHINE_ID} in hera | orion) - echo "Running Automated Testing on ${TARGET}" - source "${WF_ROOT_DIR}/ci/environments/${TARGET}.sh" + echo "Running Automated Testing on ${MACHINE_ID}" + source "${HOMEgfs}/ci/platforms/${MACHINE_ID}.sh" ;; *) echo "Unsupported platform. Exiting with error." @@ -68,15 +45,22 @@ case ${TARGET} in ;; esac +###################################################### +# setup runtime env for correct python install and git +###################################################### +set +x +source "${HOMEgfs}/ush/module-setup.sh" +module use "${HOMEgfs}/modulefiles" +module load "module_gwsetup.${MACHINE_ID}" +set -x + ############################################################ # query repo and get list of open PRs with tags {machine}-CI ############################################################ -set -eux -export CI_HOST="${TARGET^}" pr_list_file="open_pr_list" -rm -f "${pr_list_file}" -list=$(${GH} pr list --repo "${repo_url}" --label "${CI_HOST}-CI" --state "open") -list=$(echo "${list}" | awk '{print $1;}' > "${GFS_CI_ROOT}/${pr_list_file}") +touch "${GFS_CI_ROOT}/${pr_list_file}" +list=$(${GH} pr list --repo "${REPO_URL}" --label "CI-${MACHINE_ID^}-Ready" --state "open") +list=$(echo "${list}" | awk '{print $1;}' >> "${GFS_CI_ROOT}/${pr_list_file}") if [[ -s "${GFS_CI_ROOT}/${pr_list_file}" ]]; then pr_list=$(cat "${GFS_CI_ROOT}/${pr_list_file}") @@ -84,71 +68,64 @@ else echo "no PRs to process .. 
exit" exit 0 fi - + ############################################################# # Loop throu all open PRs -# Clone, checkout, build, creat set of experiments, for each +# Clone, checkout, build, creat set of cases, for each ############################################################# -cd "${GFS_CI_ROOT}" for pr in ${pr_list}; do - "${GH}" pr edit --repo "${repo_url}" "${pr}" --remove-label "${CI_HOST}-CI" --add-label "${CI_HOST}-Running" + + "${GH}" pr edit --repo "${REPO_URL}" "${pr}" --remove-label "CI-${MACHINE_ID^}-Ready" --add-label "CI-${MACHINE_ID^}-Building" echo "Processing Pull Request #${pr}" pr_dir="${GFS_CI_ROOT}/PR/${pr}" mkdir -p "${pr_dir}" # call clone-build_ci to clone and build PR - id=$("${GH}" pr view "${pr}" --repo "${repo_url}" --json id --jq '.id') - "${WF_ROOT_DIR}/ci/scripts/clone-build_ci.sh" -p "${pr}" -d "${pr_dir}" -o "${pr_dir}/output_${id}" + id=$("${GH}" pr view "${pr}" --repo "${REPO_URL}" --json id --jq '.id') + set +e + "${HOMEgfs}/ci/scripts/clone-build_ci.sh" -p "${pr}" -d "${pr_dir}" -o "${pr_dir}/output_${id}" ci_status=$? + set -e if [[ ${ci_status} -eq 0 ]]; then - #setup runtime env for correct python install - export HOMEGFS="${pr_dir}/global-workflow" - module use "${HOMEGFS}/modulefiles" - module load "module_setup.${TARGET}" - module list #setup space to put an experiment - export RUNTEST="${pr_dir}/RUNTEST" - rm -Rf "${RUNTEST:?}"/* - mkdir -p "${RUNTEST}" - #make links to the python packages used in the PR'ed repo - cd "${WF_ROOT_DIR}/ci/scripts" - if [[ ! -L workflow ]]; then - ln -s "${HOMEGFS}/workflow" workflow - fi - if [[ ! 
-L pygw ]]; then - ln -s "${HOMEGFS}/ush/python/pygw/src/pygw" pygw - fi + # export RUNTESTS for yaml case files to pickup + export RUNTESTS="${pr_dir}/RUNTESTS" + rm -Rf "${pr_dir:?}/RUNTESTS/"* + ############################################################# - # loop over every yaml file in ${WF_ROOT_DIR}/ci/experiments + # loop over every yaml file in ${HOMEgfs}/ci/cases # and create an run directory for each one for this PR loop ############################################################# - for yaml_config in "${WF_ROOT_DIR}/ci/experiments/"*.yaml; do + for yaml_config in "${HOMEgfs}/ci/cases/"*.yaml; do pslot=$(basename "${yaml_config}" .yaml) || true export pslot - "${WF_ROOT_DIR}/ci/scripts/create_experiment.py" --yaml "${WF_ROOT_DIR}/ci/experiments/${pslot}.yaml" + set +e + "${HOMEgfs}/ci/scripts/create_experiment.py" --yaml "${HOMEgfs}/ci/cases/${pslot}.yaml" --dir "${pr_dir}/global-workflow" ci_status=$? + set -e if [[ ${ci_status} -eq 0 ]]; then { - echo "Created experiment" - echo "Experiment setup: Completed at $(date) for expirment ${pslot}" || true + echo "Created experiment: *SUCCESS*" + echo "Case setup: Completed at $(date) for experiment ${pslot}" || true } >> "${GFS_CI_ROOT}/PR/${pr}/output_${id}" + "${GH}" pr edit --repo "${REPO_URL}" "${pr}" --remove-label "CI-${MACHINE_ID^}-Building" --add-label "CI-${MACHINE_ID^}-Running" else { - echo "Failed on createing experiment ${pslot}" + echo "Failed to create experiment}: *FAIL* ${pslot}" echo "Experiment setup: failed at $(date) for experiment ${pslot}" || true } >> "${GFS_CI_ROOT}/PR/${pr}/output_${id}" - "${GH}" pr edit "${pr}" --repo "${repo_url}" --remove-label "${CI_HOST}-Running" --add-label "${CI_HOST}-Failed" + "${GH}" pr edit "${pr}" --repo "${REPO_URL}" --remove-label "CI-${MACHINE_ID^}-Building" --add-label "CI-${MACHINE_ID^}-Failed" fi done - "${GH}" pr comment "${pr}" --repo "${repo_url}" --body-file "${GFS_CI_ROOT}/PR/${pr}/output_${id}" - "${GH}" pr edit --repo "${repo_url}" "${pr}" 
--remove-label "${CI_HOST}-Running" --add-label "${CI_HOST}-Passed" + else { echo "Failed on cloning and building global-workflowi PR: ${pr}" - echo "CI on ${CI_HOST} failed to build on $(date) for repo ${repo_url}}" || true + echo "CI on ${MACHINE_ID^} failed to build on $(date) for repo ${REPO_URL}}" || true } >> "${GFS_CI_ROOT}/PR/${pr}/output_${id}" - "${GH}" pr edit "${pr}" --repo "${repo_url}" --remove-label "${CI_HOST}-Running" --add-label "${CI_HOST}-Failed" + "${GH}" pr edit "${pr}" --repo "${REPO_URL}" --remove-label "CI-${MACHINE_ID^}-Building" --add-label "CI-${MACHINE_ID^}-Failed" fi + "${GH}" pr comment "${pr}" --repo "${REPO_URL}" --body-file "${GFS_CI_ROOT}/PR/${pr}/output_${id}" done # looping over each open and labeled PR diff --git a/ci/scripts/pygw b/ci/scripts/pygw new file mode 120000 index 0000000000..77d784f6ca --- /dev/null +++ b/ci/scripts/pygw @@ -0,0 +1 @@ +../../ush/python/pygw/src/pygw \ No newline at end of file diff --git a/ci/scripts/run_ci.sh b/ci/scripts/run_ci.sh new file mode 100755 index 0000000000..c79ea06e77 --- /dev/null +++ b/ci/scripts/run_ci.sh @@ -0,0 +1,71 @@ +#!/bin/bash +set -eux + +##################################################################################### +# +# Script description: BASH script for checking for cases in a given PR and +# simply running rocotorun on each. This script is intended +# to run from within a cron job in the CI Managers account +# Abstract TODO +##################################################################################### + +HOMEgfs="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." 
>/dev/null 2>&1 && pwd )" +scriptname=$(basename "${BASH_SOURCE[0]}") +echo "Begin ${scriptname} at $(date -u)" || true +export PS4='+ $(basename ${BASH_SOURCE})[${LINENO}]' + +######################################################################### +# Set up runtime environment varibles for accounts on supproted machines +######################################################################### + +source "${HOMEgfs}/ush/detect_machine.sh" +case ${MACHINE_ID} in + hera | orion) + echo "Running Automated Testing on ${MACHINE_ID}" + source "${HOMEgfs}/ci/platforms/${MACHINE_ID}.sh" + ;; + *) + echo "Unsupported platform. Exiting with error." + exit 1 + ;; +esac +set +x +source "${HOMEgfs}/ush/module-setup.sh" +module use "${HOMEgfs}/modulefiles" +module load "module_gwsetup.${MACHINE_ID}" +module list +set -eux +rocotorun=$(which rocotorun) +if [[ -z ${var+x} ]]; then + echo "rocotorun being used from ${rocotorun}" +else + echo "rocotorun not found on system" + exit 1 +fi + +pr_list_file="open_pr_list" + +if [[ -s "${GFS_CI_ROOT}/${pr_list_file}" ]]; then + pr_list=$(cat "${GFS_CI_ROOT}/${pr_list_file}") +else + echo "no PRs to process .. 
exit" + exit 0 +fi + +############################################################# +# Loop throu all PRs in PR List and look for expirments in +# the RUNTESTS dir and for each one run runcotorun on them +############################################################# + +for pr in ${pr_list}; do + echo "Processing Pull Request #${pr} and looking for cases" + pr_dir="${GFS_CI_ROOT}/PR/${pr}" + for cases in "${pr_dir}/RUNTESTS/"*; do + pslot=$(basename "${cases}") + xml="${pr_dir}/RUNTESTS/${pslot}/EXPDIR/${pslot}/${pslot}.xml" + db="${pr_dir}/RUNTESTS/${pslot}/EXPDIR/${pslot}/${pslot}.db" + echo "Running: ${rocotorun} -v 6 -w ${xml} -d ${db}" + "${rocotorun}" -v 10 -w "${xml}" -d "${db}" + done +done + diff --git a/docs/note_fixfield.txt b/docs/note_fixfield.txt index 3b22de5e13..af2539e48a 100644 --- a/docs/note_fixfield.txt +++ b/docs/note_fixfield.txt @@ -4,6 +4,8 @@ They are saved locally on all platforms Hera: /scratch1/NCEPDEV/global/glopara/fix Orion: /work/noaa/global/glopara/fix +Jet: /mnt/lfs4/HFIP/hfv3gfs/glopara/git/fv3gfs/fix +S4: /data/prod/glopara/fix ------------------------------------------------------------------------------ 09/28/2018 diff --git a/docs/source/components.rst b/docs/source/components.rst index 6b947b3432..9e4377f739 100644 --- a/docs/source/components.rst +++ b/docs/source/components.rst @@ -1,5 +1,5 @@ ########################### -Global Workflow Components +Global Workflow Components ########################### The global-workflow is a combination of several components working together to prepare, analyze, produce, and post-process forecast data. @@ -13,7 +13,7 @@ The major components of the system are: * Post-processing * Verification -The Global Workflow repository contains the workflow and script layers. After running the checkout script, the code and additional offline scripts for the analysis, forecast, and post-processing components will be present. Any non-workflow component is known as a sub-module. 
All of the sub-modules of the system reside in their respective repositories on GitHub. The global-workflow sub-modules are obtained by running the checkout script found under the /sorc folder. +The Global Workflow repository contains the workflow and script layers. After running the checkout script, the code and additional offline scripts for the analysis, forecast, and post-processing components will be present. Any non-workflow component is known as a sub-module. All of the sub-modules of the system reside in their respective repositories on GitHub. The global-workflow sub-modules are obtained by running the checkout script found under the /sorc folder. ====================== Component repositories @@ -21,12 +21,11 @@ Component repositories Components checked out via sorc/checkout.sh: -* **GFS UTILS** (https://github.com/ufs-community/gfs_utils): Utility codes needed by Global Workflow to run the GFS configuration -* **UFS-Weather-Model** (https://github.com/ufs-community/ufs-weather-model): This is the core model used by the Global-Workflow to provide forecasts. The UFS-weather-model repository is an umbrella repository consisting of cooupled component earth systeme that are all checked out when we check out the code at the top level of the repoitory +* **GFS UTILS** (https://github.com/ufs-community/gfs_utils): Utility codes needed by Global Workflow to run the GFS configuration +* **UFS-Weather-Model** (https://github.com/ufs-community/ufs-weather-model): This is the core model used by the Global-Workflow to provide forecasts. 
The UFS-weather-model repository is an umbrella repository consisting of cooupled component earth systeme that are all checked out when we check out the code at the top level of the repoitory * **GSI** (https://github.com/NOAA-EMC/GSI): This is the core code base for atmospheric Data Assimilation -* **GSI UTILS** (https://github.com/NOAA-EMC/GSI-Utils): Utility codes needed by GSI to create analysis -* **GSI Monitor** (https://github.com/NOAA-EMC/GSI-Monitor): These tools monitor the GSI package's data assimilation, detecting and reporting missing data sources, low observation counts, and high penalty values -* **GLDAS** (https://github.com/NOAA-EMC/GLDAS): Code base for Land Data Assimiation +* **GSI UTILS** (https://github.com/NOAA-EMC/GSI-Utils): Utility codes needed by GSI to create analysis +* **GSI Monitor** (https://github.com/NOAA-EMC/GSI-Monitor): These tools monitor the GSI package's data assimilation, detecting and reporting missing data sources, low observation counts, and high penalty values * **GDAS** (https://github.com/NOAA-EMC/GDASApp): Jedi based Data Assimilation system. This system is currently being developed for marine Data Assimilation and in time will replace GSI for atmospheric data assimilation as well * **UFS UTILS** (https://github.com/ufs-community/UFS_UTILS): Utility codes needed for UFS-weather-model * **Verif global** (https://github.com/NOAA-EMC/EMC_verif-global): Verification package to evaluate GFS parallels. It uses MET and METplus. At this moment the verification package is limited to providing atmospheric metrics only @@ -43,7 +42,7 @@ External dependencies Libraries ^^^^^^^^^ -All the libraries that are needed to run the end to end Global Workflow are built using a package manager. Currently these are served via HPC-STACK but will soon be available via SPACK-STACK. 
These libraries are already available on supported NOAA HPC platforms +All the libraries that are needed to run the end to end Global Workflow are built using a package manager. Currently these are served via HPC-STACK but will soon be available via SPACK-STACK. These libraries are already available on supported NOAA HPC platforms Find information on official installations of HPC-STACK here: @@ -60,6 +59,7 @@ Observation data, also known as dump data, is prepared in production and then ar * Hera: /scratch1/NCEPDEV/global/glopara/dump * Orion: /work/noaa/rstprod/dump +* Jet: /mnt/lfs4/HFIP/hfv3gfs/glopara/dump * WCOSS2: /lfs/h2/emc/global/noscrub/emc.global/dump * S4: /data/prod/glopara/dump diff --git a/docs/source/configure.rst b/docs/source/configure.rst index 284297459d..477e95cec7 100644 --- a/docs/source/configure.rst +++ b/docs/source/configure.rst @@ -10,7 +10,7 @@ The global-workflow configs contain switches that change how the system runs. Ma | APP | Model application | ATM | YES | See case block in config.base for options | +----------------+------------------------------+---------------+-------------+---------------------------------------------------+ | DOIAU | Enable 4DIAU for control | YES | NO | Turned off for cold-start first half cycle | -| | with 3 increments | | | | +| | with 3 increments | | | | +----------------+------------------------------+---------------+-------------+---------------------------------------------------+ | DOHYBVAR | Run EnKF | YES | YES | Don't recommend turning off | +----------------+------------------------------+---------------+-------------+---------------------------------------------------+ @@ -26,9 +26,6 @@ The global-workflow configs contain switches that change how the system runs. 
Ma | DO_GEMPAK | Run job to produce GEMPAK | NO | YES | downstream processing, ops only | | | products | | | | +----------------+------------------------------+---------------+-------------+---------------------------------------------------+ -| DO_GLDAS | Run GLDAS to spin up land | YES | YES | Spins up for 84hrs if sflux files not available | -| | ICs | | | | -+----------------+------------------------------+---------------+-------------+---------------------------------------------------+ | DO_VRFY | Run vrfy job | NO | YES | Whether to include vrfy job (GSI monitoring, | | | | | | tracker, VSDB, fit2obs) | +----------------+------------------------------+---------------+-------------+---------------------------------------------------+ @@ -48,9 +45,6 @@ The global-workflow configs contain switches that change how the system runs. Ma | QUILTING | Use I/O quilting | .true. | NO | If .true. choose OUTPUT_GRID as cubed_sphere_grid | | | | | | in netcdf or gaussian_grid | +----------------+------------------------------+---------------+-------------+---------------------------------------------------+ -| RETRO | Use retrospective parallel | NO | NO | Default of NO will tell getic job to pull from | -| | for ICs | | | production tapes. | -+----------------+------------------------------+---------------+-------------+---------------------------------------------------+ | WAFSF | Run jobs to produce WAFS | NO | YES | downstream processing, ops only | | | products | | | | +----------------+------------------------------+---------------+-------------+---------------------------------------------------+ diff --git a/docs/source/development.rst b/docs/source/development.rst index 6c7711bfe1..e95516bcca 100644 --- a/docs/source/development.rst +++ b/docs/source/development.rst @@ -92,21 +92,21 @@ All new code after 2022 Sep 1 will be required to meet these standards. We will .. 
_commit-standards: -======================== -Commit message standards -======================== +====================== +Pull request standards +====================== -**ALL** commits must follow best practices for commit messages: https://chris.beams.io/posts/git-commit/ +Pull requests should follow the pre-filled template provided when you open the PR. PR titles and descriptions become the commit message when the PR is squashed and merged, so we ask that they follow best practices for commit messages: - * Separate subject from body with a blank line - * Limit the subject line to 50 characters + * Limit the subject line (PR title) to 50 characters * Capitalize the subject line * Do not end the subject line with a period * Use the `imperative mood `_ in the subject line - * Wrap the body at 72 characters * Use the body to explain what and why vs. how * The final line of the commit message should include tags to relevant issues (e.g. ``Refs: #217, #300``) +This list is a modified version of the one provided at https://chris.beams.io/posts/git-commit/ with a couple removed that are not relevant to GitHub PRs. That source also provides the motivation for making sure we have good commit messages. + Here is the example commit message from the article linked above; it includes descriptions of what would be in each part of the commit message for guidance: :: diff --git a/docs/source/hpc.rst b/docs/source/hpc.rst index 7161e2b742..da54f29521 100644 --- a/docs/source/hpc.rst +++ b/docs/source/hpc.rst @@ -22,6 +22,7 @@ HPC helpdesks * HPSS: rdhpcs.hpss.help@noaa.gov * Gaea: oar.gfdl.help@noaa.gov * S4: david.huber@noaa.gov +* Jet: rdhpcs.jet.help@noaa.gov ====================== Restricted data access @@ -76,6 +77,8 @@ It is advised to use Git v2+ when available. 
At the time of writing this documen +---------+----------+---------------------------------------+ | Orion | v1.8.3.1 | **module load git/2.28.0** | +---------+----------+---------------------------------------+ +| Jet | v2.18.0 | default | ++---------+----------+---------------------------------------+ | WCOSS2 | v2.26.2 | default or **module load git/2.29.0** | +---------+----------+---------------------------------------+ | S4 | v1.8.3.1 | **module load git/2.30.0** | @@ -96,9 +99,9 @@ For the manage_externals utility functioning:: Error: fatal: ssh variant 'simple' does not support setting port Fix: git config --global ssh.variant ssh -=================================== -Stacksize on R&Ds (Hera, Orion, S4) -=================================== +======================================== +Stacksize on R&Ds (Hera, Orion, Jet, S4) +======================================== Some GFS components, like the UPP, need an unlimited stacksize. Add the following setting into your appropriate .*rc file to support these components: diff --git a/docs/source/init.rst b/docs/source/init.rst index b7a86f5b62..5c9c811052 100644 --- a/docs/source/init.rst +++ b/docs/source/init.rst @@ -103,79 +103,114 @@ Start date = 2021122018 Cycled ATM w/ Coupled (S2S) model ********************************* -Warm-start cycled w/ coupled (S2S) model C48 atmosphere 5 degree ocean/ice ICs are available in the following locations on supported platforms: +Warm-start cycled w/ coupled (S2S) model C48 atmosphere C48 enkf (80 members) 5 degree ocean/ice ICs are available in the following locations on supported platforms: :: - Hera: /scratch1/NCEPDEV/global/glopara/data/ICSDIR/C48mx500 - Orion: /work/noaa/global/glopara/data/ICSDIR/C48mx500 - WCOSS2: /lfs/h2/emc/global/noscrub/emc.global/data/ICSDIR/C48mx500 + Hera: /scratch1/NCEPDEV/global/glopara/data/ICSDIR/C48C48mx500 + Orion: /work/noaa/global/glopara/data/ICSDIR/C48C48mx500 + WCOSS2: /lfs/h2/emc/global/noscrub/emc.global/data/ICSDIR/C48C48mx500 + 
Jet: /lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/C48C48mx500 Start date = 2021032312 +.. note:: + The EnKF member ICs are dummy duplicates of the deterministic at the moment. + :: - -bash-4.2$ tree /scratch1/NCEPDEV/global/glopara/data/ICSDIR/C48mx500 - `-- gdas.20210323 - |-- 06 - | |-- atmos - | | `-- RESTART - | | |-- 20210323.120000.ca_data.tile1.nc - | | |-- 20210323.120000.ca_data.tile2.nc - | | |-- 20210323.120000.ca_data.tile3.nc - | | |-- 20210323.120000.ca_data.tile4.nc - | | |-- 20210323.120000.ca_data.tile5.nc - | | |-- 20210323.120000.ca_data.tile6.nc - | | |-- 20210323.120000.coupler.res - | | |-- 20210323.120000.fv_core.res.nc - | | |-- 20210323.120000.fv_core.res.tile1.nc - | | |-- 20210323.120000.fv_core.res.tile2.nc - | | |-- 20210323.120000.fv_core.res.tile3.nc - | | |-- 20210323.120000.fv_core.res.tile4.nc - | | |-- 20210323.120000.fv_core.res.tile5.nc - | | |-- 20210323.120000.fv_core.res.tile6.nc - | | |-- 20210323.120000.fv_srf_wnd.res.tile1.nc - | | |-- 20210323.120000.fv_srf_wnd.res.tile2.nc - | | |-- 20210323.120000.fv_srf_wnd.res.tile3.nc - | | |-- 20210323.120000.fv_srf_wnd.res.tile4.nc - | | |-- 20210323.120000.fv_srf_wnd.res.tile5.nc - | | |-- 20210323.120000.fv_srf_wnd.res.tile6.nc - | | |-- 20210323.120000.fv_tracer.res.tile1.nc - | | |-- 20210323.120000.fv_tracer.res.tile2.nc - | | |-- 20210323.120000.fv_tracer.res.tile3.nc - | | |-- 20210323.120000.fv_tracer.res.tile4.nc - | | |-- 20210323.120000.fv_tracer.res.tile5.nc - | | |-- 20210323.120000.fv_tracer.res.tile6.nc - | | |-- 20210323.120000.phy_data.tile1.nc - | | |-- 20210323.120000.phy_data.tile2.nc - | | |-- 20210323.120000.phy_data.tile3.nc - | | |-- 20210323.120000.phy_data.tile4.nc - | | |-- 20210323.120000.phy_data.tile5.nc - | | |-- 20210323.120000.phy_data.tile6.nc - | | |-- 20210323.120000.sfc_data.tile1.nc - | | |-- 20210323.120000.sfc_data.tile2.nc - | | |-- 20210323.120000.sfc_data.tile3.nc - | | |-- 20210323.120000.sfc_data.tile4.nc - | | |-- 
20210323.120000.sfc_data.tile5.nc - | | `-- 20210323.120000.sfc_data.tile6.nc - | |-- ice - | | `-- RESTART - | | `-- 20210323.120000.cice_model.res.nc - | |-- med - | | `-- RESTART - | | `-- 20210323.120000.ufs.cpld.cpl.r.nc - | `-- ocean - | `-- RESTART - | `-- 20210323.120000.MOM.res.nc - `-- 12 - |-- atmos - | |-- gdas.t12z.abias - | |-- gdas.t12z.abias_air - | |-- gdas.t12z.abias_int - | |-- gdas.t12z.abias_pc - | `-- gdas.t12z.radstat - `-- ocean - `-- gdas.t12z.ocninc.nc + -bash-4.2$ tree /scratch1/NCEPDEV/global/glopara/data/ICSDIR/C48C48mx500 + ├── enkfgdas.20210323 + │   ├── 06 + │   │   ├── mem001 + │   │   │   └── model_data -> ../../../gdas.20210323/06/model_data + │   │   ├── mem002 + │   │   │   └── model_data -> ../../../gdas.20210323/06/model_data + │   │   ├── mem003 + │   │   │   └── model_data -> ../../../gdas.20210323/06/model_data + ... + │   │   └── mem080 + │   │   └── model_data -> ../../../gdas.20210323/06/model_data + │   └── 12 + │   ├── mem001 + │   │   └── analysis + │   │   └── ocean + │   │   └── gdas.t12z.ocninc.nc -> ../../../../../gdas.20210323/12/analysis/ocean/gdas.t12z.ocninc.nc + │   ├── mem002 + │   │   └── analysis + │   │   └── ocean + │   │   └── gdas.t12z.ocninc.nc -> ../../../../../gdas.20210323/12/analysis/ocean/gdas.t12z.ocninc.nc + │   ├── mem003 + │   │   └── analysis + │   │   └── ocean + │   │   └── gdas.t12z.ocninc.nc -> ../../../../../gdas.20210323/12/analysis/ocean/gdas.t12z.ocninc.nc + ... 
+ │   └── mem080 + │   └── analysis + │   └── ocean + │   └── gdas.t12z.ocninc.nc -> ../../../../../gdas.20210323/12/analysis/ocean/gdas.t12z.ocninc.nc + └── gdas.20210323 + ├── 06 + │   └── model_data + │   ├── atmos + │   │   └── restart + │   │   ├── 20210323.120000.ca_data.tile1.nc + │   │   ├── 20210323.120000.ca_data.tile2.nc + │   │   ├── 20210323.120000.ca_data.tile3.nc + │   │   ├── 20210323.120000.ca_data.tile4.nc + │   │   ├── 20210323.120000.ca_data.tile5.nc + │   │   ├── 20210323.120000.ca_data.tile6.nc + │   │   ├── 20210323.120000.coupler.res + │   │   ├── 20210323.120000.fv_core.res.nc + │   │   ├── 20210323.120000.fv_core.res.tile1.nc + │   │   ├── 20210323.120000.fv_core.res.tile2.nc + │   │   ├── 20210323.120000.fv_core.res.tile3.nc + │   │   ├── 20210323.120000.fv_core.res.tile4.nc + │   │   ├── 20210323.120000.fv_core.res.tile5.nc + │   │   ├── 20210323.120000.fv_core.res.tile6.nc + │   │   ├── 20210323.120000.fv_srf_wnd.res.tile1.nc + │   │   ├── 20210323.120000.fv_srf_wnd.res.tile2.nc + │   │   ├── 20210323.120000.fv_srf_wnd.res.tile3.nc + │   │   ├── 20210323.120000.fv_srf_wnd.res.tile4.nc + │   │   ├── 20210323.120000.fv_srf_wnd.res.tile5.nc + │   │   ├── 20210323.120000.fv_srf_wnd.res.tile6.nc + │   │   ├── 20210323.120000.fv_tracer.res.tile1.nc + │   │   ├── 20210323.120000.fv_tracer.res.tile2.nc + │   │   ├── 20210323.120000.fv_tracer.res.tile3.nc + │   │   ├── 20210323.120000.fv_tracer.res.tile4.nc + │   │   ├── 20210323.120000.fv_tracer.res.tile5.nc + │   │   ├── 20210323.120000.fv_tracer.res.tile6.nc + │   │   ├── 20210323.120000.phy_data.tile1.nc + │   │   ├── 20210323.120000.phy_data.tile2.nc + │   │   ├── 20210323.120000.phy_data.tile3.nc + │   │   ├── 20210323.120000.phy_data.tile4.nc + │   │   ├── 20210323.120000.phy_data.tile5.nc + │   │   ├── 20210323.120000.phy_data.tile6.nc + │   │   ├── 20210323.120000.sfc_data.tile1.nc + │   │   ├── 20210323.120000.sfc_data.tile2.nc + │   │   ├── 20210323.120000.sfc_data.tile3.nc + │   │   
├── 20210323.120000.sfc_data.tile4.nc + │   │   ├── 20210323.120000.sfc_data.tile5.nc + │   │   └── 20210323.120000.sfc_data.tile6.nc + │   ├── ice + │   │   └── restart + │   │   └── 20210323.120000.cice_model.res.nc + │   ├── med + │   │   └── restart + │   │   └── 20210323.120000.ufs.cpld.cpl.r.nc + │   └── ocean + │   └── restart + │   └── 20210323.120000.MOM.res.nc + └── 12 + └── analysis + ├── atmos + │   ├── gdas.t12z.abias + │   ├── gdas.t12z.abias_air + │   ├── gdas.t12z.abias_int + │   ├── gdas.t12z.abias_pc + │   └── gdas.t12z.radstat + └── ocean + └── gdas.t12z.ocninc.nc .. _staged_ics_prototype: @@ -190,6 +225,7 @@ Forecast-only P8 prototype initial conditions are made available to users on sup WCOSS2: /lfs/h2/emc/global/noscrub/emc.global/IC/COUPLED HERA: /scratch1/NCEPDEV/climate/role.ufscpara/IC ORION: /work/noaa/global/glopara/data/ICSDIR/prototype_ICs + JET: /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs S4: /data/prod/glopara/coupled_ICs These locations are known within the workflow via paths set in ``parm/config/config.coupled_ic``. @@ -217,7 +253,7 @@ Not yet supported. See :ref:`Manual Generation` section below --------------------- Forecast-only coupled --------------------- -Coupled initial conditions are currently only generated offline and copied prior to the forecast run. Prototype initial conditions will automatically be used when setting up an experiment as an S2SW app, there is no need to do anything additional. Copies of initial conditions from the prototype runs are currently maintained on Hera, Orion, and WCOSS2. The locations used are determined by ``parm/config/config.coupled_ic``. If you need prototype ICs on another machine, please contact Walter (Walter.Kolczynski@noaa.gov). +Coupled initial conditions are currently only generated offline and copied prior to the forecast run. 
Prototype initial conditions will automatically be used when setting up an experiment as an S2SW app, there is no need to do anything additional. Copies of initial conditions from the prototype runs are currently maintained on Hera, Orion, Jet, and WCOSS2. The locations used are determined by ``parm/config/config.coupled_ic``. If you need prototype ICs on another machine, please contact Walter (Walter.Kolczynski@noaa.gov). .. _forecastonly-atmonly: @@ -225,9 +261,7 @@ Coupled initial conditions are currently only generated offline and copied prior Forecast-only mode (atm-only) ----------------------------- -Forecast-only mode in global workflow includes ``getic`` and ``init`` jobs for the gfs suite. The ``getic`` job pulls inputs for ``chgres_cube`` (init job) or warm start ICs into your ``ROTDIR/COMROT``. The ``init`` job then ingests those files to produce initial conditions for your experiment. - -Users on machines without HPSS access (e.g. Orion) need to perform the ``getic`` step manually and stage inputs for the ``init`` job. The table below lists the needed files for ``init`` and where to place them in your ``ROTDIR``. +The table below lists the needed initial condition files from past GFS versions to be used by the UFS_UTILS gdas_init utility. The utility will pull these files for you. See the next section (Manual Generation) for how to run the UFS_UTILS gdas_init utility and create initial conditions for your experiment. Note for table: yyyy=year; mm=month; dd=day; hh=cycle diff --git a/docs/source/jobs.rst b/docs/source/jobs.rst index ae7e1cd68a..67863bb9a2 100644 --- a/docs/source/jobs.rst +++ b/docs/source/jobs.rst @@ -3,7 +3,7 @@ GFS Configuration ################# .. figure:: _static/GFS_v16_flowchart.png - + Schematic flow chart for GFS v16 in operations The sequence of jobs that are run for an end-to-end (analysis+forecast+post processing+verification) GFS configuration using the Global Workflow is shown above. 
The system utilizes a collection of scripts that perform the tasks for each step. @@ -12,7 +12,7 @@ For any cycle the system consists of two suites -- the "gdas" suite which provid An experimental run is different from operations in the following ways: -* Workflow manager: operations utilizes `ecFlow `__, while development currently utilizes `ROCOTO `__. Note, experiments can also be run using ecFlow on platforms with ecFlow servers established. +* Workflow manager: operations utilizes `ecFlow `__, while development currently utilizes `ROCOTO `__. Note, experiments can also be run using ecFlow on platforms with ecFlow servers established. * Dump step is not run as it has already been completed during the real-time production runs and dump data is available in the global dump archive on supported machines. @@ -25,7 +25,7 @@ An experimental run is different from operations in the following ways: Downstream jobs (e.g. awips, gempak, etc.) are not included in the diagram. Those jobs are not normally run in developmental tests. ============================= -Jobs in the GFS Configuration +Jobs in the GFS Configuration ============================= +-------------------+-----------------------------------------------------------------------------------------------------------------------+ | JOB NAME | PURPOSE | @@ -65,8 +65,6 @@ Jobs in the GFS Configuration +-------------------+-----------------------------------------------------------------------------------------------------------------------+ | fcst | Runs the forecast (with or without one-way waves). | +-------------------+-----------------------------------------------------------------------------------------------------------------------+ -| gldas | Runs the Global Land Data Assimilation System (GLDAS). | -+-------------------+-----------------------------------------------------------------------------------------------------------------------+ | metpN | Runs MET/METplus verification via EMC_verif-global. 
| +-------------------+-----------------------------------------------------------------------------------------------------------------------+ | prep | Runs the data preprocessing prior to the analysis (storm relocation if needed and generation of prepbufr file). | diff --git a/docs/source/setup.rst b/docs/source/setup.rst index a4e70fbfcb..ec63327fbc 100644 --- a/docs/source/setup.rst +++ b/docs/source/setup.rst @@ -28,6 +28,13 @@ Experiment Setup module load miniconda/3.8-s4 + * - Jet + - :: + + module use /mnt/lfs4/HFIP/hfv3gfs/role.epic/miniconda3/modulefiles + module load miniconda3/4.12.0 + conda activate ufswm + If running with Rocoto make sure to have a Rocoto module loaded before running setup scripts: .. list-table:: ROCOTO Module Load Commands @@ -54,6 +61,10 @@ If running with Rocoto make sure to have a Rocoto module loaded before running s - :: module load rocoto/1.3.4 + * - Jet + - :: + + module load rocoto/1.3.3 ^^^^^^^^^^^^^^^^^^^^^^^^ Forecast-only experiment @@ -73,18 +84,20 @@ The following command examples include variables for reference but users should :: cd workflow - ./setup_expt.py forecast-only --idate $IDATE --edate $EDATE [--app $APP] [--start $START] [--gfs_cyc $GFS_CYC] [--resdet $RESDET] + ./setup_expt.py gfs forecast-only --idate $IDATE --edate $EDATE [--app $APP] [--start $START] [--gfs_cyc $GFS_CYC] [--resdet $RESDET] [--pslot $PSLOT] [--configdir $CONFIGDIR] [--comrot $COMROT] [--expdir $EXPDIR] where: - * ``forecast-only`` is the first positional argument that instructs the setup script to produce an experiment directory for forecast only experiments. + * ``gfs`` is the first positional argument that instructs the setup script to produce a GFS experiment directory + * ``forecast-only`` is the second positional argument that instructs the setup script to produce an experiment directory for forecast only experiments. 
* ``$APP`` is the target application, one of: - ATM: atmosphere-only [default] - ATMW: atm-wave - ATMA: atm-aerosols - S2S: atm-ocean-ice + - S2SA: atm-ocean-ice-aerosols - S2SW: atm-ocean-ice-wave - S2SWA: atm-ocean-ice-wave-aerosols @@ -105,21 +118,21 @@ Atm-only: :: cd workflow - ./setup_expt.py forecast-only --pslot test --idate 2020010100 --edate 2020010118 --resdet 384 --gfs_cyc 4 --comrot /some_large_disk_area/Joe.Schmo/comrot --expdir /some_safe_disk_area/Joe.Schmo/expdir + ./setup_expt.py gfs forecast-only --pslot test --idate 2020010100 --edate 2020010118 --resdet 384 --gfs_cyc 4 --comrot /some_large_disk_area/Joe.Schmo/comrot --expdir /some_safe_disk_area/Joe.Schmo/expdir Coupled: :: cd workflow - ./setup_expt.py forecast-only --app S2SW --pslot coupled_test --idate 2013040100 --edate 2013040100 --resdet 384 --comrot /some_large_disk_area/Joe.Schmo/comrot --expdir /some_safe_disk_area/Joe.Schmo/expdir + ./setup_expt.py gfs forecast-only --app S2SW --pslot coupled_test --idate 2013040100 --edate 2013040100 --resdet 384 --comrot /some_large_disk_area/Joe.Schmo/comrot --expdir /some_safe_disk_area/Joe.Schmo/expdir Coupled with aerosols: :: cd workflow - ./setup_expt.py forecast-only --app S2SWA --pslot coupled_test --idate 2013040100 --edate 2013040100 --resdet 384 --comrot /some_large_disk_area/Joe.Schmo/comrot --expdir /some_safe_disk_area/Joe.Schmo/expdir + ./setup_expt.py gfs forecast-only --app S2SWA --pslot coupled_test --idate 2013040100 --edate 2013040100 --resdet 384 --comrot /some_large_disk_area/Joe.Schmo/comrot --expdir /some_safe_disk_area/Joe.Schmo/expdir **************************************** Step 2: Set user and experiment settings @@ -182,13 +195,14 @@ The following command examples include variables for reference but users should :: cd workflow - ./setup_expt.py cycled --idate $IDATE --edate $EDATE [--app $APP] [--start $START] [--gfs_cyc $GFS_CYC] + ./setup_expt.py gfs cycled --idate $IDATE --edate $EDATE [--app $APP] [--start $START] 
[--gfs_cyc $GFS_CYC] [--resdet $RESDET] [--resens $RESENS] [--nens $NENS] [--cdump $CDUMP] [--pslot $PSLOT] [--configdir $CONFIGDIR] [--comrot $COMROT] [--expdir $EXPDIR] [--icsdir $ICSDIR] where: - * ``cycled`` is the first positional argument that instructs the setup script to produce an experiment directory for cycled experiments. + * ``gfs`` is the first positional argument that instructs the setup script to produce a GFS experiment directory + * ``cycled`` is the second positional argument that instructs the setup script to produce an experiment directory for cycled experiments. * ``$APP`` is the target application, one of: - ATM: atmosphere-only [default] @@ -215,13 +229,13 @@ Example: :: cd workflow - ./setup_expt.py cycled --pslot test --configdir /home/Joe.Schmo/git/global-workflow/parm/config --idate 2020010100 --edate 2020010118 --comrot /some_large_disk_area/Joe.Schmo/comrot --expdir /some_safe_disk_area/Joe.Schmo/expdir --resdet 384 --resens 192 --nens 80 --gfs_cyc 4 + ./setup_expt.py gfs cycled --pslot test --configdir /home/Joe.Schmo/git/global-workflow/parm/config --idate 2020010100 --edate 2020010118 --comrot /some_large_disk_area/Joe.Schmo/comrot --expdir /some_safe_disk_area/Joe.Schmo/expdir --resdet 384 --resens 192 --nens 80 --gfs_cyc 4 Example ``setup_expt.py`` on Orion: :: - Orion-login-3$ ./setup_expt.py cycled --pslot test --idate 2022010118 --edate 2022010200 --resdet 192 --resens 96 --nens 80 --comrot /work/noaa/stmp/jschmo/comrot --expdir /work/noaa/global/jschmo/expdir + Orion-login-3$ ./setup_expt.py gfs cycled --pslot test --idate 2022010118 --edate 2022010200 --resdet 192 --resens 96 --nens 80 --comrot /work/noaa/stmp/jschmo/comrot --expdir /work/noaa/global/jschmo/expdir EDITED: /work/noaa/global/jschmo/expdir/test/config.base as per user input. EDITED: /work/noaa/global/jschmo/expdir/test/config.aeroanl as per user input. EDITED: /work/noaa/global/jschmo/expdir/test/config.ocnanal as per user input. 
@@ -232,7 +246,7 @@ What happens if I run ``setup_expt.py`` again for an experiment that already exi :: - Orion-login-3$ ./setup_expt.py cycled --pslot test --idate 2022010118 --edate 2022010200 --resdet 192 --resens 96 --nens 80 --comrot /work/noaa/stmp/jschmo/comrot --expdir /work/noaa/global/jschmo/expdir + Orion-login-3$ ./setup_expt.py gfs cycled --pslot test --idate 2022010118 --edate 2022010200 --resdet 192 --resens 96 --nens 80 --comrot /work/noaa/stmp/jschmo/comrot --expdir /work/noaa/global/jschmo/expdir directory already exists in /work/noaa/stmp/jschmo/comrot/test diff --git a/ecf/defs/gfs_00.def b/ecf/defs/gfs_00.def index b564c6e260..2ff0a785a7 100644 --- a/ecf/defs/gfs_00.def +++ b/ecf/defs/gfs_00.def @@ -2224,10 +2224,6 @@ trigger /prod/primary/00/obsproc/v1.0/gdas/atmos/dump/jobsproc_gdas_atmos_dump:release_sfcprep endfamily endfamily - family init - task jgdas_atmos_gldas - trigger ../analysis/jgdas_atmos_analysis == complete - endfamily family analysis task jgdas_atmos_analysis trigger /prod/primary/00/obsproc/v1.0/gdas/atmos/prep/jobsproc_gdas_atmos_prep == complete and ../obsproc/prep/jgdas_atmos_emcsfc_sfc_prep == complete @@ -2354,7 +2350,7 @@ endfamily endfamily task jgdas_forecast - trigger ./atmos/analysis/jgdas_atmos_analysis:release_fcst and ./wave/prep/jgdas_wave_prep == complete and ./atmos/init/jgdas_atmos_gldas == complete + trigger ./atmos/analysis/jgdas_atmos_analysis:release_fcst and ./wave/prep/jgdas_wave_prep == complete endfamily family enkfgdas edit RUN 'gdas' diff --git a/ecf/defs/gfs_06.def b/ecf/defs/gfs_06.def index 29b896d769..4524d28374 100644 --- a/ecf/defs/gfs_06.def +++ b/ecf/defs/gfs_06.def @@ -2224,10 +2224,6 @@ trigger /prod/primary/06/obsproc/v1.0/gdas/atmos/dump/jobsproc_gdas_atmos_dump:release_sfcprep endfamily endfamily - family init - task jgdas_atmos_gldas - trigger ../analysis/jgdas_atmos_analysis == complete - endfamily family analysis task jgdas_atmos_analysis trigger 
/prod/primary/06/obsproc/v1.0/gdas/atmos/prep/jobsproc_gdas_atmos_prep == complete and ../obsproc/prep/jgdas_atmos_emcsfc_sfc_prep == complete @@ -2354,7 +2350,7 @@ endfamily endfamily task jgdas_forecast - trigger ./atmos/analysis/jgdas_atmos_analysis:release_fcst and ./wave/prep/jgdas_wave_prep == complete and ./atmos/init/jgdas_atmos_gldas == complete + trigger ./atmos/analysis/jgdas_atmos_analysis:release_fcst and ./wave/prep/jgdas_wave_prep == complete endfamily family enkfgdas edit RUN 'gdas' diff --git a/ecf/defs/gfs_12.def b/ecf/defs/gfs_12.def index f04e1f79a6..6e366bffcd 100644 --- a/ecf/defs/gfs_12.def +++ b/ecf/defs/gfs_12.def @@ -2225,10 +2225,6 @@ trigger /prod/primary/12/obsproc/v1.0/gdas/atmos/dump/jobsproc_gdas_atmos_dump:release_sfcprep endfamily endfamily - family init - task jgdas_atmos_gldas - trigger ../analysis/jgdas_atmos_analysis == complete - endfamily family analysis task jgdas_atmos_analysis trigger /prod/primary/12/obsproc/v1.0/gdas/atmos/prep/jobsproc_gdas_atmos_prep == complete and ../obsproc/prep/jgdas_atmos_emcsfc_sfc_prep == complete @@ -2355,7 +2351,7 @@ endfamily endfamily task jgdas_forecast - trigger ./atmos/analysis/jgdas_atmos_analysis:release_fcst and ./wave/prep/jgdas_wave_prep == complete and ./atmos/init/jgdas_atmos_gldas == complete + trigger ./atmos/analysis/jgdas_atmos_analysis:release_fcst and ./wave/prep/jgdas_wave_prep == complete endfamily family enkfgdas edit RUN 'gdas' diff --git a/ecf/defs/gfs_18.def b/ecf/defs/gfs_18.def index cefe301707..0a8c52cf7e 100644 --- a/ecf/defs/gfs_18.def +++ b/ecf/defs/gfs_18.def @@ -2224,10 +2224,6 @@ trigger /prod/primary/18/obsproc/v1.0/gdas/atmos/dump/jobsproc_gdas_atmos_dump:release_sfcprep endfamily endfamily - family init - task jgdas_atmos_gldas - trigger ../analysis/jgdas_atmos_analysis == complete - endfamily family analysis task jgdas_atmos_analysis trigger /prod/primary/18/obsproc/v1.0/gdas/atmos/prep/jobsproc_gdas_atmos_prep == complete and 
../obsproc/prep/jgdas_atmos_emcsfc_sfc_prep == complete @@ -2354,7 +2350,7 @@ endfamily endfamily task jgdas_forecast - trigger ./atmos/analysis/jgdas_atmos_analysis:release_fcst and ./wave/prep/jgdas_wave_prep == complete and ./atmos/init/jgdas_atmos_gldas == complete + trigger ./atmos/analysis/jgdas_atmos_analysis:release_fcst and ./wave/prep/jgdas_wave_prep == complete endfamily family enkfgdas edit RUN 'gdas' diff --git a/ecf/defs/prod00.def b/ecf/defs/prod00.def index 20e391faee..f36fee8ed0 100644 --- a/ecf/defs/prod00.def +++ b/ecf/defs/prod00.def @@ -2259,10 +2259,6 @@ suite prod00 trigger ../dump/jgdas_atmos_dump:release_sfcprep endfamily endfamily - family init - task jgdas_atmos_gldas - trigger ../analysis/jgdas_atmos_analysis == complete - endfamily family analysis task jgdas_atmos_analysis trigger ../obsproc/prep/jgdas_atmos_prep == complete and ../obsproc/prep/jgdas_atmos_emcsfc_sfc_prep == complete @@ -2389,7 +2385,7 @@ suite prod00 endfamily endfamily task jgdas_forecast - trigger ./atmos/analysis/jgdas_atmos_analysis:release_fcst and ./wave/prep/jgdas_wave_prep == complete and ./atmos/init/jgdas_atmos_gldas == complete + trigger ./atmos/analysis/jgdas_atmos_analysis:release_fcst and ./wave/prep/jgdas_wave_prep == complete endfamily family enkfgdas edit CYC '00' diff --git a/ecf/defs/prod06.def b/ecf/defs/prod06.def index 1d8a767a9e..9ba8b46332 100644 --- a/ecf/defs/prod06.def +++ b/ecf/defs/prod06.def @@ -2262,10 +2262,6 @@ suite prod06 trigger ../dump/jgdas_atmos_dump:release_sfcprep endfamily endfamily - family init - task jgdas_atmos_gldas - trigger ../analysis/jgdas_atmos_analysis == complete - endfamily family analysis task jgdas_atmos_analysis trigger ../obsproc/prep/jgdas_atmos_prep == complete and ../obsproc/prep/jgdas_atmos_emcsfc_sfc_prep == complete @@ -2392,7 +2388,7 @@ suite prod06 endfamily endfamily task jgdas_forecast - trigger ./atmos/analysis/jgdas_atmos_analysis:release_fcst and ./wave/prep/jgdas_wave_prep == complete and 
./atmos/init/jgdas_atmos_gldas == complete + trigger ./atmos/analysis/jgdas_atmos_analysis:release_fcst and ./wave/prep/jgdas_wave_prep == complete endfamily family enkfgdas edit CYC '06' diff --git a/ecf/defs/prod12.def b/ecf/defs/prod12.def index 3d44ed917d..1c058e2640 100644 --- a/ecf/defs/prod12.def +++ b/ecf/defs/prod12.def @@ -2259,10 +2259,6 @@ suite prod12 trigger ../dump/jgdas_atmos_dump:release_sfcprep endfamily endfamily - family init - task jgdas_atmos_gldas - trigger ../analysis/jgdas_atmos_analysis == complete - endfamily family analysis task jgdas_atmos_analysis trigger ../obsproc/prep/jgdas_atmos_prep == complete and ../obsproc/prep/jgdas_atmos_emcsfc_sfc_prep == complete @@ -2389,7 +2385,7 @@ suite prod12 endfamily endfamily task jgdas_forecast - trigger ./atmos/analysis/jgdas_atmos_analysis:release_fcst and ./wave/prep/jgdas_wave_prep == complete and ./atmos/init/jgdas_atmos_gldas == complete + trigger ./atmos/analysis/jgdas_atmos_analysis:release_fcst and ./wave/prep/jgdas_wave_prep == complete endfamily family enkfgdas edit CYC '12' diff --git a/ecf/defs/prod18.def b/ecf/defs/prod18.def index ba0b951044..a7f715b016 100644 --- a/ecf/defs/prod18.def +++ b/ecf/defs/prod18.def @@ -2259,10 +2259,6 @@ suite prod18 trigger ../dump/jgdas_atmos_dump:release_sfcprep endfamily endfamily - family init - task jgdas_atmos_gldas - trigger ../analysis/jgdas_atmos_analysis == complete - endfamily family analysis task jgdas_atmos_analysis trigger ../obsproc/prep/jgdas_atmos_prep == complete and ../obsproc/prep/jgdas_atmos_emcsfc_sfc_prep == complete @@ -2389,7 +2385,7 @@ suite prod18 endfamily endfamily task jgdas_forecast - trigger ./atmos/analysis/jgdas_atmos_analysis:release_fcst and ./wave/prep/jgdas_wave_prep == complete and ./atmos/init/jgdas_atmos_gldas == complete + trigger ./atmos/analysis/jgdas_atmos_analysis:release_fcst and ./wave/prep/jgdas_wave_prep == complete endfamily family enkfgdas edit CYC '18' diff --git 
a/ecf/scripts/gdas/atmos/init/jgdas_atmos_gldas.ecf b/ecf/scripts/gdas/atmos/init/jgdas_atmos_gldas.ecf deleted file mode 100755 index 0834533051..0000000000 --- a/ecf/scripts/gdas/atmos/init/jgdas_atmos_gldas.ecf +++ /dev/null @@ -1,69 +0,0 @@ -#PBS -S /bin/bash -#PBS -N %RUN%_atmos_gldas_%CYC% -#PBS -j oe -#PBS -q %QUEUE% -#PBS -A %PROJ%-%PROJENVIR% -#PBS -l walltime=00:20:00 -#PBS -l select=1:mpiprocs=112:ompthreads=1:ncpus=112 -#PBS -l place=vscatter:exclhost -#PBS -l debug=true - -model=gfs -%include -%include - -set -x - -export NET=%NET:gfs% -export RUN=%RUN% -export CDUMP=%RUN% - -############################################################ -# Load modules -############################################################ -module load PrgEnv-intel/${PrgEnv_intel_ver} -module load craype/${craype_ver} -module load intel/${intel_ver} -module load cray-mpich/${cray_mpich_ver} -module load cray-pals/${cray_pals_ver} -module load cfp/${cfp_ver} -module load libjpeg/${libjpeg_ver} -module load hdf5/${hdf5_ver} -module load netcdf/${netcdf_ver} -module load grib_util/${grib_util_ver} -module load wgrib2/${wgrib2_ver} - -module list - -############################################################# -# environment settings -############################################################# -export cyc=%CYC% -export cycle=t%CYC%z -export USE_CFP=YES - -############################################################ -# CALL executable job script here -############################################################ -${HOMEgfs}/jobs/JGDAS_ATMOS_GLDAS -if [ $? -ne 0 ]; then - ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" - ecflow_client --abort - exit -fi - -%include -%manual -###################################################################### -# Purpose: To execute the job that prepares initial condition for -# gdas. 
-###################################################################### - -###################################################################### -# Job specific troubleshooting instructions: -# see generic troubleshoot manual page -# -###################################################################### - -# include manual page below -%end diff --git a/env/CONTAINER.env b/env/CONTAINER.env index 48014ab313..4f85ae56de 100755 --- a/env/CONTAINER.env +++ b/env/CONTAINER.env @@ -4,8 +4,7 @@ if [[ $# -ne 1 ]]; then echo "Must specify an input argument to set runtime environment variables!" echo "argument can be any one of the following:" - echo "atmanalrun atmensanalrun" - echo "aeroanlrun" + echo "atmanlrun atmensanlrun aeroanlrun landanlrun" echo "anal sfcanl fcst post vrfy metp" echo "eobs eupd ecen efcs epos" echo "postsnd awips gempak" diff --git a/env/HERA.env b/env/HERA.env index a42e3a0170..9e35b4e38e 100755 --- a/env/HERA.env +++ b/env/HERA.env @@ -4,8 +4,7 @@ if [[ $# -ne 1 ]]; then echo "Must specify an input argument to set runtime environment variables!" 
echo "argument can be any one of the following:" - echo "atmanalrun atmensanalrun" - echo "aeroanlrun" + echo "atmanlrun atmensanlrun aeroanlrun landanlrun" echo "anal sfcanl fcst post vrfy metp" echo "eobs eupd ecen efcs epos" echo "postsnd awips gempak" @@ -17,7 +16,7 @@ step=$1 export npe_node_max=40 export launcher="srun -l --export=ALL" -export mpmd_opt="--multi-prog" +export mpmd_opt="--multi-prog --output=${step}.%J.%t.out" # Configure MPI environment #export I_MPI_ADJUST_ALLREDUCE=5 @@ -49,29 +48,21 @@ elif [[ "${step}" = "waveinit" ]] || [[ "${step}" = "waveprep" ]] || [[ "${step} export wavempexec=${launcher} export wave_mpmd=${mpmd_opt} -elif [[ "${step}" = "atmanalrun" ]]; then +elif [[ "${step}" = "atmanlrun" ]]; then - export CFP_MP=${CFP_MP:-"YES"} - export USE_CFP=${USE_CFP:-"YES"} - export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" - - nth_max=$((npe_node_max / npe_node_atmanalrun)) + nth_max=$((npe_node_max / npe_node_atmanlrun)) - export NTHREADS_ATMANAL=${nth_atmanalrun:-${nth_max}} - [[ ${NTHREADS_ATMANAL} -gt ${nth_max} ]] && export NTHREADS_ATMANAL=${nth_max} - export APRUN_ATMANAL="${launcher} -n ${npe_atmanalrun}" + export NTHREADS_ATMANL=${nth_atmanlrun:-${nth_max}} + [[ ${NTHREADS_ATMANL} -gt ${nth_max} ]] && export NTHREADS_ATMANL=${nth_max} + export APRUN_ATMANL="${launcher} -n ${npe_atmanlrun}" -elif [[ "${step}" = "atmensanalrun" ]]; then - - export CFP_MP=${CFP_MP:-"YES"} - export USE_CFP=${USE_CFP:-"YES"} - export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" +elif [[ "${step}" = "atmensanlrun" ]]; then - nth_max=$((npe_node_max / npe_node_atmensanalrun)) + nth_max=$((npe_node_max / npe_node_atmensanlrun)) - export NTHREADS_ATMENSANAL=${nth_atmensanalrun:-${nth_max}} - [[ ${NTHREADS_ATMENSANAL} -gt ${nth_max} ]] && export NTHREADS_ATMENSANAL=${nth_max} - export APRUN_ATMENSANAL="${launcher} -n ${npe_atmensanalrun}" + export NTHREADS_ATMENSANL=${nth_atmensanlrun:-${nth_max}} + [[ ${NTHREADS_ATMENSANL} -gt ${nth_max} ]] && export 
NTHREADS_ATMENSANL=${nth_max} + export APRUN_ATMENSANL="${launcher} -n ${npe_atmensanlrun}" elif [[ "${step}" = "aeroanlrun" ]]; then @@ -83,6 +74,14 @@ elif [[ "${step}" = "aeroanlrun" ]]; then [[ ${NTHREADS_AEROANL} -gt ${nth_max} ]] && export NTHREADS_AEROANL=${nth_max} export APRUN_AEROANL="${launcher} -n ${npe_aeroanlrun}" +elif [[ "${step}" = "landanlrun" ]]; then + + nth_max=$((npe_node_max / npe_node_landanlrun)) + + export NTHREADS_LANDANL=${nth_landanlrun:-${nth_max}} + [[ ${NTHREADS_LANDANL} -gt ${nth_max} ]] && export NTHREADS_LANDANL=${nth_max} + export APRUN_LANDANL="${launcher} -n ${npe_landanlrun}" + elif [[ "${step}" = "ocnanalbmat" ]]; then export APRUNCFP="${launcher} -n \$ncmd --multi-prog" @@ -103,6 +102,16 @@ elif [[ "${step}" = "ocnanalrun" ]]; then [[ ${NTHREADS_OCNANAL} -gt ${nth_max} ]] && export NTHREADS_OCNANAL=${nth_max} export APRUN_OCNANAL="${launcher} -n ${npe_ocnanalrun}" +elif [[ "${step}" = "ocnanalchkpt" ]]; then + + export APRUNCFP="${launcher} -n \$ncmd --multi-prog" + + nth_max=$((npe_node_max / npe_node_ocnanalchkpt)) + + export NTHREADS_OCNANAL=${nth_ocnanalchkpt:-${nth_max}} + [[ ${NTHREADS_OCNANAL} -gt ${nth_max} ]] && export NTHREADS_OCNANAL=${nth_max} + export APRUN_OCNANAL="${launcher} -n ${npe_ocnanalchkpt}" + elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then export MKL_NUM_THREADS=4 @@ -141,27 +150,6 @@ elif [[ "${step}" = "sfcanl" ]]; then npe_sfcanl=${ntiles:-6} export APRUN_CYCLE="${launcher} -n ${npe_sfcanl}" -elif [[ "${step}" = "gldas" ]]; then - - export USE_CFP="NO" - export CFP_MP="YES" - - nth_max=$((npe_node_max / npe_node_gldas)) - - export NTHREADS_GLDAS=${nth_gldas:-${nth_max}} - [[ ${NTHREADS_GLDAS} -gt ${nth_max} ]] && export NTHREADS_GLDAS=${nth_max} - export APRUN_GLDAS="${launcher} -n ${npe_gldas}" - - export NTHREADS_GAUSSIAN=${nth_gaussian:-1} - [[ ${NTHREADS_GAUSSIAN} -gt ${nth_max} ]] && export NTHREADS_GAUSSIAN=${nth_max} - export APRUN_GAUSSIAN="${launcher} -n 
${npe_gaussian}" - -# Must run data processing with exactly the number of tasks as time -# periods being processed. - - npe_gldas_data_proc=$((gldas_spinup_hours + 12)) - export APRUN_GLDAS_DATA_PROC="${launcher} -n ${npe_gldas_data_proc} ${mpmd_opt}" - elif [[ "${step}" = "eobs" ]]; then export MKL_NUM_THREADS=4 @@ -252,10 +240,6 @@ elif [[ "${step}" = "epos" ]]; then [[ ${NTHREADS_EPOS} -gt ${nth_max} ]] && export NTHREADS_EPOS=${nth_max} export APRUN_EPOS="${launcher} -n ${npe_epos}" -elif [[ "${step}" = "init" ]]; then - - export APRUN="${launcher} -n ${npe_init}" - elif [[ "${step}" = "postsnd" ]]; then export CFP_MP="YES" diff --git a/env/JET.env b/env/JET.env index 4035e8414c..a74828915c 100755 --- a/env/JET.env +++ b/env/JET.env @@ -4,7 +4,7 @@ if [[ $# -ne 1 ]]; then echo "Must specify an input argument to set runtime environment variables!" echo "argument can be any one of the following:" - echo "atmanalrun atmensanalrun aeroanlrun" + echo "atmanlrun atmensanlrun aeroanlrun landanlrun" echo "anal sfcanl fcst post vrfy metp" echo "eobs eupd ecen efcs epos" echo "postsnd awips gempak" @@ -18,11 +18,13 @@ if [[ "${PARTITION_BATCH}" = "xjet" ]]; then export npe_node_max=24 elif [[ "${PARTITION_BATCH}" = "vjet" ]]; then export npe_node_max=16 +elif [[ "${PARTITION_BATCH}" = "kjet" ]]; then + export npe_node_max=40 fi -export launcher="srun -l --export=ALL" -export mpmd_opt="--multi-prog" +export launcher="srun -l --epilog=/apps/local/bin/report-mem --export=ALL" +export mpmd_opt="--multi-prog --output=${step}.%J.%t.out" -# Configure STACK +# Configure MPI environment export OMP_STACKSIZE=2048000 export NTHSTACK=1024000000 @@ -34,46 +36,85 @@ if [[ "${step}" = "prep" ]] || [[ "${step}" = "prepbufr" ]]; then nth_max=$((npe_node_max / npe_node_prep)) export POE="NO" - export BACK=${BACK:-"NO"} + export BACK="NO" export sys_tp="JET" + export launcher_PREP="srun" elif [[ "${step}" = "waveinit" ]] || [[ "${step}" = "waveprep" ]] || [[ "${step}" = "wavepostsbs" ]] || 
[[ "${step}" = "wavepostbndpnt" ]] || [[ "${step}" = "wavepostbndpntbll" ]] || [[ "${step}" = "wavepostpnt" ]]; then + export CFP_MP="YES" if [[ "${step}" = "waveprep" ]]; then export MP_PULSE=0 ; fi export wavempexec=${launcher} export wave_mpmd=${mpmd_opt} -elif [[ "${step}" = "atmanalrun" ]]; then +elif [[ "${step}" = "atmanlrun" ]]; then - nth_max=$((npe_node_max / npe_node_atmanalrun)) + nth_max=$((npe_node_max / npe_node_atmanlrun)) - export NTHREADS_ATMANAL=${nth_atmanalrun:-${nth_max}} - [[ ${NTHREADS_ATMANAL} -gt ${nth_max} ]] && export NTHREADS_ATMANAL=${nth_max} - export APRUN_ATMANAL="${launcher} ${npe_atmanalrun}" + export NTHREADS_ATMANL=${nth_atmanlrun:-${nth_max}} + [[ ${NTHREADS_ATMANL} -gt ${nth_max} ]] && export NTHREADS_ATMANL=${nth_max} + export APRUN_ATMANL="${launcher} -n ${npe_atmanlrun}" -elif [[ "${step}" = "atmensanalrun" ]]; then +elif [[ "${step}" = "atmensanlrun" ]]; then - nth_max=$((npe_node_max / npe_node_atmensanalrun)) + nth_max=$((npe_node_max / npe_node_atmensanlrun)) - export NTHREADS_ATMENSANAL=${nth_atmensanalrun:-${nth_max}} - [[ ${NTHREADS_ATMENSANAL} -gt ${nth_max} ]] && export NTHREADS_ATMENSANAL=${nth_max} - export APRUN_ATMENSANAL="${launcher} ${npe_atmensanalrun}" + export NTHREADS_ATMENSANL=${nth_atmensanlrun:-${nth_max}} + [[ ${NTHREADS_ATMENSANL} -gt ${nth_max} ]] && export NTHREADS_ATMENSANL=${nth_max} + export APRUN_ATMENSANL="${launcher} ${npe_atmensanlrun}" elif [[ "${step}" = "aeroanlrun" ]]; then + export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" + nth_max=$((npe_node_max / npe_node_aeroanlrun)) export NTHREADS_AEROANL=${nth_aeroanlrun:-${nth_max}} [[ ${NTHREADS_AEROANL} -gt ${nth_max} ]] && export NTHREADS_AEROANL=${nth_max} - export APRUN_AEROANL="${launcher} ${npe_aeroanlrun}" + export APRUN_AEROANL="${launcher} -n ${npe_aeroanlrun}" + +elif [[ "${step}" = "landanlrun" ]]; then + + nth_max=$((npe_node_max / npe_node_landanlrun)) + + export NTHREADS_LANDANL=${nth_landanlrun:-${nth_max}} + [[ 
${NTHREADS_LANDANL} -gt ${nth_max} ]] && export NTHREADS_LANDANL=${nth_max} + export APRUN_LANDANL="${launcher} -n ${npe_landanlrun}" + +elif [[ "${step}" = "ocnanalbmat" ]]; then + + export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" + + nth_max=$((npe_node_max / npe_node_ocnanalbmat)) + + export NTHREADS_OCNANAL=${nth_ocnanalbmat:-${nth_max}} + [[ ${NTHREADS_OCNANAL} -gt ${nth_max} ]] && export NTHREADS_OCNANAL=${nth_max} + export APRUN_OCNANAL="${launcher} -n ${npe_ocnanalbmat}" + +elif [[ "${step}" = "ocnanalrun" ]]; then + + export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" + + nth_max=$((npe_node_max / npe_node_ocnanalrun)) + + export NTHREADS_OCNANAL=${nth_ocnanalrun:-${nth_max}} + [[ ${NTHREADS_OCNANAL} -gt ${nth_max} ]] && export NTHREADS_OCNANAL=${nth_max} + export APRUN_OCNANAL="${launcher} -n ${npe_ocnanalrun}" + +elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then -elif [[ "${step}" = "anal" ]]; then + export MKL_NUM_THREADS=4 + export MKL_CBWR=AUTO + + export CFP_MP=${CFP_MP:-"YES"} + export USE_CFP=${USE_CFP:-"YES"} + export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" nth_max=$((npe_node_max / npe_node_anal)) - export NTHREADS_GSI=${nth_gsi:-${nth_max}} + export NTHREADS_GSI=${nth_anal:-${nth_max}} [[ ${NTHREADS_GSI} -gt ${nth_max} ]] && export NTHREADS_GSI=${nth_max} - export APRUN_GSI="${launcher} ${npe_gsi:-${npe_anal:-${PBS_NP}}}" + export APRUN_GSI="${launcher} -n ${npe_gsi:-${npe_anal}}" export NTHREADS_CALCINC=${nth_calcinc:-1} [[ ${NTHREADS_CALCINC} -gt ${nth_max} ]] && export NTHREADS_CALCINC=${nth_max} @@ -82,11 +123,11 @@ elif [[ "${step}" = "anal" ]]; then export NTHREADS_CYCLE=${nth_cycle:-12} [[ ${NTHREADS_CYCLE} -gt ${npe_node_max} ]] && export NTHREADS_CYCLE=${npe_node_max} npe_cycle=${ntiles:-6} - export APRUN_CYCLE="${launcher} ${npe_cycle}" + export APRUN_CYCLE="${launcher} -n ${npe_cycle}" export NTHREADS_GAUSFCANL=1 npe_gausfcanl=${npe_gausfcanl:-1} - export APRUN_GAUSFCANL="${launcher} ${npe_gausfcanl}" + 
export APRUN_GAUSFCANL="${launcher} -n ${npe_gausfcanl}" elif [[ "${step}" = "sfcanl" ]]; then nth_max=$((npe_node_max / npe_node_sfcanl)) @@ -94,35 +135,34 @@ elif [[ "${step}" = "sfcanl" ]]; then export NTHREADS_CYCLE=${nth_sfcanl:-14} [[ ${NTHREADS_CYCLE} -gt ${npe_node_max} ]] && export NTHREADS_CYCLE=${npe_node_max} npe_sfcanl=${ntiles:-6} - export APRUN_CYCLE="${launcher} ${npe_sfcanl}" - -elif [[ "${step}" = "gldas" ]]; then - - nth_max=$((npe_node_max / npe_node_gldas)) - - export NTHREADS_GLDAS=${nth_gldas:-${nth_max}} - [[ ${NTHREADS_GLDAS} -gt ${nth_max} ]] && export NTHREADS_GLDAS=${nth_max} - export APRUN_GLDAS="${launcher} ${npe_gldas}" - - export NTHREADS_GAUSSIAN=${nth_gaussian:-1} - [[ ${NTHREADS_GAUSSIAN} -gt ${nth_max} ]] && export NTHREADS_GAUSSIAN=${nth_max} - export APRUN_GAUSSIAN="${launcher} ${npe_gaussian}" + export APRUN_CYCLE="${launcher} -n ${npe_sfcanl}" elif [[ "${step}" = "eobs" ]]; then + export MKL_NUM_THREADS=4 + export MKL_CBWR=AUTO + nth_max=$((npe_node_max / npe_node_eobs)) - export NTHREADS_GSI=${nth_gsi:-${nth_max}} + export NTHREADS_GSI=${nth_eobs:-${nth_max}} [[ ${NTHREADS_GSI} -gt ${nth_max} ]] && export NTHREADS_GSI=${nth_max} - export APRUN_GSI="${launcher} ${npe_gsi:-${npe_eobs:-${PBS_NP}}}" + export APRUN_GSI="${launcher} -n ${npe_gsi:-${npe_eobs}}" + + export CFP_MP=${CFP_MP:-"YES"} + export USE_CFP=${USE_CFP:-"YES"} + export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" elif [[ "${step}" = "eupd" ]]; then nth_max=$((npe_node_max / npe_node_eupd)) - export NTHREADS_ENKF=${nth_enkf:-${nth_max}} + export NTHREADS_ENKF=${nth_eupd:-${nth_max}} [[ ${NTHREADS_ENKF} -gt ${nth_max} ]] && export NTHREADS_ENKF=${nth_max} - export APRUN_ENKF="${launcher} ${npe_enkf:-${npe_eupd:-${PBS_NP}}}" + export APRUN_ENKF="${launcher} -n ${npe_enkf:-${npe_eupd}}" + + export CFP_MP=${CFP_MP:-"YES"} + export USE_CFP=${USE_CFP:-"YES"} + export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" elif [[ "${step}" = "fcst" ]] || [[ "${step}" = "efcs" ]]; 
then @@ -145,11 +185,11 @@ elif [[ "${step}" = "post" ]]; then export NTHREADS_NP=${nth_np:-1} [[ ${NTHREADS_NP} -gt ${nth_max} ]] && export NTHREADS_NP=${nth_max} - export APRUN_NP="${launcher}" + export APRUN_NP="${launcher} -n ${npe_post}" export NTHREADS_DWN=${nth_dwn:-1} [[ ${NTHREADS_DWN} -gt ${nth_max} ]] && export NTHREADS_DWN=${nth_max} - export APRUN_NP="${launcher}" + export APRUN_DWN="${launcher} -n ${npe_dwn}" elif [[ "${step}" = "ecen" ]]; then @@ -157,7 +197,7 @@ elif [[ "${step}" = "ecen" ]]; then export NTHREADS_ECEN=${nth_ecen:-${nth_max}} [[ ${NTHREADS_ECEN} -gt ${nth_max} ]] && export NTHREADS_ECEN=${nth_max} - export APRUN_ECEN="${launcher} ${npe_ecen:-${PBS_NP}}" + export APRUN_ECEN="${launcher} -n ${npe_ecen}" export NTHREADS_CHGRES=${nth_chgres:-12} [[ ${NTHREADS_CHGRES} -gt ${npe_node_max} ]] && export NTHREADS_CHGRES=${npe_node_max} @@ -165,7 +205,7 @@ elif [[ "${step}" = "ecen" ]]; then export NTHREADS_CALCINC=${nth_calcinc:-1} [[ ${NTHREADS_CALCINC} -gt ${nth_max} ]] && export NTHREADS_CALCINC=${nth_max} - export APRUN_CALCINC="${launcher} ${npe_ecen:-${PBS_NP}}" + export APRUN_CALCINC="${launcher} -n ${npe_ecen}" elif [[ "${step}" = "esfc" ]]; then @@ -173,12 +213,11 @@ elif [[ "${step}" = "esfc" ]]; then export NTHREADS_ESFC=${nth_esfc:-${nth_max}} [[ ${NTHREADS_ESFC} -gt ${nth_max} ]] && export NTHREADS_ESFC=${nth_max} - export APRUN_ESFC="${launcher} ${npe_esfc:-${PBS_NP}}" + export APRUN_ESFC="${launcher} -n ${npe_esfc}" - export NTHREADS_CYCLE=${nth_cycle:-12} + export NTHREADS_CYCLE=${nth_cycle:-14} [[ ${NTHREADS_CYCLE} -gt ${npe_node_max} ]] && export NTHREADS_CYCLE=${npe_node_max} - export APRUN_CYCLE="${launcher} ${npe_esfc}" - + export APRUN_CYCLE="${launcher} -n ${npe_esfc}" elif [[ "${step}" = "epos" ]]; then @@ -186,23 +225,21 @@ elif [[ "${step}" = "epos" ]]; then export NTHREADS_EPOS=${nth_epos:-${nth_max}} [[ ${NTHREADS_EPOS} -gt ${nth_max} ]] && export NTHREADS_EPOS=${nth_max} - export APRUN_EPOS="${launcher} 
${npe_epos:-${PBS_NP}}" - -elif [[ "${step}" = "init" ]]; then - - export APRUN="${launcher}" + export APRUN_EPOS="${launcher} -n ${npe_epos}" elif [[ "${step}" = "postsnd" ]]; then + export CFP_MP="YES" + nth_max=$((npe_node_max / npe_node_postsnd)) export NTHREADS_POSTSND=${nth_postsnd:-1} [[ ${NTHREADS_POSTSND} -gt ${nth_max} ]] && export NTHREADS_POSTSND=${nth_max} - export APRUN_POSTSND="${launcher} ${npe_postsnd}" + export APRUN_POSTSND="${launcher} -n ${npe_postsnd}" export NTHREADS_POSTSNDCFP=${nth_postsndcfp:-1} [[ ${NTHREADS_POSTSNDCFP} -gt ${nth_max} ]] && export NTHREADS_POSTSNDCFP=${nth_max} - export APRUN_POSTSNDCFP="${launcher} ${npe_postsndcfp}" + export APRUN_POSTSNDCFP="${launcher} -n ${npe_postsndcfp} ${mpmd_opt}" elif [[ "${step}" = "awips" ]]; then diff --git a/env/ORION.env b/env/ORION.env index 04b1344609..17d08e0658 100755 --- a/env/ORION.env +++ b/env/ORION.env @@ -4,8 +4,7 @@ if [[ $# -ne 1 ]]; then echo "Must specify an input argument to set runtime environment variables!" 
echo "argument can be any one of the following:" - echo "atmanalrun atmensanalrun" - echo "aeroanlrun" + echo "atmanlrun atmensanlrun aeroanlrun landanlrun" echo "anal sfcanl fcst post vrfy metp" echo "eobs eupd ecen efcs epos" echo "postsnd awips gempak" @@ -17,7 +16,7 @@ step=$1 export npe_node_max=40 export launcher="srun -l --export=ALL" -export mpmd_opt="--multi-prog" +export mpmd_opt="--multi-prog --output=${step}.%J.%t.out" # Configure MPI environment export MPI_BUFS_PER_PROC=2048 @@ -42,36 +41,29 @@ if [[ "${step}" = "prep" ]] || [[ "${step}" = "prepbufr" ]]; then export sys_tp="ORION" export launcher_PREP="srun" -elif [[ "${step}" = "waveinit" ]] || [[ "${step}" = "waveprep" ]] || [[ "${step}" = "wavepostsbs" ]] || [[ "${step}" = "wavepostbndpnt" ]] || [[ "${step}" = "wavepostbndpntbll" ]]|| [[ "${step}" = "wavepostpnt" ]]; then +elif [[ "${step}" = "waveinit" ]] || [[ "${step}" = "waveprep" ]] || [[ "${step}" = "wavepostsbs" ]] || \ + [[ "${step}" = "wavepostbndpnt" ]] || [[ "${step}" = "wavepostpnt" ]] || [[ "${step}" == "wavepostbndpntbll" ]]; then export CFP_MP="YES" if [[ "${step}" = "waveprep" ]]; then export MP_PULSE=0 ; fi export wavempexec=${launcher} export wave_mpmd=${mpmd_opt} -elif [[ "${step}" = "atmanalrun" ]]; then +elif [[ "${step}" = "atmanlrun" ]]; then - export CFP_MP=${CFP_MP:-"YES"} - export USE_CFP=${USE_CFP:-"YES"} - export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" - - nth_max=$((npe_node_max / npe_node_atmanalrun)) + nth_max=$((npe_node_max / npe_node_atmanlrun)) - export NTHREADS_ATMANAL=${nth_atmanalrun:-${nth_max}} - [[ ${NTHREADS_ATMANAL} -gt ${nth_max} ]] && export NTHREADS_ATMANAL=${nth_max} - export APRUN_ATMANAL="${launcher} -n ${npe_atmanalrun}" + export NTHREADS_ATMANL=${nth_atmanlrun:-${nth_max}} + [[ ${NTHREADS_ATMANL} -gt ${nth_max} ]] && export NTHREADS_ATMANL=${nth_max} + export APRUN_ATMANL="${launcher} -n ${npe_atmanlrun}" -elif [[ "${step}" = "atmensanalrun" ]]; then +elif [[ "${step}" = "atmensanlrun" ]]; then 
- export CFP_MP=${CFP_MP:-"YES"} - export USE_CFP=${USE_CFP:-"YES"} - export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" + nth_max=$((npe_node_max / npe_node_atmensanlrun)) - nth_max=$((npe_node_max / npe_node_atmensanalrun)) - - export NTHREADS_ATMENSANAL=${nth_atmensanalrun:-${nth_max}} - [[ ${NTHREADS_ATMENSANAL} -gt ${nth_max} ]] && export NTHREADS_ATMENSANAL=${nth_max} - export APRUN_ATMENSANAL="${launcher} -n ${npe_atmensanalrun}" + export NTHREADS_ATMENSANL=${nth_atmensanlrun:-${nth_max}} + [[ ${NTHREADS_ATMENSANL} -gt ${nth_max} ]] && export NTHREADS_ATMENSANL=${nth_max} + export APRUN_ATMENSANL="${launcher} -n ${npe_atmensanlrun}" elif [[ "${step}" = "aeroanlrun" ]]; then @@ -83,6 +75,14 @@ elif [[ "${step}" = "aeroanlrun" ]]; then [[ ${NTHREADS_AEROANL} -gt ${nth_max} ]] && export NTHREADS_AEROANL=${nth_max} export APRUN_AEROANL="${launcher} -n ${npe_aeroanlrun}" +elif [[ "${step}" = "landanlrun" ]]; then + + nth_max=$((npe_node_max / npe_node_landanlrun)) + + export NTHREADS_LANDANL=${nth_landanlrun:-${nth_max}} + [[ ${NTHREADS_LANDANL} -gt ${nth_max} ]] && export NTHREADS_LANDANL=${nth_max} + export APRUN_LANDANL="${launcher} -n ${npe_landanlrun}" + elif [[ "${step}" = "ocnanalbmat" ]]; then export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" @@ -103,6 +103,16 @@ elif [[ "${step}" = "ocnanalrun" ]]; then [[ ${NTHREADS_OCNANAL} -gt ${nth_max} ]] && export NTHREADS_OCNANAL=${nth_max} export APRUN_OCNANAL="${launcher} -n ${npe_ocnanalrun}" +elif [[ "${step}" = "ocnanalchkpt" ]]; then + + export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" + + nth_max=$((npe_node_max / npe_node_ocnanalchkpt)) + + export NTHREADS_OCNANAL=${nth_ocnanalchkpt:-${nth_max}} + [[ ${NTHREADS_OCNANAL} -gt ${nth_max} ]] && export NTHREADS_OCNANAL=${nth_max} + export APRUN_OCNANAL="${launcher} -n ${npe_ocnanalchkpt}" + elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then export MKL_NUM_THREADS=4 @@ -139,26 +149,6 @@ elif [[ "${step}" = "sfcanl" ]]; then 
npe_sfcanl=${ntiles:-6} export APRUN_CYCLE="${launcher} -n ${npe_sfcanl}" -elif [[ "${step}" = "gldas" ]]; then - - export USE_CFP="NO" - - nth_max=$((npe_node_max / npe_node_gldas)) - - export NTHREADS_GLDAS=${nth_gldas:-${nth_max}} - [[ ${NTHREADS_GLDAS} -gt ${nth_max} ]] && export NTHREADS_GLDAS=${nth_max} - export APRUN_GLDAS="${launcher} -n ${npe_gldas}" - - export NTHREADS_GAUSSIAN=${nth_gaussian:-1} - [[ ${NTHREADS_GAUSSIAN} -gt ${nth_max} ]] && export NTHREADS_GAUSSIAN=${nth_max} - export APRUN_GAUSSIAN="${launcher} -n ${npe_gaussian}" - -# Must run data processing with exactly the number of tasks as time -# periods being processed. - - npe_gldas_data_proc=$((gldas_spinup_hours + 12)) - export APRUN_GLDAS_DATA_PROC="${launcher} -n ${npe_gldas_data_proc} ${mpmd_opt}" - elif [[ "${step}" = "eobs" ]]; then export MKL_NUM_THREADS=4 @@ -250,10 +240,6 @@ elif [[ "${step}" = "epos" ]]; then [[ ${NTHREADS_EPOS} -gt ${nth_max} ]] && export NTHREADS_EPOS=${nth_max} export APRUN_EPOS="${launcher} -n ${npe_epos}" -elif [[ "${step}" = "init" ]]; then - - export APRUN="${launcher} -n ${npe_init}" - elif [[ "${step}" = "postsnd" ]]; then export CFP_MP="YES" diff --git a/env/S4.env b/env/S4.env index e48cde8505..c2f82630d6 100755 --- a/env/S4.env +++ b/env/S4.env @@ -4,8 +4,7 @@ if [[ $# -ne 1 ]]; then echo "Must specify an input argument to set runtime environment variables!" 
echo "argument can be any one of the following:" - echo "atmanalrun atmensanalrun" - echo "aeroanlrun" + echo "atmanlrun atmensanlrun aeroanlrun landanlrun" echo "anal sfcanl fcst post vrfy metp" echo "eobs eupd ecen efcs epos" echo "postsnd awips gempak" @@ -22,7 +21,7 @@ elif [[ ${PARTITION_BATCH} = "ivy" ]]; then export npe_node_max=20 fi export launcher="srun -l --export=ALL" -export mpmd_opt="--multi-prog" +export mpmd_opt="--multi-prog --output=${step}.%J.%t.out" # Configure MPI environment export OMP_STACKSIZE=2048000 @@ -47,29 +46,21 @@ elif [[ "${step}" = "waveinit" ]] || [[ "${step}" = "waveprep" ]] || [[ "${step} export wavempexec=${launcher} export wave_mpmd=${mpmd_opt} -elif [[ "${step}" = "atmanalrun" ]]; then +elif [[ "${step}" = "atmanlrun" ]]; then - export CFP_MP=${CFP_MP:-"YES"} - export USE_CFP=${USE_CFP:-"YES"} - export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" + nth_max=$((npe_node_max / npe_node_atmanlrun)) - nth_max=$((npe_node_max / npe_node_atmanalrun)) + export NTHREADS_ATMANL=${nth_atmanlrun:-${nth_max}} + [[ ${NTHREADS_ATMANL} -gt ${nth_max} ]] && export NTHREADS_ATMANL=${nth_max} + export APRUN_ATMANL="${launcher} -n ${npe_atmanlrun}" - export NTHREADS_ATMANAL=${nth_atmanalrun:-${nth_max}} - [[ ${NTHREADS_ATMANAL} -gt ${nth_max} ]] && export NTHREADS_ATMANAL=${nth_max} - export APRUN_ATMANAL="${launcher} -n ${npe_atmanalrun}" +elif [[ "${step}" = "atmensanlrun" ]]; then -elif [[ "${step}" = "atmensanalrun" ]]; then + nth_max=$((npe_node_max / npe_node_atmensanlrun)) - export CFP_MP=${CFP_MP:-"YES"} - export USE_CFP=${USE_CFP:-"YES"} - export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" - - nth_max=$((npe_node_max / npe_node_atmensanalrun)) - - export NTHREADS_ATMENSANAL=${nth_atmensanalrun:-${nth_max}} - [[ ${NTHREADS_ATMENSANAL} -gt ${nth_max} ]] && export NTHREADS_ATMENSANAL=${nth_max} - export APRUN_ATMENSANAL="${launcher} -n ${npe_atmensanalrun}" + export NTHREADS_ATMENSANL=${nth_atmensanlrun:-${nth_max}} + [[ 
${NTHREADS_ATMENSANL} -gt ${nth_max} ]] && export NTHREADS_ATMENSANL=${nth_max} + export APRUN_ATMENSANL="${launcher} -n ${npe_atmensanlrun}" elif [[ "${step}" = "aeroanlrun" ]]; then @@ -81,6 +72,14 @@ elif [[ "${step}" = "aeroanlrun" ]]; then [[ ${NTHREADS_AEROANL} -gt ${nth_max} ]] && export NTHREADS_AEROANL=${nth_max} export APRUN_AEROANL="${launcher} -n ${npe_aeroanlrun}" +elif [[ "${step}" = "landanlrun" ]]; then + + nth_max=$((npe_node_max / npe_node_landanlrun)) + + export NTHREADS_LANDANL=${nth_landanlrun:-${nth_max}} + [[ ${NTHREADS_LANDANL} -gt ${nth_max} ]] && export NTHREADS_LANDANL=${nth_max} + export APRUN_LANDANL="${launcher} -n ${npe_landanlrun}" + elif [[ "${step}" = "ocnanalbmat" ]]; then echo "WARNING: ${step} is not enabled on S4!" @@ -124,27 +123,6 @@ elif [[ "${step}" = "sfcanl" ]]; then npe_sfcanl=${ntiles:-6} export APRUN_CYCLE="${launcher} -n ${npe_sfcanl}" -elif [[ "${step}" = "gldas" ]]; then - - export USE_CFP="NO" - export CFP_MP="YES" - - nth_max=$((npe_node_max / npe_node_gldas)) - - export NTHREADS_GLDAS=${nth_gldas:-${nth_max}} - [[ ${NTHREADS_GLDAS} -gt ${nth_max} ]] && export NTHREADS_GLDAS=${nth_max} - export APRUN_GLDAS="${launcher} -n ${npe_gldas}" - - export NTHREADS_GAUSSIAN=${nth_gaussian:-1} - [[ ${NTHREADS_GAUSSIAN} -gt ${nth_max} ]] && export NTHREADS_GAUSSIAN=${nth_max} - export APRUN_GAUSSIAN="${launcher} -n ${npe_gaussian}" - -# Must run data processing with exactly the number of tasks as time -# periods being processed. 
- - npe_gldas_data_proc=$((gldas_spinup_hours + 12)) - export APRUN_GLDAS_DATA_PROC="${launcher} -n ${npe_gldas_data_proc} ${mpmd_opt}" - elif [[ "${step}" = "eobs" ]]; then export MKL_NUM_THREADS=4 @@ -235,10 +213,6 @@ elif [[ "${step}" = "epos" ]]; then [[ ${NTHREADS_EPOS} -gt ${nth_max} ]] && export NTHREADS_EPOS=${nth_max} export APRUN_EPOS="${launcher} -n ${npe_epos}" -elif [[ "${step}" = "init" ]]; then - - export APRUN="${launcher} -n ${npe_init}" - elif [[ "${step}" = "postsnd" ]]; then export CFP_MP="YES" diff --git a/env/WCOSS2.env b/env/WCOSS2.env index 44f8dbc994..f609ea0249 100755 --- a/env/WCOSS2.env +++ b/env/WCOSS2.env @@ -4,8 +4,7 @@ if [[ $# -ne 1 ]]; then echo "Must specify an input argument to set runtime environment variables!" echo "argument can be any one of the following:" - echo "atmanalrun atmensanalrun" - echo "aeroanlrun" + echo "atmanlrun atmensanlrun aeroanlrun landanlrun" echo "anal sfcanl fcst post vrfy metp" echo "eobs eupd ecen esfc efcs epos" echo "postsnd awips gempak" @@ -36,29 +35,21 @@ elif [[ "${step}" = "waveinit" ]] || [[ "${step}" = "waveprep" ]] || [[ "${step} export wavempexec="${launcher} -np" export wave_mpmd=${mpmd_opt} -elif [[ "${step}" = "atmanalrun" ]]; then +elif [[ "${step}" = "atmanlrun" ]]; then - export CFP_MP=${CFP_MP:-"YES"} - export USE_CFP=${USE_CFP:-"YES"} - export APRUNCFP="${launcher} -np \$ncmd ${mpmd_opt}" - - nth_max=$((npe_node_max / npe_node_atmanalrun)) + nth_max=$((npe_node_max / npe_node_atmanlrun)) - export NTHREADS_ATMANAL=${nth_atmanalrun:-${nth_max}} - [[ ${NTHREADS_ATMANAL} -gt ${nth_max} ]] && export NTHREADS_ATMANAL=${nth_max} - export APRUN_ATMANAL="${launcher} -n ${npe_atmanalrun}" + export NTHREADS_ATMANL=${nth_atmanlrun:-${nth_max}} + [[ ${NTHREADS_ATMANL} -gt ${nth_max} ]] && export NTHREADS_ATMANL=${nth_max} + export APRUN_ATMANL="${launcher} -n ${npe_atmanlrun}" -elif [[ "${step}" = "atmensanalrun" ]]; then +elif [[ "${step}" = "atmensanlrun" ]]; then - export 
CFP_MP=${CFP_MP:-"YES"} - export USE_CFP=${USE_CFP:-"YES"} - export APRUNCFP="${launcher} -np \$ncmd ${mpmd_opt}" + nth_max=$((npe_node_max / npe_node_atmensanlrun)) - nth_max=$((npe_node_max / npe_node_atmensanalrun)) - - export NTHREADS_ATMENSANAL=${nth_atmensanalrun:-${nth_max}} - [[ ${NTHREADS_ATMENSANAL} -gt ${nth_max} ]] && export NTHREADS_ATMENSANAL=${nth_max} - export APRUN_ATMENSANAL="${launcher} -n ${npe_atmensanalrun}" + export NTHREADS_ATMENSANL=${nth_atmensanlrun:-${nth_max}} + [[ ${NTHREADS_ATMENSANL} -gt ${nth_max} ]] && export NTHREADS_ATMENSANL=${nth_max} + export APRUN_ATMENSANL="${launcher} -n ${npe_atmensanlrun}" elif [[ "${step}" = "aeroanlrun" ]]; then @@ -70,6 +61,14 @@ elif [[ "${step}" = "aeroanlrun" ]]; then [[ ${NTHREADS_AEROANL} -gt ${nth_max} ]] && export NTHREADS_AEROANL=${nth_max} export APRUN_AEROANL="${launcher} -n ${npe_aeroanlrun}" +elif [[ "${step}" = "landanlrun" ]]; then + + nth_max=$((npe_node_max / npe_node_landanlrun)) + + export NTHREADS_LANDANL=${nth_landanlrun:-${nth_max}} + [[ ${NTHREADS_LANDANL} -gt ${nth_max} ]] && export NTHREADS_LANDANL=${nth_max} + export APRUN_LANDANL="${launcher} -n ${npe_landanlrun}" + elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then export OMP_PLACES=cores @@ -116,27 +115,6 @@ elif [[ "${step}" = "sfcanl" ]]; then npe_sfcanl=${ntiles:-6} export APRUN_CYCLE="${launcher} -n ${npe_sfcanl}" -elif [[ "${step}" = "gldas" ]]; then - - export USE_CFP="YES" - export CFP_MP="NO" - - nth_max=$((npe_node_max / npe_node_gldas)) - - export NTHREADS_GLDAS=${nth_gldas:-${nth_max}} - [[ ${NTHREADS_GLDAS} -gt ${nth_max} ]] && export NTHREADS_GLDAS=${nth_max} - export APRUN_GLDAS="${launcher} -n ${npe_gldas} -ppn ${npe_node_gldas} --cpu-bind depth --depth ${NTHREADS_GLDAS}" - - export NTHREADS_GAUSSIAN=${nth_gaussian:-1} - [[ ${NTHREADS_GAUSSIAN} -gt ${nth_max} ]] && export NTHREADS_GAUSSIAN=${nth_max} - export APRUN_GAUSSIAN="${launcher} -n ${npe_gaussian} -ppn ${npe_node_gaussian} --cpu-bind 
depth --depth ${NTHREADS_GAUSSIAN}" - - # Must run data processing with exactly the number of tasks as time - # periods being processed. - export USE_CFP=${USE_CFP:-"YES"} - npe_gldas_data_proc=$((gldas_spinup_hours + 12)) - export APRUN_GLDAS_DATA_PROC="${launcher} -np ${npe_gldas_data_proc} ${mpmd_opt}" - elif [[ "${step}" = "eobs" ]]; then export OMP_PLACES=cores @@ -250,10 +228,6 @@ elif [[ "${step}" = "epos" ]]; then [[ ${NTHREADS_EPOS} -gt ${nth_max} ]] && export NTHREADS_EPOS=${nth_max} export APRUN_EPOS="${launcher} -n ${npe_epos} -ppn ${npe_node_epos} --cpu-bind depth --depth ${NTHREADS_EPOS}" -elif [[ "${step}" = "init" ]]; then - - export APRUN="${launcher}" - elif [[ "${step}" = "postsnd" ]]; then export MPICH_MPIIO_HINTS_DISPLAY=1 diff --git a/jobs/JGDAS_ATMOS_ANALYSIS_DIAG b/jobs/JGDAS_ATMOS_ANALYSIS_DIAG index a9e2ff14ef..6ad5c8f31b 100755 --- a/jobs/JGDAS_ATMOS_ANALYSIS_DIAG +++ b/jobs/JGDAS_ATMOS_ANALYSIS_DIAG @@ -7,8 +7,7 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "anal" -c "base anal analdiag" ############################################## # Set variables used in the script ############################################## -export CDATE=${CDATE:-${PDY}${cyc}} -export CDUMP=${CDUMP:-${RUN:-"gfs"}} +export CDUMP="${RUN/enkf}" export DO_CALC_ANALYSIS=${DO_CALC_ANALYSIS:-"YES"} @@ -16,47 +15,21 @@ export DO_CALC_ANALYSIS=${DO_CALC_ANALYSIS:-"YES"} # Begin JOB SPECIFIC work ############################################## -GDATE=$(${NDATE} -${assim_freq} ${CDATE}) -gPDY=$(echo ${GDATE} | cut -c1-8) -gcyc=$(echo ${GDATE} | cut -c9-10) -GDUMP=${GDUMP:-"gdas"} +# Ignore possible spelling error (nothing is misspelled) +# shellcheck disable=SC2153 +GDATE=$(${NDATE} -${assim_freq} ${PDY}${cyc}) +# shellcheck disable= +export gPDY=${GDATE:0:8} +export gcyc=${GDATE:8:2} +export GDUMP="gdas" +export GDUMP_ENS="enkf${GDUMP}" export OPREFIX="${CDUMP}.t${cyc}z." export GPREFIX="${GDUMP}.t${gcyc}z." -export APREFIX="${CDUMP}.t${cyc}z." 
-export GPREFIX_ENS="enkf${GDUMP}.t${gcyc}z." - - -if [ ${RUN_ENVIR} = "nco" -o ${ROTDIR_DUMP:-NO} = "YES" ]; then - export COMOUT=${COMOUT:-${ROTDIR}/${RUN}.${PDY}/${cyc}/atmos} - export COMIN_OBS=${COMIN_OBS:-$(compath.py ${envir}/obsproc/${obsproc_ver})/${RUN}.${PDY}/${cyc}/atmos} - export COMIN_GES_OBS=${COMIN_GES_OBS:-$(compath.py ${envir}/obsproc/${obsproc_ver})/${GDUMP}.${gPDY}/${gcyc}/atmos} -else - export COMOUT="${ROTDIR}/${CDUMP}.${PDY}/${cyc}/atmos" - export COMIN_OBS="${COMIN_OBS:-${ROTDIR}/${CDUMP}.${PDY}/${cyc}/obs}" - export COMIN_GES_OBS="${ROTDIR}/${GDUMP}.${gPDY}/${gcyc}/obs" -fi -mkdir -m 775 -p ${COMOUT} -# COMIN_GES and COMIN_GES_ENS are used in script -export COMIN_GES="${ROTDIR}/${GDUMP}.${gPDY}/${gcyc}/atmos" -export COMIN_GES_ENS="${ROTDIR}/enkfgdas.${gPDY}/${gcyc}" - - -export ATMGES="${COMIN_GES}/${GPREFIX}atmf006.nc" -if [ ! -f ${ATMGES} ]; then - echo "FATAL ERROR: FILE MISSING: ATMGES = ${ATMGES}" - exit 1 -fi - - -if [ ${DOHYBVAR} = "YES" ]; then - export ATMGES_ENSMEAN="${COMIN_GES_ENS}/${GPREFIX_ENS}atmf006.ensmean.nc" - if [ ! -f ${ATMGES_ENSMEAN} ]; then - echo "FATAL ERROR: FILE MISSING: ATMGES_ENSMEAN = ${ATMGES_ENSMEAN}" - exit 2 - fi -fi +export APREFIX="${RUN}.t${cyc}z." 
+YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +mkdir -m 775 -p "${COM_ATMOS_ANALYSIS}" ############################################################### # Run relevant script diff --git a/jobs/JGDAS_ATMOS_CHGRES_FORENKF b/jobs/JGDAS_ATMOS_CHGRES_FORENKF index da93058ace..1bbed53586 100755 --- a/jobs/JGDAS_ATMOS_CHGRES_FORENKF +++ b/jobs/JGDAS_ATMOS_CHGRES_FORENKF @@ -7,9 +7,7 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "anal" -c "base anal echgres" ############################################## # Set variables used in the script ############################################## -export CDATE=${CDATE:-${PDY}${cyc}} -export CDUMP=${CDUMP:-${RUN:-"gfs"}} -export COMPONENT="atmos" +export CDUMP=${RUN/enkf} export DO_CALC_ANALYSIS=${DO_CALC_ANALYSIS:-"YES"} @@ -17,19 +15,11 @@ export DO_CALC_ANALYSIS=${DO_CALC_ANALYSIS:-"YES"} # Begin JOB SPECIFIC work ############################################## -export APREFIX="${CDUMP/enkf}.t${cyc}z." -export APREFIX_ENS="${CDUMP}.t${cyc}z." +export APREFIX="${CDUMP}.t${cyc}z." +export APREFIX_ENS="${RUN}.t${cyc}z." 
-if [ ${RUN_ENVIR} = "nco" -o ${ROTDIR_DUMP:-NO} = "YES" ]; then - export COMOUT=${COMOUT:-${ROTDIR}/${RUN/enkf}.${PDY}/${cyc}/atmos} - export COMOUT_ENS=${COMOUT_ENS:-${ROTDIR}/${RUN}.${PDY}/${cyc}} -else - export COMOUT="${ROTDIR}/${RUN/enkf}.${PDY}/${cyc}/atmos" - export COMOUT_ENS="${ROTDIR}/${RUN}.${PDY}/${cyc}" -fi -mkdir -m 775 -p ${COMOUT} - -export ATMFCST_ENSRES="${COMOUT_ENS}/mem001/atmos/${APREFIX_ENS}atmf006.nc" +RUN=${CDUMP} YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY +MEMDIR="mem001" YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY_MEM:COM_ATMOS_HISTORY_TMPL ############################################################### # Run relevant script diff --git a/jobs/JGDAS_ATMOS_GEMPAK b/jobs/JGDAS_ATMOS_GEMPAK index bec9c80867..f0131ffb94 100755 --- a/jobs/JGDAS_ATMOS_GEMPAK +++ b/jobs/JGDAS_ATMOS_GEMPAK @@ -35,32 +35,37 @@ export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} # Specify NET and RUN Name and model #################################### export model=${model:-gdas} -export COMPONENT="atmos" ############################################## # Define COM directories ############################################## -export COMIN=${COMIN:-$(compath.py ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/${COMPONENT}} -export COMOUT=${COMOUT:-$(compath.py -o ${NET}/${gfs_ver}/${RUN}.${PDY})/${cyc}/${COMPONENT}/gempak} +for grid in 0p25 0p50 1p00; do + GRID=${grid} YMD=${PDY} HH=${cyc} generate_com -rx "COM_ATMOS_GRIB_${grid}:COM_ATMOS_GRIB_TMPL" +done -if [ ${SENDCOM} = YES ] ; then - mkdir -m 775 -p ${COMOUT} -fi +for grid in 1p00 0p25; do + prod_dir="COM_ATMOS_GEMPAK_${grid}" + GRID=${grid} YMD=${PDY} HH=${cyc} generate_com -rx "COM_ATMOS_GEMPAK_${grid}:COM_ATMOS_GEMPAK_TMPL" + + if [[ ${SENDCOM} == YES && ! 
-d "${!prod_dir}" ]] ; then + mkdir -m 775 -p "${!prod_dir}" + fi +done -# TODO: These actions belong in an ex-script not a j-job (#1219) -if [ -f ${DATA}/poescript ]; then - rm ${DATA}/poescript +# TODO: These actions belong in an ex-script not a j-job +if [[ -f poescript ]]; then + rm -f poescript fi ######################################################## # Execute the script. -echo "${SRCgfs}/exgdas_atmos_nawips.sh gdas 009 GDAS_GEMPAK " >> poescript +echo "${SRCgfs}/exgdas_atmos_nawips.sh gdas 009 GDAS_GEMPAK ${COM_ATMOS_GEMPAK_1p00}" >> poescript ######################################################## ######################################################## # Execute the script for quater-degree grib -echo "${SRCgfs}/exgdas_atmos_nawips.sh gdas_0p25 009 GDAS_GEMPAK " >>poescript +echo "${SRCgfs}/exgdas_atmos_nawips.sh gdas_0p25 009 GDAS_GEMPAK ${COM_ATMOS_GEMPAK_0p25}" >> poescript ######################################################## cat poescript @@ -76,9 +81,8 @@ export OMP_NUM_THREADS=${threads} APRUN="mpiexec -l -np ${ntasks} --cpu-bind verbose,core cfp" APRUN_GEMPAKCFP=${APRUN_GEMPAKCFP:-${APRUN}} -APRUNCFP=$(eval echo ${APRUN_GEMPAKCFP}) -${APRUNCFP} ${DATA}/poescript +${APRUN_GEMPAKCFP} ${DATA}/poescript export err=$?; err_chk ############################################ diff --git a/jobs/JGDAS_ATMOS_GLDAS b/jobs/JGDAS_ATMOS_GLDAS deleted file mode 100755 index dee6b4c9e3..0000000000 --- a/jobs/JGDAS_ATMOS_GLDAS +++ /dev/null @@ -1,85 +0,0 @@ -#! 
/usr/bin/env bash - -source "${HOMEgfs:?}/ush/preamble.sh" -source "${HOMEgfs}/ush/jjob_header.sh" -e "gldas" -c "base gldas" - -if [[ "${cyc:?}" -ne "${gldas_cyc:?}" ]]; then - echo "GLDAS only runs for ${gldas_cyc} cycle; Skip GLDAS step for cycle ${cyc}" - rm -Rf "${DATA}" - exit 0 -fi - -gldas_spinup_hours=${gldas_spinup_hours-:72} -xtime=$((gldas_spinup_hours+12)) -if [[ "${CDATE}" -le "$(${NDATE:?} +"${xtime}" "${SDATE:?}")" ]]; then - echo "GLDAS needs fluxes as forcing from cycles in previous ${xtime} hours" - echo "starting from ${SDATE}. This gldas cycle is skipped" - rm -Rf "${DATA}" - exit 0 -fi - - -############################################## -# Set variables used in the exglobal script -############################################## -export CDATE=${CDATE:-${PDY}${cyc}} -export CDUMP=${CDUMP:-${RUN:-"gdas"}} -export COMPONENT="atmos" - - -############################################## -# Begin JOB SPECIFIC work -############################################## -export gldas_ver=${gldas_ver:-v2.3.0} -export HOMEgldas=${HOMEgldas:-${HOMEgfs}} -export FIXgldas=${FIXgldas:-${HOMEgldas}/fix/gldas} -export PARMgldas=${PARMgldas:-${HOMEgldas}/parm/gldas} -export EXECgldas=${EXECgldas:-${HOMEgldas}/exec} -export USHgldas=${USHgldas:-${HOMEgldas}/ush} -export PARA_CONFIG=${HOMEgfs}/parm/config/config.gldas - -if [[ "${RUN_ENVIR}" = "nco" ]]; then - export COMIN=${COMIN:-${ROTDIR}/${RUN}.${PDY}/${cyc}/atmos} - export COMOUT=${COMOUT:-${ROTDIR}/${RUN}.${PDY}/${cyc}/atmos} -else - export COMIN="${ROTDIR}/${CDUMP}.${PDY}/${cyc}/atmos" - export COMOUT="${ROTDIR}/${CDUMP}.${PDY}/${cyc}/atmos" -fi -if [[ ! 
-d ${COMOUT} ]]; then - mkdir -p "${COMOUT}" - chmod 775 "${COMOUT}" -fi - -export COMINgdas=${COMINgdas:-${ROTDIR}} -export DCOMIN=${DCOMIN:-${DCOMROOT:-"/lfs/h1/ops/prod/dcom"}} - -export model=${model:-noah} -export MODEL=${MODEL:-"${model} |tr 'a-z' 'A-Z'"} - - -############################################################### -# Run relevant exglobal script - -${GLDASSH:-${HOMEgldas}/scripts/exgdas_atmos_gldas.sh} -status=$? -[[ ${status} -ne 0 ]] && exit "${status}" - -############################################## -# End JOB SPECIFIC work -############################################## - -############################################## -# Final processing -############################################## -if [[ -e "${pgmout}" ]] ; then - cat "${pgmout}" -fi - -########################################## -# Remove the Temporary working directory -########################################## -cd "${DATAROOT}" || exit 1 -[[ ${KEEPDATA:?} = "NO" ]] && rm -rf "${DATA}" - -exit 0 - diff --git a/jobs/JGDAS_ATMOS_VERFOZN b/jobs/JGDAS_ATMOS_VERFOZN index e16cb6d548..deccc0b28e 100755 --- a/jobs/JGDAS_ATMOS_VERFOZN +++ b/jobs/JGDAS_ATMOS_VERFOZN @@ -6,12 +6,6 @@ source "${HOMEgfs}/ush/preamble.sh" source "${HOMEgfs}/ush/jjob_header.sh" -e "vrfy" -c "base vrfy" - -############################### -# Specify NET, RUN, and COMPONENT name -############################## -export COMPONENT="atmos" - export OZNMON_SUFFIX=${OZNMON_SUFFIX:-${NET}} #--------------------------------------------- @@ -39,20 +33,20 @@ export USHoznmon=${USHoznmon:-${HOMEoznmon}/ush} # determine PDY and cyc for previous cycle ############################################# -cdate=$(${NDATE} -6 ${PDY}${cyc}) -echo 'pdate = ${pdate}' +pdate=$(${NDATE} -6 ${PDY}${cyc}) +echo "pdate = ${pdate}" -export P_PDY=$(echo ${cdate} | cut -c1-8) -export p_cyc=$(echo ${cdate} | cut -c9-10) +export P_PDY=${pdate:0:8} +export p_cyc=${pdate:8:2} #--------------------------------------------- # OZN_TANKDIR - WHERE OUTPUT DATA 
WILL RESIDE # export OZN_TANKDIR=${OZN_TANKDIR:-$(compath.py ${envir}/${NET}/${gfs_ver})} -export TANKverf_ozn=${TANKverf_ozn:-${OZN_TANKDIR}/${RUN}.${PDY}/${cyc}/${COMPONENT}/oznmon} -export TANKverf_oznM1=${TANKverf_oznM1:-${OZN_TANKDIR}/${RUN}.${P_PDY}/${p_cyc}/${COMPONENT}/oznmon} -export COM_IN=${COM_IN:-$(compath.py ${envir}/${NET}/${gfs_ver})} -export COMIN=${COMIN:-${COM_IN}/${RUN}.${PDY}/${cyc}/${COMPONENT}} +export TANKverf_ozn=${TANKverf_ozn:-${OZN_TANKDIR}/${RUN}.${PDY}/${cyc}/atmos/oznmon} +export TANKverf_oznM1=${TANKverf_oznM1:-${OZN_TANKDIR}/${RUN}.${P_PDY}/${p_cyc}/atmos/oznmon} + +YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS if [[ ! -d ${TANKverf_ozn} ]]; then mkdir -p -m 775 ${TANKverf_ozn} @@ -69,7 +63,7 @@ fi # Set necessary environment variables # export OZN_AREA=${OZN_AREA:-glb} -export oznstat=${oznstat:-${COMIN}/gdas.t${cyc}z.oznstat} +export oznstat=${oznstat:-${COM_ATMOS_ANALYSIS}/gdas.t${cyc}z.oznstat} #------------------------------------------------------- diff --git a/jobs/JGDAS_ATMOS_VERFRAD b/jobs/JGDAS_ATMOS_VERFRAD index 300e6eff1a..42e112c74f 100755 --- a/jobs/JGDAS_ATMOS_VERFRAD +++ b/jobs/JGDAS_ATMOS_VERFRAD @@ -42,31 +42,21 @@ parm_file=${parm_file:-${PARMmon}/da_mon.parm} # determine PDY and cyc for previous cycle ############################################# -cdate=$(${NDATE} -6 ${PDY}${cyc}) -echo 'pdate = ${pdate}' +pdate=$(${NDATE} -6 ${PDY}${cyc}) +echo "pdate = ${pdate}" -export P_PDY=$(echo ${cdate} | cut -c1-8) -export p_cyc=$(echo ${cdate} | cut -c9-10) +export P_PDY=${pdate:0:8} +export p_cyc=${pdate:8:2} ############################################# # COMOUT - WHERE GSI OUTPUT RESIDES # TANKverf - WHERE OUTPUT DATA WILL RESIDE ############################################# export TANKverf=${TANKverf:-$(compath.py ${envir}/${NET}/${gfs_ver})} -export TANKverf_rad=${TANKverf_rad:-${TANKverf}/${RUN}.${PDY}/${cyc}/${COMPONENT}/radmon} -export 
TANKverf_radM1=${TANKverf_radM1:-${TANKverf}/${RUN}.${P_PDY}/${p_cyc}/${COMPONENT}/radmon} -export COM_IN=${COM_IN:-$(compath.py ${envir}/${NET}/${gfs_ver})} -export COMIN=${COMIN:-${COM_IN}/${RUN}.${PDY}/${cyc}/${COMPONENT}} - -################################ -# backwards compatibility for -# gfs v15 which doesn't have -# a $COMPONENT in output path -################################ -if [[ ! -d ${COMIN} ]]; then - export COMIN=${COM_IN}/${RUN}.${PDY}/${cyc} -fi +export TANKverf_rad=${TANKverf_rad:-${TANKverf}/${RUN}.${PDY}/${cyc}/atmos/radmon} +export TANKverf_radM1=${TANKverf_radM1:-${TANKverf}/${RUN}.${P_PDY}/${p_cyc}/atmos/radmon} +YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS mkdir -p -m 775 ${TANKverf_rad} @@ -75,8 +65,8 @@ mkdir -p -m 775 ${TANKverf_rad} ######################################## export RAD_AREA=${RAD_AREA:-glb} -export biascr=${biascr:-${COMIN}/gdas.t${cyc}z.abias} -export radstat=${radstat:-${COMIN}/gdas.t${cyc}z.radstat} +export biascr=${biascr:-${COM_ATMOS_ANALYSIS}/gdas.t${cyc}z.abias} +export radstat=${radstat:-${COM_ATMOS_ANALYSIS}/gdas.t${cyc}z.radstat} echo " " echo "JOB HAS STARTED" diff --git a/jobs/JGDAS_ATMOS_VMINMON b/jobs/JGDAS_ATMOS_VMINMON index 67f50f5c1c..3f9c0d856f 100755 --- a/jobs/JGDAS_ATMOS_VMINMON +++ b/jobs/JGDAS_ATMOS_VMINMON @@ -6,8 +6,6 @@ source "${HOMEgfs}/ush/preamble.sh" source "${HOMEgfs}/ush/jjob_header.sh" -e "vrfy" -c "base vrfy" -export COMPONENT="atmos" - ########################################################### # obtain unique process id (pid) and make temp directories ########################################################### @@ -32,32 +30,29 @@ export USHminmon=${USHminmon:-${HOMEminmon}/ush} # determine PDY and cyc for previous cycle ############################################# -cdate=$(${NDATE} -6 ${PDY}${cyc}) -echo 'pdate = ${pdate}' +pdate=$(${NDATE} -6 ${PDY}${cyc}) +echo "pdate = ${pdate}" -export P_PDY=$(echo ${cdate} | cut -c1-8) -export p_cyc=$(echo ${cdate} | cut -c9-10) 
+export P_PDY=${pdate:0:8} +export p_cyc=${pdate:8:2} ############################################# # TANKverf - WHERE OUTPUT DATA WILL RESIDE ############################################# -export COM_IN=${COM_IN:-$(compath.py ${envir}/${NET}/${gfs_ver})} - -export M_TANKverf=${M_TANKverf:-${COM_IN}/${RUN}.${PDY}/${cyc}/${COMPONENT}/minmon} -export M_TANKverfM1=${M_TANKverfM1:-${COM_IN}/${RUN}.${P_PDY}/${p_cyc}/${COMPONENT}/minmon} +export M_TANKverf=${M_TANKverf:-${COM_IN}/${RUN}.${PDY}/${cyc}/atmos/minmon} +export M_TANKverfM1=${M_TANKverfM1:-${COM_IN}/${RUN}.${P_PDY}/${p_cyc}/atmos/minmon} -export COMIN=${COMIN:-${COM_IN}/${RUN}.${PDY}/${cyc}/${COMPONENT}} +YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS mkdir -p -m 775 ${M_TANKverf} - ######################################## # Set necessary environment variables ######################################## export CYCLE_INTERVAL=6 -export gsistat=${gsistat:-${COMIN}/gdas.t${cyc}z.gsistat} +export gsistat=${gsistat:-${COM_ATMOS_ANALYSIS}/gdas.t${cyc}z.gsistat} ######################################################## diff --git a/jobs/JGDAS_ENKF_ARCHIVE b/jobs/JGDAS_ENKF_ARCHIVE index e5131c4c67..37f4e17b9b 100755 --- a/jobs/JGDAS_ENKF_ARCHIVE +++ b/jobs/JGDAS_ENKF_ARCHIVE @@ -7,8 +7,11 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "earc" -c "base earc" ############################################## # Set variables used in the script ############################################## -export CDATE=${CDATE:-${PDY}${cyc}} -export CDUMP=${CDUMP:-${RUN:-"gdas"}} +export CDUMP=${RUN/enkf} + +YMD=${PDY} HH=${cyc} generate_com -rx COM_TOP +MEMDIR="ensstat" YMD=${PDY} HH=${cyc} generate_com -rx \ + COM_ATMOS_ANALYSIS_ENSSTAT:COM_ATMOS_ANALYSIS_TMPL ############################################################### # Run archive script diff --git a/jobs/JGDAS_ENKF_DIAG b/jobs/JGDAS_ENKF_DIAG index 384f253ff7..40f2968869 100755 --- a/jobs/JGDAS_ENKF_DIAG +++ b/jobs/JGDAS_ENKF_DIAG @@ -7,9 +7,7 @@ source 
"${HOMEgfs}/ush/jjob_header.sh" -e "eobs" -c "base anal eobs analdiag edi ############################################## # Set variables used in the script ############################################## -export CDATE=${CDATE:-${PDY}${cyc}} -export CDUMP=${CDUMP:-${RUN:-"gdas"}} -export COMPONENT="atmos" +export CDUMP="${RUN/enkf}" export MAKE_NSSTBUFR=${MAKE_NSSTBUFR:-"NO"} export MAKE_ACFTBUFR=${MAKE_ACFTBUFR:-"NO"} @@ -17,77 +15,72 @@ export MAKE_ACFTBUFR=${MAKE_ACFTBUFR:-"NO"} ############################################## # Begin JOB SPECIFIC work ############################################## +# Ignore possible spelling error (nothing is misspelled) +# shellcheck disable=SC2153 +GDATE=$(${NDATE} -"${assim_freq}" "${PDY}${cyc}") +# shellcheck disable= +export gPDY=${GDATE:0:8} +export gcyc=${GDATE:8:2} +export GDUMP="gdas" +export GDUMP_ENS="enkf${GDUMP}" -GDATE=$(${NDATE} -${assim_freq} ${CDATE}) -gPDY=$(echo ${GDATE} | cut -c1-8) -gcyc=$(echo ${GDATE} | cut -c9-10) -GDUMP=${GDUMP:-"gdas"} -GDUMP_ENS=${GDUMP_ENS:-"enkf${GDUMP}"} +export CASE=${CASE_ENS} -export CASE=${CASE_ENKF} -export CDUMP_OBS=${CDUMP_OBS:-${CDUMP/enkf}} -export CDUMP_CTL=${CDUMP_CTL:-${CDUMP/enkf}} +export OPREFIX="${CDUMP}.t${cyc}z." +export APREFIX="${RUN}.t${cyc}z." +export GPREFIX="${GDUMP_ENS}.t${gcyc}z." +GPREFIX_DET="${GDUMP}.t${gcyc}z." +RUN=${CDUMP} YMD=${PDY} HH=${cyc} generate_com -rx COM_OBS +MEMDIR="ensstat" YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS -export OPREFIX="${CDUMP_OBS}.t${cyc}z." -export APREFIX="${CDUMP_CTL}.t${cyc}z." -export APREFIX_ENS="${CDUMP}.t${cyc}z." -export GPREFIX="${GDUMP}.t${gcyc}z." -export GPREFIX_ENS="${GDUMP_ENS}.t${gcyc}z." 
+RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ + COM_OBS_PREV:COM_OBS_TMPL \ + COM_ATMOS_ANALYSIS_DET_PREV:COM_ATMOS_ANALYSIS_TMPL -if [ ${RUN_ENVIR} = "nco" -o ${ROTDIR_DUMP:-NO} = "YES" ]; then - export COMIN_OBS=${COMIN_OBS:-$(compath.py ${envir}/obsproc/${obsproc_ver})/${CDUMP_OBS}.${PDY}/${cyc}/${COMPONENT}} -else - export COMIN_OBS="${COMIN_OBS:-${ROTDIR}/${CDUMP_OBS}.${PDY}/${cyc}/obs}" -fi - -# COMIN_GES, COMIN_ANL COMIN_GES_ENS, and COMOUT are used in script -COMIN_GES_CTL="${ROTDIR}/gdas.${gPDY}/${gcyc}/${COMPONENT}" -export COMIN_ANL="${ROTDIR}/${CDUMP/enkf}.${PDY}/${cyc}/${COMPONENT}" -export COMIN_GES_ENS="${ROTDIR}/enkfgdas.${gPDY}/${gcyc}" -export COMOUT="${ROTDIR}/${CDUMP}.${PDY}/${cyc}" +MEMDIR="ensstat" RUN=${GDUMP_ENS} YMD=${gPDY} HH=${gcyc} generate_com -rx \ + COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL -export ATMGES_ENSMEAN="${COMIN_GES_ENS}/${GPREFIX_ENS}atmf006.ensmean.nc" +export ATMGES_ENSMEAN="${COM_ATMOS_HISTORY_PREV}/${GPREFIX}atmf006.ensmean.nc" if [ ! -f ${ATMGES_ENSMEAN} ]; then echo "FATAL ERROR: FILE MISSING: ATMGES_ENSMEAN = ${ATMGES_ENSMEAN}" exit 1 fi - # Link observational data -export PREPQC="${COMIN_OBS}/${OPREFIX}prepbufr" +export PREPQC="${COM_OBS}/${OPREFIX}prepbufr" if [[ ! 
-f ${PREPQC} ]]; then echo "WARNING: Global PREPBUFR FILE ${PREPQC} MISSING" fi -export TCVITL="${COMIN_ANL}/${OPREFIX}syndata.tcvitals.tm00" +export TCVITL="${COM_OBS}/${OPREFIX}syndata.tcvitals.tm00" if [[ ${DONST} = "YES" ]]; then - export NSSTBF="${COMIN_OBS}/${OPREFIX}nsstbufr" + export NSSTBF="${COM_OBS}/${OPREFIX}nsstbufr" fi -export PREPQCPF="${COMIN_OBS}/${OPREFIX}prepbufr.acft_profiles" +export PREPQCPF="${COM_OBS}/${OPREFIX}prepbufr.acft_profiles" # Guess Bias correction coefficients related to control -export GBIAS=${COMIN_GES_CTL}/${GPREFIX}abias -export GBIASPC=${COMIN_GES_CTL}/${GPREFIX}abias_pc -export GBIASAIR=${COMIN_GES_CTL}/${GPREFIX}abias_air -export GRADSTAT=${COMIN_GES_CTL}/${GPREFIX}radstat +export GBIAS=${COM_ATMOS_ANALYSIS_DET_PREV}/${GPREFIX_DET}abias +export GBIASPC=${COM_ATMOS_ANALYSIS_DET_PREV}/${GPREFIX_DET}abias_pc +export GBIASAIR=${COM_ATMOS_ANALYSIS_DET_PREV}/${GPREFIX_DET}abias_air +export GRADSTAT=${COM_ATMOS_ANALYSIS_DET_PREV}/${GPREFIX_DET}radstat # Bias correction coefficients related to ensemble mean -export ABIAS="${COMOUT}/${APREFIX_ENS}abias.ensmean" -export ABIASPC="${COMOUT}/${APREFIX_ENS}abias_pc.ensmean" -export ABIASAIR="${COMOUT}/${APREFIX_ENS}abias_air.ensmean" -export ABIASe="${COMOUT}/${APREFIX_ENS}abias_int.ensmean" +export ABIAS="${COM_ATMOS_ANALYSIS}/${APREFIX}abias.ensmean" +export ABIASPC="${COM_ATMOS_ANALYSIS}/${APREFIX}abias_pc.ensmean" +export ABIASAIR="${COM_ATMOS_ANALYSIS}/${APREFIX}abias_air.ensmean" +export ABIASe="${COM_ATMOS_ANALYSIS}/${APREFIX}abias_int.ensmean" # Diagnostics related to ensemble mean -export GSISTAT="${COMOUT}/${APREFIX_ENS}gsistat.ensmean" -export CNVSTAT="${COMOUT}/${APREFIX_ENS}cnvstat.ensmean" -export OZNSTAT="${COMOUT}/${APREFIX_ENS}oznstat.ensmean" -export RADSTAT="${COMOUT}/${APREFIX_ENS}radstat.ensmean" +export GSISTAT="${COM_ATMOS_ANALYSIS}/${APREFIX}gsistat.ensmean" +export CNVSTAT="${COM_ATMOS_ANALYSIS}/${APREFIX}cnvstat.ensmean" +export 
OZNSTAT="${COM_ATMOS_ANALYSIS}/${APREFIX}oznstat.ensmean" +export RADSTAT="${COM_ATMOS_ANALYSIS}/${APREFIX}radstat.ensmean" # Select observations based on ensemble mean export RUN_SELECT="YES" export USE_SELECT="NO" -export SELECT_OBS="${COMOUT}/${APREFIX_ENS}obsinput.ensmean" +export SELECT_OBS="${COM_ATMOS_ANALYSIS}/${APREFIX}obsinput.ensmean" export DIAG_SUFFIX="_ensmean" export DIAG_COMPRESS="NO" diff --git a/jobs/JGDAS_ENKF_ECEN b/jobs/JGDAS_ENKF_ECEN index d08f2e11b0..cd77eebb55 100755 --- a/jobs/JGDAS_ENKF_ECEN +++ b/jobs/JGDAS_ENKF_ECEN @@ -7,36 +7,36 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "ecen" -c "base ecen" ############################################## # Set variables used in the script ############################################## -export CDATE=${CDATE:-${PDY}${cyc}} -export CDUMP=${CDUMP:-${RUN:-"enkfgdas"}} -export COMPONENT="atmos" - +export CDUMP="${RUN/enkf}" ############################################## # Begin JOB SPECIFIC work ############################################## - -GDATE=$(${NDATE} -${assim_freq} ${CDATE}) -gPDY=$(echo ${GDATE} | cut -c1-8) -gcyc=$(echo ${GDATE} | cut -c9-10) -GDUMP=${GDUMP:-"gdas"} -GDUMP_ENS=${GDUMP_ENS:-"enkf${GDUMP}"} - -export CASE=${CASE_ENKF} - -export CDUMP_OBS=${CDUMP_OBS:-${CDUMP/enkf}} - -export OPREFIX="${CDUMP_OBS}.t${cyc}z." -export APREFIX="${CDUMP/enkf}.t${cyc}z." -export APREFIX_ENS="${CDUMP}.t${cyc}z." +# Ignore possible spelling error (nothing is misspelled) +# shellcheck disable=SC2153 +GDATE=$(${NDATE} -"${assim_freq}" "${PDY}${cyc}") +# shellcheck disable= +export gPDY=${GDATE:0:8} +export gcyc=${GDATE:8:2} +export GDUMP="gdas" +export GDUMP_ENS="enkf${GDUMP}" + +export CASE=${CASE_ENS} + +export OPREFIX="${CDUMP}.t${cyc}z." +export APREFIX="${CDUMP}.t${cyc}z." +export APREFIX_ENS="${RUN}.t${cyc}z." export GPREFIX="${GDUMP}.t${gcyc}z." export GPREFIX_ENS="${GDUMP_ENS}.t${gcyc}z." 
-# COMIN, COMIN_ENS and COMIN_GES_ENS are used in script -export COMIN="${ROTDIR}/${CDUMP/enkf}.${PDY}/${cyc}/atmos" -export COMIN_ENS="${ROTDIR}/${CDUMP}.${PDY}/${cyc}" -export COMOUT_ENS="${ROTDIR}/${CDUMP}.${PDY}/${cyc}" -export COMIN_GES_ENS="${ROTDIR}/${GDUMP_ENS}.${gPDY}/${gcyc}" +RUN=${CDUMP} YMD=${PDY} HH=${cyc} generate_com -rx \ + COM_ATMOS_ANALYSIS_DET:COM_ATMOS_ANALYSIS_TMPL + +MEMDIR="ensstat" YMD=${PDY} HH=${cyc} generate_com -rx \ + COM_ATMOS_ANALYSIS_STAT:COM_ATMOS_ANALYSIS_TMPL + +MEMDIR="ensstat" RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ + COM_ATMOS_HISTORY_STAT_PREV:COM_ATMOS_HISTORY_TMPL ############################################################### diff --git a/jobs/JGDAS_ENKF_FCST b/jobs/JGDAS_ENKF_FCST index 8a30b9abe3..45d0ad8b1d 100755 --- a/jobs/JGDAS_ENKF_FCST +++ b/jobs/JGDAS_ENKF_FCST @@ -7,19 +7,16 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "efcs" -c "base fcst efcs" ############################################## # Set variables used in the script ############################################## -export CDATE=${CDATE:-${PDY}${cyc}} -export CDUMP=${CDUMP:-${RUN:-"enkfgdas"}} -export COMPONENT="atmos" +export CDUMP=${RUN/enkf} export rCDUMP="enkfgdas" ############################################## # Begin JOB SPECIFIC work ############################################## -export CASE=${CASE_ENKF} +export CASE=${CASE_ENS} -# COMOUT is used in script -export COMOUT="${ROTDIR}/${CDUMP}.${PDY}/${cyc}" +YMD=${PDY} HH=${cyc} generate_com -rx COM_TOP # Forecast length for EnKF forecast @@ -27,7 +24,6 @@ export FHMIN=${FHMIN_ENKF} export FHOUT=${FHOUT_ENKF} export FHMAX=${FHMAX_ENKF} - # Get ENSBEG/ENSEND from ENSGRP and NMEM_EFCSGRP if [[ $CDUMP == "gfs" ]]; then export NMEM_EFCSGRP=${NMEM_EFCSGRP_GFS:-${NMEM_EFCSGRP:-1}} @@ -45,7 +41,7 @@ status=$? 
# Double check the status of members in ENSGRP -EFCSGRP=${COMOUT}/efcs.grp${ENSGRP} +EFCSGRP="${COM_TOP}/efcs.grp${ENSGRP}" npass=0 if [ -f ${EFCSGRP} ]; then npass=$(grep "PASS" ${EFCSGRP} | wc -l) @@ -83,5 +79,4 @@ fi cd ${DATAROOT} [[ ${KEEPDATA} = "NO" ]] && rm -rf ${DATA} - exit 0 diff --git a/jobs/JGDAS_ENKF_POST b/jobs/JGDAS_ENKF_POST index a9750d00ba..0f7039d614 100755 --- a/jobs/JGDAS_ENKF_POST +++ b/jobs/JGDAS_ENKF_POST @@ -7,9 +7,7 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "epos" -c "base epos" ############################################## # Set variables used in the script ############################################## -export CDATE=${CDATE:-${PDY}${cyc}} -export CDUMP=${CDUMP:-${RUN:-"enkfgdas"}} -export COMPONENT="atmos" +export CDUMP=${RUN/enkf} ############################################## @@ -17,12 +15,7 @@ export COMPONENT="atmos" ############################################## export GFS_NCIO=${GFS_NCIO:-"YES"} -export PREFIX="${CDUMP}.t${cyc}z." - -# COMIN, COMOUT are used in script -export COMIN="${ROTDIR}/${CDUMP}.${PDY}/${cyc}" -export COMOUT="${ROTDIR}/${CDUMP}.${PDY}/${cyc}" - +export PREFIX="${RUN}.t${cyc}z." 
export LEVS=$((LEVS-1)) diff --git a/jobs/JGDAS_ENKF_SELECT_OBS b/jobs/JGDAS_ENKF_SELECT_OBS index eba1b0c6c4..7c02512989 100755 --- a/jobs/JGDAS_ENKF_SELECT_OBS +++ b/jobs/JGDAS_ENKF_SELECT_OBS @@ -7,9 +7,7 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "eobs" -c "base anal eobs" ############################################## # Set variables used in the script ############################################## -export CDATE=${CDATE:-${PDY}${cyc}} -export CDUMP=${CDUMP:-${RUN:-"gdas"}} -export COMPONENT="atmos" +export CDUMP=${RUN/enkf} export MAKE_NSSTBUFR=${MAKE_NSSTBUFR:-"NO"} export MAKE_ACFTBUFR=${MAKE_ACFTBUFR:-"NO"} @@ -17,87 +15,91 @@ export MAKE_ACFTBUFR=${MAKE_ACFTBUFR:-"NO"} ############################################## # Begin JOB SPECIFIC work ############################################## +# Ignore possible spelling error (nothing is misspelled) +# shellcheck disable=SC2153 +GDATE=$(${NDATE} -${assim_freq} ${PDY}${cyc}) +# shellcheck disable= +export gPDY=${GDATE:0:8} +export gcyc=${GDATE:8:2} +export GDUMP="gdas" +export GDUMP_ENS="enkf${GDUMP}" + +export OPREFIX="${CDUMP}.t${cyc}z." +export APREFIX="${RUN}.t${cyc}z." +export GPREFIX="${GDUMP_ENS}.t${gcyc}z." +APREFIX_DET="${CDUMP}.t${cyc}z." +GPREFIX_DET="${GDUMP}.t${gcyc}z." -GDATE=$(${NDATE} -${assim_freq} ${CDATE}) -gPDY=$(echo ${GDATE} | cut -c1-8) -gcyc=$(echo ${GDATE} | cut -c9-10) -GDUMP=${GDUMP:-"gdas"} -GDUMP_ENS=${GDUMP_ENS:-"enkf${GDUMP}"} +export GSUFFIX=".ensmean.nc" -export CASE=${CASE_ENKF} -export CDUMP_OBS=${CDUMP_OBS:-${CDUMP/enkf}} +# Generate COM variables from templates +RUN=${CDUMP} YMD=${PDY} HH=${cyc} generate_com -rx COM_OBS +MEMDIR='ensstat' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +declare -rx COM_ATMOS_ANALYSIS_ENS="${COM_ATMOS_ANALYSIS}" -export OPREFIX="${CDUMP_OBS}.t${cyc}z." -export APREFIX="${CDUMP}.t${cyc}z." -export APREFIX_ANL="${CDUMP/enkf}.t${cyc}z." -export GPREFIX_CTL="${GDUMP}.t${gcyc}z." -export GPREFIX="${GDUMP_ENS}.t${gcyc}z." 
-export GSUFFIX=".ensmean.nc" +RUN=${CDUMP} YMD=${PDY} HH=${cyc} generate_com -r COM_ATMOS_ANALYSIS_DET:COM_ATMOS_ANALYSIS_TMPL -if [ ${RUN_ENVIR} = "nco" -o ${ROTDIR_DUMP:-NO} = "YES" ]; then - export COMIN_OBS=${COMIN_OBS:-$(compath.py ${envir}/obsproc/${obsproc_ver})/${CDUMP_OBS}.${PDY}/${cyc}/${COMPONENT}} - export COMIN_GES_OBS=${COMIN_GES_OBS:-$(compath.py ${envir}/obsproc/${obsproc_ver})/${GDUMP}.${gPDY}/${gcyc}/${COMPONENT}} -else - export COMIN_OBS="${COMIN_OBS:-${ROTDIR}/${CDUMP_OBS}.${PDY}/${cyc}/obs}" - export COMIN_GES_OBS="${ROTDIR}/${GDUMP}.${gPDY}/${gcyc}/obs" -fi +MEMDIR='ensstat' RUN=${GDUMP_ENS} YMD=${gPDY} HH=${gcyc} generate_com -rx \ + COM_ATMOS_ANALYSIS_PREV:COM_ATMOS_ANALYSIS_TMPL \ + COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL \ + +RUN="${GDUMP}" YMD=${gPDY} HH=${gcyc} generate_com -r COM_ATMOS_ANALYSIS_DET_PREV:COM_ATMOS_ANALYSIS_TMPL -# COMIN_GES, COMIN_ANL COMIN_GES_ENS, and COMOUT are used in script -COMIN_GES_CTL="${ROTDIR}/gdas.${gPDY}/${gcyc}/${COMPONENT}" -export COMIN_ANL="${ROTDIR}/${CDUMP/enkf}.${PDY}/${cyc}/${COMPONENT}" -export COMIN_GES_ENS="${ROTDIR}/enkfgdas.${gPDY}/${gcyc}" -export COMIN_GES=${COMIN_GES_ENS} -export COMOUT="${ROTDIR}/${CDUMP}.${PDY}/${cyc}" +mkdir -m 775 -p "${COM_ATMOS_ANALYSIS}" -export ATMGES_ENSMEAN="${COMIN_GES_ENS}/${GPREFIX}atmf006${GSUFFIX}" +export ATMGES_ENSMEAN="${COM_ATMOS_HISTORY_PREV}/${GPREFIX}atmf006${GSUFFIX}" if [[ ! -f ${ATMGES_ENSMEAN} ]]; then echo "FATAL ERROR: FILE MISSING: ATMGES_ENSMEAN = ${ATMGES_ENSMEAN}" exit 1 fi -export LEVS=$(${NCDUMP} -h ${ATMGES_ENSMEAN} | grep -i "pfull" | head -1 | awk -F" = " '{print $2}' | awk -F" " '{print $1}') # get LEVS +# Ignore masking of chained commands and possible misspelling warning +# shellcheck disable=SC2153,SC2312 +LEVS=$(${NCDUMP} -h "${ATMGES_ENSMEAN}" | grep -i "pfull" | head -1 | awk -F" = " '{print $2}' | awk -F" " '{print $1}') # get LEVS +# shellcheck disable= status=$? 
-[[ ${status} -ne 0 ]] && exit ${status} +[[ ${status} -ne 0 ]] && exit "${status}" +export LEVS # Link observational data -export PREPQC="${COMIN_OBS}/${OPREFIX}prepbufr" +export PREPQC="${COM_OBS}/${OPREFIX}prepbufr" if [[ ! -f ${PREPQC} ]]; then echo "WARNING: Global PREPBUFR FILE ${PREPQC} MISSING" fi -export TCVITL="${COMIN_ANL}/${OPREFIX}syndata.tcvitals.tm00" +export TCVITL="${COM_OBS}/${APREFIX_DET}syndata.tcvitals.tm00" if [[ ${DONST} = "YES" ]]; then - export NSSTBF="${COMIN_OBS}/${OPREFIX}nsstbufr" + export NSSTBF="${COM_OBS}/${OPREFIX}nsstbufr" fi -export PREPQCPF="${COMIN_OBS}/${OPREFIX}prepbufr.acft_profiles" +export PREPQCPF="${COM_OBS}/${OPREFIX}prepbufr.acft_profiles" # Deterministic analysis and increment files -export SFCANL="${COMIN_ANL}/${APREFIX_ANL}sfcanl.nc" -export DTFANL="${COMIN_ANL}/${APREFIX_ANL}dtfanl.nc" -export ATMANL="${COMIN_ANL}/${APREFIX_ANL}atmanl.nc" -export ATMINC="${COMIN_ANL}/${APREFIX_ANL}atminc.nc" +export SFCANL="${COM_ATMOS_ANALYSIS_DET}/${APREFIX_DET}sfcanl.nc" +export DTFANL="${COM_ATMOS_ANALYSIS_DET}/${APREFIX_DET}dtfanl.nc" +export ATMANL="${COM_ATMOS_ANALYSIS_DET}/${APREFIX_DET}atmanl.nc" +export ATMINC="${COM_ATMOS_ANALYSIS_DET}/${APREFIX_DET}atminc.nc" # Guess Bias correction coefficients related to control -export GBIAS=${COMIN_GES_CTL}/${GPREFIX_CTL}abias -export GBIASPC=${COMIN_GES_CTL}/${GPREFIX_CTL}abias_pc -export GBIASAIR=${COMIN_GES_CTL}/${GPREFIX_CTL}abias_air -export GRADSTAT=${COMIN_GES_CTL}/${GPREFIX_CTL}radstat +export GBIAS=${COM_ATMOS_ANALYSIS_DET_PREV}/${GPREFIX_DET}abias +export GBIASPC=${COM_ATMOS_ANALYSIS_DET_PREV}/${GPREFIX_DET}abias_pc +export GBIASAIR=${COM_ATMOS_ANALYSIS_DET_PREV}/${GPREFIX_DET}abias_air +export GRADSTAT=${COM_ATMOS_ANALYSIS_DET_PREV}/${GPREFIX_DET}radstat # Bias correction coefficients related to ensemble mean -export ABIAS="${COMOUT}/${APREFIX}abias.ensmean" -export ABIASPC="${COMOUT}/${APREFIX}abias_pc.ensmean" -export ABIASAIR="${COMOUT}/${APREFIX}abias_air.ensmean" 
-export ABIASe="${COMOUT}/${APREFIX}abias_int.ensmean" +export ABIAS="${COM_ATMOS_ANALYSIS}/${APREFIX}abias.ensmean" +export ABIASPC="${COM_ATMOS_ANALYSIS}/${APREFIX}abias_pc.ensmean" +export ABIASAIR="${COM_ATMOS_ANALYSIS}/${APREFIX}abias_air.ensmean" +export ABIASe="${COM_ATMOS_ANALYSIS}/${APREFIX}abias_int.ensmean" # Diagnostics related to ensemble mean -export GSISTAT="${COMOUT}/${APREFIX}gsistat.ensmean" -export CNVSTAT="${COMOUT}/${APREFIX}cnvstat.ensmean" -export OZNSTAT="${COMOUT}/${APREFIX}oznstat.ensmean" -export RADSTAT="${COMOUT}/${APREFIX}radstat.ensmean" +export GSISTAT="${COM_ATMOS_ANALYSIS}/${APREFIX}gsistat.ensmean" +export CNVSTAT="${COM_ATMOS_ANALYSIS}/${APREFIX}cnvstat.ensmean" +export OZNSTAT="${COM_ATMOS_ANALYSIS}/${APREFIX}oznstat.ensmean" +export RADSTAT="${COM_ATMOS_ANALYSIS}/${APREFIX}radstat.ensmean" # Select observations based on ensemble mean export RUN_SELECT="YES" export USE_SELECT="NO" -export SELECT_OBS="${COMOUT}/${APREFIX}obsinput.ensmean" +export SELECT_OBS="${COM_ATMOS_ANALYSIS}/${APREFIX}obsinput.ensmean" export DIAG_SUFFIX="_ensmean" diff --git a/jobs/JGDAS_ENKF_SFC b/jobs/JGDAS_ENKF_SFC index 075c285ba5..3214812db8 100755 --- a/jobs/JGDAS_ENKF_SFC +++ b/jobs/JGDAS_ENKF_SFC @@ -7,38 +7,38 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "esfc" -c "base esfc" ############################################## # Set variables used in the script ############################################## -export CDATE=${CDATE:-${PDY}${cyc}} -export CDUMP=${CDUMP:-${RUN:-"gdas"}} -export COMPONENT="atmos" - +export CDUMP="${RUN/enkf}" ############################################## # Begin JOB SPECIFIC work ############################################## +# Ignore possible spelling error (nothing is misspelled) +# shellcheck disable=SC2153 +GDATE=$(${NDATE} -"${assim_freq}" "${PDY}${cyc}") +# shellcheck disable= +export gPDY=${GDATE:0:8} +export gcyc=${GDATE:8:2} +export GDUMP="gdas" +export GDUMP_ENS="enkf${GDUMP}" + +export 
OPREFIX="${CDUMP}.t${cyc}z." +export GPREFIX="${GDUMP}.t${gcyc}z." +export APREFIX="${CDUMP}.t${cyc}z." -GDATE=$(${NDATE} -${assim_freq} ${CDATE}) -gPDY=$(echo ${GDATE} | cut -c1-8) -gcyc=$(echo ${GDATE} | cut -c9-10) -GDUMP=${GDUMP:-"gdas"} -GDUMP_ENS=${GDUMP_ENS:-"enkf${GDUMP}"} - -export CASE=${CASE_ENKF} - -export CDUMP_OBS=${CDUMP_OBS:-${CDUMP/enkf}} +export CASE=${CASE_ENS} -export OPREFIX="${CDUMP_OBS}.t${cyc}z." -export APREFIX="${CDUMP/enkf}.t${cyc}z." -export APREFIX_ENS="${CDUMP}.t${cyc}z." +export OPREFIX="${CDUMP}.t${cyc}z." +export APREFIX="${CDUMP}.t${cyc}z." +export APREFIX_ENS="${RUN}.t${cyc}z." export GPREFIX="${GDUMP}.t${gcyc}z." export GPREFIX_ENS="${GDUMP_ENS}.t${gcyc}z." -# COMIN, COMIN_ENS and COMIN_GES_ENS are used in script -export COMIN="${ROTDIR}/${RUN/enkf}.${PDY}/${cyc}/atmos" -export COMIN_GES="${ROTDIR}/${GDUMP}.${gPDY}/${gcyc}/atmos" -export COMIN_ENS="${ROTDIR}/${CDUMP}.${PDY}/${cyc}" -export COMOUT_ENS="${ROTDIR}/${CDUMP}.${PDY}/${cyc}" -export COMIN_GES_ENS="${ROTDIR}/${GDUMP_ENS}.${gPDY}/${gcyc}" +RUN=${CDUMP} YMD=${PDY} HH=${cyc} generate_com -rx COM_OBS \ + COM_ATMOS_ANALYSIS_DET:COM_ATMOS_ANALYSIS_TMPL +RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ + COM_OBS_PREV:COM_OBS_TMPL \ + COM_ATMOS_ANALYSIS_DET_PREV:COM_ATMOS_ANALYSIS_TMPL ############################################################### # Run relevant script diff --git a/jobs/JGDAS_ENKF_UPDATE b/jobs/JGDAS_ENKF_UPDATE index f8f959acc8..1050529165 100755 --- a/jobs/JGDAS_ENKF_UPDATE +++ b/jobs/JGDAS_ENKF_UPDATE @@ -7,25 +7,29 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "eupd" -c "base anal eupd" ############################################## # Set variables used in the script ############################################## -export CDATE=${CDATE:-${PDY}${cyc}} -export CDUMP=${CDUMP:-${RUN:-"enkfgdas"}} -export COMPONENT="atmos" +export CDUMP="${RUN/enkf}" ############################################## # Begin JOB SPECIFIC work 
############################################## +# Ignore possible spelling error (nothing is misspelled) +# shellcheck disable=SC2153 +GDATE=$(${NDATE} -"${assim_freq}" "${PDY}${cyc}") +# shellcheck disable= +export gPDY=${GDATE:0:8} +export gcyc=${GDATE:8:2} +export GDUMP="gdas" +export GDUMP_ENS="enkf${GDUMP}" -GDATE=$(${NDATE} -${assim_freq} ${CDATE}) -gPDY=$(echo ${GDATE} | cut -c1-8) -gcyc=$(echo ${GDATE} | cut -c9-10) +export APREFIX="${RUN}.t${cyc}z." +export GPREFIX="${GDUMP_ENS}.t${gcyc}z." -export APREFIX="${CDUMP}.t${cyc}z." -export GPREFIX="enkfgdas.t${gcyc}z." +MEMDIR="ensstat" YMD=${PDY} HH=${cyc} generate_com -rx \ + COM_ATMOS_ANALYSIS_STAT:COM_ATMOS_ANALYSIS_TMPL -# COMIN_GES_ENS and COMOUT_ANL_ENS are used in script -export COMIN_GES_ENS="${ROTDIR}/enkfgdas.${gPDY}/${gcyc}" -export COMOUT_ANL_ENS="${ROTDIR}/${CDUMP}.${PDY}/${cyc}" +MEMDIR="ensstat" RUN="enkfgdas" YMD=${gPDY} HH=${gcyc} generate_com -rx \ + COM_ATMOS_HISTORY_STAT_PREV:COM_ATMOS_HISTORY_TMPL ############################################################### @@ -40,7 +44,7 @@ status=$? # Send Alerts ############################################## if [ ${SENDDBN} = YES ] ; then - ${DBNROOT}/bin/dbn_alert MODEL ENKF1_MSC_enkfstat ${job} ${COMOUT_ANL_ENS}/${APREFIX}enkfstat + "${DBNROOT}/bin/dbn_alert" "MODEL" "ENKF1_MSC_enkfstat" "${job}" "${COM_ATMOS_ANALYSIS_STAT}/${APREFIX}enkfstat" fi diff --git a/jobs/JGDAS_FIT2OBS b/jobs/JGDAS_FIT2OBS index f6a1b3631d..d673845404 100755 --- a/jobs/JGDAS_FIT2OBS +++ b/jobs/JGDAS_FIT2OBS @@ -58,7 +58,7 @@ if [[ ${CDATE} -gt ${SDATE} ]]; then # RUN FIT2OBS VERIFICATION ############################################## - "${fitdir}/batrun/excfs_gdas_vrfyfits.sh.ecf" + "${SCRIPTSfit2obs}/excfs_gdas_vrfyfits.sh" status=$? 
[[ ${status} -ne 0 ]] && exit "${status}" diff --git a/jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_POST b/jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_POST deleted file mode 100755 index e44e607581..0000000000 --- a/jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_POST +++ /dev/null @@ -1,63 +0,0 @@ -#!/bin/bash - -source "${HOMEgfs}/ush/preamble.sh" -source "${HOMEgfs}/ush/jjob_header.sh" -e "atmanalpost" -c "base atmanal atmanalpost" - - -############################################## -# Set variables used in the script -############################################## -export CDATE=${CDATE:-${PDY}${cyc}} -export CDUMP=${CDUMP:-${RUN:-"gfs"}} -export COMPONENT="atmos" - -############################################## -# Begin JOB SPECIFIC work -############################################## - -export GDATE=$(date +%Y%m%d%H -d "${CDATE:0:8} ${CDATE:8:2} - ${assim_freq} hours") -gPDY=${GDATE:0:8} -export gcyc=${GDATE:8:2} -export GDUMP=${GDUMP:-"gdas"} - -export OPREFIX="${CDUMP}.t${cyc}z." -export GPREFIX="${GDUMP}.t${gcyc}z." -export APREFIX="${CDUMP}.t${cyc}z." - -export COMOUT=${COMOUT:-${ROTDIR}/${CDUMP}.${PDY}/${cyc}/${COMPONENT}} - -mkdir -p ${COMOUT} - -# COMIN_GES and COMIN_GES_ENS are used in script -export COMIN_GES="${ROTDIR}/${GDUMP}.${gPDY}/${gcyc}/${COMPONENT}" -export COMIN_GES_ENS="${ROTDIR}/enkf${GDUMP}.${gPDY}/${gcyc}/${COMPONENT}" - -# Add UFSDA to PYTHONPATH -export PYTHONPATH=${HOMEgfs}/sorc/gdas.cd/ush/:${PYTHONPATH} - -############################################################### -# Run relevant script - -EXSCRIPT=${GDASPOSTPY:-${HOMEgfs}/scripts/exgdas_global_atmos_analysis_post.py} -${EXSCRIPT} -status=$? 
-[[ ${status} -ne 0 ]] && exit ${status} - -############################################## -# End JOB SPECIFIC work -############################################## - -############################################## -# Final processing -############################################## -if [ -e "${pgmout}" ] ; then - cat ${pgmout} -fi - -########################################## -# Remove the Temporary working directory -########################################## -cd ${DATAROOT} -[[ ${KEEPDATA} = "NO" ]] && rm -rf ${DATA} - -exit 0 diff --git a/jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_PREP b/jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_PREP deleted file mode 100755 index 093fccdd5a..0000000000 --- a/jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_PREP +++ /dev/null @@ -1,62 +0,0 @@ -#!/bin/bash - -source "${HOMEgfs}/ush/preamble.sh" -source "${HOMEgfs}/ush/jjob_header.sh" -e "atmanalprep" -c "base atmanal atmanalprep" - - -############################################## -# Set variables used in the script -############################################## -export CDATE=${CDATE:-${PDY}${cyc}} -export CDUMP=${CDUMP:-${RUN:-"gfs"}} -export COMPONENT="atmos" - -############################################## -# Begin JOB SPECIFIC work -############################################## - -export GDATE=$(date +%Y%m%d%H -d "${CDATE:0:8} ${CDATE:8:2} - ${assim_freq} hours") -gPDY=${GDATE:0:8} -export gcyc=${GDATE:8:2} -export GDUMP=${GDUMP:-"gdas"} - -export OPREFIX="${CDUMP}.t${cyc}z." -export GPREFIX="${GDUMP}.t${gcyc}z." -export APREFIX="${CDUMP}.t${cyc}z." 
- -export COMOUT=${COMOUT:-${ROTDIR}/${CDUMP}.${PDY}/${cyc}/${COMPONENT}} - -mkdir -p ${COMOUT} - -# COMIN_GES and COMIN_GES_ENS are used in script -export COMIN_GES="${ROTDIR}/${GDUMP}.${gPDY}/${gcyc}/${COMPONENT}" -export COMIN_GES_ENS="${ROTDIR}/enkf${GDUMP}.${gPDY}/${gcyc}/${COMPONENT}" - -# Add UFSDA to PYTHONPATH -export PYTHONPATH=${HOMEgfs}/sorc/gdas.cd/ush/:${PYTHONPATH} - -############################################################### -# Run relevant script -EXSCRIPT=${GDASPREPPY:-${HOMEgfs}/scripts/exgdas_global_atmos_analysis_prep.py} -${EXSCRIPT} -status=$? -[[ ${status} -ne 0 ]] && exit ${status} - -############################################## -# End JOB SPECIFIC work -############################################## - -############################################## -# Final processing -############################################## -if [ -e "${pgmout}" ] ; then - cat ${pgmout} -fi - -########################################## -# Remove the Temporary working directory -########################################## -cd ${DATAROOT} -[[ ${KEEPDATA} = "NO" ]] && rm -rf ${DATA} - -exit 0 diff --git a/jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_RUN b/jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_RUN deleted file mode 100755 index 876598ff7e..0000000000 --- a/jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_RUN +++ /dev/null @@ -1,63 +0,0 @@ -#!/bin/bash - -source "${HOMEgfs}/ush/preamble.sh" -source "${HOMEgfs}/ush/jjob_header.sh" -e "atmanalrun" -c "base atmanal atmanalrun" - - -############################################## -# Set variables used in the script -############################################## -export CDATE=${CDATE:-${PDY}${cyc}} -export CDUMP=${CDUMP:-${RUN:-"gfs"}} -export COMPONENT="atmos" - -############################################## -# Begin JOB SPECIFIC work -############################################## - -export GDATE=$(date +%Y%m%d%H -d "${CDATE:0:8} ${CDATE:8:2} - ${assim_freq} hours") -gPDY=${GDATE:0:8} -export gcyc=${GDATE:8:2} -export GDUMP=${GDUMP:-"gdas"} - 
-export OPREFIX="${CDUMP}.t${cyc}z." -export GPREFIX="${GDUMP}.t${gcyc}z." -export APREFIX="${CDUMP}.t${cyc}z." - -export COMOUT=${COMOUT:-${ROTDIR}/${CDUMP}.${PDY}/${cyc}/${COMPONENT}} - -mkdir -p ${COMOUT} - -# COMIN_GES and COMIN_GES_ENS are used in script -export COMIN_GES="${ROTDIR}/${GDUMP}.${gPDY}/${gcyc}/${COMPONENT}" -export COMIN_GES_ENS="${ROTDIR}/enkf${GDUMP}.${gPDY}/${gcyc}/${COMPONENT}" - -# Add UFSDA to PYTHONPATH -export PYTHONPATH=${HOMEgfs}/sorc/gdas.cd/ush/:${PYTHONPATH} - -############################################################### -# Run relevant script - -EXSCRIPT=${GDASRUNSH:-${HOMEgfs}/scripts/exgdas_global_atmos_analysis_run.sh} -${EXSCRIPT} -status=$? -[[ ${status} -ne 0 ]] && exit ${status} - -############################################## -# End JOB SPECIFIC work -############################################## - -############################################## -# Final processing -############################################## -if [ -e "${pgmout}" ] ; then - cat ${pgmout} -fi - -########################################## -# Remove the Temporary working directory -########################################## -cd ${DATAROOT} -[[ ${KEEPDATA} = "NO" ]] && rm -rf ${DATA} - -exit 0 diff --git a/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_POST b/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_POST deleted file mode 100755 index e1d53b552e..0000000000 --- a/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_POST +++ /dev/null @@ -1,66 +0,0 @@ -#!/bin/bash - -source "${HOMEgfs}/ush/preamble.sh" -source "${HOMEgfs}/ush/jjob_header.sh" -e "atmensanalpost" -c "base atmensanal atmensanalpost" - - -############################################## -# Set variables used in the script -############################################## -export CDATE=${CDATE:-${PDY}${cyc}} -export CDUMP=${CDUMP:-${RUN:-"gfs"}} -export COMPONENT="atmos" - -############################################## -# Begin JOB SPECIFIC work -############################################## - -export GDATE=$(date +%Y%m%d%H -d 
"${CDATE:0:8} ${CDATE:8:2} - ${assim_freq} hours") -gPDY=${GDATE:0:8} -export gcyc=${GDATE:8:2} -export GDUMP=${GDUMP:-"gdas"} - -export OPREFIX="${CDUMP}.t${cyc}z." -export GPREFIX="${GDUMP}.t${gcyc}z." -export APREFIX="${CDUMP}.t${cyc}z." - -export COMOUT=${COMOUT:-${ROTDIR}/${CDUMP}.${PDY}/${cyc}/${COMPONENT}} -export COMOUT_ENS=${COMOUT_ENS:-${ROTDIR}/enkf${CDUMP}.${PDY}/${cyc}/${COMPONENT}} - -mkdir -p ${COMOUT} -mkdir -p ${COMOUT_ENS} - -# COMIN_GES and COMIN_GES_ENS are used in script -export COMIN="${ROTDIR}/${CDUMP}.${PDY}/${cyc}/${COMPONENT}" -export COMIN_GES="${ROTDIR}/${GDUMP}.${gPDY}/${gcyc}/${COMPONENT}" -export COMIN_GES_ENS="${ROTDIR}/enkf${GDUMP}.${gPDY}/${gcyc}/${COMPONENT}" - -# Add UFSDA to PYTHONPATH -export PYTHONPATH=${HOMEgfs}/sorc/gdas.cd/ush/:${PYTHONPATH} - -############################################################### -# Run relevant script - -EXSCRIPT=${GDASPOSTPY:-${HOMEgfs}/scripts/exgdas_global_atmos_ensanal_post.py} -${EXSCRIPT} -status=$? -[[ ${status} -ne 0 ]] && exit ${status} - -############################################## -# End JOB SPECIFIC work -############################################## - -############################################## -# Final processing -############################################## -if [ -e "${pgmout}" ] ; then - cat ${pgmout} -fi - -########################################## -# Remove the Temporary working directory -########################################## -cd ${DATAROOT} -[[ ${KEEPDATA} = "NO" ]] && rm -rf ${DATA} - -exit 0 diff --git a/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_PREP b/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_PREP deleted file mode 100755 index 7b3ecee7ca..0000000000 --- a/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_PREP +++ /dev/null @@ -1,66 +0,0 @@ -#!/bin/bash - -source "${HOMEgfs}/ush/preamble.sh" -source "${HOMEgfs}/ush/jjob_header.sh" -e "atmensanalprep" -c "base atmensanal atmensanalprep" - - -############################################## -# Set variables used in the script 
-############################################## -export CDATE=${CDATE:-${PDY}${cyc}} -export CDUMP=${CDUMP:-${RUN:-"gfs"}} -export COMPONENT="atmos" - -############################################## -# Begin JOB SPECIFIC work -############################################## - -export GDATE=$(date +%Y%m%d%H -d "${CDATE:0:8} ${CDATE:8:2} - ${assim_freq} hours") -gPDY=${GDATE:0:8} -export gcyc=${GDATE:8:2} -export GDUMP=${GDUMP:-"gdas"} - -export OPREFIX="${CDUMP}.t${cyc}z." -export GPREFIX="${GDUMP}.t${gcyc}z." -export APREFIX="${CDUMP}.t${cyc}z." - -export COMOUT=${COMOUT:-${ROTDIR}/${CDUMP}.${PDY}/${cyc}/${COMPONENT}} -export COMOUT_ENS=${COMOUT_ENS:-${ROTDIR}/enkf${CDUMP}.${PDY}/${cyc}/${COMPONENT}} - -mkdir -p ${COMOUT} -mkdir -p ${COMOUT_ENS} - -# COMIN_GES and COMIN_GES_ENS are used in script -export COMIN="${ROTDIR}/${CDUMP}.${PDY}/${cyc}/${COMPONENT}" -export COMIN_GES="${ROTDIR}/${GDUMP}.${gPDY}/${gcyc}/${COMPONENT}" -export COMIN_GES_ENS="${ROTDIR}/enkf${GDUMP}.${gPDY}/${gcyc}/${COMPONENT}" - -# Add UFSDA to PYTHONPATH -export PYTHONPATH=${HOMEgfs}/sorc/gdas.cd/ush/:${PYTHONPATH} - -############################################################### -# Run relevant script - -EXSCRIPT=${GDASPREPPY:-${HOMEgfs}/scripts/exgdas_global_atmos_analysis_prep.py} -${EXSCRIPT} -status=$? 
-[[ ${status} -ne 0 ]] && exit ${status} - -############################################## -# End JOB SPECIFIC work -############################################## - -############################################## -# Final processing -############################################## -if [ -e "${pgmout}" ] ; then - cat ${pgmout} -fi - -########################################## -# Remove the Temporary working directory -########################################## -cd ${DATAROOT} -[[ ${KEEPDATA} = "NO" ]] && rm -rf ${DATA} - -exit 0 diff --git a/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_RUN b/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_RUN deleted file mode 100755 index 45368d51ff..0000000000 --- a/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_RUN +++ /dev/null @@ -1,66 +0,0 @@ -#!/bin/bash - -source "${HOMEgfs}/ush/preamble.sh" -source "${HOMEgfs}/ush/jjob_header.sh" -e "atmensanalrun" -c "base atmensanal atmensanalrun" - - -############################################## -# Set variables used in the script -############################################## -export CDATE=${CDATE:-${PDY}${cyc}} -export CDUMP=${CDUMP:-${RUN:-"gfs"}} -export COMPONENT="atmos" - -############################################## -# Begin JOB SPECIFIC work -############################################## - -export GDATE=$(date +%Y%m%d%H -d "${CDATE:0:8} ${CDATE:8:2} - ${assim_freq} hours") -gPDY=${GDATE:0:8} -export gcyc=${GDATE:8:2} -export GDUMP=${GDUMP:-"gdas"} - -export OPREFIX="${CDUMP}.t${cyc}z." -export GPREFIX="${GDUMP}.t${gcyc}z." -export APREFIX="${CDUMP}.t${cyc}z." 
- -export COMOUT=${COMOUT:-${ROTDIR}/${CDUMP}.${PDY}/${cyc}/${COMPONENT}} -export COMOUT_ENS=${COMOUT_ENS:-${ROTDIR}/enkf${CDUMP}.${PDY}/${cyc}/${COMPONENT}} - -mkdir -p ${COMOUT} -mkdir -p ${COMOUT_ENS} - -# COMIN_GES and COMIN_GES_ENS are used in script -export COMIN="${ROTDIR}/${CDUMP}.${PDY}/${cyc}/${COMPONENT}" -export COMIN_GES="${ROTDIR}/${GDUMP}.${gPDY}/${gcyc}/${COMPONENT}" -export COMIN_GES_ENS="${ROTDIR}/enkf${GDUMP}.${gPDY}/${gcyc}/${COMPONENT}" - -# Add UFSDA to PYTHONPATH -export PYTHONPATH=${HOMEgfs}/sorc/gdas.cd/ush/:${PYTHONPATH} - -############################################################### -# Run relevant script - -EXSCRIPT=${GDASRUNSH:-${HOMEgfs}/scripts/exgdas_global_atmos_ensanal_run.sh} -${EXSCRIPT} -status=$? -[[ ${status} -ne 0 ]] && exit ${status} - -############################################## -# End JOB SPECIFIC work -############################################## - -############################################## -# Final processing -############################################## -if [ -e "${pgmout}" ] ; then - cat ${pgmout} -fi - -########################################## -# Remove the Temporary working directory -########################################## -cd ${DATAROOT} -[[ ${KEEPDATA} = "NO" ]] && rm -rf ${DATA} - -exit 0 diff --git a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_BMAT b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_BMAT index 7a4bfcc515..613de589d2 100755 --- a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_BMAT +++ b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_BMAT @@ -1,7 +1,7 @@ #!/bin/bash export STRICT="NO" source "${HOMEgfs}/ush/preamble.sh" -WIPE_DATA="NO" +export WIPE_DATA="NO" export DATA="${DATAROOT}/${RUN}ocnanal_${cyc}" source "${HOMEgfs}/ush/jjob_header.sh" -e "ocnanalrun" -c "base ocnanal ocnanalrun" diff --git a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_BMAT_VRFY b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_BMAT_VRFY index 4b9511c6d1..c85b5c886b 100755 --- a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_BMAT_VRFY +++ b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_BMAT_VRFY @@ -1,7 +1,7 @@ 
#!/bin/bash export STRICT="NO" source "${HOMEgfs}/ush/preamble.sh" -WIPE_DATA="NO" +export WIPE_DATA="NO" export DATA="${DATAROOT}/${RUN}ocnanal_${cyc}" source "${HOMEgfs}/ush/jjob_header.sh" -e "ocnanalrun" -c "base ocnanal ocnanalrun" diff --git a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_CHKPT b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_CHKPT new file mode 100755 index 0000000000..7e4294bd7c --- /dev/null +++ b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_CHKPT @@ -0,0 +1,59 @@ +#!/bin/bash +export STRICT="NO" +source "${HOMEgfs}/ush/preamble.sh" +export WIPE_DATA="NO" +export DATA="${DATAROOT}/${RUN}ocnanal_${cyc}" +source "${HOMEgfs}/ush/jjob_header.sh" -e "ocnanalchkpt" -c "base ocnanal ocnanalchkpt" + + +############################################## +# Set variables used in the script +############################################## +# Ignore possible spelling error (nothing is misspelled) +# shellcheck disable=SC2153 +GDATE=$(date +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") +export GDATE +export gPDY=${GDATE:0:8} +export gcyc=${GDATE:8:2} +export GDUMP=${GDUMP:-"gdas"} + +export GPREFIX="${GDUMP}.t${gcyc}z." +# Ignore possible spelling error (nothing is misspelled) +# shellcheck disable=SC2153 +export APREFIX="${CDUMP}.t${cyc}z." + +# Generate COM variables from templates +YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS + +RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL + + +############################################## +# Begin JOB SPECIFIC work +############################################## + +############################################################### +# Run relevant script + +EXSCRIPT=${GDASPREPPY:-${HOMEgfs}/sorc/gdas.cd/scripts/exgdas_global_marine_analysis_chkpt.sh} +${EXSCRIPT} +status=$? 
+[[ ${status} -ne 0 ]] && exit "${status}" + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]] ; then + cat "${pgmout}" +fi + +########################################## +# Do not remove the Temporary working directory (do this in POST) +########################################## +cd "${DATAROOT}" || exit 1 + +exit 0 diff --git a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_POST b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_POST index e8ca9f429e..eb9607ad21 100755 --- a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_POST +++ b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_POST @@ -1,58 +1,41 @@ #!/bin/bash export STRICT="NO" source "${HOMEgfs}/ush/preamble.sh" -WIPE_DATA="NO" -DATA="${DATAROOT}/${RUN}ocnanal_${cyc}" +export WIPE_DATA="NO" +export DATA="${DATAROOT}/${RUN}ocnanal_${cyc}" source "${HOMEgfs}/ush/jjob_header.sh" -e "ocnanalpost" -c "base ocnanalpost" ############################################## -# Begin JOB SPECIFIC work +# Set variables used in the script ############################################## - -export COMOUT=${COMOUT:-${ROTDIR}/${CDUMP}.${PDY}/${cyc}/ocean} +export CDUMP=${CDUMP:-${RUN:-"gfs"}} export CDATE=${CDATE:-${PDY}${cyc}} +export GDUMP=${GDUMP:-"gdas"} -mkdir -p "${COMOUT}" - -############################################################### -# Run relevant script -############################################################### - -# Save some of the DA cycle output to COMOUT -# TODO: Move to a dedicated script - -# Make a copy the IAU increment -cp "${DATA}/inc.nc" "${COMOUT}/${CDUMP}.t${cyc}z.ocninc.nc" - -# TODO: Dump-splash of the sea-ice restart not done yet +# Generate COM variables from templates +YMD=${PDY} HH=${cyc} generate_com -rx COM_OCEAN_ANALYSIS COM_ICE_RESTART -# Copy of the ioda output files, as is for now -cp -r "${DATA}/diags" "${COMOUT}" +mkdir -p 
"${COM_OCEAN_ANALYSIS}" +mkdir -p "${COM_ICE_RESTART}" -# Copy of the diagonal of the background error for the cycle -bdate=$(date -d "${CDATE:0:8} ${CDATE:8:2} - 3 hours" +"%Y-%m-%dT%H:00:00Z") -cp "${DATA}/ocn.bkgerr_stddev.incr.${bdate}.nc" "${COMOUT}/${CDUMP}.t${cyc}z.ocn.bkgerr_stddev.nc" -cp "${DATA}/ice.bkgerr_stddev.incr.${bdate}.nc" "${COMOUT}/${CDUMP}.t${cyc}z.ice.bkgerr_stddev.nc" - -# Copy the loacalization and correlation operators -cp -rL "${DATA}/bump" "${COMOUT}/bump" - -# Copy the analysis in the middle of the window -cdate=$(date -d "${CDATE:0:8} ${CDATE:8:2}" +"%Y-%m-%dT%H:00:00Z") -cp "${DATA}/Data/ocn.3dvarfgat_pseudo.an.${cdate}.nc" "${COMOUT}/${CDUMP}.t${cyc}z.ocnana.nc" +############################################## +# Begin JOB SPECIFIC work +############################################## -# Copy DA grid (computed for the start of the window) -bcyc=$(((cyc - 3 + 24) % 24)) -cp "${DATA}/soca_gridspec.nc" "${COMOUT}/${CDUMP}.t${bcyc}z.ocngrid.nc" +# Add UFSDA to PYTHONPATH +ufsdaPATH="${HOMEgfs}/sorc/gdas.cd/ush/" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${ufsdaPATH}" +export PYTHONPATH -# Copy logs -mkdir -p "${COMOUT}/logs" -cp "${DATA}"/*.out "${COMOUT}/logs" +############################################################### +# Run relevant script +############################################################### -# Copy var.yaml -mkdir -p "${COMOUT}/yaml" -cp "${DATA}"/*.yaml "${COMOUT}/yaml" +EXSCRIPT=${GDASPREPPY:-${HOMEgfs}/sorc/gdas.cd/scripts/exgdas_global_marine_analysis_post.py} +${EXSCRIPT} +status=$? 
+[[ ${status} -ne 0 ]] && exit "${status}" ########################################## # Remove the Temporary working directory diff --git a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_PREP b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_PREP index 0550f2ed6d..c3fd5b5d65 100755 --- a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_PREP +++ b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_PREP @@ -1,7 +1,7 @@ #!/bin/bash export STRICT="NO" source "${HOMEgfs}/ush/preamble.sh" -DATA="${DATAROOT}/${RUN}ocnanal_${cyc}" +export DATA="${DATAROOT}/${RUN}ocnanal_${cyc}" source "${HOMEgfs}/ush/jjob_header.sh" -e "ocnanalprep" -c "base ocnanal ocnanalprep" @@ -9,12 +9,8 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "ocnanalprep" -c "base ocnanal ocnanal # Set variables used in the script ############################################## export CDUMP=${CDUMP:-${RUN:-"gfs"}} -export COMPONENT="ocean" - -############################################## -# Begin JOB SPECIFIC work -############################################## - +# Ignore possible spelling error (nothing is misspelled) +# shellcheck disable=SC2153 GDATE=$(date +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") export GDATE export gPDY=${GDATE:0:8} @@ -25,12 +21,17 @@ export OPREFIX="${CDUMP}.t${cyc}z." export GPREFIX="${GDUMP}.t${gcyc}z." export APREFIX="${CDUMP}.t${cyc}z." 
-export COMOUT=${COMOUT:-${ROTDIR}/${CDUMP}.${PDY}/${cyc}/ocean} +# Generate COM variables from templates +YMD=${PDY} HH=${cyc} generate_com -rx COM_OBS -mkdir -p "${COMOUT}" +RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ + COM_OCEAN_HISTORY_PREV:COM_OCEAN_HISTORY_TMPL \ + COM_ICE_HISTORY_PREV:COM_ICE_HISTORY_TMPL \ + COM_ICE_RESTART_PREV:COM_ICE_RESTART_TMPL -# COMIN_GES and COMIN_GES_ENS are used in script -export COMIN_GES="${ROTDIR}/${GDUMP}.${gPDY}/${gcyc}/ocean" +############################################## +# Begin JOB SPECIFIC work +############################################## # Add UFSDA to PYTHONPATH ufsdaPATH="${HOMEgfs}/sorc/gdas.cd/ush/" diff --git a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_RUN b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_RUN index 893719fa9f..87ca5560c4 100755 --- a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_RUN +++ b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_RUN @@ -1,16 +1,19 @@ #!/bin/bash export STRICT="NO" source "${HOMEgfs}/ush/preamble.sh" -WIPE_DATA="NO" +export WIPE_DATA="NO" export DATA="${DATAROOT}/${RUN}ocnanal_${cyc}" source "${HOMEgfs}/ush/jjob_header.sh" -e "ocnanalrun" -c "base ocnanal ocnanalrun" +############################################## +# Set variables used in the script +############################################## + ############################################## # Begin JOB SPECIFIC work ############################################## -export COMOUT=${COMOUT:-${ROTDIR}/${CDUMP}.${PDY}/${cyc}/ocean} ############################################################### # Run relevant script diff --git a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_VRFY b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_VRFY index 6ede181032..86aac5fdaa 100755 --- a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_VRFY +++ b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_VRFY @@ -7,14 +7,22 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "ocnanalprep" -c "base ocnanal ocnanal ############################################## # Set variables used in the script ############################################## +export 
CDUMP=${CDUMP:-${RUN:-"gfs"}} +export GDUMP=${GDUMP:-"gdas"} +# Ignore possible spelling error (nothing is misspelled) +# shellcheck disable=SC2153 +GDATE=$(date +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") +export gPDY=${GDATE:0:8} +export gcyc=${GDATE:8:2} +RUN=${GDUMP} YMD=${PDY} HH=${cyc} generate_com -rx COM_OCEAN_ANALYSIS +RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx COM_OCEAN_HISTORY:COM_OCEAN_HISTORY_TMPL +RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx COM_ICE_HISTORY:COM_ICE_HISTORY_TMPL ############################################## # Begin JOB SPECIFIC work ############################################## -export COMOUT=${COMOUT:-${ROTDIR}/${CDUMP}.${PDY}/${cyc}/ocean} - # Add UFSDA to PYTHONPATH export PYTHONPATH=${HOMEgfs}/sorc/gdas.cd/ush/:${HOMEgfs}/sorc/gdas.cd/ush/eva:${PYTHONPATH} diff --git a/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG b/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG index ef4d4f8199..0119bc7f2d 100755 --- a/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG +++ b/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG @@ -3,10 +3,8 @@ source "${HOMEgfs}/ush/preamble.sh" source "${HOMEgfs}/ush/jjob_header.sh" -e "awips" -c "base awips" - export OMP_NUM_THREADS=${OMP_NUM_THREADS:-1} - ################################ # Set up the HOME directory ################################ @@ -27,16 +25,15 @@ export COMPONENT="atmos" ############################################## # Define COM directories ############################################## -export COMIN=${COMIN:-$(compath.py ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/${COMPONENT}} -export COMOUT=${COMOUT:-$(compath.py -o ${NET}/${gfs_ver}/${RUN}.${PDY})/${cyc}/${COMPONENT}} -export COMOUTwmo=${COMOUTwmo:-${COMOUT}/wmo} - export SENDDBN=${SENDDBN:-NO} export SENDAWIP=${SENDAWIP:-NO} export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} -if [ ${SENDCOM} = YES ] ; then - mkdir -m 775 -p ${COMOUT} ${COMOUTwmo} +YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_WMO +GRID="0p25" YMD=${PDY} HH=${cyc} generate_com -rx 
COM_ATMOS_GRIB_0p25:COM_ATMOS_GRIB_TMPL + +if [[ ${SENDCOM} == "YES" && ! -d "${COM_ATMOS_WMO}" ]] ; then + mkdir -m 775 -p "${COM_ATMOS_WMO}" fi export pgmout=OUTPUT.$$ diff --git a/jobs/JGFS_ATMOS_AWIPS_G2 b/jobs/JGFS_ATMOS_AWIPS_G2 index ee823c9ada..94151fbd72 100755 --- a/jobs/JGFS_ATMOS_AWIPS_G2 +++ b/jobs/JGFS_ATMOS_AWIPS_G2 @@ -29,16 +29,15 @@ export COMPONENT="atmos" ############################################## # Define COM directories ############################################## -export COMIN=${COMIN:-$(compath.py ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/${COMPONENT}} -export COMOUT=${COMOUT:-$(compath.py -o ${NET}/${gfs_ver}/${RUN}.${PDY})/${cyc}/${COMPONENT}} -export COMOUTwmo=${COMOUTwmo:-${COMOUT}/wmo} - export SENDDBN=${SENDDBN:-NO} export SENDAWIP=${SENDAWIP:-NO} export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} -if [ ${SENDCOM} = YES ] ; then - mkdir -m 775 -p ${COMOUT} ${COMOUTwmo} +YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_WMO +GRID="0p25" YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_GRIB_0p25:COM_ATMOS_GRIB_TMPL + +if [[ ${SENDCOM} == "YES" && ! 
-d "${COM_ATMOS_WMO}" ]] ; then + mkdir -m 775 -p "${COM_ATMOS_WMO}" fi export pgmout=OUTPUT.$$ diff --git a/jobs/JGFS_ATMOS_CYCLONE_GENESIS b/jobs/JGFS_ATMOS_CYCLONE_GENESIS index bf674fccde..85e4bf7651 100755 --- a/jobs/JGFS_ATMOS_CYCLONE_GENESIS +++ b/jobs/JGFS_ATMOS_CYCLONE_GENESIS @@ -7,14 +7,10 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "vrfy" -c "base vrfy" # TODO (#1220) Determine if this is still needed export RUN_ENVIR=${RUN_ENVIR:-"nco"} -export COMPONENT="atmos" - ############################################## # Set variables used in the exglobal script ############################################## -export CDATE=${CDATE:-${PDY}${cyc}} -export CDUMP=${CDUMP:-${RUN:-"gfs"}} -export cmodel=${CDUMP} +export cmodel=${RUN} #################################### # SENDCOM - Copy Files From TMPDIR to $COMOUT @@ -36,18 +32,24 @@ export SCRIPTens_tracker=${SCRIPTens_tracker:-${HOMEens_tracker}/scripts} ############################################## # Define COM directories ############################################## -export COMIN=${ROTDIR}/${RUN}.${PDY}/${cyc}/${COMPONENT} -export gfsdir=${COMIN} -export COMINgfs=${COMIN} -export COMOUT=${ROTDIR}/${RUN}.${PDY}/${cyc}/${COMPONENT} +YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_GENESIS +YMD=${PDY} HH=${cyc} GRID="0p25" generate_com -rx COM_ATMOS_GRIB_0p25:COM_ATMOS_GRIB_TMPL + +# The following variables are used by the tracker scripts which are outside +# of global-workflow and therefore can't be standardized at this time +export COMIN=${COM_ATMOS_GRIB_0p25} +export gfsdir=${COM_ATMOS_GRIB_0p25} +export COMINgfs=${COM_ATMOS_GRIB_0p25} + +export COMINgenvit=${COM_ATMOS_GENESIS} +export COMOUTgenvit=${COM_ATMOS_GENESIS} +export COMOUT=${COM_ATMOS_GENESIS} -export JYYYY=$(echo ${PDY} | cut -c1-4) -export COMINgenvit=${COMINgenvit:-${COMOUT}/genesis_vital_${JYYYY}} -export COMOUTgenvit=${COMOUTgenvit:-${COMOUT}/genesis_vital_${JYYYY}} +export COMINsyn=${COMINsyn:-$(compath.py 
"${envir}/com/gfs/${gfs_ver}")/syndat} -export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat} +mkdir -m 775 -p "${COMOUTgenvit}" -mkdir -m 775 -p ${COMOUTgenvit} +export JYYYY=${PDY:0:4} ############################################## # Run relevant script diff --git a/jobs/JGFS_ATMOS_CYCLONE_TRACKER b/jobs/JGFS_ATMOS_CYCLONE_TRACKER index 7734e4ea66..3aa3c6f5f4 100755 --- a/jobs/JGFS_ATMOS_CYCLONE_TRACKER +++ b/jobs/JGFS_ATMOS_CYCLONE_TRACKER @@ -14,7 +14,7 @@ export COMPONENT="atmos" # Set variables used in the exglobal script ############################################## export CDATE=${CDATE:-${PDY}${cyc}} -export CDUMP=${CDUMP:-${RUN:-"gfs"}} +export CDUMP=${RUN/enkf} #################################### @@ -37,14 +37,22 @@ export USHens_tracker=${USHens_tracker:-${HOMEens_tracker}/ush} ############################################## # Define COM and Data directories ############################################## -export COMIN=${ROTDIR}/${RUN}.${PDY}/${cyc}/${COMPONENT} -export COMINgfs=${COMIN} -export gfsdir=${COMINgfs} -export COMINgdas=${COMIN} -export gdasdir=${COMINgdas} -export COMOUT=${ROTDIR}/${RUN}.${PDY}/${cyc}/${COMPONENT} +YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_TRACK COM_ATMOS_GENESIS +YMD=${PDY} HH=${cyc} GRID="0p25" generate_com -rx COM_ATMOS_GRIB_0p25:COM_ATMOS_GRIB_TMPL + +if [[ ! 
-d "${COM_ATMOS_TRACK}" ]]; then mkdir -p "${COM_ATMOS_TRACK}"; fi + +# The following variables are used by the tracker scripts which are outside +# of global-workflow and therefore can't be standardized at this time +export COMINgfs=${COM_ATMOS_GRIB_0p25} +export gfsdir=${COM_ATMOS_GRIB_0p25} +export COMINgdas=${COM_ATMOS_GRIB_0p25} +export gdasdir=${COM_ATMOS_GRIB_0p25} +export COMOUT=${COM_ATMOS_TRACK} export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat} +export COMINgenvit=${COM_ATMOS_GENESIS} + if [ ${RUN_ENVIR} = "nco" ]; then export COMOUThur=${COMROOTp1}/hur/${envir}/global export COMOUTatcf=${COMROOTp1}/nhc/${envir}/atcf diff --git a/jobs/JGFS_ATMOS_GEMPAK b/jobs/JGFS_ATMOS_GEMPAK index 2d822e53ce..161f0e0883 100755 --- a/jobs/JGFS_ATMOS_GEMPAK +++ b/jobs/JGFS_ATMOS_GEMPAK @@ -31,78 +31,87 @@ export DBN_ALERT_TYPE=${DBN_ALERT_TYPE:-GFS_GEMPAK} # Specify NET and RUN Name and model #################################### export model=${model:-gfs} -export COMPONENT="atmos" ############################################## # Define COM directories ############################################## -export COMIN=${COMIN:-$(compath.py ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/${COMPONENT}} -export COMOUT=${COMOUT:-$(compath.py -o ${NET}/${gfs_ver}/${RUN}.${PDY})/${cyc}/${COMPONENT}/gempak} - export SENDDBN=${SENDDBN:-NO} export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} -if [ ${SENDCOM} = YES ] ; then - mkdir -m 775 -p ${COMOUT} -fi +for grid in 0p25 0p50 1p00; do + GRID=${grid} YMD=${PDY} HH=${cyc} generate_com -rx "COM_ATMOS_GRIB_${grid}:COM_ATMOS_GRIB_TMPL" +done + +for grid in 1p00 0p50 0p25 40km 35km_atl 35km_pac; do + prod_dir="COM_ATMOS_GEMPAK_${grid}" + GRID=${grid} YMD=${PDY} HH=${cyc} generate_com -rx "COM_ATMOS_GEMPAK_${grid}:COM_ATMOS_GEMPAK_TMPL" + if [[ ${SENDCOM} == YES && ! 
-d "${!prod_dir}" ]] ; then + mkdir -m 775 -p "${!prod_dir}" + fi +done # TODO: These actions belong in an ex-script not a j-job -if [ -f poescript ]; then +if [[ -f poescript ]]; then rm -f poescript fi +ocean_domain_max=180 +if (( ocean_domain_max > FHMAX_GFS )); then + ocean_domain_max=${FHMAX_GFS} +fi + ################################################################# # Execute the script for the 384 hour 1 degree grib ################################################################## -echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs 384 GFS_GEMPAK &> ${DATA}/gfs_1p0.$$.1 " >>poescript -echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs 384 GFS_GEMPAK &> ${DATA}/gfs_1p0.$$.2 " >>poescript -echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs 384 GFS_GEMPAK &> ${DATA}/gfs_1p0.$$.3 " >>poescript -echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs 384 GFS_GEMPAK &> ${DATA}/gfs_1p0.$$.4 " >>poescript -echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs 384 GFS_GEMPAK &> ${DATA}/gfs_1p0.$$.5 " >>poescript -echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs 384 GFS_GEMPAK &> ${DATA}/gfs_1p0.$$.6 " >>poescript +echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_1p00} &> ${DATA}/gfs_1p0.$$.1 " >> poescript +echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_1p00} &> ${DATA}/gfs_1p0.$$.2 " >> poescript +echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_1p00} &> ${DATA}/gfs_1p0.$$.3 " >> poescript +echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_1p00} &> ${DATA}/gfs_1p0.$$.4 " >> poescript +echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_1p00} &> ${DATA}/gfs_1p0.$$.5 " >> poescript +echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_1p00} &> ${DATA}/gfs_1p0.$$.6 " >> poescript ################################################################# # Execute the 
script for the half-degree grib ################################################################## -echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs_0p50 384 GFS_GEMPAK &> ${DATA}/gfs_0p5.$$.1 " >>poescript -echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs_0p50 384 GFS_GEMPAK &> ${DATA}/gfs_0p5.$$.2 " >>poescript -echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs_0p50 384 GFS_GEMPAK &> ${DATA}/gfs_0p5.$$.3 " >>poescript -echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs_0p50 384 GFS_GEMPAK &> ${DATA}/gfs_0p5.$$.4 " >>poescript -echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs_0p50 384 GFS_GEMPAK &> ${DATA}/gfs_0p5.$$.5 " >>poescript -echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs_0p50 384 GFS_GEMPAK &> ${DATA}/gfs_0p5.$$.6 " >>poescript +echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs_0p50 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_0p50} &> ${DATA}/gfs_0p5.$$.1 " >> poescript +echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs_0p50 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_0p50} &> ${DATA}/gfs_0p5.$$.2 " >> poescript +echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs_0p50 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_0p50} &> ${DATA}/gfs_0p5.$$.3 " >> poescript +echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs_0p50 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_0p50} &> ${DATA}/gfs_0p5.$$.4 " >> poescript +echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs_0p50 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_0p50} &> ${DATA}/gfs_0p5.$$.5 " >> poescript +echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs_0p50 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_0p50} &> ${DATA}/gfs_0p5.$$.6 " >> poescript ################################################################# # Execute the script for the quater-degree grib #################################################################### -echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs_0p25 384 GFS_GEMPAK &> ${DATA}/gfs_0p25.$$.1 " >> poescript -echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs_0p25 384 GFS_GEMPAK &> ${DATA}/gfs_0p25.$$.2 " >> poescript -echo "time 
${SRCgfs}/exgfs_atmos_nawips.sh gfs_0p25 384 GFS_GEMPAK &> ${DATA}/gfs_0p25.$$.3 " >> poescript -echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs_0p25 384 GFS_GEMPAK &> ${DATA}/gfs_0p25.$$.4 " >> poescript -echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs_0p25 384 GFS_GEMPAK &> ${DATA}/gfs_0p25.$$.5 " >> poescript -echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs_0p25 384 GFS_GEMPAK &> ${DATA}/gfs_0p25.$$.6 " >> poescript -echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs_0p25 384 GFS_GEMPAK &> ${DATA}/gfs_0p25.$$.7 " >> poescript -echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs_0p25 384 GFS_GEMPAK &> ${DATA}/gfs_0p25.$$.8 " >> poescript -echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs_0p25 384 GFS_GEMPAK &> ${DATA}/gfs_0p25.$$.9 " >> poescript -echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs_0p25 384 GFS_GEMPAK &> ${DATA}/gfs_0p25.$$.10 " >> poescript +echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs_0p25 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_0p25}&> ${DATA}/gfs_0p25.$$.1 " >> poescript +echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs_0p25 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_0p25}&> ${DATA}/gfs_0p25.$$.2 " >> poescript +echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs_0p25 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_0p25}&> ${DATA}/gfs_0p25.$$.3 " >> poescript +echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs_0p25 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_0p25}&> ${DATA}/gfs_0p25.$$.4 " >> poescript +echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs_0p25 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_0p25}&> ${DATA}/gfs_0p25.$$.5 " >> poescript +echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs_0p25 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_0p25}&> ${DATA}/gfs_0p25.$$.6 " >> poescript +echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs_0p25 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_0p25}&> ${DATA}/gfs_0p25.$$.7 " >> poescript +echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs_0p25 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_0p25}&> ${DATA}/gfs_0p25.$$.8 " >> poescript +echo "time 
${SRCgfs}/exgfs_atmos_nawips.sh gfs_0p25 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_0p25}&> ${DATA}/gfs_0p25.$$.9 " >> poescript +echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs_0p25 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_0p25}&> ${DATA}/gfs_0p25.$$.10 " >> poescript #################################################################### # Execute the script to create the 35km Pacific grids for OPC ##################################################################### -echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs35_pac 180 GFS_GEMPAK_WWB &> ${DATA}/gfs35_pac.$$.1 " >>poescript -echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs35_pac 180 GFS_GEMPAK_WWB &> ${DATA}/gfs35_pac.$$.2 " >>poescript +echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs35_pac ${ocean_domain_max} GFS_GEMPAK_WWB ${COM_ATMOS_GEMPAK_35km_pac} &> ${DATA}/gfs35_pac.$$.1 " >> poescript +echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs35_pac ${ocean_domain_max} GFS_GEMPAK_WWB ${COM_ATMOS_GEMPAK_35km_pac} &> ${DATA}/gfs35_pac.$$.2 " >> poescript #################################################################### # Execute the script to create the 35km Atlantic grids for OPC ##################################################################### -echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs35_atl 180 GFS_GEMPAK_WWB &> ${DATA}/gfs35_atl.$$.1 " >>poescript -echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs35_atl 180 GFS_GEMPAK_WWB &> ${DATA}/gfs35_atl.$$.2 " >>poescript +echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs35_atl ${ocean_domain_max} GFS_GEMPAK_WWB ${COM_ATMOS_GEMPAK_35km_atl} &> ${DATA}/gfs35_atl.$$.1 " >> poescript +echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs35_atl ${ocean_domain_max} GFS_GEMPAK_WWB ${COM_ATMOS_GEMPAK_35km_atl} &> ${DATA}/gfs35_atl.$$.2 " >> poescript ##################################################################### # Execute the script to create the 40km grids for HPC ###################################################################### -echo "time 
${SRCgfs}/exgfs_atmos_nawips.sh gfs40 180 GFS_GEMPAK_WWB &> ${DATA}/gfs40.$$.1 " >>poescript -echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs40 180 GFS_GEMPAK_WWB &> ${DATA}/gfs40.$$.2 " >>poescript +echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs40 ${ocean_domain_max} GFS_GEMPAK_WWB ${COM_ATMOS_GEMPAK_40km} &> ${DATA}/gfs40.$$.1 " >> poescript +echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs40 ${ocean_domain_max} GFS_GEMPAK_WWB ${COM_ATMOS_GEMPAK_40km} &> ${DATA}/gfs40.$$.2 " >> poescript if [[ ${CFP_MP:-"NO"} == "YES" ]]; then # Add task number to the MPMD script @@ -123,7 +132,7 @@ export OMP_NUM_THREADS=${threads} APRUN=${APRUN:-"mpiexec -l -np ${ntasks} --cpu-bind verbose,core cfp"} APRUN_GEMPAKCFP=${APRUN_GEMPAKCFP:-${APRUN}} -APRUNCFP=$(eval echo ${APRUN_GEMPAKCFP}) +APRUNCFP=${APRUN_GEMPAKCFP} ${APRUNCFP} ${DATA}/poescript export err=$?; err_chk diff --git a/jobs/JGFS_ATMOS_PGRB2_SPEC_NPOESS b/jobs/JGFS_ATMOS_PGRB2_SPEC_NPOESS index 8bb39c0532..48b13c3d9e 100755 --- a/jobs/JGFS_ATMOS_PGRB2_SPEC_NPOESS +++ b/jobs/JGFS_ATMOS_PGRB2_SPEC_NPOESS @@ -26,22 +26,19 @@ export FIXgfs=${FIXgfs:-${HOMEgfs}/fix} # Specify NET and RUN Name and model #################################### export model=${model:-gfs} -export COMPONENT="atmos" ############################################## # Define COM directories ############################################## -export COMIN=${COMIN:-$(compath.py ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/${COMPONENT}} -export COMOUT=${COMOUT:-$(compath.py -o ${NET}/${gfs_ver}/${RUN}.${PDY})/${cyc}/${COMPONENT}} -export COMOUTwmo=${COMOUTwmo:-${COMOUT}/wmo} - export SENDDBN=${SENDDBN:-NO} export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} -if [ ${SENDCOM} = YES ] ; then - mkdir -m 775 -p ${COMOUT} ${COMOUTwmo} -fi +YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_GOES +GRID="0p50" YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_GRIB_0p50:COM_ATMOS_GRIB_TMPL +if [[ ${SENDCOM} == "YES" ]]; then + mkdir -m 775 -p "${COM_ATMOS_GOES}" +fi # TODO - 
This should be in the ex-script (#1226) @@ -52,11 +49,6 @@ export SHOUR=000 export FHOUR=180 export FHINC=003 -####################################### -# Specify Restart File Name to Key Off -####################################### -restart_file=${COMIN}/${RUN}.t${cyc}z.special.grb2if - #################################### # Specify Timeout Behavior of Post # @@ -71,46 +63,41 @@ export SLEEP_INT=5 #################################### # Check if this is a restart #################################### -if test -f ${COMIN}/${RUN}.t${cyc}z.control.goessimpgrb2 -then - modelrecvy=$(cat < ${COMIN}/${RUN}.t${cyc}z.control.goessimpgrb) - recvy_pdy=$(echo ${modelrecvy} | cut -c1-8) - recvy_cyc=$(echo ${modelrecvy} | cut -c9-10) - recvy_shour=$(echo ${modelrecvy} | cut -c11-13) - - if test ${RERUN} = "NO" - then - NEW_SHOUR=$(expr ${recvy_shour} + ${FHINC}) - if test ${NEW_SHOUR} -ge ${SHOUR} - then +if [[ -f "${COM_ATMOS_GOES}/${RUN}.t${cyc}z.control.goessimpgrb2" ]]; then + modelrecvy=$(cat < "${COM_ATMOS_GOES}/${RUN}.t${cyc}z.control.goessimpgrb") + recvy_cyc="${modelrecvy:8:2}" + recvy_shour="${modelrecvy:10:3}" + + if [[ ${RERUN} == "NO" ]]; then + NEW_SHOUR=$(( recvy_shour + FHINC )) + if (( NEW_SHOUR >= SHOUR )); then export SHOUR=${NEW_SHOUR} fi - if test ${recvy_shour} -ge ${FHOUR} - then - echo="Forecast Pgrb Generation Already Completed to ${FHOUR}" + if (( recvy_shour >= FHOUR )); then + echo "Forecast Pgrb Generation Already Completed to ${FHOUR}" else - echo="Starting: PDY=${PDY} cycle=t${recvy_cyc}z SHOUR=${SHOUR} ." 
+ echo "Starting: PDY=${PDY} cycle=t${recvy_cyc}z SHOUR=${SHOUR}" fi fi fi ############################################################# # Execute the script -${HOMEgfs}/scripts/exgfs_atmos_grib2_special_npoess.sh +"${HOMEgfs}/scripts/exgfs_atmos_grib2_special_npoess.sh" export err=$?;err_chk ############################################################# ############################################ # print exec I/O output ############################################ -if [ -e "${pgmout}" ] ; then - cat ${pgmout} +if [[ -e "${pgmout}" ]] ; then + cat "${pgmout}" fi ################################### # Remove temp directories ################################### -if [ "${KEEPDATA}" != "YES" ] ; then - rm -rf ${DATA} +if [[ "${KEEPDATA}" != "YES" ]] ; then + rm -rf "${DATA}" fi diff --git a/jobs/JGFS_ATMOS_POSTSND b/jobs/JGFS_ATMOS_POSTSND index d5aa921e69..2318d70e31 100755 --- a/jobs/JGFS_ATMOS_POSTSND +++ b/jobs/JGFS_ATMOS_POSTSND @@ -7,8 +7,7 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "postsnd" -c "base postsnd" ############################################## # Set variables used in the exglobal script ############################################## -export CDATE=${CDATE:-${PDY}${cyc}} -export CDUMP=${CDUMP:-${RUN:-"gfs"}} +export CDUMP=${RUN/enkf} ######################################## @@ -34,14 +33,14 @@ export SCRbufrsnd=${SCRbufrsnd:-${HOMEbufrsnd}/scripts} ############################## # Define COM Directories ############################## -export COMIN=${COMIN:-${ROTDIR}/${CDUMP}.${PDY}/${cyc}/atmos} -export COMOUT=${COMOUT:-${ROTDIR}/${CDUMP}.${PDY}/${cyc}/atmos} -export pcom=${pcom:-${COMOUT}/wmo} -export COMAWP=${COMAWP:-${COMOUT}/gempak} export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} -[[ ! -d ${COMOUT} ]] && mkdir -p ${COMOUT} -[[ ! -d ${pcom} ]] && mkdir -p ${pcom} -[[ ! -d ${COMAWP} ]] && mkdir -p ${COMAWP} + +YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY COM_ATMOS_BUFR \ + COM_ATMOS_WMO COM_ATMOS_GEMPAK + +[[ ! 
-d ${COM_ATMOS_BUFR} ]] && mkdir -p "${COM_ATMOS_BUFR}" +[[ ! -d ${COM_ATMOS_GEMPAK} ]] && mkdir -p "${COM_ATMOS_GEMPAK}" +[[ ! -d ${COM_ATMOS_WMO} ]] && mkdir -p "${COM_ATMOS_WMO}" ######################################################## diff --git a/jobs/JGFS_ATMOS_VMINMON b/jobs/JGFS_ATMOS_VMINMON index 01f2d3516c..a7300b4dd3 100755 --- a/jobs/JGFS_ATMOS_VMINMON +++ b/jobs/JGFS_ATMOS_VMINMON @@ -6,13 +6,6 @@ source "${HOMEgfs}/ush/preamble.sh" source "${HOMEgfs}/ush/jjob_header.sh" -e "vrfy" -c "base vrfy" - -############################### -# Specify NET and RUN name -############################## -export COMPONENT="atmos" - - ########################################################### # obtain unique process id (pid) and make temp directories ########################################################### @@ -35,31 +28,29 @@ export USHminmon=${USHminmon:-${HOMEminmon}/ush} # determine PDY and cyc for previous cycle ############################################# -cdate=$(${NDATE} -6 ${PDY}${cyc}) +pdate=$(${NDATE} -6 ${PDY}${cyc}) echo 'pdate = ${pdate}' -export P_PDY=$(echo ${cdate} | cut -c1-8) -export p_cyc=$(echo ${cdate} | cut -c9-10) +export P_PDY=${pdate:0:8} +export p_cyc=${pdate:8:2} ############################################# # TANKverf - WHERE OUTPUT DATA WILL RESIDE ############################################# -export COM_IN=${COM_IN:-$(compath.py ${envir}/${NET}/${gfs_ver})} +YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +YMD=${P_PDY} HH=${p_cyc} generate_com -rx COM_ATMOS_ANALYSIS_PREV:COM_ATMOS_ANALYSIS_TMPL -M_TANKverf=${M_TANKverf:-${COM_IN}/${RUN}.${PDY}/${cyc}/${COMPONENT}/minmon} -export M_TANKverfM1=${M_TANKverfM1:-${COM_IN}/${RUN}.${P_PDY}/${p_cyc}/${COMPONENT}/minmon} - -export COMIN=${COMIN:-${COM_IN}/${RUN}.${PDY}/${cyc}/${COMPONENT}} +M_TANKverf=${M_TANKverf:-${COM_ATMOS_ANALYSIS}/minmon} +export M_TANKverfM1=${M_TANKverfM1:-${COM_ATMOS_ANALYSIS_PREV}/minmon} mkdir -p -m 775 ${M_TANKverf} - 
######################################## # Set necessary environment variables ######################################## export CYCLE_INTERVAL=6 -export gsistat=${gsistat:-${COMIN}/gfs.t${cyc}z.gsistat} +export gsistat=${gsistat:-${COM_ATMOS_ANALYSIS}/gfs.t${cyc}z.gsistat} ######################################################## diff --git a/jobs/JGLOBAL_AERO_ANALYSIS_FINALIZE b/jobs/JGLOBAL_AERO_ANALYSIS_FINALIZE index db2199803e..065ebe8d0a 100755 --- a/jobs/JGLOBAL_AERO_ANALYSIS_FINALIZE +++ b/jobs/JGLOBAL_AERO_ANALYSIS_FINALIZE @@ -8,7 +8,10 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "aeroanlfinal" -c "base aeroanl aeroan ############################################## # Set variables used in the script ############################################## +# shellcheck disable=SC2153 GDATE=$(date +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") +gPDY=${GDATE:0:8} +gcyc=${GDATE:8:2} GDUMP="gdas" @@ -16,12 +19,14 @@ GDUMP="gdas" # Begin JOB SPECIFIC work ############################################## -export COMOUT=${COMOUT:-${ROTDIR}/${RUN}.${PDY}/${cyc}/chem} -mkdir -p "${COMOUT}" +# Generate COM variables from templates +YMD=${PDY} HH=${cyc} generate_com -rx COM_OBS COM_CHEM_ANALYSIS -# COMIN_GES and COMIN_GES_ENS are used in script -export COMIN_GES="${ROTDIR}/${GDUMP}.${GDATE:0:8}/${GDATE:8:2}/chem" -export COMIN_GES_ENS="${ROTDIR}/enkf${GDUMP}.${GDATE:0:8}/${GDATE:8:2}/chem" +RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ + COM_CHEM_ANALYSIS_PREV:COM_CHEM_ANALYSIS_TMPL \ + COM_ATMOS_RESTART_PREV:COM_ATMOS_RESTART_TMPL + +mkdir -m 775 -p "${COM_CHEM_ANALYSIS}" ############################################################### # Run relevant script diff --git a/jobs/JGLOBAL_AERO_ANALYSIS_INITIALIZE b/jobs/JGLOBAL_AERO_ANALYSIS_INITIALIZE index 5642b72092..2f8c222e18 100755 --- a/jobs/JGLOBAL_AERO_ANALYSIS_INITIALIZE +++ b/jobs/JGLOBAL_AERO_ANALYSIS_INITIALIZE @@ -7,7 +7,10 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "aeroanlinit" -c "base aeroanl aeroanl 
############################################## # Set variables used in the script ############################################## +# shellcheck disable=SC2153 GDATE=$(date +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") +gPDY=${GDATE:0:8} +gcyc=${GDATE:8:2} GDUMP="gdas" @@ -15,12 +18,14 @@ GDUMP="gdas" # Begin JOB SPECIFIC work ############################################## -export COMOUT=${COMOUT:-${ROTDIR}/${RUN}.${PDY}/${cyc}/chem} -mkdir -p "${COMOUT}" +# Generate COM variables from templates +YMD=${PDY} HH=${cyc} generate_com -rx COM_OBS COM_CHEM_ANALYSIS -# COMIN_GES and COMIN_GES_ENS are used in script -export COMIN_GES="${ROTDIR}/${GDUMP}.${GDATE:0:8}/${GDATE:8:2}/chem" -export COMIN_GES_ENS="${ROTDIR}/enkf${GDUMP}.${GDATE:0:8}/${GDATE:8:2}/chem" +RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ + COM_CHEM_ANALYSIS_PREV:COM_CHEM_ANALYSIS_TMPL \ + COM_ATMOS_RESTART_PREV:COM_ATMOS_RESTART_TMPL + +mkdir -m 775 -p "${COM_CHEM_ANALYSIS}" ############################################################### # Run relevant script diff --git a/jobs/JGLOBAL_AERO_ANALYSIS_RUN b/jobs/JGLOBAL_AERO_ANALYSIS_RUN index aa9d48ba2e..853909dc03 100755 --- a/jobs/JGLOBAL_AERO_ANALYSIS_RUN +++ b/jobs/JGLOBAL_AERO_ANALYSIS_RUN @@ -8,20 +8,11 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "aeroanlrun" -c "base aeroanl aeroanlr ############################################## # Set variables used in the script ############################################## -GDATE=$(date +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") -GDUMP="gdas" ############################################## # Begin JOB SPECIFIC work ############################################## -export COMOUT=${COMOUT:-${ROTDIR}/${RUN}.${PDY}/${cyc}/chem} -mkdir -p "${COMOUT}" - -# COMIN_GES and COMIN_GES_ENS are used in script -export COMIN_GES="${ROTDIR}/${GDUMP}.${GDATE:0:8}/${GDATE:8:2}/chem" -export COMIN_GES_ENS="${ROTDIR}/enkf${GDUMP}.${GDATE:0:8}/${GDATE:8:2}/chem" - 
############################################################### # Run relevant script diff --git a/jobs/JGLOBAL_ARCHIVE b/jobs/JGLOBAL_ARCHIVE index 801998e2c2..2d2f8c8814 100755 --- a/jobs/JGLOBAL_ARCHIVE +++ b/jobs/JGLOBAL_ARCHIVE @@ -7,9 +7,21 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "arch" -c "base arch" ############################################## # Set variables used in the script ############################################## -export CDATE=${CDATE:-${PDY}${cyc}} -export CDUMP=${CDUMP:-${RUN:-"gfs"}} - +export CDUMP=${RUN/enkf} + +YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS COM_ATMOS_BUFR COM_ATMOS_GEMPAK \ + COM_ATMOS_GENESIS COM_ATMOS_HISTORY COM_ATMOS_INPUT COM_ATMOS_MASTER COM_ATMOS_RESTART \ + COM_ATMOS_TRACK COM_ATMOS_WAFS COM_ATMOS_WMO \ + COM_CHEM_HISTORY \ + COM_ICE_HISTORY COM_ICE_INPUT \ + COM_OBS COM_TOP \ + COM_OCEAN_DAILY COM_OCEAN_HISTORY COM_OCEAN_INPUT COM_OCEAN_XSECT \ + COM_WAVE_GRID COM_WAVE_HISTORY COM_WAVE_STATION + +for grid in "0p25" "0p50" "1p00"; do + YMD=${PDY} HH=${cyc} GRID=${grid} generate_com -rx "COM_ATMOS_GRIB_${grid}:COM_ATMOS_GRIB_TMPL" + YMD=${PDY} HH=${cyc} GRID=${grid} generate_com -rx "COM_OCEAN_GRIB_${grid}:COM_OCEAN_GRIB_TMPL" +done ############################################################### # Run archive script diff --git a/jobs/JGLOBAL_ATMENS_ANALYSIS_FINALIZE b/jobs/JGLOBAL_ATMENS_ANALYSIS_FINALIZE new file mode 100755 index 0000000000..37a49e0ae0 --- /dev/null +++ b/jobs/JGLOBAL_ATMENS_ANALYSIS_FINALIZE @@ -0,0 +1,48 @@ +#! 
/usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +export WIPE_DATA="NO" +export DATA=${DATA:-${DATAROOT}/${RUN}atmensanl_${cyc}} +source "${HOMEgfs}/ush/jjob_header.sh" -e "atmensanlfinal" -c "base atmensanl atmensanlfinal" + +############################################## +# Set variables used in the script +############################################## +GDUMP="gdas" +GDUMP_ENS="enkf${GDUMP}" + +############################################## +# Begin JOB SPECIFIC work +############################################## +# Generate COM variable from template +MEMDIR='ensstat' RUN=${GDUMP_ENS} YMD=${PDY} HH=${cyc} generate_com -rx \ + COM_ATMOS_ANALYSIS_ENS:COM_ATMOS_ANALYSIS_TMPL + +mkdir -m 755 -p "${COM_ATMOS_ANALYSIS_ENS}" + +############################################################### +# Run relevant script + +EXSCRIPT=${GDASATMENSFINALPY:-${HOMEgfs}/scripts/exglobal_atmens_analysis_finalize.py} +${EXSCRIPT} +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]] ; then + cat "${pgmout}" +fi + +########################################## +# Remove the Temporary working directory +########################################## +cd "${DATAROOT}" || ( echo "FATAL ERROR: ${DATAROOT} does not exist, ABORT!"; exit 1 ) +[[ ${KEEPDATA} = "NO" ]] && rm -rf "${DATA}" + +exit 0 diff --git a/jobs/JGLOBAL_ATMENS_ANALYSIS_INITIALIZE b/jobs/JGLOBAL_ATMENS_ANALYSIS_INITIALIZE new file mode 100755 index 0000000000..246502cdfa --- /dev/null +++ b/jobs/JGLOBAL_ATMENS_ANALYSIS_INITIALIZE @@ -0,0 +1,44 @@ +#! 
/usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +export DATA=${DATA:-${DATAROOT}/${RUN}atmensanl_${cyc}} +source "${HOMEgfs}/ush/jjob_header.sh" -e "atmensanlinit" -c "base atmensanl atmensanlinit" + +############################################## +# Set variables used in the script +############################################## +# shellcheck disable=SC2153 +GDATE=$(date +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") +gPDY=${GDATE:0:8} +gcyc=${GDATE:8:2} +GDUMP="gdas" + +############################################## +# Begin JOB SPECIFIC work +############################################## +# Generate COM variables from templates +RUN=${GDUMP} YMD=${PDY} HH=${cyc} generate_com -rx COM_OBS + +RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ + COM_ATMOS_ANALYSIS_PREV:COM_ATMOS_ANALYSIS_TMPL + +############################################################### +# Run relevant script + +EXSCRIPT=${GDASATMENSINITPY:-${HOMEgfs}/scripts/exglobal_atmens_analysis_initialize.py} +${EXSCRIPT} +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]] ; then + cat "${pgmout}" +fi + +exit 0 diff --git a/jobs/JGLOBAL_ATMENS_ANALYSIS_RUN b/jobs/JGLOBAL_ATMENS_ANALYSIS_RUN new file mode 100755 index 0000000000..0d10c76b05 --- /dev/null +++ b/jobs/JGLOBAL_ATMENS_ANALYSIS_RUN @@ -0,0 +1,35 @@ +#! 
/usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +export WIPE_DATA="NO" +export DATA=${DATA:-${DATAROOT}/${RUN}atmensanl_${cyc}} +source "${HOMEgfs}/ush/jjob_header.sh" -e "atmensanlrun" -c "base atmensanl atmensanlrun" + +############################################## +# Set variables used in the script +############################################## + +############################################## +# Begin JOB SPECIFIC work +############################################## + +############################################################### +# Run relevant script + +EXSCRIPT=${GDASATMENSRUNSH:-${HOMEgfs}/scripts/exglobal_atmens_analysis_run.py} +${EXSCRIPT} +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]] ; then + cat "${pgmout}" +fi + +exit 0 diff --git a/jobs/JGLOBAL_ATMOS_ANALYSIS b/jobs/JGLOBAL_ATMOS_ANALYSIS index 48c7a44cb0..9e5850bfc3 100755 --- a/jobs/JGLOBAL_ATMOS_ANALYSIS +++ b/jobs/JGLOBAL_ATMOS_ANALYSIS @@ -8,7 +8,7 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "anal" -c "base anal" # Set variables used in the script ############################################## export CDATE=${CDATE:-${PDY}${cyc}} -export CDUMP=${CDUMP:-${RUN:-"gfs"}} +export CDUMP=${RUN/enkf} export COMPONENT="atmos" export DO_CALC_ANALYSIS=${DO_CALC_ANALYSIS:-"YES"} export MAKE_NSSTBUFR=${MAKE_NSSTBUFR:-"NO"} @@ -19,38 +19,35 @@ export MAKE_ACFTBUFR=${MAKE_ACFTBUFR:-"NO"} # Begin JOB SPECIFIC work ############################################## -GDATE=$(${NDATE} -${assim_freq} ${CDATE}) -gPDY=$(echo ${GDATE} | cut -c1-8) -gcyc=$(echo ${GDATE} | cut -c9-10) -GDUMP=${GDUMP:-"gdas"} +GDATE=$(${NDATE} -${assim_freq} ${PDY}${cyc}) +export gPDY=${GDATE:0:8} +export gcyc=${GDATE:8:2} +export GDUMP="gdas" +export 
GDUMP_ENS="enkf${GDUMP}" export OPREFIX="${CDUMP}.t${cyc}z." export GPREFIX="${GDUMP}.t${gcyc}z." export APREFIX="${CDUMP}.t${cyc}z." -export GPREFIX_ENS="enkf${GDUMP}.t${gcyc}z." - -export COMIN=${COMIN:-${ROTDIR}/${RUN}.${PDY}/${cyc}/atmos} -export COMOUT=${COMOUT:-${ROTDIR}/${RUN}.${PDY}/${cyc}/atmos} -export COMIN_OBS=${COMIN_OBS:-$(compath.py ${envir}/obsproc/${obsproc_ver})/${RUN}.${PDY}/${cyc}/atmos} -if [ ${RUN_ENVIR} = "nco" ]; then - export COMIN_GES_OBS=${COMIN_GES_OBS:-$(compath.py ${envir}/obsproc/${obsproc_ver})/${GDUMP}.${gPDY}/${gcyc}/atmos} -else - export COMIN_GES_OBS="${ROTDIR}/${GDUMP}.${gPDY}/${gcyc}/obs" -fi +export GPREFIX_ENS="${GDUMP_ENS}.t${gcyc}z." + +# Generate COM variables from templates +YMD=${PDY} HH=${cyc} generate_com -rx COM_OBS COM_ATMOS_ANALYSIS -mkdir -m 775 -p ${COMOUT} -# COMIN_GES and COMIN_GES_ENS are used in script -export COMIN_GES="${ROTDIR}/${GDUMP}.${gPDY}/${gcyc}/atmos" -export COMIN_GES_ENS="${ROTDIR}/enkfgdas.${gPDY}/${gcyc}" +RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ + COM_ATMOS_ANALYSIS_PREV:COM_ATMOS_ANALYSIS_TMPL \ + COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL +MEMDIR='ensstat' RUN=${GDUMP_ENS} YMD=${gPDY} HH=${gcyc} generate_com -rx \ + COM_ATMOS_HISTORY_ENS_PREV:COM_ATMOS_HISTORY_TMPL -export ATMGES="${COMIN_GES}/${GPREFIX}atmf006.nc" +mkdir -m 775 -p "${COM_ATMOS_ANALYSIS}" + +export ATMGES="${COM_ATMOS_HISTORY_PREV}/${GPREFIX}atmf006.nc" if [ ! -f ${ATMGES} ]; then echo "FATAL ERROR: FILE MISSING: ATMGES = ${ATMGES}" exit 1 fi - # Get LEVS export LEVS=$(${NCLEN} ${ATMGES} pfull) status=$? @@ -58,7 +55,7 @@ status=$? if [ ${DOHYBVAR} = "YES" ]; then - export ATMGES_ENSMEAN="${COMIN_GES_ENS}/enkf${GPREFIX}atmf006.ensmean.nc" + export ATMGES_ENSMEAN="${COM_ATMOS_HISTORY_ENS_PREV}/${GPREFIX_ENS}atmf006.ensmean.nc" if [ ! 
-f ${ATMGES_ENSMEAN} ]; then echo "FATAL ERROR: FILE MISSING: ATMGES_ENSMEAN = ${ATMGES_ENSMEAN}" exit 2 @@ -67,17 +64,17 @@ fi # Link observational data -export PREPQC="${COMIN_OBS}/${OPREFIX}prepbufr" +export PREPQC="${COM_OBS}/${OPREFIX}prepbufr" if [[ ! -f ${PREPQC} ]]; then echo "WARNING: Global PREPBUFR FILE ${PREPQC} MISSING" fi -export TCVITL="${COMOUT}/${OPREFIX}syndata.tcvitals.tm00" +export TCVITL="${COM_OBS}/${OPREFIX}syndata.tcvitals.tm00" if [[ ${DONST} = "YES" ]]; then if [[ ${MAKE_NSSTBUFR} == "YES" ]]; then - export NSSTBF="${COMIN_OBS}/${OPREFIX}nsstbufr" + export NSSTBF="${COM_OBS}/${OPREFIX}nsstbufr" fi fi -export PREPQCPF="${COMIN_OBS}/${OPREFIX}prepbufr.acft_profiles" +export PREPQCPF="${COM_OBS}/${OPREFIX}prepbufr.acft_profiles" # Copy fix file for obsproc # TODO: Why is this necessary? if [[ ${RUN} = "gfs" ]]; then @@ -98,9 +95,9 @@ status=$? # Send Alerts ############################################## if [ ${SENDDBN} = YES -a ${RUN} = gdas ] ; then - ${DBNROOT}/bin/dbn_alert MODEL GDAS_MSC_abias ${job} ${COMOUT}/${APREFIX}abias - ${DBNROOT}/bin/dbn_alert MODEL GDAS_MSC_abias_pc ${job} ${COMOUT}/${APREFIX}abias_pc - ${DBNROOT}/bin/dbn_alert MODEL GDAS_MSC_abias_air ${job} ${COMOUT}/${APREFIX}abias_air + ${DBNROOT}/bin/dbn_alert MODEL GDAS_MSC_abias ${job} ${COM_ATMOS_ANALYSIS}/${APREFIX}abias + ${DBNROOT}/bin/dbn_alert MODEL GDAS_MSC_abias_pc ${job} ${COM_ATMOS_ANALYSIS}/${APREFIX}abias_pc + ${DBNROOT}/bin/dbn_alert MODEL GDAS_MSC_abias_air ${job} ${COM_ATMOS_ANALYSIS}/${APREFIX}abias_air fi diff --git a/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC b/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC index 81f792b032..65a571a974 100755 --- a/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC +++ b/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC @@ -7,64 +7,48 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "analcalc" -c "base anal analcalc" ############################################## # Set variables used in the script ############################################## -export CDATE=${CDATE:-${PDY}${cyc}} 
-export CDUMP=${CDUMP:-${RUN:-"gfs"}} -export COMPONENT="atmos" +export CDUMP="${RUN/enkf}" export DO_CALC_ANALYSIS=${DO_CALC_ANALYSIS:-"YES"} ############################################## # Begin JOB SPECIFIC work ############################################## - -GDATE=$(${NDATE} -${assim_freq} ${CDATE}) -gPDY=$(echo ${GDATE} | cut -c1-8) -gcyc=$(echo ${GDATE} | cut -c9-10) -GDUMP=${GDUMP:-"gdas"} +# Ignore possible spelling error (nothing is misspelled) +# shellcheck disable=SC2153 +GDATE=$(${NDATE} -"${assim_freq}" "${PDY}${cyc}") +# shellcheck disable= +export gPDY=${GDATE:0:8} +export gcyc=${GDATE:8:2} +export GDUMP="gdas" +export GDUMP_ENS="enkf${GDUMP}" export OPREFIX="${CDUMP}.t${cyc}z." export GPREFIX="${GDUMP}.t${gcyc}z." -export APREFIX="${CDUMP}.t${cyc}z." -export GPREFIX_ENS="enkf${GDUMP}.t${gcyc}z." - -if [ ${RUN_ENVIR} = "nco" -o ${ROTDIR_DUMP:-NO} = "YES" ]; then - export COMIN=${COMIN:-${ROTDIR}/${RUN}.${PDY}/${cyc}/atmos} - export COMOUT=${COMOUT:-${ROTDIR}/${RUN}.${PDY}/${cyc}/atmos} - export COMIN_OBS=${COMIN_OBS:-$(compath.py ${envir}/obsproc/${obsproc_ver})/${RUN}.${PDY}/${cyc}/atmos} - export COMIN_GES_OBS=${COMIN_GES_OBS:-$(compath.py ${envir}/obsproc/${obsproc_ver})/${GDUMP}.${gPDY}/${gcyc}/atmos} -else - export COMOUT="${ROTDIR}/${CDUMP}.${PDY}/${cyc}/atmos" - export COMIN_OBS="${COMIN_OBS:-${ROTDIR}/${CDUMP}.${PDY}/${cyc}/obs}" - export COMIN_GES_OBS="${ROTDIR}/${GDUMP}.${gPDY}/${gcyc}/obs" -fi -mkdir -m 775 -p ${COMOUT} -# COMIN_GES and COMIN_GES_ENS are used in script -export COMIN_GES="${ROTDIR}/${GDUMP}.${gPDY}/${gcyc}/atmos" -export COMIN_GES_ENS="${ROTDIR}/enkfgdas.${gPDY}/${gcyc}" +export APREFIX="${RUN}.t${cyc}z." +export GPREFIX_ENS="${GDUMP_ENS}.t${gcyc}z." 
+ +RUN=${CDUMP} YMD=${PDY} HH=${cyc} generate_com -rx COM_OBS + +YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS COM_ATMOS_RESTART + +RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ + COM_OBS_PREV:COM_OBS_TMPL \ + COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL -export ATMGES="${COMIN_GES}/${GPREFIX}atmf006.nc" +export ATMGES="${COM_ATMOS_HISTORY_PREV}/${GPREFIX}atmf006.nc" if [ ! -f ${ATMGES} ]; then echo "FATAL ERROR: FILE MISSING: ATMGES = ${ATMGES}" exit 1 fi - # Get LEVS export LEVS=$(${NCLEN} ${ATMGES} pfull) status=$? [[ ${status} -ne 0 ]] && exit ${status} -if [ ${DOHYBVAR} = "YES" ]; then - export ATMGES_ENSMEAN="${COMIN_GES_ENS}/${GPREFIX_ENS}atmf006.ensmean.nc" - if [ ! -f ${ATMGES_ENSMEAN} ]; then - echo "FATAL ERROR: FILE MISSING: ATMGES_ENSMEAN = ${ATMGES_ENSMEAN}" - exit 2 - fi -fi - - # Generate Gaussian surface analysis # TODO: Should this be removed now that sfcanl is its own job? export DOGAUSFCANL=${DOGAUSFCANL:-"YES"} diff --git a/jobs/JGLOBAL_ATMOS_POST b/jobs/JGLOBAL_ATMOS_POST index 1c6af31f91..d636be4f30 100755 --- a/jobs/JGLOBAL_ATMOS_POST +++ b/jobs/JGLOBAL_ATMOS_POST @@ -15,15 +15,13 @@ export g2tmpl_ver=${g2tmpl_ver:-v1.5.0} ############################################## # Set variables used in the exglobal script ############################################## -export CDATE=${CDATE:-${PDY}${cyc}} -export CDUMP=${CDUMP:-${RUN:-"gfs"}} -export COMPONENT="atmos" +export CDUMP=${RUN/enkf} ############################################## # TODO: Remove this egregious HACK ############################################## -if [[ "${SDATE:-}" = "${CDATE}" ]]; then +if [[ "${SDATE:-}" = "${PDY}${cyc}" ]]; then if [[ ${post_times} = "anl" ]]; then echo "No offline post-processing in the first half cycle for analysis" exit 0 @@ -41,12 +39,25 @@ export FIXCRTM=${CRTM_FIX:-${HOMECRTM}/fix} export PARMpost=${PARMpost:-${HOMEgfs}/parm/post} export INLINE_POST=${WRITE_DOPOST:-".false."} -export 
COMIN=${COMIN:-${ROTDIR}/${RUN}.${PDY}/${cyc}/atmos} -export COMOUT=${COMOUT:-${ROTDIR}/${RUN}.${PDY}/${cyc}/atmos} +# Construct COM variables from templates +YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_RESTART COM_ATMOS_ANALYSIS COM_ATMOS_HISTORY COM_ATMOS_MASTER +if [[ ! -d ${COM_ATMOS_MASTER} ]]; then mkdir -m 775 -p "${COM_ATMOS_MASTER}"; fi -# shellcheck disable=SC2174 -[[ ! -d "${COMOUT}" ]] && mkdir -m 775 -p "${COMOUT}" -# shellcheck disable= +if [[ ${GOESF} == "YES" ]]; then + YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_GOES + if [[ ! -d ${COM_ATMOS_GOES} ]]; then mkdir -m 775 -p "${COM_ATMOS_GOES}"; fi +fi + +if [[ ${WAFSF} == "YES" ]]; then + YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_WAFS + if [[ ! -d ${COM_ATMOS_WAFS} ]]; then mkdir -m 775 -p "${COM_ATMOS_WAFS}"; fi +fi + +for grid in '0p25' '0p50' '1p00'; do + prod_dir="COM_ATMOS_GRIB_${grid}" + GRID=${grid} YMD=${PDY} HH=${cyc} generate_com -rx "${prod_dir}:COM_ATMOS_GRIB_TMPL" + if [[ ! -d "${prod_dir}" ]]; then mkdir -m 775 -p "${!prod_dir}"; fi +done if [ "${RUN}" = gfs ];then export FHOUT_PGB=${FHOUT_GFS:-3} #Output frequency of gfs pgb file at 1.0 and 0.5 deg. @@ -68,7 +79,8 @@ fi ####################################### # Specify Restart File Name to Key Off ####################################### -export restart_file=${COMIN}/${RUN}.t${cyc}z.logf +# TODO Improve the name of this variable +export restart_file=${COM_ATMOS_HISTORY}/${RUN}.t${cyc}z.atm.logf #################################### # Specify Timeout Behavior of Post @@ -85,7 +97,7 @@ export SLEEP_INT=5 ############################################################### # Run relevant exglobal script -"${SCRgfs}/ex${RUN}_atmos_post.sh" +"${HOMEgfs}/scripts/ex${RUN}_atmos_post.sh" status=$? 
(( status != 0 )) && exit "${status}" diff --git a/jobs/JGLOBAL_ATMOS_SFCANL b/jobs/JGLOBAL_ATMOS_SFCANL index 8012b64a88..dcedb7b65b 100755 --- a/jobs/JGLOBAL_ATMOS_SFCANL +++ b/jobs/JGLOBAL_ATMOS_SFCANL @@ -7,9 +7,7 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "sfcanl" -c "base sfcanl" ############################################## # Set variables used in the script ############################################## -export CDATE=${CDATE:-${PDY}${cyc}} -export CDUMP=${CDUMP:-${RUN:-"gfs"}} -export COMPONENT="atmos" +export CDUMP="${RUN/enkf}" if [[ ${RUN_ENVIR} = "nco" ]]; then export ROTDIR=${COMROOT:?}/${NET}/${envir} fi @@ -18,38 +16,24 @@ fi ############################################## # Begin JOB SPECIFIC work ############################################## - -GDATE=$(${NDATE} -${assim_freq} ${CDATE}) -gPDY=$(echo ${GDATE} | cut -c1-8) -gcyc=$(echo ${GDATE} | cut -c9-10) -GDUMP=${GDUMP:-"gdas"} +# Ignore possible spelling error (nothing is misspelled) +# shellcheck disable=SC2153 +GDATE=$(${NDATE} -"${assim_freq}" "${PDY}${cyc}") +# shellcheck disable= +gPDY=${GDATE:0:8} +gcyc=${GDATE:8:2} +export GDUMP="gdas" export OPREFIX="${CDUMP}.t${cyc}z." export GPREFIX="${GDUMP}.t${gcyc}z." export APREFIX="${CDUMP}.t${cyc}z." 
-if [ ${RUN_ENVIR} = "nco" -o ${ROTDIR_DUMP:-NO} = "YES" ]; then - export COMIN=${COMIN:-${ROTDIR}/${RUN}.${PDY}/${cyc}/atmos} - export COMOUT=${COMOUT:-${ROTDIR}/${RUN}.${PDY}/${cyc}/atmos} - export COMIN_OBS=${COMIN_OBS:-${ROTDIR}/${RUN}.${PDY}/${cyc}/atmos} - export COMIN_GES_OBS=${COMIN_GES_OBS:-${ROTDIR}/${GDUMP}.${gPDY}/${gcyc}/atmos} -else - export COMOUT="${ROTDIR}/${CDUMP}.${PDY}/${cyc}/atmos" - export COMIN_OBS="${COMIN_OBS:-${ROTDIR}/${CDUMP}.${PDY}/${cyc}/obs}" - export COMIN_GES_OBS="${ROTDIR}/${GDUMP}.${gPDY}/${gcyc}/obs" -fi -mkdir -m 775 -p ${COMOUT} -# COMIN_GES and COMIN_GES_ENS are used in script -export COMIN_GES="${ROTDIR}/${GDUMP}.${gPDY}/${gcyc}/atmos" -export COMIN_GES_ENS="${ROTDIR}/enkfgdas.${gPDY}/${gcyc}" - - -export ATMGES="${COMIN_GES}/${GPREFIX}atmf006.nc" -if [ ! -f ${ATMGES} ]; then - echo "FATAL ERROR: FILE MISSING: ATMGES = ${ATMGES}" - exit 1 -fi +YMD=${PDY} HH=${cyc} generate_com -rx COM_OBS COM_ATMOS_ANALYSIS COM_ATMOS_RESTART \ + COM_LAND_ANALYSIS +RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ + COM_OBS_PREV:COM_OBS_TMPL \ + COM_ATMOS_RESTART_PREV:COM_ATMOS_RESTART_TMPL ############################################################### # Run relevant script diff --git a/jobs/JGLOBAL_ATMOS_TROPCY_QC_RELOC b/jobs/JGLOBAL_ATMOS_TROPCY_QC_RELOC index 9a70cb968e..d5e4834851 100755 --- a/jobs/JGLOBAL_ATMOS_TROPCY_QC_RELOC +++ b/jobs/JGLOBAL_ATMOS_TROPCY_QC_RELOC @@ -11,8 +11,7 @@ export RUN_ENVIR=${RUN_ENVIR:-"nco"} # Set variables used in the exglobal script ############################################## export CDATE=${CDATE:-${PDY}${cyc}} -export CDUMP=${CDUMP:-${RUN:-"gfs"}} -export COMPONENT="atmos" +export CDUMP=${RUN/enkf} ############################################## @@ -33,11 +32,8 @@ export TANK_TROPCY=${TANK_TROPCY:-${DCOMROOT}} # path to tropical cyclone reco ############################################## # Define COM directories ############################################## -export 
COMIN=${ROTDIR}/${RUN}.${PDY}/${cyc}/${COMPONENT} -export COMOUT=${ROTDIR}/${RUN}.${PDY}/${cyc}/${COMPONENT} -if [ ! -d ${COMOUT} ]; then mkdir -p ${COMOUT}; fi -#export COMINgdas=${ROTDIR}/gdas.${PDY}/${cyc} -#export COMINgfs=${ROTDIR}/gfs.${PDY}/${cyc} +generate_com COM_OBS +if [[ ! -d "${COM_OBS}" ]]; then mkdir -p "${COM_OBS}"; fi export CRES=$(echo ${CASE} | cut -c2-) export LATB=$((CRES*2)) diff --git a/jobs/JGLOBAL_ATM_ANALYSIS_FINALIZE b/jobs/JGLOBAL_ATM_ANALYSIS_FINALIZE new file mode 100755 index 0000000000..c0bc56f6e2 --- /dev/null +++ b/jobs/JGLOBAL_ATM_ANALYSIS_FINALIZE @@ -0,0 +1,58 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +export WIPE_DATA="NO" +export DATA=${DATA:-${DATAROOT}/${RUN}atmanl_${cyc}} +source "${HOMEgfs}/ush/jjob_header.sh" -e "atmanlfinal" -c "base atmanl atmanlfinal" + +############################################## +# Set variables used in the script +############################################## +# shellcheck disable=SC2153 +GDATE=$(date +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") +gPDY=${GDATE:0:8} +gcyc=${GDATE:8:2} +GDUMP="gdas" + + +############################################## +# Begin JOB SPECIFIC work +############################################## + +# Generate COM variables from templates +YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS + +RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ + COM_ATMOS_ANALYSIS_PREV:COM_ATMOS_ANALYSIS_TMPL \ + COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL \ + COM_ATMOS_RESTART_PREV:COM_ATMOS_RESTART_TMPL + +mkdir -m 775 -p "${COM_ATMOS_ANALYSIS}" + + +############################################################### +# Run relevant script + +EXSCRIPT=${GDASATMFINALPY:-${HOMEgfs}/scripts/exglobal_atm_analysis_finalize.py} +${EXSCRIPT} +status=$? 
+[[ ${status} -ne 0 ]] && exit "${status}" + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]] ; then + cat "${pgmout}" +fi + +########################################## +# Remove the Temporary working directory +########################################## +cd "${DATAROOT}" || ( echo "FATAL ERROR: ${DATAROOT} does not exist, ABORT!"; exit 1 ) +[[ ${KEEPDATA} = "NO" ]] && rm -rf "${DATA}" + +exit 0 diff --git a/jobs/JGLOBAL_ATM_ANALYSIS_INITIALIZE b/jobs/JGLOBAL_ATM_ANALYSIS_INITIALIZE new file mode 100755 index 0000000000..2d794fb846 --- /dev/null +++ b/jobs/JGLOBAL_ATM_ANALYSIS_INITIALIZE @@ -0,0 +1,55 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +export DATA=${DATA:-${DATAROOT}/${RUN}atmanl_${cyc}} +source "${HOMEgfs}/ush/jjob_header.sh" -e "atmanlinit" -c "base atmanl atmanlinit" + +############################################## +# Set variables used in the script +############################################## +# shellcheck disable=SC2153 +GDATE=$(date +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") +gPDY=${GDATE:0:8} +gcyc=${GDATE:8:2} +GDUMP="gdas" +GDUMP_ENS="enkf${GDUMP}" + + +############################################## +# Begin JOB SPECIFIC work +############################################## + +# Generate COM variables from templates +YMD=${PDY} HH=${cyc} generate_com -rx COM_OBS COM_ATMOS_ANALYSIS + +RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ + COM_ATMOS_ANALYSIS_PREV:COM_ATMOS_ANALYSIS_TMPL \ + COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL \ + COM_ATMOS_RESTART_PREV:COM_ATMOS_RESTART_TMPL + +MEMDIR='ensstat' RUN=${GDUMP_ENS} YMD=${gPDY} HH=${gcyc} generate_com -rx \ + COM_ATMOS_HISTORY_ENS_PREV:COM_ATMOS_HISTORY_TMPL + +mkdir -m 775 -p "${COM_ATMOS_ANALYSIS}" + + 
+############################################################### +# Run relevant script + +EXSCRIPT=${GDASATMINITPY:-${HOMEgfs}/scripts/exglobal_atm_analysis_initialize.py} +${EXSCRIPT} +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]] ; then + cat "${pgmout}" +fi + +exit 0 diff --git a/jobs/JGLOBAL_ATM_ANALYSIS_RUN b/jobs/JGLOBAL_ATM_ANALYSIS_RUN new file mode 100755 index 0000000000..bbfdbe4a1f --- /dev/null +++ b/jobs/JGLOBAL_ATM_ANALYSIS_RUN @@ -0,0 +1,37 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +export WIPE_DATA="NO" +export DATA=${DATA:-${DATAROOT}/${RUN}atmanl_${cyc}} +source "${HOMEgfs}/ush/jjob_header.sh" -e "atmanlrun" -c "base atmanl atmanlrun" + +############################################## +# Set variables used in the script +############################################## + + +############################################## +# Begin JOB SPECIFIC work +############################################## + + +############################################################### +# Run relevant script + +EXSCRIPT=${GDASATMRUNSH:-${HOMEgfs}/scripts/exglobal_atm_analysis_run.py} +${EXSCRIPT} +status=$? 
+[[ ${status} -ne 0 ]] && exit "${status}" + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]] ; then + cat "${pgmout}" +fi + +exit 0 diff --git a/jobs/JGLOBAL_FORECAST b/jobs/JGLOBAL_FORECAST index 9027512761..5be44a8c97 100755 --- a/jobs/JGLOBAL_FORECAST +++ b/jobs/JGLOBAL_FORECAST @@ -6,13 +6,7 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "fcst" -c "base fcst" ############################################## # Set variables used in the script ############################################## -# Set wave variables -if [ ${DO_WAVE:-"NO"} = "YES" ]; then - # WAVE component directory - export CDUMPwave=${CDUMPwave:-${CDUMP}wave} - export COMINwave=${COMINwave:-$(compath.py ${envir}/${NET}/${gfs_ver})/${CDUMP}.${PDY}/${cyc}/wave} - export COMOUTwave=${COMOUTwave:-$(compath.py -o ${NET}/${gfs_ver})/${CDUMP}.${PDY}/${cyc}/wave} -fi +export CDUMP=${RUN/enkf} ############################################## # Begin JOB SPECIFIC work @@ -34,6 +28,45 @@ else fi +# Ignore possible spelling error (nothing is misspelled) +# shellcheck disable=SC2153 +GDATE=$(${NDATE} -"${assim_freq}" "${PDY}${cyc}") +# shellcheck disable= +declare -x gPDY="${GDATE:0:8}" +declare -x gcyc="${GDATE:8:2}" + +# Construct COM variables from templates (see config.com) +YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_RESTART COM_ATMOS_INPUT COM_ATMOS_ANALYSIS \ + COM_ATMOS_HISTORY COM_ATMOS_MASTER COM_TOP + +RUN=${rCDUMP} YMD="${gPDY}" HH="${gcyc}" generate_com -rx \ + COM_ATMOS_RESTART_PREV:COM_ATMOS_RESTART_TMPL + +if [[ ${DO_WAVE} == "YES" ]]; then + YMD=${PDY} HH=${cyc} generate_com -rx COM_WAVE_RESTART COM_WAVE_PREP COM_WAVE_HISTORY + RUN=${rCDUMP} YMD="${gPDY}" HH="${gcyc}" generate_com -rx \ + COM_WAVE_RESTART_PREV:COM_WAVE_RESTART_TMPL + declare -rx RUNwave="${RUN}wave" +fi + +if [[ 
${DO_OCN} == "YES" ]]; then + YMD=${PDY} HH=${cyc} generate_com -rx COM_MED_RESTART COM_OCEAN_RESTART COM_OCEAN_INPUT \ + COM_OCEAN_HISTORY COM_OCEAN_ANALYSIS + RUN=${CDUMP} YMD="${gPDY}" HH="${gcyc}" generate_com -rx \ + COM_OCEAN_RESTART_PREV:COM_OCEAN_RESTART_TMPL +fi + +if [[ ${DO_ICE} == "YES" ]]; then + YMD=${PDY} HH=${cyc} generate_com -rx COM_ICE_HISTORY COM_ICE_INPUT COM_ICE_RESTART + RUN=${CDUMP} YMD="${gPDY}" HH="${gcyc}" generate_com -rx \ + COM_ICE_RESTART_PREV:COM_ICE_RESTART_TMPL +fi + +if [[ ${DO_AERO} == "YES" ]]; then + YMD=${PDY} HH=${cyc} generate_com -rx COM_CHEM_HISTORY +fi + + ############################################################### # Run relevant exglobal script diff --git a/jobs/JGLOBAL_LAND_ANALYSIS_FINALIZE b/jobs/JGLOBAL_LAND_ANALYSIS_FINALIZE new file mode 100755 index 0000000000..695888a568 --- /dev/null +++ b/jobs/JGLOBAL_LAND_ANALYSIS_FINALIZE @@ -0,0 +1,54 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +export WIPE_DATA="NO" +export DATA=${DATA:-${DATAROOT}/${RUN}landanl_${cyc}} +source "${HOMEgfs}/ush/jjob_header.sh" -e "landanlfinal" -c "base landanl landanlfinal" + +############################################## +# Set variables used in the script +############################################## +# Ignore possible spelling error (nothing is misspelled) +# shellcheck disable=SC2153 +GDATE=$(date +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") +gPDY=${GDATE:0:8} +gcyc=${GDATE:8:2} +GDUMP="gdas" + +############################################## +# Begin JOB SPECIFIC work +############################################## +# Generate COM variables from templates +YMD=${PDY} HH=${cyc} generate_com -rx COM_LAND_ANALYSIS + +RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ + COM_LAND_ANALYSIS_PREV:COM_LAND_ANALYSIS_TMPL \ + COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL \ + COM_ATMOS_RESTART_PREV:COM_ATMOS_RESTART_TMPL + +############################################################### +# Run relevant script + 
+EXSCRIPT=${GDASLANDFINALPY:-${HOMEgfs}/scripts/exglobal_land_analysis_finalize.py} +${EXSCRIPT} +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]] ; then + cat "${pgmout}" +fi + +########################################## +# Remove the Temporary working directory +########################################## +cd "${DATAROOT}" || ( echo "FATAL ERROR: ${DATAROOT} does not exist, ABORT!"; exit 1 ) +[[ ${KEEPDATA} = "NO" ]] && rm -rf "${DATA}" + +exit 0 diff --git a/jobs/JGLOBAL_LAND_ANALYSIS_INITIALIZE b/jobs/JGLOBAL_LAND_ANALYSIS_INITIALIZE new file mode 100755 index 0000000000..73848b95f9 --- /dev/null +++ b/jobs/JGLOBAL_LAND_ANALYSIS_INITIALIZE @@ -0,0 +1,49 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +export DATA=${DATA:-${DATAROOT}/${RUN}landanl_${cyc}} +source "${HOMEgfs}/ush/jjob_header.sh" -e "landanlinit" -c "base landanl landanlinit" + +############################################## +# Set variables used in the script +############################################## +# Ignore possible spelling error (nothing is misspelled) +# shellcheck disable=SC2153 +GDATE=$(date +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") +gPDY=${GDATE:0:8} +gcyc=${GDATE:8:2} +GDUMP="gdas" + +############################################## +# Begin JOB SPECIFIC work +############################################## +# Generate COM variables from templates +YMD=${PDY} HH=${cyc} generate_com -rx COM_OBS COM_LAND_ANALYSIS + +RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ + COM_LAND_ANALYSIS_PREV:COM_LAND_ANALYSIS_TMPL \ + COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL \ + COM_ATMOS_RESTART_PREV:COM_ATMOS_RESTART_TMPL + +mkdir -m 775 -p "${COM_LAND_ANALYSIS}" + 
+############################################################### +# Run relevant script + +EXSCRIPT=${GDASLANDINITPY:-${HOMEgfs}/scripts/exglobal_land_analysis_initialize.py} +${EXSCRIPT} +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]] ; then + cat "${pgmout}" +fi + +exit 0 diff --git a/jobs/JGLOBAL_LAND_ANALYSIS_RUN b/jobs/JGLOBAL_LAND_ANALYSIS_RUN new file mode 100755 index 0000000000..46781c4e8f --- /dev/null +++ b/jobs/JGLOBAL_LAND_ANALYSIS_RUN @@ -0,0 +1,39 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +export WIPE_DATA="NO" +export DATA=${DATA:-${DATAROOT}/${RUN}landanl_${cyc}} +source "${HOMEgfs}/ush/jjob_header.sh" -e "landanlrun" -c "base landanl landanlrun" + +############################################## +# Set variables used in the script +############################################## + + +############################################## +# Begin JOB SPECIFIC work +############################################## +# Generate COM variables from templates +YMD=${PDY} HH=${cyc} generate_com -rx COM_OBS + + +############################################################### +# Run relevant script + +EXSCRIPT=${GDASLANDRUNSH:-${HOMEgfs}/scripts/exglobal_land_analysis_run.sh} +${EXSCRIPT} +status=$? 
+[[ ${status} -ne 0 ]] && exit "${status}" + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]] ; then + cat "${pgmout}" +fi + +exit 0 diff --git a/jobs/JGLOBAL_WAVE_GEMPAK b/jobs/JGLOBAL_WAVE_GEMPAK index f57324607e..b7c97ce571 100755 --- a/jobs/JGLOBAL_WAVE_GEMPAK +++ b/jobs/JGLOBAL_WAVE_GEMPAK @@ -1,27 +1,22 @@ #! /usr/bin/env bash source "${HOMEgfs}/ush/preamble.sh" -source "${HOMEgfs}/ush/jjob_header.sh" -e "wavegempak" -c "base wavegempak" - -export COMPONENT="wave" +source "${HOMEgfs}/ush/jjob_header.sh" -e "wavegempak" -c "base wave wavegempak" # Add default errchk = err_chk export errchk=${errchk:-err_chk} ################################### # Set COM Paths -export COMIN=${COMIN:-$(compath.py ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/${COMPONENT}} -export COMOUT=${COMOUT:-$(compath.py -o ${NET}/${gfs_ver}/${RUN}.${PDY})/${cyc}/${COMPONENT}/gempak} - +################################### export DBN_ALERT_TYPE=GFS_WAVE_GEMPAK export SENDCOM=${SENDCOM:-YES} export SENDDBN=${SENDDBN:-YES} export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} -if [ ${SENDCOM} = YES ] ; then - mkdir -m 775 -p ${COMOUT} -fi +YMD=${PDY} HH=${cyc} generate_com -rx COM_WAVE_GRID COM_WAVE_GEMPAK +if [[ ! -d ${COM_WAVE_GEMPAK} ]]; then mkdir -p "${COM_WAVE_GEMPAK}"; fi ######################################################## # Execute the script. 
diff --git a/jobs/JGLOBAL_WAVE_INIT b/jobs/JGLOBAL_WAVE_INIT index d1b0037bf0..49fccad66f 100755 --- a/jobs/JGLOBAL_WAVE_INIT +++ b/jobs/JGLOBAL_WAVE_INIT @@ -3,7 +3,6 @@ source "${HOMEgfs}/ush/preamble.sh" source "${HOMEgfs}/ush/jjob_header.sh" -e "waveinit" -c "base wave waveinit" -export COMPONENT="wave" # Add default errchk = err_chk export errchk=${errchk:-err_chk} @@ -16,13 +15,11 @@ export PARMwave=${PARMwave:-${HOMEgfs}/parm/wave} export USHwave=${USHwave:-${HOMEgfs}/ush} export EXECwave=${EXECwave:-${HOMEgfs}/exec} -# Set COM Paths and GETGES environment -export COMIN=${COMIN:-${ROTDIR}/${CDUMP}.${PDY}/${cyc}/${COMPONENT}} -export COMOUT=${COMOUT:-${ROTDIR}/${CDUMP}.${PDY}/${cyc}/${COMPONENT}} -[[ ! -d ${COMOUT} ]] && mkdir -m 775 -p ${COMOUT} +# Set COM Paths +YMD=${PDY} HH=${cyc} generate_com -rx COM_WAVE_PREP if [ ${SENDCOM} = YES ]; then - mkdir -p ${COMOUT}/rundata + mkdir -m 775 -p ${COM_WAVE_PREP} fi # Set mpi serial command diff --git a/jobs/JGLOBAL_WAVE_POST_BNDPNT b/jobs/JGLOBAL_WAVE_POST_BNDPNT index ff473c08f5..9016d624d7 100755 --- a/jobs/JGLOBAL_WAVE_POST_BNDPNT +++ b/jobs/JGLOBAL_WAVE_POST_BNDPNT @@ -3,13 +3,9 @@ source "${HOMEgfs}/ush/preamble.sh" source "${HOMEgfs}/ush/jjob_header.sh" -e "wavepostbndpnt" -c "base wave wavepostsbs wavepostbndpnt" -export COMPONENT="wave" - # Add default errchk = err_chk export errchk=${errchk:-err_chk} -export CDATE=${PDY}${cyc} - export MP_PULSE=0 # Path to HOME Directory @@ -19,25 +15,23 @@ export USHwave=${USHwave:-${HOMEgfs}/ush} export EXECwave=${EXECwave:-${HOMEgfs}/exec} # Set COM Paths and GETGES environment -export COMIN=${COMIN:-${ROTDIR}/${CDUMP}.${PDY}/${cyc}/${COMPONENT}} -export COMOUT=${COMOUT:-${ROTDIR}/${CDUMP}.${PDY}/${cyc}/${COMPONENT}} - -mkdir -p ${COMOUT}/station +YMD=${PDY} HH=${cyc} generate_com -rx COM_WAVE_PREP COM_WAVE_HISTORY COM_WAVE_STATION +if [[ ! 
-d ${COM_WAVE_STATION} ]]; then mkdir -p "${COM_WAVE_STATION}"; fi # Set wave model ID tag to include member number # if ensemble; waveMEMB var empty in deterministic membTAG='p' if [ "${waveMEMB}" == "00" ]; then membTAG='c'; fi export membTAG -export WAV_MOD_TAG=${CDUMP}wave${waveMEMB} +export WAV_MOD_TAG=${RUN}wave${waveMEMB} export CFP_VERBOSE=1 export FHMAX_WAV_PNT=${FHMAX_WAV_IBP} export DOSPC_WAV='YES' # Spectral post export DOBLL_WAV='NO' # Bulletin post -export DOBNDPNT_WAV='YES' #not boundary points +export DOBNDPNT_WAV='YES' # Do boundary points # Execute the Script ${HOMEgfs}/scripts/exgfs_wave_post_pnt.sh diff --git a/jobs/JGLOBAL_WAVE_POST_BNDPNTBLL b/jobs/JGLOBAL_WAVE_POST_BNDPNTBLL index 769c738082..c193a28cf7 100755 --- a/jobs/JGLOBAL_WAVE_POST_BNDPNTBLL +++ b/jobs/JGLOBAL_WAVE_POST_BNDPNTBLL @@ -19,21 +19,16 @@ export USHwave=${USHwave:-${HOMEgfs}/ush} export EXECwave=${EXECwave:-${HOMEgfs}/exec} # Set COM Paths and GETGES environment -export COMIN=${COMIN:-${ROTDIR}/${CDUMP}.${PDY}/${cyc}/${COMPONENT}} -export COMOUT=${COMOUT:-${ROTDIR}/${CDUMP}.${PDY}/${cyc}/${COMPONENT}} +YMD=${PDY} HH=${cyc} generate_com -rx COM_WAVE_PREP COM_WAVE_HISTORY COM_WAVE_STATION +if [[ ! 
-d ${COM_WAVE_STATION} ]]; then mkdir -p "${COM_WAVE_STATION}"; fi -mkdir -p ${COMOUT}/station - - -# Set wave model ID tag to include member number -# if ensemble; waveMEMB var empty in deterministic # Set wave model ID tag to include member number # if ensemble; waveMEMB var empty in deterministic membTAG='p' if [ "${waveMEMB}" == "00" ]; then membTAG='c'; fi export membTAG -export WAV_MOD_TAG=${CDUMP}wave${waveMEMB} +export WAV_MOD_TAG=${RUN}wave${waveMEMB} export CFP_VERBOSE=1 diff --git a/jobs/JGLOBAL_WAVE_POST_PNT b/jobs/JGLOBAL_WAVE_POST_PNT index 41d4b95ac2..3ee1d56eef 100755 --- a/jobs/JGLOBAL_WAVE_POST_PNT +++ b/jobs/JGLOBAL_WAVE_POST_PNT @@ -3,13 +3,9 @@ source "${HOMEgfs}/ush/preamble.sh" source "${HOMEgfs}/ush/jjob_header.sh" -e "wavepostpnt" -c "base wave wavepostsbs wavepostpnt" -export COMPONENT="wave" - # Add default errchk = err_chk export errchk=${errchk:-err_chk} -export CDATE=${PDY}${cyc} - export MP_PULSE=0 # Path to HOME Directory @@ -19,20 +15,16 @@ export USHwave=${USHwave:-${HOMEgfs}/ush} export EXECwave=${EXECwave:-${HOMEgfs}/exec} # Set COM Paths and GETGES environment -export COMIN=${COMIN:-${ROTDIR}/${CDUMP}.${PDY}/${cyc}/${COMPONENT}} -export COMOUT=${COMOUT:-${ROTDIR}/${CDUMP}.${PDY}/${cyc}/${COMPONENT}} - -mkdir -p ${COMOUT}/station +YMD=${PDY} HH=${cyc} generate_com -rx COM_WAVE_PREP COM_WAVE_HISTORY COM_WAVE_STATION +if [[ ! 
-d ${COM_WAVE_STATION} ]]; then mkdir -p "${COM_WAVE_STATION}"; fi -# Set wave model ID tag to include member number -# if ensemble; waveMEMB var empty in deterministic # Set wave model ID tag to include member number # if ensemble; waveMEMB var empty in deterministic membTAG='p' if [ "${waveMEMB}" == "00" ]; then membTAG='c'; fi export membTAG -export WAV_MOD_TAG=${CDUMP}wave${waveMEMB} +export WAV_MOD_TAG=${RUN}wave${waveMEMB} export CFP_VERBOSE=1 diff --git a/jobs/JGLOBAL_WAVE_POST_SBS b/jobs/JGLOBAL_WAVE_POST_SBS index d656a1b9c7..47e7063db4 100755 --- a/jobs/JGLOBAL_WAVE_POST_SBS +++ b/jobs/JGLOBAL_WAVE_POST_SBS @@ -3,13 +3,9 @@ source "${HOMEgfs}/ush/preamble.sh" source "${HOMEgfs}/ush/jjob_header.sh" -e "wavepostsbs" -c "base wave wavepostsbs" -export COMPONENT="wave" - # Add default errchk = err_chk export errchk=${errchk:-err_chk} -export CDATE=${PDY}${cyc} - export MP_PULSE=0 # Path to HOME Directory @@ -19,14 +15,9 @@ export USHwave=${USHwave:-${HOMEgfs}/ush} export EXECwave=${EXECwave:-${HOMEgfs}/exec} # Set COM Paths and GETGES environment -export COMIN=${COMIN:-${ROTDIR}/${CDUMP}.${PDY}/${cyc}/${COMPONENT}} -export COMOUT=${COMOUT:-${ROTDIR}/${CDUMP}.${PDY}/${cyc}/${COMPONENT}} - -export COMINice=${COMINice:-${COMROOTp2:-${COMROOT}}/omb/prod} -export COMINwnd=${COMINwnd:-${COMROOT}/gfs/prod} -export COMIN_WAV_CUR=${COMIN_WAV_CUR:-$(compath.py ${envir}/rtofs/${rtofs_ver})} +YMD=${PDY} HH=${cyc} generate_com -rx COM_WAVE_PREP COM_WAVE_HISTORY COM_WAVE_GRID -mkdir -p ${COMOUT}/gridded +mkdir -p "${COM_WAVE_GRID}" # Set wave model ID tag to include member number @@ -36,7 +27,7 @@ mkdir -p ${COMOUT}/gridded membTAG='p' if [ "${waveMEMB}" == "00" ]; then membTAG='c'; fi export membTAG -export WAV_MOD_TAG=${CDUMP}wave${waveMEMB} +export WAV_MOD_TAG=${RUN}wave${waveMEMB} export CFP_VERBOSE=1 diff --git a/jobs/JGLOBAL_WAVE_PRDGEN_BULLS b/jobs/JGLOBAL_WAVE_PRDGEN_BULLS index d1bbc81e4f..794258e756 100755 --- a/jobs/JGLOBAL_WAVE_PRDGEN_BULLS +++ 
b/jobs/JGLOBAL_WAVE_PRDGEN_BULLS @@ -1,29 +1,22 @@ #! /usr/bin/env bash source "${HOMEgfs}/ush/preamble.sh" -source "${HOMEgfs}/ush/jjob_header.sh" -e "waveawipsbulls" -c "base waveawipsbulls" - -export COMPONENT="wave" +source "${HOMEgfs}/ush/jjob_header.sh" -e "waveawipsbulls" -c "base wave waveawipsbulls" # Add default errchk = err_chk export errchk=${errchk:-err_chk} ################################### # Set COM Paths -export COMIN=${COMIN:-$(compath.py ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/${COMPONENT}} -export COMOUT=${COMOUT:-$(compath.py -o ${NET}/${gfs_ver}/${RUN}.${PDY})/${cyc}/${COMPONENT}} -export PCOM=${PCOM:-${COMOUT}/wmo} - +################################### export SENDCOM=${SENDCOM:-YES} export SENDDBN_NTC=${SENDDBN_NTC:-YES} export SENDDBN=${SENDDBN:-NO} export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} +YMD=${PDY} HH=${cyc} generate_com -rx COM_WAVE_STATION COM_WAVE_WMO -if [ ${SENDCOM} = YES ]; then - mkdir -p ${COMOUT} ${PCOM} -fi - +if [[ ! -d ${COM_WAVE_WMO} ]]; then mkdir -p "${COM_WAVE_WMO}"; fi ################################### # Execute the Script diff --git a/jobs/JGLOBAL_WAVE_PRDGEN_GRIDDED b/jobs/JGLOBAL_WAVE_PRDGEN_GRIDDED index b3c5ce550d..a2134461da 100755 --- a/jobs/JGLOBAL_WAVE_PRDGEN_GRIDDED +++ b/jobs/JGLOBAL_WAVE_PRDGEN_GRIDDED @@ -1,9 +1,7 @@ #! 
/usr/bin/env bash source "${HOMEgfs}/ush/preamble.sh" -source "${HOMEgfs}/ush/jjob_header.sh" -e "waveawipsgridded" -c "base waveawipsgridded" - -export COMPONENT="wave" +source "${HOMEgfs}/ush/jjob_header.sh" -e "waveawipsgridded" -c "base wave waveawipsgridded" # Add default errchk = err_chk export errchk=${errchk:-err_chk} @@ -11,17 +9,17 @@ export errchk=${errchk:-err_chk} ################################### # Set COM Paths ################################### -export COMIN=${COMIN:-$(compath.py ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/${COMPONENT}} -export COMOUT=${COMOUT:-$(compath.py -o ${NET}/${gfs_ver}/${RUN}.${PDY})/${cyc}/${COMPONENT}} -export PCOM=${PCOM:-${COMOUT}/wmo} - export SENDCOM=${SENDCOM:-YES} export SENDDBN_NTC=${SENDDBN_NTC:-YES} export SENDDBN=${SENDDBN:-NO} export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} +YMD=${PDY} HH=${cyc} generate_com -rx COM_WAVE_GRID COM_WAVE_WMO + +if [[ ! -d ${COM_WAVE_WMO} ]]; then mkdir -p "${COM_WAVE_WMO}"; fi + if [ ${SENDCOM} = YES ]; then - mkdir -p ${COMOUT} ${PCOM} + mkdir -p "${COM_WAVE_WMO}" fi ################################### diff --git a/jobs/JGLOBAL_WAVE_PREP b/jobs/JGLOBAL_WAVE_PREP index 1b059f38f4..5ff48d886c 100755 --- a/jobs/JGLOBAL_WAVE_PREP +++ b/jobs/JGLOBAL_WAVE_PREP @@ -3,11 +3,11 @@ source "${HOMEgfs}/ush/preamble.sh" source "${HOMEgfs}/ush/jjob_header.sh" -e "waveprep" -c "base wave waveprep" -export COMPONENT="wave" - # Add default errchk = err_chk export errchk=${errchk:-err_chk} +export CDUMP=${RUN/enkf} + # Set rtofs PDY export RPDY=${PDY} @@ -23,32 +23,9 @@ export USHwave=${USHwave:-${HOMEgfs}/ush} export EXECwave=${EXECwave:-${HOMEgfs}/exec} # Set COM Paths and GETGES environment -export COMIN=${COMIN:-${ROTDIR}/${CDUMP}.${PDY}/${cyc}/${COMPONENT}} -export COMOUT=${COMOUT:-${ROTDIR}/${CDUMP}.${PDY}/${cyc}/${COMPONENT}} -[[ ! 
-d ${COMOUT} ]] && mkdir -m 775 -p ${COMOUT} - -if [ ${RUN_ENVIR} = "nco" ]; then - export COMIN_WAV_ICE=${COMIN_WAV_ICE:-$(compath.py ${envir}/obsproc/${obsproc_ver})}/${CDUMP}.${PDY}/${cyc}/atmos - export COMIN_WAV_RTOFS=${COMIN_WAV_RTOFS:-$(compath.py ${envir}/${WAVECUR_DID}/${rtofs_ver})} -else - if [ ${WW3CURINP} = "YES" ]; then - if [ ! -d ${DMPDIR}/${WAVECUR_DID}.${RPDY} ]; then export RPDY=$(${NDATE} -24 ${PDY}00 | cut -c1-8); fi - if [ ! -L ${ROTDIR}/${WAVECUR_DID}.${RPDY} ]; then # Check if symlink already exists in ROTDIR - ${NLN} ${DMPDIR}/${WAVECUR_DID}.${RPDY} ${ROTDIR}/${WAVECUR_DID}.${RPDY} - fi - BRPDY=$(${NDATE} -24 ${RPDY}00 | cut -c1-8) - if [ ! -L ${ROTDIR}/${WAVECUR_DID}.${BRPDY} ]; then # Check if symlink already exists in ROTDIR - ${NLN} ${DMPDIR}/${WAVECUR_DID}.${BRPDY} ${ROTDIR}/${WAVECUR_DID}.${BRPDY} - fi - export COMIN_WAV_RTOFS=${COMIN_WAV_RTOFS:-${ROTDIR}} - fi - if [ ${WW3ICEINP} = "YES" ]; then - if [ ! -L ${ROTDIR}/${CDUMP}.${PDY}/${cyc}/atmos/${WAVICEFILE} ]; then # Check if symlink already exists in ROTDIR - ${NLN} ${DMPDIR}/${CDUMP}.${PDY}/${cyc}/atmos/${WAVICEFILE} ${ROTDIR}/${CDUMP}.${PDY}/${cyc}/atmos/${WAVICEFILE} - fi - export COMIN_WAV_ICE=${COMIN_WAV_ICE:-${ROTDIR}/${RUN}.${PDY}/${cyc}/atmos} - fi -fi +YMD=${PDY} HH=${cyc} generate_com -rx COM_OBS COM_WAVE_PREP +generate_com -rx COM_RTOFS +[[ ! -d ${COM_WAVE_PREP} ]] && mkdir -m 775 -p "${COM_WAVE_PREP}" # Execute the Script ${HOMEgfs}/scripts/exgfs_wave_prep.sh diff --git a/jobs/rocoto/atmanalpost.sh b/jobs/rocoto/atmanalpost.sh deleted file mode 100755 index 71ace70c8b..0000000000 --- a/jobs/rocoto/atmanalpost.sh +++ /dev/null @@ -1,20 +0,0 @@ -#! /usr/bin/env bash - -export STRICT="NO" -source "${HOMEgfs}/ush/preamble.sh" -export STRICT="YES" - -############################################################### -# Source UFSDA workflow modules -. ${HOMEgfs}/ush/load_ufsda_modules.sh -status=$? 
-[[ ${status} -ne 0 ]] && exit ${status} - -export job="atmanalpost" -export jobid="${job}.$$" - -############################################################### -# Execute the JJOB -${HOMEgfs}/jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_POST -status=$? -exit ${status} diff --git a/jobs/rocoto/atmanalprep.sh b/jobs/rocoto/atmanalprep.sh deleted file mode 100755 index d5b729194a..0000000000 --- a/jobs/rocoto/atmanalprep.sh +++ /dev/null @@ -1,20 +0,0 @@ -#! /usr/bin/env bash - -export STRICT="NO" -source "${HOMEgfs}/ush/preamble.sh" -export STRICT="YES" - -############################################################### -# Source UFSDA workflow modules -. ${HOMEgfs}/ush/load_ufsda_modules.sh -status=$? -[[ ${status} -ne 0 ]] && exit ${status} - -export job="atmanalprep" -export jobid="${job}.$$" - -############################################################### -# Execute the JJOB -${HOMEgfs}/jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_PREP -status=$? -exit ${status} diff --git a/jobs/rocoto/atmanlfinal.sh b/jobs/rocoto/atmanlfinal.sh new file mode 100755 index 0000000000..3c75c52cb0 --- /dev/null +++ b/jobs/rocoto/atmanlfinal.sh @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source UFSDA workflow modules +. "${HOMEgfs}/ush/load_ufsda_modules.sh" +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +export job="atmanlfinal" +export jobid="${job}.$$" + +############################################################### +# setup python path for workflow utilities and tasks +pygwPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/pygw/src" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${pygwPATH}" +export PYTHONPATH +############################################################### +# Execute the JJOB +"${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_FINALIZE" +status=$? 
+exit "${status}" diff --git a/jobs/rocoto/atmanlinit.sh b/jobs/rocoto/atmanlinit.sh new file mode 100755 index 0000000000..7bb2587f0b --- /dev/null +++ b/jobs/rocoto/atmanlinit.sh @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source UFSDA workflow modules +. "${HOMEgfs}/ush/load_ufsda_modules.sh" +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +export job="atmanlinit" +export jobid="${job}.$$" + +############################################################### +# setup python path for workflow utilities and tasks +pygwPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/pygw/src" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${pygwPATH}" +export PYTHONPATH + +############################################################### +# Execute the JJOB +"${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_INITIALIZE" +status=$? +exit "${status}" diff --git a/jobs/rocoto/atmanlrun.sh b/jobs/rocoto/atmanlrun.sh new file mode 100755 index 0000000000..aad80e0b06 --- /dev/null +++ b/jobs/rocoto/atmanlrun.sh @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source UFSDA workflow modules +. "${HOMEgfs}/ush/load_ufsda_modules.sh" +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +export job="atmanlrun" +export jobid="${job}.$$" + +############################################################### +# setup python path for workflow utilities and tasks +pygwPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/pygw/src" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${pygwPATH}" +export PYTHONPATH + +############################################################### +# Execute the JJOB +"${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_RUN" +status=$? 
+exit "${status}" diff --git a/jobs/rocoto/atmensanalpost.sh b/jobs/rocoto/atmensanalpost.sh deleted file mode 100755 index 91ac2d6212..0000000000 --- a/jobs/rocoto/atmensanalpost.sh +++ /dev/null @@ -1,20 +0,0 @@ -#! /usr/bin/env bash - -export STRICT="NO" -source "${HOMEgfs}/ush/preamble.sh" -export STRICT="YES" - -############################################################### -# Source UFSDA workflow modules -. ${HOMEgfs}/ush/load_ufsda_modules.sh -status=$? -[[ ${status} -ne 0 ]] && exit ${status} - -export job="atmensanalpost" -export jobid="${job}.$$" - -############################################################### -# Execute the JJOB -${HOMEgfs}/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_POST -status=$? -exit ${status} diff --git a/jobs/rocoto/atmensanalprep.sh b/jobs/rocoto/atmensanalprep.sh deleted file mode 100755 index b54a1b464e..0000000000 --- a/jobs/rocoto/atmensanalprep.sh +++ /dev/null @@ -1,20 +0,0 @@ -#! /usr/bin/env bash - -export STRICT="NO" -source "${HOMEgfs}/ush/preamble.sh" -export STRICT="YES" - -############################################################### -# Source UFSDA workflow modules -. ${HOMEgfs}/ush/load_ufsda_modules.sh -status=$? -[[ ${status} -ne 0 ]] && exit ${status} - -export job="atmensanalprep" -export jobid="${job}.$$" - -############################################################### -# Execute the JJOB -${HOMEgfs}/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_PREP -status=$? -exit ${status} diff --git a/jobs/rocoto/atmensanalrun.sh b/jobs/rocoto/atmensanalrun.sh deleted file mode 100755 index a2509a310e..0000000000 --- a/jobs/rocoto/atmensanalrun.sh +++ /dev/null @@ -1,20 +0,0 @@ -#! /usr/bin/env bash - -export STRICT="NO" -source "${HOMEgfs}/ush/preamble.sh" -export STRICT="YES" - -############################################################### -# Source UFSDA workflow modules -. ${HOMEgfs}/ush/load_ufsda_modules.sh -status=$? 
-[[ ${status} -ne 0 ]] && exit ${status} - -export job="atmensanalrun" -export jobid="${job}.$$" - -############################################################### -# Execute the JJOB -${HOMEgfs}/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_RUN -status=$? -exit ${status} diff --git a/jobs/rocoto/atmensanlfinal.sh b/jobs/rocoto/atmensanlfinal.sh new file mode 100755 index 0000000000..838e9712f8 --- /dev/null +++ b/jobs/rocoto/atmensanlfinal.sh @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source UFSDA workflow modules +. "${HOMEgfs}/ush/load_ufsda_modules.sh" +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +export job="atmensanlfinal" +export jobid="${job}.$$" + +############################################################### +# setup python path for workflow utilities and tasks +pygwPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/pygw/src" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${pygwPATH}" +export PYTHONPATH +############################################################### +# Execute the JJOB +"${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_FINALIZE" +status=$? +exit "${status}" diff --git a/jobs/rocoto/atmensanlinit.sh b/jobs/rocoto/atmensanlinit.sh new file mode 100755 index 0000000000..0ab78a1083 --- /dev/null +++ b/jobs/rocoto/atmensanlinit.sh @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source UFSDA workflow modules +. "${HOMEgfs}/ush/load_ufsda_modules.sh" +status=$? 
+[[ ${status} -ne 0 ]] && exit "${status}" + +export job="atmensanlinit" +export jobid="${job}.$$" + +############################################################### +# setup python path for workflow utilities and tasks +pygwPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/pygw/src" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${pygwPATH}" +export PYTHONPATH + +############################################################### +# Execute the JJOB +"${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_INITIALIZE" +status=$? +exit "${status}" diff --git a/jobs/rocoto/atmensanlrun.sh b/jobs/rocoto/atmensanlrun.sh new file mode 100755 index 0000000000..91efdb3768 --- /dev/null +++ b/jobs/rocoto/atmensanlrun.sh @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source UFSDA workflow modules +. "${HOMEgfs}/ush/load_ufsda_modules.sh" +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +export job="atmensanlrun" +export jobid="${job}.$$" + +############################################################### +# setup python path for workflow utilities and tasks +pygwPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/pygw/src" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${pygwPATH}" +export PYTHONPATH + +############################################################### +# Execute the JJOB +"${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_RUN" +status=$? +exit "${status}" diff --git a/jobs/rocoto/awips.sh b/jobs/rocoto/awips.sh index 7ba31e0dd1..f9289255f9 100755 --- a/jobs/rocoto/awips.sh +++ b/jobs/rocoto/awips.sh @@ -1,6 +1,6 @@ #! 
/usr/bin/env bash -source "$HOMEgfs/ush/preamble.sh" +source "${HOMEgfs}/ush/preamble.sh" ############################################################### ## Abstract: @@ -27,114 +27,49 @@ export jobid="${job}.$$" # Also, this forces us to call the config files here instead of the j-job source "${HOMEgfs}/ush/jjob_header.sh" -e "awips" -c "base awips" - -fhrlst=$(echo $FHRLST | sed -e 's/_/ /g; s/f/ /g; s/,/ /g') +fhrlst=$(echo ${FHRLST} | sed -e 's/_/ /g; s/f/ /g; s/,/ /g') ############################################################### -export COMPONENT=${COMPONENT:-atmos} -export CDATEm1=$($NDATE -24 $CDATE) -export PDYm1=$(echo $CDATEm1 | cut -c1-8) - -export COMIN="$ROTDIR/$CDUMP.$PDY/$cyc/$COMPONENT" -export DATAROOT="$RUNDIR/$CDATE/$CDUMP/awips$FHRGRP" -[[ -d $DATAROOT ]] && rm -rf $DATAROOT -mkdir -p $DATAROOT - ################################################################################ echo echo "=============== BEGIN AWIPS ===============" -export SENDCOM="YES" -export COMOUT="$ROTDIR/$CDUMP.$PDY/$cyc/$COMPONENT" -export PCOM="$COMOUT/wmo" - -SLEEP_TIME=1800 -SLEEP_INT=5 -SLEEP_LOOP_MAX=$(expr $SLEEP_TIME / $SLEEP_INT) -for fhr in $fhrlst; do - - if [ $fhr -gt $FHMAX_GFS ]; then - echo "Nothing to process for FHR = $fhr, cycle" +for fhr in ${fhrlst}; do + if (( fhr > FHMAX_GFS )); then + echo "Nothing to process for FHR = ${fhr}, cycle" continue fi fhmin=0 fhmax=84 - if [ $fhr -ge $fhmin -a $fhr -le $fhmax ] ; then - if [[ $(expr $fhr % 3) -eq 0 ]]; then - fhr3=$(printf %03d $((10#$fhr))) - -# Check for input file existence. If not present, sleep -# Loop SLEEP_LOOP_MAX times. Abort if not found. 
- ic=1 - while [[ $ic -le $SLEEP_LOOP_MAX ]]; do - if [ -s $COMOUT/$CDUMP.t${cyc}z.pgrb2b.0p25.f${fhr3}.idx ]; then - break - else - ic=$(expr $ic + 1) - sleep $SLEEP_INT - fi - if [ $ic -eq $SLEEP_LOOP_MAX ]; then - echo "***FATAL ERROR*** $COMOUT/$CDUMP.t${cyc}z.pgrb2b.0p25.f${fhr3}.idx NOT available" - export err=9 - err_chk - fi - done - - export fcsthrs=$fhr3 - # export job="jgfs_awips_f${fcsthrs}_20km_${cyc}" - # export DATA="${DATAROOT}/$job" - $AWIPS20SH - fi - - if [[ $(expr $fhr % 6) -eq 0 ]]; then - # export job="jgfs_awips_f${fcsthrs}_${cyc}" - # export DATA="${DATAROOT}/$job" - $AWIPSG2SH - fi + if (( fhr >= fhmin && fhr <= fhmax )); then + if ((fhr % 3 == 0)); then + fhr3=$(printf %03d $((10#${fhr}))) + export fcsthrs=${fhr3} + ${AWIPS20SH} + fi + + if ((fhr % 6 == 0)); then + ${AWIPSG2SH} + fi fi fhmin=90 fhmax=240 - if [ $fhr -ge $fhmin -a $fhr -le $fhmax ]; then - - if [[ $(expr $fhr % 6) -eq 0 ]]; then - fhr3=$(printf %03i $fhr) - -# Check for input file existence. If not present, sleep -# Loop SLEEP_LOOP_MAX times. Abort if not found. 
- ic=1 - while [[ $ic -le $SLEEP_LOOP_MAX ]]; do - if [ -s $COMOUT/$CDUMP.t${cyc}z.pgrb2b.0p25.f${fhr3}.idx ]; then - break - else - ic=$(expr $ic + 1) - sleep $SLEEP_INT - fi - if [ $ic -eq $SLEEP_LOOP_MAX ]; then - echo "***FATAL ERROR*** $COMOUT/$CDUMP.t${cyc}z.pgrb2b.0p25.f${fhr3}.idx NOT available" - export err=9 - err_chk - fi - done - - export fcsthrs=$fhr3 - # export job="jgfs_awips_f${fcsthrs}_20km_${cyc}" - # export DATA="${DATAROOT}/$job" - $AWIPS20SH - - # export job="jgfs_awips_f${fcsthrs}_${cyc}" - # export DATA="${DATAROOT}/$job" - $AWIPSG2SH - fi + if (( fhr >= fhmin && fhr <= fhmax )); then + if ((fhr % 6 == 0)); then + fhr3=$(printf %03i $((10#${fhr}))) + export fcsthrs=${fhr3} + ${AWIPS20SH} + ${AWIPSG2SH} + fi fi done ############################################################### # Force Exit out cleanly -if [ ${KEEPDATA:-"NO"} = "NO" ] ; then rm -rf $DATAROOT ; fi - +if [[ ${KEEPDATA:-"NO"} == "NO" ]] ; then rm -rf "${DATA}" ; fi exit 0 diff --git a/jobs/rocoto/coupled_ic.sh b/jobs/rocoto/coupled_ic.sh index 13562e8eab..ca2cfc82af 100755 --- a/jobs/rocoto/coupled_ic.sh +++ b/jobs/rocoto/coupled_ic.sh @@ -48,10 +48,10 @@ error_message(){ # Start staging # Stage the FV3 initial conditions to ROTDIR (cold start) -ATMdir="${ROTDIR}/${CDUMP}.${PDY}/${cyc}/atmos/INPUT" -[[ ! -d "${ATMdir}" ]] && mkdir -p "${ATMdir}" +YMD=${PDY} HH=${cyc} generate_com -r COM_ATMOS_INPUT +[[ ! -d "${COM_ATMOS_INPUT}" ]] && mkdir -p "${COM_ATMOS_INPUT}" source="${BASE_CPLIC}/${CPL_ATMIC}/${PDY}${cyc}/${CDUMP}/${CASE}/INPUT/gfs_ctrl.nc" -target="${ATMdir}/gfs_ctrl.nc" +target="${COM_ATMOS_INPUT}/gfs_ctrl.nc" ${NCP} "${source}" "${target}" rc=$? 
[[ ${rc} -ne 0 ]] && error_message "${source}" "${target}" "${rc}" @@ -59,7 +59,7 @@ err=$((err + rc)) for ftype in gfs_data sfc_data; do for tt in $(seq 1 6); do source="${BASE_CPLIC}/${CPL_ATMIC}/${PDY}${cyc}/${CDUMP}/${CASE}/INPUT/${ftype}.tile${tt}.nc" - target="${ATMdir}/${ftype}.tile${tt}.nc" + target="${COM_ATMOS_INPUT}/${ftype}.tile${tt}.nc" ${NCP} "${source}" "${target}" rc=$? [[ ${rc} -ne 0 ]] && error_message "${source}" "${target}" "${rc}" @@ -68,52 +68,56 @@ for ftype in gfs_data sfc_data; do done # Stage ocean initial conditions to ROTDIR (warm start) -OCNdir="${ROTDIR}/${CDUMP}.${gPDY}/${gcyc}/ocean/RESTART" -[[ ! -d "${OCNdir}" ]] && mkdir -p "${OCNdir}" -source="${BASE_CPLIC}/${CPL_OCNIC}/${PDY}${cyc}/ocn/${OCNRES}/MOM.res.nc" -target="${OCNdir}/${PDY}.${cyc}0000.MOM.res.nc" -${NCP} "${source}" "${target}" -rc=$? -[[ ${rc} -ne 0 ]] && error_message "${source}" "${target}" "${rc}" -err=$((err + rc)) -case $OCNRES in - "025") - for nn in $(seq 1 4); do - source="${BASE_CPLIC}/${CPL_OCNIC}/${PDY}${cyc}/ocn/${OCNRES}/MOM.res_${nn}.nc" - if [[ -f "${source}" ]]; then - target="${OCNdir}/${PDY}.${cyc}0000.MOM.res_${nn}.nc" - ${NCP} "${source}" "${target}" - rc=$? - [[ ${rc} -ne 0 ]] && error_message "${source}" "${target}" "${rc}" - err=$((err + rc)) - fi - done - ;; - *) - echo "FATAL ERROR: Unsupported ocean resolution ${OCNRES}" - rc=1 - err=$((err + rc)) - ;; -esac +if [[ "${DO_OCN:-}" = "YES" ]]; then + YMD=${gPDY} HH=${gcyc} generate_com -r COM_OCEAN_RESTART + [[ ! -d "${COM_OCEAN_RESTART}" ]] && mkdir -p "${COM_OCEAN_RESTART}" + source="${BASE_CPLIC}/${CPL_OCNIC}/${PDY}${cyc}/ocn/${OCNRES}/MOM.res.nc" + target="${COM_OCEAN_RESTART}/${PDY}.${cyc}0000.MOM.res.nc" + ${NCP} "${source}" "${target}" + rc=$? 
+ [[ ${rc} -ne 0 ]] && error_message "${source}" "${target}" "${rc}" + err=$((err + rc)) + case "${OCNRES}" in + "025") + for nn in $(seq 1 4); do + source="${BASE_CPLIC}/${CPL_OCNIC}/${PDY}${cyc}/ocn/${OCNRES}/MOM.res_${nn}.nc" + if [[ -f "${source}" ]]; then + target="${COM_OCEAN_RESTART}/${PDY}.${cyc}0000.MOM.res_${nn}.nc" + ${NCP} "${source}" "${target}" + rc=$? + [[ ${rc} -ne 0 ]] && error_message "${source}" "${target}" "${rc}" + err=$((err + rc)) + fi + done + ;; + *) + echo "FATAL ERROR: Unsupported ocean resolution ${OCNRES}" + rc=1 + err=$((err + rc)) + ;; + esac +fi # Stage ice initial conditions to ROTDIR (cold start as these are SIS2 generated) -ICEdir="${ROTDIR}/${CDUMP}.${PDY}/${cyc}/ice/RESTART" -[[ ! -d "${ICEdir}" ]] && mkdir -p "${ICEdir}" -ICERESdec=$(echo "${ICERES}" | awk '{printf "%0.2f", $1/100}') -source="${BASE_CPLIC}/${CPL_ICEIC}/${PDY}${cyc}/ice/${ICERES}/cice5_model_${ICERESdec}.res_${PDY}${cyc}.nc" -target="${ICEdir}/${PDY}.${cyc}0000.cice_model.res.nc" -${NCP} "${source}" "${target}" -rc=$? -[[ ${rc} -ne 0 ]] && error_message "${source}" "${target}" "${rc}" -err=$((err + rc)) +if [[ "${DO_ICE:-}" = "YES" ]]; then + YMD=${PDY} HH=${cyc} generate_com -r COM_ICE_RESTART + [[ ! -d "${COM_ICE_RESTART}" ]] && mkdir -p "${COM_ICE_RESTART}" + ICERESdec=$(echo "${ICERES}" | awk '{printf "%0.2f", $1/100}') + source="${BASE_CPLIC}/${CPL_ICEIC}/${PDY}${cyc}/ice/${ICERES}/cice5_model_${ICERESdec}.res_${PDY}${cyc}.nc" + target="${COM_ICE_RESTART}/${PDY}.${cyc}0000.cice_model.res.nc" + ${NCP} "${source}" "${target}" + rc=$? + [[ ${rc} -ne 0 ]] && error_message "${source}" "${target}" "${rc}" + err=$((err + rc)) +fi # Stage the WW3 initial conditions to ROTDIR (warm start; TODO: these should be placed in $RUN.$gPDY/$gcyc) -if [[ "${DO_WAVE}" = "YES" ]]; then - WAVdir="${ROTDIR}/${CDUMP}.${PDY}/${cyc}/wave/restart" - [[ ! 
-d "${WAVdir}" ]] && mkdir -p "${WAVdir}" +if [[ "${DO_WAVE:-}" = "YES" ]]; then + YMD=${PDY} HH=${cyc} generate_com -r COM_WAVE_RESTART + [[ ! -d "${COM_WAVE_RESTART}" ]] && mkdir -p "${COM_WAVE_RESTART}" for grdID in ${waveGRD}; do # TODO: check if this is a bash array; if so adjust source="${BASE_CPLIC}/${CPL_WAVIC}/${PDY}${cyc}/wav/${grdID}/${PDY}.${cyc}0000.restart.${grdID}" - target="${WAVdir}/${PDY}.${cyc}0000.restart.${grdID}" + target="${COM_WAVE_RESTART}/${PDY}.${cyc}0000.restart.${grdID}" ${NCP} "${source}" "${target}" rc=$? [[ ${rc} -ne 0 ]] && error_message "${source}" "${target}" "${rc}" diff --git a/jobs/rocoto/getic.sh b/jobs/rocoto/getic.sh deleted file mode 100755 index f7f03f73ca..0000000000 --- a/jobs/rocoto/getic.sh +++ /dev/null @@ -1,169 +0,0 @@ -#! /usr/bin/env bash - -source "${HOMEgfs}/ush/preamble.sh" - -############################################################### -## Abstract: -## Get GFS intitial conditions -## RUN_ENVIR : runtime environment (emc | nco) -## HOMEgfs : /full/path/to/workflow -## EXPDIR : /full/path/to/config/files -## CDATE : current date (YYYYMMDDHH) -## CDUMP : cycle name (gdas / gfs) -## PDY : current date (YYYYMMDD) -## cyc : current cycle (HH) -############################################################### - -############################################################### -# Source FV3GFS workflow modules -. "${HOMEgfs}/ush/load_fv3gfs_modules.sh" -status=$? -[[ ${status} -ne 0 ]] && exit "${status}" - -############################################################### -# Source relevant configs -configs="base getic init" -for config in ${configs}; do - . "${EXPDIR}/config.${config}" - status=$? - [[ ${status} -ne 0 ]] && exit "${status}" -done - -############################################################### -# Source machine runtime environment -. ${BASE_ENV}/${machine}.env getic -status=$? 
-[[ ${status} -ne 0 ]] && exit "${status}" - -############################################################### -# Set script and dependency variables - -export yy="$(echo ${CDATE} | cut -c1-4)" -export mm="$(echo ${CDATE} | cut -c5-6)" -export dd="$(echo ${CDATE} | cut -c7-8)" -export hh="${cyc:-$(echo ${CDATE} | cut -c9-10)}" -export GDATE="$(${NDATE} -${assim_freq:-"06"} ${CDATE})" -export gyy="$(echo ${GDATE} | cut -c1-4)" -export gmm="$(echo ${GDATE} | cut -c5-6)" -export gdd="$(echo ${GDATE} | cut -c7-8)" -export ghh="$(echo ${GDATE} | cut -c9-10)" - -export DATA=${DATA:-${DATAROOT}/getic} -export EXTRACT_DIR=${DATA:-${EXTRACT_DIR}} -export PRODHPSSDIR=${PRODHPSSDIR:-/NCEPPROD/hpssprod/runhistory} -export COMPONENT="atmos" -export gfs_ver=${gfs_ver:-"v16"} -export OPS_RES=${OPS_RES:-"C768"} -export GETICSH=${GETICSH:-${GDASINIT_DIR}/get_v16.data.sh} - -# Create ROTDIR/EXTRACT_DIR -if [[ ! -d ${ROTDIR} ]]; then mkdir -p "${ROTDIR}" ; fi -if [[ ! -d ${EXTRACT_DIR} ]]; then mkdir -p "${EXTRACT_DIR}" ; fi -cd "${EXTRACT_DIR}" - -# Check version, cold/warm start, and resolution -if [[ ${gfs_ver} = "v16" && ${EXP_WARM_START} = ".true." && ${CASE} = ${OPS_RES} ]]; then # Pull warm start ICs - no chgres - - # Pull RESTART files off HPSS - if [[ ${RETRO:-"NO"} = "YES" ]]; then # Retrospective parallel input - - # Pull prior cycle restart files - htar -xvf ${HPSSDIR}/${GDATE}/gdas_restartb.tar - status=$? - [[ ${status} -ne 0 ]] && exit "${status}" - - # Pull current cycle restart files - htar -xvf ${HPSSDIR}/${CDATE}/gfs_restarta.tar - status=$? - [[ ${status} -ne 0 ]] && exit "${status}" - - # Pull IAU increment files - htar -xvf ${HPSSDIR}/${CDATE}/gfs_netcdfa.tar - status=$? 
- [[ ${status} -ne 0 ]] && exit "${status}" - - else # Opertional input - warm starts - - cd "${ROTDIR}" - # Pull CDATE gfs restart tarball - htar -xvf ${PRODHPSSDIR}/rh${yy}/${yy}${mm}/${yy}${mm}${dd}/com_gfs_prod_gfs.${yy}${mm}${dd}_${hh}.gfs_restart.tar - # Pull GDATE gdas restart tarball - htar -xvf ${PRODHPSSDIR}/rh${gyy}/${gyy}${gmm}/${gyy}${gmm}${gdd}/com_gfs_prod_gdas.${gyy}${gmm}${gdd}_${ghh}.gdas_restart.tar - fi - -else # Pull chgres cube inputs for cold start IC generation - - # Run UFS_UTILS GETICSH - sh ${GETICSH} ${CDUMP} - status=$? - [[ ${status} -ne 0 ]] && exit "${status}" - -fi - -# Move extracted data to ROTDIR -if [[ -d ${ROTDIR}/${CDUMP}.${yy}${mm}${dd}/${hh}/${COMPONENT} ]]; then - rm -rf "${ROTDIR}/${CDUMP}.${yy}${mm}${dd}/${hh}/${COMPONENT}" -fi -mkdir -p "${ROTDIR}/${CDUMP}.${yy}${mm}${dd}/${hh}/${COMPONENT}" - -if [ ${gfs_ver} = v16 -a ${RETRO} = "YES" ]; then - mv ${EXTRACT_DIR}/${CDUMP}.${yy}${mm}${dd}/${hh}/* ${ROTDIR}/${CDUMP}.${yy}${mm}${dd}/${hh}/${COMPONENT} -else - mv ${EXTRACT_DIR}/${CDUMP}.${yy}${mm}${dd}/${hh}/* ${ROTDIR}/${CDUMP}.${yy}${mm}${dd}/${hh} -fi - -# Pull pgbanl file for verification/archival - v14+ -if [ ${gfs_ver} = v14 -o ${gfs_ver} = v15 -o ${gfs_ver} = v16 ]; then - for grid in 0p25 0p50 1p00 - do - file=gfs.t${hh}z.pgrb2.${grid}.anl - - if [[ ${gfs_ver} = v14 ]]; then # v14 production source - - cd "${ROTDIR}/${CDUMP}.${yy}${mm}${dd}/${hh}/${COMPONENT}" - export tarball="gpfs_hps_nco_ops_com_gfs_prod_gfs.${yy}${mm}${dd}${hh}.pgrb2_${grid}.tar" - htar -xvf ${PRODHPSSDIR}/rh${yy}/${yy}${mm}/${yy}${mm}${dd}/${tarball} ./${file} - - elif [[ ${gfs_ver} = v15 ]]; then # v15 production source - - cd "${EXTRACT_DIR}" - export tarball="com_gfs_prod_gfs.${yy}${mm}${dd}_${hh}.gfs_pgrb2.tar" - htar -xvf ${PRODHPSSDIR}/rh${yy}/${yy}${mm}/${yy}${mm}${dd}/${tarball} ./${CDUMP}.${yy}${mm}${dd}/${hh}/${file} - mv ${EXTRACT_DIR}/${CDUMP}.${yy}${mm}${dd}/${hh}/${file} ${ROTDIR}/${CDUMP}.${yy}${mm}${dd}/${hh}/${COMPONENT}/${file} 
- - elif [[ ${gfs_ver} = v16 ]]; then # v16 - determine RETRO or production source next - - if [[ ${RETRO} = "YES" ]]; then # Retrospective parallel source - - cd ${EXTRACT_DIR} - if [[ ${grid} = "0p25" ]]; then # anl file spread across multiple tarballs - export tarball="gfsa.tar" - elif [ ${grid} = "0p50" -o ${grid} = "1p00" ]; then - export tarball="gfsb.tar" - fi - htar -xvf ${HPSSDIR}/${yy}${mm}${dd}${hh}/${tarball} ./${CDUMP}.${yy}${mm}${dd}/${hh}/${file} - mv ${EXTRACT_DIR}/${CDUMP}.${yy}${mm}${dd}/${hh}/${file} ${ROTDIR}/${CDUMP}.${yy}${mm}${dd}/${hh}/${COMPONENT}/${file} - - else # Production source - - cd "${ROTDIR}" - export tarball="com_gfs_prod_gfs.${yy}${mm}${dd}_${hh}.gfs_pgrb2.tar" - htar -xvf ${PRODHPSSDIR}/rh${yy}/${yy}${mm}/${yy}${mm}${dd}/${tarball} ./${CDUMP}.${yy}${mm}${dd}/${hh}/atmos/${file} - - fi # RETRO vs production - - fi # Version check - done # grid loop -fi # v14-v16 pgrb anl file pull - -########################################## -# Remove the Temporary working directory -########################################## -cd "${DATAROOT}" -[[ ${KEEPDATA} = "NO" ]] && rm -rf "${DATA}" - -############################################################### -# Exit out cleanly - - -exit 0 diff --git a/jobs/rocoto/gldas.sh b/jobs/rocoto/gldas.sh deleted file mode 100755 index 8d8bb903bb..0000000000 --- a/jobs/rocoto/gldas.sh +++ /dev/null @@ -1,21 +0,0 @@ -#! /usr/bin/env bash - -source "$HOMEgfs/ush/preamble.sh" - -############################################################### -# Source FV3GFS workflow modules -. $HOMEgfs/ush/load_fv3gfs_modules.sh -status=$? -[[ $status -ne 0 ]] && exit $status - -export job="gldas" -export jobid="${job}.$$" - -############################################################### -# Execute the JJOB. GLDAS only runs once per day. - -$HOMEgfs/jobs/JGDAS_ATMOS_GLDAS -status=$? 
- - -exit $status diff --git a/jobs/rocoto/init.sh b/jobs/rocoto/init.sh deleted file mode 100755 index 0432750e72..0000000000 --- a/jobs/rocoto/init.sh +++ /dev/null @@ -1,77 +0,0 @@ -#! /usr/bin/env bash - -source "$HOMEgfs/ush/preamble.sh" - -############################################################### -## Abstract: -## Get GFS intitial conditions -## RUN_ENVIR : runtime environment (emc | nco) -## HOMEgfs : /full/path/to/workflow -## EXPDIR : /full/path/to/config/files -## CDATE : current date (YYYYMMDDHH) -## CDUMP : cycle name (gdas / gfs) -## PDY : current date (YYYYMMDD) -## cyc : current cycle (HH) -############################################################### - -############################################################### -# Source FV3GFS workflow modules -. $HOMEgfs/ush/load_fv3gfs_modules.sh -status=$? -[[ $status -ne 0 ]] && exit $status - -############################################################### -# Source relevant configs -configs="base getic init" -for config in $configs; do - . $EXPDIR/config.${config} - status=$? - [[ $status -ne 0 ]] && exit $status -done - -############################################################### -# Source machine runtime environment -. $BASE_ENV/${machine}.env init -status=$? -[[ $status -ne 0 ]] && exit $status - -############################################################### -# Set script and dependency variables - -export yy=$(echo $CDATE | cut -c1-4) -export mm=$(echo $CDATE | cut -c5-6) -export dd=$(echo $CDATE | cut -c7-8) -export hh=${cyc:-$(echo $CDATE | cut -c9-10)} - -export DATA=${DATA:-${DATAROOT}/init} -export EXTRACT_DIR=${EXTRACT_DIR:-$ROTDIR} -export WORKDIR=${WORKDIR:-$DATA} -export OUTDIR=${OUTDIR:-$ROTDIR} -export COMPONENT="atmos" -export gfs_ver=${gfs_ver:-"v16"} -export OPS_RES=${OPS_RES:-"C768"} -export RUNICSH=${RUNICSH:-${GDASINIT_DIR}/run_v16.chgres.sh} - -# Check if init is needed and run if so -if [[ $gfs_ver = "v16" && $EXP_WARM_START = ".true." 
&& $CASE = $OPS_RES ]]; then - echo "Detected v16 $OPS_RES warm starts, will not run init. Exiting..." - -else - # Run chgres_cube - if [ ! -d $OUTDIR ]; then mkdir -p $OUTDIR ; fi - sh ${RUNICSH} ${CDUMP} - status=$? - [[ $status -ne 0 ]] && exit $status -fi - -########################################## -# Remove the Temporary working directory -########################################## -cd $DATAROOT -[[ $KEEPDATA = "NO" ]] && rm -rf $DATA - -############################################################### -# Exit out cleanly - - -exit 0 diff --git a/jobs/rocoto/landanlfinal.sh b/jobs/rocoto/landanlfinal.sh new file mode 100755 index 0000000000..a6fa48c679 --- /dev/null +++ b/jobs/rocoto/landanlfinal.sh @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source UFSDA workflow modules +. "${HOMEgfs}/ush/load_ufsda_modules.sh" +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +export job="landanlfinal" +export jobid="${job}.$$" + +############################################################### +# setup python path for workflow utilities and tasks +pygwPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/pygw/src" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${pygwPATH}" +export PYTHONPATH +############################################################### +# Execute the JJOB +"${HOMEgfs}/jobs/JGLOBAL_LAND_ANALYSIS_FINALIZE" +status=$? +exit "${status}" diff --git a/jobs/rocoto/landanlinit.sh b/jobs/rocoto/landanlinit.sh new file mode 100755 index 0000000000..e9c0b2d7a2 --- /dev/null +++ b/jobs/rocoto/landanlinit.sh @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source UFSDA workflow modules +. "${HOMEgfs}/ush/load_ufsda_modules.sh" +status=$? 
+[[ ${status} -ne 0 ]] && exit "${status}" + +export job="landanlinit" +export jobid="${job}.$$" + +############################################################### +# setup python path for workflow utilities and tasks +pygwPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/pygw/src" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${pygwPATH}" +export PYTHONPATH + +############################################################### +# Execute the JJOB +"${HOMEgfs}/jobs/JGLOBAL_LAND_ANALYSIS_INITIALIZE" +status=$? +exit "${status}" diff --git a/jobs/rocoto/landanlrun.sh b/jobs/rocoto/landanlrun.sh new file mode 100755 index 0000000000..3f306a32be --- /dev/null +++ b/jobs/rocoto/landanlrun.sh @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source UFSDA workflow modules +. "${HOMEgfs}/ush/load_ufsda_modules.sh" +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +export job="landanlrun" +export jobid="${job}.$$" + +############################################################### +# setup python path for workflow utilities and tasks +pygwPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/pygw/src" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${pygwPATH}" +export PYTHONPATH + +############################################################### +# Execute the JJOB +"${HOMEgfs}/jobs/JGLOBAL_LAND_ANALYSIS_RUN" +status=$? +exit "${status}" diff --git a/jobs/rocoto/atmanalrun.sh b/jobs/rocoto/ocnanalchkpt.sh similarity index 58% rename from jobs/rocoto/atmanalrun.sh rename to jobs/rocoto/ocnanalchkpt.sh index 63aa08c184..ae98bc8e88 100755 --- a/jobs/rocoto/atmanalrun.sh +++ b/jobs/rocoto/ocnanalchkpt.sh @@ -1,20 +1,18 @@ #! /usr/bin/env bash -export STRICT="NO" source "${HOMEgfs}/ush/preamble.sh" -export STRICT="YES" ############################################################### # Source UFSDA workflow modules -. ${HOMEgfs}/ush/load_ufsda_modules.sh +. 
"${HOMEgfs}/ush/load_ufsda_modules.sh" status=$? -[[ ${status} -ne 0 ]] && exit ${status} +[[ ${status} -ne 0 ]] && exit "${status}" -export job="atmanalrun" +export job="ocnanalchkpt" export jobid="${job}.$$" ############################################################### # Execute the JJOB -${HOMEgfs}/jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_RUN +"${HOMEgfs}"/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_CHKPT status=$? -exit ${status} +exit "${status}" diff --git a/jobs/rocoto/ocnpost.sh b/jobs/rocoto/ocnpost.sh index 54e3652b44..ee8da061f2 100755 --- a/jobs/rocoto/ocnpost.sh +++ b/jobs/rocoto/ocnpost.sh @@ -9,139 +9,112 @@ source "${HOMEgfs}/ush/preamble.sh" ############################################################### # Source FV3GFS workflow modules -. ${HOMEgfs}/ush/load_fv3gfs_modules.sh +source "${HOMEgfs}/ush/load_fv3gfs_modules.sh" status=$? -[[ ${status} -ne 0 ]] && exit ${status} +(( status != 0 )) && exit "${status}" export job="ocnpost" -export jobid=${job}.$$ - -############################################## -# make temp directory -############################################## -export DATA="${DATAROOT}/${jobid}" -[[ -d ${DATA} ]] && rm -rf ${DATA} -mkdir -p ${DATA} && cd ${DATA} - -############################################## -# Run setpdy and initialize PDY variables -############################################## -export cycle="t${cyc}z" -setpdy.sh -. ./PDY - -############################################## -# Determine Job Output Name on System -############################################## -export pid=${pid:-$$} -export pgmout="OUTPUT.${pid}" -export pgmerr=errfile - - -############################# -# Source relevant config files -############################# -config_path=${EXPDIR:-${PACKAGEROOT}/gfs.${gfs_ver}/parm/config} -configs="base ocnpost" -for config in ${configs}; do - . ${config_path}/config.${config} - status=$? 
- [[ ${status} -ne 0 ]] && exit ${status} -done - - -########################################## -# Source machine runtime environment -########################################## -. ${HOMEgfs}/env/${machine}.env ocnpost -status=$? -[[ ${status} -ne 0 ]] && exit ${status} - +export jobid="${job}.$$" +source "${HOMEgfs}/ush/jjob_header.sh" -e "ocnpost" -c "base ocnpost" ############################################## # Set variables used in the exglobal script ############################################## -export CDATE=${CDATE:-${PDY}${cyc}} -export CDUMP=${CDUMP:-${RUN:-"gfs"}} -if [ ${RUN_ENVIR} = "nco" ]; then +export CDUMP=${RUN/enkf} +if [[ ${RUN_ENVIR} = "nco" ]]; then export ROTDIR=${COMROOT:?}/${NET}/${envir} fi ############################################## # Begin JOB SPECIFIC work ############################################## -[[ ! -d ${COMOUTocean} ]] && mkdir -p ${COMOUTocean} -[[ ! -d ${COMOUTice} ]] && mkdir -p ${COMOUTice} +YMD=${PDY} HH=${cyc} generate_com -rx COM_OCEAN_HISTORY COM_OCEAN_2D COM_OCEAN_3D \ + COM_OCEAN_XSECT COM_ICE_HISTORY + +for grid in "0p50" "0p25"; do + YMD=${PDY} HH=${cyc} GRID=${grid} generate_com -rx "COM_OCEAN_GRIB_${grid}:COM_OCEAN_GRIB_TMPL" +done + +for outdir in COM_OCEAN_2D COM_OCEAN_3D COM_OCEAN_XSECT COM_OCEAN_GRIB_0p25 COM_OCEAN_GRIB_0p50; do + if [[ ! 
-d "${!outdir}" ]]; then + mkdir -p "${!outdir}" + fi +done fhrlst=$(echo ${FHRLST} | sed -e 's/_/ /g; s/f/ /g; s/,/ /g') export OMP_NUM_THREADS=1 export ENSMEM=${ENSMEM:-01} -export IDATE=${CDATE} +export IDATE=${PDY}${cyc} for fhr in ${fhrlst}; do - export fhr=${fhr} - VDATE=$(${NDATE} ${fhr} ${IDATE}) - # Regrid the MOM6 and CICE5 output from tripolar to regular grid via NCL - # This can take .25 degree input and convert to .5 degree - other opts avail - # The regrid scripts use CDATE for the current day, restore it to IDATE afterwards - export CDATE=${VDATE} - cd ${DATA} - if [ ${fhr} -gt 0 ]; then - export MOM6REGRID=${MOM6REGRID:-${HOMEgfs}} - ${MOM6REGRID}/scripts/run_regrid.sh - status=$? - [[ ${status} -ne 0 ]] && exit ${status} - - # Convert the netcdf files to grib2 - export executable=${MOM6REGRID}/exec/reg2grb2.x - ${MOM6REGRID}/scripts/run_reg2grb2.sh - status=$? - [[ ${status} -ne 0 ]] && exit ${status} + export fhr=${fhr} + # Ignore possible spelling error (nothing is misspelled) + # shellcheck disable=SC2153 + VDATE=$(${NDATE} "${fhr}" "${IDATE}") + # shellcheck disable= + declare -x VDATE + cd "${DATA}" || exit 2 + if (( fhr > 0 )); then + # TODO: This portion calls NCL scripts that are deprecated (see Issue #923) + if [[ "${MAKE_OCN_GRIB:-YES}" == "YES" ]]; then + export MOM6REGRID=${MOM6REGRID:-${HOMEgfs}} + "${MOM6REGRID}/scripts/run_regrid.sh" + status=$? + [[ ${status} -ne 0 ]] && exit "${status}" + # Convert the netcdf files to grib2 + export executable=${MOM6REGRID}/exec/reg2grb2.x + "${MOM6REGRID}/scripts/run_reg2grb2.sh" + status=$? 
+ [[ ${status} -ne 0 ]] && exit "${status}" + ${NMV} "ocn_ice${VDATE}.${ENSMEM}.${IDATE}_0p25x0p25.grb2" "${COM_OCEAN_GRIB_0p25}/" + ${NMV} "ocn_ice${VDATE}.${ENSMEM}.${IDATE}_0p5x0p5.grb2" "${COM_OCEAN_GRIB_0p50}/" + fi #break up ocn netcdf into multiple files: - if [ -f ${COMOUTocean}/ocn_2D_${VDATE}.${ENSMEM}.${IDATE}.nc ]; then - echo "File ${COMOUTocean}/ocn_2D_${VDATE}.${ENSMEM}.${IDATE}.nc already exists" + if [[ -f "${COM_OCEAN_2D}/ocn_2D_${VDATE}.${ENSMEM}.${IDATE}.nc" ]]; then + echo "File ${COM_OCEAN_2D}/ocn_2D_${VDATE}.${ENSMEM}.${IDATE}.nc already exists" else - ncks -x -v vo,uo,so,temp ${COMOUTocean}/ocn${VDATE}.${ENSMEM}.${IDATE}.nc ${COMOUTocean}/ocn_2D_${VDATE}.${ENSMEM}.${IDATE}.nc + ncks -x -v vo,uo,so,temp \ + "${COM_OCEAN_HISTORY}/ocn${VDATE}.${ENSMEM}.${IDATE}.nc" \ + "${COM_OCEAN_2D}/ocn_2D_${VDATE}.${ENSMEM}.${IDATE}.nc" status=$? - [[ ${status} -ne 0 ]] && exit ${status} + [[ ${status} -ne 0 ]] && exit "${status}" fi - if [ -f ${COMOUTocean}/ocn_3D_${VDATE}.${ENSMEM}.${IDATE}.nc ]; then - echo "File ${COMOUTocean}/ocn_3D_${VDATE}.${ENSMEM}.${IDATE}.nc already exists" + if [[ -f "${COM_OCEAN_3D}/ocn_3D_${VDATE}.${ENSMEM}.${IDATE}.nc" ]]; then + echo "File ${COM_OCEAN_3D}/ocn_3D_${VDATE}.${ENSMEM}.${IDATE}.nc already exists" else - ncks -x -v Heat_PmE,LW,LwLatSens,MLD_003,MLD_0125,SSH,SSS,SST,SSU,SSV,SW,cos_rot,ePBL,evap,fprec,frazil,latent,lprec,lrunoff,sensible,sin_rot,speed,taux,tauy,wet_c,wet_u,wet_v ${COMOUTocean}/ocn${VDATE}.${ENSMEM}.${IDATE}.nc ${COMOUTocean}/ocn_3D_${VDATE}.${ENSMEM}.${IDATE}.nc + ncks -x -v Heat_PmE,LW,LwLatSens,MLD_003,MLD_0125,SSH,SSS,SST,SSU,SSV,SW,cos_rot,ePBL,evap,fprec,frazil,latent,lprec,lrunoff,sensible,sin_rot,speed,taux,tauy,wet_c,wet_u,wet_v \ + "${COM_OCEAN_HISTORY}/ocn${VDATE}.${ENSMEM}.${IDATE}.nc" \ + "${COM_OCEAN_3D}/ocn_3D_${VDATE}.${ENSMEM}.${IDATE}.nc" status=$? 
- [[ ${status} -ne 0 ]] && exit ${status} + [[ ${status} -ne 0 ]] && exit "${status}" fi - if [ -f ${COMOUTocean}/ocn-temp-EQ_${VDATE}.${ENSMEM}.${IDATE}.nc ]; then - echo "File ${COMOUTocean}/ocn-temp-EQ_${VDATE}.${ENSMEM}.${IDATE}.nc already exists" + if [[ -f "${COM_OCEAN_XSECT}/ocn-temp-EQ_${VDATE}.${ENSMEM}.${IDATE}.nc" ]]; then + echo "File ${COM_OCEAN_XSECT}/ocn-temp-EQ_${VDATE}.${ENSMEM}.${IDATE}.nc already exists" else - ncks -v temp -d yh,503 -d xh,-299.92,60.03 ${COMOUTocean}/ocn_3D_${VDATE}.${ENSMEM}.${IDATE}.nc ${COMOUTocean}/ocn-temp-EQ_${VDATE}.${ENSMEM}.${IDATE}.nc + ncks -v temp -d yh,503 -d xh,-299.92,60.03 \ + "${COM_OCEAN_3D}/ocn_3D_${VDATE}.${ENSMEM}.${IDATE}.nc" \ + "${COM_OCEAN_XSECT}/ocn-temp-EQ_${VDATE}.${ENSMEM}.${IDATE}.nc" status=$? - [[ ${status} -ne 0 ]] && exit ${status} + [[ ${status} -ne 0 ]] && exit "${status}" fi - if [ -f ${COMOUTocean}/ocn-uo-EQ_${VDATE}.${ENSMEM}.${IDATE}.nc ]; then - echo "File ${COMOUTocean}/ocn-uo-EQ_${VDATE}.${ENSMEM}.${IDATE}.nc already exists" + if [[ -f "${COM_OCEAN_XSECT}/ocn-uo-EQ_${VDATE}.${ENSMEM}.${IDATE}.nc" ]]; then + echo "File ${COM_OCEAN_XSECT}/ocn-uo-EQ_${VDATE}.${ENSMEM}.${IDATE}.nc already exists" else - ncks -v uo -d yh,503 -d xh,-299.92,60.03 ${COMOUTocean}/ocn_3D_${VDATE}.${ENSMEM}.${IDATE}.nc ${COMOUTocean}/ocn-uo-EQ_${VDATE}.${ENSMEM}.${IDATE}.nc + ncks -v uo -d yh,503 -d xh,-299.92,60.03 \ + "${COM_OCEAN_3D}/ocn_3D_${VDATE}.${ENSMEM}.${IDATE}.nc" \ + "${COM_OCEAN_XSECT}/ocn-uo-EQ_${VDATE}.${ENSMEM}.${IDATE}.nc" status=$? - [[ ${status} -ne 0 ]] && exit ${status} + [[ ${status} -ne 0 ]] && exit "${status}" fi fi - done -# Restore CDATE to what is expected -export CDATE=${IDATE} -${NMV} ocn_ice*.grb2 ${COMOUTocean}/ -status=$? 
-[[ ${status} -ne 0 ]] && exit ${status} - # clean up working folder -if [ ${KEEPDATA:-"NO"} = "NO" ] ; then rm -rf ${DATA} ; fi +if [[ ${KEEPDATA:-"NO"} = "NO" ]] ; then rm -rf "${DATA}" ; fi ############################################################### # Exit out cleanly diff --git a/jobs/rocoto/post.sh b/jobs/rocoto/post.sh index 1de899b7ba..e84b2b7b71 100755 --- a/jobs/rocoto/post.sh +++ b/jobs/rocoto/post.sh @@ -16,17 +16,12 @@ status=$? export job="post" export jobid="${job}.$$" -export COMPONENT="atmos" - if [ ${FHRGRP} = 'anl' ]; then fhrlst="anl" - restart_file=${ROTDIR}/${CDUMP}.${PDY}/${cyc}/${COMPONENT}/${CDUMP}.t${cyc}z.atm else fhrlst=$(echo ${FHRLST} | sed -e 's/_/ /g; s/f/ /g; s/,/ /g') - restart_file=${ROTDIR}/${CDUMP}.${PDY}/${cyc}/${COMPONENT}/${CDUMP}.t${cyc}z.logf fi - #--------------------------------------------------------------- for fhr in ${fhrlst}; do export post_times=${fhr} diff --git a/jobs/rocoto/prep.sh b/jobs/rocoto/prep.sh index e35721e4e6..826dec5ae7 100755 --- a/jobs/rocoto/prep.sh +++ b/jobs/rocoto/prep.sh @@ -9,49 +9,49 @@ status=$? [[ ${status} -ne 0 ]] && exit ${status} ############################################################### -# Source relevant configs -configs="base prep" -for config in ${configs}; do - . ${EXPDIR}/config.${config} - status=$? - [[ ${status} -ne 0 ]] && exit ${status} -done +export job="prep" +export jobid="${job}.$$" +source "${HOMEgfs}/ush/jjob_header.sh" -e "prep" -c "base prep" -############################################################### -# Source machine runtime environment -. ${BASE_ENV}/${machine}.env prep -status=$? 
-[[ ${status} -ne 0 ]] && exit ${status} +export CDUMP="${RUN/enkf}" ############################################################### # Set script and dependency variables +# Ignore possible spelling error (nothing is misspelled) +# shellcheck disable=SC2153 +GDATE=$(${NDATE} -"${assim_freq}" "${PDY}${cyc}") +# shellcheck disable= +gPDY=${GDATE:0:8} +gcyc=${GDATE:8:2} +GDUMP="gdas" + export OPREFIX="${CDUMP}.t${cyc}z." -export COMOUT="${ROTDIR}/${CDUMP}.${PDY}/${cyc}/atmos" + +YMD=${PDY} HH=${cyc} DUMP=${CDUMP} generate_com -rx COM_OBS COM_OBSDMP + +RUN=${GDUMP} DUMP=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ + COM_OBS_PREV:COM_OBS_TMPL \ + COM_OBSDMP_PREV:COM_OBSDMP_TMPL + export MAKE_PREPBUFR=${MAKE_PREPBUFR:-"YES"} -[[ ! -d ${COMOUT} ]] && mkdir -p ${COMOUT} -[[ ! -d ${COMIN_OBS} ]] && mkdir -p ${COMIN_OBS} +if [[ ! -d "${COM_OBS}" ]]; then mkdir -p "${COM_OBS}"; fi ############################################################### # If ROTDIR_DUMP=YES, copy dump files to rotdir if [[ ${ROTDIR_DUMP} = "YES" ]]; then - ${HOMEgfs}/ush/getdump.sh "${CDATE}" "${CDUMP}" "${DMPDIR}/${CDUMP}${DUMP_SUFFIX}.${PDY}/${cyc}/atmos" "${COMIN_OBS}" + "${HOMEgfs}/ush/getdump.sh" "${PDY}${cyc}" "${CDUMP}" "${COM_OBSDMP}" "${COM_OBS}" status=$? [[ ${status} -ne 0 ]] && exit ${status} -# Ensure previous cycle gdas dumps are available (used by cycle & downstream) - GDATE=$(${NDATE} -${assim_freq} ${CDATE}) - gPDY=$(echo ${GDATE} | cut -c1-8) - gcyc=$(echo ${GDATE} | cut -c9-10) - GDUMP=gdas - gCOMOBS="${ROTDIR}/${GDUMP}.${gPDY}/${gcyc}/obs" - if [[ ! -s ${gCOMOBS}/${GDUMP}.t${gcyc}z.updated.status.tm00.bufr_d ]]; then - ${HOMEgfs}/ush/getdump.sh "${GDATE}" "${GDUMP}" "${DMPDIR}/${GDUMP}${DUMP_SUFFIX}.${gPDY}/${gcyc}/atmos" "${gCOMOBS}" + # Ensure previous cycle gdas dumps are available (used by cycle & downstream) + if [[ ! 
-s "${COM_OBS_PREV}/${GDUMP}.t${gcyc}z.updated.status.tm00.bufr_d" ]]; then + "${HOMEgfs}/ush/getdump.sh" "${GDATE}" "${GDUMP}" "${COM_OBSDMP_PREV}" "${COM_OBS_PREV}" status=$? [[ ${status} -ne 0 ]] && exit ${status} fi # exception handling to ensure no dead link - [[ $(find ${COMIN_OBS} -xtype l | wc -l) -ge 1 ]] && exit 9 - [[ $(find ${gCOMOBS} -xtype l | wc -l) -ge 1 ]] && exit 9 + [[ $(find ${COM_OBS} -xtype l | wc -l) -ge 1 ]] && exit 9 + [[ $(find ${COM_OBS_PREV} -xtype l | wc -l) -ge 1 ]] && exit 9 fi @@ -75,40 +75,36 @@ if [[ ${PROCESS_TROPCY} = "YES" ]]; then fi fi - [[ ${ROTDIR_DUMP} = "YES" ]] && rm ${COMOUT}${CDUMP}.t${cyc}z.syndata.tcvitals.tm00 + if [[ ${ROTDIR_DUMP} = "YES" ]]; then rm "${COM_OBS}/${CDUMP}.t${cyc}z.syndata.tcvitals.tm00"; fi - ${HOMEgfs}/jobs/JGLOBAL_ATMOS_TROPCY_QC_RELOC + "${HOMEgfs}/jobs/JGLOBAL_ATMOS_TROPCY_QC_RELOC" status=$? [[ ${status} -ne 0 ]] && exit ${status} else - [[ ${ROTDIR_DUMP} = "NO" ]] && cp ${DMPDIR}/${CDUMP}${DUMP_SUFFIX}.${PDY}/${cyc}/atmos/${CDUMP}.t${cyc}z.syndata.tcvitals.tm00 ${COMOUT}/ + if [[ ${ROTDIR_DUMP} = "NO" ]]; then cp "${COM_OBSDMP}/${CDUMP}.t${cyc}z.syndata.tcvitals.tm00" "${COM_OBS}/"; fi fi -# Will modify the new location later in the next PR to address issue 1198 -if [[ ${ROTDIR_DUMP} = "YES" ]]; then - mv ${COMIN_OBS}/*syndata.tcvitals.tm00 ${COMOUT} - mv ${COMIN_OBS}/*snogrb_t1534.3072.1536 ${COMOUT} - mv ${COMIN_OBS}/*seaice.5min.blend.grb ${COMOUT} -fi ############################################################### # Generate prepbufr files from dumps or copy from OPS if [[ ${MAKE_PREPBUFR} = "YES" ]]; then if [[ ${ROTDIR_DUMP} = "YES" ]]; then - rm -f ${COMIN_OBS}/${OPREFIX}prepbufr - rm -f ${COMIN_OBS}/${OPREFIX}prepbufr.acft_profiles - rm -f ${COMIN_OBS}/${OPREFIX}nsstbufr + rm -f "${COM_OBS}/${OPREFIX}prepbufr" + rm -f "${COM_OBS}/${OPREFIX}prepbufr.acft_profiles" + rm -f "${COM_OBS}/${OPREFIX}nsstbufr" fi export job="j${CDUMP}_prep_${cyc}" export 
DATAROOT="${RUNDIR}/${CDATE}/${CDUMP}/prepbufr" - export COMIN=${COMIN_OBS} - export COMINgdas=${COMINgdas:-${ROTDIR}/gdas.${PDY}/${cyc}/atmos} - export COMINgfs=${COMINgfs:-${ROTDIR}/gfs.${PDY}/${cyc}/atmos} - export COMOUT=${COMIN_OBS} + export COMIN=${COM_OBS} + export COMOUT=${COM_OBS} + RUN="gdas" YMD=${PDY} HH=${cyc} generate_com -rx COMINgdas:COM_ATMOS_HISTORY_TMPL + RUN="gfs" YMD=${PDY} HH=${cyc} generate_com -rx COMINgfs:COM_ATMOS_HISTORY_TMPL if [[ ${ROTDIR_DUMP} = "NO" ]]; then - COMIN_OBS=${COMIN_OBS:-${DMPDIR}/${CDUMP}${DUMP_SUFFIX}.${PDY}/${cyc}/atmos} + export COMSP=${COMSP:-"${COM_OBSDMP}/${CDUMP}.t${cyc}z."} + else + export COMSP=${COMSP:-"${COM_OBS}/${CDUMP}.t${cyc}z."} fi export COMSP=${COMSP:-${COMIN_OBS}/${CDUMP}.t${cyc}z.} @@ -117,20 +113,20 @@ if [[ ${MAKE_PREPBUFR} = "YES" ]]; then export MAKE_NSSTBUFR="NO" fi - ${HOMEobsproc}/jobs/JOBSPROC_GLOBAL_PREP + "${HOMEobsproc}/jobs/JOBSPROC_GLOBAL_PREP" status=$? [[ ${status} -ne 0 ]] && exit ${status} # If creating NSSTBUFR was disabled, copy from DMPDIR if appropriate. 
if [[ ${MAKE_NSSTBUFR:-"NO"} = "NO" ]]; then - [[ ${DONST} = "YES" ]] && ${NCP} ${DMPDIR}/${CDUMP}${DUMP_SUFFIX}.${PDY}/${cyc}/atmos/${OPREFIX}nsstbufr ${COMIN_OBS}/${OPREFIX}nsstbufr + if [[ ${DONST} = "YES" ]]; then ${NCP} "${COM_OBSDMP}/${OPREFIX}nsstbufr" "${COM_OBS}/${OPREFIX}nsstbufr"; fi fi else if [[ ${ROTDIR_DUMP} = "NO" ]]; then - ${NCP} ${DMPDIR}/${CDUMP}${DUMP_SUFFIX}.${PDY}/${cyc}/atmos/${OPREFIX}prepbufr ${COMIN_OBS}/${OPREFIX}prepbufr - ${NCP} ${DMPDIR}/${CDUMP}${DUMP_SUFFIX}.${PDY}/${cyc}/atmos/${OPREFIX}prepbufr.acft_profiles ${COMIN_OBS}/${OPREFIX}prepbufr.acft_profiles - [[ ${DONST} = "YES" ]] && ${NCP} ${DMPDIR}/${CDUMP}${DUMP_SUFFIX}.${PDY}/${cyc}/atmos/${OPREFIX}nsstbufr ${COMIN_OBS}/${OPREFIX}nsstbufr + ${NCP} "${COM_OBSDMP}/${OPREFIX}prepbufr" "${COM_OBS}/${OPREFIX}prepbufr" + ${NCP} "${COM_OBSDMP}/${OPREFIX}prepbufr.acft_profiles" "${COM_OBS}/${OPREFIX}prepbufr.acft_profiles" + if [[ ${DONST} = "YES" ]]; then ${NCP} "${COM_OBSDMP}/${OPREFIX}nsstbufr" "${COM_OBS}/${OPREFIX}nsstbufr"; fi fi fi diff --git a/jobs/rocoto/vrfy.sh b/jobs/rocoto/vrfy.sh index 4230b8a62b..93d9f11c44 100755 --- a/jobs/rocoto/vrfy.sh +++ b/jobs/rocoto/vrfy.sh @@ -16,34 +16,34 @@ export jobid="${job}.$$" source "${HOMEgfs}/ush/jjob_header.sh" -e "vrfy" -c "base vrfy" ############################################################### -export COMPONENT="atmos" -export CDATEm1=$(${NDATE} -24 ${CDATE}) -export PDYm1=$(echo ${CDATEm1} | cut -c1-8) +export CDUMP="${RUN/enkf}" -CDATEm1c=$(${NDATE} -06 ${CDATE}) -PDYm1c=$(echo ${CDATEm1c} | cut -c1-8) -pcyc=$(echo ${CDATEm1c} | cut -c9-10) +CDATEm1=$(${NDATE} -24 "${PDY}${cyc}") +export CDATEm1 +export PDYm1=${CDATEm1:0:8} -export COMIN="${ROTDIR}/${CDUMP}.${PDY}/${cyc}/${COMPONENT}" +CDATEm1c=$(${NDATE} -06 "${PDY}${cyc}") +PDYm1c=${CDATEm1c:0:8} +pcyc=${CDATEm1c:8:2} ############################################################### +# TODO: We can likely drop support for these dev-only grib1 precip files echo echo 
"=============== START TO GENERATE QUARTER DEGREE GRIB1 FILES ===============" -if [ ${MKPGB4PRCP} = "YES" -a ${CDUMP} = "gfs" ]; then +if [[ ${MKPGB4PRCP} = "YES" && ${CDUMP} == "gfs" ]]; then + YMD=${PDY} HH=${cyc} generate_com -x COM_ATMOS_MASTER if [ ! -d ${ARCDIR} ]; then mkdir -p ${ARCDIR} ; fi nthreads_env=${OMP_NUM_THREADS:-1} # get threads set in env export OMP_NUM_THREADS=1 - cd ${COMIN} + cd "${COM_ATMOS_MASTER}" || exit 9 fhmax=${vhr_rain:-${FHMAX_GFS}} - fhr=0 - while [ ${fhr} -le ${fhmax} ]; do - fhr2=$(printf %02i ${fhr}) - fhr3=$(printf %03i ${fhr}) - fname=${CDUMP}.t${cyc}z.sfluxgrbf${fhr3}.grib2 - fileout=${ARCDIR}/pgbq${fhr2}.${CDUMP}.${CDATE}.grib2 - ${WGRIB2} ${fname} -match "(:PRATE:surface:)|(:TMP:2 m above ground:)" -grib ${fileout} - (( fhr = ${fhr} + 6 )) + for (( fhr=0; fhr <= fhmax; fhr+=6 )); do + fhr2=$(printf %02i "${fhr}") + fhr3=$(printf %03i "${fhr}") + fname=${RUN}.t${cyc}z.sfluxgrbf${fhr3}.grib2 + fileout=${ARCDIR}/pgbq${fhr2}.${RUN}.${PDY}${cyc}.grib2 + ${WGRIB2} "${fname}" -match "(:PRATE:surface:)|(:TMP:2 m above ground:)" -grib "${fileout}" done export OMP_NUM_THREADS=${nthreads_env} # revert to threads set in env fi @@ -52,20 +52,20 @@ fi ############################################################### echo echo "=============== START TO RUN MOS ===============" -if [ ${RUNMOS} = "YES" -a ${CDUMP} = "gfs" ]; then - ${RUNGFSMOSSH} ${PDY}${cyc} +if [[ "${RUNMOS}" == "YES" && "${CDUMP}" == "gfs" ]]; then + ${RUNGFSMOSSH} "${PDY}${cyc}" fi ############################################################### echo echo "=============== START TO RUN RADMON DATA EXTRACTION ===============" -if [ ${VRFYRAD} = "YES" -a "${CDUMP}" = "${CDFNL}" -a "${CDATE}" != "${SDATE}" ]; then + +if [[ "${VRFYRAD}" == "YES" && "${CDUMP}" == "${CDFNL}" && "${PDY}${cyc}" != "${SDATE}" ]]; then export EXP=${PSLOT} - export COMOUT="${ROTDIR}/${CDUMP}.${PDY}/${cyc}/${COMPONENT}" - export TANKverf_rad="${TANKverf}/stats/${PSLOT}/${CDUMP}.${PDY}/${cyc}" - export 
TANKverf_radM1="${TANKverf}/stats/${PSLOT}/${CDUMP}.${PDYm1c}/${pcyc}" + export TANKverf_rad="${TANKverf}/stats/${PSLOT}/${RUN}.${PDY}/${cyc}" + export TANKverf_radM1="${TANKverf}/stats/${PSLOT}/${RUN}.${PDYm1c}/${pcyc}" export MY_MACHINE=${machine} ${VRFYRADSH} @@ -76,12 +76,11 @@ fi ############################################################### echo echo "=============== START TO RUN OZMON DATA EXTRACTION ===============" -if [ "${VRFYOZN}" = "YES" -a "${CDUMP}" = "${CDFNL}" -a "${CDATE}" != "${SDATE}" ]; then +if [[ "${VRFYOZN}" == "YES" && "${CDUMP}" == "${CDFNL}" && "${PDY}${cyc}" != "${SDATE}" ]]; then export EXP=${PSLOT} - export COMOUT="${ROTDIR}/${CDUMP}.${PDY}/${cyc}/${COMPONENT}" - export TANKverf_ozn="${TANKverf_ozn}/stats/${PSLOT}/${CDUMP}.${PDY}/${cyc}" - export TANKverf_oznM1="${TANKverf_ozn}/stats/${PSLOT}/${CDUMP}.${PDYm1c}/${pcyc}" + export TANKverf_ozn="${TANKverf_ozn}/stats/${PSLOT}/${RUN}.${PDY}/${cyc}" + export TANKverf_oznM1="${TANKverf_ozn}/stats/${PSLOT}/${RUN}.${PDYm1c}/${pcyc}" export MY_MACHINE=${machine} ${VRFYOZNSH} @@ -92,11 +91,10 @@ fi ############################################################### echo echo "=============== START TO RUN MINMON ===============" -if [ "${VRFYMINMON}" = "YES" -a "${CDATE}" != "${SDATE}" ]; then +if [[ "${VRFYMINMON}" == "YES" && "${PDY}${cyc}" != "${SDATE}" ]]; then - export COMOUT="${ROTDIR}/${CDUMP}.${PDY}/${cyc}/${COMPONENT}" - export M_TANKverfM0="${M_TANKverf}/stats/${PSLOT}/${CDUMP}.${PDY}/${cyc}" - export M_TANKverfM1="${M_TANKverf}/stats/${PSLOT}/${CDUMP}.${PDYm1c}/${pcyc}" + export M_TANKverfM0="${M_TANKverf}/stats/${PSLOT}/${RUN}.${PDY}/${cyc}" + export M_TANKverfM1="${M_TANKverf}/stats/${PSLOT}/${RUN}.${PDYm1c}/${pcyc}" export MY_MACHINE=${machine} ${VRFYMINSH} @@ -107,9 +105,10 @@ fi ################################################################################ echo echo "=============== START TO RUN CYCLONE TRACK VERIFICATION ===============" -if [ ${VRFYTRAK} = "YES" ]; then +if [[ 
${VRFYTRAK} = "YES" ]]; then - export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat} + COMINsyn=${COMINsyn:-$(compath.py "${envir}/com/gfs/${gfs_ver}")/syndat} + export COMINsyn ${TRACKERSH} fi @@ -118,7 +117,7 @@ fi ################################################################################ echo echo "=============== START TO RUN CYCLONE GENESIS VERIFICATION ===============" -if [ ${VRFYGENESIS} = "YES" -a "${CDUMP}" = "gfs" ]; then +if [[ ${VRFYGENESIS} = "YES" && "${CDUMP}" = "gfs" ]]; then ${GENESISSH} fi @@ -126,15 +125,15 @@ fi ################################################################################ echo echo "=============== START TO RUN CYCLONE GENESIS VERIFICATION (FSU) ===============" -if [ ${VRFYFSU} = "YES" -a "${CDUMP}" = "gfs" ]; then +if [[ ${VRFYFSU} = "YES" && "${CDUMP}" = "gfs" ]]; then ${GENESISFSU} fi ############################################################### # Force Exit out cleanly -cd ${DATAROOT} -if [ ${KEEPDATA:-"NO"} = "NO" ] ; then rm -rf ${DATA} ; fi +cd "${DATAROOT}" +if [[ ${KEEPDATA:-"NO"} = "NO" ]] ; then rm -rf "${DATA}" ; fi exit 0 diff --git a/jobs/rocoto/wavegempak.sh b/jobs/rocoto/wavegempak.sh index 325762a19b..58fbcdcc5b 100755 --- a/jobs/rocoto/wavegempak.sh +++ b/jobs/rocoto/wavegempak.sh @@ -7,6 +7,9 @@ source $HOMEgfs/ush/load_fv3gfs_modules.sh status=$? [[ $status -ne 0 ]] && exit $status +export job="post" +export jobid="${job}.$$" + ############################################################### # Execute the JJOB $HOMEgfs/jobs/JGLOBAL_WAVE_GEMPAK diff --git a/modulefiles/module-setup.sh.inc b/modulefiles/module-setup.sh.inc index 201daa7b8d..e5322cbb2c 100644 --- a/modulefiles/module-setup.sh.inc +++ b/modulefiles/module-setup.sh.inc @@ -22,7 +22,7 @@ if [[ -d /lfs/f1 ]] ; then source /usr/share/lmod/lmod/init/$__ms_shell fi module reset -elif [[ -d /lfs3 ]] ; then +elif [[ -d /mnt/lfs1 ]] ; then # We are on NOAA Jet if ( ! 
eval module help > /dev/null 2>&1 ) ; then source /apps/lmod/lmod/init/$__ms_shell diff --git a/modulefiles/module_base.hera.lua b/modulefiles/module_base.hera.lua index a43760ee2f..5d5bcadb1a 100644 --- a/modulefiles/module_base.hera.lua +++ b/modulefiles/module_base.hera.lua @@ -12,7 +12,7 @@ load(pathJoin("hpss", os.getenv("hpss_ver"))) load(pathJoin("gempak", os.getenv("gempak_ver"))) load(pathJoin("ncl", os.getenv("ncl_ver"))) load(pathJoin("jasper", os.getenv("jasper_ver"))) -load(pathJoin("png", os.getenv("png_ver"))) +load(pathJoin("png", os.getenv("libpng_ver"))) load(pathJoin("cdo", os.getenv("cdo_ver"))) load(pathJoin("R", os.getenv("R_ver"))) @@ -28,9 +28,13 @@ load(pathJoin("crtm", os.getenv("crtm_ver"))) load(pathJoin("wgrib2", os.getenv("wgrib2_ver"))) setenv("WGRIB2","wgrib2") -prepend_path("MODULEPATH", pathJoin("/scratch1/NCEPDEV/global/glopara/git/prepobs/v" .. os.getenv("prepobs_run_ver"), "modulefiles")) +--prepend_path("MODULEPATH", pathJoin("/scratch1/NCEPDEV/global/glopara/git/prepobs/v" .. 
os.getenv("prepobs_run_ver"), "modulefiles")) +prepend_path("MODULEPATH", pathJoin("/scratch1/NCEPDEV/global/glopara/git/prepobs/feature-GFSv17_com_reorg_log_update/modulefiles")) load(pathJoin("prepobs", os.getenv("prepobs_run_ver"))) +prepend_path("MODULEPATH", pathJoin("/scratch1/NCEPDEV/global/glopara/git/Fit2Obs/v1.0.0/modulefiles")) +load(pathJoin("fit2obs", "1.0.0")) + -- Temporary until official hpc-stack is updated prepend_path("MODULEPATH", "/scratch2/NCEPDEV/ensemble/save/Walter.Kolczynski/hpc-stack/modulefiles/stack") load(pathJoin("hpc", "1.2.0")) diff --git a/modulefiles/module_base.jet.lua b/modulefiles/module_base.jet.lua index d72259d1a1..4d97aa6444 100644 --- a/modulefiles/module_base.jet.lua +++ b/modulefiles/module_base.jet.lua @@ -2,7 +2,7 @@ help([[ Load environment to run GFS on Jet ]]) -prepend_path("MODULEPATH", "/lfs4/HFIP/hfv3gfs/nwprod/hpc-stack/libs/modulefiles/stack") +prepend_path("MODULEPATH", "/lfs4/HFIP/hfv3gfs/role.epic/hpc-stack/libs/intel-18.0.5.274/modulefiles/stack") load(pathJoin("hpc", os.getenv("hpc_ver"))) load(pathJoin("hpc-intel", os.getenv("hpc_intel_ver"))) @@ -10,7 +10,11 @@ load(pathJoin("hpc-impi", os.getenv("hpc_impi_ver"))) load("hpss") load(pathJoin("gempak", os.getenv("gempak_ver"))) +load(pathJoin("ncl", os.getenv("ncl_ver"))) +load(pathJoin("jasper", os.getenv("jasper_ver"))) +load(pathJoin("libpng", os.getenv("libpng_ver"))) load(pathJoin("cdo", os.getenv("cdo_ver"))) +load(pathJoin("R", os.getenv("R_ver"))) load(pathJoin("hdf5", os.getenv("hdf5_ver"))) load(pathJoin("netcdf", os.getenv("netcdf_ver"))) @@ -19,7 +23,17 @@ load(pathJoin("nco", os.getenv("nco_ver"))) load(pathJoin("prod_util", os.getenv("prod_util_ver"))) load(pathJoin("grib_util", os.getenv("grib_util_ver"))) load(pathJoin("g2tmpl", os.getenv("g2tmpl_ver"))) +load(pathJoin("ncdiag", os.getenv("ncdiag_ver"))) load(pathJoin("crtm", os.getenv("crtm_ver"))) load(pathJoin("wgrib2", os.getenv("wgrib2_ver"))) +prepend_path("MODULEPATH", 
"/contrib/anaconda/modulefiles") +load(pathJoin("anaconda", "5.3.1")) + +prepend_path("MODULEPATH", pathJoin("/lfs4/HFIP/hfv3gfs/glopara/git/prepobs/feature-GFSv17_com_reorg_log_update/modulefiles")) +load(pathJoin("prepobs", os.getenv("prepobs_run_ver"))) + +prepend_path("MODULEPATH", pathJoin("/lfs4/HFIP/hfv3gfs/glopara/git/Fit2Obs/v" .. os.getenv("fit2obs_ver"), "modulefiles")) +load(pathJoin("fit2obs", os.getenv("fit2obs_ver"))) + whatis("Description: GFS run environment") diff --git a/modulefiles/module_base.orion.lua b/modulefiles/module_base.orion.lua index bff408783b..975aa36df0 100644 --- a/modulefiles/module_base.orion.lua +++ b/modulefiles/module_base.orion.lua @@ -12,7 +12,7 @@ load(pathJoin("gempak", os.getenv("gempak_ver"))) load(pathJoin("ncl", os.getenv("ncl_ver"))) load(pathJoin("jasper", os.getenv("jasper_ver"))) load(pathJoin("zlib", os.getenv("zlib_ver"))) -load(pathJoin("png", os.getenv("png_ver"))) +load(pathJoin("png", os.getenv("libpng_ver"))) load(pathJoin("cdo", os.getenv("cdo_ver"))) load(pathJoin("hdf5", os.getenv("hdf5_ver"))) @@ -27,9 +27,13 @@ load(pathJoin("crtm", os.getenv("crtm_ver"))) load(pathJoin("wgrib2", os.getenv("wgrib2_ver"))) setenv("WGRIB2","wgrib2") -prepend_path("MODULEPATH", pathJoin("/work/noaa/global/glopara/git/prepobs/v" .. os.getenv("prepobs_run_ver"), "modulefiles")) +--prepend_path("MODULEPATH", pathJoin("/work/noaa/global/glopara/git/prepobs/v" .. 
os.getenv("prepobs_run_ver"), "modulefiles")) +prepend_path("MODULEPATH", pathJoin("/work/noaa/global/glopara/git/prepobs/feature-GFSv17_com_reorg_log_update/modulefiles")) load(pathJoin("prepobs", os.getenv("prepobs_run_ver"))) +prepend_path("MODULEPATH", pathJoin("/work/noaa/global/glopara/git/Fit2Obs/v1.0.0/modulefiles")) +load(pathJoin("fit2obs", "1.0.0")) + -- Temporary until official hpc-stack is updated prepend_path("MODULEPATH", "/work2/noaa/global/wkolczyn/save/hpc-stack/modulefiles/stack") load(pathJoin("hpc", "1.2.0")) diff --git a/modulefiles/module_base.s4.lua b/modulefiles/module_base.s4.lua index 3d273e36cf..aa17d85fc4 100644 --- a/modulefiles/module_base.s4.lua +++ b/modulefiles/module_base.s4.lua @@ -14,7 +14,7 @@ load(pathJoin("ncl", os.getenv("ncl_ver"))) load(pathJoin("cdo", os.getenv("cdo_ver"))) load(pathJoin("jasper", os.getenv("jasper_ver"))) load(pathJoin("zlib", os.getenv("zlib_ver"))) -load(pathJoin("png", os.getenv("png_ver"))) +load(pathJoin("png", os.getenv("libpng_ver"))) load(pathJoin("hdf5", os.getenv("hdf5_ver"))) load(pathJoin("netcdf", os.getenv("netcdf_ver"))) @@ -28,7 +28,11 @@ load(pathJoin("crtm", os.getenv("crtm_ver"))) load(pathJoin("wgrib2", os.getenv("wgrib2_ver"))) setenv("WGRIB2","wgrib2") -prepend_path("MODULEPATH", pathJoin("/data/prod/glopara/git/prepobs/v" .. os.getenv("prepobs_run_ver"), "modulefiles")) +--prepend_path("MODULEPATH", pathJoin("/data/prod/glopara/git/prepobs/v" .. 
os.getenv("prepobs_run_ver"), "modulefiles")) +prepend_path("MODULEPATH", pathJoin("/data/prod/glopara/git/prepobs/feature-GFSv17_com_reorg/modulefiles")) load(pathJoin("prepobs", os.getenv("prepobs_run_ver"))) +prepend_path("MODULEPATH", pathJoin("/data/prod/glopara/git/Fit2Obs/v1.0.0/modulefiles")) +load(pathJoin("fit2obs", "1.0.0")) + whatis("Description: GFS run environment") diff --git a/modulefiles/module_base.wcoss2.lua b/modulefiles/module_base.wcoss2.lua index 0fc498065b..6fb7b11be6 100644 --- a/modulefiles/module_base.wcoss2.lua +++ b/modulefiles/module_base.wcoss2.lua @@ -31,7 +31,11 @@ load(pathJoin("ncdiag", os.getenv("ncdiag_ver"))) load(pathJoin("crtm", os.getenv("crtm_ver"))) load(pathJoin("wgrib2", os.getenv("wgrib2_ver"))) -prepend_path("MODULEPATH", pathJoin("/lfs/h2/emc/global/save/emc.global/git/prepobs/v" .. os.getenv("prepobs_run_ver"), "modulefiles")) +--prepend_path("MODULEPATH", pathJoin("/lfs/h2/emc/global/save/emc.global/git/prepobs/v" .. os.getenv("prepobs_run_ver"), "modulefiles")) +prepend_path("MODULEPATH", pathJoin("/lfs/h2/emc/global/save/emc.global/git/prepobs/feature-GFSv17_com_reorg_log_update/modulefiles")) load(pathJoin("prepobs", os.getenv("prepobs_run_ver"))) +prepend_path("MODULEPATH", pathJoin("/lfs/h2/emc/global/save/emc.global/git/Fit2Obs/v1.0.0/modulefiles")) +load(pathJoin("fit2obs", "1.0.0")) + whatis("Description: GFS run environment") diff --git a/modulefiles/module_gwci.hera.lua b/modulefiles/module_gwci.hera.lua new file mode 100644 index 0000000000..f4b62a5fd2 --- /dev/null +++ b/modulefiles/module_gwci.hera.lua @@ -0,0 +1,15 @@ +help([[ +Load environment to run GFS workflow setup scripts on Hera +]]) + +prepend_path("MODULEPATH", "/scratch2/NCEPDEV/nwprod/hpc-stack/libs/hpc-stack/modulefiles/stack") + +load(pathJoin("hpc", "1.1.0")) +load(pathJoin("hpc-intel", "18.0.5.274")) +load(pathJoin("hpc-impi", "2018.0.4")) + +load(pathJoin("netcdf","4.7.4")) +load(pathJoin("nccmp","1.8.7.0")) +load(pathJoin("wgrib2", 
"2.0.8")) + +whatis("Description: GFS run setup CI environment") diff --git a/modulefiles/module_gwci.orion.lua b/modulefiles/module_gwci.orion.lua new file mode 100644 index 0000000000..779e80a454 --- /dev/null +++ b/modulefiles/module_gwci.orion.lua @@ -0,0 +1,21 @@ +help([[ +Load environment to run GFS workflow ci scripts on Orion +]]) + +prepend_path("MODULEPATH", "/apps/contrib/NCEP/hpc-stack/libs/hpc-stack/modulefiles/stack") + +load(pathJoin("hpc", "1.1.0")) +load(pathJoin("hpc-intel", "2018.4")) +load(pathJoin("hpc-impi", "2018.4")) +load(pathJoin("netcdf","4.7.4")) +load(pathJoin("nccmp","1.8.7.0")) +load(pathJoin("contrib","0.1")) +load(pathJoin("wgrib2","3.0.2")) + +prepend_path("MODULEPATH", "/work2/noaa/global/wkolczyn/save/hpc-stack/modulefiles/stack") +load(pathJoin("hpc", "1.2.0")) +load(pathJoin("hpc-intel", "2018.4")) +load(pathJoin("hpc-miniconda3", "4.6.14")) +load(pathJoin("gfs_workflow", "1.0.0")) + +whatis("Description: GFS run ci top-level scripts environment") diff --git a/modulefiles/module_setup.hera.lua b/modulefiles/module_gwsetup.hera.lua similarity index 99% rename from modulefiles/module_setup.hera.lua rename to modulefiles/module_gwsetup.hera.lua index 4971a3f2d9..a07b32b6a6 100644 --- a/modulefiles/module_setup.hera.lua +++ b/modulefiles/module_gwsetup.hera.lua @@ -2,10 +2,9 @@ help([[ Load environment to run GFS workflow setup scripts on Hera ]]) --- Temporary until official hpc-stack is updated - load(pathJoin("rocoto")) +-- Temporary until official hpc-stack is updated prepend_path("MODULEPATH", "/scratch2/NCEPDEV/ensemble/save/Walter.Kolczynski/hpc-stack/modulefiles/stack") load(pathJoin("hpc", "1.2.0")) load(pathJoin("hpc-miniconda3", "4.6.14")) diff --git a/modulefiles/module_gwsetup.orion.lua b/modulefiles/module_gwsetup.orion.lua new file mode 100644 index 0000000000..37f3187fb4 --- /dev/null +++ b/modulefiles/module_gwsetup.orion.lua @@ -0,0 +1,17 @@ +help([[ +Load environment to run GFS workflow ci scripts on Orion +]]) + 
+-- Temporary until official hpc-stack is updated + +prepend_path("MODULEPATH", "/apps/modulefiles/core") +load(pathJoin("contrib","0.1")) +load(pathJoin("rocoto","1.3.3")) +load(pathJoin("git","2.28.0")) + +prepend_path("MODULEPATH", "/work2/noaa/global/wkolczyn/save/hpc-stack/modulefiles/stack") +load(pathJoin("hpc", "1.2.0")) +load(pathJoin("hpc-miniconda3", "4.6.14")) +load(pathJoin("gfs_workflow", "1.0.0")) + +whatis("Description: GFS run ci top-level scripts environment") diff --git a/parm/config/config.atmanal b/parm/config/config.atmanal deleted file mode 100644 index 2e36ffa603..0000000000 --- a/parm/config/config.atmanal +++ /dev/null @@ -1,25 +0,0 @@ -#!/bin/bash -x - -########## config.atmanal ########## -# configuration common to all atm analysis tasks - -echo "BEGIN: config.atmanal" - -export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ -export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/gdas_prototype.yaml -export ATMVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/variational/3dvar_dripcg.yaml -export STATICB_TYPE="identity" -export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/berror/staticb_${STATICB_TYPE}.yaml -export FV3JEDI_FIX=${HOMEgfs}/fix/gdas -export R2D2_OBS_DB='ufsda_test' -export R2D2_OBS_DUMP='oper_gdas' -export R2D2_OBS_SRC='ncdiag' -export R2D2_BC_SRC='gsi' -export R2D2_BC_DUMP='oper_gdas' -export R2D2_ARCH_DB='local' -export INTERP_METHOD='barycentric' - -export io_layout_x=1 -export io_layout_y=1 - -echo "END: config.atmanal" diff --git a/parm/config/config.atmanalpost b/parm/config/config.atmanalpost deleted file mode 100644 index fd5f3bbbcc..0000000000 --- a/parm/config/config.atmanalpost +++ /dev/null @@ -1,10 +0,0 @@ -#!/bin/bash -x - -########## config.atmanalpost ########## -# Post Atm Analysis specific - -echo "BEGIN: config.atmanalpost" - -# Get task specific resources -. 
$EXPDIR/config.resources atmanalpost -echo "END: config.atmanalpost" diff --git a/parm/config/config.atmanalprep b/parm/config/config.atmanalprep deleted file mode 100644 index 0014520f5f..0000000000 --- a/parm/config/config.atmanalprep +++ /dev/null @@ -1,10 +0,0 @@ -#!/bin/bash -x - -########## config.atmanalprep ########## -# Pre Atm Analysis specific - -echo "BEGIN: config.atmanalprep" - -# Get task specific resources -. $EXPDIR/config.resources atmanalprep -echo "END: config.atmanalprep" diff --git a/parm/config/config.atmanalrun b/parm/config/config.atmanalrun deleted file mode 100644 index 5aaac6a01d..0000000000 --- a/parm/config/config.atmanalrun +++ /dev/null @@ -1,14 +0,0 @@ -#!/bin/bash -x - -########## config.atmanalrun ########## -# Atm Analysis specific - -echo "BEGIN: config.atmanalrun" - -# Get task specific resources -. $EXPDIR/config.resources atmanalrun - -# Task specific variables -export JEDIVAREXE=$HOMEgfs/exec/fv3jedi_var.x - -echo "END: config.atmanalrun" diff --git a/parm/config/config.atmensanal b/parm/config/config.atmensanal deleted file mode 100644 index 2c939f0d84..0000000000 --- a/parm/config/config.atmensanal +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/bash -x - -########## config.atmensanal ########## -# configuration common to all atm atmensanal analysis tasks - -echo "BEGIN: config.atmensanal" - -export OBS_YAML_DIR=$HOMEgfs/sorc/gdas.cd/parm/atm/obs/config/ -export OBS_LIST=$HOMEgfs/sorc/gdas.cd/parm/atm/obs/lists/lgetkf_prototype.yaml -export BERROR_YAML=$HOMEgfs/sorc/gdas.cd/parm/atm/berror/hybvar_bump.yaml -export ATMENSYAML=$HOMEgfs/sorc/gdas.cd/parm/atm/lgetkf/lgetkf.yaml -export FV3JEDI_FIX=$HOMEgfs/fix/gdas -export R2D2_OBS_DB='ufsda_test' -export R2D2_OBS_DUMP='oper_gdas' -export R2D2_OBS_SRC='ncdiag' -export R2D2_BC_SRC='gsi' -export R2D2_BC_DUMP='oper_gdas' -export R2D2_ARCH_DB='local' -export INTERP_METHOD='barycentric' - -export io_layout_x=1 # hardwired to 1,1 in yamltools.py -export io_layout_y=1 - -echo "END: 
config.atmensanal" diff --git a/parm/config/config.atmensanalpost b/parm/config/config.atmensanalpost deleted file mode 100644 index f79ee5b507..0000000000 --- a/parm/config/config.atmensanalpost +++ /dev/null @@ -1,10 +0,0 @@ -#!/bin/bash -x - -########## config.atmensanalpost ########## -# Post Atm Analysis specific - -echo "BEGIN: config.atmensanalpost" - -# Get task specific resources -. $EXPDIR/config.resources atmensanalpost -echo "END: config.atmensanalpost" diff --git a/parm/config/config.atmensanalprep b/parm/config/config.atmensanalprep deleted file mode 100644 index b719b9ac6c..0000000000 --- a/parm/config/config.atmensanalprep +++ /dev/null @@ -1,10 +0,0 @@ -#!/bin/bash -x - -########## config.atmensanalprep ########## -# Pre Atm Analysis specific - -echo "BEGIN: config.atmensanalprep" - -# Get task specific resources -. $EXPDIR/config.resources atmensanalprep -echo "END: config.atmensanalprep" diff --git a/parm/config/config.atmensanalrun b/parm/config/config.atmensanalrun deleted file mode 100644 index aeb59d1805..0000000000 --- a/parm/config/config.atmensanalrun +++ /dev/null @@ -1,14 +0,0 @@ -#!/bin/bash -x - -########## config.atmensanalrun ########## -# Atm LETKFs specific - -echo "BEGIN: config.atmensanalrun" - -# Get task specific resources -. $EXPDIR/config.resources atmensanalrun - -# Task specific variables -export JEDIENSEXE=$HOMEgfs/exec/fv3jedi_letkf.x - -echo "END: config.atmensanalrun" diff --git a/parm/config/config.getic b/parm/config/config.getic deleted file mode 100644 index fce3f9ecf6..0000000000 --- a/parm/config/config.getic +++ /dev/null @@ -1,66 +0,0 @@ -#! /usr/bin/env bash - -########## config.getic ########## -# Fetching GFS initial conditions specific - -echo "BEGIN: config.getic" - -# Get task specific resources -. 
$EXPDIR/config.resources getic - -export RETRO="NO" # YES = Pull v16 inputs from retrospective parallels; NO = use operational inputs -export gfs_ver="v16" # Default = v16 -export OPS_RES=${OPS_RES:-"C768"} # Operational resolution - -export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd -export GDASINIT_DIR=${UFS_DIR}/util/gdas_init - -export PRODHPSSDIR=/NCEPPROD/hpssprod/runhistory -export GETICSH=${GDASINIT_DIR}/get_v16.data.sh - -if [ ${RETRO:-"NO"} = "YES" ]; then # Retrospective parallel input - export GETICSH=${GDASINIT_DIR}/get_v16retro.data.sh - if [[ "$CDATE" -lt "2019060106" ]]; then - HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro0e - elif [[ "$CDATE" -lt "2019090100" ]]; then - HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro1e - elif [[ "$CDATE" -lt "2019101706" ]]; then - HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro2e - elif [[ "$CDATE" -lt "2020122200" ]]; then - HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2 - elif [[ "$CDATE" -le "2021032506" ]]; then - HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2n - else - set +x - echo NO DATA FOR $CDATE - exit 3 - fi -elif [ ${RETRO:-"NO"} = "NO" ]; then # Operational input - # No ENKF data prior to 2012/05/21/00z - if [[ "$CDATE" -lt "2012052100" ]]; then - set +x - echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA - elif [[ "$CDATE" -lt "2016051000" ]]; then - export gfs_ver=v12 - export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh - elif [[ "$CDATE" -lt "2017072000" ]]; then - export gfs_ver=v13 - export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh - elif [[ "$CDATE" -lt "2019061200" ]]; then - export gfs_ver=v14 - export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh - elif [[ "$CDATE" -lt "2021032100" ]]; then - export gfs_ver=v15 - export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh - elif [[ "$CDATE" -lt "2021032106" ]]; then - # The way the v16 switch over was done, there is no complete - # set of 
v16 or v15 data for 2021032100. And although - # v16 was officially implemented 2021032212, the v16 prod - # tarballs were archived starting 2021032106. - set +x - echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 - exit 1 - fi -fi - -echo "END: config.getic" diff --git a/parm/config/config.gldas b/parm/config/config.gldas deleted file mode 100644 index c51829d9fc..0000000000 --- a/parm/config/config.gldas +++ /dev/null @@ -1,16 +0,0 @@ -#! /usr/bin/env bash - -########## config.gldas ########## -# GDAS gldas step specific - -echo "BEGIN: config.gldas" - -# Get task specific resources -. $EXPDIR/config.resources gldas - -export GLDASSH=$HOMEgfs/scripts/exgdas_atmos_gldas.sh -export gldas_spinup_hours=72 -export CPCGAUGE=$DMPDIR -export FINDDATE=$USHgfs/finddate.sh - -echo "END: config.gldas" diff --git a/parm/config/config.init b/parm/config/config.init deleted file mode 100644 index 3e016fb248..0000000000 --- a/parm/config/config.init +++ /dev/null @@ -1,54 +0,0 @@ -#! /usr/bin/env bash - -########## config.init ########## -# Prepare initial conditions - -echo "BEGIN: config.init" - -# Get task specific resources -. $EXPDIR/config.resources init - -# Get task specific resources -. 
$EXPDIR/config.getic - -export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd -export GDASINIT_DIR=${UFS_DIR}/util/gdas_init - -export CRES_HIRES=$CASE -export CRES_ENKF=$CASE_ENKF -export FRAC_ORO="yes" - -export RUNICSH=${GDASINIT_DIR}/run_v16.chgres.sh -if [ "${RETRO:-"NO"}" = "YES" ] || [ "$CDUMP" = "gdas" ]; then - export RUNICSH=${GDASINIT_DIR}/run_v16retro.chgres.sh -fi - -if [ ${RETRO:-"NO"} = "NO" ]; then # Operational input - # No ENKF data prior to 2012/05/21/00z - if [[ "$CDATE" -lt "2012052100" ]]; then - set +x - echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA - elif [[ "$CDATE" -lt "2016051000" ]]; then - export gfs_ver=v12 - export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh - elif [[ "$CDATE" -lt "2017072000" ]]; then - export gfs_ver=v13 - export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh - elif [[ "$CDATE" -lt "2019061200" ]]; then - export gfs_ver=v14 - export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.sh - elif [[ "$CDATE" -lt "2021032100" ]]; then - export gfs_ver=v15 - export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.gfs.sh - elif [[ "$CDATE" -lt "2021032106" ]]; then - # The way the v16 switch over was done, there is no complete - # set of v16 or v15 data for 2021032100. And although - # v16 was officially implemented 2021032212, the v16 prod - # tarballs were archived starting 2021032106. - set +x - echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 - exit 1 - fi -fi - -echo "END: config.init" diff --git a/parm/config/config.vrfy b/parm/config/config.vrfy deleted file mode 100644 index 1cf08f97c8..0000000000 --- a/parm/config/config.vrfy +++ /dev/null @@ -1,108 +0,0 @@ -#! /usr/bin/env bash - -########## config.vrfy ########## -# Verification step specific - -echo "BEGIN: config.vrfy" - -# Get task specific resources -. 
$EXPDIR/config.resources vrfy - -export CDFNL="gdas" # Scores verification against GDAS/GFS analysis -export MKPGB4PRCP="YES" # Make 0.25-deg pgb files in ARCDIR for precip verification -export VRFYRAD="YES" # Radiance data assimilation monitoring -export VRFYOZN="YES" # Ozone data assimilation monitoring -export VRFYMINMON="YES" # GSI minimization monitoring -export VRFYTRAK="YES" # Hurricane track verification -export VRFYGENESIS="YES" # Cyclone genesis verification -export VRFYFSU="NO" # Cyclone genesis verification (FSU) -export RUNMOS="NO" # whether to run entire MOS package - -#---------------------------------------------------------- -# Minimization, Radiance and Ozone Monitoring -#---------------------------------------------------------- - -if [ $VRFYRAD = "YES" -o $VRFYMINMON = "YES" -o $VRFYOZN = "YES" ]; then - - export envir="para" - export COM_IN=$ROTDIR - - # Radiance Monitoring - if [[ "$VRFYRAD" == "YES" && "$CDUMP" == "$CDFNL" ]] ; then - - export RADMON_SUFFIX=$PSLOT - export TANKverf="$NOSCRUB/monitor/radmon" - export VRFYRADSH="$HOMEgfs/jobs/JGDAS_ATMOS_VERFRAD" - - fi - - # Minimization Monitoring - if [[ "$VRFYMINMON" = "YES" ]] ; then - - export MINMON_SUFFIX=$PSLOT - export M_TANKverf="$NOSCRUB/monitor/minmon" - if [[ "$CDUMP" = "gdas" ]] ; then - export VRFYMINSH="$HOMEgfs/jobs/JGDAS_ATMOS_VMINMON" - elif [[ "$CDUMP" = "gfs" ]] ; then - export VRFYMINSH="$HOMEgfs/jobs/JGFS_ATMOS_VMINMON" - fi - - fi - - # Ozone Monitoring - if [[ "$VRFYOZN" == "YES" && "$CDUMP" == "$CDFNL" ]] ; then - - export HOMEgfs_ozn="$HOMEgfs" - export OZNMON_SUFFIX=$PSLOT - export TANKverf_ozn="$NOSCRUB/monitor/oznmon" - export VRFYOZNSH="$HOMEgfs/jobs/JGDAS_ATMOS_VERFOZN" - - fi - -fi - - -#------------------------------------------------- -# Cyclone genesis and cyclone track verification -#------------------------------------------------- - -export ens_tracker_ver=v1.1.15.5 -export HOMEens_tracker=$BASE_GIT/TC_tracker/${ens_tracker_ver} - -if [ "$VRFYTRAK" = 
"YES" ]; then - - export TRACKERSH="$HOMEgfs/jobs/JGFS_ATMOS_CYCLONE_TRACKER" - export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat} - if [ "$CDUMP" = "gdas" ]; then - export FHOUT_CYCLONE=3 - export FHMAX_CYCLONE=$FHMAX - else - export FHOUT_CYCLONE=6 - export FHMAX_CYCLONE=$(( FHMAX_GFS<240 ? FHMAX_GFS : 240 )) - fi -fi - - -if [[ "$VRFYGENESIS" == "YES" && "$CDUMP" == "gfs" ]]; then - - export GENESISSH="$HOMEgfs/jobs/JGFS_ATMOS_CYCLONE_GENESIS" -fi - -if [[ "$VRFYFSU" == "YES" && "$CDUMP" == "gfs" ]]; then - - export GENESISFSU="$HOMEgfs/jobs/JGFS_ATMOS_FSU_GENESIS" -fi - -if [[ "$RUNMOS" == "YES" && "$CDUMP" == "gfs" ]]; then - - if [ "$machine" = "HERA" ] ; then - export RUNGFSMOSSH="${HOMEgfs}/scripts/run_gfsmos_master.sh.hera" - else - echo "WARNING: MOS package is not enabled on ${machine}!" - export RUNMOS="NO" - export RUNGFSMOSSH="" - fi -fi - - -echo "END: config.vrfy" diff --git a/parm/config/gefs/config.base.emc.dyn b/parm/config/gefs/config.base.emc.dyn new file mode 100644 index 0000000000..4a4101a156 --- /dev/null +++ b/parm/config/gefs/config.base.emc.dyn @@ -0,0 +1,379 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. 
+export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=emc-global + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${PDY}${cyc}" -ge "2019092100" && "${PDY}${cyc}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the 
job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENS="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENS +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." 
+ export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-3} +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
+ +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENS=@NMEM_ENS@ +export NMEM_ENS_GFS=@NMEM_ENS@ +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${PDY}${cyc}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. 
+ +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc','rwmr_inc','snmr_inc','grle_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc','rwmr_inc','snmr_inc','grle_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
+ +echo "END: config.base" diff --git a/parm/config/gefs/config.com b/parm/config/gefs/config.com new file mode 120000 index 0000000000..6a3754559c --- /dev/null +++ b/parm/config/gefs/config.com @@ -0,0 +1 @@ +../gfs/config.com \ No newline at end of file diff --git a/parm/config/config.coupled_ic b/parm/config/gefs/config.coupled_ic similarity index 89% rename from parm/config/config.coupled_ic rename to parm/config/gefs/config.coupled_ic index 1977c56ca4..50fab283b5 100644 --- a/parm/config/config.coupled_ic +++ b/parm/config/gefs/config.coupled_ic @@ -15,6 +15,8 @@ elif [[ "${machine}" == "ORION" ]]; then export BASE_CPLIC="/work/noaa/global/glopara/data/ICSDIR/prototype_ICs" elif [[ "${machine}" == "S4" ]]; then export BASE_CPLIC="/data/prod/glopara/coupled_ICs" +elif [[ "${machine}" == "JET" ]]; then + export BASE_CPLIC="/mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs" fi diff --git a/parm/config/config.efcs b/parm/config/gefs/config.efcs similarity index 97% rename from parm/config/config.efcs rename to parm/config/gefs/config.efcs index a9b410e416..95c2cb58de 100644 --- a/parm/config/config.efcs +++ b/parm/config/gefs/config.efcs @@ -13,7 +13,7 @@ export DO_WAVE=${DO_WAVE_ENKF:-"NO"} # TODO: Possibly need OCNRES_ENKF, ICERES_ENKF, WAVRES_ENKF too if [[ ${DO_OCN} == "YES" ]]; then - case "$CASE_ENKF" in + case "${CASE_ENS}" in "C48") export OCNRES=500;; "C96") export OCNRES=100;; "C192") export OCNRES=050;; @@ -26,7 +26,7 @@ fi [[ ${DO_WAVE} == "YES" ]] && export waveGRD=${waveGRD_ENKF:-$waveGRD} # TODO: will we run waves with a different resolution in the ensemble? # Source model specific information that is resolution dependent -string="--fv3 $CASE_ENKF" +string="--fv3 ${CASE_ENS}" [[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" [[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" [[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" @@ -84,7 +84,7 @@ fi export cplwav=.false. 
# ocean model resolution -case "$CASE_ENKF" in +case "${CASE_ENS}" in "C48") export OCNRES=500;; "C96") export OCNRES=100;; "C192") export OCNRES=050;; diff --git a/parm/config/gefs/config.resources b/parm/config/gefs/config.resources new file mode 100644 index 0000000000..9dd5c6e737 --- /dev/null +++ b/parm/config/gefs/config.resources @@ -0,0 +1,958 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [[ $# -ne 1 ]]; then + + echo "Must specify an input task argument to set resource variables!" + echo "argument can be any one of the following:" + echo "coupled_ic aerosol_init" + echo "atmanlinit atmanlrun atmanlfinal" + echo "atmensanlinit atmensanlrun atmensanlfinal" + echo "landanlprep landanlinit landanlrun landanlfinal" + echo "aeroanlinit aeroanlrun aeroanlfinal" + echo "anal sfcanl analcalc analdiag fcst post vrfy fit2obs metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "init_chem mom6ic ocnpost" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + echo "ocnanalprep ocnanalbmat ocnanalrun ocnanalchkpt ocnanalpost ocnanalvrfy" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +if [[ "${machine}" = "WCOSS2" ]]; then + export npe_node_max=128 +elif [[ "${machine}" = "JET" ]]; then + if [[ ${PARTITION_BATCH} = "xjet" ]]; then + export npe_node_max=24 + elif [[ ${PARTITION_BATCH} = "vjet" || ${PARTITION_BATCH} = "sjet" ]]; then + export npe_node_max=16 + elif [[ ${PARTITION_BATCH} = "kjet" ]]; then + export npe_node_max=40 + fi +elif [[ ${machine} = "HERA" ]]; then + export npe_node_max=40 +elif [[ ${machine} = "S4" ]]; then + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_node_max=32 + elif [[ 
${PARTITION_BATCH} = "ivy" ]]; then + export npe_node_max=20 + fi +elif [[ ${machine} = "ORION" ]]; then + export npe_node_max=40 +fi + +if [[ ${step} = "prep" ]]; then + export wtime_prep='00:30:00' + export npe_prep=4 + export npe_node_prep=2 + export nth_prep=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export is_exclusive=True + else + export memory_prep="40G" + fi + +elif [[ "${step}" = "aerosol_init" ]]; then + export wtime_aerosol_init="00:05:00" + export npe_aerosol_init=1 + export nth_aerosol_init=1 + npe_node_aerosol_init=$(echo "${npe_node_max} / ${nth_aerosol_init}" | bc) + export npe_node_aerosol_init + export NTASKS=${npe_aerosol_init} + export memory_aerosol_init="6G" + +elif [[ ${step} = "waveinit" ]]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=12 + export nth_waveinit=1 + npe_node_waveinit=$(echo "${npe_node_max} / ${nth_waveinit}" | bc) + export npe_node_waveinit + export NTASKS=${npe_waveinit} + export memory_waveinit="2GB" + +elif [[ ${step} = "waveprep" ]]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export nth_waveprep_gfs=1 + npe_node_waveprep=$(echo "${npe_node_max} / ${nth_waveprep}" | bc) + export npe_node_waveprep + npe_node_waveprep_gfs=$(echo "${npe_node_max} / ${nth_waveprep_gfs}" | bc) + export npe_node_waveprep_gfs + export NTASKS=${npe_waveprep} + export NTASKS_gfs=${npe_waveprep_gfs} + export memory_waveprep="100GB" + export memory_waveprep_gfs="150GB" + +elif [[ ${step} = "wavepostsbs" ]]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + npe_node_wavepostsbs=$(echo "${npe_node_max} / ${nth_wavepostsbs}" | bc) + export npe_node_wavepostsbs + export NTASKS=${npe_wavepostsbs} + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="10GB" + +elif [[ ${step} = "wavepostbndpnt" ]]; then + + export wtime_wavepostbndpnt="01:00:00" + export 
npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + npe_node_wavepostbndpnt=$(echo "${npe_node_max} / ${nth_wavepostbndpnt}" | bc) + export npe_node_wavepostbndpnt + export NTASKS=${npe_wavepostbndpnt} + export is_exclusive=True + +elif [[ ${step} = "wavepostbndpntbll" ]]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + npe_node_wavepostbndpntbll=$(echo "${npe_node_max} / ${nth_wavepostbndpntbll}" | bc) + export npe_node_wavepostbndpntbll + export NTASKS=${npe_wavepostbndpntbll} + export is_exclusive=True + +elif [[ ${step} = "wavepostpnt" ]]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + npe_node_wavepostpnt=$(echo "${npe_node_max} / ${nth_wavepostpnt}" | bc) + export npe_node_wavepostpnt + export NTASKS=${npe_wavepostpnt} + export is_exclusive=True + +elif [[ ${step} = "wavegempak" ]]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + npe_node_wavegempak=$(echo "${npe_node_max} / ${nth_wavegempak}" | bc) + export npe_node_wavegempak + export NTASKS=${npe_wavegempak} + export memory_wavegempak="1GB" + +elif [[ ${step} = "waveawipsbulls" ]]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + npe_node_waveawipsbulls=$(echo "${npe_node_max} / ${nth_waveawipsbulls}" | bc) + export npe_node_waveawipsbulls + export NTASKS=${npe_waveawipsbulls} + export is_exclusive=True + +elif [[ ${step} = "waveawipsgridded" ]]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + npe_node_waveawipsgridded=$(echo "${npe_node_max} / ${nth_waveawipsgridded}" | bc) + export npe_node_waveawipsgridded + export NTASKS=${npe_waveawipsgridded} + export memory_waveawipsgridded_gfs="1GB" + +elif [[ "${step}" = "atmanlinit" ]]; then + + export wtime_atmanlinit="00:10:00" + export npe_atmanlinit=1 + 
export nth_atmanlinit=1 + npe_node_atmanlinit=$(echo "${npe_node_max} / ${nth_atmanlinit}" | bc) + export npe_node_atmanlinit + export memory_atmanlinit="3072M" + +elif [[ "${step}" = "atmanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmanlrun="00:30:00" + npe_atmanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun + npe_atmanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun_gfs + export nth_atmanlrun=1 + export nth_atmanlrun_gfs=${nth_atmanlrun} + npe_node_atmanlrun=$(echo "${npe_node_max} / ${nth_atmanlrun}" | bc) + export npe_node_atmanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmanlfinal" ]]; then + + export wtime_atmanlfinal="00:30:00" + export npe_atmanlfinal=${npe_node_max} + export nth_atmanlfinal=1 + npe_node_atmanlfinal=$(echo "${npe_node_max} / ${nth_atmanlfinal}" | bc) + export npe_node_atmanlfinal + export is_exclusive=True + +elif [[ "${step}" = "landanlprep" || "${step}" = "landanlinit" || "${step}" = "landanlrun" || "${step}" = "landanlfinal" ]]; then + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resolution not supported for land analysis" + exit 1 + esac + + export layout_x + export layout_y + + if [[ "${step}" = "landanlinit" || "${step}" = "landanlfinal" ]]; then + declare -x "wtime_${step}"="00:10:00" + declare -x "npe_${step}"=1 + declare -x "nth_${step}"=1 + temp_stepname="nth_${step}" + declare -x "npe_node_${step}"="$(echo "${npe_node_max} / ${!temp_stepname}" | bc)" + declare -x "memory_${step}"="3072M" + elif [[ "${step}" = "landanlrun" ]]; then + export wtime_landanlrun="00:30:00" + npe_landanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_landanlrun + export nth_landanlrun=1 + npe_node_landanlrun=$(echo "${npe_node_max} / 
${nth_landanlrun}" | bc) + export npe_node_landanlrun + export is_exclusive=True + elif [[ "${step}" = "landanlprep" ]]; then + export wtime_landanlprep="00:30:00" + npe_landanlprep=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_landanlprep + export nth_landanlprep=1 + npe_node_landanlprep=$(echo "${npe_node_max} / ${nth_landanlprep}" | bc) + export npe_node_landanlprep + export is_exclusive=True + fi + +elif [[ "${step}" = "aeroanlinit" ]]; then + + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution not supported for aerosol analysis" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlinit="00:10:00" + export npe_aeroanlinit=1 + export nth_aeroanlinit=1 + npe_node_aeroanlinit=$(echo "${npe_node_max} / ${nth_aeroanlinit}" | bc) + export npe_node_aeroanlinit + export memory_aeroanlinit="3072M" + +elif [[ "${step}" = "aeroanlrun" ]]; then + + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} is not supported, ABORT!" 
+ exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlrun="00:30:00" + npe_aeroanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun + npe_aeroanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun_gfs + export nth_aeroanlrun=1 + export nth_aeroanlrun_gfs=1 + npe_node_aeroanlrun=$(echo "${npe_node_max} / ${nth_aeroanlrun}" | bc) + export npe_node_aeroanlrun + export is_exclusive=True + +elif [[ "${step}" = "aeroanlfinal" ]]; then + + export wtime_aeroanlfinal="00:10:00" + export npe_aeroanlfinal=1 + export nth_aeroanlfinal=1 + npe_node_aeroanlfinal=$(echo "${npe_node_max} / ${nth_aeroanlfinal}" | bc) + export npe_node_aeroanlfinal + export memory_aeroanlfinal="3072M" + +elif [[ "${step}" = "ocnanalprep" ]]; then + + export wtime_ocnanalprep="00:10:00" + export npe_ocnanalprep=1 + export nth_ocnanalprep=1 + npe_node_ocnanalprep=$(echo "${npe_node_max} / ${nth_ocnanalprep}" | bc) + export npe_node_ocnanalprep + export memory_ocnanalprep="24GB" + +elif [[ "${step}" = "ocnanalbmat" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported" + exit 1 + esac + + export wtime_ocnanalbmat="00:30:00" + export npe_ocnanalbmat=${npes} + export nth_ocnanalbmat=1 + export is_exclusive=True + npe_node_ocnanalbmat=$(echo "${npe_node_max} / ${nth_ocnanalbmat}" | bc) + export npe_node_ocnanalbmat + +elif [[ "${step}" = "ocnanalrun" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported" + exit 1 + esac + + export wtime_ocnanalrun="00:30:00" + export npe_ocnanalrun=${npes} + export nth_ocnanalrun=1 + export is_exclusive=True + npe_node_ocnanalrun=$(echo "${npe_node_max} / ${nth_ocnanalrun}" | bc) + export npe_node_ocnanalrun + +elif [[ "${step}" = "ocnanalchkpt" ]]; then + + export wtime_ocnanalchkpt="00:10:00" + export npe_ocnanalchkpt=1 + export nth_ocnanalchkpt=1 + 
npe_node_ocnanalchkpt=$(echo "${npe_node_max} / ${nth_ocnanalchkpt}" | bc) + export npe_node_ocnanalchkpt + case ${CASE} in + C384) + export memory_ocnanalchkpt="128GB" + ;; + C48) + export memory_ocnanalchkpt="32GB" + ;; + *) + echo "FATAL: Resolution not supported" + exit 1 + esac + +elif [[ "${step}" = "ocnanalpost" ]]; then + + export wtime_ocnanalpost="00:30:00" + export npe_ocnanalpost=${npe_node_max} + export nth_ocnanalpost=1 + npe_node_ocnanalpost=$(echo "${npe_node_max} / ${nth_ocnanalpost}" | bc) + export npe_node_ocnanalpost + +elif [[ "${step}" = "ocnanalvrfy" ]]; then + + export wtime_ocnanalvrfy="00:35:00" + export npe_ocnanalvrfy=1 + export nth_ocnanalvrfy=1 + npe_node_ocnanalvrfy=$(echo "${npe_node_max} / ${nth_ocnanalvrfy}" | bc) + export npe_node_ocnanalvrfy + export memory_ocnanalvrfy="24GB" + +elif [[ ${step} = "anal" ]]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=5 + export npe_anal_gfs=825 + export nth_anal_gfs=5 + if [[ "${machine}" = "WCOSS2" ]]; then + export nth_anal=8 + export nth_anal_gfs=8 + fi + if [[ ${CASE} = "C384" ]]; then + export npe_anal=160 + export npe_anal_gfs=160 + export nth_anal=10 + export nth_anal_gfs=10 + if [[ ${machine} = "S4" ]]; then + #On the S4-s4 partition, this is accomplished by increasing the task + #count to a multiple of 32 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=416 + export npe_anal_gfs=416 + fi + #S4 is small, so run this task with just 1 thread + export nth_anal=1 + export nth_anal_gfs=1 + export wtime_anal="02:00:00" + fi + fi + if [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_anal=84 + export npe_anal_gfs=84 + if [[ ${machine} = "S4" ]]; then + export nth_anal=4 + export nth_anal_gfs=4 + #Adjust job count for S4 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=88 + export npe_anal_gfs=88 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_anal=90 + export 
npe_anal_gfs=90 + fi + fi + fi + npe_node_anal=$(echo "${npe_node_max} / ${nth_anal}" | bc) + export npe_node_anal + export nth_cycle=${nth_anal} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "analcalc" ]]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export ntasks="${npe_analcalc}" + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + npe_node_analcalc=$(echo "${npe_node_max} / ${nth_analcalc}" | bc) + export npe_node_analcalc + export is_exclusive=True + +elif [[ ${step} = "analdiag" ]]; then + + export wtime_analdiag="00:15:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + npe_node_analdiag=$(echo "${npe_node_max} / ${nth_analdiag}" | bc) + export npe_node_analdiag + export memory_analdiag="48GB" + +elif [[ ${step} = "sfcanl" ]]; then + + export wtime_sfcanl="00:10:00" + export npe_sfcanl=6 + export nth_sfcanl=1 + npe_node_sfcanl=$(echo "${npe_node_max} / ${nth_sfcanl}" | bc) + export npe_node_sfcanl + export is_exclusive=True + +elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then + + export is_exclusive=True + + if [[ "${step}" = "fcst" ]]; then + _CDUMP_LIST=${CDUMP:-"gdas gfs"} + elif [[ "${step}" = "efcs" ]]; then + _CDUMP_LIST=${CDUMP:-"enkfgdas enkfgfs"} + fi + + # During workflow creation, we need resources for all CDUMPs and CDUMP is undefined + for _CDUMP in ${_CDUMP_LIST}; do + if [[ "${_CDUMP}" =~ "gfs" ]]; then + export layout_x=${layout_x_gfs} + export layout_y=${layout_y_gfs} + export WRITE_GROUP=${WRITE_GROUP_GFS} + export WRTTASK_PER_GROUP_PER_THREAD=${WRTTASK_PER_GROUP_PER_THREAD_GFS} + ntasks_fv3=${ntasks_fv3_gfs} + ntasks_quilt=${ntasks_quilt_gfs} + nthreads_fv3=${nthreads_fv3_gfs} + fi + + # PETS for the atmosphere dycore + (( FV3PETS = ntasks_fv3 * nthreads_fv3 )) + echo "FV3 using (nthreads, PETS) = (${nthreads_fv3}, ${FV3PETS})" + + # PETS for quilting + if 
[[ "${QUILTING:-}" = ".true." ]]; then + (( QUILTPETS = ntasks_quilt * nthreads_fv3 )) + (( WRTTASK_PER_GROUP = WRTTASK_PER_GROUP_PER_THREAD )) + export WRTTASK_PER_GROUP + else + QUILTPETS=0 + fi + echo "QUILT using (nthreads, PETS) = (${nthreads_fv3}, ${QUILTPETS})" + + # Total PETS for the atmosphere component + ATMTHREADS=${nthreads_fv3} + (( ATMPETS = FV3PETS + QUILTPETS )) + export ATMPETS ATMTHREADS + echo "FV3ATM using (nthreads, PETS) = (${ATMTHREADS}, ${ATMPETS})" + + # Total PETS for the coupled model (starting w/ the atmosphere) + NTASKS_TOT=${ATMPETS} + + # The mediator PETS can overlap with other components, usually it lands on the atmosphere tasks. + # However, it is suggested limiting mediator PETS to 300, as it may cause the slow performance. + # See https://docs.google.com/document/d/1bKpi-52t5jIfv2tuNHmQkYUe3hkKsiG_DG_s6Mnukog/edit + # TODO: Update reference when moved to ufs-weather-model RTD + MEDTHREADS=${nthreads_mediator:-1} + MEDPETS=${MEDPETS:-ATMPETS} + [[ "${MEDPETS}" -gt 300 ]] && MEDPETS=300 + export MEDPETS MEDTHREADS + echo "MEDIATOR using (threads, PETS) = (${MEDTHREADS}, ${MEDPETS})" + + if [[ "${DO_AERO}" = "YES" ]]; then + # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks). 
+ (( CHMTHREADS = ATMTHREADS )) + (( CHMPETS = FV3PETS )) + # Do not add to NTASKS_TOT + export CHMPETS CHMTHREADS + echo "GOCART using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})" + fi + + if [[ "${DO_WAVE}" = "YES" ]]; then + (( WAVPETS = ntasks_ww3 * nthreads_ww3 )) + (( WAVTHREADS = nthreads_ww3 )) + export WAVPETS WAVTHREADS + echo "WW3 using (threads, PETS) = (${WAVTHREADS}, ${WAVPETS})" + (( NTASKS_TOT = NTASKS_TOT + WAVPETS )) + fi + + if [[ "${DO_OCN}" = "YES" ]]; then + (( OCNPETS = ntasks_mom6 * nthreads_mom6 )) + (( OCNTHREADS = nthreads_mom6 )) + export OCNPETS OCNTHREADS + echo "MOM6 using (threads, PETS) = (${OCNTHREADS}, ${OCNPETS})" + (( NTASKS_TOT = NTASKS_TOT + OCNPETS )) + fi + + if [[ "${DO_ICE}" = "YES" ]]; then + (( ICEPETS = ntasks_cice6 * nthreads_cice6 )) + (( ICETHREADS = nthreads_cice6 )) + export ICEPETS ICETHREADS + echo "CICE6 using (threads, PETS) = (${ICETHREADS}, ${ICEPETS})" + (( NTASKS_TOT = NTASKS_TOT + ICEPETS )) + fi + + echo "Total PETS for ${_CDUMP} = ${NTASKS_TOT}" + + if [[ "${_CDUMP}" =~ "gfs" ]]; then + declare -x "npe_${step}_gfs"="${NTASKS_TOT}" + declare -x "nth_${step}_gfs"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}_gfs"="${npe_node_max}" + else + declare -x "npe_${step}"="${NTASKS_TOT}" + declare -x "nth_${step}"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}"="${npe_node_max}" + fi + + done + + case "${CASE}" in + "C48" | "C96" | "C192") + declare -x "wtime_${step}"="00:30:00" + declare -x "wtime_${step}_gfs"="03:00:00" + ;; + "C384" | "C768" | "C1152") + declare -x "wtime_${step}"="01:00:00" + declare -x "wtime_${step}_gfs"="06:00:00" + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} not supported in ${step}" + exit 1 + ;; + esac + + unset _CDUMP _CDUMP_LIST + unset NTASKS_TOT + +elif [[ ${step} = "ocnpost" ]]; then + + export wtime_ocnpost="00:30:00" + export npe_ocnpost=1 + export npe_node_ocnpost=1 + export nth_ocnpost=1 + export 
memory_ocnpost="96G" + if [[ ${machine} == "JET" ]]; then + # JET only has 88GB of requestable memory per node + # so a second node is required to meet the requirement + npe_ocnpost=2 + fi + +elif [[ ${step} = "post" ]]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + res=$(echo "${CASE}" | cut -c2-) + if (( npe_post > res )); then + export npe_post=${res} + fi + export nth_post=1 + export npe_node_post=${npe_post} + export npe_node_post_gfs=${npe_post} + export npe_node_dwn=${npe_node_max} + if [[ "${npe_node_post}" -gt "${npe_node_max}" ]]; then export npe_node_post=${npe_node_max} ; fi + if [[ "${npe_node_post_gfs}" -gt "${npe_node_max}" ]]; then export npe_node_post_gfs=${npe_node_max} ; fi + export is_exclusive=True + +elif [[ ${step} = "wafs" ]]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=${npe_wafs} + export nth_wafs=1 + export memory_wafs="1GB" + +elif [[ ${step} = "wafsgcip" ]]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export nth_wafsgcip=1 + export npe_node_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [[ ${step} = "wafsgrib2" ]]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export nth_wafsgrib2=1 + npe_node_wafsgrib2=$(echo "${npe_node_max} / ${nth_wafsgrib2}" | bc) + export npe_node_wafsgrib2 + export memory_wafsgrib2="80GB" + +elif [[ ${step} = "wafsblending" ]]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export nth_wafsblending=1 + npe_node_wafsblending=$(echo "${npe_node_max} / ${nth_wafsblending}" | bc) + export npe_node_wafsblending + export memory_wafsblending="15GB" + +elif [[ ${step} = "wafsgrib20p25" ]]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export nth_wafsgrib20p25=1 + npe_node_wafsgrib20p25=$(echo "${npe_node_max} / ${nth_wafsgrib20p25}" | bc) + export npe_node_wafsgrib20p25 + export memory_wafsgrib20p25="80GB" + +elif [[ 
${step} = "wafsblending0p25" ]]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export nth_wafsblending0p25=1 + npe_node_wafsblending0p25=$(echo "${npe_node_max} / ${nth_wafsblending0p25}" | bc) + export npe_node_wafsblending0p25 + export memory_wafsblending0p25="15GB" + +elif [[ ${step} = "vrfy" ]]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + if [[ ${machine} == "HERA" ]]; then + export memory_vrfy="16384M" + fi + export is_exclusive=True + +elif [[ "${step}" = "fit2obs" ]]; then + + export wtime_fit2obs="00:20:00" + export npe_fit2obs=3 + export nth_fit2obs=1 + export npe_node_fit2obs=1 + export memory_fit2obs="20G" + if [[ ${machine} == "WCOSS2" ]]; then export npe_node_fit2obs=3 ; fi + +elif [[ "${step}" = "metp" ]]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + export is_exclusive=True + +elif [[ ${step} = "echgres" ]]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=${npe_node_max} + export npe_node_echgres=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export memory_echgres="200GB" + fi + +elif [[ ${step} = "init_chem" ]]; then + + export wtime_init_chem="00:30:00" + export npe_init_chem=1 + export npe_node_init_chem=1 + export is_exclusive=True + +elif [[ ${step} = "mom6ic" ]]; then + + export wtime_mom6ic="00:30:00" + export npe_mom6ic=24 + export npe_node_mom6ic=24 + export is_exclusive=True + +elif [[ ${step} = "arch" || ${step} = "earc" ]]; then + + eval "export wtime_${step}='06:00:00'" + eval "export npe_${step}=1" + eval "export npe_node_${step}=1" + eval "export nth_${step}=1" + eval "export memory_${step}=4096M" + if [[ "${machine}" = "WCOSS2" ]]; then + eval "export memory_${step}=50GB" 
+ fi + +elif [[ ${step} = "coupled_ic" ]]; then + + export wtime_coupled_ic="00:15:00" + export npe_coupled_ic=1 + export npe_node_coupled_ic=1 + export nth_coupled_ic=1 + export is_exclusive=True + +elif [[ "${step}" = "atmensanlinit" ]]; then + + export wtime_atmensanlinit="00:10:00" + export npe_atmensanlinit=1 + export nth_atmensanlinit=1 + npe_node_atmensanlinit=$(echo "${npe_node_max} / ${nth_atmensanlinit}" | bc) + export npe_node_atmensanlinit + export memory_atmensanlinit="3072M" + +elif [[ "${step}" = "atmensanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmensanlrun="00:30:00" + npe_atmensanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun + npe_atmensanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun_gfs + export nth_atmensanlrun=1 + export nth_atmensanlrun_gfs=${nth_atmensanlrun} + npe_node_atmensanlrun=$(echo "${npe_node_max} / ${nth_atmensanlrun}" | bc) + export npe_node_atmensanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmensanlfinal" ]]; then + + export wtime_atmensanlfinal="00:30:00" + export npe_atmensanlfinal=${npe_node_max} + export nth_atmensanlfinal=1 + npe_node_atmensanlfinal=$(echo "${npe_node_max} / ${nth_atmensanlfinal}" | bc) + export npe_node_atmensanlfinal + export is_exclusive=True + +elif [[ ${step} = "eobs" || ${step} = "eomg" ]]; then + + export wtime_eobs="00:15:00" + export wtime_eomg="01:00:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eobs=200 + elif [[ ${CASE} = "C384" ]]; then + export npe_eobs=100 + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eobs=40 + fi + export npe_eomg=${npe_eobs} + export nth_eobs=2 + export nth_eomg=${nth_eobs} + npe_node_eobs=$(echo "${npe_node_max} / ${nth_eobs}" | bc) + export npe_node_eobs + export npe_node_eomg=${npe_node_eobs} + export is_exclusive=True + #The number of tasks and cores used must be the same for eobs + 
#For S4, this is accomplished by running 10 tasks/node + if [[ ${machine} = "S4" ]]; then + export npe_node_eobs=10 + fi + +elif [[ ${step} = "ediag" ]]; then + + export wtime_ediag="00:15:00" + export npe_ediag=48 + export nth_ediag=1 + npe_node_ediag=$(echo "${npe_node_max} / ${nth_ediag}" | bc) + export npe_node_ediag + export memory_ediag="30GB" + +elif [[ ${step} = "eupd" ]]; then + + export wtime_eupd="00:30:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eupd=480 + export nth_eupd=6 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + fi + elif [[ ${CASE} = "C384" ]]; then + export npe_eupd=270 + export nth_eupd=2 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + elif [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=8 + elif [[ ${machine} = "S4" ]]; then + export npe_eupd=160 + export nth_eupd=2 + fi + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eupd=42 + export nth_eupd=2 + if [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=4 + fi + fi + npe_node_eupd=$(echo "${npe_node_max} / ${nth_eupd}" | bc) + export npe_node_eupd + export is_exclusive=True + +elif [[ ${step} = "ecen" ]]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + if [[ "${machine}" = "HERA" ]]; then export nth_ecen=6; fi + if [[ ${CASE} = "C384" || ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then export nth_ecen=2; fi + npe_node_ecen=$(echo "${npe_node_max} / ${nth_ecen}" | bc) + export npe_node_ecen + export nth_cycle=${nth_ecen} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "esfc" ]]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export nth_esfc=1 + npe_node_esfc=$(echo "${npe_node_max} / ${nth_esfc}" | bc) + export npe_node_esfc + export nth_cycle=${nth_esfc} + 
npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export memory_esfc="80GB" + +elif [[ ${step} = "epos" ]]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + if [[ "${machine}" == "HERA" ]]; then + export nth_epos=6 + fi + npe_node_epos=$(echo "${npe_node_max} / ${nth_epos}" | bc) + export npe_node_epos + export is_exclusive=True + +elif [[ ${step} = "postsnd" ]]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + postsnd_req_cores=$(echo "${npe_node_postsnd} * ${nth_postsnd}" | bc) + if [[ ${postsnd_req_cores} -gt "${npe_node_max}" ]]; then + npe_node_postsnd=$(echo "${npe_node_max} / ${nth_postsnd}" | bc) + export npe_node_postsnd + fi + export is_exclusive=True + +elif [[ ${step} = "awips" ]]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="3GB" + +elif [[ ${step} = "gempak" ]]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="4GB" + export memory_gempak_gfs="2GB" + +else + + echo "Invalid step = ${step}, ABORT!" 
+ exit 2 + +fi + +echo "END: config.resources" diff --git a/parm/config/config.ufs b/parm/config/gefs/config.ufs similarity index 86% rename from parm/config/config.ufs rename to parm/config/gefs/config.ufs index a4dc53807b..a96ba126e2 100644 --- a/parm/config/config.ufs +++ b/parm/config/gefs/config.ufs @@ -113,9 +113,9 @@ case "${fv3_res}" in export nthreads_fv3_gfs=1 export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling export WRITE_GROUP=1 - export WRTTASK_PER_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 export WRITE_GROUP_GFS=1 - export WRTTASK_PER_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 ;; "C96") export DELTIM=600 @@ -127,9 +127,9 @@ case "${fv3_res}" in export nthreads_fv3_gfs=1 export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling export WRITE_GROUP=1 - export WRTTASK_PER_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 export WRITE_GROUP_GFS=1 - export WRTTASK_PER_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 ;; "C192") export DELTIM=450 @@ -141,9 +141,9 @@ case "${fv3_res}" in export nthreads_fv3_gfs=2 export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling export WRITE_GROUP=1 - export WRTTASK_PER_GROUP=64 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 export WRITE_GROUP_GFS=2 - export WRTTASK_PER_GROUP_GFS=64 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5 ;; "C384") export DELTIM=300 @@ -155,9 +155,9 @@ case "${fv3_res}" in export nthreads_fv3_gfs=2 export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling export WRITE_GROUP=2 - export WRTTASK_PER_GROUP=48 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=8 export WRITE_GROUP_GFS=2 - export WRTTASK_PER_GROUP_GFS=48 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=4 ;; "C768") export DELTIM=150 @@ -169,9 +169,9 @@ case "${fv3_res}" in export nthreads_fv3_gfs=4 export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, 
cgwd, cgwd src scaling export WRITE_GROUP=2 - export WRTTASK_PER_GROUP=64 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 export WRITE_GROUP_GFS=4 - export WRTTASK_PER_GROUP_GFS=64 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 ;; "C1152") export DELTIM=120 @@ -183,9 +183,9 @@ case "${fv3_res}" in export nthreads_fv3_gfs=4 export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling export WRITE_GROUP=4 - export WRTTASK_PER_GROUP=64 # TODO: refine these numbers when a case is available + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available export WRITE_GROUP_GFS=4 - export WRTTASK_PER_GROUP_GFS=64 # TODO: refine these numbers when a case is available + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available ;; "C3072") export DELTIM=90 @@ -197,9 +197,9 @@ case "${fv3_res}" in export nthreads_fv3_gfs=4 export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling export WRITE_GROUP=4 - export WRTTASK_PER_GROUP=64 # TODO: refine these numbers when a case is available + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available export WRITE_GROUP_GFS=4 - export WRTTASK_PER_GROUP_GFS=64 # TODO: refine these numbers when a case is available + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available ;; *) echo "FATAL ERROR: Unsupported FV3 resolution = ${fv3_res}, ABORT!" 
@@ -207,13 +207,18 @@ case "${fv3_res}" in ;; esac +(( WRTTASK_PER_GROUP_PER_THREAD = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE * 6 )) +(( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 )) +export WRTTASK_PER_GROUP_PER_THREAD +export WRTTASK_PER_GROUP_PER_THREAD_GFS + (( ntasks_fv3 = layout_x * layout_y * 6 )) (( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 )) export ntasks_fv3 export ntasks_fv3_gfs -(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP )) -(( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_GFS )) +(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP_PER_THREAD )) +(( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS )) export ntasks_quilt export ntasks_quilt_gfs diff --git a/parm/config/gefs/yaml/defaults.yaml b/parm/config/gefs/yaml/defaults.yaml new file mode 100644 index 0000000000..6e7633bfe0 --- /dev/null +++ b/parm/config/gefs/yaml/defaults.yaml @@ -0,0 +1 @@ +# This file intentionally left blank diff --git a/parm/config/config.aero b/parm/config/gfs/config.aero similarity index 93% rename from parm/config/config.aero rename to parm/config/gfs/config.aero index 171701cd2a..1cb3bf5679 100644 --- a/parm/config/config.aero +++ b/parm/config/gfs/config.aero @@ -19,6 +19,9 @@ case $machine in "WCOSS2") AERO_INPUTS_DIR="/lfs/h2/emc/global/noscrub/emc.global/data/gocart_emissions" ;; + "JET") + AERO_INPUTS_DIR="/lfs4/HFIP/hfv3gfs/glopara/data/gocart_emissions" + ;; *) echo "FATAL ERROR: Machine $machine unsupported for aerosols" exit 2 diff --git a/parm/config/config.aeroanl b/parm/config/gfs/config.aeroanl similarity index 94% rename from parm/config/config.aeroanl rename to parm/config/gfs/config.aeroanl index 3b9a9971f4..41d63f8549 100644 --- a/parm/config/config.aeroanl +++ b/parm/config/gfs/config.aeroanl @@ -18,7 +18,7 @@ export BERROR_DATE="20160630.000000" export io_layout_x=@IO_LAYOUT_X@ export io_layout_y=@IO_LAYOUT_Y@ -export JEDIVAREXE=${HOMEgfs}/exec/fv3jedi_var.x +export 
JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x export crtm_VERSION="2.3.0" echo "END: config.aeroanl" diff --git a/parm/config/config.aeroanlfinal b/parm/config/gfs/config.aeroanlfinal similarity index 100% rename from parm/config/config.aeroanlfinal rename to parm/config/gfs/config.aeroanlfinal diff --git a/parm/config/config.aeroanlinit b/parm/config/gfs/config.aeroanlinit similarity index 100% rename from parm/config/config.aeroanlinit rename to parm/config/gfs/config.aeroanlinit diff --git a/parm/config/config.aeroanlrun b/parm/config/gfs/config.aeroanlrun similarity index 100% rename from parm/config/config.aeroanlrun rename to parm/config/gfs/config.aeroanlrun diff --git a/parm/config/config.aerosol_init b/parm/config/gfs/config.aerosol_init similarity index 100% rename from parm/config/config.aerosol_init rename to parm/config/gfs/config.aerosol_init diff --git a/parm/config/config.anal b/parm/config/gfs/config.anal similarity index 71% rename from parm/config/config.anal rename to parm/config/gfs/config.anal index 4cbe22ea11..e3a17f9c6a 100644 --- a/parm/config/config.anal +++ b/parm/config/gfs/config.anal @@ -29,7 +29,7 @@ fi # Set parameters specific to L127 if [[ ${LEVS} = "128" ]]; then export GRIDOPTS="nlayers(63)=1,nlayers(64)=1," - export SETUP="gpstop=55,nsig_ext=56,${SETUP:-}" + export SETUP="gpstop=55,nsig_ext=45,${SETUP:-}" fi # Set namelist option for LETKF @@ -53,61 +53,42 @@ export OBERROR=${FIXgsi}/prepobs_errtable.global # Use experimental dumps in EMC GFS v16 parallels if [[ ${RUN_ENVIR} == "emc" ]]; then - export ABIBF="/dev/null" - if [[ "${CDATE}" -ge "2019022800" ]] ; then - export ABIBF="${COMIN_OBS}/${CDUMP}.t${cyc}z.gsrcsr.tm00.bufr_d" - fi - - export AHIBF="/dev/null" - if [[ "${CDATE}" -ge "2019042300" ]]; then - export AHIBF="${COMIN_OBS}/${CDUMP}.t${cyc}z.ahicsr.tm00.bufr_d" - fi - - export HDOB="${COMIN_OBS}/${CDUMP}.t${cyc}z.hdob.tm00.bufr_d" - - # Use dumps from NCO GFS v16 parallel - if [[ "${CDATE}" -ge "2020103012" ]]; then - export 
ABIBF="" - export AHIBF="" - export HDOB="" - fi - # Set info files and prepobs.errtable.global for GFS v16 retrospective parallels - if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019021900 export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019021900 fi # Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps - if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020040718" ]]; then + if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020040718" ]]; then export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019110706 export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019110706 fi # Assimilate 135 (T) & 235 (uv) Canadian AMDAR observations - if [[ "${CDATE}" -ge "2020040718" && "${CDATE}" -lt "2020052612" ]]; then + if [[ "${PDY}${cyc}" -ge "2020040718" && "${PDY}${cyc}" -lt "2020052612" ]]; then export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020040718 export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 fi # Assimilate COSMIC-2 - if [[ "${CDATE}" -ge "2020052612" && "${CDATE}" -lt "2020082412" ]]; then + if [[ "${PDY}${cyc}" -ge "2020052612" && "${PDY}${cyc}" -lt "2020082412" ]]; then export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020052612 export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 fi # Assimilate HDOB - if [[ "${CDATE}" -ge "2020082412" && "${CDATE}" -lt "2020091612" ]]; then + if [[ "${PDY}${cyc}" -ge "2020082412" && "${PDY}${cyc}" -lt "2020091612" ]]; then export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020082412 fi # Assimilate Metop-C GNSSRO - if [[ "${CDATE}" -ge "2020091612" && "${CDATE}" -lt "2021031712" ]]; then + if [[ "${PDY}${cyc}" -ge "2020091612" && "${PDY}${cyc}" -lt "2021031712" ]]; 
then export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020091612 fi # Assimilate DO-2 GeoOptics - if [[ "${CDATE}" -ge "2021031712" && "${CDATE}" -lt "2021091612" ]]; then + if [[ "${PDY}${cyc}" -ge "2021031712" && "${PDY}${cyc}" -lt "2021091612" ]]; then export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2021031712 fi @@ -116,38 +97,38 @@ if [[ ${RUN_ENVIR} == "emc" ]]; then # identical to ../global_convinfo.txt. Thus, the logic below is not # needed at this time. # Assimilate COSMIC-2 GPS - # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # if [[ "${PDY}${cyc}" -ge "2021110312" && "${PDY}${cyc}" -lt "YYYYMMDDHH" ]]; then # export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2021110312 # fi # Turn off assmilation of OMPS during period of bad data - if [[ "${CDATE}" -ge "2020011600" && "${CDATE}" -lt "2020011806" ]]; then + if [[ "${PDY}${cyc}" -ge "2020011600" && "${PDY}${cyc}" -lt "2020011806" ]]; then export OZINFO=${FIXgsi}/gfsv16_historical/global_ozinfo.txt.2020011600 fi # Set satinfo for start of GFS v16 parallels - if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019021900 fi # Turn on assimilation of Metop-C AMSUA and MHS - if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020022012" ]]; then + if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020022012" ]]; then export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019110706 fi # Turn off assimilation of Metop-A MHS - if [[ "${CDATE}" -ge "2020022012" && "${CDATE}" -lt "2021052118" ]]; then + if [[ "${PDY}${cyc}" -ge "2020022012" && "${PDY}${cyc}" -lt "2021052118" ]]; then export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2020022012 fi # Turn off assimilation of S-NPP CrIS - if [[ "${CDATE}" -ge "2021052118" && "${CDATE}" -lt "2021092206" 
]]; then + if [[ "${PDY}${cyc}" -ge "2021052118" && "${PDY}${cyc}" -lt "2021092206" ]]; then export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021052118 fi # Turn off assimilation of MetOp-A IASI - if [[ "${CDATE}" -ge "2021092206" && "${CDATE}" -lt "2021102612" ]]; then + if [[ "${PDY}${cyc}" -ge "2021092206" && "${PDY}${cyc}" -lt "2021102612" ]]; then export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021092206 fi @@ -157,7 +138,7 @@ if [[ ${RUN_ENVIR} == "emc" ]]; then # needed at this time # # Turn off assmilation of all Metop-A MHS - # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # if [[ "${PDY}${cyc}" -ge "2021110312" && "${PDY}${cyc}" -lt "YYYYMMDDHH" ]]; then # export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2021110312 # fi fi diff --git a/parm/config/config.analcalc b/parm/config/gfs/config.analcalc similarity index 100% rename from parm/config/config.analcalc rename to parm/config/gfs/config.analcalc diff --git a/parm/config/config.analdiag b/parm/config/gfs/config.analdiag similarity index 100% rename from parm/config/config.analdiag rename to parm/config/gfs/config.analdiag diff --git a/parm/config/config.arch b/parm/config/gfs/config.arch similarity index 67% rename from parm/config/config.arch rename to parm/config/gfs/config.arch index c705e0b7ed..6a0f6306a8 100644 --- a/parm/config/config.arch +++ b/parm/config/gfs/config.arch @@ -6,7 +6,7 @@ echo "BEGIN: config.arch" # Get task specific resources -. $EXPDIR/config.resources arch +. 
"${EXPDIR}/config.resources" arch export ARCH_GAUSSIAN=${ARCH_GAUSSIAN:-"NO"} export ARCH_GAUSSIAN_FHMAX=${ARCH_GAUSSIAN_FHMAX:-36} @@ -15,15 +15,10 @@ export ARCH_GAUSSIAN_FHINC=${ARCH_GAUSSIAN_FHINC:-6} #--online archive of nemsio files for fit2obs verification export FITSARC="YES" export FHMAX_FITS=132 -[[ "$FHMAX_FITS" -gt "$FHMAX_GFS" ]] && export FHMAX_FITS=$FHMAX_GFS +[[ "${FHMAX_FITS}" -gt "${FHMAX_GFS}" ]] && export FHMAX_FITS=${FHMAX_GFS} #--starting and ending hours of previous cycles to be removed from rotating directory export RMOLDSTD=144 export RMOLDEND=24 -#--keep forcing data for running gldas step -if [[ "$DO_GLDAS" == "YES" && "$CDUMP" == "gdas" ]]; then - [[ $RMOLDSTD -lt 144 ]] && export RMOLDSTD=144 -fi - echo "END: config.arch" diff --git a/parm/config/gfs/config.atmanl b/parm/config/gfs/config.atmanl new file mode 100644 index 0000000000..c0cd9e6733 --- /dev/null +++ b/parm/config/gfs/config.atmanl @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.atmanl ########## +# configuration common to all atm var analysis tasks + +echo "BEGIN: config.atmanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/gdas_prototype_3d.yaml +export ATMVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/variational/3dvar_dripcg.yaml +export STATICB_TYPE="gsibec" +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/berror/staticb_${STATICB_TYPE}.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmanl" diff --git a/parm/config/gfs/config.atmanlfinal b/parm/config/gfs/config.atmanlfinal new file mode 100644 index 0000000000..a6b714f7fc --- /dev/null +++ b/parm/config/gfs/config.atmanlfinal @@ -0,0 +1,10 @@ +#! 
/usr/bin/env bash + +########## config.atmanlfinal ########## +# Post Atm Var Analysis specific + +echo "BEGIN: config.atmanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlfinal +echo "END: config.atmanlfinal" diff --git a/parm/config/gfs/config.atmanlinit b/parm/config/gfs/config.atmanlinit new file mode 100644 index 0000000000..bc95ef4962 --- /dev/null +++ b/parm/config/gfs/config.atmanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlinit ########## +# Pre Atm Var Analysis specific + +echo "BEGIN: config.atmanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlinit +echo "END: config.atmanlinit" diff --git a/parm/config/gfs/config.atmanlrun b/parm/config/gfs/config.atmanlrun new file mode 100644 index 0000000000..68b7615718 --- /dev/null +++ b/parm/config/gfs/config.atmanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmanlrun ########## +# Atm Var Analysis specific + +echo "BEGIN: config.atmanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlrun + +echo "END: config.atmanlrun" diff --git a/parm/config/gfs/config.atmensanl b/parm/config/gfs/config.atmensanl new file mode 100644 index 0000000000..4d945ea717 --- /dev/null +++ b/parm/config/gfs/config.atmensanl @@ -0,0 +1,22 @@ +#! 
/usr/bin/env bash + +########## config.atmensanl ########## +# configuration common to all atm ens analysis tasks + +echo "BEGIN: config.atmensanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/lgetkf_prototype.yaml +export ATMENSYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/lgetkf/lgetkf.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmensanl" diff --git a/parm/config/gfs/config.atmensanlfinal b/parm/config/gfs/config.atmensanlfinal new file mode 100644 index 0000000000..5d8ec458c3 --- /dev/null +++ b/parm/config/gfs/config.atmensanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlfinal ########## +# Post Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlfinal +echo "END: config.atmensanlfinal" diff --git a/parm/config/gfs/config.atmensanlinit b/parm/config/gfs/config.atmensanlinit new file mode 100644 index 0000000000..34429023bb --- /dev/null +++ b/parm/config/gfs/config.atmensanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlinit ########## +# Pre Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlinit +echo "END: config.atmensanlinit" diff --git a/parm/config/gfs/config.atmensanlrun b/parm/config/gfs/config.atmensanlrun new file mode 100644 index 0000000000..01f211a17a --- /dev/null +++ b/parm/config/gfs/config.atmensanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmensanlrun ########## +# Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlrun" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" atmensanlrun + +echo "END: config.atmensanlrun" diff --git a/parm/config/config.awips b/parm/config/gfs/config.awips similarity index 100% rename from parm/config/config.awips rename to parm/config/gfs/config.awips diff --git a/parm/config/config.base.emc.dyn b/parm/config/gfs/config.base.emc.dyn similarity index 87% rename from parm/config/config.base.emc.dyn rename to parm/config/gfs/config.base.emc.dyn index c58c740ca8..8d085d0deb 100644 --- a/parm/config/config.base.emc.dyn +++ b/parm/config/gfs/config.base.emc.dyn @@ -75,7 +75,6 @@ export MODE="@MODE@" # cycled/forecast-only #################################################### # Build paths relative to $HOMEgfs export FIXgsi="${HOMEgfs}/fix/gsi" -export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" export HOMEpost="${HOMEgfs}" export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" @@ -104,7 +103,7 @@ export EXPDIR="@EXPDIR@/${PSLOT}" export ROTDIR="@ROTDIR@/${PSLOT}" export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work export DUMP_SUFFIX="" -if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then +if [[ "${PDY}${cyc}" -ge "2019092100" && "${PDY}${cyc}" -le "2019110700" ]]; then export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel fi export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops @@ -119,19 +118,9 @@ export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUM # TODO: determine where is RUN actually used in the workflow other than here # TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be # consistent w/ EE2? 
-export COMIN_OBS=${COMIN_OBS:-${ROTDIR}/${CDUMP/enkf}.${PDY}/${cyc}/obs} -export COMIN_GES_OBS=${COMIN_GES_OBS:-${ROTDIR}/${CDUMP/enkf}.${PDY}/${cyc}/obs} - -export COMINatmos=${ROTDIR}/${CDUMP}.${PDY}/${cyc}/atmos -export COMOUTatmos=${ROTDIR}/${CDUMP}.${PDY}/${cyc}/atmos -export COMINwave=${ROTDIR}/${CDUMP}.${PDY}/${cyc}/wave -export COMOUTwave=${ROTDIR}/${CDUMP}.${PDY}/${cyc}/wave -export COMINocean=${ROTDIR}/${CDUMP}.${PDY}/${cyc}/ocean -export COMOUTocean=${ROTDIR}/${CDUMP}.${PDY}/${cyc}/ocean -export COMINice=${ROTDIR}/${CDUMP}.${PDY}/${cyc}/ice -export COMOUTice=${ROTDIR}/${CDUMP}.${PDY}/${cyc}/ice -export COMINaero=${ROTDIR}/${CDUMP}.${PDY}/${cyc}/chem -export COMOUTaero=${ROTDIR}/${CDUMP}.${PDY}/${cyc}/chem + +# Get all the COM path templates +source "${EXPDIR}/config.com" export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} export LOGSCRIPT=${LOGSCRIPT:-""} @@ -141,7 +130,7 @@ export REDOUT="1>" export REDERR="2>" export SENDECF=${SENDECF:-"NO"} -export SENDCOM=${SENDCOM:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} export SENDSDM=${SENDSDM:-"NO"} export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} export SENDDBN=${SENDDBN:-"NO"} @@ -169,8 +158,8 @@ export OPS_RES="C768" # Do not change # Resolution specific parameters export LEVS=128 export CASE="@CASECTL@" -export CASE_ENKF="@CASEENS@" -# TODO: This should not depend on $CASE or $CASE_ENKF +export CASE_ENS="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENS # These are the currently available grid-combinations case "${CASE}" in "C48") export OCNRES=500;; @@ -241,7 +230,7 @@ fi # Output frequency of the forecast model (for cycling) export FHMIN=0 export FHMAX=9 -export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) +export FHOUT=3 # Will be changed to 1 in config.base if (DOHYBVAR set to NO and l4densvar set to false) # Cycle to run EnKF (set to BOTH for both gfs and gdas) export EUPD_CYC="gdas" @@ -289,15 +278,6 @@ export 
IAU_DELTHRS_ENKF=6 # Use Jacobians in eupd and thereby remove need to run eomg export lobsdiag_forenkf=".true." -# run GLDAS to spin up land ICs -export DO_GLDAS="NO" -export gldas_cyc=00 - -# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 -if [[ ${DO_GLDAS} = "YES" ]]; then - export FHOUT=1 -fi - # if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA # export DO_WAVE="NO" # echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" @@ -311,11 +291,13 @@ export imp_physics=@IMP_PHYSICS@ export DO_JEDIVAR="NO" export DO_JEDIENS="NO" export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" # Hybrid related export DOHYBVAR="@DOHYBVAR@" -export NMEM_ENKF=@NMEM_ENKF@ -export NMEM_EFCS=30 +export NMEM_ENS=@NMEM_ENS@ +export NMEM_ENS_GFS=@NMEM_ENS@ export SMOOTH_ENKF="NO" export l4densvar=".true." export lwrite4danl=".true." @@ -343,7 +325,7 @@ if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then fi # Check if cycle is cold starting, DOIAU off, or free-forecast mode -if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${PDY}${cyc}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then export IAU_OFFSET=0 export IAU_FHROT=0 fi @@ -379,7 +361,7 @@ export binary_diag=".false." 
# Verification options export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp -export DO_FIT2OBS="YES" # Run fit to observations package +export DO_FIT2OBS="NO" # Run fit to observations package # Archiving options export HPSSARCH="@HPSSARCH@" # save data to HPSS archive diff --git a/parm/config/config.base.nco.static b/parm/config/gfs/config.base.nco.static similarity index 97% rename from parm/config/config.base.nco.static rename to parm/config/gfs/config.base.nco.static index e3702852f4..7d726cc62e 100644 --- a/parm/config/config.base.nco.static +++ b/parm/config/gfs/config.base.nco.static @@ -183,10 +183,6 @@ export IAU_DELTHRS_ENKF=6 # Use Jacobians in eupd and thereby remove need to run eomg export lobsdiag_forenkf=".true." -# run GLDAS to spin up land ICs -export DO_GLDAS=YES -export gldas_cyc=00 - # run wave component export DO_WAVE=YES export WAVE_CDUMP="both" @@ -228,10 +224,10 @@ if [ $DONST = "YES" ]; then export FNTSFA=" "; fi export nst_anl=.true. # Analysis increments to zero in CALCINCEXEC -export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc','rwmr_inc','snmr_inc','grle_inc'" # Stratospheric increments to zero -export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc','rwmr_inc','snmr_inc','grle_inc'" export INCVARS_EFOLD="5" # Swith to generate netcdf or binary diagnostic files. If not specified, diff --git a/parm/config/gfs/config.com b/parm/config/gfs/config.com new file mode 100644 index 0000000000..6a824012c6 --- /dev/null +++ b/parm/config/gfs/config.com @@ -0,0 +1,93 @@ +# shellcheck shell=bash +# Ignore shellcheck warnings about variables not being expanded; this is what we want +# shellcheck disable=SC2016 +echo "BEGIN: config.com" + +# These are just templates. All templates must use single quotations so variable +# expansion does not occur when this file is sourced. 
Substitution happens later +# during runtime. It is recommended to use the helper function `generate_com()`, +# to do this substitution, which is defined in `ush/preamble.sh`. +# +# Syntax for generate_com(): +# generate_com [-rx] $var1[:$tmpl1] [$var2[:$tmpl2]] [...]] +# +# options: +# -r: Make variable read-only (same as `decalre -r`) +# -x: Mark variable for declare -rx (same as `declare -x`) +# var1, var2, etc: Variable names whose values will be generated from a template +# and declared +# tmpl1, tmpl2, etc: Specify the template to use (default is "${var}_TMPL") +# +# Examples: +# # Current cycle and RUN +# YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +# +# # Previous cycle and gdas +# RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ +# COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL +# +# # Current cycle and COM for first member +# MEMDIR='mem001' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY +# + +# +# If any restart, input, or analysis template is updated, `setup_expt.py.fill_COMROT_cycled()` +# must correspondingly be updated to match. 
+# +if [[ "${RUN_ENVIR:-emc}" == "nco" ]]; then + COM_OBS_TMPL=$(compath.py "${envir}/obsproc/${obsproc_ver}")'/${RUN}.${YMD}/${HH}/atmos' + COM_RTOFS_TMPL=$(compath.py "${envir}/${WAVECUR_DID}/${rtofs_ver}") +else + COM_OBS_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/obs' + COM_RTOFS_TMPL='${DMPDIR}' +fi +declare -rx COM_OBS_TMPL COM_RTOFS_TMPL +declare -rx COM_OBSDMP_TMPL='${DMPDIR}/${DUMP}${DUMP_SUFFIX}.${YMD}/${HH}/atmos' + +COM_BASE='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}' + +declare -rx COM_TOP_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}' + +declare -rx COM_ATMOS_INPUT_TMPL=${COM_BASE}'/model_data/atmos/input' +declare -rx COM_ATMOS_RESTART_TMPL=${COM_BASE}'/model_data/atmos/restart' +declare -rx COM_ATMOS_ANALYSIS_TMPL=${COM_BASE}'/analysis/atmos' +declare -rx COM_LAND_ANALYSIS_TMPL=${COM_BASE}'/analysis/land' +declare -rx COM_ATMOS_HISTORY_TMPL=${COM_BASE}'/model_data/atmos/history' +declare -rx COM_ATMOS_MASTER_TMPL=${COM_BASE}'/model_data/atmos/master' +declare -rx COM_ATMOS_GRIB_TMPL=${COM_BASE}'/products/atmos/grib2/${GRID}' +declare -rx COM_ATMOS_BUFR_TMPL=${COM_BASE}'/products/atmos/bufr' +declare -rx COM_ATMOS_GEMPAK_TMPL=${COM_BASE}'/products/atmos/gempak/${GRID}' +declare -rx COM_ATMOS_GENESIS_TMPL=${COM_BASE}'/products/atmos/cyclone/genesis_vital' +declare -rx COM_ATMOS_TRACK_TMPL=${COM_BASE}'/products/atmos/cyclone/tracks' +declare -rx COM_ATMOS_GOES_TMPL=${COM_BASE}'/products/atmos/goes_sim' +declare -rx COM_ATMOS_IMAGERY_TMPL=${COM_BASE}'/products/atmos/imagery' +declare -rx COM_ATMOS_MINMON_TMPL=${COM_BASE}'/products/atmos/minmon' +declare -rx COM_ATMOS_WAFS_TMPL=${COM_BASE}'/products/atmos/wafs' +declare -rx COM_ATMOS_WMO_TMPL=${COM_BASE}'/products/atmos/wmo' + +declare -rx COM_WAVE_RESTART_TMPL=${COM_BASE}'/model_data/wave/restart' +declare -rx COM_WAVE_PREP_TMPL=${COM_BASE}'/model_data/wave/prep' +declare -rx COM_WAVE_HISTORY_TMPL=${COM_BASE}'/model_data/wave/history' +declare -rx COM_WAVE_GRID_TMPL=${COM_BASE}'/products/wave/gridded' +declare -rx 
COM_WAVE_STATION_TMPL=${COM_BASE}'/products/wave/station' +declare -rx COM_WAVE_GEMPAK_TMPL=${COM_BASE}'/products/wave/gempak' +declare -rx COM_WAVE_WMO_TMPL=${COM_BASE}'/products/wave/wmo' + +declare -rx COM_OCEAN_HISTORY_TMPL=${COM_BASE}'/model_data/ocean/history' +declare -rx COM_OCEAN_RESTART_TMPL=${COM_BASE}'/model_data/ocean/restart' +declare -rx COM_OCEAN_INPUT_TMPL=${COM_BASE}'/model_data/ocean/input' +declare -rx COM_OCEAN_ANALYSIS_TMPL=${COM_BASE}'/analysis/ocean' +declare -rx COM_OCEAN_2D_TMPL=${COM_BASE}'/products/ocean/2D' +declare -rx COM_OCEAN_3D_TMPL=${COM_BASE}'/products/ocean/3D' +declare -rx COM_OCEAN_DAILY_TMPL=${COM_BASE}'/products/ocean/daily' +declare -rx COM_OCEAN_XSECT_TMPL=${COM_BASE}'/products/ocean/xsect' +declare -rx COM_OCEAN_GRIB_TMPL=${COM_BASE}'/products/ocean/grib2/${GRID}' + +declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model_data/ice/input' +declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model_data/ice/history' +declare -rx COM_ICE_RESTART_TMPL=${COM_BASE}'/model_data/ice/restart' + +declare -rx COM_CHEM_HISTORY_TMPL=${COM_BASE}'/model_data/chem/history' +declare -rx COM_CHEM_ANALYSIS_TMPL=${COM_BASE}'/analysis/chem' + +declare -rx COM_MED_RESTART_TMPL=${COM_BASE}'/model_data/med/restart' diff --git a/parm/config/gfs/config.coupled_ic b/parm/config/gfs/config.coupled_ic new file mode 100644 index 0000000000..50fab283b5 --- /dev/null +++ b/parm/config/gfs/config.coupled_ic @@ -0,0 +1,43 @@ +#! 
/usr/bin/env bash + +########## config.coupled_ic ########## + +echo "BEGIN: config.coupled_ic" + +# Get task specific resources +source ${EXPDIR}/config.resources coupled_ic + +if [[ "${machine}" == "WCOSS2" ]]; then + export BASE_CPLIC="/lfs/h2/emc/couple/noscrub/Jiande.Wang/IC" +elif [[ "${machine}" == "HERA" ]]; then + export BASE_CPLIC="/scratch1/NCEPDEV/climate/role.ufscpara/IC" +elif [[ "${machine}" == "ORION" ]]; then + export BASE_CPLIC="/work/noaa/global/glopara/data/ICSDIR/prototype_ICs" +elif [[ "${machine}" == "S4" ]]; then + export BASE_CPLIC="/data/prod/glopara/coupled_ICs" +elif [[ "${machine}" == "JET" ]]; then + export BASE_CPLIC="/mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs" +fi + + +case "${CASE}" in + "C384") + #C384 and P8 ICs + export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c + export CPL_ICEIC=CPC + export CPL_OCNIC=CPC3Dvar + export CPL_WAVIC=GEFSwave20210528v2 + ;; + "C768") + export CPL_ATMIC=HR1 + export CPL_ICEIC=HR1 + export CPL_OCNIC=HR1 + export CPL_WAVIC=HR1 + ;; + *) + echo "Unrecognized case: ${1}" + exit 1 + ;; +esac + +echo "END: config.coupled_ic" diff --git a/parm/config/config.defaults.s2sw b/parm/config/gfs/config.defaults.s2sw similarity index 100% rename from parm/config/config.defaults.s2sw rename to parm/config/gfs/config.defaults.s2sw diff --git a/parm/config/config.earc b/parm/config/gfs/config.earc similarity index 100% rename from parm/config/config.earc rename to parm/config/gfs/config.earc diff --git a/parm/config/config.ecen b/parm/config/gfs/config.ecen similarity index 100% rename from parm/config/config.ecen rename to parm/config/gfs/config.ecen diff --git a/parm/config/config.echgres b/parm/config/gfs/config.echgres similarity index 100% rename from parm/config/config.echgres rename to parm/config/gfs/config.echgres diff --git a/parm/config/config.ediag b/parm/config/gfs/config.ediag similarity index 100% rename from parm/config/config.ediag rename to parm/config/gfs/config.ediag diff --git 
a/parm/config/gfs/config.efcs b/parm/config/gfs/config.efcs new file mode 100644 index 0000000000..95c2cb58de --- /dev/null +++ b/parm/config/gfs/config.efcs @@ -0,0 +1,97 @@ +#! /usr/bin/env bash + +########## config.efcs ########## +# Ensemble forecast specific, dependency: config.fcst + +echo "BEGIN: config.efcs" + +# TODO: the _ENKF counterparts need to be defined in config.base +export DO_AERO=${DO_AERO_ENKF:-"NO"} +export DO_OCN=${DO_OCN_ENKF:-"NO"} +export DO_ICE=${DO_ICE_ENKF:-"NO"} +export DO_WAVE=${DO_WAVE_ENKF:-"NO"} + +# TODO: Possibly need OCNRES_ENKF, ICERES_ENKF, WAVRES_ENKF too +if [[ ${DO_OCN} == "YES" ]]; then + case "${CASE_ENS}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; + esac +fi +[[ ${DO_ICE} == "YES" ]] && export ICERES=$OCNRES +[[ ${DO_WAVE} == "YES" ]] && export waveGRD=${waveGRD_ENKF:-$waveGRD} # TODO: will we run waves with a different resolution in the ensemble? + +# Source model specific information that is resolution dependent +string="--fv3 ${CASE_ENS}" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Get task specific resources +. $EXPDIR/config.resources efcs + +# Use serial I/O for ensemble (lustre?) +export OUTPUT_FILETYPE_ATM="netcdf" +export OUTPUT_FILETYPE_SFC="netcdf" + +# Number of enkf members per fcst job +export NMEM_EFCSGRP=2 +export NMEM_EFCSGRP_GFS=1 +export RERUN_EFCSGRP="NO" + +# Turn off inline UPP for EnKF forecast +export WRITE_DOPOST=".false." + +# Stochastic physics parameters (only for ensemble forecasts) +export DO_SKEB="YES" +export SKEB=0.3 +export SKEB_TAU=21600. +export SKEB_LSCALE=250000. 
+export SKEBNORM=0 +export SKEB_NPASS=30 +export SKEB_VDOF=5 +export DO_SHUM="YES" +export SHUM=0.005 +export SHUM_TAU=21600. +export SHUM_LSCALE=500000. +export DO_SPPT="YES" +export SPPT=0.5 +export SPPT_TAU=21600. +export SPPT_LSCALE=500000. +export SPPT_LOGIT=".true." +export SPPT_SFCLIMIT=".true." + +if [ $QUILTING = ".true." -a $OUTPUT_GRID = "gaussian_grid" ]; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" +else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_orig" +fi + +# FV3 model namelist parameters to over-ride +export restart_interval=${restart_interval:-6} + +# For IAU, write restarts at beginning of window also +if [ $DOIAU_ENKF = "YES" ]; then + export restart_interval="3 -1" +fi + +# wave model +export cplwav=.false. + +# ocean model resolution +case "${CASE_ENS}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=$OCNRES + +echo "END: config.efcs" diff --git a/parm/config/config.eobs b/parm/config/gfs/config.eobs similarity index 100% rename from parm/config/config.eobs rename to parm/config/gfs/config.eobs diff --git a/parm/config/config.epos b/parm/config/gfs/config.epos similarity index 100% rename from parm/config/config.epos rename to parm/config/gfs/config.epos diff --git a/parm/config/config.esfc b/parm/config/gfs/config.esfc similarity index 100% rename from parm/config/config.esfc rename to parm/config/gfs/config.esfc diff --git a/parm/config/config.eupd b/parm/config/gfs/config.eupd similarity index 100% rename from parm/config/config.eupd rename to parm/config/gfs/config.eupd diff --git a/parm/config/config.fcst b/parm/config/gfs/config.fcst similarity index 100% rename from parm/config/config.fcst rename to parm/config/gfs/config.fcst diff --git a/parm/config/config.fit2obs b/parm/config/gfs/config.fit2obs similarity index 75% rename from 
parm/config/config.fit2obs rename to parm/config/gfs/config.fit2obs index 9a904e2b0c..46baaa9e45 100644 --- a/parm/config/config.fit2obs +++ b/parm/config/gfs/config.fit2obs @@ -8,13 +8,6 @@ echo "BEGIN: config.fit2obs" # Get task specific resources . "${EXPDIR}/config.resources" fit2obs -export fit_ver="wflow.1.0" -export fitdir="${BASE_GIT}/Fit2Obs/${fit_ver}" - -export HOMEcfs=${fitdir} -export EXECcfs=${HOMEcfs}/exec -export USHcfs=${HOMEcfs}/ush - export PRVT=${HOMEgfs}/fix/gsi/prepobs_errtable.global export HYBLEVS=${HOMEgfs}/fix/am/global_hyblev.l${LEVS}.txt diff --git a/parm/config/config.fv3.nco.static b/parm/config/gfs/config.fv3.nco.static similarity index 100% rename from parm/config/config.fv3.nco.static rename to parm/config/gfs/config.fv3.nco.static diff --git a/parm/config/config.gempak b/parm/config/gfs/config.gempak similarity index 100% rename from parm/config/config.gempak rename to parm/config/gfs/config.gempak diff --git a/parm/config/config.ice b/parm/config/gfs/config.ice similarity index 100% rename from parm/config/config.ice rename to parm/config/gfs/config.ice diff --git a/parm/config/gfs/config.landanl b/parm/config/gfs/config.landanl new file mode 100644 index 0000000000..89bb8a4b7b --- /dev/null +++ b/parm/config/gfs/config.landanl @@ -0,0 +1,23 @@ +#! 
/usr/bin/env bash + +########## config.landanl ########## +# configuration common to all land analysis tasks + +echo "BEGIN: config.landanl" + +obs_list_name=gdas_land_adpsfc_only.yaml +if [[ "${cyc}" == "18" ]]; then + obs_list_name=gdas_land_prototype.yaml +fi + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/lists/${obs_list_name} +export LANDVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/land/letkfoi/letkfoi.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas + +export io_layout_x=@IO_LAYOUT_X@ +export io_layout_y=@IO_LAYOUT_Y@ + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x + +echo "END: config.landanl" diff --git a/parm/config/gfs/config.landanlfinal b/parm/config/gfs/config.landanlfinal new file mode 100644 index 0000000000..242089325a --- /dev/null +++ b/parm/config/gfs/config.landanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlfinal ########## +# Post Land Analysis specific + +echo "BEGIN: config.landanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlfinal +echo "END: config.landanlfinal" diff --git a/parm/config/gfs/config.landanlinit b/parm/config/gfs/config.landanlinit new file mode 100644 index 0000000000..62054525c8 --- /dev/null +++ b/parm/config/gfs/config.landanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlinit ########## +# Pre Land Analysis specific + +echo "BEGIN: config.landanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlinit +echo "END: config.landanlinit" diff --git a/parm/config/gfs/config.landanlprep b/parm/config/gfs/config.landanlprep new file mode 100644 index 0000000000..26daf491f2 --- /dev/null +++ b/parm/config/gfs/config.landanlprep @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.landanlrun ########## +# Land Prep specific + +echo "BEGIN: config.landanlprep" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" landanlprep + +echo "END: config.landanlprep" diff --git a/parm/config/gfs/config.landanlrun b/parm/config/gfs/config.landanlrun new file mode 100644 index 0000000000..0f44011c1d --- /dev/null +++ b/parm/config/gfs/config.landanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.landanlrun ########## +# Land Analysis specific + +echo "BEGIN: config.landanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlrun + +echo "END: config.landanlrun" diff --git a/parm/config/config.metp b/parm/config/gfs/config.metp similarity index 84% rename from parm/config/config.metp rename to parm/config/gfs/config.metp index 4be7151ffa..c90903f6a5 100644 --- a/parm/config/config.metp +++ b/parm/config/gfs/config.metp @@ -6,7 +6,7 @@ echo "BEGIN: config.metp" # Get task specific resources -. $EXPDIR/config.resources metp +. "${EXPDIR}/config.resources" metp export RUN_GRID2GRID_STEP1="YES" # Run grid-to-grid verification using METplus export RUN_GRID2OBS_STEP1="YES" # Run grid-to-obs verification using METplus @@ -18,15 +18,15 @@ export RUN_PRECIP_STEP1="YES" # Run precip verification using METplus #---------------------------------------------------------- ## EMC_VERIF_GLOBAL SETTINGS export HOMEverif_global=${HOMEgfs}/sorc/verif-global.fd -export VERIF_GLOBALSH=$HOMEverif_global/ush/run_verif_global_in_global_workflow.sh +export VERIF_GLOBALSH=${HOMEverif_global}/ush/run_verif_global_in_global_workflow.sh ## INPUT DATA SETTINGS -export model=$PSLOT +export model=${PSLOT} export model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" -export model_hpss_dir=$ATARDIR/.. +export model_hpss_dir=${ATARDIR}/.. export get_data_from_hpss="NO" export hpss_walltime="10" ## OUTPUT SETTINGS -export model_stat_dir=$ARCDIR/.. +export model_stat_dir=${ARCDIR}/.. 
export make_met_data_by="VALID" export SENDMETVIEWER="NO" ## DATE SETTINGS @@ -39,20 +39,20 @@ export log_MET_output_to_METplus="yes" export g2g1_type_list="anom pres sfc" export g2g1_anom_truth_name="self_anl" export g2g1_anom_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" -export g2g1_anom_fhr_min=$FHMIN_GFS -export g2g1_anom_fhr_max=$FHMAX_GFS +export g2g1_anom_fhr_min=${FHMIN_GFS} +export g2g1_anom_fhr_max=${FHMAX_GFS} export g2g1_anom_grid="G002" export g2g1_anom_gather_by="VSDB" export g2g1_pres_truth_name="self_anl" export g2g1_pres_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" -export g2g1_pres_fhr_min=$FHMIN_GFS -export g2g1_pres_fhr_max=$FHMAX_GFS +export g2g1_pres_fhr_min=${FHMIN_GFS} +export g2g1_pres_fhr_max=${FHMAX_GFS} export g2g1_pres_grid="G002" export g2g1_pres_gather_by="VSDB" export g2g1_sfc_truth_name="self_f00" export g2g1_sfc_truth_file_format="pgbf00.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" -export g2g1_sfc_fhr_min=$FHMIN_GFS -export g2g1_sfc_fhr_max=$FHMAX_GFS +export g2g1_sfc_fhr_min=${FHMIN_GFS} +export g2g1_sfc_fhr_max=${FHMAX_GFS} export g2g1_sfc_grid="G002" export g2g1_sfc_gather_by="VSDB" export g2g1_mv_database_name="mv_${PSLOT}_grid2grid_metplus" @@ -62,19 +62,19 @@ export g2g1_mv_database_desc="Grid-to-grid METplus data for global workflow expe export g2o1_type_list="upper_air conus_sfc" export g2o1_upper_air_msg_type_list="ADPUPA" export g2o1_upper_air_vhr_list="00 06 12 18" -export g2o1_upper_air_fhr_min=$FHMIN_GFS +export g2o1_upper_air_fhr_min=${FHMIN_GFS} export g2o1_upper_air_fhr_max="240" export g2o1_upper_air_grid="G003" export g2o1_upper_air_gather_by="VSDB" export g2o1_conus_sfc_msg_type_list="ONLYSF ADPUPA" export g2o1_conus_sfc_vhr_list="00 03 06 09 12 15 18 21" -export g2o1_conus_sfc_fhr_min=$FHMIN_GFS +export g2o1_conus_sfc_fhr_min=${FHMIN_GFS} export g2o1_conus_sfc_fhr_max="240" export g2o1_conus_sfc_grid="G104" export g2o1_conus_sfc_gather_by="VSDB" export 
g2o1_polar_sfc_msg_type_list="IABP" export g2o1_polar_sfc_vhr_list="00 03 06 09 12 15 18 21" -export g2o1_polar_sfc_fhr_min=$FHMIN_GFS +export g2o1_polar_sfc_fhr_min=${FHMIN_GFS} export g2o1_polar_sfc_fhr_max="240" export g2o1_polar_sfc_grid="G219" export g2o1_polar_sfc_gather_by="VSDB" @@ -87,7 +87,7 @@ export precip1_type_list="ccpa_accum24hr" export precip1_ccpa_accum24hr_model_bucket="06" export precip1_ccpa_accum24hr_model_var="APCP" export precip1_ccpa_accum24hr_model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" -export precip1_ccpa_accum24hr_fhr_min=$FHMIN_GFS +export precip1_ccpa_accum24hr_fhr_min=${FHMIN_GFS} export precip1_ccpa_accum24hr_fhr_max="180" export precip1_ccpa_accum24hr_grid="G211" export precip1_ccpa_accum24hr_gather_by="VSDB" diff --git a/parm/config/config.nsst b/parm/config/gfs/config.nsst similarity index 95% rename from parm/config/config.nsst rename to parm/config/gfs/config.nsst index b4c58eedb3..235c91f08b 100644 --- a/parm/config/config.nsst +++ b/parm/config/gfs/config.nsst @@ -12,7 +12,7 @@ export NST_MODEL=2 # nstf_name(2) : NST_SPINUP : 0 = OFF, 1 = ON, export NST_SPINUP=0 -if [[ "$CDATE" -lt "2017072000" ]]; then +if [[ "${PDY}${cyc}" -lt "2017072000" ]]; then export NST_SPINUP=1 fi diff --git a/parm/config/config.ocn b/parm/config/gfs/config.ocn similarity index 100% rename from parm/config/config.ocn rename to parm/config/gfs/config.ocn diff --git a/parm/config/config.ocnanal b/parm/config/gfs/config.ocnanal similarity index 87% rename from parm/config/config.ocnanal rename to parm/config/gfs/config.ocnanal index c8d821b86d..36519c7f35 100644 --- a/parm/config/config.ocnanal +++ b/parm/config/gfs/config.ocnanal @@ -1,7 +1,7 @@ #!/bin/bash ########## config.ocnanal ########## -# configuration common to all atm analysis tasks +# configuration common to all ocean analysis tasks echo "BEGIN: config.ocnanal" @@ -15,7 +15,7 @@ export SOCA_VARS=tocn,socn,ssh export SABER_BLOCKS_YAML=@SABER_BLOCKS_YAML@ export 
SOCA_NINNER=@SOCA_NINNER@ export CASE_ANL=@CASE_ANL@ -export DOMAIN_STACK_SIZE=2000000 +export DOMAIN_STACK_SIZE=116640000 #TODO: Make the stack size resolution dependent export JEDI_BIN=${HOMEgfs}/sorc/gdas.cd/build/bin # R2D2 diff --git a/parm/config/config.ocnanalbmat b/parm/config/gfs/config.ocnanalbmat similarity index 100% rename from parm/config/config.ocnanalbmat rename to parm/config/gfs/config.ocnanalbmat diff --git a/parm/config/gfs/config.ocnanalchkpt b/parm/config/gfs/config.ocnanalchkpt new file mode 100644 index 0000000000..c059fdba42 --- /dev/null +++ b/parm/config/gfs/config.ocnanalchkpt @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalchkpt ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalchkpt" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalchkpt + +echo "END: config.ocnanalchkpt" diff --git a/parm/config/config.ocnanalpost b/parm/config/gfs/config.ocnanalpost similarity index 100% rename from parm/config/config.ocnanalpost rename to parm/config/gfs/config.ocnanalpost diff --git a/parm/config/config.ocnanalprep b/parm/config/gfs/config.ocnanalprep similarity index 100% rename from parm/config/config.ocnanalprep rename to parm/config/gfs/config.ocnanalprep diff --git a/parm/config/config.ocnanalrun b/parm/config/gfs/config.ocnanalrun similarity index 100% rename from parm/config/config.ocnanalrun rename to parm/config/gfs/config.ocnanalrun diff --git a/parm/config/gfs/config.ocnanalvrfy b/parm/config/gfs/config.ocnanalvrfy new file mode 100644 index 0000000000..4eda451853 --- /dev/null +++ b/parm/config/gfs/config.ocnanalvrfy @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalvrfy ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalvrfy" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" ocnanalvrfy +echo "END: config.ocnanalvrfy" diff --git a/parm/config/config.ocnpost b/parm/config/gfs/config.ocnpost similarity index 100% rename from parm/config/config.ocnpost rename to parm/config/gfs/config.ocnpost diff --git a/parm/config/config.post b/parm/config/gfs/config.post similarity index 100% rename from parm/config/config.post rename to parm/config/gfs/config.post diff --git a/parm/config/config.postsnd b/parm/config/gfs/config.postsnd similarity index 100% rename from parm/config/config.postsnd rename to parm/config/gfs/config.postsnd diff --git a/parm/config/config.prep b/parm/config/gfs/config.prep similarity index 87% rename from parm/config/config.prep rename to parm/config/gfs/config.prep index 3e1cf8c32f..b05b82a43e 100644 --- a/parm/config/config.prep +++ b/parm/config/gfs/config.prep @@ -36,12 +36,12 @@ export PRVT=$FIXgsi/prepobs_errtable.global # Set prepobs.errtable.global for GFS v16 retrospective parallels if [[ $RUN_ENVIR == "emc" ]]; then - if [[ "$CDATE" -ge "2019021900" && "$CDATE" -lt "2019110706" ]]; then + if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019021900 fi # Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps - if [[ "$CDATE" -ge "2019110706" && "$CDATE" -lt "2020040718" ]]; then + if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020040718" ]]; then export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019110706 fi @@ -51,7 +51,7 @@ if [[ $RUN_ENVIR == "emc" ]]; then # needed at this time # Set observation errors for type 135 (T) & 235 (uv) Canadian AMDAR observations -# if [[ "$CDATE" -ge "2020040718" && "$CDATE" -lt "YYYMMDDHH" ]]; then +# if [[ "${PDY}${cyc}" -ge "2020040718" && "${PDY}${cyc}" -lt "YYYMMDDHH" ]]; then # export PRVT=$EXPDIR/prepobs_errtable.global # fi @@ -60,7 +60,7 @@ fi # NSST bufr was created with a different set of files 
prior to 2020102200 # See comments at the end of # https://github.com/NOAA-EMC/global-workflow/issues/313 -if [[ "$CDATE" -ge "2020102200" ]]; then +if [[ "${PDY}${cyc}" -ge "2020102200" ]]; then export DTYPS_nsst='sfcshp tesac bathy trkob' else export DTYPS_nsst='sfcshp dbuoyb mbuoyb tesac bathy trkob' diff --git a/parm/config/config.resources b/parm/config/gfs/config.resources similarity index 64% rename from parm/config/config.resources rename to parm/config/gfs/config.resources index f211dbc93b..5a6c734b98 100644 --- a/parm/config/config.resources +++ b/parm/config/gfs/config.resources @@ -4,21 +4,23 @@ # Set resource information for job tasks # e.g. walltime, node, cores per node, memory etc. -if [ $# -ne 1 ]; then +if [[ $# -ne 1 ]]; then echo "Must specify an input task argument to set resource variables!" echo "argument can be any one of the following:" echo "getic init coupled_ic aerosol_init" - echo "atmanalprep atmanalrun atmanalpost" - echo "atmensanalprep atmensanalrun atmensanalpost" + echo "atmanlinit atmanlrun atmanlfinal" + echo "atmensanlinit atmensanlrun atmensanlfinal" + echo "landanlprep landanlinit landanlrun landanlfinal" echo "aeroanlinit aeroanlrun aeroanlfinal" - echo "anal sfcanl analcalc analdiag gldas fcst post vrfy fit2obs metp arch echgres" + echo "anal sfcanl analcalc analdiag fcst post vrfy fit2obs metp arch echgres" echo "eobs ediag eomg eupd ecen esfc efcs epos earc" echo "init_chem mom6ic ocnpost" echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" echo "wavegempak waveawipsbulls waveawipsgridded" echo "postsnd awips gempak" echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + echo "ocnanalprep ocnanalbmat ocnanalrun ocnanalchkpt ocnanalpost ocnanalvrfy" exit 1 fi @@ -49,7 +51,7 @@ elif [[ ${machine} = "ORION" ]]; then export npe_node_max=40 fi -if [ ${step} = "prep" ]; then +if [[ ${step} = "prep" ]]; then export wtime_prep='00:30:00' export npe_prep=4 export npe_node_prep=2 
@@ -69,129 +71,187 @@ elif [[ "${step}" = "aerosol_init" ]]; then export NTASKS=${npe_aerosol_init} export memory_aerosol_init="6G" -elif [ ${step} = "waveinit" ]; then +elif [[ ${step} = "waveinit" ]]; then export wtime_waveinit="00:10:00" export npe_waveinit=12 export nth_waveinit=1 - export npe_node_waveinit=$(echo "${npe_node_max} / ${nth_waveinit}" | bc) + npe_node_waveinit=$(echo "${npe_node_max} / ${nth_waveinit}" | bc) + export npe_node_waveinit export NTASKS=${npe_waveinit} export memory_waveinit="2GB" -elif [ ${step} = "waveprep" ]; then +elif [[ ${step} = "waveprep" ]]; then export wtime_waveprep="00:10:00" export npe_waveprep=5 export npe_waveprep_gfs=65 export nth_waveprep=1 export nth_waveprep_gfs=1 - export npe_node_waveprep=$(echo "${npe_node_max} / ${nth_waveprep}" | bc) - export npe_node_waveprep_gfs=$(echo "${npe_node_max} / ${nth_waveprep_gfs}" | bc) + npe_node_waveprep=$(echo "${npe_node_max} / ${nth_waveprep}" | bc) + export npe_node_waveprep + npe_node_waveprep_gfs=$(echo "${npe_node_max} / ${nth_waveprep_gfs}" | bc) + export npe_node_waveprep_gfs export NTASKS=${npe_waveprep} export NTASKS_gfs=${npe_waveprep_gfs} export memory_waveprep="100GB" export memory_waveprep_gfs="150GB" -elif [ ${step} = "wavepostsbs" ]; then +elif [[ ${step} = "wavepostsbs" ]]; then export wtime_wavepostsbs="00:20:00" export wtime_wavepostsbs_gfs="03:00:00" export npe_wavepostsbs=8 export nth_wavepostsbs=1 - export npe_node_wavepostsbs=$(echo "${npe_node_max} / ${nth_wavepostsbs}" | bc) + npe_node_wavepostsbs=$(echo "${npe_node_max} / ${nth_wavepostsbs}" | bc) + export npe_node_wavepostsbs export NTASKS=${npe_wavepostsbs} export memory_wavepostsbs="10GB" export memory_wavepostsbs_gfs="10GB" -elif [ ${step} = "wavepostbndpnt" ]; then +elif [[ ${step} = "wavepostbndpnt" ]]; then export wtime_wavepostbndpnt="01:00:00" export npe_wavepostbndpnt=240 export nth_wavepostbndpnt=1 - export npe_node_wavepostbndpnt=$(echo "${npe_node_max} / ${nth_wavepostbndpnt}" | bc) + 
npe_node_wavepostbndpnt=$(echo "${npe_node_max} / ${nth_wavepostbndpnt}" | bc) + export npe_node_wavepostbndpnt export NTASKS=${npe_wavepostbndpnt} export is_exclusive=True -elif [ ${step} = "wavepostbndpntbll" ]; then +elif [[ ${step} = "wavepostbndpntbll" ]]; then export wtime_wavepostbndpntbll="01:00:00" export npe_wavepostbndpntbll=448 export nth_wavepostbndpntbll=1 - export npe_node_wavepostbndpntbll=$(echo "${npe_node_max} / ${nth_wavepostbndpntbll}" | bc) + npe_node_wavepostbndpntbll=$(echo "${npe_node_max} / ${nth_wavepostbndpntbll}" | bc) + export npe_node_wavepostbndpntbll export NTASKS=${npe_wavepostbndpntbll} export is_exclusive=True -elif [ ${step} = "wavepostpnt" ]; then +elif [[ ${step} = "wavepostpnt" ]]; then export wtime_wavepostpnt="01:30:00" export npe_wavepostpnt=200 export nth_wavepostpnt=1 - export npe_node_wavepostpnt=$(echo "${npe_node_max} / ${nth_wavepostpnt}" | bc) + npe_node_wavepostpnt=$(echo "${npe_node_max} / ${nth_wavepostpnt}" | bc) + export npe_node_wavepostpnt export NTASKS=${npe_wavepostpnt} export is_exclusive=True -elif [ ${step} = "wavegempak" ]; then +elif [[ ${step} = "wavegempak" ]]; then export wtime_wavegempak="02:00:00" export npe_wavegempak=1 export nth_wavegempak=1 - export npe_node_wavegempak=$(echo "${npe_node_max} / ${nth_wavegempak}" | bc) + npe_node_wavegempak=$(echo "${npe_node_max} / ${nth_wavegempak}" | bc) + export npe_node_wavegempak export NTASKS=${npe_wavegempak} export memory_wavegempak="1GB" -elif [ ${step} = "waveawipsbulls" ]; then +elif [[ ${step} = "waveawipsbulls" ]]; then export wtime_waveawipsbulls="00:20:00" export npe_waveawipsbulls=1 export nth_waveawipsbulls=1 - export npe_node_waveawipsbulls=$(echo "${npe_node_max} / ${nth_waveawipsbulls}" | bc) + npe_node_waveawipsbulls=$(echo "${npe_node_max} / ${nth_waveawipsbulls}" | bc) + export npe_node_waveawipsbulls export NTASKS=${npe_waveawipsbulls} export is_exclusive=True -elif [ ${step} = "waveawipsgridded" ]; then +elif [[ ${step} = 
"waveawipsgridded" ]]; then export wtime_waveawipsgridded="02:00:00" export npe_waveawipsgridded=1 export nth_waveawipsgridded=1 - export npe_node_waveawipsgridded=$(echo "${npe_node_max} / ${nth_waveawipsgridded}" | bc) + npe_node_waveawipsgridded=$(echo "${npe_node_max} / ${nth_waveawipsgridded}" | bc) + export npe_node_waveawipsgridded export NTASKS=${npe_waveawipsgridded} export memory_waveawipsgridded_gfs="1GB" -elif [[ "${step}" = "atmanalprep" ]]; then +elif [[ "${step}" = "atmanlinit" ]]; then - export wtime_atmanalprep="00:10:00" - export npe_atmanalprep=1 - export nth_atmanalprep=1 - npe_node_atmanalprep=$(echo "${npe_node_max} / ${nth_atmanalprep}" | bc) - export npe_node_atmanalprep - export memory_atmanalprep="3072M" + export wtime_atmanlinit="00:10:00" + export npe_atmanlinit=1 + export nth_atmanlinit=1 + npe_node_atmanlinit=$(echo "${npe_node_max} / ${nth_atmanlinit}" | bc) + export npe_node_atmanlinit + export memory_atmanlinit="3072M" -elif [[ "${step}" = "atmanalrun" ]]; then +elif [[ "${step}" = "atmanlrun" ]]; then # make below case dependent later export layout_x=1 export layout_y=1 - export wtime_atmanalrun="00:30:00" - npe_atmanalrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) - export npe_atmanalrun - npe_atmanalrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) - export npe_atmanalrun_gfs - export nth_atmanalrun=1 - export nth_atmanalrun_gfs=${nth_atmanalrun} - npe_node_atmanalrun=$(echo "${npe_node_max} / ${nth_atmanalrun}" | bc) - export npe_node_atmanalrun + export wtime_atmanlrun="00:30:00" + npe_atmanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun + npe_atmanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun_gfs + export nth_atmanlrun=1 + export nth_atmanlrun_gfs=${nth_atmanlrun} + npe_node_atmanlrun=$(echo "${npe_node_max} / ${nth_atmanlrun}" | bc) + export npe_node_atmanlrun export is_exclusive=True -elif [[ "${step}" = "atmanalpost" ]]; then +elif [[ "${step}" = "atmanlfinal" ]]; 
then - export wtime_atmanalpost="00:30:00" - export npe_atmanalpost=${npe_node_max} - export nth_atmanalpost=1 - npe_node_atmanalpost=$(echo "${npe_node_max} / ${nth_atmanalpost}" | bc) - export npe_node_atmanalpost + export wtime_atmanlfinal="00:30:00" + export npe_atmanlfinal=${npe_node_max} + export nth_atmanlfinal=1 + npe_node_atmanlfinal=$(echo "${npe_node_max} / ${nth_atmanlfinal}" | bc) + export npe_node_atmanlfinal export is_exclusive=True +elif [[ "${step}" = "landanlprep" || "${step}" = "landanlinit" || "${step}" = "landanlrun" || "${step}" = "landanlfinal" ]]; then + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resolution not supported for land analysis'" + exit 1 + esac + + export layout_x + export layout_y + + if [[ "${step}" = "landanlinit" || "${step}" = "landanlfinal" ]]; then + declare -x "wtime_${step}"="00:10:00" + declare -x "npe_${step}"=1 + declare -x "nth_${step}"=1 + temp_stepname="nth_${step}" + declare -x "npe_node_${step}"="$(echo "${npe_node_max} / ${!temp_stepname}" | bc)" + declare -x "memory_${step}"="3072M" + elif [[ "${step}" = "landanlrun" ]]; then + export wtime_landanlrun="00:30:00" + npe_landanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_landanlrun + export nth_landanlrun=1 + npe_node_landanlrun=$(echo "${npe_node_max} / ${nth_landanlrun}" | bc) + export npe_node_landanlrun + export is_exclusive=True + elif [[ "${step}" = "landanlprep" ]]; then + export wtime_landanlprep="00:30:00" + npe_landanlprep=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_landanlprep + export nth_landanlprep=1 + npe_node_landanlprep=$(echo "${npe_node_max} / ${nth_landanlprep}" | bc) + export npe_node_landanlprep + export is_exclusive=True + fi + elif [[ "${step}" = "aeroanlinit" ]]; then # below lines are for creating JEDI YAML @@ -317,6 +377,25 @@ elif [[ 
"${step}" = "ocnanalrun" ]]; then npe_node_ocnanalrun=$(echo "${npe_node_max} / ${nth_ocnanalrun}" | bc) export npe_node_ocnanalrun +elif [[ "${step}" = "ocnanalchkpt" ]]; then + + export wtime_ocnanalchkpt="00:10:00" + export npe_ocnanalchkpt=1 + export nth_ocnanalchkpt=1 + npe_node_ocnanalchkpt=$(echo "${npe_node_max} / ${nth_ocnanalchkpt}" | bc) + export npe_node_ocnanalchkpt + case ${CASE} in + C384) + export memory_ocnanalchkpt="128GB" + ;; + C48) + export memory_ocnanalchkpt="32GB" + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + elif [[ "${step}" = "ocnanalpost" ]]; then export wtime_ocnanalpost="00:30:00" @@ -325,7 +404,16 @@ elif [[ "${step}" = "ocnanalpost" ]]; then npe_node_ocnanalpost=$(echo "${npe_node_max} / ${nth_ocnanalpost}" | bc) export npe_node_ocnanalpost -elif [ ${step} = "anal" ]; then +elif [[ "${step}" = "ocnanalvrfy" ]]; then + + export wtime_ocnanalvrfy="00:35:00" + export npe_ocnanalvrfy=1 + export nth_ocnanalvrfy=1 + npe_node_ocnanalvrfy=$(echo "${npe_node_max} / ${nth_ocnanalvrfy}" | bc) + export npe_node_ocnanalvrfy + export memory_ocnanalvrfy="24GB" + +elif [[ ${step} = "anal" ]]; then export wtime_anal="00:50:00" export wtime_anal_gfs="00:40:00" @@ -337,13 +425,12 @@ elif [ ${step} = "anal" ]; then export nth_anal=8 export nth_anal_gfs=8 fi - if [ ${CASE} = "C384" ]; then + if [[ ${CASE} = "C384" ]]; then export npe_anal=160 export npe_anal_gfs=160 export nth_anal=10 export nth_anal_gfs=10 if [[ ${machine} = "S4" ]]; then - #For the analysis jobs, the number of tasks and cores used must be equal #On the S4-s4 partition, this is accomplished by increasing the task #count to a multiple of 32 if [[ ${PARTITION_BATCH} = "s4" ]]; then @@ -372,47 +459,41 @@ elif [ ${step} = "anal" ]; then fi fi fi - export npe_node_anal=$(echo "${npe_node_max} / ${nth_anal}" | bc) + npe_node_anal=$(echo "${npe_node_max} / ${nth_anal}" | bc) + export npe_node_anal export nth_cycle=${nth_anal} - export npe_node_cycle=$(echo 
"${npe_node_max} / ${nth_cycle}" | bc) + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle export is_exclusive=True -elif [ ${step} = "analcalc" ]; then +elif [[ ${step} = "analcalc" ]]; then export wtime_analcalc="00:10:00" export npe_analcalc=127 - export ntasks=${npe_analcalc} + export ntasks="${npe_analcalc}" export nth_analcalc=1 export nth_echgres=4 export nth_echgres_gfs=12 - export npe_node_analcalc=$(echo "${npe_node_max} / ${nth_analcalc}" | bc) + npe_node_analcalc=$(echo "${npe_node_max} / ${nth_analcalc}" | bc) + export npe_node_analcalc export is_exclusive=True -elif [ ${step} = "analdiag" ]; then +elif [[ ${step} = "analdiag" ]]; then export wtime_analdiag="00:15:00" export npe_analdiag=96 # Should be at least twice npe_ediag export nth_analdiag=1 - export npe_node_analdiag=$(echo "${npe_node_max} / ${nth_analdiag}" | bc) + npe_node_analdiag=$(echo "${npe_node_max} / ${nth_analdiag}" | bc) + export npe_node_analdiag export memory_analdiag="48GB" -elif [ ${step} = "sfcanl" ]; then +elif [[ ${step} = "sfcanl" ]]; then export wtime_sfcanl="00:10:00" export npe_sfcanl=6 export nth_sfcanl=1 - export npe_node_sfcanl=$(echo "${npe_node_max} / ${nth_sfcanl}" | bc) - export is_exclusive=True - -elif [ ${step} = "gldas" ]; then - - export wtime_gldas="00:10:00" - export npe_gldas=112 - export nth_gldas=1 - export npe_node_gldas=$(echo "${npe_node_max} / ${nth_gldas}" | bc) - export npe_gaussian=96 - export nth_gaussian=1 - export npe_node_gaussian=$(echo "${npe_node_max} / ${nth_gaussian}" | bc) + npe_node_sfcanl=$(echo "${npe_node_max} / ${nth_sfcanl}" | bc) + export npe_node_sfcanl export is_exclusive=True elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then @@ -431,7 +512,7 @@ elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then export layout_x=${layout_x_gfs} export layout_y=${layout_y_gfs} export WRITE_GROUP=${WRITE_GROUP_GFS} - export WRTTASK_PER_GROUP=${WRTTASK_PER_GROUP_GFS} + export 
WRTTASK_PER_GROUP_PER_THREAD=${WRTTASK_PER_GROUP_PER_THREAD_GFS} ntasks_fv3=${ntasks_fv3_gfs} ntasks_quilt=${ntasks_quilt_gfs} nthreads_fv3=${nthreads_fv3_gfs} @@ -444,8 +525,7 @@ elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then # PETS for quilting if [[ "${QUILTING:-}" = ".true." ]]; then (( QUILTPETS = ntasks_quilt * nthreads_fv3 )) - (( WRTTASK_PER_GROUP = WRTTASK_PER_GROUP * nthreads_fv3 )) # when threads are used, WRTTASKS_PER_GROUP = INCOMING_WRTTASKS_PER_GROUP * threads - # model_configure should be updated to reflect the number of threads used by calling this variable write_pets_per_group + (( WRTTASK_PER_GROUP = WRTTASK_PER_GROUP_PER_THREAD )) export WRTTASK_PER_GROUP else QUILTPETS=0 @@ -507,25 +587,25 @@ elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then echo "Total PETS for ${_CDUMP} = ${NTASKS_TOT}" if [[ "${_CDUMP}" =~ "gfs" ]]; then - declare -x npe_${step}_gfs="${NTASKS_TOT}" - declare -x nth_${step}_gfs=1 # ESMF handles threading for the UFS-weather-model - declare -x npe_node_${step}_gfs="${npe_node_max}" + declare -x "npe_${step}_gfs"="${NTASKS_TOT}" + declare -x "nth_${step}_gfs"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}_gfs"="${npe_node_max}" else - declare -x npe_${step}="${NTASKS_TOT}" - declare -x nth_${step}=1 # ESMF handles threading for the UFS-weather-model - declare -x npe_node_${step}="${npe_node_max}" + declare -x "npe_${step}"="${NTASKS_TOT}" + declare -x "nth_${step}"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}"="${npe_node_max}" fi done case "${CASE}" in "C48" | "C96" | "C192") - declare -x wtime_${step}="00:30:00" - declare -x wtime_${step}_gfs="03:00:00" + declare -x "wtime_${step}"="00:30:00" + declare -x "wtime_${step}_gfs"="03:00:00" ;; "C384" | "C768" | "C1152") - declare -x wtime_${step}="01:00:00" - declare -x wtime_${step}_gfs="06:00:00" + declare -x "wtime_${step}"="01:00:00" + declare -x "wtime_${step}_gfs"="06:00:00" ;; *) 
echo "FATAL ERROR: Resolution ${CASE} not supported in ${step}" @@ -536,20 +616,25 @@ elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then unset _CDUMP _CDUMP_LIST unset NTASKS_TOT -elif [ ${step} = "ocnpost" ]; then +elif [[ ${step} = "ocnpost" ]]; then export wtime_ocnpost="00:30:00" export npe_ocnpost=1 export npe_node_ocnpost=1 export nth_ocnpost=1 export memory_ocnpost="96G" + if [[ ${machine} == "JET" ]]; then + # JET only has 88GB of requestable memory per node + # so a second node is required to meet the requirement + npe_ocnpost=2 + fi -elif [ ${step} = "post" ]; then +elif [[ ${step} = "post" ]]; then export wtime_post="00:12:00" export wtime_post_gfs="01:00:00" export npe_post=126 - res=$(echo ${CASE} | cut -c2-) + res=$(echo "${CASE}" | cut -c2-) if (( npe_post > res )); then export npe_post=${res} fi @@ -561,7 +646,7 @@ elif [ ${step} = "post" ]; then if [[ "${npe_node_post_gfs}" -gt "${npe_node_max}" ]]; then export npe_node_post_gfs=${npe_node_max} ; fi export is_exclusive=True -elif [ ${step} = "wafs" ]; then +elif [[ ${step} = "wafs" ]]; then export wtime_wafs="00:30:00" export npe_wafs=1 @@ -569,7 +654,7 @@ elif [ ${step} = "wafs" ]; then export nth_wafs=1 export memory_wafs="1GB" -elif [ ${step} = "wafsgcip" ]; then +elif [[ ${step} = "wafsgcip" ]]; then export wtime_wafsgcip="00:30:00" export npe_wafsgcip=2 @@ -577,39 +662,43 @@ elif [ ${step} = "wafsgcip" ]; then export npe_node_wafsgcip=1 export memory_wafsgcip="50GB" -elif [ ${step} = "wafsgrib2" ]; then +elif [[ ${step} = "wafsgrib2" ]]; then export wtime_wafsgrib2="00:30:00" export npe_wafsgrib2=18 export nth_wafsgrib2=1 - export npe_node_wafsgrib2=$(echo "${npe_node_max} / ${nth_wafsgrib2}" | bc) + npe_node_wafsgrib2=$(echo "${npe_node_max} / ${nth_wafsgrib2}" | bc) + export npe_node_wafsgrib2 export memory_wafsgrib2="80GB" -elif [ ${step} = "wafsblending" ]; then +elif [[ ${step} = "wafsblending" ]]; then export wtime_wafsblending="00:30:00" export npe_wafsblending=1 export 
nth_wafsblending=1 - export npe_node_wafsblending=$(echo "${npe_node_max} / ${nth_wafsblending}" | bc) + npe_node_wafsblending=$(echo "${npe_node_max} / ${nth_wafsblending}" | bc) + export npe_node_wafsblending export memory_wafsblending="15GB" -elif [ ${step} = "wafsgrib20p25" ]; then +elif [[ ${step} = "wafsgrib20p25" ]]; then export wtime_wafsgrib20p25="00:30:00" export npe_wafsgrib20p25=11 export nth_wafsgrib20p25=1 - export npe_node_wafsgrib20p25=$(echo "${npe_node_max} / ${nth_wafsgrib20p25}" | bc) + npe_node_wafsgrib20p25=$(echo "${npe_node_max} / ${nth_wafsgrib20p25}" | bc) + export npe_node_wafsgrib20p25 export memory_wafsgrib20p25="80GB" -elif [ ${step} = "wafsblending0p25" ]; then +elif [[ ${step} = "wafsblending0p25" ]]; then export wtime_wafsblending0p25="00:30:00" export npe_wafsblending0p25=1 export nth_wafsblending0p25=1 - export npe_node_wafsblending0p25=$(echo "${npe_node_max} / ${nth_wafsblending0p25}" | bc) + npe_node_wafsblending0p25=$(echo "${npe_node_max} / ${nth_wafsblending0p25}" | bc) + export npe_node_wafsblending0p25 export memory_wafsblending0p25="15GB" -elif [ ${step} = "vrfy" ]; then +elif [[ ${step} = "vrfy" ]]; then export wtime_vrfy="03:00:00" export wtime_vrfy_gfs="06:00:00" @@ -629,9 +718,10 @@ elif [[ "${step}" = "fit2obs" ]]; then export npe_fit2obs=3 export nth_fit2obs=1 export npe_node_fit2obs=1 + export memory_fit2obs="20G" if [[ ${machine} == "WCOSS2" ]]; then export npe_node_fit2obs=3 ; fi -elif [ ${step} = "metp" ]; then +elif [[ "${step}" = "metp" ]]; then export nth_metp=1 export wtime_metp="03:00:00" @@ -642,7 +732,7 @@ elif [ ${step} = "metp" ]; then export npe_node_metp_gfs=4 export is_exclusive=True -elif [ ${step} = "echgres" ]; then +elif [[ ${step} = "echgres" ]]; then export wtime_echgres="00:10:00" export npe_echgres=3 @@ -652,7 +742,7 @@ elif [ ${step} = "echgres" ]; then export memory_echgres="200GB" fi -elif [ ${step} = "init" ]; then +elif [[ ${step} = "init" ]]; then export wtime_init="00:30:00" export 
npe_init=24 @@ -660,14 +750,14 @@ elif [ ${step} = "init" ]; then export npe_node_init=6 export memory_init="70G" -elif [ ${step} = "init_chem" ]; then +elif [[ ${step} = "init_chem" ]]; then export wtime_init_chem="00:30:00" export npe_init_chem=1 export npe_node_init_chem=1 export is_exclusive=True -elif [ ${step} = "mom6ic" ]; then +elif [[ ${step} = "mom6ic" ]]; then export wtime_mom6ic="00:30:00" export npe_mom6ic=24 @@ -680,12 +770,12 @@ elif [[ ${step} = "arch" || ${step} = "earc" || ${step} = "getic" ]]; then eval "export npe_${step}=1" eval "export npe_node_${step}=1" eval "export nth_${step}=1" - eval "export memory_${step}=2048M" + eval "export memory_${step}=4096M" if [[ "${machine}" = "WCOSS2" ]]; then eval "export memory_${step}=50GB" fi -elif [ ${step} = "coupled_ic" ]; then +elif [[ ${step} = "coupled_ic" ]]; then export wtime_coupled_ic="00:15:00" export npe_coupled_ic=1 @@ -693,34 +783,39 @@ elif [ ${step} = "coupled_ic" ]; then export nth_coupled_ic=1 export is_exclusive=True -elif [ ${step} = "atmensanalprep" ]; then +elif [[ "${step}" = "atmensanlinit" ]]; then - export wtime_atmensanalprep="00:10:00" - export npe_atmensanalprep=1 - export nth_atmensanalprep=1 - export npe_node_atmensanalprep=$(echo "${npe_node_max} / ${nth_atmensanalprep}" | bc) - export is_exclusive=True + export wtime_atmensanlinit="00:10:00" + export npe_atmensanlinit=1 + export nth_atmensanlinit=1 + npe_node_atmensanlinit=$(echo "${npe_node_max} / ${nth_atmensanlinit}" | bc) + export npe_node_atmensanlinit + export memory_atmensanlinit="3072M" -elif [ ${step} = "atmensanalrun" ]; then +elif [[ "${step}" = "atmensanlrun" ]]; then # make below case dependent later - export layout_x=2 - export layout_y=3 - - export wtime_atmensanalrun="00:30:00" - export npe_atmensanalrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) - export npe_atmensanalrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) - export nth_atmensanalrun=1 - export nth_atmensanalrun_gfs=${nth_atmensanalrun} + 
export layout_x=1 + export layout_y=1 + + export wtime_atmensanlrun="00:30:00" + npe_atmensanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun + npe_atmensanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun_gfs + export nth_atmensanlrun=1 + export nth_atmensanlrun_gfs=${nth_atmensanlrun} + npe_node_atmensanlrun=$(echo "${npe_node_max} / ${nth_atmensanlrun}" | bc) + export npe_node_atmensanlrun export is_exclusive=True - export npe_node_atmensanalrun=$(echo "${npe_node_max} / ${nth_atmensanalrun}" | bc) -elif [ ${step} = "atmensanalpost" ]; then +elif [[ "${step}" = "atmensanlfinal" ]]; then - export wtime_atmensanalpost="00:30:00" - export npe_atmensanalpost=${npe_node_max} - export nth_atmensanalpost=1 - export npe_node_atmensanalpost=$(echo "${npe_node_max} / ${nth_atmensanalpost}" | bc) + export wtime_atmensanlfinal="00:30:00" + export npe_atmensanlfinal=${npe_node_max} + export nth_atmensanlfinal=1 + npe_node_atmensanlfinal=$(echo "${npe_node_max} / ${nth_atmensanlfinal}" | bc) + export npe_node_atmensanlfinal export is_exclusive=True elif [[ ${step} = "eobs" || ${step} = "eomg" ]]; then @@ -737,7 +832,8 @@ elif [[ ${step} = "eobs" || ${step} = "eomg" ]]; then export npe_eomg=${npe_eobs} export nth_eobs=2 export nth_eomg=${nth_eobs} - export npe_node_eobs=$(echo "${npe_node_max} / ${nth_eobs}" | bc) + npe_node_eobs=$(echo "${npe_node_max} / ${nth_eobs}" | bc) + export npe_node_eobs export npe_node_eomg=${npe_node_eobs} export is_exclusive=True #The number of tasks and cores used must be the same for eobs @@ -746,70 +842,75 @@ elif [[ ${step} = "eobs" || ${step} = "eomg" ]]; then export npe_node_eobs=10 fi -elif [ ${step} = "ediag" ]; then +elif [[ ${step} = "ediag" ]]; then export wtime_ediag="00:15:00" export npe_ediag=48 export nth_ediag=1 - export npe_node_ediag=$(echo "${npe_node_max} / ${nth_ediag}" | bc) + npe_node_ediag=$(echo "${npe_node_max} / ${nth_ediag}" | bc) + export npe_node_ediag export 
memory_ediag="30GB" -elif [ ${step} = "eupd" ]; then +elif [[ ${step} = "eupd" ]]; then export wtime_eupd="00:30:00" - if [ ${CASE} = "C768" ]; then + if [[ ${CASE} = "C768" ]]; then export npe_eupd=480 export nth_eupd=6 if [[ "${machine}" = "WCOSS2" ]]; then export npe_eupd=315 export nth_eupd=14 fi - elif [ ${CASE} = "C384" ]; then + elif [[ ${CASE} = "C384" ]]; then export npe_eupd=270 export nth_eupd=2 if [[ "${machine}" = "WCOSS2" ]]; then export npe_eupd=315 export nth_eupd=14 - elif [[ "${machine}" = "HERA" ]]; then + elif [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then export nth_eupd=8 - fi - if [[ ${machine} = "S4" ]]; then + elif [[ ${machine} = "S4" ]]; then export npe_eupd=160 export nth_eupd=2 fi elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then export npe_eupd=42 export nth_eupd=2 - if [[ "${machine}" = "HERA" ]]; then + if [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then export nth_eupd=4 fi fi - export npe_node_eupd=$(echo "${npe_node_max} / ${nth_eupd}" | bc) + npe_node_eupd=$(echo "${npe_node_max} / ${nth_eupd}" | bc) + export npe_node_eupd export is_exclusive=True -elif [ ${step} = "ecen" ]; then +elif [[ ${step} = "ecen" ]]; then export wtime_ecen="00:10:00" export npe_ecen=80 export nth_ecen=4 if [[ "${machine}" = "HERA" ]]; then export nth_ecen=6; fi if [[ ${CASE} = "C384" || ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then export nth_ecen=2; fi - export npe_node_ecen=$(echo "${npe_node_max} / ${nth_ecen}" | bc) + npe_node_ecen=$(echo "${npe_node_max} / ${nth_ecen}" | bc) + export npe_node_ecen export nth_cycle=${nth_ecen} - export npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle export is_exclusive=True -elif [ ${step} = "esfc" ]; then +elif [[ ${step} = "esfc" ]]; then export wtime_esfc="00:06:00" export npe_esfc=80 export nth_esfc=1 - export npe_node_esfc=$(echo "${npe_node_max} / 
${nth_esfc}" | bc) + npe_node_esfc=$(echo "${npe_node_max} / ${nth_esfc}" | bc) + export npe_node_esfc export nth_cycle=${nth_esfc} - export npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle export memory_esfc="80GB" -elif [ ${step} = "epos" ]; then +elif [[ ${step} = "epos" ]]; then export wtime_epos="00:15:00" export npe_epos=80 @@ -817,10 +918,11 @@ elif [ ${step} = "epos" ]; then if [[ "${machine}" == "HERA" ]]; then export nth_epos=6 fi - export npe_node_epos=$(echo "${npe_node_max} / ${nth_epos}" | bc) + npe_node_epos=$(echo "${npe_node_max} / ${nth_epos}" | bc) + export npe_node_epos export is_exclusive=True -elif [ ${step} = "postsnd" ]; then +elif [[ ${step} = "postsnd" ]]; then export wtime_postsnd="02:00:00" export npe_postsnd=40 @@ -828,12 +930,14 @@ elif [ ${step} = "postsnd" ]; then export npe_node_postsnd=10 export npe_postsndcfp=9 export npe_node_postsndcfp=1 - if [[ "$(echo "${npe_node_postsnd} * ${nth_postsnd}" | bc)" -gt "${npe_node_max}" ]]; then - export npe_node_postsnd=$(echo "${npe_node_max} / ${nth_postsnd}" | bc) + postsnd_req_cores=$(echo "${npe_node_postsnd} * ${nth_postsnd}" | bc) + if [[ ${postsnd_req_cores} -gt "${npe_node_max}" ]]; then + npe_node_postsnd=$(echo "${npe_node_max} / ${nth_postsnd}" | bc) + export npe_node_postsnd fi export is_exclusive=True -elif [ ${step} = "awips" ]; then +elif [[ ${step} = "awips" ]]; then export wtime_awips="03:30:00" export npe_awips=1 @@ -841,7 +945,7 @@ elif [ ${step} = "awips" ]; then export nth_awips=1 export memory_awips="3GB" -elif [ ${step} = "gempak" ]; then +elif [[ ${step} = "gempak" ]]; then export wtime_gempak="03:00:00" export npe_gempak=2 diff --git a/parm/config/config.resources.nco.static b/parm/config/gfs/config.resources.nco.static similarity index 96% rename from parm/config/config.resources.nco.static rename to parm/config/gfs/config.resources.nco.static index 
e6cd2ef73e..d98e985b95 100644 --- a/parm/config/config.resources.nco.static +++ b/parm/config/gfs/config.resources.nco.static @@ -8,7 +8,7 @@ if [ $# -ne 1 ]; then echo "Must specify an input task argument to set resource variables!" echo "argument can be any one of the following:" - echo "anal analcalc analdiag gldas fcst post vrfy metp arch echgres" + echo "anal analcalc analdiag fcst post vrfy metp arch echgres" echo "eobs ediag eomg eupd ecen esfc efcs epos earc" echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" echo "wavegempak waveawipsbulls waveawipsgridded" @@ -144,16 +144,6 @@ elif [ $step = "analdiag" ]; then export npe_node_analdiag=$npe_analdiag export memory_analdiag="48GB" -elif [ $step = "gldas" ]; then - - export wtime_gldas="00:10:00" - export npe_gldas=112 - export nth_gldas=1 - export npe_node_gldas=$npe_gldas - export npe_gaussian=96 - export nth_gaussian=1 - export npe_node_gaussian=$(echo "$npe_node_max / $nth_gaussian" | bc) - elif [ $step = "fcst" ]; then export wtime_fcst="01:30:00" diff --git a/parm/config/config.sfcanl b/parm/config/gfs/config.sfcanl similarity index 100% rename from parm/config/config.sfcanl rename to parm/config/gfs/config.sfcanl diff --git a/parm/config/gfs/config.ufs b/parm/config/gfs/config.ufs new file mode 100644 index 0000000000..a96ba126e2 --- /dev/null +++ b/parm/config/gfs/config.ufs @@ -0,0 +1,370 @@ +#! /usr/bin/env bash + +########## config.ufs ########## +# UFS model resolution specific parameters +# e.g. time-step, processor layout, physics and dynamics parameters +# This config sets default variables for FV3, MOM6, CICE6 for their resolutions +# User can over-ride after sourcing this config file + +echo "BEGIN: config.ufs" + +if [ $# -le 1 ]; then + + echo "Must specify an input resolution argument to set variables!" 
+ echo "argument can be any one of the following:" + echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" + echo "--mom6 500|100|025" + echo "--cice6 500|100|025" + echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|mx050|mx025" + + exit 1 + +fi + +# Initialize +skip_mom6=true +skip_cice6=true +skip_ww3=true +skip_mediator=true + +# Loop through named arguments +while [[ $# -gt 0 ]]; do + key="$1" + case "${key}" in + "--fv3") + fv3_res="$2" + ;; + "--mom6") + mom6_res="$2" + skip_mom6=false + ;; + "--cice6") + cice6_res="$2" + skip_cice6=false + ;; + "--ww3") + ww3_res="$2" + skip_ww3=false + ;; + *) # unknown option + echo "FATAL ERROR: Unknown option: ${key}, ABORT!" + exit 1 + ;; + esac + shift + shift +done + +# Mediator is required if any of the non-ATM components are used +if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${skip_ww3}" == "false" ]]; then + skip_mediator=false +fi + +case "${machine}" in + "WCOSS2") + npe_node_max=128 + ;; + "HERA" | "ORION") + npe_node_max=40 + ;; + "JET") + case "${PARTITION_BATCH}" in + "xjet") + npe_node_max=24 + ;; + "vjet" | "sjet") + npe_node_max=16 + ;; + "kjet") + npe_node_max=40 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; + "S4") + case "${PARTITION_BATCH}" in + "s4") + npe_node_max=32 + ;; + "ivy") + npe_node_max=20 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" 
+ exit 1 + ;; + esac + ;; +esac +export npe_node_max + +# (Standard) Model resolution dependent variables +case "${fv3_res}" in + "C48") + export DELTIM=1200 + export layout_x=1 + export layout_y=1 + export layout_x_gfs=1 + export layout_y_gfs=1 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C96") + export DELTIM=600 + export layout_x=2 + export layout_y=2 + export layout_x_gfs=2 + export layout_y_gfs=2 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C192") + export DELTIM=450 + export layout_x=4 + export layout_y=6 + export layout_x_gfs=4 + export layout_y_gfs=6 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5 + ;; + "C384") + export DELTIM=300 + export layout_x=6 + export layout_y=8 + export layout_x_gfs=8 + export layout_y_gfs=8 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=8 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=4 + ;; + "C768") + export DELTIM=150 + export layout_x=8 + export layout_y=12 + export layout_x_gfs=12 + export layout_y_gfs=16 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.15,1.0,1.0" # 
mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + ;; + "C1152") + export DELTIM=120 + export layout_x=8 + export layout_y=16 + export layout_x_gfs=8 + export layout_y_gfs=16 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + "C3072") + export DELTIM=90 + export layout_x=16 + export layout_y=32 + export layout_x_gfs=16 + export layout_y_gfs=32 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + *) + echo "FATAL ERROR: Unsupported FV3 resolution = ${fv3_res}, ABORT!" 
+ exit 1 + ;; +esac + +(( WRTTASK_PER_GROUP_PER_THREAD = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE * 6 )) +(( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 )) +export WRTTASK_PER_GROUP_PER_THREAD +export WRTTASK_PER_GROUP_PER_THREAD_GFS + +(( ntasks_fv3 = layout_x * layout_y * 6 )) +(( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 )) +export ntasks_fv3 +export ntasks_fv3_gfs + +(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP_PER_THREAD )) +(( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS )) +export ntasks_quilt +export ntasks_quilt_gfs + +# Determine whether to use parallel NetCDF based on resolution +case ${fv3_res} in + "C48" | "C96" | "C192" | "C384") + OUTPUT_FILETYPE_ATM="netcdf" + OUTPUT_FILETYPE_SFC="netcdf" + ;; + "C768" | "C1152" | "C3072") + OUTPUT_FILETYPE_ATM="netcdf_parallel" + OUTPUT_FILETYPE_SFC="netcdf_parallel" + ;; +esac +export OUTPUT_FILETYPE_ATM OUTPUT_FILETYPE_SFC + +# Mediator specific settings +if [[ "${skip_mediator}" == "false" ]]; then + export nthreads_mediator=${nthreads_fv3} # Use same threads as FV3 +fi + +# MOM6 specific settings +if [[ "${skip_mom6}" == "false" ]]; then + nthreads_mom6=1 + case "${mom6_res}" in + "500") + ntasks_mom6=8 + OCNTIM=3600 + NX_GLB=72 + NY_GLB=35 + DT_DYNAM_MOM6='3600' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='r' + MOM6_RIVER_RUNOFF='False' + ;; + "100") + ntasks_mom6=20 + OCNTIM=3600 + NX_GLB=360 + NY_GLB=320 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='False' + ;; + "50") + ntasks_mom6=60 + OCNTIM=3600 + NX_GLB=720 + NY_GLB=576 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='True' + ;; + "025") + 
ntasks_mom6=220 + OCNTIM=1800 + NX_GLB=1440 + NY_GLB=1080 + DT_DYNAM_MOM6='900' + DT_THERM_MOM6='1800' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RIVER_RUNOFF='True' + MOM6_RESTART_SETTING="r" + ;; + *) + echo "FATAL ERROR: Unsupported MOM6 resolution = ${mom6_res}, ABORT!" + exit 1 + ;; + esac + export nthreads_mom6 ntasks_mom6 + export OCNTIM + export NX_GLB NY_GLB + export DT_DYNAM_MOM6 DT_THERM_MOM6 + export FRUNOFF + export CHLCLIM + export MOM6_RIVER_RUNOFF + export MOM6_RESTART_SETTING +fi + +# CICE6 specific settings +if [[ "${skip_cice6}" == "false" ]]; then + # Ensure we sourced the MOM6 section + if [[ "${skip_mom6}" == "true" ]]; then + echo "FATAL ERROR: CICE6 cannot be configured without MOM6, ABORT!" + exit 1 + fi + nthreads_cice6=${nthreads_mom6} # CICE6 needs to run on same threads as MOM6 + case "${cice6_res}" in + "500") + ntasks_cice6=4 + cice6_processor_shape="slenderX1" + ;; + "100") + ntasks_cice6=10 + cice6_processor_shape="slenderX2" + ;; + "050") + ntasks_cice6=30 + cice6_processor_shape="slenderX2" + ;; + "025") + ntasks_cice6=120 + cice6_processor_shape="slenderX2" + ;; + *) + echo "FATAL ERROR: Unsupported CICE6 resolution = ${cice6_res}, ABORT!" + exit 1 + ;; + esac + # NX_GLB and NY_GLB are set in the MOM6 section above + # CICE6 runs on the same domain decomposition as MOM6 + export nthreads_cice6 ntasks_cice6 + export cice6_processor_shape +fi + +# WW3 specific settings +if [[ "${skip_ww3}" == "false" ]]; then + nthreads_ww3=2 + case "${ww3_res}" in + "gnh_10m;aoc_9km;gsh_15m") + ntasks_ww3=140 + ;; + "gwes_30m") + ntasks_ww3=100 + ;; + "mx050") + ntasks_ww3=240 + ;; + "mx025") + ntasks_ww3=80 + ;; + *) + echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!" 
+ exit 1 + ;; + esac + export ntasks_ww3 nthreads_ww3 +fi + +echo "END: config.ufs" diff --git a/parm/config/gfs/config.vrfy b/parm/config/gfs/config.vrfy new file mode 100644 index 0000000000..c277e8e963 --- /dev/null +++ b/parm/config/gfs/config.vrfy @@ -0,0 +1,110 @@ +#! /usr/bin/env bash + +########## config.vrfy ########## +# Verification step specific + +echo "BEGIN: config.vrfy" + +# Get task specific resources +. "${EXPDIR}/config.resources" vrfy + +export CDFNL="gdas" # Scores verification against GDAS/GFS analysis +export MKPGB4PRCP="YES" # Make 0.25-deg pgb files in ARCDIR for precip verification +export VRFYRAD="YES" # Radiance data assimilation monitoring +export VRFYOZN="YES" # Ozone data assimilation monitoring +export VRFYMINMON="YES" # GSI minimization monitoring +export VRFYTRAK="YES" # Hurricane track verification +export VRFYGENESIS="YES" # Cyclone genesis verification +export VRFYFSU="NO" # Cyclone genesis verification (FSU) +export RUNMOS="NO" # whether to run entire MOS package + +#---------------------------------------------------------- +# Minimization, Radiance and Ozone Monitoring +#---------------------------------------------------------- + +if [[ ${VRFYRAD} = "YES" || ${VRFYMINMON} = "YES" || ${VRFYOZN} = "YES" ]]; then + + export envir="para" + export COM_IN=${ROTDIR} + + # Radiance Monitoring + if [[ "${VRFYRAD}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export RADMON_SUFFIX=${PSLOT} + export TANKverf="${NOSCRUB}/monitor/radmon" + export VRFYRADSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFRAD" + + fi + + # Minimization Monitoring + if [[ "${VRFYMINMON}" = "YES" ]] ; then + + export MINMON_SUFFIX=${PSLOT} + export M_TANKverf="${NOSCRUB}/monitor/minmon" + if [[ "${RUN}" = "gdas" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VMINMON" + elif [[ "${RUN}" = "gfs" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGFS_ATMOS_VMINMON" + fi + + fi + + # Ozone Monitoring + if [[ "${VRFYOZN}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + 
+ export HOMEgfs_ozn="${HOMEgfs}" + export OZNMON_SUFFIX=${PSLOT} + export TANKverf_ozn="${NOSCRUB}/monitor/oznmon" + export VRFYOZNSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFOZN" + + fi + +fi + + +#------------------------------------------------- +# Cyclone genesis and cyclone track verification +#------------------------------------------------- + +export ens_tracker_ver=feature-GFSv17_com_reorg # TODO - temporary ahead of new tag/version +export HOMEens_tracker=$BASE_GIT/TC_tracker/${ens_tracker_ver} + +if [[ "${VRFYTRAK}" = "YES" ]]; then + + export TRACKERSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_TRACKER" + COMINsyn=${COMINsyn:-$(compath.py "${envir}"/com/gfs/"${gfs_ver}")/syndat} + export COMINsyn + if [[ "${RUN}" = "gdas" ]]; then + export FHOUT_CYCLONE=3 + export FHMAX_CYCLONE=${FHMAX} + else + export FHOUT_CYCLONE=6 + FHMAX_CYCLONE=$(( FHMAX_GFS<240 ? FHMAX_GFS : 240 )) + export FHMAX_CYCLONE + fi +fi + + +if [[ "${VRFYGENESIS}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_GENESIS" +fi + +if [[ "${VRFYFSU}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISFSU="${HOMEgfs}/jobs/JGFS_ATMOS_FSU_GENESIS" +fi + +if [[ "${RUNMOS}" == "YES" && "${RUN}" == "gfs" ]]; then + + if [[ "${machine}" = "HERA" ]] ; then + export RUNGFSMOSSH="${HOMEgfs}/scripts/run_gfsmos_master.sh.hera" + else + echo "WARNING: MOS package is not enabled on ${machine}!" 
+ export RUNMOS="NO" + export RUNGFSMOSSH="" + fi +fi + + +echo "END: config.vrfy" diff --git a/parm/config/config.wafs b/parm/config/gfs/config.wafs similarity index 100% rename from parm/config/config.wafs rename to parm/config/gfs/config.wafs diff --git a/parm/config/config.wafsblending b/parm/config/gfs/config.wafsblending similarity index 100% rename from parm/config/config.wafsblending rename to parm/config/gfs/config.wafsblending diff --git a/parm/config/config.wafsblending0p25 b/parm/config/gfs/config.wafsblending0p25 similarity index 100% rename from parm/config/config.wafsblending0p25 rename to parm/config/gfs/config.wafsblending0p25 diff --git a/parm/config/config.wafsgcip b/parm/config/gfs/config.wafsgcip similarity index 100% rename from parm/config/config.wafsgcip rename to parm/config/gfs/config.wafsgcip diff --git a/parm/config/config.wafsgrib2 b/parm/config/gfs/config.wafsgrib2 similarity index 100% rename from parm/config/config.wafsgrib2 rename to parm/config/gfs/config.wafsgrib2 diff --git a/parm/config/config.wafsgrib20p25 b/parm/config/gfs/config.wafsgrib20p25 similarity index 100% rename from parm/config/config.wafsgrib20p25 rename to parm/config/gfs/config.wafsgrib20p25 diff --git a/parm/config/config.wave b/parm/config/gfs/config.wave similarity index 99% rename from parm/config/config.wave rename to parm/config/gfs/config.wave index 5cc55e5542..658c4b40ae 100644 --- a/parm/config/config.wave +++ b/parm/config/gfs/config.wave @@ -19,7 +19,7 @@ export USHwave="$HOMEgfs/ush" # Some others are also used across the workflow in wave component scripts # General runtime labels -export CDUMPwave="${CDUMP}wave" +export CDUMPwave="${RUN}wave" # In GFS/GDAS, restart files are generated/read from gdas runs export CDUMPRSTwave="gdas" diff --git a/parm/config/config.waveawipsbulls b/parm/config/gfs/config.waveawipsbulls similarity index 66% rename from parm/config/config.waveawipsbulls rename to parm/config/gfs/config.waveawipsbulls index 
e3748e9cd1..fd21869355 100644 --- a/parm/config/config.waveawipsbulls +++ b/parm/config/gfs/config.waveawipsbulls @@ -10,8 +10,5 @@ echo "BEGIN: config.waveawipsbulls" export DBNROOT=/dev/null export SENDCOM="YES" -export COMPONENT=${COMPONENT:-wave} -export COMIN="$ROTDIR/$CDUMP.$PDY/$cyc/$COMPONENT" -export COMOUT="$ROTDIR/$CDUMP.$PDY/$cyc/$COMPONENT" echo "END: config.waveawipsbulls" diff --git a/parm/config/config.waveawipsgridded b/parm/config/gfs/config.waveawipsgridded similarity index 67% rename from parm/config/config.waveawipsgridded rename to parm/config/gfs/config.waveawipsgridded index e84352558e..6896ec8bd2 100644 --- a/parm/config/config.waveawipsgridded +++ b/parm/config/gfs/config.waveawipsgridded @@ -10,8 +10,5 @@ echo "BEGIN: config.waveawipsgridded" export DBNROOT=/dev/null export SENDCOM="YES" -export COMPONENT=${COMPONENT:-wave} -export COMIN="$ROTDIR/$CDUMP.$PDY/$cyc/$COMPONENT" -export COMOUT="$ROTDIR/$CDUMP.$PDY/$cyc/$COMPONENT" echo "END: config.waveawipsgridded" diff --git a/parm/config/config.wavegempak b/parm/config/gfs/config.wavegempak similarity index 62% rename from parm/config/config.wavegempak rename to parm/config/gfs/config.wavegempak index 66af59f2a4..da76c364ce 100644 --- a/parm/config/config.wavegempak +++ b/parm/config/gfs/config.wavegempak @@ -9,8 +9,5 @@ echo "BEGIN: config.wavegempak" . 
$EXPDIR/config.resources wavegempak export SENDCOM="YES" -export COMPONENT=${COMPONENT:-wave} -export COMIN="$ROTDIR/$CDUMP.$PDY/$cyc/$COMPONENT" -export COMOUT="$ROTDIR/$CDUMP.$PDY/$cyc/$COMPONENT/gempak" echo "END: config.wavegempak" diff --git a/parm/config/config.waveinit b/parm/config/gfs/config.waveinit similarity index 100% rename from parm/config/config.waveinit rename to parm/config/gfs/config.waveinit diff --git a/parm/config/config.wavepostbndpnt b/parm/config/gfs/config.wavepostbndpnt similarity index 100% rename from parm/config/config.wavepostbndpnt rename to parm/config/gfs/config.wavepostbndpnt diff --git a/parm/config/config.wavepostbndpntbll b/parm/config/gfs/config.wavepostbndpntbll similarity index 100% rename from parm/config/config.wavepostbndpntbll rename to parm/config/gfs/config.wavepostbndpntbll diff --git a/parm/config/config.wavepostpnt b/parm/config/gfs/config.wavepostpnt similarity index 100% rename from parm/config/config.wavepostpnt rename to parm/config/gfs/config.wavepostpnt diff --git a/parm/config/config.wavepostsbs b/parm/config/gfs/config.wavepostsbs similarity index 100% rename from parm/config/config.wavepostsbs rename to parm/config/gfs/config.wavepostsbs diff --git a/parm/config/config.waveprep b/parm/config/gfs/config.waveprep similarity index 100% rename from parm/config/config.waveprep rename to parm/config/gfs/config.waveprep diff --git a/parm/config/yaml/defaults.yaml b/parm/config/gfs/yaml/defaults.yaml similarity index 89% rename from parm/config/yaml/defaults.yaml rename to parm/config/gfs/yaml/defaults.yaml index 8289974668..4c3817ef01 100644 --- a/parm/config/yaml/defaults.yaml +++ b/parm/config/gfs/yaml/defaults.yaml @@ -2,6 +2,10 @@ aeroanl: IO_LAYOUT_X: 1 IO_LAYOUT_Y: 1 +landanl: + IO_LAYOUT_X: 1 + IO_LAYOUT_Y: 1 + ocnanal: SOCA_INPUT_FIX_DIR: '/scratch2/NCEPDEV/ocean/Guillaume.Vernieres/data/static/72x35x25' CASE_ANL: 'C48' diff --git a/parm/mom6/MOM_input_template_025 b/parm/mom6/MOM_input_template_025 index 
2b04adf189..6c0779f426 100644 --- a/parm/mom6/MOM_input_template_025 +++ b/parm/mom6/MOM_input_template_025 @@ -201,7 +201,6 @@ DTFREEZE_DP = -7.75E-08 ! [deg C Pa-1] default = 0.0 PARALLEL_RESTARTFILES = True ! [Boolean] default = False ! If true, each processor writes its own restart file, otherwise a single ! restart file is generated -STORE_CORIOLIS_ACCEL = False ! === module MOM_tracer_flow_control === USE_IDEAL_AGE_TRACER = False ! [Boolean] default = False diff --git a/parm/mom6/MOM_input_template_050 b/parm/mom6/MOM_input_template_050 index dca3c204a5..4c39198c02 100644 --- a/parm/mom6/MOM_input_template_050 +++ b/parm/mom6/MOM_input_template_050 @@ -199,7 +199,6 @@ DTFREEZE_DP = -7.75E-08 ! [deg C Pa-1] default = 0.0 PARALLEL_RESTARTFILES = True ! [Boolean] default = False ! If true, each processor writes its own restart file, otherwise a single ! restart file is generated -STORE_CORIOLIS_ACCEL = False ! === module MOM_tracer_flow_control === USE_IDEAL_AGE_TRACER = False ! [Boolean] default = False diff --git a/parm/mom6/MOM_input_template_100 b/parm/mom6/MOM_input_template_100 index 51d171b11f..8b616ad27f 100644 --- a/parm/mom6/MOM_input_template_100 +++ b/parm/mom6/MOM_input_template_100 @@ -210,7 +210,6 @@ TFREEZE_FORM = "MILLERO_78" ! default = "LINEAR" PARALLEL_RESTARTFILES = True ! [Boolean] default = False ! If true, each processor writes its own restart file, otherwise a single ! restart file is generated -STORE_CORIOLIS_ACCEL = False ! === module MOM_tracer_flow_control === USE_IDEAL_AGE_TRACER = False ! [Boolean] default = False diff --git a/parm/mom6/MOM_input_template_500 b/parm/mom6/MOM_input_template_500 index ac0eb1642e..5a378caeb0 100644 --- a/parm/mom6/MOM_input_template_500 +++ b/parm/mom6/MOM_input_template_500 @@ -132,7 +132,6 @@ TFREEZE_FORM = "MILLERO_78" ! default = "LINEAR" ! === module MOM_restart === RESTART_CHECKSUMS_REQUIRED = False -STORE_CORIOLIS_ACCEL = False ! === module MOM_tracer_flow_control === ! 
=== module MOM_coord_initialization === diff --git a/parm/parm_fv3diag/diag_table b/parm/parm_fv3diag/diag_table index 8b7982a058..37421f8a4f 100644 --- a/parm/parm_fv3diag/diag_table +++ b/parm/parm_fv3diag/diag_table @@ -100,6 +100,13 @@ "gfs_dyn", "hs", "hgtsfc", "fv3_history", "all", .false., "none", 2 "gfs_phys", "cldfra", "cldfra", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "frzr", "frzr", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "frzrb", "frzrb", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "frozr", "frozr", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "frozrb", "frozrb", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "tsnowp", "tsnowp", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "tsnowpb", "tsnowpb", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "rhonewsn", "rhonewsn", "fv3_history2d", "all", .false., "none", 2 "gfs_phys", "ALBDO_ave", "albdo_ave", "fv3_history2d", "all", .false., "none", 2 "gfs_phys", "cnvprcp_ave", "cprat_ave", "fv3_history2d", "all", .false., "none", 2 "gfs_phys", "cnvprcpb_ave", "cpratb_ave", "fv3_history2d", "all", .false., "none", 2 diff --git a/parm/parm_fv3diag/diag_table_cpl b/parm/parm_fv3diag/diag_table_cpl deleted file mode 100644 index 6974dafac3..0000000000 --- a/parm/parm_fv3diag/diag_table_cpl +++ /dev/null @@ -1,297 +0,0 @@ -#20161003.00Z.C96.64bit.non-mono -#2016 10 03 00 0 0 - -"grid_spec", -1, "months", 1, "days", "time" -"atmos_4xdaily", 6, "hours", 1, "days", "time" -"atmos_static", -1, "hours", 1, "hours", "time" -"fv3_history", 0, "hours", 1, "hours", "time" -"fv3_history2d", 0, "hours", 1, "hours", "time" -###################### -"ocn%4yr%2mo%2dy%2hr", 6, "hours", 1, "hours", "time", 6, "hours", "1901 1 1 0 0 0" -"ocn_daily%4yr%2mo%2dy", 1, "days", 1, "days", "time", 1, "days", "1901 1 1 0 0 0" -############################################## -# static fields - "ocean_model", "geolon", "geolon", "ocn%4yr%2mo%2dy%2hr", "all", .false., 
"none", 2 - "ocean_model", "geolat", "geolat", "ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2 - "ocean_model", "geolon_c", "geolon_c", "ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2 - "ocean_model", "geolat_c", "geolat_c", "ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2 - "ocean_model", "geolon_u", "geolon_u", "ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2 - "ocean_model", "geolat_u", "geolat_u", "ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2 - "ocean_model", "geolon_v", "geolon_v", "ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2 - "ocean_model", "geolat_v", "geolat_v", "ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2 -# "ocean_model", "depth_ocean", "depth_ocean", "ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2 -# "ocean_model", "wet", "wet", "ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2 - "ocean_model", "wet_c", "wet_c", "ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2 - "ocean_model", "wet_u", "wet_u", "ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2 - "ocean_model", "wet_v", "wet_v", "ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2 - "ocean_model", "sin_rot", "sin_rot", "ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2 - "ocean_model", "cos_rot", "cos_rot", "ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2 - -# ocean output TSUV and others - "ocean_model", "SSH", "SSH", "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 - "ocean_model", "SST", "SST", "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 - "ocean_model", "SSS", "SSS", "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 - "ocean_model", "speed", "speed", "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 - "ocean_model", "SSU", "SSU", "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 - "ocean_model", "SSV", "SSV", "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 - "ocean_model", "frazil", "frazil", "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 - "ocean_model", "ePBL_h_ML","ePBL", "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 - "ocean_model", "MLD_003", "MLD_003", "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 - "ocean_model", 
"MLD_0125", "MLD_0125", "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 - -# save daily SST - "ocean_model", "geolon", "geolon", "ocn_daily%4yr%2mo%2dy", "all", .false., "none", 2 - "ocean_model", "geolat", "geolat", "ocn_daily%4yr%2mo%2dy", "all", .false., "none", 2 - "ocean_model", "geolon_c", "geolon_c", "ocn_daily%4yr%2mo%2dy", "all", .false., "none", 2 - "ocean_model", "geolat_c", "geolat_c", "ocn_daily%4yr%2mo%2dy", "all", .false., "none", 2 - "ocean_model", "geolon_u", "geolon_u", "ocn_daily%4yr%2mo%2dy", "all", .false., "none", 2 - "ocean_model", "geolat_u", "geolat_u", "ocn_daily%4yr%2mo%2dy", "all", .false., "none", 2 - "ocean_model", "geolon_v", "geolon_v", "ocn_daily%4yr%2mo%2dy", "all", .false., "none", 2 - "ocean_model", "geolat_v", "geolat_v", "ocn_daily%4yr%2mo%2dy", "all", .false., "none", 2 - "ocean_model", "SST", "sst", "ocn_daily%4yr%2mo%2dy", "all", .true., "none", 2 - "ocean_model", "latent", "latent", "ocn_daily%4yr%2mo%2dy","all",.true.,"none",2 - "ocean_model", "sensible", "sensible", "ocn_daily%4yr%2mo%2dy","all",.true.,"none",2 - "ocean_model", "SW", "SW", "ocn_daily%4yr%2mo%2dy","all",.true.,"none",2 - "ocean_model", "LW", "LW", "ocn_daily%4yr%2mo%2dy","all",.true.,"none",2 - "ocean_model", "evap", "evap", "ocn_daily%4yr%2mo%2dy","all",.true.,"none",2 - "ocean_model", "lprec", "lprec", "ocn_daily%4yr%2mo%2dy","all",.true.,"none",2 - "ocean_model", "taux", "taux", "ocn_daily%4yr%2mo%2dy","all",.true.,"none",2 - "ocean_model", "tauy", "tauy", "ocn_daily%4yr%2mo%2dy","all",.true.,"none",2 - -# Z-Space Fields Provided for CMIP6 (CMOR Names): -#=============================================== - "ocean_model_z","uo","uo" ,"ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 - "ocean_model_z","vo","vo" ,"ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 - "ocean_model_z","so","so" ,"ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 - "ocean_model_z","temp","temp" ,"ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 - -# forcing - "ocean_model", "taux", "taux", 
"ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 - "ocean_model", "tauy", "tauy", "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 - "ocean_model", "latent", "latent", "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 - "ocean_model", "sensible", "sensible", "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 - "ocean_model", "SW", "SW", "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 - "ocean_model", "LW", "LW", "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 - "ocean_model", "evap", "evap", "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 - "ocean_model", "lprec", "lprec", "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 - "ocean_model", "lrunoff", "lrunoff", "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 -# "ocean_model", "frunoff", "frunoff", "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 - "ocean_model", "fprec", "fprec", "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 - "ocean_model", "LwLatSens", "LwLatSens", "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 - "ocean_model", "Heat_PmE", "Heat_PmE", "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 - -#============================================================================================= -"gfs_dyn", "ucomp", "ugrd", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "vcomp", "vgrd", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "sphum", "spfh", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "temp", "tmp", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "liq_wat", "clwmr", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "ice_wat", "icmr", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "snowwat", "snmr", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "rainwat", "rwmr", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "graupel", "grle", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "ice_nc", "nccice", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "rain_nc", "nconrd", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "o3mr", "o3mr", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "cld_amt", "cld_amt", 
"fv3_history", "all", .false., "none", 2 -"gfs_dyn", "delp", "dpres", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "delz", "delz", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "pfhy", "preshy", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "pfnh", "presnh", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "w", "dzdt", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "ps", "pressfc", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "hs", "hgtsfc", "fv3_history", "all", .false., "none", 2 - -"gfs_phys", "cldfra", "cldfra", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "ALBDO_ave", "albdo_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "cnvprcp_ave", "cprat_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "cnvprcpb_ave", "cpratb_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "totprcp_ave", "prate_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "totprcpb_ave", "prateb_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "DLWRF", "dlwrf_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "DLWRFI", "dlwrf", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "ULWRF", "ulwrf_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "ULWRFI", "ulwrf", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "DSWRF", "dswrf_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "DSWRFI", "dswrf", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "USWRF", "uswrf_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "USWRFI", "uswrf", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "DSWRFtoa", "dswrf_avetoa", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "USWRFtoa", "uswrf_avetoa", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "ULWRFtoa", "ulwrf_avetoa", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "gflux_ave", "gflux_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "hpbl", 
"hpbl", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "lhtfl_ave", "lhtfl_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "shtfl_ave", "shtfl_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "pwat", "pwat", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "soilm", "soilm", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "TCDC_aveclm", "tcdc_aveclm", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "TCDC_avebndcl", "tcdc_avebndcl", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "TCDC_avelcl", "tcdc_avelcl", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "TCDC_avemcl", "tcdc_avemcl", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "TCDC_avehcl", "tcdc_avehcl", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "TCDCcnvcl", "tcdccnvcl", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "PRES_avelct", "pres_avelct", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "PRES_avelcb", "pres_avelcb", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "PRES_avemct", "pres_avemct", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "PRES_avemcb", "pres_avemcb", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "PRES_avehct", "pres_avehct", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "PRES_avehcb", "pres_avehcb", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "PREScnvclt", "prescnvclt", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "PREScnvclb", "prescnvclb", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "TEMP_avehct", "tmp_avehct", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "TEMP_avemct", "tmp_avemct", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "TEMP_avelct", "tmp_avelct", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "u-gwd_ave", "u-gwd_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "v-gwd_ave", "v-gwd_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", 
"dusfc", "uflx_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "dvsfc", "vflx_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "acond", "acond", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "cduvb_ave", "cduvb_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "cpofp", "cpofp", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "duvb_ave", "duvb_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "csdlf_ave", "csdlf", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "csusf_ave", "csusf", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "csusf_avetoa", "csusftoa", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "csdsf_ave", "csdsf", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "csulf_ave", "csulf", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "csulf_avetoa", "csulftoa", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "cwork_ave", "cwork_aveclm", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "evbs_ave", "evbs_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "evcw_ave", "evcw_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "fldcp", "fldcp", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "hgt_hyblev1", "hgt_hyblev1", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "spfh_hyblev1", "spfh_hyblev1", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "ugrd_hyblev1", "ugrd_hyblev1", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "vgrd_hyblev1", "vgrd_hyblev1", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "tmp_hyblev1", "tmp_hyblev1", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "gfluxi", "gflux", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "lhtfl", "lhtfl", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "shtfl", "shtfl", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "pevpr", "pevpr", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", 
"pevpr_ave", "pevpr_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "sbsno_ave", "sbsno_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "sfexc", "sfexc", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "snohf", "snohf", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "snowc_ave", "snowc_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "spfhmax2m", "spfhmax_max2m", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "spfhmin2m", "spfhmin_min2m", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "tmpmax2m", "tmax_max2m", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "tmpmin2m", "tmin_min2m", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "ssrun_acc", "ssrun_acc", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "sunsd_acc", "sunsd_acc", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "watr_acc", "watr_acc", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "wilt", "wilt", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "vbdsf_ave", "vbdsf_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "vddsf_ave", "vddsf_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "nbdsf_ave", "nbdsf_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "nddsf_ave", "nddsf_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "trans_ave", "trans_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "psurf", "pressfc", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "u10m", "ugrd10m", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "v10m", "vgrd10m", "fv3_history2d", "all", .false., "none", 2 - -"gfs_phys", "pahi", "pahi", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "pah_ave", "pah_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "ecan_acc", "ecan_acc", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "etran_acc", "etran_acc", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", 
"edir_acc", "edir_acc", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "wa_acc", "wa_acc", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "lfrac", "lfrac", "fv3_history2d", "all", .false., "none", 2 - -"gfs_sfc", "crain", "crain", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "tprcp", "tprcp", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "rainc", "cnvprcp", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "hgtsfc", "orog", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "weasd", "weasd", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "f10m", "f10m", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "q2m", "spfh2m", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "t2m", "tmp2m", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "tsfc", "tmpsfc", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "vtype", "vtype", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "stype", "sotyp", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "slmsksfc", "land", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "vfracsfc", "veg", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "zorlsfc", "sfcr", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "uustar", "fricv", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "soilt1", "soilt1" "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "soilt2", "soilt2" "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "soilt3", "soilt3" "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "soilt4", "soilt4" "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "soilw1", "soilw1" "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "soilw2", "soilw2" "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "soilw3", "soilw3" "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "soilw4", "soilw4" "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "slc_1", "soill1", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", 
"slc_2", "soill2", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "slc_3", "soill3", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "slc_4", "soill4", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "slope", "sltyp", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "alnsf", "alnsf", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "alnwf", "alnwf", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "alvsf", "alvsf", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "alvwf", "alvwf", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "canopy", "cnwat", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "facsf", "facsf", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "facwf", "facwf", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "ffhh", "ffhh", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "ffmm", "ffmm", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "fice", "icec", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "hice", "icetk", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "snoalb", "snoalb", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "shdmax", "shdmax", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "shdmin", "shdmin", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "snowd", "snod", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "tg3", "tg3", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "tisfc", "tisfc", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "tref", "tref", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "z_c", "zc", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "c_0", "c0", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "c_d", "cd", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "w_0", "w0", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "w_d", "wd", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "xt", "xt", "fv3_history2d", "all", .false., "none", 2 
-"gfs_sfc", "xz", "xz", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "dt_cool", "dtcool", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "xs", "xs", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "xu", "xu", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "xv", "xv", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "xtts", "xtts", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "xzts", "xzts", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "d_conv", "dconv", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "qrain", "qrain", "fv3_history2d", "all", .false., "none", 2 - - -#============================================================================================= -# -#====> This file can be used with diag_manager/v2.0a (or higher) <==== -# -# -# FORMATS FOR FILE ENTRIES (not all input values are used) -# ------------------------ -# -#"file_name", output_freq, "output_units", format, "time_units", "long_name", -# -# -#output_freq: > 0 output frequency in "output_units" -# = 0 output frequency every time step -# =-1 output frequency at end of run -# -#output_units = units used for output frequency -# (years, months, days, minutes, hours, seconds) -# -#time_units = units used to label the time axis -# (days, minutes, hours, seconds) -# -# -# FORMAT FOR FIELD ENTRIES (not all input values are used) -# ------------------------ -# -#"module_name", "field_name", "output_name", "file_name" "time_sampling", time_avg, "other_opts", packing -# -#time_avg = .true. or .false. -# -#packing = 1 double precision -# = 2 float -# = 4 packed 16-bit integers -# = 8 packed 1-byte (not tested?) 
diff --git a/parm/parm_fv3diag/diag_table_da_gsd b/parm/parm_fv3diag/diag_table_da_gsd deleted file mode 100644 index 334c949ac5..0000000000 --- a/parm/parm_fv3diag/diag_table_da_gsd +++ /dev/null @@ -1,329 +0,0 @@ -#20161003.00Z.C96.64bit.non-mono -#2016 10 03 00 0 0 - -"grid_spec", -1, "months", 1, "days", "time" -"atmos_4xdaily", 6, "hours", 1, "days", "time" -"atmos_static", -1, "hours", 1, "hours", "time" -"fv3_history", 0, "hours", 1, "hours", "time" -"fv3_history2d", 0, "hours", 1, "hours", "time" - -# -#======================= -# ATMOSPHERE DIAGNOSTICS -#======================= -### -# grid_spec -### - "dynamics", "grid_lon", "grid_lon", "grid_spec", "all", .false., "none", 2, - "dynamics", "grid_lat", "grid_lat", "grid_spec", "all", .false., "none", 2, - "dynamics", "grid_lont", "grid_lont", "grid_spec", "all", .false., "none", 2, - "dynamics", "grid_latt", "grid_latt", "grid_spec", "all", .false., "none", 2, - "dynamics", "area", "area", "grid_spec", "all", .false., "none", 2, -### -# 4x daily output -### - "dynamics", "slp", "slp", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "vort850", "vort850", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "vort200", "vort200", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "us", "us", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "u1000", "u1000", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "u850", "u850", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "u700", "u700", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "u500", "u500", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "u200", "u200", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "u100", "u100", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "u50", "u50", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "u10", "u10", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "vs", "vs", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", 
"v1000", "v1000", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "v850", "v850", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "v700", "v700", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "v500", "v500", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "v200", "v200", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "v100", "v100", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "v50", "v50", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "v10", "v10", "atmos_4xdaily", "all", .false., "none", 2 -#### - "dynamics", "tm", "tm", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "t1000", "t1000", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "t850", "t850", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "t700", "t700", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "t500", "t500", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "t200", "t200", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "t100", "t100", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "t50", "t50", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "t10", "t10", "atmos_4xdaily", "all", .false., "none", 2 -#### - "dynamics", "h1000", "h1000", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "h850", "h850", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "h700", "h700", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "h500", "h500", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "h200", "h200", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "h100", "h100", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "h50", "h50", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "h10", "h10", "atmos_4xdaily", "all", .false., "none", 2 -#### -#"dynamics", "w1000", "w1000", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "w850", "w850", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "w700", "w700", 
"atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "w500", "w500", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "w200", "w200", "atmos_4xdaily", "all", .false., "none", 2 -#### - "dynamics", "q1000", "q1000", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "q850", "q850", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "q700", "q700", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "q500", "q500", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "q200", "q200", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "q100", "q100", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "q50", "q50", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "q10", "q10", "atmos_4xdaily", "all", .false., "none", 2 -#### - "dynamics", "rh1000", "rh1000", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "rh850", "rh850", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "rh700", "rh700", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "rh500", "rh500", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "rh200", "rh200", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "omg1000", "omg1000", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "omg850", "omg850", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "omg700", "omg700", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "omg500", "omg500", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "omg200", "omg200", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "omg100", "omg100", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "omg50", "omg50", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "omg10", "omg10", "atmos_4xdaily", "all", .false., "none", 2 -### -# gfs static data -### - "dynamics", "pk", "pk", "atmos_static", "all", .false., "none", 2 - "dynamics", "bk", "bk", "atmos_static", "all", .false., "none", 2 - "dynamics", "hyam", "hyam", "atmos_static", "all", 
.false., "none", 2 - "dynamics", "hybm", "hybm", "atmos_static", "all", .false., "none", 2 - "dynamics", "zsurf", "zsurf", "atmos_static", "all", .false., "none", 2 -### -# FV3 variabls needed for NGGPS evaluation -### -"gfs_dyn", "ucomp", "ugrd", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "vcomp", "vgrd", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "sphum", "spfh", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "temp", "tmp", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "liq_wat", "clwmr", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "o3mr", "o3mr", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "delp", "dpres", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "delz", "delz", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "w", "dzdt", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "ice_wat", "icmr", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "rainwat", "rwmr", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "snowwat", "snmr", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "graupel", "grle", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "ps", "pressfc", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "hs", "hgtsfc", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "ice_nc", "nicp", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "rain_nc", "ntrnc", "fv3_history", "all", .false., "none", 2 - -"gfs_phys", "ALBDO_ave", "albdo_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "cnvprcp_ave", "cprat_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "cnvprcpb_ave", "cpratb_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "totprcp_ave", "prate_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "totprcpb_ave", "prateb_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "DLWRF", "dlwrf_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "DLWRFI", "dlwrf", "fv3_history2d", "all", .false., "none", 2 
-"gfs_phys", "ULWRF", "ulwrf_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "ULWRFI", "ulwrf", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "DSWRF", "dswrf_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "DSWRFI", "dswrf", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "USWRF", "uswrf_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "USWRFI", "uswrf", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "DSWRFtoa", "dswrf_avetoa","fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "USWRFtoa", "uswrf_avetoa","fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "ULWRFtoa", "ulwrf_avetoa","fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "gflux_ave", "gflux_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "hpbl", "hpbl", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "lhtfl_ave", "lhtfl_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "shtfl_ave", "shtfl_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "pwat", "pwat", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "soilm", "soilm", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "TCDC_aveclm", "tcdc_aveclm", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "TCDC_avebndcl", "tcdc_avebndcl", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "TCDC_avehcl", "tcdc_avehcl", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "TCDC_avelcl", "tcdc_avelcl", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "TCDC_avemcl", "tcdc_avemcl", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "TCDCcnvcl", "tcdccnvcl", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "PREScnvclt", "prescnvclt", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "PREScnvclb", "prescnvclb", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "PRES_avehct", "pres_avehct", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "PRES_avehcb", "pres_avehcb", 
"fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "TEMP_avehct", "tmp_avehct", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "PRES_avemct", "pres_avemct", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "PRES_avemcb", "pres_avemcb", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "TEMP_avemct", "tmp_avemct", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "PRES_avelct", "pres_avelct", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "PRES_avelcb", "pres_avelcb", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "TEMP_avelct", "tmp_avelct", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "u-gwd_ave", "u-gwd_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "v-gwd_ave", "v-gwd_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "dusfc", "uflx_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "dvsfc", "vflx_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_dyn", "refl_10cm", "refl10cm", "fv3_history", "all", .false., "none", 2 - -"gfs_phys", "psurf", "pressfc", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "u10m", "ugrd10m", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "v10m", "vgrd10m", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "crain", "crain", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "tprcp", "tprcp", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "hgtsfc", "orog", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "weasd", "weasd", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "f10m", "f10m", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "q2m", "spfh2m", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "t2m", "tmp2m", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "tsfc", "tmpsfc", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "vtype", "vtype", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "stype", "sotyp", "fv3_history2d", "all", .false., "none", 2 
-"gfs_sfc", "slmsksfc", "land", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "vfracsfc", "veg", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "zorlsfc", "sfcr", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "uustar", "fricv", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "soilt1", "soilt1" "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "soilt2", "soilt2" "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "soilt3", "soilt3" "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "soilt4", "soilt4" "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "soilw1", "soilw1" "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "soilw2", "soilw2" "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "soilw3", "soilw3" "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "soilw4", "soilw4" "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "slc_1", "soill1", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "slc_2", "soill2", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "slc_3", "soill3", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "slc_4", "soill4", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "slope", "sltyp", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "alnsf", "alnsf", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "alnwf", "alnwf", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "alvsf", "alvsf", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "alvwf", "alvwf", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "canopy", "cnwat", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "facsf", "facsf", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "facwf", "facwf", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "ffhh", "ffhh", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "ffmm", "ffmm", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "fice", "icec", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", 
"hice", "icetk", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "snoalb", "snoalb", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "shdmax", "shdmax", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "shdmin", "shdmin", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "snowd", "snod", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "tg3", "tg3", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "tisfc", "tisfc", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "tref", "tref", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "z_c", "zc", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "c_0", "c0", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "c_d", "cd", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "w_0", "w0", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "w_d", "wd", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "xt", "xt", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "xz", "xz", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "dt_cool", "dtcool", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "xs", "xs", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "xu", "xu", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "xv", "xv", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "xtts", "xtts", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "xzts", "xzts", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "d_conv", "dconv", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "qrain", "qrain", "fv3_history2d", "all", .false., "none", 2 - -"gfs_phys", "acond", "acond", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "cduvb_ave", "cduvb_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "cpofp", "cpofp", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "duvb_ave", "duvb_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "csdlf_ave", "csdlf", "fv3_history2d", "all", .false., "none", 2 
-"gfs_phys", "csusf_ave", "csusf", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "csusf_avetoa", "csusftoa", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "csdsf_ave", "csdsf", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "csulf_ave", "csulf", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "csulf_avetoa", "csulftoa", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "cwork_ave", "cwork_aveclm", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "evbs_ave", "evbs_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "evcw_ave", "evcw_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "fldcp", "fldcp", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "hgt_hyblev1", "hgt_hyblev1", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "spfh_hyblev1", "spfh_hyblev1", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "ugrd_hyblev1", "ugrd_hyblev1", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "vgrd_hyblev1", "vgrd_hyblev1", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "tmp_hyblev1", "tmp_hyblev1", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "gfluxi", "gflux", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "lhtfl", "lhtfl", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "shtfl", "shtfl", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "pevpr", "pevpr", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "pevpr_ave", "pevpr_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "sbsno_ave", "sbsno_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "sfexc", "sfexc", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "snohf", "snohf", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "snowc_ave", "snowc_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "spfhmax2m", "spfhmax_max2m", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "spfhmin2m", "spfhmin_min2m", "fv3_history2d", 
"all", .false., "none", 2 -"gfs_phys", "tmpmax2m", "tmax_max2m", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "tmpmin2m", "tmin_min2m", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "ssrun_acc", "ssrun_acc", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "sunsd_acc", "sunsd_acc", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "watr_acc", "watr_acc", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "wilt", "wilt", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "vbdsf_ave", "vbdsf_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "vddsf_ave", "vddsf_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "nbdsf_ave", "nbdsf_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "nddsf_ave", "nddsf_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "trans_ave", "trans_ave", "fv3_history2d", "all", .false., "none", 2 -# Aerosols (CCN, IN) from Thompson microphysics -"gfs_dyn", "nwfa", "nwfa", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "nifa", "nifa", "fv3_history", "all", .false., "none", 2 -"gfs_sfc", "nwfa2d", "nwfa2d", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "nifa2d", "nifa2d", "fv3_history2d", "all", .false., "none", 2 -# Cloud effective radii from Thompson and WSM6 microphysics -"gfs_phys", "cleffr", "cleffr", "fv3_history", "all", .false., "none", 2 -"gfs_phys", "cieffr", "cieffr", "fv3_history", "all", .false., "none", 2 -"gfs_phys", "cseffr", "cseffr", "fv3_history", "all", .false., "none", 2 -# Prognostic/diagnostic variables from MYNN -"gfs_dyn", "QC_BL", "qc_bl", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "CLDFRA_BL", "cldfra_bl", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "EL_PBL", "el_pbl", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "QKE", "qke", "fv3_history", "all", .false., "none", 2 -"gfs_sfc", "maxmf", "maxmf", "fv3_history2d", "all", .false., "none", 2 -#"gfs_sfc", "nupdraft", "nupdrafts", "fv3_history2d", 
"all", .false., "none", 2 -#"gfs_sfc", "ktop_shallow", "ktop_shallow", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "zol", "zol", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "flhc", "flhc", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "flqc", "flqc", "fv3_history2d", "all", .false., "none", 2 -# Prognostic/diagnostic variables from RUC LSM -"gfs_sfc", "snowfall_acc", "snowfall_acc", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "swe_snowfall_acc", "swe_snowfall_acc", "fv3_history2d", "all", .false., "none", 2 - -#============================================================================================= -# -#====> This file can be used with diag_manager/v2.0a (or higher) <==== -# -# -# FORMATS FOR FILE ENTRIES (not all input values are used) -# ------------------------ -# -#"file_name", output_freq, "output_units", format, "time_units", "long_name", -# -# -#output_freq: > 0 output frequency in "output_units" -# = 0 output frequency every time step -# =-1 output frequency at end of run -# -#output_units = units used for output frequency -# (years, months, days, minutes, hours, seconds) -# -#time_units = units used to label the time axis -# (days, minutes, hours, seconds) -# -# -# FORMAT FOR FIELD ENTRIES (not all input values are used) -# ------------------------ -# -#"module_name", "field_name", "output_name", "file_name" "time_sampling", time_avg, "other_opts", packing -# -#time_avg = .true. or .false. -# -#packing = 1 double precision -# = 2 float -# = 4 packed 16-bit integers -# = 8 packed 1-byte (not tested?) 
diff --git a/parm/parm_fv3diag/diag_table_gsd b/parm/parm_fv3diag/diag_table_gsd deleted file mode 100644 index 334c949ac5..0000000000 --- a/parm/parm_fv3diag/diag_table_gsd +++ /dev/null @@ -1,329 +0,0 @@ -#20161003.00Z.C96.64bit.non-mono -#2016 10 03 00 0 0 - -"grid_spec", -1, "months", 1, "days", "time" -"atmos_4xdaily", 6, "hours", 1, "days", "time" -"atmos_static", -1, "hours", 1, "hours", "time" -"fv3_history", 0, "hours", 1, "hours", "time" -"fv3_history2d", 0, "hours", 1, "hours", "time" - -# -#======================= -# ATMOSPHERE DIAGNOSTICS -#======================= -### -# grid_spec -### - "dynamics", "grid_lon", "grid_lon", "grid_spec", "all", .false., "none", 2, - "dynamics", "grid_lat", "grid_lat", "grid_spec", "all", .false., "none", 2, - "dynamics", "grid_lont", "grid_lont", "grid_spec", "all", .false., "none", 2, - "dynamics", "grid_latt", "grid_latt", "grid_spec", "all", .false., "none", 2, - "dynamics", "area", "area", "grid_spec", "all", .false., "none", 2, -### -# 4x daily output -### - "dynamics", "slp", "slp", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "vort850", "vort850", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "vort200", "vort200", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "us", "us", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "u1000", "u1000", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "u850", "u850", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "u700", "u700", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "u500", "u500", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "u200", "u200", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "u100", "u100", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "u50", "u50", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "u10", "u10", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "vs", "vs", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "v1000", 
"v1000", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "v850", "v850", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "v700", "v700", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "v500", "v500", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "v200", "v200", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "v100", "v100", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "v50", "v50", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "v10", "v10", "atmos_4xdaily", "all", .false., "none", 2 -#### - "dynamics", "tm", "tm", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "t1000", "t1000", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "t850", "t850", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "t700", "t700", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "t500", "t500", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "t200", "t200", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "t100", "t100", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "t50", "t50", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "t10", "t10", "atmos_4xdaily", "all", .false., "none", 2 -#### - "dynamics", "h1000", "h1000", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "h850", "h850", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "h700", "h700", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "h500", "h500", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "h200", "h200", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "h100", "h100", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "h50", "h50", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "h10", "h10", "atmos_4xdaily", "all", .false., "none", 2 -#### -#"dynamics", "w1000", "w1000", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "w850", "w850", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "w700", "w700", 
"atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "w500", "w500", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "w200", "w200", "atmos_4xdaily", "all", .false., "none", 2 -#### - "dynamics", "q1000", "q1000", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "q850", "q850", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "q700", "q700", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "q500", "q500", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "q200", "q200", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "q100", "q100", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "q50", "q50", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "q10", "q10", "atmos_4xdaily", "all", .false., "none", 2 -#### - "dynamics", "rh1000", "rh1000", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "rh850", "rh850", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "rh700", "rh700", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "rh500", "rh500", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "rh200", "rh200", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "omg1000", "omg1000", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "omg850", "omg850", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "omg700", "omg700", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "omg500", "omg500", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "omg200", "omg200", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "omg100", "omg100", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "omg50", "omg50", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "omg10", "omg10", "atmos_4xdaily", "all", .false., "none", 2 -### -# gfs static data -### - "dynamics", "pk", "pk", "atmos_static", "all", .false., "none", 2 - "dynamics", "bk", "bk", "atmos_static", "all", .false., "none", 2 - "dynamics", "hyam", "hyam", "atmos_static", "all", 
.false., "none", 2 - "dynamics", "hybm", "hybm", "atmos_static", "all", .false., "none", 2 - "dynamics", "zsurf", "zsurf", "atmos_static", "all", .false., "none", 2 -### -# FV3 variabls needed for NGGPS evaluation -### -"gfs_dyn", "ucomp", "ugrd", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "vcomp", "vgrd", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "sphum", "spfh", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "temp", "tmp", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "liq_wat", "clwmr", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "o3mr", "o3mr", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "delp", "dpres", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "delz", "delz", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "w", "dzdt", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "ice_wat", "icmr", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "rainwat", "rwmr", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "snowwat", "snmr", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "graupel", "grle", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "ps", "pressfc", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "hs", "hgtsfc", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "ice_nc", "nicp", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "rain_nc", "ntrnc", "fv3_history", "all", .false., "none", 2 - -"gfs_phys", "ALBDO_ave", "albdo_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "cnvprcp_ave", "cprat_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "cnvprcpb_ave", "cpratb_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "totprcp_ave", "prate_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "totprcpb_ave", "prateb_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "DLWRF", "dlwrf_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "DLWRFI", "dlwrf", "fv3_history2d", "all", .false., "none", 2 
-"gfs_phys", "ULWRF", "ulwrf_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "ULWRFI", "ulwrf", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "DSWRF", "dswrf_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "DSWRFI", "dswrf", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "USWRF", "uswrf_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "USWRFI", "uswrf", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "DSWRFtoa", "dswrf_avetoa","fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "USWRFtoa", "uswrf_avetoa","fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "ULWRFtoa", "ulwrf_avetoa","fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "gflux_ave", "gflux_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "hpbl", "hpbl", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "lhtfl_ave", "lhtfl_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "shtfl_ave", "shtfl_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "pwat", "pwat", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "soilm", "soilm", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "TCDC_aveclm", "tcdc_aveclm", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "TCDC_avebndcl", "tcdc_avebndcl", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "TCDC_avehcl", "tcdc_avehcl", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "TCDC_avelcl", "tcdc_avelcl", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "TCDC_avemcl", "tcdc_avemcl", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "TCDCcnvcl", "tcdccnvcl", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "PREScnvclt", "prescnvclt", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "PREScnvclb", "prescnvclb", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "PRES_avehct", "pres_avehct", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "PRES_avehcb", "pres_avehcb", 
"fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "TEMP_avehct", "tmp_avehct", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "PRES_avemct", "pres_avemct", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "PRES_avemcb", "pres_avemcb", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "TEMP_avemct", "tmp_avemct", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "PRES_avelct", "pres_avelct", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "PRES_avelcb", "pres_avelcb", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "TEMP_avelct", "tmp_avelct", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "u-gwd_ave", "u-gwd_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "v-gwd_ave", "v-gwd_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "dusfc", "uflx_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "dvsfc", "vflx_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_dyn", "refl_10cm", "refl10cm", "fv3_history", "all", .false., "none", 2 - -"gfs_phys", "psurf", "pressfc", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "u10m", "ugrd10m", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "v10m", "vgrd10m", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "crain", "crain", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "tprcp", "tprcp", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "hgtsfc", "orog", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "weasd", "weasd", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "f10m", "f10m", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "q2m", "spfh2m", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "t2m", "tmp2m", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "tsfc", "tmpsfc", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "vtype", "vtype", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "stype", "sotyp", "fv3_history2d", "all", .false., "none", 2 
-"gfs_sfc", "slmsksfc", "land", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "vfracsfc", "veg", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "zorlsfc", "sfcr", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "uustar", "fricv", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "soilt1", "soilt1" "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "soilt2", "soilt2" "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "soilt3", "soilt3" "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "soilt4", "soilt4" "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "soilw1", "soilw1" "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "soilw2", "soilw2" "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "soilw3", "soilw3" "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "soilw4", "soilw4" "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "slc_1", "soill1", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "slc_2", "soill2", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "slc_3", "soill3", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "slc_4", "soill4", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "slope", "sltyp", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "alnsf", "alnsf", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "alnwf", "alnwf", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "alvsf", "alvsf", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "alvwf", "alvwf", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "canopy", "cnwat", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "facsf", "facsf", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "facwf", "facwf", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "ffhh", "ffhh", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "ffmm", "ffmm", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "fice", "icec", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", 
"hice", "icetk", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "snoalb", "snoalb", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "shdmax", "shdmax", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "shdmin", "shdmin", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "snowd", "snod", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "tg3", "tg3", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "tisfc", "tisfc", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "tref", "tref", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "z_c", "zc", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "c_0", "c0", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "c_d", "cd", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "w_0", "w0", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "w_d", "wd", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "xt", "xt", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "xz", "xz", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "dt_cool", "dtcool", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "xs", "xs", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "xu", "xu", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "xv", "xv", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "xtts", "xtts", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "xzts", "xzts", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "d_conv", "dconv", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "qrain", "qrain", "fv3_history2d", "all", .false., "none", 2 - -"gfs_phys", "acond", "acond", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "cduvb_ave", "cduvb_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "cpofp", "cpofp", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "duvb_ave", "duvb_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "csdlf_ave", "csdlf", "fv3_history2d", "all", .false., "none", 2 
-"gfs_phys", "csusf_ave", "csusf", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "csusf_avetoa", "csusftoa", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "csdsf_ave", "csdsf", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "csulf_ave", "csulf", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "csulf_avetoa", "csulftoa", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "cwork_ave", "cwork_aveclm", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "evbs_ave", "evbs_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "evcw_ave", "evcw_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "fldcp", "fldcp", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "hgt_hyblev1", "hgt_hyblev1", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "spfh_hyblev1", "spfh_hyblev1", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "ugrd_hyblev1", "ugrd_hyblev1", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "vgrd_hyblev1", "vgrd_hyblev1", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "tmp_hyblev1", "tmp_hyblev1", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "gfluxi", "gflux", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "lhtfl", "lhtfl", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "shtfl", "shtfl", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "pevpr", "pevpr", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "pevpr_ave", "pevpr_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "sbsno_ave", "sbsno_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "sfexc", "sfexc", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "snohf", "snohf", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "snowc_ave", "snowc_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "spfhmax2m", "spfhmax_max2m", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "spfhmin2m", "spfhmin_min2m", "fv3_history2d", 
"all", .false., "none", 2 -"gfs_phys", "tmpmax2m", "tmax_max2m", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "tmpmin2m", "tmin_min2m", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "ssrun_acc", "ssrun_acc", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "sunsd_acc", "sunsd_acc", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "watr_acc", "watr_acc", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "wilt", "wilt", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "vbdsf_ave", "vbdsf_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "vddsf_ave", "vddsf_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "nbdsf_ave", "nbdsf_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "nddsf_ave", "nddsf_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "trans_ave", "trans_ave", "fv3_history2d", "all", .false., "none", 2 -# Aerosols (CCN, IN) from Thompson microphysics -"gfs_dyn", "nwfa", "nwfa", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "nifa", "nifa", "fv3_history", "all", .false., "none", 2 -"gfs_sfc", "nwfa2d", "nwfa2d", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "nifa2d", "nifa2d", "fv3_history2d", "all", .false., "none", 2 -# Cloud effective radii from Thompson and WSM6 microphysics -"gfs_phys", "cleffr", "cleffr", "fv3_history", "all", .false., "none", 2 -"gfs_phys", "cieffr", "cieffr", "fv3_history", "all", .false., "none", 2 -"gfs_phys", "cseffr", "cseffr", "fv3_history", "all", .false., "none", 2 -# Prognostic/diagnostic variables from MYNN -"gfs_dyn", "QC_BL", "qc_bl", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "CLDFRA_BL", "cldfra_bl", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "EL_PBL", "el_pbl", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "QKE", "qke", "fv3_history", "all", .false., "none", 2 -"gfs_sfc", "maxmf", "maxmf", "fv3_history2d", "all", .false., "none", 2 -#"gfs_sfc", "nupdraft", "nupdrafts", "fv3_history2d", 
"all", .false., "none", 2 -#"gfs_sfc", "ktop_shallow", "ktop_shallow", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "zol", "zol", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "flhc", "flhc", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "flqc", "flqc", "fv3_history2d", "all", .false., "none", 2 -# Prognostic/diagnostic variables from RUC LSM -"gfs_sfc", "snowfall_acc", "snowfall_acc", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "swe_snowfall_acc", "swe_snowfall_acc", "fv3_history2d", "all", .false., "none", 2 - -#============================================================================================= -# -#====> This file can be used with diag_manager/v2.0a (or higher) <==== -# -# -# FORMATS FOR FILE ENTRIES (not all input values are used) -# ------------------------ -# -#"file_name", output_freq, "output_units", format, "time_units", "long_name", -# -# -#output_freq: > 0 output frequency in "output_units" -# = 0 output frequency every time step -# =-1 output frequency at end of run -# -#output_units = units used for output frequency -# (years, months, days, minutes, hours, seconds) -# -#time_units = units used to label the time axis -# (days, minutes, hours, seconds) -# -# -# FORMAT FOR FIELD ENTRIES (not all input values are used) -# ------------------------ -# -#"module_name", "field_name", "output_name", "file_name" "time_sampling", time_avg, "other_opts", packing -# -#time_avg = .true. or .false. -# -#packing = 1 double precision -# = 2 float -# = 4 packed 16-bit integers -# = 8 packed 1-byte (not tested?) 
diff --git a/parm/parm_fv3diag/diag_table_gsd_ruc b/parm/parm_fv3diag/diag_table_gsd_ruc deleted file mode 100644 index b8c3aef3e4..0000000000 --- a/parm/parm_fv3diag/diag_table_gsd_ruc +++ /dev/null @@ -1,344 +0,0 @@ -#20161003.00Z.C96.64bit.non-mono -#2016 10 03 00 0 0 - -"grid_spec", -1, "months", 1, "days", "time" -"atmos_4xdaily", 6, "hours", 1, "days", "time" -"atmos_static", -1, "hours", 1, "hours", "time" -"fv3_history", 0, "hours", 1, "hours", "time" -"fv3_history2d", 0, "hours", 1, "hours", "time" - -# -#======================= -# ATMOSPHERE DIAGNOSTICS -#======================= -### -# grid_spec -### - "dynamics", "grid_lon", "grid_lon", "grid_spec", "all", .false., "none", 2, - "dynamics", "grid_lat", "grid_lat", "grid_spec", "all", .false., "none", 2, - "dynamics", "grid_lont", "grid_lont", "grid_spec", "all", .false., "none", 2, - "dynamics", "grid_latt", "grid_latt", "grid_spec", "all", .false., "none", 2, - "dynamics", "area", "area", "grid_spec", "all", .false., "none", 2, -### -# 4x daily output -### - "dynamics", "slp", "slp", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "vort850", "vort850", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "vort200", "vort200", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "us", "us", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "u1000", "u1000", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "u850", "u850", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "u700", "u700", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "u500", "u500", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "u200", "u200", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "u100", "u100", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "u50", "u50", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "u10", "u10", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "vs", "vs", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", 
"v1000", "v1000", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "v850", "v850", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "v700", "v700", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "v500", "v500", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "v200", "v200", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "v100", "v100", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "v50", "v50", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "v10", "v10", "atmos_4xdaily", "all", .false., "none", 2 -#### - "dynamics", "tm", "tm", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "t1000", "t1000", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "t850", "t850", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "t700", "t700", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "t500", "t500", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "t200", "t200", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "t100", "t100", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "t50", "t50", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "t10", "t10", "atmos_4xdaily", "all", .false., "none", 2 -#### - "dynamics", "h1000", "h1000", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "h850", "h850", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "h700", "h700", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "h500", "h500", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "h200", "h200", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "h100", "h100", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "h50", "h50", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "h10", "h10", "atmos_4xdaily", "all", .false., "none", 2 -#### -#"dynamics", "w1000", "w1000", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "w850", "w850", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "w700", "w700", 
"atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "w500", "w500", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "w200", "w200", "atmos_4xdaily", "all", .false., "none", 2 -#### - "dynamics", "q1000", "q1000", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "q850", "q850", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "q700", "q700", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "q500", "q500", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "q200", "q200", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "q100", "q100", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "q50", "q50", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "q10", "q10", "atmos_4xdaily", "all", .false., "none", 2 -#### - "dynamics", "rh1000", "rh1000", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "rh850", "rh850", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "rh700", "rh700", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "rh500", "rh500", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "rh200", "rh200", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "omg1000", "omg1000", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "omg850", "omg850", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "omg700", "omg700", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "omg500", "omg500", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "omg200", "omg200", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "omg100", "omg100", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "omg50", "omg50", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "omg10", "omg10", "atmos_4xdaily", "all", .false., "none", 2 -### -# gfs static data -### - "dynamics", "pk", "pk", "atmos_static", "all", .false., "none", 2 - "dynamics", "bk", "bk", "atmos_static", "all", .false., "none", 2 - "dynamics", "hyam", "hyam", "atmos_static", "all", 
.false., "none", 2 - "dynamics", "hybm", "hybm", "atmos_static", "all", .false., "none", 2 - "dynamics", "zsurf", "zsurf", "atmos_static", "all", .false., "none", 2 -### -# FV3 variabls needed for NGGPS evaluation -### -"gfs_dyn", "ucomp", "ugrd", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "vcomp", "vgrd", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "sphum", "spfh", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "temp", "tmp", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "liq_wat", "clwmr", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "o3mr", "o3mr", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "delp", "dpres", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "delz", "delz", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "w", "dzdt", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "ice_wat", "icmr", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "rainwat", "rwmr", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "snowwat", "snmr", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "graupel", "grle", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "ps", "pressfc", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "hs", "hgtsfc", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "ice_nc", "nicp", "fv3_history", "all", .false., "none", 2 -#"gfs_dyn", "rain_nc", "ntrnc", "fv3_history", "all", .false., "none", 2 - -"gfs_phys", "ALBDO_ave", "albdo_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "cnvprcp_ave", "cprat_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "cnvprcpb_ave", "cpratb_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "totprcp_ave", "prate_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "totprcpb_ave", "prateb_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "DLWRF", "dlwrf_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "DLWRFI", "dlwrf", "fv3_history2d", "all", .false., "none", 2 
-"gfs_phys", "ULWRF", "ulwrf_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "ULWRFI", "ulwrf", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "DSWRF", "dswrf_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "DSWRFI", "dswrf", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "USWRF", "uswrf_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "USWRFI", "uswrf", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "DSWRFtoa", "dswrf_avetoa","fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "USWRFtoa", "uswrf_avetoa","fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "ULWRFtoa", "ulwrf_avetoa","fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "gflux_ave", "gflux_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "hpbl", "hpbl", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "lhtfl_ave", "lhtfl_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "shtfl_ave", "shtfl_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "pwat", "pwat", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "soilm", "soilm", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "TCDC_aveclm", "tcdc_aveclm", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "TCDC_avebndcl", "tcdc_avebndcl", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "TCDC_avehcl", "tcdc_avehcl", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "TCDC_avelcl", "tcdc_avelcl", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "TCDC_avemcl", "tcdc_avemcl", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "TCDCcnvcl", "tcdccnvcl", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "PREScnvclt", "prescnvclt", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "PREScnvclb", "prescnvclb", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "PRES_avehct", "pres_avehct", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "PRES_avehcb", "pres_avehcb", 
"fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "TEMP_avehct", "tmp_avehct", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "PRES_avemct", "pres_avemct", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "PRES_avemcb", "pres_avemcb", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "TEMP_avemct", "tmp_avemct", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "PRES_avelct", "pres_avelct", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "PRES_avelcb", "pres_avelcb", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "TEMP_avelct", "tmp_avelct", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "u-gwd_ave", "u-gwd_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "v-gwd_ave", "v-gwd_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "dusfc", "uflx_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "dvsfc", "vflx_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_dyn", "refl_10cm", "refl10cm", "fv3_history", "all", .false., "none", 2 - -"gfs_phys", "psurf", "pressfc", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "u10m", "ugrd10m", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "v10m", "vgrd10m", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "crain", "crain", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "tprcp", "tprcp", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "hgtsfc", "orog", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "weasd", "weasd", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "f10m", "f10m", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "q2m", "spfh2m", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "t2m", "tmp2m", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "tsfc", "tmpsfc", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "vtype", "vtype", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "stype", "sotyp", "fv3_history2d", "all", .false., "none", 2 
-"gfs_sfc", "slmsksfc", "land", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "vfracsfc", "veg", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "zorlsfc", "sfcr", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "uustar", "fricv", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "soilt1", "soilt1" "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "soilt2", "soilt2" "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "soilt3", "soilt3" "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "soilt4", "soilt4" "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "soilt5", "soilt5" "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "soilt6", "soilt6" "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "soilt7", "soilt7" "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "soilt8", "soilt8" "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "soilt9", "soilt9" "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "soilw1", "soilw1" "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "soilw2", "soilw2" "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "soilw3", "soilw3" "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "soilw4", "soilw4" "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "soilw5", "soilw5" "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "soilw6", "soilw6" "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "soilw7", "soilw7" "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "soilw8", "soilw8" "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "soilw9", "soilw9" "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "slc_1", "soill1", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "slc_2", "soill2", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "slc_3", "soill3", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "slc_4", "soill4", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "slc_5", "soill5", "fv3_history2d", "all", .false., "none", 2 
-"gfs_sfc", "slc_6", "soill6", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "slc_7", "soill7", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "slc_8", "soill8", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "slc_9", "soill9", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "slope", "sltyp", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "alnsf", "alnsf", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "alnwf", "alnwf", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "alvsf", "alvsf", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "alvwf", "alvwf", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "canopy", "cnwat", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "facsf", "facsf", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "facwf", "facwf", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "ffhh", "ffhh", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "ffmm", "ffmm", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "fice", "icec", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "hice", "icetk", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "snoalb", "snoalb", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "shdmax", "shdmax", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "shdmin", "shdmin", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "snowd", "snod", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "tg3", "tg3", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "tisfc", "tisfc", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "tref", "tref", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "z_c", "zc", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "c_0", "c0", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "c_d", "cd", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "w_0", "w0", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "w_d", "wd", "fv3_history2d", "all", 
.false., "none", 2 -"gfs_sfc", "xt", "xt", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "xz", "xz", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "dt_cool", "dtcool", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "xs", "xs", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "xu", "xu", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "xv", "xv", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "xtts", "xtts", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "xzts", "xzts", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "d_conv", "dconv", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "qrain", "qrain", "fv3_history2d", "all", .false., "none", 2 - -"gfs_phys", "acond", "acond", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "cduvb_ave", "cduvb_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "cpofp", "cpofp", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "duvb_ave", "duvb_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "csdlf_ave", "csdlf", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "csusf_ave", "csusf", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "csusf_avetoa", "csusftoa", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "csdsf_ave", "csdsf", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "csulf_ave", "csulf", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "csulf_avetoa", "csulftoa", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "cwork_ave", "cwork_aveclm", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "evbs_ave", "evbs_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "evcw_ave", "evcw_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "fldcp", "fldcp", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "hgt_hyblev1", "hgt_hyblev1", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "spfh_hyblev1", "spfh_hyblev1", "fv3_history2d", "all", .false., 
"none", 2 -"gfs_phys", "ugrd_hyblev1", "ugrd_hyblev1", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "vgrd_hyblev1", "vgrd_hyblev1", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "tmp_hyblev1", "tmp_hyblev1", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "gfluxi", "gflux", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "lhtfl", "lhtfl", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "shtfl", "shtfl", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "pevpr", "pevpr", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "pevpr_ave", "pevpr_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "sbsno_ave", "sbsno_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "sfexc", "sfexc", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "snohf", "snohf", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "snowc_ave", "snowc_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "spfhmax2m", "spfhmax_max2m", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "spfhmin2m", "spfhmin_min2m", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "tmpmax2m", "tmax_max2m", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "tmpmin2m", "tmin_min2m", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "ssrun_acc", "ssrun_acc", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "sunsd_acc", "sunsd_acc", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "watr_acc", "watr_acc", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "wilt", "wilt", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "vbdsf_ave", "vbdsf_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "vddsf_ave", "vddsf_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "nbdsf_ave", "nbdsf_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "nddsf_ave", "nddsf_ave", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "trans_ave", "trans_ave", "fv3_history2d", 
"all", .false., "none", 2 -# Aerosols (CCN, IN) from Thompson microphysics -"gfs_dyn", "nwfa", "nwfa", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "nifa", "nifa", "fv3_history", "all", .false., "none", 2 -"gfs_sfc", "nwfa2d", "nwfa2d", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "nifa2d", "nifa2d", "fv3_history2d", "all", .false., "none", 2 -# Cloud effective radii from Thompson and WSM6 microphysics -"gfs_phys", "cleffr", "cleffr", "fv3_history", "all", .false., "none", 2 -"gfs_phys", "cieffr", "cieffr", "fv3_history", "all", .false., "none", 2 -"gfs_phys", "cseffr", "cseffr", "fv3_history", "all", .false., "none", 2 -# Prognostic/diagnostic variables from MYNN -"gfs_dyn", "QC_BL", "qc_bl", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "CLDFRA_BL", "cldfra_bl", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "EL_PBL", "el_pbl", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "QKE", "qke", "fv3_history", "all", .false., "none", 2 -"gfs_sfc", "maxmf", "maxmf", "fv3_history2d", "all", .false., "none", 2 -#"gfs_sfc", "nupdraft", "nupdrafts", "fv3_history2d", "all", .false., "none", 2 -#"gfs_sfc", "ktop_shallow", "ktop_shallow", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "zol", "zol", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "flhc", "flhc", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "flqc", "flqc", "fv3_history2d", "all", .false., "none", 2 -# Prognostic/diagnostic variables from RUC LSM -"gfs_sfc", "snowfall_acc", "snowfall_acc", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "swe_snowfall_acc", "swe_snowfall_acc", "fv3_history2d", "all", .false., "none", 2 - -#============================================================================================= -# -#====> This file can be used with diag_manager/v2.0a (or higher) <==== -# -# -# FORMATS FOR FILE ENTRIES (not all input values are used) -# ------------------------ -# -#"file_name", output_freq, "output_units", format, "time_units", 
"long_name", -# -# -#output_freq: > 0 output frequency in "output_units" -# = 0 output frequency every time step -# =-1 output frequency at end of run -# -#output_units = units used for output frequency -# (years, months, days, minutes, hours, seconds) -# -#time_units = units used to label the time axis -# (days, minutes, hours, seconds) -# -# -# FORMAT FOR FIELD ENTRIES (not all input values are used) -# ------------------------ -# -#"module_name", "field_name", "output_name", "file_name" "time_sampling", time_avg, "other_opts", packing -# -#time_avg = .true. or .false. -# -#packing = 1 double precision -# = 2 float -# = 4 packed 16-bit integers -# = 8 packed 1-byte (not tested?) diff --git a/parm/parm_fv3diag/diag_table_history b/parm/parm_fv3diag/diag_table_history deleted file mode 100644 index 9a5766c27c..0000000000 --- a/parm/parm_fv3diag/diag_table_history +++ /dev/null @@ -1,89 +0,0 @@ -#"atmos_static", -1, "hours", 1, "hours", "time" -"fv3_history", 0, "hours", 1, "hours", "time" -"fv3_history2d", 0, "hours", 1, "hours", "time" -# -# static data -# "dynamics", "pk", "pk", "atmos_static", "all", .false., "none", 2 -# "dynamics", "bk", "bk", "atmos_static", "all", .false., "none", 2 -# "dynamics", "hyam", "hyam", "atmos_static", "all", .false., "none", 2 -# "dynamics", "hybm", "hybm", "atmos_static", "all", .false., "none", 2 -# "dynamics", "zsurf", "zsurf", "atmos_static", "all", .false., "none", 2 -# -# history files -"gfs_dyn", "ucomp", "ucomp", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "vcomp", "vcomp", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "sphum", "sphum", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "temp", "temp", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "liq_wat", "liq_wat", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "o3mr", "o3mr", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "delp", "delp", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "pfhy", "hypres", "fv3_history", "all", 
.false., "none", 2 -"gfs_dyn", "pfnh", "nhpres", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "w", "vvel", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "delz", "delz", "fv3_history", "all", .false., "none", 2 -# -"gfs_sfc" "hgtsfc" "hgtsfc" "fv3_history2d" "all" .false. "none" 2 -"gfs_phys" "psurf" "pressfc" "fv3_history2d" "all" .false. "none" 2 -"gfs_phys" "u10m" "u10m" "fv3_history2d" "all" .false. "none" 2 -"gfs_phys" "v10m" "v10m" "fv3_history2d" "all" .false. "none" 2 -"gfs_phys" "soilm" "soilm" "fv3_history2d" "all" .false. "none" 2 -"gfs_phys" "cnvprcp" "cnvprcp" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "tprcp" "tprcp" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "weasd" "weasd" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "f10m" "f10m" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "q2m" "q2m" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "t2m" "t2m" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "tsfc" "tsfc" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "vtype" "vtype" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "stype" "stype" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "slmsksfc" "slmsk" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "vfracsfc" "vfrac" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "zorlsfc" "zorl" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "uustar" "uustar" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "soilt1" "soilt1" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "soilt2" "soilt2" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "soilt3" "soilt3" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "soilt4" "soilt4" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "soilw1" "soilw1" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "soilw2" "soilw2" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "soilw3" "soilw3" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "soilw4" "soilw4" "fv3_history2d" "all" .false. 
"none" 2 -"gfs_sfc" "slc_1" "slc_1" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "slc_2" "slc_2" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "slc_3" "slc_3" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "slc_4" "slc_4" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "slope" "slope" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "alnsf" "alnsf" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "alnwf" "alnwf" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "alvsf" "alvsf" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "alvwf" "alvwf" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "canopy" "canopy" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "facsf" "facsf" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "facwf" "facwf" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "ffhh" "ffhh" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "ffmm" "ffmm" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "fice" "fice" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "hice" "hice" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "snoalb" "snoalb" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "shdmax" "shdmax" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "shdmin" "shdmin" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "snowd" "snowd" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "tg3" "tg3" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "tisfc" "tisfc" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "tref" "tref" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "z_c" "z_c" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "c_0" "c_0" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "c_d" "c_d" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "w_0" "w_0" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "w_d" "w_d" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "xt" "xt" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "xz" "xz" "fv3_history2d" "all" .false. 
"none" 2 -"gfs_sfc" "dt_cool" "dt_cool" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "xs" "xs" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "xu" "xu" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "xv" "xv" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "xtts" "xtts" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "xzts" "xzts" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "d_conv" "d_conv" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "qrain" "qrain" "fv3_history2d" "all" .false. "none" 2 - diff --git a/parm/parm_fv3diag/diag_table_orig b/parm/parm_fv3diag/diag_table_orig deleted file mode 100644 index 0b3000f904..0000000000 --- a/parm/parm_fv3diag/diag_table_orig +++ /dev/null @@ -1,449 +0,0 @@ -#output files -"grid_spec", -1, "months", 1, "days", "time" -"atmos_4xdaily", 6, "hours", 1, "days", "time" -#"atmos_daily", 24, "hours", 1, "days", "time" -"atmos_static", -1, "hours", 1, "hours", "time" -#"atmos_sos", 60, "minutes", 1, "days", "time" -#NOTE: Using multiple gfs output files appears to cause OOM errors. 
-#"gfs_physics", 0, "hours", 1, "hours", "time" -#"gfs_surface", 0, "hours", 1, "hours", "time" -# files needed for NGGPS evaluation -"nggps3d", 0, "hours", 1, "hours", "time" -"nggps2d", 0, "hours", 1, "hours", "time" -#"atmos_8xdaily_instant", 3, "hours", 1, "days", "time" - -# -#output variables -# -# -#======================= -# ATMOSPHERE DIAGNOSTICS -#======================= -### -# grid_spec -### - "dynamics", "grid_lon", "grid_lon", "grid_spec", "all", .false., "none", 2, - "dynamics", "grid_lat", "grid_lat", "grid_spec", "all", .false., "none", 2, - "dynamics", "grid_lont", "grid_lont", "grid_spec", "all", .false., "none", 2, - "dynamics", "grid_latt", "grid_latt", "grid_spec", "all", .false., "none", 2, - "dynamics", "area", "area", "grid_spec", "all", .false., "none", 2, -### -# 4x daily output -### - "dynamics", "slp", "slp", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "vort850", "vort850", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "vort200", "vort200", "atmos_4xdaily", "all", .false., "none", 2 -# "dynamics", "aam", "aam", "atmos_4xdaily", "all", .false., "none", 2 -### - "dynamics", "us", "us", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "u1000", "u1000", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "u850", "u850", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "u700", "u700", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "u500", "u500", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "u200", "u200", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "u100", "u100", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "u50", "u50", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "u10", "u10", "atmos_4xdaily", "all", .false., "none", 2 -### - "dynamics", "vs", "vs", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "v1000", "v1000", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "v850", "v850", "atmos_4xdaily", "all", .false., "none", 
2 - "dynamics", "v700", "v700", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "v500", "v500", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "v200", "v200", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "v100", "v100", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "v50", "v50", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "v10", "v10", "atmos_4xdaily", "all", .false., "none", 2 -#### - "dynamics", "tm", "tm", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "t1000", "t1000", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "t850", "t850", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "t700", "t700", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "t500", "t500", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "t200", "t200", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "t100", "t100", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "t50", "t50", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "t10", "t10", "atmos_4xdaily", "all", .false., "none", 2 -#### - "dynamics", "z1000", "z1000", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "z850", "z850", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "z700", "z700", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "z500", "z500", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "z200", "z200", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "z100", "z100", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "z50", "z50", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "z10", "z10", "atmos_4xdaily", "all", .false., "none", 2 -#### -#"dynamics", "w1000", "w1000", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "w850", "w850", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "w700", "w700", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "w500", "w500", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", 
"w200", "w200", "atmos_4xdaily", "all", .false., "none", 2 -#"dynamics", "w100", "w100", "atmos_4xdaily", "all", .false., "none", 2 -#"dynamics", "w50", "w50", "atmos_4xdaily", "all", .false., "none", 2 -#"dynamics", "w10", "w10", "atmos_4xdaily", "all", .false., "none", 2 -#### - "dynamics", "q1000", "q1000", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "q850", "q850", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "q700", "q700", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "q500", "q500", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "q200", "q200", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "q100", "q100", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "q50", "q50", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "q10", "q10", "atmos_4xdaily", "all", .false., "none", 2 -#### - "dynamics", "rh1000", "rh1000", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "rh850", "rh850", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "rh700", "rh700", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "rh500", "rh500", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "rh200", "rh200", "atmos_4xdaily", "all", .false., "none", 2 -# "dynamics", "rh100", "rh100", "atmos_4xdaily", "all", .false., "none", 2 -# "dynamics", "rh50", "rh50", "atmos_4xdaily", "all", .false., "none", 2 -# "dynamics", "rh10", "rh10", "atmos_4xdaily", "all", .false., "none", 2 -#### - "dynamics", "omg1000", "omg1000", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "omg850", "omg850", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "omg700", "omg700", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "omg500", "omg500", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "omg200", "omg200", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "omg100", "omg100", "atmos_4xdaily", "all", .false., "none", 2 - "dynamics", "omg50", "omg50", "atmos_4xdaily", "all", 
.false., "none", 2 - "dynamics", "omg10", "omg10", "atmos_4xdaily", "all", .false., "none", 2 -### -# Daily averages: -### -#"dynamics", "u850", "u850", "atmos_daily", "all", .true., "none", 2 -#"dynamics", "v850", "v850", "atmos_daily", "all", .true., "none", 2 -#"dynamics", "u200", "u200", "atmos_daily", "all", .true., "none", 2 -#"dynamics", "v200", "v200", "atmos_daily", "all", .true., "none", 2 -#"dynamics", "t200", "t200", "atmos_daily", "all", .true., "none", 2 -#"dynamics", "t100", "t100", "atmos_daily", "all", .true., "none", 2 -#"dynamics", "slp", "slp", "atmos_daily", "all", .true., "none", 2 -### -# gfs static data -### - "dynamics", "pk", "pk", "atmos_static", "all", .false., "none", 2 - "dynamics", "bk", "bk", "atmos_static", "all", .false., "none", 2 - "dynamics", "hyam", "hyam", "atmos_static", "all", .false., "none", 2 - "dynamics", "hybm", "hybm", "atmos_static", "all", .false., "none", 2 - "dynamics", "zsurf", "zsurf", "atmos_static", "all", .false., "none", 2 -############################################################################################# -### -# FV3 variabls needed for NGGPS evaluation -### - "gfs_dyn", "ucomp", "ucomp", "nggps3d", "all", .false., "none", 2 - "gfs_dyn", "vcomp", "vcomp", "nggps3d", "all", .false., "none", 2 - "gfs_dyn", "temp", "temp", "nggps3d", "all", .false., "none", 2 - "gfs_dyn", "delp", "delp", "nggps3d", "all", .false., "none", 2 - "gfs_dyn", "sphum", "sphum", "nggps3d", "all", .false., "none", 2 - "gfs_dyn", "o3mr", "o3mr", "nggps3d", "all", .false., "none", 2 - "gfs_dyn", "liq_wat", "clwmr", "nggps3d", "all", .false., "none", 2 - "gfs_dyn", "pfhy", "hypres", "nggps3d", "all", .false., "none", 2 - "gfs_dyn", "pfnh", "nhpres", "nggps3d", "all", .false., "none", 2 - "gfs_dyn", "w", "w", "nggps3d", "all", .false., "none", 2 - "gfs_dyn", "delz", "delz", "nggps3d", "all", .false., "none", 2 -### -# GFS variabls needed for NGGPS evaluation -### - "gfs_phys", "ALBDO_ave" "ALBDOsfc" "nggps2d", "all", .false., 
"none", 2 - "gfs_phys", "cnvprcp_ave" "CPRATsfc" "nggps2d", "all", .false., "none", 2 - "gfs_phys", "totprcp_ave" "PRATEsfc" "nggps2d", "all", .false., "none", 2 - "gfs_phys", "DLWRF" "DLWRFsfc" "nggps2d", "all", .false., "none", 2 - "gfs_phys", "ULWRF" "ULWRFsfc" "nggps2d", "all", .false., "none", 2 - "gfs_phys", "DSWRF" "DSWRFsfc" "nggps2d", "all", .false., "none", 2 - "gfs_phys", "USWRF" "USWRFsfc" "nggps2d", "all", .false., "none", 2 - "gfs_phys", "DSWRFtoa" "DSWRFtoa" "nggps2d", "all", .false., "none", 2 - "gfs_phys", "USWRFtoa" "USWRFtoa" "nggps2d", "all", .false., "none", 2 - "gfs_phys", "ULWRFtoa" "ULWRFtoa" "nggps2d", "all", .false., "none", 2 - "gfs_phys", "gflux_ave" "GFLUXsfc" "nggps2d", "all", .false., "none", 2 - "gfs_sfc", "HGTsfc" "HGTsfc" "nggps2d", "all", .false., "none", 2 - "gfs_phys", "hpbl" "HPBLsfc" "nggps2d", "all", .false., "none", 2 - "gfs_sfc", "fice" "ICECsfc" "nggps2d", "all", .false., "none", 2 - "gfs_sfc", "SLMSKsfc" "SLMSKsfc" "nggps2d", "all", .false., "none", 2 - "gfs_phys", "lhtfl_ave" "LHTFLsfc" "nggps2d", "all", .false., "none", 2 - "gfs_phys", "shtfl_ave" "SHTFLsfc" "nggps2d", "all", .false., "none", 2 - "gfs_phys", "psurf" "PRESsfc" "nggps2d", "all", .false., "none", 2 - "gfs_phys", "pwat" "PWATclm" "nggps2d", "all", .false., "none", 2 - "gfs_phys", "soilm" "SOILM" "nggps2d", "all", .false., "none", 2 - "gfs_sfc", "SOILW1" "SOILW1" "nggps2d", "all", .false., "none", 2 - "gfs_sfc", "SOILW2" "SOILW2" "nggps2d", "all", .false., "none", 2 - "gfs_sfc", "SOILW3" "SOILW3" "nggps2d", "all", .false., "none", 2 - "gfs_sfc", "SOILW4" "SOILW4" "nggps2d", "all", .false., "none", 2 - "gfs_sfc", "q2m" "SPFH2m" "nggps2d", "all", .false., "none", 2 - "gfs_phys", "TCDCclm" "TCDCclm" "nggps2d", "all", .false., "none", 2 - "gfs_phys", "TCDChcl" "TCDChcl" "nggps2d", "all", .false., "none", 2 - "gfs_phys", "TCDClcl" "TCDClcl" "nggps2d", "all", .false., "none", 2 - "gfs_phys", "TCDCmcl" "TCDCmcl" "nggps2d", "all", .false., "none", 2 - "gfs_sfc", 
"SOILT1" "SOILT1" "nggps2d", "all", .false., "none", 2 - "gfs_sfc", "SOILT2" "SOILT2" "nggps2d", "all", .false., "none", 2 - "gfs_sfc", "SOILT3" "SOILT3" "nggps2d", "all", .false., "none", 2 - "gfs_sfc", "SOILT4" "SOILT4" "nggps2d", "all", .false., "none", 2 - "gfs_sfc", "t2m" "TMP2m" "nggps2d", "all", .false., "none", 2 - "gfs_sfc", "tsfc" "TMPsfc" "nggps2d", "all", .false., "none", 2 - "gfs_phys", "u-gwd_ave" "UGWDsfc" "nggps2d", "all", .false., "none", 2 - "gfs_phys", "v-gwd_ave" "VGWDsfc" "nggps2d", "all", .false., "none", 2 - "gfs_phys", "dusfc" "UFLXsfc" "nggps2d", "all", .false., "none", 2 - "gfs_phys", "dvsfc" "VFLXsfc" "nggps2d", "all", .false., "none", 2 - "gfs_phys", "u10m" "UGRD10m" "nggps2d", "all", .false., "none", 2 - "gfs_phys", "v10m" "VGRD10m" "nggps2d", "all", .false., "none", 2 - "gfs_sfc", "weasd" "WEASDsfc" "nggps2d", "all", .false., "none", 2 - "gfs_sfc", "snowd" "SNODsfc" "nggps2d", "all", .false., "none", 2 - "gfs_sfc", "ZORLsfc" "ZORLsfc" "nggps2d", "all", .false., "none", 2 - "gfs_sfc", "VFRACsfc" "VFRACsfc" "nggps2d", "all", .false., "none", 2 - "gfs_sfc", "f10m" "F10Msfc" "nggps2d", "all", .false., "none", 2 - "gfs_sfc", "vtype" "VTYPEsfc" "nggps2d", "all", .false., "none", 2 - "gfs_sfc", "stype" "STYPEsfc" "nggps2d", "all", .false., "none", 2 - -############################################################################################# -### -# gfs physics data -### -#"gfs_phys", "fluxr_01" "fluxr_01" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "fluxr_02" "fluxr_02" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "fluxr_03" "fluxr_03" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "fluxr_04" "fluxr_04" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "fluxr_05" "fluxr_05" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "fluxr_06" "fluxr_06" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "fluxr_07" "fluxr_07" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "fluxr_08" "fluxr_08" 
"gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "fluxr_09" "fluxr_09" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "fluxr_10" "fluxr_10" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "fluxr_11" "fluxr_11" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "fluxr_12" "fluxr_12" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "fluxr_13" "fluxr_13" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "fluxr_14" "fluxr_14" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "fluxr_15" "fluxr_15" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "fluxr_16" "fluxr_16" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "fluxr_17" "fluxr_17" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "fluxr_18" "fluxr_18" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "fluxr_19" "fluxr_19" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "fluxr_20" "fluxr_20" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "fluxr_21" "fluxr_21" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "fluxr_22" "fluxr_22" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "fluxr_23" "fluxr_23" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "fluxr_24" "fluxr_24" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "fluxr_25" "fluxr_25" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "fluxr_26" "fluxr_26" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "fluxr_27" "fluxr_27" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "fluxr_28" "fluxr_28" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "fluxr_29" "fluxr_29" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "fluxr_30" "fluxr_30" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "fluxr_31" "fluxr_31" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "fluxr_32" "fluxr_32" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "fluxr_33" "fluxr_33" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "fluxr_34" 
"fluxr_34" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "fluxr_35" "fluxr_35" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "fluxr_36" "fluxr_36" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "fluxr_37" "fluxr_37" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "fluxr_38" "fluxr_38" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "fluxr_39" "fluxr_39" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "dswcmp_1" "dswcmp_1" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "dswcmp_2" "dswcmp_2" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "dswcmp_3" "dswcmp_3" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "dswcmp_4" "dswcmp_4" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "uswcmp_1" "uswcmp_1" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "uswcmp_2" "uswcmp_2" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "uswcmp_3" "uswcmp_3" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "uswcmp_4" "uswcmp_4" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "sw_upfxc" "sw_upfxc" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "sw_dnfxc" "sw_dnfxc" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "sw_upfx0" "sw_upfx0" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "lw_upfxc" "lw_upfxc" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "lw_upfx0" "lw_upfx0" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "srunoff" "srunoff" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "evbsa" "evbsa" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "evcwa" "evcwa" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "snohfa" "snohfa" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "transa" "transa" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "sbsnoa" "sbsnoa" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "snowca" "snowca" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "soilm" "soilm" "gfs_physics", "all", 
.false., "none", 2 -#"gfs_phys", "tmpmin" "tmpmin" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "tmpmax" "tmpmax" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "dusfc" "dusfc" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "dvsfc" "dvsfc" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "dtsfc" "dtsfc" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "dqsfc" "dqsfc" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "totprcp" "totprcp" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "gflux" "gflux" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "dlwsfc" "dlwsfc" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "ulwsfc" "ulwsfc" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "suntim" "suntim" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "runoff" "runoff" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "ep" "ep" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "cldwrk" "cldwrk" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "dugwd" "dugwd" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "dvgwd" "dvgwd" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "psmean" "psmean" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "cnvprcp" "cnvprcp" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "spfhmin" "spfhmin" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "spfhmax" "spfhmax" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "rain" "rain" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "rainc" "rainc" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "u10m" "u10m" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "v10m" "v10m" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "zlvl" "zlvl" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "psurf" "psurf" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "hpbl" "hpbl" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "pwat" "pwat" "gfs_physics", 
"all", .false., "none", 2 -#"gfs_phys", "t1" "t1" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "q1" "q1" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "u1" "u1" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "v1" "v1" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "chh" "chh" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "cmm" "cmm" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "dlwsfci" "dlwsfci" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "ulwsfci" "ulwsfci" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "dswsfci" "dswsfci" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "uswsfci" "uswsfci" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "dusfci" "dusfci" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "dvsfci" "dvsfci" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "dtsfci" "dtsfci" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "dqsfci" "dqsfci" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "gfluxi" "gfluxi" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "epi" "epi" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "smcwlt2" "smcwlt2" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "smcref2" "smcref2" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "wet1" "wet1" "gfs_physics", "all", .false., "none", 2 -#"gfs_phys", "sr" "sr" "gfs_physics", "all", .false., "none", 2 -### -# gfs surface data -### -#"gfs_sfc", "alnsf" "alnsf" "gfs_surface", "all", .false., "none", 2 -#"gfs_sfc", "alnwf" "alnwf" "gfs_surface", "all", .false., "none", 2 -#"gfs_sfc", "alvsf" "alvsf" "gfs_surface", "all", .false., "none", 2 -#"gfs_sfc", "alvwf" "alvwf" "gfs_surface", "all", .false., "none", 2 -#"gfs_sfc", "canopy" "canopy" "gfs_surface", "all", .false., "none", 2 -#"gfs_sfc", "f10m" "f10m" "gfs_surface", "all", .false., "none", 2 -#"gfs_sfc", "facsf" "facsf" "gfs_surface", "all", .false., "none", 2 -#"gfs_sfc", "facwf" "facwf" "gfs_surface", 
"all", .false., "none", 2 -#"gfs_sfc", "ffhh" "ffhh" "gfs_surface", "all", .false., "none", 2 -#"gfs_sfc", "ffmm" "ffmm" "gfs_surface", "all", .false., "none", 2 -#"gfs_sfc", "uustar" "uustar" "gfs_surface", "all", .false., "none", 2 -#"gfs_sfc", "slope" "slope" "gfs_surface", "all", .false., "none", 2 -#"gfs_sfc", "fice" "fice" "gfs_surface", "all", .false., "none", 2 -#"gfs_sfc", "hice" "hice" "gfs_surface", "all", .false., "none", 2 -#"gfs_sfc", "snoalb" "snoalb" "gfs_surface", "all", .false., "none", 2 -#"gfs_sfc", "shdmax" "shdmax" "gfs_surface", "all", .false., "none", 2 -#"gfs_sfc", "shdmin" "shdmin" "gfs_surface", "all", .false., "none", 2 -#"gfs_sfc", "snowd" "snowd" "gfs_surface", "all", .false., "none", 2 -#"gfs_sfc", "stype" "stype" "gfs_surface", "all", .false., "none", 2 -#"gfs_sfc", "q2m" "q2m" "gfs_surface", "all", .false., "none", 2 -#"gfs_sfc", "t2m" "t2m" "gfs_surface", "all", .false., "none", 2 -#"gfs_sfc", "tsfc" "tsfc" "gfs_surface", "all", .false., "none", 2 -#"gfs_sfc", "tg3" "tg3" "gfs_surface", "all", .false., "none", 2 -#"gfs_sfc", "tisfc" "tisfc" "gfs_surface", "all", .false., "none", 2 -#"gfs_sfc", "tprcp" "tprcp" "gfs_surface", "all", .false., "none", 2 -#"gfs_sfc", "vtype" "vtype" "gfs_surface", "all", .false., "none", 2 -#"gfs_sfc", "weasd" "weasd" "gfs_surface", "all", .false., "none", 2 -#"gfs_sfc", "slc_1" "slc_1" "gfs_surface", "all", .false., "none", 2 -#"gfs_sfc", "slc_2" "slc_2" "gfs_surface", "all", .false., "none", 2 -#"gfs_sfc", "slc_3" "slc_3" "gfs_surface", "all", .false., "none", 2 -#"gfs_sfc", "slc_4" "slc_4" "gfs_surface", "all", .false., "none", 2 -### -#"gfs_phys", "dt3dt_1" "dt3dt_1" "gfs_physics1", "all", .false., "none", 2 -#"gfs_phys", "dt3dt_2" "dt3dt_2" "gfs_physics1", "all", .false., "none", 2 -#"gfs_phys", "dt3dt_3" "dt3dt_3" "gfs_physics1", "all", .false., "none", 2 -#"gfs_phys", "dt3dt_4" "dt3dt_4" "gfs_physics1", "all", .false., "none", 2 -#"gfs_phys", "dt3dt_5" "dt3dt_5" "gfs_physics1", "all", .false., 
"none", 2 -#"gfs_phys", "dt3dt_6" "dt3dt_6" "gfs_physics1", "all", .false., "none", 2 -### -#"gfs_phys", "dq3dt_1" "dq3dt_1" "gfs_physics2", "all", .false., "none", 2 -#"gfs_phys", "dq3dt_2" "dq3dt_2" "gfs_physics2", "all", .false., "none", 2 -#"gfs_phys", "dq3dt_3" "dq3dt_3" "gfs_physics2", "all", .false., "none", 2 -#"gfs_phys", "dq3dt_4" "dq3dt_4" "gfs_physics2", "all", .false., "none", 2 -#"gfs_phys", "dq3dt_5" "dq3dt_5" "gfs_physics2", "all", .false., "none", 2 -#"gfs_phys", "dq3dt_6" "dq3dt_6" "gfs_physics2", "all", .false., "none", 2 -#"gfs_phys", "dq3dt_7" "dq3dt_7" "gfs_physics2", "all", .false., "none", 2 -#"gfs_phys", "dq3dt_8" "dq3dt_8" "gfs_physics2", "all", .false., "none", 2 -#"gfs_phys", "dq3dt_9" "dq3dt_9" "gfs_physics2", "all", .false., "none", 2 -### -#"gfs_phys", "du3dt_1" "du3dt_1" "gfs_physics3", "all", .false., "none", 2 -#"gfs_phys", "du3dt_2" "du3dt_2" "gfs_physics3", "all", .false., "none", 2 -#"gfs_phys", "du3dt_3" "du3dt_3" "gfs_physics3", "all", .false., "none", 2 -#"gfs_phys", "du3dt_4" "du3dt_4" "gfs_physics3", "all", .false., "none", 2 -### -#"gfs_phys", "dv3dt_1" "dv3dt_1" "gfs_physics4", "all", .false., "none", 2 -#"gfs_phys", "dv3dt_2" "dv3dt_2" "gfs_physics4", "all", .false., "none", 2 -#"gfs_phys", "dv3dt_3" "dv3dt_3" "gfs_physics4", "all", .false., "none", 2 -#"gfs_phys", "dv3dt_4" "dv3dt_4" "gfs_physics4", "all", .false., "none", 2 -### -#"gfs_phys", "dtemp_dt" "dtemp_dt" "gfs_physics5", "all", .false., "none", 2 -#"gfs_phys", "du_dt" "du_dt" "gfs_physics5", "all", .false., "none", 2 -#"gfs_phys", "dv_dt" "dv_dt" "gfs_physics5", "all", .false., "none", 2 -#"gfs_phys", "dsphum_dt" "dsphum_dt" "gfs_physics5", "all", .false., "none", 2 -### -#"gfs_phys", "dclwmr_dt" "dclwmr_dt" "gfs_physics6", "all", .false., "none", 2 -#"gfs_phys", "do3mr_dt" "do3mr_dt" "gfs_physics6", "all", .false., "none", 2 -#"gfs_phys", "dqdt_v" "dqdt_v" "gfs_physics6", "all", .false., "none", 2 -#### -## SOS high-frequency -#### -#"dynamics", "tq", "tq", 
"atmos_sos", "all", .false., "none", 2 -#"dynamics", "lw", "lw", "atmos_sos", "all", .false., "none", 2 -#"dynamics", "tb", "tb", "atmos_sos", "all", .false., "none", 2 -#"dynamics", "ps", "ps", "atmos_sos", "all", .false., "none", 2 -#"dynamics", "us", "us", "atmos_sos", "all", .false., "none", 2 -#"dynamics", "vs", "vs", "atmos_sos", "all", .false., "none", 2 -#"dynamics", "slp", "slp", "atmos_sos", "all", .false., "none", 2 -#"dynamics", "vort850", "vort850", "atmos_sos", "all", .false., "none", 2 -#"dynamics", "w850", "w850", "atmos_sos", "all", .false., "none", 2 -#### -## Monthly averages: -#### -# "dynamics", "zsurf", "zsurf", "atmos_month", "all", .false., "none", 2 -# "dynamics", "bk", "bk", "atmos_month", "all", .false., "none", 2 -# "dynamics", "pk", "pk", "atmos_month", "all", .false., "none", 2 -# "dynamics", "ps", "ps", "atmos_month", "all", .true., "none", 2 -# "dynamics", "slp", "slp", "atmos_month", "all", .true., "none", 2 -# "dynamics", "tm", "tm", "atmos_month", "all", .true., "none", 2 -# "dynamics", "cld_amt", "cld_amt", "atmos_month", "all", .true., "none", 2 -# "dynamics", "liq_wat", "liq_wat", "atmos_month", "all", .true., "none", 2 -# "dynamics", "ice_wat", "ice_wat", "atmos_month", "all", .true., "none", 2 -# "dynamics", "snowwat", "snowwat", "atmos_month", "all", .true., "none", 2 -# "dynamics", "rainwat", "rainwat", "atmos_month", "all", .true., "none", 2 -# "dynamics", "graupel", "graupel", "atmos_month", "all", .true., "none", 2 -# "dynamics", "iw", "iw", "atmos_month", "all", .true., "none", 2 -# "dynamics", "lw", "lw", "atmos_month", "all", .true., "none", 2 -# "dynamics", "aam", "aam", "atmos_month", "all", .true., "none", 2 -# "dynamics", "amdt", "amdt", "atmos_month", "all", .true., "none", 2 -# "dynamics", "z500", "z500", "atmos_month", "all", .true., "none", 2 -# "dynamics", "temp", "temp", "atmos_month", "all", .true., "none", 2 -# "dynamics", "ucomp", "ucomp", "atmos_month", "all", .true., "none", 2 -# "dynamics", "vcomp", 
"vcomp", "atmos_month", "all", .true., "none", 2 -# "dynamics", "sphum", "sphum", "atmos_month", "all", .true., "none", 2 -# "dynamics", "omega", "omega", "atmos_month", "all", .true., "none", 2 -# -#============================================================================================= -# -#====> This file can be used with diag_manager/v2.0a (or higher) <==== -# -# -# FORMATS FOR FILE ENTRIES (not all input values are used) -# ------------------------ -# -#"file_name", output_freq, "output_units", format, "time_units", "long_name", -# -# -#output_freq: > 0 output frequency in "output_units" -# = 0 output frequency every time step -# =-1 output frequency at end of run -# -#output_units = units used for output frequency -# (years, months, days, minutes, hours, seconds) -# -#time_units = units used to label the time axis -# (days, minutes, hours, seconds) -# -# -# FORMAT FOR FIELD ENTRIES (not all input values are used) -# ------------------------ -# -#"module_name", "field_name", "output_name", "file_name" "time_sampling", time_avg, "other_opts", packing -# -#time_avg = .true. or .false. -# -#packing = 1 double precision -# = 2 float -# = 4 packed 16-bit integers -# = 8 packed 1-byte (not tested?) 
diff --git a/parm/parm_gdas/aero_crtm_coeff.yaml b/parm/parm_gdas/aero_crtm_coeff.yaml index e60fc422f2..d310ff6d31 100644 --- a/parm/parm_gdas/aero_crtm_coeff.yaml +++ b/parm/parm_gdas/aero_crtm_coeff.yaml @@ -1,7 +1,7 @@ mkdir: - $(DATA)/crtm/ copy: -- [$(FV3JEDI_FIX)/crtm/$(crtm_VERSION)/AerosolCoeff.bin, $(DATA)crtm/] +- [$(FV3JEDI_FIX)/crtm/$(crtm_VERSION)/AerosolCoeff.bin, $(DATA)/crtm/] - [$(FV3JEDI_FIX)/crtm/$(crtm_VERSION)/CloudCoeff.bin, $(DATA)/crtm/] - [$(FV3JEDI_FIX)/crtm/$(crtm_VERSION)/v.viirs-m_npp.SpcCoeff.bin, $(DATA)/crtm/] - [$(FV3JEDI_FIX)/crtm/$(crtm_VERSION)/v.viirs-m_npp.TauCoeff.bin, $(DATA)/crtm/] diff --git a/parm/parm_gdas/atm_crtm_coeff.yaml b/parm/parm_gdas/atm_crtm_coeff.yaml new file mode 100644 index 0000000000..8e8d433b06 --- /dev/null +++ b/parm/parm_gdas/atm_crtm_coeff.yaml @@ -0,0 +1,178 @@ +mkdir: +- $(DATA)/crtm +copy: +# Emissivity files +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/NPOESS.VISice.EmisCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/NPOESS.VISland.EmisCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/NPOESS.VISsnow.EmisCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/NPOESS.VISwater.EmisCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/NPOESS.IRice.EmisCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/NPOESS.IRland.EmisCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/NPOESS.IRsnow.EmisCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/Nalli.IRwater.EmisCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/FASTEM6.MWwater.EmisCoeff.bin, $(DATA)/crtm] +# Aerosol and Cloud files +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/AerosolCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/CloudCoeff.bin, $(DATA)/crtm] +##- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/CloudCoeff.GFDLFV3.-109z-1.bin, $(DATA)/crtm] +# Satellite_Sensor specific Tau and Spc 
coefficient files +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/abi_g16.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/abi_g16.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/abi_g17.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/abi_g17.TauCoeff.bin, $(DATA)/crtm] +##- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/abi_g18.SpcCoeff.bin, $(DATA)/crtm] +##- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/abi_g18.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/ahi_himawari8.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/ahi_himawari8.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/ahi_himawari9.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/ahi_himawari9.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/airs_aqua.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/airs_aqua.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/amsr2_gcom-w1.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/amsr2_gcom-w1.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/amsre_aqua.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/amsre_aqua.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/amsua_aqua.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/amsua_aqua.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/amsua_metop-a.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/amsua_metop-a.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/amsua_metop-b.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/amsua_metop-b.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/amsua_metop-c.SpcCoeff.bin, $(DATA)/crtm] +- 
[$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/amsua_metop-c.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/amsua_n15.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/amsua_n15.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/amsua_n18.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/amsua_n18.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/amsua_n19.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/amsua_n19.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/amsub_n17.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/amsub_n17.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/atms_n20.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/atms_n20.TauCoeff.bin, $(DATA)/crtm] +##- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/atms_n21.SpcCoeff.bin, $(DATA)/crtm] +##- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/atms_n21.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/atms_npp.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/atms_npp.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/avhrr3_metop-a.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/avhrr3_metop-a.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/avhrr3_metop-b.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/avhrr3_metop-b.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/avhrr3_metop-c.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/avhrr3_metop-c.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/avhrr3_n18.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/avhrr3_n18.TauCoeff.bin, $(DATA)/crtm] +- 
[$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/avhrr3_n19.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/avhrr3_n19.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/cris-fsr_n20.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/cris-fsr_n20.TauCoeff.bin, $(DATA)/crtm] +##- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/cris-fsr_n21.SpcCoeff.bin, $(DATA)/crtm] +##- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/cris-fsr_n21.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/cris-fsr_npp.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/cris-fsr_npp.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/gmi_gpm.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/gmi_gpm.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/hirs3_n17.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/hirs3_n17.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/hirs4_metop-a.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/hirs4_metop-a.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/hirs4_metop-b.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/hirs4_metop-b.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/hirs4_n19.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/hirs4_n19.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/iasi_metop-a.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/iasi_metop-a.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/iasi_metop-b.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/iasi_metop-b.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/iasi_metop-c.SpcCoeff.bin, $(DATA)/crtm] +- 
[$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/iasi_metop-c.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/imgr_g11.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/imgr_g11.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/imgr_g12.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/imgr_g12.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/imgr_g13.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/imgr_g13.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/imgr_g14.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/imgr_g14.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/imgr_g15.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/imgr_g15.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/mhs_metop-a.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/mhs_metop-a.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/mhs_metop-b.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/mhs_metop-b.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/mhs_metop-c.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/mhs_metop-c.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/mhs_n18.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/mhs_n18.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/mhs_n19.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/mhs_n19.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/saphir_meghat.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/saphir_meghat.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/seviri_m08.SpcCoeff.bin, $(DATA)/crtm] +- 
[$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/seviri_m08.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/seviri_m09.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/seviri_m09.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/seviri_m10.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/seviri_m10.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/seviri_m11.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/seviri_m11.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD1_g11.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD1_g11.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD1_g12.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD1_g12.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD1_g13.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD1_g13.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD1_g14.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD1_g14.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD1_g15.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD1_g15.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD2_g11.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD2_g11.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD2_g12.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD2_g12.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD2_g13.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD2_g13.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD2_g14.SpcCoeff.bin, 
$(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD2_g14.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD2_g15.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD2_g15.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD3_g11.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD3_g11.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD3_g12.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD3_g12.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD3_g13.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD3_g13.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD3_g14.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD3_g14.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD3_g15.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD3_g15.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD4_g11.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD4_g11.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD4_g12.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD4_g12.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD4_g13.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD4_g13.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD4_g14.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD4_g14.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD4_g15.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD4_g15.TauCoeff.bin, $(DATA)/crtm] +- 
[$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/ssmi_f15.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/ssmi_f15.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/ssmis_f16.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/ssmis_f16.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/ssmis_f17.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/ssmis_f17.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/ssmis_f18.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/ssmis_f18.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/ssmis_f19.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/ssmis_f19.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/ssmis_f20.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/ssmis_f20.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/viirs-m_j1.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/viirs-m_j1.TauCoeff.bin, $(DATA)/crtm] +##- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/viirs-m_j2.SpcCoeff.bin, $(DATA)/crtm] +##- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/viirs-m_j2.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/viirs-m_npp.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/viirs-m_npp.TauCoeff.bin, $(DATA)/crtm] +# Special Spc files +##- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/amsua_metop-a_v2.SpcCoeff.bin, $(DATA)/crtm] diff --git a/parm/parm_gdas/atm_jedi_fix.yaml b/parm/parm_gdas/atm_jedi_fix.yaml new file mode 100644 index 0000000000..07b0fe49f1 --- /dev/null +++ b/parm/parm_gdas/atm_jedi_fix.yaml @@ -0,0 +1,7 @@ +mkdir: +- $(DATA)/fv3jedi +copy: +- [$(HOMEgfs)/fix/gdas/fv3jedi/fv3files/akbk$(npz).nc4, $(DATA)/fv3jedi/akbk.nc4] +- [$(HOMEgfs)/fix/gdas/fv3jedi/fv3files/fmsmpp.nml, 
$(DATA)/fv3jedi/fmsmpp.nml] +- [$(HOMEgfs)/fix/gdas/fv3jedi/fv3files/field_table_gfdl, $(DATA)/fv3jedi/field_table] +- [$(HOMEgfs)/fix/gdas/fv3jedi/fieldmetadata/gfs-restart.yaml, $(DATA)/fv3jedi/gfs-restart.yaml] diff --git a/parm/parm_gdas/atmanl_inc_vars.yaml b/parm/parm_gdas/atmanl_inc_vars.yaml new file mode 100644 index 0000000000..cb6718ce9f --- /dev/null +++ b/parm/parm_gdas/atmanl_inc_vars.yaml @@ -0,0 +1 @@ +incvars: ['ua', 'va', 't', 'sphum', 'liq_wat', 'ice_wat', 'o3mr'] diff --git a/parm/ufs/fix/gfs/atmos.fixed_files.yaml b/parm/ufs/fix/gfs/atmos.fixed_files.yaml new file mode 100644 index 0000000000..cc82f7a253 --- /dev/null +++ b/parm/ufs/fix/gfs/atmos.fixed_files.yaml @@ -0,0 +1,85 @@ +copy: + # Atmosphere mosaic file linked as the grid_spec file (atm only) + - [$(FIX_orog)/$(atm_res)/$(atm_res)_mosaic.nc, $(DATA)/INPUT/grid_spec.nc] + + # Atmosphere grid tile files + - [$(FIX_orog)/$(atm_res)/$(atm_res)_grid.tile1.nc, $(DATA)/INPUT/] + - [$(FIX_orog)/$(atm_res)/$(atm_res)_grid.tile2.nc, $(DATA)/INPUT/] + - [$(FIX_orog)/$(atm_res)/$(atm_res)_grid.tile3.nc, $(DATA)/INPUT/] + - [$(FIX_orog)/$(atm_res)/$(atm_res)_grid.tile4.nc, $(DATA)/INPUT/] + - [$(FIX_orog)/$(atm_res)/$(atm_res)_grid.tile5.nc, $(DATA)/INPUT/] + - [$(FIX_orog)/$(atm_res)/$(atm_res)_grid.tile6.nc, $(DATA)/INPUT/] + + # oro_data_ls and oro_data_ss files from FIX_ugwd + - [$(FIX_ugwd)/$(atm_res)/$(atm_res)_oro_data_ls.tile1.nc, $(DATA)/INPUT/oro_data_ls.tile1.nc] + - [$(FIX_ugwd)/$(atm_res)/$(atm_res)_oro_data_ls.tile2.nc, $(DATA)/INPUT/oro_data_ls.tile2.nc] + - [$(FIX_ugwd)/$(atm_res)/$(atm_res)_oro_data_ls.tile3.nc, $(DATA)/INPUT/oro_data_ls.tile3.nc] + - [$(FIX_ugwd)/$(atm_res)/$(atm_res)_oro_data_ls.tile4.nc, $(DATA)/INPUT/oro_data_ls.tile4.nc] + - [$(FIX_ugwd)/$(atm_res)/$(atm_res)_oro_data_ls.tile5.nc, $(DATA)/INPUT/oro_data_ls.tile5.nc] + - [$(FIX_ugwd)/$(atm_res)/$(atm_res)_oro_data_ls.tile6.nc, $(DATA)/INPUT/oro_data_ls.tile6.nc] + - 
[$(FIX_ugwd)/$(atm_res)/$(atm_res)_oro_data_ss.tile1.nc, $(DATA)/INPUT/oro_data_ss.tile1.nc] + - [$(FIX_ugwd)/$(atm_res)/$(atm_res)_oro_data_ss.tile2.nc, $(DATA)/INPUT/oro_data_ss.tile2.nc] + - [$(FIX_ugwd)/$(atm_res)/$(atm_res)_oro_data_ss.tile3.nc, $(DATA)/INPUT/oro_data_ss.tile3.nc] + - [$(FIX_ugwd)/$(atm_res)/$(atm_res)_oro_data_ss.tile4.nc, $(DATA)/INPUT/oro_data_ss.tile4.nc] + - [$(FIX_ugwd)/$(atm_res)/$(atm_res)_oro_data_ss.tile5.nc, $(DATA)/INPUT/oro_data_ss.tile5.nc] + - [$(FIX_ugwd)/$(atm_res)/$(atm_res)_oro_data_ss.tile6.nc, $(DATA)/INPUT/oro_data_ss.tile6.nc] + + # GWD?? + - [$(FIX_ugwd)/ugwp_limb_tau.nc, $(DATA)/ugwp_limb_tau.nc] + + # CO2 climatology + - [$(FIX_am)/co2monthlycyc.txt, $(DATA)/co2monthlycyc.txt] + - [$(FIX_am)/global_co2historicaldata_glob.txt, $(DATA)/co2historicaldata_glob.txt] + - [$(FIX_am)/fix_co2_proj/global_co2historicaldata_2009.txt, $(DATA)/co2historicaldata_2009.txt] + - [$(FIX_am)/fix_co2_proj/global_co2historicaldata_2010.txt, $(DATA)/co2historicaldata_2010.txt] + - [$(FIX_am)/fix_co2_proj/global_co2historicaldata_2011.txt, $(DATA)/co2historicaldata_2011.txt] + - [$(FIX_am)/fix_co2_proj/global_co2historicaldata_2012.txt, $(DATA)/co2historicaldata_2012.txt] + - [$(FIX_am)/fix_co2_proj/global_co2historicaldata_2013.txt, $(DATA)/co2historicaldata_2013.txt] + - [$(FIX_am)/fix_co2_proj/global_co2historicaldata_2014.txt, $(DATA)/co2historicaldata_2014.txt] + - [$(FIX_am)/fix_co2_proj/global_co2historicaldata_2015.txt, $(DATA)/co2historicaldata_2015.txt] + - [$(FIX_am)/fix_co2_proj/global_co2historicaldata_2016.txt, $(DATA)/co2historicaldata_2016.txt] + - [$(FIX_am)/fix_co2_proj/global_co2historicaldata_2017.txt, $(DATA)/co2historicaldata_2017.txt] + - [$(FIX_am)/fix_co2_proj/global_co2historicaldata_2018.txt, $(DATA)/co2historicaldata_2018.txt] + - [$(FIX_am)/fix_co2_proj/global_co2historicaldata_2019.txt, $(DATA)/co2historicaldata_2019.txt] + - [$(FIX_am)/fix_co2_proj/global_co2historicaldata_2020.txt, 
$(DATA)/co2historicaldata_2020.txt] + - [$(FIX_am)/fix_co2_proj/global_co2historicaldata_2021.txt, $(DATA)/co2historicaldata_2021.txt] + - [$(FIX_am)/fix_co2_proj/global_co2historicaldata_2022.txt, $(DATA)/co2historicaldata_2022.txt] + - [$(FIX_am)/fix_co2_proj/global_co2historicaldata_2023.txt, $(DATA)/co2historicaldata_2023.txt] + + # FIX_am files + - [$(FIX_am)/global_climaeropac_global.txt, $(DATA)/aerosol.dat] + - [$(FIX_am)/ozprdlos_2015_new_sbuvO3_tclm15_nuchem.f77, $(DATA)/global_o3prdlos.f77] + - [$(FIX_am)/global_h2o_pltc.f77, $(DATA)/global_h2oprdlos.f77] + - [$(FIX_am)/global_glacier.2x2.grb, $(DATA)/global_glacier.2x2.grb] + - [$(FIX_am)/global_maxice.2x2.grb, $(DATA)/global_maxice.2x2.grb] + - [$(FIX_am)/global_snoclim.1.875.grb, $(DATA)/global_snoclim.1.875.grb] + - [$(FIX_am)/global_slmask.t1534.3072.1536.grb, $(DATA)/global_slmask.t1534.3072.1536.grb] + - [$(FIX_am)/global_soilmgldas.statsgo.t1534.3072.1536.grb, $(DATA)/global_soilmgldas.statsgo.t1534.3072.1536.grb] + - [$(FIX_am)/global_solarconstant_noaa_an.txt, $(DATA)/solarconstant_noaa_an.txt] + - [$(FIX_am)/global_sfc_emissivity_idx.txt, $(DATA)/sfc_emissivity_idx.txt] + - [$(FIX_am)/RTGSST.1982.2012.monthly.clim.grb, $(DATA)/RTGSST.1982.2012.monthly.clim.grb] + - [$(FIX_am)/IMS-NIC.blended.ice.monthly.clim.grb, $(DATA)/IMS-NIC.blended.ice.monthly.clim.grb] + + # MERRA2 Aerosol Climatology + - [$(FIX_aer)/merra2.aerclim.2003-2014.m01.nc, $(DATA)/aeroclim.m01.nc] + - [$(FIX_aer)/merra2.aerclim.2003-2014.m02.nc, $(DATA)/aeroclim.m02.nc] + - [$(FIX_aer)/merra2.aerclim.2003-2014.m03.nc, $(DATA)/aeroclim.m03.nc] + - [$(FIX_aer)/merra2.aerclim.2003-2014.m04.nc, $(DATA)/aeroclim.m04.nc] + - [$(FIX_aer)/merra2.aerclim.2003-2014.m05.nc, $(DATA)/aeroclim.m05.nc] + - [$(FIX_aer)/merra2.aerclim.2003-2014.m06.nc, $(DATA)/aeroclim.m06.nc] + - [$(FIX_aer)/merra2.aerclim.2003-2014.m07.nc, $(DATA)/aeroclim.m07.nc] + - [$(FIX_aer)/merra2.aerclim.2003-2014.m08.nc, $(DATA)/aeroclim.m08.nc] + - 
[$(FIX_aer)/merra2.aerclim.2003-2014.m09.nc, $(DATA)/aeroclim.m09.nc] + - [$(FIX_aer)/merra2.aerclim.2003-2014.m10.nc, $(DATA)/aeroclim.m10.nc] + - [$(FIX_aer)/merra2.aerclim.2003-2014.m11.nc, $(DATA)/aeroclim.m11.nc] + - [$(FIX_aer)/merra2.aerclim.2003-2014.m12.nc, $(DATA)/aeroclim.m12.nc] + + # Optical depth + - [$(FIX_lut)/optics_BC.v1_3.dat, $(DATA)/optics_BC.dat] + - [$(FIX_lut)/optics_DU.v15_3.dat, $(DATA)/optics_DU.dat] + - [$(FIX_lut)/optics_OC.v1_3.dat, $(DATA)/optics_OC.dat] + - [$(FIX_lut)/optics_SS.v3_3.dat, $(DATA)/optics_SS.dat] + - [$(FIX_lut)/optics_SU.v1_3.dat, $(DATA)/optics_SU.dat] + + # fd_nems.yaml file + - [$(HOMEgfs)/sorc/ufs_model.fd/tests/parm/fd_nems.yaml, $(DATA)/] diff --git a/parm/ufs/fix/gfs/land.fixed_files.yaml b/parm/ufs/fix/gfs/land.fixed_files.yaml new file mode 100644 index 0000000000..ab93ff27a6 --- /dev/null +++ b/parm/ufs/fix/gfs/land.fixed_files.yaml @@ -0,0 +1,58 @@ +copy: + + # Files from FIX_orog/C??.mx??_frac/fix_sfc + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).facsf.tile1.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).facsf.tile2.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).facsf.tile3.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).facsf.tile4.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).facsf.tile5.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).facsf.tile6.nc, $(DATA)/] + + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).maximum_snow_albedo.tile1.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).maximum_snow_albedo.tile2.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).maximum_snow_albedo.tile3.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).maximum_snow_albedo.tile4.nc, $(DATA)/] + - 
[$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).maximum_snow_albedo.tile5.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).maximum_snow_albedo.tile6.nc, $(DATA)/] + + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).slope_type.tile1.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).slope_type.tile2.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).slope_type.tile3.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).slope_type.tile4.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).slope_type.tile5.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).slope_type.tile6.nc, $(DATA)/] + + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).snowfree_albedo.tile1.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).snowfree_albedo.tile2.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).snowfree_albedo.tile3.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).snowfree_albedo.tile4.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).snowfree_albedo.tile5.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).snowfree_albedo.tile6.nc, $(DATA)/] + + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).soil_type.tile1.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).soil_type.tile2.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).soil_type.tile3.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).soil_type.tile4.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).soil_type.tile5.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).soil_type.tile6.nc, $(DATA)/] + + - 
[$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).substrate_temperature.tile1.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).substrate_temperature.tile2.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).substrate_temperature.tile3.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).substrate_temperature.tile4.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).substrate_temperature.tile5.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).substrate_temperature.tile6.nc, $(DATA)/] + + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).vegetation_greenness.tile1.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).vegetation_greenness.tile2.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).vegetation_greenness.tile3.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).vegetation_greenness.tile4.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).vegetation_greenness.tile5.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).vegetation_greenness.tile6.nc, $(DATA)/] + + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).vegetation_type.tile1.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).vegetation_type.tile2.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).vegetation_type.tile3.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).vegetation_type.tile4.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).vegetation_type.tile5.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).vegetation_type.tile6.nc, $(DATA)/] diff --git a/parm/ufs/fix/gfs/ocean.fixed_files.yaml b/parm/ufs/fix/gfs/ocean.fixed_files.yaml new file mode 100644 
index 0000000000..801f070c49 --- /dev/null +++ b/parm/ufs/fix/gfs/ocean.fixed_files.yaml @@ -0,0 +1,10 @@ +copy: + + # Orography data tile files + # The following are for "frac_grid = .true." + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/oro_$(atm_res).mx$(ocn_res).tile1.nc, $(DATA)/INPUT/oro_data.tile1.nc] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/oro_$(atm_res).mx$(ocn_res).tile2.nc, $(DATA)/INPUT/oro_data.tile2.nc] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/oro_$(atm_res).mx$(ocn_res).tile3.nc, $(DATA)/INPUT/oro_data.tile3.nc] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/oro_$(atm_res).mx$(ocn_res).tile4.nc, $(DATA)/INPUT/oro_data.tile4.nc] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/oro_$(atm_res).mx$(ocn_res).tile5.nc, $(DATA)/INPUT/oro_data.tile5.nc] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/oro_$(atm_res).mx$(ocn_res).tile6.nc, $(DATA)/INPUT/oro_data.tile6.nc] diff --git a/parm/parm_wave/bull_awips_gfswave b/parm/wave/bull_awips_gfswave similarity index 100% rename from parm/parm_wave/bull_awips_gfswave rename to parm/wave/bull_awips_gfswave diff --git a/parm/parm_wave/grib2_gfswave.ao_9km.f000 b/parm/wave/grib2_gfswave.ao_9km.f000 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ao_9km.f000 rename to parm/wave/grib2_gfswave.ao_9km.f000 diff --git a/parm/parm_wave/grib2_gfswave.ao_9km.f003 b/parm/wave/grib2_gfswave.ao_9km.f003 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ao_9km.f003 rename to parm/wave/grib2_gfswave.ao_9km.f003 diff --git a/parm/parm_wave/grib2_gfswave.ao_9km.f006 b/parm/wave/grib2_gfswave.ao_9km.f006 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ao_9km.f006 rename to parm/wave/grib2_gfswave.ao_9km.f006 diff --git a/parm/parm_wave/grib2_gfswave.ao_9km.f009 b/parm/wave/grib2_gfswave.ao_9km.f009 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ao_9km.f009 rename to parm/wave/grib2_gfswave.ao_9km.f009 diff --git a/parm/parm_wave/grib2_gfswave.ao_9km.f012 
b/parm/wave/grib2_gfswave.ao_9km.f012 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ao_9km.f012 rename to parm/wave/grib2_gfswave.ao_9km.f012 diff --git a/parm/parm_wave/grib2_gfswave.ao_9km.f015 b/parm/wave/grib2_gfswave.ao_9km.f015 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ao_9km.f015 rename to parm/wave/grib2_gfswave.ao_9km.f015 diff --git a/parm/parm_wave/grib2_gfswave.ao_9km.f018 b/parm/wave/grib2_gfswave.ao_9km.f018 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ao_9km.f018 rename to parm/wave/grib2_gfswave.ao_9km.f018 diff --git a/parm/parm_wave/grib2_gfswave.ao_9km.f021 b/parm/wave/grib2_gfswave.ao_9km.f021 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ao_9km.f021 rename to parm/wave/grib2_gfswave.ao_9km.f021 diff --git a/parm/parm_wave/grib2_gfswave.ao_9km.f024 b/parm/wave/grib2_gfswave.ao_9km.f024 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ao_9km.f024 rename to parm/wave/grib2_gfswave.ao_9km.f024 diff --git a/parm/parm_wave/grib2_gfswave.ao_9km.f027 b/parm/wave/grib2_gfswave.ao_9km.f027 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ao_9km.f027 rename to parm/wave/grib2_gfswave.ao_9km.f027 diff --git a/parm/parm_wave/grib2_gfswave.ao_9km.f030 b/parm/wave/grib2_gfswave.ao_9km.f030 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ao_9km.f030 rename to parm/wave/grib2_gfswave.ao_9km.f030 diff --git a/parm/parm_wave/grib2_gfswave.ao_9km.f033 b/parm/wave/grib2_gfswave.ao_9km.f033 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ao_9km.f033 rename to parm/wave/grib2_gfswave.ao_9km.f033 diff --git a/parm/parm_wave/grib2_gfswave.ao_9km.f036 b/parm/wave/grib2_gfswave.ao_9km.f036 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ao_9km.f036 rename to parm/wave/grib2_gfswave.ao_9km.f036 diff --git a/parm/parm_wave/grib2_gfswave.ao_9km.f039 b/parm/wave/grib2_gfswave.ao_9km.f039 similarity index 100% rename from 
parm/parm_wave/grib2_gfswave.ao_9km.f039 rename to parm/wave/grib2_gfswave.ao_9km.f039 diff --git a/parm/parm_wave/grib2_gfswave.ao_9km.f042 b/parm/wave/grib2_gfswave.ao_9km.f042 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ao_9km.f042 rename to parm/wave/grib2_gfswave.ao_9km.f042 diff --git a/parm/parm_wave/grib2_gfswave.ao_9km.f045 b/parm/wave/grib2_gfswave.ao_9km.f045 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ao_9km.f045 rename to parm/wave/grib2_gfswave.ao_9km.f045 diff --git a/parm/parm_wave/grib2_gfswave.ao_9km.f048 b/parm/wave/grib2_gfswave.ao_9km.f048 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ao_9km.f048 rename to parm/wave/grib2_gfswave.ao_9km.f048 diff --git a/parm/parm_wave/grib2_gfswave.ao_9km.f051 b/parm/wave/grib2_gfswave.ao_9km.f051 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ao_9km.f051 rename to parm/wave/grib2_gfswave.ao_9km.f051 diff --git a/parm/parm_wave/grib2_gfswave.ao_9km.f054 b/parm/wave/grib2_gfswave.ao_9km.f054 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ao_9km.f054 rename to parm/wave/grib2_gfswave.ao_9km.f054 diff --git a/parm/parm_wave/grib2_gfswave.ao_9km.f057 b/parm/wave/grib2_gfswave.ao_9km.f057 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ao_9km.f057 rename to parm/wave/grib2_gfswave.ao_9km.f057 diff --git a/parm/parm_wave/grib2_gfswave.ao_9km.f060 b/parm/wave/grib2_gfswave.ao_9km.f060 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ao_9km.f060 rename to parm/wave/grib2_gfswave.ao_9km.f060 diff --git a/parm/parm_wave/grib2_gfswave.ao_9km.f063 b/parm/wave/grib2_gfswave.ao_9km.f063 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ao_9km.f063 rename to parm/wave/grib2_gfswave.ao_9km.f063 diff --git a/parm/parm_wave/grib2_gfswave.ao_9km.f066 b/parm/wave/grib2_gfswave.ao_9km.f066 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ao_9km.f066 rename to 
parm/wave/grib2_gfswave.ao_9km.f066 diff --git a/parm/parm_wave/grib2_gfswave.ao_9km.f069 b/parm/wave/grib2_gfswave.ao_9km.f069 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ao_9km.f069 rename to parm/wave/grib2_gfswave.ao_9km.f069 diff --git a/parm/parm_wave/grib2_gfswave.ao_9km.f072 b/parm/wave/grib2_gfswave.ao_9km.f072 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ao_9km.f072 rename to parm/wave/grib2_gfswave.ao_9km.f072 diff --git a/parm/parm_wave/grib2_gfswave.ao_9km.f078 b/parm/wave/grib2_gfswave.ao_9km.f078 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ao_9km.f078 rename to parm/wave/grib2_gfswave.ao_9km.f078 diff --git a/parm/parm_wave/grib2_gfswave.ao_9km.f084 b/parm/wave/grib2_gfswave.ao_9km.f084 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ao_9km.f084 rename to parm/wave/grib2_gfswave.ao_9km.f084 diff --git a/parm/parm_wave/grib2_gfswave.ao_9km.f090 b/parm/wave/grib2_gfswave.ao_9km.f090 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ao_9km.f090 rename to parm/wave/grib2_gfswave.ao_9km.f090 diff --git a/parm/parm_wave/grib2_gfswave.ao_9km.f096 b/parm/wave/grib2_gfswave.ao_9km.f096 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ao_9km.f096 rename to parm/wave/grib2_gfswave.ao_9km.f096 diff --git a/parm/parm_wave/grib2_gfswave.ao_9km.f102 b/parm/wave/grib2_gfswave.ao_9km.f102 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ao_9km.f102 rename to parm/wave/grib2_gfswave.ao_9km.f102 diff --git a/parm/parm_wave/grib2_gfswave.ao_9km.f108 b/parm/wave/grib2_gfswave.ao_9km.f108 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ao_9km.f108 rename to parm/wave/grib2_gfswave.ao_9km.f108 diff --git a/parm/parm_wave/grib2_gfswave.ao_9km.f114 b/parm/wave/grib2_gfswave.ao_9km.f114 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ao_9km.f114 rename to parm/wave/grib2_gfswave.ao_9km.f114 diff --git 
a/parm/parm_wave/grib2_gfswave.ao_9km.f120 b/parm/wave/grib2_gfswave.ao_9km.f120 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ao_9km.f120 rename to parm/wave/grib2_gfswave.ao_9km.f120 diff --git a/parm/parm_wave/grib2_gfswave.ao_9km.f126 b/parm/wave/grib2_gfswave.ao_9km.f126 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ao_9km.f126 rename to parm/wave/grib2_gfswave.ao_9km.f126 diff --git a/parm/parm_wave/grib2_gfswave.ao_9km.f132 b/parm/wave/grib2_gfswave.ao_9km.f132 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ao_9km.f132 rename to parm/wave/grib2_gfswave.ao_9km.f132 diff --git a/parm/parm_wave/grib2_gfswave.ao_9km.f138 b/parm/wave/grib2_gfswave.ao_9km.f138 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ao_9km.f138 rename to parm/wave/grib2_gfswave.ao_9km.f138 diff --git a/parm/parm_wave/grib2_gfswave.ao_9km.f144 b/parm/wave/grib2_gfswave.ao_9km.f144 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ao_9km.f144 rename to parm/wave/grib2_gfswave.ao_9km.f144 diff --git a/parm/parm_wave/grib2_gfswave.ao_9km.f150 b/parm/wave/grib2_gfswave.ao_9km.f150 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ao_9km.f150 rename to parm/wave/grib2_gfswave.ao_9km.f150 diff --git a/parm/parm_wave/grib2_gfswave.ao_9km.f156 b/parm/wave/grib2_gfswave.ao_9km.f156 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ao_9km.f156 rename to parm/wave/grib2_gfswave.ao_9km.f156 diff --git a/parm/parm_wave/grib2_gfswave.ao_9km.f162 b/parm/wave/grib2_gfswave.ao_9km.f162 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ao_9km.f162 rename to parm/wave/grib2_gfswave.ao_9km.f162 diff --git a/parm/parm_wave/grib2_gfswave.ao_9km.f168 b/parm/wave/grib2_gfswave.ao_9km.f168 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ao_9km.f168 rename to parm/wave/grib2_gfswave.ao_9km.f168 diff --git a/parm/parm_wave/grib2_gfswave.ao_9km.f174 b/parm/wave/grib2_gfswave.ao_9km.f174 
similarity index 100% rename from parm/parm_wave/grib2_gfswave.ao_9km.f174 rename to parm/wave/grib2_gfswave.ao_9km.f174 diff --git a/parm/parm_wave/grib2_gfswave.ao_9km.f180 b/parm/wave/grib2_gfswave.ao_9km.f180 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ao_9km.f180 rename to parm/wave/grib2_gfswave.ao_9km.f180 diff --git a/parm/parm_wave/grib2_gfswave.at_10m.f000 b/parm/wave/grib2_gfswave.at_10m.f000 similarity index 100% rename from parm/parm_wave/grib2_gfswave.at_10m.f000 rename to parm/wave/grib2_gfswave.at_10m.f000 diff --git a/parm/parm_wave/grib2_gfswave.at_10m.f003 b/parm/wave/grib2_gfswave.at_10m.f003 similarity index 100% rename from parm/parm_wave/grib2_gfswave.at_10m.f003 rename to parm/wave/grib2_gfswave.at_10m.f003 diff --git a/parm/parm_wave/grib2_gfswave.at_10m.f006 b/parm/wave/grib2_gfswave.at_10m.f006 similarity index 100% rename from parm/parm_wave/grib2_gfswave.at_10m.f006 rename to parm/wave/grib2_gfswave.at_10m.f006 diff --git a/parm/parm_wave/grib2_gfswave.at_10m.f009 b/parm/wave/grib2_gfswave.at_10m.f009 similarity index 100% rename from parm/parm_wave/grib2_gfswave.at_10m.f009 rename to parm/wave/grib2_gfswave.at_10m.f009 diff --git a/parm/parm_wave/grib2_gfswave.at_10m.f012 b/parm/wave/grib2_gfswave.at_10m.f012 similarity index 100% rename from parm/parm_wave/grib2_gfswave.at_10m.f012 rename to parm/wave/grib2_gfswave.at_10m.f012 diff --git a/parm/parm_wave/grib2_gfswave.at_10m.f015 b/parm/wave/grib2_gfswave.at_10m.f015 similarity index 100% rename from parm/parm_wave/grib2_gfswave.at_10m.f015 rename to parm/wave/grib2_gfswave.at_10m.f015 diff --git a/parm/parm_wave/grib2_gfswave.at_10m.f018 b/parm/wave/grib2_gfswave.at_10m.f018 similarity index 100% rename from parm/parm_wave/grib2_gfswave.at_10m.f018 rename to parm/wave/grib2_gfswave.at_10m.f018 diff --git a/parm/parm_wave/grib2_gfswave.at_10m.f021 b/parm/wave/grib2_gfswave.at_10m.f021 similarity index 100% rename from parm/parm_wave/grib2_gfswave.at_10m.f021 rename 
to parm/wave/grib2_gfswave.at_10m.f021 diff --git a/parm/parm_wave/grib2_gfswave.at_10m.f024 b/parm/wave/grib2_gfswave.at_10m.f024 similarity index 100% rename from parm/parm_wave/grib2_gfswave.at_10m.f024 rename to parm/wave/grib2_gfswave.at_10m.f024 diff --git a/parm/parm_wave/grib2_gfswave.at_10m.f027 b/parm/wave/grib2_gfswave.at_10m.f027 similarity index 100% rename from parm/parm_wave/grib2_gfswave.at_10m.f027 rename to parm/wave/grib2_gfswave.at_10m.f027 diff --git a/parm/parm_wave/grib2_gfswave.at_10m.f030 b/parm/wave/grib2_gfswave.at_10m.f030 similarity index 100% rename from parm/parm_wave/grib2_gfswave.at_10m.f030 rename to parm/wave/grib2_gfswave.at_10m.f030 diff --git a/parm/parm_wave/grib2_gfswave.at_10m.f033 b/parm/wave/grib2_gfswave.at_10m.f033 similarity index 100% rename from parm/parm_wave/grib2_gfswave.at_10m.f033 rename to parm/wave/grib2_gfswave.at_10m.f033 diff --git a/parm/parm_wave/grib2_gfswave.at_10m.f036 b/parm/wave/grib2_gfswave.at_10m.f036 similarity index 100% rename from parm/parm_wave/grib2_gfswave.at_10m.f036 rename to parm/wave/grib2_gfswave.at_10m.f036 diff --git a/parm/parm_wave/grib2_gfswave.at_10m.f039 b/parm/wave/grib2_gfswave.at_10m.f039 similarity index 100% rename from parm/parm_wave/grib2_gfswave.at_10m.f039 rename to parm/wave/grib2_gfswave.at_10m.f039 diff --git a/parm/parm_wave/grib2_gfswave.at_10m.f042 b/parm/wave/grib2_gfswave.at_10m.f042 similarity index 100% rename from parm/parm_wave/grib2_gfswave.at_10m.f042 rename to parm/wave/grib2_gfswave.at_10m.f042 diff --git a/parm/parm_wave/grib2_gfswave.at_10m.f045 b/parm/wave/grib2_gfswave.at_10m.f045 similarity index 100% rename from parm/parm_wave/grib2_gfswave.at_10m.f045 rename to parm/wave/grib2_gfswave.at_10m.f045 diff --git a/parm/parm_wave/grib2_gfswave.at_10m.f048 b/parm/wave/grib2_gfswave.at_10m.f048 similarity index 100% rename from parm/parm_wave/grib2_gfswave.at_10m.f048 rename to parm/wave/grib2_gfswave.at_10m.f048 diff --git 
a/parm/parm_wave/grib2_gfswave.at_10m.f051 b/parm/wave/grib2_gfswave.at_10m.f051 similarity index 100% rename from parm/parm_wave/grib2_gfswave.at_10m.f051 rename to parm/wave/grib2_gfswave.at_10m.f051 diff --git a/parm/parm_wave/grib2_gfswave.at_10m.f054 b/parm/wave/grib2_gfswave.at_10m.f054 similarity index 100% rename from parm/parm_wave/grib2_gfswave.at_10m.f054 rename to parm/wave/grib2_gfswave.at_10m.f054 diff --git a/parm/parm_wave/grib2_gfswave.at_10m.f057 b/parm/wave/grib2_gfswave.at_10m.f057 similarity index 100% rename from parm/parm_wave/grib2_gfswave.at_10m.f057 rename to parm/wave/grib2_gfswave.at_10m.f057 diff --git a/parm/parm_wave/grib2_gfswave.at_10m.f060 b/parm/wave/grib2_gfswave.at_10m.f060 similarity index 100% rename from parm/parm_wave/grib2_gfswave.at_10m.f060 rename to parm/wave/grib2_gfswave.at_10m.f060 diff --git a/parm/parm_wave/grib2_gfswave.at_10m.f063 b/parm/wave/grib2_gfswave.at_10m.f063 similarity index 100% rename from parm/parm_wave/grib2_gfswave.at_10m.f063 rename to parm/wave/grib2_gfswave.at_10m.f063 diff --git a/parm/parm_wave/grib2_gfswave.at_10m.f066 b/parm/wave/grib2_gfswave.at_10m.f066 similarity index 100% rename from parm/parm_wave/grib2_gfswave.at_10m.f066 rename to parm/wave/grib2_gfswave.at_10m.f066 diff --git a/parm/parm_wave/grib2_gfswave.at_10m.f069 b/parm/wave/grib2_gfswave.at_10m.f069 similarity index 100% rename from parm/parm_wave/grib2_gfswave.at_10m.f069 rename to parm/wave/grib2_gfswave.at_10m.f069 diff --git a/parm/parm_wave/grib2_gfswave.at_10m.f072 b/parm/wave/grib2_gfswave.at_10m.f072 similarity index 100% rename from parm/parm_wave/grib2_gfswave.at_10m.f072 rename to parm/wave/grib2_gfswave.at_10m.f072 diff --git a/parm/parm_wave/grib2_gfswave.at_10m.f078 b/parm/wave/grib2_gfswave.at_10m.f078 similarity index 100% rename from parm/parm_wave/grib2_gfswave.at_10m.f078 rename to parm/wave/grib2_gfswave.at_10m.f078 diff --git a/parm/parm_wave/grib2_gfswave.at_10m.f084 b/parm/wave/grib2_gfswave.at_10m.f084 
similarity index 100% rename from parm/parm_wave/grib2_gfswave.at_10m.f084 rename to parm/wave/grib2_gfswave.at_10m.f084 diff --git a/parm/parm_wave/grib2_gfswave.at_10m.f090 b/parm/wave/grib2_gfswave.at_10m.f090 similarity index 100% rename from parm/parm_wave/grib2_gfswave.at_10m.f090 rename to parm/wave/grib2_gfswave.at_10m.f090 diff --git a/parm/parm_wave/grib2_gfswave.at_10m.f096 b/parm/wave/grib2_gfswave.at_10m.f096 similarity index 100% rename from parm/parm_wave/grib2_gfswave.at_10m.f096 rename to parm/wave/grib2_gfswave.at_10m.f096 diff --git a/parm/parm_wave/grib2_gfswave.at_10m.f102 b/parm/wave/grib2_gfswave.at_10m.f102 similarity index 100% rename from parm/parm_wave/grib2_gfswave.at_10m.f102 rename to parm/wave/grib2_gfswave.at_10m.f102 diff --git a/parm/parm_wave/grib2_gfswave.at_10m.f108 b/parm/wave/grib2_gfswave.at_10m.f108 similarity index 100% rename from parm/parm_wave/grib2_gfswave.at_10m.f108 rename to parm/wave/grib2_gfswave.at_10m.f108 diff --git a/parm/parm_wave/grib2_gfswave.at_10m.f114 b/parm/wave/grib2_gfswave.at_10m.f114 similarity index 100% rename from parm/parm_wave/grib2_gfswave.at_10m.f114 rename to parm/wave/grib2_gfswave.at_10m.f114 diff --git a/parm/parm_wave/grib2_gfswave.at_10m.f120 b/parm/wave/grib2_gfswave.at_10m.f120 similarity index 100% rename from parm/parm_wave/grib2_gfswave.at_10m.f120 rename to parm/wave/grib2_gfswave.at_10m.f120 diff --git a/parm/parm_wave/grib2_gfswave.at_10m.f126 b/parm/wave/grib2_gfswave.at_10m.f126 similarity index 100% rename from parm/parm_wave/grib2_gfswave.at_10m.f126 rename to parm/wave/grib2_gfswave.at_10m.f126 diff --git a/parm/parm_wave/grib2_gfswave.at_10m.f132 b/parm/wave/grib2_gfswave.at_10m.f132 similarity index 100% rename from parm/parm_wave/grib2_gfswave.at_10m.f132 rename to parm/wave/grib2_gfswave.at_10m.f132 diff --git a/parm/parm_wave/grib2_gfswave.at_10m.f138 b/parm/wave/grib2_gfswave.at_10m.f138 similarity index 100% rename from parm/parm_wave/grib2_gfswave.at_10m.f138 rename 
to parm/wave/grib2_gfswave.at_10m.f138 diff --git a/parm/parm_wave/grib2_gfswave.at_10m.f144 b/parm/wave/grib2_gfswave.at_10m.f144 similarity index 100% rename from parm/parm_wave/grib2_gfswave.at_10m.f144 rename to parm/wave/grib2_gfswave.at_10m.f144 diff --git a/parm/parm_wave/grib2_gfswave.at_10m.f150 b/parm/wave/grib2_gfswave.at_10m.f150 similarity index 100% rename from parm/parm_wave/grib2_gfswave.at_10m.f150 rename to parm/wave/grib2_gfswave.at_10m.f150 diff --git a/parm/parm_wave/grib2_gfswave.at_10m.f156 b/parm/wave/grib2_gfswave.at_10m.f156 similarity index 100% rename from parm/parm_wave/grib2_gfswave.at_10m.f156 rename to parm/wave/grib2_gfswave.at_10m.f156 diff --git a/parm/parm_wave/grib2_gfswave.at_10m.f162 b/parm/wave/grib2_gfswave.at_10m.f162 similarity index 100% rename from parm/parm_wave/grib2_gfswave.at_10m.f162 rename to parm/wave/grib2_gfswave.at_10m.f162 diff --git a/parm/parm_wave/grib2_gfswave.at_10m.f168 b/parm/wave/grib2_gfswave.at_10m.f168 similarity index 100% rename from parm/parm_wave/grib2_gfswave.at_10m.f168 rename to parm/wave/grib2_gfswave.at_10m.f168 diff --git a/parm/parm_wave/grib2_gfswave.at_10m.f174 b/parm/wave/grib2_gfswave.at_10m.f174 similarity index 100% rename from parm/parm_wave/grib2_gfswave.at_10m.f174 rename to parm/wave/grib2_gfswave.at_10m.f174 diff --git a/parm/parm_wave/grib2_gfswave.at_10m.f180 b/parm/wave/grib2_gfswave.at_10m.f180 similarity index 100% rename from parm/parm_wave/grib2_gfswave.at_10m.f180 rename to parm/wave/grib2_gfswave.at_10m.f180 diff --git a/parm/parm_wave/grib2_gfswave.ep_10m.f000 b/parm/wave/grib2_gfswave.ep_10m.f000 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ep_10m.f000 rename to parm/wave/grib2_gfswave.ep_10m.f000 diff --git a/parm/parm_wave/grib2_gfswave.ep_10m.f003 b/parm/wave/grib2_gfswave.ep_10m.f003 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ep_10m.f003 rename to parm/wave/grib2_gfswave.ep_10m.f003 diff --git 
a/parm/parm_wave/grib2_gfswave.ep_10m.f006 b/parm/wave/grib2_gfswave.ep_10m.f006 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ep_10m.f006 rename to parm/wave/grib2_gfswave.ep_10m.f006 diff --git a/parm/parm_wave/grib2_gfswave.ep_10m.f009 b/parm/wave/grib2_gfswave.ep_10m.f009 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ep_10m.f009 rename to parm/wave/grib2_gfswave.ep_10m.f009 diff --git a/parm/parm_wave/grib2_gfswave.ep_10m.f012 b/parm/wave/grib2_gfswave.ep_10m.f012 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ep_10m.f012 rename to parm/wave/grib2_gfswave.ep_10m.f012 diff --git a/parm/parm_wave/grib2_gfswave.ep_10m.f015 b/parm/wave/grib2_gfswave.ep_10m.f015 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ep_10m.f015 rename to parm/wave/grib2_gfswave.ep_10m.f015 diff --git a/parm/parm_wave/grib2_gfswave.ep_10m.f018 b/parm/wave/grib2_gfswave.ep_10m.f018 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ep_10m.f018 rename to parm/wave/grib2_gfswave.ep_10m.f018 diff --git a/parm/parm_wave/grib2_gfswave.ep_10m.f021 b/parm/wave/grib2_gfswave.ep_10m.f021 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ep_10m.f021 rename to parm/wave/grib2_gfswave.ep_10m.f021 diff --git a/parm/parm_wave/grib2_gfswave.ep_10m.f024 b/parm/wave/grib2_gfswave.ep_10m.f024 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ep_10m.f024 rename to parm/wave/grib2_gfswave.ep_10m.f024 diff --git a/parm/parm_wave/grib2_gfswave.ep_10m.f027 b/parm/wave/grib2_gfswave.ep_10m.f027 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ep_10m.f027 rename to parm/wave/grib2_gfswave.ep_10m.f027 diff --git a/parm/parm_wave/grib2_gfswave.ep_10m.f030 b/parm/wave/grib2_gfswave.ep_10m.f030 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ep_10m.f030 rename to parm/wave/grib2_gfswave.ep_10m.f030 diff --git a/parm/parm_wave/grib2_gfswave.ep_10m.f033 b/parm/wave/grib2_gfswave.ep_10m.f033 
similarity index 100% rename from parm/parm_wave/grib2_gfswave.ep_10m.f033 rename to parm/wave/grib2_gfswave.ep_10m.f033 diff --git a/parm/parm_wave/grib2_gfswave.ep_10m.f036 b/parm/wave/grib2_gfswave.ep_10m.f036 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ep_10m.f036 rename to parm/wave/grib2_gfswave.ep_10m.f036 diff --git a/parm/parm_wave/grib2_gfswave.ep_10m.f039 b/parm/wave/grib2_gfswave.ep_10m.f039 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ep_10m.f039 rename to parm/wave/grib2_gfswave.ep_10m.f039 diff --git a/parm/parm_wave/grib2_gfswave.ep_10m.f042 b/parm/wave/grib2_gfswave.ep_10m.f042 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ep_10m.f042 rename to parm/wave/grib2_gfswave.ep_10m.f042 diff --git a/parm/parm_wave/grib2_gfswave.ep_10m.f045 b/parm/wave/grib2_gfswave.ep_10m.f045 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ep_10m.f045 rename to parm/wave/grib2_gfswave.ep_10m.f045 diff --git a/parm/parm_wave/grib2_gfswave.ep_10m.f048 b/parm/wave/grib2_gfswave.ep_10m.f048 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ep_10m.f048 rename to parm/wave/grib2_gfswave.ep_10m.f048 diff --git a/parm/parm_wave/grib2_gfswave.ep_10m.f051 b/parm/wave/grib2_gfswave.ep_10m.f051 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ep_10m.f051 rename to parm/wave/grib2_gfswave.ep_10m.f051 diff --git a/parm/parm_wave/grib2_gfswave.ep_10m.f054 b/parm/wave/grib2_gfswave.ep_10m.f054 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ep_10m.f054 rename to parm/wave/grib2_gfswave.ep_10m.f054 diff --git a/parm/parm_wave/grib2_gfswave.ep_10m.f057 b/parm/wave/grib2_gfswave.ep_10m.f057 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ep_10m.f057 rename to parm/wave/grib2_gfswave.ep_10m.f057 diff --git a/parm/parm_wave/grib2_gfswave.ep_10m.f060 b/parm/wave/grib2_gfswave.ep_10m.f060 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ep_10m.f060 rename 
to parm/wave/grib2_gfswave.ep_10m.f060 diff --git a/parm/parm_wave/grib2_gfswave.ep_10m.f063 b/parm/wave/grib2_gfswave.ep_10m.f063 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ep_10m.f063 rename to parm/wave/grib2_gfswave.ep_10m.f063 diff --git a/parm/parm_wave/grib2_gfswave.ep_10m.f066 b/parm/wave/grib2_gfswave.ep_10m.f066 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ep_10m.f066 rename to parm/wave/grib2_gfswave.ep_10m.f066 diff --git a/parm/parm_wave/grib2_gfswave.ep_10m.f069 b/parm/wave/grib2_gfswave.ep_10m.f069 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ep_10m.f069 rename to parm/wave/grib2_gfswave.ep_10m.f069 diff --git a/parm/parm_wave/grib2_gfswave.ep_10m.f072 b/parm/wave/grib2_gfswave.ep_10m.f072 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ep_10m.f072 rename to parm/wave/grib2_gfswave.ep_10m.f072 diff --git a/parm/parm_wave/grib2_gfswave.ep_10m.f078 b/parm/wave/grib2_gfswave.ep_10m.f078 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ep_10m.f078 rename to parm/wave/grib2_gfswave.ep_10m.f078 diff --git a/parm/parm_wave/grib2_gfswave.ep_10m.f084 b/parm/wave/grib2_gfswave.ep_10m.f084 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ep_10m.f084 rename to parm/wave/grib2_gfswave.ep_10m.f084 diff --git a/parm/parm_wave/grib2_gfswave.ep_10m.f090 b/parm/wave/grib2_gfswave.ep_10m.f090 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ep_10m.f090 rename to parm/wave/grib2_gfswave.ep_10m.f090 diff --git a/parm/parm_wave/grib2_gfswave.ep_10m.f096 b/parm/wave/grib2_gfswave.ep_10m.f096 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ep_10m.f096 rename to parm/wave/grib2_gfswave.ep_10m.f096 diff --git a/parm/parm_wave/grib2_gfswave.ep_10m.f102 b/parm/wave/grib2_gfswave.ep_10m.f102 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ep_10m.f102 rename to parm/wave/grib2_gfswave.ep_10m.f102 diff --git 
a/parm/parm_wave/grib2_gfswave.ep_10m.f108 b/parm/wave/grib2_gfswave.ep_10m.f108 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ep_10m.f108 rename to parm/wave/grib2_gfswave.ep_10m.f108 diff --git a/parm/parm_wave/grib2_gfswave.ep_10m.f114 b/parm/wave/grib2_gfswave.ep_10m.f114 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ep_10m.f114 rename to parm/wave/grib2_gfswave.ep_10m.f114 diff --git a/parm/parm_wave/grib2_gfswave.ep_10m.f120 b/parm/wave/grib2_gfswave.ep_10m.f120 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ep_10m.f120 rename to parm/wave/grib2_gfswave.ep_10m.f120 diff --git a/parm/parm_wave/grib2_gfswave.ep_10m.f126 b/parm/wave/grib2_gfswave.ep_10m.f126 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ep_10m.f126 rename to parm/wave/grib2_gfswave.ep_10m.f126 diff --git a/parm/parm_wave/grib2_gfswave.ep_10m.f132 b/parm/wave/grib2_gfswave.ep_10m.f132 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ep_10m.f132 rename to parm/wave/grib2_gfswave.ep_10m.f132 diff --git a/parm/parm_wave/grib2_gfswave.ep_10m.f138 b/parm/wave/grib2_gfswave.ep_10m.f138 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ep_10m.f138 rename to parm/wave/grib2_gfswave.ep_10m.f138 diff --git a/parm/parm_wave/grib2_gfswave.ep_10m.f144 b/parm/wave/grib2_gfswave.ep_10m.f144 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ep_10m.f144 rename to parm/wave/grib2_gfswave.ep_10m.f144 diff --git a/parm/parm_wave/grib2_gfswave.ep_10m.f150 b/parm/wave/grib2_gfswave.ep_10m.f150 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ep_10m.f150 rename to parm/wave/grib2_gfswave.ep_10m.f150 diff --git a/parm/parm_wave/grib2_gfswave.ep_10m.f156 b/parm/wave/grib2_gfswave.ep_10m.f156 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ep_10m.f156 rename to parm/wave/grib2_gfswave.ep_10m.f156 diff --git a/parm/parm_wave/grib2_gfswave.ep_10m.f162 b/parm/wave/grib2_gfswave.ep_10m.f162 
similarity index 100% rename from parm/parm_wave/grib2_gfswave.ep_10m.f162 rename to parm/wave/grib2_gfswave.ep_10m.f162 diff --git a/parm/parm_wave/grib2_gfswave.ep_10m.f168 b/parm/wave/grib2_gfswave.ep_10m.f168 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ep_10m.f168 rename to parm/wave/grib2_gfswave.ep_10m.f168 diff --git a/parm/parm_wave/grib2_gfswave.ep_10m.f174 b/parm/wave/grib2_gfswave.ep_10m.f174 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ep_10m.f174 rename to parm/wave/grib2_gfswave.ep_10m.f174 diff --git a/parm/parm_wave/grib2_gfswave.ep_10m.f180 b/parm/wave/grib2_gfswave.ep_10m.f180 similarity index 100% rename from parm/parm_wave/grib2_gfswave.ep_10m.f180 rename to parm/wave/grib2_gfswave.ep_10m.f180 diff --git a/parm/parm_wave/grib2_gfswave.glo_30m.f000 b/parm/wave/grib2_gfswave.glo_30m.f000 similarity index 100% rename from parm/parm_wave/grib2_gfswave.glo_30m.f000 rename to parm/wave/grib2_gfswave.glo_30m.f000 diff --git a/parm/parm_wave/grib2_gfswave.glo_30m.f003 b/parm/wave/grib2_gfswave.glo_30m.f003 similarity index 100% rename from parm/parm_wave/grib2_gfswave.glo_30m.f003 rename to parm/wave/grib2_gfswave.glo_30m.f003 diff --git a/parm/parm_wave/grib2_gfswave.glo_30m.f006 b/parm/wave/grib2_gfswave.glo_30m.f006 similarity index 100% rename from parm/parm_wave/grib2_gfswave.glo_30m.f006 rename to parm/wave/grib2_gfswave.glo_30m.f006 diff --git a/parm/parm_wave/grib2_gfswave.glo_30m.f009 b/parm/wave/grib2_gfswave.glo_30m.f009 similarity index 100% rename from parm/parm_wave/grib2_gfswave.glo_30m.f009 rename to parm/wave/grib2_gfswave.glo_30m.f009 diff --git a/parm/parm_wave/grib2_gfswave.glo_30m.f012 b/parm/wave/grib2_gfswave.glo_30m.f012 similarity index 100% rename from parm/parm_wave/grib2_gfswave.glo_30m.f012 rename to parm/wave/grib2_gfswave.glo_30m.f012 diff --git a/parm/parm_wave/grib2_gfswave.glo_30m.f015 b/parm/wave/grib2_gfswave.glo_30m.f015 similarity index 100% rename from 
parm/parm_wave/grib2_gfswave.glo_30m.f015 rename to parm/wave/grib2_gfswave.glo_30m.f015 diff --git a/parm/parm_wave/grib2_gfswave.glo_30m.f018 b/parm/wave/grib2_gfswave.glo_30m.f018 similarity index 100% rename from parm/parm_wave/grib2_gfswave.glo_30m.f018 rename to parm/wave/grib2_gfswave.glo_30m.f018 diff --git a/parm/parm_wave/grib2_gfswave.glo_30m.f021 b/parm/wave/grib2_gfswave.glo_30m.f021 similarity index 100% rename from parm/parm_wave/grib2_gfswave.glo_30m.f021 rename to parm/wave/grib2_gfswave.glo_30m.f021 diff --git a/parm/parm_wave/grib2_gfswave.glo_30m.f024 b/parm/wave/grib2_gfswave.glo_30m.f024 similarity index 100% rename from parm/parm_wave/grib2_gfswave.glo_30m.f024 rename to parm/wave/grib2_gfswave.glo_30m.f024 diff --git a/parm/parm_wave/grib2_gfswave.glo_30m.f027 b/parm/wave/grib2_gfswave.glo_30m.f027 similarity index 100% rename from parm/parm_wave/grib2_gfswave.glo_30m.f027 rename to parm/wave/grib2_gfswave.glo_30m.f027 diff --git a/parm/parm_wave/grib2_gfswave.glo_30m.f030 b/parm/wave/grib2_gfswave.glo_30m.f030 similarity index 100% rename from parm/parm_wave/grib2_gfswave.glo_30m.f030 rename to parm/wave/grib2_gfswave.glo_30m.f030 diff --git a/parm/parm_wave/grib2_gfswave.glo_30m.f033 b/parm/wave/grib2_gfswave.glo_30m.f033 similarity index 100% rename from parm/parm_wave/grib2_gfswave.glo_30m.f033 rename to parm/wave/grib2_gfswave.glo_30m.f033 diff --git a/parm/parm_wave/grib2_gfswave.glo_30m.f036 b/parm/wave/grib2_gfswave.glo_30m.f036 similarity index 100% rename from parm/parm_wave/grib2_gfswave.glo_30m.f036 rename to parm/wave/grib2_gfswave.glo_30m.f036 diff --git a/parm/parm_wave/grib2_gfswave.glo_30m.f039 b/parm/wave/grib2_gfswave.glo_30m.f039 similarity index 100% rename from parm/parm_wave/grib2_gfswave.glo_30m.f039 rename to parm/wave/grib2_gfswave.glo_30m.f039 diff --git a/parm/parm_wave/grib2_gfswave.glo_30m.f042 b/parm/wave/grib2_gfswave.glo_30m.f042 similarity index 100% rename from parm/parm_wave/grib2_gfswave.glo_30m.f042 
rename to parm/wave/grib2_gfswave.glo_30m.f042 diff --git a/parm/parm_wave/grib2_gfswave.glo_30m.f045 b/parm/wave/grib2_gfswave.glo_30m.f045 similarity index 100% rename from parm/parm_wave/grib2_gfswave.glo_30m.f045 rename to parm/wave/grib2_gfswave.glo_30m.f045 diff --git a/parm/parm_wave/grib2_gfswave.glo_30m.f048 b/parm/wave/grib2_gfswave.glo_30m.f048 similarity index 100% rename from parm/parm_wave/grib2_gfswave.glo_30m.f048 rename to parm/wave/grib2_gfswave.glo_30m.f048 diff --git a/parm/parm_wave/grib2_gfswave.glo_30m.f051 b/parm/wave/grib2_gfswave.glo_30m.f051 similarity index 100% rename from parm/parm_wave/grib2_gfswave.glo_30m.f051 rename to parm/wave/grib2_gfswave.glo_30m.f051 diff --git a/parm/parm_wave/grib2_gfswave.glo_30m.f054 b/parm/wave/grib2_gfswave.glo_30m.f054 similarity index 100% rename from parm/parm_wave/grib2_gfswave.glo_30m.f054 rename to parm/wave/grib2_gfswave.glo_30m.f054 diff --git a/parm/parm_wave/grib2_gfswave.glo_30m.f057 b/parm/wave/grib2_gfswave.glo_30m.f057 similarity index 100% rename from parm/parm_wave/grib2_gfswave.glo_30m.f057 rename to parm/wave/grib2_gfswave.glo_30m.f057 diff --git a/parm/parm_wave/grib2_gfswave.glo_30m.f060 b/parm/wave/grib2_gfswave.glo_30m.f060 similarity index 100% rename from parm/parm_wave/grib2_gfswave.glo_30m.f060 rename to parm/wave/grib2_gfswave.glo_30m.f060 diff --git a/parm/parm_wave/grib2_gfswave.glo_30m.f063 b/parm/wave/grib2_gfswave.glo_30m.f063 similarity index 100% rename from parm/parm_wave/grib2_gfswave.glo_30m.f063 rename to parm/wave/grib2_gfswave.glo_30m.f063 diff --git a/parm/parm_wave/grib2_gfswave.glo_30m.f066 b/parm/wave/grib2_gfswave.glo_30m.f066 similarity index 100% rename from parm/parm_wave/grib2_gfswave.glo_30m.f066 rename to parm/wave/grib2_gfswave.glo_30m.f066 diff --git a/parm/parm_wave/grib2_gfswave.glo_30m.f069 b/parm/wave/grib2_gfswave.glo_30m.f069 similarity index 100% rename from parm/parm_wave/grib2_gfswave.glo_30m.f069 rename to parm/wave/grib2_gfswave.glo_30m.f069 
diff --git a/parm/parm_wave/grib2_gfswave.glo_30m.f072 b/parm/wave/grib2_gfswave.glo_30m.f072 similarity index 100% rename from parm/parm_wave/grib2_gfswave.glo_30m.f072 rename to parm/wave/grib2_gfswave.glo_30m.f072 diff --git a/parm/parm_wave/grib2_gfswave.glo_30m.f078 b/parm/wave/grib2_gfswave.glo_30m.f078 similarity index 100% rename from parm/parm_wave/grib2_gfswave.glo_30m.f078 rename to parm/wave/grib2_gfswave.glo_30m.f078 diff --git a/parm/parm_wave/grib2_gfswave.glo_30m.f084 b/parm/wave/grib2_gfswave.glo_30m.f084 similarity index 100% rename from parm/parm_wave/grib2_gfswave.glo_30m.f084 rename to parm/wave/grib2_gfswave.glo_30m.f084 diff --git a/parm/parm_wave/grib2_gfswave.glo_30m.f090 b/parm/wave/grib2_gfswave.glo_30m.f090 similarity index 100% rename from parm/parm_wave/grib2_gfswave.glo_30m.f090 rename to parm/wave/grib2_gfswave.glo_30m.f090 diff --git a/parm/parm_wave/grib2_gfswave.glo_30m.f096 b/parm/wave/grib2_gfswave.glo_30m.f096 similarity index 100% rename from parm/parm_wave/grib2_gfswave.glo_30m.f096 rename to parm/wave/grib2_gfswave.glo_30m.f096 diff --git a/parm/parm_wave/grib2_gfswave.glo_30m.f102 b/parm/wave/grib2_gfswave.glo_30m.f102 similarity index 100% rename from parm/parm_wave/grib2_gfswave.glo_30m.f102 rename to parm/wave/grib2_gfswave.glo_30m.f102 diff --git a/parm/parm_wave/grib2_gfswave.glo_30m.f108 b/parm/wave/grib2_gfswave.glo_30m.f108 similarity index 100% rename from parm/parm_wave/grib2_gfswave.glo_30m.f108 rename to parm/wave/grib2_gfswave.glo_30m.f108 diff --git a/parm/parm_wave/grib2_gfswave.glo_30m.f114 b/parm/wave/grib2_gfswave.glo_30m.f114 similarity index 100% rename from parm/parm_wave/grib2_gfswave.glo_30m.f114 rename to parm/wave/grib2_gfswave.glo_30m.f114 diff --git a/parm/parm_wave/grib2_gfswave.glo_30m.f120 b/parm/wave/grib2_gfswave.glo_30m.f120 similarity index 100% rename from parm/parm_wave/grib2_gfswave.glo_30m.f120 rename to parm/wave/grib2_gfswave.glo_30m.f120 diff --git 
a/parm/parm_wave/grib2_gfswave.glo_30m.f126 b/parm/wave/grib2_gfswave.glo_30m.f126 similarity index 100% rename from parm/parm_wave/grib2_gfswave.glo_30m.f126 rename to parm/wave/grib2_gfswave.glo_30m.f126 diff --git a/parm/parm_wave/grib2_gfswave.glo_30m.f132 b/parm/wave/grib2_gfswave.glo_30m.f132 similarity index 100% rename from parm/parm_wave/grib2_gfswave.glo_30m.f132 rename to parm/wave/grib2_gfswave.glo_30m.f132 diff --git a/parm/parm_wave/grib2_gfswave.glo_30m.f138 b/parm/wave/grib2_gfswave.glo_30m.f138 similarity index 100% rename from parm/parm_wave/grib2_gfswave.glo_30m.f138 rename to parm/wave/grib2_gfswave.glo_30m.f138 diff --git a/parm/parm_wave/grib2_gfswave.glo_30m.f144 b/parm/wave/grib2_gfswave.glo_30m.f144 similarity index 100% rename from parm/parm_wave/grib2_gfswave.glo_30m.f144 rename to parm/wave/grib2_gfswave.glo_30m.f144 diff --git a/parm/parm_wave/grib2_gfswave.glo_30m.f150 b/parm/wave/grib2_gfswave.glo_30m.f150 similarity index 100% rename from parm/parm_wave/grib2_gfswave.glo_30m.f150 rename to parm/wave/grib2_gfswave.glo_30m.f150 diff --git a/parm/parm_wave/grib2_gfswave.glo_30m.f156 b/parm/wave/grib2_gfswave.glo_30m.f156 similarity index 100% rename from parm/parm_wave/grib2_gfswave.glo_30m.f156 rename to parm/wave/grib2_gfswave.glo_30m.f156 diff --git a/parm/parm_wave/grib2_gfswave.glo_30m.f162 b/parm/wave/grib2_gfswave.glo_30m.f162 similarity index 100% rename from parm/parm_wave/grib2_gfswave.glo_30m.f162 rename to parm/wave/grib2_gfswave.glo_30m.f162 diff --git a/parm/parm_wave/grib2_gfswave.glo_30m.f168 b/parm/wave/grib2_gfswave.glo_30m.f168 similarity index 100% rename from parm/parm_wave/grib2_gfswave.glo_30m.f168 rename to parm/wave/grib2_gfswave.glo_30m.f168 diff --git a/parm/parm_wave/grib2_gfswave.glo_30m.f174 b/parm/wave/grib2_gfswave.glo_30m.f174 similarity index 100% rename from parm/parm_wave/grib2_gfswave.glo_30m.f174 rename to parm/wave/grib2_gfswave.glo_30m.f174 diff --git a/parm/parm_wave/grib2_gfswave.glo_30m.f180 
b/parm/wave/grib2_gfswave.glo_30m.f180 similarity index 100% rename from parm/parm_wave/grib2_gfswave.glo_30m.f180 rename to parm/wave/grib2_gfswave.glo_30m.f180 diff --git a/parm/parm_wave/grib2_gfswave.wc_10m.f000 b/parm/wave/grib2_gfswave.wc_10m.f000 similarity index 100% rename from parm/parm_wave/grib2_gfswave.wc_10m.f000 rename to parm/wave/grib2_gfswave.wc_10m.f000 diff --git a/parm/parm_wave/grib2_gfswave.wc_10m.f003 b/parm/wave/grib2_gfswave.wc_10m.f003 similarity index 100% rename from parm/parm_wave/grib2_gfswave.wc_10m.f003 rename to parm/wave/grib2_gfswave.wc_10m.f003 diff --git a/parm/parm_wave/grib2_gfswave.wc_10m.f006 b/parm/wave/grib2_gfswave.wc_10m.f006 similarity index 100% rename from parm/parm_wave/grib2_gfswave.wc_10m.f006 rename to parm/wave/grib2_gfswave.wc_10m.f006 diff --git a/parm/parm_wave/grib2_gfswave.wc_10m.f009 b/parm/wave/grib2_gfswave.wc_10m.f009 similarity index 100% rename from parm/parm_wave/grib2_gfswave.wc_10m.f009 rename to parm/wave/grib2_gfswave.wc_10m.f009 diff --git a/parm/parm_wave/grib2_gfswave.wc_10m.f012 b/parm/wave/grib2_gfswave.wc_10m.f012 similarity index 100% rename from parm/parm_wave/grib2_gfswave.wc_10m.f012 rename to parm/wave/grib2_gfswave.wc_10m.f012 diff --git a/parm/parm_wave/grib2_gfswave.wc_10m.f015 b/parm/wave/grib2_gfswave.wc_10m.f015 similarity index 100% rename from parm/parm_wave/grib2_gfswave.wc_10m.f015 rename to parm/wave/grib2_gfswave.wc_10m.f015 diff --git a/parm/parm_wave/grib2_gfswave.wc_10m.f018 b/parm/wave/grib2_gfswave.wc_10m.f018 similarity index 100% rename from parm/parm_wave/grib2_gfswave.wc_10m.f018 rename to parm/wave/grib2_gfswave.wc_10m.f018 diff --git a/parm/parm_wave/grib2_gfswave.wc_10m.f021 b/parm/wave/grib2_gfswave.wc_10m.f021 similarity index 100% rename from parm/parm_wave/grib2_gfswave.wc_10m.f021 rename to parm/wave/grib2_gfswave.wc_10m.f021 diff --git a/parm/parm_wave/grib2_gfswave.wc_10m.f024 b/parm/wave/grib2_gfswave.wc_10m.f024 similarity index 100% rename from 
parm/parm_wave/grib2_gfswave.wc_10m.f024 rename to parm/wave/grib2_gfswave.wc_10m.f024 diff --git a/parm/parm_wave/grib2_gfswave.wc_10m.f027 b/parm/wave/grib2_gfswave.wc_10m.f027 similarity index 100% rename from parm/parm_wave/grib2_gfswave.wc_10m.f027 rename to parm/wave/grib2_gfswave.wc_10m.f027 diff --git a/parm/parm_wave/grib2_gfswave.wc_10m.f030 b/parm/wave/grib2_gfswave.wc_10m.f030 similarity index 100% rename from parm/parm_wave/grib2_gfswave.wc_10m.f030 rename to parm/wave/grib2_gfswave.wc_10m.f030 diff --git a/parm/parm_wave/grib2_gfswave.wc_10m.f033 b/parm/wave/grib2_gfswave.wc_10m.f033 similarity index 100% rename from parm/parm_wave/grib2_gfswave.wc_10m.f033 rename to parm/wave/grib2_gfswave.wc_10m.f033 diff --git a/parm/parm_wave/grib2_gfswave.wc_10m.f036 b/parm/wave/grib2_gfswave.wc_10m.f036 similarity index 100% rename from parm/parm_wave/grib2_gfswave.wc_10m.f036 rename to parm/wave/grib2_gfswave.wc_10m.f036 diff --git a/parm/parm_wave/grib2_gfswave.wc_10m.f039 b/parm/wave/grib2_gfswave.wc_10m.f039 similarity index 100% rename from parm/parm_wave/grib2_gfswave.wc_10m.f039 rename to parm/wave/grib2_gfswave.wc_10m.f039 diff --git a/parm/parm_wave/grib2_gfswave.wc_10m.f042 b/parm/wave/grib2_gfswave.wc_10m.f042 similarity index 100% rename from parm/parm_wave/grib2_gfswave.wc_10m.f042 rename to parm/wave/grib2_gfswave.wc_10m.f042 diff --git a/parm/parm_wave/grib2_gfswave.wc_10m.f045 b/parm/wave/grib2_gfswave.wc_10m.f045 similarity index 100% rename from parm/parm_wave/grib2_gfswave.wc_10m.f045 rename to parm/wave/grib2_gfswave.wc_10m.f045 diff --git a/parm/parm_wave/grib2_gfswave.wc_10m.f048 b/parm/wave/grib2_gfswave.wc_10m.f048 similarity index 100% rename from parm/parm_wave/grib2_gfswave.wc_10m.f048 rename to parm/wave/grib2_gfswave.wc_10m.f048 diff --git a/parm/parm_wave/grib2_gfswave.wc_10m.f051 b/parm/wave/grib2_gfswave.wc_10m.f051 similarity index 100% rename from parm/parm_wave/grib2_gfswave.wc_10m.f051 rename to 
parm/wave/grib2_gfswave.wc_10m.f051 diff --git a/parm/parm_wave/grib2_gfswave.wc_10m.f054 b/parm/wave/grib2_gfswave.wc_10m.f054 similarity index 100% rename from parm/parm_wave/grib2_gfswave.wc_10m.f054 rename to parm/wave/grib2_gfswave.wc_10m.f054 diff --git a/parm/parm_wave/grib2_gfswave.wc_10m.f057 b/parm/wave/grib2_gfswave.wc_10m.f057 similarity index 100% rename from parm/parm_wave/grib2_gfswave.wc_10m.f057 rename to parm/wave/grib2_gfswave.wc_10m.f057 diff --git a/parm/parm_wave/grib2_gfswave.wc_10m.f060 b/parm/wave/grib2_gfswave.wc_10m.f060 similarity index 100% rename from parm/parm_wave/grib2_gfswave.wc_10m.f060 rename to parm/wave/grib2_gfswave.wc_10m.f060 diff --git a/parm/parm_wave/grib2_gfswave.wc_10m.f063 b/parm/wave/grib2_gfswave.wc_10m.f063 similarity index 100% rename from parm/parm_wave/grib2_gfswave.wc_10m.f063 rename to parm/wave/grib2_gfswave.wc_10m.f063 diff --git a/parm/parm_wave/grib2_gfswave.wc_10m.f066 b/parm/wave/grib2_gfswave.wc_10m.f066 similarity index 100% rename from parm/parm_wave/grib2_gfswave.wc_10m.f066 rename to parm/wave/grib2_gfswave.wc_10m.f066 diff --git a/parm/parm_wave/grib2_gfswave.wc_10m.f069 b/parm/wave/grib2_gfswave.wc_10m.f069 similarity index 100% rename from parm/parm_wave/grib2_gfswave.wc_10m.f069 rename to parm/wave/grib2_gfswave.wc_10m.f069 diff --git a/parm/parm_wave/grib2_gfswave.wc_10m.f072 b/parm/wave/grib2_gfswave.wc_10m.f072 similarity index 100% rename from parm/parm_wave/grib2_gfswave.wc_10m.f072 rename to parm/wave/grib2_gfswave.wc_10m.f072 diff --git a/parm/parm_wave/grib2_gfswave.wc_10m.f078 b/parm/wave/grib2_gfswave.wc_10m.f078 similarity index 100% rename from parm/parm_wave/grib2_gfswave.wc_10m.f078 rename to parm/wave/grib2_gfswave.wc_10m.f078 diff --git a/parm/parm_wave/grib2_gfswave.wc_10m.f084 b/parm/wave/grib2_gfswave.wc_10m.f084 similarity index 100% rename from parm/parm_wave/grib2_gfswave.wc_10m.f084 rename to parm/wave/grib2_gfswave.wc_10m.f084 diff --git 
a/parm/parm_wave/grib2_gfswave.wc_10m.f090 b/parm/wave/grib2_gfswave.wc_10m.f090 similarity index 100% rename from parm/parm_wave/grib2_gfswave.wc_10m.f090 rename to parm/wave/grib2_gfswave.wc_10m.f090 diff --git a/parm/parm_wave/grib2_gfswave.wc_10m.f096 b/parm/wave/grib2_gfswave.wc_10m.f096 similarity index 100% rename from parm/parm_wave/grib2_gfswave.wc_10m.f096 rename to parm/wave/grib2_gfswave.wc_10m.f096 diff --git a/parm/parm_wave/grib2_gfswave.wc_10m.f102 b/parm/wave/grib2_gfswave.wc_10m.f102 similarity index 100% rename from parm/parm_wave/grib2_gfswave.wc_10m.f102 rename to parm/wave/grib2_gfswave.wc_10m.f102 diff --git a/parm/parm_wave/grib2_gfswave.wc_10m.f108 b/parm/wave/grib2_gfswave.wc_10m.f108 similarity index 100% rename from parm/parm_wave/grib2_gfswave.wc_10m.f108 rename to parm/wave/grib2_gfswave.wc_10m.f108 diff --git a/parm/parm_wave/grib2_gfswave.wc_10m.f114 b/parm/wave/grib2_gfswave.wc_10m.f114 similarity index 100% rename from parm/parm_wave/grib2_gfswave.wc_10m.f114 rename to parm/wave/grib2_gfswave.wc_10m.f114 diff --git a/parm/parm_wave/grib2_gfswave.wc_10m.f120 b/parm/wave/grib2_gfswave.wc_10m.f120 similarity index 100% rename from parm/parm_wave/grib2_gfswave.wc_10m.f120 rename to parm/wave/grib2_gfswave.wc_10m.f120 diff --git a/parm/parm_wave/grib2_gfswave.wc_10m.f126 b/parm/wave/grib2_gfswave.wc_10m.f126 similarity index 100% rename from parm/parm_wave/grib2_gfswave.wc_10m.f126 rename to parm/wave/grib2_gfswave.wc_10m.f126 diff --git a/parm/parm_wave/grib2_gfswave.wc_10m.f132 b/parm/wave/grib2_gfswave.wc_10m.f132 similarity index 100% rename from parm/parm_wave/grib2_gfswave.wc_10m.f132 rename to parm/wave/grib2_gfswave.wc_10m.f132 diff --git a/parm/parm_wave/grib2_gfswave.wc_10m.f138 b/parm/wave/grib2_gfswave.wc_10m.f138 similarity index 100% rename from parm/parm_wave/grib2_gfswave.wc_10m.f138 rename to parm/wave/grib2_gfswave.wc_10m.f138 diff --git a/parm/parm_wave/grib2_gfswave.wc_10m.f144 b/parm/wave/grib2_gfswave.wc_10m.f144 
similarity index 100% rename from parm/parm_wave/grib2_gfswave.wc_10m.f144 rename to parm/wave/grib2_gfswave.wc_10m.f144 diff --git a/parm/parm_wave/grib2_gfswave.wc_10m.f150 b/parm/wave/grib2_gfswave.wc_10m.f150 similarity index 100% rename from parm/parm_wave/grib2_gfswave.wc_10m.f150 rename to parm/wave/grib2_gfswave.wc_10m.f150 diff --git a/parm/parm_wave/grib2_gfswave.wc_10m.f156 b/parm/wave/grib2_gfswave.wc_10m.f156 similarity index 100% rename from parm/parm_wave/grib2_gfswave.wc_10m.f156 rename to parm/wave/grib2_gfswave.wc_10m.f156 diff --git a/parm/parm_wave/grib2_gfswave.wc_10m.f162 b/parm/wave/grib2_gfswave.wc_10m.f162 similarity index 100% rename from parm/parm_wave/grib2_gfswave.wc_10m.f162 rename to parm/wave/grib2_gfswave.wc_10m.f162 diff --git a/parm/parm_wave/grib2_gfswave.wc_10m.f168 b/parm/wave/grib2_gfswave.wc_10m.f168 similarity index 100% rename from parm/parm_wave/grib2_gfswave.wc_10m.f168 rename to parm/wave/grib2_gfswave.wc_10m.f168 diff --git a/parm/parm_wave/grib2_gfswave.wc_10m.f174 b/parm/wave/grib2_gfswave.wc_10m.f174 similarity index 100% rename from parm/parm_wave/grib2_gfswave.wc_10m.f174 rename to parm/wave/grib2_gfswave.wc_10m.f174 diff --git a/parm/parm_wave/grib2_gfswave.wc_10m.f180 b/parm/wave/grib2_gfswave.wc_10m.f180 similarity index 100% rename from parm/parm_wave/grib2_gfswave.wc_10m.f180 rename to parm/wave/grib2_gfswave.wc_10m.f180 diff --git a/scripts/exgdas_atmos_chgres_forenkf.sh b/scripts/exgdas_atmos_chgres_forenkf.sh index f99979bc77..25d034ef47 100755 --- a/scripts/exgdas_atmos_chgres_forenkf.sh +++ b/scripts/exgdas_atmos_chgres_forenkf.sh @@ -65,22 +65,22 @@ SIGLEVEL=${SIGLEVEL:-${FIXgsm}/global_hyblev.l${LEVS}.txt} APREFIX=${APREFIX:-""} APREFIX_ENS=${APREFIX_ENS:-""} # at full resolution -ATMF03=${ATMF03:-${COMOUT}/${APREFIX}atmf003.nc} -ATMF04=${ATMF04:-${COMOUT}/${APREFIX}atmf004.nc} -ATMF05=${ATMF05:-${COMOUT}/${APREFIX}atmf005.nc} -ATMF06=${ATMF06:-${COMOUT}/${APREFIX}atmf006.nc} 
-ATMF07=${ATMF07:-${COMOUT}/${APREFIX}atmf007.nc} -ATMF08=${ATMF08:-${COMOUT}/${APREFIX}atmf008.nc} -ATMF09=${ATMF09:-${COMOUT}/${APREFIX}atmf009.nc} +ATMF03=${ATMF03:-${COM_ATMOS_HISTORY}/${APREFIX}atmf003.nc} +ATMF04=${ATMF04:-${COM_ATMOS_HISTORY}/${APREFIX}atmf004.nc} +ATMF05=${ATMF05:-${COM_ATMOS_HISTORY}/${APREFIX}atmf005.nc} +ATMF06=${ATMF06:-${COM_ATMOS_HISTORY}/${APREFIX}atmf006.nc} +ATMF07=${ATMF07:-${COM_ATMOS_HISTORY}/${APREFIX}atmf007.nc} +ATMF08=${ATMF08:-${COM_ATMOS_HISTORY}/${APREFIX}atmf008.nc} +ATMF09=${ATMF09:-${COM_ATMOS_HISTORY}/${APREFIX}atmf009.nc} # at ensemble resolution -ATMF03ENS=${ATMF03ENS:-${COMOUT}/${APREFIX}atmf003.ensres.nc} -ATMF04ENS=${ATMF04ENS:-${COMOUT}/${APREFIX}atmf004.ensres.nc} -ATMF05ENS=${ATMF05ENS:-${COMOUT}/${APREFIX}atmf005.ensres.nc} -ATMF06ENS=${ATMF06ENS:-${COMOUT}/${APREFIX}atmf006.ensres.nc} -ATMF07ENS=${ATMF07ENS:-${COMOUT}/${APREFIX}atmf007.ensres.nc} -ATMF08ENS=${ATMF08ENS:-${COMOUT}/${APREFIX}atmf008.ensres.nc} -ATMF09ENS=${ATMF09ENS:-${COMOUT}/${APREFIX}atmf009.ensres.nc} -ATMFCST_ENSRES=${ATMFCST_ENSRES:-${COMOUT_ENS}/mem001/atmos/${APREFIX_ENS}atmf006.nc} +ATMF03ENS=${ATMF03ENS:-${COM_ATMOS_HISTORY}/${APREFIX}atmf003.ensres.nc} +ATMF04ENS=${ATMF04ENS:-${COM_ATMOS_HISTORY}/${APREFIX}atmf004.ensres.nc} +ATMF05ENS=${ATMF05ENS:-${COM_ATMOS_HISTORY}/${APREFIX}atmf005.ensres.nc} +ATMF06ENS=${ATMF06ENS:-${COM_ATMOS_HISTORY}/${APREFIX}atmf006.ensres.nc} +ATMF07ENS=${ATMF07ENS:-${COM_ATMOS_HISTORY}/${APREFIX}atmf007.ensres.nc} +ATMF08ENS=${ATMF08ENS:-${COM_ATMOS_HISTORY}/${APREFIX}atmf008.ensres.nc} +ATMF09ENS=${ATMF09ENS:-${COM_ATMOS_HISTORY}/${APREFIX}atmf009.ensres.nc} +ATMFCST_ENSRES=${ATMFCST_ENSRES:-${COM_ATMOS_HISTORY_MEM}/${APREFIX_ENS}atmf006.nc} # Set script / GSI control parameters DOHYBVAR=${DOHYBVAR:-"NO"} @@ -102,16 +102,7 @@ fi ################################################################################ ################################################################################ -# 
Preprocessing -mkdata=NO -if [ ! -d $DATA ]; then - mkdata=YES - mkdir -p $DATA -fi - -cd $DATA || exit 99 -############################################################## # get resolution information LONB_ENKF=${LONB_ENKF:-$($NCLEN $ATMFCST_ENSRES grid_xt)} # get LONB_ENKF LATB_ENKF=${LATB_ENKF:-$($NCLEN $ATMFCST_ENSRES grid_yt)} # get LATB_ENFK @@ -196,7 +187,5 @@ fi ################################################################################ # Postprocessing cd $pwd -[[ $mkdata = "YES" ]] && rm -rf $DATA - exit $err diff --git a/scripts/exgdas_atmos_gldas.sh b/scripts/exgdas_atmos_gldas.sh deleted file mode 100755 index ba56e323aa..0000000000 --- a/scripts/exgdas_atmos_gldas.sh +++ /dev/null @@ -1,332 +0,0 @@ -#! /usr/bin/env bash - -################################################################################ -#### UNIX Script Documentation Block -# . . -# Script name: exgdas_atmos_gldas.sh -# Script description: Runs the global land analysis -# -################################################################################ - -source "${HOMEgfs:?}/ush/preamble.sh" - -################################# -# Set up UTILITIES -################################# -export FINDDATE=${FINDDATE:-/apps/ops/prod/nco/core/prod_util.v2.0.13/ush/finddate.sh} -export utilexec=${utilexec:-/apps/ops/prod/libs/intel/19.1.3.304/grib_util/1.2.3/bin} -export CNVGRIB=${CNVGRIB:-${utilexec}/cnvgrib} -export WGRIB=${WGRIB:-${utilexec}/wgrib} -export WGRIB2=${WGRIB2:-/apps/ops/prod/libs/intel/19.1.3.304/wgrib2/2.0.7/bin/wgrib2} -export COPYGB=${COPYGB:-${utilexec}/copygb} -export NDATE=${NDATE:-/apps/ops/prod/nco/core/prod_util.v2.0.13/exec/ndate} -export DCOMIN=${DCOMIN:-${DCOMROOT:-"/lfs/h1/ops/prod/dcom"}} -export CPCGAUGE=${CPCGAUGE:-/lfs/h2/emc/global/noscrub/emc.global/dump} -export COMINgdas=${COMINgdas:-${ROTDIR}} -export OFFLINE_GLDAS=${OFFLINE_GLDAS:-"NO"} -export ERRSCRIPT=${ERRSCRIPT:-"eval [[ ${err} = 0 ]]"} - - -################################# -# Set up the running 
environment -################################# -export USE_CFP=${USE_CFP:-"NO"} -export assim_freq=${assim_freq:-6} -export gldas_spinup_hours=${gldas_spinup_hours:-72} - -# Local date variables -gldas_cdate=${CDATE:?} -gldas_eymd=$(echo "${gldas_cdate}" |cut -c 1-8) -gldas_ecyc=$(echo "${gldas_cdate}" |cut -c 9-10) -gldas_sdate=$(${NDATE} -"${gldas_spinup_hours}" "${CDATE}") -gldas_symd=$(echo "${gldas_sdate}" |cut -c 1-8) -gldas_scyc=$(echo "${gldas_sdate}" |cut -c 9-10) - -iau_cdate=${CDATE} -if [[ "${DOIAU:?}" = "YES" ]]; then - IAU_OFFSET=${IAU_OFFSET:-0} - IAUHALH=$((IAU_OFFSET/2)) - iau_cdate=$(${NDATE} -"${IAUHALH}" "${CDATE}") -fi -iau_eymd=$(echo "${iau_cdate}" |cut -c 1-8) -iau_ecyc=$(echo "${iau_cdate}" |cut -c 9-10) -echo "GLDAS runs from ${gldas_sdate} to ${iau_cdate}" - -CASE=${CASE:-C768} -res=$(echo "${CASE}" |cut -c2-5) -JCAP=$((2*res-2)) -nlat=$((2*res)) -nlon=$((4*res)) - -export USHgldas=${USHgldas:?} -export FIXgldas=${FIXgldas:-${HOMEgfs}/fix/gldas} -export topodir=${topodir:-${HOMEgfs}/fix/orog/${CASE}} - -DATA=${DATA:-${pwd}/gldastmp$$} -mkdata=NO -if [[ ! 
-d "${DATA}" ]]; then - mkdata=YES - mkdir -p "${DATA}" -fi -cd "${DATA}" || exit 1 -export RUNDIR=${DATA} - - -################################# -GDAS=${RUNDIR}/force -mkdir -p "${GDAS}" - -input1=${COMINgdas}/gdas.${gldas_symd}/${gldas_scyc}/atmos/RESTART -input2=${COMINgdas}/gdas.${gldas_eymd}/${gldas_ecyc}/atmos/RESTART -[[ -d ${RUNDIR} ]] && rm -fr "${RUNDIR}/FIX" -[[ -f ${RUNDIR}/LIS ]] && rm -fr "${RUNDIR}/LIS" -[[ -d ${RUNDIR}/input ]] && rm -fr "${RUNDIR}/input" -mkdir -p "${RUNDIR}/input" -ln -fs "${GDAS}" "${RUNDIR}/input/GDAS" -ln -fs "${EXECgldas:?}/gldas_model" "${RUNDIR}/LIS" - -# Set FIXgldas subfolder -ln -fs "${FIXgldas}/frac_grid/FIX_T${JCAP}" "${RUNDIR}/FIX" - -#--------------------------------------------------------------- -### 1) Get gdas 6-tile netcdf restart file and gdas forcing data -#--------------------------------------------------------------- - -"${USHgldas}/gldas_get_data.sh" "${gldas_sdate}" "${gldas_cdate}" -export err=$? -${ERRSCRIPT} || exit 2 - -#--------------------------------------------------------------- -### 2) Get CPC daily precip and temporally disaggreated -#--------------------------------------------------------------- - -"${USHgldas}/gldas_forcing.sh" "${gldas_symd}" "${gldas_eymd}" -export err=$? 
-${ERRSCRIPT} || exit 3 - -# spatially disaggregated - -if [[ "${JCAP}" -eq 1534 ]]; then - gds='255 4 3072 1536 89909 0 128 -89909 -117 117 768 0 0 0 0 0 0 0 0 0 255 0 0 0 0 0' -elif [[ "${JCAP}" -eq 766 ]]; then - gds='255 4 1536 768 89821 0 128 -89821 -234 234 384 0 0 0 0 0 0 0 0 0 255 0 0 0 0 0' -elif [[ "${JCAP}" -eq 382 ]]; then - gds='255 4 768 384 89641 0 128 -89641 -469 469 192 0 0 0 0 0 0 0 0 0 255 0 0 0 0 0' -elif [[ "${JCAP}" -eq 190 ]]; then - gds='255 4 384 192 89284 0 128 -89284 -938 938 96 0 0 0 0 0 0 0 0 0 255 0 0 0 0 0' -else - echo "JCAP=${JCAP} not supported, exit" - export err=4 - ${ERRSCRIPT} || exit 4 -fi - -echo "${JCAP}" -echo "${gds}" -ymdpre=$(sh "${FINDDATE}" "${gldas_symd}" d-1) -ymdend=$(sh "${FINDDATE}" "${gldas_eymd}" d-2) -ymd=${ymdpre} - -if [[ "${USE_CFP}" = "YES" ]] ; then - rm -f ./cfile - touch ./cfile -fi - -while [[ "${ymd}" -le "${ymdend}" ]]; do - if [[ "${ymd}" -ne "${ymdpre}" ]]; then - if [[ "${USE_CFP}" = "YES" ]] ; then - echo "${COPYGB} -i3 '-g${gds}' -x ${GDAS}/cpc.${ymd}/precip.gldas.${ymd}00 ${RUNDIR}/cmap.gdas.${ymd}00" >> ./cfile - echo "${COPYGB} -i3 '-g${gds}' -x ${GDAS}/cpc.${ymd}/precip.gldas.${ymd}06 ${RUNDIR}/cmap.gdas.${ymd}06" >> ./cfile - else - ${COPYGB} -i3 -g"${gds}" -x "${GDAS}/cpc.${ymd}/precip.gldas.${ymd}00" "${RUNDIR}/cmap.gdas.${ymd}00" - ${COPYGB} -i3 -g"${gds}" -x "${GDAS}/cpc.${ymd}/precip.gldas.${ymd}06" "${RUNDIR}/cmap.gdas.${ymd}06" - fi - fi - if [[ "${ymd}" -ne "${ymdend}" ]]; then - if [[ "${USE_CFP}" = "YES" ]] ; then - echo "${COPYGB} -i3 '-g${gds}' -x ${GDAS}/cpc.${ymd}/precip.gldas.${ymd}12 ${RUNDIR}/cmap.gdas.${ymd}12" >> ./cfile - echo "${COPYGB} -i3 '-g${gds}' -x ${GDAS}/cpc.${ymd}/precip.gldas.${ymd}18 ${RUNDIR}/cmap.gdas.${ymd}18" >> ./cfile - else - ${COPYGB} -i3 -g"${gds}" -x "${GDAS}/cpc.${ymd}/precip.gldas.${ymd}12" "${RUNDIR}/cmap.gdas.${ymd}12" - ${COPYGB} -i3 -g"${gds}" -x "${GDAS}/cpc.${ymd}/precip.gldas.${ymd}18" "${RUNDIR}/cmap.gdas.${ymd}18" - fi - fi - ymd=$(sh 
"${FINDDATE}" "${ymd}" d+1) -done - -if [[ "${USE_CFP}" = "YES" ]] ; then - ${APRUN_GLDAS_DATA_PROC:?} ./cfile -fi - -# create configure file -"${USHgldas}/gldas_liscrd.sh" "${gldas_sdate}" "${iau_cdate}" "${JCAP}" -export err=$? -${ERRSCRIPT} || exit 4 - - -#--------------------------------------------------------------- -### 3) Produce initials noah.rst from 6-tile gdas restart files -#--------------------------------------------------------------- -rm -f fort.41 fort.141 fort.11 fort.12 - -# 3a) create gdas2gldas input file - -cat >> fort.141 << EOF - &config - data_dir_input_grid="${input1}" - sfc_files_input_grid="${gldas_symd}.${gldas_scyc}0000.sfcanl_data.tile1.nc","${gldas_symd}.${gldas_scyc}0000.sfcanl_data.tile2.nc","${gldas_symd}.${gldas_scyc}0000.sfcanl_data.tile3.nc","${gldas_symd}.${gldas_scyc}0000.sfcanl_data.tile4.nc","${gldas_symd}.${gldas_scyc}0000.sfcanl_data.tile5.nc","${gldas_symd}.${gldas_scyc}0000.sfcanl_data.tile6.nc" - mosaic_file_input_grid="${CASE}_mosaic.nc" - orog_dir_input_grid="${topodir}/" - orog_files_input_grid="${CASE}_oro_data.tile1.nc","${CASE}_oro_data.tile2.nc","${CASE}_oro_data.tile3.nc","${CASE}_oro_data.tile4.nc","${CASE}_oro_data.tile5.nc","${CASE}_oro_data.tile6.nc" - i_target=${nlon} - j_target=${nlat} - model="${model:?}" - / -EOF -cp fort.141 fort.41 - - -# 3b) Use gdas2gldas to generate nemsio file - -export OMP_NUM_THREADS=1 -export pgm=gdas2gldas -# shellcheck disable=SC1091 -. prep_step -# shellcheck disable= -${APRUN_GAUSSIAN:?} "${EXECgldas}/gdas2gldas" 1>&1 2>&2 -export err=$? -${ERRSCRIPT} || exit 5 - - -# 3c)gldas_rst to generate noah.rst - -sfcanl=sfc.gaussian.nemsio -ln -fs "FIX/lmask_gfs_T${JCAP}.bfsa" fort.11 -ln -fs "${sfcanl}" fort.12 -export pgm=gldas_rst -# shellcheck disable=SC1091 -. prep_step -# shellcheck disable= -"${EXECgldas}/gldas_rst" 1>&1 2>&2 -export err=$? 
-${ERRSCRIPT} || exit 6 - -mv "${sfcanl}" "${sfcanl}.${gldas_symd}" - - -#--------------------------------------------------------------- -### 4) run noah/noahmp model -#--------------------------------------------------------------- -export pgm=LIS -# shellcheck disable=SC1091 -. prep_step -# shellcheck disable= -${APRUN_GLDAS:?} ./LIS 1>&1 2>&2 -export err=$? -${ERRSCRIPT} || exit 7 - - -#--------------------------------------------------------------- -### 5) using gdas2gldas to generate nemsio file for gldas_eymd -### use gldas_post to replace soil moisture and temperature -### use gldas2gdas to produce 6-tile restart file -#--------------------------------------------------------------- -rm -f fort.41 fort.241 fort.42 - -# 5a) create input file for gdas2gldas - -cat >> fort.241 << EOF - &config - data_dir_input_grid="${input2}" - sfc_files_input_grid="${iau_eymd}.${iau_ecyc}0000.sfcanl_data.tile1.nc","${iau_eymd}.${iau_ecyc}0000.sfcanl_data.tile2.nc","${iau_eymd}.${iau_ecyc}0000.sfcanl_data.tile3.nc","${iau_eymd}.${iau_ecyc}0000.sfcanl_data.tile4.nc","${iau_eymd}.${iau_ecyc}0000.sfcanl_data.tile5.nc","${iau_eymd}.${iau_ecyc}0000.sfcanl_data.tile6.nc" - mosaic_file_input_grid="${CASE}_mosaic.nc" - orog_dir_input_grid="${topodir}/" - orog_files_input_grid="${CASE}_oro_data.tile1.nc","${CASE}_oro_data.tile2.nc","${CASE}_oro_data.tile3.nc","${CASE}_oro_data.tile4.nc","${CASE}_oro_data.tile5.nc","${CASE}_oro_data.tile6.nc" - i_target=${nlon} - j_target=${nlat} - model="${model:?}" - / -EOF -cp fort.241 fort.41 - -# 5b) use gdas2gldas to produce nemsio file - -export OMP_NUM_THREADS=1 -export pgm=gdas2gldas -# shellcheck disable=SC1091 -. prep_step -# shellcheck disable= -${APRUN_GAUSSIAN} "${EXECgldas}/gdas2gldas" 1>&1 2>&2 -export err=$? 
-${ERRSCRIPT} || exit 8 - - -# 5c) use gldas_post to replace soil moisture and temperature - -yyyy=$(echo "${iau_eymd}" | cut -c1-4) -gbin=${RUNDIR}/EXP901/NOAH/${yyyy}/${iau_eymd}/LIS.E901.${iau_eymd}${iau_ecyc}.NOAHgbin -sfcanl=sfc.gaussian.nemsio -rm -rf fort.11 fort.12 -ln -fs "${gbin}" fort.11 -ln -fs "${sfcanl}" fort.12 - -export pgm=gldas_post -# shellcheck disable=SC1091 -. prep_step -# shellcheck disable= -"${EXECgldas}/gldas_post" 1>&1 2>&2 -export err=$? -${ERRSCRIPT} || exit 9 - -cp fort.22 ./gldas.nemsio -mv fort.22 "${sfcanl}.gldas" - - -# 5d) use gldas2gdas to create 6-tile restart tiles - -cat >> fort.42 << EOF - &config - orog_dir_gdas_grid="${topodir}/" - mosaic_file_gdas_grid="${CASE}_mosaic.nc" - / -EOF - -# copy/link gdas netcdf tiles -k=1; while [[ "${k}" -le 6 ]]; do - cp "${input2}/${iau_eymd}.${iau_ecyc}0000.sfcanl_data.tile${k}.nc" "./sfc_data.tile${k}.nc" - k=$((k+1)) -done - -# copy soil type -ln -fs "FIX/stype_gfs_T${JCAP}.bfsa" "stype_gfs_T${JCAP}.bfsa" - -export OMP_NUM_THREADS=1 -export pgm=gldas2gdas -# shellcheck disable=SC1091 -. prep_step -# shellcheck disable= -${APRUN_GAUSSIAN} "${EXECgldas}/gldas2gdas" 1>&1 2>&2 -export err=$? -${ERRSCRIPT} || exit 10 - - -# 5e) archive gldas results - -if [[ "${OFFLINE_GLDAS}" = "YES" ]]; then - "${USHgldas}/gldas_archive.sh" "${gldas_symd}" "${gldas_eymd}" - export err=$? 
- ${ERRSCRIPT} || exit 11 -else - k=1; while [[ "${k}" -le 6 ]]; do - mv "${input2}/${iau_eymd}.${iau_ecyc}0000.sfcanl_data.tile${k}.nc" "${input2}/${iau_eymd}.${iau_ecyc}0000.sfcanl_data.tile${k}.nc_bfgldas" - cp "sfc_data.tile${k}.nc" "${input2}/${iau_eymd}.${iau_ecyc}0000.sfcanl_data.tile${k}.nc" - k=$((k+1)) - done -fi - - -#------------------------------------------------------------------ -# Clean up before leaving -if [[ "${mkdata}" = "YES" ]]; then rm -rf "${DATA}"; fi - -exit "${err}" - diff --git a/scripts/exgdas_atmos_nawips.sh b/scripts/exgdas_atmos_nawips.sh index 9251852892..725cb0223f 100755 --- a/scripts/exgdas_atmos_nawips.sh +++ b/scripts/exgdas_atmos_nawips.sh @@ -16,6 +16,7 @@ cd $DATA RUN2=$1 fend=$2 DBN_ALERT_TYPE=$3 +destination=$4 DATA_RUN=$DATA/$RUN2 mkdir -p $DATA_RUN @@ -77,18 +78,18 @@ while [ $fhcnt -le $fend ] ; do GEMGRD=${RUN2}_${PDY}${cyc}f${fhr3} - if [ $RUN2 = "gdas_0p25" ]; then - export GRIBIN=$COMIN/${model}.${cycle}.pgrb2.0p25.f${fhr} - if [ ! -f $GRIBIN ] ; then - echo "WARNING: $GRIBIN FILE is missing" + if [[ ${RUN2} = "gdas_0p25" ]]; then + export GRIBIN=${COM_ATMOS_GRIB_0p25}/${model}.${cycle}.pgrb2.0p25.f${fhr} + if [[ ! -f ${GRIBIN} ]] ; then + echo "WARNING: ${GRIBIN} FILE is missing" fi - GRIBIN_chk=$COMIN/${model}.${cycle}.pgrb2.0p25.f${fhr}.idx + GRIBIN_chk=${COM_ATMOS_GRIB_0p25}${model}.${cycle}.pgrb2.0p25.f${fhr}.idx else - export GRIBIN=$COMIN/${model}.${cycle}.pgrb2.1p00.f${fhr} - if [ ! -f $GRIBIN ] ; then - echo "WARNING: $GRIBIN FILE is missing" + export GRIBIN=${COM_ATMOS_GRIB_1p00}/${model}.${cycle}.pgrb2.1p00.f${fhr} + if [[ ! -f ${GRIBIN} ]] ; then + echo "WARNING: ${GRIBIN} FILE is missing" fi - GRIBIN_chk=$COMIN/${model}.${cycle}.pgrb2.1p00.f${fhr}.idx + GRIBIN_chk=${COM_ATMOS_GRIB_1p00}/${model}.${cycle}.pgrb2.1p00.f${fhr}.idx fi icnt=1 @@ -135,17 +136,17 @@ EOF export err=$?;err_chk if [ $SENDCOM = "YES" ] ; then - cp $GEMGRD $COMOUT/.$GEMGRD + cp "${GEMGRD}" "${destination}/.${GEMGRD}" export err=$? 
- if [[ $err -ne 0 ]] ; then - echo " File $GEMGRD does not exist." - exit $err + if [[ ${err} -ne 0 ]] ; then + echo " File ${GEMGRD} does not exist." + exit "${err}" fi - mv $COMOUT/.$GEMGRD $COMOUT/$GEMGRD - if [ $SENDDBN = "YES" ] ; then - $DBNROOT/bin/dbn_alert MODEL ${DBN_ALERT_TYPE} $job \ - $COMOUT/$GEMGRD + mv "${destination}/.${GEMGRD}" "${destination}/${GEMGRD}" + if [[ ${SENDDBN} = "YES" ]] ; then + "${DBNROOT}/bin/dbn_alert" MODEL "${DBN_ALERT_TYPE}" "${job}" \ + "${destination}/${GEMGRD}" else echo "##### DBN_ALERT_TYPE is: ${DBN_ALERT_TYPE} #####" fi diff --git a/scripts/exgdas_atmos_post.sh b/scripts/exgdas_atmos_post.sh index 895765c970..c49be8b0b8 100755 --- a/scripts/exgdas_atmos_post.sh +++ b/scripts/exgdas_atmos_post.sh @@ -49,14 +49,13 @@ export JO=${LATB:-721} # specify default model output format: 3 for sigio and 4 # for nemsio export OUTTYP=${OUTTYP:-4} -export TCYC=${TCYC:-".t${cyc}z."} -export PREFIX=${PREFIX:-${RUN}${TCYC}} +export PREFIX=${PREFIX:-${RUN}.t${cyc}z.} export machine=${machine:-WCOSS2} ########################### # Specify Output layers ########################### -export POSTGPVARS="KPO=57,PO=1000.,975.,950.,925.,900.,875.,850.,825.,800.,775.,750.,725.,700.,675.,650.,625.,600.,575.,550.,525.,500.,475.,450.,425.,400.,375.,350.,325.,300.,275.,250.,225.,200.,175.,150.,125.,100.,70.,50.,40.,30.,20.,15.,10.,7.,5.,3.,2.,1.,0.7,0.4,0.2,0.1,0.07,0.04,0.02,0.01," +export POSTGPVARS="KPO=57,PO=1000.,975.,950.,925.,900.,875.,850.,825.,800.,775.,750.,725.,700.,675.,650.,625.,600.,575.,550.,525.,500.,475.,450.,425.,400.,375.,350.,325.,300.,275.,250.,225.,200.,175.,150.,125.,100.,70.,50.,40.,30.,20.,15.,10.,7.,5.,3.,2.,1.,0.7,0.4,0.2,0.1,0.07,0.04,0.02,0.01,rdaod=.true.," ########################################################## # Specify variable to directly output pgrb2 files for GDAS/GFS @@ -69,7 +68,7 @@ export IDRT=${IDRT:-0} # IDRT=0 is setting for outputting grib files on lat/lon # Chuang: modify to process analysis when 
post_times is 00 stime="$(echo "${post_times}" | cut -c1-3)" export stime -export loganl="${COMIN}/${PREFIX}atmanl.nc" +export loganl="${COM_ATMOS_ANALYSIS}/${PREFIX}atmanl.nc" if [[ "${stime}" = "anl" ]]; then if [[ -f "${loganl}" ]]; then @@ -100,9 +99,9 @@ if [[ "${stime}" = "anl" ]]; then [[ -f flxfile ]] && rm flxfile ; [[ -f nemsfile ]] && rm nemsfile - ln -fs "${COMIN}/${PREFIX}atmanl.nc" nemsfile + ln -fs "${COM_ATMOS_ANALYSIS}/${PREFIX}atmanl.nc" nemsfile export NEMSINP=nemsfile - ln -fs "${COMIN}/${PREFIX}sfcanl.nc" flxfile + ln -fs "${COM_ATMOS_ANALYSIS}/${PREFIX}sfcanl.nc" flxfile export FLXINP=flxfile export PGBOUT=pgbfile export PGIOUT=pgifile @@ -117,7 +116,6 @@ if [[ "${stime}" = "anl" ]]; then ${POSTGPSH} export err=$?; err_chk - if [[ "${GRIBVERSION}" = 'grib2' ]]; then mv "${PGBOUT}" "${PGBOUT2}" @@ -126,28 +124,25 @@ if [[ "${stime}" = "anl" ]]; then export downset=${downset:-1} ${GFSDOWNSH} export err=$?; err_chk - - export fhr3=anl fi if [[ "${SENDCOM}" = 'YES' ]]; then - export fhr3=anl if [[ "${GRIBVERSION}" = 'grib2' ]]; then - MASTERANL=${PREFIX}master.grb2${fhr3} + MASTERANL=${PREFIX}master.grb2anl ##########XXW Accord to Boi, fortran index should use *if${fhr}, wgrib index use .idx #MASTERANLIDX=${RUN}.${cycle}.master.grb2${fhr3}.idx - MASTERANLIDX=${PREFIX}master.grb2i${fhr3} - cp "${PGBOUT2}" "${COMOUT}/${MASTERANL}" - ${GRB2INDEX} "${PGBOUT2}" "${COMOUT}/${MASTERANLIDX}" + MASTERANLIDX=${PREFIX}master.grb2ianl + cp "${PGBOUT2}" "${COM_ATMOS_MASTER}/${MASTERANL}" + ${GRB2INDEX} "${PGBOUT2}" "${COM_ATMOS_MASTER}/${MASTERANLIDX}" fi if [[ "${SENDDBN}" = 'YES' ]]; then run="$(echo "${RUN}" | tr '[:lower:]' '[:upper:]')" if [[ "${GRIBVERSION}" = 'grib2' ]]; then - "${DBNROOT}/bin/dbn_alert" MODEL "${run}_MSC_sfcanl" "${job}" "${COMOUT}/${PREFIX}sfc${fhr3}.nc" - "${DBNROOT}/bin/dbn_alert" MODEL "${run}_SA" "${job}" "${COMIN}/${PREFIX}atm${fhr3}.nc" - "${DBNROOT}/bin/dbn_alert" MODEL "GDAS_PGA_GB2" "${job}" 
"${COMOUT}/${PREFIX}pgrb2.1p00.${fhr3}" - "${DBNROOT}/bin/dbn_alert" MODEL "GDAS_PGA_GB2_WIDX" "${job}" "${COMOUT}/${PREFIX}pgrb2.1p00.${fhr3}.idx" + "${DBNROOT}/bin/dbn_alert" MODEL "${run}_MSC_sfcanl" "${job}" "${COM_ATMOS_ANALYSIS}/${PREFIX}sfcanl.nc" + "${DBNROOT}/bin/dbn_alert" MODEL "${run}_SA" "${job}" "${COM_ATMOS_ANALYSIS}/${PREFIX}atmanl.nc" + "${DBNROOT}/bin/dbn_alert" MODEL "GDAS_PGA_GB2" "${job}" "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb2.1p00.anl" + "${DBNROOT}/bin/dbn_alert" MODEL "GDAS_PGA_GB2_WIDX" "${job}" "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb2.1p00.anl.idx" fi fi fi @@ -199,9 +194,9 @@ else ## not_anl if_stimes ############################### [[ -f flxfile ]] && rm flxfile [[ -f nemsfile ]] && rm nemsfile - ln -sf "${COMIN}/${PREFIX}atmf${fhr}.nc" nemsfile + ln -sf "${COM_ATMOS_HISTORY}/${PREFIX}atmf${fhr}.nc" nemsfile export NEMSINP=nemsfile - ln -sf "${COMIN}/${PREFIX}sfcf${fhr}.nc" flxfile + ln -sf "${COM_ATMOS_HISTORY}/${PREFIX}sfcf${fhr}.nc" flxfile export FLXINP=flxfile if (( d_fhr > 0 )); then @@ -265,7 +260,7 @@ else ## not_anl if_stimes if [[ "${INLINE_POST}" = ".false." 
]]; then ${POSTGPSH} else - cp "${COMOUT}/${MASTERFHR}" "${PGBOUT}" + cp "${COM_ATMOS_MASTER}/${MASTERFHR}" "${PGBOUT}" fi export err=$?; err_chk @@ -281,27 +276,27 @@ else ## not_anl if_stimes if [[ "${SENDDBN}" = "YES" ]]; then run="$(echo "${RUN}" | tr '[:lower:]' '[:upper:]')" - "${DBNROOT}/bin/dbn_alert" MODEL "${run}_PGB2_0P25" "${job}" "${COMOUT}/${PREFIX}pgrb2.0p25.f${fhr}" - "${DBNROOT}/bin/dbn_alert" MODEL "${run}_PGB2_0P25_WIDX ""${job}" "${COMOUT}/${PREFIX}pgrb2.0p25.f${fhr}.idx" - "${DBNROOT}/bin/dbn_alert" MODEL "${run}_PGB_GB2" "${job}" "${COMOUT}/${PREFIX}pgrb2.1p00.f${fhr}" - "${DBNROOT}/bin/dbn_alert" MODEL "${run}_PGB_GB2_WIDX" "${job}" "${COMOUT}/${PREFIX}pgrb2.1p00.f${fhr}.idx" + "${DBNROOT}/bin/dbn_alert" MODEL "${run}_PGB2_0P25" "${job}" "${COM_ATMOS_GRIB_0p25}/${PREFIX}pgrb2.0p25.f${fhr}" + "${DBNROOT}/bin/dbn_alert" MODEL "${run}_PGB2_0P25_WIDX ""${job}" "${COM_ATMOS_GRIB_0p25}/${PREFIX}pgrb2.0p25.f${fhr}.idx" + "${DBNROOT}/bin/dbn_alert" MODEL "${run}_PGB_GB2" "${job}" "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb2.1p00.f${fhr}" + "${DBNROOT}/bin/dbn_alert" MODEL "${run}_PGB_GB2_WIDX" "${job}" "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb2.1p00.f${fhr}.idx" fi if [[ "${SENDCOM}" = 'YES' ]]; then if [[ "${GRIBVERSION}" = 'grib2' ]]; then if [[ "${INLINE_POST}" = ".false." 
]]; then - cp "${PGBOUT2}" "${COMOUT}/${MASTERFHR}" + cp "${PGBOUT2}" "${COM_ATMOS_MASTER}/${MASTERFHR}" fi - ${GRB2INDEX} "${PGBOUT2}" "${COMOUT}/${MASTERFHRIDX}" + ${GRB2INDEX} "${PGBOUT2}" "${COM_ATMOS_MASTER}/${MASTERFHRIDX}" fi # Model generated flux files will be in nemsio after FY17 upgrade # use post to generate Grib2 flux files if (( OUTTYP == 4 )) ; then - export NEMSINP=${COMIN}/${PREFIX}atmf${fhr}.nc - export FLXINP=${COMIN}/${PREFIX}sfcf${fhr}.nc + export NEMSINP=${COM_ATMOS_HISTORY}/${PREFIX}atmf${fhr}.nc + export FLXINP=${COM_ATMOS_HISTORY}/${PREFIX}sfcf${fhr}.nc if (( d_fhr == 0 )); then export PostFlatFile=${PARMpost}/postxconfig-NT-GFS-FLUX-F00.txt export CTLFILE=${PARMpost}/postcntrl_gfs_flux_f00.xml @@ -317,16 +312,16 @@ else ## not_anl if_stimes if [[ "${INLINE_POST}" = ".false." ]]; then ${POSTGPSH} export err=$?; err_chk - mv fluxfile "${COMOUT}/${FLUXFL}" + mv fluxfile "${COM_ATMOS_MASTER}/${FLUXFL}" fi - ${WGRIB2} -s "${COMOUT}/${FLUXFL}" > "${COMOUT}/${FLUXFLIDX}" + ${WGRIB2} -s "${COM_ATMOS_MASTER}/${FLUXFL}" > "${COM_ATMOS_MASTER}/${FLUXFLIDX}" fi if [[ "${SENDDBN}" = 'YES' ]] && [[ "${RUN}" = 'gdas' ]] && (( d_fhr % 3 == 0 )); then - "${DBNROOT}/bin/dbn_alert" MODEL "${run}_SF" "${job}" "${COMOUT}/${PREFIX}atmf${fhr}.nc" - "${DBNROOT}/bin/dbn_alert" MODEL "${run}_BF" "${job}" "${COMOUT}/${PREFIX}sfcf${fhr}.nc" - "${DBNROOT}/bin/dbn_alert" MODEL "${run}_SGB_GB2" "${job}" "${COMOUT}/${PREFIX}sfluxgrbf${fhr}.grib2" - "${DBNROOT}/bin/dbn_alert" MODEL "${run}_SGB_GB2_WIDX ""${job}" "${COMOUT}/${PREFIX}sfluxgrbf${fhr}.grib2.idx" + "${DBNROOT}/bin/dbn_alert" MODEL "${run}_SF" "${job}" "${COM_ATMOS_HISTORY}/${PREFIX}atmf${fhr}.nc" + "${DBNROOT}/bin/dbn_alert" MODEL "${run}_BF" "${job}" "${COM_ATMOS_HISTORY}/${PREFIX}sfcf${fhr}.nc" + "${DBNROOT}/bin/dbn_alert" MODEL "${run}_SGB_GB2" "${job}" "${COM_ATMOS_MASTER}/${PREFIX}sfluxgrbf${fhr}.grib2" + "${DBNROOT}/bin/dbn_alert" MODEL "${run}_SGB_GB2_WIDX ""${job}" 
"${COM_ATMOS_MASTER}/${PREFIX}sfluxgrbf${fhr}.grib2.idx" fi fi diff --git a/scripts/exgdas_atmos_verfozn.sh b/scripts/exgdas_atmos_verfozn.sh index e9a1900085..aa686284be 100755 --- a/scripts/exgdas_atmos_verfozn.sh +++ b/scripts/exgdas_atmos_verfozn.sh @@ -18,12 +18,6 @@ export RUN_ENVIR=${RUN_ENVIR:-nco} export NET=${NET:-gfs} export RUN=${RUN:-gdas} export envir=${envir:-prod} -export COMPONENT=${COMPONENT:-atmos} - -# Command line arguments -export PDY=${1:-${PDY:?}} -export cyc=${2:-${cyc:?}} - # Other variables export SATYPE_FILE=${SATYPE_FILE:-$FIXgdas_ozn/gdas_oznmon_satype.txt} diff --git a/scripts/exgdas_atmos_verfrad.sh b/scripts/exgdas_atmos_verfrad.sh index b9cfa701cd..5306fbbdba 100755 --- a/scripts/exgdas_atmos_verfrad.sh +++ b/scripts/exgdas_atmos_verfrad.sh @@ -25,21 +25,11 @@ export RUN_ENVIR=${RUN_ENVIR:-nco} export NET=${NET:-gfs} export RUN=${RUN:-gdas} export envir=${envir:-prod} -export COMPONENT=${COMPONENT:-atmos} - -# Command line arguments -export PDY=${1:-${PDY:?}} -export cyc=${2:-${cyc:?}} - -# Directories -export COM_IN=${COM_IN:-$(compath.py ${envir}/${NET}/${gfs_ver})} -export COMIN=${COMIN:-$COM_IN/${RUN}.${PDY}/${cyc}/$COMPONENT} - # Filenames -export biascr=${biascr:-$COMIN/gdas.t${cyc}z.abias} -export radstat=${radstat:-$COMIN/gdas.t${cyc}z.radstat} -export satype_file=${satype_file:-${FIXgdas}/gdas_radmon_satype.txt} +biascr=${biascr:-${COM_ATMOS_ANALYSIS}/gdas.t${cyc}z.abias} +radstat=${radstat:-${COM_ATMOS_ANALYSIS}/gdas.t${cyc}z.radstat} +satype_file=${satype_file:-${FIXgdas}/gdas_radmon_satype.txt} # Other variables export RAD_AREA=${RAD_AREA:-glb} diff --git a/scripts/exgdas_atmos_vminmon.sh b/scripts/exgdas_atmos_vminmon.sh index 5d54174bf7..2a22fcb0b6 100755 --- a/scripts/exgdas_atmos_vminmon.sh +++ b/scripts/exgdas_atmos_vminmon.sh @@ -28,12 +28,6 @@ export NET=${NET:-gfs} export RUN=${RUN:-gdas} export envir=${envir:-prod} -######################################## -# Command line arguments 
-######################################## -export PDY=${1:-${PDY:?}} -export cyc=${2:-${cyc:?}} - ######################################## # Directories ######################################## @@ -43,7 +37,7 @@ export DATA=${DATA:-$(pwd)} ######################################## # Filenames ######################################## -gsistat=${gsistat:-$COMIN/gdas.t${cyc}z.gsistat} +gsistat=${gsistat:-${COM_ATMOS_ANALYSIS}/gdas.t${cyc}z.gsistat} export mm_gnormfile=${gnormfile:-${M_FIXgdas}/gdas_minmon_gnorm.txt} export mm_costfile=${costfile:-${M_FIXgdas}/gdas_minmon_cost.txt} diff --git a/scripts/exgdas_enkf_earc.sh b/scripts/exgdas_enkf_earc.sh index 1408165bfd..8f1928042f 100755 --- a/scripts/exgdas_enkf_earc.sh +++ b/scripts/exgdas_enkf_earc.sh @@ -6,8 +6,9 @@ source "${HOMEgfs}/ush/preamble.sh" # Begin JOB SPECIFIC work ############################################## export n=$((10#${ENSGRP})) -export CDUMP_ENKF=$(echo "${EUPD_CYC:-"gdas"}" | tr a-z A-Z) -export ARCH_LIST="${ROTDIR}/${RUN}.${PDY}/${cyc}/earc${ENSGRP}" +export CDUMP_ENKF="${EUPD_CYC:-"gdas"}" + +export ARCH_LIST="${COM_TOP}/earc${ENSGRP}" # ICS are restarts and always lag INC by $assim_freq hours. EARCINC_CYC=${ARCH_CYC} @@ -18,16 +19,16 @@ fi [[ -d ${ARCH_LIST} ]] && rm -rf "${ARCH_LIST}" mkdir -p "${ARCH_LIST}" -cd "${ARCH_LIST}" +cd "${ARCH_LIST}" || exit 2 -"${HOMEgfs}"/ush/hpssarch_gen.sh "${RUN}" +"${HOMEgfs}/ush/hpssarch_gen.sh" "${RUN}" status=$? if [ "${status}" -ne 0 ]; then echo "${HOMEgfs}/ush/hpssarch_gen.sh ${RUN} failed, ABORT!" exit "${status}" fi -cd "${ROTDIR}" +cd "${ROTDIR}" || exit 2 source "${HOMEgfs}/ush/file_utils.sh" @@ -40,18 +41,18 @@ if (( 10#${ENSGRP} > 0 )) && [[ ${HPSSARCH} = "YES" || ${LOCALARCH} = "YES" ]]; TARCMD="htar" if [[ ${LOCALARCH} = "YES" ]]; then TARCMD="tar" - [ ! -d "${ATARDIR}"/"${CDATE}" ] && mkdir -p "${ATARDIR}"/"${CDATE}" + if [[ ! 
-d "${ATARDIR}/${PDY}${cyc}" ]]; then mkdir -p "${ATARDIR}/${PDY}${cyc}"; fi fi #--determine when to save ICs for warm start SAVEWARMICA="NO" SAVEWARMICB="NO" - mm=$(echo "${CDATE}"|cut -c 5-6) - dd=$(echo "${CDATE}"|cut -c 7-8) + mm="${PDY:4:2}" + dd="${PDY:6:2}" nday=$(( (10#${mm}-1)*30+10#${dd} )) mod=$((nday % ARCH_WARMICFREQ)) - if [ "${CDATE}" -eq "${firstday}" ] && [ "${cyc}" -eq "${EARCINC_CYC}" ]; then SAVEWARMICA="YES" ; fi - if [ "${CDATE}" -eq "${firstday}" ] && [ "${cyc}" -eq "${EARCICS_CYC}" ]; then SAVEWARMICB="YES" ; fi + if [ "${PDY}${cyc}" -eq "${firstday}" ] && [ "${cyc}" -eq "${EARCINC_CYC}" ]; then SAVEWARMICA="YES" ; fi + if [ "${PDY}${cyc}" -eq "${firstday}" ] && [ "${cyc}" -eq "${EARCICS_CYC}" ]; then SAVEWARMICB="YES" ; fi if [ "${mod}" -eq 0 ] && [ "${cyc}" ] && [ "${EARCINC_CYC}" ]; then SAVEWARMICA="YES" ; fi if [ "${mod}" -eq 0 ] && [ "${cyc}" ] && [ "${EARCICS_CYC}" ]; then SAVEWARMICB="YES" ; fi @@ -60,32 +61,32 @@ if (( 10#${ENSGRP} > 0 )) && [[ ${HPSSARCH} = "YES" || ${LOCALARCH} = "YES" ]]; mod1=$((nday1 % ARCH_WARMICFREQ)) if [ "${mod1}" -eq 0 ] && [ "${cyc}" -eq "${EARCICS_CYC}" ] ; then SAVEWARMICB="YES" ; fi if [ "${mod1}" -ne 0 ] && [ "${cyc}" -eq "${EARCICS_CYC}" ] ; then SAVEWARMICB="NO" ; fi - if [ "${CDATE}" -eq "${SDATE}" ] && [ "${cyc}" -eq "${EARCICS_CYC}" ] ; then SAVEWARMICB="YES" ; fi + if [ "${PDY}${cyc}" -eq "${SDATE}" ] && [ "${cyc}" -eq "${EARCICS_CYC}" ] ; then SAVEWARMICB="YES" ; fi fi - if [ "${CDATE}" -gt "${SDATE}" ]; then # Don't run for first half cycle + if [ "${PDY}${cyc}" -gt "${SDATE}" ]; then # Don't run for first half cycle - ${TARCMD} -P -cvf "${ATARDIR}/${CDATE}/${RUN}_grp${ENSGRP}.tar" $(cat "${ARCH_LIST}/${RUN}_grp${n}.txt") + ${TARCMD} -P -cvf "${ATARDIR}/${PDY}${cyc}/${RUN}_grp${ENSGRP}.tar" $(cat "${ARCH_LIST}/${RUN}_grp${n}.txt") status=$? 
- if [ "${status}" -ne 0 ] && [ "${CDATE}" -ge "${firstday}" ]; then - echo "$(echo "${TARCMD}" | tr 'a-z' 'A-Z') ${CDATE} ${RUN}_grp${ENSGRP}.tar failed" + if [ "${status}" -ne 0 ] && [ "${PDY}${cyc}" -ge "${firstday}" ]; then + echo "FATAL ERROR: ${TARCMD} ${PDY}${cyc} ${RUN}_grp${ENSGRP}.tar failed" exit "${status}" fi if [ "${SAVEWARMICA}" = "YES" ] && [ "${cyc}" -eq "${EARCINC_CYC}" ]; then - ${TARCMD} -P -cvf "${ATARDIR}/${CDATE}/${RUN}_restarta_grp${ENSGRP}.tar" $(cat "${ARCH_LIST}/${RUN}_restarta_grp${n}.txt") + ${TARCMD} -P -cvf "${ATARDIR}/${PDY}${cyc}/${RUN}_restarta_grp${ENSGRP}.tar" $(cat "${ARCH_LIST}/${RUN}_restarta_grp${n}.txt") status=$? if [ "${status}" -ne 0 ]; then - echo "$(echo "${TARCMD}" | tr 'a-z' 'A-Z') ${CDATE} ${RUN}_restarta_grp${ENSGRP}.tar failed" + echo "FATAL ERROR: ${TARCMD} ${PDY}${cyc} ${RUN}_restarta_grp${ENSGRP}.tar failed" exit "${status}" fi fi if [ "${SAVEWARMICB}" = "YES" ] && [ "${cyc}" -eq "${EARCICS_CYC}" ]; then - ${TARCMD} -P -cvf "${ATARDIR}/${CDATE}/${RUN}_restartb_grp${ENSGRP}.tar" $(cat "${ARCH_LIST}/${RUN}_restartb_grp${n}.txt") + ${TARCMD} -P -cvf "${ATARDIR}/${PDY}${cyc}/${RUN}_restartb_grp${ENSGRP}.tar" $(cat "${ARCH_LIST}/${RUN}_restartb_grp${n}.txt") status=$? if [ "${status}" -ne 0 ]; then - echo "$(echo "${TARCMD}" | tr 'a-z' 'A-Z') ${CDATE} ${RUN}_restartb_grp${ENSGRP}.tar failed" + echo "FATAL ERROR: ${TARCMD} ${PDY}${cyc} ${RUN}_restartb_grp${ENSGRP}.tar failed" exit "${status}" fi fi @@ -101,22 +102,22 @@ if [ "${ENSGRP}" -eq 0 ]; then if [[ ${HPSSARCH} = "YES" || ${LOCALARCH} = "YES" ]]; then -#--set the archiving command and create local directories, if necessary + #--set the archiving command and create local directories, if necessary TARCMD="htar" HSICMD="hsi" if [[ ${LOCALARCH} = "YES" ]]; then TARCMD="tar" HSICMD="" - [ ! -d "${ATARDIR}"/"${CDATE}" ] && mkdir -p "${ATARDIR}"/"${CDATE}" + if [[ ! 
-d "${ATARDIR}/${PDY}${cyc}" ]]; then mkdir -p "${ATARDIR}/${PDY}${cyc}"; fi fi set +e - ${TARCMD} -P -cvf "${ATARDIR}/${CDATE}/${RUN}.tar" $(cat "${ARCH_LIST}/${RUN}.txt") + ${TARCMD} -P -cvf "${ATARDIR}/${PDY}${cyc}/${RUN}.tar" $(cat "${ARCH_LIST}/${RUN}.txt") status=$? - ${HSICMD} chgrp rstprod "${ATARDIR}/${CDATE}/${RUN}.tar" - ${HSICMD} chmod 640 "${ATARDIR}/${CDATE}/${RUN}.tar" - if [ "${status}" -ne 0 ] && [ "${CDATE}" -ge "${firstday}" ]; then - echo "$(echo "${TARCMD}" | tr 'a-z' 'A-Z') ${CDATE} ${RUN}.tar failed" + ${HSICMD} chgrp rstprod "${ATARDIR}/${PDY}${cyc}/${RUN}.tar" + ${HSICMD} chmod 640 "${ATARDIR}/${PDY}${cyc}/${RUN}.tar" + if (( status != 0 && ${PDY}${cyc} >= firstday )); then + echo "FATAL ERROR: ${TARCMD} ${PDY}${cyc} ${RUN}.tar failed" exit "${status}" fi set_strict @@ -124,16 +125,12 @@ if [ "${ENSGRP}" -eq 0 ]; then #-- Archive online for verification and diagnostics [[ ! -d ${ARCDIR} ]] && mkdir -p "${ARCDIR}" - cd "${ARCDIR}" - - nb_copy "${ROTDIR}/${RUN}.${PDY}/${cyc}/${RUN}.t${cyc}z.enkfstat" "enkfstat.${RUN}.${CDATE}" - nb_copy "${ROTDIR}/${RUN}.${PDY}/${cyc}/${RUN}.t${cyc}z.gsistat.ensmean" "gsistat.${RUN}.${CDATE}.ensmean" - - if [ "${CDUMP_ENKF}" != "GDAS" ]; then - nb_copy "${ROTDIR}/enkfgfs.${PDY}/${cyc}/${RUN}.t${cyc}z.enkfstat" "enkfstat.gfs.${CDATE}" - nb_copy "${ROTDIR}/enkfgfs.${PDY}/${cyc}/${RUN}.t${cyc}z.gsistat.ensmean" "gsistat.gfs.${CDATE}.ensmean" - fi + cd "${ARCDIR}" || exit 2 + nb_copy "${COM_ATMOS_ANALYSIS_ENSSTAT}/${RUN}.t${cyc}z.enkfstat" \ + "enkfstat.${RUN}.${PDY}${cyc}" + nb_copy "${COM_ATMOS_ANALYSIS_ENSSTAT}/${RUN}.t${cyc}z.gsistat.ensmean" \ + "gsistat.${RUN}.${PDY}${cyc}.ensmean" fi @@ -143,53 +140,154 @@ fi ############################################################### # ENSGRP 0 also does clean-up -if [ "${ENSGRP}" -eq 0 ]; then +############################################################### +if [[ "${ENSGRP}" -eq 0 ]]; then + function remove_files() { + # TODO: move this to a new location + local 
directory=$1 + shift + if [[ ! -d ${directory} ]]; then + echo "No directory ${directory} to remove files from, skiping" + return + fi + local exclude_list="" + if (($# > 0)); then + exclude_list=$* + fi + local file_list + declare -a file_list + # Suppress warnings about chained commands suppressing exit codes + # shellcheck disable=SC2312 + readarray -t file_list < <(find -L "${directory}" -type f) + if (( ${#file_list[@]} == 0 )); then return; fi + for exclude in ${exclude_list}; do + echo "Excluding ${exclude}" + declare -a file_list_old=("${file_list[@]}") + # Suppress warnings about chained commands suppressing exit codes + # shellcheck disable=SC2312 + readarray file_list < <(printf -- '%s\n' "${file_list_old[@]}" | grep -v "${exclude}") + if (( ${#file_list[@]} == 0 )); then return; fi + done + + for file in "${file_list[@]}"; do + rm -f "${file}" + done + # Remove directory if empty + rmdir "${directory}" || true + } # Start start and end dates to remove - GDATEEND=$(${NDATE} -"${RMOLDEND_ENKF:-24}" "${CDATE}") - GDATE=$(${NDATE} -"${RMOLDSTD_ENKF:-120}" "${CDATE}") + GDATEEND=$(${NDATE} -"${RMOLDEND_ENKF:-24}" "${PDY}${cyc}") + GDATE=$(${NDATE} -"${RMOLDSTD_ENKF:-120}" "${PDY}${cyc}") + while [ "${GDATE}" -le "${GDATEEND}" ]; do - gPDY=$(echo "${GDATE}" | cut -c1-8) - gcyc=$(echo "${GDATE}" | cut -c9-10) - - # Loop over GDAS and GFS EnKF directories separately. - clist="gdas gfs" - for ctype in ${clist}; do - COMIN_ENS="${ROTDIR}/enkf${ctype}.${gPDY}/${gcyc}" - if [ -d "${COMIN_ENS}" ]; then - rocotolog="${EXPDIR}/logs/${GDATE}.log" - if [ -f "${rocotolog}" ]; then - set +e - testend=$(tail -n 1 "${rocotolog}" | grep "This cycle is complete: Success") - rc=$? - set_strict - if [ "${rc}" -eq 0 ]; then - # Retain f006.ens files. 
Remove everything else - for file in $(ls "${COMIN_ENS}" | grep -v f006.ens); do - rm -rf "${COMIN_ENS}"/"${file}" + gPDY="${GDATE:0:8}" + gcyc="${GDATE:8:2}" + + if [[ -d ${COM_TOP} ]]; then + rocotolog="${EXPDIR}/logs/${GDATE}.log" + if [[ -f "${rocotolog}" ]]; then + set +e + # Suppress warnings about chained commands suppressing exit codes + # shellcheck disable=SC2312 + testend=$(tail -n 1 "${rocotolog}" | grep "This cycle is complete: Success") + rc=$? + set_strict + if [ "${rc}" -eq 0 ]; then + case ${CDUMP} in + gdas) nmem="${NMEM_ENS}";; + gfs) nmem="${NMEM_ENS_GFS}";; + *) + echo "FATAL ERROR: Unknown CDUMP ${CDUMP} during cleanup" + exit 10 + ;; + esac + + readarray memlist< <(seq --format="mem%03g" 1 "${nmem}") + memlist+=("ensstat") + + for mem in "${memlist[@]}"; do + # Atmos + exclude_list="f006.ens" + # Suppress warnings about chained commands suppressing exit codes + # shellcheck disable=SC2312 + templates=$(compgen -A variable | grep 'COM_ATMOS_.*_TMPL') + for template in ${templates}; do + MEMDIR="${mem}" YMD="${gPDY}" HH="${gcyc}" generate_com "directory:${template}" + remove_files "${directory}" "${exclude_list[@]}" + done + + # Wave + exclude_list="" + # Suppress warnings about chained commands suppressing exit codes + # shellcheck disable=SC2312 + templates=$(compgen -A variable | grep 'COM_WAVE_.*_TMPL') + for template in ${templates}; do + MEMDIR="${mem}" YMD="${gPDY}" HH="${gcyc}" generate_com "directory:${template}" + remove_files "${directory}" "${exclude_list[@]}" done - fi + + # Ocean + exclude_list="" + # Suppress warnings about chained commands suppressing exit codes + # shellcheck disable=SC2312 + templates=$(compgen -A variable | grep 'COM_OCEAN_.*_TMPL') + for template in ${templates}; do + YMEMDIR="${mem}" MD="${gPDY}" HH="${gcyc}" generate_com "directory:${template}" + remove_files "${directory}" "${exclude_list[@]}" + done + + # Ice + exclude_list="" + # Suppress warnings about chained commands suppressing exit codes + # 
shellcheck disable=SC2312 + templates=$(compgen -A variable | grep 'COM_ICE_.*_TMPL') + for template in ${templates}; do + MEMDIR="${mem}" YMD="${gPDY}" HH="${gcyc}" generate_com "directory:${template}" + remove_files "${directory}" "${exclude_list[@]}" + done + + # Aerosols (GOCART) + exclude_list="" + # Suppress warnings about chained commands suppressing exit codes + # shellcheck disable=SC2312 + templates=$(compgen -A variable | grep 'COM_CHEM_.*_TMPL') + for template in ${templates}; do + MEMDIR="${mem}" YMD="${gPDY}" HH="${gcyc}" generate_com "directory:${template}" + remove_files "${directory}" "${exclude_list[@]}" + done + + # Mediator + exclude_list="" + # Suppress warnings about chained commands suppressing exit codes + # shellcheck disable=SC2312 + templates=$(compgen -A variable | grep 'COM_MED_.*_TMPL') + for template in ${templates}; do + MEMDIR="${mem}" YMD="${gPDY}" HH="${gcyc}" generate_com "directory:${template}" + remove_files "${directory}" "${exclude_list[@]}" + done + done fi fi + fi - # Remove empty directories - if [ -d "${COMIN_ENS}" ] ; then - [[ ! 
"$(ls -A "${COMIN_ENS}")" ]] && rm -rf "${COMIN_ENS}" - fi - done + # Remove any empty directories + YMD=${gPDY} HH=${gcyc} generate_com target_dir:COM_TOP_TMPL + target_dir="${ROTDIR:?}/${RUN}.${gPDY}/${gcyc}/" + if [[ -d ${target_dir} ]]; then + find "${target_dir}" -empty -type d -delete + fi # Advance to next cycle GDATE=$(${NDATE} +"${assim_freq}" "${GDATE}") - done - fi # Remove enkf*.$rPDY for the older of GDATE or RDATE -GDATE=$(${NDATE} -"${RMOLDSTD_ENKF:-120}" "${CDATE}") +GDATE=$(${NDATE} -"${RMOLDSTD_ENKF:-120}" "${PDY}${cyc}") fhmax=${FHMAX_GFS} -RDATE=$(${NDATE} -"${fhmax}" "${CDATE}") +RDATE=$(${NDATE} -"${fhmax}" "${PDY}${cyc}") if [ "${GDATE}" -lt "${RDATE}" ]; then RDATE=${GDATE} fi diff --git a/scripts/exgdas_enkf_ecen.sh b/scripts/exgdas_enkf_ecen.sh index 08abd1ae20..de603cba3f 100755 --- a/scripts/exgdas_enkf_ecen.sh +++ b/scripts/exgdas_enkf_ecen.sh @@ -52,7 +52,7 @@ GPREFIX=${GPREFIX:-""} GPREFIX_ENS=${GPREFIX_ENS:-$GPREFIX} # Variables -NMEM_ENKF=${NMEM_ENKF:-80} +NMEM_ENS=${NMEM_ENS:-80} imp_physics=${imp_physics:-99} INCREMENTS_TO_ZERO=${INCREMENTS_TO_ZERO:-"'NONE'"} DOIAU=${DOIAU_ENKF:-"NO"} @@ -108,34 +108,41 @@ ENKF_SUFFIX="s" # Link ensemble member guess, analysis and increment files for FHR in $(seq $FHMIN $FHOUT $FHMAX); do -for imem in $(seq 1 $NMEM_ENKF); do +for imem in $(seq 1 $NMEM_ENS); do memchar="mem"$(printf %03i $imem) - $NLN $COMIN_GES_ENS/$memchar/atmos/${GPREFIX_ENS}atmf00${FHR}${ENKF_SUFFIX}.nc ./atmges_$memchar + + MEMDIR=${memchar} YMD=${PDY} HH=${cyc} generate_com -x \ + COM_ATMOS_ANALYSIS_MEM:COM_ATMOS_ANALYSIS_TMPL + + MEMDIR=${memchar} RUN=${GDUMP_ENS} YMD=${gPDY} HH=${gcyc} generate_com -x \ + COM_ATMOS_HISTORY_MEM_PREV:COM_ATMOS_HISTORY_TMPL + + ${NLN} "${COM_ATMOS_HISTORY_MEM_PREV}/${GPREFIX_ENS}atmf00${FHR}${ENKF_SUFFIX}.nc" "./atmges_${memchar}" if [ $DO_CALC_INCREMENT = "YES" ]; then if [ $FHR -eq 6 ]; then - $NLN $COMIN_ENS/$memchar/atmos/${APREFIX_ENS}atmanl.nc ./atmanl_$memchar + ${NLN} 
"${COM_ATMOS_ANALYSIS_MEM}/${APREFIX_ENS}atmanl.nc" "./atmanl_${memchar}" else - $NLN $COMIN_ENS/$memchar/atmos/${APREFIX_ENS}atma00${FHR}.nc ./atmanl_$memchar + ${NLN} "${COM_ATMOS_ANALYSIS_MEM}/${APREFIX_ENS}atma00${FHR}.nc" "./atmanl_${memchar}" fi fi - mkdir -p $COMOUT_ENS/$memchar + mkdir -p "${COM_ATMOS_ANALYSIS_MEM}" if [ $FHR -eq 6 ]; then - $NLN $COMOUT_ENS/$memchar/atmos/${APREFIX_ENS}atminc.nc ./atminc_$memchar + ${NLN} "${COM_ATMOS_ANALYSIS_MEM}/${APREFIX_ENS}atminc.nc" "./atminc_${memchar}" else - $NLN $COMOUT_ENS/$memchar/atmos/${APREFIX_ENS}atmi00${FHR}.nc ./atminc_$memchar + ${NLN} "${COM_ATMOS_ANALYSIS_MEM}/${APREFIX_ENS}atmi00${FHR}.nc" "./atminc_${memchar}" fi if [[ $RECENTER_ENKF = "YES" ]]; then if [ $DO_CALC_INCREMENT = "YES" ]; then if [ $FHR -eq 6 ]; then - $NLN $COMOUT_ENS/$memchar/atmos/${APREFIX_ENS}ratmanl.nc ./ratmanl_$memchar + ${NLN} "${COM_ATMOS_ANALYSIS_MEM}/${APREFIX_ENS}ratmanl.nc" "./ratmanl_${memchar}" else - $NLN $COMOUT_ENS/$memchar/atmos/${APREFIX_ENS}ratma00${FHR}.nc ./ratmanl_$memchar + ${NLN} "${COM_ATMOS_ANALYSIS_MEM}/${APREFIX_ENS}ratma00${FHR}.nc" "./ratmanl_${memchar}" fi else if [ $FHR -eq 6 ]; then - $NLN $COMOUT_ENS/$memchar/atmos/${APREFIX_ENS}ratminc.nc ./ratminc_$memchar + ${NLN} "${COM_ATMOS_ANALYSIS_MEM}/${APREFIX_ENS}ratminc.nc" "./ratminc_${memchar}" else - $NLN $COMOUT_ENS/$memchar/atmos/${APREFIX_ENS}ratmi00${FHR}.nc ./ratminc_$memchar + ${NLN} "${COM_ATMOS_ANALYSIS_MEM}/${APREFIX_ENS}ratmi00${FHR}.nc" "./ratminc_${memchar}" fi fi fi @@ -144,9 +151,9 @@ done if [ $DO_CALC_INCREMENT = "YES" ]; then # Link ensemble mean analysis if [ $FHR -eq 6 ]; then - $NLN $COMIN_ENS/${APREFIX_ENS}atmanl.ensmean.nc ./atmanl_ensmean + ${NLN} "${COM_ATMOS_ANALYSIS_STAT}/${APREFIX_ENS}atmanl.ensmean.nc" "./atmanl_ensmean" else - $NLN $COMIN_ENS/${APREFIX_ENS}atma00${FHR}.ensmean.nc ./atmanl_ensmean + ${NLN} "${COM_ATMOS_ANALYSIS_STAT}/${APREFIX_ENS}atma00${FHR}.ensmean.nc" "./atmanl_ensmean" fi # Compute ensemble mean 
analysis @@ -159,14 +166,14 @@ if [ $DO_CALC_INCREMENT = "YES" ]; then . prep_step $NCP $GETATMENSMEANEXEC $DATA - $APRUN_ECEN ${DATA}/$(basename $GETATMENSMEANEXEC) $DATAPATH $ATMANLMEANNAME $ATMANLNAME $NMEM_ENKF + $APRUN_ECEN ${DATA}/$(basename $GETATMENSMEANEXEC) $DATAPATH $ATMANLMEANNAME $ATMANLNAME $NMEM_ENS export err=$?; err_chk else # Link ensemble mean increment if [ $FHR -eq 6 ]; then - $NLN $COMIN_ENS/${APREFIX_ENS}atminc.ensmean.nc ./atminc_ensmean + ${NLN} "${COM_ATMOS_ANALYSIS_STAT}/${APREFIX_ENS}atminc.ensmean.nc" "./atminc_ensmean" else - $NLN $COMIN_ENS/${APREFIX_ENS}atmi00${FHR}.ensmean.nc ./atminc_ensmean + ${NLN} "${COM_ATMOS_ANALYSIS_STAT}/${APREFIX_ENS}atmi00${FHR}.ensmean.nc" "./atminc_ensmean" fi # Compute ensemble mean increment @@ -179,12 +186,12 @@ else . prep_step $NCP $GETATMENSMEANEXEC $DATA - $APRUN_ECEN ${DATA}/$(basename $GETATMENSMEANEXEC) $DATAPATH $ATMINCMEANNAME $ATMINCNAME $NMEM_ENKF + $APRUN_ECEN ${DATA}/$(basename $GETATMENSMEANEXEC) $DATAPATH $ATMINCMEANNAME $ATMINCNAME $NMEM_ENS export err=$?; err_chk # If available, link to ensemble mean guess. Otherwise, compute ensemble mean guess - if [ -s $COMIN_GES_ENS/${GPREFIX_ENS}atmf00${FHR}.ensmean.nc ]; then - $NLN $COMIN_GES_ENS/${GPREFIX_ENS}atmf00${FHR}.ensmean.nc ./atmges_ensmean + if [[ -s "${COM_ATMOS_HISTORY_STAT_PREV}/${GPREFIX_ENS}atmf00${FHR}.ensmean.nc" ]]; then + ${NLN} "${COM_ATMOS_HISTORY_STAT_PREV}/${GPREFIX_ENS}atmf00${FHR}.ensmean.nc" "./atmges_ensmean" else DATAPATH="./" ATMGESNAME="atmges" @@ -195,7 +202,7 @@ else . 
prep_step $NCP $GETATMENSMEANEXEC $DATA - $APRUN_ECEN ${DATA}/$(basename $GETATMENSMEANEXEC) $DATAPATH $ATMGESMEANNAME $ATMGESNAME $NMEM_ENKF + $APRUN_ECEN ${DATA}/$(basename $GETATMENSMEANEXEC) $DATAPATH $ATMGESMEANNAME $ATMGESNAME $NMEM_ENS export err=$?; err_chk fi fi @@ -219,11 +226,11 @@ if [ $RECENTER_ENKF = "YES" ]; then # GSI EnVar analysis if [ $FHR -eq 6 ]; then - ATMANL_GSI=$COMIN/${APREFIX}atmanl.nc - ATMANL_GSI_ENSRES=$COMIN/${APREFIX}atmanl.ensres.nc + ATMANL_GSI="${COM_ATMOS_ANALYSIS_DET}/${APREFIX}atmanl.nc" + ATMANL_GSI_ENSRES="${COM_ATMOS_ANALYSIS_DET}/${APREFIX}atmanl.ensres.nc" else - ATMANL_GSI=$COMIN/${APREFIX}atma00${FHR}.nc - ATMANL_GSI_ENSRES=$COMIN/${APREFIX}atma00${FHR}.ensres.nc + ATMANL_GSI="${COM_ATMOS_ANALYSIS_DET}/${APREFIX}atma00${FHR}.nc" + ATMANL_GSI_ENSRES="${COM_ATMOS_ANALYSIS_DET}/${APREFIX}atma00${FHR}.ensres.nc" fi # if we already have a ensemble resolution GSI analysis then just link to it @@ -272,7 +279,7 @@ EOF . prep_step $NCP $RECENATMEXEC $DATA - $APRUN_ECEN ${DATA}/$(basename $RECENATMEXEC) $FILENAMEIN $FILENAME_MEANIN $FILENAME_MEANOUT $FILENAMEOUT $NMEM_ENKF + $APRUN_ECEN ${DATA}/$(basename $RECENATMEXEC) $FILENAMEIN $FILENAME_MEANIN $FILENAME_MEANOUT $FILENAMEOUT $NMEM_ENS export err=$?; err_chk else ################################################################################ @@ -300,7 +307,7 @@ cat recenter.nml . prep_step $NCP $RECENATMEXEC $DATA - $APRUN_ECEN ${DATA}/$(basename $RECENATMEXEC) $FILENAMEIN $FILENAME_INCMEANIN $FILENAME_GSIDET $FILENAMEOUT $NMEM_ENKF $FILENAME_GESMEANIN + $APRUN_ECEN ${DATA}/$(basename $RECENATMEXEC) $FILENAMEIN $FILENAME_INCMEANIN $FILENAME_GSIDET $FILENAMEOUT $NMEM_ENS $FILENAME_GESMEANIN export err=$?; err_chk fi fi @@ -329,7 +336,7 @@ if [ $DO_CALC_INCREMENT = "YES" ]; then firstguess_filename = 'atmges' increment_filename = 'atminc' debug = .false. 
- nens = $NMEM_ENKF + nens = $NMEM_ENS imp_physics = $imp_physics / &zeroinc diff --git a/scripts/exgdas_enkf_fcst.sh b/scripts/exgdas_enkf_fcst.sh index 3f02ae6417..bc126d5906 100755 --- a/scripts/exgdas_enkf_fcst.sh +++ b/scripts/exgdas_enkf_fcst.sh @@ -21,7 +21,6 @@ source "$HOMEgfs/ush/preamble.sh" # Directories. -pwd=$(pwd) export FIX_DIR=${FIX_DIR:-$HOMEgfs/fix} export FIX_AM=${FIX_AM:-$FIX_DIR/am} @@ -46,10 +45,6 @@ export FCSTEXEC=${FCSTEXEC:-fv3gfs.x} export PARM_FV3DIAG=${PARM_FV3DIAG:-$HOMEgfs/parm/parm_fv3diag} export DIAG_TABLE=${DIAG_TABLE_ENKF:-${DIAG_TABLE:-$PARM_FV3DIAG/diag_table_da}} -# Cycling and forecast hour specific parameters -export CDATE=${CDATE:-"2001010100"} -export CDUMP=${CDUMP:-"enkfgdas"} - # Re-run failed members, or entire group RERUN_EFCSGRP=${RERUN_EFCSGRP:-"YES"} @@ -63,19 +58,12 @@ SENDDBN=${SENDDBN:-"NO"} ################################################################################ # Preprocessing -mkdata=NO -if [ ! -d $DATA ]; then - mkdata=YES - mkdir -p $DATA -fi cd $DATA || exit 99 DATATOP=$DATA ################################################################################ # Set output data -cymd=$(echo $CDATE | cut -c1-8) -chh=$(echo $CDATE | cut -c9-10) -EFCSGRP=$COMOUT/efcs.grp${ENSGRP} +EFCSGRP="${COM_TOP}/efcs.grp${ENSGRP}" if [ -f $EFCSGRP ]; then if [ $RERUN_EFCSGRP = "YES" ]; then rm -f $EFCSGRP @@ -100,20 +88,20 @@ export TYPE=${TYPE_ENKF:-${TYPE:-nh}} # choices: nh, hydro export MONO=${MONO_ENKF:-${MONO:-non-mono}} # choices: mono, non-mono # fv_core_nml -export CASE=${CASE_ENKF:-${CASE:-C768}} +export CASE=${CASE_ENS:-${CASE:-C768}} export layout_x=${layout_x_ENKF:-${layout_x:-8}} export layout_y=${layout_y_ENKF:-${layout_y:-16}} export LEVS=${LEVS_ENKF:-${LEVS:-64}} # nggps_diag_nml export FHOUT=${FHOUT_ENKF:-3} -if [[ ${CDUMP} == "enkfgfs" ]]; then +if [[ ${RUN} == "enkfgfs" ]]; then export FHOUT=${FHOUT_ENKF_GFS:-${FHOUT_ENKF:${FHOUT:-3}}} fi # model_configure export 
DELTIM=${DELTIM_ENKF:-${DELTIM:-225}} export FHMAX=${FHMAX_ENKF:-9} -if [[ $CDUMP == "enkfgfs" ]]; then +if [[ ${RUN} == "enkfgfs" ]]; then export FHMAX=${FHMAX_ENKF_GFS:-${FHMAX_ENKF:-${FHMAX}}} fi @@ -137,6 +125,12 @@ if [ $RECENTER_ENKF = "YES" ]; then export PREFIX_ATMINC="r" fi +# Ignore possible spelling error (nothing is misspelled) +# shellcheck disable=SC2153 +GDATE=$(${NDATE} -"${assim_freq}" "${PDY}${cyc}") +declare -x gPDY="${GDATE:0:8}" +declare -x gcyc="${GDATE:8:2}" + ################################################################################ # Run forecast for ensemble member rc=0 @@ -145,7 +139,7 @@ for imem in $(seq $ENSBEG $ENSEND); do cd $DATATOP cmem=$(printf %03i $imem) - memchar="mem$cmem" + memchar="mem${cmem}" echo "Processing MEMBER: $cmem" @@ -157,12 +151,40 @@ for imem in $(seq $ENSBEG $ENSEND); do [[ $memstat -eq 1 ]] && skip_mem="YES" fi + # Construct COM variables from templates (see config.com) + # Can't make these read-only because we are looping over members + MEMDIR="${memchar}" YMD=${PDY} HH=${cyc} generate_com -x COM_ATMOS_RESTART COM_ATMOS_INPUT COM_ATMOS_ANALYSIS \ + COM_ATMOS_HISTORY COM_ATMOS_MASTER + + RUN=${rCDUMP} MEMDIR="${memchar}" YMD="${gPDY}" HH="${gcyc}" generate_com -x COM_ATMOS_RESTART_PREV:COM_ATMOS_RESTART_TMPL + + if [[ ${DO_WAVE} == "YES" ]]; then + MEMDIR="${memchar}" YMD=${PDY} HH=${cyc} generate_com -x COM_WAVE_RESTART COM_WAVE_PREP COM_WAVE_HISTORY + RUN=${rCDUMP} MEMDIR="${memchar}" YMD="${gPDY}" HH="${gcyc}" generate_com -x COM_WAVE_RESTART_PREV:COM_WAVE_RESTART_TMPL + fi + + if [[ ${DO_OCN} == "YES" ]]; then + MEMDIR="${memchar}" YMD=${PDY} HH=${cyc} generate_com -x COM_MED_RESTART COM_OCEAN_RESTART \ + COM_OCEAN_INPUT COM_OCEAN_HISTORY COM_OCEAN_ANALYSIS + RUN=${rCDUMP} MEMDIR="${memchar}" YMD="${gPDY}" HH="${gcyc}" generate_com -x COM_OCEAN_RESTART_PREV:COM_OCEAN_RESTART_TMPL + fi + + if [[ ${DO_ICE} == "YES" ]]; then + MEMDIR="${memchar}" YMD=${PDY} HH=${cyc} generate_com -x COM_ICE_HISTORY 
COM_ICE_INPUT COM_ICE_RESTART + RUN=${rCDUMP} MEMDIR="${memchar}" YMD="${gPDY}" HH="${gcyc}" generate_com -x COM_ICE_RESTART_PREV:COM_ICE_RESTART_TMPL + fi + + if [[ ${DO_AERO} == "YES" ]]; then + MEMDIR="${memchar}" YMD=${PDY} HH=${cyc} generate_com -x COM_CHEM_HISTORY + fi + + if [ $skip_mem = "NO" ]; then ra=0 export MEMBER=$imem - export DATA=$DATATOP/$memchar + export DATA="${DATATOP}/${memchar}" if [ -d $DATA ]; then rm -rf $DATA; fi mkdir -p $DATA $FORECASTSH @@ -182,7 +204,7 @@ for imem in $(seq $ENSBEG $ENSEND); do while [ $fhr -le $FHMAX ]; do FH3=$(printf %03i $fhr) if [ $(expr $fhr % 3) -eq 0 ]; then - $DBNROOT/bin/dbn_alert MODEL GFS_ENKF $job $COMOUT/$memchar/atmos/${CDUMP}.t${cyc}z.sfcf${FH3}.nc + "${DBNROOT}/bin/dbn_alert" MODEL GFS_ENKF "${job}" "${COM_ATMOS_HISTORY}/${RUN}.t${cyc}z.sfcf${FH3}.nc" fi fhr=$((fhr+FHOUT)) done @@ -222,8 +244,5 @@ export err=$rc; err_chk ################################################################################ # Postprocessing -cd $pwd -[[ $mkdata = "YES" ]] && rm -rf $DATATOP - exit $err diff --git a/scripts/exgdas_enkf_post.sh b/scripts/exgdas_enkf_post.sh index 1338b0ceef..86ab9071a4 100755 --- a/scripts/exgdas_enkf_post.sh +++ b/scripts/exgdas_enkf_post.sh @@ -47,21 +47,14 @@ FHMAX=${FHMAX_EPOS:-9} FHOUT=${FHOUT_EPOS:-3} if [[ $CDUMP == "gfs" ]]; then - NMEM_ENKF=${NMEM_EFCS:-${NMEM_ENKF:-30}} + NMEM_ENS=${NMEM_ENS_GFS:-${NMEM_ENS:-30}} fi -NMEM_ENKF=${NMEM_ENKF:-80} +NMEM_ENS=${NMEM_ENS:-80} SMOOTH_ENKF=${SMOOTH_ENKF:-"NO"} ENKF_SPREAD=${ENKF_SPREAD:-"NO"} ################################################################################ # Preprocessing -mkdata=NO -if [ ! 
-d $DATA ]; then - mkdata=YES - mkdir -p $DATA -fi -cd $DATA || exit 99 - ENKF_SUFFIX="s" [[ $SMOOTH_ENKF = "NO" ]] && ENKF_SUFFIX="" @@ -74,27 +67,33 @@ export OMP_NUM_THREADS=$NTHREADS_EPOS ################################################################################ # Forecast ensemble member files -for imem in $(seq 1 $NMEM_ENKF); do - memchar="mem"$(printf %03i $imem) +for imem in $(seq 1 $NMEM_ENS); do + memchar="mem"$(printf %03i "${imem}") + MEMDIR=${memchar} YMD=${PDY} HH=${cyc} generate_com -x COM_ATMOS_HISTORY:COM_ATMOS_HISTORY_TMPL + for fhr in $(seq $FHMIN $FHOUT $FHMAX); do fhrchar=$(printf %03i $fhr) - $NLN $COMIN/$memchar/atmos/${PREFIX}sfcf${fhrchar}.nc sfcf${fhrchar}_$memchar - $NLN $COMIN/$memchar/atmos/${PREFIX}atmf${fhrchar}.nc atmf${fhrchar}_$memchar + ${NLN} "${COM_ATMOS_HISTORY}/${PREFIX}sfcf${fhrchar}.nc" "sfcf${fhrchar}_${memchar}" + ${NLN} "${COM_ATMOS_HISTORY}/${PREFIX}atmf${fhrchar}.nc" "atmf${fhrchar}_${memchar}" done done # Forecast ensemble mean and smoothed files +MEMDIR="ensstat" YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY_STAT:COM_ATMOS_HISTORY_TMPL +if [[ ! 
-d "${COM_ATMOS_HISTORY_STAT}" ]]; then mkdir -p "${COM_ATMOS_HISTORY_STAT}"; fi + for fhr in $(seq $FHMIN $FHOUT $FHMAX); do fhrchar=$(printf %03i $fhr) - $NLN $COMOUT/${PREFIX}sfcf${fhrchar}.ensmean.nc sfcf${fhrchar}.ensmean - $NLN $COMOUT/${PREFIX}atmf${fhrchar}.ensmean.nc atmf${fhrchar}.ensmean + ${NLN} "${COM_ATMOS_HISTORY_STAT}/${PREFIX}sfcf${fhrchar}.ensmean.nc" "sfcf${fhrchar}.ensmean" + ${NLN} "${COM_ATMOS_HISTORY_STAT}/${PREFIX}atmf${fhrchar}.ensmean.nc" "atmf${fhrchar}.ensmean" if [ $SMOOTH_ENKF = "YES" ]; then - for imem in $(seq 1 $NMEM_ENKF); do - memchar="mem"$(printf %03i $imem) - $NLN $COMOUT/$memchar/atmos/${PREFIX}atmf${fhrchar}${ENKF_SUFFIX}.nc atmf${fhrchar}${ENKF_SUFFIX}_$memchar + for imem in $(seq 1 $NMEM_ENS); do + memchar="mem"$(printf %03i "${imem}") + MEMDIR="${memchar}" YMD=${PDY} HH=${cyc} generate_com -x COM_ATMOS_HISTORY + ${NLN} "${COM_ATMOS_HISTORY}/${PREFIX}atmf${fhrchar}${ENKF_SUFFIX}.nc" "atmf${fhrchar}${ENKF_SUFFIX}_${memchar}" done fi - [[ $ENKF_SPREAD = "YES" ]] && $NLN $COMOUT/${PREFIX}atmf${fhrchar}.ensspread.nc atmf${fhrchar}.ensspread + [[ $ENKF_SPREAD = "YES" ]] && ${NLN} "${COM_ATMOS_HISTORY_STAT}/${PREFIX}atmf${fhrchar}.ensspread.nc" "atmf${fhrchar}.ensspread" done ################################################################################ @@ -109,7 +108,7 @@ for fhr in $(seq $FHMIN $FHOUT $FHMAX); do export pgm=$GETSFCENSMEANEXEC . prep_step - $APRUN_EPOS ${DATA}/$(basename $GETSFCENSMEANEXEC) ./ sfcf${fhrchar}.ensmean sfcf${fhrchar} $NMEM_ENKF + $APRUN_EPOS ${DATA}/$(basename $GETSFCENSMEANEXEC) ./ sfcf${fhrchar}.ensmean sfcf${fhrchar} $NMEM_ENS ra=$? rc=$((rc+ra)) @@ -117,9 +116,9 @@ for fhr in $(seq $FHMIN $FHOUT $FHMAX); do . 
prep_step if [ $ENKF_SPREAD = "YES" ]; then - $APRUN_EPOS ${DATA}/$(basename $GETATMENSMEANEXEC) ./ atmf${fhrchar}.ensmean atmf${fhrchar} $NMEM_ENKF atmf${fhrchar}.ensspread + $APRUN_EPOS ${DATA}/$(basename $GETATMENSMEANEXEC) ./ atmf${fhrchar}.ensmean atmf${fhrchar} $NMEM_ENS atmf${fhrchar}.ensspread else - $APRUN_EPOS ${DATA}/$(basename $GETATMENSMEANEXEC) ./ atmf${fhrchar}.ensmean atmf${fhrchar} $NMEM_ENKF + $APRUN_EPOS ${DATA}/$(basename $GETATMENSMEANEXEC) ./ atmf${fhrchar}.ensmean atmf${fhrchar} $NMEM_ENS fi ra=$? rc=$((rc+ra)) @@ -133,9 +132,9 @@ if [ $SMOOTH_ENKF = "YES" ]; then fhrchar=$(printf %03i $fhr) if [ ! -s atmf${fhrchar}${ENKF_SUFFIX}_mem001 ]; then echo WARNING! no smoothed ensemble member for fhour = $fhrchar >&2 - for imem in $(seq 1 $NMEM_ENKF); do + for imem in $(seq 1 $NMEM_ENS); do memchar="mem"$(printf %03i $imem) - $NCP atmf${fhrchar}_$memchar atmf${fhrchar}${ENKF_SUFFIX}_$memchar + ${NCP} "atmf${fhrchar}_${memchar}" "atmf${fhrchar}${ENKF_SUFFIX}_${memchar}" done fi done @@ -149,7 +148,7 @@ if [ $SENDDBN = "YES" ]; then fhrchar=$(printf %03i $fhr) if [ $(expr $fhr % 3) -eq 0 ]; then if [ -s ./sfcf${fhrchar}.ensmean ]; then - $DBNROOT/bin/dbn_alert MODEL GFS_ENKF $job $COMOUT/${PREFIX}sfcf${fhrchar}.ensmean.nc + ${DBNROOT}/bin/dbn_alert "MODEL" "GFS_ENKF" "${job}" "${COM_ATMOS_HISTORY_STAT}/${PREFIX}sfcf${fhrchar}.ensmean.nc" fi fi done @@ -159,7 +158,5 @@ fi ################################################################################ # Postprocessing cd $pwd -[[ $mkdata = "YES" ]] && rm -rf $DATA - exit $err diff --git a/scripts/exgdas_enkf_select_obs.sh b/scripts/exgdas_enkf_select_obs.sh index 7a6eec5535..2ad624bcdb 100755 --- a/scripts/exgdas_enkf_select_obs.sh +++ b/scripts/exgdas_enkf_select_obs.sh @@ -28,13 +28,9 @@ export NLN=${NLN:-"/bin/ln -sf"} # Scripts. ANALYSISSH=${ANALYSISSH:-$HOMEgfs/scripts/exglobal_atmos_analysis.sh} -# Prefix and Suffix Variables. 
-export APREFIX=${APREFIX:-""} - # Select obs export RUN_SELECT=${RUN_SELECT:-"YES"} export USE_SELECT=${USE_SELECT:-"NO"} -export SELECT_OBS=${SELECT_OBS:-$COMOUT/${APREFIX}obsinput} # Observation Operator GSI namelist initialization SETUP_INVOBS=${SETUP_INVOBS:-""} @@ -61,8 +57,6 @@ if [ ! -d $DATA ]; then fi cd $DATA || exit 8 -[[ ! -d $COMOUT ]] && mkdir -p $COMOUT - ################################################################################ # ObsInput file from ensemble mean rm -f obs*input* diff --git a/scripts/exgdas_enkf_sfc.sh b/scripts/exgdas_enkf_sfc.sh index 8ba8d6e012..5bbe7a460f 100755 --- a/scripts/exgdas_enkf_sfc.sh +++ b/scripts/exgdas_enkf_sfc.sh @@ -23,7 +23,6 @@ source "$HOMEgfs/ush/preamble.sh" pwd=$(pwd) # Base variables -CDATE=${CDATE:-"2010010100"} DONST=${DONST:-"NO"} DOSFCANL_ENKF=${DOSFCANL_ENKF:-"YES"} export CASE=${CASE:-384} @@ -47,7 +46,7 @@ GPREFIX=${GPREFIX:-""} GPREFIX_ENS=${GPREFIX_ENS:-${GPREFIX}} # Variables -NMEM_ENKF=${NMEM_ENKF:-80} +NMEM_ENS=${NMEM_ENS:-80} DOIAU=${DOIAU_ENKF:-"NO"} # Global_cycle stuff @@ -78,34 +77,30 @@ cd $DATA || exit 99 ################################################################################ # Update surface fields in the FV3 restart's using global_cycle. 
-PDY=$(echo $CDATE | cut -c1-8) -cyc=$(echo $CDATE | cut -c9-10) - -GDATE=$($NDATE -$assim_freq $CDATE) -gPDY=$(echo $GDATE | cut -c1-8) -gcyc=$(echo $GDATE | cut -c9-10) -GDUMP=${GDUMP:-"gdas"} - -BDATE=$($NDATE -3 $CDATE) -bPDY=$(echo $BDATE | cut -c1-8) -bcyc=$(echo $BDATE | cut -c9-10) +# Ignore possible spelling error (nothing is misspelled) +# shellcheck disable=SC2153 +BDATE=$(${NDATE} -3 "${PDY}${cyc}") +bPDY=${BDATE:0:8} +bcyc=${BDATE:8:2} # Get dimension information based on CASE -res=$(echo $CASE | cut -c2-) +res=${CASE:2:} JCAP_CASE=$((res*2-2)) LATB_CASE=$((res*2)) LONB_CASE=$((res*4)) # Global cycle requires these files export FNTSFA=${FNTSFA:-' '} -export FNACNA=${FNACNA:-$COMIN/${OPREFIX}seaice.5min.blend.grb} -export FNSNOA=${FNSNOA:-$COMIN/${OPREFIX}snogrb_t${JCAP_CASE}.${LONB_CASE}.${LATB_CASE}} -[[ ! -f $FNSNOA ]] && export FNSNOA="$COMIN/${OPREFIX}snogrb_t1534.3072.1536" -FNSNOG=${FNSNOG:-$COMIN_GES/${GPREFIX}snogrb_t${JCAP_CASE}.${LONB_CASE}.${LATB_CASE}} -[[ ! -f $FNSNOG ]] && FNSNOG="$COMIN_GES/${GPREFIX}snogrb_t1534.3072.1536" +export FNACNA=${FNACNA:-${COM_OBS}/${OPREFIX}seaice.5min.blend.grb} +export FNSNOA=${FNSNOA:-${COM_OBS}/${OPREFIX}snogrb_t${JCAP_CASE}.${LONB_CASE}.${LATB_CASE}} +[[ ! -f $FNSNOA ]] && export FNSNOA="${COM_OBS}/${OPREFIX}snogrb_t1534.3072.1536" +FNSNOG=${FNSNOG:-${COM_OBS_PREV}/${GPREFIX}snogrb_t${JCAP_CASE}.${LONB_CASE}.${LATB_CASE}} +[[ ! 
-f $FNSNOG ]] && FNSNOG="${COM_OBS_PREV}/${GPREFIX}snogrb_t1534.3072.1536" # Set CYCLVARS by checking grib date of current snogrb vs that of prev cycle if [ ${RUN_GETGES:-"NO"} = "YES" ]; then + # Ignore possible spelling error (nothing is misspelled) + # shellcheck disable=SC2153 snoprv=$($GETGESSH -q -t snogrb_$JCAP_CASE -e $gesenvir -n $GDUMP -v $GDATE) else snoprv=${snoprv:-$FNSNOG} @@ -121,14 +116,14 @@ else fi if [ $DONST = "YES" ]; then - export NST_FILE=${NST_FILE:-$COMIN/${APREFIX}dtfanl.nc} + export NST_FILE=${NST_FILE:-${COM_ATMOS_ANALYSIS_DET}/${APREFIX}dtfanl.nc} else export NST_FILE="NULL" fi export APRUNCY=${APRUN_CYCLE:-$APRUN_ESFC} export OMP_NUM_THREADS_CY=${NTHREADS_CYCLE:-$NTHREADS_ESFC} -export MAX_TASKS_CY=$NMEM_ENKF +export MAX_TASKS_CY=$NMEM_ENS if [ $DOIAU = "YES" ]; then # Update surface restarts at beginning of window when IAU is ON @@ -138,22 +133,31 @@ if [ $DOIAU = "YES" ]; then export TILE_NUM=$n - for imem in $(seq 1 $NMEM_ENKF); do + for imem in $(seq 1 $NMEM_ENS); do cmem=$(printf %03i $imem) memchar="mem$cmem" - [[ $TILE_NUM -eq 1 ]] && mkdir -p $COMOUT_ENS/$memchar/atmos/RESTART + MEMDIR=${memchar} YMD=${PDY} HH=${cyc} generate_com \ + COM_ATMOS_RESTART_MEM:COM_ATMOS_RESTART_TMPL + + MEMDIR=${memchar} RUN="enkfgdas" YMD=${gPDY} HH=${gcyc} generate_com \ + COM_ATMOS_RESTART_MEM_PREV:COM_ATMOS_RESTART_TMPL + + [[ ${TILE_NUM} -eq 1 ]] && mkdir -p "${COM_ATMOS_RESTART_MEM}" - $NCP $COMIN_GES_ENS/$memchar/atmos/RESTART/$bPDY.${bcyc}0000.sfc_data.tile${n}.nc $COMOUT_ENS/$memchar/atmos/RESTART/$bPDY.${bcyc}0000.sfcanl_data.tile${n}.nc - $NLN $COMIN_GES_ENS/$memchar/atmos/RESTART/$bPDY.${bcyc}0000.sfc_data.tile${n}.nc $DATA/fnbgsi.$cmem - $NLN $COMOUT_ENS/$memchar/atmos/RESTART/$bPDY.${bcyc}0000.sfcanl_data.tile${n}.nc $DATA/fnbgso.$cmem - $NLN $FIXfv3/$CASE/${CASE}_grid.tile${n}.nc $DATA/fngrid.$cmem - $NLN $FIXfv3/$CASE/${CASE}_oro_data.tile${n}.nc $DATA/fnorog.$cmem + ${NCP} 
"${COM_ATMOS_RESTART_MEM_PREV}/${bPDY}.${bcyc}0000.sfc_data.tile${n}.nc" \ + "${COM_ATMOS_RESTART_MEM}/${bPDY}.${bcyc}0000.sfcanl_data.tile${n}.nc" + ${NLN} "${COM_ATMOS_RESTART_MEM_PREV}/${bPDY}.${bcyc}0000.sfc_data.tile${n}.nc" \ + "${DATA}/fnbgsi.${cmem}" + ${NLN} "${COM_ATMOS_RESTART_MEM}/${bPDY}.${bcyc}0000.sfcanl_data.tile${n}.nc" \ + "${DATA}/fnbgso.${cmem}" + ${NLN} "${FIXfv3}/${CASE}/${CASE}_grid.tile${n}.nc" "${DATA}/fngrid.${cmem}" + ${NLN} "${FIXfv3}/${CASE}/${CASE}_oro_data.tile${n}.nc" "${DATA}/fnorog.${cmem}" done - $CYCLESH + CDATE="${PDY}${cyc}" ${CYCLESH} export err=$?; err_chk done @@ -161,29 +165,38 @@ if [ $DOIAU = "YES" ]; then fi if [ $DOSFCANL_ENKF = "YES" ]; then - for n in $(seq 1 $ntiles); do + for n in $(seq 1 $ntiles); do + + export TILE_NUM=$n - export TILE_NUM=$n + for imem in $(seq 1 $NMEM_ENS); do - for imem in $(seq 1 $NMEM_ENKF); do + cmem=$(printf %03i $imem) + memchar="mem$cmem" - cmem=$(printf %03i $imem) - memchar="mem$cmem" + MEMDIR=${memchar} YMD=${PDY} HH=${cyc} generate_com \ + COM_ATMOS_RESTART_MEM:COM_ATMOS_RESTART_TMPL - [[ $TILE_NUM -eq 1 ]] && mkdir -p $COMOUT_ENS/$memchar/atmos/RESTART + RUN="${GDUMP_ENS}" MEMDIR=${memchar} YMD=${gPDY} HH=${gcyc} generate_com \ + COM_ATMOS_RESTART_MEM_PREV:COM_ATMOS_RESTART_TMPL - $NCP $COMIN_GES_ENS/$memchar/atmos/RESTART/$PDY.${cyc}0000.sfc_data.tile${n}.nc $COMOUT_ENS/$memchar/atmos/RESTART/$PDY.${cyc}0000.sfcanl_data.tile${n}.nc - $NLN $COMIN_GES_ENS/$memchar/atmos/RESTART/$PDY.${cyc}0000.sfc_data.tile${n}.nc $DATA/fnbgsi.$cmem - $NLN $COMOUT_ENS/$memchar/atmos/RESTART/$PDY.${cyc}0000.sfcanl_data.tile${n}.nc $DATA/fnbgso.$cmem - $NLN $FIXfv3/$CASE/${CASE}_grid.tile${n}.nc $DATA/fngrid.$cmem - $NLN $FIXfv3/$CASE/${CASE}_oro_data.tile${n}.nc $DATA/fnorog.$cmem + [[ ${TILE_NUM} -eq 1 ]] && mkdir -p "${COM_ATMOS_RESTART_MEM}" - done + ${NCP} "${COM_ATMOS_RESTART_MEM_PREV}/${PDY}.${cyc}0000.sfc_data.tile${n}.nc" \ + "${COM_ATMOS_RESTART_MEM}/${PDY}.${cyc}0000.sfcanl_data.tile${n}.nc" 
+ ${NLN} "${COM_ATMOS_RESTART_MEM_PREV}/${PDY}.${cyc}0000.sfc_data.tile${n}.nc" \ + "${DATA}/fnbgsi.${cmem}" + ${NLN} "${COM_ATMOS_RESTART_MEM}/${PDY}.${cyc}0000.sfcanl_data.tile${n}.nc" \ + "${DATA}/fnbgso.${cmem}" + ${NLN} "${FIXfv3}/${CASE}/${CASE}_grid.tile${n}.nc" "${DATA}/fngrid.${cmem}" + ${NLN} "${FIXfv3}/${CASE}/${CASE}_oro_data.tile${n}.nc" "${DATA}/fnorog.${cmem}" - $CYCLESH - export err=$?; err_chk + done - done + CDATE="${PDY}${cyc}" ${CYCLESH} + export err=$?; err_chk + + done fi ################################################################################ diff --git a/scripts/exgdas_enkf_update.sh b/scripts/exgdas_enkf_update.sh index 906ef5f070..2bb749e226 100755 --- a/scripts/exgdas_enkf_update.sh +++ b/scripts/exgdas_enkf_update.sh @@ -57,7 +57,7 @@ ENKFSTAT=${ENKFSTAT:-${APREFIX}enkfstat} # Namelist parameters USE_CORRELATED_OBERRS=${USE_CORRELATED_OBERRS:-"NO"} -NMEM_ENKF=${NMEM_ENKF:-80} +NMEM_ENS=${NMEM_ENS:-80} NAM_ENKF=${NAM_ENKF:-""} SATOBS_ENKF=${SATOBS_ENKF:-""} OZOBS_ENKF=${OZOBS_ENKF:-""} @@ -90,7 +90,8 @@ fi INCREMENTS_TO_ZERO=${INCREMENTS_TO_ZERO:-"'NONE'"} ################################################################################ -ATMGES_ENSMEAN=$COMIN_GES_ENS/${GPREFIX}atmf006.ensmean.nc + +ATMGES_ENSMEAN="${COM_ATMOS_HISTORY_STAT_PREV}/${GPREFIX}atmf006.ensmean.nc" LONB_ENKF=${LONB_ENKF:-$($NCLEN $ATMGES_ENSMEAN grid_xt)} # get LONB_ENKF LATB_ENKF=${LATB_ENKF:-$($NCLEN $ATMGES_ENSMEAN grid_yt)} # get LATB_ENFK LEVS_ENKF=${LEVS_ENKF:-$($NCLEN $ATMGES_ENSMEAN pfull)} # get LEVS_ENFK @@ -137,7 +138,7 @@ $NLN $ANAVINFO anavinfo $NLN $VLOCALEIG vlocal_eig.dat # Bias correction coefficients based on the ensemble mean -$NLN $COMOUT_ANL_ENS/$GBIASe satbias_in +${NLN} "${COM_ATMOS_ANALYSIS_STAT}/${GBIASe}" "satbias_in" ################################################################################ @@ -147,12 +148,13 @@ if [ $USE_CFP = "YES" ]; then cat > $DATA/untar.sh << EOFuntar #!/bin/sh memchar=\$1 +COM_ATMOS_ANALYSIS=\$2 
flist="$CNVSTAT $OZNSTAT $RADSTAT" for ftype in \$flist; do if [ \$memchar = "ensmean" ]; then - fname=$COMOUT_ANL_ENS/\${ftype}.ensmean + fname=\${COM_ATMOS_ANALYSIS}/\${ftype}.ensmean else - fname=$COMOUT_ANL_ENS/\$memchar/atmos/\$ftype + fname=\${COM_ATMOS_ANALYSIS}/\${ftype} fi tar -xvf \$fname done @@ -165,49 +167,62 @@ fi flist="$CNVSTAT $OZNSTAT $RADSTAT" if [ $USE_CFP = "YES" ]; then - echo "$nm $DATA/untar.sh ensmean" | tee -a $DATA/mp_untar.sh + echo "${nm} ${DATA}/untar.sh ensmean ${COM_ATMOS_ANALYSIS_STAT}" | tee -a "${DATA}/mp_untar.sh" if [ ${CFP_MP:-"NO"} = "YES" ]; then nm=$((nm+1)) fi else for ftype in $flist; do - fname=$COMOUT_ANL_ENS/${ftype}.ensmean + fname="${COM_ATMOS_ANALYSIS_STAT}/${ftype}.ensmean" tar -xvf $fname done fi nfhrs=$(echo $IAUFHRS_ENKF | sed 's/,/ /g') -for imem in $(seq 1 $NMEM_ENKF); do +for imem in $(seq 1 $NMEM_ENS); do memchar="mem"$(printf %03i $imem) + + MEMDIR=${memchar} RUN=${GDUMP_ENS} YMD=${gPDY} HH=${gcyc} generate_com -x \ + COM_ATMOS_HISTORY_MEM_PREV:COM_ATMOS_HISTORY_TMPL + + MEMDIR=${memchar} YMD=${PDY} HH=${cyc} generate_com -x \ + COM_ATMOS_ANALYSIS_MEM:COM_ATMOS_ANALYSIS_TMPL + if [ $lobsdiag_forenkf = ".false." ]; then if [ $USE_CFP = "YES" ]; then - echo "$nm $DATA/untar.sh $memchar" | tee -a $DATA/mp_untar.sh + echo "${nm} ${DATA}/untar.sh ${memchar} ${COM_ATMOS_ANALYSIS_MEM}" | tee -a "${DATA}/mp_untar.sh" if [ ${CFP_MP:-"NO"} = "YES" ]; then nm=$((nm+1)) fi else for ftype in $flist; do - fname=$COMOUT_ANL_ENS/$memchar/atmos/$ftype + fname="${COM_ATMOS_ANALYSIS_MEM}/${ftype}" tar -xvf $fname done fi fi - mkdir -p $COMOUT_ANL_ENS/$memchar/atmos + mkdir -p "${COM_ATMOS_ANALYSIS_MEM}" for FHR in $nfhrs; do - $NLN $COMIN_GES_ENS/$memchar/atmos/${GPREFIX}atmf00${FHR}${ENKF_SUFFIX}.nc sfg_${CDATE}_fhr0${FHR}_${memchar} + ${NLN} "${COM_ATMOS_HISTORY_MEM_PREV}/${GPREFIX}atmf00${FHR}${ENKF_SUFFIX}.nc" \ + "sfg_${PDY}${cyc}_fhr0${FHR}_${memchar}" if [ $cnvw_option = ".true." 
]; then - $NLN $COMIN_GES_ENS/$memchar/atmos/${GPREFIX}sfcf00${FHR}.nc sfgsfc_${CDATE}_fhr0${FHR}_${memchar} + ${NLN} "${COM_ATMOS_HISTORY_MEM_PREV}/${GPREFIX}sfcf00${FHR}.nc" \ + "sfgsfc_${PDY}${cyc}_fhr0${FHR}_${memchar}" fi if [ $FHR -eq 6 ]; then if [ $DO_CALC_INCREMENT = "YES" ]; then - $NLN $COMOUT_ANL_ENS/$memchar/atmos/${APREFIX}atmanl.nc sanl_${CDATE}_fhr0${FHR}_${memchar} + ${NLN} "${COM_ATMOS_ANALYSIS_MEM}/${APREFIX}atmanl.nc" \ + "sanl_${PDY}${cyc}_fhr0${FHR}_${memchar}" else - $NLN $COMOUT_ANL_ENS/$memchar/atmos/${APREFIX}atminc.nc incr_${CDATE}_fhr0${FHR}_${memchar} + ${NLN} "${COM_ATMOS_ANALYSIS_MEM}/${APREFIX}atminc.nc" \ + "incr_${PDY}${cyc}_fhr0${FHR}_${memchar}" fi else if [ $DO_CALC_INCREMENT = "YES" ]; then - $NLN $COMOUT_ANL_ENS/$memchar/atmos/${APREFIX}atma00${FHR}.nc sanl_${CDATE}_fhr0${FHR}_${memchar} + ${NLN} "${COM_ATMOS_ANALYSIS_MEM}/${APREFIX}atma00${FHR}.nc" \ + "sanl_${PDY}${cyc}_fhr0${FHR}_${memchar}" else - $NLN $COMOUT_ANL_ENS/$memchar/atmos/${APREFIX}atmi00${FHR}.nc incr_${CDATE}_fhr0${FHR}_${memchar} + ${NLN} "${COM_ATMOS_ANALYSIS_MEM}/${APREFIX}atmi00${FHR}.nc" \ + "incr_${PDY}${cyc}_fhr0${FHR}_${memchar}" fi fi done @@ -215,9 +230,12 @@ done # Ensemble mean guess for FHR in $nfhrs; do - $NLN $COMIN_GES_ENS/${GPREFIX}atmf00${FHR}.ensmean.nc sfg_${CDATE}_fhr0${FHR}_ensmean + + ${NLN} "${COM_ATMOS_HISTORY_STAT_PREV}/${GPREFIX}atmf00${FHR}.ensmean.nc" \ + "sfg_${PDY}${cyc}_fhr0${FHR}_ensmean" if [ $cnvw_option = ".true." 
]; then - $NLN $COMIN_GES_ENS/${GPREFIX}sfcf00${FHR}.ensmean.nc sfgsfc_${CDATE}_fhr0${FHR}_ensmean + ${NLN} "${COM_ATMOS_HISTORY_STAT_PREV}/${GPREFIX}sfcf00${FHR}.ensmean.nc" \ + "sfgsfc_${PDY}${cyc}_fhr0${FHR}_ensmean" fi done @@ -236,7 +254,7 @@ fi # Create global_enkf namelist cat > enkf.nml << EOFnml &nam_enkf - datestring="$CDATE",datapath="$DATA/", + datestring="${PDY}${cyc}",datapath="$DATA/", analpertwtnh=${analpertwt},analpertwtsh=${analpertwt},analpertwttr=${analpertwt}, covinflatemax=1.e2,covinflatemin=1,pseudo_rh=.true.,iassim_order=0, corrlengthnh=${corrlength},corrlengthsh=${corrlength},corrlengthtr=${corrlength}, @@ -246,7 +264,7 @@ cat > enkf.nml << EOFnml obtimelnh=1.e30,obtimelsh=1.e30,obtimeltr=1.e30, saterrfact=1.0,numiter=0, sprd_tol=1.e30,paoverpb_thresh=0.98, - nlons=$LONA_ENKF,nlats=$LATA_ENKF,nlevs=$LEVS_ENKF,nanals=$NMEM_ENKF, + nlons=$LONA_ENKF,nlats=$LATA_ENKF,nlevs=$LEVS_ENKF,nanals=$NMEM_ENS, deterministic=.true.,sortinc=.true.,lupd_satbiasc=.false., reducedgrid=${reducedgrid},readin_localization=${readin_localization_enkf}., use_gfs_nemsio=${use_gfs_nemsio},use_gfs_ncio=${use_gfs_ncio},imp_physics=$imp_physics,lupp=$lupp, @@ -377,7 +395,7 @@ $APRUN_ENKF ${DATA}/$(basename $ENKFEXEC) 1>stdout 2>stderr export err=$?; err_chk # Cat runtime output files. -cat stdout stderr > $COMOUT_ANL_ENS/$ENKFSTAT +cat stdout stderr > "${COM_ATMOS_ANALYSIS_STAT}/${ENKFSTAT}" ################################################################################ # Postprocessing diff --git a/scripts/exgdas_global_atmos_analysis_post.py b/scripts/exgdas_global_atmos_analysis_post.py deleted file mode 100755 index 2f17ee4aea..0000000000 --- a/scripts/exgdas_global_atmos_analysis_post.py +++ /dev/null @@ -1,45 +0,0 @@ -#!/usr/bin/env python3 -################################################################################ -# UNIX Script Documentation Block -# . . 
-# Script name: exgdas_global_atmos_analysis_post.py -# Script description: Post atmospheric analysis script. -# -# Author: Cory Martin Org: NCEP/EMC Date: 2021-12-29 -# -# Abstract: This script runs after the atmospheric analysis and -# archives each diagnostic file into the R2D2 local user database. -# -# $Id$ -# -# Attributes: -# Language: Python3 -# -################################################################################ - -# import os and sys to add ush to path -import logging -import os -import sys - -# set up logger -logging.basicConfig(format='%(asctime)s:%(levelname)s:%(message)s', level=logging.INFO, datefmt='%Y-%m-%d %H:%M:%S') - -# get absolute path of ush/ directory either from env or relative to this file -my_dir = os.path.dirname(__file__) -my_home = os.path.dirname(os.path.dirname(my_dir)) -gdas_home = os.path.join(os.environ['HOMEgfs'], 'sorc', 'gdas.cd') -sys.path.append(os.path.join(os.getenv('HOMEgfs', my_home), 'ush')) -logging.info(f"sys.path={sys.path}") - -# import UFSDA utilities -import ufsda - -# get configuration based on environment variables -config = ufsda.misc_utils.get_env_config(component='atm') -config['DIAG_DIR'] = os.path.join(os.environ['COMOUT'], 'diags') -config['BIAS_OUT_DIR'] = os.path.join(os.environ['COMOUT'], 'bc') -config['provider'] = 'ncdiag' - -# use R2D2 to archive diags and bias correction coefficient files -ufsda.archive.atm_diags(config) diff --git a/scripts/exgdas_global_atmos_analysis_prep.py b/scripts/exgdas_global_atmos_analysis_prep.py deleted file mode 100755 index 65c77cd903..0000000000 --- a/scripts/exgdas_global_atmos_analysis_prep.py +++ /dev/null @@ -1,45 +0,0 @@ -#!/usr/bin/env python3 -################################################################################ -# UNIX Script Documentation Block -# . . 
-# Script name: exgdas_global_atmos_analysis_prep.py -# Script description: Stages files and generates YAML for Global Atmosphere Analysis -# -# Author: Cory Martin Org: NCEP/EMC Date: 2021-12-21 -# -# Abstract: This script stages necessary input files and produces YAML -# configuration input file for FV3-JEDI executable(s) needed -# to produce a UFS Global Atmospheric Analysis. -# -# $Id$ -# -# Attributes: -# Language: Python3 -# -################################################################################ - -# import os and sys to add ush to path -import logging -import os -import sys - -# set up logger -logging.basicConfig(format='%(asctime)s:%(levelname)s:%(message)s', level=logging.INFO, datefmt='%Y-%m-%d %H:%M:%S') - -# get absolute path of ush/ directory either from env or relative to this file -my_dir = os.path.dirname(__file__) -my_home = os.path.dirname(os.path.dirname(my_dir)) -gdas_home = os.path.join(os.environ['HOMEgfs'], 'sorc', 'gdas.cd') -sys.path.append(os.path.join(os.getenv('HOMEgfs', my_home), 'ush')) -logging.info(f"sys.path={sys.path}") - -# import UFSDA utilities -import ufsda -from ufsda.stage import atm_obs, bias_obs - -# get configuration based on environment variables -config = ufsda.misc_utils.get_env_config(component='atm') - -# use R2D2 to stage obs and bias correction coefficient files -ufsda.stage.atm_obs(config) -ufsda.stage.bias_obs(config) diff --git a/scripts/exgdas_global_atmos_analysis_run.sh b/scripts/exgdas_global_atmos_analysis_run.sh deleted file mode 100755 index 45ccdf525b..0000000000 --- a/scripts/exgdas_global_atmos_analysis_run.sh +++ /dev/null @@ -1,182 +0,0 @@ -#!/bin/bash -################################################################################ -#### UNIX Script Documentation Block -# . . 
-# Script name: exgdas_global_atmos_analysis_run.sh -# Script description: Runs the global atmospheric analysis with FV3-JEDI -# -# Author: Cory Martin Org: NCEP/EMC Date: 2021-12-28 -# -# Abstract: This script makes a global model atmospheric analysis using FV3-JEDI -# and also (for now) updates increment files using a python ush utility -# -# $Id$ -# -# Attributes: -# Language: POSIX shell -# Machine: Orion -# -################################################################################ - -# Set environment. -source "$HOMEgfs/ush/preamble.sh" - -# Directories -pwd=$(pwd) - -# Utilities -export NLN=${NLN:-"/bin/ln -sf"} -export INCPY=${INCPY:-"$HOMEgfs/sorc/gdas.cd/ush/jediinc2fv3.py"} -export GENYAML=${GENYAML:-"$HOMEgfs/sorc/gdas.cd/ush/genYAML"} -export GETOBSYAML=${GETOBSYAML:-"$HOMEgfs/sorc/gdas.cd/ush/get_obs_list.py"} - -################################################################################ -# make subdirectories -mkdir -p $DATA/fv3jedi -mkdir -p $DATA/obs -mkdir -p $DATA/diags -mkdir -p $DATA/bc -mkdir -p $DATA/anl - -################################################################################ -# generate YAML file -cat > $DATA/temp.yaml << EOF -template: ${ATMVARYAML} -output: $DATA/fv3jedi_var.yaml -config: - atm: true - BERROR_YAML: $BERROR_YAML - OBS_DIR: obs - DIAG_DIR: diags - CRTM_COEFF_DIR: crtm - BIAS_IN_DIR: obs - BIAS_OUT_DIR: bc - OBS_PREFIX: $OPREFIX - BIAS_PREFIX: $GPREFIX - OBS_LIST: $OBS_LIST - OBS_YAML_DIR: $OBS_YAML_DIR - BKG_DIR: bkg - fv3jedi_staticb_dir: berror - fv3jedi_fix_dir: fv3jedi - fv3jedi_fieldset_dir: fv3jedi - fv3jedi_fieldmetadata_dir: fv3jedi - OBS_DATE: '$CDATE' - BIAS_DATE: '$GDATE' - ANL_DIR: anl/ - NMEM_ENKF: '$NMEM_ENKF' - INTERP_METHOD: '$INTERP_METHOD' -EOF -$GENYAML --config $DATA/temp.yaml - -################################################################################ -# link observations to $DATA -$GETOBSYAML --config $DATA/fv3jedi_var.yaml --output $DATA/${OPREFIX}obsspace_list -files=$(cat 
$DATA/${OPREFIX}obsspace_list) -for file in $files; do - basefile=$(basename $file) - $NLN $COMOUT/$basefile $DATA/obs/$basefile -done - -# link backgrounds to $DATA -# linking FMS RESTART files for now -# change to (or make optional) for cube sphere history later -$NLN ${COMIN_GES}/RESTART $DATA/bkg - - -# optionally link ensemble backgrounds to $DATA -if [ $DOHYBVAR = "YES" ]; then - mkdir -p $DATA/ens - fhrs="06" - if [ $l4densvar = ".true." ]; then - fhrs="03 04 05 06 07 08 09" - fi - - for imem in $(seq 1 $NMEM_ENKF); do - memchar="mem"$(printf %03i $imem) - for fhr in $fhrs; do - $NLN ${COMIN_GES_ENS}/$memchar/atmos/RESTART $DATA/ens/$memchar - done - done - -fi - -################################################################################ -# link fix files to $DATA -# static B -CASE_BERROR=${CASE_BERROR:-${CASE_ANL:-${CASE}}} -if [[ (${STATICB_TYPE} = "bump") || (${STATICB_TYPE} = "gsibec") ]] ; then - ${NLN} "${FV3JEDI_FIX}/${STATICB_TYPE}/${CASE_BERROR}/" "${DATA}/berror" -fi - -# vertical coordinate -LAYERS=$(expr $LEVS - 1) -$NLN $FV3JEDI_FIX/fv3jedi/fv3files/akbk${LAYERS}.nc4 $DATA/fv3jedi/akbk.nc4 - -# other FV3-JEDI fix files -$NLN $FV3JEDI_FIX/fv3jedi/fv3files/fmsmpp.nml $DATA/fv3jedi/fmsmpp.nml -$NLN $FV3JEDI_FIX/fv3jedi/fv3files/field_table_gfdl $DATA/fv3jedi/field_table - -# fieldmetadata -$NLN $FV3JEDI_FIX/fv3jedi/fieldmetadata/gfs-restart.yaml $DATA/fv3jedi/gfs-restart.yaml - -# fieldsets -fieldsets="dynamics.yaml ufo.yaml" -for fieldset in $fieldsets; do - $NLN $FV3JEDI_FIX/fv3jedi/fieldsets/$fieldset $DATA/fv3jedi/$fieldset -done - -# CRTM coeffs -${NLN} "${FV3JEDI_FIX}/crtm/2.3.0" "${DATA}/crtm" - -# Link executable to $DATA -$NLN $JEDIVAREXE $DATA/fv3jedi_var.x - -################################################################################ -# run executable -export OMP_NUM_THREADS=$NTHREADS_ATMANAL -export pgm=$JEDIVAREXE -. 
prep_step -$APRUN_ATMANAL $DATA/fv3jedi_var.x $DATA/fv3jedi_var.yaml 1>&1 2>&2 -export err=$?; err_chk - -################################################################################ -# translate FV3-JEDI increment to FV3 readable format -if [[ "${CASE_BERROR}" = "${CASE}" ]]; then - atmges_fv3=${COMIN_GES}/${GPREFIX}atmf006.nc -else - atmges_fv3=${COMIN_GES}/${GPREFIX}atmf006.ensres.nc -fi -atminc_jedi=${DATA}/anl/atminc.${PDY}_${cyc}0000z.nc4 -atminc_fv3=${COMOUT}/${CDUMP}.${cycle}.atminc.nc -if [ -s $atminc_jedi ]; then - $INCPY $atmges_fv3 $atminc_jedi $atminc_fv3 - export err=$? -else - echo "***WARNING*** missing $atminc_jedi ABORT" - export err=99 -fi -err_chk - -################################################################################ -# Create log file noting creating of analysis increment file -echo "$CDUMP $CDATE atminc and tiled sfcanl done at $(date)" > $COMOUT/${CDUMP}.${cycle}.loginc.txt - -################################################################################ -# Copy diags and YAML to $COMOUT -cp -r $DATA/fv3jedi_var.yaml $COMOUT/${CDUMP}.${cycle}.fv3jedi_var.yaml -cp -rf $DATA/diags $COMOUT/ -cp -rf $DATA/bc $COMOUT/ - -# ***WARNING*** PATCH -# Copy abias, abias_pc, and abias_air from previous cycle to current cycle -# Deterministic abias used in enkf cycle -alist="abias abias_air abias_int abias_pc" -for abias in $alist; do - cp "${COMIN_GES}/${GPREFIX}${abias}" "${COMOUT}/${APREFIX}${abias}" -done - -################################################################################ - -exit ${err} - -################################################################################ diff --git a/scripts/exgdas_global_atmos_ensanal_post.py b/scripts/exgdas_global_atmos_ensanal_post.py deleted file mode 100755 index 6c5384953f..0000000000 --- a/scripts/exgdas_global_atmos_ensanal_post.py +++ /dev/null @@ -1,44 +0,0 @@ -#!/usr/bin/env python3 -################################################################################ -# UNIX 
Script Documentation Block -# . . -# Script name: exgdas_global_atmos_analysis_post.py -# Script description: Post atmospheric analysis script. -# -# Author: Cory Martin Org: NCEP/EMC Date: 2021-12-29 -# -# Abstract: This script runs after the atmospheric analysis and -# archives each diagnostic file into the R2D2 local user database. -# -# $Id$ -# -# Attributes: -# Language: Python3 -# -################################################################################ - -# import os and sys to add ush to path -import logging -import os -import sys - -# set up logger -logging.basicConfig(format='%(asctime)s:%(levelname)s:%(message)s', level=logging.INFO, datefmt='%Y-%m-%d %H:%M:%S') - -# get absolute path of ush/ directory either from env or relative to this file -my_dir = os.path.dirname(__file__) -my_home = os.path.dirname(os.path.dirname(my_dir)) -gdas_home = os.path.join(os.environ['HOMEgfs'], 'sorc', 'gdas.cd') -sys.path.append(os.path.join(os.getenv('HOMEgfs', my_home), 'ush')) -logging.info(f"sys.path={sys.path}") - -# import UFSDA utilities -import ufsda - -# get configuration based on environment variables -config = ufsda.misc_utils.get_env_config(component='atm') -config['DIAG_DIR'] = os.path.join(os.environ['COMOUT_ENS'], 'diags') -config['provider'] = 'ncdiag_lgetkf' - -# use R2D2 to archive hofx files -ufsda.archive.atm_diags(config) diff --git a/scripts/exgdas_global_atmos_ensanal_run.sh b/scripts/exgdas_global_atmos_ensanal_run.sh deleted file mode 100755 index 2e87573eda..0000000000 --- a/scripts/exgdas_global_atmos_ensanal_run.sh +++ /dev/null @@ -1,167 +0,0 @@ -#!/bin/bash -################################################################################ -#### UNIX Script Documentation Block -# . . 
-# Script name: exgdas_global_atmos_analysis_run.sh -# Script description: Runs the global atmospheric analysis with FV3-JEDI -# -# Author: Cory Martin Org: NCEP/EMC Date: 2021-12-28 -# -# Abstract: This script makes a global model atmospheric analysis using FV3-JEDI -# and also (for now) updates increment files using a python ush utility -# -# $Id$ -# -# Attributes: -# Language: POSIX shell -# Machine: Orion -# -################################################################################ - -# Set environment. -source "$HOMEgfs/ush/preamble.sh" - -# Directories -pwd=$(pwd) - -# Utilities -export NLN=${NLN:-"/bin/ln -sf"} -export INCPY=${INCPY:-"$HOMEgfs/sorc/gdas.cd/ush/jediinc2fv3.py"} -export GENYAML=${GENYAML:-"$HOMEgfs/sorc/gdas.cd/ush/genYAML"} -export GETOBSYAML=${GETOBSYAML:-"$HOMEgfs/sorc/gdas.cd/ush/get_obs_list.py"} - -################################################################################ -# make subdirectories -mkdir -p $DATA/fv3jedi -mkdir -p $DATA/obs -mkdir -p $DATA/diags -mkdir -p $DATA/bc -mkdir -p $DATA/anl - -################################################################################ -# generate YAML file -cat > $DATA/temp.yaml << EOF -template: ${ATMENSYAML} -output: $DATA/fv3jedi_ens.yaml -config: - atm: true - BERROR_YAML: $BERROR_YAML - OBS_DIR: obs - DIAG_DIR: diags - CRTM_COEFF_DIR: crtm - BIAS_IN_DIR: obs - BIAS_OUT_DIR: bc - OBS_PREFIX: $OPREFIX - BIAS_PREFIX: $GPREFIX - OBS_LIST: $OBS_LIST - OBS_YAML_DIR: $OBS_YAML_DIR - BKG_DIR: bkg - fv3jedi_staticb_dir: berror - fv3jedi_fix_dir: fv3jedi - fv3jedi_fieldset_dir: fv3jedi - fv3jedi_fieldmetadata_dir: fv3jedi - OBS_DATE: '$CDATE' - BIAS_DATE: '$GDATE' - ANL_DIR: anl/ - NMEM_ENKF: '$NMEM_ENKF' - INTERP_METHOD: '$INTERP_METHOD' -EOF -$GENYAML --config $DATA/temp.yaml - -################################################################################ -# link observations to $DATA -$GETOBSYAML --config $DATA/fv3jedi_ens.yaml --output $DATA/${OPREFIX}obsspace_list -files=$(cat 
$DATA/${OPREFIX}obsspace_list) -for file in $files; do - basefile=$(basename $file) - $NLN $COMIN/$basefile $DATA/obs/$basefile -done - -# link backgrounds to $DATA -# linking FMS RESTART files for now -# change to (or make optional) for cube sphere history later -##$NLN ${COMIN_GES}/RESTART $DATA/bkg - - -# Link ensemble backgrounds to $DATA. Make directories -# for ensemble output -if [ $DOHYBVAR = "YES" -o $DO_JEDIENS = "YES" ]; then - mkdir -p $DATA/bkg - for imem in $(seq 1 $NMEM_ENKF); do - memchar="mem"$(printf %03i $imem) - mkdir -p $DATA/bkg/$memchar - $NLN ${COMIN_GES_ENS}/$memchar/RESTART $DATA/bkg/$memchar - mkdir -p $DATA/anl/$memchar - done -fi - -################################################################################ -# link fix files to $DATA -# static B -##CASE_BERROR=${CASE_BERROR:-${CASE_ANL:-$CASE}} -##$NLN $FV3JEDI_FIX/bump/$CASE_BERROR/ $DATA/berror - -# vertical coordinate -LAYERS=$(expr $LEVS - 1) -$NLN $FV3JEDI_FIX/fv3jedi/fv3files/akbk${LAYERS}.nc4 $DATA/fv3jedi/akbk.nc4 - -# other FV3-JEDI fix files -$NLN $FV3JEDI_FIX/fv3jedi/fv3files/fmsmpp.nml $DATA/fv3jedi/fmsmpp.nml -$NLN $FV3JEDI_FIX/fv3jedi/fv3files/field_table_gfdl $DATA/fv3jedi/field_table - -# fieldmetadata -$NLN $FV3JEDI_FIX/fv3jedi/fieldmetadata/gfs-restart.yaml $DATA/fv3jedi/gfs-restart.yaml - -# fieldsets -fieldsets="dynamics.yaml ufo.yaml" -for fieldset in $fieldsets; do - $NLN $FV3JEDI_FIX/fv3jedi/fieldsets/$fieldset $DATA/fv3jedi/$fieldset -done - -# CRTM coeffs -${NLN} "${FV3JEDI_FIX}/crtm/2.3.0" "${DATA}/crtm" - -# Link executable to $DATA -$NLN $JEDIENSEXE $DATA/fv3jedi_ens.x - -################################################################################ -# run executable -export OMP_NUM_THREADS=$NTHREADS_ATMENSANAL -export pgm=$JEDIENSEXE -. 
prep_step -$APRUN_ATMENSANAL $DATA/fv3jedi_ens.x $DATA/fv3jedi_ens.yaml 1>&1 2>&2 -export err=$?; err_chk - -################################################################################ -# translate FV3-JEDI increment to FV3 readable format -for imem in $(seq 1 $NMEM_ENKF); do - memchar="mem"$(printf %03i $imem) - atmges_fv3=$COMIN_GES_ENS/$memchar/${GPREFIX}atmf006.nc - atminc_jedi=$DATA/anl/$memchar/atminc.${PDY}_${cyc}0000z.nc4 - atminc_fv3=$COMOUT_ENS/$memchar/${CDUMP}.${cycle}.atminc.nc - mkdir -p $COMOUT_ENS/$memchar - if [ -s $atminc_jedi ]; then - $INCPY $atmges_fv3 $atminc_jedi $atminc_fv3 - export err=$? - else - echo "***WARNING*** missing $atminc_jedi ABORT" - export err=99 - fi - err_chk -done - -################################################################################ -# Create log file noting creating of analysis increment file -echo "$CDUMP $CDATE atminc done at $(date)" > $COMOUT_ENS/${CDUMP}.${cycle}.loginc.txt - -################################################################################ -# Copy diags and YAML to $COMOUT -cp -r ${DATA}/fv3jedi_ens.yaml ${COMOUT_ENS}/${CDUMP}.${cycle}.fv3jedi_ens.yaml -cp -rf "${DATA}/diags" "${COMOUT_ENS}/" - - -################################################################################ - -exit ${err} - -################################################################################ diff --git a/scripts/exgfs_atmos_awips_20km_1p0deg.sh b/scripts/exgfs_atmos_awips_20km_1p0deg.sh index 91053a8762..0f9868a506 100755 --- a/scripts/exgfs_atmos_awips_20km_1p0deg.sh +++ b/scripts/exgfs_atmos_awips_20km_1p0deg.sh @@ -19,47 +19,39 @@ # echo " " ############################################################################### -source "$HOMEgfs/ush/preamble.sh" +source "${HOMEgfs}/ush/preamble.sh" fcsthrs="$1" num=$# -job_name=$(echo $job|sed 's/[jpt]gfs/gfs/') +job_name=${job/[jpt]gfs/gfs} -if test "$num" -ge 1 -then +if (( num != 1 )); then echo "" - echo " Appropriate number of arguments were passed" 
+ echo " FATAL ERROR: Incorrect number of arguments " echo "" -else echo "" - echo " Number of arguments were not passed " - echo "" - echo "" - echo "Usage: $0 \$fcsthrs (3 digits) " + echo "Usage: $0 \${fcsthrs} (3 digits) " echo "" exit 16 fi -cd $DATA +cd "${DATA}" || exit 2 ############################################### # Wait for the availability of the pgrb file ############################################### icnt=1 -while [ $icnt -lt 1000 ] -do - if [ -s $COMIN/${RUN}.${cycle}.pgrb2b.0p25.f$fcsthrs.idx ] - then - break - fi - - sleep 10 - icnt=$((icnt + 1)) - if [ $icnt -ge 180 ] - then - msg="ABORTING after 30 min of waiting for the GFS pgrb2 file!" - err_exit $msg - fi +while (( icnt < 1000 )); do + if [[ -s "${COM_ATMOS_GRIB_0p25}/${RUN}.${cycle}.pgrb2b.0p25.f${fcsthrs}.idx" ]]; then + break + fi + + sleep 10 + icnt=$((icnt + 1)) + if (( icnt >= 180 )); then + msg="FATAL ERROR: No GFS pgrb2 file after 30 min of waiting" + err_exit "${msg}" + fi done ######################################## @@ -94,157 +86,184 @@ export SCALEDEC=${SCALDEC:-$USHgfs/scale_dec.sh} # Process GFS GRIB AWIP PRODUCTS IN GRIB2 # ############################################################### -cp $COMIN/gfs.t${cyc}z.pgrb2.0p25.f${fcsthrs} tmpfile2${fcsthrs} -cp $COMIN/gfs.t${cyc}z.pgrb2b.0p25.f${fcsthrs} tmpfile2b${fcsthrs} -cat tmpfile2${fcsthrs} tmpfile2b${fcsthrs} > tmpfile${fcsthrs} -$WGRIB2 tmpfile${fcsthrs} | grep -F -f $PARMproduct/gfs_awips_parmlist_g2 | $WGRIB2 -i -grib masterfile tmpfile${fcsthrs} +cp "${COM_ATMOS_GRIB_0p25}/gfs.t${cyc}z.pgrb2.0p25.f${fcsthrs}" "tmpfile2${fcsthrs}" +cp "${COM_ATMOS_GRIB_0p25}/gfs.t${cyc}z.pgrb2b.0p25.f${fcsthrs}" "tmpfile2b${fcsthrs}" +cat "tmpfile2${fcsthrs}" "tmpfile2b${fcsthrs}" > "tmpfile${fcsthrs}" +${WGRIB2} "tmpfile${fcsthrs}" | grep -F -f "${PARMproduct}/gfs_awips_parmlist_g2" | \ + ${WGRIB2} -i -grib masterfile "tmpfile${fcsthrs}" export err=$? 
-if [[ $err -ne 0 ]] ; then +if [[ $err -ne 0 ]]; then echo " FATAL ERROR: masterfile does not exist." exit $err fi -$WGRIB2 masterfile -match ":PWAT:entire atmosphere" -grib gfs_pwat.grb -$WGRIB2 masterfile | grep -v ":PWAT:entire atmosphere" | $WGRIB2 -i -grib temp_gfs masterfile +${WGRIB2} masterfile -match ":PWAT:entire atmosphere" -grib gfs_pwat.grb +${WGRIB2} masterfile | grep -v ":PWAT:entire atmosphere" | ${WGRIB2} -i -grib temp_gfs masterfile ################################################################## # Process to change PWAT from level 200 to 10 (Entire Atmosphere) # in production defintion template (PDT) 4.0 ################################################################## -$WGRIB2 gfs_pwat.grb -set_byte 4 23 10 -grib gfs_pwat_levels_10.grb +${WGRIB2} gfs_pwat.grb -set_byte 4 23 10 -grib gfs_pwat_levels_10.grb export err=$?; err_chk cat temp_gfs gfs_pwat_levels_10.grb > tmp_masterfile -for GRID in conus ak prico pac 003 -do - case $GRID in - conus) - # Grid 20km_conus - CONUS - 20 km Quadruple Resolution (Lambert Conformal) - # export grid_20km_conus="30 6 0 0 0 0 0 0 369 257 12190000 226541000 8 25000000 265000000 20318000 20318000 0 64 25000000 25000000 0 0" - # $COPYGB2 -g "$grid_20km_conus" -i0 -x tmp_masterfile awps_file_f${fcsthrs}_${GRID} - - export gridconus="lambert:265.0:25.0:25.0 226.541:369:20318.0 12.19:257:20318.0" - $WGRIB2 tmp_masterfile $opt1uv $opt21 $opt22 $opt23 $opt24 $opt25 $opt26 $opt27 $opt28 -new_grid $gridconus awps_file_f${fcsthrs}_${GRID} - ;; - ak) - # Grid 20km_ak - Alaska - Double Resolution (Polar Stereographic) - # Redefined grid 217 for Alaska region - # export grid_20km_ak="20 6 0 0 0 0 0 0 277 213 30000000 187000000 8 60000000 225000000 22500000 22500000 0 64" - # $COPYGB2 -g "$grid_20km_ak" -i0 -x tmp_masterfile awps_file_f${fcsthrs}_${GRID} - - export gridak="nps:210.0:60.0 170.0:277:22500 35.0:225:22500" - $WGRIB2 tmp_masterfile $opt1uv $opt21 $opt22 $opt23 $opt24 $opt25 $opt26 $opt27 $opt28 -new_grid 
$gridak awps_file_f${fcsthrs}_${GRID} - ;; - prico) - # Grid 20km_prico - 0.25 degree Lat/Lon grid for Puerto Rico (20km) - # export grid_20km_prico="0 6 0 0 0 0 0 0 275 205 0 0 50750000 271750000 48 -250000 340250000 250000 250000 0" - # $COPYGB2 -g "$grid_20km_prico" -i0 -x tmp_masterfile awps_file_f${fcsthrs}_${GRID} - - export gridprico="latlon 271.75:275:0.25 50.75:205:-0.25" - $WGRIB2 tmp_masterfile $opt1 $opt21 $opt22 $opt23 $opt24 $opt25 $opt26 $opt27 $opt28 -new_grid $gridprico awps_file_f${fcsthrs}_${GRID} - ;; - pac) - # Grid 20km_pac - 20 km Mercator grid for Pacific Region - # export grid_20km_pac="10 6 0 0 0 0 0 0 837 692 -45000000 110000000 48 20000000 65720000 270000000 64 0 20000000 20000000" - # NEW export grid_20km_pac="10 6 0 0 0 0 0 0 837 725 -45000000 110000000 48 20000000 65734500 270000000 64 0 20000000 20000000" - # $COPYGB2 -g "$grid_20km_pac" -i0 -x tmp_masterfile awps_file_f${fcsthrs}_${GRID} - - export gridpac="mercator:20.0 110.0:837:20000:270.0 -45.0:725:20000:65.7345" - $WGRIB2 tmp_masterfile $opt1 $opt21 $opt22 $opt23 $opt24 $opt25 $opt26 $opt27 $opt28 -new_grid $gridpac awps_file_f${fcsthrs}_${GRID} - ;; - 003) - ###################################################################### - # Process GFS GRIB AWIP 1.0 DEGREE (GRID 003) PRODUCTS IN GRIB2 # - ###################################################################### - export grid003="latlon 0:360:1.0 90:181:-1.0" - $WGRIB2 tmp_masterfile $opt1 $opt21 $opt22 $opt23 $opt24 $opt25 $opt26 $opt27 $opt28 -new_grid $grid003 awps_file_f${fcsthrs}_${GRID} - ;; +for GRID in conus ak prico pac 003; do + # shellcheck disable=SC2086 + case ${GRID} in + conus) + # Grid 20km_conus - CONUS - 20 km Quadruple Resolution (Lambert Conformal) + # export grid_20km_conus="30 6 0 0 0 0 0 0 369 257 12190000 226541000 8 25000000 265000000 20318000 20318000 0 64 25000000 25000000 0 0" + # $COPYGB2 -g "$grid_20km_conus" -i0 -x tmp_masterfile awps_file_f${fcsthrs}_${GRID} + + export 
gridconus="lambert:265.0:25.0:25.0 226.541:369:20318.0 12.19:257:20318.0" + ${WGRIB2} tmp_masterfile ${opt1uv} ${opt21} ${opt22} ${opt23} ${opt24} ${opt25} ${opt26} \ + ${opt27} ${opt28} -new_grid ${gridconus} "awps_file_f${fcsthrs}_${GRID}" + ;; + ak) + # Grid 20km_ak - Alaska - Double Resolution (Polar Stereographic) + # Redefined grid 217 for Alaska region + # export grid_20km_ak="20 6 0 0 0 0 0 0 277 213 30000000 187000000 8 60000000 225000000 22500000 22500000 0 64" + # $COPYGB2 -g "$grid_20km_ak" -i0 -x tmp_masterfile awps_file_f${fcsthrs}_${GRID} + + export gridak="nps:210.0:60.0 170.0:277:22500 35.0:225:22500" + ${WGRIB2} tmp_masterfile ${opt1uv} ${opt21} ${opt22} ${opt23} ${opt24} ${opt25} ${opt26} \ + ${opt27} ${opt28} -new_grid ${gridak} "awps_file_f${fcsthrs}_${GRID}" + ;; + prico) + # Grid 20km_prico - 0.25 degree Lat/Lon grid for Puerto Rico (20km) + # export grid_20km_prico="0 6 0 0 0 0 0 0 275 205 0 0 50750000 271750000 48 -250000 340250000 250000 250000 0" + # $COPYGB2 -g "$grid_20km_prico" -i0 -x tmp_masterfile awps_file_f${fcsthrs}_${GRID} + + export gridprico="latlon 271.75:275:0.25 50.75:205:-0.25" + ${WGRIB2} tmp_masterfile ${opt1} ${opt21} ${opt22} ${opt23} ${opt24} ${opt25} ${opt26} \ + ${opt27} ${opt28} -new_grid ${gridprico} "awps_file_f${fcsthrs}_${GRID}" + ;; + pac) + # Grid 20km_pac - 20 km Mercator grid for Pacific Region + # export grid_20km_pac="10 6 0 0 0 0 0 0 837 692 -45000000 110000000 48 20000000 65720000 270000000 64 0 20000000 20000000" + # NEW export grid_20km_pac="10 6 0 0 0 0 0 0 837 725 -45000000 110000000 48 20000000 65734500 270000000 64 0 20000000 20000000" + # $COPYGB2 -g "$grid_20km_pac" -i0 -x tmp_masterfile awps_file_f${fcsthrs}_${GRID} + + export gridpac="mercator:20.0 110.0:837:20000:270.0 -45.0:725:20000:65.7345" + ${WGRIB2} tmp_masterfile ${opt1} ${opt21} ${opt22} ${opt23} ${opt24} ${opt25} ${opt26} \ + ${opt27} ${opt28} -new_grid ${gridpac} "awps_file_f${fcsthrs}_${GRID}" + ;; + 003) + 
###################################################################### + # Process GFS GRIB AWIP 1.0 DEGREE (GRID 003) PRODUCTS IN GRIB2 # + ###################################################################### + export grid003="latlon 0:360:1.0 90:181:-1.0" + ${WGRIB2} tmp_masterfile ${opt1} ${opt21} ${opt22} ${opt23} ${opt24} ${opt25} ${opt26} \ + ${opt27} ${opt28} -new_grid ${grid003} "awps_file_f${fcsthrs}_${GRID}" + ;; + *) + echo "FATAL ERROR: Unknown output grid ${GRID}" + exit 2 + ;; esac - $TRIMRH awps_file_f${fcsthrs}_${GRID} - $SCALEDEC awps_file_f${fcsthrs}_${GRID} - $GRB2INDEX awps_file_f${fcsthrs}_${GRID} awps_file_fi${fcsthrs}_${GRID} - -########################################################################### -# Checking fields in awps_file_f${fcsthrs}_${GRID} file -# before TOCGRIB2 adding WMO headers for AWIPS products. -# -# NOTE: numparm is the total of fields in grib2_awpgfs_20km_conusf000 file -########################################################################### -numparm=247 -numrec=$( $WGRIB2 awps_file_f${fcsthrs}_${GRID} | wc -l ) - -if [ $numrec -lt $numparm ] -then - msg="ABORTING : awps_file_f${fcsthrs}_${GRID} file is missing fields for AWIPS !" - err_exit $msg -fi + # shellcheck disable= + ${TRIMRH} "awps_file_f${fcsthrs}_${GRID}" + ${SCALEDEC} "awps_file_f${fcsthrs}_${GRID}" + ${GRB2INDEX} "awps_file_f${fcsthrs}_${GRID}" "awps_file_fi${fcsthrs}_${GRID}" + + ########################################################################### + # Checking fields in awps_file_f${fcsthrs}_${GRID} file + # before TOCGRIB2 adding WMO headers for AWIPS products. + # + # NOTE: numparm is the total of fields in grib2_awpgfs_20km_conusf000 file + ########################################################################### + numparm=247 + numrec=$( ${WGRIB2} "awps_file_f${fcsthrs}_${GRID}" | wc -l ) + + if (( numrec < numparm )); then + msg="FATAL ERROR: awps_file_f${fcsthrs}_${GRID} file is missing fields for AWIPS !" 
+ err_exit "${msg}" || exit 10 + fi -# Processing AWIPS GRIB2 grids with WMO headers + # Processing AWIPS GRIB2 grids with WMO headers pgm=tocgrib2 export pgm; prep_step startmsg - if [ $GRID = "003" -a $(expr ${fcsthrs} % 6) -eq 0 ] ; then - export FORT11=awps_file_f${fcsthrs}_${GRID} - export FORT31=awps_file_fi${fcsthrs}_${GRID} - export FORT51=grib2.awpgfs${fcsthrs}.${GRID} + if [[ ${GRID} = "003" && $(( fcsthrs % 6 )) == 0 ]]; then + export FORT11="awps_file_f${fcsthrs}_${GRID}" + export FORT31="awps_file_fi${fcsthrs}_${GRID}" + export FORT51="grib2.awpgfs${fcsthrs}.${GRID}" + + cp "${PARMwmo}/grib2_awpgfs${fcsthrs}.${GRID}" "parm_list" + if [[ ${DO_WAVE} != "YES" ]]; then + # Remove wave field it not running wave model + grep -vw "5WAVH" "parm_list" > "parm_list_temp" + mv "parm_list_temp" "parm_list" + fi - $TOCGRIB2 < $PARMwmo/grib2_awpgfs${fcsthrs}.${GRID} >> $pgmout 2> errfile + ${TOCGRIB2} < "parm_list" >> "${pgmout}" 2> errfile export err=$?; err_chk echo " error from tocgrib2=",$err - if [ $SENDCOM = "YES" ] ; then + if [[ ${SENDCOM} == "YES" ]]; then ############################## - # Post Files to ${COMOUTwmo} + # Post Files to ${COM_ATMOS_WMO} ############################## - mv grib2.awpgfs${fcsthrs}.${GRID} ${COMOUTwmo}/grib2.awpgfs${fcsthrs}.${GRID}.gfs_awips_f${fcsthrs}_1p0deg_${cyc} + mv "grib2.awpgfs${fcsthrs}.${GRID}" \ + "${COM_ATMOS_WMO}/grib2.awpgfs${fcsthrs}.${GRID}.gfs_awips_f${fcsthrs}_1p0deg_${cyc}" ############################## # Distribute Data ############################## - if [ "$SENDDBN" = 'YES' -o "$SENDAWIP" = 'YES' ] ; then - $DBNROOT/bin/dbn_alert NTC_LOW $NET $job ${COMOUTwmo}/grib2.awpgfs${fcsthrs}.${GRID}.gfs_awips_f${fcsthrs}_1p0deg_${cyc} + if [[ "${SENDDBN}" == 'YES' || "${SENDAWIP}" == 'YES' ]]; then + "${DBNROOT}/bin/dbn_alert" NTC_LOW "${NET}" "${job}" \ + "${COM_ATMOS_WMO}/grib2.awpgfs${fcsthrs}.${GRID}.gfs_awips_f${fcsthrs}_1p0deg_${cyc}" else - echo "File 
${COMOUTwmo}/grib2.awpgfs${fcsthrs}.${GRID}.gfs_awips_f${fcsthrs}_1p0deg_${cyc} not posted to db_net." + echo "File ${COM_ATMOS_WMO}/grib2.awpgfs${fcsthrs}.${GRID}.gfs_awips_f${fcsthrs}_1p0deg_${cyc} not posted to db_net." fi fi - elif [ $GRID != "003" ] ; then - export FORT11=awps_file_f${fcsthrs}_${GRID} - export FORT31=awps_file_fi${fcsthrs}_${GRID} - export FORT51=grib2.awpgfs_20km_${GRID}_f${fcsthrs} + elif [[ ${GRID} != "003" ]]; then + export FORT11="awps_file_f${fcsthrs}_${GRID}" + export FORT31="awps_file_fi${fcsthrs}_${GRID}" + export FORT51="grib2.awpgfs_20km_${GRID}_f${fcsthrs}" + + cp "${PARMwmo}/grib2_awpgfs_20km_${GRID}f${fcsthrs}" "parm_list" + if [[ ${DO_WAVE} != "YES" ]]; then + # Remove wave field it not running wave model + grep -vw "5WAVH" "parm_list" > "parm_list_temp" + mv "parm_list_temp" "parm_list" + fi - $TOCGRIB2 < $PARMwmo/grib2_awpgfs_20km_${GRID}f${fcsthrs} >> $pgmout 2> errfile - export err=$? ;err_chk - echo " error from tocgrib2=",$err + ${TOCGRIB2} < "parm_list" >> "${pgmout}" 2> errfile + export err=$?; err_chk || exit "${err}" - if [ $SENDCOM = "YES" ] ; then + if [[ ${SENDCOM} = "YES" ]]; then - ############################## - # Post Files to ${COMOUTwmo} - ############################## + ############################## + # Post Files to ${COM_ATMOS_WMO} + ############################## - mv grib2.awpgfs_20km_${GRID}_f${fcsthrs} ${COMOUTwmo}/grib2.awpgfs_20km_${GRID}_f${fcsthrs}.$job_name + mv "grib2.awpgfs_20km_${GRID}_f${fcsthrs}" \ + "${COM_ATMOS_WMO}/grib2.awpgfs_20km_${GRID}_f${fcsthrs}.${job_name}" - ############################## - # Distribute Data - ############################## + ############################## + # Distribute Data + ############################## - if [ "$SENDDBN" = 'YES' -o "$SENDAWIP" = 'YES' ] ; then - $DBNROOT/bin/dbn_alert NTC_LOW $NET $job ${COMOUTwmo}/grib2.awpgfs_20km_${GRID}_f${fcsthrs}.$job_name - else - echo "File ${COMOUTwmo}/grib2.awpgfs_20km_${GRID}_f${fcsthrs}.$job_name not posted to 
db_net." + if [[ "${SENDDBN}" = 'YES' || "${SENDAWIP}" = 'YES' ]]; then + "${DBNROOT}/bin/dbn_alert" NTC_LOW "${NET}" "${job}" \ + "${COM_ATMOS_WMO}/grib2.awpgfs_20km_${GRID}_f${fcsthrs}.${job_name}" + else + echo "File ${COM_ATMOS_WMO}/grib2.awpgfs_20km_${GRID}_f${fcsthrs}.${job_name} not posted to db_net." + fi fi - fi fi echo "Awip Processing ${fcsthrs} hour completed normally" done -if [ -e "$pgmout" ] ; then - cat $pgmout +if [[ -e "${pgmout}" ]]; then + cat "${pgmout}" fi diff --git a/scripts/exgfs_atmos_grib2_special_npoess.sh b/scripts/exgfs_atmos_grib2_special_npoess.sh index ccffbeb7aa..4009a8e66a 100755 --- a/scripts/exgfs_atmos_grib2_special_npoess.sh +++ b/scripts/exgfs_atmos_grib2_special_npoess.sh @@ -47,66 +47,62 @@ SLEEP_LOOP_MAX=$(expr $SLEEP_TIME / $SLEEP_INT) ############################################################################## export SHOUR=000 export FHOUR=024 -export fhr=$(printf "%03d" $SHOUR) ############################################################ # Loop Through the Post Forecast Files ############################################################ -while test 10#$fhr -le $FHOUR -do - - ############################### - # Start Looping for the - # existence of the restart files - ############################### - export pgm="postcheck" - ic=1 - while [ $ic -le $SLEEP_LOOP_MAX ] - do - if test -f $COMIN/gfs.t${cyc}z.pgrb2b.0p50.f${fhr}.idx - then - break - else - ic=$(expr $ic + 1) - sleep $SLEEP_INT - fi - ############################### - # If we reach this point assume - # fcst job never reached restart - # period and error exit - ############################### - if [ $ic -eq $SLEEP_LOOP_MAX ] - then - export err=9 - err_chk - fi - done - -###################################################################### -# Process Global NPOESS 0.50 GFS GRID PRODUCTS IN GRIB2 F000 - F024 # -###################################################################### - paramlist=${PARMproduct}/global_npoess_paramlist_g2 - cp 
$COMIN/gfs.t${cyc}z.pgrb2.0p50.f${fhr} tmpfile2 - cp $COMIN/gfs.t${cyc}z.pgrb2b.0p50.f${fhr} tmpfile2b - cat tmpfile2 tmpfile2b > tmpfile - $WGRIB2 tmpfile | grep -F -f $paramlist | $WGRIB2 -i -grib pgb2file tmpfile - export err=$?; err_chk - - if test $SENDCOM = "YES" - then - cp pgb2file $COMOUT/${RUN}.${cycle}.pgrb2f${fhr}.npoess - - if test $SENDDBN = "YES" - then - $DBNROOT/bin/dbn_alert MODEL GFS_PGBNPOESS $job $COMOUT/${RUN}.${cycle}.pgrb2f${fhr}.npoess - else - msg="File ${RUN}.${cycle}.pgrb2f${fhr}.npoess not posted to db_net." - postmsg "$msg" - fi - echo "$PDY$cyc$fhr" > $COMOUT/${RUN}.t${cyc}z.control.halfdeg.npoess - fi - rm tmpfile pgb2file - export fhr=$(printf "%03d" $(expr $fhr + $FHINC)) +for (( fhr=$((10#${SHOUR})); fhr <= $((10#${FHOUR})); fhr = fhr + FHINC )); do + + fhr3=$(printf "%03d" "${fhr}") + + ############################### + # Start Looping for the + # existence of the restart files + ############################### + export pgm="postcheck" + ic=1 + while (( ic <= SLEEP_LOOP_MAX )); do + if [[ -f "${COM_ATMOS_GRIB_0p50}/gfs.t${cyc}z.pgrb2b.0p50.f${fhr3}.idx" ]]; then + break + else + ic=$((ic + 1)) + sleep "${SLEEP_INT}" + fi + ############################### + # If we reach this point assume + # fcst job never reached restart + # period and error exit + ############################### + if (( ic == SLEEP_LOOP_MAX )); then + echo "FATAL ERROR: 0p50 grib file not available after max sleep time" + export err=9 + err_chk || exit "${err}" + fi + done + + ###################################################################### + # Process Global NPOESS 0.50 GFS GRID PRODUCTS IN GRIB2 F000 - F024 # + ###################################################################### + paramlist=${PARMproduct}/global_npoess_paramlist_g2 + cp "${COM_ATMOS_GRIB_0p50}/gfs.t${cyc}z.pgrb2.0p50.f${fhr3}" tmpfile2 + cp "${COM_ATMOS_GRIB_0p50}/gfs.t${cyc}z.pgrb2b.0p50.f${fhr3}" tmpfile2b + cat tmpfile2 tmpfile2b > tmpfile + ${WGRIB2} tmpfile | grep -F -f 
${paramlist} | ${WGRIB2} -i -grib pgb2file tmpfile + export err=$?; err_chk + + if [[ ${SENDCOM} == "YES" ]]; then + cp pgb2file "${COM_ATMOS_GOES}/${RUN}.${cycle}.pgrb2f${fhr3}.npoess" + + if [[ ${SENDDBN} == "YES" ]]; then + "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGBNPOESS "${job}" \ + "${COM_ATMOS_GOES}/${RUN}.${cycle}.pgrb2f${fhr3}.npoess" + else + msg="File ${RUN}.${cycle}.pgrb2f${fhr3}.npoess not posted to db_net." + postmsg "${msg}" || echo "${msg}" + fi + echo "${PDY}${cyc}${fhr3}" > "${COM_ATMOS_GOES}/${RUN}.t${cyc}z.control.halfdeg.npoess" + fi + rm tmpfile pgb2file done @@ -115,89 +111,80 @@ done ################################################################ export SHOUR=000 export FHOUR=180 -export fhr=$(printf "%03d" $SHOUR) ################################# # Process GFS PGRB2_SPECIAL_POST ################################# -while test 10#$fhr -le $FHOUR -do - ############################### - # Start Looping for the - # existence of the restart files - ############################### - set +x - export pgm="postcheck" - ic=1 - while [ $ic -le $SLEEP_LOOP_MAX ] - do - if test -f $restart_file$fhr - then - break - else - ic=$(expr $ic + 1) - sleep $SLEEP_INT - fi - ############################### - # If we reach this point assume - # fcst job never reached restart - # period and error exit - ############################### - if [ $ic -eq $SLEEP_LOOP_MAX ] - then - export err=9 - err_chk - fi - done - set_trace - - ############################### - # Put restart files into /nwges - # for backup to start Model Fcst - ############################### - - cp $COMIN/${RUN}.t${cyc}z.special.grb2f$fhr masterfile - -# $COPYGB2 -g "0 6 0 0 0 0 0 0 360 181 0 0 90000000 0 48 -90000000 359000000 1000000 1000000 0" -i1,1 -x masterfile pgb2file - -# export grid1p0="latlon 0:360:1.0 90:181:-1.0" - export grid0p25="latlon 0:1440:0.25 90:721:-0.25" - $WGRIB2 masterfile $opt1 $opt21 $opt22 $opt23 $opt24 $opt25 $opt26 $opt27 $opt28 -new_grid $grid0p25 pgb2file - -# creating 
higher resolution goes files for US centers -# $COPYGB2 -g "30 6 0 0 0 0 0 0 349 277 1000000 214500000 8 50000000 253000000 32463000 32463000 0 64 50000000 50000000 0 0" -i1,1 -x masterfile pgb2file2 - - export gridconus="lambert:253.0:50.0:50.0 214.5:349:32463.0 1.0:277:32463.0" - $WGRIB2 masterfile $opt1uv $opt21 $opt22 $opt23 $opt24 $opt25 $opt26 $opt27 $opt28 -new_grid $gridconus pgb2file2 - - $WGRIB2 pgb2file -s > pgb2ifile - - if test $SENDCOM = "YES" - then - - cp pgb2file $COMOUT/${RUN}.${cycle}.goessimpgrb2.0p25.f${fhr} - cp pgb2ifile $COMOUT/${RUN}.${cycle}.goessimpgrb2.0p25.f${fhr}.idx - - cp pgb2file2 $COMOUT/${RUN}.${cycle}.goessimpgrb2f${fhr}.grd221 - - if test $SENDDBN = "YES" - then - $DBNROOT/bin/dbn_alert MODEL GFS_GOESSIMPGB2_0P25 $job $COMOUT/${RUN}.${cycle}.goessimpgrb2.0p25.f${fhr} - $DBNROOT/bin/dbn_alert MODEL GFS_GOESSIMPGB2_0P25_WIDX $job $COMOUT/${RUN}.${cycle}.goessimpgrb2.0p25.f${fhr}.idx - $DBNROOT/bin/dbn_alert MODEL GFS_GOESSIMGRD221_PGB2 $job $COMOUT/${RUN}.${cycle}.goessimpgrb2f${fhr}.grd221 - fi - - echo "$PDY$cyc$fhr" > $COMOUT/${RUN}.t${cyc}z.control.goessimpgrb - fi - rm pgb2file2 pgb2ifile - - if test "$SENDECF" = 'YES' - then - export fhour=$(expr ${fhr} % 6 ) - fi - - export fhr=$(printf "%03d" $(expr $fhr + $FHINC)) +for (( fhr=$((10#${SHOUR})); fhr <= $((10#${FHOUR})); fhr = fhr + FHINC )); do + + fhr3=$(printf "%03d" "${fhr}") + + ############################### + # Start Looping for the + # existence of the restart files + ############################### + set +x + export pgm="postcheck" + ic=1 + while (( ic <= SLEEP_LOOP_MAX )); do + if [[ -f "${COM_ATMOS_GOES}/${RUN}.t${cyc}z.special.grb2if${fhr3}.idx" ]]; then + break + else + ic=$((ic + 1)) + sleep "${SLEEP_INT}" + fi + ############################### + # If we reach this point assume + # fcst job never reached restart + # period and error exit + ############################### + if (( ic == SLEEP_LOOP_MAX )); then + echo "FATAL ERROR: Special goes grib file not 
available after max sleep time" + export err=9 + err_chk || exit "${err}" + fi + done + set_trace + ############################### + # Put restart files into /nwges + # for backup to start Model Fcst + ############################### + cp "${COM_ATMOS_GOES}/${RUN}.t${cyc}z.special.grb2if${fhr3}" masterfile + export grid0p25="latlon 0:1440:0.25 90:721:-0.25" + ${WGRIB2} masterfile ${opt1} ${opt21} ${opt22} ${opt23} ${opt24} ${opt25} ${opt26} \ + ${opt27} ${opt28} -new_grid ${grid0p25} pgb2file + + export gridconus="lambert:253.0:50.0:50.0 214.5:349:32463.0 1.0:277:32463.0" + ${WGRIB2} masterfile ${opt1uv} ${opt21} ${opt22} ${opt23} ${opt24} ${opt25} ${opt26} \ + ${opt27} ${opt28} -new_grid ${gridconus} pgb2file2 + + ${WGRIB2} pgb2file -s > pgb2ifile + + if [[ ${SENDCOM} == "YES" ]]; then + + cp pgb2file "${COM_ATMOS_GOES}/${RUN}.${cycle}.goessimpgrb2.0p25.f${fhr3}" + cp pgb2ifile "${COM_ATMOS_GOES}/${RUN}.${cycle}.goessimpgrb2.0p25.f${fhr3}.idx" + cp pgb2file2 "${COM_ATMOS_GOES}/${RUN}.${cycle}.goessimpgrb2f${fhr3}.grd221" + + if [[ ${SENDDBN} == "YES" ]]; then + "${DBNROOT}/bin/dbn_alert" MODEL GFS_GOESSIMPGB2_0P25 "${job}" \ + "${COM_ATMOS_GOES}/${RUN}.${cycle}.goessimpgrb2.0p25.f${fhr3}" + "${DBNROOT}/bin/dbn_alert" MODEL GFS_GOESSIMPGB2_0P25_WIDX "${job}" \ + "${COM_ATMOS_GOES}/${RUN}.${cycle}.goessimpgrb2.0p25.f${fhr3}.idx" + "${DBNROOT}/bin/dbn_alert" MODEL GFS_GOESSIMGRD221_PGB2 "${job}" \ + "${COM_ATMOS_GOES}/${RUN}.${cycle}.goessimpgrb2f${fhr3}.grd221" + fi + + echo "${PDY}${cyc}${fhr3}" > "${COM_ATMOS_GOES}/${RUN}.t${cyc}z.control.goessimpgrb" + fi + rm pgb2file2 pgb2ifile + + if [[ ${SENDECF} == "YES" ]]; then + # TODO Does this even do anything? 
+ export fhour=$(( fhr % 6 )) + fi + done diff --git a/scripts/exgfs_atmos_grib_awips.sh b/scripts/exgfs_atmos_grib_awips.sh index 857846c448..f10508626f 100755 --- a/scripts/exgfs_atmos_grib_awips.sh +++ b/scripts/exgfs_atmos_grib_awips.sh @@ -21,51 +21,45 @@ # echo " FEB 2019 - Removed grid 225" ##################################################################### -source "$HOMEgfs/ush/preamble.sh" +source "${HOMEgfs}/ush/preamble.sh" fcsthrs="$1" num=$# -job_name=$(echo $job|sed 's/[jpt]gfs/gfs/') +job_name=${job/[jpt]gfs/gfs} -fcsthrs=$(printf "%03d" $fcsthrs) - -export SCALEDEC=${SCALDEC:-$USHgfs/scale_dec.sh} - -if test "$num" -ge 1 -then - echo "" - echo " Appropriate number of arguments were passed" - echo "" -else +if (( num != 1 )); then echo "" - echo " FATAL ERROR: Number of arguments were not passed." + echo " FATAL ERROR: Incorrect number of arguments " echo "" echo "" - echo "Usage: $0 \$fcsthrs (3-digit) " + echo "Usage: $0 \${fcsthrs} (3 digits) " echo "" exit 16 fi -cd $DATA/awips_g1 +cd "${DATA}" || exit 2 + +fcsthrs=$(printf "%03d" "${fcsthrs}") + +export SCALEDEC=${SCALDEC:-${USHgfs}/scale_dec.sh} + +cd ${DATA}/awips_g1 || exit 2 ############################################### # Wait for the availability of the pgrb file ############################################### icnt=1 -while [ $icnt -lt 1000 ] -do - if [ -s $COMIN/${RUN}.${cycle}.pgrb2b.0p25.f${fcsthrs}.idx ] - then - break - fi - - sleep 10 - icnt=$((icnt + 1)) - if [ $icnt -ge 180 ] - then - msg="ABORTING after 30 min of waiting for the pgrb file!" 
- err_exit $msg - fi +while (( icnt < 1000 )); do + if [[ -s "${COM_ATMOS_GRIB_0p25}/${RUN}.${cycle}.pgrb2b.0p25.f${fcsthrs}.idx" ]]; then + break + fi + + sleep 10 + icnt=$((icnt + 1)) + if (( icnt >= 180 )); then + msg="FATAL ERROR: No GFS pgrb2 file after 30 min of waiting" + err_exit "${msg}" + fi done echo " ------------------------------------------" @@ -80,64 +74,63 @@ echo "###############################################" echo " " set_trace - cp $COMIN/gfs.t${cyc}z.pgrb2.0p25.f${fcsthrs} tmpfile2 - cp $COMIN/gfs.t${cyc}z.pgrb2b.0p25.f${fcsthrs} tmpfile2b - cat tmpfile2 tmpfile2b > tmpfile - $WGRIB2 tmpfile | grep -F -f $PARMproduct/gfs_awips_parmlist_g2 | $WGRIB2 -i -grib masterfile tmpfile - $SCALEDEC masterfile - $CNVGRIB -g21 masterfile masterfile.grib1 +cp "${COM_ATMOS_GRIB_0p25}/gfs.t${cyc}z.pgrb2.0p25.f${fcsthrs}" "tmpfile2" +cp "${COM_ATMOS_GRIB_0p25}/gfs.t${cyc}z.pgrb2b.0p25.f${fcsthrs}" "tmpfile2b" +cat tmpfile2 tmpfile2b > tmpfile +${WGRIB2} tmpfile | grep -F -f "${PARMproduct}/gfs_awips_parmlist_g2" | \ + ${WGRIB2} -i -grib masterfile tmpfile +${SCALEDEC} masterfile +${CNVGRIB} -g21 masterfile masterfile.grib1 - ln -s masterfile.grib1 fort.11 +ln -s masterfile.grib1 fort.11 -# $OVERGRIDID << EOF - ${UTILgfs}/exec/overgridid << EOF +"${HOMEgfs}/exec/overgridid.x" << EOF 255 EOF - mv fort.51 master.grbf${fcsthrs} - rm fort.11 +mv fort.51 "master.grbf${fcsthrs}" +rm fort.11 - $GRBINDEX master.grbf${fcsthrs} master.grbif${fcsthrs} +${GRBINDEX} "master.grbf${fcsthrs}" "master.grbif${fcsthrs}" ############################################################### # Process GFS GRIB1 AWIP GRIDS 211 PRODUCTS ############################################################### - executable=mkgfsawps - DBNALERT_TYPE=GRIB_LOW +DBNALERT_TYPE=GRIB_LOW - startmsg +startmsg # GRID=211 out to 240 hours: - export GRID=211 - export FORT11=master.grbf${fcsthrs} - export FORT31=master.grbif${fcsthrs} - export FORT51=xtrn.awpgfs${fcsthrs}.${GRID} +export GRID=211 +export 
FORT11="master.grbf${fcsthrs}" +export FORT31="master.grbif${fcsthrs}" +export FORT51="xtrn.awpgfs${fcsthrs}.${GRID}" # $MKGFSAWPS < $PARMwmo/grib_awpgfs${fcsthrs}.${GRID} parm=KWBC >> $pgmout 2>errfile - ${UTILgfs}/exec/mkgfsawps < $PARMwmo/grib_awpgfs${fcsthrs}.${GRID} parm=KWBC >> $pgmout 2>errfile - export err=$?; err_chk - ############################## - # Post Files to ${COMOUTwmo} - ############################## +"${HOMEgfs}/exec/mkgfsawps.x" < "${PARMwmo}/grib_awpgfs${fcsthrs}.${GRID}" parm=KWBC >> "${pgmout}" 2>errfile +export err=$?; err_chk +############################## +# Post Files to ${COM_ATMOS_WMO} +############################## - if test "$SENDCOM" = 'YES' - then - cp xtrn.awpgfs${fcsthrs}.${GRID} ${COMOUTwmo}/xtrn.awpgfs${fcsthrs}.${GRID}.$job_name +if [[ "${SENDCOM}" = 'YES' ]]; then + cp "xtrn.awpgfs${fcsthrs}.${GRID}" "${COM_ATMOS_WMO}/xtrn.awpgfs${fcsthrs}.${GRID}.${job_name}" - ############################## - # Distribute Data - ############################## + ############################## + # Distribute Data + ############################## - if [ "$SENDDBN" = 'YES' -o "$SENDAWIP" = 'YES' ] ; then - $DBNROOT/bin/dbn_alert $DBNALERT_TYPE $NET $job ${COMOUTwmo}/xtrn.awpgfs${fcsthrs}.${GRID}.$job_name - else - echo "File $output_grb.$job_name not posted to db_net." - fi + if [[ "${SENDDBN}" == 'YES' || "${SENDAWIP}" == 'YES' ]] ; then + "${DBNROOT}/bin/dbn_alert" "${DBNALERT_TYPE}" "${NET}" "${job}" \ + "${COM_ATMOS_WMO}/xtrn.awpgfs${fcsthrs}.${GRID}.${job_name}" + else + echo "File ${output_grb}.${job_name} not posted to db_net." 
fi +fi -if [ -e "$pgmout" ] ; then - cat $pgmout +if [[ -e "${pgmout}" ]] ; then + cat ${pgmout} fi ############################################################################### diff --git a/scripts/exgfs_atmos_nawips.sh b/scripts/exgfs_atmos_nawips.sh index 764dde4fe1..07b0ca8b3f 100755 --- a/scripts/exgfs_atmos_nawips.sh +++ b/scripts/exgfs_atmos_nawips.sh @@ -21,13 +21,14 @@ cd $DATA RUN2=$1 fend=$2 DBN_ALERT_TYPE=$3 +destination=${4} DATA_RUN=$DATA/$RUN2 mkdir -p $DATA_RUN cd $DATA_RUN # -NAGRIB=$GEMEXE/nagrib2_nc +NAGRIB=$GEMEXE/nagrib2 # cpyfil=gds @@ -44,137 +45,131 @@ maxtries=360 fhcnt=$fstart while [ $fhcnt -le $fend ] ; do -if [[ $(mkdir lock.${fhcnt}) == 0 ]] ; then - cd lock.$fhcnt - cp $FIXgempak/g2varswmo2.tbl g2varswmo2.tbl - cp $FIXgempak/g2vcrdwmo2.tbl g2vcrdwmo2.tbl - cp $FIXgempak/g2varsncep1.tbl g2varsncep1.tbl - cp $FIXgempak/g2vcrdncep1.tbl g2vcrdncep1.tbl - - fhr=$(printf "%03d" $fhcnt) - fhcnt3=$(expr $fhr % 3) - - fhr3=$(printf "%03d" $fhcnt) - - GEMGRD=${RUN2}_${PDY}${cyc}f${fhr3} - -# Set type of Interpolation for WGRIB2 - export opt1=' -set_grib_type same -new_grid_winds earth ' - export opt1uv=' -set_grib_type same -new_grid_winds grid ' - export opt21=' -new_grid_interpolation bilinear -if ' - export opt22=":(CSNOW|CRAIN|CFRZR|CICEP|ICSEV):" - export opt23=' -new_grid_interpolation neighbor -fi ' - export opt24=' -set_bitmap 1 -set_grib_max_bits 16 -if ' - export opt25=":(APCP|ACPCP|PRATE|CPRAT):" - export opt26=' -set_grib_max_bits 25 -fi -if ' - export opt27=":(APCP|ACPCP|PRATE|CPRAT|DZDT):" - export opt28=' -new_grid_interpolation budget -fi ' - export TRIMRH=$HOMEgfs/ush/trim_rh.sh - - if [ $RUN2 = "gfs_0p50" ]; then - export GRIBIN=$COMIN/${model}.${cycle}.pgrb2.0p50.f${fhr} - GRIBIN_chk=$COMIN/${model}.${cycle}.pgrb2.0p50.f${fhr}.idx - elif [ $RUN2 = "gfs_0p25" -o $RUN2 = "gdas_0p25" -o $RUN2 = "gfs35_atl" -o $RUN2 = "gfs35_pac" -o $RUN2 = "gfs40" ]; then - export GRIBIN=$COMIN/${model}.${cycle}.pgrb2.0p25.f${fhr} - 
GRIBIN_chk=$COMIN/${model}.${cycle}.pgrb2.0p25.f${fhr}.idx - else - export GRIBIN=$COMIN/${model}.${cycle}.pgrb2.1p00.f${fhr} - GRIBIN_chk=$COMIN/${model}.${cycle}.pgrb2.1p00.f${fhr}.idx - fi - - icnt=1 - while [ $icnt -lt 1000 ] - do - if [ -r $GRIBIN_chk ] ; then - sleep 5 - break - else - echo "The process is waiting ... ${GRIBIN_chk} file to proceed." - sleep 10 - let "icnt=icnt+1" - fi - if [ $icnt -ge $maxtries ] - then - echo "ABORTING: after 1 hour of waiting for ${GRIBIN_chk} file at F$fhr to end." - export err=7 ; err_chk - exit $err - fi - done - -case $RUN2 in - gfs35_pac) -# $COPYGB2 -g "0 6 0 0 0 0 0 0 416 186 0 0 75125000 130000000 48 17000000 260000000 312000 312000 0" -x $GRIBIN grib$fhr -# NEW define gfs35_pac="0 6 0 0 0 0 0 0 416 186 0 -1 75125000 130000000 48 17405000 259480000 312000 312000 0" -# $COPYGB2 -g "0 6 0 0 0 0 0 0 416 186 0 -1 75125000 130000000 48 17405000 259480000 312000 312000 0" -x $GRIBIN grib$fhr - - export gfs35_pac='latlon 130.0:416:0.312 75.125:186:-0.312' - $WGRIB2 $GRIBIN $opt1 $opt21 $opt22 $opt23 $opt24 $opt25 $opt26 $opt27 $opt28 -new_grid ${gfs35_pac} grib$fhr - $TRIMRH grib$fhr - ;; - gfs35_atl) -# $COPYGB2 -g "0 6 0 0 0 0 0 0 480 242 0 0 75125000 230000000 48 -500000 20000000 312000 312000 0" -x $GRIBIN grib$fhr -# NEW define gfs35_atl="0 6 0 0 0 0 0 0 480 242 0 -1 75125000 230000000 48 -67000 19448000 312000 312000 0" -# $COPYGB2 -g "0 6 0 0 0 0 0 0 480 242 0 -1 75125000 230000000 48 -67000 19448000 312000 312000 0" -x $GRIBIN grib$fhr - - export gfs35_atl='latlon 230.0:480:0.312 75.125:242:-0.312' - $WGRIB2 $GRIBIN $opt1 $opt21 $opt22 $opt23 $opt24 $opt25 $opt26 $opt27 $opt28 -new_grid ${gfs35_atl} grib$fhr - $TRIMRH grib$fhr - ;; - gfs40) -# $COPYGB2 -g "30 6 0 0 0 0 0 0 185 129 12190000 226541000 8 25000000 265000000 40635000 40635000 0 64 25000000 25000000 0 0" -x $GRIBIN grib$fhr - - export gfs40='lambert:265.0:25.0:25.0 226.541:185:40635.0 12.19:129:40635.0' - $WGRIB2 $GRIBIN $opt1uv $opt21 $opt22 $opt23 
$opt24 $opt25 $opt26 $opt27 $opt28 -new_grid ${gfs40} grib$fhr - $TRIMRH grib$fhr - ;; - *) - cp $GRIBIN grib$fhr -esac - - export pgm="nagrib2 F$fhr" - startmsg - - $NAGRIB << EOF - GBFILE = grib$fhr - INDXFL = - GDOUTF = $GEMGRD - PROJ = $proj - GRDAREA = $grdarea - KXKY = $kxky - MAXGRD = $maxgrd - CPYFIL = $cpyfil - GAREA = $garea - OUTPUT = $output - GBTBLS = $gbtbls - GBDIAG = - PDSEXT = $pdsext - l - r + if mkdir "lock.${fhcnt}" ; then + cd lock.$fhcnt + cp $FIXgempak/g2varswmo2.tbl g2varswmo2.tbl + cp $FIXgempak/g2vcrdwmo2.tbl g2vcrdwmo2.tbl + cp $FIXgempak/g2varsncep1.tbl g2varsncep1.tbl + cp $FIXgempak/g2vcrdncep1.tbl g2vcrdncep1.tbl + + fhr=$(printf "%03d" "${fhcnt}") + + GEMGRD=${RUN2}_${PDY}${cyc}f${fhr} + + # Set type of Interpolation for WGRIB2 + export opt1=' -set_grib_type same -new_grid_winds earth ' + export opt1uv=' -set_grib_type same -new_grid_winds grid ' + export opt21=' -new_grid_interpolation bilinear -if ' + export opt22=":(CSNOW|CRAIN|CFRZR|CICEP|ICSEV):" + export opt23=' -new_grid_interpolation neighbor -fi ' + export opt24=' -set_bitmap 1 -set_grib_max_bits 16 -if ' + export opt25=":(APCP|ACPCP|PRATE|CPRAT):" + export opt26=' -set_grib_max_bits 25 -fi -if ' + export opt27=":(APCP|ACPCP|PRATE|CPRAT|DZDT):" + export opt28=' -new_grid_interpolation budget -fi ' + export TRIMRH=$HOMEgfs/ush/trim_rh.sh + + case ${RUN2} in + # TODO: Why aren't we interpolating from the 0p25 grids for 35-km and 40-km? + 'gfs_0p50' | 'gfs_0p25') res=${RUN2: -4};; + *) res="1p00";; + esac + + source_var="COM_ATMOS_GRIB_${res}" + export GRIBIN="${!source_var}/${model}.${cycle}.pgrb2.${res}.f${fhr}" + GRIBIN_chk="${!source_var}/${model}.${cycle}.pgrb2.${res}.f${fhr}.idx" + + icnt=1 + while [ $icnt -lt 1000 ]; do + if [ -r $GRIBIN_chk ] ; then + sleep 5 + break + else + echo "The process is waiting ... ${GRIBIN_chk} file to proceed." 
+ sleep 10 + let "icnt=icnt+1" + fi + if [ $icnt -ge $maxtries ]; then + echo "ABORTING: after 1 hour of waiting for ${GRIBIN_chk} file at F$fhr to end." + export err=7 ; err_chk + exit $err + fi + done + + case $RUN2 in + gfs35_pac) + # $COPYGB2 -g "0 6 0 0 0 0 0 0 416 186 0 0 75125000 130000000 48 17000000 260000000 312000 312000 0" -x $GRIBIN grib$fhr + # NEW define gfs35_pac="0 6 0 0 0 0 0 0 416 186 0 -1 75125000 130000000 48 17405000 259480000 312000 312000 0" + # $COPYGB2 -g "0 6 0 0 0 0 0 0 416 186 0 -1 75125000 130000000 48 17405000 259480000 312000 312000 0" -x $GRIBIN grib$fhr + + export gfs35_pac='latlon 130.0:416:0.312 75.125:186:-0.312' + $WGRIB2 $GRIBIN $opt1 $opt21 $opt22 $opt23 $opt24 $opt25 $opt26 $opt27 $opt28 -new_grid ${gfs35_pac} grib$fhr + $TRIMRH grib$fhr + ;; + gfs35_atl) + # $COPYGB2 -g "0 6 0 0 0 0 0 0 480 242 0 0 75125000 230000000 48 -500000 20000000 312000 312000 0" -x $GRIBIN grib$fhr + # NEW define gfs35_atl="0 6 0 0 0 0 0 0 480 242 0 -1 75125000 230000000 48 -67000 19448000 312000 312000 0" + # $COPYGB2 -g "0 6 0 0 0 0 0 0 480 242 0 -1 75125000 230000000 48 -67000 19448000 312000 312000 0" -x $GRIBIN grib$fhr + + export gfs35_atl='latlon 230.0:480:0.312 75.125:242:-0.312' + $WGRIB2 $GRIBIN $opt1 $opt21 $opt22 $opt23 $opt24 $opt25 $opt26 $opt27 $opt28 -new_grid ${gfs35_atl} grib$fhr + $TRIMRH grib$fhr + ;; + gfs40) + # $COPYGB2 -g "30 6 0 0 0 0 0 0 185 129 12190000 226541000 8 25000000 265000000 40635000 40635000 0 64 25000000 25000000 0 0" -x $GRIBIN grib$fhr + + export gfs40='lambert:265.0:25.0:25.0 226.541:185:40635.0 12.19:129:40635.0' + $WGRIB2 $GRIBIN $opt1uv $opt21 $opt22 $opt23 $opt24 $opt25 $opt26 $opt27 $opt28 -new_grid ${gfs40} grib$fhr + $TRIMRH grib$fhr + ;; + *) + cp $GRIBIN grib$fhr + esac + + export pgm="nagrib2 F$fhr" + startmsg + + $NAGRIB << EOF + GBFILE = grib$fhr + INDXFL = + GDOUTF = $GEMGRD + PROJ = $proj + GRDAREA = $grdarea + KXKY = $kxky + MAXGRD = $maxgrd + CPYFIL = $cpyfil + GAREA = $garea + OUTPUT = 
$output + GBTBLS = $gbtbls + GBDIAG = + PDSEXT = $pdsext + l + r EOF - export err=$?;err_chk - - if [ $SENDCOM = "YES" ] ; then - cpfs $GEMGRD $COMOUT/$GEMGRD - if [ $SENDDBN = "YES" ] ; then - $DBNROOT/bin/dbn_alert MODEL ${DBN_ALERT_TYPE} $job \ - $COMOUT/$GEMGRD - fi - fi - cd $DATA_RUN -else + export err=$?;err_chk + + if [[ ${SENDCOM} == "YES" ]] ; then + cpfs "${GEMGRD}" "${destination}/${GEMGRD}" + if [[ ${SENDDBN} == "YES" ]] ; then + "${DBNROOT}/bin/dbn_alert" MODEL "${DBN_ALERT_TYPE}" "${job}" \ + "${destination}/${GEMGRD}" + fi + fi + cd $DATA_RUN + else if [ $fhcnt -ge 240 ] ; then - if [ $fhcnt -lt 276 -a $RUN2 = "gfs_0p50" ] ; then - let fhcnt=fhcnt+6 - else - let fhcnt=fhcnt+12 - fi + if [ $fhcnt -lt 276 -a $RUN2 = "gfs_0p50" ] ; then + let fhcnt=fhcnt+6 + else + let fhcnt=fhcnt+12 + fi elif [ $fhcnt -lt 120 -a $RUN2 = "gfs_0p25" ] ; then -#### let fhcnt=fhcnt+1 - let fhcnt=fhcnt+$ILPOST + #### let fhcnt=fhcnt+1 + let fhcnt=fhcnt+$ILPOST else - let fhcnt=fhcnt+finc + fhcnt=$((ILPOST > finc ? 
fhcnt+ILPOST : fhcnt+finc )) fi -fi + fi done $GEMEXE/gpend diff --git a/scripts/exgfs_atmos_post.sh b/scripts/exgfs_atmos_post.sh index aa266c1721..40bde0f731 100755 --- a/scripts/exgfs_atmos_post.sh +++ b/scripts/exgfs_atmos_post.sh @@ -84,7 +84,7 @@ export IDRT=${IDRT:-0} # IDRT=0 is setting for outputting grib files on lat/lon # Process analysis when post_times is 00 stime="$(echo "${post_times}" | cut -c1-3)" export stime -export loganl="${COMIN}/${PREFIX}atmanl.nc" +export loganl="${COM_ATMOS_ANALYSIS}/${PREFIX}atmanl.nc" if [[ "${stime}" = "anl" ]]; then if [[ -f "${loganl}" ]]; then @@ -106,9 +106,9 @@ if [[ "${stime}" = "anl" ]]; then fi [[ -f flxfile ]] && rm flxfile ; [[ -f nemsfile ]] && rm nemsfile - ln -fs "${COMIN}/${PREFIX}atmanl.nc" nemsfile + ln -fs "${COM_ATMOS_ANALYSIS}/${PREFIX}atmanl.nc" nemsfile export NEMSINP=nemsfile - ln -fs "${COMIN}/${PREFIX}sfcanl.nc" flxfile + ln -fs "${COM_ATMOS_ANALYSIS}/${PREFIX}sfcanl.nc" flxfile export FLXINP=flxfile export PGBOUT=pgbfile @@ -138,28 +138,28 @@ if [[ "${stime}" = "anl" ]]; then if [[ "${GRIBVERSION}" = 'grib2' ]]; then MASTERANL=${PREFIX}master.grb2${fhr3} MASTERANLIDX=${PREFIX}master.grb2i${fhr3} - cp "${PGBOUT2}" "${COMOUT}/${MASTERANL}" - ${GRB2INDEX} "${PGBOUT2}" "${COMOUT}/${MASTERANLIDX}" + cp "${PGBOUT2}" "${COM_ATMOS_MASTER}/${MASTERANL}" + ${GRB2INDEX} "${PGBOUT2}" "${COM_ATMOS_MASTER}/${MASTERANLIDX}" fi if [[ "${SENDDBN}" = 'YES' ]]; then - "${DBNROOT}/bin/dbn_alert" MODEL GFS_MSC_sfcanl "${job}" "${COMOUT}/${PREFIX}sfcanl.nc" - "${DBNROOT}/bin/dbn_alert" MODEL GFS_SA "${job}" "${COMOUT}/${PREFIX}atmanl.nc" + "${DBNROOT}/bin/dbn_alert" MODEL GFS_MSC_sfcanl "${job}" "${COM_ATMOS_ANALYSIS}/${PREFIX}sfcanl.nc" + "${DBNROOT}/bin/dbn_alert" MODEL GFS_SA "${job}" "${COM_ATMOS_ANALYSIS}/${PREFIX}atmanl.nc" if [[ "${PGBF}" = 'YES' ]]; then - "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2_0P25 "${job}" "${COMOUT}/${PREFIX}pgrb2.0p25.anl" - "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2_0P25_WIDX "${job}" 
"${COMOUT}/${PREFIX}pgrb2.0p25.anl.idx" - "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2B_0P25 "${job}" "${COMOUT}/${PREFIX}pgrb2b.0p25.anl" - "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2B_0P25_WIDX "${job}" "${COMOUT}/${PREFIX}pgrb2b.0p25.anl.idx" - - "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2_0P5 "${job}" "${COMOUT}/${PREFIX}pgrb2.0p50.anl" - "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2_0P5_WIDX "${job}" "${COMOUT}/${PREFIX}pgrb2.0p50.anl.idx" - "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2B_0P5 "${job}" "${COMOUT}/${PREFIX}pgrb2b.0p50.anl" - "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2B_0P5_WIDX "${job}" "${COMOUT}/${PREFIX}pgrb2b.0p50.anl.idx" - - "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2_1P0 "${job}" "${COMOUT}/${PREFIX}pgrb2.1p00.anl" - "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2_1P0_WIDX "${job}" "${COMOUT}/${PREFIX}pgrb2.1p00.anl.idx" - "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2B_1P0 "${job}" "${COMOUT}/${PREFIX}pgrb2b.1p00.anl" - "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2B_1P0_WIDX "${job}" "${COMOUT}/${PREFIX}pgrb2b.1p00.anl.idx" + "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2_0P25 "${job}" "${COM_ATMOS_GRIB_0p25}/${PREFIX}pgrb2.0p25.anl" + "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2_0P25_WIDX "${job}" "${COM_ATMOS_GRIB_0p25}/${PREFIX}pgrb2.0p25.anl.idx" + "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2B_0P25 "${job}" "${COM_ATMOS_GRIB_0p25}/${PREFIX}pgrb2b.0p25.anl" + "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2B_0P25_WIDX "${job}" "${COM_ATMOS_GRIB_0p25}/${PREFIX}pgrb2b.0p25.anl.idx" + + "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2_0P5 "${job}" "${COM_ATMOS_GRIB_0p50}/${PREFIX}pgrb2.0p50.anl" + "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2_0P5_WIDX "${job}" "${COM_ATMOS_GRIB_0p50}/${PREFIX}pgrb2.0p50.anl.idx" + "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2B_0P5 "${job}" "${COM_ATMOS_GRIB_0p50}/${PREFIX}pgrb2b.0p50.anl" + "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2B_0P5_WIDX "${job}" "${COM_ATMOS_GRIB_0p50}/${PREFIX}pgrb2b.0p50.anl.idx" + + "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2_1P0 "${job}" 
"${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb2.1p00.anl" + "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2_1P0_WIDX "${job}" "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb2.1p00.anl.idx" + "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2B_1P0 "${job}" "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb2b.1p00.anl" + "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2B_1P0_WIDX "${job}" "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb2b.1p00.anl.idx" fi fi fi @@ -192,8 +192,8 @@ if [[ "${stime}" = "anl" ]]; then -new_grid ${wafsgrid} "${PGBOUT}.tmp" if [[ "${SENDCOM}" = "YES" ]]; then - cp "${PGBOUT}.tmp" "${COMOUT}/${PREFIX}wafs.0p25.anl" - ${WGRIB2} -s "${PGBOUT}.tmp" > "${COMOUT}/${PREFIX}wafs.0p25.anl.idx" + cp "${PGBOUT}.tmp" "${COM_ATMOS_WAFS}/${PREFIX}wafs.0p25.anl" + ${WGRIB2} -s "${PGBOUT}.tmp" > "${COM_ATMOS_WAFS}/${PREFIX}wafs.0p25.anl.idx" # if [ $SENDDBN = YES ]; then # $DBNROOT/bin/dbn_alert MODEL GFS_WAFS_GB2 $job $COMOUT/${PREFIX}wafs.0p25.anl @@ -250,9 +250,9 @@ else ## not_anl if_stime # for backup to start Model Fcst ############################### [[ -f flxfile ]] && rm flxfile ; [[ -f nemsfile ]] && rm nemsfile - ln -fs "${COMIN}/${PREFIX}atmf${fhr}.nc" nemsfile + ln -fs "${COM_ATMOS_HISTORY}/${PREFIX}atmf${fhr}.nc" nemsfile export NEMSINP=nemsfile - ln -fs "${COMIN}/${PREFIX}sfcf${fhr}.nc" flxfile + ln -fs "${COM_ATMOS_HISTORY}/${PREFIX}sfcf${fhr}.nc" flxfile export FLXINP=flxfile if (( fhr > 0 )); then @@ -311,7 +311,7 @@ else ## not_anl if_stime if [[ "${INLINE_POST}" = ".false." ]]; then ${POSTGPSH} else - cp -p "${COMOUT}/${MASTERFL}" "${PGBOUT}" + cp -p "${COM_ATMOS_MASTER}/${MASTERFL}" "${PGBOUT}" fi export err=$?; err_chk @@ -330,31 +330,31 @@ else ## not_anl if_stime if [[ "${SENDCOM}" = "YES" ]]; then if [[ "${GRIBVERSION}" = 'grib2' ]]; then if [[ "${INLINE_POST}" = ".false." 
]]; then - cp "${PGBOUT2}" "${COMOUT}/${MASTERFL}" + cp "${PGBOUT2}" "${COM_ATMOS_MASTER}/${MASTERFL}" fi - ${GRB2INDEX} "${PGBOUT2}" "${COMOUT}/${MASTERFLIDX}" + ${GRB2INDEX} "${PGBOUT2}" "${COM_ATMOS_MASTER}/${MASTERFLIDX}" fi if [[ "${SENDDBN}" = 'YES' ]]; then if [[ "${GRIBVERSION}" = 'grib2' ]]; then if [[ "${PGBF}" = 'YES' ]]; then - "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2_0P25 "${job}" "${COMOUT}/${PREFIX}pgrb2.0p25.f${fhr}" - "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2_0P25_WIDX "${job}" "${COMOUT}/${PREFIX}pgrb2.0p25.f${fhr}.idx" - "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2B_0P25 "${job}" "${COMOUT}/${PREFIX}pgrb2b.0p25.f${fhr}" - "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2B_0P25_WIDX "${job}" "${COMOUT}/${PREFIX}pgrb2b.0p25.f${fhr}.idx" - - if [[ -s "${COMOUT}/${PREFIX}pgrb2.0p50.f${fhr}" ]]; then - "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2_0P5 "${job}" "${COMOUT}/${PREFIX}pgrb2.0p50.f${fhr}" - "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2_0P5_WIDX "${job}" "${COMOUT}/${PREFIX}pgrb2.0p50.f${fhr}.idx" - "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2B_0P5 "${job}" "${COMOUT}/${PREFIX}pgrb2b.0p50.f${fhr}" - "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2B_0P5_WIDX "${job}" "${COMOUT}/${PREFIX}pgrb2b.0p50.f${fhr}.idx" + "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2_0P25 "${job}" "${COM_ATMOS_GRIB_0p25}/${PREFIX}pgrb2.0p25.f${fhr}" + "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2_0P25_WIDX "${job}" "${COM_ATMOS_GRIB_0p25}/${PREFIX}pgrb2.0p25.f${fhr}.idx" + "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2B_0P25 "${job}" "${COM_ATMOS_GRIB_0p25}/${PREFIX}pgrb2b.0p25.f${fhr}" + "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2B_0P25_WIDX "${job}" "${COM_ATMOS_GRIB_0p25}/${PREFIX}pgrb2b.0p25.f${fhr}.idx" + + if [[ -s "${COM_ATMOS_GRIB_0p50}/${PREFIX}pgrb2.0p50.f${fhr}" ]]; then + "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2_0P5 "${job}" "${COM_ATMOS_GRIB_0p50}/${PREFIX}pgrb2.0p50.f${fhr}" + "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2_0P5_WIDX "${job}" "${COM_ATMOS_GRIB_0p50}/${PREFIX}pgrb2.0p50.f${fhr}.idx" + 
"${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2B_0P5 "${job}" "${COM_ATMOS_GRIB_0p50}/${PREFIX}pgrb2b.0p50.f${fhr}" + "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2B_0P5_WIDX "${job}" "${COM_ATMOS_GRIB_0p50}/${PREFIX}pgrb2b.0p50.f${fhr}.idx" fi - if [[ -s "${COMOUT}/${PREFIX}pgrb2.1p00.f${fhr}" ]]; then - "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2_1P0 "${job}" "${COMOUT}/${PREFIX}pgrb2.1p00.f${fhr}" - "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2_1P0_WIDX "${job}" "${COMOUT}/${PREFIX}pgrb2.1p00.f${fhr}.idx" - "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2B_1P0 "${job}" "${COMOUT}/${PREFIX}pgrb2b.1p00.f${fhr}" - "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2B_1P0_WIDX "${job}" "${COMOUT}/${PREFIX}pgrb2b.1p00.f${fhr}.idx" + if [[ -s "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb2.1p00.f${fhr}" ]]; then + "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2_1P0 "${job}" "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb2.1p00.f${fhr}" + "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2_1P0_WIDX "${job}" "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb2.1p00.f${fhr}.idx" + "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2B_1P0 "${job}" "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb2b.1p00.f${fhr}" + "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2B_1P0_WIDX "${job}" "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb2b.1p00.f${fhr}.idx" fi fi fi @@ -384,9 +384,9 @@ else ## not_anl if_stime if [[ "${INLINE_POST}" = ".false." 
]]; then ${POSTGPSH} export err=$?; err_chk - mv fluxfile "${COMOUT}/${FLUXFL}" + mv fluxfile "${COM_ATMOS_MASTER}/${FLUXFL}" fi - ${WGRIB2} -s "${COMOUT}/${FLUXFL}" > "${COMOUT}/${FLUXFLIDX}" + ${WGRIB2} -s "${COM_ATMOS_MASTER}/${FLUXFL}" > "${COM_ATMOS_MASTER}/${FLUXFLIDX}" #Add extra flux.1p00 file for coupled if [[ "${FLXGF}" = 'YES' ]]; then @@ -396,8 +396,8 @@ else ## not_anl if_stime fi if [[ "${SENDDBN}" = 'YES' ]]; then - "${DBNROOT}/bin/dbn_alert" MODEL GFS_SGB_GB2 "${job}" "${COMOUT}/${FLUXFL}" - "${DBNROOT}/bin/dbn_alert" MODEL GFS_SGB_GB2_WIDX "${job}" "${COMOUT}/${FLUXFLIDX}" + "${DBNROOT}/bin/dbn_alert" MODEL GFS_SGB_GB2 "${job}" "${COM_ATMOS_MASTER}/${FLUXFL}" + "${DBNROOT}/bin/dbn_alert" MODEL GFS_SGB_GB2_WIDX "${job}" "${COM_ATMOS_MASTER}/${FLUXFLIDX}" fi fi @@ -410,7 +410,7 @@ else ## not_anl if_stime # if model already runs gfs io, make sure GFSOUT is linked to the gfsio file # new imported variable for global_post.sh - export GFSOUT=${PREFIX}gfsio${fhr} + export GFSOUT=${PREFIX}gfsio${fhr} # link satellite coefficients files, use hwrf version as ops crtm 2.0.5 # does not new coefficient files used by post @@ -444,11 +444,11 @@ else ## not_anl if_stime if [[ "${SENDCOM}" = "YES" ]]; then # echo "$PDY$cyc$pad$fhr" > $COMOUT/${RUN}.t${cyc}z.master.control - mv goesfile "${COMOUT}/${SPECIALFL}f${fhr}" - mv goesifile "${COMOUT}/${SPECIALFLIDX}f${fhr}" + mv goesfile "${COM_ATMOS_GOES}/${SPECIALFL}f${fhr}" + mv goesifile "${COM_ATMOS_GOES}/${SPECIALFLIDX}f${fhr}" if [[ "${SENDDBN}" = "YES" ]]; then - "${DBNROOT}/bin/dbn_alert" MODEL GFS_SPECIAL_GB2 "${job}" "${COMOUT}/${SPECIALFL}f${fhr}" + "${DBNROOT}/bin/dbn_alert" MODEL GFS_SPECIAL_GB2 "${job}" "${COM_ATMOS_GOES}/${SPECIALFL}f${fhr}" fi fi fi @@ -492,8 +492,8 @@ else ## not_anl if_stime else if [[ -e "${PGBOUT}" ]]; then if [[ "${SENDCOM}" = "YES" ]]; then - cp "${PGBOUT}" "${COMOUT}/${PREFIX}wafs.grb2f${fhr}" - cp "${PGIOUT}" "${COMOUT}/${PREFIX}wafs.grb2if${fhr}" + cp "${PGBOUT}" 
"${COM_ATMOS_WAFS}/${PREFIX}wafs.grb2f${fhr}" + cp "${PGIOUT}" "${COM_ATMOS_WAFS}/${PREFIX}wafs.grb2if${fhr}" fi fi fi diff --git a/scripts/exgfs_atmos_postsnd.sh b/scripts/exgfs_atmos_postsnd.sh index c6d07bf9b9..668234c357 100755 --- a/scripts/exgfs_atmos_postsnd.sh +++ b/scripts/exgfs_atmos_postsnd.sh @@ -42,14 +42,11 @@ export NINT1=${FHOUT_HF_GFS:-1} export NEND1=${FHMAX_HF_GFS:-120} export NINT3=${FHOUT_GFS:-3} -rm -f -r ${COMOUT}/bufr.${cycle} -mkdir -p ${COMOUT}/bufr.${cycle} -if [ -f $HOMEgfs/ush/getncdimlen ]; then - GETDIM=$HOMEgfs/ush/getncdimlen -else - GETDIM=$EXECbufrsnd/getncdimlen -fi -export LEVS=$($GETDIM $COMIN/${RUN}.${cycle}.atmf000.${atmfm} pfull) +rm -f -r "${COM_ATMOS_BUFR}" +mkdir -p "${COM_ATMOS_BUFR}" +GETDIM="${HOMEgfs}/ush/getncdimlen" +LEVS=$(${GETDIM} "${COM_ATMOS_HISTORY}/${RUN}.${cycle}.atmf000.${atmfm}" pfull) +declare -x LEVS ### Loop for the hour and wait for the sigma and surface flux file: export FSTART=$STARTHOUR @@ -73,10 +70,8 @@ export FINT=$NINT1 fi ic=0 - while [ $ic -lt 1000 ] - do - if [ ! -f $COMIN/${RUN}.${cycle}.logf$FEND.${logfm} ] - then + while [ $ic -lt 1000 ]; do + if [[ ! -f "${COM_ATMOS_HISTORY}/${RUN}.${cycle}.logf${FEND}.${logfm}" ]]; then sleep 10 ic=$(expr $ic + 1) else @@ -102,17 +97,16 @@ done ############################################################## # Tar and gzip the individual bufr files and send them to /com ############################################################## -cd ${COMOUT}/bufr.${cycle} -tar -cf - . | /usr/bin/gzip > ../${RUN}.${cycle}.bufrsnd.tar.gz -cd $DATA +cd "${COM_ATMOS_BUFR}" || exit 2 +tar -cf - . 
| /usr/bin/gzip > "${RUN}.${cycle}.bufrsnd.tar.gz" +cd "${DATA}" || exit 2 ######################################## # Send the single tar file to OSO ######################################## -if test "$SENDDBN" = 'YES' -then - $DBNROOT/bin/dbn_alert MODEL GFS_BUFRSND_TAR $job \ - $COMOUT/${RUN}.${cycle}.bufrsnd.tar.gz +if [[ "${SENDDBN}" == 'YES' ]]; then + "${DBNROOT}/bin/dbn_alert" MODEL GFS_BUFRSND_TAR "${job}" \ + "${COM_ATMOS_BUFR}/${RUN}.${cycle}.bufrsnd.tar.gz" fi ######################################## diff --git a/scripts/exgfs_atmos_vminmon.sh b/scripts/exgfs_atmos_vminmon.sh index 4311878a03..a1346d5f9e 100755 --- a/scripts/exgfs_atmos_vminmon.sh +++ b/scripts/exgfs_atmos_vminmon.sh @@ -43,7 +43,7 @@ export DATA=${DATA:-$(pwd)} ######################################## # Filenames ######################################## -gsistat=${gsistat:-$COMIN/gfs.t${cyc}z.gsistat} +gsistat=${gsistat:-${COM_ATMOS_ANALYSIS}/gfs.t${cyc}z.gsistat} export mm_gnormfile=${gnormfile:-${M_FIXgfs}/gfs_minmon_gnorm.txt} export mm_costfile=${costfile:-${M_FIXgfs}/gfs_minmon_cost.txt} diff --git a/scripts/exgfs_wave_init.sh b/scripts/exgfs_wave_init.sh index da9aa5e181..2be224d1da 100755 --- a/scripts/exgfs_wave_init.sh +++ b/scripts/exgfs_wave_init.sh @@ -26,7 +26,7 @@ # --------------------------------------------------------------------------- # # 0. 
Preparations -source "$HOMEgfs/ush/preamble.sh" +source "${HOMEgfs}/ush/preamble.sh" # 0.a Basic modes of operation @@ -40,7 +40,7 @@ source "$HOMEgfs/ush/preamble.sh" echo ' *** MWW3 INIT CONFIG SCRIPT ***' echo ' ********************************' echo ' Initial configuration script' - echo " Model identifier : ${CDUMP}wave" + echo " Model identifier : ${RUN}wave" echo ' ' echo "Starting at : $(date)" echo ' ' @@ -82,18 +82,16 @@ source "$HOMEgfs/ush/preamble.sh" array=($WAVECUR_FID $WAVEICE_FID $WAVEWND_FID $waveuoutpGRD $waveGRD $waveesmfGRD $wavepostGRD $waveinterpGRD) grdALL=$(printf "%s\n" "${array[@]}" | sort -u | tr '\n' ' ') - for grdID in ${grdALL} - do - if [ -f "$COMIN/rundata/${CDUMP}wave.mod_def.${grdID}" ] - then + for grdID in ${grdALL}; do + if [[ -f "${COM_WAVE_PREP}/${RUN}wave.mod_def.${grdID}" ]]; then set +x - echo " Mod def file for $grdID found in ${COMIN}/rundata. copying ...." + echo " Mod def file for ${grdID} found in ${COM_WAVE_PREP}. copying ...." set_trace - cp $COMIN/rundata/${CDUMP}wave.mod_def.${grdID} mod_def.$grdID + cp "${COM_WAVE_PREP}/${RUN}wave.mod_def.${grdID}" "mod_def.${grdID}" else set +x - echo " Mod def file for $grdID not found in ${COMIN}/rundata. Setting up to generate ..." + echo " Mod def file for ${grdID} not found in ${COM_WAVE_PREP}. Setting up to generate ..." echo ' ' set_trace if [ -f $FIXwave/ww3_grid.inp.$grdID ] @@ -120,7 +118,7 @@ source "$HOMEgfs/ush/preamble.sh" err=2;export err;${errchk} fi - [[ ! -d $COMOUT/rundata ]] && mkdir -m 775 -p $COMOUT/rundata + [[ ! 
-d "${COM_WAVE_PREP}" ]] && mkdir -m 775 -p "${COM_WAVE_PREP}" if [ ${CFP_MP:-"NO"} = "YES" ]; then echo "$nmoddef $USHwave/wave_grid_moddef.sh $grdID > $grdID.out 2>&1" >> cmdfile else @@ -184,10 +182,8 @@ source "$HOMEgfs/ush/preamble.sh" # 1.a.3 File check - for grdID in ${grdALL} - do - if [ -f ${COMOUT}/rundata/${CDUMP}wave.mod_def.$grdID ] - then + for grdID in ${grdALL}; do + if [[ -f "${COM_WAVE_PREP}/${RUN}wave.mod_def.${grdID}" ]]; then set +x echo ' ' echo " mod_def.$grdID succesfully created/copied " diff --git a/scripts/exgfs_wave_nawips.sh b/scripts/exgfs_wave_nawips.sh index a7ac02d085..09d23ec685 100755 --- a/scripts/exgfs_wave_nawips.sh +++ b/scripts/exgfs_wave_nawips.sh @@ -14,8 +14,8 @@ source "$HOMEgfs/ush/preamble.sh" #export grids=${grids:-'glo_30m at_10m ep_10m wc_10m ao_9km'} #Interpolated grids -export grids=${grids:-'glo_10m gso_15m ao_9km'} #Native grids -export RUNwave=${RUNwave:-${RUN}${COMPONENT}} +export grids=${grids:-'glo_30m'} #Native grids +export RUNwave=${RUNwave:-${RUN}wave} export fstart=${fstart:-0} export FHMAX_WAV=${FHMAX_WAV:-180} #180 Total of hours to process export FHMAX_HF_WAV=${FHMAX_HF_WAV:-72} @@ -71,7 +71,7 @@ while [ $fhcnt -le $FHMAX_WAV ]; do *) gridIDin= grdIDout= ;; esac - GRIBIN=$COMIN/gridded/$RUNwave.$cycle.$grdIDin.f${fhr}.grib2 + GRIBIN="${COM_WAVE_GRID}/${RUNwave}.${cycle}.${grdIDin}.f${fhr}.grib2" GRIBIN_chk=$GRIBIN.idx icnt=1 @@ -83,7 +83,7 @@ while [ $fhcnt -le $FHMAX_WAV ]; do sleep 20 fi if [ $icnt -ge $maxtries ]; then - echo "ABORTING after 5 minutes of waiting for $GRIBIN." + msg="ABORTING after 5 minutes of waiting for $GRIBIN." echo ' ' echo '**************************** ' echo '*** ERROR : NO GRIB FILE *** ' @@ -102,7 +102,7 @@ while [ $fhcnt -le $FHMAX_WAV ]; do $GRIBIN 1> out 2>&1 OK=$? 
if [ "$OK" != '0' ]; then - echo "ABNORMAL EXIT: ERROR IN interpolation the global grid" + msg="ABNORMAL EXIT: ERROR IN interpolation the global grid" #set +x echo ' ' echo '************************************************************* ' @@ -158,12 +158,11 @@ while [ $fhcnt -le $FHMAX_WAV ]; do fi if [ $SENDCOM = "YES" ] ; then - cpfs $GEMGRD $COMOUT/$GEMGRD + cpfs "${GEMGRD}" "${COM_WAVE_GEMPAK}/${GEMGRD}" if [ $SENDDBN = "YES" ] ; then - $DBNROOT/bin/dbn_alert MODEL ${DBN_ALERT_TYPE} $job \ - $COMOUT/$GEMGRD + "${DBNROOT}/bin/dbn_alert" MODEL "${DBN_ALERT_TYPE}" "${job}" "${COM_WAVE_GEMPAK}/${GEMGRD}" else - echo "##### DBN_ALERT is: MODEL ${DBN_ALERT_TYPE} $job $COMOUT/$GEMGRD#####" + echo "##### DBN_ALERT is: MODEL ${DBN_ALERT_TYPE} ${job} ${COM_WAVE_GEMPAK}/${GEMGRD}#####" fi fi rm grib_$grid diff --git a/scripts/exgfs_wave_post_gridded_sbs.sh b/scripts/exgfs_wave_post_gridded_sbs.sh index bd003b7f22..76e2d6d1da 100755 --- a/scripts/exgfs_wave_post_gridded_sbs.sh +++ b/scripts/exgfs_wave_post_gridded_sbs.sh @@ -36,7 +36,7 @@ source "$HOMEgfs/ush/preamble.sh" # Set wave model ID tag to include member number # if ensemble; waveMEMB var empty in deterministic - export WAV_MOD_TAG=${CDUMP}wave${waveMEMB} + export WAV_MOD_TAG=${RUN}wave${waveMEMB} cd $DATA @@ -83,9 +83,6 @@ source "$HOMEgfs/ush/preamble.sh" echo ' ' set_trace - -# 0.c.3 Define CDATE_POST - export CDATE_POST=${CDATE} export FHRUN=0 # --------------------------------------------------------------------------- # @@ -105,23 +102,19 @@ source "$HOMEgfs/ush/preamble.sh" # 1.a Model definition files and output files (set up using poe) # 1.a.1 Copy model definition files - for grdID in $waveGRD $wavepostGRD $waveinterpGRD - do - if [ -f "$COMIN/rundata/${CDUMP}wave.mod_def.${grdID}" ] - then + for grdID in ${waveGRD} ${wavepostGRD} ${waveinterpGRD}; do + if [[ -f "${COM_WAVE_PREP}/${RUN}wave.mod_def.${grdID}" ]]; then set +x - echo " Mod def file for $grdID found in ${COMIN}/rundata. copying ...." 
+ echo " Mod def file for ${grdID} found in ${COM_WAVE_PREP}. copying ...." set_trace - cp -f $COMIN/rundata/${CDUMP}wave.mod_def.${grdID} mod_def.$grdID + cp -f "${COM_WAVE_PREP}/${RUN}wave.mod_def.${grdID}" "mod_def.${grdID}" fi done # 1.a.2 Check that model definition files exist - for grdID in $waveGRD $wavepostGRD $waveinterpGRD - do - if [ ! -f mod_def.$grdID ] - then + for grdID in ${waveGRD} ${wavepostGRD} ${waveinterpGRD}; do + if [[ ! -f "mod_def.${grdID}" ]]; then set +x echo ' ' echo '*************************************************** ' @@ -164,7 +157,7 @@ source "$HOMEgfs/ush/preamble.sh" echo '*********************************************** ' echo ' ' set_trace - echo "$WAV_MOD_TAG post $date $cycle : GRINT template file missing." + echo "${WAV_MOD_TAG} post ${PDY} ${cycle} : GRINT template file missing." exit_code=1 DOGRI_WAV='NO' fi @@ -241,7 +234,7 @@ source "$HOMEgfs/ush/preamble.sh" iwaitmax=120 # Maximum loop cycles for waiting until wave component output file is ready (fails after max) while [ $fhr -le $FHMAX_WAV ]; do - ymdh=$($NDATE $fhr $CDATE) + ymdh=$($NDATE $fhr ${PDY}${cyc}) YMD=$(echo $ymdh | cut -c1-8) HMS="$(echo $ymdh | cut -c9-10)0000" YMDHMS=${YMD}${HMS} @@ -265,7 +258,7 @@ source "$HOMEgfs/ush/preamble.sh" then iwait=0 for wavGRD in ${waveGRD} ; do - gfile=$COMIN/rundata/${WAV_MOD_TAG}.out_grd.${wavGRD}.${YMD}.${HMS} + gfile=${COM_WAVE_HISTORY}/${WAV_MOD_TAG}.out_grd.${wavGRD}.${YMD}.${HMS} while [ ! -s ${gfile} ]; do sleep 10; let iwait=iwait+1; done if [ $iwait -eq $iwaitmax ]; then echo '*************************************************** ' @@ -273,7 +266,7 @@ source "$HOMEgfs/ush/preamble.sh" echo '*************************************************** ' echo ' ' set_trace - echo "$WAV_MOD_TAG post $grdID $date $cycle : field output missing." + echo "${WAV_MOD_TAG} post ${grdID} ${PDY} ${cycle} : field output missing." 
err=3; export err;${errchk} exit $err fi @@ -407,8 +400,8 @@ source "$HOMEgfs/ush/preamble.sh" # Check if grib2 file created ENSTAG="" if [ ${waveMEMB} ]; then ENSTAG=".${membTAG}${waveMEMB}" ; fi - gribchk=${CDUMP}wave.${cycle}${ENSTAG}.${GRDNAME}.${GRDRES}.f${FH3}.grib2 - if [ ! -s ${COMOUT}/gridded/${gribchk} ]; then + gribchk="${RUN}wave.${cycle}${ENSTAG}.${GRDNAME}.${GRDRES}.f${FH3}.grib2" + if [ ! -s ${COM_WAVE_GRID}/${gribchk} ]; then set +x echo ' ' echo '********************************************' diff --git a/scripts/exgfs_wave_post_pnt.sh b/scripts/exgfs_wave_post_pnt.sh index 1a36f36eee..a7aa957564 100755 --- a/scripts/exgfs_wave_post_pnt.sh +++ b/scripts/exgfs_wave_post_pnt.sh @@ -116,16 +116,14 @@ source "$HOMEgfs/ush/preamble.sh" # Copy model definition files iloop=0 - for grdID in $waveuoutpGRD - do - if [ -f "$COMIN/rundata/${CDUMP}wave.mod_def.${grdID}" ] - then + for grdID in ${waveuoutpGRD}; do + if [[ -f "${COM_WAVE_PREP}/${RUN}wave.mod_def.${grdID}" ]]; then set +x - echo " Mod def file for $grdID found in ${COMIN}/rundata. copying ...." + echo " Mod def file for ${grdID} found in ${COM_WAVE_PREP}. copying ...." set_trace - cp -f $COMIN/rundata/${CDUMP}wave.mod_def.${grdID} mod_def.$grdID - iloop=$(expr $iloop + 1) + cp -f "${COM_WAVE_PREP}/${RUN}wave.mod_def.${grdID}" "mod_def.${grdID}" + iloop=$((iloop + 1)) fi done @@ -235,29 +233,28 @@ source "$HOMEgfs/ush/preamble.sh" if [ "$DOSPC_WAV" = 'YES' ] || [ "$DOBLL_WAV" = 'YES' ] then - ymdh=$($NDATE -${WAVHINDH} $CDATE) - tstart="$(echo $ymdh | cut -c1-8) $(echo $ymdh | cut -c9-10)0000" + ymdh=$(${NDATE} -"${WAVHINDH}" "${PDY}${cyc}") + tstart="${ymdh:0:8} ${ymdh:8:2}0000" dtspec=3600. 
# default time step (not used here) - sed -e "s/TIME/$tstart/g" \ - -e "s/DT/$dtspec/g" \ + sed -e "s/TIME/${tstart}/g" \ + -e "s/DT/${dtspec}/g" \ -e "s/POINT/1/g" \ -e "s/ITYPE/0/g" \ -e "s/FORMAT/F/g" \ ww3_outp_spec.inp.tmpl > ww3_outp.inp ln -s mod_def.$waveuoutpGRD mod_def.ww3 - YMD=$(echo $CDATE | cut -c1-8) - HMS="$(echo $CDATE | cut -c9-10)0000" - if [ -f $COMIN/rundata/${WAV_MOD_TAG}.out_pnt.${waveuoutpGRD}.${YMD}.${HMS} ] - then - ln -s $COMIN/rundata/${WAV_MOD_TAG}.out_pnt.${waveuoutpGRD}.${YMD}.${HMS} ./out_pnt.${waveuoutpGRD} + HMS="${cyc}0000" + if [[ -f "${COM_WAVE_HISTORY}/${WAV_MOD_TAG}.out_pnt.${waveuoutpGRD}.${PDY}.${HMS}" ]]; then + ln -s "${COM_WAVE_HISTORY}/${WAV_MOD_TAG}.out_pnt.${waveuoutpGRD}.${PDY}.${HMS}" \ + "./out_pnt.${waveuoutpGRD}" else echo '*************************************************** ' - echo " FATAL ERROR : NO RAW POINT OUTPUT FILE out_pnt.${waveuoutpGRD}.${YMD}.${HMS} " + echo " FATAL ERROR : NO RAW POINT OUTPUT FILE out_pnt.${waveuoutpGRD}.${PDY}.${HMS} " echo '*************************************************** ' echo ' ' set_trace - echo "$WAV_MOD_TAG post $waveuoutpGRD $CDATE $cycle : field output missing." + echo "${WAV_MOD_TAG} post ${waveuoutpGRD} ${PDY}${cyc} ${cycle} : field output missing." 
err=4; export err;${errchk} fi @@ -351,11 +348,11 @@ source "$HOMEgfs/ush/preamble.sh" while [ $fhr -le $FHMAX_WAV_PNT ]; do echo " Creating the wave point scripts at : $(date)" - ymdh=$($NDATE $fhr $CDATE) - YMD=$(echo $ymdh | cut -c1-8) - HMS="$(echo $ymdh | cut -c9-10)0000" + ymdh=$($NDATE "${fhr}" "${PDY}${cyc}") + YMD=${ymdh:0:8} + HMS="${ymdh:8:2}0000" YMDHMS=${YMD}${HMS} - FH3=$(printf %03i $fhr) + FH3=$(printf %03i ${fhr}) rm -f tmpcmdfile.${FH3} touch tmpcmdfile.${FH3} @@ -367,7 +364,7 @@ source "$HOMEgfs/ush/preamble.sh" export BULLDATA=${DATA}/output_$YMDHMS cp $DATA/mod_def.${waveuoutpGRD} mod_def.${waveuoutpGRD} - pfile=$COMIN/rundata/${WAV_MOD_TAG}.out_pnt.${waveuoutpGRD}.${YMD}.${HMS} + pfile="${COM_WAVE_HISTORY}/${WAV_MOD_TAG}.out_pnt.${waveuoutpGRD}.${YMD}.${HMS}" if [ -f ${pfile} ] then ln -fs ${pfile} ./out_pnt.${waveuoutpGRD} diff --git a/scripts/exgfs_wave_prdgen_bulls.sh b/scripts/exgfs_wave_prdgen_bulls.sh index b902e26d4c..e75df8dfd1 100755 --- a/scripts/exgfs_wave_prdgen_bulls.sh +++ b/scripts/exgfs_wave_prdgen_bulls.sh @@ -23,7 +23,7 @@ source "$HOMEgfs/ush/preamble.sh" # 0.a Basic modes of operation # PATH for working and home directories - export RUNwave=${RUNwave:-${RUN}${COMPONENT}} + export RUNwave=${RUNwave:-${RUN}wave} export envir=${envir:-ops} export cyc=${cyc:-00} export cycle=${cycle:-t${cyc}z} @@ -58,11 +58,11 @@ source "$HOMEgfs/ush/preamble.sh" # 1. 
Get necessary files set +x - echo " Copying bulletins from $COMIN" + echo " Copying bulletins from ${COM_WAVE_STATION}" set_trace # 1.a Link the input file and untar it - BullIn=$COMIN/station/${RUNwave}.$cycle.cbull_tar + BullIn="${COM_WAVE_STATION}/${RUNwave}.${cycle}.cbull_tar" if [ -f $BullIn ]; then cp $BullIn cbull.tar else @@ -120,7 +120,7 @@ source "$HOMEgfs/ush/preamble.sh" if [ -f $PARMwave/bull_awips_gfswave ]; then cp $PARMwave/bull_awips_gfswave awipsbull.data else - echo "ABNORMAL EXIT: NO AWIPS BULLETIN HEADER DATA FILE" + msg="ABNORMAL EXIT: NO AWIPS BULLETIN HEADER DATA FILE" set +x echo ' ' echo '******************************************* ' @@ -146,8 +146,7 @@ source "$HOMEgfs/ush/preamble.sh" # 2.c Generate list of bulletins to process echo ' Generating buoy list ...' - echo 'bulls=$(sed -e 's/export b//g' -e 's/=/ /' awipsbull.data | grep -v "#" |awk '{ print $1}')' - bulls=$(sed -e 's/export b//g' -e 's/=/ /' awipsbull.data | grep -v "#" |awk '{ print $1}') + bulls=$(sed -e 's/export b//g' -e 's/=/ /' awipsbull.data | grep -v "#" |awk '{print $1}') # 2.d Looping over buoys running formbul echo ' Looping over buoys ... \n' @@ -176,14 +175,14 @@ source "$HOMEgfs/ush/preamble.sh" set_trace - formbul.pl -d $headr -f $fname -j $job -m ${RUNwave} \ - -p $PCOM -s NO -o $oname > formbul.out 2>&1 + formbul.pl -d "${headr}" -f "${fname}" -j "${job}" -m "${RUNwave}" \ + -p "${COM_WAVE_WMO}" -s "NO" -o "${oname}" > formbul.out 2>&1 OK=$? if [ "$OK" != '0' ] || [ ! -f $oname ]; then set_trace cat formbul.out - echo "ABNORMAL EXIT: ERROR IN formbul" + msg="ABNORMAL EXIT: ERROR IN formbul" set +x echo ' ' echo '************************************** ' @@ -202,20 +201,20 @@ source "$HOMEgfs/ush/preamble.sh" done # 3. 
Send output files to the proper destination - set_trace - if [ "$SENDCOM" = YES ]; then - cp awipsbull.$cycle.${RUNwave} $PCOM/awipsbull.$cycle.${RUNwave} - if [ "$SENDDBN_NTC" = YES ]; then - make_ntc_bull.pl WMOBH NONE KWBC NONE $DATA/awipsbull.$cycle.${RUNwave} $PCOM/awipsbull.$cycle.${RUNwave} - else - if [ "${envir}" = "para" ] || [ "${envir}" = "test" ] || [ "${envir}" = "dev" ]; then - echo "Making NTC bulletin for parallel environment, but do not alert." - set_trace - (export SENDDBN=NO; make_ntc_bull.pl WMOBH NONE KWBC NONE \ - $DATA/awipsbull.$cycle.${RUNwave} $PCOM/awipsbull.$cycle.${RUNwave}) - fi - fi - fi +set_trace +if [ "$SENDCOM" = YES ]; then + cp "awipsbull.${cycle}.${RUNwave}" "${COM_WAVE_WMO}/awipsbull.${cycle}.${RUNwave}" + if [ "$SENDDBN_NTC" = YES ]; then + make_ntc_bull.pl "WMOBH" "NONE" "KWBC" "NONE" "${DATA}/awipsbull.${cycle}.${RUNwave}" \ + "${COM_WAVE_WMO}/awipsbull.${cycle}.${RUNwave}" + else + if [ "${envir}" = "para" ] || [ "${envir}" = "test" ] || [ "${envir}" = "dev" ]; then + echo "Making NTC bulletin for parallel environment, but do not alert." + (export SENDDBN=NO; make_ntc_bull.pl "WMOBH" "NONE" "KWBC" "NONE" \ + "${DATA}/awipsbull.${cycle}.${RUNwave}" "${COM_WAVE_WMO}/awipsbull.${cycle}.${RUNwave}") + fi + fi +fi # --------------------------------------------------------------------------- # # 4. 
Clean up diff --git a/scripts/exgfs_wave_prdgen_gridded.sh b/scripts/exgfs_wave_prdgen_gridded.sh index b688cf3e0b..de7f2c4974 100755 --- a/scripts/exgfs_wave_prdgen_gridded.sh +++ b/scripts/exgfs_wave_prdgen_gridded.sh @@ -23,7 +23,7 @@ source "$HOMEgfs/ush/preamble.sh" # 0.a Basic modes of operation - export RUNwave=${RUNwave:-${RUN}${COMPONENT}} + export RUNwave=${RUNwave:-${RUN}wave} export envir=${envir:-ops} export fstart=${fstart:-0} export FHMAX_WAV=${FHMAX_WAV:-180} #180 Total of hours to process @@ -40,12 +40,13 @@ source "$HOMEgfs/ush/preamble.sh" export DATA=${DATA:-${DATAROOT:?}/${job}.$$} mkdir -p $DATA cd $DATA - export wavelog=${DATA}/${COMPONENTwave}_prdggridded.log + export wavelog=${DATA}/${RUNwave}_prdggridded.log echo "Starting MWW3 GRIDDED PRODUCTS SCRIPT" # Output grids - grids=${grids:-ao_9km at_10m ep_10m wc_10m glo_30m} -# grids=${grids:-ak_10m at_10m ep_10m wc_10m glo_30m} + # grids=${grids:-ao_9km at_10m ep_10m wc_10m glo_30m} +grids=${grids:-ak_10m at_10m ep_10m wc_10m glo_30m} +# export grids=${wavepostGRD} maxtries=${maxtries:-720} # 0.b Date and time stuff export date=$PDY @@ -97,7 +98,7 @@ source "$HOMEgfs/ush/preamble.sh" esac # - GRIBIN=$COMIN/gridded/$RUNwave.$cycle.$grdID.f${fhr}.grib2 + GRIBIN="${COM_WAVE_GRID}/${RUNwave}.${cycle}.${grdID}.f${fhr}.grib2" GRIBIN_chk=$GRIBIN.idx icnt=1 @@ -110,7 +111,7 @@ source "$HOMEgfs/ush/preamble.sh" sleep 5 fi if [ $icnt -ge $maxtries ]; then - echo "ABNORMAL EXIT: NO GRIB FILE FOR GRID $GRIBIN" + msg="ABNORMAL EXIT: NO GRIB FILE FOR GRID $GRIBIN" echo ' ' echo '**************************** ' echo '*** ERROR : NO GRIB FILE *** ' @@ -185,7 +186,7 @@ source "$HOMEgfs/ush/preamble.sh" if [ "$OK" != '0' ] then - echo "ABNORMAL EXIT: ERROR IN grb2index MWW3 for grid $grdID" + msg="ABNORMAL EXIT: ERROR IN grb2index MWW3 for grid $grdID" #set +x echo ' ' echo '******************************************** ' @@ -214,7 +215,7 @@ source "$HOMEgfs/ush/preamble.sh" OK=$? 
if [ "$OK" != '0' ]; then cat tocgrib2.out - echo "ABNORMAL EXIT: ERROR IN tocgrib2" + msg="ABNORMAL EXIT: ERROR IN tocgrib2" #set +x echo ' ' echo '*************************************** ' @@ -236,16 +237,16 @@ source "$HOMEgfs/ush/preamble.sh" then #set +x echo " Saving $AWIPSGRB.$grdOut.f${fhr} as grib2.$cycle.awipsww3_${grdID}.f${fhr}" - echo " in $PCOM" + echo " in ${COM_WAVE_WMO}" #set_trace - cp $AWIPSGRB.$grdID.f${fhr} $PCOM/grib2.$cycle.f${fhr}.awipsww3_${grdOut} + cp "${AWIPSGRB}.${grdID}.f${fhr}" "${COM_WAVE_WMO}/grib2.${cycle}.f${fhr}.awipsww3_${grdOut}" #set +x fi if [ "$SENDDBN" = 'YES' ] then echo " Sending $AWIPSGRB.$grdID.f${fhr} to DBRUN." - $DBNROOT/bin/dbn_alert GRIB_LOW $RUN $job $PCOM/grib2.$cycle.f${fhr}.awipsww3_${grdOut} + "${DBNROOT}/bin/dbn_alert" GRIB_LOW "${RUN}" "${job}" "${COM_WAVE_WMO}/grib2.${cycle}.f${fhr}.awipsww3_${grdOut}" fi rm -f $AWIPSGRB.$grdID.f${fhr} tocgrib2.out done # For grids diff --git a/scripts/exgfs_wave_prep.sh b/scripts/exgfs_wave_prep.sh index b4abbc816a..be006c1c85 100755 --- a/scripts/exgfs_wave_prep.sh +++ b/scripts/exgfs_wave_prep.sh @@ -46,7 +46,7 @@ source "$HOMEgfs/ush/preamble.sh" # Set wave model ID tag to include member number # if ensemble; waveMEMB var empty in deterministic - export WAV_MOD_TAG=${CDUMP}wave${waveMEMB} + export WAV_MOD_TAG=${RUN}wave${waveMEMB} cd $DATA mkdir outtmp @@ -66,13 +66,6 @@ source "$HOMEgfs/ush/preamble.sh" echo ' ' set_trace - if [ "$INDRUN" = 'no' ] - then - FHMAX_WAV=${FHMAX_WAV:-3} - else - FHMAX_WAV=${FHMAX_WAV:-384} - fi - # 0.b Date and time stuff # Beginning time for outpupt may differ from SDATE if DOIAU=YES @@ -161,20 +154,20 @@ source "$HOMEgfs/ush/preamble.sh" touch cmdfile grdINP='' - if [ "${WW3ATMINP}" = 'YES' ]; then grdINP="${grdINP} $WAVEWND_FID" ; fi - if [ "${WW3ICEINP}" = 'YES' ]; then grdINP="${grdINP} $WAVEICE_FID" ; fi - if [ "${WW3CURINP}" = 'YES' ]; then grdINP="${grdINP} $WAVECUR_FID" ; fi + if [ "${WW3ATMINP}" = 'YES' ]; then grdINP="${grdINP} 
$WAVEWND_FID" ; fi + if [ "${WW3ICEINP}" = 'YES' ]; then grdINP="${grdINP} $WAVEICE_FID" ; fi + if [ "${WW3CURINP}" = 'YES' ]; then grdINP="${grdINP} $WAVECUR_FID" ; fi ifile=1 for grdID in $grdINP $waveGRD do - if [ -f "$COMIN/rundata/${CDUMP}wave.mod_def.${grdID}" ] + if [ -f "${COM_WAVE_PREP}/${RUN}wave.mod_def.${grdID}" ] then set +x - echo " Mod def file for $grdID found in ${COMIN}/rundata. copying ...." + echo " Mod def file for $grdID found in ${COM_WAVE_PREP}. copying ...." set_trace - cp $COMIN/rundata/${CDUMP}wave.mod_def.${grdID} mod_def.$grdID + cp ${COM_WAVE_PREP}/${RUN}wave.mod_def.${grdID} mod_def.$grdID else set +x @@ -191,9 +184,9 @@ source "$HOMEgfs/ush/preamble.sh" done # 1.b Netcdf Preprocessor template files - if [ "$WW3ATMINP" = 'YES' ]; then itype="$itype wind" ; fi - if [ "$WW3ICEINP" = 'YES' ]; then itype="$itype ice" ; fi - if [ "$WW3CURINP" = 'YES' ]; then itype="$itype cur" ; fi + if [[ "${WW3ATMINP}" == 'YES' ]]; then itype="${itype:-} wind" ; fi + if [[ "${WW3ICEINP}" == 'YES' ]]; then itype="${itype:-} ice" ; fi + if [[ "${WW3CURINP}" == 'YES' ]]; then itype="${itype:-} cur" ; fi for type in $itype do @@ -326,22 +319,22 @@ source "$HOMEgfs/ush/preamble.sh" export RPDY=$($NDATE -24 ${RPDY}00 | cut -c1-8) fi #Set the first time for RTOFS files to be the beginning time of simulation - ymdh_rtofs=$ymdh_beg + ymdh_rtofs=$ymdh_beg if [ "$FHMAX_WAV_CUR" -le 72 ]; then - rtofsfile1=$COMIN_WAV_RTOFS/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f024_prog.nc - rtofsfile2=$COMIN_WAV_RTOFS/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f048_prog.nc - rtofsfile3=$COMIN_WAV_RTOFS/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f072_prog.nc + rtofsfile1="${COM_RTOFS}/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f024_prog.nc" + rtofsfile2="${COM_RTOFS}/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f048_prog.nc" + rtofsfile3="${COM_RTOFS}/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f072_prog.nc" if [ ! -f $rtofsfile1 ] || [ ! -f $rtofsfile2 ] || [ ! 
-f $rtofsfile3 ]; then #Needed current files are not available, so use RTOFS from previous day export RPDY=$($NDATE -24 ${RPDY}00 | cut -c1-8) fi else - rtofsfile1=$COMIN_WAV_RTOFS/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f096_prog.nc - rtofsfile2=$COMIN_WAV_RTOFS/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f120_prog.nc - rtofsfile3=$COMIN_WAV_RTOFS/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f144_prog.nc - rtofsfile4=$COMIN_WAV_RTOFS/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f168_prog.nc - rtofsfile5=$COMIN_WAV_RTOFS/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f192_prog.nc + rtofsfile1="${COM_RTOFS}/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f096_prog.nc" + rtofsfile2="${COM_RTOFS}/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f120_prog.nc" + rtofsfile3="${COM_RTOFS}/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f144_prog.nc" + rtofsfile4="${COM_RTOFS}/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f168_prog.nc" + rtofsfile5="${COM_RTOFS}/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f192_prog.nc" if [ ! -f $rtofsfile1 ] || [ ! -f $rtofsfile2 ] || [ ! -f $rtofsfile3 ] || [ ! -f $rtofsfile4 ] || [ ! 
-f $rtofsfile5 ]; then #Needed current files are not available, so use RTOFS from previous day @@ -349,8 +342,6 @@ source "$HOMEgfs/ush/preamble.sh" fi fi - export COMIN_WAV_CUR=$COMIN_WAV_RTOFS/${WAVECUR_DID}.${RPDY} - ymdh_end_rtofs=$($NDATE ${FHMAX_WAV_CUR} ${RPDY}00) if [ "$ymdh_end" -lt "$ymdh_end_rtofs" ]; then ymdh_end_rtofs=$ymdh_end @@ -369,8 +360,8 @@ source "$HOMEgfs/ush/preamble.sh" fhr_rtofs=$(${NHOUR} ${ymdh_rtofs} ${RPDY}00) fh3_rtofs=$(printf "%03d" "${fhr_rtofs#0}") - curfile1h=${COMIN_WAV_CUR}/rtofs_glo_2ds_${fext}${fh3_rtofs}_prog.nc - curfile3h=${COMIN_WAV_CUR}/rtofs_glo_2ds_${fext}${fh3_rtofs}_prog.nc + curfile1h=${COM_RTOFS}/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_${fext}${fh3_rtofs}_prog.nc + curfile3h=${COM_RTOFS}/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_${fext}${fh3_rtofs}_prog.nc if [ -s ${curfile1h} ] && [ "${FLGHF}" = "T" ] ; then curfile=${curfile1h} @@ -474,7 +465,7 @@ source "$HOMEgfs/ush/preamble.sh" cat $file >> cur.${WAVECUR_FID} done - cp -f cur.${WAVECUR_FID} ${COMOUT}/rundata/${CDUMP}wave.${WAVECUR_FID}.$cycle.cur + cp -f cur.${WAVECUR_FID} ${COM_WAVE_PREP}/${RUN}wave.${WAVECUR_FID}.$cycle.cur else echo ' ' @@ -494,6 +485,4 @@ source "$HOMEgfs/ush/preamble.sh" # 4. 
Ending output -exit $err - # End of MWW3 preprocessor script ------------------------------------------- # diff --git a/scripts/exglobal_aero_analysis_finalize.py b/scripts/exglobal_aero_analysis_finalize.py index 1a0c1d75a5..7342bf8357 100755 --- a/scripts/exglobal_aero_analysis_finalize.py +++ b/scripts/exglobal_aero_analysis_finalize.py @@ -1,5 +1,5 @@ #!/usr/bin/env python3 -# exgdas_global_aero_analysis_finalize.py +# exglobal_aero_analysis_finalize.py # This script creates an AerosolAnalysis class # and runs the finalize method # which perform post-processing and clean up activities diff --git a/scripts/exglobal_aero_analysis_initialize.py b/scripts/exglobal_aero_analysis_initialize.py index bf0c61c8b9..6c4135fc2d 100755 --- a/scripts/exglobal_aero_analysis_initialize.py +++ b/scripts/exglobal_aero_analysis_initialize.py @@ -1,5 +1,5 @@ #!/usr/bin/env python3 -# exgdas_global_aero_analysis_initialize.py +# exglobal_aero_analysis_initialize.py # This script creates an AerosolAnalysis class # and runs the initialize method # which create and stage the runtime directory diff --git a/scripts/exglobal_aero_analysis_run.py b/scripts/exglobal_aero_analysis_run.py index f4ab0e67ff..887700f476 100755 --- a/scripts/exglobal_aero_analysis_run.py +++ b/scripts/exglobal_aero_analysis_run.py @@ -1,5 +1,5 @@ #!/usr/bin/env python3 -# exgdas_global_aero_analysis_run.py +# exglobal_aero_analysis_run.py # This script creates an AerosolAnalysis object # and runs the execute method # which executes the global aerosol variational analysis diff --git a/scripts/exglobal_archive.sh b/scripts/exglobal_archive.sh index 38ff1db48c..730563e256 100755 --- a/scripts/exglobal_archive.sh +++ b/scripts/exglobal_archive.sh @@ -14,89 +14,99 @@ if [ "${ARCHICS_CYC}" -lt 0 ]; then fi # CURRENT CYCLE -APREFIX="${CDUMP}.t${cyc}z." +APREFIX="${RUN}.t${cyc}z." 
# Realtime parallels run GFS MOS on 1 day delay # If realtime parallel, back up CDATE_MOS one day -CDATE_MOS=${CDATE} +# Ignore possible spelling error (nothing is misspelled) +# shellcheck disable=SC2153 +CDATE_MOS=${PDY}${cyc} if [ "${REALTIME}" = "YES" ]; then - CDATE_MOS=$(${NDATE} -24 "${CDATE}") + CDATE_MOS=$(${NDATE} -24 "${PDY}${cyc}") fi -PDY_MOS=$(echo "${CDATE_MOS}" | cut -c1-8) +PDY_MOS="${CDATE_MOS:0:8}" ############################################################### # Archive online for verification and diagnostics ############################################################### -COMIN=${COMINatmos:-"${ROTDIR}/${CDUMP}.${PDY}/${cyc}/atmos"} -cd "${COMIN}" - source "${HOMEgfs}/ush/file_utils.sh" [[ ! -d ${ARCDIR} ]] && mkdir -p "${ARCDIR}" -nb_copy "${APREFIX}"gsistat "${ARCDIR}"/gsistat."${CDUMP}"."${CDATE}" -nb_copy "${APREFIX}"pgrb2.1p00.anl "${ARCDIR}"/pgbanl."${CDUMP}"."${CDATE}".grib2 +nb_copy "${COM_ATMOS_ANALYSIS}/${APREFIX}gsistat" "${ARCDIR}/gsistat.${RUN}.${PDY}${cyc}" +nb_copy "${COM_ATMOS_GRIB_1p00}/${APREFIX}pgrb2.1p00.anl" "${ARCDIR}/pgbanl.${RUN}.${PDY}${cyc}.grib2" # Archive 1 degree forecast GRIB2 files for verification -if [ "${CDUMP}" = "gfs" ]; then +if [[ "${RUN}" == "gfs" ]]; then fhmax=${FHMAX_GFS} fhr=0 while [ "${fhr}" -le "${fhmax}" ]; do fhr2=$(printf %02i "${fhr}") fhr3=$(printf %03i "${fhr}") - nb_copy "${APREFIX}"pgrb2.1p00.f"${fhr3}" "${ARCDIR}"/pgbf"${fhr2}"."${CDUMP}"."${CDATE}".grib2 + nb_copy "${COM_ATMOS_GRIB_1p00}/${APREFIX}pgrb2.1p00.f${fhr3}" "${ARCDIR}/pgbf${fhr2}.${RUN}.${PDY}${cyc}.grib2" fhr=$((10#${fhr} + 10#${FHOUT_GFS} )) done fi -if [ "${CDUMP}" = "gdas" ]; then +if [[ "${RUN}" == "gdas" ]]; then flist="000 003 006 009" for fhr in ${flist}; do - fname=${APREFIX}pgrb2.1p00.f${fhr} + fname="${COM_ATMOS_GRIB_1p00}/${APREFIX}pgrb2.1p00.f${fhr}" + # TODO Shouldn't the archived files also use three-digit tags? 
fhr2=$(printf %02i $((10#${fhr}))) - nb_copy "${fname}" "${ARCDIR}"/pgbf"${fhr2}"."${CDUMP}"."${CDATE}".grib2 + nb_copy "${fname}" "${ARCDIR}/pgbf${fhr2}.${RUN}.${PDY}${cyc}.grib2" done fi -if [ -s avno.t"${cyc}"z.cyclone.trackatcfunix ]; then - PLSOT4=$(echo "${PSLOT}"|cut -c 1-4 |tr '[a-z]' '[A-Z]') - cat avno.t"${cyc}"z.cyclone.trackatcfunix | sed s:AVNO:"${PLSOT4}":g > "${ARCDIR}"/atcfunix."${CDUMP}"."${CDATE}" - cat avnop.t"${cyc}"z.cyclone.trackatcfunix | sed s:AVNO:"${PLSOT4}":g > "${ARCDIR}"/atcfunixp."${CDUMP}"."${CDATE}" +if [[ -s "${COM_ATMOS_TRACK}/avno.t${cyc}z.cyclone.trackatcfunix" ]]; then + # shellcheck disable=2153 + PSLOT4=${PSLOT:0:4} + # shellcheck disable= + PSLOT4=${PSLOT4^^} + sed "s:AVNO:${PSLOT4}:g" < "${COM_ATMOS_TRACK}/avno.t${cyc}z.cyclone.trackatcfunix" \ + > "${ARCDIR}/atcfunix.${RUN}.${PDY}${cyc}" + sed "s:AVNO:${PSLOT4}:g" < "${COM_ATMOS_TRACK}/avnop.t${cyc}z.cyclone.trackatcfunix" \ + > "${ARCDIR}/atcfunixp.${RUN}.${PDY}${cyc}" fi -if [ "${CDUMP}" = "gdas" ] && [ -s gdas.t"${cyc}"z.cyclone.trackatcfunix ]; then - PLSOT4=$(echo "${PSLOT}"|cut -c 1-4 |tr '[a-z]' '[A-Z]') - cat gdas.t"${cyc}"z.cyclone.trackatcfunix | sed s:AVNO:"${PLSOT4}":g > "${ARCDIR}"/atcfunix."${CDUMP}"."${CDATE}" - cat gdasp.t"${cyc}"z.cyclone.trackatcfunix | sed s:AVNO:"${PLSOT4}":g > "${ARCDIR}"/atcfunixp."${CDUMP}"."${CDATE}" +if [[ "${RUN}" == "gdas" ]] && [[ -s "${COM_ATMOS_TRACK}/gdas.t${cyc}z.cyclone.trackatcfunix" ]]; then + # shellcheck disable=2153 + PSLOT4=${PSLOT:0:4} + # shellcheck disable= + PSLOT4=${PSLOT4^^} + sed "s:AVNO:${PSLOT4}:g" < "${COM_ATMOS_TRACK}/gdas.t${cyc}z.cyclone.trackatcfunix" \ + > "${ARCDIR}/atcfunix.${RUN}.${PDY}${cyc}" + sed "s:AVNO:${PSLOT4}:g" < "${COM_ATMOS_TRACK}/gdasp.t${cyc}z.cyclone.trackatcfunix" \ + > "${ARCDIR}/atcfunixp.${RUN}.${PDY}${cyc}" fi -if [ "${CDUMP}" = "gfs" ]; then - nb_copy storms.gfso.atcf_gen."${CDATE}" "${ARCDIR}"/. - nb_copy storms.gfso.atcf_gen.altg."${CDATE}" "${ARCDIR}"/. 
- nb_copy trak.gfso.atcfunix."${CDATE}" "${ARCDIR}"/. - nb_copy trak.gfso.atcfunix.altg."${CDATE}" "${ARCDIR}"/. +if [ "${RUN}" = "gfs" ]; then + nb_copy "${COM_ATMOS_GENESIS}/storms.gfso.atcf_gen.${PDY}${cyc}" "${ARCDIR}/." + nb_copy "${COM_ATMOS_GENESIS}/storms.gfso.atcf_gen.altg.${PDY}${cyc}" "${ARCDIR}/." + nb_copy "${COM_ATMOS_TRACK}/trak.gfso.atcfunix.${PDY}${cyc}" "${ARCDIR}/." + nb_copy "${COM_ATMOS_TRACK}/trak.gfso.atcfunix.altg.${PDY}${cyc}" "${ARCDIR}/." - mkdir -p "${ARCDIR}"/tracker."${CDATE}"/"${CDUMP}" + mkdir -p "${ARCDIR}/tracker.${PDY}${cyc}/${RUN}" blist="epac natl" for basin in ${blist}; do if [[ -f ${basin} ]]; then - cp -rp "${basin}" "${ARCDIR}"/tracker."${CDATE}"/"${CDUMP}" + cp -rp "${COM_ATMOS_TRACK}/${basin}" "${ARCDIR}/tracker.${PDY}${cyc}/${RUN}" fi done fi # Archive required gaussian gfs forecast files for Fit2Obs -if [ "${CDUMP}" = "gfs" ] && [ "${FITSARC}" = "YES" ]; then +if [[ "${RUN}" == "gfs" ]] && [[ "${FITSARC}" = "YES" ]]; then VFYARC=${VFYARC:-${ROTDIR}/vrfyarch} [[ ! 
-d ${VFYARC} ]] && mkdir -p "${VFYARC}" - mkdir -p "${VFYARC}"/"${CDUMP}"."${PDY}"/"${cyc}" - prefix=${CDUMP}.t${cyc}z + mkdir -p "${VFYARC}/${RUN}.${PDY}/${cyc}" + prefix="${RUN}.t${cyc}z" fhmax=${FHMAX_FITS:-${FHMAX_GFS}} fhr=0 while [[ ${fhr} -le ${fhmax} ]]; do fhr3=$(printf %03i "${fhr}") - sfcfile=${prefix}.sfcf${fhr3}.nc - sigfile=${prefix}.atmf${fhr3}.nc - nb_copy "${sfcfile}" "${VFYARC}"/"${CDUMP}"."${PDY}"/"${cyc}"/ - nb_copy "${sigfile}" "${VFYARC}"/"${CDUMP}"."${PDY}"/"${cyc}"/ + sfcfile="${COM_ATMOS_MASTER}/${prefix}.sfcf${fhr3}.nc" + sigfile="${COM_ATMOS_MASTER}/${prefix}.atmf${fhr3}.nc" + nb_copy "${sfcfile}" "${VFYARC}/${RUN}.${PDY}/${cyc}/" + nb_copy "${sigfile}" "${VFYARC}/${RUN}.${PDY}/${cyc}/" (( fhr = 10#${fhr} + 6 )) done fi @@ -107,171 +117,167 @@ fi if [[ ${HPSSARCH} = "YES" || ${LOCALARCH} = "YES" ]]; then ############################################################### -# --set the archiving command and create local directories, if necessary -TARCMD="htar" -HSICMD="hsi" -if [[ ${LOCALARCH} = "YES" ]]; then - TARCMD="tar" - HSICMD='' - [ ! -d "${ATARDIR}"/"${CDATE}" ] && mkdir -p "${ATARDIR}"/"${CDATE}" - [ ! 
-d "${ATARDIR}"/"${CDATE_MOS}" ] && [ -d "${ROTDIR}"/gfsmos."${PDY_MOS}" ] && [ "${cyc}" -eq 18 ] && mkdir -p "${ATARDIR}"/"${CDATE_MOS}" -fi - -#--determine when to save ICs for warm start and forecast-only runs -SAVEWARMICA="NO" -SAVEWARMICB="NO" -SAVEFCSTIC="NO" -firstday=$(${NDATE} +24 "${SDATE}") -mm=$(echo "${CDATE}"|cut -c 5-6) -dd=$(echo "${CDATE}"|cut -c 7-8) -nday=$(( (10#${mm}-1)*30+10#${dd} )) -mod=$((nday % ARCH_WARMICFREQ)) -if [ "${CDATE}" -eq "${firstday}" ] && [ "${cyc}" -eq "${ARCHINC_CYC}" ]; then SAVEWARMICA="YES" ; fi -if [ "${CDATE}" -eq "${firstday}" ] && [ "${cyc}" -eq "${ARCHICS_CYC}" ]; then SAVEWARMICB="YES" ; fi -if [ "${mod}" -eq 0 ] && [ "${cyc}" -eq "${ARCHINC_CYC}" ]; then SAVEWARMICA="YES" ; fi -if [ "${mod}" -eq 0 ] && [ "${cyc}" -eq "${ARCHICS_CYC}" ]; then SAVEWARMICB="YES" ; fi - -if [ "${ARCHICS_CYC}" -eq 18 ]; then - nday1=$((nday+1)) - mod1=$((nday1 % ARCH_WARMICFREQ)) - if [ "${mod1}" -eq 0 ] && [ "${cyc}" -eq "${ARCHICS_CYC}" ] ; then SAVEWARMICB="YES" ; fi - if [ "${mod1}" -ne 0 ] && [ "${cyc}" -eq "${ARCHICS_CYC}" ] ; then SAVEWARMICB="NO" ; fi - if [ "${CDATE}" -eq "${SDATE}" ] && [ "${cyc}" -eq "${ARCHICS_CYC}" ] ; then SAVEWARMICB="YES" ; fi -fi - -mod=$((nday % ARCH_FCSTICFREQ)) -if [ "${mod}" -eq 0 ] || [ "${CDATE}" -eq "${firstday}" ]; then SAVEFCSTIC="YES" ; fi - - -ARCH_LIST="${COMIN}/archlist" -[[ -d ${ARCH_LIST} ]] && rm -rf "${ARCH_LIST}" -mkdir -p "${ARCH_LIST}" -cd "${ARCH_LIST}" + # --set the archiving command and create local directories, if necessary + TARCMD="htar" + HSICMD="hsi" + if [[ ${LOCALARCH} = "YES" ]]; then + TARCMD="tar" + HSICMD='' + [[ ! -d "${ATARDIR}/${PDY}${cyc}" ]] && mkdir -p "${ATARDIR}/${PDY}${cyc}" + [[ ! -d "${ATARDIR}/${CDATE_MOS}" ]] && [[ -d "${ROTDIR}/gfsmos.${PDY_MOS}" ]] && [[ "${cyc}" -eq 18 ]] && mkdir -p "${ATARDIR}/${CDATE_MOS}" + fi -"${HOMEgfs}"/ush/hpssarch_gen.sh "${CDUMP}" -status=$? 
-if [ "${status}" -ne 0 ]; then - echo "${HOMEgfs}/ush/hpssarch_gen.sh ${CDUMP} failed, ABORT!" - exit "${status}" -fi + #--determine when to save ICs for warm start and forecast-only runs + SAVEWARMICA="NO" + SAVEWARMICB="NO" + SAVEFCSTIC="NO" + firstday=$(${NDATE} +24 "${SDATE}") + mm="${PDY:2:2}" + dd="${PDY:4:2}" + # TODO: This math yields multiple dates sharing the same nday + nday=$(( (10#${mm}-1)*30+10#${dd} )) + mod=$((nday % ARCH_WARMICFREQ)) + if [[ "${PDY}${cyc}" -eq "${firstday}" ]] && [[ "${cyc}" -eq "${ARCHINC_CYC}" ]]; then SAVEWARMICA="YES" ; fi + if [[ "${PDY}${cyc}" -eq "${firstday}" ]] && [[ "${cyc}" -eq "${ARCHICS_CYC}" ]]; then SAVEWARMICB="YES" ; fi + if [[ "${mod}" -eq 0 ]] && [[ "${cyc}" -eq "${ARCHINC_CYC}" ]]; then SAVEWARMICA="YES" ; fi + if [[ "${mod}" -eq 0 ]] && [[ "${cyc}" -eq "${ARCHICS_CYC}" ]]; then SAVEWARMICB="YES" ; fi + + if [[ "${ARCHICS_CYC}" -eq 18 ]]; then + nday1=$((nday+1)) + mod1=$((nday1 % ARCH_WARMICFREQ)) + if [[ "${mod1}" -eq 0 ]] && [[ "${cyc}" -eq "${ARCHICS_CYC}" ]] ; then SAVEWARMICB="YES" ; fi + if [[ "${mod1}" -ne 0 ]] && [[ "${cyc}" -eq "${ARCHICS_CYC}" ]] ; then SAVEWARMICB="NO" ; fi + if [[ "${PDY}${cyc}" -eq "${SDATE}" ]] && [[ "${cyc}" -eq "${ARCHICS_CYC}" ]] ; then SAVEWARMICB="YES" ; fi + fi -cd "${ROTDIR}" + mod=$((nday % ARCH_FCSTICFREQ)) + if [[ "${mod}" -eq 0 ]] || [[ "${PDY}${cyc}" -eq "${firstday}" ]]; then SAVEFCSTIC="YES" ; fi -if [ "${CDUMP}" = "gfs" ]; then - targrp_list="gfsa gfsb" + ARCH_LIST="${DATA}/archlist" + [[ -d ${ARCH_LIST} ]] && rm -rf "${ARCH_LIST}" + mkdir -p "${ARCH_LIST}" + cd "${ARCH_LIST}" || exit 2 - if [ "${ARCH_GAUSSIAN:-"NO"}" = "YES" ]; then - targrp_list="${targrp_list} gfs_flux gfs_netcdfb gfs_pgrb2b" - if [ "${MODE}" = "cycled" ]; then - targrp_list="${targrp_list} gfs_netcdfa" - fi + "${HOMEgfs}/ush/hpssarch_gen.sh" "${RUN}" + status=$? + if [ "${status}" -ne 0 ]; then + echo "${HOMEgfs}/ush/hpssarch_gen.sh ${RUN} failed, ABORT!" 
+ exit "${status}" fi - if [ "${DO_WAVE}" = "YES" ] && [ "${WAVE_CDUMP}" != "gdas" ]; then - targrp_list="${targrp_list} gfswave" - fi + cd "${ROTDIR}" || exit 2 - if [ "${DO_OCN}" = "YES" ]; then - targrp_list="${targrp_list} ocn_ice_grib2_0p5 ocn_ice_grib2_0p25 ocn_2D ocn_3D ocn_xsect ocn_daily gfs_flux_1p00" - fi + if [[ "${RUN}" = "gfs" ]]; then - if [ "${DO_ICE}" = "YES" ]; then - targrp_list="${targrp_list} ice" - fi + targrp_list="gfsa gfsb" - # Aerosols - if [ "${DO_AERO}" = "YES" ]; then - for targrp in chem; do - ${TARCMD} -P -cvf "${ATARDIR}"/"${CDATE}"/"${targrp}".tar $(cat "${ARCH_LIST}"/"${targrp}".txt) - status=$? - if [ "${status}" -ne 0 ] && [ "${CDATE}" -ge "${firstday}" ]; then - echo "HTAR ${CDATE} ${targrp}.tar failed" - exit "${status}" + if [ "${ARCH_GAUSSIAN:-"NO"}" = "YES" ]; then + targrp_list="${targrp_list} gfs_flux gfs_netcdfb gfs_pgrb2b" + if [ "${MODE}" = "cycled" ]; then + targrp_list="${targrp_list} gfs_netcdfa" fi - done - fi + fi - #for restarts - if [ "${SAVEFCSTIC}" = "YES" ]; then - targrp_list="${targrp_list} gfs_restarta" - fi + if [ "${DO_WAVE}" = "YES" ]; then + targrp_list="${targrp_list} gfswave" + fi - #for downstream products - if [ "${DO_BUFRSND}" = "YES" ] || [ "${WAFSF}" = "YES" ]; then - targrp_list="${targrp_list} gfs_downstream" - fi + if [ "${DO_OCN}" = "YES" ]; then + targrp_list="${targrp_list} ocn_ice_grib2_0p5 ocn_ice_grib2_0p25 ocn_2D ocn_3D ocn_xsect ocn_daily gfs_flux_1p00" + fi - #--save mdl gfsmos output from all cycles in the 18Z archive directory - if [ -d gfsmos."${PDY_MOS}" ] && [ "${cyc}" -eq 18 ]; then - set +e - ${TARCMD} -P -cvf "${ATARDIR}"/"${CDATE_MOS}"/gfsmos.tar ./gfsmos."${PDY_MOS}" - status=$? 
- if [ "${status}" -ne 0 ] && [ "${CDATE}" -ge "${firstday}" ]; then - echo "$(echo "${TARCMD}" | tr 'a-z' 'A-Z') ${CDATE} gfsmos.tar failed" - exit "${status}" + if [ "${DO_ICE}" = "YES" ]; then + targrp_list="${targrp_list} ice" fi - set_strict - fi -elif [ "${CDUMP}" = "gdas" ]; then - targrp_list="gdas" + # Aerosols + if [ "${DO_AERO}" = "YES" ]; then + for targrp in chem; do + # TODO: Why is this tar being done here instead of being added to the list? + ${TARCMD} -P -cvf "${ATARDIR}/${PDY}${cyc}/${targrp}.tar" $(cat "${ARCH_LIST}/${targrp}.txt") + status=$? + if [[ "${status}" -ne 0 ]] && [[ "${PDY}${cyc}" -ge "${firstday}" ]]; then + echo "HTAR ${PDY}${cyc} ${targrp}.tar failed" + exit "${status}" + fi + done + fi - #gdaswave - if [ "${DO_WAVE}" = "YES" ]; then - targrp_list="${targrp_list} gdaswave" - fi + #for restarts + if [ "${SAVEFCSTIC}" = "YES" ]; then + targrp_list="${targrp_list} gfs_restarta" + fi - #gdasocean - if [ "${DO_OCN}" = "YES" ]; then - targrp_list="${targrp_list} gdasocean" - fi + #for downstream products + if [ "${DO_BUFRSND}" = "YES" ] || [ "${WAFSF}" = "YES" ]; then + targrp_list="${targrp_list} gfs_downstream" + fi - #gdasice - if [ "${DO_ICE}" = "YES" ]; then - targrp_list="${targrp_list} gdasice" - fi + #--save mdl gfsmos output from all cycles in the 18Z archive directory + if [[ -d "gfsmos.${PDY_MOS}" ]] && [[ "${cyc}" -eq 18 ]]; then + set +e + # TODO: Why is this tar being done here instead of being added to the list? + ${TARCMD} -P -cvf "${ATARDIR}/${CDATE_MOS}/gfsmos.tar" "./gfsmos.${PDY_MOS}" + status=$? 
+ if [[ "${status}" -ne 0 ]] && [[ "${PDY}${cyc}" -ge "${firstday}" ]]; then + echo "${TARCMD^^} ${PDY}${cyc} gfsmos.tar failed" + exit "${status}" + fi + set_strict + fi + elif [[ "${RUN}" = "gdas" ]]; then - if [ "${SAVEWARMICA}" = "YES" ] || [ "${SAVEFCSTIC}" = "YES" ]; then - targrp_list="${targrp_list} gdas_restarta" + targrp_list="gdas" + #gdaswave if [ "${DO_WAVE}" = "YES" ]; then - targrp_list="${targrp_list} gdaswave_restart" + targrp_list="${targrp_list} gdaswave" fi + + #gdasocean if [ "${DO_OCN}" = "YES" ]; then - targrp_list="${targrp_list} gdasocean_restart" + targrp_list="${targrp_list} gdasocean" fi + + #gdasice if [ "${DO_ICE}" = "YES" ]; then - targrp_list="${targrp_list} gdasice_restart" + targrp_list="${targrp_list} gdasice" fi - fi - if [ "${SAVEWARMICB}" = "YES" ] || [ "${SAVEFCSTIC}" = "YES" ]; then - targrp_list="${targrp_list} gdas_restartb" - fi -fi + if [ "${SAVEWARMICA}" = "YES" ] || [ "${SAVEFCSTIC}" = "YES" ]; then + targrp_list="${targrp_list} gdas_restarta" + if [ "${DO_WAVE}" = "YES" ]; then targrp_list="${targrp_list} gdaswave_restart"; fi + if [ "${DO_OCN}" = "YES" ]; then targrp_list="${targrp_list} gdasocean_restart"; fi + if [ "${DO_ICE}" = "YES" ]; then targrp_list="${targrp_list} gdasice_restart"; fi + fi -# Turn on extended globbing options -shopt -s extglob -for targrp in ${targrp_list}; do - set +e - ${TARCMD} -P -cvf "${ATARDIR}"/"${CDATE}"/"${targrp}".tar $(cat "${ARCH_LIST}"/"${targrp}".txt) - status=$? 
- case ${targrp} in - 'gdas'|'gdas_restarta') - ${HSICMD} chgrp rstprod "${ATARDIR}/${CDATE}/${targrp}.tar" - ${HSICMD} chmod 640 "${ATARDIR}/${CDATE}/${targrp}.tar" - ;; - *) ;; - esac - if [ "${status}" -ne 0 ] && [ "${CDATE}" -ge "${firstday}" ]; then - echo "$(echo "${TARCMD}" | tr 'a-z' 'A-Z') ${CDATE} ${targrp}.tar failed" - exit "${status}" + if [ "${SAVEWARMICB}" = "YES" ] || [ "${SAVEFCSTIC}" = "YES" ]; then + targrp_list="${targrp_list} gdas_restartb" + fi fi - set_strict -done -# Turn extended globbing back off -shopt -u extglob + + # Turn on extended globbing options + shopt -s extglob + for targrp in ${targrp_list}; do + set +e + ${TARCMD} -P -cvf "${ATARDIR}/${PDY}${cyc}/${targrp}.tar" $(cat "${ARCH_LIST}/${targrp}.txt") + status=$? + case ${targrp} in + 'gdas'|'gdas_restarta') + ${HSICMD} chgrp rstprod "${ATARDIR}/${CDATE}/${targrp}.tar" + ${HSICMD} chmod 640 "${ATARDIR}/${CDATE}/${targrp}.tar" + ;; + *) ;; + esac + if [ "${status}" -ne 0 ] && [ "${PDY}${cyc}" -ge "${firstday}" ]; then + echo "FATAL ERROR: ${TARCMD} ${PDY}${cyc} ${targrp}.tar failed" + exit "${status}" + fi + set_strict + done + # Turn extended globbing back off + shopt -u extglob ############################################################### fi ##end of HPSS archive @@ -282,15 +288,17 @@ fi ##end of HPSS archive ############################################################### # Clean up previous cycles; various depths # PRIOR CYCLE: Leave the prior cycle alone -GDATE=$(${NDATE} -"${assim_freq}" "${CDATE}") +GDATE=$(${NDATE} -"${assim_freq}" "${PDY}${cyc}") # PREVIOUS to the PRIOR CYCLE GDATE=$(${NDATE} -"${assim_freq}" "${GDATE}") -gPDY=$(echo "${GDATE}" | cut -c1-8) -gcyc=$(echo "${GDATE}" | cut -c9-10) +gPDY="${GDATE:0:8}" +gcyc="${GDATE:8:2}" # Remove the TMPDIR directory -COMIN="${RUNDIR}/${GDATE}" +# TODO Only prepbufr is currently using this directory, and all jobs should be +# cleaning up after themselves anyway +COMIN="${DATAROOT}/${GDATE}" [[ -d ${COMIN} ]] && rm -rf 
"${COMIN}" if [[ "${DELETE_COM_IN_ARCHIVE_JOB:-YES}" == NO ]] ; then @@ -298,89 +306,132 @@ if [[ "${DELETE_COM_IN_ARCHIVE_JOB:-YES}" == NO ]] ; then fi # Step back every assim_freq hours and remove old rotating directories -# for successful cycles (defaults from 24h to 120h). If GLDAS is -# active, retain files needed by GLDAS update. Independent of GLDAS, -# retain files needed by Fit2Obs -DO_GLDAS=${DO_GLDAS:-"NO"} -GDATEEND=$(${NDATE} -"${RMOLDEND:-24}" "${CDATE}") -GDATE=$(${NDATE} -"${RMOLDSTD:-120}" "${CDATE}") -GLDAS_DATE=$(${NDATE} -96 "${CDATE}") -RTOFS_DATE=$(${NDATE} -48 "${CDATE}") +# for successful cycles (defaults from 24h to 120h). +# Retain files needed by Fit2Obs +# TODO: This whole section needs to be revamped to remove marine component +# directories and not look at the rocoto log. +GDATEEND=$(${NDATE} -"${RMOLDEND:-24}" "${PDY}${cyc}") +GDATE=$(${NDATE} -"${RMOLDSTD:-120}" "${PDY}${cyc}") +RTOFS_DATE=$(${NDATE} -48 "${PDY}${cyc}") +function remove_files() { + # TODO: move this to a new location + local directory=$1 + shift + if [[ ! 
-d ${directory} ]]; then + echo "No directory ${directory} to remove files from, skiping" + return + fi + local exclude_list="" + if (($# > 0)); then + exclude_list=$* + fi + local file_list + declare -a file_list + readarray -t file_list < <(find -L "${directory}" -type f) + if (( ${#file_list[@]} == 0 )); then return; fi + # echo "Number of files to remove before exclusions: ${#file_list[@]}" + for exclude in ${exclude_list}; do + echo "Excluding ${exclude}" + declare -a file_list_old=("${file_list[@]}") + readarray file_list < <(printf -- '%s\n' "${file_list_old[@]}" | grep -v "${exclude}") + # echo "Number of files to remove after exclusion: ${#file_list[@]}" + if (( ${#file_list[@]} == 0 )); then return; fi + done + # echo "Number of files to remove after exclusions: ${#file_list[@]}" + + for file in "${file_list[@]}"; do + rm -f "${file}" + done + # Remove directory if empty + rmdir "${directory}" || true +} + while [ "${GDATE}" -le "${GDATEEND}" ]; do - gPDY=$(echo "${GDATE}" | cut -c1-8) - gcyc=$(echo "${GDATE}" | cut -c9-10) - COMIN="${ROTDIR}/${CDUMP}.${gPDY}/${gcyc}/atmos" - COMINwave="${ROTDIR}/${CDUMP}.${gPDY}/${gcyc}/wave" - COMINocean="${ROTDIR}/${CDUMP}.${gPDY}/${gcyc}/ocean" - COMINice="${ROTDIR}/${CDUMP}.${gPDY}/${gcyc}/ice" - COMINmed="${ROTDIR}/${CDUMP}.${gPDY}/${gcyc}/med" + gPDY="${GDATE:0:8}" + gcyc="${GDATE:8:2}" COMINrtofs="${ROTDIR}/rtofs.${gPDY}" - if [ -d "${COMIN}" ]; then + if [ -d "${COM_TOP}" ]; then rocotolog="${EXPDIR}/logs/${GDATE}.log" if [ -f "${rocotolog}" ]; then set +e testend=$(tail -n 1 "${rocotolog}" | grep "This cycle is complete: Success") rc=$? 
set_strict + if [ "${rc}" -eq 0 ]; then - if [ -d "${COMINwave}" ]; then rm -rf "${COMINwave}" ; fi - if [ -d "${COMINocean}" ]; then rm -rf "${COMINocean}" ; fi - if [ -d "${COMINice}" ]; then rm -rf "${COMINice}" ; fi - if [ -d "${COMINmed}" ]; then rm -rf "${COMINmed}" ; fi + # Obs + exclude_list="prepbufr" + templates="COM_OBS" + for template in ${templates}; do + YMD="${gPDY}" HH="${gcyc}" generate_com "directory:${template}" + remove_files "${directory}" "${exclude_list[@]}" + done + + # Atmos + exclude_list="cnvstat atmanl.nc" + templates=$(compgen -A variable | grep 'COM_ATMOS_.*_TMPL') + for template in ${templates}; do + YMD="${gPDY}" HH="${gcyc}" generate_com "directory:${template}" + remove_files "${directory}" "${exclude_list[@]}" + done + + # Wave + exclude_list="" + templates=$(compgen -A variable | grep 'COM_WAVE_.*_TMPL') + for template in ${templates}; do + YMD="${gPDY}" HH="${gcyc}" generate_com "directory:${template}" + remove_files "${directory}" "${exclude_list[@]}" + done + + # Ocean + exclude_list="" + templates=$(compgen -A variable | grep 'COM_OCEAN_.*_TMPL') + for template in ${templates}; do + YMD="${gPDY}" HH="${gcyc}" generate_com "directory:${template}" + remove_files "${directory}" "${exclude_list[@]}" + done + + # Ice + exclude_list="" + templates=$(compgen -A variable | grep 'COM_ICE_.*_TMPL') + for template in ${templates}; do + YMD="${gPDY}" HH="${gcyc}" generate_com "directory:${template}" + remove_files "${directory}" "${exclude_list[@]}" + done + + # Aerosols (GOCART) + exclude_list="" + templates=$(compgen -A variable | grep 'COM_CHEM_.*_TMPL') + for template in ${templates}; do + YMD="${gPDY}" HH="${gcyc}" generate_com "directory:${template}" + remove_files "${directory}" "${exclude_list[@]}" + done + + # Mediator + exclude_list="" + templates=$(compgen -A variable | grep 'COM_MED_.*_TMPL') + for template in ${templates}; do + YMD="${gPDY}" HH="${gcyc}" generate_com "directory:${template}" + remove_files "${directory}" 
"${exclude_list[@]}" + done + if [ -d "${COMINrtofs}" ] && [ "${GDATE}" -lt "${RTOFS_DATE}" ]; then rm -rf "${COMINrtofs}" ; fi - if [ "${CDUMP}" != "gdas" ] || [ "${DO_GLDAS}" = "NO" ] || [ "${GDATE}" -lt "${GLDAS_DATE}" ]; then - if [ "${CDUMP}" = "gdas" ]; then - for file in $(ls "${COMIN}" |grep -v prepbufr |grep -v cnvstat |grep -v atmanl.nc); do - rm -rf "${COMIN}"/"${file}" - done - else - rm -rf "${COMIN}" - fi - else - if [ "${DO_GLDAS}" = "YES" ]; then - for file in $(ls "${COMIN}" |grep -v sflux |grep -v RESTART |grep -v prepbufr |grep -v cnvstat |grep -v atmanl.nc); do - rm -rf "${COMIN}"/"${file}" - done - for file in $(ls "${COMIN}"/RESTART |grep -v sfcanl ); do - rm -rf "${COMIN}"/RESTART/"${file}" - done - else - for file in $(ls "${COMIN}" |grep -v prepbufr |grep -v cnvstat |grep -v atmanl.nc); do - rm -rf "${COMIN}"/"${file}" - done - fi - fi fi fi fi - # Remove any empty directories - if [ -d "${COMIN}" ]; then - [[ ! "$(ls -A "${COMIN}")" ]] && rm -rf "${COMIN}" - fi - - if [ -d "${COMINwave}" ]; then - [[ ! "$(ls -A "${COMINwave}")" ]] && rm -rf "${COMINwave}" - fi - - if [ -d "${COMINocean}" ]; then - [[ ! "$(ls -A "${COMINocean}")" ]] && rm -rf "${COMINocean}" - fi - - if [ -d "${COMINice}" ]; then - [[ ! "$(ls -A "${COMINice}")" ]] && rm -rf "${COMINice}" - fi - - if [ -d "${COMINmed}" ]; then - [[ ! "$(ls -A "${COMINmed}")" ]] && rm -rf "${COMINmed}" - fi - # Remove mdl gfsmos directory - if [ "${CDUMP}" = "gfs" ]; then + if [ "${RUN}" = "gfs" ]; then COMIN="${ROTDIR}/gfsmos.${gPDY}" if [ -d "${COMIN}" ] && [ "${GDATE}" -lt "${CDATE_MOS}" ]; then rm -rf "${COMIN}" ; fi fi + # Remove any empty directories + target_dir="${ROTDIR:?}/${RUN}.${gPDY}/${gcyc}/" + if [[ -d ${target_dir} ]]; then + find "${target_dir}" -empty -type d -delete + fi + GDATE=$(${NDATE} +"${assim_freq}" "${GDATE}") done @@ -389,32 +440,32 @@ done # gaussian files to prevent the files from being removed by automatic # scrubber present on some machines. 
-if [ "${CDUMP}" = "gfs" ]; then +if [ "${RUN}" = "gfs" ]; then fhmax=$((FHMAX_FITS+36)) - RDATE=$(${NDATE} -"${fhmax}" "${CDATE}") + RDATE=$(${NDATE} -"${fhmax}" "${PDY}${cyc}") rPDY=$(echo "${RDATE}" | cut -c1-8) - COMIN="${VFYARC}/${CDUMP}.${rPDY}" + COMIN="${VFYARC}/${RUN}.${rPDY}" [[ -d ${COMIN} ]] && rm -rf "${COMIN}" - TDATE=$(${NDATE} -"${FHMAX_FITS}" "${CDATE}") - while [ "${TDATE}" -lt "${CDATE}" ]; do + TDATE=$(${NDATE} -"${FHMAX_FITS}" "${PDY}${cyc}") + while [ "${TDATE}" -lt "${PDY}${cyc}" ]; do tPDY=$(echo "${TDATE}" | cut -c1-8) tcyc=$(echo "${TDATE}" | cut -c9-10) - TDIR=${VFYARC}/${CDUMP}.${tPDY}/${tcyc} + TDIR=${VFYARC}/${RUN}.${tPDY}/${tcyc} [[ -d ${TDIR} ]] && touch "${TDIR}"/* TDATE=$(${NDATE} +6 "${TDATE}") done fi -# Remove $CDUMP.$rPDY for the older of GDATE or RDATE -GDATE=$(${NDATE} -"${RMOLDSTD:-120}" "${CDATE}") +# Remove $RUN.$rPDY for the older of GDATE or RDATE +GDATE=$(${NDATE} -"${RMOLDSTD:-120}" "${PDY}${cyc}") fhmax=${FHMAX_GFS} -RDATE=$(${NDATE} -"${fhmax}" "${CDATE}") +RDATE=$(${NDATE} -"${fhmax}" "${PDY}${cyc}") if [ "${GDATE}" -lt "${RDATE}" ]; then RDATE=${GDATE} fi rPDY=$(echo "${RDATE}" | cut -c1-8) -COMIN="${ROTDIR}/${CDUMP}.${rPDY}" +COMIN="${ROTDIR}/${RUN}.${rPDY}" [[ -d ${COMIN} ]] && rm -rf "${COMIN}" diff --git a/scripts/exglobal_atm_analysis_finalize.py b/scripts/exglobal_atm_analysis_finalize.py new file mode 100755 index 0000000000..e51bf082b5 --- /dev/null +++ b/scripts/exglobal_atm_analysis_finalize.py @@ -0,0 +1,25 @@ +#!/usr/bin/env python3 +# exglobal_atm_analysis_finalize.py +# This script creates an AtmAnalysis class +# and runs the finalize method +# which perform post-processing and clean up activities +# for a global atm variational analysis +import os + +from pygw.logger import Logger +from pygw.configuration import cast_strdict_as_dtypedict +from pygfs.task.atm_analysis import AtmAnalysis + + +# Initialize root logger +logger = Logger(level='DEBUG', colored_log=True) + + +if __name__ == '__main__': + + 
# Take configuration from environment and cast it as python dictionary + config = cast_strdict_as_dtypedict(os.environ) + + # Instantiate the atm analysis task + AtmAnl = AtmAnalysis(config) + AtmAnl.finalize() diff --git a/scripts/exglobal_atm_analysis_initialize.py b/scripts/exglobal_atm_analysis_initialize.py new file mode 100755 index 0000000000..e0077f3323 --- /dev/null +++ b/scripts/exglobal_atm_analysis_initialize.py @@ -0,0 +1,25 @@ +#!/usr/bin/env python3 +# exglobal_atm_analysis_initialize.py +# This script creates an AtmAnalysis class +# and runs the initialize method +# which create and stage the runtime directory +# and create the YAML configuration +# for a global atm variational analysis +import os + +from pygw.logger import Logger +from pygw.configuration import cast_strdict_as_dtypedict +from pygfs.task.atm_analysis import AtmAnalysis + +# Initialize root logger +logger = Logger(level='DEBUG', colored_log=True) + + +if __name__ == '__main__': + + # Take configuration from environment and cast it as python dictionary + config = cast_strdict_as_dtypedict(os.environ) + + # Instantiate the atm analysis task + AtmAnl = AtmAnalysis(config) + AtmAnl.initialize() diff --git a/scripts/exglobal_atm_analysis_run.py b/scripts/exglobal_atm_analysis_run.py new file mode 100755 index 0000000000..6b29a56976 --- /dev/null +++ b/scripts/exglobal_atm_analysis_run.py @@ -0,0 +1,23 @@ +#!/usr/bin/env python3 +# exglobal_atm_analysis_run.py +# This script creates an AtmAnalysis object +# and runs the execute method +# which executes the global atm variational analysis +import os + +from pygw.logger import Logger +from pygw.configuration import cast_strdict_as_dtypedict +from pygfs.task.atm_analysis import AtmAnalysis + +# Initialize root logger +logger = Logger(level='DEBUG', colored_log=True) + + +if __name__ == '__main__': + + # Take configuration from environment and cast it as python dictionary + config = cast_strdict_as_dtypedict(os.environ) + + # Instantiate the 
atm analysis task + AtmAnl = AtmAnalysis(config) + AtmAnl.execute() diff --git a/scripts/exglobal_atmens_analysis_finalize.py b/scripts/exglobal_atmens_analysis_finalize.py new file mode 100755 index 0000000000..7bac671aee --- /dev/null +++ b/scripts/exglobal_atmens_analysis_finalize.py @@ -0,0 +1,25 @@ +#!/usr/bin/env python3 +# exglobal_atmens_analysis_finalize.py +# This script creates an AtmEnsAnalysis class +# and runs the finalize method +# which perform post-processing and clean up activities +# for a global atm local ensemble analysis +import os + +from pygw.logger import Logger +from pygw.configuration import cast_strdict_as_dtypedict +from pygfs.task.atmens_analysis import AtmEnsAnalysis + + +# Initialize root logger +logger = Logger(level='DEBUG', colored_log=True) + + +if __name__ == '__main__': + + # Take configuration from environment and cast it as python dictionary + config = cast_strdict_as_dtypedict(os.environ) + + # Instantiate the atmens analysis task + AtmEnsAnl = AtmEnsAnalysis(config) + AtmEnsAnl.finalize() diff --git a/scripts/exglobal_atmens_analysis_initialize.py b/scripts/exglobal_atmens_analysis_initialize.py new file mode 100755 index 0000000000..1461e0b441 --- /dev/null +++ b/scripts/exglobal_atmens_analysis_initialize.py @@ -0,0 +1,25 @@ +#!/usr/bin/env python3 +# exglobal_atmens_analysis_initialize.py +# This script creates an AtmEnsAnalysis class +# and runs the initialize method +# which create and stage the runtime directory +# and create the YAML configuration +# for a global atm local ensemble analysis +import os + +from pygw.logger import Logger +from pygw.configuration import cast_strdict_as_dtypedict +from pygfs.task.atmens_analysis import AtmEnsAnalysis + +# Initialize root logger +logger = Logger(level='DEBUG', colored_log=True) + + +if __name__ == '__main__': + + # Take configuration from environment and cast it as python dictionary + config = cast_strdict_as_dtypedict(os.environ) + + # Instantiate the atmens analysis task 
+ AtmEnsAnl = AtmEnsAnalysis(config) + AtmEnsAnl.initialize() diff --git a/scripts/exglobal_atmens_analysis_run.py b/scripts/exglobal_atmens_analysis_run.py new file mode 100755 index 0000000000..dda4f7a11d --- /dev/null +++ b/scripts/exglobal_atmens_analysis_run.py @@ -0,0 +1,23 @@ +#!/usr/bin/env python3 +# exglobal_atmens_analysis_run.py +# This script creates an AtmEnsAnalysis object +# and runs the execute method +# which executes the global atm local ensemble analysis +import os + +from pygw.logger import Logger +from pygw.configuration import cast_strdict_as_dtypedict +from pygfs.task.atmens_analysis import AtmEnsAnalysis + +# Initialize root logger +logger = Logger(level='DEBUG', colored_log=True) + + +if __name__ == '__main__': + + # Take configuration from environment and cast it as python dictionary + config = cast_strdict_as_dtypedict(os.environ) + + # Instantiate the atmens analysis task + AtmEnsAnl = AtmEnsAnalysis(config) + AtmEnsAnl.execute() diff --git a/scripts/exglobal_atmos_analysis.sh b/scripts/exglobal_atmos_analysis.sh index af75d6c75b..f81f7f0a33 100755 --- a/scripts/exglobal_atmos_analysis.sh +++ b/scripts/exglobal_atmos_analysis.sh @@ -55,8 +55,8 @@ lupp=${lupp:-".true."} cnvw_option=${cnvw_option:-".false."} # Observation usage options -cao_check=${cao_check:-".false."} -ta2tb=${ta2tb:-".false."} +cao_check=${cao_check:-".true."} +ta2tb=${ta2tb:-".true."} # Diagnostic files options lobsdiag_forenkf=${lobsdiag_forenkf:-".false."} @@ -93,112 +93,112 @@ export gesenvir=${gesenvir:-${envir}} # Observations OPREFIX=${OPREFIX:-""} OSUFFIX=${OSUFFIX:-""} -PREPQC=${PREPQC:-${COMIN_OBS}/${OPREFIX}prepbufr${OSUFFIX}} -PREPQCPF=${PREPQCPF:-${COMIN_OBS}/${OPREFIX}prepbufr.acft_profiles${OSUFFIX}} -NSSTBF=${NSSTBF:-${COMIN_OBS}/${OPREFIX}nsstbufr${OSUFFIX}} -SATWND=${SATWND:-${COMIN_OBS}/${OPREFIX}satwnd.tm00.bufr_d${OSUFFIX}} -OSCATBF=${OSCATBF:-${COMIN_OBS}/${OPREFIX}oscatw.tm00.bufr_d${OSUFFIX}} 
-RAPIDSCATBF=${RAPIDSCATBF:-${COMIN_OBS}/${OPREFIX}rapidscatw.tm00.bufr_d${OSUFFIX}} -GSNDBF=${GSNDBF:-${COMIN_OBS}/${OPREFIX}goesnd.tm00.bufr_d${OSUFFIX}} -GSNDBF1=${GSNDBF1:-${COMIN_OBS}/${OPREFIX}goesfv.tm00.bufr_d${OSUFFIX}} -B1HRS2=${B1HRS2:-${COMIN_OBS}/${OPREFIX}1bhrs2.tm00.bufr_d${OSUFFIX}} -B1MSU=${B1MSU:-${COMIN_OBS}/${OPREFIX}1bmsu.tm00.bufr_d${OSUFFIX}} -B1HRS3=${B1HRS3:-${COMIN_OBS}/${OPREFIX}1bhrs3.tm00.bufr_d${OSUFFIX}} -B1HRS4=${B1HRS4:-${COMIN_OBS}/${OPREFIX}1bhrs4.tm00.bufr_d${OSUFFIX}} -B1AMUA=${B1AMUA:-${COMIN_OBS}/${OPREFIX}1bamua.tm00.bufr_d${OSUFFIX}} -B1AMUB=${B1AMUB:-${COMIN_OBS}/${OPREFIX}1bamub.tm00.bufr_d${OSUFFIX}} -B1MHS=${B1MHS:-${COMIN_OBS}/${OPREFIX}1bmhs.tm00.bufr_d${OSUFFIX}} -ESHRS3=${ESHRS3:-${COMIN_OBS}/${OPREFIX}eshrs3.tm00.bufr_d${OSUFFIX}} -ESAMUA=${ESAMUA:-${COMIN_OBS}/${OPREFIX}esamua.tm00.bufr_d${OSUFFIX}} -ESAMUB=${ESAMUB:-${COMIN_OBS}/${OPREFIX}esamub.tm00.bufr_d${OSUFFIX}} -ESMHS=${ESMHS:-${COMIN_OBS}/${OPREFIX}esmhs.tm00.bufr_d${OSUFFIX}} -HRS3DB=${HRS3DB:-${COMIN_OBS}/${OPREFIX}hrs3db.tm00.bufr_d${OSUFFIX}} -AMUADB=${AMUADB:-${COMIN_OBS}/${OPREFIX}amuadb.tm00.bufr_d${OSUFFIX}} -AMUBDB=${AMUBDB:-${COMIN_OBS}/${OPREFIX}amubdb.tm00.bufr_d${OSUFFIX}} -MHSDB=${MHSDB:-${COMIN_OBS}/${OPREFIX}mhsdb.tm00.bufr_d${OSUFFIX}} -AIRSBF=${AIRSBF:-${COMIN_OBS}/${OPREFIX}airsev.tm00.bufr_d${OSUFFIX}} -IASIBF=${IASIBF:-${COMIN_OBS}/${OPREFIX}mtiasi.tm00.bufr_d${OSUFFIX}} -ESIASI=${ESIASI:-${COMIN_OBS}/${OPREFIX}esiasi.tm00.bufr_d${OSUFFIX}} -IASIDB=${IASIDB:-${COMIN_OBS}/${OPREFIX}iasidb.tm00.bufr_d${OSUFFIX}} -AMSREBF=${AMSREBF:-${COMIN_OBS}/${OPREFIX}amsre.tm00.bufr_d${OSUFFIX}} -AMSR2BF=${AMSR2BF:-${COMIN_OBS}/${OPREFIX}amsr2.tm00.bufr_d${OSUFFIX}} -GMI1CRBF=${GMI1CRBF:-${COMIN_OBS}/${OPREFIX}gmi1cr.tm00.bufr_d${OSUFFIX}} # GMI temporarily disabled due to array overflow. 
-SAPHIRBF=${SAPHIRBF:-${COMIN_OBS}/${OPREFIX}saphir.tm00.bufr_d${OSUFFIX}} -SEVIRIBF=${SEVIRIBF:-${COMIN_OBS}/${OPREFIX}sevcsr.tm00.bufr_d${OSUFFIX}} -AHIBF=${AHIBF:-${COMIN_OBS}/${OPREFIX}ahicsr.tm00.bufr_d${OSUFFIX}} -SSTVIIRS=${SSTVIIRS:-${COMIN_OBS}/${OPREFIX}sstvcw.tm00.bufr_d${OSUFFIX}} -ABIBF=${ABIBF:-${COMIN_OBS}/${OPREFIX}gsrcsr.tm00.bufr_d${OSUFFIX}} -CRISBF=${CRISBF:-${COMIN_OBS}/${OPREFIX}cris.tm00.bufr_d${OSUFFIX}} -ESCRIS=${ESCRIS:-${COMIN_OBS}/${OPREFIX}escris.tm00.bufr_d${OSUFFIX}} -CRISDB=${CRISDB:-${COMIN_OBS}/${OPREFIX}crisdb.tm00.bufr_d${OSUFFIX}} -CRISFSBF=${CRISFSBF:-${COMIN_OBS}/${OPREFIX}crisf4.tm00.bufr_d${OSUFFIX}} -ESCRISFS=${ESCRISFS:-${COMIN_OBS}/${OPREFIX}escrsf.tm00.bufr_d${OSUFFIX}} -CRISFSDB=${CRISFSDB:-${COMIN_OBS}/${OPREFIX}crsfdb.tm00.bufr_d${OSUFFIX}} -ATMSBF=${ATMSBF:-${COMIN_OBS}/${OPREFIX}atms.tm00.bufr_d${OSUFFIX}} -ESATMS=${ESATMS:-${COMIN_OBS}/${OPREFIX}esatms.tm00.bufr_d${OSUFFIX}} -ATMSDB=${ATMSDB:-${COMIN_OBS}/${OPREFIX}atmsdb.tm00.bufr_d${OSUFFIX}} -SSMITBF=${SSMITBF:-${COMIN_OBS}/${OPREFIX}ssmit.tm00.bufr_d${OSUFFIX}} -SSMISBF=${SSMISBF:-${COMIN_OBS}/${OPREFIX}ssmisu.tm00.bufr_d${OSUFFIX}} -SBUVBF=${SBUVBF:-${COMIN_OBS}/${OPREFIX}osbuv8.tm00.bufr_d${OSUFFIX}} -OMPSNPBF=${OMPSNPBF:-${COMIN_OBS}/${OPREFIX}ompsn8.tm00.bufr_d${OSUFFIX}} -OMPSTCBF=${OMPSTCBF:-${COMIN_OBS}/${OPREFIX}ompst8.tm00.bufr_d${OSUFFIX}} -OMPSLPBF=${OMPSLPBF:-${COMIN_OBS}/${OPREFIX}ompslp.tm00.bufr_d${OSUFFIX}} -GOMEBF=${GOMEBF:-${COMIN_OBS}/${OPREFIX}gome.tm00.bufr_d${OSUFFIX}} -OMIBF=${OMIBF:-${COMIN_OBS}/${OPREFIX}omi.tm00.bufr_d${OSUFFIX}} -MLSBF=${MLSBF:-${COMIN_OBS}/${OPREFIX}mls.tm00.bufr_d${OSUFFIX}} -SMIPCP=${SMIPCP:-${COMIN_OBS}/${OPREFIX}spssmi.tm00.bufr_d${OSUFFIX}} -TMIPCP=${TMIPCP:-${COMIN_OBS}/${OPREFIX}sptrmm.tm00.bufr_d${OSUFFIX}} -GPSROBF=${GPSROBF:-${COMIN_OBS}/${OPREFIX}gpsro.tm00.bufr_d${OSUFFIX}} -TCVITL=${TCVITL:-${COMIN_OBS}/${OPREFIX}syndata.tcvitals.tm00} 
-B1AVHAM=${B1AVHAM:-${COMIN_OBS}/${OPREFIX}avcsam.tm00.bufr_d${OSUFFIX}} -B1AVHPM=${B1AVHPM:-${COMIN_OBS}/${OPREFIX}avcspm.tm00.bufr_d${OSUFFIX}} -HDOB=${HDOB:-${COMIN_OBS}/${OPREFIX}hdob.tm00.bufr_d${OSUFFIX}} +PREPQC=${PREPQC:-${COM_OBS}/${OPREFIX}prepbufr${OSUFFIX}} +PREPQCPF=${PREPQCPF:-${COM_OBS}/${OPREFIX}prepbufr.acft_profiles${OSUFFIX}} +NSSTBF=${NSSTBF:-${COM_OBS}/${OPREFIX}nsstbufr${OSUFFIX}} +SATWND=${SATWND:-${COM_OBS}/${OPREFIX}satwnd.tm00.bufr_d${OSUFFIX}} +OSCATBF=${OSCATBF:-${COM_OBS}/${OPREFIX}oscatw.tm00.bufr_d${OSUFFIX}} +RAPIDSCATBF=${RAPIDSCATBF:-${COM_OBS}/${OPREFIX}rapidscatw.tm00.bufr_d${OSUFFIX}} +GSNDBF=${GSNDBF:-${COM_OBS}/${OPREFIX}goesnd.tm00.bufr_d${OSUFFIX}} +GSNDBF1=${GSNDBF1:-${COM_OBS}/${OPREFIX}goesfv.tm00.bufr_d${OSUFFIX}} +B1HRS2=${B1HRS2:-${COM_OBS}/${OPREFIX}1bhrs2.tm00.bufr_d${OSUFFIX}} +B1MSU=${B1MSU:-${COM_OBS}/${OPREFIX}1bmsu.tm00.bufr_d${OSUFFIX}} +B1HRS3=${B1HRS3:-${COM_OBS}/${OPREFIX}1bhrs3.tm00.bufr_d${OSUFFIX}} +B1HRS4=${B1HRS4:-${COM_OBS}/${OPREFIX}1bhrs4.tm00.bufr_d${OSUFFIX}} +B1AMUA=${B1AMUA:-${COM_OBS}/${OPREFIX}1bamua.tm00.bufr_d${OSUFFIX}} +B1AMUB=${B1AMUB:-${COM_OBS}/${OPREFIX}1bamub.tm00.bufr_d${OSUFFIX}} +B1MHS=${B1MHS:-${COM_OBS}/${OPREFIX}1bmhs.tm00.bufr_d${OSUFFIX}} +ESHRS3=${ESHRS3:-${COM_OBS}/${OPREFIX}eshrs3.tm00.bufr_d${OSUFFIX}} +ESAMUA=${ESAMUA:-${COM_OBS}/${OPREFIX}esamua.tm00.bufr_d${OSUFFIX}} +ESAMUB=${ESAMUB:-${COM_OBS}/${OPREFIX}esamub.tm00.bufr_d${OSUFFIX}} +ESMHS=${ESMHS:-${COM_OBS}/${OPREFIX}esmhs.tm00.bufr_d${OSUFFIX}} +HRS3DB=${HRS3DB:-${COM_OBS}/${OPREFIX}hrs3db.tm00.bufr_d${OSUFFIX}} +AMUADB=${AMUADB:-${COM_OBS}/${OPREFIX}amuadb.tm00.bufr_d${OSUFFIX}} +AMUBDB=${AMUBDB:-${COM_OBS}/${OPREFIX}amubdb.tm00.bufr_d${OSUFFIX}} +MHSDB=${MHSDB:-${COM_OBS}/${OPREFIX}mhsdb.tm00.bufr_d${OSUFFIX}} +AIRSBF=${AIRSBF:-${COM_OBS}/${OPREFIX}airsev.tm00.bufr_d${OSUFFIX}} +IASIBF=${IASIBF:-${COM_OBS}/${OPREFIX}mtiasi.tm00.bufr_d${OSUFFIX}} +ESIASI=${ESIASI:-${COM_OBS}/${OPREFIX}esiasi.tm00.bufr_d${OSUFFIX}} 
+IASIDB=${IASIDB:-${COM_OBS}/${OPREFIX}iasidb.tm00.bufr_d${OSUFFIX}} +AMSREBF=${AMSREBF:-${COM_OBS}/${OPREFIX}amsre.tm00.bufr_d${OSUFFIX}} +AMSR2BF=${AMSR2BF:-${COM_OBS}/${OPREFIX}amsr2.tm00.bufr_d${OSUFFIX}} +GMI1CRBF=${GMI1CRBF:-${COM_OBS}/${OPREFIX}gmi1cr.tm00.bufr_d${OSUFFIX}} # GMI temporarily disabled due to array overflow. +SAPHIRBF=${SAPHIRBF:-${COM_OBS}/${OPREFIX}saphir.tm00.bufr_d${OSUFFIX}} +SEVIRIBF=${SEVIRIBF:-${COM_OBS}/${OPREFIX}sevcsr.tm00.bufr_d${OSUFFIX}} +AHIBF=${AHIBF:-${COM_OBS}/${OPREFIX}ahicsr.tm00.bufr_d${OSUFFIX}} +SSTVIIRS=${SSTVIIRS:-${COM_OBS}/${OPREFIX}sstvcw.tm00.bufr_d${OSUFFIX}} +ABIBF=${ABIBF:-${COM_OBS}/${OPREFIX}gsrcsr.tm00.bufr_d${OSUFFIX}} +CRISBF=${CRISBF:-${COM_OBS}/${OPREFIX}cris.tm00.bufr_d${OSUFFIX}} +ESCRIS=${ESCRIS:-${COM_OBS}/${OPREFIX}escris.tm00.bufr_d${OSUFFIX}} +CRISDB=${CRISDB:-${COM_OBS}/${OPREFIX}crisdb.tm00.bufr_d${OSUFFIX}} +CRISFSBF=${CRISFSBF:-${COM_OBS}/${OPREFIX}crisf4.tm00.bufr_d${OSUFFIX}} +ESCRISFS=${ESCRISFS:-${COM_OBS}/${OPREFIX}escrsf.tm00.bufr_d${OSUFFIX}} +CRISFSDB=${CRISFSDB:-${COM_OBS}/${OPREFIX}crsfdb.tm00.bufr_d${OSUFFIX}} +ATMSBF=${ATMSBF:-${COM_OBS}/${OPREFIX}atms.tm00.bufr_d${OSUFFIX}} +ESATMS=${ESATMS:-${COM_OBS}/${OPREFIX}esatms.tm00.bufr_d${OSUFFIX}} +ATMSDB=${ATMSDB:-${COM_OBS}/${OPREFIX}atmsdb.tm00.bufr_d${OSUFFIX}} +SSMITBF=${SSMITBF:-${COM_OBS}/${OPREFIX}ssmit.tm00.bufr_d${OSUFFIX}} +SSMISBF=${SSMISBF:-${COM_OBS}/${OPREFIX}ssmisu.tm00.bufr_d${OSUFFIX}} +SBUVBF=${SBUVBF:-${COM_OBS}/${OPREFIX}osbuv8.tm00.bufr_d${OSUFFIX}} +OMPSNPBF=${OMPSNPBF:-${COM_OBS}/${OPREFIX}ompsn8.tm00.bufr_d${OSUFFIX}} +OMPSTCBF=${OMPSTCBF:-${COM_OBS}/${OPREFIX}ompst8.tm00.bufr_d${OSUFFIX}} +OMPSLPBF=${OMPSLPBF:-${COM_OBS}/${OPREFIX}ompslp.tm00.bufr_d${OSUFFIX}} +GOMEBF=${GOMEBF:-${COM_OBS}/${OPREFIX}gome.tm00.bufr_d${OSUFFIX}} +OMIBF=${OMIBF:-${COM_OBS}/${OPREFIX}omi.tm00.bufr_d${OSUFFIX}} +MLSBF=${MLSBF:-${COM_OBS}/${OPREFIX}mls.tm00.bufr_d${OSUFFIX}} 
+SMIPCP=${SMIPCP:-${COM_OBS}/${OPREFIX}spssmi.tm00.bufr_d${OSUFFIX}} +TMIPCP=${TMIPCP:-${COM_OBS}/${OPREFIX}sptrmm.tm00.bufr_d${OSUFFIX}} +GPSROBF=${GPSROBF:-${COM_OBS}/${OPREFIX}gpsro.tm00.bufr_d${OSUFFIX}} +TCVITL=${TCVITL:-${COM_OBS}/${OPREFIX}syndata.tcvitals.tm00} +B1AVHAM=${B1AVHAM:-${COM_OBS}/${OPREFIX}avcsam.tm00.bufr_d${OSUFFIX}} +B1AVHPM=${B1AVHPM:-${COM_OBS}/${OPREFIX}avcspm.tm00.bufr_d${OSUFFIX}} +HDOB=${HDOB:-${COM_OBS}/${OPREFIX}hdob.tm00.bufr_d${OSUFFIX}} # Guess files GPREFIX=${GPREFIX:-""} GSUFFIX=${GSUFFIX:-".nc"} -SFCG03=${SFCG03:-${COMIN_GES}/${GPREFIX}sfcf003${GSUFFIX}} -SFCG04=${SFCG04:-${COMIN_GES}/${GPREFIX}sfcf004${GSUFFIX}} -SFCG05=${SFCG05:-${COMIN_GES}/${GPREFIX}sfcf005${GSUFFIX}} -SFCGES=${SFCGES:-${COMIN_GES}/${GPREFIX}sfcf006${GSUFFIX}} -SFCG07=${SFCG07:-${COMIN_GES}/${GPREFIX}sfcf007${GSUFFIX}} -SFCG08=${SFCG08:-${COMIN_GES}/${GPREFIX}sfcf008${GSUFFIX}} -SFCG09=${SFCG09:-${COMIN_GES}/${GPREFIX}sfcf009${GSUFFIX}} -ATMG03=${ATMG03:-${COMIN_GES}/${GPREFIX}atmf003${GSUFFIX}} -ATMG04=${ATMG04:-${COMIN_GES}/${GPREFIX}atmf004${GSUFFIX}} -ATMG05=${ATMG05:-${COMIN_GES}/${GPREFIX}atmf005${GSUFFIX}} -ATMGES=${ATMGES:-${COMIN_GES}/${GPREFIX}atmf006${GSUFFIX}} -ATMG07=${ATMG07:-${COMIN_GES}/${GPREFIX}atmf007${GSUFFIX}} -ATMG08=${ATMG08:-${COMIN_GES}/${GPREFIX}atmf008${GSUFFIX}} -ATMG09=${ATMG09:-${COMIN_GES}/${GPREFIX}atmf009${GSUFFIX}} -GBIAS=${GBIAS:-${COMIN_GES}/${GPREFIX}abias} -GBIASPC=${GBIASPC:-${COMIN_GES}/${GPREFIX}abias_pc} -GBIASAIR=${GBIASAIR:-${COMIN_GES}/${GPREFIX}abias_air} -GRADSTAT=${GRADSTAT:-${COMIN_GES}/${GPREFIX}radstat} +SFCG03=${SFCG03:-${COM_ATMOS_HISTORY_PREV}/${GPREFIX}sfcf003${GSUFFIX}} +SFCG04=${SFCG04:-${COM_ATMOS_HISTORY_PREV}/${GPREFIX}sfcf004${GSUFFIX}} +SFCG05=${SFCG05:-${COM_ATMOS_HISTORY_PREV}/${GPREFIX}sfcf005${GSUFFIX}} +SFCGES=${SFCGES:-${COM_ATMOS_HISTORY_PREV}/${GPREFIX}sfcf006${GSUFFIX}} +SFCG07=${SFCG07:-${COM_ATMOS_HISTORY_PREV}/${GPREFIX}sfcf007${GSUFFIX}} 
+SFCG08=${SFCG08:-${COM_ATMOS_HISTORY_PREV}/${GPREFIX}sfcf008${GSUFFIX}} +SFCG09=${SFCG09:-${COM_ATMOS_HISTORY_PREV}/${GPREFIX}sfcf009${GSUFFIX}} +ATMG03=${ATMG03:-${COM_ATMOS_HISTORY_PREV}/${GPREFIX}atmf003${GSUFFIX}} +ATMG04=${ATMG04:-${COM_ATMOS_HISTORY_PREV}/${GPREFIX}atmf004${GSUFFIX}} +ATMG05=${ATMG05:-${COM_ATMOS_HISTORY_PREV}/${GPREFIX}atmf005${GSUFFIX}} +ATMGES=${ATMGES:-${COM_ATMOS_HISTORY_PREV}/${GPREFIX}atmf006${GSUFFIX}} +ATMG07=${ATMG07:-${COM_ATMOS_HISTORY_PREV}/${GPREFIX}atmf007${GSUFFIX}} +ATMG08=${ATMG08:-${COM_ATMOS_HISTORY_PREV}/${GPREFIX}atmf008${GSUFFIX}} +ATMG09=${ATMG09:-${COM_ATMOS_HISTORY_PREV}/${GPREFIX}atmf009${GSUFFIX}} +GBIAS=${GBIAS:-${COM_ATMOS_ANALYSIS_PREV}/${GPREFIX}abias} +GBIASPC=${GBIASPC:-${COM_ATMOS_ANALYSIS_PREV}/${GPREFIX}abias_pc} +GBIASAIR=${GBIASAIR:-${COM_ATMOS_ANALYSIS_PREV}/${GPREFIX}abias_air} +GRADSTAT=${GRADSTAT:-${COM_ATMOS_ANALYSIS_PREV}/${GPREFIX}radstat} # Analysis files export APREFIX=${APREFIX:-""} -SFCANL=${SFCANL:-${COMOUT}/${APREFIX}sfcanl.nc} -DTFANL=${DTFANL:-${COMOUT}/${APREFIX}dtfanl.nc} -ATMANL=${ATMANL:-${COMOUT}/${APREFIX}atmanl.nc} -ABIAS=${ABIAS:-${COMOUT}/${APREFIX}abias} -ABIASPC=${ABIASPC:-${COMOUT}/${APREFIX}abias_pc} -ABIASAIR=${ABIASAIR:-${COMOUT}/${APREFIX}abias_air} -ABIASe=${ABIASe:-${COMOUT}/${APREFIX}abias_int} -RADSTAT=${RADSTAT:-${COMOUT}/${APREFIX}radstat} -GSISTAT=${GSISTAT:-${COMOUT}/${APREFIX}gsistat} -PCPSTAT=${PCPSTAT:-${COMOUT}/${APREFIX}pcpstat} -CNVSTAT=${CNVSTAT:-${COMOUT}/${APREFIX}cnvstat} -OZNSTAT=${OZNSTAT:-${COMOUT}/${APREFIX}oznstat} +SFCANL=${SFCANL:-${COM_ATMOS_ANALYSIS}/${APREFIX}sfcanl.nc} +DTFANL=${DTFANL:-${COM_ATMOS_ANALYSIS}/${APREFIX}dtfanl.nc} +ATMANL=${ATMANL:-${COM_ATMOS_ANALYSIS}/${APREFIX}atmanl.nc} +ABIAS=${ABIAS:-${COM_ATMOS_ANALYSIS}/${APREFIX}abias} +ABIASPC=${ABIASPC:-${COM_ATMOS_ANALYSIS}/${APREFIX}abias_pc} +ABIASAIR=${ABIASAIR:-${COM_ATMOS_ANALYSIS}/${APREFIX}abias_air} +ABIASe=${ABIASe:-${COM_ATMOS_ANALYSIS}/${APREFIX}abias_int} 
+RADSTAT=${RADSTAT:-${COM_ATMOS_ANALYSIS}/${APREFIX}radstat} +GSISTAT=${GSISTAT:-${COM_ATMOS_ANALYSIS}/${APREFIX}gsistat} +PCPSTAT=${PCPSTAT:-${COM_ATMOS_ANALYSIS}/${APREFIX}pcpstat} +CNVSTAT=${CNVSTAT:-${COM_ATMOS_ANALYSIS}/${APREFIX}cnvstat} +OZNSTAT=${OZNSTAT:-${COM_ATMOS_ANALYSIS}/${APREFIX}oznstat} # Increment files -ATMINC=${ATMINC:-${COMOUT}/${APREFIX}atminc.nc} +ATMINC=${ATMINC:-${COM_ATMOS_ANALYSIS}/${APREFIX}atminc.nc} # Obs diag RUN_SELECT=${RUN_SELECT:-"NO"} USE_SELECT=${USE_SELECT:-"NO"} USE_RADSTAT=${USE_RADSTAT:-"YES"} -SELECT_OBS=${SELECT_OBS:-${COMOUT}/${APREFIX}obsinput} +SELECT_OBS=${SELECT_OBS:-${COM_ATMOS_ANALYSIS}/${APREFIX}obsinput} GENDIAG=${GENDIAG:-"YES"} DIAG_SUFFIX=${DIAG_SUFFIX:-""} if [ ${netcdf_diag} = ".true." ] ; then @@ -212,11 +212,11 @@ nm="" if [ ${CFP_MP} = "YES" ]; then nm=0 fi -DIAG_DIR=${DIAG_DIR:-${COMOUT}/gsidiags} +DIAG_DIR=${DIAG_DIR:-${COM_ATMOS_ANALYSIS}/gsidiags} # Set script / GSI control parameters DOHYBVAR=${DOHYBVAR:-"NO"} -NMEM_ENKF=${NMEM_ENKF:-0} +NMEM_ENS=${NMEM_ENS:-0} export DONST=${DONST:-"NO"} NST_GSI=${NST_GSI:-0} NSTINFO=${NSTINFO:-0} @@ -243,8 +243,8 @@ JCAP=${JCAP:--9999} # there is no jcap in these files # Get header information from Ensemble Guess files if [ ${DOHYBVAR} = "YES" ]; then - SFCGES_ENSMEAN=${SFCGES_ENSMEAN:-${COMIN_GES_ENS}/${GPREFIX_ENS}sfcf006.ensmean.nc} - export ATMGES_ENSMEAN=${ATMGES_ENSMEAN:-${COMIN_GES_ENS}/${GPREFIX_ENS}atmf006.ensmean.nc} + SFCGES_ENSMEAN=${SFCGES_ENSMEAN:-${COM_ATMOS_HISTORY_ENS_PREV}/${GPREFIX_ENS}sfcf006.ensmean.nc} + export ATMGES_ENSMEAN=${ATMGES_ENSMEAN:-${COM_ATMOS_HISTORY_ENS_PREV}/${GPREFIX_ENS}atmf006.ensmean.nc} LONB_ENKF=${LONB_ENKF:-$(${NCLEN} ${ATMGES_ENSMEAN} grid_xt)} # get LONB_ENKF LATB_ENKF=${LATB_ENKF:-$(${NCLEN} ${ATMGES_ENSMEAN} grid_yt)} # get LATB_ENFK LEVS_ENKF=${LEVS_ENKF:-$(${NCLEN} ${ATMGES_ENSMEAN} pfull)} # get LATB_ENFK @@ -337,18 +337,18 @@ fi # Set 4D-EnVar specific variables if [ ${DOHYBVAR} = "YES" -a ${l4densvar} = ".true." 
-a ${lwrite4danl} = ".true." ]; then - ATMA03=${ATMA03:-${COMOUT}/${APREFIX}atma003.nc} - ATMI03=${ATMI03:-${COMOUT}/${APREFIX}atmi003.nc} - ATMA04=${ATMA04:-${COMOUT}/${APREFIX}atma004.nc} - ATMI04=${ATMI04:-${COMOUT}/${APREFIX}atmi004.nc} - ATMA05=${ATMA05:-${COMOUT}/${APREFIX}atma005.nc} - ATMI05=${ATMI05:-${COMOUT}/${APREFIX}atmi005.nc} - ATMA07=${ATMA07:-${COMOUT}/${APREFIX}atma007.nc} - ATMI07=${ATMI07:-${COMOUT}/${APREFIX}atmi007.nc} - ATMA08=${ATMA08:-${COMOUT}/${APREFIX}atma008.nc} - ATMI08=${ATMI08:-${COMOUT}/${APREFIX}atmi008.nc} - ATMA09=${ATMA09:-${COMOUT}/${APREFIX}atma009.nc} - ATMI09=${ATMI09:-${COMOUT}/${APREFIX}atmi009.nc} + ATMA03=${ATMA03:-${COM_ATMOS_ANALYSIS}/${APREFIX}atma003.nc} + ATMI03=${ATMI03:-${COM_ATMOS_ANALYSIS}/${APREFIX}atmi003.nc} + ATMA04=${ATMA04:-${COM_ATMOS_ANALYSIS}/${APREFIX}atma004.nc} + ATMI04=${ATMI04:-${COM_ATMOS_ANALYSIS}/${APREFIX}atmi004.nc} + ATMA05=${ATMA05:-${COM_ATMOS_ANALYSIS}/${APREFIX}atma005.nc} + ATMI05=${ATMI05:-${COM_ATMOS_ANALYSIS}/${APREFIX}atmi005.nc} + ATMA07=${ATMA07:-${COM_ATMOS_ANALYSIS}/${APREFIX}atma007.nc} + ATMI07=${ATMI07:-${COM_ATMOS_ANALYSIS}/${APREFIX}atmi007.nc} + ATMA08=${ATMA08:-${COM_ATMOS_ANALYSIS}/${APREFIX}atma008.nc} + ATMI08=${ATMI08:-${COM_ATMOS_ANALYSIS}/${APREFIX}atmi008.nc} + ATMA09=${ATMA09:-${COM_ATMOS_ANALYSIS}/${APREFIX}atma009.nc} + ATMI09=${ATMI09:-${COM_ATMOS_ANALYSIS}/${APREFIX}atmi009.nc} fi ################################################################################ @@ -423,9 +423,7 @@ ${NLN} ${RTMFIX}/NPOESS.VISsnow.EmisCoeff.bin ./crtm_coeffs/NPOESS.VISsnow.Emis ${NLN} ${RTMFIX}/NPOESS.VISwater.EmisCoeff.bin ./crtm_coeffs/NPOESS.VISwater.EmisCoeff.bin ${NLN} ${RTMFIX}/FASTEM6.MWwater.EmisCoeff.bin ./crtm_coeffs/FASTEM6.MWwater.EmisCoeff.bin ${NLN} ${RTMFIX}/AerosolCoeff.bin ./crtm_coeffs/AerosolCoeff.bin -${NLN} ${RTMFIX}/CloudCoeff.bin ./crtm_coeffs/CloudCoeff.bin -#$NLN $RTMFIX/CloudCoeff.GFDLFV3.-109z-1.bin ./crtm_coeffs/CloudCoeff.bin - +${NLN} 
${RTMFIX}/CloudCoeff.GFDLFV3.-109z-1.bin ./crtm_coeffs/CloudCoeff.bin ############################################################## # Observational data @@ -532,12 +530,14 @@ if [ ${DOHYBVAR} = "YES" ]; then nhr_obsbin=1 fi - for imem in $(seq 1 ${NMEM_ENKF}); do - memchar="mem"$(printf %03i ${imem}) + for imem in $(seq 1 ${NMEM_ENS}); do + memchar="mem$(printf %03i "${imem}")" + MEMDIR=${memchar} RUN=${GDUMP_ENS} YMD=${gPDY} HH=${gcyc} generate_com COM_ATMOS_HISTORY + for fhr in ${fhrs}; do - ${NLN} ${COMIN_GES_ENS}/${memchar}/atmos/${GPREFIX_ENS}atmf0${fhr}${ENKF_SUFFIX}.nc ./ensemble_data/sigf${fhr}_ens_${memchar} + ${NLN} ${COM_ATMOS_HISTORY}/${GPREFIX_ENS}atmf0${fhr}${ENKF_SUFFIX}.nc ./ensemble_data/sigf${fhr}_ens_${memchar} if [ ${cnvw_option} = ".true." ]; then - ${NLN} ${COMIN_GES_ENS}/${memchar}/atmos/${GPREFIX_ENS}sfcf0${fhr}.nc ./ensemble_data/sfcf${fhr}_ens_${memchar} + ${NLN} ${COM_ATMOS_HISTORY}/${GPREFIX_ENS}sfcf0${fhr}.nc ./ensemble_data/sfcf${fhr}_ens_${memchar} fi done done @@ -673,7 +673,7 @@ fi # if [ $USE_RADSTAT = "YES" ] ############################################################## # GSI Namelist options if [ ${DOHYBVAR} = "YES" ]; then - HYBRID_ENSEMBLE="n_ens=${NMEM_ENKF},jcap_ens=${JCAP_ENKF},nlat_ens=${NLAT_ENKF},nlon_ens=${NLON_ENKF},jcap_ens_test=${JCAP_ENKF},${HYBRID_ENSEMBLE}" + HYBRID_ENSEMBLE="n_ens=${NMEM_ENS},jcap_ens=${JCAP_ENKF},nlat_ens=${NLAT_ENKF},nlon_ens=${NLON_ENKF},jcap_ens_test=${JCAP_ENKF},${HYBRID_ENSEMBLE}" if [ ${l4densvar} = ".true." 
]; then SETUP="niter(1)=50,niter(2)=150,niter_no_qc(1)=25,niter_no_qc(2)=0,thin4d=.true.,ens_nstarthr=3,l4densvar=${l4densvar},lwrite4danl=${lwrite4danl},${SETUP}" JCOPTS="ljc4tlevs=.true.,${JCOPTS}" @@ -700,7 +700,7 @@ cat > gsiparm.anl << EOF iguess=-1, tzr_qc=${TZR_QC}, oneobtest=.false.,retrieval=.false.,l_foto=.false., - use_pbl=.false.,use_compress=.true.,nsig_ext=12,gpstop=50.,commgpstop=45.,commgpserrinf=1.0, + use_pbl=.false.,use_compress=.true.,nsig_ext=45,gpstop=50.,commgpstop=45.,commgpserrinf=1.0, use_gfs_nemsio=.false.,use_gfs_ncio=.true.,sfcnst_comb=.true., use_readin_anl_sfcmask=${USE_READIN_ANL_SFCMASK}, lrun_subdirs=${lrun_subdirs}, @@ -753,7 +753,7 @@ cat > gsiparm.anl << EOF ${OBSQC} / &OBS_INPUT - dmesh(1)=145.0,dmesh(2)=150.0,dmesh(3)=100.0,dmesh(4)=25.0,time_window_max=3.0, + dmesh(1)=145.0,dmesh(2)=150.0,dmesh(3)=100.0,dmesh(4)=50.0,time_window_max=3.0, ${OBSINPUT} / OBS_INPUT:: @@ -976,7 +976,7 @@ cd ${pwd} if [ ${SENDECF} = "YES" -a "${RUN}" != "enkf" ]; then ecflow_client --event release_fcst fi -echo "${CDUMP} ${CDATE} atminc done at $(date)" > ${COMOUT}/${APREFIX}loginc.txt +echo "${CDUMP} ${CDATE} atminc done at $(date)" > ${COM_ATMOS_ANALYSIS}/${APREFIX}loginc.txt ################################################################################ diff --git a/scripts/exglobal_atmos_analysis_calc.sh b/scripts/exglobal_atmos_analysis_calc.sh index 5c005ac892..b353d3c52b 100755 --- a/scripts/exglobal_atmos_analysis_calc.sh +++ b/scripts/exglobal_atmos_analysis_calc.sh @@ -26,18 +26,9 @@ pwd=$(pwd) export FIXgsm=${FIXgsm:-$HOMEgfs/fix/am} # Base variables -CDATE=${CDATE:-"2001010100"} CDUMP=${CDUMP:-"gdas"} GDUMP=${GDUMP:-"gdas"} -# Derived base variables -GDATE=$($NDATE -$assim_freq $CDATE) -BDATE=$($NDATE -3 $CDATE) -PDY=$(echo $CDATE | cut -c1-8) -cyc=$(echo $CDATE | cut -c9-10) -bPDY=$(echo $BDATE | cut -c1-8) -bcyc=$(echo $BDATE | cut -c9-10) - # Utilities export NCP=${NCP:-"/bin/cp"} export NMV=${NMV:-"/bin/mv"} @@ -82,22 +73,22 @@ 
SENDDBN=${SENDDBN:-"NO"} # Guess files GPREFIX=${GPREFIX:-""} -ATMG03=${ATMG03:-${COMIN_GES}/${GPREFIX}atmf003.nc} -ATMG04=${ATMG04:-${COMIN_GES}/${GPREFIX}atmf004.nc} -ATMG05=${ATMG05:-${COMIN_GES}/${GPREFIX}atmf005.nc} -ATMGES=${ATMGES:-${COMIN_GES}/${GPREFIX}atmf006.nc} -ATMG07=${ATMG07:-${COMIN_GES}/${GPREFIX}atmf007.nc} -ATMG08=${ATMG08:-${COMIN_GES}/${GPREFIX}atmf008.nc} -ATMG09=${ATMG09:-${COMIN_GES}/${GPREFIX}atmf009.nc} +ATMG03=${ATMG03:-${COM_ATMOS_HISTORY_PREV}/${GPREFIX}atmf003.nc} +ATMG04=${ATMG04:-${COM_ATMOS_HISTORY_PREV}/${GPREFIX}atmf004.nc} +ATMG05=${ATMG05:-${COM_ATMOS_HISTORY_PREV}/${GPREFIX}atmf005.nc} +ATMGES=${ATMGES:-${COM_ATMOS_HISTORY_PREV}/${GPREFIX}atmf006.nc} +ATMG07=${ATMG07:-${COM_ATMOS_HISTORY_PREV}/${GPREFIX}atmf007.nc} +ATMG08=${ATMG08:-${COM_ATMOS_HISTORY_PREV}/${GPREFIX}atmf008.nc} +ATMG09=${ATMG09:-${COM_ATMOS_HISTORY_PREV}/${GPREFIX}atmf009.nc} # Analysis files export APREFIX=${APREFIX:-""} -SFCANL=${SFCANL:-${COMOUT}/${APREFIX}sfcanl.nc} -DTFANL=${DTFANL:-${COMOUT}/${APREFIX}dtfanl.nc} -ATMANL=${ATMANL:-${COMOUT}/${APREFIX}atmanl.nc} +SFCANL=${SFCANL:-${COM_ATMOS_ANALYSIS}/${APREFIX}sfcanl.nc} +DTFANL=${DTFANL:-${COM_ATMOS_ANALYSIS}/${APREFIX}dtfanl.nc} +ATMANL=${ATMANL:-${COM_ATMOS_ANALYSIS}/${APREFIX}atmanl.nc} # Increment files -ATMINC=${ATMINC:-${COMOUT}/${APREFIX}atminc.nc} +ATMINC=${ATMINC:-${COM_ATMOS_ANALYSIS}/${APREFIX}atminc.nc} # Set script / GSI control parameters DOHYBVAR=${DOHYBVAR:-"NO"} @@ -114,18 +105,18 @@ fi # Set 4D-EnVar specific variables if [ $DOHYBVAR = "YES" -a $l4densvar = ".true." -a $lwrite4danl = ".true." 
]; then - ATMA03=${ATMA03:-${COMOUT}/${APREFIX}atma003.nc} - ATMI03=${ATMI03:-${COMOUT}/${APREFIX}atmi003.nc} - ATMA04=${ATMA04:-${COMOUT}/${APREFIX}atma004.nc} - ATMI04=${ATMI04:-${COMOUT}/${APREFIX}atmi004.nc} - ATMA05=${ATMA05:-${COMOUT}/${APREFIX}atma005.nc} - ATMI05=${ATMI05:-${COMOUT}/${APREFIX}atmi005.nc} - ATMA07=${ATMA07:-${COMOUT}/${APREFIX}atma007.nc} - ATMI07=${ATMI07:-${COMOUT}/${APREFIX}atmi007.nc} - ATMA08=${ATMA08:-${COMOUT}/${APREFIX}atma008.nc} - ATMI08=${ATMI08:-${COMOUT}/${APREFIX}atmi008.nc} - ATMA09=${ATMA09:-${COMOUT}/${APREFIX}atma009.nc} - ATMI09=${ATMI09:-${COMOUT}/${APREFIX}atmi009.nc} + ATMA03=${ATMA03:-${COM_ATMOS_ANALYSIS}/${APREFIX}atma003.nc} + ATMI03=${ATMI03:-${COM_ATMOS_ANALYSIS}/${APREFIX}atmi003.nc} + ATMA04=${ATMA04:-${COM_ATMOS_ANALYSIS}/${APREFIX}atma004.nc} + ATMI04=${ATMI04:-${COM_ATMOS_ANALYSIS}/${APREFIX}atmi004.nc} + ATMA05=${ATMA05:-${COM_ATMOS_ANALYSIS}/${APREFIX}atma005.nc} + ATMI05=${ATMI05:-${COM_ATMOS_ANALYSIS}/${APREFIX}atmi005.nc} + ATMA07=${ATMA07:-${COM_ATMOS_ANALYSIS}/${APREFIX}atma007.nc} + ATMI07=${ATMI07:-${COM_ATMOS_ANALYSIS}/${APREFIX}atmi007.nc} + ATMA08=${ATMA08:-${COM_ATMOS_ANALYSIS}/${APREFIX}atma008.nc} + ATMI08=${ATMI08:-${COM_ATMOS_ANALYSIS}/${APREFIX}atmi008.nc} + ATMA09=${ATMA09:-${COM_ATMOS_ANALYSIS}/${APREFIX}atma009.nc} + ATMI09=${ATMI09:-${COM_ATMOS_ANALYSIS}/${APREFIX}atmi009.nc} fi ################################################################################ @@ -194,7 +185,7 @@ if [ $DOGAUSFCANL = "YES" ]; then export err=$?; err_chk fi -echo "$CDUMP $CDATE atmanl and sfcanl done at $(date)" > $COMOUT/${APREFIX}loganl.txt +echo "${CDUMP} ${PDY}${cyc} atmanl and sfcanl done at $(date)" > "${COM_ATMOS_ANALYSIS}/${APREFIX}loganl.txt" ################################################################################ # Postprocessing diff --git a/scripts/exglobal_atmos_sfcanl.sh b/scripts/exglobal_atmos_sfcanl.sh index 7b76bc5359..f173886a07 100755 --- a/scripts/exglobal_atmos_sfcanl.sh +++ 
b/scripts/exglobal_atmos_sfcanl.sh @@ -24,18 +24,13 @@ source "${HOMEgfs}/ush/preamble.sh" # Directories. pwd=$(pwd) -# Base variables -CDATE=${CDATE:-"2001010100"} -CDUMP=${CDUMP:-"gdas"} -GDUMP=${GDUMP:-"gdas"} - # Derived base variables -GDATE=$(${NDATE} -${assim_freq} ${CDATE}) -BDATE=$(${NDATE} -3 ${CDATE}) -PDY=$(echo ${CDATE} | cut -c1-8) -cyc=$(echo ${CDATE} | cut -c9-10) -bPDY=$(echo ${BDATE} | cut -c1-8) -bcyc=$(echo ${BDATE} | cut -c9-10) +# Ignore possible spelling error (nothing is misspelled) +# shellcheck disable=SC2153 +GDATE=$(${NDATE} -"${assim_freq}" "${PDY}${cyc}") +BDATE=$(${NDATE} -3 "${PDY}${cyc}") +bPDY=${BDATE:0:8} +bcyc=${BDATE:8:2} # Utilities export NCP=${NCP:-"/bin/cp"} @@ -104,7 +99,7 @@ GPREFIX=${GPREFIX:-""} # Analysis files export APREFIX=${APREFIX:-""} -DTFANL=${DTFANL:-${COMOUT}/${APREFIX}dtfanl.nc} +DTFANL=${DTFANL:-${COM_ATMOS_ANALYSIS}/${APREFIX}dtfanl.nc} # Get dimension information based on CASE res=$(echo ${CASE} | cut -c2-) @@ -123,7 +118,7 @@ fi cd ${DATA} || exit 99 if [[ ${DONST} = "YES" ]]; then - export NSSTBF="${COMIN_OBS}/${OPREFIX}nsstbufr" + export NSSTBF="${COM_OBS}/${OPREFIX}nsstbufr" ${NLN} ${NSSTBF} nsstbufr fi @@ -141,15 +136,15 @@ fi ############################################################## # Update surface fields in the FV3 restart's using global_cycle -mkdir -p ${COMOUT}/RESTART +mkdir -p "${COM_ATMOS_RESTART}" # Global cycle requires these files -export FNTSFA=${FNTSFA:-${COMIN_OBS}/${OPREFIX}rtgssthr.grb} -export FNACNA=${FNACNA:-${COMIN}/${OPREFIX}seaice.5min.blend.grb} -export FNSNOA=${FNSNOA:-${COMIN}/${OPREFIX}snogrb_t${JCAP_CASE}.${LONB_CASE}.${LATB_CASE}} -[[ ! -f ${FNSNOA} ]] && export FNSNOA="${COMIN}/${OPREFIX}snogrb_t1534.3072.1536" -FNSNOG=${FNSNOG:-${COMIN_GES}/${GPREFIX}snogrb_t${JCAP_CASE}.${LONB_CASE}.${LATB_CASE}} -[[ ! 
-f ${FNSNOG} ]] && FNSNOG="${COMIN_GES}/${GPREFIX}snogrb_t1534.3072.1536" +export FNTSFA=${FNTSFA:-${COM_OBS}/${OPREFIX}rtgssthr.grb} +export FNACNA=${FNACNA:-${COM_OBS}/${OPREFIX}seaice.5min.blend.grb} +export FNSNOA=${FNSNOA:-${COM_OBS}/${OPREFIX}snogrb_t${JCAP_CASE}.${LONB_CASE}.${LATB_CASE}} +[[ ! -f ${FNSNOA} ]] && export FNSNOA="${COM_OBS}/${OPREFIX}snogrb_t1534.3072.1536" +FNSNOG=${FNSNOG:-${COM_OBS_PREV}/${GPREFIX}snogrb_t${JCAP_CASE}.${LONB_CASE}.${LATB_CASE}} +[[ ! -f ${FNSNOG} ]] && FNSNOG="${COM_OBS_PREV}/${GPREFIX}snogrb_t1534.3072.1536" # Set CYCLVARS by checking grib date of current snogrb vs that of prev cycle if [[ ${RUN_GETGES} = "YES" ]]; then @@ -168,7 +163,7 @@ else fi if [[ ${DONST} = "YES" ]]; then - export NST_FILE=${GSI_FILE:-${COMOUT}/${APREFIX}dtfanl.nc} + export NST_FILE=${GSI_FILE:-${COM_ATMOS_ANALYSIS}/${APREFIX}dtfanl.nc} else export NST_FILE="NULL" fi @@ -177,35 +172,42 @@ if [[ ${DOIAU} = "YES" ]]; then # update surface restarts at the beginning of the window, if IAU # For now assume/hold dtfanl.nc valid at beginning of window for n in $(seq 1 ${ntiles}); do - ${NCP} ${COMIN_GES}/RESTART/${bPDY}.${bcyc}0000.sfc_data.tile${n}.nc ${COMOUT}/RESTART/${bPDY}.${bcyc}0000.sfcanl_data.tile${n}.nc - ${NLN} ${COMIN_GES}/RESTART/${bPDY}.${bcyc}0000.sfc_data.tile${n}.nc ${DATA}/fnbgsi.00${n} - ${NLN} ${COMOUT}/RESTART/${bPDY}.${bcyc}0000.sfcanl_data.tile${n}.nc ${DATA}/fnbgso.00${n} - ${NLN} ${FIXfv3}/${CASE}/${CASE}_grid.tile${n}.nc ${DATA}/fngrid.00${n} - ${NLN} ${FIXfv3}/${CASE}/${CASE}_oro_data.tile${n}.nc ${DATA}/fnorog.00${n} + ${NCP} "${COM_ATMOS_RESTART_PREV}/${bPDY}.${bcyc}0000.sfc_data.tile${n}.nc" \ + "${COM_ATMOS_RESTART}/${bPDY}.${bcyc}0000.sfcanl_data.tile${n}.nc" + ${NLN} "${COM_ATMOS_RESTART_PREV}/${bPDY}.${bcyc}0000.sfc_data.tile${n}.nc" "${DATA}/fnbgsi.00${n}" + ${NLN} "${COM_ATMOS_RESTART}/${bPDY}.${bcyc}0000.sfcanl_data.tile${n}.nc" "${DATA}/fnbgso.00${n}" + ${NLN} "${FIXfv3}/${CASE}/${CASE}_grid.tile${n}.nc" 
"${DATA}/fngrid.00${n}" + ${NLN} "${FIXfv3}/${CASE}/${CASE}_oro_data.tile${n}.nc" "${DATA}/fnorog.00${n}" done export APRUNCY=${APRUN_CYCLE} export OMP_NUM_THREADS_CY=${NTHREADS_CYCLE} export MAX_TASKS_CY=${ntiles} - ${CYCLESH} + CDATE="${PDY}${cyc}" ${CYCLESH} export err=$?; err_chk fi # Update surface restarts at middle of window for n in $(seq 1 ${ntiles}); do - ${NCP} ${COMIN_GES}/RESTART/${PDY}.${cyc}0000.sfc_data.tile${n}.nc ${COMOUT}/RESTART/${PDY}.${cyc}0000.sfcanl_data.tile${n}.nc - ${NLN} ${COMIN_GES}/RESTART/${PDY}.${cyc}0000.sfc_data.tile${n}.nc ${DATA}/fnbgsi.00${n} - ${NLN} ${COMOUT}/RESTART/${PDY}.${cyc}0000.sfcanl_data.tile${n}.nc ${DATA}/fnbgso.00${n} - ${NLN} ${FIXfv3}/${CASE}/${CASE}_grid.tile${n}.nc ${DATA}/fngrid.00${n} - ${NLN} ${FIXfv3}/${CASE}/${CASE}_oro_data.tile${n}.nc ${DATA}/fnorog.00${n} + if [[ ${DO_JEDILANDDA:-"NO"} = "YES" ]]; then + ${NCP} "${COM_LAND_ANALYSIS}/${PDY}.${cyc}0000.sfc_data.tile${n}.nc" \ + "${COM_ATMOS_RESTART}/${PDY}.${cyc}0000.sfcanl_data.tile${n}.nc" + else + ${NCP} "${COM_ATMOS_RESTART_PREV}/${PDY}.${cyc}0000.sfc_data.tile${n}.nc" \ + "${COM_ATMOS_RESTART}/${PDY}.${cyc}0000.sfcanl_data.tile${n}.nc" + fi + ${NLN} "${COM_ATMOS_RESTART_PREV}/${PDY}.${cyc}0000.sfc_data.tile${n}.nc" "${DATA}/fnbgsi.00${n}" + ${NLN} "${COM_ATMOS_RESTART}/${PDY}.${cyc}0000.sfcanl_data.tile${n}.nc" "${DATA}/fnbgso.00${n}" + ${NLN} "${FIXfv3}/${CASE}/${CASE}_grid.tile${n}.nc" "${DATA}/fngrid.00${n}" + ${NLN} "${FIXfv3}/${CASE}/${CASE}_oro_data.tile${n}.nc" "${DATA}/fnorog.00${n}" done export APRUNCY=${APRUN_CYCLE} export OMP_NUM_THREADS_CY=${NTHREADS_CYCLE} export MAX_TASKS_CY=${ntiles} -${CYCLESH} +CDATE="${PDY}${cyc}" ${CYCLESH} export err=$?; err_chk diff --git a/scripts/exglobal_atmos_tropcy_qc_reloc.sh b/scripts/exglobal_atmos_tropcy_qc_reloc.sh index 490a5bba4e..380441a6c9 100755 --- a/scripts/exglobal_atmos_tropcy_qc_reloc.sh +++ b/scripts/exglobal_atmos_tropcy_qc_reloc.sh @@ -17,8 +17,6 @@ cd $DATA cat break > $pgmout -export 
COMSP=$COMOUT/${RUN}.${cycle}. - tmhr=$(echo $tmmark|cut -c3-4) cdate10=$( ${NDATE:?} -$tmhr $PDY$cyc) @@ -60,7 +58,7 @@ if [ "$PROCESS_TROPCY" = 'YES' ]; then fi - cd $COMOUT + cd "${COM_OBS}" || exit 1 pwd ls -ltr *syndata* cd $ARCHSYND @@ -79,10 +77,10 @@ else # don't want to wipe out these files) # - [ ! -s ${COMSP}syndata.tcvitals.$tmmark ] && \ - cp /dev/null ${COMSP}syndata.tcvitals.$tmmark - [ ! -s ${COMSP}jtwc-fnoc.tcvitals.$tmmark ] && \ - cp /dev/null ${COMSP}jtwc-fnoc.tcvitals.$tmmark + [ ! -s "${COM_OBS}/${RUN}.t${cyc}z.syndata.tcvitals.${tmmark}" ] && \ + cp "/dev/null" "${COM_OBS}/${RUN}.t${cyc}z.syndata.tcvitals.${tmmark}" + [ ! -s "${COM_OBS}/${RUN}.t${cyc}z.jtwc-fnoc.tcvitals.${tmmark}" ] && \ + cp "/dev/null" "${COM_OBS}/${RUN}.t${cyc}z.jtwc-fnoc.tcvitals.${tmmark}" # endif loop $PROCESS_TROPCY fi @@ -110,25 +108,25 @@ if [ "$DO_RELOCATE" = 'YES' ]; then [ $RUN = gfs -o $RUN = gdas -o $NET = cfs ] && qual_last="" if [ $BKGFREQ -eq 1 ]; then - [ -s sgm3prep ] && cp sgm3prep ${COMSP}sgm3prep${qual_last} - [ -s sgm2prep ] && cp sgm2prep ${COMSP}sgm2prep${qual_last} - [ -s sgm1prep ] && cp sgm1prep ${COMSP}sgm1prep${qual_last} - [ -s sgesprep ] && cp sgesprep ${COMSP}sgesprep${qual_last} - [ -s sgp1prep ] && cp sgp1prep ${COMSP}sgp1prep${qual_last} - [ -s sgp2prep ] && cp sgp2prep ${COMSP}sgp2prep${qual_last} - [ -s sgp3prep ] && cp sgp3prep ${COMSP}sgp3prep${qual_last} + if [[ -s sgm3prep ]]; then cp "sgm3prep" "${COM_OBS}/${RUN}.t${cyc}z.sgm3prep${qual_last}"; fi + if [[ -s sgm2prep ]]; then cp "sgm2prep" "${COM_OBS}/${RUN}.t${cyc}z.sgm2prep${qual_last}"; fi + if [[ -s sgm1prep ]]; then cp "sgm1prep" "${COM_OBS}/${RUN}.t${cyc}z.sgm1prep${qual_last}"; fi + if [[ -s sgesprep ]]; then cp "sgesprep" "${COM_OBS}/${RUN}.t${cyc}z.sgesprep${qual_last}"; fi + if [[ -s sgp1prep ]]; then cp "sgp1prep" "${COM_OBS}/${RUN}.t${cyc}z.sgp1prep${qual_last}"; fi + if [[ -s sgp2prep ]]; then cp "sgp2prep" "${COM_OBS}/${RUN}.t${cyc}z.sgp2prep${qual_last}"; fi + if [[ 
-s sgp3prep ]]; then cp "sgp3prep" "${COM_OBS}/${RUN}.t${cyc}z.sgp3prep${qual_last}"; fi elif [ $BKGFREQ -eq 3 ]; then - [ -s sgm3prep ] && cp sgm3prep ${COMSP}sgm3prep${qual_last} - [ -s sgesprep ] && cp sgesprep ${COMSP}sgesprep${qual_last} - [ -s sgp3prep ] && cp sgp3prep ${COMSP}sgp3prep${qual_last} + if [[ -s sgm3prep ]]; then cp "sgm3prep" "${COM_OBS}/${RUN}.t${cyc}z.sgm3prep${qual_last}"; fi + if [[ -s sgesprep ]]; then cp "sgesprep" "${COM_OBS}/${RUN}.t${cyc}z.sgesprep${qual_last}"; fi + if [[ -s sgp3prep ]]; then cp "sgp3prep" "${COM_OBS}/${RUN}.t${cyc}z.sgp3prep${qual_last}"; fi fi -# The existence of ${COMSP}tropcy_relocation_status.$tmmark file will tell the +# The existence of ${COM_OBS}/${RUN}.t${cyc}z.tropcy_relocation_status.$tmmark file will tell the # subsequent PREP processing that RELOCATION processing occurred, if this file # does not already exist at this point, echo "RECORDS PROCESSED" into it to # further tell PREP processing that records were processed by relocation and # the global sigma guess was modified by tropical cyclone relocation -# Note: If ${COMSP}tropcy_relocation_status.$tmmark already exists at this +# Note: If ${COM_OBS}/${RUN}.t${cyc}z.tropcy_relocation_status.$tmmark already exists at this # point it means that it contains the string "NO RECORDS to process" # and was created by the child script tropcy_relocate.sh because records # were not processed by relocation and the global sigma guess was NOT @@ -136,8 +134,9 @@ if [ "$DO_RELOCATE" = 'YES' ]; then # were found in the relocation step) # ---------------------------------------------------------------------------- - [ ! -s ${COMSP}tropcy_relocation_status.$tmmark ] && \ - echo "RECORDS PROCESSED" > ${COMSP}tropcy_relocation_status.$tmmark + if [[ ! 
-s "${COM_OBS}/${RUN}.t${cyc}z.tropcy_relocation_status.${tmmark}" ]]; then + echo "RECORDS PROCESSED" > "${COM_OBS}/${RUN}.t${cyc}z.tropcy_relocation_status.${tmmark}" + fi # endif loop $DO_RELOCATE fi diff --git a/scripts/exglobal_diag.sh b/scripts/exglobal_diag.sh index 3ff07d48f6..3aa1093fad 100755 --- a/scripts/exglobal_diag.sh +++ b/scripts/exglobal_diag.sh @@ -25,18 +25,10 @@ source "$HOMEgfs/ush/preamble.sh" pwd=$(pwd) # Base variables -CDATE=${CDATE:-"2001010100"} +CDATE="${PDY}${cyc}" CDUMP=${CDUMP:-"gdas"} GDUMP=${GDUMP:-"gdas"} -# Derived base variables -GDATE=$($NDATE -$assim_freq $CDATE) -BDATE=$($NDATE -3 $CDATE) -PDY=$(echo $CDATE | cut -c1-8) -cyc=$(echo $CDATE | cut -c9-10) -bPDY=$(echo $BDATE | cut -c1-8) -bcyc=$(echo $BDATE | cut -c9-10) - # Utilities export NCP=${NCP:-"/bin/cp"} export NMV=${NMV:-"/bin/mv"} @@ -61,10 +53,10 @@ SENDDBN=${SENDDBN:-"NO"} # Analysis files export APREFIX=${APREFIX:-""} -RADSTAT=${RADSTAT:-${COMOUT}/${APREFIX}radstat} -PCPSTAT=${PCPSTAT:-${COMOUT}/${APREFIX}pcpstat} -CNVSTAT=${CNVSTAT:-${COMOUT}/${APREFIX}cnvstat} -OZNSTAT=${OZNSTAT:-${COMOUT}/${APREFIX}oznstat} +RADSTAT=${RADSTAT:-${COM_ATMOS_ANALYSIS}/${APREFIX}radstat} +PCPSTAT=${PCPSTAT:-${COM_ATMOS_ANALYSIS}/${APREFIX}pcpstat} +CNVSTAT=${CNVSTAT:-${COM_ATMOS_ANALYSIS}/${APREFIX}cnvstat} +OZNSTAT=${OZNSTAT:-${COM_ATMOS_ANALYSIS}/${APREFIX}oznstat} # Remove stat file if file already exists [[ -s $RADSTAT ]] && rm -f $RADSTAT @@ -86,7 +78,7 @@ nm="" if [ $CFP_MP = "YES" ]; then nm=0 fi -DIAG_DIR=${DIAG_DIR:-${COMOUT}/gsidiags} +DIAG_DIR=${DIAG_DIR:-${COM_ATMOS_ANALYSIS}/gsidiags} REMOVE_DIAG_DIR=${REMOVE_DIAG_DIR:-"NO"} # Set script / GSI control parameters diff --git a/scripts/exglobal_forecast.sh b/scripts/exglobal_forecast.sh index 8bd6776dda..d86691d5ec 100755 --- a/scripts/exglobal_forecast.sh +++ b/scripts/exglobal_forecast.sh @@ -77,9 +77,9 @@ # Main body starts here ####################### -source "$HOMEgfs/ush/preamble.sh" +source 
"${HOMEgfs}/ush/preamble.sh" -SCRIPTDIR=$(dirname $(readlink -f "$0") )/../ush +SCRIPTDIR="${HOMEgfs}/ush" echo "MAIN: environment loaded for $machine platform,Current Script locates in $SCRIPTDIR." # include all subroutines. Executions later. diff --git a/scripts/run_reg2grb2.sh b/scripts/run_reg2grb2.sh index 668368056e..ab2c80043e 100755 --- a/scripts/run_reg2grb2.sh +++ b/scripts/run_reg2grb2.sh @@ -1,11 +1,11 @@ #! /usr/bin/env bash -source "$HOMEgfs/ush/preamble.sh" +source "${HOMEgfs}/ush/preamble.sh" #requires grib_util module -MOM6REGRID=${MOM6REGRID:-$HOMEgfs} -export mask_file=$MOM6REGRID/fix/reg2grb2/mask.0p25x0p25.grb2 +MOM6REGRID=${MOM6REGRID:-${HOMEgfs}} +export mask_file="${MOM6REGRID}/fix/reg2grb2/mask.0p25x0p25.grb2" # offline testing: #export DATA= @@ -14,25 +14,25 @@ export mask_file=$MOM6REGRID/fix/reg2grb2/mask.0p25x0p25.grb2 #export outfile=$DATA/DATA0p5/out/ocnh2012010106.01.2012010100.grb2 # # workflow testing: -export icefile=icer${CDATE}.${ENSMEM}.${IDATE}_0p25x0p25_CICE.nc -export ocnfile=ocnr${CDATE}.${ENSMEM}.${IDATE}_0p25x0p25_MOM6.nc -export outfile=ocn_ice${CDATE}.${ENSMEM}.${IDATE}_0p25x0p25.grb2 -export outfile0p5=ocn_ice${CDATE}.${ENSMEM}.${IDATE}_0p5x0p5.grb2 +export icefile="icer${VDATE}.${ENSMEM}.${IDATE}_0p25x0p25_CICE.nc" +export ocnfile="ocnr${VDATE}.${ENSMEM}.${IDATE}_0p25x0p25_MOM6.nc" +export outfile="ocn_ice${VDATE}.${ENSMEM}.${IDATE}_0p25x0p25.grb2" +export outfile0p5="ocn_ice${VDATE}.${ENSMEM}.${IDATE}_0p5x0p5.grb2" export mfcstcpl=${mfcstcpl:-1} export IGEN_OCNP=${IGEN_OCNP:-197} # PT This is the forecast date -export year=$(echo $CDATE | cut -c1-4) -export month=$(echo $CDATE | cut -c5-6) -export day=$(echo $CDATE | cut -c7-8) -export hour=$(echo $CDATE | cut -c9-10) +export year=${VDATE:0:4} +export month=${VDATE:4:2} +export day=${VDATE:6:2} +export hour=${VDATE:8:2} # PT This is the initialization date -export syear=$(echo $IDATE | cut -c1-4) -export smonth=$(echo $IDATE | cut -c5-6) -export sday=$(echo $IDATE | 
cut -c7-8) -export shour=$(echo $IDATE | cut -c9-10) +export syear=${IDATE:0:4} +export smonth=${IDATE:4:2} +export sday=${IDATE:6:2} +export shour=${IDATE:8:2} # PT Need to get this from above - could be 6 or 1 hour export hh_inc_ocn=6 @@ -63,10 +63,10 @@ export flatn=90. export flonw=0.0 export flone=359.75 -ln -sf $mask_file ./iceocnpost.g2 -$executable > reg2grb2.$CDATE.$IDATE.out +ln -sf "${mask_file}" ./iceocnpost.g2 +${executable} > "reg2grb2.${VDATE}.${IDATE}.out" # interpolated from 0p25 to 0p5 grid grid2p05="0 6 0 0 0 0 0 0 720 361 0 0 90000000 0 48 -90000000 359500000 500000 500000 0" -$COPYGB2 -g "${grid2p05}" -i0 -x $outfile $outfile0p5 +${COPYGB2} -g "${grid2p05}" -i0 -x "${outfile}" "${outfile0p5}" diff --git a/scripts/run_regrid.sh b/scripts/run_regrid.sh index 6d18eeb693..103e9a759e 100755 --- a/scripts/run_regrid.sh +++ b/scripts/run_regrid.sh @@ -1,26 +1,27 @@ #! /usr/bin/env bash -source "$HOMEgfs/ush/preamble.sh" +source "${HOMEgfs}/ush/preamble.sh" -MOM6REGRID=${MOM6REGRID:-$HOMEgfs} -export EXEC_DIR=$MOM6REGRID/exec -export USH_DIR=$MOM6REGRID/ush -export COMOUTocean=$COMOUTocean -export COMOUTice=$COMOUTice -export IDATE=$IDATE -export ENSMEM=$ENSMEM -export FHR=$fhr -export DATA=$DATA -export FIXreg2grb2=$FIXreg2grb2 +MOM6REGRID="${MOM6REGRID:-${HOMEgfs}}" +export EXEC_DIR="${MOM6REGRID}/exec" +export USH_DIR="${MOM6REGRID}/ush" +export COMOUTocean="${COM_OCEAN_HISTORY}" +export COMOUTice="${COM_ICE_HISTORY}" +export IDATE="${IDATE}" +export VDATE="${VDATE}" +export ENSMEM="${ENSMEM}" +export FHR="${fhr}" +export DATA="${DATA}" +export FIXreg2grb2="${FIXreg2grb2}" ###### DO NOT MODIFY BELOW UNLESS YOU KNOW WHAT YOU ARE DOING ####### #Need NCL module to be loaded: -echo $NCARG_ROOT -export NCL=$NCARG_ROOT/bin/ncl +echo "${NCARG_ROOT}" +export NCL="${NCARG_ROOT}/bin/ncl" ls -alrt -$NCL $USH_DIR/icepost.ncl -$NCL $USH_DIR/ocnpost.ncl +${NCL} "${USH_DIR}/icepost.ncl" +${NCL} "${USH_DIR}/ocnpost.ncl" 
##################################################################### diff --git a/sorc/build_all.sh b/sorc/build_all.sh index 99f1d262bb..c4c539fda9 100755 --- a/sorc/build_all.sh +++ b/sorc/build_all.sh @@ -96,11 +96,6 @@ source ../versions/${MACHINE_ID}.ver source ./partial_build.sh ${_verbose_opt} ${_partial_opt} # shellcheck disable= -if [[ ${MACHINE_ID} =~ jet.* ]]; then - Build_gldas="false" - Build_ww3_prepost="false" -fi - #------------------------------------ # Exception Handling Init #------------------------------------ @@ -270,26 +265,6 @@ if [[ ${Build_ufs_utils} == 'true' ]]; then err=$((err + rc)) fi -#------------------------------------ -# build gldas -#------------------------------------ -if [[ -d gldas.fd ]]; then - if [[ ${Build_gldas} == 'true' ]]; then - echo " .... Building gldas .... " - # shellcheck disable=SC2086,SC2248 - ./build_gldas.sh ${_verbose_opt} > "${logs_dir}/build_gldas.log" 2>&1 - # shellcheck disable= - rc=$? - if (( rc != 0 )) ; then - echo "Fatal error in building gldas." - echo "The log file is in ${logs_dir}/build_gldas.log" - fi - err=$((err + rc)) - fi -else - echo " .... Skip building gldas .... " -fi - #------------------------------------ # build gfs_wafs - optional checkout #------------------------------------ diff --git a/sorc/build_gldas.sh b/sorc/build_gldas.sh deleted file mode 100755 index 05963b9348..0000000000 --- a/sorc/build_gldas.sh +++ /dev/null @@ -1,16 +0,0 @@ -#! /usr/bin/env bash -set -eux - -script_dir=$(dirname "${BASH_SOURCE[0]}") -cd "${script_dir}" || exit 1 - -# Check final exec folder exists -if [[ ! 
-d "../exec" ]]; then - mkdir ../exec -fi - -cd gldas.fd/sorc -./build_all_gldas.sh - -exit - diff --git a/sorc/checkout.sh b/sorc/checkout.sh index 3f34d9eaa3..f7476ac49a 100755 --- a/sorc/checkout.sh +++ b/sorc/checkout.sh @@ -156,8 +156,8 @@ mkdir -p "${logdir}" # The checkout version should always be a speciifc commit (hash or tag), not a branch errs=0 checkout "gfs_utils.fd" "https://github.com/NOAA-EMC/gfs-utils" "0b8ff56" ; errs=$((errs + $?)) -checkout "ufs_utils.fd" "https://github.com/ufs-community/UFS_UTILS.git" "4e673bf" ; errs=$((errs + $?)) -checkout "ufs_model.fd" "https://github.com/ufs-community/ufs-weather-model" "${ufs_model_hash:-c22aaad}" ; errs=$((errs + $?)) +checkout "ufs_utils.fd" "https://github.com/ufs-community/UFS_UTILS.git" "72a0471" ; errs=$((errs + $?)) +checkout "ufs_model.fd" "https://github.com/ufs-community/ufs-weather-model" "${ufs_model_hash:-2247060}" ; errs=$((errs + $?)) checkout "verif-global.fd" "https://github.com/NOAA-EMC/EMC_verif-global.git" "c267780" ; errs=$((errs + $?)) if [[ ${checkout_gsi} == "YES" ]]; then @@ -165,13 +165,12 @@ if [[ ${checkout_gsi} == "YES" ]]; then fi if [[ ${checkout_gdas} == "YES" ]]; then - checkout "gdas.cd" "https://github.com/NOAA-EMC/GDASApp.git" "db2f998"; errs=$((errs + $?)) + checkout "gdas.cd" "https://github.com/NOAA-EMC/GDASApp.git" "aaf7caa"; errs=$((errs + $?)) fi if [[ ${checkout_gsi} == "YES" || ${checkout_gdas} == "YES" ]]; then checkout "gsi_utils.fd" "https://github.com/NOAA-EMC/GSI-Utils.git" "322cc7b"; errs=$((errs + $?)) - checkout "gsi_monitor.fd" "https://github.com/NOAA-EMC/GSI-Monitor.git" "8cf16de"; errs=$((errs + $?)) - checkout "gldas.fd" "https://github.com/NOAA-EMC/GLDAS.git" "fd8ba62"; errs=$((errs + $?)) + checkout "gsi_monitor.fd" "https://github.com/NOAA-EMC/GSI-Monitor.git" "45783e3"; errs=$((errs + $?)) fi if [[ ${checkout_wafs} == "YES" ]]; then diff --git a/sorc/gfs_build.cfg b/sorc/gfs_build.cfg index c56db1f71f..d789d5ec51 100644 --- 
a/sorc/gfs_build.cfg +++ b/sorc/gfs_build.cfg @@ -7,7 +7,6 @@ Building gsi_utils (gsi_utils) ........................ yes Building gsi_monitor (gsi_monitor) .................... yes Building gdas (gdas) .................................. yes - Building gldas (gldas) ................................ yes Building UPP (upp) .................................... yes Building ufs_utils (ufs_utils) ........................ yes Building gfs_wafs (gfs_wafs) .......................... yes diff --git a/sorc/link_workflow.sh b/sorc/link_workflow.sh index 379d19387b..0ca0ba3415 100755 --- a/sorc/link_workflow.sh +++ b/sorc/link_workflow.sh @@ -90,7 +90,6 @@ for dir in aer \ cice \ cpl \ datm \ - gldas \ gsi \ lut \ mom6 \ @@ -119,11 +118,6 @@ fi #--------------------------------------- #--add files from external repositories #--------------------------------------- -cd "${top_dir}/parm" || exit 1 - if [[ -d "${script_dir}/gldas.fd" ]]; then - [[ -d gldas ]] && rm -rf gldas - ${LINK} "${script_dir}/gldas.fd/parm" gldas - fi cd "${top_dir}/parm/post" || exit 1 for file in postxconfig-NT-GEFS-ANL.txt postxconfig-NT-GEFS-F00.txt postxconfig-NT-GEFS.txt postxconfig-NT-GFS-ANL.txt \ postxconfig-NT-GFS-F00-TWO.txt postxconfig-NT-GFS-F00.txt postxconfig-NT-GFS-FLUX-F00.txt postxconfig-NT-GFS-FLUX.txt \ @@ -172,7 +166,7 @@ if [[ -d "${script_dir}/gdas.cd" ]]; then cd "${top_dir}/fix" || exit 1 [[ ! -d gdas ]] && mkdir -p gdas cd gdas || exit 1 - for gdas_sub in bump crtm fv3jedi gsibec; do + for gdas_sub in crtm fv3jedi gsibec; do if [[ -d "${gdas_sub}" ]]; then rm -rf "${gdas_sub}" fi @@ -187,6 +181,9 @@ fi if [[ -d "${script_dir}/gdas.cd" ]]; then cd "${top_dir}/ush" || exit 1 ${LINK} "${script_dir}/gdas.cd/ush/ufsda" . + ${LINK} "${script_dir}/gdas.cd/ush/jediinc2fv3.py" . + ${LINK} "${script_dir}/gdas.cd/build/bin/imsfv3_scf2ioda.py" . + ${LINK} "${script_dir}/gdas.cd/ush/land/letkf_create_ens.py" . 
fi @@ -279,13 +276,6 @@ if [[ -d "${script_dir}/gsi_monitor.fd" ]]; then done fi -if [[ -d "${script_dir}/gldas.fd" ]]; then - for gldasexe in gdas2gldas gldas2gdas gldas_forcing gldas_model gldas_post gldas_rst; do - [[ -s "${gldasexe}" ]] && rm -f "${gldasexe}" - ${LINK} "${script_dir}/gldas.fd/exec/${gldasexe}" . - done -fi - # GDASApp if [[ -d "${script_dir}/gdas.cd" ]]; then declare -a JEDI_EXE=("fv3jedi_addincrement.x" \ @@ -309,7 +299,9 @@ if [[ -d "${script_dir}/gdas.cd" ]]; then "soca_error_covariance_training.x" \ "soca_setcorscales.x" \ "soca_gridgen.x" \ - "soca_var.x") + "soca_var.x" \ + "calcfIMS.exe" \ + "apply_incr.exe" ) for gdasexe in "${JEDI_EXE[@]}"; do [[ -s "${gdasexe}" ]] && rm -f "${gdasexe}" ${LINK} "${script_dir}/gdas.cd/build/bin/${gdasexe}" . @@ -424,13 +416,6 @@ cd "${script_dir}" || exit 8 ${SLINK} gfs_wafs.fd/sorc/wafs_setmissing.fd wafs_setmissing.fd fi - if [[ -d gldas.fd ]]; then - for prog in gdas2gldas.fd gldas2gdas.fd gldas_forcing.fd gldas_model.fd gldas_post.fd gldas_rst.fd ;do - [[ -d "${prog}" ]] && rm -rf "${prog}" - ${SLINK} "gldas.fd/sorc/${prog}" "${prog}" - done - fi - #------------------------------ # copy $HOMEgfs/parm/config/config.base.nco.static as config.base for operations # config.base in the $HOMEgfs/parm/config has no use in development diff --git a/sorc/partial_build.sh b/sorc/partial_build.sh index 0d4657136d..f212ae4cb4 100755 --- a/sorc/partial_build.sh +++ b/sorc/partial_build.sh @@ -9,7 +9,6 @@ declare -a Build_prg=("Build_ufs_model" \ "Build_gsi_monitor" \ "Build_ww3_prepost" \ "Build_gdas" \ - "Build_gldas" \ "Build_upp" \ "Build_ufs_utils" \ "Build_gfs_wafs" \ @@ -99,7 +98,7 @@ parse_cfg() { sel_prg=${sel_prg//${del}/ } } done - if [[ ${del} == "" ]]; then + if [[ ${del} == "" ]]; then { short_prg=${sel_prg} found=false diff --git a/test/diff_grib_files.py b/test/diff_grib_files.py index e0eb7936db..9c01afbb18 100755 --- a/test/diff_grib_files.py +++ b/test/diff_grib_files.py @@ -72,4 +72,5 @@ def 
count_nonid_corr(test_string: str, quiet=False): wgrib2_cmd = f"wgrib2 {fileA} -var -rpn 'sto_1' -import_grib {fileB} -rpn 'rcl_1:print_corr'" string = subprocess.run(wgrib2_cmd, shell=True, stdout=subprocess.PIPE).stdout.decode("utf-8") + count_nonid_corr(string) diff --git a/ush/calcanl_gfs.py b/ush/calcanl_gfs.py index fcf272b3ee..a325ec35b3 100755 --- a/ush/calcanl_gfs.py +++ b/ush/calcanl_gfs.py @@ -341,9 +341,9 @@ def calcanl_gfs(DoIAU, l4DEnsVar, Write4Danl, ComOut, APrefix, DoIAU = gsi_utils.isTrue(os.getenv('DOIAU', 'NO')) l4DEnsVar = gsi_utils.isTrue(os.getenv('l4densvar', 'NO')) Write4Danl = gsi_utils.isTrue(os.getenv('lwrite4danl', 'NO')) - ComIn_Ges = os.getenv('COMIN_GES', './') + ComIn_Ges = os.getenv('COM_ATMOS_HISTORY_PREV', './') GPrefix = os.getenv('GPREFIX', './') - ComOut = os.getenv('COMOUT', './') + ComOut = os.getenv('COM_ATMOS_ANALYSIS', './') APrefix = os.getenv('APREFIX', '') NThreads = os.getenv('NTHREADS_CHGRES', 1) FixDir = os.getenv('FIXgsm', './') diff --git a/ush/calcinc_gfs.py b/ush/calcinc_gfs.py index 6ed1e50422..cb334ac836 100755 --- a/ush/calcinc_gfs.py +++ b/ush/calcinc_gfs.py @@ -70,7 +70,7 @@ def calcinc_gfs(DoIAU, l4DEnsVar, Write4Danl, ComOut, APrefix, IAUHrs, DoIAU = gsi_utils.isTrue(os.getenv('DOIAU', 'NO')) l4DEnsVar = gsi_utils.isTrue(os.getenv('l4densvar', 'NO')) Write4Danl = gsi_utils.isTrue(os.getenv('lwrite4danl', 'NO')) - ComOut = os.getenv('COMOUT', './') + ComOut = os.getenv('COM_ATMOS_ANALYSIS', './') APrefix = os.getenv('APREFIX', '') NThreads = os.getenv('NTHREADS_CALCINC', 1) IMP_Physics = os.getenv('imp_physics', 11) diff --git a/ush/detect_machine.sh b/ush/detect_machine.sh index 9250c89888..647722b7a3 100755 --- a/ush/detect_machine.sh +++ b/ush/detect_machine.sh @@ -50,7 +50,7 @@ fi if [[ -d /lfs/f1 ]] ; then # We are on NOAA Cactus or Dogwood MACHINE_ID=wcoss2 -elif [[ -d /lfs3 ]] ; then +elif [[ -d /mnt/lfs1 ]] ; then # We are on NOAA Jet MACHINE_ID=jet elif [[ -d /scratch1 ]] ; then diff --git 
a/ush/forecast_det.sh b/ush/forecast_det.sh index 575f35696a..06329e0762 100755 --- a/ush/forecast_det.sh +++ b/ush/forecast_det.sh @@ -19,7 +19,7 @@ FV3_GFS_det(){ res_latlon_dynamics="''" # Determine if this is a warm start or cold start - if [[ -f "${gmemdir}/RESTART/${sPDY}.${scyc}0000.coupler.res" ]]; then + if [[ -f "${COM_ATMOS_RESTART_PREV}/${sPDY}.${scyc}0000.coupler.res" ]]; then export warm_start=".true." fi @@ -40,22 +40,22 @@ FV3_GFS_det(){ #------------------------------------------------------- # determine if restart IC exists to continue from a previous forecast RERUN=${RERUN:-"NO"} - filecount=$(find "${RSTDIR_ATM:-/dev/null}" -type f | wc -l) + filecount=$(find "${COM_ATMOS_RESTART:-/dev/null}" -type f | wc -l) if [[ ( ${CDUMP} = "gfs" || ( ${RUN} = "gefs" && ${CDATE_RST} = "" )) && ${rst_invt1} -gt 0 && ${FHMAX} -gt ${rst_invt1} && ${filecount} -gt 10 ]]; then reverse=$(echo "${restart_interval[@]} " | tac -s ' ') for xfh in ${reverse} ; do yfh=$((xfh-(IAU_OFFSET/2))) - SDATE=$(${NDATE} +${yfh} "${CDATE}") + SDATE=$(${NDATE} ${yfh} "${CDATE}") PDYS=$(echo "${SDATE}" | cut -c1-8) cycs=$(echo "${SDATE}" | cut -c9-10) - flag1=${RSTDIR_ATM}/${PDYS}.${cycs}0000.coupler.res - flag2=${RSTDIR_ATM}/coupler.res + flag1=${COM_ATMOS_RESTART}/${PDYS}.${cycs}0000.coupler.res + flag2=${COM_ATMOS_RESTART}/coupler.res #make sure that the wave restart files also exist if cplwav=true waverstok=".true." if [[ "${cplwav}" = ".true." ]]; then for wavGRD in ${waveGRD} ; do - if [[ ! -f "${RSTDIR_WAVE}/${PDYS}.${cycs}0000.restart.${wavGRD}" ]]; then + if [[ ! -f "${COM_WAVE_RESTART}/${PDYS}.${cycs}0000.restart.${wavGRD}" ]]; then waverstok=".false." fi done diff --git a/ush/forecast_postdet.sh b/ush/forecast_postdet.sh index f80a7440cf..adce9f696c 100755 --- a/ush/forecast_postdet.sh +++ b/ush/forecast_postdet.sh @@ -40,25 +40,23 @@ FV3_GFS_postdet(){ if [ $RERUN = "NO" ]; then #............................. 
- # Link all restart files from $gmemdir - for file in $(ls $gmemdir/RESTART/${sPDY}.${scyc}0000.*.nc); do + # Link all restart files from previous cycle + for file in "${COM_ATMOS_RESTART_PREV}/${sPDY}.${scyc}0000."*.nc; do file2=$(echo $(basename $file)) file2=$(echo $file2 | cut -d. -f3-) # remove the date from file fsuf=$(echo $file2 | cut -d. -f1) $NLN $file $DATA/INPUT/$file2 done - # Replace sfc_data with sfcanl_data restart files from $memdir (if found) + # Replace sfc_data with sfcanl_data restart files from current cycle (if found) if [ "${MODE}" = "cycled" ] && [ "${CCPP_SUITE}" = "FV3_GFS_v16" ]; then # TODO: remove if statement when global_cycle can handle NOAHMP - for file in $(ls $memdir/RESTART/${sPDY}.${scyc}0000.sfcanl_data.tile?.nc); do - if [[ -f $file ]]; then - file2=$(echo $(basename $file)) - file2=$(echo $file2 | cut -d. -f3-) # remove the date from file - fsufanl=$(echo $file2 | cut -d. -f1) - file2=$(echo $file2 | sed -e "s/sfcanl_data/sfc_data/g") - rm -f $DATA/INPUT/$file2 - $NLN $file $DATA/INPUT/$file2 - fi + for file in "${COM_ATMOS_RESTART}/${sPDY}.${scyc}0000."*.nc; do + file2=$(echo $(basename $file)) + file2=$(echo $file2 | cut -d. -f3-) # remove the date from file + fsufanl=$(echo $file2 | cut -d. -f1) + file2=$(echo $file2 | sed -e "s/sfcanl_data/sfc_data/g") + rm -f $DATA/INPUT/$file2 + $NLN $file $DATA/INPUT/$file2 done fi @@ -77,9 +75,9 @@ EOF for i in $(echo $IAUFHRS | sed "s/,/ /g" | rev); do incfhr=$(printf %03i $i) if [ $incfhr = "006" ]; then - increment_file=$memdir/${CDUMP}.t${cyc}z.${PREFIX_ATMINC}atminc.nc + increment_file="${COM_ATMOS_ANALYSIS}/${RUN}.t${cyc}z.${PREFIX_ATMINC}atminc.nc" else - increment_file=$memdir/${CDUMP}.t${cyc}z.${PREFIX_ATMINC}atmi${incfhr}.nc + increment_file="${COM_ATMOS_ANALYSIS}/${RUN}.t${cyc}z.${PREFIX_ATMINC}atmi${incfhr}.nc" fi if [ ! 
-f $increment_file ]; then echo "ERROR: DOIAU = $DOIAU, but missing increment file for fhr $incfhr at $increment_file" @@ -92,7 +90,7 @@ EOF read_increment=".false." res_latlon_dynamics="" else - increment_file=$memdir/${CDUMP}.t${cyc}z.${PREFIX_ATMINC}atminc.nc + increment_file="${COM_ATMOS_ANALYSIS}/${RUN}.t${cyc}z.${PREFIX_ATMINC}atminc.nc" if [ -f $increment_file ]; then $NLN $increment_file $DATA/INPUT/fv3_increment.nc read_increment=".true." @@ -105,7 +103,7 @@ EOF export warm_start=".true." PDYT=$(echo $CDATE_RST | cut -c1-8) cyct=$(echo $CDATE_RST | cut -c9-10) - for file in $(ls $RSTDIR_ATM/${PDYT}.${cyct}0000.*); do + for file in "${COM_ATMOS_RESTART}/${PDYT}.${cyct}0000."*; do file2=$(echo $(basename $file)) file2=$(echo $file2 | cut -d. -f3-) $NLN $file $DATA/INPUT/$file2 @@ -130,7 +128,7 @@ EOF #............................. else ## cold start - for file in $(ls ${memdir}/INPUT/*.nc); do + for file in "${COM_ATMOS_INPUT}/"*.nc; do file2=$(echo $(basename $file)) fsuf=$(echo $file2 | cut -c1-3) if [ $fsuf = "gfs" -o $fsuf = "sfc" ]; then @@ -482,6 +480,8 @@ EOF LONB_STP=${LONB_STP:-$LONB_CASE} LATB_STP=${LATB_STP:-$LATB_CASE} cd $DATA + if [[ ! -d ${COM_ATMOS_HISTORY} ]]; then mkdir -p ${COM_ATMOS_HISTORY}; fi + if [[ ! -d ${COM_ATMOS_MASTER} ]]; then mkdir -p ${COM_ATMOS_MASTER}; fi if [ $QUILTING = ".true." 
-a $OUTPUT_GRID = "gaussian_grid" ]; then fhr=$FHMIN for fhr in $OUTPUT_FH; do @@ -489,14 +489,14 @@ EOF FH2=$(printf %02i $fhr) atmi=atmf${FH3}.nc sfci=sfcf${FH3}.nc - logi=logf${FH3} + logi=log.atm.f${FH3} pgbi=GFSPRS.GrbF${FH2} flxi=GFSFLX.GrbF${FH2} - atmo=$memdir/${CDUMP}.t${cyc}z.atmf${FH3}.nc - sfco=$memdir/${CDUMP}.t${cyc}z.sfcf${FH3}.nc - logo=$memdir/${CDUMP}.t${cyc}z.logf${FH3}.txt - pgbo=$memdir/${CDUMP}.t${cyc}z.master.grb2f${FH3} - flxo=$memdir/${CDUMP}.t${cyc}z.sfluxgrbf${FH3}.grib2 + atmo=${COM_ATMOS_HISTORY}/${RUN}.t${cyc}z.atmf${FH3}.nc + sfco=${COM_ATMOS_HISTORY}/${RUN}.t${cyc}z.sfcf${FH3}.nc + logo=${COM_ATMOS_HISTORY}/${RUN}.t${cyc}z.atm.logf${FH3}.txt + pgbo=${COM_ATMOS_MASTER}/${RUN}.t${cyc}z.master.grb2f${FH3} + flxo=${COM_ATMOS_MASTER}/${RUN}.t${cyc}z.sfluxgrbf${FH3}.grib2 eval $NLN $atmo $atmi eval $NLN $sfco $sfci eval $NLN $logo $logi @@ -507,11 +507,11 @@ EOF done else for n in $(seq 1 $ntiles); do - eval $NLN nggps2d.tile${n}.nc $memdir/nggps2d.tile${n}.nc - eval $NLN nggps3d.tile${n}.nc $memdir/nggps3d.tile${n}.nc - eval $NLN grid_spec.tile${n}.nc $memdir/grid_spec.tile${n}.nc - eval $NLN atmos_static.tile${n}.nc $memdir/atmos_static.tile${n}.nc - eval $NLN atmos_4xdaily.tile${n}.nc $memdir/atmos_4xdaily.tile${n}.nc + eval $NLN nggps2d.tile${n}.nc ${COM_ATMOS_HISTORY}/nggps2d.tile${n}.nc + eval $NLN nggps3d.tile${n}.nc ${COM_ATMOS_HISTORY}/nggps3d.tile${n}.nc + eval $NLN grid_spec.tile${n}.nc ${COM_ATMOS_HISTORY}/grid_spec.tile${n}.nc + eval $NLN atmos_static.tile${n}.nc ${COM_ATMOS_HISTORY}/atmos_static.tile${n}.nc + eval $NLN atmos_4xdaily.tile${n}.nc ${COM_ATMOS_HISTORY}/atmos_4xdaily.tile${n}.nc done fi } @@ -542,16 +542,16 @@ data_out_GFS() { if [ $SEND = "YES" ]; then # Copy model restart files - if [[ ${CDUMP} =~ "gdas" ]] && (( rst_invt1 > 0 )); then + if [[ ${RUN} =~ "gdas" ]] && (( rst_invt1 > 0 )); then cd $DATA/RESTART - mkdir -p $memdir/RESTART + mkdir -p "${COM_ATMOS_RESTART}" for rst_int in $restart_interval ; do if [ 
$rst_int -ge 0 ]; then RDATE=$($NDATE +$rst_int $CDATE) rPDY=$(echo $RDATE | cut -c1-8) rcyc=$(echo $RDATE | cut -c9-10) - for file in $(ls ${rPDY}.${rcyc}0000.*) ; do - $NCP $file $memdir/RESTART/$file + for file in "${rPDY}.${rcyc}0000."* ; do + ${NCP} "${file}" "${COM_ATMOS_RESTART}/${file}" done fi done @@ -564,12 +564,12 @@ data_out_GFS() { RDATE=$($NDATE +$rst_iau $CDATE) rPDY=$(echo $RDATE | cut -c1-8) rcyc=$(echo $RDATE | cut -c9-10) - for file in $(ls ${rPDY}.${rcyc}0000.*) ; do - $NCP $file $memdir/RESTART/$file + for file in "${rPDY}.${rcyc}0000."* ; do + ${NCP} "${file}" "${COM_ATMOS_RESTART}/${file}" done fi - elif [[ ${CDUMP} =~ "gfs" ]]; then - $NCP $DATA/input.nml ${ROTDIR}/${RUN}.${PDY}/${cyc}/atmos/ + elif [[ ${RUN} =~ "gfs" ]]; then + ${NCP} "${DATA}/input.nml" "${COM_ATMOS_HISTORY}/input.nml" fi fi @@ -588,11 +588,11 @@ WW3_postdet() { grdALL=$(printf "%s\n" "${array[@]}" | sort -u | tr '\n' ' ') for wavGRD in ${grdALL}; do - $NCP $ROTDIR/${CDUMP}.${PDY}/${cyc}/wave/rundata/${COMPONENTwave}.mod_def.$wavGRD $DATA/mod_def.$wavGRD + ${NCP} "${COM_WAVE_PREP}/${COMPONENTwave}.mod_def.${wavGRD}" "${DATA}/mod_def.${wavGRD}" done - else - #if shel, only 1 waveGRD which is linked to mod_def.ww3 - $NCP $ROTDIR/${CDUMP}.${PDY}/${cyc}/wave/rundata/${COMPONENTwave}.mod_def.$waveGRD $DATA/mod_def.ww3 + else + #if shel, only 1 waveGRD which is linked to mod_def.ww3 + ${NCP} "${COM_WAVE_PREP}/${COMPONENTwave}.mod_def.${waveGRD}" "${DATA}/mod_def.ww3" fi @@ -604,25 +604,18 @@ WW3_postdet() { $NLN -sf $FIXwave/$MESH_WAV $DATA/ fi - export WAVHCYC=${WAVHCYC:-6} - export WRDATE=$($NDATE -${WAVHCYC} $CDATE) - export WRPDY=$(echo $WRDATE | cut -c1-8) - export WRcyc=$(echo $WRDATE | cut -c9-10) - export WRDIR=${ROTDIR}/${CDUMPRSTwave}.${WRPDY}/${WRcyc}/wave/restart - export RSTDIR_WAVE=$ROTDIR/${CDUMP}.${PDY}/${cyc}/wave/restart - export datwave=$COMOUTwave/rundata - export wavprfx=${CDUMPwave}${WAV_MEMBER:-} + export wavprfx=${RUNwave}${WAV_MEMBER:-} #Copy initial 
condition files: for wavGRD in $waveGRD ; do if [ $warm_start = ".true." -o $RERUN = "YES" ]; then if [ $RERUN = "NO" ]; then - waverstfile=${WRDIR}/${sPDY}.${scyc}0000.restart.${wavGRD} - else - waverstfile=${RSTDIR_WAVE}/${PDYT}.${cyct}0000.restart.${wavGRD} + waverstfile=${COM_WAVE_RESTART_PREV}/${sPDY}.${scyc}0000.restart.${wavGRD} + else + waverstfile=${COM_WAVE_RESTART}/${PDYT}.${cyct}0000.restart.${wavGRD} fi - else - waverstfile=${RSTDIR_WAVE}/${sPDY}.${scyc}0000.restart.${wavGRD} + else + waverstfile=${COM_WAVE_RESTART}/${sPDY}.${scyc}0000.restart.${wavGRD} fi if [ ! -f ${waverstfile} ]; then if [ $RERUN = "NO" ]; then @@ -642,14 +635,14 @@ WW3_postdet() { if [ $waveMULTIGRID = ".true." ]; then for wavGRD in $waveGRD ; do - $NLN $datwave/${wavprfx}.log.${wavGRD}.${PDY}${cyc} log.${wavGRD} + ${NLN} "${COM_WAVE_HISTORY}/${wavprfx}.log.${wavGRD}.${PDY}${cyc}" "log.${wavGRD}" done - else - $NLN $datwave/${wavprfx}.log.${waveGRD}.${PDY}${cyc} log.ww3 + else + ${NLN} "${COM_WAVE_HISTORY}/${wavprfx}.log.${waveGRD}.${PDY}${cyc}" "log.ww3" fi if [ "$WW3ICEINP" = "YES" ]; then - wavicefile=$COMINwave/rundata/${CDUMPwave}.${WAVEICE_FID}.${cycle}.ice + wavicefile="${COM_WAVE_PREP}/${RUNwave}.${WAVEICE_FID}.${cycle}.ice" if [ ! -f $wavicefile ]; then echo "ERROR: WW3ICEINP = ${WW3ICEINP}, but missing ice file" echo "Abort!" @@ -659,7 +652,7 @@ WW3_postdet() { fi if [ "$WW3CURINP" = "YES" ]; then - wavcurfile=$COMINwave/rundata/${CDUMPwave}.${WAVECUR_FID}.${cycle}.cur + wavcurfile="${COM_WAVE_PREP}/${RUNwave}.${WAVECUR_FID}.${cycle}.cur" if [ ! -f $wavcurfile ]; then echo "ERROR: WW3CURINP = ${WW3CURINP}, but missing current file" echo "Abort!" @@ -668,10 +661,12 @@ WW3_postdet() { $NLN $wavcurfile $DATA/current.${WAVECUR_FID} fi + if [[ ! -d ${COM_WAVE_HISTORY} ]]; then mkdir -p "${COM_WAVE_HISTORY}"; fi + # Link output files cd $DATA if [ $waveMULTIGRID = ".true." 
]; then - $NLN $datwave/${wavprfx}.log.mww3.${PDY}${cyc} log.mww3 + ${NLN} "${COM_WAVE_HISTORY}/${wavprfx}.log.mww3.${PDY}${cyc}" "log.mww3" fi # Loop for gridded output (uses FHINC) @@ -682,10 +677,10 @@ WW3_postdet() { HMS="$(echo $YMDH | cut -c9-10)0000" if [ $waveMULTIGRID = ".true." ]; then for wavGRD in ${waveGRD} ; do - $NLN $datwave/${wavprfx}.out_grd.${wavGRD}.${YMD}.${HMS} $DATA/${YMD}.${HMS}.out_grd.${wavGRD} + ${NLN} "${COM_WAVE_HISTORY}/${wavprfx}.out_grd.${wavGRD}.${YMD}.${HMS}" "${DATA}/${YMD}.${HMS}.out_grd.${wavGRD}" done - else - $NLN $datwave/${wavprfx}.out_grd.${waveGRD}.${YMD}.${HMS} $DATA/${YMD}.${HMS}.out_grd.ww3 + else + ${NLN} "${COM_WAVE_HISTORY}/${wavprfx}.out_grd.${waveGRD}.${YMD}.${HMS}" "${DATA}/${YMD}.${HMS}.out_grd.ww3" fi FHINC=$FHOUT_WAV if [ $FHMAX_HF_WAV -gt 0 -a $FHOUT_HF_WAV -gt 0 -a $fhr -lt $FHMAX_HF_WAV ]; then @@ -701,9 +696,9 @@ WW3_postdet() { YMD=$(echo $YMDH | cut -c1-8) HMS="$(echo $YMDH | cut -c9-10)0000" if [ $waveMULTIGRID = ".true." ]; then - $NLN $datwave/${wavprfx}.out_pnt.${waveuoutpGRD}.${YMD}.${HMS} $DATA/${YMD}.${HMS}.out_pnt.${waveuoutpGRD} - else - $NLN $datwave/${wavprfx}.out_pnt.${waveuoutpGRD}.${YMD}.${HMS} $DATA/${YMD}.${HMS}.out_pnt.ww3 + ${NLN} "${COM_WAVE_HISTORY}/${wavprfx}.out_pnt.${waveuoutpGRD}.${YMD}.${HMS}" "${DATA}/${YMD}.${HMS}.out_pnt.${waveuoutpGRD}" + else + ${NLN} "${COM_WAVE_HISTORY}/${wavprfx}.out_pnt.${waveuoutpGRD}.${YMD}.${HMS}" "${DATA}/${YMD}.${HMS}.out_pnt.ww3" fi FHINC=$FHINCP_WAV @@ -713,9 +708,9 @@ WW3_postdet() { WW3_nml() { echo "SUB ${FUNCNAME[0]}: Copying input files for WW3" - WAV_MOD_TAG=${CDUMP}wave${waveMEMB} + WAV_MOD_TAG=${RUN}wave${waveMEMB} if [ "${USE_WAV_RMP:-YES}" = "YES" ]; then - if (( $( ls -1 $FIXwave/rmp_src_to_dst_conserv_* > /dev/null | wc -l) > 0 )); then + if (( $( ls -1 $FIXwave/rmp_src_to_dst_conserv_* 2> /dev/null | wc -l) > 0 )); then for file in $(ls $FIXwave/rmp_src_to_dst_conserv_*) ; do $NLN $file $DATA/ done @@ -736,7 +731,7 @@ WW3_out() { 
CPL_out() { echo "SUB ${FUNCNAME[0]}: Copying output data for general cpl fields" if [ $esmf_profile = ".true." ]; then - $NCP $DATA/ESMF_Profile.summary $ROTDIR/$CDUMP.$PDY/$cyc/ + ${NCP} "${DATA}/ESMF_Profile.summary" "${COM_ATMOS_HISTORY}/ESMF_Profile.summary" fi } @@ -744,12 +739,12 @@ MOM6_postdet() { echo "SUB ${FUNCNAME[0]}: MOM6 after run type determination" # Copy MOM6 ICs - $NLN "${ROTDIR}/${CDUMP}.${gPDY}/${gcyc}/ocean/RESTART/${PDY}.${cyc}0000.MOM.res.nc" "${DATA}/INPUT/MOM.res.nc" + ${NLN} "${COM_OCEAN_RESTART_PREV}/${PDY}.${cyc}0000.MOM.res.nc" "${DATA}/INPUT/MOM.res.nc" case $OCNRES in "025") for nn in $(seq 1 4); do - if [[ -f "${ROTDIR}/${CDUMP}.${gPDY}/${gcyc}/ocean/RESTART/${PDY}.${cyc}0000.MOM.res_${nn}.nc" ]]; then - $NLN "${ROTDIR}/${CDUMP}.${gPDY}/${gcyc}/ocean/RESTART/${PDY}.${cyc}0000.MOM.res_${nn}.nc" "${DATA}/INPUT/MOM.res_${nn}.nc" + if [[ -f "${COM_OCEAN_RESTART_PREV}/${PDY}.${cyc}0000.MOM.res_${nn}.nc" ]]; then + ${NLN} "${COM_OCEAN_RESTART_PREV}/${PDY}.${cyc}0000.MOM.res_${nn}.nc" "${DATA}/INPUT/MOM.res_${nn}.nc" fi done ;; @@ -757,11 +752,11 @@ MOM6_postdet() { # Link increment if [[ "${DO_JEDIOCNVAR:-NO}" = "YES" ]]; then - if [[ ! -f "${ROTDIR}/${CDUMP}.${PDY}/${cyc}/ocean/${CDUMP}.t${cyc}z.ocninc.nc" ]]; then + if [[ ! -f "${COM_OCEAN_ANALYSIS}/${RUN}.t${cyc}z.ocninc.nc" ]]; then echo "FATAL ERROR: Ocean increment not found, ABORT!" exit 111 fi - $NLN "${ROTDIR}/${CDUMP}.${PDY}/${cyc}/ocean/${CDUMP}.t${cyc}z.ocninc.nc" "${DATA}/INPUT/mom6_increment.nc" + ${NLN} "${COM_OCEAN_ANALYSIS}/${RUN}.t${cyc}z.ocninc.nc" "${DATA}/INPUT/mom6_increment.nc" fi # Copy MOM6 fixed files @@ -776,11 +771,11 @@ MOM6_postdet() { exit 3 fi - # Copy mediator restart file to RUNDIR # TODO: mediator should have its own CMEPS_postdet() function - if [[ "${warm_start}" = '.true.' 
]]; then - local mediator_file="${ROTDIR}/${CDUMP}.${gPDY}/${gcyc}/med/RESTART/${PDY}.${cyc}0000.ufs.cpld.cpl.r.nc" + # Copy mediator restart files to RUNDIR # TODO: mediator should have its own CMEPS_postdet() function + if [[ $warm_start = ".true." ]]; then + local mediator_file="${COM_MED_RESTART}/${PDY}.${cyc}0000.ufs.cpld.cpl.r.nc" if [[ -f "${mediator_file}" ]]; then - $NLN "${mediator_file}" "${DATA}/ufs.cpld.cpl.r.nc" + ${NCP} "${mediator_file}" "${DATA}/ufs.cpld.cpl.r.nc" rm -f "${DATA}/rpointer.cpl" touch "${DATA}/rpointer.cpl" echo "ufs.cpld.cpl.r.nc" >> "${DATA}/rpointer.cpl" @@ -809,17 +804,18 @@ MOM6_postdet() { fi # Create COMOUTocean - [[ ! -d $COMOUTocean ]] && mkdir -p $COMOUTocean + [[ ! -d ${COM_OCEAN_HISTORY} ]] && mkdir -p "${COM_OCEAN_HISTORY}" # Link output files - if [[ "${CDUMP}" =~ "gfs" ]]; then - # Link output files for CDUMP = gfs + if [[ "${RUN}" =~ "gfs" ]]; then + # Link output files for RUN = gfs # TODO: get requirements on what files need to be written out and what these dates here are and what they mean export ENSMEM=${ENSMEM:-01} export IDATE=$CDATE - fhrlst=$OUTPUT_FH + fhrlst=${OUTPUT_FH} + if [[ ! -d ${COM_OCEAN_HISTORY} ]]; then mkdir -p ${COM_OCEAN_HISTORY}; fi for fhr in $fhrlst; do if [ $fhr = 'anl' ]; then # Looking at OUTPUT_FH, this is never true, TODO: remove this block @@ -847,41 +843,41 @@ MOM6_postdet() { source_file="ocn_${YYYY_MID}_${MM_MID}_${DD_MID}_${HH_MID}.nc" dest_file="ocn${VDATE}.${ENSMEM}.${IDATE}.nc" - ${NLN} ${COMOUTocean}/${dest_file} ${DATA}/${source_file} + ${NLN} ${COM_OCEAN_HISTORY}/${dest_file} ${DATA}/${source_file} source_file="ocn_daily_${YYYY}_${MM}_${DD}.nc" dest_file=${source_file} if [ ! 
-a "${DATA}/${source_file}" ]; then - $NLN ${COMOUTocean}/${dest_file} ${DATA}/${source_file} + $NLN ${COM_OCEAN_HISTORY}/${dest_file} ${DATA}/${source_file} fi last_fhr=$fhr done - elif [[ "${CDUMP}" =~ "gdas" ]]; then - # Link output files for CDUMP = gdas + elif [[ "${RUN}" =~ "gdas" ]]; then + # Link output files for RUN = gdas # Save MOM6 backgrounds for fhr in ${OUTPUT_FH}; do local idatestr=$(date -d "${CDATE:0:8} ${CDATE:8:2} + ${fhr} hours" +%Y_%m_%d_%H) local fhr3=$(printf %03i "${fhr}") - $NLN "${COMOUTocean}/${CDUMP}.t${cyc}z.ocnf${fhr3}.nc" "${DATA}/ocn_da_${idatestr}.nc" + $NLN "${COM_OCEAN_HISTORY}/${RUN}.t${cyc}z.ocnf${fhr3}.nc" "${DATA}/ocn_da_${idatestr}.nc" done fi - mkdir -p "${COMOUTocean}/RESTART" + mkdir -p "${COM_OCEAN_RESTART}" # end point restart does not have a timestamp, calculate local rdate=$(date -d "${CDATE:0:8} ${CDATE:8:2} + ${FHMAX} hours" +%Y%m%d%H) # Link ocean restarts from DATA to COM # Coarser than 1/2 degree has a single MOM restart - $NLN "${COMOUTocean}/RESTART/${rdate:0:8}.${rdate:8:2}0000.MOM.res.nc" "${DATA}/MOM6_RESTART/MOM.res.nc" + $NLN "${COM_OCEAN_RESTART}/${rdate:0:8}.${rdate:8:2}0000.MOM.res.nc" "${DATA}/MOM6_RESTART/" # 1/4 degree resolution has 4 additional restarts case ${OCNRES} in "025") for nn in $(seq 1 4); do - $NLN "${COMOUTocean}/RESTART/${rdate:0:8}.${rdate:8:2}0000.MOM.res_${nn}.nc" "${DATA}/MOM6_RESTART/MOM.res_${nn}.nc" + $NLN "${COM_OCEAN_RESTART}/${rdate:0:8}.${rdate:8:2}0000.MOM.res_${nn}.nc" "${DATA}/MOM6_RESTART/" done ;; *) @@ -893,11 +889,11 @@ MOM6_postdet() { local idate=$(date -d "${CDATE:0:8} ${CDATE:8:2} + ${res_int} hours" +%Y%m%d%H) while [[ $idate -lt $rdate ]]; do local idatestr=$(date +%Y-%m-%d-%H -d "${idate:0:8} ${idate:8:2}") - $NLN "${COMOUTocean}/RESTART/${idate:0:8}.${idate:8:2}0000.MOM.res.nc" "${DATA}/MOM6_RESTART/MOM.res.${idatestr}-00-00.nc" + $NLN "${COM_OCEAN_RESTART}/${idate:0:8}.${idate:8:2}0000.MOM.res.nc" "${DATA}/MOM6_RESTART/" case ${OCNRES} in "025") for nn in 
$(seq 1 4); do - $NLN "${COMOUTocean}/RESTART/${idate:0:8}.${idate:8:2}0000.MOM.res_${nn}.nc" "${DATA}/MOM6_RESTART/MOM.res.${idatestr}-00-00_${nn}.nc" + $NLN "${COM_OCEAN_RESTART}/${idate:0:8}.${idate:8:2}0000.MOM.res_${nn}.nc" "${DATA}/MOM6_RESTART/" done ;; esac @@ -909,7 +905,7 @@ MOM6_postdet() { # DANGER DANGER DANGER - Linking mediator restarts to COM causes the model to fail with a message like this below: # Abort with message NetCDF: File exists && NC_NOCLOBBER in file pio-2.5.7/src/clib/pioc_support.c at line 2173 # Instead of linking, copy the mediator files after the model finishes - #local COMOUTmed="${ROTDIR}/${CDUMP}.${PDY}/${cyc}/med" + #local COMOUTmed="${ROTDIR}/${RUN}.${PDY}/${cyc}/med" #mkdir -p "${COMOUTmed}/RESTART" #local idate=$(date -d "${CDATE:0:8} ${CDATE:8:2} + ${res_int} hours" +%Y%m%d%H) #while [[ $idate -le $rdate ]]; do @@ -932,15 +928,15 @@ MOM6_nml() { MOM6_out() { echo "SUB ${FUNCNAME[0]}: Copying output data for MOM6" - # Copy MOM_input from DATA to COMOUToucean after the forecast is run (and successfull) - $NCP ${DATA}/INPUT/MOM_input ${COMOUTocean}/MOM_input + # Copy MOM_input from DATA to COM_OCEAN_INPUT after the forecast is run (and successfull) + if [[ ! -d ${COM_OCEAN_INPUT} ]]; then mkdir -p "${COM_OCEAN_INPUT}"; fi + ${NCP} "${DATA}/INPUT/MOM_input" "${COM_OCEAN_INPUT}/" # TODO: mediator should have its own CMEPS_out() function # Copy mediator restarts from DATA to COM # Linking mediator restarts to COM causes the model to fail with a message. # See MOM6_postdet() function for error message - local COMOUTmed="${ROTDIR}/${CDUMP}.${PDY}/${cyc}/med" - mkdir -p "${COMOUTmed}/RESTART" + mkdir -p "${COM_MED_RESTART}" local res_int=$(echo $restart_interval | cut -d' ' -f1) # If this is a list, get the frequency. 
# This is bound to break w/ IAU local idate=$(date -d "${CDATE:0:8} ${CDATE:8:2} + ${res_int} hours" +%Y%m%d%H) local rdate=$(date -d "${CDATE:0:8} ${CDATE:8:2} + ${FHMAX} hours" +%Y%m%d%H) @@ -949,7 +945,7 @@ MOM6_out() { local idatestr="${idate:0:4}-${idate:4:2}-${idate:6:2}-${seconds}" local mediator_file="${DATA}/RESTART/ufs.cpld.cpl.r.${idatestr}.nc" if [[ -f ${mediator_file} ]]; then - $NCP "${DATA}/RESTART/ufs.cpld.cpl.r.${idatestr}.nc" "${COMOUTmed}/RESTART/${idate:0:8}.${idate:8:2}0000.ufs.cpld.cpl.r.nc" + $NCP "${DATA}/RESTART/ufs.cpld.cpl.r.${idatestr}.nc" "${COM_MED_RESTART}/${idate:0:8}.${idate:8:2}0000.ufs.cpld.cpl.r.nc" else echo "Mediator restart ${mediator_file} not found." fi @@ -977,9 +973,9 @@ CICE_postdet() { dumpfreq_n=${dumpfreq_n:-1000} # Set this to a really large value, as cice, mom6 and cmeps restart interval is controlled by nems.configure dumpfreq=${dumpfreq:-"y"} # "h","d","m" or "y" for restarts at intervals of "hours", "days", "months" or "years" - if [[ "${CDUMP}" =~ "gdas" ]]; then + if [[ "${RUN}" =~ "gdas" ]]; then cice_hist_avg=".false." # DA needs instantaneous - elif [[ "${CDUMP}" =~ "gfs" ]]; then + elif [[ "${RUN}" =~ "gfs" ]]; then cice_hist_avg=".true." # P8 wants averaged over histfreq_n fi @@ -1001,9 +997,14 @@ CICE_postdet() { # Copy/link CICE IC to DATA if [[ "${warm_start}" = ".true." ]]; then - $NLN "${ROTDIR}/${CDUMP}.${gPDY}/${gcyc}/ice/RESTART/${PDY}.${cyc}0000.cice_model.res.nc" "${DATA}/cice_model.res.nc" + cice_ana="${COM_ICE_RESTART}/${PDY}.${cyc}0000.cice_model_anl.res.nc" + if [[ -e ${cice_ana} ]]; then + ${NLN} "${cice_ana}" "${DATA}/cice_model.res.nc" + else + ${NLN} "${COM_ICE_RESTART_PREV}/${PDY}.${cyc}0000.cice_model.res.nc" "${DATA}/cice_model.res.nc" + fi else # cold start are typically SIS2 restarts obtained from somewhere else e.g. 
CPC - $NLN "${ROTDIR}/${CDUMP}.${PDY}/${cyc}/ice/RESTART/${PDY}.${cyc}0000.cice_model.res.nc" "${DATA}/cice_model.res.nc" + $NLN "${COM_ICE_RESTART}/${PDY}.${cyc}0000.cice_model.res.nc" "${DATA}/cice_model.res.nc" fi # TODO: add a check for the restarts to exist, if not, exit eloquently rm -f "${DATA}/ice.restart_file" @@ -1016,11 +1017,11 @@ CICE_postdet() { $NLN -sf $FIXcice/$ICERES/$MESH_OCN_ICE $DATA/ # Link CICE output files - [[ ! -d $COMOUTice ]] && mkdir -p $COMOUTice - mkdir -p ${COMOUTice}/RESTART + if [[ ! -d "${COM_ICE_HISTORY}" ]]; then mkdir -p "${COM_ICE_HISTORY}"; fi + mkdir -p ${COM_ICE_RESTART} - if [[ "${CDUMP}" =~ "gfs" ]]; then - # Link output files for CDUMP = gfs + if [[ "${RUN}" =~ "gfs" ]]; then + # Link output files for RUN = gfs # TODO: make these forecast output files consistent w/ GFS output # TODO: Work w/ NB to determine appropriate naming convention for these files @@ -1043,20 +1044,20 @@ CICE_postdet() { SS=$((10#$HH*3600)) if [[ 10#$fhr -eq 0 ]]; then - $NLN $COMOUTice/iceic$VDATE.$ENSMEM.$IDATE.nc $DATA/CICE_OUTPUT/iceh_ic.${YYYY}-${MM}-${DD}-$(printf "%5.5d" ${SS}).nc + ${NLN} "${COM_ICE_HISTORY}/iceic${VDATE}.${ENSMEM}.${IDATE}.nc" "${DATA}/CICE_OUTPUT/iceh_ic.${YYYY}-${MM}-${DD}-$(printf "%5.5d" ${SS}).nc" else (( interval = fhr - last_fhr )) # Umm.. isn't this histfreq_n? 
- $NLN $COMOUTice/ice$VDATE.$ENSMEM.$IDATE.nc $DATA/CICE_OUTPUT/iceh_$(printf "%0.2d" $interval)h.${YYYY}-${MM}-${DD}-$(printf "%5.5d" ${SS}).nc + ${NLN} "${COM_ICE_HISTORY}/ice${VDATE}.${ENSMEM}.${IDATE}.nc" "${DATA}/CICE_OUTPUT/iceh_$(printf "%0.2d" $interval)h.${YYYY}-${MM}-${DD}-$(printf "%5.5d" ${SS}).nc" fi last_fhr=$fhr done - elif [[ "${CDUMP}" =~ "gdas" ]]; then + elif [[ "${RUN}" =~ "gdas" ]]; then # Link CICE generated initial condition file from DATA/CICE_OUTPUT to COMOUTice # This can be thought of as the f000 output from the CICE model local seconds=$(to_seconds ${CDATE:8:2}0000) # convert HHMMSS to seconds - $NLN "${COMOUTice}/${CDUMP}.t${cyc}z.iceic.nc" "${DATA}/CICE_OUTPUT/iceh_ic.${CDATE:0:4}-${CDATE:4:2}-${CDATE:6:2}-${seconds}.nc" + $NLN "${COM_ICE_HISTORY}/${RUN}.t${cyc}z.iceic.nc" "${DATA}/CICE_OUTPUT/iceh_ic.${CDATE:0:4}-${CDATE:4:2}-${CDATE:6:2}-${seconds}.nc" # Link instantaneous CICE forecast output files from DATA/CICE_OUTPUT to COMOUTice local fhr="${FHOUT}" @@ -1064,7 +1065,7 @@ CICE_postdet() { local idate=$(date -d "${CDATE:0:8} ${CDATE:8:2} + ${fhr} hours" +%Y%m%d%H) local seconds=$(to_seconds ${idate:8:2}0000) # convert HHMMSS to seconds local fhr3=$(printf %03i ${fhr}) - $NLN "${COMOUTice}/${CDUMP}.t${cyc}z.icef${fhr3}.nc" "${DATA}/CICE_OUTPUT/iceh_inst.${idate:0:4}-${idate:4:2}-${idate:6:2}-${seconds}.nc" + $NLN "${COM_ICE_HISTORY}/${RUN}.t${cyc}z.icef${fhr3}.nc" "${DATA}/CICE_OUTPUT/iceh_inst.${idate:0:4}-${idate:4:2}-${idate:6:2}-${seconds}.nc" local fhr=$((fhr + FHOUT)) done @@ -1078,7 +1079,7 @@ CICE_postdet() { while [[ ${idate} -le ${rdate} ]]; do local seconds=$(to_seconds ${idate:8:2}0000) # convert HHMMSS to seconds local idatestr="${idate:0:4}-${idate:4:2}-${idate:6:2}-${seconds}" - $NLN "${COMOUTice}/RESTART/${idate:0:8}.${idate:8:2}0000.cice_model.res.nc" "${DATA}/CICE_RESTART/cice_model.res.${idatestr}.nc" + $NLN "${COM_ICE_RESTART}/${idate:0:8}.${idate:8:2}0000.cice_model.res.nc" 
"${DATA}/CICE_RESTART/cice_model.res.${idatestr}.nc" local idate=$(date -d "${idate:0:8} ${idate:8:2} + ${res_int} hours" +%Y%m%d%H) done } @@ -1093,7 +1094,8 @@ CICE_out() { echo "SUB ${FUNCNAME[0]}: Copying output data for CICE" # Copy ice_in namelist from DATA to COMOUTice after the forecast is run (and successfull) - $NCP ${DATA}/ice_in $COMOUTice/ice_in + if [[ ! -d "${COM_ICE_INPUT}" ]]; then mkdir -p "${COM_ICE_INPUT}"; fi + ${NCP} "${DATA}/ice_in" "${COM_ICE_INPUT}/ice_in" } GOCART_rc() { @@ -1130,7 +1132,7 @@ GOCART_rc() { GOCART_postdet() { echo "SUB ${FUNCNAME[0]}: Linking output data for GOCART" - [[ ! -d $COMOUTaero ]] && mkdir -p $COMOUTaero + if [[ ! -d "${COM_CHEM_HISTORY}" ]]; then mkdir -p "${COM_CHEM_HISTORY}"; fi fhrlst=$OUTPUT_FH for fhr in $fhrlst; do @@ -1147,10 +1149,10 @@ GOCART_postdet() { # # Temporarily delete existing files due to noclobber in GOCART # - if [[ -e "${COMOUTaero}/gocart.inst_aod.${YYYY}${MM}${DD}_${HH}00z.nc4" ]]; then - rm "${COMOUTaero}/gocart.inst_aod.${YYYY}${MM}${DD}_${HH}00z.nc4" + if [[ -e "${COM_CHEM_HISTORY}/gocart.inst_aod.${YYYY}${MM}${DD}_${HH}00z.nc4" ]]; then + rm "${COM_CHEM_HISTORY}/gocart.inst_aod.${YYYY}${MM}${DD}_${HH}00z.nc4" fi - $NLN $COMOUTaero/gocart.inst_aod.${YYYY}${MM}${DD}_${HH}00z.nc4 $DATA/gocart.inst_aod.${YYYY}${MM}${DD}_${HH}00z.nc4 + ${NLN} "${COM_CHEM_HISTORY}/gocart.inst_aod.${YYYY}${MM}${DD}_${HH}00z.nc4" "${DATA}/gocart.inst_aod.${YYYY}${MM}${DD}_${HH}00z.nc4" done } diff --git a/ush/forecast_predet.sh b/ush/forecast_predet.sh index 4f0cee0b80..334eacedef 100755 --- a/ush/forecast_predet.sh +++ b/ush/forecast_predet.sh @@ -60,7 +60,6 @@ DATM_predet(){ FV3_GFS_predet(){ echo "SUB ${FUNCNAME[0]}: Defining variables for FV3GFS" CDUMP=${CDUMP:-gdas} - CDUMPwave="${CDUMP}wave" FHMIN=${FHMIN:-0} FHMAX=${FHMAX:-9} FHOUT=${FHOUT:-3} @@ -214,10 +213,9 @@ FV3_GFS_predet(){ print_freq=${print_freq:-6} #------------------------------------------------------- - if [[ ${CDUMP} =~ "gfs" || ${RUN} = 
"gefs" ]] && [ ${rst_invt1} -gt 0 ]; then - RSTDIR_ATM=${RSTDIR_ATM:-${ROTDIR}/${CDUMP}.${PDY}/${cyc}/atmos/RERUN_RESTART} - if [ ! -d $RSTDIR_ATM ]; then mkdir -p $RSTDIR_ATM ; fi - $NLN $RSTDIR_ATM RESTART + if [[ ${RUN} =~ "gfs" || ${RUN} = "gefs" ]] && (( rst_invt1 > 0 )); then + if [[ ! -d ${COM_ATMOS_RESTART} ]]; then mkdir -p "${COM_ATMOS_RESTART}" ; fi + ${NLN} "${COM_ATMOS_RESTART}" RESTART # The final restart written at the end doesn't include the valid date # Create links that keep the same name pattern for these files VDATE=$($NDATE +$FHMAX_GFS $CDATE) @@ -229,34 +227,19 @@ FV3_GFS_predet(){ files="${files} ${base}.tile${tile}.nc" done done - for file in $files; do - $NLN $RSTDIR_ATM/$file $RSTDIR_ATM/${vPDY}.${vcyc}0000.$file + for file in ${files}; do + ${NLN} "${COM_ATMOS_RESTART}/${file}" "${COM_ATMOS_RESTART}/${vPDY}.${vcyc}0000.${file}" done else mkdir -p $DATA/RESTART fi - #------------------------------------------------------- - # member directory - if [[ ${MEMBER} -lt 0 || ${RUN} = "gefs" ]]; then - memchar="" - else - memchar=mem$(printf %03i $MEMBER) - fi - memdir=${memdir:-${ROTDIR}/${RUN}.${PDY}/${cyc}/${memchar}/atmos} - if [ ! -d $memdir ]; then mkdir -p $memdir; fi - - GDATE=$($NDATE -$assim_freq $CDATE) - gPDY=$(echo $GDATE | cut -c1-8) - gcyc=$(echo $GDATE | cut -c9-10) - gmemdir=${gmemdir:-${ROTDIR}/${rCDUMP}.${gPDY}/${gcyc}/${memchar}/atmos} - if [[ "$DOIAU" = "YES" ]]; then sCDATE=$($NDATE -3 $CDATE) sPDY=$(echo $sCDATE | cut -c1-8) scyc=$(echo $sCDATE | cut -c9-10) - tPDY=$gPDY - tcyc=$gcyc + tPDY=${gPDY} + tcyc=${gcyc} else sCDATE=$CDATE sPDY=$PDY @@ -270,13 +253,8 @@ FV3_GFS_predet(){ WW3_predet(){ echo "SUB ${FUNCNAME[0]}: Defining variables for WW3" - if [[ $CDUMP =~ "gdas" ]]; then - export RSTDIR_WAVE=$ROTDIR/${CDUMP}.${PDY}/${cyc}/wave/restart - else - export RSTDIR_WAVE=${RSTDIR_WAVE:-$ROTDIR/${CDUMP}.${PDY}/${cyc}/wave/restart} - fi - if [ ! 
-d $RSTDIR_WAVE ]; then mkdir -p $RSTDIR_WAVE ; fi - $NLN $RSTDIR_WAVE restart_wave + if [[ ! -d "${COM_WAVE_RESTART}" ]]; then mkdir -p "${COM_WAVE_RESTART}" ; fi + ${NLN} "${COM_WAVE_RESTART}" "restart_wave" } CICE_predet(){ @@ -289,5 +267,4 @@ MOM6_predet(){ echo "SUB ${FUNCNAME[0]}: MOM6 before run type determination" if [ ! -d $DATA/MOM6_OUTPUT ]; then mkdir -p $DATA/MOM6_OUTPUT; fi if [ ! -d $DATA/MOM6_RESTART ]; then mkdir -p $DATA/MOM6_RESTART; fi - cd "${DATA}" || exit 8 } diff --git a/ush/fv3gfs_downstream_nems.sh b/ush/fv3gfs_downstream_nems.sh index f52baf465b..48aacf0f07 100755 --- a/ush/fv3gfs_downstream_nems.sh +++ b/ush/fv3gfs_downstream_nems.sh @@ -240,50 +240,50 @@ while [ $nset -le $totalset ]; do if [ $nset = 1 ]; then if [ $fhr3 = anl ]; then - cp pgb2file_${fhr3}_0p25 $COMOUT/${PREFIX}pgrb2.0p25.anl - $WGRIB2 -s pgb2file_${fhr3}_0p25 > $COMOUT/${PREFIX}pgrb2.0p25.anl.idx + cp "pgb2file_${fhr3}_0p25" "${COM_ATMOS_GRIB_0p25}/${PREFIX}pgrb2.0p25.anl" + ${WGRIB2} -s "pgb2file_${fhr3}_0p25" > "${COM_ATMOS_GRIB_0p25}/${PREFIX}pgrb2.0p25.anl.idx" if [ "$PGBS" = "YES" ]; then - cp pgb2file_${fhr3}_0p5 $COMOUT/${PREFIX}pgrb2.0p50.anl - cp pgb2file_${fhr3}_1p0 $COMOUT/${PREFIX}pgrb2.1p00.anl - $WGRIB2 -s pgb2file_${fhr3}_0p5 > $COMOUT/${PREFIX}pgrb2.0p50.anl.idx - $WGRIB2 -s pgb2file_${fhr3}_1p0 > $COMOUT/${PREFIX}pgrb2.1p00.anl.idx + cp "pgb2file_${fhr3}_0p5" "${COM_ATMOS_GRIB_0p50}/${PREFIX}pgrb2.0p50.anl" + cp "pgb2file_${fhr3}_1p0" "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb2.1p00.anl" + ${WGRIB2} -s "pgb2file_${fhr3}_0p5" > "${COM_ATMOS_GRIB_0p50}/${PREFIX}pgrb2.0p50.anl.idx" + ${WGRIB2} -s "pgb2file_${fhr3}_1p0" > "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb2.1p00.anl.idx" if [ "$PGB1F" = 'YES' ]; then - cp pgbfile_${fhr3}_1p0 $COMOUT/${PREFIX}pgrb.1p00.anl - $GRBINDEX $COMOUT/${PREFIX}pgrb.1p00.anl $COMOUT/${PREFIX}pgrb.1p00.anl.idx + cp "pgbfile_${fhr3}_1p0" "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb.1p00.anl" + ${GRBINDEX} 
"${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb.1p00.anl" "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb.1p00.anl.idx" fi fi else - cp pgb2file_${fhr3}_0p25 $COMOUT/${PREFIX}pgrb2.0p25.f${fhr3} - $WGRIB2 -s pgb2file_${fhr3}_0p25 > $COMOUT/${PREFIX}pgrb2.0p25.f${fhr3}.idx + cp "pgb2file_${fhr3}_0p25" "${COM_ATMOS_GRIB_0p25}/${PREFIX}pgrb2.0p25.f${fhr3}" + ${WGRIB2} -s "pgb2file_${fhr3}_0p25" > "${COM_ATMOS_GRIB_0p25}/${PREFIX}pgrb2.0p25.f${fhr3}.idx" if [ "$PGBS" = "YES" ]; then - cp pgb2file_${fhr3}_0p5 $COMOUT/${PREFIX}pgrb2.0p50.f${fhr3} - cp pgb2file_${fhr3}_1p0 $COMOUT/${PREFIX}pgrb2.1p00.f${fhr3} - $WGRIB2 -s pgb2file_${fhr3}_0p5 > $COMOUT/${PREFIX}pgrb2.0p50.f${fhr3}.idx - $WGRIB2 -s pgb2file_${fhr3}_1p0 > $COMOUT/${PREFIX}pgrb2.1p00.f${fhr3}.idx + cp "pgb2file_${fhr3}_0p5" "${COM_ATMOS_GRIB_0p50}/${PREFIX}pgrb2.0p50.f${fhr3}" + cp "pgb2file_${fhr3}_1p0" "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb2.1p00.f${fhr3}" + ${WGRIB2} -s "pgb2file_${fhr3}_0p5" > "${COM_ATMOS_GRIB_0p50}/${PREFIX}pgrb2.0p50.f${fhr3}.idx" + ${WGRIB2} -s "pgb2file_${fhr3}_1p0" > "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb2.1p00.f${fhr3}.idx" if [ "$PGB1F" = 'YES' ]; then - cp pgbfile_${fhr3}_1p0 $COMOUT/${PREFIX}pgrb.1p00.f${fhr3} - $GRBINDEX $COMOUT/${PREFIX}pgrb.1p00.f${fhr3} $COMOUT/${PREFIX}pgrb.1p00.f${fhr3}.idx + cp "pgbfile_${fhr3}_1p0" "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb.1p00.f${fhr3}" + ${GRBINDEX} "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb.1p00.f${fhr3}" "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb.1p00.f${fhr3}.idx" fi fi fi elif [ $nset = 2 ]; then if [ $fhr3 = anl ]; then - cp pgb2bfile_${fhr3}_0p25 $COMOUT/${PREFIX}pgrb2b.0p25.anl - $WGRIB2 -s pgb2bfile_${fhr3}_0p25 > $COMOUT/${PREFIX}pgrb2b.0p25.anl.idx + cp "pgb2bfile_${fhr3}_0p25" "${COM_ATMOS_GRIB_0p25}/${PREFIX}pgrb2b.0p25.anl" + ${WGRIB2} -s "pgb2bfile_${fhr3}_0p25" > "${COM_ATMOS_GRIB_0p25}/${PREFIX}pgrb2b.0p25.anl.idx" if [ "$PGBS" = "YES" ]; then - cp pgb2bfile_${fhr3}_0p5 $COMOUT/${PREFIX}pgrb2b.0p50.anl - cp pgb2bfile_${fhr3}_1p0 
$COMOUT/${PREFIX}pgrb2b.1p00.anl - $WGRIB2 -s pgb2bfile_${fhr3}_0p5 > $COMOUT/${PREFIX}pgrb2b.0p50.anl.idx - $WGRIB2 -s pgb2bfile_${fhr3}_1p0 > $COMOUT/${PREFIX}pgrb2b.1p00.anl.idx + cp "pgb2bfile_${fhr3}_0p5" "${COM_ATMOS_GRIB_0p50}/${PREFIX}pgrb2b.0p50.anl" + cp "pgb2bfile_${fhr3}_1p0" "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb2b.1p00.anl" + ${WGRIB2} -s "pgb2bfile_${fhr3}_0p5" > "${COM_ATMOS_GRIB_0p50}/${PREFIX}pgrb2b.0p50.anl.idx" + ${WGRIB2} -s "pgb2bfile_${fhr3}_1p0" > "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb2b.1p00.anl.idx" fi else - cp pgb2bfile_${fhr3}_0p25 $COMOUT/${PREFIX}pgrb2b.0p25.f${fhr3} - $WGRIB2 -s pgb2bfile_${fhr3}_0p25 > $COMOUT/${PREFIX}pgrb2b.0p25.f${fhr3}.idx + cp "pgb2bfile_${fhr3}_0p25" "${COM_ATMOS_GRIB_0p25}/${PREFIX}pgrb2b.0p25.f${fhr3}" + ${WGRIB2} -s "pgb2bfile_${fhr3}_0p25" > "${COM_ATMOS_GRIB_0p25}/${PREFIX}pgrb2b.0p25.f${fhr3}.idx" if [ "$PGBS" = "YES" ]; then - cp pgb2bfile_${fhr3}_0p5 $COMOUT/${PREFIX}pgrb2b.0p50.f${fhr3} - cp pgb2bfile_${fhr3}_1p0 $COMOUT/${PREFIX}pgrb2b.1p00.f${fhr3} - $WGRIB2 -s pgb2bfile_${fhr3}_0p5 > $COMOUT/${PREFIX}pgrb2b.0p50.f${fhr3}.idx - $WGRIB2 -s pgb2bfile_${fhr3}_1p0 > $COMOUT/${PREFIX}pgrb2b.1p00.f${fhr3}.idx + cp "pgb2bfile_${fhr3}_0p5" "${COM_ATMOS_GRIB_0p50}/${PREFIX}pgrb2b.0p50.f${fhr3}" + cp "pgb2bfile_${fhr3}_1p0" "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb2b.1p00.f${fhr3}" + ${WGRIB2} -s "pgb2bfile_${fhr3}_0p5" > "${COM_ATMOS_GRIB_0p50}/${PREFIX}pgrb2b.0p50.f${fhr3}.idx" + ${WGRIB2} -s "pgb2bfile_${fhr3}_1p0" > "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb2b.1p00.f${fhr3}.idx" fi fi fi diff --git a/ush/gaussian_sfcanl.sh b/ush/gaussian_sfcanl.sh index e7be2c034f..f8d2763bb5 100755 --- a/ush/gaussian_sfcanl.sh +++ b/ush/gaussian_sfcanl.sh @@ -87,7 +87,7 @@ # $FIXWGTS # $FIXam/global_hyblev.l65.txt # -# input data : $COMOUT/RESTART/${PDY}.${cyc}0000.sfcanl_data.tile*.nc +# input data : ${COM_ATMOS_RESTART}/${PDY}.${cyc}0000.sfcanl_data.tile*.nc # # output data: $PGMOUT # $PGMERR @@ -131,7 +131,6 @@ 
FIXfv3=${FIXfv3:-$HOMEgfs/fix/orog} FIXam=${FIXam:-$HOMEgfs/fix/am} FIXWGTS=${FIXWGTS:-$FIXfv3/$CASE/fv3_SCRIP_${CASE}_GRIDSPEC_lon${LONB_SFC}_lat${LATB_SFC}.gaussian.neareststod.nc} DATA=${DATA:-$(pwd)} -COMOUT=${COMOUT:-$(pwd)} # Filenames. XC=${XC:-} @@ -160,7 +159,8 @@ else mkdata=YES fi cd $DATA||exit 99 -[[ -d $COMOUT ]]||mkdir -p $COMOUT +[[ -d "${COM_ATMOS_ANALYSIS}" ]] || mkdir -p "${COM_ATMOS_ANALYSIS}" +[[ -d "${COM_ATMOS_RESTART}" ]] || mkdir -p "${COM_ATMOS_RESTART}" cd $DATA ################################################################################ @@ -169,12 +169,10 @@ export PGM=$GAUSFCANLEXE export pgm=$PGM $LOGSCRIPT -PDY=$(echo $CDATE | cut -c1-8) -cyc=$(echo $CDATE | cut -c9-10) -iy=$(echo $CDATE | cut -c1-4) -im=$(echo $CDATE | cut -c5-6) -id=$(echo $CDATE | cut -c7-8) -ih=$(echo $CDATE | cut -c9-10) +iy=${PDY:0:4} +im=${PDY:4:2} +id=${PDY:6:2} +ih=${cyc} export OMP_NUM_THREADS=${OMP_NUM_THREADS_SFC:-1} @@ -182,12 +180,12 @@ export OMP_NUM_THREADS=${OMP_NUM_THREADS_SFC:-1} $NLN $FIXWGTS ./weights.nc # input analysis tiles (with nst records) -$NLN $COMOUT/RESTART/${PDY}.${cyc}0000.sfcanl_data.tile1.nc ./anal.tile1.nc -$NLN $COMOUT/RESTART/${PDY}.${cyc}0000.sfcanl_data.tile2.nc ./anal.tile2.nc -$NLN $COMOUT/RESTART/${PDY}.${cyc}0000.sfcanl_data.tile3.nc ./anal.tile3.nc -$NLN $COMOUT/RESTART/${PDY}.${cyc}0000.sfcanl_data.tile4.nc ./anal.tile4.nc -$NLN $COMOUT/RESTART/${PDY}.${cyc}0000.sfcanl_data.tile5.nc ./anal.tile5.nc -$NLN $COMOUT/RESTART/${PDY}.${cyc}0000.sfcanl_data.tile6.nc ./anal.tile6.nc +${NLN} "${COM_ATMOS_RESTART}/${PDY}.${cyc}0000.sfcanl_data.tile1.nc" "./anal.tile1.nc" +${NLN} "${COM_ATMOS_RESTART}/${PDY}.${cyc}0000.sfcanl_data.tile2.nc" "./anal.tile2.nc" +${NLN} "${COM_ATMOS_RESTART}/${PDY}.${cyc}0000.sfcanl_data.tile3.nc" "./anal.tile3.nc" +${NLN} "${COM_ATMOS_RESTART}/${PDY}.${cyc}0000.sfcanl_data.tile4.nc" "./anal.tile4.nc" +${NLN} "${COM_ATMOS_RESTART}/${PDY}.${cyc}0000.sfcanl_data.tile5.nc" "./anal.tile5.nc" +${NLN} 
"${COM_ATMOS_RESTART}/${PDY}.${cyc}0000.sfcanl_data.tile6.nc" "./anal.tile6.nc" # input orography tiles $NLN $FIXfv3/$CASE/${CASE}_oro_data.tile1.nc ./orog.tile1.nc @@ -200,7 +198,7 @@ $NLN $FIXfv3/$CASE/${CASE}_oro_data.tile6.nc ./orog.tile6.nc $NLN $SIGLEVEL ./vcoord.txt # output gaussian global surface analysis files -$NLN $COMOUT/${APREFIX}sfcanl.nc ./sfc.gaussian.analysis.file +${NLN} "${COM_ATMOS_ANALYSIS}/${APREFIX}sfcanl.nc" "./sfc.gaussian.analysis.file" # Executable namelist cat < fort.41 diff --git a/ush/gfs_bfr2gpk.sh b/ush/gfs_bfr2gpk.sh index c11ec62735..add68536ec 100755 --- a/ush/gfs_bfr2gpk.sh +++ b/ush/gfs_bfr2gpk.sh @@ -10,7 +10,7 @@ # Log: # # K. Brill/HPC 04/12/05 # ######################################################################### -source "$HOMEgfs/ush/preamble.sh" +source "${HOMEgfs}/ush/preamble.sh" # Set GEMPAK paths. @@ -18,32 +18,19 @@ source "$HOMEgfs/ush/preamble.sh" # Go to a working directory. -cd $DATA - -# Set input directory name. - -#BPATH=$COMIN/bufr.t${cyc}z -BPATH=$COMOUT/bufr.t${cyc}z -export BPATH +cd "${DATA}" || exit 2 # Set output directory: - -COMAWP=${COMAWP:-$COMOUT/gempak} -OUTDIR=$COMAWP -if [ ! -d $OUTDIR ]; then mkdir -p $OUTDIR; fi +if [[ ! -d "${COM_ATMOS_GEMPAK}" ]]; then mkdir -p "${COM_ATMOS_GEMPAK}"; fi outfilbase=gfs_${PDY}${cyc} # Get the list of individual station files. 
date -##filelist=$(/bin/ls -1 $BPATH | grep bufr) -##rm -f bufr.combined -##for file in $filelist; do -## cat $BPATH/$file >> bufr.combined -##done - cat $BPATH/bufr.*.${PDY}${cyc} > bufr.combined +cat "${COM_ATMOS_BUFR}/bufr."*".${PDY}${cyc}" > bufr.combined date + namsnd << EOF > /dev/null SNBUFR = bufr.combined SNOUTF = ${outfilbase}.snd @@ -55,20 +42,20 @@ r ex EOF + date -/bin/rm *.nts +/bin/rm ./*.nts snd=${outfilbase}.snd sfc=${outfilbase}.sfc -cp $snd $OUTDIR/.$snd -cp $sfc $OUTDIR/.$sfc -mv $OUTDIR/.$snd $OUTDIR/$snd -mv $OUTDIR/.$sfc $OUTDIR/$sfc - -if [ $SENDDBN = "YES" ] -then - $DBNROOT/bin/dbn_alert MODEL GFS_PTYP_SFC $job $OUTDIR/$sfc - $DBNROOT/bin/dbn_alert MODEL GFS_PTYP_SND $job $OUTDIR/$snd +cp "${snd}" "${COM_ATMOS_GEMPAK}/.${snd}" +cp "${sfc}" "${COM_ATMOS_GEMPAK}/.${sfc}" +mv "${COM_ATMOS_GEMPAK}/.${snd}" "${COM_ATMOS_GEMPAK}/${snd}" +mv "${COM_ATMOS_GEMPAK}/.${sfc}" "${COM_ATMOS_GEMPAK}/${sfc}" + +if [[ ${SENDDBN} == "YES" ]]; then + "${DBNROOT}/bin/dbn_alert" MODEL GFS_PTYP_SFC "${job}" "${COM_ATMOS_GEMPAK}/${sfc}" + "${DBNROOT}/bin/dbn_alert" MODEL GFS_PTYP_SND "${job}" "${COM_ATMOS_GEMPAK}/${snd}" fi -echo done > $DATA/gembufr.done +echo "done" > "${DATA}/gembufr.done" diff --git a/ush/gfs_bufr.sh b/ush/gfs_bufr.sh index 65d81ce521..b782c707c9 100755 --- a/ush/gfs_bufr.sh +++ b/ush/gfs_bufr.sh @@ -43,7 +43,7 @@ CLASS="class1fv3" cat << EOF > gfsparm &NAMMET levs=${LEVS},makebufr=${bufrflag}, - dird="${COMOUT}/bufr.${cycle}/bufr", + dird="${COM_ATMOS_BUFR}/bufr", nstart=${FSTART},nend=${FEND},nint=${FINT}, nend1=${NEND1},nint1=${NINT1},nint3=${NINT3}, nsfc=80,f00=${f00flag},fformat=${fformat},np1=0 @@ -58,7 +58,7 @@ for (( hr = 10#${FSTART}; hr <= 10#${FEND}; hr = hr + 10#${FINT} )); do # Make sure all files are available: ic=0 while (( ic < 1000 )); do - if [ ! -f "${COMIN}/${RUN}.${cycle}.logf${hh3}.${logfm}" ]; then + if [[ ! 
-f "${COM_ATMOS_HISTORY}/${RUN}.${cycle}.logf${hh3}.${logfm}" ]]; then sleep 10 ic=$((ic + 1)) else @@ -71,8 +71,8 @@ for (( hr = 10#${FSTART}; hr <= 10#${FEND}; hr = hr + 10#${FINT} )); do fi done #------------------------------------------------------------------ - ln -sf "${COMIN}/${RUN}.${cycle}.atmf${hh3}.${atmfm}" "sigf${hh2}" - ln -sf "${COMIN}/${RUN}.${cycle}.sfcf${hh3}.${atmfm}" "flxf${hh2}" + ln -sf "${COM_ATMOS_HISTORY}/${RUN}.${cycle}.atmf${hh3}.${atmfm}" "sigf${hh2}" + ln -sf "${COM_ATMOS_HISTORY}/${RUN}.${cycle}.sfcf${hh3}.${atmfm}" "flxf${hh2}" done # define input BUFR table file. @@ -85,8 +85,8 @@ export err=$? if [ $err -ne 0 ]; then echo "GFS postsnd job error, Please check files " - echo $COMIN/${RUN}.${cycle}.atmf${hh2}.${atmfm} - echo $COMIN/${RUN}.${cycle}.sfcf${hh2}.${atmfm} + echo "${COM_ATMOS_HISTORY}/${RUN}.${cycle}.atmf${hh2}.${atmfm}" + echo "${COM_ATMOS_HISTORY}/${RUN}.${cycle}.sfcf${hh2}.${atmfm}" err_chk fi diff --git a/ush/gfs_sndp.sh b/ush/gfs_sndp.sh index bf0f3a9088..579dd5ae25 100755 --- a/ush/gfs_sndp.sh +++ b/ush/gfs_sndp.sh @@ -32,7 +32,7 @@ cd $DATA/$m for stn in $(cat $file_list) do - cp ${COMOUT}/bufr.${cycle}/bufr.$stn.$PDY$cyc $DATA/${m}/bufrin + cp "${COM_ATMOS_BUFR}/bufr.${stn}.${PDY}${cyc}" "${DATA}/${m}/bufrin" export pgm=tocsbufr.x #. 
prep_step export FORT11=$DATA/${m}/bufrin @@ -45,11 +45,11 @@ cd $DATA/$m MAXFILESIZE=600000 / EOF - # JY export err=$?; err_chk - export err=$?; #err_chk + export err=$?; if (( err != 0 )); then - echo "ERROR in $pgm" + echo "FATAL ERROR in ${pgm}" err_chk + exit 3 fi cat $DATA/${m}/bufrout >> $DATA/${m}/gfs_collective$m.fil @@ -57,13 +57,12 @@ EOF rm $DATA/${m}/bufrout done -# if test $SENDCOM = 'NO' - if test $SENDCOM = 'YES' - then - if [ $SENDDBN = 'YES' ] ; then - cp $DATA/${m}/gfs_collective$m.fil $pcom/gfs_collective$m.postsnd_$cyc - $DBNROOT/bin/dbn_alert NTC_LOW BUFR $job $pcom/gfs_collective$m.postsnd_$cyc + if [[ ${SENDCOM} == 'YES' ]]; then + if [[ ${SENDDBN} == 'YES' ]] ; then + cp "${DATA}/${m}/gfs_collective${m}.fil" "${COM_ATMOS_WMO}/gfs_collective${m}.postsnd_${cyc}" + "${DBNROOT}/bin/dbn_alert" NTC_LOW BUFR "${job}" \ + "${COM_ATMOS_WMO}/gfs_collective${m}.postsnd_${cyc}" fi - cp $DATA/${m}/gfs_collective$m.fil ${COMOUT}/bufr.${cycle}/. + cp "${DATA}/${m}/gfs_collective${m}.fil" "${COM_ATMOS_BUFR}/." fi diff --git a/ush/gldas_forcing.sh b/ush/gldas_forcing.sh deleted file mode 100755 index ca5562f459..0000000000 --- a/ush/gldas_forcing.sh +++ /dev/null @@ -1,118 +0,0 @@ -#! 
/usr/bin/env bash -########################################################################### -# this script gets cpc daily precipitation and using gdas hourly precipitation -# to disaggregate daily value into hourly value -########################################################################### - -source "${HOMEgfs:?}/ush/preamble.sh" - -bdate=$1 -edate=$2 - -# HOMEgldas - gldas directory -# EXECgldas - gldas exec directory -# PARMgldas - gldas param directory -# FIXgldas - gldas fix field directory -export LISDIR=${HOMEgldas:?} -export fpath=${RUNDIR:?}/force -export xpath=${RUNDIR:?}/force -export WGRIB=${WGRIB:?} -export COPYGB=${COPYGB:?} -export ERRSCRIPT=${ERRSCRIPT:-"eval [[ ${err} = 0 ]]"} - -#------------------------------- -#--- extract variables of each timestep and create forcing files -sdate=${bdate} -edate=$(sh "${FINDDATE:?}" "${edate}" d-1) -while [[ "${sdate}" -lt "${edate}" ]] ; do - - sdat0=$(sh "${FINDDATE:?}" "${sdate}" d-1) - [[ ! -d ${xpath}/cpc.${sdate} ]] && mkdir -p "${xpath}/cpc.${sdate}" - [[ ! -d ${xpath}/cpc.${sdat0} ]] && mkdir -p "${xpath}/cpc.${sdat0}" - - cd "${xpath}" || exit - rm -f fort.* grib.* - - COMPONENT=${COMPONENT:-"atmos"} - pathp1=${CPCGAUGE:?}/gdas.${sdate}/00/${COMPONENT} - pathp2=${DCOMIN:?}/${sdate}/wgrbbul/cpc_rcdas - cpc_precip="PRCP_CU_GAUGE_V1.0GLB_0.125deg.lnx.${sdate}.RT" - if [[ "${RUN_ENVIR:?}" = "emc" ]] && [[ "${sdate}" -gt "${bdate}" ]]; then - cpc_precip="PRCP_CU_GAUGE_V1.0GLB_0.125deg.lnx.${sdate}.RT_early" - fi - cpc=${pathp1}/${cpc_precip} - if [[ ! -s "${cpc}" ]]; then cpc=${pathp2}/${cpc_precip} ; fi - if [[ "${RUN_ENVIR:?}" = "nco" ]]; then cpc=${pathp2}/${cpc_precip} ; fi - if [[ ! -s "${cpc}" ]]; then - echo "WARNING: GLDAS MISSING ${cpc}, WILL NOT RUN." - exit 3 - fi - cp "${cpc}" "${xpath}/cpc.${sdate}/." 
- - sflux=${fpath}/gdas.${sdat0}/gdas1.t12z.sfluxgrbf06 - prate=gdas.${sdat0}12 - ${WGRIB} -s "${sflux}" | grep "PRATE:sfc" | ${WGRIB} -i "${sflux}" -grib -o "${prate}" - - sflux=${fpath}/gdas.${sdat0}/gdas1.t18z.sfluxgrbf06 - prate=gdas.${sdat0}18 - ${WGRIB} -s "${sflux}" | grep "PRATE:sfc" | ${WGRIB} -i "${sflux}" -grib -o "${prate}" - - sflux=${fpath}/gdas.${sdate}/gdas1.t00z.sfluxgrbf06 - prate=gdas.${sdate}00 - ${WGRIB} -s "${sflux}" | grep "PRATE:sfc" | ${WGRIB} -i "${sflux}" -grib -o "${prate}" - - sflux=${fpath}/gdas.${sdate}/gdas1.t06z.sfluxgrbf06 - prate=gdas.${sdate}06 - ${WGRIB} -s "${sflux}" | grep "PRATE:sfc" | ${WGRIB} -i "${sflux}" -grib -o "${prate}" - - if [[ "${USE_CFP:?}" = "YES" ]] ; then - rm -f ./cfile - touch ./cfile - { - echo "${COPYGB} -i3 '-g255 0 2881 1441 90000 0 128 -90000 360000 125 125' -x gdas.${sdat0}12 grib.12" - echo "${COPYGB} -i3 '-g255 0 2881 1441 90000 0 128 -90000 360000 125 125' -x gdas.${sdat0}18 grib.18" - echo "${COPYGB} -i3 '-g255 0 2881 1441 90000 0 128 -90000 360000 125 125' -x gdas.${sdate}00 grib.00" - echo "${COPYGB} -i3 '-g255 0 2881 1441 90000 0 128 -90000 360000 125 125' -x gdas.${sdate}06 grib.06" - } >> ./cfile - ${APRUN_GLDAS_DATA_PROC:?} ./cfile - else - ${COPYGB} -i3 '-g255 0 2881 1441 90000 0 128 -90000 360000 125 125' -x gdas."${sdat0}"12 grib.12 - ${COPYGB} -i3 '-g255 0 2881 1441 90000 0 128 -90000 360000 125 125' -x gdas."${sdat0}"18 grib.18 - ${COPYGB} -i3 '-g255 0 2881 1441 90000 0 128 -90000 360000 125 125' -x gdas."${sdate}"00 grib.00 - ${COPYGB} -i3 '-g255 0 2881 1441 90000 0 128 -90000 360000 125 125' -x gdas."${sdate}"06 grib.06 - fi - - rm -f fort.10 - touch fort.10 - echo "${sdat0}" >> fort.10 - echo "${sdate}" >> fort.10 - - export pgm=gldas_forcing - # shellcheck disable=SC1091 - . 
prep_step - # shellcheck disable= - - ${WGRIB} -d -bin grib.12 -o fort.11 - ${WGRIB} -d -bin grib.18 -o fort.12 - ${WGRIB} -d -bin grib.00 -o fort.13 - ${WGRIB} -d -bin grib.06 -o fort.14 - - ln -fs "${xpath}/cpc.${sdate}/${cpc_precip}" fort.15 - - "${EXECgldas:?}/gldas_forcing" 1>&1 2>&2 - - export err=$? - ${ERRSCRIPT} || exit 3 - - cp fort.21 "${xpath}/cpc.${sdat0}/precip.gldas.${sdat0}12" - cp fort.22 "${xpath}/cpc.${sdat0}/precip.gldas.${sdat0}18" - cp fort.23 "${xpath}/cpc.${sdate}/precip.gldas.${sdate}00" - cp fort.24 "${xpath}/cpc.${sdate}/precip.gldas.${sdate}06" - - rm -f fort.* grib.* - - sdate=$(sh "${FINDDATE}" "${sdate}" d+1) -done -#------------------------------- - -exit "${err}" diff --git a/ush/gldas_get_data.sh b/ush/gldas_get_data.sh deleted file mode 100755 index 3416309119..0000000000 --- a/ush/gldas_get_data.sh +++ /dev/null @@ -1,76 +0,0 @@ -#! /usr/bin/env bash -######################################################### -# This script generate gldas forcing from gdas prod sflux -######################################################### - -source "${HOMEgfs:?}/ush/preamble.sh" - -bdate=$1 -edate=$2 - -if [[ "${USE_CFP:-"NO"}" = "YES" ]] ; then - touch ./cfile -fi - -### COMINgdas = prod gdas sflux grib2 -### RUNDIR = gldas forcing in grib2 format -### RUNDIR/force = gldas forcing in grib1 format -export COMPONENT=${COMPONENT:-atmos} -fpath=${RUNDIR:?} -gpath=${RUNDIR}/force -cycint=${assim_freq:-6} - -# get gdas flux files to force gldas. -# CPC precipitation is from 12z to 12z. One more day of gdas data is -# needed to disaggregate daily CPC precipitation values to hourly values -cdate=$(${NDATE:?} -12 "${bdate}") - -iter=0 - -#------------------------------- -while [[ "${cdate}" -lt "${edate}" ]]; do - - ymd=$(echo "${cdate}" |cut -c 1-8) - cyc=$(echo "${cdate}" |cut -c 9-10) - [[ ! -d ${fpath}/gdas.${ymd} ]] && mkdir -p "${fpath}/gdas.${ymd}" - [[ ! 
-d ${gpath}/gdas.${ymd} ]] && mkdir -p "${gpath}/gdas.${ymd}" - - f=1 - while [[ "${f}" -le "${cycint}" ]]; do - rflux=${COMINgdas:?}/gdas.${ymd}/${cyc}/${COMPONENT}/gdas.t${cyc}z.sfluxgrbf00${f}.grib2 - fflux=${fpath}/gdas.${ymd}/gdas.t${cyc}z.sfluxgrbf0${f}.grib2 - gflux=${gpath}/gdas.${ymd}/gdas1.t${cyc}z.sfluxgrbf0${f} - if [[ ! -s "${rflux}" ]];then - echo "WARNING: GLDAS MISSING ${rflux}, WILL NOT RUN." - exit 2 - fi - rm -f "${fflux}" "${gflux}" - touch "${fflux}" "${gflux}" - - fcsty=anl - if [[ "${f}" -ge 1 ]]; then fcsty=fcst; fi - - if [[ "${USE_CFP:-"NO"}" = "YES" ]] ; then - if [[ "${CFP_MP:-"NO"}" = "YES" ]]; then - echo "${iter} ${USHgldas:?}/gldas_process_data.sh ${iter} ${rflux} ${fcsty} ${fflux} ${gflux} ${f}" >> ./cfile - else - echo "${USHgldas:?}/gldas_process_data.sh ${iter} ${rflux} ${fcsty} ${fflux} ${gflux} ${f}" >> ./cfile - fi - else - "${USHgldas:?}/gldas_process_data.sh" "${iter}" "${rflux}" "${fcsty}" "${fflux}" "${gflux}" "${f}" - fi - - iter=$((iter+1)) - f=$((f+1)) - done - -#------------------------------- - cdate=$(${NDATE} +"${cycint}" "${cdate}") -done -#------------------------------- - -if [[ "${USE_CFP:-"NO"}" = "YES" ]] ; then - ${APRUN_GLDAS_DATA_PROC:?} ./cfile -fi - -exit $? diff --git a/ush/gldas_liscrd.sh b/ush/gldas_liscrd.sh deleted file mode 100755 index 7c0f446035..0000000000 --- a/ush/gldas_liscrd.sh +++ /dev/null @@ -1,46 +0,0 @@ -#! /usr/bin/env bash - -source "${HOMEgfs:?}/ush/preamble.sh" - -if [[ $# -lt 3 ]]; then - echo usage "$0" yyyymmddhh1 yyyymmddhh2 126/382/574/1534 - exit $? 
-fi - -date1=$1 -date2=$2 -grid=$3 - -yyyy1=$(echo "${date1}" | cut -c 1-4) -mm1=$(echo "${date1}" | cut -c 5-6) -dd1=$(echo "${date1}" | cut -c 7-8) -hh1=$(echo "${date1}" | cut -c 9-10) -yyyy2=$(echo "${date2}" | cut -c 1-4) -mm2=$(echo "${date2}" | cut -c 5-6) -dd2=$(echo "${date2}" | cut -c 7-8) -hh2=$(echo "${date2}" | cut -c 9-10) - -PARM_LM=${PARMgldas:?} -LISCARD=lis.crd - -rm -f "${LISCARD}" -touch "${LISCARD}" -{ - cat "${PARM_LM}/lis.crd.T${grid}.tmp.1" - echo "LIS%t%SSS = 0 " - echo "LIS%t%SMN = 00 " - echo "LIS%t%SHR = ${hh1} " - echo "LIS%t%SDA = ${dd1} " - echo "LIS%t%SMO = ${mm1} " - echo "LIS%t%SYR = ${yyyy1}" - echo "LIS%t%ENDCODE = 1 " - echo "LIS%t%ESS = 0 " - echo "LIS%t%EMN = 00 " - echo "LIS%t%EHR = ${hh2} " - echo "LIS%t%EDA = ${dd2} " - echo "LIS%t%EMO = ${mm2} " - echo "LIS%t%EYR = ${yyyy2}" - cat "${PARM_LM}/lis.crd.T${grid}.tmp.2" -} >> "${LISCARD}" - -exit 0 diff --git a/ush/gldas_process_data.sh b/ush/gldas_process_data.sh deleted file mode 100755 index 4770170a97..0000000000 --- a/ush/gldas_process_data.sh +++ /dev/null @@ -1,34 +0,0 @@ -#! 
/usr/bin/env bash - -source "${HOMEgfs:?}/ush/preamble.sh" "$1" - -rflux=$2 -fcsty=$3 -fflux=$4 -gflux=$5 -f=$6 - -WGRIB2=${WGRIB2:?} -CNVGRIB=${CNVGRIB:?} - -${WGRIB2} "${rflux}" | grep "TMP:1 hybrid" | grep "${fcsty}" | ${WGRIB2} -i "${rflux}" -append -grib "${fflux}" -${WGRIB2} "${rflux}" | grep "SPFH:1 hybrid" | grep "${fcsty}" | ${WGRIB2} -i "${rflux}" -append -grib "${fflux}" -${WGRIB2} "${rflux}" | grep "UGRD:1 hybrid" | grep "${fcsty}" | ${WGRIB2} -i "${rflux}" -append -grib "${fflux}" -${WGRIB2} "${rflux}" | grep "VGRD:1 hybrid" | grep "${fcsty}" | ${WGRIB2} -i "${rflux}" -append -grib "${fflux}" -${WGRIB2} "${rflux}" | grep "HGT:1 hybrid" | grep "${fcsty}" | ${WGRIB2} -i "${rflux}" -append -grib "${fflux}" -${WGRIB2} "${rflux}" | grep "PRES:surface" | grep "${fcsty}" | ${WGRIB2} -i "${rflux}" -append -grib "${fflux}" -${WGRIB2} "${rflux}" | grep "PRATE:surface" | grep ave | ${WGRIB2} -i "${rflux}" -append -grib "${fflux}" -${WGRIB2} "${rflux}" | grep "VEG:surface" | grep "${fcsty}" | ${WGRIB2} -i "${rflux}" -append -grib "${fflux}" -${WGRIB2} "${rflux}" | grep "SFCR:surface" | grep "${fcsty}" | ${WGRIB2} -i "${rflux}" -append -grib "${fflux}" -${WGRIB2} "${rflux}" | grep "SFEXC:surface" | grep "${fcsty}" | ${WGRIB2} -i "${rflux}" -append -grib "${fflux}" -${WGRIB2} "${rflux}" | grep "TMP:surface" | grep "${fcsty}" | ${WGRIB2} -i "${rflux}" -append -grib "${fflux}" -${WGRIB2} "${rflux}" | grep "WEASD:surface" | grep "${fcsty}" | ${WGRIB2} -i "${rflux}" -append -grib "${fflux}" -${WGRIB2} "${rflux}" | grep "SNOD:surface" | grep "${fcsty}" | ${WGRIB2} -i "${rflux}" -append -grib "${fflux}" - -${WGRIB2} "${rflux}" | grep "DSWRF:surface:${f} hour fcst" | ${WGRIB2} -i "${rflux}" -append -grib "${fflux}" -${WGRIB2} "${rflux}" | grep "DLWRF:surface:${f} hour fcst" | ${WGRIB2} -i "${rflux}" -append -grib "${fflux}" -${WGRIB2} "${rflux}" | grep "USWRF:surface:${f} hour fcst" | ${WGRIB2} -i "${rflux}" -append -grib "${fflux}" - -${CNVGRIB} -g21 "${fflux}" "${gflux}" 
- -exit $? diff --git a/ush/hpssarch_gen.sh b/ush/hpssarch_gen.sh index a510b5e84b..f9a15e2307 100755 --- a/ush/hpssarch_gen.sh +++ b/ush/hpssarch_gen.sh @@ -8,16 +8,13 @@ source "${HOMEgfs}/ush/preamble.sh" type=${1:-gfs} ##gfs, gdas, enkfgdas or enkfggfs -CDATE=${CDATE:-2018010100} -PDY=$(echo $CDATE | cut -c 1-8) -cyc=$(echo $CDATE | cut -c 9-10) ARCH_GAUSSIAN=${ARCH_GAUSSIAN:-"YES"} ARCH_GAUSSIAN_FHMAX=${ARCH_GAUSSIAN_FHMAX:-36} ARCH_GAUSSIAN_FHINC=${ARCH_GAUSSIAN_FHINC:-6} # Set whether to archive downstream products DO_DOWN=${DO_DOWN:-"NO"} -if [ ${DO_BUFRSND} = "YES" -o ${WAFSF} = "YES" ]; then +if [[ ${DO_BUFRSND} = "YES" || ${WAFSF} = "YES" ]]; then export DO_DOWN="YES" fi @@ -45,7 +42,7 @@ if [[ ${type} = "gfs" ]]; then touch gfs_netcdfb.txt touch gfs_flux.txt - if [ $MODE = "cycled" ]; then + if [[ ${MODE} = "cycled" ]]; then rm -f gfs_netcdfa.txt touch gfs_netcdfa.txt fi @@ -56,31 +53,33 @@ if [[ ${type} = "gfs" ]]; then touch gfs_downstream.txt fi - dirpath="gfs.${PDY}/${cyc}/atmos/" - dirname="./${dirpath}" - obs_dirname="./gfs.${PDY}/${cyc}/obs/" - head="gfs.t${cyc}z." 
if [[ ${ARCH_GAUSSIAN} = "YES" ]]; then - echo "${dirname}${head}pgrb2b.0p25.anl " >>gfs_pgrb2b.txt - echo "${dirname}${head}pgrb2b.0p25.anl.idx " >>gfs_pgrb2b.txt - echo "${dirname}${head}pgrb2b.1p00.anl " >>gfs_pgrb2b.txt - echo "${dirname}${head}pgrb2b.1p00.anl.idx " >>gfs_pgrb2b.txt - - if [ $MODE = "cycled" ]; then - echo "${dirname}${head}atmanl.nc " >>gfs_netcdfa.txt - echo "${dirname}${head}sfcanl.nc " >>gfs_netcdfa.txt - echo "${dirname}${head}atmi*.nc " >>gfs_netcdfa.txt - echo "${dirname}${head}dtfanl.nc " >>gfs_netcdfa.txt - echo "${dirname}${head}loginc.txt " >>gfs_netcdfa.txt + { + echo "${COM_ATMOS_GRIB_0p25/${ROTDIR}\//}/${head}pgrb2b.0p25.anl" + echo "${COM_ATMOS_GRIB_0p25/${ROTDIR}\//}/${head}pgrb2b.0p25.anl.idx" + echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}pgrb2b.1p00.anl" + echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}pgrb2b.1p00.anl.idx" + } >> gfs_pgrb2b.txt + + if [[ ${MODE} = "cycled" ]]; then + { + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}atmanl.nc" + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}sfcanl.nc" + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}atmi*.nc" + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}dtfanl.nc" + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}loginc.txt" + } >> gfs_netcdfa.txt fi fh=0 - while [ $fh -le $ARCH_GAUSSIAN_FHMAX ]; do - fhr=$(printf %03i $fh) - echo "${dirname}${head}atmf${fhr}.nc " >>gfs_netcdfb.txt - echo "${dirname}${head}sfcf${fhr}.nc " >>gfs_netcdfb.txt + while (( fh <= ARCH_GAUSSIAN_FHMAX )); do + fhr=$(printf %03i "${fh}") + { + echo "${COM_ATMOS_HISTORY/${ROTDIR}\//}/${head}atmf${fhr}.nc" + echo "${COM_ATMOS_HISTORY/${ROTDIR}\//}/${head}sfcf${fhr}.nc" + } >> gfs_netcdfb.txt fh=$((fh+ARCH_GAUSSIAN_FHINC)) done fi @@ -88,113 +87,138 @@ if [[ ${type} = "gfs" ]]; then #.................. 
# Exclude the gfsarch.log file, which will change during the tar operation # This uses the bash extended globbing option - echo "./logs/${CDATE}/gfs!(arch).log " >>gfsa.txt - echo "${dirname}input.nml " >>gfsa.txt - if [[ ${MODE} = "cycled" ]]; then - echo "${dirname}${head}gsistat " >>gfsa.txt - echo "${obs_dirname}${head}nsstbufr " >>gfsa.txt - echo "${obs_dirname}${head}prepbufr " >>gfsa.txt - echo "${obs_dirname}${head}prepbufr.acft_profiles " >>gfsa.txt - fi - echo "${dirname}${head}pgrb2.0p25.anl " >>gfsa.txt - echo "${dirname}${head}pgrb2.0p25.anl.idx " >>gfsa.txt - #Only generated if there are cyclones to track - cyclone_files=(avno.t${cyc}z.cyclone.trackatcfunix - avnop.t${cyc}z.cyclone.trackatcfunix - trak.gfso.atcfunix.${PDY}${cyc} - trak.gfso.atcfunix.altg.${PDY}${cyc} - storms.gfso.atcf_gen.${PDY}${cyc} - storms.gfso.atcf_gen.altg.${PDY}${cyc}) - - for file in ${cyclone_files[@]}; do - [[ -s ${ROTDIR}/${dirname}${file} ]] && echo "${dirname}${file}" >>gfsa.txt - done + { + echo "./logs/${PDY}${cyc}/gfs!(arch).log" + echo "${COM_ATMOS_HISTORY/${ROTDIR}\//}/input.nml" + + if [[ ${MODE} = "cycled" ]]; then + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}gsistat" + echo "${COM_OBS/${ROTDIR}\//}/${head}nsstbufr" + echo "${COM_OBS/${ROTDIR}\//}/${head}prepbufr" + echo "${COM_OBS/${ROTDIR}\//}/${head}prepbufr.acft_profiles" + fi - if [[ ${DO_DOWN} = "YES" ]]; then - if [[ ${DO_BUFRSND} = "YES" ]]; then - echo "${dirname}gempak/gfs_${PDY}${cyc}.sfc " >>gfs_downstream.txt - echo "${dirname}gempak/gfs_${PDY}${cyc}.snd " >>gfs_downstream.txt - echo "${dirname}wmo/gfs_collective*.postsnd_${cyc} " >>gfs_downstream.txt - echo "${dirname}bufr.t${cyc}z " >>gfs_downstream.txt - echo "${dirname}gfs.t${cyc}z.bufrsnd.tar.gz " >>gfs_downstream.txt - fi - if [[ ${WAFSF} = "YES" ]]; then - echo "${dirname}wafsgfs*.t${cyc}z.gribf*.grib2 " >>gfs_downstream.txt - echo "${dirname}gfs.t${cyc}z.wafs_grb45f*.grib2 " >>gfs_downstream.txt - echo 
"${dirname}gfs.t${cyc}z.wafs_grb45f*.nouswafs.grib2 " >>gfs_downstream.txt - echo "${dirname}WAFS_blended_${PDY}${cyc}f*.grib2 " >>gfs_downstream.txt - echo "${dirname}gfs.t*z.gcip.f*.grib2 " >>gfs_downstream.txt - echo "${dirname}gfs.t${cyc}z.wafs_0p25.f*.grib2 " >>gfs_downstream.txt - echo "${dirname}gfs.t${cyc}z.wafs_0p25_unblended.f*.grib2" >>gfs_downstream.txt - echo "${dirname}WAFS_0p25_blended_${PDY}${cyc}f*.grib2 " >>gfs_downstream.txt - fi - fi + echo "${COM_ATMOS_GRIB_0p25/${ROTDIR}\//}/${head}pgrb2.0p25.anl" + echo "${COM_ATMOS_GRIB_0p25/${ROTDIR}\//}/${head}pgrb2.0p25.anl.idx" - echo "${dirname}${head}pgrb2.0p50.anl " >>gfsb.txt - echo "${dirname}${head}pgrb2.0p50.anl.idx " >>gfsb.txt - echo "${dirname}${head}pgrb2.1p00.anl " >>gfsb.txt - echo "${dirname}${head}pgrb2.1p00.anl.idx " >>gfsb.txt + #Only generated if there are cyclones to track + cyclone_files=("avno.t${cyc}z.cyclone.trackatcfunix" + "avnop.t${cyc}z.cyclone.trackatcfunix" + "trak.gfso.atcfunix.${PDY}${cyc}" + "trak.gfso.atcfunix.altg.${PDY}${cyc}") + for file in "${cyclone_files[@]}"; do + [[ -s ${COM_ATMOS_TRACK}/${file} ]] && echo "${COM_ATMOS_TRACK/${ROTDIR}\//}/${file}" + done - fh=0 - while [[ ${fh} -le ${FHMAX_GFS} ]]; do - fhr=$(printf %03i ${fh}) - if [[ ${ARCH_GAUSSIAN} = "YES" ]]; then - echo "${dirname}${head}sfluxgrbf${fhr}.grib2 " >>gfs_flux.txt - echo "${dirname}${head}sfluxgrbf${fhr}.grib2.idx " >>gfs_flux.txt - - echo "${dirname}${head}pgrb2b.0p25.f${fhr} " >>gfs_pgrb2b.txt - echo "${dirname}${head}pgrb2b.0p25.f${fhr}.idx " >>gfs_pgrb2b.txt - if [[ -s ${ROTDIR}/${dirpath}${head}pgrb2b.1p00.f${fhr} ]]; then - echo "${dirname}${head}pgrb2b.1p00.f${fhr} " >>gfs_pgrb2b.txt - echo "${dirname}${head}pgrb2b.1p00.f${fhr}.idx " >>gfs_pgrb2b.txt + genesis_files=("storms.gfso.atcf_gen.${PDY}${cyc}" + "storms.gfso.atcf_gen.altg.${PDY}${cyc}") + for file in "${genesis_files[@]}"; do + [[ -s ${COM_ATMOS_GENESIS}/${file} ]] && echo "${COM_ATMOS_GENESIS/${ROTDIR}\//}/${file}" + done + } >> 
gfsa.txt + + { + if [[ ${DO_DOWN} = "YES" ]]; then + if [[ ${DO_BUFRSND} = "YES" ]]; then + echo "${COM_ATMOS_GEMPAK/${ROTDIR}\//}/gfs_${PDY}${cyc}.sfc" + echo "${COM_ATMOS_GEMPAK/${ROTDIR}\//}/gfs_${PDY}${cyc}.snd" + echo "${COM_ATMOS_WMO/${ROTDIR}\//}/gfs_collective*.postsnd_${cyc}" + echo "${COM_ATMOS_BUFR/${ROTDIR}\//}/bufr.t${cyc}z" + echo "${COM_ATMOS_BUFR/${ROTDIR}\//}/gfs.t${cyc}z.bufrsnd.tar.gz" + fi + if [[ ${WAFSF} = "YES" ]]; then + echo "${COM_ATMOS_WAFS/${ROTDIR}\//}/wafsgfs*.t${cyc}z.gribf*.grib2" + echo "${COM_ATMOS_WAFS/${ROTDIR}\//}/gfs.t${cyc}z.wafs_grb45f*.grib2" + echo "${COM_ATMOS_WAFS/${ROTDIR}\//}/gfs.t${cyc}z.wafs_grb45f*.nouswafs.grib2" + echo "${COM_ATMOS_WAFS/${ROTDIR}\//}/WAFS_blended_${PDY}${cyc}f*.grib2" + echo "${COM_ATMOS_WAFS/${ROTDIR}\//}/gfs.t*z.gcip.f*.grib2" + echo "${COM_ATMOS_WAFS/${ROTDIR}\//}/gfs.t${cyc}z.wafs_0p25.f*.grib2" + echo "${COM_ATMOS_WAFS/${ROTDIR}\//}/gfs.t${cyc}z.wafs_0p25_unblended.f*.grib2" + echo "${COM_ATMOS_WAFS/${ROTDIR}\//}/WAFS_0p25_blended_${PDY}${cyc}f*.grib2" fi fi + } >> gfs_downstream.txt - echo "${dirname}${head}pgrb2.0p25.f${fhr} " >>gfsa.txt - echo "${dirname}${head}pgrb2.0p25.f${fhr}.idx " >>gfsa.txt - echo "${dirname}${head}logf${fhr}.txt " >>gfsa.txt + { + echo "${COM_ATMOS_GRIB_0p50/${ROTDIR}\//}/${head}pgrb2.0p50.anl" + echo "${COM_ATMOS_GRIB_0p50/${ROTDIR}\//}/${head}pgrb2.0p50.anl.idx" + echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}pgrb2.1p00.anl" + echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}pgrb2.1p00.anl.idx" + } >> gfsb.txt - if [[ -s ${ROTDIR}/${dirpath}${head}pgrb2.0p50.f${fhr} ]]; then - echo "${dirname}${head}pgrb2.0p50.f${fhr} " >>gfsb.txt - echo "${dirname}${head}pgrb2.0p50.f${fhr}.idx " >>gfsb.txt - fi - if [[ -s ${ROTDIR}/${dirpath}${head}pgrb2.1p00.f${fhr} ]]; then - echo "${dirname}${head}pgrb2.1p00.f${fhr} " >>gfsb.txt - echo "${dirname}${head}pgrb2.1p00.f${fhr}.idx " >>gfsb.txt + + fh=0 + while (( fh <= FHMAX_GFS )); do + fhr=$(printf %03i "${fh}") + if [[ 
${ARCH_GAUSSIAN} = "YES" ]]; then + { + echo "${COM_ATMOS_MASTER/${ROTDIR}\//}/${head}sfluxgrbf${fhr}.grib2" + echo "${COM_ATMOS_MASTER/${ROTDIR}\//}/${head}sfluxgrbf${fhr}.grib2.idx" + } >> gfs_flux.txt + + { + echo "${COM_ATMOS_GRIB_0p25/${ROTDIR}\//}/${head}pgrb2b.0p25.f${fhr}" + echo "${COM_ATMOS_GRIB_0p25/${ROTDIR}\//}/${head}pgrb2b.0p25.f${fhr}.idx" + if [[ -s "${COM_ATMOS_GRIB_1p00}/${head}pgrb2b.1p00.f${fhr}" ]]; then + echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/{head}pgrb2b.1p00.f${fhr}" + echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/{head}pgrb2b.1p00.f${fhr}.idx" + fi + } >> gfs_pgrb2b.txt fi + { + echo "${COM_ATMOS_GRIB_0p25/${ROTDIR}\//}/${head}pgrb2.0p25.f${fhr}" + echo "${COM_ATMOS_GRIB_0p25/${ROTDIR}\//}/${head}pgrb2.0p25.f${fhr}.idx" + echo "${COM_ATMOS_HISTORY/${ROTDIR}\//}/${head}logf${fhr}.txt" + } >> gfsa.txt + + + { + if [[ -s "${COM_ATMOS_GRIB_0p50}/${head}pgrb2.0p50.f${fhr}" ]]; then + echo "${COM_ATMOS_GRIB_0p50/${ROTDIR}\//}/${head}pgrb2.0p50.f${fhr}" + echo "${COM_ATMOS_GRIB_0p50/${ROTDIR}\//}/${head}pgrb2.0p50.f${fhr}.idx" + fi + if [[ -s "${COM_ATMOS_GRIB_1p00}/${head}pgrb2.1p00.f${fhr}" ]]; then + echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}pgrb2.1p00.f${fhr}" + echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}pgrb2.1p00.f${fhr}.idx" + fi + } >> gfsb.txt + inc=${FHOUT_GFS} - if [ ${FHMAX_HF_GFS} -gt 0 -a ${FHOUT_HF_GFS} -gt 0 -a ${fh} -lt ${FHMAX_HF_GFS} ]; then - inc=${FHOUT_HF_GFS} + if (( FHMAX_HF_GFS > 0 && FHOUT_HF_GFS > 0 && fh < FHMAX_HF_GFS )); then + inc=${FHOUT_HF_GFS} fi fh=$((fh+inc)) done #.................. 
- if [[ ${MODE} = "cycled" ]]; then - echo "${dirname}RESTART/*0000.sfcanl_data.tile1.nc " >>gfs_restarta.txt - echo "${dirname}RESTART/*0000.sfcanl_data.tile2.nc " >>gfs_restarta.txt - echo "${dirname}RESTART/*0000.sfcanl_data.tile3.nc " >>gfs_restarta.txt - echo "${dirname}RESTART/*0000.sfcanl_data.tile4.nc " >>gfs_restarta.txt - echo "${dirname}RESTART/*0000.sfcanl_data.tile5.nc " >>gfs_restarta.txt - echo "${dirname}RESTART/*0000.sfcanl_data.tile6.nc " >>gfs_restarta.txt - elif [[ ${MODE} = "forecast-only" ]]; then - echo "${dirname}INPUT/gfs_ctrl.nc " >>gfs_restarta.txt - echo "${dirname}INPUT/gfs_data.tile1.nc " >>gfs_restarta.txt - echo "${dirname}INPUT/gfs_data.tile2.nc " >>gfs_restarta.txt - echo "${dirname}INPUT/gfs_data.tile3.nc " >>gfs_restarta.txt - echo "${dirname}INPUT/gfs_data.tile4.nc " >>gfs_restarta.txt - echo "${dirname}INPUT/gfs_data.tile5.nc " >>gfs_restarta.txt - echo "${dirname}INPUT/gfs_data.tile6.nc " >>gfs_restarta.txt - echo "${dirname}INPUT/sfc_data.tile1.nc " >>gfs_restarta.txt - echo "${dirname}INPUT/sfc_data.tile2.nc " >>gfs_restarta.txt - echo "${dirname}INPUT/sfc_data.tile3.nc " >>gfs_restarta.txt - echo "${dirname}INPUT/sfc_data.tile4.nc " >>gfs_restarta.txt - echo "${dirname}INPUT/sfc_data.tile5.nc " >>gfs_restarta.txt - echo "${dirname}INPUT/sfc_data.tile6.nc " >>gfs_restarta.txt - fi + { + if [[ ${MODE} = "cycled" ]]; then + echo "${COM_ATMOS_RESTART/${ROTDIR}\//}/*0000.sfcanl_data.tile1.nc" + echo "${COM_ATMOS_RESTART/${ROTDIR}\//}/*0000.sfcanl_data.tile2.nc" + echo "${COM_ATMOS_RESTART/${ROTDIR}\//}/*0000.sfcanl_data.tile3.nc" + echo "${COM_ATMOS_RESTART/${ROTDIR}\//}/*0000.sfcanl_data.tile4.nc" + echo "${COM_ATMOS_RESTART/${ROTDIR}\//}/*0000.sfcanl_data.tile5.nc" + echo "${COM_ATMOS_RESTART/${ROTDIR}\//}/*0000.sfcanl_data.tile6.nc" + elif [[ ${MODE} = "forecast-only" ]]; then + echo "${COM_ATMOS_INPUT/${ROTDIR}\//}/gfs_ctrl.nc" + echo "${COM_ATMOS_INPUT/${ROTDIR}\//}/gfs_data.tile1.nc" + echo 
"${COM_ATMOS_INPUT/${ROTDIR}\//}/gfs_data.tile2.nc" + echo "${COM_ATMOS_INPUT/${ROTDIR}\//}/gfs_data.tile3.nc" + echo "${COM_ATMOS_INPUT/${ROTDIR}\//}/gfs_data.tile4.nc" + echo "${COM_ATMOS_INPUT/${ROTDIR}\//}/gfs_data.tile5.nc" + echo "${COM_ATMOS_INPUT/${ROTDIR}\//}/gfs_data.tile6.nc" + echo "${COM_ATMOS_INPUT/${ROTDIR}\//}/sfc_data.tile1.nc" + echo "${COM_ATMOS_INPUT/${ROTDIR}\//}/sfc_data.tile2.nc" + echo "${COM_ATMOS_INPUT/${ROTDIR}\//}/sfc_data.tile3.nc" + echo "${COM_ATMOS_INPUT/${ROTDIR}\//}/sfc_data.tile4.nc" + echo "${COM_ATMOS_INPUT/${ROTDIR}\//}/sfc_data.tile5.nc" + echo "${COM_ATMOS_INPUT/${ROTDIR}\//}/sfc_data.tile6.nc" + fi + } >> gfs_restarta.txt + #.................. if [[ ${DO_WAVE} = "YES" ]]; then @@ -202,26 +226,22 @@ if [[ ${type} = "gfs" ]]; then rm -rf gfswave.txt touch gfswave.txt - dirpath="gfs.${PDY}/${cyc}/wave/" - dirname="./${dirpath}" - head="gfswave.t${cyc}z." #........................... - echo "${dirname}rundata/ww3_multi* " >>gfswave.txt - echo "${dirname}gridded/${head}* " >>gfswave.txt - echo "${dirname}station/${head}* " >>gfswave.txt - + { + echo "${COM_WAVE_HISTORY/${ROTDIR}\//}/ww3_multi*" + echo "${COM_WAVE_GRID/${ROTDIR}\//}/${head}*" + echo "${COM_WAVE_STATION/${ROTDIR}\//}/${head}*" + } >> gfswave.txt fi if [[ ${DO_OCN} = "YES" ]]; then - dirpath="gfs.${PDY}/${cyc}/ocean/" - dirname="./${dirpath}" head="gfs.t${cyc}z." 
rm -f gfs_flux_1p00.txt - rm -f ocn_ice_grib2_0p5.txt + rm -f ocn_ice_grib2_0p5.txt rm -f ocn_ice_grib2_0p25.txt rm -f ocn_2D.txt rm -f ocn_3D.txt @@ -234,42 +254,39 @@ if [[ ${type} = "gfs" ]]; then touch ocn_3D.txt touch ocn_xsect.txt touch ocn_daily.txt - echo "${dirname}MOM_input " >>ocn_2D.txt - echo "${dirname}ocn_2D* " >>ocn_2D.txt - echo "${dirname}ocn_3D* " >>ocn_3D.txt - echo "${dirname}ocn*EQ* " >>ocn_xsect.txt - echo "${dirname}ocn_daily* " >>ocn_daily.txt - echo "${dirname}ocn_ice*0p5x0p5.grb2 " >>ocn_ice_grib2_0p5.txt - echo "${dirname}ocn_ice*0p25x0p25.grb2 " >>ocn_ice_grib2_0p25.txt - - dirpath="gfs.${PDY}/${cyc}/atmos/" - dirname="./${dirpath}" - echo "${dirname}${head}flux.1p00.f??? " >>gfs_flux_1p00.txt - echo "${dirname}${head}flux.1p00.f???.idx " >>gfs_flux_1p00.txt + echo "${COM_OCEAN_INPUT/${ROTDIR}\//}/MOM_input" >> ocn_2D.txt + echo "${COM_OCEAN_HISTORY/${ROTDIR}\//}/ocn_2D*" >> ocn_2D.txt + echo "${COM_OCEAN_HISTORY/${ROTDIR}\//}/ocn_3D*" >> ocn_3D.txt + echo "${COM_OCEAN_XSECT/${ROTDIR}\//}/ocn*EQ*" >> ocn_xsect.txt + echo "${COM_OCEAN_DAILY/${ROTDIR}\//}/ocn_daily*" >> ocn_daily.txt + echo "${COM_OCEAN_GRIB_0p50/${ROTDIR}\//}/ocn_ice*0p5x0p5.grb2" >> ocn_ice_grib2_0p5.txt + echo "${COM_OCEAN_GRIB_0p25/${ROTDIR}\//}/ocn_ice*0p25x0p25.grb2" >> ocn_ice_grib2_0p25.txt + + # Also save fluxes from atmosphere + { + echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}flux.1p00.f???" + echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}flux.1p00.f???.idx" + } >> gfs_flux_1p00.txt fi if [[ ${DO_ICE} = "YES" ]]; then - dirpath="gfs.${PDY}/${cyc}/ice/" - dirname="./${dirpath}" - head="gfs.t${cyc}z." 
rm -f ice.txt touch ice.txt - echo "${dirname}ice_in " >>ice.txt - echo "${dirname}ice*nc " >>ice.txt + { + echo "${COM_ICE_INPUT/${ROTDIR}\//}/ice_in" + echo "${COM_ICE_HISTORY/${ROTDIR}\//}/ice*nc" + } >> ice.txt fi if [[ ${DO_AERO} = "YES" ]]; then - dirpath="gfs.${PDY}/${cyc}/chem" - dirname="./${dirpath}" - head="gocart" rm -f chem.txt touch chem.txt - echo "${dirname}/${head}*" >> chem.txt + echo "${COM_CHEM_HISTORY/${ROTDIR}\//}/${head}*" >> chem.txt fi #----------------------------------------------------- @@ -279,7 +296,7 @@ fi ##end of gfs #----------------------------------------------------- -if [[ ${type} = "gdas" ]]; then +if [[ ${type} == "gdas" ]]; then #----------------------------------------------------- rm -f gdas.txt @@ -289,94 +306,96 @@ if [[ ${type} = "gdas" ]]; then touch gdas_restarta.txt touch gdas_restartb.txt - dirpath="gdas.${PDY}/${cyc}/atmos/" - dirname="./${dirpath}" - obs_dirname="./gdas.${PDY}/${cyc}/obs/" head="gdas.t${cyc}z." #.................. - echo "${dirname}${head}gsistat " >>gdas.txt - echo "${dirname}${head}pgrb2.0p25.anl " >>gdas.txt - echo "${dirname}${head}pgrb2.0p25.anl.idx " >>gdas.txt - echo "${dirname}${head}pgrb2.1p00.anl " >>gdas.txt - echo "${dirname}${head}pgrb2.1p00.anl.idx " >>gdas.txt - echo "${dirname}${head}atmanl.nc " >>gdas.txt - echo "${dirname}${head}sfcanl.nc " >>gdas.txt - if [ -s $ROTDIR/${dirpath}${head}atmanl.ensres.nc ]; then - echo "${dirname}${head}atmanl.ensres.nc " >>gdas.txt - fi - if [ -s $ROTDIR/${dirpath}${head}atma003.ensres.nc ]; then - echo "${dirname}${head}atma003.ensres.nc " >>gdas.txt - fi - if [ -s $ROTDIR/${dirpath}${head}atma009.ensres.nc ]; then - echo "${dirname}${head}atma009.ensres.nc " >>gdas.txt - fi - if [[ -s ${ROTDIR}/${dirpath}${head}cnvstat ]]; then - echo "${dirname}${head}cnvstat " >>gdas.txt - fi - if [[ -s ${ROTDIR}/${dirpath}${head}oznstat ]]; then - echo "${dirname}${head}oznstat " >>gdas.txt - fi - if [[ -s ${ROTDIR}/${dirpath}${head}radstat ]]; then - echo 
"${dirname}${head}radstat " >>gdas.txt - fi - for fstep in prep anal gldas fcst vrfy radmon minmon oznmon; do - if [[ -s ${ROTDIR}/logs/${CDATE}/gdas${fstep}.log ]]; then - echo "./logs/${CDATE}/gdas${fstep}.log " >>gdas.txt - fi - done - echo "./logs/${CDATE}/gdaspost*.log " >>gdas.txt - - fh=0 - while [[ ${fh} -le 9 ]]; do - fhr=$(printf %03i ${fh}) - echo "${dirname}${head}sfluxgrbf${fhr}.grib2 " >>gdas.txt - echo "${dirname}${head}sfluxgrbf${fhr}.grib2.idx " >>gdas.txt - echo "${dirname}${head}pgrb2.0p25.f${fhr} " >>gdas.txt - echo "${dirname}${head}pgrb2.0p25.f${fhr}.idx " >>gdas.txt - echo "${dirname}${head}pgrb2.1p00.f${fhr} " >>gdas.txt - echo "${dirname}${head}pgrb2.1p00.f${fhr}.idx " >>gdas.txt - echo "${dirname}${head}logf${fhr}.txt " >>gdas.txt - echo "${dirname}${head}atmf${fhr}.nc " >>gdas.txt - echo "${dirname}${head}sfcf${fhr}.nc " >>gdas.txt - fh=$((fh+3)) - done - flist="001 002 004 005 007 008" - for fhr in ${flist}; do - echo "${dirname}${head}sfluxgrbf${fhr}.grib2 " >>gdas.txt - echo "${dirname}${head}sfluxgrbf${fhr}.grib2.idx " >>gdas.txt - done - + { + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}gsistat" + echo "${COM_ATMOS_GRIB_0p25/${ROTDIR}\//}/${head}pgrb2.0p25.anl" + echo "${COM_ATMOS_GRIB_0p25/${ROTDIR}\//}/${head}pgrb2.0p25.anl.idx" + echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}pgrb2.1p00.anl" + echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}pgrb2.1p00.anl.idx" + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}atmanl.nc" + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}sfcanl.nc" + if [[ -s "${COM_ATMOS_ANALYSIS}/${head}atmanl.ensres.nc" ]]; then + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}atmanl.ensres.nc" + fi + if [[ -s "${COM_ATMOS_ANALYSIS}/${head}atma003.ensres.nc" ]]; then + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}atma003.ensres.nc" + fi + if [[ -s "${COM_ATMOS_ANALYSIS}/${head}atma009.ensres.nc" ]]; then + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}atma009.ensres.nc" + fi + if [[ -s 
"${COM_ATMOS_ANALYSIS}/${head}cnvstat" ]]; then + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}cnvstat" + fi + if [[ -s "${COM_ATMOS_ANALYSIS}/${head}oznstat" ]]; then + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}oznstat" + fi + if [[ -s "${COM_ATMOS_ANALYSIS}/${head}radstat" ]]; then + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}radstat" + fi + for fstep in prep anal fcst vrfy radmon minmon oznmon; do + if [[ -s "${ROTDIR}/logs/${PDY}${cyc}/gdas${fstep}.log" ]]; then + echo "./logs/${PDY}${cyc}/gdas${fstep}.log" + fi + done + echo "./logs/${PDY}${cyc}/gdaspost*.log" + fh=0 + while [[ ${fh} -le 9 ]]; do + fhr=$(printf %03i "${fh}") + echo "${COM_ATMOS_MASTER/${ROTDIR}\//}/${head}sfluxgrbf${fhr}.grib2" + echo "${COM_ATMOS_MASTER/${ROTDIR}\//}/${head}sfluxgrbf${fhr}.grib2.idx" + echo "${COM_ATMOS_GRIB_0p25/${ROTDIR}\//}/${head}pgrb2.0p25.f${fhr}" + echo "${COM_ATMOS_GRIB_0p25/${ROTDIR}\//}/${head}pgrb2.0p25.f${fhr}.idx" + echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}pgrb2.1p00.f${fhr}" + echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}pgrb2.1p00.f${fhr}.idx" + echo "${COM_ATMOS_HISTORY/${ROTDIR}\//}/${head}logf${fhr}.txt" + echo "${COM_ATMOS_HISTORY/${ROTDIR}\//}/${head}atmf${fhr}.nc" + echo "${COM_ATMOS_HISTORY/${ROTDIR}\//}/${head}sfcf${fhr}.nc" + fh=$((fh+3)) + done + flist="001 002 004 005 007 008" + for fhr in ${flist}; do + file="${COM_ATMOS_MASTER/${ROTDIR}\//}/${head}sfluxgrbf${fhr}.grib2" + if [[ -s "${file}" ]]; then + echo "${file}" + echo "${file}.idx" + fi + done + } >> gdas.txt #.................. 
- if [[ -s ${ROTDIR}/${dirpath}${head}cnvstat ]]; then - echo "${dirname}${head}cnvstat " >>gdas_restarta.txt + if [[ -s "${COM_ATMOS_ANALYSIS}/${head}cnvstat" ]]; then + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}cnvstat" >> gdas_restarta.txt fi - if [[ -s ${ROTDIR}/${dirpath}${head}radstat ]]; then - echo "${dirname}${head}radstat " >>gdas_restarta.txt + if [[ -s "${COM_ATMOS_ANALYSIS}/${head}radstat" ]]; then + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}radstat" >> gdas_restarta.txt fi - echo "${obs_dirname}${head}nsstbufr " >>gdas_restarta.txt - echo "${obs_dirname}${head}prepbufr " >>gdas_restarta.txt - echo "${obs_dirname}${head}prepbufr.acft_profiles " >>gdas_restarta.txt - echo "${dirname}${head}abias " >>gdas_restarta.txt - echo "${dirname}${head}abias_air " >>gdas_restarta.txt - echo "${dirname}${head}abias_int " >>gdas_restarta.txt - echo "${dirname}${head}abias_pc " >>gdas_restarta.txt - echo "${dirname}${head}atmi*nc " >>gdas_restarta.txt - echo "${dirname}${head}dtfanl.nc " >>gdas_restarta.txt - echo "${dirname}${head}loginc.txt " >>gdas_restarta.txt - - echo "${dirname}RESTART/*0000.sfcanl_data.tile1.nc " >>gdas_restarta.txt - echo "${dirname}RESTART/*0000.sfcanl_data.tile2.nc " >>gdas_restarta.txt - echo "${dirname}RESTART/*0000.sfcanl_data.tile3.nc " >>gdas_restarta.txt - echo "${dirname}RESTART/*0000.sfcanl_data.tile4.nc " >>gdas_restarta.txt - echo "${dirname}RESTART/*0000.sfcanl_data.tile5.nc " >>gdas_restarta.txt - echo "${dirname}RESTART/*0000.sfcanl_data.tile6.nc " >>gdas_restarta.txt + { + echo "${COM_OBS/${ROTDIR}\//}/${head}nsstbufr" + echo "${COM_OBS/${ROTDIR}\//}/${head}prepbufr" + echo "${COM_OBS/${ROTDIR}\//}/${head}prepbufr.acft_profiles" + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}abias" + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}abias_air" + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}abias_int" + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}abias_pc" + echo 
"${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}atmi*nc" + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}dtfanl.nc" + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}loginc.txt" + + echo "${COM_ATMOS_RESTART/${ROTDIR}\//}/*0000.sfcanl_data.tile1.nc" + echo "${COM_ATMOS_RESTART/${ROTDIR}\//}/*0000.sfcanl_data.tile2.nc" + echo "${COM_ATMOS_RESTART/${ROTDIR}\//}/*0000.sfcanl_data.tile3.nc" + echo "${COM_ATMOS_RESTART/${ROTDIR}\//}/*0000.sfcanl_data.tile4.nc" + echo "${COM_ATMOS_RESTART/${ROTDIR}\//}/*0000.sfcanl_data.tile5.nc" + echo "${COM_ATMOS_RESTART/${ROTDIR}\//}/*0000.sfcanl_data.tile6.nc" + } >> gdas_restarta.txt #.................. - echo "${dirname}RESTART " >>gdas_restartb.txt + echo "${COM_ATMOS_RESTART/${ROTDIR}\//}" >> gdas_restartb.txt #.................. if [[ ${DO_WAVE} = "YES" ]]; then @@ -386,16 +405,15 @@ if [[ ${type} = "gdas" ]]; then rm -rf gdaswave_restart.txt touch gdaswave_restart.txt - dirpath="gdas.${PDY}/${cyc}/wave/" - dirname="./${dirpath}" - head="gdaswave.t${cyc}z." #........................... - echo "${dirname}gridded/${head}* " >>gdaswave.txt - echo "${dirname}station/${head}* " >>gdaswave.txt + { + echo "${COM_WAVE_GRID/${ROTDIR}\//}/${head}*" + echo "${COM_WAVE_STATION/${ROTDIR}\//}/${head}*" + } >> gdaswave.txt - echo "${dirname}restart/* " >>gdaswave_restart.txt + echo "${COM_WAVE_RESTART/${ROTDIR}\//}/*" >> gdaswave_restart.txt fi @@ -407,21 +425,18 @@ if [[ ${type} = "gdas" ]]; then rm -rf gdasocean_restart.txt touch gdasocean_restart.txt - dirpath="gdas.${PDY}/${cyc}/ocean/" - dirname="./${dirpath}" - head="gdas.t${cyc}z." #........................... 
- echo "${dirname}/${head}* " >>gdasocean.txt - echo "${dirname}/MOM_input " >>gdasocean.txt - - echo "${dirname}/RESTART/* " >>gdasocean_restart.txt - - dirpath="gdas.${PDY}/${cyc}/med/" - dirname="./${dirpath}" + { + echo "${COM_OCEAN_HISTORY/${ROTDIR}\//}/${head}*" + echo "${COM_OCEAN_INPUT/${ROTDIR}\//}" + } >> gdasocean.txt - echo "${dirname}/RESTART/* " >>gdasocean_restart.txt + { + echo "${COM_OCEAN_RESTART/${ROTDIR}\//}/*" + echo "${COM_MED_RESTART/${ROTDIR}\//}/*" + } >> gdasocean_restart.txt fi @@ -432,16 +447,15 @@ if [[ ${type} = "gdas" ]]; then rm -rf gdasice_restart.txt touch gdasice_restart.txt - dirpath="gdas.${PDY}/${cyc}/ice/" - dirname="./${dirpath}" - head="gdas.t${cyc}z." #........................... - echo "${dirname}/${head}* " >>gdasice.txt - echo "${dirname}/ice_in " >>gdasice.txt + { + echo "${COM_ICE_HISTORY/${ROTDIR}\//}/${head}*" + echo "${COM_ICE_INPUT/${ROTDIR}\//}/ice_in" + } >> gdasice.txt - echo "${dirname}/RESTART/* " >>gdasice_restart.txt + echo "${COM_ICE_RESTART/${ROTDIR}\//}/*" >> gdasice_restart.txt fi @@ -452,165 +466,180 @@ fi ##end of gdas #----------------------------------------------------- -if [ ${type} = "enkfgdas" -o ${type} = "enkfgfs" ]; then +if [[ ${type} == "enkfgdas" || ${type} == "enkfgfs" ]]; then #----------------------------------------------------- IAUFHRS_ENKF=${IAUFHRS_ENKF:-6} lobsdiag_forenkf=${lobsdiag_forenkf:-".false."} - nfhrs=$(echo ${IAUFHRS_ENKF} | sed 's/,/ /g') - NMEM_ENKF=${NMEM_ENKF:-80} + nfhrs="${IAUFHRS_ENKF/,/}" + NMEM_ENS=${NMEM_ENS:-80} NMEM_EARCGRP=${NMEM_EARCGRP:-10} ##number of ens memebers included in each tarball - NTARS=$((NMEM_ENKF/NMEM_EARCGRP)) + NTARS=$((NMEM_ENS/NMEM_EARCGRP)) [[ ${NTARS} -eq 0 ]] && NTARS=1 - [[ $((NTARS*NMEM_EARCGRP)) -lt ${NMEM_ENKF} ]] && NTARS=$((NTARS+1)) -##NTARS2=$((NTARS/2)) # number of earc groups to include analysis/increments + [[ $((NTARS*NMEM_EARCGRP)) -lt ${NMEM_ENS} ]] && NTARS=$((NTARS+1)) + ##NTARS2=$((NTARS/2)) # number of earc groups to 
include analysis/increments NTARS2=${NTARS} - dirpath="${RUN}.${PDY}/${cyc}/" - dirname="./${dirpath}" head="${RUN}.t${cyc}z." #.................. - rm -f ${RUN}.txt - touch ${RUN}.txt - - echo "${dirname}${head}enkfstat " >>${RUN}.txt - echo "${dirname}${head}gsistat.ensmean " >>${RUN}.txt - if [[ -s ${ROTDIR}/${dirpath}${head}cnvstat.ensmean ]]; then - echo "${dirname}${head}cnvstat.ensmean " >>${RUN}.txt - fi - if [[ -s ${ROTDIR}/${dirpath}${head}oznstat.ensmean ]]; then - echo "${dirname}${head}oznstat.ensmean " >>${RUN}.txt - fi - if [[ -s ${ROTDIR}/${dirpath}${head}radstat.ensmean ]]; then - echo "${dirname}${head}radstat.ensmean " >>${RUN}.txt - fi - for FHR in $nfhrs; do # loop over analysis times in window - if [ $FHR -eq 6 ]; then - if [ -s $ROTDIR/${dirpath}${head}atmanl.ensmean.nc ]; then - echo "${dirname}${head}atmanl.ensmean.nc " >>${RUN}.txt - fi - if [ -s $ROTDIR/${dirpath}${head}atminc.ensmean.nc ]; then - echo "${dirname}${head}atminc.ensmean.nc " >>${RUN}.txt + rm -f "${RUN}.txt" + touch "${RUN}.txt" + + { + echo "${COM_ATMOS_ANALYSIS_ENSSTAT/${ROTDIR}\//}/${head}enkfstat" + echo "${COM_ATMOS_ANALYSIS_ENSSTAT/${ROTDIR}\//}/${head}gsistat.ensmean" + if [[ -s "${COM_ATMOS_ANALYSIS_ENSSTAT}/${head}cnvstat.ensmean" ]]; then + echo "${COM_ATMOS_ANALYSIS_ENSSTAT/${ROTDIR}\//}/${head}cnvstat.ensmean" + fi + if [[ -s "${COM_ATMOS_ANALYSIS_ENSSTAT}/${head}oznstat.ensmean" ]]; then + echo "${COM_ATMOS_ANALYSIS_ENSSTAT/${ROTDIR}\//}/${head}oznstat.ensmean" + fi + if [[ -s "${COM_ATMOS_ANALYSIS_ENSSTAT}/${head}radstat.ensmean" ]]; then + echo "${COM_ATMOS_ANALYSIS_ENSSTAT/${ROTDIR}\//}/${head}radstat.ensmean" + fi + for FHR in ${nfhrs}; do # loop over analysis times in window + if [[ ${FHR} -eq 6 ]]; then + if [[ -s "${COM_ATMOS_ANALYSIS_ENSSTAT}/${head}atmanl.ensmean.nc" ]]; then + echo "${COM_ATMOS_ANALYSIS_ENSSTAT/${ROTDIR}\//}/${head}atmanl.ensmean.nc" fi - else - if [ -s $ROTDIR/${dirpath}${head}atma00${FHR}.ensmean.nc ]; then - echo 
"${dirname}${head}atma00${FHR}.ensmean.nc " >>${RUN}.txt + if [[ -s "${COM_ATMOS_ANALYSIS_ENSSTAT}/${head}atminc.ensmean.nc" ]]; then + echo "${COM_ATMOS_ANALYSIS_ENSSTAT/${ROTDIR}\//}/${head}atminc.ensmean.nc" fi - if [ -s $ROTDIR/${dirpath}${head}atmi00${FHR}.ensmean.nc ]; then - echo "${dirname}${head}atmi00${FHR}.ensmean.nc " >>${RUN}.txt + else + if [[ -s "${COM_ATMOS_ANALYSIS_ENSSTAT}/${head}atma00${FHR}.ensmean.nc" ]]; then + echo "${COM_ATMOS_ANALYSIS_ENSSTAT/${ROTDIR}\//}/${head}atma00${FHR}.ensmean.nc" fi - fi - done # loop over FHR - for fstep in eobs ecen esfc eupd efcs epos ; do - echo "logs/${CDATE}/${RUN}${fstep}*.log " >>${RUN}.txt - done - -# eomg* are optional jobs - for log in ${ROTDIR}/logs/${CDATE}/${RUN}eomg*.log; do - if [[ -s "${log}" ]]; then - echo "logs/${CDATE}/${RUN}eomg*.log " >>${RUN}.txt - fi - break - done + if [[ -s "${COM_ATMOS_ANALYSIS_ENSSTAT}/${head}atmi00${FHR}.ensmean.nc" ]]; then + echo "${COM_ATMOS_ANALYSIS_ENSSTAT/${ROTDIR}\//}/${head}atmi00${FHR}.ensmean.nc" + fi + fi + done # loop over FHR + for fstep in eobs ecen esfc eupd efcs epos ; do + echo "logs/${PDY}${cyc}/${RUN}${fstep}*.log" + done + # eomg* are optional jobs + for log in "${ROTDIR}/logs/${PDY}${cyc}/${RUN}eomg"*".log"; do + if [[ -s "${log}" ]]; then + echo "logs/${PDY}${cyc}/${RUN}eomg*.log" + fi + break + done -# Ensemble spread file only available with netcdf output - fh=3 - while [ $fh -le 9 ]; do - fhr=$(printf %03i $fh) - echo "${dirname}${head}atmf${fhr}.ensmean.nc " >>${RUN}.txt - echo "${dirname}${head}sfcf${fhr}.ensmean.nc " >>${RUN}.txt - if [ -s $ROTDIR/${dirpath}${head}atmf${fhr}.ensspread.nc ]; then - echo "${dirname}${head}atmf${fhr}.ensspread.nc " >>${RUN}.txt - fi - fh=$((fh+3)) - done + # Ensemble spread file only available with netcdf output + fh=3 + while [ $fh -le 9 ]; do + fhr=$(printf %03i $fh) + echo "${COM_ATMOS_ANALYSIS_ENSSTAT/${ROTDIR}\//}/${head}atmf${fhr}.ensmean.nc" + echo 
"${COM_ATMOS_ANALYSIS_ENSSTAT/${ROTDIR}\//}/${head}sfcf${fhr}.ensmean.nc" + if [[ -s "${COM_ATMOS_ANALYSIS_ENSSTAT}/${head}atmf${fhr}.ensspread.nc" ]]; then + echo "${COM_ATMOS_ANALYSIS_ENSSTAT/${ROTDIR}\//}/${head}atmf${fhr}.ensspread.nc" + fi + fh=$((fh+3)) + done + } >> "${RUN}.txt" #........................... n=1 - while [[ ${n} -le ${NTARS} ]]; do - #........................... + while (( n <= NTARS )); do + #........................... - rm -f ${RUN}_grp${n}.txt - rm -f ${RUN}_restarta_grp${n}.txt - rm -f ${RUN}_restartb_grp${n}.txt - touch ${RUN}_grp${n}.txt - touch ${RUN}_restarta_grp${n}.txt - touch ${RUN}_restartb_grp${n}.txt - - m=1 - while [[ ${m} -le ${NMEM_EARCGRP} ]]; do - nm=$(((n-1)*NMEM_EARCGRP+m)) - mem=$(printf %03i $nm) - dirpath="${RUN}.${PDY}/${cyc}/mem${mem}/atmos/" - dirname="./${dirpath}" - head="${RUN}.t${cyc}z." - - #--- - for FHR in $nfhrs; do # loop over analysis times in window - if [ $FHR -eq 6 ]; then - if [ $n -le $NTARS2 ]; then - if [ -s $ROTDIR/${dirpath}${head}atmanl.nc ] ; then - echo "${dirname}${head}atmanl.nc " >>${RUN}_grp${n}.txt + rm -f "${RUN}_grp${n}.txt" + rm -f "${RUN}_restarta_grp${n}.txt" + rm -f "${RUN}_restartb_grp${n}.txt" + touch "${RUN}_grp${n}.txt" + touch "${RUN}_restarta_grp${n}.txt" + touch "${RUN}_restartb_grp${n}.txt" + + m=1 + while (( m <= NMEM_EARCGRP )); do + nm=$(((n-1)*NMEM_EARCGRP+m)) + mem=$(printf %03i ${nm}) + head="${RUN}.t${cyc}z." 
+ + MEMDIR="mem${mem}" YMD=${PDY} HH=${cyc} generate_com \ + COM_ATMOS_ANALYSIS_MEM:COM_ATMOS_ANALYSIS_TMPL \ + COM_ATMOS_RESTART_MEM:COM_ATMOS_RESTART_TMPL + + #--- + for FHR in $nfhrs; do # loop over analysis times in window + if [ $FHR -eq 6 ]; then + { + if (( n <= NTARS2 )); then + if [[ -s "${COM_ATMOS_ANALYSIS_MEM}/${head}atmanl.nc" ]] ; then + echo "${COM_ATMOS_ANALYSIS_MEM/${ROTDIR}\//}/${head}atmanl.nc" + fi + if [[ -s "${COM_ATMOS_ANALYSIS_MEM}/${head}ratminc.nc" ]] ; then + echo "${COM_ATMOS_ANALYSIS_MEM/${ROTDIR}\//}/${head}ratminc.nc" + fi fi - if [ -s $ROTDIR/${dirpath}${head}ratminc.nc ] ; then - echo "${dirname}${head}ratminc.nc " >>${RUN}_grp${n}.txt - fi - fi - if [ -s $ROTDIR/${dirpath}${head}ratminc.nc ] ; then - echo "${dirname}${head}ratminc.nc " >>${RUN}_restarta_grp${n}.txt - fi - - else - if [ $n -le $NTARS2 ]; then - if [ -s $ROTDIR/${dirpath}${head}atma00${FHR}.nc ] ; then - echo "${dirname}${head}atma00${FHR}.nc " >>${RUN}_grp${n}.txt - fi - if [ -s $ROTDIR/${dirpath}${head}ratmi00${FHR}.nc ] ; then - echo "${dirname}${head}ratmi00${FHR}.nc " >>${RUN}_grp${n}.txt + } >> "${RUN}_grp${n}.txt" + + if [[ -s "${COM_ATMOS_ANALYSIS_MEM}/${head}ratminc.nc" ]] ; then + echo "${COM_ATMOS_ANALYSIS_MEM/${ROTDIR}\//}/${head}ratminc.nc" \ + >> "${RUN}_restarta_grp${n}.txt" + fi + + else + { + if (( n <= NTARS2 )); then + if [[ -s "${COM_ATMOS_ANALYSIS_MEM}/${head}atma00${FHR}.nc" ]] ; then + echo "${COM_ATMOS_ANALYSIS_MEM/${ROTDIR}\//}/${head}atma00${FHR}.nc" + fi + if [[ -s "${COM_ATMOS_ANALYSIS_MEM}/${head}ratmi00${FHR}.nc" ]] ; then + echo "${COM_ATMOS_ANALYSIS_MEM/${ROTDIR}\//}/${head}ratmi00${FHR}.nc" + fi fi - fi - if [ -s $ROTDIR/${dirpath}${head}ratmi00${FHR}.nc ] ; then - echo "${dirname}${head}ratmi00${FHR}.nc " >>${RUN}_restarta_grp${n}.txt - fi - - fi - echo "${dirname}${head}atmf00${FHR}.nc " >>${RUN}_grp${n}.txt - if [ $FHR -eq 6 ]; then - echo "${dirname}${head}sfcf00${FHR}.nc " >>${RUN}_grp${n}.txt + } >> "${RUN}_grp${n}.txt" + if [[ 
-s "${COM_ATMOS_ANALYSIS_MEM}/${head}ratmi00${FHR}.nc" ]] ; then + echo "${COM_ATMOS_ANALYSIS_MEM/${ROTDIR}\//}/${head}ratmi00${FHR}.nc" \ + >> "${RUN}_restarta_grp${n}.txt" + fi + fi + { + echo "${COM_ATMOS_ANALYSIS_MEM/${ROTDIR}\//}/${head}atmf00${FHR}.nc" + if (( FHR == 6 )); then + echo "${COM_ATMOS_ANALYSIS_MEM/${ROTDIR}\//}/${head}sfcf00${FHR}.nc" + fi + } >> "${RUN}_grp${n}.txt" + done # loop over FHR + + if [[ ${lobsdiag_forenkf} == ".false." ]] ; then + { + echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/${head}gsistat" + if [[ -s "${COM_ATMOS_RESTART_MEM}/${head}cnvstat" ]] ; then + echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/${head}cnvstat" + fi + } >> "${RUN}_grp${n}.txt" + + { + if [[ -s "${COM_ATMOS_RESTART_MEM}/${head}radstat" ]]; then + echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/${head}radstat" + fi + if [[ -s "${COM_ATMOS_RESTART_MEM}/${head}cnvstat" ]]; then + echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/${head}cnvstat" + fi + echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/${head}abias" + echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/${head}abias_air" + echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/${head}abias_int" + echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/${head}abias_pc" + } >> "${RUN}_restarta_grp${n}.txt" fi - done # loop over FHR - - if [[ lobsdiag_forenkf = ".false." 
]] ; then - echo "${dirname}${head}gsistat " >>${RUN}_grp${n}.txt - if [[ -s ${ROTDIR}/${dirpath}${head}cnvstat ]] ; then - echo "${dirname}${head}cnvstat " >>${RUN}_grp${n}.txt - fi - if [[ -s ${ROTDIR}/${dirpath}${head}radstat ]]; then - echo "${dirname}${head}radstat " >>${RUN}_restarta_grp${n}.txt - fi - if [[ -s ${ROTDIR}/${dirpath}${head}cnvstat ]]; then - echo "${dirname}${head}cnvstat " >>${RUN}_restarta_grp${n}.txt - fi - echo "${dirname}${head}abias " >>${RUN}_restarta_grp${n}.txt - echo "${dirname}${head}abias_air " >>${RUN}_restarta_grp${n}.txt - echo "${dirname}${head}abias_int " >>${RUN}_restarta_grp${n}.txt - echo "${dirname}${head}abias_pc " >>${RUN}_restarta_grp${n}.txt - fi - #--- - echo "${dirname}RESTART/*0000.sfcanl_data.tile1.nc " >>${RUN}_restarta_grp${n}.txt - echo "${dirname}RESTART/*0000.sfcanl_data.tile2.nc " >>${RUN}_restarta_grp${n}.txt - echo "${dirname}RESTART/*0000.sfcanl_data.tile3.nc " >>${RUN}_restarta_grp${n}.txt - echo "${dirname}RESTART/*0000.sfcanl_data.tile4.nc " >>${RUN}_restarta_grp${n}.txt - echo "${dirname}RESTART/*0000.sfcanl_data.tile5.nc " >>${RUN}_restarta_grp${n}.txt - echo "${dirname}RESTART/*0000.sfcanl_data.tile6.nc " >>${RUN}_restarta_grp${n}.txt - - #--- - echo "${dirname}RESTART " >>${RUN}_restartb_grp${n}.txt - - m=$((m+1)) - done + #--- + { + echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/*0000.sfcanl_data.tile1.nc" + echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/*0000.sfcanl_data.tile2.nc" + echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/*0000.sfcanl_data.tile3.nc" + echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/*0000.sfcanl_data.tile4.nc" + echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/*0000.sfcanl_data.tile5.nc" + echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/*0000.sfcanl_data.tile6.nc" + } >> "${RUN}_restarta_grp${n}.txt" + #--- + echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}" >> "${RUN}_restartb_grp${n}.txt" + + m=$((m+1)) + done #........................... 
diff --git a/ush/inter_flux.sh b/ush/inter_flux.sh index 2a02b9aa2d..b1f4475e05 100755 --- a/ush/inter_flux.sh +++ b/ush/inter_flux.sh @@ -43,13 +43,13 @@ else fi #--------------------------------------------------------------- - $WGRIB2 $COMOUT/${FLUXFL} $option1 $option21 $option22 $option23 $option24 \ - $option25 $option26 $option27 $option28 \ - -new_grid $grid1p0 fluxfile_${fhr3}_1p00 + ${WGRIB2} "${COM_ATMOS_MASTER}/${FLUXFL}" ${option1} ${option21} ${option22} ${option23} \ + ${option24} ${option25} ${option26} ${option27} ${option28} \ + -new_grid ${grid1p0} fluxfile_${fhr3}_1p00 export err=$?; err_chk - $WGRIB2 -s fluxfile_${fhr3}_1p00 > $COMOUT/${PREFIX}flux.1p00.f${fhr3}.idx - cp fluxfile_${fhr3}_1p00 $COMOUT/${PREFIX}flux.1p00.f${fhr3} + ${WGRIB2} -s "fluxfile_${fhr3}_1p00" > "${COM_ATMOS_GRIB_1p00}/${PREFIX}flux.1p00.f${fhr3}.idx" + cp "fluxfile_${fhr3}_1p00" "${COM_ATMOS_GRIB_1p00}/${PREFIX}flux.1p00.f${fhr3}" #--------------------------------------------------------------- diff --git a/ush/load_fv3gfs_modules.sh b/ush/load_fv3gfs_modules.sh index 71725916ae..d750421551 100755 --- a/ush/load_fv3gfs_modules.sh +++ b/ush/load_fv3gfs_modules.sh @@ -10,19 +10,19 @@ fi ulimit_s=$( ulimit -S -s ) # Find module command and purge: -source "$HOMEgfs/modulefiles/module-setup.sh.inc" +source "${HOMEgfs}/modulefiles/module-setup.sh.inc" # Source versions file for runtime source "$HOMEgfs/versions/run.ver" # Load our modules: -module use "$HOMEgfs/modulefiles" +module use "${HOMEgfs}/modulefiles" if [[ -d /lfs/f1 ]]; then # We are on WCOSS2 (Cactus or Dogwood) source "$HOMEgfs/versions/wcoss2.ver" module load module_base.wcoss2 -elif [[ -d /lfs3 ]] ; then +elif [[ -d /mnt/lfs1 ]] ; then # We are on NOAA Jet source "$HOMEgfs/versions/jet.ver" module load module_base.jet @@ -51,7 +51,7 @@ fi module list # Restore stack soft limit: -ulimit -S -s "$ulimit_s" +ulimit -S -s "${ulimit_s}" unset ulimit_s set_trace diff --git a/ush/ocnpost.ncl b/ush/ocnpost.ncl index 
81f24673fc..27e60b0edf 100755 --- a/ush/ocnpost.ncl +++ b/ush/ocnpost.ncl @@ -93,7 +93,8 @@ begin ; pull from environment COMDIR = getenv("COMOUTocean") IDATE = getenv("IDATE") - FHR2 = getenv("FHR") + VDATE = getenv("VDATE") + FHR2 = getenv("FHR") FHR=FHR2 ENSMEM = getenv("ENSMEM") DATA_TMP = getenv("DATA") @@ -101,7 +102,7 @@ begin ; nemsrc = "/scratch2/NCEPDEV/climate/Bin.Li/S2S/fix/ocean_ice_post/FIXDIR/" ; calculate and break apart verification date - VDATE = tochar(systemfunc("$NDATE "+FHR+" "+IDATE)) + ; VDATE = tochar(systemfunc("$NDATE "+FHR+" "+IDATE)) ; YYYY = tostring(VDATE(0:3)) ; MM = tostring(VDATE(4:5)) ; DD = tostring(VDATE(6:7)) diff --git a/ush/parsing_model_configure_FV3.sh b/ush/parsing_model_configure_FV3.sh index a8997176f4..91b82a0d76 100755 --- a/ush/parsing_model_configure_FV3.sh +++ b/ush/parsing_model_configure_FV3.sh @@ -36,6 +36,7 @@ restart_interval: ${restart_interval} output_1st_tstep_rst: .false. quilting: ${QUILTING} +quilting_restart: .true. write_groups: ${WRITE_GROUP:-1} write_tasks_per_group: ${WRTTASK_PER_GROUP:-24} itasks: 1 diff --git a/ush/parsing_namelists_CICE.sh b/ush/parsing_namelists_CICE.sh index cf5b0fbf83..6fc8b143cc 100755 --- a/ush/parsing_namelists_CICE.sh +++ b/ush/parsing_namelists_CICE.sh @@ -25,8 +25,8 @@ fi # Get correct MPI options for NPROC and grid local processor_shape=${cice6_processor_shape:-'slenderX2'} local shape=${processor_shape#${processor_shape%?}} -local NPX=$(( ICEPETS / shape )) #number of processors in x direction -local NPY=$(( ICEPETS / NPX )) #number of processors in y direction +local NPX=$(( ntasks_cice6 / shape )) #number of processors in x direction +local NPY=$(( ntasks_cice6 / NPX )) #number of processors in y direction if (( $(( NX_GLB % NPX )) == 0 )); then local block_size_x=$(( NX_GLB / NPX )) else @@ -201,7 +201,7 @@ cat > ice_in < 1 )); then + template="${args[1]}" + else + template="${com_var}_TMPL" + fi + if [[ ! 
-v "${template}" ]]; then + echo "FATAL ERROR in generate_com: Requested template ${template} not defined!" + exit 2 + fi + value=$(echo "${!template}" | envsubst) + # shellcheck disable=SC2086 + declare ${opts} "${com_var}"="${value}" + echo "generate_com :: ${com_var}=${value}" + done + set_trace +} +# shellcheck disable= +declare -xf generate_com + # Turn on our settings set_strict set_trace diff --git a/ush/python/pygfs/task/aero_analysis.py b/ush/python/pygfs/task/aero_analysis.py index e21284dc91..e3c9ad50a2 100644 --- a/ush/python/pygfs/task/aero_analysis.py +++ b/ush/python/pygfs/task/aero_analysis.py @@ -30,7 +30,7 @@ def __init__(self, config): super().__init__(config) _res = int(self.config['CASE'][1:]) - _res_enkf = int(self.config['CASE_ENKF'][1:]) + _res_enkf = int(self.config['CASE_ENS'][1:]) _window_begin = add_to_datetime(self.runtime_config.current_cycle, -to_timedelta(f"{self.config['assim_freq']}H") / 2) _fv3jedi_yaml = os.path.join(self.runtime_config.DATA, f"{self.runtime_config.CDUMP}.t{self.runtime_config['cyc']:02d}z.aerovar.yaml") @@ -46,7 +46,6 @@ def __init__(self, config): 'npz_anl': self.config['LEVS'] - 1, 'AERO_WINDOW_BEGIN': _window_begin, 'AERO_WINDOW_LENGTH': f"PT{self.config['assim_freq']}H", - 'comin_ges_atm': self.config.COMIN_GES.replace('chem', 'atmos'), # 'chem' is COMPONENT, aerosol fields are in 'atmos' tracers 'OPREFIX': f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.", # TODO: CDUMP is being replaced by RUN 'APREFIX': f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.", # TODO: CDUMP is being replaced by RUN 'GPREFIX': f"gdas.t{self.runtime_config.previous_cycle.hour:02d}z.", @@ -68,7 +67,6 @@ def initialize(self: Analysis) -> None: - staging B error files - staging model backgrounds - generating a YAML file for the JEDI executable - - linking the JEDI executable (TODO make it copyable, requires JEDI fix) - creating output directories """ super().initialize() @@ -99,14 +97,6 @@ def 
initialize(self: Analysis) -> None: save_as_yaml(varda_yaml, self.task_config.fv3jedi_yaml) logger.info(f"Wrote variational YAML to: {self.task_config.fv3jedi_yaml}") - # link executable to DATA/ directory - exe_src = self.task_config['JEDIVAREXE'] - logger.debug(f"Link executable {exe_src} to DATA/") # TODO: linking is not permitted per EE2. Needs work in JEDI to be able to copy the exec. - exe_dest = os.path.join(self.task_config['DATA'], os.path.basename(exe_src)) - if os.path.exists(exe_dest): - rm_p(exe_dest) - os.symlink(exe_src, exe_dest) - # need output dir for diags and anl logger.debug("Create empty output [anl, diags] directories to receive output from executable") newdirs = [ @@ -152,7 +142,7 @@ def finalize(self: Analysis) -> None: """ # ---- tar up diags # path of output tar statfile - aerostat = os.path.join(self.task_config['COMOUTaero'], f"{self.task_config['APREFIX']}aerostat") + aerostat = os.path.join(self.task_config.COM_CHEM_ANALYSIS, f"{self.task_config['APREFIX']}aerostat") # get list of diag files to put in tarball diags = glob.glob(os.path.join(self.task_config['DATA'], 'diags', 'diag*nc4')) @@ -170,9 +160,9 @@ def finalize(self: Analysis) -> None: # copy full YAML from executable to ROTDIR src = os.path.join(self.task_config['DATA'], f"{self.task_config['CDUMP']}.t{self.runtime_config['cyc']:02d}z.aerovar.yaml") - dest = os.path.join(self.task_config['COMOUTaero'], f"{self.task_config['CDUMP']}.t{self.runtime_config['cyc']:02d}z.aerovar.yaml") + dest = os.path.join(self.task_config.COM_CHEM_ANALYSIS, f"{self.task_config['CDUMP']}.t{self.runtime_config['cyc']:02d}z.aerovar.yaml") yaml_copy = { - 'mkdir': [self.task_config['COMOUTaero']], + 'mkdir': [self.task_config.COM_CHEM_ANALYSIS], 'copy': [[src, dest]] } FileHandler(yaml_copy).sync() @@ -184,8 +174,8 @@ def finalize(self: Analysis) -> None: bkglist = [] for itile in range(1, self.task_config.ntiles + 1): tracer = template.format(tilenum=itile) - src = 
os.path.join(self.task_config.comin_ges_atm, 'RESTART', tracer) - dest = os.path.join(self.task_config.COMOUTaero, f'aeroges.{tracer}') + src = os.path.join(self.task_config.COM_ATMOS_RESTART_PREV, tracer) + dest = os.path.join(self.task_config.COM_CHEM_ANALYSIS, f'aeroges.{tracer}') bkglist.append([src, dest]) FileHandler({'copy': bkglist}).sync() @@ -195,12 +185,12 @@ def finalize(self: Analysis) -> None: # ---- move increments to ROTDIR logger.info('Moving increments to ROTDIR') - template = f'aeroinc.{to_fv3time(self.task_config.current_cycle)}.fv_tracer.res.tile{{tilenum}}' + template = f'aeroinc.{to_fv3time(self.task_config.current_cycle)}.fv_tracer.res.tile{{tilenum}}.nc' inclist = [] for itile in range(1, self.task_config.ntiles + 1): tracer = template.format(tilenum=itile) src = os.path.join(self.task_config.DATA, 'anl', tracer) - dest = os.path.join(self.task_config.COMOUTaero, tracer) + dest = os.path.join(self.task_config.COM_CHEM_ANALYSIS, tracer) inclist.append([src, dest]) FileHandler({'copy': inclist}).sync() @@ -216,7 +206,7 @@ def _add_fms_cube_sphere_increments(self: Analysis) -> None: # only need the fv_tracer files template = f'{to_fv3time(self.task_config.current_cycle)}.fv_tracer.res.tile{{tilenum}}.nc' inc_template = os.path.join(self.task_config.DATA, 'anl', 'aeroinc.' 
+ template) - bkg_template = os.path.join(self.task_config.comin_ges_atm, 'RESTART', template) + bkg_template = os.path.join(self.task_config.COM_ATMOS_RESTART_PREV, template) # get list of increment vars incvars_list_path = os.path.join(self.task_config['HOMEgfs'], 'parm', 'parm_gdas', 'aeroanl_inc_vars.yaml') incvars = YAMLFile(path=incvars_list_path)['incvars'] @@ -242,7 +232,7 @@ def get_bkg_dict(self, task_config: Dict[str, Any]) -> Dict[str, List[str]]: # NOTE for now this is FV3 RESTART files and just assumed to be fh006 # get FV3 RESTART files, this will be a lot simpler when using history files - rst_dir = os.path.join(task_config.comin_ges_atm, 'RESTART') # for now, option later? + rst_dir = task_config.COM_ATMOS_RESTART_PREV run_dir = os.path.join(task_config['DATA'], 'bkg') # Start accumulating list of background files to copy diff --git a/ush/python/pygfs/task/analysis.py b/ush/python/pygfs/task/analysis.py index 6eeeb34996..7c24c9cbdb 100644 --- a/ush/python/pygfs/task/analysis.py +++ b/ush/python/pygfs/task/analysis.py @@ -5,7 +5,7 @@ from netCDF4 import Dataset from typing import List, Dict, Any -from pygw.yaml_file import YAMLFile, parse_j2yaml +from pygw.yaml_file import YAMLFile, parse_j2yaml, parse_yamltmpl from pygw.file_utils import FileHandler from pygw.template import Template, TemplateConstants from pygw.logger import logit @@ -36,6 +36,9 @@ def initialize(self) -> None: bias_dict = self.get_bias_dict() FileHandler(bias_dict).sync() + # link jedi executable to run directory + self.link_jediexe() + @logit(logger) def get_obs_dict(self: Task) -> Dict[str, Any]: """Compile a dictionary of observation files to copy @@ -61,7 +64,7 @@ def get_obs_dict(self: Task) -> Dict[str, Any]: for ob in observers: obfile = ob['obs space']['obsdatain']['engine']['obsfile'] basename = os.path.basename(obfile) - copylist.append([os.path.join(self.task_config['COMIN_OBS'], basename), obfile]) + copylist.append([os.path.join(self.task_config['COM_OBS'], 
basename), obfile]) obs_dict = { 'mkdir': [os.path.join(self.runtime_config['DATA'], 'obs')], 'copy': copylist @@ -98,7 +101,7 @@ def get_bias_dict(self: Task) -> Dict[str, Any]: prefix = '.'.join(basename.split('.')[:-2]) for file in ['satbias.nc4', 'satbias_cov.nc4', 'tlapse.txt']: bfile = f"{prefix}.{file}" - copylist.append([os.path.join(self.task_config.comin_ges_atm, bfile), os.path.join(obdir, bfile)]) + copylist.append([os.path.join(self.task_config.COM_ATMOS_ANALYSIS_PREV, bfile), os.path.join(obdir, bfile)]) bias_dict = { 'mkdir': [os.path.join(self.runtime_config.DATA, 'bc')], @@ -171,3 +174,28 @@ def get_berror_dict(self, config: Dict[str, Any]) -> Dict[str, List[str]]: """ berror_dict = {'foo': 'bar'} return berror_dict + + @logit(logger) + def link_jediexe(self: Task) -> None: + """Compile a dictionary of background error files to copy + + This method links a JEDI executable to the run directory + + Parameters + ---------- + Task: GDAS task + + Returns + ---------- + None + """ + exe_src = self.task_config.JEDIEXE + + # TODO: linking is not permitted per EE2. Needs work in JEDI to be able to copy the exec. 
+ logger.debug(f"Link executable {exe_src} to DATA/") + exe_dest = os.path.join(self.task_config.DATA, os.path.basename(exe_src)) + if os.path.exists(exe_dest): + rm_p(exe_dest) + os.symlink(exe_src, exe_dest) + + return diff --git a/ush/python/pygfs/task/atm_analysis.py b/ush/python/pygfs/task/atm_analysis.py new file mode 100644 index 0000000000..3ab0ae3240 --- /dev/null +++ b/ush/python/pygfs/task/atm_analysis.py @@ -0,0 +1,435 @@ +#!/usr/bin/env python3 + +import os +import glob +import gzip +import tarfile +from logging import getLogger +from typing import Dict, List, Any + +from pygw.attrdict import AttrDict +from pygw.file_utils import FileHandler +from pygw.timetools import add_to_datetime, to_fv3time, to_timedelta, to_YMDH +from pygw.fsutils import rm_p, chdir +from pygw.yaml_file import parse_yamltmpl, parse_j2yaml, save_as_yaml +from pygw.logger import logit +from pygw.executable import Executable +from pygw.exceptions import WorkflowException +from pygfs.task.analysis import Analysis + +logger = getLogger(__name__.split('.')[-1]) + + +class AtmAnalysis(Analysis): + """ + Class for global atm analysis tasks + """ + @logit(logger, name="AtmAnalysis") + def __init__(self, config): + super().__init__(config) + + _res = int(self.config.CASE[1:]) + _res_anl = int(self.config.CASE_ANL[1:]) + _window_begin = add_to_datetime(self.runtime_config.current_cycle, -to_timedelta(f"{self.config.assim_freq}H") / 2) + _fv3jedi_yaml = os.path.join(self.runtime_config.DATA, f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.atmvar.yaml") + + # Create a local dictionary that is repeatedly used across this class + local_dict = AttrDict( + { + 'npx_ges': _res + 1, + 'npy_ges': _res + 1, + 'npz_ges': self.config.LEVS - 1, + 'npz': self.config.LEVS - 1, + 'npx_anl': _res_anl + 1, + 'npy_anl': _res_anl + 1, + 'npz_anl': self.config.LEVS - 1, + 'ATM_WINDOW_BEGIN': _window_begin, + 'ATM_WINDOW_LENGTH': f"PT{self.config.assim_freq}H", + 'OPREFIX': 
f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.", # TODO: CDUMP is being replaced by RUN + 'APREFIX': f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.", # TODO: CDUMP is being replaced by RUN + 'GPREFIX': f"gdas.t{self.runtime_config.previous_cycle.hour:02d}z.", + 'fv3jedi_yaml': _fv3jedi_yaml, + } + ) + + # task_config is everything that this task should need + self.task_config = AttrDict(**self.config, **self.runtime_config, **local_dict) + + @logit(logger) + def initialize(self: Analysis) -> None: + """Initialize a global atm analysis + + This method will initialize a global atm analysis using JEDI. + This includes: + - staging CRTM fix files + - staging FV3-JEDI fix files + - staging B error files + - staging model backgrounds + - generating a YAML file for the JEDI executable + - creating output directories + """ + super().initialize() + + # stage CRTM fix files + crtm_fix_list_path = os.path.join(self.task_config.HOMEgfs, 'parm', 'parm_gdas', 'atm_crtm_coeff.yaml') + logger.debug(f"Staging CRTM fix files from {crtm_fix_list_path}") + crtm_fix_list = parse_yamltmpl(crtm_fix_list_path, self.task_config) + FileHandler(crtm_fix_list).sync() + + # stage fix files + jedi_fix_list_path = os.path.join(self.task_config.HOMEgfs, 'parm', 'parm_gdas', 'atm_jedi_fix.yaml') + logger.debug(f"Staging JEDI fix files from {jedi_fix_list_path}") + jedi_fix_list = parse_yamltmpl(jedi_fix_list_path, self.task_config) + FileHandler(jedi_fix_list).sync() + + # stage berror files + # copy static background error files, otherwise it will assume ID matrix + logger.debug(f"Stage files for STATICB_TYPE {self.task_config.STATICB_TYPE}") + FileHandler(self.get_berror_dict(self.task_config)).sync() + + # stage backgrounds + FileHandler(self.get_bkg_dict(AttrDict(self.task_config))).sync() + + # generate variational YAML file + logger.debug(f"Generate variational YAML file: {self.task_config.fv3jedi_yaml}") + varda_yaml = 
parse_j2yaml(self.task_config.ATMVARYAML, self.task_config) + save_as_yaml(varda_yaml, self.task_config.fv3jedi_yaml) + logger.info(f"Wrote variational YAML to: {self.task_config.fv3jedi_yaml}") + + # need output dir for diags and anl + logger.debug("Create empty output [anl, diags] directories to receive output from executable") + newdirs = [ + os.path.join(self.task_config.DATA, 'anl'), + os.path.join(self.task_config.DATA, 'diags'), + ] + FileHandler({'mkdir': newdirs}).sync() + + @logit(logger) + def execute(self: Analysis) -> None: + + chdir(self.task_config.DATA) + + exec_cmd = Executable(self.task_config.APRUN_ATMANL) + exec_name = os.path.join(self.task_config.DATA, 'fv3jedi_var.x') + exec_cmd.add_default_arg(exec_name) + exec_cmd.add_default_arg(self.task_config.fv3jedi_yaml) + + try: + logger.debug(f"Executing {exec_cmd}") + exec_cmd() + except OSError: + raise OSError(f"Failed to execute {exec_cmd}") + except Exception: + raise WorkflowException(f"An error occured during execution of {exec_cmd}") + + pass + + @logit(logger) + def finalize(self: Analysis) -> None: + """Finalize a global atm analysis + + This method will finalize a global atm analysis using JEDI. 
+ This includes: + - tar output diag files and place in ROTDIR + - copy the generated YAML file from initialize to the ROTDIR + - copy the updated bias correction files to ROTDIR + - write UFS model readable atm incrment file + + """ + # ---- tar up diags + # path of output tar statfile + atmstat = os.path.join(self.task_config.COM_ATMOS_ANALYSIS, f"{self.task_config.APREFIX}atmstat") + + # get list of diag files to put in tarball + diags = glob.glob(os.path.join(self.task_config.DATA, 'diags', 'diag*nc4')) + + logger.info(f"Compressing {len(diags)} diag files to {atmstat}.gz") + + # gzip the files first + logger.debug(f"Gzipping {len(diags)} diag files") + for diagfile in diags: + with open(diagfile, 'rb') as f_in, gzip.open(f"{diagfile}.gz", 'wb') as f_out: + f_out.writelines(f_in) + + # open tar file for writing + logger.debug(f"Creating tar file {atmstat} with {len(diags)} gzipped diag files") + with tarfile.open(atmstat, "w") as archive: + for diagfile in diags: + diaggzip = f"{diagfile}.gz" + archive.add(diaggzip, arcname=os.path.basename(diaggzip)) + + # copy full YAML from executable to ROTDIR + logger.info(f"Copying {self.task_config.fv3jedi_yaml} to {self.task_config.COM_ATMOS_ANALYSIS}") + src = os.path.join(self.task_config.DATA, f"{self.task_config.CDUMP}.t{self.task_config.cyc:02d}z.atmvar.yaml") + dest = os.path.join(self.task_config.COM_ATMOS_ANALYSIS, f"{self.task_config.CDUMP}.t{self.task_config.cyc:02d}z.atmvar.yaml") + logger.debug(f"Copying {src} to {dest}") + yaml_copy = { + 'mkdir': [self.task_config.COM_ATMOS_ANALYSIS], + 'copy': [[src, dest]] + } + FileHandler(yaml_copy).sync() + + # copy bias correction files to ROTDIR + logger.info("Copy bias correction files from DATA/ to COM/") + biasdir = os.path.join(self.task_config.DATA, 'bc') + biasls = os.listdir(biasdir) + biaslist = [] + for bfile in biasls: + src = os.path.join(biasdir, bfile) + dest = os.path.join(self.task_config.COM_ATMOS_ANALYSIS, bfile) + biaslist.append([src, dest]) + + 
gprefix = f"{self.task_config.GPREFIX}" + gsuffix = f"{to_YMDH(self.task_config.previous_cycle)}" + ".txt" + aprefix = f"{self.task_config.APREFIX}" + asuffix = f"{to_YMDH(self.task_config.current_cycle)}" + ".txt" + + logger.info(f"Copying {gprefix}*{gsuffix} from DATA/ to COM/ as {aprefix}*{asuffix}") + obsdir = os.path.join(self.task_config.DATA, 'obs') + obsls = os.listdir(obsdir) + for ofile in obsls: + if ofile.endswith(".txt"): + src = os.path.join(obsdir, ofile) + tfile = ofile.replace(gprefix, aprefix) + tfile = tfile.replace(gsuffix, asuffix) + dest = os.path.join(self.task_config.COM_ATMOS_ANALYSIS, tfile) + biaslist.append([src, dest]) + + bias_copy = { + 'mkdir': [self.task_config.COM_ATMOS_ANALYSIS], + 'copy': biaslist, + } + FileHandler(bias_copy).sync() + + # Create UFS model readable atm increment file from UFS-DA atm increment + logger.info("Create UFS model readable atm increment file from UFS-DA atm increment") + self.jedi2fv3inc() + + def clean(self): + super().clean() + + @logit(logger) + def get_bkg_dict(self, task_config: Dict[str, Any]) -> Dict[str, List[str]]: + """Compile a dictionary of model background files to copy + + This method constructs a dictionary of FV3 restart files (coupler, core, tracer) + that are needed for global atm DA and returns said dictionary for use by the FileHandler class. + + Parameters + ---------- + task_config: Dict + a dictionary containing all of the configuration needed for the task + + Returns + ---------- + bkg_dict: Dict + a dictionary containing the list of model background files to copy for FileHandler + """ + # NOTE for now this is FV3 restart files and just assumed to be fh006 + + # get FV3 restart files, this will be a lot simpler when using history files + rst_dir = os.path.join(task_config.COM_ATMOS_RESTART_PREV) # for now, option later? 
+ run_dir = os.path.join(task_config.DATA, 'bkg') + + # Start accumulating list of background files to copy + bkglist = [] + + # atm DA needs coupler + basename = f'{to_fv3time(task_config.current_cycle)}.coupler.res' + bkglist.append([os.path.join(rst_dir, basename), os.path.join(run_dir, basename)]) + + # atm DA needs core, srf_wnd, tracer, phy_data, sfc_data + for ftype in ['core', 'srf_wnd', 'tracer']: + template = f'{to_fv3time(self.task_config.current_cycle)}.fv_{ftype}.res.tile{{tilenum}}.nc' + for itile in range(1, task_config.ntiles + 1): + basename = template.format(tilenum=itile) + bkglist.append([os.path.join(rst_dir, basename), os.path.join(run_dir, basename)]) + + for ftype in ['phy_data', 'sfc_data']: + template = f'{to_fv3time(self.task_config.current_cycle)}.{ftype}.tile{{tilenum}}.nc' + for itile in range(1, task_config.ntiles + 1): + basename = template.format(tilenum=itile) + bkglist.append([os.path.join(rst_dir, basename), os.path.join(run_dir, basename)]) + + bkg_dict = { + 'mkdir': [run_dir], + 'copy': bkglist, + } + return bkg_dict + + @logit(logger) + def get_berror_dict(self, config: Dict[str, Any]) -> Dict[str, List[str]]: + """Compile a dictionary of background error files to copy + + This method will construct a dictionary of either bump of gsibec background + error files for global atm DA and return said dictionary for use by the + FileHandler class. 
+ + Parameters + ---------- + config: Dict + a dictionary containing all of the configuration needed + + Returns + ---------- + berror_dict: Dict + a dictionary containing the list of atm background error files to copy for FileHandler + """ + SUPPORTED_BERROR_STATIC_MAP = {'identity': self._get_berror_dict_identity, + 'bump': self._get_berror_dict_bump, + 'gsibec': self._get_berror_dict_gsibec} + + try: + berror_dict = SUPPORTED_BERROR_STATIC_MAP[config.STATICB_TYPE](config) + except KeyError: + raise KeyError(f"{config.STATICB_TYPE} is not a supported background error type.\n" + + f"Currently supported background error types are:\n" + + f'{" | ".join(SUPPORTED_BERROR_STATIC_MAP.keys())}') + + return berror_dict + + @staticmethod + @logit(logger) + def _get_berror_dict_identity(config: Dict[str, Any]) -> Dict[str, List[str]]: + """Identity BE does not need any files for staging. + + This is a private method and should not be accessed directly. + + Parameters + ---------- + config: Dict + a dictionary containing all of the configuration needed + Returns + ---------- + berror_dict: Dict + Empty dictionary [identity BE needs not files to stage] + """ + logger.info(f"Identity background error does not use staged files. Return empty dictionary") + return {} + + @staticmethod + @logit(logger) + def _get_berror_dict_bump(config: Dict[str, Any]) -> Dict[str, List[str]]: + """Compile a dictionary of atm bump background error files to copy + + This method will construct a dictionary of atm bump background error + files for global atm DA and return said dictionary to the parent + + This is a private method and should not be accessed directly. + + Parameters + ---------- + config: Dict + a dictionary containing all of the configuration needed + + Returns + ---------- + berror_dict: Dict + a dictionary of atm bump background error files to copy for FileHandler + """ + # BUMP atm static-B needs nicas, cor_rh, cor_rv and stddev files. 
+ b_dir = config.BERROR_DATA_DIR + b_datestr = to_fv3time(config.BERROR_DATE) + berror_list = [] + for ftype in ['cor_rh', 'cor_rv', 'stddev']: + coupler = f'{b_datestr}.{ftype}.coupler.res' + berror_list.append([ + os.path.join(b_dir, coupler), os.path.join(config.DATA, 'berror', coupler) + ]) + + template = '{b_datestr}.{ftype}.fv_tracer.res.tile{{tilenum}}.nc' + for itile in range(1, config.ntiles + 1): + tracer = template.format(tilenum=itile) + berror_list.append([ + os.path.join(b_dir, tracer), os.path.join(config.DATA, 'berror', tracer) + ]) + + nproc = config.ntiles * config.layout_x * config.layout_y + for nn in range(1, nproc + 1): + berror_list.append([ + os.path.join(b_dir, f'nicas_aero_nicas_local_{nproc:06}-{nn:06}.nc'), + os.path.join(config.DATA, 'berror', f'nicas_aero_nicas_local_{nproc:06}-{nn:06}.nc') + ]) + + # create dictionary of background error files to stage + berror_dict = { + 'mkdir': [os.path.join(config.DATA, 'berror')], + 'copy': berror_list, + } + return berror_dict + + @staticmethod + @logit(logger) + def _get_berror_dict_gsibec(config: Dict[str, Any]) -> Dict[str, List[str]]: + """Compile a dictionary of atm gsibec background error files to copy + + This method will construct a dictionary of atm gsibec background error + files for global atm DA and return said dictionary to the parent + + This is a private method and should not be accessed directly. + + Parameters + ---------- + config: Dict + a dictionary containing all of the configuration needed + + Returns + ---------- + berror_dict: Dict + a dictionary of atm gsibec background error files to copy for FileHandler + """ + # GSI atm static-B needs namelist and coefficient files. 
+ b_dir = os.path.join(config.HOMEgfs, 'fix', 'gdas', 'gsibec', config.CASE_ANL) + berror_list = [] + for ftype in ['gfs_gsi_global.nml', 'gsi-coeffs-gfs-global.nc4']: + berror_list.append([ + os.path.join(b_dir, ftype), + os.path.join(config.DATA, 'berror', ftype) + ]) + + # create dictionary of background error files to stage + berror_dict = { + 'mkdir': [os.path.join(config.DATA, 'berror')], + 'copy': berror_list, + } + return berror_dict + + @logit(logger) + def jedi2fv3inc(self: Analysis) -> None: + """Generate UFS model readable analysis increment + + This method writes a UFS DA atm increment in UFS model readable format. + This includes: + - write UFS-DA atm increments using variable names expected by UFS model + - compute and write delp increment + - compute and write hydrostatic delz increment + + Please note that some of these steps are temporary and will be modified + once the modle is able to directly read atm increments. + + """ + # Select the atm guess file based on the analysis and background resolutions + # Fields from the atm guess are used to compute the delp and delz increments + case_anl = int(self.task_config.CASE_ANL[1:]) + case = int(self.task_config.CASE[1:]) + + file = f"{self.task_config.GPREFIX}" + "atmf006" + f"{'' if case_anl == case else '.ensres'}" + ".nc" + atmges_fv3 = os.path.join(self.task_config.COM_ATMOS_HISTORY_PREV, file) + + # Set the path/name to the input UFS-DA atm increment file (atminc_jedi) + # and the output UFS model atm increment file (atminc_fv3) + cdate = to_fv3time(self.task_config.current_cycle) + cdate_inc = cdate.replace('.', '_') + atminc_jedi = os.path.join(self.task_config.DATA, 'anl', f'atminc.{cdate_inc}z.nc4') + atminc_fv3 = os.path.join(self.task_config.COM_ATMOS_ANALYSIS, f"{self.task_config.CDUMP}.t{self.task_config.cyc:02d}z.atminc.nc") + + # Reference the python script which does the actual work + incpy = os.path.join(self.task_config.HOMEgfs, 'ush/jediinc2fv3.py') + + # Execute incpy to create the 
UFS model atm increment file + cmd = Executable(incpy) + cmd.add_default_arg(atmges_fv3) + cmd.add_default_arg(atminc_jedi) + cmd.add_default_arg(atminc_fv3) + logger.debug(f"Executing {cmd}") + cmd(output='stdout', error='stderr') diff --git a/ush/python/pygfs/task/atmens_analysis.py b/ush/python/pygfs/task/atmens_analysis.py new file mode 100644 index 0000000000..64f61d8df3 --- /dev/null +++ b/ush/python/pygfs/task/atmens_analysis.py @@ -0,0 +1,351 @@ +#!/usr/bin/env python3 + +import os +import glob +import gzip +import tarfile +from logging import getLogger +from typing import Dict, List, Any + +from pygw.attrdict import AttrDict +from pygw.file_utils import FileHandler +from pygw.timetools import add_to_datetime, to_fv3time, to_timedelta, to_YMDH, to_YMD +from pygw.fsutils import rm_p, chdir +from pygw.yaml_file import parse_yamltmpl, parse_j2yaml, save_as_yaml +from pygw.logger import logit +from pygw.executable import Executable +from pygw.exceptions import WorkflowException +from pygw.template import Template, TemplateConstants +from pygfs.task.analysis import Analysis + +logger = getLogger(__name__.split('.')[-1]) + + +class AtmEnsAnalysis(Analysis): + """ + Class for global atmens analysis tasks + """ + @logit(logger, name="AtmEnsAnalysis") + def __init__(self, config): + super().__init__(config) + + _res = int(self.config.CASE_ENS[1:]) + _res_anl = int(self.config.CASE_ANL[1:]) + _window_begin = add_to_datetime(self.runtime_config.current_cycle, -to_timedelta(f"{self.config.assim_freq}H") / 2) + _fv3jedi_yaml = os.path.join(self.runtime_config.DATA, f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.atmens.yaml") + + # Create a local dictionary that is repeatedly used across this class + local_dict = AttrDict( + { + 'npx_ges': _res + 1, + 'npy_ges': _res + 1, + 'npz_ges': self.config.LEVS - 1, + 'npz': self.config.LEVS - 1, + 'npx_anl': _res_anl + 1, + 'npy_anl': _res_anl + 1, + 'npz_anl': self.config.LEVS - 1, + 'ATM_WINDOW_BEGIN': 
_window_begin, + 'ATM_WINDOW_LENGTH': f"PT{self.config.assim_freq}H", + 'OPREFIX': f"{self.config.EUPD_CYC}.t{self.runtime_config.cyc:02d}z.", # TODO: CDUMP is being replaced by RUN + 'APREFIX': f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.", # TODO: CDUMP is being replaced by RUN + 'GPREFIX': f"gdas.t{self.runtime_config.previous_cycle.hour:02d}z.", + 'fv3jedi_yaml': _fv3jedi_yaml, + } + ) + + # task_config is everything that this task should need + self.task_config = AttrDict(**self.config, **self.runtime_config, **local_dict) + + @logit(logger) + def initialize(self: Analysis) -> None: + """Initialize a global atmens analysis + + This method will initialize a global atmens analysis using JEDI. + This includes: + - staging CRTM fix files + - staging FV3-JEDI fix files + - staging model backgrounds + - generating a YAML file for the JEDI executable + - creating output directories + + Parameters + ---------- + Analysis: parent class for GDAS task + + Returns + ---------- + None + """ + super().initialize() + + # Make member directories in DATA for background and in DATA and ROTDIR for analysis files + # create template dictionary for output member analysis directories + template_inc = self.task_config.COM_ATMOS_ANALYSIS_TMPL + tmpl_inc_dict = { + 'ROTDIR': self.task_config.ROTDIR, + 'RUN': self.task_config.RUN, + 'YMD': to_YMD(self.task_config.current_cycle), + 'HH': self.task_config.current_cycle.strftime('%H') + } + dirlist = [] + for imem in range(1, self.task_config.NMEM_ENS + 1): + dirlist.append(os.path.join(self.task_config.DATA, 'bkg', f'mem{imem:03d}')) + dirlist.append(os.path.join(self.task_config.DATA, 'anl', f'mem{imem:03d}')) + + # create output directory path for member analysis + tmpl_inc_dict['MEMDIR'] = f"mem{imem:03d}" + incdir = Template.substitute_structure(template_inc, TemplateConstants.DOLLAR_CURLY_BRACE, tmpl_inc_dict.get) + dirlist.append(incdir) + + FileHandler({'mkdir': dirlist}).sync() + + # stage CRTM fix files + 
crtm_fix_list_path = os.path.join(self.task_config.HOMEgfs, 'parm', 'parm_gdas', 'atm_crtm_coeff.yaml') + logger.debug(f"Staging CRTM fix files from {crtm_fix_list_path}") + crtm_fix_list = parse_yamltmpl(crtm_fix_list_path, self.task_config) + FileHandler(crtm_fix_list).sync() + + # stage fix files + jedi_fix_list_path = os.path.join(self.task_config.HOMEgfs, 'parm', 'parm_gdas', 'atm_jedi_fix.yaml') + logger.debug(f"Staging JEDI fix files from {jedi_fix_list_path}") + jedi_fix_list = parse_yamltmpl(jedi_fix_list_path, self.task_config) + FileHandler(jedi_fix_list).sync() + + # stage backgrounds + FileHandler(self.get_bkg_dict()).sync() + + # generate ensemble da YAML file + logger.debug(f"Generate ensemble da YAML file: {self.task_config.fv3jedi_yaml}") + ensda_yaml = parse_j2yaml(self.task_config.ATMENSYAML, self.task_config) + save_as_yaml(ensda_yaml, self.task_config.fv3jedi_yaml) + logger.info(f"Wrote ensemble da YAML to: {self.task_config.fv3jedi_yaml}") + + # need output dir for diags and anl + logger.debug("Create empty output [anl, diags] directories to receive output from executable") + newdirs = [ + os.path.join(self.task_config.DATA, 'anl'), + os.path.join(self.task_config.DATA, 'diags'), + ] + FileHandler({'mkdir': newdirs}).sync() + + @logit(logger) + def execute(self: Analysis) -> None: + """Execute a global atmens analysis + + This method will execute a global atmens analysis using JEDI. 
+ This includes: + - changing to the run directory + - running the global atmens analysis executable + + Parameters + ---------- + Analysis: parent class for GDAS task + + Returns + ---------- + None + """ + chdir(self.task_config.DATA) + + exec_cmd = Executable(self.task_config.APRUN_ATMENSANL) + exec_name = os.path.join(self.task_config.DATA, 'fv3jedi_letkf.x') + exec_cmd.add_default_arg(exec_name) + exec_cmd.add_default_arg(self.task_config.fv3jedi_yaml) + + try: + logger.debug(f"Executing {exec_cmd}") + exec_cmd() + except OSError: + raise OSError(f"Failed to execute {exec_cmd}") + except Exception: + raise WorkflowException(f"An error occured during execution of {exec_cmd}") + + pass + + @logit(logger) + def finalize(self: Analysis) -> None: + """Finalize a global atmens analysis + + This method will finalize a global atmens analysis using JEDI. + This includes: + - tar output diag files and place in ROTDIR + - copy the generated YAML file from initialize to the ROTDIR + - write UFS model readable atm incrment file + + Parameters + ---------- + Analysis: parent class for GDAS task + + Returns + ---------- + None + """ + # ---- tar up diags + # path of output tar statfile + atmensstat = os.path.join(self.task_config.COM_ATMOS_ANALYSIS_ENS, f"{self.task_config.APREFIX}atmensstat") + + # get list of diag files to put in tarball + diags = glob.glob(os.path.join(self.task_config.DATA, 'diags', 'diag*nc4')) + + logger.info(f"Compressing {len(diags)} diag files to {atmensstat}.gz") + + # gzip the files first + logger.debug(f"Gzipping {len(diags)} diag files") + for diagfile in diags: + with open(diagfile, 'rb') as f_in, gzip.open(f"{diagfile}.gz", 'wb') as f_out: + f_out.writelines(f_in) + + # open tar file for writing + logger.debug(f"Creating tar file {atmensstat} with {len(diags)} gzipped diag files") + with tarfile.open(atmensstat, "w") as archive: + for diagfile in diags: + diaggzip = f"{diagfile}.gz" + archive.add(diaggzip, arcname=os.path.basename(diaggzip)) + 
+ # copy full YAML from executable to ROTDIR + logger.info(f"Copying {self.task_config.fv3jedi_yaml} to {self.task_config.COM_ATMOS_ANALYSIS_ENS}") + src = os.path.join(self.task_config.DATA, f"{self.task_config.CDUMP}.t{self.task_config.cyc:02d}z.atmens.yaml") + dest = os.path.join(self.task_config.COM_ATMOS_ANALYSIS_ENS, f"{self.task_config.CDUMP}.t{self.task_config.cyc:02d}z.atmens.yaml") + logger.debug(f"Copying {src} to {dest}") + yaml_copy = { + 'mkdir': [self.task_config.COM_ATMOS_ANALYSIS_ENS], + 'copy': [[src, dest]] + } + FileHandler(yaml_copy).sync() + + # Create UFS model readable atm increment file from UFS-DA atm increment + logger.info("Create UFS model readable atm increment file from UFS-DA atm increment") + self.jedi2fv3inc() + + def clean(self): + super().clean() + + @logit(logger) + def jedi2fv3inc(self: Analysis) -> None: + """Generate UFS model readable analysis increment + + This method writes a UFS DA atm increment in UFS model readable format. + This includes: + - write UFS-DA atm increments using variable names expected by UFS model + - compute and write delp increment + - compute and write hydrostatic delz increment + + Please note that some of these steps are temporary and will be modified + once the modle is able to directly read atm increments. 
+ + Parameters + ---------- + Analysis: parent class for GDAS task + + Returns + ---------- + None + """ + # Select the atm guess file based on the analysis and background resolutions + # Fields from the atm guess are used to compute the delp and delz increments + cdate = to_fv3time(self.task_config.current_cycle) + cdate_inc = cdate.replace('.', '_') + + # Reference the python script which does the actual work + incpy = os.path.join(self.task_config.HOMEgfs, 'ush/jediinc2fv3.py') + + # create template dictionaries + template_inc = self.task_config.COM_ATMOS_ANALYSIS_TMPL + tmpl_inc_dict = { + 'ROTDIR': self.task_config.ROTDIR, + 'RUN': self.task_config.RUN, + 'YMD': to_YMD(self.task_config.current_cycle), + 'HH': self.task_config.current_cycle.strftime('%H') + } + + template_ges = self.task_config.COM_ATMOS_HISTORY_TMPL + tmpl_ges_dict = { + 'ROTDIR': self.task_config.ROTDIR, + 'RUN': self.task_config.RUN, + 'YMD': to_YMD(self.task_config.previous_cycle), + 'HH': self.task_config.previous_cycle.strftime('%H') + } + + # loop over ensemble members + for imem in range(1, self.task_config.NMEM_ENS + 1): + memchar = f"mem{imem:03d}" + + # create output path for member analysis increment + tmpl_inc_dict['MEMDIR'] = memchar + incdir = Template.substitute_structure(template_inc, TemplateConstants.DOLLAR_CURLY_BRACE, tmpl_inc_dict.get) + + # rewrite UFS-DA atmens increments + tmpl_ges_dict['MEMDIR'] = memchar + gesdir = Template.substitute_structure(template_ges, TemplateConstants.DOLLAR_CURLY_BRACE, tmpl_ges_dict.get) + atmges_fv3 = os.path.join(gesdir, f"{self.task_config.CDUMP}.t{self.task_config.previous_cycle.hour:02d}z.atmf006.nc") + atminc_jedi = os.path.join(self.task_config.DATA, 'anl', memchar, f'atminc.{cdate_inc}z.nc4') + atminc_fv3 = os.path.join(incdir, f"{self.task_config.CDUMP}.t{self.task_config.cyc:02d}z.atminc.nc") + + # Execute incpy to create the UFS model atm increment file + # TODO: use MPMD or parallelize with mpi4py + # See 
https://github.com/NOAA-EMC/global-workflow/pull/1373#discussion_r1173060656 + cmd = Executable(incpy) + cmd.add_default_arg(atmges_fv3) + cmd.add_default_arg(atminc_jedi) + cmd.add_default_arg(atminc_fv3) + logger.debug(f"Executing {cmd}") + cmd(output='stdout', error='stderr') + + @logit(logger) + def get_bkg_dict(self: Analysis) -> Dict[str, List[str]]: + """Compile a dictionary of model background files to copy + + This method constructs a dictionary of ensemble FV3 restart files (coupler, core, tracer) + that are needed for global atmens DA and returns said dictionary for use by the FileHandler class. + + Parameters + ---------- + None + + Returns + ---------- + bkg_dict: Dict + a dictionary containing the list of model background files to copy for FileHandler + """ + # NOTE for now this is FV3 restart files and just assumed to be fh006 + # loop over ensemble members + rstlist = [] + bkglist = [] + + # get FV3 restart files, this will be a lot simpler when using history files + template_res = self.task_config.COM_ATMOS_RESTART_TMPL + tmpl_res_dict = { + 'ROTDIR': self.task_config.ROTDIR, + 'RUN': self.task_config.RUN, + 'YMD': to_YMD(self.task_config.previous_cycle), + 'HH': self.task_config.previous_cycle.strftime('%H'), + 'MEMDIR': None + } + + for imem in range(1, self.task_config.NMEM_ENS + 1): + memchar = f"mem{imem:03d}" + + # get FV3 restart files, this will be a lot simpler when using history files + tmpl_res_dict['MEMDIR'] = memchar + rst_dir = Template.substitute_structure(template_res, TemplateConstants.DOLLAR_CURLY_BRACE, tmpl_res_dict.get) + rstlist.append(rst_dir) + + run_dir = os.path.join(self.task_config.DATA, 'bkg', memchar) + + # atmens DA needs coupler + basename = f'{to_fv3time(self.task_config.current_cycle)}.coupler.res' + bkglist.append([os.path.join(rst_dir, basename), os.path.join(self.task_config.DATA, 'bkg', memchar, basename)]) + + # atmens DA needs core, srf_wnd, tracer, phy_data, sfc_data + for ftype in ['fv_core.res', 
'fv_srf_wnd.res', 'fv_tracer.res', 'phy_data', 'sfc_data']: + template = f'{to_fv3time(self.task_config.current_cycle)}.{ftype}.tile{{tilenum}}.nc' + for itile in range(1, self.task_config.ntiles + 1): + basename = template.format(tilenum=itile) + bkglist.append([os.path.join(rst_dir, basename), os.path.join(run_dir, basename)]) + + bkg_dict = { + 'mkdir': rstlist, + 'copy': bkglist, + } + + return bkg_dict diff --git a/ush/python/pygw/src/pygw/schema.py b/ush/python/pygw/src/pygw/schema.py new file mode 100644 index 0000000000..2a46c62f01 --- /dev/null +++ b/ush/python/pygw/src/pygw/schema.py @@ -0,0 +1,887 @@ +"""schema is a library for validating Python data structures, such as those +obtained from config-files, forms, external services or command-line +parsing, converted from JSON/YAML (or something else) to Python data-types.""" + +import inspect +import re + +from typing import Dict +from pydoc import locate + +try: + from contextlib import ExitStack +except ImportError: + from contextlib2 import ExitStack + + +__version__ = "0.7.5" +__all__ = [ + "Schema", + "And", + "Or", + "Regex", + "Optional", + "Use", + "Forbidden", + "Const", + "Literal", + "SchemaError", + "SchemaWrongKeyError", + "SchemaMissingKeyError", + "SchemaForbiddenKeyError", + "SchemaUnexpectedTypeError", + "SchemaOnlyOneAllowedError", +] + + +class SchemaError(Exception): + """Error during Schema validation.""" + + def __init__(self, autos, errors=None): + self.autos = autos if type(autos) is list else [autos] + self.errors = errors if type(errors) is list else [errors] + Exception.__init__(self, self.code) + + @property + def code(self): + """ + Removes duplicates values in auto and error list. + parameters. + """ + + def uniq(seq): + """ + Utility function that removes duplicate. + """ + seen = set() + seen_add = seen.add + # This way removes duplicates while preserving the order. 
+ return [x for x in seq if x not in seen and not seen_add(x)] + + data_set = uniq(i for i in self.autos if i is not None) + error_list = uniq(i for i in self.errors if i is not None) + if error_list: + return "\n".join(error_list) + return "\n".join(data_set) + + +class SchemaWrongKeyError(SchemaError): + """Error should be raised when an unexpected key is detected within the + data set being validated.""" + + pass + + +class SchemaMissingKeyError(SchemaError): + """Error should be raised when a mandatory key is not found within the + data set being validated""" + + pass + + +class SchemaOnlyOneAllowedError(SchemaError): + """Error should be raised when an only_one Or key has multiple matching candidates""" + + pass + + +class SchemaForbiddenKeyError(SchemaError): + """Error should be raised when a forbidden key is found within the + data set being validated, and its value matches the value that was specified""" + + pass + + +class SchemaUnexpectedTypeError(SchemaError): + """Error should be raised when a type mismatch is detected within the + data set being validated.""" + + pass + + +class And(object): + """ + Utility function to combine validation directives in AND Boolean fashion. + """ + + def __init__(self, *args, **kw): + self._args = args + if not set(kw).issubset({"error", "schema", "ignore_extra_keys"}): + diff = {"error", "schema", "ignore_extra_keys"}.difference(kw) + raise TypeError("Unknown keyword arguments %r" % list(diff)) + self._error = kw.get("error") + self._ignore_extra_keys = kw.get("ignore_extra_keys", False) + # You can pass your inherited Schema class. + self._schema = kw.get("schema", Schema) + + def __repr__(self): + return "%s(%s)" % (self.__class__.__name__, ", ".join(repr(a) for a in self._args)) + + @property + def args(self): + """The provided parameters""" + return self._args + + def validate(self, data, **kwargs): + """ + Validate data using defined sub schema/expressions ensuring all + values are valid. 
+ :param data: to be validated with sub defined schemas. + :return: returns validated data + """ + for s in [self._schema(s, error=self._error, ignore_extra_keys=self._ignore_extra_keys) for s in self._args]: + data = s.validate(data, **kwargs) + return data + + +class Or(And): + """Utility function to combine validation directives in an OR Boolean + fashion.""" + + def __init__(self, *args, **kwargs): + self.only_one = kwargs.pop("only_one", False) + self.match_count = 0 + super(Or, self).__init__(*args, **kwargs) + + def reset(self): + failed = self.match_count > 1 and self.only_one + self.match_count = 0 + if failed: + raise SchemaOnlyOneAllowedError(["There are multiple keys present " + "from the %r condition" % self]) + + def validate(self, data, **kwargs): + """ + Validate data using sub defined schema/expressions ensuring at least + one value is valid. + :param data: data to be validated by provided schema. + :return: return validated data if validation succeeds + """ + autos, errors = [], [] + for s in [self._schema(s, error=self._error, ignore_extra_keys=self._ignore_extra_keys) for s in self._args]: + try: + validation = s.validate(data, **kwargs) + self.match_count += 1 + if self.match_count > 1 and self.only_one: + break + return validation + except SchemaError as _x: + autos += _x.autos + errors += _x.errors + raise SchemaError( + ["%r did not validate %r" % (self, data)] + autos, + [self._error.format(data) if self._error else None] + errors, + ) + + +class Regex(object): + """ + Enables schema.py to validate string using regular expressions. 
+ """ + + # Map all flags bits to a more readable description + NAMES = [ + "re.ASCII", + "re.DEBUG", + "re.VERBOSE", + "re.UNICODE", + "re.DOTALL", + "re.MULTILINE", + "re.LOCALE", + "re.IGNORECASE", + "re.TEMPLATE", + ] + + def __init__(self, pattern_str, flags=0, error=None): + self._pattern_str = pattern_str + flags_list = [ + Regex.NAMES[i] for i, f in enumerate("{0:09b}".format(int(flags))) if f != "0" + ] # Name for each bit + + if flags_list: + self._flags_names = ", flags=" + "|".join(flags_list) + else: + self._flags_names = "" + + self._pattern = re.compile(pattern_str, flags=flags) + self._error = error + + def __repr__(self): + return "%s(%r%s)" % (self.__class__.__name__, self._pattern_str, self._flags_names) + + @property + def pattern_str(self): + """The pattern for the represented regular expression""" + return self._pattern_str + + def validate(self, data, **kwargs): + """ + Validated data using defined regex. + :param data: data to be validated + :return: return validated data. + """ + e = self._error + + try: + if self._pattern.search(data): + return data + else: + raise SchemaError("%r does not match %r" % (self, data), e.format(data) if e else None) + except TypeError: + raise SchemaError("%r is not string nor buffer" % data, e) + + +class Use(object): + """ + For more general use cases, you can use the Use class to transform + the data while it is being validate. 
+ """ + + def __init__(self, callable_, error=None): + if not callable(callable_): + raise TypeError("Expected a callable, not %r" % callable_) + self._callable = callable_ + self._error = error + + def __repr__(self): + return "%s(%r)" % (self.__class__.__name__, self._callable) + + def validate(self, data, **kwargs): + try: + return self._callable(data) + except SchemaError as x: + raise SchemaError([None] + x.autos, [self._error.format(data) if self._error else None] + x.errors) + except BaseException as x: + f = _callable_str(self._callable) + raise SchemaError("%s(%r) raised %r" % (f, data, x), self._error.format(data) if self._error else None) + + +COMPARABLE, CALLABLE, VALIDATOR, TYPE, DICT, ITERABLE = range(6) + + +def _priority(s): + """Return priority for a given object.""" + if type(s) in (list, tuple, set, frozenset): + return ITERABLE + if type(s) is dict: + return DICT + if issubclass(type(s), type): + return TYPE + if isinstance(s, Literal): + return COMPARABLE + if hasattr(s, "validate"): + return VALIDATOR + if callable(s): + return CALLABLE + else: + return COMPARABLE + + +def _invoke_with_optional_kwargs(f, **kwargs): + s = inspect.signature(f) + if len(s.parameters) == 0: + return f() + return f(**kwargs) + + +class Schema(object): + """ + Entry point of the library, use this class to instantiate validation + schema for the data that will be validated. 
+ """ + + def __init__(self, schema, error=None, ignore_extra_keys=False, name=None, description=None, as_reference=False): + self._schema = schema + self._error = error + self._ignore_extra_keys = ignore_extra_keys + self._name = name + self._description = description + # Ask json_schema to create a definition for this schema and use it as part of another + self.as_reference = as_reference + if as_reference and name is None: + raise ValueError("Schema used as reference should have a name") + + def __repr__(self): + return "%s(%r)" % (self.__class__.__name__, self._schema) + + @property + def schema(self): + return self._schema + + @property + def description(self): + return self._description + + @property + def name(self): + return self._name + + @property + def ignore_extra_keys(self): + return self._ignore_extra_keys + + @staticmethod + def _dict_key_priority(s): + """Return priority for a given key object.""" + if isinstance(s, Hook): + return _priority(s._schema) - 0.5 + if isinstance(s, Optional): + return _priority(s._schema) + 0.5 + return _priority(s) + + @staticmethod + def _is_optional_type(s): + """Return True if the given key is optional (does not have to be found)""" + return any(isinstance(s, optional_type) for optional_type in [Optional, Hook]) + + def is_valid(self, data, **kwargs): + """Return whether the given data has passed all the validations + that were specified in the given schema. + """ + try: + self.validate(data, **kwargs) + except SchemaError: + return False + else: + return True + + def _prepend_schema_name(self, message): + """ + If a custom schema name has been defined, prepends it to the error + message that gets raised when a schema error occurs. 
+ """ + if self._name: + message = "{0!r} {1!s}".format(self._name, message) + return message + + def validate(self, data, **kwargs): + Schema = self.__class__ + s = self._schema + e = self._error + i = self._ignore_extra_keys + + if isinstance(s, Literal): + s = s.schema + + flavor = _priority(s) + if flavor == ITERABLE: + data = Schema(type(s), error=e).validate(data, **kwargs) + o = Or(*s, error=e, schema=Schema, ignore_extra_keys=i) + return type(data)(o.validate(d, **kwargs) for d in data) + if flavor == DICT: + exitstack = ExitStack() + data = Schema(dict, error=e).validate(data, **kwargs) + new = type(data)() # new - is a dict of the validated values + coverage = set() # matched schema keys + # for each key and value find a schema entry matching them, if any + sorted_skeys = sorted(s, key=self._dict_key_priority) + for skey in sorted_skeys: + if hasattr(skey, "reset"): + exitstack.callback(skey.reset) + + with exitstack: + # Evaluate dictionaries last + data_items = sorted(data.items(), key=lambda value: isinstance(value[1], dict)) + for key, value in data_items: + for skey in sorted_skeys: + svalue = s[skey] + try: + nkey = Schema(skey, error=e).validate(key, **kwargs) + except SchemaError: + pass + else: + if isinstance(skey, Hook): + # As the content of the value makes little sense for + # keys with a hook, we reverse its meaning: + # we will only call the handler if the value does match + # In the case of the forbidden key hook, + # we will raise the SchemaErrorForbiddenKey exception + # on match, allowing for excluding a key only if its + # value has a certain type, and allowing Forbidden to + # work well in combination with Optional. 
+ try: + nvalue = Schema(svalue, error=e).validate(value, **kwargs) + except SchemaError: + continue + skey.handler(nkey, data, e) + else: + try: + nvalue = Schema(svalue, error=e, ignore_extra_keys=i).validate(value, **kwargs) + except SchemaError as x: + k = "Key '%s' error:" % nkey + message = self._prepend_schema_name(k) + raise SchemaError([message] + x.autos, [e.format(data) if e else None] + x.errors) + else: + new[nkey] = nvalue + coverage.add(skey) + break + required = set(k for k in s if not self._is_optional_type(k)) + if not required.issubset(coverage): + missing_keys = required - coverage + s_missing_keys = ", ".join(repr(k) for k in sorted(missing_keys, key=repr)) + message = "Missing key%s: %s" % (_plural_s(missing_keys), s_missing_keys) + message = self._prepend_schema_name(message) + raise SchemaMissingKeyError(message, e.format(data) if e else None) + if not self._ignore_extra_keys and (len(new) != len(data)): + wrong_keys = set(data.keys()) - set(new.keys()) + s_wrong_keys = ", ".join(repr(k) for k in sorted(wrong_keys, key=repr)) + message = "Wrong key%s %s in %r" % (_plural_s(wrong_keys), s_wrong_keys, data) + message = self._prepend_schema_name(message) + raise SchemaWrongKeyError(message, e.format(data) if e else None) + + # Apply default-having optionals that haven't been used: + defaults = set(k for k in s if isinstance(k, Optional) and hasattr(k, "default")) - coverage + for default in defaults: + new[default.key] = _invoke_with_optional_kwargs(default.default, **kwargs) if callable(default.default) else default.default + + return new + if flavor == TYPE: + if isinstance(data, s) and not (isinstance(data, bool) and s == int): + return data + else: + message = "%r should be instance of %r" % (data, s.__name__) + message = self._prepend_schema_name(message) + raise SchemaUnexpectedTypeError(message, e.format(data) if e else None) + if flavor == VALIDATOR: + try: + return s.validate(data, **kwargs) + except SchemaError as x: + raise 
SchemaError([None] + x.autos, [e.format(data) if e else None] + x.errors) + except BaseException as x: + message = "%r.validate(%r) raised %r" % (s, data, x) + message = self._prepend_schema_name(message) + raise SchemaError(message, e.format(data) if e else None) + if flavor == CALLABLE: + f = _callable_str(s) + try: + if s(data): + return data + except SchemaError as x: + raise SchemaError([None] + x.autos, [e.format(data) if e else None] + x.errors) + except BaseException as x: + message = "%s(%r) raised %r" % (f, data, x) + message = self._prepend_schema_name(message) + raise SchemaError(message, e.format(data) if e else None) + message = "%s(%r) should evaluate to True" % (f, data) + message = self._prepend_schema_name(message) + raise SchemaError(message, e.format(data) if e else None) + if s == data: + return data + else: + message = "%r does not match %r" % (s, data) + message = self._prepend_schema_name(message) + raise SchemaError(message, e.format(data) if e else None) + + def json_schema(self, schema_id, use_refs=False, **kwargs): + """Generate a draft-07 JSON schema dict representing the Schema. + This method must be called with a schema_id. + + :param schema_id: The value of the $id on the main schema + :param use_refs: Enable reusing object references in the resulting JSON schema. + Schemas with references are harder to read by humans, but are a lot smaller when there + is a lot of reuse + """ + + seen = dict() # For use_refs + definitions_by_name = {} + + def _json_schema(schema, is_main_schema=True, description=None, allow_reference=True): + Schema = self.__class__ + + def _create_or_use_ref(return_dict): + """If not already seen, return the provided part of the schema unchanged. + If already seen, give an id to the already seen dict and return a reference to the previous part + of the schema instead. 
+ """ + if not use_refs or is_main_schema: + return return_schema + + hashed = hash(repr(sorted(return_dict.items()))) + + if hashed not in seen: + seen[hashed] = return_dict + return return_dict + else: + id_str = "#" + str(hashed) + seen[hashed]["$id"] = id_str + return {"$ref": id_str} + + def _get_type_name(python_type): + """Return the JSON schema name for a Python type""" + if python_type == str: + return "string" + elif python_type == int: + return "integer" + elif python_type == float: + return "number" + elif python_type == bool: + return "boolean" + elif python_type == list: + return "array" + elif python_type == dict: + return "object" + return "string" + + def _to_json_type(value): + """Attempt to convert a constant value (for "const" and "default") to a JSON serializable value""" + if value is None or type(value) in (str, int, float, bool, list, dict): + return value + + if type(value) in (tuple, set, frozenset): + return list(value) + + if isinstance(value, Literal): + return value.schema + + return str(value) + + def _to_schema(s, ignore_extra_keys): + if not isinstance(s, Schema): + return Schema(s, ignore_extra_keys=ignore_extra_keys) + + return s + + s = schema.schema + i = schema.ignore_extra_keys + flavor = _priority(s) + + return_schema = {} + + return_description = description or schema.description + if return_description: + return_schema["description"] = return_description + + # Check if we have to create a common definition and use as reference + if allow_reference and schema.as_reference: + # Generate sub schema if not already done + if schema.name not in definitions_by_name: + definitions_by_name[schema.name] = {} # Avoid infinite loop + definitions_by_name[schema.name] = _json_schema(schema, is_main_schema=False, allow_reference=False) + + return_schema["$ref"] = "#/definitions/" + schema.name + else: + if flavor == TYPE: + # Handle type + return_schema["type"] = _get_type_name(s) + elif flavor == ITERABLE: + # Handle arrays or dict 
schema + + return_schema["type"] = "array" + if len(s) == 1: + return_schema["items"] = _json_schema(_to_schema(s[0], i), is_main_schema=False) + elif len(s) > 1: + return_schema["items"] = _json_schema(Schema(Or(*s)), is_main_schema=False) + elif isinstance(s, Or): + # Handle Or values + + # Check if we can use an enum + if all(priority == COMPARABLE for priority in [_priority(value) for value in s.args]): + or_values = [str(s) if isinstance(s, Literal) else s for s in s.args] + # All values are simple, can use enum or const + if len(or_values) == 1: + return_schema["const"] = _to_json_type(or_values[0]) + return return_schema + return_schema["enum"] = or_values + else: + # No enum, let's go with recursive calls + any_of_values = [] + for or_key in s.args: + new_value = _json_schema(_to_schema(or_key, i), is_main_schema=False) + if new_value != {} and new_value not in any_of_values: + any_of_values.append(new_value) + if len(any_of_values) == 1: + # Only one representable condition remains, do not put under anyOf + return_schema.update(any_of_values[0]) + else: + return_schema["anyOf"] = any_of_values + elif isinstance(s, And): + # Handle And values + all_of_values = [] + for and_key in s.args: + new_value = _json_schema(_to_schema(and_key, i), is_main_schema=False) + if new_value != {} and new_value not in all_of_values: + all_of_values.append(new_value) + if len(all_of_values) == 1: + # Only one representable condition remains, do not put under allOf + return_schema.update(all_of_values[0]) + else: + return_schema["allOf"] = all_of_values + elif flavor == COMPARABLE: + return_schema["const"] = _to_json_type(s) + elif flavor == VALIDATOR and type(s) == Regex: + return_schema["type"] = "string" + return_schema["pattern"] = s.pattern_str + else: + if flavor != DICT: + # If not handled, do not check + return return_schema + + # Schema is a dict + + required_keys = [] + expanded_schema = {} + additional_properties = i + for key in s: + if isinstance(key, Hook): + 
continue + + def _key_allows_additional_properties(key): + """Check if a key is broad enough to allow additional properties""" + if isinstance(key, Optional): + return _key_allows_additional_properties(key.schema) + + return key == str or key == object + + def _get_key_description(key): + """Get the description associated to a key (as specified in a Literal object). Return None if not a Literal""" + if isinstance(key, Optional): + return _get_key_description(key.schema) + + if isinstance(key, Literal): + return key.description + + return None + + def _get_key_name(key): + """Get the name of a key (as specified in a Literal object). Return the key unchanged if not a Literal""" + if isinstance(key, Optional): + return _get_key_name(key.schema) + + if isinstance(key, Literal): + return key.schema + + return key + + additional_properties = additional_properties or _key_allows_additional_properties(key) + sub_schema = _to_schema(s[key], ignore_extra_keys=i) + key_name = _get_key_name(key) + + if isinstance(key_name, str): + if not isinstance(key, Optional): + required_keys.append(key_name) + expanded_schema[key_name] = _json_schema( + sub_schema, is_main_schema=False, description=_get_key_description(key) + ) + if isinstance(key, Optional) and hasattr(key, "default"): + expanded_schema[key_name]["default"] = _to_json_type(_invoke_with_optional_kwargs(key.default, **kwargs) if callable(key.default) else key.default) # nopep8 + elif isinstance(key_name, Or): + # JSON schema does not support having a key named one name or another, so we just add both options + # This is less strict because we cannot enforce that one or the other is required + + for or_key in key_name.args: + expanded_schema[_get_key_name(or_key)] = _json_schema( + sub_schema, is_main_schema=False, description=_get_key_description(or_key) + ) + + return_schema.update( + { + "type": "object", + "properties": expanded_schema, + "required": required_keys, + "additionalProperties": additional_properties, + } + 
) + + if is_main_schema: + return_schema.update({"$id": schema_id, "$schema": "http://json-schema.org/draft-07/schema#"}) + if self._name: + return_schema["title"] = self._name + + if definitions_by_name: + return_schema["definitions"] = {} + for definition_name, definition in definitions_by_name.items(): + return_schema["definitions"][definition_name] = definition + + return _create_or_use_ref(return_schema) + + return _json_schema(self, True) + + +class Optional(Schema): + """Marker for an optional part of the validation Schema.""" + + _MARKER = object() + + def __init__(self, *args, **kwargs): + default = kwargs.pop("default", self._MARKER) + super(Optional, self).__init__(*args, **kwargs) + if default is not self._MARKER: + # See if I can come up with a static key to use for myself: + if _priority(self._schema) != COMPARABLE: + raise TypeError( + "Optional keys with defaults must have simple, " + "predictable values, like literal strings or ints. " + '"%r" is too complex.' % (self._schema,) + ) + self.default = default + self.key = str(self._schema) + + def __hash__(self): + return hash(self._schema) + + def __eq__(self, other): + return ( + self.__class__ is other.__class__ and + getattr(self, "default", self._MARKER) == getattr(other, "default", self._MARKER) and + self._schema == other._schema + ) + + def reset(self): + if hasattr(self._schema, "reset"): + self._schema.reset() + + +class Hook(Schema): + def __init__(self, *args, **kwargs): + self.handler = kwargs.pop("handler", lambda *args: None) + super(Hook, self).__init__(*args, **kwargs) + self.key = self._schema + + +class Forbidden(Hook): + def __init__(self, *args, **kwargs): + kwargs["handler"] = self._default_function + super(Forbidden, self).__init__(*args, **kwargs) + + @staticmethod + def _default_function(nkey, data, error): + raise SchemaForbiddenKeyError("Forbidden key encountered: %r in %r" % (nkey, data), error) + + +class Literal(object): + def __init__(self, value, description=None): + 
self._schema = value + self._description = description + + def __str__(self): + return self._schema + + def __repr__(self): + return 'Literal("' + self.schema + '", description="' + (self.description or "") + '")' + + @property + def description(self): + return self._description + + @property + def schema(self): + return self._schema + + +class Const(Schema): + def validate(self, data, **kwargs): + super(Const, self).validate(data, **kwargs) + return data + + +def _callable_str(callable_): + if hasattr(callable_, "__name__"): + return callable_.__name__ + return str(callable_) + + +def _plural_s(sized): + return "s" if len(sized) > 1 else "" + + +# The following functions are added to be able to translate an user-specified Dict into a SchemaDict. The Schema +# class module was obtained from: + +# https://github.com/keleshev/schema/blob/master/schema.py + + +def build_schema(data: Dict) -> Dict: + """ + Description + ----------- + + This function takes in a user-provided dictionary and defines the + respective schema. + + Parameters + ---------- + + data: Dict + + A Python dictionary containing the schema attributes. + + Returns + ------- + + schema_dict: Dict + + A Python dictionary containing the schema. + + """ + + # TODO: Find an alternative to pydoc.locate() to identify type. + schema_dict = {} + for datum in data: + data_dict = data[datum] + + # Check whether the variable is optional; proceed accordingly. + if "optional" not in data_dict: + data_dict['optional'] = False + schema_dict[datum] = locate(data_dict["type"]) + else: + if data_dict['optional']: + schema_dict[datum] = locate(data_dict["type"]) + + # Build the schema accordingly. 
+ try: + if data_dict["optional"]: + schema_dict[Optional(datum, default=data_dict["default"]) + ] = locate(data_dict["type"]) + else: + schema_dict[datum] = locate(data_dict["type"]) + except AttributeError: + pass + + return schema_dict + + +def validate_schema(schema_dict: Dict, data: Dict) -> Dict: + """ + Description + ------------ + + This function validates the schema; if an optional key value has + not be specified, a the default value for the option is defined + within the returned Dict. + + Parameters + ---------- + + schema_dict: Dict + + A Python dictionary containing the schema. + + data: Dict + + A Python dictionary containing the configuration to be + validated. + + Returns + ------- + + data: Dict + + A Python dictionary containing the validated schema; if any + optional values have not been define within `data` (above), + they are updated with the schema default values. + + """ + + # Define the schema instance. + schema = Schema([schema_dict], ignore_extra_keys=True) + + # If any `Optional` keys are missing from the scheme to be + # validated (`data`), update them acccordingly. + for k, v in schema_dict.items(): + if isinstance(k, Optional): + if k.key not in data: + data[k.key] = k.default + + # Validate the schema and return the updated dictionary. + schema.validate([data]) + + return data diff --git a/ush/python/pygw/src/tests/test-files/test_schema.yaml b/ush/python/pygw/src/tests/test-files/test_schema.yaml new file mode 100644 index 0000000000..741313118b --- /dev/null +++ b/ush/python/pygw/src/tests/test-files/test_schema.yaml @@ -0,0 +1,21 @@ +# A mandatory boolean valued variable. +variable1: + optional: False + type: bool + +# An optional complex valued variable. +variable2: + optional: True + type: int + default: 2 + +# A mandatory string variable. +variable3: + type: str + +# The default value should be ignored here as it is not optional; the +# default value is meaningless. 
+variable4: + type: float + optional: False + default: 10.0 diff --git a/ush/python/pygw/src/tests/test_schema.py b/ush/python/pygw/src/tests/test_schema.py new file mode 100644 index 0000000000..220b9866a9 --- /dev/null +++ b/ush/python/pygw/src/tests/test_schema.py @@ -0,0 +1,82 @@ +""" +Description +----------- + +Unit-tests for `pygw.schema`. +""" + +import os +import pytest +from pygw import schema +from pygw.yaml_file import parse_yaml +from pygw.schema import SchemaError +from pygw.configuration import cast_strdict_as_dtypedict + + +# Define the path to the YAML-formatted file containing the schema +# attributes. +# yaml_path = os.path.join(os.getcwd(), "tests", +# "test-files", "test_schema.yaml") +# data = parse_yaml(path=yaml_path) +@pytest.mark.skip(reason="disable till the developer fixes the test") +def test_build_schema(): + """ + Description + ----------- + + This function tests the `pygw.schema.build_schema` function. + + """ + + # Test that the schema can be defined. + assert schema.build_schema(data=data) + + +@pytest.mark.skip(reason="disable till the developer fixes the test") +def test_validate_schema(): + """ + Description + ----------- + + This function tests various application configurations (i.e., + `data_in`) for various schema validation applications. + + """ + + # Define the schema. + schema_dict = schema.build_schema(data=data) + + # Test that the schema validates and returns a the dictionary + # passed; this unit-test should pass. + data_in = { + "variable1": False, + "variable2": 1, + "variable3": "hello world", + "variable4": 10.0, + } + data_out = schema.validate_schema(schema_dict=schema_dict, data=data_in) + assert True + assert data_in == data_out + + # Test that optional values are updated with defaults. + del data_in["variable2"] + data_out = schema.validate_schema(schema_dict=schema_dict, data=data_in) + assert True + + # This unit-test should raise a `SchemaError` exception in order + # to pass. 
+ data_in["variable2"] = "I **should** fail." + try: + data_out = schema.validate_schema( + schema_dict=schema_dict, data=data_in) + except SchemaError: + assert True + + # This unit-test passes the full environment, including `data_in`, + # to be validated; this tests the `ignore_extra_keys` attribute; + # this unit-test should pass. + del data_in["variable2"] + data_in = {**cast_strdict_as_dtypedict(os.environ), **data_in} + data_out = schema.validate_schema(schema_dict=schema_dict, data=data_in) + assert True + assert data_in == data_out diff --git a/ush/radmon_diag_ck.sh b/ush/radmon_diag_ck.sh index 142e99f8c7..4045ddb2d5 100755 --- a/ush/radmon_diag_ck.sh +++ b/ush/radmon_diag_ck.sh @@ -105,7 +105,7 @@ echo "--> radmon_diag_ck.sh" # the radstat file (which is a tar file) are gzipped. # I find that 0 sized, gzipped file has a size of ~52 # (I assume that's for header and block size). - # + # # So for this check we'll assume anything in the radstat # file with a size of > 1000 bytes is suspect. (That's # overkill, 100 is probably sufficient, but I'm the @@ -113,42 +113,41 @@ echo "--> radmon_diag_ck.sh" # the actual file size of those. Anything with an # uncompressed size of 0 goes on the zero_len_diag list. # - verbose_contents=`tar -tvf ${radstat_file} | grep '_ges'` - - - #------------------------------------------------------- - # note: need to reset the IFS to line breaks otherwise - # the $vc value in the for loop below will break - # on all white space, not the line break. 
- SAVEIFS=$IFS - IFS=$(echo -en "\n\b") + # TODO Rewrite these array parsing commands to avoid using Bash's sloppy word splitting + # File sizes contain only digits and immediately precede the date + # shellcheck disable=SC2207 + sizes=($(tar -vtf ${radstat_file} --wildcards '*_ges*' | grep -P -o '(\d)+(?= \d{4}-\d{2}-\d{2})')) + # Filenames are the last group of non-whitespace characters + # shellcheck disable=SC2207 + filenames=($(tar -vtf ${radstat_file} --wildcards '*_ges*' | grep -P -o '\S+$')) + # shellcheck disable= - for vc in ${verbose_contents}; do - gzip_len=`echo ${vc} | gawk '{print $3}'` + for file_num in "${!filenames[@]}"; do + file_name="${filenames[${file_num}]}" + file_size="${sizes[${file_num}]}" - if [[ ${gzip_len} -le 1000 ]]; then - test_file=`echo ${vc} | gawk '{print $6}'` - tar -xf ${radstat_file} ${test_file} + if (( file_size <= 1000 )); then + tar -xf "${radstat_file}" "${file_name}" + gunzip "${file_name}" + uz_file_name="${file_name%.*}" + uz_file_size=$(stat -c "%s" "${uz_file_name}") - gunzip ${test_file} - unzipped_file=`echo ${test_file%.*}` - - uz_file_size=`ls -la ${unzipped_file} | gawk '{print $5}'` - if [[ ${uz_file_size} -le 0 ]]; then - sat=`echo ${unzipped_file} | gawk -F"diag_" '{print $2}' | - gawk -F"_ges" '{print $1}'` + if (( uz_file_size <= 0 )); then + # Remove leading diag_ + sat=${uz_file_name#diag_} + # Remove trailing _ges* + sat=${sat%_ges*} zero_len_diag="${zero_len_diag} ${sat}" fi - rm -f ${unzipped_file} + rm -f ${uz_file_name} fi - done - IFS=${SAVEIFS} # reset IFS to default (white space) + done echo "" echo "zero_len_diag = ${zero_len_diag}" diff --git a/ush/syndat_qctropcy.sh b/ush/syndat_qctropcy.sh index 4f62435c73..5b5b4ba34b 100755 --- a/ush/syndat_qctropcy.sh +++ b/ush/syndat_qctropcy.sh @@ -113,17 +113,19 @@ positional parameter 1" set_trace echo $msg >> $pgmout -# Copy null files into "${COMSP}syndata.tcvitals.$tmmark" and -# "${COMSP}jtwc-fnoc.tcvitals.$tmmark" so later ftp attempts will find 
and +# Copy null files into "${COM_OBS}/${RUN}.${cycle}.syndata.tcvitals.$tmmark" and +# "${COM_OBS}/${RUN}.${cycle}.jtwc-fnoc.tcvitals.$tmmark" so later ftp attempts will find and # copy the zero-length file and avoid wasting time with multiple attempts # to remote machine(s) # (Note: Only do so if files don't already exist) if [ $SENDCOM = YES ]; then - [ ! -s ${COMSP}syndata.tcvitals.$tmmark ] && \ - cp /dev/null ${COMSP}syndata.tcvitals.$tmmark - [ ! -s ${COMSP}jtwc-fnoc.tcvitals.$tmmark ] && \ - cp /dev/null ${COMSP}jtwc-fnoc.tcvitals.$tmmark + if [[ ! -s "${COM_OBS}/${RUN}.${cycle}.syndata.tcvitals.${tmmark}" ]]; then + cp "/dev/null" "${COM_OBS}/${RUN}.${cycle}.syndata.tcvitals.${tmmark}" + fi + if [[ ! -s "${COM_OBS}/${RUN}.${cycle}.jtwc-fnoc.tcvitals.${tmmark}" ]]; then + cp "/dev/null" "${COM_OBS}/${RUN}.${cycle}.jtwc-fnoc.tcvitals.${tmmark}" + fi fi exit @@ -289,16 +291,18 @@ if [ "$errqct" -gt '0' ];then echo $msg >> $pgmout # In the event of a ERROR in PROGRAM SYNDAT_QCTROPCY, copy null files into -# "${COMSP}syndata.tcvitals.$tmmark" and "${COMSP}jtwc-fnoc.tcvitals.$tmmark" +# "${COM_OBS}/${RUN}.${cycle}.syndata.tcvitals.$tmmark" and "${COM_OBS}/${RUN}.${cycle}.jtwc-fnoc.tcvitals.$tmmark" # so later ftp attempts will find and copy the zero-length file and avoid # wasting time with multiple attempts to remote machine(s) # (Note: Only do so if files don't already exist) if [ $SENDCOM = YES ]; then - [ ! -s ${COMSP}syndata.tcvitals.$tmmark ] && \ - cp /dev/null ${COMSP}syndata.tcvitals.$tmmark - [ ! -s ${COMSP}jtwc-fnoc.tcvitals.$tmmark ] && \ - cp /dev/null ${COMSP}jtwc-fnoc.tcvitals.$tmmark + if [[ ! -s "${COM_OBS}/${RUN}.${cycle}.syndata.tcvitals.${tmmark}" ]]; then + cp "/dev/null" "${COM_OBS}/${RUN}.${cycle}.syndata.tcvitals.${tmmark}" + fi + if [[ ! 
-s ${COM_OBS}/${RUN}.${cycle}.jtwc-fnoc.tcvitals.${tmmark} ]]; then + cp "/dev/null" "${COM_OBS}/${RUN}.${cycle}.jtwc-fnoc.tcvitals.${tmmark}" + fi fi exit @@ -375,15 +379,15 @@ fi # This is the file that connects to the later RELOCATE and/or PREP scripts -[ $SENDCOM = YES ] && cp current ${COMSP}syndata.tcvitals.$tmmark +[ $SENDCOM = YES ] && cp current "${COM_OBS}/${RUN}.${cycle}.syndata.tcvitals.${tmmark}" # Create the DBNet alert if [ $SENDDBN = "YES" ] then - $DBNROOT/bin/dbn_alert MODEL GDAS_TCVITALS $job ${COMSP}syndata.tcvitals.$tmmark + "${DBNROOT}/bin/dbn_alert" "MODEL" "GDAS_TCVITALS" "${job}" "${COM_OBS}/${RUN}.${cycle}.syndata.tcvitals.${tmmark}" fi # Write JTWC/FNOC Tcvitals to /com path since not saved anywhere else -[ $SENDCOM = YES ] && cp fnoc "${COMSP}jtwc-fnoc.tcvitals.${tmmark}" +[ $SENDCOM = YES ] && cp fnoc "${COM_OBS}/${RUN}.${cycle}.jtwc-fnoc.tcvitals.${tmmark}" exit diff --git a/ush/tropcy_relocate.sh b/ush/tropcy_relocate.sh index 1383df693c..9b170ddfd0 100755 --- a/ush/tropcy_relocate.sh +++ b/ush/tropcy_relocate.sh @@ -430,22 +430,22 @@ to center relocation date/time;" fi # For center time sigma guess file obtained via getges, store pathname from -# getges into ${COMSP}sgesprep_pre-relocate_pathname.$tmmark and, for now, -# also in ${COMSP}sgesprep_pathname.$tmmark - if relocation processing stops +# getges into ${COM_OBS}/${RUN}.${cycle}.sgesprep_pre-relocate_pathname.$tmmark and, for now, +# also in ${COM_OBS}/${RUN}.${cycle}.sgesprep_pathname.$tmmark - if relocation processing stops # due to an error or due to no input tcvitals records found, then the center # time sigma guess will not be modified and this getges file will be read in # subsequent PREP processing; if relocation processing continues and the -# center sigma guess is modified, then ${COMSP}sgesprep_pathname.$tmmark will +# center sigma guess is modified, then ${COM_OBS}/${RUN}.${cycle}.sgesprep_pathname.$tmmark will # be removed later in this script {the subsequent PREP 
step will correctly -# update ${COMSP}sgesprep_pathname.$tmmark to point to the sgesprep file +# update ${COM_OBS}/${RUN}.${cycle}.sgesprep_pathname.$tmmark to point to the sgesprep file # updated here by the relocation} # ---------------------------------------------------------------------------- if [ $fhr = "0" ]; then - $USHGETGES/getges.sh -e $envir_getges -n $network_getges -v $CDATE10 \ - -t $stype > ${COMSP}sgesprep_pre-relocate_pathname.$tmmark - cp ${COMSP}sgesprep_pre-relocate_pathname.$tmmark \ - ${COMSP}sgesprep_pathname.$tmmark + "${USHGETGES}/getges.sh" -e "${envir_getges}" -n "${network_getges}" -v "${CDATE10}" \ + -t "${stype}" > "${COM_OBS}/${RUN}.${cycle}.sgesprep_pre-relocate_pathname.${tmmark}" + cp "${COM_OBS}/${RUN}.${cycle}.sgesprep_pre-relocate_pathname.${tmmark}" \ + "${COM_OBS}/${RUN}.${cycle}.sgesprep_pathname.${tmmark}" fi set +x echo @@ -493,7 +493,7 @@ done if [ -f ${tstsp}syndata.tcvitals.$tmmark ]; then cp ${tstsp}syndata.tcvitals.$tmmark tcvitals.now else - cp ${COMSP}syndata.tcvitals.$tmmark tcvitals.now + cp "${COM_OBS}/${RUN}.${cycle}.syndata.tcvitals.${tmmark}" "tcvitals.now" fi @@ -517,7 +517,7 @@ if [ $errgrep -ne 0 ] ; then echo "NO TCVITAL RECORDS FOUND FOR $CDATE10 - EXIT TROPICAL CYCLONE \ RELOCATION PROCESSING" -# The existence of ${COMSP}tropcy_relocation_status.$tmmark file will tell the +# The existence of ${COM_OBS}/${RUN}.${cycle}.tropcy_relocation_status.$tmmark file will tell the # subsequent PREP processing that RELOCATION processing occurred, echo # "NO RECORDS to process" into it to further tell PREP processing that records # were not processed by relocation and the global sigma guess was NOT @@ -525,14 +525,15 @@ RELOCATION PROCESSING" # found) # Note: When tropical cyclone relocation does run to completion and the # global sigma guess is modified, the parent script to this will echo -# "RECORDS PROCESSED" into ${COMSP}tropcy_relocation_status.$tmmark +# "RECORDS PROCESSED" into 
${COM_OBS}/${RUN}.${cycle}.tropcy_relocation_status.$tmmark # assuming it doesn't already exist (meaning "NO RECORDS to process" # was NOT echoed into it here) # ---------------------------------------------------------------------------- - echo "NO RECORDS to process" > ${COMSP}tropcy_relocation_status.$tmmark - [ ! -s ${COMSP}tcvitals.relocate.$tmmark ] && \ - cp /dev/null ${COMSP}tcvitals.relocate.$tmmark + echo "NO RECORDS to process" > "${COM_OBS}/${RUN}.${cycle}.tropcy_relocation_status.${tmmark}" + if [[ ! -s "${COM_OBS}/${RUN}.${cycle}.tcvitals.relocate.${tmmark}" ]]; then + cp "/dev/null" "${COM_OBS}/${RUN}.${cycle}.tcvitals.relocate.${tmmark}" + fi else cat VITL >>tcvitals @@ -687,32 +688,32 @@ else rm -f RELOCATE_GES cmd if [ "$SENDCOM" = "YES" ]; then - cp rel_inform1 ${COMSP}inform.relocate.$tmmark - cp tcvitals ${COMSP}tcvitals.relocate.$tmmark + cp "rel_inform1" "${COM_OBS}/${RUN}.${cycle}.inform.relocate.${tmmark}" + cp "tcvitals" "${COM_OBS}/${RUN}.${cycle}.tcvitals.relocate.${tmmark}" if [ "$SENDDBN" = "YES" ]; then if test "$RUN" = "gdas1" then - $DBNROOT/bin/dbn_alert MODEL GDAS1_TCI $job ${COMSP}inform.relocate.$tmmark - $DBNROOT/bin/dbn_alert MODEL GDAS1_TCI $job ${COMSP}tcvitals.relocate.$tmmark + "${DBNROOT}/bin/dbn_alert" "MODEL" "GDAS1_TCI" "${job}" "${COM_OBS}/${RUN}.${cycle}.inform.relocate.${tmmark}" + "${DBNROOT}/bin/dbn_alert" "MODEL" "GDAS1_TCI" "${job}" "${COM_OBS}/${RUN}.${cycle}.tcvitals.relocate.${tmmark}" fi if test "$RUN" = "gfs" then - $DBNROOT/bin/dbn_alert MODEL GFS_TCI $job ${COMSP}inform.relocate.$tmmark - $DBNROOT/bin/dbn_alert MODEL GFS_TCI $job ${COMSP}tcvitals.relocate.$tmmark + "${DBNROOT}/bin/dbn_alert" "MODEL" "GFS_TCI" "${job}" "${COM_OBS}/${RUN}.${cycle}.inform.relocate.${tmmark}" + "${DBNROOT}/bin/dbn_alert" "MODEL" "GFS_TCI" "${job}" "${COM_OBS}/${RUN}.${cycle}.tcvitals.relocate.${tmmark}" fi fi fi # -------------------------------------------------------------------------- # Since relocation processing has 
ended sucessfully (and the center sigma -# guess has been modified), remove ${COMSP}sgesprep_pathname.$tmmark (which +# guess has been modified), remove ${COM_OBS}/${RUN}.${cycle}.sgesprep_pathname.$tmmark (which # had earlier had getges center sigma guess pathname written into it - in # case of error or no input tcvitals records found) - the subsequent PREP -# step will correctly update ${COMSP}sgesprep_pathname.$tmmark to point to +# step will correctly update ${COM_OBS}/${RUN}.${cycle}.sgesprep_pathname.$tmmark to point to # the sgesprep file updated here by the relocation # -------------------------------------------------------------------------- - rm ${COMSP}sgesprep_pathname.$tmmark + rm "${COM_OBS}/${RUN}.${cycle}.sgesprep_pathname.${tmmark}" echo "TROPICAL CYCLONE RELOCATION PROCESSING SUCCESSFULLY COMPLETED FOR \ $CDATE10" diff --git a/ush/wave_grib2_sbs.sh b/ush/wave_grib2_sbs.sh index 54a298b41c..8511515abb 100755 --- a/ush/wave_grib2_sbs.sh +++ b/ush/wave_grib2_sbs.sh @@ -25,55 +25,54 @@ # --------------------------------------------------------------------------- # # 0. Preparations -source "$HOMEgfs/ush/preamble.sh" +source "${HOMEgfs}/ush/preamble.sh" # 0.a Basic modes of operation - cd $GRIBDATA +cd "${GRIBDATA}" || exit 2 - alertName=$(echo $RUN|tr [a-z] [A-Z]) +alertName=${RUN^^} - grdID=$1 - gribDIR=${grdID}_grib - rm -rfd ${gribDIR} - mkdir ${gribDIR} - err=$? - if [ $err != 0 ] - then - set +x - echo ' ' - echo '******************************************************************************* ' - echo '*** FATAL ERROR : ERROR IN ww3_grib2 (COULD NOT CREATE TEMP DIRECTORY) *** ' - echo '******************************************************************************* ' - echo ' ' - set_trace - exit 1 - fi +grdID=$1 +gribDIR="${grdID}_grib" +rm -rfd "${gribDIR}" +mkdir "${gribDIR}" +err=$? 
+if [[ ${err} != 0 ]]; then + set +x + echo ' ' + echo '******************************************************************************* ' + echo '*** FATAL ERROR : ERROR IN ww3_grib2 (COULD NOT CREATE TEMP DIRECTORY) *** ' + echo '******************************************************************************* ' + echo ' ' + set_trace + exit 1 +fi - cd ${gribDIR} +cd "${gribDIR}" || exit 2 # 0.b Define directories and the search path. # The tested variables should be exported by the postprocessor script. - GRIDNR=$2 - MODNR=$3 - ymdh=$4 - fhr=$5 - grdnam=$6 - grdres=$7 - gribflags=$8 - ngrib=1 # only one time slice - dtgrib=3600 # only one time slice +GRIDNR=$2 +MODNR=$3 +ymdh=$4 +fhr=$5 +grdnam=$6 +grdres=$7 +gribflags=$8 +ngrib=1 # only one time slice +dtgrib=3600 # only one time slice # SBS one time slice per file - FH3=$(printf %03i $fhr) +FH3=$(printf %03i "${fhr}") # Verify if grib2 file exists from interrupted run - ENSTAG="" - if [ ${waveMEMB} ]; then ENSTAG=".${membTAG}${waveMEMB}" ; fi - outfile=${WAV_MOD_TAG}.${cycle}${ENSTAG}.${grdnam}.${grdres}.f${FH3}.grib2 +ENSTAG="" +if [[ -n ${waveMEMB} ]]; then ENSTAG=".${membTAG}${waveMEMB}" ; fi +outfile="${WAV_MOD_TAG}.${cycle}${ENSTAG}.${grdnam}.${grdres}.f${FH3}.grib2" # Only create file if not present in COM - if [ ! -s ${COMOUT}/gridded/${outfile}.idx ]; then +if [[ ! 
-s "${COM_WAVE_GRID}/${outfile}.idx" ]]; then set +x echo ' ' @@ -83,11 +82,10 @@ source "$HOMEgfs/ush/preamble.sh" echo " Model ID : $WAV_MOD_TAG" set_trace - if [ -z "$CDATE" ] || [ -z "$cycle" ] || [ -z "$EXECwave" ] || \ - [ -z "$COMOUT" ] || [ -z "$WAV_MOD_TAG" ] || [ -z "$SENDCOM" ] || \ - [ -z "$gribflags" ] || \ - [ -z "$GRIDNR" ] || [ -z "$MODNR" ] || [ -z "$SENDDBN" ] - then + if [[ -z "${PDY}" ]] || [[ -z ${cyc} ]] || [[ -z "${cycle}" ]] || [[ -z "${EXECwave}" ]] || \ + [[ -z "${COM_WAVE_GRID}" ]] || [[ -z "${WAV_MOD_TAG}" ]] || [[ -z "${SENDCOM}" ]] || \ + [[ -z "${gribflags}" ]] || [[ -z "${GRIDNR}" ]] || [[ -z "${MODNR}" ]] || \ + [[ -z "${SENDDBN}" ]]; then set +x echo ' ' echo '***************************************************' @@ -98,75 +96,76 @@ source "$HOMEgfs/ush/preamble.sh" exit 1 fi -# 0.c Starting time for output + # 0.c Starting time for output - tstart="$(echo $ymdh | cut -c1-8) $(echo $ymdh | cut -c9-10)0000" + tstart="${ymdh:0:8} ${ymdh:8:2}0000" set +x - echo " Starting time : $tstart" - echo " Time step : Single SBS - echo " Number of times : Single SBS - echo " GRIB field flags : $gribflags" + echo " Starting time : ${tstart}" + echo " Time step : Single SBS" + echo " Number of times : Single SBS" + echo " GRIB field flags : ${gribflags}" echo ' ' set_trace -# 0.e Links to working directory + # 0.e Links to working directory - ln -s ${DATA}/mod_def.$grdID mod_def.ww3 - ln -s ${DATA}/output_${ymdh}0000/out_grd.$grdID out_grd.ww3 + ln -s "${DATA}/mod_def.${grdID}" "mod_def.ww3" + ln -s "${DATA}/output_${ymdh}0000/out_grd.${grdID}" "out_grd.ww3" -# --------------------------------------------------------------------------- # -# 1. Generate GRIB file with all data -# 1.a Generate input file for ww3_grib2 -# Template copied in mother script ... + # --------------------------------------------------------------------------- # + # 1. 
Generate GRIB file with all data + # 1.a Generate input file for ww3_grib2 + # Template copied in mother script ... set +x echo " Generate input file for ww3_grib2" set_trace - sed -e "s/TIME/$tstart/g" \ - -e "s/DT/$dtgrib/g" \ - -e "s/NT/$ngrib/g" \ - -e "s/GRIDNR/$GRIDNR/g" \ - -e "s/MODNR/$MODNR/g" \ - -e "s/FLAGS/$gribflags/g" \ - ${DATA}/ww3_grib2.${grdID}.inp.tmpl > ww3_grib.inp + sed -e "s/TIME/${tstart}/g" \ + -e "s/DT/${dtgrib}/g" \ + -e "s/NT/${ngrib}/g" \ + -e "s/GRIDNR/${GRIDNR}/g" \ + -e "s/MODNR/${MODNR}/g" \ + -e "s/FLAGS/${gribflags}/g" \ + "${DATA}/ww3_grib2.${grdID}.inp.tmpl" > ww3_grib.inp echo "ww3_grib.inp" cat ww3_grib.inp -# 1.b Run GRIB packing program + + # 1.b Run GRIB packing program set +x echo " Run ww3_grib2" - echo " Executing $EXECwave/ww3_grib" + echo " Executing ${EXECwave}/ww3_grib" set_trace export pgm=ww3_grib;. prep_step - $EXECwave/ww3_grib > grib2_${grdnam}_${FH3}.out 2>&1 + "${EXECwave}/ww3_grib" > "grib2_${grdnam}_${FH3}.out" 2>&1 export err=$?;err_chk - if [ ! -s gribfile ]; then - set +x - echo ' ' - echo '************************************************ ' - echo '*** FATAL ERROR : ERROR IN ww3_grib encoding *** ' - echo '************************************************ ' - echo ' ' - set_trace - exit 3 - fi - - if [ $fhr -gt 0 ]; then - $WGRIB2 gribfile -set_date $CDATE -set_ftime "$fhr hour fcst" -grib ${COMOUT}/gridded/${outfile} + if [ ! -s gribfile ]; then + set +x + echo ' ' + echo '************************************************ ' + echo '*** FATAL ERROR : ERROR IN ww3_grib encoding *** ' + echo '************************************************ ' + echo ' ' + set_trace + exit 3 + fi + + if (( fhr > 0 )); then + ${WGRIB2} gribfile -set_date "${PDY}${cyc}" -set_ftime "${fhr} hour fcst" -grib "${COM_WAVE_GRID}/${outfile}" err=$? 
else - $WGRIB2 gribfile -set_date $CDATE -set_ftime "$fhr hour fcst" -set table_1.4 1 -set table_1.2 1 -grib ${COMOUT}/gridded/${outfile} + ${WGRIB2} gribfile -set_date "${PDY}${cyc}" -set_ftime "${fhr} hour fcst" \ + -set table_1.4 1 -set table_1.2 1 -grib "${COM_WAVE_GRID}/${outfile}" err=$? fi - if [ $err != 0 ] - then + if [[ ${err} != 0 ]]; then set +x echo ' ' echo '********************************************* ' @@ -177,65 +176,62 @@ source "$HOMEgfs/ush/preamble.sh" exit 3 fi -# Create index - $WGRIB2 -s $COMOUT/gridded/${outfile} > $COMOUT/gridded/${outfile}.idx + # Create index + ${WGRIB2} -s "${COM_WAVE_GRID}/${outfile}" > "${COM_WAVE_GRID}/${outfile}.idx" -# Create grib2 subgrid is this is the source grid - if [ "${grdID}" = "${WAV_SUBGRBSRC}" ]; then + # Create grib2 subgrid is this is the source grid + if [[ "${grdID}" = "${WAV_SUBGRBSRC}" ]]; then for subgrb in ${WAV_SUBGRB}; do subgrbref=$(echo ${!subgrb} | cut -d " " -f 1-20) subgrbnam=$(echo ${!subgrb} | cut -d " " -f 21) subgrbres=$(echo ${!subgrb} | cut -d " " -f 22) subfnam="${WAV_MOD_TAG}.${cycle}${ENSTAG}.${subgrbnam}.${subgrbres}.f${FH3}.grib2" - $COPYGB2 -g "${subgrbref}" -i0 -x ${COMOUT}/gridded/${outfile} ${COMOUT}/gridded/${subfnam} - $WGRIB2 -s $COMOUT/gridded/${subfnam} > $COMOUT/gridded/${subfnam}.idx + ${COPYGB2} -g "${subgrbref}" -i0 -x "${COM_WAVE_GRID}/${outfile}" "${COM_WAVE_GRID}/${subfnam}" + ${WGRIB2} -s "${COM_WAVE_GRID}/${subfnam}" > "${COM_WAVE_GRID}/${subfnam}.idx" done fi -# 1.e Save in /com - - if [ ! -s $COMOUT/gridded/${outfile} ] - then - set +x - echo ' ' - echo '********************************************* ' - echo '*** FATAL ERROR : ERROR IN ww3_grib2 *** ' - echo '********************************************* ' - echo ' ' - echo " Error in moving grib file ${outfile} to com" - echo ' ' - set_trace - exit 4 - fi - if [ ! 
-s $COMOUT/gridded/${outfile} ] - then - set +x - echo ' ' - echo '*************************************************** ' - echo '*** FATAL ERROR : ERROR IN ww3_grib2 INDEX FILE *** ' - echo '*************************************************** ' - echo ' ' - echo " Error in moving grib file ${outfile}.idx to com" - echo ' ' - set_trace - exit 4 - fi - - if [[ "$SENDDBN" = 'YES' ]] && [[ ${outfile} != *global.0p50* ]] - then - set +x - echo " Alerting GRIB file as $COMOUT/gridded/${outfile}" - echo " Alerting GRIB index file as $COMOUT/gridded/${outfile}.idx" - set_trace - $DBNROOT/bin/dbn_alert MODEL ${alertName}_WAVE_GB2 $job $COMOUT/gridded/${outfile} - $DBNROOT/bin/dbn_alert MODEL ${alertName}_WAVE_GB2_WIDX $job $COMOUT/gridded/${outfile}.idx - else - echo "${outfile} is global.0p50, not alert out" - fi + # 1.e Save in /com + if [[ ! -s "${COM_WAVE_GRID}/${outfile}" ]]; then + set +x + echo ' ' + echo '********************************************* ' + echo '*** FATAL ERROR : ERROR IN ww3_grib2 *** ' + echo '********************************************* ' + echo ' ' + echo " Error in moving grib file ${outfile} to com" + echo ' ' + set_trace + exit 4 + fi + if [[ ! -s "${COM_WAVE_GRID}/${outfile}.idx" ]]; then + set +x + echo ' ' + echo '*************************************************** ' + echo '*** FATAL ERROR : ERROR IN ww3_grib2 INDEX FILE *** ' + echo '*************************************************** ' + echo ' ' + echo " Error in moving grib file ${outfile}.idx to com" + echo ' ' + set_trace + exit 4 + fi -# --------------------------------------------------------------------------- # -# 3. 
Clean up the directory + if [[ "${SENDDBN}" = 'YES' ]] && [[ ${outfile} != *global.0p50* ]]; then + set +x + echo " Alerting GRIB file as ${COM_WAVE_GRID}/${outfile}" + echo " Alerting GRIB index file as ${COM_WAVE_GRID}/${outfile}.idx" + set_trace + "${DBNROOT}/bin/dbn_alert" MODEL "${alertName}_WAVE_GB2" "${job}" "${COM_WAVE_GRID}/${outfile}" + "${DBNROOT}/bin/dbn_alert" MODEL "${alertName}_WAVE_GB2_WIDX" "${job}" "${COM_WAVE_GRID}/${outfile}.idx" + else + echo "${outfile} is global.0p50 or SENDDBN is NO, no alert sent" + fi + + + # --------------------------------------------------------------------------- # + # 3. Clean up the directory rm -f gribfile @@ -244,15 +240,15 @@ source "$HOMEgfs/ush/preamble.sh" set_trace cd ../ - mv -f ${gribDIR} done.${gribDIR} + mv -f "${gribDIR}" "done.${gribDIR}" - else - set +x - echo ' ' - echo " File ${COMOUT}/gridded/${outfile} found, skipping generation process" - echo ' ' - set_trace - fi +else + set +x + echo ' ' + echo " File ${COM_WAVE_GRID}/${outfile} found, skipping generation process" + echo ' ' + set_trace +fi # End of ww3_grib2.sh -------------------------------------------------- # diff --git a/ush/wave_grid_interp_sbs.sh b/ush/wave_grid_interp_sbs.sh index 4ccae7640d..bf34068874 100755 --- a/ush/wave_grid_interp_sbs.sh +++ b/ush/wave_grid_interp_sbs.sh @@ -65,9 +65,9 @@ source "$HOMEgfs/ush/preamble.sh" echo " Model ID : $WAV_MOD_TAG" set_trace - if [ -z "$CDATE" ] || [ -z "$cycle" ] || [ -z "$EXECwave" ] || \ - [ -z "$COMOUT" ] || [ -z "$WAV_MOD_TAG" ] || [ -z "$SENDCOM" ] || \ - [ -z "$SENDDBN" ] || [ -z "$waveGRD" ] + if [[ -z "${PDY}" ]] || [[ -z "${cyc}" ]] || [[ -z "${cycle}" ]] || [[ -z "${EXECwave}" ]] || \ + [[ -z "${COM_WAVE_PREP}" ]] || [[ -z "${WAV_MOD_TAG}" ]] || [[ -z "${SENDCOM}" ]] || \ + [[ -z "${SENDDBN}" ]] || [ -z "${waveGRD}" ] then set +x echo ' ' @@ -75,7 +75,7 @@ source "$HOMEgfs/ush/preamble.sh" echo '*** EXPORTED VARIABLES IN postprocessor NOT SET ***' echo 
'***************************************************' echo ' ' - echo "$CDATE $cycle $EXECwave $COMOUT $WAV_MOD_TAG $SENDCOM $SENDDBN $waveGRD" + echo "${PDY}${cyc} ${cycle} ${EXECwave} ${COM_WAVE_PREP} ${WAV_MOD_TAG} ${SENDCOM} ${SENDDBN} ${waveGRD}" set_trace exit 1 fi @@ -103,7 +103,7 @@ source "$HOMEgfs/ush/preamble.sh" # 1. Generate GRID file with all data # 1.a Generate Input file - time="$(echo $ymdh | cut -c1-8) $(echo $ymdh | cut -c9-10)0000" + time="${ymdh:0:8} ${ymdh:8:2}0000" sed -e "s/TIME/$time/g" \ -e "s/DT/$dt/g" \ @@ -175,14 +175,14 @@ source "$HOMEgfs/ush/preamble.sh" if [ "$SENDCOM" = 'YES' ] then set +x - echo " Saving GRID file as $COMOUT/rundata/$WAV_MOD_TAG.out_grd.$grdID.${CDATE}" + echo " Saving GRID file as ${COM_WAVE_PREP}/${WAV_MOD_TAG}.out_grd.${grdID}.${PDY}${cyc}" set_trace - cp ${DATA}/output_${ymdh}0000/out_grd.$grdID $COMOUT/rundata/$WAV_MOD_TAG.out_grd.$grdID.${CDATE} + cp "${DATA}/output_${ymdh}0000/out_grd.${grdID}" "${COM_WAVE_PREP}/${WAV_MOD_TAG}.out_grd.${grdID}.${PDY}${cyc}" # if [ "$SENDDBN" = 'YES' ] # then # set +x -# echo " Alerting GRID file as $COMOUT/rundata/$WAV_MOD_TAG.out_grd.$grdID.${CDATE} +# echo " Alerting GRID file as $COMOUT/rundata/$WAV_MOD_TAG.out_grd.$grdID.${PDY}${cyc} # set_trace # diff --git a/ush/wave_grid_moddef.sh b/ush/wave_grid_moddef.sh index 2809d1779a..5b1b212a16 100755 --- a/ush/wave_grid_moddef.sh +++ b/ush/wave_grid_moddef.sh @@ -101,7 +101,7 @@ source "$HOMEgfs/ush/preamble.sh" if [ -f mod_def.ww3 ] then - cp mod_def.ww3 $COMOUT/rundata/${CDUMP}wave.mod_def.${grdID} + cp mod_def.ww3 "${COM_WAVE_PREP}/${RUN}wave.mod_def.${grdID}" mv mod_def.ww3 ../mod_def.$grdID else set +x diff --git a/ush/wave_prnc_cur.sh b/ush/wave_prnc_cur.sh index 7b193313d3..6b1ab19db2 100755 --- a/ush/wave_prnc_cur.sh +++ b/ush/wave_prnc_cur.sh @@ -29,18 +29,19 @@ curfile=$2 fhr=$3 flagfirst=$4 fh3=$(printf "%03d" "${fhr#0}") +fext='f' # Timing has to be made relative to the single 00z RTOFS cycle for that PDY mkdir -p 
rtofs_${ymdh_rtofs} cd rtofs_${ymdh_rtofs} -ncks -x -v sst,sss,layer_density $curfile cur_uv_${PDY}_${fext}${fh3}.nc -ncks -O -a -h -x -v Layer cur_uv_${PDY}_${fext}${fh3}.nc cur_temp1.nc +ncks -x -v sst,sss,layer_density "${curfile} cur_uv_${PDY}_${fext}${fh3}.nc" +ncks -O -a -h -x -v Layer "cur_uv_${PDY}_${fext}${fh3}.nc" "cur_temp1.nc" ncwa -h -O -a Layer cur_temp1.nc cur_temp2.nc ncrename -h -O -v MT,time -d MT,time cur_temp2.nc ncks -v u_velocity,v_velocity cur_temp2.nc cur_temp3.nc -mv -f cur_temp3.nc cur_uv_${PDY}_${fext}${fh3}_flat.nc +mv -f "cur_temp3.nc" "cur_uv_${PDY}_${fext}${fh3}_flat.nc" # Convert to regular lat lon file # If weights need to be regenerated due to CDO ver change, use: @@ -48,19 +49,19 @@ mv -f cur_temp3.nc cur_uv_${PDY}_${fext}${fh3}_flat.nc cp ${FIXwave}/weights_rtofs_to_r4320x2160.nc ./weights.nc # Interpolate to regular 5 min grid -$CDO remap,r4320x2160,weights.nc cur_uv_${PDY}_${fext}${fh3}_flat.nc cur_5min_01.nc +${CDO} remap,r4320x2160,weights.nc "cur_uv_${PDY}_${fext}${fh3}_flat.nc" "cur_5min_01.nc" # Perform 9-point smoothing twice to make RTOFS data less noisy when # interpolating from 1/12 deg RTOFS grid to 1/6 deg wave grid if [ "WAV_CUR_CDO_SMOOTH" = "YES" ]; then - $CDO -f nc -smooth9 cur_5min_01.nc cur_5min_02.nc - $CDO -f nc -smooth9 cur_5min_02.nc cur_glo_uv_${PDY}_${fext}${fh3}_5min.nc + ${CDO} -f nc -smooth9 "cur_5min_01.nc" "cur_5min_02.nc" + ${CDO} -f nc -smooth9 "cur_5min_02.nc" "cur_glo_uv_${PDY}_${fext}${fh3}_5min.nc" else - mv cur_5min_01.nc cur_glo_uv_${PDY}_${fext}${fh3}_5min.nc + mv "cur_5min_01.nc" "cur_glo_uv_${PDY}_${fext}${fh3}_5min.nc" fi # Cleanup -rm -f cur_temp[123].nc cur_5min_??.nc cur_glo_uv_${PDY}_${fext}${fh3}.nc weights.nc +rm -f cur_temp[123].nc cur_5min_??.nc "cur_glo_uv_${PDY}_${fext}${fh3}.nc weights.nc" if [ ${flagfirst} = "T" ] then @@ -70,8 +71,8 @@ else fi rm -f cur.nc -ln -s cur_glo_uv_${PDY}_${fext}${fh3}_5min.nc cur.nc -ln -s ${DATA}/mod_def.${WAVECUR_FID} ./mod_def.ww3 +ln -s 
"cur_glo_uv_${PDY}_${fext}${fh3}_5min.nc" "cur.nc" +ln -s "${DATA}/mod_def.${WAVECUR_FID}" ./mod_def.ww3 export pgm=ww3_prnc;. prep_step $EXECwave/ww3_prnc 1> prnc_${WAVECUR_FID}_${ymdh_rtofs}.out 2>&1 diff --git a/ush/wave_prnc_ice.sh b/ush/wave_prnc_ice.sh index 4d9ac1fb3c..a32a2b7e43 100755 --- a/ush/wave_prnc_ice.sh +++ b/ush/wave_prnc_ice.sh @@ -47,18 +47,18 @@ source "$HOMEgfs/ush/preamble.sh" echo '! Make ice fields |' echo '+--------------------------------+' echo " Model TAG : $WAV_MOD_TAG" - echo " Model ID : ${CDUMP}wave" + echo " Model ID : ${RUN}wave" echo " Ice grid ID : $WAVEICE_FID" echo " Ice file : $WAVICEFILE" echo ' ' set_trace echo "Making ice fields." - if [ -z "$YMDH" ] || [ -z "$cycle" ] || \ - [ -z "$COMOUT" ] || [ -z "$FIXwave" ] || [ -z "$EXECwave" ] || \ - [ -z "$WAV_MOD_TAG" ] || [ -z "$WAVEICE_FID" ] || [ -z "$SENDCOM" ] || \ - [ -z "$COMIN_WAV_ICE" ] - then + if [[ -z "${YMDH}" ]] || [[ -z "${cycle}" ]] || \ + [[ -z "${COM_WAVE_PREP}" ]] || [[ -z "${FIXwave}" ]] || [[ -z "${EXECwave}" ]] || \ + [[ -z "${WAV_MOD_TAG}" ]] || [[ -z "${WAVEICE_FID}" ]] || [[ -z "${SENDCOM}" ]] || \ + [[ -z "${COM_OBS}" ]]; then + set +x echo ' ' echo '**************************************************' @@ -78,7 +78,7 @@ source "$HOMEgfs/ush/preamble.sh" # 1. 
Get the necessary files # 1.a Copy the ice data file - file=${COMIN_WAV_ICE}/${WAVICEFILE} + file=${COM_OBS}/${WAVICEFILE} if [ -f $file ] then @@ -175,13 +175,13 @@ source "$HOMEgfs/ush/preamble.sh" icefile=${WAV_MOD_TAG}.${WAVEICE_FID}.$cycle.ice elif [ "${WW3ATMIENS}" = "F" ] then - icefile=${CDUMP}wave.${WAVEICE_FID}.$cycle.ice + icefile=${RUN}wave.${WAVEICE_FID}.$cycle.ice fi set +x - echo " Saving ice.ww3 as $COMOUT/rundata/${icefile}" + echo " Saving ice.ww3 as ${COM_WAVE_PREP}/${icefile}" set_trace - cp ice.ww3 $COMOUT/rundata/${icefile} + cp ice.ww3 "${COM_WAVE_PREP}/${icefile}" rm -f ice.ww3 # --------------------------------------------------------------------------- # diff --git a/ush/wave_tar.sh b/ush/wave_tar.sh index bc98ea2d2a..9264aac5f3 100755 --- a/ush/wave_tar.sh +++ b/ush/wave_tar.sh @@ -76,9 +76,8 @@ source "$HOMEgfs/ush/preamble.sh" # 0.c Define directories and the search path. # The tested variables should be exported by the postprocessor script. - if [ -z "$cycle" ] || [ -z "$COMOUT" ] || [ -z "$WAV_MOD_TAG" ] || \ - [ -z "$SENDCOM" ] || [ -z "$SENDDBN" ] || [ -z "${STA_DIR}" ] - then + if [[ -z "${cycle}" ]] || [[ -z "${COM_WAVE_STATION}" ]] || [[ -z "${WAV_MOD_TAG}" ]] || \ + [[ -z "${SENDCOM}" ]] || [[ -z "${SENDDBN}" ]] || [[ -z "${STA_DIR}" ]]; then set +x echo ' ' echo '*****************************************************' @@ -180,10 +179,10 @@ source "$HOMEgfs/ush/preamble.sh" set +x echo ' ' - echo " Moving tar file ${file_name} to $COMOUT ..." + echo " Moving tar file ${file_name} to ${COM_WAVE_STATION} ..." set_trace - cp ${file_name} $COMOUT/station/. + cp "${file_name}" "${COM_WAVE_STATION}/." exit=$? 
@@ -203,10 +202,11 @@ source "$HOMEgfs/ush/preamble.sh" then set +x echo ' ' - echo " Alerting TAR file as $COMOUT/station/${file_name}" + echo " Alerting TAR file as ${COM_WAVE_STATION}/${file_name}" echo ' ' set_trace - $DBNROOT/bin/dbn_alert MODEL ${alertName}_WAVE_TAR $job $COMOUT/station/${file_name} + "${DBNROOT}/bin/dbn_alert MODEL" "${alertName}_WAVE_TAR" "${job}" \ + "${COM_WAVE_STATION}/${file_name}" fi # --------------------------------------------------------------------------- # diff --git a/versions/fix.ver b/versions/fix.ver index 256d8efc5a..7bcc4f75af 100644 --- a/versions/fix.ver +++ b/versions/fix.ver @@ -7,13 +7,11 @@ export chem_ver=20220805 export cice_ver=20220805 export cpl_ver=20220805 export datm_ver=20220805 -export gdas_bump_ver=20220805 export gdas_crtm_ver=20220805 export gdas_fv3jedi_ver=20220805 export gdas_gsibec_ver=20221031 -export gldas_ver=20220920 export glwu_ver=20220805 -export gsi_ver=20221128 +export gsi_ver=20230112 export lut_ver=20220805 export mom6_ver=20220805 export orog_ver=20220805 diff --git a/workflow/applications.py b/workflow/applications.py index 34746e613e..dfec3a6a18 100644 --- a/workflow/applications.py +++ b/workflow/applications.py @@ -99,7 +99,6 @@ def __init__(self, conf: Configuration) -> None: self.do_ocean = _base.get('DO_OCN', False) self.do_ice = _base.get('DO_ICE', False) self.do_aero = _base.get('DO_AERO', False) - self.do_gldas = _base.get('DO_GLDAS', False) self.do_bufrsnd = _base.get('DO_BUFRSND', False) self.do_gempak = _base.get('DO_GEMPAK', False) self.do_awips = _base.get('DO_AWIPS', False) @@ -107,9 +106,11 @@ def __init__(self, conf: Configuration) -> None: self.do_vrfy = _base.get('DO_VRFY', True) self.do_fit2obs = _base.get('DO_FIT2OBS', True) self.do_metp = _base.get('DO_METP', False) - self.do_jedivar = _base.get('DO_JEDIVAR', False) - self.do_jediens = _base.get('DO_JEDIENS', False) + self.do_jediatmvar = _base.get('DO_JEDIVAR', False) + self.do_jediatmens = _base.get('DO_JEDIENS', 
False) self.do_jediocnvar = _base.get('DO_JEDIOCNVAR', False) + self.do_jedilandda = _base.get('DO_JEDILANDDA', False) + self.do_mergensst = _base.get('DO_MERGENSST', False) self.do_hpssarch = _base.get('HPSSARCH', False) @@ -177,24 +178,22 @@ def _cycled_configs(self): configs = ['prep'] - if self.do_jedivar: - configs += ['atmanalprep', 'atmanalrun', 'atmanalpost'] + if self.do_jediatmvar: + configs += ['atmanlinit', 'atmanlrun', 'atmanlfinal'] else: configs += ['anal', 'analdiag'] if self.do_jediocnvar: - configs += ['ocnanalprep', 'ocnanalbmat', 'ocnanalrun', 'ocnanalpost'] + configs += ['ocnanalprep', 'ocnanalbmat', 'ocnanalrun', 'ocnanalchkpt', 'ocnanalpost', 'ocnanalvrfy'] + if self.do_ocean: configs += ['ocnpost'] configs += ['sfcanl', 'analcalc', 'fcst', 'post', 'vrfy', 'fit2obs', 'arch'] - if self.do_gldas: - configs += ['gldas'] - if self.do_hybvar: - if self.do_jediens: - configs += ['atmensanalprep', 'atmensanalrun', 'atmensanalpost'] + if self.do_jediatmens: + configs += ['atmensanlinit', 'atmensanlrun', 'atmensanlfinal'] else: configs += ['eobs', 'eomg', 'ediag', 'eupd'] configs += ['ecen', 'esfc', 'efcs', 'echgres', 'epos', 'earc'] @@ -226,6 +225,9 @@ def _cycled_configs(self): if self.do_aero: configs += ['aeroanlinit', 'aeroanlrun', 'aeroanlfinal'] + if self.do_jedilandda: + configs += ['landanlinit', 'landanlprep', 'landanlrun', 'landanlfinal'] + return configs @property @@ -234,20 +236,11 @@ def _forecast_only_configs(self): Returns the config_files that are involved in the forecast-only app """ - configs = ['fcst'] + configs = ['coupled_ic', 'fcst', 'arch'] if self.do_atm: configs += ['post', 'vrfy'] - configs += ['arch'] - - if self.model_app in ['S2S', 'S2SW', 'S2SWA', 'NG-GODAS']: - configs += ['coupled_ic'] - else: - configs += ['init'] - if self.do_hpssarch: - configs += ['getic'] - if self.do_aero: configs += ['aerosol_init'] @@ -354,20 +347,23 @@ def _get_cycled_task_names(self): gdas_gfs_common_cleanup_tasks = ['arch'] - if 
self.do_jedivar: - gdas_gfs_common_tasks_before_fcst += ['atmanalprep', 'atmanalrun', 'atmanalpost'] + if self.do_jediatmvar: + gdas_gfs_common_tasks_before_fcst += ['atmanlinit', 'atmanlrun', 'atmanlfinal'] else: gdas_gfs_common_tasks_before_fcst += ['anal'] if self.do_jediocnvar: - gdas_gfs_common_tasks_before_fcst += ['ocnanalprep', 'ocnanalbmat', 'ocnanalrun', 'ocnanalpost'] + gdas_gfs_common_tasks_before_fcst += ['ocnanalprep', 'ocnanalbmat', 'ocnanalrun', + 'ocnanalchkpt', 'ocnanalpost', 'ocnanalvrfy'] gdas_gfs_common_tasks_before_fcst += ['sfcanl', 'analcalc'] if self.do_aero: gdas_gfs_common_tasks_before_fcst += ['aeroanlinit', 'aeroanlrun', 'aeroanlfinal'] - gldas_tasks = ['gldas'] + if self.do_jedilandda: + gdas_gfs_common_tasks_before_fcst += ['landanlinit', 'landanlprep', 'landanlrun', 'landanlfinal'] + wave_prep_tasks = ['waveinit', 'waveprep'] wave_bndpnt_tasks = ['wavepostbndpnt', 'wavepostbndpntbll'] wave_post_tasks = ['wavepostsbs', 'wavepostpnt'] @@ -375,8 +371,8 @@ def _get_cycled_task_names(self): hybrid_tasks = [] hybrid_after_eupd_tasks = [] if self.do_hybvar: - if self.do_jediens: - hybrid_tasks += ['atmensanalprep', 'atmensanalrun', 'atmensanalpost', 'echgres'] + if self.do_jediatmens: + hybrid_tasks += ['atmensanlinit', 'atmensanlrun', 'atmensanlfinal', 'echgres'] else: hybrid_tasks += ['eobs', 'eupd', 'echgres'] hybrid_tasks += ['ediag'] if self.lobsdiag_forenkf else ['eomg'] @@ -384,12 +380,9 @@ def _get_cycled_task_names(self): # Collect all "gdas" cycle tasks gdas_tasks = gdas_gfs_common_tasks_before_fcst.copy() - if not self.do_jedivar: + if not self.do_jediatmvar: gdas_tasks += ['analdiag'] - if self.do_gldas: - gdas_tasks += gldas_tasks - if self.do_wave and 'gdas' in self.wave_cdumps: gdas_tasks += wave_prep_tasks @@ -468,14 +461,7 @@ def _get_forecast_only_task_names(self): This is the place where that order is set. 
""" - tasks = [] - - if self.model_app in ['S2S', 'S2SW', 'S2SWA', 'NG-GODAS']: - tasks += ['coupled_ic'] - else: - if self.do_hpssarch: - tasks += ['getic'] - tasks += ['init'] + tasks = ['coupled_ic'] if self.do_aero: tasks += ['aerosol_init'] diff --git a/workflow/ecflow_build.yml b/workflow/ecflow_build.yml index ff51ef1741..4ca70da9a2 100644 --- a/workflow/ecflow_build.yml +++ b/workflow/ecflow_build.yml @@ -19,16 +19,9 @@ suites: tasks: jgfs_forecast: triggers: - - task: jgfs_getic - task: jgfs_atmos_post_f( 2,1 ) - task: jgfs_forecast suite: fcstplus - jgfs_getic: - template: skip - events: - - test_event - edits: - RUN: 'gfs' post: tasks: jgfs_atmos_post_manager[ 1,2 ]: @@ -66,5 +59,5 @@ suites: tasks: jgfs_forecast: triggers: - - task: jgfs_getic + - task: jgfs_getic # TODO: Not sure about this, leave as is suite: fcstonly diff --git a/workflow/hosts/jet.yaml b/workflow/hosts/jet.yaml new file mode 100644 index 0000000000..903213b761 --- /dev/null +++ b/workflow/hosts/jet.yaml @@ -0,0 +1,23 @@ +BASE_GIT: '/lfs4/HFIP/hfv3gfs/glopara/git' +DMPDIR: '/lfs4/HFIP/hfv3gfs/glopara/dump' +PACKAGEROOT: '/lfs4/HFIP/hfv3gfs/glopara/nwpara' +COMROOT: '/lfs4/HFIP/hfv3gfs/glopara/com' +COMINsyn: '${COMROOT}/gfs/prod/syndat' +HOMEDIR: '/lfs4/HFIP/hfv3gfs/${USER}' +STMP: '/lfs4/HFIP/hfv3gfs/${USER}/stmp' +PTMP: '/lfs4/HFIP/hfv3gfs/${USER}/ptmp' +NOSCRUB: $HOMEDIR +ACCOUNT: hfv3gfs +SCHEDULER: slurm +QUEUE: batch +QUEUE_SERVICE: batch +PARTITION_BATCH: kjet +PARTITION_SERVICE: service +CHGRP_RSTPROD: 'YES' +CHGRP_CMD: 'chgrp rstprod' +HPSSARCH: 'YES' +LOCALARCH: 'NO' +ATARDIR: '/NCEPDEV/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}' +MAKE_NSSTBUFR: 'NO' +MAKE_ACFTBUFR: 'NO' +SUPPORTED_RESOLUTIONS: ['C384', 'C192', 'C96', 'C48'] diff --git a/workflow/prod.yml b/workflow/prod.yml index 04ddca61bb..98755d2f86 100644 --- a/workflow/prod.yml +++ b/workflow/prod.yml @@ -238,7 +238,6 @@ suites: - task: jgdas_atmos_analysis event: release_fcst - task: jgdas_wave_prep - 
- task: jgdas_atmos_gldas atmos: obsproc: dump: @@ -250,11 +249,6 @@ suites: triggers: - task: jobsproc_gdas_atmos_dump event: release_sfcprep - init: - tasks: - jgdas_atmos_gldas: - triggers: - - task: jgdas_atmos_analysis analysis: tasks: jgdas_atmos_analysis: diff --git a/workflow/rocoto/workflow_tasks.py b/workflow/rocoto/workflow_tasks.py index 370f27f771..12166689cc 100644 --- a/workflow/rocoto/workflow_tasks.py +++ b/workflow/rocoto/workflow_tasks.py @@ -4,20 +4,22 @@ from typing import List from applications import AppConfig import rocoto.rocoto as rocoto +from pygw.template import Template, TemplateConstants __all__ = ['Tasks', 'create_wf_task', 'get_wf_tasks'] class Tasks: - SERVICE_TASKS = ['arch', 'earc', 'getic'] - VALID_TASKS = ['aerosol_init', 'coupled_ic', 'getic', 'init', - 'prep', 'anal', 'sfcanl', 'analcalc', 'analdiag', 'gldas', 'arch', - 'atmanalprep', 'atmanalrun', 'atmanalpost', - 'ocnanalprep', 'ocnanalbmat', 'ocnanalrun', 'ocnanalpost', + SERVICE_TASKS = ['arch', 'earc'] + VALID_TASKS = ['aerosol_init', 'coupled_ic', + 'prep', 'anal', 'sfcanl', 'analcalc', 'analdiag', 'arch', + 'atmanlinit', 'atmanlrun', 'atmanlfinal', + 'ocnanalprep', 'ocnanalbmat', 'ocnanalrun', 'ocnanalchkpt', 'ocnanalpost', 'ocnanalvrfy', 'earc', 'ecen', 'echgres', 'ediag', 'efcs', 'eobs', 'eomg', 'epos', 'esfc', 'eupd', - 'atmensanalprep', 'atmensanalrun', 'atmensanalpost', + 'atmensanlinit', 'atmensanlrun', 'atmensanlfinal', 'aeroanlinit', 'aeroanlrun', 'aeroanlfinal', + 'landanlinit', 'landanlprep', 'landanlrun', 'landanlfinal', 'fcst', 'post', 'ocnpost', 'vrfy', 'metp', 'postsnd', 'awips', 'gempak', 'wafs', 'wafsblending', 'wafsblending0p25', @@ -69,6 +71,51 @@ def _is_this_a_gdas_task(cdump, task_name): if cdump != 'enkfgdas': raise TypeError(f'{task_name} must be part of the "enkfgdas" cycle and not {cdump}') + def _template_to_rocoto_cycstring(self, template: str, subs_dict: dict = {}) -> str: + ''' + Takes a string templated with ${ } and converts it into a 
string suitable + for use in a rocoto . Some common substitutions are defined by + default. Any additional variables in the template and overrides of the + defaults can be passed in by an optional dict. + + Variables substitued by default: + ${ROTDIR} -> '&ROTDIR;' + ${RUN} -> self.cdump + ${DUMP} -> self.cdump + ${MEMDIR} -> '' + ${YMD} -> '@Y@m@d' + ${HH} -> '@H' + + Parameters + ---------- + template: str + Template string with variables to be replaced + subs_dict: dict, optional + Dictionary containing substitutions + + Returns + ------- + str + Updated string with variables substituted + + ''' + + # Defaults + rocoto_conversion_dict = { + 'ROTDIR': '&ROTDIR;', + 'RUN': self.cdump, + 'DUMP': self.cdump, + 'MEMDIR': '', + 'YMD': '@Y@m@d', + 'HH': '@H' + } + + rocoto_conversion_dict.update(subs_dict) + + return Template.substitute_structure(template, + TemplateConstants.DOLLAR_CURLY_BRACE, + rocoto_conversion_dict.get) + def get_resource(self, task_name): """ Given a task name (task_name) and its configuration (task_names), @@ -200,60 +247,24 @@ def coupled_ic(self): return task - def getic(self): - - files = ['INPUT/sfc_data.tile6.nc', - 'RESTART/@Y@m@d.@H0000.sfcanl_data.tile6.nc'] - - deps = [] - for file in files: - dep_dict = {'type': 'data', 'data': f'&ROTDIR;/{self.cdump}.@Y@m@d/@H/{file}'} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep_condition='nor', dep=deps) - - resources = self.get_resource('getic') - task = create_wf_task('getic', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) - - return task - - def init(self): - - files = ['gfs.t@Hz.sanl', - 'gfs.t@Hz.atmanl.nemsio', - 'gfs.t@Hz.atmanl.nc', - 'atmos/gfs.t@Hz.atmanl.nc', - 'atmos/RESTART/@Y@m@d.@H0000.sfcanl_data.tile6.nc'] - - deps = [] - for file in files: - dep_dict = {'type': 'data', 'data': f'&ROTDIR;/{self.cdump}.@Y@m@d/@H/{file}'} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies = 
rocoto.create_dependency(dep_condition='or', dep=deps) - - if self.app_config.do_hpssarch: - dep_dict = {'type': 'task', 'name': f'{self.cdump}getic'} - dependencies.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep_condition='and', dep=dependencies) - - resources = self.get_resource('init') - task = create_wf_task('init', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) - - return task - def prep(self): dump_suffix = self._base["DUMP_SUFFIX"] gfs_cyc = self._base["gfs_cyc"] dmpdir = self._base["DMPDIR"] + atm_hist_path = self._template_to_rocoto_cycstring(self._base["COM_ATMOS_HISTORY_TMPL"], {'RUN': 'gdas'}) + dump_path = self._template_to_rocoto_cycstring(self._base["COM_OBSDMP_TMPL"], + {'DMPDIR': dmpdir, 'DUMP_SUFFIX': dump_suffix}) + gfs_enkf = True if self.app_config.do_hybvar and 'gfs' in self.app_config.eupd_cdumps else False deps = [] dep_dict = {'type': 'metatask', 'name': 'gdaspost', 'offset': '-06:00:00'} deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/gdas.@Y@m@d/@H/atmos/gdas.t@Hz.atmf009.nc' + data = f'{atm_hist_path}/gdas.t@Hz.atmf009.nc' dep_dict = {'type': 'data', 'data': data, 'offset': '-06:00:00'} deps.append(rocoto.add_dependency(dep_dict)) - data = f'{dmpdir}/{self.cdump}{dump_suffix}.@Y@m@d/@H/atmos/{self.cdump}.t@Hz.updated.status.tm00.bufr_d' + data = f'{dump_path}/{self.cdump}.t@Hz.updated.status.tm00.bufr_d' dep_dict = {'type': 'data', 'data': data} deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) @@ -298,11 +309,14 @@ def waveprep(self): def aerosol_init(self): + input_path = self._template_to_rocoto_cycstring(self._base['COM_ATMOS_INPUT_TMPL']) + restart_path = self._template_to_rocoto_cycstring(self._base['COM_ATMOS_RESTART_TMPL']) + deps = [] # Files from current cycle files = ['gfs_ctrl.nc'] + [f'gfs_data.tile{tile}.nc' for tile in range(1, self.n_tiles + 1)] for file in files: - data = 
f'&ROTDIR;/{self.cdump}.@Y@m@d/@H/atmos/INPUT/{file}' + data = f'{input_path}/{file}' dep_dict = {'type': 'data', 'data': data} deps.append(rocoto.add_dependency(dep_dict)) @@ -320,7 +334,7 @@ def aerosol_init(self): [f'@Y@m@d.@H0000.fv_tracer.res.tile{tile}.nc' for tile in range(1, self.n_tiles + 1)] for file in files: - data = [f'&ROTDIR;/{self.cdump}.@Y@m@d/@H/atmos/RERUN_RESTART/', file] + data = [f'{restart_path}', file] dep_dict = {'type': 'data', 'data': data, 'offset': [offset, None]} deps.append(rocoto.add_dependency(dep_dict)) @@ -352,12 +366,17 @@ def anal(self): def sfcanl(self): deps = [] - if self.app_config.do_jedivar: - dep_dict = {'type': 'task', 'name': f'{self.cdump}atmanalrun'} + if self.app_config.do_jediatmvar: + dep_dict = {'type': 'task', 'name': f'{self.cdump}atmanlfinal'} else: dep_dict = {'type': 'task', 'name': f'{self.cdump}anal'} deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep=deps) + if self.app_config.do_jedilandda: + dep_dict = {'type': 'task', 'name': f'{self.cdump}landanlfinal'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + else: + dependencies = rocoto.create_dependency(dep=deps) resources = self.get_resource('sfcanl') task = create_wf_task('sfcanl', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) @@ -367,8 +386,8 @@ def sfcanl(self): def analcalc(self): deps = [] - if self.app_config.do_jedivar: - dep_dict = {'type': 'task', 'name': f'{self.cdump}atmanalrun'} + if self.app_config.do_jediatmvar: + dep_dict = {'type': 'task', 'name': f'{self.cdump}atmanlfinal'} else: dep_dict = {'type': 'task', 'name': f'{self.cdump}anal'} deps.append(rocoto.add_dependency(dep_dict)) @@ -396,59 +415,52 @@ def analdiag(self): return task - def atmanalprep(self): - - dump_suffix = self._base["DUMP_SUFFIX"] - gfs_cyc = self._base["gfs_cyc"] - dmpdir = self._base["DMPDIR"] - do_gfs_enkf = True if 
self.app_config.do_hybvar and 'gfs' in self.app_config.eupd_cdumps else False + def atmanlinit(self): deps = [] - dep_dict = {'type': 'metatask', 'name': 'gdaspost', 'offset': '-06:00:00'} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/gdas.@Y@m@d/@H/atmos/gdas.t@Hz.atmf009.nc' - dep_dict = {'type': 'data', 'data': data, 'offset': '-06:00:00'} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'{dmpdir}/{self.cdump}{dump_suffix}.@Y@m@d/@H/atmos/{self.cdump}.t@Hz.updated.status.tm00.bufr_d' - dep_dict = {'type': 'data', 'data': data} + dep_dict = {'type': 'task', 'name': f'{self.cdump}prep'} deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + if self.app_config.do_hybvar: + dep_dict = {'type': 'metatask', 'name': 'enkfgdasepmn', 'offset': '-06:00:00'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + else: + dependencies = rocoto.create_dependency(dep=deps) + + gfs_cyc = self._base["gfs_cyc"] + gfs_enkf = True if self.app_config.do_hybvar and 'gfs' in self.app_config.eupd_cdumps else False cycledef = self.cdump - if self.cdump in ['gfs'] and do_gfs_enkf and gfs_cyc != 4: + if self.cdump in ['gfs'] and gfs_enkf and gfs_cyc != 4: cycledef = 'gdas' - resources = self.get_resource('atmanalprep') - task = create_wf_task('atmanalprep', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies, + resources = self.get_resource('atmanlinit') + task = create_wf_task('atmanlinit', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies, cycledef=cycledef) + return task - def atmanalrun(self): + def atmanlrun(self): deps = [] - dep_dict = {'type': 'task', 'name': f'{self.cdump}atmanalprep'} + dep_dict = {'type': 'task', 'name': f'{self.cdump}atmanlinit'} deps.append(rocoto.add_dependency(dep_dict)) - if self.app_config.do_hybvar: - dep_dict = {'type': 'metatask', 'name': 
'enkfgdasepmn', 'offset': '-06:00:00'} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) - else: - dependencies = rocoto.create_dependency(dep=deps) + dependencies = rocoto.create_dependency(dep=deps) - resources = self.get_resource('atmanalrun') - task = create_wf_task('atmanalrun', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + resources = self.get_resource('atmanlrun') + task = create_wf_task('atmanlrun', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) return task - def atmanalpost(self): + def atmanlfinal(self): deps = [] - dep_dict = {'type': 'task', 'name': f'{self.cdump}atmanalrun'} + dep_dict = {'type': 'task', 'name': f'{self.cdump}atmanlrun'} deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) - resources = self.get_resource('atmanalpost') - task = create_wf_task('atmanalpost', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + resources = self.get_resource('atmanlfinal') + task = create_wf_task('atmanlfinal', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) return task @@ -456,11 +468,14 @@ def aeroanlinit(self): dump_suffix = self._base["DUMP_SUFFIX"] dmpdir = self._base["DMPDIR"] + atm_hist_path = self._template_to_rocoto_cycstring(self._base["COM_ATMOS_HISTORY_TMPL"], {'RUN': 'gdas'}) + dump_path = self._template_to_rocoto_cycstring(self._base["COM_OBSDMP_TMPL"], + {'DMPDIR': dmpdir, 'DUMP_SUFFIX': dump_suffix}) deps = [] dep_dict = {'type': 'metatask', 'name': 'gdaspost', 'offset': '-06:00:00'} deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/gdas.@Y@m@d/@H/atmos/gdas.t@Hz.atmf009.nc' + data = f'{atm_hist_path}/gdas.t@Hz.atmf009.nc' dep_dict = {'type': 'data', 'data': data, 'offset': '-06:00:00'} deps.append(rocoto.add_dependency(dep_dict)) dep_dict = {'type': 'task', 'name': f'{self.cdump}prep'} @@ 
-495,13 +510,68 @@ def aeroanlfinal(self): return task + def landanlinit(self): + + atm_hist_path = self._template_to_rocoto_cycstring(self._base["COM_ATMOS_HISTORY_TMPL"], {'RUN': 'gdas'}) + + deps = [] + dep_dict = {'type': 'metatask', 'name': 'gdaspost', 'offset': '-06:00:00'} + deps.append(rocoto.add_dependency(dep_dict)) + data = f'{atm_hist_path}/gdas.t@Hz.atmf009.nc' + dep_dict = {'type': 'data', 'data': data, 'offset': '-06:00:00'} + deps.append(rocoto.add_dependency(dep_dict)) + dep_dict = {'type': 'task', 'name': f'{self.cdump}prep'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('landanlinit') + task = create_wf_task('landanlinit', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + return task + + def landanlprep(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}landanlinit'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('landanlprep') + task = create_wf_task('landanlprep', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def landanlrun(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}landanlprep'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('landanlrun') + task = create_wf_task('landanlrun', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def landanlfinal(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}landanlrun'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('landanlfinal') + task = create_wf_task('landanlfinal', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + 
+ return task + def ocnanalprep(self): dump_suffix = self._base["DUMP_SUFFIX"] dmpdir = self._base["DMPDIR"] + ocean_hist_path = self._template_to_rocoto_cycstring(self._base["COM_OCEAN_HISTORY_TMPL"]) deps = [] - data = f'&ROTDIR;/gdas.@Y@m@d/@H/ocean/gdas.t@Hz.ocnf009.nc' + data = f'{ocean_hist_path}/gdas.t@Hz.ocnf009.nc' dep_dict = {'type': 'data', 'data': data, 'offset': '-06:00:00'} deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep=deps) @@ -547,11 +617,31 @@ def ocnanalrun(self): return task - def ocnanalpost(self): + def ocnanalchkpt(self): deps = [] dep_dict = {'type': 'task', 'name': f'{self.cdump}ocnanalrun'} deps.append(rocoto.add_dependency(dep_dict)) + if self.app_config.do_mergensst: + data = f'&ROTDIR;/{self.cdump}.@Y@m@d/@H/atmos/{self.cdump}.t@Hz.sfcanl.nc' + dep_dict = {'type': 'data', 'data': data} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('ocnanalchkpt') + task = create_wf_task('ocnanalchkpt', + resources, + cdump=self.cdump, + envar=self.envars, + dependency=dependencies) + + return task + + def ocnanalpost(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}ocnanalchkpt'} + deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) resources = self.get_resource('ocnanalpost') @@ -563,15 +653,19 @@ def ocnanalpost(self): return task - def gldas(self): + def ocnanalvrfy(self): deps = [] - dep_dict = {'type': 'task', 'name': f'{self.cdump}sfcanl'} + dep_dict = {'type': 'task', 'name': f'{self.cdump}ocnanalpost'} deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) - resources = self.get_resource('gldas') - task = create_wf_task('gldas', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + resources = self.get_resource('ocnanalvrfy') + 
task = create_wf_task('ocnanalvrfy', + resources, + cdump=self.cdump, + envar=self.envars, + dependency=dependencies) return task @@ -592,27 +686,8 @@ def fcst(self): def _fcst_forecast_only(self): dependencies = [] - deps = [] - if self.app_config.do_atm: - data = f'&ROTDIR;/{self.cdump}.@Y@m@d/@H/atmos/INPUT/sfc_data.tile6.nc' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{self.cdump}.@Y@m@d/@H/atmos/RESTART/@Y@m@d.@H0000.sfcanl_data.tile6.nc' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies.append(rocoto.create_dependency(dep_condition='or', dep=deps)) - - else: # data-atmosphere - data = f'&ICSDIR;/@Y@m@d@H/datm/gefs.@Y@m.nc' # GEFS forcing - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = '&ICSDIR;/@Y@m@d@H/ocn/MOM.res.nc' # TODO - replace with actual ocean IC - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = '&ICSDIR;/@Y@m@d@H/ice/cice5_model.res.nc' # TODO - replace with actual ice IC - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies.append(rocoto.create_dependency(dep_condition='and', dep=deps)) + dep_dict = {'type': 'task', 'name': f'{self.cdump}coupled_ic'} + dependencies.append(rocoto.add_dependency(dep_dict)) if self.app_config.do_wave and self.cdump in self.app_config.wave_cdumps: wave_job = 'waveprep' if self.app_config.model_app in ['ATMW'] else 'waveinit' @@ -651,10 +726,6 @@ def _fcst_cycled(self): dep_dict = {'type': 'task', 'name': f'{self.cdump}ocnanalpost'} dependencies.append(rocoto.add_dependency(dep_dict)) - if self.app_config.do_gldas and self.cdump in ['gdas']: - dep_dict = {'type': 'task', 'name': f'{self.cdump}gldas'} - dependencies.append(rocoto.add_dependency(dep_dict)) - if self.app_config.do_wave and self.cdump in self.app_config.wave_cdumps: dep_dict = 
{'type': 'task', 'name': f'{self.cdump}waveprep'} dependencies.append(rocoto.add_dependency(dep_dict)) @@ -663,6 +734,10 @@ def _fcst_cycled(self): dep_dict = {'type': 'task', 'name': f'{self.cdump}aeroanlfinal'} dependencies.append(rocoto.add_dependency(dep_dict)) + if self.app_config.do_jedilandda: + dep_dict = {'type': 'task', 'name': f'{self.cdump}landanlfinal'} + dependencies.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=dependencies) if self.cdump in ['gdas']: @@ -731,7 +806,8 @@ def _get_postgroups(cdump, config, add_anl=False): return grp, dep, lst deps = [] - data = f'&ROTDIR;/{self.cdump}.@Y@m@d/@H/atmos/{self.cdump}.t@Hz.log#dep#.txt' + atm_hist_path = self._template_to_rocoto_cycstring(self._base["COM_ATMOS_HISTORY_TMPL"]) + data = f'{atm_hist_path}/{self.cdump}.t@Hz.atm.log#dep#.txt' dep_dict = {'type': 'data', 'data': data} deps.append(rocoto.add_dependency(dep_dict)) dep_dict = {'type': 'task', 'name': f'{self.cdump}fcst'} @@ -760,7 +836,8 @@ def _get_postgroups(cdump, config, add_anl=False): def wavepostsbs(self): deps = [] for wave_grid in self._configs['wavepostsbs']['waveGRD'].split(): - data = f'&ROTDIR;/{self.cdump}.@Y@m@d/@H/wave/rundata/{self.cdump}wave.out_grd.{wave_grid}.@Y@m@d.@H0000' + wave_hist_path = self._template_to_rocoto_cycstring(self._base["COM_WAVE_HISTORY_TMPL"]) + data = f'{wave_hist_path}/{self.cdump}wave.out_grd.{wave_grid}.@Y@m@d.@H0000' dep_dict = {'type': 'data', 'data': data} deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) @@ -783,7 +860,8 @@ def wavepostbndpnt(self): def wavepostbndpntbll(self): deps = [] - data = f'&ROTDIR;/{self.cdump}.@Y@m@d/@H/atmos/{self.cdump}.t@Hz.logf180.txt' + wave_hist_path = self._template_to_rocoto_cycstring(self._base["COM_WAVE_HISTORY_TMPL"]) + data = f'{wave_hist_path}/{self.cdump}.t@Hz.atm.logf180.txt' dep_dict = {'type': 'data', 'data': data} 
deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep=deps) @@ -860,10 +938,12 @@ def _wafs_task(self, task_name): if task_name not in ['wafs', 'wafsgcip', 'wafsgrib2', 'wafsgrib20p25']: raise KeyError(f'Invalid WAFS task: {task_name}') + wafs_path = self._template_to_rocoto_cycstring(self._base["COM_ATMOS_WAFS_TMPL"]) + deps = [] fhrlst = [6] + [*range(12, 36 + 3, 3)] for fhr in fhrlst: - data = f'&ROTDIR;/{self.cdump}.@Y@m@d/@H/atmos/{self.cdump}.t@Hz.wafs.grb2if{fhr:03d}' + data = f'{wafs_path}/{self.cdump}.t@Hz.wafs.grb2if{fhr:03d}' dep_dict = {'type': 'data', 'data': data} deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) @@ -1090,7 +1170,7 @@ def eomg(self): eomgenvars = self.envars.copy() eomgenvars.append(rocoto.create_envar(name='ENSGRP', value='#grp#')) - groups = self._get_hybgroups(self._base['NMEM_ENKF'], self._configs['eobs']['NMEM_EOMGGRP']) + groups = self._get_hybgroups(self._base['NMEM_ENS'], self._configs['eobs']['NMEM_EOMGGRP']) resources = self.get_resource('eomg') task = create_wf_task('eomg', resources, cdump=self.cdump, envar=eomgenvars, dependency=dependencies, @@ -1123,57 +1203,44 @@ def eupd(self): return task - def atmensanalprep(self): - - dump_suffix = self._base["DUMP_SUFFIX"] - gfs_cyc = self._base["gfs_cyc"] - dmpdir = self._base["DMPDIR"] - do_gfs_enkf = True if self.app_config.do_hybvar and 'gfs' in self.app_config.eupd_cdumps else False - + def atmensanlinit(self): deps = [] - dep_dict = {'type': 'metatask', 'name': 'gdaspost', 'offset': '-06:00:00'} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/gdas.@Y@m@d/@H/atmos/gdas.t@Hz.atmf009.nc' - dep_dict = {'type': 'data', 'data': data, 'offset': '-06:00:00'} + dep_dict = {'type': 'task', 'name': f'{self.cdump.replace("enkf","")}prep'} deps.append(rocoto.add_dependency(dep_dict)) - data = 
f'{dmpdir}/{self.cdump}{dump_suffix}.@Y@m@d/@H/atmos/{self.cdump}.t@Hz.updated.status.tm00.bufr_d' - dep_dict = {'type': 'data', 'data': data} + dep_dict = {'type': 'metatask', 'name': 'enkfgdasepmn', 'offset': '-06:00:00'} deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) - cycledef = self.cdump - if self.cdump in ['gfs'] and do_gfs_enkf and gfs_cyc != 4: - cycledef = 'gdas' - - resources = self.get_resource('atmensanalprep') - task = create_wf_task('atmensanalprep', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies, + cycledef = "gdas" + resources = self.get_resource('atmensanlinit') + task = create_wf_task('atmensanlinit', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies, cycledef=cycledef) return task - def atmensanalrun(self): + def atmensanlrun(self): deps = [] - dep_dict = {'type': 'task', 'name': f'{self.cdump}atmensanalprep'} + dep_dict = {'type': 'task', 'name': f'{self.cdump}atmensanlinit'} deps.append(rocoto.add_dependency(dep_dict)) dep_dict = {'type': 'metatask', 'name': 'enkfgdasepmn', 'offset': '-06:00:00'} deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) - resources = self.get_resource('atmensanalrun') - task = create_wf_task('atmensanalrun', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + resources = self.get_resource('atmensanlrun') + task = create_wf_task('atmensanlrun', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) return task - def atmensanalpost(self): + def atmensanlfinal(self): deps = [] - dep_dict = {'type': 'task', 'name': f'{self.cdump}atmensanalrun'} + dep_dict = {'type': 'task', 'name': f'{self.cdump}atmensanlrun'} deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep=deps) - resources = self.get_resource('atmensanalpost') - task = create_wf_task('atmensanalpost', 
resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + resources = self.get_resource('atmensanlfinal') + task = create_wf_task('atmensanlfinal', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) return task @@ -1205,8 +1272,8 @@ def _get_ecengroups(): deps = [] dep_dict = {'type': 'task', 'name': f'{self.cdump.replace("enkf","")}analcalc'} deps.append(rocoto.add_dependency(dep_dict)) - if self.app_config.do_jediens: - dep_dict = {'type': 'task', 'name': f'{self.cdump}atmensanalrun'} + if self.app_config.do_jediatmens: + dep_dict = {'type': 'task', 'name': f'{self.cdump}atmensanlfinal'} else: dep_dict = {'type': 'task', 'name': f'{self.cdump}eupd'} deps.append(rocoto.add_dependency(dep_dict)) @@ -1234,8 +1301,8 @@ def esfc(self): deps = [] dep_dict = {'type': 'task', 'name': f'{self.cdump.replace("enkf","")}analcalc'} deps.append(rocoto.add_dependency(dep_dict)) - if self.app_config.do_jediens: - dep_dict = {'type': 'task', 'name': f'{self.cdump}atmensanalrun'} + if self.app_config.do_jediatmens: + dep_dict = {'type': 'task', 'name': f'{self.cdump}atmensanlfinal'} else: dep_dict = {'type': 'task', 'name': f'{self.cdump}eupd'} deps.append(rocoto.add_dependency(dep_dict)) @@ -1261,10 +1328,10 @@ def efcs(self): efcsenvars = self.envars.copy() efcsenvars.append(rocoto.create_envar(name='ENSGRP', value='#grp#')) - groups = self._get_hybgroups(self._base['NMEM_ENKF'], self._configs['efcs']['NMEM_EFCSGRP']) + groups = self._get_hybgroups(self._base['NMEM_ENS'], self._configs['efcs']['NMEM_EFCSGRP']) if self.cdump == "enkfgfs": - groups = self._get_hybgroups(self._base['NMEM_EFCS'], self._configs['efcs']['NMEM_EFCSGRP_GFS']) + groups = self._get_hybgroups(self._base['NMEM_ENS_GFS'], self._configs['efcs']['NMEM_EFCSGRP_GFS']) cycledef = 'gdas_half,gdas' if self.cdump in ['enkfgdas'] else self.cdump.replace('enkf', '') resources = self.get_resource('efcs') task = create_wf_task('efcs', resources, cdump=self.cdump, envar=efcsenvars, 
dependency=dependencies, @@ -1348,7 +1415,7 @@ def earc(self): earcenvars = self.envars.copy() earcenvars.append(rocoto.create_envar(name='ENSGRP', value='#grp#')) - groups = self._get_hybgroups(self._base['NMEM_ENKF'], self._configs['earc']['NMEM_EARCGRP'], start_index=0) + groups = self._get_hybgroups(self._base['NMEM_ENS'], self._configs['earc']['NMEM_EARCGRP'], start_index=0) cycledef = 'gdas_half,gdas' if self.cdump in ['enkfgdas'] else self.cdump.replace('enkf', '') diff --git a/workflow/rocoto/workflow_xml.py b/workflow/rocoto/workflow_xml.py index 856cade2b9..99f1eaa83d 100644 --- a/workflow/rocoto/workflow_xml.py +++ b/workflow/rocoto/workflow_xml.py @@ -144,6 +144,9 @@ def _get_cycledefs_forecast_only(self): if sdate <= edate: strings.append(f'\t{sdate.strftime("%Y%m%d%H%M")} {edate.strftime("%Y%m%d%H%M")} {interval}') + strings.append('') + strings.append('') + return '\n'.join(strings) @staticmethod diff --git a/workflow/setup_expt.py b/workflow/setup_expt.py index 6c478db46a..3cd782a52c 100755 --- a/workflow/setup_expt.py +++ b/workflow/setup_expt.py @@ -8,7 +8,7 @@ import glob import shutil import warnings -from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter +from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter, SUPPRESS from hosts import Host @@ -58,14 +58,11 @@ def fill_COMROT_cycled(host, inputs): comrot = os.path.join(inputs.comrot, inputs.pslot) - do_ocean = do_ice = do_med = do_aerosols = False + do_ocean = do_ice = do_med = False if inputs.app in ['S2S', 'S2SW']: do_ocean = do_ice = do_med = True - if inputs.app in ['ATMA']: - do_aerosols = True - if inputs.icsdir is None: warnings.warn("User did not provide '--icsdir' to stage initial conditions") return @@ -73,14 +70,46 @@ def fill_COMROT_cycled(host, inputs): rdatestr = datetime_to_YMDH(inputs.idate - to_timedelta('T06H')) idatestr = datetime_to_YMDH(inputs.idate) - if inputs.start in ['warm']: # This is warm start experiment (only meaningful for atmos) - 
atmos_dir = med_dir = 'RESTART' - elif inputs.start in ['cold']: # This is a cold start experiment - atmos_dir = 'INPUT' - med_dir = '' # no mediator files for a "cold start" - chem_dir = '' # aerosols do not have a 'directory' + if os.path.isdir(os.path.join(inputs.icsdir, f'{inputs.cdump}.{rdatestr[:8]}', rdatestr[8:], 'model_data', 'atmos')): + flat_structure = False + else: + flat_structure = True + + # Destination always uses the new COM structure + # These should match the templates defined in config.com + if inputs.start in ['warm']: + dst_atm_dir = os.path.join('model_data', 'atmos', 'restart') + dst_med_dir = os.path.join('model_data', 'med', 'restart') + else: + dst_atm_dir = os.path.join('model_data', 'atmos', 'input') + dst_med_dir = '' # no mediator files for a "cold start" do_med = False - ocean_dir = ice_dir = 'RESTART' # ocean and ice have the same filenames for warm and cold + dst_ocn_rst_dir = os.path.join('model_data', 'ocean', 'restart') + dst_ocn_anl_dir = os.path.join('analysis', 'ocean') + dst_ice_dir = os.path.join('model_data', 'ice', 'restart') + dst_atm_anl_dir = os.path.join('analysis', 'atmos') + + if flat_structure: + # ICs are in the old flat COM structure + if inputs.start in ['warm']: # This is warm start experiment + src_atm_dir = os.path.join('atmos', 'RESTART') + src_med_dir = os.path.join('med', 'RESTART') + elif inputs.start in ['cold']: # This is a cold start experiment + src_atm_dir = os.path.join('atmos', 'INPUT') + src_med_dir = '' # no mediator files for a "cold start" + do_med = False + # ocean and ice have the same filenames for warm and cold + src_ocn_rst_dir = os.path.join('ocean', 'RESTART') + src_ocn_anl_dir = 'ocean' + src_ice_dir = os.path.join('ice', 'RESTART') + src_atm_anl_dir = 'atmos' + else: + src_atm_dir = dst_atm_dir + src_med_dir = dst_med_dir + src_ocn_rst_dir = dst_ocn_rst_dir + src_ocn_anl_dir = dst_ocn_anl_dir + src_ice_dir = dst_ice_dir + src_atm_anl_dir = dst_atm_anl_dir def 
link_files_from_src_to_dst(src_dir, dst_dir): files = os.listdir(src_dir) @@ -99,8 +128,8 @@ def link_files_from_src_to_dst(src_dir, dst_dir): for ii in range(1, inputs.nens + 1): memdir = f'mem{ii:03d}' # Link atmospheric files - dst_dir = os.path.join(comrot, enkfdir, memdir, 'atmos', atmos_dir) - src_dir = os.path.join(inputs.icsdir, enkfdir, memdir, 'atmos', atmos_dir) + dst_dir = os.path.join(comrot, enkfdir, memdir, dst_atm_dir) + src_dir = os.path.join(inputs.icsdir, enkfdir, memdir, src_atm_dir) makedirs_if_missing(dst_dir) link_files_from_src_to_dst(src_dir, dst_dir) # ocean, ice, etc. TBD ... @@ -112,58 +141,48 @@ def link_files_from_src_to_dst(src_dir, dst_dir): detdir = f'{inputs.cdump}.{rdatestr[:8]}/{rdatestr[8:]}' elif inputs.start in ['cold']: detdir = f'{inputs.cdump}.{idatestr[:8]}/{idatestr[8:]}' - dst_dir = os.path.join(comrot, detdir, 'atmos', atmos_dir) - src_dir = os.path.join(inputs.icsdir, detdir, 'atmos', atmos_dir) + + dst_dir = os.path.join(comrot, detdir, dst_atm_dir) + src_dir = os.path.join(inputs.icsdir, detdir, src_atm_dir) makedirs_if_missing(dst_dir) link_files_from_src_to_dst(src_dir, dst_dir) # Link ocean files if do_ocean: detdir = f'{inputs.cdump}.{rdatestr[:8]}/{rdatestr[8:]}' - dst_dir = os.path.join(comrot, detdir, 'ocean', ocean_dir) - src_dir = os.path.join(inputs.icsdir, detdir, 'ocean', ocean_dir) + dst_dir = os.path.join(comrot, detdir, dst_ocn_rst_dir) + src_dir = os.path.join(inputs.icsdir, detdir, src_ocn_rst_dir) makedirs_if_missing(dst_dir) link_files_from_src_to_dst(src_dir, dst_dir) # First 1/2 cycle needs a MOM6 increment incdir = f'{inputs.cdump}.{idatestr[:8]}/{idatestr[8:]}' incfile = f'{inputs.cdump}.t{idatestr[8:]}z.ocninc.nc' - src_file = os.path.join(inputs.icsdir, incdir, 'ocean', incfile) - dst_file = os.path.join(comrot, incdir, 'ocean', incfile) - makedirs_if_missing(os.path.join(comrot, incdir, 'ocean')) + src_file = os.path.join(inputs.icsdir, incdir, src_ocn_anl_dir, incfile) + dst_file = 
os.path.join(comrot, incdir, dst_ocn_anl_dir, incfile) + makedirs_if_missing(os.path.join(comrot, incdir, dst_ocn_anl_dir)) os.symlink(src_file, dst_file) # Link ice files if do_ice: detdir = f'{inputs.cdump}.{rdatestr[:8]}/{rdatestr[8:]}' - dst_dir = os.path.join(comrot, detdir, 'ice', ice_dir) - src_dir = os.path.join(inputs.icsdir, detdir, 'ice', ice_dir) + dst_dir = os.path.join(comrot, detdir, dst_ice_dir) + src_dir = os.path.join(inputs.icsdir, detdir, src_ice_dir) makedirs_if_missing(dst_dir) link_files_from_src_to_dst(src_dir, dst_dir) # Link mediator files if do_med: detdir = f'{inputs.cdump}.{rdatestr[:8]}/{rdatestr[8:]}' - dst_dir = os.path.join(comrot, detdir, 'med', med_dir) - src_dir = os.path.join(inputs.icsdir, detdir, 'med', med_dir) - makedirs_if_missing(dst_dir) - link_files_from_src_to_dst(src_dir, dst_dir) - - # Link aerosol files - if do_aerosols: - if inputs.start in ['warm']: - detdir = f'{inputs.cdump}.{rdatestr[:8]}/{rdatestr[8:]}' - elif inputs.start in ['cold']: - detdir = f'{inputs.cdump}.{idatestr[:8]}/{idatestr[8:]}' - dst_dir = os.path.join(comrot, detdir, 'chem', chem_dir) - src_dir = os.path.join(inputs.icsdir, detdir, 'chem', chem_dir) + dst_dir = os.path.join(comrot, detdir, dst_med_dir) + src_dir = os.path.join(inputs.icsdir, detdir, src_med_dir) makedirs_if_missing(dst_dir) link_files_from_src_to_dst(src_dir, dst_dir) # Link bias correction and radiance diagnostics files detdir = f'{inputs.cdump}.{idatestr[:8]}/{idatestr[8:]}' - src_dir = os.path.join(inputs.icsdir, detdir, 'atmos') - dst_dir = os.path.join(comrot, detdir, 'atmos') + src_dir = os.path.join(inputs.icsdir, detdir, src_atm_anl_dir) + dst_dir = os.path.join(comrot, detdir, dst_atm_anl_dir) makedirs_if_missing(dst_dir) for ftype in ['abias', 'abias_pc', 'abias_air', 'radstat']: fname = f'{inputs.cdump}.t{idatestr[8:]}z.{ftype}' @@ -252,10 +271,16 @@ def edit_baseconfig(host, inputs): tmpl_dict = dict(tmpl_dict, **extend_dict) extend_dict = dict() - if inputs.mode in 
['cycled']: + if getattr(inputs, 'nens', 0) > 0: extend_dict = { "@CASEENS@": f'C{inputs.resens}', - "@NMEM_ENKF@": inputs.nens, + "@NMEM_ENS@": inputs.nens, + } + tmpl_dict = dict(tmpl_dict, **extend_dict) + + extend_dict = dict() + if inputs.mode in ['cycled']: + extend_dict = { "@DOHYBVAR@": "YES" if inputs.nens > 0 else "NO", } tmpl_dict = dict(tmpl_dict, **extend_dict) @@ -317,14 +342,16 @@ def input_args(): formatter_class=ArgumentDefaultsHelpFormatter) # Set up sub-parsers for various modes of experimentation - subparser = parser.add_subparsers(dest='mode') - cycled = subparser.add_parser( - 'cycled', help='arguments for cycled mode') - forecasts = subparser.add_parser( - 'forecast-only', help='arguments for forecast-only mode') + sysparser = parser.add_subparsers(dest='system') + gfs = sysparser.add_parser('gfs', help='arguments for GFS') + gefs = sysparser.add_parser('gefs', help='arguments for GEFS') + + modeparser = gfs.add_subparsers(dest='mode') + cycled = modeparser.add_parser('cycled', help='arguments for cycled mode') + forecasts = modeparser.add_parser('forecast-only', help='arguments for forecast-only mode') # Common arguments across all modes - for subp in [cycled, forecasts]: + for subp in [cycled, forecasts, gefs]: subp.add_argument('--pslot', help='parallel experiment name', type=str, required=False, default='test') subp.add_argument('--resdet', help='resolution of the deterministic model forecast', @@ -336,32 +363,51 @@ def input_args(): subp.add_argument('--idate', help='starting date of experiment, initial conditions must exist!', required=True, type=lambda dd: to_datetime(dd)) subp.add_argument('--edate', help='end date experiment', required=True, type=lambda dd: to_datetime(dd)) - subp.add_argument('--configdir', help='full path to directory containing the config files', - type=str, required=False, default=os.path.join(_top, 'parm/config')) - subp.add_argument('--cdump', help='CDUMP to start the experiment', - type=str, required=False, 
default='gdas') - subp.add_argument('--gfs_cyc', help='GFS cycles to run', type=int, - choices=[0, 1, 2, 4], default=1, required=False) + + ufs_apps = ['ATM', 'ATMA', 'ATMW', 'S2S', 'S2SA', 'S2SW'] + + # GFS-only arguments + for subp in [cycled, forecasts]: subp.add_argument('--start', help='restart mode: warm or cold', type=str, choices=['warm', 'cold'], required=False, default='cold') - + subp.add_argument('--cdump', help='CDUMP to start the experiment', + type=str, required=False, default='gdas') + # --configdir is hidden from help + subp.add_argument('--configdir', help=SUPPRESS, type=str, required=False, default=os.path.join(_top, 'parm/config/gfs')) subp.add_argument('--yaml', help='Defaults to substitute from', type=str, - required=False, default=os.path.join(_top, 'parm/config/yaml/defaults.yaml')) - - ufs_apps = ['ATM', 'ATMA', 'ATMW', 'S2S', 'S2SW'] + required=False, default=os.path.join(_top, 'parm/config/gfs/yaml/defaults.yaml')) + + # ensemble-only arguments + for subp in [cycled, gefs]: + subp.add_argument('--resens', help='resolution of the ensemble model forecast', + type=int, required=False, default=192) + subp.add_argument('--nens', help='number of ensemble members', + type=int, required=False, default=20) + + # GFS/GEFS forecast-only additional arguments + for subp in [forecasts, gefs]: + subp.add_argument('--app', help='UFS application', type=str, + choices=ufs_apps + ['S2SWA'], required=False, default='ATM') + subp.add_argument('--gfs_cyc', help='Number of forecasts per day', type=int, + choices=[1, 2, 4], default=1, required=False) # cycled mode additional arguments cycled.add_argument('--icsdir', help='full path to initial condition directory', type=str, required=False, default=None) - cycled.add_argument('--resens', help='resolution of the ensemble model forecast', - type=int, required=False, default=192) - cycled.add_argument('--nens', help='number of ensemble members', - type=int, required=False, default=20) cycled.add_argument('--app', 
help='UFS application', type=str, choices=ufs_apps, required=False, default='ATM') - - # forecast only mode additional arguments - forecasts.add_argument('--app', help='UFS application', type=str, - choices=ufs_apps + ['S2SWA'], required=False, default='ATM') + cycled.add_argument('--gfs_cyc', help='cycles to run forecast', type=int, + choices=[0, 1, 2, 4], default=1, required=False) + + # GEFS-only arguments + # Create hidden mode argument since there is real option for GEFS + gefs.add_argument('--mode', help=SUPPRESS, type=str, required=False, default='forecast-only') + # Create hidden start argument since GEFS is always cold start + gefs.add_argument('--start', help=SUPPRESS, type=str, required=False, default='cold') + # Create hidden arguments for configdir and yaml + gefs.add_argument('--configdir', help=SUPPRESS, type=str, required=False, + default=os.path.join(_top, 'parm/config/gefs')) + gefs.add_argument('--yaml', help='Defaults to substitute from', type=str, required=False, + default=os.path.join(_top, 'parm/config/gefs/yaml/defaults.yaml')) args = parser.parse_args() @@ -394,11 +440,15 @@ def query_and_clean(dirname): def validate_user_request(host, inputs): - expt_res = f'C{inputs.resdet}' supp_res = host.info['SUPPORTED_RESOLUTIONS'] machine = host.machine - if expt_res not in supp_res: - raise NotImplementedError(f"Supported resolutions on {machine} are:\n{', '.join(supp_res)}") + for attr in ['resdet', 'ensres']: + try: + expt_res = f'C{getattr(inputs, attr)}' + except AttributeError: + continue + if expt_res not in supp_res: + raise NotImplementedError(f"Supported resolutions on {machine} are:\n{', '.join(supp_res)}") if __name__ == '__main__':