From cbdcf6a3db9f4edee652791e21115437e3f0e1ae Mon Sep 17 00:00:00 2001 From: George McCabe <23407799+georgemccabe@users.noreply.github.com> Date: Tue, 31 Aug 2021 12:40:16 -0600 Subject: [PATCH] Update Develop-ref after #1121 (#1126) Co-authored-by: George McCabe Co-authored-by: George McCabe <23407799+georgemccabe@users.noreply.github.com> Co-authored-by: johnhg Co-authored-by: George McCabe Co-authored-by: Keith Searight Co-authored-by: Hank Fisher Co-authored-by: MET Tools Test Account Co-authored-by: Julie.Prestopnik Co-authored-by: Lisa Goodrich Co-authored-by: jprestop Co-authored-by: bikegeek Co-authored-by: John Halley Gotway Co-authored-by: Minna Win Co-authored-by: Daniel Adriaansen Co-authored-by: Christina Kalb Co-authored-by: George McCabe Co-authored-by: George McCabe Co-authored-by: George McCabe Co-authored-by: Christina Kalb Co-authored-by: bikegeek <3753118+bikegeek@users.noreply.github.com> Co-authored-by: Julie Prestopnik Co-authored-by: j-opatz <59586397+j-opatz@users.noreply.github.com> Co-authored-by: Dan Adriaansen Co-authored-by: mrinalbiswas Co-authored-by: Keith Searight Co-authored-by: Molly Smith Co-authored-by: Hank Fisher Co-authored-by: Tatiana Burek Co-authored-by: Venita Hagerty <38571614+venitahagerty@users.noreply.github.com> Co-authored-by: lisagoodrich <33230218+lisagoodrich@users.noreply.github.com> Co-authored-by: George McCabe --- .github/ISSUE_TEMPLATE/bug_report.md | 2 +- .github/ISSUE_TEMPLATE/enhancement_request.md | 2 +- .github/ISSUE_TEMPLATE/new_feature_request.md | 2 +- .github/ISSUE_TEMPLATE/new_use_case.md | 2 +- .github/ISSUE_TEMPLATE/sub-issue.md | 2 +- .github/ISSUE_TEMPLATE/task.md | 2 +- .github/jobs/get_use_cases_to_run.sh | 4 +- .github/jobs/set_job_controls.sh | 6 +- .github/parm/use_case_groups.json | 60 +- .github/workflows/documentation.yml | 2 + .github/workflows/testing.yml | 2 + docs/Contributors_Guide/add_use_case.rst | 26 +- .../continuous_integration.rst | 5 +- docs/Contributors_Guide/documentation.rst | 39 +- docs/Release_Guide/met_official.rst | 2 +- docs/Release_Guide/metcalcpy_official.rst | 1 + docs/Release_Guide/metdatadb_bugfix.rst | 3 +- docs/Release_Guide/metdatadb_development.rst | 3 +- docs/Release_Guide/metdatadb_official.rst | 4 +- docs/Release_Guide/metexpress_development.rst | 1 + docs/Release_Guide/metexpress_official.rst | 2 + docs/Release_Guide/metplotpy_official.rst | 1 + docs/Release_Guide/metplus_official.rst | 1 + docs/Release_Guide/metviewer_official.rst | 1 + .../common/update_dtc_website.rst | 11 +- .../release_steps/met/update_dtc_website.rst | 2 + .../metcalcpy/update_dtc_website.rst | 2 + .../metexpress/update_dtc_website.rst | 7 + .../metplotpy/update_dtc_website.rst | 2 + .../metplus/update_dtc_website.rst | 4 +- .../metviewer/update_dtc_website.rst | 2 + .../set_beta_deletion_reminder_official.rst | 8 + docs/Users_Guide/glossary.rst | 102 +- docs/Users_Guide/installation.rst | 6 +- docs/Users_Guide/quicksearch.rst | 2 + docs/Users_Guide/systemconfiguration.rst | 2 + docs/Users_Guide/wrappers.rst | 70 +- .../GFDLTracker/GFDLTracker_ETC.py | 106 ++ .../GFDLTracker/GFDLTracker_Genesis.py | 106 ++ .../GFDLTracker/GFDLTracker_TC.py | 103 ++ .../met_tool_wrapper/GFDLTracker/README.rst | 2 + .../compare_gridded/test_compare_gridded.py | 26 +- internal_tests/pytests/pcp_combine/test1.conf | 1 + .../pcp_combine/test_pcp_combine_wrapper.py | 611 +++--- .../point_stat/test_point_stat_wrapper.py | 6 +- .../pytests/tc_pairs/test_tc_pairs_wrapper.py | 13 +- internal_tests/use_cases/all_use_cases.txt | 2 + 
metplus/util/met_util.py | 36 +- metplus/wrappers/command_builder.py | 161 +- metplus/wrappers/gfdl_tracker_wrapper.py | 300 ++- metplus/wrappers/pcp_combine_wrapper.py | 1641 ++++++++--------- metplus/wrappers/point_stat_wrapper.py | 1 + metplus/wrappers/reformat_gridded_wrapper.py | 41 +- metplus/wrappers/tc_pairs_wrapper.py | 27 +- parm/met_config/TCPairsConfig_wrapped | 14 +- .../GFDLTracker/GFDLTracker_ETC.conf | 131 ++ .../GFDLTracker/GFDLTracker_Genesis.conf | 132 ++ .../GFDLTracker/GFDLTracker_TC.conf | 48 +- .../GFDLTracker/sgv_template.txt | 6 + .../met_tool_wrapper/GFDLTracker/template.nml | 4 + .../PCPCombine/PCPCombine_add.conf | 94 +- .../PCPCombine/PCPCombine_bucket.conf | 61 +- .../PCPCombine/PCPCombine_derive.conf | 105 +- .../PCPCombine/PCPCombine_loop_custom.conf | 84 +- .../PCPCombine_python_embedding.conf | 48 +- .../PCPCombine/PCPCombine_subtract.conf | 80 +- .../PCPCombine/PCPCombine_sum.conf | 86 +- .../PCPCombine/PCPCombine_user_defined.conf | 81 +- .../met_tool_wrapper/PointStat/PointStat.conf | 1 + .../TCPairs/TCPairs_extra_tropical.conf | 5 + .../TCPairs/TCPairs_tropical.conf | 5 + ...tat_fcstHRRR_fcstOnly_SurrogateSevere.conf | 117 +- .../GridStat_fcstGFS_obsCCPA_GRIB.conf | 88 +- ...GridStat_fcstHREFmean_obsStgIV_Gempak.conf | 134 +- ...GridStat_fcstHREFmean_obsStgIV_NetCDF.conf | 149 +- .../GridStat_fcstHRRR-TLE_obsStgIV_GRIB.conf | 121 +- 76 files changed, 2675 insertions(+), 2497 deletions(-) create mode 100644 docs/Release_Guide/release_steps/metexpress/update_dtc_website.rst create mode 100644 docs/Release_Guide/release_steps/set_beta_deletion_reminder_official.rst create mode 100644 docs/use_cases/met_tool_wrapper/GFDLTracker/GFDLTracker_ETC.py create mode 100644 docs/use_cases/met_tool_wrapper/GFDLTracker/GFDLTracker_Genesis.py create mode 100644 docs/use_cases/met_tool_wrapper/GFDLTracker/GFDLTracker_TC.py create mode 100644 docs/use_cases/met_tool_wrapper/GFDLTracker/README.rst create mode 100644 parm/use_cases/met_tool_wrapper/GFDLTracker/GFDLTracker_ETC.conf create mode 100644 parm/use_cases/met_tool_wrapper/GFDLTracker/GFDLTracker_Genesis.conf create mode 100644 parm/use_cases/met_tool_wrapper/GFDLTracker/sgv_template.txt diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md index 2bbe76335b..6c4450e814 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -2,7 +2,7 @@ name: Bug report about: Fix something that's not working title: '' -labels: 'type: bug' +labels: 'alert: NEED ACCOUNT KEY, alert: NEED MORE DEFINITION, alert: NEED PROJECT ASSIGNMENT, type: bug' assignees: '' --- diff --git a/.github/ISSUE_TEMPLATE/enhancement_request.md b/.github/ISSUE_TEMPLATE/enhancement_request.md index 0c7f6c1331..3133dec892 100644 --- a/.github/ISSUE_TEMPLATE/enhancement_request.md +++ b/.github/ISSUE_TEMPLATE/enhancement_request.md @@ -2,7 +2,7 @@ name: Enhancement request about: Improve something that it's currently doing title: '' -labels: 'type: enhancement' +labels: 'alert: NEED ACCOUNT KEY, alert: NEED MORE DEFINITION, alert: NEED PROJECT ASSIGNMENT, type: enhancement' assignees: '' --- diff --git a/.github/ISSUE_TEMPLATE/new_feature_request.md b/.github/ISSUE_TEMPLATE/new_feature_request.md index c76da8ce50..b37efe8aef 100644 --- a/.github/ISSUE_TEMPLATE/new_feature_request.md +++ b/.github/ISSUE_TEMPLATE/new_feature_request.md @@ -2,7 +2,7 @@ name: New feature request about: Make it do something new title: '' -labels: 'type: new feature' +labels: 'alert: NEED ACCOUNT KEY, 
alert: NEED MORE DEFINITION, alert: NEED PROJECT ASSIGNMENT, type: new feature' assignees: '' --- diff --git a/.github/ISSUE_TEMPLATE/new_use_case.md b/.github/ISSUE_TEMPLATE/new_use_case.md index 4a58e0a4f3..ad54e6baf7 100644 --- a/.github/ISSUE_TEMPLATE/new_use_case.md +++ b/.github/ISSUE_TEMPLATE/new_use_case.md @@ -2,7 +2,7 @@ name: New use case about: Add a new use case title: '' -labels: 'type: new use case' +labels: 'alert: NEED ACCOUNT KEY, alert: NEED MORE DEFINITION, alert: NEED PROJECT ASSIGNMENT, type: new use case' assignees: '' --- diff --git a/.github/ISSUE_TEMPLATE/sub-issue.md b/.github/ISSUE_TEMPLATE/sub-issue.md index 3552fa1934..77bf2b2844 100644 --- a/.github/ISSUE_TEMPLATE/sub-issue.md +++ b/.github/ISSUE_TEMPLATE/sub-issue.md @@ -2,7 +2,7 @@ name: Sub-Issue about: Break an issue down into smaller parts title: '' -labels: 'type: sub-issue' +labels: 'alert: NEED ACCOUNT KEY, alert: NEED MORE DEFINITION, alert: NEED PROJECT ASSIGNMENT, type: sub-issue' assignees: '' --- diff --git a/.github/ISSUE_TEMPLATE/task.md b/.github/ISSUE_TEMPLATE/task.md index 93e889017d..ce3a6fb09a 100644 --- a/.github/ISSUE_TEMPLATE/task.md +++ b/.github/ISSUE_TEMPLATE/task.md @@ -2,7 +2,7 @@ name: Task about: Describe something that needs to be done title: '' -labels: 'type: task' +labels: 'alert: NEED ACCOUNT KEY, alert: NEED MORE DEFINITION, alert: NEED PROJECT ASSIGNMENT, type: task' assignees: '' --- diff --git a/.github/jobs/get_use_cases_to_run.sh b/.github/jobs/get_use_cases_to_run.sh index 5e4cd9035b..bfd2991038 100755 --- a/.github/jobs/get_use_cases_to_run.sh +++ b/.github/jobs/get_use_cases_to_run.sh @@ -18,8 +18,8 @@ if [ "$run_use_cases" == "true" ]; then # if only running new use cases, add to filter criteria if [ "$run_all_use_cases" == "false" ]; then - echo Only run new use cases - matrix=$(jq '[.[] | select(.new == true) | (.category + ":" + .index_list)]' $use_case_groups_filepath) + echo "Only run use cases that are marked to run every time (run = true)" + matrix=$(jq '[.[] | select(.run == true) | (.category + ":" + .index_list)]' $use_case_groups_filepath) else echo Add all available use cases matrix=$(jq '[.[] | (.category + ":" + .index_list)]' $use_case_groups_filepath) diff --git a/.github/jobs/set_job_controls.sh b/.github/jobs/set_job_controls.sh index c8b241413b..0f175711d1 100755 --- a/.github/jobs/set_job_controls.sh +++ b/.github/jobs/set_job_controls.sh @@ -73,11 +73,15 @@ else run_all_use_cases=false fi - if grep -q "ci-run-diff" <<< "$commit_msg"; then + if grep -q "ci-run-all-diff" <<< "$commit_msg"; then run_all_use_cases=true run_diff=true fi + if grep -q "ci-run-diff" <<< "$commit_msg"; then + run_diff=true + fi + if grep -q "ci-run-all-cases" <<< "$commit_msg"; then run_use_cases=true run_all_use_cases=true diff --git a/.github/parm/use_case_groups.json b/.github/parm/use_case_groups.json index ef1ae2ffec..908acd10bf 100644 --- a/.github/parm/use_case_groups.json +++ b/.github/parm/use_case_groups.json @@ -1,147 +1,147 @@ [ { "category": "met_tool_wrapper", - "index_list": "0-55", - "new": false + "index_list": "0-57", + "run": false }, { "category": "air_quality_and_comp", "index_list": "0", - "new": false + "run": false }, { "category": "climate", "index_list": "0-1", - "new": false + "run": false }, { "category": "convection_allowing_models", "index_list": "0", - "new": false + "run": false }, { "category": "convection_allowing_models", "index_list": "1", - "new": false + "run": false }, { "category": "convection_allowing_models", "index_list": 
"2-6", - "new": false + "run": false }, { "category": "convection_allowing_models", "index_list": "7", - "new": false + "run": false }, { "category": "convection_allowing_models", "index_list": "8", - "new": false + "run": false }, { "category": "cryosphere", "index_list": "0", - "new": false + "run": false }, { "category": "data_assimilation", "index_list": "0", - "new": false + "run": false }, { "category": "marine_and_coastal", "index_list": "0-1", - "new": false + "run": false }, { "category": "medium_range", "index_list": "0", - "new": false + "run": false }, { "category": "medium_range", "index_list": "1-2", - "new": false + "run": false }, { "category": "medium_range", "index_list": "3-5", - "new": false + "run": false }, { "category": "medium_range", "index_list": "6", - "new": false + "run": false }, { "category": "medium_range", "index_list": "7", - "new": false + "run": false }, { "category": "precipitation", "index_list": "0", - "new": false + "run": false }, { "category": "precipitation", "index_list": "1", - "new": false + "run": false }, { "category": "precipitation", "index_list": "2", - "new": false + "run": false }, { "category": "precipitation", "index_list": "3-8", - "new": false + "run": false }, { "category": "s2s", "index_list": "0", - "new": false + "run": false }, { "category": "s2s", "index_list": "1-3", - "new": false + "run": false }, { "category": "s2s", "index_list": "4", - "new": false + "run": false }, { "category": "s2s", "index_list": "5", - "new": false + "run": false }, { "category": "s2s", "index_list": "6-7", - "new": false + "run": false }, { "category": "s2s", "index_list": "8-9", - "new": false + "run": false }, { "category": "space_weather", "index_list": "0-1", - "new": false + "run": false }, { "category": "tc_and_extra_tc", "index_list": "0-2", - "new": false + "run": false }, { "category": "tc_and_extra_tc", "index_list": "3", - "new": false + "run": false } ] diff --git a/.github/workflows/documentation.yml b/.github/workflows/documentation.yml index 2f43da4edc..f96ca5eacd 100644 --- a/.github/workflows/documentation.yml +++ b/.github/workflows/documentation.yml @@ -10,6 +10,8 @@ on: - docs/** pull_request: types: [opened, reopened, synchronize] + paths: + - docs/** jobs: documentation: diff --git a/.github/workflows/testing.yml b/.github/workflows/testing.yml index 9a2605ce36..23b87420a8 100644 --- a/.github/workflows/testing.yml +++ b/.github/workflows/testing.yml @@ -11,6 +11,8 @@ on: - docs/** pull_request: types: [opened, reopened, synchronize] + paths-ignore: + - docs/** jobs: job_control: diff --git a/docs/Contributors_Guide/add_use_case.rst b/docs/Contributors_Guide/add_use_case.rst index e13812a034..309d1a0a37 100644 --- a/docs/Contributors_Guide/add_use_case.rst +++ b/docs/Contributors_Guide/add_use_case.rst @@ -778,8 +778,6 @@ Embedding. Creating New Python Environments """""""""""""""""""""""""""""""" -**COMING SOON!** - In METplus v4.0.0 and earlier, a list of Python packages were added to use cases that required additional packages. These packages were either installed with pip3 or using a script. This approach was very time consuming as some @@ -791,6 +789,8 @@ environments, refer to the comments in the scripts found in developer, so please contact MET Help if none of these environments contain the package requirements needed to run a new use case. +**MORE INFO COMING SOON!** + .. 
_add_new_category_to_test_runs: Add new category to test runs @@ -802,15 +802,17 @@ In METplus version 4.0.0 and earlier, this list was found in the .github/workflows/testing.yml file. Add a new entry to the list that includes the category of the new use case, the list of indices that correspond to the index number described in the -:ref:`add_use_case_to_test_suite` section, -and set the "new" variable to true. +:ref:`add_use_case_to_test_suite` section. +Set the "run" variable to true so that the new use case group will run in +the automated test suite whenever a new change is pushed to GitHub. This +allows users to test that the new use case runs successfully. Example:: { "category": "climate", "index_list": "2", - "new": true + "run": true } .. note:: @@ -818,7 +820,7 @@ Example:: before the new item in the list. This example adds a new use case group that contains the climate use case -with index 2 and is marked as a "new" use case. +with index 2 and is marked to "run" for every push. New use cases are added as a separate item to make reviewing the test results easier. A new use case will produce new output data that is not found in the "truth" data set, against which the output of the use case runs is compared to check @@ -844,12 +846,12 @@ The argument supports a comma-separated list of numbers. Example:: { "category": "data_assimilation", "index_list": "0,2,4", - "new": false + "run": false }, { "category": "data_assimilation", "index_list": "1,3", - "new": false + "run": false }, The above example will run a job with data_assimilation use cases 0, 2, and @@ -860,12 +862,12 @@ It also supports a range of numbers separated with a dash. Example:: { "category": "data_assimilation", "index_list": "0-3", - "new": false + "run": false }, { "category": "data_assimilation", "index_list": "4-5", - "new": false + "run": false }, The above example will run a job with data_assimilation 0, 1, 2, and 3, then @@ -877,12 +879,12 @@ to run. Example:: { "category": "data_assimilation", "index_list": "0-2,4", - "new": false + "run": false }, { "category": "data_assimilation", "index_list": "3", - "new": false + "run": false }, The above example will run data_assimilation 0, 1, 2, and 4 in one diff --git a/docs/Contributors_Guide/continuous_integration.rst b/docs/Contributors_Guide/continuous_integration.rst index 6c6bed4d5b..f10f62abeb 100644 --- a/docs/Contributors_Guide/continuous_integration.rst +++ b/docs/Contributors_Guide/continuous_integration.rst @@ -120,7 +120,10 @@ Here is a list of the currently supported keywords and what they control: * **ci-skip-all**: Don't run anything - skip all automation jobs * **ci-skip-use-cases**: Don't run any use cases * **ci-run-all-cases**: Run all use cases -* **ci-run-diff**: Obtain truth data and run diffing logic +* **ci-run-diff**: Obtain truth data and run diffing logic for + use cases that are marked to run +* **ci-run-all-diff**: Obtain truth data and run diffing logic for + all use cases * **ci-only-docs**: Only run build documentation job - skip the rest Force MET Version Used for Tests diff --git a/docs/Contributors_Guide/documentation.rst b/docs/Contributors_Guide/documentation.rst index 06a6832cbb..98269f3f33 100644 --- a/docs/Contributors_Guide/documentation.rst +++ b/docs/Contributors_Guide/documentation.rst @@ -120,12 +120,47 @@ Contributor's Guide: Release Guide: ~~~~~~~~~~~~~~ -Coming soon! +* To add/modify the instructions for creating software releases for + any METplus component, including official, bugfix, and development + releases.
+ +* Each METplus component has a top level file (e.g. metplus.rst) + which simply contains references to files for each of the + releases. For example, metplus.rst contains references to: + + * metplus_official + * metplus_bugfix + * metplus_development + +* Each release file (e.g. metplus_official.rst, metplus_bugfix.rst, + metplus_development.rst) contains, at a minimum, a replacement + value for the projectRepo variable and include + statements for each release step. These individual steps + (e.g. open_release_issue.rst, clone_project_repository.rst, etc.) + may be common to multiple METplus components. These common steps + are located in the *release_steps* directory. However, a METplus + component may have different instructions from other components + (e.g. For METplus wrappers, update_version.rst, + create_release_extra.rst, etc.). In this case, the instructions + that are specific to that component are located in a subdirectory + of *release_steps*. For example, files that are specific to + METplus wrappers are located in *release_steps/metplus*, files + that are specific to METcalcpy are located in + *release_steps/metcalcpy*. + +* The file for each individual step (e.g. open_release_issue.rst, + update_version.rst, etc.) contains the instructions for + completing that step for the release. + Verification Datasets Guide: ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Coming soon! +* To add/modify any relevant datasets in an attempt to create a + centralized catalogue of verification datasets to provide the model + verification community with relevant "truth" datasets. See the + `Verification Datasets Guide Overview `_ + for more information. .. _read-the-docs: diff --git a/docs/Release_Guide/met_official.rst b/docs/Release_Guide/met_official.rst index 13980c49dc..cb277948c9 100644 --- a/docs/Release_Guide/met_official.rst +++ b/docs/Release_Guide/met_official.rst @@ -19,4 +19,4 @@ Create a new vX.Y.Z official release from the develop branch. .. include:: release_steps/met/update_dtc_website.rst .. include:: release_steps/finalize_release_on_github_official.rst .. include:: release_steps/update_docs_official.rst - +.. include:: release_steps/set_beta_deletion_reminder_official.rst diff --git a/docs/Release_Guide/metcalcpy_official.rst b/docs/Release_Guide/metcalcpy_official.rst index bfd82c7894..670d0293e2 100644 --- a/docs/Release_Guide/metcalcpy_official.rst +++ b/docs/Release_Guide/metcalcpy_official.rst @@ -21,3 +21,4 @@ Create a new vX.Y.Z official release from the develop branch. .. include:: release_steps/finalize_release_on_github_official.rst .. include:: release_steps/metcalcpy/update_version_on_develop.rst .. include:: release_steps/update_docs_official.rst +.. include:: release_steps/set_beta_deletion_reminder_official.rst diff --git a/docs/Release_Guide/metdatadb_bugfix.rst b/docs/Release_Guide/metdatadb_bugfix.rst index aa472354ed..c9d186f260 100644 --- a/docs/Release_Guide/metdatadb_bugfix.rst +++ b/docs/Release_Guide/metdatadb_bugfix.rst @@ -4,7 +4,8 @@ METdatadb Bugfix Release .. |projectRepo| replace:: METdatadb .. |projectName| replace:: |projectRepo| .. |addTarfileStep| replace:: Link text should be the name of the release and the URL should be the release page that was just created under the GitHub Releases tab. - +.. |otherWebsiteUpdates| replace:: Make any other necessary website updates. + Create a new vX.Y.Z bugfix release from the main_vX.Y branch. ..
include:: release_steps/open_release_issue.rst diff --git a/docs/Release_Guide/metdatadb_development.rst b/docs/Release_Guide/metdatadb_development.rst index b0fa32b85d..ed1ce403dc 100644 --- a/docs/Release_Guide/metdatadb_development.rst +++ b/docs/Release_Guide/metdatadb_development.rst @@ -4,7 +4,8 @@ METdatadb Development Release .. |projectRepo| replace:: METdatadb .. |projectName| replace:: |projectRepo| .. |addTarfileStep| replace:: Link text should be the name of the release and the URL should be the release page that was just created under the GitHub Releases tab. - +.. |otherWebsiteUpdates| replace:: Make any other necessary website updates. + Create a new vX.Y.Z-betaN or vX.Y.Z-rcN development release from the develop branch while working toward an official vX.Y.Z release. .. include:: release_steps/open_release_issue.rst diff --git a/docs/Release_Guide/metdatadb_official.rst b/docs/Release_Guide/metdatadb_official.rst index 86aa2e8c27..38ead65779 100644 --- a/docs/Release_Guide/metdatadb_official.rst +++ b/docs/Release_Guide/metdatadb_official.rst @@ -4,7 +4,8 @@ METdatadb Official Release .. |projectRepo| replace:: METdatadb .. |projectName| replace:: |projectRepo| .. |addTarfileStep| replace:: Link text should be the name of the release and the URL should be the release page that was just created under the GitHub Releases tab. - +.. |otherWebsiteUpdates| replace:: Make any other necessary website updates. + Create a new vX.Y.Z official release from the develop branch. .. include:: release_steps/open_release_issue.rst @@ -23,3 +24,4 @@ Create a new vX.Y.Z official release from the develop branch. .. include:: release_steps/finalize_release_on_github_official.rst .. include:: release_steps/metdatadb/update_version_on_develop.rst .. include:: release_steps/update_docs_official.rst +.. include:: release_steps/set_beta_deletion_reminder_official.rst diff --git a/docs/Release_Guide/metexpress_development.rst b/docs/Release_Guide/metexpress_development.rst index 4d87e1fa09..84a3551144 100644 --- a/docs/Release_Guide/metexpress_development.rst +++ b/docs/Release_Guide/metexpress_development.rst @@ -9,4 +9,5 @@ Create a new vX.Y.Z-betaN or vX.Y.Z-rcN development release from the develop bra .. include:: release_steps/metexpress/checkout_development_branch.rst .. include:: release_steps/metexpress/create_development_feature_branch.rst .. include:: release_steps/metexpress/merge_feature_branch.rst +.. include:: release_steps/metexpress/update_dtc_website.rst .. include:: release_steps/metexpress/finalize_release_on_github_development.rst diff --git a/docs/Release_Guide/metexpress_official.rst b/docs/Release_Guide/metexpress_official.rst index 940c36b532..ace0cb0bd0 100644 --- a/docs/Release_Guide/metexpress_official.rst +++ b/docs/Release_Guide/metexpress_official.rst @@ -15,4 +15,6 @@ Create a new vX.Y.Z official release from the develop branch. .. include:: release_steps/metexpress/deploy_AWS_apps.rst .. include:: release_steps/metexpress/pull_changes_and_merge_to_dev.rst .. include:: release_steps/update_docs_official.rst +.. include:: release_steps/metexpress/update_dtc_website.rst .. include:: release_steps/metexpress/announce_release.rst +.. 
include:: release_steps/set_beta_deletion_reminder_official.rst diff --git a/docs/Release_Guide/metplotpy_official.rst b/docs/Release_Guide/metplotpy_official.rst index ddd117ead0..ad9c264693 100644 --- a/docs/Release_Guide/metplotpy_official.rst +++ b/docs/Release_Guide/metplotpy_official.rst @@ -21,3 +21,4 @@ Create a new vX.Y.Z official release from the develop branch. .. include:: release_steps/finalize_release_on_github_official.rst .. include:: release_steps/metplotpy/update_version_official.rst .. include:: release_steps/update_docs_official.rst +.. include:: release_steps/set_beta_deletion_reminder_official.rst diff --git a/docs/Release_Guide/metplus_official.rst b/docs/Release_Guide/metplus_official.rst index bfdc4a4d53..5d9f42b85f 100644 --- a/docs/Release_Guide/metplus_official.rst +++ b/docs/Release_Guide/metplus_official.rst @@ -23,3 +23,4 @@ Create a new vX.Y.Z official release from the develop branch. .. include:: release_steps/metplus/update_version_on_develop.rst .. include:: release_steps/update_docs_official.rst .. include:: release_steps/metplus/update_web_server_data.rst +.. include:: release_steps/set_beta_deletion_reminder_official.rst diff --git a/docs/Release_Guide/metviewer_official.rst b/docs/Release_Guide/metviewer_official.rst index 3666dee71b..c2a5268e47 100644 --- a/docs/Release_Guide/metviewer_official.rst +++ b/docs/Release_Guide/metviewer_official.rst @@ -21,3 +21,4 @@ Create a new vX.Y.Z official release from the develop branch. .. include:: release_steps/finalize_release_on_github_official.rst .. include:: release_steps/metviewer/update_version_on_develop.rst .. include:: release_steps/update_docs_official.rst +.. include:: release_steps/set_beta_deletion_reminder_official.rst diff --git a/docs/Release_Guide/release_steps/common/update_dtc_website.rst b/docs/Release_Guide/release_steps/common/update_dtc_website.rst index 79584b50ad..e94d145c72 100644 --- a/docs/Release_Guide/release_steps/common/update_dtc_website.rst +++ b/docs/Release_Guide/release_steps/common/update_dtc_website.rst @@ -12,7 +12,7 @@ Update DTC Website * For *Full Title of Release* type "|projectRepo| Version X.Y.Z". * For *Related Community Code* select both the METplus and the |projectName| - options (use shift to select). + options (For Macs, hold the Command key to select both). * For *Version Label* type "|projectRepo| X.Y.Z betaN". @@ -27,7 +27,12 @@ Update DTC Website * Add Link: |addTarfileStep| * Add Link: Link text should be "User's Guide" and the URL should be the top - level directory of the User's Guide hosted on the web. + level directory of the User's Guide hosted on the web. Beta releases can + use "develop" in the URL, but for official releases, please ensure the + link uses the branch name (e.g. main_v4.0) as opposed to the tag name + (e.g. v4.0.0). For example, use + "https://metplus.readthedocs.io/en/main_v4.0/Users_Guide/" and NOT + "https://metplus.readthedocs.io/en/v4.0.0/Users_Guide/" * Add Link: Link text should be "Existing Builds and Docker" and the URL should be the latest Existing Builds page, i.e. @@ -41,3 +46,5 @@ Update DTC Website * Click on "Create Release". * Click on "Save". + + * |otherWebsiteUpdates| diff --git a/docs/Release_Guide/release_steps/met/update_dtc_website.rst b/docs/Release_Guide/release_steps/met/update_dtc_website.rst index fdf15ef8de..064eecf01a 100644 --- a/docs/Release_Guide/release_steps/met/update_dtc_website.rst +++ b/docs/Release_Guide/release_steps/met/update_dtc_website.rst @@ -3,3 +3,5 @@ .. 
|projectName| replace:: |projectRepo| .. |addTarfileStep| replace:: Link text should be the file name of the tar file and the URL should be the .tar.gz file created in the "Attach Release Tarfile" step. + +.. |otherWebsiteUpdates| replace:: Make any other necessary website updates. For example, the flowchart at https://dtcenter.org/community-code/model-evaluation-tools-met/system-architecture. diff --git a/docs/Release_Guide/release_steps/metcalcpy/update_dtc_website.rst b/docs/Release_Guide/release_steps/metcalcpy/update_dtc_website.rst index 464371828a..ae5455e477 100644 --- a/docs/Release_Guide/release_steps/metcalcpy/update_dtc_website.rst +++ b/docs/Release_Guide/release_steps/metcalcpy/update_dtc_website.rst @@ -3,3 +3,5 @@ .. |projectName| replace:: |projectRepo| .. |addTarfileStep| replace:: Link text should be the name of the release and the URL should be the release page that was just created under the GitHub Releases tab. + +.. |otherWebsiteUpdates| replace:: Make any other necessary website updates. diff --git a/docs/Release_Guide/release_steps/metexpress/update_dtc_website.rst b/docs/Release_Guide/release_steps/metexpress/update_dtc_website.rst new file mode 100644 index 0000000000..8a7fdf0b60 --- /dev/null +++ b/docs/Release_Guide/release_steps/metexpress/update_dtc_website.rst @@ -0,0 +1,7 @@ +.. include:: release_steps/common/update_dtc_website.rst + +.. |projectName| replace:: METexpress + +.. |addTarfileStep| replace:: Link text should be the name of the release and the URL should be the release page that was just created under the GitHub Releases tab. + +.. |otherWebsiteUpdates| replace:: Make any other necessary website updates. For example, adding the User's Guide to https://dtcenter.org/community-code/metexpress/documentation diff --git a/docs/Release_Guide/release_steps/metplotpy/update_dtc_website.rst b/docs/Release_Guide/release_steps/metplotpy/update_dtc_website.rst index 464371828a..ae5455e477 100644 --- a/docs/Release_Guide/release_steps/metplotpy/update_dtc_website.rst +++ b/docs/Release_Guide/release_steps/metplotpy/update_dtc_website.rst @@ -3,3 +3,5 @@ .. |projectName| replace:: |projectRepo| .. |addTarfileStep| replace:: Link text should be the name of the release and the URL should be the release page that was just created under the GitHub Releases tab. + +.. |otherWebsiteUpdates| replace:: Make any other necessary website updates. diff --git a/docs/Release_Guide/release_steps/metplus/update_dtc_website.rst b/docs/Release_Guide/release_steps/metplus/update_dtc_website.rst index 8b5fb6554a..d0773a851c 100644 --- a/docs/Release_Guide/release_steps/metplus/update_dtc_website.rst +++ b/docs/Release_Guide/release_steps/metplus/update_dtc_website.rst @@ -2,4 +2,6 @@ .. |projectName| replace:: METplus Wrappers -.. |addTarfileStep| replace:: Link text should be the name of the release and the URL should be the release page that was just created under the GitHub Releases tab. \ No newline at end of file +.. |addTarfileStep| replace:: Link text should be the name of the release and the URL should be the release page that was just created under the GitHub Releases tab. + +.. |otherWebsiteUpdates| replace:: Make any other necessary website updates. 
For example, adding the User's Guide to https://dtcenter.org/community-code/metplus/documentation diff --git a/docs/Release_Guide/release_steps/metviewer/update_dtc_website.rst b/docs/Release_Guide/release_steps/metviewer/update_dtc_website.rst index 464371828a..ae5455e477 100644 --- a/docs/Release_Guide/release_steps/metviewer/update_dtc_website.rst +++ b/docs/Release_Guide/release_steps/metviewer/update_dtc_website.rst @@ -3,3 +3,5 @@ .. |projectName| replace:: |projectRepo| .. |addTarfileStep| replace:: Link text should be the name of the release and the URL should be the release page that was just created under the GitHub Releases tab. + +.. |otherWebsiteUpdates| replace:: Make any other necessary website updates. diff --git a/docs/Release_Guide/release_steps/set_beta_deletion_reminder_official.rst b/docs/Release_Guide/release_steps/set_beta_deletion_reminder_official.rst new file mode 100644 index 0000000000..5665f7e1ba --- /dev/null +++ b/docs/Release_Guide/release_steps/set_beta_deletion_reminder_official.rst @@ -0,0 +1,8 @@ +Set up Reminder to Delete Beta Tags +----------------------------------- + +Help keep the GitHub repositories and DockerHub clean by removing beta tags. +Do not delete the beta tags for this release right away. Please set a +calendar reminder or schedule an email to be sent two weeks from the release +date as a reminder to delete the beta tags in both GitHub and DockerHub +(if applicable). diff --git a/docs/Users_Guide/glossary.rst b/docs/Users_Guide/glossary.rst index e2804652b7..c2a874e28f 100644 --- a/docs/Users_Guide/glossary.rst +++ b/docs/Users_Guide/glossary.rst @@ -1247,7 +1247,7 @@ METplus Configuration Glossary .. warning:: **DEPRECATED:** Please use :term:`FCST_POINT_STAT_INPUT_TEMPLATE` instead. FCST_IS_DAILY_FILE - .. warning:: **DEPRECATED:** Please use :term:`FCST_PCP_COMBINE_IS_DAILY_FILE` instead. + .. warning:: **DEPRECATED:** FCST_IS_PROB Specify whether the forecast data are probabilistic or not. Acceptable values: true/false @@ -1361,12 +1361,31 @@ METplus Configuration Glossary .. warning:: **DEPRECATED:** Please use :term:`FCST_PCP_COMBINE_INPUT_NAMES` instead. FCST_PCP_COMBINE_DATA_INTERVAL - Specify the accumulation interval of the forecast dataset used by the MET pcp_combine tool when processing daily input files. A corresponding variable exists for observation data called :term:`OBS_PCP_COMBINE_DATA_INTERVAL`. + .. warning:: **DEPRECATED:** + + FCST_PCP_COMBINE_DERIVE_LOOKBACK + .. warning:: **DEPRECATED:** Please use :term:`FCST_PCP_COMBINE_LOOKBACK` instead. + + FCST_PCP_COMBINE_LOOKBACK + Specify how far to look back in time to find files for building + commands to run the pcp_combine tool. + If processing precipitation accumulation data, this is equivalent to the + desired output accumulation to compute. + Units are assumed to be hours unless a time identifier such as + Y, m, d, H, M, S is specified at the end of the value, i.e. 30M or 1m. + If unset, :term:`FCST_PCP_COMBINE_OUTPUT_ACCUM` will be used. + If that is unset, then :term:`FCST_PCP_COMBINE_DERIVE_LOOKBACK` will be + used. + If none of the variables are set or set to 0, data will be obtained by + using the input template with the current runtime instead of looking + backwards in time. + A corresponding variable exists for observation data called + :term:`OBS_PCP_COMBINE_LOOKBACK`. | *Used by:* PCPCombine - FCST_PCP_COMBINE_DERIVE_LOOKBACK - Specify how far to look back in time in hours to find files for running the MET pcp_combine tool in derive mode. 
If set to 0 or unset, data will be obtained by using the input template with the current runtime instead of looking backwards in time. A corresponding variable exists for observation data called :term:`OBS_PCP_COMBINE_DERIVE_LOOKBACK`. + OBS_PCP_COMBINE_LOOKBACK + See :term:`FCST_PCP_COMBINE_LOOKBACK`. | *Used by:* PCPCombine @@ -1389,9 +1408,7 @@ METplus Configuration Glossary | *Used by:* PCPCombine FCST_PCP_COMBINE_IS_DAILY_FILE - Specify whether the forecast file is a daily file or not. A corresponding variable exists for observation data called :term:`OBS_PCP_COMBINE_IS_DAILY_FILE`.Acceptable values: true/false - - | *Used by:* PCPCombine + .. warning:: **DEPRECATED:** FCST_PCP_COMBINE_METHOD Specify the method to be used with the MET pcp_combine tool processing forecast data.Valid options are ADD, SUM, SUBTRACT, DERIVE, and USER_DEFINED. A corresponding variable exists for observation data called :term:`OBS_PCP_COMBINE_METHOD`. @@ -1438,9 +1455,7 @@ METplus Configuration Glossary | *Used by:* PCPCombine FCST_PCP_COMBINE_TIMES_PER_FILE - Specify the number of accumulation intervals of the forecast dataset used by the MET pcp_combine tool when processing daily input files. A corresponding variable exists for observation data called :term:`OBS_PCP_COMBINE_TIMES_PER_FILE`. - - | *Used by:* PCPCombine + .. warning:: **DEPRECATED:** FCST_POINT_STAT_FILE_WINDOW_BEGIN See :term:`OBS_POINT_STAT_FILE_WINDOW_BEGIN` @@ -2624,10 +2639,10 @@ METplus Configuration Glossary .. warning:: **DEPRECATED:** Please use :term:`PB2NC_OBS_BUFR_VAR_LIST` instead. OBS_DATA_INTERVAL - .. warning:: **DEPRECATED:** Use :term:`OBS_PCP_COMBINE_DATA_INTERVAL` instead. + .. warning:: **DEPRECATED:** FCST_DATA_INTERVAL - .. warning:: **DEPRECATED:** Use :term:`FCST_PCP_COMBINE_DATA_INTERVAL` instead. + .. warning:: **DEPRECATED:** FCST_ENSEMBLE_STAT_INPUT_DATATYPE Specify the data type of the input directory for forecast files used with the MET ensemble_stat tool. Currently valid options are NETCDF, GRIB, and GEMPAK. If set to GEMPAK, data will automatically be converted to NetCDF via GempakToCF. Similar variables exists for observation grid and point data called :term:`OBS_ENSEMBLE_STAT_INPUT_GRID_DATATYPE` and :term:`OBS_ENSEMBLE_STAT_INPUT_POINT_DATATYPE`. @@ -2759,7 +2774,7 @@ METplus Configuration Glossary .. warning:: **DEPRECATED:** Please use :term:`OBS_POINT_STAT_INPUT_TEMPLATE` instead. OBS_IS_DAILY_FILE - .. warning:: **DEPRECATED:** Please use :term:`OBS_PCP_COMBINE_IS_DAILY_FILE` instead. + .. warning:: **DEPRECATED:** OBS_IS_PROB Used when setting OBS_* variables to process forecast data for comparisons with mtd. Specify whether the observation data are probabilistic or not. See :term:`FCST_IS_PROB` .Acceptable values: true/false @@ -2861,10 +2876,10 @@ METplus Configuration Glossary .. warning:: **DEPRECATED:** Please use :term:`OBS_PCP_COMBINE_INPUT_DATATYPE` instead. OBS_TIMES_PER_FILE - .. warning:: **DEPRECATED:** Please use :term:`OBS_PCP_COMBINE_TIMES_PER_FILE` instead. + .. warning:: **DEPRECATED:** FCST_TIMES_PER_FILE - .. warning:: **DEPRECATED:** Please use :term:`FCST_PCP_COMBINE_TIMES_PER_FILE` instead. + .. warning:: **DEPRECATED:** OBS_PCP_COMBINE__FIELD_NAME See :term:`FCST_PCP_COMBINE__FIELD_NAME`. @@ -2872,9 +2887,7 @@ METplus Configuration Glossary | *Used by:* PCPCombine OBS_PCP_COMBINE_DATA_INTERVAL - See :term:`FCST_PCP_COMBINE_DATA_INTERVAL`. - - | *Used by:* PCPCombine + .. warning:: **DEPRECATED:** OBS_PCP_COMBINE_DERIVE_LOOKBACK See :term:`FCST_PCP_COMBINE_DERIVE_LOOKBACK`. 
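The lookback value described above may carry a single-letter time unit (Y, m, d, H, M, S) and defaults to hours, and FCST_PCP_COMBINE_LOOKBACK falls back to FCST_PCP_COMBINE_OUTPUT_ACCUM and then FCST_PCP_COMBINE_DERIVE_LOOKBACK when unset. A minimal Python sketch of that lookup, for illustration only (parse_lookback and get_lookback are hypothetical helpers, not METplus functions, and months/years are approximated with fixed lengths here rather than the relative deltas METplus itself uses)::

    import re
    from datetime import timedelta

    # Seconds per unit suffix; a bare number is assumed to be hours.
    # Months (m) and years (Y) are approximated with fixed lengths.
    UNIT_SECONDS = {'S': 1, 'M': 60, 'H': 3600,
                    'd': 86400, 'm': 2592000, 'Y': 31536000}

    def parse_lookback(value):
        """Parse a value like '15', '30M', or '1m' into a timedelta."""
        match = re.fullmatch(r'(\d+)([YmdHMS]?)', str(value).strip())
        if not match:
            raise ValueError(f'invalid lookback value: {value}')
        amount, unit = match.groups()
        return timedelta(seconds=int(amount) * UNIT_SECONDS.get(unit, 3600))

    def get_lookback(config, dtype='FCST'):
        """Apply the documented fallback chain for PCPCombine lookback."""
        for suffix in ('LOOKBACK', 'OUTPUT_ACCUM', 'DERIVE_LOOKBACK'):
            value = config.get(f'{dtype}_PCP_COMBINE_{suffix}')
            if value and value != '0':
                return parse_lookback(value)
        # nothing set (or set to 0): use the input template with the
        # current runtime instead of looking backwards in time
        return timedelta(0)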
@@ -2902,9 +2915,7 @@ METplus Configuration Glossary | *Used by:* PCPCombine OBS_PCP_COMBINE_IS_DAILY_FILE - See :term:`FCST_PCP_COMBINE_IS_DAILY_FILE`. Acceptable values: true/false - - | *Used by:* PCPCombine + .. warning:: **DEPRECATED:** OBS_PCP_COMBINE_METHOD See :term:`FCST_PCP_COMBINE_METHOD`. @@ -2942,9 +2953,7 @@ METplus Configuration Glossary | *Used by:* PCPCombine OBS_PCP_COMBINE_TIMES_PER_FILE - See :term:`FCST_PCP_COMBINE_TIMES_PER_FILE`. - - | *Used by:* PCPCombine + .. warning:: **DEPRECATED:** OBS_POINT_STAT_FILE_WINDOW_BEGIN Used to control the lower bound of the window around the valid time to determine if a file should be used for processing by PointStat. See :ref:`Directory_and_Filename_Template_Info` subsection called 'Using Windows to Find Valid Files.' Units are seconds. If :term:`OBS_POINT_STAT_FILE_WINDOW_BEGIN` is not set in the config file, the value of :term:`OBS_FILE_WINDOW_BEGIN` will be used instead. If both file window begin and window end values are set to 0, then METplus will require an input file with an exact time match to process. @@ -4357,27 +4366,26 @@ METplus Configuration Glossary | *Used by:* PCPCombine FCST_PCP_COMBINE_OUTPUT_ACCUM - Specify desired accumulation to be built from the forecast data. Units are assumed to be hours unless a time identifier such as Y, m, d, H, M, S is specifed at the end of the value, i.e. 30M or 1m. If this variable is not set, then FCST_VAR_LEVELS is used. + Specify desired accumulation to be built from the forecast data. + Synonym for :term:`FCST_PCP_COMBINE_LOOKBACK`. A corresponding variable exists for observation data called :term:`OBS_PCP_COMBINE_OUTPUT_ACCUM`. - Examples: - - 15H - - This will attempt to build a 15 hour accumulation. - | *Used by:* PCPCombine FCST_PCP_COMBINE_OUTPUT_NAME - Specify the output field name from processing forecast data. If this variable is not set, then :term:`FCST_VAR_NAME` is used. + Specify the output field name from processing forecast data. + If this variable is not set, then :term:`FCST_VAR_NAME` is used. - A corresponding variable exists for observation data called :term:`OBS_PCP_COMBINE_OUTPUT_NAME`. + A corresponding variable exists for observation data called + :term:`OBS_PCP_COMBINE_OUTPUT_NAME`. Example: APCP + | *Used by:* PCPCombine + OBS_PCP_COMBINE_OUTPUT_ACCUM - See :term:`FCST_PCP_COMBINE_OUTPUT_NAME`. + See :term:`FCST_PCP_COMBINE_LOOKBACK`. | *Used by:* PCPCombine @@ -6753,6 +6761,11 @@ METplus Configuration Glossary | *Used by:* PointStat + POINT_STAT_OUTPUT_FLAG_ORANK + Specify the value for 'output_flag.orank' in the MET configuration file for PointStat. + + | *Used by:* PointStat + POINT_STAT_INTERP_VLD_THRESH Specify the value for 'interp.vld_thresh' in the MET configuration file for PointStat. @@ -7859,9 +7872,30 @@ METplus Configuration Glossary | *Used by:* StatAnalysis + GFDL_TRACKER_KEEP_INTERMEDIATE + If True, do not scrub intermediate files created by the tracker. Useful + for debugging issues. + + | *Used by:* GFDLTracker + TCMPR_PLOTTER_READ_ALL_FILES If True, pass in input directory set by :term:`TCMPR_PLOTTER_TCMPR_DATA_DIR` to the script. If False, a list of all files that end with .tcst in the input directory is gathered and passed into the script. Defaults to False. | *Used by:* TCMPRPlotter + + TC_PAIRS_VALID_INCLUDE + Specify the value for 'valid_inc' in the MET configuration file for TCPairs. + + | *Used by:* TCPairs + + TC_PAIRS_VALID_EXCLUDE + Specify the value for 'valid_exc' in the MET configuration file for TCPairs. 
+ + | *Used by:* TCPairs + + TC_PAIRS_WRITE_VALID + Specify the value for 'write_valid' in the MET configuration file for TCPairs. + + | *Used by:* TCPairs diff --git a/docs/Users_Guide/installation.rst b/docs/Users_Guide/installation.rst index 25dd57ed67..9e2c8b273f 100644 --- a/docs/Users_Guide/installation.rst +++ b/docs/Users_Guide/installation.rst @@ -253,8 +253,10 @@ Build_components and using manage_externals Running build_components/build_MET.sh will -- clone MET and METviewer from github using the manage_externals scripts +- clone METplotpy, METcalcpy and METviewer from github using the manage_externals scripts - grab the current MET compile script and all of the necessary external libraries +- download the most recent version of MET from github + - https://github.com/dtcenter/MET/releases/download/v10.0.0/met-10.0.0.20210510.tar.gz - build the external libraries - attempt to build MET @@ -265,6 +267,8 @@ the process and allow MET to be built with just a few manual changes. External Components =================== +.. _external-components-gfdl-tracker: + GFDL Tracker ------------ diff --git a/docs/Users_Guide/quicksearch.rst b/docs/Users_Guide/quicksearch.rst index db8380f4b5..457e7a4172 100644 --- a/docs/Users_Guide/quicksearch.rst +++ b/docs/Users_Guide/quicksearch.rst @@ -129,6 +129,7 @@ Use Cases by METplus Feature: | `Diagnostics <../search.html?q=DiagnosticsUseCase&check_keywords=yes&area=default>`_ | `Feature Relative <../search.html?q=FeatureRelativeUseCase&check_keywords=yes&area=default>`_ | `GempakToCF <../search.html?q=GempakToCFToolUseCase&check_keywords=yes&area=default>`_ + | `GFDLTracker <../search.html?q=GFDLTrackerToolUseCase&check_keywords=yes&area=default>`_ | `Looping by Month or Year <../search.html?q=LoopByMonthFeatureUseCase&check_keywords=yes&area=default>`_ | `List Expansion (using begin_end_incr syntax) <../search.html?q=ListExpansionFeatureUseCase&check_keywords=yes&area=default>`_ | `Masking for Regions of Interest <../search.html?q=MaskingFeatureUseCase&check_keywords=yes&area=default>`_ @@ -155,6 +156,7 @@ Use Cases by METplus Feature: | **Diagnostics**: *DiagnosticsUseCase* | **Feature Relative**: *FeatureRelativeUseCase* | **GempakToCF**: *GempakToCFToolUseCase* + | **GFDL Tracker**: *GFDLTrackerToolUseCase* | **Looping by Month or Year**: *LoopByMonthFeatureUseCase* | **List Expansion (using begin_end_incr syntax)**: *ListExpansionFeatureUseCase* | **Masking for Regions of Interest**: *MaskingFeatureUseCase* diff --git a/docs/Users_Guide/systemconfiguration.rst b/docs/Users_Guide/systemconfiguration.rst index 1a374a5e86..3893086f58 100644 --- a/docs/Users_Guide/systemconfiguration.rst +++ b/docs/Users_Guide/systemconfiguration.rst @@ -522,6 +522,8 @@ given use case. More information about the variables set in the use case configuration files can be found in the :ref:`common_config_variables` section. +.. _running-metplus: + Running METplus =============== diff --git a/docs/Users_Guide/wrappers.rst b/docs/Users_Guide/wrappers.rst index 6b1dd4ad37..ca7a066dc8 100644 --- a/docs/Users_Guide/wrappers.rst +++ b/docs/Users_Guide/wrappers.rst @@ -986,7 +986,13 @@ Description Used to call the GFDL Tracker applications to objectively analyze forecast data to provide an estimate of the vortex center position (latitude and longitude), -and track the storm for the duration of the forecast. +and track the storm for the duration of the forecast. 
The wrapper copies files +and uses symbolic links to ensure that input files are named and located in +the correct place so that the tracker can read them. The wrapper also generates +index files and other inputs that are required to run the tool and substitutes +values into template configuration files that are read by the tracker. +Relevant output files are renamed based on user configuration. +See :ref:`external-components-gfdl-tracker` for more information. METplus Configuration --------------------- @@ -1088,6 +1094,7 @@ METplus Configuration | :term:`GFDL_TRACKER_USER_WANTS_TO_TRACK_ZETA850` | :term:`GFDL_TRACKER_VERBOSE_VERB` | :term:`GFDL_TRACKER_VERBOSE_VERB_G2` +| :term:`GFDL_TRACKER_KEEP_INTERMEDIATE` .. _gfdl_tracker-nml-conf: @@ -4288,18 +4295,12 @@ METplus Configuration | :term:`OBS_PCP_COMBINE_CONSTANT_INIT` | :term:`FCST_PCP_COMBINE_STAT_LIST` | :term:`OBS_PCP_COMBINE_STAT_LIST` -| :term:`FCST_PCP_COMBINE_DERIVE_LOOKBACK` -| :term:`OBS_PCP_COMBINE_DERIVE_LOOKBACK` | :term:`PCP_COMBINE_SKIP_IF_OUTPUT_EXISTS` -| :term:`FCST_PCP_COMBINE_DATA_INTERVAL` -| :term:`OBS_PCP_COMBINE_DATA_INTERVAL` -| :term:`FCST_PCP_COMBINE_TIMES_PER_FILE` -| :term:`OBS_PCP_COMBINE_TIMES_PER_FILE` -| :term:`FCST_PCP_COMBINE_IS_DAILY_FILE` -| :term:`OBS_PCP_COMBINE_IS_DAILY_FILE` | :term:`FCST_PCP_COMBINE_COMMAND` | :term:`OBS_PCP_COMBINE_COMMAND` | :term:`PCP_COMBINE_CUSTOM_LOOP_LIST` +| :term:`FCST_PCP_COMBINE_LOOKBACK` +| :term:`OBS_PCP_COMBINE_LOOKBACK` | :term:`FCST_PCP_COMBINE_EXTRA_NAMES` (optional) | :term:`FCST_PCP_COMBINE_EXTRA_LEVELS` (optional) | :term:`FCST_PCP_COMBINE_EXTRA_OUTPUT_NAMES` (optional) @@ -4331,6 +4332,14 @@ METplus Configuration | :term:`OBS_PCP_COMBINE_INPUT_LEVEL` | :term:`FCST_PCP_COMBINE__FIELD_NAME` | :term:`OBS_PCP_COMBINE__FIELD_NAME` + | :term:`FCST_PCP_COMBINE_DATA_INTERVAL` + | :term:`OBS_PCP_COMBINE_DATA_INTERVAL` + | :term:`FCST_PCP_COMBINE_TIMES_PER_FILE` + | :term:`OBS_PCP_COMBINE_TIMES_PER_FILE` + | :term:`FCST_PCP_COMBINE_IS_DAILY_FILE` + | :term:`OBS_PCP_COMBINE_IS_DAILY_FILE` + | :term:`FCST_PCP_COMBINE_DERIVE_LOOKBACK` + | :term:`OBS_PCP_COMBINE_DERIVE_LOOKBACK` | .. _plot_data_plane_wrapper: @@ -4461,6 +4470,7 @@ Configuration | :term:`POINT_STAT_OUTPUT_FLAG_PJC` | :term:`POINT_STAT_OUTPUT_FLAG_PRC` | :term:`POINT_STAT_OUTPUT_FLAG_ECNT` +| :term:`POINT_STAT_OUTPUT_FLAG_ORANK` | :term:`POINT_STAT_OUTPUT_FLAG_RPS` | :term:`POINT_STAT_OUTPUT_FLAG_ECLV` | :term:`POINT_STAT_OUTPUT_FLAG_MPR` @@ -4842,6 +4852,8 @@ see :ref:`How METplus controls MET config file settings`. - output_flag.eclv * - :term:`POINT_STAT_OUTPUT_FLAG_MPR` - output_flag.mpr + * - :term:`POINT_STAT_OUTPUT_FLAG_ORANK` + - output_flag.orank **${METPLUS_INTERP_DICT}** @@ -6586,6 +6598,9 @@ METplus Configuration | :term:`TC_PAIRS_CONFIG_FILE` | :term:`TC_PAIRS_INIT_INCLUDE` | :term:`TC_PAIRS_INIT_EXCLUDE` +| :term:`TC_PAIRS_VALID_INCLUDE` +| :term:`TC_PAIRS_VALID_EXCLUDE` +| :term:`TC_PAIRS_WRITE_VALID` | :term:`TC_PAIRS_READ_ALL_FILES` | :term:`TC_PAIRS_MODEL` | :term:`TC_PAIRS_STORM_ID` @@ -6741,7 +6756,7 @@ see :ref:`How METplus controls MET config file settings`. * - :term:`TC_PAIRS_INIT_END` - init_end -**${METPLUS_INIT_INCLUDE}** +**${METPLUS_INIT_INC}** .. list-table:: :widths: 5 5 @@ -6752,7 +6767,7 @@ see :ref:`How METplus controls MET config file settings`. * - :term:`TC_PAIRS_INIT_INCLUDE` - init_inc -**${METPLUS_INIT_EXCLUDE}** +**${METPLUS_INIT_EXC}** .. list-table:: :widths: 5 5 @@ -6763,6 +6778,39 @@ see :ref:`How METplus controls MET config file settings`. 
* - :term:`TC_PAIRS_INIT_EXCLUDE` - init_exc +**${METPLUS_VALID_INC}** + +.. list-table:: + :widths: 5 5 + :header-rows: 0 + + * - METplus Config(s) + - MET Config File + * - :term:`TC_PAIRS_VALID_INCLUDE` + - valid_inc + +**${METPLUS_VALID_EXC}** + +.. list-table:: + :widths: 5 5 + :header-rows: 0 + + * - METplus Config(s) + - MET Config File + * - :term:`TC_PAIRS_VALID_EXCLUDE` + - valid_exc + +**${METPLUS_WRITE_VALID}** + +.. list-table:: + :widths: 5 5 + :header-rows: 0 + + * - METplus Config(s) + - MET Config File + * - :term:`TC_PAIRS_WRITE_VALID` + - write_valid + **${METPLUS_VALID_BEG}** .. list-table:: diff --git a/docs/use_cases/met_tool_wrapper/GFDLTracker/GFDLTracker_ETC.py b/docs/use_cases/met_tool_wrapper/GFDLTracker/GFDLTracker_ETC.py new file mode 100644 index 0000000000..ee45353fcd --- /dev/null +++ b/docs/use_cases/met_tool_wrapper/GFDLTracker/GFDLTracker_ETC.py @@ -0,0 +1,106 @@ +""" +GFDLTracker: Extra Tropical Cyclone Use Case +============================================ + +met_tool_wrapper/GFDLTracker/GFDLTracker_ETC.conf + +""" +############################################################################## +# Scientific Objective +# -------------------- +# +# Setup and run GFDL Tracker applications to track extra tropical cyclones. +# See :ref:`external-components-gfdl-tracker` for more information. +# A genesis vitals file is read into the tracker. This file contains +# information on storms that were tracked in the previous 2 runs so that +# additional data is attributed to the correct storm. +# + +############################################################################## +# Datasets +# -------- +# +# | **Forecast:** GFS +# +# | **Location:** All of the input data required for this use case can be found in the met_test sample data tarball. Click here to the METplus releases page and download sample data for the appropriate release: https://github.com/dtcenter/METplus/releases +# | This tarball should be unpacked into the directory that you will set the value of INPUT_BASE. See `Running METplus`_ section for more information. +# + +############################################################################## +# METplus Components +# ------------------ +# +# This use case utilizes the METplus GFDLTracker wrapper to generate a command +# to run the GFDL Tracker Fortran applications. + +############################################################################## +# METplus Workflow +# ---------------- +# +# GFDLTracker is the only tool called in this example. +# It processes the following run time: +# +# | **Init:** 2021-07-13 00Z +# | **Forecast lead**: All available leads (0 - 198 hour) +# | + +############################################################################## +# METplus Configuration +# --------------------- +# +# METplus first loads all of the configuration files found in parm/metplus_config, +# then it loads any configuration files passed to METplus via the command line +# with the -c option, i.e. -c parm/use_cases/met_tool_wrapper/GFDLTracker/GFDLTracker_ETC.conf +# +# .. highlight:: bash +# .. literalinclude:: ../../../../parm/use_cases/met_tool_wrapper/GFDLTracker/GFDLTracker_ETC.conf + +############################################################################## +# GFDL Tracker Configuration +# -------------------------- +# +# METplus replaces values in the template configuration files read by the +# tracker based on user settings in the METplus configuration file. +# +# **YOU SHOULD NOT SET ANY OF THESE ENVIRONMENT VARIABLES YOURSELF! 
THEY WILL BE OVERWRITTEN BY METPLUS WHEN IT CALLS THE MET TOOLS!** +# +# .. highlight:: bash +# .. literalinclude:: ../../../../parm/use_cases/met_tool_wrapper/GFDLTracker/template.nml + +############################################################################## +# Running METplus +# --------------- +# +# This use case can be run by passing in the conf file to the run script:: +# +# run_metplus.py /path/to/METplus/parm/use_cases/met_tool_wrapper/GFDLTracker/GFDLTracker_ETC.conf +# +# See the :ref:`running-metplus` section of the User's Guide for more +# information on how to run use cases. +# + +############################################################################## +# Expected Output +# --------------- +# +# A successful run will output the following both to the screen and to the logfile:: +# +# INFO: METplus has successfully finished running. +# +# Refer to the value set for **OUTPUT_BASE** to find where the output data was generated. +# Output for this use case will be found in gfdl_tracker/etc (relative to **OUTPUT_BASE**) +# and will contain the following file: +# +# * gfs.2021071300.etc.txt +# * input.202107130000.nml + +############################################################################## +# Keywords +# -------- +# +# .. note:: +# +# * GFDLTrackerToolUseCase +# +# Navigate to the :ref:`quick-search` page to discover other similar use cases. +# diff --git a/docs/use_cases/met_tool_wrapper/GFDLTracker/GFDLTracker_Genesis.py b/docs/use_cases/met_tool_wrapper/GFDLTracker/GFDLTracker_Genesis.py new file mode 100644 index 0000000000..f769e9256f --- /dev/null +++ b/docs/use_cases/met_tool_wrapper/GFDLTracker/GFDLTracker_Genesis.py @@ -0,0 +1,106 @@ +""" +GFDLTracker: TC Genesis Use Case +================================ + +met_tool_wrapper/GFDLTracker/GFDLTracker_Genesis.conf + +""" +############################################################################## +# Scientific Objective +# -------------------- +# +# Setup and run GFDL Tracker applications to track cyclones in TC genesis mode. +# See :ref:`external-components-gfdl-tracker` for more information. +# A genesis vitals file is read into the tracker. This file contains +# information on storms that were tracked in the previous 2 runs so that +# additional data is attributed to the correct storm. +# + +############################################################################## +# Datasets +# -------- +# +# | **Forecast:** GFS +# +# | **Location:** All of the input data required for this use case can be found in the met_test sample data tarball. Click here to the METplus releases page and download sample data for the appropriate release: https://github.com/dtcenter/METplus/releases +# | This tarball should be unpacked into the directory that you will set the value of INPUT_BASE. See `Running METplus`_ section for more information. +# + +############################################################################## +# METplus Components +# ------------------ +# +# This use case utilizes the METplus GFDLTracker wrapper to generate a command +# to run the GFDL Tracker Fortran applications. + +############################################################################## +# METplus Workflow +# ---------------- +# +# GFDLTracker is the only tool called in this example. 
+# It processes the following run time: +# +# | **Init:** 2021-07-13 00Z +# | **Forecast lead**: All available leads (0 - 198 hour) +# | + +############################################################################## +# METplus Configuration +# --------------------- +# +# METplus first loads all of the configuration files found in parm/metplus_config, +# then it loads any configuration files passed to METplus via the command line +# with the -c option, i.e. -c parm/use_cases/met_tool_wrapper/GFDLTracker/GFDLTracker_Genesis.conf +# +# .. highlight:: bash +# .. literalinclude:: ../../../../parm/use_cases/met_tool_wrapper/GFDLTracker/GFDLTracker_Genesis.conf + +############################################################################## +# GFDL Tracker Configuration +# -------------------------- +# +# METplus replaces values in the template configuration files read by the +# tracker based on user settings in the METplus configuration file. +# +# **YOU SHOULD NOT SET ANY OF THESE ENVIRONMENT VARIABLES YOURSELF! THEY WILL BE OVERWRITTEN BY METPLUS WHEN IT CALLS THE MET TOOLS!** +# +# .. highlight:: bash +# .. literalinclude:: ../../../../parm/use_cases/met_tool_wrapper/GFDLTracker/template.nml + +############################################################################## +# Running METplus +# --------------- +# +# This use case can be run by passing in the conf file to the run script:: +# +# run_metplus.py /path/to/METplus/parm/use_cases/met_tool_wrapper/GFDLTracker/GFDLTracker_Genesis.conf +# +# See the :ref:`running-metplus` section of the User's Guide for more +# information on how to run use cases. +# + +############################################################################## +# Expected Output +# --------------- +# +# A successful run will output the following both to the screen and to the logfile:: +# +# INFO: METplus has successfully finished running. +# +# Refer to the value set for **OUTPUT_BASE** to find where the output data was generated. +# Output for this use case will be found in gfdl_tracker/genesis (relative to **OUTPUT_BASE**) +# and will contain the following file: +# +# * gfs.2021071300.genesis.txt +# * input.202107130000.nml + +############################################################################## +# Keywords +# -------- +# +# .. note:: +# +# * GFDLTrackerToolUseCase +# +# Navigate to the :ref:`quick-search` page to discover other similar use cases. +# diff --git a/docs/use_cases/met_tool_wrapper/GFDLTracker/GFDLTracker_TC.py b/docs/use_cases/met_tool_wrapper/GFDLTracker/GFDLTracker_TC.py new file mode 100644 index 0000000000..2eddf453d6 --- /dev/null +++ b/docs/use_cases/met_tool_wrapper/GFDLTracker/GFDLTracker_TC.py @@ -0,0 +1,103 @@ +""" +GFDLTracker: Tropical Cyclone Use Case +====================================== + +met_tool_wrapper/GFDLTracker/GFDLTracker_TC.conf + +""" +############################################################################## +# Scientific Objective +# -------------------- +# +# Setup and run GFDL Tracker applications to track tropical cyclones. +# See :ref:`external-components-gfdl-tracker` for more information. +# + +############################################################################## +# Datasets +# -------- +# +# | **Forecast:** HWRF +# +# | **Location:** All of the input data required for this use case can be found in the met_test sample data tarball. 
+
+##############################################################################
+# GFDL Tracker Configuration
+# --------------------------
+#
+# METplus replaces values in the template configuration files read by the
+# tracker based on user settings in the METplus configuration file.
+#
+# **YOU SHOULD NOT SET ANY OF THESE ENVIRONMENT VARIABLES YOURSELF! THEY WILL BE OVERWRITTEN BY METPLUS WHEN IT CALLS THE MET TOOLS!**
+#
+# .. highlight:: bash
+# .. literalinclude:: ../../../../parm/use_cases/met_tool_wrapper/GFDLTracker/template.nml
+
+##############################################################################
+# Running METplus
+# ---------------
+#
+# This use case can be run by passing in the conf file to the run script::
+#
+# run_metplus.py /path/to/METplus/parm/use_cases/met_tool_wrapper/GFDLTracker/GFDLTracker_Genesis.conf
+#
+# See the :ref:`running-metplus` section of the User's Guide for more
+# information on how to run use cases.
+#
+
+##############################################################################
+# Expected Output
+# ---------------
+#
+# A successful run will output the following both to the screen and to the logfile::
+#
+# INFO: METplus has successfully finished running.
+#
+# Refer to the value set for **OUTPUT_BASE** to find where the output data was generated.
+# Output for this use case will be found in gfdl_tracker/genesis (relative to **OUTPUT_BASE**)
+# and will contain the following files:
+#
+# * gfs.2021071300.genesis.txt
+# * input.202107130000.nml
+
+##############################################################################
+# Keywords
+# --------
+#
+# .. note::
+#
+# * GFDLTrackerToolUseCase
+#
+# Navigate to the :ref:`quick-search` page to discover other similar use cases.
+#
diff --git a/docs/use_cases/met_tool_wrapper/GFDLTracker/GFDLTracker_TC.py b/docs/use_cases/met_tool_wrapper/GFDLTracker/GFDLTracker_TC.py
new file mode 100644
index 0000000000..2eddf453d6
--- /dev/null
+++ b/docs/use_cases/met_tool_wrapper/GFDLTracker/GFDLTracker_TC.py
@@ -0,0 +1,103 @@
+"""
+GFDLTracker: Tropical Cyclone Use Case
+======================================
+
+met_tool_wrapper/GFDLTracker/GFDLTracker_TC.conf
+
+"""
+##############################################################################
+# Scientific Objective
+# --------------------
+#
+# Set up and run GFDL Tracker applications to track tropical cyclones.
+# See :ref:`external-components-gfdl-tracker` for more information.
+#
+
+##############################################################################
+# Datasets
+# --------
+#
+# | **Forecast:** HWRF
+#
+# | **Location:** All of the input data required for this use case can be found in the met_test sample data tarball. Go to the METplus releases page and download the sample data for the appropriate release: https://github.com/dtcenter/METplus/releases
+# | This tarball should be unpacked into the directory that you will set as the value of INPUT_BASE. See the `Running METplus`_ section for more information.
+#
+
+##############################################################################
+# METplus Components
+# ------------------
+#
+# This use case utilizes the METplus GFDLTracker wrapper to generate a command
+# to run the GFDL Tracker Fortran applications.
+
+##############################################################################
+# METplus Workflow
+# ----------------
+#
+# GFDLTracker is the only tool called in this example.
+# It processes the following run time:
+#
+# | **Init:** 2016-09-06 00Z
+# | **Forecast lead:** All available leads (0 - 126 hours)
+# |
+
+##############################################################################
+# METplus Configuration
+# ---------------------
+#
+# METplus first loads all of the configuration files found in parm/metplus_config,
+# then it loads any configuration files passed to METplus via the command line
+# with the -c option, i.e. -c parm/use_cases/met_tool_wrapper/GFDLTracker/GFDLTracker_TC.conf
+#
+# .. highlight:: bash
+# .. literalinclude:: ../../../../parm/use_cases/met_tool_wrapper/GFDLTracker/GFDLTracker_TC.conf
+
+##############################################################################
+# GFDL Tracker Configuration
+# --------------------------
+#
+# METplus replaces values in the template configuration files read by the
+# tracker based on user settings in the METplus configuration file.
+#
+# **YOU SHOULD NOT SET ANY OF THESE ENVIRONMENT VARIABLES YOURSELF! THEY WILL BE OVERWRITTEN BY METPLUS WHEN IT CALLS THE MET TOOLS!**
+#
+# .. highlight:: bash
+# .. literalinclude:: ../../../../parm/use_cases/met_tool_wrapper/GFDLTracker/template.nml
+
+##############################################################################
+# Running METplus
+# ---------------
+#
+# This use case can be run by passing in the conf file to the run script::
+#
+# run_metplus.py /path/to/METplus/parm/use_cases/met_tool_wrapper/GFDLTracker/GFDLTracker_TC.conf
+#
+# See the :ref:`running-metplus` section of the User's Guide for more
+# information on how to run use cases.
+#
+
+##############################################################################
+# Expected Output
+# ---------------
+#
+# A successful run will output the following both to the screen and to the logfile::
+#
+# INFO: METplus has successfully finished running.
+#
+# Refer to the value set for **OUTPUT_BASE** to find where the output data was generated.
+# Output for this use case will be found in gfdl_tracker/tc (relative to **OUTPUT_BASE**)
+# and will contain the following files:
+#
+# * hwrf.2016090600.track.txt
+# * input.201609060000.nml
+
+##############################################################################
+# Keywords
+# --------
+#
+# .. note::
+#
+# * GFDLTrackerToolUseCase
+#
+# Navigate to the :ref:`quick-search` page to discover other similar use cases.
+# diff --git a/docs/use_cases/met_tool_wrapper/GFDLTracker/README.rst b/docs/use_cases/met_tool_wrapper/GFDLTracker/README.rst new file mode 100644 index 0000000000..b1d32425ab --- /dev/null +++ b/docs/use_cases/met_tool_wrapper/GFDLTracker/README.rst @@ -0,0 +1,2 @@ +GFDLTracker +----------- diff --git a/internal_tests/pytests/compare_gridded/test_compare_gridded.py b/internal_tests/pytests/compare_gridded/test_compare_gridded.py index f7410eafda..4e92821b34 100644 --- a/internal_tests/pytests/compare_gridded/test_compare_gridded.py +++ b/internal_tests/pytests/compare_gridded/test_compare_gridded.py @@ -121,8 +121,8 @@ def test_get_field_info_no_prob(metplus_config, key, value): 'key, value', [ # forecast grib name level thresh (['NAME', 'L0', ['gt3', '<=5'], '', 'FCST'], - ['{ name=\"PROB\"; level=\"L0\"; prob={ name=\"NAME\"; thresh_lo=3.0; } cat_thresh=[==0.1]; }', - '{ name=\"PROB\"; level=\"L0\"; prob={ name=\"NAME\"; thresh_hi=5.0; } cat_thresh=[==0.1]; }']), + ['{ name=\"PROB\"; level=\"L0\"; prob={ name=\"NAME\"; thresh_lo=3.0; } cat_thresh=[ ==0.1 ]; }', + '{ name=\"PROB\"; level=\"L0\"; prob={ name=\"NAME\"; thresh_hi=5.0; } cat_thresh=[ ==0.1 ]; }']), # obs grib name level thresh (['NAME', 'L0', ['gt3', '<=5'], '', 'OBS'], @@ -130,11 +130,11 @@ def test_get_field_info_no_prob(metplus_config, key, value): '{ name=\"NAME\"; level=\"L0\"; cat_thresh=[ <=5 ]; }']), (['NAME', 'L0', ['gt3&<5'], '', 'FCST'], - ['{ name=\"PROB\"; level=\"L0\"; prob={ name=\"NAME\"; thresh_lo=3.0; thresh_hi=5.0; } cat_thresh=[==0.1]; }']), + ['{ name=\"PROB\"; level=\"L0\"; prob={ name=\"NAME\"; thresh_lo=3.0; thresh_hi=5.0; } cat_thresh=[ ==0.1 ]; }']), # fcst grib name py script (['/some/script/name.py args /path/of/infile.txt', '', [], '', 'FCST'], - ['{ name=\"/some/script/name.py args /path/of/infile.txt\"; prob=TRUE; cat_thresh=[==0.1]; }']), + ['{ name=\"/some/script/name.py args /path/of/infile.txt\"; prob=TRUE; cat_thresh=[ ==0.1 ]; }']), # obs name py script (['/some/script/name.py args /path/of/infile.txt', '', [], '', 'OBS'], @@ -150,11 +150,11 @@ def test_get_field_info_fcst_prob_grib_pds(metplus_config, key, value): w.c_dict['FCST_PROB_IN_GRIB_PDS'] = True w.c_dict['FCST_PROB_THRESH'] = '==0.1' - field_dict = {'v_name' : key[0], - 'v_level' : key[1], - 'v_thresh' : key[2], - 'v_extra' : key[3], - 'd_type' : key[4], + field_dict = {'v_name': key[0], + 'v_level': key[1], + 'v_thresh': key[2], + 'v_extra': key[3], + 'd_type': key[4], } fields = w.get_field_info(**field_dict) @@ -170,8 +170,8 @@ def test_get_field_info_fcst_prob_grib_pds(metplus_config, key, value): 'key, value', [ # forecast grib name level thresh (['NAME', 'L0', ['gt3', '<=5'], '', 'FCST'], - ['{ name=\"NAME\"; level=\"L0\"; prob=TRUE; cat_thresh=[==0.1]; }', - '{ name=\"NAME\"; level=\"L0\"; prob=TRUE; cat_thresh=[==0.1]; }']), + ['{ name=\"NAME\"; level=\"L0\"; prob=TRUE; cat_thresh=[ ==0.1 ]; }', + '{ name=\"NAME\"; level=\"L0\"; prob=TRUE; cat_thresh=[ ==0.1 ]; }']), # obs grib name level thresh (['NAME', 'L0', ['gt3', '<=5'], '', 'OBS'], @@ -179,11 +179,11 @@ def test_get_field_info_fcst_prob_grib_pds(metplus_config, key, value): '{ name=\"NAME\"; level=\"L0\"; cat_thresh=[ <=5 ]; }']), (['NAME', 'L0', ['gt3&<5'], '', 'FCST'], - ['{ name=\"NAME\"; level=\"L0\"; prob=TRUE; cat_thresh=[==0.1]; }']), + ['{ name=\"NAME\"; level=\"L0\"; prob=TRUE; cat_thresh=[ ==0.1 ]; }']), # fcst grib name py script (['/some/script/name.py args /path/of/infile.txt', '', [], '', 'FCST'], - ['{ name=\"/some/script/name.py args 
/path/of/infile.txt\"; prob=TRUE; cat_thresh=[==0.1]; }']), + ['{ name=\"/some/script/name.py args /path/of/infile.txt\"; prob=TRUE; cat_thresh=[ ==0.1 ]; }']), # obs name py script (['/some/script/name.py args /path/of/infile.txt', '', [], '', 'OBS'], diff --git a/internal_tests/pytests/pcp_combine/test1.conf b/internal_tests/pytests/pcp_combine/test1.conf index d75dfed61e..1fd7a2d7bd 100644 --- a/internal_tests/pytests/pcp_combine/test1.conf +++ b/internal_tests/pytests/pcp_combine/test1.conf @@ -1,6 +1,7 @@ [config] FCST_PCP_COMBINE_INPUT_ACCUMS = 6 FCST_PCP_COMBINE_INPUT_NAMES = P06M_NONE +FCST_PCP_COMBINE_INPUT_LEVELS = "(*,*)" OBS_PCP_COMBINE_INPUT_ACCUMS = 1 OBS_PCP_COMBINE_INPUT_NAMES = P01M_NONE diff --git a/internal_tests/pytests/pcp_combine/test_pcp_combine_wrapper.py b/internal_tests/pytests/pcp_combine/test_pcp_combine_wrapper.py index b3d05ff412..10a80164db 100644 --- a/internal_tests/pytests/pcp_combine/test_pcp_combine_wrapper.py +++ b/internal_tests/pytests/pcp_combine/test_pcp_combine_wrapper.py @@ -1,48 +1,20 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 import os -import sys -import re -import logging -import datetime -from collections import namedtuple +from datetime import datetime import pytest -import produtil - from metplus.wrappers.pcp_combine_wrapper import PCPCombineWrapper -from metplus.util import time_util -from metplus.util import met_util as util - -# --------------------TEST CONFIGURATION and FIXTURE SUPPORT ------------- -# -# The test configuration and fixture support the additional configuration -# files used in METplus -# !!!!!!!!!!!!!!! -# !!!IMPORTANT!!! -# !!!!!!!!!!!!!!! -# The following two methods should be included in ALL pytest tests for METplus. -# -# -#def pytest_addoption(parser): -# parser.addoption("-c", action="store", help=" -c ") - - -# @pytest.fixture -#def cmdopt(request): -# return request.config.getoption("-c") - - -# -----------------FIXTURES THAT CAN BE USED BY ALL TESTS---------------- -#@pytest.fixture +from metplus.util import ti_calculate + def pcp_combine_wrapper(metplus_config, d_type): """! Returns a default PCPCombineWrapper with /path/to entries in the metplus_system.conf and metplus_runtime.conf configuration files. Subsequent tests can customize the final METplus configuration to over-ride these /path/to values.""" - # PB2NCWrapper with configuration values determined by what is set in - # the pb2nc_test.conf file. + # PCPCombineWrapper with configuration values determined by what is set in + # the test1.conf file. 
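+    # (metplus_config is the pytest fixture that is expected to be provided
+    # by the test suite's conftest; it builds a METplusConfig object from
+    # the default config files plus any extra files passed in, so values
+    # from test1.conf override the defaults here)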
extra_configs = [] extra_configs.append(os.path.join(os.path.dirname(__file__), 'test1.conf')) config = metplus_config(extra_configs) @@ -53,208 +25,155 @@ def pcp_combine_wrapper(metplus_config, d_type): return PCPCombineWrapper(config) -# ------------------------ TESTS GO HERE -------------------------- - - -# ------------------------ -# test_search_day -# ------------------------ -# Need to have directory of test data to be able to test this functionality -# they could be empty files, they just need to exist so we can find the files - def test_get_accumulation_1_to_6(metplus_config): data_src = "OBS" pcw = pcp_combine_wrapper(metplus_config, data_src) input_dir = pcw.config.getdir('METPLUS_BASE')+"/internal_tests/data/accum" task_info = {} - task_info['valid'] = datetime.datetime.strptime("2016090418", '%Y%m%d%H') - time_info = time_util.ti_calculate(task_info) - accum = 6 - - file_template = "{valid?fmt=%Y%m%d}/file.{valid?fmt=%Y%m%d%H}.{level?fmt=%HH}h" - - pcw.input_dir = input_dir - pcw.build_input_accum_list(data_src, time_info) - - pcw.get_accumulation(time_info, accum, data_src) - in_files = pcw.infiles - if len(in_files) == 6 and \ - input_dir+"/20160904/file.2016090418.01h" in in_files and \ - input_dir+"/20160904/file.2016090417.01h" in in_files and \ - input_dir+"/20160904/file.2016090416.01h" in in_files and \ - input_dir+"/20160904/file.2016090415.01h" in in_files and \ - input_dir+"/20160904/file.2016090414.01h" in in_files and \ - input_dir+"/20160904/file.2016090413.01h" in in_files: - assert True - else: - assert False - + task_info['valid'] = datetime.strptime("2016090418", '%Y%m%d%H') + time_info = ti_calculate(task_info) + # 6 hours in seconds + accum = 6 * 3600 + + pcw.c_dict[f'{data_src}_INPUT_DIR'] = input_dir + pcw._build_input_accum_list(data_src, time_info) + + files_found = pcw.get_accumulation(time_info, accum, data_src) + in_files = [item[0] for item in files_found] + assert (len(in_files) == 6 and + input_dir+"/20160904/file.2016090418.01h" in in_files and + input_dir+"/20160904/file.2016090417.01h" in in_files and + input_dir+"/20160904/file.2016090416.01h" in in_files and + input_dir+"/20160904/file.2016090415.01h" in in_files and + input_dir+"/20160904/file.2016090414.01h" in in_files and + input_dir+"/20160904/file.2016090413.01h" in in_files) def test_get_accumulation_6_to_6(metplus_config): data_src = "FCST" pcw = pcp_combine_wrapper(metplus_config, data_src) input_dir = pcw.config.getdir('METPLUS_BASE')+"/internal_tests/data/accum" task_info = {} - task_info['valid'] = datetime.datetime.strptime("2016090418", '%Y%m%d%H') - time_info = time_util.ti_calculate(task_info) - accum = 6 + task_info['valid'] = datetime.strptime("2016090418", '%Y%m%d%H') + time_info = ti_calculate(task_info) + accum = 6 * 3600 - pcw.c_dict['FCST_INPUT_TEMPLATE'] = "{valid?fmt=%Y%m%d}/file.{valid?fmt=%Y%m%d%H}.{level?fmt=%HH}h" - - pcw.input_dir = input_dir - pcw.build_input_accum_list(data_src, time_info) + template = "{valid?fmt=%Y%m%d}/file.{valid?fmt=%Y%m%d%H}.{level?fmt=%HH}h" + pcw.c_dict['FCST_INPUT_TEMPLATE'] = template - pcw.get_accumulation(time_info, accum, data_src) - in_files = pcw.infiles - if len(in_files) == 1 and input_dir+"/20160904/file.2016090418.06h" in in_files: - assert True - else: - assert False + pcw.c_dict[f'{data_src}_INPUT_DIR'] = input_dir + pcw._build_input_accum_list(data_src, time_info) + files_found = pcw.get_accumulation(time_info, accum, data_src) + in_files = [item[0] for item in files_found] + assert (len(in_files) == 1 and + 
input_dir+"/20160904/file.2016090418.06h" in in_files) def test_get_lowest_forecast_file_dated_subdir(metplus_config): - dtype = "FCST" - pcw = pcp_combine_wrapper(metplus_config, dtype) + data_src = "FCST" + pcw = pcp_combine_wrapper(metplus_config, data_src) input_dir = pcw.config.getdir('METPLUS_BASE')+"/internal_tests/data/fcst" - valid_time = datetime.datetime.strptime("201802012100", '%Y%m%d%H%M') - template = pcw.config.getraw('filename_templates', 'FCST_PCP_COMBINE_INPUT_TEMPLATE') - pcw.input_dir = input_dir - pcw.build_input_accum_list(dtype, {'valid': valid_time}) - out_file, fcst = pcw.getLowestForecastFile(valid_time, dtype, template) - assert(out_file == input_dir+"/20180201/file.2018020118f003.nc" and fcst == 10800) + valid_time = datetime.strptime("201802012100", '%Y%m%d%H%M') + pcw.c_dict[f'{data_src}_INPUT_DIR'] = input_dir + pcw._build_input_accum_list(data_src, {'valid': valid_time}) + out_file, fcst = pcw.get_lowest_fcst_file(valid_time, data_src) + assert(out_file == input_dir+"/20180201/file.2018020118f003.nc" and + fcst == 10800) def test_forecast_constant_init(metplus_config): - dtype = "FCST" - pcw = pcp_combine_wrapper(metplus_config, dtype) + data_src = "FCST" + pcw = pcp_combine_wrapper(metplus_config, data_src) pcw.c_dict['FCST_CONSTANT_INIT'] = True input_dir = pcw.config.getdir('METPLUS_BASE')+"/internal_tests/data/fcst" - init_time = datetime.datetime.strptime("2018020112", '%Y%m%d%H') - valid_time = datetime.datetime.strptime("2018020121", '%Y%m%d%H') - template = pcw.config.getraw('filename_templates', 'FCST_PCP_COMBINE_INPUT_TEMPLATE') - pcw.input_dir = input_dir - out_file, fcst = pcw.find_input_file(template, init_time, valid_time, 0, dtype) - assert(out_file == input_dir+"/20180201/file.2018020112f009.nc" and fcst == 32400) + init_time = datetime.strptime("2018020112", '%Y%m%d%H') + valid_time = datetime.strptime("2018020121", '%Y%m%d%H') + pcw.c_dict[f'{data_src}_INPUT_DIR'] = input_dir + out_file, fcst = pcw.find_input_file(init_time, valid_time, 0, data_src) + assert(out_file == input_dir+"/20180201/file.2018020112f009.nc" and + fcst == 32400) def test_forecast_not_constant_init(metplus_config): - dtype = "FCST" - pcw = pcp_combine_wrapper(metplus_config, dtype) + data_src = "FCST" + pcw = pcp_combine_wrapper(metplus_config, data_src) pcw.c_dict['FCST_CONSTANT_INIT'] = False input_dir = pcw.config.getdir('METPLUS_BASE')+"/internal_tests/data/fcst" - init_time = datetime.datetime.strptime("2018020112", '%Y%m%d%H') - valid_time = datetime.datetime.strptime("2018020121", '%Y%m%d%H') - template = pcw.config.getraw('filename_templates', 'FCST_PCP_COMBINE_INPUT_TEMPLATE') - pcw.input_dir = input_dir - pcw.build_input_accum_list(dtype, {'valid': valid_time}) - out_file, fcst = pcw.find_input_file(template, init_time, valid_time, 0, dtype) - assert(out_file == input_dir+"/20180201/file.2018020118f003.nc" and fcst == 10800) + init_time = datetime.strptime("2018020112", '%Y%m%d%H') + valid_time = datetime.strptime("2018020121", '%Y%m%d%H') + pcw.c_dict[f'{data_src}_INPUT_DIR'] = input_dir + pcw._build_input_accum_list(data_src, {'valid': valid_time}) + out_file, fcst = pcw.find_input_file(init_time, valid_time, 0, data_src) + assert(out_file == input_dir+"/20180201/file.2018020118f003.nc" and + fcst == 10800) def test_get_lowest_forecast_file_no_subdir(metplus_config): - dtype = "FCST" - pcw = pcp_combine_wrapper(metplus_config, dtype) + data_src = "FCST" + pcw = pcp_combine_wrapper(metplus_config, data_src) input_dir = 
pcw.config.getdir('METPLUS_BASE')+"/internal_tests/data/fcst" - valid_time = datetime.datetime.strptime("201802012100", '%Y%m%d%H%M') - + valid_time = datetime.strptime("201802012100", '%Y%m%d%H%M') template = "file.{init?fmt=%Y%m%d%H}f{lead?fmt=%HHH}.nc" -# template = util.getraw(pcw.config, 'filename_templates', dtype+'_PCP_COMBINE_INPUT_TEMPLATE') - pcw.input_dir = input_dir - pcw.build_input_accum_list(dtype, {'valid': valid_time}) - out_file, fcst = pcw.getLowestForecastFile(valid_time, dtype, template) + pcw.c_dict[f'{data_src}_INPUT_TEMPLATE'] = template + pcw.c_dict[f'{data_src}_INPUT_DIR'] = input_dir + pcw._build_input_accum_list(data_src, {'valid': valid_time}) + out_file, fcst = pcw.get_lowest_fcst_file(valid_time, data_src) assert(out_file == input_dir+"/file.2018020118f003.nc" and fcst == 10800) def test_get_lowest_forecast_file_yesterday(metplus_config): - dtype = "FCST" - pcw = pcp_combine_wrapper(metplus_config, dtype) + data_src = "FCST" + pcw = pcp_combine_wrapper(metplus_config, data_src) input_dir = pcw.config.getdir('METPLUS_BASE')+"/internal_tests/data/fcst" - valid_time = datetime.datetime.strptime("201802010600", '%Y%m%d%H%M') + valid_time = datetime.strptime("201802010600", '%Y%m%d%H%M') template = "file.{init?fmt=%Y%m%d%H}f{lead?fmt=%HHH}.nc" -# template = util.getraw(pcw.config, 'filename_templates', 'FCST2_PCP_COMBINE_INPUT_TEMPLATE') - pcw.input_dir = input_dir - pcw.build_input_accum_list(dtype, {'valid': valid_time}) - out_file, fcst = pcw.getLowestForecastFile(valid_time, dtype, template) + pcw.c_dict[f'{data_src}_INPUT_TEMPLATE'] = template + pcw.c_dict[f'{data_src}_INPUT_DIR'] = input_dir + pcw._build_input_accum_list(data_src, {'valid': valid_time}) + out_file, fcst = pcw.get_lowest_fcst_file(valid_time, data_src) assert(out_file == input_dir+"/file.2018013118f012.nc" and fcst == 43200) -def test_get_daily_file(metplus_config): +def test_setup_add_method(metplus_config): data_src = "OBS" pcw = pcp_combine_wrapper(metplus_config, data_src) - time_info = {'valid' : datetime.datetime.strptime("201802010000", '%Y%m%d%H%M') } - accum = 1 - file_template = "file.{valid?fmt=%Y%m%d}.txt" - pcw.get_daily_file(time_info, accum, data_src, file_template) - -def test_setup_add_method(metplus_config): - rl = "OBS" - pcw = pcp_combine_wrapper(metplus_config, rl) task_info = {} - task_info['valid'] = datetime.datetime.strptime("2016090418", '%Y%m%d%H') - time_info = time_util.ti_calculate(task_info) - var_info = {} - var_info['fcst_name'] = "APCP" - var_info['obs_name'] = "ACPCP" - var_info['fcst_extra'] = "" - var_info['obs_extra'] = "" - var_info['fcst_level'] = "A06" - var_info['obs_level'] = "A06" + task_info['valid'] = datetime.strptime("2016090418", '%Y%m%d%H') + time_info = ti_calculate(task_info) + input_dir = pcw.config.getdir('METPLUS_BASE')+"/internal_tests/data/accum" - output_dir = pcw.config.getdir('OUTPUT_BASE')+"/internal_tests/data/fakeout" - pcw.setup_add_method(time_info, var_info, rl) + lookback = 6 * 3600 + files_found = pcw.setup_add_method(time_info, lookback, data_src) + assert files_found - in_files = pcw.infiles - out_file = pcw.get_output_path() - if len(in_files) == 6 and \ - input_dir+"/20160904/file.2016090418.01h" in in_files and \ - input_dir+"/20160904/file.2016090417.01h" in in_files and \ - input_dir+"/20160904/file.2016090416.01h" in in_files and \ - input_dir+"/20160904/file.2016090415.01h" in in_files and \ - input_dir+"/20160904/file.2016090414.01h" in in_files and \ - input_dir+"/20160904/file.2016090413.01h" in in_files and \ - 
out_file == output_dir+"/20160904/outfile.2016090418_A06h": - assert True - else: - assert False - + in_files = [item[0] for item in files_found] + print(f"Infiles: {in_files}") + assert (len(in_files) == 6 and + input_dir+"/20160904/file.2016090418.01h" in in_files and + input_dir+"/20160904/file.2016090417.01h" in in_files and + input_dir+"/20160904/file.2016090416.01h" in in_files and + input_dir+"/20160904/file.2016090415.01h" in in_files and + input_dir+"/20160904/file.2016090414.01h" in in_files and + input_dir+"/20160904/file.2016090413.01h" in in_files) # how to test? check output? def test_setup_sum_method(metplus_config): - rl = "OBS" - pcw = pcp_combine_wrapper(metplus_config, rl) + data_src = "OBS" + pcw = pcp_combine_wrapper(metplus_config, data_src) task_info = {} - task_info['valid'] = datetime.datetime.strptime("2016090418", '%Y%m%d%H') + task_info['valid'] = datetime.strptime("2016090418", '%Y%m%d%H') task_info['lead'] = 0 - time_info = time_util.ti_calculate(task_info) - var_info = {} - var_info['fcst_name'] = "APCP" - var_info['obs_name'] = "ACPCP" - var_info['fcst_extra'] = "" - var_info['obs_extra'] = "" - var_info['fcst_level'] = "A06" - var_info['obs_level'] = "A06" - input_dir = pcw.config.getdir('METPLUS_BASE')+"/internal_tests/data/accum" - output_dir = pcw.config.getdir('OUTPUT_BASE')+"/internal_tests/data/fakeout" - pcw.setup_sum_method(time_info, var_info, rl) - - in_files = pcw.infiles - out_file = pcw.get_output_path() - assert(out_file == output_dir+"/20160904/outfile.2016090418_A06h") + time_info = ti_calculate(task_info) + lookback = 6 * 3600 + assert pcw.setup_sum_method(time_info, lookback, data_src) def test_setup_subtract_method(metplus_config): - rl = "FCST" - pcw = pcp_combine_wrapper(metplus_config, rl) + data_src = "FCST" + pcw = pcp_combine_wrapper(metplus_config, data_src) task_info = {} - task_info['valid'] = datetime.datetime.strptime("201609050000", '%Y%m%d%H%M') + task_info['valid'] = datetime.strptime("201609050000", '%Y%m%d%H%M') task_info['lead_hours'] = 9 - time_info = time_util.ti_calculate(task_info) - var_info = {} - var_info['fcst_name'] = "APCP" - var_info['obs_name'] = "ACPCP" - var_info['fcst_extra'] = "" - var_info['obs_extra'] = "" - var_info['fcst_level'] = "A06" - var_info['obs_level'] = "A06" - pcw.setup_subtract_method(time_info, var_info, rl) - in_files = pcw.infiles - out_file = pcw.get_output_path() - assert(len(in_files) == 2) + time_info = ti_calculate(task_info) + lookback = 6 * 3600 + files_found = pcw.setup_subtract_method(time_info, lookback, data_src) + in_files = [item[0] for item in files_found] + + assert len(in_files) == 2 def test_pcp_combine_add_subhourly(metplus_config): fcst_name = 'A000500' @@ -299,13 +218,12 @@ def test_pcp_combine_add_subhourly(metplus_config): config.set('config', 'FCST_PCP_COMBINE_OUTPUT_ACCUM', '15M') wrapper = PCPCombineWrapper(config) - assert(wrapper.isOK) + assert wrapper.isOK app_path = os.path.join(config.getdir('MET_BIN_DIR'), wrapper.app_name) verbosity = f"-v {wrapper.c_dict['VERBOSITY']}" out_dir = wrapper.c_dict.get('FCST_OUTPUT_DIR') expected_cmds = [(f"{app_path} {verbosity} " - f"-name {fcst_output_name} " "-add " f"{fcst_input_dir}/20190802_i1800_m0_f1815.nc " f"{fcst_fmt} " @@ -313,16 +231,17 @@ def test_pcp_combine_add_subhourly(metplus_config): f"{fcst_fmt} " f"{fcst_input_dir}/20190802_i1800_m0_f1805.nc " f"{fcst_fmt} " + f'-name "{fcst_output_name}" ' f"{out_dir}/5min_mem00_lag00.nc"), ] all_cmds = wrapper.run_all_times() print(f"ALL COMMANDS: {all_cmds}") - 
assert(len(all_cmds) == len(expected_cmds)) + assert len(all_cmds) == len(expected_cmds) for (cmd, env_vars), expected_cmd in zip(all_cmds, expected_cmds): # ensure commands are generated as expected - assert(cmd == expected_cmd) + assert cmd == expected_cmd def test_pcp_combine_bucket(metplus_config): fcst_output_name = 'APCP' @@ -363,39 +282,42 @@ def test_pcp_combine_bucket(metplus_config): config.set('config', 'FCST_PCP_COMBINE_OUTPUT_ACCUM', '15H') wrapper = PCPCombineWrapper(config) - assert(wrapper.isOK) + assert wrapper.isOK app_path = os.path.join(config.getdir('MET_BIN_DIR'), wrapper.app_name) verbosity = f"-v {wrapper.c_dict['VERBOSITY']}" out_dir = wrapper.c_dict.get('FCST_OUTPUT_DIR') expected_cmds = [(f"{app_path} {verbosity} " - f"-name {fcst_output_name} " "-add " - f"{fcst_input_dir}/2012040900_F015.grib 03 " - f"{fcst_input_dir}/2012040900_F012.grib 06 " - f"{fcst_input_dir}/2012040900_F006.grib 06 " + f"{fcst_input_dir}/2012040900_F015.grib " + "'name=\"APCP\"; level=\"A03\";' " + f"{fcst_input_dir}/2012040900_F012.grib " + "'name=\"APCP\"; level=\"A06\";' " + f"{fcst_input_dir}/2012040900_F006.grib " + "'name=\"APCP\"; level=\"A06\";' " + f'-name "{fcst_output_name}" ' f"{out_dir}/2012040915_A015.nc"), ] all_cmds = wrapper.run_all_times() print(f"ALL COMMANDS: {all_cmds}") - assert(len(all_cmds) == len(expected_cmds)) + assert len(all_cmds) == len(expected_cmds) for (cmd, env_vars), expected_cmd in zip(all_cmds, expected_cmds): # ensure commands are generated as expected - assert(cmd == expected_cmd) + assert cmd == expected_cmd @pytest.mark.parametrize( - 'config_overrides, extra_fields', [ - ({}, - ''), - ({'FCST_PCP_COMBINE_EXTRA_NAMES': 'NAME1', - 'FCST_PCP_COMBINE_EXTRA_LEVELS': 'LEVEL1', }, - "-field 'name=\"NAME1\"; level=\"LEVEL1\";' "), - ({'FCST_PCP_COMBINE_EXTRA_NAMES': 'NAME1, NAME2', - 'FCST_PCP_COMBINE_EXTRA_LEVELS': 'LEVEL1, LEVEL2', }, - ("-field 'name=\"NAME1\"; level=\"LEVEL1\";' " - "-field 'name=\"NAME2\"; level=\"LEVEL2\";' ")), + 'config_overrides, extra_fields', [ + ({}, + ''), + ({'FCST_PCP_COMBINE_EXTRA_NAMES': 'NAME1', + 'FCST_PCP_COMBINE_EXTRA_LEVELS': 'LEVEL1', }, + "-field 'name=\"NAME1\"; level=\"LEVEL1\";' "), + ({'FCST_PCP_COMBINE_EXTRA_NAMES': 'NAME1, NAME2', + 'FCST_PCP_COMBINE_EXTRA_LEVELS': 'LEVEL1, LEVEL2', }, + ("-field 'name=\"NAME1\"; level=\"LEVEL1\";' " + "-field 'name=\"NAME2\"; level=\"LEVEL2\";' ")), ] ) def test_pcp_combine_derive(metplus_config, config_overrides, extra_fields): @@ -448,7 +370,7 @@ def test_pcp_combine_derive(metplus_config, config_overrides, extra_fields): config.set('config', key, value) wrapper = PCPCombineWrapper(config) - assert(wrapper.isOK) + assert wrapper.isOK app_path = os.path.join(config.getdir('MET_BIN_DIR'), wrapper.app_name) verbosity = f"-v {wrapper.c_dict['VERBOSITY']}" @@ -467,11 +389,11 @@ def test_pcp_combine_derive(metplus_config, config_overrides, extra_fields): all_cmds = wrapper.run_all_times() print(f"ALL COMMANDS: {all_cmds}") - assert(len(all_cmds) == len(expected_cmds)) + assert len(all_cmds) == len(expected_cmds) for (cmd, env_vars), expected_cmd in zip(all_cmds, expected_cmds): # ensure commands are generated as expected - assert(cmd == expected_cmd) + assert cmd == expected_cmd def test_pcp_combine_loop_custom(metplus_config): fcst_name = 'APCP' @@ -514,7 +436,7 @@ def test_pcp_combine_loop_custom(metplus_config): config.set('config', 'FCST_PCP_COMBINE_OUTPUT_NAME', fcst_name) wrapper = PCPCombineWrapper(config) - assert(wrapper.isOK) + assert wrapper.isOK app_path = 
os.path.join(config.getdir('MET_BIN_DIR'), wrapper.app_name) verbosity = f"-v {wrapper.c_dict['VERBOSITY']}" @@ -522,19 +444,20 @@ def test_pcp_combine_loop_custom(metplus_config): expected_cmds = [] for ens in ens_list: cmd = (f"{app_path} {verbosity} " - f"-name {fcst_name} " f"-add " - f"{fcst_input_dir}/{ens}/2009123112_02400.grib 24 " + f"{fcst_input_dir}/{ens}/2009123112_02400.grib " + "'name=\"APCP\"; level=\"A24\";' " + f'-name "{fcst_name}" ' f"{out_dir}/{ens}/2009123112_02400.nc") expected_cmds.append(cmd) all_cmds = wrapper.run_all_times() print(f"ALL COMMANDS: {all_cmds}") - assert(len(all_cmds) == len(expected_cmds)) + assert len(all_cmds) == len(expected_cmds) for (cmd, env_vars), expected_cmd in zip(all_cmds, expected_cmds): # ensure commands are generated as expected - assert(cmd == expected_cmd) + assert cmd == expected_cmd def test_pcp_combine_subtract(metplus_config): config = metplus_config() @@ -572,25 +495,28 @@ def test_pcp_combine_subtract(metplus_config): config.set('config', 'FCST_PCP_COMBINE_OUTPUT_NAME', 'APCP') wrapper = PCPCombineWrapper(config) - assert(wrapper.isOK) + assert wrapper.isOK app_path = os.path.join(config.getdir('MET_BIN_DIR'), wrapper.app_name) verbosity = f"-v {wrapper.c_dict['VERBOSITY']}" out_dir = wrapper.c_dict.get('FCST_OUTPUT_DIR') expected_cmds = [(f"{app_path} {verbosity} " f"-subtract " - f"{fcst_input_dir}/2005080700/18.tm00_G212 18 " - f"{fcst_input_dir}/2005080700/15.tm00_G212 15 " + f"{fcst_input_dir}/2005080700/18.tm00_G212 " + "'name=\"APCP\"; level=\"A18\";' " + f"{fcst_input_dir}/2005080700/15.tm00_G212 " + "'name=\"APCP\"; level=\"A15\";' " + '-name "APCP" ' f"{out_dir}/2005080718_A003.nc"), ] all_cmds = wrapper.run_all_times() print(f"ALL COMMANDS: {all_cmds}") - assert(len(all_cmds) == len(expected_cmds)) + assert len(all_cmds) == len(expected_cmds) for (cmd, env_vars), expected_cmd in zip(all_cmds, expected_cmds): # ensure commands are generated as expected - assert(cmd == expected_cmd) + assert cmd == expected_cmd def test_pcp_combine_sum_subhourly(metplus_config): fcst_name = 'A000500' @@ -635,7 +561,7 @@ def test_pcp_combine_sum_subhourly(metplus_config): config.set('config', 'FCST_PCP_COMBINE_OUTPUT_ACCUM', '15M') wrapper = PCPCombineWrapper(config) - assert(wrapper.isOK) + assert wrapper.isOK app_path = os.path.join(config.getdir('MET_BIN_DIR'), wrapper.app_name) verbosity = f"-v {wrapper.c_dict['VERBOSITY']}" @@ -644,83 +570,222 @@ def test_pcp_combine_sum_subhourly(metplus_config): "-sum " "20190802_180000 000500 " "20190802_181500 001500 " + f"-pcpdir {fcst_input_dir} " + f"-pcprx 20190802_i1800_m0_f* " f"{fcst_fmt} " f"-name \"{fcst_output_name}\" " - f"{out_dir}/5min_mem00_lag00.nc " - f"-pcpdir {fcst_input_dir} " - f"-pcprx 20190802_i1800_m0_f*"), + f"{out_dir}/5min_mem00_lag00.nc"), ] all_cmds = wrapper.run_all_times() print(f"ALL COMMANDS: {all_cmds}") - assert(len(all_cmds) == len(expected_cmds)) + assert len(all_cmds) == len(expected_cmds) for (cmd, env_vars), expected_cmd in zip(all_cmds, expected_cmds): # ensure commands are generated as expected - assert(cmd == expected_cmd) + assert cmd == expected_cmd @pytest.mark.parametrize( - 'output_name,extra_output,expected_result', [ - (None, None, None), - ('out_name1', None, '"out_name1"'), - ('out_name1', '"out_name2"', '"out_name1","out_name2"'), - ('out_name1', '"out_name2","out_name3"', - '"out_name1","out_name2","out_name3"'), + 'output_name,extra_output,expected_results', [ + # 0 + ('', [''], []), + # 1 + ('out_name1', None, ['-name "out_name1"']), + # 2 
+ ('out_name1', ['out_name2'], ['-name "out_name1","out_name2"']), + # 3 + ('out_name1', ['out_name2', 'out_name3'], + ['-name "out_name1","out_name2","out_name3"']), ] ) -def test_get_output_string(metplus_config, output_name, extra_output, - expected_result): +def test_handle_name_argument(metplus_config, output_name, extra_output, + expected_results): + data_src = 'FCST' config = metplus_config() wrapper = PCPCombineWrapper(config) - wrapper.output_name = output_name - wrapper.extra_output = extra_output - actual_result = wrapper.get_output_string() - assert(actual_result == expected_result) + wrapper.c_dict[data_src + '_EXTRA_OUTPUT_NAMES'] = extra_output + wrapper._handle_name_argument(output_name, data_src) + actual_results = wrapper.args + print(f"Actual: {actual_results}") + print(f"Expected: {expected_results}") + assert len(actual_results) == len(expected_results) + for index, expected_result in enumerate(expected_results): + assert actual_results[index] == expected_result @pytest.mark.parametrize( - 'names,levels,out_names,expected_input,expected_output', [ - # none specified - ('', '', '', - None, None), - # 1 input name, no level - ('input1', '', '', - "-field 'name=\"input1\";'", None), - # 1 input name, 1 level - ('input1', 'level1', '', - "-field 'name=\"input1\"; level=\"level1\";'", None), - # 2 input names, no levels - ('input1,input2', '', '', - "-field 'name=\"input1\";' -field 'name=\"input2\";'", None), - # 2 input names, 2 levels - ('input1,input2', 'level1,level2', '', - ("-field 'name=\"input1\"; level=\"level1\";' " - "-field 'name=\"input2\"; level=\"level2\";'"), None), - # 2 input names, 1 level - ('input1,input2', 'level1', '', - ("-field 'name=\"input1\"; level=\"level1\";' " - "-field 'name=\"input2\";'"), - None), - # 1 input name, 1 level, 1 output - ('input1', 'level1', 'output1', - "-field 'name=\"input1\"; level=\"level1\";'", '"output1"'), - # 2 input names, 2 levels, 2 outputs - ('input1,input2', 'level1,level2', 'output1,output2', - ("-field 'name=\"input1\"; level=\"level1\";' " - "-field 'name=\"input2\"; level=\"level2\";'"), - '"output1","output2"'), + 'names,levels,expected_args', [ + # 0: none specified + ('', '', + []), + # 1: 1 input name, no level + ('input1', '', + ["-field 'name=\"input1\";'"]), + # 2: 1 input name, 1 level + ('input1', 'level1', + ["-field 'name=\"input1\"; level=\"level1\";'"]), + # 3: 2 input names, no levels + ('input1,input2', '', + ["-field 'name=\"input1\";'", "-field 'name=\"input2\";'"]), + # 4: 2 input names, 2 levels + ('input1,input2', 'level1,level2', + ["-field 'name=\"input1\"; level=\"level1\";'", + "-field 'name=\"input2\"; level=\"level2\";'"]), + # 5: 2 input names, 1 level + ('input1,input2', 'level1', + ["-field 'name=\"input1\"; level=\"level1\";'", + "-field 'name=\"input2\";'"]), ] ) -def test_get_extra_fields(metplus_config, names, levels, out_names, - expected_input, expected_output): +def test_get_extra_fields(metplus_config, names, levels, expected_args): + data_src = 'FCST' config = metplus_config() config.set('config', 'FCST_PCP_COMBINE_RUN', True) + config.set('config', 'FCST_PCP_COMBINE_METHOD', 'ADD') config.set('config', 'FCST_PCP_COMBINE_EXTRA_NAMES', names) config.set('config', 'FCST_PCP_COMBINE_EXTRA_LEVELS', levels) - config.set('config', 'FCST_PCP_COMBINE_EXTRA_OUTPUT_NAMES', out_names) wrapper = PCPCombineWrapper(config) - actual_input, actual_output = wrapper.get_extra_fields('FCST') - assert(actual_input == expected_input) - assert (actual_output == expected_output) + 
wrapper._handle_extra_field_arguments(data_src) + wrapper._handle_name_argument('', data_src) + for index, expected_arg in enumerate(expected_args): + assert wrapper.args[index] == expected_arg + +def test_add_method_single_file(metplus_config): + data_src = 'FCST' + config = metplus_config() + config.set('config', 'DO_NOT_RUN_EXE', True) + config.set('config', 'INPUT_MUST_EXIST', False) + + # set process and time config variables + config.set('config', 'PROCESS_LIST', 'PCPCombine') + config.set('config', 'LOOP_BY', 'INIT') + config.set('config', 'INIT_TIME_FMT', '%Y%m%d%H%M') + config.set('config', 'INIT_BEG', '2019100200') + config.set('config', 'INIT_END', '2019100200') + config.set('config', 'INIT_INCREMENT', '3H') + config.set('config', 'LEAD_SEQ', '24,27,30') + config.set('config', 'LOOP_ORDER', 'times') + + config.set('config', 'FCST_PCP_COMBINE_RUN', True) + config.set('config', 'FCST_PCP_COMBINE_METHOD', 'ADD') + config.set('config', 'FCST_PCP_COMBINE_CONSTANT_INIT', True) + config.set('config', 'FCST_PCP_COMBINE_INPUT_DIR', '/some/input/dir') + config.set('config', 'FCST_PCP_COMBINE_INPUT_TEMPLATE', + '{init?fmt=%Y%m%d}_prec_1hracc_75hrfcst_e00.nc') + config.set('config', 'FCST_PCP_COMBINE_OUTPUT_DIR', '/some/output/dir') + config.set('config', 'FCST_PCP_COMBINE_OUTPUT_TEMPLATE', + '{valid?fmt=%Y%m%d%H}_prec_{level?fmt=%H}hracc_e00.nc') + config.set('config', 'FCST_PCP_COMBINE_INPUT_ACCUMS', '1H') + config.set('config', 'FCST_PCP_COMBINE_INPUT_NAMES', 'rf') + config.set('config', 'FCST_PCP_COMBINE_INPUT_LEVELS', + '"({valid?fmt=%Y%m%d_%H},*,*)"') + config.set('config', 'FCST_PCP_COMBINE_OUTPUT_ACCUM', '3H') + + wrapper = PCPCombineWrapper(config) + assert wrapper.isOK + + all_cmds = wrapper.run_all_times() + + app_path = os.path.join(config.getdir('MET_BIN_DIR'), wrapper.app_name) + verbosity = f"-v {wrapper.c_dict['VERBOSITY']}" + out_dir = wrapper.c_dict.get('FCST_OUTPUT_DIR') + in_file = (f"{wrapper.c_dict.get('FCST_INPUT_DIR')}/" + "20191002_prec_1hracc_75hrfcst_e00.nc") + expected_cmds = [ + (f"{app_path} {verbosity} -add " + f"{in_file} 'name=\"rf\"; level=\"(20191003_00,*,*)\";' " + f"{in_file} 'name=\"rf\"; level=\"(20191002_23,*,*)\";' " + f"{in_file} 'name=\"rf\"; level=\"(20191002_22,*,*)\";' " + f"{out_dir}/2019100300_prec_03hracc_e00.nc"), + (f"{app_path} {verbosity} -add " + f"{in_file} 'name=\"rf\"; level=\"(20191003_03,*,*)\";' " + f"{in_file} 'name=\"rf\"; level=\"(20191003_02,*,*)\";' " + f"{in_file} 'name=\"rf\"; level=\"(20191003_01,*,*)\";' " + f"{out_dir}/2019100303_prec_03hracc_e00.nc"), + (f"{app_path} {verbosity} -add " + f"{in_file} 'name=\"rf\"; level=\"(20191003_06,*,*)\";' " + f"{in_file} 'name=\"rf\"; level=\"(20191003_05,*,*)\";' " + f"{in_file} 'name=\"rf\"; level=\"(20191003_04,*,*)\";' " + f"{out_dir}/2019100306_prec_03hracc_e00.nc"), + ] + + assert len(all_cmds) == len(expected_cmds) + + for (cmd, env_vars), expected_cmd in zip(all_cmds, expected_cmds): + # ensure commands are generated as expected + assert cmd == expected_cmd + +def test_subtract_method_zero_accum(metplus_config): + data_src = 'FCST' + input_name = 'stratiform_rainfall_amount' + input_level = '"(*,*)"' + in_dir = '/some/input/dir' + out_dir = '/some/output/dir' + config = metplus_config() + config.set('config', 'DO_NOT_RUN_EXE', True) + config.set('config', 'INPUT_MUST_EXIST', False) + + # set process and time config variables + config.set('config', 'PROCESS_LIST', 'PCPCombine') + config.set('config', 'LOOP_BY', 'INIT') + config.set('config', 'INIT_TIME_FMT', '%Y%m%d%H%M') + 
config.set('config', 'INIT_BEG', '2019100200') + config.set('config', 'INIT_END', '2019100200') + config.set('config', 'INIT_INCREMENT', '3H') + config.set('config', 'LEAD_SEQ', '1') + config.set('config', 'LOOP_ORDER', 'times') + + config.set('config', 'FCST_PCP_COMBINE_RUN', True) + config.set('config', 'FCST_PCP_COMBINE_METHOD', 'SUBTRACT') + config.set('config', 'FCST_PCP_COMBINE_INPUT_DIR', in_dir) + config.set('config', 'FCST_PCP_COMBINE_INPUT_TEMPLATE', + '{init?fmt=%Y%m%dT%H%M}Z_pverb{lead?fmt=%3H}.nc') + config.set('config', 'FCST_PCP_COMBINE_OUTPUT_DIR', out_dir) + config.set('config', 'FCST_PCP_COMBINE_OUTPUT_TEMPLATE', + '{init?fmt=%Y%m%d%H}_f{level?fmt=%3H}.nc') + config.set('config', 'FCST_PCP_COMBINE_INPUT_ACCUMS', '1H') + config.set('config', 'FCST_PCP_COMBINE_OUTPUT_ACCUM', '1H') + config.set('config', 'FCST_PCP_COMBINE_OUTPUT_NAME', input_name) + + expected_cmds_dict = {} + expected_cmds_dict['NETCDF'] = [ + (f"-subtract " + f"{in_dir}/20191002T0000Z_pverb001.nc " + f"'name=\"{input_name}\"; level={input_level};' " + f"{in_dir}/20191002T0000Z_pverb000.nc " + f"'name=\"{input_name}\"; level={input_level};' " + f"-name \"{input_name}\" " + f"{out_dir}/2019100200_f001.nc"), + ] + expected_cmds_dict['GRIB'] = [ + (f"-add " + f"{in_dir}/20191002T0000Z_pverb001.nc " + "'name=\"APCP\"; level=\"A01\";' " + f"-name \"{input_name}\" " + f"{out_dir}/2019100200_f001.nc" + ), + ] + + for data_type in ['GRIB', 'NETCDF']: + config.set('config', 'FCST_PCP_COMBINE_INPUT_DATATYPE', data_type) + + if data_type == 'NETCDF': + config.set('config', 'FCST_PCP_COMBINE_INPUT_NAMES', input_name) + config.set('config', 'FCST_PCP_COMBINE_INPUT_LEVELS', input_level) + + wrapper = PCPCombineWrapper(config) + assert wrapper.isOK + + all_cmds = wrapper.run_all_times() + + app_path = os.path.join(config.getdir('MET_BIN_DIR'), wrapper.app_name) + verbosity = f"-v {wrapper.c_dict['VERBOSITY']}" + expected_cmds = [f"{app_path} {verbosity} {item}" + for item in expected_cmds_dict[data_type]] + assert len(all_cmds) == len(expected_cmds) + + for (cmd, env_vars), expected_cmd in zip(all_cmds, expected_cmds): + # ensure commands are generated as expected + assert cmd == expected_cmd diff --git a/internal_tests/pytests/point_stat/test_point_stat_wrapper.py b/internal_tests/pytests/point_stat/test_point_stat_wrapper.py index d9672498a7..7155d1d1b8 100755 --- a/internal_tests/pytests/point_stat/test_point_stat_wrapper.py +++ b/internal_tests/pytests/point_stat/test_point_stat_wrapper.py @@ -238,6 +238,9 @@ def test_met_dictionary_in_var_options(metplus_config): ({'POINT_STAT_OUTPUT_FLAG_MPR': 'BOTH', }, {'METPLUS_OUTPUT_FLAG_DICT': 'output_flag = {mpr = BOTH;}'}), + ({'POINT_STAT_OUTPUT_FLAG_ORANK': 'BOTH', }, + {'METPLUS_OUTPUT_FLAG_DICT': 'output_flag = {orank = BOTH;}'}), + ({ 'POINT_STAT_OUTPUT_FLAG_FHO': 'BOTH', 'POINT_STAT_OUTPUT_FLAG_CTC': 'BOTH', @@ -258,9 +261,10 @@ def test_met_dictionary_in_var_options(metplus_config): 'POINT_STAT_OUTPUT_FLAG_RPS': 'BOTH', 'POINT_STAT_OUTPUT_FLAG_ECLV': 'BOTH', 'POINT_STAT_OUTPUT_FLAG_MPR': 'BOTH', + 'POINT_STAT_OUTPUT_FLAG_ORANK': 'BOTH', }, { - 'METPLUS_OUTPUT_FLAG_DICT': 'output_flag = {fho = BOTH;ctc = BOTH;cts = BOTH;mctc = BOTH;mcts = BOTH;cnt = BOTH;sl1l2 = BOTH;sal1l2 = BOTH;vl1l2 = BOTH;val1l2 = BOTH;vcnt = BOTH;pct = BOTH;pstd = BOTH;pjc = BOTH;prc = BOTH;ecnt = BOTH;rps = BOTH;eclv = BOTH;mpr = BOTH;}'}), + 'METPLUS_OUTPUT_FLAG_DICT': 'output_flag = {fho = BOTH;ctc = BOTH;cts = BOTH;mctc = BOTH;mcts = BOTH;cnt = BOTH;sl1l2 = BOTH;sal1l2 = BOTH;vl1l2 = 
BOTH;val1l2 = BOTH;vcnt = BOTH;pct = BOTH;pstd = BOTH;pjc = BOTH;prc = BOTH;ecnt = BOTH;rps = BOTH;eclv = BOTH;mpr = BOTH;orank = BOTH;}'}), ({'POINT_STAT_INTERP_VLD_THRESH': '0.5', }, {'METPLUS_INTERP_DICT': 'interp = {vld_thresh = 0.5;}'}), diff --git a/internal_tests/pytests/tc_pairs/test_tc_pairs_wrapper.py b/internal_tests/pytests/tc_pairs/test_tc_pairs_wrapper.py index 21c00850ed..5696553914 100644 --- a/internal_tests/pytests/tc_pairs/test_tc_pairs_wrapper.py +++ b/internal_tests/pytests/tc_pairs/test_tc_pairs_wrapper.py @@ -313,10 +313,10 @@ def test_tc_pairs_storm_id_lists(metplus_config, config_overrides, {'METPLUS_DLAND_FILE': 'dland_file = "my_dland.nc";'}), # 8: init_exc ({'TC_PAIRS_INIT_EXCLUDE': '20141031_14'}, - {'METPLUS_INIT_EXCLUDE': 'init_exc = ["20141031_14"];'}), + {'METPLUS_INIT_EXC': 'init_exc = ["20141031_14"];'}), # 9: init_inc ({'TC_PAIRS_INIT_INCLUDE': '20141031_14'}, - {'METPLUS_INIT_INCLUDE': 'init_inc = ["20141031_14"];'}), + {'METPLUS_INIT_INC': 'init_inc = ["20141031_14"];'}), # 10: storm name ({'TC_PAIRS_STORM_NAME': 'KATRINA, OTHER'}, {'METPLUS_STORM_NAME': 'storm_name = ["KATRINA", "OTHER"];'}), @@ -352,6 +352,15 @@ def test_tc_pairs_storm_id_lists(metplus_config, config_overrides, '{name = "name2";members = ["member2a", "member2b"];' 'required = [false, true];min_req = 2;}];' )}), + # 15: valid_exc + ({'TC_PAIRS_VALID_EXCLUDE': '20141031_14'}, + {'METPLUS_VALID_EXC': 'valid_exc = ["20141031_14"];'}), + # 16: valid_inc + ({'TC_PAIRS_VALID_INCLUDE': '20141031_14'}, + {'METPLUS_VALID_INC': 'valid_inc = ["20141031_14"];'}), + # 17: write_valid + ({'TC_PAIRS_WRITE_VALID': '20141031_14'}, + {'METPLUS_WRITE_VALID': 'write_valid = ["20141031_14"];'}), ] ) def test_tc_pairs_loop_order_processes(metplus_config, config_overrides, diff --git a/internal_tests/use_cases/all_use_cases.txt b/internal_tests/use_cases/all_use_cases.txt index e8242d6623..b686d5441f 100644 --- a/internal_tests/use_cases/all_use_cases.txt +++ b/internal_tests/use_cases/all_use_cases.txt @@ -55,6 +55,8 @@ Category: met_tool_wrapper 53::METdbLoad::met_tool_wrapper/METdbLoad/METdbLoad.conf:: metdatadb_env,metviewer 54::ExtractTiles_mtd::met_tool_wrapper/ExtractTiles/ExtractTiles_mtd.conf 55::GFDLTracker_TC::met_tool_wrapper/GFDLTracker/GFDLTracker_TC.conf::gfdl-tracker_env +56::GFDLTracker_ETC::met_tool_wrapper/GFDLTracker/GFDLTracker_ETC.conf::gfdl-tracker_env +57::GFDLTracker_Genesis::met_tool_wrapper/GFDLTracker/GFDLTracker_Genesis.conf::gfdl-tracker_env Category: air_quality_and_comp 0::EnsembleStat_fcstICAP_obsMODIS_aod::model_applications/air_quality_and_comp/EnsembleStat_fcstICAP_obsMODIS_aod.conf diff --git a/metplus/util/met_util.py b/metplus/util/met_util.py index e4fba1f83f..ba3e6be179 100644 --- a/metplus/util/met_util.py +++ b/metplus/util/met_util.py @@ -801,6 +801,9 @@ def skip_time(time_info, skip_times): @param skip_times dictionary of times to skip, i.e. {'%d': [31]} means skip 31st day @returns True if run time should be skipped, False if not """ + if not skip_times: + return False + for time_format, skip_time_list in skip_times.items(): # extract time information from valid time based on skip time format run_time_value = time_info.get('valid') @@ -2468,15 +2471,28 @@ def sub_var_list(var_list, time_info): return out_var_list def split_level(level): - level_type = "" + """! If level value starts with a letter, then separate that letter from + the rest of the string. i.e. 'A03' will be returned as 'A', '03'. 
If no + level type letter is found and the level value consists of alpha-numeric + characters, return an empty string as the level type and the full level + string as the level value + + @param level input string to parse/split + @returns tuple of level type and level value + """ if not level: return '', '' - match = re.match(r'^(\w)(\d+)$', level) + + match = re.match(r'^([a-zA-Z])(\w+)$', level) if match: level_type = match.group(1) level = match.group(2) return level_type, level + match = re.match(r'^[\w]+$', level) + if match: + return '', level + return '', '' def remove_quotes(input_string): @@ -2703,10 +2719,12 @@ def preprocess_file(filename, data_type, config, allow_dir=False): if os.path.isfile(outpath): return outpath - # Create staging area if it does not exist - outdir = os.path.dirname(outpath) - if not os.path.exists(outdir): - os.makedirs(outdir, mode=0o0775) + # Create staging area directory only if file has compression extension + valid_extensions = ['gz', 'bz2', 'zip'] + if any([os.path.isfile(f'{filename}.{ext}') for ext in valid_extensions]): + outdir = os.path.dirname(outpath) + if not os.path.exists(outdir): + os.makedirs(outdir, mode=0o0775) # uncompress gz, bz2, or zip file if os.path.isfile(filename+".gz"): @@ -2735,9 +2753,13 @@ def preprocess_file(filename, data_type, config, allow_dir=False): f.write(z.read(os.path.basename(filename))) return outpath + # if input doesn't need to exist, return filename + if not config.getbool('config', 'INPUT_MUST_EXIST', True): + return filename + return None -def template_to_regex(template, time_info, logger): +def template_to_regex(template, time_info): in_template = re.sub(r'\.', '\\.', template) in_template = re.sub(r'{lead.*?}', '.*', in_template) return do_string_sub(in_template, diff --git a/metplus/wrappers/command_builder.py b/metplus/wrappers/command_builder.py index 343333010c..61086096c2 100755 --- a/metplus/wrappers/command_builder.py +++ b/metplus/wrappers/command_builder.py @@ -952,7 +952,8 @@ def find_and_check_output_file(self, time_info=None, return False # create full output dir if it doesn't already exist - if not os.path.exists(parent_dir): + if (not os.path.exists(parent_dir) and + not self.c_dict.get('DO_NOT_RUN_EXE', False)): self.logger.debug(f"Creating output directory: {parent_dir}") os.makedirs(parent_dir) @@ -1090,7 +1091,8 @@ def check_for_python_embedding(self, input_type, var_info): self.env_var_dict[f'METPLUS_{input_type}_FILE_TYPE'] = file_type return file_ext - def get_field_info(self, d_type, v_name, v_level='', v_thresh=[], v_extra=''): + def get_field_info(self, d_type='', v_name='', v_level='', v_thresh=None, + v_extra='', add_curly_braces=True): """! Format field information into format expected by MET config file Args: @param v_level level of data to extract @@ -1098,115 +1100,94 @@ def get_field_info(self, d_type, v_name, v_level='', v_thresh=[], v_extra=''): @param v_name name of field to process @param v_extra additional field information to add if available @param d_type type of data to find i.e. FCST or OBS + @param add_curly_braces if True, add curly braces around each + field info string. 
If False, add single quotes around each + field info string (defaults to True) @rtype string @return Returns formatted field information """ - # separate character from beginning of numeric level value if applicable - _, level = util.split_level(v_level) + # if thresholds are set + if v_thresh: + # if neither fcst or obs are probabilistic, + # pass in all thresholds as a comma-separated list for 1 field info + if (not self.c_dict.get('FCST_IS_PROB', False) and + not self.c_dict.get('OBS_IS_PROB', False)): + thresholds = [','.join(v_thresh)] + else: + thresholds = v_thresh + # if no thresholds are specified, fail if prob field is in grib PDS + elif (self.c_dict.get(d_type + '_IS_PROB', False) and + self.c_dict.get(d_type + '_PROB_IN_GRIB_PDS', False) and + not util.is_python_script(v_name)): + self.log_error('No threshold was specified for probabilistic ' + 'forecast GRIB data') + return None + else: + thresholds = [None] # list to hold field information fields = [] - # get cat thresholds if available - cat_thresh = "" - threshs = [None] - if len(v_thresh) != 0: - threshs = v_thresh - cat_thresh = "cat_thresh=[ " + ','.join(threshs) + " ];" + for thresh in thresholds: + if (self.c_dict.get(d_type + '_PROB_IN_GRIB_PDS', False) and + not util.is_python_script(v_name)): + field = self._handle_grib_pds_field_info(v_name, v_level, + thresh) + else: + # add field name + field = f'name="{v_name}";' - # if neither input is probabilistic, add all cat thresholds to same field info item - if not self.c_dict.get('FCST_IS_PROB', False) and not self.c_dict.get('OBS_IS_PROB', False): - field_name = v_name + if v_level: + field += f' level="{util.remove_quotes(v_level)}";' - field = "{ name=\"" + field_name + "\";" + if self.c_dict.get(d_type + '_IS_PROB', False): + field += " prob=TRUE;" - # add level if it is set - if v_level: - field += " level=\"" + util.remove_quotes(v_level) + "\";" + # handle cat_thresh + if self.c_dict.get(d_type + '_IS_PROB', False): + # add probabilistic cat thresh if different from default ==0.1 + cat_thresh = self.c_dict.get(d_type + '_PROB_THRESH') + else: + cat_thresh = thresh - # add threshold if it is set if cat_thresh: - field += ' ' + cat_thresh + field += f" cat_thresh=[ {cat_thresh} ];" - # add extra info if it is set + # handle extra options if set if v_extra: - field += ' ' + v_extra + field += f' {v_extra}' - field += ' }' - fields.append(field) - - # if either input is probabilistic, create separate item for each threshold - else: - - # if input currently being processed if probabilistic, format accordingly - if self.c_dict.get(d_type + '_IS_PROB', False): - # if probabilistic data for either fcst or obs, thresholds are required - # to be specified or no field items will be created. 
Create a field dict - # item for each threshold value - for thresh in threshs: - # if utilizing python embedding for prob input, just set the - # field name to the call to the script - if util.is_python_script(v_name): - field = "{ name=\"" + v_name + "\"; prob=TRUE;" - elif self.c_dict[d_type + '_INPUT_DATATYPE'] == 'NETCDF' or \ - not self.c_dict[d_type + '_PROB_IN_GRIB_PDS']: - field = "{ name=\"" + v_name + "\";" - if v_level: - field += " level=\"" + util.remove_quotes(v_level) + "\";" - field += " prob=TRUE;" - else: - # a threshold value is required for GRIB prob DICT data - if thresh is None: - self.log_error('No threshold was specified for probabilistic ' - 'forecast GRIB data') - return None - - thresh_str = "" - thresh_tuple_list = util.get_threshold_via_regex(thresh) - for comparison, number in thresh_tuple_list: - # skip adding thresh_lo or thresh_hi if comparison is NA - if comparison == 'NA': - continue - - if comparison in ["gt", "ge", ">", ">=", "==", "eq"]: - thresh_str += "thresh_lo=" + str(number) + "; " - if comparison in ["lt", "le", "<", "<=", "==", "eq"]: - thresh_str += "thresh_hi=" + str(number) + "; " - - field = "{ name=\"PROB\"; level=\"" + v_level + \ - "\"; prob={ name=\"" + v_name + \ - "\"; " + thresh_str + "}" - - # add probabilistic cat thresh if different from default ==0.1 - prob_cat_thresh = self.c_dict.get(d_type + '_PROB_THRESH') - if prob_cat_thresh: - field += " cat_thresh=[" + prob_cat_thresh + "];" - - if v_extra: - field += ' ' + v_extra - - field += ' }' - fields.append(field) + # add curly braces around field info + if add_curly_braces: + field = f'{{ {field} }}' + # otherwise add single quotes around field info else: - field_name = v_name + field = f"'{field}'" - for thresh in threshs: - field = "{ name=\"" + field_name + "\";" + # add field info string to list of fields + fields.append(field) - if v_level: - field += " level=\"" + util.remove_quotes(v_level) + "\";" + # return list of field dictionary items + return fields - if thresh is not None: - field += " cat_thresh=[ " + str(thresh) + " ];" + def _handle_grib_pds_field_info(self, v_name, v_level, thresh): - if v_extra: - field += ' ' + v_extra + field = f'name="PROB"; level="{v_level}"; prob={{ name="{v_name}";' - field += ' }' - fields.append(field) + if thresh: + thresh_tuple_list = util.get_threshold_via_regex(thresh) + for comparison, number in thresh_tuple_list: + # skip adding thresh_lo or thresh_hi if comparison is NA + if comparison == 'NA': + continue - # return list of field dictionary items - return fields + if comparison in ["gt", "ge", ">", ">=", "==", "eq"]: + field = f"{field} thresh_lo={number};" + if comparison in ["lt", "le", "<", "<=", "==", "eq"]: + field = f"{field} thresh_hi={number};" + + # add closing curly brace for prob= + return f'{field} }}' def read_mask_poly(self): """! 
Read old or new config variables used to set mask.poly in MET @@ -1311,7 +1292,7 @@ def run_command(self, cmd, cmd_name=None): """ # add command to list of all commands run self.all_commands.append((cmd, - self.print_all_envs(print_copyable=False))) + self.print_all_envs(print_copyable=True))) log_name = cmd_name if cmd_name else self.log_name diff --git a/metplus/wrappers/gfdl_tracker_wrapper.py b/metplus/wrappers/gfdl_tracker_wrapper.py index 761c8fdf89..19796bade0 100755 --- a/metplus/wrappers/gfdl_tracker_wrapper.py +++ b/metplus/wrappers/gfdl_tracker_wrapper.py @@ -12,6 +12,8 @@ import os import shutil +import glob +from dateutil.relativedelta import relativedelta from ..util import do_string_sub, ti_calculate, get_lead_sequence from ..util import remove_quotes, parse_template @@ -47,6 +49,10 @@ class GFDLTrackerWrapper(CommandBuilder): "TRACKERINFO_G1_MSLP_PARM_ID": "int", "TRACKERINFO_G1_SFCWIND_LEV_TYP": "int", "TRACKERINFO_G1_SFCWIND_LEV_VAL": "int", + "TRACKERINFO_WESTBD": "int", + "TRACKERINFO_EASTBD": "int", + "TRACKERINFO_SOUTHBD": "int", + "TRACKERINFO_NORTHBD": "int", "PHASEINFO_PHASEFLAG": "bool", "PHASEINFO_PHASESCHEME": "string", "PHASEINFO_WCORE_DEPTH": "float", @@ -176,12 +182,25 @@ def create_c_dict(self): self.log_error("GFDL_TRACKER_NML_TEMPLATE_FILE does not " f"exist: {c_dict['NML_TEMPLATE_FILE']}") + c_dict['SGV_TEMPLATE_FILE'] = ( + self.config.getraw('config', 'GFDL_TRACKER_SGV_TEMPLATE_FILE', '') + ) + c_dict['OUTPUT_TEMPLATE'] = ( self.config.getraw('config', 'GFDL_TRACKER_OUTPUT_TEMPLATE', '') ) c_dict['OUTPUT_DIR'] = self.config.getdir('GFDL_TRACKER_OUTPUT_DIR', '') + # optional "gen_vitals" file that holds info about known storms + c_dict['GEN_VITALS_INPUT_TEMPLATE'] = ( + self.config.getraw('config', + 'GFDL_TRACKER_GEN_VITALS_INPUT_TEMPLATE', '') + ) + c_dict['GEN_VITALS_INPUT_DIR'] = ( + self.config.getdir('GFDL_TRACKER_GEN_VITALS_INPUT_DIR', '') + ) + # read config variables for name, input_type in self.CONFIG_NAMES.items(): if input_type == 'int': @@ -196,9 +215,17 @@ def create_c_dict(self): value = get_fct('config', f'GFDL_TRACKER_{name}', '') c_dict[f'REPLACE_CONF_{name}'] = value + c_dict['KEEP_INTERMEDIATE'] = ( + self.config.getbool('config', + 'GFDL_TRACKER_KEEP_INTERMEDIATE', + False) + ) + # allow multiple input files c_dict['ALLOW_MULTIPLE_FILES'] = True + c_dict['FIRST_RUN'] = True + if not c_dict['INPUT_TEMPLATE']: self.log_error('GFDL_TRACKER_INPUT_TEMPLATE must be set') @@ -225,6 +252,8 @@ def run_at_time(self, input_dict): input_dict['custom'] = custom_string self.run_at_time_once(input_dict) + self.c_dict['FIRST_RUN'] = False + def run_at_time_once(self, input_dict): """! 
Do some processing for the current run time (init or valid) @@ -232,9 +261,10 @@ def run_at_time_once(self, input_dict): @returns True if everything was successful, False if not """ # get all input files - all_input_files, lead_minutes = self.get_all_input_files(input_dict) + all_input_files = self.get_all_input_files(input_dict) if not all_input_files: - self.log_error("No input files found") + self.log_error("Could not find input files in " + f"{self.c_dict['INPUT_DIR']}.") return False # get TCVitals file @@ -264,35 +294,108 @@ def run_at_time_once(self, input_dict): return False # create empty fort.14 file - self.create_fort_14_file() + self.create_fort_14_file(tc_vitals_out) # create fort.15 file with list of all forecast leads and indices + lead_minutes = [item.get('lead_minutes') for item in all_input_files] self.create_fort_15_file(lead_minutes) + # if gen_vitals file is specified, copy it to fort.67 + if not self.handle_gen_vitals(input_dict): + return False + # substitute values from config into template.nml and # write input.nml to output directory - if not self.fill_output_nml_template(input_dict): + input_nml_path = self.handle_templates(input_dict) + if not input_nml_path: return False # run tracker application from output directory passing in input.nml - if not self.run_tracker(): + if not self.run_tracker(input_nml_path): return False # rename fort.64 output file to output filename template - if not self.rename_fort_64_to_output_path(input_dict): + if not self.rename_fort_to_output_path(input_dict): return False - # remove sym links from output directory - for link_path in all_output_files: - self._remove_symlink(link_path) + # check if clean up should be skipped + if self.c_dict.get('KEEP_INTERMEDIATE', False): + return True - self._remove_symlink(tc_vitals_out) + # clean up files in output directory that are no longer needed + self.cleanup_output_dir(all_output_files, + tc_vitals_out) return True + def handle_gen_vitals(self, input_dict): + self.logger.debug("Checking for gen_vitals file") + # if template not set, do nothing + template = self.c_dict['GEN_VITALS_INPUT_TEMPLATE'] + if not template: + self.logger.debug("No gen vitals file specified") + return True + + # check if file exists + filedir = self.c_dict['GEN_VITALS_INPUT_DIR'] + src_path = os.path.join(filedir, template) + src_path = do_string_sub(src_path, **input_dict) + if not os.path.exists(src_path): + self.log_error(f"Gen vitals file does not exist: {src_path}") + return False + + dest_path = os.path.join(self.c_dict['OUTPUT_DIR'], + 'tcvit_genesis_storms.txt') + try: + shutil.copyfile(src_path, dest_path) + except: + self.log_error(f"Copy failed: from {src_path} to {dest_path}") + return False + + self.logger.debug(f"Copied gen vitals file {src_path} to {dest_path}") + + # check if fort.67 already exists in output directory + # do not copy file if it does + dest_path = os.path.join(self.c_dict['OUTPUT_DIR'], + 'fort.67') + if os.path.exists(dest_path): + self.logger.debug(f"Gen vitals file already exists: {dest_path}. 
" + f"Skip copying of {src_path}") + return True + + try: + shutil.copyfile(src_path, dest_path) + except: + self.log_error(f"Copy failed: from {src_path} to {dest_path}") + return False + + self.logger.debug(f"Copied gen vitals file {src_path} to {dest_path}") + return True + + def cleanup_output_dir(self, all_output_files, tc_vitals_out): + for output_file in all_output_files: + # remove symbolic links for input files + self._remove_symlink(output_file) + + # remove index files + index_file = f'{output_file}.ix' + if os.path.exists(index_file): + os.remove(index_file) + + # remove TCVitals symbolic link + self._remove_symlink(tc_vitals_out) + + # remove all fort files + all_forts = glob.glob(os.path.join(self.c_dict.get('OUTPUT_DIR'), + f'fort.*')) + for fort_file in all_forts: + # remove symlink if link, otherwise remove file + if not self._remove_symlink(fort_file): + self.logger.debug(f'Removing {fort_file}') + os.remove(fort_file) + def get_all_input_files(self, input_dict): all_input_files = [] - lead_minutes = [] # get forecast leads to loop over lead_seq = get_lead_sequence(self.config, input_dict) @@ -305,64 +408,78 @@ def get_all_input_files(self, input_dict): time_info = ti_calculate(input_dict) input_files = self.find_data(time_info=time_info, return_list=True) + if not input_files: + return None # add input files to list unless they are index files (.ix) input_files = [input_file for input_file in input_files if not input_file.endswith('.ix')] for input_file in input_files: - all_input_files.append(input_file) - - if lead != '*': - lead_minutes.append(time_info.get('lead_minutes')) - continue - - # extract lead time from each file found via wildcard - new_lead_minutes = self._get_leads_from_template(input_files) - lead_minutes.extend(new_lead_minutes) - - return all_input_files, sorted(lead_minutes) - - def _get_leads_from_template(self, input_files): + file_time_info = self._get_time_info_from_template(input_file) + if not file_time_info: + self.log_error("Could not get time info from file: " + f"{input_file}") + continue + + rename = self._get_input_file_rename(file_time_info) + input_file_dict = { + 'filepath': input_file, + 'rename': rename, + 'lead_minutes': file_time_info.get('lead_minutes'), + } + all_input_files.append(input_file_dict) + + return all_input_files + + def _get_time_info_from_template(self, input_file): # extract lead time from each file found via wildcard - lead_minutes_list = [] template = os.path.join(self.c_dict.get('INPUT_DIR'), self.c_dict.get('INPUT_TEMPLATE')) - for input_file in input_files: - file_time_info = parse_template(template, input_file) - if file_time_info: - lead_minutes_list.append(file_time_info.get('lead_minutes')) + file_time_info = parse_template(template, input_file) + if not file_time_info: + return None - return lead_minutes_list + return file_time_info - def link_files_to_output_dir(self, all_input_files, tc_vitals_file): + def _get_input_file_rename(self, file_time_info): + gmodname = remove_quotes(self.c_dict[f'REPLACE_CONF_FNAMEINFO_GMODNAME']) + rundescr = remove_quotes(self.c_dict[f'REPLACE_CONF_FNAMEINFO_RUNDESCR']) + atcfdescr = remove_quotes(self.c_dict[f'REPLACE_CONF_FNAMEINFO_ATCFDESCR']) + template = (f"{gmodname}.{rundescr}.{atcfdescr}." 
+ "{init?fmt=%Y%m%d%H}.f{lead?fmt=%5M}") + return do_string_sub(template, **file_time_info) + + def link_files_to_output_dir(self, all_input_files, tc_vitals_src): all_output_files = [] - output_dir = self.c_dict.get('OUTPUT_DIR') # create symbolic links for input files - for src_path in all_input_files: - dest_path = self._create_symlink(src_path, output_dir) + for input_file_dict in all_input_files: + src_path = input_file_dict.get('filepath') + dest_path = os.path.join(self.c_dict.get('OUTPUT_DIR'), + input_file_dict.get('rename')) + self._create_symlink(src_path, dest_path) all_output_files.append(dest_path) # create symbolic links for TCVitals file - tc_vitals_out = self._create_symlink(tc_vitals_file, output_dir) - - return all_output_files, tc_vitals_out + tc_vitals_dest = os.path.join(self.c_dict.get('OUTPUT_DIR'), + os.path.basename(tc_vitals_src)) + self._create_symlink(tc_vitals_src, tc_vitals_dest) - def _create_symlink(self, src_path, output_dir): - src_file = os.path.basename(src_path) - dest_path = os.path.join(output_dir, src_file) + return all_output_files, tc_vitals_dest + def _create_symlink(self, src_path, dest_path): self._remove_symlink(dest_path) - self.logger.debug(f"Creating sym link in {output_dir} for {src_file}") + self.logger.debug(f"Creating sym link {dest_path} for {src_path}") os.symlink(src_path, dest_path) - return dest_path - def _remove_symlink(self, link_path): if os.path.islink(link_path): self.logger.debug(f"Removing existing symbolic link: {link_path}") os.unlink(link_path) + return True + + return False def run_grib_index(self, all_output_files): index_script = self.c_dict.get('INDEX_APP') @@ -375,11 +492,26 @@ def run_grib_index(self, all_output_files): return True - def create_fort_14_file(self): + def create_fort_14_file(self, tc_vitals_out): output_dir = self.c_dict.get('OUTPUT_DIR') fort_14_path = os.path.join(output_dir, 'fort.14') - self.logger.debug(f"Writing fort.14 file: {fort_14_path}") - with open(fort_14_path, 'w') as file_handle: + + if os.path.exists(fort_14_path): + self.logger.debug("Removing existing fort.14 file") + os.remove(fort_14_path) + + # if running in cyclogenesis mode (tcgen or midlat) use TCVitals + # file for fort.14 + run_type = remove_quotes(self.c_dict["REPLACE_CONF_TRACKERINFO_TYPE"]) + if run_type == 'tcgen' or run_type == 'midlat': + self.logger.debug("Linking TCVitals file to fort.14 " + "for cyclogenesis run") + self._create_symlink(tc_vitals_out, fort_14_path) + return + + # if not, create a blank file + self.logger.debug(f"Writing blank fort.14 file: {fort_14_path}") + with open(fort_14_path, 'w'): pass def create_fort_15_file(self, all_lead_minutes): @@ -398,16 +530,38 @@ def create_fort_15_file(self, all_lead_minutes): with open(fort_15_path, 'w') as file_handle: file_handle.write(write_content) - def fill_output_nml_template(self, input_dict): + def handle_templates(self, input_dict): template_file = self.c_dict['NML_TEMPLATE_FILE'] if not template_file: - return False + return None # set up dictionary of text to substitute in XML file sub_dict = self.populate_sub_dict(input_dict) + output_path = os.path.join(self.c_dict.get('OUTPUT_DIR'), + 'input.{init?fmt=%Y%m%d%H%M}.nml') + output_path = do_string_sub(output_path, **input_dict) + # open template file and replace any values encountered self.logger.debug(f"Reading nml template: {template_file}") + self.sub_template(template_file, output_path, sub_dict) + + # only fill out sgv template file if template is specified + # and on a 0Z run that is not the first 
run time + if (not self.c_dict['SGV_TEMPLATE_FILE'] or + self.c_dict['FIRST_RUN'] or + input_dict['init'].strftime('%H') != '00'): + return output_path + + sgv_template_file = self.c_dict['SGV_TEMPLATE_FILE'] + sgv_output_path = os.path.join(self.c_dict.get('OUTPUT_DIR'), + 'sgv.{init?fmt=%Y%m%d%H%M}.txt') + sgv_output_path = do_string_sub(sgv_output_path, **input_dict) + self.sub_template(sgv_template_file, sgv_output_path, sub_dict) + + return output_path + + def sub_template(self, template_file, output_path, sub_dict): with open(template_file, 'r') as file_handle: input_lines = file_handle.read().splitlines() @@ -420,14 +574,11 @@ def fill_output_nml_template(self, input_dict): output_lines.append(output_line) # write tmp file with XML content with substituted values - out_path = os.path.join(self.c_dict.get('OUTPUT_DIR'), - 'input.nml') - self.logger.debug(f"Writing file: {out_path}") - with open(out_path, 'w') as file_handle: + self.logger.debug(f"Writing file: {output_path}") + with open(output_path, 'w') as file_handle: for line in output_lines: file_handle.write(f'{line}\n') - return True def populate_sub_dict(self, time_info): sub_dict = {} @@ -457,25 +608,50 @@ def populate_sub_dict(self, time_info): sub_dict['METPLUS_DATEIN_INP_BHH'] = init_ymdh[8:10] sub_dict['METPLUS_ATCFINFO_ATCFYMDH'] = init_ymdh + sub_dict['METPLUS_DATENOW_YY'] = init_ymdh[0:4] + sub_dict['METPLUS_DATENOW_MM'] = init_ymdh[4:6] + sub_dict['METPLUS_DATENOW_DD'] = init_ymdh[6:8] + sub_dict['METPLUS_DATENOW_HH'] = init_ymdh[8:10] + + init_6ago = time_info['init'] - relativedelta(hours=6) + init_6ago = init_6ago.strftime('%Y%m%d%H') + sub_dict['METPLUS_DATE6AGO_YY'] = init_6ago[0:4] + sub_dict['METPLUS_DATE6AGO_MM'] = init_6ago[4:6] + sub_dict['METPLUS_DATE6AGO_DD'] = init_6ago[6:8] + sub_dict['METPLUS_DATE6AGO_HH'] = init_6ago[8:10] + + init_6ahead = time_info['init'] + relativedelta(hours=6) + init_6ahead = init_6ahead.strftime('%Y%m%d%H') + sub_dict['METPLUS_DATE6AHEAD_YY'] = init_6ahead[0:4] + sub_dict['METPLUS_DATE6AHEAD_MM'] = init_6ahead[4:6] + sub_dict['METPLUS_DATE6AHEAD_DD'] = init_6ahead[6:8] + sub_dict['METPLUS_DATE6AHEAD_HH'] = init_6ahead[8:10] + return sub_dict - def run_tracker(self): + def run_tracker(self, input_nml_path): output_dir = self.c_dict.get('OUTPUT_DIR') command = (f"cd {output_dir}; " f"{self.c_dict['TRACKER_APP']} " - f"< input.nml; " + f"< {os.path.basename(input_nml_path)}; " f"ret=$?; " f"cd -; " f"if [ $ret != 0 ]; then false; fi") return self.run_command(command) - def rename_fort_64_to_output_path(self, time_info): + def rename_fort_to_output_path(self, time_info): output_dir = self.c_dict.get('OUTPUT_DIR') + run_type = remove_quotes(self.c_dict["REPLACE_CONF_TRACKERINFO_TYPE"]) + if run_type == 'tcgen' or run_type == 'midlat': + fort_file = 'fort.66' + else: + fort_file = 'fort.64' + # check that fort.64 file was created successfully - fort_64_path = os.path.join(output_dir, 'fort.64') - if not os.path.exists(fort_64_path): - self.log_error(f"Could not find output file: {fort_64_path}") + fort_path = os.path.join(output_dir, fort_file) + if not os.path.exists(fort_path): + self.log_error(f"Could not find output file: {fort_path}") return False output_path = os.path.join(output_dir, @@ -487,10 +663,10 @@ def rename_fort_64_to_output_path(self, time_info): if not os.path.exists(parent_dir): self.logger.debug(f"Creating output directory: {parent_dir}") - # copy fort.64 file to new file name - self.logger.debug(f"Copying fort.64 file to: {output_path}") + # copy fort.64/66 file 
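The DATENOW/DATE6AGO/DATE6AHEAD substitutions built above are plain six-hour offsets computed with dateutil's relativedelta; a minimal sketch with an illustrative init time:

    from datetime import datetime
    from dateutil.relativedelta import relativedelta

    init = datetime(2021, 7, 1, 0)  # hypothetical 0Z init time
    print((init - relativedelta(hours=6)).strftime('%Y%m%d%H'))  # 2021063018
    print((init + relativedelta(hours=6)).strftime('%Y%m%d%H'))  # 2021070106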
to new file name + self.logger.debug(f"Copying {fort_file} file to: {output_path}") try: - shutil.copyfile(fort_64_path, output_path) + shutil.copyfile(fort_path, output_path) except OSError as err: self.log_error(f"Could not copy file: {err}") return False diff --git a/metplus/wrappers/pcp_combine_wrapper.py b/metplus/wrappers/pcp_combine_wrapper.py index decdec7e33..d252c10deb 100755 --- a/metplus/wrappers/pcp_combine_wrapper.py +++ b/metplus/wrappers/pcp_combine_wrapper.py @@ -1,35 +1,26 @@ ''' Program Name: pcp_combine_wrapper.py Contact(s): George McCabe -Abstract: Runs pcp_combine to merge multiple forecast files -History Log: Initial version -Usage: -Parameters: None -Input Files: grib2 files -Output Files: pcp_combine files -Condition codes: 0 for success, 1 for failure +Abstract: Builds commands to run MET tool pcp_combine ''' import os -import datetime +from datetime import timedelta from ..util import met_util as util -from ..util import time_util -from ..util import do_string_sub +from ..util import do_string_sub, getlist +from ..util import get_seconds_from_string, ti_get_lead_string, ti_calculate +from ..util import get_relativedelta, ti_get_seconds_from_relativedelta +from ..util import time_string_to_met_time, seconds_to_met_time from . import ReformatGriddedWrapper '''!@namespace PCPCombineWrapper @brief Wraps the MET tool pcp_combine to combine/divide precipitation accumulations or derive additional fields -Call as follows: -@code{.sh} -Cannot be called directly. Must use child classes. -@endcode -@todo add main function to be able to run alone via command line ''' class PCPCombineWrapper(ReformatGriddedWrapper): - """!Wraps the MET tool pcp_combine to combine or divide - precipitation accumulations""" + """! Wraps the MET tool pcp_combine to combine or divide + precipitation accumulations """ # valid values for [FCST/OBS]_PCP_COMBINE_METHOD valid_run_methods = ['ADD', 'SUM', 'SUBTRACT', 'DERIVE', 'USER_DEFINED'] @@ -41,406 +32,602 @@ def __init__(self, config, instance=None, config_overrides={}): super().__init__(config, instance=instance, config_overrides=config_overrides) - self.inaddons = [] - self.method = "" - self.pcp_dir = "" - self.pcp_regex = "" - self.init_time = -1 - self.valid_time = -1 - self.in_accum = -1 - self.out_accum = -1 - self.field_name = None - self.field_level = "" - self.output_name = "" - self.name = "" - self.compress = -1 - self.user_command = '' def create_c_dict(self): + """! 
Create dictionary from config items to be used in the wrapper + Allows developer to reference config items without having to know + the type and consolidates config get calls so it is easier to see + which config variables are used in the wrapper + + @returns dictionary of values to use in wrapper + """ c_dict = super().create_c_dict() - c_dict['VERBOSITY'] = self.config.getstr('config', 'LOG_PCP_COMBINE_VERBOSITY', + c_dict['VERBOSITY'] = self.config.getstr('config', + 'LOG_PCP_COMBINE_VERBOSITY', c_dict['VERBOSITY']) - + c_dict['ALLOW_MULTIPLE_FILES'] = True fcst_run = self.config.getbool('config', 'FCST_PCP_COMBINE_RUN', False) obs_run = self.config.getbool('config', 'OBS_PCP_COMBINE_RUN', False) if not fcst_run and not obs_run: - self.log_error("Must set either FCST_PCP_COMBINE_RUN or OBS_PCP_COMBINE_RUN") - else: - if fcst_run: - c_dict = self.set_fcst_or_obs_dict_items('FCST', c_dict) - c_dict['VAR_LIST_FCST'] = util.parse_var_list( - self.config, - data_type='FCST', - met_tool=self.app_name - ) - if obs_run: - c_dict = self.set_fcst_or_obs_dict_items('OBS', c_dict) - c_dict['VAR_LIST_OBS'] = util.parse_var_list( - self.config, - data_type='OBS', - met_tool=self.app_name - ) - - + self.log_error("Must set either FCST_PCP_COMBINE_RUN or " + "OBS_PCP_COMBINE_RUN") + return c_dict + + if fcst_run: + c_dict = self.set_fcst_or_obs_dict_items('FCST', c_dict) + c_dict['VAR_LIST_FCST'] = util.parse_var_list( + self.config, + data_type='FCST', + met_tool=self.app_name + ) + if obs_run: + c_dict = self.set_fcst_or_obs_dict_items('OBS', c_dict) + c_dict['VAR_LIST_OBS'] = util.parse_var_list( + self.config, + data_type='OBS', + met_tool=self.app_name + ) return c_dict def set_fcst_or_obs_dict_items(self, d_type, c_dict): - c_dict[d_type+'_MIN_FORECAST'] = self.config.getstr('config', d_type+'_PCP_COMBINE_MIN_FORECAST', '0') - c_dict[d_type+'_MAX_FORECAST'] = self.config.getstr('config', d_type+'_PCP_COMBINE_MAX_FORECAST', '256H') - c_dict[d_type+'_INPUT_DATATYPE'] = self.config.getstr('config', - d_type+'_PCP_COMBINE_INPUT_DATATYPE', '') - c_dict[d_type+'_DATA_INTERVAL'] = self.config.getint('config', d_type+'_PCP_COMBINE_DATA_INTERVAL', 1) - c_dict[d_type+'_TIMES_PER_FILE'] = self.config.getint('config', d_type+'_PCP_COMBINE_TIMES_PER_FILE', -1) - c_dict[d_type+'_IS_DAILY_FILE'] = self.config.getbool('config', d_type+'_PCP_COMBINE_IS_DAILY_FILE', False) - c_dict[d_type+'_ACCUMS'] = util.getlist(self.config.getraw('config', d_type+'_PCP_COMBINE_INPUT_ACCUMS', '')) - c_dict[d_type+'_NAMES'] = util.getlist(self.config.getraw('config', d_type+'_PCP_COMBINE_INPUT_NAMES', '')) - c_dict[d_type+'_LEVELS'] = util.getlist(self.config.getraw('config', d_type+'_PCP_COMBINE_INPUT_LEVELS', '')) - c_dict[d_type+'_OPTIONS'] = util.getlist(self.config.getraw('config', d_type+'_PCP_COMBINE_INPUT_OPTIONS', '')) - c_dict[d_type+'_OUTPUT_ACCUM'] = self.config.getstr('config', d_type+'_PCP_COMBINE_OUTPUT_ACCUM', '') - c_dict[d_type+'_OUTPUT_NAME'] = self.config.getstr('config', d_type+'_PCP_COMBINE_OUTPUT_NAME', '') - c_dict[d_type+'_INPUT_DIR'] = self.config.getdir(d_type+'_PCP_COMBINE_INPUT_DIR', '') - c_dict[d_type+'_INPUT_TEMPLATE'] = self.config.getraw('filename_templates', - d_type+'_PCP_COMBINE_INPUT_TEMPLATE', '') - if not c_dict[d_type+'_INPUT_TEMPLATE']: - self.log_error(d_type + "_PCP_COMBINE_INPUT_TEMPLATE required to run") - - c_dict[d_type+'_OUTPUT_DIR'] = self.config.getdir(d_type+'_PCP_COMBINE_OUTPUT_DIR', '') - c_dict[d_type+'_OUTPUT_TEMPLATE'] = self.config.getraw('filename_templates', - 
d_type+'_PCP_COMBINE_OUTPUT_TEMPLATE') - - c_dict[d_type+'_STAT_LIST'] = \ - util.getlist(self.config.getstr('config', - d_type+'_PCP_COMBINE_STAT_LIST', '')) - - run_method = \ - self.config.getstr('config', d_type+'_PCP_COMBINE_METHOD', '').upper() - - # support run method of CUSTOM, but warn and change it to USER_DEFINED + """! Set c_dict values specific to either forecast (FCST) or + observation (OBS) data. + + @param d_type data type, either FCST or OBS + @param c_dict config dictionary to populate + @returns c_dict with values for given data type set + """ + # handle run method + run_method = self.config.getstr( + 'config', + f'{d_type}_PCP_COMBINE_METHOD', '' + ).upper() + + # change CUSTOM (deprecated) to USER_DEFINED if run_method == 'CUSTOM': - self.logger.warning(f'{d_type}_PCP_COMBINE_RUN_METHOD should be set to USER_DEFINED. CUSTOM method is deprecated') run_method = 'USER_DEFINED' - c_dict[d_type+'_RUN_METHOD'] = run_method + if run_method not in self.valid_run_methods: + self.log_error(f"Invalid value for {d_type}_PCP_COMBINE_METHOD: " + f"{run_method}. Valid options are " + f"{','.join(self.valid_run_methods)}.") + return c_dict + + c_dict[f'{d_type}_RUN_METHOD'] = run_method + + # get lookback from _LOOKBACK or _OUTPUT_ACCUM or _DERIVE_LOOKBACK + c_dict[f'{d_type}_LOOKBACK'] = self._handle_lookback(c_dict, d_type) + + c_dict[f'{d_type}_MIN_FORECAST'] = self.config.getstr( + 'config', + f'{d_type}_PCP_COMBINE_MIN_FORECAST', '0' + ) + c_dict[f'{d_type}_MAX_FORECAST'] = self.config.getstr( + 'config', + f'{d_type}_PCP_COMBINE_MAX_FORECAST', '256H' + ) + + c_dict[f'{d_type}_INPUT_DATATYPE'] = self.config.getstr( + 'config', + f'{d_type}_PCP_COMBINE_INPUT_DATATYPE', '' + ) + + c_dict[f'{d_type}_ACCUMS'] = getlist( + self.config.getraw('config', + f'{d_type}_PCP_COMBINE_INPUT_ACCUMS', '') + ) + + c_dict[f'{d_type}_NAMES'] = getlist( + self.config.getraw('config', + f'{d_type}_PCP_COMBINE_INPUT_NAMES', '') + ) + c_dict[f'{d_type}_LEVELS'] = getlist( + self.config.getraw('config', + f'{d_type}_PCP_COMBINE_INPUT_LEVELS', '') + ) + c_dict[f'{d_type}_OPTIONS'] = getlist( + self.config.getraw('config', + f'{d_type}_PCP_COMBINE_INPUT_OPTIONS', '') + ) + + c_dict[f'{d_type}_OUTPUT_NAME'] = self.config.getstr( + 'config', + f'{d_type}_PCP_COMBINE_OUTPUT_NAME', '' + ) + c_dict[f'{d_type}_INPUT_DIR'] = self.config.getdir( + f'{d_type}_PCP_COMBINE_INPUT_DIR', '' + ) + c_dict[f'{d_type}_INPUT_TEMPLATE'] = self.config.getraw( + 'config', + f'{d_type}_PCP_COMBINE_INPUT_TEMPLATE' + ) - c_dict[d_type+'_DERIVE_LOOKBACK'] = \ - self.config.getstr('config', d_type+'_PCP_COMBINE_DERIVE_LOOKBACK', '0') + c_dict[f'{d_type}_OUTPUT_DIR'] = self.config.getdir( + f'{d_type}_PCP_COMBINE_OUTPUT_DIR', '' + ) + c_dict[f'{d_type}_OUTPUT_TEMPLATE'] = self.config.getraw( + 'config', + f'{d_type}_PCP_COMBINE_OUTPUT_TEMPLATE' + ) - c_dict[d_type+'_BUCKET_INTERVAL'] = self.config.getseconds('config', - d_type+'_PCP_COMBINE_BUCKET_INTERVAL', - 0) + c_dict[f'{d_type}_STAT_LIST'] = getlist( + self.config.getstr('config', + f'{d_type}_PCP_COMBINE_STAT_LIST', '') + ) - c_dict[d_type + '_CONSTANT_INIT'] = self.config.getbool('config', - d_type+'_PCP_COMBINE_CONSTANT_INIT', - False) + c_dict[f'{d_type}_BUCKET_INTERVAL'] = self.config.getseconds( + 'config', + f'{d_type}_PCP_COMBINE_BUCKET_INTERVAL', 0 + ) - # initialize custom string for tests - c_dict['CUSTOM_STRING'] = '' + c_dict[f'{d_type}_CONSTANT_INIT'] = self.config.getbool( + 'config', + f'{d_type}_PCP_COMBINE_CONSTANT_INIT', False + ) # read any additional 
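A minimal standalone sketch of the run-method handling added above (normalize_method is a hypothetical helper, not part of the wrapper): the deprecated CUSTOM value is upgraded rather than rejected, and anything outside the valid list is treated as an error.

    VALID_RUN_METHODS = ['ADD', 'SUM', 'SUBTRACT', 'DERIVE', 'USER_DEFINED']

    def normalize_method(raw):
        method = raw.upper()
        if method == 'CUSTOM':  # deprecated spelling
            method = 'USER_DEFINED'
        return method if method in VALID_RUN_METHODS else None

    print(normalize_method('custom'))  # USER_DEFINED
    print(normalize_method('bogus'))   # None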
names/levels to add to command - c_dict[d_type+'_EXTRA_NAMES'] = util.getlist( + c_dict[f'{d_type}_EXTRA_NAMES'] = getlist( self.config.getraw('config', - d_type+'_PCP_COMBINE_EXTRA_NAMES', '') + f'{d_type}_PCP_COMBINE_EXTRA_NAMES', '') ) - c_dict[d_type+'_EXTRA_LEVELS'] = util.getlist( + c_dict[f'{d_type}_EXTRA_LEVELS'] = getlist( self.config.getraw('config', - d_type+'_PCP_COMBINE_EXTRA_LEVELS', '') + f'{d_type}_PCP_COMBINE_EXTRA_LEVELS', '') ) # fill in missing extra level values with None fill_num = (len(c_dict[f'{d_type}_EXTRA_NAMES']) - len(c_dict[f'{d_type}_EXTRA_LEVELS'])) if fill_num > 0: for num in range(fill_num): - c_dict[d_type + '_EXTRA_LEVELS'].append(None) + c_dict[f'{d_type}_EXTRA_LEVELS'].append(None) - c_dict[d_type+'_EXTRA_OUTPUT_NAMES'] = util.getlist( + c_dict[f'{d_type}_EXTRA_OUTPUT_NAMES'] = getlist( self.config.getraw('config', - d_type+'_PCP_COMBINE_EXTRA_OUTPUT_NAMES', '') + f'{d_type}_PCP_COMBINE_EXTRA_OUTPUT_NAMES', '') ) - if run_method not in self.valid_run_methods: - self.log_error(f"Invalid value for {d_type}_PCP_COMBINE_METHOD: " - f"{run_method}. Valid options are " - f"{','.join(self.valid_run_methods)}.") + if run_method == 'DERIVE' and not c_dict[f'{d_type}_STAT_LIST']: + self.log_error('Statistic list is empty. Must set ' + f'{d_type}_PCP_COMBINE_STAT_LIST if running ' + 'derive mode') - if run_method == 'DERIVE' and not c_dict[d_type+'_STAT_LIST']: - self.log_error('Statistic list is empty. ' + \ - 'Must set ' + d_type + '_PCP_COMBINE_STAT_LIST if running ' +\ - 'derive mode') + if (not c_dict[f'{d_type}_INPUT_TEMPLATE'] and + c_dict[f'{d_type}_RUN_METHOD'] != 'SUM'): + self.log_error(f"Must set {d_type}_PCP_COMBINE_INPUT_TEMPLATE " + "unless using SUM method") - if not c_dict[d_type+'_INPUT_TEMPLATE'] and c_dict[d_type+'_RUN_METHOD'] != 'SUM': - self.log_error(f"Must set {d_type}_PCP_COMBINE_INPUT_TEMPLATE unless using SUM method") - - if not c_dict[d_type+'_OUTPUT_TEMPLATE']: + if not c_dict[f'{d_type}_OUTPUT_TEMPLATE']: self.log_error(f"Must set {d_type}_PCP_COMBINE_OUTPUT_TEMPLATE") if run_method == 'DERIVE' or run_method == 'ADD': - if not c_dict[d_type+'_ACCUMS']: - self.log_error(f'{d_type}_PCP_COMBINE_INPUT_ACCUMS must be specified.') + if not c_dict[f'{d_type}_ACCUMS']: + self.log_error(f'{d_type}_PCP_COMBINE_INPUT_ACCUMS ' + 'must be specified.') # name list should either be empty or the same length as accum list - if c_dict[d_type+'_NAMES'] and \ - len(c_dict[d_type+'_ACCUMS']) != len(c_dict[d_type+'_NAMES']): - msg = f'{d_type}_PCP_COMBINE_INPUT_ACCUM_NAMES list should be ' +\ - 'either empty or the same length as ' +\ - f'{d_type}_PCP_COMBINE_INPUT_ACCUMS list.' - self.log_error(msg) - - if c_dict[d_type+'_LEVELS'] and \ - len(c_dict[d_type+'_ACCUMS']) != len(c_dict[d_type+'_LEVELS']): - msg = f'{d_type}_PCP_COMBINE_INPUT_LEVELS list should be ' +\ - 'either empty or the same length as ' +\ - f'{d_type}_PCP_COMBINE_INPUT_ACCUMS list.' 
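The fill_num loop above pads the extra-levels list with None so a later zip of names and levels stays aligned; a short sketch with hypothetical values:

    extra_names = ['TMP', 'UGRD', 'VGRD']  # hypothetical _EXTRA_NAMES
    extra_levels = ['Z2']                  # hypothetical _EXTRA_LEVELS
    extra_levels += [None] * (len(extra_names) - len(extra_levels))
    print(list(zip(extra_names, extra_levels)))
    # [('TMP', 'Z2'), ('UGRD', None), ('VGRD', None)]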
- self.log_error(msg) - - c_dict['ALLOW_MULTIPLE_FILES'] = True + len_names = len(c_dict[f'{d_type}_NAMES']) + len_accums = len(c_dict[f'{d_type}_ACCUMS']) + len_levels = len(c_dict[f'{d_type}_LEVELS']) + if c_dict[f'{d_type}_NAMES'] and len_accums != len_names: + self.log_error(f'{d_type}_PCP_COMBINE_INPUT_ACCUM_NAMES list ' + 'should be either empty or the same length as ' + f'{d_type}_PCP_COMBINE_INPUT_ACCUMS list.') + + if c_dict[f'{d_type}_LEVELS'] and len_accums != len_levels: + self.log_error(f'{d_type}_PCP_COMBINE_INPUT_LEVELS list ' + 'should be either empty or the same length as ' + f'{d_type}_PCP_COMBINE_INPUT_ACCUMS list.') return c_dict - def clear(self): - super().clear() - self.inaddons = [] - self.method = "" - self.pcp_dir = "" - self.pcp_regex = "" - self.init_time = -1 - self.valid_time = -1 - self.in_accum = -1 - self.out_accum = -1 - self.field_name = None - self.field_level = "" - self.field_extra = "" - self.output_name = "" - self.name = "" - self.compress = -1 - self.user_command = '' - self.extra_fields = None - self.extra_output = None - - def add_input_file(self, filename, addon): - self.infiles.append(filename) - self.inaddons.append(str(addon)) - - def get_dir_and_template(self, data_type, in_or_out): - dirr = self.c_dict[data_type+'_'+in_or_out+'_DIR'] - template = self.c_dict[data_type+'_'+in_or_out+'_TEMPLATE'] - - return (dirr, template) - - def getLowestForecastFile(self, valid_time, dtype, template): - """!Find the lowest forecast hour that corresponds to the - valid time - Args: - @param valid_time valid time to search - @param dtype data type (FCST or OBS) to get filename template - @rtype string - @return Path to file with the lowest forecast hour""" - out_file = None + def run_at_time_once(self, time_info, var_list, data_src): - # search for file with lowest forecast, then loop up into you find a valid one - min_forecast = time_util.get_seconds_from_string(self.c_dict[dtype+'_MIN_FORECAST'], 'H') - max_forecast = time_util.get_seconds_from_string(self.c_dict[dtype+'_MAX_FORECAST'], 'H') - smallest_input_accum = min([lev['amount'] for lev in self.c_dict['ACCUM_DICT_LIST']]) + if not var_list: + var_list = [None] - # if smallest input accumulation is greater than an hour, search hourly - if smallest_input_accum > 3600: - smallest_input_accum = 3600 + for var_info in var_list: + self.run_at_time_one_field(time_info, var_info, data_src) - min_forecast_string = time_util.ti_get_lead_string(min_forecast) - max_forecast_string = time_util.ti_get_lead_string(max_forecast) - smallest_input_accum_string = time_util.ti_get_lead_string(smallest_input_accum, plural=False) - self.logger.debug(f"Looking for file with lowest forecast lead valid at {valid_time}" - f" between {min_forecast_string} and {max_forecast_string} using " - f"{smallest_input_accum_string} intervals") + def run_at_time_one_field(self, time_info, var_info, data_src): - forecast_lead = min_forecast - while forecast_lead <= max_forecast: - input_dict = {} - input_dict['valid'] = valid_time - input_dict['lead_seconds'] = forecast_lead - time_info = time_util.ti_calculate(input_dict) - time_info['custom'] = self.c_dict['CUSTOM_STRING'] - fSts = do_string_sub(template, - **time_info) - search_file = os.path.join(self.input_dir, - fSts) + self.clear() - self.logger.debug(f"Looking for {search_file}") + method = self.c_dict[data_src+'_RUN_METHOD'] - search_file = util.preprocess_file(search_file, - self.c_dict[dtype+'_INPUT_DATATYPE'], - self.config) + self.c_dict['OUTPUT_DIR'] = 
self.c_dict[f'{data_src}_OUTPUT_DIR']
+        self.c_dict['OUTPUT_TEMPLATE'] = (
+            self.c_dict[f'{data_src}_OUTPUT_TEMPLATE']
+        )
-            if search_file is not None:
-                return search_file, forecast_lead
-            forecast_lead += smallest_input_accum
+        # get lookback/output accum seconds and add it to time info dictionary
+        lookback_seconds = self._get_lookback_seconds(time_info=time_info,
+                                                      var_info=var_info,
+                                                      data_src=data_src)
+        if lookback_seconds is None:
+            return False
-        return None, 0
+        time_info['level'] = lookback_seconds
-    def get_daily_file(self, time_info, accum, data_src, file_template):
-        """!Pull accumulation out of file that contains a full day of data
-        Args:
-          @param time_info dictionary containing timing information
-          @param accum accumulation to extract from file
-          @param data_src type of data (FCST or OBS)
-          @param file_template filename template to search
-          @rtype bool
-          @return True if file was added to output list, False if not"""
-
-        data_interval = self.c_dict[data_src + '_DATA_INTERVAL']
-        times_per_file = self.c_dict[data_src + '_TIMES_PER_FILE']
-        search_file = None
-        # loop from valid_time back to data interval * times per file
-        for i in range(0, times_per_file+1):
-            search_time = time_info['valid'] - datetime.timedelta(hours=(i * data_interval))
-            # check if file exists
-            dSts = do_string_sub(file_template,
-                                 valid=search_time,
-                                 custom=self.c_dict['CUSTOM_STRING'])
-            search_file = os.path.join(self.input_dir,
-                                       dSts)
-            search_file = util.preprocess_file(search_file,
-                                               self.c_dict[data_src+\
-                                                           '_INPUT_DATATYPE'],
-                                               self.config)
-            if search_file is not None:
-                break
+        # call the setup function that corresponds to the run method
+        # to build the command arguments
+        if method == "USER_DEFINED":
+            can_run = self.setup_user_method(time_info, data_src)
+        elif method == "DERIVE":
+            can_run = self.setup_derive_method(time_info, lookback_seconds,
+                                               var_info, data_src)
+        elif method == "ADD":
+            can_run = self.setup_add_method(time_info, lookback_seconds,
+                                            data_src)
+        elif method == "SUM":
+            can_run = self.setup_sum_method(time_info, lookback_seconds,
+                                            data_src)
+        elif method == "SUBTRACT":
+            can_run = self.setup_subtract_method(time_info, lookback_seconds,
+                                                 data_src)
+        else:
+            can_run = None
-        if search_file is None:
+        if not can_run:
+            self.log_error("pcp_combine could not generate command")
             return False
-        diff = time_info['valid'] - search_time
-
-        # Specifying integer division // Python 3,
-        # assuming that was the intent in Python 2.
-        lead = int((diff.days * 24) // (data_interval))
-        lead += int((diff).seconds // (data_interval*3600)) - 1
-        search_time_info = { 'valid' : search_time,
-                             'custom': self.c_dict['CUSTOM_STRING']}
-
-        # get name of input level item that matches the accumulation to extract from daily file
-        accum_seconds = time_util.get_seconds_from_string(accum, 'H')
-        accum_dict_list = self.c_dict['ACCUM_DICT_LIST']
-        fname = next((item['name'] for item in accum_dict_list if item['amount'] == accum_seconds), '-1')
-        # if accumulation was not found in levels dictionary list, error and return
-        if fname == '-1':
-            self.log_error(f'Accumulation {accum} was not specified in the {data_src}'
-                           '_PCP_COMBINE_INPUT_ACCUMS list')
-            return False
+        # set time info level back to lookback seconds
+        time_info['level'] = lookback_seconds
+
+        self._handle_extra_field_arguments(data_src, time_info)
+
+        # add -name argument
+        output_name = self.c_dict.get(f'{data_src}_OUTPUT_NAME')
+        if not output_name:
+            if var_info:
+                output_name = var_info.get(f"{data_src.lower()}_name")
+                self.logger.warning(
+                    f'{data_src}_PCP_COMBINE_OUTPUT_NAME is '
+                    f'not set. Using {output_name} from '
+                    f'{data_src}_VAR{var_info.get("index")}_NAME.'
+                )
+
+        if output_name:
+            self._handle_name_argument(output_name, data_src)
+
+        if not self.find_and_check_output_file(time_info=time_info):
+            return True
+
+        # set user environment variables if needed and print all envs
+        self.set_environment_variables(time_info)
+
+        return self.build()
+
+    def setup_user_method(self, time_info, data_src):
+        """! Setup pcp_combine to call user defined command
+
+          @param time_info dictionary containing timing information
+          @param data_src data type (FCST or OBS)
+          @rtype bool
+          @return True after the user-defined arguments are added
+        """
+        command_template = self.config.getraw(
+            'config',
+            f'{data_src}_PCP_COMBINE_COMMAND'
+        )
+        user_command = do_string_sub(command_template, **time_info)
+        self.args.extend(user_command.split())
+
+        return True
+
+    def setup_subtract_method(self, time_info, accum, data_src):
+        """! Setup pcp_combine to subtract two files to build accumulation
+
+          @param time_info object containing timing information
+          @param accum accumulation amount to compute in seconds
+          @param data_src data type (FCST or OBS)
+          @rtype list
+          @return list of (input file, field info) tuples, or None on error
+        """
+        self.args.append('-subtract')
+
+        lead = time_info['lead_seconds']
+        lead2 = lead - accum
-        # if name was not set in the input levels list, use accumulation time in MET time format
-        if fname is None:
-            addon = time_util.time_string_to_met_time(accum, default_unit='S')
+        self.logger.debug(
+            f"Attempting to build {ti_get_lead_string(accum, False)} "
+            f"accumulation by subtracting {ti_get_lead_string(lead2, False)} "
+            f"from {ti_get_lead_string(lead, False)}."
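The lead arithmetic above selects the two files to difference; a worked example with hypothetical times:

    accum = 6 * 3600      # desired accumulation: 6 hours, in seconds
    lead = 24 * 3600      # current forecast lead
    lead2 = lead - accum  # 64800 seconds, i.e. 18 hours
    # pcp_combine -subtract takes the 24-hour file minus the 18-hour
    # file to recover the 18-24 hour precipitation total
    print(lead2 // 3600)  # 18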
+        )
+
+        files_found = []
+
+        full_template = os.path.join(self.c_dict[f'{data_src}_INPUT_DIR'],
+                                     self.c_dict[f'{data_src}_INPUT_TEMPLATE'])
+
+        # get first file
+        filepath1 = do_string_sub(full_template, **time_info)
+        file1 = util.preprocess_file(filepath1,
+                                     self.c_dict[data_src+'_INPUT_DATATYPE'],
+                                     self.config)
+
+        if file1 is None:
+            self.log_error(f'Could not find {data_src} file {filepath1} '
+                           f'using template {full_template}')
+            return None
+
+        # handle field information
+        field_args = {}
+        if self.c_dict.get(f"{data_src}_NAMES"):
+            field_args['name'] = self.c_dict[f"{data_src}_NAMES"][0]
+
+        if self.c_dict.get(f"{data_src}_LEVELS"):
+            field_args['level'] = self.c_dict[f"{data_src}_LEVELS"][0]
+
+        # if data is GRIB and second lead is 0, then
+        # run PCPCombine in -add mode with just the first file
+        if lead2 == 0 and self.c_dict[data_src+'_INPUT_DATATYPE'] == 'GRIB':
+            self.logger.debug("Subtracted accumulation is 0 for GRIB data,"
+                              " so running ADD mode on one file")
+            self.args.clear()
+            self.args.append('-add')
+            field_info = self.get_field_string(
+                time_info=time_info,
+                search_accum=seconds_to_met_time(lead),
+                **field_args
+            )
+            self.args.append(file1)
+            self.args.append(field_info)
+            files_found.append((file1, field_info))
+            return files_found
+
+        # else continue building -subtract command
+
+        # set time info for second lead
+        input_dict2 = {'init': time_info['init'],
+                       'lead': lead2}
+        time_info2 = ti_calculate(input_dict2)
+        time_info2['level'] = accum
+        if 'custom' in time_info:
+            time_info2['custom'] = time_info['custom']
+
+        filepath2 = do_string_sub(full_template, **time_info2)
+        file2 = util.preprocess_file(filepath2,
+                                     self.c_dict[data_src+'_INPUT_DATATYPE'],
+                                     self.config)
+
+        if file2 is None:
+            self.log_error(f'Could not find {data_src} file {filepath2} '
+                           f'using template {full_template}')
+            return None
+
+        field_info1 = self.get_field_string(
+            time_info=time_info,
+            search_accum=seconds_to_met_time(lead),
+            **field_args
+        )
+        field_info2 = self.get_field_string(
+            time_info=time_info2,
+            search_accum=seconds_to_met_time(lead2),
+            **field_args
+        )
+
+        self.args.append(file1)
+        self.args.append(field_info1)
+
+        self.args.append(file2)
+        self.args.append(field_info2)
+        files_found.append((file1, field_info1))
+        files_found.append((file2, field_info2))
+
+        return files_found
+
+    def setup_sum_method(self, time_info, lookback, data_src):
+        """!
+        Setup pcp_combine to build desired accumulation based on
+        init/valid times and accumulations
+
+          @param time_info object containing timing information
+          @param lookback accumulation amount to compute in seconds
+          @param data_src data type (FCST or OBS)
+          @rtype bool
+          @return True after the sum command arguments are set
+        """
+        self.args.append('-sum')
+
+        if self.c_dict[f"{data_src}_ACCUMS"]:
+            in_accum = self.c_dict[data_src+'_ACCUMS'][0]
         else:
-            fname = do_string_sub(fname, **search_time_info)
-            addon = "'name=\"" + fname + "\";"
+            in_accum = 0
-            # if name is a python script, don't set level
-            if not util.is_python_script(fname):
-                addon += " level=\"(" + str(lead) + ",*,*)\";"
+        in_accum = time_string_to_met_time(in_accum, 'H')
+        out_accum = time_string_to_met_time(lookback, 'S')
-        addon += "'"
+        time_info['level'] = in_accum
+        pcp_regex = util.template_to_regex(
+            self.c_dict[f'{data_src}_INPUT_TEMPLATE'],
+            time_info
+        )
+        pcp_regex_split = pcp_regex.split('/')
+        pcp_dir = os.path.join(self.c_dict[f'{data_src}_INPUT_DIR'],
+                               *pcp_regex_split[0:-1])
+        pcp_regex = pcp_regex_split[-1]
+
+        # set arguments
+        # init time
+        self.args.append(time_info['init'].strftime('%Y%m%d_%H%M%S'))
+        # input accum
+        self.args.append(in_accum)
+        # valid time
+        self.args.append(time_info['valid'].strftime('%Y%m%d_%H%M%S'))
+        # output accum
+        self.args.append(out_accum)
+        self.args.append(f"-pcpdir {pcp_dir}")
+        self.args.append(f"-pcprx {pcp_regex}")
+
+        # set -field name and level if set in config
+        self._handle_field_argument(data_src, time_info)
-    def get_addon(self, accum_dict, search_accum, search_time):
-        field_name = accum_dict['name']
-        field_level = accum_dict['level']
-        field_extra = accum_dict['extra']
-        if field_name is None:
-            return search_accum
+        return True
-        # perform string substitution on name in case it uses filename templates
-        field_name = do_string_sub(field_name,
-                                   valid=search_time,
-                                   custom=self.c_dict['CUSTOM_STRING'])
-        addon = "'name=\"" + field_name + "\";"
+    def setup_add_method(self, time_info, lookback, data_src):
+        """! Setup pcp_combine to add files to build desired accumulation
-        if not util.is_python_script(field_name) and field_level is not None:
-            addon += f" level=\"{field_level}\";"
+          @param time_info dictionary containing timing information
+          @param lookback accumulation amount to compute in seconds
+          @param data_src data type (FCST or OBS)
+          @rtype list
+          @return list of (input file, field info) tuples, or False on error
+        """
+        self.args.append('-add')
-        if field_extra:
-            search_time_info = {'valid': search_time,
-                                'custom': self.c_dict['CUSTOM_STRING']}
+        # create list of tuples for input levels and optional field names
+        self._build_input_accum_list(data_src, time_info)
-            field_extra = do_string_sub(field_extra,
-                                        **search_time_info)
+        files_found = self.get_accumulation(time_info, lookback, data_src)
+        if not files_found:
+            self.log_error(
+                f'Could not find files to build accumulation in '
+                f"{self.c_dict[f'{data_src}_INPUT_DIR']} using template "
+                f"{self.c_dict[f'{data_src}_INPUT_TEMPLATE']}")
+            return False
-            field_extra = field_extra.replace('"', '\"')
-            addon += f" {field_extra}"
+        return files_found
+
+    def setup_derive_method(self, time_info, lookback, var_info, data_src):
+        """!
+        Setup pcp_combine to derive stats
+
+          @param time_info dictionary containing timing information
+          @param lookback accumulation amount to compute in seconds
+          @param var_info object containing variable information
+          @param data_src data type (FCST or OBS)
+          @rtype list
+          @return list of (input file, field info) tuples, or None on error
+        """
+        self.args.append('-derive')
+
+        # add list of statistics
+        self.args.append(','.join(self.c_dict[f"{data_src}_STAT_LIST"]))
+
+        # create list of tuples for input levels and optional field names
+        self._build_input_accum_list(data_src, time_info)
+
+        # if no lookback is specified, get files using the template without
+        # using the get accumulation logic
+        if not lookback:
+            self.logger.debug(f"{data_src}_PCP_COMBINE_LOOKBACK unset "
+                              "or set to 0. Using template to find files.")
+            accum_dict = self.c_dict['ACCUM_DICT_LIST'][0]
+            field_info = self.get_field_string(time_info=time_info,
+                                               search_accum=0,
+                                               name=accum_dict['name'],
+                                               level=accum_dict['level'],
+                                               extra=accum_dict['extra'])
+            input_files = self.find_data(time_info,
+                                         var_info,
+                                         data_type=data_src,
+                                         return_list=True)
+            if not input_files:
+                return None
+
+            files_found = []
+            for input_file in input_files:
+                # exclude field info and set it with -field
+                self.args.append(input_file)
+                files_found.append((input_file, field_info))
         else:
+            files_found = self.get_accumulation(time_info,
+                                                lookback,
+                                                data_src,
+                                                field_info_after_file=False)
+            if not files_found:
+                self.log_error(
+                    f'Could not find files to build accumulation in '
+                    f"{self.c_dict[f'{data_src}_INPUT_DIR']} using template "
+                    f"{self.c_dict[f'{data_src}_INPUT_TEMPLATE']}")
+                return None
+            # set -field name and level from first file field info
+            self.args.append(f'-field {files_found[0][1]}')
+        return files_found
+
+    def _handle_lookback(self, c_dict, d_type):
+        """! Get value for lookback time from config.
+        [FCST/OBS]_PCP_COMBINE_LOOKBACK is used if set. If not, use synonyms
+        [FCST/OBS]_PCP_COMBINE_DERIVE_LOOKBACK or
+        [FCST/OBS]_PCP_COMBINE_OUTPUT_ACCUM. Priority of the synonyms is
+        based on the run method (derive mode prioritizes DERIVE_LOOKBACK,
+        all others prioritize OUTPUT_ACCUM). This is done so the lookback is
+        handled the same way for every run method while the clearest config
+        name is used for each method.
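A minimal sketch of that priority order (synonym_order is a hypothetical helper, not the wrapper's API):

    def synonym_order(run_method):
        # DERIVE mode prefers DERIVE_LOOKBACK; all other methods
        # prefer OUTPUT_ACCUM
        if run_method == 'DERIVE':
            return ['DERIVE_LOOKBACK', 'OUTPUT_ACCUM']
        return ['OUTPUT_ACCUM', 'DERIVE_LOOKBACK']

    print(synonym_order('DERIVE'))  # ['DERIVE_LOOKBACK', 'OUTPUT_ACCUM']
    print(synonym_order('ADD'))     # ['OUTPUT_ACCUM', 'DERIVE_LOOKBACK']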
- def get_template_accum(self, accum_dict, search_time, lead, data_src): - # apply string substitution to accum amount - search_time_dict = {'valid': search_time, 'lead_seconds': lead} - search_time_info = time_util.ti_calculate(search_time_dict) - search_time_info['custom'] = self.c_dict['CUSTOM_STRING'] - amount = do_string_sub(accum_dict['template'], - **search_time_info) - amount = time_util.get_seconds_from_string(amount, default_unit='S', valid_time=search_time) + @param c_dict config dictionary to populate + @param d_type data type (FCST or OBS) + @returns lookback time / desired accumulation in seconds + """ + lookback = self.config.getstr('config', + f'{d_type}_PCP_COMBINE_LOOKBACK', '') + if lookback: + return lookback + + # if _PCP_COMBINE_LOOKBACK is not set + # prioritize DERIVE_LOOKBACK over OUTPUT_ACCUM if in -derive mode + # or vice versa otherwise + if c_dict[f'{d_type}_RUN_METHOD'] == "DERIVE": + ordered_synonyms = [ + 'DERIVE_LOOKBACK', + 'OUTPUT_ACCUM', + ] + else: + ordered_synonyms = [ + 'OUTPUT_ACCUM', + 'DERIVE_LOOKBACK', + ] + + for synonym in ordered_synonyms: + lookback = self.config.getstr( + 'config', + f'{d_type}_PCP_COMBINE_{synonym}', '') + if lookback: + return lookback + + # if none of the variables are set, return None + return None + + def _get_lookback_seconds(self, time_info, var_info, data_src): + if self.c_dict[f"{data_src}_LOOKBACK"]: + lookback = self.c_dict[f"{data_src}_LOOKBACK"] + elif var_info: + lookback = var_info[f'{data_src.lower()}_level'] + self.logger.warning( + f'{data_src}_PCP_COMBINE_LOOKBACK is ' + f'not set. Using {lookback} from ' + f'{data_src}_VAR{var_info.get("index")}_LEVELS' + '. It is recommended that you explicitly set ' + 'the output accumulation.') + else: + lookback = '0' - # if bucket interval is provided, adjust the accumulation amount - # if adjustment sets amount to 0, set it to the bucket interval - bucket_interval = self.c_dict[f"{data_src}_BUCKET_INTERVAL"] - if bucket_interval != 0: - self.logger.debug(f"Applying bucket interval {time_util.ti_get_lead_string(bucket_interval)}" - f" to {time_util.ti_get_lead_string(amount)}") - amount = amount % bucket_interval - if amount == 0: - amount = bucket_interval + _, lookback = util.split_level(lookback) - self.logger.debug(f"New accumulation amount is {time_util.ti_get_lead_string(amount)}") + lookback_seconds = get_seconds_from_string( + lookback, + default_unit='H', + valid_time=time_info['valid'] + ) + if lookback_seconds is None: + self.log_error(f'Invalid format for derived lookback: {lookback}') - return amount + return lookback_seconds + + def get_accumulation(self, time_info, accum, data_src, + field_info_after_file=True): + """! 
Find files to combine to build the desired accumulation - def get_accumulation(self, time_info, accum, data_src): - """!Find files to combine to build the desired accumulation - Args: @param time_info dictionary containing time information - @param accum desired accumulation to build + @param accum desired accumulation to build in seconds @param data_src type of data (FCST or OBS) @rtype bool @return True if full set of files to build accumulation is found """ - in_template = self.c_dict[data_src+'_INPUT_TEMPLATE'] - - if self.c_dict[data_src + '_IS_DAILY_FILE'] is True: - return self.get_daily_file(time_info, accum, data_src, in_template) - search_time = time_info['valid'] # last time to search is the output accumulation subtracted from the # valid time, then add back the smallest accumulation that is available @@ -448,18 +635,19 @@ def get_accumulation(self, time_info, accum, data_src): # the file/field time backwards in time # If building 6 hour accumulation from 1 hour accumulation files, # last time to process is valid - 6 + 1 - accum_relative = time_util.get_relativedelta(accum, 'H') + accum_relative = get_relativedelta(accum, 'S') # using 1 hour for now - smallest_input_accum = min([lev['amount'] for lev in self.c_dict['ACCUM_DICT_LIST']]) + smallest_input_accum = min( + [lev['amount'] for lev in self.c_dict['ACCUM_DICT_LIST']] + ) if smallest_input_accum == 9999999: smallest_input_accum = 3600 - last_time = time_info['valid'] -\ - accum_relative +\ - datetime.timedelta(seconds=smallest_input_accum) + last_time = (time_info['valid'] - accum_relative + + timedelta(seconds=smallest_input_accum)) - total_accum = time_util.ti_get_seconds_from_relativedelta(accum_relative, - time_info['valid']) + total_accum = ti_get_seconds_from_relativedelta(accum_relative, + time_info['valid']) # log the input and output accumulation information search_accum_list = [] @@ -467,10 +655,15 @@ def get_accumulation(self, time_info, accum, data_src): if lev['template'] is not None: search_accum_list.append(lev['template']) else: - search_accum_list.append(time_util.ti_get_lead_string(lev['amount'], plural=False)) + search_accum_list.append(ti_get_lead_string(lev['amount'], + plural=False)) - search_accum_string = ' or '.join(search_accum_list) - self.logger.debug(f"Trying to build a {time_util.ti_get_lead_string(total_accum, plural=False)} accumulation using {search_accum_string} input data") + self.logger.debug("Trying to build a " + f"{ti_get_lead_string(total_accum, plural=False)} " + "accumulation using " + f"{' or '.join(search_accum_list)} input data") + + files_found = [] # loop backwards in time until you have a full set of accum while last_time <= search_time: @@ -481,35 +674,56 @@ def get_accumulation(self, time_info, accum, data_src): # look for biggest accum that fits search for accum_dict in self.c_dict['ACCUM_DICT_LIST']: - if accum_dict['amount'] > total_accum and accum_dict['template'] is None: + if (accum_dict['amount'] > total_accum and + accum_dict['template'] is None): continue - search_file, lead = self.find_input_file(in_template, time_info['init'], search_time, - accum_dict['amount'], data_src) + search_file, lead = self.find_input_file(time_info['init'], + search_time, + accum_dict['amount'], + data_src) + + if not search_file: + continue # if found a file, add it to input list with info - if search_file is not None: - # if template is used in accum, find value and apply bucket interval is set - if accum_dict['template'] is not None: - accum_amount = 
self.get_template_accum(accum_dict,
-                                                              search_time,
-                                                              lead,
-                                                              data_src)
-                    if accum_amount > total_accum:
-                        self.logger.debug("Accumulation amount is bigger than remaining accumulation.")
-                        continue
-                else:
-                    accum_amount = accum_dict['amount']
-
-                accum_met_time = time_util.time_string_to_met_time(accum_amount)
-                addon = self.get_addon(accum_dict, accum_met_time, search_time)
-                # add file to input list and step back in time to find more data
-                self.add_input_file(search_file, addon)
-                self.logger.debug(f"Adding input file: {search_file} with {addon}")
-                search_time = search_time - datetime.timedelta(seconds=accum_amount)
-                total_accum -= accum_amount
-                found = True
-                break
+                # if template is used in accum, find value and
+                # apply bucket interval if set
+                if accum_dict['template'] is not None:
+                    accum_amount = self.get_template_accum(accum_dict,
+                                                           search_time,
+                                                           lead,
+                                                           data_src)
+                    if accum_amount > total_accum:
+                        self.logger.debug("Accumulation amount is bigger "
+                                          "than remaining accumulation.")
+                        continue
+                else:
+                    accum_amount = accum_dict['amount']
+
+                search_time_info = {
+                    'valid': search_time,
+                    'lead': lead,
+                }
+                field_info = self.get_field_string(
+                    time_info=search_time_info,
+                    search_accum=time_string_to_met_time(accum_amount),
+                    name=accum_dict['name'],
+                    level=accum_dict['level'],
+                    extra=accum_dict['extra']
+                )
+                # add file to input list and step back to find more data
+                self.args.append(search_file)
+                if field_info_after_file:
+                    self.args.append(field_info)
+
+                files_found.append((search_file, field_info))
+                self.logger.debug(f"Adding input file: {search_file} "
+                                  f"with {field_info}")
+                search_time -= timedelta(seconds=accum_amount)
+                total_accum -= accum_amount
+                found = True
+                break
 
             # if we don't need any more accumulation, break out of loop and run
             if not total_accum:
@@ -517,598 +731,217 @@ def get_accumulation(self, time_info, accum, data_src):
 
         # if we still need to find more accum but we couldn't find it, fail
         if not found:
-            return False
+            return None
 
         # fail if no files were found or if we didn't find
         # the entire accumulation needed
-        if not self.infiles or total_accum:
-            return False
-
-        return True
-
-    def get_command(self):
-
-        cmd = '{} -v {} '.format(self.app_path, self.c_dict['VERBOSITY'])
-
-        for a in self.args:
-            cmd += a + " "
+        if not files_found or total_accum:
+            return None
-        if self.method == "USER_DEFINED":
-            cmd += self.user_command
-            return cmd
-        elif self.method == "SUM":
-            if self.init_time == -1:
-                self.log_error("No init time specified")
-                return None
+        return files_found
-            if self.valid_time == -1:
-                self.log_error("No valid time specified")
-                return None
+    def get_lowest_fcst_file(self, valid_time, data_src):
+        """!
+        Find the lowest forecast hour that corresponds to the valid time
+
+          @param valid_time valid time to search
+          @param data_src data type (FCST or OBS) to get filename template
+          @rtype tuple
+          @return Path to file with the lowest forecast hour and its lead
+           time in seconds, or (None, 0) if no file is found
+        """
+        # search for file with lowest forecast,
+        # then loop upward until you find a valid one
+        min_forecast = get_seconds_from_string(
+            self.c_dict[data_src+'_MIN_FORECAST'], 'H'
+        )
+        max_forecast = get_seconds_from_string(
+            self.c_dict[data_src+'_MAX_FORECAST'], 'H'
+        )
+        smallest_input_accum = min(
+            [lev['amount'] for lev in self.c_dict['ACCUM_DICT_LIST']]
+        )
-        if self.in_accum == -1:
-            self.log_error("No input accumulation specified")
-            return None
-
-        if self.out_accum == -1:
-            self.log_error("No output accumulation specified")
-            return None
+        # if smallest input accumulation is greater than an hour, search hourly
+        if smallest_input_accum > 3600:
+            smallest_input_accum = 3600
-        cmd += "-sum " + self.init_time + " " + str(self.in_accum) + " " +\
-               self.valid_time + " " + str(self.out_accum) + " "
+        min_forecast_string = ti_get_lead_string(min_forecast)
+        max_forecast_string = ti_get_lead_string(max_forecast)
+        smallest_input_accum_string = ti_get_lead_string(smallest_input_accum,
+                                                         plural=False)
+        self.logger.debug("Looking for file with lowest forecast lead valid "
+                          f"at {valid_time} between {min_forecast_string} "
+                          f"and {max_forecast_string} using "
+                          f"{smallest_input_accum_string} intervals")
-        else:
-            if self.method == "ADD":
-                cmd += "-add "
-            elif self.method == "SUBTRACT":
-                cmd += "-subtract "
-            elif self.method == 'DERIVE':
-                cmd += '-derive '
-                cmd += ','.join(self.c_dict['STAT_LIST']) + ' '
-
-        if len(self.infiles) == 0:
-            self.log_error("No input filenames specified")
-            return None
+        forecast_lead = min_forecast
+        while forecast_lead <= max_forecast:
+            input_dict = {
+                'valid': valid_time,
+                'lead_seconds': forecast_lead
+            }
+            time_info = ti_calculate(input_dict)
+            time_info['custom'] = self.c_dict.get('CUSTOM_STRING', '')
+            search_file = os.path.join(self.c_dict[f'{data_src}_INPUT_DIR'],
+                                       self.c_dict[data_src+'_INPUT_TEMPLATE'])
+            search_file = do_string_sub(search_file, **time_info)
-        for idx, f in enumerate(self.infiles):
-            cmd += f + " "
-            if self.method != 'DERIVE':
-                cmd += self.inaddons[idx] + " "
+            self.logger.debug(f"Looking for {search_file}")
-        # set -field options if set
-        if self.field_name:
-            cmd += "-field 'name=\""+self.field_name+"\";"
+            search_file = util.preprocess_file(
+                search_file,
+                self.c_dict[data_src+'_INPUT_DATATYPE'],
+                self.config)
-            if self.field_level:
-                cmd += " level=\""+self.field_level+"\";"
+            if search_file is not None:
+                return search_file, forecast_lead
+            forecast_lead += smallest_input_accum
-            if self.field_extra:
-                cmd += f' {self.field_extra}'
+        return None, 0
-            cmd += "' "
+    def get_field_string(self, time_info=None, search_accum=0, name=None,
+                         level=None, extra=None):
+        if name is None:
+            name = 'APCP'
+            level = f'A{str(search_accum).zfill(2)}'
+            self.logger.debug("Field name not specified. 
Assuming " + f"{name}/{level}") + + field_info = self.get_field_info(v_name=name, + v_level=level, + v_extra=extra, + add_curly_braces=False)[0] + + # string sub values into full field info string using search time info + if time_info: + field_info = do_string_sub(field_info, + **time_info) + return field_info + + def find_input_file(self, init_time, valid_time, search_accum, data_src): + lead = 0 - if self.extra_fields: - cmd += self.extra_fields + ' ' + in_template = self.c_dict[data_src+'_INPUT_TEMPLATE'] - output_string = self.get_output_string() - if output_string: - cmd += f'-name {output_string} ' + if ('{lead?' in in_template or + ('{init?' in in_template and '{valid?' in in_template)): + if not self.c_dict[f'{data_src}_CONSTANT_INIT']: + return self.get_lowest_fcst_file(valid_time, data_src) - if not self.outfile: - self.log_error("No output filename specified") - return None + # set init time and lead in time dict if init should be constant + # ti_calculate cannot currently handle both init and valid + lead = (valid_time - init_time).total_seconds() + input_dict = {'init': init_time, 'lead': lead} + else: + if self.c_dict[f'{data_src}_CONSTANT_INIT']: + input_dict = {'init': init_time} + else: + input_dict = {'valid': valid_time} - out_path = self.get_output_path() + time_info = ti_calculate(input_dict) + time_info['custom'] = self.c_dict.get('CUSTOM_STRING', '') + time_info['level'] = int(search_accum) + input_path = os.path.join(self.c_dict[f'{data_src}_INPUT_DIR'], + in_template) + input_path = do_string_sub(input_path, **time_info) - # create outdir (including subdir in outfile) if it doesn't exist - if not os.path.exists(os.path.dirname(out_path)): - os.makedirs(os.path.dirname(out_path)) + return util.preprocess_file(input_path, + self.c_dict[f'{data_src}_INPUT_DATATYPE'], + self.config), lead - cmd += f"{out_path} " + def get_template_accum(self, accum_dict, search_time, lead, data_src): + # apply string substitution to accum amount + search_time_dict = {'valid': search_time, 'lead_seconds': lead} + search_time_info = ti_calculate(search_time_dict) + search_time_info['custom'] = self.c_dict.get('CUSTOM_STRING', '') + amount = do_string_sub(accum_dict['template'], + **search_time_info) + amount = get_seconds_from_string(amount, default_unit='S', + valid_time=search_time) - if self.pcp_dir: - cmd += f"-pcpdir {self.pcp_dir} " + # if bucket interval is provided, adjust the accumulation amount + # if adjustment sets amount to 0, set it to the bucket interval + bucket_interval = self.c_dict[f"{data_src}_BUCKET_INTERVAL"] + if bucket_interval != 0: + self.logger.debug("Applying bucket interval " + f"{ti_get_lead_string(bucket_interval)}" + f" to {ti_get_lead_string(amount)}") + amount = amount % bucket_interval + if amount == 0: + amount = bucket_interval - if self.pcp_regex: - cmd += f"-pcprx {self.pcp_regex} " + self.logger.debug("New accumulation amount is " + f"{ti_get_lead_string(amount)}") - if self.name: - cmd += f"-name {self.name} " + return amount - if self.compress != -1: - cmd += f"-compress {str(self.compress)} " + def get_command(self): - # remove whitespace at beginning/end and return command - return cmd.strip() + cmd = (f"{self.app_path} -v {self.c_dict['VERBOSITY']} " + f"{' '.join(self.args)} {self.get_output_path()}") + return cmd - def get_extra_fields(self, data_src): + def _handle_extra_field_arguments(self, data_src, time_info=None): extra_names = self.c_dict.get(data_src + '_EXTRA_NAMES') if not extra_names: - return None, None - - extra_list = [] + 
return extra_levels = self.c_dict.get(data_src + '_EXTRA_LEVELS') for name, level in zip(extra_names, extra_levels): - field_fmt = f"-field 'name=\"{name}\";" - if level: - field_fmt += f" level=\"{level}\";" - field_fmt += "'" - extra_list.append(field_fmt) - - extra_input_fmt = ' '.join(extra_list) - - # handle extra output names if specified - extra_output_names = self.c_dict.get(data_src + '_EXTRA_OUTPUT_NAMES') - if not extra_output_names: - extra_output_fmt = None - else: - extra_output_fmt = '","'.join(extra_output_names) - extra_output_fmt = f'"{extra_output_fmt}"' - - return extra_input_fmt, extra_output_fmt - - def get_output_string(self): - """! If self.output_name is set, add quotes and return the string. If - self.extra_output is also set, add the additional names separated by - commas inside the quotes. - - @returns formatted string if output name(s) is specified, None if not - """ - if not self.output_name: - return None - - output_string = f'"{self.output_name}"' - # add extra output field names - if self.extra_output: - output_string = f'{output_string},{self.extra_output}' - - return output_string - - def run_at_time_once(self, time_info, var_list, data_src): - - if not var_list: - var_list = [None] - - for var_info in var_list: - self.run_at_time_one_field(time_info, var_info, data_src) - - def run_at_time_one_field(self, time_info, var_info, data_src): - - self.clear() - - # read additional names/levels to add to command if set - self.extra_fields, self.extra_output = self.get_extra_fields(data_src) - - cmd = None - self.method = self.c_dict[data_src+'_RUN_METHOD'] - - # if method is not USER_DEFINED or DERIVE, check that field information is set - if self.method == "USER_DEFINED": - cmd = self.setup_user_method(time_info, data_src) - elif self.method == "DERIVE": - cmd = self.setup_derive_method(time_info, var_info, data_src) - elif not var_info and not self.c_dict[f"{data_src}_OUTPUT_ACCUM"]: - self.log_error('Cannot run PCPCombine without specifying fields to process ' - 'unless running in USER_DEFINED mode. You must set ' - f'{data_src}_VAR_[NAME/LEVELS] or {data_src}_OUTPUT_[NAME/LEVEL]') - return False - - if self.method == "ADD": - cmd = self.setup_add_method(time_info, var_info, data_src) - elif self.method == "SUM": - cmd = self.setup_sum_method(time_info, var_info, data_src) - elif self.method == "SUBTRACT": - cmd = self.setup_subtract_method(time_info, var_info, data_src) - - # invalid method should never happen because value is checked on init - - if cmd is None: - self.log_error("pcp_combine could not generate command") - return False - - # if output file exists and we want to skip it, warn and continue - outfile = self.get_output_path() - if os.path.exists(outfile) and self.c_dict['SKIP_IF_OUTPUT_EXISTS'] is True: - self.logger.debug('Skip writing output file {} because it already ' - 'exists. 
Remove file or change ' - 'PCP_COMBINE_SKIP_IF_OUTPUT_EXISTS to True to process' - .format(outfile)) - return True - - # set user environment variables if needed and print all envs - self.set_environment_variables(time_info) - - return self.build() - - def setup_subtract_method(self, time_info, var_info, data_src): - """!Setup pcp_combine to subtract two files to build desired accumulation - Args: - @param time_info object containing timing information - @param var_info object containing variable information - @params data_src data type (FCST or OBS) - @rtype string - @return path to output file""" - in_dir, in_template = self.get_dir_and_template(data_src, 'INPUT') - out_dir, out_template = self.get_dir_and_template(data_src, 'OUTPUT') - - # if [FCST/OBS]_OUTPUT_[NAME/ACCUM] are set, use them instead of - # [FCST/OBS]_VAR_[NAME/LEVELS] - if self.c_dict[f"{data_src}_OUTPUT_NAME"]: - field_name = self.c_dict[f"{data_src}_OUTPUT_NAME"] - else: - field_name = var_info[f"{data_src.lower()}_name"] - self.logger.warning(f'{data_src}_PCP_COMBINE_OUTPUT_NAME is not set. Using ' - f'{field_name} from {data_src}_VAR{var_info.get("index")}_NAME. ') - - if self.c_dict[f"{data_src}_OUTPUT_ACCUM"]: - accum = self.c_dict[f"{data_src}_OUTPUT_ACCUM"] - level_type = 'A' - else: - level = var_info[f'{data_src.lower()}_level'] - level_type, accum = util.split_level(level) - self.logger.warning(f'{data_src}_PCP_COMBINE_OUTPUT_ACCUM is not set. Using ' - f'{accum} from {data_src}_VAR{var_info.get("index")}_LEVELS. ' - 'It is recommended that you explicitly set the ' - 'output accumulation.') - - accum = time_util.get_seconds_from_string(accum, - default_unit='H', - valid_time=time_info['valid']) - if accum is None: - self.log_error("Could not get accumulation from {data_src}_VAR{var_info.get('index')}_LEVEL or " - f"{data_src}_PCP_COMBINE_OUTPUT_ACCUM") - return None - - lead = time_info['lead_seconds'] - lead2 = lead - accum - - self.logger.debug(f"Attempting to build {time_util.ti_get_lead_string(accum, False)} " - f"accumulation by subtracting {time_util.ti_get_lead_string(lead2, False)} " - f"from {time_util.ti_get_lead_string(lead, False)}.") - - # set output file information - out_file = do_string_sub(out_template, - level=accum, - **time_info) - self.outfile = out_file - self.outdir = out_dir - - # get first file - pcpSts1 = do_string_sub(in_template, - level=accum, - **time_info) - file1_expected = os.path.join(in_dir, pcpSts1) - file1 = util.preprocess_file(file1_expected, - self.c_dict[data_src+'_INPUT_DATATYPE'], - self.config) - - if file1 is None: - self.log_error(f'Could not find {data_src} file {file1_expected} using template {in_template}') - return None - - # if level type is A (accum) and second lead is 0, then - # run PCPCombine in -add mode with just the first file - if lead2 == 0 and level_type == 'A': - self.logger.debug("Subtracted accumulation is 0, so running ADD mode on one file") - self.method = 'ADD' - lead = time_util.seconds_to_met_time(lead) - self.add_input_file(file1, lead) - return self.get_command() - - # else continue building -subtract command - - # set time info for second lead - input_dict2 = { 'init' : time_info['init'], - 'lead' : lead2 } - time_info2 = time_util.ti_calculate(input_dict2) - if hasattr(time_info, 'custom'): - time_info2['custom'] = time_info['custom'] - - pcpSts2 = do_string_sub(in_template, - level=accum, - **time_info2) - file2_expected = os.path.join(in_dir, pcpSts2) - file2 = util.preprocess_file(file2_expected, - 
self.c_dict[data_src+'_INPUT_DATATYPE'], - self.config) - - if file2 is None: - self.log_error(f'Could not find {data_src} file {file2_expected} using template {in_template}') - return None - - if self.c_dict[data_src+'_INPUT_DATATYPE'] != 'GRIB': - field_name_1 = do_string_sub(field_name, **time_info) - lead = "'name=\"" + field_name_1 + "\";'" - field_name_2 = do_string_sub(field_name, **time_info2) - lead2 = "'name=\"" + field_name_2 + "\";'" - # TODO: need to add level if NetCDF input - how to specify levels for each - else: - lead = time_util.seconds_to_met_time(lead) - lead2 = time_util.seconds_to_met_time(lead2) - - self.add_input_file(file1, - lead) - self.add_input_file(file2, - lead2) - - return self.get_command() - - - def setup_sum_method(self, time_info, var_info, data_src): - """!Setup pcp_combine to build desired accumulation based on - init/valid times and accumulations - Args: - @param time_info object containing timing information - @param var_info object containing variable information - @params data_src data type (FCST or OBS) - @rtype string - @return path to output file""" - if self.c_dict[f"{data_src}_ACCUMS"]: - in_accum = self.c_dict[data_src+'_ACCUMS'][0] - else: - in_accum = 0 - - in_accum = time_util.time_string_to_met_time(in_accum, 'H') - - in_dir, in_template = self.get_dir_and_template(data_src, 'INPUT') - out_dir, out_template = self.get_dir_and_template(data_src, 'OUTPUT') - - # if OUTPUT_ACCUM is set, use that instead of obs_level - # and use obs_level as field level - if self.c_dict[data_src+'_OUTPUT_ACCUM']: - out_accum = self.c_dict[data_src+'_OUTPUT_ACCUM'] - else: - out_accum = var_info[data_src.lower()+'_level'] - if out_accum[0].isalpha(): - out_accum = out_accum[1:] - - self.logger.warning(f'{data_src}_PCP_COMBINE_OUTPUT_ACCUM is not set. Using ' - f'{out_accum} from {data_src}_VAR{var_info.get("index")}_LEVELS. ' - 'It is recommended that you explicitly set the ' - 'output accumulation.') - - if self.c_dict[data_src+'_OUTPUT_NAME']: - self.output_name = self.c_dict[data_src+'_OUTPUT_NAME'] - else: - self.output_name = var_info[f"{data_src.lower()}_name"] - self.logger.warning(f'{data_src}_PCP_COMBINE_OUTPUT_NAME is not set. 
Using ' - f'{self.output_name} from {data_src}_VAR{var_info.get("index")}_NAME.') + field_string = self.get_field_string(time_info=time_info, + name=name, + level=level) + field_format = f"-field {field_string}" + self.args.append(field_format) + + def _handle_field_argument(self, data_src, time_info): + if not self.c_dict[f'{data_src}_NAMES']: + return - # set field name and level if set in config - if self.c_dict[f'{data_src}_NAMES']: - self.field_name = self.c_dict[f'{data_src}_NAMES'][0] + field_args = {'name': self.c_dict[f'{data_src}_NAMES'][0]} if self.c_dict[f'{data_src}_LEVELS']: - self.field_level = self.c_dict[f'{data_src}_LEVELS'][0] + field_args['level'] = self.c_dict[f'{data_src}_LEVELS'][0] if self.c_dict[f'{data_src}_OPTIONS']: - self.field_extra = do_string_sub(self.c_dict[f'{data_src}_OPTIONS'][0], - **time_info) - - init_time = time_info['init'].strftime('%Y%m%d_%H%M%S') - valid_time = time_info['valid'].strftime('%Y%m%d_%H%M%S') - - time_info['level'] = time_util.get_seconds_from_string(out_accum, - 'H', - time_info['valid']) - - out_accum = time_util.time_string_to_met_time(out_accum, - 'H') - - in_regex = util.template_to_regex(in_template, time_info, - self.logger) - in_regex_split = in_regex.split('/') - in_dir = os.path.join(in_dir, *in_regex_split[0:-1]) - in_regex = in_regex_split[-1] - - self.init_time = init_time - self.valid_time = valid_time - self.in_accum = in_accum - self.out_accum = out_accum - self.pcp_dir = in_dir - self.pcp_regex = in_regex - self.outdir = out_dir - - pcp_out = do_string_sub(out_template, - **time_info) - self.outfile = pcp_out - - return self.get_command() - - - def setup_add_method(self, time_info, var_info, data_src): - """!Setup pcp_combine to add files to build desired accumulation - Args: - @param time_info dictionary containing timing information - @param var_info object containing variable information - @params data_src data type (FCST or OBS) - @rtype string - @return path to output file""" - - # if [FCST/OBS]_OUTPUT_[NAME/ACCUM] are set, use them instead of - # [FCST/OBS]_VAR_[NAME/LEVELS] - if self.c_dict[f"{data_src}_OUTPUT_ACCUM"]: - accum_string = self.c_dict[f"{data_src}_OUTPUT_ACCUM"] - else: - level = var_info[f'{data_src.lower()}_level'] - _, accum_string = util.split_level(level) - - self.logger.warning(f'{data_src}_PCP_COMBINE_OUTPUT_ACCUM is not set. Using ' - f'{accum_string} from {data_src}_VAR{var_info.get("index")}_LEVELS. ' - 'It is recommended that you explicitly set the ' - 'output accumulation.') - - if self.c_dict[f"{data_src}_OUTPUT_NAME"]: - field_name = self.c_dict[f"{data_src}_OUTPUT_NAME"] - else: - field_name = var_info[f"{data_src.lower()}_name"] - - self.logger.warning(f'{data_src}_PCP_COMBINE_OUTPUT_NAME is not set. 
Using ' - f'{field_name} from {data_src}_VAR{var_info.get("index")}_NAME.') + field_args['extra'] = self.c_dict[f'{data_src}_OPTIONS'][0] - # get number of seconds relative to valid time - accum_seconds = time_util.get_seconds_from_string(accum_string, - default_unit='H', - valid_time=time_info['valid']) - if accum_seconds is None: - self.log_error(f'Invalid accumulation specified: {accum_string}') - return - - # create list of tuples for input levels and optional field names - self.build_input_accum_list(data_src, time_info) - - in_dir, in_template = self.get_dir_and_template(data_src, 'INPUT') - out_dir, out_template = self.get_dir_and_template(data_src, 'OUTPUT') - - # check _PCP_COMBINE_INPUT_DIR to get accumulation files - self.input_dir = in_dir - - if not self.get_accumulation(time_info, accum_string, data_src): - self.log_error(f'Could not find files to build accumulation in {in_dir} using template {in_template}') - return None - - self.outdir = out_dir - time_info['level'] = int(accum_seconds) - pcp_out = do_string_sub(out_template, - **time_info) - self.outfile = pcp_out - self.args.append("-name " + field_name) - return self.get_command() - - def setup_derive_method(self, time_info, var_info, data_src): - """!Setup pcp_combine to derive stats - Args: - @param time_info dictionary containing timing information - @param var_info object containing variable information - @params data_src data type (FCST or OBS) - @rtype string - @return path to output file""" - if self.c_dict[f"{data_src}_NAMES"]: - self.field_name = self.c_dict[f"{data_src}_NAMES"][0] - - if self.c_dict[f"{data_src}_LEVELS"]: - self.field_level = self.c_dict[f"{data_src}_LEVELS"][0] - - if self.c_dict[f"{data_src}_OUTPUT_NAME"]: - self.output_name = self.c_dict[f"{data_src}_OUTPUT_NAME"] - # if list of output names, remove whitespace between items - self.output_name = [name.strip() for name in self.output_name.split(',')] - self.output_name = ','.join(self.output_name) + field_string = self.get_field_string(time_info=time_info, + **field_args) + field_string = f'-field {field_string}' + self.args.append(field_string) - if self.c_dict[f"{data_src}_OPTIONS"]: - self.field_extra = do_string_sub(self.c_dict[f'{data_src}_OPTIONS'][0], - **time_info) - - in_dir, in_template = self.get_dir_and_template(data_src, 'INPUT') - out_dir, out_template = self.get_dir_and_template(data_src, 'OUTPUT') - - # check _PCP_COMBINE_INPUT_DIR to get accumulation files - self.input_dir = in_dir - - # create list of tuples for input levels and optional field names - self.build_input_accum_list(data_src, time_info) - - # get files - lookback = self.c_dict[data_src+'_DERIVE_LOOKBACK'] - lookback_seconds = time_util.get_seconds_from_string(lookback, - default_unit='H', - valid_time=time_info['valid']) - if lookback_seconds is None: - self.log_error(f'Invalid format for derived lookback: {lookback}') + def _handle_name_argument(self, output_name, data_src): + if not output_name: return - # if no lookback is specified, get files using the template without - # using the get accumulation logic - if lookback_seconds == 0: - self.logger.debug(f"{data_src}_PCP_COMBINE_DERIVE_LOOKBACK unset " - "or set to 0. 
Using template to find files.") - accum_dict = self.c_dict['ACCUM_DICT_LIST'][0] - addon = self.get_addon(accum_dict, 0, time_info.get('valid', '')) - input_files = self.find_data(time_info, - var_info, - data_type=data_src, - return_list=True) - if not input_files: - return None - - for input_file in input_files: - self.add_input_file(input_file, addon) - - elif not self.get_accumulation(time_info, - lookback, - data_src): - self.log_error(f'Could not find files in {in_dir} using template {in_template}') - return None + # if list of output names, remove whitespace between items + output_names = [name.strip() for name in output_name.split(',')] - # set output - self.outdir = out_dir - time_info['level'] = lookback_seconds - pcp_out = do_string_sub(out_template, - **time_info) - self.outfile = pcp_out - - # set STAT_LIST for data type (FCST/OBS) - self.c_dict['STAT_LIST'] = self.c_dict[f"{data_src}_STAT_LIST"] - return self.get_command() - - def setup_user_method(self, time_info, data_src): - """!Setup pcp_combine to call user defined command - Args: - @param time_info dictionary containing timing information - @param var_info object containing variable information - @params data_src data type (FCST or OBS) - @rtype string - @return path to output file""" - command_template = self.config.getraw('config', data_src + '_PCP_COMBINE_COMMAND') - self.user_command = do_string_sub(command_template, **time_info) - - # get output accumulation in case output template uses level - accum_string = '0' - if self.c_dict[f"{data_src}_OUTPUT_ACCUM"]: - accum_string = self.c_dict[f"{data_src}_OUTPUT_ACCUM"] - _, accum_string = util.split_level(accum_string) - - accum_seconds = time_util.get_seconds_from_string(accum_string, 'H') - if accum_seconds is not None: - time_info['level'] = int(accum_seconds) - - # add output path to user defined command - self.outdir, out_template = self.get_dir_and_template(data_src, 'OUTPUT') - - self.outfile = do_string_sub(out_template, - **time_info) - - out_path = self.get_output_path() - - # create outdir (including subdir in outfile) if it doesn't exist - if not os.path.exists(os.path.dirname(out_path)): - os.makedirs(os.path.dirname(out_path)) - - self.user_command += ' ' + out_path + # handle extra output names if specified + extra_output_names = self.c_dict.get(data_src + '_EXTRA_OUTPUT_NAMES') + if extra_output_names: + output_names.extend(extra_output_names) - return '{} -v {} {}'.format(self.app_path, self.c_dict['VERBOSITY'], self.user_command) + name_format = '","'.join(output_names) + name_format = f'-name "{name_format}"' + self.args.append(name_format) - def build_input_accum_list(self, data_src, time_info): + def _build_input_accum_list(self, data_src, time_info): accum_list = self.c_dict[data_src + '_ACCUMS'] level_list = self.c_dict[data_src + '_LEVELS'] name_list = self.c_dict[data_src + '_NAMES'] extra_list = self.c_dict[data_src + '_OPTIONS'] - # if no name list, create list of None values + # if no list, create list of None values if not name_list: name_list = [None] * len(accum_list) - - # do the same for level list if not level_list: level_list = [None] * len(accum_list) - - # do the same for extra list if not extra_list: extra_list = [None] * len(accum_list) accum_dict_list = [] - for accum, level, name, extra in zip(accum_list, level_list, name_list, extra_list): - + for accum, level, name, extra in zip(accum_list, level_list, name_list, + extra_list): template = None # if accum is forecast lead, set amount to 999999 and save template if 'lead' in 
accum: @@ -1116,7 +949,7 @@ def build_input_accum_list(self, data_src, time_info): accum = '9999999S' # convert accum amount to seconds from time string - amount = time_util.get_seconds_from_string(accum, 'H', time_info['valid']) + amount = get_seconds_from_string(accum, 'H', time_info['valid']) accum_dict_list.append({'amount': amount, 'name': name, diff --git a/metplus/wrappers/point_stat_wrapper.py b/metplus/wrappers/point_stat_wrapper.py index 77374c5585..e461fed985 100755 --- a/metplus/wrappers/point_stat_wrapper.py +++ b/metplus/wrappers/point_stat_wrapper.py @@ -66,6 +66,7 @@ class PointStatWrapper(CompareGriddedWrapper): 'rps', 'eclv', 'mpr', + 'orank', ] def __init__(self, config, instance=None, config_overrides={}): diff --git a/metplus/wrappers/reformat_gridded_wrapper.py b/metplus/wrappers/reformat_gridded_wrapper.py index 18c10affff..2021d3617b 100755 --- a/metplus/wrappers/reformat_gridded_wrapper.py +++ b/metplus/wrappers/reformat_gridded_wrapper.py @@ -26,9 +26,10 @@ @endcode ''' + class ReformatGriddedWrapper(CommandBuilder): - """!Common functionality to wrap similar MET applications -that reformat gridded data + """! Common functionality to wrap similar MET applications + that reformat gridded data """ def __init__(self, config, instance=None, config_overrides={}): super().__init__(config, @@ -39,18 +40,17 @@ def __init__(self, config, instance=None, config_overrides={}): # pylint:disable=unused-argument def run_at_time_once(self, time_info, var_list, data_type): """!To be implemented by child class""" - self.log_error('ReformatGridded wrapper cannot be called directly.'+\ - ' Please use child wrapper') + self.log_error('ReformatGridded wrapper cannot be called directly.' + ' Please use child wrapper') return def run_at_time(self, input_dict): """! Runs the MET application for a given run time. Processing forecast - or observation data is determined by conf variables. This function - loops over the list of forecast leads and runs the application for - each. - Args: - @param init_time initialization time to run. -1 if not set - @param valid_time valid time to run. -1 if not set + or observation data is determined by conf variables. + This function loops over the list of forecast leads and runs + the application for each. + + @param input_dict dictionary containing init or valid time info """ app_name_caps = self.app_name.upper() class_name = self.__class__.__name__[0: -7] @@ -63,10 +63,11 @@ def run_at_time(self, input_dict): run_list.append("OBS") if not run_list: - self.log_error(class_name+" specified in process_list, but "+\ - "FCST_"+app_name_caps+"_RUN and OBS_"+app_name_caps+"_RUN "+\ - " are both False. Set one or both to true or "+\ - "remove "+class_name+" from the process_list") + self.log_error(f"{class_name} specified in process_list, but " + f"FCST_{app_name_caps}_RUN and " + f"OBS_{app_name_caps}_RUN are both False. 
" + f"Set one or both to true or remove {class_name} " + "from the process_list") return for to_run in run_list: @@ -76,17 +77,19 @@ def run_at_time(self, input_dict): time_info = time_util.ti_calculate(input_dict) - self.logger.info("Processing forecast lead {}".format(time_info['lead_string'])) - + self.logger.info("Processing forecast lead " + f"{time_info['lead_string']}") - if util.skip_time(time_info, self.c_dict.get('SKIP_TIMES', {})): + if util.skip_time(time_info, self.c_dict.get('SKIP_TIMES')): self.logger.debug('Skipping run time') continue - # loop over custom string list and set custom in the time_info dictionary + # loop over custom string list and set + # custom in the time_info dictionary for custom_string in self.c_dict['CUSTOM_LOOP_LIST']: if custom_string: - self.logger.info(f"Processing custom string: {custom_string}") + self.logger.info("Processing custom string: " + f"{custom_string}") time_info['custom'] = custom_string self.c_dict['CUSTOM_STRING'] = custom_string diff --git a/metplus/wrappers/tc_pairs_wrapper.py b/metplus/wrappers/tc_pairs_wrapper.py index 59a6fa1a13..35bb6c2ec5 100755 --- a/metplus/wrappers/tc_pairs_wrapper.py +++ b/metplus/wrappers/tc_pairs_wrapper.py @@ -49,12 +49,15 @@ class TCPairsWrapper(CommandBuilder): 'METPLUS_STORM_NAME', 'METPLUS_INIT_BEG', 'METPLUS_INIT_END', - 'METPLUS_INIT_INCLUDE', - 'METPLUS_INIT_EXCLUDE', + 'METPLUS_INIT_INC', + 'METPLUS_INIT_EXC', 'METPLUS_VALID_BEG', 'METPLUS_VALID_END', 'METPLUS_DLAND_FILE', 'METPLUS_CONSENSUS_LIST', + 'METPLUS_WRITE_VALID', + 'METPLUS_VALID_INC', + 'METPLUS_VALID_EXC', ] WILDCARDS = { @@ -112,16 +115,32 @@ def create_c_dict(self): self.add_met_config(name='init_inc', data_type='list', - env_var_name='METPLUS_INIT_INCLUDE', metplus_configs=['TC_PAIRS_INIT_INCLUDE', + 'TC_PAIRS_INIT_INC', 'INIT_INCLUDE']) self.add_met_config(name='init_exc', data_type='list', - env_var_name='METPLUS_INIT_EXCLUDE', metplus_configs=['TC_PAIRS_INIT_EXCLUDE', + 'TC_PAIRS_INIT_EXC', 'INIT_EXCLUDE']) + self.add_met_config(name='valid_inc', + data_type='list', + metplus_configs=['TC_PAIRS_VALID_INCLUDE', + 'TC_PAIRS_VALID_INC', + 'VALID_INCLUDE']) + + self.add_met_config(name='valid_exc', + data_type='list', + metplus_configs=['TC_PAIRS_VALID_EXCLUDE', + 'TC_PAIRS_VALID_EXC', + 'VALID_EXCLUDE']) + + self.add_met_config(name='write_valid', + data_type='list', + metplus_configs=['TC_PAIRS_WRITE_VALID']) + self.add_met_config(name='valid_beg', data_type='string', metplus_configs=['TC_PAIRS_VALID_BEG', diff --git a/parm/met_config/TCPairsConfig_wrapped b/parm/met_config/TCPairsConfig_wrapped index 4266c4a90a..c780a3d486 100644 --- a/parm/met_config/TCPairsConfig_wrapped +++ b/parm/met_config/TCPairsConfig_wrapped @@ -44,8 +44,18 @@ ${METPLUS_STORM_NAME} // ${METPLUS_INIT_BEG} ${METPLUS_INIT_END} -${METPLUS_INIT_INCLUDE} -${METPLUS_INIT_EXCLUDE} +// init_inc = +${METPLUS_INIT_INC} +// init_exc = +${METPLUS_INIT_EXC} + +// valid_inc = +${METPLUS_VALID_INC} +// valid_exc = +${METPLUS_VALID_EXC} + +// write_valid = +${METPLUS_WRITE_VALID} // // Valid model time window diff --git a/parm/use_cases/met_tool_wrapper/GFDLTracker/GFDLTracker_ETC.conf b/parm/use_cases/met_tool_wrapper/GFDLTracker/GFDLTracker_ETC.conf new file mode 100644 index 0000000000..7baec48b3d --- /dev/null +++ b/parm/use_cases/met_tool_wrapper/GFDLTracker/GFDLTracker_ETC.conf @@ -0,0 +1,131 @@ +[config] + +PROCESS_LIST = GFDLTracker + +LOOP_BY = INIT + +INIT_TIME_FMT = %Y%m%d%H +INIT_BEG = 2021071300 +INIT_END = 2021071300 +INIT_INCREMENT = 6H + +LEAD_SEQ = * + 
+GFDL_TRACKER_INPUT_DIR = {INPUT_BASE}/met_test/gfdl/gfs +GFDL_TRACKER_INPUT_TEMPLATE = {init?fmt=%Y%m%d}/gfs.t{init?fmt=%H}z.pgrb2.1p00.f{lead?fmt=%3H} + +GFDL_TRACKER_TC_VITALS_INPUT_DIR = {GFDL_TRACKER_INPUT_DIR} +GFDL_TRACKER_TC_VITALS_INPUT_TEMPLATE = syndat_tcvitals.{init?fmt=%Y} + +GFDL_TRACKER_GEN_VITALS_INPUT_DIR = {GFDL_TRACKER_INPUT_DIR} +GFDL_TRACKER_GEN_VITALS_INPUT_TEMPLATE = genesis.vitals.gfso.glbl.{init?fmt=%Y%m} + +GFDL_TRACKER_OUTPUT_DIR = {OUTPUT_BASE}/gfdl_tracker/etc +GFDL_TRACKER_OUTPUT_TEMPLATE = gfs.{init?fmt=%Y%m%d%H}.etc.txt + +GFDL_TRACKER_GRIB_VERSION = 2 + +GFDL_TRACKER_NML_TEMPLATE_FILE = {PARM_BASE}/use_cases/met_tool_wrapper/GFDLTracker/template.nml + +GFDL_TRACKER_DATEIN_INP_MODEL = 1 +GFDL_TRACKER_DATEIN_INP_MODTYP = "global" +GFDL_TRACKER_DATEIN_INP_LT_UNITS = "hours" +GFDL_TRACKER_DATEIN_INP_FILE_SEQ = "multi" +GFDL_TRACKER_DATEIN_INP_NESTTYP = "fixed" + +GFDL_TRACKER_ATCFINFO_ATCFNUM = 81 +GFDL_TRACKER_ATCFINFO_ATCFNAME = "GFML" +GFDL_TRACKER_ATCFINFO_ATCFFREQ = 600 + +GFDL_TRACKER_TRACKERINFO_TYPE = "midlat" +GFDL_TRACKER_TRACKERINFO_MSLPTHRESH = 0.0015 +GFDL_TRACKER_TRACKERINFO_USE_BACKUP_MSLP_GRAD_CHECK = True +GFDL_TRACKER_TRACKERINFO_V850THRESH = 1.5 +GFDL_TRACKER_TRACKERINFO_USE_BACKUP_850_VT_CHECK = True +GFDL_TRACKER_TRACKERINFO_ENABLE_TIMING = 1 +GFDL_TRACKER_TRACKERINFO_GRIDTYPE = "global" +GFDL_TRACKER_TRACKERINFO_CONTINT = 100.0 +GFDL_TRACKER_TRACKERINFO_WANT_OCI = T +GFDL_TRACKER_TRACKERINFO_OUT_VIT = True +GFDL_TRACKER_TRACKERINFO_USE_LAND_MASK = False +GFDL_TRACKER_TRACKERINFO_INP_DATA_TYPE = "grib" +GFDL_TRACKER_TRACKERINFO_GRIBVER = 2 +GFDL_TRACKER_TRACKERINFO_G2_JPDTN = 0 +GFDL_TRACKER_TRACKERINFO_G2_MSLP_PARM_ID = 192 +GFDL_TRACKER_TRACKERINFO_G1_MSLP_PARM_ID = 2 +GFDL_TRACKER_TRACKERINFO_G1_SFCWIND_LEV_TYP = 105 +GFDL_TRACKER_TRACKERINFO_G1_SFCWIND_LEV_VAL = 10 + +GFDL_TRACKER_TRACKERINFO_WESTBD = 0 +GFDL_TRACKER_TRACKERINFO_EASTBD = 358 +GFDL_TRACKER_TRACKERINFO_SOUTHBD = -89 +GFDL_TRACKER_TRACKERINFO_NORTHBD = 89 + +GFDL_TRACKER_PHASEINFO_PHASEFLAG = True +GFDL_TRACKER_PHASEINFO_PHASESCHEME = "both" +GFDL_TRACKER_PHASEINFO_WCORE_DEPTH = 1.0 + +GFDL_TRACKER_STRUCTINFO_STRUCTFLAG = False +GFDL_TRACKER_STRUCTINFO_IKEFLAG = False + +GFDL_TRACKER_FNAMEINFO_GMODNAME = "gfs" +GFDL_TRACKER_FNAMEINFO_RUNDESCR = "t{init?fmt=%H}z.pgrb2" +GFDL_TRACKER_FNAMEINFO_ATCFDESCR = "1p00" + +GFDL_TRACKER_WAITINFO_USE_WAITFOR = True +GFDL_TRACKER_WAITINFO_WAIT_MIN_AGE = 10 +GFDL_TRACKER_WAITINFO_WAIT_MIN_SIZE = 100 +GFDL_TRACKER_WAITINFO_WAIT_MAX_WAIT = 3600 +GFDL_TRACKER_WAITINFO_WAIT_SLEEPTIME = 5 +GFDL_TRACKER_WAITINFO_USE_PER_FCST_COMMAND = True +GFDL_TRACKER_WAITINFO_PER_FCST_COMMAND = "./deliver %[FHOUR] %[FMIN]" + +GFDL_TRACKER_NETCDFINFO_LAT_NAME = "" +GFDL_TRACKER_NETCDFINFO_LMASKNAME = "" +GFDL_TRACKER_NETCDFINFO_LON_NAME = "" +GFDL_TRACKER_NETCDFINFO_MSLPNAME = "" +GFDL_TRACKER_NETCDFINFO_NETCDF_FILENAME = "" +GFDL_TRACKER_NETCDFINFO_NUM_NETCDF_VARS = 0 +GFDL_TRACKER_NETCDFINFO_RV700NAME = "" +GFDL_TRACKER_NETCDFINFO_RV850NAME = "" +GFDL_TRACKER_NETCDFINFO_TIME_NAME = "" +GFDL_TRACKER_NETCDFINFO_TIME_UNITS = "" +GFDL_TRACKER_NETCDFINFO_TMEAN_300_500_NAME = "" +GFDL_TRACKER_NETCDFINFO_U500NAME = "" +GFDL_TRACKER_NETCDFINFO_U700NAME = "" +GFDL_TRACKER_NETCDFINFO_U850NAME = "" +GFDL_TRACKER_NETCDFINFO_USFCNAME = "" +GFDL_TRACKER_NETCDFINFO_V500NAME = "" +GFDL_TRACKER_NETCDFINFO_V700NAME = "" +GFDL_TRACKER_NETCDFINFO_V850NAME = "" +GFDL_TRACKER_NETCDFINFO_VSFCNAME = "" +GFDL_TRACKER_NETCDFINFO_Z200NAME = "" +GFDL_TRACKER_NETCDFINFO_Z300NAME = "" 
+GFDL_TRACKER_NETCDFINFO_Z350NAME = "" +GFDL_TRACKER_NETCDFINFO_Z400NAME = "" +GFDL_TRACKER_NETCDFINFO_Z450NAME = "" +GFDL_TRACKER_NETCDFINFO_Z500NAME = "" +GFDL_TRACKER_NETCDFINFO_Z550NAME = "" +GFDL_TRACKER_NETCDFINFO_Z600NAME = "" +GFDL_TRACKER_NETCDFINFO_Z650NAME = "" +GFDL_TRACKER_NETCDFINFO_Z700NAME = "" +GFDL_TRACKER_NETCDFINFO_Z750NAME = "" +GFDL_TRACKER_NETCDFINFO_Z800NAME = "" +GFDL_TRACKER_NETCDFINFO_Z850NAME = "" +GFDL_TRACKER_NETCDFINFO_Z900NAME = "" + +GFDL_TRACKER_USER_WANTS_TO_TRACK_ZETA850 = True +GFDL_TRACKER_USER_WANTS_TO_TRACK_ZETA700 = False +GFDL_TRACKER_USER_WANTS_TO_TRACK_WCIRC850 = True +GFDL_TRACKER_USER_WANTS_TO_TRACK_WCIRC700 = False +GFDL_TRACKER_USER_WANTS_TO_TRACK_GPH850 = True +GFDL_TRACKER_USER_WANTS_TO_TRACK_GPH700 = False +GFDL_TRACKER_USER_WANTS_TO_TRACK_MSLP = True +GFDL_TRACKER_USER_WANTS_TO_TRACK_WCIRCSFC = True +GFDL_TRACKER_USER_WANTS_TO_TRACK_ZETASFC = True +GFDL_TRACKER_USER_WANTS_TO_TRACK_THICK500850 = True +GFDL_TRACKER_USER_WANTS_TO_TRACK_THICK200500 = True +GFDL_TRACKER_USER_WANTS_TO_TRACK_THICK200850 = True + +GFDL_TRACKER_VERBOSE_VERB = 3 +GFDL_TRACKER_VERBOSE_VERB_G2 = 0 diff --git a/parm/use_cases/met_tool_wrapper/GFDLTracker/GFDLTracker_Genesis.conf b/parm/use_cases/met_tool_wrapper/GFDLTracker/GFDLTracker_Genesis.conf new file mode 100644 index 0000000000..055265621d --- /dev/null +++ b/parm/use_cases/met_tool_wrapper/GFDLTracker/GFDLTracker_Genesis.conf @@ -0,0 +1,132 @@ +[config] + +PROCESS_LIST = GFDLTracker + +LOOP_BY = INIT + +INIT_TIME_FMT = %Y%m%d%H +INIT_BEG = 2021071300 +INIT_END = 2021071300 +INIT_INCREMENT = 6H + +LEAD_SEQ = * + +GFDL_TRACKER_INPUT_DIR = {INPUT_BASE}/met_test/gfdl/gfs +GFDL_TRACKER_INPUT_TEMPLATE = {init?fmt=%Y%m%d}/gfs.t{init?fmt=%H}z.pgrb2.1p00.f{lead?fmt=%3H} + +GFDL_TRACKER_TC_VITALS_INPUT_DIR = {GFDL_TRACKER_INPUT_DIR} +GFDL_TRACKER_TC_VITALS_INPUT_TEMPLATE = syndat_tcvitals.{init?fmt=%Y} + +GFDL_TRACKER_GEN_VITALS_INPUT_DIR = {GFDL_TRACKER_INPUT_DIR} +GFDL_TRACKER_GEN_VITALS_INPUT_TEMPLATE = genesis.vitals.gfso.glbl.{init?fmt=%Y%m} + +GFDL_TRACKER_OUTPUT_DIR = {OUTPUT_BASE}/gfdl_tracker/genesis +GFDL_TRACKER_OUTPUT_TEMPLATE = gfs.{init?fmt=%Y%m%d%H}.genesis.txt + +GFDL_TRACKER_GRIB_VERSION = 2 + +GFDL_TRACKER_NML_TEMPLATE_FILE = {PARM_BASE}/use_cases/met_tool_wrapper/GFDLTracker/template.nml + +GFDL_TRACKER_DATEIN_INP_MODEL = 1 +GFDL_TRACKER_DATEIN_INP_MODTYP = "global" +GFDL_TRACKER_DATEIN_INP_LT_UNITS = "hours" +GFDL_TRACKER_DATEIN_INP_FILE_SEQ = "multi" +GFDL_TRACKER_DATEIN_INP_NESTTYP = "fixed" + +GFDL_TRACKER_ATCFINFO_ATCFNUM = 81 +GFDL_TRACKER_ATCFINFO_ATCFNAME = "GFML" +GFDL_TRACKER_ATCFINFO_ATCFFREQ = 600 + +GFDL_TRACKER_TRACKERINFO_TYPE = "tcgen" +GFDL_TRACKER_TRACKERINFO_MSLPTHRESH = 0.0015 +GFDL_TRACKER_TRACKERINFO_USE_BACKUP_MSLP_GRAD_CHECK = True +GFDL_TRACKER_TRACKERINFO_V850THRESH = 1.5 +GFDL_TRACKER_TRACKERINFO_USE_BACKUP_850_VT_CHECK = True +GFDL_TRACKER_TRACKERINFO_ENABLE_TIMING = 1 +GFDL_TRACKER_TRACKERINFO_GRIDTYPE = "global" +GFDL_TRACKER_TRACKERINFO_CONTINT = 100.0 +GFDL_TRACKER_TRACKERINFO_WANT_OCI = T +GFDL_TRACKER_TRACKERINFO_OUT_VIT = True +GFDL_TRACKER_TRACKERINFO_USE_LAND_MASK = False +GFDL_TRACKER_TRACKERINFO_INP_DATA_TYPE = "grib" +GFDL_TRACKER_TRACKERINFO_GRIBVER = 2 +GFDL_TRACKER_TRACKERINFO_G2_JPDTN = 0 +GFDL_TRACKER_TRACKERINFO_G2_MSLP_PARM_ID = 192 +GFDL_TRACKER_TRACKERINFO_G1_MSLP_PARM_ID = 2 +GFDL_TRACKER_TRACKERINFO_G1_SFCWIND_LEV_TYP = 105 +GFDL_TRACKER_TRACKERINFO_G1_SFCWIND_LEV_VAL = 10 + +GFDL_TRACKER_TRACKERINFO_WESTBD = 0 +GFDL_TRACKER_TRACKERINFO_EASTBD = 358 
+GFDL_TRACKER_TRACKERINFO_SOUTHBD = -89 +GFDL_TRACKER_TRACKERINFO_NORTHBD = 89 + +GFDL_TRACKER_PHASEINFO_PHASEFLAG = True +GFDL_TRACKER_PHASEINFO_PHASESCHEME = "both" +GFDL_TRACKER_PHASEINFO_WCORE_DEPTH = 1.0 + +GFDL_TRACKER_STRUCTINFO_STRUCTFLAG = False +GFDL_TRACKER_STRUCTINFO_IKEFLAG = False + +GFDL_TRACKER_FNAMEINFO_GMODNAME = "gfs" +GFDL_TRACKER_FNAMEINFO_RUNDESCR = "t{init?fmt=%H}z.pgrb2" +GFDL_TRACKER_FNAMEINFO_ATCFDESCR = "1p00" + +GFDL_TRACKER_WAITINFO_USE_WAITFOR = True +GFDL_TRACKER_WAITINFO_WAIT_MIN_AGE = 10 +GFDL_TRACKER_WAITINFO_WAIT_MIN_SIZE = 100 +GFDL_TRACKER_WAITINFO_WAIT_MAX_WAIT = 3600 +GFDL_TRACKER_WAITINFO_WAIT_SLEEPTIME = 5 +GFDL_TRACKER_WAITINFO_USE_PER_FCST_COMMAND = True +GFDL_TRACKER_WAITINFO_PER_FCST_COMMAND = "./deliver %[FHOUR] %[FMIN]" + +GFDL_TRACKER_NETCDFINFO_LAT_NAME = "" +GFDL_TRACKER_NETCDFINFO_LMASKNAME = "" +GFDL_TRACKER_NETCDFINFO_LON_NAME = "" +GFDL_TRACKER_NETCDFINFO_MSLPNAME = "" +GFDL_TRACKER_NETCDFINFO_NETCDF_FILENAME = "" +GFDL_TRACKER_NETCDFINFO_NUM_NETCDF_VARS = 0 +GFDL_TRACKER_NETCDFINFO_RV700NAME = "" +GFDL_TRACKER_NETCDFINFO_RV850NAME = "" +GFDL_TRACKER_NETCDFINFO_TIME_NAME = "" +GFDL_TRACKER_NETCDFINFO_TIME_UNITS = "" +GFDL_TRACKER_NETCDFINFO_TMEAN_300_500_NAME = "" +GFDL_TRACKER_NETCDFINFO_U500NAME = "" +GFDL_TRACKER_NETCDFINFO_U700NAME = "" +GFDL_TRACKER_NETCDFINFO_U850NAME = "" +GFDL_TRACKER_NETCDFINFO_USFCNAME = "" +GFDL_TRACKER_NETCDFINFO_V500NAME = "" +GFDL_TRACKER_NETCDFINFO_V700NAME = "" +GFDL_TRACKER_NETCDFINFO_V850NAME = "" +GFDL_TRACKER_NETCDFINFO_VSFCNAME = "" +GFDL_TRACKER_NETCDFINFO_Z200NAME = "" +GFDL_TRACKER_NETCDFINFO_Z300NAME = "" +GFDL_TRACKER_NETCDFINFO_Z350NAME = "" +GFDL_TRACKER_NETCDFINFO_Z400NAME = "" +GFDL_TRACKER_NETCDFINFO_Z450NAME = "" +GFDL_TRACKER_NETCDFINFO_Z500NAME = "" +GFDL_TRACKER_NETCDFINFO_Z550NAME = "" +GFDL_TRACKER_NETCDFINFO_Z600NAME = "" +GFDL_TRACKER_NETCDFINFO_Z650NAME = "" +GFDL_TRACKER_NETCDFINFO_Z700NAME = "" +GFDL_TRACKER_NETCDFINFO_Z750NAME = "" +GFDL_TRACKER_NETCDFINFO_Z800NAME = "" +GFDL_TRACKER_NETCDFINFO_Z850NAME = "" +GFDL_TRACKER_NETCDFINFO_Z900NAME = "" + +GFDL_TRACKER_USER_WANTS_TO_TRACK_ZETA850 = True +GFDL_TRACKER_USER_WANTS_TO_TRACK_ZETA700 = False +GFDL_TRACKER_USER_WANTS_TO_TRACK_WCIRC850 = True +GFDL_TRACKER_USER_WANTS_TO_TRACK_WCIRC700 = False +GFDL_TRACKER_USER_WANTS_TO_TRACK_GPH850 = True +GFDL_TRACKER_USER_WANTS_TO_TRACK_GPH700 = False +GFDL_TRACKER_USER_WANTS_TO_TRACK_MSLP = True +GFDL_TRACKER_USER_WANTS_TO_TRACK_WCIRCSFC = True +GFDL_TRACKER_USER_WANTS_TO_TRACK_ZETASFC = True +GFDL_TRACKER_USER_WANTS_TO_TRACK_THICK500850 = True +GFDL_TRACKER_USER_WANTS_TO_TRACK_THICK200500 = True +GFDL_TRACKER_USER_WANTS_TO_TRACK_THICK200850 = True + + +GFDL_TRACKER_VERBOSE_VERB = 3 +GFDL_TRACKER_VERBOSE_VERB_G2 = 0 diff --git a/parm/use_cases/met_tool_wrapper/GFDLTracker/GFDLTracker_TC.conf b/parm/use_cases/met_tool_wrapper/GFDLTracker/GFDLTracker_TC.conf index 134828df81..45b2b1bfb6 100644 --- a/parm/use_cases/met_tool_wrapper/GFDLTracker/GFDLTracker_TC.conf +++ b/parm/use_cases/met_tool_wrapper/GFDLTracker/GFDLTracker_TC.conf @@ -5,22 +5,20 @@ PROCESS_LIST = GFDLTracker LOOP_BY = INIT INIT_TIME_FMT = %Y%m%d%H - INIT_BEG = 2016090600 INIT_END = 2016090600 - LEAD_SEQ = * #LEAD_SEQ = begin_end_incr(0, 18, 6)H #LEAD_SEQ = begin_end_incr(0, 9, 1)H, begin_end_incr(12,126,3)H -GFDL_TRACKER_INPUT_DIR = {INPUT_BASE}/met_test/gfdl/tc +GFDL_TRACKER_INPUT_DIR = {INPUT_BASE}/met_test/gfdl/hwrf GFDL_TRACKER_INPUT_TEMPLATE = hwrf.25x25.EP152016.{init?fmt=%Y%m%d%H}.f{lead?fmt=%5M} 
GFDL_TRACKER_TC_VITALS_INPUT_DIR = {GFDL_TRACKER_INPUT_DIR} GFDL_TRACKER_TC_VITALS_INPUT_TEMPLATE = tcvit_rsmc_storms.txt GFDL_TRACKER_OUTPUT_DIR = {OUTPUT_BASE}/gfdl_tracker/tc -GFDL_TRACKER_OUTPUT_TEMPLATE = hwrf.25x25.EP152016.{init?fmt=%Y%m%d%H} +GFDL_TRACKER_OUTPUT_TEMPLATE = hwrf.{init?fmt=%Y%m%d%H}.track.txt GFDL_TRACKER_GRIB_VERSION = 1 @@ -38,15 +36,15 @@ GFDL_TRACKER_ATCFINFO_ATCFFREQ = 100 GFDL_TRACKER_TRACKERINFO_TYPE = "tracker" GFDL_TRACKER_TRACKERINFO_MSLPTHRESH = 0.0015 -GFDL_TRACKER_TRACKERINFO_USE_BACKUP_MSLP_GRAD_CHECK = "y" +GFDL_TRACKER_TRACKERINFO_USE_BACKUP_MSLP_GRAD_CHECK = True GFDL_TRACKER_TRACKERINFO_V850THRESH = 1.5 -GFDL_TRACKER_TRACKERINFO_USE_BACKUP_850_VT_CHECK = "y" +GFDL_TRACKER_TRACKERINFO_USE_BACKUP_850_VT_CHECK = True GFDL_TRACKER_TRACKERINFO_ENABLE_TIMING = 1 GFDL_TRACKER_TRACKERINFO_GRIDTYPE = "regional" GFDL_TRACKER_TRACKERINFO_CONTINT = 100.0 GFDL_TRACKER_TRACKERINFO_WANT_OCI = T -GFDL_TRACKER_TRACKERINFO_OUT_VIT = "y" -GFDL_TRACKER_TRACKERINFO_USE_LAND_MASK = "y" +GFDL_TRACKER_TRACKERINFO_OUT_VIT = True +GFDL_TRACKER_TRACKERINFO_USE_LAND_MASK = True GFDL_TRACKER_TRACKERINFO_INP_DATA_TYPE = "grib" GFDL_TRACKER_TRACKERINFO_GRIBVER = 1 GFDL_TRACKER_TRACKERINFO_G2_JPDTN = 0 @@ -55,23 +53,23 @@ GFDL_TRACKER_TRACKERINFO_G1_MSLP_PARM_ID = 2 GFDL_TRACKER_TRACKERINFO_G1_SFCWIND_LEV_TYP = 105 GFDL_TRACKER_TRACKERINFO_G1_SFCWIND_LEV_VAL = 10 -GFDL_TRACKER_PHASEINFO_PHASEFLAG = "y" +GFDL_TRACKER_PHASEINFO_PHASEFLAG = True GFDL_TRACKER_PHASEINFO_PHASESCHEME = "both" GFDL_TRACKER_PHASEINFO_WCORE_DEPTH = 1.0 -GFDL_TRACKER_STRUCTINFO_STRUCTFLAG = "n" -GFDL_TRACKER_STRUCTINFO_IKEFLAG = "n" +GFDL_TRACKER_STRUCTINFO_STRUCTFLAG = False +GFDL_TRACKER_STRUCTINFO_IKEFLAG = False GFDL_TRACKER_FNAMEINFO_GMODNAME = "hwrf" GFDL_TRACKER_FNAMEINFO_RUNDESCR = "25x25" GFDL_TRACKER_FNAMEINFO_ATCFDESCR = "EP152016" -GFDL_TRACKER_WAITINFO_USE_WAITFOR = "y" +GFDL_TRACKER_WAITINFO_USE_WAITFOR = True GFDL_TRACKER_WAITINFO_WAIT_MIN_AGE = 10 GFDL_TRACKER_WAITINFO_WAIT_MIN_SIZE = 100 GFDL_TRACKER_WAITINFO_WAIT_MAX_WAIT = 3600 GFDL_TRACKER_WAITINFO_WAIT_SLEEPTIME = 5 -GFDL_TRACKER_WAITINFO_USE_PER_FCST_COMMAND = "y" +GFDL_TRACKER_WAITINFO_USE_PER_FCST_COMMAND = True GFDL_TRACKER_WAITINFO_PER_FCST_COMMAND = "./deliver %[FHOUR] %[FMIN]" GFDL_TRACKER_NETCDFINFO_LAT_NAME = "" @@ -108,18 +106,18 @@ GFDL_TRACKER_NETCDFINFO_Z800NAME = "" GFDL_TRACKER_NETCDFINFO_Z850NAME = "" GFDL_TRACKER_NETCDFINFO_Z900NAME = "" -GFDL_TRACKER_USER_WANTS_TO_TRACK_ZETA700 = "y" -GFDL_TRACKER_USER_WANTS_TO_TRACK_WCIRC850 = "y" -GFDL_TRACKER_USER_WANTS_TO_TRACK_WCIRC700 = "y" -GFDL_TRACKER_USER_WANTS_TO_TRACK_GPH850 = "y" -GFDL_TRACKER_USER_WANTS_TO_TRACK_GPH700 = "y" -GFDL_TRACKER_USER_WANTS_TO_TRACK_MSLP = "y" -GFDL_TRACKER_USER_WANTS_TO_TRACK_WCIRCSFC = "y" -GFDL_TRACKER_USER_WANTS_TO_TRACK_ZETASFC = "y" -GFDL_TRACKER_USER_WANTS_TO_TRACK_THICK500850 = "y" -GFDL_TRACKER_USER_WANTS_TO_TRACK_THICK200500 = "n" -GFDL_TRACKER_USER_WANTS_TO_TRACK_THICK200850 = "y" -GFDL_TRACKER_USER_WANTS_TO_TRACK_ZETA850 = "y" +GFDL_TRACKER_USER_WANTS_TO_TRACK_ZETA700 = True +GFDL_TRACKER_USER_WANTS_TO_TRACK_WCIRC850 = True +GFDL_TRACKER_USER_WANTS_TO_TRACK_WCIRC700 = True +GFDL_TRACKER_USER_WANTS_TO_TRACK_GPH850 = True +GFDL_TRACKER_USER_WANTS_TO_TRACK_GPH700 = True +GFDL_TRACKER_USER_WANTS_TO_TRACK_MSLP = True +GFDL_TRACKER_USER_WANTS_TO_TRACK_WCIRCSFC = True +GFDL_TRACKER_USER_WANTS_TO_TRACK_ZETASFC = True +GFDL_TRACKER_USER_WANTS_TO_TRACK_THICK500850 = False +GFDL_TRACKER_USER_WANTS_TO_TRACK_THICK200500 = False 
+GFDL_TRACKER_USER_WANTS_TO_TRACK_THICK200850 = False +GFDL_TRACKER_USER_WANTS_TO_TRACK_ZETA850 = True GFDL_TRACKER_VERBOSE_VERB = 3 GFDL_TRACKER_VERBOSE_VERB_G2 = 0 diff --git a/parm/use_cases/met_tool_wrapper/GFDLTracker/sgv_template.txt b/parm/use_cases/met_tool_wrapper/GFDLTracker/sgv_template.txt new file mode 100644 index 0000000000..e8cad883e6 --- /dev/null +++ b/parm/use_cases/met_tool_wrapper/GFDLTracker/sgv_template.txt @@ -0,0 +1,6 @@ +&datenowin dnow%yy=${METPLUS_DATENOW_YY}, dnow%mm=${METPLUS_DATENOW_MM}, + dnow%dd=${METPLUS_DATENOW_DD}, dnow%hh=${METPLUS_DATENOW_HH}/ +&date6agoin d6ago%yy=${METPLUS_DATE6AGO_YY}, d6ago%mm=${METPLUS_DATE6AGO_MM}, + d6ago%dd=${METPLUS_DATE6AGO_DD}, d6ago%hh=${METPLUS_DATE6AGO_HH}/ +&date6aheadin d6ahead%yy=${METPLUS_DATE6AHEAD_YY}, d6ahead%mm=${METPLUS_DATE6AHEAD_MM}, + d6ahead%dd=${METPLUS_DATE6AHEAD_DD}, d6ahead%hh=${METPLUS_DATE6AHEAD_HH}/ diff --git a/parm/use_cases/met_tool_wrapper/GFDLTracker/template.nml b/parm/use_cases/met_tool_wrapper/GFDLTracker/template.nml index df07971316..6dab40bf90 100644 --- a/parm/use_cases/met_tool_wrapper/GFDLTracker/template.nml +++ b/parm/use_cases/met_tool_wrapper/GFDLTracker/template.nml @@ -37,6 +37,10 @@ trkrinfo%g1_mslp_parm_id = ${METPLUS_TRACKERINFO_G1_MSLP_PARM_ID}, trkrinfo%g1_sfcwind_lev_typ = ${METPLUS_TRACKERINFO_G1_SFCWIND_LEV_TYP}, trkrinfo%g1_sfcwind_lev_val = ${METPLUS_TRACKERINFO_G1_SFCWIND_LEV_VAL}, + trkrinfo%westbd = ${METPLUS_TRACKERINFO_WESTBD}, + trkrinfo%eastbd = ${METPLUS_TRACKERINFO_EASTBD}, + trkrinfo%southbd = ${METPLUS_TRACKERINFO_SOUTHBD}, + trkrinfo%northbd = ${METPLUS_TRACKERINFO_NORTHBD}, / &phaseinfo diff --git a/parm/use_cases/met_tool_wrapper/PCPCombine/PCPCombine_add.conf b/parm/use_cases/met_tool_wrapper/PCPCombine/PCPCombine_add.conf index 5fc8abf337..e9fb5e284c 100644 --- a/parm/use_cases/met_tool_wrapper/PCPCombine/PCPCombine_add.conf +++ b/parm/use_cases/met_tool_wrapper/PCPCombine/PCPCombine_add.conf @@ -1,117 +1,39 @@ -# PCPCombine Add Method Example - [config] -# List of applications to run - only PCPCombine for this case + PROCESS_LIST = PCPCombine -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set LOOP_BY = VALID - -# Format of VALID_BEG and VALID_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH VALID_TIME_FMT = %Y%m%d%H%M - -# Start time for METplus run - must match VALID_TIME_FMT VALID_BEG = 201908021815 - -# Start time for METplus run - must match VALID_TIME_FMT VALID_END = 201908021815 - -# Increment between METplus runs in seconds. Must be >= 60 VALID_INCREMENT = 1M -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) LEAD_SEQ = 15M -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. 
-# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run LOOP_ORDER = times -# verbosity of PCPCombine MET output +FCST_PCP_COMBINE_INPUT_DIR = {INPUT_BASE}/met_test/new +FCST_PCP_COMBINE_INPUT_TEMPLATE = NEWSe_{init?fmt=%Y%m%d}_i{init?fmt=%H%M}_m0_f{valid?fmt=%H%M}.nc + +FCST_PCP_COMBINE_OUTPUT_DIR = {OUTPUT_BASE}/met_tool_wrapper/PCPCombine/PCPCombine_add +FCST_PCP_COMBINE_OUTPUT_TEMPLATE = NEWSe5min_mem00_lag00.nc + + #LOG_PCP_COMBINE_VERBOSITY = 2 -# If True, run PCPCombine on forecast data -# observation equivalent OBS_PCP_COMBINE_RUN also exists FCST_PCP_COMBINE_RUN = True - -# mode of PCPCombine to use (SUM, ADD, SUBTRACT, DERIVE, or CUSTOM) FCST_PCP_COMBINE_METHOD = ADD -# maximum forecast lead to allow when searching for model data to use in PCPCombine -# Default is a very large time (4000 years) so setting this to a valid maximum value can -# speed up execution time of numerous runs FCST_PCP_COMBINE_MAX_FORECAST = 2d - -# keep initialization time constant FCST_PCP_COMBINE_CONSTANT_INIT = FALSE -# description of data to be processed -# used in output file path -MODEL = GFS -OBTYPE = ANLYS - -# Forecast data description variables - -# set to True if forecast input is probabilistic FCST_IS_PROB = false -# Input data type of forecast input, only used if data is probabilistic to determine -# how to extract data from file FCST_PCP_COMBINE_INPUT_DATATYPE = GRIB -# List of input accumulations for forecast data. Units are hours if not specified -# Items should be in order of preference of what accumulations to use if available and relevant -# Each item in the list corresponds to the equivalent in *_INPUT_NAMES and *_INPUT_LEVELS if specified FCST_PCP_COMBINE_INPUT_ACCUMS = 5M - -# List of input names corresponding to each accumulation in *_INPUT_ACCUMS -# Can be left blank if using GRIB input FCST_PCP_COMBINE_INPUT_NAMES = A000500 - -# List of input levels corresponding to each accumulation in *_INPUT_ACCUMS -# Can be left blank if using GRIB input FCST_PCP_COMBINE_INPUT_LEVELS = Surface -# desired output accumulation to build with forecast input -# Units are in hours if not specified -# Corresponds to *_OUTPUT_NAME FCST_PCP_COMBINE_OUTPUT_ACCUM = 15M - -# Name of output field name to write FCST_PCP_COMBINE_OUTPUT_NAME = A001500 - -# If running a MET tool comparison tool after PCPCombine, one can instead set FCST_VAR1_[NAME/LEVELS] to -# a value starting with A that corresponds to the desired accumulation to use in the comparison -# this value will be used to determine the accumulation to build with PCPCombine as well -# If FCST_PCP_COMBINE_OUTPUT_[NAME/ACCUM] are set, these variables take priority over -# FCST_VAR_[NAME/LEVELS] -#FCST_VAR1_NAME = A001500 -#FCST_VAR1_LEVELS = 15M - -# End of [config] section and start of [dir] section -[dir] -# input and output data directories -FCST_PCP_COMBINE_INPUT_DIR = {INPUT_BASE}/met_test/new - -FCST_PCP_COMBINE_OUTPUT_DIR = {OUTPUT_BASE}/met_tool_wrapper/PCPCombine/PCPCombine_add - - -# End of [dir] section and start of [filename_templates] section -[filename_templates] -# format of filenames -FCST_PCP_COMBINE_INPUT_TEMPLATE = NEWSe_{init?fmt=%Y%m%d}_i{init?fmt=%H%M}_m0_f{valid?fmt=%H%M}.nc -FCST_PCP_COMBINE_OUTPUT_TEMPLATE = NEWSe5min_mem00_lag00.nc diff --git a/parm/use_cases/met_tool_wrapper/PCPCombine/PCPCombine_bucket.conf b/parm/use_cases/met_tool_wrapper/PCPCombine/PCPCombine_bucket.conf index 6ef9b614e7..614adf6179 100644 --- 
a/parm/use_cases/met_tool_wrapper/PCPCombine/PCPCombine_bucket.conf +++ b/parm/use_cases/met_tool_wrapper/PCPCombine/PCPCombine_bucket.conf @@ -1,58 +1,30 @@ -# PCPCombine Bucket Interval Example - [config] -# time looping - options are INIT, VALID, RETRO, and REALTIME -LOOP_BY = INIT -# Format of INIT_BEG and INIT_END -INIT_TIME_FMT = %Y%m%d%H +PROCESS_LIST = PcpCombine -# Start time for METplus run +LOOP_BY = INIT +INIT_TIME_FMT = %Y%m%d%H INIT_BEG = 2012040900 - -# End time for METplus run INIT_END = 2012040900 - -# Increment between METplus runs in seconds. Must be >= 60 INIT_INCREMENT = 86400 -# list of forecast leads to process LEAD_SEQ = 15H -# Options are times, processes -# times = run all items in the PROCESS_LIST for a single initialization -# time, then repeat until all times have been evaluated. -# processes = run each item in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST. LOOP_ORDER = times -# List of applications to run -PROCESS_LIST = PcpCombine - -# verbosity of PCPCombine MET output -#LOG_PCP_COMBINE_VERBOSITY = 2 - -#FCST_VAR1_NAME = APCP -#FCST_VAR1_LEVELS = 15M - -FCST_PCP_COMBINE_MAX_FORECAST = 2d - -# run PCPCombine on forecast data FCST_PCP_COMBINE_RUN = True - -# mode of PCPCombine to use (SUM, ADD, SUBTRACT) FCST_PCP_COMBINE_METHOD = ADD -# list of variables to compare -#FCST_VAR1_NAME = A001500 +FCST_PCP_COMBINE_INPUT_DIR = {INPUT_BASE}/met_test/new/gfs +FCST_PCP_COMBINE_INPUT_TEMPLATE = gfs_{init?fmt=%Y%m%d%H}_F{lead?fmt=%3H}.grib + +FCST_PCP_COMBINE_OUTPUT_DIR = {OUTPUT_BASE}/met_tool_wrapper/PCPCombine/PCPCombine_bucket +FCST_PCP_COMBINE_OUTPUT_TEMPLATE = gfs_{valid?fmt=%Y%m%d%H}_A{level?fmt=%3H}.nc + +#LOG_PCP_COMBINE_VERBOSITY = 2 -# description of data to be processed -# used in output file path -MODEL = GFS -#OBTYPE = ANLYS +FCST_PCP_COMBINE_MAX_FORECAST = 2d -# Forecast data description variables -#FCST_PCP_COMBINE_INPUT_DATATYPE = GRIB FCST_IS_PROB = false FCST_PCP_COMBINE_BUCKET_INTERVAL = 6H @@ -60,14 +32,3 @@ FCST_PCP_COMBINE_INPUT_ACCUMS = {lead} FCST_PCP_COMBINE_OUTPUT_ACCUM = 15H FCST_PCP_COMBINE_OUTPUT_NAME = APCP - -[dir] -# input and output data directories -FCST_PCP_COMBINE_INPUT_DIR = {INPUT_BASE}/met_test/new/gfs -FCST_PCP_COMBINE_OUTPUT_DIR = {OUTPUT_BASE}/met_tool_wrapper/PCPCombine/PCPCombine_bucket - -[filename_templates] -# format of filenames -# GFS -FCST_PCP_COMBINE_INPUT_TEMPLATE = gfs_{init?fmt=%Y%m%d%H}_F{lead?fmt=%3H}.grib -FCST_PCP_COMBINE_OUTPUT_TEMPLATE = gfs_{valid?fmt=%Y%m%d%H}_A{level?fmt=%3H}.nc \ No newline at end of file diff --git a/parm/use_cases/met_tool_wrapper/PCPCombine/PCPCombine_derive.conf b/parm/use_cases/met_tool_wrapper/PCPCombine/PCPCombine_derive.conf index 2306ee9600..5276d22f39 100644 --- a/parm/use_cases/met_tool_wrapper/PCPCombine/PCPCombine_derive.conf +++ b/parm/use_cases/met_tool_wrapper/PCPCombine/PCPCombine_derive.conf @@ -1,136 +1,47 @@ -# PCPCombine Subtract Method Example - [config] -# List of applications to run - only PCPCombine for this case -PROCESS_LIST = PCPCombine +PROCESS_LIST = PCPCombine -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set LOOP_BY = INIT - -# Format of INIT_BEG and INT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. 
-# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT INIT_BEG = 2005080700 - -# End time for METplus run - must match INIT_TIME_FMT INIT_END = 2005080700 - -# Increment between METplus runs (in seconds if no units are specified) -# Must be >= 60 seconds INIT_INCREMENT = 1M -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) LEAD_SEQ = 24H -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run LOOP_ORDER = times -# verbosity of PCPCombine MET output #LOG_PCP_COMBINE_VERBOSITY = 2 -# If True, run PCPCombine on forecast data -# observation equivalent OBS_PCP_COMBINE_RUN also exists FCST_PCP_COMBINE_RUN = True - -# mode of PCPCombine to use (SUM, ADD, SUBTRACT, DERIVE, or CUSTOM) FCST_PCP_COMBINE_METHOD = DERIVE -# lookback time relative to current run time to consider data to be valid for a run -FCST_PCP_COMBINE_DERIVE_LOOKBACK = 18H - -# minimum forecast lead to allow when searching for model data to use in PCPCombine -# Default is 0 so setting this to a valid minimum value can -# speed up execution time of numerous runs -FCST_PCP_COMBINE_MIN_FORECAST = 9H +FCST_PCP_COMBINE_INPUT_DIR = {INPUT_BASE}/met_test/data/sample_fcst +FCST_PCP_COMBINE_INPUT_TEMPLATE = {init?fmt=%Y%m%d%H}/wrfprs_ruc13_{lead?fmt=%HH}.tm00_G212 -# maximum forecast lead to allow when searching for model data to use in PCPCombine -# Default is a very large time (4000 years) so setting this to a valid maximum value can -# speed up execution time of numerous runs -FCST_PCP_COMBINE_MAX_FORECAST = 2d +FCST_PCP_COMBINE_OUTPUT_DIR = {OUTPUT_BASE}/met_tool_wrapper/PCPCombine/PCPCombine_derive +FCST_PCP_COMBINE_OUTPUT_TEMPLATE = wrfprs_ruc13_{init?fmt=%Y%m%d%H}_f{lead?fmt=%HH}_A{level?fmt=%HH}.nc -# list of statistics to pass to PCPCombine with the -derive command line argument FCST_PCP_COMBINE_STAT_LIST = sum,min,max,range,mean,stdev,vld_count -# description of data to be processed -# used in output file path -MODEL = GFS +FCST_PCP_COMBINE_DERIVE_LOOKBACK = 18H -# Forecast data description variables +FCST_PCP_COMBINE_MIN_FORECAST = 9H +FCST_PCP_COMBINE_MAX_FORECAST = 2d -# set to True if forecast input is probabilistic FCST_IS_PROB = false -# Input data type of forecast input, only used if data is probabilistic to determine -# how to extract data from file FCST_PCP_COMBINE_INPUT_DATATYPE = GRIB -# List of input accumulations for forecast data. 
Units are hours if not specified -# Items should be in order of preference of what accumulations to use if available and relevant -# Each item in the list corresponds to the equivalent in *_INPUT_NAMES and *_INPUT_LEVELS if specified FCST_PCP_COMBINE_INPUT_ACCUMS = 3H - -# List of input names corresponding to each accumulation in *_INPUT_ACCUMS -# Can be left blank if using GRIB input FCST_PCP_COMBINE_INPUT_NAMES = APCP - -# List of input levels corresponding to each accumulation in *_INPUT_ACCUMS -# Can be left blank if using GRIB input FCST_PCP_COMBINE_INPUT_LEVELS = A03 - -# optional extra options to pass to the fcst field object FCST_PCP_COMBINE_INPUT_OPTIONS = -# desired output accumulation to build with forecast input -# Units are in hours if not specified -# Corresponds to *_OUTPUT_NAME FCST_PCP_COMBINE_OUTPUT_ACCUM = 18H - -# Name of output field name to write -# Not set for this example -# Note you must specify a list of names that is the same length as the -# number of derivation specified in FCST_PCP_COMBINE_STAT_LIST FCST_PCP_COMBINE_OUTPUT_NAME = -# set the following to add additional fields to add to command #FCST_PCP_COMBINE_EXTRA_NAMES = #FCST_PCP_COMBINE_EXTRA_LEVELS = #FCST_PCP_COMBINE_EXTRA_OUTPUT_NAMES = - -# If running a MET tool comparison tool after PCPCombine, one can instead set FCST_VAR1_[NAME/LEVELS] to -# a value starting with A that corresponds to the desired accumulation to use in the comparison -# this value will be used to determine the accumulation to build with PCPCombine as well -# If FCST_PCP_COMBINE_OUTPUT_[NAME/ACCUM] are set, these variables take priority over -# FCST_VAR_[NAME/LEVELS] -#FCST_VAR1_NAME = APCP -#FCST_VAR1_LEVELS = 18H - -# End of [config] section and start of [dir] section -[dir] -# input and output data directories -FCST_PCP_COMBINE_INPUT_DIR = {INPUT_BASE}/met_test/data/sample_fcst -FCST_PCP_COMBINE_OUTPUT_DIR = {OUTPUT_BASE}/met_tool_wrapper/PCPCombine/PCPCombine_derive - - -# End of [dir] section and start of [filename_templates] section -[filename_templates] -# format of filenames -FCST_PCP_COMBINE_INPUT_TEMPLATE = {init?fmt=%Y%m%d%H}/wrfprs_ruc13_{lead?fmt=%HH}.tm00_G212 -FCST_PCP_COMBINE_OUTPUT_TEMPLATE = wrfprs_ruc13_{init?fmt=%Y%m%d%H}_f{lead?fmt=%HH}_A{level?fmt=%HH}.nc \ No newline at end of file diff --git a/parm/use_cases/met_tool_wrapper/PCPCombine/PCPCombine_loop_custom.conf b/parm/use_cases/met_tool_wrapper/PCPCombine/PCPCombine_loop_custom.conf index 682d69dcf2..1155e41111 100644 --- a/parm/use_cases/met_tool_wrapper/PCPCombine/PCPCombine_loop_custom.conf +++ b/parm/use_cases/met_tool_wrapper/PCPCombine/PCPCombine_loop_custom.conf @@ -1,108 +1,38 @@ -# PCPCombine Add Method Example - [config] -# List of applications to run - only PCPCombine for this case + PROCESS_LIST = PCPCombine -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set LOOP_BY = INIT - -# Format of VALID_BEG and VALID_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match VALID_TIME_FMT INIT_BEG = 2009123112 - -# Start time for METplus run - must match VALID_TIME_FMT INIT_END = 2009123112 - -# Increment between METplus runs in seconds. 
Must be >= 60 INIT_INCREMENT = 1M -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) LEAD_SEQ = 24H -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run LOOP_ORDER = times PCP_COMBINE_CUSTOM_LOOP_LIST = arw-fer-gep1, arw-fer-gep5, arw-sch-gep2, arw-sch-gep6, arw-tom-gep3, arw-tom-gep7 -# verbosity of PCPCombine MET output #LOG_PCP_COMBINE_VERBOSITY = 2 -# If True, run PCPCombine on forecast data -# observation equivalent OBS_PCP_COMBINE_RUN also exists FCST_PCP_COMBINE_RUN = True - -# mode of PCPCombine to use (SUM, ADD, SUBTRACT, DERIVE, or CUSTOM) FCST_PCP_COMBINE_METHOD = ADD -FCST_PCP_COMBINE_CONSTANT_INIT = True +FCST_PCP_COMBINE_INPUT_DIR = {INPUT_BASE}/met_test/data/sample_fcst +FCST_PCP_COMBINE_INPUT_TEMPLATE = {init?fmt=%Y%m%d%H}/{custom?fmt=%s}/d01_{init?fmt=%Y%m%d%H}_0{lead?fmt=%HH}00.grib -# maximum forecast lead to allow when searching for model data to use in PCPCombine -# Default is a very large time (4000 years) so setting this to a valid maximum value can -# speed up execution time of numerous runs -FCST_PCP_COMBINE_MAX_FORECAST = 2d +FCST_PCP_COMBINE_OUTPUT_DIR = {OUTPUT_BASE}/met_tool_wrapper/PCPCombine/PCPCombine_loop_custom +FCST_PCP_COMBINE_OUTPUT_TEMPLATE = {custom?fmt=%s}/d01_{init?fmt=%Y%m%d%H}_0{lead?fmt=%HH}00.nc -# description of data to be processed -# used in output file path -MODEL = WRF +FCST_PCP_COMBINE_CONSTANT_INIT = True -# Forecast data description variables +FCST_PCP_COMBINE_MAX_FORECAST = 2d -# set to True if forecast input is probabilistic FCST_IS_PROB = false -# Input data type of forecast input, only used if data is probabilistic to determine -# how to extract data from file FCST_PCP_COMBINE_INPUT_DATATYPE = GRIB - -# List of input accumulations for forecast data. 
Units are hours if not specified -# Items should be in order of preference of what accumulations to use if available and relevant -# Each item in the list corresponds to the equivalent in *_INPUT_NAMES and *_INPUT_LEVELS if specified FCST_PCP_COMBINE_INPUT_ACCUMS = 24H -# desired output accumulation to build with forecast input -# Units are in hours if not specified -# Corresponds to *_OUTPUT_NAME FCST_PCP_COMBINE_OUTPUT_ACCUM = 24H - -# Name of output field name to write FCST_PCP_COMBINE_OUTPUT_NAME = APCP - -# If running a MET tool comparison tool after PCPCombine, one can instead set FCST_VAR1_[NAME/LEVELS] to -# a value starting with A that corresponds to the desired accumulation to use in the comparison -# this value will be used to determine the accumulation to build with PCPCombine as well -# If FCST_PCP_COMBINE_OUTPUT_[NAME/ACCUM] are set, these variables take priority over -# FCST_VAR_[NAME/LEVELS] -#FCST_VAR1_NAME = A001500 -#FCST_VAR1_LEVELS = 15M - -# End of [config] section and start of [dir] section -[dir] -# input and output data directories -FCST_PCP_COMBINE_INPUT_DIR = {INPUT_BASE}/met_test/data/sample_fcst -FCST_PCP_COMBINE_OUTPUT_DIR = {OUTPUT_BASE}/met_tool_wrapper/PCPCombine/PCPCombine_loop_custom - - -# End of [dir] section and start of [filename_templates] section -[filename_templates] -# format of filenames -FCST_PCP_COMBINE_INPUT_TEMPLATE = {init?fmt=%Y%m%d%H}/{custom?fmt=%s}/d01_{init?fmt=%Y%m%d%H}_0{lead?fmt=%HH}00.grib -FCST_PCP_COMBINE_OUTPUT_TEMPLATE = {custom?fmt=%s}/d01_{init?fmt=%Y%m%d%H}_0{lead?fmt=%HH}00.nc \ No newline at end of file diff --git a/parm/use_cases/met_tool_wrapper/PCPCombine/PCPCombine_python_embedding.conf b/parm/use_cases/met_tool_wrapper/PCPCombine/PCPCombine_python_embedding.conf index 442bb999c8..7e72c8b1df 100644 --- a/parm/use_cases/met_tool_wrapper/PCPCombine/PCPCombine_python_embedding.conf +++ b/parm/use_cases/met_tool_wrapper/PCPCombine/PCPCombine_python_embedding.conf @@ -1,64 +1,36 @@ [config] -# time looping - options are INIT, VALID, RETRO, and REALTIME -LOOP_BY = VALID -# Format of VALID_BEG and VALID_END -VALID_TIME_FMT = %Y%m%d%H%M +PROCESS_LIST = PCPCombine -# Start time for METplus run +LOOP_BY = VALID +VALID_TIME_FMT = %Y%m%d%H%M VALID_BEG=201801021300 - -# End time for METplus run VALID_END=201801021300 - -# Increment between METplus runs in seconds. Must be >= 60 VALID_INCREMENT=43200 -# List of forecast leads to process LEAD_SEQ = 0 -# Options are times, processes -# times = run all items in the PROCESS_LIST for a single initialization -# time, then repeat until all times have been evaluated. -# processes = run each item in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST. LOOP_ORDER = times -# List of applications to run -PROCESS_LIST = PCPCombine - -# verbosity of PCPCombine MET output #LOG_PCP_COMBINE_VERBOSITY = 2 -# run PCPCombine on observation data OBS_PCP_COMBINE_RUN = True - -# method to run PCPCombine. Options are ADD, SUM, SUBTRACT, and DERIVE OBS_PCP_COMBINE_METHOD = ADD -# List of variables to compare +OBS_PCP_COMBINE_INPUT_DIR = {INPUT_BASE}/met_test/new/imerg +OBS_PCP_COMBINE_INPUT_TEMPLATE = PYTHON_NUMPY + +OBS_PCP_COMBINE_OUTPUT_DIR = {OUTPUT_BASE}/met_tool_wrapper/PCPCombine/PCPCombine_combine_py_embed +OBS_PCP_COMBINE_OUTPUT_TEMPLATE = IMERG.{valid?fmt=%Y%m%d_%H%M}_A{level?fmt=%2H}h + + OBS_VAR1_NAME = APCP OBS_VAR1_LEVELS = A06 -# Name to identify observation data in output -OBTYPE = IMERG - -# File format. 
diff --git a/parm/use_cases/met_tool_wrapper/PCPCombine/PCPCombine_python_embedding.conf b/parm/use_cases/met_tool_wrapper/PCPCombine/PCPCombine_python_embedding.conf
index 442bb999c8..7e72c8b1df 100644
--- a/parm/use_cases/met_tool_wrapper/PCPCombine/PCPCombine_python_embedding.conf
+++ b/parm/use_cases/met_tool_wrapper/PCPCombine/PCPCombine_python_embedding.conf
@@ -1,64 +1,36 @@
 [config]
-# time looping - options are INIT, VALID, RETRO, and REALTIME
-LOOP_BY = VALID
-# Format of VALID_BEG and VALID_END
-VALID_TIME_FMT = %Y%m%d%H%M
+PROCESS_LIST = PCPCombine
-# Start time for METplus run
+LOOP_BY = VALID
+VALID_TIME_FMT = %Y%m%d%H%M
 VALID_BEG=201801021300
-
-# End time for METplus run
 VALID_END=201801021300
-
-# Increment between METplus runs in seconds. Must be >= 60
 VALID_INCREMENT=43200
-# List of forecast leads to process
 LEAD_SEQ = 0
-# Options are times, processes
-# times = run all items in the PROCESS_LIST for a single initialization
-# time, then repeat until all times have been evaluated.
-# processes = run each item in the PROCESS_LIST for all times
-# specified, then repeat for the next item in the PROCESS_LIST.
 LOOP_ORDER = times
-# List of applications to run
-PROCESS_LIST = PCPCombine
-
-# verbosity of PCPCombine MET output
 #LOG_PCP_COMBINE_VERBOSITY = 2
-# run PCPCombine on observation data
 OBS_PCP_COMBINE_RUN = True
-
-# method to run PCPCombine. Options are ADD, SUM, SUBTRACT, and DERIVE
 OBS_PCP_COMBINE_METHOD = ADD
-# List of variables to compare
+OBS_PCP_COMBINE_INPUT_DIR = {INPUT_BASE}/met_test/new/imerg
+OBS_PCP_COMBINE_INPUT_TEMPLATE = PYTHON_NUMPY
+
+OBS_PCP_COMBINE_OUTPUT_DIR = {OUTPUT_BASE}/met_tool_wrapper/PCPCombine/PCPCombine_combine_py_embed
+OBS_PCP_COMBINE_OUTPUT_TEMPLATE = IMERG.{valid?fmt=%Y%m%d_%H%M}_A{level?fmt=%2H}h
+
+
 OBS_VAR1_NAME = APCP
 OBS_VAR1_LEVELS = A06
-# Name to identify observation data in output
 OBTYPE = IMERG
-
-# File format. Options are GRIB, NETCDF, or GEMPAK
 OBS_PCP_COMBINE_INPUT_DATATYPE = PYTHON_NUMPY
-
 OBS_PCP_COMBINE_INPUT_ACCUMS = 6
 OBS_PCP_COMBINE_INPUT_NAMES = {PARM_BASE}/use_cases/met_tool_wrapper/PCPCombine/sum_IMERG_V06_HDF5.py {OBS_PCP_COMBINE_INPUT_DIR} IRprecipitation {valid?fmt=%Y%m%d%H} 02
-[dir]
-OBS_PCP_COMBINE_INPUT_DIR = {INPUT_BASE}/met_test/new/imerg
-OBS_PCP_COMBINE_OUTPUT_DIR = {OUTPUT_BASE}/met_tool_wrapper/PCPCombine/PCPCombine_combine_py_embed
-
-[filename_templates]
-# format of filenames
-
-OBS_PCP_COMBINE_INPUT_TEMPLATE = PYTHON_NUMPY
-OBS_PCP_COMBINE_OUTPUT_TEMPLATE = IMERG.{valid?fmt=%Y%m%d_%H%M}_A{level?fmt=%2H}h
-
 [user_env_vars]
 # uncomment and change this to the path of a version of python that has the h5py package installed
 #MET_PYTHON_EXE = /path/to/python/with/h5-py/and/numpy/packages/bin/python
\ No newline at end of file
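A rough sketch of how these Python embedding settings resolve, following the OBS_PCP_COMBINE_INPUT_NAMES value above (the script path is abbreviated here); with valid = 201801021300 and a 6-hour input accumulation:

# embedding argument built from the template:
#   sum_IMERG_V06_HDF5.py {INPUT_BASE}/met_test/new/imerg IRprecipitation 2018010213 02
# output file: IMERG.20180102_1300_A06h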
diff --git a/parm/use_cases/met_tool_wrapper/PCPCombine/PCPCombine_subtract.conf b/parm/use_cases/met_tool_wrapper/PCPCombine/PCPCombine_subtract.conf
index 4c1cce4503..f1172bc4a5 100644
--- a/parm/use_cases/met_tool_wrapper/PCPCombine/PCPCombine_subtract.conf
+++ b/parm/use_cases/met_tool_wrapper/PCPCombine/PCPCombine_subtract.conf
@@ -1,99 +1,35 @@
-# PCPCombine Subtract Method Example
-
 [config]
-# List of applications to run - only PCPCombine for this case
+
 PROCESS_LIST = PCPCombine
-# time looping - options are INIT, VALID, RETRO, and REALTIME
-# If set to INIT or RETRO:
-# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set
-# If set to VALID or REALTIME:
-# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set
 LOOP_BY = INIT
-# Format of INIT_BEG and INT_END using % items
-# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc.
-# see www.strftime.org for more information
-# %Y%m%d%H expands to YYYYMMDDHH
 INIT_TIME_FMT = %Y%m%d%H
-
-# Start time for METplus run - must match INIT_TIME_FMT
 INIT_BEG = 2005080700
-
-# End time for METplus run - must match INIT_TIME_FMT
 INIT_END = 2005080700
-
-# Increment between METplus runs (in seconds if no units are specified)
-# Must be >= 60 seconds
 INIT_INCREMENT = 1M
-# List of forecast leads to process for each run time (init or valid)
-# In hours if units are not specified
-# If unset, defaults to 0 (don't loop through forecast leads)
 LEAD_SEQ = 18H
-# Order of loops to process data - Options are times, processes
-# Not relevant if only one item is in the PROCESS_LIST
-# times = run all wrappers in the PROCESS_LIST for a single run time, then
-# increment the run time and run all wrappers again until all times have
-# been evaluated.
-# processes = run the first wrapper in the PROCESS_LIST for all times
-# specified, then repeat for the next item in the PROCESS_LIST until all
-# wrappers have been run
 LOOP_ORDER = times
-# verbosity of PCPCombine MET output
 #LOG_PCP_COMBINE_VERBOSITY = 2
-# If True, run PCPCombine on forecast data
-# observation equivalent OBS_PCP_COMBINE_RUN also exists
 FCST_PCP_COMBINE_RUN = True
-
-# mode of PCPCombine to use (SUM, ADD, SUBTRACT, DERIVE, or CUSTOM)
 FCST_PCP_COMBINE_METHOD = SUBTRACT
-# maximum forecast lead to allow when searching for model data to use in PCPCombine
-# Default is a very large time (4000 years) so setting this to a valid maximum value can
-# speed up execution time of numerous runs
-FCST_PCP_COMBINE_MAX_FORECAST = 2d
+FCST_PCP_COMBINE_INPUT_DIR = {INPUT_BASE}/met_test/data/sample_fcst
+FCST_PCP_COMBINE_INPUT_TEMPLATE = {init?fmt=%Y%m%d%H}/wrfprs_ruc13_{lead?fmt=%HH}.tm00_G212
-# description of data to be processed
-# used in output file path
-MODEL = GFS
-OBTYPE = ANLYS
+FCST_PCP_COMBINE_OUTPUT_DIR = {OUTPUT_BASE}/met_tool_wrapper/PCPCombine/PCPCombine_subtract
+FCST_PCP_COMBINE_OUTPUT_TEMPLATE = wrfprs_ruc13_{init?fmt=%Y%m%d%H}_f{lead?fmt=%HH}_A03.nc
+
+FCST_PCP_COMBINE_MAX_FORECAST = 2d
-# set to True if forecast input is probabilistic
 FCST_IS_PROB = false
-# Input data type of forecast input, only used if data is probabilistic to determine
-# how to extract data from file
 FCST_PCP_COMBINE_INPUT_DATATYPE = GRIB
-# desired output accumulation to build with forecast input
-# Units are in hours if not specified
-# Corresponds to *_OUTPUT_NAME
 FCST_PCP_COMBINE_OUTPUT_ACCUM = 3H
-# Name of output field name to write
-FCST_PCP_COMBINE_OUTPUT_NAME = APCP
-
-# If running a MET tool comparison tool after PCPCombine, one can instead set FCST_VAR1_[NAME/LEVELS] to
-# a value starting with A that corresponds to the desired accumulation to use in the comparison
-# this value will be used to determine the accumulation to build with PCPCombine as well
-# If FCST_PCP_COMBINE_OUTPUT_[NAME/ACCUM] are set, these variables take priority over
-# FCST_VAR_[NAME/LEVELS]
-#FCST_VAR1_NAME = APCP
-#FCST_VAR1_LEVELS = 3H
-
-# End of [config] section and start of [dir] section
-[dir]
-# input and output data directories
-FCST_PCP_COMBINE_INPUT_DIR = {INPUT_BASE}/met_test/data/sample_fcst
-FCST_PCP_COMBINE_OUTPUT_DIR = {OUTPUT_BASE}/met_tool_wrapper/PCPCombine/PCPCombine_subtract
-
-
-# End of [dir] section and start of [filename_templates] section
-[filename_templates]
-# format of filenames
-FCST_PCP_COMBINE_INPUT_TEMPLATE = {init?fmt=%Y%m%d%H}/wrfprs_ruc13_{lead?fmt=%HH}.tm00_G212
-FCST_PCP_COMBINE_OUTPUT_TEMPLATE = wrfprs_ruc13_{init?fmt=%Y%m%d%H}_f{lead?fmt=%HH}_A03.nc
\ No newline at end of file
+FCST_PCP_COMBINE_OUTPUT_NAME = APCP_03
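A worked sketch of the SUBTRACT method as configured above: to build a 3-hour accumulation ending at lead 18H, PCPCombine subtracts the field at lead 15H from the field at lead 18H (the 15H/18H pairing is implied by FCST_PCP_COMBINE_OUTPUT_ACCUM = 3H, not stated explicitly in this file):

# 2005080700/wrfprs_ruc13_18.tm00_G212 minus 2005080700/wrfprs_ruc13_15.tm00_G212
# -> wrfprs_ruc13_2005080700_f18_A03.nc, containing the field APCP_03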
diff --git a/parm/use_cases/met_tool_wrapper/PCPCombine/PCPCombine_sum.conf b/parm/use_cases/met_tool_wrapper/PCPCombine/PCPCombine_sum.conf
index 61805d7ee0..bad19b6f74 100644
--- a/parm/use_cases/met_tool_wrapper/PCPCombine/PCPCombine_sum.conf
+++ b/parm/use_cases/met_tool_wrapper/PCPCombine/PCPCombine_sum.conf
@@ -1,108 +1,34 @@
-# PCPCombine Sum Method Example
-
 [config]
-# List of applications to run - only PCPCombine for this case
 PROCESS_LIST = PCPCombine
-# time looping - options are INIT, VALID, RETRO, and REALTIME
-# If set to INIT or RETRO:
-# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set
-# If set to VALID or REALTIME:
-# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set
 LOOP_BY = VALID
-
-# Format of VALID_BEG and VALID_END using % items
-# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc.
-# see www.strftime.org for more information
-# %Y%m%d%H expands to YYYYMMDDHH
 VALID_TIME_FMT = %Y%m%d%H%M
-
-# Start time for METplus run - must match VALID_TIME_FMT
 VALID_BEG = 201908021815
-
-# End time for METplus run - must match VALID_TIME_FMT
 VALID_END = 201908021815
-
-# Increment between METplus runs in seconds. Must be >= 60
 VALID_INCREMENT = 1M
-# List of forecast leads to process for each run time (init or valid)
-# In hours if units are not specified
-# If unset, defaults to 0 (don't loop through forecast leads)
 LEAD_SEQ = 15M
-# Order of loops to process data - Options are times, processes
-# Not relevant if only one item is in the PROCESS_LIST
-# times = run all wrappers in the PROCESS_LIST for a single run time, then
-# increment the run time and run all wrappers again until all times have
-# been evaluated.
-# processes = run the first wrapper in the PROCESS_LIST for all times
-# specified, then repeat for the next item in the PROCESS_LIST until all
-# wrappers have been run
 LOOP_ORDER = times
-# If True, run PCPCombine on forecast data
-# observation equivalent OBS_PCP_COMBINE_RUN also exists
 FCST_PCP_COMBINE_RUN = True
-
-# verbosity of PCPCombine MET output
-#LOG_PCP_COMBINE_VERBOSITY = 2
-
-# mode of PCPCombine to use (SUM, ADD, SUBTRACT, DERIVE, or CUSTOM)
 FCST_PCP_COMBINE_METHOD = SUM
-# description of data to be processed
-# used in output file path
-MODEL = GFS
-OBTYPE = ANLYS
+FCST_PCP_COMBINE_INPUT_DIR = {INPUT_BASE}/met_test/new
+FCST_PCP_COMBINE_INPUT_TEMPLATE = NEWSe_{init?fmt=%Y%m%d}_i{init?fmt=%H%M}_m0_f*
+
+FCST_PCP_COMBINE_OUTPUT_DIR = {OUTPUT_BASE}/met_tool_wrapper/PCPCombine/PCPCombine_sum
+FCST_PCP_COMBINE_OUTPUT_TEMPLATE = NEWSe5min_mem00_lag00.nc
-# Forecast data description variables
+#LOG_PCP_COMBINE_VERBOSITY = 2
-# set to True if forecast input is probabilistic
 FCST_IS_PROB = false
-# Input data type of forecast input, only used if data is probabilistic to determine
-# how to extract data from file
 FCST_PCP_COMBINE_INPUT_DATATYPE = GRIB
-
-# List of input accumulations for forecast data. Units are hours if not specified
-# Items should be in order of preference of what accumulations to use if available and relevant
-# Each item in the list corresponds to the equivalent in *_INPUT_NAMES and *_INPUT_LEVELS if specified
 FCST_PCP_COMBINE_INPUT_ACCUMS = 5M
-
-# List of input names corresponding to each accumulation in *_INPUT_ACCUMS
-# Can be left blank if using GRIB input
 FCST_PCP_COMBINE_INPUT_NAMES = A000500
-
-# List of input levels corresponding to each accumulation in *_INPUT_ACCUMS
-# Can be left blank if using GRIB input
 FCST_PCP_COMBINE_INPUT_LEVELS = Surface
-# desired output accumulation to build with forecast input
-# Units are in hours if not specified
-# Corresponds to *_OUTPUT_NAME
 FCST_PCP_COMBINE_OUTPUT_ACCUM = 15M
-
-# Name of output field name to write
 FCST_PCP_COMBINE_OUTPUT_NAME = A001500
-
-# If running a MET tool comparison tool after PCPCombine, one can instead set FCST_VAR_[NAME/LEVELS] to
-# a value starting with A that corresponds to the desired accumulation to use in the comparison
-# this value will be used to determine the accumulation to build with PCPCombine as well
-# If FCST_PCP_COMBINE_OUTPUT_[NAME/ACCUM] are set, these variables take priority over FCST_VAR_[NAME/LEVELS]
-#FCST_VAR1_NAME = A001500
-#FCST_VAR1_LEVELS = 15M
-
-# End of [config] section and start of [dir] section
-[dir]
-# input and output data directories
-FCST_PCP_COMBINE_INPUT_DIR = {INPUT_BASE}/met_test/new
-FCST_PCP_COMBINE_OUTPUT_DIR = {OUTPUT_BASE}/met_tool_wrapper/PCPCombine/PCPCombine_sum
-
-
-# End of [dir] section and start of [filename_templates] section
-[filename_templates]
-# format of filenames
-FCST_PCP_COMBINE_INPUT_TEMPLATE = NEWSe_{init?fmt=%Y%m%d}_i{init?fmt=%H%M}_m0_f*
-FCST_PCP_COMBINE_OUTPUT_TEMPLATE = NEWSe5min_mem00_lag00.nc
\ No newline at end of file
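For reference, with valid = 201908021815 and lead = 15M the init time is 201908021800, so the templates resolve as sketched below, and three 5-minute accumulations (15M divided by the 5M input accumulation) are summed into the 15-minute output field:

# input glob:  NEWSe_20190802_i1800_m0_f*
# output file: NEWSe5min_mem00_lag00.nc, containing A001500 (a 15-minute accumulation)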
diff --git a/parm/use_cases/met_tool_wrapper/PCPCombine/PCPCombine_user_defined.conf b/parm/use_cases/met_tool_wrapper/PCPCombine/PCPCombine_user_defined.conf
index 642572bb1e..6ec72af5db 100644
--- a/parm/use_cases/met_tool_wrapper/PCPCombine/PCPCombine_user_defined.conf
+++ b/parm/use_cases/met_tool_wrapper/PCPCombine/PCPCombine_user_defined.conf
@@ -1,100 +1,37 @@
-# PCPCombine Subtract Method Example
-
 [config]
-# List of applications to run - only PCPCombine for this case
+
 PROCESS_LIST = PCPCombine
-# time looping - options are INIT, VALID, RETRO, and REALTIME
-# If set to INIT or RETRO:
-# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set
-# If set to VALID or REALTIME:
-# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set
 LOOP_BY = INIT
-
-# Format of INIT_BEG and INT_END using % items
-# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc.
-# see www.strftime.org for more information
-# %Y%m%d%H expands to YYYYMMDDHH
 INIT_TIME_FMT = %Y%m%d%H
-
-# Start time for METplus run - must match INIT_TIME_FMT
 INIT_BEG = 2005080700
-
-# End time for METplus run - must match INIT_TIME_FMT
 INIT_END = 2005080700
-
-# Increment between METplus runs (in seconds if no units are specified)
-# Must be >= 60 seconds
 INIT_INCREMENT = 1M
-# List of forecast leads to process for each run time (init or valid)
-# In hours if units are not specified
-# If unset, defaults to 0 (don't loop through forecast leads)
 LEAD_SEQ = 24H
-# Order of loops to process data - Options are times, processes
-# Not relevant if only one item is in the PROCESS_LIST
-# times = run all wrappers in the PROCESS_LIST for a single run time, then
-# increment the run time and run all wrappers again until all times have
-# been evaluated.
-# processes = run the first wrapper in the PROCESS_LIST for all times
-# specified, then repeat for the next item in the PROCESS_LIST until all
-# wrappers have been run
 LOOP_ORDER = times
-# verbosity of PCPCombine MET output
-#LOG_PCP_COMBINE_VERBOSITY = 2
-
-# If True, run pcp_combine on forecast data
-# observation equivalent OBS_PCP_COMBINE_RUN also exists
 FCST_PCP_COMBINE_RUN = True
-
-# mode of pcp_combine to use (SUM, ADD, SUBTRACT, DERIVE, or USER_DEFINED)
 FCST_PCP_COMBINE_METHOD = USER_DEFINED
-# user-defined command to run pcp_combine
-# NOTE: do not include pcp_combine app name or the output path, these will be added in automatically
-FCST_PCP_COMBINE_COMMAND = -derive sum,min,max,range,mean,stdev,vld_count {FCST_PCP_COMBINE_INPUT_DIR}/{init?fmt=%Y%m%d%H}/wrfprs_ruc13_{lead?fmt=%HH}.tm00_G212 {FCST_PCP_COMBINE_INPUT_DIR}/{init?fmt=%Y%m%d%H}/wrfprs_ruc13_{lead?fmt=%HH?shift=-3H}.tm00_G212 {FCST_PCP_COMBINE_INPUT_DIR}/{init?fmt=%Y%m%d%H}/wrfprs_ruc13_{lead?fmt=%HH?shift=-6H}.tm00_G212 {FCST_PCP_COMBINE_INPUT_DIR}/{init?fmt=%Y%m%d%H}/wrfprs_ruc13_{lead?fmt=%HH?shift=-9H}.tm00_G212 {FCST_PCP_COMBINE_INPUT_DIR}/{init?fmt=%Y%m%d%H}/wrfprs_ruc13_{lead?fmt=%HH?shift=-12H}.tm00_G212 {FCST_PCP_COMBINE_INPUT_DIR}/{init?fmt=%Y%m%d%H}/wrfprs_ruc13_{lead?fmt=%HH?shift=-15H}.tm00_G212 -field 'name="{FCST_PCP_COMBINE_INPUT_NAMES}"; level="{FCST_PCP_COMBINE_INPUT_LEVELS}";'
+FCST_PCP_COMBINE_INPUT_DIR = {INPUT_BASE}/met_test/data/sample_fcst
+FCST_PCP_COMBINE_INPUT_TEMPLATE = {init?fmt=%Y%m%d%H}/wrfprs_ruc13_{lead?fmt=%HH}.tm00_G212
-# description of data to be processed
-# used in output file path
-MODEL = GFS
+FCST_PCP_COMBINE_OUTPUT_DIR = {OUTPUT_BASE}/met_tool_wrapper/PCPCombine/PCPCombine_user_defined
+FCST_PCP_COMBINE_OUTPUT_TEMPLATE = wrfprs_ruc13_{init?fmt=%Y%m%d%H}_f{lead?fmt=%HH}_A{level?fmt=%HH}.nc
-# Forecast data description variables
-# set to True if forecast input is probabilistic
+FCST_PCP_COMBINE_COMMAND = -derive sum,min,max,range,mean,stdev,vld_count {FCST_PCP_COMBINE_INPUT_DIR}/{init?fmt=%Y%m%d%H}/wrfprs_ruc13_{lead?fmt=%HH}.tm00_G212 {FCST_PCP_COMBINE_INPUT_DIR}/{init?fmt=%Y%m%d%H}/wrfprs_ruc13_{lead?fmt=%HH?shift=-3H}.tm00_G212 {FCST_PCP_COMBINE_INPUT_DIR}/{init?fmt=%Y%m%d%H}/wrfprs_ruc13_{lead?fmt=%HH?shift=-6H}.tm00_G212 {FCST_PCP_COMBINE_INPUT_DIR}/{init?fmt=%Y%m%d%H}/wrfprs_ruc13_{lead?fmt=%HH?shift=-9H}.tm00_G212 {FCST_PCP_COMBINE_INPUT_DIR}/{init?fmt=%Y%m%d%H}/wrfprs_ruc13_{lead?fmt=%HH?shift=-12H}.tm00_G212 {FCST_PCP_COMBINE_INPUT_DIR}/{init?fmt=%Y%m%d%H}/wrfprs_ruc13_{lead?fmt=%HH?shift=-15H}.tm00_G212 -field 'name="{FCST_PCP_COMBINE_INPUT_NAMES}"; level="{FCST_PCP_COMBINE_INPUT_LEVELS}";'
+
+#LOG_PCP_COMBINE_VERBOSITY = 2
+
 FCST_IS_PROB = false
-# Input data type of forecast input, only used if data is probabilistic to determine
-# how to extract data from file
 FCST_PCP_COMBINE_INPUT_DATATYPE = GRIB
-# List of input accumulations for forecast data. Units are hours if not specified
-# Items should be in order of preference of what accumulations to use if available and relevant
-# Each item in the list corresponds to the equivalent in *_INPUT_NAMES and *_INPUT_LEVELS if specified
 FCST_PCP_COMBINE_INPUT_ACCUMS = 3H
-
-# List of input names corresponding to each accumulation in *_INPUT_ACCUMS
-# Can be left blank if using GRIB input
 FCST_PCP_COMBINE_INPUT_NAMES = APCP
-
-# List of input levels corresponding to each accumulation in *_INPUT_ACCUMS
-# Can be left blank if using GRIB input
 FCST_PCP_COMBINE_INPUT_LEVELS = A03
-# desired output accumulation to build with forecast input
-# Units are in hours if not specified
 FCST_PCP_COMBINE_OUTPUT_ACCUM = A24
-
-# End of [config] section and start of [dir] section
-[dir]
-# input and output data directories
-FCST_PCP_COMBINE_INPUT_DIR = {INPUT_BASE}/met_test/data/sample_fcst
-FCST_PCP_COMBINE_OUTPUT_DIR = {OUTPUT_BASE}/met_tool_wrapper/PCPCombine/PCPCombine_user_defined
-
-
-# End of [dir] section and start of [filename_templates] section
-[filename_templates]
-# format of filenames
-FCST_PCP_COMBINE_INPUT_TEMPLATE = {init?fmt=%Y%m%d%H}/wrfprs_ruc13_{lead?fmt=%HH}.tm00_G212
-FCST_PCP_COMBINE_OUTPUT_TEMPLATE = wrfprs_ruc13_{init?fmt=%Y%m%d%H}_f{lead?fmt=%HH}_A{level?fmt=%HH}.nc
\ No newline at end of file
diff --git a/parm/use_cases/met_tool_wrapper/PointStat/PointStat.conf b/parm/use_cases/met_tool_wrapper/PointStat/PointStat.conf
index 5b35b41d06..a290427772 100644
--- a/parm/use_cases/met_tool_wrapper/PointStat/PointStat.conf
+++ b/parm/use_cases/met_tool_wrapper/PointStat/PointStat.conf
@@ -79,6 +79,7 @@ POINT_STAT_OUTPUT_FLAG_VL1L2 = STAT
 #POINT_STAT_OUTPUT_FLAG_RPS =
 #POINT_STAT_OUTPUT_FLAG_ECLV =
 #POINT_STAT_OUTPUT_FLAG_MPR =
+#POINT_STAT_OUTPUT_FLAG_ORANK =
 #POINT_STAT_CLIMO_CDF_BINS = 1
 #POINT_STAT_CLIMO_CDF_CENTER_BINS = False
diff --git a/parm/use_cases/met_tool_wrapper/TCPairs/TCPairs_extra_tropical.conf b/parm/use_cases/met_tool_wrapper/TCPairs/TCPairs_extra_tropical.conf
index f52fd687e7..62640e4ba8 100644
--- a/parm/use_cases/met_tool_wrapper/TCPairs/TCPairs_extra_tropical.conf
+++ b/parm/use_cases/met_tool_wrapper/TCPairs/TCPairs_extra_tropical.conf
@@ -35,6 +35,11 @@ TC_PAIRS_INIT_EXCLUDE =
 TC_PAIRS_INIT_BEG = 2014121318
 TC_PAIRS_INIT_END = 2014121418
+#TC_PAIRS_VALID_INCLUDE =
+#TC_PAIRS_VALID_EXCLUDE =
+
+#TC_PAIRS_WRITE_VALID =
+
 # Specify model valid time window in format YYYYMM[DD[_hh]]
 # Only tracks that fall within the valid time window will be used
 TC_PAIRS_VALID_BEG =
diff --git a/parm/use_cases/met_tool_wrapper/TCPairs/TCPairs_tropical.conf b/parm/use_cases/met_tool_wrapper/TCPairs/TCPairs_tropical.conf
index c7a5c0069b..81f9ed99c0 100644
--- a/parm/use_cases/met_tool_wrapper/TCPairs/TCPairs_tropical.conf
+++ b/parm/use_cases/met_tool_wrapper/TCPairs/TCPairs_tropical.conf
@@ -37,6 +37,11 @@ TC_PAIRS_INIT_EXCLUDE =
 TC_PAIRS_INIT_BEG =
 TC_PAIRS_INIT_END =
+#TC_PAIRS_VALID_INCLUDE =
+#TC_PAIRS_VALID_EXCLUDE =
+
+#TC_PAIRS_WRITE_VALID =
+
 # Specify model valid time window in format YYYYMM[DD[_hh]]
 # Only tracks that fall within the valid time window will be used
 TC_PAIRS_VALID_BEG =
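The commented-out options added to both TCPairs files mirror the existing init-time filters. Presumably (an assumption based on the corresponding tc_pairs valid_inc, valid_exc, and write_valid configuration entries, not confirmed by this patch) they would take time strings such as:

#TC_PAIRS_VALID_INCLUDE = 20141214_00
#TC_PAIRS_VALID_EXCLUDE = 20141214_12
#TC_PAIRS_WRITE_VALID = 20141214_00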
diff --git a/parm/use_cases/model_applications/convection_allowing_models/EnsembleStat_fcstHRRR_fcstOnly_SurrogateSevere.conf b/parm/use_cases/model_applications/convection_allowing_models/EnsembleStat_fcstHRRR_fcstOnly_SurrogateSevere.conf
index 8b3afd74d5..22e00738e4 100644
--- a/parm/use_cases/model_applications/convection_allowing_models/EnsembleStat_fcstHRRR_fcstOnly_SurrogateSevere.conf
+++ b/parm/use_cases/model_applications/convection_allowing_models/EnsembleStat_fcstHRRR_fcstOnly_SurrogateSevere.conf
@@ -1,48 +1,48 @@
-# HRRR Surrogate Severe Calculation
-
 [config]
-# time looping - options are INIT, VALID, RETRO, and REALTIME
-LOOP_BY = INIT
-# Format of INIT_BEG and INIT_END
-INIT_TIME_FMT = %Y%m%d%H
+PROCESS_LIST = PCPCombine, EnsembleStat, RegridDataPlane
-# Start time for METplus run
+LOOP_BY = INIT
+INIT_TIME_FMT = %Y%m%d%H
 INIT_BEG=2020020500
-
-# End time for METplus run
 INIT_END=2020020500
-
-# Increment between METplus runs in seconds. Must be >= 60
 INIT_INCREMENT=86400
-# list of forecast leads to process
 LEAD_SEQ = 36
-# Options are times, processes
-# times = run all items in the PROCESS_LIST for a single initialization
-# time, then repeat until all times have been evaluated.
-# processes = run each item in the PROCESS_LIST for all times
-# specified, then repeat for the next item in the PROCESS_LIST.
 LOOP_ORDER = processes
-# List of applications to run
-PROCESS_LIST = PCPCombine, EnsembleStat, RegridDataPlane
-
-MODEL = FCST_ens
-OBTYPE = ANALYS
-
-# PCP_COMBINE (Step 1)
-# run pcp_combine on forecast data
 FCST_PCP_COMBINE_RUN = True
-
-# method to run pcp_combine on forecast data
-# Options are ADD, SUM, SUBTRACT, and DERIVE
 FCST_PCP_COMBINE_METHOD = DERIVE
 FCST_PCP_COMBINE_STAT_LIST = MAX
-# field name of 1 hr accumulation in forecast files
+FCST_PCP_COMBINE_INPUT_DIR = {INPUT_BASE}/model_applications/convection_allowing_models/surrogate_severe_calc
+FCST_PCP_COMBINE_INPUT_TEMPLATE = {init?fmt=%Y%m%d}/hrrr_ncep_{init?fmt=%Y%m%d%H}f{lead?fmt=%HHH}.grib2
+
+FCST_PCP_COMBINE_OUTPUT_DIR = {OUTPUT_BASE}/convection_allowing_models/surrogate_severe_calc
+FCST_PCP_COMBINE_OUTPUT_TEMPLATE = {init?fmt=%Y%m%d}/hrrr_ncep_{init?fmt=%Y%m%d%H}f{lead?fmt=%HHH}.nc
+
+
+FCST_ENSEMBLE_STAT_INPUT_DIR = {FCST_PCP_COMBINE_OUTPUT_DIR}
+FCST_ENSEMBLE_STAT_INPUT_TEMPLATE = {FCST_PCP_COMBINE_OUTPUT_TEMPLATE}
+
+ENSEMBLE_STAT_OUTPUT_DIR = {FCST_PCP_COMBINE_OUTPUT_DIR}
+
+
+FCST_REGRID_DATA_PLANE_RUN = True
+
+FCST_REGRID_DATA_PLANE_INPUT_DIR = {FCST_PCP_COMBINE_OUTPUT_DIR}
+FCST_REGRID_DATA_PLANE_INPUT_TEMPLATE = ensemble_stat_{valid?fmt=%Y%m%d}_120000V_ens.nc
+
+FCST_REGRID_DATA_PLANE_OUTPUT_DIR = {FCST_PCP_COMBINE_OUTPUT_DIR}
+FCST_REGRID_DATA_PLANE_OUTPUT_TEMPLATE = surrogate_severe_{init?fmt=%Y%m%d}_{lead?fmt=%HHH}V_regrid.nc
+
+
+MODEL = FCST_ens
+OBTYPE = ANALYS
+
+
 FCST_PCP_COMBINE_INPUT_ACCUMS = 1
 FCST_PCP_COMBINE_INPUT_NAMES = MXUPHL
 FCST_PCP_COMBINE_INPUT_LEVELS = Z2000-5000
@@ -51,16 +51,12 @@ FCST_PCP_COMBINE_OUTPUT_ACCUM = 24
 FCST_PCP_COMBINE_DERIVE_LOOKBACK = 24
 FCST_PCP_COMBINE_INPUT_DATATYPE = GRIB
-#ENSEMBLE_STAT (Steps 2 and 3)
-# number of expected members for ensemble. Should correspond with the
-# number of items in the list for FCST_ENSEMBLE_STAT_INPUT_TEMPLATE
-ENSEMBLE_STAT_N_MEMBERS = 1
-# ens.ens_thresh value in the MET config file
-# threshold for ratio of valid files to expected files to allow app to run
+ENSEMBLE_STAT_CONFIG_FILE = {PARM_BASE}/met_config/EnsembleStatConfig_wrapped
+
+ENSEMBLE_STAT_N_MEMBERS = 1
 ENSEMBLE_STAT_ENS_THRESH = 1.0
-# Used in the MET config file for: regrid to_grid field
 ENSEMBLE_STAT_REGRID_TO_GRID = G211
 ENSEMBLE_STAT_REGRID_METHOD = MAX
 ENSEMBLE_STAT_REGRID_WIDTH = 27
@@ -101,80 +97,39 @@ ENSEMBLE_STAT_ENSEMBLE_FLAG_NMEP = FALSE
 ENSEMBLE_STAT_ENSEMBLE_FLAG_RANK = FALSE
 ENSEMBLE_STAT_ENSEMBLE_FLAG_WEIGHT = FALSE
-ENSEMBLE_STAT_CONFIG_FILE = {CONFIG_DIR}/EnsembleStatConfig_wrapped
 ENS_VAR1_NAME = {FCST_PCP_COMBINE_OUTPUT_NAME}
 ENS_VAR1_LEVELS = "(*,*)"
 ENS_VAR1_THRESH = >=14.2, >=19.0, >=26.0, >=38.0, >=61.0
-# REGRID_DATA_PLANE (Step 4)
-# Run regrid_data_plane on forecast data
-FCST_REGRID_DATA_PLANE_RUN = True
-
-# If true, process each field individually and write a file for each
-# If false, run once per run time passing in all fields specified
-REGRID_DATA_PLANE_ONCE_PER_FIELD = False
-
-# Name of input field to process
 FCST_REGRID_DATA_PLANE_VAR1_INPUT_FIELD_NAME = MXUPHL_24_A1_ENS_FREQ_ge14.2
 FCST_REGRID_DATA_PLANE_VAR2_INPUT_FIELD_NAME = MXUPHL_24_A1_ENS_FREQ_ge19.0
 FCST_REGRID_DATA_PLANE_VAR3_INPUT_FIELD_NAME = MXUPHL_24_A1_ENS_FREQ_ge26.0
 FCST_REGRID_DATA_PLANE_VAR4_INPUT_FIELD_NAME = MXUPHL_24_A1_ENS_FREQ_ge38.0
 FCST_REGRID_DATA_PLANE_VAR5_INPUT_FIELD_NAME = MXUPHL_24_A1_ENS_FREQ_ge61.0
-# Level of input field to process
 FCST_REGRID_DATA_PLANE_VAR1_INPUT_LEVEL = "(*,*)"
 FCST_REGRID_DATA_PLANE_VAR2_INPUT_LEVEL = "(*,*)"
 FCST_REGRID_DATA_PLANE_VAR3_INPUT_LEVEL = "(*,*)"
 FCST_REGRID_DATA_PLANE_VAR4_INPUT_LEVEL = "(*,*)"
 FCST_REGRID_DATA_PLANE_VAR5_INPUT_LEVEL = "(*,*)"
-# Name of output field to create
 FCST_REGRID_DATA_PLANE_VAR1_OUTPUT_FIELD_NAME = MXUPHL_prob_75
 FCST_REGRID_DATA_PLANE_VAR2_OUTPUT_FIELD_NAME = MXUPHL_prob_80
 FCST_REGRID_DATA_PLANE_VAR3_OUTPUT_FIELD_NAME = MXUPHL_prob_85
 FCST_REGRID_DATA_PLANE_VAR4_OUTPUT_FIELD_NAME = MXUPHL_prob_90
 FCST_REGRID_DATA_PLANE_VAR5_OUTPUT_FIELD_NAME = MXUPHL_prob_95
-# Mask to use for regridding
+
+
+REGRID_DATA_PLANE_ONCE_PER_FIELD = False
+
 REGRID_DATA_PLANE_VERIF_GRID = G211
-# Method to run regrid_data_plane, not setting this will default to NEAREST
 REGRID_DATA_PLANE_METHOD = MAXGAUSS
-# Regridding width used in regrid_data_plane, not setting this will default to 1
 REGRID_DATA_PLANE_WIDTH = 1
-# Set Gaussian dx value to add as command line argument - not added if unset or blank
 REGRID_DATA_PLANE_GAUSSIAN_DX = 81.271
-
-# Set Gaussian filter radius value to add as command line argument - not added if unset or blank
 REGRID_DATA_PLANE_GAUSSIAN_RADIUS = 120
-
-[dir]
-
-CONFIG_DIR={PARM_BASE}/met_config
-
-# input and output data directories for each application in PROCESS_LIST
-FCST_PCP_COMBINE_INPUT_DIR = {INPUT_BASE}/model_applications/convection_allowing_models/surrogate_severe_calc
-FCST_PCP_COMBINE_OUTPUT_DIR = {OUTPUT_BASE}/convection_allowing_models/surrogate_severe_calc
-
-FCST_ENSEMBLE_STAT_INPUT_DIR = {FCST_PCP_COMBINE_OUTPUT_DIR}
-ENSEMBLE_STAT_OUTPUT_DIR = {FCST_PCP_COMBINE_OUTPUT_DIR}
-
-FCST_REGRID_DATA_PLANE_INPUT_DIR = {FCST_PCP_COMBINE_OUTPUT_DIR}
-FCST_REGRID_DATA_PLANE_OUTPUT_DIR = {FCST_PCP_COMBINE_OUTPUT_DIR}
-
-[filename_templates]
-# format of filenames
-# Input HRRR
-FCST_PCP_COMBINE_INPUT_TEMPLATE = {init?fmt=%Y%m%d}/hrrr_ncep_{init?fmt=%Y%m%d%H}f{lead?fmt=%HHH}.grib2
-FCST_PCP_COMBINE_OUTPUT_TEMPLATE = {init?fmt=%Y%m%d}/hrrr_ncep_{init?fmt=%Y%m%d%H}f{lead?fmt=%HHH}.nc
-
-# To enemble_stat
-FCST_ENSEMBLE_STAT_INPUT_TEMPLATE = {FCST_PCP_COMBINE_OUTPUT_TEMPLATE}
-
-# To regrid_data_plane
-FCST_REGRID_DATA_PLANE_INPUT_TEMPLATE = ensemble_stat_{valid?fmt=%Y%m%d}_120000V_ens.nc
-FCST_REGRID_DATA_PLANE_OUTPUT_TEMPLATE = surrogate_severe_{init?fmt=%Y%m%d}_{lead?fmt=%HHH}V_regrid.nc
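A worked sketch of the file chain for this case, with init = 2020020500 and lead = 36 (valid 2020-02-06 12Z); the EnsembleStat output name is taken from the RegridDataPlane input template above:

# PCPCombine:      20200205/hrrr_ncep_2020020500f036.nc
# EnsembleStat:    ensemble_stat_20200206_120000V_ens.nc (read by RegridDataPlane)
# RegridDataPlane: surrogate_severe_20200205_036V_regrid.nc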
diff --git a/parm/use_cases/model_applications/precipitation/GridStat_fcstGFS_obsCCPA_GRIB.conf b/parm/use_cases/model_applications/precipitation/GridStat_fcstGFS_obsCCPA_GRIB.conf
index e083a34941..6fe3bea524 100644
--- a/parm/use_cases/model_applications/precipitation/GridStat_fcstGFS_obsCCPA_GRIB.conf
+++ b/parm/use_cases/model_applications/precipitation/GridStat_fcstGFS_obsCCPA_GRIB.conf
@@ -1,51 +1,49 @@
-# Grid to Grid Precipitation Example
-
 [config]
-# time looping - options are INIT, VALID, RETRO, and REALTIME
-LOOP_BY = VALID
-# Format of VALID_BEG and VALID_END
-VALID_TIME_FMT = %Y%m%d%H
+PROCESS_LIST = PCPCombine, GridStat
-# Start time for METplus run
+LOOP_BY = VALID
+VALID_TIME_FMT = %Y%m%d%H
 VALID_BEG = 2017061300
-
-# End time for METplus run
 VALID_END = 2017061300
-
-# Increment between METplus runs in seconds. Must be >= 60
 VALID_INCREMENT = 86400
-# Options are times, processes
-# times = run all items in the PROCESS_LIST for a single initialization
-# time, then repeat until all times have been evaluated.
-# processes = run each item in the PROCESS_LIST for all times
-# specified, then repeat for the next item in the PROCESS_LIST.
-LOOP_ORDER = times
+LEAD_SEQ = 24
-# List of applications to run
-PROCESS_LIST = PCPCombine, GridStat
+LOOP_ORDER = times
-# run pcp_combine on forecast data
 FCST_PCP_COMBINE_RUN = True
-
-# mode of pcp_combine to use (SUM, ADD, SUBTRACT)
 FCST_PCP_COMBINE_METHOD = SUM
-# list of variables to compare
+FCST_PCP_COMBINE_INPUT_DIR = {INPUT_BASE}/model_applications/precipitation/fcst
+FCST_PCP_COMBINE_INPUT_TEMPLATE = pgbf{lead?fmt=%HHH}.gfs.{init?fmt=%Y%m%d%H}
+
+FCST_PCP_COMBINE_OUTPUT_DIR = {OUTPUT_BASE}/model_applications/precipitation/GridStat_fcstGFS_obsCCPA_GRIB/gfs/bucket
+FCST_PCP_COMBINE_OUTPUT_TEMPLATE = gfs.{init?fmt=%Y%m%d%H}_A{level?fmt=%HH}h
+
+FCST_GRID_STAT_INPUT_DIR = {FCST_PCP_COMBINE_OUTPUT_DIR}
+FCST_GRID_STAT_INPUT_TEMPLATE = gfs.{init?fmt=%Y%m%d%H}_A{level?fmt=%HH}h
+
+OBS_GRID_STAT_INPUT_DIR = {INPUT_BASE}/model_applications/precipitation/daily_1deg_ccpa
+OBS_GRID_STAT_INPUT_TEMPLATE = ccpa_conus_1.0d_{valid?fmt=%Y%m%d}
+
+GRID_STAT_OUTPUT_DIR = {OUTPUT_BASE}/model_applications/precipitation/GridStat_fcstGFS_obsCCPA_GRIB/met_out/{MODEL}/precip
+GRID_STAT_OUTPUT_TEMPLATE = {valid?fmt=%Y%m%d%H%M}/grid_stat
+
+
+MODEL = GFS
+OBTYPE = ANLYS
+
 BOTH_VAR1_NAME = APCP
 BOTH_VAR1_LEVELS = A24
 BOTH_VAR1_THRESH = ge12.7, ge25.4, ge50.8, ge76.2, ge152.4
-# list of forecast leads to process
-LEAD_SEQ = 24
-# description of data to be processed
-# used in output file path
-MODEL = GFS
-OBTYPE = ANLYS
+FCST_PCP_COMBINE_INPUT_DATATYPE = GRIB
+FCST_IS_PROB = false
+FCST_PCP_COMBINE_INPUT_ACCUMS = 6
+
-# location of grid_stat MET config file
 GRID_STAT_CONFIG_FILE = {PARM_BASE}/met_config/GridStatConfig_wrapped
 GRID_STAT_REGRID_TO_GRID = G211
@@ -69,33 +67,3 @@ GRID_STAT_NC_PAIRS_FLAG_APPLY_MASK = FALSE
 GRID_STAT_CLIMO_MEAN_REGRID_METHOD = BILIN
 GRID_STAT_CLIMO_MEAN_REGRID_WIDTH = 2
 GRID_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = NEAREST
-
-# Forecast data description variables
-FCST_PCP_COMBINE_INPUT_DATATYPE = GRIB
-FCST_IS_PROB = false
-FCST_PCP_COMBINE_INPUT_ACCUMS = 6
-
-# Observation data description variables
-# none needed
-
-[dir]
-# input and output data directories
-FCST_PCP_COMBINE_INPUT_DIR = {INPUT_BASE}/model_applications/precipitation/fcst
-FCST_PCP_COMBINE_OUTPUT_DIR = {OUTPUT_BASE}/model_applications/precipitation/GridStat_fcstGFS_obsCCPA_GRIB/gfs/bucket
-FCST_GRID_STAT_INPUT_DIR = {FCST_PCP_COMBINE_OUTPUT_DIR}
-
-OBS_GRID_STAT_INPUT_DIR = {INPUT_BASE}/model_applications/precipitation/daily_1deg_ccpa
-
-GRID_STAT_OUTPUT_DIR = {OUTPUT_BASE}/model_applications/precipitation/GridStat_fcstGFS_obsCCPA_GRIB/met_out/{MODEL}/precip
-
-[filename_templates]
-# format of filenames
-# GFS
-FCST_PCP_COMBINE_INPUT_TEMPLATE = pgbf{lead?fmt=%HHH}.gfs.{init?fmt=%Y%m%d%H}
-FCST_PCP_COMBINE_OUTPUT_TEMPLATE = gfs.{init?fmt=%Y%m%d%H}_A{level?fmt=%HH}h
-FCST_GRID_STAT_INPUT_TEMPLATE = gfs.{init?fmt=%Y%m%d%H}_A{level?fmt=%HH}h
-
-# ANLYS
-OBS_GRID_STAT_INPUT_TEMPLATE = ccpa_conus_1.0d_{valid?fmt=%Y%m%d}
-
-GRID_STAT_OUTPUT_TEMPLATE = {valid?fmt=%Y%m%d%H%M}/grid_stat
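For reference, with valid = 2017061300 and lead = 24 the init time is 2017061200, so the templates resolve roughly as below; the SUM method builds the A24 bucket from the 6-hour accumulations declared by FCST_PCP_COMBINE_INPUT_ACCUMS = 6:

# PCPCombine input:  pgbf024.gfs.2017061200
# PCPCombine output: gfs.2017061200_A24h (read back by GridStat)
# GridStat obs:      ccpa_conus_1.0d_20170613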
diff --git a/parm/use_cases/model_applications/precipitation/GridStat_fcstHREFmean_obsStgIV_Gempak.conf b/parm/use_cases/model_applications/precipitation/GridStat_fcstHREFmean_obsStgIV_Gempak.conf
index eaec762c96..b5cf7631c2 100644
--- a/parm/use_cases/model_applications/precipitation/GridStat_fcstHREFmean_obsStgIV_Gempak.conf
+++ b/parm/use_cases/model_applications/precipitation/GridStat_fcstHREFmean_obsStgIV_Gempak.conf
@@ -1,51 +1,50 @@
-# HREF Mean vs. StageIV Gempak Configurations
-
 [config]
-# time looping - options are INIT, VALID, RETRO, and REALTIME
-LOOP_BY = INIT
-# Format of INIT_BEG and INIT_END
-INIT_TIME_FMT = %Y%m%d%H
+PROCESS_LIST = PCPCombine, RegridDataPlane, GridStat
-# Start time for METplus run
+LOOP_BY = INIT
+INIT_TIME_FMT = %Y%m%d%H
 INIT_BEG=2017050912
-
-# End time for METplus run
 INIT_END=2017050912
-
-# Increment between METplus runs in seconds. Must be >= 60
 INIT_INCREMENT=43200
-# list of forecast leads to process
 LEAD_SEQ = 18
-# Options are times, processes
-# times = run all items in the PROCESS_LIST for a single initialization
-# time, then repeat until all times have been evaluated.
-# processes = run each item in the PROCESS_LIST for all times
-# specified, then repeat for the next item in the PROCESS_LIST.
 LOOP_ORDER = times
-# List of applications to run
-PROCESS_LIST = PCPCombine, RegridDataPlane, GridStat
-# run pcp_combine on forecast data
 FCST_PCP_COMBINE_RUN = True
-
-# method to run pcp_combine on forecast data
-# Options are ADD, SUM, SUBTRACT, and DERIVE
 FCST_PCP_COMBINE_METHOD = ADD
-# run regrid_data_plane on observation data
+FCST_PCP_COMBINE_INPUT_DIR = {INPUT_BASE}/model_applications/precipitation/HREFv2_Mean_Gempak
+FCST_PCP_COMBINE_INPUT_TEMPLATE = {init?fmt=%Y%m%d}/hrefmean_{init?fmt=%Y%m%d%H}f{lead?fmt=%HHH}.grd
+
+FCST_PCP_COMBINE_OUTPUT_DIR = {OUTPUT_BASE}/model_applications/precipitation/GridStat_fcstHREFmean_obsStgIV_Gempak/HREFv2_Mean/bucket
+FCST_PCP_COMBINE_OUTPUT_TEMPLATE = {valid?fmt=%Y%m%d}/hrefmean_{valid?fmt=%Y%m%d%H}_A{level?fmt=%HH}.nc
+
+
 OBS_REGRID_DATA_PLANE_RUN = True
-# method to run regrid_data_plane, not setting this will default to NEAREST
-REGRID_DATA_PLANE_METHOD = BUDGET
+OBS_REGRID_DATA_PLANE_INPUT_DIR = {INPUT_BASE}/model_applications/precipitation/StageIV
+OBS_REGRID_DATA_PLANE_INPUT_TEMPLATE = {valid?fmt=%Y%m%d?shift=-12H}12_st4.nc
+
+OBS_REGRID_DATA_PLANE_OUTPUT_DIR = {OUTPUT_BASE}/model_applications/precipitation/GridStat_fcstHREFmean_obsStgIV_Gempak/StageIV_gempak/regrid
+OBS_REGRID_DATA_PLANE_OUTPUT_TEMPLATE = {valid?fmt=%Y%m%d%H}_st4_A06.nc
-# regridding width used in regrid_data_plane, not setting this will default to 1
-REGRID_DATA_PLANE_WIDTH = 2
-# list of variables to compare
+FCST_GRID_STAT_INPUT_DIR = {FCST_PCP_COMBINE_OUTPUT_DIR}
+FCST_GRID_STAT_INPUT_TEMPLATE = {valid?fmt=%Y%m%d}/hrefmean_{valid?fmt=%Y%m%d%H}_A{level?fmt=%HH}.nc
+
+OBS_GRID_STAT_INPUT_DIR = {OBS_REGRID_DATA_PLANE_OUTPUT_DIR}
+OBS_GRID_STAT_INPUT_TEMPLATE = {OBS_REGRID_DATA_PLANE_OUTPUT_TEMPLATE}
+
+GRID_STAT_OUTPUT_DIR = {OUTPUT_BASE}/model_applications/precipitation/GridStat_fcstHREFmean_obsStgIV_Gempak/GridStat
+GRID_STAT_OUTPUT_TEMPLATE = {init?fmt=%Y%m%d%H%M}
+
+
+MODEL = HREF_MEAN
+OBTYPE = STAGE4
+
 FCST_VAR1_NAME = APCP
 FCST_VAR1_LEVELS = A06
 FCST_VAR1_THRESH = gt12.7, gt25.4, gt50.8, gt76.2, gt152.4
@@ -54,15 +53,26 @@ OBS_VAR1_NAME = P06M_NONE
 OBS_VAR1_LEVELS = "(*,*)"
 OBS_VAR1_THRESH = gt12.7, gt25.4, gt50.8, gt76.2, gt152.4
-# description of data to be processed
-# used in output file path
-MODEL = HREF_MEAN
-OBTYPE = STAGE4
-# mask to use for regridding
+FCST_PCP_COMBINE_INPUT_DATATYPE = GEMPAK
+FCST_IS_PROB = false
+FCST_PCP_COMBINE_CONSTANT_INIT = true
+
+FCST_PCP_COMBINE_INPUT_ACCUMS = 1
+FCST_PCP_COMBINE_INPUT_NAMES = P01M_NONE
+FCST_PCP_COMBINE_INPUT_LEVELS = "(0,*,*)"
+
+
+REGRID_DATA_PLANE_METHOD = BUDGET
+REGRID_DATA_PLANE_WIDTH = 2
+
 REGRID_DATA_PLANE_VERIF_GRID={INPUT_BASE}/model_applications/precipitation/mask/CONUS_HRRRTLE.nc
-# location of grid_stat MET config file
+OBS_REGRID_DATA_PLANE_VAR1_INPUT_LEVEL = "({valid?fmt=%Y%m%d_%H%M%S},*,*)"
+
+OBS_PCP_COMBINE_INPUT_DATATYPE = NETCDF
+
+
 GRID_STAT_CONFIG_FILE = {PARM_BASE}/met_config/GridStatConfig_wrapped
 GRID_STAT_REGRID_TO_GRID = OBS
@@ -81,57 +91,3 @@ GRID_STAT_NC_PAIRS_FLAG_DIFF = FALSE
 GRID_STAT_NC_PAIRS_FLAG_CLIMO = FALSE
 GRID_STAT_NC_PAIRS_FLAG_APPLY_MASK = FALSE
 GRID_STAT_NC_PAIRS_FLAG_DISTANCE_MAP = TRUE
-
-# HREF Mean Model Options:
-
-# Data type of forecast data read by pcp_combine
-# valid options are GRIB, NETCDF, and GEMPAK
-FCST_PCP_COMBINE_INPUT_DATATYPE = GEMPAK
-
-# Set to true if forecast data is probabilistic
-FCST_IS_PROB = false
-
-# Set to true if forecast files are generated once per day
-FCST_PCP_COMBINE_IS_DAILY_FILE = false
-
-FCST_PCP_COMBINE_CONSTANT_INIT = true
-
-# field name of 1 hr accumulation in forecast files
-FCST_PCP_COMBINE_INPUT_ACCUMS = 1
-FCST_PCP_COMBINE_INPUT_NAMES = P01M_NONE
-FCST_PCP_COMBINE_INPUT_LEVELS = "(0,*,*)"
-
-# Stage 4 Gempak Observation Data Parameters
-OBS_REGRID_DATA_PLANE_VAR1_INPUT_LEVEL = "({valid?fmt=%Y%m%d_%H%M%S},*,*)"
-
-# Data type of observation data read by pcp_combine
-# valid options are GRIB, NETCDF, and GEMPAK
-OBS_PCP_COMBINE_INPUT_DATATYPE = NETCDF
-
-[dir]
-
-# input and output data directories for each application in PROCESS_LIST
-FCST_PCP_COMBINE_INPUT_DIR = {INPUT_BASE}/model_applications/precipitation/HREFv2_Mean_Gempak
-FCST_PCP_COMBINE_OUTPUT_DIR = {OUTPUT_BASE}/model_applications/precipitation/GridStat_fcstHREFmean_obsStgIV_Gempak/HREFv2_Mean/bucket
-FCST_GRID_STAT_INPUT_DIR = {FCST_PCP_COMBINE_OUTPUT_DIR}
-
-OBS_REGRID_DATA_PLANE_INPUT_DIR = {INPUT_BASE}/model_applications/precipitation/StageIV
-OBS_REGRID_DATA_PLANE_OUTPUT_DIR = {OUTPUT_BASE}/model_applications/precipitation/GridStat_fcstHREFmean_obsStgIV_Gempak/StageIV_gempak/regrid
-OBS_GRID_STAT_INPUT_DIR = {OBS_REGRID_DATA_PLANE_OUTPUT_DIR}
-
-GRID_STAT_OUTPUT_DIR = {OUTPUT_BASE}/model_applications/precipitation/GridStat_fcstHREFmean_obsStgIV_Gempak/GridStat
-
-[filename_templates]
-# format of filenames
-
-# HREF Mean
-FCST_PCP_COMBINE_INPUT_TEMPLATE = {init?fmt=%Y%m%d}/hrefmean_{init?fmt=%Y%m%d%H}f{lead?fmt=%HHH}.grd
-FCST_PCP_COMBINE_OUTPUT_TEMPLATE = {valid?fmt=%Y%m%d}/hrefmean_{valid?fmt=%Y%m%d%H}_A{level?fmt=%HH}.nc
-FCST_GRID_STAT_INPUT_TEMPLATE = {valid?fmt=%Y%m%d}/hrefmean_{valid?fmt=%Y%m%d%H}_A{level?fmt=%HH}.nc
-
-# StageIV Gempak
-OBS_REGRID_DATA_PLANE_INPUT_TEMPLATE = {valid?fmt=%Y%m%d?shift=-12H}12_st4.nc
-OBS_REGRID_DATA_PLANE_OUTPUT_TEMPLATE = {valid?fmt=%Y%m%d%H}_st4_A06.nc
-OBS_GRID_STAT_INPUT_TEMPLATE = {OBS_REGRID_DATA_PLANE_OUTPUT_TEMPLATE}
-
-GRID_STAT_OUTPUT_TEMPLATE = {init?fmt=%Y%m%d%H%M}
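A worked sketch for this case, with init = 2017050912 and lead = 18 (valid 2017-05-10 06Z); the ADD method gathers six 1-hour P01M_NONE fields ending at the valid time, so roughly:

# forecast inputs: 20170509/hrefmean_2017050912f013.grd through f018.grd
# forecast bucket: 20170510/hrefmean_2017051006_A06.nc
# obs regrid:      2017050912_st4.nc (valid date shifted -12H) -> 2017051006_st4_A06.nc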
diff --git a/parm/use_cases/model_applications/precipitation/GridStat_fcstHREFmean_obsStgIV_NetCDF.conf b/parm/use_cases/model_applications/precipitation/GridStat_fcstHREFmean_obsStgIV_NetCDF.conf
index 42dc08c726..42d7c0785b 100644
--- a/parm/use_cases/model_applications/precipitation/GridStat_fcstHREFmean_obsStgIV_NetCDF.conf
+++ b/parm/use_cases/model_applications/precipitation/GridStat_fcstHREFmean_obsStgIV_NetCDF.conf
@@ -1,109 +1,52 @@
-# HREF Mean vs. StageIV NetCDF Configurations
-
 [config]
-# time looping - options are INIT, VALID, RETRO, and REALTIME
-LOOP_BY = INIT
-# Format of INIT_BEG and INIT_END
-INIT_TIME_FMT = %Y%m%d%H
+PROCESS_LIST = PCPCombine, RegridDataPlane, GridStat
-# Start time for METplus run
+LOOP_BY = INIT
+INIT_TIME_FMT = %Y%m%d%H
 INIT_BEG=2017050912
-
-# End time for METplus run
 INIT_END=2017050912
-
-# Increment between METplus runs in seconds. Must be >= 60
 INIT_INCREMENT=43200
-# list of forecast leads to process
 LEAD_SEQ = 18
-# Options are times, processes
-# times = run all items in the PROCESS_LIST for a single initialization
-# time, then repeat until all times have been evaluated.
-# processes = run each item in the PROCESS_LIST for all times
-# specified, then repeat for the next item in the PROCESS_LIST.
 LOOP_ORDER = times
-# List of applications to run
-PROCESS_LIST = PCPCombine, RegridDataPlane, GridStat
-
-# run pcp_combine on forecast data
 FCST_PCP_COMBINE_RUN = True
-
-# method to run pcp_combine on forecast data
-# Options are ADD, SUM, SUBTRACT, and DERIVE
 FCST_PCP_COMBINE_METHOD = ADD
-# run regrid_data_plane on observation data
-OBS_REGRID_DATA_PLANE_RUN = True
-
-# method to run regrid_data_plane, not setting this will default to NEAREST
-REGRID_DATA_PLANE_METHOD = BUDGET
-
-# regridding width used in regrid_data_plane, not setting this will default to 1
-REGRID_DATA_PLANE_WIDTH = 2
-
-# Set to True to use field name/level defined by the user instead of deriving it
-# used to override settings to get around limitation caused by supporting legacy
-# configurations that will become deprecated in a future version
-# If True, target accumulation for PCPCombine must be set explicitly instead
-# of referring to FCST_VAR1_LEVELS
-USE_EXPLICIT_NAME_AND_LEVEL = True
+FCST_PCP_COMBINE_INPUT_DIR = {INPUT_BASE}/model_applications/precipitation/HREFv2_Mean
+FCST_PCP_COMBINE_INPUT_TEMPLATE = {init?fmt=%Y%m%d}/hrefmean_{init?fmt=%Y%m%d%H}f{lead?fmt=%HHH}.nc
-# list of variables to compare
-FCST_VAR1_NAME = {FCST_PCP_COMBINE_OUTPUT_NAME}
-FCST_VAR1_LEVELS = "(*,*)"
-FCST_VAR1_THRESH = gt12.7, gt25.4, gt50.8, gt76.2, gt152.4
+FCST_PCP_COMBINE_OUTPUT_DIR = {OUTPUT_BASE}/model_applications/precipitation/GridStat_fcstHREFmean_obsStgIV_NetCDF/HREFv2_Mean/bucket
+FCST_PCP_COMBINE_OUTPUT_TEMPLATE = {valid?fmt=%Y%m%d}/hrefmean_{valid?fmt=%Y%m%d%H}_A{level?fmt=%HH}.nc
-OBS_VAR1_NAME = {OBS_REGRID_DATA_PLANE_VAR1_OUTPUT_FIELD_NAME}
-OBS_VAR1_LEVELS = "(*,*)"
-OBS_VAR1_THRESH = gt12.7, gt25.4, gt50.8, gt76.2, gt152.4
-# description of data to be processed
-# used in output file path
-MODEL = HREF_MEAN
-OBTYPE = STAGE4
+OBS_REGRID_DATA_PLANE_RUN = True
-# mask to use for regridding
-REGRID_DATA_PLANE_VERIF_GRID={INPUT_BASE}/model_applications/precipitation/mask/CONUS_HRRRTLE.nc
+OBS_REGRID_DATA_PLANE_INPUT_DIR = {INPUT_BASE}/model_applications/precipitation/StageIV
+OBS_REGRID_DATA_PLANE_INPUT_TEMPLATE = {valid?fmt=%Y%m%d?shift=-12H}12_st4.nc
-# location of grid_stat MET config file
-GRID_STAT_CONFIG_FILE = {PARM_BASE}/met_config/GridStatConfig_wrapped
+OBS_REGRID_DATA_PLANE_OUTPUT_DIR = {OUTPUT_BASE}/model_applications/precipitation/GridStat_fcstHREFmean_obsStgIV_NetCDF/StageIV_netcdf/regrid
+OBS_REGRID_DATA_PLANE_OUTPUT_TEMPLATE = {valid?fmt=%Y%m%d%H}_st4_A06.nc
-GRID_STAT_REGRID_TO_GRID = OBS
-GRID_STAT_NEIGHBORHOOD_WIDTH = 3, 7, 15
-GRID_STAT_NEIGHBORHOOD_SHAPE = SQUARE
-GRID_STAT_NEIGHBORHOOD_COV_THRESH = >=0.5
+FCST_GRID_STAT_INPUT_DIR = {FCST_PCP_COMBINE_OUTPUT_DIR}
+FCST_GRID_STAT_INPUT_TEMPLATE = {valid?fmt=%Y%m%d}/hrefmean_{valid?fmt=%Y%m%d%H}_A06.nc
-GRID_STAT_OUTPUT_FLAG_CTC = STAT
-GRID_STAT_OUTPUT_FLAG_CTS = STAT
-GRID_STAT_OUTPUT_FLAG_DMAP = STAT
+OBS_GRID_STAT_INPUT_DIR = {OBS_REGRID_DATA_PLANE_OUTPUT_DIR}
+OBS_GRID_STAT_INPUT_TEMPLATE = {OBS_REGRID_DATA_PLANE_OUTPUT_TEMPLATE}
-GRID_STAT_NC_PAIRS_FLAG_LATLON = FALSE
-GRID_STAT_NC_PAIRS_FLAG_RAW = FALSE
-GRID_STAT_NC_PAIRS_FLAG_DIFF = FALSE
-GRID_STAT_NC_PAIRS_FLAG_CLIMO = FALSE
-GRID_STAT_NC_PAIRS_FLAG_APPLY_MASK = FALSE
-GRID_STAT_NC_PAIRS_FLAG_DISTANCE_MAP = TRUE
+GRID_STAT_OUTPUT_DIR = {OUTPUT_BASE}/model_applications/precipitation/GridStat_fcstHREFmean_obsStgIV_NetCDF/GridStat
+GRID_STAT_OUTPUT_TEMPLATE = {init?fmt=%Y%m%d%H%M}
-# HREF Mean Model Options:
-# Data type of forecast data read by pcp_combine
-# valid options are GRIB, NETCDF, and GEMPAK
 FCST_PCP_COMBINE_INPUT_DATATYPE = NETCDF
-# Set to true if forecast data is probabilistic
 FCST_IS_PROB = false
-# Set to true if forecast files are generated once per day
-FCST_PCP_COMBINE_IS_DAILY_FILE = false
-
 FCST_PCP_COMBINE_CONSTANT_INIT = true
-# field name of 1 hr accumulation in forecast files
 FCST_PCP_COMBINE_INPUT_ACCUMS = 1
 FCST_PCP_COMBINE_INPUT_NAMES = P01M_NONE
 FCST_PCP_COMBINE_INPUT_LEVELS = "(0,*,*)"
@@ -111,44 +54,48 @@ FCST_PCP_COMBINE_INPUT_LEVELS = "(0,*,*)"
 FCST_PCP_COMBINE_OUTPUT_ACCUM = 6
 FCST_PCP_COMBINE_OUTPUT_NAME = APCP_06
-# Stage 4 NetCDF Observation Data Parameters
+
 OBS_REGRID_DATA_PLANE_VAR1_INPUT_FIELD_NAME = P06M_NONE
 OBS_REGRID_DATA_PLANE_VAR1_INPUT_LEVEL = "({valid?fmt=%Y%m%d_%H%M%S},*,*)"
 OBS_REGRID_DATA_PLANE_VAR1_OUTPUT_FIELD_NAME = P06M_NONE
-
-# Data type of observation data read by pcp_combine
-# valid options are GRIB, NETCDF, and GEMPAK
 OBS_PCP_COMBINE_INPUT_DATATYPE = NETCDF
-[dir]
-# location of configuration files used by MET applications
-CONFIG_DIR={PARM_BASE}/use_cases/model_applications/precipitation/GridStat_fcstHREFmean_obsStgIV_NetCDF
+REGRID_DATA_PLANE_METHOD = BUDGET
+REGRID_DATA_PLANE_WIDTH = 2
-# input and output data directories for each application in PROCESS_LIST
-FCST_PCP_COMBINE_INPUT_DIR = {INPUT_BASE}/model_applications/precipitation/HREFv2_Mean
-FCST_PCP_COMBINE_OUTPUT_DIR = {OUTPUT_BASE}/model_applications/precipitation/GridStat_fcstHREFmean_obsStgIV_NetCDF/HREFv2_Mean/bucket
-FCST_GRID_STAT_INPUT_DIR = {FCST_PCP_COMBINE_OUTPUT_DIR}
+REGRID_DATA_PLANE_VERIF_GRID={INPUT_BASE}/model_applications/precipitation/mask/CONUS_HRRRTLE.nc
-OBS_REGRID_DATA_PLANE_INPUT_DIR = {INPUT_BASE}/model_applications/precipitation/StageIV
-OBS_REGRID_DATA_PLANE_OUTPUT_DIR = {OUTPUT_BASE}/model_applications/precipitation/GridStat_fcstHREFmean_obsStgIV_NetCDF/StageIV_netcdf/regrid
-OBS_GRID_STAT_INPUT_DIR = {OBS_REGRID_DATA_PLANE_OUTPUT_DIR}
+#USE_EXPLICIT_NAME_AND_LEVEL = True
-GRID_STAT_OUTPUT_DIR = {OUTPUT_BASE}/model_applications/precipitation/GridStat_fcstHREFmean_obsStgIV_NetCDF/GridStat
+MODEL = HREF_MEAN
+OBTYPE = STAGE4
-[filename_templates]
-# format of filenames
+FCST_VAR1_NAME = {FCST_PCP_COMBINE_OUTPUT_NAME}
+FCST_VAR1_LEVELS = "(*,*)"
+FCST_VAR1_THRESH = gt12.7, gt25.4, gt50.8, gt76.2, gt152.4
-# HREF Mean
-FCST_PCP_COMBINE_INPUT_TEMPLATE = {init?fmt=%Y%m%d}/hrefmean_{init?fmt=%Y%m%d%H}f{lead?fmt=%HHH}.nc
-FCST_PCP_COMBINE_OUTPUT_TEMPLATE = {valid?fmt=%Y%m%d}/hrefmean_{valid?fmt=%Y%m%d%H}_A{level?fmt=%HH}.nc
+OBS_VAR1_NAME = {OBS_REGRID_DATA_PLANE_VAR1_OUTPUT_FIELD_NAME}
+OBS_VAR1_LEVELS = "(*,*)"
+OBS_VAR1_THRESH = gt12.7, gt25.4, gt50.8, gt76.2, gt152.4
-FCST_GRID_STAT_INPUT_TEMPLATE = {valid?fmt=%Y%m%d}/hrefmean_{valid?fmt=%Y%m%d%H}_A06.nc
-# StageIV NetCDF
-OBS_REGRID_DATA_PLANE_INPUT_TEMPLATE = {valid?fmt=%Y%m%d?shift=-12H}12_st4.nc
-OBS_REGRID_DATA_PLANE_OUTPUT_TEMPLATE = {valid?fmt=%Y%m%d%H}_st4_A06.nc
-OBS_GRID_STAT_INPUT_TEMPLATE = {OBS_REGRID_DATA_PLANE_OUTPUT_TEMPLATE}
+GRID_STAT_CONFIG_FILE = {PARM_BASE}/met_config/GridStatConfig_wrapped
-GRID_STAT_OUTPUT_TEMPLATE = {init?fmt=%Y%m%d%H%M}
+GRID_STAT_REGRID_TO_GRID = OBS
+
+GRID_STAT_NEIGHBORHOOD_WIDTH = 3, 7, 15
+GRID_STAT_NEIGHBORHOOD_SHAPE = SQUARE
+GRID_STAT_NEIGHBORHOOD_COV_THRESH = >=0.5
+
+GRID_STAT_OUTPUT_FLAG_CTC = STAT
+GRID_STAT_OUTPUT_FLAG_CTS = STAT
+GRID_STAT_OUTPUT_FLAG_DMAP = STAT
+
+GRID_STAT_NC_PAIRS_FLAG_LATLON = FALSE
+GRID_STAT_NC_PAIRS_FLAG_RAW = FALSE
+GRID_STAT_NC_PAIRS_FLAG_DIFF = FALSE
+GRID_STAT_NC_PAIRS_FLAG_CLIMO = FALSE
+GRID_STAT_NC_PAIRS_FLAG_APPLY_MASK = FALSE
+GRID_STAT_NC_PAIRS_FLAG_DISTANCE_MAP = TRUE
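Note how the comparison fields in this case resolve through indirection rather than literal names; from the settings above:

# FCST_VAR1_NAME = {FCST_PCP_COMBINE_OUTPUT_NAME} -> APCP_06
# OBS_VAR1_NAME  = {OBS_REGRID_DATA_PLANE_VAR1_OUTPUT_FIELD_NAME} -> P06M_NONE

This keeps the GridStat field names in sync with whatever PCPCombine and RegridDataPlane actually write.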
diff --git a/parm/use_cases/model_applications/precipitation/GridStat_fcstHRRR-TLE_obsStgIV_GRIB.conf b/parm/use_cases/model_applications/precipitation/GridStat_fcstHRRR-TLE_obsStgIV_GRIB.conf
index 8a78237763..0ae1781c37 100644
--- a/parm/use_cases/model_applications/precipitation/GridStat_fcstHRRR-TLE_obsStgIV_GRIB.conf
+++ b/parm/use_cases/model_applications/precipitation/GridStat_fcstHRRR-TLE_obsStgIV_GRIB.conf
@@ -1,130 +1,81 @@
-# PHPT vs. StageIV Grib Configurations
-
 [config]
-# List of applications to run
+
 PROCESS_LIST = PCPCombine, RegridDataPlane, GridStat
-# time looping - options are INIT, VALID, RETRO, and REALTIME
 LOOP_BY = INIT
-# Format of INIT_BEG and INIT_END
 INIT_TIME_FMT = %Y%m%d%H
-
-# Start time for METplus run
 INIT_BEG=2016090412
-
-# End time for METplus run
 INIT_END=2016090412
-
-# Increment between METplus runs in seconds. Must be >= 60
 INIT_INCREMENT=60
-# list of forecast leads to process
 LEAD_SEQ = 6, 7
-# Options are times, processes
-# times = run all items in the PROCESS_LIST for a single initialization
-# time, then repeat until all times have been evaluated.
-# processes = run each item in the PROCESS_LIST for all times
-# specified, then repeat for the next item in the PROCESS_LIST.
 LOOP_ORDER = times
-
-# run pcp_combine on observation data
 OBS_PCP_COMBINE_RUN = True
-
-# method to run pcp_combine on observation data
-# Options are ADD, SUM, SUBTRACT, and DERIVE
 OBS_PCP_COMBINE_METHOD = ADD
-# run regrid_data_plane on observation data
 OBS_REGRID_DATA_PLANE_RUN = True
-# method to run regrid_data_plane, not setting this will default to NEAREST
-REGRID_DATA_PLANE_METHOD = BUDGET
+OBS_PCP_COMBINE_INPUT_DIR = {INPUT_BASE}/model_applications/precipitation/StageIV
+OBS_PCP_COMBINE_INPUT_TEMPLATE = {valid?fmt=%Y%m%d}/ST4.{valid?fmt=%Y%m%d%H}.{level?fmt=%HH}h
-# regridding width used in regrid_data_plane, not setting this will default to 1
-REGRID_DATA_PLANE_WIDTH = 2
+OBS_PCP_COMBINE_OUTPUT_DIR = {OUTPUT_BASE}/model_applications/precipitation/GridStat_fcstHRRR-TLE_obsStgIV_GRIB/StageIV_grib/bucket
+OBS_PCP_COMBINE_OUTPUT_TEMPLATE = {valid?fmt=%Y%m%d}/ST4.{valid?fmt=%Y%m%d%H}_A{level?fmt=%HH}h
-# list of variables to compare
-BOTH_VAR1_NAME = APCP
-BOTH_VAR1_LEVELS = A06
-BOTH_VAR1_THRESH = gt12.7, gt25.4, gt50.8, gt76.2, gt152.4
+OBS_REGRID_DATA_PLANE_INPUT_DIR = {OBS_PCP_COMBINE_OUTPUT_DIR}
+OBS_REGRID_DATA_PLANE_OUTPUT_DIR = {OUTPUT_BASE}/model_applications/precipitation/GridStat_fcstHRRR-TLE_obsStgIV_GRIB/StageIV_grib/regrid
+OBS_REGRID_DATA_PLANE_TEMPLATE = {OBS_PCP_COMBINE_OUTPUT_TEMPLATE}
-# description of data to be processed
-# used in output file path
-MODEL = PHPT
-OBTYPE = STAGE4_GRIB
-# mask to use for regridding
-REGRID_DATA_PLANE_VERIF_GRID = {INPUT_BASE}/model_applications/precipitation/mask/CONUS_HRRRTLE.nc
+FCST_GRID_STAT_INPUT_DIR = {INPUT_BASE}/model_applications/precipitation/PHPT
+FCST_GRID_STAT_INPUT_TEMPLATE= {init?fmt=%Y%m%d}/{init?fmt=%Y%m%d}_i{init?fmt=%H}_f{lead?fmt=%HHH}_HRRRTLE_PHPT.grb2
-# location of grid_stat MET config file
-GRID_STAT_CONFIG_FILE = {PARM_BASE}/met_config/GridStatConfig_wrapped
+OBS_GRID_STAT_INPUT_DIR = {OBS_REGRID_DATA_PLANE_OUTPUT_DIR}
+OBS_GRID_STAT_INPUT_TEMPLATE = {OBS_REGRID_DATA_PLANE_TEMPLATE}
-GRID_STAT_OUTPUT_PREFIX = PROB_{MODEL}_{CURRENT_FCST_NAME}_vs_{OBTYPE}_{CURRENT_OBS_NAME}_{CURRENT_FCST_LEVEL}
+GRID_STAT_OUTPUT_DIR = {OUTPUT_BASE}/model_applications/precipitation/GridStat_fcstHRRR-TLE_obsStgIV_GRIB/GridStat
+GRID_STAT_OUTPUT_TEMPLATE = {init?fmt=%Y%m%d%H%M}
-GRID_STAT_MASK_GRID =
+GRID_STAT_VERIFICATION_MASK_TEMPLATE = {INPUT_BASE}/model_applications/precipitation/mask/CONUS_HRRRTLE.nc, {INPUT_BASE}/model_applications/precipitation/mask/EAST_HRRRTLE.nc, {INPUT_BASE}/model_applications/precipitation/mask/WEST_HRRRTLE.nc
-GRID_STAT_OUTPUT_FLAG_PCT = BOTH
-GRID_STAT_OUTPUT_FLAG_PSTD = BOTH
-GRID_STAT_OUTPUT_FLAG_PJC = BOTH
-GRID_STAT_OUTPUT_FLAG_PRC = BOTH
-GRID_STAT_OUTPUT_FLAG_ECLV = STAT
-GRID_STAT_NC_PAIRS_FLAG_LATLON = FALSE
-GRID_STAT_NC_PAIRS_FLAG_RAW = FALSE
-GRID_STAT_NC_PAIRS_FLAG_DIFF = FALSE
-GRID_STAT_NC_PAIRS_FLAG_CLIMO = FALSE
-GRID_STAT_NC_PAIRS_FLAG_APPLY_MASK = FALSE
-
-# PHPT Model Options:
+MODEL = PHPT
+OBTYPE = STAGE4_GRIB
-# Set to true if forecast data is probabilistic
 FCST_IS_PROB = true
-
-# True if probabilistic information is in the GRIB Product Definition Section
 FCST_PROB_IN_GRIB_PDS = True
-# Set to true if forecast files are generated once per day
-FCST_PCP_COMBINE_IS_DAILY_FILE = false
+BOTH_VAR1_NAME = APCP
+BOTH_VAR1_LEVELS = A06
+BOTH_VAR1_THRESH = gt12.7, gt25.4, gt50.8, gt76.2, gt152.4
-# Stage4 Grib Observation Data Parameters:
-# Data type of observation data read by pcp_combine
-# valid options are GRIB, NETCDF, and GEMPAK
 OBS_PCP_COMBINE_INPUT_DATATYPE = GRIB
-
-# Set to true if observation files are generated once per day
-OBS_PCP_COMBINE_IS_DAILY_FILE = false
-
 OBS_PCP_COMBINE_INPUT_ACCUMS = 6, 1
-[dir]
-# input and output data directories for each application in PROCESS_LIST
-FCST_GRID_STAT_INPUT_DIR = {INPUT_BASE}/model_applications/precipitation/PHPT
+REGRID_DATA_PLANE_METHOD = BUDGET
+REGRID_DATA_PLANE_WIDTH = 2
-OBS_PCP_COMBINE_INPUT_DIR = {INPUT_BASE}/model_applications/precipitation/StageIV
-OBS_PCP_COMBINE_OUTPUT_DIR = {OUTPUT_BASE}/model_applications/precipitation/GridStat_fcstHRRR-TLE_obsStgIV_GRIB/StageIV_grib/bucket
-OBS_REGRID_DATA_PLANE_INPUT_DIR = {OBS_PCP_COMBINE_OUTPUT_DIR}
-OBS_REGRID_DATA_PLANE_OUTPUT_DIR = {OUTPUT_BASE}/model_applications/precipitation/GridStat_fcstHRRR-TLE_obsStgIV_GRIB/StageIV_grib/regrid
-OBS_GRID_STAT_INPUT_DIR = {OBS_REGRID_DATA_PLANE_OUTPUT_DIR}
+REGRID_DATA_PLANE_VERIF_GRID = {INPUT_BASE}/model_applications/precipitation/mask/CONUS_HRRRTLE.nc
-GRID_STAT_OUTPUT_DIR = {OUTPUT_BASE}/model_applications/precipitation/GridStat_fcstHRRR-TLE_obsStgIV_GRIB/GridStat
-[filename_templates]
-# format of filenames
+GRID_STAT_CONFIG_FILE = {PARM_BASE}/met_config/GridStatConfig_wrapped
-# PHPT
-FCST_GRID_STAT_INPUT_TEMPLATE= {init?fmt=%Y%m%d}/{init?fmt=%Y%m%d}_i{init?fmt=%H}_f{lead?fmt=%HHH}_HRRRTLE_PHPT.grb2
+GRID_STAT_OUTPUT_PREFIX = PROB_{MODEL}_{CURRENT_FCST_NAME}_vs_{OBTYPE}_{CURRENT_OBS_NAME}_{CURRENT_FCST_LEVEL}
-# StageIV Grib
-OBS_PCP_COMBINE_INPUT_TEMPLATE = {valid?fmt=%Y%m%d}/ST4.{valid?fmt=%Y%m%d%H}.{level?fmt=%HH}h
-OBS_PCP_COMBINE_OUTPUT_TEMPLATE = {valid?fmt=%Y%m%d}/ST4.{valid?fmt=%Y%m%d%H}_A{level?fmt=%HH}h
-OBS_REGRID_DATA_PLANE_TEMPLATE = {OBS_PCP_COMBINE_OUTPUT_TEMPLATE}
-OBS_GRID_STAT_INPUT_TEMPLATE = {OBS_REGRID_DATA_PLANE_TEMPLATE}
+GRID_STAT_MASK_GRID =
-GRID_STAT_VERIFICATION_MASK_TEMPLATE = {INPUT_BASE}/model_applications/precipitation/mask/CONUS_HRRRTLE.nc, {INPUT_BASE}/model_applications/precipitation/mask/EAST_HRRRTLE.nc, {INPUT_BASE}/model_applications/precipitation/mask/WEST_HRRRTLE.nc
+GRID_STAT_OUTPUT_FLAG_PCT = BOTH
+GRID_STAT_OUTPUT_FLAG_PSTD = BOTH
+GRID_STAT_OUTPUT_FLAG_PJC = BOTH
+GRID_STAT_OUTPUT_FLAG_PRC = BOTH
+GRID_STAT_OUTPUT_FLAG_ECLV = STAT
-GRID_STAT_OUTPUT_TEMPLATE = {init?fmt=%Y%m%d%H%M}
+GRID_STAT_NC_PAIRS_FLAG_LATLON = FALSE
+GRID_STAT_NC_PAIRS_FLAG_RAW = FALSE
+GRID_STAT_NC_PAIRS_FLAG_DIFF = FALSE
+GRID_STAT_NC_PAIRS_FLAG_CLIMO = FALSE
+GRID_STAT_NC_PAIRS_FLAG_APPLY_MASK = FALSE
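A closing note on the accumulation list in this case: OBS_PCP_COMBINE_INPUT_ACCUMS = 6, 1 is a preference order. For the A06 bucket at valid 2016090418 (init 2016090412 plus lead 6), PCPCombine would use the single 6-hour file if it is available and otherwise fall back to summing 1-hour files:

# preferred: 20160904/ST4.2016090418.06h
# fallback:  six 1-hour files, ST4.2016090413.01h through ST4.2016090418.01h
# output:    20160904/ST4.2016090418_A06h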