diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md
new file mode 100644
index 00000000000..c9b72628dd4
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/feature_request.md
@@ -0,0 +1,40 @@
+---
+name: Feature Request
+about: Use this template for requesting new features
+title:
+labels: feature
+assignees:
+
+---
+
+
+
+
+
+
+**Description**
+
+
+
+**Requirements**
+
+
+**Acceptance Criteria (Definition of Done)**
+
+
+**(Optional): Suggest A Solution**
+
diff --git a/.github/ISSUE_TEMPLATE/fix_file.md b/.github/ISSUE_TEMPLATE/fix_file.md
new file mode 100644
index 00000000000..1e05f0c9df9
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/fix_file.md
@@ -0,0 +1,24 @@
+---
+name: Fix File Update
+about: Use this template for adding, updating, or removing fix files from global dataset
+title:
+labels: Fix Files
+assignees:
+ - KateFriedman-NOAA
+ - WalterKolczynski-NOAA
+
+---
+
+**Description**
+
+
+
+
+
+
+**Tasks**
+
+- [ ] Discuss needs with global-workflow developer assigned to request.
+- [ ] Add/update/remove fix file(s) in fix sets on supported platforms (global-workflow assignee task).
+- [ ] Update "Fix File Management" spreadsheet (https://docs.google.com/spreadsheets/d/1BeIvcz6TO3If4YCqkUK-oz_kGS9q2wTjwLS-BBemSEY/edit?usp=sharing).
+- [ ] Make related workflow/component updates.
diff --git a/.github/ISSUE_TEMPLATE/production_update.md b/.github/ISSUE_TEMPLATE/production_update.md
new file mode 100644
index 00000000000..fd517d3d0a7
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/production_update.md
@@ -0,0 +1,31 @@
+---
+name: Production Update
+about: Use this template for operational production updates
+title:
+labels: production update
+assignees:
+ - KateFriedman-NOAA
+
+---
+
+**Description**
+
+
+
+
+**Workflow Changes**
+
+
+
+**Tasks**
+- [ ] Create release branch
+- [ ] Make workflow changes for upgrade in release branch (add additional checklist items as needed)
+- [ ] Create release notes
+- [ ] Cut hand-off tag for CDF
+- [ ] Submit CDF to NCO
+- [ ] Implementation into operations complete
+- [ ] Merge release branch into operational branch
+- [ ] Cut version tag from operational branch
+- [ ] Release new version tag
+- [ ] Announce to users
+- [ ] Update Read-The-Docs operations status version in develop
diff --git a/.github/scripts/build_docs.sh b/.github/scripts/build_docs.sh
new file mode 100755
index 00000000000..7fb6701da2a
--- /dev/null
+++ b/.github/scripts/build_docs.sh
@@ -0,0 +1,31 @@
+#! /bin/bash
+
+set -eux
+
+# path to docs directory relative to top level of repository
+# $GITHUB_WORKSPACE is set if the actions/checkout@v3 action is run first
+
+cwd=$(pwd)
+DOCS_DIR="${GITHUB_WORKSPACE}/docs"
+
+# run Make to build the documentation and return to previous directory
+cd "${DOCS_DIR}"
+make clean html
+cd "${cwd}"
+
+# copy HTML output into directory to create an artifact
+mkdir -p artifact/documentation
+cp -R "${DOCS_DIR}/build/html/." artifact/documentation
+
+# check if the warnings.log file is empty
+# Copy it into the artifact and documentation directories
+# so it will be available in the artifacts
+warning_file="${DOCS_DIR}/build/warnings.log"
+if [[ -s ${warning_file} ]]; then
+ cp -r "${DOCS_DIR}/build/warnings.log" artifact/doc_warnings.log
+ cp artifact/doc_warnings.log artifact/documentation
+ echo "Warnings were encountered while building documentation."
+ echo "========== Begin warnings =========="
+ cat artifact/doc_warnings.log
+ echo "=========== End warnings ==========="
+fi
diff --git a/.github/workflows/docs.yaml b/.github/workflows/docs.yaml
new file mode 100644
index 00000000000..ae083a3c0bf
--- /dev/null
+++ b/.github/workflows/docs.yaml
@@ -0,0 +1,51 @@
+name: Build and Deploy Documentation
+on:
+ push:
+ branches:
+ - develop
+ - feature/*
+ - main/*
+ - bugfix/*
+ - release/*
+ paths:
+ - docs/**
+ pull_request:
+ types: [opened, reopened, synchronize]
+
+jobs:
+ documentation:
+ runs-on: ubuntu-latest
+ name: Build and deploy documentation
+
+ steps:
+ - name: Setup Python
+ uses: actions/setup-python@v4
+ with:
+ python-version: "3.9"
+
+ - name: Install (upgrade) python dependencies
+ run: |
+ pip install --upgrade pip sphinx sphinx-gallery sphinx_rtd_theme sphinxcontrib-bibtex
+
+ - name: Checkout
+ uses: actions/checkout@v3
+
+ - name: Build documentation
+ run: |
+ ./.github/scripts/build_docs.sh
+
+ - name: Upload documentation (on success)
+ uses: actions/upload-artifact@v3
+ if: always()
+ with:
+ name: documentation
+ path: artifact/documentation
+
+ - name: Upload warnings (on failure)
+ uses: actions/upload-artifact@v3
+ if: failure()
+ with:
+ name: documentation_warnings.log
+ path: artifact/doc_warnings.log
+ if-no-files-found: ignore
+
diff --git a/.github/workflows/linters.yaml b/.github/workflows/linters.yaml
new file mode 100644
index 00000000000..488b6a1407e
--- /dev/null
+++ b/.github/workflows/linters.yaml
@@ -0,0 +1,64 @@
+#
+name: shellnorms
+on:
+ pull_request:
+
+permissions:
+ contents: read
+
+defaults:
+ run:
+ shell: bash -o pipefail {0}
+
+jobs:
+ lint-shell:
+ runs-on: ubuntu-latest
+
+ permissions:
+ security-events: write
+
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v3
+ with:
+ fetch-depth: 0
+
+ - id: ShellCheck
+ name: Lint shell scripts
+ uses: redhat-plumbers-in-action/differential-shellcheck@v4
+ with:
+ token: ${{ secrets.GITHUB_TOKEN }}
+
+ - if: ${{ always() }}
+ name: Upload artifact with ShellCheck defects in SARIF format
+ uses: actions/upload-artifact@v3
+ with:
+ name: Differential ShellCheck SARIF
+ path: ${{ steps.ShellCheck.outputs.sarif }}
+
+ # lint-python:
+ # runs-on: ubuntu-latest
+
+ # permissions:
+ # security-events: write
+
+ # steps:
+ # - name: Checkout code
+ # uses: actions/checkout@v3
+
+ # - id: VCS_Diff_Lint
+ # name: Lint python scripts
+ # uses: fedora-copr/vcs-diff-lint-action@v1
+
+ # - if: ${{ always() }}
+ # name: Upload artifact with detected defects in SARIF format
+ # uses: actions/upload-artifact@v3
+ # with:
+ # name: VCS Diff Lint SARIF
+ # path: ${{ steps.VCS_Diff_Lint.outputs.sarif }}
+
+ # - if: ${{ failure() }}
+ # name: Upload SARIF to GitHub using github/codeql-action/upload-sarif
+ # uses: github/codeql-action/upload-sarif@v2
+ # with:
+ # sarif_file: ${{ steps.VCS_Diff_Lint.outputs.sarif }}
diff --git a/.github/workflows/pynorms.yaml b/.github/workflows/pynorms.yaml
new file mode 100644
index 00000000000..7f823f83181
--- /dev/null
+++ b/.github/workflows/pynorms.yaml
@@ -0,0 +1,24 @@
+name: pynorms
+on: [push, pull_request]
+
+jobs:
+ check_norms:
+ runs-on: ubuntu-latest
+ name: Check Python coding norms with pycodestyle
+
+ steps:
+
+ - name: Install dependencies
+ run: |
+ pip install --upgrade pip
+ pip install pycodestyle
+
+ - name: Checkout
+ uses: actions/checkout@v3
+ with:
+ path: global-workflow
+
+ - name: Run pycodestyle
+ run: |
+ cd $GITHUB_WORKSPACE/global-workflow
+ pycodestyle -v --config ./.pycodestyle --exclude='.git,.github' ./
diff --git a/.github/workflows/pytests.yaml b/.github/workflows/pytests.yaml
new file mode 100644
index 00000000000..f15a776c0f1
--- /dev/null
+++ b/.github/workflows/pytests.yaml
@@ -0,0 +1,36 @@
+name: pytests
+on: [push, pull_request]
+
+jobs:
+ run_pytests:
+ runs-on: ubuntu-latest
+ name: Install pygw and run tests with pytests
+ strategy:
+ max-parallel: 1
+ matrix:
+ python: ["3.7", "3.8", "3.9", "3.10"]
+
+ steps:
+ - name: Setup Python
+ uses: actions/setup-python@v4
+ with:
+ python-version: ${{ matrix.python }}
+
+ - name: Install (upgrade) python dependencies
+ run: |
+ pip install --upgrade pip
+
+ - name: Checkout
+ uses: actions/checkout@v3
+ with:
+ path: global-workflow
+
+ - name: Install pygw
+ run: |
+ cd $GITHUB_WORKSPACE/global-workflow/ush/python/pygw
+ pip install .[dev]
+
+ - name: Run pytests
+ run: |
+ cd $GITHUB_WORKSPACE/global-workflow/ush/python/pygw
+ pytest -v src/tests
diff --git a/.gitignore b/.gitignore
index df54a098920..e73b9f2e05b 100644
--- a/.gitignore
+++ b/.gitignore
@@ -5,9 +5,15 @@ __pycache__
*.[aox]
*.mod
*.sw[a-p]
+._*
.DS_Store
+#nohup.out - some users do not want this to be a part of .gitignore. TODO: review against best practices
.idea/
+.vscode/
+# Ignore editor generated backup files
+#-------------------------------------
+*~
# Ignore folders
#-------------------
exec/
@@ -17,8 +23,23 @@ install*/
# Ignore fix directory symlinks
#------------------------------
fix/0readme
-fix/fix_*
-fix/gdas/
+fix/aer
+fix/am
+fix/chem
+fix/cice
+fix/cpl
+fix/datm
+fix/gdas
+fix/gldas
+fix/gsi
+fix/lut
+fix/mom6
+fix/orog
+fix/reg2grb2
+fix/sfc_climo
+fix/ugwd
+fix/verif
+fix/wave
fix/wafs
# Ignore parm file symlinks
@@ -75,71 +96,12 @@ parm/wafs
#--------------------------------------------
sorc/*log
sorc/logs
-sorc/ufs_model.fd
-sorc/gfs_post.fd
-sorc/gfs_wafs.fd
-sorc/gldas.fd
-sorc/gsi_enkf.fd
-sorc/gsi.fd
-sorc/enkf.fd
-sorc/gdas.cd
-sorc/gsi_utils.fd
-sorc/gsi_monitor.fd
-sorc/ufs_utils.fd
-sorc/verif-global.fd
-
-# Ignore sorc symlinks
-#---------------------
-sorc/calc_analysis.fd
-sorc/calc_increment_ens.fd
-sorc/calc_increment_ens_ncio.fd
-sorc/emcsfc_ice_blend.fd
-sorc/emcsfc_snow2mdl.fd
-sorc/fregrid.fd
-sorc/gdas2gldas.fd
-sorc/getsfcensmeanp.fd
-sorc/getsigensmeanp_smooth.fd
-sorc/getsigensstatp.fd
-sorc/gfs_ncep_post.fd
-sorc/gldas2gdas.fd
-sorc/gldas_forcing.fd
-sorc/gldas_model.fd
-sorc/gldas_post.fd
-sorc/gldas_rst.fd
-sorc/global_chgres.fd
-sorc/global_cycle.fd
-sorc/global_enkf.fd
-sorc/global_gsi.fd
-sorc/interp_inc.fd
-sorc/make_hgrid.fd
-sorc/make_solo_mosaic.fd
-sorc/ncdiag_cat.fd
-sorc/nst_tf_chg.fd
-sorc/oznmon_horiz.fd
-sorc/oznmon_time.fd
-sorc/radmon_angle.fd
-sorc/radmon_bcoef.fd
-sorc/radmon_bcor.fd
-sorc/radmon_time.fd
-sorc/recentersigp.fd
-sorc/upp.fd
-sorc/wafs_awc_wafavn.fd
-sorc/wafs_blending.fd
-sorc/wafs_blending_0p25.fd
-sorc/wafs_cnvgrib2.fd
-sorc/wafs_gcip.fd
-sorc/wafs_grib2_0p25.fd
-sorc/wafs_makewafs.fd
-sorc/wafs_setmissing.fd
+sorc/*.cd
+sorc/*.fd
# Ignore scripts from externals
#------------------------------
# jobs symlinks
-jobs/JGDAS_ATMOS_GLDAS
-jobs/JGDAS_ATMOS_VERFOZN
-jobs/JGDAS_ATMOS_VERFRAD
-jobs/JGDAS_ATMOS_VMINMON
-jobs/JGFS_ATMOS_VMINMON
jobs/JGFS_ATMOS_WAFS
jobs/JGFS_ATMOS_WAFS_BLENDING
jobs/JGFS_ATMOS_WAFS_BLENDING_0P25
@@ -148,11 +110,6 @@ jobs/JGFS_ATMOS_WAFS_GRIB2
jobs/JGFS_ATMOS_WAFS_GRIB2_0P25
# scripts symlinks
scripts/exemcsfc_global_sfc_prep.sh
-scripts/exgdas_atmos_gldas.sh
-scripts/exgdas_atmos_verfozn.sh
-scripts/exgdas_atmos_verfrad.sh
-scripts/exgdas_atmos_vminmon.sh
-scripts/exgfs_atmos_vminmon.sh
scripts/exgfs_atmos_wafs_blending.sh
scripts/exgfs_atmos_wafs_blending_0p25.sh
scripts/exgfs_atmos_wafs_gcip.sh
@@ -168,29 +125,18 @@ ush/fv3gfs_driver_grid.sh
ush/fv3gfs_filter_topo.sh
ush/fv3gfs_make_grid.sh
ush/fv3gfs_make_orog.sh
-ush/gldas_archive.sh
-ush/gldas_forcing.sh
-ush/gldas_get_data.sh
-ush/gldas_liscrd.sh
-ush/gldas_post.sh
-ush/gldas_process_data.sh
ush/global_chgres.sh
ush/global_chgres_driver.sh
ush/global_cycle.sh
ush/global_cycle_driver.sh
-ush/minmon_xtrct_costs.pl
-ush/minmon_xtrct_gnorms.pl
-ush/minmon_xtrct_reduct.pl
+ush/jediinc2fv3.py
ush/mkwfsgbl.sh
-ush/ozn_xtrct.sh
-ush/radmon_ck_stdout.sh
-ush/radmon_err_rpt.sh
-ush/radmon_verf_angle.sh
-ush/radmon_verf_bcoef.sh
-ush/radmon_verf_bcor.sh
-ush/radmon_verf_time.sh
ush/ufsda
-ush/rstprod.sh
ush/wafs_blending.sh
ush/wafs_grib2.regrid.sh
ush/wafs_intdsk.sh
+ush/finddate.sh
+ush/make_NTC_file.pl
+ush/make_ntc_bull.pl
+ush/make_tif.sh
+ush/month_name.sh
diff --git a/.pycodestyle b/.pycodestyle
new file mode 100644
index 00000000000..8bd18fa9d71
--- /dev/null
+++ b/.pycodestyle
@@ -0,0 +1,6 @@
+[pycodestyle]
+count = False
+ignore = E402,W504
+max-line-length = 160
+statistics = True
+exclude = Experimental
diff --git a/.shellcheckrc b/.shellcheckrc
new file mode 100644
index 00000000000..6d540ba17fa
--- /dev/null
+++ b/.shellcheckrc
@@ -0,0 +1,16 @@
+# Global settings for Shellcheck (https://github.com/koalaman/shellcheck)
+enable=all
+
+external-sources=false
+
+# Disable variable referenced but not assigned
+disable=SC2154
+
+# Disable following non-constant source
+disable=SC1090
+
+# Disable non-existent binary
+disable=SC1091
+
+# Disable -p -m only applies to deepest directory
+disable=SC2174
diff --git a/Externals.cfg b/Externals.cfg
index 0725d254897..e78cd2838e0 100644
--- a/Externals.cfg
+++ b/Externals.cfg
@@ -1,59 +1,72 @@
# External sub-modules of global-workflow
-[FV3GFS]
-hash = 9350745855aebe0790813e0ed2ba5ad680e3f75c
-local_path = sorc/fv3gfs.fd
+[UFS]
+tag = 2247060
+local_path = sorc/ufs_model.fd
repo_url = https://github.com/ufs-community/ufs-weather-model.git
protocol = git
required = True
-[GSI]
-hash = 9c1fc15d42573b398037319bbf8d5143ad126fb6
-local_path = sorc/gsi.fd
-repo_url = https://github.com/NOAA-EMC/GSI.git
-protocol = git
-required = True
-
-[GLDAS]
-tag = gldas_gfsv16_release.v1.15.0
-local_path = sorc/gldas.fd
-repo_url = https://github.com/NOAA-EMC/GLDAS.git
+[gfs-utils]
+hash = 0b8ff56
+local_path = sorc/gfs_utils.fd
+repo_url = https://github.com/NOAA-EMC/gfs-utils
protocol = git
required = True
-[UPP]
-#No externals setting = .gitmodules will be invoked for CMakeModules and comupp/src/lib/crtm2 submodules
-hash = ff42e0227d6100285d4179a2572b700fd5a959cb
-local_path = sorc/gfs_post.fd
-repo_url = https://github.com/NOAA-EMC/UPP.git
-protocol = git
-required = True
-
-[UFS_UTILS]
-tag = ufs_utils_1_8_0
+[UFS-Utils]
+hash = 5b67e4d
local_path = sorc/ufs_utils.fd
repo_url = https://github.com/ufs-community/UFS_UTILS.git
protocol = git
required = True
[EMC_verif-global]
-tag = verif_global_v2.5.2
+tag = c267780
local_path = sorc/verif-global.fd
repo_url = https://github.com/NOAA-EMC/EMC_verif-global.git
protocol = git
required = True
-[EMC_gfs_wafs]
-hash = c2a29a67d9432b4d6fba99eac7797b81d05202b6
-local_path = sorc/gfs_wafs.fd
-repo_url = https://github.com/NOAA-EMC/EMC_gfs_wafs.git
+[GSI-EnKF]
+hash = 113e307
+local_path = sorc/gsi_enkf.fd
+repo_url = https://github.com/NOAA-EMC/GSI.git
+protocol = git
+required = False
+
+[GSI-Utils]
+hash = 322cc7b
+local_path = sorc/gsi_utils.fd
+repo_url = https://github.com/NOAA-EMC/GSI-utils.git
+protocol = git
+required = False
+
+[GSI-Monitor]
+hash = 45783e3
+local_path = sorc/gsi_monitor.fd
+repo_url = https://github.com/NOAA-EMC/GSI-monitor.git
protocol = git
required = False
-[aeroconv]
-hash = 24f6ddc
-local_path = sorc/aeroconv.fd
-repo_url = https://github.com/NCAR/aeroconv.git
+[GDASApp]
+hash = aaf7caa
+local_path = sorc/gdas.cd
+repo_url = https://github.com/NOAA-EMC/GDASApp.git
+protocol = git
+required = False
+
+[GLDAS]
+tag = fd8ba62
+local_path = sorc/gldas.fd
+repo_url = https://github.com/NOAA-EMC/GLDAS.git
+protocol = git
+required = False
+
+[EMC-gfs_wafs]
+hash = 014a0b8
+local_path = sorc/gfs_wafs.fd
+repo_url = https://github.com/NOAA-EMC/EMC_gfs_wafs.git
protocol = git
required = False
diff --git a/LICENSE.md b/LICENSE.md
new file mode 100644
index 00000000000..0927556b544
--- /dev/null
+++ b/LICENSE.md
@@ -0,0 +1,157 @@
+### GNU LESSER GENERAL PUBLIC LICENSE
+
+Version 3, 29 June 2007
+
+Copyright (C) 2007 Free Software Foundation, Inc.
+
+
+Everyone is permitted to copy and distribute verbatim copies of this
+license document, but changing it is not allowed.
+
+This version of the GNU Lesser General Public License incorporates the
+terms and conditions of version 3 of the GNU General Public License,
+supplemented by the additional permissions listed below.
+
+#### 0. Additional Definitions.
+
+As used herein, "this License" refers to version 3 of the GNU Lesser
+General Public License, and the "GNU GPL" refers to version 3 of the
+GNU General Public License.
+
+"The Library" refers to a covered work governed by this License, other
+than an Application or a Combined Work as defined below.
+
+An "Application" is any work that makes use of an interface provided
+by the Library, but which is not otherwise based on the Library.
+Defining a subclass of a class defined by the Library is deemed a mode
+of using an interface provided by the Library.
+
+A "Combined Work" is a work produced by combining or linking an
+Application with the Library. The particular version of the Library
+with which the Combined Work was made is also called the "Linked
+Version".
+
+The "Minimal Corresponding Source" for a Combined Work means the
+Corresponding Source for the Combined Work, excluding any source code
+for portions of the Combined Work that, considered in isolation, are
+based on the Application, and not on the Linked Version.
+
+The "Corresponding Application Code" for a Combined Work means the
+object code and/or source code for the Application, including any data
+and utility programs needed for reproducing the Combined Work from the
+Application, but excluding the System Libraries of the Combined Work.
+
+#### 1. Exception to Section 3 of the GNU GPL.
+
+You may convey a covered work under sections 3 and 4 of this License
+without being bound by section 3 of the GNU GPL.
+
+#### 2. Conveying Modified Versions.
+
+If you modify a copy of the Library, and, in your modifications, a
+facility refers to a function or data to be supplied by an Application
+that uses the facility (other than as an argument passed when the
+facility is invoked), then you may convey a copy of the modified
+version:
+
+- a) under this License, provided that you make a good faith effort
+ to ensure that, in the event an Application does not supply the
+ function or data, the facility still operates, and performs
+ whatever part of its purpose remains meaningful, or
+- b) under the GNU GPL, with none of the additional permissions of
+ this License applicable to that copy.
+
+#### 3. Object Code Incorporating Material from Library Header Files.
+
+The object code form of an Application may incorporate material from a
+header file that is part of the Library. You may convey such object
+code under terms of your choice, provided that, if the incorporated
+material is not limited to numerical parameters, data structure
+layouts and accessors, or small macros, inline functions and templates
+(ten or fewer lines in length), you do both of the following:
+
+- a) Give prominent notice with each copy of the object code that
+ the Library is used in it and that the Library and its use are
+ covered by this License.
+- b) Accompany the object code with a copy of the GNU GPL and this
+ license document.
+
+#### 4. Combined Works.
+
+You may convey a Combined Work under terms of your choice that, taken
+together, effectively do not restrict modification of the portions of
+the Library contained in the Combined Work and reverse engineering for
+debugging such modifications, if you also do each of the following:
+
+- a) Give prominent notice with each copy of the Combined Work that
+ the Library is used in it and that the Library and its use are
+ covered by this License.
+- b) Accompany the Combined Work with a copy of the GNU GPL and this
+ license document.
+- c) For a Combined Work that displays copyright notices during
+ execution, include the copyright notice for the Library among
+ these notices, as well as a reference directing the user to the
+ copies of the GNU GPL and this license document.
+- d) Do one of the following:
+ - 0) Convey the Minimal Corresponding Source under the terms of
+ this License, and the Corresponding Application Code in a form
+ suitable for, and under terms that permit, the user to
+ recombine or relink the Application with a modified version of
+ the Linked Version to produce a modified Combined Work, in the
+ manner specified by section 6 of the GNU GPL for conveying
+ Corresponding Source.
+ - 1) Use a suitable shared library mechanism for linking with
+ the Library. A suitable mechanism is one that (a) uses at run
+ time a copy of the Library already present on the user's
+ computer system, and (b) will operate properly with a modified
+ version of the Library that is interface-compatible with the
+ Linked Version.
+- e) Provide Installation Information, but only if you would
+ otherwise be required to provide such information under section 6
+ of the GNU GPL, and only to the extent that such information is
+ necessary to install and execute a modified version of the
+ Combined Work produced by recombining or relinking the Application
+ with a modified version of the Linked Version. (If you use option
+ 4d0, the Installation Information must accompany the Minimal
+ Corresponding Source and Corresponding Application Code. If you
+ use option 4d1, you must provide the Installation Information in
+ the manner specified by section 6 of the GNU GPL for conveying
+ Corresponding Source.)
+
+#### 5. Combined Libraries.
+
+You may place library facilities that are a work based on the Library
+side by side in a single library together with other library
+facilities that are not Applications and are not covered by this
+License, and convey such a combined library under terms of your
+choice, if you do both of the following:
+
+- a) Accompany the combined library with a copy of the same work
+ based on the Library, uncombined with any other library
+ facilities, conveyed under the terms of this License.
+- b) Give prominent notice with the combined library that part of it
+ is a work based on the Library, and explaining where to find the
+ accompanying uncombined form of the same work.
+
+#### 6. Revised Versions of the GNU Lesser General Public License.
+
+The Free Software Foundation may publish revised and/or new versions
+of the GNU Lesser General Public License from time to time. Such new
+versions will be similar in spirit to the present version, but may
+differ in detail to address new problems or concerns.
+
+Each version is given a distinguishing version number. If the Library
+as you received it specifies that a certain numbered version of the
+GNU Lesser General Public License "or any later version" applies to
+it, you have the option of following the terms and conditions either
+of that published version or of any later version published by the
+Free Software Foundation. If the Library as you received it does not
+specify a version number of the GNU Lesser General Public License, you
+may choose any version of the GNU Lesser General Public License ever
+published by the Free Software Foundation.
+
+If the Library as you received it specifies that a proxy can decide
+whether future versions of the GNU Lesser General Public License shall
+apply, that proxy's public statement of acceptance of any version is
+permanent authorization for you to choose that version for the
+Library.
diff --git a/README.md b/README.md
index c89aa7275b4..465b0529fac 100644
--- a/README.md
+++ b/README.md
@@ -1,54 +1,40 @@
-# global-workflow
-Global Superstructure/Workflow currently supporting the Finite-Volume on a Cubed-Sphere Global Forecast System (FV3GFS)
-
-The global-workflow depends on the following prerequisities to be available on the system:
-
-* workload management platform / scheduler - LSF or SLURM
-* workflow manager - ROCOTO (https://github.com/christopherwharrop/rocoto)
-* modules - NCEPLIBS (various), esmf v8.0.0bs48, hdf5, intel/ips v18, impi v18, wgrib2, netcdf v4.7.0, hpss, gempak (see module files under /modulefiles for additional details)
-
-The global-workflow current supports the following machines:
+[![Read The Docs Status](https://readthedocs.org/projects/global-workflow/badge/?badge=latest)](http://global-workflow.readthedocs.io/)
+[![shellnorms](https://github.com/NOAA-EMC/global-workflow/actions/workflows/linters.yaml/badge.svg)](https://github.com/NOAA-EMC/global-workflow/actions/workflows/linters.yaml)
+[![pynorms](https://github.com/NOAA-EMC/global-workflow/actions/workflows/pynorms.yaml/badge.svg)](https://github.com/NOAA-EMC/global-workflow/actions/workflows/pynorms.yaml)
+[![pytests](https://github.com/NOAA-EMC/global-workflow/actions/workflows/pytests.yaml/badge.svg)](https://github.com/NOAA-EMC/global-workflow/actions/workflows/pytests.yaml)
-* WCOSS-Dell
-* WCOSS-Cray
-* Hera
-* Orion
-
-Quick-start instructions are below. Full instructions are available in the [wiki](https://github.com/NOAA-EMC/global-workflow/wiki/Run-Global-Workflow)
-
-## Build global-workflow:
+# global-workflow
+Global Workflow currently supporting the Global Forecast System (GFS) with the [UFS-weather-model](https://github.com/ufs-community/ufs-weather-model) and [GSI](https://github.com/NOAA-EMC/GSI)-based Data Assimilation System.
-### 1. Check out components
+The `global-workflow` depends on the following prerequisites to be available on the system:
-While in /sorc folder:
-```
-$ sh checkout.sh
-```
+* Workflow Engine - [Rocoto](https://github.com/christopherwharrop/rocoto) and [ecFlow](https://github.com/ecmwf/ecflow) (for NWS Operations)
+* Compiler - Intel Compiler Suite
+* Software - NCEPLIBS (various), ESMF, HDF5, NetCDF, and a host of other software (see module files under /modulefiles for additional details)
-### 2. Build components
+The `global-workflow` currently supports the following tier-1 machines:
-While in /sorc folder:
+* NOAA RDHPCS - Hera
+* MSU HPC - Orion
+* NOAA's operational HPC - WCOSS2
-```
-$ sh build_all.sh
-```
+Additionally, the following tier-2 machine is supported:
+* SSEC at Univ. of Wisconsin - S4 (Note that S2S+ experiments are not fully supported)
-Or use an available option:
-```
-build_all.sh [-a UFS_app][-c build_config][-h][-v]
- -a UFS_app:
- Build a specific UFS app instead of the default
- -c build_config:
- Selectively build based on the provided config instead of the default config
- -h:
- Print usage message and exit
- -v:
- Run all scripts in verbose mode
-```
+Documentation (in progress) is available [here](https://global-workflow.readthedocs.io/en/latest/).
-### 3. Link components
+# Disclaimer
-While in /sorc folder:
+The United States Department of Commerce (DOC) GitHub project code is provided
+on an "as is" basis and the user assumes responsibility for its use. DOC has
+relinquished control of the information and no longer has responsibility to
+protect the integrity, confidentiality, or availability of the information. Any
+claims against the Department of Commerce stemming from the use of its GitHub
+project will be governed by all applicable Federal law. Any reference to
+specific commercial products, processes, or services by service mark,
+trademark, manufacturer, or otherwise, does not constitute or imply their
+endorsement, recommendation or favoring by the Department of Commerce. The
+Department of Commerce seal and logo, or the seal and logo of a DOC bureau,
+shall not be used in any manner to imply endorsement of any commercial product
+or activity by DOC or the United States Government.
-$ sh link_workflow.sh emc $MACHINE
-...where $MACHINE is "dell", "cray", "hera", or "orion".
diff --git a/ci/cases/C96C48_hybatmDA.yaml b/ci/cases/C96C48_hybatmDA.yaml
new file mode 100644
index 00000000000..9efce409009
--- /dev/null
+++ b/ci/cases/C96C48_hybatmDA.yaml
@@ -0,0 +1,15 @@
+experiment:
+ mode: cycled
+
+arguments:
+ app: ATM
+ resdet: 96
+ resens: 48
+ comrot: ${RUNTESTS}/${pslot}/COMROT
+ expdir: ${RUNTESTS}/${pslot}/EXPDIR
+ icsdir: ${ICSDIR_ROOT}/C96C48
+ idate: 2021122018
+ edate: 2021122200
+ nens: 2
+ gfs_cyc: 1
+ start: cold
diff --git a/ci/cases/C96_atm3DVar.yaml b/ci/cases/C96_atm3DVar.yaml
new file mode 100644
index 00000000000..1648432e091
--- /dev/null
+++ b/ci/cases/C96_atm3DVar.yaml
@@ -0,0 +1,14 @@
+experiment:
+ mode: cycled
+
+arguments:
+ app: ATM
+ resdet: 96
+ comrot: ${RUNTESTS}/${pslot}/COMROT
+ expdir: ${RUNTESTS}/${pslot}/EXPDIR
+ icsdir: ${ICSDIR_ROOT}/C96C48
+ idate: 2021122018
+ edate: 2021122100
+ nens: 0
+ gfs_cyc: 1
+ start: cold
diff --git a/ci/platforms/hera.sh b/ci/platforms/hera.sh
new file mode 100644
index 00000000000..35fe7bca912
--- /dev/null
+++ b/ci/platforms/hera.sh
@@ -0,0 +1,7 @@
+#!/usr/bin/bash
+export GFS_CI_ROOT=/scratch1/NCEPDEV/global/Terry.McGuinness/GFS_CI_ROOT
+export SLURM_ACCOUNT=fv3-cpu
+export SALLOC_ACCOUNT="${SLURM_ACCOUNT}"
+export SBATCH_ACCOUNT="${SLURM_ACCOUNT}"
+export SLURM_QOS=debug
+export ICSDIR_ROOT="/scratch1/NCEPDEV/global/glopara/data/ICSDIR"
diff --git a/ci/platforms/orion.sh b/ci/platforms/orion.sh
new file mode 100644
index 00000000000..7d69a3b276e
--- /dev/null
+++ b/ci/platforms/orion.sh
@@ -0,0 +1,11 @@
+#!/usr/bin/bash
+
+export GFS_CI_ROOT=/work2/noaa/global/mterry/GFS_CI_ROOT
+export ICSDIR_ROOT=/work/noaa/global/glopara/data/ICSDIR
+export SLURM_ACCOUNT=fv3-cpu
+export SALLOC_ACCOUNT=${SLURM_ACCOUNT}
+export SBATCH_ACCOUNT=${SLURM_ACCOUNT}
+export SLURM_QOS=debug
+export SLURM_EXCLUSIVE=user
+export OMP_NUM_THREADS=1
+ulimit -s unlimited
diff --git a/ci/scripts/check_ci.sh b/ci/scripts/check_ci.sh
new file mode 100755
index 00000000000..aa48e9f8946
--- /dev/null
+++ b/ci/scripts/check_ci.sh
@@ -0,0 +1,115 @@
+#!/bin/bash
+set -eux
+#####################################################################################
+#
+# Script description: BASH script for checking for cases in a given PR and
+# running rocotostat on each to determine if the experiment has
+#                     succeeded or failed. This script is intended
+# to run from within a cron job in the CI Managers account
+# Abstract TODO
+#####################################################################################
+
+HOMEgfs="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." >/dev/null 2>&1 && pwd )"
+scriptname=$(basename "${BASH_SOURCE[0]}")
+echo "Begin ${scriptname} at $(date -u)" || true
+export PS4='+ $(basename ${BASH_SOURCE})[${LINENO}]'
+
+GH=${HOME}/bin/gh
+REPO_URL=${REPO_URL:-"https://github.com/NOAA-EMC/global-workflow.git"}
+
+#########################################################################
+# Set up runtime environment variables for accounts on supported machines
+#########################################################################
+
+source "${HOMEgfs}/ush/detect_machine.sh"
+case ${MACHINE_ID} in
+ hera | orion)
+ echo "Running Automated Testing on ${MACHINE_ID}"
+ source "${HOMEgfs}/ci/platforms/${MACHINE_ID}.sh"
+ ;;
+ *)
+ echo "Unsupported platform. Exiting with error."
+ exit 1
+ ;;
+esac
+set +x
+source "${HOMEgfs}/ush/module-setup.sh"
+module use "${HOMEgfs}/modulefiles"
+module load "module_gwsetup.${MACHINE_ID}"
+module list
+set -x
+rocotostat=$(which rocotostat)
+if [[ -z ${rocotostat+x} ]]; then
+ echo "rocotostat not found on system"
+ exit 1
+else
+ echo "rocotostat being used from ${rocotostat}"
+fi
+
+pr_list_file="open_pr_list"
+
+if [[ -s "${GFS_CI_ROOT}/${pr_list_file}" ]]; then
+ pr_list=$(cat "${GFS_CI_ROOT}/${pr_list_file}")
+else
+ echo "no PRs to process .. exit"
+ exit 0
+fi
+
+#############################################################
+# Loop through all PRs in the PR list and look for experiments in
+# the RUNTESTS dir and for each one run rocotostat on them
+#############################################################
+
+for pr in ${pr_list}; do
+ id=$("${GH}" pr view "${pr}" --repo "${REPO_URL}" --json id --jq '.id')
+ echo "Processing Pull Request #${pr} and looking for cases"
+ pr_dir="${GFS_CI_ROOT}/PR/${pr}"
+
+ # If there is no RUNTESTS dir for this PR then cases have not been made yet
+ if [[ ! -d "${pr_dir}/RUNTESTS" ]]; then
+ continue
+ fi
+ num_cases=$(find "${pr_dir}/RUNTESTS" -mindepth 1 -maxdepth 1 -type d | wc -l) || true
+
+  # Check for PR success when ${pr_dir}/RUNTESTS is void of subfolders
+  # since all successful ones were previously removed
+ if [[ "${num_cases}" -eq 0 ]] && [[ -d "${pr_dir}/RUNTESTS" ]]; then
+ "${GH}" pr edit --repo "${REPO_URL}" "${pr}" --remove-label "CI-${MACHINE_ID^}-Running" --add-label "CI-${MACHINE_ID^}-Passed"
+ "${GH}" pr comment "${pr}" --repo "${REPO_URL}" --body-file "${GFS_CI_ROOT}/PR/${pr}/output_${id}"
+ sed -i "/${pr}/d" "${GFS_CI_ROOT}/${pr_list_file}"
+    # Completely remove the PR and its cloned repo on success of all cases
+ rm -Rf "${pr_dir}"
+ continue
+ fi
+
+ for cases in "${pr_dir}/RUNTESTS/"*; do
+ pslot=$(basename "${cases}")
+ xml="${pr_dir}/RUNTESTS/${pslot}/EXPDIR/${pslot}/${pslot}.xml"
+ db="${pr_dir}/RUNTESTS/${pslot}/EXPDIR/${pslot}/${pslot}.db"
+ rocoto_stat_output=$("${rocotostat}" -w "${xml}" -d "${db}" -s | grep -v CYCLE) || true
+ num_cycles=$(echo "${rocoto_stat_output}" | wc -l) || true
+ num_done=$(echo "${rocoto_stat_output}" | grep -c Done) || true
+ num_succeeded=$("${rocotostat}" -w "${xml}" -d "${db}" -a | grep -c SUCCEEDED) || true
+ echo "${pslot} Total Cycles: ${num_cycles} number done: ${num_done}" || true
+ num_failed=$("${rocotostat}" -w "${xml}" -d "${db}" -a | grep -c -E 'FAIL|DEAD') || true
+ if [[ ${num_failed} -ne 0 ]]; then
+ {
+ echo "Experiment ${pslot} Terminated: *FAILED*"
+ echo "Experiment ${pslot} Terminated with ${num_failed} tasks failed at $(date)" || true
+ } >> "${GFS_CI_ROOT}/PR/${pr}/output_${id}"
+ "${GH}" pr edit --repo "${REPO_URL}" "${pr}" --remove-label "CI-${MACHINE_ID^}-Running" --add-label "CI-${MACHINE_ID^}-Failed"
+ "${GH}" pr comment "${pr}" --repo "${REPO_URL}" --body-file "${GFS_CI_ROOT}/PR/${pr}/output_${id}"
+ sed -i "/${pr}/d" "${GFS_CI_ROOT}/${pr_list_file}"
+ fi
+ if [[ "${num_done}" -eq "${num_cycles}" ]]; then
+ {
+ echo "Experiment ${pslot} completed: *SUCCESS*"
+ echo "Experiment ${pslot} Completed at $(date)" || true
+ echo -n "with ${num_succeeded} successfully completed jobs" || true
+ } >> "${GFS_CI_ROOT}/PR/${pr}/output_${id}"
+ "${GH}" pr comment "${pr}" --repo "${REPO_URL}" --body-file "${GFS_CI_ROOT}/PR/${pr}/output_${id}"
+      # Remove experiment cases that completed successfully
+ rm -Rf "${pr_dir}/RUNTESTS/${pslot}"
+ fi
+ done
+done
diff --git a/ci/scripts/clone-build_ci.sh b/ci/scripts/clone-build_ci.sh
new file mode 100755
index 00000000000..022cc443784
--- /dev/null
+++ b/ci/scripts/clone-build_ci.sh
@@ -0,0 +1,122 @@
+#!/bin/bash
+set -eux
+
+#####################################################################
+# Usage and arguments for specifying cloned directory
+#####################################################################
+usage() {
+ set +x
+ echo
+ echo "Usage: $0 -p -d -o