Updated modulefiles, created drivers ufs-community#653
DavidHuber-NOAA committed May 23, 2022
1 parent b6efa86 commit 5b82295
Showing 8 changed files with 607 additions and 32 deletions.
27 changes: 0 additions & 27 deletions modulefiles/build.s4.intel

This file was deleted.

59 changes: 59 additions & 0 deletions modulefiles/build.s4.intel.lua
@@ -0,0 +1,59 @@
help([[
Load environment to compile UFS_UTILS on S4 using Intel
]])

load(pathJoin("license_intel","S4"))
prepend_path("MODULEPATH", "/data/prod/hpc-stack/modulefiles/stack")

hpc_ver=os.getenv("hpc_ver") or "1.2.0"
load(pathJoin("hpc", hpc_ver))

hpc_intel_ver=os.getenv("hpc_intel_ver") or "2022.1"
load(pathJoin("hpc-intel", hpc_intel_ver))

impi_ver=os.getenv("impi_ver") or "2022.1"
load(pathJoin("hpc-impi", impi_ver))

bacio_ver=os.getenv("bacio_ver") or "2.4.1"
load(pathJoin("bacio", bacio_ver))

g2_ver=os.getenv("g2_ver") or "3.4.5"
load(pathJoin("g2", g2_ver))

ip_ver=os.getenv("ip_ver") or "3.3.3"
load(pathJoin("ip", ip_ver))

nemsio_ver=os.getenv("nemsio_ver") or "2.5.4"
load(pathJoin("nemsio", nemsio_ver))

sp_ver=os.getenv("sp_ver") or "2.3.3"
load(pathJoin("sp", sp_ver))

w3nco_ver=os.getenv("w3nco_ver") or "2.4.1"
load(pathJoin("w3nco", w3nco_ver))

sfcio_ver=os.getenv("sfcio_ver") or "1.4.1"
load(pathJoin("sfcio", sfcio_ver))

sigio_ver=os.getenv("sigio_ver") or "2.3.2"
load(pathJoin("sigio", sigio_ver))

zlib_ver=os.getenv("zlib_ver") or "1.2.11"
load(pathJoin("zlib", zlib_ver))

png_ver=os.getenv("png_ver") or "1.6.35"
load(pathJoin("libpng", png_ver))

hdf5_ver=os.getenv("hdf5_ver") or "1.10.6"
load(pathJoin("hdf5", hdf5_ver))

netcdf_ver=os.getenv("netcdf_ver") or "4.7.4"
load(pathJoin("netcdf", netcdf_ver))

nccmp_ver=os.getenv("nccmp_ver") or "1.8.9.0"
load(pathJoin("nccmp", nccmp_ver))

esmf_ver=os.getenv("esmf_ver") or "8.2.1b04"
load(pathJoin("esmf", esmf_ver))

whatis("Description: UFS_UTILS build environment")
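
For context, a minimal usage sketch of the new modulefile (assuming an Lmod-based shell on S4 with the root of a UFS_UTILS clone as the working directory; the final build step is an assumption about the surrounding repository):

# Hedged usage sketch, not part of the commit.
module use ./modulefiles          # make build.s4.intel.lua visible to Lmod
# Library versions can be overridden before loading, because the modulefile
# consults environment variables first, e.g.:  export netcdf_ver=4.7.4
module load build.s4.intel        # pulls in hpc-stack, Intel/IMPI, and the libraries above
module list                       # confirm the resolved versions
./build_all.sh                    # assumed top-level build script of the clone
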
223 changes: 223 additions & 0 deletions reg_tests/chgres_cube/driver.s4.sh
@@ -0,0 +1,223 @@
#!/bin/bash

#-----------------------------------------------------------------------------
#
# Run the chgres_cube consistency tests on S4.
#
# Set WORK_DIR to a general working location outside the UFS_UTILS directory.
# The exact working directory (OUTDIR) will be WORK_DIR/reg_tests/chgres-cube.
# Set the PROJECT_CODE and QUEUE as appropriate. To see which projects you
# are authorized to use, type "account_params".
#
# Invoke the script with no arguments. A series of daisy-chained
# consistency tests will be submitted. To check the queue, type:
# "squeue -u USERNAME".
#
# The run output will be stored in OUTDIR. Log output from the suite
# will be in LOG_FILE. Once the suite has completed, a summary is
# placed in SUM_FILE.
#
# A test fails when its output does not match the baseline files as
# determined by the "nccmp" utility. The baseline files are stored in
# HOMEreg.
#
#-----------------------------------------------------------------------------

set -x

compiler=${compiler:-"intel"}

source ../../sorc/machine-setup.sh > /dev/null 2>&1
module use ../../modulefiles
module load build.$target.$compiler
module list

export OUTDIR="${WORK_DIR:-/scratch/short/users/$LOGNAME}"
export OUTDIR="${OUTDIR}/reg-tests/chgres-cube"

PROJECT_CODE="${PROJECT_CODE:-fv3-cpu}"
QUEUE="${QUEUE:-batch}"

#-----------------------------------------------------------------------------
# Should not have to change anything below here. HOMEufs is the root
# directory of your UFS_UTILS clone. HOMEreg contains the input data
# and baseline data for each test.
#-----------------------------------------------------------------------------

export UPDATE_BASELINE="FALSE"
#export UPDATE_BASELINE="TRUE"

if [ "$UPDATE_BASELINE" = "TRUE" ]; then
source ../get_hash.sh
fi

export HOMEufs=$PWD/../..

export HOMEreg=/data/users/dhuber/save/nems/role.ufsutils/ufs_utils/reg_tests/chgres_cube

LOG_FILE=consistency.log
SUM_FILE=summary.log
rm -f $LOG_FILE* $SUM_FILE

export OMP_STACKSIZE=1024M

export APRUN=srun
export NCCMP=${NCCMP:-nccmp}
rm -fr $OUTDIR

#-----------------------------------------------------------------------------
# Initialize C96 using FV3 warm restart files.
#-----------------------------------------------------------------------------

LOG_FILE=consistency.log01
export OMP_NUM_THREADS=1 # should match cpus-per-task
TEST1=$(sbatch --parsable --ntasks-per-node=6 --nodes=1 -t 0:15:00 -A $PROJECT_CODE -q $QUEUE -J c96.fv3.restart \
-o $LOG_FILE -e $LOG_FILE ./c96.fv3.restart.sh)

#-----------------------------------------------------------------------------
# Initialize C192 using FV3 tiled history files.
#-----------------------------------------------------------------------------

LOG_FILE=consistency.log02
export OMP_NUM_THREADS=1 # should match cpus-per-task
TEST2=$(sbatch --parsable --ntasks-per-node=6 --nodes=2 -t 0:15:00 -A $PROJECT_CODE -q $QUEUE -J c192.fv3.history \
-o $LOG_FILE -e $LOG_FILE ./c192.fv3.history.sh)

#-----------------------------------------------------------------------------
# Initialize C96 using FV3 gaussian nemsio files.
#-----------------------------------------------------------------------------

LOG_FILE=consistency.log03
export OMP_NUM_THREADS=1 # should match cpus-per-task
TEST3=$(sbatch --parsable --ntasks-per-node=6 --nodes=1 -t 0:15:00 -A $PROJECT_CODE -q $QUEUE -J c96.fv3.nemsio \
-o $LOG_FILE -e $LOG_FILE ./c96.fv3.nemsio.sh)

#-----------------------------------------------------------------------------
# Initialize C96 using spectral GFS sigio/sfcio files.
#-----------------------------------------------------------------------------

LOG_FILE=consistency.log04
export OMP_NUM_THREADS=6 # should match cpus-per-task
TEST4=$(sbatch --parsable --ntasks-per-node=3 --cpus-per-task=6 --nodes=2 -t 0:15:00 -A $PROJECT_CODE -q $QUEUE -J c96.gfs.sigio \
-o $LOG_FILE -e $LOG_FILE ./c96.gfs.sigio.sh)

#-----------------------------------------------------------------------------
# Initialize C96 using spectral GFS gaussian nemsio files.
#-----------------------------------------------------------------------------

LOG_FILE=consistency.log05
export OMP_NUM_THREADS=1 # should match cpus-per-task
TEST5=$(sbatch --parsable --ntasks-per-node=6 --nodes=1 -t 0:15:00 -A $PROJECT_CODE -q $QUEUE -J c96.gfs.nemsio \
-o $LOG_FILE -e $LOG_FILE ./c96.gfs.nemsio.sh)

#-----------------------------------------------------------------------------
# Initialize regional C96 using FV3 gaussian nemsio files.
#-----------------------------------------------------------------------------

LOG_FILE=consistency.log06
export OMP_NUM_THREADS=1 # should match cpus-per-task
TEST6=$(sbatch --parsable --ntasks-per-node=6 --nodes=1 -t 0:15:00 -A $PROJECT_CODE -q $QUEUE -J c96.regional \
-o $LOG_FILE -e $LOG_FILE ./c96.regional.sh)

#-----------------------------------------------------------------------------
# Initialize C96 using FV3 gaussian netcdf files.
#-----------------------------------------------------------------------------

LOG_FILE=consistency.log07
export OMP_NUM_THREADS=1 # should match cpus-per-task
TEST7=$(sbatch --parsable --ntasks-per-node=12 --nodes=1 -t 0:15:00 -A $PROJECT_CODE -q $QUEUE -J c96.fv3.netcdf \
-o $LOG_FILE -e $LOG_FILE ./c96.fv3.netcdf.sh)

#-----------------------------------------------------------------------------
# Initialize global C192 using GFS GRIB2 files.
#-----------------------------------------------------------------------------

LOG_FILE=consistency.log08
export OMP_NUM_THREADS=1 # should match cpus-per-task
TEST8=$(sbatch --parsable --ntasks-per-node=6 --nodes=1 -t 0:05:00 -A $PROJECT_CODE -q $QUEUE -J c192.gfs.grib2 \
-o $LOG_FILE -e $LOG_FILE ./c192.gfs.grib2.sh)

#-----------------------------------------------------------------------------
# Initialize CONUS 25-KM USING GFS GRIB2 files.
#-----------------------------------------------------------------------------

LOG_FILE=consistency.log09
export OMP_NUM_THREADS=1 # should match cpus-per-task
TEST9=$(sbatch --parsable --ntasks-per-node=6 --nodes=1 -t 0:05:00 -A $PROJECT_CODE -q $QUEUE -J 25km.conus.gfs.grib2.conus \
-o $LOG_FILE -e $LOG_FILE ./25km.conus.gfs.grib2.sh)

#-----------------------------------------------------------------------------
# Initialize CONUS 3-KM USING HRRR GRIB2 file WITH GFS PHYSICS.
#-----------------------------------------------------------------------------

LOG_FILE=consistency.log10
export OMP_NUM_THREADS=1 # should match cpus-per-task
TEST10=$(sbatch --parsable --ntasks-per-node=6 --nodes=1 -t 0:10:00 -A $PROJECT_CODE -q $QUEUE -J 3km.conus.hrrr.gfssdf.grib2.conus \
-o $LOG_FILE -e $LOG_FILE ./3km.conus.hrrr.gfssdf.grib2.sh)

#-----------------------------------------------------------------------------
# Initialize CONUS 3-KM USING HRRR GRIB2 file WITH GSD PHYSICS AND SFC VARS FROM FILE.
#-----------------------------------------------------------------------------

LOG_FILE=consistency.log11
export OMP_NUM_THREADS=1 # should match cpus-per-task
TEST11=$(sbatch --parsable --ntasks-per-node=6 --nodes=2 -t 0:10:00 -A $PROJECT_CODE -q $QUEUE -J 3km.conus.hrrr.newsfc.grib2.conus \
-o $LOG_FILE -e $LOG_FILE ./3km.conus.hrrr.newsfc.grib2.sh)

#-----------------------------------------------------------------------------
# Initialize CONUS 13-KM USING NAM GRIB2 file WITH GFS PHYSICS.
#-----------------------------------------------------------------------------

LOG_FILE=consistency.log12
export OMP_NUM_THREADS=1 # should match cpus-per-task
TEST12=$(sbatch --parsable --ntasks-per-node=6 --nodes=1 -t 0:05:00 -A $PROJECT_CODE -q $QUEUE -J 13km.conus.nam.grib2.conus \
-o $LOG_FILE -e $LOG_FILE ./13km.conus.nam.grib2.sh)

#-----------------------------------------------------------------------------
# Initialize CONUS 13-KM USING RAP GRIB2 file WITH GSD PHYSICS.
#-----------------------------------------------------------------------------

LOG_FILE=consistency.log13
export OMP_NUM_THREADS=1 # should match cpus-per-task
TEST13=$(sbatch --parsable --ntasks-per-node=6 --nodes=1 -t 0:05:00 -A $PROJECT_CODE -q $QUEUE -J 13km.conus.rap.grib2.conus \
-o $LOG_FILE -e $LOG_FILE ./13km.conus.rap.grib2.sh)

#-----------------------------------------------------------------------------
# Initialize CONUS 13-KM NA USING NCEI GFS GRIB2 file WITH GFS PHYSICS.
#-----------------------------------------------------------------------------

LOG_FILE=consistency.log14
export OMP_NUM_THREADS=1 # should match cpus-per-task
TEST14=$(sbatch --parsable --ntasks-per-node=6 --nodes=1 -t 0:05:00 -A $PROJECT_CODE -q $QUEUE -J 13km.na.gfs.ncei.grib2.conus \
-o $LOG_FILE -e $LOG_FILE ./13km.na.gfs.ncei.grib2.sh)

#-----------------------------------------------------------------------------
# Initialize C96 WAM IC using FV3 gaussian netcdf files.
#-----------------------------------------------------------------------------

LOG_FILE=consistency.log15
export OMP_NUM_THREADS=1 # should match cpus-per-task
TEST15=$(sbatch --parsable --ntasks-per-node=12 --nodes=1 -t 0:15:00 -A $PROJECT_CODE -q $QUEUE -J c96.fv3.netcdf2wam \
-o $LOG_FILE -e $LOG_FILE ./c96.fv3.netcdf2wam.sh)

#-----------------------------------------------------------------------------
# Initialize CONUS 25-KM USING GFS PGRIB2+BGRIB2 files.
#-----------------------------------------------------------------------------

LOG_FILE=consistency.log16
export OMP_NUM_THREADS=1 # should match cpus-per-task
TEST16=$(sbatch --parsable --ntasks-per-node=6 --nodes=1 -t 0:05:00 -A $PROJECT_CODE -q $QUEUE -J 25km.conus.gfs.pbgrib2.conus \
-o $LOG_FILE -e $LOG_FILE ./25km.conus.gfs.pbgrib2.sh)

#-----------------------------------------------------------------------------
# Create summary log.
#-----------------------------------------------------------------------------
LOG_FILE=consistency.log
sbatch --nodes=1 -t 0:01:00 -A $PROJECT_CODE -J chgres_summary -o $LOG_FILE -e $LOG_FILE \
--open-mode=append -q $QUEUE -d\
afterok:$TEST1:$TEST2:$TEST3:$TEST4:$TEST5:$TEST6:$TEST7:$TEST8:$TEST9:$TEST10:$TEST11:$TEST12:$TEST13:$TEST14:$TEST15:$TEST16 << EOF
#!/bin/bash
grep -a '<<<' $LOG_FILE* > $SUM_FILE
EOF

exit 0
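
A hedged invocation sketch for this driver (the values shown are the script's own defaults; adjust the working directory and project code for your account):

# Illustrative only; "account_params" lists the projects you may charge.
export WORK_DIR=/scratch/short/users/$LOGNAME   # general work area outside the clone
export PROJECT_CODE=fv3-cpu
export QUEUE=batch
cd reg_tests/chgres_cube
./driver.s4.sh                                  # submits the daisy-chained tests via sbatch
squeue -u $LOGNAME                              # monitor the queued jobs
cat summary.log                                 # written once the afterok summary job runs
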
84 changes: 84 additions & 0 deletions reg_tests/global_cycle/driver.s4.sh
@@ -0,0 +1,84 @@
#!/bin/bash

#-----------------------------------------------------------------------------
#
# Run global_cycle consistency test on S4.
#
# Set $WORK_DIR to your working directory. Set the project code
# and queue as appropriate.
#
# Invoke the script from the command line as follows: ./driver.s4.sh
#
# Log output is placed in consistency.log??. A summary is
# placed in summary.log.
#
# A test fails when its output does not match the baseline files
# as determined by the 'nccmp' utility. The baseline files are
# stored in HOMEreg.
#
#-----------------------------------------------------------------------------

set -x

compiler=${compiler:-"intel"}

source ../../sorc/machine-setup.sh > /dev/null 2>&1
module use ../../modulefiles
module load build.$target.$compiler
module list

WORK_DIR="${WORK_DIR:-/scratch/short/users/$LOGNAME}"

PROJECT_CODE="${PROJECT_CODE:-fv3-cpu}"
QUEUE="${QUEUE:-batch}"

#-----------------------------------------------------------------------------
# Should not have to change anything below.
#-----------------------------------------------------------------------------

export UPDATE_BASELINE="FALSE"
#export UPDATE_BASELINE="TRUE"

if [ "$UPDATE_BASELINE" = "TRUE" ]; then
source ../get_hash.sh
fi

DATA_DIR="${WORK_DIR}/reg-tests/global-cycle"

export HOMEreg=/data/users/dhuber/save/nems/role.ufsutils/ufs_utils/reg_tests/global_cycle

export OMP_NUM_THREADS_CY=2

export APRUNCY="srun"

export NWPROD=$PWD/../..

reg_dir=$PWD

LOG_FILE=consistency.log01
export DATA="${DATA_DIR}/test1"
export COMOUT=$DATA
TEST1=$(sbatch --parsable --ntasks-per-node=6 --nodes=1 -t 0:05:00 -A $PROJECT_CODE -q $QUEUE -J c768.fv3gfs \
-o $LOG_FILE -e $LOG_FILE ./C768.fv3gfs.sh)

LOG_FILE=consistency.log02
export DATA="${DATA_DIR}/test2"
export COMOUT=$DATA
TEST2=$(sbatch --parsable --ntasks-per-node=6 --nodes=1 -t 0:05:00 -A $PROJECT_CODE -q $QUEUE -J c768.lndincsoil \
-o $LOG_FILE -e $LOG_FILE ./C768.lndincsoil.sh)

LOG_FILE=consistency.log03
export DATA="${DATA_DIR}/test3"
export COMOUT=$DATA
TEST3=$(sbatch --parsable --ntasks-per-node=6 --nodes=1 -t 0:05:00 -A $PROJECT_CODE -q $QUEUE -J c768.lndincsnow \
-o $LOG_FILE -e $LOG_FILE ./C768.lndincsnow.sh)

LOG_FILE=consistency.log
sbatch --nodes=1 -t 0:01:00 -A $PROJECT_CODE -J chgres_summary -o $LOG_FILE -e $LOG_FILE \
--open-mode=append -q $QUEUE -d\
afterok:$TEST1:$TEST2:$TEST3 << EOF
#!/bin/bash
grep -a '<<<' ${LOG_FILE}* > summary.log
EOF

exit
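
The global_cycle driver follows the same pattern; a hedged sketch of running it and checking the outcome (the grep pattern mirrors the summary job defined above):

# Illustrative only.
export WORK_DIR=/scratch/short/users/$LOGNAME
export PROJECT_CODE=fv3-cpu
cd reg_tests/global_cycle
./driver.s4.sh                       # submits the three C768 tests plus the summary job
# Once the afterok-chained summary job completes, the '<<<' result lines from
# each consistency.log?? file are collected into summary.log:
grep -a '<<<' consistency.log*       # same command the summary job runs
cat summary.log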