Commit b67bc2d
Removed MPI calls during initialization. Reading data on all processors. Started from 3dfb4c9.
dustinswales committed Feb 10, 2020
1 parent 3dfb4c9 commit b67bc2d
Showing 4 changed files with 10 additions and 258 deletions.
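Note on the change: the initialization previously read the NetCDF coefficient files on the root rank only and then broadcast every field to the other ranks; this commit has every rank read the files directly, so the barriers and per-field broadcasts disappear. A minimal sketch of the two patterns, assuming illustrative file and variable names (cloud_optics.nc, band_lims) rather than the actual schema:

```fortran
! Sketch of the old pattern: the root rank reads, then broadcasts each field.
subroutine init_old(mpicomm, mpirank, mpiroot)
  use netcdf
  use mpi
  implicit none
  integer, intent(in) :: mpicomm, mpirank, mpiroot
  integer :: status, ncid, varID, mpierr
  real, dimension(32) :: band_lims   ! illustrative fixed size

  if (mpirank .eq. mpiroot) then
     status = nf90_open('cloud_optics.nc', NF90_NOWRITE, ncid)
     status = nf90_inq_varid(ncid, 'band_lims', varID)
     status = nf90_get_var(ncid, varID, band_lims)
     status = nf90_close(ncid)
  endif
  ! Every field must be broadcast by hand and kept in sync with the reads.
  call MPI_BARRIER(mpicomm, mpierr)
  call MPI_BCAST(band_lims, size(band_lims), MPI_REAL, mpiroot, mpicomm, mpierr)
end subroutine init_old

! Sketch of the new pattern: all ranks perform the same read; no MPI needed.
subroutine init_new()
  use netcdf
  implicit none
  integer :: status, ncid, varID
  real, dimension(32) :: band_lims

  status = nf90_open('cloud_optics.nc', NF90_NOWRITE, ncid)
  status = nf90_inq_varid(ncid, 'band_lims', varID)
  status = nf90_get_var(ncid, varID, band_lims)
  status = nf90_close(ncid)
end subroutine init_new
```

The trade-off is that every rank now touches the filesystem, which is acceptable for one-time reads of small coefficient tables and removes a long, error-prone list of hand-maintained broadcasts.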
71 changes: 3 additions & 68 deletions physics/rrtmgp_lw_cloud_optics.F90
@@ -7,9 +7,6 @@ module rrtmgp_lw_cloud_optics
use mo_rrtmg_lw_cloud_optics, only: rrtmg_lw_cloud_optics
use rrtmgp_aux, only: check_error_msg
use netcdf
- #ifdef MPI
- use mpi
- #endif

public rrtmgp_lw_cloud_optics_init, rrtmgp_lw_cloud_optics_run, rrtmgp_lw_cloud_optics_finalize
contains
@@ -92,9 +89,6 @@ subroutine rrtmgp_lw_cloud_optics_init(cld_optics_scheme, nrghice, rrtmgp_root_d
integer :: dimID,varID,status,ncid
character(len=264) :: lw_cloud_props_file
integer,parameter :: max_strlen=256
- #ifdef MPI
- integer :: mpierr
- #endif

! Initialize
errmsg = ''
@@ -106,7 +100,7 @@ subroutine rrtmgp_lw_cloud_optics_init(cld_optics_scheme, nrghice, rrtmgp_root_d
lw_cloud_props_file = trim(rrtmgp_root_dir)//trim(rrtmgp_lw_file_clouds)

! On master processor only...
- if (mpirank .eq. mpiroot) then
+ ! if (mpirank .eq. mpiroot) then
! Open file
status = nf90_open(trim(lw_cloud_props_file), NF90_WRITE, ncid)

@@ -241,67 +235,8 @@ subroutine rrtmgp_lw_cloud_optics_init(cld_optics_scheme, nrghice, rrtmgp_root_d

! Close file
status = nf90_close(ncid)
- endif

- #ifdef MPI
- if (cld_optics_scheme .eq. 1) then
- ! Wait for processor 0 to catch up...
- call MPI_BARRIER(mpicomm, mpierr)
-
- ! Broadcast data
- write (*,*) 'Broadcasting RRTMGP longwave cloud-optics data ... '
- call MPI_BCAST(nBand, 1, MPI_INTEGER, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(nrghice, 1, MPI_INTEGER, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(nSize_liq, 1, MPI_INTEGER, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(nSize_ice, 1, MPI_INTEGER, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(radliq_lwr, 1, MPI_REAL, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(radliq_upr, 1, MPI_REAL, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(radliq_fac, 1, MPI_REAL, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(radice_lwr, 1, MPI_REAL, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(radice_upr, 1, MPI_REAL, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(radice_fac, 1, MPI_REAL, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(lut_extliq, size(lut_extliq), MPI_REAL, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(lut_ssaliq, size(lut_ssaliq), MPI_REAL, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(lut_asyliq, size(lut_asyliq), MPI_REAL, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(lut_extice, size(lut_extice), MPI_REAL, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(lut_ssaice, size(lut_ssaice), MPI_REAL, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(lut_asyice, size(lut_asyice), MPI_REAL, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(band_lims, size(band_lims), MPI_REAL, mpiroot, mpicomm, mpierr)
-
- ! Don't advance until data broadcast complete on all processors
- call MPI_BARRIER(mpicomm, mpierr)
- endif
- if (cld_optics_scheme .eq. 2) then
- ! Wait for processor 0 to catch up...
- call MPI_BARRIER(mpicomm, mpierr)
-
- ! Broadcast data
- write (*,*) 'Broadcasting RRTMGP longwave cloud-optics data ... '
- call MPI_BCAST(nBand, 1, MPI_INTEGER, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(nrghice, 1, MPI_INTEGER, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(nSizeReg, 1, MPI_INTEGER, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(nCoeff_ext, 1, MPI_INTEGER, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(nCoeff_ssa_g, 1, MPI_INTEGER, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(nBound, 1, MPI_INTEGER, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(pade_extliq, size(pade_extliq), MPI_REAL, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(pade_ssaliq, size(pade_ssaliq), MPI_REAL, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(pade_asyliq, size(pade_asyliq), MPI_REAL, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(pade_extice, size(pade_extice), MPI_REAL, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(pade_ssaice, size(pade_ssaice), MPI_REAL, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(pade_asyice, size(pade_asyice), MPI_REAL, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(pade_sizereg_extliq, size(pade_sizereg_extliq), MPI_REAL, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(pade_sizereg_ssaliq, size(pade_sizereg_ssaliq), MPI_REAL, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(pade_sizereg_asyliq, size(pade_sizereg_asyliq), MPI_REAL, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(pade_sizereg_extice, size(pade_sizereg_extice), MPI_REAL, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(pade_sizereg_ssaice, size(pade_sizereg_ssaice), MPI_REAL, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(pade_sizereg_asyice, size(pade_sizereg_asyice), MPI_REAL, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(band_lims, size(band_lims), MPI_REAL, mpiroot, mpicomm, mpierr)
-
- ! Don't advance until data broadcast complete on all processors
- call MPI_BARRIER(mpicomm, mpierr)
- endif
- #endif

+ ! endif

! Load tables data for RRTMGP cloud-optics
if (cld_optics_scheme .eq. 1) then
call check_error_msg('lw_cloud_optics_init',lw_cloud_props%set_ice_roughness(nrghice))
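Side note on the reads that now run on every rank: the open calls above use NF90_WRITE even though the file is only read. Read-only mode is the safer choice once many ranks open the same file concurrently; a one-line sketch of the assumed alternative:

```fortran
! Read-only open; avoids write-intent locking when all ranks
! open the same coefficient file at once.
status = nf90_open(trim(lw_cloud_props_file), NF90_NOWRITE, ncid)
```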
64 changes: 2 additions & 62 deletions physics/rrtmgp_lw_gas_optics.F90
@@ -8,9 +8,6 @@ module rrtmgp_lw_gas_optics
use mo_compute_bc, only: compute_bc
use rrtmgp_aux, only: check_error_msg
use netcdf
- #ifdef MPI
- use mpi
- #endif

contains

@@ -105,9 +102,6 @@ subroutine rrtmgp_lw_gas_optics_init(rrtmgp_root_dir, rrtmgp_lw_file_gas, rrtmgp
integer,dimension(:),allocatable :: temp1, temp2, temp3, temp4, &
temp_log_array1, temp_log_array2, temp_log_array3, temp_log_array4
character(len=264) :: lw_gas_props_file
- #ifdef MPI
- integer :: mpierr
- #endif

! Initialize
errmsg = ''
@@ -119,7 +113,7 @@ subroutine rrtmgp_lw_gas_optics_init(rrtmgp_root_dir, rrtmgp_lw_file_gas, rrtmgp
lw_gas_props_file = trim(rrtmgp_root_dir)//trim(rrtmgp_lw_file_gas)

! On master processor only...
- if (mpirank .eq. mpiroot) then
+ ! if (mpirank .eq. mpiroot) then
! Open file
status = nf90_open(trim(lw_gas_props_file), NF90_WRITE, ncid)

@@ -260,61 +254,7 @@ subroutine rrtmgp_lw_gas_optics_init(rrtmgp_root_dir, rrtmgp_lw_file_gas, rrtmgp

! Close file
status = nf90_close(ncid)
- endif

- #ifdef MPI
- ! Wait for processor 0 to catch up...
- call MPI_BARRIER(mpicomm, mpierr)
-
- ! Broadcast data
- write (*,*) 'Broadcasting RRTMGP longwave k-distribution data ... '
- call MPI_BCAST(ntemps, 1, MPI_INTEGER, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(npress, 1, MPI_INTEGER, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(nabsorbers, 1, MPI_INTEGER, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(nminorabsorbers, 1, MPI_INTEGER, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(nextrabsorbers, 1, MPI_INTEGER, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(nmixingfracs, 1, MPI_INTEGER, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(nlayers, 1, MPI_INTEGER, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(nbnds, 1, MPI_INTEGER, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(ngpts, 1, MPI_INTEGER, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(npairs, 1, MPI_INTEGER, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(ncontributors_lower, 1, MPI_INTEGER, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(ncontributors_upper, 1, MPI_INTEGER, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(nminor_absorber_intervals_lower, 1, MPI_INTEGER, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(nminor_absorber_intervals_upper, 1, MPI_INTEGER, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(ninternalSourcetemps, 1, MPI_INTEGER, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(minor_limits_gpt_upper, size(minor_limits_gpt_upper), MPI_INTEGER, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(minor_limits_gpt_lower, size(minor_limits_gpt_lower), MPI_INTEGER, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(kminor_start_upper, size(kminor_start_upper), MPI_INTEGER, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(kminor_start_lower, size(kminor_start_lower), MPI_INTEGER, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(key_species, size(key_species), MPI_INTEGER, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(band2gpt, size(band2gpt), MPI_INTEGER, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(band_lims, size(band_lims), MPI_REAL, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(press_ref, size(press_ref), MPI_REAL, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(temp_ref, size(temp_ref), MPI_REAL, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(kminor_lower, size(kminor_lower), MPI_REAL, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(kminor_upper, size(kminor_upper), MPI_REAL, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(scaling_gas_lower, size(scaling_gas_lower), MPI_REAL, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(scaling_gas_upper, size(scaling_gas_upper), MPI_REAL, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(vmr_ref, size(vmr_ref), MPI_REAL, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(kmajor, size(kmajor), MPI_REAL, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(temp_ref_p, 1, MPI_REAL, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(temp_ref_t, 1, MPI_REAL, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(press_ref_trop, 1, MPI_REAL, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(totplnk, size(totplnk), MPI_REAL, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(planck_frac, size(planck_frac), MPI_REAL, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(gas_names, size(gas_names), MPI_CHARACTER, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(gas_minor, size(gas_minor), MPI_CHARACTER, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(identifier_minor, size(identifier_minor), MPI_CHARACTER, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(minor_gases_lower, size(minor_gases_lower), MPI_CHARACTER, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(minor_gases_upper, size(minor_gases_upper), MPI_CHARACTER, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(minor_scales_with_density_lower, nminor_absorber_intervals_lower, MPI_LOGICAL, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(scale_by_complement_lower, nminor_absorber_intervals_lower, MPI_LOGICAL, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(minor_scales_with_density_upper, nminor_absorber_intervals_upper, MPI_LOGICAL, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(scale_by_complement_upper, nminor_absorber_intervals_upper, MPI_LOGICAL, mpiroot, mpicomm, mpierr)
- ! Don't advance until data broadcast complete on all processors
- call MPI_BARRIER(mpicomm, mpierr)
- #endif
+ ! endif

! Initialize gas concentrations and gas optics class
do iGas=1,rrtmgp_nGases
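The deleted gas-optics broadcasts also illustrate what the all-ranks-read approach sidesteps: MPI_CHARACTER counts single characters, so passing size(gas_names) alone for a character(len=*) array appears to under-count. A hedged sketch of the full form such a broadcast would need (len-32 names assumed for illustration):

```fortran
! Sketch only: MPI_CHARACTER counts individual characters, so a
! character(len=32) array of n names needs a count of 32*n.
character(len=32), dimension(:), allocatable :: gas_names
integer :: mpicomm, mpiroot, mpierr
call MPI_BCAST(gas_names, len(gas_names)*size(gas_names), MPI_CHARACTER, &
               mpiroot, mpicomm, mpierr)
```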
70 changes: 3 additions & 67 deletions physics/rrtmgp_sw_cloud_optics.F90
@@ -8,9 +8,6 @@ module rrtmgp_sw_cloud_optics
use mo_rrtmg_sw_cloud_optics, only: rrtmg_sw_cloud_optics
use rrtmgp_aux, only: check_error_msg
use netcdf
- #ifdef MPI
- use mpi
- #endif

public rrtmgp_sw_cloud_optics_init, rrtmgp_sw_cloud_optics_run, rrtmgp_sw_cloud_optics_finalize
contains
@@ -91,9 +88,7 @@ subroutine rrtmgp_sw_cloud_optics_init(cld_optics_scheme, nrghice, rrtmgp_root_d
! Local variables
integer :: status,ncid,dimid,varID
character(len=264) :: sw_cloud_props_file
- #ifdef MPI
- integer :: mpierr
- #endif

! Initialize
errmsg = ''
errflg = 0
@@ -104,7 +99,7 @@ subroutine rrtmgp_sw_cloud_optics_init(cld_optics_scheme, nrghice, rrtmgp_root_d
sw_cloud_props_file = trim(rrtmgp_root_dir)//trim(rrtmgp_sw_file_clouds)

! On master processor only...
- if (mpirank .eq. mpiroot) then
+ ! if (mpirank .eq. mpiroot) then
! Open file
status = nf90_open(trim(sw_cloud_props_file), NF90_WRITE, ncid)

@@ -238,66 +233,7 @@ subroutine rrtmgp_sw_cloud_optics_init(cld_optics_scheme, nrghice, rrtmgp_root_d

! Close file
status = nf90_close(ncid)
- endif

- #ifdef MPI
- if (cld_optics_scheme .eq. 1) then
- ! Wait for processor 0 to catch up...
- call MPI_BARRIER(mpicomm, mpierr)
-
- ! Broadcast data
- write (*,*) 'Broadcasting RRTMGP shortwave cloud-optics data ... '
- call MPI_BCAST(nBand, 1, MPI_INTEGER, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(nrghice, 1, MPI_INTEGER, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(nSize_liq, 1, MPI_INTEGER, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(nSize_ice, 1, MPI_INTEGER, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(radliq_lwr, 1, MPI_REAL, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(radliq_upr, 1, MPI_REAL, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(radliq_fac, 1, MPI_REAL, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(radice_lwr, 1, MPI_REAL, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(radice_upr, 1, MPI_REAL, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(radice_fac, 1, MPI_REAL, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(lut_extliq, size(lut_extliq), MPI_REAL, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(lut_ssaliq, size(lut_ssaliq), MPI_REAL, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(lut_asyliq, size(lut_asyliq), MPI_REAL, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(lut_extice, size(lut_extice), MPI_REAL, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(lut_ssaice, size(lut_ssaice), MPI_REAL, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(lut_asyice, size(lut_asyice), MPI_REAL, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(band_lims, size(band_lims), MPI_REAL, mpiroot, mpicomm, mpierr)
-
- ! Don't advance until data broadcast complete on all processors
- call MPI_BARRIER(mpicomm, mpierr)
- endif
- if (cld_optics_scheme .eq. 2) then
- ! Wait for processor 0 to catch up...
- call MPI_BARRIER(mpicomm, mpierr)
-
- ! Broadcast data
- write (*,*) 'Broadcasting RRTMGP shortwave cloud-optics data ... '
- call MPI_BCAST(nBand, 1, MPI_INTEGER, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(nrghice, 1, MPI_INTEGER, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(nSizeReg, 1, MPI_INTEGER, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(nCoeff_ext, 1, MPI_INTEGER, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(nCoeff_ssa_g, 1, MPI_INTEGER, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(nBound, 1, MPI_INTEGER, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(pade_extliq, size(pade_extliq), MPI_REAL, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(pade_ssaliq, size(pade_ssaliq), MPI_REAL, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(pade_asyliq, size(pade_asyliq), MPI_REAL, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(pade_extice, size(pade_extice), MPI_REAL, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(pade_ssaice, size(pade_ssaice), MPI_REAL, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(pade_asyice, size(pade_asyice), MPI_REAL, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(pade_sizereg_extliq, size(pade_sizereg_extliq), MPI_REAL, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(pade_sizereg_ssaliq, size(pade_sizereg_ssaliq), MPI_REAL, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(pade_sizereg_asyliq, size(pade_sizereg_asyliq), MPI_REAL, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(pade_sizereg_extice, size(pade_sizereg_extice), MPI_REAL, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(pade_sizereg_ssaice, size(pade_sizereg_ssaice), MPI_REAL, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(pade_sizereg_asyice, size(pade_sizereg_asyice), MPI_REAL, mpiroot, mpicomm, mpierr)
- call MPI_BCAST(band_lims, size(band_lims), MPI_REAL, mpiroot, mpicomm, mpierr)
-
- ! Don't advance until data broadcast complete on all processors
- call MPI_BARRIER(mpicomm, mpierr)
- endif
- #endif
+ ! endif

! Load tables data for RRTMGP cloud-optics
if (cld_optics_scheme .eq. 1) then
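For reference, the read path that each rank now executes is the standard netcdf-fortran inquire/allocate/read sequence; a minimal sketch with illustrative dimension and variable names:

```fortran
! Sketch: the inquire/allocate/read sequence each rank now runs for the
! optics tables. Dimension and variable names here are illustrative.
integer :: status, ncid, dimID, varID, nBand
real, dimension(:), allocatable :: band_lims

status = nf90_open(trim(sw_cloud_props_file), NF90_NOWRITE, ncid)
status = nf90_inq_dimid(ncid, 'nband', dimID)
status = nf90_inquire_dimension(ncid, dimID, len=nBand)
allocate(band_lims(nBand))
status = nf90_inq_varid(ncid, 'band_lims', varID)
status = nf90_get_var(ncid, varID, band_lims)
status = nf90_close(ncid)
```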
