Add 2D decomposition to the write grid component (#470)
* Update the write grid component to support a 2D decomposition of the output grid.
* Combine PR #468: bug fix in FV3GFS_io.F90 for the allocation of temp2d.

Co-authored-by: Ted Mansell <ted.mansell@noaa.gov>
junwang-noaa and MicroTed authored Feb 2, 2022
1 parent 23b7dd3 commit 6901981
Showing 2 changed files with 20 additions and 8 deletions.
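In short: the write component previously decomposed its output grid across a write group as regDecomp=(/1,ntasks/), i.e. ntasks full-width latitude strips. This commit reads a new itasks: entry from the run configuration and decomposes the output grid itasks x jtasks across the i and j dimensions instead, with jtasks = ntasks/itasks. A minimal sketch of the relevant configuration, assuming the usual UFS model_configure entries (only the itasks: label is taken from this diff; the other names and all values are illustrative):

  write_groups:           1    # number of write groups
  write_tasks_per_group:  8    # ntasks available to each write group
  itasks:                 2    # new: write tasks in the i direction; jtasks becomes 8/2 = 4
  output_grid:            'gaussian_grid'
  imo:                    384  # output grid size in i
  jmo:                    192  # output grid size in j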
2 changes: 1 addition & 1 deletion   io/FV3GFS_io.F90

@@ -196,7 +196,7 @@ subroutine FV3GFS_GFS_checksum (Model, GFS_Data, Atm_block)
       nsfcprop2d = nsfcprop2d + 16
     endif
 
-    allocate (temp2d(isc:iec,jsc:jec,nsfcprop2d+Model%ntot3d+Model%nctp))
+    allocate (temp2d(isc:iec,jsc:jec,nsfcprop2d+Model%ntot2d+Model%nctp))
     allocate (temp3d(isc:iec,jsc:jec,1:lev,14+Model%ntot3d+2*ntr))
     allocate (temp3dlevsp1(isc:iec,jsc:jec,1:lev+1,3))
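A note on this fix (folded in from PR #468): temp2d holds the 2D fields being checksummed, so, judging from the neighboring temp3d allocation that uses Model%ntot3d for the 3D physics fields, its last dimension should be sized with Model%ntot2d, the count of 2D physics fields. Using ntot3d mis-sizes temp2d whenever the two counts differ.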
26 changes: 19 additions & 7 deletions   io/module_wrt_grid_comp.F90

@@ -61,6 +61,7 @@ module module_wrt_grid_comp
   integer,save :: lead_write_task   !<-- Rank of the first write task in the write group
   integer,save :: last_write_task   !<-- Rank of the last write task in the write group
   integer,save :: ntasks            !<-- # of write tasks in the current group
+  integer,save :: itasks, jtasks    !<-- # of write tasks in i/j direction in the current group
 
   integer,save :: mytile            !<-- the tile number in write task
   integer,save :: wrt_mpi_comm      !<-- the mpi communicator in the write comp

@@ -294,11 +295,19 @@ subroutine wrt_initialize(wrt_comp, imp_state_write, exp_state_write, clock, rc)
       print *,'output_grid=',trim(output_grid)
     end if
 
+    call ESMF_ConfigGetAttribute(config=CF, value=itasks,default=1,label ='itasks:',rc=rc)
+    jtasks = ntasks
+    if(itasks > 0 ) jtasks = ntasks/itasks
+    if( itasks*jtasks /= ntasks ) then
+      itasks = 1
+      jtasks = ntasks
+    endif
+
     if(trim(output_grid) == 'gaussian_grid' .or. trim(output_grid) == 'global_latlon') then
       call ESMF_ConfigGetAttribute(config=CF, value=imo, label ='imo:',rc=rc)
       call ESMF_ConfigGetAttribute(config=CF, value=jmo, label ='jmo:',rc=rc)
       if (lprnt) then
-        print *,'imo=',imo,'jmo=',jmo
+        print *,'imo=',imo,'jmo=',jmo,'itasks=',itasks,'jtasks=',jtasks
       end if
     else if(trim(output_grid) == 'regional_latlon') then
       call ESMF_ConfigGetAttribute(config=CF, value=lon1, label ='lon1:',rc=rc)
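A worked example of the fallback guard above: with ntasks = 8 and itasks: 2, the group decomposes as 2 x 4. With ntasks = 6 and itasks: 4, integer division gives jtasks = 6/4 = 1, the check 4*1 /= 6 trips, and the code falls back to the old 1 x 6 strip layout, so a bad itasks value can never leave tasks unused or double-counted.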
@@ -422,7 +431,7 @@ subroutine wrt_initialize(wrt_comp, imp_state_write, exp_state_write, clock, rc)
     else if ( trim(output_grid) == 'gaussian_grid') then
 
       wrtgrid = ESMF_GridCreate1PeriDim(minIndex=(/1,1/), &
-                maxIndex=(/imo,jmo/), regDecomp=(/1,ntasks/), &
+                maxIndex=(/imo,jmo/), regDecomp=(/itasks,jtasks/), &
                 indexflag=ESMF_INDEX_GLOBAL, &
                 name='wrt_grid',rc=rc)
 !              indexflag=ESMF_INDEX_GLOBAL, coordSys=ESMF_COORDSYS_SPH_DEG
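ESMF's regDecomp=(/itasks,jtasks/) splits the imo points among itasks DEs and the jmo points among jtasks DEs, one rectangular block per write task. The following standalone sketch shows the kind of index ranges that result; it is an illustration only (the program name, the block_range helper, and the even-split-with-leading-remainder rule are mine, and ESMF's exact remainder placement and task ordering may differ):

program decomp_sketch
  implicit none
  integer :: imo, jmo, itasks, jtasks, rank, ip, jp
  integer :: is, ie, js, je

  imo = 384; jmo = 192      ! output grid size (illustrative)
  itasks = 2; jtasks = 4    ! write-group decomposition (illustrative)

  do rank = 0, itasks*jtasks - 1
    ip = mod(rank, itasks)  ! this task's block position in i (0-based)
    jp = rank / itasks      ! this task's block position in j (0-based)
    call block_range(imo, itasks, ip, is, ie)
    call block_range(jmo, jtasks, jp, js, je)
    print '(a,i3,a,i4,a,i4,a,i4,a,i4)', 'task', rank, ': i=', is, ':', ie, '  j=', js, ':', je
  end do

contains

  ! Split n points into nblocks near-even blocks; the first rem blocks get
  ! one extra point. Returns the 1-based global range [lo,hi] of block b.
  subroutine block_range(n, nblocks, b, lo, hi)
    integer, intent(in)  :: n, nblocks, b
    integer, intent(out) :: lo, hi
    integer :: base, rem
    base = n / nblocks
    rem  = mod(n, nblocks)
    lo = b*base + min(b, rem) + 1
    hi = lo + base - 1
    if (b < rem) hi = hi + 1
  end subroutine block_range

end program decomp_sketch

With these example values each of the 8 tasks receives a 192 x 48 patch, instead of the 384 x 24 full-width strips the old (/1,ntasks/) layout would give.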
@@ -462,8 +471,9 @@ subroutine wrt_initialize(wrt_comp, imp_state_write, exp_state_write, clock, rc)
           latPtr(i,j) = lat(j)
         enddo
       enddo
-!     print *,'aft wrtgrd, Gaussian, dimi,i=',lbound(lonPtr,1),ubound(lonPtr,1), &
-!       ' j=',lbound(lonPtr,2),ubound(lonPtr,2),'imo=',imo,'jmo=',jmo
+      if(lprnt) print *,'aft wrtgrd, Gaussian, dimi,i=',lbound(lonPtr,1),ubound(lonPtr,1), &
+        lbound(lonPtr,2),ubound(lonPtr,2),'j(i)=',lbound(latPtr,1),ubound(latPtr,1),&
+        ' j(j)=',lbound(latPtr,2),ubound(latPtr,2),'imo=',imo,'jmo=',jmo
 !     if(wrt_int_state%mype==0) print *,'aft wrtgrd, lon=',lonPtr(1:5,1), &
 !      'lat=',latPtr(1,1:5),'imo,jmo=',imo,jmo
 !      lonPtr(lbound(lonPtr,1),ubound(lonPtr,2)),'lat=',latPtr(lbound(lonPtr,1),lbound(lonPtr,2)), &
@@ -479,7 +489,9 @@ subroutine wrt_initialize(wrt_comp, imp_state_write, exp_state_write, clock, rc)
       call mpi_allgather(wrt_int_state%lat_end, 1, MPI_INTEGER, &
                          wrt_int_state%lat_end_wrtgrp, 1, MPI_INTEGER, wrt_mpi_comm, rc)
       if( lprnt ) print *,'aft wrtgrd, Gaussian, dimj_start=',wrt_int_state%lat_start_wrtgrp, &
-        'dimj_end=',wrt_int_state%lat_end_wrtgrp, 'wrt_group=',n_group
+        'dimj_end=',wrt_int_state%lat_end_wrtgrp, 'wrt_group=',n_group, &
+        'lon_start,end=',wrt_int_state%lon_start,wrt_int_state%lon_end, &
+        'lat_start,end=',wrt_int_state%lat_start, wrt_int_state%lat_end
       allocate( wrt_int_state%latPtr(wrt_int_state%lon_start:wrt_int_state%lon_end, &
                 wrt_int_state%lat_start:wrt_int_state%lat_end))
       allocate( wrt_int_state%lonPtr(wrt_int_state%lon_start:wrt_int_state%lon_end, &
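The extra fields in this print reflect the new layout: each write task now owns a rectangular (lon_start:lon_end, lat_start:lat_end) patch, as the latPtr/lonPtr allocations above show, rather than a full-width latitude band, so the task-local longitude range is now worth logging alongside the gathered per-task latitude bounds.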
@@ -497,7 +509,7 @@ subroutine wrt_initialize(wrt_comp, imp_state_write, exp_state_write, clock, rc)
       deallocate(slat)
     else if ( trim(output_grid) == 'global_latlon') then
       wrtgrid = ESMF_GridCreate1PeriDim(minIndex=(/1,1/), &
-                maxIndex=(/imo,jmo/), regDecomp=(/1,ntasks/), &
+                maxIndex=(/imo,jmo/), regDecomp=(/itasks,jtasks/), &
                 indexflag=ESMF_INDEX_GLOBAL, name='wrt_grid',rc=rc)
 
       if (ESMF_LogFoundError(rcToCheck=rc, msg=ESMF_LOGERR_PASSTHRU, line=__LINE__, file=__FILE__)) return
@@ -581,7 +593,7 @@ subroutine wrt_initialize(wrt_comp, imp_state_write, exp_state_write, clock, rc)
            trim(output_grid) == 'lambert_conformal' ) then
 
       wrtgrid = ESMF_GridCreate1PeriDim(minIndex=(/1,1/), &
-                maxIndex=(/imo,jmo/), regDecomp=(/1,ntasks/), &
+                maxIndex=(/imo,jmo/), regDecomp=(/itasks,jtasks/), &
                 indexflag=ESMF_INDEX_GLOBAL, &
                 name='wrt_grid',rc=rc)
 
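Taken together, the three regDecomp changes apply the same itasks x jtasks decomposition to every grid-creation path touched here: the Gaussian grid, the global lat-lon grid, and the regional projections such as lambert_conformal. Any output grid served by this write component therefore scales across both dimensions of its write group.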
