FV3 Bundle
fv_restart_nlm.F90
1 !***********************************************************************
2 !* GNU General Public License *
3 !* This file is a part of fvGFS. *
4 !* *
5 !* fvGFS is free software; you can redistribute it and/or modify it *
6 !* and are expected to follow the terms of the GNU General Public *
7 !* License as published by the Free Software Foundation; either *
8 !* version 2 of the License, or (at your option) any later version. *
9 !* *
10 !* fvGFS is distributed in the hope that it will be useful, but *
11 !* WITHOUT ANY WARRANTY; without even the implied warranty of *
12 !* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU *
13 !* General Public License for more details. *
14 !* *
15 !* For the full text of the GNU General Public License, *
16 !* write to: Free Software Foundation, Inc., *
17 !* 675 Mass Ave, Cambridge, MA 02139, USA. *
18 !* or see: http://www.gnu.org/licenses/gpl.html *
19 !***********************************************************************
21 
22  !<OVERVIEW>
23  ! Restart facilities for FV core
24  !</OVERVIEW>
25  !<DESCRIPTION>
26  ! This module writes and reads restart files for the FV core. In addition,
27  ! it performs the setup and calls the routines needed to produce a complete restart
28  ! for the model.
29  !</DESCRIPTION>
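 ! A minimal driver-side calling sketch (illustrative only; it assumes the
 ! driver has already allocated Atm, built the domains, and set the
 ! driver-level variables passed below -- argument lists follow the
 ! interfaces defined in this file):
 !
 !     call fv_restart_init()
 !     call fv_restart(fv_domain, Atm, dt_atmos, seconds, days, &
 !                     cold_start, grid_type, grids_on_this_pe)
 !     !... integrate the model ...
 !     call fv_write_restart(Atm, grids_on_this_pe, timestamp)
 !     call fv_restart_end(Atm, grids_on_this_pe)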
30 
39  use init_hydro_nlm_mod, only: p_var
40  use mpp_domains_mod, only: mpp_update_domains, domain2d, dgrid_ne
41  use mpp_mod, only: mpp_chksum, stdout, mpp_error, fatal, note, get_unit, mpp_sum
43  use fv_mp_nlm_mod, only: is_master, switch_current_atm, mp_reduce_min, mp_reduce_max
44  use fv_surf_map_nlm_mod, only: sgh_g, oro_g
55  use mpp_mod, only: mpp_send, mpp_recv, mpp_sync_self, mpp_set_current_pelist, mpp_get_current_pelist, mpp_npes, mpp_pe, mpp_sync
56  use mpp_domains_mod, only: center, corner, north, east, mpp_get_c2f_index, west, south
58  use fms_mod, only: file_exist
59 
60  implicit none
61  private
62 
64  public :: d2c_setup, d2a_setup
65 
66  real(kind=R_GRID), parameter :: cnst_0p20=0.20d0
 67  !--- private module state
68  logical :: module_is_initialized = .false.
69 
70 contains
71 
72  !#####################################################################
73  ! <SUBROUTINE NAME="fv_restart_init">
74  !
75  ! <DESCRIPTION>
76  ! Initialize the fv core restart facilities
77  ! </DESCRIPTION>
78  !
79  subroutine fv_restart_init()
80  call fv_io_init()
81  module_is_initialized = .true.
82  end subroutine fv_restart_init
83  ! </SUBROUTINE> NAME="fv_restart_init"
84 
85 
86  !#####################################################################
87  ! <SUBROUTINE NAME="fv_restart">
88  !
89  ! <DESCRIPTION>
90  ! The fv core restart facility
91  ! </DESCRIPTION>
92  !
93  subroutine fv_restart(fv_domain, Atm, dt_atmos, seconds, days, cold_start, grid_type, grids_on_this_pe)
94  type(domain2d), intent(inout) :: fv_domain
95  type(fv_atmos_type), intent(inout) :: atm(:)
96  real, intent(in) :: dt_atmos
97  integer, intent(out) :: seconds
98  integer, intent(out) :: days
99  logical, intent(inout) :: cold_start
100  integer, intent(in) :: grid_type
101  logical, intent(INOUT) :: grids_on_this_pe(:)
102 
103 
104  integer :: i, j, k, n, ntileme, nt, iq
105  integer :: isc, iec, jsc, jec, npz, npz_rst, ncnst, ntprog, ntdiag
106  integer :: isd, ied, jsd, jed
107  integer isd_p, ied_p, jsd_p, jed_p, isc_p, iec_p, jsc_p, jec_p, isg, ieg, jsg,jeg, npx_p, npy_p
108  real, allocatable :: g_dat(:,:,:)
109 
110  integer :: unit
111  real, allocatable :: dz1(:)
112  real rgrav, f00, ztop, pertn
113  logical :: hybrid
114  logical :: cold_start_grids(size(atm))
115  character(len=128):: tname, errstring, fname, tracer_name
116  character(len=120):: fname_ne, fname_sw
117  character(len=3) :: gn
118 
119  integer :: npts
120  real :: sumpertn
121 
122  rgrav = 1. / grav
123 
124  if(.not.module_is_initialized) call mpp_error(fatal, 'You must call fv_restart_init.')
125 
126  ntileme = size(atm(:))
127 
128  cold_start_grids(:) = cold_start
129  do n = 1, ntileme
130 
131  if (is_master()) then
132  print*, 'FV_RESTART: ', n, cold_start_grids(n)
133  endif
134 
135  if (atm(n)%neststruct%nested) then
136  write(fname,'(A, I2.2, A)') 'INPUT/fv_core.res.nest', atm(n)%grid_number, '.nc'
137  write(fname_ne,'(A, I2.2, A)') 'INPUT/fv_BC_ne.res.nest', atm(n)%grid_number, '.nc'
138  write(fname_sw,'(A, I2.2, A)') 'INPUT/fv_BC_sw.res.nest', atm(n)%grid_number, '.nc'
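 !For example, grid_number = 2 gives INPUT/fv_core.res.nest02.nc,
 !INPUT/fv_BC_ne.res.nest02.nc, and INPUT/fv_BC_sw.res.nest02.nc.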
139  if (atm(n)%flagstruct%external_ic) then
140  if (is_master()) print*, 'External IC set on grid', atm(n)%grid_number, ', re-initializing grid'
141  cold_start_grids(n) = .true.
142  atm(n)%flagstruct%warm_start = .false. !resetting warm_start flag to avoid FATAL error below
143  else
144  if (is_master()) print*, 'Searching for nested grid restart file ', trim(fname)
145  cold_start_grids(n) = .not. file_exist(fname, atm(n)%domain)
146  atm(n)%flagstruct%warm_start = file_exist(fname, atm(n)%domain)!resetting warm_start flag to avoid FATAL error below
147  endif
148  endif
149 
150  if (.not. grids_on_this_pe(n)) then
151 
152  !Even if this grid is not on this PE, if it has child grids we must send
153  !along the data that is needed.
154  !This is a VERY complicated bit of code that attempts to follow the entire decision tree
155  ! of the initialization without doing anything. This could very much be cleaned up.
156 
157  if (atm(n)%neststruct%nested) then
158  if (cold_start_grids(n)) then
159  if (atm(n)%parent_grid%flagstruct%n_zs_filter > 0) call fill_nested_grid_topo_halo(atm(n), .false.)
160  if (atm(n)%flagstruct%nggps_ic) then
161  call fill_nested_grid_topo(atm(n), .false.)
162  call fill_nested_grid_topo_halo(atm(n), .false.)
163  call nested_grid_bc(atm(n)%ps, atm(n)%parent_grid%ps, atm(n)%neststruct%nest_domain, &
164  atm(n)%neststruct%ind_h, atm(n)%neststruct%wt_h, 0, 0, &
165  atm(n)%npx, atm(n)%npy,atm(n)%bd, isg, ieg, jsg, jeg, proc_in=.false.)
166  call setup_nested_boundary_halo(atm(n),.false.)
167  else
168  call fill_nested_grid_topo(atm(n), .false.)
169  call setup_nested_boundary_halo(atm(n),.false.)
170  if ( atm(n)%flagstruct%external_ic .and. grid_type < 4 ) call fill_nested_grid_data(atm(n:n), .false.)
171  endif
172  else
173  if (is_master()) print*, 'Searching for nested grid BC files ', trim(fname_ne), ' ', trim(fname_sw)
174 
175  !!!! PROBLEM: file_exist doesn't know to look for fv_BC_ne.res.nest02.nc instead of fv_BC_ne.res.nc on coarse grid
 176  !If both BC files exist there is nothing to do on this PE; otherwise regenerate the BCs.
 177  if (.not. (file_exist(fname_ne, atm(n)%domain) .and. file_exist(fname_sw, atm(n)%domain))) then
 178  if ( is_master() ) write(*,*) 'BC files not found, re-generating nested grid boundary conditions'
 179  call fill_nested_grid_topo_halo(atm(n), .false.)
 180  call setup_nested_boundary_halo(atm(n), .false.)
 181  atm(n)%neststruct%first_step = .true.
 182  endif
183  end if
184 
185  if (.not. atm(n)%flagstruct%hydrostatic .and. atm(n)%flagstruct%make_nh .and. &
186  (.not. atm(n)%flagstruct%nggps_ic .and. .not. atm(n)%flagstruct%ecmwf_ic) ) then
187  call nested_grid_bc(atm(n)%delz, atm(n)%parent_grid%delz, atm(n)%neststruct%nest_domain, &
188  atm(n)%neststruct%ind_h, atm(n)%neststruct%wt_h, 0, 0, &
189  atm(n)%npx, atm(n)%npy, npz, atm(n)%bd, isg, ieg, jsg, jeg, proc_in=.false.)
190  call nested_grid_bc(atm(n)%w, atm(n)%parent_grid%w, atm(n)%neststruct%nest_domain, &
191  atm(n)%neststruct%ind_h, atm(n)%neststruct%wt_h, 0, 0, &
192  atm(n)%npx, atm(n)%npy, npz, atm(n)%bd, isg, ieg, jsg, jeg, proc_in=.false.)
193  endif
194 
195 
196  endif
197 
198  cycle
199 
200  endif
201  !This call still appears to be necessary to get isd, etc. correct
202  call switch_current_atm(atm(n))
203 
204  npz = atm(1)%npz
205  npz_rst = atm(1)%flagstruct%npz_rst
206 
207  !--- call fv_io_register_restart to register restart field to be written out in fv_io_write_restart
208  call fv_io_register_restart(atm(n)%domain,atm(n:n))
209  if (atm(n)%neststruct%nested) call fv_io_register_restart_bcs(atm(n))
210  if( .not.cold_start_grids(n) .and. (.not. atm(n)%flagstruct%external_ic) ) then
211 
212 
213  if ( npz_rst /= 0 .and. npz_rst /= npz ) then
214 ! Remap vertically the prognostic variables for the chosen vertical resolution
215  if( is_master() ) then
216  write(*,*) ' '
217  write(*,*) '***** Important Note from FV core ********************'
218  write(*,*) 'Remapping dynamic IC from', npz_rst, 'levels to ', npz,'levels'
219  write(*,*) '***** End Note from FV core **************************'
220  write(*,*) ' '
221  endif
222  call remap_restart( atm(n)%domain, atm(n:n) )
223  if( is_master() ) write(*,*) 'Done remapping dynamical IC'
224  else
225  if( is_master() ) write(*,*) 'Warm starting, calling fv_io_restart'
226  call fv_io_read_restart(atm(n)%domain,atm(n:n))
227  endif
228  endif
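 !Hedged example (illustrative namelist values; npz_rst is a runtime
 !setting in this model family):
 !   &fv_core_nml
 !       npz = 127
 !       npz_rst = 64
 !   /
 !With these settings the branch above calls remap_restart() to map a
 !64-level restart onto the 127-level target grid; with npz_rst = 0 the
 !restart is read at its native resolution by fv_io_read_restart().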
229 
230 !---------------------------------------------------------------------------------------------
231 ! Read, interpolate (latlon to cubed), then remap vertically with terrain adjustment if needed
232 !---------------------------------------------------------------------------------------------
233  if (atm(n)%neststruct%nested) then
234  if (cold_start_grids(n)) call fill_nested_grid_topo(atm(n), .true.)
235  !if (cold_start_grids(n) .and. .not. Atm(n)%flagstruct%nggps_ic) call fill_nested_grid_topo(Atm(n), .true.)
236  if (cold_start_grids(n)) then
237  if (atm(n)%parent_grid%flagstruct%n_zs_filter > 0 .or. atm(n)%flagstruct%nggps_ic) call fill_nested_grid_topo_halo(atm(n), .true.)
238  end if
239  if (atm(n)%flagstruct%external_ic .and. atm(n)%flagstruct%nggps_ic) then
240  !Fill nested grid halo with ps
241  call nested_grid_bc(atm(n)%ps, atm(n)%parent_grid%ps, atm(n)%neststruct%nest_domain, &
242  atm(n)%neststruct%ind_h, atm(n)%neststruct%wt_h, 0, 0, &
243  atm(n)%npx, atm(n)%npy,atm(n)%bd, isg, ieg, jsg, jeg, proc_in=.true.)
244  endif
245  endif
246  if ( atm(n)%flagstruct%external_ic ) then
247  if( is_master() ) write(*,*) 'Calling get_external_ic'
248  call get_external_ic(atm(n:n), atm(n)%domain, cold_start_grids(n))
249  if( is_master() ) write(*,*) 'IC generated from the specified external source'
250  endif
251 
252  seconds = 0; days = 0 ! Restart needs to be modified to record seconds and days.
253 
254 ! Notes by Jeff D.
255  ! This logic doesn't work very well.
 256  ! We shouldn't read for all tiles and then loop over the tiles.
257 
258  isd = atm(n)%bd%isd
259  ied = atm(n)%bd%ied
260  jsd = atm(n)%bd%jsd
261  jed = atm(n)%bd%jed
262  ncnst = atm(n)%ncnst
263  if( is_master() ) write(*,*) 'in fv_restart ncnst=', ncnst
264  isc = atm(n)%bd%isc; iec = atm(n)%bd%iec; jsc = atm(n)%bd%jsc; jec = atm(n)%bd%jec
265 
266  ! Init model data
267  if(.not.cold_start_grids(n))then
268  atm(n)%neststruct%first_step = .false.
269  if (atm(n)%neststruct%nested) then
270  if ( npz_rst /= 0 .and. npz_rst /= npz ) then
271  call setup_nested_boundary_halo(atm(n))
272  else
273  !If BC file is found, then read them in. Otherwise we need to initialize the BCs.
274  if (is_master()) print*, 'Searching for nested grid BC files ', trim(fname_ne), ' ', trim(fname_sw)
275  if (file_exist(fname_ne, atm(n)%domain) .and. file_exist(fname_sw, atm(n)%domain)) then
276  call fv_io_read_bcs(atm(n))
277  else
278  if ( is_master() ) write(*,*) 'BC files not found, re-generating nested grid boundary conditions'
279  call fill_nested_grid_topo_halo(atm(n), .true.)
280  call setup_nested_boundary_halo(atm(n), .true.)
281  atm(n)%neststruct%first_step = .true.
282  endif
283  !Following line to make sure u and v are consistent across processor subdomains
284  call mpp_update_domains(atm(n)%u, atm(n)%v, atm(n)%domain, gridtype=dgrid_ne, complete=.true.)
285  endif
286  endif
287 
288  if ( atm(n)%flagstruct%mountain ) then
289 !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
290 ! !!! Additional terrain filter -- should not be called repeatedly !!!
291 !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
292  if ( atm(n)%flagstruct%n_zs_filter > 0 ) then
293  if ( atm(n)%flagstruct%nord_zs_filter == 2 ) then
294  call del2_cubed_sphere(atm(n)%npx, atm(n)%npy, atm(n)%phis, &
295  atm(n)%gridstruct%area_64, atm(n)%gridstruct%dx, atm(n)%gridstruct%dy, &
296  atm(n)%gridstruct%dxc, atm(n)%gridstruct%dyc, atm(n)%gridstruct%sin_sg, &
297  atm(n)%flagstruct%n_zs_filter, cnst_0p20*atm(n)%gridstruct%da_min, &
298  .false., oro_g, atm(n)%neststruct%nested, atm(n)%domain, atm(n)%bd)
299  if ( is_master() ) write(*,*) 'Warning !!! del-2 terrain filter has been applied ', &
300  atm(n)%flagstruct%n_zs_filter, ' times'
301  else if( atm(n)%flagstruct%nord_zs_filter == 4 ) then
302  call del4_cubed_sphere(atm(n)%npx, atm(n)%npy, atm(n)%phis, atm(n)%gridstruct%area_64, &
303  atm(n)%gridstruct%dx, atm(n)%gridstruct%dy, &
304  atm(n)%gridstruct%dxc, atm(n)%gridstruct%dyc, atm(n)%gridstruct%sin_sg, &
305  atm(n)%flagstruct%n_zs_filter, .false., oro_g, atm(n)%neststruct%nested, &
306  atm(n)%domain, atm(n)%bd)
307  if ( is_master() ) write(*,*) 'Warning !!! del-4 terrain filter has been applied ', &
308  atm(n)%flagstruct%n_zs_filter, ' times'
309  endif
310  endif
311 
312  call mpp_update_domains( atm(n)%phis, atm(n)%domain, complete=.true. )
313  else
314  atm(n)%phis = 0.
315  if( is_master() ) write(*,*) 'phis set to zero'
316  endif !mountain
317 
318 
319 #ifdef SW_DYNAMICS
320  atm(n)%pt(:,:,:)=1.
321 #else
322  if ( .not.atm(n)%flagstruct%hybrid_z ) then
 323  if(atm(n)%ptop/=atm(n)%ak(1)) call mpp_error(fatal,'FV restart: ptop not equal to Atm(n)%ak(1)')
324  else
325  atm(n)%ptop = atm(n)%ak(1); atm(n)%ks = 0
326  endif
327  call p_var(npz, isc, iec, jsc, jec, atm(n)%ptop, ptop_min, &
328  atm(n)%delp, atm(n)%delz, atm(n)%pt, atm(n)%ps, atm(n)%pe, atm(n)%peln, &
329  atm(n)%pk, atm(n)%pkz, kappa, atm(n)%q, atm(n)%ng, &
330  ncnst, atm(n)%gridstruct%area_64, atm(n)%flagstruct%dry_mass, &
331  atm(n)%flagstruct%adjust_dry_mass, atm(n)%flagstruct%mountain, &
332  atm(n)%flagstruct%moist_phys, atm(n)%flagstruct%hydrostatic, &
333  atm(n)%flagstruct%nwat, atm(n)%domain, atm(n)%flagstruct%make_nh)
334 
335 #endif
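 !Coriolis parameters: fc at cell corners (C grid) and f0 at cell centers
 !(A grid). On the (possibly rotated) sphere,
 !   f = 2*omega*( -cos(lon)*cos(lat)*sin(alpha) + sin(lat)*cos(alpha) ),
 !which reduces to 2*omega*sin(lat) for alpha = 0; the planar grid types
 !instead use the constant f-plane value f00 at latitude flagstruct%deglat.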
336  if ( grid_type < 7 .and. grid_type /= 4 ) then
337 ! Fill big values in the non-existing corner regions:
338 ! call fill_ghost(Atm(n)%phis, Atm(n)%npx, Atm(n)%npy, big_number)
339  do j=jsd,jed+1
340  do i=isd,ied+1
341  atm(n)%gridstruct%fc(i,j) = 2.*omega*( -cos(atm(n)%gridstruct%grid(i,j,1))*cos(atm(n)%gridstruct%grid(i,j,2))*sin(alpha) + &
342  sin(atm(n)%gridstruct%grid(i,j,2))*cos(alpha) )
343  enddo
344  enddo
345  do j=jsd,jed
346  do i=isd,ied
347  atm(n)%gridstruct%f0(i,j) = 2.*omega*( -cos(atm(n)%gridstruct%agrid(i,j,1))*cos(atm(n)%gridstruct%agrid(i,j,2))*sin(alpha) + &
348  sin(atm(n)%gridstruct%agrid(i,j,2))*cos(alpha) )
349  enddo
350  enddo
351  else
352  f00 = 2.*omega*sin(atm(n)%flagstruct%deglat/180.*pi)
353  do j=jsd,jed+1
354  do i=isd,ied+1
355  atm(n)%gridstruct%fc(i,j) = f00
356  enddo
357  enddo
358  do j=jsd,jed
359  do i=isd,ied
360  atm(n)%gridstruct%f0(i,j) = f00
361  enddo
362  enddo
363  endif
364  else
365  if ( atm(n)%flagstruct%warm_start ) then
366  call mpp_error(fatal, 'FV restart files not found; set warm_start = .F. if cold_start is desired.')
367  endif
368 ! Cold start
369  if ( atm(n)%flagstruct%make_hybrid_z ) then
370  hybrid = .false.
371  else
372  hybrid = atm(n)%flagstruct%hybrid_z
373  endif
374  if (grid_type < 4) then
375  if ( .not. atm(n)%flagstruct%external_ic ) then
376  call init_case(atm(n)%u,atm(n)%v,atm(n)%w,atm(n)%pt,atm(n)%delp,atm(n)%q, &
377  atm(n)%phis, atm(n)%ps,atm(n)%pe, atm(n)%peln,atm(n)%pk,atm(n)%pkz, &
378  atm(n)%uc,atm(n)%vc, atm(n)%ua,atm(n)%va, &
379  atm(n)%ak, atm(n)%bk, atm(n)%gridstruct, atm(n)%flagstruct,&
380  atm(n)%npx, atm(n)%npy, npz, atm(n)%ng, &
381  ncnst, atm(n)%flagstruct%nwat, &
382  atm(n)%flagstruct%ndims, atm(n)%flagstruct%ntiles, &
383  atm(n)%flagstruct%dry_mass, &
384  atm(n)%flagstruct%mountain, &
385  atm(n)%flagstruct%moist_phys, atm(n)%flagstruct%hydrostatic, &
386  hybrid, atm(n)%delz, atm(n)%ze0, &
387  atm(n)%flagstruct%adiabatic, atm(n)%ks, atm(n)%neststruct%npx_global, &
388  atm(n)%ptop, atm(n)%domain, atm(n)%tile, atm(n)%bd)
389  endif
390  elseif (grid_type == 4) then
391  call init_double_periodic(atm(n)%u,atm(n)%v,atm(n)%w,atm(n)%pt, &
392  atm(n)%delp,atm(n)%q,atm(n)%phis, atm(n)%ps,atm(n)%pe, &
393  atm(n)%peln,atm(n)%pk,atm(n)%pkz, &
394  atm(n)%uc,atm(n)%vc, atm(n)%ua,atm(n)%va, &
395  atm(n)%ak, atm(n)%bk, &
396  atm(n)%gridstruct, atm(n)%flagstruct, &
397  atm(n)%npx, atm(n)%npy, npz, atm(n)%ng, &
398  ncnst, atm(n)%flagstruct%nwat, &
399  atm(n)%flagstruct%ndims, atm(n)%flagstruct%ntiles, &
400  atm(n)%flagstruct%dry_mass, atm(n)%flagstruct%mountain, &
401  atm(n)%flagstruct%moist_phys, atm(n)%flagstruct%hydrostatic, &
402  hybrid, atm(n)%delz, atm(n)%ze0, atm(n)%ks, atm(n)%ptop, &
403  atm(n)%domain, atm(n)%tile, atm(n)%bd)
404  if( is_master() ) write(*,*) 'Doubly Periodic IC generated'
405  elseif (grid_type == 5 .or. grid_type == 6) then
406  call init_latlon(atm(n)%u,atm(n)%v,atm(n)%pt,atm(n)%delp,atm(n)%q,&
407  atm(n)%phis, atm(n)%ps,atm(n)%pe, &
408  atm(n)%peln,atm(n)%pk,atm(n)%pkz, &
409  atm(n)%uc,atm(n)%vc, atm(n)%ua,atm(n)%va, &
410  atm(n)%ak, atm(n)%bk, atm(n)%gridstruct, &
411  atm(n)%npx, atm(n)%npy, npz, atm(n)%ng, ncnst, &
412  atm(n)%flagstruct%ndims, atm(n)%flagstruct%ntiles, &
413  atm(n)%flagstruct%dry_mass, &
414  atm(n)%flagstruct%mountain, &
415  atm(n)%flagstruct%moist_phys, hybrid, atm(n)%delz, &
416  atm(n)%ze0, atm(n)%domain, atm(n)%tile)
417  endif
418 
419  !Turn this off on the nested grid if you are just interpolating topography from the coarse grid!
420  if ( atm(n)%flagstruct%fv_land ) then
421  do j=jsc,jec
422  do i=isc,iec
423  atm(n)%sgh(i,j) = sgh_g(i,j)
424  atm(n)%oro(i,j) = oro_g(i,j)
425  enddo
426  enddo
427  endif
428 
429 
430  !Set up nested grids
 431  !Currently, even though we do fill in the nested-grid IC from
 432  ! init_case or external_ic, we appear to overwrite it with
 433  ! coarse-grid data.
434  !if (Atm(n)%neststruct%nested) then
435  ! Only fill nested-grid data if external_ic is called for the cubed-sphere grid
436  if (atm(n)%neststruct%nested) then
437  call setup_nested_boundary_halo(atm(n), .true.)
438  if (atm(n)%flagstruct%external_ic .and. .not. atm(n)%flagstruct%nggps_ic .and. grid_type < 4 ) then
439  call fill_nested_grid_data(atm(n:n))
440  endif
441  end if
442 
443  endif !end cold_start check
444 
445  if ( (.not.atm(n)%flagstruct%hydrostatic) .and. atm(n)%flagstruct%make_nh .and. atm(n)%neststruct%nested) then
446  call nested_grid_bc(atm(n)%delz, atm(n)%parent_grid%delz, atm(n)%neststruct%nest_domain, &
447  atm(n)%neststruct%ind_h, atm(n)%neststruct%wt_h, 0, 0, &
448  atm(n)%npx, atm(n)%npy, npz, atm(n)%bd, isg, ieg, jsg, jeg, proc_in=.true.)
449  call nested_grid_bc(atm(n)%w, atm(n)%parent_grid%w, atm(n)%neststruct%nest_domain, &
450  atm(n)%neststruct%ind_h, atm(n)%neststruct%wt_h, 0, 0, &
451  atm(n)%npx, atm(n)%npy, npz, atm(n)%bd, isg, ieg, jsg, jeg, proc_in=.true.)
452  call fv_io_register_restart_bcs_nh(atm(n)) !needed to register nested-grid BCs not registered earlier
453  endif
454 
455  end do
456 
457 
458  do n = ntileme,1,-1
459  if (atm(n)%neststruct%nested .and. atm(n)%flagstruct%external_ic .and. &
460  atm(n)%flagstruct%grid_type < 4 .and. cold_start_grids(n)) then
461  call fill_nested_grid_data_end(atm(n), grids_on_this_pe(n))
462  endif
463  end do
464 
465  do n = 1, ntileme
466  if (.not. grids_on_this_pe(n)) cycle
467 
468  isd = atm(n)%bd%isd
469  ied = atm(n)%bd%ied
470  jsd = atm(n)%bd%jsd
471  jed = atm(n)%bd%jed
472  ncnst = atm(n)%ncnst
473  ntprog = size(atm(n)%q,4)
474  ntdiag = size(atm(n)%qdiag,4)
475  isc = atm(n)%bd%isc; iec = atm(n)%bd%iec; jsc = atm(n)%bd%jsc; jec = atm(n)%bd%jec
476 
477 
478 !---------------------------------------------------------------------------------------------
479 ! Transform the (starting) Eulerian vertical coordinate from sigma-p to hybrid_z
480  if ( atm(n)%flagstruct%hybrid_z ) then
481  if ( atm(n)%flagstruct%make_hybrid_z ) then
482  allocate ( dz1(npz) )
483  if( npz==32 ) then
484  call compute_dz_l32(npz, ztop, dz1)
485  else
486  ztop = 45.e3
487  call compute_dz_var(npz, ztop, dz1)
488  endif
489  call set_hybrid_z(isc, iec, jsc, jec, atm(n)%ng, npz, ztop, dz1, rgrav, &
490  atm(n)%phis, atm(n)%ze0)
491  deallocate ( dz1 )
492 ! call prt_maxmin('ZE0', Atm(n)%ze0, isc, iec, jsc, jec, 0, npz, 1.E-3)
493 ! call prt_maxmin('DZ0', Atm(n)%delz, isc, iec, jsc, jec, 0, npz, 1. )
494  endif
495 ! call make_eta_level(npz, Atm(n)%pe, area, Atm(n)%ks, Atm(n)%ak, Atm(n)%bk, Atm(n)%ptop)
496  endif
497 !---------------------------------------------------------------------------------------------
498 
499  if (atm(n)%flagstruct%add_noise > 0.) then
500  write(errstring,'(A, E16.9)') "Adding thermal noise of amplitude ", atm(n)%flagstruct%add_noise
501  call mpp_error(note, errstring)
502  call random_seed
503  npts = 0
504  sumpertn = 0.
505  do k=1,npz
506  do j=jsc,jec
507  do i=isc,iec
508  call random_number(pertn)
509  atm(n)%pt(i,j,k) = atm(n)%pt(i,j,k) + pertn*atm(n)%flagstruct%add_noise
510  npts = npts + 1
 511  sumpertn = sumpertn + (pertn*atm(n)%flagstruct%add_noise)**2 !parenthesized: ** binds tighter than *, so the full perturbation is squared
512  enddo
513  enddo
514  enddo
515  call mpp_update_domains(atm(n)%pt, atm(n)%domain)
516  call mpp_sum(sumpertn)
517  call mpp_sum(npts)
518  write(errstring,'(A, E16.9)') "RMS added noise: ", sqrt(sumpertn/npts)
519  call mpp_error(note, errstring)
520  endif
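 !For reference: pertn is drawn uniformly from [0,1), so the expected
 !"RMS added noise" printed above is add_noise/sqrt(3), i.e. about
 !0.577*add_noise.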
521 
522  if (atm(n)%grid_number > 1) then
523  write(gn,'(A2, I1)') " g", atm(n)%grid_number
524  else
525  gn = ''
526  end if
527 
528  unit = stdout()
529  write(unit,*)
530  write(unit,*) 'fv_restart u ', trim(gn),' = ', mpp_chksum(atm(n)%u(isc:iec,jsc:jec,:))
531  write(unit,*) 'fv_restart v ', trim(gn),' = ', mpp_chksum(atm(n)%v(isc:iec,jsc:jec,:))
532  if ( .not.atm(n)%flagstruct%hydrostatic ) &
533  write(unit,*) 'fv_restart w ', trim(gn),' = ', mpp_chksum(atm(n)%w(isc:iec,jsc:jec,:))
534  write(unit,*) 'fv_restart delp', trim(gn),' = ', mpp_chksum(atm(n)%delp(isc:iec,jsc:jec,:))
535  write(unit,*) 'fv_restart phis', trim(gn),' = ', mpp_chksum(atm(n)%phis(isc:iec,jsc:jec))
536 
537 #ifdef SW_DYNAMICS
538  call prt_maxmin('H ', atm(n)%delp, isc, iec, jsc, jec, atm(n)%ng, 1, rgrav)
539 #else
540  write(unit,*) 'fv_restart pt ', trim(gn),' = ', mpp_chksum(atm(n)%pt(isc:iec,jsc:jec,:))
541  if (ntprog>0) &
542  write(unit,*) 'fv_restart q(prog) nq ', trim(gn),' =',ntprog, mpp_chksum(atm(n)%q(isc:iec,jsc:jec,:,:))
543  if (ntdiag>0) &
544  write(unit,*) 'fv_restart q(diag) nq ', trim(gn),' =',ntdiag, mpp_chksum(atm(n)%qdiag(isc:iec,jsc:jec,:,:))
545  do iq=1,min(17, ntprog) ! Check up to 17 tracers
546  call get_tracer_names(model_atmos, iq, tracer_name)
547  write(unit,*) 'fv_restart '//trim(tracer_name)//' = ', mpp_chksum(atm(n)%q(isc:iec,jsc:jec,:,iq))
548  enddo
549 !---------------
550 ! Check Min/Max:
551 !---------------
552  call pmaxmn_g('ZS', atm(n)%phis, isc, iec, jsc, jec, 1, rgrav, atm(n)%gridstruct%area_64, atm(n)%domain)
553  call pmaxmn_g('PS', atm(n)%ps, isc, iec, jsc, jec, 1, 0.01, atm(n)%gridstruct%area_64, atm(n)%domain)
554  call pmaxmn_g('T ', atm(n)%pt, isc, iec, jsc, jec, npz, 1., atm(n)%gridstruct%area_64, atm(n)%domain)
555 
556 ! Check tracers:
557  do i=1, ntprog
558  call get_tracer_names ( model_atmos, i, tname )
559  call pmaxmn_g(trim(tname), atm(n)%q(isd:ied,jsd:jed,1:npz,i:i), isc, iec, jsc, jec, npz, &
560  1., atm(n)%gridstruct%area_64, atm(n)%domain)
561  enddo
562 #endif
563  call prt_maxmin('U ', atm(n)%u(isc:iec,jsc:jec,1:npz), isc, iec, jsc, jec, 0, npz, 1.)
564  call prt_maxmin('V ', atm(n)%v(isc:iec,jsc:jec,1:npz), isc, iec, jsc, jec, 0, npz, 1.)
565 
566  if ( (.not.atm(n)%flagstruct%hydrostatic) .and. atm(n)%flagstruct%make_nh ) then
567  call mpp_error(note, " Initializing w to 0")
568  atm(n)%w = 0.
569  if ( .not.atm(n)%flagstruct%hybrid_z ) then
570  call mpp_error(note, " Initializing delz from hydrostatic state")
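 !Hypsometric relation: dz = -(Rd*T/g)*d(ln p). Since peln increases
 !with k, delz below comes out negative, following the FV3 sign convention.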
571  do k=1,npz
572  do j=jsc,jec
573  do i=isc,iec
574  atm(n)%delz(i,j,k) = (rdgas*rgrav)*atm(n)%pt(i,j,k)*(atm(n)%peln(i,k,j)-atm(n)%peln(i,k+1,j))
575  enddo
576  enddo
577  enddo
578  endif
579  endif
580 
581  if ( .not.atm(n)%flagstruct%hydrostatic ) &
582  call pmaxmn_g('W ', atm(n)%w, isc, iec, jsc, jec, npz, 1., atm(n)%gridstruct%area_64, atm(n)%domain)
583 
584  if (is_master()) write(unit,*)
585 
586 !--------------------------------------------
587 ! Initialize surface winds for flux coupler:
588 !--------------------------------------------
589  if ( .not. atm(n)%flagstruct%srf_init ) then
590  call cubed_to_latlon(atm(n)%u, atm(n)%v, atm(n)%ua, atm(n)%va, &
591  atm(n)%gridstruct, &
592  atm(n)%npx, atm(n)%npy, npz, 1, &
593  atm(n)%gridstruct%grid_type, atm(n)%domain, &
594  atm(n)%gridstruct%nested, atm(n)%flagstruct%c2l_ord, atm(n)%bd)
595  do j=jsc,jec
596  do i=isc,iec
597  atm(n)%u_srf(i,j) = atm(n)%ua(i,j,npz)
598  atm(n)%v_srf(i,j) = atm(n)%va(i,j,npz)
599  enddo
600  enddo
601  atm(n)%flagstruct%srf_init = .true.
602  endif
603 
604  end do ! n_tile
605 
606  end subroutine fv_restart
607  ! </SUBROUTINE> NAME="fv_restart"
608 
609  subroutine setup_nested_boundary_halo(Atm, proc_in)
 611  !This routine now takes the "easy way out" with regard
612  ! to pt (virtual potential temperature), q_con, and cappa;
613  ! their halo values are now set up when the BCs are set up
614  ! in fv_dynamics
615 
616  type(fv_atmos_type), intent(INOUT) :: atm
617  logical, INTENT(IN), OPTIONAL :: proc_in
618  real, allocatable :: g_dat(:,:,:), g_dat2(:,:,:)
619  real, allocatable :: pt_coarse(:,:,:)
620  integer i,j,k,nq, sphum, ncnst, istart, iend, npz, nwat
621  integer isc, iec, jsc, jec, isd, ied, jsd, jed, is, ie, js, je
622  integer isd_p, ied_p, jsd_p, jed_p, isc_p, iec_p, jsc_p, jec_p, isg, ieg, jsg,jeg, npx_p, npy_p
623  real zvir
624  logical process
625  integer :: liq_wat, ice_wat, rainwat, snowwat, graupel
626  real :: qv, dp1, q_liq, q_sol, q_con, cvm, cappa, dp, pt, dz, pkz, rdg
627 
628  if (PRESENT(proc_in)) then
629  process = proc_in
630  else
631  process = .true.
632  endif
633 
634  isd = atm%bd%isd
635  ied = atm%bd%ied
636  jsd = atm%bd%jsd
637  jed = atm%bd%jed
638  ncnst = atm%ncnst
639  isc = atm%bd%isc; iec = atm%bd%iec; jsc = atm%bd%jsc; jec = atm%bd%jec
640  is = atm%bd%is ; ie = atm%bd%ie ; js = atm%bd%js ; je = atm%bd%je
641  npz = atm%npz
642  nwat = atm%flagstruct%nwat
643 
644 #ifdef MAPL_MODE
645  liq_wat = -1
646  ice_wat = -1
647  rainwat = -1
648  snowwat = -1
649  graupel = -1
650 #else
651  if (nwat>=3 ) then
652  liq_wat = get_tracer_index(model_atmos, 'liq_wat')
653  ice_wat = get_tracer_index(model_atmos, 'ice_wat')
654  endif
655  if ( nwat==6 ) then
656  rainwat = get_tracer_index(model_atmos, 'rainwat')
657  snowwat = get_tracer_index(model_atmos, 'snowwat')
658  graupel = get_tracer_index(model_atmos, 'graupel')
659  endif
660 #endif
661 
662  call mpp_get_data_domain( atm%parent_grid%domain, &
663  isd_p, ied_p, jsd_p, jed_p )
664  call mpp_get_compute_domain( atm%parent_grid%domain, &
665  isc_p, iec_p, jsc_p, jec_p )
666  call mpp_get_global_domain( atm%parent_grid%domain, &
667  isg, ieg, jsg, jeg, xsize=npx_p, ysize=npy_p)
668 
669  call nested_grid_bc(atm%delp, atm%parent_grid%delp, atm%neststruct%nest_domain, &
670  atm%neststruct%ind_h, atm%neststruct%wt_h, 0, 0, &
671  atm%npx, atm%npy, npz, atm%bd, isg, ieg, jsg, jeg, proc_in=process)
672  do nq=1,ncnst
673  call nested_grid_bc(atm%q(:,:,:,nq), &
674  atm%parent_grid%q(:,:,:,nq), atm%neststruct%nest_domain, &
675  atm%neststruct%ind_h, atm%neststruct%wt_h, 0, 0, &
676  atm%npx, atm%npy, npz, atm%bd, isg, ieg, jsg, jeg, proc_in=process)
677  end do
678 
679  if (process) then
680  if (is_master()) print*, 'FILLING NESTED GRID HALO'
681  else
682  if (is_master()) print*, 'SENDING DATA TO FILL NESTED GRID HALO'
683  endif
684 
685 
686  !Filling phis?
 687  !In idealized test cases, where the topography is EXACTLY known (e.g. case 13),
 688  !interpolating the topography yields a much worse result. By comparison, in
 689  !real-topography cases little difference is seen.
690 
691  !This is probably because the halo phis, which is used to compute
692  !geopotential height (gz, gh), only affects the interior by being
693  !used to compute corner gz in a2b_ord[24]. We might suppose this
694  !computation would be more accurate when using values of phis which
 695  !are more consistent with those on the interior (i.e. the exactly-known
696  !values) than the crude values given through linear interpolation.
697 
698  !For real topography cases, or in cases in which the coarse-grid topo
699  ! is smoothed, we fill the boundary halo with the coarse-grid topo.
700 
701 #ifndef SW_DYNAMICS
702  !pt --- actually temperature
703 
704  call nested_grid_bc(atm%pt, atm%parent_grid%pt, atm%neststruct%nest_domain, &
705  atm%neststruct%ind_h, atm%neststruct%wt_h, 0, 0, &
706  atm%npx, atm%npy, npz, atm%bd, isg, ieg, jsg, jeg, proc_in=process)
707 
708  if (.not. atm%flagstruct%hydrostatic) then
709 
710  !w
711  call nested_grid_bc(atm%w(:,:,:), &
712  atm%parent_grid%w(:,:,:), &
713  atm%neststruct%nest_domain, atm%neststruct%ind_h, atm%neststruct%wt_h, 0, 0, &
714  atm%npx, atm%npy, npz, atm%bd, isg, ieg, jsg, jeg, proc_in=process)
715 
716 
717  !delz
718  call nested_grid_bc(atm%delz(:,:,:), &
719  atm%parent_grid%delz(:,:,:), &
720  atm%neststruct%nest_domain, atm%neststruct%ind_h, atm%neststruct%wt_h, 0, 0, &
721  atm%npx, atm%npy, npz, atm%bd, isg, ieg, jsg, jeg, proc_in=process)
722 
723  end if
724 
725 #endif
726 
727  if (atm%neststruct%child_proc) then
728  call nested_grid_bc(atm%u, atm%parent_grid%u(:,:,:), &
729  atm%neststruct%nest_domain, atm%neststruct%ind_u, atm%neststruct%wt_u, 0, 1, &
730  atm%npx, atm%npy, npz, atm%bd, isg, ieg, jsg, jeg, proc_in=process)
731  call nested_grid_bc(atm%v, atm%parent_grid%v(:,:,:), &
732  atm%neststruct%nest_domain, atm%neststruct%ind_v, atm%neststruct%wt_v, 1, 0, &
733  atm%npx, atm%npy, npz, atm%bd, isg, ieg, jsg, jeg, proc_in=process)
734  else
735  call nested_grid_bc(atm%parent_grid%u(:,:,:), &
736  atm%neststruct%nest_domain, 0, 1)
737  call nested_grid_bc(atm%parent_grid%v(:,:,:), &
738  atm%neststruct%nest_domain, 1, 0)
739  endif
740 
741 
742  if (process) then
743 !!$#ifdef SW_DYNAMICS
744 !!$ !ps: first level only
745 !!$ !This is only valid for shallow-water simulations
746 !!$ do j=jsd,jed
747 !!$ do i=isd,ied
748 !!$
749 !!$ Atm%ps(i,j) = Atm%delp(i,j,1)/grav
750 !!$
751 !!$ end do
752 !!$ end do
753 !!$#endif
754  call mpp_update_domains(atm%u, atm%v, atm%domain, gridtype=dgrid_ne)
 755  call mpp_update_domains(atm%w, atm%domain, complete=.true.) ! halo update needed for Rayleigh damping
756  endif
757 
758  call mpp_sync_self()
759 
760  end subroutine setup_nested_boundary_halo
761 
762  subroutine fill_nested_grid_topo_halo(Atm, proc_in)
764  type(fv_atmos_type), intent(INOUT) :: Atm
765  logical, intent(IN), OPTIONAL :: proc_in
766  integer :: isg, ieg, jsg, jeg
767 
768  if (.not. atm%neststruct%nested) return
769 
770  call mpp_get_global_domain( atm%parent_grid%domain, &
771  isg, ieg, jsg, jeg)
772 
773  if (is_master()) print*, ' FILLING NESTED GRID HALO WITH INTERPOLATED TERRAIN'
774  call nested_grid_bc(atm%phis, atm%parent_grid%phis, atm%neststruct%nest_domain, &
775  atm%neststruct%ind_h, atm%neststruct%wt_h, 0, 0, &
776  atm%npx, atm%npy, atm%bd, isg, ieg, jsg, jeg, proc_in=proc_in)
777 
778  end subroutine fill_nested_grid_topo_halo
779 
780 !!! We call this routine to fill the nested grid with topo so that we can do the boundary smoothing.
781 !!! Interior topography is then over-written in get_external_ic.
782  subroutine fill_nested_grid_topo(Atm, proc_in)
784  type(fv_atmos_type), intent(INOUT) :: Atm
785  logical, intent(IN), OPTIONAL :: proc_in
786  real, allocatable :: g_dat(:,:,:)
787  integer :: p, sending_proc
788  integer :: isd_p, ied_p, jsd_p, jed_p
789  integer :: isg, ieg, jsg,jeg
790 
791  logical :: process
792 
 793  !Default to processing locally unless the caller says otherwise.
 794  if (present(proc_in)) then
 795  process = proc_in
 796  else
 797  process = .true.
 798  endif
799 
800 !!$ if (.not. Atm%neststruct%nested) return
801 
802  call mpp_get_global_domain( atm%parent_grid%domain, &
803  isg, ieg, jsg, jeg)
804  call mpp_get_data_domain( atm%parent_grid%domain, &
805  isd_p, ied_p, jsd_p, jed_p )
806 
807  allocate(g_dat( isg:ieg, jsg:jeg, 1) )
808  call timing_on('COMM_TOTAL')
809 
810  !!! FIXME: For whatever reason this code CRASHES if the lower-left corner
811  !!! of the nested grid lies within the first PE of a grid tile.
812 
813  if (is_master() .and. .not. atm%flagstruct%external_ic ) print*, ' FILLING NESTED GRID INTERIOR WITH INTERPOLATED TERRAIN'
814 
815  sending_proc = atm%parent_grid%pelist(1) + (atm%neststruct%parent_tile-1)*atm%parent_grid%npes_per_tile
816  if (atm%neststruct%parent_proc .and. atm%neststruct%parent_tile == atm%parent_grid%tile) then
817  call mpp_global_field( &
818  atm%parent_grid%domain, &
819  atm%parent_grid%phis(isd_p:ied_p,jsd_p:jed_p), g_dat(isg:,jsg:,1), position=center)
820  if (mpp_pe() == sending_proc) then
821  do p=1,size(atm%pelist)
822  call mpp_send(g_dat,size(g_dat),atm%pelist(p))
823  enddo
824  endif
825  endif
826 
827  if (any(atm%pelist == mpp_pe())) then
828  call mpp_recv(g_dat, size(g_dat), sending_proc)
829  endif
830 
831  call timing_off('COMM_TOTAL')
832  if (process) call fill_nested_grid(atm%phis, g_dat(isg:,jsg:,1), &
833  atm%neststruct%ind_h, atm%neststruct%wt_h, &
834  0, 0, isg, ieg, jsg, jeg, atm%bd)
835 
836  call mpp_sync_self
837 
838  deallocate(g_dat)
839 
840 
841  end subroutine fill_nested_grid_topo
842 
843  subroutine fill_nested_grid_data(Atm, proc_in)
845  type(fv_atmos_type), intent(INOUT) :: Atm(:) !Only intended to be one element; needed for cubed_sphere_terrain
846  logical, intent(IN), OPTIONAL :: proc_in
847  real, allocatable :: g_dat(:,:,:), pt_coarse(:,:,:)
848  integer :: i,j,k,nq, sphum, ncnst, istart, iend, npz
849  integer :: isc, iec, jsc, jec, isd, ied, jsd, jed
850  integer :: isd_p, ied_p, jsd_p, jed_p, isc_p, iec_p, jsc_p, jec_p
851  integer :: isg, ieg, jsg,jeg, npx_p, npy_p
852  integer :: isg_n, ieg_n, jsg_n, jeg_n, npx_n, npy_n
853  real zvir, gh0, p1(2), p2(2), r, r0
854 
855  integer :: p, sending_proc, gid
856  logical process
857 
858  if (present(proc_in)) then
859  process = proc_in
860  else
861  process = .true.
862  endif
863 
864  isd = atm(1)%bd%isd
865  ied = atm(1)%bd%ied
866  jsd = atm(1)%bd%jsd
867  jed = atm(1)%bd%jed
868  ncnst = atm(1)%ncnst
869  isc = atm(1)%bd%isc; iec = atm(1)%bd%iec; jsc = atm(1)%bd%jsc; jec = atm(1)%bd%jec
870  npz = atm(1)%npz
871 
872  gid = mpp_pe()
873 
874  sending_proc = atm(1)%parent_grid%pelist(1) + (atm(1)%neststruct%parent_tile-1)*atm(1)%parent_grid%npes_per_tile
875 
876  call mpp_get_data_domain( atm(1)%parent_grid%domain, &
877  isd_p, ied_p, jsd_p, jed_p )
878  call mpp_get_compute_domain( atm(1)%parent_grid%domain, &
879  isc_p, iec_p, jsc_p, jec_p )
880  call mpp_get_global_domain( atm(1)%parent_grid%domain, &
881  isg, ieg, jsg, jeg, xsize=npx_p, ysize=npy_p)
882 
883  if (process) then
884 
885  call mpp_error(note, "FILLING NESTED GRID DATA")
886 
887  else
888 
889  call mpp_error(note, "SENDING TO FILL NESTED GRID DATA")
890 
891  endif
892 
893  !delp
894 
895  allocate(g_dat( isg:ieg, jsg:jeg, npz) )
896 
897  call timing_on('COMM_TOTAL')
898 
899  !Call mpp_global_field on the procs that have the required data.
900  !Then broadcast from the head PE to the receiving PEs
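 !The same gather/relay pattern repeats for each field below:
 !  (1) parent-tile PEs assemble the global field via mpp_global_field;
 !  (2) the designated sending_proc mpp_send()s it to every nest PE;
 !  (3) nest PEs mpp_recv() it and, when process=.true., interpolate it
 !      onto the nest with fill_nested_grid.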
901  if (atm(1)%neststruct%parent_proc .and. atm(1)%neststruct%parent_tile == atm(1)%parent_grid%tile) then
902  call mpp_global_field( &
903  atm(1)%parent_grid%domain, &
904  atm(1)%parent_grid%delp(isd_p:ied_p,jsd_p:jed_p,:), g_dat, position=center)
905  if (gid == sending_proc) then !crazy logic but what we have for now
906  do p=1,size(atm(1)%pelist)
907  call mpp_send(g_dat,size(g_dat),atm(1)%pelist(p))
908  enddo
909  endif
910  endif
911  if (any(atm(1)%pelist == gid)) then
912  call mpp_recv(g_dat, size(g_dat), sending_proc)
913  endif
914 
915  call timing_off('COMM_TOTAL')
916  if (process) call fill_nested_grid(atm(1)%delp, g_dat, &
917  atm(1)%neststruct%ind_h, atm(1)%neststruct%wt_h, &
918  0, 0, isg, ieg, jsg, jeg, npz, atm(1)%bd)
919 
920  call mpp_sync_self
921 
922  !tracers
923  do nq=1,ncnst
924 
925  call timing_on('COMM_TOTAL')
926 
927  if (any(atm(1)%parent_grid%pelist == gid) .and. atm(1)%neststruct%parent_tile == atm(1)%parent_grid%tile) then
928  call mpp_global_field( &
929  atm(1)%parent_grid%domain, &
930  atm(1)%parent_grid%q(isd_p:ied_p,jsd_p:jed_p,:,nq), g_dat, position=center)
931  if (gid == sending_proc) then
932  do p=1,size(atm(1)%pelist)
933  call mpp_send(g_dat,size(g_dat),atm(1)%pelist(p))
934  enddo
935  endif
936  endif
937  if (any(atm(1)%pelist == gid)) then
938  call mpp_recv(g_dat, size(g_dat), sending_proc)
939  endif
940 
941  call timing_off('COMM_TOTAL')
942  if (process) call fill_nested_grid(atm(1)%q(isd:ied,jsd:jed,:,nq), g_dat, &
943  atm(1)%neststruct%ind_h, atm(1)%neststruct%wt_h, &
944  0, 0, isg, ieg, jsg, jeg, npz, atm(1)%bd)
945 
946  call mpp_sync_self
947 
948  end do
949 
950  !Note that we do NOT fill in phis (surface geopotential), which should
951  !be computed exactly instead of being interpolated.
952 
953 
954 #ifndef SW_DYNAMICS
955  !pt --- actually temperature
956 
957  call timing_on('COMM_TOTAL')
958 
959  if (any(atm(1)%parent_grid%pelist == gid) .and. atm(1)%neststruct%parent_tile == atm(1)%parent_grid%tile) then
960  call mpp_global_field( &
961  atm(1)%parent_grid%domain, &
962  atm(1)%parent_grid%pt(isd_p:ied_p,jsd_p:jed_p,:), g_dat, position=center)
963  if (gid == sending_proc) then
964  do p=1,size(atm(1)%pelist)
965  call mpp_send(g_dat,size(g_dat),atm(1)%pelist(p))
966  enddo
967  endif
968  endif
969  if (any(atm(1)%pelist == gid)) then
970  call mpp_recv(g_dat, size(g_dat), sending_proc)
971  endif
972 
973  call mpp_sync_self
974 
975  call timing_off('COMM_TOTAL')
976  if (process) call fill_nested_grid(atm(1)%pt, g_dat, &
977  atm(1)%neststruct%ind_h, atm(1)%neststruct%wt_h, &
978  0, 0, isg, ieg, jsg, jeg, npz, atm(1)%bd)
979 
980 #ifdef MAPL_MODE
981  sphum = 1
982 #else
983  if ( atm(1)%flagstruct%nwat > 0 ) then
984  sphum = get_tracer_index(model_atmos, 'sphum')
985  else
986  sphum = 1
987  endif
988 #endif
989  if ( atm(1)%parent_grid%flagstruct%adiabatic .or. atm(1)%parent_grid%flagstruct%do_Held_Suarez ) then
990  zvir = 0. ! no virtual effect
991  else
992  zvir = rvgas/rdgas - 1.
993  endif
994 
995  call timing_on('COMM_TOTAL')
996 
997  if (any(atm(1)%parent_grid%pelist == gid) .and. atm(1)%neststruct%parent_tile == atm(1)%parent_grid%tile) then
998  call mpp_global_field( &
999  atm(1)%parent_grid%domain, &
1000  atm(1)%parent_grid%pkz(isc_p:iec_p,jsc_p:jec_p,:), g_dat, position=center)
1001  if (gid == sending_proc) then
1002  do p=1,size(atm(1)%pelist)
1003  call mpp_send(g_dat,size(g_dat),atm(1)%pelist(p))
1004  enddo
1005  endif
1006  endif
1007  if (any(atm(1)%pelist == gid)) then
1008  call mpp_recv(g_dat, size(g_dat), sending_proc)
1009  endif
1010 
1011  call mpp_sync_self
1012 
1013  call timing_off('COMM_TOTAL')
1014  if (process) then
1015  allocate(pt_coarse(isd:ied,jsd:jed,npz))
1016  call fill_nested_grid(pt_coarse, g_dat, &
1017  atm(1)%neststruct%ind_h, atm(1)%neststruct%wt_h, &
1018  0, 0, isg, ieg, jsg, jeg, npz, atm(1)%bd)
1019 
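 !In the four halo strips below (west, south, east, north), the
 !interpolated parent temperature in Atm(1)%pt is converted to the
 !solver's internal form cp_air*T*(1+zvir*q_sphum)/pkz, dividing by the
 !interpolated parent pkz held in pt_coarse.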
1020  if (atm(1)%bd%is == 1) then
1021  do k=1,npz
1022  do j=atm(1)%bd%jsd,atm(1)%bd%jed
1023  do i=atm(1)%bd%isd,0
1024  atm(1)%pt(i,j,k) = cp_air*atm(1)%pt(i,j,k)/pt_coarse(i,j,k)*(1.+zvir*atm(1)%q(i,j,k,sphum))
1025  end do
1026  end do
1027  end do
1028  end if
1029 
1030  if (atm(1)%bd%js == 1) then
1031  if (atm(1)%bd%is == 1) then
1032  istart = atm(1)%bd%is
1033  else
1034  istart = atm(1)%bd%isd
1035  end if
1036  if (atm(1)%bd%ie == atm(1)%npx-1) then
1037  iend = atm(1)%bd%ie
1038  else
1039  iend = atm(1)%bd%ied
1040  end if
1041 
1042  do k=1,npz
1043  do j=atm(1)%bd%jsd,0
1044  do i=istart,iend
1045  atm(1)%pt(i,j,k) = cp_air*atm(1)%pt(i,j,k)/pt_coarse(i,j,k)*(1.+zvir*atm(1)%q(i,j,k,sphum))
1046  end do
1047  end do
1048  end do
1049  end if
1050 
1051  if (atm(1)%bd%ie == atm(1)%npx-1) then
1052  do k=1,npz
1053  do j=atm(1)%bd%jsd,atm(1)%bd%jed
1054  do i=atm(1)%npx,atm(1)%bd%ied
1055  atm(1)%pt(i,j,k) = cp_air*atm(1)%pt(i,j,k)/pt_coarse(i,j,k)*(1.+zvir*atm(1)%q(i,j,k,sphum))
1056  end do
1057  end do
1058  end do
1059  end if
1060 
1061  if (atm(1)%bd%je == atm(1)%npy-1) then
1062  if (atm(1)%bd%is == 1) then
1063  istart = atm(1)%bd%is
1064  else
1065  istart = atm(1)%bd%isd
1066  end if
1067  if (atm(1)%bd%ie == atm(1)%npx-1) then
1068  iend = atm(1)%bd%ie
1069  else
1070  iend = atm(1)%bd%ied
1071  end if
1072 
1073  do k=1,npz
1074  do j=atm(1)%npy,atm(1)%bd%jed
1075  do i=istart,iend
1076  atm(1)%pt(i,j,k) = cp_air*atm(1)%pt(i,j,k)/pt_coarse(i,j,k)*(1.+zvir*atm(1)%q(i,j,k,sphum))
1077  end do
1078  end do
1079  end do
1080  end if
1081 
1082  deallocate(pt_coarse)
1083 
1084  end if
1085 
1086  if (.not. atm(1)%flagstruct%hydrostatic) then
1087 
1088  !delz
1089  call timing_on('COMM_TOTAL')
1090 
1091  if (any(atm(1)%parent_grid%pelist == gid) .and. atm(1)%neststruct%parent_tile == atm(1)%parent_grid%tile) then
1092  call mpp_global_field( &
1093  atm(1)%parent_grid%domain, &
1094  atm(1)%parent_grid%delz(isd_p:ied_p,jsd_p:jed_p,:), g_dat, position=center)
1095  if (gid == sending_proc) then
1096  do p=1,size(atm(1)%pelist)
1097  call mpp_send(g_dat,size(g_dat),atm(1)%pelist(p))
1098  enddo
1099  endif
1100  endif
1101  if (any(atm(1)%pelist == gid)) then
1102  call mpp_recv(g_dat, size(g_dat), sending_proc)
1103  endif
1104 
1105  call mpp_sync_self
1106 
1107  call timing_off('COMM_TOTAL')
1108  if (process) call fill_nested_grid(atm(1)%delz, g_dat, &
1109  atm(1)%neststruct%ind_h, atm(1)%neststruct%wt_h, &
1110  0, 0, isg, ieg, jsg, jeg, npz, atm(1)%bd)
1111 
1112  !w
1113 
1114  call timing_on('COMM_TOTAL')
1115 
1116  if (any(atm(1)%parent_grid%pelist == gid) .and. atm(1)%neststruct%parent_tile == atm(1)%parent_grid%tile) then
1117  call mpp_global_field( &
1118  atm(1)%parent_grid%domain, &
1119  atm(1)%parent_grid%w(isd_p:ied_p,jsd_p:jed_p,:), g_dat, position=center)
1120  if (gid == sending_proc) then
1121  do p=1,size(atm(1)%pelist)
1122  call mpp_send(g_dat,size(g_dat),atm(1)%pelist(p))
1123  enddo
1124  endif
1125  endif
1126  if (any(atm(1)%pelist == gid)) then
1127  call mpp_recv(g_dat, size(g_dat), sending_proc)
1128  endif
1129 
1130  call mpp_sync_self
1131 
1132  call timing_off('COMM_TOTAL')
1133  if (process) call fill_nested_grid(atm(1)%w, g_dat, &
1134  atm(1)%neststruct%ind_h, atm(1)%neststruct%wt_h, &
1135  0, 0, isg, ieg, jsg, jeg, npz, atm(1)%bd)
1136  !
1137 
1138  end if
1139 
1140 #endif
1141  deallocate(g_dat)
1142 
1143  !u
1144 
1145  allocate(g_dat( isg:ieg, jsg:jeg+1, npz) )
 1146  g_dat = 1.e25 ! large sentinel; exposes any points not overwritten by the gather below
1147 
1148  call timing_on('COMM_TOTAL')
1149 
1150  if (any(atm(1)%parent_grid%pelist == gid) .and. atm(1)%neststruct%parent_tile == atm(1)%parent_grid%tile) then
1151  call mpp_global_field( &
1152  atm(1)%parent_grid%domain, &
1153  atm(1)%parent_grid%u(isd_p:ied_p,jsd_p:jed_p+1,:), g_dat, position=north)
1154  if (gid == sending_proc) then
1155  do p=1,size(atm(1)%pelist)
1156  call mpp_send(g_dat,size(g_dat),atm(1)%pelist(p))
1157  enddo
1158  endif
1159  endif
1160  if (any(atm(1)%pelist == gid)) then
1161  call mpp_recv(g_dat, size(g_dat), sending_proc)
1162  endif
1163 
1164  call mpp_sync_self
1165 
1166  call timing_off('COMM_TOTAL')
1167  call mpp_sync_self
1168  if (process) call fill_nested_grid(atm(1)%u, g_dat, &
1169  atm(1)%neststruct%ind_u, atm(1)%neststruct%wt_u, &
1170  0, 1, isg, ieg, jsg, jeg, npz, atm(1)%bd)
1171  deallocate(g_dat)
1172 
1173  !v
1174 
1175  allocate(g_dat( isg:ieg+1, jsg:jeg, npz) )
 1176  g_dat = 1.e25 ! large sentinel; exposes any points not overwritten by the gather below
1177 
1178  call timing_on('COMM_TOTAL')
1179 
1180  if (any(atm(1)%parent_grid%pelist == gid) .and. atm(1)%neststruct%parent_tile == atm(1)%parent_grid%tile) then
1181  call mpp_global_field( &
1182  atm(1)%parent_grid%domain, &
1183  atm(1)%parent_grid%v(isd_p:ied_p+1,jsd_p:jed_p,:), g_dat, position=east)
1184  if (gid == sending_proc) then
1185  do p=1,size(atm(1)%pelist)
1186  call mpp_send(g_dat,size(g_dat),atm(1)%pelist(p))
1187  enddo
1188  endif
1189  endif
1190  if (any(atm(1)%pelist == gid)) then
1191  call mpp_recv(g_dat, size(g_dat), sending_proc)
1192  endif
1193 
1194  call mpp_sync_self
1195  call timing_off('COMM_TOTAL')
1196 
1197  if (process) call fill_nested_grid(atm(1)%v, g_dat, &
1198  atm(1)%neststruct%ind_v, atm(1)%neststruct%wt_v, &
1199  1, 0, isg, ieg, jsg, jeg, npz, atm(1)%bd)
1200 
1201  deallocate(g_dat)
1202 
1203  end subroutine fill_nested_grid_data
1204 
1205  subroutine fill_nested_grid_data_end(Atm, proc_in)
1207  type(fv_atmos_type), intent(INOUT) :: Atm
1208  logical, intent(IN), OPTIONAL :: proc_in
1209  real, allocatable :: g_dat(:,:,:), pt_coarse(:,:,:)
1210  integer :: i,j,k,nq, sphum, ncnst, istart, iend, npz
1211  integer :: isc, iec, jsc, jec, isd, ied, jsd, jed
1212  integer :: isd_p, ied_p, jsd_p, jed_p, isc_p, iec_p, jsc_p, jec_p
1213  integer :: isg, ieg, jsg,jeg, npx_p, npy_p
1214  integer :: isg_n, ieg_n, jsg_n, jeg_n, npx_n, npy_n
1215  real zvir
1216 
1217  integer :: p , sending_proc
1218  logical :: process
1219 
1220  if (present(proc_in)) then
1221  process = proc_in
1222  else
1223  process = .true.
1224  endif
1225 
1226  isd = atm%bd%isd
1227  ied = atm%bd%ied
1228  jsd = atm%bd%jsd
1229  jed = atm%bd%jed
1230  ncnst = atm%ncnst
1231  isc = atm%bd%isc; iec = atm%bd%iec; jsc = atm%bd%jsc; jec = atm%bd%jec
1232  npz = atm%npz
1233 
1234  isd_p = atm%parent_grid%bd%isd
1235  ied_p = atm%parent_grid%bd%ied
1236  jsd_p = atm%parent_grid%bd%jsd
1237  jed_p = atm%parent_grid%bd%jed
1238  isc_p = atm%parent_grid%bd%isc
1239  iec_p = atm%parent_grid%bd%iec
1240  jsc_p = atm%parent_grid%bd%jsc
1241  jec_p = atm%parent_grid%bd%jec
1242  sending_proc = atm%parent_grid%pelist(1) + (atm%neststruct%parent_tile-1)*atm%parent_grid%npes_per_tile
1243 
1244  call mpp_get_global_domain( atm%parent_grid%domain, &
1245  isg, ieg, jsg, jeg, xsize=npx_p, ysize=npy_p)
1246 
1247 
 1248  !Now update the coarse grid with the nested-grid terrain,
 1249  !to ensure consistency between the two grids.
1250  if ( process ) call mpp_update_domains(atm%phis, atm%domain, complete=.true.)
1251  if (atm%neststruct%twowaynest) then
1252  if (any(atm%parent_grid%pelist == mpp_pe()) .or. atm%neststruct%child_proc) then
1253  call update_coarse_grid(atm%parent_grid%phis, &
1254  atm%phis, atm%neststruct%nest_domain, &
1255  atm%neststruct%ind_update_h(isd_p:ied_p+1,jsd_p:jed_p+1,:), &
1256  atm%gridstruct%dx, atm%gridstruct%dy, atm%gridstruct%area, &
1257  isd_p, ied_p, jsd_p, jed_p, isd, ied, jsd, jed, &
1258  atm%neststruct%isu, atm%neststruct%ieu, atm%neststruct%jsu, atm%neststruct%jeu, &
1259  atm%npx, atm%npy, 0, 0, &
1260  atm%neststruct%refinement, atm%neststruct%nestupdate, 0, 0, &
1261  atm%neststruct%parent_proc, atm%neststruct%child_proc, atm%parent_grid)
1262  atm%parent_grid%neststruct%parent_of_twoway = .true.
1263  !NOTE: mpp_update_nest_coarse (and by extension, update_coarse_grid) does **NOT** pass data
1264  !allowing a two-way update into the halo of the coarse grid. It only passes data so that the INTERIOR
1265  ! can have the two-way update. Thus, on the nest's cold start, if this update_domains call is not done,
1266  ! the coarse grid will have the wrong topography in the halo, which will CHANGE when a restart is done!!
1267  if (atm%neststruct%parent_proc) call mpp_update_domains(atm%parent_grid%phis, atm%parent_grid%domain)
1268  end if
1269 
1270  end if
1271 
1272 
1273 
1274 
1275 #ifdef SW_DYNAMICS
1276 !!$ !ps: first level only
1277 !!$ !This is only valid for shallow-water simulations
1278 !!$ if (process) then
1279 !!$ do j=jsd,jed
1280 !!$ do i=isd,ied
1281 !!$
1282 !!$ Atm%ps(i,j) = Atm%delp(i,j,1)/grav
1283 !!$
1284 !!$ end do
1285 !!$ end do
1286 !!$ endif
1287 #else
1288  !Sets up flow to be initially hydrostatic (shouldn't be the case for all ICs?)
1289  if (process) call p_var(npz, isc, iec, jsc, jec, atm%ptop, ptop_min, atm%delp, &
1290  atm%delz, atm%pt, atm%ps, &
1291  atm%pe, atm%peln, atm%pk, atm%pkz, kappa, atm%q, &
1292  atm%ng, ncnst, atm%gridstruct%area_64, atm%flagstruct%dry_mass, .false., atm%flagstruct%mountain, &
1293  atm%flagstruct%moist_phys, .true., atm%flagstruct%nwat, atm%domain)
1294 #endif
1295 
1296 
1297 
1298  end subroutine fill_nested_grid_data_end
1299 
1300 
1301  !#######################################################################
1302  ! <SUBROUTINE NAME="fv_write_restart">
1303  ! <DESCRIPTION>
1304  ! Write out restart files registered through register_restart_file
1305  ! </DESCRIPTION>
1306  subroutine fv_write_restart(Atm, grids_on_this_pe, timestamp)
1307  type(fv_atmos_type), intent(inout) :: atm(:)
1308  character(len=*), intent(in) :: timestamp
1309  logical, intent(IN) :: grids_on_this_pe(:)
1310  integer n
1311 
1312  call fv_io_write_restart(atm, grids_on_this_pe, timestamp)
1313  do n=1,size(atm)
1314  if (atm(n)%neststruct%nested .and. grids_on_this_pe(n)) then
1315  call fv_io_write_bcs(atm(n))
1316  endif
1317  enddo
1318 
1319  end subroutine fv_write_restart
1320  ! </SUBROUTINE>
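 ! Illustrative use (the timestamp string is hypothetical; it is simply
 ! forwarded to fv_io_write_restart):
 !     call fv_write_restart(Atm, grids_on_this_pe, '20160801.120000')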
1321 
1322 
1323 
1324  !#####################################################################
1325  ! <SUBROUTINE NAME="fv_restart_end">
1326  !
1327  ! <DESCRIPTION>
 1328  ! Finalize the FV core restart facilities: write final restart files and report diagnostics
1329  ! </DESCRIPTION>
1330  !
1331  subroutine fv_restart_end(Atm, grids_on_this_pe)
1332  type(fv_atmos_type), intent(inout) :: atm(:)
1333  logical, intent(INOUT) :: grids_on_this_pe(:)
1334 
1335  integer :: isc, iec, jsc, jec
1336  integer :: iq, n, ntileme, ncnst, ntprog, ntdiag
1337  integer :: isd, ied, jsd, jed, npz
1338  integer :: unit
1339  integer :: file_unit
1340  integer, allocatable :: pelist(:)
1341  character(len=128):: tracer_name
1342  character(len=3):: gn
1343 
1344 
1345  ntileme = size(atm(:))
1346 
1347  do n = 1, ntileme
1348 
1349  if (.not. grids_on_this_pe(n)) then
1350  cycle
1351  endif
1352 
1353  call mpp_set_current_pelist(atm(n)%pelist)
1354 
1355  isc = atm(n)%bd%isc; iec = atm(n)%bd%iec; jsc = atm(n)%bd%jsc; jec = atm(n)%bd%jec
1356 
1357  isd = atm(n)%bd%isd
1358  ied = atm(n)%bd%ied
1359  jsd = atm(n)%bd%jsd
1360  jed = atm(n)%bd%jed
1361  npz = atm(n)%npz
1362  ncnst = atm(n)%ncnst
1363  ntprog = size(atm(n)%q,4)
1364  ntdiag = size(atm(n)%qdiag,4)
1365 
1366  if (atm(n)%grid_number > 1) then
1367  write(gn,'(A2, I1)') " g", atm(n)%grid_number
1368  else
1369  gn = ''
1370  end if
1371 
1372  unit = stdout()
1373  write(unit,*)
1374  write(unit,*) 'fv_restart_end u ', trim(gn),' = ', mpp_chksum(atm(n)%u(isc:iec,jsc:jec,:))
1375  write(unit,*) 'fv_restart_end v ', trim(gn),' = ', mpp_chksum(atm(n)%v(isc:iec,jsc:jec,:))
1376  if ( .not. atm(n)%flagstruct%hydrostatic ) &
1377  write(unit,*) 'fv_restart_end w ', trim(gn),' = ', mpp_chksum(atm(n)%w(isc:iec,jsc:jec,:))
1378  write(unit,*) 'fv_restart_end delp', trim(gn),' = ', mpp_chksum(atm(n)%delp(isc:iec,jsc:jec,:))
1379  write(unit,*) 'fv_restart_end phis', trim(gn),' = ', mpp_chksum(atm(n)%phis(isc:iec,jsc:jec))
1380 #ifndef SW_DYNAMICS
1381  write(unit,*) 'fv_restart_end pt ', trim(gn),' = ', mpp_chksum(atm(n)%pt(isc:iec,jsc:jec,:))
1382  if (ntprog>0) &
1383  write(unit,*) 'fv_restart_end q(prog) nq ', trim(gn),' =',ntprog, mpp_chksum(atm(n)%q(isc:iec,jsc:jec,:,:))
1384  if (ntdiag>0) &
1385  write(unit,*) 'fv_restart_end q(diag) nq ', trim(gn),' =',ntdiag, mpp_chksum(atm(n)%qdiag(isc:iec,jsc:jec,:,:))
1386  do iq=1,min(17, ntprog) ! Check up to 17 tracers
1387  call get_tracer_names(model_atmos, iq, tracer_name)
1388  write(unit,*) 'fv_restart_end '//trim(tracer_name)// trim(gn),' = ', mpp_chksum(atm(n)%q(isc:iec,jsc:jec,:,iq))
1389  enddo
1390 
1391 !---------------
1392 ! Check Min/Max:
1393 !---------------
1394 ! call prt_maxmin('ZS', Atm(n)%phis, isc, iec, jsc, jec, Atm(n)%ng, 1, 1./grav)
1395  call pmaxmn_g('ZS', atm(n)%phis, isc, iec, jsc, jec, 1, 1./grav, atm(n)%gridstruct%area_64, atm(n)%domain)
1396  call pmaxmn_g('PS ', atm(n)%ps, isc, iec, jsc, jec, 1, 0.01 , atm(n)%gridstruct%area_64, atm(n)%domain)
1397  call prt_maxmin('PS*', atm(n)%ps, isc, iec, jsc, jec, atm(n)%ng, 1, 0.01)
1398  call prt_maxmin('U ', atm(n)%u(isd:ied,jsd:jed,1:npz), isc, iec, jsc, jec, atm(n)%ng, npz, 1.)
1399  call prt_maxmin('V ', atm(n)%v(isd:ied,jsd:jed,1:npz), isc, iec, jsc, jec, atm(n)%ng, npz, 1.)
1400  if ( .not. atm(n)%flagstruct%hydrostatic ) &
1401  call prt_maxmin('W ', atm(n)%w , isc, iec, jsc, jec, atm(n)%ng, npz, 1.)
1402  call prt_maxmin('T ', atm(n)%pt, isc, iec, jsc, jec, atm(n)%ng, npz, 1.)
1403  do iq=1, ntprog
1404  call get_tracer_names ( model_atmos, iq, tracer_name )
1405  call pmaxmn_g(trim(tracer_name), atm(n)%q(isd:ied,jsd:jed,1:npz,iq:iq), isc, iec, jsc, jec, npz, &
1406  1., atm(n)%gridstruct%area_64, atm(n)%domain)
1407  enddo
1408 ! Write4 energy correction term
1409 #endif
1410 
1411  enddo
1412 
1413  call fv_io_write_restart(atm, grids_on_this_pe)
1414  do n=1,ntileme
1415  if (atm(n)%neststruct%nested .and. grids_on_this_pe(n)) call fv_io_write_bcs(atm(n))
1416  end do
1417 
1418  module_is_initialized = .false.
1419 
1420 #ifdef EFLUX_OUT
1421  if( is_master() ) then
1422  write(*,*) steps, 'Mean equivalent Heat flux for this integration period=',atm(1)%idiag%efx_sum/real(max(1,Atm(1)%idiag%steps)), &
1423  'Mean nesting-related flux for this integration period=',atm(1)%idiag%efx_sum_nest/real(max(1,Atm(1)%idiag%steps)), &
1424  'Mean mountain torque=',atm(1)%idiag%mtq_sum/real(max(1,atm(1)%idiag%steps))
1425  file_unit = get_unit()
1426  open (unit=file_unit, file='e_flux.data', form='unformatted',status='unknown', access='sequential')
1427  do n=1,steps
1428  write(file_unit) atm(1)%idiag%efx(n)
1429  write(file_unit) atm(1)%idiag%mtq(n) ! time series global mountain torque
1430  !write(file_unit) Atm(1)%idiag%efx_nest(n)
1431  enddo
1432  close(unit=file_unit)
1433  endif
1434 #endif
1435 
1436  end subroutine fv_restart_end
1437  ! </SUBROUTINE> NAME="fv_restart_end"
1438 
1439  subroutine d2c_setup(u, v, &
1440  ua, va, &
1441  uc, vc, dord4, &
1442  isd,ied,jsd,jed, is,ie,js,je, npx,npy, &
1443  grid_type, nested, &
1444  se_corner, sw_corner, ne_corner, nw_corner, &
1445  rsin_u,rsin_v,cosa_s,rsin2 )
1447  logical, intent(in):: dord4
1448  real, intent(in) :: u(isd:ied,jsd:jed+1)
1449  real, intent(in) :: v(isd:ied+1,jsd:jed)
1450  real, intent(out), dimension(isd:ied ,jsd:jed ):: ua
1451  real, intent(out), dimension(isd:ied ,jsd:jed ):: va
1452  real, intent(out), dimension(isd:ied+1,jsd:jed ):: uc
1453  real, intent(out), dimension(isd:ied ,jsd:jed+1):: vc
1454  integer, intent(in) :: isd,ied,jsd,jed, is,ie,js,je, npx,npy,grid_type
1455  logical, intent(in) :: nested, se_corner, sw_corner, ne_corner, nw_corner
1456  real, intent(in) :: rsin_u(isd:ied+1,jsd:jed)
1457  real, intent(in) :: rsin_v(isd:ied,jsd:jed+1)
1458  real, intent(in) :: cosa_s(isd:ied,jsd:jed)
1459  real, intent(in) :: rsin2(isd:ied,jsd:jed)
1460 
1461 ! Local
1462  real, dimension(isd:ied,jsd:jed):: utmp, vtmp
1463  real, parameter:: t11=27./28., t12=-13./28., t13=3./7., t14=6./7., t15=3./28.
1464  real, parameter:: a1 = 0.5625
1465  real, parameter:: a2 = -0.0625
1466  real, parameter:: c1 = -2./14.
1467  real, parameter:: c2 = 11./14.
1468  real, parameter:: c3 = 5./14.
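 !a1, a2 (= 9/16, -1/16) are the standard 4th-order midpoint
 !interpolation weights; c1, c2, c3 (= -2/14, 11/14, 5/14, summing to 1)
 !give the one-sided 3-point interpolation used near tile edges, and the
 !t-coefficients handle the edge row itself.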
1469  integer npt, i, j, ifirst, ilast, id
1470 
1471  if ( dord4) then
1472  id = 1
1473  else
1474  id = 0
1475  endif
1476 
1477 
1478  if (grid_type < 3 .and. .not. nested) then
1479  npt = 4
1480  else
1481  npt = -2
1482  endif
1483 
1484  if ( nested) then
1485 
1486  do j=jsd+1,jed-1
1487  do i=isd,ied
1488  utmp(i,j) = a2*(u(i,j-1)+u(i,j+2)) + a1*(u(i,j)+u(i,j+1))
1489  enddo
1490  enddo
1491  do i=isd,ied
1492  j = jsd
1493  utmp(i,j) = 0.5*(u(i,j)+u(i,j+1))
1494  j = jed
1495  utmp(i,j) = 0.5*(u(i,j)+u(i,j+1))
1496  end do
1497 
1498  do j=jsd,jed
1499  do i=isd+1,ied-1
1500  vtmp(i,j) = a2*(v(i-1,j)+v(i+2,j)) + a1*(v(i,j)+v(i+1,j))
1501  enddo
1502  i = isd
1503  vtmp(i,j) = 0.5*(v(i,j)+v(i+1,j))
1504  i = ied
1505  vtmp(i,j) = 0.5*(v(i,j)+v(i+1,j))
1506  enddo
1507 
1508  do j=jsd,jed
1509  do i=isd,ied
1510  ua(i,j) = (utmp(i,j)-vtmp(i,j)*cosa_s(i,j)) * rsin2(i,j)
1511  va(i,j) = (vtmp(i,j)-utmp(i,j)*cosa_s(i,j)) * rsin2(i,j)
1512  enddo
1513  enddo
1514 
1515  else
1516 
1517  !----------
1518  ! Interior:
1519  !----------
1520  utmp = 0.
1521  vtmp = 0.
1522 
1523 
1524  do j=max(npt,js-1),min(npy-npt,je+1)
1525  do i=max(npt,isd),min(npx-npt,ied)
1526  utmp(i,j) = a2*(u(i,j-1)+u(i,j+2)) + a1*(u(i,j)+u(i,j+1))
1527  enddo
1528  enddo
1529  do j=max(npt,jsd),min(npy-npt,jed)
1530  do i=max(npt,is-1),min(npx-npt,ie+1)
1531  vtmp(i,j) = a2*(v(i-1,j)+v(i+2,j)) + a1*(v(i,j)+v(i+1,j))
1532  enddo
1533  enddo
1534 
1535  !----------
1536  ! Edges:
1537  !----------
1538  if (grid_type < 3) then
1539 
1540  if ( js==1 .or. jsd<npt) then
1541  do j=jsd,npt-1
1542  do i=isd,ied
1543  utmp(i,j) = 0.5*(u(i,j) + u(i,j+1))
1544  vtmp(i,j) = 0.5*(v(i,j) + v(i+1,j))
1545  enddo
1546  enddo
1547  endif
1548 
1549  if ( (je+1)==npy .or. jed>=(npy-npt)) then
1550  do j=npy-npt+1,jed
1551  do i=isd,ied
1552  utmp(i,j) = 0.5*(u(i,j) + u(i,j+1))
1553  vtmp(i,j) = 0.5*(v(i,j) + v(i+1,j))
1554  enddo
1555  enddo
1556  endif
1557 
1558  if ( is==1 .or. isd<npt ) then
1559  do j=max(npt,jsd),min(npy-npt,jed)
1560  do i=isd,npt-1
1561  utmp(i,j) = 0.5*(u(i,j) + u(i,j+1))
1562  vtmp(i,j) = 0.5*(v(i,j) + v(i+1,j))
1563  enddo
1564  enddo
1565  endif
1566 
1567  if ( (ie+1)==npx .or. ied>=(npx-npt)) then
1568  do j=max(npt,jsd),min(npy-npt,jed)
1569  do i=npx-npt+1,ied
1570  utmp(i,j) = 0.5*(u(i,j) + u(i,j+1))
1571  vtmp(i,j) = 0.5*(v(i,j) + v(i+1,j))
1572  enddo
1573  enddo
1574  endif
1575 
1576  endif
1577  do j=js-1-id,je+1+id
1578  do i=is-1-id,ie+1+id
1579  ua(i,j) = (utmp(i,j)-vtmp(i,j)*cosa_s(i,j)) * rsin2(i,j)
1580  va(i,j) = (vtmp(i,j)-utmp(i,j)*cosa_s(i,j)) * rsin2(i,j)
1581  enddo
1582  enddo
1583 
1584  end if
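 ! cosa_s is the cosine of the angle between the local covariant basis
 ! vectors at cell centers and rsin2 = 1/sin(alpha)**2, so the loops above
 ! apply what appears to be the standard covariant -> contravariant
 ! transform on a non-orthogonal grid:
 !   u_contra = (u_cov - v_cov*cos(alpha)) / sin(alpha)**2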
1585 
1586 ! A -> C
1587 !--------------
1588 ! Fix the edges
1589 !--------------
1590 ! Xdir:
1591  if( sw_corner ) then
1592  do i=-2,0
1593  utmp(i,0) = -vtmp(0,1-i)
1594  enddo
1595  endif
1596  if( se_corner ) then
1597  do i=0,2
1598  utmp(npx+i,0) = vtmp(npx,i+1)
1599  enddo
1600  endif
1601  if( ne_corner ) then
1602  do i=0,2
1603  utmp(npx+i,npy) = -vtmp(npx,je-i)
1604  enddo
1605  endif
1606  if( nw_corner ) then
1607  do i=-2,0
1608  utmp(i,npy) = vtmp(0,je+i)
1609  enddo
1610  endif
1611 
1612  if (grid_type < 3 .and. .not. nested) then
1613  ifirst = max(3, is-1)
1614  ilast = min(npx-2,ie+2)
1615  else
1616  ifirst = is-1
1617  ilast = ie+2
1618  endif
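 ! For grid_type < 3 (cubed sphere, not nested) the centered stencil below
 ! is clipped to i in [3, npx-2]; the one-sided c1/c2/c3 and t12/t14/t15
 ! formulas that follow handle the three columns adjacent to each face edge.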
1619 !---------------------------------------------
1620 ! 4th order interpolation for interior points:
1621 !---------------------------------------------
1622  do j=js-1,je+1
1623  do i=ifirst,ilast
1624  uc(i,j) = a1*(utmp(i-1,j)+utmp(i,j))+a2*(utmp(i-2,j)+utmp(i+1,j))
1625  enddo
1626  enddo
1627 
1628  if (grid_type < 3) then
1629 ! Xdir:
1630  if( is==1 .and. .not. nested ) then
1631  do j=js-1,je+1
1632  uc(0,j) = c1*utmp(-2,j) + c2*utmp(-1,j) + c3*utmp(0,j)
1633  uc(1,j) = ( t14*(utmp( 0,j)+utmp(1,j)) &
1634  + t12*(utmp(-1,j)+utmp(2,j)) &
1635  + t15*(utmp(-2,j)+utmp(3,j)) )*rsin_u(1,j)
1636  uc(2,j) = c1*utmp(3,j) + c2*utmp(2,j) + c3*utmp(1,j)
1637  enddo
1638  endif
1639 
1640  if( (ie+1)==npx .and. .not. nested ) then
1641  do j=js-1,je+1
1642  uc(npx-1,j) = c1*utmp(npx-3,j)+c2*utmp(npx-2,j)+c3*utmp(npx-1,j)
1643  uc(npx,j) = (t14*(utmp(npx-1,j)+utmp(npx,j))+ &
1644  t12*(utmp(npx-2,j)+utmp(npx+1,j)) &
1645  + t15*(utmp(npx-3,j)+utmp(npx+2,j)))*rsin_u(npx,j)
1646  uc(npx+1,j) = c3*utmp(npx,j)+c2*utmp(npx+1,j)+c1*utmp(npx+2,j)
1647  enddo
1648  endif
1649 
1650  endif
1651 
1652 !------
1653 ! Ydir:
1654 !------
1655  if( sw_corner ) then
1656  do j=-2,0
1657  vtmp(0,j) = -utmp(1-j,0)
1658  enddo
1659  endif
1660  if( nw_corner ) then
1661  do j=0,2
1662  vtmp(0,npy+j) = utmp(j+1,npy)
1663  enddo
1664  endif
1665  if( se_corner ) then
1666  do j=-2,0
1667  vtmp(npx,j) = utmp(ie+j,0)
1668  enddo
1669  endif
1670  if( ne_corner ) then
1671  do j=0,2
1672  vtmp(npx,npy+j) = -utmp(ie-j,npy)
1673  enddo
1674  endif
1675 
1676  if (grid_type < 3) then
1677 
1678  do j=js-1,je+2
1679  if ( j==1 .and. .not. nested) then
1680  do i=is-1,ie+1
1681  vc(i,1) = (t14*(vtmp(i, 0)+vtmp(i,1)) &
1682  + t12*(vtmp(i,-1)+vtmp(i,2)) &
1683  + t15*(vtmp(i,-2)+vtmp(i,3)))*rsin_v(i,1)
1684  enddo
1685  elseif ( (j==0 .or. j==(npy-1)) .and. .not. nested) then
1686  do i=is-1,ie+1
1687  vc(i,j) = c1*vtmp(i,j-2) + c2*vtmp(i,j-1) + c3*vtmp(i,j)
1688  enddo
1689  elseif ( (j==2 .or. j==(npy+1)) .and. .not. nested) then
1690  do i=is-1,ie+1
1691  vc(i,j) = c1*vtmp(i,j+1) + c2*vtmp(i,j) + c3*vtmp(i,j-1)
1692  enddo
1693  elseif ( j==npy .and. .not. nested) then
1694  do i=is-1,ie+1
1695  vc(i,npy) = (t14*(vtmp(i,npy-1)+vtmp(i,npy)) &
1696  + t12*(vtmp(i,npy-2)+vtmp(i,npy+1)) &
1697  + t15*(vtmp(i,npy-3)+vtmp(i,npy+2)))*rsin_v(i,npy)
1698  enddo
1699  else
1700 ! 4th order interpolation for interior points:
1701  do i=is-1,ie+1
1702  vc(i,j) = a2*(vtmp(i,j-2)+vtmp(i,j+1))+a1*(vtmp(i,j-1)+vtmp(i,j))
1703  enddo
1704  endif
1705  enddo
1706  else
1707 ! 4th order interpolation:
1708  do j=js-1,je+2
1709  do i=is-1,ie+1
1710  vc(i,j) = a2*(vtmp(i,j-2)+vtmp(i,j+1))+a1*(vtmp(i,j-1)+vtmp(i,j))
1711  enddo
1712  enddo
1713  endif
1714 
1715  end subroutine d2c_setup
1716 
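 !#####################################################################
 ! <SUBROUTINE NAME="d2a_setup">
 !
 ! <DESCRIPTION>
 !  Interpolate the D-grid staggered winds (u,v) to A-grid cell centers
 !  (ua,va) only; the averaging mirrors the first stage of d2c_setup,
 !  without the subsequent C-grid interpolation.
 ! </DESCRIPTION>
 !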
1717  subroutine d2a_setup(u, v, ua, va, dord4, &
1718  isd,ied,jsd,jed, is,ie,js,je, npx,npy, &
1719  grid_type, nested, &
1720  cosa_s,rsin2 )
1722  logical, intent(in):: dord4
1723  real, intent(in) :: u(isd:ied,jsd:jed+1)
1724  real, intent(in) :: v(isd:ied+1,jsd:jed)
1725  real, intent(out), dimension(isd:ied ,jsd:jed ):: ua
1726  real, intent(out), dimension(isd:ied ,jsd:jed ):: va
1727  integer, intent(in) :: isd,ied,jsd,jed, is,ie,js,je, npx,npy,grid_type
1728  real, intent(in) :: cosa_s(isd:ied,jsd:jed)
1729  real, intent(in) :: rsin2(isd:ied,jsd:jed)
1730  logical, intent(in) :: nested
1731 
1732 ! Local
1733  real, dimension(isd:ied,jsd:jed):: utmp, vtmp
1734  real, parameter:: t11=27./28., t12=-13./28., t13=3./7., t14=6./7., t15=3./28.
1735  real, parameter:: a1 = 0.5625
1736  real, parameter:: a2 = -0.0625
1737  real, parameter:: c1 = -2./14.
1738  real, parameter:: c2 = 11./14.
1739  real, parameter:: c3 = 5./14.
1740  integer npt, i, j, ifirst, ilast, id
1741 
1742  if ( dord4) then
1743  id = 1
1744  else
1745  id = 0
1746  endif
1747 
1748 
1749  if (grid_type < 3 .and. .not. nested) then
1750  npt = 4
1751  else
1752  npt = -2
1753  endif
1754 
1755  if ( nested) then
1756 
1757  do j=jsd+1,jed-1
1758  do i=isd,ied
1759  utmp(i,j) = a2*(u(i,j-1)+u(i,j+2)) + a1*(u(i,j)+u(i,j+1))
1760  enddo
1761  enddo
1762  do i=isd,ied
1763  j = jsd
1764  utmp(i,j) = 0.5*(u(i,j)+u(i,j+1))
1765  j = jed
1766  utmp(i,j) = 0.5*(u(i,j)+u(i,j+1))
1767  end do
1768 
1769  do j=jsd,jed
1770  do i=isd+1,ied-1
1771  vtmp(i,j) = a2*(v(i-1,j)+v(i+2,j)) + a1*(v(i,j)+v(i+1,j))
1772  enddo
1773  i = isd
1774  vtmp(i,j) = 0.5*(v(i,j)+v(i+1,j))
1775  i = ied
1776  vtmp(i,j) = 0.5*(v(i,j)+v(i+1,j))
1777  enddo
1778 
1779  else
1780 
1781  !----------
1782  ! Interior:
1783  !----------
1784 
1785  do j=max(npt,js-1),min(npy-npt,je+1)
1786  do i=max(npt,isd),min(npx-npt,ied)
1787  utmp(i,j) = a2*(u(i,j-1)+u(i,j+2)) + a1*(u(i,j)+u(i,j+1))
1788  enddo
1789  enddo
1790  do j=max(npt,jsd),min(npy-npt,jed)
1791  do i=max(npt,is-1),min(npx-npt,ie+1)
1792  vtmp(i,j) = a2*(v(i-1,j)+v(i+2,j)) + a1*(v(i,j)+v(i+1,j))
1793  enddo
1794  enddo
1795 
1796  !----------
1797  ! Edges:
1798  !----------
1799  if (grid_type < 3) then
1800 
1801  if ( js==1 .or. jsd<npt) then
1802  do j=jsd,npt-1
1803  do i=isd,ied
1804  utmp(i,j) = 0.5*(u(i,j) + u(i,j+1))
1805  vtmp(i,j) = 0.5*(v(i,j) + v(i+1,j))
1806  enddo
1807  enddo
1808  endif
1809 
1810  if ( (je+1)==npy .or. jed>=(npy-npt)) then
1811  do j=npy-npt+1,jed
1812  do i=isd,ied
1813  utmp(i,j) = 0.5*(u(i,j) + u(i,j+1))
1814  vtmp(i,j) = 0.5*(v(i,j) + v(i+1,j))
1815  enddo
1816  enddo
1817  endif
1818 
1819  if ( is==1 .or. isd<npt ) then
1820  do j=max(npt,jsd),min(npy-npt,jed)
1821  do i=isd,npt-1
1822  utmp(i,j) = 0.5*(u(i,j) + u(i,j+1))
1823  vtmp(i,j) = 0.5*(v(i,j) + v(i+1,j))
1824  enddo
1825  enddo
1826  endif
1827 
1828  if ( (ie+1)==npx .or. ied>=(npx-npt)) then
1829  do j=max(npt,jsd),min(npy-npt,jed)
1830  do i=npx-npt+1,ied
1831  utmp(i,j) = 0.5*(u(i,j) + u(i,j+1))
1832  vtmp(i,j) = 0.5*(v(i,j) + v(i+1,j))
1833  enddo
1834  enddo
1835  endif
1836 
1837  endif
1838 
1839  end if
1840 
1841 
1842 
1843  do j=js-1-id,je+1+id
1844  do i=is-1-id,ie+1+id
1845  ua(i,j) = (utmp(i,j)-vtmp(i,j)*cosa_s(i,j)) * rsin2(i,j)
1846  va(i,j) = (vtmp(i,j)-utmp(i,j)*cosa_s(i,j)) * rsin2(i,j)
1847  enddo
1848  enddo
1849 
1850 end subroutine d2a_setup
1851 
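 !#####################################################################
 ! <SUBROUTINE NAME="pmaxmn_g">
 !
 ! <DESCRIPTION>
 !  Print the global max and min of field q over all km levels, and the
 !  area-weighted global mean of level km, each scaled by fac.
 ! </DESCRIPTION>
 !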
1852 subroutine pmaxmn_g(qname, q, is, ie, js, je, km, fac, area, domain)
1853  character(len=*), intent(in):: qname
1854  integer, intent(in):: is, ie, js, je
1855  integer, intent(in):: km
1856  real, intent(in):: q(is-3:ie+3, js-3:je+3, km)
1857  real, intent(in):: fac
1858  real(kind=R_GRID), intent(IN):: area(is-3:ie+3, js-3:je+3)
1859  type(domain2d), intent(INOUT) :: domain
1860 !
1861  real qmin, qmax, gmean
1862  integer i,j,k
1863 
1864  qmin = q(is,js,1)
1865  qmax = qmin
1866 
1867  do k=1,km
1868  do j=js,je
1869  do i=is,ie
1870  if( q(i,j,k) < qmin ) then
1871  qmin = q(i,j,k)
1872  elseif( q(i,j,k) > qmax ) then
1873  qmax = q(i,j,k)
1874  endif
1875  enddo
1876  enddo
1877  enddo
1878 
1879  call mp_reduce_min(qmin)
1880  call mp_reduce_max(qmax)
1881 
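 ! Note: only level km enters the global mean below (for a 3-D field this
 ! is typically the lowest level), while qmax/qmin span all km levels.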
1882  gmean = g_sum(domain, q(is:ie,js:je,km), is, ie, js, je, 3, area, 1, .true.)
1883  if(is_master()) write(6,*) qname, qmax*fac, qmin*fac, gmean*fac
1884 
1885 end subroutine pmaxmn_g
1886 end module fv_restart_nlm_mod
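A quick way to sanity-check the D-grid to A-grid averaging above is a small
stand-alone driver. The sketch below is hypothetical, not part of the FV
core: it assumes the module and its FMS dependencies are built, and it
exercises d2a_setup on an orthogonal patch (grid_type=4, cosa_s=0, rsin2=1),
where uniform D-grid winds must come back unchanged on the compute domain.

program d2a_demo
   use fv_restart_nlm_mod, only: d2a_setup
   implicit none
   integer, parameter :: npx = 9, npy = 9, ng = 3
   integer, parameter :: is = 1, ie = npx-1, js = 1, je = npy-1
   integer, parameter :: isd = is-ng, ied = ie+ng, jsd = js-ng, jed = je+ng
   real :: u(isd:ied, jsd:jed+1), v(isd:ied+1, jsd:jed)
   real :: ua(isd:ied, jsd:jed),  va(isd:ied, jsd:jed)
   real :: cosa_s(isd:ied, jsd:jed), rsin2(isd:ied, jsd:jed)

   u = 1.0                    ! uniform D-grid x-wind
   v = 2.0                    ! uniform D-grid y-wind
   cosa_s = 0.0               ! orthogonal grid: cos(alpha) = 0
   rsin2  = 1.0               ! 1/sin(alpha)**2 = 1
   ! dord4=.false. -> id=0; grid_type=4 with nested=.false. selects the
   ! npt=-2 branch, so no edge or corner special cases are exercised.
   call d2a_setup(u, v, ua, va, .false., isd, ied, jsd, jed, &
                  is, ie, js, je, npx, npy, 4, .false., cosa_s, rsin2)
   print *, 'ua:', minval(ua(is:ie,js:je)), maxval(ua(is:ie,js:je))  ! expect 1 1
   print *, 'va:', minval(va(is:ie,js:je)), maxval(va(is:ie,js:je))  ! expect 2 2
end program d2a_demo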