FV3 Bundle
drifters_comm.F90
1 !***********************************************************************
2 !* GNU Lesser General Public License
3 !*
4 !* This file is part of the GFDL Flexible Modeling System (FMS).
5 !*
6 !* FMS is free software: you can redistribute it and/or modify it under
7 !* the terms of the GNU Lesser General Public License as published by
8 !* the Free Software Foundation, either version 3 of the License, or (at
9 !* your option) any later version.
10 !*
11 !* FMS is distributed in the hope that it will be useful, but WITHOUT
12 !* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 !* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 !* for more details.
15 !*
16 !* You should have received a copy of the GNU Lesser General Public
17 !* License along with FMS. If not, see <http://www.gnu.org/licenses/>.
18 !***********************************************************************
19 #include "fms_switches.h"
20 
21 module drifters_comm_mod
22 #include <fms_platform.h>
23 
24 #ifdef _SERIAL
25 
26 #define _TYPE_DOMAIN2D integer
27 #define _NULL_PE 0
28 
29 #else
30 
31  use mpp_mod, only : null_pe, fatal, note, mpp_error, mpp_pe, mpp_npes
32  use mpp_mod, only : mpp_root_pe
33  use mpp_mod, only : mpp_send, mpp_recv, mpp_sync_self
34  use mpp_mod, only : comm_tag_1, comm_tag_2, comm_tag_3, comm_tag_4
35  use mpp_domains_mod, only : domain2d
36  use mpp_domains_mod, only : mpp_get_neighbor_pe, mpp_get_compute_domain
37  use mpp_domains_mod, only : mpp_get_data_domain
38  use mpp_domains_mod, only : north, south, east, west, cyclic_global_domain
39  use mpp_domains_mod, only : north_east, south_east, south_west, north_west
40 
41 #define _TYPE_DOMAIN2D type(domain2d)
42 #define _NULL_PE NULL_PE
43 
44 #endif
45 
46  use drifters_core_mod, only : drifters_core_type, drifters_core_set_positions, drifters_core_remove_and_add
47 
48  implicit none
49  private
50 
51  public :: drifters_comm_type, drifters_comm_new, drifters_comm_del, drifters_comm_set_domain
52  public :: drifters_comm_set_pe_neighbors, drifters_comm_update, drifters_comm_gather
53 
54  type drifters_comm_type
55  ! compute domain
56  real :: xcmin, xcmax
57  real :: ycmin, ycmax
58  ! data domain
59  real :: xdmin, xdmax
60  real :: ydmin, ydmax
61  ! global valid min/max
62  real :: xgmin, xgmax
63  real :: ygmin, ygmax
64  ! x/y period (can be nearly infinite)
65  logical :: xperiodic, yperiodic
66  ! neighbor domains
67  integer :: pe_n, pe_s, pe_e, pe_w, pe_ne, pe_se, pe_sw, pe_nw
68  ! starting/ending pe, set this to a value /= 0 if running concurrently
69  integer :: pe_beg, pe_end
70  end type drifters_comm_type
71 
72 contains
73 
74 !===============================================================================
75  subroutine drifters_comm_new(self)
76  type(drifters_comm_type) :: self
77 
78  self%xcmin = -huge(1.); self%xcmax = +huge(1.)
79  self%ycmin = -huge(1.); self%ycmax = +huge(1.)
80 
81  self%xdmin = -huge(1.); self%xdmax = +huge(1.)
82  self%ydmin = -huge(1.); self%ydmax = +huge(1.)
83 
84  self%xgmin = -huge(1.); self%xgmax = +huge(1.)
85  self%ygmin = -huge(1.); self%ygmax = +huge(1.)
86 
87  self%xperiodic = .false.; self%yperiodic = .false.
88 
89  self%pe_N = _NULL_PE
90  self%pe_S = _NULL_PE
91  self%pe_E = _NULL_PE
92  self%pe_W = _NULL_PE
93  self%pe_NE = _NULL_PE
94  self%pe_SE = _NULL_PE
95  self%pe_SW = _NULL_PE
96  self%pe_NW = _NULL_PE
97 
98  self%pe_beg = 0
99  self%pe_end = -1
100 
101 
102  end subroutine drifters_comm_new
103 
104 !===============================================================================
105  subroutine drifters_comm_del(self)
106  type(drifters_comm_type) :: self
107 
108  ! nothing to deallocate
109  call drifters_comm_new(self)
110 
111  end subroutine drifters_comm_del
112 
113 !===============================================================================
114  subroutine drifters_comm_set_data_bounds(self, xmin, ymin, xmax, ymax)
115  ! Set data domain bounds.
116  type(drifters_comm_type) :: self
117  real, intent(in) :: xmin, ymin, xmax, ymax
118 
119  self%xdmin = max(xmin, self%xdmin)
120  self%xdmax = min(xmax, self%xdmax)
121  self%ydmin = max(ymin, self%ydmin)
122  self%ydmax = min(ymax, self%ydmax)
123 
124  end subroutine drifters_comm_set_data_bounds
125 
126 !===============================================================================
127  subroutine drifters_comm_set_comp_bounds(self, xmin, ymin, xmax, ymax)
128  ! Set compute domain bounds.
129  type(drifters_comm_type) :: self
130  real, intent(in) :: xmin, ymin, xmax, ymax
131 
132  self%xcmin = max(xmin, self%xcmin)
133  self%xcmax = min(xmax, self%xcmax)
134  self%ycmin = max(ymin, self%ycmin)
135  self%ycmax = min(ymax, self%ycmax)
136 
137  end subroutine drifters_comm_set_comp_bounds
138 
139 !===============================================================================
140  subroutine drifters_comm_set_pe_neighbors(self, domain)
141  ! Set neighboring pe numbers.
142  type(drifters_comm_type) :: self
143  _TYPE_DOMAIN2D, intent(inout) :: domain
144 
145 #ifndef _SERIAL
146 ! parallel code
147 
148  integer :: pe_n, pe_s, pe_e, pe_w, pe_ne, pe_se, pe_sw, pe_nw
149 
150  call mpp_get_neighbor_pe(domain, north , pe_n )
151  call mpp_get_neighbor_pe(domain, north_east, pe_ne)
152  call mpp_get_neighbor_pe(domain, east , pe_e )
153  call mpp_get_neighbor_pe(domain, south_east, pe_se)
154  call mpp_get_neighbor_pe(domain, south , pe_s )
155  call mpp_get_neighbor_pe(domain, south_west, pe_sw)
156  call mpp_get_neighbor_pe(domain, west , pe_w )
157  call mpp_get_neighbor_pe(domain, north_west, pe_nw)
158 
159  if(pe_n /= self%pe_N .and. self%pe_N == _NULL_PE) then
160  self%pe_N = pe_n
161  else if(pe_n /= self%pe_N ) then
162  call mpp_error( fatal, 'drifters_comm_set_pe_neighbors: NORTH PE changed!.')
163  endif
164  if(pe_ne /= self%pe_NE .and. self%pe_NE == _NULL_PE) then
165  self%pe_NE = pe_ne
166  else if(pe_ne /= self%pe_NE) then
167  call mpp_error( fatal, 'drifters_comm_set_pe_neighbors: NORTH-EAST PE changed!.')
168  endif
169  if(pe_e /= self%pe_E .and. self%pe_E == _NULL_PE) then
170  self%pe_E = pe_e
171  else if(pe_e /= self%pe_E ) then
172  call mpp_error( fatal, 'drifters_comm_set_pe_neighbors: EAST PE changed!.')
173  endif
174  if(pe_se /= self%pe_SE .and. self%pe_SE == _NULL_PE) then
175  self%pe_SE = pe_se
176  else if(pe_se /= self%pe_SE) then
177  call mpp_error( fatal, 'drifters_comm_set_pe_neighbors: SOUTH-EAST PE changed!.')
178  endif
179  if(pe_s /= self%pe_S .and. self%pe_S == _NULL_PE) then
180  self%pe_S = pe_s
181  else if(pe_s /= self%pe_S ) then
182  call mpp_error( fatal, 'drifters_comm_set_pe_neighbors: SOUTH PE changed!.')
183  endif
184  if(pe_sw /= self%pe_SW .and. self%pe_SW == _NULL_PE) then
185  self%pe_SW = pe_sw
186  else if(pe_sw /= self%pe_SW) then
187  call mpp_error( fatal, 'drifters_comm_set_pe_neighbors: SOUTH-WEST PE changed!.')
188  endif
189  if(pe_w /= self%pe_W .and. self%pe_W == _NULL_PE) then
190  self%pe_W = pe_w
191  else if(pe_w /= self%pe_W ) then
192  call mpp_error( fatal, 'drifters_comm_set_pe_neighbors: WEST PE changed!.')
193  endif
194  if(pe_nw /= self%pe_NW .and. self%pe_NW == _NULL_PE) then
195  self%pe_NW = pe_nw
196  else if(pe_nw /= self%pe_NW) then
197  call mpp_error( fatal, 'drifters_comm_set_pe_neighbors: NORTH-WEST PE changed!.')
198  endif
199 
200 #endif
201 ! end of parallel code
202 
203  end subroutine drifters_comm_set_pe_neighbors
204 
205 !===============================================================================
206  subroutine drifters_comm_set_domain(self, domain, x, y, backoff_x, backoff_y)
207  ! Set boundaries of domain and compute neighbors. This method can be called
208  ! multiple times; the data domain will just be the intersection (overlap) of
209  ! all domains (e.g. domain_u, domain_v, etc.).
210  type(drifters_comm_type) :: self
211  _TYPE_DOMAIN2D, intent(inout) :: domain
212  real, intent(in) :: x(:), y(:) ! global axes
213  integer, intent(in) :: backoff_x, backoff_y ! >= 0; the data domain is shrunk by backoff_x (resp. backoff_y) indices in x (resp. y)
214 
215  ! compute/data domain start/end indices
216  integer isc, iec, jsc, jec
217  integer isd, ied, jsd, jed
218  integer nx, ny, hx, hy, bckf_x, bckf_y, halox, haloy
219  real dx, dy, xdmin, xdmax, ydmin, ydmax
220 
221 #ifdef _SERIAL
222  integer :: ibnds(1)
223 
224  ibnds = lbound(x); isc = ibnds(1)
225  ibnds = ubound(x); iec = ibnds(1) - 1
226  ibnds = lbound(y); jsc = ibnds(1)
227  ibnds = ubound(y); jec = ibnds(1) - 1
228 #else
229  call mpp_get_compute_domain( domain, isc, iec, jsc, jec )
230 #endif
231 
232  self%xcmin = max(x(isc), self%xcmin)
233  self%xcmax = min(x(iec), self%xcmax)
234  self%ycmin = max(y(jsc), self%ycmin)
235  self%ycmax = min(y(jec), self%ycmax)
236 
237  nx = iec - isc + 1
238  ny = jec - jsc + 1
239 
240 #ifdef _SERIAL
241  isd = 1; ied = size(x); jsd = 1; jed = size(y)
242 #else
243  call mpp_get_data_domain ( domain, isd, ied, jsd, jed )
244 #endif
245 
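 ! hx/hy are the halo widths of the data domain; the backoff (clipped to
 ! the halo width) shrinks the usable halo, so halox/haloy is the number
 ! of halo cells actually kept in the drifters' data domain.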
246  hx = max(ied-iec, isc-isd)
247  hy = max(jed-jec, jsc-jsd)
248  bckf_x = min(backoff_x, hx)
249  bckf_y = min(backoff_y, hy)
250 
251  halox = max(0, hx - bckf_x)
252  haloy = max(0, hy - bckf_y)
253 
254  if(isd < 1) then
255  dx = x(2) - x(1)
256  xdmin = self%xcmin - dx*halox
257  else
258  xdmin = x(isd+bckf_x)
259  endif
260 
261  if(ied > nx) then
262  dx = x(nx) - x(nx-1)
263  xdmax = self%xcmax + dx*halox
264  else
265  xdmax = x(ied-bckf_x)
266  endif
267 
268  if(jsd < 1) then
269  dy = y(2) - y(1)
270  ydmin = self%ycmin - dy*haloy
271  else
272  ydmin = y(jsd+bckf_y)
273  endif
274 
275  if(jed > ny) then
276  dy = y(ny) - y(ny-1)
277  ydmax = self%ycmax + dy*haloy
278  else
279  ydmax = y(jed-bckf_y)
280  endif
281 
282  self%xdmin = max(xdmin, self%xdmin)
283  self%ydmin = max(ydmin, self%ydmin)
284  self%xdmax = min(xdmax, self%xdmax)
285  self%ydmax = min(ydmax, self%ydmax)
286 
287  call drifters_comm_set_pe_neighbors(self, domain)
288 
289  end subroutine drifters_comm_set_domain
290 
291 !===============================================================================
292  subroutine drifters_comm_update(self, drfts, new_positions, &
293  & comm, remove, max_add_remove)
295  type(drifters_comm_type) :: self
296  type(drifters_core_type) :: drfts
297  real, intent(inout) :: new_positions(:,:)
298  integer, intent(in), optional :: comm ! MPI communicator
299  logical, intent(in), optional :: remove(:) ! Set to True for particles that should be removed
300  integer, intent(in), optional :: max_add_remove ! max no of particles to add/remove
301 
302 #ifdef _SERIAL
303 ! serial code
304 
305  drfts%positions(:, 1:drfts%np) = new_positions(:, 1:drfts%np)
306  return
307 
308 #else
309 ! parallel code
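 ! Outline of the parallel update:
 !  (1) for each drifter, detect crossings of the compute domain boundary
 !      and pack an (id, position) tuple for the corresponding neighbor PE;
 !  (2) flag drifters that left the data domain (or were tagged via
 !      'remove') for removal;
 !  (3) copy new_positions into drfts, exchange the per-PE tuple counts and
 !      the tuples themselves, then remove/add drifters accordingly.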
310 
311  include 'mpif.h'
312 
313 
314  integer nd, np, nar_est, ip, neigh_pe, irem, pe, npes, ntuples
315  integer ntuples_tot, ndata, mycomm
316 #ifdef _USE_MPI
317  integer ier
318 #endif
319  integer, allocatable :: iadd(:)
320  integer, allocatable :: table_recv(:), table_send(:)
321  real , allocatable :: data_recv(:,:), data_send(:,:)
322  integer, allocatable :: indices_to_remove(:)
323  integer, allocatable :: ids_to_add(:)
324  real , allocatable :: positions_to_add(:,:)
325  real :: x, y, xold, yold
326  character(len=128) :: ermsg, notemsg
327  logical :: is_present
328  integer :: id, j, k, m, n, el
329  logical :: crossed_w, crossed_e, crossed_s, crossed_n
330  logical :: was_in_compute_domain, left_domain
331 
332  mycomm = mpi_comm_world
333  if( present(comm) ) mycomm = comm
334 
335  nd = drfts%nd
336  np = size(new_positions,2)
337  if(np > 0 .and. nd < 2) call mpp_error( fatal, &
338  & 'drifters_comm_update: number of dimensions must be 2 or higher.' )
339 
340  nar_est = 100
341  if(present(max_add_remove)) nar_est = max(1, max_add_remove)
342 
343  pe = mpp_pe()
344  npes = mpp_npes()
345 
346  ! assume pe list is contiguous, self%pe_beg...self%pe_end
347  allocate(iadd(self%pe_beg:self%pe_end))
348  allocate(table_recv(self%pe_beg:self%pe_end))
349  allocate(table_send(self%pe_beg:self%pe_end))
350  allocate(data_recv(nar_est*(1+nd), self%pe_beg:self%pe_end))
351  allocate(data_send(nar_est*(1+nd), self%pe_beg:self%pe_end))
352  allocate(indices_to_remove(nar_est))
353 
354  table_send = 0
355  table_recv = 0
356  data_send = 0
357  data_recv = 0
358 
359  iadd = 0
360  irem = 0
361  do ip = 1, np
362  x = new_positions(1, ip)
363  y = new_positions(2, ip)
364  xold = drfts%positions(1, ip)
365  yold = drfts%positions(2, ip)
366 
367  if( xold<self%xcmin .or. xold>self%xcmax .or. &
368  & yold<self%ycmin .or. yold>self%ycmax ) then
369  was_in_compute_domain = .false.
370  else
371  was_in_compute_domain = .true.
372  endif
373 
374  ! check if drifters crossed compute domain boundary
375 
376  crossed_w = .false.
377  crossed_e = .false.
378  crossed_s = .false.
379  crossed_n = .false.
380  if( was_in_compute_domain .and. &
381  & (x<self%xcmin) .and. (xold>self%xcmin) ) crossed_w = .true.
382  if( was_in_compute_domain .and. &
383  & (x>self%xcmax) .and. (xold<self%xcmax) ) crossed_e = .true.
384  if( was_in_compute_domain .and. &
385  & (y<self%ycmin) .and. (yold>self%ycmin) ) crossed_s = .true.
386  if( was_in_compute_domain .and. &
387  & (y>self%ycmax) .and. (yold<self%ycmax) ) crossed_n = .true.
388 
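 ! map the crossed boundary (or pair of boundaries) to one of the eight
 ! neighbor PEs; crossing two boundaries at once sends the drifter to the
 ! corresponding corner neighbor (e.g. north+east -> pe_NE)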
389  neigh_pe = _NULL_PE
390  if(crossed_n .and. .not. crossed_e .and. .not. crossed_w) neigh_pe = self%pe_N
391  if(crossed_n .and. crossed_e ) neigh_pe = self%pe_NE
392  if(crossed_e .and. .not. crossed_n .and. .not. crossed_s) neigh_pe = self%pe_E
393  if(crossed_s .and. crossed_e ) neigh_pe = self%pe_SE
394  if(crossed_s .and. .not. crossed_e .and. .not. crossed_w) neigh_pe = self%pe_S
395  if(crossed_s .and. crossed_w ) neigh_pe = self%pe_SW
396  if(crossed_w .and. .not. crossed_s .and. .not. crossed_n) neigh_pe = self%pe_W
397  if(crossed_n .and. crossed_w ) neigh_pe = self%pe_NW
398 
399  if(neigh_pe /= _NULL_PE) then
400  iadd(neigh_pe) = iadd(neigh_pe) + 1
401  if(iadd(neigh_pe) > nar_est) then
402  write(notemsg, '(a,i4,a,i4,a)') 'drifters_comm_update: exceeded nar_est (', &
403  & iadd(neigh_pe),'>',nar_est,').'
404  call mpp_error( fatal, notemsg)
405  endif
406  table_send(neigh_pe) = table_send(neigh_pe) + 1
407  k = ( iadd(neigh_pe)-1 )*(1+nd) + 1
408  data_send(k , neigh_pe) = drfts%ids(ip)
409  data_send(k+1:k+nd, neigh_pe) = new_positions(:,ip)
410  endif
411 
412  ! check if drifters left data domain
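 ! (a drifter is only dropped here if the neighbor PE in that direction is
 ! a different PE; e.g. in a cyclic direction decomposed on a single PE the
 ! drifter is kept)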
413 
414  left_domain = .false.
415  if( (x<self%xdmin .and. (self%pe_W/=pe)) .or. &
416  & (x>self%xdmax .and. (self%pe_E/=pe)) .or. &
417  & (y<self%ydmin .and. (self%pe_S/=pe)) .or. &
418  & (y>self%ydmax .and. (self%pe_N/=pe)) ) then
419  left_domain = .true.
420  endif
421 
422  ! remove if particle was tagged as such
423 
424  if(present(remove)) then
425  if(remove(ip)) left_domain = .true.
426  endif
427 
428  if(left_domain) then
429  irem = irem + 1
430  if(irem > nar_est) then
431  write(notemsg, '(a,i4,a,i4,a)') 'drifters_comm_update: exceeded nar_est (',&
432  & irem,'>',nar_est,').'
433  call mpp_error( fatal, notemsg)
434  endif
435  indices_to_remove(irem) = ip
436  endif
437 
438  enddo
439 
440 
441  ! update drifters' positions (remove whatever needs to be removed later)
442  call drifters_core_set_positions(drfts, new_positions, ermsg)
443  if(ermsg/='') call mpp_error( fatal, ermsg)
444 
445  ! fill in table_recv from table_send. table_send contains the
446  ! number of tuples that will be sent to another pe. table_recv
447  ! will contain the number of tuples to be received. The indices
448  ! of table_send refer to the pe to which the tuples should be sent;
449  ! the indices of table_recv refer to the pe number
450  ! (self%pe_beg..self%pe_end) from
451  ! which the tuples are received.
452  !
453  ! table_send(to_pe) = ntuples; table_recv(from_pe) = ntuples
454 
455  ! the following is a transpose operation
456  ! table_send(m)[pe] -> table_recv(pe)[m]
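 ! Illustrative example with pe_beg=0, pe_end=2: if PE 0 has
 ! table_send = (/0, 2, 1/), then after the exchange below PE 1 sees
 ! table_recv(0) = 2 and PE 2 sees table_recv(0) = 1.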
457  do m = self%pe_beg, self%pe_end
458 #ifdef _USE_MPI
459  call mpi_scatter (table_send , 1, mpi_integer, &
460  & table_recv(m), 1, mpi_integer, &
461  & m, mycomm, ier )
462 #else
463  if(pe==m) then
464  do k = self%pe_beg, self%pe_end
465  call mpp_send(table_send(k), plen=1, to_pe=k, tag=comm_tag_1)
466  enddo
467  endif
468  call mpp_recv(table_recv(m), glen=1, from_pe=m, tag=comm_tag_1)
469 #endif
470  enddo
471 
472  ! communicate new positions. data_send is an array of size n*(nd+1) times npes.
473  ! Each column j of data_send contains the tuple (id, x, y, ..) to be sent to pe=j.
474  ! Inversely, data_recv's column j contains tuples (id, x, y,..) received from pe=j.
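 ! For nd=2 a column is laid out as (id1, x1, y1, id2, x2, y2, ...), i.e.
 ! consecutive tuples of width 1+nd.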
475  do m = self%pe_beg, self%pe_end
476  ntuples = table_send(m)
477  ndata = ntuples*(nd+1)
478  ! should be able to send ndata?
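 ! (as written, the full nar_est*(1+nd) column is exchanged regardless of
 ! ntuples/ndata; only the first table_recv(m) tuples of each received
 ! column are used below)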
479 #ifdef _USE_MPI
480  call mpi_scatter (data_send , nar_est*(1+nd), mpi_real8, &
481  & data_recv(1,m), nar_est*(1+nd), mpi_real8, &
482  & m, mycomm, ier )
483 #else
484  if(pe==m) then
485  do k = self%pe_beg, self%pe_end
486  call mpp_send(data_send(1,k), plen=nar_est*(1+nd), to_pe=k, tag=comm_tag_2)
487  enddo
488  endif
489  call mpp_recv(data_recv(1,m), glen=nar_est*(1+nd), from_pe=m, tag=comm_tag_2)
490 #endif
491  enddo
492 
493  ! total number of tuples will determine size of ids_to_add/positions_to_add
494  ntuples_tot = 0
495  do m = self%pe_beg, self%pe_end
496  ntuples_tot = ntuples_tot + table_recv(m)
497  enddo
498 
499  allocate(positions_to_add(nd, ntuples_tot))
500  allocate( ids_to_add( ntuples_tot))
501 
502  ! fill positions_to_add and ids_to_add.
503  k = 0
504  do m = self%pe_beg, self%pe_end
505  ! get ids/positions coming from all pes
506  do n = 1, table_recv(m)
507  ! iterate over all ids/positions coming from pe=m
508  el = (n-1)*(nd+1) + 1
509  id = int(data_recv(el, m))
510  ! only add if id not already present in drfts
511  ! this can happen if a drifter meanders about
512  ! the compute domain boundary
513  is_present = .false.
514  do j = 1, drfts%np
515  if(id == drfts%ids(j)) then
516  is_present = .true.
517  write(notemsg, '(a,i4,a)') 'Drifter ', id, ' already advected (will not be added).'
518  call mpp_error(note, notemsg)
519  exit
520  endif
521  enddo
522  if(.not. is_present) then
523  k = k + 1
524  ids_to_add(k) = id
525 
526  positions_to_add(1:nd, k) = data_recv(el+1:el+nd, m)
527 
528  endif
529  enddo
530  enddo
531 
532  ! remove and add
533  if(irem > 0 .or. k > 0) then
534  write(notemsg, '(i4,a,i4,a)') irem, ' drifter(s) will be removed, ', k,' will be added'
535  call mpp_error(note, notemsg)
536 !!$ if(k>0) print *,'positions to add ', positions_to_add(:,1:k)
537 !!$ if(irem>0) print *,'ids to remove: ', indices_to_remove(1:irem)
538  endif
539  call drifters_core_remove_and_add(drfts, indices_to_remove(1:irem), &
540  & ids_to_add(1:k), positions_to_add(:,1:k), ermsg)
541  if(ermsg/='') call mpp_error( fatal, ermsg)
542 
543 #ifndef _USE_MPI
544  ! make sure unbuffered mpp_isend call returned before deallocating
545  call mpp_sync_self()
546 #endif
547 
548  deallocate(ids_to_add)
549  deallocate(positions_to_add)
550 
551  deallocate(iadd)
552  deallocate(table_recv)
553  deallocate(table_send)
554  deallocate(data_recv)
555  deallocate(data_send)
556  deallocate(indices_to_remove)
557 
558 #endif
559 ! end of parallel code
560 
561  end subroutine drifters_comm_update
562 
563 !===============================================================================
564  subroutine drifters_comm_gather(self, drfts, dinp, &
565  & lons, lats, do_save_lonlat, &
566  & filename, &
567  & root, mycomm)
568  use drifters_input_mod, only : drifters_input_type, drifters_input_save
570 
571  type(drifters_comm_type) :: self
572  type(drifters_core_type) :: drfts
573  type(drifters_input_type) :: dinp
574  real, intent(in) :: lons(:), lats(:)
575  logical, intent(in) :: do_save_lonlat
576  character(len=*), intent(in) :: filename
577  integer, intent(in), optional :: root ! root pe
578  integer, intent(in), optional :: mycomm ! MPI communicator
579 
580  character(len=128) :: ermesg
581 
582 #ifdef _SERIAL
583 ! serial code
584 
585  dinp%ids(1:drfts%np) = drfts%ids(1:drfts%np)
586  dinp%positions(:, 1:drfts%np) = drfts%positions(:, 1:drfts%np)
587 
588  if(do_save_lonlat) then
589 
590  call drifters_input_save(dinp, filename=filename, &
591  & geolon=lons, geolat=lats, ermesg=ermesg)
592 
593  else
594 
595  call drifters_input_save(dinp, filename=filename, ermesg=ermesg)
596 
597  endif
598 
599 #else
600 ! parallel code
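 ! Outline: each PE packs the drifters inside its compute domain into
 ! 'data'; the per-PE counts and then the tuples are gathered onto the
 ! root PE, which rebuilds dinp%ids/positions (and lons/lats) and writes
 ! the restart file via drifters_input_save.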
601 
602 
603  integer :: npf, ip, comm, root_pe, pe, npes, nd, np, npdim, npmax, ier, nptot
604  integer :: i, j, k, kk
605  integer, allocatable :: nps(:)
606  real :: x, y
607  real, allocatable :: lons0(:), lats0(:), recvbuf(:,:)
608  real :: data(drfts%nd+3, drfts%np)
609  include 'mpif.h'
610 
611  comm = mpi_comm_world
612  if(present(mycomm)) comm = mycomm
613 
614  root_pe = mpp_root_pe()
615  if(present(root)) root_pe = root
616 
617  pe = mpp_pe()
618  npes = mpp_npes()
619 
620  nd = drfts%nd
621  np = drfts%np
622  npdim = drfts%npdim
623 
624  allocate(nps(self%pe_beg:self%pe_end))
625  nps = 0
626 
627  ! npf= number of drifters in compute domain
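 ! each such drifter is packed into 'data' as an (nd+3)-tuple:
 ! (id, positions(1:nd), lon, lat)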
628 
629  npf = 0
630  do ip = 1, np
631  x = drfts%positions(1, ip)
632  y = drfts%positions(2, ip)
633  if( x <= self%xcmax .and. x >= self%xcmin .and. &
634  & y <= self%ycmax .and. y >= self%ycmin) then
635  npf = npf + 1
636  data(1 , npf) = real(drfts%ids(ip))
637  data(1+1:1+nd, npf) = drfts%positions(:, ip)
638  data( 2+nd, npf) = lons(ip)
639  data( 3+nd, npf) = lats(ip)
640  endif
641  enddo
642 
643  ! gather number of drifters
644 #ifdef _USE_MPI
645  call mpi_gather(npf, 1, mpi_int, &
646  & nps, 1, mpi_int, &
647  & root_pe, comm, ier)
648  !!if(ier/=0) ermesg = 'drifters_write_restart: ERROR while gathering "npf"'
649 #else
650  call mpp_send(npf, plen=1, to_pe=root_pe, tag=comm_tag_3)
651  if(pe==root_pe) then
652  do i = self%pe_beg, self%pe_end
653  call mpp_recv(nps(i), glen=1, from_pe=i, tag=comm_tag_3)
654  enddo
655  endif
656 #endif
657 
658  ! Now we know the max number of drifters to expect from each PE, so allocate
659  ! recvbuf (first dim will be zero on all PEs except root).
660 
661  ! allocate recvbuf to receive all the data on root PE, strided by npmax*(nd+3)
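 ! column i of recvbuf then mirrors the layout of 'data' on PE i:
 ! nps(i) tuples of width nd+3, in the same (id, positions, lon, lat) order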
662  npmax = maxval(nps)
663  allocate(recvbuf(npmax*(nd+3), self%pe_beg:self%pe_end))
664  recvbuf = -1
665 
666  ! Each PE sends data to recvbuf on root_pe.
667 #ifdef _USE_MPI
668  call mpi_gather( data , npf*(nd+3), mpi_real8, &
669  & recvbuf, npmax*(nd+3), mpi_real8, &
670  & root_pe, comm, ier)
671  !!if(ier/=0) ermesg = 'drifters_write_restart: ERROR while gathering "data"'
672 #else
673  if(npf > 0) call mpp_send(data(1,1), plen=npf*(nd+3), to_pe=root_pe, tag=comm_tag_4)
674  if(pe==root_pe) then
675  do i = self%pe_beg, self%pe_end
676  if(nps(i) > 0) call mpp_recv(recvbuf(1, i), glen=nps(i)*(nd+3), from_pe=i, tag=comm_tag_4)
677  enddo
678  endif
679 #endif
680 
681  ! Set positions and ids
682  if(pe == root_pe) then
683  ! check dims
684  nptot = sum(nps) ! total number of drifters, across all PEs
685  if(nptot /= size(dinp%ids)) then
686  deallocate(dinp%ids , stat=ier)
687  deallocate(dinp%positions, stat=ier)
688  allocate(dinp%ids(nptot))
689  allocate(dinp%positions(nd, nptot))
690  dinp%ids = -1
691  dinp%positions = -huge(1.)
692  endif
693 
694  allocate(lons0(nptot), lats0(nptot))
695 
696  ! Set the new positions/ids in dinp on PE=root. Also set
697  ! lons/lats; these arrays will hold garbage if x1, y1, etc. were
698  ! not passed as subroutine arguments, which is fine because in
699  ! that case we won't save them.
700  j = 1
701  do i = self%pe_beg, self%pe_end
702  do k = 1, nps(i)
703  kk = (nd+3)*(k-1)
704  dinp%ids(j) = int(recvbuf(kk+1 , i))
705  dinp%positions(1:nd, j) = recvbuf(kk+1+1:kk+1+nd, i)
706  lons0(j) = recvbuf(kk+2+nd, i)
707  lats0(j) = recvbuf(kk+3+nd, i)
708  j = j + 1
709  enddo
710  enddo
711 
712  if(do_save_lonlat) then
713 
714  call drifters_input_save(dinp, filename=filename, &
715  & geolon=lons0, geolat=lats0, ermesg=ermesg)
716 
717  else
718 
719  call drifters_input_save(dinp, filename=filename, ermesg=ermesg)
720 
721  endif
722 
723  deallocate(lons0, lats0)
724 
725  endif
726 
727 #ifndef _USE_MPI
728  call mpp_sync_self()
729 #endif
730  deallocate(nps , stat=ier)
731  deallocate(recvbuf, stat=ier)
732 
733 #endif
734 ! end of parallel code
735 
736  end subroutine drifters_comm_gather
737 
738 
739 end module drifters_comm_mod
740 
741 !===============================================================================
742 !===============================================================================
743 #ifdef _TEST_DRIFTERS_COMM
744 ! set FMS=/home/ap/ia64/mpp/mpp_test/exec/
745 ! set OPTS="-r8 -g"
746 ! set OBJS="$FMS/mpp*.o $FMS/threadloc.o"
747 ! set INCS="-I/usr/include -I/usr/local/include -I${FMS}"
748 ! set LIBS="-L/usr/local/lib -lnetcdf -L/usr/lib -lmpi -lcprts"
749 ! ifort $OPTS $INCS -D_TEST_DRIFTERS_COMM drifters_comm.F90 quicksort.F90 drifters_core.F90 $OBJS $LIBS
750 program main
751 
752  use drifters_core_mod
753  use drifters_comm_mod
754  use mpp_mod
755  use mpp_domains_mod
756 
757  implicit none
758 
759  integer, parameter :: nd=2, npmax = 4
760  integer :: nx, ny, halox, haloy, layout(2), i, j, npes, pe, it, nt
761  _TYPE_DOMAIN2D :: domain
762  type(drifters_core_type) :: drfts
763  type(drifters_comm_type) :: drfts_com
764  real, parameter :: xmin=0., xmax=1., ymin=0., ymax=1.
765  real :: dx, dy, u0, v0, dt, lx, ly
766  real, allocatable :: x(:), y(:)
767  character(len=128) :: ermsg = ''
768  integer :: ids(npmax)
769  real :: positions(nd, npmax), velocity(nd, npmax)
770  integer :: io_status
771 !!$ integer :: stackmax=4000000
772 
773  namelist /drifters_comm_nml/ nx, ny, halox, haloy, u0, v0, dt, nt
774 
775 
776  call mpp_init !(MPP_DEBUG)
777  !call mpp_set_stack_size(3145746)
778 
779  ! default input values
780  nx = 11
781  ny = 21
782  halox = 2
783  haloy = 2
784  u0 = 1.0
785  v0 = 0.0
786  dt = 0.1
787  nt = 10
788 
789  ! read input
790 #ifdef INTERNAL_FILE_NML
791  read (input_nml_file, drifters_comm_nml, iostat=io_status)
792 #else
793  open(unit=1, file='input.nml', form='formatted')
794  read(1, drifters_comm_nml)
795  close(unit=1)
796  if(mpp_pe()==0) write(*,drifters_comm_nml)
797 #endif
798 
799  ! create global domain
800  lx = xmax - xmin
801  ly = ymax - ymin
802  dx = lx/real(nx-1)
803  dy = ly/real(ny-1)
804  allocate(x(nx), y(ny))
805  x = xmin + (/ ( i*dx, i=0, nx-1) /)
806  y = ymin + (/ ( j*dy, j=0, ny-1) /)
807 
808  ! decompose domain
809  call mpp_domains_init ! (MPP_DEBUG)
810 !!$ call mpp_domains_set_stack_size(stackmax)
811 
812  npes = mpp_npes()
813  call mpp_define_layout( (/1,nx, 1,ny/), npes, layout )
814  if(mpp_pe()==0) print *,'LAYOUT: ', layout
815  call mpp_define_domains((/1,nx, 1,ny/), layout, domain, xhalo=halox, yhalo=haloy,&
816  & xflags=cyclic_global_domain, yflags=cyclic_global_domain)
817 
818  ! set up drifters' communicator
819  call drifters_comm_new(drfts_com)
820  call drifters_comm_set_domain(drfts_com, domain, x, y, 0,0)
821  ! repeated calls just for the fun of it
822  call drifters_comm_set_domain(drfts_com, domain, x, y, 0,0)
823  call drifters_comm_set_domain(drfts_com, domain, x, y, 0,0)
824 
825  ! create drifters
826  call drifters_core_new(drfts, nd, npmax, ermsg)
827  if(ermsg /= '') print *,ermsg
828 
829  pe = mpp_pe()
830  ids = (/ (i+100*pe, i=1,npmax) /)
831  call drifters_core_set_ids(drfts, ids, ermsg)
832  if(ermsg /= '') print *,ermsg
833 
834  ! position particles
835  if(pe == 0) then
836  positions(:, 1) = (/ (drfts_com%xcmin + drfts_com%xcmax)/2., &
837  & (drfts_com%ycmin + drfts_com%ycmax)/2. /)
838  !positions(:, 2) = (/0.,0.01/)
839  call drifters_core_set_positions(drfts, positions(:, 1:1), ermsg)
840  if(ermsg /= '') print *,ermsg
841  endif
842 
843  ! push drifters
844  velocity(:,1) = (/u0, v0/)
845  do it = 1, nt
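 ! advance the positions with a constant velocity; modulo wraps them
 ! periodically onto [xmin, xmax) (the x and y ranges coincide here)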
846  positions(:,1:drfts%np) = xmin + &
847  & modulo(drfts%positions(:,1:drfts%np) + dt*velocity(:,1:drfts%np)-xmin, xmax-xmin)
848  ! this will redistribute the drifters and update the positions
849  call drifters_comm_update(drfts_com, drfts, positions(:,1:drfts%np))
850 
851  if(drfts%np > 0) then
852  do i=1,drfts%np
853  print '(a,i6,a,i3,a,i3,a, i3, a,2f10.6)', 'PE: ',pe, ' it=', it, ' np=', drfts%np, ' ip=', i, &
854  & ' x,y=', drfts%positions(1,i), drfts%positions(2,i)
855  enddo
856  endif
857 !!$ call drifters_print(drfts, ermsg)
858 !!$ if(ermsg /= '') print *,ermsg
859  enddo
860 
861  ! clean up
862  call drifters_core_del(drfts, ermsg)
863  if(ermsg /= '') print *,ermsg
864  call drifters_comm_del(drfts_com)
865  call mpp_domains_exit
866  call mpp_exit
867 
868 end program main
869 #endif
870 ! _TEST_DRIFTERS_COMM