FMS  2026.01
Flexible Modeling System
offloading_io.F90
1 !***********************************************************************
2 !* Apache License 2.0
3 !*
4 !* This file is part of the GFDL Flexible Modeling System (FMS).
5 !*
6 !* Licensed under the Apache License, Version 2.0 (the "License");
7 !* you may not use this file except in compliance with the License.
8 !* You may obtain a copy of the License at
9 !*
10 !* http://www.apache.org/licenses/LICENSE-2.0
11 !*
12 !* FMS is distributed in the hope that it will be useful, but WITHOUT
13 !* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied;
14 !* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
15 !* PARTICULAR PURPOSE. See the License for the specific language
16 !* governing permissions and limitations under the License.
17 !***********************************************************************
18 module offloading_io_mod
19  use mpp_mod
20  use mpp_domains_mod
21  use fms_string_utils_mod, only: string
22  use fms2_io_mod
23  use metadata_transfer_mod
24  use platform_mod
25  use fms_mod
26 
27  implicit none
28 
29  integer :: current_files_init !< number of currently initialized offloading files
30  logical :: module_is_initialized !< .true. if module has been initialized
31 
32  integer :: max_files = 10 !< amount of offloaded files to allocate space for
33 
34  namelist / offloading_io_nml / max_files
35 
36  !> Structure to hold offloading file information
38  integer :: id
39  character(len=:), allocatable :: filename !< filename of the offloaded netcdf file
40  class(fmsnetcdffile_t), allocatable :: fileobj !< fms2_io file object
41  type(domain2d) :: domain_out !< domain on offloading PEs
42  end type
43 
44  !> Offload equivalent of register_axis in fms2_io_mod
45  !! Registers an axis to a netcdf file on offloaded PEs. File must have been opened with open_file_offload.
46  !! TODO: add register_unstructured_axis_offload for the unstructured grid
48  procedure :: register_netcdf_axis_offload
49  procedure :: register_domain_axis_offload
50  end interface
51 
52  !> Offload equivalent of write_data in fms2_io_mod
53  !! Writes data to a netcdf file on offloaded PEs. File must have been opened with open_file_offload.
55  procedure :: write_data_offload_2d
56  procedure :: write_data_offload_3d
57  end interface
58 
59  !> Array of offloading objects that have been initialized by this module.
60  type(offloading_obj_out), allocatable, target :: offloading_objs(:)
61 
62  private
63 
64  public :: offloading_io_init, open_file_offload
65  public :: global_metadata_offload, close_file_offload, register_axis_offload, register_field_offload
66  public :: write_data_offload
67  public :: create_cubic_domain, create_lat_lon_domain
68 
69  contains
70 
71  !> Initialize by allocating array used to keep track of offloading file objects.
72  subroutine offloading_io_init()
73  integer :: ierr, io
74  if (module_is_initialized) return
75  current_files_init = 0
76  allocate(offloading_objs(max_files))
77  module_is_initialized = .true.
78  read (input_nml_file, offloading_io_nml, iostat=io)
79  ierr = check_nml_error(io,'offloading_io_nml')
80  end subroutine offloading_io_init
81 
82  !> Open a netcdf file and set it up for offloaded writes
83  !! This routine should be called from both the model PEs and the offload PEs, with the full list
84  !! of pes for each group being provided. The model PEs will broadcast the filename and domain.
85  subroutine open_file_offload(fileobj, filename, domain_in, pe_in, pe_out)
86  class(FmsNetcdfFile_t), intent(inout) :: fileobj !< fms2_io file object
87  character(len=*), intent(in) :: filename !< filename to open
88  type(domain2D), intent(inout) :: domain_in !< model domain (from model pes)
89  integer, intent(in) :: pe_in(:) !< model pes
90  integer, intent(in) :: pe_out(:) !< offload pes
91 
92  integer, parameter :: str_len = 255
93  character(len=str_len) :: filename_out(1)
94  integer :: object_id
95  integer :: global_domain_size(2)
96  integer :: ntile
97  integer, allocatable :: all_current_pes(:)
98  integer, allocatable :: broadcasting_pes(:)
99  logical :: is_pe_out
100 
101  is_pe_out = any(pe_out .eq. mpp_pe())
102 
103  ! This should be called from the model PEs and the offload PEs
104  if (.not. module_is_initialized) &
105  call mpp_error(fatal, "offloading_io_mod is not initialized")
106 
107  allocate(all_current_pes(mpp_npes()))
108  call mpp_get_current_pelist(all_current_pes)
109 
110  filename_out(1) = ""
111  if (mpp_pe() .eq. pe_in(1)) then
112  !< The root model pe gets the domain info (ntiles and size of global domain)
113  ntile = mpp_get_ntile_count(domain_in)
114 
115  !< The number of tiles must be the same as the number of offloading pes
116  if ( mod(size(pe_out), ntile) .ne. 0 ) &
117  call mpp_error(fatal, "The number of offloading PEs must be the same as the number of tiles of the domain")
118  filename_out(1) = filename
119 
120  call mpp_get_global_domain(domain_in, xsize=global_domain_size(1), ysize=global_domain_size(2))
121  endif
122 
123  ! A "Root" model PE broadcasts the filename, domain size, and number of tiles to the offload pes
124  if (mpp_pe() .eq. pe_in(1) .or. is_pe_out) then
125  allocate(broadcasting_pes(1 + size(pe_out)))
126  broadcasting_pes(1) = pe_in(1) ! root pe
127  broadcasting_pes(2:size(broadcasting_pes)) = pe_out ! offload pes
128  call mpp_set_current_pelist( broadcasting_pes )
129  ! TODO bundle these into a single derived type to reduce the number of broadcasts
130  call mpp_broadcast(filename_out, str_len, pe_in(1))
131  call mpp_broadcast(global_domain_size, size(global_domain_size), pe_in(1))
132  call mpp_broadcast(ntile, pe_in(1))
133  endif
134 
135  ! Broadcast the domain
136  call mpp_set_current_pelist(all_current_pes)
137  if (is_pe_out) call mpp_define_null_domain(domain_in)
138  call mpp_broadcast_domain(domain_in)
139 
140  ! The offload pes inits the offloading object and return an object id
141  if (is_pe_out) then
142  call mpp_set_current_pelist(pe_out)
143  object_id = init_offloading_object(filename_out(1), global_domain_size(1), global_domain_size(2),&
144  ntile)
145  endif
146  call mpp_set_current_pelist(all_current_pes)
147  call mpp_broadcast(object_id, pe_out(1))
148 
149  ! Init the "offloading object" in the fileobj
150  call fileobj%offloading_obj_in%init(object_id, pe_out, pe_in, domain_in)
151  end subroutine open_file_offload
152 
 !> Broadcast and register a global metadata attribute on offloading PEs.
 !! Must be called on both the model PEs and the offload PEs after open_file_offload.
 !! The model side packs the attribute into a polymorphic metadata transfer object,
 !! the root model PE broadcasts it to the offload PEs, and the offload PEs register
 !! it on their netcdf file. Supported value types: r4, r8, i4, i8, character.
 subroutine global_metadata_offload(fileobj, attribute_name, attribute_value)
   class(FmsNetcdfFile_t), intent(inout) :: fileobj !< fms2_io file object
   character(len=*), intent(in) :: attribute_name !< name of the global attribute to register
   class(*), intent(in) :: attribute_value !< value of the global attribute to register (r4, r8, i4, i8, str)

   integer :: id !< index of this file's offloading object in offloading_objs
   type(offloading_obj_out), pointer :: this !< offloading object for this file

   !TODO better PEs management!
   integer, allocatable :: offloading_pes(:) !< pes performing the offloaded writes
   integer, allocatable :: model_pes(:) !< model (compute) pes
   integer, allocatable :: all_current_pes(:) !< pelist on entry, restored before returning
   integer, allocatable :: broadcasting_pes(:) !< root model pe + offload pes
   logical :: is_model_pe !< .true. on model pes
   character(len=255) :: att_name(1) !< attribute name buffer

   integer :: int_buf ! NOTE(review): written in the i4 branch but never read -- candidate for removal
   real(r4_kind), allocatable :: r4_tmp(:)
   real(r8_kind), allocatable :: r8_tmp(:)
   integer(i4_kind) , allocatable :: i4_tmp(:)
   integer(i8_kind) , allocatable :: i8_tmp(:)
   character(len=:), allocatable :: str_tmp
   class(metadata_class), allocatable :: transfer_obj !< polymorphic carrier for the broadcast

   ! Allocate the transfer object with the dynamic type matching the attribute value
   select type (attribute_value)
   type is (real(kind=r8_kind))
     allocate(metadata_r8_type :: transfer_obj)
     call transfer_obj%fms_metadata_transfer_init(real8_type)
   type is (real(kind=r4_kind))
     allocate(metadata_r4_type :: transfer_obj)
     call transfer_obj%fms_metadata_transfer_init(real4_type)
   type is (integer(kind=i4_kind))
     allocate(metadata_i4_type :: transfer_obj)
     call transfer_obj%fms_metadata_transfer_init(int4_type)
   type is (integer(kind=i8_kind))
     allocate(metadata_i8_type :: transfer_obj)
     call transfer_obj%fms_metadata_transfer_init(int8_type)
   type is (character(*))
     allocate(metadata_str_type :: transfer_obj)
     call transfer_obj%fms_metadata_transfer_init(str_type)
   class default
     call mpp_error(fatal, "Unsupported attribute type for offloading: " // string(attribute_value))
   end select

   offloading_pes = fileobj%offloading_obj_in%offloading_pes
   model_pes = fileobj%offloading_obj_in%model_pes
   is_model_pe = fileobj%offloading_obj_in%is_model_pe

   id = fileobj%offloading_obj_in%id
   this => offloading_objs(id)

   ! Only the model side knows the attribute name; stage it for the broadcast
   if (is_model_pe) then
     att_name(1) = attribute_name
     call transfer_obj%set_attribute_name(attribute_name)
   endif

   allocate(all_current_pes(mpp_npes()))
   call mpp_get_current_pelist(all_current_pes)

   ! Only the root model pe and the offload pes take part in the transfer;
   ! other model pes skip straight to the pelist restore at the bottom
   if (mpp_pe() .eq. model_pes(1) .or. .not. is_model_pe) then

     allocate(broadcasting_pes(1 + size(offloading_pes)))
     broadcasting_pes(1) = model_pes(1)
     broadcasting_pes(2:size(broadcasting_pes)) = offloading_pes
     call mpp_set_current_pelist( broadcasting_pes )

     ! For each supported type: pack on the model side, broadcast the transfer
     ! object, unpack everywhere, register on the offload side
     select type (attribute_value)
     type is (real(kind=r4_kind))
       ! TODO replace this mess with a single call if possible
       if (is_model_pe) then
         select type(transfer_obj)
         type is (metadata_r4_type)
           call transfer_obj%set_attribute_value([attribute_value])
         end select
       endif
       call transfer_obj%fms_metadata_broadcast()
       select type(transfer_obj)
       type is (metadata_r4_type)
         r4_tmp = transfer_obj%get_attribute_value()
       end select
       att_name(1) = transfer_obj%get_attribute_name()
       if (.not. is_model_pe) then
         call register_global_attribute(this%fileobj, att_name(1), r4_tmp)
       endif

     type is (real(kind=r8_kind))
       ! TODO replace this mess with a single call if possible
       if (is_model_pe) then
         select type(transfer_obj)
         type is (metadata_r8_type)
           call transfer_obj%set_attribute_value([attribute_value])
         end select
       endif
       call transfer_obj%fms_metadata_broadcast()
       select type(transfer_obj)
       type is (metadata_r8_type)
         r8_tmp = transfer_obj%get_attribute_value()
       end select
       att_name(1) = transfer_obj%get_attribute_name()
       if (.not. is_model_pe) then
         call register_global_attribute(this%fileobj, att_name(1), r8_tmp)
       endif

     type is (integer(kind=i4_kind))
       if (is_model_pe) then
         select type(transfer_obj)
         type is (metadata_i4_type)
           call transfer_obj%set_attribute_value([attribute_value])
           int_buf = attribute_value
         end select
       endif
       call transfer_obj%fms_metadata_broadcast()
       select type(transfer_obj)
       type is (metadata_i4_type)
         i4_tmp = transfer_obj%get_attribute_value()
       end select
       att_name(1) = transfer_obj%get_attribute_name()
       if (.not. is_model_pe) then
         call register_global_attribute(this%fileobj, att_name(1), i4_tmp)
       endif

     type is (integer(kind=i8_kind))
       ! TODO replace this mess with a single call if possible
       if (is_model_pe) then
         select type(transfer_obj)
         type is (metadata_i8_type)
           call transfer_obj%set_attribute_value([attribute_value])
         end select
       endif
       call transfer_obj%fms_metadata_broadcast()
       select type(transfer_obj)
       type is (metadata_i8_type)
         i8_tmp = transfer_obj%get_attribute_value()
       end select
       att_name(1) = transfer_obj%get_attribute_name()
       if (.not. is_model_pe) then
         call register_global_attribute(this%fileobj, att_name(1), i8_tmp)
       endif

     type is (character(len=*))
       ! TODO replace this mess with a single call if possible
       if (is_model_pe) then
         select type(transfer_obj)
         type is (metadata_str_type)
           call transfer_obj%set_attribute_value(attribute_value)
         end select
       endif
       call transfer_obj%fms_metadata_broadcast()
       select type(transfer_obj)
       type is (metadata_str_type)
         str_tmp = transfer_obj%get_attribute_value()
       end select
       att_name(1) = transfer_obj%get_attribute_name()
       if (.not. is_model_pe) then
         call register_global_attribute(this%fileobj, att_name(1), str_tmp)
       endif

     end select
   endif

   ! Restore the pelist that was active on entry
   call mpp_set_current_pelist(all_current_pes)
 end subroutine
316 
317  !> Register a domain axis (ie. x or y) on offloading PEs
318  subroutine register_domain_axis_offload(fileobj, axis_name, cart)
319  class(FmsNetcdfFile_t), intent(inout) :: fileobj !< fms2_io file object
320  character(len=*), intent(in) :: axis_name !< axis name to be written to file
321  character(len=1), intent(in) :: cart !< must be either 'x' or 'y' for cartesian axis
322 
323  integer :: id
324  type(offloading_obj_out), pointer :: this
325 
326  integer, allocatable :: offloading_pes(:)
327  integer, allocatable :: model_pes(:)
328  integer, allocatable :: all_current_pes(:)
329  integer, allocatable :: broadcasting_pes(:)
330  logical :: is_model_pe
331  character(len=ATTR_NAME_MAX_LENGTH) :: var_info(2)
332 
333  offloading_pes = fileobj%offloading_obj_in%offloading_pes
334  model_pes = fileobj%offloading_obj_in%model_pes
335  is_model_pe = fileobj%offloading_obj_in%is_model_pe
336 
337  id = fileobj%offloading_obj_in%id
338  this => offloading_objs(id)
339 
340  if (is_model_pe) then
341  var_info(1) = axis_name
342  var_info(2) = cart
343  endif
344 
345  allocate(all_current_pes(mpp_npes()))
346  call mpp_get_current_pelist(all_current_pes)
347 
348  if (mpp_pe() .eq. model_pes(1) .or. .not. is_model_pe) then
349  allocate(broadcasting_pes(1 + size(offloading_pes)))
350  broadcasting_pes(1) = model_pes(1)
351  broadcasting_pes(2:size(broadcasting_pes)) = offloading_pes
352  call mpp_set_current_pelist( broadcasting_pes )
353  call mpp_broadcast(var_info, 255, model_pes(1))
354  endif
355 
356  if (.not. is_model_pe) then
357  select type(file=>this%fileobj)
358  type is(fmsnetcdfdomainfile_t)
359  call register_axis(file, trim(var_info(1)), trim(var_info(2)))
360  class default
361  call mpp_error(fatal, &
362  "offloading_io_mod::register_domain_axis_offload currently only supports FmsNetcdfDomainFile_t")
363  end select
364  endif
365 
366  call mpp_set_current_pelist(all_current_pes)
367  end subroutine register_domain_axis_offload
368 
369  !> Register a netcdf axis on offloading PEs
370  subroutine register_netcdf_axis_offload(fileobj, axis_name, length)
371  class(FmsNetcdfFile_t), intent(inout) :: fileobj !< fms2_io file object
372  character(len=*), intent(in) :: axis_name !< axis name to be written to file
373  integer, intent(in) :: length !< length of the axis
374 
375  integer :: id
376  type(offloading_obj_out), pointer :: this
377 
378  !TODO better PEs management!
379  integer, allocatable :: offloading_pes(:)
380  integer, allocatable :: model_pes(:)
381  integer, allocatable :: all_current_pes(:)
382  integer, allocatable :: broadcasting_pes(:)
383  logical :: is_model_pe
384  character(len=255) :: var_axis(1)
385  integer :: var_length
386  integer :: axis_length
387 
388  offloading_pes = fileobj%offloading_obj_in%offloading_pes
389  model_pes = fileobj%offloading_obj_in%model_pes
390  is_model_pe = fileobj%offloading_obj_in%is_model_pe
391 
392  id = fileobj%offloading_obj_in%id
393  this => offloading_objs(id)
394 
395  ! get var data on root for broadcasting to offload pes
396  if (mpp_pe() .eq. model_pes(1)) then
397  var_axis(1) = trim(axis_name)
398  axis_length = len_trim(var_axis(1))
399  var_length = length
400  endif
401 
402  allocate(all_current_pes(mpp_npes()))
403  call mpp_get_current_pelist(all_current_pes)
404 
405  ! root pe broadcasts the axis name and length to offload pes
406  if (mpp_pe() .eq. model_pes(1) .or. .not. is_model_pe) then
407  allocate(broadcasting_pes(1 + size(offloading_pes)))
408  broadcasting_pes(1) = model_pes(1)
409  broadcasting_pes(2:size(broadcasting_pes)) = offloading_pes
410  call mpp_set_current_pelist( broadcasting_pes )
411  ! TODO bundle these into a single derived type to reduce the number of broadcasts
412  call mpp_broadcast(axis_length, model_pes(1))
413  call mpp_broadcast(var_axis, axis_length, model_pes(1))
414  call mpp_broadcast(var_length, model_pes(1))
415  endif
416 
417  if (.not. is_model_pe) then
418  select type(wut=>this%fileobj)
419  type is(fmsnetcdfdomainfile_t)
420  call register_axis(this%fileobj, var_axis(1)(1:axis_length), var_length)
421  class default
422  call mpp_error(fatal, &
423  "offloading_io_mod::register_netcdf_axis_offload currently only supports FmsNetcdfDomainFile_t")
424  end select
425  endif
426 
427  call mpp_set_current_pelist(all_current_pes)
428  end subroutine register_netcdf_axis_offload
429 
 !> Register a netcdf field on offloading PEs.
 !! The root model PE broadcasts the variable name, type string, and dimension
 !! names to the offload PEs, which register the field on their netcdf file.
 subroutine register_field_offload(fileobj, varname, vartype, dimensions)
   class(FmsNetcdfFile_t), intent(inout) :: fileobj !< fms2_io file object
   character(len=*), intent(in) :: varname !< name of the variable to be registered
   character(len=*), intent(in) :: vartype !< type of the variable to be registered
   !! must be one of {r4_type, r8_type, i4_type, i8_type, str_type}
   character(len=*), intent(in) :: dimensions(:) !< previously registered dimension/axis names for the variable

   integer :: id !< index of this file's offloading object in offloading_objs
   type(offloading_obj_out), pointer :: this !< offloading object for this file
   integer :: ndim !< number of dimensions of the variable

   !TODO better PEs management!
   integer, allocatable :: offloading_pes(:)
   integer, allocatable :: model_pes(:)
   integer, allocatable :: all_current_pes(:)
   integer, allocatable :: broadcasting_pes(:)
   logical :: is_model_pe
   character(len=255), allocatable :: var_info(:) !< (1) name, (2) type, (3:) dimension names

   offloading_pes = fileobj%offloading_obj_in%offloading_pes
   model_pes = fileobj%offloading_obj_in%model_pes
   is_model_pe = fileobj%offloading_obj_in%is_model_pe

   id = fileobj%offloading_obj_in%id
   this => offloading_objs(id)

   allocate(all_current_pes(mpp_npes()))
   call mpp_get_current_pelist(all_current_pes)

   ! Pack the variable metadata on the model side
   if (is_model_pe) then
     ndim = size(dimensions)

     allocate(var_info(ndim + 2))
     var_info(1) = varname
     var_info(2) = vartype
     var_info(3:) = dimensions

   endif

   ! Only the root model pe and the offload pes take part in the broadcast
   if (mpp_pe() .eq. model_pes(1) .or. .not. is_model_pe) then
     allocate(broadcasting_pes(1 + size(offloading_pes)))
     broadcasting_pes(1) = model_pes(1)
     broadcasting_pes(2:size(broadcasting_pes)) = offloading_pes
     call mpp_set_current_pelist( broadcasting_pes )
     ! ndim first, so the receivers can size var_info before the second broadcast
     call mpp_broadcast(ndim, model_pes(1))

     if (.not. is_model_pe) allocate(var_info(ndim + 2))
     call mpp_broadcast(var_info, 255, model_pes(1))
   endif

   ! Only the offload pes perform the registration
   if (.not. is_model_pe) then
     !select type(wut=>this%fileobj)
     ! type is(FmsNetcdfDomainFile_t)
     call register_field(this%fileobj, trim(var_info(1)), trim(var_info(2)), var_info(3:))
     !end select
   endif

   call mpp_set_current_pelist(all_current_pes)
 end subroutine
490 
491  !> Write 3D data to offloaded netcdf file
492  subroutine write_data_offload_3d(fileobj, varname, vardata, unlim_dim_level)
493  class(FmsNetcdfFile_t), intent(inout) :: fileobj !< fms2_io file object
494  character(len=*), intent(in) :: varname !< name of the variable to be written
495  real(kind=r4_kind), intent(in) :: vardata(:,:,:) !< 3D data to be written
496  integer, intent(in), optional :: unlim_dim_level !< level along unlimited dimension to write to
497 
498  integer :: id
499  type(offloading_obj_out), pointer :: this
500 
501  !TODO better PEs management!
502  integer, allocatable :: offloading_pes(:)
503  integer, allocatable :: model_pes(:)
504  integer, allocatable :: all_current_pes(:)
505  logical :: is_model_pe
506 
507  real(kind=r4_kind), allocatable :: var_r4_data(:,:,:)
508  type(domain2D) :: domain_out
509  type(domain2D) :: domain_in
510  integer :: isc, iec, jsc, jec, nz, redistribute_clock
511  character(len=ATTR_NAME_MAX_LENGTH) :: varname_tmp(1)
512 
513  offloading_pes = fileobj%offloading_obj_in%offloading_pes
514  model_pes = fileobj%offloading_obj_in%model_pes
515  is_model_pe = fileobj%offloading_obj_in%is_model_pe
516 
517  id = fileobj%offloading_obj_in%id
518  this => offloading_objs(id)
519 
520  allocate(all_current_pes(mpp_npes()))
521  call mpp_get_current_pelist(all_current_pes)
522 
523  redistribute_clock = mpp_clock_id( 'data_transfer' )
524  call mpp_clock_begin(redistribute_clock)
525 
526  nz = size(vardata, 3)
527  call mpp_broadcast(nz, model_pes(1))
528 
529  !Allocate space to store the data!
530  if (.not. is_model_pe) then
531  domain_out = this%domain_out
532  call mpp_get_data_domain(domain_out, isc, iec, jsc, jec)
533  allocate(var_r4_data(isc:iec, jsc:jec, nz))
534  call mpp_define_null_domain(domain_in)
535  else
536  domain_in = fileobj%offloading_obj_in%domain_in
537  call mpp_define_null_domain(domain_out)
538  endif
539 
540  ! get domain from the other pes
541  call mpp_broadcast_domain(domain_out)
542  call mpp_broadcast_domain(domain_in)
543 
544  call mpp_redistribute( domain_in, vardata, domain_out, var_r4_data)
545  call mpp_redistribute( domain_in, vardata, domain_out, var_r4_data, free=.true.)
546 
547  if(mpp_pe() .eq. model_pes(1)) then
548  varname_tmp(1) = trim(varname)
549  endif
550  call mpp_broadcast(varname_tmp, 255, model_pes(1))
551 
552  call mpp_clock_end(redistribute_clock)
553 
554  if (.not. is_model_pe) then
555  select type(wut=>this%fileobj)
556  type is(fmsnetcdfdomainfile_t)
557  if (present(unlim_dim_level)) then
558  call write_data(wut, varname, var_r4_data, unlim_dim_level=unlim_dim_level)
559  else
560  call write_data(wut, varname, var_r4_data)
561  endif
562  end select
563  endif
564  call mpp_set_current_pelist(all_current_pes)
565  end subroutine write_data_offload_3d
566 
 !> Write 2D (r4) data to an offloaded netcdf file.
 !! Called on both model and offload PEs: the model data is redistributed from the
 !! model domain to the offload domain, the variable name is broadcast from the
 !! root model PE, and the offload PEs perform the actual write.
 subroutine write_data_offload_2d(fileobj, varname, vardata)
   class(FmsNetcdfFile_t), intent(inout) :: fileobj !< fms2_io file object
   character(len=*), intent(in) :: varname !< name of the variable to be written
   real(kind=r4_kind), intent(in) :: vardata(:,:) !< 2D data to be written

   integer :: id !< index of this file's offloading object in offloading_objs
   type(offloading_obj_out), pointer :: this !< offloading object for this file

   !TODO better PEs management!
   integer, allocatable :: offloading_pes(:)
   integer, allocatable :: model_pes(:)
   integer, allocatable :: all_current_pes(:)
   logical :: is_model_pe

   real(kind=r4_kind), allocatable :: var_r4_data(:,:) !< receive buffer on offload pes
   type(domain2D) :: domain_out
   type(domain2D) :: domain_in
   integer :: isc, iec, jsc, jec
   character(len=255) :: varname_tmp(1) !< broadcast buffer for the variable name

   offloading_pes = fileobj%offloading_obj_in%offloading_pes
   model_pes = fileobj%offloading_obj_in%model_pes
   is_model_pe = fileobj%offloading_obj_in%is_model_pe

   id = fileobj%offloading_obj_in%id
   this => offloading_objs(id)

   allocate(all_current_pes(mpp_npes()))
   call mpp_get_current_pelist(all_current_pes)

   !Allocate space to store the data!
   if (.not. is_model_pe) then
     domain_out = this%domain_out
     ! NOTE(review): this sizes the buffer on the *compute* domain, whereas the
     ! 3d version uses the data domain -- confirm which one is intended
     call mpp_get_compute_domain(domain_out, isc, iec, jsc, jec)
     allocate(var_r4_data(isc:iec, jsc:jec))
     call mpp_define_null_domain(domain_in)
   else
     domain_in = fileobj%offloading_obj_in%domain_in
     call mpp_define_null_domain(domain_out)
   endif

   ! Each side broadcasts its (possibly null) domain so both are known everywhere
   call mpp_broadcast_domain(domain_out)
   call mpp_broadcast_domain(domain_in)

   ! redistribute data from model domain to offload domain and then free memory for future calls
   call mpp_redistribute( domain_in, vardata, domain_out, var_r4_data)
   call mpp_redistribute( domain_in, vardata, domain_out, var_r4_data, free=.true.)

   ! broadcast the variable name
   if(mpp_pe() .eq. model_pes(1)) then
     varname_tmp(1) = trim(varname)
   endif
   call mpp_broadcast(varname_tmp, 255, model_pes(1))

   ! Only the offload pes perform the actual write, and only for domain files
   if (.not. is_model_pe) then
     select type(wut=>this%fileobj)
     type is(fmsnetcdfdomainfile_t)
       call write_data(wut, varname_tmp(1), var_r4_data)
     end select
   endif
   call mpp_set_current_pelist(all_current_pes)
 end subroutine write_data_offload_2d
630 
631  !> Close offloaded netcdf file
632  subroutine close_file_offload(fileobj)
633  class(FmsNetcdfFile_t), intent(inout) :: fileobj !> fms2_io file object to close
634 
635  integer :: id
636  type(offloading_obj_out), pointer :: this
637  logical :: is_model_pe
638 
639  id = fileobj%offloading_obj_in%id
640  this => offloading_objs(id)
641  is_model_pe = fileobj%offloading_obj_in%is_model_pe
642 
643  if (.not. is_model_pe) call close_file(this%fileobj)
644  end subroutine close_file_offload
645 
 !> Initialize an offloading object on offload PEs.
 !! Claims the next free slot in the module-level offloading_objs array, builds the
 !! output domain for the given tile count (1 = lat-lon, 6 = cubed-sphere), and opens
 !! the netcdf file in "overwrite" mode as a domain-decomposed file.
 !! @return the index (id) of the new offloading object in offloading_objs
 integer function init_offloading_object(filename, nx, ny, ntile)
   character(len=*), intent(in) :: filename !< filename to open
   integer, intent(in) :: nx !< x size of the global domain
   integer, intent(in) :: ny !< y size of the global domain
   integer, intent(in) :: ntile !< number of tiles, only supports 1 for lat-lon or 6 for cubed-sphere

   type(offloading_obj_out), pointer :: this !< slot claimed for this file
   integer, allocatable :: curr_pelist(:)

   current_files_init = current_files_init + 1
   if (current_files_init .gt. max_files) &
     call mpp_error(fatal, "The number of files is too large")

   ! An array of offloading objects is stored in this module, the "id" is the index of the object in the array
   this => offloading_objs(current_files_init)
   this%id = current_files_init
   this%filename = trim(filename)

   ! does npes return current pelist or total??
   ! NOTE(review): the caller (open_file_offload) sets the current pelist to the
   ! offload pes before calling this routine, so this is presumably the offload
   ! pelist -- TODO confirm and drop the question above
   allocate(curr_pelist(mpp_npes()))
   call mpp_get_current_pelist(curr_pelist)

   ! Build the output domain matching the model grid topology
   select case (ntile)
   case (1)
     this%domain_out = create_lat_lon_domain(nx, ny)
   case (6)
     this%domain_out = create_cubic_domain(nx, ny, ntile, (/1,1/), offload_pes=curr_pelist)
   case default
     call mpp_error(fatal, "Unsupported number of tiles for offloading: " // trim(adjustl(string(ntile))))
   end select

   ! Allocate the file object with the domain-decomposed dynamic type and open it
   allocate(fmsnetcdfdomainfile_t :: this%fileobj)
   select type (fileobj => this%fileobj)
   type is (fmsnetcdfdomainfile_t)
     if ( .not. open_file(fileobj, trim(this%filename), "overwrite", this%domain_out)) &
       call mpp_error(fatal, "Error opening file")
   end select

   init_offloading_object = current_files_init
 end function
687 
688  !! TODO move this somewhere else
689  function create_lat_lon_domain(nx_in, ny_in, halox, haloy ) &
690  result(domain_out)
691  integer, intent(in) :: nx_in !< number of lat-lon grid points in x direction
692  integer, intent(in) :: ny_in !< number of lat-lon grid points in y direction
693  integer, intent(in), optional :: halox !< number of halo points in x direction
694  integer, intent(in), optional :: haloy !< number of halo points in y direction
695  type(domain2d) :: domain_out
696  integer :: layout(2)
697 
698  call mpp_define_layout( (/1,nx_in,1,ny_in/), mpp_npes(), layout )
699  call mpp_define_domains( (/1,nx_in,1,ny_in/), layout, domain_out, xhalo=halox, yhalo=haloy)
700  call mpp_define_io_domain(domain_out, (/1,1/))
701  end function create_lat_lon_domain
702 
703  !! TODO move this somewhere else
704  function create_cubic_domain(nx_in, ny_in, ntiles, io_layout, nhalos, offload_pes, layout) &
705  result(domain_out)
706  integer, intent(in) :: nx_in !< number of grid points in x direction per tile
707  integer, intent(in) :: ny_in !< number of grid points in y direction per tile
708  integer, intent(in) :: ntiles !< number of tiles, must be 6 for cubed-sphere
709  integer, intent(in) :: io_layout(2) !< layout for I/O operations
710  integer, intent(in), optional :: nhalos !< number of halo points
711  integer, intent(in), optional :: offload_pes(:) !< list of PEs used for offloading write operations
712  integer, optional :: layout(2) !< layout to be used for each tile)
713 
714  type(domain2d) :: domain_out
715 
716  integer :: npes
717  integer :: npes_per_tile
718  integer :: layout_tmp(2)
719  integer, allocatable :: global_indices(:,:)
720  integer, allocatable :: layout2D(:,:)
721  integer, allocatable :: pe_start(:)
722  integer, allocatable :: pe_end(:)
723  integer :: n
724 
725  npes = mpp_npes()
726  if( mod(npes, ntiles) .NE. 0 ) call mpp_error(fatal, &
727  "create_cubic_domain: npes is not divisible by ntiles")
728 
729  npes_per_tile = npes/ntiles
730  if( .not. present(layout)) then
731  call mpp_define_layout ((/1,nx_in,1,ny_in/), npes_per_tile, layout_tmp )
732  else
733  layout_tmp = layout
734  endif
735 
736  allocate(global_indices(4, ntiles))
737  allocate(layout2d(2, ntiles))
738  allocate(pe_start(ntiles), pe_end(ntiles))
739 
740  do n = 1, ntiles
741  global_indices(:,n) = (/1,nx_in,1,ny_in/)
742  layout2d(:,n) = layout_tmp
743  if( present(offload_pes)) then
744  pe_start(n) = offload_pes((n-1)*npes_per_tile+1)
745  pe_end(n) = offload_pes((n)*npes_per_tile)
746  else
747  pe_start(n) = (n-1)*npes_per_tile
748  pe_end(n) = n*npes_per_tile-1
749  endif
750  end do
751 
752  print *, "pe: ", mpp_pe(), "creating mosaic with pe_start", pe_start, "pe_end", pe_end
753 
754  call define_cubic_mosaic(domain_out, (/nx_in,nx_in,nx_in,nx_in,nx_in,nx_in/), &
755  (/ny_in,ny_in,ny_in,ny_in,ny_in,ny_in/), &
756  global_indices, layout2d, pe_start, pe_end, io_layout, nhalos )
757  end function create_cubic_domain
758 
 !> @brief Initialize a cubed-sphere atmosphere domain.
 !! TODO move this somewhere else
761  subroutine define_cubic_mosaic(domain, ni, nj, global_indices, layout, pe_start, pe_end, &
762  io_layout, nhalos)
763 
764  integer, dimension(:), intent(in) :: ni !< number of grid points in i direction per tile
765  integer, dimension(:), intent(in) :: nj !< number of grid points in j direction per tile
766  integer, dimension(:,:), intent(in) :: global_indices !< global indices for each tile
767  integer, dimension(:,:), intent(in) :: layout !< array of layouts for each tile
768  integer, dimension(:), intent(in) :: pe_start !< starting PE for each tile
769  integer, dimension(:), intent(in) :: pe_end !< ending PE for each tile
770  integer, dimension(2), intent(in) :: io_layout !< layout for I/O operations
771  type(domain2d), intent(inout) :: domain !< A cubed-sphere domain.
772  integer, optional, intent(in) :: nhalos !< number of halo points
773 
774  integer, dimension(12) :: tile1
775  integer, dimension(12) :: tile2
776  integer, dimension(12) :: istart1
777  integer, dimension(12) :: iend1
778  integer, dimension(12) :: jstart1
779  integer, dimension(12) :: jend1
780  integer, dimension(12) :: istart2
781  integer, dimension(12) :: iend2
782  integer, dimension(12) :: jstart2
783  integer, dimension(12) :: jend2
784  integer :: ntiles
785  integer :: num_contact
786  integer, dimension(2) :: msize
787  integer :: whalo
788  integer :: ehalo
789  integer :: shalo
790  integer :: nhalo
791 
792  ntiles = 6
793  num_contact = 12
794 
795  whalo = 2
796  if (present(nhalos)) whalo = nhalos
797  ehalo = whalo
798  shalo = whalo
799  nhalo = whalo
800 
801  if (size(pe_start) .ne. 6 .or. size(pe_end) .ne. 6 ) then
802  call mpp_error(fatal, "size of pe_start and pe_end should be 6.")
803  endif
804  if (size(global_indices,1) .ne. 4) then
805  call mpp_error(fatal, "size of first dimension of global_indices should be 4.")
806  endif
807  if (size(global_indices,2) .ne. 6) then
808  call mpp_error(fatal, "size of second dimension of global_indices should be 6.")
809  endif
810  if (size(layout,1) .ne. 2) then
811  call mpp_error(fatal, "size of first dimension of layout should be 2.")
812  endif
813  if (size(layout,2) .ne. 6) then
814  call mpp_error(fatal, "size of second dimension of layout should be 6.")
815  endif
816  if (size(ni) .ne. 6 .or. size(nj) .ne. 6) then
817  call mpp_error(fatal, "size of ni and nj should be 6.")
818  endif
819 
820  !Contact line 1, between tile 1 (EAST) and tile 2 (WEST)
821  tile1(1) = 1
822  tile2(1) = 2
823  istart1(1) = ni(1)
824  iend1(1) = ni(1)
825  jstart1(1) = 1
826  jend1(1) = nj(1)
827  istart2(1) = 1
828  iend2(1) = 1
829  jstart2(1) = 1
830  jend2(1) = nj(2)
831 
832  !Contact line 2, between tile 1 (NORTH) and tile 3 (WEST)
833  tile1(2) = 1
834  tile2(2) = 3
835  istart1(2) = 1
836  iend1(2) = ni(1)
837  jstart1(2) = nj(1)
838  jend1(2) = nj(1)
839  istart2(2) = 1
840  iend2(2) = 1
841  jstart2(2) = nj(3)
842  jend2(2) = 1
843 
844  !Contact line 3, between tile 1 (WEST) and tile 5 (NORTH)
845  tile1(3) = 1
846  tile2(3) = 5
847  istart1(3) = 1
848  iend1(3) = 1
849  jstart1(3) = 1
850  jend1(3) = nj(1)
851  istart2(3) = ni(5)
852  iend2(3) = 1
853  jstart2(3) = nj(5)
854  jend2(3) = nj(5)
855 
856  !Contact line 4, between tile 1 (SOUTH) and tile 6 (NORTH)
857  tile1(4) = 1
858  tile2(4) = 6
859  istart1(4) = 1
860  iend1(4) = ni(1)
861  jstart1(4) = 1
862  jend1(4) = 1
863  istart2(4) = 1
864  iend2(4) = ni(6)
865  jstart2(4) = nj(6)
866  jend2(4) = nj(6)
867 
868  !Contact line 5, between tile 2 (NORTH) and tile 3 (SOUTH)
869  tile1(5) = 2
870  tile2(5) = 3
871  istart1(5) = 1
872  iend1(5) = ni(2)
873  jstart1(5) = nj(2)
874  jend1(5) = nj(2)
875  istart2(5) = 1
876  iend2(5) = ni(3)
877  jstart2(5) = 1
878  jend2(5) = 1
879 
880  !Contact line 6, between tile 2 (EAST) and tile 4 (SOUTH)
881  tile1(6) = 2
882  tile2(6) = 4
883  istart1(6) = ni(2)
884  iend1(6) = ni(2)
885  jstart1(6) = 1
886  jend1(6) = nj(2)
887  istart2(6) = ni(4)
888  iend2(6) = 1
889  jstart2(6) = 1
890  jend2(6) = 1
891 
892  !Contact line 7, between tile 2 (SOUTH) and tile 6 (EAST)
893  tile1(7) = 2
894  tile2(7) = 6
895  istart1(7) = 1
896  iend1(7) = ni(2)
897  jstart1(7) = 1
898  jend1(7) = 1
899  istart2(7) = ni(6)
900  iend2(7) = ni(6)
901  jstart2(7) = nj(6)
902  jend2(7) = 1
903 
904  !Contact line 8, between tile 3 (EAST) and tile 4 (WEST)
905  tile1(8) = 3
906  tile2(8) = 4
907  istart1(8) = ni(3)
908  iend1(8) = ni(3)
909  jstart1(8) = 1
910  jend1(8) = nj(3)
911  istart2(8) = 1
912  iend2(8) = 1
913  jstart2(8) = 1
914  jend2(8) = nj(4)
915 
916  !Contact line 9, between tile 3 (NORTH) and tile 5 (WEST)
917  tile1(9) = 3
918  tile2(9) = 5
919  istart1(9) = 1
920  iend1(9) = ni(3)
921  jstart1(9) = nj(3)
922  jend1(9) = nj(3)
923  istart2(9) = 1
924  iend2(9) = 1
925  jstart2(9) = nj(5)
926  jend2(9) = 1
927 
928  !Contact line 10, between tile 4 (NORTH) and tile 5 (SOUTH)
929  tile1(10) = 4
930  tile2(10) = 5
931  istart1(10) = 1
932  iend1(10) = ni(4)
933  jstart1(10) = nj(4)
934  jend1(10) = nj(4)
935  istart2(10) = 1
936  iend2(10) = ni(5)
937  jstart2(10) = 1
938  jend2(10) = 1
939 
940  !Contact line 11, between tile 4 (EAST) and tile 6 (SOUTH)
941  tile1(11) = 4
942  tile2(11) = 6
943  istart1(11) = ni(4)
944  iend1(11) = ni(4)
945  jstart1(11) = 1
946  jend1(11) = nj(4)
947  istart2(11) = ni(6)
948  iend2(11) = 1
949  jstart2(11) = 1
950  jend2(11) = 1
951 
952  !Contact line 12, between tile 5 (EAST) and tile 6 (WEST)
953  tile1(12) = 5
954  tile2(12) = 6
955  istart1(12) = ni(5)
956  iend1(12) = ni(5)
957  jstart1(12) = 1
958  jend1(12) = nj(5)
959  istart2(12) = 1
960  iend2(12) = 1
961  jstart2(12) = 1
962  jend2(12) = nj(6)
963  msize(1) = maxval(ni(:)/layout(1,:)) + whalo + ehalo + 1
964  msize(2) = maxval(nj(:)/layout(2,:)) + shalo + nhalo + 1
965  call mpp_define_mosaic(global_indices, layout, domain, ntiles, num_contact, tile1, &
966  tile2, istart1, iend1, jstart1, jend1, istart2, iend2, &
967  jstart2, jend2, pe_start, pe_end, symmetry = .true., &
968  whalo=whalo, ehalo=ehalo, shalo=shalo, nhalo=nhalo, &
969  name=trim("Cubed-sphere"), memory_size=msize)
970  call mpp_define_io_domain(domain, io_layout)
971  end subroutine define_cubic_mosaic
972 end module offloading_io_mod
Close a netcdf or domain file opened with open_file or open_virtual_file.
Definition: fms2_io.F90:165
Opens a given netcdf or domain file.
Definition: fms2_io.F90:121
Add a dimension to a given file.
Definition: fms2_io.F90:189
Defines a new field within the given file Example usage:
Definition: fms2_io.F90:211
Write data to a defined field within a file Example usage:
Definition: fms2_io.F90:261
integer function, public check_nml_error(IOSTAT, NML_NAME)
Checks the iostat argument that is returned after reading a namelist and determines if the error code...
Definition: fms.F90:523
character(:) function, allocatable, public string(v, fmt)
Converts a number or a Boolean value to a string.
integer function mpp_get_ntile_count(domain)
Returns number of tiles in mosaic.
subroutine mpp_define_io_domain(domain, io_layout)
Define the layout for IO pe's for the given domain.
subroutine mpp_define_mosaic(global_indices, layout, domain, num_tile, num_contact, tile1, tile2, istart1, iend1, jstart1, jend1, istart2, iend2, jstart2, jend2, pe_start, pe_end, pelist, whalo, ehalo, shalo, nhalo, xextent, yextent, maskmap, name, memory_size, symmetry, xflags, yflags, tile_id)
Defines a domain for mosaic tile grids.
Broadcasts domain to every pe. Only useful outside the context of its own pelist.
Set up a domain decomposition.
Retrieve layout associated with a domain decomposition. Given a global 2D domain and the number of di...
Defines a nullified 1D or 2D domain.
These routines retrieve the axis specifications associated with the compute domains....
These routines retrieve the axis specifications associated with the data domains. The domain is a der...
These routines retrieve the axis specifications associated with the global domains....
Reorganization of distributed global arrays. mpp_redistribute is used to reorganize a distributed ar...
The domain2D type contains all the necessary information to define the global, compute and data domai...
subroutine mpp_set_current_pelist(pelist, no_sync)
Set context pelist.
Definition: mpp_util.inc:498
integer function mpp_npes()
Returns processor count for current pelist.
Definition: mpp_util.inc:420
integer function mpp_pe()
Returns processor ID.
Definition: mpp_util.inc:406
integer function mpp_clock_id(name, flags, grain)
Return an ID for a new or existing clock.
Definition: mpp_util.inc:713
Perform parallel broadcasts.
Definition: mpp.F90:1104
Error handler.
Definition: mpp.F90:381
Offload equivalent of register_axis in fms2_io_mod Registers an axis to a netcdf file on offloaded PE...
Offload equivalent of write_data in fms2_io_mod Writes data to a netcdf file on offloaded PEs....
Metadata class for integer(kind=4) attribute values.
Metadata class for integer(kind=8) attribute values.
Metadata class for real(kind=4) attribute values.
Metadata class for real(kind=8) attribute values.
Metadata class for string attribute values.
Structure to hold offloading file information.