MOM_grid.F90
1 !> Provides the ocean grid type
2 module mom_grid
3 
4 ! This file is part of MOM6. See LICENSE.md for the license.
5 
6 use mom_hor_index, only : hor_index_type, hor_index_init
7 use mom_domains, only : mom_domain_type, get_domain_extent, compute_block_extent
8 use mom_domains, only : get_global_shape, get_domain_extent_dsamp2
9 use mom_error_handler, only : mom_error, mom_mesg, fatal
10 use mom_file_parser, only : get_param, log_param, log_version, param_file_type
11 
12 implicit none ; private
13 
14 #include <MOM_memory.h>
15 
16 public mom_grid_init, mom_grid_end, set_derived_metrics, set_first_direction
17 public ispointincell, hor_index_type, get_global_grid_size, rescale_grid_bathymetry
18 
19 ! A note on unit descriptions in comments: MOM6 uses units that can be rescaled for dimensional
20 ! consistency testing. These are noted in comments with units like Z, H, L, and T, along with
21 ! their mks counterparts with notation like "a velocity [Z T-1 ~> m s-1]". If the units
22 ! vary with the Boussinesq approximation, the Boussinesq variant is given first.
23 
24 !> Ocean grid type. See mom_grid for details.
25 type, public :: ocean_grid_type
26  type(mom_domain_type), pointer :: domain => null() !< Ocean model domain
27  type(mom_domain_type), pointer :: domain_aux => null() !< A non-symmetric auxiliary domain type.
28  type(hor_index_type) :: hi !< Horizontal index ranges
29  type(hor_index_type) :: hid2 !< Horizontal index ranges for level-2-downsampling
30 
31  integer :: isc !< The start i-index of cell centers within the computational domain
32  integer :: iec !< The end i-index of cell centers within the computational domain
33  integer :: jsc !< The start j-index of cell centers within the computational domain
34  integer :: jec !< The end j-index of cell centers within the computational domain
35 
36  integer :: isd !< The start i-index of cell centers within the data domain
37  integer :: ied !< The end i-index of cell centers within the data domain
38  integer :: jsd !< The start j-index of cell centers within the data domain
39  integer :: jed !< The end j-index of cell centers within the data domain
40 
41  integer :: isg !< The start i-index of cell centers within the global domain
42  integer :: ieg !< The end i-index of cell centers within the global domain
43  integer :: jsg !< The start j-index of cell centers within the global domain
44  integer :: jeg !< The end j-index of cell centers within the global domain
45 
46  integer :: iscb !< The start i-index of cell vertices within the computational domain
47  integer :: iecb !< The end i-index of cell vertices within the computational domain
48  integer :: jscb !< The start j-index of cell vertices within the computational domain
49  integer :: jecb !< The end j-index of cell vertices within the computational domain
50 
51  integer :: isdb !< The start i-index of cell vertices within the data domain
52  integer :: iedb !< The end i-index of cell vertices within the data domain
53  integer :: jsdb !< The start j-index of cell vertices within the data domain
54  integer :: jedb !< The end j-index of cell vertices within the data domain
55 
56  integer :: isgb !< The start i-index of cell vertices within the global domain
57  integer :: iegb !< The end i-index of cell vertices within the global domain
58  integer :: jsgb !< The start j-index of cell vertices within the global domain
59  integer :: jegb !< The end j-index of cell vertices within the global domain
60 
61  integer :: isd_global !< The value of isd in the global index space (decomposition invariant).
62  integer :: jsd_global !< The value of jsd in the global index space (decomposition invariant).
63  integer :: idg_offset !< The offset between the corresponding global and local i-indices.
64  integer :: jdg_offset !< The offset between the corresponding global and local j-indices.
65  integer :: ke !< The number of layers in the vertical.
66  logical :: symmetric !< True if symmetric memory is used.
67  logical :: nonblocking_updates !< If true, non-blocking halo updates are
68  !! allowed. The default is .false. (for now).
69  integer :: first_direction !< An integer that indicates which direction is
70  !! to be updated first in directionally split
71  !! parts of the calculation. This can be altered
72  !! during the course of the run via calls to
73  !! set_first_direction.
74 
75  real ALLOCABLE_, dimension(NIMEM_,NJMEM_) :: &
76  mask2dt, & !< 0 for land points and 1 for ocean points on the h-grid. Nondim.
77  geolatt, & !< The geographic latitude at h points in degrees of latitude or m.
78  geolont, & !< The geographic longitude at h points in degrees of longitude or m.
79  dxt, & !< dxT is delta x at h points [m].
80  idxt, & !< 1/dxT [m-1].
81  dyt, & !< dyT is delta y at h points [m].
82  idyt, & !< IdyT is 1/dyT [m-1].
83  areat, & !< The area of an h-cell [m2].
84  iareat, & !< 1/areaT [m-2].
85  sin_rot, & !< The sine of the angular rotation between the local model grid's northward
86  !! and the true northward directions.
87  cos_rot !< The cosine of the angular rotation between the local model grid's northward
88  !! and the true northward directions.
89 
90  real ALLOCABLE_, dimension(NIMEMB_PTR_,NJMEM_) :: &
91  mask2dcu, & !< 0 for boundary points and 1 for ocean points on the u grid. Nondim.
92  geolatcu, & !< The geographic latitude at u points in degrees of latitude or m.
93  geoloncu, & !< The geographic longitude at u points in degrees of longitude or m.
94  dxcu, & !< dxCu is delta x at u points [m].
95  idxcu, & !< 1/dxCu [m-1].
96  dycu, & !< dyCu is delta y at u points [m].
97  idycu, & !< 1/dyCu [m-1].
98  dy_cu, & !< The unblocked lengths of the u-faces of the h-cell [m].
99  iareacu, & !< The masked inverse areas of u-grid cells [m-2].
100  areacu !< The areas of the u-grid cells [m2].
101 
102  real ALLOCABLE_, dimension(NIMEM_,NJMEMB_PTR_) :: &
103  mask2dcv, & !< 0 for boundary points and 1 for ocean points on the v grid. Nondim.
104  geolatcv, & !< The geographic latitude at v points in degrees of latitude or m.
105  geoloncv, & !< The geographic longitude at v points in degrees of longitude or m.
106  dxcv, & !< dxCv is delta x at v points [m].
107  idxcv, & !< 1/dxCv [m-1].
108  dycv, & !< dyCv is delta y at v points [m].
109  idycv, & !< 1/dyCv [m-1].
110  dx_cv, & !< The unblocked lengths of the v-faces of the h-cell [m].
111  iareacv, & !< The masked inverse areas of v-grid cells [m-2].
112  areacv !< The areas of the v-grid cells [m2].
113 
114  real ALLOCABLE_, dimension(NIMEMB_PTR_,NJMEMB_PTR_) :: &
115  mask2dbu, & !< 0 for boundary points and 1 for ocean points on the q grid. Nondim.
116  geolatbu, & !< The geographic latitude at q points in degrees of latitude or m.
117  geolonbu, & !< The geographic longitude at q points in degrees of longitude or m.
118  dxbu, & !< dxBu is delta x at q points [m].
119  idxbu, & !< 1/dxBu [m-1].
120  dybu, & !< dyBu is delta y at q points [m].
121  idybu, & !< 1/dyBu [m-1].
122  areabu, & !< areaBu is the area of a q-cell [m2]
123  iareabu !< IareaBu = 1/areaBu [m-2].
124 
125  real, pointer, dimension(:) :: &
126  gridlatt => null(), & !< The latitude of T points for the purpose of labeling the output axes.
127  !! On many grids this is the same as geoLatT.
128  gridlatb => null() !< The latitude of B points for the purpose of labeling the output axes.
129  !! On many grids this is the same as geoLatBu.
130  real, pointer, dimension(:) :: &
131  gridlont => null(), & !< The longitude of T points for the purpose of labeling the output axes.
132  !! On many grids this is the same as geoLonT.
133  gridlonb => null() !< The longitude of B points for the purpose of labeling the output axes.
134  !! On many grids this is the same as geoLonBu.
135  character(len=40) :: &
136  x_axis_units, & !< The units that are used in labeling the x coordinate axes.
137  y_axis_units !< The units that are used in labeling the y coordinate axes.
138 
139  real ALLOCABLE_, dimension(NIMEM_,NJMEM_) :: &
140  bathyt !< Ocean bottom depth at tracer points, in depth units [Z ~> m].
141 
142  logical :: bathymetry_at_vel !< If true, there are separate values for the
143  !! basin depths at velocity points. Otherwise the effects
144  !! of topography are entirely determined from thickness points.
145  real ALLOCABLE_, dimension(NIMEMB_PTR_,NJMEM_) :: &
146  dblock_u, & !< Topographic depths at u-points at which the flow is blocked [Z ~> m].
147  dopen_u !< Topographic depths at u-points at which the flow is open at width dy_Cu [Z ~> m].
148  real ALLOCABLE_, dimension(NIMEM_,NJMEMB_PTR_) :: &
149  dblock_v, & !< Topographic depths at v-points at which the flow is blocked [Z ~> m].
150  dopen_v !< Topographic depths at v-points at which the flow is open at width dx_Cv [Z ~> m].
151  real ALLOCABLE_, dimension(NIMEMB_PTR_,NJMEMB_PTR_) :: &
152  coriolisbu !< The Coriolis parameter at corner points [T-1 ~> s-1].
153  real ALLOCABLE_, dimension(NIMEM_,NJMEM_) :: &
154  df_dx, & !< Derivative d/dx f (Coriolis parameter) at h-points [T-1 m-1 ~> s-1 m-1].
155  df_dy !< Derivative d/dy f (Coriolis parameter) at h-points [T-1 m-1 ~> s-1 m-1].
156  real :: g_earth !< The gravitational acceleration [m2 Z-1 s-2 ~> m s-2].
157 
158  ! These variables are global sums that are useful for 1-d diagnostics
159  real :: areat_global !< Global sum of h-cell area [m2]
160  real :: iareat_global !< The inverse of the global sum of h-cell area (1/areaT_global) [m-2].
161 
162  ! These variables are for block structures.
163  integer :: nblocks !< The number of sub-PE blocks on this PE
164  type(hor_index_type), pointer :: block(:) => null() !< Index ranges for each block
165 
166  ! These parameters are run-time parameters that are used during some
167  ! initialization routines (but not all)
168  real :: south_lat !< The latitude (or y-coordinate) of the first v-line
169  real :: west_lon !< The longitude (or x-coordinate) of the first u-line
170  real :: len_lat = 0. !< The latitudinal (or y-coord) extent of physical domain
171  real :: len_lon = 0. !< The longitudinal (or x-coord) extent of physical domain
172  real :: rad_earth = 6.378e6 !< The radius of the planet [m].
173  real :: max_depth !< The maximum depth of the ocean in depth units [Z ~> m].
174 end type ocean_grid_type
175 
176 contains
177 
178 !> MOM_grid_init initializes the ocean grid array sizes and grid memory.
179 subroutine mom_grid_init(G, param_file, HI, global_indexing, bathymetry_at_vel)
180  type(ocean_grid_type), intent(inout) :: g !< The horizontal grid type
181  type(param_file_type), intent(in) :: param_file !< Parameter file handle
182  type(hor_index_type), &
183  optional, intent(in) :: hi !< A hor_index_type for array extents
184  logical, optional, intent(in) :: global_indexing !< If true use global index
185  !! values instead of having the data domain on each
186  !! processor start at 1.
187  logical, optional, intent(in) :: bathymetry_at_vel !< If true, there are
188  !! separate values for the ocean bottom depths at
189  !! velocity points. Otherwise the effects of topography
190  !! are entirely determined from thickness points.
191 
192 ! This include declares and sets the variable "version".
193 #include "version_variable.h"
194  integer :: isd, ied, jsd, jed, nk
195  integer :: isdb, iedb, jsdb, jedb
196  integer :: ied_max, jed_max
197  integer :: niblock, njblock, nihalo, njhalo, nblocks, n, i, j
198  logical :: local_indexing ! If false use global index values instead of having
199  ! the data domain on each processor start at 1.
200 
201  integer, allocatable, dimension(:) :: ibegin, iend, jbegin, jend
202  character(len=40) :: mod_nm = "MOM_grid" ! This module's name.
203 
204 
205  ! Read all relevant parameters and write them to the model log.
206  call log_version(param_file, mod_nm, version, &
207  "Parameters providing information about the lateral grid.")
208 
209 
210  call get_param(param_file, mod_nm, "NIBLOCK", niblock, "The number of blocks "// &
211  "in the x-direction on each processor (for openmp).", default=1, &
212  layoutparam=.true.)
213  call get_param(param_file, mod_nm, "NJBLOCK", njblock, "The number of blocks "// &
214  "in the y-direction on each processor (for openmp).", default=1, &
215  layoutparam=.true.)
216 
217  if (present(hi)) then
218  g%HI = hi
219 
220  g%isc = hi%isc ; g%iec = hi%iec ; g%jsc = hi%jsc ; g%jec = hi%jec
221  g%isd = hi%isd ; g%ied = hi%ied ; g%jsd = hi%jsd ; g%jed = hi%jed
222  g%isg = hi%isg ; g%ieg = hi%ieg ; g%jsg = hi%jsg ; g%jeg = hi%jeg
223 
224  g%IscB = hi%IscB ; g%IecB = hi%IecB ; g%JscB = hi%JscB ; g%JecB = hi%JecB
225  g%IsdB = hi%IsdB ; g%IedB = hi%IedB ; g%JsdB = hi%JsdB ; g%JedB = hi%JedB
226  g%IsgB = hi%IsgB ; g%IegB = hi%IegB ; g%JsgB = hi%JsgB ; g%JegB = hi%JegB
227 
228  g%idg_offset = hi%idg_offset ; g%jdg_offset = hi%jdg_offset
229  g%isd_global = g%isd + hi%idg_offset ; g%jsd_global = g%jsd + hi%jdg_offset
230  g%symmetric = hi%symmetric
231  else
232  local_indexing = .true.
233  if (present(global_indexing)) local_indexing = .not.global_indexing
234  call hor_index_init(g%Domain, g%HI, param_file, &
235  local_indexing=local_indexing)
236 
237  ! get_domain_extent ensures that domains start at 1 for compatibility between
238  ! static and dynamically allocated arrays, unless global_indexing is true.
239  call get_domain_extent(g%Domain, g%isc, g%iec, g%jsc, g%jec, &
240  g%isd, g%ied, g%jsd, g%jed, &
241  g%isg, g%ieg, g%jsg, g%jeg, &
242  g%idg_offset, g%jdg_offset, g%symmetric, &
243  local_indexing=local_indexing)
244  g%isd_global = g%isd+g%idg_offset ; g%jsd_global = g%jsd+g%jdg_offset
245  endif
246 
247  g%nonblocking_updates = g%Domain%nonblocking_updates
248 
249  ! Set array sizes for fields that are discretized at tracer cell boundaries.
250  g%IscB = g%isc ; g%JscB = g%jsc
251  g%IsdB = g%isd ; g%JsdB = g%jsd
252  g%IsgB = g%isg ; g%JsgB = g%jsg
253  if (g%symmetric) then
254  g%IscB = g%isc-1 ; g%JscB = g%jsc-1
255  g%IsdB = g%isd-1 ; g%JsdB = g%jsd-1
256  g%IsgB = g%isg-1 ; g%JsgB = g%jsg-1
257  endif
258  g%IecB = g%iec ; g%JecB = g%jec
259  g%IedB = g%ied ; g%JedB = g%jed
260  g%IegB = g%ieg ; g%JegB = g%jeg
261 
262  call mom_mesg(" MOM_grid.F90, MOM_grid_init: allocating metrics", 5)
263 
264  call allocate_metrics(g)
265 
266  isd = g%isd ; ied = g%ied ; jsd = g%jsd ; jed = g%jed
267  isdb = g%IsdB ; iedb = g%IedB ; jsdb = g%JsdB ; jedb = g%JedB
268 
269  g%bathymetry_at_vel = .false.
270  if (present(bathymetry_at_vel)) g%bathymetry_at_vel = bathymetry_at_vel
271  if (g%bathymetry_at_vel) then
272  ALLOC_(g%Dblock_u(isdb:iedb, jsd:jed)) ; g%Dblock_u(:,:) = 0.0
273  ALLOC_(g%Dopen_u(isdb:iedb, jsd:jed)) ; g%Dopen_u(:,:) = 0.0
274  ALLOC_(g%Dblock_v(isd:ied, jsdb:jedb)) ; g%Dblock_v(:,:) = 0.0
275  ALLOC_(g%Dopen_v(isd:ied, jsdb:jedb)) ; g%Dopen_v(:,:) = 0.0
276  endif
277 
278 ! setup block indices.
279  nihalo = g%Domain%nihalo
280  njhalo = g%Domain%njhalo
281  nblocks = niblock * njblock
282  if (nblocks < 1) call mom_error(fatal, "MOM_grid_init: " // &
283  "nblocks(=NIBLOCK*NJBLOCK) must be no less than 1")
284 
285  allocate(ibegin(niblock), iend(niblock), jbegin(njblock), jend(njblock))
286  call compute_block_extent(g%HI%isc,g%HI%iec,niblock,ibegin,iend)
287  call compute_block_extent(g%HI%jsc,g%HI%jec,njblock,jbegin,jend)
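  ! Illustrative (hypothetical) example: with a computational extent of isc=3,
  ! iec=12 (10 cells) and NIBLOCK=3, compute_block_extent returns contiguous
  ! index ranges such as (3:5), (6:8) and (9:12); the checks below then confirm
  ! that the last block is at least as large as any other block.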
288  !-- make sure the last block is the largest.
289  do i = 1, niblock-1
290  if (iend(i)-ibegin(i) > iend(niblock)-ibegin(niblock) ) call mom_error(fatal, &
291  "MOM_grid_init: the last block size in x-direction is not the largest")
292  enddo
293  do j = 1, njblock-1
294  if (jend(j)-jbegin(j) > jend(njblock)-jbegin(njblock) ) call mom_error(fatal, &
295  "MOM_grid_init: the last block size in y-direction is not the largest")
296  enddo
297 
298  g%nblocks = nblocks
299  allocate(g%Block(nblocks))
300  ied_max = 1 ; jed_max = 1
301  do n = 1,nblocks
302  ! Copy all information from the array index type describing the local grid.
303  g%Block(n) = g%HI
304 
305  i = mod((n-1), niblock) + 1
306  j = (n-1)/niblock + 1
307  !--- isd and jsd are always 1 for each block to permit array reuse.
308  g%Block(n)%isd = 1 ; g%Block(n)%jsd = 1
309  g%Block(n)%isc = g%Block(n)%isd+nihalo
310  g%Block(n)%jsc = g%Block(n)%jsd+njhalo
311  g%Block(n)%iec = g%Block(n)%isc + iend(i) - ibegin(i)
312  g%Block(n)%jec = g%Block(n)%jsc + jend(j) - jbegin(j)
313  g%Block(n)%ied = g%Block(n)%iec + nihalo
314  g%Block(n)%jed = g%Block(n)%jec + njhalo
315  g%Block(n)%IscB = g%Block(n)%isc; g%Block(n)%IecB = g%Block(n)%iec
316  g%Block(n)%JscB = g%Block(n)%jsc; g%Block(n)%JecB = g%Block(n)%jec
317  ! For symmetric memory domains, the first block will have the extra point
318  ! at the lower boundary of its computational domain.
319  if (g%symmetric) then
320  if (i==1) g%Block(n)%IscB = g%Block(n)%IscB-1
321  if (j==1) g%Block(n)%JscB = g%Block(n)%JscB-1
322  endif
323  g%Block(n)%IsdB = g%Block(n)%isd; g%Block(n)%IedB = g%Block(n)%ied
324  g%Block(n)%JsdB = g%Block(n)%jsd; g%Block(n)%JedB = g%Block(n)%jed
325  !--- For symmetric memory domains, every block will have an extra point
326  !--- at the lower boundary of its data domain.
327  if (g%symmetric) then
328  g%Block(n)%IsdB = g%Block(n)%IsdB-1
329  g%Block(n)%JsdB = g%Block(n)%JsdB-1
330  endif
331  g%Block(n)%idg_offset = (ibegin(i) - g%Block(n)%isc) + g%HI%idg_offset
332  g%Block(n)%jdg_offset = (jbegin(j) - g%Block(n)%jsc) + g%HI%jdg_offset
333  ! Find the largest values of ied and jed so that all blocks will have the
334  ! same size in memory.
335  ied_max = max(ied_max, g%Block(n)%ied)
336  jed_max = max(jed_max, g%Block(n)%jed)
337  enddo
338 
339  ! Reset all of the data domain sizes to match the largest for array reuse,
340  ! recalling that all blocks have isd=jsd=1 for array reuse.
341  do n = 1,nblocks
342  g%Block(n)%ied = ied_max ; g%Block(n)%IedB = ied_max
343  g%Block(n)%jed = jed_max ; g%Block(n)%JedB = jed_max
344  enddo
345 
346  !-- do some bounds error checking
347  if ( g%block(nblocks)%ied+g%block(nblocks)%idg_offset > g%HI%ied + g%HI%idg_offset ) &
348  call mom_error(fatal, "MOM_grid_init: G%ied_bk > G%ied")
349  if ( g%block(nblocks)%jed+g%block(nblocks)%jdg_offset > g%HI%jed + g%HI%jdg_offset ) &
350  call mom_error(fatal, "MOM_grid_init: G%jed_bk > G%jed")
351 
352  call get_domain_extent_dsamp2(g%Domain, g%HId2%isc, g%HId2%iec, g%HId2%jsc, g%HId2%jec,&
353  g%HId2%isd, g%HId2%ied, g%HId2%jsd, g%HId2%jed,&
354  g%HId2%isg, g%HId2%ieg, g%HId2%jsg, g%HId2%jeg)
355 
356  ! Set array sizes for fields that are discretized at tracer cell boundaries.
357  g%HId2%IscB = g%HId2%isc ; g%HId2%JscB = g%HId2%jsc
358  g%HId2%IsdB = g%HId2%isd ; g%HId2%JsdB = g%HId2%jsd
359  g%HId2%IsgB = g%HId2%isg ; g%HId2%JsgB = g%HId2%jsg
360  if (g%symmetric) then
361  g%HId2%IscB = g%HId2%isc-1 ; g%HId2%JscB = g%HId2%jsc-1
362  g%HId2%IsdB = g%HId2%isd-1 ; g%HId2%JsdB = g%HId2%jsd-1
363  g%HId2%IsgB = g%HId2%isg-1 ; g%HId2%JsgB = g%HId2%jsg-1
364  endif
365  g%HId2%IecB = g%HId2%iec ; g%HId2%JecB = g%HId2%jec
366  g%HId2%IedB = g%HId2%ied ; g%HId2%JedB = g%HId2%jed
367  g%HId2%IegB = g%HId2%ieg ; g%HId2%JegB = g%HId2%jeg
368 
369 end subroutine mom_grid_init
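! A hypothetical calling sketch (only MOM_grid_init, set_derived_metrics and the
! grid fields named here belong to this module; the surrounding steps are placeholders):
!   call MOM_grid_init(G, param_file, HI)   ! set index ranges and allocate metrics
!   ! ... fill G%dxT, G%dyT, G%areaT, G%geoLatT, etc. from the grid-generation code ...
!   call set_derived_metrics(G)             ! fill IdxT, IdyT, IareaT and friends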
370 
371 !> rescale_grid_bathymetry permits a change in the internal units for the bathymetry on the grid,
372 !! both rescaling the depths and recording the new internal units.
373 subroutine rescale_grid_bathymetry(G, m_in_new_units)
374  type(ocean_grid_type), intent(inout) :: g !< The horizontal grid structure
375  real, intent(in) :: m_in_new_units !< The new internal representation of 1 m depth.
376 
377  ! Local variables
378  real :: rescale
379  integer :: i, j, isd, ied, jsd, jed, isdb, iedb, jsdb, jedb
380 
381  isd = g%isd ; ied = g%ied ; jsd = g%jsd ; jed = g%jed
382  isdb = g%IsdB ; iedb = g%IedB ; jsdb = g%JsdB ; jedb = g%JedB
383 
384  if (m_in_new_units == 1.0) return
385  if (m_in_new_units < 0.0) &
386  call mom_error(fatal, "rescale_grid_bathymetry: Negative depth units are not permitted.")
387  if (m_in_new_units == 0.0) &
388  call mom_error(fatal, "rescale_grid_bathymetry: Zero depth units are not permitted.")
389 
390  rescale = 1.0 / m_in_new_units
391  do j=jsd,jed ; do i=isd,ied
392  g%bathyT(i,j) = rescale*g%bathyT(i,j)
393  enddo ; enddo
394  if (g%bathymetry_at_vel) then ; do j=jsd,jed ; do i=isdb,iedb
395  g%Dblock_u(i,j) = rescale*g%Dblock_u(i,j) ; g%Dopen_u(i,j) = rescale*g%Dopen_u(i,j)
396  enddo ; enddo ; endif
397  if (g%bathymetry_at_vel) then ; do j=jsdb,jedb ; do i=isd,ied
398  g%Dblock_v(i,j) = rescale*g%Dblock_v(i,j) ; g%Dopen_v(i,j) = rescale*g%Dopen_v(i,j)
399  enddo ; enddo ; endif
400  g%max_depth = rescale*g%max_depth
401 
402 end subroutine rescale_grid_bathymetry
403 
404 !> set_derived_metrics calculates metric terms that are derived from other metrics.
405 subroutine set_derived_metrics(G)
406  type(ocean_grid_type), intent(inout) :: g !< The horizontal grid structure
407 ! Various inverse grid spacings and derived areas are calculated within this
408 ! subroutine.
409  integer :: i, j, isd, ied, jsd, jed
410  integer :: isdb, iedb, jsdb, jedb
411 
412  isd = g%isd ; ied = g%ied ; jsd = g%jsd ; jed = g%jed
413  isdb = g%IsdB ; iedb = g%IedB ; jsdb = g%JsdB ; jedb = g%JedB
414 
415  do j=jsd,jed ; do i=isd,ied
416  if (g%dxT(i,j) < 0.0) g%dxT(i,j) = 0.0
417  if (g%dyT(i,j) < 0.0) g%dyT(i,j) = 0.0
418  g%IdxT(i,j) = adcroft_reciprocal(g%dxT(i,j))
419  g%IdyT(i,j) = adcroft_reciprocal(g%dyT(i,j))
420  g%IareaT(i,j) = adcroft_reciprocal(g%areaT(i,j))
421  enddo ; enddo
422 
423  do j=jsd,jed ; do i=isdb,iedb
424  if (g%dxCu(i,j) < 0.0) g%dxCu(i,j) = 0.0
425  if (g%dyCu(i,j) < 0.0) g%dyCu(i,j) = 0.0
426  g%IdxCu(i,j) = adcroft_reciprocal(g%dxCu(i,j))
427  g%IdyCu(i,j) = adcroft_reciprocal(g%dyCu(i,j))
428  enddo ; enddo
429 
430  do j=jsdb,jedb ; do i=isd,ied
431  if (g%dxCv(i,j) < 0.0) g%dxCv(i,j) = 0.0
432  if (g%dyCv(i,j) < 0.0) g%dyCv(i,j) = 0.0
433  g%IdxCv(i,j) = adcroft_reciprocal(g%dxCv(i,j))
434  g%IdyCv(i,j) = adcroft_reciprocal(g%dyCv(i,j))
435  enddo ; enddo
436 
437  do j=jsdb,jedb ; do i=isdb,iedb
438  if (g%dxBu(i,j) < 0.0) g%dxBu(i,j) = 0.0
439  if (g%dyBu(i,j) < 0.0) g%dyBu(i,j) = 0.0
440 
441  g%IdxBu(i,j) = adcroft_reciprocal(g%dxBu(i,j))
442  g%IdyBu(i,j) = adcroft_reciprocal(g%dyBu(i,j))
443  ! areaBu has usually been set to a positive area elsewhere.
444  if (g%areaBu(i,j) <= 0.0) g%areaBu(i,j) = g%dxBu(i,j) * g%dyBu(i,j)
445  g%IareaBu(i,j) = adcroft_reciprocal(g%areaBu(i,j))
446  enddo ; enddo
447 end subroutine set_derived_metrics
448 
449 !> Adcroft_reciprocal(x) = 1/x for |x|>0 or 0 for x=0.
450 function adcroft_reciprocal(val) result(I_val)
451  real, intent(in) :: val !< The value being inverted.
452  real :: i_val !< The Adcroft reciprocal of val.
453 
454  i_val = 0.0 ; if (val /= 0.0) i_val = 1.0/val
455 end function adcroft_reciprocal
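! For example, adcroft_reciprocal(4.0) returns 0.25 while adcroft_reciprocal(0.0)
! returns 0.0, so the zeroed-out (e.g. masked or unset) grid spacings handled in
! set_derived_metrics above never trigger a division by zero.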
456 
457 !> Returns true if the coordinates (x,y) are within the h-cell (i,j)
458 logical function ispointincell(G, i, j, x, y)
459  type(ocean_grid_type), intent(in) :: g !< Grid type
460  integer, intent(in) :: i !< i index of cell to test
461  integer, intent(in) :: j !< j index of cell to test
462  real, intent(in) :: x !< x coordinate of point
463  real, intent(in) :: y !< y coordinate of point
464  ! Local variables
465  real :: xne, xnw, xse, xsw, yne, ynw, yse, ysw
466  real :: p0, p1, p2, p3, l0, l1, l2, l3
467  ispointincell = .false.
468  xne = g%geoLonBu(i ,j ) ; yne = g%geoLatBu(i ,j )
469  xnw = g%geoLonBu(i-1,j ) ; ynw = g%geoLatBu(i-1,j )
470  xse = g%geoLonBu(i ,j-1) ; yse = g%geoLatBu(i ,j-1)
471  xsw = g%geoLonBu(i-1,j-1) ; ysw = g%geoLatBu(i-1,j-1)
472  ! This is a crude calculation that assumes a geographic coordinate system
473  if (x<min(xne,xnw,xse,xsw) .or. x>max(xne,xnw,xse,xsw) .or. &
474  y<min(yne,ynw,yse,ysw) .or. y>max(yne,ynw,yse,ysw) ) then
475  return ! Avoid the more complicated calculation
476  endif
477  l0 = (x-xsw)*(yse-ysw) - (y-ysw)*(xse-xsw)
478  l1 = (x-xse)*(yne-yse) - (y-yse)*(xne-xse)
479  l2 = (x-xne)*(ynw-yne) - (y-yne)*(xnw-xne)
480  l3 = (x-xnw)*(ysw-ynw) - (y-ynw)*(xsw-xnw)
481 
482  p0 = sign(1., l0) ; if (l0 == 0.) p0=0.
483  p1 = sign(1., l1) ; if (l1 == 0.) p1=0.
484  p2 = sign(1., l2) ; if (l2 == 0.) p2=0.
485  p3 = sign(1., l3) ; if (l3 == 0.) p3=0.
486 
487  if ( (abs(p0)+abs(p2)) + (abs(p1)+abs(p3)) == abs((p0+p2) + (p1+p3)) ) then
488  ispointincell=.true.
489  endif
490 end function ispointincell
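! Worked example (illustrative only): for a unit square with corners (0,0), (1,0),
! (1,1), (0,1) and the test point (0.25,0.5), the edge cross products are
! l0=-0.5, l1=-0.75, l2=-0.5 and l3=-0.25; all nonzero signs agree, so the
! absolute-value identity above holds and the point is reported as inside. A
! point such as (2.0,0.5) is rejected earlier by the bounding-box test.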
491 
492 !> Store an integer indicating which direction to work on first.
493 subroutine set_first_direction(G, y_first)
494  type(ocean_grid_type), intent(inout) :: g !< The ocean's grid structure
495  integer, intent(in) :: y_first !< The first direction to store
496 
497  g%first_direction = y_first
498 end subroutine set_first_direction
499 
500 !> Return global shape of horizontal grid
501 subroutine get_global_grid_size(G, niglobal, njglobal)
502  type(ocean_grid_type), intent(inout) :: g !< The horizontal grid type
503  integer, intent(out) :: niglobal !< i-index global size of grid
504  integer, intent(out) :: njglobal !< j-index global size of grid
505 
506  call get_global_shape(g%domain, niglobal, njglobal)
507 
508 end subroutine get_global_grid_size
509 
510 !> Allocate memory used by the ocean_grid_type and related structures.
511 subroutine allocate_metrics(G)
512  type(ocean_grid_type), intent(inout) :: G !< The horizontal grid type
513  integer :: isd, ied, jsd, jed, IsdB, IedB, JsdB, JedB, isg, ieg, jsg, jeg
514 
515  ! This subroutine allocates the lateral elements of the ocean_grid_type that
516  ! are always used and zeros them out.
517 
518  isd = g%isd ; ied = g%ied ; jsd = g%jsd ; jed = g%jed
519  isdb = g%IsdB ; iedb = g%IedB ; jsdb = g%JsdB ; jedb = g%JedB
520  isg = g%isg ; ieg = g%ieg ; jsg = g%jsg ; jeg = g%jeg
521 
522  ALLOC_(g%dxT(isd:ied,jsd:jed)) ; g%dxT(:,:) = 0.0
523  ALLOC_(g%dxCu(isdb:iedb,jsd:jed)) ; g%dxCu(:,:) = 0.0
524  ALLOC_(g%dxCv(isd:ied,jsdb:jedb)) ; g%dxCv(:,:) = 0.0
525  ALLOC_(g%dxBu(isdb:iedb,jsdb:jedb)) ; g%dxBu(:,:) = 0.0
526  ALLOC_(g%IdxT(isd:ied,jsd:jed)) ; g%IdxT(:,:) = 0.0
527  ALLOC_(g%IdxCu(isdb:iedb,jsd:jed)) ; g%IdxCu(:,:) = 0.0
528  ALLOC_(g%IdxCv(isd:ied,jsdb:jedb)) ; g%IdxCv(:,:) = 0.0
529  ALLOC_(g%IdxBu(isdb:iedb,jsdb:jedb)) ; g%IdxBu(:,:) = 0.0
530 
531  ALLOC_(g%dyT(isd:ied,jsd:jed)) ; g%dyT(:,:) = 0.0
532  ALLOC_(g%dyCu(isdb:iedb,jsd:jed)) ; g%dyCu(:,:) = 0.0
533  ALLOC_(g%dyCv(isd:ied,jsdb:jedb)) ; g%dyCv(:,:) = 0.0
534  ALLOC_(g%dyBu(isdb:iedb,jsdb:jedb)) ; g%dyBu(:,:) = 0.0
535  ALLOC_(g%IdyT(isd:ied,jsd:jed)) ; g%IdyT(:,:) = 0.0
536  ALLOC_(g%IdyCu(isdb:iedb,jsd:jed)) ; g%IdyCu(:,:) = 0.0
537  ALLOC_(g%IdyCv(isd:ied,jsdb:jedb)) ; g%IdyCv(:,:) = 0.0
538  ALLOC_(g%IdyBu(isdb:iedb,jsdb:jedb)) ; g%IdyBu(:,:) = 0.0
539 
540  ALLOC_(g%areaT(isd:ied,jsd:jed)) ; g%areaT(:,:) = 0.0
541  ALLOC_(g%IareaT(isd:ied,jsd:jed)) ; g%IareaT(:,:) = 0.0
542  ALLOC_(g%areaBu(isdb:iedb,jsdb:jedb)) ; g%areaBu(:,:) = 0.0
543  ALLOC_(g%IareaBu(isdb:iedb,jsdb:jedb)) ; g%IareaBu(:,:) = 0.0
544 
545  ALLOC_(g%mask2dT(isd:ied,jsd:jed)) ; g%mask2dT(:,:) = 0.0
546  ALLOC_(g%mask2dCu(isdb:iedb,jsd:jed)) ; g%mask2dCu(:,:) = 0.0
547  ALLOC_(g%mask2dCv(isd:ied,jsdb:jedb)) ; g%mask2dCv(:,:) = 0.0
548  ALLOC_(g%mask2dBu(isdb:iedb,jsdb:jedb)) ; g%mask2dBu(:,:) = 0.0
549  ALLOC_(g%geoLatT(isd:ied,jsd:jed)) ; g%geoLatT(:,:) = 0.0
550  ALLOC_(g%geoLatCu(isdb:iedb,jsd:jed)) ; g%geoLatCu(:,:) = 0.0
551  ALLOC_(g%geoLatCv(isd:ied,jsdb:jedb)) ; g%geoLatCv(:,:) = 0.0
552  ALLOC_(g%geoLatBu(isdb:iedb,jsdb:jedb)) ; g%geoLatBu(:,:) = 0.0
553  ALLOC_(g%geoLonT(isd:ied,jsd:jed)) ; g%geoLonT(:,:) = 0.0
554  ALLOC_(g%geoLonCu(isdb:iedb,jsd:jed)) ; g%geoLonCu(:,:) = 0.0
555  ALLOC_(g%geoLonCv(isd:ied,jsdb:jedb)) ; g%geoLonCv(:,:) = 0.0
556  ALLOC_(g%geoLonBu(isdb:iedb,jsdb:jedb)) ; g%geoLonBu(:,:) = 0.0
557 
558  ALLOC_(g%dx_Cv(isd:ied,jsdb:jedb)) ; g%dx_Cv(:,:) = 0.0
559  ALLOC_(g%dy_Cu(isdb:iedb,jsd:jed)) ; g%dy_Cu(:,:) = 0.0
560 
561  ALLOC_(g%areaCu(isdb:iedb,jsd:jed)) ; g%areaCu(:,:) = 0.0
562  ALLOC_(g%areaCv(isd:ied,jsdb:jedb)) ; g%areaCv(:,:) = 0.0
563  ALLOC_(g%IareaCu(isdb:iedb,jsd:jed)) ; g%IareaCu(:,:) = 0.0
564  ALLOC_(g%IareaCv(isd:ied,jsdb:jedb)) ; g%IareaCv(:,:) = 0.0
565 
566  ALLOC_(g%bathyT(isd:ied, jsd:jed)) ; g%bathyT(:,:) = 0.0
567  ALLOC_(g%CoriolisBu(isdb:iedb, jsdb:jedb)) ; g%CoriolisBu(:,:) = 0.0
568  ALLOC_(g%dF_dx(isd:ied, jsd:jed)) ; g%dF_dx(:,:) = 0.0
569  ALLOC_(g%dF_dy(isd:ied, jsd:jed)) ; g%dF_dy(:,:) = 0.0
570 
571  ALLOC_(g%sin_rot(isd:ied,jsd:jed)) ; g%sin_rot(:,:) = 0.0
572  ALLOC_(g%cos_rot(isd:ied,jsd:jed)) ; g%cos_rot(:,:) = 1.0
573 
574  allocate(g%gridLonT(isg:ieg)) ; g%gridLonT(:) = 0.0
575  allocate(g%gridLonB(g%IsgB:g%IegB)) ; g%gridLonB(:) = 0.0
576  allocate(g%gridLatT(jsg:jeg)) ; g%gridLatT(:) = 0.0
577  allocate(g%gridLatB(g%JsgB:g%JegB)) ; g%gridLatB(:) = 0.0
578 
579 end subroutine allocate_metrics
580 
581 !> Release memory used by the ocean_grid_type and related structures.
582 subroutine mom_grid_end(G)
583  type(ocean_grid_type), intent(inout) :: g !< The horizontal grid type
584 
585  DEALLOC_(g%dxT) ; DEALLOC_(g%dxCu) ; DEALLOC_(g%dxCv) ; DEALLOC_(g%dxBu)
586  DEALLOC_(g%IdxT) ; DEALLOC_(g%IdxCu) ; DEALLOC_(g%IdxCv) ; DEALLOC_(g%IdxBu)
587 
588  DEALLOC_(g%dyT) ; DEALLOC_(g%dyCu) ; DEALLOC_(g%dyCv) ; DEALLOC_(g%dyBu)
589  DEALLOC_(g%IdyT) ; DEALLOC_(g%IdyCu) ; DEALLOC_(g%IdyCv) ; DEALLOC_(g%IdyBu)
590 
591  DEALLOC_(g%areaT) ; DEALLOC_(g%IareaT)
592  DEALLOC_(g%areaBu) ; DEALLOC_(g%IareaBu)
593  DEALLOC_(g%areaCu) ; DEALLOC_(g%IareaCu)
594  DEALLOC_(g%areaCv) ; DEALLOC_(g%IareaCv)
595 
596  DEALLOC_(g%mask2dT) ; DEALLOC_(g%mask2dCu)
597  DEALLOC_(g%mask2dCv) ; DEALLOC_(g%mask2dBu)
598 
599  DEALLOC_(g%geoLatT) ; DEALLOC_(g%geoLatCu)
600  DEALLOC_(g%geoLatCv) ; DEALLOC_(g%geoLatBu)
601  DEALLOC_(g%geoLonT) ; DEALLOC_(g%geoLonCu)
602  DEALLOC_(g%geoLonCv) ; DEALLOC_(g%geoLonBu)
603 
604  DEALLOC_(g%dx_Cv) ; DEALLOC_(g%dy_Cu)
605 
606  DEALLOC_(g%bathyT) ; DEALLOC_(g%CoriolisBu)
607  DEALLOC_(g%dF_dx) ; DEALLOC_(g%dF_dy)
608  DEALLOC_(g%sin_rot) ; DEALLOC_(g%cos_rot)
609 
610  if (g%bathymetry_at_vel) then
611  DEALLOC_(g%Dblock_u) ; DEALLOC_(g%Dopen_u)
612  DEALLOC_(g%Dblock_v) ; DEALLOC_(g%Dopen_v)
613  endif
614 
615  deallocate(g%gridLonT) ; deallocate(g%gridLatT)
616  deallocate(g%gridLonB) ; deallocate(g%gridLatB)
617 
618  deallocate(g%Domain%mpp_domain)
619  deallocate(g%Domain)
620 
621 end subroutine mom_grid_end
622 
623 !> \namespace mom_grid
624 !!
625 !! Grid metrics and their inverses are labelled according to their staggered location on an Arakawa C (or B) grid.
626 !! - Metrics centered on h- or T-points are labelled T, e.g. dxT is the distance across the cell in the x-direction.
627 !! - Metrics centered on u-points are labelled Cu (C-grid u location). e.g. dyCu is the y-distance between
628 !! two corners of a T-cell.
629 !! - Metrics centered on v-points are labelled Cv (C-grid v location). e.g. dyCv is the y-distance between two h-points.
630 !! - Metrics centered on q-points are labelled Bu (B-grid u,v location). e.g. areaBu is the area centered on a q-point.
631 !!
632 !! \image html Grid_metrics.png "The labelling of distances (grid metrics) at various staggered
633 !! locations on a T-cell and around a q-point."
634 !!
635 !! Areas centered at T-, u-, v- and q- points are `areaT`, `areaCu`, `areaCv` and `areaBu` respectively.
636 !!
637 !! The reciprocals of the metrics are pre-calculated and also stored in the ocean_grid_type with an I prepended to the name.
638 !! For example, `1./areaT` is called `IareaT`, and `1./dyCv` is `IdyCv`.
639 !!
640 !! Geographic latitude and longitude (or model coordinates if not on a sphere) are stored in
641 !! `geoLatT`, `geoLonT` for T-points.
642 !! u-, v- and q- point coordinates follow the same pattern, replacing T with Cu, Cv and Bu respectively.
643 !!
644 !! Each location also has a 2D mask indicating whether the entire column is land or ocean.
645 !! `mask2dT` is 1 if the column is wet or 0 if the T-cell is land.
646 !! `mask2dCu` is 1 if both neighboring columns are ocean, and 0 if either is land.
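!!
!! As an illustrative sketch (not part of this module's API; `tracer`, `wet_area`,
!! `tr_sum` and `tr_mean` are hypothetical local variables), a masked, area-weighted
!! mean of an h-point field over the local computational domain could be written as:
!!
!!     wet_area = 0.0 ; tr_sum = 0.0
!!     do j=G%jsc,G%jec ; do i=G%isc,G%iec
!!       wet_area = wet_area + G%mask2dT(i,j) * G%areaT(i,j)
!!       tr_sum   = tr_sum   + G%mask2dT(i,j) * G%areaT(i,j) * tracer(i,j)
!!     enddo ; enddo
!!     if (wet_area > 0.0) tr_mean = tr_sum / wet_area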
647 
648 end module mom_grid