1 | MODULE nemogcm |
---|
2 | !!====================================================================== |
---|
3 | !! *** MODULE nemogcm *** |
---|
4 | !! Off-line Ocean : passive tracer evolution, dynamics read in files |
---|
5 | !!====================================================================== |
---|
6 | !! History : 3.3 ! 2010-05 (C. Ethe) Full reorganization of the off-line: phasing with the on-line |
---|
7 | !! 3.4 ! 2011-01 (C. Ethe, A. R. Porter, STFC Daresbury) dynamical allocation |
---|
8 | !! 4.0 ! 2016-10 (C. Ethe, G. Madec, S. Flavoni) domain configuration / user defined interface |
---|
9 | !!---------------------------------------------------------------------- |
---|
10 | |
---|
11 | !!---------------------------------------------------------------------- |
---|
12 | !! nemo_gcm : off-line: solve ocean tracer only |
---|
13 | !! nemo_gcm : solve ocean dynamics, tracer, biogeochemistry and/or sea-ice |
---|
14 | !! nemo_init : initialization of the NEMO system |
---|
15 | !! nemo_ctl : initialisation of the control print |
---|
16 | !! nemo_closefile: close remaining open files |
---|
17 | !! nemo_alloc : dynamical allocation |
---|
18 | !! nemo_partition: calculate MPP domain decomposition |
---|
19 | !! factorise : calculate the factors of the no. of MPI processes |
---|
20 | !! nemo_nfdcom : Setup for north fold exchanges with explicit point-to-point messaging |
---|
21 | !! istate_init : simple initialization to zero of ocean fields |
---|
22 | !! stp_ctl : reduced step control (no dynamics in off-line) |
---|
23 | !!---------------------------------------------------------------------- |
---|
24 | USE dom_oce ! ocean space domain variables |
---|
25 | USE oce ! dynamics and tracers variables |
---|
26 | USE trc_oce ! Shared ocean/passive tracers variables |
---|
27 | USE c1d ! 1D configuration |
---|
28 | USE domain ! domain initialization from coordinate & bathymetry (dom_init routine) |
---|
29 | USE closea ! treatment of closed seas (for ln_closea) |
---|
30 | USE usrdef_nam ! user defined configuration |
---|
31 | USE eosbn2 ! equation of state (eos bn2 routine) |
---|
32 | ! ! ocean physics |
---|
33 | USE ldftra ! lateral diffusivity setting (ldf_tra_init routine) |
---|
34 | USE ldfslp ! slopes of neutral surfaces (ldf_slp_init routine) |
---|
35 | USE traqsr ! solar radiation penetration (tra_qsr_init routine) |
---|
36 | USE trabbl ! bottom boundary layer (tra_bbl_init routine) |
---|
37 | USE traldf ! lateral physics (tra_ldf_init routine) |
---|
38 | USE sbcmod ! surface boundary condition (sbc_init routine) |
---|
39 | USE phycst ! physical constant (par_cst routine) |
---|
40 | USE dtadyn ! Lecture and Interpolation of the dynamical fields |
---|
41 | USE trcini ! Initilization of the passive tracers |
---|
42 | USE daymod ! calendar (day routine) |
---|
43 | USE trcstp ! passive tracer time-stepping (trc_stp routine) |
---|
44 | USE dtadyn ! Lecture and interpolation of the dynamical fields |
---|
45 | ! ! Passive tracers needs |
---|
46 | USE trc ! passive tracer : variables |
---|
47 | USE trcnam ! passive tracer : namelist |
---|
48 | USE trcrst ! passive tracer restart |
---|
49 | USE diaptr ! Need to initialise this as some variables are used in if statements later |
---|
50 | USE sbc_oce , ONLY : ln_rnf |
---|
51 | USE sbcrnf ! surface boundary condition : runoffs |
---|
52 | ! ! I/O & MPP |
---|
53 | USE iom ! I/O library |
---|
54 | USE in_out_manager ! I/O manager |
---|
55 | USE mppini ! shared/distributed memory setting (mpp_init routine) |
---|
56 | USE lib_mpp ! distributed memory computing |
---|
57 | #if defined key_iomput |
---|
58 | USE xios ! xIOserver |
---|
59 | #endif |
---|
60 | USE prtctl ! Print control (prt_ctl_init routine) |
---|
61 | USE timing ! Timing |
---|
62 | USE lib_fortran ! Fortran utilities (allows no signed zero when 'key_nosignedzero' defined) |
---|
63 | USE lbcnfd , ONLY : isendto, nsndto, nfsloop, nfeloop ! Setup of north fold exchanges |
---|
64 | |
---|
65 | IMPLICIT NONE |
---|
66 | PRIVATE |
---|
67 | |
---|
68 | PUBLIC nemo_gcm ! called by nemo.F90 |
---|
69 | |
---|
70 | CHARACTER (len=64) :: cform_aaa="( /, 'AAAAAAAA', / ) " ! flag for output listing |
---|
71 | |
---|
72 | !!---------------------------------------------------------------------- |
---|
73 | !! NEMO/OFF 4.0 , NEMO Consortium (2018) |
---|
74 | !! $Id$ |
---|
75 | !! Software governed by the CeCILL licence (NEMOGCM/NEMO_CeCILL.txt) |
---|
76 | !!---------------------------------------------------------------------- |
---|
77 | CONTAINS |
---|
78 | |
---|
   SUBROUTINE nemo_gcm
      !!----------------------------------------------------------------------
      !!                     ***  ROUTINE nemo_gcm  ***
      !!
      !! ** Purpose :   NEMO solves the primitive equations on an orthogonal
      !!                curvilinear mesh on the sphere.
      !!
      !! ** Method  : - model general initialization (nemo_init)
      !!              - launch the time-stepping (dta_dyn and trc_stp)
      !!              - finalize the run by closing files and communications
      !!
      !! References : Madec, Delecluse,Imbard, and Levy, 1997: internal report, IPSL.
      !!              Madec, 2008, internal report, IPSL.
      !!----------------------------------------------------------------------
      INTEGER :: istp, indic   ! time step index ; solver indicator handed to stp_ctl
      !                        ! NOTE(review): indic is passed to stp_ctl but stp_ctl
      !                        ! (see below) never assigns it, and it is never read here
      !!----------------------------------------------------------------------

      CALL nemo_init   ! Initializations (domain, physics, passive tracers, I/O)

      ! check that all process are still there... If some process have an error,
      ! they will never enter in step and other processes will wait until the end of the cpu time!
      IF( lk_mpp )   CALL mpp_max( nstop )

      !                          !-----------------------!
      !                          !==   time stepping   ==!
      !                          !-----------------------!
      istp = nit000              ! first time-step index of the run
      !
      IF( ln_rnf )   CALL sbc_rnf(istp)   ! runoffs initialization
      !
      CALL iom_init( cxios_context )   ! iom_put initialization (must be done after nemo_init for AGRIF+XIOS+OASIS)
      !
      DO WHILE ( istp <= nitend .AND. nstop == 0 )   !==  OFF time-stepping  ==!
         !
         IF( istp /= nit000 )   CALL day ( istp )          ! Calendar (day was already called at nit000 in day_init)
         CALL iom_setkt ( istp - nit000 + 1, "nemo" )      ! say to iom that we are at time step kstp
         CALL dta_dyn   ( istp )                           ! Interpolation of the dynamical fields
         IF( .NOT.ln_linssh )   CALL dta_dyn_swp( istp )   ! swap of sea surface height and vertical scale factors

         CALL trc_stp   ( istp )          ! passive-tracer time-stepping
         CALL stp_ctl   ( istp, indic )   ! Time loop: control and print
         istp = istp + 1
      END DO
      !
#if defined key_iomput
      CALL iom_context_finalize( cxios_context )   ! needed for XIOS+AGRIF
#endif

      !                          !------------------------!
      !                          !==  finalize the run  ==!
      !                          !------------------------!
      IF(lwp) WRITE(numout,cform_aaa)   ! Flag AAAAAAA: marks a normally completed time loop

      IF( nstop /= 0 .AND. lwp ) THEN   ! error print (nstop accumulated by ctl_stop calls)
         WRITE(numout,cform_err)
         WRITE(numout,*) ' ==>>> nemo_gcm: a total of ', nstop, ' errors have been found'
         WRITE(numout,*)
      ENDIF
      !
      IF( ln_timing )   CALL timing_finalize   ! timing report
      !
      CALL nemo_closefile                      ! close every file still open
      !
#if defined key_iomput
      CALL xios_finalize                 ! end mpp communications with xios
#else
      IF( lk_mpp )   CALL mppstop        ! end mpp communications
#endif
      !
   END SUBROUTINE nemo_gcm
---|
149 | |
---|
150 | |
---|
   SUBROUTINE nemo_init
      !!----------------------------------------------------------------------
      !!                     ***  ROUTINE nemo_init  ***
      !!
      !! ** Purpose :   initialization of the nemo model in off-line mode:
      !!                read control/configuration namelists, set up the MPP
      !!                decomposition, allocate arrays and initialise every
      !!                sub-component needed by the OFF time-stepping.
      !!----------------------------------------------------------------------
      INTEGER :: ji                 ! dummy loop indices
      INTEGER :: ios, ilocal_comm   ! namelist iostat / MPI communicator returned by XIOS
      INTEGER :: iiarea, ijarea     ! (i,j) position of this process in the jpni x jpnj grid
      INTEGER :: iirest, ijrest     ! number of processes receiving one extra interior point
      CHARACTER(len=120), DIMENSION(30) :: cltxt, cltxt2, clnam   ! deferred control prints
      !!
      NAMELIST/namctl/ ln_ctl   , nn_print, nn_ictls, nn_ictle,   &
         &             nn_isplt , nn_jsplt, nn_jctls, nn_jctle,   &
         &             ln_timing, ln_diacfl
      NAMELIST/namcfg/ ln_read_cfg, cn_domcfg, ln_closea, ln_write_cfg, cn_domcfg_out, ln_use_jattr
      !!----------------------------------------------------------------------
      !
      cltxt  = ''                   ! control prints are buffered until numout is opened
      cltxt2 = ''
      clnam  = ''
      cxios_context = 'nemo'
      !
      !                             ! Open reference namelist and configuration namelist files
      CALL ctl_opn( numnam_ref, 'namelist_ref', 'OLD', 'FORMATTED', 'SEQUENTIAL', -1, 6, .FALSE. )
      CALL ctl_opn( numnam_cfg, 'namelist_cfg', 'OLD', 'FORMATTED', 'SEQUENTIAL', -1, 6, .FALSE. )
      !
      REWIND( numnam_ref )          ! Namelist namctl in reference namelist
      READ  ( numnam_ref, namctl, IOSTAT = ios, ERR = 901 )
901   IF( ios /= 0 )   CALL ctl_nam ( ios , 'namctl in reference namelist', .TRUE. )
      REWIND( numnam_cfg )          ! Namelist namctl in configuration namelist
      READ  ( numnam_cfg, namctl, IOSTAT = ios, ERR = 902 )
      ! ios > 0 only: a clean end-of-file (ios < 0) is tolerated for the cfg namelist
902   IF( ios >  0 )   CALL ctl_nam ( ios , 'namctl in configuration namelist', .TRUE. )
      !
      REWIND( numnam_ref )          ! Namelist namcfg in reference namelist
      READ  ( numnam_ref, namcfg, IOSTAT = ios, ERR = 903 )
903   IF( ios /= 0 )   CALL ctl_nam ( ios , 'namcfg in reference namelist', .TRUE. )
      REWIND( numnam_cfg )          ! Namelist namcfg in configuration namelist
      READ  ( numnam_cfg, namcfg, IOSTAT = ios, ERR = 904 )
904   IF( ios >  0 )   CALL ctl_nam ( ios , 'namcfg in configuration namelist', .TRUE. )

      !                             !--------------------------!
      !                             !  Set global domain size  !   (control print return in cltxt2)
      !                             !--------------------------!
      IF( ln_read_cfg ) THEN        ! Read sizes in domain configuration file
         CALL domain_cfg ( cltxt2, cn_cfg, nn_cfg, jpiglo, jpjglo, jpkglo, jperio )
         !
      ELSE                          ! user-defined namelist
         CALL usr_def_nam( cltxt2, clnam, cn_cfg, nn_cfg, jpiglo, jpjglo, jpkglo, jperio )
      ENDIF
      !
      l_offline = .true.            ! passive tracers are run offline
      !
      !                             !--------------------------------------------!
      !                             !  set communicator & select the local node  !
      !                             !  NB: mynode also opens output.namelist.dyn !
      !                             !      on unit number numond on first proc   !
      !                             !--------------------------------------------!
#if defined key_iomput
      CALL xios_initialize( "for_xios_mpi_id",return_comm=ilocal_comm )
      narea = mynode( cltxt, 'output.namelist.dyn', numnam_ref, numnam_cfg, numond , nstop, ilocal_comm )   ! Nodes selection
#else
      ilocal_comm = 0
      narea = mynode( cltxt, 'output.namelist.dyn', numnam_ref, numnam_cfg, numond , nstop )                ! Nodes selection (control print return in cltxt)
#endif

      narea = narea + 1             ! mynode return the rank of proc (0 --> jpnij -1 )

      lwm = (narea == 1)                        ! control of output namelists (first proc only)
      lwp = (narea == 1) .OR. ln_ctl            ! control of all listing output print

      IF(lwm) THEN                  ! write merged namelists from earlier to output namelist
         !                          ! now that the file has been opened in call to mynode.
         !                          ! NB: nammpp has already been written in mynode (if lk_mpp_mpi)
         WRITE( numond, namctl )
         WRITE( numond, namcfg )
         IF( .NOT.ln_read_cfg ) THEN
            DO ji = 1, SIZE(clnam)
               IF( TRIM(clnam(ji)) /= '' )   WRITE(numond, * ) clnam(ji)   ! namusr_def print
            END DO
         ENDIF
      ENDIF

      ! If dimensions of processor grid weren't specified in the namelist file
      ! then we calculate them here now that we have our communicator size
      IF( jpni < 1 .OR. jpnj < 1 ) THEN
#if defined key_mpp_mpi
         CALL nemo_partition( mppsize )
#else
         jpni  = 1                  ! mono-process run: trivial 1 x 1 decomposition
         jpnj  = 1
         jpnij = jpni*jpnj
#endif
      ENDIF

      ! local subdomain size: interior points (global minus 2*nn_hls halo rows/columns,
      ! nn_hls presumably being the halo width -- confirm in dom_oce) are shared as evenly
      ! as possible; the first iirest (ijrest) columns (rows) of processes get one extra point
      iiarea = 1 + MOD( narea - 1 , jpni )
      ijarea = 1 + ( narea - 1 ) / jpni
      iirest = 1 + MOD( jpiglo - 2*nn_hls - 1 , jpni )
      ijrest = 1 + MOD( jpjglo - 2*nn_hls - 1 , jpnj )
#if defined key_nemocice_decomp
      ! NEMO-CICE coupled decomposition: sizes derive from the CICE global grid
      jpi = ( nx_global+2-2*nn_hls + (jpni-1) ) / jpni + 2*nn_hls   ! first  dim.
      jpj = ( ny_global+2-2*nn_hls + (jpnj-1) ) / jpnj + 2*nn_hls   ! second dim.
      jpimax = jpi
      jpjmax = jpj
      IF( iiarea == jpni ) jpi = jpiglo - (jpni - 1) * (jpi - 2*nn_hls)   ! last column absorbs the remainder
      IF( ijarea == jpnj ) jpj = jpjglo - (jpnj - 1) * (jpj - 2*nn_hls)   ! last row    absorbs the remainder
#else
      jpi = ( jpiglo -2*nn_hls + (jpni-1) ) / jpni + 2*nn_hls   ! first  dim. (ceiling division + halo)
      jpj = ( jpjglo -2*nn_hls + (jpnj-1) ) / jpnj + 2*nn_hls   ! second dim.
      jpimax = jpi
      jpjmax = jpj
      IF( iiarea > iirest ) jpi = jpi - 1   ! processes beyond the remainder get one point fewer
      IF( ijarea > ijrest ) jpj = jpj - 1
#endif

      jpk = jpkglo                          ! third dim: no vertical decomposition

      jpim1 = jpi-1                         ! inner domain indices
      jpjm1 = jpj-1                         !   "           "
      jpkm1 = MAX( 1, jpk-1 )               !   "           "
      jpij  = jpi*jpj                       ! jpi x jpj


      IF(lwp) THEN                          ! open listing units
         !
         CALL ctl_opn( numout, 'ocean.output', 'REPLACE', 'FORMATTED', 'SEQUENTIAL', -1, 6, .FALSE., narea )
         !
         WRITE(numout,*)
         WRITE(numout,*) ' CNRS - NERC - Met OFFICE - MERCATOR-ocean - INGV - CMCC'
         WRITE(numout,*) ' NEMO team'
         WRITE(numout,*) ' Off-line TOP Model'
         WRITE(numout,*) ' NEMO version 4.0 (2017) '
         WRITE(numout,*)
         WRITE(numout,*)
         DO ji = 1, SIZE(cltxt)             ! flush the prints buffered during mynode
            IF( TRIM(cltxt (ji)) /= '' )   WRITE(numout,*) cltxt(ji)   ! control print of mynode
         END DO
         WRITE(numout,*)
         WRITE(numout,*)
         DO ji = 1, SIZE(cltxt2)            ! flush the prints buffered during domain sizing
            IF( TRIM(cltxt2(ji)) /= '' )   WRITE(numout,*) cltxt2(ji)  ! control print of domain size
         END DO
         !
         WRITE(numout,cform_aaa)            ! Flag AAAAAAA
         !
      ENDIF

      ! Now we know the dimensions of the grid and numout has been set: we can allocate arrays
      CALL nemo_alloc()

      !                             !-------------------------------!
      !                             !  NEMO general initialization  !
      !                             !-------------------------------!

      CALL nemo_ctl                          ! Control prints

      !                                      ! Domain decomposition
      CALL mpp_init                          ! MPP
      IF( ln_nnogather )   CALL nemo_nfdcom  ! northfold neighbour lists
      !
      !                                      ! General initialization
      IF( ln_timing    )   CALL timing_init
      IF( ln_timing    )   CALL timing_start( 'nemo_init')
      !
      CALL phy_cst                           ! Physical constants
      CALL eos_init                          ! Equation of state
      IF( lk_c1d       )   CALL c1d_init     ! 1D column configuration
      CALL dom_init("OPA")                   ! Domain
      IF( ln_ctl       )   CALL prt_ctl_init ! Print control

      CALL istate_init                       ! ocean initial state (Dynamics and tracers)

      CALL sbc_init                          ! Forcings : surface module

      !                                      ! Tracer physics
      CALL ldf_tra_init                      ! Lateral ocean tracer physics
      CALL ldf_eiv_init                      ! Eddy induced velocity param
      CALL tra_ldf_init                      ! lateral mixing
      IF( l_ldfslp     )   CALL ldf_slp_init ! slope of lateral mixing
      CALL tra_qsr_init                      ! penetrative solar radiation qsr
      IF( ln_trabbl    )   CALL tra_bbl_init ! advective (and/or diffusive) bottom boundary layer scheme

      !                                      ! Passive tracers
      CALL trc_nam_run                       ! Needed to get restart parameters for passive tracers
      CALL trc_rst_cal( nit000, 'READ' )     ! calendar
      CALL dta_dyn_init                      ! Initialization for the dynamics

      CALL trc_init                          ! Passive tracers initialization
      CALL dia_ptr_init                      ! Poleward TRansports initialization

      IF(lwp) WRITE(numout,cform_aaa)        ! Flag AAAAAAA
      !
      IF( ln_timing    )   CALL timing_stop( 'nemo_init')
      !
   END SUBROUTINE nemo_init
---|
346 | |
---|
347 | |
---|
348 | SUBROUTINE nemo_ctl |
---|
349 | !!---------------------------------------------------------------------- |
---|
350 | !! *** ROUTINE nemo_ctl *** |
---|
351 | !! |
---|
352 | !! ** Purpose : control print setting |
---|
353 | !! |
---|
354 | !! ** Method : - print namctl information and check some consistencies |
---|
355 | !!---------------------------------------------------------------------- |
---|
356 | ! |
---|
357 | IF(lwp) THEN ! control print |
---|
358 | WRITE(numout,*) |
---|
359 | WRITE(numout,*) 'nemo_ctl: Control prints' |
---|
360 | WRITE(numout,*) '~~~~~~~~' |
---|
361 | WRITE(numout,*) ' Namelist namctl' |
---|
362 | WRITE(numout,*) ' run control (for debugging) ln_ctl = ', ln_ctl |
---|
363 | WRITE(numout,*) ' level of print nn_print = ', nn_print |
---|
364 | WRITE(numout,*) ' Start i indice for SUM control nn_ictls = ', nn_ictls |
---|
365 | WRITE(numout,*) ' End i indice for SUM control nn_ictle = ', nn_ictle |
---|
366 | WRITE(numout,*) ' Start j indice for SUM control nn_jctls = ', nn_jctls |
---|
367 | WRITE(numout,*) ' End j indice for SUM control nn_jctle = ', nn_jctle |
---|
368 | WRITE(numout,*) ' number of proc. following i nn_isplt = ', nn_isplt |
---|
369 | WRITE(numout,*) ' number of proc. following j nn_jsplt = ', nn_jsplt |
---|
370 | WRITE(numout,*) ' timing by routine ln_timing = ', ln_timing |
---|
371 | WRITE(numout,*) ' CFL diagnostics ln_diacfl = ', ln_diacfl |
---|
372 | ENDIF |
---|
373 | ! |
---|
374 | nprint = nn_print ! convert DOCTOR namelist names into OLD names |
---|
375 | nictls = nn_ictls |
---|
376 | nictle = nn_ictle |
---|
377 | njctls = nn_jctls |
---|
378 | njctle = nn_jctle |
---|
379 | isplt = nn_isplt |
---|
380 | jsplt = nn_jsplt |
---|
381 | |
---|
382 | IF(lwp) THEN ! control print |
---|
383 | WRITE(numout,*) |
---|
384 | WRITE(numout,*) ' Namelist namcfg' |
---|
385 | WRITE(numout,*) ' read domain configuration file ln_read_cfg = ', ln_read_cfg |
---|
386 | WRITE(numout,*) ' filename to be read cn_domcfg = ', TRIM(cn_domcfg) |
---|
387 | WRITE(numout,*) ' keep closed seas in the domain (if exist) ln_closea = ', TRIM(cn_domcfg) |
---|
388 | WRITE(numout,*) ' create a configuration definition file ln_write_cfg = ', ln_write_cfg |
---|
389 | WRITE(numout,*) ' filename to be written cn_domcfg_out = ', TRIM(cn_domcfg_out) |
---|
390 | WRITE(numout,*) ' use file attribute if exists as i/p j-start ln_use_jattr = ', ln_use_jattr |
---|
391 | ENDIF |
---|
392 | IF( .NOT.ln_read_cfg ) ln_closea = .false. ! dealing possible only with a domcfg file |
---|
393 | ! |
---|
394 | ! ! Parameter control |
---|
395 | ! |
---|
396 | IF( ln_ctl ) THEN ! sub-domain area indices for the control prints |
---|
397 | IF( lk_mpp .AND. jpnij > 1 ) THEN |
---|
398 | isplt = jpni ; jsplt = jpnj ; ijsplt = jpni*jpnj ! the domain is forced to the real split domain |
---|
399 | ELSE |
---|
400 | IF( isplt == 1 .AND. jsplt == 1 ) THEN |
---|
401 | CALL ctl_warn( ' - isplt & jsplt are equal to 1', & |
---|
402 | & ' - the print control will be done over the whole domain' ) |
---|
403 | ENDIF |
---|
404 | ijsplt = isplt * jsplt ! total number of processors ijsplt |
---|
405 | ENDIF |
---|
406 | IF(lwp) WRITE(numout,*)' - The total number of processors over which the' |
---|
407 | IF(lwp) WRITE(numout,*)' print control will be done is ijsplt : ', ijsplt |
---|
408 | ! |
---|
409 | ! ! indices used for the SUM control |
---|
410 | IF( nictls+nictle+njctls+njctle == 0 ) THEN ! print control done over the default area |
---|
411 | lsp_area = .FALSE. |
---|
412 | ELSE ! print control done over a specific area |
---|
413 | lsp_area = .TRUE. |
---|
414 | IF( nictls < 1 .OR. nictls > jpiglo ) THEN |
---|
415 | CALL ctl_warn( ' - nictls must be 1<=nictls>=jpiglo, it is forced to 1' ) |
---|
416 | nictls = 1 |
---|
417 | ENDIF |
---|
418 | IF( nictle < 1 .OR. nictle > jpiglo ) THEN |
---|
419 | CALL ctl_warn( ' - nictle must be 1<=nictle>=jpiglo, it is forced to jpiglo' ) |
---|
420 | nictle = jpiglo |
---|
421 | ENDIF |
---|
422 | IF( njctls < 1 .OR. njctls > jpjglo ) THEN |
---|
423 | CALL ctl_warn( ' - njctls must be 1<=njctls>=jpjglo, it is forced to 1' ) |
---|
424 | njctls = 1 |
---|
425 | ENDIF |
---|
426 | IF( njctle < 1 .OR. njctle > jpjglo ) THEN |
---|
427 | CALL ctl_warn( ' - njctle must be 1<=njctle>=jpjglo, it is forced to jpjglo' ) |
---|
428 | njctle = jpjglo |
---|
429 | ENDIF |
---|
430 | ENDIF |
---|
431 | ENDIF |
---|
432 | ! |
---|
433 | IF( 1._wp /= SIGN(1._wp,-0._wp) ) CALL ctl_stop( 'nemo_ctl: The intrinsec SIGN function follows f2003 standard.', & |
---|
434 | & 'Compile with key_nosignedzero enabled' ) |
---|
435 | ! |
---|
436 | END SUBROUTINE nemo_ctl |
---|
437 | |
---|
438 | |
---|
439 | SUBROUTINE nemo_closefile |
---|
440 | !!---------------------------------------------------------------------- |
---|
441 | !! *** ROUTINE nemo_closefile *** |
---|
442 | !! |
---|
443 | !! ** Purpose : Close the files |
---|
444 | !!---------------------------------------------------------------------- |
---|
445 | ! |
---|
446 | IF( lk_mpp ) CALL mppsync |
---|
447 | ! |
---|
448 | CALL iom_close ! close all input/output files managed by iom_* |
---|
449 | ! |
---|
450 | IF( numstp /= -1 ) CLOSE( numstp ) ! time-step file |
---|
451 | IF( numnam_ref /= -1 ) CLOSE( numnam_ref ) ! oce reference namelist |
---|
452 | IF( numnam_cfg /= -1 ) CLOSE( numnam_cfg ) ! oce configuration namelist |
---|
453 | IF( numout /= 6 ) CLOSE( numout ) ! standard model output file |
---|
454 | IF( lwm.AND.numond /= -1 ) CLOSE( numond ) ! oce output namelist |
---|
455 | ! |
---|
456 | numout = 6 ! redefine numout in case it is used after this point... |
---|
457 | ! |
---|
458 | END SUBROUTINE nemo_closefile |
---|
459 | |
---|
460 | |
---|
461 | SUBROUTINE nemo_alloc |
---|
462 | !!---------------------------------------------------------------------- |
---|
463 | !! *** ROUTINE nemo_alloc *** |
---|
464 | !! |
---|
465 | !! ** Purpose : Allocate all the dynamic arrays of the OPA modules |
---|
466 | !! |
---|
467 | !! ** Method : |
---|
468 | !!---------------------------------------------------------------------- |
---|
469 | USE diawri , ONLY : dia_wri_alloc |
---|
470 | USE dom_oce, ONLY : dom_oce_alloc |
---|
471 | USE zdf_oce, ONLY : zdf_oce_alloc |
---|
472 | USE trc_oce, ONLY : trc_oce_alloc |
---|
473 | ! |
---|
474 | INTEGER :: ierr |
---|
475 | !!---------------------------------------------------------------------- |
---|
476 | ! |
---|
477 | ierr = oce_alloc () ! ocean |
---|
478 | ierr = ierr + dia_wri_alloc() |
---|
479 | ierr = ierr + dom_oce_alloc() ! ocean domain |
---|
480 | ierr = ierr + zdf_oce_alloc() ! ocean vertical physics |
---|
481 | ierr = ierr + trc_oce_alloc() ! shared TRC / TRA arrays |
---|
482 | ! |
---|
483 | IF( lk_mpp ) CALL mpp_sum( ierr ) |
---|
484 | IF( ierr /= 0 ) CALL ctl_stop( 'STOP', 'nemo_alloc: unable to allocate standard ocean arrays' ) |
---|
485 | ! |
---|
486 | END SUBROUTINE nemo_alloc |
---|
487 | |
---|
488 | |
---|
489 | SUBROUTINE nemo_partition( num_pes ) |
---|
490 | !!---------------------------------------------------------------------- |
---|
491 | !! *** ROUTINE nemo_partition *** |
---|
492 | !! |
---|
493 | !! ** Purpose : |
---|
494 | !! |
---|
495 | !! ** Method : |
---|
496 | !!---------------------------------------------------------------------- |
---|
497 | INTEGER, INTENT(in) :: num_pes ! The number of MPI processes we have |
---|
498 | ! |
---|
499 | INTEGER, PARAMETER :: nfactmax = 20 |
---|
500 | INTEGER :: nfact ! The no. of factors returned |
---|
501 | INTEGER :: ierr ! Error flag |
---|
502 | INTEGER :: ji |
---|
503 | INTEGER :: idiff, mindiff, imin ! For choosing pair of factors that are closest in value |
---|
504 | INTEGER, DIMENSION(nfactmax) :: ifact ! Array of factors |
---|
505 | !!---------------------------------------------------------------------- |
---|
506 | ! |
---|
507 | ierr = 0 |
---|
508 | ! |
---|
509 | CALL factorise( ifact, nfactmax, nfact, num_pes, ierr ) |
---|
510 | ! |
---|
511 | IF( nfact <= 1 ) THEN |
---|
512 | WRITE (numout, *) 'WARNING: factorisation of number of PEs failed' |
---|
513 | WRITE (numout, *) ' : using grid of ',num_pes,' x 1' |
---|
514 | jpnj = 1 |
---|
515 | jpni = num_pes |
---|
516 | ELSE |
---|
517 | ! Search through factors for the pair that are closest in value |
---|
518 | mindiff = 1000000 |
---|
519 | imin = 1 |
---|
520 | DO ji = 1, nfact-1, 2 |
---|
521 | idiff = ABS( ifact(ji) - ifact(ji+1) ) |
---|
522 | IF( idiff < mindiff ) THEN |
---|
523 | mindiff = idiff |
---|
524 | imin = ji |
---|
525 | ENDIF |
---|
526 | END DO |
---|
527 | jpnj = ifact(imin) |
---|
528 | jpni = ifact(imin + 1) |
---|
529 | ENDIF |
---|
530 | ! |
---|
531 | jpnij = jpni*jpnj |
---|
532 | ! |
---|
533 | END SUBROUTINE nemo_partition |
---|
534 | |
---|
535 | |
---|
536 | SUBROUTINE factorise( kfax, kmaxfax, knfax, kn, kerr ) |
---|
537 | !!---------------------------------------------------------------------- |
---|
538 | !! *** ROUTINE factorise *** |
---|
539 | !! |
---|
540 | !! ** Purpose : return the prime factors of n. |
---|
541 | !! knfax factors are returned in array kfax which is of |
---|
542 | !! maximum dimension kmaxfax. |
---|
543 | !! ** Method : |
---|
544 | !!---------------------------------------------------------------------- |
---|
545 | INTEGER , INTENT(in ) :: kn, kmaxfax |
---|
546 | INTEGER , INTENT( out) :: kerr, knfax |
---|
547 | INTEGER, DIMENSION(kmaxfax), INTENT( out) :: kfax |
---|
548 | ! |
---|
549 | INTEGER :: ifac, jl, inu |
---|
550 | INTEGER, PARAMETER :: ntest = 14 |
---|
551 | INTEGER, DIMENSION(ntest) :: ilfax |
---|
552 | !!---------------------------------------------------------------------- |
---|
553 | ! |
---|
554 | ! lfax contains the set of allowed factors. |
---|
555 | ilfax(:) = (/(2**jl,jl=ntest,1,-1)/) |
---|
556 | ! |
---|
557 | ! Clear the error flag and initialise output vars |
---|
558 | kerr = 0 |
---|
559 | kfax = 1 |
---|
560 | knfax = 0 |
---|
561 | ! |
---|
562 | IF( kn /= 1 ) THEN ! Find the factors of n |
---|
563 | ! |
---|
564 | ! nu holds the unfactorised part of the number. |
---|
565 | ! knfax holds the number of factors found. |
---|
566 | ! l points to the allowed factor list. |
---|
567 | ! ifac holds the current factor. |
---|
568 | ! |
---|
569 | inu = kn |
---|
570 | knfax = 0 |
---|
571 | ! |
---|
572 | DO jl = ntest, 1, -1 |
---|
573 | ! |
---|
574 | ifac = ilfax(jl) |
---|
575 | IF( ifac > inu ) CYCLE |
---|
576 | ! |
---|
577 | ! Test whether the factor will divide. |
---|
578 | ! |
---|
579 | IF( MOD(inu,ifac) == 0 ) THEN |
---|
580 | ! |
---|
581 | knfax = knfax + 1 ! Add the factor to the list |
---|
582 | IF( knfax > kmaxfax ) THEN |
---|
583 | kerr = 6 |
---|
584 | write (*,*) 'FACTOR: insufficient space in factor array ', knfax |
---|
585 | return |
---|
586 | ENDIF |
---|
587 | kfax(knfax) = ifac |
---|
588 | ! Store the other factor that goes with this one |
---|
589 | knfax = knfax + 1 |
---|
590 | kfax(knfax) = inu / ifac |
---|
591 | !WRITE (*,*) 'ARPDBG, factors ',knfax-1,' & ',knfax,' are ', kfax(knfax-1),' and ',kfax(knfax) |
---|
592 | ENDIF |
---|
593 | ! |
---|
594 | END DO |
---|
595 | ! |
---|
596 | ENDIF |
---|
597 | ! |
---|
598 | END SUBROUTINE factorise |
---|
599 | |
---|
600 | #if defined key_mpp_mpi |
---|
601 | |
---|
   SUBROUTINE nemo_nfdcom
      !!----------------------------------------------------------------------
      !!                     ***  ROUTINE nemo_nfdcom  ***
      !! ** Purpose :   Setup for north fold exchanges with explicit
      !!                point-to-point messaging
      !!
      !! ** Method  :   Initialization of the northern neighbours lists:
      !!                fill isendto/nsndto with the ranks whose i-range
      !!                overlaps the fold stencil of this process, and set
      !!                the nfsloop/nfeloop loop bounds.
      !!----------------------------------------------------------------------
      !!    1.0  !  2011-10  (A. C. Coward, NOCS & J. Donners, PRACE)
      !!    2.0  !  2013-06 Setup avoiding MPI communication (I. Epicoco, S. Mocavero, CMCC)
      !!----------------------------------------------------------------------
      INTEGER  ::   sxM, dxM, sxT, dxT, jn   ! global i-range of the fold stencil (M) / of process jn (T)
      INTEGER  ::   njmppmax                 ! j-offset of the northernmost row of processes
      !!----------------------------------------------------------------------
      !
      njmppmax = MAXVAL( njmppt )
      !
      ! initializes the north-fold communication variables
      isendto(:) = 0
      nsndto     = 0
      !
      IF ( njmpp == njmppmax ) THEN      ! if I am a process in the north
         !
         ! sxM is the first point (in the global domain) needed to compute the north-fold for the current process
         ! (the fold mirrors the i-axis, hence the jpiglo - ... reflection;
         !  nimppt/nlcit are presumably the per-process global i-offset and local i-extent -- confirm in dom_oce)
         sxM = jpiglo - nimppt(narea) - nlcit(narea) + 1
         ! dxM is the last point (in the global domain) needed to compute the north-fold for the current process
         dxM = jpiglo - nimppt(narea) + 2
         !
         ! loop over the other north-fold processes to find the processes
         ! managing the points belonging to the sxT-dxT range
         !
         DO jn = 1, jpni
            !
            sxT = nfiimpp(jn, jpnj)                            ! sxT = 1st  point (in the global domain) of the jn process
            dxT = nfiimpp(jn, jpnj) + nfilcit(jn, jpnj) - 1    ! dxT = last point (in the global domain) of the jn process
            !
            ! three overlap cases between [sxM,dxM] and [sxT,dxT]:
            IF    ( sxT < sxM  .AND.  sxM < dxT ) THEN         ! jn's range straddles our start
               nsndto          = nsndto + 1
               isendto(nsndto) = jn
            ELSEIF( sxM <= sxT .AND.  dxM >= dxT ) THEN        ! jn's range fully inside ours
               nsndto          = nsndto + 1
               isendto(nsndto) = jn
            ELSEIF( dxM < dxT  .AND.  sxT < dxM ) THEN         ! jn's range straddles our end
               nsndto          = nsndto + 1
               isendto(nsndto) = jn
            ENDIF
            !
         END DO
         !
         ! loop bounds for the fold: default to the full local i-extent, then
         ! widen to the halo (nldi/nlei) on sides with no neighbour (-1 rank)
         nfsloop = 1
         nfeloop = nlci
         DO jn = 2,jpni-1
            IF( nfipproc(jn,jpnj) == (narea - 1) ) THEN        ! jn is this process
               IF( nfipproc(jn-1,jpnj) == -1 )   nfsloop = nldi
               IF( nfipproc(jn+1,jpnj) == -1 )   nfeloop = nlei
            ENDIF
         END DO
         !
      ENDIF
      !
      l_north_nogather = .TRUE.          ! signal that point-to-point fold exchange is active
      !
   END SUBROUTINE nemo_nfdcom
---|
663 | |
---|
664 | #else |
---|
   SUBROUTINE nemo_nfdcom          ! Dummy routine
      ! Stub compiled when key_mpp_mpi is NOT defined: the north-fold
      ! point-to-point setup only makes sense under MPI, so reaching this
      ! print means the routine was called in a non-MPI build.
      WRITE(*,*) 'nemo_nfdcom: You should not have seen this print! error?'
   END SUBROUTINE nemo_nfdcom
---|
668 | #endif |
---|
669 | |
---|
670 | SUBROUTINE istate_init |
---|
671 | !!---------------------------------------------------------------------- |
---|
672 | !! *** ROUTINE istate_init *** |
---|
673 | !! |
---|
674 | !! ** Purpose : Initialization to zero of the dynamics and tracers. |
---|
675 | !!---------------------------------------------------------------------- |
---|
676 | ! |
---|
677 | ! now fields ! after fields ! |
---|
678 | un (:,:,:) = 0._wp ; ua(:,:,:) = 0._wp ! |
---|
679 | vn (:,:,:) = 0._wp ; va(:,:,:) = 0._wp ! |
---|
680 | wn (:,:,:) = 0._wp ! ! |
---|
681 | hdivn(:,:,:) = 0._wp ! ! |
---|
682 | tsn (:,:,:,:) = 0._wp ! ! |
---|
683 | ! |
---|
684 | rhd (:,:,:) = 0.e0 |
---|
685 | rhop (:,:,:) = 0.e0 |
---|
686 | rn2 (:,:,:) = 0.e0 |
---|
687 | ! |
---|
688 | END SUBROUTINE istate_init |
---|
689 | |
---|
690 | |
---|
691 | SUBROUTINE stp_ctl( kt, kindic ) |
---|
692 | !!---------------------------------------------------------------------- |
---|
693 | !! *** ROUTINE stp_ctl *** |
---|
694 | !! |
---|
695 | !! ** Purpose : Control the run |
---|
696 | !! |
---|
697 | !! ** Method : - Save the time step in numstp |
---|
698 | !! |
---|
699 | !! ** Actions : 'time.step' file containing the last ocean time-step |
---|
700 | !!---------------------------------------------------------------------- |
---|
701 | INTEGER, INTENT(in ) :: kt ! ocean time-step index |
---|
702 | INTEGER, INTENT(inout) :: kindic ! indicator of solver convergence |
---|
703 | !!---------------------------------------------------------------------- |
---|
704 | ! |
---|
705 | IF( kt == nit000 .AND. lwp ) THEN |
---|
706 | WRITE(numout,*) |
---|
707 | WRITE(numout,*) 'stp_ctl : time-stepping control' |
---|
708 | WRITE(numout,*) '~~~~~~~' |
---|
709 | ! open time.step file |
---|
710 | CALL ctl_opn( numstp, 'time.step', 'REPLACE', 'FORMATTED', 'SEQUENTIAL', -1, numout, lwp, narea ) |
---|
711 | ENDIF |
---|
712 | ! |
---|
713 | IF(lwp) WRITE ( numstp, '(1x, i8)' ) kt !* save the current time step in numstp |
---|
714 | IF(lwp) REWIND( numstp ) ! -------------------------- |
---|
715 | ! |
---|
716 | END SUBROUTINE stp_ctl |
---|
717 | |
---|
718 | !!====================================================================== |
---|
719 | END MODULE nemogcm |
---|