source: TOOLS/MOSAIX/CalvingWeights.py @ 5924

Last change on this file since 5924 was 5915, checked in by snguyen, 3 years ago

Added the --ocePerio option to RunoffWeights.py and CalvingWeights.py

  • Property svn:keywords set to Date Revision HeadURL Author Id
File size: 22.5 KB
### ===========================================================================
###
### Compute calving weights.
###
### ===========================================================================
##
##  Warning, to install, configure, run, use any of Olivier Marti's
##  software or to read the associated documentation you'll need at least
##  one (1) brain in a reasonably working order. Lack of this implement
##  will void any warranties (either express or implied).
##  O. Marti assumes no responsibility for errors, omissions,
##  data loss, or any other consequences caused directly or indirectly by
##  the usage of his software by incorrectly or partially configured
##  personnel.
##
import netCDF4, numpy as np
import sys, os, platform, argparse, textwrap, time
from scipy import ndimage
import nemo

## SVN information
__Author__   = "$Author$"
__Date__     = "$Date$"
__Revision__ = "$Revision$"
__Id__       = "$Id$"
__HeadURL__  = "$HeadURL$"
__SVN_Date__ = "$SVN_Date: $"

###

### ===== Handling command line parameters ==================================================
# Creating a parser
parser = argparse.ArgumentParser (
    description = """Compute calving weights""",
    epilog='-------- End of the help message --------')

# Adding arguments
parser.add_argument ('--oce'     , help='oce model name', type=str, default='eORCA1.2', choices=['ORCA2.3', 'eORCA1.2', 'eORCA025', 'eORCA025.1'] )
parser.add_argument ('--atm'     , help='atm model name (ICO* or LMD*)', type=str, default='ICO40'    )
parser.add_argument ('--type'    , help='Type of distribution', type=str, choices=['iceshelf', 'iceberg', 'nosouth', 'full'], default='full'  )
parser.add_argument ('--dir'     , help='Directory of input file', type=str, default='./' )
parser.add_argument ('--repartition_file', help='Data file with iceberg melting' , type=str, default='./runoff-icb_DaiTrenberth_Depoorter_eORCA1_JD.nc' )
parser.add_argument ('--repartition_var' , help='Variable name for iceshelf'      , type=str, default=None)
parser.add_argument ('--output'  , help='output rmp file name', default='rmp_tlmd_to_torc_calving.nc' )
parser.add_argument ('--fmt'     , help='NetCDF file format, using nco syntax', default='netcdf4', choices=['classic', 'netcdf3', '64bit', '64bit_data', '64bit_offset', 'netcdf4', 'netcdf4_classic'] )
parser.add_argument ('--ocePerio'   , help='periodicity of ocean grid', type=int, default=0 )

# Parse command line
myargs = parser.parse_args()
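
# Example invocation (illustrative only; paths, grid names and output file name depend on
# your MOSAIX setup):
#   python CalvingWeights.py --oce eORCA1.2 --atm ICO40 --type iceberg \
#          --dir ./INPUT --repartition_file ./runoff-icb_DaiTrenberth_Depoorter_eORCA1_JD.nc \
#          --output rmp_tico_to_torc_calving.nc --fmt netcdf4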

# Model Names
src_Name = myargs.atm ; dst_Name = myargs.oce

# Default vars
if myargs.repartition_var is None :
    # Runoff from Antarctic ice shelves (Depoorter, 2013)
    if myargs.type == 'iceshelf' : repartitionVar = 'sornfisf'

    # Runoff from Antarctic icebergs (Depoorter, 2013)
    if myargs.type == 'iceberg' :  repartitionVar = 'Icb_Flux'

else :                             repartitionVar = myargs.repartition_var

# Latitude limits of each calving zone
limit_lat = ( (40.0, 90.0), (-50.0, 40.0), ( -90.0, -50.0) )

nb_zone = len(limit_lat)
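
# The three zones above split the globe into a northern band (40N-90N), a low/mid-latitude
# band (50S-40N) and a southern band (90S-50S). For each band, the calving received from the
# atmosphere is funnelled through one artificial source point and redistributed over the
# band's ocean points according to the repartition key built below.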

if myargs.fmt == 'classic'         : FmtNetcdf = 'NETCDF3_CLASSIC'
if myargs.fmt == 'netcdf3'         : FmtNetcdf = 'NETCDF3_CLASSIC'
if myargs.fmt == '64bit'           : FmtNetcdf = 'NETCDF3_64BIT_OFFSET'
if myargs.fmt == '64bit_data'      : FmtNetcdf = 'NETCDF3_64BIT_DATA'
if myargs.fmt == '64bit_offset'    : FmtNetcdf = 'NETCDF3_64BIT_OFFSET'
if myargs.fmt == 'netcdf4'         : FmtNetcdf = 'NETCDF4'
if myargs.fmt == 'netcdf4_classic' : FmtNetcdf = 'NETCDF4_CLASSIC'
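
# FmtNetcdf is passed as the 'format' argument of netCDF4.Dataset when creating the output
# file below; the strings above are the format identifiers defined by the netCDF4-python library.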
### ==========================================================================================

# Model short names
src_name = None ; dst_name = None
if src_Name.count('ICO')  != 0 : src_name = 'ico'
if src_Name.count('LMD')  != 0 : src_name = 'lmd'
if dst_Name.count('ORCA') != 0 : dst_name = 'orc'

CplModel = dst_Name + 'x' + src_Name

print ('Atm names : ' + src_name + ' ' + src_Name )
print ('Oce names : ' + dst_name + ' ' + dst_Name )
print (' ')

# Reading domains characteristics
areas = myargs.dir + '/areas_' + dst_Name + 'x' + src_Name + '.nc'
masks = myargs.dir + '/masks_' + dst_Name + 'x' + src_Name + '.nc'
grids = myargs.dir + '/grids_' + dst_Name + 'x' + src_Name + '.nc'

print ('Opening ' + areas)
f_areas = netCDF4.Dataset ( areas, "r" )
print ('Opening ' + masks)
f_masks = netCDF4.Dataset ( masks, "r" )
print ('Opening ' + grids)
f_grids = netCDF4.Dataset ( grids, "r" )

src_lon = f_grids.variables ['t' + src_name + '.lon'][:]
src_lat = f_grids.variables ['t' + src_name + '.lat'][:]
dst_lon = f_grids.variables ['t' + dst_name + '.lon'][:]
dst_lat = f_grids.variables ['t' + dst_name + '.lat'][:]

src_msk = f_masks.variables ['t' + src_name + '.msk'][:]
dst_msk = f_masks.variables ['t' + dst_name + '.msk'][:]
dst_mskutil = 1-dst_msk # Reversing the convention : 0 on continent, 1 on ocean
print ('dst_msk     : ' + str(np.sum(dst_msk)))
print ('dst_mskutil : ' + str(np.sum(dst_mskutil)))

# Ocean grid periodicity
nperio_dst=myargs.ocePerio

# Periodicity masking for NEMO
if nperio_dst == 0 :
    if dst_Name == 'ORCA2.3'    : nperio_dst = 4
    if dst_Name == 'eORCA1.2'   : nperio_dst = 6
    if dst_Name == 'ORCA025'    : nperio_dst = 6
    if dst_Name == 'eORCA025'   : nperio_dst = 6
    if dst_Name == 'eORCA025.1' : nperio_dst = 6
print ('nperio_dst: ' + str(nperio_dst) )
dst_mskutil = nemo.lbc_mask (dst_mskutil, nperio=nperio_dst, cd_type='T' )
print ('dst_mskutil : ' + str(np.sum(dst_mskutil)))
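# nemo.lbc_mask comes from the local nemo helper module; given the NEMO periodicity type
# (nperio) it is expected to zero the duplicated halo / north-fold points of the T grid,
# so that those duplicated points never receive a calving weight.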

##
## Fill Closed seas and other
## ==================================================

# Preparation by closing some straits
# -----------------------------------
if dst_Name in [ 'eORCA025', 'eORCA025.1' ] :
    print ('Filling some seas for ' + dst_Name )
    # Set Gibraltar strait to 0 to fill Mediterranean Sea
    dst_mskutil[838:841,1125] = 0
    # Set Bab-el-Mandeb to 0 to fill Red Sea
    dst_mskutil[736,1321:1324] = 0
    # Set Skagerrak to 0 to fill Baltic Sea
    dst_mskutil[961,1179:1182] = 0
    # Set Hormuz Strait to 0 to fill Arabo-Persian Gulf
    dst_mskutil[794:798,1374] = 0
    # Set Hudson Strait to 0 to fill Hudson Bay
    dst_mskutil[997:1012,907] = 0

if dst_Name == 'eORCA1.2' :
    print ('Filling some seas for eORCA1.2')
    # Set Gibraltar strait to 0 to fill Mediterranean Sea
    dst_mskutil[240, 283] = 0
    # Set Bab-el-Mandeb to 0 to fill Red Sea
    dst_mskutil[211:214, 332] = 0
    # Set Skagerrak to 0 to fill Baltic Sea
    dst_mskutil[272:276, 293] = 0
    # Set Hormuz Strait to 0 to fill Arabo-Persian Gulf
    dst_mskutil[227:230, 345] = 0
    # Set Hudson Strait to 0 to fill Hudson Bay
    dst_mskutil[284,222:225] = 0

if dst_Name == 'ORCA2.3' :
    print ('Filling some seas for ORCA2.3')
    # Set Gibraltar strait to 0 to fill Mediterranean Sea
    dst_mskutil[101,139] = 0
    # Set Black Sea to zero. At the edge of the domain : binary_fill_holes fails
    dst_mskutil[ 99:111,   0:  5] = 0
    dst_mskutil[106:111, 173:182] = 0
    # Set Skagerrak to 0 to fill Baltic Sea
    dst_mskutil[115,144] = 0
    # Set Hudson Strait to 0 to fill Hudson Bay
    dst_mskutil[120:123,110] = 0
    # Set Bab-el-Mandeb to 0 to fill Red Sea
    dst_mskutil[87:89,166] = 0

dst_closed = dst_mskutil

# Fill closed seas with image processing library
# ----------------------------------------------
dst_mskutil = nemo.lbc_mask ( 1-ndimage.binary_fill_holes (1-nemo.lbc(dst_mskutil, nperio=nperio_dst, cd_type='T')), nperio=nperio_dst, cd_type='T' )
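# How the hole filling works: 1-mask puts 1 on land and 0 on ocean, so the seas whose straits
# were closed above show up as holes in the land field; ndimage.binary_fill_holes fills them,
# and 1-result turns those enclosed seas into land in the utility mask. nemo.lbc / nemo.lbc_mask
# are applied before and after, presumably to keep the field consistent with the NEMO
# periodic / north-fold boundary conditions.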

# Surfaces
src_srf = f_areas.variables ['t' + src_name + '.srf']
dst_srf = f_areas.variables ['t' + dst_name + '.srf']
dst_srfutil = dst_srf * np.float64 (dst_mskutil)

dst_srfutil_sum = np.sum( dst_srfutil)

src_clo = f_grids.variables ['t' + src_name + '.clo'][:]
src_cla = f_grids.variables ['t' + src_name + '.cla'][:]
dst_clo = f_grids.variables ['t' + dst_name + '.clo'][:]
dst_cla = f_grids.variables ['t' + dst_name + '.cla'][:]

# Indices
( src_jpj, src_jpi) = src_lat.shape ; src_grid_size = src_jpj*src_jpi
( dst_jpj, dst_jpi) = dst_lat.shape ; dst_grid_size = dst_jpj*dst_jpi
orc_index = np.arange (dst_jpj*dst_jpi, dtype=np.int32) + 1 # Fortran indices (starting at one)

### ===== Reading needed data ==================================================
if myargs.type in ['iceberg', 'iceshelf' ]:
    # Reading data file for calving or iceberg geometry around Antarctica
    print ( 'Opening ' + myargs.repartition_file)
    f_repartition = netCDF4.Dataset ( myargs.repartition_file, "r" )
    repartition = np.sum ( f_repartition.variables [repartitionVar][:], axis=0 )
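    # np.sum over axis=0 collapses the leading dimension of the repartition variable
    # (presumably time or month in the Dai & Trenberth / Depoorter input file) into a single
    # 2D pattern; only its spatial shape matters, since the key is renormalised per basin below.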

## Before loop on basins
remap_matrix = np.empty ( shape=(0), dtype=np.float64 )
src_address  = np.empty ( shape=(0), dtype=np.int32   )
dst_address  = np.empty ( shape=(0), dtype=np.int32   )

print (' ')

### ===== Starting loop on basins ==============================================

# Initialise some fields
remap_matrix = np.empty ( shape=(0), dtype=np.float64 )
src_address  = np.empty ( shape=(0), dtype=np.int32   )
dst_address  = np.empty ( shape=(0), dtype=np.int32   )

basin_msk       = np.zeros( shape=(nb_zone, dst_jpj, dst_jpi), dtype=np.float32)
key_repartition = np.zeros( shape=(nb_zone, dst_jpj, dst_jpi), dtype=np.float64)

## Loop
for n_bas in np.arange ( nb_zone ) :
    south = False ; ok_run = False
    lat_south = np.min(limit_lat[n_bas]) ; lat_north = np.max(limit_lat[n_bas])
    if lat_south <= -60.0 : south = True

    print ( 'basin: {:2d} -- Latitudes: {:+.1f} {:+.1f} --'.format(n_bas, lat_south, lat_north) )
    ##
    if myargs.type == 'iceberg'  and     south : ok_run = True  ; print ('Applying iceberg to south' )
    if myargs.type == 'iceshelf' and     south : ok_run = True  ; print ('Applying iceshelf to south' )
    if myargs.type == 'iceberg'  and not south : ok_run = False ; print ('Skipping iceberg: not south ')
    if myargs.type == 'iceshelf' and not south : ok_run = False ; print ('Skipping iceshelf: not south ')
    if myargs.type == 'nosouth'  and     south : ok_run = False ; print ('Skipping south: nosouth case' )
    if myargs.type == 'nosouth'  and not south : ok_run = True  ; print ('Running: not in south, uniform repartition')
    if myargs.type == 'full'                   : ok_run = True  ; print ('Running general trivial case, uniform repartition' )

    if ok_run :
        # Calving integral sent to one point per latitude band
        index_src = ((src_grid_size - 1)*n_bas) // (nb_zone-1) + 1
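        # index_src places one artificial source point per band, evenly spread over the
        # atmospheric grid in Fortran (1-based) numbering: n_bas=0 gives 1 and the last band
        # gives src_grid_size. For instance, with nb_zone=3 and a 16002-point source grid the
        # three points are 1, 8001 and 16002.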

        # Basin mask
        basin_msk[n_bas] = np.where ( (dst_lat > lat_south ) & (dst_lat <= lat_north ), dst_mskutil, 0 )

        # Repartition pattern
        if myargs.type in ['iceberg', 'iceshelf' ] : key_repartition[n_bas] = repartition * basin_msk[n_bas]
        else :                                       key_repartition[n_bas] =               basin_msk[n_bas]

        # Integral and normalisation (only the current basin is rescaled)
        sum_repartition = np.sum ( key_repartition[n_bas] * dst_srfutil )
        key_repartition[n_bas] = key_repartition[n_bas] / sum_repartition

        print ( 'Sum of repartition key                      : {:12.3e}'.format (np.sum (key_repartition[n_bas]               )) )
        print ( 'Integral (area weighted) of repartition key : {:12.3e}'.format (np.sum (key_repartition[n_bas] * dst_srfutil )) )

        # Basin surface (modulated by repartition key)
        basin_srfutil = np.sum ( key_repartition[n_bas] * dst_srfutil )

        # Weights and links
        poids = 1.0 / basin_srfutil
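        # After the normalisation above, sum(key*area) = 1 over the basin, so basin_srfutil is 1
        # and poids is 1: each link weight is simply the normalised key at that ocean point.
        # The area-weighted sum of the weights over the basin is therefore 1, i.e. the value
        # placed at the band's single atmospheric point is redistributed so that its area
        # integral over the basin is conserved.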
        matrix_local   = np.where ( basin_msk[n_bas].ravel() > 0.5, key_repartition[n_bas].ravel()*poids, 0. )
        matrix_local   = matrix_local[matrix_local > 0.0] # Keep only non zero values
        num_links      = np.count_nonzero (matrix_local)
        # Address on source grid : all links point to the same LMDZ point.
        src_address_local = np.ones(num_links, dtype=np.int32 )*index_src
        # Address on destination grid : each NEMO point with a non zero link
        dst_address_local = np.where ( key_repartition[n_bas].ravel() > 0.0, orc_index, 0)
        dst_address_local = dst_address_local[dst_address_local > 0]
        # Append to global tabs
        remap_matrix = np.append ( remap_matrix, matrix_local      )
        src_address  = np.append ( src_address , src_address_local )
        dst_address  = np.append ( dst_address , dst_address_local )
        #
        #print ( 'Sum of remap_matrix : {:12.3e}'.format(np.sum(matrix_local)) )
        print ( 'Point in atmospheric grid : {:4d} -- num_links: {:6d}'.format(index_src, num_links) )
        print ('  ')



## End of loop
print (' ')

#
num_links = np.count_nonzero (remap_matrix)
print ( 'Total num_links : {:10d}'.format(num_links) )

# Diag : interpolate uniform field
src_field = np.zeros(shape=(src_grid_size))
for n_bas in np.arange ( nb_zone ) :
    index_src = ((src_grid_size - 1)*n_bas) // (nb_zone-1) + 1
    src_field[index_src-1] = n_bas

dst_field = np.zeros(shape=(dst_grid_size,))
for link in np.arange (num_links) :
    dst_field [dst_address [link]-1] +=  remap_matrix[link] * src_field[src_address[link]-1]
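
# Sanity check of the weights: src_field holds the basin number at each band's artificial
# source point, and applying the links as dst(j) += w(k)*src(i) should paint each band's
# ocean points with (a scaled copy of) its basin index. Both fields are written to the
# output file as src_field / dst_field for visual inspection.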

### ===== Writing the weights file, for OASIS MCT ==========================================

# Output file
calving = myargs.output
f_calving = netCDF4.Dataset ( calving, 'w', format=FmtNetcdf )

print ('Output file: ' + calving )

f_calving.Conventions     = "CF-1.6"
f_calving.source          = "IPSL Earth system model"
f_calving.group           = "ICMC IPSL Climate Modelling Center"
f_calving.Institution     = "IPSL https://www.ipsl.fr"
f_calving.Ocean           = dst_Name + " https://www.nemo-ocean.eu"
f_calving.Atmosphere      = src_Name + " http://lmdz.lmd.jussieu.fr"
if myargs.type in ['iceberg', 'iceshelf' ]: f_calving.originalFiles = myargs.repartition_file
f_calving.associatedFiles = grids + " " + areas + " " + masks
f_calving.directory       = os.getcwd ()
f_calving.description     = "Generated with XIOS http://forge.ipsl.jussieu.fr/ioserver and MOSAIX https://forge.ipsl.jussieu.fr/igcmg/browser/TOOLS/MOSAIX"
f_calving.title           = calving
f_calving.Program         = "Generated by " + sys.argv[0] + " with flags " + str(sys.argv[1:])
f_calving.repartitionType = myargs.type
if myargs.type in [ 'iceberg', 'iceshelf' ] :
    f_calving.repartitionFile = myargs.repartition_file
    f_calving.repartitionVar  = repartitionVar
f_calving.gridsFile       = grids
f_calving.areasFile       = areas
f_calving.masksFile       = masks
f_calving.timeStamp       = time.asctime()
f_calving.HOSTNAME        = platform.node()
#f_calving.LOGNAME         = os.getlogin()
f_calving.Python          = "Python version " +  platform.python_version()
f_calving.OS              = platform.system()
f_calving.release         = platform.release()
f_calving.hardware        = platform.machine()
f_calving.conventions     = "SCRIP"
if src_name == 'lmd' : f_calving.source_grid = "curvilinear"
if src_name == 'ico' : f_calving.source_grid = "unstructured"
f_calving.dest_grid       = "curvilinear"
f_calving.Model           = "IPSL CM6"
f_calving.SVN_Author      = "$Author$"
f_calving.SVN_Date        = "$Date$"
f_calving.SVN_Revision    = "$Revision$"
f_calving.SVN_Id          = "$Id$"
f_calving.SVN_HeadURL     = "$HeadURL$"

d_nb_zone   = f_calving.createDimension ('nb_zone'         ,   nb_zone )
d_num_links = f_calving.createDimension ('num_links'       , num_links )
d_num_wgts  = f_calving.createDimension ('num_wgts'        ,         1 )

d_src_grid_size    = f_calving.createDimension ('src_grid_size'   , src_grid_size )
d_src_grid_corners = f_calving.createDimension ('src_grid_corners', src_clo.shape[0]  )
d_src_grid_rank    = f_calving.createDimension ('src_grid_rank'   ,        2  )

d_dst_grid_size    = f_calving.createDimension ('dst_grid_size'   , dst_grid_size )
d_dst_grid_corners = f_calving.createDimension ('dst_grid_corners', dst_clo.shape[0] )
d_dst_grid_rank    = f_calving.createDimension ('dst_grid_rank'   ,        2  )

v_remap_matrix = f_calving.createVariable ( 'remap_matrix', 'f8', ('num_links', 'num_wgts') )
v_src_address  = f_calving.createVariable ( 'src_address' , 'i4', ('num_links',) )
v_src_address.convention = "Fortran style addressing, starting at 1"
v_dst_address  = f_calving.createVariable ( 'dst_address' , 'i4', ('num_links',) )
v_dst_address.convention = "Fortran style addressing, starting at 1"

v_remap_matrix[:] = remap_matrix
v_src_address [:] = src_address
v_dst_address [:] = dst_address
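
# These three variables follow the SCRIP / OASIS-MCT weight file convention: for each link k,
# dst_field(dst_address(k)) += remap_matrix(k,1) * src_field(src_address(k)), with addresses
# counted Fortran style (starting at 1) over the flattened grids.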

v_src_grid_dims       = f_calving.createVariable ( 'src_grid_dims'      , 'i4', ('src_grid_rank',) )
v_src_grid_center_lon = f_calving.createVariable ( 'src_grid_center_lon', 'f8', ('src_grid_size',) )
v_src_grid_center_lat = f_calving.createVariable ( 'src_grid_center_lat', 'f8', ('src_grid_size',) )
v_src_grid_center_lon.units='degrees_east'  ; v_src_grid_center_lon.long_name='Longitude'
v_src_grid_center_lon.bounds="src_grid_corner_lon"
v_src_grid_center_lat.units='degrees_north' ; v_src_grid_center_lat.long_name='Latitude'
v_src_grid_center_lat.bounds="src_grid_corner_lat"
v_src_grid_corner_lon = f_calving.createVariable ( 'src_grid_corner_lon', 'f8', ('src_grid_size', 'src_grid_corners')  )
v_src_grid_corner_lat = f_calving.createVariable ( 'src_grid_corner_lat', 'f8', ('src_grid_size', 'src_grid_corners')  )
v_src_grid_corner_lon.units="degrees_east"
v_src_grid_corner_lat.units="degrees_north"
v_src_grid_area       = f_calving.createVariable ( 'src_grid_area'      , 'f8', ('src_grid_size',)  )
v_src_grid_area.long_name="Grid area" ; v_src_grid_area.standard_name="cell_area" ; v_src_grid_area.units="m2"
v_src_grid_imask      = f_calving.createVariable ( 'src_grid_imask'     , 'f8', ('src_grid_size',)  )
v_src_grid_imask.long_name="Land-sea mask" ; v_src_grid_imask.units="Land:1, Ocean:0"

v_src_grid_dims      [:] = ( src_jpi, src_jpj )
v_src_grid_center_lon[:] = src_lon[:].ravel()
v_src_grid_center_lat[:] = src_lat[:].ravel()
v_src_grid_corner_lon[:] = src_clo.reshape( (src_jpi*src_jpj, d_src_grid_corners.__len__()) )
v_src_grid_corner_lat[:] = src_cla.reshape( (src_jpi*src_jpj, d_src_grid_corners.__len__()) )
v_src_grid_area      [:] = src_srf[:].ravel()
v_src_grid_imask     [:] = src_msk[:].ravel()

# --

v_dst_grid_dims       = f_calving.createVariable ( 'dst_grid_dims'      , 'i4', ('dst_grid_rank',) )
v_dst_grid_center_lon = f_calving.createVariable ( 'dst_grid_center_lon', 'f8', ('dst_grid_size',) )
v_dst_grid_center_lat = f_calving.createVariable ( 'dst_grid_center_lat', 'f8', ('dst_grid_size',) )
v_dst_grid_center_lon.units='degrees_east'  ; v_dst_grid_center_lon.long_name='Longitude'
v_dst_grid_center_lon.bounds="dst_grid_corner_lon"
v_dst_grid_center_lat.units='degrees_north' ; v_dst_grid_center_lat.long_name='Latitude'
v_dst_grid_center_lat.bounds="dst_grid_corner_lat"
v_dst_grid_corner_lon = f_calving.createVariable ( 'dst_grid_corner_lon', 'f8', ('dst_grid_size', 'dst_grid_corners')  )
v_dst_grid_corner_lat = f_calving.createVariable ( 'dst_grid_corner_lat', 'f8', ('dst_grid_size', 'dst_grid_corners')  )
v_dst_grid_corner_lon.units="degrees_east"
v_dst_grid_corner_lat.units="degrees_north"
v_dst_grid_area       = f_calving.createVariable ( 'dst_grid_area'      , 'f8', ('dst_grid_size',)  )
v_dst_grid_area.long_name="Grid area" ; v_dst_grid_area.standard_name="cell_area" ; v_dst_grid_area.units="m2"
v_dst_grid_imask      = f_calving.createVariable ( 'dst_grid_imask'     , 'f8', ('dst_grid_size',)  )
v_dst_grid_imask.long_name="Land-sea mask" ; v_dst_grid_imask.units="Land:1, Ocean:0"

v_dst_bassin      = f_calving.createVariable ( 'dst_bassin'      , 'f8', ('nb_zone', 'dst_grid_size',)  )
v_dst_repartition = f_calving.createVariable ( 'dst_repartition' , 'f8', ('nb_zone', 'dst_grid_size',)  )

v_dst_southLimit = f_calving.createVariable ('dst_southLimit', 'f4', ('nb_zone',) )
v_dst_northLimit = f_calving.createVariable ('dst_northLimit', 'f4', ('nb_zone',) )
v_dst_southLimit[:] = np.min(limit_lat, axis=(1,) )
v_dst_northLimit[:] = np.max(limit_lat, axis=(1,) )

v_dst_grid_dims      [:] = ( dst_jpi, dst_jpj )
v_dst_grid_center_lon[:] = dst_lon[:].ravel()
v_dst_grid_center_lat[:] = dst_lat[:].ravel()
v_dst_grid_corner_lon[:] = dst_clo.reshape( (dst_jpi*dst_jpj, d_dst_grid_corners.__len__()) )
v_dst_grid_corner_lat[:] = dst_cla.reshape( (dst_jpi*dst_jpj, d_dst_grid_corners.__len__()) )
v_dst_grid_area      [:] = dst_srf[:].ravel()
v_dst_grid_imask     [:] = dst_msk[:].ravel()

v_dst_bassin[:]      = basin_msk.reshape      ( (nb_zone,dst_grid_size) )
v_dst_repartition[:] = key_repartition.reshape( (nb_zone,dst_grid_size) )

# Additional fields for debugging
# ===============================
v_dst_grid_imaskutil      = f_calving.createVariable ( 'dst_grid_imaskutil'     , 'f8', ('dst_grid_size',)  )
v_dst_grid_imaskutil.long_name="Land-sea mask" ; v_dst_grid_imaskutil.units="Land:1, Ocean:0"
v_dst_grid_imaskclose     = f_calving.createVariable ( 'dst_grid_imaskclose'    , 'f8', ('dst_grid_size',)  )
v_dst_grid_imaskclose.long_name="Land-sea mask" ; v_dst_grid_imaskclose.units="Land:1, Ocean:0"
v_dst_grid_imaskutil [:] = dst_mskutil[:].ravel()
v_dst_grid_imaskclose[:] = dst_closed[:].ravel()

v_dst_lon_addressed = f_calving.createVariable ( 'dst_lon_addressed', 'f8', ('num_links',) )
v_dst_lat_addressed = f_calving.createVariable ( 'dst_lat_addressed', 'f8', ('num_links',) )
v_dst_lon_addressed.long_name="Longitude" ; v_dst_lon_addressed.standard_name="longitude" ; v_dst_lon_addressed.units="degrees_east"
v_dst_lat_addressed.long_name="Latitude"  ; v_dst_lat_addressed.standard_name="latitude"  ; v_dst_lat_addressed.units="degrees_north"
v_dst_area_addressed  = f_calving.createVariable ( 'dst_area_addressed', 'f8', ('num_links',) )
v_dst_area_addressed.long_name="Grid area" ; v_dst_area_addressed.standard_name="cell_area" ; v_dst_area_addressed.units="m2"
v_dst_imask_addressed = f_calving.createVariable ( 'dst_imask_addressed', 'i4', ('num_links',) )
v_dst_imask_addressed.long_name="Land-sea mask" ; v_dst_imask_addressed.units="Land:1, Ocean:0"
v_dst_imaskutil_addressed = f_calving.createVariable ( 'dst_imaskutil_addressed', 'i4', ('num_links',) )
v_dst_imaskutil_addressed.long_name="Land-sea mask" ; v_dst_imaskutil_addressed.units="Land:1, Ocean:0"

v_dst_lon_addressed  [:] = dst_lon.ravel()[dst_address-1].ravel()
v_dst_lat_addressed  [:] = dst_lat.ravel()[dst_address-1].ravel()
v_dst_area_addressed [:] = dst_srf[:].ravel()[dst_address-1].ravel()
v_dst_imask_addressed[:] = dst_msk[:].ravel()[dst_address-1].ravel()
v_dst_imaskutil_addressed[:] = dst_mskutil.ravel()[dst_address-1].ravel()

v_src_field = f_calving.createVariable ( 'src_field', 'f8', ('src_grid_size',) )
v_dst_field = f_calving.createVariable ( 'dst_field', 'f8', ('dst_grid_size',) )
v_src_field[:] = src_field
v_dst_field[:] = dst_field

f_calving.close ()


### ===== That's all Folks !! ================================================