source: TOOLS/MOSAIX/CalvingWeights.py @ 6688

Last change on this file since 6688 was 6666, checked in by omamce, 7 months ago

O.M. : MOSAIX

Improved code with pylint analysis

  • Property svn:keywords set to Date Revision HeadURL Author Id
File size: 24.5 KB
1### ===========================================================================
2###
3### Compute calving weights.
4###
5### ===========================================================================
6##
7##  MOSAIX is under CeCILL_V2 licence. See "Licence_CeCILL_V2-en.txt"
8##  file for an english version of the licence and
9##  "Licence_CeCILL_V2-fr.txt" for a french version.
10##
11##  Permission is hereby granted, free of charge, to any person or
12##  organization obtaining a copy of the software and accompanying
13##  documentation covered by this license (the "Software") to use,
14##  reproduce, display, distribute, execute, and transmit the
15##  Software, and to prepare derivative works of the Software, and to
16##  permit third-parties to whom the Software is furnished to do so,
17##  all subject to the following:
18##
19##  Warning, to install, configure, run, use any of MOSAIX software or
20##  to read the associated documentation you'll need at least one (1)
21##  brain in a reasonably working order. Lack of this implement will
22##  void any warranties (either express or implied). The authors assume
23##  no responsibility for errors, omissions, data loss, or any other
24##  consequences caused directly or indirectly by the usage of this
25##  software by incorrectly or partially configured users.
26##
27'''Python code to generate weights for calving. Launched by `CreateWeights.bash`.
28
29More information with python `CalvingWeights.py -h`.
30
31- The atmosphere model integrates the calving over specific
32  regions. Presently the regions are three latitudinal bands with
33  limits 90°S/50°S/40°N/90°N.
34
35- The weights distribute the calving into the ocean within the
36  corresponding latitudinal band. By default, the distribution is
37  uniform.
38
39- For the southernmost band, it is possible to use a geographical
40  distribution read from a file. Such files are currently available
41  for eORCA1 and eORCA025.
42
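A typical invocation might look like the following (a sketch only : the file
names are the script defaults and are purely illustrative) :

    python CalvingWeights.py --oce eORCA1.2 --atm ICO40 --type iceberg \
           --repartition_file runoff-icb_DaiTrenberth_Depoorter_eORCA1_JD.nc \
           --grids grids.nc --areas areas.nc --masks masks.nc \
           --output rmp_tlmd_to_torc_calving.nc
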
43## SVN information
44Author   = "$Author$"
45Date     = "$Date$"
46Revision = "$Revision$"
47Id       = "$Id$"
48HeadURL  = "$HeadURL$"
49'''
50import sys
51import os
52import getpass
53import platform
54import argparse
55import time
56import numpy as np
57import xarray as xr
58from scipy import ndimage
59import nemo
60
61## SVN information
62__SVN__ =  ( {
63    'Author'   : "$Author$",
64    'Date'     : '$Date$',
65    'Revision' : '$Revision$',
66    'Id'       : '$Id$',
67    'HeadURL'  : '$HeadURL$',
68    'SVN_Date' : '$SVN_Date: $',
69    } )
70###
71
72### ===== Handling command line parameters ==========================
73# Creating a parser
74parser = argparse.ArgumentParser (
75    description = '''Compute calving weights''',
76    epilog='-------- End of the help message --------')
77
78# Adding arguments
79parser.add_argument ('--oce'     , help='oce model name', type=str, default='eORCA1.2',
80                      choices=['ORCA2.3', 'ORCA1.0', 'ORCA1.1', 'ORCA1_CNRM', 'eORCA1.2',
81                               'eORCA1.4', 'eORCA1.4.2', 'eORCA025', 'eORCA025.1', 'eORCA025.4'] )
82parser.add_argument ('--atm'     , type=str, default='ICO40',
83                         help='atm model name (ICO* or LMD*)' )
84parser.add_argument ('--type'    , help='Type of distribution', type=str,
85                         choices=['iceshelf', 'iceberg', 'nosouth', 'full'], default='full'  )
86## parser.add_argument ('--dir'     , help='Directory of input file', type=str, default='./' )
87parser.add_argument ('--repartition_file', help='Data files with iceberg melting' , type=str,
88                         default='./runoff-icb_DaiTrenberth_Depoorter_eORCA1_JD.nc' )
89parser.add_argument ('--repartition_var' , type=str, default=None,
90                         help='Variable name for iceshelf' )
91parser.add_argument ('--grids' , help='grids file name', default='grids.nc' )
92parser.add_argument ('--areas' , help='areas file name', default='areas.nc' )
93parser.add_argument ('--masks' , help='masks file name', default='masks.nc' )
94parser.add_argument ('--o2a'   , help='o2a file name'  , default='o2a.nc'   )
95parser.add_argument ('--output', help='output rmp file name',
96                         default='rmp_tlmd_to_torc_calving.nc' )
97parser.add_argument ('--fmt' , default='netcdf4'    ,
98                         help='NetCDF file format, using nco syntax',
99                         choices=['classic', 'netcdf3', '64bit', '64bit_data',
100                                  '64bit_offset', 'netcdf4', 'netcdf4_classic'] )
101parser.add_argument ('--ocePerio', type=float, default=0, choices=nemo.NPERIO_VALID_RANGE,
102                         help='periodicity of ocean grid' )
103parser.add_argument ('--modelName'   , help='Name of model', type=str, default=None)
104
105# Parse command line
106myargs = parser.parse_args ()
107
108# Model Names
109src_Name   = myargs.atm
110dst_Name   = myargs.oce
111model_name = myargs.modelName
112
113# Default vars
114if myargs.repartition_var is None :
115    # Runoff from Antarctica iceshelves (Depoorter, 2013)
116    if myargs.type == 'iceshelf' :
117        repartitionVar = 'sornfisf'
118    # Runoff from Antarctica iceberg (Depoorter, 2013)
119    if myargs.type == 'iceberg' :
120        repartitionVar = 'Icb_Flux'
121else :
122    repartitionVar = myargs.repartition_var
123
124# Latitude limits of each calving zone
125limit_lat = ( (40.0, 90.0), (-50.0, 40.0), ( -90.0, -50.0) )
126
127nb_zone = len(limit_lat)
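# Zones are ordered from north to south : index 0 is the 40N-90N band,
# index 1 the 50S-40N band, and index 2 the 90S-50S band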
128
129if myargs.fmt == 'classic'         : FmtNetcdf = 'NETCDF3_CLASSIC'
130if myargs.fmt == 'netcdf3'         : FmtNetcdf = 'NETCDF3_CLASSIC'
131if myargs.fmt == '64bit'           : FmtNetcdf = 'NETCDF3_64BIT_OFFSET'
132if myargs.fmt == '64bit_data'      : FmtNetcdf = 'NETCDF3_64BIT_DATA'
133if myargs.fmt == '64bit_offset'    : FmtNetcdf = 'NETCDF3_64BIT_OFFSET'
134if myargs.fmt == 'netcdf4'         : FmtNetcdf = 'NETCDF4'
135if myargs.fmt == 'netcdf4_classic' : FmtNetcdf = 'NETCDF4_CLASSIC'
136### =====================================================================
137
138# Model short names
139src_name = None
140dst_name = None
141if src_Name.count('ICO')  != 0 : src_name = 'ico'
142if src_Name.count('LMD')  != 0 : src_name = 'lmd'
143if dst_Name.count('ORCA') != 0 : dst_name = 'orc'
144
145CplModel = dst_Name + 'x' + src_Name
146
147print ('Atm names : ' + src_name + ' ' + src_Name )
148print ('Oce names : ' + dst_name + ' ' + dst_Name )
149print (' ')
150
151# Reading domains characteristics
152grids = myargs.grids
153areas = myargs.areas
154masks = myargs.masks
155o2a   = myargs.o2a
156
157print ('Opening ' + areas)
158f_areas = xr.open_dataset ( areas )
159print ('Opening ' + masks)
160f_masks = xr.open_dataset ( masks )
161print ('Opening ' + grids)
162f_grids = xr.open_dataset ( grids )
163
164src_lon = f_grids ['t' + src_name + '.lon']
165src_lat = f_grids ['t' + src_name + '.lat']
166dst_lon = f_grids ['t' + dst_name + '.lon']
167dst_lat = f_grids ['t' + dst_name + '.lat']
168
169src_msk = f_masks ['t' + src_name + '.msk']
170dst_msk = f_masks ['t' + dst_name + '.msk']
171dst_mskutil = 1-dst_msk # Reversing the convention : 0 on continent, 1 on ocean
172print ('dst_msk     : ', dst_msk.sum().values )
173print ('dst_mskutil : ', dst_mskutil.sum().values )
174
175# Ocean grid periodicity
176nperio_dst=myargs.ocePerio
177
178# Periodicity masking for NEMO
179if nperio_dst == 0 :
180    if dst_Name in ['ORCA2.3', 'ORCA2.4']            : nperio_dst = 4
181    if dst_Name in ['eORCA1.2', 'eORCA1.3']          : nperio_dst = 6
182    if dst_Name in ['ORCA025', 'eORCA025', 'eORCA025.1', 'eORCA025.4'] : nperio_dst = 6
183
184print ('nperio_dst: ' + str(nperio_dst) )
185dst_mskutil = nemo.lbc_mask (dst_mskutil, nperio=nperio_dst, cd_type='T', sval=0 )
186print ('dst_mskutil : ' + str(np.sum(dst_mskutil)))
187
188##
189## Fill Closed seas and other
190## ==================================================
191
192# Preparation by closing some straits
193# -----------------------------------
194if dst_Name in [ 'eORCA025', 'eORCA025.1', 'eORCA025.4' ] :
195    print ('Filling some seas for ' + dst_Name )
196    # Set Gibraltar strait to 0 to fill Mediterranean sea
197    dst_mskutil[838:841,1125] = 0
198    # Set Bab-el-Mandeb to 0 to fill Red Sea
199    dst_mskutil[736,1321:1324] = 0
200    # Set Skagerrak to 0 to fill Baltic Sea
201    dst_mskutil[961,1179:1182] = 0
202    # Set Ormuz Strait to 0 to fill Arabo-Persian Gulf
203    dst_mskutil[794:798,1374] = 0
204    # Set Hudson Strait to 0 to fill Hudson Bay
205    dst_mskutil[997:1012,907] = 0
206
207if dst_Name in [ 'eORCA1.2', 'eORCA1.3', 'eORCA1.4', 'eORCA1.4.2' ] :
208    print ('Filling some seas for ' + dst_Name)
209    # Set Gibraltar strait to 0 to fill Mediterranean Sea
210    dst_mskutil[240, 283] = 0
211    # Set Bab-el-Mandeb to 0 to fill Red Sea
212    dst_mskutil[211:214, 332] = 0
213    # Set Skagerrak to 0 to fill Baltic Sea
214    dst_mskutil[272:276, 293] = 0
215    # Set Ormuz Strait to 0 to fill Arabo-Persian Gulf
216    dst_mskutil[227:230, 345] = 0
217    # Set Hudson Strait to 0 to fill Hudson Bay
218    dst_mskutil[284,222:225] = 0
219
220if dst_Name in [ 'ORCA2.3', 'ORCA2.4' ] :
221    print ('Filling some seas for ' + dst_Name)
222    # Set Gibraltar strait to 0 to fill Mediterranean Sea
223    dst_mskutil[101,139] = 0
224    # Set Black Sea to zero. At the edge of the domain : binary_fill_holes fails
225    dst_mskutil[ 99:111,   0:  5] = 0
226    dst_mskutil[106:111, 173:182] = 0
227    # Set Skagerrak to 0 to fill Baltic Sea
228    dst_mskutil[115,144] = 0
229    # Set Hudson Strait to 0 to fill Hudson Bay
230    dst_mskutil[120:123,110] = 0
231    # Set Bab-el-Mandeb to 0 to fill Red Sea
232    dst_mskutil[87:89,166] = 0
233
234dst_closed = dst_mskutil
235
236# Fill closed seas with image processing library
237# ----------------------------------------------
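# binary_fill_holes works on the inverted mask (1 on land, 0 on ocean) : ocean
# basins completely enclosed by land are filled in, i.e. turned into land, and
# the result is inverted back so that closed seas receive no calving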
238dst_mskutil = nemo.lbc_mask ( 1-ndimage.binary_fill_holes (
239    1-nemo.lbc(dst_mskutil, nperio=nperio_dst, cd_type='T')),
240                              nperio=nperio_dst, cd_type='T', sval=0)
241
242# Surfaces
243src_srf = f_areas.variables ['t' + src_name + '.srf']
244dst_srf = f_areas.variables ['t' + dst_name + '.srf']
245dst_srfutil = dst_srf * np.float64 (dst_mskutil)
246
247dst_srfutil_sum = np.sum( dst_srfutil)
248
249src_clo = f_grids ['t' + src_name + '.clo']
250src_cla = f_grids ['t' + src_name + '.cla']
251dst_clo = f_grids ['t' + dst_name + '.clo']
252dst_cla = f_grids ['t' + dst_name + '.cla']
253
254# Indices
255( src_jpj, src_jpi) = src_lat.shape
256src_grid_size = src_jpj*src_jpi
257( dst_jpj, dst_jpi) = dst_lat.shape
258dst_grid_size = dst_jpj*dst_jpi
259orc_index = np.arange (dst_jpj*dst_jpi, dtype=np.int32) + 1 # Fortran indices (starting at one)
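# OASIS/SCRIP weight files use Fortran (1-based) addressing, hence the +1 here
# and the -1 shifts wherever these addresses index numpy arrays below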
260
261### ===== Reading needed data ========================================
262if myargs.type in ['iceberg', 'iceshelf' ]:
263    # Reading data file for calving or iceberg geometry around Antarctica
264    print ( 'Opening ' + myargs.repartition_file)
265    f_repartition = xr.open_dataset ( myargs.repartition_file )
266    repartition = np.sum ( f_repartition.variables [repartitionVar], axis=0 )
267
268## Before loop on basins
269remap_matrix = np.empty ( shape=(0), dtype=np.float64 )
270src_address  = np.empty ( shape=(0), dtype=np.int32   )
271dst_address  = np.empty ( shape=(0), dtype=np.int32   )
272
273print (' ')
274
275### ===== Starting loop on basins=====================================
276
277# Initialise basin mask and repartition key (remap arrays already initialised above)
281
282basin_msk       = np.zeros( shape=(nb_zone, dst_jpj, dst_jpi), dtype=np.float32)
283key_repartition = np.zeros( shape=(nb_zone, dst_jpj, dst_jpi), dtype=np.float64)
284
285## Loop
286for n_bas in np.arange ( nb_zone ) :
287    south  = False
288    ok_run = False
289    lat_south = np.min(limit_lat[n_bas])
290    lat_north = np.max(limit_lat[n_bas])
291    if lat_south <= -60.0 : south = True
292
293    print ( f'basin: {n_bas:2d} -- Latitudes: {lat_south:+.1f} {lat_north:+.1f} --' )
294    ##
295    if myargs.type == 'iceberg'  and     south :
296        ok_run = True
297        print ('Applying iceberg to south' )
298    if myargs.type == 'iceshelf' and     south :
299        ok_run = True
300        print ('Applying iceshelf to south' )
301    if myargs.type == 'iceberg'  and not south :
302        ok_run = False
303        print ('Skipping iceberg: not south ')
304    if myargs.type == 'iceshelf' and not south :
305        ok_run = False
306        print ('Skipping iceshelf: not south ')
307    if myargs.type == 'nosouth'  and     south :
308        ok_run = False
309        print ('Skipping south: nosouth case' )
310    if myargs.type == 'nosouth'  and not south :
311        ok_run = True
312        print ('Running: not in south, uniform repartition')
313    if myargs.type == 'full' :
314        ok_run = True
315        print ('Running general trivial case, uniform repartition' )
316
317    if ok_run :
318        # Calving integral sent to one point per latitude band
319        index_src = ((src_grid_size - 1)*n_bas) // (nb_zone-1) + 1
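        # This picks one distinct LMDZ cell per band, spread over the 1-D source
        # grid : with nb_zone=3, basins 0, 1 and 2 use Fortran indices 1,
        # (src_grid_size-1)//2 + 1 and src_grid_size respectively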
320
321        # Basin mask
322        basin_msk[n_bas] = np.where ( (dst_lat > lat_south ) & (dst_lat <= lat_north ),
323                                          dst_mskutil, 0 )
324
325        # Repartition pattern
326        if myargs.type in ['iceberg', 'iceshelf' ] :
327            key_repartition[n_bas] = repartition * basin_msk[n_bas]
328        else :
329            key_repartition[n_bas] =               basin_msk[n_bas]
330
331        # Integral and normalisation
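        # The key is normalised so that its area-weighted integral over the
        # basin equals 1 (checked by the two diagnostics printed below)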
332        sum_repartition = np.sum ( key_repartition[n_bas] * dst_srfutil ).values
333        key_repartition[n_bas] = key_repartition[n_bas] / sum_repartition
334
335        print ( 'Sum of repartition key                      : {:12.3e}'.format (np.sum (key_repartition[n_bas]               )) )
336        print ( 'Integral (area weighted) of repartition key : {:12.3e}'.format (np.sum (key_repartition[n_bas] * dst_srfutil ).values) )
337
338        # Basin surface (modulated by repartition key)
339        basin_srfutil = np.sum ( key_repartition[n_bas] * dst_srfutil )
340
341        # Weights and links
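        # Each link ties the single atmospheric point of this band to one wet
        # NEMO point ; OASIS computes dst(k) += weight(l) * src(index_src) for
        # every link, as mimicked by the diagnostic loop after the basin loop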
342        poids = 1.0 / basin_srfutil
343        matrix_local   = np.where ( basin_msk[n_bas].ravel() > 0.5,
344                                        key_repartition[n_bas].ravel()*poids.values, 0. )
345        matrix_local   = matrix_local[matrix_local > 0.0] # Keep only non zero values
346        num_links      = np.count_nonzero (matrix_local)
347        # Address on source grid : all links point to the same LMDZ point.
348        src_address_local = np.ones(num_links, dtype=np.int32 )*index_src
349        # Address on destination grid : each NEMO point with non zero link
350        dst_address_local = np.where ( key_repartition[n_bas].ravel() > 0.0, orc_index, 0)
351        dst_address_local = dst_address_local[dst_address_local > 0]
352        # Append to global tabs
353        remap_matrix = np.append ( remap_matrix, matrix_local      )
354        src_address  = np.append ( src_address , src_address_local )
355        dst_address  = np.append ( dst_address , dst_address_local )
356        #
357        #print ( 'Sum of remap_matrix : {:12.3e}'.format(np.sum(matrix_local)) )
358        print ( f'Point in atmospheric grid : {index_src:4d} -- num_links: {num_links:6d}' )
359        print ('  ')
360
361## End of loop
362print (' ')
363
364#
365num_links = np.count_nonzero (remap_matrix)
366print ( f'Total num_links : {num_links:10d}' )
367
368# Diag : interpolate uniform field
369src_field = np.zeros(shape=(src_grid_size,))
370for n_bas in np.arange ( nb_zone ) :
371    index_src = ((src_grid_size - 1)*n_bas) // (nb_zone-1) + 1
372    src_field[index_src-1] = n_bas
373
374dst_field = np.zeros(shape=(dst_grid_size,))
375for link in np.arange (num_links) :
376    dst_field [dst_address [link]-1] +=  remap_matrix[link] * src_field[src_address[link]-1]
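# The loop above applies the weights exactly as OASIS will
# (dst(k) += w(l) * src(i), with Fortran 1-based addresses), so dst_field
# written to the output file gives a quick visual check of the repartition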
377
378### ===== Writing the weights file, for OASIS MCT =======================
379
380# Output file
381calving = myargs.output
382
383print ('Output file: ' + calving )
384
385remap_matrix = xr.DataArray (np.reshape(remap_matrix, (num_links, 1)),
386                                 dims = ['num_links', 'num_wgts'] )
387src_address  = xr.DataArray (src_address.astype(np.int32) ,
388                                 dims = ['num_links',] )
389src_address.attrs['convention'] = 'Fortran style addressing, starting at 1'
390dst_address  = xr.DataArray (dst_address.astype(np.int32) ,
391                                 dims =  ['num_links',] )
392dst_address.attrs['convention'] = 'Fortran style addressing, starting at 1'
393
394src_grid_dims       = xr.DataArray ( np.array (( src_jpi, src_jpj ), dtype=np.int32),
395                                         dims =  ['src_grid_rank',] )
396src_grid_center_lon = xr.DataArray ( src_lon.values.ravel(), dims = ['src_grid_size',] )
397src_grid_center_lat = xr.DataArray ( src_lat.values.ravel(), dims = ['src_grid_size',] )
398src_grid_center_lon.attrs.update ( {'units':'degrees_east ', 'long_name':'Longitude',
399                                        'standard_name':'longitude',
400                                        'bounds':'src_grid_corner_lon'})
401src_grid_center_lat.attrs.update ( {'units':'degrees_north', 'long_name':'Latitude' ,
402                                        'standard_name':'latitude' ,
403                                        'bounds':'src_grid_corner_lat'})
404src_grid_corner_lon = xr.DataArray ( src_clo.values.reshape( (src_jpi*src_jpj, src_clo.shape[0]) ),
405                                         dims = ['src_grid_size', 'src_grid_corners'] )
406src_grid_corner_lat = xr.DataArray ( src_cla.values.reshape( (src_jpi*src_jpj, src_clo.shape[0]) ),
407                                         dims = ['src_grid_size', 'src_grid_corners' ] )
408src_grid_corner_lon.attrs['units']='degrees_east'
409src_grid_corner_lat.attrs['units']='degrees_north'
410src_grid_area       = xr.DataArray ( src_srf.values.ravel(), dims = ['src_grid_size',] )
411src_grid_area.attrs.update ({'long_name':'Grid area', 'standard_name':'cell_area', 'units':'m2'})
412src_grid_imask      = xr.DataArray ( src_msk.values.ravel().astype(np.int32) ,
413                                         dims = ['src_grid_size',] )
414src_grid_imask.attrs.update ( {'long_name':'Land-sea mask', 'units':'Land:1, Ocean:0' })
415
416# --
417dst_grid_dims       = xr.DataArray ( np.array(( dst_jpi, dst_jpj), dtype=np.int32) ,
418                                         dims = ['dst_grid_rank', ])
419dst_grid_center_lon = xr.DataArray ( dst_lon.values.ravel(), dims = ['dst_grid_size', ])
420dst_grid_center_lat = xr.DataArray ( dst_lat.values.ravel(), dims = ['dst_grid_size', ])
421dst_grid_center_lon.attrs.update ( {'units':'degrees_east ', 'long_name':'Longitude',
422                                        'standard_name':'longitude',
423                                        'bounds':'dst_grid_corner_lon'})
424dst_grid_center_lat.attrs.update ( {'units':'degrees_north', 'long_name':'Latitude' ,
425                                        'standard_name':'latitude' ,
426                                        'bounds':'dst_grid_corner_lat'})
427dst_grid_corner_lon = xr.DataArray ( dst_clo.values.reshape(dst_jpi*dst_jpj, dst_clo.shape[0] ),
428                                         dims = ['dst_grid_size', 'dst_grid_corners'] )
429dst_grid_corner_lat = xr.DataArray ( dst_cla.values.reshape(dst_jpi*dst_jpj, dst_clo.shape[0] ),
430                                         dims = ['dst_grid_size', 'dst_grid_corners'] )
431dst_grid_corner_lon.attrs['units']='degrees_east'
432dst_grid_corner_lat.attrs['units']='degrees_north'
433dst_grid_area       = xr.DataArray ( dst_srf.values.ravel(), dims = ['dst_grid_size', ] )
434dst_grid_area.attrs.update ({'long_name':'Grid area', 'standard_name':'cell_area', 'units':'m2'})
435dst_grid_imask      = xr.DataArray ( dst_msk.values.ravel(), dims =  ['dst_grid_size',]  )
436dst_grid_imask.attrs.update ( {'long_name':'Land-sea mask', 'units':'Land:1, Ocean:0' } )
437
438dst_bassin      = xr.DataArray ( basin_msk.reshape ( (nb_zone,dst_grid_size) ) ,
439                                     dims = ['nb_zone', 'dst_grid_size',  ] )
440dst_repartition = xr.DataArray ( key_repartition.reshape( (nb_zone,dst_grid_size) ) ,
441                                     dims = ['nb_zone', 'dst_grid_size',  ] )
442
443dst_southLimit = xr.DataArray ( np.min(limit_lat, axis=(1,)).astype(np.float32) ,
444                                    dims = ['nb_zone', ] )
445dst_northLimit = xr.DataArray ( np.max(limit_lat, axis=(1,)).astype(np.float32) ,
446                                    dims = ['nb_zone', ] )
447
448# Additionnal fields for debugging
449# ================================
450dst_grid_imaskutil  = xr.DataArray ( dst_mskutil.ravel().astype(np.float32),
451                                         dims = ['dst_grid_size',] )
452dst_grid_imaskutil.attrs.update ( {'long_name':'Land-sea mask', 'units':'Land:1, Ocean:0' })
453dst_grid_imaskclose = xr.DataArray ( dst_closed.values.ravel(), dims = ['dst_grid_size',] )
454dst_grid_imaskclose.attrs.update ({'long_name':'Land-sea mask', 'units':'Land:1, Ocean:0' })
455
456dst_lon_addressed = xr.DataArray ( dst_lon.values.ravel()[dst_address-1].ravel(),
457                                       dims = ['num_links',] )
458dst_lat_addressed = xr.DataArray ( dst_lat.values.ravel()[dst_address-1].ravel(),
459                                       dims = ['num_links',] )
460dst_lon_addressed.attrs.update ( {'units':'degrees_east ', 'long_name':'Longitude',
461                                      'standard_name':'longitude' })
462dst_lat_addressed.attrs.update ( {'units':'degrees_north', 'long_name':'Latitude' ,
463                                      'standard_name':'latitude'  })
464dst_area_addressed  = xr.DataArray ( dst_srf.values.ravel()[dst_address-1].ravel(),
465                                         dims = ['num_links',] )
466dst_area_addressed.attrs.update ( {'long_name':'Grid area', 'standard_name':'cell_area',
467                                       'units':'m2' })
468dst_imask_addressed = xr.DataArray ( dst_msk.values.ravel()[dst_address-1].ravel(),
469                                         dims = ['num_links', ] )
470dst_imask_addressed.attrs.update ( {'long_name':'Land-sea mask', 'units':'Land:1, Ocean:0' })
471dst_imaskutil_addressed = xr.DataArray ( dst_mskutil.ravel()[dst_address-1].astype(np.float32),
472                                             dims = ['num_links'] )
473dst_imaskutil_addressed.attrs.update ( {'long_name':'Land-sea mask', 'units':'Land:1, Ocean:0' })
474
475src_field = xr.DataArray ( src_field, dims = ['src_grid_size',] )
476dst_field = xr.DataArray ( dst_field, dims = ['dst_grid_size',] )
477
478f_calving =  xr.Dataset ( {
479    'remap_matrix'            : remap_matrix,
480        'src_address'             : src_address,
481        'dst_address'             : dst_address,
482        'src_grid_dims'           : src_grid_dims,
483        'src_grid_center_lon'     : src_grid_center_lon,
484        'src_grid_center_lat'     : src_grid_center_lat,
485        'src_grid_corner_lon'     : src_grid_corner_lon,
486        'src_grid_corner_lat'     : src_grid_corner_lat,
487        'src_grid_area'           : src_grid_area,
488        'src_grid_imask'          : src_grid_imask,
489        'dst_grid_dims'           : dst_grid_dims,
490        'dst_grid_center_lon'     : dst_grid_center_lon,
491        'dst_grid_center_lat'     : dst_grid_center_lat,
492        'dst_grid_corner_lon'     : dst_grid_corner_lon,
493        'dst_grid_corner_lat'     : dst_grid_corner_lat,
494        'dst_grid_area'           : dst_grid_area,
495        'dst_grid_imask'          : dst_grid_imask,
496        'dst_bassin'              : dst_bassin,
497        'dst_repartition'         : dst_repartition,
498        'dst_southLimit'          : dst_southLimit,
499        'dst_northLimit'          : dst_northLimit,
500        'dst_grid_imaskutil'      : dst_grid_imaskutil,
501        'dst_grid_imaskclose'     : dst_grid_imaskclose,
502        'dst_lon_addressed'       : dst_lon_addressed,
503        'dst_lat_addressed'       : dst_lat_addressed,
504        'dst_area_addressed'      : dst_area_addressed,
505        'dst_imask_addressed'     : dst_imask_addressed,
506        'dst_imaskutil_addressed' : dst_imaskutil_addressed,
507        'src_field'               : src_field,
508        'dst_field'               : dst_field,
509    } )
510
511f_calving.attrs.update ( {
512    'Conventions'     : 'CF-1.6',
513    'source'          : 'IPSL Earth system model',
514    'group'           : 'ICMC IPSL Climate Modelling Center',
515    'Institution'     : 'IPSL https://www.ipsl.fr',
516    'Ocean'           : f'{dst_Name} https://www.nemo-ocean.eu',
517    'Atmosphere'      : f'{src_Name} http://lmdz.lmd.jussieu.fr',
518    'associatedFiles' : f'{grids} {areas} {masks}',
519    'description'     : 'Generated with XIOS http://forge.ipsl.jussieu.fr/ioserver and MOSAIX https://forge.ipsl.jussieu.fr/igcmg/browser/TOOLS/MOSAIX',
520    'title'           : calving,
521    'Program'         : f'Generated by {sys.argv[0]} with flags ' + ' '.join (sys.argv[1:]),
522    'repartitionType' : myargs.type,
523    'gridsFile'       : grids,
524    'areasFile'       : areas,
525    'masksFile'       : masks,
526    'timeStamp'       : time.asctime(),
527    'directory'       : os.getcwd (),
528    'HOSTNAME'        : platform.node (),
529    'LOGNAME'         : getpass.getuser(),
530    'Python'          : 'Python version ' +  platform.python_version (),
531    'OS'              : platform.system (),
532    'release'         : platform.release (),
533    'hardware'        : platform.machine (),
534    'conventions'     : 'SCRIP',
535    'dest_grid'       : 'curvilinear',
536    'Model'           : model_name,
537    'SVN_Author'      : '$Author$',
538    'SVN_Date'        : '$Date$',
539    'SVN_Revision'    : '$Revision$',
540    'SVN_Id'          : '$Id$',
541    'SVN_HeadURL'     : '$HeadURL$',
542    })
543
544if myargs.type in [ 'iceberg', 'iceshelf' ] :
545    f_calving.attrs.update ( {
546    'originalFiles'   : myargs.repartition_file,
547    'repartitionFile' : myargs.repartition_file,
548    'repartitionVar'  : repartitionVar,
549    } )
550
551##
552f_calving.to_netcdf ( calving, mode='w', format=FmtNetcdf )
553f_calving.close()
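# A quick offline sanity check of the produced weights (illustrative, using the
# default output name) could be, for instance :
#     w = xr.open_dataset ('rmp_tlmd_to_torc_calving.nc')
#     # One weight per link, addresses start at 1 (Fortran convention)
#     print (w.sizes['num_links'], int(w['src_address'].min()), int(w['dst_address'].min()))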
554
555print ("That's all folks !")
556## ======================================================================================