1 | ### =========================================================================== |
---|
2 | ### |
---|
3 | ### Compute calving weights. |
---|
4 | ### |
---|
5 | ### =========================================================================== |
---|
6 | ## |
---|
7 | ## Warning, to install, configure, run, use any of Olivier Marti's |
---|
8 | ## software or to read the associated documentation you'll need at least |
---|
9 | ## one (1) brain in a reasonably working order. Lack of this implement |
---|
10 | ## will void any warranties (either express or implied). |
---|
## O. Marti assumes no responsibility for errors, omissions,
## data loss, or any other consequences caused directly or indirectly by
## the usage of his software by incorrectly or partially configured
## personnel.
---|
15 | ## |
---|
16 | import numpy as np, xarray as xr |
---|
17 | import sys, os, platform, argparse, textwrap, time |
---|
18 | from scipy import ndimage |
---|
19 | import nemo |
---|
20 | |
---|
21 | ## SVN information |
---|
22 | __Author__ = "$Author$" |
---|
23 | __Date__ = "$Date$" |
---|
24 | __Revision__ = "$Revision$" |
---|
25 | __Id__ = "$Id$" |
---|
26 | __HeadURL__ = "$HeadURL$" |
---|
27 | __SVN_Date__ = "$SVN_Date: $" |
---|
28 | |
---|
29 | ### |
---|
30 | |
---|
31 | ### ===== Handling command line parameters ================================================== |
---|
32 | # Creating a parser |
---|
33 | parser = argparse.ArgumentParser ( |
---|
34 | description = """Compute calving weights""", |
---|
35 | epilog='-------- End of the help message --------') |
---|
36 | |
---|
37 | # Adding arguments |
---|
38 | parser.add_argument ('--oce' , help='oce model name', type=str, default='eORCA1.2', |
---|
39 | choices=['ORCA2.3', 'eORCA1.2', 'eORCA1.4', 'eORCA1.4.2', 'eORCA025', 'eORCA025.1', 'eORCA025.4'] ) |
---|
40 | parser.add_argument ('--atm' , help='atm model name (ICO* or LMD*)', type=str, default='ICO40' ) |
---|
41 | parser.add_argument ('--type' , help='Type of distribution', type=str, choices=['iceshelf', 'iceberg', 'nosouth', 'full'], default='full' ) |
---|
42 | ## parser.add_argument ('--dir' , help='Directory of input file', type=str, default='./' ) |
---|
43 | parser.add_argument ('--repartition_file', help='Data files with iceberg melting' , type=str, default='./runoff-icb_DaiTrenberth_Depoorter_eORCA1_JD.nc' ) |
---|
44 | parser.add_argument ('--repartition_var' , help='Variable name for iceshelf' , type=str, default=None) |
---|
45 | parser.add_argument ('--grids' , help='grids file name', default='grids.nc' ) |
---|
46 | parser.add_argument ('--areas' , help='masks file name', default='areas.nc' ) |
---|
47 | parser.add_argument ('--masks' , help='areas file name', default='masks.nc' ) |
---|
48 | parser.add_argument ('--o2a' , help='o2a file name' , default='o2a.nc' ) |
---|
49 | parser.add_argument ('--output' , help='output rmp file name', default='rmp_tlmd_to_torc_calving.nc' ) |
---|
50 | parser.add_argument ('--fmt' , help='NetCDF file format, using nco syntax', default='netcdf4', |
---|
51 | choices=['classic', 'netcdf3', '64bit', '64bit_data', '64bit_data', 'netcdf4', 'netcdf4_classsic'] ) |
---|
52 | parser.add_argument ('--ocePerio', help='periodicity of ocean grid', type=float, default=0, choices=nemo.nperio_valid_range ) |
---|
53 | |
---|
54 | # Parse command line |
---|
55 | myargs = parser.parse_args() |
---|
56 | |
---|
57 | # Model Names |
---|
58 | src_Name = myargs.atm ; dst_Name = myargs.oce |
---|
59 | |
---|
60 | # Default vars |
---|
61 | if myargs.repartition_var == None : |
---|
62 | # Runoff from Antarctica iceshelves (Depoorter, 2013) |
---|
63 | if myargs.type == 'iceshelf' : repartitionVar = 'sornfisf' |
---|
64 | |
---|
65 | # Runoff from Antarctica iceberg (Depoorter, 2013) |
---|
66 | if myargs.type == 'iceberg' : repartitionVar = 'Icb_Flux' |
---|
67 | |
---|
68 | else : repartitionVar = myargs.repartition_var |
---|
69 | |
---|
70 | # Latitude limits of each calving zone |
---|
71 | limit_lat = ( (40.0, 90.0), (-50.0, 40.0), ( -90.0, -50.0) ) |
---|
72 | |
---|
73 | nb_zone = len(limit_lat) |
---|
74 | |
---|
75 | if myargs.fmt == 'classic' : FmtNetcdf = 'CLASSIC' |
---|
76 | if myargs.fmt == 'netcdf3' : FmtNetcdf = 'CLASSIC' |
---|
77 | if myargs.fmt == '64bit' : FmtNetcdf = 'NETCDF3_64BIT_OFFSET' |
---|
78 | if myargs.fmt == '64bit_data' : FmtNetcdf = 'NETCDF3_64BIT_DATA' |
---|
79 | if myargs.fmt == '64bit_offset' : FmtNetcdf = 'NETCDF3_64BIT_OFFSET' |
---|
80 | if myargs.fmt == 'netcdf4' : FmtNetcdf = 'NETCDF4' |
---|
81 | if myargs.fmt == 'netcdf4_classic' : FmtNetcdf = 'NETCDF4_CLASSIC' |
---|
82 | ### ========================================================================================== |
---|
83 | |
---|
84 | # Model short names |
---|
85 | src_name = None ; dst_name = None |
---|
86 | if src_Name.count('ICO') != 0 : src_name = 'ico' |
---|
87 | if src_Name.count('LMD') != 0 : src_name = 'lmd' |
---|
88 | if dst_Name.count('ORCA') != 0 : dst_name = 'orc' |
---|
89 | |
---|
90 | CplModel = dst_Name + 'x' + src_Name |
---|
91 | |
---|
92 | print ('Atm names : ' + src_name + ' ' + src_Name ) |
---|
93 | print ('Oce names : ' + dst_name + ' ' + dst_Name ) |
---|
94 | print (' ') |
---|
95 | |
---|
96 | # Reading domains characteristics |
---|
97 | grids = myargs.grids |
---|
98 | areas = myargs.areas |
---|
99 | masks = myargs.masks |
---|
100 | o2a = myargs.o2a |
---|
101 | |
---|
102 | print ('Opening ' + areas) |
---|
103 | f_areas = xr.open_dataset ( areas ) |
---|
104 | print ('Opening ' + masks) |
---|
105 | f_masks = xr.open_dataset ( masks ) |
---|
106 | print ('Opening ' + grids) |
---|
107 | f_grids = xr.open_dataset ( grids ) |
---|
108 | |
---|
109 | src_lon = f_grids ['t' + src_name + '.lon'] |
---|
110 | src_lat = f_grids ['t' + src_name + '.lat'] |
---|
111 | dst_lon = f_grids ['t' + dst_name + '.lon'] |
---|
112 | dst_lat = f_grids ['t' + dst_name + '.lat'] |
---|
113 | |
---|
114 | src_msk = f_masks ['t' + src_name + '.msk'] |
---|
115 | dst_msk = f_masks ['t' + dst_name + '.msk'] |
---|
116 | dst_mskutil = 1-dst_msk # Reversing the convention : 0 on continent, 1 on ocean |
---|
117 | print ('dst_msk : ', dst_msk.sum().values ) |
---|
118 | print ('dst_mskutil : ', dst_mskutil.sum().values ) |
---|
119 | |
---|
120 | # Ocean grid periodicity |
---|
121 | nperio_dst=myargs.ocePerio |
---|
122 | |
---|
123 | # Periodicity masking for NEMO |
---|
124 | if nperio_dst == 0 : |
---|
125 | if dst_Name in ['ORCA2.3', 'ORCA2.4'] : nperio_dst = 4 |
---|
126 | if dst_Name in ['eORCA1.2', 'eORCA1.3'] : nperio_dst = 6 |
---|
127 | if dst_Name in ['ORCA025', 'eORCA025', 'eORCA025.1', 'eORCA025.4'] : nperio_dst = 6 |
---|
128 | |
---|
129 | print ('nperio_dst: ' + str(nperio_dst) ) |
---|
130 | dst_mskutil = nemo.lbc_mask (dst_mskutil, nperio=nperio_dst, cd_type='T', sval=0 ) |
---|
131 | print ('dst_mskutil : ' + str(np.sum(dst_mskutil))) |
---|
132 | |
---|
133 | ## |
---|
134 | ## Fill Closed seas and other |
---|
135 | ## ================================================== |
---|
136 | |
---|
137 | # Preparation by closing some straits |
---|
138 | # ----------------------------------- |
---|
139 | if dst_Name in [ 'eORCA025', 'eORCA025.1', 'eORCA025.4' ] : |
---|
140 | print ('Filling some seas for ' + dst_Name ) |
---|
141 | # Set Gibraltar strait to 0 to fill Mediterranean sea |
---|
142 | dst_mskutil[838:841,1125] = 0 |
---|
143 | # Set Bal-El-Mandeb to 0 to fill Red Sea |
---|
144 | dst_mskutil[736,1321:1324] = 0 |
---|
145 | # Set Stagerak to 0 to fill Baltic Sea |
---|
146 | dst_mskutil[961,1179:1182] = 0 |
---|
147 | # Set Ormuz Strait to 0 to fill Arabo-Persian Gulf |
---|
148 | dst_mskutil[794:798,1374] = 0 |
---|
149 | # Set Hudson Strait to 0 to fill Hudson Bay |
---|
150 | dst_mskutil[997:1012,907] = 0 |
---|
151 | |
---|
152 | if dst_Name in [ 'eORCA1.2', 'eORCA1.3', 'eORCA1.4', 'eORCA1.4.2' ] : |
---|
153 | print ('Filling some seas for ' + dst_Name) |
---|
154 | # Set Gibraltar strait to 0 to fill Mediterrean sea |
---|
155 | dst_mskutil[240, 283] = 0 |
---|
156 | # Set Bal-El-Mandeb to 0 to fill Red Sea |
---|
157 | dst_mskutil[211:214, 332] = 0 |
---|
158 | # Set Stagerak to 0 to fill Baltic Sea |
---|
159 | dst_mskutil[272:276, 293] = 0 |
---|
160 | # Set Ormuz Strait to 0 to fill Arabo-Persian Gulf |
---|
161 | dst_mskutil[227:230, 345] = 0 |
---|
162 | # Set Hudson Strait to 0 to fill Hudson Bay |
---|
163 | dst_mskutil[284,222:225] = 0 |
---|
164 | |
---|
165 | if dst_Name in [ 'ORCA2.3', 'ORCA2.4' ] : |
---|
166 | print ('Filling some seas for ' + dst_Name) |
---|
167 | # Set Gibraltar strait to 0 to fill Mediterrean sea |
---|
168 | dst_mskutil[101,139] = 0 |
---|
169 | # Set Black Sea to zero. At the edge of the domain : binary_fill_holes fails |
---|
170 | dst_mskutil[ 99:111, 0: 5] = 0 |
---|
171 | dst_mskutil[106:111, 173:182] = 0 |
---|
172 | # Set Stagerak to 0 to fill Baltic Sea |
---|
173 | dst_mskutil[115,144] = 0 |
---|
174 | # Set Hudson Strait to 0 to fill Hudson Bay |
---|
175 | dst_mskutil[120:123,110] = 0 |
---|
176 | # Set Bal-El-Mandeb to 0 to fill Red Sea |
---|
177 | dst_mskutil[87:89,166] = 0 |
---|
178 | |
---|
179 | dst_closed = dst_mskutil |
---|
180 | |
---|
181 | # Fill closed seas with image processing library |
---|
182 | # ---------------------------------------------- |
---|
183 | dst_mskutil = nemo.lbc_mask ( 1-ndimage.binary_fill_holes (1-nemo.lbc(dst_mskutil, nperio=nperio_dst, cd_type='T')), nperio=nperio_dst, cd_type='T', sval=0) |
---|
184 | |
---|
185 | # Surfaces |
---|
186 | src_srf = f_areas.variables ['t' + src_name + '.srf'] |
---|
187 | dst_srf = f_areas.variables ['t' + dst_name + '.srf'] |
---|
188 | dst_srfutil = dst_srf * np.float64 (dst_mskutil) |
---|
189 | |
---|
190 | dst_srfutil_sum = np.sum( dst_srfutil) |
---|
191 | |
---|
192 | src_clo = f_grids ['t' + src_name + '.clo'] |
---|
193 | src_cla = f_grids ['t' + src_name + '.cla'] |
---|
194 | dst_clo = f_grids ['t' + dst_name + '.clo'] |
---|
195 | dst_cla = f_grids ['t' + dst_name + '.cla'] |
---|
196 | |
---|
197 | # Indices |
---|
198 | ( src_jpj, src_jpi) = src_lat.shape ; src_grid_size = src_jpj*src_jpi |
---|
199 | ( dst_jpj, dst_jpi) = dst_lat.shape ; dst_grid_size = dst_jpj*dst_jpi |
---|
200 | orc_index = np.arange (dst_jpj*dst_jpi, dtype=np.int32) + 1 # Fortran indices (starting at one) |
---|
201 | |
---|
202 | ### ===== Reading needed data ================================================== |
---|
203 | if myargs.type in ['iceberg', 'iceshelf' ]: |
---|
204 | # Reading data file for calving or iceberg geometry around Antarctica |
---|
205 | print ( 'Opening ' + myargs.repartition_file) |
---|
206 | f_repartition = xr.open_dataset ( myargs.repartition_file ) |
---|
207 | repartition = np.sum ( f_repartition.variables [repartitionVar], axis=0 ) |
---|
208 | |
---|
209 | ## Before loop on basins |
---|
210 | remap_matrix = np.empty ( shape=(0), dtype=np.float64 ) |
---|
211 | src_address = np.empty ( shape=(0), dtype=np.int32 ) |
---|
212 | dst_address = np.empty ( shape=(0), dtype=np.int32 ) |
---|
213 | |
---|
214 | print (' ') |
---|
215 | |
---|
216 | ### ===== Starting loop on basins============================================== |
---|
217 | |
---|
218 | # Initialise some fields |
---|
219 | remap_matrix = np.empty ( shape=(0), dtype=np.float64 ) |
---|
220 | src_address = np.empty ( shape=(0), dtype=np.int32 ) |
---|
221 | dst_address = np.empty ( shape=(0), dtype=np.int32 ) |
---|
222 | |
---|
223 | basin_msk = np.zeros( shape=(nb_zone, dst_jpj, dst_jpi), dtype=np.float32) |
---|
224 | key_repartition = np.zeros( shape=(nb_zone, dst_jpj, dst_jpi), dtype=np.float64) |
---|
225 | |
---|
226 | ## Loop |
---|
227 | for n_bas in np.arange ( nb_zone ) : |
---|
228 | south = False ; ok_run = False |
---|
229 | lat_south = np.min(limit_lat[n_bas]) ; lat_north = np.max(limit_lat[n_bas]) |
---|
230 | if lat_south <= -60.0 : south = True |
---|
231 | |
---|
232 | print ( 'basin: {:2d} -- Latitudes: {:+.1f} {:+.1f} --'.format(n_bas, lat_south, lat_north) ) |
---|
233 | ## |
---|
234 | if myargs.type == 'iceberg' and south : ok_run = True ; print ('Applying iceberg to south' ) |
---|
235 | if myargs.type == 'iceshelf' and south : ok_run = True ; print ('Applying iceshelf to south' ) |
---|
236 | if myargs.type == 'iceberg' and not south : ok_run = False ; print ('Skipping iceberg: not south ') |
---|
237 | if myargs.type == 'iceshelf' and not south : ok_run = False ; print ('Skipping iceshelf: not south ') |
---|
238 | if myargs.type == 'nosouth' and south : ok_run = False ; print ('Skipping south: nosouth case' ) |
---|
239 | if myargs.type == 'nosouth' and not south : ok_run = True ; print ('Running: not in south, uniform repartition') |
---|
240 | if myargs.type == 'full' : ok_run = True ; print ('Running general trivial case, uniform repartition' ) |
---|
241 | |
---|
242 | if ok_run : |
---|
243 | # Calving integral send to one point per latitude bands |
---|
244 | index_src = ((src_grid_size - 1)*n_bas) // (nb_zone-1) + 1 |
---|
245 | |
---|
246 | # Basin mask |
---|
247 | basin_msk[n_bas] = np.where ( (dst_lat > lat_south ) & (dst_lat <= lat_north ), dst_mskutil, 0 ) |
---|
248 | |
---|
249 | # Repartition pattern |
---|
250 | if myargs.type in ['iceberg', 'iceshelf' ] : key_repartition[n_bas] = repartition * basin_msk[n_bas] |
---|
251 | else : key_repartition[n_bas] = basin_msk[n_bas] |
---|
252 | |
---|
253 | # Integral and normalisation |
---|
254 | sum_repartition = np.sum ( key_repartition[n_bas] * dst_srfutil ).values |
---|
255 | key_repartition = key_repartition / sum_repartition |
---|
256 | |
---|
257 | print ( 'Sum of repartition key : {:12.3e}'.format (np.sum (key_repartition[n_bas] )) ) |
---|
258 | print ( 'Integral (area weighted) of repartition key : {:12.3e}'.format (np.sum (key_repartition[n_bas] * dst_srfutil ).values) ) |
---|
259 | |
---|
260 | # Basin surface (modulated by repartition key) |
---|
261 | basin_srfutil = np.sum ( key_repartition[n_bas] * dst_srfutil ) |
---|
262 | |
---|
263 | # Weights and links |
---|
264 | poids = 1.0 / basin_srfutil |
---|
265 | matrix_local = np.where ( basin_msk[n_bas].ravel() > 0.5, key_repartition[n_bas].ravel()*poids.values, 0. ) |
---|
266 | matrix_local = matrix_local[matrix_local > 0.0] # Keep only non zero values |
---|
267 | num_links = np.count_nonzero (matrix_local) |
---|
268 | # Address on source grid : all links points to the same LMDZ point. |
---|
269 | src_address_local = np.ones(num_links, dtype=np.int32 )*index_src |
---|
270 | # Address on destination grid : each NEMO point with non zero link |
---|
271 | dst_address_local = np.where ( key_repartition[n_bas].ravel() > 0.0, orc_index, 0) |
---|
272 | dst_address_local = dst_address_local[dst_address_local > 0] |
---|
273 | # Append to global tabs |
---|
274 | remap_matrix = np.append ( remap_matrix, matrix_local ) |
---|
275 | src_address = np.append ( src_address , src_address_local ) |
---|
276 | dst_address = np.append ( dst_address , dst_address_local ) |
---|
277 | # |
---|
278 | #print ( 'Sum of remap_matrix : {:12.3e}'.format(np.sum(matrix_local)) ) |
---|
279 | print ( 'Point in atmospheric grid : {:4d} -- num_links: {:6d}'.format(index_src, num_links) ) |
---|
280 | print (' ') |
---|
281 | |
---|
282 | |
---|
283 | |
---|
284 | ## End of loop |
---|
285 | print (' ') |
---|
286 | |
---|
287 | # |
---|
288 | num_links = np.count_nonzero (remap_matrix) |
---|
289 | print ( 'Total num_links : {:10d}'.format(num_links) ) |
---|
290 | |
---|
291 | # Diag : interpolate uniform field |
---|
292 | src_field = np.zeros(shape=(src_grid_size)) |
---|
293 | for n_bas in np.arange ( nb_zone ) : |
---|
294 | index_src = ((src_grid_size - 1)*n_bas) // (nb_zone-1) + 1 |
---|
295 | src_field[index_src-1] = n_bas |
---|
296 | |
---|
297 | dst_field = np.zeros(shape=(dst_grid_size,)) |
---|
298 | for link in np.arange (num_links) : |
---|
299 | dst_field [dst_address [link]-1] += remap_matrix[link] * src_field[src_address[link]-1] |
---|
300 | |
---|
301 | ### ===== Writing the weights file, for OASIS MCT ========================================== |
---|
302 | |
---|
303 | # Output file |
---|
304 | calving = myargs.output |
---|
305 | |
---|
306 | print ('Output file: ' + calving ) |
---|
307 | |
---|
308 | remap_matrix = xr.DataArray (np.reshape(remap_matrix, (num_links, 1)), dims = ['num_links', 'num_wgts'] ) |
---|
309 | src_address = xr.DataArray (src_address.astype(np.int32) , dims = ['num_links',] ) |
---|
310 | src_address.attrs['convention'] = "Fortran style addressing, starting at 1" |
---|
311 | dst_address = xr.DataArray (dst_address.astype(np.int32) , dims = ['num_links',] ) |
---|
312 | dst_address.attrs['convention'] = "Fortran style addressing, starting at 1" |
---|
313 | |
---|
314 | src_grid_dims = xr.DataArray ( np.array (( src_jpi, src_jpi ), dtype=np.int32), dims = ['src_grid_rank',] ) |
---|
315 | src_grid_center_lon = xr.DataArray ( src_lon.values.ravel(), dims = ['src_grid_size',] ) |
---|
316 | src_grid_center_lat = xr.DataArray ( src_lat.values.ravel(), dims = ['src_grid_size',] ) |
---|
317 | src_grid_center_lon.attrs['units']='degrees_east' ; src_grid_center_lon.attrs['long_name']='Longitude' |
---|
318 | src_grid_center_lon.attrs['long_name']='longitude' ; src_grid_center_lon.attrs['bounds']="src_grid_corner_lon" |
---|
319 | src_grid_center_lat.attrs['units']='degrees_north' ; src_grid_center_lat.attrs['long_name']='Latitude' |
---|
320 | src_grid_center_lat.attrs['long_name']='latitude ' ; src_grid_center_lat.attrs['bounds']="src_grid_corner_lat" |
---|
321 | src_grid_corner_lon = xr.DataArray ( src_clo.values.reshape( (src_jpi*src_jpj, src_clo.shape[0]) ), dims = ['src_grid_size', 'src_grid_corners'] ) |
---|
322 | src_grid_corner_lat = xr.DataArray ( src_cla.values.reshape( (src_jpi*src_jpj, src_clo.shape[0]) ), dims = ['src_grid_size', 'src_grid_corners' ] ) |
---|
323 | src_grid_corner_lon.attrs['units']="degrees_east" |
---|
324 | src_grid_corner_lat.attrs['units']="degrees_north" |
---|
325 | src_grid_area = xr.DataArray ( src_srf.values.ravel(), dims = ['src_grid_size',] ) |
---|
326 | src_grid_area.attrs['long_name']="Grid area" ; src_grid_area.attrs['standard_name']="cell_area" ; src_grid_area.attrs['units']="m2" |
---|
327 | src_grid_imask = xr.DataArray ( src_msk.values.ravel().astype(np.int32) , dims = ['src_grid_size',] ) |
---|
328 | src_grid_imask.attrs['long_name']="Land-sea mask" ; src_grid_imask.attrs['units']="Land:1, Ocean:0" |
---|
329 | |
---|
330 | # -- |
---|
331 | dst_grid_dims = xr.DataArray ( np.array(( dst_jpi, dst_jpi), dtype=np.int32) , dims = ['dst_grid_rank', ]) |
---|
332 | dst_grid_center_lon = xr.DataArray ( dst_lon.values.ravel(), dims = ['dst_grid_size', ]) |
---|
333 | dst_grid_center_lat = xr.DataArray ( dst_lat.values.ravel(), dims = ['dst_grid_size', ]) |
---|
334 | dst_grid_center_lon.attrs['units']='degrees_east' ; dst_grid_center_lon.attrs['long_name']='Longitude' |
---|
335 | dst_grid_center_lon.attrs['long_name']='longitude' ; dst_grid_center_lon.attrs['bounds']="dst_grid_corner_lon" |
---|
336 | dst_grid_center_lat.attrs['units']='degrees_north' ; dst_grid_center_lat.attrs['long_name']='Latitude' |
---|
337 | dst_grid_center_lat.attrs['long_name']='latitude' ; dst_grid_center_lat.attrs['bounds']="dst_grid_corner_lat" |
---|
338 | dst_grid_corner_lon = xr.DataArray ( dst_clo.values.reshape(dst_jpi*dst_jpj, dst_clo.shape[0] ), dims = ['dst_grid_size', 'dst_grid_corners'] ) |
---|
339 | dst_grid_corner_lat = xr.DataArray ( dst_cla.values.reshape(dst_jpi*dst_jpj, dst_clo.shape[0] ), dims = ['dst_grid_size', 'dst_grid_corners'] ) |
---|
340 | dst_grid_corner_lon.attrs['units']="degrees_east" |
---|
341 | dst_grid_corner_lat.attrs['units']="degrees_north" |
---|
342 | dst_grid_area = xr.DataArray ( dst_srf.values.ravel(), dims = ['dst_grid_size', ] ) |
---|
343 | dst_grid_area.attrs['long_name']="Grid area" ; dst_grid_area.attrs['standard_name']="cell_area" ; dst_grid_area.attrs['units']="m2" |
---|
344 | dst_grid_imask = xr.DataArray ( dst_msk.values.ravel(), dims = ['dst_grid_size',] ) |
---|
345 | dst_grid_imask.attrs['long_name']="Land-sea mask" ; dst_grid_imask.attrs['units']="Land:1, Ocean:0" |
---|
346 | |
---|
347 | dst_bassin = xr.DataArray ( basin_msk.reshape ( (nb_zone,dst_grid_size) ) , dims = ['nb_zone', 'dst_grid_size', ] ) |
---|
348 | dst_repartition = xr.DataArray ( key_repartition.reshape( (nb_zone,dst_grid_size) ) , dims = ['nb_zone', 'dst_grid_size', ] ) |
---|
349 | |
---|
350 | dst_southLimit = xr.DataArray ( np.min(limit_lat, axis=(1,)).astype(np.float32) , dims = ['nb_zone', ] ) |
---|
351 | dst_northLimit = xr.DataArray ( np.min(limit_lat, axis=(1,)).astype(np.float32) , dims = ['nb_zone', ] ) |
---|
352 | |
---|
353 | # Additionnal fields for debugging |
---|
354 | # ================================ |
---|
355 | dst_grid_imaskutil = xr.DataArray ( dst_mskutil.ravel().astype(np.float32), dims = ['dst_grid_size',] ) |
---|
356 | dst_grid_imaskutil.attrs['long_name']="Land-sea mask" ; dst_grid_imaskutil.attrs['units']="Land:1, Ocean:0" |
---|
357 | dst_grid_imaskclose = xr.DataArray ( dst_closed.values.ravel(), dims = ['dst_grid_size',] ) |
---|
358 | dst_grid_imaskclose.attrs['long_name']="Land-sea mask" ; dst_grid_imaskclose.attrs['units']="Land:1, Ocean:0" |
---|
359 | |
---|
360 | dst_lon_addressed = xr.DataArray ( dst_lon.values.ravel()[dst_address-1].ravel(), dims = ['num_links',] ) |
---|
361 | dst_lat_addressed = xr.DataArray ( dst_lat.values.ravel()[dst_address-1].ravel(), dims = ['num_links',] ) |
---|
362 | dst_lon_addressed.attrs['long_name']="Longitude" ; dst_lon_addressed.attrs['standard_name']="longitude" ; dst_lon_addressed.attrs['units']="degrees_east" |
---|
363 | dst_lat_addressed.attrs['long_name']="Latitude" ; dst_lat_addressed.attrs['standard_name']="latitude" ; dst_lat_addressed.attrs['units']="degrees_north" |
---|
364 | dst_area_addressed = xr.DataArray ( dst_srf.values.ravel()[dst_address-1].ravel(), dims = ['num_links',] ) |
---|
365 | dst_area_addressed.attrs['long_name']="Grid area" ; dst_area_addressed.attrs['standard_name']="cell_area" ; dst_grid_area.attrs['units']="m2" |
---|
366 | dst_imask_addressed = xr.DataArray ( dst_msk.values.ravel()[dst_address-1].ravel(), dims = ['num_links', ] ) |
---|
367 | dst_imask_addressed.attrs['long_name']="Land-sea mask" ; dst_imask_addressed.attrs['units']="Land:1, Ocean:0" |
---|
368 | dst_imaskutil_addressed = xr.DataArray ( dst_mskutil.ravel()[dst_address-1].astype(np.float32), dims = ['num_links'] ) |
---|
369 | dst_imaskutil_addressed.attrs['long_name']="Land-sea mask" ; dst_imaskutil_addressed.attrs['units']="Land:1, Ocean:0" |
---|
370 | |
---|
371 | src_field = xr.DataArray ( src_field, dims = ['src_grid_size',] ) |
---|
372 | dst_field = xr.DataArray ( dst_field, dims = ['dst_grid_size',] ) |
---|
373 | |
---|
374 | f_calving = xr.Dataset ( { |
---|
375 | 'remap_matrix' : remap_matrix, |
---|
376 | 'src_address' : src_address, |
---|
377 | 'dst_address' : dst_address, |
---|
378 | 'src_grid_dims' : src_grid_dims, |
---|
379 | 'src_grid_center_lon' : src_grid_center_lat, |
---|
380 | 'src_grid_center_lat' : src_grid_center_lat, |
---|
381 | 'src_grid_corner_lon' : src_grid_corner_lon, |
---|
382 | 'src_grid_corner_lat' : src_grid_corner_lat, |
---|
383 | 'src_grid_area' : src_grid_area, |
---|
384 | 'src_grid_imask' : src_grid_imask, |
---|
385 | 'dst_grid_dims' : dst_grid_dims, |
---|
386 | 'dst_grid_center_lon' : dst_grid_center_lon, |
---|
387 | 'dst_grid_center_lat' : dst_grid_center_lat, |
---|
388 | 'dst_grid_corner_lon' : dst_grid_corner_lon, |
---|
389 | 'dst_grid_corner_lat' : dst_grid_corner_lat, |
---|
390 | 'dst_grid_area' : dst_grid_area, |
---|
391 | 'dst_grid_imask' : dst_grid_imask, |
---|
392 | 'dst_bassin' : dst_bassin, |
---|
393 | 'dst_repartition' : dst_repartition, |
---|
394 | 'dst_southLimit' : dst_southLimit, |
---|
395 | 'dst_northLimit' : dst_northLimit, |
---|
396 | 'dst_grid_imaskutil' : dst_grid_imaskutil, |
---|
397 | 'dst_grid_imaskclose' : dst_grid_imaskclose, |
---|
398 | 'dst_lon_addressed' : dst_lon_addressed, |
---|
399 | 'dst_lat_addressed' : dst_lat_addressed, |
---|
400 | 'dst_area_addressed' : dst_area_addressed, |
---|
401 | 'dst_imask_addressed' : dst_imask_addressed, |
---|
402 | 'dst_imaskutil_addressed' : dst_imaskutil_addressed, |
---|
403 | 'src_field' : src_field, |
---|
404 | 'dst_field' : dst_field, |
---|
405 | } ) |
---|
406 | |
---|
407 | f_calving.attrs['Conventions'] = "CF-1.6" |
---|
408 | f_calving.attrs['source'] = "IPSL Earth system model" |
---|
409 | f_calving.attrs['group'] = "ICMC IPSL Climate Modelling Center" |
---|
410 | f_calving.attrs['Institution'] = "IPSL https.//www.ipsl.fr" |
---|
411 | f_calving.attrs['Ocean'] = dst_Name + " https://www.nemo-ocean.eu" |
---|
412 | f_calving.attrs['Atmosphere'] = src_Name + " http://lmdz.lmd.jussieu.fr" |
---|
413 | if myargs.type in ['iceberg', 'iceshelf' ]: f_calving.attrs['originalFiles'] = myargs.repartition_file |
---|
414 | f_calving.attrs['associatedFiles'] = grids + " " + areas + " " + masks |
---|
415 | |
---|
416 | f_calving.attrs['description'] = "Generated with XIOS http://forge.ipsl.jussieu.fr/ioserver and MOSAIX https://forge.ipsl.jussieu.fr/igcmg/browser/TOOLS/MOSAIX" |
---|
417 | f_calving.attrs['title'] = calving |
---|
418 | f_calving.attrs['Program'] = "Generated by " + sys.argv[0] + " with flags " + ' '.join (sys.argv[1:]) |
---|
419 | |
---|
420 | f_calving.attrs['repartitionType'] = myargs.type |
---|
421 | if myargs.type in [ 'iceberg', 'iceshelf' ] : |
---|
422 | f_calving.attrs['repartitionFile'] = myargs.repartition_file |
---|
423 | f_calving.attrs['repartitionVar'] = repartitionVar |
---|
424 | f_calving.attrs['gridsFile'] = grids |
---|
425 | f_calving.attrs['areasFile'] = areas |
---|
426 | f_calving.attrs['masksFile'] = masks |
---|
427 | f_calving.attrs['timeStamp'] = time.asctime() |
---|
428 | try : f_calving.attrs['directory'] = os.getcwd () |
---|
429 | except : pass |
---|
430 | try : f_runoff.attrs['HOSTNAME'] = platform.node () |
---|
431 | except : pass |
---|
432 | try : f_runoff.attrs['LOGNAME'] = os.getlogin () |
---|
433 | except : pass |
---|
434 | try : f_runoff.attrs['Python'] = "Python version " + platform.python_version () |
---|
435 | except : pass |
---|
436 | try : f_runoff.attrs['OS'] = platform.system () |
---|
437 | except : pass |
---|
438 | try : f_runoff.attrs['release'] = platform.release () |
---|
439 | except : pass |
---|
440 | try : f_runoff.attrs['hardware'] = platform.machine () |
---|
441 | except : pass |
---|
442 | f_calving.attrs['conventions'] = "SCRIP" |
---|
443 | if src_name == 'lmd' : f_calving.attrs['source_grid'] = "curvilinear" |
---|
444 | if src_name == 'ico' : f_calving.attrs['source_grid'] = "unstructured" |
---|
445 | f_calving.attrs['dest_grid'] = "curvilinear" |
---|
446 | f_calving.attrs['Model'] = "IPSL CM6" |
---|
447 | f_calving.attrs['SVN_Author'] = "$Author$" |
---|
448 | f_calving.attrs['SVN_Date'] = "$Date$" |
---|
449 | f_calving.attrs['SVN_Revision'] = "$Revision$" |
---|
450 | f_calving.attrs['SVN_Id'] = "$Id$" |
---|
451 | f_calving.attrs['SVN_HeadURL'] = "$HeadURL$" |
---|
452 | |
---|
453 | ## |
---|
454 | f_calving.to_netcdf ( calving, mode='w', format=FmtNetcdf ) |
---|
455 | f_calving.close() |
---|
456 | |
---|
457 | print ('That''s all folks !') |
---|
458 | ## ====================================================================================== |
---|