source: TOOLS/CPLRESTART/create_flxat.py @ 6351

Last change on this file since 6351 was 6327, checked in by snguyen, 16 months ago

Fix broken code in CreateRestartAtm4Oasis.bash and create_flxat.py for regular lon/lat grids. The tools now work for files on both the regular lon/lat grid and the ICO unstructured grid.

File size: 10.8 KB
### ===========================================================================
###
### Part of CPL Restart IPSL packages
###
### ===========================================================================
##
##  Warning, to install, configure, run, use any of Olivier Marti's
##  software or to read the associated documentation you'll need at least
##  one (1) brain in a reasonably working order. Lack of this implement
##  will void any warranties (either express or implied).
##  O. Marti assumes no responsibility for errors, omissions,
##  data loss, or any other consequences caused directly or indirectly by
##  the usage of his software by incorrectly or partially configured
##  personnel.
##
###
### Documentation : https://forge.ipsl.jussieu.fr/igcmg/wiki/IPSLCM6/MOSAIX
###
## SVN information
#  $Author: omamce $
#  $Date: 2020-09-18 17:02:09 +0200 (Fri, 18 Sep 2020) $
#  $Revision: 5157 $
#  $Id: CreateRestartAtm4Oasis.bash 5157 2020-09-18 15:02:09Z omamce $
#  $HeadURL: svn+ssh://omamce@forge.ipsl.jussieu.fr/ipsl/forge/projets/igcmg/svn/TOOLS/CPLRESTART/CreateRestartAtm4Oasis.bash $

import numpy as np, xarray as xr
import os, platform
import sys, argparse, textwrap


## Reading the command line arguments
#
# Creating a parser
# The first step in using the argparse is creating an ArgumentParser object:
parser = argparse.ArgumentParser (description = textwrap.dedent ("""
   create_flxat
   """), epilog='-------- This is the end of the help message --------')

# Adding arguments
parser.add_argument ('-v', '--verbose', help='verbosity', action="store_true", default=False )
parser.add_argument ('-l', '--level', help='verbosity level', type=int, default=0, choices=[0, 1, 2] )
parser.add_argument ('-i', '--input'  , metavar='inputFile' , help="input file" , nargs='?', default="flxat_fields_notime.nc"  )
parser.add_argument ('-o', '--output' , metavar='outputFile', help="output file", nargs='?', default="tmp_flxat.nc" )
parser.add_argument ('-f', '--format' , metavar='NetCDF_Format', help="NetCDF format", nargs='?', default="NETCDF4",
                         choices=["NETCDF4", "NETCDF4_CLASSIC", "NETCDF3_64BIT", "NETCDF3_CLASSIC", "64bits"])
parser.add_argument ('--IsUnstructured', default='no', choices=['yes', 'no'] )

# Parse command line
myargs = parser.parse_args ()
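## Example invocation (illustrative only ; the file names are simply the defaults
## declared above, any NetCDF file holding the expected fields can be used) :
#    python create_flxat.py -i flxat_fields_notime.nc -o tmp_flxat.nc -f NETCDF4 --IsUnstructured no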

NcFormat = myargs.format
if NcFormat == '64bits' : NcFormat = 'NETCDF4'   ## map the '64bits' alias onto a format name accepted by xarray

## Read Data - promote them to 64 bits
d_In = xr.open_dataset (myargs.input)

lon       = d_In.lon.astype(np.float64).values
lat       = d_In.lat.astype(np.float64).values
evap_oce  = d_In.evap_oce.astype(np.float64).values
evap_sic  = d_In.evap_sic.astype(np.float64).values
fract_oce = d_In.fract_oce.astype(np.float64).values
fract_sic = d_In.fract_sic.astype(np.float64).values
precip    = d_In.precip.astype(np.float64).values
snow      = d_In.snow.astype(np.float64).values
soll      = d_In.soll.astype(np.float64).values
sols      = d_In.sols.astype(np.float64).values
taux_oce  = d_In.taux_oce.astype(np.float64).values
taux_sic  = d_In.taux_sic.astype(np.float64).values
tauy_oce  = d_In.tauy_oce.astype(np.float64).values
tauy_sic  = d_In.tauy_sic.astype(np.float64).values
wind10m   = d_In.wind10m.astype(np.float64).values

##
yxshape = lat.shape
ny, nx = yxshape
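## Note : lat/lon and all the fields above are expected as 2-D (y, x) arrays
## (the unpacking of yxshape above requires exactly two dimensions) ; for the
## ICO unstructured grid the calling script is assumed to provide fields already
## laid out that way.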

## Computations
np.seterr (divide='ignore', invalid='ignore')
fract_oce_plus_sic = fract_oce + fract_sic   ## total ocean fraction (open ocean + sea ice)
fract_oce_norm = np.where (fract_oce_plus_sic >  0.0, fract_oce/fract_oce_plus_sic, 0.0) # free ocean vs. total ocean fraction
fract_sic_norm = np.where (fract_oce_plus_sic >  0.0, fract_sic/fract_oce_plus_sic, 0.0) # sea ice vs. total ocean fraction
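## These normalised fractions are used below to combine the open-ocean and
## sea-ice contributions into a single flux over the ocean part of each cell,
## e.g. evaporation : evap = evap_oce*fract_oce_norm + evap_sic*fract_sic_norm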
##
COTOTRAI = xr.DataArray (precip-snow, dims=('y', 'x'))
COTOTRAI.attrs['long_name'] = 'Liquid precip'
COTOTRAI.attrs['coordinates'] = "lat lon"

COTOTSNO = xr.DataArray (snow       , dims=('y', 'x'))
COTOTSNO.attrs['long_name'] = 'Solid precipitation'
COTOTSNO.attrs['coordinates'] = "lat lon"

COTOTEVA = xr.DataArray (evap_oce*fract_oce_norm + evap_sic*fract_sic_norm, dims=('y', 'x'))
COTOTEVA.attrs['coordinates'] = "lat lon"

COICEVAP = xr.DataArray (evap_sic   , dims=('y', 'x'))
COICEVAP.attrs['long_name'] = 'Evaporation on sea ice'
COICEVAP.attrs['coordinates'] = "lat lon"

COQSRMIX = xr.DataArray (sols       , dims=('y', 'x'))
COQSRMIX.attrs['long_name'] = 'Heat flux short wave'
COQSRMIX.attrs['units'] = 'W/m2'
COQSRMIX.attrs['coordinates'] = "lat lon"

COQNSMIX = xr.DataArray (soll       , dims=('y', 'x'))
COQNSMIX.attrs['long_name'] = 'Heat flux minus short wave'
COQNSMIX.attrs['units'] = 'W/m2'
COQNSMIX.attrs['coordinates'] = "lat lon"

COSHFICE = xr.DataArray (sols       , dims=('y', 'x'))
COSHFICE.attrs['long_name'] = 'Heat flux short wave over sea ice'
COSHFICE.attrs['units'] = 'W/m2'
COSHFICE.attrs['coordinates'] = "lat lon"

CONSFICE = xr.DataArray (soll       , dims=('y', 'x'))
CONSFICE.attrs['long_name'] = 'Heat flux minus short wave over sea ice'
CONSFICE.attrs['units'] = 'W/m2'
CONSFICE.attrs['coordinates'] = "lat lon"

COWINDSP = xr.DataArray (wind10m    , dims=('y', 'x'))
COWINDSP.attrs['long_name'] = 'Wind speed at 10m high'
COWINDSP.attrs['units'] = 'm/s'
COWINDSP.attrs['coordinates'] = "lat lon"

CODFLXDT = xr.DataArray (-20.0*np.ones(yxshape), dims=('y', 'x'))
CODFLXDT.attrs['long_name'] = 'dQ/dT - Derivative over temperature of turbulent heat fluxes'
CODFLXDT.attrs['units'] = 'W/m2/K'
CODFLXDT.attrs['coordinates'] = "lat lon"

COCALVIN = xr.DataArray (  0.0*np.ones(yxshape), dims=('y', 'x'))
COCALVIN.attrs['long_name'] = 'Calving of icebergs, solid'
COCALVIN.attrs['coordinates'] = "lat lon"

COLIQRUN = xr.DataArray (  0.0*np.ones(yxshape), dims=('y', 'x'))
COLIQRUN.attrs['long_name'] = 'River run-off, liquid'
COLIQRUN.attrs['coordinates'] = "lat lon"

## Wind stress
tau_x = (taux_oce*fract_oce_norm + taux_sic*fract_sic_norm)
tau_y = (tauy_oce*fract_oce_norm + tauy_sic*fract_sic_norm)
COTAUMOD = xr.DataArray (np.sqrt ( (tau_x*tau_x + tau_y*tau_y) ) , dims=('y', 'x'))
COTAUMOD.attrs['long_name'] = 'Wind stress modulus'
COTAUMOD.attrs['units'] = 'Pa'
COTAUMOD.attrs['coordinates'] = "lat lon"

## Wind stress, from east/north components to geocentric
rad = np.deg2rad (1.0)
COTAUXXU = xr.DataArray (-tau_x * np.sin(rad * lon) - tau_y * np.sin(rad * lat) * np.cos(rad * lon) , dims=('y', 'x'))
COTAUYYU = xr.DataArray ( tau_x * np.cos(rad * lon) - tau_y * np.sin(rad * lat) * np.sin(rad * lon) , dims=('y', 'x'))
COTAUZZU = xr.DataArray ( tau_y * np.cos(rad * lat)                                                 , dims=('y', 'x'))
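## The three lines above rotate the local (eastward, northward) stress components
## (tau_x, tau_y) into geocentric Cartesian components, with rad = pi/180
## converting degrees to radians :
##    X = -tau_x*sin(lon) - tau_y*sin(lat)*cos(lon)
##    Y =  tau_x*cos(lon) - tau_y*sin(lat)*sin(lon)
##    Z =  tau_y*cos(lat)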


## Value at North Pole
if myargs.IsUnstructured == 'yes' :
    ## Value at North Pole for DYNAMICO grid
    COTAUXXU = xr.where ( lat >= 89.999, -tau_y, COTAUXXU)
    COTAUYYU = xr.where ( lat >= 89.999,  tau_x, COTAUYYU)
    ## Value at South Pole for DYNAMICO grid ?

else :
    ## Value at North Pole for LMDZ lon/lat grid
    COTAUXXU[0,:] = ( -tau_x [0, 0] )
    COTAUYYU[0,:] = ( -tau_y [0, 0] )
    COTAUZZU[0,:] =  0.0
    ## Value at South Pole for LMDZ lon/lat grid
    COTAUXXU[-1,:] = ( -tau_x [-1, 0] )
    COTAUYYU[-1,:] = ( -tau_y [-1, 0] )
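## Pole handling note : on the regular lon/lat grid the first and last rows are
## the poles, where every longitude maps to the same physical point ; a single
## geocentric value, computed from the first column, is therefore broadcast
## along the whole row.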

##
COTAUXXU.attrs['long_name'] = 'Wind stress in geocentric referential - x-component'
COTAUYYU.attrs['long_name'] = 'Wind stress in geocentric referential - y-component'
COTAUZZU.attrs['long_name'] = 'Wind stress in geocentric referential - z-component'

COTAUXXU.attrs['units'] = 'Pa'
COTAUYYU.attrs['units'] = 'Pa'
COTAUZZU.attrs['units'] = 'Pa'

COTAUXXU.attrs['coordinates'] = "lat lon"
COTAUYYU.attrs['coordinates'] = "lat lon"
COTAUZZU.attrs['coordinates'] = "lat lon"

COTAUXXV = COTAUXXU ; COTAUYYV = COTAUYYU ; COTAUZZV = COTAUZZU

## Check whether lon/lat bounds are present; if so they will be added to the output dataset
ifbnds = ('bounds_lon' in d_In.data_vars) and ('bounds_lat' in d_In.data_vars)
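## If cell-corner bounds are present they are simply copied through to the output ;
## presumably they are needed by downstream SCRIP-style remapping tools
## (cf. the 'conventions' attribute set near the end of the script).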

## Create the final Dataset
lon = xr.DataArray (lon, dims=('y', 'x'))
lon.attrs['name']      = 'longitude'
lon.attrs['long_name'] = 'Longitude'
lon.attrs['units']     = 'degrees_east'
if ifbnds: lon.attrs['bounds']    = 'bounds_lon'

lat = xr.DataArray (lat, dims=('y', 'x'))
lat.attrs['name']      = 'latitude'
lat.attrs['long_name'] = 'Latitude'
lat.attrs['units']     = 'degrees_north'
if ifbnds: lat.attrs['bounds']    = 'bounds_lat'

if ifbnds:
    bounds_lon = d_In.bounds_lon.values.astype (np.float64)
    bounds_lat = d_In.bounds_lat.values.astype (np.float64)
    nvertex = bounds_lon.shape[-1]

    bounds_lon = xr.DataArray ( np.reshape (bounds_lon, (ny,nx,nvertex)), dims=('y', 'x', 'nvertex'))
    bounds_lat = xr.DataArray ( np.reshape (bounds_lat, (ny,nx,nvertex)), dims=('y', 'x', 'nvertex'))

    bounds_lon.attrs['units'] = 'degrees_east'
    bounds_lat.attrs['units'] = 'degrees_north'

# Prepare a dictionary to export the dataset to a NetCDF file, with or without bounds
dictdataset = {'lat':lat, 'lon':lon }
if ifbnds: dictdataset.update({'bounds_lon':bounds_lon, 'bounds_lat':bounds_lat})
dictdataset.update({'COTOTRAI':COTOTRAI,
    'COTOTSNO':COTOTSNO,
    'COTOTEVA':COTOTEVA,
    'COICEVAP':COICEVAP,
    'COQSRMIX':COQSRMIX,
    'COQNSMIX':COQNSMIX,
    'COSHFICE':COSHFICE,
    'CONSFICE':CONSFICE,
    'CODFLXDT':CODFLXDT,
    'COCALVIN':COCALVIN,
    'COLIQRUN':COLIQRUN,
    'COWINDSP':COWINDSP,
    'COTAUMOD':COTAUMOD,
    'COTAUXXU':COTAUXXU,
    'COTAUYYU':COTAUYYU,
    'COTAUZZU':COTAUZZU,
    'COTAUXXV':COTAUXXV,
    'COTAUYYV':COTAUYYV,
    'COTAUZZV':COTAUZZV } )

d_Out = xr.Dataset (dictdataset)

d_Out.attrs['Associated files'] = myargs.input
d_Out.attrs['Conventions']     = "CF-1.6"
d_Out.attrs['source']          = "IPSL Earth system model"
d_Out.attrs['group']           = "ICMC IPSL Climate Modelling Center"
d_Out.attrs['Institution']     = "IPSL https://www.ipsl.fr"
try    : d_Out.attrs['directory'] = os.getcwd ()
except : pass
try    : d_Out.attrs['HOSTNAME'] = platform.node ()
except : pass
try    : d_Out.attrs['LOGNAME']  = os.getlogin ()
except : pass
try    : d_Out.attrs['Python']   = "Python version " +  platform.python_version ()
except : pass
try    : d_Out.attrs['OS']       = platform.system ()
except : pass
try    : d_Out.attrs['release']  = platform.release ()
except : pass
try    : d_Out.attrs['hardware'] = platform.machine ()
except : pass
d_Out.attrs['conventions']     = "SCRIP"
d_Out.attrs['dest_grid']       = "curvilinear"
d_Out.attrs['Model']           = "IPSL CM6"
d_Out.attrs['SVN_Author']      = "$Author: omamce $"
d_Out.attrs['SVN_Date']        = "$Date: 2022-07-06 11:06:07 +0200 (Wed, 06 Jul 2022) $"
d_Out.attrs['SVN_Revision']    = "$Revision: 6190 $"
d_Out.attrs['SVN_Id']          = "$Id: CalvingWeights.py 6190 2022-07-06 09:06:07Z omamce $"
d_Out.attrs['SVN_HeadURL']     = "$HeadURL: svn+ssh://omamce@forge.ipsl.jussieu.fr/ipsl/forge/projets/igcmg/svn/TOOLS/MOSAIX/CalvingWeights.py $"


d_Out.to_netcdf ( myargs.output, format=NcFormat )
d_Out.close()
## ===========================================================================
##
##                               That's all folks !!!
##
## ===========================================================================