import os
import sys
from argparse import ArgumentParser

import numpy as np
from netCDF4 import Dataset

#
# Basic iceberg trajectory restart post-processing python script.
# This script collects iceberg information from the distributed restarts written
# out by each processing region and writes the information into a global restart file.
# The global restart file must already exist and contain the collated 2D spatial fields
# as prepared by utilities such as rebuild_nemo. This python script simply adds the
# iceberg position and state data that is held using the unlimited dimension 'n' which
# has been ignored by rebuild_nemo. Each processing region that contains icebergs will
# (probably) have a different number of icebergs (indicated by differing values for the
# current size of the unlimited dimension). This script collects all icebergs into a
# single unordered list.
#

parser = ArgumentParser(description='produce a global trajectory restart file from '
                                    'distributed output files, e.g. \n python ./icb_pp.py '
                                    '-f icebergs_00692992_restart_ -n 480 -o icebergs_restart.nc [-O]')

parser.add_argument('-f', dest='froot',
                    help='fileroot_of_distributed_data; root name of '
                         'distributed trajectory restart file (usually completed with XXXX.nc, where '
                         'XXXX is the 4 digit processor number)',
                    default='icebergs_00692992_restart_')

parser.add_argument('-n', dest='fnum', help='number of distributed files to process',
                    type=int, default=None)

parser.add_argument('-o', dest='fout',
                    help='global_iceberg_restart; file name to append the '
                         'global iceberg restart data to.',
                    default='icebergs_restart.nc')

parser.add_argument('-O', dest='fcre',
                    help='Create the output file from scratch rather than '
                         'append to an existing file.',
                    action='store_true', default=False)

args = parser.parse_args()
# Resolve the working settings from the parsed arguments, remembering whether
# any hard-wired fallback value had to be used so the effective command can be
# echoed back to the user. (With the parser defaults above only -n can actually
# arrive as None, but the other checks are kept as a safety net.)
default_used = False

if args.froot is None:
    pathstart = 'icebergs_00692992_restart_'
    default_used = True
else:
    pathstart = args.froot

if args.fnum is None:
    procnum = 0
    default_used = True
else:
    procnum = args.fnum

if args.fout is None:
    pathout = 'icebergs_restart.nc'
    default_used = True
else:
    pathout = args.fout

if default_used:
    print('At least one default value will be used; command executing is:')
    print('icb_combrest.py -f ', pathstart, ' -n ', procnum, ' -o ', pathout)

# Without at least one distributed file there is nothing to collate.
if procnum < 1:
    print('Need some files to collate! procnum = ', procnum)
    sys.exit(11)
ntraj = 0    # total number of icebergs across all distributed files
nk = 0       # widest 'k' dimension seen in any file
#
# First pass: loop through all distributed datasets to obtain the total number
# of icebergs to transfer (and the size needed for the 'k' dimension).
#
for n in range(procnum):
    nn = '%4.4d' % n
    try:
        fw = Dataset(pathstart + nn + '.nc')
    except (OSError, RuntimeError):
        # netCDF4 raises OSError (RuntimeError in older versions) on open failure
        print('Error: unable to open input file: ' + pathstart + nn + '.nc')
        sys.exit(12)
    # Many distributed files contain no iceberg data and hence no 'n' dimension.
    if 'n' in fw.dimensions:
        if len(fw.dimensions['n']) > 0:
            if len(fw.dimensions['k']) > nk:
                nk = len(fw.dimensions['k'])
            ntraj = ntraj + len(fw.dimensions['n'])
    fw.close()
#
print(ntraj, ' icebergs found across all datasets')
#
# Declare the collation arrays, one entry per iceberg found in the first pass.
#
lons, lats, xis, yjs, uvs, vvs, mas = (np.zeros(ntraj) for _ in range(7))
ths, wis, les, dys, mss, msb, hds = (np.zeros(ntraj) for _ in range(7))
yrs = np.zeros(ntraj, dtype=int)
num = np.zeros((ntraj, nk), dtype=int)
#
# Second pass through the distributed datasets, this time copying every
# trajectory record into the collation arrays.
#
nt = 0
for ip in range(procnum):
    tag = '%4.4d' % ip
    fw = Dataset(pathstart + tag + '.nc')
    # Note many distributed datafiles will contain no iceberg data
    # so skip quickly over these
    if 'n' in fw.dimensions:
        nberg = len(fw.dimensions['n'])
        if nberg > 0:
            dest = slice(nt, nt + nberg)
            lons[dest] = fw.variables['lon'][:]
            lats[dest] = fw.variables['lat'][:]
            xis[dest] = fw.variables['xi'][:]
            yjs[dest] = fw.variables['yj'][:]
            uvs[dest] = fw.variables['uvel'][:]
            vvs[dest] = fw.variables['vvel'][:]
            mas[dest] = fw.variables['mass'][:]
            ths[dest] = fw.variables['thickness'][:]
            wis[dest] = fw.variables['width'][:]
            les[dest] = fw.variables['length'][:]
            dys[dest] = fw.variables['day'][:]
            mss[dest] = fw.variables['mass_scaling'][:]
            msb[dest] = fw.variables['mass_of_bits'][:]
            hds[dest] = fw.variables['heat_density'][:]
            yrs[dest] = fw.variables['year'][:]
            num[dest, :] = fw.variables['number'][:, :]
            nt = nt + nberg
    fw.close()
# Finally create the output file and write out the collated sets
#
if args.fcre:
    # Create the output file from scratch.
    try:
        fo = Dataset(pathout, 'w', format='NETCDF4')
    except (OSError, RuntimeError):
        print('Error accessing output file: ' + pathout)
        print('Check it is a writable location.')
        sys.exit(13)
else:
    # Copy 2D variables across to output file from input file. This step avoids problems if rebuild_nemo
    # has created an "n" dimension in the prototype rebuilt file (ie. if there are icebergs on the zeroth
    # processor).
    try:
        os.rename(pathout, pathout.replace('.nc', '_WORK.nc'))
    except OSError:
        print('Error: unable to move icebergs restart file: ' + pathout)
        sys.exit(14)
    #
    try:
        fi = Dataset(pathout.replace('.nc', '_WORK.nc'), 'r')
    except (OSError, RuntimeError):
        print('Error: unable to open icebergs restart file: ' + pathout.replace('.nc', '_WORK.nc'))
        sys.exit(15)
    fo = Dataset(pathout, 'w')
    for dim in ['x', 'y', 'c']:
        fo.createDimension(dim, len(fi.dimensions[dim]))
    for var in ['calving', 'calving_hflx', 'stored_ice', 'stored_heat']:
        invar = fi.variables[var]
        fo.createVariable(var, invar.datatype, invar.dimensions)
        fo.variables[var][:] = invar[:]
        fo.variables[var].long_name = invar.long_name
        fo.variables[var].units = invar.units
    # Close the work file before deleting it (the original left it open).
    fi.close()
    os.remove(pathout.replace('.nc', '_WORK.nc'))
#
# The trajectory data hangs off the unlimited dimension 'n', which must not
# already exist in the output; 'k' is only created if the prototype lacks it.
if 'n' in fo.dimensions:
    print('Error: dimension n already exists in output file')
    sys.exit(16)
fo.createDimension('n', None)
if 'k' not in fo.dimensions:
    fo.createDimension('k', nk)


def _add_traj_var(name, dtype, dims, data, long_name, units):
    """Create one trajectory variable in fo, write its data and its attributes."""
    ovar = fo.createVariable(name, dtype, dims)
    ovar[:] = data
    ovar.long_name = long_name
    ovar.units = units


_add_traj_var('lon', 'f8', ('n',), lons, "longitude", "degrees_E")
_add_traj_var('lat', 'f8', ('n',), lats, "latitude", "degrees_N")
_add_traj_var('xi', 'f8', ('n',), xis, "x grid box position", "fractional")
_add_traj_var('yj', 'f8', ('n',), yjs, "y grid box position", "fractional")
_add_traj_var('uvel', 'f8', ('n',), uvs, "zonal velocity", "m/s")
_add_traj_var('vvel', 'f8', ('n',), vvs, "meridional velocity", "m/s")
_add_traj_var('mass', 'f8', ('n',), mas, "mass", "kg")
_add_traj_var('thickness', 'f8', ('n',), ths, "thickness", "m")
_add_traj_var('width', 'f8', ('n',), wis, "width", "m")
_add_traj_var('length', 'f8', ('n',), les, "length", "m")
_add_traj_var('day', 'f8', ('n',), dys, "year day of calving event", "days")
_add_traj_var('mass_scaling', 'f8', ('n',), mss, "scaling factor for mass of calving berg", "none")
_add_traj_var('mass_of_bits', 'f8', ('n',), msb, "mass of bergy bits", "kg")
_add_traj_var('heat_density', 'f8', ('n',), hds, "heat density", "J/kg")
_add_traj_var('year', 'i4', ('n',), yrs, "calendar year of calving event", "years")
_add_traj_var('number', 'i4', ('n', 'k'), num, "iceberg number on this processor", "count")
#
fo.close()
---|