from netCDF4 import Dataset
from argparse import ArgumentParser
import numpy as np
import sys

#
# Basic iceberg trajectory restart post-processing Python script.
# This script collects iceberg information from the distributed restarts written
# out by each processing region and writes the information into a global restart file.
# The global restart file must already exist and contain the collated 2D spatial fields
# as prepared by utilities such as rebuild_nemo. This script simply adds the
# iceberg position and state data that is held using the unlimited dimension 'n', which
# has been ignored by rebuild_nemo. Each processing region that contains icebergs will
# (probably) have a different number of icebergs (indicated by differing values for the
# current size of the unlimited dimension). This script collects all icebergs into a
# single unordered list.
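#
# Typical usage (the file root and processor count below are illustrative):
#   python icb_combrest.py -f icebergs_00692992_restart_ -n 480 -o icebergs_restart.nc [-O]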
#

parser = ArgumentParser(description='produce a global trajectory restart file from distributed output\
                        files, e.g. \n python ./icb_combrest.py \
                        -f icebergs_00692992_restart_ -n 480 -o icebergs_restart.nc [-O]')

parser.add_argument('-f',dest='froot',help='fileroot_of_distributed_data; root name of \
                    distributed trajectory restart file (usually completed with XXXX.nc, where \
                    XXXX is the 4-digit processor number)',
                    default='icebergs_00692992_restart_')

parser.add_argument('-n',dest='fnum',help='number of distributed files to process',
                    type=int, default=None)

parser.add_argument('-o',dest='fout',help='global_iceberg_restart; file name to append the \
                    global iceberg restart data to.', default='icebergs_restart.nc')

parser.add_argument('-O',dest='fcre',help='Create the output file from scratch rather than \
                    append to an existing file.',
                    action='store_true', default=False)

args = parser.parse_args()

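# Substitute the documented defaults for any argument that comes back unset and
# record that a default was used, so the effective command can be echoed below.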
default_used = 0
if args.froot is None:
    pathstart = 'icebergs_00692992_restart_'
    default_used = 1
else:
    pathstart = args.froot

if args.fnum is None:
    procnum = 0
    default_used = 1
else:
    procnum = args.fnum

if args.fout is None:
    pathout = 'icebergs_restart.nc'
    default_used = 1
else:
    pathout = args.fout

if default_used == 1:
    print('At least one default value will be used; command executing is:')
    print('icb_combrest.py -f ',pathstart,' -n ',procnum,' -o ',pathout)

if procnum < 1:
    print('Need some files to collate! procnum = ',procnum)
    sys.exit()

icu = []
times = []
ntraj = 0
nk = 0
#
# Loop through all distributed datasets to obtain the total number
# of icebergs to transfer
#
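# Distributed restart files are expected to be named <froot>NNNN.nc, where NNNN
# is the 4-digit processor number (see the -f help text above).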
for n in range(procnum):
    nn = '%4.4d' % n
    try:
        fw = Dataset(pathstart+nn+'.nc')
    except:
        print('Error: unable to open input file: ' + pathstart+nn+'.nc')
        sys.exit()
    for d in fw.dimensions:
        if d == 'n':
            if len(fw.dimensions['n']) > 0:
                # print('icebergs found in: ' + pathstart+nn+'.nc')
                if len(fw.dimensions['k']) > nk:
                    nk = len(fw.dimensions['k'])
                ntraj = ntraj + len(fw.dimensions['n'])
    fw.close()
#
print(ntraj, ' icebergs found across all datasets')
#
# Declare arrays to receive the data from all files (all are 1-D per-berg fields
# except 'num', which also spans the 'k' dimension)
#
lons = np.zeros(ntraj)
lats = np.zeros(ntraj)
xis = np.zeros(ntraj)
yjs = np.zeros(ntraj)
uvs = np.zeros(ntraj)
vvs = np.zeros(ntraj)
mas = np.zeros(ntraj)
ths = np.zeros(ntraj)
wis = np.zeros(ntraj)
les = np.zeros(ntraj)
dys = np.zeros(ntraj)
mss = np.zeros(ntraj)
msb = np.zeros(ntraj)
hds = np.zeros(ntraj)
yrs = np.zeros(ntraj, dtype=int)
num = np.zeros((ntraj, nk), dtype=int)
#
# Loop through distributed datasets again, this time
# collecting all trajectory data
#
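# nt is the insertion offset: the next free slot in the collated arrays.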
nt = 0
for n in range(procnum):
    nn = '%4.4d' % n
    fw = Dataset(pathstart+nn+'.nc')
    for d in fw.dimensions:
        if d == 'n':
            # Note many distributed datafiles will contain no iceberg data
            # so skip quickly over these
            m = len(fw.dimensions['n'])
            if m > 0:
                # print(pathstart+nn+'.nc')
                lons[nt:nt+m] = fw.variables['lon'][:]
                lats[nt:nt+m] = fw.variables['lat'][:]
                xis[nt:nt+m] = fw.variables['xi'][:]
                yjs[nt:nt+m] = fw.variables['yj'][:]
                uvs[nt:nt+m] = fw.variables['uvel'][:]
                vvs[nt:nt+m] = fw.variables['vvel'][:]
                mas[nt:nt+m] = fw.variables['mass'][:]
                ths[nt:nt+m] = fw.variables['thickness'][:]
                wis[nt:nt+m] = fw.variables['width'][:]
                les[nt:nt+m] = fw.variables['length'][:]
                dys[nt:nt+m] = fw.variables['day'][:]
                mss[nt:nt+m] = fw.variables['mass_scaling'][:]
                msb[nt:nt+m] = fw.variables['mass_of_bits'][:]
                hds[nt:nt+m] = fw.variables['heat_density'][:]
                yrs[nt:nt+m] = fw.variables['year'][:]
                num[nt:nt+m,:] = fw.variables['number'][:,:]
                nt = nt + m
    fw.close()

# Finally create the output file and write out the collated sets
#
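# With -O a new NetCDF4 file is created (it will contain the iceberg state data
# only); otherwise the data are appended to the existing, rebuilt restart file.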
if args.fcre:
    try:
        fo = Dataset(pathout, 'w', format='NETCDF4')
    except:
        print('Error accessing output file: ' + pathout)
        print('Check it is a writable location.')
        sys.exit()
else:
    try:
        fo = Dataset(pathout, 'r+', format='NETCDF4')
    except:
        print('Error accessing output file: ' + pathout)
        print('Check it exists and is writable.')
        print('Or run adding the -O option to create an output file which will')
        print('contain the iceberg state data only.')
        sys.exit()
#
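# The output must not already hold iceberg data: stop if an 'n' dimension exists,
# then create 'n' (unlimited) and, if not already present, 'k'.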
add_k = 1
for d in fo.dimensions:
    if d == 'n':
        print('Error: dimension n already exists in output file')
        sys.exit()
    if d == 'k':
        add_k = 0
onn = fo.createDimension('n', None)
if add_k > 0:
    onk = fo.createDimension('k', nk)
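# Define one output variable per iceberg state field along the unlimited 'n'
# dimension ('number' also spans 'k').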
olon = fo.createVariable('lon', 'f8',('n'))
olat = fo.createVariable('lat', 'f8',('n'))
oxis = fo.createVariable('xi', 'f8',('n'))
oyjs = fo.createVariable('yj', 'f8',('n'))
ouvs = fo.createVariable('uvel', 'f8',('n'))
ovvs = fo.createVariable('vvel', 'f8',('n'))
omas = fo.createVariable('mass', 'f8',('n'))
oths = fo.createVariable('thickness', 'f8',('n'))
owis = fo.createVariable('width', 'f8',('n'))
oles = fo.createVariable('length', 'f8',('n'))
odys = fo.createVariable('day', 'f8',('n'))
omss = fo.createVariable('mass_scaling', 'f8',('n'))
omsb = fo.createVariable('mass_of_bits', 'f8',('n'))
ohds = fo.createVariable('heat_density', 'f8',('n'))
oyrs = fo.createVariable('year', 'i4',('n'))
onum = fo.createVariable('number', 'i4',('n','k'))
#
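# Copy the collated data into the output variables and attach basic
# long_name/units metadata to each field.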
olon[:] = lons
olon.long_name = "longitude"
olon.units = "degrees_E"
#
olat[:] = lats
olat.long_name = "latitude"
olat.units = "degrees_N"
#
oxis[:] = xis
oxis.long_name = "x grid box position"
oxis.units = "fractional"
#
oyjs[:] = yjs
oyjs.long_name = "y grid box position"
oyjs.units = "fractional"
#
ouvs[:] = uvs
ouvs.long_name = "zonal velocity"
ouvs.units = "m/s"
#
ovvs[:] = vvs
ovvs.long_name = "meridional velocity"
ovvs.units = "m/s"
#
omas[:] = mas
omas.long_name = "mass"
omas.units = "kg"
#
oths[:] = ths
oths.long_name = "thickness"
oths.units = "m"
#
owis[:] = wis
owis.long_name = "width"
owis.units = "m"
#
oles[:] = les
oles.long_name = "length"
oles.units = "m"
#
odys[:] = dys
odys.long_name = "year day of calving event"
odys.units = "days"
#
omss[:] = mss
omss.long_name = "scaling factor for mass of calving berg"
omss.units = "none"
#
omsb[:] = msb
omsb.long_name = "mass of bergy bits"
omsb.units = "kg"
#
ohds[:] = hds
ohds.long_name = "heat density"
ohds.units = "J/kg"
#
oyrs[:] = yrs
oyrs.long_name = "calendar year of calving event"
oyrs.units = "years"
#
onum[:,:] = num
onum.long_name = "iceberg number on this processor"
onum.units = "count"
#
fo.close()