[6498] | 1 | import os |
---|
[5911] | 2 | from netCDF4 import Dataset |
---|
| 3 | from argparse import ArgumentParser |
---|
| 4 | import numpy as np |
---|
| 5 | import sys |
---|
| 6 | |
---|
| 7 | # |
---|
| 8 | # Basic iceberg trajectory restart post-processing python script. |
---|
| 9 | # This script collects iceberg information from the distributed restarts written |
---|
| 10 | # out by each processing region and writes the information into a global restart file. |
---|
| 11 | # The global restart file must already exist and contain the collated 2D spatial fields |
---|
| 12 | # as prepared by utilities such as rebuild_nemo. This python script simply adds the |
---|
| 13 | # iceberg position and state data that is held using the unlimited dimension 'n' which |
---|
| 14 | # has been ignored by rebuild_nemo. Each processing region that contains icebergs will |
---|
| 15 | # (probably) have a different number of icebergs (indicated by differing values for the |
---|
| 16 | # current size of the unlimited dimension). This script collects all icebergs into a |
---|
| 17 | # single unordered list. |
---|
| 18 | # |
---|
| 19 | |
---|
#
# Command-line interface. Fixes vs. the original: the description and help
# strings used backslash line-continuations *inside* the string literals,
# which silently deleted the line breaks and glued words together
# ("outputfiles"), and "distributed" was misspelt. Implicit string
# concatenation keeps each message readable and correctly spaced.
#
parser = ArgumentParser(
    description='produce a global trajectory restart file from distributed output '
                'files, e.g. \n python ./icb_pp.py '
                '-f icebergs_00692992_restart_ -n 480 -o icebergs_restart.nc [-O]')

parser.add_argument('-f', dest='froot',
                    help='fileroot_of_distributed_data; root name of '
                         'distributed trajectory restart file (usually completed with XXXX.nc, where '
                         'XXXX is the 4 digit processor number)',
                    default='icebergs_00692992_restart_')

parser.add_argument('-n', dest='fnum',
                    help='number of distributed files to process',
                    type=int, default=None)

parser.add_argument('-o', dest='fout',
                    help='global_iceberg_restart; file name to append the '
                         'global iceberg restart data to.',
                    default='icebergs_restart.nc')

parser.add_argument('-O', dest='fcre',
                    help='Create the output file from scratch rather than '
                         'append to an existing file.',
                    action='store_true', default=False)

args = parser.parse_args()
# Resolve each option, falling back to the documented default when the
# parser supplied None, and remember whether any fallback was taken so the
# user can be shown the effective command line.
default_used = 0

pathstart = args.froot
if pathstart is None:
    pathstart = 'icebergs_00692992_restart_'
    default_used = 1

procnum = args.fnum
if procnum is None:
    procnum = 0
    default_used = 1

pathout = args.fout
if pathout is None:
    pathout = 'icebergs_restart.nc'
    default_used = 1

if default_used == 1:
    print('At least one default value will be used; command executing is:')
    print('icb_combrest.py -f ', pathstart, ' -n ', procnum, ' -o ', pathout)

# Without at least one distributed file there is nothing to collate.
if procnum < 1:
    print('Need some files to collate! procnum = ', procnum)
    sys.exit(11)
icu = []
times = []
ntraj = 0   # total iceberg count across every distributed file
nk = 0      # widest 'k' dimension seen (per-berg number parts)
#
# Loop through all distributed datasets to obtain the total number
# of icebergs to transfer
#
for n in range(procnum):
    nn = '%4.4d' % n
    try:
        fw = Dataset(pathstart + nn + '.nc')
    # Narrowed from a bare "except:" (which would also swallow
    # KeyboardInterrupt/SystemExit); the print is now the Python 3
    # function form — the original Python 2 print statement is a
    # syntax error under Python 3.
    except Exception:
        print('Error: unable to open input file: ' + pathstart + nn + '.nc')
        sys.exit(12)
    # Only files that define the unlimited 'n' dimension carry icebergs.
    if 'n' in fw.dimensions:
        if len(fw.dimensions['n']) > 0:
            if len(fw.dimensions['k']) > nk:
                nk = len(fw.dimensions['k'])
            ntraj = ntraj + len(fw.dimensions['n'])
    fw.close()
#
print(ntraj, ' icebergs found across all datasets')
#
# Declare 2-D arrays to receive the data from all files
#
# One float array per scalar iceberg property, each sized to hold every
# iceberg found across all of the distributed restart files.
(lons, lats, xis, yjs, uvs, vvs, mas,
 ths, wis, les, dys, mss, msb, hds) = (np.zeros(ntraj) for _ in range(14))
# Integer-valued fields: calving year, and the (n, k) iceberg number parts.
yrs = np.zeros(ntraj, dtype=int)
num = np.zeros((ntraj, nk), dtype=int)
#
# loop through distributed datasets again, this time
# collecting all trajectory data
#
nt = 0
# Map each netCDF scalar variable name onto its destination array so the
# per-file copy below is a single table-driven loop.
_scalar_fields = (
    ('lon', lons), ('lat', lats), ('xi', xis), ('yj', yjs),
    ('uvel', uvs), ('vvel', vvs), ('mass', mas), ('thickness', ths),
    ('width', wis), ('length', les), ('day', dys), ('mass_scaling', mss),
    ('mass_of_bits', msb), ('heat_density', hds), ('year', yrs),
)
for n in range(procnum):
    fw = Dataset(pathstart + '%4.4d' % n + '.nc')
    # Note many distributed datafiles will contain no iceberg data
    # so skip quickly over these
    if 'n' in fw.dimensions:
        m = len(fw.dimensions['n'])
        if m > 0:
            for vname, dest in _scalar_fields:
                dest[nt:nt + m] = fw.variables[vname][:]
            num[nt:nt + m, :] = fw.variables['number'][:, :]
            nt = nt + m
    fw.close()
| 145 | |
---|
# Finally create the output file and write out the collated sets
#
if args.fcre:
    # -O given: build the global restart file from scratch.
    try:
        fo = Dataset(pathout, 'w', format='NETCDF4')
    # Narrowed from a bare "except:"; prints converted from Python 2
    # print statements (syntax errors under Python 3).
    except Exception:
        print('Error accessing output file: ' + pathout)
        print('Check it is a writable location.')
        sys.exit(13)
else:
    # Copy 2D variables across to output file from input file. This step avoids problems if rebuild_nemo
    # has created an "n" dimension in the prototype rebuilt file (ie. if there are icebergs on the zeroth
    # processor).
    workfile = pathout.replace('.nc', '_WORK.nc')
    try:
        os.rename(pathout, workfile)
    except OSError:
        print('Error: unable to move icebergs restart file: ' + pathout)
        sys.exit(14)
    #
    try:
        fi = Dataset(workfile, 'r')
    except Exception:
        print('Error: unable to open icebergs restart file: ' + workfile)
        sys.exit(15)
    fo = Dataset(pathout, 'w')
    for dim in ['x', 'y', 'c', 'k']:
        indim = fi.dimensions[dim]
        fo.createDimension(dim, len(indim))
    for var in ['kount', 'calving', 'calving_hflx', 'stored_ice', 'stored_heat']:
        invar = fi.variables[var]
        fo.createVariable(var, invar.datatype, invar.dimensions)
        fo.variables[var][:] = invar[:]
        if "long_name" in invar.ncattrs():
            fo.variables[var].long_name = invar.long_name
        if "units" in invar.ncattrs():
            fo.variables[var].units = invar.units
    # Close the work file before deleting it: the original leaked this
    # handle, and removing a still-open file fails outright on Windows.
    fi.close()
    os.remove(workfile)
#
# The collated trajectory data hangs off the unlimited dimension 'n',
# which must not already exist in the output file.
add_k = 1
for d in fo.dimensions:
    if d == 'n':
        print('Error: dimension n already exists in output file')
        sys.exit(16)
    if d == 'k':
        add_k = 0
onn = fo.createDimension('n', None)
if add_k > 0:
    onk = fo.createDimension('k', nk)
def _traj_var(name, dtype, dims=('n',)):
    # Create one trajectory variable hanging off the unlimited 'n' dimension.
    return fo.createVariable(name, dtype, dims)

# Per-iceberg scalar state (double precision) plus the integer calving
# year and the 2-D (n, k) iceberg identification number.
olon = _traj_var('lon', 'f8')
olat = _traj_var('lat', 'f8')
oxis = _traj_var('xi', 'f8')
oyjs = _traj_var('yj', 'f8')
ouvs = _traj_var('uvel', 'f8')
ovvs = _traj_var('vvel', 'f8')
omas = _traj_var('mass', 'f8')
oths = _traj_var('thickness', 'f8')
owis = _traj_var('width', 'f8')
oles = _traj_var('length', 'f8')
odys = _traj_var('day', 'f8')
omss = _traj_var('mass_scaling', 'f8')
omsb = _traj_var('mass_of_bits', 'f8')
ohds = _traj_var('heat_density', 'f8')
oyrs = _traj_var('year', 'i4')
onum = _traj_var('number', 'i4', ('n', 'k'))
#
# Write the collated data into the output variables and attach the
# long_name/units metadata, driven by one table instead of 16 stanzas.
for ovar, data, _long_name, _units in (
    (olon, lons, "longitude", "degrees_E"),
    (olat, lats, "latitude", "degrees_N"),
    (oxis, xis, "x grid box position", "fractional"),
    (oyjs, yjs, "y grid box position", "fractional"),
    (ouvs, uvs, "zonal velocity", "m/s"),
    (ovvs, vvs, "meridional velocity", "m/s"),
    (omas, mas, "mass", "kg"),
    (oths, ths, "thickness", "m"),
    (owis, wis, "width", "m"),
    (oles, les, "length", "m"),
    (odys, dys, "year day of calving event", "days"),
    (omss, mss, "scaling factor for mass of calving berg", "none"),
    (omsb, msb, "mass of bergy bits", "kg"),
    (ohds, hds, "heat density", "J/kg"),
    (oyrs, yrs, "calendar year of calving event", "years"),
):
    ovar[:] = data
    ovar.long_name = _long_name
    ovar.units = _units
#
# The iceberg number is 2-D (n, k), so it is written separately.
onum[:, :] = num
onum.long_name = "iceberg number on this processor"
onum.units = "count"
#
fo.close()
---|