icb_combrest.py in trunk/NEMOGCM/TOOLS/REBUILD_NEMO – NEMO

source: trunk/NEMOGCM/TOOLS/REBUILD_NEMO/icb_combrest.py @ 6423

Last change on this file since 6423 was 6423, checked in by davestorkey, 9 years ago
  1. Bug fix for icb_combrest.py. Will now cope with a set of iceberg restart files with icebergs on the zeroth processor.
  2. Add nonzero sys.exit codes for icb_combrest.py and icb_pp.py.

See ticket #1708

  • Property svn:keywords set to Id
File size: 8.2 KB
import os
import sys
from argparse import ArgumentParser

import numpy as np
from netCDF4 import Dataset

#
# Basic iceberg trajectory restart post-processing python script.
# This script collects iceberg information from the distributed restarts written
# out by each processing region and writes the information into a global restart file.
# The global restart file must already exist and contain the collated 2D spatial fields
# as prepared by utilities such as rebuild_nemo. This python script simply adds the
# iceberg position and state data that is held using the unlimited dimension 'n', which
# has been ignored by rebuild_nemo. Each processing region that contains icebergs will
# (probably) have a different number of icebergs (indicated by differing values for the
# current size of the unlimited dimension). This script collects all icebergs into a
# single unordered list.
#

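#
# Example invocation, matching the defaults below (the file root and processor
# count here are just the illustrative values from the argument help):
#
#   python icb_combrest.py -f icebergs_00692992_restart_ -n 480 -o icebergs_restart.nc
#
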
parser = ArgumentParser(description='produce a global trajectory restart file from distributed output\
                                     files, e.g. \n python ./icb_combrest.py \
                                     -f icebergs_00692992_restart_ -n 480 -o icebergs_restart.nc [-O]')

parser.add_argument('-f', dest='froot', help='fileroot_of_distributed_data; root name of \
                     distributed trajectory restart file (usually completed with XXXX.nc, where \
                     XXXX is the 4 digit processor number)',
                     default='icebergs_00692992_restart_')

parser.add_argument('-n', dest='fnum', help='number of distributed files to process',
                     type=int, default=None)

parser.add_argument('-o', dest='fout', help='global_iceberg_restart; file name to append the \
                     global iceberg restart data to.', default='icebergs_restart.nc')

parser.add_argument('-O', dest='fcre', help='Create the output file from scratch rather than \
                     append to an existing file.',
                     action='store_true', default=False)

args = parser.parse_args()

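# Fall back to the documented defaults for any argument left unset, and record
# that a default was used so the effective command line can be echoed back.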
default_used = 0
if args.froot is None:
    pathstart = 'icebergs_00692992_restart_'
    default_used = 1
else:
    pathstart = args.froot

if args.fnum is None:
    procnum = 0
    default_used = 1
else:
    procnum = args.fnum

if args.fout is None:
    pathout = 'icebergs_restart.nc'
    default_used = 1
else:
    pathout = args.fout

if default_used == 1:
    print('At least one default value will be used; command executing is:')
    print('icb_combrest.py -f ', pathstart, ' -n ', procnum, ' -o ', pathout)

if procnum < 1:
    print('Need some files to collate! procnum = ', procnum)
    sys.exit(11)

ntraj = 0
nk = 0
#
# Loop through all distributed datasets to obtain the total number
# of icebergs to transfer
#
for n in range(procnum):
    nn = '%4.4d' % n
    try:
        fw = Dataset(pathstart + nn + '.nc')
    except:
        print('Error: unable to open input file: ' + pathstart + nn + '.nc')
        sys.exit(12)
    if 'n' in fw.dimensions:
        if len(fw.dimensions['n']) > 0:
            # print('icebergs found in: ' + pathstart + nn + '.nc')
            if len(fw.dimensions['k']) > nk:
                nk = len(fw.dimensions['k'])
            ntraj = ntraj + len(fw.dimensions['n'])
    fw.close()
#
print(ntraj, ' icebergs found across all datasets')
#
# Declare arrays to receive the data from all files: one 1-D array per scalar
# field, plus the 2-D (n, k) 'number' array
#
lons = np.zeros(ntraj)
lats = np.zeros(ntraj)
xis  = np.zeros(ntraj)
yjs  = np.zeros(ntraj)
uvs  = np.zeros(ntraj)
vvs  = np.zeros(ntraj)
mas  = np.zeros(ntraj)
ths  = np.zeros(ntraj)
wis  = np.zeros(ntraj)
les  = np.zeros(ntraj)
dys  = np.zeros(ntraj)
mss  = np.zeros(ntraj)
msb  = np.zeros(ntraj)
hds  = np.zeros(ntraj)
yrs  = np.zeros(ntraj      , dtype=int)
num  = np.zeros((ntraj, nk), dtype=int)
#
# Loop through the distributed datasets again, this time
# collecting all trajectory data
#
nt = 0
for n in range(procnum):
    nn = '%4.4d' % n
    fw = Dataset(pathstart + nn + '.nc')
    if 'n' in fw.dimensions:
        # Note many distributed datafiles will contain no iceberg data
        # so skip quickly over these
        m = len(fw.dimensions['n'])
        if m > 0:
            # print(pathstart + nn + '.nc')
            lons[nt:nt+m]  = fw.variables['lon'][:]
            lats[nt:nt+m]  = fw.variables['lat'][:]
            xis[nt:nt+m]   = fw.variables['xi'][:]
            yjs[nt:nt+m]   = fw.variables['yj'][:]
            uvs[nt:nt+m]   = fw.variables['uvel'][:]
            vvs[nt:nt+m]   = fw.variables['vvel'][:]
            mas[nt:nt+m]   = fw.variables['mass'][:]
            ths[nt:nt+m]   = fw.variables['thickness'][:]
            wis[nt:nt+m]   = fw.variables['width'][:]
            les[nt:nt+m]   = fw.variables['length'][:]
            dys[nt:nt+m]   = fw.variables['day'][:]
            mss[nt:nt+m]   = fw.variables['mass_scaling'][:]
            msb[nt:nt+m]   = fw.variables['mass_of_bits'][:]
            hds[nt:nt+m]   = fw.variables['heat_density'][:]
            yrs[nt:nt+m]   = fw.variables['year'][:]
            num[nt:nt+m,:] = fw.variables['number'][:,:]
            nt = nt + m
    fw.close()
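#
# At this point nt should equal ntraj: every iceberg counted in the first
# pass has been copied into the collated arrays.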

#
# Finally create the output file and write out the collated sets
#
if args.fcre:
    try:
        fo = Dataset(pathout, 'w', format='NETCDF4')
    except:
        print('Error accessing output file: ' + pathout)
        print('Check it is a writable location.')
        sys.exit(13)
else:
    # Copy 2D variables across to the output file from the input file. This step avoids problems
    # if rebuild_nemo has created an "n" dimension in the prototype rebuilt file (i.e. if there
    # are icebergs on the zeroth processor).
    try:
        os.rename(pathout, pathout.replace('.nc', '_WORK.nc'))
    except OSError:
        print('Error: unable to move icebergs restart file: ' + pathout)
        sys.exit(14)
    #
    try:
        fi = Dataset(pathout.replace('.nc', '_WORK.nc'), 'r')
    except:
        print('Error: unable to open icebergs restart file: ' + pathout.replace('.nc', '_WORK.nc'))
        sys.exit(15)
    fo = Dataset(pathout, 'w')
    for dim in ['x', 'y', 'c']:
        indim = fi.dimensions[dim]
        fo.createDimension(dim, len(indim))
    for var in ['calving', 'calving_hflx', 'stored_ice', 'stored_heat']:
        invar = fi.variables[var]
        fo.createVariable(var, invar.datatype, invar.dimensions)
        fo.variables[var][:] = invar[:]
        fo.variables[var].long_name = invar.long_name
        fo.variables[var].units = invar.units
    # Tidy up: close the prototype file and remove the working copy.
    fi.close()
    os.remove(pathout.replace('.nc', '_WORK.nc'))
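#
# Add the trajectory dimensions next: an unlimited 'n' for the iceberg index
# and, unless the prototype file already provides one, a 'k' dimension sized
# to the largest value found across the distributed files.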
add_k = 1
for d in fo.dimensions:
    if d == 'n':
        print('Error: dimension n already exists in output file')
        sys.exit(16)
    if d == 'k':
        add_k = 0
onn = fo.createDimension('n', None)
if add_k > 0:
    onk = fo.createDimension('k', nk)
olon = fo.createVariable('lon', 'f8', ('n',))
olat = fo.createVariable('lat', 'f8', ('n',))
oxis = fo.createVariable('xi', 'f8', ('n',))
oyjs = fo.createVariable('yj', 'f8', ('n',))
ouvs = fo.createVariable('uvel', 'f8', ('n',))
ovvs = fo.createVariable('vvel', 'f8', ('n',))
omas = fo.createVariable('mass', 'f8', ('n',))
oths = fo.createVariable('thickness', 'f8', ('n',))
owis = fo.createVariable('width', 'f8', ('n',))
oles = fo.createVariable('length', 'f8', ('n',))
odys = fo.createVariable('day', 'f8', ('n',))
omss = fo.createVariable('mass_scaling', 'f8', ('n',))
omsb = fo.createVariable('mass_of_bits', 'f8', ('n',))
ohds = fo.createVariable('heat_density', 'f8', ('n',))
oyrs = fo.createVariable('year', 'i4', ('n',))
onum = fo.createVariable('number', 'i4', ('n', 'k'))
#
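# Copy the collated data into the output variables and attach long_name and
# units attributes to each.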
olon[:] = lons
olon.long_name = "longitude"
olon.units = "degrees_E"
#
olat[:] = lats
olat.long_name = "latitude"
olat.units = "degrees_N"
#
oxis[:] = xis
oxis.long_name = "x grid box position"
oxis.units = "fractional"
#
oyjs[:] = yjs
oyjs.long_name = "y grid box position"
oyjs.units = "fractional"
#
ouvs[:] = uvs
ouvs.long_name = "zonal velocity"
ouvs.units = "m/s"
#
ovvs[:] = vvs
ovvs.long_name = "meridional velocity"
ovvs.units = "m/s"
#
omas[:] = mas
omas.long_name = "mass"
omas.units = "kg"
#
oths[:] = ths
oths.long_name = "thickness"
oths.units = "m"
#
owis[:] = wis
owis.long_name = "width"
owis.units = "m"
#
oles[:] = les
oles.long_name = "length"
oles.units = "m"
#
odys[:] = dys
odys.long_name = "year day of calving event"
odys.units = "days"
#
omss[:] = mss
omss.long_name = "scaling factor for mass of calving berg"
omss.units = "none"
#
omsb[:] = msb
omsb.long_name = "mass of bergy bits"
omsb.units = "kg"
#
ohds[:] = hds
ohds.long_name = "heat density"
ohds.units = "J/kg"
#
oyrs[:] = yrs
oyrs.long_name = "calendar year of calving event"
oyrs.units = "years"
#
onum[:,:] = num
onum.long_name = "iceberg number on this processor"
onum.units = "count"
#
fo.close()