New URL for NEMO forge!   http://forge.nemo-ocean.eu

Since March 2022 along with NEMO 4.2 release, the code development moved to a self-hosted GitLab.
The present forge is now archived and remains online for historical reference.
icb_combrest.py in branches/UKMO/dev_r5518_test_GO6_package_update/NEMOGCM/TOOLS/REBUILD_NEMO – NEMO

source: branches/UKMO/dev_r5518_test_GO6_package_update/NEMOGCM/TOOLS/REBUILD_NEMO/icb_combrest.py @ 7877

Last change on this file since 7877 was 7877, checked in by frrh, 7 years ago

Merge missing swathe of revisions from branches/2015/nemo_v3_6_STABLE/NEMOGCM
using the command:
svn merge -r6424:6477 svn+ssh://forge.ipsl.jussieu.fr/ipsl/forge/projets/nemo/svn/branches/2015/nemo_v3_6_STABLE/NEMOGCM

Note: this required manual conflict resolution of the content of NEMOGCM/TOOLS/SIREN/src/docsrc/
since the existing contents of those directories in the package branch are not consistent
with the contents of branches/2015/nemo_v3_6_STABLE at revision 6424. (This should be an
incidental matter as the content in question only relates to documentation of NEMO tools
and is not relevant to NEMO source code.)

File size: 8.3 KB
Line 
1import os
2from netCDF4 import Dataset
3from argparse import ArgumentParser
4import numpy as np
5import sys
6
7#
8# Basic iceberg trajectory restart post-processing python script.
9# This script collects iceberg information from the distributed restarts written
10# out by each processing region and writes the information into a global restart file.
11# The global restart file must already exist and contain the collated 2D spatial fields
12# as prepared by utilities such as rebuild_nemo. This python script simply adds the
13# iceberg position and state data that is held using the unlimited dimension 'n' which
14# has been ignored by rebuild_nemo. Each processing region that contains icebergs will
15# (probably) have a different number of icebergs (indicated by differing values for the
16# current size of the unlimited dimension). This script collects all icebergs into a
17# single unordered list.
18#
19
#
# Command-line interface.  Note that argparse supplies defaults for -f and -o,
# so the "is None" fall-backs further down only ever fire for -n.
# (Fixed typo "distrbuted" -> "distributed" in the -f help text.)
#
parser = ArgumentParser(description='produce a global trajectory restart file from distributed output\
                                     files, e.g. \n python ./icb_pp.py \
                                     -f  icebergs_00692992_restart_ -n 480 -o icebergs_restart.nc [-O]')

parser.add_argument('-f',dest='froot',help='fileroot_of_distributed_data; root name of \
                     distributed trajectory restart file (usually completed with XXXX.nc, where \
                     XXXX is the 4 digit processor number)',
                     default='icebergs_00692992_restart_')

parser.add_argument('-n',dest='fnum',help='number of distributed files to process',
                     type=int, default=None)

parser.add_argument('-o',dest='fout',help='global_iceberg_restart; file name to append the \
                     global iceberg restart data to.', default='icebergs_restart.nc')

parser.add_argument('-O',dest='fcre',help='Create the output file from scratch rather than \
                     append to an existing file.',
                     action='store_true', default=False)

args = parser.parse_args()
40
# Resolve each option to a concrete value, remembering whether any hard-wired
# default had to be used so the effective command line can be echoed back.
default_used = 0

pathstart = args.froot
if pathstart is None:
    pathstart = 'icebergs_00692992_restart_'
    default_used = 1

procnum = args.fnum
if procnum is None:
    procnum = 0
    default_used = 1

pathout = args.fout
if pathout is None:
    pathout = 'icebergs_restart.nc'
    default_used = 1

if default_used == 1:
    print('At least one default value will be used; command executing is:')
    print('icb_combrest.py -f ', pathstart, ' -n ', procnum, ' -o ', pathout)

# Without an explicit file count there is nothing to collate.
if procnum < 1:
    print('Need some files to collate! procnum = ', procnum)
    sys.exit(11)
67
icu = []
times = []
ntraj = 0
nk = 0
#
# First pass over all distributed datasets: count the total number of
# icebergs to transfer (ntraj) and the largest 'k' dimension seen (nk),
# which sizes the id axis of the 'number' variable.
#
for n in range(procnum):
    nn = '%4.4d' % n
    fname = pathstart + nn + '.nc'
    try:
        fw = Dataset(fname)
    except Exception:
        # Exception (not a bare except) so KeyboardInterrupt/SystemExit
        # still propagate; converted py2 print statement to a call.
        print('Error: unable to open input file: ' + fname)
        sys.exit(12)
    # Only files that actually contain icebergs have a non-empty 'n' dimension.
    if 'n' in fw.dimensions and len(fw.dimensions['n']) > 0:
        nk = max(nk, len(fw.dimensions['k']))
        ntraj = ntraj + len(fw.dimensions['n'])
    fw.close()
91#
print(ntraj, ' icebergs found across all datasets')
#
# Allocate the collated arrays: one 1-D array per iceberg property
# (one entry per iceberg), plus the 2-D integer 'num' array which
# carries nk id numbers per iceberg.
#
(lons, lats, xis, yjs, uvs, vvs, mas,
 ths, wis, les, dys, mss, msb, hds) = (np.zeros(ntraj) for _ in range(14))
yrs = np.zeros(ntraj, dtype=int)
num = np.zeros((ntraj, nk), dtype=int)
#
# Second pass over the distributed datasets: copy every trajectory record
# into the collated arrays.  nt is the running offset of the next free slot.
#
nt = 0
# (destination array, netCDF variable name) pairs for the 1-D fields;
# replaces fifteen hand-written copy statements with one data-driven loop.
fields = [(lons, 'lon'), (lats, 'lat'), (xis, 'xi'), (yjs, 'yj'),
          (uvs, 'uvel'), (vvs, 'vvel'), (mas, 'mass'), (ths, 'thickness'),
          (wis, 'width'), (les, 'length'), (dys, 'day'),
          (mss, 'mass_scaling'), (msb, 'mass_of_bits'),
          (hds, 'heat_density'), (yrs, 'year')]
for n in range(procnum):
    nn = '%4.4d' % n
    fw = Dataset(pathstart + nn + '.nc')
    # Many distributed datafiles contain no iceberg data ('n' missing or
    # empty) so skip quickly over these.
    if 'n' in fw.dimensions:
        m = len(fw.dimensions['n'])
        if m > 0:
            for arr, vname in fields:
                arr[nt:nt + m] = fw.variables[vname][:]
            num[nt:nt + m, :] = fw.variables['number'][:, :]
            nt = nt + m
    fw.close()
145
# Finally create the output file and write out the collated sets
#
if args.fcre:
    # -O given: build the output file from scratch.
    try:
        fo = Dataset(pathout, 'w', format='NETCDF4')
    except Exception:
        print('Error accessing output file: ' + pathout)
        print('Check it is a writable location.')
        sys.exit(13)
else:
    # Copy 2D variables across to output file from input file. This step avoids
    # problems if rebuild_nemo has created an "n" dimension in the prototype
    # rebuilt file (ie. if there are icebergs on the zeroth processor).
    workfile = pathout.replace('.nc', '_WORK.nc')
    try:
        os.rename(pathout, workfile)
    except OSError:
        print('Error: unable to move icebergs restart file: ' + pathout)
        sys.exit(14)
    #
    try:
        fi = Dataset(workfile, 'r')
    except Exception:
        print('Error: unable to open icebergs restart file: ' + workfile)
        sys.exit(15)
    fo = Dataset(pathout, 'w')
    # Recreate the collated spatial dimensions and copy the 2-D/scalar
    # state variables (with their attributes) into the new file.
    for dim in ['x', 'y', 'c', 'k']:
        fo.createDimension(dim, len(fi.dimensions[dim]))
    for var in ['kount', 'calving', 'calving_hflx', 'stored_ice', 'stored_heat']:
        invar = fi.variables[var]
        fo.createVariable(var, invar.datatype, invar.dimensions)
        fo.variables[var][:] = invar[:]
        if "long_name" in invar.ncattrs():
            fo.variables[var].long_name = invar.long_name
        if "units" in invar.ncattrs():
            fo.variables[var].units = invar.units
    # Close the work file before deleting it (the original leaked the
    # handle, which makes os.remove fail on some platforms).
    fi.close()
    os.remove(workfile)
#
# The iceberg state is indexed along the unlimited dimension 'n'; make sure
# the output does not already have one (that would mean rebuild_nemo put
# iceberg data in the prototype file), then define it plus the per-iceberg
# variables.  'k' may already exist if it was copied above.
#
add_k = 1
if 'n' in fo.dimensions:
    print('Error: dimension n already exists in output file')
    sys.exit(16)
if 'k' in fo.dimensions:
    add_k = 0
onn = fo.createDimension('n', None)
if add_k > 0:
    onk = fo.createDimension('k', nk)
olon = fo.createVariable('lon', 'f8', ('n'))
olat = fo.createVariable('lat', 'f8', ('n'))
oxis = fo.createVariable('xi', 'f8', ('n'))
oyjs = fo.createVariable('yj', 'f8', ('n'))
ouvs = fo.createVariable('uvel', 'f8', ('n'))
ovvs = fo.createVariable('vvel', 'f8', ('n'))
omas = fo.createVariable('mass', 'f8', ('n'))
oths = fo.createVariable('thickness', 'f8', ('n'))
owis = fo.createVariable('width', 'f8', ('n'))
oles = fo.createVariable('length', 'f8', ('n'))
odys = fo.createVariable('day', 'f8', ('n'))
omss = fo.createVariable('mass_scaling', 'f8', ('n'))
omsb = fo.createVariable('mass_of_bits', 'f8', ('n'))
ohds = fo.createVariable('heat_density', 'f8', ('n'))
oyrs = fo.createVariable('year', 'i4', ('n'))
onum = fo.createVariable('number', 'i4', ('n', 'k'))
#
# Fill the per-iceberg variables and attach their metadata, in the same
# order as the original per-variable statements.
#
for ovar, data, lname, uname in [
        (olon, lons, "longitude", "degrees_E"),
        (olat, lats, "latitude", "degrees_N"),
        (oxis, xis, "x grid box position", "fractional"),
        (oyjs, yjs, "y grid box position", "fractional"),
        (ouvs, uvs, "zonal velocity", "m/s"),
        (ovvs, vvs, "meridional velocity", "m/s"),
        (omas, mas, "mass", "kg"),
        (oths, ths, "thickness", "m"),
        (owis, wis, "width", "m"),
        (oles, les, "length", "m"),
        (odys, dys, "year day of calving event", "days"),
        (omss, mss, "scaling factor for mass of calving berg", "none"),
        (omsb, msb, "mass of bergy bits", "kg"),
        (ohds, hds, "heat density", "J/kg"),
        (oyrs, yrs, "calendar year of calving event", "years")]:
    ovar[:] = data
    ovar.long_name = lname
    ovar.units = uname
#
# 'number' is 2-D (iceberg, k) so it is written separately.
onum[:, :] = num
onum.long_name = "iceberg number on this processor"
onum.units = "count"
#
fo.close()
Note: See TracBrowser for help on using the repository browser.