New URL for NEMO forge!   http://forge.nemo-ocean.eu

Since March 2022, along with the NEMO 4.2 release, code development has moved to a self-hosted GitLab.
This forge is now archived and remains online for historical reference.
icb_combrest.py in trunk/NEMOGCM/TOOLS/REBUILD_NEMO – NEMO

source: trunk/NEMOGCM/TOOLS/REBUILD_NEMO/icb_combrest.py @ 6140

Last change on this file since 6140 was 6140, checked in by timgraham, 9 years ago

Merge of branches/2015/dev_merge_2015 back into trunk. Merge excludes NEMOGCM/TOOLS/OBSTOOLS/ for now due to issues with the change of file type. Will sort these manually with further commits.

Branch merged as follows:
In the working copy of the branch, ran:
svn merge svn+ssh://forge.ipsl.jussieu.fr/ipsl/forge/projets/nemo/svn/trunk@HEAD
There were small conflicts due to bug fixes applied to the trunk since dev_merge_2015 was copied. The same bug fixes had been applied to the branch as well, so these were easy to resolve.
The branch was committed at this stage.

In the working copy, ran:
svn switch svn+ssh://forge.ipsl.jussieu.fr/ipsl/forge/projets/nemo/svn/trunk
to switch the working copy.

Then ran:
svn merge --reintegrate svn+ssh://forge.ipsl.jussieu.fr/ipsl/forge/projets/nemo/svn/branches/2015/dev_merge_2015
to merge the branch into the trunk, and then committed - no conflicts at this stage.

  • Property svn:keywords set to Id
File size: 7.5 KB
from netCDF4 import Dataset
from argparse import ArgumentParser
import numpy as np
import sys

#
# Basic iceberg trajectory restart post-processing python script.
# This script collects iceberg information from the distributed restarts written
# out by each processing region and writes the information into a global restart file.
# The global restart file must already exist and contain the collated 2D spatial fields
# as prepared by utilities such as rebuild_nemo. This python script simply adds the
# iceberg position and state data that is held using the unlimited dimension 'n' which
# has been ignored by rebuild_nemo. Each processing region that contains icebergs will
# (probably) have a different number of icebergs (indicated by differing values for the
# current size of the unlimited dimension). This script collects all icebergs into a
# single unordered list.
#

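# Example invocation (mirrors the example in the argparse description below;
# the file root and processor count are placeholders for a real run):
#   python icb_combrest.py -f icebergs_00692992_restart_ -n 480 -o icebergs_restart.nc
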
parser = ArgumentParser(description='produce a global trajectory restart file from distributed output\
                                     files, e.g. \n python ./icb_combrest.py \
                                     -f icebergs_00692992_restart_ -n 480 -o icebergs_restart.nc [-O]')

parser.add_argument('-f', dest='froot', help='fileroot_of_distributed_data; root name of \
                    distributed trajectory restart files (usually completed with XXXX.nc, where \
                    XXXX is the 4-digit processor number) [default: icebergs_00692992_restart_]',
                    default=None)

parser.add_argument('-n', dest='fnum', help='number of distributed files to process',
                    type=int, default=None)

parser.add_argument('-o', dest='fout', help='global_iceberg_restart; file name to append the \
                    global iceberg restart data to [default: icebergs_restart.nc]',
                    default=None)

parser.add_argument('-O', dest='fcre', help='create the output file from scratch rather than \
                    append to an existing file',
                    action='store_true', default=False)

args = parser.parse_args()
default_used = 0
if args.froot is None:
    pathstart = 'icebergs_00692992_restart_'
    default_used = 1
else:
    pathstart = args.froot

if args.fnum is None:
    procnum = 0
    default_used = 1
else:
    procnum = args.fnum

if args.fout is None:
    pathout = 'icebergs_restart.nc'
    default_used = 1
else:
    pathout = args.fout

if default_used == 1:
    print('At least one default value will be used; command executing is:')
    print('icb_combrest.py -f ', pathstart, ' -n ', procnum, ' -o ', pathout)

if procnum < 1:
    print('Need some files to collate! procnum = ', procnum)
    sys.exit()

icu = []
times = []
ntraj = 0
nk = 0
#
# Loop through all distributed datasets to obtain the total number
# of icebergs to transfer
#
for n in range(procnum):
    nn = '%4.4d' % n          # 4-digit, zero-padded processor number
    try:
        fw = Dataset(pathstart + nn + '.nc')
    except (OSError, RuntimeError):
        print('Error: unable to open input file: ' + pathstart + nn + '.nc')
        sys.exit()
    if 'n' in fw.dimensions:
        if len(fw.dimensions['n']) > 0:
            # print('icebergs found in: ' + pathstart + nn + '.nc')
            if len(fw.dimensions['k']) > nk:
                nk = len(fw.dimensions['k'])
            ntraj = ntraj + len(fw.dimensions['n'])
    fw.close()
#
print(ntraj, ' icebergs found across all datasets')
#
# Declare arrays to receive the data from all files
# (1-D per-iceberg fields, plus the 2-D iceberg-number field)
#
lons = np.zeros(ntraj)
lats = np.zeros(ntraj)
xis  = np.zeros(ntraj)
yjs  = np.zeros(ntraj)
uvs  = np.zeros(ntraj)
vvs  = np.zeros(ntraj)
mas  = np.zeros(ntraj)
ths  = np.zeros(ntraj)
wis  = np.zeros(ntraj)
les  = np.zeros(ntraj)
dys  = np.zeros(ntraj)
mss  = np.zeros(ntraj)
msb  = np.zeros(ntraj)
hds  = np.zeros(ntraj)
yrs  = np.zeros(ntraj      , dtype=int)
num  = np.zeros((ntraj, nk), dtype=int)
#
# Loop through the distributed datasets again, this time
# collecting all trajectory data
#
nt = 0
for n in range(procnum):
    nn = '%4.4d' % n
    fw = Dataset(pathstart + nn + '.nc')
    # Note many distributed datafiles will contain no iceberg data,
    # so skip quickly over these
    if 'n' in fw.dimensions:
        m = len(fw.dimensions['n'])
        if m > 0:
            # print(pathstart + nn + '.nc')
            lons[nt:nt+m]  = fw.variables['lon'][:]
            lats[nt:nt+m]  = fw.variables['lat'][:]
            xis[nt:nt+m]   = fw.variables['xi'][:]
            yjs[nt:nt+m]   = fw.variables['yj'][:]
            uvs[nt:nt+m]   = fw.variables['uvel'][:]
            vvs[nt:nt+m]   = fw.variables['vvel'][:]
            mas[nt:nt+m]   = fw.variables['mass'][:]
            ths[nt:nt+m]   = fw.variables['thickness'][:]
            wis[nt:nt+m]   = fw.variables['width'][:]
            les[nt:nt+m]   = fw.variables['length'][:]
            dys[nt:nt+m]   = fw.variables['day'][:]
            mss[nt:nt+m]   = fw.variables['mass_scaling'][:]
            msb[nt:nt+m]   = fw.variables['mass_of_bits'][:]
            hds[nt:nt+m]   = fw.variables['heat_density'][:]
            yrs[nt:nt+m]   = fw.variables['year'][:]
            num[nt:nt+m,:] = fw.variables['number'][:,:]
            nt = nt + m      # nt should equal ntraj once all files are processed
    fw.close()

# Finally create the output file and write out the collated sets
#
if args.fcre:
    try:
        fo = Dataset(pathout, 'w', format='NETCDF4')
    except (OSError, RuntimeError):
        print('Error accessing output file: ' + pathout)
        print('Check it is a writable location.')
        sys.exit()
else:
    try:
        fo = Dataset(pathout, 'r+', format='NETCDF4')
    except (OSError, RuntimeError):
        print('Error accessing output file: ' + pathout)
        print('Check it exists and is writable.')
        print('Or run adding the -O option to create an output file which will')
        print('contain the iceberg state data only.')
        sys.exit()
#
add_k = 1
for d in fo.dimensions:
    if d == 'n':
        print('Error: dimension n already exists in output file')
        sys.exit()
    if d == 'k':
        add_k = 0
onn  = fo.createDimension('n', None)
if add_k > 0:
    onk = fo.createDimension('k', nk)
olon = fo.createVariable('lon', 'f8', ('n',))
olat = fo.createVariable('lat', 'f8', ('n',))
oxis = fo.createVariable('xi', 'f8', ('n',))
oyjs = fo.createVariable('yj', 'f8', ('n',))
ouvs = fo.createVariable('uvel', 'f8', ('n',))
ovvs = fo.createVariable('vvel', 'f8', ('n',))
omas = fo.createVariable('mass', 'f8', ('n',))
oths = fo.createVariable('thickness', 'f8', ('n',))
owis = fo.createVariable('width', 'f8', ('n',))
oles = fo.createVariable('length', 'f8', ('n',))
odys = fo.createVariable('day', 'f8', ('n',))
omss = fo.createVariable('mass_scaling', 'f8', ('n',))
omsb = fo.createVariable('mass_of_bits', 'f8', ('n',))
ohds = fo.createVariable('heat_density', 'f8', ('n',))
oyrs = fo.createVariable('year', 'i4', ('n',))
onum = fo.createVariable('number', 'i4', ('n', 'k'))
#
olon[:] = lons
olon.long_name = "longitude"
olon.units = "degrees_E"
#
olat[:] = lats
olat.long_name = "latitude"
olat.units = "degrees_N"
#
oxis[:] = xis
oxis.long_name = "x grid box position"
oxis.units = "fractional"
#
oyjs[:] = yjs
oyjs.long_name = "y grid box position"
oyjs.units = "fractional"
#
ouvs[:] = uvs
ouvs.long_name = "zonal velocity"
ouvs.units = "m/s"
#
ovvs[:] = vvs
ovvs.long_name = "meridional velocity"
ovvs.units = "m/s"
#
omas[:] = mas
omas.long_name = "mass"
omas.units = "kg"
#
oths[:] = ths
oths.long_name = "thickness"
oths.units = "m"
#
owis[:] = wis
owis.long_name = "width"
owis.units = "m"
#
oles[:] = les
oles.long_name = "length"
oles.units = "m"
#
odys[:] = dys
odys.long_name = "year day of calving event"
odys.units = "days"
#
omss[:] = mss
omss.long_name = "scaling factor for mass of calving berg"
omss.units = "none"
#
omsb[:] = msb
omsb.long_name = "mass of bergy bits"
omsb.units = "kg"
#
ohds[:] = hds
ohds.long_name = "heat density"
ohds.units = "J/kg"
#
oyrs[:] = yrs
oyrs.long_name = "calendar year of calving event"
oyrs.units = "years"
#
onum[:,:] = num
onum.long_name = "iceberg number on this processor"
onum.units = "count"
#
fo.close()
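
A quick sanity check of the collated file is to reopen it and compare the size of the unlimited dimension against the iceberg count reported above. A minimal sketch, assuming the default output name icebergs_restart.nc (substitute whatever was passed to -o):

from netCDF4 import Dataset

# Reopen the collated restart and report how many icebergs it now holds;
# 'icebergs_restart.nc' is the script's default output name and is an
# assumption if a different -o was used.
fo = Dataset('icebergs_restart.nc')
print(len(fo.dimensions['n']), 'icebergs in collated restart file')
fo.close()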