Changeset 8509
- Timestamp: 2017-09-07T17:01:58+02:00 (6 years ago)
- Location: trunk/NEMOGCM
- Files: 2 edited
Legend:
- Unmodified
- Added
- Removed
trunk/NEMOGCM/NEMO/OPA_SRC/TRA/trabbl.F90
r7753 r8509 545 545 CALL wrk_dealloc( jpi, jpj, zmbk ) 546 546 547 ! !* sign of grad(H) at u- and v-points548 mgrhu( jpi,:) = 0 ; mgrhu(:,jpj) = 0 ; mgrhv(jpi,:) = 0 ; mgrhv(:,jpj) = 0547 !* sign of grad(H) at u- and v-points; zero if grad(H) = 0 548 mgrhu(:,:) = 0 ; mgrhv(:,:) = 0 549 549 DO jj = 1, jpjm1 550 550 DO ji = 1, jpim1 551 mgrhu(ji,jj) = INT( SIGN( 1.e0, gdept_0(ji+1,jj,mbkt(ji+1,jj)) - gdept_0(ji,jj,mbkt(ji,jj)) ) ) 552 mgrhv(ji,jj) = INT( SIGN( 1.e0, gdept_0(ji,jj+1,mbkt(ji,jj+1)) - gdept_0(ji,jj,mbkt(ji,jj)) ) ) 551 IF( gdept_0(ji+1,jj,mbkt(ji+1,jj)) - gdept_0(ji,jj,mbkt(ji,jj)) /= 0._wp ) THEN 552 mgrhu(ji,jj) = INT( SIGN( 1.e0, gdept_0(ji+1,jj,mbkt(ji+1,jj)) - gdept_0(ji,jj,mbkt(ji,jj)) ) ) 553 ENDIF 554 ! 555 IF( gdept_0(ji,jj+1,mbkt(ji,jj+1)) - gdept_0(ji,jj,mbkt(ji,jj)) /= 0._wp ) THEN 556 mgrhv(ji,jj) = INT( SIGN( 1.e0, gdept_0(ji,jj+1,mbkt(ji,jj+1)) - gdept_0(ji,jj,mbkt(ji,jj)) ) ) 557 ENDIF 553 558 END DO 554 559 END DO -
trunk/NEMOGCM/TOOLS/MISCELLANEOUS/icb_pp.py
r6423 r8509 12 12 # cases the missing instances are filled with invalid (NaN) values. 13 13 # 14 # Version 2.0 August 2017. Adapted to process all variables and retain original 15 # datatypes. (acc@noc.ac.uk) 14 16 15 parser = ArgumentParser(description='produce collated trajectory file from distributed output\ 16 files, e.g. \n python ./icb_pp.py \ 17 -t trajectory_icebergs_004248_ -n 296 -o trajsout.nc' ) 17 parser = ArgumentParser(description='produce collated trajectory file \ 18 from distributed output files, e.g. \ 19 \n python ./icb_pp.py \ 20 -t trajectory_icebergs_004248_ \ 21 -n 296 -o trajsout.nc' ) 18 22 19 parser.add_argument('-t',dest='froot',help='fileroot_of_distrbuted_data; root name of \ 20 distributed trajectory output (usually completed with XXXX.nc, where \ 21 XXXX is the 4 digit processor number)', 22 default='trajectory_icebergs_004248_') 23 parser.add_argument('-t',dest='froot', 24 help='fileroot_of_distrbuted_data; root name \ 25 of distributed trajectory output (usually \ 26 completed with XXXX.nc, where XXXX is the \ 27 4 digit processor number)', 28 default='trajectory_icebergs_004248_') 23 29 24 30 parser.add_argument('-n',dest='fnum',help='number of distributed files to process', 25 type=int, default=None)31 type=int, default=None) 26 32 27 parser.add_argument('-o',dest='fout',help='collated_output_file; file name to receive the \ 28 collated trajectory data', default='trajsout.nc') 33 parser.add_argument('-o',dest='fout', 34 help='collated_output_file; file name to receive \ 35 the collated trajectory data', default='trajsout.nc') 29 36 30 37 args = parser.parse_args() … … 64 71 # 65 72 for n in range(procnum): 66 nn = '%4.4d' % n 67 fw = Dataset(pathstart+nn+'.nc') 68 if len(fw.dimensions['n']) > 0: 69 print pathstart+nn+'.nc' 70 ic = fw.variables['iceberg_number'][:,0] 71 ts = fw.variables['timestep'][:] 72 icv = np.unique(ic) 73 ts = np.unique(ts) 74 print('Min Max ts: ',ts.min(), ts.max()) 75 print('Number unique icebergs= 
',icv.shape[0]) 76 icu.append(icv) 77 times.append(ts) 78 fw.close() 73 nn = '%4.4d' % n 74 fw = Dataset(pathstart+nn+'.nc') 75 # keep a list of the variables in the first dataset 76 if n == 0: 77 varlist = fw.variables 78 # 79 # skip any files with no icebergs 80 if len(fw.dimensions['n']) > 0: 81 print pathstart+nn+'.nc' 82 ic = fw.variables['iceberg_number'][:,0] 83 ts = fw.variables['timestep'][:] 84 icv = np.unique(ic) 85 ts = np.unique(ts) 86 print('Min Max ts: ',ts.min(), ts.max()) 87 print('Number unique icebergs= ',icv.shape[0]) 88 icu.append(icv) 89 times.append(ts) 90 fw.close() 79 91 # 80 92 # Now flatten the lists and reduce to the unique spanning set … … 89 101 print('times range from: ',times.min(), 'to: ', times.max()) 90 102 # 91 # Declare 2-D arrays to receive the data from all files103 # Declare array to receive data from all files 92 104 # 93 105 nt = times.shape[0] 94 lons = np.zeros((ntraj, nt)) 95 lats = np.zeros((ntraj, nt)) 96 tims = np.zeros((ntraj, nt)) 97 xis = np.zeros((ntraj, nt)) 98 yjs = np.zeros((ntraj, nt)) 106 # 107 n=0 108 for key, value in varlist.iteritems() : 109 if key != "iceberg_number" : 110 n = n + 1 111 inarr = np.zeros((n, ntraj, nt)) 99 112 # 100 113 # initially fill with invalid data 101 114 # 102 lons.fill(np.nan) 103 lats.fill(np.nan) 104 xis.fill(np.nan) 105 yjs.fill(np.nan) 106 tims.fill(np.nan) 115 inarr.fill(np.nan) 116 # 117 # Declare some lists to store variable names, types and long_name and units attributes 118 # iceberg_number gets special treatment 119 innam = [] 120 intyp = [] 121 inlngnam = [] 122 inunits = [] 123 for key, value in varlist.iteritems() : 124 if key != "iceberg_number" : 125 innam.append(key) 126 # 127 # reopen the first datset to collect variable attributes 128 # (long_name and units only) 129 # 130 nn = '%4.4d' % 0 131 fw = Dataset(pathstart+nn+'.nc') 132 for key, value in varlist.iteritems() : 133 if key != "iceberg_number" : 134 intyp.append(fw.variables[key].dtype) 135 
inlngnam.append(fw.variables[key].getncattr('long_name')) 136 inunits.append(fw.variables[key].getncattr('units')) 137 fw.close() 107 138 # 108 139 # loop through distributed datasets again, this time 109 140 # checking indices against icu and times lists and 110 141 # inserting data into the correct locations in the 111 # 2-Dcollated sets.142 # collated sets. 112 143 # 113 144 for n in range(procnum): 114 nn = '%4.4d' % n 115 fw = Dataset(pathstart+nn+'.nc') 145 nn = '%4.4d' % n 146 fw = Dataset(pathstart+nn+'.nc') 147 # 116 148 # Note many distributed datafiles will contain no iceberg data 117 149 # so skip quickly over these 118 m = len(fw.dimensions['n']) 119 if m > 0: 120 inx = np.zeros(m, dtype=int) 121 tsx = np.zeros(m, dtype=int) 122 print pathstart+nn+'.nc' 123 ic = fw.variables['iceberg_number'][:,0] 124 ts = fw.variables['timestep'][:] 125 lns = fw.variables['lon'][:] 126 lts = fw.variables['lat'][:] 127 xxs = fw.variables['xi'][:] 128 yys = fw.variables['yj'][:] 129 for k in range(m): 130 inxx = np.where(icu == ic[k]) 131 inx[k] = inxx[0] 132 for k in range(m): 133 inxx = np.where(times == ts[k]) 134 tsx[k] = inxx[0] 135 lons[inx[:],tsx[:]] = lns[:] 136 lats[inx[:],tsx[:]] = lts[:] 137 tims[inx[:],tsx[:]] = ts[:] 138 xis[inx[:],tsx[:]] = xxs[:] 139 yjs[inx[:],tsx[:]] = yys[:] 140 fw.close() 141 150 m = len(fw.dimensions['n']) 151 if m > 0: 152 inx = np.zeros(m, dtype=int) 153 tsx = np.zeros(m, dtype=int) 154 #print pathstart+nn+'.nc' 155 ic = fw.variables['iceberg_number'][:,0] 156 ts = fw.variables['timestep'][:] 157 for k in range(m): 158 inxx = np.where(icu == ic[k]) 159 inx[k] = inxx[0] 160 for k in range(m): 161 inxx = np.where(times == ts[k]) 162 tsx[k] = inxx[0] 163 n = 0 164 for key, value in varlist.iteritems() : 165 if key != "iceberg_number" : 166 insmall = fw.variables[innam[n]][:] 167 inarr[n,inx[:],tsx[:]] = insmall[:] 168 n = n + 1 169 fw.close() 170 # 142 171 # Finally create the output file and write out the collated sets 143 172 # 144 
fo = Dataset(pathout, 'w', format='NETCDF4 ')173 fo = Dataset(pathout, 'w', format='NETCDF4_CLASSIC') 145 174 ntrj = fo.createDimension('ntraj', ntraj) 146 175 nti = fo.createDimension('ntime', None) 147 olon = fo.createVariable('lon', 'f4',('ntraj','ntime')) 148 olat = fo.createVariable('lat', 'f4',('ntraj','ntime')) 149 otim = fo.createVariable('ttim', 'f4',('ntraj','ntime')) 150 oxis = fo.createVariable('xis', 'f4',('ntraj','ntime')) 151 oyjs = fo.createVariable('yjs', 'f4',('ntraj','ntime')) 152 icbn = fo.createVariable('icbn', 'f4',('ntraj')) 153 olon[:,:] = lons 154 olat[:,:] = lats 155 otim[:,:] = tims 156 oxis[:,:] = xis 157 oyjs[:,:] = yjs 176 icbn = fo.createVariable('iceberg_number', 'i4',('ntraj')) 158 177 icbn[:] = icu 178 n = 0 179 for key, value in varlist.iteritems() : 180 if key != "iceberg_number" : 181 oout = fo.createVariable(innam[n], intyp[n], ('ntraj','ntime'), 182 zlib=True, complevel=1, chunksizes=(1,nt)) 183 oout[:,:] = inarr[n,:,:] 184 oout.long_name = inlngnam[n] 185 oout.units = inunits[n] 186 n = n + 1 159 187 fo.close()
Note: See TracChangeset for help on using the changeset viewer.