Changeset 8841 for branches/UKMO/ROMS_WAD_7832/NEMOGCM/TOOLS/MISCELLANEOUS – NEMO

Timestamp: 2017-11-29T05:08:05+01:00
Author: deazer
Message:

Bring in trunk changes at revision 8814.
This revision won't run as is; it requires the next revision with the merged changes.
It serves as a reference point for the changes from the trunk that are brought in by the merge
in the next revision.

Location: branches/UKMO/ROMS_WAD_7832/NEMOGCM/TOOLS/MISCELLANEOUS
Files: 2 edited

Legend: unmodified lines are unprefixed; added lines are prefixed with "+", removed lines with "-".
  • branches/UKMO/ROMS_WAD_7832/NEMOGCM/TOOLS/MISCELLANEOUS/chk_iomput.sh

r4162 → r8841

@@ -37,5 +37,5 @@
       echo '      ./chk_iomput.sh'
       echo '      ./chk_iomput.sh --help'
-      echo '      ./chk_iomput.sh ../../CONFIG/ORCA2_LIM/EXP00/iodef.xml "../../NEMO/OPA_SRC/ ../../NEMO/LIM_SRC_2/"'
+      echo '      ./chk_iomput.sh ../../CONFIG/ORCA2_LIM/EXP00/context_nemo.xml "../../NEMO/OPA_SRC/ ../../NEMO/LIM_SRC_2/"'
       echo
       exit ;;

@@ -51,5 +51,11 @@
 done
 #
-[ ! -f "$xmlfile" ] && echo "$xmlfile not found, we stop..." && exit
+echo $xmlfile
+echo $srcdir
+
+for i in $xmlfile
+do
+    [ ! -f "$xmlfile" ] && echo "$xmlfile not found, we stop..." && exit
+done
 for i in $srcdir
 do

@@ -60,11 +66,26 @@
 #
 external=$( grep -c "<field_definition  *\([^ ].* \)*src=" $xmlfile )
-if [ $external -eq 1 ]
+if [ $external -ge 1 ]
 then
     xmlfield_def=$( grep "<field_definition  *\([^ ].* \)*src=" $xmlfile | sed -e 's/.*src="\([^"]*\)".*/\1/' )
-    xmlfield_def=$( dirname $xmlfile )/$xmlfield_def
+    tmp_def=""
+    for fdef in $xmlfield_def ; do tmp_def="$tmp_def $( dirname $xmlfile )/$fdef" ; done
+    xmlfield_def=$tmp_def
+    echo $xmlfield_def
 else
     xmlfield_def=$xmlfile
 fi
+external=$( grep -c "<file_definition  *\([^ ].* \)*src=" $xmlfile )
+if [ $external -ge 1 ]
+then
+    xmlfile_def=$( grep "<file_definition  *\([^ ].* \)*src=" $xmlfile | sed -e 's/.*src="\([^"]*\)".*/\1/' )
+    tmp_def=""
+    for fdef in $xmlfile_def ; do tmp_def="$tmp_def $( dirname $xmlfile )/$fdef" ; done
+    xmlfile_def=$tmp_def
+    echo $xmlfile_def
+else
+    xmlfile_def=$xmlfile
+fi
+
 [ $inxml -eq 1 ] && grep "< *field  *\([^ ].* \)*id *=" $xmlfield_def
 [ $insrc -eq 1 ] && find $srcdir -name "*.[Ffh]90" -exec grep -iH "^[^\!]*call  *iom_put *(" {} \;

@@ -95,5 +116,5 @@
 # list of variables to be outputed in the xml file
 #
-varlistout=$( grep "< *field  *\([^ ].* \)*field_ref *=" $xmlfile  | sed -e "s/^.*< *field .*field_ref *= *[\"\']\([^\"\']*\)[\"\'].*/\1/" | sort -d )
+varlistout=$( grep "< *field  *\([^ ].* \)*field_ref *=" $xmlfile_def  | sed -e "s/^.*< *field .*field_ref *= *[\"\']\([^\"\']*\)[\"\'].*/\1/" | sort -d )
 #
 echo "--------------------------------------------------"
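For orientation only: the updated usage line above suggests the script is now driven by a v4-style context_nemo.xml rather than iodef.xml, and the new loop accepts a list of xml files in $xmlfile. A hypothetical invocation from TOOLS/MISCELLANEOUS, using the paths quoted in the script's own help text (they must exist in your checkout), would be:

    ./chk_iomput.sh ../../CONFIG/ORCA2_LIM/EXP00/context_nemo.xml "../../NEMO/OPA_SRC/ ../../NEMO/LIM_SRC_2/"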
  • branches/UKMO/ROMS_WAD_7832/NEMOGCM/TOOLS/MISCELLANEOUS/icb_pp.py

r6423 → r8841

@@ -12,19 +12,26 @@
 # cases the missing instances are filled with invalid (NaN) values.
 #
+# Version 2.0 August 2017. Adapted to process all variables and retain original
+#                          datatypes. (acc@noc.ac.uk)
 
-parser = ArgumentParser(description='produce collated trajectory file from distributed output\
-                                     files, e.g. \n python ./icb_pp.py \
-                                     -t  trajectory_icebergs_004248_ -n 296 -o trajsout.nc' )
+parser = ArgumentParser(description='produce collated trajectory file \
+                                     from distributed output files, e.g. \
+                                     \n python ./icb_pp.py \
+                                     -t  trajectory_icebergs_004248_ \
+                                     -n 296 -o trajsout.nc' )
 
-parser.add_argument('-t',dest='froot',help='fileroot_of_distrbuted_data; root name of \
-                     distributed trajectory output (usually completed with XXXX.nc, where \
-                     XXXX is the 4 digit processor number)', 
-                     default='trajectory_icebergs_004248_')
+parser.add_argument('-t',dest='froot',
+                        help='fileroot_of_distrbuted_data; root name \
+                              of  distributed trajectory output (usually \
+                              completed with XXXX.nc, where  XXXX is the \
+                              4 digit processor number)', 
+                     default='trajectory_icebergs_004248_')
 
 parser.add_argument('-n',dest='fnum',help='number of distributed files to process', 
-                     type=int, default=None)
+                        type=int, default=None)
 
-parser.add_argument('-o',dest='fout',help='collated_output_file; file name to receive the \
-                     collated trajectory data', default='trajsout.nc')
+parser.add_argument('-o',dest='fout',
+                        help='collated_output_file; file name to receive \
+                             the collated trajectory data', default='trajsout.nc')
 
 args = parser.parse_args()

@@ -64,17 +71,22 @@
 #
 for n in range(procnum):
-  nn = '%4.4d' % n
-  fw = Dataset(pathstart+nn+'.nc')
-  if len(fw.dimensions['n']) > 0:
-    print pathstart+nn+'.nc'
-    ic = fw.variables['iceberg_number'][:,0]
-    ts = fw.variables['timestep'][:]
-    icv = np.unique(ic)
-    ts = np.unique(ts)
-    print('Min Max ts: ',ts.min(), ts.max())
-    print('Number unique icebergs= ',icv.shape[0])
-    icu.append(icv)
-    times.append(ts)
-  fw.close()
+    nn = '%4.4d' % n
+    fw = Dataset(pathstart+nn+'.nc')
+    # keep a list of the variables in the first dataset
+    if n == 0:
+        varlist = fw.variables
+    #
+    # skip any files with no icebergs
+    if len(fw.dimensions['n']) > 0:
+        print pathstart+nn+'.nc'
+        ic = fw.variables['iceberg_number'][:,0]
+        ts = fw.variables['timestep'][:]
+        icv = np.unique(ic)
+        ts = np.unique(ts)
+        print('Min Max ts: ',ts.min(), ts.max())
+        print('Number unique icebergs= ',icv.shape[0])
+        icu.append(icv)
+        times.append(ts)
+    fw.close()
 #
 # Now flatten the lists and reduce to the unique spanning set

@@ -89,71 +101,87 @@
 print('times range from:        ',times.min(), 'to: ', times.max())
 #
-# Declare 2-D arrays to receive the data from all files
+# Declare array to receive data from all files
 #
 nt = times.shape[0]
-lons = np.zeros((ntraj, nt))
-lats = np.zeros((ntraj, nt))
-tims = np.zeros((ntraj, nt))
-xis  = np.zeros((ntraj, nt))
-yjs  = np.zeros((ntraj, nt))
+#
+n=0
+for key, value in varlist.iteritems() :
+    if key != "iceberg_number" :
+        n = n + 1
+inarr = np.zeros((n, ntraj, nt))
 #
 # initially fill with invalid data
 #
-lons.fill(np.nan)
-lats.fill(np.nan)
-xis.fill(np.nan)
-yjs.fill(np.nan)
-tims.fill(np.nan)
+inarr.fill(np.nan)
+#
+# Declare some lists to store variable names, types and long_name and units attributes
+# iceberg_number gets special treatment
+innam = []
+intyp = []
+inlngnam = []
+inunits = []
+for key, value in varlist.iteritems() :
+    if key != "iceberg_number" :
+        innam.append(key)
+#
+# reopen the first datset to collect variable attributes
+# (long_name and units only)
+#
+nn = '%4.4d' % 0
+fw = Dataset(pathstart+nn+'.nc')
+for key, value in varlist.iteritems() :
+    if key != "iceberg_number" :
+        intyp.append(fw.variables[key].dtype)
+        inlngnam.append(fw.variables[key].getncattr('long_name'))
+        inunits.append(fw.variables[key].getncattr('units'))
+fw.close()
 #
 # loop through distributed datasets again, this time
 # checking indices against icu and times lists and
 # inserting data into the correct locations in the 
-# 2-D collated sets.
+# collated sets.
 #
 for n in range(procnum):
-  nn = '%4.4d' % n
-  fw = Dataset(pathstart+nn+'.nc')
+    nn = '%4.4d' % n
+    fw = Dataset(pathstart+nn+'.nc')
+#
 # Note many distributed datafiles will contain no iceberg data
 # so skip quickly over these
-  m  = len(fw.dimensions['n'])
-  if m > 0:
-    inx = np.zeros(m, dtype=int)
-    tsx = np.zeros(m, dtype=int)
-    print pathstart+nn+'.nc'
-    ic = fw.variables['iceberg_number'][:,0]
-    ts = fw.variables['timestep'][:]
-    lns = fw.variables['lon'][:]
-    lts = fw.variables['lat'][:]
-    xxs = fw.variables['xi'][:]
-    yys = fw.variables['yj'][:]
-    for k in range(m):
-      inxx   = np.where(icu == ic[k])
-      inx[k] = inxx[0]
-    for k in range(m):
-      inxx   = np.where(times == ts[k])
-      tsx[k] = inxx[0]
-    lons[inx[:],tsx[:]] = lns[:]
-    lats[inx[:],tsx[:]] = lts[:]
-    tims[inx[:],tsx[:]] = ts[:]
-    xis[inx[:],tsx[:]] = xxs[:]
-    yjs[inx[:],tsx[:]] = yys[:]
-  fw.close()
-
+    m  = len(fw.dimensions['n'])
+    if m > 0:
+        inx = np.zeros(m, dtype=int)
+        tsx = np.zeros(m, dtype=int)
+        #print pathstart+nn+'.nc'
+        ic = fw.variables['iceberg_number'][:,0]
+        ts = fw.variables['timestep'][:]
+        for k in range(m):
+            inxx   = np.where(icu == ic[k])
+            inx[k] = inxx[0]
+        for k in range(m):
+            inxx   = np.where(times == ts[k])
+            tsx[k] = inxx[0]
+        n = 0
+        for key, value in varlist.iteritems() :
+            if key != "iceberg_number" :
+                insmall = fw.variables[innam[n]][:]
+                inarr[n,inx[:],tsx[:]] = insmall[:]
+                n = n + 1
+    fw.close()
+#
 # Finally create the output file and write out the collated sets
 #
-fo = Dataset(pathout, 'w', format='NETCDF4')
+fo = Dataset(pathout, 'w', format='NETCDF4_CLASSIC')
 ntrj = fo.createDimension('ntraj', ntraj)
 nti  = fo.createDimension('ntime', None)
-olon = fo.createVariable('lon', 'f4',('ntraj','ntime'))
-olat = fo.createVariable('lat', 'f4',('ntraj','ntime'))
-otim = fo.createVariable('ttim', 'f4',('ntraj','ntime'))
-oxis = fo.createVariable('xis', 'f4',('ntraj','ntime'))
-oyjs = fo.createVariable('yjs', 'f4',('ntraj','ntime'))
-icbn = fo.createVariable('icbn', 'f4',('ntraj'))
+icbn = fo.createVariable('iceberg_number', 'i4',('ntraj'))
 icbn[:] = icu
+n = 0
+for key, value in varlist.iteritems() :
+    if key != "iceberg_number" :
+        oout = fo.createVariable(innam[n], intyp[n], ('ntraj','ntime'),
+                                 zlib=True, complevel=1, chunksizes=(1,nt))
+        oout[:,:] = inarr[n,:,:]
+        oout.long_name = inlngnam[n]
+        oout.units = inunits[n]
+        n = n + 1
 fo.close()
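As a usage sketch only: with distributed iceberg files following the default naming used in the script, the collation could be run with the example command from its own help text, and the result inspected with ncdump (an external netCDF utility, assumed to be installed; the processor count 296 is just the help-text example):

    python ./icb_pp.py -t trajectory_icebergs_004248_ -n 296 -o trajsout.nc
    ncdump -h trajsout.nc    # each variable should appear as an (ntraj, ntime) array, plus an integer iceberg_number per trajectory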