Changeset 708 for trunk


Timestamp: 04/27/16 01:37:34 (8 years ago)
Author: llmlod
Message: Correction with spatially smoothed coefficients (to be checked)

Location: trunk/src
Files: 2 edited

Legend: unmodified lines are shown without a prefix; '-' marks removed lines, '+' marks added lines.
  • trunk/src/file_Tbcorr_daily.py

r707 → r708

import os
from pylab import date2num
-from numpy import arange, size, hstack, dstack, dtype, argsort, nanmean, isnan
+from numpy import arange, size, hstack, dstack, dtype, argsort, mean, nanmean, isnan
from dateutil.rrule import rrule, DAILY
from useCorrTb_daily import applycorr2orb, SatName
…
                        type=valid_date,
                        metavar='<yyyymmdd>',
-                        default=datetime.date(2008, 5, 1),
+                        default=datetime.date(2011, 1, 3),
                        help=('first date yyyymmdd'))

…
                        type=valid_date,
                        metavar='<yyyymmdd>',
-                        default=datetime.date(2008, 5, 3),
+                        default=datetime.date(2011, 1, 3),
                        help=('last date yyyymmdd'))

…

    # number of processes
-    npr = 10
-
+    npr = 30
    for day in rrule(DAILY, dtstart=dmin, until=dmax):
        logger.info('{0}'.format(day))
…
        repsat = [ii+ss for ii, ss in zip(inst, sat)]
        fname = hstack([glob('/bdd/AMSU-1C/%s/L1C/*/%s/*.L1C' % (reps, repd)) for reps, repd in it.product(repsat, repday)])
-
+
        ### Read files (in parallel with Pool)
        pool = multiprocessing.Pool(processes=npr)
…

        # Fillvalue
-        tbcorr[isnan(tbcorr)] = -99.99
+        fvalue = -9999
+        tbcorr[isnan(tbcorr)] = fvalue

        # Save in netcdf file
        logger.info('Save in nc')
-        nsat = satvec.size
        nlon = lonvec.size
        nlat = latvec.size
        torig = datetime.date(1970, 1, 1)
        for cc, ich in enumerate(ch):
+            # Index to remove orbites completly
+            iorb = (mean(tbcorr[cc,:,:].squeeze(),axis=0) != fvalue)
+            nsat = satvec[iorb].size
            # Output file name
            output = '%sTbCorr_AMSUMHS_%s_%3.2fdeg_%s.nc' % (rep_fic, ich, resol, day.strftime('%Y%m%d'))
…
            sats.long_name = 'Satellite'
            sats.description = [str(num)+' = '+ns +', ' for num, ns in zip(satdef.satnum, satdef.satname)]
-            sats[:] = satnum
+            sats[:] = satnum[iorb]

            # Creation variable
…
            dates.long_name = 'Time'
            dates.units = 'hours since %s'%torig.strftime('%Y-%m-%d %H:%M:%S')
-            dates[:] = ((tobs-date2num(torig))*24.).reshape(nlat, nlon, nsat)
+            dates[:] = ((tobs[:,iorb]-date2num(torig))*24.).reshape(nlat, nlon, nsat)

            sclfactor = .01
-            tbfinal = ncfile.createVariable('tbcorr', dtype('int16').char, ('lat', 'lon', 'satellite', ), fill_value=-9999.)
+            tbfinal = ncfile.createVariable('tbcorr', dtype('int16').char, ('lat', 'lon', 'satellite', ), fill_value=fvalue)
            tbfinal.description = 'Corrected Tb of channel %s'%ich
            tbfinal.units = 'Kelvin'
            tbfinal.scale_factor = sclfactor
-            tbfinal[:] = tbcorr[cc, :, :].reshape(nlat, nlon, nsat)
+            tbfinal[:] = tbcorr[cc, :, iorb].reshape(nlat, nlon, nsat)

            # Fermeture du fichier
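
The change above swaps the hard-coded -99.99 fill for a named fvalue and, for each channel, drops satellites whose whole column is fill before writing the int16 'tbcorr' variable with a 0.01 scale factor. Below is a minimal sketch of that masking and packing on synthetic arrays; the shapes, values and satellite numbers are invented here, not taken from the changeset.

    import numpy as np

    fvalue = -9999
    nch, npix, nsat = 2, 6, 3              # channels, grid points, satellites (assumed shapes)
    tbcorr = np.full((nch, npix, nsat), fvalue, dtype=float)
    tbcorr[:, :, 0] = 250.0                # pretend only the first satellite has data
    satvec = np.array([15, 17, 18])        # hypothetical satellite numbers

    for cc in range(nch):
        # keep only satellites whose column is not entirely fill values,
        # in the spirit of the new `iorb` mask
        iorb = np.mean(tbcorr[cc], axis=0) != fvalue
        kept_sats = satvec[iorb]
        kept_tb = tbcorr[cc][:, iorb]      # shape (npix, number of kept satellites)
        # what a netCDF scale_factor of 0.01 amounts to when packing to int16
        packed = np.round(kept_tb / 0.01).astype(np.int16)
        print(cc, kept_sats, packed.shape)

Packing with round(value / scale_factor) is roughly what the netCDF4 library does automatically on write when scale_factor is set on an int16 variable.
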
  • trunk/src/useCorrTb_daily.py

r707 → r708

import time
import netCDF4
-from pylab import date2num, num2date, flatten
-from numpy import array, arange, float32, NaN, transpose, int16, diff, logical_and, meshgrid, ravel, zeros, unique, nanmean
+from pylab import date2num, num2date#, flatten
+from numpy import array, arange, float32, NaN, transpose, int16, diff, logical_and, meshgrid, ravel, zeros, unique, nanmean, isnan, hstack
from ReadRawFile import ReadRawFile
from scipy import spatial
…
    logger = logging.getLogger()

+    if ch[0].lower() == 'all':
+        ch = ['B1', 'B2', 'B3', 'B4', 'B5']
+
    # Read orbit file
    lon, lat, dt, tb, fov = load_amsub_mhs(fname, ch, area)

    try:
-        # Class of each orbit points
-        clz = find_class(lat, lon)
-        nozero = (clz != 0)
-
-    except ValueError:
+        # Coef of correction as a function of classification, fov and day of year
+        coefz = find_corrcoef(dt, lon, lat, fov, ch)
+        nonan = ~isnan(coefz[0,:])
+
+    except StandardError:
        logger.error('Orbit is out of the area: {0}'.format(fname))
        return [], [], []

    else:
-        # Coef of correction as a function of classification, fov and day of year
-        coefz = find_corrcoef(dt[nozero], fov[nozero], clz[nozero], ch=ch)
-        tbcorr = array(tb)[:, nozero] - coefz
+        tbcorr = array(tb)[:,nonan] - coefz[:,nonan]
        # Average on new grid
-        tbcorrm, tobsm = newgrid(lonvec, latvec, lon[nozero], lat[nozero], dt[nozero], tbcorr)
+        tbcorrm, tobsm = newgrid(lonvec, latvec, lon[nonan], lat[nonan], dt[nonan], tbcorr)

    logger.info('Deal {0} in: {1}'.format(fname, time.time()-t))
…
    *Inputs:
    fname = file name
-    ch    = list of extrated channel ('B1', 'B2', 'B3', 'B4', 'B5' or 'All'), default 'All'
-    area  = limits [LS, LN, LW, LE] for extracted area, default is world (ie [-35, 35, -180., 180])
+    ch    = list of extrated channel ('B1', 'B2', 'B3', 'B4', 'B5')
+    area  = limits [LS, LN, LW, LE] for extracted area, default is Tropics (ie [-35, 35, -180., 180])

    *Outputs:
…
    """

-    # Index of channel
-    ch = [cch.lower() for cch in ch]
-    if ch[0] == 'all':
-        ich = arange(5)
-    else:
-        ich = array([int(cch[1])-1 for cch in ch])
+    # Index channel to extract
+    ich = array([int(cch[1])-1 for cch in ch])

    project = os.environ['PROJECT']
…
    return h, m, s

-# ---Find classes--- #
-def find_class(lat, lon):
-    """Find class of each given (lat, lon)
-    *Inputs:
+# ---Find coefficient of correction--- #
+def find_corrcoef(dt, lon, lat, fov, ch):
+    """Find the coefficient of correction
+    *Inputs:
+    dt  = Python date
    lat  = Latitude (degrees)
    lon  = Longitude (degrees)
-
-    *Outputs:
-    clpt = class of each point (same shape as lat and lon)
-    """
-    project = os.environ['PROJECT']
-    # Read the classification file
-    fic_class = '{0}/data/classifERAI_Trop.nc'.format(project)
-    fileID = netCDF4.Dataset(fic_class)
-    lat_class = fileID.variables['lat'][:]
-    lon_class = fileID.variables['lon'][:]
-    classif = int16(fileID.variables['classif_tot'][:])
-    fileID.close()
-
-    # Latitude limits of classification (ie tropical band)
-    latstep = abs(diff(lat_class))[0]
-    latmin = lat_class.min()-latstep/2.
-    latmax = lat_class.max()+latstep/2.
-    # Only points in the area of classification
-    inclass = logical_and(lat >= latmin, lat <= latmax)
-
-    # Geographic projection (lon/lat in degrees to x/y in m)
-    projG = pyproj.Proj("+proj=merc +ellps=WGS84 +datum=WGS84")
-    longd_class, latgd_class = meshgrid(lon_class, lat_class)
-    xclass, yclass = projG(longd_class, latgd_class)
-    xx, yy = projG(lon[inclass], lat[inclass])
-
-    # Find the nearest neighbour (euclidian distance) with a kd-tree
-    tree = spatial.cKDTree(zip(ravel(xclass), ravel(yclass)))
-    _, ind = tree.query(zip(xx, yy))
-    clpt = zeros(lat.shape, dtype=int16)
-    clpt[inclass] = classif.flatten()[ind]
-
-    return clpt
-
-# ---Find the coefficient of correction--- #
-def find_corrcoef(dt, fov, cl, ch=['All']):
-    """Find the coefficient of correction
-    *Inputs:
-    dt  = Python date
    fov = Location in the swath
-    cl  = Class
-    ch  = list of extrated channel ('B1', 'B2', 'B3', 'B4', 'B5' or 'All'), default 'All'
+    ch  = list of extrated channel ('B1', 'B2', 'B3', 'B4', 'B5')

    *Outputs:
    coef = Coefficient of correction
    """
-    # Index of channel
-    ch = [cch.lower() for cch in ch]
-    if ch[0] == 'all':
-        ich = arange(5)
-    else:
-        ich = array([int(cch[1])-1 for cch in ch])
-
-    # Read the classification file
-    project = os.environ['PROJECT']
-    fic_corr = '{0}/data/corrTb_AMSUB_MHS_L1C.nc'.format(project)
-    fileID = netCDF4.Dataset(fic_corr)
-    day_corr = fileID.variables['day'][:]
-    coef_corr = fileID.variables['coefcorr'][:, :, :, ich]
-    fileID.close()

    # Find the day of year
    yday = array([d.timetuple().tm_yday for d in dt])
-    # Find the day of correction
-    nday = day_corr.size
-    daystep = diff(day_corr)[0]
-    icorr = list(flatten([[ii]*daystep for ii in xrange(nday)])) + [0]*6
-    iday = array(icorr)[yday-1]
-
-    # Find correction
-    noclass = (cl == 0)
-    icl = array(cl-1, dtype=int)
-    ifov = array(fov-1, dtype=int)
+
    coef = []
-    for ii in ich:
-        dd = zeros(cl.shape)+NaN
-        dd[~noclass] = coef_corr[iday[~noclass], icl[~noclass], ifov[~noclass], ii]
-        coef.append(dd)
+    # Read the coefficient correction file for each channel
+    for ich in ch:
+        fic_corr = '/homedata/mleduc/CorrTb/corrTb_AMSUB_MHS_L1C_smooth_%s.nc' % ich
+        fileID = netCDF4.Dataset(fic_corr)
+        coef_corr = fileID.variables['corrTb'][:]
+        if ich == ch[0]:
+            lat_corr = fileID.variables['lat'][:]
+            lon_corr = fileID.variables['lon'][:]
+            day_corr = fileID.variables['day'][:]
+
+            # Latitude limits of classification (ie tropical band)
+            latstep = abs(diff(lat_corr))[0]
+            latmin = lat_corr.min()-latstep/2.
+            latmax = lat_corr.max()+latstep/2.
+
+            # Geographic projection (lon/lat in degrees to x/y in m)
+            projG = pyproj.Proj("+proj=merc +ellps=WGS84 +datum=WGS84")
+            longd_corr, latgd_corr = meshgrid(lon_corr, lat_corr)
+            xcorr, ycorr = projG(longd_corr, latgd_corr)
+
+            # Kd-tree to find the nearest neighbour (euclidian distance)
+            tree = spatial.cKDTree(zip(ravel(xcorr), ravel(ycorr)))
+
+            # Find the day of correction
+            nday = day_corr.size
+            daystep = int(abs(diff(day_corr))[0])
+            icorr = [[ii]*daystep for ii in xrange(nday)] + [0]*6
+
+        fileID.close()
+        # Only points in the area of classification
+        inclass = logical_and(lat >= latmin, lat <= latmax)
+        # index for days
+        iday = hstack(icorr)[yday[inclass]-1]
+        # index for location in the swath
+        ifov = array(fov[inclass]-1, dtype=int)
+        # index for lon/lat
+        xx, yy = projG(lon[inclass], lat[inclass])
+        _, ind = tree.query(zip(xx, yy))
+        # Find coefficient of correction
+        coefch = zeros(lat.shape, dtype=float32)+NaN
+        coefch[inclass] = [coef_corr[dd, :, :, ff].flatten()[ii] for dd, ff, ii in zip(iday, ifov, ind)]
+        # Concatenate channel
+        coef.append(coefch)

    return float32(coef)
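
The rewritten find_corrcoef reads one spatially smoothed coefficient file per channel and matches each observation to the nearest coefficient grid cell with a cKDTree built on the projected grid. Below is a minimal sketch of that nearest-neighbour step on a synthetic grid, skipping the pyproj Mercator projection and the netCDF read; the grid spacing, observation points and coefficients are invented for illustration.

    import numpy as np
    from scipy import spatial

    # hypothetical 1-degree coefficient grid over a small tropical box
    lon_corr = np.arange(-5.0, 5.0, 1.0)
    lat_corr = np.arange(-3.0, 3.0, 1.0)
    longd, latgd = np.meshgrid(lon_corr, lat_corr)
    coef_grid = np.random.rand(latgd.size)            # one coefficient per flattened grid cell

    # build the tree on the flattened grid coordinates (the changeset does this
    # on projected x/y metres; plain degrees are used here for brevity)
    tree = spatial.cKDTree(np.column_stack([longd.ravel(), latgd.ravel()]))

    # scattered observation points
    lon_obs = np.array([0.2, -2.7, 4.1])
    lat_obs = np.array([1.1, 0.4, -2.6])
    _, ind = tree.query(np.column_stack([lon_obs, lat_obs]))

    coef_at_obs = coef_grid[ind]                      # nearest-cell coefficient per observation
    print(ind, coef_at_obs)

The changeset builds the tree once per file (for the first channel) and reuses it for every observation of the orbit; the degree-based query above is only to keep the example self-contained.
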