def read_radiation_fluxes(t, 
                          fdir = gerb_like_dir,
                          do_cutout = True):
    
    '''
    Reads and scales radiation flux data based on GERB-like SEVIRI retrievals.
    
    
    Parameters
    ----------
    t : datetime object
        a time slot
        
    fdir : str, optional, default =  gerb_like_dir
        file directory name

    do_cutout : bool, optional, default = True
        if SEVIRI cutout is applied
        
        
    Returns
    -------
    lwf : numpy array, 2dim
        long-wave radiation flux
        
    swf_net : numpy array, 2dim
        net short-wave radiation flux

    swf_up : numpy array, 2dim
        upwelling short-wave radiation flux
    '''
    
    # get time string, e.g. 20160801_1200
    time_string = t.strftime('%Y%m%d_%H%M')
    
    # input data from hdf files
    fname = '%s/GL_SEV3_L20_HR_SOL_TH_%s00_ED01.hdf' % (fdir, time_string)

    # raw integer counts from the hdf file ("sw" names for the solar fields)
    lwflux_up = hio.read_var_from_hdf(fname, 'Thermal Flux', subpath='Radiometry').astype(np.int16)
    swflux_up = hio.read_var_from_hdf(fname, 'Solar Flux', subpath='Radiometry').astype(np.int16)
    swflux_down = hio.read_var_from_hdf(fname, 'Incoming Solar Flux', subpath='Angles').astype(np.int16)
    
    # do the scaling
    lwf = scale_radiation( lwflux_up )
    swf_down = scale_radiation( swflux_down )
    swf_up = scale_radiation( swflux_up )

    # NOTE(review): net flux is computed as up minus down here; confirm that
    # this sign convention is intended (net SW is often defined as down - up)
    swf_net = swf_up  -  swf_down

    if do_cutout:
        return gi.cutout_fields([lwf, swf_net, swf_up], SEVIRI_cutout)
    else:
        return lwf, swf_net, swf_up
def read_solar_flux(t, 
                    fluxtype = 'incoming', 
                    fdir = gerb_like_dir,
                    do_cutout = True):
    
    '''
    Reads and scales incoming solar radiation flux data based on GERB-like SEVIRI retrievals.
    
    
    Parameters
    ----------
    t : datetime object
        a time slot

    fluxtype : str, optional, default = 'incoming',
        specify the type of solar flux ('incoming' or 'downwelling' vs. 'upwelling')       
        
    fdir : str, optional, default =  gerb_like_dir
        file directory name

    do_cutout : bool, optional, default = True
        if SEVIRI cutout is applied
        
        
    Returns
    -------
    swf : numpy array, 2dim
        up- or downwelling short-wave radiation flux


    Raises
    ------
    ValueError
        if fluxtype is not 'incoming', 'downwelling' or 'upwelling'
    '''
    
    # get time string
    time_string = t.strftime('%Y%m%d_%H%M')
    
    # input data from hdf files
    fname = '%s/GL_SEV3_L20_HR_SOL_TH_%s00_ED01.hdf' % (fdir, time_string)

    if fluxtype in ['incoming', 'downwelling']:
        sfflux = hio.read_var_from_hdf(fname, 'Incoming Solar Flux', subpath='Angles').astype(np.int16)
    elif fluxtype == 'upwelling':
        sfflux = hio.read_var_from_hdf(fname, 'Solar Flux', subpath='Radiometry').astype(np.int16)
    else:
        # previously an unknown fluxtype fell through and `sfflux` was
        # left unbound, raising a confusing NameError below
        raise ValueError("unknown fluxtype '%s'" % fluxtype)

    # do the scaling
    swf = scale_radiation( sfflux )

    if do_cutout:
        return gi.cutout_fields(swf, SEVIRI_cutout)
    else:
        return swf
# --- stray extraction artifact ("示例#3" example marker) commented out ---
def read_vstack_from_hdflist(flist,
                             vname,
                             itime0=None,
                             nsub=None,
                             subpath=None):
    '''
    Reads a variable stack from hdf list.


    Parameters
    ----------
    flist : list
       list of hdf file names

    vname : str
       variable name 

    itime0 : int, optional, default = None
       first time index to count out data temporally

    nsub : int, optional, default = None
       subsampling factor

    subpath : str, optional, default = None
       subpath for grouped variables, e.g. /main_group/sub_group1/...'
    
    

    Returns
    --------
    v : numpy array
        stack of variable
    '''

    v = []
    for f in flist:

        try:
            vs = hio.read_var_from_hdf(f, vname, subpath=subpath)

            # do subsampling ?  (identity check, not equality, against None)
            if nsub is not None:
                vs = vs[:, ::nsub, ::nsub]

            # optionally cut away the leading time steps
            if itime0 is not None:
                vs = vs[itime0:]

            v.append(vs)
        except (KeyboardInterrupt, SystemExit):
            raise
        except Exception:
            # keep the best-effort behavior: report and continue with the
            # remaining files instead of aborting the whole stack
            print('ERROR for file %s' % f)

    v = np.vstack(v)

    return v
def read_cc_from_fluxdata(t, 
                          fdir = gerb_like_dir,
                          do_cutout = True):
    
    '''
    Reads and scales cloud cover based on GERB-like SEVIRI retrievals.
    
    
    Parameters
    ----------
    t : datetime object
        a time slot
        
    fdir : str, optional, default =  gerb_like_dir
        file directory name

    do_cutout : bool, optional, default = True
        if SEVIRI cutout is applied
        
        
    Returns
    -------
    cc_scaled : numpy array, 2dim
        cloud cover field
    '''
    
    # build the hdf file name for the requested time slot
    fname = '%s/GL_SEV3_L20_HR_SOL_TH_%s00_ED01.hdf' % (
        fdir, t.strftime('%Y%m%d_%H%M'))

    # read raw cloud cover counts and convert to fractional cover
    raw_cc = hio.read_var_from_hdf(
        fname, 'Cloud Cover', subpath='Scene Identification').astype(np.int16)
    cc_scaled = scale_radiation( raw_cc, factor = 0.01 )

    if not do_cutout:
        return cc_scaled

    return gi.cutout_fields(cc_scaled, SEVIRI_cutout)
# --- stray extraction artifact ("示例#5" example marker) commented out ---
def read_mask(region='full_region'):
    '''
    Read region mask.


    Parameters
    ----------
    region : str, optional, default = 'full_region'
        region keyword 


    Returns 
    --------
    dset : dict
        dataset dictionary

    '''

    # read the requested region mask from the NAWDEX regions file
    # and wrap it into a one-entry dataset dictionary
    return {'mask': hio.read_var_from_hdf(nawdex_regions_file, region)}
def nn_reproj_with_index(dset, ind, vnames='all', apply_mask=True, Nan=0):
    '''
    Reprojects data field into Meteosat SEVIRI projection.


    Parameters
    ----------
    dset : dict of numpy arrays
        set of fields that should be reprojected

    ind : numpy array
        interpolation index that maps the field in dset into SEVIRI grid

    vnames : string or list of strings, optional, default = 'all'
        list of variable names to be interpolated

    apply_mask : bool, optional, default = True
        switch if masking with domain mask should be applied

    Nan : float
        value inserted for positions with mask == False


    Returns
    --------
    dset_inter :  dict of numpy arrays
        set of fields that have been interpolated onto SEVIRI grid
    '''

    # prepare masking
    if apply_mask:
        mask = hio.read_var_from_hdf(nawdex_regions_file, 'full_region')
        # plain `bool`: the deprecated `np.bool` alias was identical to it
        mask = mask.astype(bool)

    # prepare variable list
    if vnames == 'all':
        vlist = list(dset.keys())

    elif isinstance(vnames, str):
        vlist = [vnames]

    else:
        vlist = vnames

    # apply interpolation index
    dset_inter = {}

    for vname in vlist:

        # do interpolation (nearest-neighbor gather via the index array)
        v = dset[vname][ind]

        # apply masking if wanted
        if apply_mask:
            v = np.where(mask, v, Nan)

        dset_inter[vname] = v[:]

    return dset_inter
# --- stray extraction artifact ("示例#7" example marker) commented out ---
######################################################################

if __name__ == "__main__":

    # ----------------------------------------------------------------
    date_stamp = sys.argv[1]
    # ================================================================

    # input data -----------------------------------------------------
    ad = '%s/SEVIRI/tseries' % local_data_path
    region_flag = 'de'
    fname = '%s/tseries-%s-%s-rss-%s.h5' % (ad, region_flag, '*', date_stamp)
    fname = glob.glob(fname)[0]

    # bts ............................................................
    bt = hio.read_var_from_hdf(fname, 'bt_108') / 100.
    time_list = hio.read_var_from_hdf(fname, 'time')

    ntime = bt.shape[0]

    # and geo ref ....................................................
    geo = hio.read_dict_from_hdf('%s/georef-de.h5' % ad)
    lon, lat = geo['lon'], geo['lat']
    Nmax, L = get_domain_properties(lon, lat)
    # ================================================================

    # loop over cluster analysis -------------------------------------
    dmins = [10., 20.]
    threshs = [210., 240.]
    out = {}
    for n in range(ntime):
示例#8
0
def read_narval_data(fname):
    '''
    Reads the time stack of Narval data, either meteosat or synsat.

    
    Parameters
    ----------
    fname : str
       filename of data file
    
    
    Returns
    --------
    dset : dict
       dataset dictionary containing georef and bt108 data.


    Raises
    ------
    ValueError
        if the file type prefix or the file extension is not supported
    '''

    # read land sea data ---------------------------------------------
    narval_dir = '%s/icon/narval' % local_data_path
    lsm_name = '%s/aux/narval_landsea_coast_mask.h5' % narval_dir

    print('... read land-sea-mask from %s' % lsm_name)
    dset = hio.read_dict_from_hdf(lsm_name)

    lsm = dset['mask50']
    # ================================================================

    # read bt108 -----------------------------------------------------
    print('... read BT10.8 from %s' % fname)
    basename, file_ext = os.path.splitext(os.path.basename(fname))

    # base date is encoded as the last underscore-separated token
    date = basename.split('_')[-1]
    t0 = datetime.datetime.strptime(date, '%Y%m%d')

    # check if it is obs or sim
    ftype = basename.split('_')[0]
    if ftype in ['msevi', 'trans']:
        subpath = None
    elif ftype == 'synsat':
        subpath = 'synsat_oper'
    else:
        # previously fell through with `subpath` unbound -> NameError
        raise ValueError("unknown file type prefix '%s'" % ftype)

    # read bt108 from hdf or netcdf
    if file_ext == '.h5':
        b3d = hio.read_var_from_hdf(fname, 'IR_108', subpath=subpath) / 100.
    elif file_ext == '.nc':
        vname = 'bt108'
        b3d = ncio.read_icon_4d_data(fname, vname, itime=None)[vname]
    else:
        # previously fell through with `b3d` unbound -> NameError
        raise ValueError("unsupported file extension '%s'" % file_ext)

    # mask invalid values and physically implausible BTs (< 100 K)
    b3d = np.ma.masked_invalid(b3d)
    b3d = np.ma.masked_less(b3d, 100.)

    ntime, nrow, ncol = b3d.shape
    # ================================================================

    # prepare time vector --------------------------------------------
    rel_time = np.arange(1, ntime + 1)
    index_time = np.arange(ntime)

    # days since 1970-01-01 for the file's base date
    day_shift = t0 - datetime.datetime(1970, 1, 1)
    day_shift = day_shift.total_seconds() / (24. * 3600)

    abs_time = day_shift + rel_time / 24.
    # ================================================================

    # get georef .....................................................
    gfile = '%s/aux/target_grid_geo_reference_narval.h5' % narval_dir
    geo = hio.read_dict_from_hdf(gfile)
    lon, lat = geo['lon'], geo['lat']

    # centered sinusoidal
    x, y = gi.ll2xyc(lon, lat)
    area = np.abs(gi.simple_pixel_area(lon, lat))
    # ================================================================

    # prepare output .................................................
    vnames = [
        'x', 'y', 'lon', 'lat', 'lsm', 'area', 'rel_time', 'abs_time',
        'index_time'
    ]
    vvec = [x, y, lon, lat, lsm, area, rel_time, abs_time, index_time]
    dset = dict(zip(vnames, vvec))

    dset['bt108'] = b3d
    dset['input_dir'] = narval_dir
    # ================================================================

    return dset
# --- stray extraction artifact ("示例#9" example marker) commented out ---
def read_cluster_props( vlist,
                        ftype = 'msevi',
                        flist = None,
                        expname = 'basic',
                        filepart = '_narval_DOM01_',
                        fdir = '%s/cluster_properties' % narval_dir,
                        subpath = None,
                        time_masking = True,
                        date = '', 
                        as_array = True):

    '''
    Reads cluster properties from a stack of files.

    
    Parameters
    ----------
    vlist : list
        list of variables (cell properties)
    
    ftype : str, optional, default = 'msevi'
        file type (or mode) that is used for input, e.g. 'msevi' vs. 'synsat'

    flist : list of str, optional, default = None
        input file list

    expname : str, optional, default = 'basic'
        name of the segmentation setup

    filepart : str, optional, default = '_narval_DOM01_'
        part of the filename directly after the ftype string

    fdir : str, optional, default = '%s/cluster_properties' % narval_dir
        file directory
    
    subpath : str, optional, default = None
        subpath for grouped variables, e.g. /main_group/sub_group1/...'
     
  
    time_masking  : str, optional, default = True
        switch if masking based on time variable should be done

    date : str, optional, default = ''
        date or part of date string

    as_array : str, optional, default = True
        if return is provided as numpy array



    Returns
    --------
    dset : dict
       dictionary of cell properties
    '''

    # allow a single variable name as shorthand for a one-element list
    if isinstance(vlist, str):
        vlist = [vlist,]

    # build the file list from the naming convention if none was given
    if flist is None:
        fname = 'clust_prop_%s%s*%s*_%s.h5' % (ftype, filepart, date, expname)
        flist = glob.glob('%s/%s' % (fdir, fname))

    # per-file input: dset[basename][vname] = data
    dset = {}
    for f in sorted(flist):

        print('... read data from %s' % f)
        fname = os.path.basename(f)
        dset[fname] = {}

        for vname in vlist:

            try:
                dset[fname][vname] = hio.read_var_from_hdf(f, vname, subpath = subpath)
            except Exception:
                # best-effort: report the missing variable and go on
                print('Error: %s not in %s' % (vname, f))

    dout = {}
    if as_array:
        for f in sorted(dset.keys()):

            # build a selection mask: cells with rel_time < 120 if time
            # masking is requested, otherwise keep everything
            if 'rel_time' in dset[f] and time_masking:
                tr = dset[f]['rel_time']
                m = (tr < 120)
            else:
                vname = list(dset[f].keys())[0]
                # plain `bool`: the removed `np.bool` alias was identical
                m = np.ones_like(dset[f][vname]).astype(bool)
                
            for vname in dset[f].keys():
                
                try:
                    d = dset[f][vname][m]

                    # append to an existing per-variable list, else start one
                    # (replaces the old try/except-driven append)
                    if isinstance(dout.get(vname), list):
                        dout[vname].append( d )
                    else:
                        dout[vname] = [d,]

                except Exception:
                    # if no masking applies e.g. for rbins
                    dout[vname] = dset[f][vname]


        for vname in dout.keys():

            # choose the stacking function by dimensionality
            if np.ndim( dout[vname][0] ) == 1:
                stack = np.hstack

            elif np.ndim( dout[vname][0] ) >= 2:
                # np.vstack is the non-deprecated spelling of np.row_stack
                stack = np.vstack


            dout[vname] = stack( dout[vname] )

    else:
        dout = dset

    return dout