Code Example #1
File: from_igra.py  Project: MBlaschek/radiosonde
def from_igra(ident, var=['p', 't', 'r', 'dpd', 'wd', 'ws'], filename=None, attach=None, save=False, verbose=0, **kwargs):
    """Read IGRA data from text

    Parameters
    ----------
    ident       Radiosonde ID (WMO or IGRA ID); a WMO ID is converted to IGRA if possible
    var         list of variables to select
    filename    filename of the txt.gz source file
    attach      radiosonde object to attach the data to
    save        save to HDF5 instead of returning
    verbose     Verbosity

    Returns
    -------
    radiosonde object / save to HDF5
    """

    wmoid = None
    igraid = wmo2igra(ident)  # try to convert WMO to IGRA ?
    if igraid is not None:
        wmoid = ident   # WMO ID
        ident = igraid  # found IGRA ID
        print_verbose("[IGRA] WMO %s to IGRA %s"%(wmoid, ident), verbose)

    data = from_igra_to_dataframe(ident, filename=filename, verbose=verbose)

    lon = data.ix[-1, 'lon'] / 10000.  # IGRA specific
    lat = data.ix[-1, 'lat'] / 10000.
    data = data[var]
    #
    if isinstance(attach, radiosonde):
        attach.add_data('igra', data)
        attach.add_attr('igraid', ident)
        if 'lon' not in attach.attrs:
            attach.add_attr('lon', lon)
        if 'lat' not in attach.attrs:
            attach.add_attr('lat', lat)

        journal("[IGRA] data read with %s" % ident, attach.history, verbose)

    else:
        # try to convert to WMO ID
        if wmoid is None:
            wmoid = igra2wmo(ident)

        if wmoid is None:
            out = radiosonde(id=ident)
        else:
            out = radiosonde(id=wmoid)
            out.add_attr('igraid', ident)

        out.add_data('igra', data)
        out.add_attr('lon', lon)
        out.add_attr('lat', lat)  # convert to float
        out.history.append(now() + " IGRA Data read")
        if save:
            out.save()
        else:
            return out
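
A minimal usage sketch (the station ID is hypothetical and assumes the corresponding IGRA txt.gz file is available locally or via `filename`):

    # the WMO ID is converted to an IGRA ID internally via wmo2igra
    sonde = from_igra('011035', var=['p', 't', 'dpd'], verbose=1)
    print sonde.igra.head()      # IGRA data attached under the name 'igra'
    print sonde.lon, sonde.lat   # station coordinates scaled from the IGRA records
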
Code Example #2
def assessment_report(network, ids=None, directory=None, verbose=0, **kwargs):
    """
    Create figures for an assessment report
    Parameters
    ----------
    network     Network object (provides .infos, .infodata and .directory)
    ids         station IDs passed to the station map
    directory   output directory (defaults to network.directory)
    verbose     Verbosity
    kwargs      additional keyword arguments (e.g. figsize)

    Returns
    -------

    """
    import numpy as np
    import matplotlib.pyplot as plt
    from matplotlib.backends.backend_pdf import PdfPages
    from raso.plot import event_plot
    from raso.support import print_verbose

    if directory is None:
        directory = network.directory

    plt.ioff()
    plot_stationmap(network, ids=ids, save=True, **kwargs)

    # network infos
    nsondes, nvars = network.infos.shape  # 2D
    npages = int(np.ceil(nsondes/100.))  # 100 per page
    pdf = PdfPages(directory + "infos.pdf")
    figsize = kwargs.pop('figsize', (12, 9))  # TODO: make it dependent on nvars
    info_columns = network.infos.columns.tolist()
    for ipage in range(npages):
        print_verbose("INFOS: %s " % ipage, verbose)
        # plot only the 100 stations belonging to this page
        ipage_infos = network.infos.iloc[ipage * 100:(ipage + 1) * 100]
        ax = ipage_infos.plot(subplots=True, kind='bar', legend=False, sharex=True, title=network.name, grid=True,
                              figsize=figsize)
        for i, iax in enumerate(ax):
            iax.set_title('')
            iax.set_ylabel(info_columns[i])  # LABEL
        fig = ax[0].get_figure()  # subplots=True returns an array of Axes
        pdf.savefig(fig)
        plt.close(fig)
    pdf.close()
    plt.close('all')

    # network Availability
    network.load_infodata()
    if len(network.infodata) > 0:
        for ivar in network.infodata:
            filename = directory + '%s.pdf' % ivar
            print_verbose("AVAIL: %s "% ivar, verbose)
            event_plot(getattr(network, ivar),save=True, filename=filename, vmin=0, vmax=70)
Code Example #3
File: radiosonde.py  Project: MBlaschek/radiosonde
    def load_data(self, name, directory=None, variable=None, suffix=None, verbose=0):
        """ Load data from Store

        Parameters
        ----------
        name
        directory
        variable
        suffix
        verbose

        Returns
        -------

        """
        from raso.config import rasodir
        if not hasattr(name, '__iter__'):
            name = [name]

        if directory is None:
            if self.directory is None:
                directory = rasodir

            else:
                directory = self.directory

        if suffix is None:
            suffix = ""

        else:
            if suffix[-1] != '_':
                suffix += '_'

        default = directory + '/%s' % self.id
        for j in name:
            if os.path.isfile(default+'/%s.h5' % j):
                with pd.HDFStore(default+'/%s.h5' % j) as store:
                    for ivar in store.keys():
                        if variable is not None:
                            if ivar.replace('/','') != variable:
                                continue

                        iname = ivar.replace('/', suffix)
                        self.add_data(iname, store[ivar], replace=True)
                        self.is_saved = True   # Update Vars
                        self.var_saved[iname] = (iname == j)  # Update Vars
                        print_verbose("[LD] %s : %s : %s" % (j, iname, str(store[ivar].shape)), verbose)

            else:
                print "%s not found " % j
        self.is_saved = np.any(self.var_saved.values())
Code Example #4
File: from_gpsro.py  Project: MBlaschek/radiosonde
def read_nearest_events(directory, pattern, ilon, ilat, radius, esat='murphy_koop', verbose=0):
    """ Internal Function to find nearest events

    Parameters
    ----------
    directory
    pattern
    ilon
    ilat
    radius
    esat
    verbose

    Returns
    -------

    """
    from raso.met import esat_functions, vap2sh
    from ncio import read_variable

    print_verbose("Collecting information ...", verbose)
    infos = gather_information(directory, pattern, verbose=verbose)
    # calculate distance to ilon ilat
    print_verbose("Calculating distances ...", verbose)
    infos['distance'] = distance(infos['lon'], infos['lat'], ilon, ilat)
    n = infos.shape[0]
    infos = infos.query('distance<%f' % radius)

    if infos.shape[0] == 0:
        raise RuntimeError("None of the %d GPS RO events lie within the specified radius (%f)!" % (n, radius))

    vpfunc = getattr(esat_functions, esat)
    data = []
    print_verbose("Procesing Profiles (%d) ..." % n, verbose)
    for iprof, irow in infos.iterrows():
        tmp = pd.DataFrame(read_variable(irow['file']))
        tmp.columns = ['lat', 'lon', 'p', 'ref', 'ref_obs', 't', 'vp']
        tmp.index.name = 'alt'
        tmp['date'] = irow['date']
        tmp = tmp.reset_index().set_index('date')
        tmp['t'] += 273.15
        tmp['p'] *= 100.  # mb (hPa) to Pa
        tmp['alt'] *= 1000.  # km to m
        tmp['vp'] *= 100.  # mb (hPa) to Pa
        tmp['r'] = tmp['vp'] / vpfunc(tmp['t'])
        tmp['q'] = vap2sh(tmp['vp'], tmp['p'])
        tmp['dist'] = distance(tmp['lon'], tmp['lat'], ilon, ilat)
        data.append(tmp)

    data = pd.concat(data, axis=0)  # list of DataFrames
    data.dropna(subset=['p', 'ref', 'ref_obs', 't', 'vp'], how='all', inplace=True)     # drop missing
    if data.duplicated().any():
        print "Duplicates found! ", data.duplicated().sum()  # check for duplicates ?

    print_verbose("GPSRO Events: %d" % data.index.unique().size, verbose)
    return infos, data
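
The `distance` helper used above is part of the project and not shown in these examples; a plausible stand-in is a great-circle (haversine) distance. The sketch below is an assumption about its behaviour (returning kilometres), not the actual implementation, so `radius` would have to be given in the same unit:

    import numpy as np

    def distance(lon, lat, ilon, ilat, earth_radius=6371.0):
        """Great-circle (haversine) distance in km between points (lon, lat) and a reference (ilon, ilat)."""
        lon, lat, ilon, ilat = [np.radians(np.asarray(x)) for x in (lon, lat, ilon, ilat)]
        a = np.sin((lat - ilat) / 2.) ** 2 + np.cos(lat) * np.cos(ilat) * np.sin((lon - ilon) / 2.) ** 2
        return 2. * earth_radius * np.arcsin(np.sqrt(a))
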
Code Example #5
File: detections.py  Project: MBlaschek/radiosonde
def _2d_detection(data, var, window, dist, thres, min_levels, verbose=0):
    """ Detect Breakpoints in 2D (time x pressure levels)

    1. Run SNHT per level
    2. Count significant peaks from all levels
       2.1. Local maxima (separated by at least `dist`)
       2.2. Above the SNHT threshold
    3. Add a breaks variable: 2 where that level exceeds the threshold, 1 otherwise

    Parameters
    ----------
    data        Panel       Inputdata (vars, time, p)
    var         str         Variable to consider
    window      float       Window size of Detection
    dist        float       distance between breakpoints
    thres       int         SNHT threshold
    min_levels  int         Minimum required levels to detect breakpoint
    verbose

    Returns
    -------

    """
    stest = np.squeeze(np.apply_along_axis(snht, 0, data[var].values, window, window / 4))  # per level
    data['%s_snht' % var] = stest
    data['%s_breaks' % var] = 0
    imax = np.asarray(local_maxima(np.sum(stest, 1), dist=dist))  # for all levels
    if len(imax) == 0:
        print_verbose(
            "No Breakpoints detected: %s min_levels (%d) found: %f (%f)" % (var, min_levels, np.max(stest), thres),
            verbose)
        return False

    print_verbose("Local Maxima (%s): %d" % (var, len(imax)), verbose)

    # how many above threshold
    tstat = np.sum(stest[imax, :] > thres, 1)  # could weight levels ? upper levels are less relevant?
    if not np.any(tstat >= min_levels):
        print_verbose("No Breakpoints detected: %s min_levels (%d) found: %d" % (var, min_levels, np.max(tstat)),
                      verbose)
        return False
    else:
        if verbose > 1:
            print "Breaks (%s): " % var
            for i, ib in enumerate(tstat):
                print "%s (%d) %s" % (str(data.major_axis[imax[i]]), ib, color_boolean(ib >= min_levels))

        print_verbose("Final Breaks (%s): %d" % (var, np.sum(tstat >= min_levels)), verbose)

    itx = imax[tstat >= min_levels]  # indices of breaks
    ivar = data.items.get_loc("%s_breaks" % var)  # index of variable
    iarray = 1 + np.int_(stest[itx, :] > thres)
    data.iloc[ivar, itx, :] = iarray[np.newaxis, ::]  # (1,...) is required to set
    return True
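
The `snht` routine itself is not shown in these examples; below is a toy moving-window SNHT statistic to illustrate what the detection operates on. This is only a sketch: the project's `snht(values, window, window / 4)` signature is assumed here to take a minimum sample count as its third argument.

    import numpy as np

    def snht_sketch(x, window, min_samples):
        """Toy moving-window SNHT: compare the means left and right of each index,
        standardised with the pooled mean/std of the window."""
        x = np.asarray(x, dtype=float)
        half = window // 2
        tstat = np.zeros(x.shape[0])
        for i in range(half, x.shape[0] - half):
            left, right = x[i - half:i], x[i:i + half]
            nl, nr = np.isfinite(left).sum(), np.isfinite(right).sum()
            if nl < min_samples or nr < min_samples:
                continue  # not enough data on one side of the candidate break
            pooled = np.concatenate([left, right])
            mu, sd = np.nanmean(pooled), np.nanstd(pooled)
            if sd == 0:
                continue
            z1 = (np.nanmean(left) - mu) / sd
            z2 = (np.nanmean(right) - mu) / sd
            tstat[i] = nl * z1 ** 2 + nr * z2 ** 2  # large values indicate a mean shift at i
        return tstat
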
Code Example #6
File: from_mars.py  Project: MBlaschek/radiosonde
def mars_to_dataframe(ident, filename=None, save=False, verbose=0):
    """ Read MARS DUMP HDF5 file

    Parameters
    ----------
    ident
    filename
    save
    verbose

    Returns
    -------
    DataFrame
    """
    from raso.config import marsdir, marspattern, rasodir

    if filename is None:
        ident = "%06d" % int(ident)
        filename = rasodir + '/%s/mars_dump.h5' % ident  # Default DUMP FILE

    if os.path.isfile(filename):
        print_verbose("[MARS] %s" % filename, verbose)
        try:
            return pd.read_hdf(filename, 'mars')

        except KeyError:
            store = pd.HDFStore(filename)
            print "Using first of:", store.keys()," for MARS"
            store['mars'] = store[store.keys()[0]]
            del store[store.keys()[0]]
            store.close()
            return pd.read_hdf(filename, 'mars')
    else:
        filename = marsdir + '/' + marspattern % int(ident)
        print_verbose("[MARS] %s" % filename, verbose)
        return mars_gzip(filename, save=save, verbose=verbose)
Code Example #7
File: detections.py  Project: MBlaschek/radiosonde
def _1d_detection(data, var, window, dist, thres, verbose=0):
    stest = snht(data[var].values, window, window / 4)
    data['%s_snht' % var] = stest
    data['%s_breaks' % var] = 0  # initialise like the 2D version, so the column exists even without breaks

    # could be too close together
    imax = np.asarray(local_maxima(stest, dist=dist))  # local maxima within a certain distance!
    if len(imax) == 0:
        print_verbose("No Breakpoints detected: %s found: %f (%f)" % (var, np.max(stest), thres), verbose)
        return False

    print_verbose("Local Maxima (%s): %d" % (var, len(imax)), verbose)
    tstat = stest[imax] > thres  # above threshold
    if not np.any(tstat > 0):
        print_verbose("No Breakpoints detected: %s" % (var), verbose)
        return False
    else:
        print_verbose("Final Breaks (%s): %d" % (var, np.sum(tstat > 0)), verbose)

    data['%s_breaks' % var] = 0
    ivar = data.columns.get_loc('%s_breaks' % var)
    data.iloc[imax[tstat > 0], ivar] = 2
    return True
Code Example #8
File: batch.py  Project: MBlaschek/radiosonde
def snht(ident, levels=None, outputdir=None, verbose=0):
    """

    Parameters
    ----------
    ident
    levels
    outputdir
    verbose

    Returns
    -------

    """
    from raso import load_radiosonde
    from raso.config import outdir
    import matplotlib.pyplot as plt

    yplevs = [1000, 3000, 7000, 12500, 17500, 22500, 30000, 40000, 50000, 60000, 85000, 92500]  # Pressure Levels, Ticks

    isonde = load_radiosonde(ident, variables=[])
    if outputdir is None:
        outputdir = outdir + '/figures'
    #
    # DATA
    #
    if 'detect_opts' not in isonde.attrs:
        print "No Detection Options found ?", ident
        return

    if levels is None:
        levels = [10000, 30000, 50000, 60000, 70000, 85000, 92500]

    for icor, iopts in isonde.detect_opts.items():
        iname = iopts['savename']
        isonde.load_data(iname)
        ithres = iopts['thres']
        ivar = iopts['variable']
        idata = getattr(isonde, iname)
        daynight = iopts.get('daynight', False)
        #
        #  CONTOUR
        #
        if daynight:
            timeseries_snht(idata[:, idata.major_axis.hour == 12, :], '%s_dep' % ivar,
                            levels=list(ithres * np.array([1, 2, 5, 10, 20])), yticklabels=yplevs)
            plt.savefig("%s/%s_%s_%s_12Z_snht.pdf" % (outputdir, ident, iname, ivar))
            print_verbose("%s/%s_%s_%s_12Z_snht.pdf" % (outputdir, ident, iname, ivar), verbose)

            timeseries_snht(idata[:, idata.major_axis.hour == 0, :], '%s_dep' % ivar,
                            levels=list(ithres * np.array([1, 2, 5, 10, 20])), yticklabels=yplevs)
            plt.savefig("%s/%s_%s_%s_00Z_snht.pdf" % (outputdir, ident, iname, ivar))
            print_verbose("%s/%s_%s_%s_00Z_snht.pdf" % (outputdir, ident, iname, ivar), verbose)

        else:
            timeseries_snht(idata, '%s_dep' % ivar, levels=list(ithres * np.array([1, 2, 5, 10, 20])),
                            yticklabels=yplevs)
            plt.savefig("%s/%s_%s_%s_snht.pdf" % (outputdir, ident, iname, ivar))
            print_verbose("%s/%s_%s_%s_snht.pdf" % (outputdir, ident, iname, ivar), verbose)
        #
        # LEVEL
        #
        for ilev in levels:
            fig, ax = plt.subplots(1, 1, figsize=(12, 2))

            if daynight:
                timeseries_line_snht(idata[:, idata.major_axis.hour == 12, :], '%s_dep' % ivar,
                                     '%s_dep_snht' % ivar, ilev, '%s dep (12Z) [K]' % ivar,
                                     breaks='%s_dep_breaks' % ivar, ax=ax, ylim=(-10, 10))
                ax.set_xlabel('Time (RM:60)')
                plt.savefig('%s/%s_%s_%s_12Z_snht_%04d.pdf' % (outputdir, ident, iname, ivar, (ilev / 100)))
                print_verbose('%s/%s_%s_%s_12Z_snht_%04d.pdf' % (outputdir, ident, iname, ivar, (ilev / 100)), verbose)

                timeseries_line_snht(idata[:, idata.major_axis.hour == 0, :], '%s_dep' % ivar,
                                     '%s_dep_snht' % ivar, ilev, '%s dep (00Z) [K]' % ivar,
                                     breaks='%s_dep_breaks' % ivar, ax=ax, ylim=(-10, 10))
                ax.set_xlabel('Time (RM:60)')
                plt.savefig('%s/%s_%s_%s_00Z_snht_%04d.pdf' % (outputdir, ident, iname, ivar, (ilev / 100)))
                print_verbose('%s/%s_%s_%s_00Z_snht_%04d.pdf' % (outputdir, ident, iname, ivar, (ilev / 100)), verbose)

            else:
                timeseries_line_snht(idata, '%s_dep' % ivar, '%s_dep_snht' % ivar, ilev, '%s dep [K]' % ivar,
                                     breaks='%s_dep_breaks' % ivar, ax=ax, ylim=(-10, 10))
                ax.set_xlabel('Time (RM:60)')
                plt.savefig('%s/%s_%s_%s_snht_%04d.pdf' % (outputdir, ident, iname, ivar, (ilev / 100)))
                print_verbose('%s/%s_%s_%s_snht_%04d.pdf' % (outputdir, ident, iname, ivar, (ilev / 100)), verbose)
    plt.close('all')
Code Example #9
File: from_era.py  Project: MBlaschek/radiosonde
def from_era(ident, alt=0, lon=0, lat=0, savename='extract_ERA', verbose=0):
    """ Updated Version of profiles and surface
    # Can read ERA data from retrieved and converted MARS Requests

    ERADIR

    Parameters
    ----------
    ident       str/int/float   Radiosonde ID
    alt         int/float       Radiosonde Station Altitude
    lon         int/float       Radiosonde Station Longitude
    lat         int/float       Radiosonde Station Latitude
    savename    str             Extract savename
    verbose     int             verboseness

    Returns
    -------

    """
    from raso.support import print_verbose, panel_to_database
    from raso.met.conversion import sh2ppmv, sh2vap, bolton_inverse, vap2sh
    from raso.met.esat_functions import foeewm
    from raso.config import rasodir # eradir
    from raso.qc import profile_limits
    # read new era file
    if isinstance(ident, (int, float)):
        ident = "%06d" % ident

    filename = rasodir + "/%s/%s.h5" % (ident, savename)

    cnames = ['t2m', 'q2m', 'msl', 'u10', 'v10', 'fetch', 'skt', 'FASTEM_1', 'FASTEM_2', 'FASTEM_3',
              'FASTEM_4', 'FASTEM_5', 'surf_type', 'water_type', 'elev', 'lat', 'lon', 'sat_zen', 'sat_azi',
              'sol_zen', 'sol_azi', 'cltp', 'clf']

    surface = {}  # pd.DataFrame(columns=cnames)
    era = {}
    if not os.path.isfile(filename):
        raise IOError('File not found: %s' % filename)

    with pd.HDFStore(filename, 'r') as store:
        for ikey in store.keys():
            iname = ikey.replace('/', '')
            if ikey[1:] in ['t', 'q', 'o3']:
                # profile data
                tmp = store[ikey]
                tmp.index.name = 'date'
                tmp.columns.name = 'p'
                # tmp = tmp.reset_index().drop_duplicates('date').set_index('date')  # remove duplicates
                era[iname] = tmp  # DataFrame (time x p)

            else:
                # surface data
                if iname == 'sp':
                    surface['msl'] = store[ikey][0] / 100.  # [dates] / 100.  # hPa

                elif iname == 'ci':
                    # sea ice cover
                    surface['surf_type'] = store[ikey][0]

                elif iname == 'u10':
                    surface['u10'] = store[ikey][0]

                elif iname == 'v10':
                    surface['v10'] = store[ikey][0]

                else:
                    surface[iname] = store[ikey][0]

    era = pd.Panel(era)
    era = panel_to_database(era, remove=False)
    # ERA can have negative Q values !?
    qerror = era.query('q<=0').q.count()
    print_verbose("[%s] Found %d negative Q-values!" % (ident, qerror), verbose)
    # Interpolate limits
    rt = profile_limits(tohpa=True, simple_names=True)
    rt['qmin'] = vap2sh(rt.vpmin.values, rt.p.values)  # both missing hPa factor
    qmins = np.interp(np.log(era.p.values), np.log(rt.p.values*100.), rt.qmin.values, left=rt.qmin.min(), right=rt.qmin.max())  # Interpolate Minimum Q
    era['q'] = np.where(era.q < qmins, qmins, era.q)  # replace with RTTOV limit
    # Make a dataframe
    surface = pd.DataFrame(surface)
    surface['q2m'] = sh2ppmv(era.query('p==100000')['q'], 100000.)  # ppmv
    surface['fetch'] = 100000.  # m
    # FASTEM Surface model Parameters:
    surface['FASTEM_1'] = 3.0
    surface['FASTEM_2'] = 5.0
    surface['FASTEM_3'] = 15.0
    surface['FASTEM_4'] = 0.1
    surface['FASTEM_5'] = 0.3
    # surface properties
    if 'surf_type' in surface.columns:
        if surface['surf_type'].isnull().any(0):
            if surface['surf_type'].count() > 0:
                # fillna returns a copy, so assign the result back
                surface['surf_type'] = surface['surf_type'].fillna(method='bfill')  # backwards fill
                surface['surf_type'] = surface['surf_type'].fillna(method='ffill')  # forward fill

            else:
                surface['surf_type'] = 0.  # surface type (0=land, 1=sea, 2=sea-ice)

    else:
        surface['surf_type'] = 0.  # surface type (0=land, 1=sea, 2=sea-ice)

    surface['water_type'] = 0.  # water type (0=fresh, 1=ocean)
    surface['elev'] = alt / 1000.  # ELEVATION km
    surface['lat'] = lat  # LAT
    surface['lon'] = lon  # LON
    surface['sat_azi'] = 0.  # Satellite Azimuth Angle
    surface['sat_zen'] = 53.1  # OSCAR , SSMIS
    surface['sol_zen'] = 0.  # Solar Zenith Angle
    surface['sol_azi'] = 0.  # Solar Azimuth Angle
    surface['cltp'] = 500.  # Cloud top pressure
    surface['clf'] = 0.  # Cloud fraction

    # right order
    surface = surface[cnames].copy()

    # CONVERT TO WATER VAPOR PRESSURE in Pa
    era['vp'] = sh2vap(era['q'], era['p'])  # formula
    era['td'] = bolton_inverse(era.vp.values)  # default method from Murphy and Koop (iterative)
    logic = era['td'] > era['t']  # impossible
    era.loc[:, 'td'] = np.where(logic, era['t'].values, era['td'].values)
    era.loc[:, 'vp'] = np.where(logic, foeewm(era['t'].values), era['vp'].values)  # ? fixes some problems ?
    return surface, era
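
A hypothetical call (station ID, altitude and coordinates are made up; the function expects '<savename>.h5' under rasodir/<ident>/):

    surface, era = from_era('011035', alt=198., lon=16.37, lat=48.25, verbose=1)
    print surface[['q2m', 'elev', 'sat_zen', 'cltp']].head()  # RTTOV-style surface table
    print era[['p', 't', 'q', 'vp', 'td']].head()             # profile database, one row per date and level
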
Code Example #10
File: detections.py  Project: MBlaschek/radiosonde
def detection(data, var='t', thres=50, window=1460, dist=730, levels=None, min_levels=3, database=False, freq='12h',
              valid_times=[0, 12], verbose=0, **kwargs):
    """Break Detection in timeseries using a Standard Normal Homogeneity Test (SNHT)

    Parameters
    ----------
    data        Series/DataFrame/Panel      Input Radiosonde data (standard)
    var         str/list                    Variable to use for detection
    thres       number                      Threshold for SNHT
    window      number                      Window size for SNHT
    dist        number                      Distance between breakpoints
    levels      list                        only these levels
    min_levels  int                         Minimum required number of levels
    database    bool                        return a flattened database (DataFrame) instead of a Panel
    freq        str                         sounding frequency ('12h' or '24h')
    valid_times list                        sounding hours to keep (e.g. [0, 12])
    verbose     int
    kwargs      **

    Returns
    -------
    input + var_breaks + var_snht
    """
    funcid = '[BD] '

    if not isinstance(data, (pd.Series, pd.DataFrame, pd.Panel)):
        raise ValueError(funcid + " Require a Series, DataFrame or Panel")

    if not isinstance(var, (list, str)):
        raise ValueError(funcid + " Variable needs to be a list or string")

    if isinstance(var, str):
        var = [var]  # as list

    data = data.copy()

    if len(valid_times) == 1:
        if freq != '24h':
            raise RuntimeError("With a single valid time the frequency must be '24h'")
    elif len(valid_times) == 2:
        if freq != '12h':
            raise RuntimeError("With two valid times the frequency must be '12h'")
    else:
        raise RuntimeWarning("Make sure the frequency fits the valid times")

    if isinstance(data, pd.Series):
        data = data.to_frame()
        if len(var) > 1:
            raise ValueError("Series object has only one variable")
        data.columns = var
        data = data.ix[np.in1d(data.index.hour, valid_times), :]  # VIEW
        data = data.resample(freq).asfreq()
        # run 1d (var is a single-element list here, so pass the variable name)
        found_breaks = _1d_detection(data, var[0], window, dist, thres, verbose=verbose)
        print_verbose(var[0] + ": " + str(filter_series(data['%s_breaks' % var[0]] > 0).unique()), verbose)
        return found_breaks, data  # bool, dataframe

    if isinstance(data, pd.DataFrame):
        for ivar in var:
            if ivar not in data.columns:
                raise ValueError(funcid + "Variable not found: %s in %s" % (ivar, str(data.columns)))

        data = database_to_panel(data, levels=levels, verbose=verbose)

        if isinstance(data, pd.DataFrame):
            # only one level
            data = data.ix[np.in1d(data.index.hour, valid_times), :]  # VIEW
            data = data.resample(freq).asfreq()
            found_breaks = {}
            for ivar in var:
                found_breaks[ivar] = _1d_detection(data, ivar, window, dist, thres, verbose=verbose)
                print_verbose(ivar + ": " + str(filter_series(data['%s_breaks' % ivar] > 0).unique()), verbose)

            return found_breaks, data  # dict, dataframe

    # Panel
    for ivar in var:
        if ivar not in data.items:
            raise ValueError(funcid + "Variable not found: %s in %s" % (ivar, str(data.items)))

    # Select times:
    data = data.loc[:, np.in1d(data.major_axis.hour, valid_times), :]
    # Resample to frequency
    if data.major_axis.inferred_freq != freq:
        print_verbose(funcid + "Resampling to %s frequency" % freq, verbose)
        newdates = pd.date_range(data.major_axis[0], data.major_axis[-1], freq=freq)
        data = data.reindex(major_axis=newdates)

    data.major_axis.name = 'date'
    if levels is not None:
        data = data.ix[:, :, data.minor_axis.isin(levels)].copy()
        print_verbose(funcid + "Selecting only specified levels ...", verbose)
        print_verbose(funcid + "p-Levels: " + ",".join(["%d" % (ip / 100) for ip in levels]) + ' hPa', verbose)

    # 2D detection
    found_breaks = {}
    for ivar in var:
        found_breaks[ivar] = _2d_detection(data, ivar, window, dist, thres, min_levels,
                                           verbose=verbose)

    if verbose > 0:
        print "Minimum amount of levels required: ", min_levels
        print "Breakpoints"
        for ivar in var:
            print ivar + ": ", str(data.major_axis[(data['%s_breaks' % ivar] > 0).all(1)])

    # Output as Database
    if database:
        print_verbose(funcid + "as database.", verbose)
        data = data.to_frame(filter_observations=False).reset_index().sort_values(by=['date', 'p']).set_index('date', drop=True)

    return found_breaks, data  # dict, panel / database
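
A sketch of calling `detection` on a Panel of standard-level data (the `panel` variable and the level selection are placeholders; thresholds are the defaults from the signature):

    found, result = detection(panel, var='t', thres=50, window=1460, dist=730,
                              levels=[30000, 50000, 70000, 85000], min_levels=3, verbose=1)
    if found['t']:
        nlev = (result['t_breaks'] > 1).sum(1)  # levels above the SNHT threshold per date
        print nlev[nlev > 0]                    # dates flagged as breakpoints
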
Code Example #11
File: from_mars.py  Project: MBlaschek/radiosonde
def mars_gzip(filename, convert=True, save=False, verbose=0):
    """Read compressed ECMWF Mars dump as done by L. Haimberger,
    15.12.2014
    """
    from raso.config import rasodir
    compressed = True  # the file may or may not be gzip-compressed
    if not os.path.isfile(filename):
        if '.gz' in filename:
            if not os.path.isfile(filename.replace('.gz', '')):
                raise IOError("File not found: %s" % filename)

            filename = filename.replace('.gz', '')
            compressed = False

    if '.gz' not in filename:
        compressed = False

    try:
        if compressed:
            infile = gzip.open(filename, 'rb')

        else:
            infile = open(filename, 'r')

        tmp = infile.read()  # alternative readlines (slower)
        data = tmp.splitlines()  # Memory (faster)

    finally:
        infile.close()

    nl = len(data)
    dates = []
    nf = len(data[0].strip().replace('NULL', 'NaN').split(' '))
    raw = np.zeros((nl, nf), dtype=float)
    itx = []
    if verbose > 0:
        pbar = ProgressBar(maxval=nl)
        pbar.start()
    # 'i4,i4,i4,i4,i4,f4,f4,f4,f4,i4,f8,f8,f8,f8'))
    for i, line in enumerate(data):
        # datetime , int , int, float, float .... int float int (flags)
        # 0     1    2       3         4         5     6  7    8     9     10    11        12      13
        # date time obstype codetype sonde_type ident lat lon stalt press varno obsvalue biascorr fg_depar
        #    14     15      16    17
        # an_depar status anflag event1
        try:
            raw[i] = np.float_(line.strip().replace('NULL', 'NaN').split(' '))
            dates.append(datetime.datetime.strptime("%8d%04d" % (raw[i, 0], raw[i, 1]), '%Y%m%d%H%M%S'))
            itx.append(i)
            if verbose > 0:
                pbar.update(i + 1)

        except:
            continue

    if verbose > 0:
        pbar.finish()

    if not convert:
        out = pd.DataFrame(data=raw[itx], index=dates)
        print_verbose("[MARS] Not converted!", verbose)
        print_verbose("[MARS] %20s ( %12d ) <> ( %12d )" % (filename.split('/')[-1], nl, out.shape[0]), verbose)
        return out

    out = pd.DataFrame(data=raw[np.ix_(itx, [5, 2, 3, 4, 10, 6, 7, 8, 9, 11, 12, 13, 14])], index=dates,
                       columns=['id', 'obs_type', 'code_type', 'sonde_type', 'varno', 'lat', 'lon', 'alt', 'p', 'value',
                                'biascorr', 'fg_dep', 'an_dep'])

    out.dropna(subset=['value'], inplace=True)
    print_verbose("%20s ( %12d ) <> ( %12d )" % (filename.split('/')[-1], nl, out.shape[0]), verbose)
    if len(out.id.unique()) > 1:
        print_verbose("Multiple IDs: " + str(out.id.unique()), verbose)
        return out

    if save:
        ident = "%06d" % int(out['id'][0])
        filename_out = rasodir + '/%s/mars_dump.h5' % ident
        out.to_hdf(filename_out, 'mars', format='table', complib="blosc", complevel=9)
        print_verbose("[%06d] Saved: %s" % (int(ident), filename_out), verbose)

    return out
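
For orientation, a single dump line is parsed as in the loop above; the field values here are invented and the time field is assumed to be HHMMSS so that the 14-character date string matches '%Y%m%d%H%M%S':

    line = "20000101 120000 5 35 80 11035 48.2 16.4 198.0 85000 2 272.4 NULL -0.3 -0.1 1 0 0"
    values = np.float_(line.strip().replace('NULL', 'NaN').split(' '))  # NULL becomes NaN
    date = datetime.datetime.strptime("%8d%04d" % (values[0], values[1]), '%Y%m%d%H%M%S')
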
Code Example #12
File: radiosonde.py  Project: MBlaschek/radiosonde
    def save(self, var=None, directory=None, update=False, hdfformat='table', hdfcomplevel=9, hdfcomplib='blosc', verbose=0):
        """ Write Radiosonde Class Object to Directory

        Parameters
        ----------
        var             str/list
        directory       str
        update          bool
        hdfformat       str
        hdfcomplevel    int
        hdfcomplib      str
        verbose         int
        """
        from raso.config import rasodir
        import pickle

        if directory is None:
            if self.directory is None:
                directory = rasodir
            else:
                directory = self.directory

        default = directory + '/%s/' % self.id
        if not os.path.isdir(default):
            os.makedirs(default)

        if var is None:
            varis = self.vars  # All Variables

        else:
            if isinstance(var, str):
                varis = [var]
            else:
                varis = var
            update = True  # Make sure we write it

        journal("Saving to dir: %s" % default, self.history, verbose)
        for ivar in varis:
            written = False
            n, fio = None, ''  # defaults in case neither HDF5 nor pickle succeeds
            if self.var_saved[ivar] and not update:
                if os.path.isfile(default + "%s.h5" % ivar) or os.path.isfile(default + "%s.pickle" % ivar):
                    print_verbose("%20s [%5s]" % (ivar, color_boolean(written)), verbose)
                    continue

            tmp = getattr(self, ivar)
            #
            # Try as HDF
            #
            if isinstance(tmp, (pd.Series, pd.DataFrame, pd.Panel)):
                try:
                    tmp.to_hdf(default + "%s.h5" % ivar, ivar, mode='a', format=hdfformat, complevel=hdfcomplevel, complib=hdfcomplib)
                    fio = 'H5'
                    written = True
                    n = tmp.shape

                except Exception as e:
                    if verbose:
                        print ivar, " >> ", repr(e)
            #
            # And as Pickle
            #
            if not written:
                try:
                    pickle.dump(tmp, open(default + "%s.pickle" % ivar, 'w'))
                    fio = 'PKL'
                    written = True
                    n = len(tmp)

                except Exception as e:
                    if verbose:
                        print ivar, " >> ", repr(e)

            print_verbose("%20s %20s [%5s] %s" % (ivar, n, color_boolean(written), fio), verbose)

        attrs = {}
        for iatt in self.attrs:
            attrs[iatt] = getattr(self, iatt)

        pickle.dump(attrs, open(default + 'attributes.pickle', 'w'))
        attrs = {}
        # remove previous 'Saved' entries (list comprehension avoids popping while iterating)
        self.history = [s for s in self.history if 'Saved' not in s]

        journal("Saved", self.history, 0)
        for iatt in ['id', 'is_saved', 'is_empty']:
            attrs[iatt] = getattr(self, iatt)

        pickle.dump(attrs, open(default + 'radiosonde.pickle', 'w'))
        f = open(default+'history.txt', 'w')
        f.write("\n".join(self.history))
        f.close()

        if self.notes != "":
            f = open(default + 'notes.txt', 'w')
            f.write(self.notes)
            f.close()

        self.directory = directory
Code Example #13
File: batch.py  Project: MBlaschek/radiosonde
def data(ident, outputdir=None, verbose=0):
    """ Automated Data count plots

    Parameters
    ----------
    ident
    outputdir
    verbose

    Returns
    -------

    """
    from raso import load_radiosonde
    from raso.config import outdir
    import matplotlib.pyplot as plt

    yplevs = [1000, 3000, 7000, 12500, 17500, 22500, 30000, 40000, 50000, 60000, 85000, 92500]  # Pressure Levels, Ticks

    isonde = load_radiosonde(ident, variables=['igra', 'data', 'std_data'])
    if outputdir is None:
        outputdir = outdir + '/figures'
    #
    # DATA
    #
    if 'data' in isonde.vars:
        varis = ['t', 'r']
        if isonde.data.columns.isin(varis).sum() == len(varis):
            dates = pd.Series(1, index=pd.DatetimeIndex(
                isonde.data[varis].dropna().index.unique()))  # events with t,r variables
        else:
            varis = isonde.data.columns.tolist()
            dates = pd.Series(1, index=pd.DatetimeIndex(isonde.data.index.unique()))  # events with all variables
        events = dates.groupby(dates.index.to_period(freq='M')).count()  # events per month
        events.plot(grid=True, kind='area')
        plt.title("Radiosonde %s (DATA), Sounding Events (%s)" % (isonde.id, ",".join(varis)))
        plt.xlabel("Month")
        plt.ylabel("Soundings per Month [#]")
        plt.savefig('%s/%s_data_counts.pdf' % (outputdir, ident))
        print_verbose('%s/%s_data_counts.pdf' % (outputdir, ident), verbose)
        plt.close()
    #
    # IGRA
    #
    if 'igra' in isonde.vars:
        varis = ['t', 'dpd']
        if isonde.igra.columns.isin(varis).sum() == len(varis):
            dates = pd.Series(1, index=pd.DatetimeIndex(
                isonde.igra[varis].dropna().index.unique()))  # events with t,r variables
        else:
            varis = isonde.igra.columns.tolist()
            dates = pd.Series(1, index=pd.DatetimeIndex(isonde.igra.index.unique()))  # events with all variables
        events = dates.groupby(dates.index.to_period(freq='M')).count()  # events per month
        events.plot(grid=True, kind='area')
        plt.title("Radiosonde %s (IGRA), Sounding Events (%s)" % (isonde.id, ",".join(varis)))
        plt.xlabel("Month")
        plt.ylabel("Soundings per Month [#]")
        plt.savefig('%s/%s_igra_counts.pdf' % (outputdir, ident))
        print_verbose('%s/%s_igra_counts.pdf' % (outputdir, ident), verbose)
        plt.close()
    #
    # STD DATA
    #
    if 'std_data' in isonde.vars:
        varis = ['t', 'r']
        if isonde.std_data.columns.isin(varis).sum() == len(varis):
            dates = pd.Series(1, index=pd.DatetimeIndex(
                isonde.std_data[varis].dropna().index.unique()))  # events with t,r variables
        else:
            varis = isonde.std_data.columns.tolist()
            dates = pd.Series(1, index=pd.DatetimeIndex(isonde.std_data.index.unique()))  # events with all variables
        events = dates.groupby(dates.index.to_period(freq='M')).count()  # events per month
        events.plot(grid=True, kind='area')
        plt.title("Radiosonde %s (STDDATA), Sounding Events (%s)" % (isonde.id, ",".join(varis)))
        plt.xlabel("Month")
        plt.ylabel("Soundings per Month [#]")
        plt.savefig('%s/%s_std_data_counts.pdf' % (outputdir, ident))
        plt.close()
        #
        for ivar in isonde.std_data.columns.tolist():
            if ivar in ['p', 'u', 'v', 'qual']:
                continue
            timeseries_var(isonde.std_data, ivar, freq='M', title='RASO %s' % isonde.id, yticklabels=yplevs)
            plt.savefig("%s/%s_%s_contour.pdf" % (outputdir, ident, ivar))
            plt.close()
    plt.close('all')
Code Example #14
File: batch.py  Project: MBlaschek/radiosonde
def corrections(ident, levels=None, outputdir=None, verbose=0):
    """

    Parameters
    ----------
    ident
    levels
    outputdir
    verbose

    Returns
    -------

    """
    from raso import load_radiosonde
    from raso.config import outdir
    import matplotlib.pyplot as plt

    yplevs = [1000, 3000, 7000, 12500, 17500, 22500, 30000, 40000, 50000, 60000, 85000, 92500]  # Pressure Levels, Ticks

    isonde = load_radiosonde(ident, variables=[])
    if outputdir is None:
        outputdir = outdir + '/figures'
    #
    # DATA
    #
    if 'detect_opts' not in isonde.attrs:
        print "No Detection Options found ?", ident
        return

    if levels is None:
        levels = [10000, 30000, 50000, 60000, 70000, 85000, 92500]

    for icor, iopts in isonde.detect_opts.items():
        iname = iopts['savename']
        isonde.load_data(iname)
        ithres = iopts['thres']
        ivar = iopts['variable']
        idata = getattr(isonde, iname)
        daynight = iopts.get('daynight', False)
        #
        # CONTOUR
        #
        for j, jvar in enumerate(['%s_mcor' % ivar, '%s_qcor' % ivar, '%s_qecor' % ivar]):
            if jvar not in idata.items:
                continue

            if daynight:
                timeseries_var(idata[:, idata.major_axis.hour == 12, :], jvar, freq='M', yticklabels=yplevs)
                plt.savefig("%s/%s_%s_%s_12Z.pdf" % (outputdir, ident, iname, jvar))
                print_verbose("%s/%s_%s_%s_12Z.pdf" % (outputdir, ident, iname, jvar), verbose)

                timeseries_anomaly(idata[:, idata.major_axis.hour == 12, :], jvar, '%s_era' % ivar, freq='M',
                                   yticklabels=yplevs)
                plt.savefig("%s/%s_%s_%s_dep_12Z.pdf" % (outputdir, ident, iname, jvar))
                print_verbose("%s/%s_%s_%s_dep_12Z.pdf" % (outputdir, ident, iname, jvar), verbose)

                timeseries_var(idata[:, idata.major_axis.hour == 0, :], jvar, freq='M', yticklabels=yplevs)
                plt.savefig("%s/%s_%s_%s_00Z.pdf" % (outputdir, ident, iname, jvar))
                print_verbose("%s/%s_%s_%s_00Z.pdf" % (outputdir, ident, iname, jvar), verbose)

                timeseries_anomaly(idata[:, idata.major_axis.hour == 0, :], jvar, '%s_era' % ivar, freq='M',
                                   yticklabels=yplevs)
                plt.savefig("%s/%s_%s_%s_dep_00Z.pdf" % (outputdir, ident, iname, jvar))
                print_verbose("%s/%s_%s_%s_dep_00Z.pdf" % (outputdir, ident, iname, jvar), verbose)

            else:
                timeseries_var(idata, jvar, freq='M', yticklabels=yplevs)
                plt.savefig("%s/%s_%s_%s.pdf" % (outputdir, ident, iname, jvar))
                print_verbose("%s/%s_%s_%s.pdf" % (outputdir, ident, iname, jvar), verbose)

                timeseries_anomaly(idata, jvar, '%s_era' % ivar, freq='M', yticklabels=yplevs)
                plt.savefig("%s/%s_%s_%s_dep.pdf" % (outputdir, ident, iname, jvar))
                print_verbose("%s/%s_%s_%s_dep.pdf" % (outputdir, ident, iname, jvar), verbose)
        #
        # LEVEL
        #
        for ilev in levels:
            if daynight:
                fig, ax = plt.subplots(3, 1, figsize=(12, 6), sharex=True, sharey=True)
                varnames = ['M', 'Q', 'QE']
                ym = int(idata["%s_dep" % ivar, idata.major_axis.hour == 12, ilev].mean())
                for j, jvar in enumerate(['%s_mcor' % ivar, '%s_qcor' % ivar, '%s_qecor' % ivar]):
                    if jvar not in idata.items:
                        continue

                    timeseries_line_correction(idata[:, idata.major_axis.hour == 12, :], jvar,
                                               '%s_dep_snht' % ivar, ilev, '%s dep [K]' % ivar,
                                               breaks='%s_dep_breaks' % ivar, ax=ax[j],
                                               departures='%s_era' % ivar, ylim=(ym - 5, ym + 5),
                                               force_snht=True, post_snht=True, plabel=None)
                    idata['%s_dep' % ivar, idata.major_axis.hour == 12, ilev].resample('12h').mean().rolling(window=60,
                                                                                                             min_periods=1).mean().plot(
                        ax=ax[j], color='gray', zorder=1)
                    ax[j].set_xlim('1979', '2016')
                    ax[j].set_ylabel(varnames[j] + ' dep (ERA) [K]')
                    ax[j].grid(True)
                ax[0].set_title('Corrections at %d hPa 12Z' % (ilev / 100))
                ax[-1].set_xlabel('Time (RM:60)')
                plt.savefig('%s/%s_%s_%s_corr_12Z_%04d.pdf' % (outputdir, ident, iname, ivar, (ilev / 100)))
                print_verbose('%s/%s_%s_%s_corr_12Z_%04d.pdf' % (outputdir, ident, iname, ivar, (ilev / 100)), verbose)

                fig, ax = plt.subplots(3, 1, figsize=(12, 6), sharex=True, sharey=True)
                ym = int(idata["%s_dep" % ivar, idata.major_axis.hour == 0, ilev].mean())
                for j, jvar in enumerate(['%s_mcor' % ivar, '%s_qcor' % ivar, '%s_qecor' % ivar]):
                    if jvar not in idata.items:
                        continue

                    timeseries_line_correction(idata[:, idata.major_axis.hour == 0, :], jvar,
                                               '%s_dep_snht' % ivar, ilev, '%s dep [K]' % ivar,
                                               breaks='%s_dep_breaks' % ivar, ax=ax[j],
                                               departures='%s_era' % ivar, ylim=(ym - 5, ym + 5),
                                               force_snht=True, post_snht=True, plabel=None)
                    idata['%s_dep' % ivar, idata.major_axis.hour == 0, ilev].resample('12h').mean().rolling(window=60,
                                                                                                            min_periods=1).mean().plot(
                        ax=ax[j], color='gray', zorder=1)
                    ax[j].set_xlim('1979', '2016')
                    ax[j].set_ylabel(varnames[j] + ' dep (ERA) [K]')
                    ax[j].grid(True)
                ax[0].set_title('Corrections at %d hPa 00Z' % (ilev / 100))
                ax[-1].set_xlabel('Time (RM:60)')
                plt.savefig('%s/%s_%s_%s_corr_00Z_%04d.pdf' % (outputdir, ident, iname, ivar, (ilev / 100)))
                print_verbose('%s/%s_%s_%s_corr_00Z_%04d.pdf' % (outputdir, ident, iname, ivar, (ilev / 100)), verbose)

            else:
                fig, ax = plt.subplots(3, 1, figsize=(12, 6), sharex=True, sharey=True)
                varnames = ['M', 'Q', 'QE']
                ym = int(idata["%s_dep" % ivar, :, ilev].mean())
                for j, jvar in enumerate(['%s_mcor' % ivar, '%s_qcor' % ivar, '%s_qecor' % ivar]):
                    if jvar not in idata.items:
                        continue

                    timeseries_line_correction(idata, jvar, '%s_dep_snht' % ivar, ilev, '%s dep [K]' % ivar,
                                               breaks='%s_dep_breaks' % ivar, ax=ax[j],
                                               departures='%s_era' % ivar, ylim=(ym - 5, ym + 5),
                                               force_snht=True, post_snht=True, plabel=None)
                    idata['%s_dep' % ivar, :, ilev].resample('12h').mean().rolling(window=60,
                                                                                   min_periods=1).mean().plot(ax=ax[j],
                                                                                                              color='gray',
                                                                                                              zorder=1)
                    ax[j].set_xlim('1979', '2016')
                    ax[j].set_ylabel(varnames[j] + ' dep (ERA) [K]')
                    ax[j].grid(True)
                ax[0].set_title('Corrections at %d hPa' % (ilev / 100))
                ax[-1].set_xlabel('Time (RM:60)')
                plt.savefig('%s/%s_%s_%s_corr_%04d.pdf' % (outputdir, ident, iname, ivar, (ilev / 100)))
                print_verbose('%s/%s_%s_%s_corr_%04d.pdf' % (outputdir, ident, iname, ivar, (ilev / 100)), verbose)

    plt.close('all')
Code Example #15
File: from_store.py  Project: MBlaschek/radiosonde
def from_store_deprecated(ident, filename=None, variables=None, directory=None, attributes=True, verbose=0, **kwargs):
    """Read previously saved Radiosonde from default directory
    pandas HDFStore HDF5

    Parameters
    ----------
    ident       Radiosonde Station Number
    filename    filename of HDFStore
    variables   select only given variables
    directory   directory to search for raso_######.h5 with station number

    Returns
    -------
    radiosonde class object
    """
    from raso.sonde.radiosonde import radiosonde
    from raso.config import rasodir, wkdir
    #
    if isinstance(ident, (float, int)):
        ident = "%06d" % ident
    #
    if filename is not None:
        if 'h5' not in filename:
            raise ValueError("This routine can only read HDF5 files from pandas!")
        default = filename

    elif directory is None:
        default = rasodir + '/raso_%s.h5' % ident

    else:
        default = directory + '/raso_%s.h5' % ident

    print_verbose("[STORE] %s" % default, verbose)
    #
    if not os.path.isfile(default):
        raise IOError("File %s not found" % default.replace(wkdir, '.'))
    #
    out = radiosonde(id=ident)
    metadata = False
    if variables is not None:
        if isinstance(variables, str):
            variables = [variables]  # convert to list
    #
    with pd.HDFStore(default, complevel=9, complib='blosc') as store:
        for key in store.keys():
            # drop /
            if key[1:] == 'metadata':
                metadata = True
                continue

            elif variables is not None:
                if key[1:] in variables:
                    out.add_data(key[1:], store[key])  # only certain variables

            else:
                out.add_data(key[1:], store[key])  # default

            print_verbose("[STORE] %s " % key, verbose)
        #
        if metadata:
            print_verbose("[STORE] METADATA ", verbose)
            imeta = store.get_storer('metadata')
            #
            # Attributes
            if 'metadata' in imeta.attrs:
                for ikey, ival in imeta.attrs['metadata'].items():
                    if ikey in ['metadata']:
                        continue  # ? do we need that?

                    if attributes:
                        out.add_attr(ikey, ival)
                    elif ikey in ['lon', 'lat', 'alt']:
                        out.add_attr(ikey, ival)
                    else:
                        pass
            #
            # Infos (id history, saved)
            if 'iparams' in imeta.attrs:
                for ikey, ival in imeta.attrs['iparams'].items():
                    if ikey in ['infos']:
                        continue

                    setattr(out, ikey, ival)

    out.filename = default
    return out
Code Example #16
File: from_store.py  Project: MBlaschek/radiosonde
def from_store(ident, variables=None, directory=None, verbose=0):
    """ Read Radiosonde from Store

    Skips any directory or file with extract in their name
    As well as mars_dump.h5

    Parameters
    ----------
    ident           str
    variables       list
    directory       str
    verbose         int

    Returns
    -------
    radiosonde

    Examples
    --------
    >>>import raso
    >>>raso.read.from_store('011035')

    """
    from raso.sonde.radiosonde import radiosonde
    from raso.config import rasodir

    if directory is None:
        directory = rasodir

    default = directory + '/%s/' % ident
    if not os.path.isdir(default):
        raise IOError("[from_store] Requires a valid directory: %s" % default)

    if variables is not None:
        if isinstance(variables, str):
            variables = [variables]


    out = radiosonde(id=ident)
    for ifile in os.listdir(default):
        if ifile == 'attributes.pickle':
            attrs = pickle.load(open(default + 'attributes.pickle'))
            for ikey, ival in attrs.items():
                out.add_attr(ikey, ival)

        elif ifile == 'radiosonde.pickle':
            attrs = pickle.load(open(default + 'radiosonde.pickle'))
            for ikey, ival in attrs.items():
                setattr(out, ikey, ival)

        elif ifile == 'history.txt':
            f = open(default + 'history.txt')
            setattr(out, 'history', f.read().splitlines())
            f.close()

        elif ifile == 'notes.txt':
            f = open(default + 'notes.txt')
            setattr(out, 'notes', f.read())
            f.close()

        elif ifile == 'mars_dump.h5':
            print_verbose("%20s Skipped" % ifile, verbose)
            continue

        elif os.path.isdir(default+ifile):
            continue  # any directory is ignored

        elif 'extract' in ifile:
            print_verbose("%20s Skipped" % ifile, verbose)  # extract is a codename for temp files
            continue

        else:
            varname = ifile.split('.')[0]
            if variables is not None:
                if varname not in variables:
                    print_verbose("%20s [%5s]" % (varname, color_boolean(False)), verbose)
                    continue
            # TODO add file timestamp as info
            # print varname, "created: %s" % time.ctime(os.path.getctime(default + ifile)), time.ctime(os.path.getmtime(default + ifile))
            if 'h5' in ifile:
                out.add_data(varname, pd.read_hdf(default + ifile, varname), verbose=-1)  # not reported

            else:
                out.add_data(varname, pickle.load(open(default + ifile)), verbose=-1)  # not reported
            out.var_saved[varname] = True
            print_verbose("%20s [%5s]" % (varname, color_boolean(True),), verbose)

    out.directory = directory
    out.is_saved = True
    return out
Code Example #17
File: merge.py  Project: MBlaschek/radiosonde
def merge(isonde, jsonde, update=False, save=False, verbose=0):
    """
     Read and merge two radiosonde class to one radiosonde

    Parameters
    ----------
    isonde      radiosonde
    jsonde      radiosonde
    update      bool
    save        bool
    verbose     int

    Returns
    -------
    radiosonde / save
    """
    from . import radiosonde

    if not isinstance(isonde, radiosonde):
        raise ValueError("[MERGE] Requires a radiosonde class: isonde")

    if not isinstance(jsonde, radiosonde):
        raise ValueError("[MERGE] Requires a radiosonde class: jsonde")

    for ivar in jsonde.vars:
        if ivar in isonde.vars:
            if update:
                setattr(isonde, ivar, getattr(jsonde, ivar))

            print_verbose("[MERGE] %s replaced [%s]" % (ivar, color_boolean(update)), verbose)

        else:
            isonde.add_data(ivar, getattr(jsonde, ivar))  # add_data, as used elsewhere for data variables
            print_verbose("[MERGE] New variable: %s" % ivar, verbose)

    # Attributes ?
    for iatt in jsonde.attrs:
        if iatt in isonde.attrs:
            if getattr(isonde, iatt) == getattr(jsonde, iatt):
                continue

            elif iatt in ['history', 'is_saved']:
                continue

            elif update:
                isonde.add_attr(iatt, getattr(jsonde, iatt))

            else:
                isonde.add_attr("new_" + iatt, getattr(jsonde, iatt))

            print_verbose("[MERGE] %s replaced [%s]" % (iatt, color_boolean(update)), verbose)

        else:
            isonde.add_attr(iatt, getattr(jsonde, iatt))
            print_verbose("[MERGE] New Attribute: %s " % ("new_" + iatt), verbose)

    # History ?
    isonde.history = history_duplicates(isonde.history + jsonde.history)

    if save:
        isonde.save(verbose=verbose)

    else:
        isonde.is_saved = False
        return isonde
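
A short hypothetical usage, assuming `isonde` and `jsonde` are two radiosonde objects for the same station (e.g. IGRA- and MARS-based):

    merged = merge(isonde, jsonde, update=False, verbose=1)  # conflicting attributes are kept as 'new_<name>'
    merged.save(verbose=1)                                   # or pass save=True to merge directly
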
Code Example #18
File: from_gruan.py  Project: MBlaschek/radiosonde
def from_gruan(sonde, directory=None, save=True,
               var=['cor_temp', 'temp', 'press', 'cor_rh', 'rh', 'WVMR', 'alt', 'u_temp', 'u_rh', 'u_press'],
               force=False, attach=None, attach_var=None, verbose=0, **kwargs):

    """
    Read GRUAN Data and convert to a radiosonde class object
    Read and convert time to std_times, rename temp and press to t and p.
    Drop duplicate entries.

    Possible Station Names:
    'BAR', 'BEL', 'BOU', 'CAB', 'LAU', 'LIN', 'MAN', 'NAU',
    'NYA', 'PAY', 'POT', 'REU', 'SGP', 'SOD', 'TAT'

    Parameters
    ----------
    sonde       str         Abbreviation of the sonde station
    directory   str         Directory of GRUAN data
    var         list        of variables to include
    save        bool        Save radiosonde to store ?
    force       bool        Force a raw data reread ?
    attach      radiosonde  Attach to radiosonde class object
    attach_var  list        Variables to attach from HDFStore
    verbose     int         Level of verbosity
    kwargs      dict        Additional Keywords

    Returns
    -------
    radiosonde class object

    Raises
    ------
    ValueError  sonde not in possible Stations

    """
    from raso.standard_dates_times import _fix_datetime
    from raso.config import outdir
    from raso import radiosonde
    from . import from_store
    from ncio import read_netcdf, read_history

    avail_sondes = ['BAR', 'BEL', 'BOU', 'CAB', 'LAU', 'LIN', 'MAN', 'NAU', 'NYA', 'PAY', 'POT', 'REU', 'SGP', 'SOD',
                    'TAT']

    if not isinstance(sonde,str):
        raise ValueError("Requires a str: sonde")

    if attach is not None:
        if not isinstance(attach,radiosonde):
            raise ValueError("Requires a radiosonde class object: attach")

    if sonde not in avail_sondes:
        raise ValueError("Only one of %s allowed!" % str(avail_sondes))

    filename = outdir + "/GRUAN_" + sonde + ".h5"

    if os.path.isfile(filename) and not force:
        print "Recovering: %s"%filename
        isonde = from_store(0, filename=filename)

        if attach is not None:
            if attach_var is None:
                attach_var = isonde.vars

            for ivar in isonde.vars:
                if ivar in attach_var:
                    attach.add_data(ivar, getattr(isonde,ivar), history="GRUAN, %s %s" %(sonde, ivar))

            return
        else:
            return isonde

    if directory is None:
        raise RuntimeError("[GRUAN] requires a directory!")
    #
    print_verbose("[GRUAN] Find files ...", verbose)
    files = find_files(directory, '*' + sonde + '*.nc', recursive=True)
    print_verbose("[GRUAN][%s] Files found: %d" % (sonde, len(files)), verbose)
    data = []
    if verbose > 0:
        pbar = ProgressBar(maxval=len(files))
        pbar.start()
        i = 0

    for ifile in files:
        tmp = pd.DataFrame(read_netcdf(ifile, verbose=verbose - 2))  # convert it all to a big Frame
        # select only necessary information
        tmp = tmp[ var ]
        tmp.index.name = 'orig_date' # retain original sounding times
        tmp.rename(columns=lambda x: x.replace('temp', 't').replace('press', 'p').replace('rh','r'), inplace=True) # rename
        tmp.p *= 100.  # from hPa to Pa
        # WVMR to q
        tmp['q'] = tmp.eval("WVMR/(WVMR+1)") # specific humidity
        tmp['date'] = tmp.index.to_period(freq='h').to_datetime() # truncate to hours
        tmp = tmp.reset_index().set_index('date') # change index
        rx = map(_fix_datetime, tmp.index)  # change time to 0, 6, 12, 18 UTC
        tmp.index = rx
        tmp.drop_duplicates(inplace=True)
        data.append(tmp)

        if verbose > 0:
            pbar.update(i + 1)
            i += 1

    if verbose > 0:
        pbar.finish()

    hist = read_history(ifile)  # metadata (global attributes) of the last file read
    try:
        ident = "%06d" % int(hist['g.General.SiteWmoId'])
    except:
        ident = sonde

    lat = float(hist['g.MeasuringSystem.Latitude'].split(' ')[0])
    lon = float(hist['g.MeasuringSystem.Longitude'].split(' ')[0])
    alt = float(hist['g.MeasuringSystem.Altitude'].split(' ')[0])
    #
    print_verbose("[GRUAN] Concatenate data ...",verbose)
    data = pd.concat(data, axis=0)
    # drop duplicates
    # print_verbose("[GRUAN] Drop duplicates ...", verbose)
    # data.drop_duplicates(inplace=True)
    #
    if attach is None:
        print_verbose("[GRUAN] Create radiosonde object ...", verbose)
        isonde = radiosonde(ident)
        isonde.filename = filename
        isonde.add_attr('lon', lon)
        isonde.add_attr('lat', lat)
        isonde.add_attr('alt', alt)
        isonde.add_attr('is_gruan',True)
        isonde.add_data('gruan', data, history="GRUAN, %s " %(sonde))
        if save:
            isonde.save(filename=filename, verbose=verbose)
        return isonde
    else:
        attach.add_data('gruan', data, history="GRUAN, %s " %(sonde))
        if save:
            attach.save(verbose=verbose)
        return
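
A hypothetical call for the Lindenberg station ('LIN'); the directory is a placeholder below which GRUAN netCDF files are searched recursively:

    isonde = from_gruan('LIN', directory='/data/gruan', save=False, verbose=1)
    print isonde.gruan[['p', 't', 'r', 'q']].head()  # columns renamed: press->p, temp->t, rh->r
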
Code Example #19
def detect_and_correct(isonde, data='std_data', var='dpd', quality_controlled=True, savename='sdcor', save=True,
                       tfirst=True, daynight=False, verbose=0, **kwargs):
    """Detect and Correct Radiosonde biases from Departure Statistics
    Use ERA-Interim departures to detect breakpoints and
    correct these with a mean and a quantile adjustment going back in time.

    uses raso.timeseries.breakpoint.detection / correction

    Parameters
    ----------
    isonde              radiosonde      Radiosonde class object
    data                str             Radiosonde variable
    var                 str             Variable
    quality_controlled  bool            Use QC to remove flagged values?
    savename            str             store name
    save                bool            Save?
    tfirst              bool            Correct temperature first?
    daynight            bool            Correct day and night soundings separately?
    verbose             int             verbosity

    Additional Parameters
    ---------------------
    thres               int             [50]    SNHT Threshold
    quantilen           list/array      [0-100] Quantile Ranges
    levels              list            [None]  Pressure levels
    sample_size         int             [730]   minimum sample size
    borders             int             [180]   samples treated as biased before and after a break
    bias30k             bool            [T]     remove the 30 K bias for dpd?
    verbose             int             [0]     verbosity
    kwargs              dict                    passed on to breakpoint.detection / breakpoint.correction

    Returns
    -------

    """
    from ..detect_and_correct import detect_and_correct as detect_and_correct_data
    from ..detect_and_correct import detect_and_correct_daynight
    from .. import standard_dewpoint_depression, standard_water_vapor

    funcid = "[DC] Sonde "
    if not isinstance(isonde, radiosonde):
        raise ValueError(funcid + "requires a radiosonde object!")

    if isonde.is_empty:
        raise ValueError(funcid + "Radiosonde is empty!")

    funcid = "[DC] %s " % isonde.id
    if data not in isonde.vars:
        raise ValueError(funcid + "Required variable (%s) not present" % data)

    prof = getattr(isonde, data).copy()  # GET DATA
    print_verbose(funcid+"Savename: %s" % savename, verbose)
    if hasnames(prof, 'qual'):
        #
        # drop all the values that have a qual flag
        #
        if quality_controlled:
            journal(funcid + "QC of %s " % data, isonde.history, verbose)
            prof = enforcer(prof)  # Work on FLAGS, but not DPD 30, makes a copy
            #
            # set DPD30 to missing
            #
            if hasnames(prof, 'dpd'):
                prof['dpd'] = np.where(prof.qual.str.contains('3'), np.nan, prof.dpd.values)  # Apply? BAD, GOOD

            if hasnames(prof, 'td'):
                prof['td'] = np.where(prof.qual.str.contains('3'), np.nan, prof.td.values)  # Apply? BAD, GOOD

        del prof['qual']  # prof.drop('qual', 1, inplace=True)  # remove all flag information
        print_verbose(funcid + " dropping qual ...", verbose)

    if hasnames(prof, 'orig'):
        del prof['orig']  # prof.drop('orig', 1, inplace=True)  # indicates interpolated or not

    if 'dpd' in var:
        if not hasnames(prof, 'dpd_era'):
            prof['dpd_era'] = prof['t_era'] - prof['td_era']
            print_verbose(funcid + " Calculating dpd_era ...", verbose)

    tbreaks = None
    dbreaks = None
    if var == 't' and tfirst:
        tfirst = False
        print_verbose(funcid + "tfirst disabled: it only applies to temperature-dependent variables", 1)

    if tfirst:
        journal(funcid + "Running T D&C first! ", isonde.history, verbose)
        #
        # Only Mean Adjustment for Temperature
        #
        if not daynight:
            prof, tbreaks = detect_and_correct_data(prof, var='t', correct_q=False, bounded=None,
                                                    report=isonde.history, verbose=verbose - 1, **kwargs)
        else:
            prof, tbreaks = detect_and_correct_daynight(prof, var='t', correct_q=False, bounded=None,
                                                        report=isonde.history, verbose=verbose - 1, **kwargs)
            tbreaks['breaks'] = tbreaks['00Z'] + tbreaks['12Z']  # 00Z and 12Z breaks
        #
        # new Columns: t_mcor, t_dep, t_dep_breaks, t_dep_snht
        #
        if len(tbreaks['breaks']) > 0:
            journal(funcid + "T-breaks: %s" % str(tbreaks['breaks']), isonde.history, verbose)
            # prof.major_axis.name = 'date'
            # prof.minor_axis.name = 'p'
            # prof = panel_to_database(prof)  # to DataFrame -> Function requires it > Deprecated now
            # Recalculate Temperature Dependent Variables:
            prof = standard_water_vapor(prof, tvar='t_mcor', vpvar='vp_tcor', replace=True, report=isonde.history)
            prof = standard_dewpoint_depression(prof, tvar='t_mcor', dpdvar='dpd_tcor', vpvar='vp_tcor',
                                                tdvar='td_tcor', replace=True, report=isonde.history)
            del prof['qual']  # prof.drop('qual', 1, inplace=True)  # remove quality Flag again
            # prof.rename(items={var: '%s_orig' % var}, inplace=True)  # Rochade
            # prof.rename(items={'%s_tcor' % var: var}, inplace=True)  #
            #
            if hasnames(prof, '%s_tcor' % var):
                journal(funcid + "Running t-correct %s D&C ..." % var, isonde.history, verbose)
                prof['%s_tcor_era' % var] = prof['%s_era' % var]
                if not daynight:
                    prof, dbreaks = detect_and_correct_data(prof, var='%s_tcor' % var, report=isonde.history,
                                                            verbose=verbose - 1, **kwargs)
                else:
                    prof, dbreaks = detect_and_correct_daynight(prof, var='%s_tcor' % var, report=isonde.history,
                                                                verbose=verbose - 1, **kwargs)
                    dbreaks['breaks'] = dbreaks['00Z'] + dbreaks['12Z']  # Combine

                del prof['%s_tcor_era' % var]
            # prof.rename(items={var: '%s_tcor' % var,
            #                    '%s_mcor' % var: '%s_tcor_mcor' % var,
            #                    '%s_qcor' % var: '%s_tcor_qcor' % var,
            #                    '%s_qecor' % var: '%s_tcor_qecor' % var,
            #                    '%s_dep' % var: '%s_tcor_dep' % var,
            #                    '%s_dep_breaks' % var: '%s_tcor_dep_breaks' % var,
            #                    '%s_dep_snht' % var: '%s_tcor_dep_snht' % var,
            #                    '%s_orig' % var: var}, inplace=True)

        else:
            journal(funcid + "No T breakpoints. ", isonde.history, verbose)

        # prof = panel_to_database(prof)  # Convert to DataFrame (after Detection) > Deprecated now

    journal(funcid + "Running %s D&C" % var, isonde.history, verbose)
    if not daynight:
        prof, stat = detect_and_correct_data(prof, var=var, report=isonde.history, verbose=verbose, **kwargs)

    else:
        prof, stat = detect_and_correct_daynight(prof, var=var, report=isonde.history, verbose=verbose, **kwargs)
        stat['breaks'] = stat['00Z'] + stat['12Z']  # Combine lists

    #
    isonde.add_data(savename, prof, replace=True, verbose=verbose)  # DID anything change ?
    #
    # Options
    #
    thres = kwargs.get('thres', 50)
    borders = kwargs.get('borders', 180)
    sample_size = kwargs.get('sample_size', 730)
    quantilen = kwargs.get('quantilen', np.arange(0, 101, 10))  # missing from history > add
    journal(funcid + "%s (T:%d, N:%d, B:%d, Q:%d)" % (var, int(thres), sample_size, borders, len(quantilen)),
            isonde.history, 0)
    if tbreaks is not None and len(tbreaks['breaks']) > 0:
        stat['t-breaks'] = tbreaks['breaks']
        if dbreaks is not None and len(dbreaks['breaks']) > 0:
            stat['t-dpd-breaks'] = dbreaks['breaks']

    stat['thres'] = thres
    stat['borders'] = borders
    stat['sample_size'] = sample_size
    stat['quantilen'] = quantilen
    stat['source'] = data
    stat['savename'] = savename
    stat['variable'] = var
    stat['daynight'] = daynight

    if 'detect_opts' not in isonde.attrs:
        isonde.add_attr('detect_opts', {"%s_%s" % (savename, var): stat})  # sdcor_dpd
    else:
        isonde.detect_opts["%s_%s" % (savename, var)] = stat  # update

    if save:
        isonde.save(var=savename, update=True, verbose=verbose)

    return stat
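A minimal usage sketch for the function above, assuming the station's 'std_data' variable (including the ERA-Interim departures) is already in the store; the keyword values repeat the documented defaults, 11035 is only an example ID, and the load_radiosonde entry point is taken from the batch examples below.

import numpy as np
from raso import load_radiosonde  # assumed entry point, as in the batch examples

isonde = load_radiosonde(11035, variables=['std_data'])  # example station ID
stat = detect_and_correct(isonde, data='std_data', var='dpd',
                          quality_controlled=True, savename='sdcor',
                          tfirst=True, daynight=False,
                          thres=50, sample_size=730, borders=180,
                          quantilen=np.arange(0, 101, 10),
                          save=False, verbose=1)
print(stat['breaks'])  # detected breakpoint dates for dpd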
Code example #20
File: batch.py  Project: MBlaschek/radiosonde
def breakpoints(ident, levels=None, outputdir=None, verbose=0):
    """

    Parameters
    ----------
    ident
    levels
    outputdir
    verbose

    Returns
    -------

    """
    from raso import load_radiosonde
    from raso.config import outdir
    import matplotlib.pyplot as plt

    isonde = load_radiosonde(ident, variables=[])
    if outputdir is None:
        outputdir = outdir + '/figures'
    #
    # DATA
    #
    if 'detect_opts' not in isonde.attrs:
        print "No Detection Options found ?", ident
        return

    if levels is None:
        levels = [10000, 30000, 50000, 60000, 70000, 85000, 92500]

    for icor, iopts in isonde.detect_opts.items():
        iname = iopts['savename']
        isonde.load_data(iname)
        ithres = iopts['thres']
        ivar = iopts['variable']
        idata = getattr(isonde, iname)
        daynight = iopts.get('daynight', False)
        #
        # LEVEL
        #
        for ilev in levels:
            if daynight:
                ym = int(idata[ivar, idata.major_axis.hour == 12, ilev].mean())
                breakpoint_timeseries(idata[:, idata.major_axis.hour == 12, :], ivar, p=ilev, post_snht=True,
                                      window=365, min_periods=1, ylim=(ym - 5, ym + 5))
                plt.savefig("%s/%s_%s_%s_12Z_breakstat_%04d.pdf" % (outputdir, ident, iname, ivar, ilev / 100))
                print_verbose("%s/%s_%s_%s_12Z_breakstat_%04d.pdf" % (outputdir, ident, iname, ivar, ilev / 100),
                              verbose)

                ym = int(idata[ivar, idata.major_axis.hour == 0, ilev].mean())
                breakpoint_timeseries(idata[:, idata.major_axis.hour == 0, :], ivar, p=ilev, post_snht=True,
                                      window=365, min_periods=1, ylim=(ym - 5, ym + 5))
                plt.savefig("%s/%s_%s_%s_00Z_breakstat_%04d.pdf" % (outputdir, ident, iname, ivar, ilev / 100))
                print_verbose("%s/%s_%s_%s_00Z_breakstat_%04d.pdf" % (outputdir, ident, iname, ivar, ilev / 100),
                              verbose)

            else:
                ym = int(idata[ivar, :, ilev].mean())
                breakpoint_timeseries(idata, ivar, p=ilev, post_snht=True, window=730, min_periods=1,
                                      ylim=(ym - 5, ym + 5))
                plt.savefig("%s/%s_%s_%s_breakstat_%04d.pdf" % (outputdir, ident, iname, ivar, ilev / 100))
                print_verbose("%s/%s_%s_%s_breakstat_%04d.pdf" % (outputdir, ident, iname, ivar, ilev / 100), verbose)

    plt.close('all')
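A hedged call example for the batch function above; the station ID and output directory are placeholders, and levels falls back to the standard list when omitted.

# produces one *_breakstat_*.pdf figure per detection run and pressure level
breakpoints(11035, levels=[30000, 50000, 70000, 85000],
            outputdir='/tmp/figures', verbose=1)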
Code example #21
File: from_gpsro.py  Project: MBlaschek/radiosonde
def from_gpsro(directory, pattern, ident, radius, save=True, esat='murphy_koop', verbose=0):
    """
    Read GPS RO events into a timeseries for a certain location

    Parameters
    ----------
    directory   str         search directory
    pattern     str         search pattern
    ident       int/str     radiosonde ID
    radius      float       maximum distance [km] between the station and a GPS RO event
    save        bool        save radiosonde class object
    verbose     int         verbosity

    Raises
    ------
    ValueError     wrong input types
    RuntimeError   no GPS RO event within specified distance
    """
    from . import from_store
    from raso.config import rasodir
    from raso import radiosonde

    if not isinstance(directory, str):
        raise ValueError("Requires a string: directory")

    if not isinstance(pattern, str):
        raise ValueError("Requires a string: pattern")

    if not isinstance(ident, (int, str, float)):
        raise ValueError("Requires an int, str or float: ident")

    if not isinstance(radius, (float, int)):
        raise ValueError("Requires an int or float: radius")

    if isinstance(ident, (int, float)):
        ident = "%06d" % ident

    name = "gpsro_%06d" % int(radius)
    iname = "gpsro_infos_%06d" % int(radius)
    filename = rasodir + "/%s/%s.h5" % (ident, name)
    # RECOVER
    if os.path.isfile(filename):
        print_verbose("Recovering previous: %s" % filename, verbose)
        sonde = from_store(ident, variables=[name, iname])
        return sonde

    if not os.path.isdir(rasodir + "/%s" % ident):
        os.makedirs(rasodir + "/%s" % ident)

    try:
        sonde = from_store(ident, variables=[])  # Reads the empty Radiosonde class >> IOError
        ilon = sonde.lon  # >> Attribute Error
        ilat = sonde.lat

    except IOError:
        sonde = radiosonde(ident)  # Make a new one
        sondes = radiosondelist()
        ilon = sondes.ix[ident, 'lon'].values
        ilat = sondes.ix[ident, 'lat'].values

    except AttributeError:
        sondes = radiosondelist()
        ilon = sondes.ix[ident, 'lon'].values
        ilat = sondes.ix[ident, 'lat'].values

    if ilon is None or ilat is None:
        raise RuntimeError("Can't find any useful lon, lat Information on Radiosonde: %s" % ident)

    infos, data = read_nearest_events(directory, pattern, ilon, ilat, radius, esat=esat, verbose=verbose)
    journal("GPSRO Data: r=%d km [%s/%s]" % (radius, directory, pattern), sonde.history, verbose)
    sonde.add_attr('lon', ilon)
    sonde.add_attr('lat', ilat)
    sonde.add_data(iname, infos)
    sonde.add_data(name, data)
    if save:
        sonde.save(verbose=verbose)
    else:
        return sonde
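A usage sketch, assuming the GPS RO profiles are NetCDF files whose names match the given pattern; the directory, pattern and station ID are placeholders.

sonde = from_gpsro('/data/gpsro', 'atmPrf_*.nc', 11035, 100.,
                   save=False, esat='murphy_koop', verbose=1)
print(sonde.vars)  # should now include 'gpsro_000100' and 'gpsro_infos_000100'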
Code example #22
File: batch.py  Project: MBlaschek/radiosonde
def trends(ident, levels=None, outputdir=None, verbose=0):
    """

    Parameters
    ----------
    ident
    levels
    outputdir
    verbose

    Returns
    -------

    """
    from raso import load_radiosonde
    from raso.config import outdir
    import matplotlib.pyplot as plt

    yplevs = [1000, 3000, 7000, 12500, 17500, 22500, 30000, 40000, 50000, 60000, 85000, 92500]  # Pressure Levels, Ticks

    isonde = load_radiosonde(ident, variables=[])
    if outputdir is None:
        outputdir = outdir + '/figures'

    #
    # DATA
    #

    if levels is None:
        levels = [10000, 30000, 50000, 60000, 70000, 85000, 92500]

    if 'detect_opts' not in isonde.attrs:
        print "No Detection Options found ?", ident
        return

    for icor, iopts in isonde.detect_opts.items():
        iname = iopts['savename']
        isonde.load_data(iname)
        ivar = iopts['variable']
        idata = getattr(isonde, iname)
        daynight = iopts.get('daynight', False)
        #
        # Profile
        #
        varis = [ivar, '%s_mcor' % ivar, '%s_qcor' % ivar, '%s_qecor' % ivar, '%s_era_adj' % ivar, '%s_era' % ivar]
        varnames = ['U', 'M', 'Q', 'QE', 'EA', 'E']

        # Period 1979 - 2015 (36y)
        if daynight:
            ax, lgd = _trend_plotter(idata[varis, idata.major_axis.hour == 12, :], slice('1979', '2015'), varis, yplevs,
                                     varnames)
            plt.savefig('%s/%s_%s_%s_trends_12Z_79-15.pdf' % (outputdir, ident, iname, ivar), bbox_extra_artists=(lgd,),
                        bbox_inches='tight')
            print_verbose('%s/%s_%s_%s_trends_12Z_79-15.pdf' % (outputdir, ident, iname, ivar), verbose)
            ax, lgd = _trend_plotter(idata[varis, idata.major_axis.hour == 0, :], slice('1979', '2015'), varis, yplevs,
                                     varnames)
            plt.savefig('%s/%s_%s_%s_trends_00Z_79-15.pdf' % (outputdir, ident, iname, ivar), bbox_extra_artists=(lgd,),
                        bbox_inches='tight')
            print_verbose('%s/%s_%s_%s_trends_00Z_79-15.pdf' % (outputdir, ident, iname, ivar), verbose)

        else:
            ax, lgd = _trend_plotter(idata[varis], slice('1979', '2015'), varis, yplevs, varnames)
            plt.savefig('%s/%s_%s_%s_trends_79-15.pdf' % (outputdir, ident, iname, ivar), bbox_extra_artists=(lgd,),
                        bbox_inches='tight')
            print_verbose('%s/%s_%s_%s_trends_79-15.pdf' % (outputdir, ident, iname, ivar), verbose)

        # Period 1990 - 2015 (25y)
        if daynight:
            ax, lgd = _trend_plotter(idata[varis, idata.major_axis.hour == 12, :], slice('1990', '2015'), varis, yplevs,
                                     varnames)
            plt.savefig('%s/%s_%s_%s_trends_12Z_90-15.pdf' % (outputdir, ident, iname, ivar), bbox_extra_artists=(lgd,),
                        bbox_inches='tight')
            print_verbose('%s/%s_%s_%s_trends_12Z_90-15.pdf' % (outputdir, ident, iname, ivar), verbose)
            ax, lgd = _trend_plotter(idata[varis, idata.major_axis.hour == 0, :], slice('1990', '2015'), varis, yplevs,
                                     varnames)
            plt.savefig('%s/%s_%s_%s_trends_00Z_90-15.pdf' % (outputdir, ident, iname, ivar), bbox_extra_artists=(lgd,),
                        bbox_inches='tight')
            print_verbose('%s/%s_%s_%s_trends_00Z_90-15.pdf' % (outputdir, ident, iname, ivar), verbose)

        else:
            ax, lgd = _trend_plotter(idata[varis], slice('1990', '2015'), varis, yplevs, varnames)
            plt.savefig('%s/%s_%s_%s_trends_90-15.pdf' % (outputdir, ident, iname, ivar), bbox_extra_artists=(lgd,),
                        bbox_inches='tight')
            print_verbose('%s/%s_%s_%s_trends_90-15.pdf' % (outputdir, ident, iname, ivar), verbose)

        # Period 1980 - 2000 (20y)
        if daynight:
            ax, lgd = _trend_plotter(idata[varis, idata.major_axis.hour == 12, :], slice('1980', '2000'), varis, yplevs,
                                     varnames)
            plt.savefig('%s/%s_%s_%s_trends_12Z_80-00.pdf' % (outputdir, ident, iname, ivar), bbox_extra_artists=(lgd,),
                        bbox_inches='tight')
            print_verbose('%s/%s_%s_%s_trends_12Z_80-00.pdf' % (outputdir, ident, iname, ivar), verbose)
            ax, lgd = _trend_plotter(idata[varis, idata.major_axis.hour == 0, :], slice('1980', '2000'), varis, yplevs,
                                     varnames)
            plt.savefig('%s/%s_%s_%s_trends_00Z_80-00.pdf' % (outputdir, ident, iname, ivar), bbox_extra_artists=(lgd,),
                        bbox_inches='tight')
            print_verbose('%s/%s_%s_%s_trends_00Z_80-00.pdf' % (outputdir, ident, iname, ivar), verbose)

        else:
            ax, lgd = _trend_plotter(idata[varis], slice('1980', '2000'), varis, yplevs, varnames)
            plt.savefig('%s/%s_%s_%s_trends_80-00.pdf' % (outputdir, ident, iname, ivar), bbox_extra_artists=(lgd,),
                        bbox_inches='tight')
            print_verbose('%s/%s_%s_%s_trends_80-00.pdf' % (outputdir, ident, iname, ivar), verbose)

        # Period 1985 - 2015 (30y)
        if daynight:
            ax, lgd = _trend_plotter(idata[varis, idata.major_axis.hour == 12, :], slice('1985', '2015'), varis, yplevs,
                                     varnames)
            plt.savefig('%s/%s_%s_%s_trends_12Z_85-15.pdf' % (outputdir, ident, iname, ivar), bbox_extra_artists=(lgd,),
                        bbox_inches='tight')
            print_verbose('%s/%s_%s_%s_trends_12Z_85-15.pdf' % (outputdir, ident, iname, ivar), verbose)
            ax, lgd = _trend_plotter(idata[varis, idata.major_axis.hour == 0, :], slice('1985', '2015'), varis, yplevs,
                                     varnames)
            plt.savefig('%s/%s_%s_%s_trends_00Z_85-15.pdf' % (outputdir, ident, iname, ivar), bbox_extra_artists=(lgd,),
                        bbox_inches='tight')
            print_verbose('%s/%s_%s_%s_trends_00Z_85-15.pdf' % (outputdir, ident, iname, ivar), verbose)

        else:
            ax, lgd = _trend_plotter(idata[varis], slice('1985', '2015'), varis, yplevs, varnames)
            plt.savefig('%s/%s_%s_%s_trends_85-15.pdf' % (outputdir, ident, iname, ivar), bbox_extra_artists=(lgd,),
                        bbox_inches='tight')
            print_verbose('%s/%s_%s_%s_trends_85-15.pdf' % (outputdir, ident, iname, ivar), verbose)

        plt.close('all')
        #
        # LEVEL
        #
        for ilev in levels:
            if daynight:
                fig, ax = plt.subplots(len(varis), 1, figsize=(12, len(varis)), sharex=True, sharey=True)
                ym = int(idata[ivar, idata.major_axis.hour == 12, ilev].mean())
                for j, jvar in enumerate(varis):
                    if jvar in idata.items:
                        ax[j] = timeseries_line_trend(idata[:, idata.major_axis.hour == 12, :], jvar,
                                                   '%s_dep_snht' % ivar, ilev, '%s [K]' % varnames[j], ax=ax[j],
                                                   plabel=ym + 4, ylim=(ym - 5, ym + 5), window=365,
                                                   label=varnames[j])
                    ax[j].grid(True)
                    ax[j].set_xlim('1979', '2016')
                # lgd = ax.legend(bbox_to_anchor=(1.1, 1), loc='upper left', fontsize=10)
                ax[-1].set_xlabel('Time (RM:365)')
                plt.savefig('%s/%s_%s_%s_trends_12Z_%04d.pdf' % (outputdir, ident, iname, ivar, ilev / 100)) #, bbox_extra_artists=(lgd,), bbox_inches='tight')
                print_verbose('%s/%s_%s_%s_trends_12Z_%04d.pdf' % (outputdir, ident, iname, ivar, ilev / 100), verbose)
                plt.close()
                #
                fig, ax = plt.subplots(1, 1, figsize=(12, 4))
                ym = int(idata[ivar, idata.major_axis.hour == 0, ilev].mean())
                for j, jvar in enumerate(varis):
                    if jvar in idata.items:
                        ax = timeseries_line_trend(idata[:, idata.major_axis.hour == 0, :], jvar,
                                                   '%s_dep_snht' % ivar, ilev, '%s [K]' % ivar, ax=ax,
                                                   plabel=ym + 4, ylim=(ym - 5, ym + 5), window=365,
                                                   label=varnames[j])
                ax.grid(True)
                lgd = ax.legend(bbox_to_anchor=(1.1, 1), loc='upper left', fontsize=10)
                ax.set_xlim('1979', '2016')
                ax.set_xlabel('Time (RM:365)')
                plt.savefig('%s/%s_%s_%s_trends_00Z_%04d.pdf' % (outputdir, ident, iname, ivar, ilev / 100),
                            bbox_extra_artists=(lgd,), bbox_inches='tight')
                print_verbose('%s/%s_%s_%s_trends_00Z_%04d.pdf' % (outputdir, ident, iname, ivar, ilev / 100), verbose)
                plt.close()

            else:
                fig, ax = plt.subplots(1, 1, figsize=(12, 4))
                ym = int(idata[ivar, :, ilev].mean())
                for j, jvar in enumerate(varis):
                    if jvar in idata.items:
                        ax = timeseries_line_trend(idata, jvar, '%s_dep_snht' % ivar, ilev, '%s [K]' % ivar,
                                                   ax=ax, plabel=ym + 4, ylim=(ym - 5, ym + 5), window=730,
                                                   label=varnames[j])
                ax.grid(True)
                lgd = ax.legend(bbox_to_anchor=(1.1, 1), loc='upper left', fontsize=10)
                ax.set_xlim('1979', '2016')
                ax.set_xlabel('Time (RM:730)')
                plt.savefig('%s/%s_%s_%s_trends_%04d.pdf' % (outputdir, ident, iname, ivar, ilev / 100),
                            bbox_extra_artists=(lgd,), bbox_inches='tight')
                print_verbose('%s/%s_%s_%s_trends_%04d.pdf' % (outputdir, ident, iname, ivar, ilev / 100), verbose)
                plt.close()
    plt.close('all')
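A hedged call example matching the batch style above; station ID and output directory are placeholders.

# writes trend profile figures for the four periods plus per-level time series
trends(11035, outputdir='/tmp/figures', verbose=1)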
Code example #23
File: from_mars.py  Project: MBlaschek/radiosonde
def from_mars(ident, var=None, filename=None, save=False, update=True, mars_save=False, attach=None,
              force=False, verbose=0, **kwargs):
    """Read MARS (ECMWF) dump from ODB

    1. Read MARS dump or ODB dump
    2. Convert to pandas dataframes

    Typical data in mars dump:

    data      RAW radiosonde data (time,p, variables)
    an        Analysis departures ERA-Interim
    fg        First-guess departures ERA-Interim
    bias      Bias estimate
    station   Station (lon,lat,alt,sonde_type)


    Parameters
    ----------
    ident       int/str     Radiosonde ID
    var         list        Variables in the MARS
    filename    str         filename of mars dump
    save        bool        save radiosonde class?
    mars_save   bool        save mars dump to HDF5?
    attach      radiosonde  attach to this radiosonde
    verbose     int         verbosity
    kwargs      dict        additional keyword arguments

    Returns
    -------
    radiosonde

    Examples
    --------
    >>> isonde = from_mars(11035, verbose=1)
    """
    from raso import radiosonde

    selection = False
    if var is not None:
        if isinstance(var, str):
            var = [var]

        for ivar in var:
            if ivar not in ['data', 'bias', 'fg', 'an']:
                raise ValueError("Unknown variable! %s" % ivar)

        selection = True

    try:
        ident = int(ident)  # ident needs to be a number here
        tmp = mars_to_dataframe(ident, filename=filename, save=mars_save, verbose=verbose - 1)
        print_verbose('[MARS] %d : %s' % (ident, str(tmp.shape)), verbose)
        station = tmp[['lon', 'lat', 'alt', 'sonde_type']].drop_duplicates()

        if 'std_time' in tmp.columns:
            # legacy support
            tmp.drop('std_time', axis=1, inplace=True)

        tmp.drop(['sonde_type', 'obs_type', 'code_type', 'id', 'lon', 'lat', 'alt'], axis=1, inplace=True)
        # ['varno' 'p' 'value' 'biascorr' 'fg_dep' 'an_dep']
        tmp.replace({'varno': {2: 't', 3: 'u', 4: 'v', 7: 'q', 29: 'r'}}, inplace=True)  # rename varno to vars
        # group by index and p
        data = tmp.groupby([tmp.index, tmp.p, tmp.varno]).first()['value'].unstack(level=-1).reset_index().rename(
            columns={'level_0': 'date'}).set_index('date', drop=True)
        #
        if (selection and 'bias' in var) or not selection:
            bias = tmp.groupby([tmp.index, tmp.p, tmp.varno]).first()['biascorr'].unstack(
                level=-1).reset_index().rename(columns={'level_0': 'date'}).set_index('date', drop=True)
            bias.ix[:, ~(bias.columns == 'p')] = data.ix[:, ~(data.columns == 'p')] + bias.ix[:, ~(bias.columns == 'p')]
        #
        if (selection and 'fg' in var) or not selection:
            fg = tmp.groupby([tmp.index, tmp.p, tmp.varno]).first()['fg_dep'].unstack(level=-1).reset_index().rename(
                columns={'level_0': 'date'}).set_index('date', drop=True)
            fg.ix[:, ~(fg.columns == 'p')] = data.ix[:, ~(data.columns == 'p')] + fg.ix[:, ~(fg.columns == 'p')]
        #
        if (selection and 'an' in var) or not selection:
            an = tmp.groupby([tmp.index, tmp.p, tmp.varno]).first()['an_dep'].unstack(level=-1).reset_index().rename(
                columns={'level_0': 'date'}).set_index('date', drop=True)
            an.ix[:, ~(an.columns == 'p')] = data.ix[:, ~(data.columns == 'p')] + an.ix[:, ~(an.columns == 'p')]

        if attach is None:
            out = radiosonde(id=ident)

        else:
            out = attach
        #
        if selection:
            print_verbose("MARS: %s" % (str(var)), verbose)
            for ivar in var:
                out.add_data(ivar, locals()[ivar])

        else:
            out.add_data('data', data)
            out.add_data('bias', bias)
            out.add_data('fg', fg)
            out.add_data('an', an)
        #
        out.add_data('station', station)
        try:
            out.add_attr('lon', station.ix[-1, 'lon'])
            out.add_attr('lat', station.ix[-1, 'lat'])
            out.add_attr('alt', station.ix[-1, 'alt'])

        except IndexError:
            out.add_attr('lon', -1)
            out.add_attr('lat', -1)
            out.add_attr('alt', -1)

        if attach is None:
            if save:
                out.save(update=update, force=force, verbose=verbose)

            else:
                return out

    except Exception:
        # no dedicated error handling yet; re-raise so the caller sees the full traceback
        raise
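Building on the doctest in the docstring, a hedged sketch of the selective read and the attach workflow; 11035 is only an example ID and a MARS dump for that station is assumed to be available.

isonde = from_mars(11035, verbose=1)               # full read: data, bias, fg, an, station
from_mars(11035, var=['an', 'fg'], attach=isonde)  # re-read only the departures onto the same object
print(isonde.vars)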