Example 1
def batch_process(name,
                  data=None,
                  write_tif=True,
                  write_png=True,
                  resultsdir=None,
                  interpolate=True,
                  verbose=False):
    """Interpolate rain composites to 1 min resolution and save the frames."""
    rc = data['rain_c']
    tstr = data['tim']
    tparser = lambda ts: datetime.datetime.strptime(ts[0][0], '%Y%m%d_%H%M%S')
    t = map(tparser, tstr)
    tr = list(map(round_datetime, t))  # materialize for slicing below
    n_timesteps = rc.shape[2]
    rs = [rc[:, :, i] for i in range(n_timesteps)]
    i = 0
    for j, (r0, r1, t0, t1) in enumerate(zip(rs, rs[1:], tr, tr[1:])):
        if interpolate:
            dt = t1 - t0
            dt_minutes = int(dt.total_seconds() / 60)
            if dt_minutes > 5:
                eprint('Long interpolation period of {} minutes.'.format(
                    dt_minutes))
            ri = interpolation.interp(r0, r1, n=dt_minutes - 1)
            ra = [r0] + ri
        else:
            ra = [r0]
        for extra_minutes, r in enumerate(ra):
            i += 1
            tim = t0 + datetime.timedelta(minutes=extra_minutes)
            if verbose:
                print(str(tim))
            basename = tim.strftime('%Y%m%d_%H%M')
            if write_png:
                fig, axdd = plot_frame(r, j, tim, data=data)
                framedir = ensure_join(resultsdir, name, 'png')
                framepath = path.join(framedir, basename + '.png')
                fig.savefig(framepath)
                plt.close(fig)
            if write_tif:
                tifdir = ensure_join(resultsdir, name, 'tif')
                tif_out_fpath = path.join(tifdir, basename + '.tif')
                meta = {'TIFFTAG_DATETIME': str(tim)}
                savetif(r.astype(rasterio.float32), meta, tif_out_fpath)
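
A standalone sketch of the per-minute timestamp bookkeeping above; it assumes
interpolation.interp(r0, r1, n) returns a list of n intermediate frames, as the
concatenation with [r0] implies.

import datetime

t0 = datetime.datetime(2016, 8, 18, 12, 0)   # rounded time of frame r0
t1 = datetime.datetime(2016, 8, 18, 12, 5)   # rounded time of frame r1
dt_minutes = int((t1 - t0).total_seconds() / 60)  # 5
# ra = [r0] + interp(..., n=dt_minutes - 1) holds one frame per minute
stamps = [t0 + datetime.timedelta(minutes=m) for m in range(dt_minutes)]
# stamps covers 12:00 ... 12:04; 12:05 is written as r0 of the next pair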
Example 2
def plot_centroids_ensemble(cc, kdp_max=None, no_t=False, **kws):
    """class centroids as separate figs"""
    from scr_plot_ensemble import lineboxplots
    if kdp_max is not None:
        plotting.vis.VMAXS['KDP'] = kdp_max
    name = conf.SCHEME_ID_RAIN if cc.has_ml else conf.SCHEME_ID_SNOW
    savedir = ensure_join(RESULTS_DIR, 'classes_summary', name,
                          'class_vp_ensemble')
    fields = ['kdp', 'zdr', 'zh']
    if not (cc.has_ml or no_t):
        fields.append('T')
    axarrlist = lineboxplots(cc, savedir=savedir, fields=fields, **kws)
    if kdp_max is not None:
        # reload to restore the default VMAXS overridden above
        import importlib
        importlib.reload(plotting.vis)
    return axarrlist
Example 3
def plot_quicklooks(cases_iterator,
                    save=True,
                    saveid='everything',
                    params=None,
                    savedir=None,
                    **kws):
    """Plot and save quicklooks."""
    params = params or ['zh', 'zdr', 'kdp', 'RHO']
    if save:
        savedir = savedir or ensure_join(RESULTS_DIR, 'quicklooks', saveid)
    for caseid, c in cases_iterator:
        print(caseid)
        fig, _ = c.plot(params=params, **kws)
        if save:
            filename = path.join(savedir, c.name() + '.png')
            fig.savefig(filename, bbox_inches='tight')
            plt.close(fig)
Example 4
def make_plots(vpc,
               save_plots=False,
               savedir=None,
               plt_silh=True,
               plt_sca=True,
               plt_top=True):
    """class summary and statistics plots"""
    fig, axarr, i = vpc.plot_cluster_centroids(
        colorful_bars='blue', fig_scale_factor=0.8)  #, cmap='viridis')
    ax_sca = vpc.scatter_class_pca(plot3d=True) if plt_sca else None
    #
    if plt_silh:
        fig_s, ax_s = plt.subplots(dpi=110)
        vpc.plot_silhouette(ax=ax_s)
    else:
        ax_s = None
    #
    stat = bm_stats(vpc)
    fig_bm, ax_bm = plt.subplots(dpi=110)
    plotting.plot_bm_stats(stat, ax=ax_bm)
    #
    if plt_top:
        fig_top, ax_top = plt.subplots(dpi=110)
        plotting.boxplot_t_echotop(vpc,
                                   ax=ax_top,
                                   whis=[2.5, 97.5],
                                   showfliers=False)
    else:
        ax_top = None
    #
    if save_plots:
        savekws = {'bbox_inches': 'tight'}
        if savedir is None:
            name = vpc.name()
            savedir = ensure_join(RESULTS_DIR, 'classes_summary', name)
        fig.savefig(path.join(savedir, 'centroids.png'), **savekws)
        if plt_silh:
            fig_s.savefig(path.join(savedir, 'silhouettes.png'), **savekws)
        fig_bm.savefig(path.join(savedir, 'benchmark.png'), **savekws)
        if plt_top:
            fig_top.savefig(path.join(savedir, 't_top.png'), **savekws)
        cl_data_std = vpc.cl_data_scaled.std().mean()
        cl_data_std.name = 'std'
        cl_data_std.to_csv(path.join(savedir, 'cl_data_stats.csv'))
    return axarr, ax_sca, ax_s, ax_bm, ax_top
Example 5
def subdir_vpc(vpc, subdir):
    name = vpc.name()
    return ensure_join(RESULTS_DIR, subdir, name)
Example 6
    return cases.case.iteritems()


def iterate_mat2case(datadir, fname_glob='*.mat'):
    """iterator over case objects from data files in a directory"""
    datafiles = glob(path.join(datadir, fname_glob))
    for datafile in datafiles:
        cid = path.basename(datafile)[:8]  # date prefix of the file name as case id
        try:
            c = case.Case.from_mat(datafile)
            yield cid, c
        except ValueError as e:
            print(cid, e)


if __name__ == '__main__':
    interactive = False
    plt.close('all')
    #params = ['ZH', 'ZDR', 'KDP', 'RHO']
    params = None
    datadir = path.expanduser('~/DATA/vprhi2')
    savedir = ensure_join(datadir, 'quicklooks', 'jet')
    iterator = iterate_mat2case(datadir)  #, fname_glob='201501*.mat')
    if interactive:
        plt.ion()
        save = False
    else:
        plt.ioff()
        save = True
    plot_quicklooks(iterator, save=save, params=params, savedir=savedir)
Example 7
N_COMB_INTERVALS = 2

dtformat_default = '%Y-%m-%d %H:%M'
dtformat_snex = '%Y %d %B %H UTC'
dtformat_paper = '%Y %b %d %H:%M'
# selection criteria; density == density keeps only rows where density is not NaN
cond = lambda df: (df.intensity > 0.2) & (df.D_0_gamma > 0.6) & \
                  (df.density == df.density) & (df['count'] > 800)
RHO_LIMITS = (0, 100, 200, 1000)
#rholimits = (0, 150, 300, 800)
resultspath = path.join(RESULTS_DIR, 'pip2015')
paperpath = path.join(resultspath, 'paper')

paths = {
    'results': ensure_dir(resultspath),
    'paper': paperpath,
    'tables': ensure_join(paperpath, 'tables')
}
files = {
    'h5nov14': path.join(DATA_DIR, '2014nov1-23.h5'),
    'h5w1415': path.join(DATA_DIR, 'dec-jan1415.h5'),
    'h5baecc': H5_PATH,
    'params_cache': path.join(caching.CACHE_DIR,
                              'param_table' + caching.MSGTLD)
}


def cases_filepath(name):
    return path.join(USER_DIR, 'cases', name + '.csv')


def find_interval(x, limits=(0, 100, 200, 1000)):
Example 8
if __name__ == '__main__':
    # TODO: weird memory leak with rain cases
    save = True
    if save:
        plt.ioff()
    else:
        plt.ion()
    plt.close('all')
    rain_season = True
    if DEBUG:
        rain_season = True
        case_set = 'rain_vpc1'
    else:
        case_set = conf.CASES_RAIN if rain_season else conf.CASES_SNOW
    name = conf.SCHEME_ID_RAIN if rain_season else conf.SCHEME_ID_SNOW
    cases = conf.init_cases(cases_id=case_set)
    if DEBUG:
        results_dir = ensure_join(RESULTS_DIR, 'debug', name, case_set)
    else:
        results_dir = ensure_join(RESULTS_DIR, 'classified', name, case_set)
    for i, c in cases.case.iteritems():
        print(i)
        c.load_classification(name)
        try:
            c.load_pluvio()
        except FileNotFoundError:
            warn('Pluvio data not found.')
        #c.plot_classes()
        #c.plot_cluster_centroids()
        fig, axarr = c.plot(params=['kdp', 'zh', 'zdr'],
                            n_extra_ax=0,
                            plot_extras=['ts', 'silh', 'cl'],
                            t_contour_ax_ind='all',
Example 9
N_COMB_INTERVALS = 2

dtformat_default = '%Y-%m-%d %H:%M'
dtformat_snex = '%Y %d %B %H UTC'
dtformat_paper = '%Y %b %d %H:%M'
# selection criteria; density == density keeps only rows where density is not NaN
cond = lambda df: (df.intensity > 0.2) & (df.D_0_gamma > 0.6) & \
                  (df.density == df.density) & (df['count'] > 800)
RHO_LIMITS = (0, 100, 200, 1000)
#rholimits = (0, 150, 300, 800)
resultspath = path.join(RESULTS_DIR, 'pip2015')
paperpath = path.join(resultspath, 'paper')

paths = {'results': ensure_dir(resultspath),
         'paper': paperpath,
         'tables': ensure_join(paperpath, 'tables')}
files = {'h5nov14': path.join(DATA_DIR, '2014nov1-23.h5'),
         'h5w1415': path.join(DATA_DIR, 'dec-jan1415.h5'),
         'h5baecc': H5_PATH,
         'params_cache': path.join(caching.CACHE_DIR,
                                   'param_table' + caching.MSGTLD)}


def cases_filepath(name):
    return path.join(USER_DIR, 'cases', name + '.csv')

def find_interval(x, limits=(0, 100, 200, 1000)):
    """Find rightmost value less than x and leftmost value greater than x."""
    i = bisect.bisect_right(limits, x)
    return limits[i-1:i+1]
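
For reference, the bracketing behaviour of find_interval (values chosen only
for illustration):

>>> find_interval(150)
(100, 200)
>>> find_interval(1200, limits=RHO_LIMITS)
(1000,)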

Example 10
                       extra_weight=0.75)

VPC_PARAMS_RAIN = dict(basename='rain',
                       has_ml=True,
                       params=PARAMS,
                       hlimits=(290, 10e3),
                       n_eigens=30,
                       n_clusters=10,
                       reduced=True)

SEED = 0

SCHEME_ID_SNOW = classification.scheme_name(**VPC_PARAMS_SNOW)
SCHEME_ID_RAIN = classification.scheme_name(**VPC_PARAMS_RAIN)

P1_FIG_DIR = ensure_join(RESULTS_DIR, 'paper1')
POSTER_FIG_DIR = ensure_join(RESULTS_DIR, 'poster')


def init_cases(cases_id=None, season=''):
    """initialize cases data"""
    if cases_id is None:
        if season == 'snow':
            cases_id = CASES_SNOW
        elif season == 'rain':
            cases_id = CASES_RAIN
    cases = multicase.read_cases(cases_id)
    if cases.ml.astype(bool).all():
        cases = cases[cases['ml_ok'].fillna(0).astype(bool)]
    return cases
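
A minimal usage sketch, assuming the case list ids referenced above
(CASES_SNOW, CASES_RAIN) are defined earlier in the module:

cases = init_cases(season='snow')  # load the snow-season case list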
Example 11
# coding: utf-8
from __future__ import absolute_import, division, print_function, unicode_literals
__metaclass__ = type

import pandas as pd
from os import path
from glob import glob
from baecc.instruments import pluvio, pip_psd, pip_v
from j24 import home, ensure_join


RESULTS_DIR = ensure_join(home(), 'results', 'density18')


def make_hdf():
    datadir = path.join(home(), 'DATA', 'HYY_DenData')
    p200dir = path.join(datadir, 'Pluvio200')
    p400dir = path.join(datadir, 'Pluvio400')
    pipdir = path.join(datadir, 'PIP')
    psddir = path.join(pipdir, 'f_1_4_DSD_Tables_ascii')
    vdir = path.join(pipdir, 'f_2_2_Velocity_Tables')
    h5file = path.join(datadir, 'annakaisa15-18.h5')
    pluv_paths = dict(pluvio200=p200dir, pluvio400=p400dir)
    instr = dict()
    # read pluvio data
    for p in ['pluvio200', 'pluvio400']:
        fnames = glob(path.join(pluv_paths[p], '*.txt'))
        fnames.sort()
        pluv = pluvio.Pluvio(fnames, name=p)
        # keep only rows where i_rt is numeric (drop malformed entries)
        selection = pluv.data.i_rt.apply(lambda x: type(x) == float)
        pluv.data = pluv.data[selection].astype(float)
Example 12
    border = basemap.border()

    x0 = 1.1e5
    y0 = 6.55e6
    x1 = 6.5e5
    y1 = 7e6
    #urls = fmi.available_maps(**t_range)
    fakeindex = pd.date_range(start=t_range['starttime'],
                              end=t_range['endtime'],
                              freq='5min')
    urls = pd.Series(index=fakeindex)  # fake urls
    #url = fmi.gen_url(timestamp='2017-10-17T07:00:00Z')
    dl = fmi.available_maps().tail(2)
    #fmi.download_maps(urls)
    paths = fmi.download_maps(dl)
    savedir = ensure_join(home(), 'results', 'sataako')

    ### FORECAST AND SAVE LOGIC ###
    rads = paths.apply(rasterio.open)
    crops, tr, meta = raster.crop_rasters(rads, **raster.DEFAULT_CORNERS)
    dtype = meta['dtype']
    rad_crs = rads.iloc[0].read_crs().data
    rads.apply(lambda x: x.close())
    rr = raster.raw2rr(crops)
    fcast = forecast.forecast(rr)
    savepaths = fcast.copy()
    pngpaths = fcast.copy()
    for t, fc in fcast.iteritems():
        savepath = path.join(savedir, t.strftime(fmi.FNAME_FORMAT))
        savepaths.loc[t] = savepath
        raster.write_rr_geotiff(fc, meta, savepath)
Example 13
# coding: utf-8

import matplotlib.pyplot as plt
from os import path
from radcomp.vertical import case
from j24 import ensure_join

if __name__ == '__main__':
    plt.close('all')
    datadir = path.expanduser('~/DATA/IKA/20180818RHI')
    #datadir = path.expanduser('~/DATA/IKA/test')
    vpdir = path.expanduser('~/results/radcomp/vertical/vp_dmitri')
    vpfiles = []
    #vpfiles.append(path.join(vpdir, '20160818_IKA_VP_from_RHI.mat'))
    #vpfiles.append(path.join(vpdir, '20160818_IKA_VP_from_RHI_1km_median.mat'))
    #vpfiles.append(path.join(vpdir, '20160818_IKA_VP_from_RHI_1km_mean.mat'))
    vpfiles.append(
        path.join(vpdir, '20160818_IKA_VP_from_RHI_1km_median_maesaka.mat'))
    vpfiles.append(path.join(vpdir, '20160818_IKA_vprhi_1km_median_m.mat'))
    for vpfile in vpfiles:
        c = case.Case.from_mat(vpfile)
        figc, axarrc = c.plot(params=['ZH', 'KDP', 'kdp', 'ZDR', 'zdr', 'RHO'],
                              cmap='viridis',
                              plot_snd=False)
        #figc, axarrc = c.plot(params=['KDP', 'kdp'], cmap='viridis', plot_snd=False)
        fname = path.basename(vpfile)
        axarrc[0].set_title(fname)
        figdir = ensure_join(vpdir, 'png')
        figpath = path.join(figdir, path.splitext(fname)[0] + '.png')
        figc.savefig(figpath, bbox_inches='tight')
Example 14
# coding: utf-8

import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from os import path
from datetime import timedelta
from j24 import home, ensure_join

CACHE_DIR = ensure_join(home(), '.pysonde', 'cache')
CACHE_KEY_FMT = 'wyo%Y%m%d%H'


def round_hours(timestamp, hres=12):
    """round timestamp to hres hours"""
    tt = timestamp + timedelta(hours=hres / 2)
    dt = timedelta(hours=tt.hour % hres, minutes=tt.minute, seconds=tt.second)
    return tt - dt
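
Illustration of the rounding with the default 12 h resolution:

>>> round_hours(pd.Timestamp('2016-08-18 14:35'))
Timestamp('2016-08-18 12:00:00')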


def sounding_url(t, dtype='text'):
    out_type = dict(pdf='PDF%3ASTUVE', text='TEXT%3ASIMPLE')
    baseurl = 'http://weather.uwyo.edu/cgi-bin/sounding'
    query = '?region=europe&TYPE={type}&YEAR={year}&MONTH={month:02d}&FROM={day:02d}{hour:02d}&TO={day:02d}{hour:02d}&STNM=02963'
    urlformat = baseurl + query
    return urlformat.format(type=out_type[dtype],
                            year=t.year,
                            month=t.month,
                            day=t.day,
                            hour=t.hour)
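
The generated URL for a given timestamp, for reference:

>>> sounding_url(pd.Timestamp('2018-08-18 12:00'))
'http://weather.uwyo.edu/cgi-bin/sounding?region=europe&TYPE=TEXT%3ASIMPLE&YEAR=2018&MONTH=08&FROM=1812&TO=1812&STNM=02963'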
Example 15
# coding: utf-8
"""radcomp"""

import locale
from os import path
from j24 import ensure_join


locale.setlocale(locale.LC_ALL, 'C')
HOME = path.expanduser('~')
USER_DIR = path.join(HOME, '.radcomp')
RESULTS_DIR = ensure_join(HOME, 'results', 'radcomp')
CACHE_DIR = ensure_join(HOME, '.cache', 'radcomp')
CACHE_TMP_DIR = ensure_join(CACHE_DIR, 'tmp')
Example 16
# coding: utf-8
from __future__ import absolute_import, division, print_function, unicode_literals

__metaclass__ = type

import pandas as pd
from os import path
from glob import glob
from baecc.instruments import pluvio, pip_psd, pip_v
from j24 import home, ensure_join

RESULTS_DIR = ensure_join(home(), 'results', 'density18')


def make_hdf():
    datadir = path.join(home(), 'DATA', 'HYY_DenData')
    p200dir = path.join(datadir, 'Pluvio200')
    p400dir = path.join(datadir, 'Pluvio400')
    pipdir = path.join(datadir, 'PIP')
    psddir = path.join(pipdir, 'f_1_4_DSD_Tables_ascii')
    vdir = path.join(pipdir, 'f_2_2_Velocity_Tables')
    h5file = path.join(datadir, 'annakaisa15-18.h5')
    pluv_paths = dict(pluvio200=p200dir, pluvio400=p400dir)
    instr = dict()
    # read pluvio data
    for p in ['pluvio200', 'pluvio400']:
        fnames = glob(path.join(pluv_paths[p], '*.txt'))
        fnames.sort()
        pluv = pluvio.Pluvio(fnames, name=p)
        selection = pluv.data.i_rt.apply(lambda x: type(x) == float)
        pluv.data = pluv.data[selection].astype(float)
Example 17
def scatter_kdpg(kdpg, t, ax=None):
    ax = ax or plt.gca()
    selection = ~np.isnan(kdpg.values.flatten())
    kdpg_flat = kdpg.values.flatten()[selection]
    t_flat = t.values.flatten()[selection]
    scatter_kde(kdpg_flat, t_flat, ax=ax)
    ax.set_xlim(left=-2, right=6)
    ax.set_ylim(bottom=-35, top=0)
    ax.invert_yaxis()
    ax.set_ylabel(LABELS['T'])
    ax.set_xlabel(LABELS['KDPG'])
    ax.axvline(0, color='black')
    hlines(ax)
    return ax


if __name__ == '__main__':
    plt.ioff()
    for cl in range(cc.vpc.n_clusters):
        fig, axarr = plt.subplots(1, 2, figsize=(10, 5))
        t = cc.data['T'].loc[:, cc.classes() == cl]
        kdp = cc.data.kdp.loc[:, cc.classes() == cl]
        kdpg = cc.data.kdpg.loc[:, cc.classes() == cl]
        scatter_kdp(kdp, t, ax=axarr[0])
        title = 'Class {}'.format(cl)
        scatter_kdpg(kdpg, t, ax=axarr[1])
        fig.suptitle(title)
        outdir = ensure_join(RESULTS_DIR, 'kdp-t_scatter')
        outfile = path.join(outdir, 'cl{}.png'.format(cl))
        fig.savefig(outfile, bbox_inches='tight')
        plt.close(fig)
Example 18
            fileseasonchar = seasonchar.lower() + fname_extra
            fname = fnamefmt.format(seasonchar=fileseasonchar, cl=cl)
            fpath = path.join(savedir, fname)
            print(fpath)
            fig.savefig(fpath, **SAVE_KWS)
        axarrlist.append(axarr)
    return axarrlist


if __name__ == '__main__':
    plt.ioff()
    plt.close('all')
    cases_id = 'snow'
    rain_season = cases_id in ('rain', )
    flag = 'ml_ok' if rain_season else None
    c = multicase.MultiCase.from_caselist(cases_id,
                                          filter_flag=flag,
                                          has_ml=rain_season)
    name = conf.SCHEME_ID_RAIN if rain_season else conf.SCHEME_ID_SNOW
    c.load_classification(name)
    savedir = ensure_join(RESULTS_DIR, 'classes_summary', name,
                          'class_vp_ensemble')
    axarrlist = lineboxplots(
        c,
        savedir=savedir,
        #xlim_override=True,
        fields=('kdp', 'zdr', 'zh', 'T'))
    if not rain_season:
        fig, ax, lines = plotting.boxplot_t_surf(c)
        fig.savefig(path.join(savedir, 't_boxplot.png'), **SAVE_KWS)
Example 19
# -*- coding: utf-8 -*-
"""
@author: Jussi Tiira
"""
import matplotlib.pyplot as plt
from os import path
from j24 import home, ensure_join

from scr_snowfall import pip2015events

#plt.close('all')
plt.ion()

basepath = ensure_join(home(), 'results', 'pip2015', 'density')
dtfmt = '%Y%m%d'

e = pip2015events()

rho_label = 'bulk density (kg m$^{-3}$)'
t_label = 'time'

for c in e.events.paper.values:
    savepath = basepath
    rho = c.density()
    rho.to_csv(path.join(savepath, c.dtstr(dtfmt) + '.csv'))
    tdelta_path = path.join(savepath, 'timedelta_' + c.dtstr(dtfmt) + '.csv')
    c.instr['pluvio'].tdelta().to_csv(tdelta_path)
    plt.figure(dpi=120)
    rho.plot(drawstyle='steps')
    plt.title(c.dtstr())
    plt.xlabel(t_label)
    plt.ylabel(rho_label)