Example No. 1
def main():
    """Go Main"""
    df = get_database_data()
    print(df)
    vals = {}
    labels = {}
    for wfo, row in df.iterrows():
        if wfo == 'JSJ':
            wfo = 'SJU'
        vals[wfo] = row['percent']
        labels[wfo] = '%.0f%%' % (row['percent'], )
        #if row['count'] == 0:
        #    labels[wfo] = '-'

    bins = np.arange(0, 101, 10)    
    #bins = [1, 25, 50, 75, 100, 125, 150, 200, 300]
    #bins = [-50, -25, -10, -5, 0, 5, 10, 25, 50]
    # bins[0] = 1
    #clevlabels = ['N', 'NE', 'E', 'SE', 'S', 'SW', 'W', 'NW', 'N']
    cmap = plt.get_cmap('PuOr')
    mp = MapPlot(sector='nws', continentalcolor='white', figsize=(12., 9.),
                 title=("2018 Percentage of Time with 1+ Flood Warning Active"),
                 subtitle=('1 January - 30 September 2018, based on IEM archives'))
    mp.fill_cwas(vals, bins=bins, lblformat='%s', labels=labels,
                 cmap=cmap, ilabel=True,  # clevlabels=clevlabels,
                 units='percent')
    
    mp.postprocess(filename='test.png')
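
The pattern above recurs throughout these examples: build a dict keyed by the three-character WFO identifier, optionally a parallel dict of label strings, and hand both to fill_cwas. Below is a minimal, self-contained sketch of that input shape; the WFO values are hypothetical illustrations, not results from the source project.

import numpy as np
import matplotlib.pyplot as plt
from pyiem.plot import MapPlot


def sketch_percent_map():
    """Fill a few CWAs with hypothetical percentage values."""
    vals = {'DMX': 42.0, 'OUN': 17.0, 'SJU': 63.0}  # hypothetical values
    labels = {wfo: '%.0f%%' % (v, ) for wfo, v in vals.items()}
    mp = MapPlot(sector='nws', continentalcolor='white',
                 title='Hypothetical Percent of Time with a Warning Active',
                 subtitle='illustrative values only')
    mp.fill_cwas(vals, bins=np.arange(0, 101, 10), labels=labels,
                 lblformat='%s', cmap=plt.get_cmap('PuOr'),
                 ilabel=True, units='percent')
    mp.postprocess(filename='sketch.png')
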
Example No. 2
def plotter(fdict):
    """ Go """
    import matplotlib
    matplotlib.use('agg')
    from pyiem.plot import MapPlot
    utc = datetime.datetime.utcnow()
    bins = [0, 1, 14, 31, 91, 182, 273, 365, 730, 1460, 2920, 3800]
    pgconn = psycopg2.connect(database='postgis', host='iemdb', user='******')
    cursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)

    phenomena = fdict.get('phenomena', 'TO')
    significance = fdict.get('significance', 'W')

    cursor.execute("""
     select wfo,  extract(days from ('TODAY'::date - max(issue))) as m
     from warnings where significance = %s and phenomena = %s
     GROUP by wfo ORDER by m ASC
    """, (significance, phenomena))
    data = {}
    rows = []
    for row in cursor:
        wfo = row[0] if row[0] != 'JSJ' else 'SJU'
        rows.append(dict(wfo=wfo, days=row[1]))
        data[wfo] = max([row[1], 0])
    df = pd.DataFrame(rows)

    m = MapPlot(sector='nws', axisbg='white', nocaption=True,
                title='Days since Last %s %s by NWS Office' % (
                        vtec._phenDict.get(phenomena, phenomena),
                        vtec._sigDict.get(significance, significance)),
                subtitle='Valid %s' % (utc.strftime("%d %b %Y %H%M UTC"),))
    m.fill_cwas(data, bins=bins, ilabel=True, units='Days',
                lblformat='%.0f')

    return m.fig, df
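
Several of these plotters remap the identifier 'JSJ' (how San Juan appears in the warnings table) to 'SJU' (the key fill_cwas expects for that CWA). A tiny helper could centralize that; this sketch assumes the JSJ/SJU pair is the only remapping needed, which is the only case handled on this page.

def normalize_wfo(wfo):
    """Return the WFO identifier that MapPlot.fill_cwas expects."""
    return 'SJU' if wfo == 'JSJ' else wfo
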
Example No. 3
def main():
    """Go MAin"""
    df = pd.read_csv('flood_emergencies.csv')
    df2 = df[['source', 'eventid', 'phenomena', 'significance', 'year']
             ].drop_duplicates()
    gdf = df2.groupby('source').count()
    vals = {}
    labels = {}
    for wfo, row in gdf.iterrows():
        if wfo == 'TJSJ':
            wfo = 'SJU'
        else:
            wfo = wfo[1:]
        vals[wfo] = int(row['eventid'])
        labels[wfo] = "%s" % (row['eventid'], )

    bins = list(range(0, 31, 3))
    bins[0] = 1.
    cmap = plt.get_cmap('plasma_r')
    cmap.set_over('black')
    cmap.set_under('white')
    mp = MapPlot(sector='nws', continentalcolor='white', figsize=(12., 9.),
                 title=("2003-2018 Flash Flood Emergency Events"),
                 subtitle=('based on unofficial IEM archives, searching '
                           '"FFW", "FLW", "FFS".'))
    mp.fill_cwas(vals, bins=bins, lblformat='%s', labels=labels,
                 cmap=cmap, ilabel=True,  # clevlabels=month_abbr[1:],
                 units='count')
    mp.postprocess(filename='test.png')
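
A note on the cmap.set_over()/set_under() calls above: recent Matplotlib releases warn when a registered colormap instance is mutated, so copying it first is the safer pattern. This is a hedged sketch against Matplotlib 3.4+, where Colormap.copy() is available; the original example predates that concern.

import matplotlib.pyplot as plt

cmap = plt.get_cmap('plasma_r').copy()  # copy so the registered colormap is untouched
cmap.set_over('black')   # values above the last bin edge
cmap.set_under('white')  # values below the first bin edge (counts of zero here)
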
Example No. 4
def plotter(fdict):
    """ Go """
    pgconn = get_dbconn("postgis")
    ctx = get_autoplot_context(fdict, get_description())
    sts = ctx["sdate"]
    sts = sts.replace(tzinfo=pytz.utc)
    ets = ctx["edate"]
    by = ctx["by"]
    ets = ets.replace(tzinfo=pytz.utc)
    myfilter = ctx["filter"]
    if myfilter == "NONE":
        tlimiter = ""
    elif myfilter == "NRS":
        tlimiter = " and typetext not in ('HEAVY RAIN', 'SNOW', 'HEAVY SNOW') "
    elif myfilter == "CON":
        tlimiter = (" and typetext in ('TORNADO', 'HAIL', 'TSTM WND GST', "
                    "'TSTM WND DMG') ")
    else:
        tlimiter = " and typetext = '%s' " % (myfilter, )

    df = read_sql(
        """
    WITH data as (
        SELECT distinct wfo, state, valid, type, magnitude, geom from lsrs
        where valid >= %s and valid < %s """ + tlimiter + """
    )
    SELECT """ + by + """, count(*) from data GROUP by """ + by + """
    """,
        pgconn,
        params=(sts, ets),
        index_col=by,
    )
    data = {}
    for idx, row in df.iterrows():
        if idx == "JSJ":
            idx = "SJU"
        data[idx] = row["count"]
    maxv = df["count"].max()
    bins = np.linspace(1, maxv, 12, dtype="i")
    bins[-1] += 1
    mp = MapPlot(
        sector="nws",
        axisbg="white",
        title=("Preliminary/Unfiltered Local Storm Report Counts %s") %
        (PDICT[by], ),
        subtitlefontsize=10,
        subtitle=("Valid %s - %s UTC, type limiter: %s") % (
            sts.strftime("%d %b %Y %H:%M"),
            ets.strftime("%d %b %Y %H:%M"),
            MDICT.get(myfilter),
        ),
    )
    cmap = plt.get_cmap(ctx["cmap"])
    if by == "wfo":
        mp.fill_cwas(data, bins=bins, cmap=cmap, ilabel=True)
    else:
        mp.fill_states(data, bins=bins, cmap=cmap, ilabel=True)

    return mp.fig, df
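
The bin construction above (np.linspace from 1 to the maximum count, then nudging the last edge) is a recurring idiom on this page; bumping bins[-1] keeps the largest observed value inside the top, right-open interval. A small standalone sketch of the assumed intent:

import numpy as np


def make_count_bins(maxv, nbins=12):
    """Integer-spaced levels from 1 to maxv, with the top edge bumped past the max."""
    bins = np.linspace(1, maxv, nbins, dtype='i')
    bins[-1] += 1  # so a count equal to maxv still falls in the last bin
    return bins


print(make_count_bins(57))  # 12 integer edges from 1 up to 58
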
Example No. 5
def main():
    """Go Main Go"""
    cursor = POSTGIS.cursor()
    cursor2 = POSTGIS.cursor()

    phenomena = 'WS'

    cursor.execute("""
    SELECT ugc, issue, init_expire, wfo from warnings where phenomena = %s and
    significance = 'A' and issue > '2005-10-01' ORDER by issue ASC
    """, (phenomena, ))
    total = cursor.rowcount
    print('Total events: %s' % (total, ))

    hits = {}
    hits2 = {}
    totals = {}
    misses = 0
    for row in tqdm(cursor, total=total):
        wfo = row[3]
        if wfo not in hits:
            hits[wfo] = {}
        if wfo not in totals:
            totals[wfo] = 0
        totals[wfo] += 1
        cursor2.execute("""
        SELECT distinct phenomena, significance from warnings
        where ugc = %s and expire > %s and issue < %s and wfo = %s
        """, (row[0], row[1], row[2], wfo))
        for row2 in cursor2:
            key = "%s.%s" % (row2[0], row2[1])
            if key not in hits[wfo]:
                hits[wfo][key] = 0
            hits[wfo][key] += 1
            if key not in hits2:
                hits2[key] = 0
            hits2[key] += 1
        if cursor2.rowcount == 0:
            misses += 1

    data = {}
    for wfo in hits:
        data[wfo] = hits[wfo].get(
                    '%s.W' % (phenomena,), 0) / float(totals[wfo]) * 100.0

    mp = MapPlot(sector='nws', axisbg='white',
                 title=("Conversion [%] of Winter Storm Watch "
                        "Counties/Parishes into Winter Storm Warnings"),
                 titlefontsize=14,
                 subtitle=('1 Oct 2005 - 29 Mar 2018, Overall %s/%s %.1f%%'
                           ) % (hits2['%s.W' % (phenomena, )], total,
                                hits2['%s.W' % (phenomena, )] / float(total) * 100.))
    mp.fill_cwas(data, ilabel=True, lblformat='%.0f')
    mp.postprocess(filename='test.png')

    print('Misses %s %.1f%%' % (misses, misses / float(total) * 100.0))
    for key in hits2:
        print('%s %s %.1f%%' % (key, hits2[key],
                                hits2[key] / float(total) * 100.0))
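
The nested "if key not in hits[wfo]" bookkeeping above can be expressed more compactly with collections.defaultdict; this sketch mirrors the same counts and is only an equivalent restatement, not code from the source script.

from collections import defaultdict

hits = defaultdict(lambda: defaultdict(int))  # hits[wfo][key] -> count
hits2 = defaultdict(int)                      # key -> count across all offices
totals = defaultdict(int)                     # wfo -> number of watch rows
# inside the loop over watch counties/zones:
#     totals[wfo] += 1
#     for phen, sig in overlapping_rows:      # hypothetical iterable of (phenomena, significance)
#         key = '%s.%s' % (phen, sig)
#         hits[wfo][key] += 1
#         hits2[key] += 1
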
Example No. 6
def plotter(fdict):
    """ Go """
    pgconn = get_dbconn('postgis')
    ctx = get_autoplot_context(fdict, get_description())
    sts = ctx['sdate']
    sts = sts.replace(tzinfo=pytz.utc)
    ets = ctx['edate']
    by = ctx['by']
    ets = ets.replace(tzinfo=pytz.utc)
    myfilter = ctx['filter']
    if myfilter == 'NONE':
        tlimiter = ''
    elif myfilter == 'NRS':
        tlimiter = " and typetext not in ('HEAVY RAIN', 'SNOW', 'HEAVY SNOW') "
    elif myfilter == 'CON':
        tlimiter = (" and typetext in ('TORNADO', 'HAIL', 'TSTM WND GST', "
                    "'TSTM WND DMG') ")
    else:
        tlimiter = " and typetext = '%s' " % (myfilter, )

    df = read_sql("""
    WITH data as (
        SELECT distinct wfo, state, valid, type, magnitude, geom from lsrs
        where valid >= %s and valid < %s """ + tlimiter + """
    )
    SELECT """ + by + """, count(*) from data GROUP by """ + by + """
    """,
                  pgconn,
                  params=(sts, ets),
                  index_col=by)
    data = {}
    for idx, row in df.iterrows():
        if idx == 'JSJ':
            idx = 'SJU'
        data[idx] = row['count']
    maxv = df['count'].max()
    bins = np.linspace(1, maxv, 12, dtype='i')
    bins[-1] += 1
    mp = MapPlot(
        sector='nws',
        axisbg='white',
        title=('Preliminary/Unfiltered Local Storm Report Counts %s') %
        (PDICT[by], ),
        subtitlefontsize=10,
        subtitle=('Valid %s - %s UTC, type limiter: %s') %
        (sts.strftime("%d %b %Y %H:%M"), ets.strftime("%d %b %Y %H:%M"),
         MDICT.get(myfilter)))
    cmap = plt.get_cmap(ctx['cmap'])
    if by == 'wfo':
        mp.fill_cwas(data, bins=bins, cmap=cmap, ilabel=True)
    else:
        mp.fill_states(data, bins=bins, cmap=cmap, ilabel=True)

    return mp.fig, df
Example No. 7
def plotter(fdict):
    """ Go """
    import matplotlib
    matplotlib.use('agg')
    from pyiem.plot import MapPlot
    bins = [0, 1, 14, 31, 91, 182, 273, 365, 730, 1460, 2920, 3800]
    pgconn = get_dbconn('postgis')
    cursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    ctx = get_autoplot_context(fdict, get_description())
    phenomena = ctx['phenomena']
    significance = ctx['significance']
    edate = ctx.get('edate')
    if edate is not None:
        edate = utc(edate.year, edate.month, edate.day, 0, 0)
        cursor.execute(
            """
         select wfo,  extract(days from (%s::date - max(issue))) as m
         from warnings where significance = %s and phenomena = %s
         and issue < %s
         GROUP by wfo ORDER by m ASC
        """, (edate, significance, phenomena, edate))
    else:
        cursor.execute(
            """
         select wfo,  extract(days from ('TODAY'::date - max(issue))) as m
         from warnings where significance = %s and phenomena = %s
         GROUP by wfo ORDER by m ASC
        """, (significance, phenomena))
        edate = datetime.datetime.utcnow()

    if cursor.rowcount == 0:
        raise ValueError(
            ("No Events Found for %s (%s.%s)") % (vtec.get_ps_string(
                phenomena, significance), phenomena, significance))
    data = {}
    rows = []
    for row in cursor:
        wfo = row[0] if row[0] != 'JSJ' else 'SJU'
        rows.append(dict(wfo=wfo, days=row[1]))
        data[wfo] = max([row[1], 0])
    df = pd.DataFrame(rows)
    df.set_index('wfo', inplace=True)

    mp = MapPlot(sector='nws',
                 axisbg='white',
                 nocaption=True,
                 title='Days since Last %s by NWS Office' %
                 (vtec.get_ps_string(phenomena, significance), ),
                 subtitle='Valid %s' % (edate.strftime("%d %b %Y %H%M UTC"), ))
    mp.fill_cwas(data, bins=bins, ilabel=True, units='Days', lblformat='%.0f')

    return mp.fig, df
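
The cursor-to-DataFrame step above (and in Examples 2, 10, 12 and 15) follows the same shape: remap JSJ, clamp negative day counts, and keep both a plotting dict and a DataFrame for the return value. A self-contained sketch with hypothetical rows:

import pandas as pd

rows = [('DMX', 12), ('JSJ', -1), ('OUN', 240)]  # hypothetical (wfo, days) tuples
records = []
data = {}
for wfo, days in rows:
    wfo = 'SJU' if wfo == 'JSJ' else wfo  # same remap as the examples above
    records.append(dict(wfo=wfo, days=days))
    data[wfo] = max(days, 0)              # clamp negative day counts to zero, as the originals do
df = pd.DataFrame(records).set_index('wfo')
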
Example No. 8
def plot():
    """Make a pretty plot"""
    df = pd.read_csv('wfo.csv')
    df.set_index('wfo', inplace=True)
    m = MapPlot(sector='conus',
                title="Percentage of Flash Flood Watches receiving 1+ FFW",
                subtitle='PRELIMINARY PLOT! Please do not share :)')
    cmap = plt.get_cmap('jet')
    df2 = df[df['freq'].notnull()]
    m.fill_cwas(df2['freq'].to_dict(), cmap=cmap, units='%',
                lblformat='%.0f', ilabel=True)
    m.postprocess(filename='test.png')
    m.close()
Example No. 9
def make_map():
    """Generate a map plot"""
    df = pd.read_csv('vertex_intersects.csv')
    gdf = df.groupby('wfo').sum()
    allvals = (gdf['allhits'] - gdf['cwahits']) / gdf['verticies'] * 100.
    avgv = ((gdf['allhits'].sum() - gdf['cwahits'].sum()) /
            gdf['verticies'].sum() * 100.)
    mp = MapPlot(sector='nws', continentalcolor='white',
                 title=('Percent of SVR+TOR Warning Vertices within 2km '
                        'of County Border'),
                 subtitle=('1 Oct 2007 through 29 Mar 2018, '
                           'Overall Avg: %.1f%%, * CWA Borders Excluded'
                           ) % (avgv,))
    mp.fill_cwas(allvals.to_dict(), ilabel=True, lblformat='%.0f')
    mp.postprocess(filename='test.png')
Example No. 10
File: p92.py Project: akrherz/iem
def plotter(fdict):
    """ Go """
    import matplotlib
    matplotlib.use('agg')
    from pyiem.plot import MapPlot
    bins = [0, 1, 14, 31, 91, 182, 273, 365, 730, 1460, 2920, 3800]
    pgconn = psycopg2.connect(database='postgis', host='iemdb', user='******')
    cursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    ctx = get_autoplot_context(fdict, get_description())
    phenomena = ctx['phenomena']
    significance = ctx['significance']
    edate = ctx.get('edate')
    if edate is None:
        edate = datetime.datetime.utcnow()
    else:
        edate = datetime.datetime(edate.year, edate.month,
                                  edate.day, 0, 0)
    edate = edate.replace(tzinfo=pytz.timezone("UTC"))

    cursor.execute("""
     select wfo,  extract(days from (%s::date - max(issue))) as m
     from warnings where significance = %s and phenomena = %s
     and issue < %s
     GROUP by wfo ORDER by m ASC
    """, (edate, significance, phenomena, edate))
    if cursor.rowcount == 0:
        return ("No Events Found for %s %s (%s.%s)"
                ) % (vtec._phenDict.get(phenomena, phenomena),
                     vtec._sigDict.get(significance, significance),
                     phenomena, significance)
    data = {}
    rows = []
    for row in cursor:
        wfo = row[0] if row[0] != 'JSJ' else 'SJU'
        rows.append(dict(wfo=wfo, days=row[1]))
        data[wfo] = max([row[1], 0])
    df = pd.DataFrame(rows)
    df.set_index('wfo', inplace=True)

    m = MapPlot(sector='nws', axisbg='white', nocaption=True,
                title='Days since Last %s %s by NWS Office' % (
                        vtec._phenDict.get(phenomena, phenomena),
                        vtec._sigDict.get(significance, significance)),
                subtitle='Valid %s' % (edate.strftime("%d %b %Y %H%M UTC"),))
    m.fill_cwas(data, bins=bins, ilabel=True, units='Days',
                lblformat='%.0f')

    return m.fig, df
Example No. 11
def plotter(fdict):
    """ Go """
    import matplotlib
    matplotlib.use('agg')
    pgconn = psycopg2.connect(database='postgis', host='iemdb', user='******')
    ctx = get_autoplot_context(fdict, get_description())
    sts = ctx['sdate']
    sts = sts.replace(tzinfo=pytz.utc)
    ets = ctx['edate']
    ets = ets.replace(tzinfo=pytz.utc)
    myfilter = ctx['filter']
    if myfilter == 'NONE':
        tlimiter = ''
    elif myfilter == 'NRS':
        tlimiter = " and typetext not in ('HEAVY RAIN', 'SNOW', 'HEAVY SNOW') "
    elif myfilter == 'CON':
        tlimiter = (" and typetext in ('TORNADO', 'HAIL', 'TSTM WND GST', "
                    "'TSTM WND DMG') ")
    else:
        tlimiter = " and typetext = '%s' " % (myfilter, )

    df = read_sql("""
    SELECT wfo, count(*) from lsrs
    WHERE valid >= %s and valid < %s """ + tlimiter + """
    GROUP by wfo ORDER by wfo ASC
    """,
                  pgconn,
                  params=(sts, ets),
                  index_col='wfo')
    data = {}
    for wfo, row in df.iterrows():
        data[wfo] = row['count']
    maxv = df['count'].max()
    bins = np.linspace(0, maxv, 12, dtype='i')
    bins[-1] += 1
    p = MapPlot(sector='nws',
                axisbg='white',
                title='Local Storm Report Counts by NWS Office',
                subtitlefontsize=10,
                subtitle=('Valid %s - %s UTC, type limiter: %s') %
                (sts.strftime("%d %b %Y %H:%M"),
                 ets.strftime("%d %b %Y %H:%M"), MDICT.get(myfilter)))
    p.fill_cwas(data, bins=bins, ilabel=True)

    return p.fig, df
Example No. 12
def plotter(fdict):
    """ Go """
    import matplotlib
    matplotlib.use('agg')
    from pyiem.plot import MapPlot
    bins = [0, 1, 14, 31, 91, 182, 273, 365, 730, 1460, 2920, 3800]
    pgconn = psycopg2.connect(database='postgis', host='iemdb', user='******')
    cursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    ctx = get_autoplot_context(fdict, get_description())
    phenomena = ctx['phenomena']
    significance = ctx['significance']
    edate = ctx.get('edate')
    if edate is None:
        edate = datetime.datetime.utcnow()
    else:
        edate = datetime.datetime(edate.year, edate.month, edate.day, 0, 0)
    edate = edate.replace(tzinfo=pytz.timezone("UTC"))

    cursor.execute(
        """
     select wfo,  extract(days from (%s::date - max(issue))) as m
     from warnings where significance = %s and phenomena = %s
     and issue < %s
     GROUP by wfo ORDER by m ASC
    """, (edate, significance, phenomena, edate))
    data = {}
    rows = []
    for row in cursor:
        wfo = row[0] if row[0] != 'JSJ' else 'SJU'
        rows.append(dict(wfo=wfo, days=row[1]))
        data[wfo] = max([row[1], 0])
    df = pd.DataFrame(rows)
    df.set_index('wfo', inplace=True)

    m = MapPlot(sector='nws',
                axisbg='white',
                nocaption=True,
                title='Days since Last %s %s by NWS Office' %
                (vtec._phenDict.get(phenomena, phenomena),
                 vtec._sigDict.get(significance, significance)),
                subtitle='Valid %s' % (edate.strftime("%d %b %Y %H%M UTC"), ))
    m.fill_cwas(data, bins=bins, ilabel=True, units='Days', lblformat='%.0f')

    return m.fig, df
Example No. 13
File: p163.py Project: akrherz/iem
def plotter(fdict):
    """ Go """
    import matplotlib
    matplotlib.use('agg')
    pgconn = psycopg2.connect(database='postgis', host='iemdb', user='******')
    ctx = get_autoplot_context(fdict, get_description())
    sts = ctx['sdate']
    sts = sts.replace(tzinfo=pytz.utc)
    ets = ctx['edate']
    ets = ets.replace(tzinfo=pytz.utc)
    myfilter = ctx['filter']
    if myfilter == 'NONE':
        tlimiter = ''
    elif myfilter == 'NRS':
        tlimiter = " and typetext not in ('HEAVY RAIN', 'SNOW') "
    else:
        tlimiter = " and typetext = '%s' " % (myfilter,)

    df = read_sql("""
    SELECT wfo, count(*) from lsrs
    WHERE valid >= %s and valid < %s """ + tlimiter + """
    GROUP by wfo ORDER by wfo ASC
    """, pgconn, params=(sts, ets), index_col='wfo')
    data = {}
    for wfo, row in df.iterrows():
        data[wfo] = row['count']
    maxv = df['count'].max()
    bins = np.linspace(0, maxv, 12, dtype='i')
    bins[-1] += 1
    p = MapPlot(sector='nws', axisbg='white',
                title='Local Storm Report Counts by NWS Office',
                subtitle=('Valid %s - %s UTC, type limiter: %s'
                          ) % (sts.strftime("%d %b %Y %H:%M"),
                               ets.strftime("%d %b %Y %H:%M"),
                               MDICT.get(myfilter)))
    p.fill_cwas(data, bins=bins, ilabel=True)

    return p.fig, df
Example No. 14
def main():
    """Go Main"""
    vals = {}
    for line in data.split("\n"):
        wfo, valid = line.strip().split("|")
        wfo = wfo.strip()
        year = valid.strip()[:4]
        wfo = wfo[1:]
        if wfo == 'JSJ':
            wfo = 'SJU'
        vals[wfo] = int(year)
    print(vals)
    #bins = [1, 25, 50, 75, 100, 125, 150, 200, 300]
    bins = np.arange(2002, 2019, 2)
    cmap = plt.get_cmap('PuOr')
    mp = MapPlot(sector='nws', continentalcolor='white', figsize=(12., 9.),
                 title=("Year of Last RWS Text Product Issuance"),
                 subtitle=('based on IEM archives'))
    mp.fill_cwas(vals, bins=bins, lblformat='%s',  # , labels=labels,
                 cmap=cmap, ilabel=True,  # clevlabels=clevlabels,
                 units='Year')
    
    mp.postprocess(filename='test.png')
Example No. 15
def plotter(fdict):
    """ Go """
    import matplotlib
    matplotlib.use('agg')
    from pyiem.plot import MapPlot
    utc = datetime.datetime.utcnow()
    bins = [0, 1, 14, 31, 91, 182, 273, 365, 730, 1460, 2920, 3800]
    pgconn = psycopg2.connect(database='postgis', host='iemdb', user='******')
    cursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)

    phenomena = fdict.get('phenomena', 'TO')
    significance = fdict.get('significance', 'W')

    cursor.execute(
        """
     select wfo,  extract(days from ('TODAY'::date - max(issue))) as m
     from warnings where significance = %s and phenomena = %s
     GROUP by wfo ORDER by m ASC
    """, (significance, phenomena))
    data = {}
    rows = []
    for row in cursor:
        wfo = row[0] if row[0] != 'JSJ' else 'SJU'
        rows.append(dict(wfo=wfo, days=row[1]))
        data[wfo] = max([row[1], 0])
    df = pd.DataFrame(rows)

    m = MapPlot(sector='nws',
                axisbg='white',
                nocaption=True,
                title='Days since Last %s %s by NWS Office' %
                (vtec._phenDict.get(phenomena, phenomena),
                 vtec._sigDict.get(significance, significance)),
                subtitle='Valid %s' % (utc.strftime("%d %b %Y %H%M UTC"), ))
    m.fill_cwas(data, bins=bins, ilabel=True, units='Days', lblformat='%.0f')

    return m.fig, df
Example No. 16
data = {}
labels = {}
uniq = []
for line in text.split("\n"):
    tokens = line.replace(" ", "").split("|")
    wfo = tokens[0][1:]
    if tokens[0][0] == 'P':
        wfo = tokens[0]
    key = "%s" % (tokens[1], )
    if wfo not in nt.sts:
        continue
    # P
    wfo = tokens[0][1:]
    if not key in uniq:
        uniq.append(key)
    data[wfo] = len(uniq) - 1
    labels[wfo] = key
    if wfo == 'JSJ':
        labels['SJU'] = labels['JSJ']

bins = range(len(uniq) + 1)
uniq.append('')

p = MapPlot(sector='nws',
            axisbg='white',
            title="2009-2013 Most Frequently issued non-SHEF 3char AWIPS ID",
            subtitle='RR* products were excluded from this analysis')
p.fill_cwas(data, bins=bins, labels=labels, lblformat='%s', clevlabels=uniq)
p.postprocess(filename='test.png')
#import iemplot
#iemplot.makefeature('test')
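
The example above uses fill_cwas categorically: each AWIPS ID gets an integer index, the bins are consecutive integers, and clevlabels relabels the colorbar ticks with the ID strings. A hedged, self-contained sketch of that mapping with hypothetical categories:

uniq = ['AFD', 'NOW', 'PNS']            # category order defines the color index
data = {'DMX': 0, 'OAX': 1, 'FSD': 2}   # WFO -> index into uniq
labels = {'DMX': 'AFD', 'OAX': 'NOW', 'FSD': 'PNS'}
bins = list(range(len(uniq) + 1))       # one bin per category
clevlabels = uniq + ['']                # padded as in the example so labels match the bins
# p.fill_cwas(data, bins=bins, labels=labels, lblformat='%s',
#             clevlabels=clevlabels)
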
Example No. 17
from pyiem.plot import MapPlot
import pandas as pd

df = pd.read_csv('vertex.csv', index_col='WFO', na_values=['None'])
df['r'] = (df['CNTY_HITS'] - df['CWA_HITS']) / df['ALL'] * 100.
avgv = (df['CNTY_HITS'] - df['CWA_HITS']).sum() / float(df['ALL'].sum()) * 100.

m = MapPlot(
    sector='nws',
    axisbg='white',
    title='Percent of SVR+TOR Warning Vertices within 2km of County Border',
    subtitle=('1 Oct 2007 through 23 May 2016, Overall Avg: %.1f%%, '
              '* CWA Borders Excluded') % (avgv, ))
m.fill_cwas(df['r'], ilabel=True, lblformat='%.0f')
m.postprocess(filename='test.png')
Example No. 18
def test_plot22():
    """plot cwas that are filled"""
    mp = MapPlot(sector='iowa', continentalcolor='white', nocaption=True)
    mp.fill_cwas({'DMX': 80, 'MKX': 5, 'SJU': 30, 'AJK': 40, 'HFO': 50},
                 units='NWS Something or Another')
    return mp.fig
Example No. 19
    cursor2.execute(
        """
    SELECT distinct phenomena, significance from warnings
    where ugc = %s and expire > %s and issue < %s and wfo = %s 
    """, (row[0], row[1], row[2], wfo))
    for row2 in cursor2:
        key = "%s.%s" % (row2[0], row2[1])
        if key not in hits[wfo]:
            hits[wfo][key] = 0
        hits[wfo][key] += 1
    if cursor2.rowcount == 0:
        misses += 1

data = {}
for wfo in hits.keys():
    data[wfo] = hits[wfo].get('WS.W', 0) / float(totals[wfo]) * 100.0

from pyiem.plot import MapPlot

m = MapPlot(
    sector='nws',
    title=
    'Conversion [%] of Winter Storm Watch Zones into Winter Storm Warnings',
    subtitle='1 Oct 2005 - 28 Feb 2014')
m.fill_cwas(data)
m.postprocess(filename='test.png')

#print 'Misses %s %.1f%%' % (misses, misses / float(total) * 100.0)
#for key in hits.keys():
#    print '%s %s %.1f%%' % (key, hits[key], hits[key] / float(total) * 100.0)
Example No. 20
def plotter(fdict):
    """ Go """
    import matplotlib
    matplotlib.use('agg')
    pgconn = psycopg2.connect(database='postgis', host='iemdb', user='******')

    sts = datetime.datetime.strptime(fdict.get('sdate', '2015-01-01 0000'),
                                     '%Y-%m-%d %H%M')
    sts = sts.replace(tzinfo=pytz.timezone("UTC"))
    ets = datetime.datetime.strptime(fdict.get('edate', '2015-02-01 0000'),
                                     '%Y-%m-%d %H%M')
    ets = ets.replace(tzinfo=pytz.timezone("UTC"))
    p1 = fdict.get('phenomenav1', 'SV')[:2]
    p2 = fdict.get('phenomenav2', '  ')[:2]
    p3 = fdict.get('phenomenav3', '  ')[:2]
    p4 = fdict.get('phenomenav4', '  ')[:2]
    phenomena = []
    for p in [p1, p2, p3, p4]:
        if p != '  ':
            phenomena.append(p)
    s1 = fdict.get('significancev1', 'W')[0]
    s2 = fdict.get('significancev2', ' ')[0]
    s3 = fdict.get('significancev3', ' ')[0]
    s4 = fdict.get('significancev4', ' ')[0]
    significance = []
    for s in [s1, s2, s3, s4]:
        if s != ' ':
            significance.append(s)

    pstr = []
    subtitle = ""
    title = ""
    for p, s in zip(phenomena, significance):
        pstr.append("(phenomena = '%s' and significance = '%s')" % (p, s))
        subtitle += "%s.%s " % (p, s)
        title += "%s %s" % (vtec._phenDict.get(p, p),
                            vtec._sigDict.get(s, s))
    if len(phenomena) > 1:
        title = "VTEC Unique Event"
    pstr = " or ".join(pstr)
    pstr = "(%s)" % (pstr,)

    df = read_sql("""
with total as (
  select distinct wfo, extract(year from issue at time zone 'UTC') as year,
  phenomena, significance, eventid from warnings
  where """ + pstr + """ and
  issue >= %s and issue < %s
)

SELECT wfo, phenomena, significance, year, count(*) from total
GROUP by wfo, phenomena, significance, year
    """, pgconn, params=(sts, ets))

    df2 = df.groupby('wfo')['count'].sum()

    nt = NetworkTable("WFO")
    for sid in nt.sts:
        sid = sid[-3:]
        if sid not in df2:
            df2[sid] = 0
    maxv = df2.max()
    bins = [0, 1, 2, 3, 5, 10, 15, 20, 25, 30, 40, 50, 75, 100, 200]
    if maxv > 1000:
        bins = [0, 1, 5, 10, 50, 100, 150, 200, 250,
                500, 750, 1000, 1250, 1500, 2000]
    elif maxv > 200:
        bins = [0, 1, 3, 5, 10, 20, 35, 50, 75, 100, 150, 200, 250,
                500, 750, 1000]

    p = MapPlot(sector='nws', axisbg='white',
                title='%s Counts by NWS Office' % (title,),
                subtitle=('Valid %s - %s UTC, based on VTEC: %s'
                          ) % (sts.strftime("%d %b %Y %H:%M"),
                               ets.strftime("%d %b %Y %H:%M"),
                               subtitle))
    p.fill_cwas(df2, bins=bins, ilabel=True)

    return p.fig, df
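
The threshold checks above (and in the next two examples) scale the bins to the largest per-office count; testing the larger threshold first keeps every branch reachable. A small helper sketch of that selection, reusing the same bin lists as the examples:

def pick_bins(maxv):
    """Choose count bins appropriate for the largest observed value."""
    if maxv > 1000:
        return [0, 1, 5, 10, 50, 100, 150, 200, 250,
                500, 750, 1000, 1250, 1500, 2000]
    if maxv > 200:
        return [0, 1, 3, 5, 10, 20, 35, 50, 75, 100, 150, 200, 250,
                500, 750, 1000]
    return [0, 1, 2, 3, 5, 10, 15, 20, 25, 30, 40, 50, 75, 100, 200]
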
Example No. 21
def plotter(fdict):
    """ Go """
    import matplotlib
    matplotlib.use('agg')
    pgconn = get_dbconn('postgis')
    ctx = get_autoplot_context(fdict, get_description())
    sts = ctx['sdate']
    sts = sts.replace(tzinfo=pytz.timezone("UTC"))
    ets = ctx['edate']
    ets = ets.replace(tzinfo=pytz.timezone("UTC"))
    p1 = ctx['phenomenav1']
    p2 = ctx['phenomenav2']
    p3 = ctx['phenomenav3']
    p4 = ctx['phenomenav4']
    phenomena = []
    for p in [p1, p2, p3, p4]:
        if p is not None:
            phenomena.append(p[:2])
    s1 = ctx['significancev1']
    s2 = ctx['significancev2']
    s3 = ctx['significancev3']
    s4 = ctx['significancev4']
    significance = []
    for s in [s1, s2, s3, s4]:
        if s is not None:
            significance.append(s[0])

    pstr = []
    subtitle = ""
    title = ""
    for p, s in zip(phenomena, significance):
        pstr.append("(phenomena = '%s' and significance = '%s')" % (p, s))
        subtitle += "%s.%s " % (p, s)
        title += vtec.get_ps_string(p, s)
    if len(phenomena) > 1:
        title = "VTEC Unique Event"
    pstr = " or ".join(pstr)
    pstr = "(%s)" % (pstr,)

    df = read_sql("""
with total as (
  select distinct wfo, extract(year from issue at time zone 'UTC') as year,
  phenomena, significance, eventid from warnings
  where """ + pstr + """ and
  issue >= %s and issue < %s
)

SELECT wfo, phenomena, significance, year, count(*) from total
GROUP by wfo, phenomena, significance, year
    """, pgconn, params=(sts, ets))

    df2 = df.groupby('wfo')['count'].sum()

    nt = NetworkTable("WFO")
    for sid in nt.sts:
        sid = sid[-3:]
        if sid not in df2:
            df2[sid] = 0
    maxv = df2.max()
    bins = [0, 1, 2, 3, 5, 10, 15, 20, 25, 30, 40, 50, 75, 100, 200]
    if maxv > 1000:
        bins = [0, 1, 5, 10, 50, 100, 150, 200, 250,
                500, 750, 1000, 1250, 1500, 2000]
    elif maxv > 200:
        bins = [0, 1, 3, 5, 10, 20, 35, 50, 75, 100, 150, 200, 250,
                500, 750, 1000]

    mp = MapPlot(sector='nws', axisbg='white',
                 title='%s Counts by NWS Office' % (title,),
                 subtitle=('Valid %s - %s UTC, based on VTEC: %s'
                           ) % (sts.strftime("%d %b %Y %H:%M"),
                                ets.strftime("%d %b %Y %H:%M"),
                                subtitle))
    mp.fill_cwas(df2, bins=bins, ilabel=True)

    return mp.fig, df
Example No. 22
File: p109.py Project: nbackas/iem
def plotter(fdict):
    """ Go """
    import matplotlib
    matplotlib.use('agg')
    pgconn = psycopg2.connect(database='postgis', host='iemdb', user='******')

    sts = datetime.datetime.strptime(fdict.get('sdate', '2015-01-01 0000'),
                                     '%Y-%m-%d %H%M')
    sts = sts.replace(tzinfo=pytz.timezone("UTC"))
    ets = datetime.datetime.strptime(fdict.get('edate', '2015-02-01 0000'),
                                     '%Y-%m-%d %H%M')
    ets = ets.replace(tzinfo=pytz.timezone("UTC"))
    p1 = fdict.get('phenomenav1', 'SV')[:2]
    p2 = fdict.get('phenomenav2', '  ')[:2]
    p3 = fdict.get('phenomenav3', '  ')[:2]
    p4 = fdict.get('phenomenav4', '  ')[:2]
    phenomena = []
    for p in [p1, p2, p3, p4]:
        if p != '  ':
            phenomena.append(p)
    s1 = fdict.get('significancev1', 'W')[0]
    s2 = fdict.get('significancev2', ' ')[0]
    s3 = fdict.get('significancev3', ' ')[0]
    s4 = fdict.get('significancev4', ' ')[0]
    significance = []
    for s in [s1, s2, s3, s4]:
        if s != ' ':
            significance.append(s)

    pstr = []
    subtitle = ""
    title = ""
    for p, s in zip(phenomena, significance):
        pstr.append("(phenomena = '%s' and significance = '%s')" % (p, s))
        subtitle += "%s.%s " % (p, s)
        title += "%s %s" % (vtec._phenDict.get(p, p), vtec._sigDict.get(s, s))
    if len(phenomena) > 1:
        title = "VTEC Unique Event"
    pstr = " or ".join(pstr)
    pstr = "(%s)" % (pstr, )

    df = read_sql("""
with total as (
  select distinct wfo, extract(year from issue at time zone 'UTC') as year,
  phenomena, significance, eventid from warnings
  where """ + pstr + """ and
  issue >= %s and issue < %s
)

SELECT wfo, phenomena, significance, year, count(*) from total
GROUP by wfo, phenomena, significance, year
    """,
                  pgconn,
                  params=(sts, ets))

    df2 = df.groupby('wfo')['count'].sum()

    nt = NetworkTable("WFO")
    for sid in nt.sts:
        sid = sid[-3:]
        if sid not in df2:
            df2[sid] = 0
    maxv = df2.max()
    bins = [0, 1, 2, 3, 5, 10, 15, 20, 25, 30, 40, 50, 75, 100, 200]
    if maxv > 1000:
        bins = [
            0, 1, 5, 10, 50, 100, 150, 200, 250, 500, 750, 1000, 1250, 1500,
            2000
        ]
    elif maxv > 200:
        bins = [
            0, 1, 3, 5, 10, 20, 35, 50, 75, 100, 150, 200, 250, 500, 750, 1000
        ]

    p = MapPlot(sector='nws',
                axisbg='white',
                title='%s Counts by NWS Office' % (title, ),
                subtitle=('Valid %s - %s UTC, based on VTEC: %s') %
                (sts.strftime("%d %b %Y %H:%M"),
                 ets.strftime("%d %b %Y %H:%M"), subtitle))
    p.fill_cwas(df2, bins=bins, ilabel=True)

    return p.fig, df
Example No. 23
def test_plot2():
    """ Exercise NWS plot API """
    mp = MapPlot(sector='nws', continentalcolor='white', nocaption=True)
    mp.fill_cwas({'DMX': 80, 'MKX': 5, 'SJU': 30, 'AJK': 40, 'HFO': 50},
                 units='NWS Something or Another', ilabel=True)
    return mp.fig
Example No. 24
data = """aly    0.39560439560439560440
wpc    0.36263736263736263736
afc    0.31868131868131868132
lkn    0.31868131868131868132
akq    0.26373626373626373626
abr    0.26373626373626373626
bro    0.23076923076923076923
zan    0.21978021978021978022
mtr    0.18681318681318681319
unr    0.18681318681318681319
sto    0.17582417582417582418
pih    0.15384615384615384615
eka    0.06593406593406593407
roc    0.03296703296703296703
lms    0.02197802197802197802
zjx    0.02197802197802197802
zma    0.02197802197802197802
rev    0.02197802197802197802"""
d = {}
for line in data.split("\n"):
    tokens = line.split()
    d[tokens[0].upper()] = float(tokens[1])

m = MapPlot(sector='nws', axisbg='white', nologo=True,
            subtitle='Main NWS WFO Rooms Only, NWSBot messages not included', caption='@akrherz',
            title='22 Oct 2015 - 21 Jan 2016 NWSChat Avg Number of Room Messages per Day')
m.fill_cwas(d, lblformat='%.1f', ilabel=True, cmap=james(),
            bins=[0, 0.5, 1, 1.5, 2, 3, 4, 5, 10, 15, 20, 40])
m.postprocess(filename='test.png')

Example No. 25
    cursor2.execute("""
    SELECT distinct phenomena, significance from warnings_temp
    where ugc = %s and expire > %s and issue < %s and wfo = %s
    """, (row[0], row[1], row[2], wfo))
    for row2 in cursor2:
        key = "%s.%s" % (row2[0], row2[1])
        if key not in hits[wfo]:
            hits[wfo][key] = 0
        hits[wfo][key] += 1
        if key not in hits2:
            hits2[key] = 0
        hits2[key] += 1
    if cursor2.rowcount == 0:
        misses += 1

data = {}
for wfo in hits.keys():
    data[wfo] = hits[wfo].get(
                '%s.W' % (phenomena,), 0) / float(totals[wfo]) * 100.0

m = MapPlot(sector='nws', axisbg='white',
            title=("Conversion [%] of Severe T'Storm Watch Counties/Parishes into "
                   "SVR Warnings"), titlefontsize=14,
            subtitle='1 Oct 2005 - 19 May 2016')
m.fill_cwas(data, ilabel=True, lblformat='%.0f')
m.postprocess(filename='test.png')

print('Misses %s %.1f%%' % (misses, misses / float(total) * 100.0))
for key in hits2:
    print('%s %s %.1f%%' % (key, hits2[key], hits2[key] / float(total) * 100.0))
Example No. 26
data = {}
labels = {}
uniq = []
for line in text.split("\n"):
    tokens = line.replace(" ", "").split("|")
    wfo = tokens[0][1:]
    if tokens[0][0] == 'P':
        wfo = tokens[0]
    key = "%s" % (tokens[1], )
    if wfo not in nt.sts:
        continue
    # P
    wfo = tokens[0][1:]
    if key not in uniq:
        uniq.append(key)
    data[wfo] = len(uniq) - 1
    labels[wfo] = key
    if wfo == 'JSJ':
        labels['SJU'] = labels['JSJ']

bins = range(len(uniq)+1)
uniq.append('')

p = MapPlot(sector='nws', axisbg='white',
                 title="2009-2013 Most Frequently issued non-SHEF 3char AWIPS ID",
                 subtitle='RR* products were excluded from this analysis')
p.fill_cwas(data, bins=bins, labels=labels, lblformat='%s', clevlabels=uniq)
p.postprocess(filename='test.png')
#import iemplot
#iemplot.makefeature('test')
Example No. 27
    """, (row[0], row[1], row[2], wfo))
    for row2 in cursor2:
        key = "%s.%s" % (row2[0], row2[1])
        if key not in hits[wfo]:
            hits[wfo][key] = 0
        hits[wfo][key] += 1
        if key not in hits2:
            hits2[key] = 0
        hits2[key] += 1
    if cursor2.rowcount == 0:
        misses += 1

data = {}
for wfo in hits.keys():
    data[wfo] = hits[wfo].get('%s.W' %
                              (phenomena, ), 0) / float(totals[wfo]) * 100.0

m = MapPlot(
    sector='nws',
    axisbg='white',
    title=("Conversion [%] of Severe T'Storm Watch Counties/Parishes into "
           "SVR Warnings"),
    titlefontsize=14,
    subtitle='1 Oct 2005 - 19 May 2016')
m.fill_cwas(data, ilabel=True, lblformat='%.0f')
m.postprocess(filename='test.png')

print('Misses %s %.1f%%' % (misses, misses / float(total) * 100.0))
for key in hits2:
    print('%s %s %.1f%%' % (key, hits2[key], hits2[key] / float(total) * 100.0))
Example No. 28
def plotter(fdict):
    """ Go """
    pgconn = get_dbconn('postgis')
    ctx = get_autoplot_context(fdict, get_description())
    sts = ctx['sdate']
    sts = sts.replace(tzinfo=pytz.UTC)
    ets = ctx['edate']
    ets = ets.replace(tzinfo=pytz.UTC)
    p1 = ctx['phenomenav1']
    p2 = ctx['phenomenav2']
    p3 = ctx['phenomenav3']
    p4 = ctx['phenomenav4']
    varname = ctx['var']
    phenomena = []
    for p in [p1, p2, p3, p4]:
        if p is not None:
            phenomena.append(p[:2])
    s1 = ctx['significancev1']
    s2 = ctx['significancev2']
    s3 = ctx['significancev3']
    s4 = ctx['significancev4']
    significance = []
    for s in [s1, s2, s3, s4]:
        if s is not None:
            significance.append(s[0])

    pstr = []
    subtitle = ""
    title = ""
    for p, s in zip(phenomena, significance):
        pstr.append("(phenomena = '%s' and significance = '%s')" % (p, s))
        subtitle += "%s.%s " % (p, s)
        title += vtec.get_ps_string(p, s)
    if len(phenomena) > 1:
        title = "VTEC Unique Event"
    pstr = " or ".join(pstr)
    pstr = "(%s)" % (pstr, )
    cmap = plt.get_cmap(ctx['cmap'])

    if varname == 'count':
        df = read_sql("""
    with total as (
    select distinct wfo, extract(year from issue at time zone 'UTC') as year,
    phenomena, significance, eventid from warnings
    where """ + pstr + """ and
    issue >= %s and issue < %s
    )

    SELECT wfo, phenomena, significance, year, count(*) from total
    GROUP by wfo, phenomena, significance, year
        """,
                      pgconn,
                      params=(sts, ets))

        df2 = df.groupby('wfo')['count'].sum()
        maxv = df2.max()
        bins = [0, 1, 2, 3, 5, 10, 15, 20, 25, 30, 40, 50, 75, 100, 200]
        if maxv > 5000:
            bins = [
                0, 5, 10, 50, 100, 250, 500, 750, 1000, 1500, 2000, 3000, 5000,
                7500, 10000
            ]
        elif maxv > 1000:
            bins = [
                0, 1, 5, 10, 50, 100, 150, 200, 250, 500, 750, 1000, 1250,
                1500, 2000
            ]
        elif maxv > 200:
            bins = [
                0, 1, 3, 5, 10, 20, 35, 50, 75, 100, 150, 200, 250, 500, 750,
                1000
            ]
        units = 'Count'
        lformat = '%.0f'
    elif varname == 'days':
        df = read_sql("""
        WITH data as (
            SELECT distinct wfo, generate_series(greatest(issue, %s),
            least(expire, %s), '1 minute'::interval) as ts from warnings
            WHERE issue > %s and expire < %s and """ + pstr + """
        ), agg as (
            SELECT distinct wfo, date(ts) from data
        )
        select wfo, count(*) as days from agg
        GROUP by wfo ORDER by days DESC
        """,
                      pgconn,
                      params=(sts, ets, sts - datetime.timedelta(days=90),
                              ets + datetime.timedelta(days=90)),
                      index_col='wfo')

        df2 = df['days']
        if df2.max() < 10:
            bins = list(range(1, 11, 1))
        else:
            bins = np.linspace(1, df['days'].max() + 11, 10, dtype='i')
        units = 'Days'
        lformat = '%.0f'
        cmap.set_under('white')
        cmap.set_over('#EEEEEE')
    else:
        total_minutes = (ets - sts).total_seconds() / 60.
        df = read_sql("""
        WITH data as (
            SELECT distinct wfo, generate_series(greatest(issue, %s),
            least(expire, %s), '1 minute'::interval) as ts from warnings
            WHERE issue > %s and expire < %s and """ + pstr + """
        )
        select wfo, count(*) / %s * 100. as tpercent from data
        GROUP by wfo ORDER by tpercent DESC
        """,
                      pgconn,
                      params=(sts, ets, sts - datetime.timedelta(days=90),
                              ets + datetime.timedelta(days=90),
                              total_minutes),
                      index_col='wfo')

        df2 = df['tpercent']
        bins = list(range(0, 101, 10))
        if df2.max() < 5:
            bins = np.arange(0, 5.1, 0.5)
        elif df2.max() < 10:
            bins = list(range(0, 11, 1))
        units = 'Percent'
        lformat = '%.1f'

    nt = NetworkTable("WFO")
    for sid in nt.sts:
        sid = sid[-3:]
        if sid not in df2:
            df2[sid] = 0

    mp = MapPlot(sector='nws',
                 axisbg='white',
                 title='%s %s by NWS Office' % (title, PDICT[varname]),
                 subtitle=('Valid %s - %s UTC, based on VTEC: %s') %
                 (sts.strftime("%d %b %Y %H:%M"),
                  ets.strftime("%d %b %Y %H:%M"), subtitle))
    mp.fill_cwas(df2,
                 bins=bins,
                 ilabel=True,
                 units=units,
                 lblformat=lformat,
                 cmap=cmap)

    return mp.fig, df
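
For the 'percent' branch above, the denominator is simply the number of one-minute steps in the requested window, matching the generate_series('1 minute') resolution in the SQL. A quick worked check using the default dates seen elsewhere on this page:

import datetime

sts = datetime.datetime(2015, 1, 1)
ets = datetime.datetime(2015, 2, 1)
total_minutes = (ets - sts).total_seconds() / 60.  # 31 days * 1440 = 44640 minutes
# an office covered for 4464 of those minutes maps to
# 4464 / total_minutes * 100 = 10.0 in the 'tpercent' column
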
Example No. 29
def test_plot():
    """ Exercise the API """
    mp = MapPlot(sector='midwest', nocaption=True)
    mp.fill_cwas({'DMX': 80, 'MKX': 5, 'SJU': 30, 'AJK': 40},
                 units='no units')
    return mp.fig
Example No. 30
import psycopg2
from pyiem.plot import MapPlot

dbconn = psycopg2.connect(database='postgis', host='iemdb', user='******')
cursor = dbconn.cursor()

cursor.execute("""
 WITH data as (
 select wfo, phenomena, significance,
 extract(year from issue) as yr, count(*)
 from sbw WHERE issue > '2007-10-01' and
 issue < '2015-05-01' and phenomena = 'SV' and significance = 'W'
 GROUP by wfo, phenomena, significance, yr, eventid)

 SELECT wfo, sum(case when count > 2 then 1 else 0 end), count(*) from
 data GROUP by wfo
""")

data = {}
for row in cursor:
    data[row[0]] = row[1] / float(row[2]) * 100.

m = MapPlot(sector='nws', axisbg='white',
            title="Percentage of Severe T'Storm Warnings with 2 or more SVS Updates",
            subtitle='period: 1 Oct 2007 - 1 May 2015')

m.fill_cwas(data)

m.postprocess(filename='test.png')
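
The conditional-sum aggregation in that query (the share of events whose polygon count exceeds two, i.e. at least two SVS updates after issuance) has a direct pandas analogue. A hedged sketch with hypothetical per-event counts and assumed column names:

import pandas as pd

events = pd.DataFrame({'wfo': ['DMX', 'DMX', 'OAX'],
                       'count': [3, 1, 5]})  # hypothetical rows, one per eventid
share = events.groupby('wfo')['count'].apply(lambda s: (s > 2).mean() * 100.)
# DMX -> 50.0, OAX -> 100.0, mirroring sum(case when count > 2 ...) / count(*)
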
Example No. 31
File: plot.py Project: akrherz/iem
from pyiem.plot import MapPlot
import pandas as pd

df = pd.read_csv('vertex.csv', index_col='WFO', na_values=['None'])
df['r'] = (df['CNTY_HITS'] - df['CWA_HITS']) / df['ALL'] * 100.
avgv = (df['CNTY_HITS'] - df['CWA_HITS']).sum() / float(df['ALL'].sum()) * 100.

m = MapPlot(sector='nws', axisbg='white',
            title='Percent of SVR+TOR Warning Vertices within 2km of County Border',
            subtitle='1 Oct 2007 through 23 May 2016, Overall Avg: %.1f%%, * CWA Borders Excluded' % (avgv,))
m.fill_cwas(df['r'], ilabel=True, lblformat='%.0f')
m.postprocess(filename='test.png')