def plotter(fdict):
    """Map the number of days since each WFO's last VTEC event of a type.

    Parameters
    ----------
    fdict : dict
        Autoplot form values; uses ``phenomena``, ``significance`` and an
        optional ``edate`` reference date.

    Returns
    -------
    tuple
        (matplotlib Figure from MapPlot, pandas.DataFrame indexed by wfo)

    Raises
    ------
    NoDataFound
        When no events of the requested type exist in the archive.
    """
    # Class breaks (in days) for the choropleth fill
    bins = [0, 1, 14, 31, 91, 182, 273, 365, 730, 1460, 2920, 3800]
    pgconn = get_dbconn("postgis")
    cursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    ctx = get_autoplot_context(fdict, get_description())
    phenomena = ctx["phenomena"]
    significance = ctx["significance"]
    edate = ctx.get("edate")
    if edate is not None:
        # Anchor the comparison at 00Z of the requested date
        edate = utc(edate.year, edate.month, edate.day, 0, 0)
        cursor.execute(
            """ select wfo, extract(days from (%s::date - max(issue))) as m from warnings where significance = %s and phenomena = %s and issue < %s GROUP by wfo ORDER by m ASC """,
            (edate, significance, phenomena, edate),
        )
    else:
        cursor.execute(
            """ select wfo, extract(days from ('TODAY'::date - max(issue))) as m from warnings where significance = %s and phenomena = %s GROUP by wfo ORDER by m ASC """,
            (significance, phenomena),
        )
        edate = datetime.datetime.utcnow()
    if cursor.rowcount == 0:
        raise NoDataFound(("No Events Found for %s (%s.%s)") % (
            vtec.get_ps_string(phenomena, significance),
            phenomena,
            significance,
        ))
    data = {}
    rows = []
    for row in cursor:
        # San Juan was once identified as JSJ; normalize to SJU
        wfo = row[0] if row[0] != "JSJ" else "SJU"
        rows.append(dict(wfo=wfo, days=row[1]))
        # Clamp negative intervals (future-dated issues) to zero
        data[wfo] = max([row[1], 0])
    df = pd.DataFrame(rows)
    df.set_index("wfo", inplace=True)
    mp = MapPlot(
        sector="nws",
        axisbg="white",
        nocaption=True,
        title="Days since Last %s by NWS Office" % (
            vtec.get_ps_string(phenomena, significance), ),
        subtitle="Valid %s" % (edate.strftime("%d %b %Y %H%M UTC"), ),
    )
    mp.fill_cwas(data, bins=bins, ilabel=True, units="Days", lblformat="%.0f")
    return mp.fig, df
def plotter(fdict):
    """Map the number of days since each WFO's last VTEC event of a type.

    Parameters
    ----------
    fdict : dict
        Autoplot form values; uses ``phenomena``, ``significance`` and an
        optional ``edate`` reference date.

    Returns
    -------
    tuple
        (matplotlib Figure from MapPlot, pandas.DataFrame indexed by wfo)

    Raises
    ------
    NoDataFound
        When no events of the requested type exist in the archive.
    """
    # Force a non-interactive backend before pulling in pyplot-based tools
    import matplotlib
    matplotlib.use('agg')
    from pyiem.plot import MapPlot
    # Class breaks (in days) for the choropleth fill
    bins = [0, 1, 14, 31, 91, 182, 273, 365, 730, 1460, 2920, 3800]
    pgconn = get_dbconn('postgis')
    cursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    ctx = get_autoplot_context(fdict, get_description())
    phenomena = ctx['phenomena']
    significance = ctx['significance']
    edate = ctx.get('edate')
    if edate is not None:
        # Anchor the comparison at 00Z of the requested date
        edate = utc(edate.year, edate.month, edate.day, 0, 0)
        cursor.execute(
            """ select wfo, extract(days from (%s::date - max(issue))) as m from warnings where significance = %s and phenomena = %s and issue < %s GROUP by wfo ORDER by m ASC """,
            (edate, significance, phenomena, edate))
    else:
        cursor.execute(
            """ select wfo, extract(days from ('TODAY'::date - max(issue))) as m from warnings where significance = %s and phenomena = %s GROUP by wfo ORDER by m ASC """,
            (significance, phenomena))
        edate = datetime.datetime.utcnow()
    if cursor.rowcount == 0:
        # Raise the autoplot framework's standard exception (matching the
        # sibling plotter implementations) instead of a bare ValueError so
        # the frontend renders a proper "no data" response.
        raise NoDataFound(
            ("No Events Found for %s (%s.%s)") % (vtec.get_ps_string(
                phenomena, significance), phenomena, significance))
    data = {}
    rows = []
    for row in cursor:
        # San Juan was once identified as JSJ; normalize to SJU
        wfo = row[0] if row[0] != 'JSJ' else 'SJU'
        rows.append(dict(wfo=wfo, days=row[1]))
        # Clamp negative intervals (future-dated issues) to zero
        data[wfo] = max([row[1], 0])
    df = pd.DataFrame(rows)
    df.set_index('wfo', inplace=True)
    mp = MapPlot(sector='nws', axisbg='white', nocaption=True,
                 title='Days since Last %s by NWS Office' % (
                     vtec.get_ps_string(phenomena, significance), ),
                 subtitle='Valid %s' % (
                     edate.strftime("%d %b %Y %H%M UTC"), ))
    mp.fill_cwas(data, bins=bins, ilabel=True, units='Days',
                 lblformat='%.0f')
    return mp.fig, df
def plotter(fdict):
    """Stacked-bar chart of headline frequency by feels-like temperature.

    Stacks the frequency of the first headline type, the second headline
    type, and the no-headline remainder for each feels-like temperature bin
    computed by ``get_df``.
    """
    ctx = get_autoplot_context(fdict, get_description())
    # Populates ctx["df"], ctx["title"], etc.
    get_df(ctx)
    (fig, ax) = plt.subplots()
    frame = ctx["df"]
    # Frequencies for the two headline types plus the no-headline remainder
    key1 = "%s.%s" % (ctx["p1"], ctx["s1"])
    key2 = "%s.%s" % (ctx["p2"], ctx["s2"])
    freq1 = frame[key1 + "%"]
    freq2 = frame[key2 + "%"]
    freq_none = frame["None%"]
    # Draw the three bands bottom-up so they stack
    ax.bar(
        frame.index.values,
        freq1,
        label=get_ps_string(ctx["p1"], ctx["s1"]),
        color=NWS_COLORS[key1],
    )
    ax.bar(
        frame.index.values,
        freq2.values,
        bottom=freq1.values,
        label=get_ps_string(ctx["p2"], ctx["s2"]),
        color=NWS_COLORS[key2],
    )
    ax.bar(
        frame.index.values,
        freq_none,
        bottom=(freq1 + freq2).values,
        label="No Headline",
        color="#EEEEEE",
    )
    ax.legend(loc=(-0.03, -0.22), ncol=3)
    ax.set_position([0.1, 0.2, 0.8, 0.7])
    ax.grid(True)
    if ctx["opt"] == "no":
        obs_label = "All Obs Considered"
    else:
        obs_label = "Only Additive Obs"
    ax.set_xlabel((r"Feels Like $^\circ$F, %s") % (obs_label,))
    ax.set_ylabel("Frequency [%]")
    ax.set_yticks([0, 5, 10, 25, 50, 75, 90, 95, 100])
    ax.set_title(ctx["title"])
    # Clip the plot in the case of wind chill
    if ctx["var"] == "chill":
        reaching = freq_none[freq_none < 100]
        if len(reaching.index) > 0:
            ax.set_xlim(right=reaching.index.values[-1] + 2)
    return fig, ctx["df"]
def get_df(lon, lat, sdate, edate):
    """Fetch VTEC warnings whose UGC geometry contains a given point.

    Args:
      lon (float): longitude of the point of interest (degrees East)
      lat (float): latitude of the point of interest (degrees North)
      sdate (datetime): include events issued after this time
      edate (datetime): include events issued before this time

    Returns:
      pandas.DataFrame of matching events (empty frame when none found),
      with human-readable ``name``, ``ph_name`` and ``sig_name`` columns
      added.
    """
    pgconn = get_dbconn("postgis")
    df = read_sql(
        """ WITH myugcs as ( select gid from ugcs where ST_Contains(geom, ST_SetSRID(ST_GeomFromEWKT('POINT(%s %s)'),4326)) ) SELECT to_char(issue at time zone 'UTC', 'YYYY-MM-DDThh24:MI:SSZ') as iso_issued, to_char(expire at time zone 'UTC', 'YYYY-MM-DDThh24:MI:SSZ') as iso_expired, eventid, phenomena, significance, wfo, hvtec_nwsli from warnings w JOIN myugcs u on (w.gid = u.gid) WHERE issue > %s and issue < %s ORDER by issue ASC """,
        pgconn,
        params=(lon, lat, sdate, edate),
    )
    if df.empty:
        return df
    # Human-readable labels for the VTEC codes
    df["name"] = df[["phenomena", "significance"
                     ]].apply(lambda x: get_ps_string(x[0], x[1]), axis=1)
    df["ph_name"] = df["phenomena"].map(VTEC_PHENOMENA)
    df["sig_name"] = df["significance"].map(VTEC_SIGNIFICANCE)
    return df
def get_df(ugc, sdate, edate):
    """Fetch VTEC warnings for one UGC (county/zone) in a time window.

    Args:
      ugc (str): the UGC identifier to query for
      sdate (datetime): include events issued after this time
      edate (datetime): include events issued before this time

    Returns:
      pandas.DataFrame of events (empty when none found), augmented with
      human-readable ``name``, ``ph_name`` and ``sig_name`` columns.
    """
    conn = get_dbconn("postgis")
    events = read_sql(
        """ SELECT to_char(issue at time zone 'UTC', 'YYYY-MM-DDThh24:MI:SSZ') as iso_issued, to_char(issue at time zone 'UTC', 'YYYY-MM-DD hh24:MI') as issued, to_char(expire at time zone 'UTC', 'YYYY-MM-DDThh24:MI:SSZ') as iso_expired, to_char(expire at time zone 'UTC', 'YYYY-MM-DD hh24:MI') as expired, eventid, phenomena, significance, hvtec_nwsli, wfo from warnings WHERE ugc = %s and issue > %s and issue < %s ORDER by issue ASC """,
        conn,
        params=(ugc, sdate, edate),
    )
    if events.empty:
        return events
    # Attach human-readable labels for the VTEC codes
    events["name"] = events[["phenomena", "significance"]].apply(
        lambda row: get_ps_string(row[0], row[1]), axis=1
    )
    events["ph_name"] = events["phenomena"].map(VTEC_PHENOMENA)
    events["sig_name"] = events["significance"].map(VTEC_SIGNIFICANCE)
    return events
def handler(valid):
    """Service the request for all warnings active at a given time.

    Args:
      valid (datetime or None): timestamp of interest; ``None`` means now.
        A naive timestamp is interpreted as UTC.

    Returns:
      GeoDataFrame of active warnings with ``nws_color`` and
      ``event_label`` columns filled in.
    """
    pgconn = get_dbconn("postgis")
    if valid is None:
        valid = utc()
    # Interpret the timestamp as UTC
    valid = valid.replace(tzinfo=timezone.utc)
    # NOTE: "valid" is a slippery concept for watches issued for the future,
    # hence filtering on product_issue rather than issue.
    events = read_postgis(
        """ SELECT product_issue at time zone 'UTC' as utc_product_issue, issue at time zone 'UTC' as utc_issue, expire at time zone 'UTC' as utc_expire, w.phenomena || '.' || w.significance as ph_sig, w.wfo, eventid, phenomena, significance, w.ugc, null as nws_color, null as event_label, u.simple_geom as geom from warnings w JOIN ugcs u on (w.gid = u.gid) WHERE w.product_issue <= %s and w.expire > %s ORDER by w.product_issue ASC """,
        pgconn,
        geom_col="geom",
        params=(valid, valid),
        index_col=None,
    )
    # Decorate each event with the standard NWS color and a readable label
    events["nws_color"] = events["ph_sig"].apply(NWS_COLORS.get)
    events["event_label"] = events["ph_sig"].apply(
        lambda phsig: get_ps_string(*phsig.split("."))
    )
    return events
def plotter(fdict):
    """Map VTEC event counts either by UGC (county/zone) or gridded polygon.

    Depending on the ``geo`` selection, the heavy lifting is delegated to
    ``do_ugc`` or ``do_polygon``, which populate ``ctx`` with the data and
    class bins to plot.  Returns (fig, df).
    """
    # Force a non-interactive backend before importing pyplot-based tools
    import matplotlib
    matplotlib.use('agg')
    import matplotlib.pyplot as plt
    from pyiem.plot import MapPlot
    ctx = get_autoplot_context(fdict, get_description())
    # Convert datetime to UTC
    ctx['sdate'] = ctx['sdate'].replace(tzinfo=pytz.utc)
    ctx['edate'] = ctx['edate'].replace(tzinfo=pytz.utc)
    state = ctx['state']
    phenomena = ctx['phenomena']
    significance = ctx['significance']
    station = ctx['station'][:4]
    t = ctx['t']
    ilabel = (ctx['ilabel'] == 'yes')
    geo = ctx['geo']
    nt = NetworkTable("WFO")
    # Delegate data crunching; results land in ctx ('data', 'bins', ...)
    if geo == 'ugc':
        do_ugc(ctx)
    elif geo == 'polygon':
        do_polygon(ctx)
    subtitle = "based on IEM Archives %s" % (ctx.get('subtitle', ''), )
    if t == 'cwa':
        subtitle = "Plotted for %s (%s), %s" % (nt.sts[station]['name'],
                                                station, subtitle)
    else:
        subtitle = "Plotted for %s, %s" % (state_names[state], subtitle)
    m = MapPlot(sector=('state' if t == 'state' else 'cwa'),
                state=state,
                cwa=(station if len(station) == 3 else station[1:]),
                axisbg='white',
                title=('%s %s (%s.%s)') % (ctx['title'], vtec.get_ps_string(
                    phenomena, significance), phenomena, significance),
                subtitle=subtitle, nocaption=True, titlefontsize=16)
    if geo == 'ugc':
        cmap = plt.get_cmap('Paired')
        cmap.set_under('white')
        cmap.set_over('white')
        m.fill_ugcs(ctx['data'], ctx['bins'], cmap=cmap, ilabel=ilabel)
    else:
        cmap = plt.get_cmap('jet')
        cmap.set_under('white')
        cmap.set_over('black')
        res = m.pcolormesh(ctx['lons'], ctx['lats'], ctx['data'],
                           ctx['bins'], cmap=cmap, units='count')
        # Cut down on SVG et al size
        res.set_rasterized(True)
    if ctx['drawc'] == 'yes':
        m.drawcounties()
    return m.fig, ctx['df']
def plotter(fdict):
    """Heatmap of distinct VTEC event counts by year and month.

    Counts distinct (wfo, eventid) pairs per year/month for the requested
    phenomena/significance, optionally limited to one WFO or one state.

    Returns
    -------
    tuple of (matplotlib.figure.Figure, pandas.DataFrame)

    Raises
    ------
    NoDataFound
        When the query returns no rows.
    """
    import seaborn as sns
    pgconn = get_dbconn('postgis')
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['station']
    phenomena = ctx['phenomena']
    significance = ctx['significance']
    opt = ctx['opt']
    state = ctx['state']
    ctx['_nt'].sts['_ALL'] = {'name': 'All Offices'}
    # Four-character IDs are prefixed (e.g. PGUM); trim to the 3-char WFO
    wfo_limiter = (" and wfo = '%s' ") % (station if len(station) == 3
                                          else station[1:], )
    if station == '_ALL':
        wfo_limiter = ''
    if opt == 'state':
        wfo_limiter = " and substr(ugc, 1, 2) = '%s'" % (state, )
    # NB we added a hack here that may lead to some false positives when
    # events cross over months, sigh, recall the 2017 eventid pain
    df = read_sql(
        """ with data as ( SELECT distinct extract(year from issue)::int as yr, extract(month from issue)::int as mo, wfo, eventid from warnings where phenomena = %s and significance = %s """ + wfo_limiter + """ GROUP by yr, mo, wfo, eventid) SELECT yr, mo, count(*) from data GROUP by yr, mo ORDER by yr, mo ASC """,
        pgconn, params=(phenomena, significance), index_col=None)
    if df.empty:
        raise NoDataFound("Sorry, no data found!")
    (fig, ax) = plt.subplots(1, 1, figsize=(8, 8))
    # Use explicit keywords: positional DataFrame.pivot arguments are
    # deprecated in modern pandas (removed in 2.0)
    df2 = df.pivot(index='yr', columns='mo', values='count')
    # Ensure every year and all 12 months appear, even when event-free
    df2 = df2.reindex(index=range(df2.index.min(), df2.index.max() + 1),
                      columns=range(1, 13))
    title = "NWS %s" % (ctx['_nt'].sts[station]['name'], )
    if opt == 'state':
        title = ("NWS Issued for Counties/Zones for State of %s") % (
            reference.state_names[state], )
    title += ("\n%s (%s.%s) Issued by Year,Month") % (vtec.get_ps_string(
        phenomena, significance), phenomena, significance)
    ax.set_title(title)
    # vmin=1 keeps zero/NaN cells unshaded
    sns.heatmap(df2, annot=True, fmt=".0f", linewidths=.5, ax=ax, vmin=1)
    ax.set_xticks(np.arange(12) + 0.5)
    ax.set_xticklabels(calendar.month_abbr[1:])
    ax.set_ylabel("Year")
    ax.set_xlabel("Month")
    return fig, df
def plotter(fdict):
    """Histogram of VTEC event issuance frequency by local hour of day.

    Buckets each distinct event's first issuance into the hour of the WFO's
    local timezone and plots the percentage of all events per hour.
    Returns (fig, df) with ``hour`` and ``count`` columns.
    """
    # Force a non-interactive backend before importing pyplot
    import matplotlib
    matplotlib.use('agg')
    import matplotlib.pyplot as plt
    pgconn = get_dbconn('postgis')
    cursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    ctx = get_autoplot_context(fdict, get_description())
    wfo = ctx['station']
    phenomena = ctx['phenomena']
    significance = ctx['significance']
    nt = NetworkTable("WFO")
    (fig, ax) = plt.subplots(1, 1)
    tzname = nt.sts[wfo]['tzname']
    # Count distinct events by local-time hour of first issuance
    cursor.execute(
        """ WITH data as ( SELECT extract(year from issue) as yr, eventid, min(issue at time zone %s) as minissue from warnings WHERE phenomena = %s and significance = %s and wfo = %s GROUP by yr, eventid) SELECT extract(hour from minissue) as hr, count(*) from data GROUP by hr """,
        (tzname, phenomena, significance, wfo))
    if cursor.rowcount == 0:
        raise ValueError("No Results Found")
    data = np.zeros((24, ), 'f')
    for row in cursor:
        data[int(row[0])] = row[1]
    df = pd.DataFrame(
        dict(hour=pd.Series(np.arange(24)), count=pd.Series(data)))
    # Express each hour as a percentage of all events
    ax.bar(np.arange(24), data / float(sum(data)) * 100., ec='b', fc='b',
           align='center')
    ax.grid()
    ax.set_xticks(range(0, 25, 1))
    ax.set_xlim(-0.5, 23.5)
    ax.set_xticklabels([
        "Mid", "", "", "3 AM", "", "", "6 AM", "", "", '9 AM', "", "",
        "Noon", "", "", "3 PM", "", "", "6 PM", "", "", "9 PM", "", "",
        "Mid"
    ])
    ax.set_xlabel("Timezone: %s (Daylight or Standard)" % (tzname, ))
    ax.set_ylabel("Frequency [%%] out of %.0f Events" % (sum(data), ))
    ax.set_title(
        ("[%s] %s :: Issuance Time Frequency\n%s (%s.%s)")
        % (wfo, nt.sts[wfo]['name'], vtec.get_ps_string(
            phenomena, significance), phenomena, significance))
    return fig, df
def plotter(fdict):
    """Frequency of a VTEC event being active, by local minute of day.

    Each event's lifetime (capped at 24 hours from issuance) is expanded to
    one-minute steps in the WFO's local timezone; the bar chart shows the
    share of all events active at each minute.  Returns (fig, df).
    """
    pgconn = get_dbconn('postgis')
    ctx = get_autoplot_context(fdict, get_description())
    wfo = ctx['station']
    phenomena = ctx['phenomena']
    significance = ctx['significance']
    nt = NetworkTable("WFO")
    (fig, ax) = plt.subplots(1, 1)
    tzname = nt.sts[wfo]['tzname']
    df = read_sql(
        """ WITH data as ( SELECT extract(year from issue) as yr, eventid, min(issue at time zone %s) as minissue, max(expire at time zone %s) as maxexpire from warnings WHERE phenomena = %s and significance = %s and wfo = %s GROUP by yr, eventid), events as (select count(*) from data), timedomain as ( SELECT generate_series(minissue, least(maxexpire, minissue + '24 hours'::interval) , '1 minute'::interval) as ts from data ), data2 as ( SELECT extract(hour from ts)::int * 60 + extract(minute from ts)::int as minute, count(*) from timedomain GROUP by minute ORDER by minute ASC) select d.minute, d.count, e.count as total from data2 d, events e """,
        pgconn, params=(tzname, tzname, phenomena, significance, wfo),
        index_col='minute')
    if df.empty:
        raise ValueError("No Results Found")
    # Percent of all events active during each minute
    df['frequency'] = df['count'] / df['total'] * 100.
    ax.bar(df.index.values, df['frequency'].values, ec='b', fc='b',
           align='center')
    ax.grid()
    # Leave headroom when the frequency approaches 100%
    if df['frequency'].max() > 70:
        ax.set_ylim(0, 101)
    ax.set_xticks(range(0, 25 * 60, 60))
    ax.set_xlim(-0.5, 24 * 60 + 1)
    ax.set_xticklabels(["Mid", "", "", "3 AM", "", "", "6 AM", "", "",
                        '9 AM', "", "", "Noon", "", "", "3 PM", "", "",
                        "6 PM", "", "", "9 PM", "", "", "Mid"])
    ax.set_xlabel("Timezone: %s (Daylight or Standard)" % (tzname,))
    ax.set_ylabel("Percentage [%%] out of %.0f Events"
                  % (df['total'].max(), ))
    ax.set_title(("[%s] %s :: Time of Day Frequency\n%s (%s.%s)"
                  ) % (wfo, nt.sts[wfo]['name'],
                       vtec.get_ps_string(phenomena, significance),
                       phenomena, significance))
    return fig, df
def plotter(fdict):
    """Bar chart of yearly VTEC event counts for a WFO (or all offices).

    ``limit=yes`` restricts each year to days-of-year at or before today so
    that partial years compare fairly.  Returns (fig, df).
    """
    # Force a non-interactive backend before importing pyplot
    import matplotlib
    matplotlib.use('agg')
    import matplotlib.pyplot as plt
    pgconn = get_dbconn('postgis')
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['station']
    limit = ctx['limit']
    phenomena = ctx['phenomena']
    significance = ctx['significance']
    nt = NetworkTable('WFO')
    nt.sts['_ALL'] = {'name': 'All Offices'}
    # Four-character IDs are prefixed (e.g. PGUM); trim to the 3-char WFO
    wfo_limiter = (" and wfo = '%s' ") % (station if len(station) == 3
                                          else station[1:], )
    if station == '_ALL':
        wfo_limiter = ''
    doy_limiter = ''
    title = "Entire Year"
    if limit.lower() == 'yes':
        title = "thru ~%s" % (datetime.date.today().strftime("%-d %b"), )
        doy_limiter = (" and extract(doy from issue) <= "
                       "extract(doy from 'TODAY'::date) ")
    df = read_sql(
        """ with data as ( SELECT distinct extract(year from issue) as yr, wfo, eventid from warnings where phenomena = %s and significance = %s """ + wfo_limiter + doy_limiter + """) SELECT yr, count(*) from data GROUP by yr ORDER by yr ASC """,
        pgconn, params=(phenomena, significance))
    if df.empty:
        raise ValueError("Sorry, no data found!")
    (fig, ax) = plt.subplots(1, 1)
    ax.bar(df['yr'], df['count'], align='center')
    ax.set_xlim(df['yr'].min() - 0.5, df['yr'].max() + 0.5)
    ax.grid(True)
    ax.set_ylabel("Yearly Count")
    ax.set_title(("NWS %s [%s]\n%s (%s.%s) Count")
                 % (nt.sts[station]['name'], title,
                    vtec.get_ps_string(phenomena, significance),
                    phenomena, significance))
    if limit == 'yes':
        ax.set_xlabel(("thru approximately %s")
                      % (datetime.date.today().strftime("%-d %b"), ))
    return fig, df
def plotter(fdict):
    """Bar chart of yearly VTEC event counts for a WFO (or all offices).

    ``limit=yes`` restricts each year to days-of-year at or before today so
    partial years compare fairly against the current one.  Returns
    (fig, df).
    """
    pgconn = get_dbconn("postgis")
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx["station"]
    limit = ctx["limit"]
    phenomena = ctx["phenomena"]
    significance = ctx["significance"]
    ctx["_nt"].sts["_ALL"] = {"name": "All Offices"}
    # Four-character IDs are prefixed (e.g. PGUM); trim to the 3-char WFO
    wfo_limiter = (" and wfo = '%s' ") % (station if len(station) == 3
                                          else station[1:], )
    if station == "_ALL":
        wfo_limiter = ""
    doy_limiter = ""
    title = "Entire Year"
    if limit.lower() == "yes":
        title = "thru ~%s" % (datetime.date.today().strftime("%-d %b"), )
        doy_limiter = (" and extract(doy from issue) <= "
                       "extract(doy from 'TODAY'::date) ")
    df = read_sql(
        """ with data as ( SELECT distinct extract(year from issue)::int as yr, wfo, eventid from warnings where phenomena = %s and significance = %s """ + wfo_limiter + doy_limiter + """) SELECT yr, count(*) from data GROUP by yr ORDER by yr ASC """,
        pgconn,
        params=(phenomena, significance),
    )
    if df.empty:
        raise NoDataFound("Sorry, no data found!")
    (fig, ax) = plt.subplots(1, 1)
    ax.bar(df["yr"], df["count"], align="center")
    ax.set_xlim(df["yr"].min() - 0.5, df["yr"].max() + 0.5)
    ax.grid(True)
    ax.set_ylabel("Yearly Count")
    ax.set_title(("NWS %s [%s]\n%s (%s.%s) Count") % (
        ctx["_nt"].sts[station]["name"],
        title,
        vtec.get_ps_string(phenomena, significance),
        phenomena,
        significance,
    ))
    if limit == "yes":
        ax.set_xlabel(("thru approximately %s")
                      % (datetime.date.today().strftime("%-d %b"), ))
    # Yearly counts are integers; keep the y-axis ticks integral
    ax.yaxis.set_major_locator(MaxNLocator(integer=True))
    return fig, df
def handler(begints, endts, wfo, only_new, ph):
    """Service the request: storm-based warning polygons in a time window.

    Parameters
    ----------
    begints, endts : datetime
        Naive timestamps, interpreted as UTC, bounding ``polygon_begin``.
    wfo : iterable or None
        Optional WFO identifiers to restrict to.
    only_new : bool
        When True, only include status 'NEW' polygon issuances.
    ph : iterable or None
        Optional VTEC phenomena codes to restrict to.

    Returns
    -------
    GeoDataFrame with ``nws_color`` and ``event_label`` columns added.
    """
    pgconn = get_dbconn("postgis")
    # Treat the provided naive timestamps as UTC
    begints = begints.replace(tzinfo=timezone.utc)
    endts = endts.replace(tzinfo=timezone.utc)
    params = {"begints": begints, "endts": endts}
    # Build optional WHERE-clause fragments with bound parameters
    wfolimiter = ""
    statuslimiter = ""
    phlimiter = ""
    if ph is not None:
        params["ph"] = tuple(ph)
        phlimiter = "AND phenomena IN :ph "
    if wfo is not None:
        params["wfo"] = tuple(wfo)
        wfolimiter = " and wfo in :wfo "
    if only_new:
        statuslimiter = " and status = 'NEW' "
    df = read_postgis(
        text(
            f""" SELECT issue at time zone 'UTC' as utc_issue, expire at time zone 'UTC' as utc_expire, polygon_begin at time zone 'UTC' as utc_polygon_begin, polygon_end at time zone 'UTC' as utc_polygon_end, w.phenomena || '.' || w.significance as ph_sig, w.wfo, eventid, phenomena, significance, null as nws_color, null as event_label, status, geom from sbw w WHERE w.polygon_begin >= :begints and w.polygon_begin < :endts {wfolimiter} {statuslimiter} {phlimiter} ORDER by w.polygon_begin ASC """
        ),
        pgconn,
        geom_col="geom",
        params=params,
        index_col=None,
    )
    # Decorate with the standard NWS color and a human-readable label
    df["nws_color"] = df["ph_sig"].apply(NWS_COLORS.get)
    df["event_label"] = df["ph_sig"].apply(
        lambda x: get_ps_string(*x.split("."))
    )
    return df
def get_events(ctx):
    """Get storm-based warnings whose polygon contains a point.

    Args:
      ctx (dict): request context with ``lon``/``lat`` and an optional
        ``valid`` datetime restricting to events active at that time.

    Returns:
      tuple of (metadata dict, pandas.DataFrame of matching events)
    """
    data = {"sbws": [], "lon": ctx["lon"], "lat": ctx["lat"], "valid": None}
    data["generation_time"] = utc().strftime(ISO)
    valid_limiter = ""
    if "valid" in ctx:
        # Restrict to events active at the given time
        valid_limiter = " and issue <= '%s+00' and expire > '%s' " % (
            ctx["valid"].strftime("%Y-%m-%d %H:%M"),
            ctx["valid"].strftime("%Y-%m-%d %H:%M"),
        )
        data["valid"] = ctx["valid"].strftime(ISO)
    pgconn = get_dbconn("postgis")
    # Polygon archive effectively starts 2005-10-01
    df = read_sql(
        """ select wfo, significance, phenomena, to_char(issue at time zone 'UTC', 'YYYY-MM-DDThh24:MIZ') as iso_issued, to_char(expire at time zone 'UTC', 'YYYY-MM-DDThh24:MIZ') as iso_expired, to_char(issue at time zone 'UTC', 'YYYY-MM-DD hh24:MI') as issued, to_char(expire at time zone 'UTC', 'YYYY-MM-DD hh24:MI') as expired, eventid, tml_direction, tml_sknt, hvtec_nwsli from sbw where status = 'NEW' and ST_Contains(geom, ST_SetSRID(ST_GeomFromEWKT('POINT(%s %s)'),4326)) and issue > '2005-10-01' """ + valid_limiter + """ ORDER by issue ASC """,
        pgconn,
        params=(ctx["lon"], ctx["lat"]),
    )
    if df.empty:
        return data, df
    # Human-readable labels for the VTEC codes
    df["name"] = df[["phenomena", "significance"
                     ]].apply(lambda x: get_ps_string(x[0], x[1]), axis=1)
    df["ph_name"] = df["phenomena"].map(VTEC_PHENOMENA)
    df["sig_name"] = df["significance"].map(VTEC_SIGNIFICANCE)
    return data, df
def plotter(fdict):
    """Plot the period between the first and last VTEC event of each year.

    The season year either starts 1 Jan (``split=jan1``) or is shifted by
    six months so winter seasons are not cut in half.  A second, narrow
    panel shows the yearly event count.  Returns (fig, df).
    """
    pgconn = get_dbconn('postgis')
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['station'][:4]
    phenomena = ctx['phenomena']
    significance = ctx['significance']
    split = ctx['split']
    opt = ctx['opt']
    state = ctx['state']
    nt = NetworkTable('WFO')
    wfolimiter = " wfo = '%s' " % (station, )
    if opt == 'state':
        wfolimiter = " substr(ugc, 1, 2) = '%s' " % (state, )
    if split == 'jan1':
        sql = """ SELECT extract(year from issue)::int as year, min(issue at time zone 'UTC') as min_issue, max(issue at time zone 'UTC') as max_issue, count(distinct wfo || eventid) from warnings where """ + wfolimiter + """ and phenomena = %s and significance = %s GROUP by year ORDER by year ASC """
    else:
        # Shift by six months so a winter season stays within one "year"
        sql = """ SELECT extract(year from issue - '6 months'::interval)::int as year, min(issue at time zone 'UTC') as min_issue, max(issue at time zone 'UTC') as max_issue, count(distinct wfo || eventid) from warnings where """ + wfolimiter + """ and phenomena = %s and significance = %s GROUP by year ORDER by year ASC """
    df = read_sql(sql, pgconn, params=(phenomena, significance),
                  index_col=None)
    if df.empty:
        raise ValueError("No data found for query")
    # Since many VTEC events start in 2005, we should not trust any
    # data that has its first year in 2005
    if df['year'].min() == 2005:
        df = df[df['year'] > 2005]

    def myfunc(row):
        # Day-of-year for a timestamp, continuing past 365 when the event
        # falls in the calendar year after the (possibly shifted) season year
        year = row[0]
        valid = row[1]
        if year == valid.year:
            return int(valid.strftime("%j"))
        else:
            days = (datetime.date(year + 1, 1, 1)
                    - datetime.date(year, 1, 1)).days
            return int(valid.strftime("%j")) + days

    df['startdoy'] = df[['year', 'min_issue']].apply(myfunc, axis=1)
    df['enddoy'] = df[['year', 'max_issue']].apply(myfunc, axis=1)
    df.set_index('year', inplace=True)
    # allow for small bars when there is just one event
    df.loc[df['enddoy'] == df['startdoy'], 'enddoy'] = df['enddoy'] + 1
    ends = df['enddoy'].values
    starts = df['startdoy'].values
    years = df.index.values
    fig = plt.figure(figsize=(8, 6))
    ax = plt.axes([0.1, 0.1, 0.7, 0.8])
    ax.barh(years, (ends - starts), left=starts, fc='blue', align='center')
    # Averages exclude the final (possibly still-in-progress) year
    ax.axvline(np.average(starts[:-1]), lw=2, color='red')
    ax.axvline(np.average(ends[:-1]), lw=2, color='red')
    ax.set_xlabel(("Avg Start Date: %s, End Date: %s")
                  % ((datetime.date(2000, 1, 1) + datetime.timedelta(
                      days=int(np.average(starts[:-1])))).strftime("%-d %b"),
                     (datetime.date(2000, 1, 1) + datetime.timedelta(
                      days=int(np.average(ends[:-1])))).strftime("%-d %b")),
                  color='red')
    title = "[%s] NWS %s" % (station, nt.sts[station]['name'])
    if opt == 'state':
        title = ("NWS Issued Alerts for State of %s") % (
            reference.state_names[state], )
    ax.set_title(("%s\nPeriod between First and Last %s")
                 % (title, vtec.get_ps_string(phenomena, significance)))
    ax.grid()
    # Month-start day-of-year ticks, repeated for the wrap past day 365
    days = [1, 32, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335]
    days = days + [x + 365 for x in days]
    ax.set_xticks(days)
    ax.set_xticklabels(calendar.month_abbr[1:] + calendar.month_abbr[1:])
    ax.set_xlim(df['startdoy'].min() - 10, df['enddoy'].max() + 10)
    ax.set_ylabel("Year")
    ax.set_ylim(years[0] - 0.5, years[-1] + 0.5)
    xFormatter = FormatStrFormatter('%d')
    ax.yaxis.set_major_formatter(xFormatter)
    # Right-hand panel: yearly event counts
    ax = plt.axes([0.82, 0.1, 0.13, 0.8])
    ax.barh(years, df['count'], fc='blue', align='center')
    ax.set_ylim(years[0] - 0.5, years[-1] + 0.5)
    plt.setp(ax.get_yticklabels(), visible=False)
    ax.grid(True)
    ax.set_xlabel("# Events")
    ax.yaxis.set_major_formatter(xFormatter)
    xloc = plt.MaxNLocator(3)
    ax.xaxis.set_major_locator(xloc)
    return fig, df
def plotter(fdict):
    """Calendar plot of daily VTEC event counts.

    Accepts up to four phenomena/significance pairs, counts distinct events
    per local calendar date for a WFO (or a state), and renders them on a
    calendar grid.  Returns (fig, df).
    """
    pgconn = get_dbconn("postgis")
    ctx = get_autoplot_context(fdict, get_description())
    sts = ctx["sdate"]
    ets = ctx["edate"]
    wfo = ctx["wfo"]
    # Up to four phenomena/significance pairs may be provided
    p1 = ctx["phenomenav1"]
    p2 = ctx["phenomenav2"]
    p3 = ctx["phenomenav3"]
    p4 = ctx["phenomenav4"]
    phenomena = []
    for p in [p1, p2, p3, p4]:
        if p is not None:
            phenomena.append(p[:2])
    s1 = ctx["significancev1"]
    s2 = ctx["significancev2"]
    s3 = ctx["significancev3"]
    s4 = ctx["significancev4"]
    significance = []
    for s in [s1, s2, s3, s4]:
        if s is not None:
            significance.append(s[0])
    # Build the SQL OR-clause and a human-readable title in lockstep
    pstr = []
    title = ""
    for i, (p, s) in enumerate(zip(phenomena, significance)):
        pstr.append("(phenomena = '%s' and significance = '%s')" % (p, s))
        if i == 2:
            title += "\n"
        title += "%s %s.%s, " % (vtec.get_ps_string(p, s), p, s)
    pstr = " or ".join(pstr)
    pstr = "(%s)" % (pstr, )
    if ctx["w"] == "wfo":
        ctx["_nt"].sts["_ALL"] = {
            "name": "All Offices",
            "tzname": "America/Chicago",
        }
        if wfo not in ctx["_nt"].sts:
            raise NoDataFound("No Data Found.")
        # Four-character IDs are prefixed; trim to the 3-char WFO
        wfo_limiter = (" and wfo = '%s' ") % (wfo if len(wfo) == 3
                                              else wfo[1:], )
        if wfo == "_ALL":
            wfo_limiter = ""
        tzname = ctx["_nt"].sts[wfo]["tzname"]
    else:
        wfo_limiter = " and substr(ugc, 1, 2) = '%s' " % (ctx["state"], )
        tzname = "America/Chicago"
    # Pad the window by two days so timezone shifts do not drop edge dates
    df = read_sql(
        """ with events as ( select wfo, min(issue at time zone %s) as localissue, extract(year from issue) as year, phenomena, significance, eventid from warnings where """ + pstr + """ """ + wfo_limiter + """ and issue >= %s and issue < %s GROUP by wfo, year, phenomena, significance, eventid ) SELECT date(localissue), count(*) from events GROUP by date(localissue) """,
        pgconn,
        params=(
            tzname,
            sts - datetime.timedelta(days=2),
            ets + datetime.timedelta(days=2),
        ),
        index_col="date",
    )
    # Seed every date in the window with zero, then overlay actual counts
    data = {}
    now = sts
    while now <= ets:
        data[now] = {"val": 0}
        now += datetime.timedelta(days=1)
    for date, row in df.iterrows():
        data[date] = {"val": row["count"]}
    if ctx["w"] == "wfo":
        title2 = "NWS %s [%s]" % (ctx["_nt"].sts[wfo]["name"], wfo)
        if wfo == "_ALL":
            title2 = "All NWS Offices"
    else:
        title2 = state_names[ctx["state"]]
    fig = calendar_plot(
        sts,
        ets,
        data,
        heatmap=(ctx["heatmap"] == "yes"),
        title=("Number of VTEC Events for %s by Local Calendar Date")
        % (title2, ),
        subtitle="Valid %s - %s for %s"
        % (sts.strftime("%d %b %Y"), ets.strftime("%d %b %Y"), title),
    )
    return fig, df
def plotter(fdict):
    """Two-panel weekly climatology of VTEC events.

    Top panel: number of years with at least one event in each week of the
    year.  Bottom panel: total event count per week.  Returns (fig, df).
    """
    # Force a non-interactive backend before importing pyplot
    import matplotlib
    matplotlib.use('agg')
    import matplotlib.pyplot as plt
    pgconn = get_dbconn('postgis')
    ctx = get_autoplot_context(fdict, get_description())
    opt = ctx['opt']
    state = ctx['state'][:2]
    phenomena = ctx['phenomena']
    significance = ctx['significance']
    station = ctx['station'][:4]
    nt = NetworkTable('WFO')
    # Day-of-year positions of month starts for the x tick labels
    # (2012 is an arbitrary leap year)
    sts = datetime.datetime(2012, 1, 1)
    xticks = []
    for i in range(1, 13):
        ts = sts.replace(month=i)
        xticks.append(int(ts.strftime("%j")))
    (fig, ax) = plt.subplots(2, 1, sharex=True)
    limiter = " wfo = '%s' " % (station,)
    title = "[%s] NWS %s" % (station, nt.sts[station]['name'])
    if opt == 'state':
        title = "State of %s" % (reference.state_names[state],)
        limiter = " substr(ugc, 1, 2) = '%s' " % (state,)
    df = read_sql(
        """ with obs as ( SELECT distinct extract(year from issue) as yr, extract(week from issue) as week, wfo, eventid from warnings WHERE """ + limiter + """ and phenomena = %s and significance = %s ) SELECT yr, week, count(*) from obs GROUP by yr, week ORDER by yr ASC """,
        pgconn, params=(phenomena, significance), index_col=None)
    if df.empty:
        raise ValueError("ERROR: No Results Found!")
    # Top Panel: count of years having 1+ event per week
    gdf = df.groupby('week').count()
    ax[0].bar((gdf.index.values - 1) * 7, gdf['yr'], width=7)
    ax[0].set_title(("%s\n%s (%s.%s) Events - %i to %i"
                     ) % (title, vtec.get_ps_string(phenomena, significance),
                          phenomena, significance, df['yr'].min(),
                          df['yr'].max()))
    ax[0].grid()
    ax[0].set_ylabel("Years with 1+ Event")
    # Bottom Panel: total events per week
    gdf = df.groupby('week').sum()
    ax[1].bar((gdf.index.values - 1) * 7, gdf['count'], width=7)
    ax[1].set_ylabel("Total Event Count")
    ax[1].grid()
    ax[1].set_xlabel("Partitioned by Week of the Year")
    ax[1].set_xticks(xticks)
    ax[1].set_xticklabels(calendar.month_abbr[1:])
    ax[1].set_xlim(0, 366)
    return fig, df
def plotter(fdict):
    """Accumulated yearly VTEC event counts by day of year.

    One step-line per year of the running count of distinct events; a bar
    chart alternative is delegated to ``make_barplot``.  Year labels are
    iteratively nudged left to avoid overlapping.  Returns (fig, df).
    """
    pgconn = get_dbconn("postgis")
    cursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx["station"]
    limit = ctx["limit"]
    combo = ctx["c"]
    phenomena = ctx["phenomena"][:2]
    significance = ctx["significance"][:2]
    # Only SV/TO/FF warnings have archive coverage before 2005
    if phenomena in ["SV", "TO", "FF"] and significance == "W":
        pass
    else:
        ctx["syear"] = max(ctx["syear"], 2005)
    opt = ctx["opt"]
    state = ctx["state"][:2]
    eyear = ctx["eyear"]
    ctx["_nt"].sts["_ALL"] = {"name": "All Offices"}
    if station not in ctx["_nt"].sts:
        raise NoDataFound("No Data Found.")
    lastdoy = 367
    if limit.lower() == "yes":
        # Cap every year at today's day-of-year for fair comparison
        lastdoy = int(datetime.datetime.today().strftime("%j")) + 1
    wfolimiter = " and wfo = '%s' " % (station,)
    if opt == "state":
        wfolimiter = " and substr(ugc, 1, 2) = '%s' " % (state,)
    if opt == "wfo" and station == "_ALL":
        wfolimiter = ""
    eventlimiter = ""
    if combo == "svrtor":
        # Combine Severe Thunderstorm + Tornado warnings into one count
        eventlimiter = " or (phenomena = 'SV' and significance = 'W') "
        phenomena = "TO"
        significance = "W"
    # Running (windowed) sum of distinct events per year by day of year
    cursor.execute(
        """ WITH data as ( SELECT extract(year from issue)::int as yr, issue, phenomena, significance, eventid, wfo from warnings WHERE ((phenomena = %s and significance = %s) """ + eventlimiter + """) and extract(year from issue) >= %s and extract(year from issue) <= %s and extract(doy from issue) <= %s """ + wfolimiter + """), agg1 as ( SELECT yr, min(issue) as min_issue, eventid, wfo, phenomena, significance from data GROUP by yr, eventid, wfo, phenomena, significance), agg2 as ( SELECT yr, extract(doy from min_issue) as doy, count(*) from agg1 GROUP by yr, doy) SELECT yr, doy, sum(count) OVER (PARTITION by yr ORDER by doy ASC) from agg2 ORDER by yr ASC, doy ASC """,
        (phenomena, significance, ctx["syear"], eyear, lastdoy),
    )
    if cursor.rowcount == 0:
        raise NoDataFound("No Data Found.")
    data = {}
    for yr in range(ctx["syear"], eyear + 1):
        data[yr] = {"doy": [0], "counts": [0]}
    rows = []
    for row in cursor:
        data[row[0]]["doy"].append(row[1])
        data[row[0]]["counts"].append(row[2])
        rows.append(dict(year=row[0], day_of_year=row[1], count=row[2]))
    # append on a lastdoy value so all the plots go to the end
    for yr in range(ctx["syear"], eyear + 1):
        if data[yr]["doy"][-1] >= lastdoy:
            continue
        if yr == utc().year:
            # append today
            data[yr]["doy"].append(int(utc().strftime("%j")))
        else:
            data[yr]["doy"].append(lastdoy)
        data[yr]["counts"].append(data[yr]["counts"][-1])
    df = pd.DataFrame(rows)
    title = vtec.get_ps_string(phenomena, significance)
    if combo == "svrtor":
        title = "Severe Thunderstorm + Tornado Warning"
    ptitle = "NWS WFO: %s (%s)" % (ctx["_nt"].sts[station]["name"], station)
    if opt == "state":
        ptitle = ("NWS Issued for %s in %s") % (
            "Parishes" if state == "LA" else "Counties",
            reference.state_names[state],
        )
    ctx["title"] = "%s\n %s Count" % (ptitle, title)
    ctx["xlabel"] = "entire year plotted"
    if lastdoy < 367:
        ctx["xlabel"] = ("thru approximately %s") % (
            datetime.date.today().strftime("%-d %B"),
        )
    if ctx["plot"] == "bar":
        return make_barplot(ctx, df)
    (fig, ax) = plt.subplots(1, 1, figsize=(8, 6))
    ann = []
    for yr in range(ctx["syear"], eyear + 1):
        # Skip years that never got beyond the seeded origin point
        if len(data[yr]["doy"]) < 2:
            continue
        lp = ax.plot(
            data[yr]["doy"],
            data[yr]["counts"],
            lw=2,
            label="%s (%s)" % (str(yr), data[yr]["counts"][-1]),
            drawstyle="steps-post",
        )
        # End-of-line year label, colored like the line itself
        ann.append(
            ax.text(
                data[yr]["doy"][-1] + 1,
                data[yr]["counts"][-1],
                "%s" % (yr,),
                color="w",
                va="center",
                fontsize=10,
                bbox=dict(
                    facecolor=lp[0].get_color(), edgecolor=lp[0].get_color()
                ),
            )
        )
    # Iteratively nudge overlapping year labels left using a pixel mask of
    # already-occupied canvas regions (at most 10 passes)
    mask = np.zeros(fig.canvas.get_width_height(), bool)
    fig.canvas.draw()
    attempts = 10
    while ann and attempts > 0:
        attempts -= 1
        removals = []
        for a in ann:
            bbox = a.get_window_extent()
            x0 = int(bbox.x0)
            x1 = int(math.ceil(bbox.x1))
            y0 = int(bbox.y0)
            y1 = int(math.ceil(bbox.y1))
            s = np.s_[x0 : x1 + 1, y0 : y1 + 1]
            if np.any(mask[s]):
                a.set_position([a._x - int(lastdoy / 14), a._y])
            else:
                mask[s] = True
                removals.append(a)
        for rm in removals:
            ann.remove(rm)
    ax.legend(loc=2, ncol=2, fontsize=10)
    ax.set_xticks((1, 32, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335))
    ax.set_xticklabels(calendar.month_abbr[1:])
    plot_common(ctx, ax)
    ax.set_ylim(bottom=0)
    ax.set_xlim(0, lastdoy)
    return fig, df
def plotter(fdict):
    """Ranked bar chart of Watch/Warning/Advisory totals by VTEC type.

    Counts event+county/zone combinations between two years for one WFO (or
    all WFOs) and plots the 25 most frequent types relative to the most
    common one.

    Returns
    -------
    tuple of (matplotlib.figure.Figure, pandas.DataFrame)

    Raises
    ------
    NoDataFound
        When the query returns no events.  (Previously an empty result
        crashed later with an IndexError on ``vals[0]`` / ``labels[0]``.)
    """
    pgconn = get_dbconn('postgis')
    pcursor = pgconn.cursor()
    ctx = get_autoplot_context(fdict, get_description())
    syear = ctx['syear']
    eyear = ctx['eyear'] + 1
    station = ctx['station'][:4]
    sts = datetime.date(syear, 1, 1)
    ets = datetime.date(eyear, 1, 1)
    nt = NetworkTable('WFO')
    # Four-character IDs are prefixed (e.g. PGUM); trim to the 3-char WFO
    wfo_limiter = " and wfo = '%s' " % (station if len(station) == 3
                                        else station[1:], )
    if station == '_ALL':
        wfo_limiter = ''
    pcursor.execute(
        """ select phenomena, significance, min(issue), count(*) from warnings where ugc is not null and issue > %s and issue < %s """ + wfo_limiter + """ GROUP by phenomena, significance ORDER by count DESC """,
        (sts, ets))
    labels = []
    vals = []
    cnt = 1
    rows = []
    for row in pcursor:
        label = ("%s. %s (%s.%s)") % (cnt, vtec.get_ps_string(
            row[0], row[1]), row[0], row[1])
        # Only plot the 25 most frequent types
        if cnt < 26:
            labels.append(label)
            vals.append(row[3])
            rows.append(
                dict(phenomena=row[0], significance=row[1], count=row[3],
                     wfo=station))
        cnt += 1
    if not rows:
        # Guard the empty result; vals[0] / labels[0] below would IndexError
        raise NoDataFound("No Events Found.")
    df = pd.DataFrame(rows)
    (fig, ax) = plt.subplots(1, 1, figsize=(7, 10))
    vals = np.array(vals)
    # Express each count relative to the most frequent type (in percent)
    ax.barh(np.arange(len(vals)), vals / float(vals[0]) * 100.0,
            align='center')
    for i in range(1, len(vals)):
        y = vals[i] / float(vals[0]) * 100.0
        ax.text(y + 1, i, '%.1f%%' % (y, ), va='center')
    fig.text(0.5, 0.95,
             "%s-%s NWS %s Watch/Warning/Advisory Totals"
             % (syear, eyear - 1 if (eyear - 1 != syear) else '',
                "ALL WFOs" if station == '_ALL'
                else nt.sts[station]['name']), ha='center')
    fig.text(0.5, 0.05,
             "Event+County/Zone Count, Relative to #%s" % (labels[0], ),
             ha='center', fontsize=10)
    ax.set_ylim(len(vals), -0.5)
    ax.grid(True)
    ax.set_yticklabels(labels)
    ax.set_yticks(np.arange(len(vals)))
    ax.set_position([0.5, 0.1, 0.45, 0.83])
    ax.set_xticks([0, 10, 25, 50, 75, 90, 100])
    return fig, df
def plotter(fdict):
    """Calendar plot of daily VTEC event counts.

    Counts distinct VTEC events per local calendar day for up to four
    phenomena/significance pairs, for either one WFO or one state, and
    renders them on a calendar grid.  Returns the figure and the raw
    per-date count DataFrame.
    """
    import matplotlib
    matplotlib.use('agg')
    pgconn = get_dbconn('postgis')
    ctx = get_autoplot_context(fdict, get_description())
    sts = ctx['sdate']
    ets = ctx['edate']
    wfo = ctx['wfo']
    # Up to four phenomena/significance selectors may be provided
    p1 = ctx['phenomenav1']
    p2 = ctx['phenomenav2']
    p3 = ctx['phenomenav3']
    p4 = ctx['phenomenav4']
    phenomena = []
    for p in [p1, p2, p3, p4]:
        if p is not None:
            phenomena.append(p[:2])
    s1 = ctx['significancev1']
    s2 = ctx['significancev2']
    s3 = ctx['significancev3']
    s4 = ctx['significancev4']
    significance = []
    for s in [s1, s2, s3, s4]:
        if s is not None:
            significance.append(s[0])
    # Build an OR'd SQL clause plus a human readable title from the pairs
    pstr = []
    title = ""
    for i, (p, s) in enumerate(zip(phenomena, significance)):
        pstr.append("(phenomena = '%s' and significance = '%s')" % (p, s))
        if i == 2:
            # Wrap the title onto a second line after three entries
            title += "\n"
        title += "%s %s.%s, " % (vtec.get_ps_string(p, s), p, s)
    pstr = " or ".join(pstr)
    pstr = "(%s)" % (pstr, )
    if ctx['w'] == 'wfo':
        nt = NetworkTable("WFO")
        nt.sts['_ALL'] = {'name': 'All Offices', 'tzname': 'America/Chicago'}
        # 4-char IDs are stored without their leading char in warnings.wfo
        wfo_limiter = (" and wfo = '%s' ") % (
            wfo if len(wfo) == 3 else wfo[1:], )
        if wfo == '_ALL':
            wfo_limiter = ''
        tzname = nt.sts[wfo]['tzname']
    else:
        wfo_limiter = " and substr(ugc, 1, 2) = '%s' " % (ctx['state'], )
        tzname = 'America/Chicago'
    # The +/- 2 day padding covers events whose local date shifts across
    # the UTC period boundary
    df = read_sql("""
        with events as (
            select wfo, min(issue at time zone %s) as localissue,
            extract(year from issue) as year,
            phenomena, significance, eventid from warnings
            where """ + pstr + """ """ + wfo_limiter + """
            and issue >= %s and issue < %s
            GROUP by wfo, year, phenomena, significance, eventid
        )

        SELECT date(localissue), count(*) from events
        GROUP by date(localissue)
    """, pgconn, params=(tzname, sts - datetime.timedelta(days=2),
                         ets + datetime.timedelta(days=2)),
        index_col='date')
    # Seed every day of the requested window with zero, then overlay counts
    data = {}
    now = sts
    while now <= ets:
        data[now] = {'val': 0}
        now += datetime.timedelta(days=1)
    for date, row in df.iterrows():
        data[date] = {'val': row['count']}
    fig = calendar_plot(sts, ets, data, heatmap=(ctx['heatmap'] == 'yes'))
    if ctx['w'] == 'wfo':
        title2 = "NWS %s [%s]" % (nt.sts[wfo]['name'], wfo)
        if wfo == '_ALL':
            title2 = "All NWS Offices"
    else:
        title2 = state_names[ctx['state']]
    fig.text(
        0.5, 0.95,
        ("Number of VTEC Events for %s by Local Calendar Date"
         "\nValid %s - %s for %s") % (
            title2, sts.strftime("%d %b %Y"),
            ets.strftime("%d %b %Y"), title),
        ha='center', va='center')
    return fig, df
def plotter(fdict):
    """Heatmap of VTEC event counts by year (rows) and month (columns).

    Counts distinct events for one phenomena/significance pair, limited
    to a WFO or a state, and renders an annotated imshow grid.  Returns
    the figure and the year/month count DataFrame.
    """
    import matplotlib
    matplotlib.use('agg')
    import matplotlib.pyplot as plt
    import matplotlib.colors as mpcolors
    import matplotlib.patheffects as PathEffects
    pgconn = get_dbconn('postgis')
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['station']
    phenomena = ctx['phenomena']
    significance = ctx['significance']
    opt = ctx['opt']
    state = ctx['state']
    nt = NetworkTable('WFO')
    nt.sts['_ALL'] = {'name': 'All Offices'}
    # 4-char IDs are stored without their leading char in warnings.wfo
    wfo_limiter = (" and wfo = '%s' "
                   ) % (station if len(station) == 3 else station[1:],)
    if station == '_ALL':
        wfo_limiter = ''
    if opt == 'state':
        wfo_limiter = " and substr(ugc, 1, 2) = '%s'" % (state, )
    # NB we added a hack here that may lead to some false positives when
    # events cross over months, sigh, recall the 2017 eventid pain
    df = read_sql("""
        with data as (
            SELECT distinct extract(year from issue) as yr,
            extract(month from issue) as mo, wfo, eventid
            from warnings where phenomena = %s and significance = %s
            """ + wfo_limiter + """ GROUP by yr, mo, wfo, eventid)

        SELECT yr, mo, count(*) from data GROUP by yr, mo
        ORDER by yr, mo ASC
    """, pgconn, params=(phenomena, significance), index_col=None)
    if df.empty:
        raise ValueError("Sorry, no data found!")
    (fig, ax) = plt.subplots(1, 1, figsize=(8, 8))
    minyear = df['yr'].min()
    maxyear = df['yr'].max()
    # Dense year x month grid; missing cells stay zero (below the norm,
    # so they render white via set_under)
    data = np.zeros((int(maxyear - minyear + 1), 12), 'i')
    for _, row in df.iterrows():
        data[int(row['yr'] - minyear), int(row['mo'] - 1)] = row['count']
        # Annotate each cell with its count, outlined for readability
        txt = ax.text(row['mo'], row['yr'], "%.0f" % (row['count'],),
                      va='center', ha='center', color='white')
        txt.set_path_effects([PathEffects.withStroke(linewidth=2,
                                                     foreground="k")])
    cmap = plt.get_cmap('jet')
    cmap.set_under('white')
    # Keep at least an 11-count spread so the bin edges stay distinct
    maxval = max([df['count'].max(), 11])
    bounds = np.linspace(1, maxval, 10, dtype='i')
    norm = mpcolors.BoundaryNorm(bounds, cmap.N)
    # extent flips the y axis so the earliest year is the top row
    res = ax.imshow(data, extent=[0.5, 12.5, maxyear + 0.5, minyear - 0.5],
                    interpolation='nearest', aspect='auto', norm=norm,
                    cmap=cmap)
    fig.colorbar(res, label='count')
    ax.grid(True)
    ax.set_xticks(range(1, 13))
    ax.set_xticklabels(calendar.month_abbr[1:])
    title = "NWS %s" % (nt.sts[station]['name'], )
    if opt == 'state':
        title = ("NWS Issued for Counties/Zones for State of %s"
                 ) % (reference.state_names[state],)
    title += ("\n%s (%s.%s) Issued by Year,Month"
              ) % (vtec.get_ps_string(phenomena, significance), phenomena,
                   significance)
    ax.set_title(title)
    return fig, df
def test_fireweather():
    """The FW phenomena label depends on the significance level."""
    expected = {
        "A": "Fire Weather Watch",
        "W": "Red Flag Warning",
    }
    for sig, label in expected.items():
        assert vtec.get_ps_string("FW", sig) == label
def plotter(fdict):
    """Rank NWS Watch/Warning/Advisory products by issuance count.

    Builds a horizontal bar chart of the top VTEC product types for one
    WFO (or all offices), with each bar expressed as a percentage of the
    most-issued product.  Returns the matplotlib figure and a DataFrame
    with one row per phenomena/significance combination.
    """
    pgconn = get_dbconn("postgis")
    pcursor = pgconn.cursor()
    ctx = get_autoplot_context(fdict, get_description())
    ctx["_nt"].sts["_ALL"] = dict(name="ALL WFOs")
    syear = ctx["syear"]
    # eyear is inclusive in the UI, so bump by one for the exclusive query
    eyear = ctx["eyear"] + 1
    station = ctx["station"][:4]
    sts = datetime.date(syear, 1, 1)
    ets = datetime.date(eyear, 1, 1)
    # 4-char IDs are stored without their leading character in warnings.wfo
    wfo_limiter = " and wfo = '%s' " % (
        station if len(station) == 3 else station[1:], )
    if station == "_ALL":
        wfo_limiter = ""
    pcursor.execute(
        f"""
        select phenomena, significance, min(issue), count(*)
        from warnings where ugc is not null and issue > %s
        and issue < %s {wfo_limiter}
        GROUP by phenomena, significance ORDER by count DESC
    """,
        (sts, ets),
    )
    if pcursor.rowcount == 0:
        raise NoDataFound("No data found.")
    labels = []
    vals = []
    cnt = 1
    rows = []
    for row in pcursor:
        label = ("%s. %s (%s.%s)") % (
            cnt,
            vtec.get_ps_string(row[0], row[1]),
            row[0],
            row[1],
        )
        # Only the top 25 products are drawn on the chart ...
        if cnt < 26:
            labels.append(label)
            vals.append(row[3])
        # ... but every product lands in the returned DataFrame
        rows.append(
            dict(
                phenomena=row[0],
                significance=row[1],
                count=row[3],
                wfo=station,
            ))
        cnt += 1
    df = pd.DataFrame(rows)
    (fig, ax) = plt.subplots(1, 1, figsize=(7, 10))
    vals = np.array(vals)
    # Normalize all bars against the most frequent product
    ax.barh(np.arange(len(vals)), vals / float(vals[0]) * 100.0,
            align="center")
    for i in range(1, len(vals)):
        y = vals[i] / float(vals[0]) * 100.0
        ax.text(y + 1, i, "%.1f%%" % (y, ), va="center")
    fig.text(
        0.5,
        0.95,
        "%s-%s NWS %s Watch/Warning/Advisory Totals" % (
            syear,
            eyear - 1 if (eyear - 1 != syear) else "",
            ctx["_nt"].sts[station]["name"],
        ),
        ha="center",
    )
    fig.text(
        0.5,
        0.05,
        "Event+County/Zone Count, Relative to #%s" % (labels[0], ),
        ha="center",
        fontsize=10,
    )
    ax.set_ylim(len(vals), -0.5)
    ax.grid(True)
    # Set the tick locations before the labels so counts always match;
    # the previous labels-first order breaks on modern matplotlib
    ax.set_yticks(np.arange(len(vals)))
    ax.set_yticklabels(labels)
    ax.set_position([0.5, 0.1, 0.45, 0.83])
    ax.set_xticks([0, 10, 25, 50, 75, 90, 100])
    return fig, df
def plotter(fdict):
    """Gantt-style timeline of WWA products issued by one WFO.

    Each event becomes a horizontal bar spanning issue to expiration,
    colored by product type, over a window of N days starting at the
    chosen date.  Returns the figure and the raw event DataFrame.
    """
    pgconn = get_dbconn("postgis")
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx["station"]
    sts = ctx["sdate"]
    sts = datetime.datetime(sts.year, sts.month, sts.day)
    days = ctx["days"]
    # Window is expressed in the WFO's local timezone
    tz = pytz.timezone(ctx["_nt"].sts[station]["tzname"])
    sts = sts.replace(tzinfo=tz)
    ets = sts + datetime.timedelta(days=days)
    df = read_sql(
        """
        SELECT phenomena, significance, eventid,
        min(issue at time zone 'UTC') as minissue,
        max(expire at time zone 'UTC') as maxexpire,
        max(coalesce(init_expire, expire)
            at time zone 'UTC') as maxinitexpire,
        extract(year from product_issue) as year
        from warnings
        WHERE wfo = %s and issue > %s and issue < %s
        GROUP by phenomena, significance, eventid, year
        ORDER by minissue ASC
    """,
        pgconn,
        params=(station, sts, ets),
        index_col=None,
    )
    if df.empty:
        raise NoDataFound("No events were found for WFO and time period.")
    events = []
    labels = []
    types = []
    for i, row in df.iterrows():
        # Use whichever of expire/init_expire is later as the bar end
        endts = max(row[4], row[5]).replace(tzinfo=pytz.utc)
        events.append((row[3].replace(tzinfo=pytz.utc), endts, row[2]))
        labels.append(vtec.get_ps_string(row[0], row[1]))
        types.append("%s.%s" % (row[0], row[1]))
    # If we have lots of WWA, we need to expand vertically a bunch, lets
    # assume we can plot 5 WAA per 100 pixels
    if len(events) > 20:
        height = int(len(events) / 6.0) + 1
        (fig, ax) = plt.subplots(figsize=(8, height))
        fontsize = 8
    else:
        (fig, ax) = plt.subplots(figsize=(8, 6))
        fontsize = 10
    used = []

    def get_label(i):
        # Only label each product type once so the legend stays compact
        if types[i] in used:
            return ""
        used.append(types[i])
        return "%s (%s)" % (labels[i], types[i])

    halfway = sts + datetime.timedelta(days=days / 2.0)
    for i, e in enumerate(events):
        secs = abs((e[1] - e[0]).days * 86400.0 + (e[1] - e[0]).seconds)
        ax.barh(
            i + 1,
            secs / 86400.0,
            left=e[0],
            align="center",
            fc=vtec.NWS_COLORS.get(types[i], "k"),
            ec=vtec.NWS_COLORS.get(types[i], "k"),
            label=get_label(i),
        )
        # Place the text on whichever side keeps it inside the plot
        align = "left"
        xpos = e[0] + datetime.timedelta(seconds=secs + 3600)
        if xpos > halfway:
            align = "right"
            xpos = e[0] - datetime.timedelta(minutes=90)
        # TO.A yellow is unreadable on white, so fall back to black
        textcolor = vtec.NWS_COLORS.get(
            types[i] if types[i] != "TO.A" else "X", "k")
        ax.text(
            xpos,
            i + 1,
            labels[i].replace("Weather", "Wx") + " " + str(e[2]),
            color=textcolor,
            ha=align,
            va="center",
            bbox=dict(color="white", boxstyle="square,pad=0"),
            fontsize=fontsize,
        )
    ax.set_ylabel("Sequential Product Number")
    ax.set_title(("%s-%s NWS %s\nissued Watch/Warning/Advisories") % (
        sts.strftime("%-d %b %Y"),
        ets.strftime("%-d %b %Y"),
        ctx["_nt"].sts[station]["name"],
    ))
    ax.set_ylim(0.4, len(events) + 1)
    ax.xaxis.set_minor_locator(mdates.DayLocator(interval=1, tz=tz))
    # Aim for roughly seven major ticks regardless of window length
    xinterval = int(days / 7) + 1
    ax.xaxis.set_major_locator(mdates.DayLocator(interval=xinterval, tz=tz))
    ax.xaxis.set_major_formatter(mdates.DateFormatter("%-d %b", tz=tz))
    ax.grid(True)
    ax.set_xlim(sts, ets)
    # Shrink current axis's height by 10% on the bottom to fit the legend
    box = ax.get_position()
    ax.set_position(
        [box.x0, box.y0 + box.height * 0.2, box.width, box.height * 0.8])
    ax.legend(
        loc="upper center",
        bbox_to_anchor=(0.5, -0.1),
        fancybox=True,
        shadow=True,
        ncol=3,
        scatterpoints=1,
        fontsize=8,
    )
    return fig, df
def plotter(fdict):
    """Map VTEC statistics by UGC (counties/zones) or gridded polygon.

    Delegates the data assembly to do_ugc()/do_polygon(), which populate
    ctx (data, bins, title, df, ...), then renders the result on a state
    or CWA MapPlot.  Returns the figure and the assembled DataFrame.
    """
    ctx = get_autoplot_context(fdict, get_description())
    # Convert datetimes to UTC
    ctx["sdate"] = ctx["sdate"].replace(tzinfo=pytz.utc)
    ctx["edate"] = ctx["edate"].replace(tzinfo=pytz.utc)
    state = ctx["state"]
    phenomena = ctx["phenomena"]
    significance = ctx["significance"]
    station = ctx["station"][:4]
    t = ctx["t"]
    ilabel = ctx["ilabel"] == "yes"
    geo = ctx["geo"]
    # Helpers mutate ctx in place (data, bins, title, subtitle, df, ...)
    if geo == "ugc":
        do_ugc(ctx)
    elif geo == "polygon":
        do_polygon(ctx)
    subtitle = "based on IEM Archives %s" % (ctx.get("subtitle", ""), )
    if t == "cwa":
        subtitle = "Plotted for %s (%s), %s" % (
            ctx["_nt"].sts[station]["name"],
            station,
            subtitle,
        )
    else:
        subtitle = "Plotted for %s, %s" % (state_names[state], subtitle)
    m = MapPlot(
        sector=("state" if t == "state" else "cwa"),
        state=state,
        # 4-char IDs drop their leading character for the CWA lookup
        cwa=(station if len(station) == 3 else station[1:]),
        axisbg="white",
        title=("%s %s (%s.%s)") % (
            ctx["title"],
            vtec.get_ps_string(phenomena, significance),
            phenomena,
            significance,
        ),
        subtitle=subtitle,
        nocaption=True,
        titlefontsize=16,
    )
    cmap = plt.get_cmap(ctx["cmap"])
    # Values outside the bins render as white (effectively hidden)
    cmap.set_under("white")
    cmap.set_over("white")
    if geo == "ugc":
        m.fill_ugcs(ctx["data"], ctx["bins"], cmap=cmap, ilabel=ilabel)
    else:
        res = m.pcolormesh(
            ctx["lons"],
            ctx["lats"],
            ctx["data"],
            ctx["bins"],
            cmap=cmap,
            units="count",
        )
        # Cut down on SVG et al size
        res.set_rasterized(True)
    if ctx["drawc"] == "yes":
        m.drawcounties()
    return m.fig, ctx["df"]
def plotter(fdict):
    """Accumulated VTEC event count by day-of-year, one line per year.

    Builds a step plot of cumulative event counts per calendar year for
    a phenomena/significance pair (or the SVR+TOR combo), limited to a
    WFO or a state.  Year labels at line ends are nudged left until they
    no longer overlap.  Returns the figure and the per-day DataFrame.
    """
    pgconn = get_dbconn('postgis')
    cursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['station']
    limit = ctx['limit']
    combo = ctx['c']
    phenomena = ctx['phenomena'][:2]
    significance = ctx['significance'][:2]
    opt = ctx['opt']
    state = ctx['state'][:2]
    syear = ctx['syear']
    eyear = ctx['eyear']
    ctx['_nt'].sts['_ALL'] = {'name': 'All Offices'}
    if station not in ctx['_nt'].sts:
        raise NoDataFound("No Data Found.")
    # By default consider the full year; optionally limit to "to date"
    lastdoy = 367
    if limit.lower() == 'yes':
        lastdoy = int(datetime.datetime.today().strftime("%j")) + 1
    wfolimiter = " and wfo = '%s' " % (station, )
    if opt == 'state':
        wfolimiter = " and substr(ugc, 1, 2) = '%s' " % (state, )
    if opt == 'wfo' and station == '_ALL':
        wfolimiter = ''
    eventlimiter = ""
    if combo == 'svrtor':
        # Combined Severe Thunderstorm + Tornado Warning accounting
        eventlimiter = " or (phenomena = 'SV' and significance = 'W') "
        phenomena = 'TO'
        significance = 'W'
    # agg1 dedups by event, agg2 counts per first-issuance day, and the
    # window function accumulates within each year
    cursor.execute("""
        WITH data as (
            SELECT extract(year from issue) as yr, issue, phenomena,
            significance, eventid, wfo from warnings WHERE
            ((phenomena = %s and significance = %s) """ + eventlimiter + """)
            and extract(year from issue) >= %s and
            extract(year from issue) <= %s
            and extract(doy from issue) <= %s """ + wfolimiter + """),
        agg1 as (
            SELECT yr, min(issue) as min_issue, eventid, wfo, phenomena,
            significance from data
            GROUP by yr, eventid, wfo, phenomena, significance),
        agg2 as (
            SELECT yr, extract(doy from min_issue) as doy, count(*)
            from agg1 GROUP by yr, doy)

        SELECT yr, doy, sum(count) OVER (PARTITION by yr ORDER by doy ASC)
        from agg2 ORDER by yr ASC, doy ASC
    """, (phenomena, significance, syear, eyear, lastdoy))
    if cursor.rowcount == 0:
        raise NoDataFound("No Data Found.")
    data = {}
    for yr in range(syear, eyear + 1):
        data[yr] = {'doy': [0], 'counts': [0]}
    rows = []
    for row in cursor:
        data[row[0]]['doy'].append(row[1])
        data[row[0]]['counts'].append(row[2])
        rows.append(dict(year=row[0], day_of_year=row[1], count=row[2]))
    # append on a lastdoy value so all the plots go to the end
    for yr in range(syear, eyear):
        if len(data[yr]['doy']) == 1 or data[yr]['doy'][-1] >= lastdoy:
            continue
        data[yr]['doy'].append(lastdoy)
        data[yr]['counts'].append(data[yr]['counts'][-1])
    # The final year only extends to today
    if data[eyear]['doy']:
        data[eyear]['doy'].append(
            int(datetime.datetime.today().strftime("%j")) + 1)
        data[eyear]['counts'].append(data[eyear]['counts'][-1])
    df = pd.DataFrame(rows)
    (fig, ax) = plt.subplots(1, 1, figsize=(8, 6))
    ann = []
    for yr in range(syear, eyear + 1):
        # Skip years with no events (only the seeded (0, 0) point)
        if len(data[yr]['doy']) < 2:
            continue
        lp = ax.plot(data[yr]['doy'], data[yr]['counts'], lw=2,
                     label="%s (%s)" % (str(yr), data[yr]['counts'][-1]),
                     drawstyle='steps-post')
        ann.append(
            ax.text(data[yr]['doy'][-1] + 1, data[yr]['counts'][-1],
                    "%s" % (yr, ), color='w', va='center', fontsize=10,
                    bbox=dict(facecolor=lp[0].get_color(),
                              edgecolor=lp[0].get_color())))
    # Greedy collision avoidance: claim each label's pixel footprint in
    # the mask; overlapping labels get nudged left and retried (<=10x)
    mask = np.zeros(fig.canvas.get_width_height(), bool)
    fig.canvas.draw()
    attempts = 10
    while ann and attempts > 0:
        attempts -= 1
        removals = []
        for a in ann:
            bbox = a.get_window_extent()
            x0 = int(bbox.x0)
            x1 = int(math.ceil(bbox.x1))
            y0 = int(bbox.y0)
            y1 = int(math.ceil(bbox.y1))
            s = np.s_[x0:x1 + 1, y0:y1 + 1]
            if np.any(mask[s]):
                # NOTE: pokes matplotlib private attrs _x/_y
                a.set_position([a._x - int(lastdoy / 14), a._y])
            else:
                mask[s] = True
                removals.append(a)
        for rm in removals:
            ann.remove(rm)
    ax.legend(loc=2, ncol=2, fontsize=10)
    ax.set_xlim(1, 367)
    # NOTE(review): 13 tick positions vs 12 month labels -- looks like an
    # off-by-one; confirm intended rendering before changing
    ax.set_xticks((1, 32, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335,
                   365))
    ax.set_xticklabels(calendar.month_abbr[1:])
    ax.grid(True)
    ax.set_ylabel("Accumulated Count")
    ax.set_ylim(bottom=0)
    title = vtec.get_ps_string(phenomena, significance)
    if combo == 'svrtor':
        title = "Severe Thunderstorm + Tornado Warning"
    ptitle = "%s" % (ctx['_nt'].sts[station]['name'], )
    if opt == 'state':
        ptitle = ("NWS Issued for Counties/Parishes in %s") % (
            reference.state_names[state], )
    ax.set_title(("%s\n %s Count") % (ptitle, title))
    ax.set_xlim(0, lastdoy)
    if lastdoy < 367:
        ax.set_xlabel(("thru approximately %s") % (
            datetime.date.today().strftime("%-d %B"), ))
    return fig, df
def plotter(fdict):
    """Histogram of minute-of-day coverage frequency for a UGC.

    For one county/zone and one phenomena/significance pair, computes
    the percentage of events in effect during each minute of the local
    day and renders a 1440-bar chart.  Returns the figure and a
    DataFrame of per-minute counts.
    """
    import matplotlib
    matplotlib.use('agg')
    import matplotlib.pyplot as plt
    pgconn = get_dbconn('postgis')
    cursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    ctx = get_autoplot_context(fdict, get_description())
    ugc = ctx['ugc']
    phenomena = ctx['phenomena']
    significance = ctx['significance']
    (fig, ax) = plt.subplots(1, 1, figsize=(8, 6))
    # Resolve the UGC's owning WFO, its timezone, and a display name
    cursor.execute("""
        SELECT s.wfo, s.tzname, u.name from ugcs u JOIN stations s
        on (u.wfo = s.id) where ugc = %s and end_ts is null
        and s.network = 'WFO'
    """, (ugc,))
    wfo = None
    tzname = None
    name = ""
    if cursor.rowcount == 1:
        row = cursor.fetchone()
        tzname = row[1]
        wfo = row[0]
        name = row[2]
    cursor.execute("""
        SELECT count(*), min(issue at time zone %s),
        max(issue at time zone %s) from warnings
        WHERE ugc = %s and phenomena = %s and significance = %s
        and wfo = %s
    """, (tzname, tzname, ugc, phenomena, significance, wfo))
    row = cursor.fetchone()
    cnt = row[0]
    sts = row[1]
    ets = row[2]
    if sts is None:
        # NOTE(review): returns a plain string rather than raising like
        # the other plotters in this file -- confirm the caller handles
        # a string return before changing
        return "No Results Found, try flipping zone/county"
    # Expand each event into its minutes of coverage, dedup per event,
    # then count how many events touched each minute-of-day slot
    cursor.execute("""
    WITH coverage as (
        SELECT extract(year from issue) as yr, eventid,
        generate_series(issue at time zone %s,
                        expire at time zone %s,
                        '1 minute'::interval) as s
        from warnings where
        ugc = %s and phenomena = %s and significance = %s and wfo = %s),
      minutes as (SELECT distinct yr, eventid,
        (extract(hour from s)::numeric * 60. +
         extract(minute from s)::numeric) as m
        from coverage)

    SELECT minutes.m, count(*) from minutes GROUP by m
    """, (tzname, tzname, ugc, phenomena, significance, wfo))
    data = np.zeros((1440,), 'f')
    for row in cursor:
        data[int(row[0])] = row[1]
    df = pd.DataFrame(dict(minute=pd.Series(np.arange(1440)),
                           events=pd.Series(data)))
    # Convert raw counts to percent of all events
    vals = data / float(cnt) * 100.
    ax.bar(np.arange(1440), vals, ec='b', fc='b')
    if np.max(vals) > 50:
        ax.set_ylim(0, 100)
        ax.set_yticks([0, 10, 25, 50, 75, 90, 100])
    ax.grid()
    ax.set_xticks(range(0, 1440, 60))
    ax.set_xticklabels(["Mid", "", "", "3 AM", "", "", "6 AM", "", "",
                        '9 AM', "", "", "Noon", "", "", "3 PM", "", "",
                        "6 PM", "", "", "9 PM", "", "", "Mid"])
    ax.set_xlabel("Timezone: %s (Daylight or Standard)" % (tzname,))
    ax.set_ylabel("Frequency [%%] out of %s Events" % (cnt,))
    ax.set_title(("[%s] %s :: %s (%s.%s)\n%s Events - %s to %s"
                  ) % (ugc, name,
                       vtec.get_ps_string(phenomena, significance),
                       phenomena, significance, cnt,
                       sts.strftime("%Y-%m-%d %I:%M %p"),
                       ets.strftime("%Y-%m-%d %I:%M %p")))
    ax.set_xlim(0, 1441)
    return fig, df
def plotter(fdict):
    """Time-of-day frequency of VTEC events for one WFO and season.

    For a phenomena/significance pair, computes for each minute of the
    local day the share of events (capped at 24 hours each) that were
    in effect, filtered by season/month, and plots the distribution.
    Returns the figure and the per-minute DataFrame.
    """
    pgconn = get_dbconn('postgis')
    ctx = get_autoplot_context(fdict, get_description())
    wfo = ctx['station']
    phenomena = ctx['phenomena']
    significance = ctx['significance']
    # Map the season selection onto the months the SQL should match
    if ctx['season'] == 'all':
        months = range(1, 13)
    elif ctx['season'] == 'water_year':
        months = range(1, 13)
    elif ctx['season'] == 'spring':
        months = [3, 4, 5]
    elif ctx['season'] == 'spring2':
        months = [4, 5, 6]
    elif ctx['season'] == 'fall':
        months = [9, 10, 11]
    elif ctx['season'] == 'summer':
        months = [6, 7, 8]
    elif ctx['season'] == 'winter':
        months = [12, 1, 2]
    else:
        # Single-month selection encoded as a %b abbreviation
        ts = datetime.datetime.strptime("2000-" + ctx['season'] + "-01",
                                        '%Y-%b-%d')
        # make sure it is length two for the trick below in SQL
        months = [ts.month, 999]
    nt = NetworkTable("WFO")
    (fig, ax) = plt.subplots(1, 1)
    tzname = nt.sts[wfo]['tzname']
    # timedomain expands each event into its minutes of coverage, capped
    # at 24 hours so long-fuse products do not saturate every slot
    df = read_sql("""
    WITH data as (
        SELECT extract(year from issue) as yr, eventid,
        min(issue at time zone %s) as minissue,
        max(expire at time zone %s) as maxexpire
        from warnings WHERE phenomena = %s and significance = %s
        and wfo = %s and extract(month from issue) in %s
        GROUP by yr, eventid),
    events as (
        select count(*) from data),
    timedomain as (
        SELECT generate_series(minissue,
            least(maxexpire, minissue + '24 hours'::interval)
            , '1 minute'::interval) as ts from data
    ),
    data2 as (
        SELECT extract(hour from ts)::int * 60 +
        extract(minute from ts)::int as minute,
        count(*) from timedomain GROUP by minute ORDER by minute ASC)

    select d.minute, d.count, e.count as total from data2 d, events e
    """, pgconn, params=(
        tzname, tzname, phenomena, significance, wfo, tuple(months)),
                  index_col='minute')
    if df.empty:
        raise ValueError("No Results Found")
    df['frequency'] = df['count'] / df['total'] * 100.
    ax.bar(df.index.values, df['frequency'].values, ec='b', fc='b',
           align='center')
    ax.grid()
    if df['frequency'].max() > 70:
        ax.set_ylim(0, 101)
    ax.set_xticks(range(0, 25 * 60, 60))
    ax.set_xlim(-0.5, 24 * 60 + 1)
    ax.set_xticklabels(["Mid", "", "", "3 AM", "", "", "6 AM", "", "",
                        '9 AM', "", "", "Noon", "", "", "3 PM", "", "",
                        "6 PM", "", "", "9 PM", "", "", "Mid"])
    ax.set_xlabel("Timezone: %s (Daylight or Standard)" % (tzname,))
    ax.set_ylabel("Percentage [%%] out of %.0f Events" % (
        df['total'].max(), ))
    title = "[%s] %s :: Time of Day Frequency" % (wfo, nt.sts[wfo]['name'])
    subtitle = "%s (%s.%s) [%s]" % (
        vtec.get_ps_string(phenomena, significance), phenomena,
        significance, MDICT[ctx['season']]
    )
    fitbox(fig, title, 0.05, 0.95, 0.95, 0.99, ha='center')
    fitbox(fig, subtitle, 0.05, 0.95, 0.91, 0.945, ha='center')
    return fig, df
def plotter(fdict):
    """National map of per-WFO VTEC statistics.

    Depending on ctx['var'], maps either the distinct event count, the
    number of calendar days with at least one active event, or the
    percentage of the period with an event in effect, for up to four
    phenomena/significance pairs.  Returns the map figure and the
    per-WFO DataFrame.
    """
    pgconn = get_dbconn('postgis')
    ctx = get_autoplot_context(fdict, get_description())
    sts = ctx['sdate']
    sts = sts.replace(tzinfo=pytz.UTC)
    ets = ctx['edate']
    ets = ets.replace(tzinfo=pytz.UTC)
    # Up to four phenomena/significance selectors may be provided
    p1 = ctx['phenomenav1']
    p2 = ctx['phenomenav2']
    p3 = ctx['phenomenav3']
    p4 = ctx['phenomenav4']
    varname = ctx['var']
    phenomena = []
    for p in [p1, p2, p3, p4]:
        if p is not None:
            phenomena.append(p[:2])
    s1 = ctx['significancev1']
    s2 = ctx['significancev2']
    s3 = ctx['significancev3']
    s4 = ctx['significancev4']
    significance = []
    for s in [s1, s2, s3, s4]:
        if s is not None:
            significance.append(s[0])
    # Build an OR'd SQL clause plus title/subtitle strings from the pairs
    pstr = []
    subtitle = ""
    title = ""
    for p, s in zip(phenomena, significance):
        pstr.append("(phenomena = '%s' and significance = '%s')" % (p, s))
        subtitle += "%s.%s " % (p, s)
        title += vtec.get_ps_string(p, s)
    if len(phenomena) > 1:
        title = "VTEC Unique Event"
    pstr = " or ".join(pstr)
    pstr = "(%s)" % (pstr, )
    cmap = plt.get_cmap(ctx['cmap'])
    if varname == 'count':
        df = read_sql("""
    with total as (
    select distinct wfo, extract(year from issue at time zone 'UTC')
    as year, phenomena, significance, eventid from warnings
    where """ + pstr + """ and issue >= %s and issue < %s
    )

    SELECT wfo, phenomena, significance, year, count(*) from total
    GROUP by wfo, phenomena, significance, year
    """, pgconn, params=(sts, ets))
        df2 = df.groupby('wfo')['count'].sum()
        maxv = df2.max()
        # Pick bins by magnitude; branches are tested largest-first
        bins = [0, 1, 2, 3, 5, 10, 15, 20, 25, 30, 40, 50, 75, 100, 200]
        if maxv > 5000:
            bins = [
                0, 5, 10, 50, 100, 250, 500, 750, 1000, 1500, 2000, 3000,
                5000, 7500, 10000
            ]
        elif maxv > 1000:
            bins = [
                0, 1, 5, 10, 50, 100, 150, 200, 250, 500, 750, 1000,
                1250, 1500, 2000
            ]
        elif maxv > 200:
            bins = [
                0, 1, 3, 5, 10, 20, 35, 50, 75, 100, 150, 200, 250, 500,
                750, 1000
            ]
        units = 'Count'
        lformat = '%.0f'
    elif varname == 'days':
        # The +/- 90 day padding admits events straddling the window;
        # generate_series then clips coverage back to [sts, ets]
        df = read_sql("""
        WITH data as (
            SELECT distinct wfo, generate_series(greatest(issue, %s),
            least(expire, %s), '1 minute'::interval) as ts from warnings
            WHERE issue > %s and expire < %s and """ + pstr + """
        ), agg as (
            SELECT distinct wfo, date(ts) from data
        )
        select wfo, count(*) as days from agg
        GROUP by wfo ORDER by days DESC
    """, pgconn, params=(sts, ets,
                         sts - datetime.timedelta(days=90),
                         ets + datetime.timedelta(days=90)),
                      index_col='wfo')
        df2 = df['days']
        if df2.max() < 10:
            bins = list(range(1, 11, 1))
        else:
            bins = np.linspace(1, df['days'].max() + 11, 10, dtype='i')
        units = 'Days'
        lformat = '%.0f'
        cmap.set_under('white')
        cmap.set_over('#EEEEEE')
    else:
        # Percent of the period with an event in effect, by minute
        total_minutes = (ets - sts).total_seconds() / 60.
        df = read_sql("""
        WITH data as (
            SELECT distinct wfo, generate_series(greatest(issue, %s),
            least(expire, %s), '1 minute'::interval) as ts from warnings
            WHERE issue > %s and expire < %s and """ + pstr + """
        )
        select wfo, count(*) / %s * 100. as tpercent from data
        GROUP by wfo ORDER by tpercent DESC
    """, pgconn, params=(sts, ets,
                         sts - datetime.timedelta(days=90),
                         ets + datetime.timedelta(days=90),
                         total_minutes),
                      index_col='wfo')
        df2 = df['tpercent']
        bins = list(range(0, 101, 10))
        if df2.max() < 5:
            bins = np.arange(0, 5.1, 0.5)
        elif df2.max() < 10:
            bins = list(range(0, 11, 1))
        units = 'Percent'
        lformat = '%.1f'
    nt = NetworkTable("WFO")
    # Backfill zero for every office absent from the query result
    for sid in nt.sts:
        sid = sid[-3:]
        if sid not in df2:
            df2[sid] = 0
    mp = MapPlot(sector='nws', axisbg='white',
                 title='%s %s by NWS Office' % (title, PDICT[varname]),
                 subtitle=('Valid %s - %s UTC, based on VTEC: %s'
                           ) % (sts.strftime("%d %b %Y %H:%M"),
                                ets.strftime("%d %b %Y %H:%M"),
                                subtitle))
    mp.fill_cwas(df2, bins=bins, ilabel=True, units=units,
                 lblformat=lformat, cmap=cmap)
    return mp.fig, df
def plotter(fdict):
    """National map of distinct VTEC event counts by NWS WFO.

    Counts unique events for up to four phenomena/significance pairs
    over the requested UTC period and fills each CWA on an 'nws' sector
    MapPlot.  Returns the map figure and the per-WFO/year DataFrame.
    """
    import matplotlib
    matplotlib.use('agg')
    pgconn = get_dbconn('postgis')
    ctx = get_autoplot_context(fdict, get_description())
    sts = ctx['sdate']
    sts = sts.replace(tzinfo=pytz.timezone("UTC"))
    ets = ctx['edate']
    ets = ets.replace(tzinfo=pytz.timezone("UTC"))
    # Up to four phenomena/significance selectors may be provided
    p1 = ctx['phenomenav1']
    p2 = ctx['phenomenav2']
    p3 = ctx['phenomenav3']
    p4 = ctx['phenomenav4']
    phenomena = []
    for p in [p1, p2, p3, p4]:
        if p is not None:
            phenomena.append(p[:2])
    s1 = ctx['significancev1']
    s2 = ctx['significancev2']
    s3 = ctx['significancev3']
    s4 = ctx['significancev4']
    significance = []
    for s in [s1, s2, s3, s4]:
        if s is not None:
            significance.append(s[0])
    # Build an OR'd SQL clause plus title/subtitle strings from the pairs
    pstr = []
    subtitle = ""
    title = ""
    for p, s in zip(phenomena, significance):
        pstr.append("(phenomena = '%s' and significance = '%s')" % (p, s))
        subtitle += "%s.%s " % (p, s)
        title += vtec.get_ps_string(p, s)
    if len(phenomena) > 1:
        title = "VTEC Unique Event"
    pstr = " or ".join(pstr)
    pstr = "(%s)" % (pstr,)
    df = read_sql("""
    with total as (
    select distinct wfo, extract(year from issue at time zone 'UTC')
    as year, phenomena, significance, eventid from warnings
    where """ + pstr + """ and issue >= %s and issue < %s
    )

    SELECT wfo, phenomena, significance, year, count(*) from total
    GROUP by wfo, phenomena, significance, year
    """, pgconn, params=(sts, ets))
    df2 = df.groupby('wfo')['count'].sum()
    nt = NetworkTable("WFO")
    # Backfill zero for every office absent from the query result
    for sid in nt.sts:
        sid = sid[-3:]
        if sid not in df2:
            df2[sid] = 0
    maxv = df2.max()
    # Pick bins by magnitude, largest threshold first.  BUGFIX: the
    # previous code tested maxv > 200 before maxv > 1000, making the
    # 1000+ bins unreachable (any value > 1000 is also > 200)
    bins = [0, 1, 2, 3, 5, 10, 15, 20, 25, 30, 40, 50, 75, 100, 200]
    if maxv > 1000:
        bins = [0, 1, 5, 10, 50, 100, 150, 200, 250, 500, 750, 1000,
                1250, 1500, 2000]
    elif maxv > 200:
        bins = [0, 1, 3, 5, 10, 20, 35, 50, 75, 100, 150, 200, 250, 500,
                750, 1000]
    mp = MapPlot(sector='nws', axisbg='white',
                 title='%s Counts by NWS Office' % (title,),
                 subtitle=('Valid %s - %s UTC, based on VTEC: %s'
                           ) % (sts.strftime("%d %b %Y %H:%M"),
                                ets.strftime("%d %b %Y %H:%M"),
                                subtitle))
    mp.fill_cwas(df2, bins=bins, ilabel=True)
    return mp.fig, df