def test_get_autoplot_context():
    """Exercise get_autoplot_context() against a representative config."""
    form = {
        'station': 'AMW',
        'network': 'IA_ASOS',
        'type': 'max-low',
        'type2': 'bogus',
        't': 15,
        'type3': ['max-high', 'bogus', 'min-high'],
    }
    options = OrderedDict([
        ('max-high', 'Maximum High'),
        ('avg-high', 'Average High'),
        ('min-high', 'Minimum High'),
        ('max-low', 'Maximum Low'),
    ])
    arguments = [
        dict(type='station', name='station', default='IA0000'),
        dict(type='select', name='type', default='max-high',
             options=options),
        dict(type='select', name='type2', default='max-high',
             options=options),
        dict(type='select', name='type3', default='max-high',
             options=options, multiple=True),
        dict(type='select', name='type4', default='max-high',
             options=options, multiple=True, optional=True),
        dict(type='select', name='type5', default='max-high',
             options=options),
        dict(type='int', name='threshold', default=-99),
        dict(type='int', name='t', default=9, min=0, max=10),
        dict(type='date', name='d', default='2011/11/12'),
        dict(type='datetime', name='d2', default='2011/11/12 0000',
             max='2017/12/12 1212', min='2011/01/01 0000'),
        dict(type='year', name='year', default='2011', optional=True),
        dict(type='float', name='f', default=1.10),
    ]
    ctx = util.get_autoplot_context(form, dict(arguments=arguments))
    # Values taken straight from the form
    assert ctx['station'] == 'AMW'
    assert ctx['network'] == 'IA_ASOS'
    assert ctx['type'] == 'max-low'
    # An invalid select value falls back to the configured default
    assert ctx['type2'] == 'max-high'
    # Numeric arguments are coerced to their declared types
    assert isinstance(ctx['threshold'], int)
    assert isinstance(ctx['f'], float)
    # An out-of-range int (15 > max of 10) reverts to the default
    assert ctx['t'] == 9
    assert ctx['d'] == datetime.date(2011, 11, 12)
    assert ctx['d2'] == datetime.datetime(2011, 11, 12)
    # Optional arguments absent from the form never enter the context
    assert 'year' not in ctx
    assert 'type4' not in ctx
    # Multi-select keeps only known option values
    assert 'bogus' not in ctx['type3']

    # A zstation argument implies its network value
    form = {'zstation': 'DSM'}
    cfg = dict(arguments=[
        dict(type='zstation', name='station', default='DSM',
             network='IA_ASOS')])
    ctx = util.get_autoplot_context(form, cfg)
    assert ctx['network'] == 'IA_ASOS'
def plotter(fdict):
    """Bar chart of days per year with measurable precipitation.

    Queries the downscaled (hayhoe_daily) scenario data for the climate
    site associated with a CSCAP station and counts, per year, the days
    with precip > 0.

    Args:
      fdict: CGI form dictionary handed to get_autoplot_context().

    Returns:
      matplotlib.Figure
    """
    import matplotlib
    matplotlib.use('agg')  # headless backend; must precede pyplot import
    import matplotlib.pyplot as plt
    pgconn = psycopg2.connect(database='coop', host='iemdb', user='******')
    ctx = get_autoplot_context(fdict, get_description())
    cursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    station = ctx['station']
    nt = NetworkTable("CSCAP")
    # CSCAP metadata maps the site to the climate site keyed in hayhoe_daily
    clstation = nt.sts[station]['climate_site']
    # ctx['model'] encodes "model=scenario"
    (model, scenario) = ctx['model'].split("=")
    (fig, ax) = plt.subplots(1, 1)
    cursor.execute("""
        SELECT extract(year from day) as yr,
        sum(case when precip > 0 THEN 1 else 0 end) from hayhoe_daily
        WHERE precip is not null and station = %s and model = %s
        and scenario = %s GROUP by yr ORDER by yr ASC
    """, (clstation, model, scenario))
    years = []
    precip = []
    for row in cursor:
        years.append(row[0])
        precip.append(row[1])
    ax.bar(years, precip, ec='b', fc='b')
    ax.grid(True)
    ax.set_ylabel("Days Per Year")
    ax.set_title("%s %s\n%s %s :: Days per Year with Measurable Precip" % (
        station, nt.sts[station]['name'], model, scenario))
    return fig
def plotter(fdict):
    """Bar chart of spring/summer days ending a dry N-day window.

    For the chosen downscaled model/scenario, counts for each year the
    March-August days whose trailing ``days``-day precipitation total is
    below 0.01 inch.

    Args:
      fdict: CGI form dictionary handed to get_autoplot_context().

    Returns:
      (matplotlib.Figure, pandas.DataFrame)
    """
    import matplotlib
    matplotlib.use('agg')  # headless backend; must precede pyplot import
    import matplotlib.pyplot as plt
    pgconn = psycopg2.connect(database='coop', host='iemdb', user='******')
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['station']
    days = ctx['days']
    nt = NetworkTable("CSCAP")
    # CSCAP metadata maps the site to the climate site keyed in hayhoe_daily
    clstation = nt.sts[station]['climate_site']
    # ctx['model'] encodes "model=scenario"
    (model, scenario) = ctx['model'].split("=")
    (fig, ax) = plt.subplots(1, 1)
    # window of `days` rows == (days - 1) preceding plus the current row
    df = read_sql("""
    WITH data as (
        SELECT day, sum(precip) OVER (ORDER by day ASC ROWS BETWEEN %s
        preceding and current row) from hayhoe_daily WHERE
        precip is not null and station = %s and model = %s and
        scenario = %s)
    SELECT extract(year from day) as yr,
    sum(case when sum < 0.01 then 1 else 0 end) as precip from data
    WHERE extract(month from day) in (3,4,5,6,7,8)
    GROUP by yr ORDER by yr ASC
    """, pgconn, params=(days - 1, clstation, model, scenario),
                  index_col='yr')
    ax.bar(df.index.values, df['precip'].values, ec='b', fc='b')
    ax.grid(True)
    ax.set_ylabel("Days Per Year")
    ax.set_title(("%s %s\n%s %s :: Spring/Summer with No Precip over %s days"
                  ) % (station, nt.sts[station]['name'], model, scenario,
                       days))
    return fig, df
def test_vtecps():
    """The vtec_ps form type honors form values, defaults, and optional."""
    arguments = [
        {'type': 'vtec_ps', 'name': 'v1', 'default': 'TO.W',
         'label': 'VTEC Phenomena and Significance 1'},
        {'type': 'vtec_ps', 'name': 'v2', 'default': 'TO.A',
         'optional': True, 'label': 'VTEC Phenomena and Significance 2'},
        {'type': 'vtec_ps', 'name': 'v3', 'default': None,
         'optional': True, 'label': 'VTEC Phenomena and Significance 3'},
        {'type': 'vtec_ps', 'name': 'v4', 'default': 'FL.Y',
         'optional': True, 'label': 'VTEC Phenomena and Significance 4'},
        {'type': 'vtec_ps', 'name': 'v5', 'default': 'UNUSED',
         'optional': True, 'label': 'VTEC Phenomena and Significance 5'},
    ]
    form = {'phenomenav1': 'SV', 'significancev1': 'A',
            'phenomenav4': 'TO', 'significancev4': 'W'}
    ctx = util.get_autoplot_context(form, {'arguments': arguments})
    # v1 was explicitly provided by the form
    assert ctx['phenomenav1'] == 'SV'
    assert ctx['significancev1'] == 'A'
    # v2 and v3 are optional and unset in the form, so they stay None
    assert ctx.get('phenomenav2') is None
    assert ctx.get('phenomenav3') is None
    # v4 was provided a value via the form
    assert ctx['significancev4'] == 'W'
    # v5 carries an unusable default
    assert ctx.get('phenomenav5') is None
def plotter(fdict):
    """Scatter of first sub-t1 low versus days until the first sub-t2 low.

    For each fall season (month > 6), finds the day of year of the first
    low below ``t1`` and below ``t2``, then plots the day of the first
    event against the number of days separating the two.

    Fix: the math-text axis labels were written in non-raw strings, so
    ``\\circ``/``\\mathrm`` were invalid escape sequences (deprecated in
    Python 3); the labels now use raw strings.

    Args:
      fdict: CGI form dictionary handed to get_autoplot_context().

    Returns:
      (matplotlib.Figure, pandas.DataFrame)
    """
    import matplotlib
    matplotlib.use('agg')  # headless backend; must precede pyplot import
    import matplotlib.pyplot as plt
    pgconn = psycopg2.connect(database='coop', host='iemdb', user='******')
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['station']
    t1 = ctx['t1']
    t2 = ctx['t2']
    table = "alldata_%s" % (station[:2],)
    nt = NetworkTable("%sCLIMATE" % (station[:2],))
    # 999 is the "never happened" sentinel; such years are filtered below
    df = read_sql("""
        SELECT year, min(low) as min_low,
        min(case when low < %s then extract(doy from day) else 999 end)
            as t1_doy,
        min(case when low < %s then extract(doy from day) else 999 end)
            as t2_doy
        from """ + table + """ where station = %s and month > 6
        GROUP by year ORDER by year ASC
    """, pgconn, params=(t1, t2, station), index_col='year')
    # drop years where the t2 threshold was never reached
    df = df[df['t2_doy'] < 400]
    doy = np.array(df['t1_doy'], 'i')
    doy2 = np.array(df['t2_doy'], 'i')
    sts = datetime.datetime(2000, 1, 1)
    xticks = []
    xticklabels = []
    for i in range(min(doy), max(doy2) + 1):
        ts = sts + datetime.timedelta(days=i)
        if ts.day in [1, 8, 15, 22]:
            xticks.append(i)
            fmt = "%b %-d" if ts.day == 1 else "%-d"
            xticklabels.append(ts.strftime(fmt))
    (fig, ax) = plt.subplots(1, 1)
    ax.set_xticks(xticks)
    ax.set_xticklabels(xticklabels)
    ax.scatter(doy, doy2 - doy)
    # diagonal guides: both events on the same calendar day fall on y=0
    for x in xticks:
        ax.plot((x - 100, x), (100, 0), ':', c=('#000000'))
    ax.set_ylim(-1, max(doy2 - doy) + 4)
    ax.set_xlim(min(doy) - 4, max(doy) + 4)
    ax.set_title("[%s] %s\nFirst Fall Temperature Occurences" % (
        station, nt.sts[station]['name']))
    ax.set_ylabel(r"Days until first sub %s$^{\circ}\mathrm{F}$" % (t2,))
    ax.set_xlabel(r"First day of sub %s$^{\circ}\mathrm{F}$" % (t1,))
    ax.grid(True)
    return fig, df
def get_context(fdict):
    """Assemble forecast and observation data for an HML DCP station.

    Loads river forecasts generated within (dt - 3 days, dt + 1 day) and
    the matching observations, merging obs onto the forecast frame when
    forecasts exist.

    Args:
      fdict: CGI form dictionary handed to get_autoplot_context().

    Returns:
      dict: context with ``df`` (forecasts), ``odf`` (observations pivot),
      ``primary``/``secondary`` labels, ``title`` and ``subtitle``.
    """
    pgconn = psycopg2.connect(database='hads', host='iemdb-hads',
                              user='******')
    cursor = pgconn.cursor()
    ctx = get_autoplot_context(fdict, get_description())
    ctx['station'] = ctx['station'].upper()
    station = ctx['station']
    dt = ctx['dt']
    # Attempt to get station information
    cursor.execute("""
        SELECT name from stations where id = %s and network ~* 'DCP'
    """, (station,))
    ctx['name'] = ""
    if cursor.rowcount > 0:
        row = cursor.fetchone()
        ctx['name'] = row[0]
    # forecast data tables are partitioned by year of the request date
    ctx['df'] = read_sql("""with fx as (
        select id, issued, primaryname, primaryunits, secondaryname,
        secondaryunits from hml_forecast where station = %s
        and generationtime between %s and %s)
    SELECT f.id, f.issued, d.valid, d.primary_value, f.primaryname,
    f.primaryunits, d.secondary_value, f.secondaryname,
    f.secondaryunits from
    hml_forecast_data_""" + str(dt.year) + """ d JOIN fx f
        on (d.hml_forecast_id = f.id)
    ORDER by f.id ASC, d.valid ASC
    """, pgconn, params=(station, dt - datetime.timedelta(days=3),
                         dt + datetime.timedelta(days=1)), index_col=None)
    if len(ctx['df'].index) > 0:
        # label strings like "Stage[ft]" from the first forecast row
        ctx['primary'] = "%s[%s]" % (ctx['df'].iloc[0]['primaryname'],
                                     ctx['df'].iloc[0]['primaryunits'])
        ctx['secondary'] = "%s[%s]" % (ctx['df'].iloc[0]['secondaryname'],
                                       ctx['df'].iloc[0]['secondaryunits'])
        # get obs over the span of the forecast data
        mints = ctx['df']['valid'].min()
        maxts = ctx['df']['valid'].max()
    else:
        # no forecasts found; fall back to a window around the request date
        mints = dt - datetime.timedelta(days=3)
        maxts = dt + datetime.timedelta(days=3)
    df = read_sql("""
        SELECT valid, h.label, value from
        hml_observed_data_""" + str(dt.year) + """ d
        JOIN hml_observed_keys h on (d.key = h.id)
        WHERE station = %s and valid between %s and %s ORDER by valid
    """, pgconn, params=(station, mints, maxts), index_col=None)
    # one column per observed variable label, indexed by valid time
    ctx['odf'] = df.pivot('valid', 'label', 'value')
    if len(ctx['df'].index) > 0:
        ctx['df'] = pd.merge(ctx['df'], ctx['odf'], left_on='valid',
                             right_index=True, how='left', sort=False)
    ctx['title'] = "[%s] %s" % (ctx['station'], ctx['name'])
    ctx['subtitle'] = ctx['dt'].strftime("%d %b %Y %H:%M UTC")
    # with no forecasts, derive the primary/secondary labels from the obs
    # NOTE(review): assumes odf has at least two columns here -- confirm
    if len(ctx['df'].index) == 0 and len(ctx['odf'].index) > 0:
        ctx['primary'] = ctx['odf'].columns[0]
        ctx['secondary'] = ctx['odf'].columns[1]
    return ctx
def plotter(fdict):
    """Bar chart of a daily variable on a fixed date or floating holiday.

    Pulls the observation for either an exact month/day or a holiday
    (memorial/thanksgiving/labor day) for each year on record and plots
    the chosen variable with its long-term mean.

    Args:
      fdict: CGI form dictionary handed to get_autoplot_context().

    Returns:
      (matplotlib.Figure, pandas.DataFrame)
    """
    import matplotlib
    matplotlib.use('agg')  # headless backend; must precede pyplot import
    import matplotlib.pyplot as plt
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['station']
    network = ctx['network']
    varname = ctx['var']
    thedate = ctx['thedate']
    date = ctx['date']
    nt = NetworkTable(network)
    pgconn = psycopg2.connect(database='coop', host='iemdb', user='******')
    # climate data is partitioned into per-state alldata tables
    table = "alldata_%s" % (station[:2], )
    if date == 'exact':
        df = read_sql("""
            SELECT year, high, day, precip from """ + table + """
            WHERE station = %s and sday = %s ORDER by year ASC
        """, pgconn, params=(station, thedate.strftime("%m%d")),
                      index_col='year')
        subtitle = thedate.strftime("%B %-d")
    else:
        # floating holidays: one date per year from the helper functions
        if date == 'memorial':
            days = memorial_days()
        elif date == 'thanksgiving':
            days = thanksgiving()
        else:
            days = labor_days()
        df = read_sql("""
            SELECT year, high, day, precip from """ + table + """
            WHERE station = %s and day in %s ORDER by year ASC
        """, pgconn, params=(station, tuple(days)), index_col='year')
        subtitle = PDICT[date]
    (fig, ax) = plt.subplots(1, 1)
    ax.bar(df.index.values, df[varname], fc='r', ec='r', align='center')
    mean = df[varname].mean()
    ax.axhline(mean)
    # annotate the mean just right of the final bar
    ax.text(df.index.values[-1] + 1, mean, '%.2f' % (mean,), ha='left',
            va='center')
    ax.grid(True)
    ax.set_title(("%s [%s] Daily %s\non %s"
                  ) % (nt.sts[station]['name'], station, PDICT2[varname],
                       subtitle))
    ax.set_xlim(df.index.values.min() - 1, df.index.values.max() + 1)
    ax.set_ylabel(PDICT2[varname])
    # temperatures get padded limits; precip keeps the default zero base
    if varname != 'precip':
        ax.set_ylim(df[varname].min() - 5, df[varname].max() + 5)
    return fig, df
def plotter(fdict):
    """Horizontal bars of monthly average wind speed with vector direction.

    Averages hourly u/v wind components by month, then renders average
    speed bars labeled with the vector-average direction.

    Fix: the per-month ``v_<units>`` column was computed from the *u*
    component (copy-paste bug); it now uses ``grp['v']``.

    Args:
      fdict: CGI form dictionary handed to get_autoplot_context().

    Returns:
      (matplotlib.Figure, pandas.DataFrame): figure plus monthly means.
    """
    import matplotlib
    matplotlib.use('agg')  # headless backend; must precede pyplot import
    import matplotlib.pyplot as plt
    pgconn = psycopg2.connect(database='asos', host='iemdb', user='******')
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['zstation']
    network = ctx['network']
    units = ctx['units']
    nt = NetworkTable(network)
    df = read_sql("""
        select date_trunc('hour', valid) as ts,
        avg(sknt) as sknt, max(drct) as drct from alldata
        WHERE station = %s and sknt is not null and drct is not null
        GROUP by ts
    """, pgconn, params=(station, ), parse_dates=('ts',), index_col=None)
    sknt = speed(df['sknt'].values, 'KT')
    drct = direction(df['drct'].values, 'DEG')
    # NOTE(review): u/v are stored in MPS here but re-tagged as 'KT' in the
    # conversions below, matching the original behavior -- confirm intent
    df['u'], df['v'] = [x.value('MPS') for x in meteorology.uv(sknt, drct)]
    df['month'] = df['ts'].dt.month
    grp = df[['month', 'u', 'v', 'sknt']].groupby('month').mean()
    grp['u_%s' % (units,)] = speed(grp['u'].values, 'KT').value(
        units.upper())
    # was grp['u'] -- the v column must come from the v component
    grp['v_%s' % (units,)] = speed(grp['v'].values, 'KT').value(
        units.upper())
    grp['sped_%s' % (units,)] = speed(grp['sknt'].values, 'KT').value(
        units.upper())
    drct = meteorology.drct(speed(grp['u'].values, 'KT'),
                            speed(grp['v'].values, 'KT'))
    grp['drct'] = drct.value('DEG')
    maxval = grp['sped_%s' % (units,)].max()
    (fig, ax) = plt.subplots(1, 1)
    ax.barh(grp.index.values, grp['sped_%s' % (units,)].values,
            align='center')
    ax.set_xlabel("Average Wind Speed [%s]" % (UNITS[units],))
    ax.set_yticks(grp.index.values)
    ax.set_yticklabels(calendar.month_abbr[1:])
    ax.grid(True)
    ax.set_xlim(0, maxval * 1.2)
    for mon, row in grp.iterrows():
        # direction label to the right of the bars, speed inside the bar
        ax.text(maxval * 1.1, mon, drct2text(row['drct']), ha='center',
                va='center', bbox=dict(color='white'))
        ax.text(row['sped_%s' % (units,)] * 0.98, mon,
                "%.1f" % (row['sped_%s' % (units,)],), ha='right',
                va='center',
                bbox=dict(color='white', boxstyle='square,pad=0.03',))
    # invert the y axis so January is on top
    ax.set_ylim(12.5, 0.5)
    ax.set_title(("[%s] %s [%s-%s]\nMonthly Average Wind Speed and"
                  " Vector Average Direction"
                  ) % (station, nt.sts[station]['name'],
                       df['ts'].min().year, df['ts'].max().year))
    return fig, grp
def plotter(fdict):
    """Hourly frequency of temperature above/below a threshold for a month.

    Buckets observations into local-time hours and computes how often the
    temperature is above/below the threshold in each hour.

    Fixes: the title contained the invalid escape ``\\circ`` in a non-raw
    string (now escaped explicitly, since the string also holds a real
    newline), and nine x-ticks were paired with eight labels, which raises
    on modern matplotlib -- the tick range now stops at hour 21.

    Args:
      fdict: CGI form dictionary handed to get_autoplot_context().

    Returns:
      (matplotlib.Figure, pandas.DataFrame)
    """
    import matplotlib
    matplotlib.use('agg')  # headless backend; must precede pyplot import
    import matplotlib.pyplot as plt
    pgconn = psycopg2.connect(database='asos', host='iemdb', user='******')
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['zstation']
    network = ctx['network']
    month = int(ctx['month'])
    thres = ctx['t']
    mydir = ctx['dir']
    nt = NetworkTable(network)
    tzname = nt.sts[station]['tzname']
    # +10 minutes rounds routine :50-ish METARs up into the next hour
    df = read_sql("""
    WITH data as (
        SELECT valid at time zone %s + '10 minutes'::interval as v, tmpf
        from alldata where station = %s and tmpf > -90 and tmpf < 150
        and extract(month from valid) = %s and report_type = 2)
    SELECT extract(hour from v) as hour,
    sum(case when tmpf::int < %s THEN 1 ELSE 0 END) as below,
    sum(case when tmpf::int >= %s THEN 1 ELSE 0 END) as above,
    count(*) from data
    GROUP by hour ORDER by hour ASC
    """, pgconn, params=(tzname, station, month, thres, thres),
                  index_col='hour')
    df['below_freq'] = df['below'].values.astype('f') / df['count'] * 100.
    df['above_freq'] = df['above'].values.astype('f') / df['count'] * 100.
    freq = df[mydir + "_freq"].values
    hours = df.index.values
    (fig, ax) = plt.subplots(1, 1)
    bars = ax.bar(hours - 0.4, freq, fc='blue')
    for i, bar in enumerate(bars):
        ax.text(i, bar.get_height() + 3, "%.0f" % (bar.get_height(),),
                ha='center', fontsize=10)
    # one label per tick (hours 0..21 every 3); hour 24 sat outside xlim
    ax.set_xticks(range(0, 24, 3))
    ax.set_xticklabels(['Mid', '3 AM', '6 AM', '9 AM', 'Noon', '3 PM',
                        '6 PM', '9 PM'])
    ax.grid(True)
    ax.set_ylim(0, 100)
    ax.set_yticks([0, 25, 50, 75, 100])
    ax.set_ylabel("Frequency [%]")
    ax.set_xlabel("Hour Timezone: %s" % (tzname,))
    ax.set_xlim(-0.5, 23.5)
    ax.set_title(("%s [%s]\nFrequency of %s Hour, %s: %s$^\\circ$F"
                  ) % (nt.sts[station]['name'], station,
                       calendar.month_name[month], PDICT[mydir], thres))
    return fig, df
def plotter(fdict):
    """Bias of the 24-hour 'day' split hour on average high/low temps.

    For each possible split hour, recomputes daily max/min temperatures
    and plots the departure of the averages from the midnight-split value.

    Fix: the y-axis label used the invalid escape ``\\circ`` in a non-raw
    string (deprecated in Python 3); it is now a raw string.

    Args:
      fdict: CGI form dictionary handed to get_autoplot_context().

    Returns:
      (matplotlib.Figure, pandas.DataFrame)
    """
    import matplotlib
    matplotlib.use('agg')  # headless backend; must precede pyplot import
    import matplotlib.pyplot as plt
    ASOS = psycopg2.connect(database='asos', host='iemdb', user='******')
    cursor = ASOS.cursor(cursor_factory=psycopg2.extras.DictCursor)
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['zstation']
    network = ctx['network']
    nt = NetworkTable(network)
    # shift each ob by 0..23 hours, re-bin into days, and average the
    # per-day extremes for every candidate split hour
    cursor.execute("""
    WITH obs as (
        select valid at time zone %s + '10 minutes'::interval as v, tmpf
        from alldata WHERE station = %s and tmpf >= -90 and tmpf < 150),
    s as (SELECT generate_series(0, 23, 1) || ' hours' as series),
    daily as (select s.series, v + s.series::interval as t, tmpf
              from obs, s),
    sums as (select series, date(t), max(tmpf), min(tmpf) from daily
             GROUP by series, date)
    SELECT series, avg(max), avg(min) from sums GROUP by series
    """, (nt.sts[station]['tzname'], station))
    rows = []
    hrs = range(25)
    highs = [None] * 25
    lows = [None] * 25
    for row in cursor:
        # series label looks like "<n> hours"; a shift of n maps to
        # a split at hour 24 - n
        i = int(row[0].split()[0])
        highs[24 - i] = row[1]
        lows[24 - i] = row[2]
        rows.append(dict(offset=(24 - i), avg_high=row[1], avg_low=row[2]))
    # hour 0 and hour 24 are the same split; duplicate the endpoint
    rows.append(dict(offset=0, avg_high=highs[24], avg_low=lows[24]))
    highs[0] = highs[24]
    lows[0] = lows[24]
    df = pd.DataFrame(rows)
    (fig, ax) = plt.subplots(1, 1)
    ax.plot(hrs, np.array(highs) - highs[0], label="High Temp", lw=2,
            color='r')
    ax.plot(hrs, np.array(lows) - lows[0], label="Low Temp", lw=2,
            color='b')
    ax.set_title(("[%s] %s %s-%s\n"
                  "Bias of 24 Hour 'Day' Split for Average High + Low Temp"
                  ) % (station, nt.sts[station]['name'],
                       nt.sts[station]['archive_begin'].year,
                       datetime.date.today().year))
    ax.set_ylabel(r"Average Temperature Difference $^\circ$F")
    ax.set_xlim(0, 24)
    ax.set_xticks((0, 4, 8, 12, 16, 20, 24))
    ax.set_xticklabels(('Mid', '4 AM', '8 AM', 'Noon', '4 PM', '8 PM',
                        'Mid'))
    ax.grid(True)
    ax.set_xlabel("Hour Used for 24 Hour Summary")
    ax.legend(loc='best')
    return fig, df
def plotter(fdict):
    """Text report of yearly day counts at or above precip thresholds.

    Builds a fixed-width Climodat report with one table per threshold
    (0.01 to 4 inch), one row per year, one column per month plus an
    annual total.

    Args:
      fdict: CGI form dictionary handed to get_autoplot_context().

    Returns:
      (None, pandas.DataFrame, str): no figure, the counts frame, and the
      formatted report text.
    """
    import matplotlib
    matplotlib.use('agg')  # headless backend kept for app consistency
    pgconn = psycopg2.connect(database='coop', host='iemdb', user='******')
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['station']
    # climate data is partitioned into per-state alldata tables
    table = "alldata_%s" % (station[:2], )
    nt = NetworkTable("%sCLIMATE" % (station[:2], ))
    CATS = np.array([0.01, 0.5, 1., 2., 3., 4.])
    startyear = nt.sts[station]['archive_begin'].year
    # 0.01, 0.5, 1, 2, 3, 4
    df = read_sql("""
    SELECT year, month,
    sum(case when precip >= %s then 1 else 0 end) as cat1,
    sum(case when precip >= %s then 1 else 0 end) as cat2,
    sum(case when precip >= %s then 1 else 0 end) as cat3,
    sum(case when precip >= %s then 1 else 0 end) as cat4,
    sum(case when precip >= %s then 1 else 0 end) as cat5,
    sum(case when precip >= %s then 1 else 0 end) as cat6
    from """ + table + """ WHERE station = %s
    GROUP by year, month ORDER by year, month
    """, pgconn, params=(CATS[0], CATS[1], CATS[2], CATS[3], CATS[4],
                         CATS[5], station), index_col=['year', 'month'])
    res = """\
# IEM Climodat https://mesonet.agron.iastate.edu/climodat/
# Report Generated: %s
# Climate Record: %s -> %s
# Site Information: [%s] %s
# Contact Information: Daryl Herzmann [email protected] 515.294.5978
# Number of days per year with precipitation at or above threshold [inch]
# Partitioned by month of the year, 'ANN' represents the entire year
""" % (datetime.date.today().strftime("%d %b %Y"),
       nt.sts[station]['archive_begin'].date(), datetime.date.today(),
       station, nt.sts[station]['name'])
    # one table per threshold category
    for i, cat in enumerate(CATS):
        col = "cat%s" % (i + 1,)
        res += ("YEAR %4.2f JAN FEB MAR APR MAY JUN "
                "JUL AUG SEP OCT NOV DEC ANN\n") % (cat,)
        for yr in range(startyear, datetime.date.today().year + 1):
            res += "%s %4.2f " % (yr, cat)
            for mo in range(1, 13):
                # 'M' marks months missing from the record
                if (yr, mo) in df.index:
                    res += "%3.0f " % (df.at[(yr, mo), col], )
                else:
                    res += "%3s " % ('M', )
            # annual total sums whatever months exist for the year
            res += "%3.0f\n" % (df.loc[(yr, slice(1, 12)), col].sum(), )
    return None, df, res
def plotter(fdict):
    """Calendar plot of a daily summary variable over a date range.

    Joins IEM daily summaries against NCDC 1981-2010 climatology (for the
    high/low departure options) and renders one colored cell per day.

    Args:
      fdict: CGI form dictionary handed to get_autoplot_context().

    Returns:
      (matplotlib.Figure, pandas.DataFrame)
    """
    import matplotlib
    matplotlib.use('agg')  # headless backend; must precede pyplot usage
    from pyiem.plot import calendar_plot
    pgconn = psycopg2.connect(database='iem', host='iemdb', user='******')
    cursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['station']
    varname = ctx['var']
    network = ctx['network']
    sdate = ctx['sdate']
    edate = ctx['edate']
    nt = NetworkTable(network)
    # Get Climatology, indexed by 'mmdd' for the departure lookups below
    cdf = read_sql("""SELECT to_char(valid, 'mmdd') as sday, high, low,
    precip from ncdc_climate81 WHERE station = %s
    """, psycopg2.connect(database='coop', host='iemdb', user='******'),
                   params=(nt.sts[station]['ncdc81'],), index_col='sday')
    cursor.execute("""
    SELECT day, max_tmpf, min_tmpf, max_dwpf, min_dwpf, pday,
    coalesce(avg_sknt, 0) as avg_sknt from summary s JOIN stations t
    on (t.iemid = s.iemid) WHERE s.day >= %s and s.day <= %s and
    t.id = %s and t.network = %s ORDER by day ASC
    """, (sdate, edate, station, network))
    rows = []
    data = {}
    # DictCursor rows are addressed by key and by index: row[0] is `day`
    for row in cursor:
        hd = row['max_tmpf'] - cdf.at[row[0].strftime("%m%d"), 'high']
        ld = row['min_tmpf'] - cdf.at[row[0].strftime("%m%d"), 'low']
        rows.append(dict(day=row['day'], max_tmpf=row['max_tmpf'],
                         avg_smph=speed(row['avg_sknt'],
                                        'KT').value('MPH'),
                         min_dwpf=row['min_dwpf'],
                         max_dwpf=row['max_dwpf'],
                         high_departure=hd, low_departure=ld,
                         min_tmpf=row['min_tmpf'], pday=row['pday']))
        data[row[0]] = {'val': safe(rows[-1], varname)}
        # departures: blue when below climatology, red when above
        if varname == 'high_departure':
            data[row[0]]['color'] = 'b' if hd < 0 else 'r'
        elif varname == 'low_departure':
            data[row[0]]['color'] = 'b' if ld < 0 else 'r'
    df = pd.DataFrame(rows)
    title = ('[%s] %s Daily %s\n%s thru %s'
             ) % (station, nt.sts[station]['name'], PDICT.get(varname),
                  sdate.strftime("%-d %b %Y"), edate.strftime("%-d %b %Y"))
    fig = calendar_plot(sdate, edate, data, title=title)
    return fig, df
def plotter(fdict):
    """Timeline of the N-day period matching an extreme criterion per year.

    The top axes draw one horizontal bar per year spanning the period's
    day-of-year range; the bottom axes show how often each calendar day
    falls inside such a period.

    Args:
      fdict: CGI form dictionary handed to get_autoplot_context().

    Returns:
      (matplotlib.Figure, pandas.DataFrame) or str: an error string when
      no data was found.
    """
    import matplotlib
    matplotlib.use('agg')  # headless backend; must precede pyplot import
    import matplotlib.pyplot as plt
    from matplotlib.ticker import MaxNLocator
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['station']
    network = ctx['network']
    days = ctx['days']
    varname = ctx['var']
    nt = NetworkTable(network)
    df = get_data(fdict)
    if len(df.index) == 0:
        return 'Error, no results returned!'
    # main timeline on top, day-frequency strip below
    ax = plt.axes([0.1, 0.3, 0.8, 0.6])
    lax = plt.axes([0.1, 0.1, 0.8, 0.2])
    title = PDICT.get(varname)
    # a one-day period is not an "Average"
    if days == 1:
        title = title.replace("Average ", "")
    ax.set_title(("%s [%s]\n%i Day Period with %s"
                  ) % (nt.sts[station]['name'], station, days, title))
    ax.barh(df.index.values, [days] * len(df.index), left=df['doy'].values,
            edgecolor='tan', facecolor='tan')
    ax.grid(True)
    lax.grid(True)
    xticks = []
    xticklabels = []
    # tick the first of each month within the plotted range
    for i in np.arange(df['doy'].min() - 5, df['doy'].max() + 5, 1):
        ts = datetime.datetime(2000, 1, 1) + datetime.timedelta(days=i)
        if ts.day == 1:
            xticks.append(i)
            xticklabels.append(ts.strftime("%-d %b"))
    ax.set_xticks(xticks)
    lax.set_xticks(xticks)
    lax.set_xticklabels(xticklabels)
    # doy can exceed 365 for winter seasons spanning the new year
    counts = np.zeros(366 * 2)
    for _, row in df.iterrows():
        counts[row['doy']:row['doy'] + days] += 1
    lax.bar(np.arange(366 * 2), counts, edgecolor='blue', facecolor='blue')
    lax.set_ylabel("Years")
    lax.text(0.02, 0.9, "Frequency of Day\nwithin period",
             transform=lax.transAxes, va='top')
    ax.set_ylim(df.index.values.min() - 3, df.index.values.max() + 3)
    ax.set_xlim(df['doy'].min() - 10, df['doy'].max() + 10)
    lax.set_xlim(df['doy'].min() - 10, df['doy'].max() + 10)
    ax.yaxis.set_major_locator(MaxNLocator(prune='lower'))
    return plt.gcf(), df
def plotter(fdict):
    """Frequency per day-of-month of a variable meeting a threshold.

    For the requested month, computes how often each calendar day's value
    is above/below the threshold across the period of record.

    Fixes: ``len(tokens) / 2`` produced a float under Python 3, so the
    title slicing raised TypeError -- floor division is used instead;
    ``df['hit'][i]`` relied on pandas' removed integer-position fallback
    on a string index and is now explicit ``.iloc[i]``.

    Args:
      fdict: CGI form dictionary handed to get_autoplot_context().

    Returns:
      (matplotlib.Figure, pandas.DataFrame), or None for bad form input.
    """
    import matplotlib
    matplotlib.use('agg')  # headless backend; must precede pyplot import
    import matplotlib.pyplot as plt
    COOP = psycopg2.connect(database='coop', host='iemdb', user='******')
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['station']
    varname = ctx['var']
    month = ctx['month']
    threshold = float(ctx['thres'])
    # guard against unknown variable / direction values from the form
    if PDICT.get(varname) is None:
        return
    drct = ctx['dir']
    if PDICT2.get(drct) is None:
        return
    operator = ">=" if drct == 'above' else '<'
    table = "alldata_%s" % (station[:2],)
    nt = network.Table("%sCLIMATE" % (station[:2],))
    # varname/operator are validated against PDICT/PDICT2 above, so this
    # string interpolation cannot inject arbitrary SQL
    df = read_sql("""
    SELECT sday,
    sum(case when """ + varname + """ """ + operator + """ %s
        then 1 else 0 end) as hit,
    count(*) as total
    from """ + table + """ WHERE station = %s and month = %s
    GROUP by sday ORDER by sday ASC
    """, COOP, params=(threshold, station, month), index_col='sday')
    df['freq'] = df['hit'] / df['total'] * 100.
    fig, ax = plt.subplots(1, 1)
    bars = ax.bar(np.arange(1, len(df.index) + 1) - 0.4, df['freq'])
    for i, bar in enumerate(bars):
        # label each bar with the raw hit count
        ax.text(i + 1, bar.get_height() + 0.3,
                '%s' % (df['hit'].iloc[i],), ha='center')
    msg = ("[%s] %s %s %s %s during %s (Avg: %.2f days/year)"
           ) % (station, nt.sts[station]['name'], PDICT.get(varname),
                PDICT2.get(drct), threshold, calendar.month_abbr[month],
                df['hit'].sum() / float(df['total'].sum()) * len(df.index))
    # wrap the long title roughly in half, on a word boundary
    tokens = msg.split()
    sz = len(tokens) // 2
    ax.set_title(" ".join(tokens[:sz]) + "\n" + " ".join(tokens[sz:]))
    ax.set_ylabel("Frequency (%)")
    ax.set_xlabel(("Day of %s, years (out of %s) meeting criteria labelled"
                   ) % (calendar.month_name[month], np.max(df['total'],)))
    ax.grid(True)
    ax.set_xlim(0.5, 31.5)
    ax.set_ylim(0, df['freq'].max() + 5)
    return fig, df
def get_context(fdict):
    """Build yearly counts of days meeting a threshold criterion.

    Resolves the month/season selection into a list of months, then counts
    qualifying days per year from the IEM daily summaries.

    Args:
      fdict: CGI form dictionary handed to get_autoplot_context().

    Returns:
      dict: context with ``df`` (year-indexed counts), ``nt``, ``title``,
      and ``subtitle``.
    """
    pgconn = psycopg2.connect(database='iem', host='iemdb', user='******')
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['zstation']
    network = ctx['network']
    month = ctx['month']
    varname = ctx['var']
    mydir = ctx['dir']
    threshold = ctx['thres']
    ctx['nt'] = NetworkTable(network)
    offset = 'day'
    if month == 'all':
        months = range(1, 13)
    elif month == 'fall':
        months = [9, 10, 11]
    elif month == 'winter':
        months = [12, 1, 2]
        # shift dates so December groups with the following January/February
        offset = "day + '1 month'::interval"
    elif month == 'spring':
        months = [3, 4, 5]
    elif month == 'summer':
        months = [6, 7, 8]
    else:
        ts = datetime.datetime.strptime("2000-" + month + "-01", '%Y-%b-%d')
        # make sure it is length two for the trick below in SQL
        months = [ts.month, 999]
    # aoa == "at or above"
    opp = ">=" if mydir == 'aoa' else '<'
    # varname/opp/offset come from controlled vocabularies above, so the
    # string interpolation cannot inject arbitrary SQL
    ctx['df'] = read_sql("""
    SELECT extract(year from """ + offset + """)::int as year,
    sum(case when """ + varname + """ """ + opp + """ %s
        then 1 else 0 end) as count
    from summary s JOIN stations t on (s.iemid = t.iemid)
    WHERE t.id = %s and t.network = %s and extract(month from day) in %s
    GROUP by year ORDER by year ASC
    """, pgconn, params=(threshold, station, network, tuple(months)),
                         index_col='year')
    ctx['title'] = "(%s) %s %s %.0f" % (MDICT[ctx['month']],
                                        METRICS[ctx['var']],
                                        DIRS[ctx['dir']], ctx['thres'])
    ctx['subtitle'] = "%s [%s]" % (ctx['nt'].sts[ctx['zstation']]['name'],
                                   ctx['zstation'])
    return ctx
def plotter(fdict):
    """Map of days since the last VTEC event of a type by NWS office.

    Args:
      fdict: CGI form dictionary handed to get_autoplot_context().

    Returns:
      (matplotlib.Figure, pandas.DataFrame) or str: an error string when
      no events exist for the phenomena/significance pair.
    """
    import matplotlib
    matplotlib.use('agg')  # headless backend; must precede pyplot usage
    from pyiem.plot import MapPlot
    # color bins in days: day, fortnight, month, seasons, years ...
    bins = [0, 1, 14, 31, 91, 182, 273, 365, 730, 1460, 2920, 3800]
    pgconn = psycopg2.connect(database='postgis', host='iemdb',
                              user='******')
    cursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    ctx = get_autoplot_context(fdict, get_description())
    phenomena = ctx['phenomena']
    significance = ctx['significance']
    edate = ctx.get('edate')
    if edate is None:
        edate = datetime.datetime.utcnow()
    else:
        # normalize a user-supplied date to midnight
        edate = datetime.datetime(edate.year, edate.month, edate.day, 0, 0)
    edate = edate.replace(tzinfo=pytz.timezone("UTC"))
    cursor.execute("""
        select wfo, extract(days from (%s::date - max(issue))) as m
        from warnings where significance = %s and phenomena = %s
        and issue < %s
        GROUP by wfo ORDER by m ASC
    """, (edate, significance, phenomena, edate))
    if cursor.rowcount == 0:
        return ("No Events Found for %s %s (%s.%s)"
                ) % (vtec._phenDict.get(phenomena, phenomena),
                     vtec._sigDict.get(significance, significance),
                     phenomena, significance)
    data = {}
    rows = []
    for row in cursor:
        # JSJ is an alias for the San Juan office
        wfo = row[0] if row[0] != 'JSJ' else 'SJU'
        rows.append(dict(wfo=wfo, days=row[1]))
        # clamp negative intervals (future-dated issues) to zero
        data[wfo] = max([row[1], 0])
    df = pd.DataFrame(rows)
    df.set_index('wfo', inplace=True)
    m = MapPlot(sector='nws', axisbg='white', nocaption=True,
                title='Days since Last %s %s by NWS Office' % (
                    vtec._phenDict.get(phenomena, phenomena),
                    vtec._sigDict.get(significance, significance)),
                subtitle='Valid %s' % (
                    edate.strftime("%d %b %Y %H%M UTC"),))
    m.fill_cwas(data, bins=bins, ilabel=True, units='Days',
                lblformat='%.0f')
    return m.fig, df
def plotter(fdict):
    """Frequency of one day providing a share of a month's precipitation.

    For each calendar month, computes how often the wettest single day
    contributed at least ``threshold`` percent of that month's total.

    Args:
      fdict: CGI form dictionary handed to get_autoplot_context().

    Returns:
      (matplotlib.Figure, pandas.DataFrame)
    """
    import matplotlib
    matplotlib.use('agg')  # headless backend; must precede pyplot import
    import matplotlib.pyplot as plt
    pgconn = psycopg2.connect(database='coop', host='iemdb', user='******')
    cursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['station']
    threshold = float(ctx['threshold'])
    # climate data is partitioned into per-state alldata tables
    table = "alldata_%s" % (station[:2],)
    nt = NetworkTable("%sCLIMATE" % (station[:2],))
    # threshold arrives as a percentage; compare against the fraction
    cursor.execute("""
    WITH monthly as (
        SELECT year, month, max(precip), sum(precip)
        from """ + table + """
        WHERE station = %s and precip is not null GROUP by year, month)
    SELECT month,
    sum(case when max > (sum * %s) then 1 else 0 end), count(*)
    from monthly GROUP by month ORDER by month ASC
    """, (station, threshold / 100.))
    # pre-seed all 12 months so missing months still appear in the frame
    df = pd.DataFrame(dict(freq=pd.Series(), events=pd.Series(),
                           month=pd.Series(calendar.month_abbr[1:],
                                           index=range(1, 13))),
                      index=pd.Series(range(1, 13), name='mo'))
    for row in cursor:
        df.at[row[0], 'events'] = row[1]
        df.at[row[0], 'freq'] = row[1] / float(row[2]) * 100.
    (fig, ax) = plt.subplots(1, 1)
    ax.bar(df.index - 0.4, df.freq)
    for i, row in df.iterrows():
        ax.text(i, row['freq'] + 2, "%.1f%%" % (row['freq'],), ha='center')
    ax.set_title(("[%s] %s\nFreq of One Day Having %.0f%% of That Month's "
                  "Precip Total"
                  ) % (station, nt.sts[station]['name'], threshold))
    ax.grid(True)
    ax.set_xlim(0.5, 12.5)
    ax.set_ylim(0, 100)
    ax.set_ylabel("Percentage of Years")
    ax.set_yticks([0, 10, 25, 50, 75, 90, 100])
    ax.set_xticklabels(calendar.month_abbr[1:])
    ax.set_xticks(range(1, 13))
    return fig, df
def plotter(fdict):
    """Scatter plot of monthly snowfall versus precipitation totals.

    One point per year for the selected month, with the chosen year
    highlighted and the long-term means drawn as crosshairs.

    Args:
      fdict: CGI form dictionary handed to get_autoplot_context().

    Returns:
      (matplotlib.Figure, pandas.DataFrame)
    """
    import matplotlib
    matplotlib.use('agg')  # headless backend; must precede pyplot import
    import matplotlib.pyplot as plt
    pgconn = psycopg2.connect(database='coop', host='iemdb', user='******')
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['station']
    month = ctx['month']
    year = ctx['year']
    # climate data is partitioned into per-state alldata tables
    table = "alldata_%s" % (station[:2],)
    nt = NetworkTable("%sCLIMATE" % (station[:2],))
    # beat month
    df = read_sql("""
        SELECT year, sum(precip) as precip, sum(snow) as snow
        from """ + table + """ WHERE station = %s and month = %s
        and precip >= 0 and snow >= 0
        GROUP by year ORDER by year ASC
    """, pgconn, params=(station, month), index_col='year')
    (fig, ax) = plt.subplots(1, 1)
    ax.scatter(df['precip'], df['snow'], s=40, marker='s', color='b',
               zorder=2)
    # highlight the requested year, when present in the record
    if year in df.index:
        row = df.loc[year]
        ax.scatter(row['precip'], row['snow'], s=60, marker='o', color='r',
                   zorder=3, label=str(year))
    ax.set_title(("[%s] %s\n%s Snowfall vs Precipitation Totals"
                  ) % (station, nt.sts[station]['name'],
                       calendar.month_name[month]))
    ax.grid(True)
    # crosshair lines at the long-term means
    ax.axhline(df['snow'].mean(), lw=2, color='black')
    ax.axvline(df['precip'].mean(), lw=2, color='black')
    ax.set_xlim(left=-0.1)
    ax.set_ylim(bottom=-0.1)
    ylim = ax.get_ylim()
    ax.text(df['precip'].mean(), ylim[1], "%.2f" % (df['precip'].mean(),),
            va='top', ha='center', color='white', bbox=dict(color='black'))
    xlim = ax.get_xlim()
    ax.text(xlim[1], df['snow'].mean(), "%.1f" % (df['snow'].mean(),),
            va='center', ha='right', color='white',
            bbox=dict(color='black'))
    ax.set_ylabel("Snowfall Total [inch]")
    ax.set_xlabel("Precipitation Total (liquid + melted) [inch]")
    ax.legend(loc=2, scatterpoints=1)
    return fig, df
def get_context(fdict):
    """Load analog-channel tall-tower data for the requested time window."""
    dbconn = psycopg2.connect(database='talltowers',
                              host='talltowers-db.local', user='******')
    ctx = get_autoplot_context(fdict, get_description())
    begints = ctx['dt']
    endts = begints + datetime.timedelta(minutes=ctx['minutes'])
    # map the station identifier onto the remote tower id used by the table
    towers = NetworkTable("TALLTOWERS")
    towerid = towers.sts[ctx['station']]['remote_id']
    ctx['df'] = read_sql("""
    SELECT * from data_analog where tower = %s and
    valid between %s and %s ORDER by valid ASC
    """, dbconn, params=(towerid, begints, endts), index_col='valid')
    return ctx
def plotter(fdict):
    """Text report of yearly day counts crossing temperature thresholds.

    Counts days per year with lows at or below -20/-10/0/32F and highs at
    or above 50/70/80/93/100F, formatted as a Climodat text table.

    Args:
      fdict: CGI form dictionary handed to get_autoplot_context().

    Returns:
      (None, pandas.DataFrame, str): no figure, the counts frame, and the
      formatted report text.
    """
    import matplotlib
    matplotlib.use('agg')  # headless backend kept for app consistency
    pgconn = psycopg2.connect(database='coop', host='iemdb', user='******')
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['station']
    # climate data is partitioned into per-state alldata tables
    table = "alldata_%s" % (station[:2], )
    nt = NetworkTable("%sCLIMATE" % (station[:2], ))
    res = """\
# IEM Climodat https://mesonet.agron.iastate.edu/climodat/
# Report Generated: %s
# Climate Record: %s -> %s
# Site Information: [%s] %s
# Contact Information: Daryl Herzmann [email protected] 515.294.5978
# Number of days exceeding given temperature thresholds
# -20, -10, 0, 32 are days with low temperature at or below value
# 50, 70, 80, 93, 100 are days with high temperature at or above value
""" % (datetime.date.today().strftime("%d %b %Y"),
       nt.sts[station]['archive_begin'].date(), datetime.date.today(),
       station, nt.sts[station]['name'])
    res += ("%s %4s %4s %4s %4s %4s %4s %4s %4s %4s\n"
            "") % ('YEAR', -20, -10, 0, 32, 50, 70, 80, 93, 100)
    df = read_sql("""SELECT year,
    sum(case when low <= -20 THEN 1 ELSE 0 END) as m20,
    sum(case when low <= -10 THEN 1 ELSE 0 END) as m10,
    sum(case when low <= 0 THEN 1 ELSE 0 END) as m0,
    sum(case when low <= 32 THEN 1 ELSE 0 END) as m32,
    sum(case when high >= 50 THEN 1 ELSE 0 END) as e50,
    sum(case when high >= 70 THEN 1 ELSE 0 END) as e70,
    sum(case when high >= 80 THEN 1 ELSE 0 END) as e80,
    sum(case when high >= 93 THEN 1 ELSE 0 END) as e93,
    sum(case when high >= 100 THEN 1 ELSE 0 END) as e100
    from """ + table + """ WHERE station = %s
    GROUP by year ORDER by year ASC
    """, pgconn, params=(station,), index_col=None)
    # %-formatting with a mapping works since Series supports __getitem__
    for _, row in df.iterrows():
        res += ("%(year)4i %(m20)4i %(m10)4i %(m0)4i %(m32)4i %(e50)4i "
                "%(e70)4i %(e80)4i %(e93)4i %(e100)4i\n") % row
    return None, df, res
def plotter(fdict):
    """State map of the day-of-year a temperature threshold was crossed.

    Maps per-station first/last occurrence dates (per SQLOPT[varname])
    for the chosen year across the state's COOP climate sites.

    Fixes: contour level labels were built with a bare ``map()`` call,
    which is a lazy iterator under Python 3 (the labels would be consumed
    or empty) -- now materialized with ``list``; the title used the
    invalid ``\\circ`` escape in a non-raw string -- now a raw string.

    Args:
      fdict: CGI form dictionary handed to get_autoplot_context().

    Returns:
      (matplotlib.Figure, pandas.DataFrame) or str: an error string when
      no data is found.
    """
    import matplotlib
    matplotlib.use('agg')  # headless backend; must precede pyplot usage
    from pyiem.plot import MapPlot
    pgconn = psycopg2.connect(dbname='coop', host='iemdb', user='******')
    ctx = util.get_autoplot_context(fdict, get_description())
    sector = ctx['sector']
    varname = ctx['var']
    year = ctx['year']
    popt = ctx['popt']
    threshold = ctx['threshold']
    table = "alldata_%s" % (sector,)
    # exclude statewide (0000) and climate-district (C) pseudo stations,
    # plus the 0/400 "never happened" sentinel values
    df = read_sql("""
    WITH data as (
        SELECT station, """ + SQLOPT[varname] + """ as doy
        from """ + table + """
        WHERE year = %s GROUP by station
    )
    select station, doy, st_x(geom) as lon, st_y(geom) as lat
    from data d JOIN stations t on (d.station = t.id)
    WHERE t.network = %s and substr(station, 3, 4) != '0000'
    and substr(station, 3, 1) != 'C' and doy not in (0, 400)
    ORDER by doy
    """, pgconn, params=(threshold, year, '%sCLIMATE' % (sector,)),
                  index_col='station')
    if len(df.index) == 0:
        return "No data found!"

    def f(val):
        """Convert a day-of-year number into a m/d label."""
        ts = datetime.date(year, 1, 1) + datetime.timedelta(days=(val - 1))
        return ts.strftime("%-m/%-d")

    df['pdate'] = df['doy'].apply(f)
    m = MapPlot(sector='state', state=sector, axisbg='white',
                nocaption=True,
                title=r"%s %s %s$^\circ$F" % (year, PDICT2[varname],
                                              threshold),
                subtitle='based on NWS COOP and IEM Daily Estimates')
    levs = np.linspace(df['doy'].min() - 3, df['doy'].max() + 3, 7,
                       dtype='i')
    # materialize: map() is lazy on Python 3 and would otherwise be empty
    levlabels = list(map(f, levs))
    if popt == 'contour':
        m.contourf(df['lon'], df['lat'], df['doy'], levs,
                   clevlabels=levlabels)
    m.plot_values(df['lon'], df['lat'], df['pdate'], labelbuffer=5)
    m.drawcounties()
    return m.fig, df
def get_data(fdict):
    """ Get the data

    Finds, per "season", the day on which the trailing `days`-day window of
    temperature/precip was the most extreme (rank 1) for the requested
    variable (e.g. 'coldest_temp', 'warmest_hitemp', 'wettest').

    For 'coldest*' variables the season boundary is shifted by six months
    so a winter spanning the new year falls in one season.
    """
    pgconn = psycopg2.connect(database='coop', host='iemdb', user='******')
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['station']
    days = ctx['days']
    varname = ctx['var']
    table = "alldata_%s" % (station[:2], )
    # 6-month season offset keeps Dec-Feb together for 'coldest' searches
    offset = 6 if varname.startswith('coldest') else 0
    # params order must match the %s placeholders: offset, 4x window size,
    # station, then the doy back-shift
    df = read_sql("""
    WITH data as (
        SELECT day, extract(year from day + '%s months'::interval)
            as season,
        avg((high+low)/2.) OVER (ORDER by day ASC ROWS %s preceding)
            as avg_temp,
        avg(high) OVER (ORDER by day ASC ROWS %s preceding) as avg_hitemp,
        avg(low) OVER (ORDER by day ASC ROWS %s preceding) as avg_lotemp,
        sum(precip) OVER (ORDER by day ASC ROWS %s preceding) as sum_precip
        from """+table+""" WHERE station = %s),
    agg1 as (
        SELECT season, day, avg_temp,
        rank() OVER (PARTITION by season ORDER by avg_temp ASC)
            as coldest_temp_rank,
        rank() OVER (PARTITION by season ORDER by avg_hitemp ASC)
            as coldest_hitemp_rank,
        rank() OVER (PARTITION by season ORDER by avg_lotemp ASC)
            as coldest_lotemp_rank,
        rank() OVER (PARTITION by season ORDER by avg_temp DESC)
            as warmest_temp_rank,
        rank() OVER (PARTITION by season ORDER by avg_hitemp DESC)
            as warmest_hitemp_rank,
        rank() OVER (PARTITION by season ORDER by avg_lotemp DESC)
            as warmest_lotemp_rank,
        rank() OVER (PARTITION by season ORDER by sum_precip DESC)
            as wettest_rank,
        count(*) OVER (PARTITION by season) from data)
    SELECT season, day,
    extract(doy from day - '%s days'::interval) as doy,
    avg_temp from agg1 where """+varname+"""_rank = 1 and count > 270
    """, pgconn, params=(offset, days - 1, days - 1, days - 1, days - 1,
                         station, days - 1),
                  index_col='season')
    if varname.startswith('coldest'):
        # wrap early-year days past 365 so winter doys plot contiguously
        df.loc[df['doy'] < 183, 'doy'] += 365.
    return df
def plotter(fdict):
    """Chart days/year with a two-day precip total over a threshold.

    Uses downscaled (Hayhoe) daily data for the climate site paired with
    the chosen CSCAP station, for one model=scenario combination.
    """
    import matplotlib
    matplotlib.use('agg')
    import matplotlib.pyplot as plt
    pgconn = psycopg2.connect(database='coop', host='iemdb', user='******')
    ctx = get_autoplot_context(fdict, get_description())
    cursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    station = ctx['station']
    nt = NetworkTable("CSCAP")
    threshold = ctx['threshold']
    clstation = nt.sts[station]['climate_site']
    # NOTE(review): reads 'model' from the raw fdict instead of ctx like
    # the sibling plotters do — confirm get_description() declares it and
    # whether 'hadgem=a1b' matches its default.
    (model, scenario) = fdict.get('model', 'hadgem=a1b').split("=")
    (fig, ax) = plt.subplots(1, 1)
    # lag() pairs each day with the previous one to form two-day totals;
    # threshold is converted inches -> mm via / 25.4... presumably the
    # database stores inches and threshold is mm — TODO confirm units
    cursor.execute("""
    WITH data as (
        SELECT day, precip,
        lag(precip) OVER (ORDER by day ASC)
        from hayhoe_daily WHERE station = %s and model = %s
        and scenario = %s and precip is not null)

    SELECT extract(year from day) as yr,
    sum(case when (precip+lag) >= %s THEN 1 else 0 end) from data
    GROUP by yr ORDER by yr ASC
    """, (clstation, model, scenario, threshold / 25.4))
    years = []
    precip = []
    for row in cursor:
        years.append(row[0])
        precip.append(row[1])
    ax.bar(years, precip, ec='b', fc='b')
    ax.grid(True)
    ax.set_ylabel("Days Per Year")
    ax.set_title("%s %s\n%s %s :: Two Day total over %.2f mm" % (
        station, nt.sts[station]['name'], model, scenario, threshold))
    return fig
def plotter(fdict):
    """Map Local Storm Report counts by NWS office over a time window.

    Optionally restricts to one LSR typetext, or excludes the
    non-severe 'HEAVY RAIN'/'SNOW' types (filter 'NRS').
    """
    import matplotlib
    matplotlib.use('agg')
    pgconn = psycopg2.connect(database='postgis', host='iemdb',
                              user='******')
    ctx = get_autoplot_context(fdict, get_description())
    sts = ctx['sdate']
    sts = sts.replace(tzinfo=pytz.utc)
    ets = ctx['edate']
    ets = ets.replace(tzinfo=pytz.utc)
    myfilter = ctx['filter']
    if myfilter == 'NONE':
        tlimiter = ''
    elif myfilter == 'NRS':
        tlimiter = " and typetext not in ('HEAVY RAIN', 'SNOW') "
    else:
        # NOTE(review): string-interpolated into SQL; safe only because
        # myfilter comes from a fixed select list — presumably MDICT keys.
        # Verify it cannot carry free-form user input.
        tlimiter = " and typetext = '%s' " % (myfilter,)
    df = read_sql("""
    SELECT wfo, count(*) from lsrs
    WHERE valid >= %s and valid < %s """ + tlimiter + """
    GROUP by wfo ORDER by wfo ASC
    """, pgconn, params=(sts, ets), index_col='wfo')
    data = {}
    for wfo, row in df.iterrows():
        data[wfo] = row['count']
    maxv = df['count'].max()
    bins = np.linspace(0, maxv, 12, dtype='i')
    # widen the top bin so the max value falls inside it
    bins[-1] += 1
    p = MapPlot(sector='nws', axisbg='white',
                title='Local Storm Report Counts by NWS Office',
                subtitle=('Valid %s - %s UTC, type limiter: %s'
                          ) % (sts.strftime("%d %b %Y %H:%M"),
                               ets.strftime("%d %b %Y %H:%M"),
                               MDICT.get(myfilter)))
    p.fill_cwas(data, bins=bins, ilabel=True)
    return p.fig, df
def get_data(fdict):
    """Build the plotting context: obs time series plus NCDC climatology.

    Populates ctx with station/network metadata, a clamped date window,
    an optional per-day high/low climatology dict (tmpf only), and a
    DataFrame of routine (report_type=2) observations.
    """
    ctx = get_autoplot_context(fdict, get_description())
    ASOS = psycopg2.connect(database='asos', host='iemdb', user='******')
    COOP = psycopg2.connect(database='coop', host='iemdb', user='******')
    ccursor = COOP.cursor(cursor_factory=psycopg2.extras.DictCursor)
    ctx['station'] = ctx['zstation']
    ctx['nt'] = NetworkTable(ctx['network'])
    sdate = ctx['sdate']
    days = ctx['days']
    ctx['edate'] = sdate + datetime.timedelta(days=days)
    today = datetime.date.today()
    if ctx['edate'] > today:
        # clamp the window to today and shrink 'days' to match
        ctx['edate'] = today
        ctx['days'] = (ctx['edate'] - sdate).days
    ctx['climo'] = None
    if ctx['var'] == 'tmpf':
        # keyed by "MMDD" so leap years share the same climatology row
        ctx['climo'] = {}
        ccursor.execute("""
        SELECT valid, high, low from ncdc_climate81 where station = %s
        """, (ctx['nt'].sts[ctx['station']]['ncdc81'],))
        for row in ccursor:
            ctx['climo'][row[0].strftime("%m%d")] = dict(high=row[1],
                                                         low=row[2])
    # temperatures are cast to int in SQL; other vars pass through
    col = "tmpf::int" if ctx['var'] == 'tmpf' else ctx['var']
    col = "dwpf::int" if ctx['var'] == 'dwpf' else col
    ctx['df'] = read_sql("""
    SELECT valid, extract(epoch from valid) * 1000 as ticks,
    """ + col + """ as datum from alldata
    WHERE station = %s and valid > %s and valid < %s and
    """ + ctx['var'] + """ is not null and report_type = 2
    ORDER by valid ASC
    """, ASOS, params=(ctx['station'], sdate,
                       sdate + datetime.timedelta(days=days)),
                         index_col='valid')
    return ctx
def plotter(fdict):
    """ Go

    Two-panel chart of how often an hourly variable is above/below a
    threshold: hours per year (top) and average hours per clock-hour
    (bottom), with one year highlighted.
    """
    pgconn = get_dbconn("asos")
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx["zstation"]
    month = ctx["month"]
    varname = ctx["var"]
    mydir = ctx["dir"]
    threshold = ctx["thres"]
    year = ctx["year"]
    # 'offset' shifts Dec into the following year so a winter season
    # (Dec-Feb) groups under one year label
    offset = "ts"
    if month == "all":
        months = range(1, 13)
    elif month == "fall":
        months = [9, 10, 11]
    elif month == "winter":
        months = [12, 1, 2]
        offset = "ts + '1 month'::interval"
    elif month == "spring":
        months = [3, 4, 5]
    elif month == "summer":
        months = [6, 7, 8]
    else:
        ts = datetime.datetime.strptime("2000-" + month + "-01", "%Y-%b-%d")
        # make sure it is length two for the trick below in SQL
        months = [ts.month]
    opp = ">=" if mydir == "aoa" else "<"
    # hourly CTE snaps obs to the top of the hour (+10 min tolerance) in
    # local time; hit=1 if any ob that hour met the threshold
    df = read_sql(
        """WITH hourly as (
        SELECT date_trunc('hour', valid + '10 minutes'::interval)
            at time zone %s as ts,
        max(case when """ + varname + """::int """ + opp + """ %s
            then 1 else 0 end) as hit from alldata
        where station = %s and report_type = 2 GROUP by ts)

    SELECT extract(year from """ + offset + """)::int as year,
    extract(hour from ts)::int as hour,
    sum(hit) as hits, count(*) as obs from hourly
    WHERE extract(month from ts) in %s
    GROUP by year, hour
    """,
        pgconn,
        params=(
            ctx["_nt"].sts[station]["tzname"],
            threshold,
            station,
            tuple(months),
        ),
        index_col=None,
    )
    if df.empty:
        raise NoDataFound("Error, no results returned!")
    (fig, ax) = plt.subplots(2, 1, figsize=(8, 6))
    ydf = df.groupby("year").sum()
    ax[0].set_title(("(%s) %s Hours %s %s%s\n"
                     "%s [%s] (%.0f-%.0f)") % (
        MDICT[month],
        METRICS[varname],
        DIRS[mydir],
        threshold,
        "%" if varname == "relh" else "F",
        ctx["_nt"].sts[station]["name"],
        station,
        ydf.index.min(),
        ydf.index.max(),
    ))
    ax[0].bar(ydf.index.values, ydf["hits"], align="center", fc="green",
              ec="green")
    # Loop over plot years and background highlight any years with less than
    # 80% data coverage
    obscount = len(months) * 30 * 24 * 0.8
    for _year in range(ydf.index.values[0], ydf.index.values[-1] + 1):
        if _year not in ydf.index or ydf.at[_year, "obs"] < obscount:
            ax[0].axvspan(_year - 0.5, _year + 0.5, color="#cfebfd",
                          zorder=-3)
    if year in ydf.index.values:
        # re-draw the selected year's bar in orange on top
        val = ydf.loc[year]
        ax[0].bar(
            year,
            val["hits"],
            align="center",
            fc="orange",
            ec="orange",
            zorder=5,
        )
    ax[0].grid(True)
    ax[0].set_ylabel("Hours")
    ax[0].set_xlim(ydf.index.min() - 0.5, ydf.index.max() + 0.5)
    ax[0].xaxis.set_major_locator(MaxNLocator(integer=True))
    ax[0].set_xlabel("Years with blue shading have more than 20% missing data")
    # bottom panel: average hits per clock-hour over well-covered years
    df2 = ydf[ydf["obs"] > obscount]
    years = len(df2.index)
    df2 = df[df["year"].isin(df2.index.values)]
    hdf = df2.groupby("hour").sum() / years
    ax[1].bar(
        hdf.index.values,
        hdf["hits"],
        align="center",
        fc="b",
        ec="b",
        label="Avg",
    )
    thisyear = df[df["year"] == year]
    if not thisyear.empty:
        ax[1].bar(
            thisyear["hour"].values,
            thisyear["hits"],
            align="center",
            width=0.25,
            zorder=5,
            fc="orange",
            ec="orange",
            label="%s" % (year, ),
        )
    ax[1].set_xlim(-0.5, 23.5)
    ax[1].grid(True)
    ax[1].legend(loc=(0.7, -0.22), ncol=2, fontsize=10)
    ax[1].set_ylabel("Days Per Period")
    ax[1].set_xticks(range(0, 24, 4))
    ax[1].set_xticklabels(["Mid", "4 AM", "8 AM", "Noon", "4 PM", "8 PM"])
    ax[1].set_xlabel("Hour of Day (%s)" % (
        ctx["_nt"].sts[station]["tzname"], ), ha="right")
    return fig, df
def plotter(fdict):
    """ Go

    Climodat report of seasonal temperature cycles (e.g. freeze/thaw):
    counts per year, per threshold pair, of swings from below the lower
    threshold to above the upper one.  A cycle is tallied in two halves
    (0.5 on the downswing, 0.5 on the upswing).
    """
    import matplotlib
    matplotlib.use('agg')
    pgconn = get_dbconn('coop')
    cursor = pgconn.cursor()
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['station']
    thres1 = ctx['thres1']
    thres2 = ctx['thres2']
    thres3 = ctx['thres3']
    thres4 = ctx['thres4']
    thres = [thres1, thres2, thres3, thres4]
    # make() presumably parses a threshold string into a (lower, upper)
    # pair — TODO confirm against its definition
    prs = [make(thres1), make(thres2), make(thres3), make(thres4)]
    table = "alldata_%s" % (station[:2], )
    nt = NetworkTable("%sCLIMATE" % (station[:2], ))
    s = nt.sts[station]['archive_begin']
    e = datetime.date.today()
    res = """\
# IEM Climodat https://mesonet.agron.iastate.edu/climodat/
# Report Generated: %s
# Climate Record: %s -> %s
# Site Information: [%s] %s
# Contact Information: Daryl Herzmann [email protected] 515.294.5978
# SEASONAL TEMPERATURE CYCLES PER YEAR
# 1 CYCLE IS A TEMPERATURE VARIATION FROM A VALUE BELOW A THRESHOLD
# TO A VALUE EXCEEDING A THRESHOLD. THINK OF IT AS FREEZE/THAW CYCLES
# FIRST DATA COLUMN WOULD BE FOR CYCLES EXCEEDING 26 AND 38 DEGREES F
THRES %2.0f-%2.0f %2.0f-%2.0f %2.0f-%2.0f %2.0f-%2.0f %2.0f-%2.0f %2.0f-%2.0f %2.0f-%2.0f %2.0f-%2.0f
YEAR SPRING FALL SPRING FALL SPRING FALL SPRING FALL
""" % (
        datetime.date.today().strftime("%d %b %Y"),
        nt.sts[station]['archive_begin'].date(), datetime.date.today(),
        station, nt.sts[station]['name'],
        prs[0][0], prs[0][1], prs[0][0], prs[0][1],
        prs[1][0], prs[1][1], prs[1][0], prs[1][1],
        prs[2][0], prs[2][1], prs[2][0], prs[2][1],
        prs[3][0], prs[3][1], prs[3][0], prs[3][1],
    )
    # columns keyed "<thres>s" (spring, month < 7) and "<thres>f" (fall);
    # thres values are strings here, so '+' is concatenation
    df = pd.DataFrame(
        {
            thres1 + 's': 0, thres1 + 'f': 0,
            thres2 + 's': 0, thres2 + 'f': 0,
            thres3 + 's': 0, thres3 + 'f': 0,
            thres4 + 's': 0, thres4 + 'f': 0
        },
        index=pd.Series(range(s.year, e.year + 1), name='year'))
    # per-threshold state machine: -1 = last crossed low, 1 = crossed high
    cycle_pos = [-1, -1, -1, -1]
    cursor.execute(
        """
        SELECT day, high, low from """ + table + """
        WHERE station = %s and high is not null and low is not null
        ORDER by day ASC
    """, (station, ))
    for row in cursor:
        ts = row[0]
        high = int(row[1])
        low = int(row[2])
        for i, (lower, upper) in enumerate(prs):
            ckey = thres[i] + ('s' if ts.month < 7 else 'f')
            # cycles lower
            if cycle_pos[i] == 1 and low < lower:
                # print 'Cycled lower', low, ts
                cycle_pos[i] = -1
                df.loc[ts.year, ckey] += 0.5
            # cycled higher
            if cycle_pos[i] == -1 and high > upper:
                # print 'Cycled higher', high, ts
                cycle_pos[i] = 1
                df.loc[ts.year, ckey] += 0.5
    for yr, row in df.iterrows():
        res += ("%s %-8i%-8i%-8i%-8i%-8i%-8i%-8i%-8i\n") % (
            yr, row[thres1 + 's'], row[thres1 + 'f'],
            row[thres2 + 's'], row[thres2 + 'f'],
            row[thres3 + 's'], row[thres3 + 'f'],
            row[thres4 + 's'], row[thres4 + 'f'])
    res += ("AVG %-8.1f%-8.1f%-8.1f%-8.1f%-8.1f%-8.1f%-8.1f%-8.1f\n") % (
        df[thres1 + 's'].mean(), df[thres1 + 'f'].mean(),
        df[thres2 + 's'].mean(), df[thres2 + 'f'].mean(),
        df[thres3 + 's'].mean(), df[thres3 + 'f'].mean(),
        df[thres4 + 's'].mean(), df[thres4 + 'f'].mean())
    return None, df, res
def plotter(fdict):
    """ Go

    Compare two variability measures by day-of-year: the standard
    deviation of a daily variable (high/low/etc) versus the standard
    deviation of its day-to-day change, plus their ratio.
    """
    pgconn = get_dbconn("coop")
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx["station"]
    varname = ctx["var"]
    # guard against an unknown variable selection
    if PDICT.get(varname) is None:
        return
    table = "alldata_%s" % (station[:2], )
    nt = network.Table("%sCLIMATE" % (station[:2], ))
    # deltas uses lag() to difference consecutive days before aggregating
    df = read_sql(
        """
    with data as (
        select extract(doy from day) as doy, day, """ + varname + """ as v
        from """ + table + """ WHERE station = %s),
    doyagg as (
        SELECT doy, stddev(v) from data GROUP by doy),
    deltas as (
        SELECT doy, (v - lag(v) OVER (ORDER by day ASC)) as d from data),
    deltaagg as (
        SELECT doy, stddev(d) from deltas GROUP by doy)

    SELECT d.doy, d.stddev as d2d_stddev,
    y.stddev as doy_stddev from deltaagg d JOIN doyagg y ON (y.doy = d.doy)
    WHERE d.doy < 366 ORDER by d.doy ASC
    """,
        pgconn,
        params=(station, ),
        index_col="doy",
    )
    fig, ax = plt.subplots(2, 1, sharex=True, figsize=(8, 6))
    ax[0].plot(df.index.values, df["doy_stddev"], lw=2, color="r",
               label="Single Day")
    ax[0].plot(df.index.values, df["d2d_stddev"], lw=2, color="b",
               label="Day to Day")
    ax[0].legend(loc="best", fontsize=10, ncol=2)
    ax[0].set_ylabel(r"Temperature Std. Deviation $^\circ$F")
    ax[0].grid(True)
    msg = ("[%s] %s Daily %s Standard Deviations") % (
        station,
        nt.sts[station]["name"],
        PDICT.get(varname),
    )
    # wrap the title roughly in half so it fits on two lines
    tokens = msg.split()
    sz = int(len(tokens) / 2)
    ax[0].set_title(" ".join(tokens[:sz]) + "\n" + " ".join(tokens[sz:]))
    ax[1].plot(df.index.values, df["doy_stddev"] / df["d2d_stddev"],
               lw=2, color="g")
    ax[1].set_ylabel("Ratio SingleDay/Day2Day")
    ax[1].grid(True)
    ax[1].set_xticks(
        (1, 32, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335, 365))
    ax[1].set_xticklabels(calendar.month_abbr[1:])
    ax[1].set_xlim(0, 366)
    return fig, df
def plotter(fdict):
    """ Go

    Map one ISU Soil Moisture daily-summary variable across Iowa.

    The ``opt`` context value selects which variable is mapped; each
    option delegates to a ``plot*`` helper returning the station values
    (``data``) and a DataFrame for download.

    Returns: (matplotlib figure, DataFrame)
    Raises: NoDataFound when station metadata is missing or the plot
    option is unrecognized.
    """
    ctx = get_autoplot_context(fdict, get_description())
    ctx["qc"] = loadqc(date=ctx["date"])
    ctx["pgconn"] = get_dbconn("isuag")
    ctx["nt"] = NetworkTable("ISUSM")
    if not ctx["nt"].sts:
        raise NoDataFound("No station metadata found.")
    # Adjust stations to make some room
    ctx["nt"].sts["BOOI4"]["lon"] -= 0.15
    ctx["nt"].sts["BOOI4"]["lat"] -= 0.15
    ctx["nt"].sts["AHTI4"]["lon"] += 0.25
    ctx["nt"].sts["AHTI4"]["lat"] += 0.25
    title = "TBD"
    subtitle = "TBD"
    if ctx["opt"] == "1":
        title = "ISU Soil Moisture Max/Min 4 Inch Soil Temperature"
        subtitle = "based on available hourly observations"
        data, df = plot1(ctx)
    elif ctx["opt"] == "2":
        title = "ISU Soil Moisture Max/Min Air Temperature"
        subtitle = "based on available daily summary data"
        data, df = plot2(ctx)
    elif ctx["opt"] == "3":
        title = "ISU Soil Moisture Average 4 Inch Soil Temperature"
        subtitle = "based on available daily summary data"
        data, df = plot3(ctx)
    elif ctx["opt"] == "4":
        title = "ISU Soil Moisture Solar Radiation [MJ]"
        subtitle = "based on available daily summary data"
        data, df = plot4(ctx)
    elif ctx["opt"] == "5":
        title = "ISU Soil Moisture Potential Evapotranspiration [inch]"
        subtitle = "based on available daily summary data"
        data, df = plot5(ctx, "dailyet")
    elif ctx["opt"] == "6":
        title = "ISU Soil Moisture Precipitation [inch]"
        subtitle = (
            "based on available daily summary data, liquid equiv of snow "
            "estimated")
        data, df = plot5(ctx, "rain_mm_tot")
    elif ctx["opt"] == "7":
        title = "ISU Soil Moisture Peak Wind Gust [MPH]"
        subtitle = "based on available daily summary data"
        data, df = plot7(ctx)
    elif ctx["opt"] == "8":
        title = "ISU Soil Moisture Average Wind Speed [MPH]"
        subtitle = "based on available daily summary data"
        data, df = plot8(ctx)
    else:
        # Previously an unknown option fell through and raised an
        # UnboundLocalError on `data` below; fail with a clear message.
        raise NoDataFound("Unknown plot option requested.")
    tle = ctx["date"].strftime("%b %-d, %Y")
    mp = MapPlot(
        sector="iowa",
        continentalcolor="white",
        nocaption=True,
        title="%s %s" % (tle, title),
        subtitle=subtitle,
    )
    mp.drawcounties("#EEEEEE")
    mp.plot_station(data, fontsize=12)
    return mp.fig, df
def plotter(fdict):
    """ Go

    Stacked weekly-frequency chart of an hourly variable falling into
    five user-defined temperature bins (below t1 ... t5-and-above).
    """
    pgconn = get_dbconn("asos")
    cursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx["zstation"]
    t1 = ctx["t1"]
    t2 = ctx["t2"]
    t3 = ctx["t3"]
    t4 = ctx["t4"]
    t5 = ctx["t5"]
    v = ctx["var"]
    # per ISO week: cumulative counts below each of the five thresholds,
    # restricted to near-top-of-hour routine obs
    cursor.execute(
        """
        SELECT extract(week from valid) as w,
        sum(case when """ + v + """::int < %s then 1 else 0 end),
        sum(case when """ + v + """::int < %s then 1 else 0 end),
        sum(case when """ + v + """::int < %s then 1 else 0 end),
        sum(case when """ + v + """::int < %s then 1 else 0 end),
        sum(case when """ + v + """::int < %s then 1 else 0 end),
        count(*)
        from alldata where station = %s and """ + v + """ is not null
        and extract(minute from valid - '1 minute'::interval) > 49
        and report_type = 2
        GROUP by w ORDER by w ASC
    """,
        (t1, t2, t3, t4, t5, station),
    )
    weeks = []
    d1 = []
    d2 = []
    d3 = []
    d4 = []
    d5 = []
    d6 = []
    for row in cursor:
        weeks.append(row[0] - 1)
        # convert counts to percent of all obs that week
        d1.append(float(row[1]) / float(row[6]) * 100.0)
        d2.append(float(row[2]) / float(row[6]) * 100.0)
        d3.append(float(row[3]) / float(row[6]) * 100.0)
        d4.append(float(row[4]) / float(row[6]) * 100.0)
        d5.append(float(row[5]) / float(row[6]) * 100.0)
        d6.append(100.0)
    df = pd.DataFrame(
        dict(
            week=pd.Series(weeks),
            d1=pd.Series(d1),
            d2=pd.Series(d2),
            d3=pd.Series(d3),
            d4=pd.Series(d4),
            d5=pd.Series(d5),
        ))
    sts = datetime.datetime(2012, 1, 1)
    xticks = []
    for i in range(1, 13):
        ts = sts.replace(month=i)
        # month start expressed in week-of-year units
        xticks.append(float(ts.strftime("%j")) / 7.0)
    (fig, ax) = plt.subplots(1, 1)
    # draw full-height bars first, then overplot progressively smaller
    # cumulative percentages to get a stacked appearance
    ax.bar(weeks, d6, width=1, fc="red", ec="None",
           label="%s & Above" % (t5, ))
    ax.bar(weeks, d5, width=1, fc="tan", ec="None",
           label="%s-%s" % (t4, t5 - 1))
    ax.bar(
        weeks,
        d4,
        width=1,
        fc="yellow",
        ec="None",
        label="%s-%s" % (t3, t4 - 1),
    )
    ax.bar(weeks, d3, width=1, fc="green", ec="None",
           label="%s-%s" % (t2, t3 - 1))
    ax.bar(weeks, d2, width=1, fc="blue", ec="None",
           label="%s-%s" % (t1, t2 - 1))
    ax.bar(weeks, d1, width=1, fc="purple", ec="None",
           label="Below %s" % (t1))
    ax.grid(True, zorder=11)
    ab = ctx["_nt"].sts[station]["archive_begin"]
    if ab is None:
        raise NoDataFound("Unknown station metadata.")
    ax.set_title(("%s [%s]\n"
                  r"Hourly %s ($^\circ$F) Frequencies (%s-%s)") % (
        ctx["_nt"].sts[station]["name"],
        station,
        PDICT[v],
        ab.year,
        datetime.datetime.now().year,
    ))
    ax.set_ylabel("Frequency [%]")
    ax.set_xticks(xticks)
    ax.set_xticklabels(calendar.month_abbr[1:])
    ax.set_xlim(0, 53)
    ax.set_ylim(0, 100)
    ax.set_yticks([0, 10, 25, 50, 75, 90, 100])
    # Shrink current axis's height by 10% on the bottom
    box = ax.get_position()
    ax.set_position(
        [box.x0, box.y0 + box.height * 0.2, box.width, box.height * 0.8])
    ax.legend(
        loc="upper center",
        bbox_to_anchor=(0.5, -0.1),
        fancybox=True,
        shadow=True,
        ncol=3,
        scatterpoints=1,
        fontsize=12,
    )
    return fig, df
def plotter(fdict):
    """ Go

    Spring low-temperature exceedence climatology: for each day of year
    (Jan 29 - Jun 30), the percent of years in which a low at or below
    each of four thresholds was still observed on or after that date.
    Returns a figure, DataFrame, and fixed-width text report.
    """
    pgconn = get_dbconn('coop')
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['station']
    thresholds = [ctx['t1'], ctx['t2'], ctx['t3'], ctx['t4']]
    table = "alldata_%s" % (station[:2], )
    nt = NetworkTable("%sCLIMATE" % (station[:2], ))
    # Load up dict of dates.. index is doy 29..182 (Jan 29 - Jun 30)
    df = pd.DataFrame(
        {
            'dates': pd.date_range("2000/01/29", "2000/06/30"),
            '%scnts' % (thresholds[0], ): 0,
            '%scnts' % (thresholds[1], ): 0,
            '%scnts' % (thresholds[2], ): 0,
            '%scnts' % (thresholds[3], ): 0
        },
        index=range(29, 183))
    df.index.name = 'doy'
    for base in thresholds:
        # Query Last doy for each year in archive
        df2 = read_sql("""
            select year,
            max(case when low <= %s then extract(doy from day)
                else 0 end) as doy from
            """ + table + """
            WHERE month < 7 and station = %s and year < %s
            GROUP by year
        """, pgconn, params=(base, station, datetime.date.today().year),
                       index_col=None)
        for _, row in df2.iterrows():
            if row['doy'] == 0:
                continue
            # every doy up to the year's last occurrence counts as
            # "threshold still to come" (label-based slice is inclusive)
            df.loc[0:row['doy'], '%scnts' % (base, )] += 1
        df['%sfreq' % (base, )] = \
            df['%scnts' % (base, )] / len(df2.index) * 100.
    res = """\
# IEM Climodat https://mesonet.agron.iastate.edu/climodat/
# Report Generated: %s
# Climate Record: %s -> %s
# Site Information: [%s] %s
# Contact Information: Daryl Herzmann [email protected] 515.294.5978
# Low Temperature exceedence probabilities
# (On a certain date, what is the chance a temperature below a certain
# threshold would be observed again that spring season)
 DOY Date <%s <%s <%s <%s
""" % (datetime.date.today().strftime("%d %b %Y"),
       nt.sts[station]['archive_begin'].date(), datetime.date.today(),
       station, nt.sts[station]['name'],
       thresholds[0] + 1, thresholds[1] + 1,
       thresholds[2] + 1, thresholds[3] + 1)
    fcols = ['%sfreq' % (s, ) for s in thresholds]
    mindate = None
    for doy, row in df.iterrows():
        # report every other day to keep the text output compact
        if doy % 2 != 0:
            continue
        if row[fcols[3]] < 100 and mindate is None:
            mindate = row['dates'] - datetime.timedelta(days=5)
        res += (" %3s %s  %3i  %3i  %3i  %3i\n") % (
            row['dates'].strftime("%-j"),
            row['dates'].strftime("%b %d"),
            row[fcols[0]], row[fcols[1]],
            row[fcols[2]], row[fcols[3]])
    (fig, ax) = plt.subplots(1, 1)
    for base in thresholds:
        ax.plot(df['dates'].values, df['%sfreq' % (base, )],
                label="%s" % (base, ), lw=2)
    ax.legend(loc='best')
    ax.set_xlim(mindate)
    ax.xaxis.set_major_locator(mdates.DayLocator([1, 7, 14, 21]))
    ax.xaxis.set_major_formatter(mdates.DateFormatter("%-d\n%b"))
    ax.set_title(
        ("Frequency of Last Spring Temperature\n"
         "%s %s (%s-%s)") % (station, nt.sts[station]['name'],
                             nt.sts[station]['archive_begin'].year,
                             datetime.date.today().year))
    ax.grid(True)
    df.reset_index(inplace=True)
    return fig, df, res
def plotter(fdict):
    """ Go

    Histogram of areal coverage of trailing-N-day MRMS precipitation
    departures (vs climatology) for one state, with a second series
    showing the portion of each departure bin that also exceeded a
    precip threshold on the target date.
    """
    import matplotlib
    matplotlib.use('agg')
    import matplotlib.pyplot as plt
    ctx = util.get_autoplot_context(fdict, get_description())
    date = ctx['date']
    sector = ctx['sector']
    days = ctx['trailing']
    threshold = ctx['threshold']
    window_sts = date - datetime.timedelta(days=days)
    if window_sts.year != date.year:
        raise ValueError('Sorry, do not support multi-year plots yet!')
    # netCDF time-axis offsets for the window start/end
    idx0 = iemre.daily_offset(window_sts)
    idx1 = iemre.daily_offset(date)
    ncfn = "/mesonet/data/iemre/%s_mw_mrms_daily.nc" % (date.year, )
    ncvar = 'p01d'
    if not os.path.isfile(ncfn):
        raise ValueError("No data for that year, sorry.")
    nc = netCDF4.Dataset(ncfn, 'r')
    today = distance(nc.variables[ncvar][idx1, :, :], 'MM').value('IN')
    if (idx1 - idx0) < 32:
        p01d = distance(np.sum(nc.variables[ncvar][idx0:idx1, :, :], 0),
                        'MM').value('IN')
    else:
        # Too much data can overwhelm this app, need to chunk it
        for i in range(idx0, idx1, 10):
            i2 = min([i+10, idx1])
            if idx0 == i:
                p01d = distance(
                    np.sum(nc.variables[ncvar][i:i2, :, :], 0),
                    'MM').value('IN')
            else:
                p01d += distance(
                    np.sum(nc.variables[ncvar][i:i2, :, :], 0),
                    'MM').value('IN')
    nc.close()
    # Get climatology
    nc = netCDF4.Dataset("/mesonet/data/iemre/mw_mrms_dailyc.nc")
    if (idx1 - idx0) < 32:
        c_p01d = distance(np.sum(nc.variables[ncvar][idx0:idx1, :, :], 0),
                          'MM').value('IN')
    else:
        # Too much data can overwhelm this app, need to chunk it
        for i in range(idx0, idx1, 10):
            i2 = min([i+10, idx1])
            if idx0 == i:
                c_p01d = distance(
                    np.sum(nc.variables[ncvar][i:i2, :, :], 0),
                    'MM').value('IN')
            else:
                c_p01d += distance(
                    np.sum(nc.variables[ncvar][i:i2, :, :], 0),
                    'MM').value('IN')
    nc.close()

    # Get the state weight
    nc = netCDF4.Dataset('/mesonet/data/iemre/state_weights_mrms.nc')
    weights = nc.variables[sector][:]
    nc.close()

    # we actually don't care about weights at this fine of scale
    cells = np.sum(np.where(weights > 0, 1, 0))
    departure = p01d - c_p01d
    # Update departure and today to values unconsidered below when out
    # of state
    departure = np.where(weights > 0, departure, -9999)
    today = np.where(weights > 0, today, 0)
    ranges = [[-99, -3], [-3, -2], [-2, -1], [-1, 0], [0, 1], [1, 2],
              [2, 3], [3, 99]]
    x = []
    x2 = []
    labels = []
    for (minv, maxv) in ranges:
        labels.append("%.0f to %.0f" % (minv, maxv))
        # How many departure cells in this range
        hits = np.logical_and(departure < maxv, departure > minv)
        # cells in the range that also got today's precip over threshold
        hits2 = np.logical_and(hits, today > threshold)
        x.append(np.sum(np.where(hits, 1, 0)) / float(cells) * 100.)
        x2.append(np.sum(np.where(hits2, 1, 0)) / float(cells) * 100.)

    (fig, ax) = plt.subplots(1, 1)
    ax.set_title(("%s NOAA MRMS %s %.2f inch Precip Coverage"
                  ) % (PDICT[sector], date.strftime("%-d %b %Y"),
                       threshold))
    ax.bar(np.arange(8) - 0.2, x, align='center', width=0.4,
           label='Trailing %s Day Departure' % (days,))
    ax.bar(np.arange(8) + 0.2, x2, align='center', width=0.4,
           label='%s Coverage (%.1f%% Tot)' % (date.strftime("%-d %b %Y"),
                                               sum(x2)))
    for i, (_x1, _x2) in enumerate(zip(x, x2)):
        ax.text(i - 0.2, _x1 + 1, "%.1f" % (_x1, ), ha='center')
        ax.text(i + 0.2, _x2 + 1, "%.1f" % (_x2, ), ha='center')
    ax.set_xticks(np.arange(8))
    ax.set_xticklabels(labels)
    ax.set_xlabel("Trailing %s Day Precip Departure [in]" % (days,))
    ax.set_position([0.1, 0.2, 0.8, 0.7])
    ax.legend(loc=(0., -0.2), ncol=2)
    ax.set_ylabel("Areal Coverage of %s [%%]" % (PDICT[sector], ))
    ax.grid(True)
    ax.set_xlim(-0.5, 7.5)
    ax.set_ylim(0, max([max(x2), max(x)]) + 5)
    return fig
def plotter(fdict):
    """ Go

    Plot multi-day streaks of an hourly variable staying above/below a
    threshold, with all years overlaid on a common (year 2000) axis.
    Relies on a module-level plot() helper to segment the accumulated
    points into drawn line streaks.
    """
    import matplotlib
    matplotlib.use('agg')
    import matplotlib.pyplot as plt
    import matplotlib.dates as mdates
    ASOS = psycopg2.connect(database='asos', host='iemdb', user='******')
    cursor = ASOS.cursor(cursor_factory=psycopg2.extras.DictCursor)
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['zstation']
    network = ctx['network']
    threshold = ctx['threshold']
    mydir = ctx['dir']
    hours = ctx['hours']
    varname = ctx['var']
    # NOTE(review): legacy fallback reads 'month' straight from fdict
    # when present, else ctx['m'] — presumably a renamed form field.
    month = ctx['m'] if fdict.get('month') is None else fdict.get('month')
    nt = NetworkTable(network)
    if month == 'all':
        months = range(1, 13)
        sts = datetime.datetime(2000, 1, 1)
        ets = datetime.datetime(2000, 12, 31)
    elif month == 'fall':
        months = [9, 10, 11]
        sts = datetime.datetime(2000, 9, 1)
        ets = datetime.datetime(2000, 11, 30)
    elif month == 'spring':
        months = [3, 4, 5]
        sts = datetime.datetime(2000, 3, 1)
        ets = datetime.datetime(2000, 5, 31)
    elif month == 'summer':
        months = [6, 7, 8]
        sts = datetime.datetime(2000, 6, 1)
        ets = datetime.datetime(2000, 8, 31)
    else:
        ts = datetime.datetime.strptime("2000-"+month+"-01", '%Y-%b-%d')
        # make sure it is length two for the trick below in SQL
        months = [ts.month, 999]
        sts = datetime.datetime(2000, ts.month, 1)
        ets = sts + datetime.timedelta(days=35)
        ets = ets.replace(day=1)
    cursor.execute("""
        SELECT valid, round(""" + varname + """::numeric,0)
        from alldata where station = %s
        and """ + varname + """ is not null and
        extract(month from valid) in %s ORDER by valid ASC
        """, (station, tuple(months)))
    (fig, ax) = plt.subplots(1, 1)
    interval = datetime.timedelta(hours=hours)
    valid = []
    tmpf = []
    year = 0
    lines = []
    for row in cursor:
        if year != row[0].year:
            # year rollover: flush any in-progress streak
            year = row[0].year
            lines = plot(ax, interval, valid, tmpf, year, lines)
            valid = []
            tmpf = []
        if ((mydir == 'above' and row[1] >= threshold) or
                (mydir == 'below' and row[1] < threshold)):
            # ob meets the criterion: extend the current streak,
            # normalized to year 2000 for overlay
            valid.append(row[0].replace(year=2000))
            tmpf.append(row[1])
        if ((mydir == 'above' and row[1] < threshold) or
                (mydir == 'below' and row[1] >= threshold)):
            # criterion broken: include the breaking ob, emit the streak,
            # and reset the accumulators
            valid.append(row[0].replace(year=2000))
            tmpf.append(row[1])
            lines = plot(ax, interval, valid, tmpf, year, lines)
            valid = []
            tmpf = []
    # flush the final streak after the last row
    lines = plot(ax, interval, valid, tmpf, year, lines)
    rows = []
    x0 = []
    x1 = []
    for line in lines:
        xdata = line.get_xdata()
        x0.append(xdata[0])
        x1.append(xdata[-1])
        rows.append(dict(start=xdata[0].replace(year=line.year),
                         end=xdata[-1].replace(year=line.year),
                         hours=line.hours, days=line.days))
    df = pd.DataFrame(rows)
    if len(lines) > 0:
        sts = min(x0)
        ets = max(x1)
    ax.set_xlim(sts, ets)
    if (ets - sts).days > 10:
        ax.xaxis.set_major_locator(
            mdates.DayLocator(interval=((ets - sts).days / 10),
                              tz=pytz.timezone(nt.sts[station]['tzname'])))
    ax.xaxis.set_major_formatter(mdates.DateFormatter('%-d\n%b'))
    ax.grid(True)
    ax.set_ylabel("%s $^\circ$F" % (PDICT2.get(varname),))
    ax.set_xlabel("Timezone %s" % (nt.sts[station]['tzname'],))
    ax.set_title(("%s-%s [%s] %s\n%s :: %.1f+ Day Streaks %s %s$^\circ$F"
                  ) % (nt.sts[station]['archive_begin'].year,
                       datetime.datetime.now().year, station,
                       nt.sts[station]['name'], MDICT.get(month),
                       hours / 24.0, mydir, threshold))
    # ax.axhline(32, linestyle='-.', linewidth=2, color='k')
    # ax.set_ylim(bottom=43)
    box = ax.get_position()
    ax.set_position([box.x0, box.y0 + box.height * 0.15,
                     box.width, box.height * 0.85])
    ax.legend(loc='upper center', bbox_to_anchor=(0.5, -0.1), fancybox=True,
              shadow=True, ncol=5, fontsize=12, columnspacing=1)
    return fig, df
def plotter(fdict):
    """ Go

    For one UGC (zone/county) and VTEC phenomena/significance, chart the
    frequency by minute-of-day that a warning of that type was active,
    expressed as a percentage of all such events.
    """
    import matplotlib
    matplotlib.use("agg")
    import matplotlib.pyplot as plt
    pgconn = psycopg2.connect(database="postgis", host="iemdb",
                              user="******")
    cursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    ctx = get_autoplot_context(fdict, get_description())
    ugc = ctx["ugc"]
    phenomena = ctx["phenomena"]
    significance = ctx["significance"]
    (fig, ax) = plt.subplots(1, 1)
    # resolve the UGC's issuing WFO and local timezone
    cursor.execute(
        """
    SELECT s.wfo, s.tzname, u.name from ugcs u JOIN stations s on
    (u.wfo = s.id) where ugc = %s and end_ts is null
    and s.network = 'WFO'
    """,
        (ugc,),
    )
    wfo = None
    tzname = None
    name = ""
    if cursor.rowcount == 1:
        row = cursor.fetchone()
        tzname = row[1]
        wfo = row[0]
        name = row[2]
    cursor.execute(
        """
    SELECT count(*), min(issue at time zone %s),
    max(issue at time zone %s) from warnings
    WHERE ugc = %s and phenomena = %s and significance = %s
    and wfo = %s
    """,
        (tzname, tzname, ugc, phenomena, significance, wfo),
    )
    row = cursor.fetchone()
    cnt = row[0]
    sts = row[1]
    ets = row[2]
    if sts is None:
        ax.text(0.5, 0.5, "No Results Found, try flipping zone/county",
                transform=ax.transAxes, ha="center")
        return fig
    # expand each event into one row per active minute (local time), then
    # dedupe per event so one event counts a minute at most once
    cursor.execute(
        """
    WITH coverage as (
        SELECT extract(year from issue) as yr, eventid,
        generate_series(issue at time zone %s,
                        expire at time zone %s,
                        '1 minute'::interval) as s
        from warnings where ugc = %s and phenomena = %s
        and significance = %s and wfo = %s),
    minutes as (SELECT distinct yr, eventid,
        (extract(hour from s)::numeric * 60. +
         extract(minute from s)::numeric) as m
        from coverage)

    SELECT minutes.m, count(*) from minutes GROUP by m
    """,
        (tzname, tzname, ugc, phenomena, significance, wfo),
    )
    data = np.zeros((1440,), "f")
    for row in cursor:
        data[int(row[0])] = row[1]
    df = pd.DataFrame(dict(minute=pd.Series(np.arange(1440)),
                           events=pd.Series(data)))
    ax.bar(np.arange(1440), data / float(cnt) * 100.0, ec="b", fc="b")
    ax.set_ylim(0, 100)
    ax.set_yticks([0, 10, 25, 50, 75, 90, 100])
    ax.grid()
    ax.set_xticks(range(0, 1440, 60))
    ax.set_xticklabels(
        [
            "Mid",
            "",
            "",
            "3 AM",
            "",
            "",
            "6 AM",
            "",
            "",
            "9 AM",
            "",
            "",
            "Noon",
            "",
            "",
            "3 PM",
            "",
            "",
            "6 PM",
            "",
            "",
            "9 PM",
            "",
            "",
            "Mid",
        ]
    )
    ax.set_xlabel("Timezone: %s (Daylight or Standard)" % (tzname,))
    ax.set_ylabel("Frequency [%%] out of %s Events" % (cnt,))
    ax.set_title(
        ("[%s] %s :: %s %s (%s.%s)\n%s Events - %s to %s")
        % (
            ugc,
            name,
            vtec._phenDict[phenomena],
            vtec._sigDict[significance],
            phenomena,
            significance,
            cnt,
            sts.strftime("%Y-%m-%d %I:%M %p"),
            ets.strftime("%Y-%m-%d %I:%M %p"),
        )
    )
    ax.set_xlim(0, 1441)
    return fig, df
def plotter(fdict):
    """ Go

    For each high temperature (descending), chart the most recent date on
    which that value (or warmer, by the running-max logic) was last
    reached, annotated with the year and a tie marker.

    Returns: (matplotlib figure, DataFrame of per-level latest dates)
    """
    import matplotlib
    matplotlib.use('agg')
    import matplotlib.pyplot as plt
    pgconn = psycopg2.connect(dbname='coop', host='iemdb', user='******')
    ctx = util.get_autoplot_context(fdict, get_description())
    station = ctx['station']
    network = ctx['network']
    nt = NetworkTable(network)
    table = "alldata_%s" % (station[:2],)
    # rank=1 keeps, per high value, the row latest in the calendar (sday)
    df = read_sql("""
    with data as (
        select day, high, year,
        rank() OVER (PARTITION by high ORDER by sday DESC)
        from """ + table + """ where station = %s)
    SELECT day, year, high, rank from data WHERE rank = 1
    ORDER by high DESC, day DESC
    """, pgconn, params=(station, ), index_col=None)
    if len(df.index) == 0:
        return "No data found!"
    (fig, ax) = plt.subplots(1, 1, figsize=(6, 8))
    current = {'d2000': datetime.date(2000, 1, 1),
               'date': datetime.date(2000, 1, 1),
               'ties': False}
    x = []
    y = []
    for level in np.arange(df['high'].max(), 0, -1):
        # BUGFIX: `level in df['high']` tested membership against the
        # Series *index* (0..n-1), not the high values; use .values so we
        # actually skip levels absent from the data.
        if level not in df['high'].values:
            continue
        df2 = df[df['high'] == level]
        row = df2.iloc[0]
        # carry forward the latest calendar date seen at any warmer level
        if row['day'].replace(year=2000) > current['d2000']:
            current['d2000'] = row['day'].replace(year=2000)
            current['date'] = row['day']
            current['ties'] = (len(df2.index) > 1)
        if current['date'].month == 12 and current['date'].day > 20:
            break
        y.append(level)
        x.append(int(current['d2000'].strftime("%j")))
        ax.text(x[-1] + 3, level,
                "%s -- %s %s%s" % (
                    level, current['d2000'].strftime("%-d %b"),
                    current['date'].year,
                    " **" if current['ties'] else ""), va='center')
    ax.barh(y, x, align='center')
    ax.set_xticks((1, 32, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335,
                   365))
    ax.set_xticklabels(calendar.month_abbr[1:])
    ax.set_xlim(min(x) - 5, 400)
    ax.set_ylim(y[-1] - 1, y[0] + 1)
    ax.grid(True)
    ax.set_title(("Most Recent & Latest Date of High Temperature\n"
                  "[%s] %s (%s-%s)"
                  ) % (station, nt.sts[station]['name'],
                       nt.sts[station]['archive_begin'].year,
                       datetime.date.today().year))
    ax.set_ylabel(r"High Temperature $^\circ$F")
    ax.set_xlabel("** denotes ties")
    return fig, df
def plotter(fdict):
    """Generate a 2-D histogram of daily high vs low temperature.

    Optionally highlights one year's (or winter season's) observations as
    a scatter overlay on the climatological frequency field.

    Args:
        fdict (dict): autoplot context / CGI form values

    Returns:
        (matplotlib.figure.Figure, pandas.DataFrame)

    Raises:
        NoDataFound: when no observations or station metadata exist
    """
    pgconn = get_dbconn('coop')
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['station']
    binsize = ctx['binsize']
    month = ctx['month']
    year = ctx.get('year')
    table = "alldata_%s" % (station[:2], )
    nt = network.Table("%sCLIMATE" % (station[:2], ))
    if month == 'all':
        months = range(1, 13)
    elif month == 'fall':
        months = [9, 10, 11]
    elif month == 'winter':
        months = [12, 1, 2]
    elif month == 'spring':
        months = [3, 4, 5]
    elif month == 'summer':
        months = [6, 7, 8]
    else:
        ts = datetime.datetime.strptime("2000-" + month + "-01", '%Y-%b-%d')
        # make sure it is length two for the trick below in SQL
        months = [ts.month, 999]
    ddf = read_sql("""
    SELECT high, low, year, month from """ + table + """
    WHERE station = %s and year > 1892 and high >= low and month in %s
    """, pgconn, params=(station, tuple(months)), index_col=None)
    if ddf.empty:
        raise NoDataFound("No Data Found.")
    bins = np.arange(-40, 121, binsize)
    hist, xedges, yedges = np.histogram2d(ddf['low'], ddf['high'], bins)
    rows = []
    for i, xedge in enumerate(xedges[:-1]):
        for j, yedge in enumerate(yedges[:-1]):
            rows.append(dict(high=yedge, low=xedge, count=hist[i, j]))
    df = pd.DataFrame(rows)
    ab = nt.sts[station]['archive_begin']
    if ab is None:
        raise NoDataFound("Unknown station metadata.")
    years = float(datetime.datetime.now().year - ab.year)
    hist = np.ma.array(hist / years)
    # mask out cells that average less than one occurrence per year
    hist.mask = np.where(hist < (1. / years), True, False)
    ar = np.argwhere(hist.max() == hist)
    (fig, ax) = plt.subplots(1, 1, figsize=(8, 6))
    res = ax.pcolormesh(xedges, yedges, hist.T)
    fig.colorbar(res, label="Days per Year")
    ax.grid(True)
    ax.set_title(("%s [%s]\n"
                  "Daily High vs Low Temp Histogram (month=%s)"
                  ) % (nt.sts[station]['name'], station, month.upper()))
    ax.set_ylabel(r"High Temperature $^{\circ}\mathrm{F}$")
    ax.set_xlabel(r"Low Temperature $^{\circ}\mathrm{F}$")
    xmax = ar[0][0]
    ymax = ar[0][1]
    ax.text(0.65, 0.15, ("Largest Frequency: %.1f days\n"
                         "High: %.0f-%.0f Low: %.0f-%.0f"
                         ) % (hist[xmax, ymax], yedges[ymax],
                              yedges[ymax + 1], xedges[xmax],
                              xedges[xmax + 1]),
            ha='center', va='center', transform=ax.transAxes,
            bbox=dict(color='white'))
    ax.axhline(32, linestyle='-', lw=1, color='k')
    ax.text(120, 32, r"32$^\circ$F", va='center', ha='right',
            color='white', bbox=dict(color='k'), fontsize=8)
    ax.axvline(32, linestyle='-', lw=1, color='k')
    ax.text(32, 117, r"32$^\circ$F", va='top', ha='center', color='white',
            bbox=dict(facecolor='k', edgecolor='none'), fontsize=8)
    if year:
        label = str(year)
        if month == 'winter':
            # BUGFIX: the original assigned a mask-filtered Series back to
            # the column; index alignment set every December row's year to
            # NaN, dropping December from the highlighted season.  Only
            # shift Jan/Feb back one year so Dec stays with its season.
            ddf.loc[ddf['month'].isin([1, 2]), 'year'] -= 1
            label = "Dec %s - Feb %s" % (year, year + 1)
        ddf2 = ddf[ddf['year'] == year]
        ax.scatter(ddf2['low'], ddf2['high'], marker='x', label=label,
                   edgecolor='white', facecolor='red')
        ax.legend()
    return fig, df
def plotter(fdict):
    """Build a calendar plot of daily observations vs NCDC climatology.

    Pulls the NCDC 1981-2010 climatology for the station's paired site,
    joins it against IEM daily summaries, and renders the chosen variable
    (optionally a departure, colored blue/red below/above normal) on a
    calendar grid.

    Args:
        fdict (dict): autoplot context / CGI form values

    Returns:
        (matplotlib figure from calendar_plot, pandas.DataFrame)
    """
    pgconn = get_dbconn('iem')
    cursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['station']
    varname = ctx['var']
    network = ctx['network']
    sdate = ctx['sdate']
    edate = ctx['edate']
    nt = NetworkTable(network)
    # Get Climatology, indexed by mmdd for the per-day lookups below
    cdf = read_sql("""SELECT to_char(valid, 'mmdd') as sday, high, low,
        (high + low) / 2. as avg, precip from ncdc_climate81 WHERE
        station = %s """, get_dbconn('coop'),
                   params=(nt.sts[station]['ncdc81'], ), index_col='sday')
    cursor.execute("""
        SELECT day, max_tmpf, min_tmpf, max_dwpf, min_dwpf,
        (max_tmpf + min_tmpf) / 2. as avg_tmpf, pday,
        coalesce(avg_sknt, 0) as avg_sknt
        from summary s JOIN stations t on (t.iemid = s.iemid)
        WHERE s.day >= %s and s.day <= %s and t.id = %s and t.network = %s
        ORDER by day ASC
    """, (sdate, edate, station, network))
    rows = []
    data = {}
    for row in cursor:
        # departures from climatology; row[0] is the day column
        # NOTE(review): assumes max/min/avg_tmpf are non-null here — a
        # null temperature would raise a TypeError; confirm upstream QC
        hd = row['max_tmpf'] - cdf.at[row[0].strftime("%m%d"), 'high']
        ld = row['min_tmpf'] - cdf.at[row[0].strftime("%m%d"), 'low']
        ad = row['avg_tmpf'] - cdf.at[row[0].strftime("%m%d"), 'avg']
        rows.append(
            dict(day=row['day'], max_tmpf=row['max_tmpf'],
                 avg_smph=speed(row['avg_sknt'], 'KT').value('MPH'),
                 min_dwpf=row['min_dwpf'], max_dwpf=row['max_dwpf'],
                 high_departure=hd, low_departure=ld, avg_departure=ad,
                 min_tmpf=row['min_tmpf'], pday=row['pday']))
        data[row[0]] = {'val': safe(rows[-1], varname)}
        # color departures blue when below normal, red when above
        if varname == 'high_departure':
            data[row[0]]['color'] = 'b' if hd < 0 else 'r'
        elif varname == 'low_departure':
            data[row[0]]['color'] = 'b' if ld < 0 else 'r'
        elif varname == 'avg_departure':
            data[row[0]]['color'] = 'b' if ad < 0 else 'r'
    df = pd.DataFrame(rows)
    title = '[%s] %s Daily %s' % (station, nt.sts[station]['name'],
                                  PDICT.get(varname))
    subtitle = '%s thru %s' % (sdate.strftime("%-d %b %Y"),
                               edate.strftime("%-d %b %Y"))
    fig = calendar_plot(sdate, edate, data, title=title, subtitle=subtitle)
    return fig, df
def plotter(fdict):
    """Plot trailing-period departures with an optional USDM underlay.

    Computes, fully in SQL, the p1/p2/p3-day trailing aggregates of the
    chosen variable, their day-of-year climatology (mean and stddev), and
    then each day's departure (``diff``) or standardized anomaly
    (``sigma``) over the requested date span.

    Args:
        fdict (dict): autoplot context / CGI form values

    Returns:
        (matplotlib.figure.Figure, pandas.DataFrame indexed by day)
    """
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['station']
    network = ctx['network']
    nt = NetworkTable(network)
    p1 = ctx['p1']
    p2 = ctx['p2']
    p3 = ctx['p3']
    pvar = ctx['pvar']
    sts = ctx['sdate']
    ets = ctx['edate']
    how = ctx['how']
    maxdays = max([p1, p2, p3])
    pgconn = get_dbconn('coop')
    table = "alldata_%s" % (station[:2], )
    # window frames use "ROWS n PRECEDING", hence the (period - 1) params;
    # the counts column lets us keep only fully-populated windows
    df = read_sql("""
    -- Get all period averages
    with avgs as (
        SELECT day, sday,
        count(high) OVER (ORDER by day ASC ROWS %s PRECEDING) as counts,
        avg(high) OVER (ORDER by day ASC ROWS %s PRECEDING) as p1_high,
        avg(high) OVER (ORDER by day ASC ROWS %s PRECEDING) as p2_high,
        avg(high) OVER (ORDER by day ASC ROWS %s PRECEDING) as p3_high,
        avg(low) OVER (ORDER by day ASC ROWS %s PRECEDING) as p1_low,
        avg(low) OVER (ORDER by day ASC ROWS %s PRECEDING) as p2_low,
        avg(low) OVER (ORDER by day ASC ROWS %s PRECEDING) as p3_low,
        avg((high+low)/2.) OVER (ORDER by day ASC ROWS %s PRECEDING)
            as p1_avgt,
        avg((high+low)/2.) OVER (ORDER by day ASC ROWS %s PRECEDING)
            as p2_avgt,
        avg((high+low)/2.) OVER (ORDER by day ASC ROWS %s PRECEDING)
            as p3_avgt,
        sum(precip) OVER (ORDER by day ASC ROWS %s PRECEDING) as p1_precip,
        sum(precip) OVER (ORDER by day ASC ROWS %s PRECEDING) as p2_precip,
        sum(precip) OVER (ORDER by day ASC ROWS %s PRECEDING) as p3_precip
        from """ + table + """ WHERE station = %s
    ),
    -- Get sday composites
    sdays as (
        SELECT sday,
        avg(p1_high) as p1_high_avg, stddev(p1_high) as p1_high_stddev,
        avg(p2_high) as p2_high_avg, stddev(p2_high) as p2_high_stddev,
        avg(p3_high) as p3_high_avg, stddev(p3_high) as p3_high_stddev,
        avg(p1_low) as p1_low_avg, stddev(p1_low) as p1_low_stddev,
        avg(p2_low) as p2_low_avg, stddev(p2_low) as p2_low_stddev,
        avg(p3_low) as p3_low_avg, stddev(p3_low) as p3_low_stddev,
        avg(p1_avgt) as p1_avgt_avg, stddev(p1_avgt) as p1_avgt_stddev,
        avg(p2_avgt) as p2_avgt_avg, stddev(p2_avgt) as p2_avgt_stddev,
        avg(p3_avgt) as p3_avgt_avg, stddev(p3_avgt) as p3_avgt_stddev,
        avg(p1_precip) as p1_precip_avg,
        stddev(p1_precip) as p1_precip_stddev,
        avg(p2_precip) as p2_precip_avg,
        stddev(p2_precip) as p2_precip_stddev,
        avg(p3_precip) as p3_precip_avg,
        stddev(p3_precip) as p3_precip_stddev
        from avgs WHERE counts = %s GROUP by sday
    )
    -- Now merge to get obs
    SELECT day, s.sday,
    p1_high - p1_high_avg as p1_high_diff,
    p2_high - p2_high_avg as p2_high_diff,
    p3_high - p3_high_avg as p3_high_diff,
    p1_low - p1_low_avg as p1_low_diff,
    p2_low - p2_low_avg as p2_low_diff,
    p3_low - p3_low_avg as p3_low_diff,
    p1_avgt - p1_avgt_avg as p1_avgt_diff,
    p2_avgt - p2_avgt_avg as p2_avgt_diff,
    p3_avgt - p3_avgt_avg as p3_avgt_diff,
    p1_precip - p1_precip_avg as p1_precip_diff,
    p2_precip - p2_precip_avg as p2_precip_diff,
    p3_precip - p3_precip_avg as p3_precip_diff,
    (p1_high - p1_high_avg) / p1_high_stddev as p1_high_sigma,
    (p2_high - p2_high_avg) / p2_high_stddev as p2_high_sigma,
    (p3_high - p3_high_avg) / p3_high_stddev as p3_high_sigma,
    (p1_low - p1_low_avg) / p1_low_stddev as p1_low_sigma,
    (p2_low - p2_low_avg) / p2_low_stddev as p2_low_sigma,
    (p3_low - p3_low_avg) / p3_low_stddev as p3_low_sigma,
    (p1_avgt - p1_avgt_avg) / p1_avgt_stddev as p1_avgt_sigma,
    (p2_avgt - p2_avgt_avg) / p2_avgt_stddev as p2_avgt_sigma,
    (p3_avgt - p3_avgt_avg) / p3_avgt_stddev as p3_avgt_sigma,
    (p1_precip - p1_precip_avg) / p1_precip_stddev as p1_precip_sigma,
    (p2_precip - p2_precip_avg) / p2_precip_stddev as p2_precip_sigma,
    (p3_precip - p3_precip_avg) / p3_precip_stddev as p3_precip_sigma
    from avgs a JOIN sdays s on (a.sday = s.sday)
    WHERE day >= %s and day <= %s ORDER by day ASC
    """, pgconn, params=(maxdays - 1, p1 - 1, p2 - 1, p3 - 1,
                         p1 - 1, p2 - 1, p3 - 1,
                         p1 - 1, p2 - 1, p3 - 1,
                         p1 - 1, p2 - 1, p3 - 1,
                         station, maxdays, sts, ets), index_col='day')
    (fig, ax) = plt.subplots(1, 1, figsize=(8, 6))
    # leave head-room for the fig.text() title placed at y=0.93
    ax.set_position([0.1, 0.14, 0.85, 0.71])
    l1, = ax.plot(df.index.values, df['p1_'+pvar+'_'+how], lw=2,
                  label='%s Day' % (p1, ), zorder=5)
    l2, = ax.plot(df.index.values, df['p2_'+pvar+'_'+how], lw=2,
                  label='%s Day' % (p2, ), zorder=5)
    l3, = ax.plot(df.index.values, df['p3_'+pvar+'_'+how], lw=2,
                  label='%s Day' % (p3, ), zorder=5)
    fig.text(0.5, 0.93, ("[%s] %s\n"
                         "Trailing %s, %s, %s Day Departures & "
                         "US Drought Monitor"
                         ) % (station, nt.sts[station]['name'],
                              p1, p2, p3),
             ha='center', fontsize=14)
    ax.xaxis.set_major_formatter(mdates.DateFormatter('%b\n%Y'))
    ax.set_ylabel(("%s [%s] %s"
                   ) % (PDICT.get(pvar),
                        UNITS[pvar] if how == 'diff' else r"$\sigma$",
                        PDICT2[how]))
    ax.grid(True)
    legend = plt.legend(handles=[l1, l2, l3], ncol=3, fontsize=12,
                        loc='best')
    ax.add_artist(legend)
    ax.text(1, -0.14, "%s to %s" % (sts.strftime("%-d %b %Y"),
                                    ets.strftime("%-d %b %Y")),
            va='bottom', ha='right', fontsize=12, transform=ax.transAxes)
    # USDM underlay only makes sense for point sites, not statewide 0000s
    if station[2:] != "0000":
        try:
            underlay_usdm(ax, sts, ets, nt.sts[station]['lon'],
                          nt.sts[station]['lat'])
        except Exception as exp:
            # best-effort: a failed underlay should not kill the plot
            sys.stderr.write(str(exp))
    ax.set_xlim(df.index.min().toordinal() - 2,
                df.index.max().toordinal() + 2)
    return fig, df
def plotter(fdict):
    """Plot each year's monthly temperature range for a station.

    Top panel: a floating bar per year from the month's minimum low to its
    maximum high, with long-term means of each drawn as reference lines.
    Bottom panel: the range (max high - min low) itself.  The selected
    year is highlighted in red on both panels.

    Args:
        fdict (dict): autoplot context / CGI form values

    Returns:
        (matplotlib.figure.Figure, pandas.DataFrame indexed by year)
    """
    import matplotlib
    matplotlib.use('agg')
    import matplotlib.pyplot as plt
    pgconn = get_dbconn('coop')
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['station']
    month = ctx['month']
    year = ctx['year']
    table = "alldata_%s" % (station[:2], )
    nt = NetworkTable("%sCLIMATE" % (station[:2], ))
    df = read_sql("""
        SELECT year, max(high) as max_high, min(low) as min_low
        from """ + table + """ where station = %s and month = %s
        and high is not null and low is not null
        GROUP by year ORDER by year ASC
    """, pgconn, params=(station, month), index_col='year')
    df['rng'] = df['max_high'] - df['min_low']
    (fig, ax) = plt.subplots(2, 1, sharex=True)
    # floating bars: bottom at min_low, height = range
    bars = ax[0].bar(df.index.values, df['rng'], bottom=df['min_low'],
                     fc='b', ec='b')
    # highlight the requested year (raises ValueError if year not present)
    idx = list(df.index.values.astype('i')).index(year)
    bars[idx].set_facecolor('r')
    bars[idx].set_edgecolor('r')
    ax[0].axhline(df['max_high'].mean(), lw=2, color='k', zorder=2)
    ax[0].text(df.index.values[-1] + 2, df['max_high'].mean(),
               "%.0f" % (df['max_high'].mean(), ), ha='left', va='center')
    ax[0].axhline(df['min_low'].mean(), lw=2, color='k', zorder=2)
    ax[0].text(df.index.values[-1] + 2, df['min_low'].mean(),
               "%.0f" % (df['min_low'].mean(), ), ha='left', va='center')
    ax[0].grid(True)
    # BUGFIX: use raw strings; "\c" in a plain string is an invalid escape
    # sequence (DeprecationWarning, SyntaxWarning in newer Pythons)
    ax[0].set_ylabel(r"Temperature $^\circ$F")
    ax[0].set_xlim(df.index.min() - 1.5, df.index.max() + 1.5)
    ax[0].set_title(
        ("%s %s\n%s Temperature Range (Max High - Min Low)"
         ) % (station, nt.sts[station]['name'],
              calendar.month_name[month]))
    bars = ax[1].bar(df.index.values, df['rng'], fc='b', ec='b', zorder=1)
    bars[idx].set_facecolor('r')
    bars[idx].set_edgecolor('r')
    ax[1].set_title(
        ("Year %s [Hi: %s Lo: %s Rng: %s] Highlighted"
         ) % (year, df.at[year, 'max_high'], df.at[year, 'min_low'],
              df.at[year, 'rng']), color='r')
    ax[1].axhline(df['rng'].mean(), lw=2, color='k', zorder=2)
    ax[1].text(df.index.max() + 2, df['rng'].mean(),
               "%.0f" % (df['rng'].mean(), ), ha='left', va='center')
    ax[1].set_ylabel(r"Temperature Range $^\circ$F")
    ax[1].grid(True)
    return fig, df
def plotter(fdict):
    """Heatmap of hourly frequency above/below a temperature threshold.

    For each (week-of-year, local hour) cell, computes the percentage of
    observations where the chosen variable is above (or below) the given
    threshold, and renders a 24x53 image.

    Args:
        fdict (dict): autoplot context / CGI form values

    Returns:
        (matplotlib.figure.Figure, pandas.DataFrame)
    """
    import matplotlib
    matplotlib.use('agg')
    import matplotlib.pyplot as plt
    import matplotlib.colors as mpcolors
    ASOS = psycopg2.connect(database='asos', host='iemdb', user='******')
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['zstation']
    network = ctx['network']
    threshold = ctx['threshold']
    direction = ctx['direction']
    varname = ctx['var']
    nt = NetworkTable(network)
    mydir = "<" if direction == 'below' else '>='
    # varname/mydir are concatenated into SQL; both come from the plot's
    # own select-option lists, not free-form user input
    df = read_sql("""
    WITH data as (
        SELECT extract(week from valid) as week,
        extract(hour from (valid + '10 minutes'::interval)
            at time zone %s) as hour,
        """ + varname + """ as d from alldata where station = %s and
        """ + varname + """ between -70 and 140
    )
    SELECT week::int, hour::int,
    sum(case when d """ + mydir + """ %s then 1 else 0 end),
    count(*) from data GROUP by week, hour
    """, ASOS, params=(nt.sts[station]['tzname'], station, threshold),
                  index_col=None)
    data = np.zeros((24, 53), 'f')
    df['freq[%]'] = df['sum'] / df['count'] * 100.
    for _, row in df.iterrows():
        # week is 1-based in SQL, 0-based in the array
        data[row['hour'], row['week'] - 1] = row['freq[%]']
    # 2012 is only used to compute month-start day-of-year tick positions
    sts = datetime.datetime(2012, 1, 1)
    xticks = []
    for i in range(1, 13):
        ts = sts.replace(month=i)
        xticks.append(float(ts.strftime("%j")) / 7.0)
    (fig, ax) = plt.subplots(1, 1)
    cmap = plt.get_cmap('jet')
    cmap.set_under('white')
    bins = np.arange(0, 101, 5)
    # values below 1% render white via set_under
    bins[0] = 1
    norm = mpcolors.BoundaryNorm(bins, cmap.N)
    res = ax.imshow(data, interpolation='nearest', aspect='auto',
                    extent=[0, 53, 24, 0], cmap=cmap, norm=norm)
    fig.colorbar(res, label='%', extend='min')
    ax.grid(True, zorder=11)
    ax.set_title(
        "%s [%s]\nHourly %s %s %s$^\circ$F (%s-%s)"
        % (nt.sts[station]['name'], station, PDICT2[varname],
           PDICT[direction], threshold,
           nt.sts[station]['archive_begin'].year,
           datetime.datetime.now().year), size=12)
    ax.set_xticks(xticks)
    ax.set_ylabel("%s Timezone" % (nt.sts[station]['tzname'], ))
    ax.set_xticklabels(calendar.month_abbr[1:])
    ax.set_xlim(0, 53)
    ax.set_ylim(0, 24)
    ax.set_yticks([0, 4, 8, 12, 16, 20, 24])
    ax.set_yticklabels(
        ['12 AM', '4 AM', '8 AM', 'Noon', '4 PM', '8 PM', 'Mid'])
    return fig, df
def plotter(fdict):
    """Plot a yearly statistic for a station as colored bars.

    Bars are colored above/below the period-of-record average, with the
    1981-2010 average and a trailing 30-year average overlaid.

    Args:
        fdict (dict): autoplot context / CGI form values

    Returns:
        (matplotlib.figure.Figure, pandas.DataFrame indexed by year)
    """
    import matplotlib
    matplotlib.use('agg')
    import matplotlib.pyplot as plt
    pgconn = util.get_dbconn('coop')
    ctx = util.get_autoplot_context(fdict, get_description())
    station = ctx['station']
    threshold = ctx['threshold']
    ptype = ctx['type']
    syear = ctx['syear']
    eyear = ctx['eyear']
    table = "alldata_%s" % (station[:2], )
    nt = network.Table("%sCLIMATE" % (station[:2], ))
    df = read_sql("""
    SELECT year,
    max(high) as "max-high",
    min(high) as "min-high",
    avg(high) as "avg-high",
    max(low) as "max-low",
    min(low) as "min-low",
    avg(low) as "avg-low",
    max(precip) as "max-precip",
    sum(precip) as "sum-precip",
    sum(case when high >= %s then 1 else 0 end) as "days-high-above",
    sum(case when low >= %s then 1 else 0 end) as "days-lows-above",
    sum(case when low < %s then 1 else 0 end) as "days-lows-below",
    avg(precip) as "avg-precip",
    avg(case when precip >= 0.01 then precip else null end)
        as "avg-precip2",
    sum(case when precip >= %s then 1 else 0 end) as "days-precip"
    from """ + table + """
    where station = %s and year >= %s and year <= %s
    GROUP by year ORDER by year ASC
    """, pgconn, params=(threshold, threshold, threshold, threshold,
                         station, syear, eyear), index_col='year')
    (fig, ax) = plt.subplots(1, 1)
    avgv = df[ptype].mean()
    data = df[ptype].values
    years = df.index.values
    # Compute 30 year trailing average
    tavg = [None] * 30
    for i in range(30, len(data)):
        tavg.append(np.average(data[i - 30:i]))
    a1981_2010 = df.loc[1981:2011, ptype].mean()
    colorabove = 'tomato'
    colorbelow = 'dodgerblue'
    precision = "%.1f"
    # precipitation variables invert the color sense (wet = blue)
    if ptype in [
            'max-precip', 'sum-precip', 'avg-precip', 'avg-precip2',
            'days-precip']:
        colorabove = 'dodgerblue'
        colorbelow = 'tomato'
        precision = "%.2f"
    bars = ax.bar(np.array(years) - 0.4, data, fc=colorabove,
                  ec=colorabove)
    for i, bar in enumerate(bars):
        if data[i] < avgv:
            bar.set_facecolor(colorbelow)
            bar.set_edgecolor(colorbelow)
    lbl = "Avg: " + precision % (avgv, )
    ax.axhline(avgv, lw=2, color='k', zorder=2, label=lbl)
    lbl = "1981-2010: " + precision % (a1981_2010, )
    ax.axhline(a1981_2010, lw=2, color='brown', zorder=2, label=lbl)
    ax.plot(years, tavg, lw=1.5, color='g', zorder=4,
            label='Trailing 30yr')
    ax.plot(years, tavg, lw=3, color='yellow', zorder=3)
    ax.set_xlim(years[0] - 1, years[-1] + 1)
    if ptype.find('precip') == -1 and ptype.find('days') == -1:
        ax.set_ylim(min(data) - 5, max(data) + 5)
    ax.set_xlabel("Year")
    # raw string avoids the invalid escape sequence \c
    units = r"$^\circ$F"
    # BUGFIX: the original tested ``ptype.find('days') > 0`` which never
    # matches -- the "days-*" variables start with the substring (index
    # 0), so day-count plots were mislabeled in deg F
    if ptype.startswith('days'):
        units = "days"
    elif ptype.find('precip') > 0:
        units = "inches"
    ax.set_ylabel("%s [%s]" % (PDICT[ptype], units))
    ax.grid(True)
    ax.legend(ncol=3, loc='best', fontsize=10)
    msg = ("[%s] %s %s-%s %s") % (station, nt.sts[station]['name'],
                                  min(years), max(years), PDICT[ptype])
    if ptype.find("days") == 0:
        msg += " (%s)" % (threshold, )
    tokens = msg.split()
    # BUGFIX: len()/2 is a float under Python 3 and floats cannot be used
    # as slice bounds (TypeError); use integer floor division
    sz = len(tokens) // 2
    ax.set_title(" ".join(tokens[:sz]) + "\n" + " ".join(tokens[sz:]))
    return fig, df
def plotter(fdict):
    """Map total MRMS Q3 precipitation over a date span.

    Sums the daily ``p01d`` grids from the local IEMRE/MRMS netCDF file
    and renders either a contour or pcolormesh map for the requested
    sector (midwest or a single state).

    Args:
        fdict (dict): autoplot context / CGI form values

    Returns:
        matplotlib.figure.Figure, or a str error message when the request
        cannot be serviced (multi-year span, missing file, masked data)
    """
    import matplotlib
    matplotlib.use('agg')
    from pyiem.plot import MapPlot, nwsprecip
    ctx = util.get_autoplot_context(fdict, get_description())
    ptype = ctx['ptype']
    sdate = ctx['sdate']
    edate = ctx['edate']
    # netCDF files are per-year, so spans cannot cross a year boundary
    if sdate.year != edate.year:
        return 'Sorry, do not support multi-year plots yet!'
    days = (edate - sdate).days
    sector = ctx['sector']
    idx0 = iemre.daily_offset(sdate)
    idx1 = iemre.daily_offset(edate) + 1
    ncfn = "/mesonet/data/iemre/%s_mw_mrms_daily.nc" % (sdate.year, )
    if not os.path.isfile(ncfn):
        return "No MRMS data for that year, sorry."
    nc = netCDF4.Dataset(ncfn, 'r')
    lats = nc.variables['lat'][:]
    lons = nc.variables['lon'][:]
    if (idx1 - idx0) < 32:
        p01d = distance(np.sum(nc.variables['p01d'][idx0:idx1, :, :], 0),
                        'MM').value('IN')
    else:
        # Too much data can overwhelm this app, need to chunk it
        for i in range(idx0, idx1, 10):
            i2 = min([i + 10, idx1])
            if idx0 == i:
                p01d = distance(
                    np.sum(nc.variables['p01d'][i:i2, :, :], 0),
                    'MM').value('IN')
            else:
                p01d += distance(
                    np.sum(nc.variables['p01d'][i:i2, :, :], 0),
                    'MM').value('IN')
    nc.close()
    if sdate == edate:
        title = sdate.strftime("%-d %B %Y")
    else:
        title = "%s to %s (inclusive)" % (sdate.strftime("%-d %b %Y"),
                                          edate.strftime("%-d %b %Y"))
    if sector == 'midwest':
        state = None
    else:
        state = sector
        sector = 'state'
    m = MapPlot(sector=sector, state=state, axisbg='white',
                nocaption=True,
                title='NOAA MRMS Q3:: %s Total Precip' % (title,),
                subtitle='Data from NOAA MRMS Project, GaugeCorr and RadarOnly'
                )
    if np.ma.is_masked(np.max(p01d)):
        return 'Data Unavailable'
    # widen the contour levels as the accumulation period grows
    clevs = [0.01, 0.1, 0.3, 0.5, 1, 1.5, 2, 2.5, 3, 3.5, 4, 5, 6, 8, 10]
    if days > 6:
        clevs = [0.01, 0.3, 0.5, 1, 1.5, 2, 3, 4, 5, 6, 7, 8, 10, 15, 20]
    if days > 29:
        clevs = [0.01, 0.5, 1, 2, 3, 4, 5, 6, 8, 10, 15, 20, 25, 30, 35]
    if days > 90:
        clevs = [0.01, 1, 2, 3, 4, 5, 6, 8, 10, 15, 20, 25, 30, 35, 40]
    x, y = np.meshgrid(lons, lats)
    cmap = nwsprecip()
    cmap.set_over('k')
    if ptype == 'c':
        m.contourf(x, y, p01d, clevs, cmap=cmap, label='inches')
    else:
        m.pcolormesh(x, y, p01d, clevs, cmap=cmap, label='inches')
    if sector != 'midwest':
        m.drawcounties()
        m.drawcities()
    return m.fig
def plotter(fdict):
    """Heatmap of temperature departure under cloudy (or clear) skies.

    For each (week, local hour) cell, plots the difference between the
    average temperature when skies match the chosen condition and the
    all-sky average for that same cell.

    Args:
        fdict (dict): autoplot context / CGI form values

    Returns:
        (matplotlib.figure.Figure, pandas.DataFrame)

    Raises:
        NoDataFound: when station metadata is unavailable
    """
    pgconn = get_dbconn("asos")
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx["zstation"]
    which = ctx["which"]
    data = np.zeros((24, 52), "f")
    # sql fragment is chosen from a fixed two-way option, not user text
    sql = "in ('BKN','OVC')" if which == "cloudy" else "= 'CLR'"
    df = read_sql(
        """
    WITH data as (
        SELECT valid at time zone %s + '10 minutes'::interval as v,
        tmpf, skyc1, skyc2, skyc3, skyc4 from alldata
        WHERE station = %s and valid > '1973-01-01'
        and tmpf is not null and tmpf > -99 and tmpf < 150),
    climo as (
        select extract(week from v) as w, extract(hour from v) as hr,
        avg(tmpf) from data GROUP by w, hr),
    cloudy as (
        select extract(week from v) as w, extract(hour from v) as hr,
        avg(tmpf) from data WHERE skyc1 """ + sql + """
        or skyc2 """ + sql + """ or skyc3 """ + sql + """
        or skyc4 """ + sql + """ GROUP by w, hr)
    SELECT l.w as week, l.hr as hour, l.avg - c.avg as difference
    from cloudy l JOIN climo c on (l.w = c.w and l.hr = c.hr)
    """,
        pgconn,
        params=(ctx["_nt"].sts[station]["tzname"], station),
    )
    for _, row in df.iterrows():
        # ISO week 53 does not fit the 52-wide array; skip it
        if row[0] > 52:
            continue
        data[int(row["hour"]), int(row["week"]) - 1] = row["difference"]
    (fig, ax) = plt.subplots(1, 1, figsize=(8, 6))
    # symmetric color range about zero departure
    maxv = np.ceil(max([np.max(data), 0 - np.min(data)])) + 0.2
    cs = ax.imshow(
        data,
        aspect="auto",
        interpolation="nearest",
        vmin=(0 - maxv),
        vmax=maxv,
        cmap=plt.get_cmap(ctx["cmap"]),
    )
    a = fig.colorbar(cs)
    a.ax.set_ylabel(r"Temperature Departure $^{\circ}\mathrm{F}$")
    ax.grid(True)
    ab = ctx["_nt"].sts[station]["archive_begin"]
    if ab is None:
        raise NoDataFound("Unknown station metadata.")
    ax.set_title(
        ("[%s] %s %s-%s\nHourly Temp Departure "
         "(skies were %s vs all)")
        % (
            station,
            ctx["_nt"].sts[station]["name"],
            max([ab.year, 1973]),
            datetime.date.today().year,
            PDICT[ctx["which"]],
        )
    )
    ax.set_ylim(-0.5, 23.5)
    ax.set_ylabel(
        "Local Hour of Day, %s" % (ctx["_nt"].sts[station]["tzname"],)
    )
    ax.set_yticks((0, 4, 8, 12, 16, 20))
    ax.set_xticks(range(0, 55, 7))
    ax.set_xticklabels(
        (
            "Jan 1",
            "Feb 19",
            "Apr 8",
            "May 27",
            "Jul 15",
            "Sep 2",
            "Oct 21",
            "Dec 9",
        )
    )
    ax.set_yticklabels(("Mid", "4 AM", "8 AM", "Noon", "4 PM", "8 PM"))
    return fig, df
def plotter(fdict):
    """Raster of daily snow depth by winter season.

    Each row is one winter (labeled by its Nov/Dec year); columns span
    Nov 1 through May 1.  Missing values render masked/grey.

    Args:
        fdict (dict): autoplot context / CGI form values

    Returns:
        (matplotlib.figure.Figure, pandas.DataFrame indexed by day)

    Raises:
        NoDataFound: when metadata or observations are missing
    """
    pgconn = get_dbconn("coop")
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx["station"]
    ab = ctx["_nt"].sts[station]["archive_begin"]
    if ab is None:
        raise NoDataFound("Unknown station metadatab.")
    syear = max([ctx["syear"], ab.year])
    eyear = ctx["eyear"]
    sts = datetime.date(syear, 11, 1)
    ets = datetime.date(eyear + 1, 6, 1)
    table = "alldata_%s" % (station[:2], )
    # NOTE(review): this overwrites the user-selected eyear with the
    # current year for array sizing and the title -- looks intentional
    # (size the raster through the present) but worth confirming
    eyear = datetime.datetime.now().year
    obs = np.ma.ones((eyear - syear + 1, 183), "f") * -1
    df = read_sql(
        """
        SELECT year, extract(doy from day) as doy, snowd, day,
        case when month < 6 then year - 1 else year end as winter_year
        from """ + table + """
        WHERE station = %s and month in (11, 12, 1, 2, 3, 4)
        and snowd >= 0 and day between %s and %s
        """,
        pgconn,
        params=(station, sts, ets),
        index_col="day",
    )
    if df.empty:
        raise NoDataFound("No Data Found.")
    minyear = df["year"].min()
    maxyear = df["year"].max()
    for _, row in df.iterrows():
        # fold Jan-Apr (doy < 180) onto negative offsets so the season
        # runs contiguously; column 0 corresponds to Nov 1 (doy+61)
        doy = row["doy"] if row["doy"] < 180 else (row["doy"] - 365)
        obs[int(row["winter_year"]) - syear, int(doy) + 61] = row["snowd"]
    # mask cells never filled (left at -1) as missing
    obs.mask = np.where(obs < 0, True, False)
    # obs[obs == 0] = -1
    fig = plt.figure(figsize=(8, 8))
    ax = fig.add_axes([0.1, 0.1, 0.93, 0.8])
    ax.set_xticks((0, 29, 60, 91, 120, 151, 181))
    ax.set_xticklabels(
        ["Nov 1", "Dec 1", "Jan 1", "Feb 1", "Mar 1", "Apr 1", "May 1"])
    ax.set_ylabel("Year of Nov,Dec of Season Labeled")
    ax.set_xlabel("Date of Winter Season")
    ax.set_title(("[%s] %s\nDaily Snow Depth (%s-%s) [inches]")
                 % (station, ctx["_nt"].sts[station]["name"],
                    minyear, eyear))
    cmap = nwssnow()
    norm = mpcolors.BoundaryNorm(LEVELS, cmap.N)
    # masked (missing) cells grey, exact zeros white via set_under
    cmap.set_bad("#EEEEEE")
    cmap.set_under("white")
    res = ax.imshow(
        obs,
        aspect="auto",
        rasterized=True,
        norm=norm,
        interpolation="nearest",
        cmap=cmap,
        extent=[0, 182, eyear + 1 - 0.5, syear - 0.5],
    )
    fig.colorbar(res, spacing="proportional", ticks=LEVELS, extend="max")
    ax.grid(True)
    ax.set_ylim(maxyear + 0.5, minyear - 0.5)
    return fig, df
def plotter(fdict):
    """Build a calendar plot of daily observations vs NCDC climatology.

    Newer variant of the calendar-departure plot: uses the ``diff``
    helper for null-safe departures and skips/zero-fills missing wind
    data instead of coalescing in SQL.

    Args:
        fdict (dict): autoplot context / CGI form values

    Returns:
        (matplotlib figure from calendar_plot, pandas.DataFrame)

    Raises:
        NoDataFound: when no climatology exists for the paired site
    """
    pgconn = get_dbconn("iem")
    cursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx["station"]
    varname = ctx["var"]
    sdate = ctx["sdate"]
    edate = ctx["edate"]
    # Get Climatology, indexed by mmdd for the per-day lookups below
    cdf = read_sql(
        "SELECT to_char(valid, 'mmdd') as sday, "
        "round(high::numeric, 0) as high, "
        "round(low::numeric, 0) as low, "
        "round(((high + low) / 2.)::numeric, 0) as avg, "
        "precip from ncdc_climate81 WHERE station = %s ORDER by sday ASC",
        get_dbconn("coop"),
        params=(ctx["_nt"].sts[station]["ncdc81"], ),
        index_col="sday",
    )
    if cdf.empty:
        raise NoDataFound("No Data Found.")
    cursor.execute(
        """
        SELECT day, max_tmpf, min_tmpf, max_dwpf, min_dwpf,
        (max_tmpf + min_tmpf) / 2. as avg_tmpf, pday, avg_sknt
        from summary s JOIN stations t on (t.iemid = s.iemid)
        WHERE s.day >= %s and s.day <= %s and t.id = %s and t.network = %s
        ORDER by day ASC
        """,
        (sdate, edate, station, ctx["network"]),
    )
    rows = []
    data = {}
    for row in cursor:
        # null-safe departures from climatology; row[0] is the day
        hd = diff(row["max_tmpf"], cdf.at[row[0].strftime("%m%d"), "high"])
        ld = diff(row["min_tmpf"], cdf.at[row[0].strftime("%m%d"), "low"])
        ad = diff(row["avg_tmpf"], cdf.at[row[0].strftime("%m%d"), "avg"])
        avg_sknt = row["avg_sknt"]
        if avg_sknt is None:
            # skip the day entirely when wind is the plotted variable,
            # otherwise substitute zero so the record is kept
            if varname == "avg_smph":
                continue
            avg_sknt = 0
        rows.append(
            dict(
                day=row["day"],
                max_tmpf=row["max_tmpf"],
                avg_smph=(avg_sknt * units("knot")).to(
                    units("mile / hour")).m,
                min_dwpf=row["min_dwpf"],
                max_dwpf=row["max_dwpf"],
                high_departure=hd,
                low_departure=ld,
                avg_departure=ad,
                min_tmpf=row["min_tmpf"],
                pday=row["pday"],
            ))
        data[row[0]] = {"val": safe(rows[-1], varname)}
        # black for exact zero, else blue below / red above normal
        if data[row[0]]["val"] == "0":
            data[row[0]]["color"] = "k"
        elif varname == "high_departure":
            data[row[0]]["color"] = "b" if hd < 0 else "r"
        elif varname == "low_departure":
            data[row[0]]["color"] = "b" if ld < 0 else "r"
        elif varname == "avg_departure":
            data[row[0]]["color"] = "b" if ad < 0 else "r"
    df = pd.DataFrame(rows)
    title = "[%s] %s Daily %s" % (
        station,
        ctx["_nt"].sts[station]["name"],
        PDICT.get(varname),
    )
    subtitle = "%s thru %s" % (
        sdate.strftime("%-d %b %Y"),
        edate.strftime("%-d %b %Y"),
    )
    fig = calendar_plot(sdate, edate, data, title=title, subtitle=subtitle)
    return fig, df
def plotter(fdict):
    """Produce the climodat monthly/yearly text report and dataframe.

    Aggregates the chosen variable (precip sum or temperature mean) by
    month, calendar year, and water year (Oct 1 - Sep 30), emitting both
    a fixed-width text report and a tidy DataFrame.

    Args:
        fdict (dict): autoplot context / CGI form values

    Returns:
        (None, pandas.DataFrame indexed by YEAR, str report text) --
        no figure is produced by this app

    Raises:
        NoDataFound: when the station has no observations
    """
    pgconn = get_dbconn("coop")
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx["station"]
    varname = ctx["var"]
    table = "alldata_%s" % (station[:2], )
    # exclude the current (incomplete) month
    today = datetime.date.today().replace(day=1)
    df = read_sql(
        """
        SELECT year, month,
        case when month in (10, 11, 12) then year + 1 else year end
            as water_year,
        sum(precip) as precip, avg(high) as avg_high,
        avg(low) as avg_low, avg((high+low)/2.) as avg_temp
        from """ + table + """
        WHERE station = %s and day < %s
        GROUP by year, water_year, month
        ORDER by year ASC, month ASC
        """,
        pgconn,
        params=(station, today),
        index_col=None,
    )
    if df.empty:
        raise NoDataFound("No Data Found.")
    res = ("# IEM Climodat https://mesonet.agron.iastate.edu/climodat/\n"
           "# Report Generated: %s\n"
           "# Climate Record: %s -> %s, "
           "WYEAR column is Water Year Oct 1 - Sep 30\n"
           "# Site Information: [%s] %s\n"
           "# Contact Information: "
           "Daryl Herzmann [email protected] 515.294.5978\n") % (
               datetime.date.today().strftime("%d %b %Y"),
               ctx["_nt"].sts[station]["archive_begin"].date(),
               datetime.date.today(),
               station,
               ctx["_nt"].sts[station]["name"],
           )
    res += ("# %s\n"
            "YEAR JAN FEB MAR APR MAY JUN JUL AUG SEP "
            "OCT NOV DEC ANN WYEAR\n") % (LABELS[varname], )
    years = df["year"].unique()
    years.sort()
    grouped = df.set_index(["year", "month"])
    # precip aggregates by sum; temperatures by mean
    yrsum = df.groupby("year")[varname].sum()
    wyrsum = df.groupby("water_year")[varname].sum()
    yrmean = df.groupby("year")[varname].mean()
    wyrmean = df.groupby("water_year")[varname].mean()
    prec = 2 if varname == "precip" else 0
    for year in years:
        yrtot = yrsum[year]
        wyrtot = wyrsum.get(year, 0)
        if varname != "precip":
            yrtot = yrmean[year]
            wyrtot = wyrmean.get(year, 0)
        res += ("%s%6s%6s%6s%6s%6s%6s%6s%6s%6s%6s%6s%6s%6s%6s\n") % (
            year,
            p(grouped, year, 1, varname, prec),
            p(grouped, year, 2, varname, prec),
            p(grouped, year, 3, varname, prec),
            p(grouped, year, 4, varname, prec),
            p(grouped, year, 5, varname, prec),
            p(grouped, year, 6, varname, prec),
            p(grouped, year, 7, varname, prec),
            p(grouped, year, 8, varname, prec),
            p(grouped, year, 9, varname, prec),
            p(grouped, year, 10, varname, prec),
            p(grouped, year, 11, varname, prec),
            p(grouped, year, 12, varname, prec),
            myformat(yrtot, 2),
            myformat(wyrtot, 2),
        )
    # overall period-of-record means for the final MEAN row
    yrtot = yrmean.mean() if varname != "precip" else yrsum.mean()
    wyrtot = wyrmean.mean() if varname != "precip" else wyrsum.mean()
    res += ("MEAN%6.2f%6.2f%6.2f%6.2f%6.2f%6.2f%6.2f%6.2f%6.2f%6.2f"
            "%6.2f%6.2f%6.2f%6.2f\n") % (
        df[df["month"] == 1][varname].mean(),
        df[df["month"] == 2][varname].mean(),
        df[df["month"] == 3][varname].mean(),
        df[df["month"] == 4][varname].mean(),
        df[df["month"] == 5][varname].mean(),
        df[df["month"] == 6][varname].mean(),
        df[df["month"] == 7][varname].mean(),
        df[df["month"] == 8][varname].mean(),
        df[df["month"] == 9][varname].mean(),
        df[df["month"] == 10][varname].mean(),
        df[df["month"] == 11][varname].mean(),
        df[df["month"] == 12][varname].mean(),
        yrtot,
        wyrtot,
    )
    # create a better resulting dataframe
    resdf = pd.DataFrame(index=years)
    resdf.index.name = "YEAR"
    for i, month_abbr in enumerate(calendar.month_abbr):
        if i == 0:
            continue
        resdf[month_abbr.upper()] = df[df["month"] == i].set_index(
            "year")[varname]
    resdf["ANN"] = yrmean if varname != "precip" else yrsum
    resdf["WATER YEAR"] = wyrmean if varname != "precip" else wyrsum
    return None, resdf, res
def plotter(fdict):
    """Plot extremes of N-day moving aggregates for 1..31 day windows.

    For each window length, SQL window functions compute the running
    avg/min/max of the chosen variable; the extremes of those runs over
    the period of record are then charted against the window length.

    Args:
        fdict (dict): autoplot context / CGI form values

    Returns:
        (matplotlib.figure.Figure, pandas.DataFrame)
    """
    pgconn = get_dbconn("coop")
    ccursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx["station"]
    varname = ctx["var"]
    table = "alldata_%s" % (station[:2], )
    nt = network.Table("%sCLIMATE" % (station[:2], ))
    rows = []
    # one query per window length; varname comes from the app's own
    # select-option list, not free-form user input
    for dy in range(1, 32):
        ccursor.execute(
            """
            with data as (
                select day,
                avg(""" + varname + """) OVER (ORDER by day ASC rows
                    between %s preceding and current row),
                min(""" + varname + """) OVER (ORDER by day ASC rows
                    between %s preceding and current row),
                max(""" + varname + """) OVER (ORDER by day ASC rows
                    between %s preceding and current row)
                from """ + table + """ where station = %s)
            SELECT max(avg), min(avg), max(min), min(min),
            max(max), min(max) from data
            """,
            (dy - 1, dy - 1, dy - 1, station),
        )
        row = ccursor.fetchone()
        rows.append(
            dict(
                days=dy,
                highest_avg=row[0],
                lowest_avg=row[1],
                highest_min=row[2],
                lowest_min=row[3],
                highest_max=row[4],
                lowest_max=row[5],
            ))
    df = pd.DataFrame(rows)
    fig, ax = plt.subplots(1, 1, figsize=(8, 6))
    if varname == "precip":
        ax.plot(
            np.arange(1, 32),
            df["highest_avg"],
            color="b",
            label="Highest Average",
            lw=2,
        )
        ax.plot(
            np.arange(1, 32),
            df["highest_min"],
            color="g",
            label="Consec Days Over",
            lw=2,
        )
        ax.plot(
            np.arange(1, 32),
            df["lowest_min"],
            color="r",
            label="Consec Days Under",
            lw=2,
        )
    else:
        ax.plot(np.arange(1, 32), df["highest_avg"],
                label="Highest Average", lw=2)
        ax.plot(np.arange(1, 32), df["lowest_avg"],
                label="Lowest Average", lw=2)
        ax.plot(np.arange(1, 32), df["highest_min"],
                label="Highest Above", lw=2)
        ax.plot(np.arange(1, 32), df["lowest_max"],
                label="Lowest Below", lw=2)
    msg = ("[%s] %s Statistics of %s over 1-31 Consecutive Days") % (
        station,
        nt.sts[station]["name"],
        PDICT.get(varname),
    )
    # split the title across two lines at roughly its midpoint
    tokens = msg.split()
    sz = int(len(tokens) / 2)
    ax.set_title(" ".join(tokens[:sz]) + "\n" + " ".join(tokens[sz:]))
    ax.set_ylabel("%s (%s)" % (PDICT.get(varname), UNITS.get(varname)))
    ax.set_xlabel("Consecutive Days")
    ax.grid(True)
    ax.set_xlim(0.5, 31.5)
    # Shrink current axis's height by 10% on the bottom
    box = ax.get_position()
    ax.set_position(
        [box.x0, box.y0 + box.height * 0.15, box.width,
         box.height * 0.85])
    ax.legend(
        loc="upper center",
        bbox_to_anchor=(0.5, -0.1),
        fancybox=True,
        shadow=True,
        ncol=3,
        scatterpoints=1,
        fontsize=12,
    )
    return fig, df
def plotter(fdict):
    """Heatmap of USDA NASS crop-progress, linearly interpolated to days.

    Weekly progress reports are interpolated to daily values per year and
    rendered as a year (rows) by day-of-year (cols) image; an "X" marks
    where each prior year reached the latest year's current value.

    Returns:
      (matplotlib.figure.Figure, pandas.DataFrame)

    Raises:
      ValueError: when the query returns no rows.
    """
    import matplotlib
    matplotlib.use('agg')
    import matplotlib.pyplot as plt
    import matplotlib.cm as cm
    pgconn = get_dbconn('coop')
    ctx = get_autoplot_context(fdict, get_description())
    state = ctx['state'][:2]
    unit_desc = ctx['unit_desc'].upper()
    commodity_desc = ctx['commodity_desc'].upper()
    # NASS files CORN "PCT HARVESTED" under the GRAIN utilization practice
    util_practice_desc = ('GRAIN'
                          if (unit_desc == 'PCT HARVESTED'
                              and commodity_desc == 'CORN')
                          else 'ALL UTILIZATION PRACTICES')
    df = read_sql("""
        select year, week_ending, num_value from nass_quickstats
        where commodity_desc = %s and statisticcat_desc = 'PROGRESS'
        and unit_desc = %s and state_alpha = %s and
        util_practice_desc = %s and num_value is not null
        ORDER by week_ending ASC
    """, pgconn, params=(commodity_desc, unit_desc, state,
                         util_practice_desc), index_col=None)
    if df.empty:
        raise ValueError("ERROR: No data found!")
    df['yeari'] = df['year'] - df['year'].min()
    (fig, ax) = plt.subplots(1, 1)
    year0 = int(df['year'].min())
    lastyear = int(df['year'].max())
    # -1 is the "missing" sentinel that gets masked below
    data = np.ma.ones((df['yeari'].max() + 1, 366), 'f') * -1
    data.mask = np.where(data == -1, True, False)
    lastrow = None
    for _, row in df.iterrows():
        if lastrow is None:
            lastrow = row
            continue
        date = row["week_ending"]
        ldate = lastrow["week_ending"]
        val = int(row['num_value'])
        lval = int(lastrow['num_value'])
        d0 = int(ldate.strftime("%j"))
        d1 = int(date.strftime("%j"))
        if ldate.year == date.year:
            # linear interpolation between consecutive weekly reports
            delta = (val - lval) / float(d1 - d0)
            for i, jday in enumerate(range(d0, d1 + 1)):
                data[date.year - year0, jday] = lval + i * delta
        else:
            # year rollover: assume the previous year finished at 100%
            data[ldate.year - year0, d0:] = 100
        lastrow = row
    # latest year's most recent value; mark where prior years hit it
    dlast = np.max(data[-1, :])
    for year in range(year0, lastyear):
        idx = np.digitize([dlast, ], data[year - year0, :])
        ax.text(idx[0], year, "X", va='center', zorder=2, color='white')
    cmap = cm.get_cmap('jet')
    res = ax.imshow(data, extent=[1, 367, lastyear + 0.5, year0 - 0.5],
                    aspect='auto', interpolation='none', cmap=cmap)
    fig.colorbar(res)
    # first day-of-year of each month for the x ticks
    ax.set_xticks((1, 32, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335,
                   365))
    ax.set_xticklabels(calendar.month_abbr[1:])
    # We need to compute the domain of this plot
    maxv = np.max(data, 0)
    minv = np.min(data, 0)
    ax.set_xlim(np.argmax(maxv > 0) - 7, np.argmax(minv > 99) + 7)
    ax.set_ylim(lastyear + 0.5, year0 - 0.5)
    ax.grid(True)
    lastweek = df['week_ending'].max()
    ax.set_xlabel("X denotes %s value of %.0f%%" % (
        lastweek.strftime("%d %b %Y"), dlast))
    ax.set_title(("USDA NASS %i-%i %s %s %s Progress\n"
                  "Daily Linear Interpolated Values Between Weekly Reports"
                  ) % (year0, lastyear, state, PDICT2.get(commodity_desc),
                       PDICT.get(unit_desc)))
    return fig, df
def plotter(fdict):
    """Bar chart: frequency of overcast skies binned by air temperature.

    Counts METAR reports with any OVC sky layer per whole-degree
    temperature bin; bins with fewer than 3 observations are hidden.

    Returns:
      (matplotlib.figure.Figure, pandas.DataFrame)
    """
    import matplotlib
    matplotlib.use('agg')
    import matplotlib.pyplot as plt
    pgconn = get_dbconn('asos')
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['zstation']
    network = ctx['network']
    month = ctx['month']
    nt = NetworkTable(network)
    # translate the form's month/season choice into a list of month numbers
    if month == 'all':
        months = range(1, 13)
    elif month == 'fall':
        months = [9, 10, 11]
    elif month == 'winter':
        months = [12, 1, 2]
    elif month == 'spring':
        months = [3, 4, 5]
    elif month == 'summer':
        months = [6, 7, 8]
    else:
        ts = datetime.datetime.strptime("2000-" + month + "-01", '%Y-%b-%d')
        # make sure it is length two for the trick below in SQL
        months = [ts.month, 999]
    df = read_sql("""
        SELECT tmpf::int as t,
        SUM(case when (skyc1 = 'OVC' or skyc2 = 'OVC' or skyc3 = 'OVC'
            or skyc4 = 'OVC') then 1 else 0 end) as hits,
        count(*) from alldata
        where station = %s and tmpf is not null
        and extract(month from valid) in %s
        GROUP by t ORDER by t ASC
    """, pgconn, params=(station, tuple(months)), index_col=None)
    df['freq'] = df['hits'] / df['count'] * 100.
    # require 3+ obs in a temperature bin before plotting it
    df2 = df[df['count'] > 2]
    # overall overcast frequency across all bins
    avg = df['hits'].sum() / float(df['count'].sum()) * 100.
    (fig, ax) = plt.subplots(1, 1)
    ax.bar(df2['t'], df2['freq'], ec='green', fc='green', width=1)
    ax.grid(True, zorder=11)
    ax.set_title(("%s [%s]\nFrequency of Overcast Clouds by "
                  "Air Temperature (month=%s) "
                  "(%s-%s)\n"
                  "(must have 3+ hourly observations at the given temperature)"
                  ) % (nt.sts[station]['name'], station, month.upper(),
                       nt.sts[station]['archive_begin'].year,
                       datetime.datetime.now().year), size=10)
    ax.set_ylabel("Cloudiness Frequency [%]")
    ax.set_ylim(0, 100)
    ax.set_xlabel(r"Air Temperature $^\circ$F")
    # highlight freezing when the domain reaches cold enough temps
    if df2['t'].min() < 30:
        ax.axvline(32, lw=2, color='k')
        ax.text(32, -4, "32", ha='center')
    ax.axhline(avg, lw=2, color='k')
    ax.text(df2['t'].min() + 5, avg + 2, "Avg: %.1f%%" % (avg, ))
    return fig, df
def plotter(fdict):
    """Empirical CDF of VTEC event durations (initial vs final) for a UGC.

    Returns:
      (matplotlib.figure.Figure, pandas.DataFrame)

    Raises:
      ValueError: when fewer than two events match.
    """
    import matplotlib
    matplotlib.use('agg')
    import matplotlib.pyplot as plt
    pgconn = get_dbconn('postgis')
    cursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    ctx = get_autoplot_context(fdict, get_description())
    ugc = ctx['ugc']
    phenomena = ctx['phenomena']
    significance = ctx['significance']
    (fig, ax) = plt.subplots(1, 1, figsize=(8, 6))
    # resolve the issuing WFO + pretty name for this UGC
    cursor.execute("""
        SELECT s.wfo, s.tzname, u.name from ugcs u
        JOIN stations s on (u.wfo = s.id)
        where ugc = %s and end_ts is null and s.network = 'WFO'
    """, (ugc,))
    wfo = None
    name = ""
    if cursor.rowcount == 1:
        row = cursor.fetchone()
        wfo = row[0]
        name = row[2]
    cursor.execute("""
        SELECT expire - issue, init_expire - issue,
        issue at time zone 'UTC' from warnings
        WHERE ugc = %s and phenomena = %s and significance = %s
        and wfo = %s and expire > issue and init_expire > issue
    """, (ugc, phenomena, significance, wfo))
    if cursor.rowcount < 2:
        raise ValueError("No Results Found, try flipping zone/county")
    rows = []
    for row in cursor:
        # durations in minutes
        rows.append(dict(final=row[0].total_seconds() / 60.,
                         initial=row[1].total_seconds() / 60.,
                         issue=row[2]))
    df = pd.DataFrame(rows)
    titles = {'initial': 'Initial Issuance', 'final': 'Final Duration'}
    for col in ['final', 'initial']:
        # build a step CDF, collapsing ties onto one x value
        sortd = df.sort_values(by=col)
        x = []
        y = []
        i = 0
        for _, row in sortd.iterrows():
            i += 1
            if i == 1:
                x.append(row[col])
                y.append(i)
                continue
            if x[-1] == row[col]:
                y[-1] = i
                continue
            y.append(i)
            x.append(row[col])
        ax.plot(x, np.array(y) / float(y[-1]) * 100., lw=2,
                label=titles[col])
    ax.grid()
    ax.legend(loc=2, ncol=2, fontsize=12)
    ax.set_ylim(0, 100)
    ax.set_yticks([0, 5, 10, 25, 50, 75, 90, 95, 100])
    # NOTE: x/y intentionally leak from the last loop iteration ('initial')
    # and drive the axis scaling below
    if x[-1] < 120:
        xmax = x[-1] + 10 - (x[-1] % 10)
        ax.set_xlim(0, xmax)
        ax.set_xticks(np.arange(0, xmax+1, 10))
        ax.set_xlabel("Duration [minutes]")
    else:
        xmax = x[-1] + 60 - (x[-1] % 60)
        ax.set_xlim(0, xmax)
        ax.set_xticks(np.arange(0, xmax+1, 60))
        ax.set_xticklabels(np.arange(0, (xmax+1)/60))
        ax.set_xlabel("Duration [hours]")
    # %% renders a literal percent sign through the %-format below
    ax.set_ylabel("Frequency [%%] out of %s Events" % (y[-1],))
    ax.set_title(("[%s] %s :: %s (%s.%s)\n"
                  "Distribution of Event Time Duration %s-%s"
                  ) % (ugc, name,
                       vtec.get_ps_string(phenomena, significance),
                       phenomena, significance,
                       min(df['issue']).strftime("%-d %b %Y"),
                       max(df['issue']).strftime("%-d %b %Y")))
    return fig, df
def plotter(fdict):
    """Map first/last date a temperature threshold was met, per station.

    Uses SQLOPT/YRGP/ORDER/MONTH_DOMAIN lookup dicts (defined at module
    scope) to assemble the SQL per variable choice; either a single
    year's dates or a percentile date over the period of record is mapped.

    Returns:
      (matplotlib.figure.Figure, pandas.DataFrame)

    Raises:
      NoDataFound: for multi-state sectors or an empty result.
    """
    pgconn = get_dbconn("coop")
    ctx = get_autoplot_context(fdict, get_description())
    sector = ctx["sector"]
    if len(sector) != 2:
        raise NoDataFound("Sorry, this app doesn't support multi-state plots.")
    varname = ctx["var"]
    year = ctx["year"]
    popt = ctx["popt"]
    threshold = ctx["threshold"]
    table = "alldata_%s" % (sector, )
    nt = NetworkTable("%sCLIMATE" % (sector, ))
    syear = ctx.get("syear", 1893)
    eyear = ctx.get("eyear", datetime.date.today().year)
    df = read_sql(
        """
        -- get the domain of data
        WITH events as (
            SELECT
            station, month,
            case when month < 7 then year - 1 else year end as winter_year,
            year,
            extract(doy from day) as doy,
            day from """ + table + """
            WHERE """ + SQLOPT[varname] + """ and month in %s
            and substr(station, 3, 4) != '0000'
            and substr(station, 3, 1) not in ('C', 'T')
            and year >= %s and year <= %s
        ), agg as (
            SELECT station, winter_year, year, doy, day,
            case when month < 7 then doy + 366 else doy end as winter_doy,
            rank() OVER (
                PARTITION by """ + YRGP[varname] + """, station
                ORDER by day """ + ORDER[varname] + """)
            from events)
        select * from agg where rank = 1
        """,
        pgconn,
        params=(threshold, tuple(MONTH_DOMAIN[varname]), syear, eyear),
        index_col="station",
    )
    doy = USEDOY[varname]

    def f(val):
        """Make a pretty date."""
        base = datetime.date(2000, 1, 1)
        date = base + datetime.timedelta(days=int(val))
        return date.strftime("%-m/%-d")

    if ctx.get("p") is None:
        # single-year mode
        df2 = df[df[YRGP[varname]] == year].copy()
        title = r"%s %s %s$^\circ$F" % (year, PDICT2[varname], threshold)
        df2["pdate"] = df2["day"].apply(lambda x: x.strftime("%-m/%-d"))
        extra = ""
    else:
        # percentile-of-record mode
        df2 = df[[doy]].groupby("station").quantile(ctx["p"] / 100.0).copy()
        title = r"%.0f%s Percentile Date of %s %s$^\circ$F" % (
            ctx["p"],
            th(str(ctx["p"])),
            PDICT2[varname],
            threshold,
        )
        df2["pdate"] = df2[doy].apply(f)
        extra = ", period of record: %.0f-%.0f" % (
            df["year"].min(),
            df["year"].max(),
        )
    if df2.empty:
        raise NoDataFound("No Data was found")
    # attach lat/lon metadata for the stations we know about
    for station in df2.index.values:
        if station not in nt.sts:
            continue
        df2.at[station, "lat"] = nt.sts[station]["lat"]
        df2.at[station, "lon"] = nt.sts[station]["lon"]
    mp = MapPlot(
        sector="state",
        state=ctx["sector"],
        continental_color="white",
        nocaption=True,
        title=title,
        subtitle="based on NWS COOP and IEM Daily Estimates%s" % (extra, ),
    )
    levs = np.linspace(df2[doy].min() - 1, df2[doy].max() + 1, 7, dtype="i")
    if "cint" in ctx:
        levs = np.arange(df2[doy].min() - 1, df2[doy].max() + 1,
                         ctx["cint"], dtype="i")
    levlables = list(map(f, levs))
    # only contour when the range of levels is meaningful
    if popt == "contour" and (levs[-1] - levs[0]) > 5:
        mp.contourf(
            df2["lon"],
            df2["lat"],
            df2[doy],
            levs,
            clevlabels=levlables,
            cmap=ctx["cmap"],
        )
    mp.plot_values(df2["lon"], df2["lat"], df2["pdate"], labelbuffer=5)
    mp.drawcounties()
    return mp.fig, df[["year", "winter_doy", "doy"]]
def get_ctx(fdict):
    """Get the plotting context for trailing-day precipitation ranks.

    Builds a (years x 732) precip array where each year's daily values are
    stored twice (offset by 366) so a trailing window ending on any date
    can span a year boundary, then computes rank/departure/percentage/SPI
    series for window lengths 1..365 ending on the chosen date.

    Returns:
      dict: the autoplot ctx, augmented with plot-ready series.
    """
    pgconn = get_dbconn('coop')
    cursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['station']
    date = ctx['date']
    opt = ctx['opt']
    table = "alldata_%s" % (station[:2],)
    nt = NetworkTable("%sCLIMATE" % (station[:2],))
    cursor.execute("""
        SELECT year, extract(doy from day) as doy, precip
        from """ + table + """ where station = %s and precip is not null
    """, (station,))
    baseyear = nt.sts[station]['archive_begin'].year - 1
    ctx['years'] = (datetime.datetime.now().year - baseyear) + 1
    # each row holds two copies of a year so windows can cross 1 Jan:
    data = np.zeros((ctx['years'], 367*2))
    # 1892 1893
    # 1893 1894
    # 1894 1895
    for row in cursor:
        # left hand
        data[int(row['year'] - baseyear), int(row['doy'])] = row['precip']
        # right hand
        data[int(row['year'] - baseyear - 1),
             int(row['doy']) + 366] = row['precip']
    _temp = date.replace(year=2000)
    _doy = int(_temp.strftime("%j"))
    # month boundaries over the trailing 366 days, for the x axis
    xticks = []
    xticklabels = []
    for i in range(-366, 0):
        ts = _temp + datetime.timedelta(days=i)
        if ts.day == 1:
            xticks.append(i)
            xticklabels.append(ts.strftime("%b"))
    ranks = []
    departures = []
    percentages = []
    totals = []
    maxes = []
    avgs = []
    spi = []
    ptile = []
    myyear = date.year - baseyear - 1
    for days in range(1, 366):
        # trailing window [idx0, idx1) ending on the chosen date
        idx0 = _doy + 366 - days
        idx1 = _doy + 366
        sums = np.sum(data[:, idx0:idx1], 1)
        thisyear = sums[myyear]
        sums = np.sort(sums)
        arr = np.digitize([thisyear, ], sums)
        if thisyear == 0:
            # zero precip ties for last place
            rank = ctx['years']
        else:
            rank = ctx['years'] - arr[0] + 1
        ranks.append(rank)
        ptile.append(rank / float(len(sums)) * 100.)
        totals.append(thisyear)
        maxes.append(sums[-1])
        avgs.append(np.nanmean(sums))
        departures.append(thisyear - avgs[-1])
        percentages.append(thisyear / avgs[-1] * 100)
        # standardized anomaly (SPI-like, not a fitted distribution)
        spi.append((thisyear - avgs[-1]) / np.nanstd(sums))
    ctx['sdate'] = date - datetime.timedelta(days=360)
    ctx['title'] = "%s %s" % (station, nt.sts[station]['name'])
    ctx['subtitle'] = ("Trailing Days Precip %s [%s-%s] to %s"
                       ) % (PDICT[opt], baseyear+2,
                            datetime.datetime.now().year,
                            date.strftime("%-d %b %Y"))
    ctx['ranks'] = ranks
    ctx['departures'] = departures
    ctx['percentages'] = percentages
    ctx['spi'] = spi
    ctx['percentiles'] = ptile
    # pick the secondary-axis series per the form option
    if opt == 'per':
        ctx['y2'] = ctx['percentages']
    elif opt == 'dep':
        ctx['y2'] = ctx['departures']
    elif opt == 'spi':
        ctx['y2'] = ctx['spi']
    elif opt == 'ptile':
        ctx['y2'] = ctx['percentiles']
    else:
        ctx['y2'] = ctx['ranks']
    ctx['totals'] = totals
    ctx['maxes'] = maxes
    ctx['avgs'] = avgs
    ctx['xticks'] = xticks
    ctx['xticklabels'] = xticklabels
    ctx['station'] = station
    ctx['y2label'] = PDICT[opt]
    return ctx
def plotter(fdict):
    """Yearly stats over a fixed calendar window, plus an exceedance CDF.

    Top panel: one bar per year of the chosen statistic over the window
    starting month/day for `days` days (highlighting the chosen year).
    Bottom panel: exceedance-frequency curve of the same statistic.

    Returns:
      (matplotlib.figure.Figure, pandas.DataFrame)

    Raises:
      NoDataFound: when the query returns no rows.
    """
    pgconn = get_dbconn("coop")
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx["station"]
    days = ctx["days"]
    gddbase = ctx["base"]
    gddceil = ctx["ceil"]
    # 2012 is only used to enumerate month/day strings (leap year safe)
    sts = datetime.date(2012, ctx["month"], ctx["day"])
    ets = sts + datetime.timedelta(days=(days - 1))
    varname = ctx["varname"]
    year = ctx["year"]
    threshold = ctx["thres"]
    table = "alldata_%s" % (station[:2], )
    nt = network.Table("%sCLIMATE" % (station[:2], ))
    sdays = []
    for i in range(days):
        ts = sts + datetime.timedelta(days=i)
        sdays.append(ts.strftime("%m%d"))
    # shift the grouping year when the window crosses 1 Jan
    doff = (days + 1) if ets.year != sts.year else 0
    df = read_sql(
        """
    SELECT extract(year from day - '""" + str(doff) + """ days'::interval)
        as yr,
    avg((high+low)/2.) as avg_temp, avg(high) as avg_high_temp,
    sum(gddxx(%s, %s, high, low)) as gdd,
    avg(low) as avg_low_temp,
    sum(precip) as precip, sum(snow) as snow,
    min(low) as min_low,
    max(low) as max_low,
    max(high) as max_high,
    min(high) as min_high,
    sum(case when high >= %s then 1 else 0 end) as "days-high-above",
    sum(case when high < %s then 1 else 0 end) as "days-high-below",
    sum(case when low >= %s then 1 else 0 end) as "days-lows-above",
    sum(case when low < %s then 1 else 0 end) as "days-lows-below",
    count(*)
    from """ + table + """ WHERE station = %s and sday in %s
    GROUP by yr ORDER by yr ASC
    """,
        pgconn,
        params=(
            gddbase,
            gddceil,
            threshold,
            threshold,
            threshold,
            threshold,
            station,
            tuple(sdays),
        ),
    )
    if df.empty:
        raise NoDataFound("No Data Found.")
    # require at least 90% coverage
    df = df[df["count"] >= (days * 0.9)]
    # require values , not nan
    df = df[df[varname].notnull()]
    (fig, ax) = plt.subplots(2, 1, figsize=(8, 6))
    bars = ax[0].bar(df["yr"], df[varname], facecolor="r", edgecolor="r")
    thisvalue = "M"
    # recolor the selected year's bar and remember its value
    for mybar, x, y in zip(bars, df["yr"], df[varname]):
        if x == year:
            mybar.set_facecolor("g")
            mybar.set_edgecolor("g")
            thisvalue = y
    ax[0].set_xlabel("Year, %s = %s" % (year, nice(thisvalue)))
    ax[0].axhline(df[varname].mean(), lw=2,
                  label="Avg: %.2f" % (df[varname].mean(), ))
    ylabel = r"Temperature $^\circ$F"
    if varname in ["precip"]:
        ylabel = "Precipitation [inch]"
    elif varname in ["snow"]:
        ylabel = "Snowfall [inch]"
    elif varname.find("days") > -1:
        ylabel = "Days"
    elif varname == "gdd":
        ylabel = r"Growing Degree Days (%s,%s) $^\circ$F" % (gddbase,
                                                             gddceil)
    ax[0].set_ylabel(ylabel)
    title = PDICT.get(varname).replace("(threshold)", str(threshold))
    ax[0].set_title(
        ("[%s] %s\n%s from %s through %s")
        % (
            station,
            nt.sts[station]["name"],
            title,
            sts.strftime("%d %b"),
            ets.strftime("%d %b"),
        ),
        fontsize=12,
    )
    ax[0].grid(True)
    ax[0].legend(ncol=2, fontsize=10)
    ax[0].set_xlim(df["yr"].min() - 1, df["yr"].max() + 1)
    rng = df[varname].max() - df[varname].min()
    if varname in ["snow", "precip"] or varname.startswith("days"):
        ax[0].set_ylim(-0.1, df[varname].max() + rng * 0.3)
    else:
        ax[0].set_ylim(df[varname].min() - rng * 0.3,
                       df[varname].max() + rng * 0.3)
    box = ax[0].get_position()
    ax[0].set_position([box.x0, box.y0 + 0.02, box.width,
                        box.height * 0.98])

    # Plot 2: CDF
    df2 = df[df[varname].notnull()]
    X2 = np.sort(df2[varname])
    ptile = np.percentile(df2[varname], [0, 5, 50, 95, 100])
    N = len(df2[varname])
    F2 = np.array(range(N)) / float(N) * 100.0
    ax[1].plot(X2, 100.0 - F2)
    ax[1].set_xlabel(ylabel)
    ax[1].set_ylabel("Observed Frequency [%]")
    ax[1].grid(True)
    ax[1].set_yticks([0, 5, 10, 25, 50, 75, 90, 95, 100])
    if thisvalue != "M":
        ax[1].axvline(thisvalue, color="g")
    mysort = df.sort_values(by=varname, ascending=True)
    info = ("Min: %.2f %.0f\n95th: %.2f\nMean: %.2f\nSTD: %.2f\n5th: %.2f\n"
            "Max: %.2f %.0f") % (
        df2[varname].min(),
        df["yr"][mysort.index[0]],
        ptile[1],
        np.average(df2[varname]),
        np.std(df2[varname]),
        ptile[3],
        df2[varname].max(),
        df["yr"][mysort.index[-1]],
    )
    ax[1].text(
        0.8,
        0.95,
        info,
        transform=ax[1].transAxes,
        va="top",
        bbox=dict(facecolor="white", edgecolor="k"),
    )
    return fig, df
def plotter(fdict):
    """Mosaic of NWS storm-based warning polygons for one UTC day.

    Renders each polygon as a thumbnail cell in a grid, annotated with
    VTEC id, size, and county-area-reduction (CAR) percentage, and emits
    an HTML <map> imagemap alongside.

    Returns:
      (matplotlib.figure.Figure, pandas.DataFrame, str): fig, data, and
      the imagemap HTML text.
    """
    ctx = get_autoplot_context(fdict, get_description())
    typ = ctx["typ"]
    sort = ctx["sort"]
    date = ctx["date"]
    pgconn = get_dbconn("postgis")
    sts = utc(date.year, date.month, date.day)
    ets = sts + datetime.timedelta(hours=24)
    # per-sort-option filename suffix and SQL ORDER BY clause
    opts = {
        "W": {
            "fnadd": "-wfo",
            "sortby": "wfo ASC, phenomena ASC, eventid ASC",
        },
        "S": {"fnadd": "", "sortby": "size DESC"},
        "T": {"fnadd": "-time", "sortby": "issue ASC"},
    }
    phenoms = {"W": ["TO", "SV"], "F": ["FF"], "M": ["MA"]}

    # Defaults
    thumbpx = 100
    cols = 10
    mybuffer = 10000
    header = 35

    # Find largest polygon either in height or width
    gdf = read_postgis(
        """
        SELECT wfo, phenomena, eventid, issue,
        ST_area2d(ST_transform(geom,2163)) as size,
        (ST_xmax(ST_transform(geom,2163)) +
         ST_xmin(ST_transform(geom,2163))) /2.0 as xc,
        (ST_ymax(ST_transform(geom,2163)) +
         ST_ymin(ST_transform(geom,2163))) /2.0 as yc,
        ST_transform(geom, 2163) as utmgeom,
        (ST_xmax(ST_transform(geom,2163)) -
         ST_xmin(ST_transform(geom,2163))) as width,
        (ST_ymax(ST_transform(geom,2163)) -
         ST_ymin(ST_transform(geom,2163))) as height
        from sbw_""" + str(sts.year) + """
        WHERE status = 'NEW' and issue >= %s and issue < %s and
        phenomena IN %s and eventid is not null
        ORDER by """ + opts[sort]["sortby"] + """
        """,
        pgconn,
        params=(sts, ets, tuple(phenoms[typ])),
        geom_col="utmgeom",
        index_col=None,
    )

    # For size reduction work
    df = read_sql(
        """
        SELECT w.wfo, phenomena, eventid,
        sum(ST_area2d(ST_transform(u.geom,2163))) as county_size
        from warnings_""" + str(sts.year) + """ w
        JOIN ugcs u on (u.gid = w.gid)
        WHERE issue >= %s and issue < %s and significance = 'W'
        and phenomena IN %s
        GROUP by w.wfo, phenomena, eventid
        """,
        pgconn,
        params=(sts, ets, tuple(phenoms[typ])),
        index_col=["wfo", "phenomena", "eventid"],
    )
    # Join the columns
    gdf = gdf.merge(df, on=["wfo", "phenomena", "eventid"])
    # CAR: percent area shaved off vs the county-based warning footprint
    gdf["ratio"] = (1.0 - (gdf["size"] / gdf["county_size"])) * 100.0

    # Make mosaic image
    events = len(df.index)
    rows = int(events / cols) + 1
    if events % cols == 0:
        rows -= 1
    if rows == 0:
        rows = 1
    ypixels = (rows * thumbpx) + header
    fig = plt.figure(figsize=(thumbpx * cols / 100.0, ypixels / 100.0))
    plt.axes([0, 0, 1, 1], facecolor="black")

    imagemap = StringIO()
    utcnow = utc()
    imagemap.write(
        "<!-- %s %s -->\n" % (utcnow.strftime("%Y-%m-%d %H:%M:%S"), sort)
    )
    imagemap.write("<map name='mymap'>\n")

    # Write metadata to image
    mydir = os.sep.join(
        [os.path.dirname(os.path.abspath(__file__)), "../../../images"]
    )
    logo = mpimage.imread("%s/logo_reallysmall.png" % (mydir,))
    y0 = fig.get_figheight() * 100.0 - logo.shape[0] - 5
    fig.figimage(logo, 5, y0, zorder=3)

    i = 0
    # amount of NDC y space we have for axes plotting
    ytop = 1 - header / float((rows * 100) + header)
    dy = ytop / float(rows)
    ybottom = ytop

    # Sumarize totals
    y = ytop
    dy2 = (1.0 - ytop) / 2.0
    for phenomena, df2 in gdf.groupby("phenomena"):
        car = (1.0 - df2["size"].sum() / df2["county_size"].sum()) * 100.0
        fitbox(
            fig,
            ("%i %s.W: Avg size %5.0f km^2 CAR: %.0f%%")
            % (len(df2.index), phenomena, df2["size"].mean() / 1e6, car),
            0.8,
            0.99,
            y,
            y + dy2,
            color=COLORS[phenomena],
        )
        y += dy2
    fitbox(
        fig,
        "NWS %s Storm Based Warnings issued %s UTC"
        % (
            " + ".join([VTEC_PHENOMENA[p] for p in phenoms[typ]]),
            sts.strftime("%d %b %Y"),
        ),
        0.05,
        0.79,
        ytop + dy2,
        0.999,
        color="white",
    )
    fitbox(
        fig,
        "Generated: %s UTC, IEM Autplot #203"
        % (utcnow.strftime("%d %b %Y %H:%M:%S"),),
        0.05,
        0.79,
        ytop,
        0.999 - dy2,
        color="white",
    )
    # We want to reserve 14pts at the bottom and buffer the plot by 10km
    # so we compute this in the y direction, since it limits us
    max_dimension = max([gdf["width"].max(), gdf["height"].max()])
    yspacing = max_dimension / 2.0 + mybuffer
    xspacing = yspacing * 1.08  # approx

    for _, row in gdf.iterrows():
        # - Map each polygon
        x0 = float(row["xc"]) - xspacing
        x1 = float(row["xc"]) + xspacing
        y0 = float(row["yc"]) - yspacing - (yspacing * 0.14)
        y1 = float(row["yc"]) + yspacing - (yspacing * 0.14)
        # grid cell position; start a new row every `cols` thumbnails
        col = i % 10
        if col == 0:
            ybottom -= dy
        ax = plt.axes(
            [col * 0.1, ybottom, 0.1, dy],
            facecolor="black",
            xticks=[],
            yticks=[],
            aspect="auto",
        )
        for x in ax.spines:
            ax.spines[x].set_visible(False)
        ax.set_xlim(x0, x1)
        ax.set_ylim(y0, y1)
        # utmgeom is presumably a (Multi)Polygon; iterate its members
        for poly in row["utmgeom"]:
            xs, ys = poly.exterior.xy
            color = COLORS[row["phenomena"]]
            ax.plot(xs, ys, color=color, lw=2)
        car = "NA"
        carColor = "white"
        if not pd.isnull(row["ratio"]):
            carf = row["ratio"]
            car = "%.0f" % (carf,)
            if carf > 75:
                carColor = "green"
            if carf < 25:
                carColor = "red"

        # Draw Text!
        issue = row["issue"]
        s = "%s.%s.%s.%s" % (
            row["wfo"],
            row["phenomena"],
            row["eventid"],
            issue.strftime("%H%M"),
        )
        # (w, h) = font10.getsize(s)
        # print s, h
        ax.text(
            0,
            0,
            s,
            transform=ax.transAxes,
            color="white",
            va="bottom",
            fontsize=7,
        )
        s = "%.0f sq km %s%%" % (row["size"] / 1000000.0, car)
        ax.text(
            0,
            0.1,
            s,
            transform=ax.transAxes,
            color=carColor,
            va="bottom",
            fontsize=7,
        )
        # Image map
        url = ("/vtec/#%s-O-NEW-K%s-%s-%s-%04i") % (
            sts.year,
            row["wfo"],
            row["phenomena"],
            "W",
            row["eventid"],
        )
        altxt = "Click for text/image"
        pos = ax.get_position()
        mx0 = pos.x0 * 1000.0
        my = (1.0 - pos.y1) * ypixels
        imagemap.write(
            (
                '<area href="%s" alt="%s" title="%s" '
                'shape="rect" coords="%.0f,%.0f,%.0f,%.0f">\n'
            )
            % (url, altxt, altxt, mx0, my, mx0 + thumbpx, my + thumbpx)
        )
        i += 1
    # row separator lines across the whole figure
    faux = plt.axes([0, 0, 1, 1], facecolor="None", zorder=100)
    for i in range(1, rows):
        faux.axhline(i * dy, lw=1.0, color="blue")
    imagemap.write("</map>")
    imagemap.seek(0)
    if gdf.empty:
        fitbox(fig, "No warnings Found!", 0.2, 0.8, 0.2, 0.5, color="white")
    df = gdf.drop(["utmgeom", "issue"], axis=1)
    return fig, df, imagemap.read()
def plotter(fdict):
    """Image of one month of ASOS cloud amount/level reports.

    Builds a (level x hour) matrix where levels are in 100s of feet up to
    25,000 ft and columns are hourly slots for the month; METAR sky
    layers (CLR/FEW/SCT/BKN/OVC) are painted into the matrix and shown
    via imshow.

    Returns:
      (matplotlib.figure.Figure, pandas.DataFrame), or an error string
      when no observations are found (legacy autoplot convention).
    """
    import matplotlib
    matplotlib.use('agg')
    import matplotlib.pyplot as plt
    import matplotlib.cm as cm
    pgconn = psycopg2.connect(database='asos', host='iemdb', user='******')
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['zstation']
    network = ctx['network']
    year = ctx['year']
    month = ctx['month']
    nt = NetworkTable(network)
    # Extract the range of forecasts for each day for approximately
    # the given month
    sts = datetime.datetime(year, month, 1, 0, 0)
    sts = sts.replace(tzinfo=pytz.timezone("UTC"))
    ets = (sts + datetime.timedelta(days=35)).replace(day=1)
    days = (ets - sts).days
    # -1 is the missing sentinel, masked out below
    data = np.ones((250, days * 24)) * -1
    df = read_sql("""
     SELECT valid, skyc1, skyc2, skyc3, skyc4, skyl1, skyl2, skyl3, skyl4
     from alldata where station = %s and valid BETWEEN %s and %s
     ORDER by valid ASC
    """, pgconn, params=(station, sts, ets), index_col=None)
    # map METAR sky condition codes to a grayscale intensity
    lookup = {'CLR': 0, 'FEW': 25, 'SCT': 50, 'BKN': 75, 'OVC': 100}
    if len(df.index) == 0:
        return "No database entries found for station, sorry!"
    for _, row in df.iterrows():
        # BUGFIX: cast to int -- float column indices raise with
        # modern numpy (py2 used to truncate implicitly)
        delta = int((row['valid'] - sts).total_seconds() / 3600 - 1)
        data[:, delta] = 0
        for i in range(1, 5):
            a = lookup.get(row['skyc%s' % (i,)], -1)
            if a >= 0:
                lvl = row['skyl%s' % (i,)]
                if lvl is not None and lvl > 0:
                    # BUGFIX: integer index of 100s of feet (was a
                    # float after true division on py3)
                    lvl = int(lvl / 100)
                    if lvl >= 250:
                        continue
                    data[lvl:lvl + 4, delta] = a
                    # everything above the layer is at most BKN-shaded
                    data[lvl + 3:, delta] = min(a, 75)
    data = np.ma.array(data, mask=np.where(data < 0, True, False))
    (fig, ax) = plt.subplots(1, 1)
    # was ax.set_axis_bgcolor(), removed in matplotlib >= 2.2
    ax.set_facecolor('skyblue')
    ax.set_xticks(np.arange(0, days * 24 + 1, 24))
    ax.set_xticklabels(np.arange(1, days + 1))
    ax.set_title(('[%s] %s %s Clouds\nbased on ASOS METAR Cloud Amount '
                  'and Level Reports'
                  ) % (station, nt.sts[station]['name'],
                       sts.strftime("%b %Y")))
    cmap = cm.get_cmap('gray_r')
    cmap.set_bad('white')
    cmap.set_under('skyblue')
    ax.imshow(np.flipud(data), aspect='auto',
              extent=[0, days * 24, 0, 250], cmap=cmap, vmin=1)
    ax.set_yticks(range(0, 260, 50))
    ax.set_yticklabels(range(0, 25, 5))
    ax.set_ylabel("Cloud Levels [1000s feet]")
    ax.set_xlabel("Day of %s (UTC Timezone)" % (sts.strftime("%b %Y"),))
    from matplotlib.patches import Rectangle
    # proxy artists for the legend swatches
    r = Rectangle((0, 0), 1, 1, fc='skyblue')
    r2 = Rectangle((0, 0), 1, 1, fc='white')
    r3 = Rectangle((0, 0), 1, 1, fc='k')
    r4 = Rectangle((0, 0), 1, 1, fc='#EEEEEE')
    ax.grid(True)
    # Shrink current axis's height by 10% on the bottom
    box = ax.get_position()
    ax.set_position([box.x0, box.y0 + box.height * 0.1,
                     box.width, box.height * 0.9])
    ax.legend([r, r4, r2, r3],
              ['Clear', 'Some', 'Unknown', 'Obscured by Overcast'],
              loc='upper center', fontsize=14,
              bbox_to_anchor=(0.5, -0.09), fancybox=True, shadow=True,
              ncol=4)
    return fig, df
def plotter(fdict):
    """Daily min/max dew point range bars for one station-year.

    Bars span min to max dew point per day; days meeting the emphasis
    criterion (touching or entirely above the threshold) are recolored
    red and summarized in the x label.

    Returns:
      (matplotlib.figure.Figure, pandas.DataFrame)

    Raises:
      Exception: when no quality-controlled data is found.
    """
    import matplotlib
    matplotlib.use('agg')
    import matplotlib.pyplot as plt
    import matplotlib.dates as mdates
    IEM = psycopg2.connect(database='iem', host='iemdb', user='******')
    cursor = IEM.cursor(cursor_factory=psycopg2.extras.DictCursor)
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['zstation']
    network = ctx['network']
    nt = NetworkTable(network)
    year = ctx['year']
    emphasis = ctx['emphasis']
    opt = ctx['opt']
    table = "summary_%s" % (year, )
    # the CASE expressions null-out physically implausible dew points
    cursor.execute("""
     select day,
     (case when max_dwpf > -90 and max_dwpf < 120
         then max_dwpf else null end) as "max-dwpf",
     (case when min_dwpf > -90 and min_dwpf < 120
         then min_dwpf else null end) as "min-dwpf"
     from """ + table + """
     where iemid = (select iemid from stations
         where id = %s and network = %s) ORDER by day ASC
    """, (station, network))
    rows = []
    for row in cursor:
        # keep only days with both extremes available
        if row['max-dwpf'] is None or row['min-dwpf'] is None:
            continue
        rows.append(dict(day=row['day'], min_dwpf=row['min-dwpf'],
                         max_dwpf=row['max-dwpf']))
    if not rows:
        raise Exception("No Data Found!")
    df = pd.DataFrame(rows)
    days = np.array(df['day'])
    (fig, ax) = plt.subplots(1, 1, figsize=(8, 6))
    # bars run from the daily min up to the daily max
    bars = ax.bar(df['day'].values,
                  (df['max_dwpf'] - df['min_dwpf']).values,
                  ec='g', fc='g', bottom=df['min_dwpf'].values, zorder=1)
    ax.xaxis.set_major_formatter(mdates.DateFormatter('%-d\n%b'))
    hits = []
    # -99 sentinel means "no emphasis requested"
    if emphasis > -99:
        for i, mybar in enumerate(bars):
            y = mybar.get_y() + mybar.get_height()
            if ((y >= emphasis and opt == 'touches') or
                    (mybar.get_y() >= emphasis and opt == 'above')):
                mybar.set_facecolor('r')
                mybar.set_edgecolor('r')
                hits.append(df.loc[i, 'day'])
        ax.axhline(emphasis, lw=2, color='k')
        ax.text(days[-1] + datetime.timedelta(days=2), emphasis,
                "%s" % (emphasis, ), ha='left', va='center')
    ax.grid(True)
    ax.set_ylabel(r"Dew Point Temperature $^\circ$F")
    ax.set_title("%s [%s] %s Daily Min/Max Dew Point\nPeriod: %s to %s" % (
        nt.sts[station]['name'], station, year,
        min(days).strftime("%-d %b"),
        max(days).strftime("%-d %b")))
    box = ax.get_position()
    ax.set_position(
        [box.x0, box.y0 + box.height * 0.05, box.width,
         box.height * 0.95])
    ax.set_xlabel(("Days meeting emphasis: %s, first: %s last: %s"
                   ) % (len(hits),
                        hits[0].strftime("%B %d") if hits else 'None',
                        hits[-1].strftime("%B %d") if hits else 'None'))
    return fig, df
def plotter(fdict):
    """Yearly averaged temperature at one local hour, with a trend line.

    Averages the chosen variable at a fixed local hour per year (seasons
    selectable), then fits and plots a linear regression over the years.

    Returns:
      (matplotlib.figure.Figure, pandas.DataFrame)
    """
    import matplotlib
    matplotlib.use('agg')
    import matplotlib.pyplot as plt
    from matplotlib.font_manager import FontProperties
    font0 = FontProperties()
    font0.set_family('monospace')
    font0.set_size(16)
    pgconn = psycopg2.connect(database='asos', host='iemdb', user='******')
    ctx = get_autoplot_context(fdict, get_description())
    varname = ctx['var']
    month = ctx['month']
    network = ctx['network']
    station = ctx['zstation']
    hour = ctx['hour']
    nt = NetworkTable(network)
    # translate the month/season form choice into month numbers
    if month == 'all':
        months = range(1, 13)
    elif month == 'fall':
        months = [9, 10, 11]
    elif month == 'winter':
        months = [12, 1, 2]
    elif month == 'spring':
        months = [3, 4, 5]
    elif month == 'summer':
        months = [6, 7, 8]
    elif month == 'gs':
        months = [5, 6, 7, 8, 9]
    else:
        ts = datetime.datetime.strptime("2000-"+month+"-01", '%Y-%b-%d')
        # make sure it is length two for the trick below in SQL
        months = [ts.month]
    # +10 minutes nudges :50-style METAR obs into the next hour bucket
    df = read_sql("""
    WITH obs as (
        SELECT (valid + '10 minutes'::interval) at time zone %s as ts,
        tmpf::int as itmpf, dwpf::int as idwpf from alldata
        where station = %s and tmpf is not null
        and dwpf is not null and
        extract(month from valid at time zone %s) in %s),
    agg1 as (
        SELECT date_trunc('hour', ts) as hts, avg(itmpf) as avg_itmpf,
        avg(idwpf) as avg_idwpf from obs
        WHERE extract(hour from ts) = %s GROUP by hts)
    SELECT extract(year from hts) as year, avg(avg_itmpf) as avg_tmpf,
    count(*) as cnt
    from agg1 GROUP by year ORDER by year ASC
    """, pgconn, params=(nt.sts[station]['tzname'], station,
                         nt.sts[station]['tzname'], tuple(months), hour),
                  index_col='year')
    # require ~80% of possible daily obs in a year before using it
    minfreq = len(months) * 30 * 0.8
    df2 = df[df['cnt'] > minfreq]
    (fig, ax) = plt.subplots(1, 1)
    ax.bar(df2.index.values, df2[varname], align='center', ec='b', fc='b')
    m = df2[varname].mean()
    ax.axhline(m, lw=2, zorder=5, color='k')
    slp, intercept, r, _, _ = stats.linregress(df2.index.values,
                                               df2[varname].values)
    ax.plot(df2.index.values, intercept + (df2.index.values * slp),
            color='r', lw=2, zorder=6)
    ax.text(0.02, 0.92,
            r"$\frac{^\circ}{decade} = %.2f,R^2=%.2f, avg = %.1f$" % (
                slp * 10.0, r ** 2, m),
            va='bottom', transform=ax.transAxes, bbox=dict(color='white'))
    ax.set_ylim([df2[varname].min() - 5, df2[varname].max() + 5])
    ax.set_xlim([df2.index.min() - 1, df2.index.max() + 1])
    ax.grid(True)
    # only used to render the requested local hour prettily
    lts = datetime.datetime(2000, 1, 1, int(hour), 0)
    fig.text(0.5, 0.91, ("%s [%s] %s Local %s-%s\n"
                         "%s [%s]"
                         ) % (nt.sts[station]['name'], station,
                              lts.strftime("%-I %p"),
                              nt.sts[station]['archive_begin'].year,
                              datetime.date.today().year, PDICT[varname],
                              MDICT[month]), ha='center')
    return fig, df
def plotter(fdict):
    """Chart "step down" (fall) or "step up" (spring) temperature events.

    A step is the first day a trailing-366-day running max (spring) or
    min (fall) reaches a new level; panels show yearly counts, the level
    distribution, and the day-of-year distribution.

    Returns:
      (matplotlib.figure.Figure, pandas.DataFrame)

    Raises:
      NoDataFound: when the query returns no rows.
    """
    pgconn = get_dbconn("coop")
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx["station"]
    season = ctx["season"]
    table = "alldata_%s" % (station[:2],)
    nt = network.Table("%sCLIMATE" % (station[:2],))
    # fall seasons span the year boundary, so Jul-Dec maps to year+1
    year = (
        "case when month > 6 then year + 1 else year end"
        if season == "fall"
        else "year"
    )
    df = read_sql(
        f"""
        WITH obs as (
            SELECT day, month, high, low,
            {year} as season
            from {table} WHERE station = %s),
        data as (
            SELECT season, day,
            max(high) OVER (PARTITION by season ORDER by day ASC
                ROWS BETWEEN 366 PRECEDING and CURRENT ROW) as mh,
            min(low) OVER (PARTITION by season ORDER by day ASC
                ROWS BETWEEN 366 PRECEDING and CURRENT ROW) as ml
            from obs),
        lows as (
            SELECT season, day, ml as level,
            rank() OVER (PARTITION by season, ml ORDER by day ASC)
            from data),
        highs as (
            SELECT season, day, mh as level,
            rank() OVER (PARTITION by season, mh ORDER by day ASC)
            from data)
        (SELECT season as year, day, extract(doy from day) as doy,
         level, 'fall' as typ from lows WHERE rank = 1) UNION
        (SELECT season as year, day, extract(doy from day) as doy,
         level, 'spring' as typ from highs WHERE rank = 1)
        """,
        pgconn,
        params=[station],
    )
    if df.empty:
        raise NoDataFound("No Data Found.")
    df2 = df[df["typ"] == season]
    (fig, ax) = plt.subplots(3, 1, figsize=(7, 10))
    # panel 1: number of step events per year
    dyear = df2.groupby(["year"]).count()
    ax[0].bar(dyear.index, dyear["level"], facecolor="tan",
              edgecolor="tan")
    ax[0].axhline(dyear["level"].mean(), lw=2)
    ax[0].set_ylabel("Yearly Events Avg: %.1f" % (dyear["level"].mean(),))
    ax[0].set_xlim(dyear.index.min() - 1, dyear.index.max() + 1)
    title = "%s Steps %s" % (
        PDICT[season],
        "Down" if season == "fall" else "Up",
    )
    ax[0].set_title(
        "%s [%s]\n%s in Temperature" % (nt.sts[station]["name"], station,
                                        title)
    )
    ax[0].grid(True)
    # panel 2: distribution of the temperature levels stepped to
    ax[1].hist(
        np.array(df2["level"], "f"),
        bins=np.arange(df2["level"].min(), df2["level"].max() + 1, 2),
        density=True,
        facecolor="tan",
    )
    ax[1].set_ylabel("Probability Density")
    ax[1].axvline(32, lw=2)
    ax[1].grid(True)
    ax[1].set_xlabel(r"Temperature $^\circ$F, 32 degrees highlighted")
    # panel 3: distribution of when in the year the steps occur
    ax[2].hist(
        np.array(df2["doy"], "f"),
        bins=np.arange(df2["doy"].min(), df2["doy"].max() + 1, 3),
        density=True,
        facecolor="tan",
    )
    ax[2].set_xticks(
        (1, 32, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335, 365)
    )
    ax[2].set_xticklabels(calendar.month_abbr[1:])
    ax[2].set_xlim(df2["doy"].min() - 3, df2["doy"].max() + 3)
    ax[2].set_ylabel("Probability Density")
    ax[2].grid(True)
    ax[2].set_xlabel("Day of Year, 3 Day Bins")
    return fig, df
def plotter(fdict):
    """ Go

    Rank the top-10 hourly observations for a station / month-filter /
    variable and render them as a horizontal bar chart: left axis shows
    the rank (ties share a rank), right axis shows value + timestamp.
    Returns ``(figure, dataframe)`` or an error string when no data.
    """
    import matplotlib
    matplotlib.use('agg')
    import matplotlib.pyplot as plt
    # NOTE(review): connection credentials look redacted in this source;
    # confirm the real parameters before deploying.
    pgconn = psycopg2.connect(database='asos', host='iemdb', user='******')
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['zstation']
    network = ctx['network']
    month = ctx['month']
    varname = ctx['var']
    nt = NetworkTable(network)

    if month == 'all':
        months = range(1, 13)
    elif month == 'fall':
        months = [9, 10, 11]
    elif month == 'winter':
        months = [12, 1, 2]
    elif month == 'spring':
        months = [3, 4, 5]
    elif month == 'summer':
        months = [6, 7, 8]
    elif month == 'octmar':
        months = [10, 11, 12, 1, 2, 3]
    else:
        ts = datetime.datetime.strptime("2000-"+month+"-01", '%Y-%b-%d')
        # make sure it is length two for the trick below in SQL
        months = [ts.month, 999]

    # varname is like 'max_p01i' or 'min_tmpf': aggregate + db column.
    # dbvar/sorder are interpolated into the SQL, but both derive from the
    # closed set of select-box options, not free-form user input.
    (agg, dbvar) = varname.split("_")
    sorder = 'DESC' if agg == 'max' else 'ASC'
    # BUGFIX: the query previously selected only p01i, so any non-precip
    # variable (e.g. tmpf) raised KeyError at row[dbvar] below; select the
    # ordered column instead.
    df = read_sql("""WITH data as (
        SELECT valid at time zone %s as v, """ + dbvar + """ from alldata
        WHERE station = %s and
        extract(month from valid at time zone %s) in %s)

    SELECT v as valid, """ + dbvar + """ from data
    ORDER by """ + dbvar + """ """ + sorder + """ NULLS LAST LIMIT 100
    """, pgconn, params=(nt.sts[station]['tzname'], station,
                         nt.sts[station]['tzname'], tuple(months)),
                  index_col=None)
    if df.empty:
        return 'Error, no results returned!'

    ylabels = []
    fmt = '%.2f' if varname in ['max_p01i', ] else '%.0f'
    hours = set()  # keep only the most extreme ob within each hour
    y = []
    lastval = -99
    ranks = []
    currentrank = 0
    for _, row in df.iterrows():
        key = row['valid'].strftime("%Y%m%d%H")
        if key in hours:
            continue
        hours.add(key)
        y.append(row[dbvar])
        lbl = fmt % (row[dbvar], )
        lbl += " -- %s" % (row['valid'].strftime("%b %d, %Y %-I:%M %p"),)
        ylabels.append(lbl)
        # ties share the same rank number
        if row[dbvar] != lastval:
            currentrank += 1
        ranks.append(currentrank)
        lastval = row[dbvar]
        if len(ylabels) == 10:
            break

    # NOTE(review): assumes 10 distinct hourly events exist; with fewer,
    # barh gets mismatched lengths -- TODO confirm upstream guarantees.
    ax = plt.axes([0.1, 0.1, 0.5, 0.8])
    ax.barh(range(10, 0, -1), y, ec='green', fc='green',
            height=0.8, align='center')
    # twin axis carries the value/timestamp labels on the right side
    ax2 = ax.twinx()
    ax2.set_ylim(0.5, 10.5)
    ax.set_ylim(0.5, 10.5)
    ax2.set_yticks(range(1, 11))
    ax.set_yticks(range(1, 11))
    ax.set_yticklabels(["#%s" % (x,) for x in ranks][::-1])
    ax2.set_yticklabels(ylabels[::-1])
    ax.grid(True, zorder=11)
    # raw string: '\c' is an invalid escape in a plain string literal
    ax.set_xlabel(("Precipitation [inch]" if varname in ['max_p01i']
                   else r'Temperature $^\circ$F'))
    ax.set_title(("%s [%s] Top 10 Events\n"
                  "%s (%s) "
                  "(%s-%s)"
                  ) % (nt.sts[station]['name'], station,
                       METRICS[varname], MDICT[month],
                       nt.sts[station]['archive_begin'].year,
                       datetime.datetime.now().year), size=12)
    plt.gcf().text(0.98, 0.02,
                   "Timezone: %s" % (nt.sts[station]['tzname'],),
                   ha='right')
    return plt.gcf(), df
def plotter(fdict):
    """ Go

    Monthly climatology of day-to-day temperature change: for each month,
    the share of days where the chosen variable (high or low) increased,
    held, or decreased versus the previous day, shown both long-term and
    for one selected year as side-by-side stacked percentage bars.
    Returns ``(figure, dataframe)``.
    """
    import matplotlib
    matplotlib.use('agg')
    import matplotlib.pyplot as plt
    import matplotlib.patheffects as PathEffects
    pgconn = get_dbconn('coop')
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['station']
    varname = ctx['varname']
    year = ctx['year']
    network = "%sCLIMATE" % (station[:2], )
    nt = NetworkTable(network)
    table = "alldata_%s" % (station[:2], )
    # lag() supplies the previous day's value so each day can be counted
    # as greater / unchanged / lower versus yesterday.
    df = read_sql("""
    with obs as (
        select month, year, high, lag(high) OVER (ORDER by day ASC) as lhigh,
        low, lag(low) OVER (ORDER by day ASC) as llow
        from """ + table + """ where station = %s)

    SELECT year, month,
    sum(case when high > lhigh then 1 else 0 end)::numeric as high_greater,
    sum(case when high = lhigh then 1 else 0 end)::numeric as high_unch,
    sum(case when high < lhigh then 1 else 0 end)::numeric as high_lower,
    sum(case when low > llow then 1 else 0 end)::numeric as low_greater,
    sum(case when low = llow then 1 else 0 end)::numeric as low_unch,
    sum(case when low < llow then 1 else 0 end)::numeric as low_lower
    from obs GROUP by year, month ORDER by year, month
    """, pgconn, params=(station, ), index_col=None)
    # Long-term monthly totals, plus totals for the requested year only.
    gdf = df.groupby('month').sum()
    gyear = df[df['year'] == year].groupby('month').sum()
    increase = gdf[varname + '_greater']
    nochange = gdf[varname + '_unch']
    decrease = gdf[varname + '_lower']
    increase2 = gyear[varname + '_greater']
    nochange2 = gyear[varname + '_unch']
    decrease2 = gyear[varname + '_lower']

    (fig, ax) = plt.subplots(1, 1, figsize=(8, 6))
    total = decrease + nochange + increase
    total2 = decrease2 + nochange2 + increase2
    # Climatology bars sit 0.2 left of each month tick; the selected
    # year's bars sit 0.2 right. Stacks are decrease / no-change / increase.
    ax.bar(total.index.values - 0.2, decrease / total * 100.0, fc='b',
           label='Decrease', width=0.4, align='center')
    ax.bar(total2.index.values + 0.2, decrease2 / total2 * 100.0,
           fc='lightblue', width=0.4, label="%s ''" % (year, ),
           align='center')
    ax.bar(total.index.values - 0.2, nochange / total * 100.0,
           bottom=(decrease / total * 100.0), fc='g', label="No Change",
           width=0.4, align='center')
    ax.bar(total2.index.values + 0.2, nochange2 / total2 * 100.0,
           bottom=(decrease2 / total2 * 100.0), fc='lightgreen', width=0.4,
           label="%s ''" % (year, ), align='center')
    ax.bar(total.index.values - 0.2, increase / total * 100.0,
           bottom=(decrease + nochange) / total * 100.0, fc='r', width=0.4,
           label="Increase", align='center')
    ax.bar(total2.index.values + 0.2, increase2 / total2 * 100.0,
           bottom=(decrease2 + nochange2) / total2 * 100.0, fc='pink',
           width=0.4, label="%s ''" % (year, ), align='center')

    # Annotate every stacked segment with its percentage, outlined in
    # white so the text stays legible over the bar colors.
    offset = -0.2
    for _df in [gdf, gyear]:
        increase = _df[varname + '_greater']
        nochange = _df[varname + '_unch']
        decrease = _df[varname + '_lower']
        total = decrease + nochange + increase
        for i in _df.index:  # index is the month number
            txt = ax.text(i + offset, decrease[i] / total[i] * 100.0 - 5,
                          "%.0f" % (decrease[i] / total[i] * 100.0),
                          ha='center', fontsize=10)
            txt.set_path_effects(
                [PathEffects.withStroke(linewidth=2, foreground="white")])
            ymid = (decrease[i] + (nochange[i] / 2.)) / total[i] * 100.
            txt = ax.text(i + offset, ymid,
                          "%.0f" % (nochange[i] / total[i] * 100.0),
                          ha='center', va='center', fontsize=10)
            txt.set_path_effects(
                [PathEffects.withStroke(linewidth=2, foreground="white")])
            txt = ax.text(i + offset,
                          (decrease[i] + nochange[i]) / total[i] * 100.0 + 2,
                          "%.0f" % (increase[i] / total[i] * 100.0),
                          ha='center', fontsize=10)
            txt.set_path_effects(
                [PathEffects.withStroke(linewidth=2, foreground="white")])
        offset += 0.4

    # BUGFIX: ticks must be set before tick labels; calling set_xticks
    # after set_xticklabels discards the just-assigned month labels.
    ax.set_xticks(np.arange(1, 13))
    ax.set_xticklabels(calendar.month_abbr[1:])
    ax.legend(ncol=3, fontsize=12, loc=9, framealpha=1)
    ax.set_xlim(0.5, 12.5)
    ax.set_ylim(0, 100)
    ax.set_yticks([0, 5, 10, 25, 50, 75, 90, 95, 100])
    ax.set_ylabel("Percentage of Days [%]")
    ax.set_xlabel(("Dark Shades are long term averages, lighter are %s "
                   "actuals") % (year, ))
    ax.set_title(("%s [%s]\nDay to Day %s Temperature Change")
                 % (nt.sts[station]['name'], station, varname.title()))
    return fig, df