def plot_sky(days, vsby, data, station, nt, sts):
    """Sky plot variant.

    Renders a two-panel figure: a thin visibility strip along the bottom and
    a large cloud-level panel above it, both with one column per hour.

    Args:
        days: number of days in the month being plotted.
        vsby: 2-D (masked) array of visibilities, one column per hour
            -- assumed shape (1, days*24); TODO confirm against caller.
        data: 2-D (masked) array of cloud levels (rows = levels).
        station: station identifier string.
        nt: network table providing ``nt.sts[station]['name']``.
        sts: datetime anchoring the month label.

    Returns:
        matplotlib Figure.
    """
    fig = plt.figure(figsize=(8, 6))
    # vsby plot: thin strip at the bottom, one pixel-row tall
    ax = plt.axes([0.1, 0.08, 0.8, 0.03])
    ax.set_xticks(np.arange(0, days*24+1, 24))
    ax.set_xticklabels(np.arange(1, days+1))
    ax.set_yticks([])
    cmap = cm.get_cmap('gray')
    # masked (missing) hours render white
    cmap.set_bad('white')
    res = ax.imshow(
        vsby, aspect='auto', extent=[0, days*24, 0, 1], vmin=0,
        cmap=cmap, vmax=10)
    cax = plt.axes([0.915, 0.08, 0.035, 0.2])
    fig.colorbar(res, cax=cax)
    fig.text(0.02, 0.09, "Visibility\n[miles]", va='center')
    # clouds: main panel above the visibility strip
    ax = plt.axes([0.1, 0.16, 0.8, 0.7])
    ax.set_facecolor('skyblue')
    ax.set_xticks(np.arange(0, days*24+1, 24))
    ax.set_xticklabels(np.arange(1, days+1))
    fig.text(
        0.5, 0.935,
        ('[%s] %s %s Clouds & Visibility\nbased on ASOS METAR Cloud Amount '
         '/Level and Visibility Reports'
         ) % (station, nt.sts[station]['name'], sts.strftime("%b %Y")),
        ha='center', fontsize=14)
    cmap = cm.get_cmap('gray_r')
    cmap.set_bad('white')
    # values below vmin=1 (clear sky) fall through to the skyblue background
    cmap.set_under('skyblue')
    ax.imshow(np.flipud(data), aspect='auto', extent=[0, days*24, 0, 250],
              cmap=cmap, vmin=1)
    # y axis: 0-250 rows mapped onto 0-25 thousand feet labels
    ax.set_yticks(range(0, 260, 50))
    ax.set_yticklabels(range(0, 25, 5))
    ax.set_ylabel("Cloud Levels [1000s feet]")
    fig.text(0.45, 0.02, "Day of %s (UTC Timezone)" % (sts.strftime("%b %Y"),))
    # proxy artists for the legend (colors match the colormap special values)
    r1 = Rectangle((0, 0), 1, 1, fc='skyblue')
    r2 = Rectangle((0, 0), 1, 1, fc='white')
    r3 = Rectangle((0, 0), 1, 1, fc='k')
    r4 = Rectangle((0, 0), 1, 1, fc='#EEEEEE')
    ax.grid(True)
    ax.legend(
        [r1, r4, r2, r3],
        ['Clear', 'Some', 'Unknown', 'Obscured by Overcast'],
        loc='lower center', fontsize=14,
        bbox_to_anchor=(0.5, 0.99), fancybox=True, shadow=True, ncol=4)
    return fig
def plot_vsby(days, vsby, station, ctx, sts):
    """Sky plot variant.

    Converts the 1-D hourly visibility series into a filled 2-D column chart:
    for each hour the column is filled up to the visibility value (in tenths
    of a mile) and rendered through a gray colormap.

    Args:
        days: number of days in the month.
        vsby: masked array of visibilities indexed ``vsby[0, hour]``.
        station: station identifier string.
        ctx: autoplot context providing ``ctx['_nt']`` network table.
        sts: datetime anchoring the month label.

    Returns:
        matplotlib Figure.
    """
    fig = plt.figure(figsize=(8, 6))
    # need to convert vsby to 2-d
    # sentinel scheme: -3 = missing (masked -> white), -1 = below the bar top
    # (set_under -> skyblue background), >=0 = visibility value for shading
    data = np.ones((100, days * 24)) * -3
    for i in range(days * 24):
        val = vsby[0, i]
        if np.ma.is_masked(val):
            continue
        # tenths of a mile, capped at 100 rows (10 miles)
        val = min([int(val * 10), 100])
        data[val:, i] = val / 10.0
        data[:val, i] = -1
    # mask only the -3 cells; -1 survives so set_under can color it
    data = np.ma.array(data, mask=np.where(data < -1, True, False))
    # clouds
    ax = plt.axes([0.1, 0.1, 0.8, 0.8])
    ax.set_facecolor("skyblue")
    ax.set_xticks(np.arange(0, days * 24 + 1, 24))
    ax.set_xticklabels(np.arange(1, days + 1))
    fig.text(
        0.5, 0.935,
        ("[%s] %s %s Visibility\nbased on hourly ASOS METAR Visibility Reports"
         ) % (station, ctx["_nt"].sts[station]["name"], sts.strftime("%b %Y")),
        ha="center", fontsize=14,
    )
    cmap = cm.get_cmap("gray")
    cmap.set_bad("white")
    cmap.set_under("skyblue")
    res = ax.imshow(
        np.flipud(data),
        aspect="auto",
        extent=[0, days * 24, 0, 100],
        cmap=cmap,
        vmin=0,
        vmax=10,
    )
    cax = plt.axes([0.915, 0.08, 0.035, 0.2])
    fig.colorbar(res, cax=cax)
    # 0-100 rows labelled as 0-10 miles
    ax.set_yticks(range(0, 101, 10))
    ax.set_yticklabels(range(0, 11, 1))
    ax.set_ylabel("Visibility [miles]")
    fig.text(0.45, 0.02, "Day of %s (UTC Timezone)" % (sts.strftime("%b %Y"), ))
    ax.grid(True)
    return fig
def plotter(fdict):
    """Go.

    Builds a four-panel figure of the year each daily temperature record
    (max/min high, max/min low, ties included) was set at a station; ranking
    is done in SQL with window functions, panel drawing is delegated to
    ``magic``.

    Args:
        fdict: autoplot form dictionary.

    Returns:
        (matplotlib Figure, pandas DataFrame of the record days)

    Raises:
        NoDataFound: when the station has no qualifying rows.
    """
    pgconn = get_dbconn("coop")
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx["station"]
    # per-state long-term climate table, e.g. alldata_ia
    table = "alldata_%s" % (station[:2], )
    df = read_sql(
        """ WITH data as ( SELECT sday, day, year, rank() OVER (PARTITION by sday ORDER by high DESC) as max_high_rank, rank() OVER (PARTITION by sday ORDER by high ASC) as min_high_rank, rank() OVER (PARTITION by sday ORDER by low DESC) as max_low_rank, rank() OVER (PARTITION by sday ORDER by low ASC) as min_low_rank from """ + table + """ WHERE station = %s and high is not null and low is not null) SELECT *, extract(doy from ('2000-'||substr(sday, 1, 2)||'-'||substr(sday, 3, 2))::date) as doy from data WHERE max_high_rank = 1 or min_high_rank = 1 or max_low_rank = 1 or min_low_rank = 1 ORDER by day ASC """,
        pgconn,
        params=(station, ),
        index_col=None,
    )
    if df.empty:
        raise NoDataFound("No Data Found.")
    fig = plt.figure(figsize=(12, 6))
    fig.text(
        0.5, 0.95,
        ("[%s] %s Year of Daily Records, ties included")
        % (station, ctx["_nt"].sts[station]["name"]),
        ha="center", fontsize=16,
    )
    # one panel per record category; magic() draws scatter + side histogram
    ax = plt.axes([0.04, 0.55, 0.35, 0.35])
    magic(ax, df, "max_high_rank", "Maximum High (warm)", ctx)
    ax = plt.axes([0.04, 0.1, 0.35, 0.35])
    magic(ax, df, "min_high_rank", "Minimum High (cold)", ctx)
    ax = plt.axes([0.54, 0.55, 0.35, 0.35])
    magic(ax, df, "max_low_rank", "Maximum Low (warm)", ctx)
    ax = plt.axes([0.54, 0.1, 0.35, 0.35])
    magic(ax, df, "min_low_rank", "Minimum Low (cold)", ctx)
    return plt.gcf(), df
def make_colorbar(clevs, norm, cmap):
    """Draw a manual colorbar down the left edge of the current figure.

    The given levels are extended by one extrapolated step below and above so
    the bar can show "both" extend arrows, and each level is labelled with a
    white-haloed text centered on the bar.

    Args:
        clevs: ascending sequence of at least two contour levels.
        norm: matplotlib Normalize mapping levels to colormap positions.
        cmap: matplotlib colormap instance.

    Raises:
        ValueError: if fewer than two levels are supplied (the original code
            failed with an opaque IndexError / ZeroDivisionError here).
    """
    if len(clevs) < 2:
        # need two levels to extrapolate under/over bounds and to space labels
        raise ValueError("make_colorbar requires at least two levels")
    ax = plt.axes([0.02, 0.1, 0.05, 0.8], frameon=False, yticks=[], xticks=[])
    # extrapolate one step beyond each end for the extend arrows
    under = clevs[0] - (clevs[1] - clevs[0])
    over = clevs[-1] + (clevs[-1] - clevs[-2])
    blevels = np.concatenate([[under], clevs, [over]])
    cb2 = mpcolorbar.ColorbarBase(
        ax,
        cmap=cmap,
        norm=norm,
        boundaries=blevels,
        extend="both",
        ticks=None,
        spacing="uniform",
        orientation="vertical",
    )
    # loop-invariant label format and denominator, hoisted out of the loop
    fmt = "%g"
    span = len(clevs) - 1
    for i, lev in enumerate(clevs):
        y = float(i) / span
        txt = cb2.ax.text(0.5, y, fmt % (lev,), va="center", ha="center")
        # white stroke keeps labels legible over dark colormap cells
        txt.set_path_effects(
            [PathEffects.withStroke(linewidth=2, foreground="w")]
        )
    ax.yaxis.set_ticklabels([])
def magic(ax, df, colname, title, ctx):
    """Render one record-category panel plus a decade-count side histogram.

    Scatters the record-setting (rank == 1) days by day-of-year versus year
    on *ax*, colored by decade, and attaches a narrow horizontal histogram of
    records-per-decade immediately to the right of the panel.
    """
    # rows where this rank column equals 1 are the record holders (ties incl.)
    records = df[df[colname] == 1]
    ax.text(0, 1.02, title, transform=ax.transAxes)
    ax.set_xlim(0, 367)
    ax.grid(True)
    ax.set_xticks((1, 32, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335, 365))
    ax.set_xticklabels(calendar.month_abbr[1:])
    # place the histogram axes flush against the panel's right edge
    pos = ax.get_position()
    sideax = plt.axes([pos.x1 + 0.01, pos.y0, 0.09, 0.35])
    first_year = df['year'].min()
    last_year = df['year'].max()
    ylim = [first_year, last_year]
    # round the span outward to whole decades for binning and coloring
    decade0 = first_year - (first_year % 10)
    decade1 = last_year + (10 - last_year % 10)
    cmap = plt.get_cmap(ctx['cmap'])
    norm = mpcolors.BoundaryNorm(np.arange(decade0, decade1 + 1, 10), cmap.N)
    ax.scatter(records['doy'], records['year'],
               color=cmap(norm(records['year'].values)))
    ax.set_yticks(np.arange(decade0, decade1, 20))
    ax.set_ylim(*ylim)
    counts, bins = np.histogram(records['year'].values,
                                np.arange(decade0, decade1 + 1, 10))
    sideax.barh(bins[:-1], counts, height=10, align='edge',
                color=cmap(norm(bins[:-1])))
    sideax.set_yticks(np.arange(decade0, decade1, 20))
    sideax.set_yticklabels([])
    sideax.set_ylim(*ylim)
    sideax.grid(True)
    sideax.set_xlabel("Decade Count")
def main(argv):
    """Go Main Go.

    Plot canopy coverage for WEPP planting-date scenarios 60-69 against the
    scenario 59 baseline for one huc12/flowpath/year, saving ``test.png``.

    Args:
        argv: command line args: [1] huc12 id, [2] flowpath id, [3] year.
    """
    huc12 = argv[1]
    fpath = argv[2]
    year = int(argv[3])
    prop_cycle = plt.rcParams["axes.prop_cycle"]
    colors = prop_cycle.by_key()["color"]
    data = {}
    # scenario 59 is the baseline; 60..69 shift planting in 5 day steps
    for scenario in range(59, 70):
        df = read_crop("/i/%s/crop/%s/%s/%s_%s.crop" % (
            scenario, huc12[:8], huc12[8:], huc12, fpath))
        # only the first overland flow element is plotted
        data[scenario] = df[df["ofe"] == 1].set_index("date")
    ax1 = plt.axes([0.15, 0.5, 0.85, 0.35])
    ax2 = plt.axes([0.15, 0.1, 0.85, 0.35])
    baseline = data[59][data[59].index.year == year]
    for scenario in range(60, 70):
        color = colors[scenario - 60]
        # nominal planting date label for this scenario
        date = datetime.date(2000, 4, 15) + datetime.timedelta(
            days=(scenario - 60) * 5)
        scendata = data[scenario][data[scenario]["year"] == year]
        delta = scendata["canopy_percent"] - baseline["canopy_percent"]
        x = delta.index.to_pydatetime()
        ax1.plot(
            x,
            scendata["canopy_percent"] * 100.0,
            label=date.strftime("%b %d"),
            color=color,
        )
        ax2.plot(x, delta.values * 100.0, color=color)
    ax1.set_xlim(datetime.date(year, 4, 15), datetime.date(year, 7, 15))
    ax2.set_xlim(datetime.date(year, 4, 15), datetime.date(year, 7, 15))
    ax1.xaxis.set_major_locator(mdates.DayLocator([1]))
    ax1.xaxis.set_major_formatter(mdates.DateFormatter("%b"))
    ax2.xaxis.set_major_locator(mdates.DayLocator([1]))
    ax2.xaxis.set_major_formatter(mdates.DateFormatter("%b"))
    ax1.set_ylabel("Coverage [%]")
    # fixed label typo: "Differnece" -> "Difference"
    ax2.set_ylabel("Absolute Difference from Apr 10 [%]")
    ax2.set_ylim(-101, 0)
    ax1.set_title("huc12: %s fpath: %s\n%s Canopy Coverage by Planting Date"
                  % (huc12, fpath, year))
    ax1.grid()
    ax2.grid()
    ax1.legend(loc=2, ncol=2)
    plt.gcf().savefig("test.png")
def plotter(fdict):
    """Go.

    Older variant of the daily-records four-panel plot: same SQL ranking as
    the sibling implementation, but resolves the station name through a
    ``NetworkTable`` and performs no empty-result check.

    Args:
        fdict: autoplot form dictionary.

    Returns:
        (matplotlib Figure, pandas DataFrame of the record days)
    """
    pgconn = get_dbconn('coop')
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['station']
    # per-state long-term climate table, e.g. alldata_ia
    table = "alldata_%s" % (station[:2], )
    nt = NetworkTable("%sCLIMATE" % (station[:2], ))
    df = read_sql("""
    WITH data as ( SELECT sday, day, year, rank() OVER (PARTITION by sday ORDER by high DESC) as max_high_rank, rank() OVER (PARTITION by sday ORDER by high ASC) as min_high_rank, rank() OVER (PARTITION by sday ORDER by low DESC) as max_low_rank, rank() OVER (PARTITION by sday ORDER by low ASC) as min_low_rank from """ + table + """ WHERE station = %s and high is not null and low is not null) SELECT *, extract(doy from ('2000-'||substr(sday, 1, 2)||'-'||substr(sday, 3, 2))::date) as doy from data WHERE max_high_rank = 1 or min_high_rank = 1 or max_low_rank = 1 or min_low_rank = 1 ORDER by day ASC
    """, pgconn, params=(station, ), index_col=None)
    fig = plt.figure(figsize=(12, 6))
    fig.text(0.5, 0.95,
             ("[%s] %s Year of Daily Records, ties included"
              ) % (station, nt.sts[station]['name']),
             ha='center', fontsize=16)
    # one panel per record category; magic() draws scatter + side histogram
    ax = plt.axes([0.04, 0.55, 0.35, 0.35])
    magic(ax, df, 'max_high_rank', 'Maximum High (warm)', ctx)
    ax = plt.axes([0.04, 0.1, 0.35, 0.35])
    magic(ax, df, 'min_high_rank', 'Minimum High (cold)', ctx)
    ax = plt.axes([0.54, 0.55, 0.35, 0.35])
    magic(ax, df, 'max_low_rank', 'Maximum Low (warm)', ctx)
    ax = plt.axes([0.54, 0.1, 0.35, 0.35])
    magic(ax, df, 'min_low_rank', 'Minimum Low (cold)', ctx)
    return plt.gcf(), df
def main(argv):
    """Go Main Go.

    Plot daily change in sediment delivery for planting-date scenarios
    60-69 versus the scenario 59 baseline, queried from the ``idep``
    database, as arrow strips (one horizontal band per scenario); saves
    ``test.png``.

    Args:
        argv: command line args: [1] huc12 id, [2] year.
    """
    huc12 = argv[1]
    year = int(argv[2])
    prop_cycle = plt.rcParams["axes.prop_cycle"]
    colors = prop_cycle.by_key()["color"]
    pgconn = get_dbconn("idep")
    df = read_sql(
        """ SELECT scenario, huc_12, avg_delivery, valid from results_by_huc12 WHERE scenario >= 59 and scenario < 70 and extract(year from valid) = %s and huc_12 = %s ORDER by valid ASC """,
        pgconn,
        params=(year, huc12),
    )
    df["valid"] = pd.to_datetime(df["valid"])
    ax = plt.axes([0.2, 0.1, 0.75, 0.75])
    # scenario 59 is the Apr 10 baseline
    baseline = df[df["scenario"] == 59].copy().set_index("valid")
    yticklabels = []
    col = "avg_delivery"
    for scenario in range(60, 70):
        color = colors[scenario - 60]
        # nominal planting date for this scenario (5 day steps)
        date = datetime.date(2000, 4, 15) + datetime.timedelta(days=(scenario -
                                                                     60) * 5)
        scendata = df[df["scenario"] == scenario].copy().set_index("valid")
        delta = scendata[col] - baseline[col]
        # only draw days where delivery actually changed
        delta = delta[delta != 0]
        total = ((scendata[col].sum() - baseline[col].sum()) /
                 baseline[col].sum()) * 100.0
        yticklabels.append("%s %4.2f%%" % (date.strftime("%b %d"), total))
        x = delta.index.to_pydatetime()
        # res = ax.scatter(x, delta.values + (scenario - 60))
        # arrow magnitude scaled x10 so small deltas remain visible
        for idx, val in enumerate(delta):
            ax.arrow(
                x[idx],
                scenario - 60,
                0,
                val * 10.0,
                head_width=4,
                head_length=0.1,
                fc=color,
                ec=color,
            )
        ax.axhline(scenario - 60, color=color)
    ax.set_xlim(datetime.date(year, 1, 1), datetime.date(year + 1, 1, 1))
    ax.set_ylim(-0.5, 10)
    ax.xaxis.set_major_locator(mdates.DayLocator([1]))
    ax.xaxis.set_major_formatter(mdates.DateFormatter("%b"))
    ax.set_title("huc12: %s \n%s Daily Change in Delivery vs Apr 10 Planting"
                 % (huc12, year))
    ax.grid(axis="x")
    ax.set_yticks(range(10))
    ax.set_yticklabels(yticklabels)
    plt.gcf().savefig("test.png")
def main(argv):
    """Go Main Go.

    File-based sibling of the database variant: reads per-scenario WEPP
    ``.env`` output files and plots daily sediment-delivery change for
    planting-date scenarios 60-69 versus the scenario 59 baseline; saves
    ``test.png``.

    Args:
        argv: command line args: [1] huc12 id, [2] flowpath id, [3] year.
    """
    huc12 = argv[1]
    fpath = argv[2]
    year = int(argv[3])
    prop_cycle = plt.rcParams["axes.prop_cycle"]
    colors = prop_cycle.by_key()["color"]
    data = {}
    for scenario in range(59, 70):
        data[scenario] = read_env(
            "/i/%s/env/%s/%s/%s_%s.env" % (scenario, huc12[:8], huc12[8:],
                                           huc12, fpath)).set_index("date")
        # debugging aid: total detachment per scenario
        print(data[scenario]["av_det"].sum())
    ax = plt.axes([0.2, 0.1, 0.75, 0.75])
    baseline = data[59][data[59].index.year == year]
    yticklabels = []
    for scenario in range(60, 70):
        color = colors[scenario - 60]
        # nominal planting date for this scenario (5 day steps)
        date = datetime.date(2000, 4, 15) + datetime.timedelta(days=(scenario -
                                                                     60) * 5)
        scendata = data[scenario][data[scenario].index.year == year]
        delta = scendata["sed_del"] - baseline["sed_del"]
        # only draw days where delivery actually changed
        delta = delta[delta != 0]
        total = ((scendata["sed_del"].sum() - baseline["sed_del"].sum()) /
                 baseline["sed_del"].sum()) * 100.0
        yticklabels.append("%s %4.2f%%" % (date.strftime("%b %d"), total))
        x = delta.index.to_pydatetime()
        # res = ax.scatter(x, delta.values + (scenario - 60))
        for idx, val in enumerate(delta):
            ax.arrow(
                x[idx],
                scenario - 60,
                0,
                val,
                head_width=0.5,
                head_length=0.1,
                fc=color,
                ec=color,
            )
        ax.axhline(scenario - 60, color=color)
    ax.set_xlim(datetime.date(year, 1, 1), datetime.date(year + 1, 1, 1))
    ax.set_ylim(-0.5, 10)
    ax.xaxis.set_major_locator(mdates.DayLocator([1]))
    ax.xaxis.set_major_formatter(mdates.DateFormatter("%b"))
    ax.set_title(
        "huc12: %s fpath: %s\n%s Daily Change in Delivery vs Apr 10 Planting"
        % (huc12, fpath, year))
    ax.grid(axis="x")
    ax.set_yticks(range(10))
    ax.set_yticklabels(yticklabels)
    plt.gcf().savefig("test.png")
def plotter(fdict):
    """Go.

    Relate monthly station precipitation/temperature to a lagged 3-month SOI
    (El Nino / La Nina) index: main scatter colored by SOI, plus two side
    panels regressing temperature and precipitation against SOI.

    Args:
        fdict: autoplot form dictionary.

    Returns:
        (matplotlib Figure, pandas DataFrame of year/precip/temp/soi rows)
    """
    pgconn = get_dbconn('coop')
    ccursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['station']
    lagmonths = ctx['lag']
    months = ctx['months']
    month = ctx['month']
    highyears = [int(x) for x in ctx['year'].split(",")]
    h = ctx['h']
    wantmonth = month + lagmonths
    yearoffset = 0
    if month + lagmonths < 1:
        # wrap into the previous calendar year
        # NOTE(review): for month+lagmonths == 0 this gives 12 (correct), but
        # for -1 it gives 13, an invalid month -- presumably
        # 12 + (month + lagmonths) was intended; confirm against upstream.
        wantmonth = 12 - (month + lagmonths)
        yearoffset = 1
    # months of interest and the year-delta to apply when they wrap past Dec
    wanted = []
    deltas = []
    for m in range(month, month + months):
        if m < 13:
            wanted.append(m)
            deltas.append(0)
        else:
            wanted.append(m - 12)
            deltas.append(-1)
    table = "alldata_%s" % (station[:2], )
    nt = network.Table("%sCLIMATE" % (station[:2], ))
    # SOI / Nino3.4 anomalies keyed by (offset) year for the lag month
    elnino = {}
    ccursor.execute("""SELECT monthdate, soi_3m, anom_34 from elnino""")
    for row in ccursor:
        if row[0].month != wantmonth:
            continue
        elnino[row[0].year + yearoffset] = dict(soi_3m=row[1], anom_34=row[2])
    ccursor.execute(""" SELECT year, month, sum(precip), avg((high+low)/2.) from """ + table + """ where station = %s GROUP by year, month """, (station, ))
    # accumulate precip totals and monthly mean temps per effective year
    yearly = {}
    for row in ccursor:
        (_year, _month, _precip, _temp) = row
        if _month not in wanted:
            continue
        effectiveyear = _year + deltas[wanted.index(_month)]
        nino = elnino.get(effectiveyear, {}).get('soi_3m', None)
        if nino is None:
            continue
        data = yearly.setdefault(effectiveyear,
                                 dict(precip=0, temp=[], nino=nino))
        data['precip'] += _precip
        data['temp'].append(float(_temp))
    fig = plt.figure(figsize=(10, 6))
    ax = plt.axes([0.1, 0.12, 0.5, 0.75])
    msg = ("[%s] %s\n%s\n%s SOI (3 month average)") % (
        station, nt.sts[station]['name'], title(wanted),
        datetime.date(2000, wantmonth, 1).strftime("%B"))
    ax.set_title(msg)
    cmap = plt.get_cmap("RdYlGn")
    # SOI color bins from -2 to +2 in half-unit steps
    zdata = np.arange(-2.0, 2.1, 0.5)
    norm = mpcolors.BoundaryNorm(zdata, cmap.N)
    rows = []
    xs = []
    ys = []
    for year in yearly:
        x = yearly[year]['precip']
        y = np.average(yearly[year]['temp'])
        xs.append(x)
        ys.append(y)
        val = yearly[year]['nino']
        c = cmap(norm([val])[0])
        if h == 'hide' and val > -0.5 and val < 0.5:
            # de-emphasize neutral-SOI years when requested
            ax.scatter(x, y, facecolor='#EEEEEE', edgecolor='#EEEEEE', s=30,
                       zorder=2, marker='s')
        else:
            ax.scatter(x, y, facecolor=c, edgecolor='k', s=60, zorder=3,
                       marker='o')
        if year in highyears:
            ax.text(x, y + 0.2, "%s" % (year, ), ha='center', va='bottom',
                    zorder=5)
        rows.append(dict(year=year, precip=x, tmpf=y, soi3m=val))
    # crosshair at the long-term averages
    ax.axhline(np.average(ys), lw=2, color='k', linestyle='-.', zorder=2)
    ax.axvline(np.average(xs), lw=2, color='k', linestyle='-.', zorder=2)
    sm = plt.cm.ScalarMappable(norm, cmap)
    sm.set_array(zdata)
    cb = plt.colorbar(sm, extend='both')
    cb.set_label("<-- El Nino :: SOI :: La Nina -->")
    ax.grid(True)
    ax.set_xlim(left=-0.01)
    ax.set_xlabel("Total Precipitation [inch], Avg: %.2f" % (np.average(xs), ))
    ax.set_ylabel((r"Average Temperature $^\circ$F, "
                   "Avg: %.1f") % (np.average(ys), ))
    df = pd.DataFrame(rows)
    # side panel: temperature vs SOI regression
    ax2 = plt.axes([0.67, 0.6, 0.28, 0.35])
    ax2.scatter(df['soi3m'].values, df['tmpf'].values)
    ax2.set_xlabel("<-- El Nino :: SOI :: La Nina -->")
    ax2.set_ylabel(r"Avg Temp $^\circ$F")
    slp, intercept, r_value, _, _ = stats.linregress(df['soi3m'].values,
                                                     df['tmpf'].values)
    y1 = -2.0 * slp + intercept
    y2 = 2.0 * slp + intercept
    ax2.plot([-2, 2], [y1, y2])
    ax2.text(0.97, 0.9, "R$^2$=%.2f" % (r_value**2, ), ha='right',
             transform=ax2.transAxes, bbox=dict(color='white'))
    ax2.grid(True)
    # side panel: precipitation vs SOI regression
    ax3 = plt.axes([0.67, 0.1, 0.28, 0.35])
    ax3.scatter(df['soi3m'].values, df['precip'].values)
    ax3.set_xlabel("<-- El Nino :: SOI :: La Nina -->")
    ax3.set_ylabel("Total Precip [inch]")
    slp, intercept, r_value, _, _ = stats.linregress(df['soi3m'].values,
                                                     df['precip'].values)
    y1 = -2.0 * slp + intercept
    y2 = 2.0 * slp + intercept
    ax3.plot([-2, 2], [y1, y2])
    ax3.text(0.97, 0.9, "R$^2$=%.2f" % (r_value**2, ), ha='right',
             transform=ax3.transAxes, bbox=dict(color='white'))
    ax3.grid(True)
    return fig, df
def main(argv):
    """Go Main Go.

    Build a PIL mosaic of the day's NWS storm-based (TO/SV) warning
    polygons, one thumbnail per warning, annotated with size and Coverage
    Area Ratio (CAR); writes an HTML image map and pushes both products via
    pqinsert.

    Args:
        argv: [1] sort option (W/S/T) or, with 5 args,
            [1..3] year/month/day and [4] sort option.
    """
    pgconn = get_dbconn('postgis')
    pcursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    pcursor2 = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    # Preparation
    sortOpt = argv[1]
    ts = datetime.datetime.utcnow() - datetime.timedelta(hours=1)
    sts = ts.replace(tzinfo=pytz.utc, hour=0, minute=0, second=0,
                     microsecond=0)
    if len(argv) == 5:
        # explicit date provided on the command line
        sts = sts.replace(year=int(argv[1]), month=int(argv[2]),
                          day=int(argv[3]))
        sortOpt = argv[4]
    ets = sts + datetime.timedelta(hours=24)
    # per-sort-option output filename suffix and SQL ordering
    opts = {
        'W': {
            'fnadd': '-wfo',
            'sortby': 'wfo ASC, phenomena ASC, eventid ASC'
        },
        'S': {
            'fnadd': '',
            'sortby': 'size DESC'
        },
        'T': {
            'fnadd': '-time',
            'sortby': 'issue ASC'
        }
    }
    # Defaults
    thumbpx = 100
    cols = 10
    # Find largest polygon either in height or width
    # NOTE(review): SQL built via string interpolation; values come from
    # trusted local computation, but parameterized queries would be safer.
    sql = """SELECT *, ST_area2d(ST_transform(geom,2163)) as size, (ST_xmax(ST_transform(geom,2163)) - ST_xmin(ST_transform(geom,2163))) as width, (ST_ymax(ST_transform(geom,2163)) - ST_ymin(ST_transform(geom,2163))) as height from sbw_%s WHERE status = 'NEW' and issue >= '%s' and issue < '%s' and phenomena IN ('TO','SV') """ % (sts.year, sts, ets)
    pcursor.execute(sql)
    maxDimension = 0
    mybuffer = 10000
    i = 0
    torCount = 0
    torSize = 0
    svrCount = 0
    svrSize = 0
    # First pass: track the largest polygon extent and per-phenomena totals
    for row in pcursor:
        w = float(row['width'])
        h = float(row['height'])
        if w > maxDimension:
            maxDimension = w
        if h > maxDimension:
            maxDimension = h
        if row['phenomena'] == "SV":
            svrCount += 1
            svrSize += float(row['size'])
        if row['phenomena'] == "TO":
            torCount += 1
            torSize += float(row['size'])
        i += 1
    # County-based warning areas for computing the day's total CAR
    sql = """ SELECT phenomena, sum( ST_area2d(ST_transform(u.geom,2163)) ) as size from warnings_%s w JOIN ugcs u on (u.gid = w.gid) WHERE issue >= '%s' and issue < '%s' and significance = 'W' and phenomena IN ('TO','SV') GROUP by phenomena """ % (sts.year, sts, ets)
    pcursor.execute(sql)
    # NOTE(review): totalTorCar/totalSvrCar stay unbound if this query yields
    # no matching phenomena row while {tor,svr}Count > 0 -> NameError below.
    for row in pcursor:
        if row['phenomena'] == "TO":
            totalTorCar = 100.0 * (1.0 - (torSize / float(row['size'])))
        if row['phenomena'] == "SV":
            totalSvrCar = 100.0 * (1.0 - (svrSize / float(row['size'])))
    # Make mosaic image
    header = 35
    mosaic = Image.new('RGB', (thumbpx * cols,
                               ((int(i / cols) + 1) * thumbpx) + header))
    draw = ImageDraw.Draw(mosaic)
    imagemap = open('imap.txt', 'w')
    utcnow = datetime.datetime.utcnow()
    imagemap.write("<!-- %s %s -->\n" % (utcnow.strftime("%Y-%m-%d %H:%M:%S"),
                                         sortOpt))
    imagemap.write("<map name='mymap'>\n")
    # Find my polygons
    gdf = read_postgis(""" SELECT *, ST_area2d(ST_transform(geom,2163)) as size, (ST_xmax(ST_transform(geom,2163)) + ST_xmin(ST_transform(geom,2163))) /2.0 as xc, (ST_ymax(ST_transform(geom,2163)) + ST_ymin(ST_transform(geom,2163))) /2.0 as yc, ST_transform(geom, 2163) as utmgeom from sbw_""" + str(sts.year) + """ WHERE status = 'NEW' and issue >= %s and issue < %s and phenomena IN ('TO','SV') and eventid is not null ORDER by """ + opts[sortOpt]['sortby'] + """ """, pgconn, params=(sts, ets), geom_col='utmgeom', index_col=None)
    # Write metadata to image
    tmp = Image.open("logo_small.png")
    mosaic.paste(tmp, (3, 2))
    s = "IEM Summary of NWS Storm Based Warnings issued %s UTC" % (
        sts.strftime("%d %b %Y"), )
    (w, h) = FONT2.getsize(s)
    draw.text((54, 3), s, font=FONT2)
    s = "Generated: %s UTC" % (
        datetime.datetime.utcnow().strftime("%d %b %Y %H:%M:%S"), )
    draw.text((54, 3 + h), s, font=FONT10)
    if svrCount > 0:
        s = ("%3i SVR: Avg Size %5.0f km^2 CAR: %.0f%%") % (
            svrCount, (svrSize / float(svrCount)) / 1000000, totalSvrCar)
        draw.text((54 + w + 10, 8), s, font=FONT10, fill="#ffff00")
    if torCount > 0:
        s = ("%3i TOR: Avg Size %5.0f km^2 CAR: %.0f%%") % (
            torCount, (torSize / float(torCount)) / 1000000, totalTorCar)
        draw.text((54 + w + 10, 22), s, font=FONT10, fill="#ff0000")
    if pcursor.rowcount == 0:
        s = "No warnings in database for this date"
        draw.text((100, 78), s, font=FONT2, fill="#ffffff")
    i = 0
    for _, row in gdf.iterrows():
        # - Map each polygon
        # common extent for all thumbnails so sizes compare visually
        x0 = float(row['xc']) - (maxDimension / 2.0) - mybuffer
        x1 = float(row['xc']) + (maxDimension / 2.0) + mybuffer
        y0 = float(row['yc']) - (maxDimension / 2.0) - 1.75 * mybuffer
        y1 = float(row['yc']) + (maxDimension / 2.0) + 0.25 * mybuffer
        fig = plt.figure(figsize=(thumbpx / 100., thumbpx / 100.))
        ax = plt.axes([0, 0, 1, 1], facecolor='black')
        ax.set_xlim(x0, x1)
        ax.set_ylim(y0, y1)
        for poly in row['utmgeom']:
            xs, ys = poly.exterior.xy
            color = 'r' if row['phenomena'] == 'TO' else 'yellow'
            ax.plot(xs, ys, color=color, lw=2)
        # round-trip through a temp PNG to paste into the PIL mosaic
        fig.savefig('tmp.png')
        plt.close()
        my = int(i / cols) * thumbpx + header
        mx0 = (i % cols) * thumbpx
        # - Add each polygon to mosaic
        tmp = Image.open("tmp.png")
        mosaic.paste(tmp, (mx0, my))
        del tmp
        os.remove("tmp.png")
        # Compute CAR!
        sql = """ select sum(ST_area2d(ST_transform(u.geom,2163))) as csize from warnings_%s w JOIN ugcs u on (u.gid = w.gid) WHERE phenomena = '%s' and significance = '%s' and eventid = %s and w.wfo = '%s' """ % (row['issue'].year, row['phenomena'], row['significance'], row['eventid'], row['wfo'])
        pcursor2.execute(sql)
        row2 = pcursor2.fetchone()
        car = "NA"
        carColor = (255, 255, 255)
        if row2 and row2['csize'] is not None:
            csize = float(row2['csize'])
            # CAR: percent reduction of polygon area vs county-based area
            carF = 100.0 * (1.0 - (row['size'] / csize))
            car = "%.0f" % (carF, )
            if carF > 75:
                carColor = (0, 255, 0)
            if carF < 25:
                carColor = (255, 0, 0)
        # Draw Text!
        issue = row['issue']
        s = "%s.%s.%s.%s" % (row['wfo'], row['phenomena'], row['eventid'],
                             issue.strftime("%H%M"))
        # (w, h) = font10.getsize(s)
        # print s, h
        draw.text((mx0 + 2, my + thumbpx - 10), s, font=FONT10)
        s = "%.0f sq km %s%%" % (row['size'] / 1000000.0, car)
        draw.text((mx0 + 2, my + thumbpx - (20)), s, font=FONT10,
                  fill=carColor)
        # Image map
        url = ("/vtec/#%s-O-NEW-K%s-%s-%s-%04i") % (
            ts.year, row['wfo'], row['phenomena'], row['significance'],
            row['eventid'])
        altxt = "Click for text/image"
        imagemap.write(
            ("<area href=\"%s\" alt=\"%s\" title=\"%s\" "
             "shape=\"rect\" coords=\"%s,%s,%s,%s\">\n"
             ) % (url, altxt, altxt, mx0, my, mx0 + thumbpx, my + thumbpx))
        i += 1
    # horizontal separators between thumbnail rows
    for i in range(len(gdf.index)):
        my = int(i / cols) * thumbpx + header
        mx0 = (i % cols) * thumbpx
        if mx0 == 0:
            draw.line(
                (0, my + thumbpx + 2, (thumbpx * cols), my + thumbpx + 2),
                (0, 120, 200))
    mosaic.save("test.png")
    del mosaic
    imagemap.write("</map>")
    imagemap.close()
    # push both products to LDM, then clean up local files
    cmd = ("/home/ldm/bin/pqinsert -p "
           "'plot a %s0000 blah sbwsum%s.png png' test.png") % (
        sts.strftime("%Y%m%d"), opts[sortOpt]['fnadd'])
    subprocess.call(cmd, shell=True)
    cmd = ("/home/ldm/bin/pqinsert -p "
           "'plot a %s0000 blah sbwsum-imap%s.txt txt' imap.txt") % (
        sts.strftime("%Y%m%d"), opts[sortOpt]['fnadd'])
    subprocess.call(cmd, shell=True)
    os.remove("test.png")
    os.remove("imap.txt")
def plotter(fdict):
    """Go.

    Quantile-Quantile comparison of a temperature variable between two
    month/period selections: QQ scatter with regression fit, back-to-back
    violin distributions, and a text table of selected percentiles.

    Bug fix: the suptitle previously paired each month label with the other
    month's year range (month2 with y1-y2, month1 with y3-y4); the axis
    labels show the correct pairing (month1:y1-y2, month2:y3-y4), so the
    title now matches.

    Args:
        fdict: autoplot form dictionary.

    Returns:
        (matplotlib Figure, pandas DataFrame of percentile columns)
    """
    pgconn = get_dbconn("coop")
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx["station"]
    month1 = ctx["month1"]
    month2 = ctx["month2"]
    highlight = ctx["highlight"]
    varname = ctx["var"]
    p1 = ctx.get("p1")
    p2 = ctx.get("p2")
    days = ctx["days"]
    opt = ctx["opt"]
    table = "alldata_%s" % (station[:2], )
    # month1 observations cover years y1-y2; month2 observations y3-y4
    m1data, y1, y2 = get_data(pgconn, table, station, month1, p1, varname,
                              days, opt)
    m2data, y3, y4 = get_data(pgconn, table, station, month2, p2, varname,
                              days, opt)
    # percentiles 0..100 for each distribution
    pc1 = np.percentile(m1data, range(0, 101, 1))
    pc2 = np.percentile(m2data, range(0, 101, 1))
    df = pd.DataFrame({
        "%s_%s_%s_%s" % (MDICT[month1], varname, y1, y2): pd.Series(pc1),
        "%s_%s_%s_%s" % (MDICT[month2], varname, y3, y4): pd.Series(pc2),
        "quantile": pd.Series(range(0, 101, 5)),
    })
    s_slp, s_int, s_r, _, _ = stats.linregress(pc1, pc2)
    fig = plt.gcf()
    fig.set_size_inches(10.24, 7.68)
    # First panel: QQ scatter (every 5th percentile) plus linear fit
    ax = plt.axes([0.1, 0.11, 0.4, 0.76])
    ax.scatter(pc1[::5], pc2[::5], s=40, marker="s", color="b", zorder=3)
    ax.plot(
        pc1,
        pc1 * s_slp + s_int,
        lw=3,
        color="r",
        zorder=2,
        label=r"Fit R$^2$=%.2f" % (s_r**2, ),
    )
    # crosshair through the highlighted temperature and its fitted value
    ax.axvline(highlight, zorder=1, color="k")
    y = highlight * s_slp + s_int
    ax.axhline(y, zorder=1, color="k")
    ax.text(
        pc1[0],
        y,
        r"%.0f $^\circ$F" % (y, ),
        va="center",
        bbox=dict(color="white"),
    )
    ax.text(
        highlight,
        pc2[0],
        r"%.0f $^\circ$F" % (highlight, ),
        ha="center",
        rotation=90,
        bbox=dict(color="white"),
    )
    t2 = PDICT[varname]
    if days > 1:
        t2 = "%s %s over %s days" % (ODICT[opt], PDICT[varname], days)
    # FIX: pair month2 with y3-y4 and month1 with y1-y2 (was swapped)
    fig.suptitle(("[%s] %s\n%s (%s-%s) vs %s (%s-%s)\n%s") % (
        station,
        ctx["_nt"].sts[station]["name"],
        MDICT[month2],
        y3,
        y4,
        MDICT[month1],
        y1,
        y2,
        t2,
    ))
    ax.set_xlabel(r"%s (%s-%s) %s $^\circ$F" % (MDICT[month1], y1, y2,
                                                PDICT[varname]))
    ax.set_ylabel(r"%s (%s-%s) %s $^\circ$F" % (MDICT[month2], y3, y4,
                                                PDICT[varname]))
    ax.text(
        0.95,
        0.05,
        "Quantile - Quantile Plot",
        transform=ax.transAxes,
        ha="right",
    )
    ax.grid(True)
    ax.legend(loc=2)
    # Second panel: back-to-back half violins of both distributions
    ax = plt.axes([0.55, 0.18, 0.27, 0.68])
    ax.set_title("Distribution")
    v1 = ax.violinplot(m1data, positions=[0], showextrema=True,
                       showmeans=True)
    b = v1["bodies"][0]
    m = np.mean(b.get_paths()[0].vertices[:, 0])
    # clip month1's violin to its left half (red)
    b.get_paths()[0].vertices[:, 0] = np.clip(b.get_paths()[0].vertices[:, 0],
                                              -np.inf, m)
    b.set_color("r")
    for lbl in ["cmins", "cmeans", "cmaxes"]:
        v1[lbl].set_color("r")
    v1 = ax.violinplot(m2data, positions=[0], showextrema=True,
                       showmeans=True)
    b = v1["bodies"][0]
    m = np.mean(b.get_paths()[0].vertices[:, 0])
    # clip month2's violin to its right half (blue)
    b.get_paths()[0].vertices[:, 0] = np.clip(b.get_paths()[0].vertices[:, 0],
                                              m, np.inf)
    b.set_color("b")
    for lbl in ["cmins", "cmeans", "cmaxes"]:
        v1[lbl].set_color("b")
    # proxy artists for the legend
    pr0 = plt.Rectangle((0, 0), 1, 1, fc="r")
    pr1 = plt.Rectangle((0, 0), 1, 1, fc="b")
    ax.legend(
        (pr0, pr1),
        (
            r"%s (%s-%s), $\mu$=%.1f" % (MDICT[month1], y1, y2,
                                         np.mean(m1data)),
            r"%s (%s-%s), $\mu$=%.1f" % (MDICT[month2], y3, y4,
                                         np.mean(m2data)),
        ),
        ncol=1,
        loc=(0.5, -0.15),
    )
    ax.set_ylabel(r"%s $^\circ$F" % (PDICT[varname], ))
    ax.grid()
    # Third panel: monospace text table of selected percentiles
    monofont = FontProperties(family="monospace")
    y = 0.86
    x = 0.83
    col1 = "%s_%s_%s_%s" % (MDICT[month1], varname, y1, y2)
    col2 = "%s_%s_%s_%s" % (MDICT[month2], varname, y3, y4)
    fig.text(x, y + 0.04, "Percentile Data Diff")
    for percentile in [
            100, 99, 98, 97, 96, 95, 92, 90, 75, 50, 25, 10, 8, 5, 4, 3, 2,
            1,
    ]:
        row = df.loc[percentile]
        fig.text(x, y, "%3i" % (percentile, ), fontproperties=monofont)
        fig.text(
            x + 0.025,
            y,
            "%5.1f" % (row[col1], ),
            fontproperties=monofont,
            color="r",
        )
        fig.text(
            x + 0.07,
            y,
            "%5.1f" % (row[col2], ),
            fontproperties=monofont,
            color="b",
        )
        fig.text(
            x + 0.11,
            y,
            "%5.1f" % (row[col2] - row[col1], ),
            fontproperties=monofont,
        )
        y -= 0.04
    return fig, df
def plotter(fdict):
    """Go.

    Matplotlib re-implementation of the storm-based warning mosaic: one
    thumbnail axes per warning polygon for the chosen date and warning type,
    plus summary text and an HTML image map returned alongside the figure.

    Args:
        fdict: autoplot form dictionary (typ, sort, date).

    Returns:
        (matplotlib Figure, pandas DataFrame without geometry, image map str)
    """
    ctx = get_autoplot_context(fdict, get_description())
    typ = ctx["typ"]
    sort = ctx["sort"]
    date = ctx["date"]
    pgconn = get_dbconn("postgis")
    sts = utc(date.year, date.month, date.day)
    ets = sts + datetime.timedelta(hours=24)
    # per-sort-option filename suffix and SQL ordering
    opts = {
        "W": {
            "fnadd": "-wfo",
            "sortby": "wfo ASC, phenomena ASC, eventid ASC",
        },
        "S": {
            "fnadd": "",
            "sortby": "size DESC"
        },
        "T": {
            "fnadd": "-time",
            "sortby": "issue ASC"
        },
    }
    # warning type -> VTEC phenomena codes
    phenoms = {"W": ["TO", "SV"], "F": ["FF"], "M": ["MA"]}
    # Defaults
    thumbpx = 100
    cols = 10
    mybuffer = 10000
    header = 35
    # Find largest polygon either in height or width
    gdf = read_postgis(
        """ SELECT wfo, phenomena, eventid, issue, ST_area2d(ST_transform(geom,2163)) as size, (ST_xmax(ST_transform(geom,2163)) + ST_xmin(ST_transform(geom,2163))) /2.0 as xc, (ST_ymax(ST_transform(geom,2163)) + ST_ymin(ST_transform(geom,2163))) /2.0 as yc, ST_transform(geom, 2163) as utmgeom, (ST_xmax(ST_transform(geom,2163)) - ST_xmin(ST_transform(geom,2163))) as width, (ST_ymax(ST_transform(geom,2163)) - ST_ymin(ST_transform(geom,2163))) as height from sbw_""" + str(sts.year) + """ WHERE status = 'NEW' and issue >= %s and issue < %s and phenomena IN %s and eventid is not null ORDER by """ + opts[sort]["sortby"] + """ """,
        pgconn,
        params=(sts, ets, tuple(phenoms[typ])),
        geom_col="utmgeom",
        index_col=None,
    )
    # For size reduction work
    df = read_sql(
        """ SELECT w.wfo, phenomena, eventid, sum(ST_area2d(ST_transform(u.geom,2163))) as county_size from warnings_""" + str(sts.year) + """ w JOIN ugcs u on (u.gid = w.gid) WHERE issue >= %s and issue < %s and significance = 'W' and phenomena IN %s GROUP by w.wfo, phenomena, eventid """,
        pgconn,
        params=(sts, ets, tuple(phenoms[typ])),
        index_col=["wfo", "phenomena", "eventid"],
    )
    # Join the columns
    gdf = gdf.merge(df, on=["wfo", "phenomena", "eventid"])
    # CAR percentage: polygon area reduction vs county-based area
    gdf["ratio"] = (1.0 - (gdf["size"] / gdf["county_size"])) * 100.0
    # Make mosaic image
    events = len(df.index)
    rows = int(events / cols) + 1
    if events % cols == 0:
        rows -= 1
    if rows == 0:
        rows = 1
    ypixels = (rows * thumbpx) + header
    fig = plt.figure(figsize=(thumbpx * cols / 100.0, ypixels / 100.0))
    plt.axes([0, 0, 1, 1], facecolor="black")
    imagemap = StringIO()
    utcnow = utc()
    imagemap.write("<!-- %s %s -->\n" %
                   (utcnow.strftime("%Y-%m-%d %H:%M:%S"), sort))
    imagemap.write("<map name='mymap'>\n")
    # Write metadata to image
    mydir = os.sep.join(
        [os.path.dirname(os.path.abspath(__file__)), "../../../images"])
    logo = mpimage.imread("%s/logo_reallysmall.png" % (mydir, ))
    y0 = fig.get_figheight() * 100.0 - logo.shape[0] - 5
    fig.figimage(logo, 5, y0, zorder=3)
    i = 0
    # amount of NDC y space we have for axes plotting
    ytop = 1 - header / float((rows * 100) + header)
    dy = ytop / float(rows)
    ybottom = ytop
    # Sumarize totals
    y = ytop
    dy2 = (1.0 - ytop) / 2.0
    for phenomena, df2 in gdf.groupby("phenomena"):
        car = (1.0 - df2["size"].sum() / df2["county_size"].sum()) * 100.0
        fitbox(
            fig,
            ("%i %s.W: Avg size %5.0f km^2 CAR: %.0f%%") %
            (len(df2.index), phenomena, df2["size"].mean() / 1e6, car),
            0.8,
            0.99,
            y,
            y + dy2,
            color=COLORS[phenomena],
        )
        y += dy2
    fitbox(
        fig,
        "NWS %s Storm Based Warnings issued %s UTC" % (
            " + ".join([VTEC_PHENOMENA[p] for p in phenoms[typ]]),
            sts.strftime("%d %b %Y"),
        ),
        0.05,
        0.79,
        ytop + dy2,
        0.999,
        color="white",
    )
    fitbox(
        fig,
        "Generated: %s UTC, IEM Autplot #203" %
        (utcnow.strftime("%d %b %Y %H:%M:%S"), ),
        0.05,
        0.79,
        ytop,
        0.999 - dy2,
        color="white",
    )
    # We want to reserve 14pts at the bottom and buffer the plot by 10km
    # so we compute this in the y direction, since it limits us
    max_dimension = max([gdf["width"].max(), gdf["height"].max()])
    yspacing = max_dimension / 2.0 + mybuffer
    xspacing = yspacing * 1.08  # approx
    for _, row in gdf.iterrows():
        # - Map each polygon
        # shared extent for all thumbnails so polygon sizes compare visually
        x0 = float(row["xc"]) - xspacing
        x1 = float(row["xc"]) + xspacing
        y0 = float(row["yc"]) - yspacing - (yspacing * 0.14)
        y1 = float(row["yc"]) + yspacing - (yspacing * 0.14)
        col = i % 10
        if col == 0:
            # start a new thumbnail row
            ybottom -= dy
        ax = plt.axes(
            [col * 0.1, ybottom, 0.1, dy],
            facecolor="black",
            xticks=[],
            yticks=[],
            aspect="auto",
        )
        for x in ax.spines:
            ax.spines[x].set_visible(False)
        ax.set_xlim(x0, x1)
        ax.set_ylim(y0, y1)
        for poly in row["utmgeom"]:
            xs, ys = poly.exterior.xy
            color = COLORS[row["phenomena"]]
            ax.plot(xs, ys, color=color, lw=2)
        car = "NA"
        carColor = "white"
        if not pd.isnull(row["ratio"]):
            carf = row["ratio"]
            car = "%.0f" % (carf, )
            if carf > 75:
                carColor = "green"
            if carf < 25:
                carColor = "red"
        # Draw Text!
        issue = row["issue"]
        s = "%s.%s.%s.%s" % (
            row["wfo"],
            row["phenomena"],
            row["eventid"],
            issue.strftime("%H%M"),
        )
        # (w, h) = font10.getsize(s)
        # print s, h
        ax.text(
            0,
            0,
            s,
            transform=ax.transAxes,
            color="white",
            va="bottom",
            fontsize=7,
        )
        s = "%.0f sq km %s%%" % (row["size"] / 1000000.0, car)
        ax.text(
            0,
            0.1,
            s,
            transform=ax.transAxes,
            color=carColor,
            va="bottom",
            fontsize=7,
        )
        # Image map
        url = ("/vtec/#%s-O-NEW-K%s-%s-%s-%04i") % (
            sts.year,
            row["wfo"],
            row["phenomena"],
            "W",
            row["eventid"],
        )
        altxt = "Click for text/image"
        pos = ax.get_position()
        mx0 = pos.x0 * 1000.0
        my = (1.0 - pos.y1) * ypixels
        imagemap.write(
            ('<area href="%s" alt="%s" title="%s" '
             'shape="rect" coords="%.0f,%.0f,%.0f,%.0f">\n') %
            (url, altxt, altxt, mx0, my, mx0 + thumbpx, my + thumbpx))
        i += 1
    # blue separators between thumbnail rows, drawn on an overlay axes
    faux = plt.axes([0, 0, 1, 1], facecolor="None", zorder=100)
    for i in range(1, rows):
        faux.axhline(i * dy, lw=1.0, color="blue")
    imagemap.write("</map>")
    imagemap.seek(0)
    if gdf.empty:
        fitbox(fig, "No warnings Found!", 0.2, 0.8, 0.2, 0.5, color="white")
    # drop geometry so the DataFrame is serializable for download
    df = gdf.drop("utmgeom", axis=1)
    return fig, df, imagemap.read()
def plotter(fdict):
    """Go.

    Plot, per high-temperature threshold, the average period of the year
    between the last spring and first fall occurrence of that high, with a
    side panel showing the period length in days.

    Fix: the y-axis label used a non-raw string containing ``\\c`` (an
    invalid escape sequence, a SyntaxWarning and future error); it is now a
    raw string like the file's other mathtext labels.  The rendered text is
    unchanged.

    Args:
        fdict: autoplot form dictionary.

    Returns:
        (matplotlib Figure, pandas DataFrame indexed by temperature)

    Raises:
        NoDataFound: when the station query returns no rows.
    """
    pgconn = get_dbconn('coop')
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['station']
    # per-state long-term climate table, e.g. alldata_ia
    table = "alldata_%s" % (station[:2], )
    nt = network.Table("%sCLIMATE" % (station[:2], ))
    today = datetime.datetime.now()
    thisyear = today.year
    # generate_series expands each day into one row per degree up to its
    # high, so min/max(doy) per (year, t) bracket the season for threshold t
    df = read_sql(""" with data as ( select year, month, extract(doy from day) as doy, generate_series(32, high) as t from """ + table + """ where station = %s and year < %s), agger as ( SELECT year, t, min(doy), max(doy) from data GROUP by year, t) SELECT t as tmpf, avg(min) as min_jday, avg(max) as max_jday from agger GROUP by t ORDER by t ASC """, pgconn, params=(station, thisyear), index_col='tmpf')
    if df.empty:
        raise NoDataFound("No Data Found.")
    fig = plt.figure(figsize=(8, 6))
    ax = plt.axes([0.1, 0.1, 0.7, 0.8])
    ax2 = plt.axes([0.81, 0.1, 0.15, 0.8])
    # length of the wrap-around period (fall through next spring)
    height = df['min_jday'][:] + 365. - df['max_jday']
    ax2.plot(height, df.index.values)
    ax2.set_xticks([30, 90, 180, 365])
    plt.setp(ax2.get_yticklabels(), visible=False)
    ax2.set_ylim(32, df.index.values.max() + 5)
    ax2.grid(True)
    ax2.text(0.96, 0.02, "Days", transform=ax2.transAxes,
             bbox=dict(color='white'), ha='right')
    ax.text(0.96, 0.02, "Period", transform=ax.transAxes,
            bbox=dict(color='white'), ha='right')
    ax.set_ylim(32, df.index.values.max() + 5)
    ax.barh(df.index.values - 0.5, height, left=df['max_jday'].values,
            ec='tan', fc='tan', height=1.1)
    # two calendar years of month ticks so the wrap-around bar reads cleanly
    days = np.array([1, 32, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335])
    days = np.concatenate([days, days + 365])
    ax.set_xticks(days)
    months = calendar.month_abbr[1:] + calendar.month_abbr[1:]
    ax.set_xticklabels(months)
    # raw string: avoids the invalid "\c" escape warning
    ax.set_ylabel(r"High Temperature $^\circ$F")
    ax.set_xlim(min(df['max_jday']) - 1, max(df['max_jday'] + height) + 1)
    ax.grid(True)
    msg = ("[%s] %s Period Between Average Last and "
           "First High Temperature of Year") % (station,
                                                nt.sts[station]['name'])
    # split the long title roughly in half across two lines
    tokens = msg.split()
    sz = int(len(tokens) / 2)
    ax.set_title(" ".join(tokens[:sz]) + "\n" + " ".join(tokens[sz:]))
    return fig, df
def plotter(fdict):
    """Plot the top-10 N-day events (precip total / temperature extreme).

    Uses SQL window functions over the daily climate table to aggregate
    sliding N-day windows, ranks them by the requested variable, and renders
    a horizontal bar chart with period labels on a twin axis.

    Args:
        fdict: autoplot CGI form dictionary.

    Returns:
        (matplotlib.figure.Figure, pandas.DataFrame)

    Raises:
        ValueError: when the query returns no rows.
    """
    pgconn = get_dbconn('coop')
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['station']
    network = ctx['network']
    month = ctx['month']
    varname = ctx['var']
    days = ctx['days']
    nt = NetworkTable(network)
    # per-state long-term climate table
    table = "alldata_%s" % (station[:2], )
    if month == 'all':
        months = range(1, 13)
    elif month == 'fall':
        months = [9, 10, 11]
    elif month == 'winter':
        months = [12, 1, 2]
    elif month == 'spring':
        months = [3, 4, 5]
    elif month == 'summer':
        months = [6, 7, 8]
    elif month == 'octmar':
        months = [10, 11, 12, 1, 2, 3]
    else:
        ts = datetime.datetime.strptime("2000-" + month + "-01", '%Y-%b-%d')
        # make sure it is length two for the trick below in SQL
        months = [ts.month, 999]
    # min_greatest_low ranks ascending (coldest warm-lows first)
    sorder = 'ASC' if varname in [
        'min_greatest_low',
    ] else 'DESC'
    # window frame of (days-1) preceding rows + current row = N-day window;
    # count = %s guards against windows spanning data gaps
    df = read_sql("""WITH data as (
        SELECT month, day, day - '%s days'::interval as start_date,
        count(*) OVER (ORDER by day ASC ROWS BETWEEN %s preceding
            and current row) as count,
        sum(precip) OVER (ORDER by day ASC ROWS BETWEEN %s preceding
            and current row) as total_precip,
        min(high) OVER (ORDER by day ASC ROWS BETWEEN %s preceding
            and current row) as max_least_high,
        max(low) OVER (ORDER by day ASC ROWS BETWEEN %s preceding
            and current row) as min_greatest_low
        from """ + table + """ WHERE station = %s)
        SELECT day as end_date, start_date, """ + varname + """ from data WHERE
        month in %s and extract(month from start_date) in %s and count = %s
        ORDER by """ + varname + """ """ + sorder + """ LIMIT 10
        """, pgconn, params=(days - 1, days - 1, days - 1, days - 1, days - 1,
                             station, tuple(months), tuple(months), days),
                  index_col=None)
    if df.empty:
        raise ValueError('Error, no results returned!')
    ylabels = []
    # two decimals for precip, whole numbers for temperatures
    fmt = '%.2f' if varname in [
        'total_precip',
    ] else '%.0f'
    for _, row in df.iterrows():
        # no strftime support for old days, so we hack at it
        lbl = fmt % (row[varname], )
        if days > 1:
            sts = row['end_date'] - datetime.timedelta(days=(days - 1))
            if sts.month == row['end_date'].month:
                # same-month window: "Mon d1-d2, year"
                lbl += " -- %s %s-%s, %s" % (calendar.month_abbr[sts.month],
                                             sts.day, row['end_date'].day,
                                             sts.year)
            else:
                # window crosses a month boundary: spell out both endpoints
                lbl += " -- %s %s, %s to\n %s %s, %s" % (
                    calendar.month_abbr[sts.month], sts.day, sts.year,
                    calendar.month_abbr[row['end_date'].month],
                    row['end_date'].day, row['end_date'].year)
        else:
            lbl += " -- %s %s, %s" % (
                calendar.month_abbr[row['end_date'].month],
                row['end_date'].day, row['end_date'].year)
        ylabels.append(lbl)
    ax = plt.axes([0.1, 0.1, 0.5, 0.8])
    plt.gcf().set_size_inches(8, 6)
    # rank #1 drawn at the top via the reversed y positions
    ax.barh(range(10, 0, -1), df[varname], ec='green', fc='green',
            height=0.8, align='center')
    ax2 = ax.twinx()
    ax2.set_ylim(0.5, 10.5)
    ax.set_ylim(0.5, 10.5)
    ax2.set_yticks(range(1, 11))
    ax.set_yticks(range(1, 11))
    ax.set_yticklabels(["#%s" % (x, ) for x in range(1, 11)][::-1])
    ax2.set_yticklabels(ylabels[::-1])
    ax.grid(True, zorder=11)
    ax.set_xlabel(("Precipitation [inch]" if varname in ['total_precip']
                   else r'Temperature $^\circ$F'))
    ax.set_title(("%s [%s] Top 10 Events\n"
                  "%s [days=%s] (%s) "
                  "(%s-%s)") % (nt.sts[station]['name'], station,
                                METRICS[varname], days, MDICT[month],
                                nt.sts[station]['archive_begin'].year,
                                datetime.datetime.now().year), size=12)
    return plt.gcf(), df
def plotter(fdict):
    """Plot the top-10 hourly observations for the requested variable.

    Queries the ASOS archive ordered by the requested variable, de-duplicates
    observations within the same local hour, assigns ties the same rank, and
    renders a horizontal bar chart with timestamp labels on a twin axis.

    Bug fixed: the SQL previously hard-coded column ``p01i`` while ordering by
    and reading ``dbvar`` (derived from ``varname``), so any non-precip
    variable failed; the variable column is now interpolated consistently,
    matching the modernized sibling implementation.

    Args:
        fdict: autoplot CGI form dictionary.

    Returns:
        (matplotlib.figure.Figure, pandas.DataFrame)

    Raises:
        ValueError: when the query returns no rows.
    """
    pgconn = get_dbconn('asos')
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['zstation']
    network = ctx['network']
    month = ctx['month']
    varname = ctx['var']
    nt = NetworkTable(network)
    if month == 'all':
        months = range(1, 13)
    elif month == 'fall':
        months = [9, 10, 11]
    elif month == 'winter':
        months = [12, 1, 2]
    elif month == 'spring':
        months = [3, 4, 5]
    elif month == 'summer':
        months = [6, 7, 8]
    elif month == 'octmar':
        months = [10, 11, 12, 1, 2, 3]
    else:
        ts = datetime.datetime.strptime("2000-" + month + "-01", '%Y-%b-%d')
        # make sure it is length two for the trick below in SQL
        months = [ts.month, 999]
    # varname is of the form "<agg>_<column>", e.g. "max_p01i"
    (agg, dbvar) = varname.split("_")
    sorder = 'DESC' if agg == 'max' else 'ASC'
    # select dbvar (not hard-coded p01i) so ORDER BY and row[dbvar] resolve
    df = read_sql("""WITH data as (
        SELECT valid at time zone %s as v, """ + dbvar + """ from alldata
        WHERE station = %s and
        extract(month from valid at time zone %s) in %s)
    SELECT v as valid, """ + dbvar + """ from data
    ORDER by """ + dbvar + """ """ + sorder + """ NULLS LAST LIMIT 100
    """, pgconn, params=(nt.sts[station]['tzname'], station,
                         nt.sts[station]['tzname'], tuple(months)),
                  index_col=None)
    if df.empty:
        raise ValueError('Error, no results returned!')
    ylabels = []
    # two decimals for precip, whole numbers for temperatures
    fmt = '%.2f' if varname in [
        'max_p01i',
    ] else '%.0f'
    hours = []
    y = []
    lastval = -99
    ranks = []
    currentrank = 0
    rows2keep = []
    for idx, row in df.iterrows():
        # keep only the first (most extreme) ob within each local hour
        key = row['valid'].strftime("%Y%m%d%H")
        if key in hours:
            continue
        rows2keep.append(idx)
        hours.append(key)
        y.append(row[dbvar])
        lbl = fmt % (row[dbvar], )
        lbl += " -- %s" % (row['valid'].strftime("%b %d, %Y %-I:%M %p"), )
        ylabels.append(lbl)
        # ties share the same rank number
        if row[dbvar] != lastval:
            currentrank += 1
        ranks.append(currentrank)
        lastval = row[dbvar]
        if len(ylabels) == 10:
            break
    fig = plt.figure(figsize=(8, 6))
    ax = plt.axes([0.1, 0.1, 0.5, 0.8])
    # rank #1 drawn at the top via the reversed y positions
    ax.barh(range(10, 0, -1), y, ec='green', fc='green',
            height=0.8, align='center')
    ax2 = ax.twinx()
    ax2.set_ylim(0.5, 10.5)
    ax.set_ylim(0.5, 10.5)
    ax2.set_yticks(range(1, 11))
    ax.set_yticks(range(1, 11))
    ax.set_yticklabels(["#%s" % (x, ) for x in ranks][::-1])
    ax2.set_yticklabels(ylabels[::-1])
    ax.grid(True, zorder=11)
    ax.set_xlabel(("Precipitation [inch]" if varname in ['max_p01i']
                   else r"Temperature $^\circ$F"))
    ax.set_title(("%s [%s] Top 10 Events\n"
                  "%s (%s) "
                  "(%s-%s)") % (nt.sts[station]['name'], station,
                                METRICS[varname], MDICT[month],
                                nt.sts[station]['archive_begin'].year,
                                datetime.datetime.now().year), size=12)
    fig.text(0.98, 0.03, "Timezone: %s" % (nt.sts[station]['tzname'], ),
             ha='right')
    return fig, df.loc[rows2keep]
def plotter(fdict):
    """Plot percentile ranks of a requested sounding vs the period of record.

    For each mandatory pressure level, ranks the requested sounding's value
    (temperature, dew point, height, or wind speed) against all soundings for
    that station+hour (optionally restricted to the same month) and draws the
    percentiles as horizontal bars with the ob / min / max on a twin axis.

    Args:
        fdict: autoplot CGI form dictionary.

    Returns:
        (matplotlib.figure.Figure, pandas.DataFrame)

    Raises:
        NoDataFound: for unknown station metadata or a missing sounding.
    """
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx["station"]
    if station not in ctx["_nt"].sts:  # This is needed.
        raise NoDataFound("Unknown station metadata.")
    varname = ctx["var"]
    ts = ctx["date"]
    hour = int(ctx["hour"])
    ts = utc(ts.year, ts.month, ts.day, hour)
    which = ctx["which"]
    vlimit = ""
    if which == "month":
        vlimit = (" and extract(month from f.valid) = %s ") % (ts.month, )
    name = ctx["_nt"].sts[station]["name"]
    stations = [station]
    # virtual stations ("_XXX") carry "label -- id1 id2 ..." in their name
    if station.startswith("_"):
        name = ctx["_nt"].sts[station]["name"].split("--")[0]
        stations = (
            ctx["_nt"].sts[station]["name"].split("--")[1].strip().split(" "))
    pgconn = get_dbconn("postgis")
    # window functions rank every profile value within its pressure level
    df = read_sql(
        """
    with data as (
        select f.valid, p.pressure, count(*) OVER (PARTITION by p.pressure),
        min(valid at time zone 'UTC') OVER () as min_valid,
        max(valid at time zone 'UTC') OVER () as max_valid,
        p.tmpc,
        rank() OVER (PARTITION by p.pressure ORDER by p.tmpc ASC) as tmpc_rank,
        min(p.tmpc) OVER (PARTITION by p.pressure) as tmpc_min,
        max(p.tmpc) OVER (PARTITION by p.pressure) as tmpc_max,
        p.dwpc,
        rank() OVER (PARTITION by p.pressure ORDER by p.dwpc ASC) as dwpc_rank,
        min(p.dwpc) OVER (PARTITION by p.pressure) as dwpc_min,
        max(p.dwpc) OVER (PARTITION by p.pressure) as dwpc_max,
        p.height as hght,
        rank() OVER (
            PARTITION by p.pressure ORDER by p.height ASC) as hght_rank,
        min(p.height) OVER (PARTITION by p.pressure) as hght_min,
        max(p.height) OVER (PARTITION by p.pressure) as hght_max,
        p.smps,
        rank() OVER (PARTITION by p.pressure ORDER by p.smps ASC) as smps_rank,
        min(p.smps) OVER (PARTITION by p.pressure) as smps_min,
        max(p.smps) OVER (PARTITION by p.pressure) as smps_max
        from raob_flights f JOIN raob_profile p on (f.fid = p.fid)
        WHERE f.station in %s and
        extract(hour from f.valid at time zone 'UTC') = %s
        """ + vlimit + """
        and p.pressure in (925, 850, 700, 500, 400, 300, 250, 200,
        150, 100, 70, 50, 10))

    select * from data where valid = %s ORDER by pressure DESC
    """,
        pgconn,
        params=(tuple(stations), hour, ts),
        index_col="pressure",
    )
    if df.empty:
        raise NoDataFound(("Sounding for %s was not found!")
                          % (ts.strftime("%Y-%m-%d %H:%M"), ))
    df = df.drop("valid", axis=1)
    for key in PDICT3.keys():
        # rank/count -> percentile within the period of record
        df[key + "_percentile"] = df[key + "_rank"] / df["count"] * 100.0
        # manual hackery to get 0 and 100th percentile
        df.loc[df[key] == df[key + "_max"], key + "_percentile"] = 100.0
        df.loc[df[key] == df[key + "_min"], key + "_percentile"] = 0.0
    ax = plt.axes([0.1, 0.12, 0.65, 0.75])
    bars = ax.barh(range(len(df.index)), df[varname + "_percentile"],
                   align="center")
    y2labels = []
    # heights are whole meters; everything else gets one decimal
    fmt = "%.1f" if varname not in ["hght"] else "%.0f"
    for i, mybar in enumerate(bars):
        ax.text(
            mybar.get_width() + 1,
            i,
            "%.1f" % (mybar.get_width(), ),
            va="center",
            bbox=dict(color="white"),
        )
        y2labels.append((fmt + " (" + fmt + " " + fmt + ")") % (
            df.iloc[i][varname],
            df.iloc[i][varname + "_min"],
            df.iloc[i][varname + "_max"],
        ))
    ax.set_yticks(range(len(df.index)))
    ax.set_yticklabels(["%.0f" % (a, ) for a in df.index.values])
    ax.set_ylim(-0.5, len(df.index) - 0.5)
    ax.set_xlabel("Percentile [100 = highest]")
    ax.set_ylabel("Mandatory Pressure Level (hPa)")
    plt.gcf().text(
        0.5,
        0.9,
        ("%s %s %s Sounding\n"
         "(%s-%s) Percentile Ranks (%s) for %s at %sz") % (
            station, name, ts.strftime("%Y/%m/%d %H UTC"),
            pd.Timestamp(df.iloc[0]["min_valid"]).year,
            pd.Timestamp(df.iloc[0]["max_valid"]).year,
            ("All Year" if which == "none"
             else calendar.month_name[ts.month]),
            PDICT3[varname], hour,
        ),
        ha="center",
        va="bottom",
    )
    ax.grid(True)
    ax.set_xticks([0, 5, 10, 25, 50, 75, 90, 95, 100])
    # leave headroom past 100 so the text annotations fit
    ax.set_xlim(0, 110)
    ax.text(1.02, 1, "Ob (Min Max)", transform=ax.transAxes)
    ax2 = ax.twinx()
    ax2.set_ylim(-0.5, len(df.index) - 0.5)
    ax2.set_yticks(range(len(df.index)))
    ax2.set_yticklabels(y2labels)
    return plt.gcf(), df
def plotter(fdict):
    """Plot the yearly period between first and last VTEC product issuance.

    For each (split-adjusted) year, finds the first and last issuance of the
    given phenomena/significance for a WFO or state, draws the span as
    horizontal bars with average start/end markers, and adds a side panel of
    yearly event counts.

    Args:
        fdict: autoplot CGI form dictionary.

    Returns:
        (matplotlib.figure.Figure, pandas.DataFrame)

    Raises:
        ValueError: when the query returns no rows.
    """
    pgconn = get_dbconn('postgis')
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['station'][:4]
    phenomena = ctx['phenomena']
    significance = ctx['significance']
    split = ctx['split']
    opt = ctx['opt']
    state = ctx['state']
    nt = NetworkTable('WFO')
    wfolimiter = " wfo = '%s' " % (station, )
    if opt == 'state':
        # first two UGC chars are the state abbreviation
        wfolimiter = " substr(ugc, 1, 2) = '%s' " % (state, )
    if split == 'jan1':
        sql = """SELECT extract(year from issue)::int as year,
        min(issue at time zone 'UTC') as min_issue,
        max(issue at time zone 'UTC') as max_issue,
        count(distinct wfo || eventid)
        from warnings where """ + wfolimiter + """
        and phenomena = %s and significance = %s
        GROUP by year ORDER by year ASC"""
    else:
        # "year" runs Jul-Jun by shifting issuance back six months
        sql = """SELECT
        extract(year from issue - '6 months'::interval)::int as year,
        min(issue at time zone 'UTC') as min_issue,
        max(issue at time zone 'UTC') as max_issue,
        count(distinct wfo || eventid)
        from warnings where """ + wfolimiter + """
        and phenomena = %s and significance = %s
        GROUP by year ORDER by year ASC"""
    df = read_sql(sql, pgconn, params=(phenomena, significance),
                  index_col=None)
    if df.empty:
        raise ValueError("No data found for query")
    # Since many VTEC events start in 2005, we should not trust any
    # data that has its first year in 2005
    if df['year'].min() == 2005:
        df = df[df['year'] > 2005]

    def myfunc(row):
        """Day-of-year for a timestamp, extended past 365 when it falls in
        the calendar year after the (split-adjusted) label year."""
        year = row[0]
        valid = row[1]
        if year == valid.year:
            return int(valid.strftime("%j"))
        else:
            # add the length of the label year so the doy keeps increasing
            days = (datetime.date(year + 1, 1, 1) -
                    datetime.date(year, 1, 1)).days
            return int(valid.strftime("%j")) + days

    df['startdoy'] = df[['year', 'min_issue']].apply(myfunc, axis=1)
    df['enddoy'] = df[['year', 'max_issue']].apply(myfunc, axis=1)
    df.set_index('year', inplace=True)
    # allow for small bars when there is just one event
    df.loc[df['enddoy'] == df['startdoy'], 'enddoy'] = df['enddoy'] + 1
    ends = df['enddoy'].values
    starts = df['startdoy'].values
    years = df.index.values
    fig = plt.figure(figsize=(8, 6))
    ax = plt.axes([0.1, 0.1, 0.7, 0.8])
    ax.barh(years, (ends - starts), left=starts, fc='blue', align='center')
    # averages exclude the (likely incomplete) final year
    ax.axvline(np.average(starts[:-1]), lw=2, color='red')
    ax.axvline(np.average(ends[:-1]), lw=2, color='red')
    ax.set_xlabel(("Avg Start Date: %s, End Date: %s") % (
        (datetime.date(2000, 1, 1) + datetime.timedelta(
            days=int(np.average(starts[:-1])))).strftime("%-d %b"),
        (datetime.date(2000, 1, 1) + datetime.timedelta(
            days=int(np.average(ends[:-1])))).strftime("%-d %b")),
        color='red')
    title = "[%s] NWS %s" % (station, nt.sts[station]['name'])
    if opt == 'state':
        title = ("NWS Issued Alerts for State of %s") % (
            reference.state_names[state], )
    ax.set_title(("%s\nPeriod between First and Last %s") % (
        title, vtec.get_ps_string(phenomena, significance)))
    ax.grid()
    # month boundary day-of-year values, doubled for spans past Dec 31
    days = [1, 32, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335]
    days = days + [x + 365 for x in days]
    ax.set_xticks(days)
    ax.set_xticklabels(calendar.month_abbr[1:] + calendar.month_abbr[1:])
    ax.set_xlim(df['startdoy'].min() - 10, df['enddoy'].max() + 10)
    ax.set_ylabel("Year")
    ax.set_ylim(years[0] - 0.5, years[-1] + 0.5)
    # integer year labels
    xFormatter = FormatStrFormatter('%d')
    ax.yaxis.set_major_formatter(xFormatter)

    # side panel: yearly event counts
    ax = plt.axes([0.82, 0.1, 0.13, 0.8])
    ax.barh(years, df['count'], fc='blue', align='center')
    ax.set_ylim(years[0] - 0.5, years[-1] + 0.5)
    plt.setp(ax.get_yticklabels(), visible=False)
    ax.grid(True)
    ax.set_xlabel("# Events")
    ax.yaxis.set_major_formatter(xFormatter)
    xloc = plt.MaxNLocator(3)
    ax.xaxis.set_major_locator(xloc)
    return fig, df
def plotter(fdict):
    """Plot percentile ranks of a requested sounding vs the period of record.

    Older variant of the sounding-percentile plot: for each mandatory
    pressure level, ranks the requested sounding's value against all
    soundings for that station+hour (optionally same month only) and draws
    the percentiles as horizontal bars with ob / min / max on a twin axis.

    Args:
        fdict: autoplot CGI form dictionary.

    Returns:
        (matplotlib.figure.Figure, pandas.DataFrame)

    Raises:
        ValueError: when the requested sounding is not found.
    """
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['station']
    varname = ctx['var']
    network = 'RAOB'
    ts = ctx['date']
    hour = int(ctx['hour'])
    # rebuild the timestamp at the synoptic hour, in UTC
    ts = datetime.datetime(ts.year, ts.month, ts.day, hour)
    ts = ts.replace(tzinfo=pytz.utc)
    which = ctx['which']
    vlimit = ''
    if which == 'month':
        vlimit = (" and extract(month from f.valid) = %s ") % (ts.month, )
    nt = NetworkTable(network)
    name = nt.sts[station]['name']
    stations = [
        station,
    ]
    # virtual stations ("_XXX") carry "label -- id1 id2 ..." in their name
    if station.startswith("_"):
        name = nt.sts[station]['name'].split("--")[0]
        stations = nt.sts[station]['name'].split("--")[1].strip().split(" ")
    pgconn = get_dbconn('postgis')
    # window functions rank every profile value within its pressure level
    df = read_sql("""
    with data as (
        select f.valid, p.pressure, count(*) OVER (PARTITION by p.pressure),
        min(valid) OVER () as min_valid, max(valid) OVER () as max_valid,
        p.tmpc,
        rank() OVER (PARTITION by p.pressure ORDER by p.tmpc ASC) as tmpc_rank,
        min(p.tmpc) OVER (PARTITION by p.pressure) as tmpc_min,
        max(p.tmpc) OVER (PARTITION by p.pressure) as tmpc_max,
        p.dwpc,
        rank() OVER (PARTITION by p.pressure ORDER by p.dwpc ASC) as dwpc_rank,
        min(p.dwpc) OVER (PARTITION by p.pressure) as dwpc_min,
        max(p.dwpc) OVER (PARTITION by p.pressure) as dwpc_max,
        p.height as hght,
        rank() OVER (
            PARTITION by p.pressure ORDER by p.height ASC) as hght_rank,
        min(p.height) OVER (PARTITION by p.pressure) as hght_min,
        max(p.height) OVER (PARTITION by p.pressure) as hght_max,
        p.smps,
        rank() OVER (PARTITION by p.pressure ORDER by p.smps ASC) as smps_rank,
        min(p.smps) OVER (PARTITION by p.pressure) as smps_min,
        max(p.smps) OVER (PARTITION by p.pressure) as smps_max
        from raob_flights f JOIN raob_profile p on (f.fid = p.fid)
        WHERE f.station in %s and
        extract(hour from f.valid at time zone 'UTC') = %s
        """ + vlimit + """
        and p.pressure in (925, 850, 700, 500, 400, 300, 250, 200,
        150, 100, 70, 50, 10))

    select * from data where valid = %s ORDER by pressure DESC
    """, pgconn, params=(tuple(stations), hour, ts), index_col='pressure')
    if df.empty:
        raise ValueError(("Sounding for %s was not found!")
                         % (ts.strftime("%Y-%m-%d %H:%M"), ))
    for key in PDICT3.keys():
        # rank/count -> percentile within the period of record
        df[key + '_percentile'] = df[key + '_rank'] / df['count'] * 100.
        # manual hackery to get 0 and 100th percentile
        df.loc[df[key] == df[key + '_max'], key + '_percentile'] = 100.
        df.loc[df[key] == df[key + '_min'], key + '_percentile'] = 0.
    ax = plt.axes([0.1, 0.12, 0.65, 0.75])
    bars = ax.barh(range(len(df.index)), df[varname + '_percentile'],
                   align='center')
    y2labels = []
    # heights are whole meters; everything else gets one decimal
    fmt = '%.1f' if varname not in [
        'hght',
    ] else '%.0f'
    for i, mybar in enumerate(bars):
        ax.text(mybar.get_width() + 1, i, '%.1f' % (mybar.get_width(), ),
                va='center', bbox=dict(color='white'))
        y2labels.append((fmt + ' (' + fmt + ' ' + fmt + ')')
                        % (df.iloc[i][varname],
                           df.iloc[i][varname + "_min"],
                           df.iloc[i][varname + "_max"]))
    ax.set_yticks(range(len(df.index)))
    ax.set_yticklabels(['%.0f' % (a, ) for a in df.index.values])
    ax.set_ylim(-0.5, len(df.index) - 0.5)
    ax.set_xlabel("Percentile [100 = highest]")
    ax.set_ylabel("Mandatory Pressure Level (hPa)")
    plt.gcf().text(
        0.5, 0.9,
        ("%s %s %s Sounding\n"
         "(%s-%s) Percentile Ranks (%s) for %s")
        % (station, name, ts.strftime("%Y/%m/%d %H UTC"),
           df.iloc[0]['min_valid'].year, df.iloc[0]['max_valid'].year,
           ("All Year" if which == 'none'
            else calendar.month_name[ts.month]),
           PDICT3[varname]),
        ha='center', va='bottom')
    ax.grid(True)
    ax.set_xticks([0, 5, 10, 25, 50, 75, 90, 95, 100])
    # leave headroom past 100 so the text annotations fit
    ax.set_xlim(0, 110)
    ax.text(1.02, 1, 'Ob (Min Max)', transform=ax.transAxes)
    ax2 = ax.twinx()
    ax2.set_ylim(-0.5, len(df.index) - 0.5)
    ax2.set_yticks(range(len(df.index)))
    ax2.set_yticklabels(y2labels)
    return plt.gcf(), df
def plotter(fdict):
    """Plot the top-10 largest temperature changes over an N-hour window.

    Self-joins hourly ASOS temperatures against the same series shifted by
    N hours and ranks the differences (warm-ups ascending, cool-downs
    descending), then renders a horizontal bar chart with period labels.

    Args:
        fdict: autoplot CGI form dictionary.

    Returns:
        (matplotlib.figure.Figure, pandas.DataFrame)

    Raises:
        ValueError: when the query returns no rows.
    """
    pgconn = get_dbconn('asos')
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['zstation']
    network = ctx['network']
    hours = ctx['hours']
    mydir = ctx['dir']
    month = ctx['month']
    if month == 'all':
        months = range(1, 13)
    elif month == 'fall':
        months = [9, 10, 11]
    elif month == 'winter':
        months = [12, 1, 2]
    elif month == 'spring':
        months = [3, 4, 5]
    elif month == 'summer':
        months = [6, 7, 8]
    else:
        ts = datetime.datetime.strptime("2000-" + month + "-01", '%Y-%b-%d')
        # make sure it is length two for the trick below in SQL
        months = [ts.month, 999]
    nt = NetworkTable(network)
    tzname = nt.sts[station]['tzname']
    # backwards intuitive
    sortdir = "ASC" if mydir == 'warm' else 'DESC'
    # doffset shifts the series by N hours; the join pairs each ob with the
    # ob N hours later so (tmpf1 - tmpf2) is the change over the window
    df = read_sql("""
    WITH data as (
        SELECT valid at time zone %s as valid, tmpf from alldata
        where station = %s and tmpf between -100 and 150
        and extract(month from valid) in %s),
    doffset as (
        SELECT valid - '%s hours'::interval as valid, tmpf from data),
    agg as (
        SELECT d.valid, d.tmpf as tmpf1, o.tmpf as tmpf2
        from data d JOIN doffset o on (d.valid = o.valid))
    SELECT valid as valid1, valid + '%s hours'::interval as valid2,
    tmpf1, tmpf2 from agg
    ORDER by (tmpf1 - tmpf2) """ + sortdir + """ LIMIT 50
    """, pgconn, params=(tzname, station, tuple(months), hours, hours),
                  index_col=None)
    df['diff'] = (df['tmpf1'] - df['tmpf2']).abs()
    if df.empty:
        raise ValueError("No database entries found for station, sorry!")
    fig = plt.figure()
    ax = plt.axes([0.55, 0.1, 0.4, 0.8])
    fig.text(0.5, 0.95, ('[%s] %s Top 10 %s\n'
                         'Over %s Hour Period (%s-%s) [%s]'
                         ) % (station, nt.sts[station]['name'],
                              MDICT[mydir], hours,
                              nt.sts[station]['archive_begin'].year,
                              datetime.date.today().year, MDICT2[month]),
             ha='center', va='center')
    labels = []
    for i in range(10):
        row = df.iloc[i]
        ax.barh(i + 1, row['diff'], color='b', align='center')
        sts = row['valid1']
        ets = row['valid2']
        labels.append(("%.0f to %.0f -> %.0f\n%s - %s"
                       ) % (row['tmpf1'], row['tmpf2'], row['diff'],
                            sts.strftime("%-d %b %Y %I:%M %p"),
                            ets.strftime("%-d %b %Y %I:%M %p")))
    ax.set_yticks(range(1, 11))
    ax.set_yticklabels(labels)
    # inverted y axis so rank #1 is drawn at the top
    ax.set_ylim(10.5, 0.5)
    ax.grid(True)
    return fig, df
def plotter(fdict):
    """Plot the top-10 largest temperature changes over an N-hour window.

    Modernized variant: self-joins hourly ASOS temperatures against the same
    series shifted by N hours, ranks the differences (warm-ups ascending,
    cool-downs descending), and renders a horizontal bar chart with labels.

    Args:
        fdict: autoplot CGI form dictionary.

    Returns:
        (matplotlib.figure.Figure, pandas.DataFrame)

    Raises:
        NoDataFound: when no rows are returned or metadata is missing.
    """
    pgconn = get_dbconn("asos")
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx["zstation"]
    hours = ctx["hours"]
    mydir = ctx["dir"]
    month = ctx["month"]
    if month == "all":
        months = range(1, 13)
    elif month == "fall":
        months = [9, 10, 11]
    elif month == "winter":
        months = [12, 1, 2]
    elif month == "spring":
        months = [3, 4, 5]
    elif month == "summer":
        months = [6, 7, 8]
    else:
        ts = datetime.datetime.strptime("2000-" + month + "-01", "%Y-%b-%d")
        # make sure it is length two for the trick below in SQL
        months = [ts.month, 999]
    tzname = ctx["_nt"].sts[station]["tzname"]
    # backwards intuitive
    sortdir = "ASC" if mydir == "warm" else "DESC"
    # doffset shifts the series by N hours; the join pairs each ob with the
    # ob N hours later so (tmpf1 - tmpf2) is the change over the window
    df = read_sql(
        """
    WITH data as (
        SELECT valid at time zone %s as valid, tmpf from alldata
        where station = %s and tmpf between -100 and 150
        and extract(month from valid) in %s),
    doffset as (
        SELECT valid - '%s hours'::interval as valid, tmpf from data),
    agg as (
        SELECT d.valid, d.tmpf as tmpf1, o.tmpf as tmpf2
        from data d JOIN doffset o on (d.valid = o.valid))
    SELECT valid as valid1, valid + '%s hours'::interval as valid2,
    tmpf1, tmpf2 from agg
    ORDER by (tmpf1 - tmpf2) """ + sortdir + """ LIMIT 50
    """,
        pgconn,
        params=(tzname, station, tuple(months), hours, hours),
        index_col=None,
    )
    df["diff"] = (df["tmpf1"] - df["tmpf2"]).abs()
    if df.empty:
        raise NoDataFound("No database entries found for station, sorry!")
    fig = plt.figure()
    ax = plt.axes([0.55, 0.1, 0.4, 0.8])
    ab = ctx["_nt"].sts[station]["archive_begin"]
    if ab is None:
        raise NoDataFound("Unknown station metadata.")
    fig.text(
        0.5,
        0.95,
        ("[%s] %s Top 10 %s\n"
         "Over %s Hour Period (%s-%s) [%s]") % (
            station,
            ctx["_nt"].sts[station]["name"],
            MDICT[mydir],
            hours,
            ab.year,
            datetime.date.today().year,
            MDICT2[month],
        ),
        ha="center",
        va="center",
    )
    labels = []
    for i in range(10):
        row = df.iloc[i]
        ax.barh(i + 1, row["diff"], color="b", align="center")
        sts = row["valid1"]
        ets = row["valid2"]
        labels.append(
            ("%.0f to %.0f -> %.0f\n%s - %s") % (
                row["tmpf1"],
                row["tmpf2"],
                row["diff"],
                sts.strftime("%-d %b %Y %I:%M %p"),
                ets.strftime("%-d %b %Y %I:%M %p"),
            )
        )
    ax.set_yticks(range(1, 11))
    ax.set_yticklabels(labels)
    # inverted y axis so rank #1 is drawn at the top
    ax.set_ylim(10.5, 0.5)
    ax.grid(True)
    return fig, df
def plotter(fdict):
    """Plot the top-10 N-day events (precip total / temperature extreme).

    Modernized variant: uses SQL window functions over the daily climate
    table to aggregate sliding N-day windows, ranks them by the requested
    variable, and renders a horizontal bar chart with period labels.

    Args:
        fdict: autoplot CGI form dictionary.

    Returns:
        (matplotlib.figure.Figure, pandas.DataFrame)

    Raises:
        NoDataFound: when no rows are returned or metadata is missing.
    """
    pgconn = get_dbconn("coop")
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx["station"]
    month = ctx["month"]
    varname = ctx["var"]
    days = ctx["days"]
    # per-state long-term climate table
    table = "alldata_%s" % (station[:2], )
    if month == "all":
        months = range(1, 13)
    elif month == "fall":
        months = [9, 10, 11]
    elif month == "winter":
        months = [12, 1, 2]
    elif month == "spring":
        months = [3, 4, 5]
    elif month == "summer":
        months = [6, 7, 8]
    elif month == "octmar":
        months = [10, 11, 12, 1, 2, 3]
    else:
        ts = datetime.datetime.strptime("2000-" + month + "-01", "%Y-%b-%d")
        # make sure it is length two for the trick below in SQL
        months = [ts.month, 999]
    # min_greatest_low ranks ascending (coldest warm-lows first)
    sorder = "ASC" if varname in ["min_greatest_low"] else "DESC"
    # window frame of (days-1) preceding rows + current row = N-day window;
    # count = %s guards against windows spanning data gaps
    df = read_sql(
        """WITH data as (
        SELECT month, day, day - '%s days'::interval as start_date,
        count(*) OVER (ORDER by day ASC ROWS BETWEEN %s preceding
            and current row) as count,
        sum(precip) OVER (ORDER by day ASC ROWS BETWEEN %s preceding
            and current row) as total_precip,
        min(high) OVER (ORDER by day ASC ROWS BETWEEN %s preceding
            and current row) as max_least_high,
        max(low) OVER (ORDER by day ASC ROWS BETWEEN %s preceding
            and current row) as min_greatest_low
        from """ + table + """ WHERE station = %s)
        SELECT day as end_date, start_date, """ + varname + """ from data WHERE
        month in %s and extract(month from start_date) in %s and count = %s
        ORDER by """ + varname + """ """ + sorder + """ LIMIT 10
        """,
        pgconn,
        params=(
            days - 1,
            days - 1,
            days - 1,
            days - 1,
            days - 1,
            station,
            tuple(months),
            tuple(months),
            days,
        ),
        index_col=None,
    )
    if df.empty:
        raise NoDataFound("Error, no results returned!")
    ylabels = []
    # two decimals for precip, whole numbers for temperatures
    fmt = "%.2f" if varname in ["total_precip"] else "%.0f"
    for _, row in df.iterrows():
        # no strftime support for old days, so we hack at it
        lbl = fmt % (row[varname], )
        if days > 1:
            sts = row["end_date"] - datetime.timedelta(days=(days - 1))
            if sts.month == row["end_date"].month:
                # same-month window: "Mon d1-d2, year"
                lbl += " -- %s %s-%s, %s" % (
                    calendar.month_abbr[sts.month],
                    sts.day,
                    row["end_date"].day,
                    sts.year,
                )
            else:
                # window crosses a month boundary: spell out both endpoints
                lbl += " -- %s %s, %s to\n %s %s, %s" % (
                    calendar.month_abbr[sts.month],
                    sts.day,
                    sts.year,
                    calendar.month_abbr[row["end_date"].month],
                    row["end_date"].day,
                    row["end_date"].year,
                )
        else:
            lbl += " -- %s %s, %s" % (
                calendar.month_abbr[row["end_date"].month],
                row["end_date"].day,
                row["end_date"].year,
            )
        ylabels.append(lbl)
    ax = plt.axes([0.1, 0.1, 0.5, 0.8])
    plt.gcf().set_size_inches(8, 6)
    # rank #1 drawn at the top via the reversed y positions
    ax.barh(
        range(10, 0, -1),
        df[varname],
        ec="green",
        fc="green",
        height=0.8,
        align="center",
    )
    ax2 = ax.twinx()
    ax2.set_ylim(0.5, 10.5)
    ax.set_ylim(0.5, 10.5)
    ax2.set_yticks(range(1, 11))
    ax.set_yticks(range(1, 11))
    ax.set_yticklabels(["#%s" % (x, ) for x in range(1, 11)][::-1])
    ax2.set_yticklabels(ylabels[::-1])
    ax.grid(True, zorder=11)
    ax.set_xlabel(("Precipitation [inch]" if varname in ["total_precip"]
                   else r"Temperature $^\circ$F"))
    ab = ctx["_nt"].sts[station]["archive_begin"]
    if ab is None:
        raise NoDataFound("Unknown station metadata.")
    ax.set_title(
        ("%s [%s] Top 10 Events\n"
         "%s [days=%s] (%s) "
         "(%s-%s)") % (
            ctx["_nt"].sts[station]["name"],
            station,
            METRICS[varname],
            days,
            MDICT[month],
            ab.year,
            datetime.datetime.now().year,
        ),
        size=12,
    )
    return plt.gcf(), df
    def __init__(self, sector='iowa', figsize=(10.24, 7.68), **kwargs):
        """Construct a MapPlot

        Args:
          sector (str): plot domain, set 'custom' to bring your own
            projection
          kwargs:
            projection (cartopy.crs,optional): bring your own projection
            north (float,optional): Plot top bounds (degN Lat)
            south (float,optional): Plot bottom bounds (degN Lat)
            east (float,optional): Plot right bounds (degE Lon)
            west (float,optional): Plot left bounds (degE Lon)
            titlefontsize (int): fontsize to use for the plot title
            subtitlefontsize (int): fontsize to use for the plot subtitle
            continentalcolor (color): color to use for continental coloring
            debug (bool): enable debugging
            aspect (str): plot aspect, defaults to equal
        """
        self.debug = kwargs.get('debug', False)
        self.fig = plt.figure(num=None, figsize=figsize,
                              dpi=kwargs.get('dpi', 100))
        # Storage of axes within this plot
        self.state = None
        self.cwa = None
        # For our plot_values magic, to prevent overlap
        self.textmask = None
        self.sector = sector
        # shared colorbar axes, hidden until a colorbar is drawn
        self.cax = plt.axes(CAX_BOUNDS, frameon=False, yticks=[], xticks=[])
        self.axes = []
        self.ax = None
        # hack around sector=iowa
        if self.sector == 'iowa':
            self.sector = 'state'
            self.state = 'IA'
        sector_setter(self, MAIN_AX_BOUNDS, **kwargs)

        for _a in self.axes:
            if _a is None:
                continue
            # legacy usage of axisbg here
            _c = kwargs.get('axisbg',
                            kwargs.get('continentalcolor', '#EEEEEE'))
            _a.add_feature(cfeature.LAND, facecolor=_c, zorder=Z_CF)
            coasts = cfeature.NaturalEarthFeature('physical', 'coastline',
                                                  '10m', edgecolor='black',
                                                  facecolor='none')
            _a.add_feature(coasts, lw=1.0, zorder=Z_POLITICAL)
            _a.add_feature(cfeature.BORDERS, lw=1.0, zorder=Z_POLITICAL)
            _a.add_feature(cfeature.OCEAN,
                           facecolor=(0.4471, 0.6235, 0.8117), zorder=Z_CF)
            _a.add_feature(cfeature.LAKES,
                           facecolor=(0.4471, 0.6235, 0.8117), zorder=Z_CF)
            if 'nostates' not in kwargs:
                # state polygons come from a bundled pickle, keys are bytes
                states = load_pickle_geo('us_states.pickle')
                _a.add_geometries(
                    [val[b'geom'] for key, val in states.items()],
                    crs=ccrs.PlateCarree(), lw=1.0,
                    edgecolor=kwargs.get('statebordercolor', 'k'),
                    facecolor='None', zorder=Z_POLITICAL
                )

        if not kwargs.get('nologo'):
            self.iemlogo()
        # title/subtitle shift right when the logo occupies the corner
        if "title" in kwargs:
            self.fig.text(0.09 if not kwargs.get('nologo') else 0.02, 0.94,
                          kwargs.get("title"),
                          fontsize=kwargs.get('titlefontsize', 18))
        if "subtitle" in kwargs:
            self.fig.text(0.09 if not kwargs.get('nologo') else 0.02, 0.91,
                          kwargs.get("subtitle"),
                          fontsize=kwargs.get('subtitlefontsize', 12))
        if 'nocaption' not in kwargs:
            self.fig.text(0.01, 0.03, ("%s :: generated %s"
                                       ) % (
                kwargs.get('caption', 'Iowa Environmental Mesonet'),
                datetime.datetime.now().strftime("%d %B %Y %I:%M %p %Z"),))
def plotter(fdict):
    """Plot the top-10 (or most recent threshold-crossing) hourly obs.

    Builds a dynamic date limiter (explicit date window, month group, and/or
    hour of day), queries the ASOS archive for the requested variable, keeps
    the first ob per local hour, ranks ties together, and renders a
    horizontal bar chart with timestamp labels.

    Args:
        fdict: autoplot CGI form dictionary.

    Returns:
        (matplotlib.figure.Figure, pandas.DataFrame)

    Raises:
        NoDataFound: when no rows / metadata are found.
    """
    pgconn = get_dbconn("asos")
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx["zstation"]
    month = ctx["month"]
    varname = ctx["var"]
    tzname = ctx["_nt"].sts[station]["tzname"]
    if ctx.get("sdate") and ctx.get("edate"):
        # "or" handles windows that wrap past Dec 31
        date_limiter = (
            " and (to_char(valid at time zone '%s', 'mmdd') >= '%s'"
            " %s to_char(valid at time zone '%s', 'mmdd') <= '%s')") % (
                tzname, ctx["sdate"].strftime("%m%d"),
                "or" if ctx["sdate"] > ctx["edate"] else "and",
                tzname, ctx["edate"].strftime("%m%d"),
        )
        title = "between %s and %s" % (
            ctx["sdate"].strftime("%-d %b"),
            ctx["edate"].strftime("%-d %b"),
        )
        if ctx["sdate"] == ctx["edate"]:
            date_limiter = (
                "and to_char(valid at time zone '%s', 'mmdd') = '%s'") % (
                    tzname, ctx["sdate"].strftime("%m%d"))
            title = "on %s" % (ctx["sdate"].strftime("%-d %b"), )
    else:
        if month == "all":
            months = range(1, 13)
        elif month == "fall":
            months = [9, 10, 11]
        elif month == "winter":
            months = [12, 1, 2]
        elif month == "spring":
            months = [3, 4, 5]
        elif month == "summer":
            months = [6, 7, 8]
        elif month == "octmar":
            months = [10, 11, 12, 1, 2, 3]
        else:
            ts = datetime.datetime.strptime("2000-" + month + "-01",
                                            "%Y-%b-%d")
            # make sure it is length two for the trick below in SQL
            months = [ts.month, 999]
        date_limiter = (
            " and extract(month from valid at time zone '%s') in %s") % (
                tzname, tuple(months))
        title = MDICT[month]
    if ctx.get("hour") is not None:
        # +10 minutes rounds :50-past obs into the following hour
        date_limiter += (
            f" and extract(hour from valid at time zone '{tzname}' "
            f"+ '10 minutes'::interval) = {ctx['hour']}")
        dt = datetime.datetime(2000, 1, 1, ctx["hour"])
        title += " @" + dt.strftime("%-I %p")
    # varname is of the form "<agg>_<column>", e.g. "max_tmpf"
    (agg, dbvar) = varname.split("_")
    if agg in ["max", "min"]:
        titlelabel = "Top"
        sorder = "DESC" if agg == "max" else "ASC"
        df = read_sql(
            f"""
            WITH data as (
                SELECT valid at time zone %s as v, {dbvar} from alldata
                WHERE station = %s {date_limiter})

            SELECT v as valid, {dbvar} from data
            ORDER by {dbvar} {sorder} NULLS LAST LIMIT 100
            """,
            pgconn,
            params=(ctx["_nt"].sts[station]["tzname"], station),
            index_col=None,
        )
    else:
        # above/below a threshold: most recent crossings first
        titlelabel = "Most Recent"
        op = ">=" if agg == "above" else "<"
        threshold = float(ctx.get("threshold", 100))
        df = read_sql(
            f"SELECT valid at time zone %s as valid, {dbvar} from alldata "
            f"WHERE station = %s {date_limiter} and {dbvar} {op} {threshold} "
            "ORDER by valid DESC LIMIT 100",
            pgconn,
            params=(ctx["_nt"].sts[station]["tzname"], station),
            index_col=None,
        )
    if df.empty:
        raise NoDataFound("Error, no results returned!")
    ylabels = []
    # whole numbers for temperatures, two decimals otherwise
    fmt = "%.0f" if dbvar in ["tmpf", "dwpf"] else "%.2f"
    hours = []
    y = []
    lastval = -99
    ranks = []
    currentrank = 0
    rows2keep = []
    for idx, row in df.iterrows():
        # keep only the first ob within each local hour; skip nulls
        key = row["valid"].strftime("%Y%m%d%H")
        if key in hours or pd.isnull(row[dbvar]):
            continue
        rows2keep.append(idx)
        hours.append(key)
        y.append(row[dbvar])
        lbl = fmt % (row[dbvar], )
        lbl += " -- %s" % (row["valid"].strftime("%b %d, %Y %-I:%M %p"), )
        ylabels.append(lbl)
        # ties share rank; threshold mode always increments (time-ordered)
        if row[dbvar] != lastval or agg in ["above", "below"]:
            currentrank += 1
        ranks.append(currentrank)
        lastval = row[dbvar]
        if len(ylabels) == 10:
            break
    if not y:
        raise NoDataFound("No data found.")
    fig = plt.figure(figsize=(8, 6))
    ax = plt.axes([0.1, 0.1, 0.5, 0.8])
    # rank #1 drawn at the top via the reversed y positions
    ax.barh(
        range(len(y), 0, -1),
        y,
        ec="green",
        fc="green",
        height=0.8,
        align="center",
    )
    ax2 = ax.twinx()
    ax2.set_ylim(0.5, 10.5)
    ax.set_ylim(0.5, 10.5)
    ax2.set_yticks(range(1, len(y) + 1))
    ax.set_yticks(range(1, len(y) + 1))
    ax.set_yticklabels(["#%s" % (x, ) for x in ranks][::-1])
    ax2.set_yticklabels(ylabels[::-1])
    ax.grid(True, zorder=11)
    ax.set_xlabel("%s %s" % (METRICS[varname], UNITS[dbvar]))
    ab = ctx["_nt"].sts[station]["archive_begin"]
    if ab is None:
        raise NoDataFound("Unknown station metadata.")
    fitbox(
        fig,
        ("%s [%s] %s 10 Events\n%s %s (%s) (%s-%s)") % (
            ctx["_nt"].sts[station]["name"],
            station,
            titlelabel,
            METRICS[varname],
            ctx.get("threshold") if agg in ["above", "below"] else "",
            title,
            ab.year,
            datetime.datetime.now().year,
        ),
        0.01,
        0.99,
        0.91,
        0.99,
        ha="center",
    )
    fig.text(
        0.98,
        0.03,
        "Timezone: %s" % (ctx["_nt"].sts[station]["tzname"], ),
        ha="right",
    )
    return fig, df.loc[rows2keep]
def plotter(fdict):
    """Heatmap of present-weather (wx code) report frequency.

    Counts distinct station-hours whose METAR wx codes contain the
    requested code and renders a 2-D (hour x week-or-day-of-year)
    frequency image with marginal bar charts for both axes.

    Args:
        fdict: autoplot form dictionary.

    Returns:
        (matplotlib.figure.Figure, pandas.DataFrame)

    Raises:
        NoDataFound: when the query yields no rows.
    """
    pgconn = get_dbconn("asos")
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx["zstation"]
    syear = ctx["syear"]
    eyear = ctx["eyear"]
    groupby = ctx["groupby"]
    sts = datetime.date(syear, 1, 1)
    ets = datetime.date(eyear + 1, 1, 1)
    code = ctx["code"]
    # Resolve the title label from the submitted form value before any
    # remapping, so the module-level PDICT needs no runtime mutation.
    label = PDICT[code]
    if code == "PSN":
        # +SN is the actual METAR code for heavy snow
        code = "+SN"
    # Pass the wx code as a bound LIKE parameter rather than splicing it
    # into the SQL text (avoids injection and manual %% escaping).
    likearg = "%" + code + "%"
    if groupby == "week":
        data = np.ma.zeros((24, 52), "f")
        df = read_sql(
            """
        WITH data as (
            SELECT valid at time zone %s + '10 minutes'::interval as v
            from alldata where station = %s and
            array_to_string(wxcodes, '') LIKE %s
            and valid > %s and valid < %s),
        agg as (
            SELECT distinct extract(week from v)::int as week,
            extract(doy from v)::int as doy,
            extract(year from v)::int as year,
            extract(hour from v)::int as hour
            from data)
        SELECT week, year, hour, count(*) from agg
        WHERE week < 53 GROUP by week, year, hour
        """,
            pgconn,
            params=(
                ctx["_nt"].sts[station]["tzname"], station, likearg,
                sts, ets,
            ),
            index_col=None,
        )
    else:
        data = np.ma.zeros((24, 366), "f")
        df = read_sql(
            """
        WITH data as (
            SELECT valid at time zone %s + '10 minutes'::interval as v
            from alldata where station = %s and
            array_to_string(wxcodes, '') LIKE %s
            and valid > %s and valid < %s),
        agg as (
            SELECT distinct extract(doy from v)::int as doy,
            extract(year from v)::int as year,
            extract(hour from v)::int as hour
            from data)
        SELECT doy, year, hour, count(*) from agg
        GROUP by doy, year, hour
        """,
            pgconn,
            params=(
                ctx["_nt"].sts[station]["tzname"], station, likearg,
                sts, ets,
            ),
            index_col=None,
        )
    if df.empty:
        raise NoDataFound("No data was found, sorry!")
    minyear = df["year"].min()
    maxyear = df["year"].max()
    # Fold the per-year counts into the hour x period grid
    for _, row in df.iterrows():
        data[row["hour"], row[groupby] - 1] += 1
    # Mask zero-count cells so they render blank
    data.mask = data == 0
    fig = plt.figure(figsize=(8, 6))
    ax = plt.axes([0.11, 0.25, 0.7, 0.65])
    cax = plt.axes([0.82, 0.04, 0.02, 0.15])
    res = ax.imshow(
        data, aspect="auto", rasterized=True, interpolation="nearest"
    )
    fig.colorbar(res, cax=cax)
    xloc = plt.MaxNLocator(4)
    cax.yaxis.set_major_locator(xloc)
    cax.set_ylabel("Count")
    ax.set_ylim(-0.5, 23.5)
    ax.set_yticks((0, 4, 8, 12, 16, 20))
    ax.set_ylabel("Local Time, %s" % (ctx["_nt"].sts[station]["tzname"],))
    ax.set_yticklabels(("Mid", "4 AM", "8 AM", "Noon", "4 PM", "8 PM"))
    ax.set_title(
        ("[%s] %s %s Reports\n[%.0f - %.0f]"
         " by hour and %s") % (
            station,
            ctx["_nt"].sts[station]["name"],
            label,
            minyear,
            maxyear,
            PDICT2[groupby].replace("group ", ""),
        )
    )
    ax.grid(True)
    # Bottom marginal: totals by week/day of year
    lax = plt.axes([0.11, 0.1, 0.7, 0.15])
    if groupby == "week":
        ax.set_xticks(np.arange(0, 55, 7))
        lax.bar(np.arange(0, 52), np.ma.sum(data, 0), facecolor="tan")
        lax.set_xlim(-0.5, 51.5)
        lax.set_xticks(np.arange(0, 55, 7))
        lax.set_xticklabels(
            (
                "Jan 1",
                "Feb 19",
                "Apr 8",
                "May 27",
                "Jul 15",
                "Sep 2",
                "Oct 21",
                "Dec 9",
            )
        )
    else:
        ax.set_xticks(
            [1, 32, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335, 365]
        )
        lax.bar(np.arange(0, 366), np.ma.sum(data, 0), facecolor="tan")
        lax.set_xlim(-0.5, 365.5)
        lax.set_xticks(
            [1, 32, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335, 365]
        )
        lax.set_xticklabels(calendar.month_abbr[1:])
    plt.setp(ax.get_xticklabels(), visible=False)
    # Bottom grid
    lax.grid(True)
    yloc = plt.MaxNLocator(3)
    lax.yaxis.set_major_locator(yloc)
    lax.yaxis.get_major_ticks()[-1].label1.set_visible(False)
    # Right marginal: totals by hour of day
    rax = plt.axes([0.81, 0.25, 0.15, 0.65])
    rax.barh(np.arange(0, 24) - 0.4, np.ma.sum(data, 1), facecolor="tan")
    rax.set_ylim(-0.5, 23.5)
    rax.set_yticks([])
    xloc = plt.MaxNLocator(3)
    rax.xaxis.set_major_locator(xloc)
    rax.xaxis.get_major_ticks()[0].label1.set_visible(False)
    rax.grid(True)
    return fig, df
def plotter(fdict):
    """Percentile placement of hourly ASOS observations.

    Top panel: for each local hour, the percentile curve of the chosen
    variable over the station's archive, optionally limited to the
    selected day/week/month window.  Bottom panel: where the selected
    year's observations fell within those hourly distributions.

    Raises:
        NoDataFound: when the query returns no observations.
    """
    pgconn = get_dbconn("asos")
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx["zstation"]
    date = ctx["date"]
    opt = ctx["opt"]
    varname = ctx["v"]
    tzname = ctx["_nt"].sts[station]["tzname"]
    # Resolve how to limit the query data
    limiter = ""
    if opt == "day":
        limiter = (f" and to_char(valid at time zone '{tzname}', 'mmdd') = "
                   f"'{date.strftime('%m%d')}' ")
        subtitle = (f"For Date of {date.strftime('%-d %b')}, "
                    f"{date.strftime('%-d %b %Y')} plotted in bottom panel")
        datefmt = "%I %p"
    elif opt == "week":
        limiter = f" and extract(week from valid) = {date.strftime('%V')} "
        subtitle = (
            f"For ISO Week of {date.strftime('%V')}, "
            f"week of {date.strftime('%-d %b %Y')} plotted in bottom panel")
        datefmt = "%-d %b"
    elif opt == "month":
        limiter = f" and extract(month from valid) = {date.strftime('%m')} "
        subtitle = (f"For Month of {date.strftime('%B')}, "
                    f"{date.strftime('%b %Y')} plotted in bottom panel")
        datefmt = "%-d"
    else:
        subtitle = f"All Year, {date.year} plotted in bottom panel"
        datefmt = "%-d %b"
    # Load up all the values, since we need pandas to do some heavy lifting
    obsdf = read_sql(
        f"""
        select valid at time zone 'UTC' as utc_valid,
        extract(year from valid at time zone %s) as year,
        extract(hour from valid at time zone %s +
        '10 minutes'::interval)::int as hr, {varname} from alldata
        WHERE station = %s and {varname} is not null {limiter}
        and report_type = 2 ORDER by valid ASC
        """,
        pgconn,
        params=(tzname, tzname, station),
        index_col=None,
    )
    if obsdf.empty:
        raise NoDataFound("No data was found.")
    # Assign percentiles: rank each obs within its local-hour group
    obsdf["quantile"] = obsdf[["hr", varname]].groupby("hr").rank(pct=True)
    # Compute actual percentiles (5% steps) per local hour
    qtile = (obsdf[["hr", varname
                    ]].groupby("hr").quantile(np.arange(0, 1.01,
                                                        0.05)).reset_index())
    qtile = qtile.rename(columns={"level_1": "quantile"})
    (fig, ax) = plt.subplots(2, 1)
    cmap = get_cmap(ctx["cmap"])
    # One percentile curve per local hour, colored by hour of day
    for hr, gdf in qtile.groupby("hr"):
        ax[0].plot(
            gdf["quantile"].values * 100.0,
            gdf[varname].values,
            color=cmap(hr / 23.0),
            label=str(hr),
        )
    ax[0].set_xlim(0, 100)
    ax[0].grid(True)
    ax[0].set_ylabel(PDICT[varname])
    ax[0].set_xlabel("Percentile")
    ax[0].set_position([0.13, 0.55, 0.71, 0.34])
    # Colorbar used as a legend mapping curve color -> local hour
    cax = plt.axes([0.86, 0.55, 0.03, 0.33], frameon=False,
                   yticks=[], xticks=[])
    cb = ColorbarBase(cax, cmap=cmap)
    cb.set_ticks(np.arange(0, 1, 4.0 / 24.0))
    cb.set_ticklabels(["Mid", "4 AM", "8 AM", "Noon", "4 PM", "8 PM"])
    cb.set_label("Local Hour")
    # Bottom panel: this year's obs expressed as percentiles over time
    thisyear = obsdf[obsdf["year"] == date.year]
    if not thisyear.empty:
        ax[1].plot(thisyear["utc_valid"].values,
                   thisyear["quantile"].values * 100.0)
        ax[1].grid(True)
        ax[1].set_ylabel("Percentile")
        ax[1].set_ylim(-1, 101)
        # NOTE(review): utc_valid values are naive UTC timestamps; the
        # formatter re-localizes tick labels to the station timezone
        ax[1].xaxis.set_major_formatter(
            mdates.DateFormatter(datefmt, tz=pytz.timezone(tzname)))
        if opt == "day":
            ax[1].set_xlabel(f"Timezone: {tzname}")
    title = ("%s %s %s Percentiles\n%s") % (
        station,
        ctx["_nt"].sts[station]["name"],
        PDICT[varname],
        subtitle,
    )
    fitbox(fig, title, 0.01, 0.99, 0.91, 0.99, ha="center", va="center")
    return fig, qtile
def plotter(fdict):
    """Chart distinct VTEC phenomena/significance issued by year.

    Top axes: annual count of distinct phenomena/significance pairs.
    Bottom axes: each pair plotted as text at its (year, slot) spot.
    """
    pgconn = get_dbconn('postgis')
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['station'][:4]
    nt = NetworkTable('WFO')
    nt.sts['_ALL'] = {'name': 'All Offices'}
    # The all-offices variant needs a much taller canvas
    height = 21 if station == '_ALL' else 14
    fig = plt.figure(figsize=(8, height))
    topax = plt.axes([0.1, 0.75, 0.85, 0.2])
    botax = plt.axes([0.1, 0.05, 0.85, 0.65])
    if station == '_ALL':
        df = read_sql("""
        SELECT distinct extract(year from issue) as year,
        phenomena, significance from warnings WHERE
        phenomena is not null and significance is not null and
        issue > '2005-01-01'
        """, pgconn, index_col=None)
    else:
        df = read_sql("""
        SELECT distinct extract(year from issue) as year,
        phenomena, significance from warnings WHERE wfo = %s and
        phenomena is not null and significance is not null and
        issue > '2005-01-01'
        """, pgconn, params=(station, ), index_col=None)
    df['wfo'] = station
    df['year'] = df['year'].astype('i')
    # Per-year count of distinct phenomena.significance combos
    yearly = df.groupby('year').count()
    years = yearly.index.values
    topax.bar(years, yearly['wfo'], width=0.8, fc='b', ec='b',
              align='center')
    for yr, row in yearly.iterrows():
        topax.text(yr, row['wfo'] + 1, "%s" % (row['wfo'], ), ha='center')
    topax.set_title(
        ("[%s] NWS %s\nCount of Distinct VTEC Phenomena/"
         "Significance - %i to %i") % (
            station, nt.sts[station]['name'], df['year'].min(),
            df['year'].max()))
    topax.grid()
    topax.set_ylabel("Count")
    topax.set_xlim(years.min() - 0.5, years.max() + 0.5)
    # Assign a stable vertical slot to each combo, in sorted order
    df.sort_values(['phenomena', 'significance'], inplace=True)
    slots = {}
    nextslot = 1
    for _, row in df.iterrows():
        key = "%s.%s" % (row['phenomena'], row['significance'])
        if key not in slots:
            slots[key] = nextslot
            nextslot += 1
        botax.text(row['year'], slots[key], key, ha='center', va='center',
                   fontsize=10, bbox=dict(color='white'))
    botax.set_title("VTEC <Phenomena.Significance> Issued by Year")
    botax.set_ylim(0, nextslot)
    botax.grid(True)
    botax.set_xlim(years.min() - 0.5, years.max() + 0.5)
    return fig, df
def plotter(fdict):
    """Daily solar radiation for one year against NARR climatology.

    Main axes: the year's daily values over the smoothed NARR-period
    maximum envelope.  Side axes: pairwise scatter comparisons of the
    NARR, MERRA, and HRRR radiation estimates.

    Raises:
        NoDataFound: when the climatology query returns no rows.
    """
    pgconn = get_dbconn("coop")
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx["station"]
    year = ctx["year"]
    varname = ctx["var"]
    table = "alldata_%s" % (station[:2], )
    df = read_sql(
        """
        WITH agg as (
            SELECT sday, max(coalesce(narr_srad, 0))
            from """ + table + """ where
            station = %s and year > 1978 GROUP by sday),
        obs as (
            SELECT sday, day, narr_srad, merra_srad, hrrr_srad
            from """ + table + """ WHERE
            station = %s and year = %s)
        SELECT a.sday, a.max as max_narr, o.day, o.narr_srad,
        o.merra_srad, o.hrrr_srad from agg a LEFT JOIN obs o
        on (a.sday = o.sday) ORDER by a.sday ASC
        """,
        pgconn,
        params=(station, station, year),
        index_col="sday",
    )
    if df.empty:
        raise NoDataFound("No Data Found.")
    # 7-day centered smoothing of the climatological max envelope
    df["max_narr_smooth"] = (df["max_narr"].rolling(
        window=7, min_periods=1, center=True).mean())
    # Best available estimate: NARR, then MERRA, then HRRR
    df["best"] = (df["narr_srad"].fillna(df["merra_srad"]).fillna(
        df["hrrr_srad"]))
    # Drop Feb 29 when this year has no data for it.  The missing
    # LEFT-JOIN value is NaN (not None), so the prior `is None` check
    # never fired; test with pd.isnull and guard index membership.
    if "0229" in df.index and pd.isnull(df["best"].loc["0229"]):
        df = df.drop("0229")
    fig = plt.figure(figsize=(8, 6))
    ax = plt.axes([0.1, 0.1, 0.6, 0.8])
    ax.fill_between(
        range(len(df.index)),
        0,
        df["max_narr_smooth"],
        color="tan",
        label="Max",
    )
    # Only draw this year's bars when at least one value exists
    if not np.isnan(df[varname].max()):
        ax.bar(
            range(len(df.index)),
            df[varname],
            fc="g",
            ec="g",
            label="%s" % (year, ),
        )
    ax.set_xticks((1, 32, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335))
    ax.set_xticklabels(calendar.month_abbr[1:])
    ax.set_xlim(0, 366)
    lyear = datetime.date.today().year - 1
    ax.set_title(("[%s] %s Daily Solar Radiation\n"
                  "1979-%s NARR Climatology w/ %s ") %
                 (station, ctx["_nt"].sts[station]["name"], lyear, year))
    ax.legend()
    ax.grid(True)
    ax.set_ylabel("Shortwave Solar Radiation $MJ$ $d^{-1}$")
    # Do the x,y scatter plots
    for i, combo in enumerate([
        ("narr_srad", "merra_srad"),
        ("narr_srad", "hrrr_srad"),
        ("hrrr_srad", "merra_srad"),
    ]):
        ax3 = plt.axes([0.78, 0.1 + (0.3 * i), 0.2, 0.2])
        xmax = df[combo[0]].max()
        xlabel = combo[0].replace("_srad", "").upper()
        ylabel = combo[1].replace("_srad", "").upper()
        ymax = df[combo[1]].max()
        # A dataset that is entirely missing gets a placeholder note
        if np.isnan(xmax) or np.isnan(ymax):
            ax3.text(
                0.5,
                0.5,
                "%s or %s\nis missing" % (xlabel, ylabel),
                ha="center",
                va="center",
            )
            ax3.get_xaxis().set_visible(False)
            ax3.get_yaxis().set_visible(False)
            continue
        c = df[[combo[0], combo[1]]].corr()
        ax3.text(
            0.5,
            1.01,
            "Pearson Corr: %.2f" % (c.iat[1, 0], ),
            fontsize=10,
            ha="center",
            transform=ax3.transAxes,
        )
        ax3.scatter(df[combo[0]], df[combo[1]], edgecolor="None",
                    facecolor="green")
        # Force a square 1:1 comparison with the identity line
        maxv = max([ax3.get_ylim()[1], ax3.get_xlim()[1]])
        ax3.set_ylim(0, maxv)
        ax3.set_xlim(0, maxv)
        ax3.plot([0, maxv], [0, maxv], color="k")
        ax3.set_xlabel(
            r"%s $\mu$=%.1f" % (xlabel, df[combo[0]].mean()),
            labelpad=0,
            fontsize=12,
        )
        ax3.set_ylabel(r"%s $\mu$=%.1f" % (ylabel, df[combo[1]].mean()),
                       fontsize=12)
    return fig, df
def plotter(fdict):
    """Heat-strip chart of weekly USDA NASS progress changes by year.

    Each year is a horizontal strip; each reported week is a 7-day-wide
    cell colored by the week-over-week change ("delta") of the chosen
    progress metric.

    Returns:
        (matplotlib.figure.Figure, pandas.DataFrame)
    """
    ctx = get_autoplot_context(fdict, get_description())
    df = get_data(ctx)
    cmap = cm.get_cmap(ctx["cmap"])
    maxval = df["delta"].max()
    # Size the color bins to the largest weekly change.  Build them as
    # floats: the previous integer np.arange silently truncated the
    # 0.01 lower bound below back to 0.
    if maxval > 50:
        bins = np.arange(0, 101, 10, dtype="f")
    elif maxval > 25:
        bins = np.arange(0, 51, 5, dtype="f")
    else:
        bins = np.arange(0, 21, 2, dtype="f")
    # Nudge the first edge so that zero-change weeks fall below the
    # first bin instead of inside it
    bins[0] = 0.01
    norm = mpcolors.BoundaryNorm(bins, cmap.N)
    (fig, ax) = plt.subplots(1, 1, figsize=(6.4, 6.4))
    yearmax = df[["year", "delta"]].groupby("year").max()
    for year, df2 in df.groupby("year"):
        for _, row in df2.iterrows():
            # NOTE: minus 3.5 to center the 7 day bar
            ax.bar(
                row["doy"] - 3.5,
                1,
                bottom=year - 0.5,
                width=7,
                ec="None",
                fc=cmap(norm([row["delta"]]))[0],
            )
    # Build weekly-ish tick marks between first and last reported day
    sts = datetime.datetime(2000, 1, 1) + datetime.timedelta(
        days=int(df["doy"].min()))
    ets = datetime.datetime(2000, 1, 1) + datetime.timedelta(
        days=int(df["doy"].max()))
    now = sts
    interval = datetime.timedelta(days=1)
    jdays = []
    labels = []
    while now < ets:
        if now.day in [1, 8, 15, 22]:
            fmt = "%-d\n%b" if now.day == 1 else "%-d"
            jdays.append(int(now.strftime("%j")))
            labels.append(now.strftime(fmt))
        now += interval
    ax.set_xticks(jdays)
    ax.set_xticklabels(labels)
    minyear = df["year"].min()
    maxyear = df["year"].max()
    ax.set_yticks(range(minyear, maxyear + 1))
    # Label every year with its max delta; add the year number on
    # multiples of five to keep the axis readable
    ylabels = []
    for yr in range(minyear, maxyear + 1):
        if yr % 5 == 0:
            ylabels.append("%s %.0f" % (yr, yearmax.at[yr, "delta"]))
        else:
            ylabels.append("%.0f" % (yearmax.at[yr, "delta"], ))
    ax.set_yticklabels(ylabels, fontsize=10)
    ax.set_ylim(minyear - 0.5, maxyear + 0.5)
    ax.set_xlim(min(jdays), max(jdays))
    ax.grid(linestyle="-", linewidth="0.5", color="#EEEEEE", alpha=0.7)
    ax.set_title(("USDA NASS Weekly %s %s Progress\n"
                  "%s %% %s over weekly periods\n"
                  "yearly max labelled on left hand side") % (
                      ctx["unit_desc"],
                      PDICT2.get(ctx["commodity_desc"]),
                      state_names[ctx["state"]],
                      PDICT.get(ctx["unit_desc"]),
                  ))
    ax.set_position([0.13, 0.1, 0.71, 0.78])
    cax = plt.axes([0.86, 0.12, 0.03, 0.75], frameon=False,
                   yticks=[], xticks=[])
    cb = ColorbarBase(cax, norm=norm, cmap=cmap)
    cb.set_label("% Acres")
    return fig, df
def plotter(fdict):
    """GDD accumulation window by planting date.

    For each candidate planting date (Apr 1 - Jun 10), accumulate daily
    growing degree days until thresholds gdd1 and then gdd2 are reached,
    and chart the span between the two thresholds as a vertical bar.
    Bars for planting dates that never reach gdd2 remain gray.

    Raises:
        NoDataFound: when no planting date accumulates gdd2.
    """
    pgconn = get_dbconn("coop")
    ccursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx["station"]
    year = ctx["year"]
    gdd1 = ctx["gdd1"]
    gdd2 = ctx["gdd2"]
    table = "alldata_%s" % (station[:2], )
    nt = network.Table("%sCLIMATE" % (station[:2], ))
    # Daily GDD values for the whole year, in day order
    ccursor.execute(
        """
        SELECT day, gddxx(%s, %s, high, low) as gdd
        from """ + table + """ WHERE year = %s and station = %s
        ORDER by day ASC
        """,
        (ctx["gddbase"], ctx["gddceil"], year, station),
    )
    days = []  # NOTE(review): collected but not used below
    gdds = []
    for row in ccursor:
        gdds.append(float(row["gdd"]))
        days.append(row["day"])
    # Y axis ticks: day-of-year positions labeled as calendar dates,
    # roughly every 12 days within the growing season window
    yticks = []
    yticklabels = []
    jan1 = datetime.datetime(year, 1, 1)
    for i in range(110, 330):
        ts = jan1 + datetime.timedelta(days=i)
        if ts.day == 1 or ts.day % 12 == 1:
            yticks.append(i)
            yticklabels.append(ts.strftime("%-d %b"))
    gdds = np.array(gdds)
    # Candidate planting dates to evaluate
    sts = datetime.datetime(year, 4, 1)
    ets = datetime.datetime(year, 6, 10)
    now = sts
    sz = len(gdds)
    days2 = []    # planting dates
    starts = []   # day-of-year index where gdd1 was reached
    heights = []  # number of days between reaching gdd1 and gdd2
    success = []  # whether gdd2 was reached before data ran out
    rows = []
    while now < ets:
        idx = int(now.strftime("%j")) - 1
        # Accumulate from the planting date until gdd1 is reached ...
        running = 0
        while idx < sz and running < gdd1:
            running += gdds[idx]
            idx += 1
        idx0 = idx
        # ... then keep accumulating until gdd2 (or data exhausted)
        while idx < sz and running < gdd2:
            running += gdds[idx]
            idx += 1
        success.append(running >= gdd2)
        idx1 = idx
        days2.append(now)
        starts.append(idx0)
        heights.append(idx1 - idx0)
        rows.append(
            dict(
                plant_date=now,
                start_doy=idx0,
                end_doy=idx1,
                success=success[-1],
            ))
        now += datetime.timedelta(days=1)
    if True not in success:
        raise NoDataFound("No data, pick lower GDD values")
    df = pd.DataFrame(rows)
    heights = np.array(heights)
    success = np.array(success)
    starts = np.array(starts)
    cmap = get_cmap(ctx["cmap"])
    # Color ramp spans the range of completed (successful) bar lengths
    bmin = min(heights[success]) - 1
    bmax = max(heights[success]) + 1
    bins = np.arange(bmin, bmax + 1.1)
    norm = mpcolors.BoundaryNorm(bins, cmap.N)
    ax = plt.axes([0.125, 0.125, 0.75, 0.75])
    bars = ax.bar(days2, heights, bottom=starts, fc="#EEEEEE")
    # Recolor bars that reached gdd2 by their length; the rest stay gray
    for i, mybar in enumerate(bars):
        if success[i]:
            mybar.set_facecolor(cmap(norm([heights[i]])[0]))
    ax.grid(True)
    ax.set_yticks(yticks)
    ax.set_yticklabels(yticklabels)
    ax.set_ylim(min(starts) - 7, max(starts + heights) + 7)
    ax.xaxis.set_major_formatter(mdates.DateFormatter("%-d\n%b"))
    ax.set_xlabel("Planting Date")
    ax.set_title(("%s [%s] %s GDD [base=%s,ceil=%s]\n"
                  "Period between GDD %s and %s, gray bars incomplete") % (
        nt.sts[station]["name"],
        station,
        year,
        ctx["gddbase"],
        ctx["gddceil"],
        gdd1,
        gdd2,
    ))
    # Hand-drawn colorbar (labels + stacked colored bars) on the right
    ax2 = plt.axes([0.92, 0.1, 0.07, 0.8], frameon=False,
                   yticks=[], xticks=[])
    ax2.set_xlabel("Days")
    for i, mybin in enumerate(bins):
        ax2.text(0.52, i, "%g" % (mybin, ), ha="left", va="center",
                 color="k")
    # txt.set_path_effects([PathEffects.withStroke(linewidth=2,
    # foreground="k")])
    ax2.barh(
        np.arange(len(bins[:-1])),
        [0.5] * len(bins[:-1]),
        height=1,
        color=cmap(norm(bins[:-1])),
        ec="None",
    )
    ax2.set_xlim(0, 1)
    return plt.gcf(), df