def calendar_plot(sts, ets, data, **kwargs):
    """Render a calendar-style figure covering the given date span.

    Args:
      sts (datetime.date): first date shown on the plot
      ets (datetime.date): last date shown on the plot (inclusive)
      data (dict[dict]): maps dates to dicts containing a `val` entry and
        optionally a `color` entry
      kwargs (dict):
        heatmap (bool): shade cell backgrounds based on `val`, default False
        cmap (str): name of the color map to use for the norm
    """
    bounds = _compute_bounds(sts, ets)
    # One axes rectangle per month; we want 'square' boxes for each
    # month's calendar on a 4x3 style canvas.
    fig = plt.figure(figsize=(10.24, 7.68))
    if 'fontsize' not in kwargs:
        # Fewer month panels means larger text fits comfortably.
        nmonths = len(bounds)
        if nmonths < 3:
            kwargs['fontsize'] = 18
        elif nmonths < 5:
            kwargs['fontsize'] = 16
        elif nmonths < 10:
            kwargs['fontsize'] = 14
        else:
            kwargs['fontsize'] = 12
    if kwargs.get('heatmap', False):
        kwargs['cmap'] = plt.get_cmap(kwargs.get('cmap', 'viridis'))
        maxval = max(
            (entry['val'] for entry in data.values()), default=-1000)
        # Need at least 3 slots
        if maxval < 5:
            maxval = 5
        kwargs['norm'] = mpcolors.BoundaryNorm(
            np.arange(0, maxval), kwargs['cmap'].N)
    for month, rect in bounds.items():
        _do_month(month, fig.add_axes(rect), data, sts, ets, kwargs)
    iemlogo(fig)
    title = kwargs.get('title')
    if title is not None:
        fitbox(fig, title, 0.1, 0.99, 0.95, 0.99)
    subtitle = kwargs.get('subtitle')
    if subtitle is not None:
        fitbox(fig, subtitle, 0.1, 0.99, 0.925, 0.945)
    return fig
def plotter(fdict):
    """Scatter monthly precip/temperature, colored by the ENSO SOI state.

    Aggregates precipitation totals and mean temperature for a span of
    months per "effective year", colors each point by the 3 month averaged
    SOI value of a (possibly lagged) reference month, and adds two side
    panels regressing temperature and precipitation against SOI.
    """
    pgconn = get_dbconn("coop")
    ccursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx["station"]
    lagmonths = ctx["lag"]
    months = ctx["months"]
    month = ctx["month"]
    highyears = [int(x) for x in ctx["year"].split(",")]
    h = ctx["h"]
    wantmonth = month + lagmonths
    yearoffset = 0
    if month + lagmonths < 1:
        # The lag pushes the reference month into the previous calendar
        # year.  BUGFIX: this was `12 - (month + lagmonths)`, which is only
        # valid when the sum is exactly zero (a sum of -1 produced month 13
        # and the later datetime.date(2000, wantmonth, 1) would raise).
        wantmonth = 12 + (month + lagmonths)
        yearoffset = 1
    # Month numbers spanned by the aggregation window; months past
    # December wrap into the following year and carry a -1 year delta.
    wanted = []
    deltas = []
    for m in range(month, month + months):
        if m < 13:
            wanted.append(m)
            deltas.append(0)
        else:
            wanted.append(m - 12)
            deltas.append(-1)
    table = "alldata_%s" % (station[:2], )
    nt = network.Table("%sCLIMATE" % (station[:2], ))
    # Load the SOI / Nino 3.4 anomaly values for the reference month,
    # keyed by the effective (target) year.
    elnino = {}
    ccursor.execute("""SELECT monthdate, soi_3m, anom_34 from elnino""")
    for row in ccursor:
        if row[0].month != wantmonth:
            continue
        elnino[row[0].year + yearoffset] = dict(soi_3m=row[1],
                                                anom_34=row[2])
    ccursor.execute(
        "SELECT year, month, sum(precip), avg((high+low)/2.) "
        f"from {table} where station = %s GROUP by year, month",
        (station, ),
    )
    if ccursor.rowcount == 0:
        raise NoDataFound("No Data Found.")
    # Accumulate precip totals and temperature samples per effective year,
    # skipping years without an SOI value.
    yearly = {}
    for row in ccursor:
        (_year, _month, _precip, _temp) = row
        if _month not in wanted:
            continue
        effectiveyear = _year + deltas[wanted.index(_month)]
        nino = elnino.get(effectiveyear, {}).get("soi_3m", None)
        if nino is None:
            continue
        data = yearly.setdefault(effectiveyear,
                                 dict(precip=0, temp=[], nino=nino))
        data["precip"] += _precip
        data["temp"].append(float(_temp))
    fig = plt.figure(figsize=(10, 6))
    ax = plt.axes([0.1, 0.12, 0.5, 0.75])
    msg = ("[%s] %s\n%s\n%s SOI (3 month average)") % (
        station,
        nt.sts[station]["name"],
        title(wanted),
        datetime.date(2000, wantmonth, 1).strftime("%B"),
    )
    ax.set_title(msg)
    cmap = get_cmap(ctx["cmap"])
    zdata = np.arange(-2.0, 2.1, 0.5)
    norm = mpcolors.BoundaryNorm(zdata, cmap.N)
    rows = []
    xs = []
    ys = []
    for year in yearly:
        x = yearly[year]["precip"]
        y = np.average(yearly[year]["temp"])
        xs.append(x)
        ys.append(y)
        val = yearly[year]["nino"]
        c = cmap(norm([val])[0])
        if h == "hide" and -0.5 < val < 0.5:
            # Neutral ENSO years are de-emphasized when requested
            ax.scatter(
                x,
                y,
                facecolor="#EEEEEE",
                edgecolor="#EEEEEE",
                s=30,
                zorder=2,
                marker="s",
            )
        else:
            ax.scatter(x, y, facecolor=c, edgecolor="k", s=60, zorder=3,
                       marker="o")
        if year in highyears:
            ax.text(x, y + 0.2, "%s" % (year, ), ha="center", va="bottom",
                    zorder=5)
        rows.append(dict(year=year, precip=x, tmpf=y, soi3m=val))
    # Mark the long term averages
    ax.axhline(np.average(ys), lw=2, color="k", linestyle="-.", zorder=2)
    ax.axvline(np.average(xs), lw=2, color="k", linestyle="-.", zorder=2)
    sm = plt.cm.ScalarMappable(norm, cmap)
    sm.set_array(zdata)
    cb = plt.colorbar(sm, extend="both")
    cb.set_label("<-- El Nino :: SOI :: La Nina -->")
    ax.grid(True)
    ax.set_xlim(left=-0.01)
    ax.set_xlabel("Total Precipitation [inch], Avg: %.2f" %
                  (np.average(xs), ))
    ax.set_ylabel((r"Average Temperature $^\circ$F, "
                   "Avg: %.1f") % (np.average(ys), ))
    df = pd.DataFrame(rows)
    # Temperature vs SOI regression panel
    ax2 = plt.axes([0.67, 0.6, 0.28, 0.35])
    ax2.scatter(df["soi3m"].values, df["tmpf"].values)
    ax2.set_xlabel("<-- El Nino :: SOI :: La Nina -->")
    ax2.set_ylabel(r"Avg Temp $^\circ$F")
    slp, intercept, r_value, _, _ = stats.linregress(df["soi3m"].values,
                                                     df["tmpf"].values)
    y1 = -2.0 * slp + intercept
    y2 = 2.0 * slp + intercept
    ax2.plot([-2, 2], [y1, y2])
    ax2.text(
        0.97,
        0.9,
        "R$^2$=%.2f" % (r_value**2, ),
        ha="right",
        transform=ax2.transAxes,
        bbox=dict(color="white"),
    )
    ax2.grid(True)
    # Precipitation vs SOI regression panel
    ax3 = plt.axes([0.67, 0.1, 0.28, 0.35])
    ax3.scatter(df["soi3m"].values, df["precip"].values)
    ax3.set_xlabel("<-- El Nino :: SOI :: La Nina -->")
    ax3.set_ylabel("Total Precip [inch]")
    slp, intercept, r_value, _, _ = stats.linregress(df["soi3m"].values,
                                                     df["precip"].values)
    y1 = -2.0 * slp + intercept
    y2 = 2.0 * slp + intercept
    ax3.plot([-2, 2], [y1, y2])
    ax3.text(
        0.97,
        0.9,
        "R$^2$=%.2f" % (r_value**2, ),
        ha="right",
        transform=ax3.transAxes,
        bbox=dict(color="white"),
    )
    ax3.grid(True)
    return fig, df
def do(valid, yawsource):
    """Generate the turbine farm power plot for a given timestamp.

    Args:
      valid (datetime): timestamp to sample the 1 minute dataset at
      yawsource (str): which yaw column to use, one of yaw/yaw2/yaw3

    Returns:
      matplotlib.Figure or None when `yawsource` is invalid
    """
    if yawsource not in ["yaw", "yaw2", "yaw3"]:
        return None
    # BUGFIX: label typo "Orginal" -> "Original"
    yawdict = {"yaw": "Original", "yaw2": "daryl corrected",
               "yaw3": "daryl v2"}
    pgconn = get_dbconn("mec")
    cursor = pgconn.cursor()
    # yawsource is validated against the whitelist above before being
    # interpolated into the SQL
    cursor.execute(
        """select turbineid, power, ST_x(geom), ST_y(geom), """
        + yawsource + """, windspeed, pitch from sampled_data s JOIN
        turbines t on (t.id = s.turbineid) WHERE valid = %s and power is
        not null and """ + yawsource + """ is not null and windspeed is
        not null and pitch is not null""",
        (valid, ),
    )
    lons = []
    lats = []
    vals = []
    u = []
    v = []
    ws = []
    yaw = []
    pitch = []
    for row in cursor:
        lons.append(row[2])
        lats.append(row[3])
        vals.append(row[1])
        ws.append(row[5])
        yaw.append(row[4])
        # Decompose speed/direction into u/v wind components
        a, b = uv(speed(row[5], "MPS"), direction(row[4], "deg"))
        u.append(a.value("MPS"))
        v.append(b.value("MPS"))
        pitch.append(row[6])
    pitch = np.array(pitch)
    vals = np.array(vals)
    avgv = np.average(vals)
    # vals2 = vals - avgv
    fig = plt.figure(figsize=(12.8, 7.2))
    ax = fig.add_axes([0.14, 0.1, 0.52, 0.8])
    cmap = get_cmap("jet")
    cmap.set_under("tan")
    cmap.set_over("black")
    clevs = np.arange(0, 1651, 150)
    norm = mpcolors.BoundaryNorm(clevs, cmap.N)
    ax.quiver(lons, lats, u, v, zorder=1)
    ax.scatter(
        lons,
        lats,
        c=vals,
        norm=norm,
        edgecolor="none",
        cmap=cmap,
        s=100,
        zorder=2,
    )
    # Keep raw lon/lat values on the axis (no offset notation)
    ax.get_yaxis().get_major_formatter().set_useOffset(False)
    ax.get_xaxis().get_major_formatter().set_useOffset(False)
    ax.set_title(("Farm Turbine Power [kW] (1min sampled dataset)\n"
                  "Valid: %s, yaw source: %s") % (
                      valid.strftime("%d %b %Y %I:%M %p"),
                      yawdict.get(yawsource, yawsource),
                  ))
    make_colorbar(clevs, norm, cmap)
    # BUGFIX: LaTeX labels are now raw strings so "\m", "\s", "\c" are not
    # treated as (invalid) escape sequences.
    ax.text(
        0.05,
        0.05,
        r"Turbine Power: $\mu$= %.1f $\sigma$= %.1f kW" % (avgv,
                                                           np.std(vals)),
        transform=ax.transAxes,
    )
    ax.text(
        0.05,
        0.01,
        r"Wind $\mu$= %.1f $\sigma$= %.1f $ms^{-1}$" % (np.average(ws),
                                                        np.std(ws)),
        transform=ax.transAxes,
    )
    ax.set_xlabel(r"Longitude $^\circ$E")
    ax.set_ylabel(r"Latitude $^\circ$N")
    ax.set_xlim(-94.832, -94.673)
    ax.set_ylim(42.545, 42.671)
    ax.get_xaxis().set_ticks([])
    ax.get_yaxis().set_ticks([])
    # Next plot: power vs wind speed
    ax2 = fig.add_axes([0.7, 0.80, 0.28, 0.18])
    ax2.scatter(ws, vals, edgecolor="k", c="k")
    ax2.text(
        0.5,
        -0.25,
        r"Wind Speed $ms^{-1}$",
        transform=ax2.transAxes,
        ha="center",
    )
    ax2.set_xlim(0, 20)
    # ax2.set_ylabel("Power kW")
    ax2.grid(True)
    # Next plot: power vs yaw
    ax3 = fig.add_axes([0.7, 0.57, 0.28, 0.18], sharey=ax2)
    ax3.scatter(yaw, vals, edgecolor="k", c="k")
    ax3.text(0.5, -0.25, "Yaw", transform=ax3.transAxes, ha="center")
    # ax3.set_ylabel("Power kW")
    ax3.set_xlim(0, 360)
    ax3.set_xticks(np.arange(0, 361, 45))
    ax3.set_xticklabels(["N", "NE", "E", "SE", "S", "SW", "W", "NW", "N"])
    ax3.grid(True)
    # Next plot: power vs pitch
    ax4 = fig.add_axes([0.7, 0.32, 0.28, 0.18], sharey=ax2)
    ax4.scatter(pitch, vals, edgecolor="k", c="k")
    ax4.text(0.5, -0.25, r"Pitch $^\circ$", transform=ax4.transAxes,
             ha="center")
    ax4.set_ylim(-10, 1600)
    ax4.grid(True)
    # Next plot: wind speed vs pitch
    ax5 = fig.add_axes([0.7, 0.07, 0.28, 0.18], sharex=ax4)
    ax5.scatter(pitch, ws, edgecolor="k", c="k")
    ax5.text(0.5, -0.25, r"Pitch $^\circ$", transform=ax5.transAxes,
             ha="center")
    ax5.grid(True)
    ax5.set_ylim(bottom=-10)
    # maxpitch = max(np.where(pitch > 20, 0, pitch))
    # ax5.set_xlim(np.ma.minimum(pitch)-0.5, maxpitch+0.5)
    ax5.set_xlim(-3, 20.1)
    ax5.set_ylim(0, 20)
    ax5.text(
        -0.1,
        0.5,
        r"Wind Speed $ms^{-1}$",
        transform=ax5.transAxes,
        ha="center",
        va="center",
        rotation=90,
    )
    # BUGFIX: the figure was built but never handed back to the caller
    return fig
def plotter(fdict):
    """Build a mosaic image of NWS storm based warning polygon thumbnails.

    Returns the figure, a dataframe of the plotted warnings, and an HTML
    image-map fragment with one clickable <area> per thumbnail.
    """
    ctx = get_autoplot_context(fdict, get_description())
    typ = ctx['typ']
    sort = ctx['sort']
    date = ctx['date']
    pgconn = get_dbconn('postgis')
    # 24 hour UTC window beginning at midnight of the chosen date
    sts = utc(date.year, date.month, date.day)
    ets = sts + datetime.timedelta(hours=24)
    # SQL ORDER BY clause per requested sort mode
    opts = {
        'W': {
            'fnadd': '-wfo',
            'sortby': 'wfo ASC, phenomena ASC, eventid ASC'
        },
        'S': {
            'fnadd': '',
            'sortby': 'size DESC'
        },
        'T': {
            'fnadd': '-time',
            'sortby': 'issue ASC'
        }
    }
    phenoms = {'W': ['TO', 'SV'], 'F': ['FF'], 'M': ["MA"]}
    # Defaults
    thumbpx = 100  # pixel size of each square thumbnail
    cols = 10  # thumbnails per row
    mybuffer = 10000  # plot buffer around each polygon (EPSG 2163 units)
    header = 35  # pixels reserved at the top for the title text
    # Find largest polygon either in height or width
    gdf = read_postgis(""" SELECT wfo, phenomena, eventid, issue, ST_area2d(ST_transform(geom,2163)) as size, (ST_xmax(ST_transform(geom,2163)) + ST_xmin(ST_transform(geom,2163))) /2.0 as xc, (ST_ymax(ST_transform(geom,2163)) + ST_ymin(ST_transform(geom,2163))) /2.0 as yc, ST_transform(geom, 2163) as utmgeom, (ST_xmax(ST_transform(geom,2163)) - ST_xmin(ST_transform(geom,2163))) as width, (ST_ymax(ST_transform(geom,2163)) - ST_ymin(ST_transform(geom,2163))) as height from sbw_""" + str(sts.year) + """ WHERE status = 'NEW' and issue >= %s and issue < %s and phenomena IN %s and eventid is not null ORDER by """ + opts[sort]['sortby'] + """ """,
                       pgconn,
                       params=(sts, ets, tuple(phenoms[typ])),
                       geom_col='utmgeom',
                       index_col=None)
    # For size reduction work
    df = read_sql(""" SELECT w.wfo, phenomena, eventid, sum(ST_area2d(ST_transform(u.geom,2163))) as county_size from warnings_""" + str(sts.year) + """ w JOIN ugcs u on (u.gid = w.gid) WHERE issue >= %s and issue < %s and significance = 'W' and phenomena IN %s GROUP by w.wfo, phenomena, eventid """,
                  pgconn,
                  params=(sts, ets, tuple(phenoms[typ])),
                  index_col=['wfo', 'phenomena', 'eventid'])
    # Join the columns
    gdf = gdf.merge(df, on=['wfo', 'phenomena', 'eventid'])
    # "CAR": percent area reduction of the polygon vs the county version
    gdf['ratio'] = (1. - (gdf['size'] / gdf['county_size'])) * 100.
    # Make mosaic image
    events = len(df.index)
    rows = int(events / cols) + 1
    if events % cols == 0:
        rows -= 1
    if rows == 0:
        rows = 1
    ypixels = (rows * thumbpx) + header
    # 100 dpi figure sized so each thumbnail cell is thumbpx pixels
    fig = plt.figure(figsize=(thumbpx * cols / 100., ypixels / 100.))
    faux = plt.axes([0, 0, 1, 1], facecolor='black')
    imagemap = StringIO()
    utcnow = utc()
    imagemap.write("<!-- %s %s -->\n" %
                   (utcnow.strftime("%Y-%m-%d %H:%M:%S"), sort))
    imagemap.write("<map name='mymap'>\n")
    # Write metadata to image
    mydir = os.sep.join(
        [os.path.dirname(os.path.abspath(__file__)), "../../../images"])
    logo = mpimage.imread("%s/logo_reallysmall.png" % (mydir, ))
    # NOTE(review): duplicated `y0 = y0 =` assignment looks like a typo,
    # though it is harmless
    y0 = y0 = fig.get_figheight() * 100.0 - logo.shape[0] - 5
    fig.figimage(logo, 5, y0, zorder=3)
    i = 0
    # amount of NDC y space we have for axes plotting
    ytop = 1 - header / float((rows * 100) + header)
    dy = ytop / float(rows)
    ybottom = ytop
    # Sumarize totals
    y = ytop
    dy2 = (1. - ytop) / 2.
    for phenomena, df2 in gdf.groupby('phenomena'):
        car = (1. - df2['size'].sum() / df2['county_size'].sum()) * 100.
        fitbox(fig, ("%i %s.W: Avg size %5.0f km^2 CAR: %.0f%%") %
               (len(df2.index), phenomena, df2['size'].mean() / 1e6, car),
               0.8,
               0.99,
               y,
               y + dy2,
               color=COLORS[phenomena])
        y += dy2
    fitbox(fig,
           "NWS %s Storm Based Warnings issued %s UTC" % (
               " + ".join([VTEC_PHENOMENA[p] for p in phenoms[typ]]),
               sts.strftime("%d %b %Y"),
           ),
           0.05,
           0.79,
           ytop + dy2,
           0.999,
           color='white')
    fitbox(fig,
           "Generated: %s UTC, IEM Autplot #203" %
           (utcnow.strftime("%d %b %Y %H:%M:%S"), ),
           0.05,
           0.79,
           ytop,
           0.999 - dy2,
           color='white')
    # We want to reserve 14pts at the bottom and buffer the plot by 10km
    # so we compute this in the y direction, since it limits us
    max_dimension = max([gdf['width'].max(), gdf['height'].max()])
    yspacing = max_dimension / 2. + mybuffer
    xspacing = yspacing * 1.08  # approx
    for _, row in gdf.iterrows():
        # - Map each polygon
        x0 = float(row['xc']) - xspacing
        x1 = float(row['xc']) + xspacing
        # shift the window downward to leave room for the text labels
        y0 = float(row['yc']) - yspacing - (yspacing * 0.14)
        y1 = float(row['yc']) + yspacing - (yspacing * 0.14)
        col = i % 10
        if col == 0:
            ybottom -= dy
        ax = plt.axes([col * 0.1, ybottom, 0.1, dy],
                      facecolor='black',
                      xticks=[],
                      yticks=[],
                      aspect='auto')
        for x in ax.spines:
            ax.spines[x].set_visible(False)
        ax.set_xlim(x0, x1)
        ax.set_ylim(y0, y1)
        for poly in row['utmgeom']:
            xs, ys = poly.exterior.xy
            color = COLORS[row['phenomena']]
            ax.plot(xs, ys, color=color, lw=2)
        car = "NA"
        carColor = 'white'
        if not pd.isnull(row['ratio']):
            carf = row['ratio']
            car = "%.0f" % (carf, )
            # Color-code the area reduction percentage
            if carf > 75:
                carColor = 'green'
            if carf < 25:
                carColor = 'red'
        # Draw Text!
        issue = row['issue']
        s = "%s.%s.%s.%s" % (row['wfo'], row['phenomena'], row['eventid'],
                             issue.strftime("%H%M"))
        # (w, h) = font10.getsize(s)
        # print s, h
        ax.text(0,
                0,
                s,
                transform=ax.transAxes,
                color='white',
                va='bottom',
                fontsize=7)
        s = "%.0f sq km %s%%" % (row['size'] / 1000000.0, car)
        ax.text(0,
                0.1,
                s,
                transform=ax.transAxes,
                color=carColor,
                va='bottom',
                fontsize=7)
        # Image map
        url = ("/vtec/#%s-O-NEW-K%s-%s-%s-%04i") % (
            sts.year, row['wfo'], row['phenomena'], 'W', row['eventid'])
        altxt = "Click for text/image"
        pos = ax.get_position()
        # Convert the axes NDC position into image pixel coordinates
        mx0 = pos.x0 * 1000.
        my = (1. - pos.y1) * ypixels
        imagemap.write(
            ("<area href=\"%s\" alt=\"%s\" title=\"%s\" "
             "shape=\"rect\" coords=\"%.0f,%.0f,%.0f,%.0f\">\n") %
            (url, altxt, altxt, mx0, my, mx0 + thumbpx, my + thumbpx))
        i += 1
    # Overlay blue separator lines between the thumbnail rows
    faux = plt.axes([0, 0, 1, 1], facecolor='None', zorder=100)
    for i in range(1, rows):
        faux.axhline(i * dy, lw=1., color='blue')
    imagemap.write("</map>")
    imagemap.seek(0)
    if gdf.empty:
        fitbox(fig, "No warnings Found!", 0.2, 0.8, 0.2, 0.5, color='white')
    # The geometry column is not serializable downstream
    df = gdf.drop('utmgeom', axis=1)
    return fig, df, imagemap.read()
def plotter(fdict):
    """Plot the top-10 hourly events for a station/variable/month filter.

    Ranks ties with a shared rank number and shows one bar per event with
    the observation timestamp labeled on the right axis.
    """
    pgconn = get_dbconn('asos')
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['zstation']
    network = ctx['network']
    month = ctx['month']
    varname = ctx['var']
    nt = NetworkTable(network)
    # Translate the month selection into a list of month numbers
    if month == 'all':
        months = range(1, 13)
    elif month == 'fall':
        months = [9, 10, 11]
    elif month == 'winter':
        months = [12, 1, 2]
    elif month == 'spring':
        months = [3, 4, 5]
    elif month == 'summer':
        months = [6, 7, 8]
    elif month == 'octmar':
        months = [10, 11, 12, 1, 2, 3]
    else:
        ts = datetime.datetime.strptime("2000-" + month + "-01", '%Y-%b-%d')
        # make sure it is length two for the trick below in SQL
        months = [ts.month, 999]
    # varname is e.g. "max_p01i": aggregate operator + database column
    (agg, dbvar) = varname.split("_")
    sorder = 'DESC' if agg == 'max' else 'ASC'
    # dbvar/sorder come from the controlled vocabulary above, not raw input
    df = read_sql("""WITH data as ( SELECT valid at time zone %s as v, p01i from alldata WHERE station = %s and extract(month from valid at time zone %s) in %s) SELECT v as valid, p01i from data ORDER by """ + dbvar + """ """ + sorder + """ NULLS LAST LIMIT 100 """,
                  pgconn,
                  params=(nt.sts[station]['tzname'], station,
                          nt.sts[station]['tzname'], tuple(months)),
                  index_col=None)
    if df.empty:
        raise ValueError('Error, no results returned!')
    ylabels = []
    # Precip gets two decimals, temperatures get none
    fmt = '%.2f' if varname in [
        'max_p01i',
    ] else '%.0f'
    hours = []
    y = []
    lastval = -99
    ranks = []
    currentrank = 0
    rows2keep = []
    for idx, row in df.iterrows():
        # De-duplicate to at most one event per clock hour
        key = row['valid'].strftime("%Y%m%d%H")
        if key in hours:
            continue
        rows2keep.append(idx)
        hours.append(key)
        y.append(row[dbvar])
        lbl = fmt % (row[dbvar], )
        lbl += " -- %s" % (row['valid'].strftime("%b %d, %Y %-I:%M %p"), )
        ylabels.append(lbl)
        # Tied values share the same rank number
        if row[dbvar] != lastval:
            currentrank += 1
        ranks.append(currentrank)
        lastval = row[dbvar]
        if len(ylabels) == 10:
            break
    fig = plt.figure(figsize=(8, 6))
    ax = plt.axes([0.1, 0.1, 0.5, 0.8])
    ax.barh(range(10, 0, -1),
            y,
            ec='green',
            fc='green',
            height=0.8,
            align='center')
    # Twin axis carries the value/timestamp labels on the right side
    ax2 = ax.twinx()
    ax2.set_ylim(0.5, 10.5)
    ax.set_ylim(0.5, 10.5)
    ax2.set_yticks(range(1, 11))
    ax.set_yticks(range(1, 11))
    ax.set_yticklabels(["#%s" % (x, ) for x in ranks][::-1])
    ax2.set_yticklabels(ylabels[::-1])
    ax.grid(True, zorder=11)
    ax.set_xlabel(("Precipitation [inch]"
                   if varname in ['max_p01i'] else r"Temperature $^\circ$F"))
    ax.set_title(("%s [%s] Top 10 Events\n"
                  "%s (%s) "
                  "(%s-%s)") %
                 (nt.sts[station]['name'], station, METRICS[varname],
                  MDICT[month], nt.sts[station]['archive_begin'].year,
                  datetime.datetime.now().year),
                 size=12)
    fig.text(0.98,
             0.03,
             "Timezone: %s" % (nt.sts[station]['tzname'], ),
             ha='right')
    return fig, df.loc[rows2keep]
def plotter(fdict):
    """Histogram of hours-per-year at or beyond temperature thresholds.

    Supports heat index (>= comparisons) and wind chill (<= comparisons),
    comparing the long term average against one highlighted year.
    """
    pgconn = get_dbconn("asos")
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx["zstation"]
    highlightyear = ctx["year"]
    sdate = datetime.date(ctx["syear"], 1, 1)
    edate = datetime.date(ctx["eyear"] + 1, 1, 1)
    ytd = ctx["ytd"]
    varname = ctx["var"]
    inc = ctx["inc"]
    doylimiter = get_doylimit(ytd, varname)
    # Restrict temperatures to the regime where the derived quantity
    # applies; no restriction for plain temperature variables
    tmpflimit = "and tmpf >= 50" if varname != "windchill" else "and tmpf < 50"
    if varname not in ["windchill", "heatindex"]:
        tmpflimit = ""
    df = read_sql(
        "SELECT to_char(valid, 'YYYYmmddHH24') as d, avg(tmpf)::int as tmpf, "
        "avg(dwpf)::int as dwpf, avg(coalesce(sknt, 0)) as sknt "
        f"from alldata WHERE station = %s {tmpflimit} "
        "and dwpf <= tmpf and valid > %s and valid < %s and report_type = 2 "
        f"{doylimiter} GROUP by d",
        pgconn,
        params=(station, sdate, edate),
        index_col=None,
    )
    if df.empty:
        raise NoDataFound("No Data Found.")
    df["year"] = df["d"].apply(lambda x: int(x[:4]))
    df2 = df
    title2 = VDICT[varname]
    # Default comparison: count hours at-or-above each level
    compop = np.greater_equal
    inctitle = ""
    if varname == "heatindex":
        df["heatindex"] = (heat_index(
            df["tmpf"].values * units("degF"),
            relative_humidity_from_dewpoint(
                df["tmpf"].values * units("degF"),
                df["dwpf"].values * units("degF"),
            ),
        ).to(units("degF")).m)
        inctitle = " [All Obs Included]"
        if inc == "no":
            # Only keep obs where heat index actually exceeds temperature
            df2 = df[df["heatindex"] > df["tmpf"]]
            inctitle = " [Only Additive]"
        else:
            df2 = df
        maxval = int(df2["heatindex"].max() + 1)
        # NOTE(review): mutates the module-level LEVELS dict
        LEVELS[varname] = np.arange(80, maxval)
    elif varname == "windchill":
        # Wind chill counts hours at-or-below each level
        compop = np.less_equal
        # Winter season years: Jan-Jun obs belong to the prior year
        df["year"] = df["d"].apply(lambda x: (int(x[:4]) - 1)
                                   if int(x[4:6]) < 7 else int(x[:4]))
        df["windchill"] = (windchill(
            df["tmpf"].values * units("degF"),
            df["sknt"].values * units("knot"),
        ).to(units("degF")).m)
        inctitle = " [All Obs Included]"
        if inc == "no":
            # Only keep obs where wind chill is actually below temperature
            df2 = df[df["windchill"] < df["tmpf"]]
            inctitle = " [Only Additive]"
        else:
            df2 = df
        minval = int(df2["windchill"].min() - 1)
        LEVELS[varname] = np.arange(minval, minval + 51)
    else:
        maxval = int(df2[varname].max() + 1)
        LEVELS[varname] = np.arange(maxval - 31, maxval)
    bs = ctx["_nt"].sts[station]["archive_begin"]
    if bs is None:
        raise NoDataFound("Unknown station metadata.")
    minyear = df["year"].min()
    maxyear = df["year"].max()
    years = float((maxyear - minyear) + 1)
    x = []
    y = []
    y2 = []
    fig = plt.figure(figsize=(9, 6))
    ax = fig.add_axes([0.1, 0.1, 0.6, 0.8])
    # Text table of values rendered to the right of the axes
    yloc = 1.0
    xloc = 1.13
    yrlabel = ("%s" % (highlightyear, ) if varname != "windchill" else
               "%s-%s" % (highlightyear, highlightyear + 1))
    ax.text(xloc + 0.08,
            yloc + 0.04,
            "Avg:",
            transform=ax.transAxes,
            color="b")
    ax.text(xloc + 0.21,
            yloc + 0.04,
            yrlabel,
            transform=ax.transAxes,
            color="r")
    df3 = df2[df2["year"] == highlightyear]
    for level in LEVELS[varname]:
        x.append(level)
        # Average hours/year at-or-beyond this level vs highlighted year
        y.append(len(df2[compop(df2[varname], level)]) / years)
        y2.append(len(df3[compop(df3[varname], level)]))
        if level % 2 == 0:
            # Only print every other level in the side table
            ax.text(xloc, yloc, "%s" % (level, ), transform=ax.transAxes)
            ax.text(
                xloc + 0.08,
                yloc,
                "%.1f" % (y[-1], ),
                transform=ax.transAxes,
                color="b",
            )
            ax.text(
                xloc + 0.21,
                yloc,
                "%.0f" % (y2[-1], ),
                transform=ax.transAxes,
                color="r",
            )
            yloc -= 0.04
    ax.text(xloc, yloc, "n=%s" % (len(df2.index), ), transform=ax.transAxes)
    # Connect the average and highlighted-year points at each level
    for x0, y0, y02 in zip(x, y, y2):
        ax.plot([x0, x0], [y0, y02], color="k")
    rdf = pd.DataFrame({"level": x, "avg": y, "d%s" % (highlightyear, ): y2})
    x = np.array(x, dtype=np.float64)
    ax.scatter(x, y, color="b", label="Avg")
    ax.scatter(x, y2, color="r", label=yrlabel)
    ax.grid(True)
    ymax = int(max([max(y), max(y2)]))
    ax.set_xlim(x[0] - 0.5, x[-1] + 0.5)
    # Tick spacing in whole 24 hour day multiples
    dy = 24 * (int(ymax / 240) + 1)
    ax.set_yticks(range(0, ymax, dy))
    ax.set_ylim(-0.5, ymax + 5)
    ax2 = ax.twinx()
    ax2.set_ylim(-0.5, ymax + 5)
    ax2.set_yticks(range(0, ymax, dy))
    ax2.set_yticklabels(["%.0f" % (s, ) for s in np.arange(0, ymax, dy) / 24])
    ax2.set_ylabel("Expressed in 24 Hour Days")
    ax.set_ylabel("Hours Per Year")
    ax.set_xlabel(r"%s $^\circ$F" % (VDICT[varname], ))
    title = "till %s" % (datetime.date.today().strftime("%-d %b"), )
    title = "Entire Year" if ytd == "no" else title
    ax.set_title(("[%s] %s %s-%s\n"
                  "%s Histogram (%s)%s") % (
                      station,
                      ctx["_nt"].sts[station]["name"],
                      minyear,
                      maxyear,
                      title2,
                      title,
                      inctitle,
                  ))
    ax.legend(loc="best", scatterpoints=1)
    return fig, rdf
def plotter(fdict):
    """Plot one year of daily solar radiation vs the NARR climatology max.

    The main panel shows the chosen year's daily values against a smoothed
    1979+ NARR maximum envelope; three side panels cross-compare the
    NARR/MERRA/HRRR estimates against each other.
    """
    pgconn = get_dbconn('coop')
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['station']
    year = ctx['year']
    varname = ctx['var']
    table = "alldata_%s" % (station[:2], )
    df = read_sql(""" WITH agg as ( SELECT sday, max(coalesce(narr_srad, 0)) from """ + table + """ where station = %s and year > 1978 GROUP by sday), obs as ( SELECT sday, day, narr_srad, merra_srad, hrrr_srad from """ + table + """ WHERE station = %s and year = %s) SELECT a.sday, a.max as max_narr, o.day, o.narr_srad, o.merra_srad, o.hrrr_srad from agg a LEFT JOIN obs o on (a.sday = o.sday) ORDER by a.sday ASC """,
                  pgconn,
                  params=(station, station, year),
                  index_col='sday')
    if df.empty:
        raise NoDataFound("No Data Found.")
    # Smooth the climatological max with a centered 7 day window
    df['max_narr_smooth'] = df['max_narr'].rolling(window=7,
                                                   min_periods=1,
                                                   center=True).mean()
    # Best-available daily value: NARR, falling back to MERRA then HRRR
    df['best'] = df['narr_srad'].fillna(df['merra_srad']).fillna(
        df['hrrr_srad'])
    # hack for leap day here: drop 29 Feb when the chosen year has no
    # value for it.  BUGFIX: this previously compared `is None`, which is
    # never true for the NaN a LEFT JOIN miss produces, so the row was
    # never dropped; also guard against the index lacking the key.
    if '0229' in df.index and pd.isnull(df['best'].loc['0229']):
        df = df.drop('0229')
    fig = plt.figure(figsize=(8, 6))
    ax = plt.axes([0.1, 0.1, 0.6, 0.8])
    ax.fill_between(range(len(df.index)),
                    0,
                    df['max_narr_smooth'],
                    color='tan',
                    label="Max")
    if not np.isnan(df[varname].max()):
        ax.bar(range(len(df.index)),
               df[varname],
               fc='g',
               ec='g',
               label="%s" % (year, ))
    ax.set_xticks((1, 32, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335,
                   365))
    ax.set_xticklabels(calendar.month_abbr[1:])
    ax.set_xlim(0, 366)
    lyear = datetime.date.today().year - 1
    ax.set_title(("[%s] %s Daily Solar Radiation\n"
                  "1979-%s NARR Climatology w/ %s ") %
                 (station, ctx['_nt'].sts[station]['name'], lyear, year))
    ax.legend()
    ax.grid(True)
    ax.set_ylabel("Shortwave Solar Radiation $MJ$ $d^{-1}$")
    # Do the x,y scatter plots
    for i, combo in enumerate([('narr_srad', 'merra_srad'),
                               ('narr_srad', 'hrrr_srad'),
                               ('hrrr_srad', 'merra_srad')]):
        ax3 = plt.axes([0.78, 0.1 + (0.3 * i), 0.2, 0.2])
        xmax = df[combo[0]].max()
        xlabel = combo[0].replace("_srad", "").upper()
        ylabel = combo[1].replace("_srad", "").upper()
        ymax = df[combo[1]].max()
        if np.isnan(xmax) or np.isnan(ymax):
            # One of the two sources is entirely missing for this year
            ax3.text(0.5,
                     0.5,
                     "%s or %s\nis missing" % (xlabel, ylabel),
                     ha='center',
                     va='center')
            ax3.get_xaxis().set_visible(False)
            ax3.get_yaxis().set_visible(False)
            continue
        c = df[[combo[0], combo[1]]].corr()
        ax3.text(0.5,
                 1.01,
                 "Pearson Corr: %.2f" % (c.iat[1, 0], ),
                 fontsize=10,
                 ha='center',
                 transform=ax3.transAxes)
        ax3.scatter(df[combo[0]],
                    df[combo[1]],
                    edgecolor='None',
                    facecolor='green')
        # Square up the axes and add the 1:1 reference line
        maxv = max([ax3.get_ylim()[1], ax3.get_xlim()[1]])
        ax3.set_ylim(0, maxv)
        ax3.set_xlim(0, maxv)
        ax3.plot([0, maxv], [0, maxv], color='k')
        ax3.set_xlabel(r"%s $\mu$=%.1f" % (xlabel, df[combo[0]].mean()),
                       labelpad=0,
                       fontsize=12)
        ax3.set_ylabel(r"%s $\mu$=%.1f" % (ylabel, df[combo[1]].mean()),
                       fontsize=12)
    return fig, df
def plot_sky(days, vsby, data, station, ctx, sts):
    """Sky plot variant: cloud level image with a visibility strip below.

    Args:
      days (int): number of days in the month being plotted
      vsby: visibility array rendered on the bottom strip (0-10 miles)
      data: cloud level array rendered on the main panel
      station (str): station identifier used in titles
      ctx (dict): autoplot context providing station metadata via `_nt`
      sts (datetime): timestamp providing the month/year for labels
    """
    fig = plt.figure(figsize=(8, 6))
    # vsby plot
    ax = plt.axes([0.1, 0.08, 0.8, 0.03])
    # One tick per day along the hourly x axis
    ax.set_xticks(np.arange(0, days * 24 + 1, 24))
    ax.set_xticklabels(np.arange(1, days + 1))
    ax.set_yticks([])
    cmap = cm.get_cmap("gray")
    cmap.set_bad("white")
    res = ax.imshow(
        vsby,
        aspect="auto",
        extent=[0, days * 24, 0, 1],
        vmin=0,
        cmap=cmap,
        vmax=10,
    )
    cax = plt.axes([0.915, 0.08, 0.035, 0.2])
    fig.colorbar(res, cax=cax)
    fig.text(0.02, 0.09, "Visibility\n[miles]", va="center")
    # clouds
    ax = plt.axes([0.1, 0.16, 0.8, 0.7])
    ax.set_facecolor("skyblue")
    ax.set_xticks(np.arange(0, days * 24 + 1, 24))
    ax.set_xticklabels(np.arange(1, days + 1))
    fig.text(
        0.5,
        0.935,
        ("[%s] %s %s Clouds & Visibility\nbased on ASOS METAR Cloud Amount "
         "/Level and Visibility Reports") %
        (station, ctx["_nt"].sts[station]["name"], sts.strftime("%b %Y")),
        ha="center",
        fontsize=14,
    )
    # Reversed gray map: masked -> white, below vmin -> sky blue (clear)
    cmap = cm.get_cmap("gray_r")
    cmap.set_bad("white")
    cmap.set_under("skyblue")
    ax.imshow(
        np.flipud(data),
        aspect="auto",
        extent=[0, days * 24, 0, 250],
        cmap=cmap,
        vmin=1,
    )
    # y axis in hundreds of feet, labeled as 1000s of feet
    ax.set_yticks(range(0, 260, 50))
    ax.set_yticklabels(range(0, 25, 5))
    ax.set_ylabel("Cloud Levels [1000s feet]")
    fig.text(0.45, 0.02,
             "Day of %s (UTC Timezone)" % (sts.strftime("%b %Y"), ))
    # Legend swatches matching the colormap special colors above
    r1 = Rectangle((0, 0), 1, 1, fc="skyblue")
    r2 = Rectangle((0, 0), 1, 1, fc="white")
    r3 = Rectangle((0, 0), 1, 1, fc="k")
    r4 = Rectangle((0, 0), 1, 1, fc="#EEEEEE")
    ax.grid(True)
    ax.legend(
        [r1, r4, r2, r3],
        ["Clear", "Some", "Unknown", "Obscured by Overcast"],
        loc="lower center",
        fontsize=14,
        bbox_to_anchor=(0.5, 0.99),
        fancybox=True,
        shadow=True,
        ncol=4,
    )
    return fig
def windrose(station, database='asos', months=np.arange(1, 13),
             hours=np.arange(0, 24), sts=datetime.datetime(1970, 1, 1),
             ets=datetime.datetime(2050, 1, 1), units="mph", nsector=36,
             justdata=False, rmax=None, cursor=None, sname=None,
             sknt=None, drct=None, valid=None, level=None, bins=None,
             **kwargs):
    """Utility function that generates a windrose plot

    Args:
      station (str): station identifier to search database for
      database (str,optional): database name to look for data within
      months (list,optional): optional list of months to limit plot to
      hours (list,optional): optional list of hours to limit plot to
      sts (datetime,optional): start datetime
      ets (datetime,optional): end datetime
      units (str,optional): units to plot values as
      nsector (int,optional): number of bins to devide the windrose into
      justdata (boolean,optional): if True, write out the data only
      rmax (float,optional): radius of the plot
      cursor (psycopg2.cursor,optional): provide a database cursor to run
        the query against.
      sname (str,optional): The name of this station, if not specified it
        will default to the ((`station`)) identifier
      sknt (list,optional): A list of wind speeds in knots already
        generated
      drct (list,optional): A list of wind directions (deg N) already
        generated
      valid (list,optional): A list of valid datetimes (with tzinfo set)
      level (int,optional): In case of RAOB, which level interests us (hPa)
      bins (list,optional): bins to use for the wind speed; defaults to an
        empty list (library defaults apply)

    Returns:
      matplotlib.Figure instance or textdata
    """
    # BUGFIX: `bins` previously defaulted to a mutable `[]`; use a None
    # sentinel and normalize here so the default is never shared.
    if bins is None:
        bins = []
    monthinfo = _get_timeinfo(months, 'month', 12)
    hourinfo = _get_timeinfo(hours, 'hour', 24)
    if sknt is None or drct is None:
        # Fetch observations from the database
        df = _get_data(station, cursor, database, sts, ets, monthinfo,
                       hourinfo, level)
    else:
        # Caller supplied the observations directly
        df = pd.DataFrame({'sknt': sknt, 'drct': drct, 'valid': valid})
    # Convert wind speed into the units we want here
    if df['sknt'].max() > 0:
        df['speed'] = speed(df['sknt'].values, 'KT').value(units.upper())
    if justdata:
        return _make_textresult(station, df, units, nsector, sname,
                                monthinfo, hourinfo, level, bins)
    if len(df.index) < 5 or not df['sknt'].max() > 0:
        # Not enough (or entirely calm) data; emit a placeholder figure
        fig = plt.figure(figsize=(6, 7), dpi=80, facecolor='w',
                         edgecolor='w')
        fig.text(0.17, 0.89, 'Not enough data available to generate plot')
        return fig
    return _make_plot(station, df, units, nsector, rmax, hours, months,
                      sname, level, bins, **kwargs)
def _make_plot(station, df, units, nsector, rmax, hours, months, sname,
               level, bins, **kwargs):
    """Generate a matplotlib windrose plot

    Args:
      station (str): station identifier
      df (pd.DataFrame): observations
      units (str): units of wind speed
      nsector (int): number of bins to use for windrose
      rmax (float): radius of the plot
      hours (list): hour limit for plot
      months (list): month limit for plot
      sname (str): station name
      level (int): RAOB level in hPa of interest
      bins (list): values for binning the wind speeds

    Returns:
      matplotlib.Figure
    """
    # Generate figure
    fig = plt.figure(figsize=(8, 8), dpi=100, facecolor='w', edgecolor='w')
    rect = [0.12, 0.12, 0.76, 0.76]
    ax = WindroseAxes(fig, rect, facecolor='w', rmax=rmax)
    fig.add_axes(ax)
    # Pick the unit config; RAOBs have their own set
    wu = WINDUNITS[units] if level is None else RAOB_WINDUNITS[units]
    if bins:
        # Caller-supplied bins override the defaults; rebuild labels
        wu['bins'] = bins
        wu['binlbl'] = []
        for i, mybin in enumerate(bins[1:-1]):
            wu['binlbl'].append("%g-%g" % (mybin, bins[i + 2]))
        wu['binlbl'].append("%g+" % (bins[-1], ))
    # Filters the missing values
    df2 = df[df['drct'] >= 0]
    try:
        # Unsure why this bombs out sometimes
        ax.bar(df2['drct'].values,
               df2['speed'].values,
               normed=True,
               bins=wu['bins'],
               opening=0.8,
               edgecolor='white',
               nsector=nsector)
    except Exception as exp:
        sys.stderr.write(str(exp))
    # Figure out the shortest bar and place the radial labels there
    mindir = ax._info['dir'][np.argmin(np.sum(ax._info['table'], axis=0))]
    ax.set_rlabel_position((450 - mindir) % 360 - 15)
    # Adjust the limits so to get a empty center
    rmin, rmax = ax.get_ylim()
    ax.set_rorigin(0 - (rmax - rmin) * 0.2)
    # Make labels have % formatters
    ax.yaxis.set_major_formatter(FormatStrFormatter('%.1f%%'))
    # Build legend proxies matching the windrose bar colors
    handles = []
    for p in ax.patches_list:
        color = p.get_facecolor()
        handles.append(
            plt.Rectangle((0, 0),
                          0.1,
                          0.3,
                          facecolor=color,
                          edgecolor='black'))
    legend = fig.legend(handles,
                        wu['binlbl'],
                        bbox_to_anchor=(0.01, 0.01, 0.98, 0.09),
                        loc='center',
                        ncol=6,
                        title='Wind Speed [%s]' % (wu['abbr'], ),
                        mode=None,
                        columnspacing=0.9,
                        handletextpad=0.45,
                        fontsize=14)
    plt.setp(legend.get_texts(), fontsize=10)
    # Now we put some fancy debugging info on the plot
    tlimit = "Time Domain: "
    if len(hours) == 24 and len(months) == 12:
        tlimit = "All Year"
    if len(hours) < 24:
        if len(hours) > 4:
            # Many hours: show a start-end range
            tlimit += "%s-%s" % (
                datetime.datetime(2000, 1, 1, hours[0]).strftime("%-I %p"),
                datetime.datetime(2000, 1, 1, hours[-1]).strftime("%-I %p"))
        else:
            # Few hours: list them individually
            for h in hours:
                tlimit += "%s," % (datetime.datetime(2000, 1, 1,
                                                     h).strftime("%-I %p"), )
    if len(months) < 12:
        for h in months:
            tlimit += "%s," % (datetime.datetime(2000, h,
                                                 1).strftime("%b"), )
    label = """[%s] %s%s Windrose Plot [%s] Period of Record: %s - %s""" % (
        station, sname if sname is not None else "((%s))" % (station, ),
        "" if level is None else " @%s hPa" % (level, ), tlimit,
        df['valid'].min().strftime("%d %b %Y"),
        df['valid'].max().strftime("%d %b %Y"))
    plt.gcf().text(0.14, 0.99, label, va='top', fontsize=14)
    # Calm percentage shown in the empty center of the rose
    plt.gcf().text(
        0.5,
        0.5,
        "Calm\n%.1f%%" %
        (len(df[df['sknt'] == 0].index) / float(len(df2.index)) * 100., ),
        ha='center',
        va='center',
        fontsize=14)
    plt.gcf().text(
        0.96,
        0.11,
        ("Summary\nobs count: %s\nMissing: %s\nAvg Speed: %.1f %s") %
        (len(df.index), len(df.index) - len(df2.index), df['speed'].mean(),
         wu['abbr']),
        ha='right',
        fontsize=14)
    if not kwargs.get('nogenerated', False):
        plt.gcf().text(
            0.02,
            0.1,
            "Generated: %s" %
            (datetime.datetime.now().strftime("%d %b %Y"), ),
            verticalalignment="bottom",
            fontsize=14)
    # Denote the direction blowing from
    plt.gcf().text(0.02,
                   0.125,
                   "Direction is where the wind is\nblowing from, not toward.",
                   va='bottom')
    # Make a logo
    im = mpimage.imread('%s/%s' % (DATADIR, 'logo.png'))
    plt.figimage(im, 10, 735)
    return fig
def plotter(fdict):
    """Heatmap of daily snow depth per winter season for a station.

    Rows are winter seasons (labeled by their Nov/Dec year), columns span
    Nov 1 through May 1.
    """
    pgconn = get_dbconn("coop")
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx["station"]
    ab = ctx["_nt"].sts[station]["archive_begin"]
    if ab is None:
        raise NoDataFound("Unknown station metadatab.")
    syear = max([ctx["syear"], ab.year])
    eyear = ctx["eyear"]
    sts = datetime.date(syear, 11, 1)
    ets = datetime.date(eyear + 1, 6, 1)
    table = "alldata_%s" % (station[:2], )
    # NOTE(review): this clobbers the user-requested eyear with the
    # current year; the query window above still honors the request, so
    # this appears intended only to size the array/extent — confirm.
    eyear = datetime.datetime.now().year
    # Seasons x 183 days (Nov 1 .. May 1); -1 sentinel marks missing
    obs = np.ma.ones((eyear - syear + 1, 183), "f") * -1
    df = read_sql(
        """ SELECT year, extract(doy from day) as doy, snowd, day, case when month < 6 then year - 1 else year end as winter_year from """
        + table + """ WHERE station = %s and month in (11, 12, 1, 2, 3, 4)
        and snowd >= 0 and day between %s and %s
        """,
        pgconn,
        params=(station, sts, ets),
        index_col="day",
    )
    if df.empty:
        raise NoDataFound("No Data Found.")
    minyear = df["year"].min()
    maxyear = df["year"].max()
    for _, row in df.iterrows():
        # Map day-of-year onto a season-relative axis: late-year days go
        # negative so Nov 1 lands at column 1 after the +61 shift
        doy = row["doy"] if row["doy"] < 180 else (row["doy"] - 365)
        obs[int(row["winter_year"]) - syear, int(doy) + 61] = row["snowd"]
    # Mask the -1 sentinels so they render with the "bad" color
    obs.mask = np.where(obs < 0, True, False)
    # obs[obs == 0] = -1
    fig = plt.figure(figsize=(8, 8))
    ax = fig.add_axes([0.1, 0.1, 0.93, 0.8])
    ax.set_xticks((0, 29, 60, 91, 120, 151, 181))
    ax.set_xticklabels(
        ["Nov 1", "Dec 1", "Jan 1", "Feb 1", "Mar 1", "Apr 1", "May 1"])
    ax.set_ylabel("Year of Nov,Dec of Season Labeled")
    ax.set_xlabel("Date of Winter Season")
    ax.set_title(("[%s] %s\nDaily Snow Depth (%s-%s) [inches]") %
                 (station, ctx["_nt"].sts[station]["name"], minyear, eyear))
    cmap = copy.copy(nwssnow())
    norm = mpcolors.BoundaryNorm(LEVELS, cmap.N)
    cmap.set_bad("#EEEEEE")
    cmap.set_under("white")
    res = ax.imshow(
        obs,
        aspect="auto",
        rasterized=True,
        norm=norm,
        interpolation="nearest",
        cmap=cmap,
        extent=[0, 182, eyear + 1 - 0.5, syear - 0.5],
    )
    fig.colorbar(res, spacing="proportional", ticks=LEVELS, extend="max")
    ax.grid(True)
    # Crop to the seasons that actually have data
    ax.set_ylim(maxyear + 0.5, minyear - 0.5)
    return fig, df
def plotter(fdict):
    """Plot accumulated GDD, precipitation, and SDD versus climatology.

    Queries daily observations and per-sday climatology from the state
    ``alldata_<st>`` table, then plots cumulative curves for up to four
    selected years against climatology, with a min/max envelope built from
    all years of record.  Which panels appear depends on ``ctx['which']``
    ('all', 'gdd', 'precip', or 'sdd').

    Args:
        fdict: autoplot CGI dictionary of user parameters.

    Returns:
        tuple: (matplotlib.figure.Figure, pandas.DataFrame)
    """
    pgconn = get_dbconn('coop')
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['station']
    nt = NetworkTable("%sCLIMATE" % (station[:2], ))
    sdate = ctx['sdate']
    edate = ctx['edate']
    year2 = ctx.get('year2', 0)
    year3 = ctx.get('year3', 0)
    year4 = ctx.get('year4', 0)
    wantedyears = [sdate.year, year2, year3, year4]
    yearcolors = ['r', 'g', 'b', 'purple']
    gddbase = ctx['base']
    gddceil = ctx['ceil']
    whichplots = ctx['which']
    # Column label suffix encoding the GDD base/ceiling, e.g. "gdd5086"
    glabel = "gdd%s%s" % (gddbase, gddceil)
    table = "alldata_%s" % (station[:2], )
    # avgs CTE computes per-sday climatology; outer query joins each daily
    # observation to its climatology.  Feb 29 is excluded so every year has
    # the same axis length.
    df = read_sql("""
    WITH avgs as (
        SELECT sday, avg(gddxx(%s, %s, high, low)) as c""" + glabel + """,
        avg(sdd86(high, low)) as csdd86,
        avg(precip) as cprecip from """ + table + """
        WHERE station = %s GROUP by sday
    )
    SELECT day, gddxx(%s, %s, high, low) as o""" + glabel + """,
    c""" + glabel + """, o.precip as oprecip, cprecip,
    sdd86(o.high, o.low) as osdd86, csdd86 from """ + table + """ o
    JOIN avgs a on (o.sday = a.sday)
    WHERE station = %s and o.sday != '0229' ORDER by day ASC
    """, pgconn, params=(gddbase, gddceil, station,
                         gddbase, gddceil, station),
                  index_col='day')
    df["precip_diff"] = df["oprecip"] - df["cprecip"]
    df[glabel + "_diff"] = df["o" + glabel] - df["c" + glabel]
    xlen = int((edate - sdate).days) + 1  # In case of leap day
    # NOTE(review): archive_begin may be None for some stations; this would
    # raise AttributeError here -- the newer variant of this plot guards it.
    years = (datetime.datetime.now().year -
             nt.sts[station]['archive_begin'].year) + 1
    # Accumulators (per-year rows) for GDD, precip, and SDD envelopes
    acc = np.zeros((years, xlen))
    acc[:] = np.nan
    pacc = np.zeros((years, xlen))
    pacc[:] = np.nan
    sacc = np.zeros((years, xlen))
    sacc[:] = np.nan
    # Build the axes layout for the requested combination of panels;
    # ax1 is always the titled top axes.
    if whichplots == 'all':
        fig = plt.figure(figsize=(9, 12))
        ax1 = fig.add_axes([0.1, 0.7, 0.8, 0.2])
        ax2 = fig.add_axes([0.1, 0.6, 0.8, 0.1], sharex=ax1,
                           facecolor='#EEEEEE')
        ax3 = fig.add_axes([0.1, 0.35, 0.8, 0.2], sharex=ax1)
        ax4 = fig.add_axes([0.1, 0.1, 0.8, 0.2], sharex=ax1)
        title = ("GDD(base=%.0f,ceil=%.0f), Precip, & "
                 "SDD(base=86)") % (gddbase, gddceil)
    elif whichplots == 'gdd':
        fig = plt.figure()
        ax1 = fig.add_axes([0.14, 0.31, 0.8, 0.57])
        ax2 = fig.add_axes([0.14, 0.11, 0.8, 0.2], sharex=ax1,
                           facecolor='#EEEEEE')
        title = ("GDD(base=%.0f,ceil=%.0f)") % (gddbase, gddceil)
    elif whichplots == 'precip':
        fig = plt.figure()
        ax3 = fig.add_axes([0.1, 0.11, 0.8, 0.75])
        ax1 = ax3
        title = "Precipitation"
    elif whichplots == 'sdd':
        fig = plt.figure()
        ax4 = fig.add_axes([0.1, 0.1, 0.8, 0.8])
        ax1 = ax4
        title = "Stress Degree Days (base=86)"
    ax1.set_title(("Accumulated %s\n%s %s") % (
        title, station, nt.sts[station]['name']),
        fontsize=18 if whichplots == 'all' else 14)
    for year in range(nt.sts[station]['archive_begin'].year,
                      datetime.datetime.now().year + 1):
        # Window of this year's data aligned to the selected date span
        sts = sdate.replace(year=year)
        ets = sts + datetime.timedelta(days=(xlen - 1))
        x = df.loc[sts:ets, 'o' + glabel].cumsum()
        if len(x.index) == 0:
            continue
        acc[(year - sdate.year), :len(x.index)] = x.values
        x = df.loc[sts:ets, 'oprecip'].cumsum()
        pacc[(year - sdate.year), :len(x.index)] = x.values
        x = df.loc[sts:ets, 'osdd86'].cumsum()
        sacc[(year - sdate.year), :len(x.index)] = x.values
        if year not in wantedyears:
            continue
        color = yearcolors[wantedyears.index(year)]
        yearlabel = sts.year
        if sts.year != ets.year:
            yearlabel = "%s-%s" % (sts.year, ets.year)
        if whichplots in ['gdd', 'all']:
            ax1.plot(range(len(x.index)),
                     df.loc[sts:ets, "o" + glabel].cumsum().values,
                     zorder=6, color=color, label='%s' % (yearlabel, ),
                     lw=2)
        # Get cumulated precip
        p = df.loc[sts:ets, 'oprecip'].cumsum()
        if whichplots in ['all', 'precip']:
            ax3.plot(range(len(p.index)), p.values, color=color, lw=2,
                     zorder=6, label='%s' % (yearlabel, ))
        p = df.loc[sts:ets, 'osdd86'].cumsum()
        if whichplots in ['all', 'sdd']:
            ax4.plot(range(len(p.index)), p.values, color=color, lw=2,
                     zorder=6, label='%s' % (yearlabel, ))
        # Plot Climatology (only once, for the first wanted year)
        if wantedyears.index(year) == 0:
            x = df.loc[sts:ets, "c" + glabel].cumsum()
            if whichplots in ['all', 'gdd']:
                ax1.plot(range(len(x.index)), x.values, color='k',
                         label='Climatology', lw=2, zorder=5)
            x = df.loc[sts:ets, "cprecip"].cumsum()
            if whichplots in ['all', 'precip']:
                ax3.plot(range(len(x.index)), x.values, color='k',
                         label='Climatology', lw=2, zorder=5)
            x = df.loc[sts:ets, "csdd86"].cumsum()
            if whichplots in ['all', 'sdd']:
                ax4.plot(range(len(x.index)), x.values, color='k',
                         label='Climatology', lw=2, zorder=5)
        # Accumulated departure from climatology for the GDD sub-panel
        x = df.loc[sts:ets, glabel + "_diff"].cumsum()
        if whichplots in ['all', 'gdd']:
            ax2.plot(range(len(x.index)), x.values, color=color,
                     linewidth=2, linestyle='--')
    # Min/max envelope over all years of record for each panel
    xmin = np.nanmin(acc, 0)
    xmax = np.nanmax(acc, 0)
    if whichplots in ['all', 'gdd']:
        ax1.fill_between(range(len(xmin)), xmin, xmax, color='lightblue')
        ax1.grid(True)
        ax2.grid(True)
    xmin = np.nanmin(pacc, 0)
    xmax = np.nanmax(pacc, 0)
    if whichplots in ['all', 'precip']:
        ax3.fill_between(range(len(xmin)), xmin, xmax, color='lightblue')
        ax3.set_ylabel("Precipitation [inch]", fontsize=16)
        ax3.grid(True)
    xmin = np.nanmin(sacc, 0)
    xmax = np.nanmax(sacc, 0)
    if whichplots in ['all', 'sdd']:
        ax4.fill_between(range(len(xmin)), xmin, xmax, color='lightblue')
        ax4.set_ylabel(r"SDD Base 86 $^{\circ}\mathrm{F}$", fontsize=16)
        ax4.grid(True)
    if whichplots in ['all', 'gdd']:
        ax1.set_ylabel((r"GDD Base %.0f Ceil %.0f $^{\circ}\mathrm{F}$"
                        ) % (gddbase, gddceil), fontsize=16)
        ax1.text(0.5, 0.9, "%s/%s - %s/%s" % (
            sdate.month, sdate.day, edate.month, edate.day),
            transform=ax1.transAxes, ha='center')
        # Symmetric y-limits so departures read the same above/below zero
        ylim = ax2.get_ylim()
        spread = max([abs(ylim[0]), abs(ylim[1])]) * 1.1
        ax2.set_ylim(0 - spread, spread)
        ax2.text(0.02, 0.1, " Accumulated Departure ",
                 transform=ax2.transAxes,
                 bbox=dict(facecolor='white', ec='#EEEEEE'))
        ax2.yaxis.tick_right()
    # Build date ticks; longer spans tick only the 1st of each month
    xticks = []
    xticklabels = []
    wanted = [1, ] if xlen > 31 else [1, 7, 15, 22, 29]
    now = sdate
    i = 0
    while now <= edate:
        if now.day in wanted:
            xticks.append(i)
            xticklabels.append(now.strftime("%-d\n%b"))
        now += datetime.timedelta(days=1)
        i += 1
    if whichplots in ['all', 'gdd']:
        ax2.set_xticks(xticks)
        ax2.set_xticklabels(xticklabels)
        ax1.legend(loc=2, prop={'size': 12})
        # Remove ticks on the top most plot
        for label in ax1.get_xticklabels():
            label.set_visible(False)
        ax1.set_xlim(0, xlen + 1)
    if whichplots in ['all', 'precip']:
        ax3.set_xticks(xticks)
        ax3.set_xticklabels(xticklabels)
        ax3.legend(loc=2, prop={'size': 10})
        ax3.set_xlim(0, xlen + 1)
    if whichplots in ['all', 'sdd']:
        ax4.set_xticks(xticks)
        ax4.set_xticklabels(xticklabels)
        ax4.legend(loc=2, prop={'size': 10})
        ax4.set_xlim(0, xlen + 1)
    return fig, df
def plotter(fdict):
    """Plot the maximum jump/dip in a temperature statistic over windows.

    Uses SQL window functions to compute a trailing, middle, and forward
    statistic per day, derives the change between windows, then charts the
    per-period (week/month/year) max and min change.

    Args:
        fdict: autoplot CGI dictionary of user parameters.

    Returns:
        tuple: (matplotlib.figure.Figure, pandas.DataFrame of the
        describe() output on the change column)

    Raises:
        NoDataFound: when no rows match, or the trailing threshold
            filters out every event.
    """
    pgconn = get_dbconn("coop")
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx["station"]
    days = int(ctx["days"])
    fdays = int(ctx["fdays"])
    mdays = int(ctx["mdays"])
    syear = int(ctx["syear"])
    eyear = int(ctx["eyear"])
    agg = ctx["agg"]
    # belt and suspenders: these values are interpolated into SQL below,
    # so restrict them to the known dictionaries first
    assert agg in PDICT
    assert ctx["fstat"] in PDICT2
    assert ctx["mstat"] in PDICT2
    assert ctx["stat"] in PDICT2
    assert ctx["var"] in PDICT3
    month = ctx["month"]
    months = list(range(1, 13))
    if month == "fall":
        months = [9, 10, 11]
    elif month == "winter":
        months = [12, 1, 2]
    elif month == "spring":
        months = [3, 4, 5]
    elif month == "summer":
        months = [6, 7, 8]
    elif month == "gs":
        months = [5, 6, 7, 8, 9]
    elif month != "year":
        months = [int(month)]
    table = "alldata_%s" % (station[:2],)
    # Window frames: forward_stat looks ahead past the middle window,
    # middle_stat covers the current row forward, trailing_stat looks back
    obs = read_sql(
        """
    select day, extract(week from day) - 1 as week, year, month, sday,
    """ + ctx["fstat"] + """(""" + ctx["var"] + """) OVER (
        ORDER by day ASC rows
        between %s FOLLOWING and %s FOLLOWING) as forward_stat,
    """ + ctx["fstat"] + """(""" + ctx["var"] + """) OVER (
        ORDER by day ASC rows
        between CURRENT ROW and %s FOLLOWING) as middle_stat,
    """ + ctx["stat"] + """(""" + ctx["var"] + """) OVER (
        ORDER by day ASC rows
        between %s PRECEDING and 1 PRECEDING) as trailing_stat
    from """ + table + """ where station = %s and month in %s
    and year >= %s and year <= %s ORDER by day ASC
    """,
        pgconn,
        params=(
            fdays,
            fdays + mdays - 1,
            fdays - 1,
            days,
            station,
            tuple(months),
            syear,
            eyear,
        ),
    )
    if obs.empty:
        raise NoDataFound("No Data Found.")
    if ctx.get("thres") is not None:
        obs = obs[obs["trailing_stat"] >= ctx["thres"]]
        if obs.empty:
            raise NoDataFound(
                "Failed to find events with trailing threshold")
    else:
        ctx["thres"] = None
    # We have daily observations above in the form of obs
    obs["two"] = obs["middle_stat"] - obs["trailing_stat"]
    obs["three"] = obs["middle_stat"] - obs["forward_stat"]
    if ctx["how"] == "three":
        # Require agreement in sign between the two deltas; take the
        # smaller-magnitude value of the pair
        up = obs[(obs["two"] >= 0) & (obs["three"] >= 0)]
        obs["change"] = up[["two", "three"]].min(axis=1)
        down = obs[(obs["two"] < 0) & (obs["three"] < 0)]
        obs.at[down.index, "change"] = down[["two", "three"]].max(axis=1)
    else:
        obs["change"] = obs["two"]
    weekly = obs[[agg, "change"]].groupby(agg).describe()
    df = weekly["change"]
    extreme = max([df["max"].max(), 0 - df["min"].min()]) + 10
    fig = plt.figure()
    ax = fig.add_axes([0.1, 0.1, 0.85, 0.65])
    multiplier = 1
    if agg == "week":
        # Week index scaled by 7 so the x axis is day-of-year
        multiplier = 7
        sts = datetime.datetime(2012, 1, 1)
        xticks = []
        for i in range(1, 13):
            ts = sts.replace(month=i)
            xticks.append(int(ts.strftime("%j")))
        ax.set_xticklabels(calendar.month_abbr[1:])
        ax.set_xticks(xticks)
        ax.set_xlim(0, 366)
    elif agg == "month":
        ax.set_xticklabels(calendar.month_abbr[1:])
        ax.set_xticks(range(1, 13))
        ax.set_xlim(0, 13)
    elif agg == "year":
        # Fit and annotate a trend line on the yearly max and min
        for col in ["max", "min"]:
            h_slope, intercept, r_value, _, _ = stats.linregress(
                df.index.values, df[col]
            )
            y = h_slope * df.index.values + intercept
            ax.plot(df.index.values, y, lw=2, zorder=10, color="k")
            yloc = 2 if df[col].max() > 0 else -5
            color = "white" if yloc < 0 else "k"
            ax.text(
                df.index.values[-1],
                yloc,
                r"R^2=%.02f" % (r_value ** 2,),
                color=color,
                ha="right",
            )
        ax.set_xlim(df.index.values[0] - 1, df.index.values[-1] + 1)
    ax.bar(
        df.index.values * multiplier,
        df["max"].values,
        width=multiplier,
        fc="pink",
        ec="pink",
    )
    ax.bar(
        df.index.values * multiplier,
        df["min"].values,
        width=multiplier,
        fc="lightblue",
        ec="lightblue",
    )
    for col in ["max", "min"]:
        c = "red" if col == "max" else "blue"
        ax.axhline(df[col].mean(), lw=2, color=c)
    ax.grid(True)
    ax.set_ylabel(r"Temperature Change $^\circ$F")
    # NOTE(review): both labels interpolate fstat/mstat -- the "Backward"
    # label likely intended PDICT2[ctx["stat"]] (the trailing stat);
    # confirm against the form description.
    title = "Backward (%s) %.0f Days and Forward (%s) %.0f Inclusive Days" % (
        PDICT2[ctx["fstat"]],
        days,
        PDICT2[ctx["mstat"]],
        mdays,
    )
    if ctx["how"] == "three":
        title = ("Back (%s) %.0fd, Middle (%s) %.0fd, Forward (%s) %.0fd") % (
            PDICT2[ctx["fstat"]],
            days,
            PDICT2[ctx["mstat"]],
            mdays,
            PDICT2[ctx["fstat"]],
            fdays,
        )
    subtitle = (
        ""
        if ctx["thres"] is None
        else "\nBack Threshold of at least %.0f $^\circ$F" % (ctx["thres"],)
    )
    ax.set_title(
        ("%s %s (%.0f-%.0f)\n"
         "Max Change in %s %s (%s)\n"
         "%s%s")
        % (
            station,
            ctx["_nt"].sts[station]["name"],
            max([ctx["_nt"].sts[station]["archive_begin"].year, syear]),
            eyear,
            PDICT3[ctx["var"]].replace("Temperature", "Temp"),
            PDICT[agg].replace("Aggregate", "Agg"),
            MDICT[month],
            title,
            subtitle,
        )
    )
    ax.set_ylim(0 - extreme, extreme)
    xloc = (ax.get_xlim()[1] + ax.get_xlim()[0]) / 2.0
    ax.text(
        xloc,
        extreme - 5,
        "Maximum Jump in %s (avg: %.1f)"
        % (PDICT3[ctx["var"]], df["max"].mean()),
        color="red",
        va="center",
        ha="center",
        bbox=dict(color="white"),
    )
    ax.text(
        xloc,
        0 - extreme + 5,
        "Maximum (Negative) Dip in %s (avg: %.1f)"
        % (PDICT3[ctx["var"]], df["min"].mean()),
        color="blue",
        va="center",
        ha="center",
        bbox=dict(color="white"),
    )
    # NOTE(review): rename targets an index label "datum" that is not
    # visibly created here -- verify this rename has any effect.
    return fig, df.rename({"datum": agg})
def plotter(fdict):
    """Chart the top-10 largest temperature changes over an N hour window.

    Self-joins hourly ASOS temperatures against an offset copy of
    themselves to find the 50 largest warmings or coolings over the chosen
    hour span, then renders the top 10 as a horizontal bar chart.

    Args:
        fdict: autoplot CGI dictionary of user parameters.

    Returns:
        tuple: (matplotlib.figure.Figure, pandas.DataFrame of top 50 rows)

    Raises:
        NoDataFound: when no rows match or station metadata is missing.
    """
    pgconn = get_dbconn('asos')
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['zstation']
    hours = ctx['hours']
    mydir = ctx['dir']
    month = ctx['month']
    if month == 'all':
        months = range(1, 13)
    elif month == 'fall':
        months = [9, 10, 11]
    elif month == 'winter':
        months = [12, 1, 2]
    elif month == 'spring':
        months = [3, 4, 5]
    elif month == 'summer':
        months = [6, 7, 8]
    else:
        ts = datetime.datetime.strptime("2000-"+month+"-01", '%Y-%b-%d')
        # make sure it is length two for the trick below in SQL
        months = [ts.month, 999]
    tzname = ctx['_nt'].sts[station]['tzname']
    # backwards intuitive: 'warm' events sort the signed difference ASC
    sortdir = "ASC" if mydir == 'warm' else 'DESC'
    # doffset shifts each ob back by N hours so the join pairs each valid
    # time with the ob N hours later
    df = read_sql("""
    WITH data as (
        SELECT valid at time zone %s as valid, tmpf from alldata
        where station = %s and tmpf between -100 and 150
        and extract(month from valid) in %s),
    doffset as (
        SELECT valid - '%s hours'::interval as valid, tmpf from data),
    agg as (
        SELECT d.valid, d.tmpf as tmpf1, o.tmpf as tmpf2
        from data d JOIN doffset o on (d.valid = o.valid))
    SELECT valid as valid1, valid + '%s hours'::interval as valid2,
    tmpf1, tmpf2 from agg
    ORDER by (tmpf1 - tmpf2) """ + sortdir + """ LIMIT 50
    """, pgconn, params=(tzname, station, tuple(months), hours, hours),
                  index_col=None)
    df['diff'] = (df['tmpf1'] - df['tmpf2']).abs()
    if df.empty:
        raise NoDataFound("No database entries found for station, sorry!")
    fig = plt.figure()
    ax = plt.axes([0.55, 0.1, 0.4, 0.8])
    ab = ctx['_nt'].sts[station]['archive_begin']
    if ab is None:
        raise NoDataFound("Unknown station metadata.")
    fig.text(0.5, 0.95, ('[%s] %s Top 10 %s\n'
                         'Over %s Hour Period (%s-%s) [%s]'
                         ) % (station, ctx['_nt'].sts[station]['name'],
                              MDICT[mydir], hours, ab.year,
                              datetime.date.today().year, MDICT2[month]),
             ha='center', va='center')
    labels = []
    # Only the top ten events appear on the chart; df keeps all 50
    for i in range(10):
        row = df.iloc[i]
        ax.barh(i+1, row['diff'], color='b', align='center')
        sts = row['valid1']
        ets = row['valid2']
        labels.append(("%.0f to %.0f -> %.0f\n%s - %s"
                       ) % (row['tmpf1'], row['tmpf2'], row['diff'],
                            sts.strftime("%-d %b %Y %I:%M %p"),
                            ets.strftime("%-d %b %Y %I:%M %p")))
    ax.set_yticks(range(1, 11))
    ax.set_yticklabels(labels)
    # Inverted y axis places rank 1 at the top
    ax.set_ylim(10.5, 0.5)
    ax.grid(True)
    return fig, df
def plotter(fdict):
    """Plot when the extreme N-day period occurred for each year.

    Draws each year's extreme period as a horizontal bar spanning its
    day-of-year range, colored by magnitude, plus a lower histogram of how
    frequently each calendar day falls within the period.

    Args:
        fdict: autoplot CGI dictionary of user parameters.

    Returns:
        tuple: (matplotlib.figure.Figure, pandas.DataFrame from get_data)

    Raises:
        ValueError: when get_data returns nothing.
            NOTE(review): sibling plotters in this file raise NoDataFound
            here -- confirm whether this should match.
    """
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['station']
    network = ctx['network']
    days = ctx['days']
    varname = ctx['var']
    ctx['nt'] = NetworkTable(network)
    df = get_data(ctx)
    if df.empty:
        raise ValueError('Error, no results returned!')
    fig = plt.figure(figsize=(8, 6))
    ax = fig.add_axes([0.1, 0.3, 0.75, 0.6])
    lax = fig.add_axes([0.1, 0.1, 0.75, 0.2])
    cax = fig.add_axes([0.87, 0.3, 0.03, 0.6])
    title = PDICT.get(varname)
    if days == 1:
        title = title.replace("Average ", "")
    ax.set_title(("%s [%s]\n%i Day Period with %s") % (
        ctx['nt'].sts[station]['name'], station, days, title))
    cmap = plt.get_cmap('jet')
    minval = df[XREF[varname]].min() - 1.
    # Precip cannot be negative, so clamp the ramp floor at zero
    if varname == 'wettest' and minval < 0:
        minval = 0
    maxval = df[XREF[varname]].max() + 1.
    # At most 10 color bins, fewer when the value range is narrow
    ramp = np.linspace(minval, maxval, min([int(maxval - minval), 10]),
                       dtype='i')
    norm = mpcolors.BoundaryNorm(ramp, cmap.N)
    cb = ColorbarBase(cax, norm=norm, cmap=cmap)
    cb.set_label("inch" if varname == 'wettest' else r"$^\circ$F")
    ax.barh(df.index.values, [days] * len(df.index), left=df['doy'].values,
            color=cmap(norm(df[XREF[varname]].values)))
    ax.grid(True)
    lax.grid(True)
    # Tick the first of each month within the observed doy range
    xticks = []
    xticklabels = []
    for i in np.arange(df['doy'].min() - 5, df['doy'].max() + 5, 1):
        ts = datetime.datetime(2000, 1, 1) + datetime.timedelta(days=int(i))
        if ts.day == 1:
            xticks.append(i)
            xticklabels.append(ts.strftime("%-d %b"))
    ax.set_xticks(xticks)
    lax.set_xticks(xticks)
    lax.set_xticklabels(xticklabels)
    # 366*2 slots: periods can wrap past the end of the year
    counts = np.zeros(366 * 2)
    for _, row in df.iterrows():
        counts[int(row['doy']):int(row['doy'] + days)] += 1
    lax.bar(np.arange(366 * 2), counts, edgecolor='blue',
            facecolor='blue')
    lax.set_ylabel("Years")
    lax.text(0.02, 0.9, "Frequency of Day\nwithin period",
             transform=lax.transAxes, va='top')
    ax.set_ylim(df.index.values.min() - 3, df.index.values.max() + 3)
    ax.set_xlim(df['doy'].min() - 10, df['doy'].max() + 10)
    lax.set_xlim(df['doy'].min() - 10, df['doy'].max() + 10)
    ax.yaxis.set_major_locator(MaxNLocator(prune='lower'))
    return fig, df
def plotter(fdict):
    """Rank a station's yearly precipitation against its state peers.

    Top panel: the station's yearly precip total expressed as a percentile
    among all stations in the same state (no spatial weighting).  Bottom
    panel: the station's bias versus the state arithmetic mean.

    Args:
        fdict: autoplot CGI dictionary of user parameters.

    Returns:
        tuple: (matplotlib.figure.Figure, pandas.DataFrame indexed by year)

    Raises:
        NoDataFound: when the query returns no rows.
    """
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx["station"]
    table = "alldata_%s" % (station[:2], )
    df = read_sql(
        """
    with data as (
        select station, year, sum(precip) from """ + table + """
        WHERE year >= 1893 GROUP by station, year),
    stdata as (
        select year, sum from data where station = %s ),
    agg as (
        select station, year, avg(sum) OVER (PARTITION by year) as avgval,
        rank() OVER (PARTITION by year ORDER by sum ASC) /
            count(*) OVER (PARTITION by year)::float * 100. as percentile
        from data)
    select a.station, a.year, a.percentile, s.sum, a.avgval from
    agg a JOIN stdata s on (a.year = s.year) where a.station = %s
    ORDER by a.year ASC
    """,
        get_dbconn("coop"),
        params=(station, station),
        index_col="year",
    )
    if df.empty:
        raise NoDataFound("No Data Found.")
    fig = plt.figure(figsize=(6, 7.5))

    # --- top panel: percentile by year ---
    top_ax = fig.add_axes([0.13, 0.52, 0.8, 0.4])
    avg_percentile = df["percentile"].mean()
    # Highlight above-average years in red; others stay blue
    for rect in top_ax.bar(df.index.values, df["percentile"], color="b"):
        if rect.get_height() > avg_percentile:
            rect.set_color("red")
    top_ax.axhline(avg_percentile, color="green", lw=2, zorder=5)
    top_ax.text(df.index.max() + 1, avg_percentile,
                "%.1f" % (avg_percentile, ), color="green")
    top_ax.set_xlim(df.index.min() - 1, df.index.max() + 1)
    top_ax.set_ylim(0, 100)
    top_ax.set_yticks([0, 5, 10, 25, 50, 75, 90, 95, 100])
    top_ax.set_ylabel("Percentile (no spatial weighting)")
    top_ax.grid(True)
    top_ax.set_title(
        ("[%s] %s\nYearly Precip Total Percentile for all %s stations ")
        % (station, ctx["_nt"].sts[station]["name"], station[:2]))

    # --- bottom panel: bias versus state mean ---
    bot_ax = fig.add_axes([0.13, 0.07, 0.8, 0.4])
    bias = df["sum"] - df["avgval"]
    bot_ax.bar(df.index.values, bias)
    avg_bias = bias.mean()
    bot_ax.axhline(avg_bias, color="green", lw=2, zorder=5)
    bot_ax.text(df.index.max() + 1, avg_bias, "%.2f" % (avg_bias, ),
                color="green")
    bot_ax.set_xlim(df.index.min() - 1, df.index.max() + 1)
    bot_ax.set_ylabel("Bias to State Arithmetic Mean")
    bot_ax.grid(True)
    return fig, df
def plotter(fdict):
    """Histogram hours-per-year at or beyond heat index / wind chill levels.

    Aggregates hourly ASOS observations, derives heat index or wind chill
    where requested, and plots average hours per year at each threshold
    level alongside the highlighted year, with a side table of values.

    Args:
        fdict: autoplot CGI dictionary of user parameters.

    Returns:
        tuple: (matplotlib.figure.Figure, pandas.DataFrame of level/avg/
        highlight-year counts)

    Raises:
        NoDataFound: when no rows match or station metadata is missing.
    """
    pgconn = get_dbconn('asos')
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['zstation']
    highlightyear = ctx['year']
    ytd = ctx['ytd']
    varname = ctx['var']
    inc = ctx['inc']
    doylimiter = get_doylimit(ytd, varname)
    # Pre-filter obs to the temperature regime where the variable applies
    tmpflimit = ("and tmpf >= 50"
                 if varname != 'windchill' else 'and tmpf < 50')
    if varname not in ['windchill', 'heatindex']:
        tmpflimit = ""
    df = read_sql("""
    SELECT to_char(valid, 'YYYYmmddHH24') as d,
    avg(tmpf)::int as tmpf,
    avg(dwpf)::int as dwpf,
    avg(coalesce(sknt, 0)) as sknt
    from alldata WHERE station = %s """ + tmpflimit + """
    and dwpf <= tmpf and valid > '1973-01-01'
    and report_type = 2 """ + doylimiter + """ GROUP by d
    """, pgconn, params=(station, ), index_col=None)
    if df.empty:
        raise NoDataFound("No Data Found.")
    df['year'] = df['d'].apply(lambda x: int(x[:4]))
    df2 = df
    title2 = VDICT[varname]
    compop = np.greater_equal
    inctitle = ''
    if varname == 'heatindex':
        df['heatindex'] = pymet.heatindex(
            temperature(df['tmpf'].values, 'F'),
            temperature(df['dwpf'].values, 'F')).value('F')
        inctitle = " [All Obs Included]"
        if inc == 'no':
            # Keep only obs where heat index actually exceeds temperature
            df2 = df[df['heatindex'] > df['tmpf']]
            inctitle = " [Only Additive]"
        else:
            df2 = df
        maxval = int(df2['heatindex'].max() + 1)
        LEVELS[varname] = np.arange(80, maxval)
    elif varname == 'windchill':
        # Lower wind chill is more extreme, so invert the comparison
        compop = np.less_equal
        # Winter season year: Jul-Dec count toward the current year,
        # Jan-Jun toward the previous year
        df['year'] = df['d'].apply(
            lambda x: (int(x[:4]) - 1) if int(x[4:6]) < 7 else int(x[:4]))
        df['windchill'] = pymet.windchill(
            temperature(df['tmpf'].values, 'F'),
            speed(df['sknt'].values, 'KT')).value('F')
        inctitle = " [All Obs Included]"
        if inc == 'no':
            df2 = df[df['windchill'] < df['tmpf']]
            inctitle = " [Only Additive]"
        else:
            df2 = df
        minval = int(df2['windchill'].min() - 1)
        LEVELS[varname] = np.arange(minval, minval + 51)
    else:
        maxval = int(df2[varname].max() + 1)
        LEVELS[varname] = np.arange(maxval - 31, maxval)
    bs = ctx['_nt'].sts[station]['archive_begin']
    if bs is None:
        raise NoDataFound("Unknown station metadata.")
    minyear = max([1973, bs.year])
    maxyear = datetime.date.today().year
    years = float((maxyear - minyear) + 1)
    x = []
    y = []
    y2 = []
    fig = plt.figure(figsize=(9, 6))
    ax = fig.add_axes([0.1, 0.1, 0.6, 0.8])
    # Side table to the right of the axes (transAxes coords > 1)
    yloc = 1.0
    xloc = 1.13
    yrlabel = ("%s" % (highlightyear, )
               if varname != 'windchill'
               else '%s-%s' % (highlightyear, highlightyear + 1))
    ax.text(xloc + 0.08, yloc + 0.04, 'Avg:', transform=ax.transAxes,
            color='b')
    ax.text(xloc + 0.21, yloc + 0.04, yrlabel, transform=ax.transAxes,
            color='r')
    df3 = df2[df2['year'] == highlightyear]
    for level in LEVELS[varname]:
        x.append(level)
        y.append(len(df2[compop(df2[varname], level)]) / years)
        y2.append(len(df3[compop(df3[varname], level)]))
        # Print every other level in the side table to keep it readable
        if level % 2 == 0:
            ax.text(xloc, yloc, '%s' % (level, ), transform=ax.transAxes)
            ax.text(xloc + 0.08, yloc, '%.1f' % (y[-1], ),
                    transform=ax.transAxes, color='b')
            ax.text(xloc + 0.21, yloc, '%.0f' % (y2[-1], ),
                    transform=ax.transAxes, color='r')
            yloc -= 0.04
    ax.text(xloc, yloc, 'n=%s' % (len(df2.index), ),
            transform=ax.transAxes)
    # Connect the average and highlight-year markers at each level
    for x0, y0, y02 in zip(x, y, y2):
        ax.plot([x0, x0], [y0, y02], color='k')
    rdf = pd.DataFrame({'level': x, 'avg': y,
                        'd%s' % (highlightyear, ): y2})
    x = np.array(x, dtype=np.float64)
    ax.scatter(x, y, color='b', label='Avg')
    ax.scatter(x, y2, color='r', label=yrlabel)
    ax.grid(True)
    ymax = int(max([max(y), max(y2)]))
    ax.set_xlim(x[0] - 0.5, x[-1] + 0.5)
    # Tick spacing in multiples of 24 hours (whole days)
    dy = 24 * (int(ymax / 240) + 1)
    ax.set_yticks(range(0, ymax, dy))
    ax.set_ylim(-0.5, ymax + 5)
    ax2 = ax.twinx()
    ax2.set_ylim(-0.5, ymax + 5)
    ax2.set_yticks(range(0, ymax, dy))
    ax2.set_yticklabels(["%.0f" % (s, )
                         for s in np.arange(0, ymax, dy) / 24])
    ax2.set_ylabel("Expressed in 24 Hour Days")
    ax.set_ylabel("Hours Per Year")
    ax.set_xlabel(r"%s $^\circ$F" % (VDICT[varname], ))
    title = 'till %s' % (datetime.date.today().strftime("%-d %b"), )
    title = "Entire Year" if ytd == 'no' else title
    ax.set_title(("[%s] %s %s-%s\n"
                  "%s Histogram (%s)%s") % (
        station, ctx['_nt'].sts[station]['name'], minyear,
        datetime.date.today().year, title2, title, inctitle))
    ax.legend(loc='best', scatterpoints=1)
    return fig, rdf
def plotter(fdict):
    """Plot the period between first and last VTEC issuance each year.

    Queries min/max issuance times and event counts per year for a given
    phenomena/significance, either for one WFO or a whole state, then
    renders a span chart plus a side panel of event counts.

    Args:
        fdict: autoplot CGI dictionary of user parameters.

    Returns:
        tuple: (matplotlib.figure.Figure, pandas.DataFrame indexed by year)

    Raises:
        ValueError: when no rows match the query.
    """
    pgconn = get_dbconn('postgis')
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['station'][:4]
    phenomena = ctx['phenomena']
    significance = ctx['significance']
    split = ctx['split']
    opt = ctx['opt']
    state = ctx['state']
    nt = NetworkTable('WFO')
    wfolimiter = " wfo = '%s' " % (station, )
    if opt == 'state':
        wfolimiter = " substr(ugc, 1, 2) = '%s' " % (state, )
    # 'jan1' groups by calendar year; otherwise shift back six months so
    # winter-season events group into one "year"
    if split == 'jan1':
        sql = """SELECT extract(year from issue)::int as year,
        min(issue at time zone 'UTC') as min_issue,
        max(issue at time zone 'UTC') as max_issue,
        count(distinct wfo || eventid)
        from warnings where """ + wfolimiter + """ and phenomena = %s
        and significance = %s
        GROUP by year ORDER by year ASC"""
    else:
        sql = """SELECT
        extract(year from issue - '6 months'::interval)::int as year,
        min(issue at time zone 'UTC') as min_issue,
        max(issue at time zone 'UTC') as max_issue,
        count(distinct wfo || eventid)
        from warnings where """ + wfolimiter + """ and phenomena = %s
        and significance = %s
        GROUP by year ORDER by year ASC"""
    df = read_sql(sql, pgconn, params=(phenomena, significance),
                  index_col=None)
    if df.empty:
        raise ValueError("No data found for query")
    # Since many VTEC events start in 2005, we should not trust any
    # data that has its first year in 2005
    if df['year'].min() == 2005:
        df = df[df['year'] > 2005]

    def myfunc(row):
        """Convert an issuance timestamp to day-of-year for its group year,
        extending past 365 when the timestamp falls in the next year."""
        year = row[0]
        valid = row[1]
        if year == valid.year:
            return int(valid.strftime("%j"))
        else:
            days = (datetime.date(year + 1, 1, 1) -
                    datetime.date(year, 1, 1)).days
            return int(valid.strftime("%j")) + days

    df['startdoy'] = df[['year', 'min_issue']].apply(myfunc, axis=1)
    df['enddoy'] = df[['year', 'max_issue']].apply(myfunc, axis=1)
    df.set_index('year', inplace=True)
    ends = df['enddoy'].values
    starts = df['startdoy'].values
    years = df.index.values
    fig = plt.figure(figsize=(8, 6))
    ax = plt.axes([0.1, 0.1, 0.7, 0.8])
    ax.barh(years, (ends - starts), left=starts, fc='blue',
            align='center')
    # Averages exclude the final (possibly incomplete) year
    ax.axvline(np.average(starts[:-1]), lw=2, color='red')
    ax.axvline(np.average(ends[:-1]), lw=2, color='red')
    ax.set_xlabel(("Avg Start Date: %s, End Date: %s"
                   ) % ((datetime.date(2000, 1, 1) + datetime.timedelta(
                            days=int(np.average(starts[:-1]))
                        )).strftime("%-d %b"),
                        (datetime.date(2000, 1, 1) + datetime.timedelta(
                            days=int(np.average(ends[:-1]))
                        )).strftime("%-d %b")), color='red')
    title = "[%s] NWS %s" % (station, nt.sts[station]['name'])
    if opt == 'state':
        title = ("NWS Issued Alerts for State of %s") % (
            reference.state_names[state], )
    ax.set_title(("%s\nPeriod between First and Last %s") % (
        title, vtec.get_ps_string(phenomena, significance)))
    ax.grid()
    # Month boundaries doubled out to two years for wrapped seasons
    days = [1, 32, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335]
    days = days + [x + 365 for x in days]
    ax.set_xticks(days)
    ax.set_xticklabels(calendar.month_abbr[1:] + calendar.month_abbr[1:])
    ax.set_xlim(df['startdoy'].min() - 10, df['enddoy'].max() + 10)
    ax.set_ylabel("Year")
    ax.set_ylim(years[0] - 0.5, years[-1] + 0.5)
    xFormatter = FormatStrFormatter('%d')
    ax.yaxis.set_major_formatter(xFormatter)
    # Side panel: number of events per year
    ax = plt.axes([0.82, 0.1, 0.13, 0.8])
    ax.barh(years, df['count'], fc='blue', align='center')
    ax.set_ylim(years[0] - 0.5, years[-1] + 0.5)
    plt.setp(ax.get_yticklabels(), visible=False)
    ax.grid(True)
    ax.set_xlabel("# Events")
    ax.yaxis.set_major_formatter(xFormatter)
    xloc = plt.MaxNLocator(3)
    ax.xaxis.set_major_locator(xloc)
    return fig, df
def plotter(fdict):
    """Heatmap of present-weather code reports by local hour and period.

    Counts distinct (period, year, hour) occurrences of a wxcode in the
    ASOS archive, grouped by week or day-of-year, and draws an imshow
    heatmap with marginal bar charts along the bottom and right.

    Args:
        fdict: autoplot CGI dictionary of user parameters.

    Returns:
        tuple: (matplotlib.figure.Figure, pandas.DataFrame of counts)

    Raises:
        ValueError: when no rows match the query.
    """
    pgconn = get_dbconn('asos')
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['zstation']
    network = ctx['network']
    syear = ctx['syear']
    eyear = ctx['eyear']
    groupby = ctx['groupby']
    sts = datetime.date(syear, 1, 1)
    ets = datetime.date(eyear + 1, 1, 1)
    nt = NetworkTable(network)
    code = ctx['code']
    if code == 'PSN':
        # '+' is awkward in form values, so PSN aliases heavy snow (+SN)
        code = "+SN"
        PDICT['+SN'] = PDICT['PSN']
    # NOTE(review): `code` is concatenated into the LIKE pattern rather
    # than bound as a parameter; values come from PDICT above, but confirm
    # it cannot carry arbitrary user input.
    if groupby == 'week':
        data = np.ma.zeros((24, 52), 'f')
        df = read_sql("""
        WITH data as (
            SELECT valid at time zone %s  + '10 minutes'::interval as v
            from alldata where station = %s and
            array_to_string(wxcodes, '') LIKE '%%""" + code + """%%'
            and valid > %s and valid < %s),
        agg as (
            SELECT distinct extract(week from v)::int as week,
            extract(doy from v)::int as doy,
            extract(year from v)::int as year,
            extract(hour from v)::int as hour
            from data)
        SELECT week, year, hour, count(*) from agg
        WHERE week < 53
        GROUP by week, year, hour
        """, pgconn, params=(nt.sts[station]['tzname'], station,
                             sts, ets),
                      index_col=None)
    else:
        data = np.ma.zeros((24, 366), 'f')
        df = read_sql("""
        WITH data as (
            SELECT valid at time zone %s  + '10 minutes'::interval as v
            from alldata where station = %s and
            array_to_string(wxcodes, '') LIKE '%%""" + code + """%%'
            and valid > %s and valid < %s),
        agg as (
            SELECT distinct extract(doy from v)::int as doy,
            extract(year from v)::int as year,
            extract(hour from v)::int as hour
            from data)
        SELECT doy, year, hour, count(*) from agg
        GROUP by doy, year, hour
        """, pgconn, params=(nt.sts[station]['tzname'], station,
                             sts, ets),
                      index_col=None)
    if df.empty:
        raise ValueError("No data was found, sorry!")
    minyear = df['year'].min()
    maxyear = df['year'].max()
    for _, row in df.iterrows():
        data[row['hour'], row[groupby] - 1] += 1
    # Zero cells are masked so they render as background, not color 0
    data.mask = np.where(data == 0, True, False)
    fig = plt.figure(figsize=(8, 6))
    ax = plt.axes([0.11, 0.25, 0.7, 0.65])
    cax = plt.axes([0.82, 0.04, 0.02, 0.15])
    res = ax.imshow(data, aspect='auto', rasterized=True,
                    interpolation='nearest')
    fig.colorbar(res, cax=cax)
    xloc = plt.MaxNLocator(4)
    cax.yaxis.set_major_locator(xloc)
    cax.set_ylabel("Count")
    ax.set_ylim(-0.5, 23.5)
    ax.set_yticks((0, 4, 8, 12, 16, 20))
    ax.set_ylabel("Local Time, %s" % (nt.sts[station]['tzname'], ))
    ax.set_yticklabels(('Mid', '4 AM', '8 AM', 'Noon', '4 PM', '8 PM'))
    ax.set_title(("[%s] %s %s Reports\n[%.0f - %.0f]"
                  " by hour and %s") % (
        station, nt.sts[station]['name'], PDICT[code],
        minyear, maxyear, PDICT2[groupby].replace("group ", "")))
    ax.grid(True)
    # Bottom marginal: total reports per period
    lax = plt.axes([0.11, 0.1, 0.7, 0.15])
    if groupby == 'week':
        ax.set_xticks(np.arange(0, 55, 7))
        lax.bar(np.arange(0, 52), np.ma.sum(data, 0), facecolor='tan')
        lax.set_xlim(-0.5, 51.5)
        lax.set_xticks(np.arange(0, 55, 7))
        lax.set_xticklabels(('Jan 1', 'Feb 19', 'Apr 8', 'May 27',
                             'Jul 15', 'Sep 2', 'Oct 21', 'Dec 9'))
    else:
        ax.set_xticks(
            [1, 32, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335, 365])
        lax.bar(np.arange(0, 366), np.ma.sum(data, 0), facecolor='tan')
        lax.set_xlim(-0.5, 365.5)
        lax.set_xticks(
            [1, 32, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335, 365])
        lax.set_xticklabels(calendar.month_abbr[1:])
    plt.setp(ax.get_xticklabels(), visible=False)
    # Bottom grid
    lax.grid(True)
    yloc = plt.MaxNLocator(3)
    lax.yaxis.set_major_locator(yloc)
    lax.yaxis.get_major_ticks()[-1].label1.set_visible(False)
    # Right grid
    rax = plt.axes([0.81, 0.25, 0.15, 0.65])
    rax.barh(np.arange(0, 24) - 0.4, np.ma.sum(data, 1),
             facecolor='tan')
    rax.set_ylim(-0.5, 23.5)
    rax.set_yticks([])
    xloc = plt.MaxNLocator(3)
    rax.xaxis.set_major_locator(xloc)
    rax.xaxis.get_major_ticks()[0].label1.set_visible(False)
    rax.grid(True)
    return fig, df
def plotter(fdict):
    """Plot weekly US Drought Monitor intensity change for a state.

    Fetches USDM statistics from the drought web service, computes a
    weighted weekly change across the D0-D4 categories, and plots a
    cubic-interpolated curve per year with improving (blue) and degrading
    (red) regions filled.

    Args:
        fdict: autoplot CGI dictionary of user parameters.

    Returns:
        tuple: (matplotlib.figure.Figure, pandas.DataFrame of USDM columns)

    Raises:
        NoDataFound: when the web service fails or returns no payload.
    """
    ctx = util.get_autoplot_context(fdict, get_description())
    state = ctx["state"]
    syear = ctx["syear"]
    eyear = ctx["eyear"]
    # Reverse-lookup the FIPS code for the selected state
    fips = ""
    for key in state_fips:
        if state_fips[key] == state:
            fips = key
    payload = "{'area':'%s', 'type':'state', 'statstype':'2'}" % (fips,)
    headers = {}
    headers["Accept"] = "application/json, text/javascript, */*; q=0.01"
    headers["Content-Type"] = "application/json; charset=UTF-8"
    req = util.exponential_backoff(
        requests.post, SERVICE, payload, headers=headers
    )
    if req is None:
        raise NoDataFound("Drought Web Service failed to deliver data.")
    jdata = req.json()
    if "d" not in jdata:
        raise NoDataFound("Data Not Found.")
    df = pd.DataFrame(jdata["d"])
    df["Date"] = pd.to_datetime(df["ReleaseDate"])
    df.sort_values("Date", ascending=True, inplace=True)
    # Shift 3.5 days so each weekly value plots at mid-week
    df["x"] = df["Date"] + datetime.timedelta(hours=(3.5 * 24))
    fig = plt.figure(figsize=(7, 9))
    ax = fig.add_axes([0.1, 0.1, 0.87, 0.84])
    lastrow = None
    for year, gdf in df.groupby(df.Date.dt.year):
        if year < syear or year > eyear:
            continue
        xs = []
        ys = []
        for _, row in gdf.iterrows():
            if lastrow is None:
                lastrow = row
            # Weighted change: more intense categories weigh more
            delta = (
                (lastrow["D4"] - row["D4"]) * 5.0
                + (lastrow["D3"] - row["D3"]) * 4.0
                + (lastrow["D2"] - row["D2"]) * 3.0
                + (lastrow["D1"] - row["D1"]) * 2.0
                + (lastrow["D0"] - row["D0"])
            )
            xs.append(int(row["Date"].strftime("%j")))
            # Curve rides on the year's horizontal line; negated so
            # degradation plots downward into next year's row
            ys.append(year + (0 - delta) / 100.0)
            lastrow = row
        # cubic interp1d needs at least four points
        if len(xs) < 4:
            continue
        fcube = interp1d(xs, ys, kind="cubic")
        xnew = np.arange(xs[0], xs[-1])
        yval = np.ones(len(xnew)) * year
        ynew = fcube(xnew)
        ax.fill_between(
            xnew,
            yval,
            ynew,
            where=(ynew < yval),
            facecolor="blue",
            interpolate=True,
        )
        ax.fill_between(
            xnew,
            yval,
            ynew,
            where=(ynew >= yval),
            facecolor="red",
            interpolate=True,
        )
    # Inverted y axis: earliest year on top
    ax.set_ylim(eyear + 1, syear - 1)
    ax.set_xlim(0, 366)
    ax.set_xlabel(
        (
            "curve height of 1 year is 1 effective drought category "
            "change over area of %s"
        )
        % (state_names[state],)
    )
    ax.set_ylabel("Year, thru %s" % (df.Date.max().strftime("%d %b %Y"),))
    ax.set_title(
        (
            "%.0f-%.0f US Drought Monitor Weekly Change for %s\n"
            "curve height represents change in intensity + coverage"
        )
        % (syear, eyear, state_names[state])
    )
    ax.grid(True)
    ax.set_xticks((1, 32, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335))
    ax.set_xticklabels(calendar.month_abbr[1:])
    ax.set_yticks(
        np.arange(ax.get_ylim()[0] - 1, ax.get_ylim()[1], -1, dtype="i")
    )
    fig.text(0.02, 0.03, "Blue areas are improving conditions", color="b")
    fig.text(0.4, 0.03, "Red areas are degrading conditions", color="r")
    return fig, df[["Date", "NONE", "D0", "D1", "D2", "D3", "D4"]]
def plotter(fdict):
    """Chart distinct VTEC phenomena/significance pairs issued per year.

    Top panel: annual count of distinct pairs as a labeled bar chart.
    Bottom panel: each pair rendered as text at its assigned row per year.
    Supports the special "_ALL" station aggregating every office.

    Args:
        fdict: autoplot CGI dictionary of user parameters.

    Returns:
        tuple: (matplotlib.figure.Figure, pandas.DataFrame of year/
        phenomena/significance rows)

    Raises:
        NoDataFound: when no warnings exist for the WFO.
    """
    pgconn = get_dbconn("postgis")
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx["station"][:4]
    # Register the pseudo-office used by the "_ALL" aggregation
    ctx["_nt"].sts["_ALL"] = {"name": "All Offices"}
    fig = plt.figure(figsize=(8, 14 if station != "_ALL" else 21))
    top_ax = plt.axes([0.1, 0.75, 0.85, 0.2])
    bottom_ax = plt.axes([0.1, 0.05, 0.85, 0.65])
    if station == "_ALL":
        df = read_sql(
            """
            SELECT distinct extract(year from issue) as year,
            phenomena, significance from warnings WHERE
            phenomena is not null and significance is not null
            and issue > '2005-01-01'
        """,
            pgconn,
            index_col=None,
        )
    else:
        df = read_sql(
            """
            SELECT distinct extract(year from issue) as year,
            phenomena, significance from warnings WHERE wfo = %s
            and phenomena is not null and significance is not null
            and issue > '2005-01-01'
        """,
            pgconn,
            params=(station, ),
            index_col=None,
        )
    if df.empty:
        raise NoDataFound("No data was found for this WFO.")
    df["wfo"] = station
    df["year"] = df["year"].astype("i")
    yearly = df.groupby("year").count()
    top_ax.bar(yearly.index.values, yearly["wfo"], width=0.8, fc="b",
               ec="b", align="center")
    for yr, row in yearly.iterrows():
        top_ax.text(yr, row["wfo"] + 1, "%s" % (row["wfo"], ),
                    ha="center")
    top_ax.set_title(("[%s] NWS %s\nCount of Distinct VTEC Phenomena/"
                      "Significance - %i to %i") % (
        station,
        ctx["_nt"].sts[station]["name"],
        df["year"].min(),
        df["year"].max(),
    ))
    top_ax.grid()
    top_ax.set_ylabel("Count")
    xlo = yearly.index.values.min() - 0.5
    xhi = yearly.index.values.max() + 0.5
    top_ax.set_xlim(xlo, xhi)
    df.sort_values(["phenomena", "significance"], inplace=True)
    # Assign each new phenomena.significance pair the next row, in
    # sorted-first-seen order
    rowpos = {}
    for _, row in df.iterrows():
        key = "%s.%s" % (row["phenomena"], row["significance"])
        if key not in rowpos:
            rowpos[key] = len(rowpos) + 1
        bottom_ax.text(
            row["year"],
            rowpos[key],
            key,
            ha="center",
            va="center",
            fontsize=10,
            bbox=dict(color="white"),
        )
    bottom_ax.set_title("VTEC <Phenomena.Significance> Issued by Year")
    bottom_ax.set_ylim(0, len(rowpos) + 1)
    bottom_ax.grid(True)
    bottom_ax.set_xlim(xlo, xhi)
    return fig, df
def plotter(fdict):
    """Plot accumulated GDD, precipitation, and SDD vs climatology.

    Builds per-day climatology from the state COOP archive, joins it with
    station observations, and plots accumulations for up to four selected
    years plus the historical envelope.  Which panels appear is driven by
    ctx["which"] in {"all", "gdd", "precip", "sdd"}.

    Args:
      fdict: CGI form dictionary fed to get_autoplot_context.

    Returns:
      (matplotlib.figure.Figure, pandas.DataFrame)
    """
    pgconn = get_dbconn("iem")
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx["station"]
    sdate = ctx["sdate"]
    edate = ctx["edate"]
    year2 = ctx.get("year2", 0)
    year3 = ctx.get("year3", 0)
    year4 = ctx.get("year4", 0)
    # Up to four highlighted years, each with a fixed color
    wantedyears = [sdate.year, year2, year3, year4]
    yearcolors = ["r", "g", "b", "purple"]
    gddbase = ctx["base"]
    gddceil = ctx["ceil"]
    whichplots = ctx["which"]
    # Column-name suffix encoding the GDD base/ceiling, e.g. "gdd5086"
    glabel = "gdd%s%s" % (gddbase, gddceil)
    # build the climatology from the long-term COOP site tied to this station
    table = "alldata_%s" % (ctx["_nt"].sts[station]["climate_site"][:2], )
    climo = read_sql(
        """ SELECT sday, avg(gddxx(%s, %s, high, low)) as c""" + glabel + """, avg(sdd86(high, low)) as csdd86, avg(precip) as cprecip from """ + table + """ WHERE station = %s GROUP by sday """,
        get_dbconn("coop"),
        params=(gddbase, gddceil, ctx["_nt"].sts[station]["climate_site"]),
        index_col=None,
    )
    # build the obs; leap day excluded so all years have the same length
    df = read_sql(
        """ SELECT day, to_char(day, 'mmdd') as sday, gddxx(%s, %s, max_tmpf, min_tmpf) as o""" + glabel + """, pday as oprecip, sdd86(max_tmpf, min_tmpf) as osdd86 from summary s JOIN stations t ON (s.iemid = t.iemid) WHERE t.id = %s and t.network = %s and to_char(day, 'mmdd') != '0229' ORDER by day ASC """,
        pgconn,
        params=(gddbase, gddceil, station, ctx["network"]),
        index_col=None,
    )
    # Now we need to join the frames on the mmdd key
    df = pd.merge(df, climo, on="sday")
    df.sort_values("day", ascending=True, inplace=True)
    df.set_index("day", inplace=True)
    # Daily departures from climatology
    df["precip_diff"] = df["oprecip"] - df["cprecip"]
    df[glabel + "_diff"] = df["o" + glabel] - df["c" + glabel]
    xlen = int((edate - sdate).days) + 1  # In case of leap day
    ab = ctx["_nt"].sts[station]["archive_begin"]
    if ab is None:
        raise NoDataFound("Unknown station metadata.")
    years = (datetime.datetime.now().year - ab.year) + 1
    # One row per archive year, filled with NaN where no data exists
    acc = np.zeros((years, xlen))
    acc[:] = np.nan
    pacc = np.zeros((years, xlen))
    pacc[:] = np.nan
    sacc = np.zeros((years, xlen))
    sacc[:] = np.nan
    # Axes layout depends on which combination of panels was requested;
    # ax1 is always the "primary" axes that gets the title/legend
    if whichplots == "all":
        fig = plt.figure(figsize=(9, 12))
        ax1 = fig.add_axes([0.1, 0.7, 0.8, 0.2])
        ax2 = fig.add_axes(
            [0.1, 0.6, 0.8, 0.1], sharex=ax1, facecolor="#EEEEEE")
        ax3 = fig.add_axes([0.1, 0.35, 0.8, 0.2], sharex=ax1)
        ax4 = fig.add_axes([0.1, 0.1, 0.8, 0.2], sharex=ax1)
        title = ("GDD(base=%.0f,ceil=%.0f), Precip, & "
                 "SDD(base=86)") % (gddbase, gddceil,)
    elif whichplots == "gdd":
        fig = plt.figure()
        ax1 = fig.add_axes([0.14, 0.31, 0.8, 0.57])
        ax2 = fig.add_axes(
            [0.14, 0.11, 0.8, 0.2], sharex=ax1, facecolor="#EEEEEE")
        title = ("GDD(base=%.0f,ceil=%.0f)") % (gddbase, gddceil)
    elif whichplots == "precip":
        fig = plt.figure()
        ax3 = fig.add_axes([0.1, 0.11, 0.8, 0.75])
        ax1 = ax3
        title = "Precipitation"
    elif whichplots == "sdd":
        fig = plt.figure()
        ax4 = fig.add_axes([0.1, 0.1, 0.8, 0.8])
        ax1 = ax4
        title = "Stress Degree Days (base=86)"
    ax1.set_title(
        ("Accumulated %s\n%s %s")
        % (title, station, ctx["_nt"].sts[station]["name"]),
        fontsize=18 if whichplots == "all" else 14,
    )
    for year in range(ab.year, datetime.datetime.now().year + 1):
        # Same month/day window shifted into each archive year
        sts = sdate.replace(year=year)
        ets = sts + datetime.timedelta(days=(xlen - 1))
        x = df.loc[sts:ets, "o" + glabel].cumsum()
        if x.empty:
            continue
        # NOTE(review): rows are indexed by (year - sdate.year) while the
        # arrays are sized from ab.year; negative indices wrap around.
        # Row ordering is scrambled but nanmin/nanmax below are unaffected
        # as long as no two years collide -- TODO confirm intent.
        acc[(year - sdate.year), :len(x.index)] = x.values
        x = df.loc[sts:ets, "oprecip"].cumsum()
        pacc[(year - sdate.year), :len(x.index)] = x.values
        x = df.loc[sts:ets, "osdd86"].cumsum()
        sacc[(year - sdate.year), :len(x.index)] = x.values
        if year not in wantedyears:
            continue
        color = yearcolors[wantedyears.index(year)]
        yearlabel = sts.year
        if sts.year != ets.year:
            # Window spans a New Year; label as e.g. "2019-2020"
            yearlabel = "%s-%s" % (sts.year, ets.year)
        if whichplots in ["gdd", "all"]:
            ax1.plot(
                range(len(x.index)),
                df.loc[sts:ets, "o" + glabel].cumsum().values,
                zorder=6,
                color=color,
                label="%s" % (yearlabel, ),
                lw=2,
            )
        # Get cumulated precip
        p = df.loc[sts:ets, "oprecip"].cumsum()
        if whichplots in ["all", "precip"]:
            ax3.plot(
                range(len(p.index)),
                p.values,
                color=color,
                lw=2,
                zorder=6,
                label="%s" % (yearlabel, ),
            )
        p = df.loc[sts:ets, "osdd86"].cumsum()
        if whichplots in ["all", "sdd"]:
            ax4.plot(
                range(len(p.index)),
                p.values,
                color=color,
                lw=2,
                zorder=6,
                label="%s" % (yearlabel, ),
            )
        # Plot Climatology only once (for the first highlighted year)
        if wantedyears.index(year) == 0:
            x = df.loc[sts:ets, "c" + glabel].cumsum()
            if whichplots in ["all", "gdd"]:
                ax1.plot(
                    range(len(x.index)),
                    x.values,
                    color="k",
                    label="Climatology",
                    lw=2,
                    zorder=5,
                )
            x = df.loc[sts:ets, "cprecip"].cumsum()
            if whichplots in ["all", "precip"]:
                ax3.plot(
                    range(len(x.index)),
                    x.values,
                    color="k",
                    label="Climatology",
                    lw=2,
                    zorder=5,
                )
            x = df.loc[sts:ets, "csdd86"].cumsum()
            if whichplots in ["all", "sdd"]:
                ax4.plot(
                    range(len(x.index)),
                    x.values,
                    color="k",
                    label="Climatology",
                    lw=2,
                    zorder=5,
                )
        # Accumulated GDD departure from climatology (dashed, ax2)
        x = df.loc[sts:ets, glabel + "_diff"].cumsum()
        if whichplots in ["all", "gdd"]:
            ax2.plot(
                range(len(x.index)),
                x.values,
                color=color,
                linewidth=2,
                linestyle="--",
            )
    # Shade the all-years historical envelope on each panel
    xmin = np.nanmin(acc, 0)
    xmax = np.nanmax(acc, 0)
    if whichplots in ["all", "gdd"]:
        ax1.fill_between(range(len(xmin)), xmin, xmax, color="lightblue")
        ax1.grid(True)
        ax2.grid(True)
    xmin = np.nanmin(pacc, 0)
    xmax = np.nanmax(pacc, 0)
    if whichplots in ["all", "precip"]:
        ax3.fill_between(range(len(xmin)), xmin, xmax, color="lightblue")
        ax3.set_ylabel("Precipitation [inch]", fontsize=16)
        ax3.grid(True)
    xmin = np.nanmin(sacc, 0)
    xmax = np.nanmax(sacc, 0)
    if whichplots in ["all", "sdd"]:
        ax4.fill_between(range(len(xmin)), xmin, xmax, color="lightblue")
        ax4.set_ylabel(r"SDD Base 86 $^{\circ}\mathrm{F}$", fontsize=16)
        ax4.grid(True)
    if whichplots in ["all", "gdd"]:
        ax1.set_ylabel(
            (r"GDD Base %.0f Ceil %.0f $^{\circ}\mathrm{F}$")
            % (gddbase, gddceil),
            fontsize=16,
        )
        ax1.text(
            0.5,
            0.9,
            "%s/%s - %s/%s"
            % (sdate.month, sdate.day, edate.month, edate.day),
            transform=ax1.transAxes,
            ha="center",
        )
        # Symmetric y-limits so zero departure sits mid-panel
        ylim = ax2.get_ylim()
        spread = max([abs(ylim[0]), abs(ylim[1])]) * 1.1
        ax2.set_ylim(0 - spread, spread)
        ax2.text(
            0.02,
            0.1,
            " Accumulated Departure ",
            transform=ax2.transAxes,
            bbox=dict(facecolor="white", ec="#EEEEEE"),
        )
        ax2.yaxis.tick_right()
    # Build x tick positions: monthly for long windows, ~weekly otherwise
    xticks = []
    xticklabels = []
    wanted = [1] if xlen > 31 else [1, 7, 15, 22, 29]
    now = sdate
    i = 0
    while now <= edate:
        if now.day in wanted:
            xticks.append(i)
            xticklabels.append(now.strftime("%-d\n%b"))
        now += datetime.timedelta(days=1)
        i += 1
    if whichplots in ["all", "gdd"]:
        ax2.set_xticks(xticks)
        ax2.set_xticklabels(xticklabels)
        ax1.legend(loc=2, prop={"size": 12})
        # Remove ticks on the top most plot
        for label in ax1.get_xticklabels():
            label.set_visible(False)
        ax1.set_xlim(0, xlen + 1)
    if whichplots in ["all", "precip"]:
        ax3.set_xticks(xticks)
        ax3.set_xticklabels(xticklabels)
        ax3.legend(loc=2, prop={"size": 10})
        ax3.set_xlim(0, xlen + 1)
    if whichplots in ["all", "sdd"]:
        ax4.set_xticks(xticks)
        ax4.set_xticklabels(xticklabels)
        ax4.legend(loc=2, prop={"size": 10})
        ax4.set_xlim(0, xlen + 1)
    return fig, df
def plotter(fdict):
    """Plot the average period between last and first high temperature.

    For each whole-degree temperature t (from 32F up), computes the average
    day-of-year of the last spring day and first fall day reaching t, and
    draws the "gap" period as horizontal bars; a side panel shows the gap
    length in days.

    Args:
      fdict: CGI form dictionary fed to get_autoplot_context.

    Returns:
      (matplotlib.figure.Figure, pandas.DataFrame)
    """
    pgconn = get_dbconn("coop")
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx["station"]
    table = "alldata_%s" % (station[:2], )
    nt = network.Table("%sCLIMATE" % (station[:2], ))
    today = datetime.datetime.now()
    thisyear = today.year
    # generate_series(32, high) explodes each day into one row per degree,
    # so min(doy)/max(doy) per (year, t) give first/last day reaching t
    df = read_sql(
        """ with data as ( select year, month, extract(doy from day) as doy, generate_series(32, high) as t from """ + table + """ where station = %s and year < %s), agger as ( SELECT year, t, min(doy), max(doy) from data GROUP by year, t) SELECT t as tmpf, avg(min) as min_jday, avg(max) as max_jday from agger GROUP by t ORDER by t ASC """,
        pgconn,
        params=(station, thisyear),
        index_col="tmpf",
    )
    if df.empty:
        raise NoDataFound("No Data Found.")
    fig = plt.figure(figsize=(8, 6))
    ax = plt.axes([0.1, 0.1, 0.7, 0.8])
    ax2 = plt.axes([0.81, 0.1, 0.15, 0.8])
    # Gap length in days: from last occurrence this year (max_jday) to the
    # first occurrence next year (min_jday + 365)
    height = df["min_jday"][:] + 365.0 - df["max_jday"]
    ax2.plot(height, df.index.values)
    ax2.set_xticks([30, 90, 180, 365])
    plt.setp(ax2.get_yticklabels(), visible=False)
    ax2.set_ylim(32, df.index.values.max() + 5)
    ax2.grid(True)
    ax2.text(
        0.96,
        0.02,
        "Days",
        transform=ax2.transAxes,
        bbox=dict(color="white"),
        ha="right",
    )
    ax.text(
        0.96,
        0.02,
        "Period",
        transform=ax.transAxes,
        bbox=dict(color="white"),
        ha="right",
    )
    ax.set_ylim(32, df.index.values.max() + 5)
    # Bars run from last occurrence into next year's first occurrence
    ax.barh(
        df.index.values - 0.5,
        height,
        left=df["max_jday"].values,
        ec="tan",
        fc="tan",
        height=1.1,
    )
    # Month-start day-of-year positions, repeated for the second year
    days = np.array([1, 32, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335])
    days = np.concatenate([days, days + 365])
    ax.set_xticks(days)
    months = calendar.month_abbr[1:] + calendar.month_abbr[1:]
    ax.set_xticklabels(months)
    ax.set_ylabel("High Temperature $^\circ$F")
    ax.set_xlim(min(df["max_jday"]) - 1, max(df["max_jday"] + height) + 1)
    ax.grid(True)
    msg = ("[%s] %s Period Between Average Last and "
           "First High Temperature of Year") % (station,
                                                nt.sts[station]["name"])
    # Split the title roughly in half across two lines
    tokens = msg.split()
    sz = int(len(tokens) / 2)
    ax.set_title(" ".join(tokens[:sz]) + "\n" + " ".join(tokens[sz:]))
    return fig, df
def do(valid):
    """Generate a wind-farm snapshot plot for a given timestamp.

    Draws a map panel of turbine power (colored scatter) with wind vectors,
    plus four side panels relating power, wind speed, yaw, and pitch.

    Args:
      valid (datetime): timestamp to query SCADA data for.

    Note: this function builds the figure in-place and does not return or
    save it here -- presumably the caller/following code handles output;
    TODO confirm against the full file.
    """
    pgconn = get_dbconn("scada")
    cursor = pgconn.cursor()
    cursor.execute(
        """select turbine_id, power, lon, lat, yawangle, windspeed, alpha1 from data s JOIN turbines t on (t.id = s.turbine_id) WHERE valid = %s and power is not null and yawangle is not null and windspeed is not null and alpha1 is not null""",
        (valid, ),
    )
    lons = []
    lats = []
    vals = []
    u = []
    v = []
    ws = []
    yaw = []
    pitch = []
    for row in cursor:
        lons.append(row[2])
        lats.append(row[3])
        vals.append(row[1])
        ws.append(row[5])
        yaw.append(row[4])
        # Decompose speed/direction into u/v components for the quiver plot
        a, b = uv(speed(row[5], "MPS"), direction(row[4], "deg"))
        u.append(a.value("MPS"))
        v.append(b.value("MPS"))
        pitch.append(row[6])
    pitch = np.array(pitch)
    vals = np.array(vals)
    avgv = np.average(vals)
    # vals2 = vals - avgv
    fig = plt.figure(figsize=(12.8, 7.2))
    # Main map panel on the left
    ax = fig.add_axes([0.14, 0.1, 0.52, 0.8])
    cmap = get_cmap("jet")
    cmap.set_under("tan")
    cmap.set_over("black")
    # cmap = get_cmap('seismic')
    # clevs = np.arange(-250, 251, 50)
    clevs = np.arange(0, 1501, 150)
    norm = mpcolors.BoundaryNorm(clevs, cmap.N)
    ax.quiver(lons, lats, u, v, zorder=1)
    ax.scatter(
        lons,
        lats,
        c=vals,
        norm=norm,
        edgecolor="none",
        cmap=cmap,
        s=100,
        zorder=2,
    )
    # Show plain lat/lon values, then hide the labels entirely
    ax.get_yaxis().get_major_formatter().set_useOffset(False)
    ax.get_xaxis().get_major_formatter().set_useOffset(False)
    ax.xaxis.set_major_formatter(plt.NullFormatter())
    ax.yaxis.set_major_formatter(plt.NullFormatter())
    ax.set_title(("Turbine Power [kW]\n"
                  "Valid: %s") % (valid.strftime("%d %b %Y %I:%M %p")))
    make_colorbar(clevs, norm, cmap)
    ax.text(
        0.05,
        0.05,
        "Turbine Power: $\mu$= %.1f $\sigma$= %.1f kW" % (avgv, np.std(vals)),
        transform=ax.transAxes,
    )
    ax.text(
        0.05,
        0.01,
        "Wind $\mu$= %.1f $\sigma$= %.1f $ms^{-1}$"
        % (np.average(ws), np.std(ws)),
        transform=ax.transAxes,
    )
    ax.set_xlabel("Longitude $^\circ$E")
    ax.set_ylabel("Latitude $^\circ$N")
    # Hard-coded farm bounding box
    ax.set_xlim(-93.475, -93.328)
    ax.set_ylim(42.20, 42.31)
    # Next plot: power vs wind speed
    ax2 = fig.add_axes([0.7, 0.80, 0.28, 0.18])
    ax2.scatter(ws, vals, edgecolor="k", c="k")
    ax2.text(
        0.5,
        -0.25,
        "Wind Speed $ms^{-1}$",
        transform=ax2.transAxes,
        ha="center",
    )
    ax2.set_xlim(0, 20)
    # ax2.set_ylabel("Power kW")
    ax2.grid(True)
    # Next plot: power vs yaw angle
    ax3 = fig.add_axes([0.7, 0.57, 0.28, 0.18], sharey=ax2)
    ax3.scatter(yaw, vals, edgecolor="k", c="k")
    ax3.text(0.5, -0.25, "Yaw", transform=ax3.transAxes, ha="center")
    # ax3.set_ylabel("Power kW")
    ax3.set_xlim(0, 360)
    ax3.set_xticks(np.arange(0, 361, 45))
    ax3.set_xticklabels(["N", "NE", "E", "SE", "S", "SW", "W", "NW", "N"])
    ax3.grid(True)
    # Next plot: power vs pitch
    ax4 = fig.add_axes([0.7, 0.32, 0.28, 0.18], sharey=ax2)
    ax4.scatter(pitch, vals, edgecolor="k", c="k")
    ax4.text(0.5, -0.25, "Pitch $^\circ$", transform=ax4.transAxes,
             ha="center")
    ax4.set_ylim(-10, 1600)
    ax4.grid(True)
    # Next plot: wind speed vs pitch
    ax5 = fig.add_axes([0.7, 0.07, 0.28, 0.18], sharex=ax4)
    ax5.scatter(pitch, ws, edgecolor="k", c="k")
    ax5.text(0.5, -0.25, "Pitch $^\circ$", transform=ax5.transAxes,
             ha="center")
    ax5.grid(True)
    ax5.set_ylim(bottom=-10)
    # maxpitch = max(np.where(pitch > 20, 0, pitch))
    # ax5.set_xlim(np.ma.minimum(pitch)-0.5, maxpitch+0.5)
    ax5.set_xlim(-3, 20.1)
    ax5.set_ylim(0, 20)
    ax5.text(
        -0.1,
        0.5,
        "Wind Speed $ms^{-1}$",
        transform=ax5.transAxes,
        ha="center",
        va="center",
        rotation=90,
    )
def plotter(fdict):
    """Plot a three-panel recent METAR time series for a station.

    Panel 1: air/dew point temperature.  Panel 2: wind speed/gust (filled)
    with wind direction scatter.  Panel 3: either visibility + overcast
    ceiling ("default") or altimeter-derived pressure ("two").

    Args:
      fdict: CGI form dictionary fed to get_autoplot_context.

    Returns:
      (matplotlib.figure.Figure, pandas.DataFrame)
    """
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx["station"]
    sdate = ctx.get("sdate")
    plot_type = ctx["p"]
    if not ctx["_nt"].sts:
        raise NoDataFound(
            ("Network Identifier %s is unknown to IEM") % (ctx["network"], ))
    tzname = ctx["_nt"].sts[station]["tzname"]
    df = get_data(ctx["network"], station, tzname, sdate)
    if df.empty:
        raise NoDataFound("No data was found!")
    # if d1 is not None and d1 >= 0 and d1 <= 360:
    # if s is not None and s >= 0 and s < 200:
    # if t is not None and t >= -90 and t < 190:
    # if d is not None and d >= -90 and d < 190:
    # if v1 is not None and v1 >= 0 and v1 < 30:

    def ceilingfunc(row):
        """Our logic to compute a ceiling"""
        # First OVC layer (if any) defines the ceiling, in thousands of feet;
        # returns None implicitly when no overcast layer is reported
        c = [row["skyc1"], row["skyc2"], row["skyc3"], row["skyc4"]]
        if "OVC" in c:
            pos = c.index("OVC")
            larr = [row["skyl1"], row["skyl2"], row["skyl3"], row["skyl4"]]
            return larr[pos] / 1000.0

    df["ceiling"] = df.apply(ceilingfunc, axis=1)
    fig = plt.figure(figsize=(9, 9))
    xalign = 0.1
    xwidth = 0.8
    ax = fig.add_axes([xalign, 0.7, xwidth, 0.25])
    xmin = df.index.min()
    xmax = df.index.max()
    # ____________PLOT 1___________________________
    df2 = df[df["tmpf"].notnull()]
    ax.plot(
        df2.index.values,
        df2["tmpf"],
        lw=2,
        label="Air Temp",
        color="#db6065",
        zorder=2,
    )
    df2 = df[df["dwpf"].notnull()]
    ax.plot(
        df2.index.values,
        df2["dwpf"],
        lw=2,
        label="Dew Point",
        color="#346633",
        zorder=3,
    )
    ax.set_title("[%s] %s\nRecent Time Series"
                 % (station, ctx["_nt"].sts[station]["name"]))
    ax.grid(True)
    ax.text(
        -0.1,
        0,
        "Temperature [F]",
        rotation=90,
        transform=ax.transAxes,
        verticalalignment="bottom",
    )
    ax.set_ylim(bottom=(df["dwpf"].min() - 3))
    plt.setp(ax.get_xticklabels(), visible=True)
    date_ticker(ax, pytz.timezone(tzname))
    ax.set_xlim(xmin, xmax)
    ax.legend(loc="best", ncol=2)
    # _____________PLOT 2____________________________
    ax = fig.add_axes([xalign, 0.4, xwidth, 0.25])
    ax2 = ax.twinx()
    df2 = df[df["gust"].notnull()]
    if not df2.empty:
        # Convert knots to mph via pint units
        ax2.fill_between(
            df2.index.values,
            0,
            (df2["gust"].values * units("knot")).to(units("mile / hour")).m,
            color="#9898ff",
            zorder=2,
        )
    df2 = df[df["sknt"].notnull()]
    if not df2.empty:
        ax2.fill_between(
            df2.index.values,
            0,
            (df2["sknt"].values * units("knot")).to(units("mile / hour")).m,
            color="#373698",
            zorder=3,
        )
    ax2.set_ylim(bottom=0)
    ax.set_yticks(range(0, 361, 45))
    ax.set_yticklabels(["N", "NE", "E", "SE", "S", "SW", "W", "NW", "N"])
    ax.set_ylabel("Wind Direction")
    ax2.set_ylabel("Wind Speed [mph]")
    ax.set_ylim(0, 360.1)
    date_ticker(ax, pytz.timezone(tzname))
    # NOTE(review): df2 here is the sknt-filtered frame, not a
    # drct-filtered one; rows with null drct plot as gaps -- confirm intent
    ax.scatter(
        df2.index.values,
        df2["drct"],
        facecolor="None",
        edgecolor="#b8bc74",
        zorder=4,
    )
    # Put the direction scatter on top of the twinx speed fills
    ax.set_zorder(ax2.get_zorder() + 1)
    ax.patch.set_visible(False)
    ax.set_xlim(xmin, xmax)
    # _________ PLOT 3 ____
    ax = fig.add_axes([xalign, 0.1, xwidth, 0.25])
    if plot_type == "default":
        ax2 = ax.twinx()
        ax2.scatter(
            df.index.values,
            df["ceiling"],
            label="Visibility",
            marker="o",
            s=40,
            color="g",
        )
        ax2.set_ylabel("Overcast Ceiling [k ft]", color="g")
        ax2.set_ylim(bottom=0)
        ax.scatter(
            df.index.values,
            df["vsby"],
            label="Visibility",
            marker="*",
            s=40,
            color="b",
        )
        ax.set_ylabel("Visibility [miles]")
        ax.set_ylim(0, 14)
    elif plot_type == "two":
        # Sanity bounds filter out implausible altimeter values
        df2 = df[(df["alti"] > 20.0) & (df["alti"] < 40.0)]
        ax.grid(True)
        vals = (df2["alti"].values * units("inch_Hg")).to(units("hPa")).m
        ax.fill_between(df2.index.values, 0, vals, color="#a16334")
        ax.set_ylim(bottom=(vals.min() - 1), top=(vals.max() + 1))
        ax.set_ylabel("Pressure [mb]")
    ax.set_xlim(xmin, xmax)
    date_ticker(ax, pytz.timezone(tzname))
    ax.set_xlabel("Plot Time Zone: %s" % (tzname, ))
    return fig, df
def __init__(self, sector='iowa', figsize=(10.24, 7.68), **kwargs):
    """Construct a MapPlot

    Args:
      sector (str): plot domain, set 'custom' to bring your own projection
      figsize (tuple): figure size in inches, defaults to (10.24, 7.68)
      kwargs:
        projection (cartopy.crs,optional): bring your own projection
        north (float,optional): Plot top bounds (degN Lat)
        south (float,optional): Plot bottom bounds (degN Lat)
        east (float,optional): Plot right bounds (degE Lon)
        west (float,optional): Plot left bounds (degE Lon)
        titlefontsize (int): fontsize to use for the plot title
        subtitlefontsize (int): fontsize to use for the plot subtitle
        continentalcolor (color): color to use for continental coloring
        debug (bool): enable debugging
        aspect (str): plot aspect, defaults to equal
        dpi (int): figure resolution, defaults to 100
        axisbg (color): legacy alias for continentalcolor
        nostates (any): if present, skip drawing US state borders
        statebordercolor (color): edge color for state borders
        nologo (bool): if truthy, skip the IEM logo
        title (str): figure title text
        subtitle (str): figure subtitle text
        nocaption (any): if present, skip the bottom caption
        caption (str): caption attribution text
    """
    self.debug = kwargs.get('debug', False)
    self.fig = plt.figure(num=None, figsize=figsize,
                          dpi=kwargs.get('dpi', 100))
    # Storage of axes within this plot
    self.state = None
    self.cwa = None
    self.textmask = None  # For our plot_values magic, to prevent overlap
    self.sector = sector
    # Shared colorbar axes, created up-front with no frame/ticks
    self.cax = plt.axes(CAX_BOUNDS, frameon=False, yticks=[], xticks=[])
    self.axes = []
    self.ax = None
    # hack around sector=iowa: treat it as the generic 'state' sector
    if self.sector == 'iowa':
        self.sector = 'state'
        self.state = 'IA'
    # Delegates creation of self.ax/self.axes to the sector machinery
    sector_setter(self, MAIN_AX_BOUNDS, **kwargs)
    for _a in self.axes:
        if _a is None:
            continue
        # legacy usage of axisbg here
        _c = kwargs.get('axisbg',
                        kwargs.get('continentalcolor', '#EEEEEE'))
        _a.add_feature(cfeature.LAND, facecolor=_c, zorder=Z_CF)
        coasts = cfeature.NaturalEarthFeature('physical', 'coastline',
                                              '10m', edgecolor='black',
                                              facecolor='none')
        _a.add_feature(coasts, lw=1.0, zorder=Z_POLITICAL)
        _a.add_feature(cfeature.BORDERS, lw=1.0, zorder=Z_POLITICAL)
        _a.add_feature(cfeature.OCEAN, facecolor=(0.4471, 0.6235, 0.8117),
                       zorder=Z_CF)
        _a.add_feature(cfeature.LAKES, facecolor=(0.4471, 0.6235, 0.8117),
                       zorder=Z_CF)
        if 'nostates' not in kwargs:
            # State outlines come from a bundled pickle of geometries;
            # keys are bytes (b'geom'), presumably a py2-era pickle
            states = load_pickle_geo('us_states.pickle')
            _a.add_geometries(
                [val[b'geom'] for key, val in states.items()],
                crs=ccrs.PlateCarree(), lw=1.0,
                edgecolor=kwargs.get('statebordercolor', 'k'),
                facecolor='None', zorder=Z_POLITICAL
            )
    if not kwargs.get('nologo'):
        self.iemlogo()
    # Titles shift right when the logo occupies the top-left corner
    if "title" in kwargs:
        self.fig.text(0.09 if not kwargs.get('nologo') else 0.02, 0.94,
                      kwargs.get("title"),
                      fontsize=kwargs.get('titlefontsize', 18))
    if "subtitle" in kwargs:
        self.fig.text(0.09 if not kwargs.get('nologo') else 0.02, 0.91,
                      kwargs.get("subtitle"),
                      fontsize=kwargs.get('subtitlefontsize', 12))
    if 'nocaption' not in kwargs:
        self.fig.text(0.01, 0.03, ("%s :: generated %s"
                                   ) % (
            kwargs.get('caption', 'Iowa Environmental Mesonet'),
            datetime.datetime.now().strftime("%d %B %Y %I:%M %p %Z"),))
def plotter(fdict):
    """Plot a three-panel recent METAR time series for a station.

    Legacy variant of this app (raises ValueError instead of NoDataFound
    and uses the dt.speed/dt.pressure helpers rather than pint units); a
    newer implementation of the same plot exists elsewhere in this file.

    Args:
      fdict: CGI form dictionary fed to get_autoplot_context.

    Returns:
      (matplotlib.figure.Figure, pandas.DataFrame)
    """
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['station']
    network = ctx['network']
    sdate = ctx.get('sdate')
    plot_type = ctx['p']
    nt = NetworkTable(network)
    if not nt.sts:
        raise ValueError(
            ("Network Identifier %s is unknown to IEM") % (network, ))
    if station not in nt.sts:
        raise ValueError(
            ("Station %s does not exist in network %s") % (station,
                                                           network))
    tzname = nt.sts[station]['tzname']
    df = get_data(network, station, tzname, sdate)
    # if d1 is not None and d1 >= 0 and d1 <= 360:
    # if s is not None and s >= 0 and s < 200:
    # if t is not None and t >= -90 and t < 190:
    # if d is not None and d >= -90 and d < 190:
    # if v1 is not None and v1 >= 0 and v1 < 30:

    def ceilingfunc(row):
        """Our logic to compute a ceiling"""
        # First OVC layer (if any) defines the ceiling, in thousands of
        # feet; returns None implicitly when there is no overcast layer
        c = [row['skyc1'], row['skyc2'], row['skyc3'], row['skyc4']]
        if 'OVC' in c:
            pos = c.index('OVC')
            larr = [row['skyl1'], row['skyl2'], row['skyl3'], row['skyl4']]
            return larr[pos] / 1000.

    df['ceiling'] = df.apply(ceilingfunc, axis=1)
    fig = plt.figure(figsize=(9, 9))
    xalign = 0.1
    xwidth = 0.8
    ax = fig.add_axes([xalign, 0.7, xwidth, 0.25])
    xmin = df.index.min()
    xmax = df.index.max()
    # ____________PLOT 1___________________________
    df2 = df[df['tmpf'].notnull()]
    ax.plot(df2.index.values, df2['tmpf'], lw=2, label='Air Temp',
            color='#db6065', zorder=2)
    df2 = df[df['dwpf'].notnull()]
    ax.plot(df2.index.values, df2['dwpf'], lw=2, label='Dew Point',
            color='#346633', zorder=3)
    ax.set_title("[%s] %s\nRecent Time Series"
                 % (station, nt.sts[station]['name']))
    ax.grid(True)
    ax.text(-0.1, 0, "Temperature [F]", rotation=90,
            transform=ax.transAxes, verticalalignment='bottom')
    ax.set_ylim(bottom=(df['dwpf'].min() - 3))
    plt.setp(ax.get_xticklabels(), visible=True)
    date_ticker(ax, pytz.timezone(tzname))
    ax.set_xlim(xmin, xmax)
    ax.legend(loc='best', ncol=2)
    # _____________PLOT 2____________________________
    ax = fig.add_axes([xalign, 0.4, xwidth, 0.25])
    # NOTE(review): this drct filter is immediately overwritten by the
    # gust filter below and never used -- dead assignment, confirm intent
    df2 = df[df['drct'].notnull()]
    ax2 = ax.twinx()
    df2 = df[df['gust'].notnull()]
    if not df2.empty:
        # Convert knots to mph via the datatypes speed helper
        ax2.fill_between(df2.index.values, 0,
                         dt.speed(df2['gust'], 'KT').value('MPH'),
                         color='#9898ff', zorder=2)
    df2 = df[df['sknt'].notnull()]
    if not df2.empty:
        ax2.fill_between(df2.index.values, 0,
                         dt.speed(df2['sknt'], 'KT').value('MPH'),
                         color='#373698', zorder=3)
    ax2.set_ylim(bottom=0)
    ax.set_yticks(range(0, 361, 45))
    ax.set_yticklabels(['N', 'NE', 'E', 'SE', 'S', 'SW', 'W', 'NW', "N"])
    ax.set_ylabel("Wind Direction")
    ax2.set_ylabel("Wind Speed [mph]")
    ax.set_ylim(0, 360.1)
    date_ticker(ax, pytz.timezone(tzname))
    ax.scatter(df2.index.values, df2['drct'], facecolor='None',
               edgecolor='#b8bc74', zorder=4)
    # Put the direction scatter on top of the twinx speed fills
    ax.set_zorder(ax2.get_zorder() + 1)
    ax.patch.set_visible(False)
    ax.set_xlim(xmin, xmax)
    # _________ PLOT 3 ____
    ax = fig.add_axes([xalign, 0.1, xwidth, 0.25])
    if plot_type == 'default':
        ax2 = ax.twinx()
        ax2.scatter(df.index.values, df['ceiling'], label='Visibility',
                    marker='o', s=40, color='g')
        ax2.set_ylabel("Overcast Ceiling [k ft]", color='g')
        ax2.set_ylim(bottom=0)
        ax.scatter(df.index.values, df['vsby'], label='Visibility',
                   marker='*', s=40, color='b')
        ax.set_ylabel("Visibility [miles]")
        ax.set_ylim(0, 14)
    elif plot_type == 'two':
        # Sanity bounds filter out implausible altimeter values
        df2 = df[(df['alti'] > 20.) & (df['alti'] < 40.)]
        ax.grid(True)
        vals = dt.pressure(df2['alti'], 'IN').value('MB')
        ax.fill_between(df2.index.values, 0, vals, color='#a16334')
        ax.set_ylim(bottom=(vals.min() - 1), top=(vals.max() + 1))
        ax.set_ylabel("Pressure [mb]")
    ax.set_xlim(xmin, xmax)
    date_ticker(ax, pytz.timezone(tzname))
    ax.set_xlabel("Plot Time Zone: %s" % (tzname, ))
    return fig, df
def plotter(fdict):
    """Plot the top-10 extreme/recent ASOS events for a variable.

    For "max_"/"min_" aggregates, ranks the most extreme hourly values; for
    "above"/"below" aggregates, lists the most recent exceedances of a
    threshold.  Results are de-duplicated to one event per clock hour and
    rendered as a ranked horizontal bar chart.

    Args:
      fdict: CGI form dictionary fed to get_autoplot_context.

    Returns:
      (matplotlib.figure.Figure, pandas.DataFrame) -- only the rows used
      in the plot are returned.
    """
    pgconn = get_dbconn("asos")
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx["zstation"]
    month = ctx["month"]
    varname = ctx["var"]
    tzname = ctx["_nt"].sts[station]["tzname"]
    # Build the date-window SQL fragment: explicit sdate/edate wins over
    # the month/season selector
    if ctx.get("sdate") and ctx.get("edate"):
        # "or" handles windows that wrap over the New Year
        date_limiter = (
            " and (to_char(valid at time zone '%s', 'mmdd') >= '%s'"
            " %s to_char(valid at time zone '%s', 'mmdd') <= '%s')") % (
                tzname,
                ctx["sdate"].strftime("%m%d"),
                "or" if ctx["sdate"] > ctx["edate"] else "and",
                tzname,
                ctx["edate"].strftime("%m%d"),
        )
        title = "between %s and %s" % (
            ctx["sdate"].strftime("%-d %b"),
            ctx["edate"].strftime("%-d %b"),
        )
        if ctx["sdate"] == ctx["edate"]:
            date_limiter = (
                "and to_char(valid at time zone '%s', 'mmdd') = '%s'") % (
                    tzname, ctx["sdate"].strftime("%m%d"))
            title = "on %s" % (ctx["sdate"].strftime("%-d %b"), )
    else:
        if month == "all":
            months = range(1, 13)
        elif month == "fall":
            months = [9, 10, 11]
        elif month == "winter":
            months = [12, 1, 2]
        elif month == "spring":
            months = [3, 4, 5]
        elif month == "summer":
            months = [6, 7, 8]
        elif month == "octmar":
            months = [10, 11, 12, 1, 2, 3]
        else:
            ts = datetime.datetime.strptime("2000-" + month + "-01",
                                            "%Y-%b-%d")
            # make sure it is length two for the trick below in SQL
            months = [ts.month, 999]
        date_limiter = (
            " and extract(month from valid at time zone '%s') in %s") % (
                tzname, tuple(months))
        title = MDICT[month]
    if ctx.get("hour") is not None:
        # +10 minutes so obs just before the top of the hour count for it
        date_limiter += (
            f" and extract(hour from valid at time zone '{tzname}' "
            f"+ '10 minutes'::interval) = {ctx['hour']}")
        dt = datetime.datetime(2000, 1, 1, ctx["hour"])
        title += " @" + dt.strftime("%-I %p")
    # varname is e.g. "max_tmpf" -> aggregate + database column
    (agg, dbvar) = varname.split("_")
    if agg in ["max", "min"]:
        titlelabel = "Top"
        sorder = "DESC" if agg == "max" else "ASC"
        # Over-fetch 100 rows; the hourly de-dup below trims to 10
        df = read_sql(
            f""" WITH data as ( SELECT valid at time zone %s as v, {dbvar} from alldata WHERE station = %s {date_limiter}) SELECT v as valid, {dbvar} from data ORDER by {dbvar} {sorder} NULLS LAST LIMIT 100 """,
            pgconn,
            params=(ctx["_nt"].sts[station]["tzname"], station),
            index_col=None,
        )
    else:
        titlelabel = "Most Recent"
        op = ">=" if agg == "above" else "<"
        # float() coercion keeps the interpolated threshold safe for SQL
        threshold = float(ctx.get("threshold", 100))
        df = read_sql(
            f"SELECT valid at time zone %s as valid, {dbvar} from alldata "
            f"WHERE station = %s {date_limiter} and {dbvar} {op} {threshold} "
            "ORDER by valid DESC LIMIT 100",
            pgconn,
            params=(ctx["_nt"].sts[station]["tzname"], station),
            index_col=None,
        )
    if df.empty:
        raise NoDataFound("Error, no results returned!")
    ylabels = []
    # Integer formatting for temperatures, two decimals otherwise
    fmt = "%.0f" if dbvar in ["tmpf", "dwpf"] else "%.2f"
    hours = []
    y = []
    lastval = -99
    ranks = []
    currentrank = 0
    rows2keep = []
    for idx, row in df.iterrows():
        # Keep at most one event per clock hour
        key = row["valid"].strftime("%Y%m%d%H")
        if key in hours or pd.isnull(row[dbvar]):
            continue
        rows2keep.append(idx)
        hours.append(key)
        y.append(row[dbvar])
        lbl = fmt % (row[dbvar], )
        lbl += " -- %s" % (row["valid"].strftime("%b %d, %Y %-I:%M %p"), )
        ylabels.append(lbl)
        # Ties share a rank for max/min; above/below always increments
        if row[dbvar] != lastval or agg in ["above", "below"]:
            currentrank += 1
        ranks.append(currentrank)
        lastval = row[dbvar]
        if len(ylabels) == 10:
            break
    if not y:
        raise NoDataFound("No data found.")
    fig = plt.figure(figsize=(8, 6))
    ax = plt.axes([0.1, 0.1, 0.5, 0.8])
    ax.barh(
        range(len(y), 0, -1),
        y,
        ec="green",
        fc="green",
        height=0.8,
        align="center",
    )
    # Twin axis carries the value/date labels on the right side
    ax2 = ax.twinx()
    ax2.set_ylim(0.5, 10.5)
    ax.set_ylim(0.5, 10.5)
    ax2.set_yticks(range(1, len(y) + 1))
    ax.set_yticks(range(1, len(y) + 1))
    ax.set_yticklabels(["#%s" % (x, ) for x in ranks][::-1])
    ax2.set_yticklabels(ylabels[::-1])
    ax.grid(True, zorder=11)
    ax.set_xlabel("%s %s" % (METRICS[varname], UNITS[dbvar]))
    ab = ctx["_nt"].sts[station]["archive_begin"]
    if ab is None:
        raise NoDataFound("Unknown station metadata.")
    fitbox(
        fig,
        ("%s [%s] %s 10 Events\n%s %s (%s) (%s-%s)") % (
            ctx["_nt"].sts[station]["name"],
            station,
            titlelabel,
            METRICS[varname],
            ctx.get("threshold") if agg in ["above", "below"] else "",
            title,
            ab.year,
            datetime.datetime.now().year,
        ),
        0.01,
        0.99,
        0.91,
        0.99,
        ha="center",
    )
    fig.text(
        0.98,
        0.03,
        "Timezone: %s" % (ctx["_nt"].sts[station]["tzname"], ),
        ha="right",
    )
    return fig, df.loc[rows2keep]
def run(nexrad, name, network, cname):
    """Generate NEXRAD storm-attribute climatology histograms.

    Builds two 2-D histograms from the storm attributes log (direction vs
    speed, and day-of-year vs direction), normalized to counts per year,
    and saves the figure as <nexrad>_histogram.png.

    Args:
      nexrad (str): 3-char NEXRAD identifier (without the leading K).
      name (str): radar site name for the title.
      network (str): network label for the title.
      cname (str): matplotlib colormap name.
    """
    cmap = get_cmap(cname)
    # Masked (low-count) cells render as white
    cmap.set_bad("white")
    today = utc()
    pgconn = get_dbconn("radar", user="******")
    df = read_sql(
        """ SELECT drct, sknt, extract(doy from valid) as doy, valid from nexrad_attributes_log WHERE nexrad = %s and sknt > 0 """,
        pgconn,
        params=(nexrad, ),
        index_col=None,
    )
    if df.empty:
        print("No results for %s" % (nexrad, ))
        return
    minvalid = df["valid"].min()
    # Fractional years of record, used to normalize counts per year
    years = (today - minvalid).days / 365.25
    fig = plt.figure(figsize=(10.24, 7.68), dpi=100)
    ax = [None, None]
    ax[0] = fig.add_axes([0.06, 0.53, 0.99, 0.39])
    ax[1] = fig.add_axes([0.06, 0.06, 0.99, 0.39])
    # Panel 1: direction (10 deg bins) vs speed (~4.7 kt bins)
    H2, xedges, yedges = np.histogram2d(
        df["drct"].values,
        df["sknt"].values,
        bins=(36, 15),
        range=[[0, 360], [0, 70]],
    )
    H2 = np.ma.array(H2 / years)
    # Hide cells averaging less than one event per year
    H2.mask = np.where(H2 < 1, True, False)
    res = ax[0].pcolormesh(xedges, yedges, H2.transpose(), cmap=cmap)
    fig.colorbar(res, ax=ax[0], extend="neither")
    ax[0].set_xlim(0, 360)
    ax[0].set_ylabel("Storm Speed [kts]")
    ax[0].set_xlabel("Movement Direction (from)")
    ax[0].set_xticks((0, 90, 180, 270, 360))
    ax[0].set_xticklabels(("N", "E", "S", "W", "N"))
    ax[0].set_title(("Storm Attributes Histogram\n%s - %s K%s %s (%s)\n"
                     "%s total attrs, units are ~ (attrs+scans)/year") % (
        minvalid.strftime("%d %b %Y"),
        today.strftime("%d %b %Y"),
        nexrad,
        name,
        network,
        len(df.index),
    ))
    ax[0].grid(True)
    # Panel 2: day-of-year (~10 day bins) vs direction (10 deg bins)
    H2, xedges, yedges = np.histogram2d(
        df["doy"].values,
        df["drct"].values,
        bins=(36, 36),
        range=[[0, 365], [0, 360]],
    )
    H2 = np.ma.array(H2 / years)
    H2.mask = np.where(H2 < 1, True, False)
    res = ax[1].pcolormesh(xedges, yedges, H2.transpose(), cmap=cmap)
    fig.colorbar(res, ax=ax[1], extend="neither")
    ax[1].set_ylim(0, 360)
    ax[1].set_ylabel("Movement Direction (from)")
    ax[1].set_yticks((0, 90, 180, 270, 360))
    ax[1].set_yticklabels(("N", "E", "S", "W", "N"))
    # Month-start day-of-year tick positions
    ax[1].set_xticks(
        (1, 32, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335, 365))
    ax[1].set_xticklabels(calendar.month_abbr[1:])
    ax[1].set_xlim(0, 365)
    ax[1].grid(True)
    ax[1].set_xlabel(("Generated %s by Iowa Environmental Mesonet")
                     % (today.strftime("%d %b %Y"), ))
    fig.savefig("%s_histogram.png" % (nexrad, ))
    plt.close()
def process(model, lon, lat):
    """Generate a frost-model timeseries plot for a grid point (CGI).

    Looks up the latest model run, extracts the grid cell nearest the given
    lon/lat, plots deck/air/dew point temperatures with frost and condition
    indicator bars, and writes the PNG to stdout with an HTTP header.

    Args:
      model (str): frost model name (e.g. "bridget").
      lon (float): longitude of interest (degE).
      lat (float): latitude of interest (degN).
    """
    fig = plt.figure()
    ax = fig.add_axes([0.1, 0.1, 0.7, 0.8])
    modelts = get_latest_time(model)
    if modelts is None:
        # No model run available; emit a placeholder image and bail
        ax.text(0.5, 0.5, "No Data Found to Plot!", ha="center")
        ssw("Content-Type: image/png\n\n")
        fig.savefig(getattr(sys.stdout, "buffer", sys.stdout),
                    format="png")
        return
    nc = ncopen(
        modelts.strftime(
            ("/mesonet/share/frost/" + model + "/%Y%m%d%H%M_iaoutput.nc")))
    times = get_times(nc)
    # Nearest grid cell indices for the requested point
    (i, j) = get_ij(lon, lat, nc)
    ax.plot(
        times,
        temperature(nc.variables["bdeckt"][:, i, j], "K").value("F"),
        color="k",
        label="Bridge Deck Temp" if model == "bridget" else "Pavement",
    )
    ax.plot(
        times,
        temperature(nc.variables["tmpk"][:, i, j], "K").value("F"),
        color="r",
        label="Air Temp",
    )
    ax.plot(
        times,
        temperature(nc.variables["dwpk"][:, i, j], "K").value("F"),
        color="g",
        label="Dew Point",
    )
    # ax.set_ylim(-30,150)
    ax.set_title(("ISUMM5 %s Timeseries\n"
                  "i: %s j:%s lon: %.2f lat: %.2f Model Run: %s") % (
        model,
        i,
        j,
        nc.variables["lon"][i, j],
        nc.variables["lat"][i, j],
        modelts.astimezone(pytz.timezone(
            "America/Chicago")).strftime("%-d %b %Y %-I:%M %p"),
    ))
    ax.xaxis.set_major_locator(
        mdates.DayLocator(interval=1,
                          tz=pytz.timezone("America/Chicago")))
    ax.xaxis.set_major_formatter(
        mdates.DateFormatter("%d %b\n%Y",
                             tz=pytz.timezone("America/Chicago")))
    # Freezing reference line
    ax.axhline(32, linestyle="-.")
    ax.grid(True)
    ax.set_ylabel(r"Temperature $^\circ$F")
    ymax = ax.get_ylim()[1]
    # Indicator strips near the top of the axes; bar width is one
    # 15-minute timestep expressed in days (1/24/4)
    for i2, ifrost in enumerate(nc.variables["ifrost"][:-1, i, j]):
        ax.barh(
            ymax - 1,
            1.0 / 24.0 / 4.0,
            left=times[i2],
            fc=get_ifrost_color(ifrost),
            ec="none",
        )
    for i2, icond in enumerate(nc.variables["icond"][:-1, i, j]):
        ax.barh(
            ymax - 2,
            1.0 / 24.0 / 4.0,
            left=times[i2],
            fc=get_icond_color(model, icond),
            ec="none",
        )
    # Shrink current axis's height by 10% on the bottom to fit the legend
    box = ax.get_position()
    ax.set_position(
        [box.x0, box.y0 + box.height * 0.1, box.width,
         box.height * 0.9])
    ax.legend(
        loc="upper center",
        bbox_to_anchor=(0.5, -0.12),
        fancybox=True,
        shadow=True,
        ncol=3,
    )
    add_labels(fig)
    ssw("Content-Type: image/png\n\n")
    fig.savefig(getattr(sys.stdout, "buffer", sys.stdout), format="png")
def plotter(fdict):
    """Render the top-10 local calendar-day temperature ranges.

    Queries the IEM summary table for the ten days with the largest
    max-minus-min temperature spread (within the selected month/season)
    and lays them out as a fixed-width text table on a small figure.

    Args:
      fdict: CGI form dictionary fed to get_autoplot_context.

    Returns:
      (matplotlib.figure.Figure, pandas.DataFrame)
    """
    # Monospace font keeps the text-table columns aligned
    tbl_font = FontProperties()
    tbl_font.set_family('monospace')
    tbl_font.set_size(16)
    pgconn = get_dbconn('iem')
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['zstation']
    network = ctx['network']
    month = ctx['month']
    # Map the season keywords onto month lists
    season_lookup = {
        'all': range(1, 13),
        'fall': [9, 10, 11],
        'winter': [12, 1, 2],
        'spring': [3, 4, 5],
        'summer': [6, 7, 8],
    }
    if month in season_lookup:
        months = season_lookup[month]
    else:
        ts = datetime.datetime.strptime("2000-" + month + "-01", '%Y-%b-%d')
        # Pad with a bogus 999 so the SQL "in %s" tuple has two members
        months = [ts.month, 999]
    nt = NetworkTable(network)
    df = read_sql(
        """ SELECT day as date, max_tmpf as max, min_tmpf as min, max_tmpf::int - min_tmpf::int as difference from summary s JOIN stations t on (s.iemid = t.iemid) where t.id = %s and t.network = %s and extract(month from day) in %s and max_tmpf is not null and min_tmpf is not null ORDER by difference DESC, date DESC LIMIT 10 """,
        pgconn, params=(station, network, tuple(months)),
        parse_dates=('date', ), index_col=None)
    # Ties share the same (minimum) rank
    df['rank'] = df['difference'].rank(ascending=False, method='min')
    fig = plt.figure(figsize=(5.5, 4))
    fig.text(
        0.5, 0.9,
        ("%s [%s] %s-%s\n"
         "Top 10 Local Calendar Day [%s] "
         "Temperature Differences") % (
            nt.sts[station]['name'], station,
            nt.sts[station]['archive_begin'].year,
            datetime.date.today().year, month.capitalize()),
        ha='center')
    # Column header row
    fig.text(0.1, 0.81, " # Date Diff Low High", fontproperties=tbl_font)
    # One text row per ranked day, stepping downward
    ypos = 0.74
    for _, entry in df.iterrows():
        fig.text(
            0.1, ypos,
            ("%2.0f %11s %3.0f %3.0f %3.0f") % (
                entry['rank'], entry['date'].strftime("%d %b %Y"),
                entry['difference'], entry['min'], entry['max']),
            fontproperties=tbl_font)
        ypos -= 0.07
    return fig, df