def postprocess(self, view=False, filename=None, web=False, memcache=None, memcachekey=None, memcacheexpire=300, pqstr=None):
    """Postprocess the current matplotlib figure into a slim and trim PNG.

    The figure is rendered and then palette-quantized to shrink the PNG
    size.  The result can then be, in any combination: stored to
    memcache, streamed to a CGI client, queued via LDM pqinsert, viewed
    with ``xv``, or copied to *filename*.

    Args:
        view (bool): spawn ``xv`` to display the temp file.
        filename (str): copy the resulting PNG to this path.
        web (bool): emit a CGI header + PNG to stdout, then return.
        memcache (obj): memcache client to store the image with.
        memcachekey (str): key to store the image under.
        memcacheexpire (int): memcache expiration in seconds.
        pqstr (str): LDM pqinsert product string.
    """
    ram = BytesIO()
    plt.savefig(ram, format='png')
    ram.seek(0)
    im = Image.open(ram)
    # Quantize to an adaptive palette for a much smaller PNG
    im2 = im.convert('RGB').convert('P', palette=Image.ADAPTIVE)
    if memcache and memcachekey:
        ram = BytesIO()
        im2.save(ram, format='png')
        memcache.set(memcachekey, ram.getvalue(), time=memcacheexpire)
        sys.stderr.write(
            "memcached key %s set time %s" % (memcachekey, memcacheexpire)
        )
    if web:
        ssw("Content-Type: image/png\n\n")
        im2.save(getattr(sys.stdout, 'buffer', sys.stdout), format='png')
        return
    # mkstemp atomically creates the file, unlike the insecure/deprecated
    # mktemp() that was used previously
    (tmpfd, tmpfn) = tempfile.mkstemp()
    os.close(tmpfd)
    try:
        im2.save(tmpfn, format='PNG')
        if pqstr is not None:
            # argument-list form avoids shell quoting/injection issues
            subprocess.call(["/home/ldm/bin/pqinsert", "-p", pqstr, tmpfn])
        if view:
            subprocess.call(["xv", tmpfn])
        if filename is not None:
            shutil.copyfile(tmpfn, filename)
    finally:
        # always remove the temp file, even if a step above raised
        os.unlink(tmpfn)
def make_plot(huc12, scenario):
    """Build the slope-length vs bulk-slope scatter w/ KDE for a HUC12.

    Returns:
        bytes: the figure rendered as a PNG image.
    """
    import seaborn as sns
    os.chdir("/i/%s/slp/%s/%s" % (scenario, huc12[:8], huc12[8:]))
    rows = []
    for slpfn in glob.glob("*.slp"):
        profile = read_slp(slpfn)
        # Bulk slope is total drop over total run of the final segment
        bulk = (profile[-1]['y'][-1]) / profile[-1]['x'][-1]
        hillslope_length = profile[-1]['x'][-1]
        if bulk < -1:
            # Treat anything steeper than 100% slope as suspect, skip it
            print("Greater than 100%% slope, %s %s" % (slpfn, bulk))
            continue
        rows.append([(0 - bulk) * 100., hillslope_length])
    data = np.array(rows)
    g = sns.jointplot(
        data[:, 1], data[:, 0], s=40, stat_func=None, zorder=1, color='tan'
    ).plot_joint(sns.kdeplot, n_levels=6)
    g.ax_joint.set_xlabel("Slope Length [m]")
    g.ax_joint.set_ylabel("Bulk Slope [%]")
    g.fig.subplots_adjust(top=.8, bottom=0.2, left=0.15)
    g.ax_joint.grid()
    g.ax_marg_x.set_title(
        ("HUC12 %s DEP Hillslope\n"
         "Kernel Density Estimate (KDE) Overlain") % (huc12, ),
        fontsize=10)
    ram = BytesIO()
    plt.gcf().set_size_inches(3.6, 2.4)
    plt.savefig(ram, format='png', dpi=100)
    ram.seek(0)
    return ram.read()
def make_plot(huc12, scenario):
    """Make the map"""
    # NOTE(review): this appears to duplicate the other make_plot() in this
    # file with only formatting differences -- consider consolidating.
    import seaborn as sns
    # Hillslope .slp files live under a per-scenario/HUC8/HUC12 tree
    os.chdir("/i/%s/slp/%s/%s" % (scenario, huc12[:8], huc12[8:]))
    res = []
    for fn in glob.glob("*.slp"):
        slp = read_slp(fn)
        # Bulk slope: total drop over total run of the last segment
        bulk = (slp[-1]['y'][-1]) / slp[-1]['x'][-1]
        length = slp[-1]['x'][-1]
        if bulk < -1:
            # Steeper than 100% slope is treated as bad data, skip it
            print("Greater than 100%% slope, %s %s" % (fn, bulk))
            continue
        # Store positive percent slope along with hillslope length
        res.append([(0 - bulk) * 100., length])
    data = np.array(res)
    g = sns.jointplot(
        data[:, 1], data[:, 0], s=40, stat_func=None, zorder=1, color='tan'
    ).plot_joint(sns.kdeplot, n_levels=6)
    g.ax_joint.set_xlabel("Slope Length [m]")
    g.ax_joint.set_ylabel("Bulk Slope [%]")
    g.fig.subplots_adjust(top=.8, bottom=0.2, left=0.15)
    g.ax_joint.grid()
    g.ax_marg_x.set_title((
        "HUC12 %s DEP Hillslope\n"
        "Kernel Density Estimate (KDE) Overlain") % (huc12, ),
        fontsize=10)
    ram = BytesIO()
    plt.gcf().set_size_inches(3.6, 2.4)
    plt.savefig(ram, format='png', dpi=100)
    ram.seek(0)
    # Hand back raw PNG bytes to the caller
    return ram.read()
def process(uri):
    """Process a feature image request.

    The *uri* should look something like
    "/onsite/features/2016/11/161125.png".  On success the image bytes
    are emitted via ``ssw`` and the hit is logged; otherwise an error
    message or a placeholder "not found" PNG is emitted.

    Args:
        uri (str): REQUEST_URI to service, may be None.
    """
    if uri is None:
        send_content_type("text")
        ssw("ERROR!")
        return
    match = PATTERN.match(uri)
    if match is None:
        send_content_type("text")
        ssw("ERROR!")
        sys.stderr.write("feature content failure: %s\n" % (repr(uri), ))
        return
    data = match.groupdict()
    fn = ("/mesonet/share/features/%(yyyy)s/%(mm)s/"
          "%(yymmdd)s%(extra)s.%(suffix)s") % data
    if os.path.isfile(fn):
        send_content_type(data['suffix'])
        # bugfix: use a context manager so the file handle is not leaked
        with open(fn, 'rb') as fh:
            ssw(fh.read())
        dblog(data['yymmdd'])
    else:
        send_content_type('png')
        # lazy imports: only pay the matplotlib startup cost on a miss
        from io import BytesIO
        from pyiem.plot.use_agg import plt
        (_, ax) = plt.subplots(1, 1)
        ax.text(0.5, 0.5, "Feature Image was not Found!",
                transform=ax.transAxes, ha='center')
        plt.axis('off')
        ram = BytesIO()
        plt.savefig(ram, format='png')
        # close the figure so repeated misses do not accumulate state
        plt.close()
        ram.seek(0)
        ssw(ram.read())
def make_plot(huc12, scenario):
    """Chart the monthly average soil detachment for one HUC12.

    Returns:
        bytes: the chart rendered as a PNG image.
    """
    dbconn = get_dbconn("idep")
    cur = dbconn.cursor()
    # Check that we have data for this date!
    cur.execute(
        """
        WITH data as (
            SELECT extract(year from valid) as yr,
                extract(month from valid) as mo,
                sum(avg_loss) * 4.463 as val
            from results_by_huc12
            WHERE huc_12 = %s and scenario = %s GROUP by mo, yr)
        SELECT mo, avg(val), stddev(val), count(*) from data
        GROUP by mo ORDER by mo ASC
        """,
        (huc12, scenario),
    )
    months, data, confidence = [], [], []
    for row in cur:
        months.append(row[0])
        data.append(row[1])
        # NOTE(review): stddev / count^2 is an unusual error-bar formula;
        # a standard error would be stddev / sqrt(count) -- confirm intent.
        confidence.append(row[2] / (row[3] ** 2))
    months = np.array(months)
    (_, ax) = plt.subplots(1, 1)
    bars = ax.bar(months - 0.4, data, color="tan", yerr=confidence)
    ax.grid(True)
    ax.set_ylabel("Soil Detachment (t/a)")
    ax.set_title(
        ("Monthly Average Soil Detachment (t/a)\nHUC12: %s") % (huc12, ))
    ax.set_xticks(np.arange(1, 13))
    ax.set_xticklabels(calendar.month_abbr[1:])
    ax.set_xlim(0.5, 12.5)
    ax.set_ylim(0, max(data) * 1.1)

    def _annotate(rects):
        # write each bar's value just above the top of the bar
        for rect in rects:
            height = rect.get_height()
            ax.text(
                rect.get_x() + rect.get_width() / 2.0,
                1.05 * height,
                "%.1f" % height,
                ha="center",
                va="bottom",
            )

    _annotate(bars)
    ram = BytesIO()
    plt.savefig(ram, format="png", dpi=100)
    ram.seek(0)
    return ram.read()
def application(environ, start_response):
    """WSGI entry: render the turbine plot for a requested date.

    Query parameters:
        turbinename: turbine identifier (default "I 050-350")
        date: YYYYMMDD date to plot (default "20100401")

    Returns a single-element list holding the PNG bytes.
    """
    headers = [("Content-type", "image/png")]
    start_response("200 OK", headers)
    form = parse_formvars(environ)
    turbinename = form.get("turbinename", "I 050-350")
    ts = datetime.datetime.strptime(form.get("date", "20100401"), "%Y%m%d")
    workflow(turbinename, ts)
    bio = BytesIO()
    plt.savefig(bio)
    # bugfix: close the figure so a long-lived WSGI worker does not leak
    # matplotlib figures across requests
    plt.close()
    return [bio.getvalue()]
def error_image(message, fmt):
    """Render *message* onto a small "autoplot failed" image.

    Args:
        message (str): text to display within the image.
        fmt (str): matplotlib output format, eg 'png'.

    Returns:
        bytes: the rendered image.
    """
    plt.close()
    _, ax = plt.subplots(1, 1)
    msg = "IEM Autoplot generation resulted in an error\n%s" % (message,)
    ax.text(0.5, 0.5, msg, transform=ax.transAxes, ha="center", va="center")
    buf = BytesIO()
    plt.axis("off")
    plt.savefig(buf, format=fmt, dpi=100)
    buf.seek(0)
    plt.close()
    return buf.read()
def application(environ, start_response):
    """WSGI entry: plot model output at a requested lon/lat point.

    Only runs the processing when both `lon` and `lat` are supplied;
    otherwise whatever figure state exists is rendered as-is.
    """
    form = parse_formvars(environ)
    if "lon" in form and "lat" in form:
        process(
            form.get("model"), float(form.get("lon")), float(form.get("lat"))
        )
    start_response("200 OK", [("Content-type", "image/png")])
    bio = BytesIO()
    plt.savefig(bio)
    # bugfix: close the figure so a long-lived WSGI worker does not leak
    # matplotlib figures across requests
    plt.close()
    return [bio.getvalue()]
def application(environ, start_response):
    """WSGI entry: render the plot for a requested timestamp.

    Query parameters:
        ts: timestamp in YYYYMMDDHHMM form (default "200006302000")
    """
    form = parse_formvars(environ)
    ts = form.get("ts", "200006302000")
    ts = datetime.datetime.strptime(ts, "%Y%m%d%H%M")
    # yawsource = form.get("yawsource", "yaw")
    headers = [("Content-type", "image/png")]
    start_response("200 OK", headers)
    do(ts)
    bio = BytesIO()
    plt.savefig(bio)
    # bugfix: close the figure so a long-lived WSGI worker does not leak
    # matplotlib figures across requests
    plt.close()
    return [bio.getvalue()]
def process(env):
    """Process a feature image request, honoring HTTP Range headers.

    The REQUEST_URI should look something like
    "/onsite/features/2016/11/161125.png".  Emits either the (possibly
    partial) image bytes, an error message, or a "not found" PNG.

    Args:
        env (dict): the WSGI/CGI environment.
    """
    uri = env.get('REQUEST_URI')
    if uri is None:
        send_content_type("text")
        ssw("ERROR!")
        return
    match = PATTERN.match(uri)
    if match is None:
        send_content_type("text")
        ssw("ERROR!")
        sys.stderr.write("feature content failure: %s\n" % (repr(uri), ))
        return
    data = match.groupdict()
    fn = ("/mesonet/share/features/%(yyyy)s/%(mm)s/"
          "%(yymmdd)s%(extra)s.%(suffix)s") % data
    if os.path.isfile(fn):
        # partial content support: "bytes=start-end" with inclusive end
        rng = env.get("HTTP_RANGE", "bytes=0-")
        tokens = rng.replace("bytes=", "").split("-", 1)
        # bugfix: context manager so the file handle is not leaked
        with open(fn, 'rb') as fh:
            resdata = fh.read()
        totalsize = len(resdata)
        stripe = slice(
            int(tokens[0]),
            totalsize if tokens[-1] == '' else (int(tokens[-1]) + 1))
        send_content_type(data['suffix'], len(resdata), stripe)
        ssw(resdata[stripe])
        dblog(data['yymmdd'])
    else:
        send_content_type('png')
        # lazy imports to avoid matplotlib startup cost on the hot path
        from io import BytesIO
        from pyiem.plot.use_agg import plt
        (_, ax) = plt.subplots(1, 1)
        ax.text(0.5, 0.5, "Feature Image was not Found!",
                transform=ax.transAxes, ha='center')
        plt.axis('off')
        ram = BytesIO()
        plt.savefig(ram, format='png')
        # close the figure so repeated misses do not accumulate state
        plt.close()
        ram.seek(0)
        ssw(ram.read())
def application(environ, start_response):
    """WSGI entry: timeseries plot of ISU ASI tower data.

    Reads start/end timestamp components (syear..ehour) and a `station`
    id from the form vars, then renders wind speed and air temperature
    panels as a PNG.
    """
    nt = NetworkTable("ISUASI")
    form = parse_formvars(environ)
    # Only honor the form-provided times when every component is present
    if ("syear" in form and "eyear" in form and "smonth" in form and
            "emonth" in form and "sday" in form and "eday" in form and
            "shour" in form and "ehour" in form):
        # NOTE(review): `.value` / `.getvalue` usage below suggests these
        # were cgi.FieldStorage fields; confirm parse_formvars provides
        # compatible objects.
        sts = datetime.datetime(
            int(form["syear"].value),
            int(form["smonth"].value),
            int(form["sday"].value),
            int(form["shour"].value),
            0,
        )
        ets = datetime.datetime(
            int(form["eyear"].value),
            int(form["emonth"].value),
            int(form["eday"].value),
            int(form["ehour"].value),
            0,
        )
    else:
        # Default demo period
        sts = datetime.datetime(2012, 12, 1)
        ets = datetime.datetime(2012, 12, 3)
    station = form.getvalue("station", "ISU4003")
    # Unknown station gets a plain-text error response
    if station not in nt.sts:
        start_response("200 OK", [("Content-type", "text/plain")])
        return [b"ERROR"]
    pgconn = get_dbconn("other")
    icursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    # NOTE(review): SQL is assembled by string interpolation; station is
    # validated against the network table above, but parameterized
    # queries would still be safer.
    sql = """
        SELECT * from asi_data WHERE station = '%s' and
        valid BETWEEN '%s' and '%s' ORDER by valid ASC
    """ % (
        station,
        sts.strftime("%Y-%m-%d %H:%M"),
        ets.strftime("%Y-%m-%d %H:%M"),
    )
    icursor.execute(sql)
    # Accumulate the 12 channel averages plus the timestamps
    data = {}
    for i in range(1, 13):
        data["ch%savg" % (i, )] = []
    valid = []
    for row in icursor:
        for i in range(1, 13):
            data["ch%savg" % (i, )].append(row["ch%savg" % (i, )])
        valid.append(row["valid"])
    for i in range(1, 13):
        data["ch%savg" % (i, )] = np.array(data["ch%savg" % (i, )])
    if len(valid) < 3:
        # Not enough data to plot; emit an apology image instead
        (_fig, ax) = plt.subplots(1, 1)
        ax.text(0.5, 0.5, "Sorry, no data found!", ha="center")
        start_response("200 OK", [("Content-Type", "image/png")])
        io = BytesIO()
        plt.savefig(io, format="png")
        io.seek(0)
        return [io.read()]
    (_fig, ax) = plt.subplots(2, 1, sharex=True)
    # Top panel: wind speed at three heights
    ax[0].grid(True)
    ax[0].plot(valid, data["ch1avg"], linewidth=2, color="r", zorder=2,
               label="48.5m")
    ax[0].plot(
        valid,
        data["ch3avg"],
        linewidth=2,
        color="purple",
        zorder=2,
        label="32m",
    )
    ax[0].plot(
        valid,
        data["ch5avg"],
        linewidth=2,
        color="black",
        zorder=2,
        label="10m",
    )
    ax[0].set_ylabel("Wind Speed [m/s]")
    ax[0].legend(loc=(0.05, -0.15), ncol=3)
    ax[0].set_xlim(min(valid), max(valid))
    days = (max(valid) - min(valid)).days
    central = pytz.timezone("America/Chicago")
    # Pick date tick locator/format based on the period length plotted
    if days >= 3:
        interval = max(int(days / 7), 1)
        ax[0].xaxis.set_major_locator(
            mdates.DayLocator(interval=interval, tz=central))
        ax[0].xaxis.set_major_formatter(
            mdates.DateFormatter("%d %b\n%Y", tz=central))
    else:
        ax[0].xaxis.set_major_locator(
            mdates.AutoDateLocator(maxticks=10, tz=central))
        ax[0].xaxis.set_major_formatter(
            mdates.DateFormatter("%-I %p\n%d %b", tz=central))
    ax[0].set_title("ISUASI Station: %s Timeseries"
                    % (nt.sts[station]["name"], ))
    # Bottom panel: air temperature at two heights
    ax[1].plot(valid, data["ch10avg"], color="b", label="3m")
    ax[1].plot(valid, data["ch11avg"], color="r", label="48.5m")
    ax[1].grid(True)
    ax[1].set_ylabel("Air Temperature [C]")
    ax[1].legend(loc="best")
    start_response("200 OK", [("Content-Type", "image/png")])
    io = BytesIO()
    plt.savefig(io, format="png")
    io.seek(0)
    return [io.read()]
def main(argv):
    """Make a yearly average/total DEP map for one variable.

    Args:
        argv (list): command line; argv[1] is the variable name (a key
            of V2NAME/V2UNITS/V2RAMP/V2MULTI), argv[2] is the aggregate
            operator, 'avg' or 'sum'.
    """
    v = argv[1]
    agg = argv[2]
    # Fixed 2008-2017 period of record
    ts = datetime.date(2008, 1, 1)
    ts2 = datetime.date(2017, 12, 31)
    scenario = 0
    # suggested for runoff and precip
    if V2UNITS[v] in ['mm', 'inches']:
        colors = ['#ffffa6', '#9cf26d', '#76cc94', '#6399ba', '#5558a1']
    # suggested for detachment
    elif v in ['avg_loss', 'avg_loss_metric']:
        colors = ['#cbe3bb', '#c4ff4d', '#ffff4d', '#ffc44d', '#ff4d4d',
                  '#c34dee']
    # suggested for delivery
    elif v in ['avg_delivery', 'avg_delivery_metric']:
        colors = ['#ffffd2', '#ffff4d', '#ffe0a5', '#eeb74d', '#ba7c57',
                  '#96504d']
    # NOTE(review): `colors` is unbound (NameError) when v matches none
    # of the branches above -- confirm callers only pass known variables.
    cmap = mpcolors.ListedColormap(colors, 'james')
    cmap.set_under('white')
    cmap.set_over('black')
    pgconn = get_dbconn('idep')
    title = "for %s" % (ts.strftime("%-d %B %Y"),)
    if ts != ts2:
        title = "between %s and %s" % (ts.strftime("%-d %b %Y"),
                                       ts2.strftime("%-d %b %Y"))
    mp = MapPlot(axisbg='#EEEEEE', nologo=True, sector='iowa',
                 nocaption=True,
                 title=("DEP %s %s %s"
                        ) % (V2NAME[v.replace("_metric", "")],
                             "Yearly Average" if agg == 'avg' else 'Total',
                             title),
                 caption='Daily Erosion Project')
    # NOTE(review): v and agg are spliced directly into the SQL; they
    # are CLI-provided here, but parameterizing would still be safer.
    df = read_postgis("""
    WITH data as (
      SELECT huc_12, extract(year from valid) as yr,
      sum(""" + v.replace("_metric", "") + """) as d from results_by_huc12
      WHERE scenario = %s and valid >= %s and valid <= %s
      GROUP by huc_12, yr),
    agg as (
      SELECT huc_12, """ + agg + """(d) as d from data GROUP by huc_12)
    SELECT ST_Transform(simple_geom, 4326) as geo, coalesce(d.d, 0) as data
    from huc12 i LEFT JOIN agg d
    ON (i.huc_12 = d.huc_12) WHERE i.scenario = %s and i.states ~* 'IA'
    """, pgconn, params=(scenario, ts, ts2, scenario), geom_col='geo',
                      index_col=None)
    # Convert to display units
    df['data'] = df['data'] * V2MULTI[v]
    if df['data'].max() < 0.01:
        # Degenerate case: tiny values get a fixed fine-grained ramp
        bins = [0.01, 0.02, 0.03, 0.04, 0.05]
    else:
        bins = np.array(V2RAMP[v]) * (10. if agg == 'sum' else 1.)
    norm = mpcolors.BoundaryNorm(bins, cmap.N)
    # m.ax.add_geometries(df['geo'], ccrs.PlateCarree())
    # Paint each HUC12 polygon with its binned color
    for _, row in df.iterrows():
        c = cmap(norm([row['data'], ]))[0]
        arr = np.asarray(row['geo'].exterior)
        points = mp.ax.projection.transform_points(ccrs.Geodetic(),
                                                   arr[:, 0], arr[:, 1])
        p = Polygon(points[:, :2], fc=c, ec='k', zorder=2, lw=0.1)
        mp.ax.add_patch(p)
    mp.drawcounties()
    mp.drawcities()
    lbl = [round(_, 2) for _ in bins]
    u = "%s, Avg: %.2f" % (V2UNITS[v], df['data'].mean())
    mp.draw_colorbar(bins, cmap, norm, clevlabels=lbl, units=u,
                     title="%s :: %s" % (V2NAME[v], V2UNITS[v]))
    plt.savefig('%s_%s_%s%s.png' % (ts.year, ts2.year, v,
                                    "_sum" if agg == 'sum' else ''))
def do(valid, yawsource):
    """ Generate plot for a given timestamp """
    # NOTE(review): yawsource is accepted but never used here; the query
    # always reads `yawangle` -- confirm whether this was intended.
    pgconn = get_dbconn('scada')
    cursor = pgconn.cursor()
    cursor.execute("""select turbine_id, power, lon, lat, yawangle,
    windspeed, alpha1 from data s JOIN turbines t on (t.id = s.turbine_id)
    WHERE valid = %s and power is not null and yawangle is not null
    and windspeed is not null and alpha1 is not null""", (valid,))
    lons = []
    lats = []
    vals = []
    u = []
    v = []
    ws = []
    yaw = []
    pitch = []
    for row in cursor:
        lons.append(row[2])
        lats.append(row[3])
        vals.append(row[1])
        ws.append(row[5])
        yaw.append(row[4])
        # Convert speed + direction into u/v components for the quiver
        a, b = uv(speed(row[5], 'MPS'), direction(row[4], 'deg'))
        u.append(a.value('MPS'))
        v.append(b.value('MPS'))
        pitch.append(row[6])
    pitch = np.array(pitch)
    vals = np.array(vals)
    avgv = np.average(vals)
    # vals2 = vals - avgv
    fig = plt.figure(figsize=(12.8, 7.2))
    ax = fig.add_axes([0.14, 0.1, 0.52, 0.8])
    cmap = plt.cm.get_cmap('jet')
    cmap.set_under('tan')
    cmap.set_over('black')
    # cmap = plt.cm.get_cmap('seismic')
    # clevs = np.arange(-250, 251, 50)
    clevs = np.arange(0, 1501, 150)
    norm = mpcolors.BoundaryNorm(clevs, cmap.N)
    # Base map panel: wind quivers + power-colored turbine locations
    ax.quiver(lons, lats, u, v, zorder=1)
    ax.scatter(lons, lats, c=vals, norm=norm, edgecolor='none',
               cmap=cmap, s=100, zorder=2)
    ax.get_yaxis().get_major_formatter().set_useOffset(False)
    ax.get_xaxis().get_major_formatter().set_useOffset(False)
    ax.xaxis.set_major_formatter(plt.NullFormatter())
    ax.yaxis.set_major_formatter(plt.NullFormatter())
    ax.set_title(("Turbine Power [kW]\n"
                  "Valid: %s"
                  ) % (valid.strftime("%d %b %Y %I:%M %p")))
    make_colorbar(clevs, norm, cmap)
    # NOTE(review): "\mu" etc should be raw strings to avoid invalid
    # escape warnings; left as-is to keep this change comment-only.
    ax.text(0.05, 0.05, "Turbine Power: $\mu$= %.1f $\sigma$= %.1f kW" % (
        avgv, np.std(vals)), transform=ax.transAxes)
    ax.text(0.05, 0.01, "Wind $\mu$= %.1f $\sigma$= %.1f $ms^{-1}$" % (
        np.average(ws), np.std(ws)), transform=ax.transAxes)
    ax.set_xlabel("Longitude $^\circ$E")
    ax.set_ylabel("Latitude $^\circ$N")
    ax.set_xlim(-93.475, -93.328)
    ax.set_ylim(42.20, 42.31)
    # Next plot: power vs wind speed
    ax2 = fig.add_axes([0.7, 0.80, 0.28, 0.18])
    ax2.scatter(ws, vals, edgecolor='k', c='k')
    ax2.text(0.5, -0.25, "Wind Speed $ms^{-1}$",
             transform=ax2.transAxes, ha='center')
    ax2.set_xlim(0, 20)
    # ax2.set_ylabel("Power kW")
    ax2.grid(True)
    # Next plot: power vs yaw
    ax3 = fig.add_axes([0.7, 0.57, 0.28, 0.18], sharey=ax2)
    ax3.scatter(yaw, vals, edgecolor='k', c='k')
    ax3.text(0.5, -0.25, "Yaw", transform=ax3.transAxes, ha='center')
    # ax3.set_ylabel("Power kW")
    ax3.set_xlim(0, 360)
    ax3.set_xticks(np.arange(0, 361, 45))
    ax3.set_xticklabels(['N', 'NE', 'E', 'SE', 'S', 'SW', 'W', 'NW', 'N'])
    ax3.grid(True)
    # Next plot: power vs blade pitch
    ax4 = fig.add_axes([0.7, 0.32, 0.28, 0.18], sharey=ax2)
    ax4.scatter(pitch, vals, edgecolor='k', c='k')
    ax4.text(0.5, -0.25, "Pitch $^\circ$", transform=ax4.transAxes,
             ha='center')
    ax4.set_ylim(-10, 1600)
    ax4.grid(True)
    # Next plot: wind speed vs blade pitch
    ax5 = fig.add_axes([0.7, 0.07, 0.28, 0.18], sharex=ax4)
    ax5.scatter(pitch, ws, edgecolor='k', c='k')
    ax5.text(0.5, -0.25, "Pitch $^\circ$", transform=ax5.transAxes,
             ha='center')
    ax5.grid(True)
    ax5.set_ylim(bottom=-10)
    # maxpitch = max(np.where(pitch > 20, 0, pitch))
    # ax5.set_xlim(np.ma.minimum(pitch)-0.5, maxpitch+0.5)
    ax5.set_xlim(-3, 20.1)
    ax5.set_ylim(0, 20)
    ax5.text(-0.1, 0.5, "Wind Speed $ms^{-1}$", transform=ax5.transAxes,
             ha='center', va='center', rotation=90)
    # Write PNG bytes to stdout (py2/py3 compatible buffer lookup)
    plt.savefig(getattr(sys.stdout, 'buffer', sys.stdout))
def workflow(turbinename, ts):
    """Plot sampled vs observed data for one turbine over ~3 days.

    Renders four shared-x panels (power, pitch, yaw, wind speed) and
    writes the PNG to stdout.

    Args:
        turbinename (str): turbine name to look up in the turbines table.
        ts (datetime): starting date of the plot window.
    """
    # NOTE(review): `cursor` and `PGCONN` are module-level globals here.
    cursor.execute(
        """SELECT unitnumber from turbines where turbinename = %s""",
        (turbinename, ),
    )
    unitnumber = cursor.fetchone()[0]
    ts1 = ts.strftime("%Y-%m-%d")
    # 73 hours gives a slightly-over-3-day window
    ts2 = (ts + datetime.timedelta(hours=73)).strftime("%Y-%m-%d")
    # Table names are per-unit; unitnumber comes from the database itself
    df = read_sql(
        """
    select coalesce(s.valid, d.valid) as stamp, s.power as s_power,
    s.pitch as s_pitch, s.yaw as s_yaw, s.windspeed as s_windspeed,
    d.power as d_power, d.pitch as d_pitch, d.yaw as d_yaw,
    d.windspeed as d_windspeed
    from sampled_data_""" + unitnumber + """ s FULL OUTER JOIN
    turbine_data_""" + unitnumber + """ d
    on (d.valid = s.valid) WHERE s.valid BETWEEN %s and %s
    ORDER by stamp ASC
    """,
        PGCONN,
        params=[ts1, ts2],
    )
    (_, ax) = plt.subplots(4, 1, sharex=True, figsize=(8, 11))
    ax[0].set_title("%s - %s Plot for Turbine: %s" % (ts1, ts2, turbinename))
    # Panel 1: power; bars are 1-minute sampled data, markers are obs
    ax[0].bar(
        df["stamp"],
        df["s_power"],
        width=1.0 / 1440.0,
        fc="tan",
        ec="tan",
        zorder=1,
        label="1 Minute Sampled",
    )
    # -10 filters the sentinel/missing values out of the observations
    data = df[df["d_power"] > -10]
    ax[0].scatter(
        data["stamp"].values,
        data["d_power"].values,
        zorder=2,
        marker="+",
        s=40,
        label="Observations",
    )
    ax[0].set_ylim(-50, 1600)
    ax[0].legend(loc=(0.0, -0.2), ncol=2)
    ax[0].set_ylabel("Power kW")
    ax[0].grid(True)
    # Panel 2: blade pitch ---------------------------------------------
    ax[1].bar(
        df["stamp"],
        df["s_pitch"],
        width=1.0 / 1440.0,
        fc="tan",
        ec="tan",
        zorder=1,
    )
    data = df[df["d_pitch"] > -10]
    ax[1].scatter(
        data["stamp"].values,
        data["d_pitch"].values,
        zorder=2,
        marker="+",
        s=40,
    )
    ax[1].set_ylim(bottom=-5)
    # NOTE(review): "\circ" should be a raw string; left untouched here.
    ax[1].set_ylabel("Pitch $^\circ$")
    ax[1].grid(True)
    # Panel 3: yaw -----------------------------------------------------
    ax[2].bar(
        df["stamp"],
        df["s_yaw"],
        width=1.0 / 1440.0,
        fc="tan",
        ec="tan",
        zorder=1,
    )
    data = df[df["d_yaw"] > -10]
    ax[2].scatter(data["stamp"].values, data["d_yaw"].values,
                  zorder=2, marker="+", s=40)
    ax[2].text(0.05, -0.1, "* Uncorrected Yaw", transform=ax[2].transAxes)
    ax[2].set_ylim(0, 360)
    ax[2].set_yticks([0, 90, 180, 270, 360])
    ax[2].set_yticklabels(["N", "E", "S", "W", "N"])
    ax[2].grid(True)
    ax[2].set_ylabel("Turbine Yaw")
    # Panel 4: wind speed ----------------------------------------------
    ax[3].bar(
        df["stamp"],
        df["s_windspeed"],
        width=1.0 / 1440.0,
        fc="tan",
        ec="tan",
        zorder=1,
    )
    data = df[df["d_windspeed"] > -10]
    ax[3].scatter(
        data["stamp"].values,
        data["d_windspeed"].values,
        zorder=2,
        marker="+",
        s=40,
    )
    ax[3].grid(True)
    ax[3].set_ylabel("Wind Speed mps")
    ax[3].set_ylim(bottom=0)
    ax[3].xaxis.set_major_formatter(
        mdates.DateFormatter("%-I %p\n%-d/%b",
                             tz=pytz.timezone("America/Chicago")))
    # Emit PNG to stdout (py2/py3 buffer compatibility)
    plt.savefig(getattr(sys.stdout, "buffer", sys.stdout))
def make_overviewmap(form):
    """Draw a pretty map of just the HUC.

    Args:
        form (dict): request parameters; `huc` is an optional HUC8 or
            HUC12 identifier and `zoom` is the map buffer in km.

    Returns:
        (bytes, bool): PNG image bytes and a cacheable flag.
    """
    huc = form.get("huc")
    plt.close()
    projection = EPSG[5070]
    # bugfix: previously a provided-but-short huc left `huclimiter`
    # unbound and raised NameError below
    if huc is None or len(huc) < 8:
        huclimiter = ""
    else:
        # restrict the query to the containing HUC8
        huclimiter = " and substr(huc_12, 1, 8) = '%s' " % (huc[:8],)
    with get_sqlalchemy_conn("idep") as conn:
        df = read_postgis(
            f"""
            SELECT simple_geom as geom, huc_12,
            ST_x(ST_Transform(ST_Centroid(geom), 4326)) as centroid_x,
            ST_y(ST_Transform(ST_Centroid(geom), 4326)) as centroid_y,
            hu_12_name
            from huc12 i WHERE i.scenario = 0 {huclimiter}
            """,
            conn,
            geom_col="geom",
            index_col="huc_12",
        )
    minx, miny, maxx, maxy = df["geom"].total_bounds
    buf = float(form.get("zoom", 10.0)) * 1000.0  # 10km
    hucname = "" if huc not in df.index else df.at[huc, "hu_12_name"]
    subtitle = "The HUC8 is in tan"
    # bugfix: guard len() so a missing huc does not raise TypeError
    if huc is not None and len(huc) == 12:
        subtitle = "HUC12 highlighted in red, the HUC8 it resides in is in tan"
    m = MapPlot(
        axisbg="#EEEEEE",
        logo="dep",
        sector="custom",
        south=miny - buf,
        north=maxy + buf,
        west=minx - buf,
        east=maxx + buf,
        projection=projection,
        continentalcolor="white",
        title="DEP HUC %s:: %s" % (huc, hucname),
        subtitle=subtitle,
        titlefontsize=20,
        subtitlefontsize=18,
        caption="Daily Erosion Project",
    )
    for _huc12, row in df.iterrows():
        p = Polygon(
            row["geom"].exterior.coords,
            fc="red" if _huc12 == huc else "tan",
            ec="k",
            zorder=Z_OVERLAY2,
            lw=0.1,
        )
        m.ax.add_patch(p)
        # If this is our HUC, add some text to prevent cities overlay overlap
        if _huc12 == huc:
            m.plot_values(
                [row["centroid_x"]],
                [row["centroid_y"]],
                [" . "],
                color="None",
                outlinecolor="None",
            )
    if huc is not None:
        m.drawcounties()
        m.drawcities()
    ram = BytesIO()
    plt.savefig(ram, format="png", dpi=100)
    plt.close()
    ram.seek(0)
    return ram.read(), True
def main():
    """CGI entry: plot ISU ASI tower wind/temperature timeseries.

    Reads start/end timestamp components and a station id from the CGI
    form and writes a PNG (or a text error) to stdout.
    """
    nt = NetworkTable("ISUASI")
    form = cgi.FieldStorage()
    # Only use form-provided times when every component is present
    if ("syear" in form and "eyear" in form and "smonth" in form and
            "emonth" in form and "sday" in form and "eday" in form and
            "shour" in form and "ehour" in form):
        sts = datetime.datetime(int(form["syear"].value),
                                int(form["smonth"].value),
                                int(form["sday"].value),
                                int(form["shour"].value), 0)
        ets = datetime.datetime(int(form["eyear"].value),
                                int(form["emonth"].value),
                                int(form["eday"].value),
                                int(form["ehour"].value), 0)
    else:
        # Default demo period
        sts = datetime.datetime(2012, 12, 1)
        ets = datetime.datetime(2012, 12, 3)
    station = form.getvalue('station', 'ISU4003')
    # Unknown stations get a plain-text error response
    if station not in nt.sts:
        print('Content-type: text/plain\n')
        print('ERROR')
        return
    pgconn = get_dbconn('other')
    icursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    # NOTE(review): SQL is built by interpolation; station was validated
    # against the network table above, but parameters would be safer.
    sql = """
        SELECT * from asi_data WHERE station = '%s' and
        valid BETWEEN '%s' and '%s' ORDER by valid ASC
    """ % (station, sts.strftime("%Y-%m-%d %H:%M"),
           ets.strftime("%Y-%m-%d %H:%M"))
    icursor.execute(sql)
    # Accumulate the 12 channel averages plus the timestamps
    data = {}
    for i in range(1, 13):
        data['ch%savg' % (i, )] = []
    valid = []
    for row in icursor:
        for i in range(1, 13):
            data['ch%savg' % (i, )].append(row['ch%savg' % (i, )])
        valid.append(row['valid'])
    for i in range(1, 13):
        data['ch%savg' % (i, )] = np.array(data['ch%savg' % (i, )])
    if len(valid) < 3:
        # Not enough data to plot; emit an apology image instead
        (fig, ax) = plt.subplots(1, 1)
        ax.text(0.5, 0.5, "Sorry, no data found!", ha='center')
        print("Content-Type: image/png\n")
        plt.savefig(sys.stdout, format='png')
        return
    (fig, ax) = plt.subplots(2, 1, sharex=True)
    # Top panel: wind speed at three heights
    ax[0].grid(True)
    ax[0].plot(valid, data['ch1avg'], linewidth=2, color='r', zorder=2,
               label='48.5m')
    ax[0].plot(valid, data['ch3avg'], linewidth=2, color='purple',
               zorder=2, label='32m')
    ax[0].plot(valid, data['ch5avg'], linewidth=2, color='black',
               zorder=2, label='10m')
    ax[0].set_ylabel("Wind Speed [m/s]")
    ax[0].legend(loc=(0.05, -0.15), ncol=3)
    ax[0].set_xlim(min(valid), max(valid))
    days = (max(valid) - min(valid)).days
    central = pytz.timezone("America/Chicago")
    # Pick date tick locator/format based on the period length plotted
    if days >= 3:
        interval = max(int(days / 7), 1)
        ax[0].xaxis.set_major_locator(
            mdates.DayLocator(interval=interval, tz=central))
        ax[0].xaxis.set_major_formatter(
            mdates.DateFormatter('%d %b\n%Y', tz=central))
    else:
        ax[0].xaxis.set_major_locator(
            mdates.AutoDateLocator(maxticks=10, tz=central))
        ax[0].xaxis.set_major_formatter(
            mdates.DateFormatter('%-I %p\n%d %b', tz=central))
    ax[0].set_title("ISUASI Station: %s Timeseries"
                    % (nt.sts[station]['name'], ))
    # Bottom panel: air temperature at two heights
    ax[1].plot(valid, data['ch10avg'], color='b', label='3m')
    ax[1].plot(valid, data['ch11avg'], color='r', label='48.5m')
    ax[1].grid(True)
    ax[1].set_ylabel("Air Temperature [C]")
    ax[1].legend(loc='best')
    ssw("Content-Type: image/png\n\n")
    # Write PNG bytes to stdout (py2/py3 compatible buffer lookup)
    fig.savefig(getattr(sys.stdout, 'buffer', sys.stdout), format='png')
def application(environ, start_response):
    """Serve a feature image, with HTTP Range (partial content) support.

    The REQUEST_URI should look something like
    "/onsite/features/2016/11/161125.png".
    """
    headers = [("Accept-Ranges", "bytes")]
    uri = environ.get("REQUEST_URI")
    # Option 1, no URI is provided.
    if uri is None:
        headers.append(get_content_type("text"))
        start_response("500 Internal Server Error", headers)
        return [b"ERROR!"]
    match = PATTERN.match(uri)
    # Option 2, the URI pattern is unknown.
    if match is None:
        headers.append(get_content_type("text"))
        start_response("500 Internal Server Error", headers)
        sys.stderr.write("feature content failure: %s\n" % (repr(uri), ))
        return [b"ERROR!"]
    data = match.groupdict()
    fn = ("/mesonet/share/features/%(yyyy)s/%(mm)s/"
          "%(yymmdd)s%(extra)s.%(suffix)s") % data
    # Option 3, we have no file.
    if not os.path.isfile(fn):
        # lazy import to save the expense of firing this up when this loads
        # pylint: disable=import-outside-toplevel
        from pyiem.plot.use_agg import plt
        headers.append(get_content_type("png"))
        (_, ax) = plt.subplots(1, 1)
        ax.text(
            0.5,
            0.5,
            "Feature Image was not Found!",
            transform=ax.transAxes,
            ha="center",
        )
        plt.axis("off")
        ram = BytesIO()
        plt.savefig(ram, format="png")
        plt.close()
        ram.seek(0)
        start_response("404 Not Found", headers)
        return [ram.read()]
    # Option 4, we can support this request.
    headers.append(get_content_type(data["suffix"]))
    rng = environ.get("HTTP_RANGE", "bytes=0-")
    tokens = rng.replace("bytes=", "").split("-", 1)
    # bugfix: context manager so the file handle is not leaked
    with open(fn, "rb") as fh:
        resdata = fh.read()
    totalsize = len(resdata)
    # Range end is inclusive, so the slice stop is end + 1
    stripe = slice(
        int(tokens[0]),
        totalsize if tokens[-1] == "" else (int(tokens[-1]) + 1),
    )
    status = "200 OK"
    if totalsize != (stripe.stop - stripe.start):
        status = "206 Partial Content"
    headers.append(
        ("Content-Length", "%.0f" % (stripe.stop - stripe.start, )))
    if environ.get("HTTP_RANGE") and stripe is not None:
        secondval = ("" if environ.get("HTTP_RANGE") == "bytes=0-"
                     else (stripe.stop - 1))
        headers.append((
            "Content-Range",
            "bytes %s-%s/%s" % (stripe.start, secondval, totalsize),
        ))
    dblog(data["yymmdd"])
    start_response(status, headers)
    return [resdata[stripe]]
def make_map(huc, ts, ts2, scenario, v, form):
    """Make the DEP choropleth map for a variable over a period.

    Args:
        huc (str): optional HUC8/HUC12 to restrict the map to.
        ts (date): period start.
        ts2 (date): period end (equal to ts for a daily map).
        scenario (int): DEP scenario identifier.
        v (str): variable name (qc_precip, avg_runoff, avg_loss,
            avg_delivery).
        form (dict): extra options (averaged, iowa, mn, progressbar,
            cruse).

    Returns:
        (bytes, bool): PNG payload and whether it may be cached.
    """
    projection = EPSG[5070]
    plt.close()
    # suggested for runoff and precip
    if v in ["qc_precip", "avg_runoff"]:
        # c = ['#ffffa6', '#9cf26d', '#76cc94', '#6399ba', '#5558a1']
        cmap = james()
    # suggested for detachment
    elif v in ["avg_loss"]:
        # c =['#cbe3bb', '#c4ff4d', '#ffff4d', '#ffc44d', '#ff4d4d', '#c34dee']
        cmap = dep_erosion()
    # suggested for delivery
    elif v in ["avg_delivery"]:
        # c =['#ffffd2', '#ffff4d', '#ffe0a5', '#eeb74d', '#ba7c57', '#96504d']
        cmap = dep_erosion()
    # NOTE(review): cmap is unbound for any other v -- confirm inputs.
    pgconn = get_dbconn("idep")
    cursor = pgconn.cursor()
    title = "for %s" % (ts.strftime("%-d %B %Y"),)
    if ts != ts2:
        title = "for period between %s and %s" % (
            ts.strftime("%-d %b %Y"),
            ts2.strftime("%-d %b %Y"),
        )
    if "averaged" in form:
        title = "averaged between %s and %s (2008-2017)" % (
            ts.strftime("%-d %b"),
            ts2.strftime("%-d %b"),
        )
    # Check that we have data for this date!
    cursor.execute(
        "SELECT value from properties where key = 'last_date_0'",
    )
    lastts = datetime.datetime.strptime(cursor.fetchone()[0], "%Y-%m-%d")
    floor = datetime.date(2007, 1, 1)
    if ts > lastts.date() or ts2 > lastts.date() or ts < floor:
        # Requested period is outside the available archive
        plt.text(
            0.5,
            0.5,
            "Data Not Available\nPlease Check Back Later!",
            fontsize=20,
            ha="center",
        )
        ram = BytesIO()
        plt.savefig(ram, format="png", dpi=100)
        plt.close()
        ram.seek(0)
        # not cacheable -- data may appear later
        return ram.read(), False
    if huc is None:
        huclimiter = ""
    elif len(huc) == 8:
        huclimiter = " and substr(i.huc_12, 1, 8) = '%s' " % (huc,)
    elif len(huc) == 12:
        huclimiter = " and i.huc_12 = '%s' " % (huc,)
    if "iowa" in form:
        huclimiter += " and i.states ~* 'IA' "
    if "mn" in form:
        huclimiter += " and i.states ~* 'MN' "
    # NOTE(review): v and huclimiter are spliced into the SQL via
    # f-strings; confirm both are validated upstream.
    if "averaged" in form:
        # 11 years of data is standard
        # 10 years is for the switchgrass one-off
        with get_sqlalchemy_conn("idep") as conn:
            df = read_postgis(
                f"""
            WITH data as (
            SELECT huc_12, sum({v}) / 10. as d from results_by_huc12
            WHERE scenario = %s and to_char(valid, 'mmdd') between %s and %s
            and valid between '2008-01-01' and '2018-01-01'
            GROUP by huc_12)
            SELECT simple_geom as geom,
            coalesce(d.d, 0) * %s as data
            from huc12 i LEFT JOIN data d
            ON (i.huc_12 = d.huc_12) WHERE i.scenario = %s {huclimiter}
            """,
                conn,
                params=(
                    scenario,
                    ts.strftime("%m%d"),
                    ts2.strftime("%m%d"),
                    V2MULTI[v],
                    0,
                ),
                geom_col="geom",
            )
    else:
        with get_sqlalchemy_conn("idep") as conn:
            df = read_postgis(
                f"""
            WITH data as (
            SELECT huc_12, sum({v}) as d from results_by_huc12
            WHERE scenario = %s and valid between %s and %s
            GROUP by huc_12)
            SELECT simple_geom as geom,
            coalesce(d.d, 0) * %s as data
            from huc12 i LEFT JOIN data d
            ON (i.huc_12 = d.huc_12) WHERE i.scenario = %s {huclimiter}
            """,
                conn,
                params=(
                    scenario,
                    ts.strftime("%Y-%m-%d"),
                    ts2.strftime("%Y-%m-%d"),
                    V2MULTI[v],
                    0,
                ),
                geom_col="geom",
            )
    minx, miny, maxx, maxy = df["geom"].total_bounds
    buf = 10000.0  # 10km
    m = MapPlot(
        axisbg="#EEEEEE",
        logo="dep",
        sector="custom",
        south=miny - buf,
        north=maxy + buf,
        west=minx - buf,
        east=maxx + buf,
        projection=projection,
        title="DEP %s by HUC12 %s" % (V2NAME[v], title),
        titlefontsize=16,
        caption="Daily Erosion Project",
    )
    if ts == ts2:
        # Daily
        bins = RAMPS["english"][0]
    else:
        bins = RAMPS["english"][1]
    norm = mpcolors.BoundaryNorm(bins, cmap.N)
    # Paint each HUC12 with its binned color
    for _, row in df.iterrows():
        p = Polygon(
            row["geom"].exterior.coords,
            fc=cmap(norm([row["data"]]))[0],
            ec="k",
            zorder=5,
            lw=0.1,
        )
        m.ax.add_patch(p)
    label_scenario(m.ax, scenario, pgconn)
    lbl = [round(_, 2) for _ in bins]
    if huc is not None:
        m.drawcounties()
        m.drawcities()
    m.draw_colorbar(
        bins, cmap, norm, units=V2UNITS[v], clevlabels=lbl, spacing="uniform"
    )
    if "progressbar" in form:
        # Figure-level progress bar showing how far through the year the
        # requested period extends
        fig = plt.gcf()
        avgval = df["data"].mean()
        fig.text(
            0.01,
            0.905,
            "%s: %4.1f T/a" % (ts.year if "averaged" not in form else "Avg",
                               avgval),
            fontsize=14,
        )
        bar_width = 0.758
        # yes, a small one off with years having 366 days
        proportion = (ts2 - ts).days / 365.0 * bar_width
        rect1 = Rectangle(
            (0.15, 0.905),
            bar_width,
            0.02,
            color="k",
            zorder=40,
            transform=fig.transFigure,
            figure=fig,
        )
        fig.patches.append(rect1)
        rect2 = Rectangle(
            (0.151, 0.907),
            proportion,
            0.016,
            color=cmap(norm([avgval]))[0],
            zorder=50,
            transform=fig.transFigure,
            figure=fig,
        )
        fig.patches.append(rect2)
    if "cruse" in form:
        # Crude conversion of T/a to mm depth
        depth = avgval / 5.0
        m.ax.text(
            0.9,
            0.92,
            "%.2fmm" % (depth,),
            zorder=1000,
            fontsize=24,
            transform=m.ax.transAxes,
            ha="center",
            va="center",
            bbox=dict(color="k", alpha=0.5, boxstyle="round,pad=0.1"),
            color="white",
        )
    ram = BytesIO()
    plt.savefig(ram, format="png", dpi=100)
    plt.close()
    ram.seek(0)
    return ram.read(), True
def workflow(environ, form, fmt):
    """Run one autoplot request; return an (HTTP status, content) pair.

    Consults memcache first, then dispatches to the numbered autoplot
    backend and serializes the result in the requested format.

    Args:
        environ (dict): WSGI environment (for REQUEST_URI logging).
        form (dict): parsed request variables (`q` query, `p` script#).
        fmt (str): output format (png/svg/pdf/js/mapbox/txt/csv/xlsx).
    """
    # q is the full query string that was rewritten to use by apache
    q = form.get("q", "")
    fdict = parser(q)
    # p=number is the python backend code called by this framework
    scriptnum = int(form.get("p", 0))
    dpi = int(fdict.get("dpi", 100))
    # memcache keys can not have spaces
    mckey = get_mckey(scriptnum, fdict, fmt)
    mc = memcache.Client(["iem-memcached:11211"], debug=0)
    # Don't fetch memcache when we have _cb set for an inbound CGI
    res = mc.get(mckey) if fdict.get("_cb") is None else None
    if res:
        return HTTP200, res
    # memcache failed to save us work, so work we do!
    start_time = datetime.datetime.utcnow()
    # res should be a 3 length tuple
    try:
        res, meta = get_res_by_fmt(scriptnum, fmt, fdict)
    except NoDataFound as exp:
        return HTTP400, handle_error(exp, fmt, environ.get("REQUEST_URI"))
    except Exception as exp:
        # Everything else should be considered fatal
        return HTTP500, handle_error(exp, fmt, environ.get("REQUEST_URI"))
    end_time = datetime.datetime.utcnow()
    sys.stderr.write(
        ("Autoplot[%3s] Timing: %7.3fs Key: %s\n")
        % (scriptnum, (end_time - start_time).total_seconds(), mckey)
    )
    [mixedobj, df, report] = res
    # Our output content
    content = ""
    if fmt == "js" and isinstance(mixedobj, dict):
        # Highcharts config gets injected directly into the page
        content = ('$("#ap_container").highcharts(%s);') % (
            json.dumps(mixedobj),
        )
    elif fmt in ["js", "mapbox"]:
        content = mixedobj
    elif fmt in ["svg", "png", "pdf"] and isinstance(mixedobj, plt.Figure):
        # if our content is a figure, then add some fancy metadata to plot
        if meta.get("plotmetadata", True):
            plot_metadata(mixedobj, start_time, end_time, scriptnum)
        ram = BytesIO()
        plt.savefig(ram, format=fmt, dpi=dpi)
        plt.close()
        ram.seek(0)
        content = ram.read()
        del ram
    elif fmt in ["svg", "png", "pdf"] and mixedobj is None:
        return (
            HTTP400,
            error_image(
                ("plot requested but backend " "does not support plots"), fmt
            ),
        )
    elif fmt == "txt" and report is not None:
        content = report
    elif fmt in ["csv", "xlsx"] and df is not None:
        if fmt == "csv":
            content = df.to_csv(index=(df.index.name is not None),
                                header=True)
        elif fmt == "xlsx":
            # Can't write to ram buffer yet, unimplmented upstream
            (_, tmpfn) = tempfile.mkstemp()
            df.index.name = None
            # Need to set engine as xlsx/xls can't be inferred
            with pd.ExcelWriter(tmpfn, engine="openpyxl") as writer:
                df.to_excel(writer, encoding="latin-1", sheet_name="Sheet1")
            content = open(tmpfn, "rb").read()
            os.unlink(tmpfn)
        del df
    else:
        sys.stderr.write(
            ("Undefined edge case: fmt: %s uri: %s\n")
            % (fmt, environ.get("REQUEST_URI"))
        )
        raise Exception("Undefined autoplot action |%s|" % (fmt,))
    try:
        # Best-effort cache write; failures are logged, not fatal
        mc.set(mckey, content, meta.get("cache", 43200))
    except Exception as exp:
        sys.stderr.write(
            "Exception while writting key: %s\n%s\n" % (mckey, exp)
        )
    if isinstance(mixedobj, plt.Figure):
        plt.close()
    return HTTP200, content
def do(valid, yawsource):
    """Generate the turbine-farm power plot for a given timestamp.

    Args:
      valid (datetime): the sampled_data timestamp to plot.
      yawsource (str): which yaw column to use; must be one of
        yaw/yaw2/yaw3, otherwise the function returns without plotting.

    Side effects: writes a PNG to stdout via plt.savefig.
    """
    # yawsource is interpolated into SQL below -- this whitelist keeps it safe
    if yawsource not in ["yaw", "yaw2", "yaw3"]:
        return
    yawdict = {"yaw": "Orginal", "yaw2": "daryl corrected", "yaw3": "daryl v2"}
    pgconn = get_dbconn("mec")
    cursor = pgconn.cursor()
    cursor.execute(
        """select turbineid, power, ST_x(geom), ST_y(geom), """
        + yawsource
        + """, windspeed, pitch from sampled_data s JOIN turbines t on (t.id = s.turbineid) WHERE valid = %s and power is not null and """
        + yawsource
        + """ is not null and windspeed is not null and pitch is not null""",
        (valid, ),
    )
    lons = []
    lats = []
    vals = []
    u = []
    v = []
    ws = []
    yaw = []
    pitch = []
    for row in cursor:
        lons.append(row[2])
        lats.append(row[3])
        vals.append(row[1])
        ws.append(row[5])
        yaw.append(row[4])
        # decompose speed/direction into u/v wind components for the quiver
        a, b = uv(speed(row[5], "MPS"), direction(row[4], "deg"))
        u.append(a.value("MPS"))
        v.append(b.value("MPS"))
        pitch.append(row[6])
    pitch = np.array(pitch)
    vals = np.array(vals)
    avgv = np.average(vals)
    # vals2 = vals - avgv
    fig = plt.figure(figsize=(12.8, 7.2))
    ax = fig.add_axes([0.14, 0.1, 0.52, 0.8])
    cmap = plt.cm.get_cmap("jet")
    cmap.set_under("tan")
    cmap.set_over("black")
    clevs = np.arange(0, 1651, 150)
    norm = mpcolors.BoundaryNorm(clevs, cmap.N)
    ax.quiver(lons, lats, u, v, zorder=1)
    ax.scatter(
        lons,
        lats,
        c=vals,
        norm=norm,
        edgecolor="none",
        cmap=cmap,
        s=100,
        zorder=2,
    )
    ax.get_yaxis().get_major_formatter().set_useOffset(False)
    ax.get_xaxis().get_major_formatter().set_useOffset(False)
    ax.set_title(("Farm Turbine Power [kW] (1min sampled dataset)\n"
                  "Valid: %s, yaw source: %s") % (
        valid.strftime("%d %b %Y %I:%M %p"),
        yawdict.get(yawsource, yawsource),
    ))
    make_colorbar(clevs, norm, cmap)
    # fix: raw strings below -- \m, \s, \c are invalid escape sequences and
    # raise warnings on Python 3; byte values are unchanged
    ax.text(
        0.05,
        0.05,
        r"Turbine Power: $\mu$= %.1f $\sigma$= %.1f kW"
        % (avgv, np.std(vals)),
        transform=ax.transAxes,
    )
    ax.text(
        0.05,
        0.01,
        r"Wind $\mu$= %.1f $\sigma$= %.1f $ms^{-1}$"
        % (np.average(ws), np.std(ws)),
        transform=ax.transAxes,
    )
    ax.set_xlabel(r"Longitude $^\circ$E")
    ax.set_ylabel(r"Latitude $^\circ$N")
    ax.set_xlim(-94.832, -94.673)
    ax.set_ylim(42.545, 42.671)
    ax.get_xaxis().set_ticks([])
    ax.get_yaxis().set_ticks([])
    # Next plot: power vs wind speed
    ax2 = fig.add_axes([0.7, 0.80, 0.28, 0.18])
    ax2.scatter(ws, vals, edgecolor="k", c="k")
    ax2.text(
        0.5,
        -0.25,
        "Wind Speed $ms^{-1}$",
        transform=ax2.transAxes,
        ha="center",
    )
    ax2.set_xlim(0, 20)
    # ax2.set_ylabel("Power kW")
    ax2.grid(True)
    # Next plot: power vs yaw
    ax3 = fig.add_axes([0.7, 0.57, 0.28, 0.18], sharey=ax2)
    ax3.scatter(yaw, vals, edgecolor="k", c="k")
    ax3.text(0.5, -0.25, "Yaw", transform=ax3.transAxes, ha="center")
    # ax3.set_ylabel("Power kW")
    ax3.set_xlim(0, 360)
    ax3.set_xticks(np.arange(0, 361, 45))
    ax3.set_xticklabels(["N", "NE", "E", "SE", "S", "SW", "W", "NW", "N"])
    ax3.grid(True)
    # Next plot: power vs pitch
    ax4 = fig.add_axes([0.7, 0.32, 0.28, 0.18], sharey=ax2)
    ax4.scatter(pitch, vals, edgecolor="k", c="k")
    ax4.text(
        0.5, -0.25, r"Pitch $^\circ$", transform=ax4.transAxes, ha="center"
    )
    ax4.set_ylim(-10, 1600)
    ax4.grid(True)
    # Next plot: wind speed vs pitch
    ax5 = fig.add_axes([0.7, 0.07, 0.28, 0.18], sharex=ax4)
    ax5.scatter(pitch, ws, edgecolor="k", c="k")
    ax5.text(
        0.5, -0.25, r"Pitch $^\circ$", transform=ax5.transAxes, ha="center"
    )
    ax5.grid(True)
    ax5.set_ylim(bottom=-10)
    # maxpitch = max(np.where(pitch > 20, 0, pitch))
    # ax5.set_xlim(np.ma.minimum(pitch)-0.5, maxpitch+0.5)
    ax5.set_xlim(-3, 20.1)
    ax5.set_ylim(0, 20)
    ax5.text(
        -0.1,
        0.5,
        "Wind Speed $ms^{-1}$",
        transform=ax5.transAxes,
        ha="center",
        va="center",
        rotation=90,
    )
    plt.savefig(getattr(sys.stdout, "buffer", sys.stdout))
def main(argv):
    """Go Main Go.

    Args:
      argv (list): argv[1] is the variable name (a V2UNITS key) and
        argv[2] is the aggregation, either 'avg' or 'sum'.

    Raises:
      ValueError: when agg is not a whitelisted aggregate or v matches no
        known color ramp.
      KeyError: when v is not a V2UNITS key.

    Side effects: writes a '<year>_<year>_<v>[_sum].png' map to CWD.
    """
    v = argv[1]
    agg = argv[2]
    # Security fix: agg is interpolated into the SQL below, so it must be
    # whitelisted rather than passed through from the command line verbatim.
    if agg not in ("avg", "sum"):
        raise ValueError("agg must be 'avg' or 'sum', got %r" % (agg,))
    ts = datetime.date(2008, 1, 1)
    ts2 = datetime.date(2017, 12, 31)
    scenario = 0
    # suggested for runoff and precip
    if V2UNITS[v] in ["mm", "inches"]:
        colors = ["#ffffa6", "#9cf26d", "#76cc94", "#6399ba", "#5558a1"]
    # suggested for detachment
    elif v in ["avg_loss", "avg_loss_metric"]:
        colors = [
            "#cbe3bb",
            "#c4ff4d",
            "#ffff4d",
            "#ffc44d",
            "#ff4d4d",
            "#c34dee",
        ]
    # suggested for delivery
    elif v in ["avg_delivery", "avg_delivery_metric"]:
        colors = [
            "#ffffd2",
            "#ffff4d",
            "#ffe0a5",
            "#eeb74d",
            "#ba7c57",
            "#96504d",
        ]
    else:
        # fix: previously fell through with `colors` unbound -> NameError
        raise ValueError("no color ramp defined for variable %r" % (v,))
    cmap = mpcolors.ListedColormap(colors, "james")
    cmap.set_under("white")
    cmap.set_over("black")
    pgconn = get_dbconn("idep")
    title = "for %s" % (ts.strftime("%-d %B %Y"), )
    if ts != ts2:
        title = "between %s and %s" % (
            ts.strftime("%-d %b %Y"),
            ts2.strftime("%-d %b %Y"),
        )
    mp = MapPlot(
        axisbg="#EEEEEE",
        nologo=True,
        sector="iowa",
        nocaption=True,
        title=("DEP %s %s %s") % (
            V2NAME[v.replace("_metric", "")],
            "Yearly Average" if agg == "avg" else "Total",
            title,
        ),
        caption="Daily Erosion Project",
    )
    # NOTE: v is constrained to V2UNITS keys by the lookup above, so the
    # column-name interpolation below cannot carry arbitrary SQL.
    df = read_postgis(
        """
    WITH data as (
      SELECT huc_12, extract(year from valid) as yr, sum("""
        + v.replace("_metric", "")
        + """) as d from results_by_huc12
      WHERE scenario = %s and valid >= %s and valid <= %s
      GROUP by huc_12, yr),
    agg as (
      SELECT huc_12, """
        + agg
        + """(d) as d from data GROUP by huc_12)
    SELECT ST_Transform(simple_geom, 4326) as geo, coalesce(d.d, 0) as data
    from huc12 i LEFT JOIN agg d ON (i.huc_12 = d.huc_12)
    WHERE i.scenario = %s and i.states ~* 'IA'
    """,
        pgconn,
        params=(scenario, ts, ts2, scenario),
        geom_col="geo",
        index_col=None,
    )
    df["data"] = df["data"] * V2MULTI[v]
    if df["data"].max() < 0.01:
        bins = [0.01, 0.02, 0.03, 0.04, 0.05]
    else:
        bins = np.array(V2RAMP[v]) * (10.0 if agg == "sum" else 1.0)
    norm = mpcolors.BoundaryNorm(bins, cmap.N)
    # m.ax.add_geometries(df['geo'], ccrs.PlateCarree())
    for _, row in df.iterrows():
        c = cmap(norm([row["data"]]))[0]
        arr = np.asarray(row["geo"].exterior)
        points = mp.ax.projection.transform_points(
            ccrs.Geodetic(), arr[:, 0], arr[:, 1]
        )
        p = Polygon(points[:, :2], fc=c, ec="k", zorder=2, lw=0.1)
        mp.ax.add_patch(p)
    mp.drawcounties()
    mp.drawcities()
    lbl = [round(_, 2) for _ in bins]
    u = "%s, Avg: %.2f" % (V2UNITS[v], df["data"].mean())
    mp.draw_colorbar(
        bins,
        cmap,
        norm,
        clevlabels=lbl,
        units=u,
        title="%s :: %s" % (V2NAME[v], V2UNITS[v]),
    )
    plt.savefig(
        "%s_%s_%s%s.png"
        % (ts.year, ts2.year, v, "_sum" if agg == "sum" else "")
    )