def plot():
    """Render the wind-chill vs air-temperature difference map.

    Loads the pre-computed min/max grids from .npy files, builds a
    12-color discrete ramp, and writes the result to ``test.png``.
    """
    # Discrete ramp: ten samples of inferno_r plus two Pastel1 colors
    ramp = list(plt.get_cmap('inferno_r')(np.arange(10) / 10.))
    ramp.extend(list(plt.get_cmap('Pastel1')(np.arange(2) / 2.)))
    cmap = ListedColormap(ramp)
    cmap.set_under('tan')
    cmap.set_over('white')
    # Pre-computed grids saved by an earlier step
    coldest = np.load('minval.npy')
    warmest = np.load('maxval.npy')
    lons = np.load('lons.npy')
    lats = np.load('lats.npy')
    spread = warmest - coldest
    mp = MapPlot(sector='midwest', statebordercolor='white',
                 title=(r"Diff between coldest wind chill and warmest "
                        "air temp 29 Jan - 3 Feb 2019"),
                 subtitle=("based on hourly NCEP Real-Time Mesoscale Analysis "
                           "(RTMA) ending midnight CST"))
    # 0..100 by 10, then two extra top bins
    levels = list(range(0, 101, 10)) + [105, 110]
    mp.pcolormesh(lons, lats, spread, levels, cmap=cmap, clip_on=False,
                  units=r"$^\circ$F", spacing='proportional')
    mp.postprocess(filename='test.png')
def plotter(fdict):
    """Map VTEC event data either by UGC fill or polygon heatmap.

    Args:
        fdict (dict): autoplot context form values.

    Returns:
        (matplotlib.Figure, pandas.DataFrame)
    """
    ctx = get_autoplot_context(fdict, get_description())
    # Convert datetimes to UTC
    ctx['sdate'] = ctx['sdate'].replace(tzinfo=pytz.utc)
    ctx['edate'] = ctx['edate'].replace(tzinfo=pytz.utc)
    state = ctx['state']
    phenomena = ctx['phenomena']
    significance = ctx['significance']
    station = ctx['station'][:4]
    t = ctx['t']
    ilabel = (ctx['ilabel'] == 'yes')
    geo = ctx['geo']
    nt = NetworkTable("WFO")
    # Helpers populate ctx with the gridded/ugc data to plot
    if geo == 'ugc':
        do_ugc(ctx)
    elif geo == 'polygon':
        do_polygon(ctx)
    subtitle = "based on IEM Archives %s" % (ctx.get('subtitle', ''), )
    if t == 'cwa':
        subtitle = "Plotted for %s (%s), %s" % (nt.sts[station]['name'],
                                                station, subtitle)
    else:
        subtitle = "Plotted for %s, %s" % (state_names[state], subtitle)
    m = MapPlot(sector=('state' if t == 'state' else 'cwa'),
                state=state,
                # 4-char WFO ids are trimmed to the 3-char CWA id
                cwa=(station if len(station) == 3 else station[1:]),
                axisbg='white',
                title=('%s %s (%s.%s)') % (ctx['title'],
                                           vtec.get_ps_string(
                                               phenomena, significance),
                                           phenomena, significance),
                subtitle=subtitle, nocaption=True,
                titlefontsize=16)
    if geo == 'ugc':
        cmap = plt.get_cmap('Paired')
        cmap.set_under('white')
        cmap.set_over('white')
        m.fill_ugcs(ctx['data'], ctx['bins'], cmap=cmap, ilabel=ilabel)
    else:
        cmap = plt.get_cmap('gist_ncar')
        cmap.set_under('white')
        cmap.set_over('black')
        res = m.pcolormesh(ctx['lons'], ctx['lats'], ctx['data'],
                           ctx['bins'], cmap=cmap, units='count')
        # Cut down on SVG et al size
        res.set_rasterized(True)
    if ctx['drawc'] == 'yes':
        m.drawcounties()
    return m.fig, ctx['df']
def plotter(fdict):
    """Map Local Storm Report counts aggregated by WFO or state.

    Args:
        fdict (dict): autoplot context form values.

    Returns:
        (matplotlib.Figure, pandas.DataFrame)
    """
    pgconn = get_dbconn('postgis')
    ctx = get_autoplot_context(fdict, get_description())
    sts = ctx['sdate']
    sts = sts.replace(tzinfo=pytz.utc)
    ets = ctx['edate']
    by = ctx['by']
    ets = ets.replace(tzinfo=pytz.utc)
    # `by` is interpolated directly into the SQL below, so restrict it to
    # the two known-good column names to prevent SQL injection.
    if by not in ('wfo', 'state'):
        raise ValueError("Invalid by value of %s provided" % (by, ))
    myfilter = ctx['filter']
    params = [sts, ets]
    if myfilter == 'NONE':
        tlimiter = ''
    elif myfilter == 'NRS':
        tlimiter = " and typetext not in ('HEAVY RAIN', 'SNOW', 'HEAVY SNOW') "
    elif myfilter == 'CON':
        tlimiter = (" and typetext in ('TORNADO', 'HAIL', 'TSTM WND GST', "
                    "'TSTM WND DMG') ")
    else:
        # Parameterize the user-provided type text instead of string
        # formatting it into the query (SQL injection hardening).
        tlimiter = " and typetext = %s "
        params.append(myfilter)
    df = read_sql("""
    WITH data as (
        SELECT distinct wfo, state, valid, type, magnitude, geom
        from lsrs where valid >= %s and valid < %s """ + tlimiter + """
    )
    SELECT """ + by + """, count(*) from data GROUP by """ + by + """
    """, pgconn, params=params, index_col=by)
    data = {}
    for idx, row in df.iterrows():
        # legacy San Juan identifier
        if idx == 'JSJ':
            idx = 'SJU'
        data[idx] = row['count']
    maxv = df['count'].max()
    bins = np.linspace(1, maxv, 12, dtype='i')
    bins[-1] += 1  # ensure the max value falls inside the last bin
    mp = MapPlot(
        sector='nws', axisbg='white',
        title=(
            'Preliminary/Unfiltered Local Storm Report Counts %s'
        ) % (PDICT[by],),
        subtitlefontsize=10,
        subtitle=('Valid %s - %s UTC, type limiter: %s'
                  ) % (sts.strftime("%d %b %Y %H:%M"),
                       ets.strftime("%d %b %Y %H:%M"),
                       MDICT.get(myfilter)))
    if by == 'wfo':
        mp.fill_cwas(data, bins=bins, cmap=plt.get_cmap('plasma'),
                     ilabel=True)
    else:
        mp.fill_states(
            data, bins=bins, cmap=plt.get_cmap('plasma'), ilabel=True)
    return mp.fig, df
def magic(ax, df, colname, title, ctx):
    """Scatter events by day-of-year vs year, with a decade histogram.

    Args:
        ax (matplotlib.axes.Axes): main axes to draw the scatter on.
        df (pandas.DataFrame): must carry `year`, `doy`, and `colname`.
        colname (str): boolean (0/1) column flagging events to plot.
        title (str): text placed above the axes.
        ctx (dict): autoplot context, `cmap` key names the colormap.
    """
    # Only rows flagged in colname are plotted
    df2 = df[df[colname] == 1]
    ax.text(0, 1.02, title, transform=ax.transAxes)
    ax.set_xlim(0, 367)
    ax.grid(True)
    # First day-of-year for each month
    ax.set_xticks((1, 32, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335,
                   365))
    ax.set_xticklabels(calendar.month_abbr[1:])
    bbox = ax.get_position()
    # Narrow side axes to the right for the per-decade counts
    sideax = plt.axes([bbox.x1 + 0.01, bbox.y0, 0.09, 0.35])
    ylim = [df['year'].min(), df['year'].max()]
    # Round the year range out to full decades
    year0 = ylim[0] - (ylim[0] % 10)
    year1 = ylim[1] + (10 - ylim[1] % 10)
    cmap = plt.get_cmap(ctx['cmap'])
    norm = mpcolors.BoundaryNorm(np.arange(year0, year1 + 1, 10), cmap.N)
    ax.scatter(df2['doy'], df2['year'],
               color=cmap(norm(df2['year'].values)))
    ax.set_yticks(np.arange(year0, year1, 20))
    ax.set_ylim(*ylim)
    # Histogram of event years binned by decade
    cnts, edges = np.histogram(df2['year'].values,
                               np.arange(year0, year1 + 1, 10))
    sideax.barh(edges[:-1], cnts, height=10, align='edge',
                color=cmap(norm(edges[:-1])))
    sideax.set_yticks(np.arange(year0, year1, 20))
    sideax.set_yticklabels([])
    sideax.set_ylim(*ylim)
    sideax.grid(True)
    sideax.set_xlabel("Decade Count")
def plot():
    """Map the RTMA 2m temperature difference (warmest 3 Oct - coldest 4 Oct).

    Reads pre-computed Kelvin grids from .npy files, converts to Fahrenheit,
    and writes the result to ``test.png``.
    """
    cmap = plt.get_cmap('inferno_r')
    # cmap.set_under('black')
    # cmap.set_over('red')
    # Grids are stored in Kelvin; convert to Fahrenheit for display
    minval = (np.load('minval.npy') * units.degK).to(units.degF)
    maxval = (np.load('maxval.npy') * units.degK).to(units.degF)
    diff = maxval - minval
    lons = np.load('lons.npy')
    lats = np.load('lats.npy')
    mp = MapPlot(sector='conus',
                 title=(r"Difference between warmest 3 Oct and coldest 4 "
                        "Oct 2m Temperature"),
                 subtitle=("based on hourly NCEP Real-Time Mesoscale Analysis "
                           "(RTMA) ending midnight CDT"))
    # Annotate the pixel-level and domain-wide ranges on the map
    mp.ax.text(0.5, 0.97,
               (r"Pixel Difference Range: %.1f$^\circ$F to %.1f$^\circ$F, "
                r"Domain Analysis Range: %.1f$^\circ$F to %.1f$^\circ$F"
                ) % (np.min(diff).magnitude, np.max(diff).magnitude,
                     np.min(minval).magnitude, np.max(maxval).magnitude),
               transform=mp.ax.transAxes, fontsize=12, ha='center',
               bbox=dict(pad=0, color='white'), zorder=50)
    mp.pcolormesh(lons, lats, diff, range(0, 61, 5), cmap=cmap,
                  clip_on=False, units=r"$^\circ$F")
    mp.postprocess(filename='test.png')
def plotter(fdict):
    """Map per-state values with departure-from-average labels.

    Args:
        fdict (dict): autoplot context form values.

    Returns:
        (matplotlib.Figure, pandas.DataFrame)
    """
    ctx = get_autoplot_context(fdict, get_description())
    get_df(ctx)  # populates ctx['df'] and title/subtitle
    labels = {}
    data = {}
    for state, row in ctx['df'].iterrows():
        val = row['departure']
        data[state] = val
        if pd.isna(val):
            # No current value; fall back to showing the negated average
            if pd.isna(row['avg']):
                subscript = 'M'
            else:
                subscript = "[-%.0f]" % (row['avg'], )
                data[state] = 0 - row['avg']
        else:
            subscript = "[%s%.0f]" % ("+" if val > 0 else "", val)
            # Avoid rendering a signed zero label
            subscript = "[0]" if subscript in ['[-0]', '[+0]'] else subscript
        labels[state] = "%s\n%s" % ('M' if pd.isna(row['thisval'])
                                    else int(row['thisval']), subscript)
    mp = MapPlot(
        sector='conus', title=ctx['title'], subtitle=ctx['subtitle']
    )
    levels = range(-40, 41, 10)
    cmap = plt.get_cmap(ctx['cmap'])
    cmap.set_bad('white')
    mp.fill_states(data, ilabel=True, labels=labels, bins=levels,
                   cmap=cmap, units='Absolute %', labelfontsize=16)
    return mp.fig, ctx['df']
def plotter(fdict):
    """Map NWS RFC Flash Flood Guidance for a chosen period and time.

    Args:
        fdict (dict): autoplot context form values.

    Returns:
        (matplotlib.Figure, pandas.DataFrame)
    """
    ctx = get_autoplot_context(fdict, get_description())
    ts = ctx['ts'].replace(tzinfo=pytz.utc)
    hour = int(ctx['hour'])
    ilabel = (ctx['ilabel'] == 'yes')
    # ffg table columns are named hour01, hour03, ... per guidance period
    column = "hour%02i" % (hour,)
    pgconn = get_dbconn('postgis')
    # Most recent guidance per UGC within the trailing 24 hours
    df = read_sql("""
    WITH data as (
        SELECT ugc, rank() OVER (PARTITION by ugc ORDER by valid DESC),
        hour01, hour03, hour06, hour12, hour24
        from ffg WHERE valid >= %s and valid <= %s)
    SELECT *, substr(ugc, 3, 1) as ztype from data where rank = 1
    """, pgconn, params=(ts - datetime.timedelta(hours=24), ts),
                  index_col='ugc')
    plot = MapPlot(sector=ctx['t'], continentalcolor='white',
                   state=ctx['state'], cwa=ctx['wfo'],
                   title=("NWS RFC %s Hour Flash Flood Guidance on "
                          "%s UTC"
                          ) % (hour, ts.strftime("%-d %b %Y %H")),
                   subtitle=("Estimated amount of %s Rainfall "
                             "needed for non-urban Flash Flooding to commence"
                             ) % (HOURS[ctx['hour']], ))
    bins = [0.01, 0.6, 0.8, 1.0, 1.2, 1.4, 1.6, 1.8, 2.0, 2.25, 2.5,
            2.75, 3., 3.5, 4.0, 5.0]
    cmap = plt.get_cmap('gist_rainbow_r')
    # UGC 'C' (county) and 'Z' (zone) entries are filled separately
    df2 = df[df['ztype'] == 'C']
    plot.fill_ugcs(df2[column].to_dict(), bins, cmap=cmap,
                   plotmissing=False, ilabel=ilabel)
    df2 = df[df['ztype'] == 'Z']
    plot.fill_ugcs(df2[column].to_dict(), bins, cmap=cmap,
                   plotmissing=False, units='inches', ilabel=ilabel)
    return plot.fig, df
def main():
    """Map 2003-2018 Flash Flood Emergency event counts by WFO.

    Reads the pre-built ``flood_emergencies.csv`` and writes ``test.png``.
    """
    df = pd.read_csv('flood_emergencies.csv')
    # De-duplicate to unique events before counting
    df2 = df[['source', 'eventid', 'phenomena', 'significance', 'year']
             ].drop_duplicates()
    gdf = df2.groupby('source').count()
    vals = {}
    labels = {}
    for wfo, row in gdf.iterrows():
        # San Juan keeps its legacy 3-char id; others drop the leading char
        if wfo == 'TJSJ':
            wfo = 'SJU'
        else:
            wfo = wfo[1:]
        vals[wfo] = int(row['eventid'])
        labels[wfo] = "%s" % (row['eventid'], )
    bins = list(range(0, 31, 3))
    bins[0] = 1.  # first bin starts at one event, not zero
    cmap = plt.get_cmap('plasma_r')
    cmap.set_over('black')
    cmap.set_under('white')
    mp = MapPlot(sector='nws', continentalcolor='white', figsize=(12., 9.),
                 title=("2003-2018 Flash Flood Emergency Events"),
                 # Bugfix: subtitle previously listed "FFS" twice; the
                 # products searched are FFW, FLW, and FFS.
                 subtitle=('based on unofficial IEM archives, searching '
                           '"FFW", "FLW", "FFS".'))
    mp.fill_cwas(vals, bins=bins, lblformat='%s', labels=labels,
                 cmap=cmap, ilabel=True, units='count')
    mp.postprocess(filename='test.png')
def main():
    """Map the 2018 percentage of time each WFO had a flood warning active.

    Writes the result to ``test.png``.
    """
    df = get_database_data()
    print(df)
    vals = {}
    labels = {}
    for wfo, row in df.iterrows():
        # legacy San Juan identifier
        if wfo == 'JSJ':
            wfo = 'SJU'
        vals[wfo] = row['percent']
        labels[wfo] = '%.0f%%' % (row['percent'], )
        #if row['count'] == 0:
        #    labels[wfo] = '-'
    bins = np.arange(0, 101, 10)
    #bins = [1, 25, 50, 75, 100, 125, 150, 200, 300]
    #bins = [-50, -25, -10, -5, 0, 5, 10, 25, 50]
    # bins[0] = 1
    #clevlabels = ['N', 'NE', 'E', 'SE', 'S', 'SW', 'W', 'NW', 'N']
    cmap = plt.get_cmap('PuOr')
    mp = MapPlot(sector='nws',
                 continentalcolor='white', figsize=(12., 9.),
                 title=("2018 Percentage of Time with 1+ "
                        "Flood Warning Active"),
                 subtitle=('1 January - 30 September 2018, '
                           'based on IEM archives'))
    mp.fill_cwas(vals, bins=bins, lblformat='%s', labels=labels,
                 cmap=cmap, ilabel=True,  # clevlabels=clevlabels,
                 units='percent')
    mp.postprocess(filename='test.png')
def plot_maxmin(ts, field):
    """Map the NDFD 7-day extreme temperature forecast.

    Args:
        ts (datetime): 00 UTC issuance time of the forecast.
        field (str): ``high_tmpk`` for the 7-day maximum or ``low_tmpk``
            for the 7-day minimum.

    Raises:
        ValueError: if ``field`` is not one of the two supported names.
    """
    # Fail fast: previously an unknown field left `data` unbound and the
    # function crashed later with a confusing NameError.
    if field not in ('high_tmpk', 'low_tmpk'):
        raise ValueError("Unknown field %s" % (field, ))
    nc = ncopen(ts.strftime("/mesonet/data/ndfd/%Y%m%d%H_ndfd.nc"))
    if field == 'high_tmpk':
        data = np.max(nc.variables[field][:], 0)
    else:
        data = np.min(nc.variables[field][:], 0)
    # Kelvin -> Fahrenheit for display
    data = masked_array(data, units.degK).to(units.degF).m
    subtitle = ("Based on National Digital Forecast Database (NDFD) "
                "00 UTC Forecast made %s") % (ts.strftime("%-d %b %Y"), )
    mp = MapPlot(title='NWS NDFD 7 Day (%s through %s) %s Temperature' % (
                     ts.strftime("%-d %b"),
                     (ts + datetime.timedelta(days=6)).strftime("%-d %b"),
                     'Maximum' if field == 'high_tmpk' else 'Minimum',
                 ),
                 subtitle=subtitle, sector='iailin')
    mp.pcolormesh(nc.variables['lon'][:], nc.variables['lat'][:], data,
                  np.arange(10, 121, 10), cmap=plt.get_cmap('jet'),
                  units='Degrees F')
    mp.drawcounties()
    # pqinsert routing string for archival
    pqstr = (
        "data c %s summary/cb_ndfd_7day_%s.png summary/cb_ndfd_7day_%s.png "
        "png") % (ts.strftime("%Y%m%d%H%M"),
                  "max" if field == 'high_tmpk' else 'min',
                  "max" if field == 'high_tmpk' else 'min')
    mp.postprocess(pqstr=pqstr)
    mp.close()
    nc.close()
def plotter(fdict):
    """Map Local Storm Report counts aggregated by WFO or state.

    Args:
        fdict (dict): autoplot context form values.

    Returns:
        (matplotlib.Figure, pandas.DataFrame)
    """
    pgconn = get_dbconn("postgis")
    ctx = get_autoplot_context(fdict, get_description())
    sts = ctx["sdate"]
    sts = sts.replace(tzinfo=pytz.utc)
    ets = ctx["edate"]
    by = ctx["by"]
    ets = ets.replace(tzinfo=pytz.utc)
    # `by` is interpolated directly into the SQL below, so restrict it to
    # the two known-good column names to prevent SQL injection.
    if by not in ("wfo", "state"):
        raise ValueError("Invalid by value of %s provided" % (by, ))
    myfilter = ctx["filter"]
    params = [sts, ets]
    if myfilter == "NONE":
        tlimiter = ""
    elif myfilter == "NRS":
        tlimiter = " and typetext not in ('HEAVY RAIN', 'SNOW', 'HEAVY SNOW') "
    elif myfilter == "CON":
        tlimiter = (" and typetext in ('TORNADO', 'HAIL', 'TSTM WND GST', "
                    "'TSTM WND DMG') ")
    else:
        # Parameterize the user-provided type text instead of string
        # formatting it into the query (SQL injection hardening).
        tlimiter = " and typetext = %s "
        params.append(myfilter)
    df = read_sql(
        """
        WITH data as (
            SELECT distinct wfo, state, valid, type, magnitude, geom
            from lsrs where valid >= %s and valid < %s """ + tlimiter + """
        )
        SELECT """ + by + """, count(*) from data GROUP by """ + by + """
        """,
        pgconn,
        params=params,
        index_col=by,
    )
    data = {}
    for idx, row in df.iterrows():
        # legacy San Juan identifier
        if idx == "JSJ":
            idx = "SJU"
        data[idx] = row["count"]
    maxv = df["count"].max()
    bins = np.linspace(1, maxv, 12, dtype="i")
    bins[-1] += 1  # ensure the max value falls inside the last bin
    mp = MapPlot(
        sector="nws",
        axisbg="white",
        title=("Preliminary/Unfiltered Local Storm Report Counts %s")
        % (PDICT[by], ),
        subtitlefontsize=10,
        subtitle=("Valid %s - %s UTC, type limiter: %s") % (
            sts.strftime("%d %b %Y %H:%M"),
            ets.strftime("%d %b %Y %H:%M"),
            MDICT.get(myfilter),
        ),
    )
    cmap = plt.get_cmap(ctx["cmap"])
    if by == "wfo":
        mp.fill_cwas(data, bins=bins, cmap=cmap, ilabel=True)
    else:
        mp.fill_states(data, bins=bins, cmap=cmap, ilabel=True)
    return mp.fig, df
def plotter(fdict):
    """Map a WPC Quantitative Precipitation Forecast grid.

    Args:
        fdict (dict): autoplot context form values.

    Returns:
        matplotlib.Figure

    Raises:
        ValueError: when the grib file is missing or ``scale`` is unknown.
    """
    ctx = get_autoplot_context(fdict, get_description())
    csector = ctx['csector']
    date = ctx['date']
    z = ctx['z']
    period = ctx['f']
    scale = ctx['scale']
    valid = utc(date.year, date.month, date.day, int(z))
    gribfn = valid.strftime(("/mesonet/ARCHIVE/data/%Y/%m/%d/model/wpc/"
                             "p" + period + "m_%Y%m%d%Hf" + period + ".grb"))
    if not os.path.isfile(gribfn):
        raise ValueError("gribfn %s missing" % (gribfn, ))
    grbs = pygrib.open(gribfn)
    grb = grbs[1]
    # Grib values are millimeters; convert to inches for display
    precip = distance(grb.values, 'MM').value('IN')
    lats, lons = grb.latlons()
    title = ("Weather Prediction Center %s Quantitative "
             "Precipitation Forecast") % (PDICT[period])
    subtitle = ("%sWPC Forcast %s UTC to %s UTC") % (
        ("US Drought Monitor Overlaid, " if ctx['opt'] == 'both' else ''),
        valid.strftime("%d %b %Y %H"),
        (valid +
         datetime.timedelta(hours=int(period))).strftime("%d %b %Y %H"))
    mp = MapPlot(sector=('state' if len(csector) == 2 else csector),
                 state=ctx['csector'],
                 title=title, subtitle=subtitle,
                 continentalcolor='white',
                 titlefontsize=16)
    cmap = plt.get_cmap('gist_ncar')
    cmap.set_under('#EEEEEE')
    cmap.set_over('black')
    if scale == 'auto':
        levs = np.linspace(0, np.max(precip) * 1.1, 10)
        levs = [round(lev, 2) for lev in levs]
        levs[0] = 0.01
    elif scale == '10':
        levs = np.arange(0, 10.1, 1.)
        levs[0] = 0.01
    elif scale == '7':
        levs = np.arange(0, 7.1, 0.5)
        levs[0] = 0.01
    elif scale == '3.5':
        levs = np.arange(0, 3.6, 0.25)
        levs[0] = 0.01
    else:
        # Previously an unknown scale left `levs` unbound and pcolormesh
        # crashed with a NameError; fail with a clear message instead.
        raise ValueError("Unknown scale %s provided" % (scale, ))
    mp.pcolormesh(lons, lats, precip, levs, cmap=cmap, units='inch',
                  clip_on=False)
    if ctx['opt'] == 'both':
        mp.draw_usdm(valid=valid, filled=False, hatched=True)
    return mp.fig
def plotter(fdict):
    """Chart the N-day periods matching a climate extreme per year.

    Draws one horizontal bar per year on the main axes, colored by the
    period's value, plus a lower frequency histogram and a colorbar.

    Args:
        fdict (dict): autoplot context form values.

    Returns:
        (matplotlib.Figure, pandas.DataFrame)

    Raises:
        NoDataFound: when the query returns nothing.
    """
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['station']
    days = ctx['days']
    varname = ctx['var']
    df = get_data(ctx)
    if df.empty:
        raise NoDataFound('Error, no results returned!')
    fig = plt.figure(figsize=(8, 6))
    # main bars, lower frequency histogram, and colorbar axes
    ax = fig.add_axes([0.1, 0.3, 0.75, 0.6])
    lax = fig.add_axes([0.1, 0.1, 0.75, 0.2])
    cax = fig.add_axes([0.87, 0.3, 0.03, 0.6])
    title = PDICT.get(varname)
    if days == 1:
        title = title.replace("Average ", "")
    ax.set_title(("%s [%s]\n%i Day Period with %s"
                  ) % (ctx['_nt'].sts[station]['name'], station, days,
                       title))
    cmap = plt.get_cmap(ctx['cmap'])
    minval = df[XREF[varname]].min() - 1.
    # Precip cannot go negative
    if varname == 'wettest' and minval < 0:
        minval = 0
    maxval = df[XREF[varname]].max() + 1.
    ramp = np.linspace(minval, maxval,
                       min([int(maxval - minval), 10]), dtype='i')
    norm = mpcolors.BoundaryNorm(ramp, cmap.N)
    cb = ColorbarBase(cax, norm=norm, cmap=cmap)
    cb.set_label("inch" if varname == 'wettest' else r"$^\circ$F")
    # One bar per year, starting at the period's day-of-year
    ax.barh(df.index.values, [days]*len(df.index), left=df['doy'].values,
            color=cmap(norm(df[XREF[varname]].values)))
    ax.grid(True)
    lax.grid(True)
    # Ticks at the first of each month within the data range
    xticks = []
    xticklabels = []
    for i in np.arange(df['doy'].min() - 5, df['doy'].max() + 5, 1):
        ts = datetime.datetime(2000, 1, 1) + datetime.timedelta(days=int(i))
        if ts.day == 1:
            xticks.append(i)
            xticklabels.append(ts.strftime("%-d %b"))
    ax.set_xticks(xticks)
    lax.set_xticks(xticks)
    lax.set_xticklabels(xticklabels)
    # Frequency of each day appearing within any year's period; the 366*2
    # sizing allows periods that extend past the end of the year
    counts = np.zeros(366*2)
    for _, row in df.iterrows():
        counts[int(row['doy']):int(row['doy'] + days)] += 1
    lax.bar(np.arange(366*2), counts, edgecolor='blue', facecolor='blue')
    lax.set_ylabel("Years")
    lax.text(0.02, 0.9, "Frequency of Day\nwithin period",
             transform=lax.transAxes, va='top')
    ax.set_ylim(df.index.values.min() - 3, df.index.values.max() + 3)
    ax.set_xlim(df['doy'].min() - 10, df['doy'].max() + 10)
    lax.set_xlim(df['doy'].min() - 10, df['doy'].max() + 10)
    ax.yaxis.set_major_locator(MaxNLocator(prune='lower'))
    return fig, df
def main():
    """Map the share of DEP flowpaths lying north of each HUC12 centroid.

    Writes the result to ``/tmp/huc12_north.png``.
    """
    pgconn = get_dbconn("idep")
    # For each HUC12, count flowpaths whose centroid latitude is south of
    # the geometry's northern edge ("west" is the legacy column name here).
    df = read_postgis(
        """
        with centroids as (
            select huc_12, st_centroid(geom) as center, simple_geom
            from huc12 where scenario = 0),
        agg as (
            select c.huc_12,
            sum(case when st_y(center) < st_ymax(geom) then 1 else 0
                end) as west, count(*) from flowpaths f
            JOIN centroids c on (f.huc_12 = c.huc_12)
            WHERE f.scenario = 0 GROUP by c.huc_12)
        select a.huc_12, st_transform(c.simple_geom, 4326) as geo,
        a.west, a.count from agg a JOIN centroids c
        ON (a.huc_12 = c.huc_12)
        """,
        pgconn,
        index_col=None,
        geom_col="geo",
    )
    df["percent"] = df["west"] / df["count"] * 100.0
    bins = np.arange(0, 101, 10)
    cmap = plt.get_cmap("RdBu")
    norm = mpcolors.BoundaryNorm(bins, cmap.N)
    mp = MapPlot(
        continentalcolor="thistle",
        nologo=True,
        sector="custom",
        south=36.8,
        north=48.0,
        west=-99.2,
        east=-88.9,
        subtitle="",
        title=("DEP Flowpaths North of HUC12 Centroid (%.0f/%.0f %.2f%%)" % (
            df["west"].sum(),
            df["count"].sum(),
            df["west"].sum() / df["count"].sum() * 100.0,
        )),
    )
    # Draw each HUC12 polygon, colored by its percentage
    for _i, row in df.iterrows():
        c = cmap(norm([row["percent"]]))[0]
        arr = np.asarray(row["geo"].exterior)
        points = mp.ax.projection.transform_points(ccrs.Geodetic(),
                                                   arr[:, 0], arr[:, 1])
        p = Polygon(points[:, :2], fc=c, ec="None", zorder=2, lw=0.1)
        mp.ax.add_patch(p)
    mp.drawcounties()
    mp.draw_colorbar(bins, cmap, norm, title="Percent", extend="neither")
    mp.postprocess(filename="/tmp/huc12_north.png")
def calendar_plot(sts, ets, data, **kwargs):
    """Create a plot that looks like a calendar

    Args:
        sts (datetime.date): start date of this plot
        ets (datetime.date): end date of this plot (inclusive)
        data (dict[dict]): dictionary with keys of dates and dicts for
            `val` value and optionally `color` for color
        kwargs (dict):
            heatmap (bool): background color for cells based on `val`, False
            cmap (str): color map to use for norm

    Returns:
        matplotlib.Figure
    """
    # Axes bounds for each month panel, keyed by month
    bounds = _compute_bounds(sts, ets)
    # Compute the number of month calendars we need.

    # We want 'square' boxes for each month's calendar, 4x3
    fig = plt.figure(figsize=(10.24, 7.68))
    # Scale the font down as the number of month panels grows
    if 'fontsize' not in kwargs:
        kwargs['fontsize'] = 12
        if len(bounds) < 3:
            kwargs['fontsize'] = 18
        elif len(bounds) < 5:
            kwargs['fontsize'] = 16
        elif len(bounds) < 10:
            kwargs['fontsize'] = 14
    if kwargs.get('heatmap', False):
        kwargs['cmap'] = plt.get_cmap(kwargs.get('cmap', 'viridis'))
        maxval = -1000
        for key in data:
            if data[key]['val'] > maxval:
                maxval = data[key]['val']
        # Need at least 3 slots
        maxval = 5 if maxval < 5 else maxval
        kwargs['norm'] = mpcolors.BoundaryNorm(np.arange(0, maxval),
                                               kwargs['cmap'].N)
    for month in bounds:
        ax = fig.add_axes(bounds[month])
        _do_month(month, ax, data, sts, ets, kwargs)
    iemlogo(fig)
    title = kwargs.get('title')
    if title is not None:
        fitbox(fig, title, 0.1, 0.99, 0.95, 0.99)
    subtitle = kwargs.get('subtitle')
    if subtitle is not None:
        fitbox(fig, subtitle, 0.1, 0.99, 0.925, 0.945)
    return fig
def calendar_plot(sts, ets, data, **kwargs):
    """Create a plot that looks like a calendar

    NOTE(review): this definition appears twice in SOURCE (duplicate of an
    earlier identical function) — consider removing one copy.

    Args:
        sts (datetime.date): start date of this plot
        ets (datetime.date): end date of this plot (inclusive)
        data (dict[dict]): dictionary with keys of dates and dicts for
            `val` value and optionally `color` for color
        kwargs (dict):
            heatmap (bool): background color for cells based on `val`, False
            cmap (str): color map to use for norm

    Returns:
        matplotlib.Figure
    """
    # Axes bounds for each month panel, keyed by month
    bounds = _compute_bounds(sts, ets)
    # Compute the number of month calendars we need.

    # We want 'square' boxes for each month's calendar, 4x3
    fig = plt.figure(figsize=(10.24, 7.68))
    # Scale the font down as the number of month panels grows
    if 'fontsize' not in kwargs:
        kwargs['fontsize'] = 12
        if len(bounds) < 3:
            kwargs['fontsize'] = 18
        elif len(bounds) < 5:
            kwargs['fontsize'] = 16
        elif len(bounds) < 10:
            kwargs['fontsize'] = 14
    if kwargs.get('heatmap', False):
        kwargs['cmap'] = plt.get_cmap(kwargs.get('cmap', 'viridis'))
        maxval = -1000
        for key in data:
            if data[key]['val'] > maxval:
                maxval = data[key]['val']
        # Need at least 3 slots
        maxval = 5 if maxval < 5 else maxval
        kwargs['norm'] = mpcolors.BoundaryNorm(np.arange(0, maxval),
                                               kwargs['cmap'].N)
    for month in bounds:
        ax = fig.add_axes(bounds[month])
        _do_month(month, ax, data, sts, ets, kwargs)
    iemlogo(fig)
    title = kwargs.get('title')
    if title is not None:
        fitbox(fig, title, 0.1, 0.99, 0.95, 0.99)
    subtitle = kwargs.get('subtitle')
    if subtitle is not None:
        fitbox(fig, subtitle, 0.1, 0.99, 0.925, 0.945)
    return fig
def plotter(fdict):
    """Map Local Storm Report counts by issuing NWS office.

    Args:
        fdict (dict): autoplot context form values.

    Returns:
        (matplotlib.Figure, pandas.DataFrame)
    """
    pgconn = get_dbconn('postgis')
    ctx = get_autoplot_context(fdict, get_description())
    sts = ctx['sdate']
    sts = sts.replace(tzinfo=pytz.utc)
    ets = ctx['edate']
    ets = ets.replace(tzinfo=pytz.utc)
    myfilter = ctx['filter']
    # Build the event-type restriction for the query
    if myfilter == 'NONE':
        tlimiter = ''
    elif myfilter == 'NRS':
        tlimiter = " and typetext not in ('HEAVY RAIN', 'SNOW', 'HEAVY SNOW') "
    elif myfilter == 'CON':
        tlimiter = (" and typetext in ('TORNADO', 'HAIL', 'TSTM WND GST', "
                    "'TSTM WND DMG') ")
    else:
        # NOTE(review): myfilter is formatted directly into the SQL;
        # presumably it comes from a fixed select list — confirm upstream.
        tlimiter = " and typetext = '%s' " % (myfilter, )
    df = read_sql("""
        SELECT wfo, count(*) from lsrs
        WHERE valid >= %s and valid < %s """ + tlimiter + """
        GROUP by wfo ORDER by wfo ASC
    """, pgconn, params=(sts, ets), index_col='wfo')
    data = {}
    for wfo, row in df.iterrows():
        # legacy San Juan identifier
        if wfo == 'JSJ':
            wfo = 'SJU'
        data[wfo] = row['count']
    maxv = df['count'].max()
    bins = np.linspace(0, maxv, 12, dtype='i')
    bins[-1] += 1  # ensure the max value falls inside the last bin
    mp = MapPlot(sector='nws', axisbg='white',
                 title='Local Storm Report Counts by NWS Office',
                 subtitlefontsize=10,
                 subtitle=('Valid %s - %s UTC, type limiter: %s'
                           ) % (sts.strftime("%d %b %Y %H:%M"),
                                ets.strftime("%d %b %Y %H:%M"),
                                MDICT.get(myfilter)))
    mp.fill_cwas(data, bins=bins, cmap=plt.get_cmap('plasma'), ilabel=True)
    return mp.fig, df
def main():
    """Map DEP HUC12s by their flowpath count (highlighting those < 40).

    Writes the result to ``/tmp/huc12_cnts.png``.
    """
    pgconn = get_dbconn("idep")
    df = read_postgis(
        """
        select f.huc_12, count(*) as fps,
        st_transform(h.simple_geom, 4326) as geo
        from flowpaths f JOIN huc12 h on (f.huc_12 = h.huc_12)
        WHERE f.scenario = 0 and h.scenario = 0
        GROUP by f.huc_12, geo ORDER by fps ASC
        """,
        pgconn,
        index_col=None,
        geom_col="geo",
    )
    bins = np.arange(1, 42, 2)
    cmap = plt.get_cmap("copper")
    cmap.set_over("white")
    cmap.set_under("thistle")
    norm = mpcolors.BoundaryNorm(bins, cmap.N)
    mp = MapPlot(
        continentalcolor="thistle",
        nologo=True,
        sector="custom",
        south=36.8,
        north=45.0,
        west=-99.2,
        east=-88.9,
        subtitle="",
        title=("DEP HUCs with <40 Flowpaths (%.0f/%.0f %.2f%%)" % (
            len(df[df["fps"] < 40].index),
            len(df.index),
            len(df[df["fps"] < 40].index) / len(df.index) * 100.0,
        )),
    )
    # Draw each HUC12 polygon, colored by its flowpath count
    for _i, row in df.iterrows():
        c = cmap(norm([row["fps"]]))[0]
        arr = np.asarray(row["geo"].exterior)
        points = mp.ax.projection.transform_points(ccrs.Geodetic(),
                                                   arr[:, 0], arr[:, 1])
        p = Polygon(points[:, :2], fc=c, ec="None", zorder=2, lw=0.1)
        mp.ax.add_patch(p)
    mp.drawcounties()
    mp.draw_colorbar(bins, cmap, norm, title="Count")
    mp.postprocess(filename="/tmp/huc12_cnts.png")
def plotter(fdict):
    """Contour visibility observations over a state or NWS CWA.

    Args:
        fdict (dict): autoplot context form values.

    Returns:
        (matplotlib.Figure, pandas.DataFrame)

    Raises:
        ValueError: when no observations are found.
    """
    ctx = get_autoplot_context(fdict, get_description())
    if ctx['t'] == 'state':
        bnds = reference.state_bounds[ctx['state']]
        title = reference.state_names[ctx['state']]
    else:
        bnds = reference.wfo_bounds[ctx['wfo']]
        nt = NetworkTable("WFO")
        title = "NWS CWA %s [%s]" % (nt.sts[ctx['wfo']]['name'],
                                     ctx['wfo'])
    df, valid = get_df(ctx, bnds)
    if df.empty:
        raise ValueError("No data was found for your query")
    mp = MapPlot(sector=('state' if ctx['t'] == 'state' else 'cwa'),
                 state=ctx['state'],
                 # 4-char WFO ids are trimmed to the 3-char CWA id
                 cwa=(ctx['wfo'] if len(ctx['wfo']) == 3
                      else ctx['wfo'][1:]),
                 axisbg='white',
                 title='%s for %s' % (PDICT2[ctx['v']], title),
                 subtitle=('Map valid: %s UTC'
                           ) % (valid.strftime("%d %b %Y %H:%M"), ),
                 nocaption=True,
                 titlefontsize=16)
    mp.contourf(df['lon'].values, df['lat'].values, df['vsby'].values,
                np.array([0.01, 0.1, 0.25, 0.5, 1, 2, 3, 5, 8, 9.9]),
                units='miles', cmap=plt.get_cmap('gray'))
    # Only label stations inside the selected domain
    if ctx['t'] == 'state':
        df2 = df[df['state'] == ctx['state']]
    else:
        df2 = df[df['wfo'] == ctx['wfo']]
    mp.plot_values(df2['lon'].values, df2['lat'].values,
                   df2['vsby'].values, '%.1f')
    mp.drawcounties()
    if ctx['t'] == 'cwa':
        mp.draw_cwas()
    return mp.fig, df
def plot_gdd(ts):
    """Map the NDFD 7-day GDD50 departure from climatology.

    Args:
        ts (datetime): 00 UTC issuance time of the forecast.
    """
    nc = ncopen(ts.strftime("/mesonet/data/ndfd/%Y%m%d%H_ndfd.nc"))
    # compute our daily GDDs
    gddtot = np.zeros(np.shape(nc.variables["lon"][:]))
    for i in range(7):
        gddtot += gdd(
            temperature(nc.variables["high_tmpk"][i, :, :], "K"),
            temperature(nc.variables["low_tmpk"][i, :, :], "K"),
        )
    # Climatology for the same 7-day window
    cnc = ncopen("/mesonet/data/ndfd/ndfd_dailyc.nc")
    offset = daily_offset(ts)
    avggdd = np.sum(cnc.variables["gdd50"][offset:offset + 7], 0)
    # Floor the climatology at 1 to avoid division/zero artifacts
    data = gddtot - np.where(avggdd < 1, 1, avggdd)
    subtitle = ("Based on National Digital Forecast Database (NDFD) "
                "00 UTC Forecast made %s") % (ts.strftime("%-d %b %Y"), )
    mp = MapPlot(
        title="NWS NDFD 7 Day (%s through %s) GDD50 Departure from Avg" % (
            ts.strftime("%-d %b"),
            (ts + datetime.timedelta(days=6)).strftime("%-d %b"),
        ),
        subtitle=subtitle,
        sector="iailin",
    )
    mp.pcolormesh(
        nc.variables["lon"][:],
        nc.variables["lat"][:],
        data,
        np.arange(-80, 81, 20),
        cmap=plt.get_cmap("RdBu_r"),
        units=r"$^\circ$F",
        spacing="proportional",
    )
    mp.drawcounties()
    # pqinsert routing string for archival
    pqstr = (
        "data c %s summary/cb_ndfd_7day_gdd.png summary/cb_ndfd_7day_gdd.png "
        "png") % (ts.strftime("%Y%m%d%H%M"), )
    mp.postprocess(pqstr=pqstr)
    mp.close()
    nc.close()
def plotter(ctx): """ Go """ # Covert datetime to UTC do_polygon(ctx) m = MapPlot( title='2009-2018 Flash Flood Emergency Polygon Heatmap', sector='custom', axisbg='white', # west=-107, south=25.5, east=-88, north=41, # west=-82, south=36., east=-68, north=48, west=-85, south=31.8, north=45.2, east=-69, subtitle='based on unofficial IEM Archives', nocaption=True) cmap = plt.get_cmap('jet') cmap.set_under('white') cmap.set_over('black') res = m.pcolormesh(ctx['lons'], ctx['lats'], ctx['data'], ctx['bins'], cmap=cmap, units='count') # Cut down on SVG et al size res.set_rasterized(True) m.postprocess(filename='test.png')
def main():
    """Map the percentage of SV warnings that never got a SVS update.

    Queries the warnings archive (excluding Hurricane Harvey dates), dumps
    a CSV, and writes ``140401_190221_svr_nosvs.png``.
    """
    dbconn = get_dbconn("postgis")
    # hit=1 when a follow-up SVS was issued for the warning
    df = read_sql("""
    WITH data as (
        SELECT wfo, eventid, extract(year from issue) as year,
        max(case when svs is not null then 1 else 0 end) as hit from
        warnings where product_issue > '2014-04-01'
        and product_issue < '2019-02-22'
        and phenomena = 'SV' and date(issue) not in
        ('2017-08-25', '2017-08-26', '2017-08-27', '2017-08-28',
         '2017-08-29', '2017-08-30')
        and significance = 'W' GROUP by wfo, eventid, year
    )
    SELECT wfo, sum(hit) as got_update, count(*) as total_events from data
    GROUP by wfo ORDER by total_events DESC
    """, dbconn, index_col='wfo')
    # Mirror the legacy San Juan identifier when missing
    if 'JSJ' in df.index and 'SJU' not in df.index:
        df.loc['SJU'] = df.loc['JSJ']
    df['no_update_percent'] = (
        100. - df['got_update'] / df['total_events'] * 100.
    )
    df.to_csv("140401_190221_svr_nofls.csv")
    # NOTE: FFW followup is FFS
    mp = MapPlot(
        sector='nws',
        title=('Percentage of Severe TStorm Warnings without a SVS '
               'Update Issued'),
        subtitle=('1 April 2014 - 21 February 2019 (exclude Harvey 26-30 Aug '
                  '2017), based on unofficial data')
    )
    cmap = plt.get_cmap("copper_r")
    cmap.set_under('white')
    cmap.set_over('black')
    ramp = range(0, 101, 5)
    mp.fill_cwas(
        df['no_update_percent'], bins=ramp, cmap=cmap, units='%',
        ilabel=True, lblformat='%.1f'
    )
    mp.postprocess(filename='140401_190221_svr_nosvs.png')
def plotter(fdict):
    """Map per-state values with departure-from-average labels.

    Args:
        fdict (dict): autoplot context form values.

    Returns:
        (matplotlib.Figure, pandas.DataFrame)
    """
    ctx = get_autoplot_context(fdict, get_description())
    get_df(ctx)  # populates ctx['df'] and title/subtitle
    labels = {}
    data = {}
    for state, row in ctx["df"].iterrows():
        val = row["departure"]
        data[state] = val
        if pd.isna(val):
            # No current value; fall back to showing the negated average
            if pd.isna(row["avg"]):
                subscript = "M"
            else:
                subscript = "[-%.0f]" % (row["avg"], )
                data[state] = 0 - row["avg"]
        else:
            subscript = "[%s%.0f]" % ("+" if val > 0 else "", val)
            # Avoid rendering a signed zero label
            subscript = "[0]" if subscript in ["[-0]", "[+0]"] else subscript
        labels[state] = "%s\n%s" % (
            "M" if pd.isna(row["thisval"]) else int(row["thisval"]),
            subscript,
        )
    mp = MapPlot(sector="conus", title=ctx["title"],
                 subtitle=ctx["subtitle"])
    levels = range(-40, 41, 10)
    cmap = plt.get_cmap(ctx["cmap"])
    cmap.set_bad("white")
    mp.fill_states(
        data,
        ilabel=True,
        labels=labels,
        bins=levels,
        cmap=cmap,
        units="Absolute %",
        labelfontsize=16,
    )
    return mp.fig, ctx["df"]
def main():
    """Plot the NOHRSC season-total snowfall analysis over Iowa.

    Reads the gridded analysis from the netCDF file and renders it to
    ``test.png``.
    """
    # Pull the grid out of the analysis file, then release the handle
    snowfall_nc = netCDF4.Dataset(
        '/tmp/sfav2_CONUS_2018093012_to_2019021312.nc')
    grid_lats = snowfall_nc.variables['lat'][:]
    grid_lons = snowfall_nc.variables['lon'][:]
    # Values are meters; convert to millimeters then inches
    # (presumably — units not stated in this file, confirm upstream)
    snow_inches = snowfall_nc.variables['Data'][:] * 1000. / 25.4
    snowfall_nc.close()

    ramp = [0.1, 2, 5, 8, 12, 18, 24, 30, 36, 42, 48]
    mp = MapPlot(
        sector='iowa', continentalcolor='tan',
        title=("National Snowfall Analysis - NOHRSC "
               "- Season Total Snowfall"),
        subtitle='Snowfall up until 7 AM 13 Feb 2019')
    mp.pcolormesh(grid_lons, grid_lats, snow_inches, ramp,
                  cmap=plt.get_cmap('terrain_r'), units='inch',
                  clip_on=False, spacing='proportional')
    mp.drawcounties()
    mp.drawcities()
    mp.postprocess(filename='test.png')
def main():
    """Map USDA NASS corn silking progress and its departure from average.

    Writes the result to ``test.png``.
    """
    data = get_data()
    mp = MapPlot(sector='midwest',
                 title='8 July 2018 USDA NASS Corn Progress Percent Silking',
                 subtitle=('Top value is 2018 percentage, bottom value is '
                           'departure from 2008-2017 avg'))
    data2 = {}
    labels = {}
    for state in data:
        val = data[state]['d2017'] - data[state]['avg']
        data2[state] = val
        # Bugfix: label keys must be str to match data2's keys; the old
        # state.encode('utf-8') produced bytes keys (Python 2 leftover)
        # that could never match.
        labels[state] = "%i%%\n%s%.1f%%" % (data[state]['d2017'],
                                            "+" if val > 0 else "", val)
    print(labels)
    levels = range(-40, 41, 10)
    mp.fill_states(data2, ilabel=True, labels=labels, bins=levels,
                   cmap=plt.get_cmap('RdBu_r'), units='Absolute %',
                   labelfontsize=16)
    mp.postprocess(filename='test.png')
    mp.close()
def main():
    """Map the year each WFO last issued an RWS text product.

    Parses a module-level `data` string of "wfo|timestamp" lines and
    writes the result to ``test.png``.
    """
    vals = {}
    for line in data.split("\n"):
        wfo, valid = line.strip().split("|")
        wfo = wfo.strip()
        year = valid.strip()[:4]
        # Drop the leading char of the 4-char id; San Juan keeps SJU
        wfo = wfo[1:]
        if wfo == 'JSJ':
            wfo = 'SJU'
        vals[wfo] = int(year)
    print(vals)
    #bins = [1, 25, 50, 75, 100, 125, 150, 200, 300]
    bins = np.arange(2002, 2019, 2)
    cmap = plt.get_cmap('PuOr')
    mp = MapPlot(sector='nws',
                 continentalcolor='white', figsize=(12., 9.),
                 title=("Year of Last RWS Text Product Issuance"),
                 subtitle=('based on IEM archives'))
    mp.fill_cwas(vals, bins=bins, lblformat='%s',  # , labels=labels,
                 cmap=cmap, ilabel=True,  # clevlabels=clevlabels,
                 units='Year')
    mp.postprocess(filename='test.png')
def main():
    """Map DEP HUC12s by their flowpath count (highlighting those < 40).

    Writes the result to ``/tmp/huc12_cnts.png``.
    """
    pgconn = get_dbconn('idep')
    df = read_postgis("""
    select f.huc_12, count(*) as fps,
    st_transform(h.simple_geom, 4326) as geo
    from flowpaths f JOIN huc12 h on (f.huc_12 = h.huc_12)
    WHERE f.scenario = 0 and h.scenario = 0 GROUP by f.huc_12, geo
    ORDER by fps ASC
    """, pgconn, index_col=None, geom_col='geo')
    bins = np.arange(1, 42, 2)
    cmap = plt.get_cmap('copper')
    cmap.set_over('white')
    cmap.set_under('thistle')
    norm = mpcolors.BoundaryNorm(bins, cmap.N)
    mp = MapPlot(
        continentalcolor='thistle', nologo=True,
        sector='custom',
        south=36.8, north=45.0, west=-99.2, east=-88.9,
        subtitle='',
        title=('DEP HUCs with <40 Flowpaths (%.0f/%.0f %.2f%%)' % (
            len(df[df['fps'] < 40].index), len(df.index),
            len(df[df['fps'] < 40].index) / len(df.index) * 100.
        )))
    # Draw each HUC12 polygon, colored by its flowpath count
    for _i, row in df.iterrows():
        c = cmap(norm([row['fps'], ]))[0]
        arr = np.asarray(row['geo'].exterior)
        points = mp.ax.projection.transform_points(
            ccrs.Geodetic(), arr[:, 0], arr[:, 1])
        p = Polygon(points[:, :2], fc=c, ec='None', zorder=2, lw=0.1)
        mp.ax.add_patch(p)
    mp.drawcounties()
    mp.draw_colorbar(
        bins, cmap, norm, title='Count')
    mp.postprocess(filename='/tmp/huc12_cnts.png')
def plotter(fdict):
    """Contour a surface observation variable over a state or NWS CWA.

    Args:
        fdict (dict): autoplot context form values.

    Returns:
        (matplotlib.Figure, pandas.DataFrame)

    Raises:
        NoDataFound: when no observations are found.
    """
    ctx = get_autoplot_context(fdict, get_description())
    varname = ctx["v"]
    if ctx["t"] == "state":
        bnds = reference.state_bounds[ctx["state"]]
        title = reference.state_names[ctx["state"]]
    else:
        bnds = reference.wfo_bounds[ctx["wfo"]]
        title = "NWS CWA %s [%s]" % (
            ctx["_nt"].sts[ctx["wfo"]]["name"],
            ctx["wfo"],
        )
    df, valid = get_df(ctx, bnds)
    if df.empty:
        raise NoDataFound("No data was found for your query")
    mp = MapPlot(
        sector=("state" if ctx["t"] == "state" else "cwa"),
        state=ctx["state"],
        # 4-char WFO ids are trimmed to the 3-char CWA id
        cwa=(ctx["wfo"] if len(ctx["wfo"]) == 3 else ctx["wfo"][1:]),
        axisbg="white",
        title="%s for %s" % (PDICT2[ctx["v"]], title),
        subtitle=("Map valid: %s UTC") % (valid.strftime("%d %b %Y %H:%M"),),
        nocaption=True,
        titlefontsize=16,
    )
    # NOTE(review): ramp/valunit are only assigned for 'vsby' and 'feel';
    # presumably PDICT2 restricts varname to those — confirm upstream.
    if varname == "vsby":
        ramp = np.array([0.01, 0.1, 0.25, 0.5, 1, 2, 3, 5, 8, 9.9])
        valunit = "miles"
    elif varname == "feel":
        valunit = "F"
        # Compute apparent temperature from temp, humidity, and wind
        df["feel"] = (
            apparent_temperature(
                df["tmpf"].values * units("degF"),
                df["relh"].values * units("percent"),
                df["sknt"].values * units("knots"),
            )
            .to(units("degF"))
            .m
        )
    # Data QC, cough
    if ctx.get("above"):
        df = df[df[varname] < ctx["above"]]
    if ctx.get("below"):
        df = df[df[varname] > ctx["below"]]
    # with QC done, we compute ramps
    if varname != "vsby":
        ramp = np.linspace(
            df[varname].min() - 5, df[varname].max() + 5, 10, dtype="i"
        )
    mp.contourf(
        df["lon"].values,
        df["lat"].values,
        df[varname].values,
        ramp,
        units=valunit,
        cmap=plt.get_cmap(ctx["cmap"]),
    )
    # Only label stations inside the selected domain
    if ctx["t"] == "state":
        df2 = df[df["state"] == ctx["state"]]
    else:
        df2 = df[df["wfo"] == ctx["wfo"]]
    mp.plot_values(
        df2["lon"].values,
        df2["lat"].values,
        df2[varname].values,
        "%.1f",
        labelbuffer=10,
    )
    mp.drawcounties()
    if ctx["t"] == "cwa":
        mp.draw_cwas()
    return mp.fig, df
def plotter(fdict):
    """Heatmap of hourly temperature departure under cloudy/clear skies.

    For each (hour-of-day, week-of-year) cell, plots the average temperature
    difference between the selected sky condition and all observations.
    Returns (figure, DataFrame).
    """
    pgconn = get_dbconn("asos")
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx["zstation"]
    which = ctx["which"]
    # 24 hours x 52 weeks accumulation grid
    data = np.zeros((24, 52), "f")
    # SQL fragment selecting the sky condition of interest.
    sql = "in ('BKN','OVC')" if which == "cloudy" else "= 'CLR'"
    df = read_sql(
        """
    WITH data as (
        SELECT valid at time zone %s + '10 minutes'::interval as v, tmpf,
        skyc1, skyc2, skyc3, skyc4 from alldata WHERE station = %s
        and valid > '1973-01-01' and tmpf is not null and tmpf > -99
        and tmpf < 150),
    climo as (
        select extract(week from v) as w, extract(hour from v) as hr,
        avg(tmpf) from data GROUP by w, hr),
    cloudy as (
        select extract(week from v) as w, extract(hour from v) as hr,
        avg(tmpf) from data WHERE skyc1 """ + sql + """ or
        skyc2 """ + sql + """ or skyc3 """ + sql + """ or
        skyc4 """ + sql + """ GROUP by w, hr)
    SELECT l.w as week, l.hr as hour, l.avg - c.avg as difference
    from cloudy l JOIN climo c on (l.w = c.w and l.hr = c.hr)
    """,
        pgconn,
        params=(ctx["_nt"].sts[station]["tzname"], station),
    )
    for _, row in df.iterrows():
        # row[0] is the week column; postgres can emit week 53, skip it
        if row[0] > 52:
            continue
        data[int(row["hour"]), int(row["week"]) - 1] = row["difference"]
    (fig, ax) = plt.subplots(1, 1, figsize=(8, 6))
    # Symmetric color range about zero, padded slightly.
    maxv = np.ceil(max([np.max(data), 0 - np.min(data)])) + 0.2
    cs = ax.imshow(
        data,
        aspect="auto",
        interpolation="nearest",
        vmin=(0 - maxv),
        vmax=maxv,
        cmap=plt.get_cmap(ctx["cmap"]),
    )
    a = fig.colorbar(cs)
    a.ax.set_ylabel(r"Temperature Departure $^{\circ}\mathrm{F}$")
    ax.grid(True)
    ab = ctx["_nt"].sts[station]["archive_begin"]
    if ab is None:
        raise NoDataFound("Unknown station metadata.")
    ax.set_title(("[%s] %s %s-%s\nHourly Temp Departure "
                  "(skies were %s vs all)") % (
        station,
        ctx["_nt"].sts[station]["name"],
        # data query starts at 1973 regardless of archive start
        max([ab.year, 1973]),
        datetime.date.today().year,
        PDICT[ctx["which"]],
    ))
    ax.set_ylim(-0.5, 23.5)
    ax.set_ylabel("Local Hour of Day, %s" % (
        ctx["_nt"].sts[station]["tzname"], ))
    ax.set_yticks((0, 4, 8, 12, 16, 20))
    ax.set_xticks(range(0, 55, 7))
    ax.set_xticklabels((
        "Jan 1", "Feb 19", "Apr 8", "May 27", "Jul 15", "Sep 2", "Oct 21",
        "Dec 9",
    ))
    ax.set_yticklabels(("Mid", "4 AM", "8 AM", "Noon", "4 PM", "8 PM"))
    return fig, df
def plotter(fdict):
    """Map the climatological frequency (days/year) of SPC outlooks.

    Rasterizes each qualifying outlook polygon onto a fixed CONUS grid,
    divides the per-cell hit count by the years of record, and maps the
    result.  Returns (figure, DataFrame of gridded frequencies).
    """
    ctx = get_autoplot_context(fdict, get_description())
    level = ctx['level']
    station = ctx['station'][:4]
    t = ctx['t']
    p = ctx['p']
    month = ctx['month']
    if month == 'all':
        months = range(1, 13)
    elif month == 'fall':
        months = [9, 10, 11]
    elif month == 'winter':
        months = [12, 1, 2]
    elif month == 'spring':
        months = [3, 4, 5]
    elif month == 'summer':
        months = [6, 7, 8]
    else:
        ts = datetime.datetime.strptime("2000-" + month + "-01", '%Y-%b-%d')
        # make sure it is length two for the trick below in SQL
        # (999 is a sentinel that matches no real month)
        months = [ts.month, 999]
    # Constant raster used by zonal_stats; counts accumulates polygon hits.
    ones = np.ones((int(YSZ), int(XSZ)))
    counts = np.zeros((int(YSZ), int(XSZ)))
    # counts = np.load('counts.npy')
    lons = np.arange(GRIDWEST, GRIDEAST, griddelta)
    lats = np.arange(GRIDSOUTH, GRIDNORTH, griddelta)
    pgconn = get_dbconn('postgis')
    # p encodes day.type.hour -- e.g. '1.O.13'; presumably issuance slot,
    # TODO confirm against ISSUANCE keys
    hour = int(p.split(".")[2])
    df = read_postgis("""
    WITH data as (
        select product_issue, issue, expire, geom,
        rank() OVER (PARTITION by issue ORDER by product_issue DESC)
        from spc_outlooks where outlook_type = %s and day = %s
        and threshold = %s and category = %s
        and ST_Within(geom,
            ST_GeomFromEWKT('SRID=4326;POLYGON((%s %s, %s %s, %s %s, %s %s, %s %s))'))
        and extract(hour from product_issue at time zone 'UTC') in %s
        and extract(month from product_issue) in %s
    )
    SELECT * from data where rank = 1
    """, pgconn, params=(
        p.split(".")[1], p.split(".")[0], level.split(".", 1)[1],
        level.split(".")[0],
        # bounding polygon vertices (closed ring, lon lat pairs)
        GRIDWEST, GRIDSOUTH, GRIDWEST, GRIDNORTH, GRIDEAST, GRIDNORTH,
        GRIDEAST, GRIDSOUTH, GRIDWEST, GRIDSOUTH,
        # accept the target hour +/- 1 to tolerate issuance jitter
        tuple([hour - 1, hour, hour + 1]), tuple(months)),
        geom_col='geom')
    if df.empty:
        raise NoDataFound("No results found for query")
    for _, row in df.iterrows():
        # Rasterize this outlook polygon onto the grid and bump counts.
        zs = zonal_stats(row['geom'], ones, affine=PRECIP_AFF, nodata=-1,
                         all_touched=True, raster_out=True)
        for z in zs:
            aff = z['mini_raster_affine']
            west = aff.c
            north = aff.f
            raster = np.flipud(z['mini_raster_array'])
            # Locate the mini-raster within the full grid.
            x0 = int((west - GRIDWEST) / griddelta)
            y1 = int((north - GRIDSOUTH) / griddelta)
            dy, dx = np.shape(raster)
            x1 = x0 + dx
            y0 = y1 - dy
            counts[y0:y1, x0:x1] += np.where(raster.mask, 0, 1)
    # Period of record varies by product/threshold availability.
    mindate = datetime.datetime(2014, 10, 1)
    if level not in ['CATEGORICAL.MRGL', 'CATEGORICAL.ENH']:
        mindate = datetime.datetime(2002, 1, 1)
    if p.split(".")[1] == 'F':
        mindate = datetime.datetime(2017, 1, 1)
    years = (datetime.datetime.now()
             - mindate).total_seconds() / 365.25 / 86400.
    data = counts / years
    subtitle = "Found %s events for CONUS between %s and %s" % (
        len(df.index), df['issue'].min().strftime("%d %b %Y"),
        df['issue'].max().strftime("%d %b %Y"))
    if t == 'cwa':
        sector = 'cwa'
        subtitle = "Plotted for %s (%s). %s" % (
            ctx['_nt'].sts[station]['name'], station, subtitle)
    else:
        sector = 'state' if len(ctx['csector']) == 2 else ctx['csector']
    mp = MapPlot(sector=sector, state=ctx['csector'],
                 cwa=(station if len(station) == 3 else station[1:]),
                 axisbg='white',
                 title='SPC %s Outlook [%s] of at least %s' % (
                     ISSUANCE[p], month.capitalize(),
                     OUTLOOKS[level].split("(")[0].strip(), ),
                 subtitle=subtitle, nocaption=True, titlefontsize=16)
    # Get the main axes bounds
    if t == 'state' and ctx['csector'] == 'conus':
        # Full-grid plot; no per-cell DataFrame is produced for CONUS.
        domain = data
        lons, lats = np.meshgrid(lons, lats)
        df2 = pd.DataFrame()
    else:
        # Subset the grid to the visible map extent.
        (west, east, south, north) = mp.ax.get_extent(ccrs.PlateCarree())
        i0 = int((west - GRIDWEST) / griddelta)
        j0 = int((south - GRIDSOUTH) / griddelta)
        i1 = int((east - GRIDWEST) / griddelta)
        j1 = int((north - GRIDSOUTH) / griddelta)
        jslice = slice(j0, j1)
        islice = slice(i0, i1)
        domain = data[jslice, islice]
        lons, lats = np.meshgrid(lons[islice], lats[jslice])
        df2 = pd.DataFrame({
            'lat': lats.ravel(),
            'lon': lons.ravel(),
            'freq': domain.ravel()
        })
    # 10 rounded levels spanning the domain, never below 0.01.
    rng = [
        round(x, 2)
        for x in np.linspace(max([0.01, np.min(domain) - 0.5]),
                             np.max(domain) + 0.5, 10)
    ]
    cmap = plt.get_cmap(ctx['cmap'])
    cmap.set_under('white')
    cmap.set_over('black')
    res = mp.pcolormesh(lons, lats, domain, rng, cmap=cmap, clip_on=False,
                        units='days per year')
    # Cut down on SVG et al size
    res.set_rasterized(True)
    if ctx['drawc'] == 'yes':
        mp.drawcounties()
    return mp.fig, df2
def plotter(fdict):
    """Map VTEC event statistics by UGC zone or gridded polygon coverage.

    Delegates data prep to do_ugc()/do_polygon(), which populate ctx with
    the data/bins (and lons/lats for polygons), then renders the map.
    Returns (figure, DataFrame from ctx).
    """
    ctx = get_autoplot_context(fdict, get_description())
    # Covert datetime to UTC
    ctx["sdate"] = ctx["sdate"].replace(tzinfo=pytz.utc)
    ctx["edate"] = ctx["edate"].replace(tzinfo=pytz.utc)
    state = ctx["state"]
    phenomena = ctx["phenomena"]
    significance = ctx["significance"]
    station = ctx["station"][:4]
    t = ctx["t"]
    ilabel = ctx["ilabel"] == "yes"
    geo = ctx["geo"]
    # These helpers populate ctx['data'], ctx['bins'], ctx['df'] (and
    # ctx['lons']/ctx['lats'] for the polygon path) as a side effect.
    if geo == "ugc":
        do_ugc(ctx)
    elif geo == "polygon":
        do_polygon(ctx)
    subtitle = "based on IEM Archives %s" % (ctx.get("subtitle", ""), )
    if t == "cwa":
        subtitle = "Plotted for %s (%s), %s" % (
            ctx["_nt"].sts[station]["name"],
            station,
            subtitle,
        )
    else:
        subtitle = "Plotted for %s, %s" % (state_names[state], subtitle)
    m = MapPlot(
        sector=("state" if t == "state" else "cwa"),
        state=state,
        # 4-char WFO ids carry a leading char; strip it for pyiem
        cwa=(station if len(station) == 3 else station[1:]),
        axisbg="white",
        title=("%s %s (%s.%s)") % (
            ctx["title"],
            vtec.get_ps_string(phenomena, significance),
            phenomena,
            significance,
        ),
        subtitle=subtitle,
        nocaption=True,
        titlefontsize=16,
    )
    cmap = plt.get_cmap(ctx["cmap"])
    # white out-of-range on both ends so only binned values show color
    cmap.set_under("white")
    cmap.set_over("white")
    if geo == "ugc":
        m.fill_ugcs(ctx["data"], ctx["bins"], cmap=cmap, ilabel=ilabel)
    else:
        res = m.pcolormesh(
            ctx["lons"],
            ctx["lats"],
            ctx["data"],
            ctx["bins"],
            cmap=cmap,
            units="count",
        )
        # Cut down on SVG et al size
        res.set_rasterized(True)
    if ctx["drawc"] == "yes":
        m.drawcounties()
    return m.fig, ctx["df"]
def plotter(fdict):
    """Map a WPC Quantitative Precipitation Forecast from archived GRIB.

    Loads the archived WPC QPF grid for the requested valid time/period,
    optionally overlays the US Drought Monitor, and returns the figure.
    """
    ctx = get_autoplot_context(fdict, get_description())
    csector = ctx["csector"]
    date = ctx["date"]
    z = ctx["z"]
    period = ctx["f"]
    scale = ctx["scale"]
    valid = utc(date.year, date.month, date.day, int(z))
    # Archived WPC GRIB path embeds the forecast period twice.
    gribfn = valid.strftime(("/mesonet/ARCHIVE/data/%Y/%m/%d/model/wpc/"
                             "p" + period + "m_%Y%m%d%Hf" + period + ".grb"))
    if not os.path.isfile(gribfn):
        raise NoDataFound("gribfn %s missing" % (gribfn, ))
    grbs = pygrib.open(gribfn)
    grb = grbs[1]
    # GRIB values are millimeters; convert to inches for display.
    precip = distance(grb.values, "MM").value("IN")
    lats, lons = grb.latlons()
    title = ("Weather Prediction Center %s Quantitative "
             "Precipitation Forecast") % (PDICT[period])
    subtitle = ("%sWPC Forecast %s UTC to %s UTC") % (
        ("US Drought Monitor Overlaid, " if ctx["opt"] == "both" else ""),
        valid.strftime("%d %b %Y %H"),
        (valid +
         datetime.timedelta(hours=int(period))).strftime("%d %b %Y %H"),
    )
    mp = MapPlot(
        sector=("state" if len(csector) == 2 else csector),
        state=ctx["csector"],
        title=title,
        subtitle=subtitle,
        continentalcolor="white",
        titlefontsize=16,
    )
    cmap = plt.get_cmap(ctx["cmap"])
    cmap.set_under("#EEEEEE")
    cmap.set_over("black")
    # Choose contour levels; first level is bumped to 0.01 so that exact
    # zeros fall under the ramp and render via set_under.
    # NOTE(review): levs is unbound if scale is outside these four options;
    # presumably the form constrains scale -- confirm against
    # get_description().
    if scale == "auto":
        levs = np.linspace(0, np.max(precip) * 1.1, 10)
        levs = [round(lev, 2) for lev in levs]
        levs[0] = 0.01
    elif scale == "10":
        levs = np.arange(0, 10.1, 1.0)
        levs[0] = 0.01
    elif scale == "7":
        levs = np.arange(0, 7.1, 0.5)
        levs[0] = 0.01
    elif scale == "3.5":
        levs = np.arange(0, 3.6, 0.25)
        levs[0] = 0.01
    mp.pcolormesh(
        lons,
        lats,
        precip,
        levs,
        cmap=cmap,
        units="inch",
        clip_on=(ctx["csector"] == "iailin"),
    )
    if ctx["opt"] == "both":
        mp.draw_usdm(valid=valid, filled=False, hatched=True)
    if ctx["csector"] == "iailin":
        mp.drawcounties()
    return mp.fig
def plotter(fdict):
    """Map climate-variable differences between two year periods.

    Aggregates COOP observations over two user-selected year ranges,
    computes period-two minus period-one differences (or plots period one
    alone), and renders a contour/value map.  Returns (figure, DataFrame).

    Raises NoDataFound when the database query returns no stations.
    """
    pgconn = get_dbconn('coop')
    ctx = get_autoplot_context(fdict, get_description())
    state = ctx['state']
    varname = ctx['var']
    sector = ctx['sector']
    threshold = ctx['threshold']
    opt = ctx['opt']
    month = ctx['month']
    p1syear = ctx['p1syear']
    p1eyear = ctx['p1eyear']
    # minimum years of data a station must have within each period
    p1yearreq = (p1eyear - p1syear)
    p2syear = ctx['p2syear']
    p2eyear = ctx['p2eyear']
    p2yearreq = (p2eyear - p2syear)
    opt1 = ctx['opt1']
    if month == 'all':
        months = range(1, 13)
    elif month == 'fall':
        months = [9, 10, 11]
    elif month == 'winter':
        months = [12, 1, 2]
    elif month == 'spring':
        months = [3, 4, 5]
    elif month == 'summer':
        months = [6, 7, 8]
    elif month == 'gs':
        months = [5, 6, 7, 8, 9]
    else:
        ts = datetime.datetime.strptime("2000-"+month+"-01", '%Y-%b-%d')
        # make sure it is length two for the trick below in SQL
        # 999 is a sentinel month that matches nothing; it keeps the tuple
        # multi-element (consistent with the sibling plotters in this file)
        months = [ts.month, 999]
    table = "alldata"
    if sector == 'state':
        # optimization: per-state tables are much smaller
        table = "alldata_%s" % (state,)
    df = read_sql("""
    WITH period1 as (
        SELECT station, year, sum(precip) as total_precip,
        avg((high+low) / 2.) as avg_temp, avg(high) as avg_high,
        avg(low) as avg_low,
        sum(gddxx(50, 86, high, low)) as sum_gdd,
        sum(case when high > 86 then high - 86 else 0 end) as sum_sdd,
        sum(case when high >= %s then 1 else 0 end) as days_high_above
        from """ + table + """ WHERE year >= %s and year < %s
        and month in %s GROUP by station, year),
    period2 as (
        SELECT station, year, sum(precip) as total_precip,
        avg((high+low) / 2.) as avg_temp, avg(high) as avg_high,
        avg(low) as avg_low,
        sum(gddxx(50, 86, high, low)) as sum_gdd,
        sum(case when high > 86 then high - 86 else 0 end) as sum_sdd,
        sum(case when high >= %s then 1 else 0 end) as days_high_above
        from """ + table + """ WHERE year >= %s and year < %s
        and month in %s GROUP by station, year),
    p1agg as (
        SELECT station, avg(total_precip) as precip,
        avg(avg_temp) as avg_temp, avg(avg_high) as avg_high,
        avg(avg_low) as avg_low, avg(sum_sdd) as sdd,
        avg(sum_gdd) as gdd, avg(days_high_above) as avg_days_high_above,
        count(*) as count
        from period1 GROUP by station),
    p2agg as (
        SELECT station, avg(total_precip) as precip,
        avg(avg_temp) as avg_temp, avg(avg_high) as avg_high,
        avg(avg_low) as avg_low, avg(sum_sdd) as sdd,
        avg(sum_gdd) as gdd, avg(days_high_above) as avg_days_high_above,
        count(*) as count
        from period2 GROUP by station),
    agg as (
        SELECT p2.station, p2.precip as p2_total_precip,
        p1.precip as p1_total_precip, p2.gdd as p2_gdd, p1.gdd as p1_gdd,
        p2.sdd as p2_sdd, p1.sdd as p1_sdd,
        p2.avg_temp as p2_avg_temp, p1.avg_temp as p1_avg_temp,
        p1.avg_high as p1_avg_high, p2.avg_high as p2_avg_high,
        p1.avg_low as p1_avg_low, p2.avg_low as p2_avg_low,
        p1.avg_days_high_above as p1_days_high_above,
        p2.avg_days_high_above as p2_days_high_above
        from p1agg p1 JOIN p2agg p2 on (p1.station = p2.station)
        WHERE p1.count >= %s and p2.count >= %s)
    SELECT station, ST_X(geom) as lon, ST_Y(geom) as lat, d.* from agg d
    JOIN stations t ON (d.station = t.id) WHERE t.network ~* 'CLIMATE'
    and substr(station, 3, 1) != 'C' and substr(station, 3, 4) != '0000'
    """, pgconn, params=[threshold, p1syear, p1eyear, tuple(months),
                         threshold, p2syear, p2eyear, tuple(months),
                         p1yearreq, p2yearreq], index_col=None)
    if df.empty:
        # nothing met the period/coverage requirements
        raise NoDataFound("No Data Found.")
    # period-two minus period-one deltas for each variable of interest
    df['total_precip'] = df['p2_total_precip'] - df['p1_total_precip']
    df['avg_temp'] = df['p2_avg_temp'] - df['p1_avg_temp']
    df['avg_high'] = df['p2_avg_high'] - df['p1_avg_high']
    df['avg_low'] = df['p2_avg_low'] - df['p1_avg_low']
    df['gdd'] = df['p2_gdd'] - df['p1_gdd']
    df['sdd'] = df['p2_sdd'] - df['p1_sdd']
    df['days_high_above'] = (df['p2_days_high_above'] -
                             df['p1_days_high_above'])
    column = varname
    title = "%s %s" % (MDICT[month], PDICT3[varname])
    title = title.replace("[Threshold]", '%.1f' % (threshold,))
    if opt1 == 'p1':
        # plot the period-one climatology itself, not a difference
        column = 'p1_%s' % (varname,)
        title = '%.0f-%.0f %s' % (p1syear, p1eyear, title)
    else:
        title = ('%.0f-%.0f minus %.0f-%.0f %s Difference'
                 ) % (p2syear, p2eyear, p1syear, p1eyear, title)
    # Reindex so that most extreme values are first
    df = df.reindex(df[column].abs().sort_values(ascending=False).index)
    # drop 5% most extreme events, too much?
    df2 = df.iloc[int(len(df.index) * 0.05):]
    mp = MapPlot(sector=sector, state=state, axisbg='white', title=title,
                 subtitle=('based on IEM Archives'), titlefontsize=12)
    if opt1 == 'diff':
        # Create 9 levels centered on zero
        abval = df2[column].abs().max()
        levels = centered_bins(abval)
    else:
        # decile-based levels for the raw climatology
        levels = [round(v, PRECISION[varname])
                  for v in np.percentile(df2[column].values,
                                         range(0, 101, 10))]
    if opt in ['both', 'contour']:
        mp.contourf(df2['lon'].values, df2['lat'].values,
                    df2[column].values, levels,
                    cmap=plt.get_cmap(('seismic_r'
                                       if varname == 'total_precip'
                                       else 'seismic')),
                    units=UNITS[varname])
        if sector == 'state':
            mp.drawcounties()
    if opt in ['both', 'values']:
        mp.plot_values(df2['lon'].values, df2['lat'].values,
                       df2[column].values,
                       fmt='%%.%if' % (PRECISION[varname],), labelbuffer=5)
    return mp.fig, df
def plotter(fdict):
    """Map freeze-season changes between two year periods.

    Computes per-station average first-fall/last-spring freeze dates (day of
    year, low < 32F) for two periods and maps the period-two minus
    period-one delta for the chosen variable.  Returns (figure, DataFrame).
    """
    pgconn = get_dbconn('coop')
    ctx = get_autoplot_context(fdict, get_description())
    state = ctx['state'][:2]
    sector = ctx['sector']
    opt = ctx['opt']
    p1syear = ctx['p1syear']
    p1eyear = ctx['p1eyear']
    p2syear = ctx['p2syear']
    p2eyear = ctx['p2eyear']
    varname = ctx['var']
    table = "alldata"
    if sector == 'state':
        # per-state tables are much smaller
        table = "alldata_%s" % (state, )
    df = read_sql("""
    WITH season1 as (
        SELECT station, year,
        min(case when month > 7 and low < 32 then
            extract(doy from day) else 366 end) as first_freeze,
        max(case when month < 7 and low < 32 then
            extract(doy from day) else 0 end) as last_freeze
        from """ + table + """ WHERE year >= %s and year <= %s
        GROUP by station, year),
    season2 as (
        SELECT station, year,
        min(case when month > 7 and low < 32 then
            extract(doy from day) else 366 end) as first_freeze,
        max(case when month < 7 and low < 32 then
            extract(doy from day) else 0 end) as last_freeze
        from """ + table + """ WHERE year >= %s and year <= %s
        GROUP by station, year),
    agg as (
        SELECT p1.station,
        avg(p1.first_freeze) as p1_first_fall,
        avg(p1.last_freeze) as p1_last_spring,
        avg(p2.first_freeze) as p2_first_fall,
        avg(p2.last_freeze) as p2_last_spring
        from season1 as p1 JOIN season2 as p2 on (p1.station = p2.station)
        GROUP by p1.station)
    SELECT station, ST_X(geom) as lon, ST_Y(geom) as lat, d.* from agg d
    JOIN stations t ON (d.station = t.id) WHERE t.network ~* 'CLIMATE'
    and substr(station, 3, 1) != 'C' and substr(station, 3, 4) != '0000'
    """, pgconn, params=[p1syear, p1eyear, p2syear, p2eyear],
                  index_col='station')
    if df.empty:
        raise NoDataFound('No Data Found')
    # Season length plus per-variable period deltas (days).
    df['p1_season'] = df['p1_first_fall'] - df['p1_last_spring']
    df['p2_season'] = df['p2_first_fall'] - df['p2_last_spring']
    df['season_delta'] = df['p2_season'] - df['p1_season']
    df['spring_delta'] = df['p2_last_spring'] - df['p1_last_spring']
    df['fall_delta'] = df['p2_first_fall'] - df['p1_first_fall']
    # Reindex so that most extreme values are first
    df = df.reindex(df[varname + '_delta'].abs().sort_values(
        ascending=False).index)
    title = PDICT3[varname]
    mp = MapPlot(sector=sector, state=state, axisbg='white',
                 title=('%.0f-%.0f minus %.0f-%.0f %s Difference'
                        ) % (p2syear, p2eyear, p1syear, p1eyear, title),
                 subtitle=('based on IEM Archives'), titlefontsize=14)
    # Create 9 levels centered on zero
    abval = df[varname + '_delta'].abs().max()
    levels = centered_bins(abval)
    if opt in ['both', 'contour']:
        mp.contourf(df['lon'].values, df['lat'].values,
                    df[varname + '_delta'].values, levels,
                    cmap=plt.get_cmap(ctx['cmap']), units='days')
        if sector == 'state':
            mp.drawcounties()
    if opt in ['both', 'values']:
        mp.plot_values(df['lon'].values, df['lat'].values,
                       df[varname + '_delta'].values,
                       fmt='%.1f', labelbuffer=5)
    return mp.fig, df
def plotter(fdict):
    """Map VTEC event counts/days/percent-active by NWS office.

    Builds a per-WFO statistic over the requested period for up to four
    phenomena/significance pairs and fills the national CWA map.
    Returns (figure, DataFrame).
    """
    pgconn = get_dbconn('postgis')
    ctx = get_autoplot_context(fdict, get_description())
    sts = ctx['sdate']
    sts = sts.replace(tzinfo=pytz.UTC)
    ets = ctx['edate']
    ets = ets.replace(tzinfo=pytz.UTC)
    p1 = ctx['phenomenav1']
    p2 = ctx['phenomenav2']
    p3 = ctx['phenomenav3']
    p4 = ctx['phenomenav4']
    varname = ctx['var']
    # Collect the selected (up to four) phenomena codes.
    phenomena = []
    for p in [p1, p2, p3, p4]:
        if p is not None:
            phenomena.append(p[:2])
    s1 = ctx['significancev1']
    s2 = ctx['significancev2']
    s3 = ctx['significancev3']
    s4 = ctx['significancev4']
    significance = []
    for s in [s1, s2, s3, s4]:
        if s is not None:
            significance.append(s[0])
    # Build the OR'd SQL predicate plus title/subtitle text.  Codes come
    # from a fixed vocabulary upstream, not free user text.
    pstr = []
    subtitle = ""
    title = ""
    for p, s in zip(phenomena, significance):
        pstr.append("(phenomena = '%s' and significance = '%s')" % (p, s))
        subtitle += "%s.%s " % (p, s)
        title += vtec.get_ps_string(p, s)
    if len(phenomena) > 1:
        title = "VTEC Unique Event"
    pstr = " or ".join(pstr)
    pstr = "(%s)" % (pstr, )
    cmap = plt.get_cmap(ctx['cmap'])
    if varname == 'count':
        df = read_sql("""
    with total as (
    select distinct wfo, extract(year from issue at time zone 'UTC') as year,
    phenomena, significance, eventid from warnings
    where """ + pstr + """ and
    issue >= %s and issue < %s
    )
    SELECT wfo, phenomena, significance, year, count(*) from total
    GROUP by wfo, phenomena, significance, year
        """, pgconn, params=(sts, ets))
        df2 = df.groupby('wfo')['count'].sum()
        maxv = df2.max()
        # Bin edges scale with the largest office count.
        bins = [0, 1, 2, 3, 5, 10, 15, 20, 25, 30, 40, 50, 75, 100, 200]
        if maxv > 5000:
            bins = [
                0, 5, 10, 50, 100, 250, 500, 750, 1000, 1500, 2000, 3000,
                5000, 7500, 10000
            ]
        elif maxv > 1000:
            bins = [
                0, 1, 5, 10, 50, 100, 150, 200, 250, 500, 750, 1000, 1250,
                1500, 2000
            ]
        elif maxv > 200:
            bins = [
                0, 1, 3, 5, 10, 20, 35, 50, 75, 100, 150, 200, 250, 500,
                750, 1000
            ]
        units = 'Count'
        lformat = '%.0f'
    elif varname == 'days':
        # Expand each event into minutes within the window, then count the
        # distinct calendar days with any active event.  The +/- 90 day pad
        # on issue/expire keeps long-running events in scope.
        df = read_sql("""
    WITH data as (
        SELECT distinct wfo, generate_series(greatest(issue, %s),
        least(expire, %s), '1 minute'::interval) as ts from warnings
        WHERE issue > %s and expire < %s and """ + pstr + """
    ), agg as (
        SELECT distinct wfo, date(ts) from data
    )
    select wfo, count(*) as days from agg GROUP by wfo ORDER by days DESC
        """, pgconn, params=(sts, ets,
                             sts - datetime.timedelta(days=90),
                             ets + datetime.timedelta(days=90)),
                      index_col='wfo')
        df2 = df['days']
        if df2.max() < 10:
            bins = list(range(1, 11, 1))
        else:
            bins = np.linspace(1, df['days'].max() + 11, 10, dtype='i')
        units = 'Days'
        lformat = '%.0f'
        cmap.set_under('white')
        cmap.set_over('#EEEEEE')
    else:
        # percent of the period with at least one active event
        total_minutes = (ets - sts).total_seconds() / 60.
        df = read_sql("""
    WITH data as (
        SELECT distinct wfo, generate_series(greatest(issue, %s),
        least(expire, %s), '1 minute'::interval) as ts from warnings
        WHERE issue > %s and expire < %s and """ + pstr + """
    )
    select wfo, count(*) / %s * 100. as tpercent from data
    GROUP by wfo ORDER by tpercent DESC
        """, pgconn, params=(sts, ets,
                             sts - datetime.timedelta(days=90),
                             ets + datetime.timedelta(days=90),
                             total_minutes), index_col='wfo')
        df2 = df['tpercent']
        bins = list(range(0, 101, 10))
        if df2.max() < 5:
            bins = np.arange(0, 5.1, 0.5)
        elif df2.max() < 10:
            bins = list(range(0, 11, 1))
        units = 'Percent'
        lformat = '%.1f'
    nt = NetworkTable("WFO")
    # Zero-fill offices that had no events so they still get drawn.
    for sid in nt.sts:
        sid = sid[-3:]
        if sid not in df2:
            df2[sid] = 0
    mp = MapPlot(sector='nws', axisbg='white',
                 title='%s %s by NWS Office' % (title, PDICT[varname]),
                 subtitle=('Valid %s - %s UTC, based on VTEC: %s') % (
                     sts.strftime("%d %b %Y %H:%M"),
                     ets.strftime("%d %b %Y %H:%M"), subtitle))
    mp.fill_cwas(df2, bins=bins, ilabel=True, units=units,
                 lblformat=lformat, cmap=cmap)
    return mp.fig, df
def plotter(fdict):
    """Map days back needed to accumulate a precipitation threshold.

    Walks backwards up to 90 days from the given date through the MRMS
    daily netCDF, recording for each grid cell the first day index at which
    the running total exceeds the threshold.  Returns the figure.
    """
    ctx = util.get_autoplot_context(fdict, get_description())
    date = ctx['date']
    sector = ctx['sector']
    threshold = ctx['threshold']
    # netCDF precip is stored in millimeters
    threshold_mm = distance(threshold, 'IN').value('MM')
    window_sts = date - datetime.timedelta(days=90)
    if window_sts.year != date.year:
        raise NoDataFound('Sorry, do not support multi-year plots yet!')
    # idx0 = iemre.daily_offset(window_sts)
    idx1 = iemre.daily_offset(date)
    ncfn = iemre.get_daily_mrms_ncname(date.year)
    if not os.path.isfile(ncfn):
        raise NoDataFound("No data found.")
    ncvar = 'p01d'
    # Get the state weight
    df = gpd.GeoDataFrame.from_postgis("""
    SELECT the_geom from states where state_abbr = %s
    """, util.get_dbconn('postgis'), params=(sector, ), index_col=None,
                                       geom_col='the_geom')
    czs = CachingZonalStats(iemre.MRMS_AFFINE)
    with util.ncopen(ncfn) as nc:
        czs.gen_stats(
            np.zeros((nc.variables['lat'].size, nc.variables['lon'].size)),
            df['the_geom'])
        jslice = None
        islice = None
        # NOTE(review): only the last gridnav entry's slices survive the
        # loop; presumably a single-state query yields one nav -- confirm.
        for nav in czs.gridnav:
            # careful here as y is flipped in this context
            jslice = slice(nc.variables['lat'].size - (nav.y0 + nav.ysz),
                           nc.variables['lat'].size - nav.y0)
            islice = slice(nav.x0, nav.x0 + nav.xsz)
        grid = np.zeros(
            (jslice.stop - jslice.start, islice.stop - islice.start))
        total = np.zeros(
            (jslice.stop - jslice.start, islice.stop - islice.start))
        # March backwards one day at a time; where the running total first
        # crosses the threshold, stamp the day count i into the grid.
        for i, idx in enumerate(range(idx1, idx1 - 90, -1)):
            total += nc.variables[ncvar][idx, jslice, islice]
            grid = np.where(np.logical_and(grid == 0,
                                           total > threshold_mm), i, grid)
        lon = nc.variables['lon'][islice]
        lat = nc.variables['lat'][jslice]
    mp = MapPlot(sector='state', state=sector, titlefontsize=14,
                 subtitlefontsize=12,
                 title=("NOAA MRMS Q3: Number of Recent Days "
                        "till Accumulating %s\" of Precip") % (threshold, ),
                 subtitle=("valid %s: based on per calendar day "
                           "estimated preciptation, GaugeCorr and "
                           "RadarOnly products") % (
                               date.strftime("%-d %b %Y"), ))
    x, y = np.meshgrid(lon, lat)
    cmap = plt.get_cmap(ctx['cmap'])
    cmap.set_over('k')
    cmap.set_under('white')
    mp.pcolormesh(x, y, grid, np.arange(0, 81, 10), cmap=cmap, units='days')
    mp.drawcounties()
    mp.drawcities()
    return mp.fig
def plotter(fdict):
    """Map a near-real-time METAR variable for a state or NWS CWA.

    Contours the requested variable over the selected domain, labels the
    in-domain station values, and returns (figure, DataFrame).
    """
    ctx = get_autoplot_context(fdict, get_description())
    field = ctx['v']
    # Domain bounds and heading depend on the plot type.
    if ctx['t'] == 'state':
        bnds = reference.state_bounds[ctx['state']]
        title = reference.state_names[ctx['state']]
    else:
        bnds = reference.wfo_bounds[ctx['wfo']]
        title = "NWS CWA %s [%s]" % (
            ctx['_nt'].sts[ctx['wfo']]['name'], ctx['wfo'])
    obs, valid = get_df(ctx, bnds)
    if obs.empty:
        raise NoDataFound("No data was found for your query")
    mp = MapPlot(
        sector=('state' if ctx['t'] == 'state' else 'cwa'),
        state=ctx['state'],
        cwa=(ctx['wfo'] if len(ctx['wfo']) == 3 else ctx['wfo'][1:]),
        axisbg='white',
        title='%s for %s' % (PDICT2[ctx['v']], title),
        subtitle=('Map valid: %s UTC') % (
            valid.strftime("%d %b %Y %H:%M"), ),
        nocaption=True,
        titlefontsize=16)
    if field == 'vsby':
        # fixed ramp for visibility
        ramp = np.array([0.01, 0.1, 0.25, 0.5, 1, 2, 3, 5, 8, 9.9])
        valunit = 'miles'
    elif field == 'feel':
        valunit = 'F'
        # derive feels-like temperature via metpy
        feel = apparent_temperature(
            obs['tmpf'].values * units('degF'),
            obs['relh'].values * units('percent'),
            obs['sknt'].values * units('knots'))
        obs['feel'] = feel.to(units('degF')).m
    # Data QC, cough
    if ctx.get('above'):
        obs = obs[obs[field] < ctx['above']]
    if ctx.get('below'):
        obs = obs[obs[field] > ctx['below']]
    # with QC done, we compute ramps
    if field != 'vsby':
        # integer ramp padded 5 units either side of the observed range
        ramp = np.linspace(
            obs[field].min() - 5, obs[field].max() + 5, 10, dtype='i')
    mp.contourf(
        obs['lon'].values, obs['lat'].values, obs[field].values, ramp,
        units=valunit, cmap=plt.get_cmap(ctx['cmap']))
    # Restrict the labeled stations to the plotted domain.
    if ctx['t'] == 'state':
        indomain = obs[obs['state'] == ctx['state']]
    else:
        indomain = obs[obs['wfo'] == ctx['wfo']]
    mp.plot_values(
        indomain['lon'].values, indomain['lat'].values,
        indomain[field].values, '%.1f', labelbuffer=10)
    mp.drawcounties()
    if ctx['t'] == 'cwa':
        mp.draw_cwas()
    return mp.fig, obs
def plotter(fdict):
    """Map accumulated precipitation (total, departure, or percent).

    Reads daily precip from the chosen source's netCDF archive (MRMS,
    IEM Reanalysis, or PRISM) over the requested date span, optionally
    compares against climatology, and renders the map.  Returns the figure.
    """
    ctx = util.get_autoplot_context(fdict, get_description())
    ptype = ctx['ptype']
    sdate = ctx['sdate']
    edate = ctx['edate']
    src = ctx['src']
    opt = ctx['opt']
    usdm = ctx['usdm']
    if sdate.year != edate.year:
        raise NoDataFound('Sorry, do not support multi-year plots yet!')
    days = (edate - sdate).days
    sector = ctx['sector']
    if sdate == edate:
        title = sdate.strftime("%-d %B %Y")
    else:
        title = "%s to %s (inclusive)" % (sdate.strftime("%-d %b"),
                                          edate.strftime("%-d %b %Y"))
    # Default grid bounds cover the whole domain until narrowed below.
    x0 = 0
    x1 = -1
    y0 = 0
    y1 = -1
    state = None
    # two-letter sectors are state plots
    if len(sector) == 2:
        state = sector
        sector = 'state'
    # Pick the data/climatology files and labels for the chosen source.
    if src == 'mrms':
        ncfn = iemre.get_daily_mrms_ncname(sdate.year)
        clncfn = iemre.get_dailyc_mrms_ncname()
        ncvar = 'p01d'
        source = 'MRMS Q3'
        subtitle = 'NOAA MRMS Project, GaugeCorr and RadarOnly'
    elif src == 'iemre':
        ncfn = iemre.get_daily_ncname(sdate.year)
        clncfn = iemre.get_dailyc_ncname()
        ncvar = 'p01d_12z'
        source = 'IEM Reanalysis'
        subtitle = 'IEM Reanalysis is derived from various NOAA datasets'
    else:
        ncfn = "/mesonet/data/prism/%s_daily.nc" % (sdate.year, )
        clncfn = "/mesonet/data/prism/prism_dailyc.nc"
        ncvar = 'ppt'
        source = 'OSU PRISM'
        subtitle = ('PRISM Climate Group, Oregon State Univ., '
                    'http://prism.oregonstate.edu, created 4 Feb 2004.')
    mp = MapPlot(sector=sector, state=state, axisbg='white', nocaption=True,
                 title='%s:: %s Precip %s' % (source, title, PDICT3[opt]),
                 subtitle='Data from %s' % (subtitle, ), titlefontsize=14)
    # time-axis offsets into the yearly netCDF file (end-exclusive)
    idx0 = iemre.daily_offset(sdate)
    idx1 = iemre.daily_offset(edate) + 1
    if not os.path.isfile(ncfn):
        raise NoDataFound("No data for that year, sorry.")
    with util.ncopen(ncfn) as nc:
        # Narrow the grid to the state or named sector bounds.
        if state is not None:
            x0, y0, x1, y1 = util.grid_bounds(nc.variables['lon'][:],
                                              nc.variables['lat'][:],
                                              state_bounds[state])
        elif sector in SECTORS:
            bnds = SECTORS[sector]
            x0, y0, x1, y1 = util.grid_bounds(
                nc.variables['lon'][:], nc.variables['lat'][:],
                [bnds[0], bnds[2], bnds[1], bnds[3]])
        lats = nc.variables['lat'][y0:y1]
        lons = nc.variables['lon'][x0:x1]
        if sdate == edate:
            # single day: no summation needed
            p01d = distance(nc.variables[ncvar][idx0, y0:y1, x0:x1],
                            'MM').value('IN')
        elif (idx1 - idx0) < 32:
            p01d = distance(
                np.sum(nc.variables[ncvar][idx0:idx1, y0:y1, x0:x1], 0),
                'MM').value('IN')
        else:
            # Too much data can overwhelm this app, need to chunk it
            for i in range(idx0, idx1, 10):
                i2 = min([i + 10, idx1])
                if idx0 == i:
                    p01d = distance(
                        np.sum(nc.variables[ncvar][i:i2, y0:y1, x0:x1], 0),
                        'MM').value('IN')
                else:
                    p01d += distance(
                        np.sum(nc.variables[ncvar][i:i2, y0:y1, x0:x1], 0),
                        'MM').value('IN')
    # an entirely-masked grid means the archive has no data yet
    if np.ma.is_masked(np.max(p01d)):
        raise NoDataFound("Data Unavailable")
    units = 'inches'
    cmap = plt.get_cmap(ctx['cmap'])
    cmap.set_bad('white')
    if opt == 'dep':
        # Do departure work now
        with util.ncopen(clncfn) as nc:
            climo = distance(
                np.sum(nc.variables[ncvar][idx0:idx1, y0:y1, x0:x1], 0),
                'MM').value('IN')
        p01d = p01d - climo
        # symmetric levels from the 99th percentile of |departure|
        [maxv] = np.percentile(np.abs(p01d), [99, ])
        clevs = np.around(np.linspace(0 - maxv, maxv, 11), decimals=2)
    elif opt == 'per':
        with util.ncopen(clncfn) as nc:
            climo = distance(
                np.sum(nc.variables[ncvar][idx0:idx1, y0:y1, x0:x1], 0),
                'MM').value('IN')
        p01d = p01d / climo * 100.
        cmap.set_under('white')
        cmap.set_over('black')
        clevs = [1, 10, 25, 50, 75, 100, 125, 150, 200, 300, 500]
        units = 'percent'
    else:
        # mask trace amounts; levels widen with the accumulation window
        p01d = np.where(p01d < 0.001, np.nan, p01d)
        cmap.set_under('white')
        clevs = [0.01, 0.1, 0.3, 0.5, 1, 1.5, 2, 2.5, 3, 3.5, 4, 5, 6, 8,
                 10]
        if days > 6:
            clevs = [0.01, 0.3, 0.5, 1, 1.5, 2, 3, 4, 5, 6, 7, 8, 10, 15,
                     20]
        if days > 29:
            clevs = [0.01, 0.5, 1, 2, 3, 4, 5, 6, 8, 10, 15, 20, 25, 30,
                     35]
        if days > 90:
            clevs = [0.01, 1, 2, 3, 4, 5, 6, 8, 10, 15, 20, 25, 30, 35, 40]
    x2d, y2d = np.meshgrid(lons, lats)
    if ptype == 'c':
        mp.contourf(x2d, y2d, p01d, clevs, cmap=cmap, units=units,
                    iline=False)
    else:
        res = mp.pcolormesh(x2d, y2d, p01d, clevs, cmap=cmap, units=units)
        # keep vector output sizes manageable
        res.set_rasterized(True)
    if sector != 'midwest':
        mp.drawcounties()
        mp.drawcities()
    if usdm == 'yes':
        mp.draw_usdm(edate, filled=False, hatched=True)
    return mp.fig
def plotter(fdict):
    """Heatmap of overcast ceiling frequency by week and ceiling level.

    Bins hourly METAR overcast (OVC) reports into week-of-year by
    ceiling-level cells and plots the per-year observation frequency.
    Returns (figure, DataFrame of bin counts).

    Raises NoDataFound when no observations or station metadata exist
    (consistent with the other plotters in this file, which the autoplot
    framework handles gracefully; previously this raised a bare
    ValueError and could crash with AttributeError on a missing
    archive_begin).
    """
    pgconn = get_dbconn('asos')
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['zstation']
    network = ctx['network']
    nt = NetworkTable(network)
    df = read_sql("""
    select extract(doy from valid) as doy,
    greatest(skyl1, skyl2, skyl3, skyl4) as sky from alldata
    WHERE station = %s and (skyc1 = 'OVC' or skyc2 = 'OVC' or
    skyc3 = 'OVC' or skyc4 = 'OVC') and valid > '1973-01-01'
    and (extract(minute from valid) = 0 or extract(minute from valid) > 50)
    and report_type = 2
    """, pgconn, params=(station, ), index_col=None)
    if df.empty:
        raise NoDataFound('Error, no results returned!')
    # weekly bins across the year
    w = np.arange(1, 366, 7)
    # irregular ceiling bins: 100ft steps low, coarser steps aloft
    z = np.array([
        100, 200, 300, 400, 500, 600, 700, 800, 900, 1000, 1100, 1200,
        1300, 1400, 1500, 1600, 1700, 1800, 1900, 2000, 2100, 2200, 2300,
        2400, 2500, 2600, 2700, 2800, 2900, 3000, 3100, 3200, 3300, 3400,
        3500, 3600, 3700, 3800, 3900, 4000, 4100, 4200, 4300, 4400, 4500,
        4600, 4700, 4800, 4900, 5000, 5500, 6000, 6500, 7000, 7500, 8000,
        8500, 9000, 9500, 10000, 11000, 12000, 13000, 14000, 15000, 16000,
        17000, 18000, 19000, 20000, 21000, 22000, 23000, 24000, 25000,
        26000, 27000, 28000, 29000, 30000, 31000
    ])
    H, xedges, yedges = np.histogram2d(df['sky'].values, df['doy'].values,
                                       bins=(z, w))
    # Flatten the 2D histogram into a tidy frame for the caller.
    rows = []
    for i, x in enumerate(xedges[:-1]):
        for j, y in enumerate(yedges[:-1]):
            rows.append(dict(ceiling=x, doy=y, count=H[i, j]))
    resdf = pd.DataFrame(rows)
    # mask empty cells so set_under paints them distinctly
    H = ma.array(H)
    H.mask = np.where(H < 1, True, False)
    (fig, ax) = plt.subplots(1, 1)
    bounds = np.arange(0, 1.2, 0.1)
    bounds = np.concatenate((bounds, np.arange(1.2, 2.2, 0.2)))
    cmap = plt.get_cmap('jet')
    cmap.set_under('#F9CCCC')
    norm = mpcolors.BoundaryNorm(bounds, cmap.N)
    # Guard against missing station metadata before dereferencing .year
    # (same pattern as the hourly temp departure plotter in this file).
    ab = nt.sts[station]['archive_begin']
    if ab is None:
        raise NoDataFound("Unknown station metadata.")
    # the query starts at 1973 regardless of the archive start
    syear = max([1973, ab.year])
    years = (datetime.date.today().year - syear) + 1.
    c = ax.imshow(H / years, aspect='auto', interpolation='nearest',
                  norm=norm)
    ax.set_ylim(-0.5, len(z) - 0.5)
    # label a subset of the irregular ceiling levels
    idx = [0, 4, 9, 19, 29, 39, 49, 54, 59, 64, 69, 74, 79]
    ax.set_yticks(idx)
    ax.set_yticklabels(z[idx])
    ax.set_title(("%s-%s [%s %s Ceilings Frequency\n"
                  "Level at which Overcast Conditions Reported") % (
                      syear, datetime.date.today().year, station,
                      nt.sts[station]['name']))
    ax.set_ylabel("Overcast Level [ft AGL], irregular scale")
    ax.set_xlabel("Week of the Year")
    ax.set_xticks(np.arange(1, 55, 7))
    ax.set_xticklabels(('Jan 1', 'Feb 19', 'Apr 8', 'May 27', 'Jul 15',
                        'Sep 2', 'Oct 21', 'Dec 9'))
    b = fig.colorbar(c)
    b.set_label("Hourly Obs per week per year")
    return fig, resdf
def plotter(fdict):
    """Plot one year of daily temperature/GDD departures or percentiles.

    For the requested station and year, compute each day's departure,
    standard-deviation departure, or percentile rank versus the full
    period-of-record climatology for that calendar day, and render the
    result as a bar chart.  Returns the figure and the queried DataFrame.
    """
    pgconn = get_dbconn("coop")
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx["station"]
    year = ctx["year"]
    varname = ctx["var"]  # a PDICT key selecting high/low/avg temp or gdd
    how = ctx["how"]  # metric: 'diff', 'ptile', or (presumably) sigma
    gddbase = ctx["gddbase"]
    gddceil = ctx["gddceil"]
    # Per-state climate table, e.g. 'alldata_ia' for station 'IA0000'
    table = "alldata_%s" % (station[:2], )
    # 'data' ranks every day within its calendar day (sday) for percentiles;
    # 'climo' aggregates per-calendar-day means and stddevs.  The final
    # SELECT joins the two and keeps only the requested year.  Params are
    # positional, so gddbase/gddceil/station repeat for each CTE.
    df = read_sql(
        """
        WITH data as (
            select day, year, sday, high, low, (high+low)/2. as temp,
            gddxx(%s, %s, high, low) as gdd,
            rank() OVER (PARTITION by sday ORDER by high ASC)
                as high_ptile,
            rank() OVER (PARTITION by sday ORDER by (high+low)/2. ASC)
                as temp_ptile,
            rank() OVER (PARTITION by sday ORDER by low ASC)
                as low_ptile,
            rank() OVER (PARTITION by sday
                ORDER by gddxx(%s, %s, high, low) ASC) as gdd_ptile
            from """ + table + """ where station = %s
        ), climo as (
            SELECT sday, avg(high) as avg_high, avg(low) as avg_low,
            avg((high+low)/2.) as avg_temp, stddev(high) as stddev_high,
            stddev(low) as stddev_low,
            stddev((high+low)/2.) as stddev_temp,
            avg(gddxx(%s, %s, high, low)) as avg_gdd,
            stddev(gddxx(%s, %s, high, low)) as stddev_gdd,
            count(*)::float as years
            from """ + table + """ WHERE station = %s GROUP by sday
        )
        SELECT day,
        d.high - c.avg_high as high_diff,
        (d.high - c.avg_high) / c.stddev_high as high_sigma,
        d.low - c.avg_low as low_diff,
        (d.low - c.avg_low) / c.stddev_low as low_sigma,
        d.temp - c.avg_temp as avg_diff,
        (d.temp - c.avg_temp) / c.stddev_temp as avg_sigma,
        d.gdd - c.avg_gdd as gdd_diff,
        (d.gdd - c.avg_gdd) / greatest(c.stddev_gdd, 0.1) as gdd_sigma,
        d.high, c.avg_high, d.low, c.avg_low,
        d.temp, c.avg_temp, d.gdd, c.avg_gdd,
        high_ptile / years * 100. as high_ptile,
        low_ptile / years * 100. as low_ptile,
        temp_ptile / years * 100. as temp_ptile,
        gdd_ptile / years * 100. as gdd_ptile
        from data d JOIN climo c on (c.sday = d.sday)
        WHERE d.year = %s ORDER by day ASC
        """,
        pgconn,
        params=(
            gddbase,
            gddceil,
            gddbase,
            gddceil,
            station,
            gddbase,
            gddceil,
            gddbase,
            gddceil,
            station,
            year,
        ),
        index_col=None,
    )
    (fig, ax) = plt.subplots(1, 1)
    # Column name combines variable and metric, e.g. 'high_sigma'
    diff = df[varname + "_" + how].values
    if how == "ptile" and "cmap" in ctx:
        # Percentiles: color each bar by decile with the user's colormap
        bins = range(0, 101, 10)
        cmap = plt.get_cmap(ctx["cmap"])
        norm = mpcolors.BoundaryNorm(bins, cmap.N)
        colors = cmap(norm(diff))
        ax.bar(df["day"].values, diff, color=colors, align="center")
        ax.set_yticks(bins)
    else:
        # Departures: draw all bars blue, then recolor positive ones red
        bars = ax.bar(df["day"].values, diff, fc="b", ec="b",
                      align="center")
        for i, _bar in enumerate(bars):
            if diff[i] > 0:
                _bar.set_facecolor("r")
                _bar.set_edgecolor("r")
    ax.grid(True)
    if how == "diff":
        ax.set_ylabel(r"%s Departure $^\circ$F" % (PDICT[varname], ))
    elif how == "ptile":
        ax.set_ylabel("%s Percentile (100 highest)" % (PDICT[varname], ))
    else:
        ax.set_ylabel(r"%s Std Dev Departure ($\sigma$)"
                      % (PDICT[varname], ))
    if varname == "gdd":
        ax.set_xlabel("Growing Degree Day Base: %s Ceiling: %s"
                      % (gddbase, gddceil))
    ax.set_title(("%s %s\nYear %s Daily %s %s") % (
        station,
        ctx["_nt"].sts[station]["name"],
        year,
        PDICT[varname],
        "Departure" if how != "ptile" else "Percentile",
    ))
    # Month abbreviations at each month's first day along the x axis
    ax.xaxis.set_major_formatter(mdates.DateFormatter("%b"))
    ax.xaxis.set_major_locator(mdates.DayLocator(1))
    return fig, df
def plotter(fdict):
    """Render a heatmap of daily snow depth by winter season.

    Rows are seasons (labeled by the year of their Nov/Dec), columns span
    the Nov 1 - Apr 1 window; cell color is the reported snow depth in
    inches.  Returns the matplotlib figure.
    """
    pgconn = get_dbconn('coop')
    cursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['station']
    syear = ctx['syear']
    eyear = ctx['eyear']
    # Query window: Nov 1 of the first season through Jun 1 after the last
    sts = datetime.date(syear, 11, 1)
    ets = datetime.date(eyear + 1, 6, 1)
    table = "alldata_%s" % (station[:2], )
    nt = NetworkTable("%sCLIMATE" % (station[:2], ))
    # NOTE(review): the user-selected syear/eyear are overwritten here, so
    # the grid is always sized to the full archive; only sts/ets above
    # still honor the user's choice -- confirm this is intended.
    syear = nt.sts[station]['archive_begin'].year
    eyear = datetime.datetime.now().year
    # One row per season, 153 columns covering the winter window;
    # -1 is a missing-data sentinel that gets masked below
    obs = np.ma.ones((eyear - syear + 1, 153), 'f') * -1
    cursor.execute(
        """
        SELECT year, extract(doy from day), snowd, day from """
        + table + """
        WHERE station = %s and month in (11,12,1,2,3) and snowd >= 0
        and day between %s and %s
        """, (station, sts, ets))
    minyear = 2050
    maxyear = 1900
    for row in cursor:
        year = row[0]
        if year < minyear:
            minyear = year
        if row[3].month > 6 and year > maxyear:
            maxyear = year
        doy = row[1]
        val = row[2]
        # Fold day-of-year onto a season axis anchored near Nov 1:
        # Nov/Dec (doy > 180) become negative offsets within the labeled
        # season; Jan-Mar dates belong to the previous season's row.
        # NOTE(review): Nov 1 (doy 305) maps to column 1 (2 in leap years)
        # while the x tick at column 0 is labeled 'Nov 1' -- looks like a
        # one-column offset; verify against the rendered axis.
        if doy > 180:
            doy = doy - 365
        else:
            year -= 1
        obs[year - syear, int(doy + 61)] = val
    # Mask the -1 sentinel cells so they render via cmap.set_bad below
    obs.mask = np.where(obs < 0, True, False)
    # obs[obs == 0] = -1
    fig = plt.figure(figsize=(8, 8))
    ax = fig.add_subplot(111)
    ax.set_xticks((0, 29, 60, 91, 120, 151))
    ax.set_xticklabels(('Nov 1', 'Dec 1', 'Jan 1', 'Feb 1', 'Mar 1',
                        'Apr 1'))
    ax.set_ylabel('Year of Nov,Dec of Season Labeled')
    ax.set_xlabel('Date of Winter Season')
    ax.set_title(('[%s] %s\nDaily Snow Depth (%s-%s) [inches]'
                  '') % (station, nt.sts[station]['name'], minyear,
                         eyear))
    cmap = plt.get_cmap("jet")
    # Irregular depth bins [inches]; depths below 0.01 render white,
    # masked (missing) cells render light grey
    norm = mpcolors.BoundaryNorm(
        [0.01, 0.1, 1, 2, 3, 4, 5, 6, 9, 12, 15, 18, 21, 24, 30, 36],
        cmap.N)
    cmap.set_bad('#EEEEEE')
    cmap.set_under('white')
    res = ax.imshow(obs, aspect='auto', rasterized=True, norm=norm,
                    interpolation='nearest', cmap=cmap,
                    extent=[0, 152, eyear + 1 - 0.5, syear - 0.5])
    fig.colorbar(res)
    ax.grid(True)
    # Trim the y axis to the seasons that actually had data
    ax.set_ylim(maxyear + 0.5, minyear - 0.5)
    return fig