def two(year):
    """Compare yearly precipitation totals in a map and scatter plot.

    For the given year, pulls station precip totals from the ``coop``
    database, locates the HUC12 polygon containing each station in the
    ``idep`` database, and compares the HUC12 modelled precip against the
    point observation.  Writes ``<year>_map.png`` and ``<year>_xy.png``.

    Args:
        year (int): calendar year to compare.
    """
    coop = psycopg2.connect(database='coop', host='localhost', port=5555,
                            user='******')
    ccursor = coop.cursor()
    idep = psycopg2.connect(database='idep', host='localhost', port=5555,
                            user='******')
    icursor = idep.cursor()

    # Yearly precip total per climodat station, skipping the statewide
    # pseudo station (IA0000) and climate districts ('C' in 3rd char)
    ccursor.execute("""
        SELECT station, sum(precip) from alldata_ia WHERE year = %s
        and station != 'IA0000' and substr(station, 3, 1) != 'C'
        GROUP by station ORDER by station ASC
    """, (year, ))
    nt = NetworkTable("IACLIMATE")
    rows = []
    for row in ccursor:
        station = row[0]
        precip = row[1]
        if station not in nt.sts:
            continue
        lon = nt.sts[station]['lon']
        lat = nt.sts[station]['lat']
        # Find the HUC12 (geometry stored in EPSG:5070) containing the site
        icursor.execute("""
            select huc_12 from huc12 where ST_Contains(geom,
            ST_Transform(ST_SetSRID(ST_Point(%s, %s), 4326), 5070))
            and scenario = 0
        """, (lon, lat))
        if icursor.rowcount == 0:
            continue
        huc12 = icursor.fetchone()[0]
        # Sum the model's daily QC'd precip for that HUC12 over the year
        icursor.execute("""
            select sum(qc_precip) from results_by_huc12
            WHERE valid between %s and %s and huc_12 = %s and scenario = 0
        """, (datetime.date(year, 1, 1), datetime.date(year, 12, 31),
              huc12))
        val = icursor.fetchone()[0]
        if val is None:
            continue
        # model precip is mm, climodat obs are inches
        iprecip = distance(val, 'MM').value('IN')
        rows.append(
            dict(station=station, precip=precip, iprecip=iprecip,
                 lat=lat, lon=lon))
    df = pd.DataFrame(rows)
    df['diff'] = df['iprecip'] - df['precip']
    bias = df['diff'].mean()
    print("%s %5.2f %5.2f %5.2f" % (year, df['iprecip'].mean(),
                                    df['precip'].mean(), bias))
    # Map of per-station model-minus-obs differences
    m = MapPlot(title=("%s IDEP Precipitation minus IEM Climodat (inch)"
                       ) % (year, ),
                subtitle=("HUC12 Average minus point observation, "
                          "Overall bias: %.2f") % (bias, ),
                axisbg='white')
    m.plot_values(df['lon'], df['lat'], df['diff'], fmt='%.2f',
                  labelbuffer=1)
    m.postprocess(filename='%s_map.png' % (year, ))
    m.close()
    # Scatter of obs vs model with a one-to-one reference line
    (fig, ax) = plt.subplots(1, 1)
    ax.scatter(df['precip'], df['iprecip'])
    ax.grid(True)
    ylim = ax.get_ylim()
    ax.plot([ylim[0], ylim[1]], [ylim[0], ylim[1]], lw=2)
    ax.set_xlabel("IEM Climodat Precip")
    ax.set_ylabel("IDEP HUC12 Precip")
    ax.set_title("%s Precipitation Comparison, bias=%.2f" % (year, bias))
    fig.savefig('%s_xy.png' % (year, ))
    plt.close()
def plotter(fdict):
    """Heatmap of daily snow depth by winter season for a climodat site.

    Rows are winter seasons (labeled by the year of their Nov/Dec), columns
    span 1 Nov through 1 Apr.  Returns the matplotlib Figure.
    """
    import matplotlib
    matplotlib.use('agg')
    import matplotlib.pyplot as plt
    import matplotlib.colors as mpcolors
    pgconn = get_dbconn('coop')
    cursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['station']
    syear = ctx['syear']
    eyear = ctx['eyear']
    sts = datetime.date(syear, 11, 1)
    ets = datetime.date(eyear + 1, 6, 1)
    table = "alldata_%s" % (station[:2], )
    nt = NetworkTable("%sCLIMATE" % (station[:2], ))
    # NOTE(review): the user-supplied syear/eyear only feed sts/ets above;
    # they are overwritten here with the full archive span -- confirm this
    # is intentional
    syear = nt.sts[station]['archive_begin'].year
    eyear = datetime.datetime.now().year
    # one row per season, 153 columns = Nov 1 .. Apr 1; -1 marks missing
    obs = np.ma.ones((eyear - syear + 1, 153), 'f') * -1
    cursor.execute("""
        SELECT year, extract(doy from day), snowd, day from """ + table + """
        WHERE station = %s and month in (11,12,1,2,3) and snowd >= 0
        and day between %s and %s
    """, (station, sts, ets))
    minyear = 2050
    maxyear = 1900
    for row in cursor:
        year = row[0]
        if year < minyear:
            minyear = year
        # only Jul-Dec observations advance the max season year
        if row[3].month > 6 and year > maxyear:
            maxyear = year
        doy = row[1]
        val = row[2]
        # re-base day-of-year so each Nov-Apr season is contiguous;
        # Jan-Mar days belong to the previous season's label year
        if doy > 180:
            doy = doy - 365
        else:
            year -= 1
        obs[year - syear, int(doy + 61)] = val
    obs.mask = np.where(obs < 0, True, False)
    fig = plt.figure(figsize=(8, 8))
    ax = fig.add_subplot(111)
    ax.set_xticks((0, 29, 60, 91, 120, 151))
    ax.set_xticklabels(('Nov 1', 'Dec 1', 'Jan 1', 'Feb 1', 'Mar 1',
                        'Apr 1'))
    ax.set_ylabel('Year of Nov,Dec of Season Labeled')
    ax.set_xlabel('Date of Winter Season')
    ax.set_title(('[%s] %s\nDaily Snow Depth (%s-%s) [inches]'
                  '') % (station, nt.sts[station]['name'], minyear, eyear))
    cmap = plt.get_cmap("jet")
    norm = mpcolors.BoundaryNorm(
        [0.01, 0.1, 1, 2, 3, 4, 5, 6, 9, 12, 15, 18, 21, 24, 30, 36],
        cmap.N)
    cmap.set_bad('#EEEEEE')   # masked / missing data
    cmap.set_under('white')   # zero depth falls below the first boundary
    res = ax.imshow(obs, aspect='auto', rasterized=True, norm=norm,
                    interpolation='nearest', cmap=cmap,
                    extent=[0, 152, eyear + 1 - 0.5, syear - 0.5])
    fig.colorbar(res)
    ax.grid(True)
    ax.set_ylim(maxyear + 0.5, minyear - 0.5)
    return fig
"""Download and process the scan dataset""" from __future__ import print_function import datetime import sys import pytz import requests import urllib3 from pyiem.datatypes import temperature from pyiem.observation import Observation from pyiem.network import Table as NetworkTable from pyiem.util import get_dbconn # Stop the SSL cert warning :/ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) nt = NetworkTable("SCAN") SCAN = get_dbconn('scan') scursor = SCAN.cursor() ACCESS = get_dbconn('iem') icursor = ACCESS.cursor() mapping = { 'Site Id': { 'iemvar': 'station', 'multiplier': 1 }, 'Date': { 'iemvar': '', 'multiplier': 1 }, 'Time (CST)': {
# Generate a map of today's record high and low temperature from pyiem.plot import MapPlot import datetime now = datetime.datetime.now() from pyiem.network import Table as NetworkTable nt = NetworkTable('IACLIMATE') nt.sts["IA0200"]["lon"] = -93.6 nt.sts["IA5992"]["lat"] = 41.65 import psycopg2.extras coop = psycopg2.connect(database='coop', host='iemdb', user='******') # Compute normal from the climate database sql = """SELECT station, max_high, min_low from climate WHERE valid = '2000-%s' and substr(station,0,3) = 'IA'""" % (now.strftime("%m-%d"),) obs = [] c = coop.cursor(cursor_factory=psycopg2.extras.DictCursor) c.execute(sql) for row in c: sid = row['station'] if sid[2] == 'C' or sid[2:] == '0000' or sid not in nt.sts: continue obs.append(dict(id=sid[2:], lat=nt.sts[sid]['lat'], lon=nt.sts[sid]['lon'], tmpf=row['max_high'], dwpf=row['min_low'])) m = MapPlot(title="Record High + Low Temperature [F] (1893-%s)" % (now.year,), subtitle="For Date: %s" % (now.strftime("%d %b"),), axisbg='white')
"""Plot 24 hours of river stage (HG) SHEF reports for six Iowa DCP sites.

Script preamble: configures matplotlib, projections and timezone, then
loads the observations from the IEM HADS archive into a DataFrame.
"""
from pyiem.datatypes import distance
# BUGFIX: pytz is referenced below (central = pytz.timezone(...)) but was
# never imported, which raised NameError at import time
import pytz
import pyproj
import datetime
from PIL import Image
import numpy as np
import psycopg2
import matplotlib
matplotlib.use('agg')
import matplotlib.pyplot as plt
from pyiem.plot import MapPlot, nwsprecip
from pandas.io.sql import read_sql
from pyiem.network import Table as NetworkTable

p26915 = pyproj.Proj(init="EPSG:26915")
central = pytz.timezone("America/Chicago")
nt = NetworkTable("IA_DCP")
pgconn = psycopg2.connect(database='hads', host='iemdb-hads', user='******')
# All distinct HG (river stage) values for the six sites over a fixed
# 24 hour window ending 23z on 24 Aug 2016
df = read_sql("""
    SELECT distinct station, valid, key, value from raw2016_08 where
    station in ('SPLI4', 'EDRI4', 'CMTI4', 'TENI4', 'EKDI4', 'GRBI4')
    and valid >= '2016-08-23 23:00+00' and valid < '2016-08-24 23:00+00'
    and substr(key, 1, 2) = 'HG' ORDER by valid ASC
""", pgconn, index_col=None)
now = df['valid'].min()
maxval = df['valid'].max()
xticks = []
xticklabels = []
def plotter(fdict):
    """Monthly CDD/HDD text report for a climodat station.

    Builds two year-by-month tables (base 65 then base 60) of the chosen
    degree-day variable and returns ``(None, DataFrame, report_text)`` per
    the IEM autoplot contract.
    """
    pgconn = get_dbconn('coop')
    station = fdict.get('station', 'IA0200')
    varname = fdict.get('var', 'cdd')  # 'cdd' or 'hdd'; keys into PDICT
    table = "alldata_%s" % (station[:2], )
    nt = NetworkTable("%sCLIMATE" % (station[:2], ))
    # Monthly aggregates; cdd()/hdd() are database-side functions
    df = read_sql("""
      SELECT year, month, sum(precip) as sum_precip,
      avg(high) as avg_high,
      avg(low) as avg_low,
      sum(cdd(high,low,60)) as cdd60,
      sum(cdd(high,low,65)) as cdd65,
      sum(hdd(high,low,60)) as hdd60,
      sum(hdd(high,low,65)) as hdd65,
      sum(case when precip >= 0.01 then 1 else 0 end) as rain_days,
      sum(case when snow >= 0.1 then 1 else 0 end) as snow_days
      from """ + table + """ WHERE station = %s GROUP by year, month
    """, pgconn, params=(station, ), index_col=None)
    # index by first-of-month date for easy per-month lookup below
    df['monthdate'] = df[['year', 'month']].apply(
        lambda x: datetime.date(x[0], x[1], 1), axis=1)
    df.set_index('monthdate', inplace=True)

    res = """\
# IEM Climodat https://mesonet.agron.iastate.edu/climodat/
# Report Generated: %s
# Climate Record: %s -> %s
# Site Information: [%s] %s
# Contact Information: Daryl Herzmann [email protected] 515.294.5978
""" % (datetime.date.today().strftime("%d %b %Y"),
       nt.sts[station]['archive_begin'].date(), datetime.date.today(),
       station, nt.sts[station]['name'])
    res += """# THESE ARE THE MONTHLY %s (base=65) FOR STATION  %s
YEAR    JAN    FEB    MAR    APR    MAY    JUN    JUL    AUG    SEP \
   OCT    NOV    DEC
""" % (PDICT[varname].upper(), station)
    # base=60 table is accumulated separately and appended at the end
    second = """# THESE ARE THE MONTHLY %s (base=60) FOR STATION  %s
YEAR    JAN    FEB    MAR    APR    MAY    JUN    JUL    AUG    SEP \
   OCT    NOV    DEC
""" % (PDICT[varname].upper(), station)
    minyear = df['year'].min()
    maxyear = df['year'].max()
    for yr in range(minyear, maxyear + 1):
        res += ("%4i" % (yr, ))
        second += "%4i" % (yr, )
        for mo in range(1, 13):
            ts = datetime.date(yr, mo, 1)
            if ts not in df.index:
                res += ("%7s" % ("M", ))  # M marks a missing month
                second += "%7s" % ("M", )
                continue
            row = df.loc[ts]
            res += ("%7.0f" % (row[varname + "65"], ))
            second += "%7.0f" % (row[varname + "60"], )
        res += ("\n")
        second += "\n"
    # final row: period-of-record mean per calendar month
    res += ("MEAN")
    second += "MEAN"
    for mo in range(1, 13):
        df2 = df[df['month'] == mo]
        res += ("%7.0f" % (df2[varname + "65"].mean(), ))
        second += "%7.0f" % (df2[varname + "60"].mean(), )
    res += ("\n")
    second += "\n"
    res += second
    return None, df, res
def plotter(fdict):
    """Average dew point by wind direction for an ASOS station.

    Averages the vapor pressure per 10-degree wind direction bin and
    converts back to dew point, which is the physically proper way to
    average moisture.  Returns ``(Figure, Series)``.
    """
    pgconn = get_dbconn('asos')
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['zstation']
    network = ctx['network']
    month = ctx['month']
    nt = NetworkTable(network)
    if month == 'all':
        months = range(1, 13)
    elif month == 'fall':
        months = [9, 10, 11]
    elif month == 'winter':
        months = [12, 1, 2]
    elif month == 'spring':
        months = [3, 4, 5]
    elif month == 'summer':
        months = [6, 7, 8]
    else:
        ts = datetime.datetime.strptime("2000-" + month + "-01", '%Y-%b-%d')
        # make sure it is length two for the trick below in SQL
        months = [ts.month, 999]
    # routine (report_type=2) obs with wind > 3kt on 10-degree directions;
    # slp falls back from mslp to altimeter-derived to standard atmosphere
    df = read_sql("""
        SELECT drct::int as t, dwpf, tmpf,
        coalesce(mslp, alti * 33.8639, 1013.25) as slp
        from alldata where station = %s
        and drct is not null and dwpf is not null and dwpf <= tmpf
        and sknt > 3 and drct::int %% 10 = 0
        and extract(month from valid) in %s
        and report_type = 2
    """, pgconn, params=(station, tuple(months)))
    # Convert sea level pressure to station pressure
    df['pressure'] = mcalc.add_height_to_pressure(
        df['slp'].values * units('millibars'),
        nt.sts[station]['elevation'] * units('m')).to(units('millibar'))
    # compute RH
    df['relh'] = mcalc.relative_humidity_from_dewpoint(
        df['tmpf'].values * units('degF'),
        df['dwpf'].values * units('degF'))
    # compute mixing ratio
    df['mixingratio'] = mcalc.mixing_ratio_from_relative_humidity(
        df['relh'].values,
        df['tmpf'].values * units('degF'),
        df['pressure'].values * units('millibars'))
    # compute vapor pressure
    df['vapor_pressure'] = mcalc.vapor_pressure(
        df['pressure'].values * units('millibars'),
        df['mixingratio'].values * units('kg/kg')).to(units('kPa'))
    means = df.groupby('t').mean().copy()
    # compute dewpoint now, based on the averaged vapor pressure
    means['dwpf'] = mcalc.dewpoint(
        means['vapor_pressure'].values * units('kPa')).to(units('degF')).m
    (fig, ax) = plt.subplots(1, 1)
    ax.bar(means.index.values, means['dwpf'].values, ec='green',
           fc='green', width=10, align='center')
    ax.grid(True, zorder=11)
    ax.set_title(("%s [%s]\nAverage Dew Point by Wind Direction (month=%s) "
                  "(%s-%s)\n"
                  "(must have 3+ hourly obs > 3 knots at given direction)"
                  ) % (nt.sts[station]['name'], station, month.upper(),
                       max([1973,
                            nt.sts[station]['archive_begin'].year]),
                       datetime.datetime.now().year), size=10)
    ax.set_ylabel("Dew Point [F]")
    ax.set_ylim(means['dwpf'].min() - 5, means['dwpf'].max() + 5)
    ax.set_xlim(-5, 365)
    ax.set_xticks([0, 45, 90, 135, 180, 225, 270, 315, 360])
    ax.set_xticklabels(['N', 'NE', 'E', 'SE', 'S', 'SW', 'W', 'NW', 'N'])
    ax.set_xlabel("Wind Direction")
    return fig, means['dwpf']
def plotter(fdict):
    """Yearly time series of ISU AgClimate daily soil temp / moisture.

    Merges the legacy ISUAG daily archive (mapped via XREF) with the
    current ISUSM archive, plots every year in light blue, highlights the
    chosen year in red, and overlays the day-of-year average in black.
    Returns ``(Figure, DataFrame)``.
    """
    pgconn = util.get_dbconn("isuag")
    nt = NetworkTable("ISUSM", only_online=False)
    oldnt = NetworkTable("ISUAG", only_online=False)
    ctx = util.get_autoplot_context(fdict, get_description())
    station = ctx["station"]
    highlightyear = ctx["year"]
    varname = ctx["var"]
    # legacy ISUAG station equivalent for the pre-2014 record
    oldstation = XREF.get(station, "A130209")
    df = read_sql(
        """
        WITH legacy as (
            SELECT valid, c30 as tsoil, 'L' as dtype from daily
            where station = %s and c30 > 0 ORDER by valid ASC
        ), present as (
            SELECT valid, tsoil_c_avg_qc * 9./5. + 32. as tsoil,
            'C' as dtype, vwc_12_avg_qc as vwc12, vwc_24_avg_qc as vwc24,
            vwc_50_avg_qc as vwc50
            from sm_daily
            where station = %s and tsoil_c_avg_qc is not null
            ORDER by valid ASC
        )
        SELECT valid, tsoil, dtype,
        null as vwc12, null as vwc24, null as vwc50 from legacy
        UNION ALL select * from present
        """,
        pgconn,
        params=(oldstation, station),
        index_col=None,
    )
    df["valid"] = pd.to_datetime(df["valid"])
    df["doy"] = pd.to_numeric(df["valid"].dt.strftime("%j"))
    df["year"] = df["valid"].dt.year
    (fig, ax) = plt.subplots()
    for dtype in ["L", "C"]:
        for year, df2 in df[df["dtype"] == dtype].groupby("year"):
            # NOTE(review): 1997/1988 excluded here -- reason not visible
            # in this file; presumably known-bad data years
            if year in [1997, 1988]:
                continue
            ax.plot(
                df2["doy"].values,
                df2[varname].values,
                color="skyblue",
                zorder=2,
            )
            if year == highlightyear:
                ax.plot(
                    df2["doy"].values,
                    df2[varname].values,
                    color="red",
                    zorder=5,
                    label=str(year),
                    lw=2.0,
                )
    # day-of-year climatology over all years
    gdf = df.groupby("doy").mean()
    ax.plot(gdf.index.values, gdf[varname].values, color="k",
            label="Average")
    ax.set_title(("ISU AgClimate [%s] %s [%s-]\n"
                  "Site %s Yearly Timeseries") % (
        station,
        nt.sts[station]["name"],
        df["valid"].min().year,
        VARS[varname],
    ))
    ax.grid(True)
    if varname == "tsoil":
        ax.set_ylabel("Daily Avg Temp $^{\circ}\mathrm{F}$")
        ax.set_xlabel(("* pre-2014 data provided by [%s] %s")
                      % (oldstation, oldnt.sts[oldstation]["name"]))
    else:
        ax.set_ylabel("Daily Avg Volumetric Water Content [kg/kg]")
    ax.set_xticks((1, 32, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335))
    ax.set_xticklabels(calendar.month_abbr[1:])
    ax.set_xlim(0, 367)
    if varname == "tsoil":
        ax.set_ylim(gdf["tsoil"].min() - 15, gdf["tsoil"].max() + 15)
    else:
        ax.set_ylim(0, 1)
    ax.axhline(32, lw=2, color="purple", zorder=4)  # freezing line
    ax.legend(loc="best")
    return fig, df
def plotter(fdict):
    """Rank NWS Watch/Warning/Advisory products by event+county count.

    Bars show each product's count as a percentage of the most-issued
    product for the chosen WFO (or all WFOs) and period.  Returns
    ``(Figure, DataFrame)``.
    """
    import matplotlib
    matplotlib.use('agg')
    import matplotlib.pyplot as plt
    pgconn = get_dbconn('postgis')
    pcursor = pgconn.cursor()
    ctx = get_autoplot_context(fdict, get_description())
    syear = ctx['syear']
    eyear = ctx['eyear'] + 1
    station = ctx['station'][:4]
    sts = datetime.date(syear, 1, 1)
    ets = datetime.date(eyear, 1, 1)
    nt = NetworkTable('WFO')
    # warnings table stores 3-char WFO ids; strip the leading char from
    # 4-char identifiers
    wfo_limiter = " and wfo = '%s' " % (station if len(station) == 3
                                        else station[1:], )
    if station == '_ALL':
        wfo_limiter = ''
    pcursor.execute("""
        select phenomena, significance, min(issue), count(*)
        from warnings where ugc is not null and issue > %s
        and issue < %s """ + wfo_limiter + """
        GROUP by phenomena, significance ORDER by count DESC
    """, (sts, ets))
    labels = []
    vals = []
    cnt = 1
    rows = []
    for row in pcursor:
        label = ("%s. %s (%s.%s)") % (cnt, vtec.get_ps_string(
            row[0], row[1]), row[0], row[1])
        if cnt < 26:  # chart the top 25; DataFrame keeps everything
            labels.append(label)
            vals.append(row[3])
        rows.append(
            dict(phenomena=row[0], significance=row[1], count=row[3],
                 wfo=station))
        cnt += 1
    df = pd.DataFrame(rows)
    (fig, ax) = plt.subplots(1, 1, figsize=(7, 10))
    vals = np.array(vals)
    # bars as percent of the most common product (first row)
    ax.barh(np.arange(len(vals)), vals / float(vals[0]) * 100.0,
            align='center')
    for i in range(1, len(vals)):
        y = vals[i] / float(vals[0]) * 100.0
        ax.text(y + 1, i, '%.1f%%' % (y, ), va='center')
    fig.text(0.5, 0.95,
             "%s-%s NWS %s Watch/Warning/Advisory Totals" % (
                 syear, eyear - 1 if (eyear - 1 != syear) else '',
                 "ALL WFOs" if station == '_ALL'
                 else nt.sts[station]['name']),
             ha='center')
    fig.text(0.5, 0.05,
             "Event+County/Zone Count, Relative to #%s" % (labels[0], ),
             ha='center', fontsize=10)
    ax.set_ylim(len(vals), -0.5)
    ax.grid(True)
    ax.set_yticklabels(labels)
    ax.set_yticks(np.arange(len(vals)))
    ax.set_position([0.5, 0.1, 0.45, 0.83])
    ax.set_xticks([0, 10, 25, 50, 75, 90, 100])
    return fig, df
def do_apsim(ctx):
    """Write an APSIM format weather (.met) file to the output stream.

    Example of the target format:

    [weather.met.weather]
    latitude = 42.1 (DECIMAL DEGREES)
    tav = 9.325084 (oC) ! annual average ambient temperature
    amp = 29.57153 (oC) ! annual amplitude in mean monthly temperature
    year day radn maxt mint rain
    () () (MJ/m^2) (oC) (oC) (mm)
     1986 1 7.38585 0.8938889 -7.295556 0
    """
    if len(ctx['stations']) > 1:
        ssw(("ERROR: APSIM output is only "
             "permitted for one station at a time."))
        return
    dbconn = get_database()
    cursor = dbconn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    station = ctx['stations'][0]
    table = get_tablename(ctx['stations'])
    network = "%sCLIMATE" % (station[:2],)
    nt = NetworkTable(network)
    thisyear = datetime.datetime.now().year
    extra = {}
    if ctx['scenario'] == 'yes':
        # Scenario mode replays a chosen year's obs, remapped onto the
        # current year, appended after the observed period
        sts = datetime.datetime(int(ctx['scenario_year']), 1, 1)
        ets = datetime.datetime(int(ctx['scenario_year']), 12, 31)
        cursor.execute("""
            SELECT day, high, low, precip, 1 as doy,
            coalesce(narr_srad, merra_srad, hrrr_srad) as srad
            from """ + table + """ WHERE station = %s
            and day >= %s and day <= %s
            """, (ctx['stations'][0], sts, ets))
        for row in cursor:
            ts = row[0].replace(year=thisyear)
            extra[ts] = row
            extra[ts]['doy'] = int(ts.strftime("%j"))
        # Backfill 29 Feb from 28 Feb when the scenario year isn't a leap
        # year but the current year is
        febtest = datetime.date(thisyear, 3, 1) - datetime.timedelta(
            days=1)
        if febtest not in extra:
            feb28 = datetime.date(thisyear, 2, 28)
            extra[febtest] = extra[feb28]
    ssw("! Iowa Environmental Mesonet -- NWS Cooperative Data\n")
    ssw("! Created: %s UTC\n" % (
        datetime.datetime.utcnow().strftime("%d %b %Y %H:%M:%S"),))
    ssw("! Contact: daryl herzmann [email protected] 515-294-5978\n")
    ssw("! Station: %s %s\n" % (station, nt.sts[station]['name']))
    ssw("! Data Period: %s - %s\n" % (ctx['sts'], ctx['ets']))
    if ctx['scenario'] == 'yes':
        ssw("! !SCENARIO DATA! inserted after: %s replicating year: %s\n"
            % (ctx['ets'], ctx['scenario_year']))
    ssw("[weather.met.weather]\n")
    ssw("latitude = %.1f (DECIMAL DEGREES)\n" % (
        nt.sts[station]["lat"]))
    # Compute average temperature!
    cursor.execute("""
        SELECT avg((high+low)/2) as avgt from climate51
        WHERE station = %s
        """, (station,))
    row = cursor.fetchone()
    ssw("tav = %.3f (oC) ! annual average ambient temperature\n" % (
        temperature(row['avgt'], 'F').value('C'),))
    # Compute the annual amplitude in temperature
    cursor.execute("""
        select max(avg) as h, min(avg) as l from
        (SELECT extract(month from valid) as month,
         avg((high+low)/2.) from climate51
         WHERE station = %s GROUP by month) as foo
        """, (station,))
    row = cursor.fetchone()
    ssw("amp = %.3f (oC) ! annual amplitude in mean monthly "
        "temperature\n" % ((temperature(row['h'], 'F').value('C') -
                            temperature(row['l'], 'F').value('C')), ))
    ssw("""year   day   radn   maxt   mint  rain
 ()     ()   (MJ/m^2) (oC)   (oC)   (mm)\n""")
    if ctx.get('hayhoe_model') is not None:
        # Hayhoe downscaled climate scenario data has no radiation
        cursor.execute("""
            SELECT day, high, low, precip,
            extract(doy from day) as doy, 0 as srad
            from hayhoe_daily WHERE station = %s and day >= %s
            and scenario = %s and model = %s
            ORDER by day ASC
            """, (ctx['stations'][0], ctx['sts'],
                  ctx['hayhoe_scenario'], ctx['hayhoe_model']))
    else:
        cursor.execute("""
            SELECT day, high, low, precip,
            extract(doy from day) as doy,
            coalesce(narr_srad, merra_srad, hrrr_srad) as srad
            from """ + table + """
            WHERE station = %s and day >= %s and day <= %s
            ORDER by day ASC
            """, (station, ctx['sts'], ctx['ets']))
    for row in cursor:
        srad = -99 if row['srad'] is None else row['srad']
        ssw("%4s %10.0f %10.3f %10.1f %10.1f %10.2f\n" % (
            row["day"].year, int(row["doy"]), srad,
            temperature(row["high"], 'F').value('C'),
            temperature(row["low"], 'F').value('C'),
            row["precip"] * 25.4))  # inches -> mm
    if len(extra) > 0:
        # continue from the last observed day through 31 Dec using the
        # replicated scenario year
        dec31 = datetime.date(thisyear, 12, 31)
        now = row['day']
        while now <= dec31:
            row = extra[now]
            srad = -99 if row['srad'] is None else row['srad']
            ssw("%4s %10.0f %10.3f %10.1f %10.1f %10.2f\n" % (
                now.year, int(row['doy']), srad,
                temperature(row["high"], 'F').value('C'),
                temperature(row["low"], 'F').value('C'),
                row["precip"] * 25.4))
            now += datetime.timedelta(days=1)
def plotter(fdict):
    """Text report of seasonal temperature (freeze/thaw style) cycles.

    A cycle is a swing from below a lower threshold to above an upper
    threshold; each half-swing scores 0.5.  Spring is Jan-Jun, fall is
    Jul-Dec.  Returns ``(None, None, report_text)``.
    """
    import matplotlib
    matplotlib.use('agg')
    pgconn = psycopg2.connect(database='coop', host='iemdb',
                              user='******')
    cursor = pgconn.cursor()
    station = fdict.get('station', 'IA0200')
    table = "alldata_%s" % (station[:2], )
    nt = NetworkTable("%sCLIMATE" % (station[:2], ))
    s = nt.sts[station]['archive_begin']
    e = datetime.date.today()
    YEARS = e.year - s.year + 1
    res = """\
# IEM Climodat http://mesonet.agron.iastate.edu/climodat/
# Report Generated: %s
# Climate Record: %s -> %s
# Site Information: [%s] %s
# Contact Information: Daryl Herzmann [email protected] 515.294.5978
# SEASONAL TEMPERATURE CYCLES PER YEAR
# 1 CYCLE IS A TEMPERATURE VARIATION FROM A VALUE BELOW A THRESHOLD
# TO A VALUE EXCEEDING A THRESHOLD. THINK OF IT AS FREEZE/THAW CYCLES
# FIRST DATA COLUMN WOULD BE FOR CYCLES EXCEEDING 26 AND 38 DEGREES F
THRES  26-38   26-38   24-40   24-40   20-44   20-44   14-50   14-50
YEAR   SPRING  FALL    SPRING  FALL    SPRING  FALL    SPRING  FALL
""" % (datetime.date.today().strftime("%d %b %Y"),
       nt.sts[station]['archive_begin'].date(), datetime.date.today(),
       station, nt.sts[station]['name'])
    # per-year counters: '<thres>s' = spring bucket, '<thres>f' = fall
    data = {}
    for yr in range(s.year, e.year + 1):
        data[yr] = {'26s': 0, '26f': 0, '24s': 0, '24f': 0, '20s': 0,
                    '20f': 0, '14s': 0, '14f': 0}
    prs = [[26, 38], [24, 40], [20, 44], [14, 50]]
    # state per threshold pair: -1 = currently below, 1 = currently above
    cycPos = {'26s': -1, '24s': -1, '20s': -1, '14s': -1}
    cursor.execute("""SELECT day, high, low from """ + table + """
        WHERE station = %s and high is not null and low is not null
        ORDER by day ASC""", (station, ))
    for row in cursor:
        ts = row[0]
        high = int(row[1])
        low = int(row[2])
        for pr in prs:
            l, u = pr
            key = '%ss' % (l, )   # state key (shared across the year)
            ckey = '%ss' % (l, )  # count bucket, switched to fall in Jul+
            if ts.month >= 7:
                ckey = '%sf' % (l, )

            # cycles lower
            if cycPos[key] == 1 and low < l:
                cycPos[key] = -1
                data[ts.year][ckey] += 0.5

            # cycled higher
            if cycPos[key] == -1 and high > u:
                cycPos[key] = 1
                data[ts.year][ckey] += 0.5

    s26 = 0
    f26 = 0
    s24 = 0
    f24 = 0
    s20 = 0
    f20 = 0
    s14 = 0
    f14 = 0
    for yr in range(s.year, e.year + 1):
        s26 += data[yr]['26s']
        f26 += data[yr]['26f']
        s24 += data[yr]['24s']
        f24 += data[yr]['24f']
        s20 += data[yr]['20s']
        f20 += data[yr]['20f']
        s14 += data[yr]['14s']
        f14 += data[yr]['14f']
        # %i truncates the 0.5 half-cycle remainders to whole cycles
        res += ("%s   %-8i%-8i%-8i%-8i%-8i%-8i%-8i%-8i\n"
                "") % (yr, data[yr]['26s'], data[yr]['26f'],
                       data[yr]['24s'], data[yr]['24f'],
                       data[yr]['20s'], data[yr]['20f'],
                       data[yr]['14s'], data[yr]['14f'])

    res += ("AVG    %-8.1f%-8.1f%-8.1f%-8.1f%-8.1f%-8.1f%-8.1f%-8.1f\n"
            "") % (s26/YEARS, f26/YEARS, s24/YEARS, f24/YEARS,
                   s20/YEARS, f20/YEARS, s14/YEARS, f14/YEARS)
    return None, None, res
def do_century(ctx):
    """Materialize the data in CENTURY model format.

    Century format (precip cm, avg monthly high C, avg monthly low C):

    prec  1980   2.60   6.40   0.90   1.00   0.70   0.00
    tmin  1980  14.66  12.10   7.33  -0.89  -5.45  -7.29
    tmax  1980  33.24  30.50  27.00  18.37  11.35   9.90
    prec  1981  12.00   7.20   0.60   4.90   1.10   0.30
    tmin  1981  14.32  12.48   8.17   0.92  -3.25  -8.90
    tmax  1981  30.84  28.71  27.02  16.84  12.88   6.82
    """
    if len(ctx['stations']) > 1:
        ssw(("ERROR: Century output is only "
             "permitted for one station at a time."))
        return
    station = ctx['stations'][0]
    network = "%sCLIMATE" % (station[:2],)
    nt = NetworkTable(network)
    dbconn = get_database()
    cursor = dbconn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    # Automatically set dates to start and end of year to make output clean
    sts = datetime.date(ctx['sts'].year, 1, 1)
    ets = datetime.date(ctx['ets'].year, 12, 31)
    if ets >= datetime.date.today():
        ets = datetime.date.today() - datetime.timedelta(days=1)
    table = get_tablename(ctx['stations'])
    thisyear = datetime.datetime.now().year
    # scenario CTE replays a chosen period's data labeled as this year;
    # 29 Feb is dropped from the replay
    cursor.execute("""
        WITH scenario as (
            SELECT """ + str(thisyear) + """::int as year, month, high,
            low, precip from """ + table + """
            WHERE station = %s and day > %s and day <= %s
            and sday != '0229'
        ), obs as (
            select year, month, high, low, precip from """ + table + """
            WHERE station = %s and day >= %s and day <= %s
        ), data as (
            SELECT * from obs UNION select * from scenario
        )
        SELECT year, month, avg(high) as tmax, avg(low) as tmin,
        sum(precip) as prec from data GROUP by year, month
        """, (station, ctx['scenario_sts'], ctx['scenario_ets'],
              station, sts, ets))
    data = {}
    for row in cursor:
        # BUGFIX: was dict.has_key(), which was removed in Python 3
        if row['year'] not in data:
            data[row['year']] = {}
            # pre-fill all 12 months with the -99 missing sentinel
            for mo in range(1, 13):
                data[row['year']][mo] = {'prec': -99, 'tmin': -99,
                                         'tmax': -99}
        data[row['year']][row['month']] = {
            # BUGFIX: precip was scaled by 24.5; the Century format wants
            # centimeters, so database inches convert at 2.54 cm/inch
            'prec': row['prec'] * 2.54,
            'tmin': temperature(float(row['tmin']), 'F').value('C'),
            'tmax': temperature(float(row['tmax']), 'F').value('C'),
        }
    ssw("# Iowa Environmental Mesonet -- NWS Cooperative Data\n")
    ssw("# Created: %s UTC\n" % (
        datetime.datetime.utcnow().strftime("%d %b %Y %H:%M:%S"),))
    ssw("# Contact: daryl herzmann [email protected] 515-294-5978\n")
    ssw("# Station: %s %s\n" % (station, nt.sts[station]['name']))
    ssw("# Data Period: %s - %s\n" % (sts, ets))
    if ctx['scenario'] == 'yes':
        ssw("# !SCENARIO DATA! inserted after: %s replicating year: %s\n"
            % (ctx['ets'], ctx['scenario_year']))
    idxs = ["prec", "tmin", "tmax"]
    # one line per variable per year, 12 monthly columns
    for year in range(sts.year, ets.year + 1):
        for idx in idxs:
            ssw(("%s %s%7.2f%7.2f%7.2f%7.2f%7.2f%7.2f%7.2f"
                 "%7.2f%7.2f%7.2f%7.2f%7.2f\n") % (
                idx, year,
                data[year][1][idx], data[year][2][idx],
                data[year][3][idx], data[year][4][idx],
                data[year][5][idx], data[year][6][idx],
                data[year][7][idx], data[year][8][idx],
                data[year][9][idx], data[year][10][idx],
                data[year][11][idx], data[year][12][idx]))
def plotter(fdict):
    """Top-10 largest local calendar-day temperature ranges for a site.

    Queries the IEM daily summaries for the ten biggest max-minus-min
    temperature differences and renders them as a monospace text table on
    a figure.  Returns ``(Figure, DataFrame)``.
    """
    font0 = FontProperties()
    font0.set_family('monospace')
    font0.set_size(16)
    pgconn = get_dbconn('iem')
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['zstation']
    network = ctx['network']
    month = ctx['month']
    if month == 'all':
        months = range(1, 13)
    elif month == 'fall':
        months = [9, 10, 11]
    elif month == 'winter':
        months = [12, 1, 2]
    elif month == 'spring':
        months = [3, 4, 5]
    elif month == 'summer':
        months = [6, 7, 8]
    else:
        ts = datetime.datetime.strptime("2000-" + month + "-01",
                                        '%Y-%b-%d')
        # make sure it is length two for the trick below in SQL
        months = [ts.month, 999]
    nt = NetworkTable(network)
    df = read_sql("""
        SELECT day as date, max_tmpf as max, min_tmpf as min,
        max_tmpf::int - min_tmpf::int as difference
        from summary s JOIN stations t on (s.iemid = t.iemid)
        where t.id = %s and t.network = %s
        and extract(month from day) in %s
        and max_tmpf is not null and min_tmpf is not null
        ORDER by difference DESC, date DESC LIMIT 10
    """, pgconn, params=(station, network, tuple(months)),
                  parse_dates=('date', ), index_col=None)
    # min method keeps ties at the same rank number
    df['rank'] = df['difference'].rank(ascending=False, method='min')
    fig = plt.figure(figsize=(5.5, 4))
    fig.text(0.5, 0.9, ("%s [%s] %s-%s\n"
                        "Top 10 Local Calendar Day [%s] "
                        "Temperature Differences") % (
        nt.sts[station]['name'], station,
        nt.sts[station]['archive_begin'].year,
        datetime.date.today().year, month.capitalize()), ha='center')
    fig.text(0.1, 0.81, " #  Date         Diff   Low  High",
             fontproperties=font0)
    y = 0.74
    for _, row in df.iterrows():
        fig.text(0.1, y, ("%2.0f  %11s   %3.0f   %3.0f   %3.0f") % (
            row['rank'], row['date'].strftime("%d %b %Y"),
            row['difference'], row['min'], row['max']),
            fontproperties=font0)
        y -= 0.07
    return fig, df
database = 'rwis' elif form["network"].value in ('ISUSM', ): database = 'isuag' elif form["network"].value in ('RAOB', ): database = 'postgis' try: nsector = int(form['nsector'].value) except: nsector = 36 rmax = None if "staticrange" in form and form["staticrange"].value == "1": rmax = 100 nt = NetworkTable(form['network'].value) res = windrose(form["station"].value, database=database, sts=sts, ets=ets, months=months, hours=hours, units=units, nsector=nsector, justdata=("justdata" in form), rmax=rmax, sname=nt.sts[form['station'].value]['name'], level=form.getfirst('level', None)) if 'justdata' in form: # We want text sys.stdout.write("Content-type: text/plain\n\n")
def plotter(fdict):
    """Scatter a month's snowfall vs precipitation totals by year.

    One point per year for the chosen month; the requested year is
    highlighted in red and the long-term averages are drawn as
    crosshairs.  Returns ``(Figure, DataFrame)``.
    """
    import matplotlib
    matplotlib.use('agg')
    import matplotlib.pyplot as plt
    pgconn = psycopg2.connect(database='coop', host='iemdb',
                              user='******')
    cursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    station = fdict.get('station', 'IA2203')
    month = int(fdict.get('month', 12))
    year = int(fdict.get('year', 2014))
    table = "alldata_%s" % (station[:2], )
    nt = NetworkTable("%sCLIMATE" % (station[:2], ))

    # beat month
    cursor.execute("""
        SELECT year, sum(precip), sum(snow) from """ + table + """
        WHERE station = %s and month = %s and precip >= 0 and snow >= 0
        GROUP by year ORDER by year ASC
    """, (station, month))
    precip = []
    snow = []
    years = []
    for row in cursor:
        years.append(row[0])
        precip.append(float(row[1]))
        snow.append(float(row[2]))
    df = pd.DataFrame(
        dict(year=pd.Series(years), precip=pd.Series(precip),
             snow=pd.Series(snow)))
    precip = np.array(precip)
    snow = np.array(snow)
    (fig, ax) = plt.subplots(1, 1)
    ax.scatter(precip, snow, s=40, marker='s', color='b', zorder=2)
    if year in years:
        # highlight the requested year atop the blue squares
        ax.scatter(precip[years.index(year)], snow[years.index(year)],
                   s=60, marker='o', color='r', zorder=3,
                   label=str(year))
    ax.set_title(("[%s] %s\n%s Snowfall vs Precipitation Totals"
                  ) % (station, nt.sts[station]['name'],
                       calendar.month_name[month]))
    ax.grid(True)
    # long-term average crosshairs with labeled values at the axes edges
    ax.axhline(np.average(snow), lw=2, color='black')
    ax.axvline(np.average(precip), lw=2, color='black')
    ax.set_xlim(left=-0.1)
    ax.set_ylim(bottom=-0.1)
    ylim = ax.get_ylim()
    ax.text(np.average(precip), ylim[1], "%.2f" % (np.average(precip), ),
            va='top', ha='center', color='white',
            bbox=dict(color='black'))
    xlim = ax.get_xlim()
    ax.text(xlim[1], np.average(snow), "%.1f" % (np.average(snow), ),
            va='center', ha='right', color='white',
            bbox=dict(color='black'))
    ax.set_ylabel("Snowfall Total [inch]")
    ax.set_xlabel("Precipitation Total (liquid + melted) [inch]")
    ax.legend(loc=2, scatterpoints=1)
    return fig, df
def plotter(fdict):
    """Accumulated WWA event counts by day-of-year, one line per year.

    Counts distinct VTEC events (first issuance per event) for a WFO or
    state, accumulates them through the year, and labels each line's end
    with the year, nudging labels apart to avoid overlap.  Returns
    ``(Figure, DataFrame)``.
    """
    import matplotlib
    matplotlib.use('agg')
    import matplotlib.pyplot as plt
    pgconn = get_dbconn('postgis')
    cursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['station']
    limit = ctx['limit']
    combo = ctx['c']
    phenomena = ctx['phenomena'][:2]
    significance = ctx['significance'][:2]
    opt = ctx['opt']
    state = ctx['state'][:2]
    nt = NetworkTable('WFO')
    nt.sts['_ALL'] = {'name': 'All Offices'}
    lastdoy = 367
    if limit.lower() == 'yes':
        # truncate all years at today's day-of-year
        lastdoy = int(datetime.datetime.today().strftime("%j")) + 1
    wfolimiter = " and wfo = '%s' " % (station, )
    if opt == 'state':
        wfolimiter = " and substr(ugc, 1, 2) = '%s' " % (state, )
    if opt == 'wfo' and station == '_ALL':
        wfolimiter = ''
    eventlimiter = ""
    if combo == 'svrtor':
        # combined SVR+TOR warning count
        eventlimiter = " or (phenomena = 'SV' and significance = 'W') "
        phenomena = 'TO'
        significance = 'W'
    # agg1 dedups to one row per VTEC event; agg2 counts events per
    # day-of-year; outer query produces the running total per year
    cursor.execute("""
        WITH data as (
            SELECT extract(year from issue) as yr, issue, phenomena,
            significance, eventid, wfo from warnings WHERE
            ((phenomena = %s and significance = %s) """ + eventlimiter + """)
            and extract(doy from issue) <= %s """ + wfolimiter + """),
        agg1 as (
            SELECT yr, min(issue) as min_issue, eventid, wfo, phenomena,
            significance from data
            GROUP by yr, eventid, wfo, phenomena, significance),
        agg2 as (
            SELECT yr, extract(doy from min_issue) as doy, count(*)
            from agg1 GROUP by yr, doy)
        SELECT yr, doy,
        sum(count) OVER (PARTITION by yr ORDER by doy ASC) from agg2
        ORDER by yr ASC, doy ASC
    """, (phenomena, significance, lastdoy))
    data = {}
    for yr in range(1986, datetime.datetime.now().year + 1):
        data[yr] = {'doy': [0], 'counts': [0]}
    rows = []
    for row in cursor:
        data[row[0]]['doy'].append(row[1])
        data[row[0]]['counts'].append(row[2])
        rows.append(dict(year=row[0], day_of_year=row[1], count=row[2]))
    # append on a lastdoy value so all the plots go to the end
    for yr in range(1986, datetime.datetime.now().year):
        if len(data[yr]['doy']) == 1 or data[yr]['doy'][-1] >= lastdoy:
            continue
        data[yr]['doy'].append(lastdoy)
        data[yr]['counts'].append(data[yr]['counts'][-1])
    # the current year only extends to today
    if data[datetime.datetime.now().year]['doy']:
        data[datetime.datetime.now().year]['doy'].append(
            int(datetime.datetime.today().strftime("%j")) + 1)
        data[datetime.datetime.now().year]['counts'].append(
            data[datetime.datetime.now().year]['counts'][-1])
    df = pd.DataFrame(rows)
    (fig, ax) = plt.subplots(1, 1, figsize=(8, 6))
    ann = []
    for yr in range(1986, datetime.datetime.now().year + 1):
        if len(data[yr]['doy']) < 2:
            continue
        lp = ax.plot(data[yr]['doy'], data[yr]['counts'], lw=2,
                     label="%s (%s)" % (str(yr),
                                        data[yr]['counts'][-1]),
                     drawstyle='steps-post')
        ann.append(
            ax.text(data[yr]['doy'][-1]+1, data[yr]['counts'][-1],
                    "%s" % (yr,), color='w', va='center', fontsize=10,
                    bbox=dict(facecolor=lp[0].get_color(),
                              edgecolor=lp[0].get_color()))
        )

    # De-overlap the year labels: greedily claim pixels in a canvas-sized
    # occupancy mask, shifting any colliding label left and retrying
    mask = np.zeros(fig.canvas.get_width_height(), bool)
    fig.canvas.draw()
    attempts = 10
    while ann and attempts > 0:
        attempts -= 1
        removals = []
        for a in ann:
            bbox = a.get_window_extent()
            x0 = int(bbox.x0)
            x1 = int(math.ceil(bbox.x1))
            y0 = int(bbox.y0)
            y1 = int(math.ceil(bbox.y1))
            s = np.s_[x0:x1+1, y0:y1+1]
            if np.any(mask[s]):
                a.set_position([a._x-int(lastdoy/14), a._y])
            else:
                mask[s] = True
                removals.append(a)
        for rm in removals:
            ann.remove(rm)

    ax.legend(loc=2, ncol=2, fontsize=10)
    ax.set_xlim(1, 367)
    ax.set_xticks((1, 32, 60, 91, 121, 152, 182, 213, 244, 274, 305,
                   335, 365))
    ax.set_xticklabels(calendar.month_abbr[1:])
    ax.grid(True)
    ax.set_ylabel("Accumulated Count")
    ax.set_ylim(bottom=0)
    title = vtec.get_ps_string(phenomena, significance)
    if combo == 'svrtor':
        title = "Severe Thunderstorm + Tornado Warning"
    ptitle = "%s" % (nt.sts[station]['name'],)
    if opt == 'state':
        ptitle = ("NWS Issued for Counties/Parishes in %s"
                  ) % (reference.state_names[state],)
    ax.set_title(("%s\n %s Count") % (ptitle, title))
    ax.set_xlim(0, lastdoy)
    if lastdoy < 367:
        ax.set_xlabel(("thru approximately %s"
                       ) % (datetime.date.today().strftime("%-d %B"), ))

    return fig, df
def plotter(fdict):
    """Plot trailing-window departures for a climodat station.

    Computes, for each day, the departure of high/low/average temperature
    and precipitation from the day-of-year climatology, then averages
    (temperatures) or sums (precip) those departures over three trailing
    windows of ``p1``, ``p2`` and ``p3`` days and plots the three series.

    Args:
      fdict (dict): CGI-style parameters:
        station, network, p1, p2, p3, pvar ('precip', 'high', 'low',
        'avgt'), sdate, edate (both ``YYYY-mm-dd``).

    Returns:
      (matplotlib.figure.Figure, pandas.DataFrame): the plot and the
      per-day departure values indexed by day.

    Raises:
      ValueError: if the station identifier cannot safely be used to
        form the database table name.
    """
    import matplotlib
    matplotlib.use('agg')
    import matplotlib.pyplot as plt
    import matplotlib.dates as mdates
    station = fdict.get('station', 'IA0200')
    network = fdict.get('network', 'IACLIMATE')
    nt = NetworkTable(network)
    p1 = int(fdict.get('p1', 31))
    p2 = int(fdict.get('p2', 91))
    p3 = int(fdict.get('p3', 365))
    pvar = fdict.get('pvar', 'precip')
    sts = datetime.datetime.strptime(fdict.get('sdate', '2015-12-25'),
                                     '%Y-%m-%d')
    ets = datetime.datetime.strptime(fdict.get('edate', '2015-12-25'),
                                     '%Y-%m-%d')
    # Fetch enough history before sdate to fully prime the longest window
    bts = sts - datetime.timedelta(days=max([p1, p2, p3]))
    # `station` is untrusted user input and its first two characters are
    # interpolated into the table name below; reject anything that is not
    # alphanumeric so no SQL can be injected through it.
    if not station[:2].isalnum():
        raise ValueError("Invalid station: %s" % (station, ))
    table = "alldata_%s" % (station[:2], )
    pgconn = psycopg2.connect(database='coop', host='iemdb', user='******')
    try:
        # NOTE(review): ROWS n PRECEDING includes the current row, so each
        # window actually spans n+1 days — preserved as-is; confirm the
        # "%s Day" labeling intent before changing.
        df = read_sql("""
        WITH obs as (
            SELECT day,
            high - avg(high) OVER (PARTITION by sday) as high_diff,
            low - avg(low) OVER (PARTITION by sday) as low_diff,
            ((high+low)/2.) - avg((high+low)/2.)
                OVER (PARTITION by sday) as avgt_diff,
            precip - avg(precip) OVER (PARTITION by sday) as precip_diff
            from """ + table + """ WHERE station = %s ORDER by day ASC),
        lags as (
            SELECT day,
            avg(high_diff) OVER (ORDER by day ASC
                ROWS %s PRECEDING) as p1_high_diff,
            avg(high_diff) OVER (ORDER by day ASC
                ROWS %s PRECEDING) as p2_high_diff,
            avg(high_diff) OVER (ORDER by day ASC
                ROWS %s PRECEDING) as p3_high_diff,
            avg(low_diff) OVER (ORDER by day ASC
                ROWS %s PRECEDING) as p1_low_diff,
            avg(low_diff) OVER (ORDER by day ASC
                ROWS %s PRECEDING) as p2_low_diff,
            avg(low_diff) OVER (ORDER by day ASC
                ROWS %s PRECEDING) as p3_low_diff,
            avg(avgt_diff) OVER (ORDER by day ASC
                ROWS %s PRECEDING) as p1_avgt_diff,
            avg(avgt_diff) OVER (ORDER by day ASC
                ROWS %s PRECEDING) as p2_avgt_diff,
            avg(avgt_diff) OVER (ORDER by day ASC
                ROWS %s PRECEDING) as p3_avgt_diff,
            sum(precip_diff) OVER (ORDER by day ASC
                ROWS %s PRECEDING) as p1_precip_diff,
            sum(precip_diff) OVER (ORDER by day ASC
                ROWS %s PRECEDING) as p2_precip_diff,
            sum(precip_diff) OVER (ORDER by day ASC
                ROWS %s PRECEDING) as p3_precip_diff
            from obs WHERE day >= %s and day <= %s)
        SELECT * from lags where day >= %s and day <= %s ORDER by day ASC
        """, pgconn, params=(station, p1, p2, p3, p1, p2, p3, p1, p2, p3,
                             p1, p2, p3, bts, ets, sts, ets),
                      index_col='day')
    finally:
        # read_sql leaves the connection open; close it so repeated plot
        # requests do not leak server connections.
        pgconn.close()

    (fig, ax) = plt.subplots(1, 1)
    ax.plot(df.index.values, df['p1_' + pvar + '_diff'], lw=2,
            label='%s Day' % (p1, ))
    ax.plot(df.index.values, df['p2_' + pvar + '_diff'], lw=2,
            label='%s Day' % (p2, ))
    ax.plot(df.index.values, df['p3_' + pvar + '_diff'], lw=2,
            label='%s Day' % (p3, ))
    ax.set_title(("[%s] %s\nTrailing %s, %s, %s Day Departures"
                  ) % (station, nt.sts[station]['name'], p1, p2, p3))
    ax.xaxis.set_major_formatter(mdates.DateFormatter('%b\n%Y'))
    ax.set_ylabel(PDICT.get(pvar))
    ax.grid(True)
    ax.legend(ncol=3, fontsize=12, loc='best')
    ax.text(1, -0.12, "%s to %s" % (sts.strftime("%-d %b %Y"),
                                    ets.strftime("%-d %b %Y")),
            va='bottom', ha='right', fontsize=12, transform=ax.transAxes)
    return fig, df
"""Create a hybrid maize dump file""" import psycopg2 import datetime import subprocess import dropbox from pyiem.network import Table as NetworkTable from pyiem.datatypes import speed from pyiem.util import get_properties nt = NetworkTable("ISUSM") SITES = ['ames', 'nashua', 'sutherland', 'crawfordsville', 'lewis'] XREF = ['BOOI4', 'NASI4', 'CAMI4', 'CRFI4', 'OKLI4'] pgconn = psycopg2.connect(database='coop', host='iemdb', user='******') cursor = pgconn.cursor() ipgconn = psycopg2.connect(database='iem', host='iemdb', user='******') icursor = ipgconn.cursor() props = get_properties() dbx = dropbox.Dropbox(props.get('dropbox.token')) today = datetime.date.today() for i, site in enumerate(SITES): # Need to figure out this year's data thisyear = {} # get values from latest yieldfx dump for line in open('/mesonet/share/pickup/yieldfx/%s.met' % (site, )): line = line.strip() if not line.startswith('2016'): continue tokens = line.split() valid = (datetime.date(int(tokens[0]), 1, 1) + datetime.timedelta(days=int(tokens[1]) - 1)) if valid >= today: