def plotter(fdict):
    """ Go """
    import seaborn as sns

    ctx = get_autoplot_context(fdict, get_description())
    phenomena = ctx["p"]
    date = ctx.get("date")
    wfo = ctx["wfo"]
    pgconn = get_dbconn("postgis")
    ps = [phenomena]
    if phenomena == "_A":
        ps = ["TO", "SV"]
    df = read_sql(
        """
        SELECT issue at time zone 'UTC' as issue, tml_direction, tml_sknt
        from sbw WHERE phenomena in %s and wfo = %s and status = 'NEW'
        and tml_direction is not null and tml_sknt is not null
        ORDER by issue
        """,
        pgconn,
        params=(tuple(ps), wfo),
    )
    if df.empty:
        raise NoDataFound("No Data Found.")
    g = sns.jointplot(
        df["tml_direction"],
        speed(df["tml_sknt"], "KT").value("MPH"),
        s=40,
        stat_func=None,
        zorder=1,
        color="tan",
    ).plot_joint(sns.kdeplot, n_levels=6)
    g.ax_joint.set_xlabel("Storm Motion From Direction")
    g.ax_joint.set_ylabel("Storm Speed [MPH]")
    g.ax_joint.set_xticks(range(0, 361, 45))
    g.ax_joint.set_xticklabels(
        ["N", "NE", "E", "SE", "S", "SW", "W", "NW", "N"])
    if date:
        df2 = df[df["issue"].dt.date == date]
        g.ax_joint.scatter(
            df2["tml_direction"],
            speed(df2["tml_sknt"], "KT").value("MPH"),
            color="r",
            s=50,
            label=date.strftime("%b %-d, %Y"),
            zorder=2,
        )
        g.ax_joint.legend()
    g.ax_joint.grid()
    g.ax_marg_x.set_title(
        ("NWS %s\n%s Storm Motion\n"
         "%s warnings plotted between %s and %s") % (
            ctx["_nt"].sts[wfo]["name"],
            PDICT[phenomena],
            len(df.index),
            df["issue"].min().date().strftime("%b %-d, %Y"),
            df["issue"].max().date().strftime("%b %-d, %Y"),
        )
    )
    g.fig.subplots_adjust(top=0.9)
    return g.fig, df
def plotter(fdict): """ Go """ import matplotlib matplotlib.use('agg') import matplotlib.pyplot as plt ASOS = psycopg2.connect(database='asos', host='iemdb', user='******') cursor = ASOS.cursor(cursor_factory=psycopg2.extras.DictCursor) station = fdict.get('station', 'AMW') units = fdict.get('units', 'mph') network = fdict.get('network', 'IA_ASOS') nt = NetworkTable(network) cursor.execute(""" SELECT extract(doy from valid), sknt * 0.514, drct from alldata where station = %s and sknt >= 0 and drct >= 0 """, (station, )) uwnd = np.zeros((366,), 'f') vwnd = np.zeros((366,), 'f') cnt = np.zeros((366,), 'f') for row in cursor: u, v = uv(row[1], row[2]) uwnd[int(row[0]) - 1] += u vwnd[int(row[0]) - 1] += v cnt[int(row[0]) - 1] += 1 u = speed(uwnd / cnt, 'MPS').value(units.upper()) v = speed(vwnd / cnt, 'mps').value(units.upper()) df = pd.DataFrame(dict(u=pd.Series(u), v=pd.Series(v), day_of_year=pd.Series(np.arange(1, 366)))) (fig, ax) = plt.subplots(1, 1) ax.plot(np.arange(1, 366), smooth(u[:-1], 14, 'hamming'), color='r', label='u, West(+) : East(-) component') ax.plot(np.arange(1, 366), smooth(v[:-1], 14, 'hamming'), color='b', label='v, South(+) : North(-) component') ax.set_xticks([1, 32, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335, 365]) ax.set_xticklabels(calendar.month_abbr[1:]) ax.legend(ncol=2, fontsize=11, loc=(0., -0.15)) ax.grid(True) ax.set_xlim(0, 366) ax.set_title(("[%s] %s Daily Average Component Wind Speed\n" "[%s-%s] 14 day smooth filter applied, %.0f obs found" "") % (station, nt.sts[station]['name'], nt.sts[station]['archive_begin'].year, datetime.datetime.now().year, np.sum(cnt))) ax.set_ylabel("Average Wind Speed %s" % (PDICT.get(units), )) box = ax.get_position() ax.set_position([box.x0, box.y0 + box.height * 0.1, box.width, box.height * 0.9]) return fig, df
def main(): """Go Main""" pgconn = get_dbconn('asos') df = read_sql(""" SELECT valid - '1 hour'::interval as valid, drct, sknt, gust_sknt, pres1, tmpf, dwpf from t2018_1minute where station = %s and valid >= '2018-06-14 08:30' and valid <= '2018-06-14 10:15' ORDER by valid ASC """, pgconn, params=('PHP', ), index_col='valid') xticks = [] xticklabels = [] for valid in df.index.values: if pd.to_datetime(valid).minute % 15 == 0: xticks.append(valid) ts = pd.to_datetime(valid) - datetime.timedelta(hours=5) xticklabels.append(ts.strftime("%-H:%M\n%p")) fig = plt.figure(figsize=(8, 9)) ax = fig.add_axes([0.1, 0.55, 0.75, 0.35]) ax.plot(df.index.values, df['tmpf'], label='Air Temp') ax.plot(df.index.values, df['dwpf'], label='Dew Point') ax.legend() ax.grid(True) ax.set_ylabel("Temperature $^\circ$F") ax.set_xticks(xticks) ax.set_xticklabels(xticklabels) ax.set_title(("Philip, SD (KPHP) ASOS 1 Minute Interval Data for 14 Jun 2018\n" "Heat Burst Event, data missing in NCEI files 8:02 to 8:10 AM")) ax = fig.add_axes([0.1, 0.08, 0.75, 0.35]) ax.bar(df.index.values, speed(df['gust_sknt'], 'KT').value('MPH'), width=1/1440., color='red') ax.bar(df.index.values, speed(df['sknt'], 'KT').value('MPH'), width=1/1440., color='tan') ax.set_ylabel("Wind Speed (tan) & Gust (red) [mph]") ax.grid(True, zorder=5) ax.set_ylim(0, 60) ax2 = ax.twinx() ax2.plot(df.index.values, pressure(df['pres1'], 'IN').value('MB'), color='g', lw=2) ax2.set_ylabel("Air Pressure [hPa]", color='green') ax2.set_xticks(xticks) ax2.set_xticklabels(xticklabels) ax.set_xlabel("14 June 2018 MDT") ax2.set_ylim(923, 926) ax2.set_yticks(np.arange(923, 926.1, 0.5)) # ax2.set_zorder(ax.get_zorder()-1) # ax2.set_ylim(0, 360) # ax2.set_yticks(range(0, 361, 45)) # ax2.set_yticklabels(['N', 'NE', 'E', 'SE', 'S', 'SW', 'W', 'NW', 'N']) fig.savefig('test.png')
def hourly_process(nwsli, maxts): """ Process the hourly file """ fn = "%s/%s_HrlySI.dat" % (BASE, STATIONS[nwsli]) df = common_df_logic(fn, maxts, nwsli, "sm_hourly") if df is None: return 0 processed = 0 LOG.debug("processing %s rows from %s", len(df.index), fn) acursor = ACCESS.cursor() for _i, row in df.iterrows(): # Update IEMAccess ob = Observation(nwsli, "ISUSM", row["valid"]) tmpc = temperature(row["tair_c_avg_qc"], "C") if tmpc.value("F") > -50 and tmpc.value("F") < 140: ob.data["tmpf"] = tmpc.value("F") relh = humidity(row["rh_qc"], "%") ob.data["relh"] = relh.value("%") ob.data["dwpf"] = met.dewpoint(tmpc, relh).value("F") ob.data["srad"] = row["slrkw_avg_qc"] ob.data["phour"] = round( distance(row["rain_mm_tot_qc"], "MM").value("IN"), 2 ) ob.data["sknt"] = speed(row["ws_mps_s_wvt_qc"], "MPS").value("KT") if "ws_mph_max" in df.columns: ob.data["gust"] = speed(row["ws_mph_max_qc"], "MPH").value("KT") ob.data["max_gust_ts"] = row["ws_mph_tmx"] ob.data["drct"] = row["winddir_d1_wvt_qc"] if "tsoil_c_avg" in df.columns: ob.data["c1tmpf"] = temperature(row["tsoil_c_avg_qc"], "C").value( "F" ) if "t12_c_avg_qc" in df.columns: ob.data["c2tmpf"] = temperature(row["t12_c_avg_qc"], "C").value( "F" ) if "t24_c_avg_qc" in df.columns: ob.data["c3tmpf"] = temperature(row["t24_c_avg_qc"], "C").value( "F" ) if "t50_c_avg" in df.columns: ob.data["c4tmpf"] = temperature(row["t50_c_avg_qc"], "C").value( "F" ) if "calc_vwc_12_avg" in df.columns: ob.data["c2smv"] = row["calc_vwc_12_avg_qc"] * 100.0 if "calc_vwc_24_avg" in df.columns: ob.data["c3smv"] = row["calc_vwc_24_avg_qc"] * 100.0 if "calc_vwc_50_avg" in df.columns: ob.data["c4smv"] = row["calc_vwc_50_avg_qc"] * 100.0 ob.save(acursor) processed += 1 acursor.close() ACCESS.commit() return processed
def hourly_process(nwsli, maxts): """ Process the hourly file """ fn = "%s/%s_HrlySI.dat" % (BASE, STATIONS[nwsli]) df = common_df_logic(fn, maxts, nwsli, "sm_hourly") if df is None: return 0 processed = 0 LOG.debug("processing %s rows from %s", len(df.index), fn) acursor = ACCESS.cursor() for _i, row in df.iterrows(): # Update IEMAccess # print nwsli, valid ob = Observation(nwsli, 'ISUSM', row['valid']) tmpc = temperature(row['tair_c_avg_qc'], 'C') if tmpc.value('F') > -50 and tmpc.value('F') < 140: ob.data['tmpf'] = tmpc.value('F') relh = humidity(row['rh_qc'], '%') ob.data['relh'] = relh.value('%') ob.data['dwpf'] = met.dewpoint(tmpc, relh).value('F') ob.data['srad'] = row['slrkw_avg_qc'] ob.data['phour'] = round(distance(row['rain_mm_tot_qc'], 'MM').value('IN'), 2) ob.data['sknt'] = speed(row['ws_mps_s_wvt_qc'], 'MPS').value("KT") if 'ws_mph_max' in df.columns: ob.data['gust'] = speed(row['ws_mph_max_qc'], 'MPH').value('KT') ob.data['max_gust_ts'] = row['ws_mph_tmx'] ob.data['drct'] = row['winddir_d1_wvt_qc'] if 'tsoil_c_avg' in df.columns: ob.data['c1tmpf'] = temperature(row['tsoil_c_avg_qc'], 'C').value('F') if 't12_c_avg_qc' in df.columns: ob.data['c2tmpf'] = temperature( row['t12_c_avg_qc'], 'C').value('F') if 't24_c_avg_qc' in df.columns: ob.data['c3tmpf'] = temperature( row['t24_c_avg_qc'], 'C').value('F') if 't50_c_avg' in df.columns: ob.data['c4tmpf'] = temperature(row['t50_c_avg_qc'], 'C').value('F') if 'calc_vwc_12_avg' in df.columns: ob.data['c2smv'] = row['calc_vwc_12_avg_qc'] * 100.0 if 'calc_vwc_24_avg' in df.columns: ob.data['c3smv'] = row['calc_vwc_24_avg_qc'] * 100.0 if 'calc_vwc_50_avg' in df.columns: ob.data['c4smv'] = row['calc_vwc_50_avg_qc'] * 100.0 ob.save(acursor) # print 'soilm_ingest.py station: %s ts: %s hrly updated no data?' % ( # nwsli, valid) processed += 1 acursor.close() ACCESS.commit() return processed
def read_excel(siteid, fn): df = pd.read_excel(fn, skiprows=[1, ]) newcols = {} for k in df.columns: newcols[k] = XREF.get(k, k) df.rename(columns=newcols, inplace=True) df['valid'] = df['valid'] + datetime.timedelta(hours=TZREF[siteid]) # do some conversions print("ALERT: doing windspeed unit conv") df['windspeed_mps'] = speed(df['windspeed_mps'].values, 'KMH').value('MPS') print("ALERT: doing windgustunit conv") df['windgust_mps'] = speed(df['windgust_mps'].values, 'KMH').value('MPS') return df
def plotter(fdict):
    """ Go """
    import seaborn as sns
    ctx = get_autoplot_context(fdict, get_description())
    phenomena = ctx['p']
    date = ctx.get('date')
    wfo = ctx['wfo']
    pgconn = get_dbconn('postgis')
    ps = [phenomena]
    if phenomena == '_A':
        ps = ['TO', 'SV']
    df = read_sql("""
        SELECT issue at time zone 'UTC' as issue, tml_direction, tml_sknt
        from sbw WHERE phenomena in %s and wfo = %s and status = 'NEW'
        and tml_direction is not null and tml_sknt is not null
        ORDER by issue
    """, pgconn, params=(tuple(ps), wfo))
    if df.empty:
        raise NoDataFound("No Data Found.")
    g = sns.jointplot(df['tml_direction'],
                      speed(df['tml_sknt'], 'KT').value('MPH'),
                      s=40, stat_func=None, zorder=1,
                      color='tan').plot_joint(sns.kdeplot, n_levels=6)
    g.ax_joint.set_xlabel("Storm Motion From Direction")
    g.ax_joint.set_ylabel("Storm Speed [MPH]")
    g.ax_joint.set_xticks(range(0, 361, 45))
    g.ax_joint.set_xticklabels(
        ['N', 'NE', 'E', 'SE', 'S', 'SW', 'W', 'NW', 'N'])
    if date:
        df2 = df[df['issue'].dt.date == date]
        g.ax_joint.scatter(df2['tml_direction'],
                           speed(df2['tml_sknt'], 'KT').value('MPH'),
                           color='r', s=50,
                           label=date.strftime("%b %-d, %Y"), zorder=2)
        g.ax_joint.legend()
    g.ax_joint.grid()
    g.ax_marg_x.set_title(
        ("NWS %s\n%s Storm Motion\n"
         "%s warnings plotted between %s and %s"
         ) % (ctx['_nt'].sts[wfo]['name'], PDICT[phenomena],
              len(df.index),
              df['issue'].min().date().strftime("%b %-d, %Y"),
              df['issue'].max().date().strftime("%b %-d, %Y")))
    g.fig.subplots_adjust(top=.9)
    return g.fig, df
def test_uv(): """ Test calculation of uv wind components """ speed = datatypes.speed([10, ], 'KT') mydir = datatypes.direction([0, ], 'DEG') u, v = meteorology.uv(speed, mydir) assert u.value("KT") == 0. assert v.value("KT") == -10. speed = datatypes.speed([10, 20, 15], 'KT') mydir = datatypes.direction([90, 180, 135], 'DEG') u, v = meteorology.uv(speed, mydir) assert u.value("KT")[0] == -10 assert v.value("KT")[1] == 20. assert abs(v.value("KT")[2] - 10.6) < 0.1
def test_uv(): """ Test calculation of uv wind components """ speed = datatypes.speed([10], "KT") mydir = datatypes.direction([0], "DEG") u, v = meteorology.uv(speed, mydir) assert u.value("KT") == 0.0 assert v.value("KT") == -10.0 speed = datatypes.speed([10, 20, 15], "KT") mydir = datatypes.direction([90, 180, 135], "DEG") u, v = meteorology.uv(speed, mydir) assert u.value("KT")[0] == -10 assert v.value("KT")[1] == 20.0 assert abs(v.value("KT")[2] - 10.6) < 0.1
def test_uv(self): """ Test calculation of uv wind components """ speed = datatypes.speed([10,], 'KT') mydir = datatypes.direction([0,], 'DEG') u,v = meteorology.uv(speed, mydir) self.assertEqual(u.value("KT"), 0.) self.assertEqual(v.value("KT"), -10.) speed = datatypes.speed([10,20,15], 'KT') mydir = datatypes.direction([90,180,135], 'DEG') u,v = meteorology.uv(speed, mydir) self.assertEqual(u.value("KT")[0], -10) self.assertEqual(v.value("KT")[1], 20.) self.assertAlmostEquals(v.value("KT")[2], 10.6, 1)
def rabbit_tracks(row): """Generate a rabbit track for this attr""" res = "" if row['sknt'] is None or row['sknt'] <= 5 or row['drct'] is None: return res # 5 carrots at six minutes to get 30 minutes? lat0 = row['lat'] lon0 = row['lon'] drct = row['drct'] sknt = row['sknt'] x0, y0 = P3857(lon0, lat0) smps = speed(sknt, 'KTS').value('MPS') angle = dir2ccwrot(drct) rotation = (drct + 180) % 360 rad = math.radians(angle) x = x0 + math.cos(rad) * smps * SECONDS y = y0 + math.sin(rad) * smps * SECONDS # Draw white line out 30 minutes lons, lats = P3857(x, y, inverse=True) res += ("Line: 1, 0, \"Cell [%s]\"\n" "%.4f, %.4f\n" "%.4f, %.4f\n" "END:\n") % (row['storm_id'], lat0, lon0, lats[-1], lons[-1]) for i in range(3): res += ("Icon: %.4f,%.4f,%.0f,1,10,\"+%.0f min\"\n") % ( lats[i], lons[i], rotation, (i + 1) * 15) return res
def wind_message(self): """Convert this into a Jabber style message""" drct = 0 sknt = 0 time = self.time.replace(tzinfo=timezone.utc) if self.wind_gust: sknt = self.wind_gust.value("KT") if self.wind_dir: drct = self.wind_dir.value() if self.wind_speed_peak: v1 = self.wind_speed_peak.value("KT") d1 = self.wind_dir_peak.value() t1 = self.peak_wind_time.replace(tzinfo=timezone.utc) if v1 > sknt: sknt = v1 drct = d1 time = t1 key = "%s;%s;%s" % (self.station_id, sknt, time) if key not in WIND_ALERTS: WIND_ALERTS[key] = 1 speed = datatypes.speed(sknt, "KT") return ("gust of %.0f knots (%.1f mph) from %s @ %s") % ( speed.value("KT"), speed.value("MPH"), drct2text(drct), time.strftime("%H%MZ"), )
def one(): """option 1""" icursor = ISUAG.cursor() iemcursor = IEM.cursor() icursor.execute( """ SELECT station, valid, ws_mps_s_wvt, winddir_d1_wvt, rain_mm_tot, tair_c_max, tair_c_min from sm_daily """ ) for row in icursor: avg_sknt = speed(row[2], "MPS").value("KT") avg_drct = row[3] pday = distance(row[4], "MM").value("IN") high = temperature(row[5], "C").value("F") low = temperature(row[6], "C").value("F") iemcursor.execute( """ UPDATE summary SET avg_sknt = %s, vector_avg_drct = %s, pday = %s, max_tmpf = %s, min_tmpf = %s WHERE iemid = (select iemid from stations WHERE network = 'ISUSM' and id = %s) and day = %s """, (avg_sknt, avg_drct, pday, high, low, row[0], row[1]), ) iemcursor.close() IEM.commit() IEM.close()
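# one() above leans on pyiem's unit-conversion datatypes (speed, distance,
# temperature) to move metric sensor values into the English units stored in
# the IEM summary table.  A minimal sketch of those same conversions,
# assuming the pyiem.datatypes import path these snippets use implicitly;
# the numeric inputs are illustrative only.
from pyiem.datatypes import distance, speed, temperature

print(speed(5.0, "MPS").value("KT"))      # ~9.7 knots
print(distance(12.7, "MM").value("IN"))   # 0.5 inch
print(temperature(30.0, "C").value("F"))  # 86.0 F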
def plotter(fdict): """ Go """ pgconn = get_dbconn('iem') cursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor) ctx = get_autoplot_context(fdict, get_description()) station = ctx['station'] varname = ctx['var'] sdate = ctx['sdate'] edate = ctx['edate'] # Get Climatology cdf = read_sql(""" SELECT to_char(valid, 'mmdd') as sday, high, low, (high + low) / 2. as avg, precip from ncdc_climate81 WHERE station = %s """, get_dbconn('coop'), params=( ctx['_nt'].sts[station]['ncdc81'],), index_col='sday') if cdf.empty: raise NoDataFound("No Data Found.") cursor.execute(""" SELECT day, max_tmpf, min_tmpf, max_dwpf, min_dwpf, (max_tmpf + min_tmpf) / 2. as avg_tmpf, pday, coalesce(avg_sknt, 0) as avg_sknt from summary s JOIN stations t on (t.iemid = s.iemid) WHERE s.day >= %s and s.day <= %s and t.id = %s and t.network = %s ORDER by day ASC """, (sdate, edate, station, ctx['network'])) rows = [] data = {} for row in cursor: hd = row['max_tmpf'] - cdf.at[row[0].strftime("%m%d"), 'high'] ld = row['min_tmpf'] - cdf.at[row[0].strftime("%m%d"), 'low'] ad = row['avg_tmpf'] - cdf.at[row[0].strftime("%m%d"), 'avg'] rows.append(dict(day=row['day'], max_tmpf=row['max_tmpf'], avg_smph=speed(row['avg_sknt'], 'KT').value('MPH'), min_dwpf=row['min_dwpf'], max_dwpf=row['max_dwpf'], high_departure=hd, low_departure=ld, avg_departure=ad, min_tmpf=row['min_tmpf'], pday=row['pday'])) data[row[0]] = {'val': safe(rows[-1], varname)} if data[row[0]]['val'] == '0': data[row[0]]['color'] = 'k' elif varname == 'high_departure': data[row[0]]['color'] = 'b' if hd < 0 else 'r' elif varname == 'low_departure': data[row[0]]['color'] = 'b' if ld < 0 else 'r' elif varname == 'avg_departure': data[row[0]]['color'] = 'b' if ad < 0 else 'r' df = pd.DataFrame(rows) title = '[%s] %s Daily %s' % ( station, ctx['_nt'].sts[station]['name'], PDICT.get(varname)) subtitle = '%s thru %s' % ( sdate.strftime("%-d %b %Y"), edate.strftime("%-d %b %Y")) fig = calendar_plot( sdate, edate, data, title=title, subtitle=subtitle) return fig, df
def rabbit_tracks(row): """Generate a rabbit track for this attr""" res = "" if row['sknt'] is None or row['sknt'] <= 5 or row['drct'] is None: return res # 5 carrots at six minutes to get 30 minutes? lat0 = row['lat'] lon0 = row['lon'] drct = row['drct'] sknt = row['sknt'] x0, y0 = P3857(lon0, lat0) smps = speed(sknt, 'KTS').value('MPS') angle = dir2ccwrot(drct) rotation = (drct + 180) % 360 rad = math.radians(angle) x = x0 + math.cos(rad) * smps * SECONDS y = y0 + math.sin(rad) * smps * SECONDS # Draw white line out 30 minutes lons, lats = P3857(x, y, inverse=True) res += ("Line: 1, 0, \"Cell [%s]\"\n" "%.4f, %.4f\n" "%.4f, %.4f\n" "END:\n") % (row['storm_id'], lat0, lon0, lats[-1], lons[-1]) for i in range(3): res += ("Icon: %.4f,%.4f,%.0f,1,10,\"+%.0f min\"\n" ) % (lats[i], lons[i], rotation, (i+1)*15) return res
def rabbit_tracks(row): """Generate a rabbit track for this attr""" res = "" if row["sknt"] is None or row["sknt"] <= 5 or row["drct"] is None: return res # 5 carrots at six minutes to get 30 minutes? lat0 = row["lat"] lon0 = row["lon"] drct = row["drct"] sknt = row["sknt"] x0, y0 = P3857(lon0, lat0) smps = speed(sknt, "KTS").value("MPS") angle = dir2ccwrot(drct) rotation = (drct + 180) % 360 rad = math.radians(angle) x = x0 + math.cos(rad) * smps * SECONDS y = y0 + math.sin(rad) * smps * SECONDS # Draw white line out 30 minutes lons, lats = P3857(x, y, inverse=True) res += ( 'Line: 1, 0, "Cell [%s]"\n' "%.4f, %.4f\n" "%.4f, %.4f\n" "END:\n" ) % (row["storm_id"], lat0, lon0, lats[-1], lons[-1]) for i in range(3): res += ('Icon: %.4f,%.4f,%.0f,1,10,"+%.0f min"\n') % ( lats[i], lons[i], rotation, (i + 1) * 15, ) return res
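# The rabbit_tracks() variants above project the storm cell into EPSG:3857,
# march it downstream at the observed speed, and project the points back to
# lon/lat.  This is a self-contained sketch of that round trip, assuming
# pyproj for the projection and treating the lead times as an array (as the
# indexing of lats/lons above implies).  Here drct is assumed to be the
# direction the cell is moving toward; all names and values are illustrative.
import math
import numpy as np
from pyproj import Proj

P3857 = Proj("epsg:3857")
LEAD_SECONDS = np.array([900, 1800])  # +15 and +30 minute positions

def extrapolate(lon0, lat0, drct_toward, speed_mps):
    """Return future lon/lat positions of a cell moving toward drct_toward."""
    x0, y0 = P3857(lon0, lat0)
    # convert a compass bearing into a math-convention angle (CCW from east)
    angle = math.radians(90.0 - drct_toward)
    x = x0 + math.cos(angle) * speed_mps * LEAD_SECONDS
    y = y0 + math.sin(angle) * speed_mps * LEAD_SECONDS
    return P3857(x, y, inverse=True)

lons, lats = extrapolate(-93.6, 41.99, 60.0, 15.0)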
def main(): """Go Main Go""" iemaccess = get_dbconn('iem') cursor = iemaccess.cursor() valid = datetime.datetime.utcnow() valid = valid.replace(tzinfo=pytz.utc) valid = valid.astimezone(pytz.timezone("America/Chicago")) fn = valid.strftime("/mesonet/ARCHIVE/data/%Y/%m/%d/text/ot/ot0002.dat") if not os.path.isfile(fn): sys.exit(0) lines = open(fn, "r").readlines() lastline = lines[-1] tokens = re.split(r"[\s+]+", lastline) tparts = re.split(":", tokens[4]) valid = valid.replace(hour=int(tparts[0]), minute=int(tparts[1]), second=int(tparts[2])) iem = Observation("OT0002", "OT", valid) sknt = speed(float(tokens[8]), 'MPH').value('KT') iem.data['sknt'] = sknt iem.data['drct'] = tokens[9] iem.data['tmpf'] = tokens[7] iem.save(cursor) cursor.close() iemaccess.commit()
def daily_process(nwsli, maxts): """ Process the daily file """ fn = "%s/%s_DailySI.dat" % (BASE, STATIONS[nwsli]) df = common_df_logic(fn, maxts, nwsli, "sm_daily") if df is None: return 0 LOG.debug("processing %s rows from %s", len(df.index), fn) processed = 0 acursor = ACCESS.cursor() for _i, row in df.iterrows(): # Need a timezone valid = datetime.datetime( row["valid"].year, row["valid"].month, row["valid"].day, 12, 0 ) valid = valid.replace(tzinfo=pytz.timezone("America/Chicago")) ob = Observation(nwsli, "ISUSM", valid) ob.data["max_tmpf"] = temperature(row["tair_c_max_qc"], "C").value("F") ob.data["min_tmpf"] = temperature(row["tair_c_min_qc"], "C").value("F") ob.data["pday"] = round( distance(row["rain_mm_tot_qc"], "MM").value("IN"), 2 ) if valid not in EVENTS["days"]: EVENTS["days"].append(valid) ob.data["et_inch"] = distance(row["dailyet_qc"], "MM").value("IN") ob.data["srad_mj"] = row["slrmj_tot_qc"] # Someday check if this is apples to apples here ob.data["vector_avg_drct"] = row["winddir_d1_wvt_qc"] if ob.data["max_tmpf"] is None: EVENTS["reprocess_temps"] = True if ob.data["srad_mj"] == 0 or np.isnan(ob.data["srad_mj"]): LOG.info( "soilm_ingest.py station: %s ts: %s has 0 solar", nwsli, valid.strftime("%Y-%m-%d"), ) EVENTS["reprocess_solar"] = True if "ws_mps_max" in df.columns: ob.data["max_sknt"] = speed(row["ws_mps_max_qc"], "MPS").value( "KT" ) ob.data["avg_sknt"] = speed(row["ws_mps_s_wvt_qc"], "MPS").value("KT") ob.save(acursor) processed += 1 acursor.close() ACCESS.commit() return processed
def main(): """Go Main Go""" pgconn = get_dbconn("coop") cursor = pgconn.cursor() # Need to have a merge of windspeed and average rh dsm = {} ipgconn = get_dbconn("iem") icursor = ipgconn.cursor() icursor.execute(""" select day, avg_sknt, avg_rh from summary where iemid = 37004 and day >= '1980-01-01' ORDER by day ASC""") for row in icursor: if row[1] is None or row[2] is None: dsm[row[0]] = dsm[row[0] - datetime.timedelta(days=1)] else: dsm[row[0]] = { "wind_speed": speed(row[1], "KTS").value("MPS"), "avg_rh": row[2], } os.chdir("baseline") for fn in glob.glob("*.met"): location = fn[:-4] cursor.execute( """ DELETE from yieldfx_baseline where station = %s """, (location, ), ) LOG.info("Removed %s rows for station: %s", cursor.rowcount, location) for line in open(fn): line = line.strip() if not line.startswith("19") and not line.startswith("20"): continue tokens = line.split() valid = datetime.date(int( tokens[0]), 1, 1) + datetime.timedelta(days=int(tokens[1]) - 1) cursor.execute( """ INSERT into yieldfx_baseline (station, valid, radn, maxt, mint, rain, windspeed, rh) VALUES (%s, %s, %s, %s, %s, %s, %s, %s) """, ( location, valid, float(tokens[2]), float(tokens[3]), float(tokens[4]), float(tokens[5]), dsm[valid]["wind_speed"], dsm[valid]["avg_rh"], ), ) cursor.close() pgconn.commit() pgconn.close()
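# The .met baseline files read above store dates as a year plus a
# day-of-year column; the conversion is just an offset from 1 January.
# A tiny worked example of that arithmetic:
import datetime

year, doy = 1995, 60
valid = datetime.date(year, 1, 1) + datetime.timedelta(days=doy - 1)
print(valid)  # 1995-03-01 (day 60 of a non-leap year)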
def plotter(fdict): """ Go """ import matplotlib matplotlib.use('agg') import matplotlib.pyplot as plt import matplotlib.patheffects as PathEffects pgconn = psycopg2.connect(database='iem', host='iemdb', user='******') cursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor) station = fdict.get('zstation', 'AMW') network = fdict.get('network', 'IA_ASOS') units = fdict.get('units', 'MPH').upper() if units not in PDICT: units = 'MPH' year = int(fdict.get('year', datetime.datetime.now().year)) month = int(fdict.get('month', datetime.datetime.now().month)) sts = datetime.date(year, month, 1) ets = (sts + datetime.timedelta(days=35)).replace(day=1) nt = NetworkTable(network) cursor.execute(""" SELECT day, avg_sknt, vector_avg_drct from summary s JOIN stations t ON (t.iemid = s.iemid) WHERE t.id = %s and t.network = %s and s.day >= %s and s.day < %s ORDER by day ASC """, (station, network, sts, ets)) days = [] drct = [] sknt = [] for row in cursor: if row[1] is None: continue days.append(row[0].day) drct.append(row[2]) sknt.append(row[1]) if len(sknt) == 0: return "ERROR: No Data Found" df = pd.DataFrame(dict(day=pd.Series(days), drct=pd.Series(drct), sknt=pd.Series(sknt))) sknt = speed(np.array(sknt), 'KT').value(units) (fig, ax) = plt.subplots(1, 1) ax.bar(np.array(days)-0.4, sknt, ec='green', fc='green') pos = max([min(sknt) / 2.0, 0.5]) for d, _, r in zip(days, sknt, drct): draw_line(plt, d, max(sknt)+0.5, (270. - r) / 180. * np.pi) txt = ax.text(d, pos, drct2text(r), ha='center', rotation=90, color='white', va='center') txt.set_path_effects([PathEffects.withStroke(linewidth=2, foreground="k")]) ax.grid(True, zorder=11) ax.set_title(("%s [%s]\n%s Daily Average Wind Speed and Direction" ) % (nt.sts[station]['name'], station, sts.strftime("%b %Y"))) ax.set_xlim(0.5, max(days)+0.5) ax.set_ylim(top=max(sknt)+2) ax.set_ylabel("Average Wind Speed [%s]" % (PDICT.get(units),)) return fig, df
def plotter(fdict): """ Go """ pgconn = get_dbconn('iem') cursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor) ctx = get_autoplot_context(fdict, get_description()) station = ctx['zstation'] network = ctx['network'] units = ctx['units'] year = ctx['year'] month = ctx['month'] sts = datetime.date(year, month, 1) ets = (sts + datetime.timedelta(days=35)).replace(day=1) nt = NetworkTable(network) cursor.execute( """ SELECT day, avg_sknt, vector_avg_drct from summary s JOIN stations t ON (t.iemid = s.iemid) WHERE t.id = %s and t.network = %s and s.day >= %s and s.day < %s ORDER by day ASC """, (station, network, sts, ets)) days = [] drct = [] sknt = [] for row in cursor: if row[1] is None: continue days.append(row[0].day) drct.append(row[2]) sknt.append(row[1]) if not sknt: raise ValueError("ERROR: No Data Found") df = pd.DataFrame( dict(day=pd.Series(days), drct=pd.Series(drct), sknt=pd.Series(sknt))) sknt = speed(np.array(sknt), 'KT').value(units) (fig, ax) = plt.subplots(1, 1) ax.bar(np.array(days), sknt, ec='green', fc='green', align='center') pos = max([min(sknt) / 2.0, 0.5]) for d, _, r in zip(days, sknt, drct): draw_line(d, max(sknt) + 0.5, (270. - r) / 180. * np.pi) txt = ax.text(d, pos, drct2text(r), ha='center', rotation=90, color='white', va='center') txt.set_path_effects( [PathEffects.withStroke(linewidth=2, foreground="k")]) ax.grid(True, zorder=11) ax.set_title(("%s [%s]\n%s Daily Average Wind Speed and Direction") % (nt.sts[station]['name'], station, sts.strftime("%b %Y"))) ax.set_xlim(0.5, 31.5) ax.set_xticks(range(1, 31, 5)) ax.set_ylim(top=max(sknt) + 2) ax.set_ylabel("Average Wind Speed [%s]" % (PDICT.get(units), )) return fig, df
def uv(speed, direction): """ Compute the u and v components of the wind @param wind speed in whatever units @param dir wind direction with zero as north @return u and v components """ if (not isinstance(speed, dt.speed) or not isinstance(direction, dt.direction)): raise InvalidArguments(("uv() needs speed and direction " "objects as args")) # Get radian units rad = direction.value("RAD") if rad is None or speed.value() is None: return None, None u = (0 - speed.value()) * np.sin(rad) v = (0 - speed.value()) * np.cos(rad) return (dt.speed(u, speed.get_units()), dt.speed(v, speed.get_units()))
def test_uv(self): """ Test calculation of uv wind components """ speed = datatypes.speed([ 10, ], 'KT') mydir = datatypes.direction([ 0, ], 'DEG') u, v = meteorology.uv(speed, mydir) self.assertEqual(u.value("KT"), 0.) self.assertEqual(v.value("KT"), -10.) speed = datatypes.speed([10, 20, 15], 'KT') mydir = datatypes.direction([90, 180, 135], 'DEG') u, v = meteorology.uv(speed, mydir) self.assertEqual(u.value("KT")[0], -10) self.assertEqual(v.value("KT")[1], 20.) self.assertAlmostEquals(v.value("KT")[2], 10.6, 1)
def uv(speed, direction): """ Compute the u and v components of the wind @param wind speed in whatever units @param dir wind direction with zero as north @return u and v components """ if not isinstance(speed, dt.speed) or not isinstance( direction, dt.direction): raise InvalidArguments(("uv() needs speed and direction " "objects as args")) # Get radian units rad = direction.value("RAD") if rad is None or speed.value() is None: return None, None u = (0 - speed.value()) * np.sin(rad) v = (0 - speed.value()) * np.cos(rad) return (dt.speed(u, speed.get_units()), dt.speed(v, speed.get_units()))
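# The uv() helper above implements the standard meteorological decomposition:
# a wind blowing *from* direction drct has u = -speed * sin(drct) and
# v = -speed * cos(drct).  A plain-numpy sketch of the same math, without the
# pyiem datatype wrappers (the function name is illustrative):
import numpy as np

def uv_components(speed_kt, drct_deg):
    """Decompose a meteorological speed/direction pair into u/v components."""
    rad = np.radians(drct_deg)
    return -speed_kt * np.sin(rad), -speed_kt * np.cos(rad)

# A 10 kt wind from due north gives u=0, v=-10, matching test_uv above.
print(uv_components(10.0, 0.0))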
def main(): """Go Main Go""" iemaccess = get_dbconn('iem') cursor = iemaccess.cursor() valid = datetime.datetime.utcnow() valid = valid.replace(tzinfo=pytz.utc) valid = valid.astimezone(pytz.timezone("America/Chicago")) fn = valid.strftime("/mesonet/ARCHIVE/data/%Y/%m/%d/text/ot/ot0010.dat") if not os.path.isfile(fn): sys.exit(0) lines = open(fn, "r").readlines() lastline = lines[-1].strip() tokens = re.split(r"[\s+]+", lastline) if len(tokens) != 20: return tparts = re.split(":", tokens[3]) valid = valid.replace(hour=int(tparts[0]), minute=int(tparts[1]), second=0, microsecond=0) iem = Observation("OT0010", "OT", valid) iem.data['tmpf'] = float(tokens[4]) iem.data['max_tmpf'] = float(tokens[5]) iem.data['min_tmpf'] = float(tokens[6]) iem.data['relh'] = int(tokens[7]) iem.data['dwpf'] = dewpoint(temperature(iem.data['tmpf'], 'F'), humidity(iem.data['relh'], '%')).value("F") iem.data['sknt'] = speed(float(tokens[8]), 'mph').value('KT') iem.data['drct'] = int(tokens[9]) iem.data['max_sknt'] = speed(float(tokens[10]), 'mph').value('KT') iem.data['alti'] = float(tokens[12]) iem.data['pday'] = float(tokens[13]) iem.data['srad'] = float(tokens[18]) iem.save(cursor) cursor.close() iemaccess.commit()
def daily_process(nwsli, maxts): """ Process the daily file """ # print '-------------- DAILY PROCESS ----------------' fn = "%s/%s_DailySI.dat" % (BASE, STATIONS[nwsli]) df = common_df_logic(fn, maxts, nwsli, "sm_daily") if df is None: return 0 LOG.debug("processing %s rows from %s", len(df.index), fn) processed = 0 acursor = ACCESS.cursor() for _i, row in df.iterrows(): # Need a timezone valid = datetime.datetime(row['valid'].year, row['valid'].month, row['valid'].day, 12, 0) valid = valid.replace(tzinfo=pytz.timezone("America/Chicago")) ob = Observation(nwsli, 'ISUSM', valid) ob.data['max_tmpf'] = temperature(row['tair_c_max_qc'], 'C').value('F') ob.data['min_tmpf'] = temperature(row['tair_c_min_qc'], 'C').value('F') ob.data['pday'] = round(distance(row['rain_mm_tot_qc'], 'MM').value('IN'), 2) if valid not in EVENTS['days']: EVENTS['days'].append(valid) ob.data['et_inch'] = distance(row['dailyet_qc'], 'MM').value('IN') ob.data['srad_mj'] = row['slrmj_tot_qc'] # Someday check if this is apples to apples here ob.data['vector_avg_drct'] = row['winddir_d1_wvt_qc'] if ob.data['max_tmpf'] is None: EVENTS['reprocess_temps'] = True if ob.data['srad_mj'] == 0 or np.isnan(ob.data['srad_mj']): LOG.info( "soilm_ingest.py station: %s ts: %s has 0 solar", nwsli, valid.strftime("%Y-%m-%d") ) EVENTS['reprocess_solar'] = True if 'ws_mps_max' in df.columns: ob.data['max_sknt'] = speed(row['ws_mps_max_qc'], 'MPS').value('KT') ob.data['avg_sknt'] = speed(row['ws_mps_s_wvt_qc'], 'MPS').value('KT') ob.save(acursor) processed += 1 acursor.close() ACCESS.commit() return processed
def test_vectorized(self): """See that heatindex and windchill can do lists""" temp = datatypes.temperature([0, 10], 'F') sknt = datatypes.speed([30, 40], 'MPH') val = meteorology.windchill(temp, sknt).value('F') self.assertAlmostEquals(val[0], -24.50, 2) t = datatypes.temperature([80.0, 90.0], 'F') td = datatypes.temperature([70.0, 60.0], 'F') hdx = meteorology.heatindex(t, td) self.assertAlmostEqual(hdx.value("F")[0], 83.93, 2)
def plotter(fdict): """ Go """ import matplotlib matplotlib.use('agg') from pyiem.plot import calendar_plot pgconn = psycopg2.connect(database='iem', host='iemdb', user='******') cursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor) ctx = get_autoplot_context(fdict, get_description()) station = ctx['station'] varname = ctx['var'] network = ctx['network'] sdate = ctx['sdate'] edate = ctx['edate'] nt = NetworkTable(network) # Get Climatology cdf = read_sql("""SELECT to_char(valid, 'mmdd') as sday, high, low, precip from ncdc_climate81 WHERE station = %s """, psycopg2.connect(database='coop', host='iemdb', user='******'), params=(nt.sts[station]['ncdc81'],), index_col='sday') cursor.execute(""" SELECT day, max_tmpf, min_tmpf, max_dwpf, min_dwpf, pday, coalesce(avg_sknt, 0) as avg_sknt from summary s JOIN stations t on (t.iemid = s.iemid) WHERE s.day >= %s and s.day <= %s and t.id = %s and t.network = %s ORDER by day ASC """, (sdate, edate, station, network)) rows = [] data = {} for row in cursor: hd = row['max_tmpf'] - cdf.at[row[0].strftime("%m%d"), 'high'] ld = row['min_tmpf'] - cdf.at[row[0].strftime("%m%d"), 'low'] rows.append(dict(day=row['day'], max_tmpf=row['max_tmpf'], avg_smph=speed(row['avg_sknt'], 'KT').value('MPH'), min_dwpf=row['min_dwpf'], max_dwpf=row['max_dwpf'], high_departure=hd, low_departure=ld, min_tmpf=row['min_tmpf'], pday=row['pday'])) data[row[0]] = {'val': safe(rows[-1], varname)} if varname == 'high_departure': data[row[0]]['color'] = 'b' if hd < 0 else 'r' elif varname == 'low_departure': data[row[0]]['color'] = 'b' if ld < 0 else 'r' df = pd.DataFrame(rows) title = ('[%s] %s Daily %s\n%s thru %s' ) % (station, nt.sts[station]['name'], PDICT.get(varname), sdate.strftime("%-d %b %Y"), edate.strftime("%-d %b %Y")) fig = calendar_plot(sdate, edate, data, title=title) return fig, df
def do_windalerts(obs): """Iterate through the obs and do wind alerts where appropriate""" for sid in obs: # Problem sites with lightning issues if sid in [ "RBFI4", "RTMI4", "RWII4", "RCAI4", "RDYI4", "RDNI4", "RCDI4", "RCII4", "RCLI4", "VCTI4", "RGAI4", "RAVI4", ]: continue ob = obs[sid] # screening if ob.get("gust") is None or ob["gust"] < 40: continue if np.isnan(ob["gust"]): continue smph = speed(ob["gust"], "KT").value("MPH") if smph < 50: continue if smph > 100: print(("process_rwis did not relay gust %.1f MPH from %s" "") % (smph, sid)) continue # Use a hacky tmp file to denote a wind alert that was sent fn = "/tmp/iarwis.%s.%s" % (sid, ob["valid"].strftime("%Y%m%d%H%M")) if os.path.isfile(fn): continue o = open(fn, "w") o.write(" ") o.close() lts = ob["valid"].astimezone(pytz.timezone("America/Chicago")) stname = NT.sts[sid]["name"] msg = ("At %s, a wind gust of %.1f mph (%.1f kts) was recorded " "at the %s (%s) Iowa RWIS station" "") % (lts.strftime("%I:%M %p %d %b %Y"), smph, ob["gust"], stname, sid) mt = MIMEText(msg) mt["From"] = "*****@*****.**" # mt['To'] = '*****@*****.**' mt["To"] = "*****@*****.**" mt["Subject"] = "Iowa RWIS Wind Gust %.0f mph %s" % (smph, stname) s = smtplib.SMTP("mailhub.iastate.edu") s.sendmail(mt["From"], [mt["To"]], mt.as_string()) s.quit()
def googlesheet(siteid, sheetkey): """Harvest a google sheet, please""" rows = [] config = util.get_config() sheets = util.get_sheetsclient(config, "td") f = sheets.spreadsheets().get(spreadsheetId=sheetkey, includeGridData=True) j = util.exponential_backoff(f.execute) for sheet in j['sheets']: # sheet_title = sheet['properties']['title'] for griddata in sheet['data']: for row, rowdata in enumerate(griddata['rowData']): if 'values' not in rowdata: # empty sheet continue if row == 1: # skip units continue if row == 0: header = [] for col, celldata in enumerate(rowdata['values']): header.append(celldata['formattedValue']) continue data = {} for col, celldata in enumerate(rowdata['values']): data[header[col]] = fmt(celldata.get('formattedValue')) rows.append(data) df = pd.DataFrame(rows) print("googlesheet has columns: %s" % (repr(df.columns.values),)) newcols = {} for k in df.columns: newcols[k] = XREF.get(k, k) df.rename(columns=newcols, inplace=True) df['valid'] = pd.to_datetime(df['valid'], errors='raise', format='%m/%d/%y %H:%M') df['valid'] = df['valid'] + datetime.timedelta(hours=TZREF[siteid]) # do some conversions print("ALERT: doing windspeed unit conv") df['windspeed_mps'] = speed(df['windspeed_mps'].values, 'KMH').value('MPS') print("ALERT: doing windgustunit conv") df['windgust_mps'] = speed(df['windgust_mps'].values, 'KMH').value('MPS') return df
def minute_iemaccess(df): """Process dataframe into iemaccess.""" pgconn = get_dbconn("iem") cursor = pgconn.cursor() for _i, row in df.iterrows(): # Update IEMAccess # print nwsli, valid ob = Observation(row["station"], "ISUSM", row["valid"]) tmpc = temperature(row["tair_c_avg_qc"], "C") if tmpc.value("F") > -50 and tmpc.value("F") < 140: ob.data["tmpf"] = tmpc.value("F") relh = humidity(row["rh_avg_qc"], "%") ob.data["relh"] = relh.value("%") ob.data["dwpf"] = met.dewpoint(tmpc, relh).value("F") # database srad is W/ms2 ob.data["srad"] = row["slrkj_tot_qc"] / 60.0 * 1000.0 ob.data["pcounter"] = row["rain_in_tot_qc"] ob.data["sknt"] = speed(row["ws_mph_s_wvt_qc"], "MPH").value("KT") if "ws_mph_max" in df.columns: ob.data["gust"] = speed(row["ws_mph_max_qc"], "MPH").value("KT") ob.data["drct"] = row["winddir_d1_wvt_qc"] if "tsoil_c_avg" in df.columns: ob.data["c1tmpf"] = temperature(row["tsoil_c_avg_qc"], "C").value( "F" ) ob.data["c2tmpf"] = temperature(row["t12_c_avg_qc"], "C").value("F") ob.data["c3tmpf"] = temperature(row["t24_c_avg_qc"], "C").value("F") if "t50_c_avg" in df.columns: ob.data["c4tmpf"] = temperature(row["t50_c_avg_qc"], "C").value( "F" ) if "calcvwc12_avg" in df.columns: ob.data["c2smv"] = row["calcvwc12_avg_qc"] * 100.0 if "calcvwc24_avg" in df.columns: ob.data["c3smv"] = row["calcvwc24_avg_qc"] * 100.0 if "calcvwc50_avg" in df.columns: ob.data["c4smv"] = row["calcvwc50_avg_qc"] * 100.0 ob.save(cursor) cursor.close() pgconn.commit()
def windrose(station, database='asos', months=np.arange(1, 13), hours=np.arange(0, 24), sts=datetime.datetime(1970, 1, 1), ets=datetime.datetime(2050, 1, 1), units="mph", nsector=36, justdata=False, rmax=None, cursor=None, sname=None, sknt=None, drct=None, valid=None, level=None, bins=[]): """Utility function that generates a windrose plot Args: station (str): station identifier to search database for database (str,optional): database name to look for data within months (list,optional): optional list of months to limit plot to hours (list,optional): optional list of hours to limit plot to sts (datetime,optional): start datetime ets (datetime,optional): end datetime units (str,optional): units to plot values as nsector (int,optional): number of bins to devide the windrose into justdata (boolean,optional): if True, write out the data only cursor (psycopg2.cursor,optional): provide a database cursor to run the query against. sname (str,optional): The name of this station, if not specified it will default to the ((`station`)) identifier sknt (list,optional): A list of wind speeds in knots already generated drct (list,optional): A list of wind directions (deg N) already generated valid (list,optional): A list of valid datetimes (with tzinfo set) level (int,optional): In case of RAOB, which level interests us (hPa) bins (list,optional): bins to use for the wind speed Returns: matplotlib.Figure instance or textdata """ monthinfo = _get_timeinfo(months, 'month', 12) hourinfo = _get_timeinfo(hours, 'hour', 24) if sknt is None or drct is None: df = _get_data(station, cursor, database, sts, ets, monthinfo, hourinfo, level) else: df = pd.DataFrame({'sknt': sknt, 'drct': drct, 'valid': valid}) # Convert wind speed into the units we want here if df['sknt'].max() > 0: df['speed'] = speed(df['sknt'].values, 'KT').value(units.upper()) if justdata: return _make_textresult(station, df, units, nsector, sname, monthinfo, hourinfo, level, bins) if len(df.index) < 5 or not df['sknt'].max() > 0: fig = plt.figure(figsize=(6, 7), dpi=80, facecolor='w', edgecolor='w') fig.text(0.17, 0.89, 'Not enough data available to generate plot') return fig return _make_plot(station, df, units, nsector, rmax, hours, months, sname, level, bins)
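# A hedged usage sketch for the windrose() helper above, supplying
# pre-fetched wind observations instead of letting it query the database.
# The station id, observations, and keyword values are illustrative; the
# internal plotting helpers are assumed to accept them as documented above.
import datetime
import numpy as np

valid = [datetime.datetime(2020, 1, 1, h, tzinfo=datetime.timezone.utc)
         for h in range(5)]
fig = windrose(
    "AMW",
    sknt=np.array([5, 12, 20, 8, 15]),
    drct=np.array([90, 180, 200, 270, 330]),
    valid=valid,
    units="mph",
    nsector=16,
)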
def do(ts):
    """ Do a UTC date's worth of data"""
    pgconn = get_dbconn("hads")
    table = ts.strftime("raw%Y_%m")
    sts = datetime.datetime(ts.year, ts.month, ts.day).replace(
        tzinfo=pytz.utc)
    ets = sts + datetime.timedelta(hours=24)
    df = read_sql(
        f"""
        SELECT station, valid, substr(key, 1, 3) as vname, value
        from {table} WHERE valid >= %s and valid < %s and
        substr(key, 1, 3) in ('USI', 'UDI', 'TAI', 'TDI')
        and value > -999
        """,
        pgconn,
        params=(sts, ets),
        index_col=None,
    )
    if df.empty:
        print("No data found for hads/raw2obs.py date: %s" % (ts, ))
        return
    pdf = pd.pivot_table(
        df, values="value", index=["station", "valid"], columns="vname")
    if "USI" in pdf.columns:
        pdf["sknt"] = speed(pdf["USI"].values, "MPH").value("KT")
    table = ts.strftime("t%Y")
    data = StringIO()
    for (station, valid), row in pdf.iterrows():
        data.write(("%s\t%s\t%s\t%s\t%s\t%s\n") % (
            station,
            valid.strftime("%Y-%m-%d %H:%M:%S+00"),
            v(row.get("TAI")),
            v(row.get("TDI")),
            v(row.get("UDI")),
            v(row.get("sknt")),
        ))
    cursor = pgconn.cursor()
    cursor.execute(
        f"DELETE from {table} WHERE valid between %s and %s", (sts, ets))
    data.seek(0)
    cursor.copy_from(
        data,
        table,
        columns=("station", "valid", "tmpf", "dwpf", "drct", "sknt"),
        null="null",
    )
    cursor.close()
    pgconn.commit()
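# do() above reshapes long key/value HADS rows into one wide row per
# (station, valid) with pandas.pivot_table.  A self-contained illustration of
# that reshape with made-up values:
import pandas as pd

long_df = pd.DataFrame({
    "station": ["DEMO1"] * 4,
    "valid": pd.to_datetime(["2020-01-01 00:00"] * 4),
    "vname": ["TAI", "TDI", "UDI", "USI"],
    "value": [50.0, 40.0, 180.0, 10.0],
})
wide = pd.pivot_table(
    long_df, values="value", index=["station", "valid"], columns="vname")
print(wide)  # one row with TAI, TDI, UDI and USI columns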
def test_drct(): """Conversion of u and v to direction""" r = meteorology.drct(datatypes.speed(np.array([10, 20]), 'KT'), datatypes.speed(np.array([10, 20]), 'KT')).value("DEG") assert r[0] == 225 r = meteorology.drct(datatypes.speed(-10, 'KT'), datatypes.speed(10, 'KT')).value("DEG") assert r == 135 r = meteorology.drct(datatypes.speed(-10, 'KT'), datatypes.speed(-10, 'KT')).value("DEG") assert r == 45 r = meteorology.drct(datatypes.speed(10, 'KT'), datatypes.speed(-10, 'KT')).value("DEG") assert r == 315
def test_drct(self): """Conversion of u and v to direction""" self.assertEquals( meteorology.drct(datatypes.speed(np.array([10, 20]), 'KT'), datatypes.speed(np.array([10, 20]), 'KT') ).value("DEG")[0], 225) self.assertEquals(meteorology.drct(datatypes.speed(-10, 'KT'), datatypes.speed(10, 'KT') ).value("DEG"), 135) self.assertEquals(meteorology.drct(datatypes.speed(-10, 'KT'), datatypes.speed(-10, 'KT') ).value("DEG"), 45) self.assertEquals(meteorology.drct(datatypes.speed(10, 'KT'), datatypes.speed(-10, 'KT') ).value("DEG"), 315)
def test_drct(self): """Conversion of u and v to direction""" self.assertEquals( meteorology.drct(datatypes.speed(np.array([10, 20]), 'KT'), datatypes.speed(np.array([10, 20]), 'KT')).value("DEG")[0], 225) self.assertEquals( meteorology.drct(datatypes.speed(-10, 'KT'), datatypes.speed(10, 'KT')).value("DEG"), 135) self.assertEquals( meteorology.drct(datatypes.speed(-10, 'KT'), datatypes.speed(-10, 'KT')).value("DEG"), 45) self.assertEquals( meteorology.drct(datatypes.speed(10, 'KT'), datatypes.speed(-10, 'KT')).value("DEG"), 315)
def minute_iemaccess(df): """Process dataframe into iemaccess.""" pgconn = get_dbconn('iem') cursor = pgconn.cursor() for _i, row in df.iterrows(): # Update IEMAccess # print nwsli, valid ob = Observation(row['station'], 'ISUSM', row['valid']) tmpc = temperature(row['tair_c_avg_qc'], 'C') if tmpc.value('F') > -50 and tmpc.value('F') < 140: ob.data['tmpf'] = tmpc.value('F') relh = humidity(row['rh_avg_qc'], '%') ob.data['relh'] = relh.value('%') ob.data['dwpf'] = met.dewpoint(tmpc, relh).value('F') # database srad is W/ms2 ob.data['srad'] = row['slrkj_tot_qc'] / 60. * 1000. ob.data['pcounter'] = row['rain_in_tot_qc'] ob.data['sknt'] = speed(row['ws_mph_s_wvt_qc'], 'MPH').value("KT") if 'ws_mph_max' in df.columns: ob.data['gust'] = speed(row['ws_mph_max_qc'], 'MPH').value('KT') ob.data['drct'] = row['winddir_d1_wvt_qc'] if 'tsoil_c_avg' in df.columns: ob.data['c1tmpf'] = temperature(row['tsoil_c_avg_qc'], 'C').value('F') ob.data['c2tmpf'] = temperature(row['t12_c_avg_qc'], 'C').value('F') ob.data['c3tmpf'] = temperature(row['t24_c_avg_qc'], 'C').value('F') if 't50_c_avg' in df.columns: ob.data['c4tmpf'] = temperature(row['t50_c_avg_qc'], 'C').value('F') if 'calcvwc12_avg' in df.columns: ob.data['c2smv'] = row['calcvwc12_avg_qc'] * 100.0 if 'calcvwc24_avg' in df.columns: ob.data['c3smv'] = row['calcvwc24_avg_qc'] * 100.0 if 'calcvwc50_avg' in df.columns: ob.data['c4smv'] = row['calcvwc50_avg_qc'] * 100.0 ob.save(cursor) cursor.close() pgconn.commit()
def main(): """Go Main Go""" now = datetime.datetime.now() pgconn = get_dbconn('iem', user='******') icursor = pgconn.cursor() # Compute normal from the climate database sql = """ select s.id, s.network, ST_x(s.geom) as lon, ST_y(s.geom) as lat, greatest(c.max_sknt, c.max_gust) as wind from summary_%s c, current c2, stations s WHERE s.iemid = c.iemid and c2.valid > 'TODAY' and c.day = 'TODAY' and c2.iemid = s.iemid and (s.network ~* 'ASOS' or s.network = 'AWOS') and s.country = 'US' ORDER by lon, lat """ % (now.year, ) lats = [] lons = [] vals = [] valmask = [] icursor.execute(sql) for row in icursor: if row[4] == 0 or row[4] is None: continue lats.append(row[3]) lons.append(row[2]) vals.append(speed(row[4], 'KT').value('MPH')) valmask.append((row[1] in ['AWOS', 'IA_ASOS'])) if len(vals) < 5 or True not in valmask: return clevs = np.arange(0, 40, 2) clevs = np.append(clevs, np.arange(40, 80, 5)) clevs = np.append(clevs, np.arange(80, 120, 10)) # Iowa pqstr = "plot ac %s summary/today_gust.png iowa_wind_gust.png png" % ( now.strftime("%Y%m%d%H%M"), ) mp = MapPlot(title="Iowa ASOS/AWOS Peak Wind Speed Reports", subtitle="%s" % (now.strftime("%d %b %Y"), ), sector='iowa') mp.contourf(lons, lats, vals, clevs, units='MPH') mp.plot_values(lons, lats, vals, '%.0f', valmask=valmask, labelbuffer=10) mp.drawcounties() mp.postprocess(pqstr=pqstr, view=False) mp.close()
def make_rwis(i, j, initts, oldncout): """ Generate spinup file """ i = i - IOFFSET j = j - JOFFSET o = open('rwis.xml', 'w') o.write("""<?xml version="1.0"?> <observation> <header> <filetype>rwis-observation</filetype> <version>1.0</version> <road-station>oaa</road-station> </header> <measure-list>""") if oldncout is None: fake_rwis(o, initts) return ts0 = find_initts(oldncout) # at Air Temp in C tmpc = dt.temperature(oldncout.variables['tmpk'][:, i, j], 'K').value('C') # td Dew point in C dwpc = dt.temperature(oldncout.variables['dwpk'][:, i, j], 'K').value('C') # pi presence of precipitation 0: No -- 1: Yes # ws wind speed in km / hr ws = dt.speed(oldncout.variables['wmps'][:, i, j], 'MPS').value('KMH') # sc condition code 1=DryCond 2=Wet 3=Ice 4=MixWaterSnow # 5=dew 6=Meltsnow 7=Frost 8=Ice # Was set to 33 for SSI ? icond = oldncout.variables['icond'][:, i, j] # st road surface temp bridgec = dt.temperature( oldncout.variables['bdeckt'][:, i, j], 'K').value('C') # sst sub surface temp subsfc = dt.temperature( oldncout.variables['subsfct'][:, i, j], 'K').value('C') t1 = initts + datetime.timedelta(hours=12) for tstep in range(4, len(oldncout.dimensions['time']), 4): ts = ts0 + datetime.timedelta( minutes=int(oldncout.variables['time'][tstep])) if ts > t1: break o.write("""<measure><observation-time>%s</observation-time> <at>%.2f</at><td>%.2f</td><pi>0</pi><ws>%.2f</ws><sc>%s</sc><st>%.2f</st> <sst>%.2f</sst></measure> """ % (ts.strftime("%Y-%m-%dT%H:%MZ"), tmpc[tstep], dwpc[tstep], ws[tstep], icond[tstep], bridgec[tstep], subsfc[tstep])) o.write("</measure-list></observation>") o.close()
def plotter(fdict): """ Go """ import matplotlib matplotlib.use('agg') import matplotlib.pyplot as plt pgconn = psycopg2.connect(database='asos', host='iemdb', user='******') ctx = get_autoplot_context(fdict, get_description()) station = ctx['zstation'] network = ctx['network'] units = ctx['units'] nt = NetworkTable(network) df = read_sql(""" select date_trunc('hour', valid) as ts, avg(sknt) as sknt, max(drct) as drct from alldata WHERE station = %s and sknt is not null and drct is not null GROUP by ts """, pgconn, params=(station, ), parse_dates=('ts',), index_col=None) sknt = speed(df['sknt'].values, 'KT') drct = direction(df['drct'].values, 'DEG') df['u'], df['v'] = [x.value('MPS') for x in meteorology.uv(sknt, drct)] df['month'] = df['ts'].dt.month grp = df[['month', 'u', 'v', 'sknt']].groupby('month').mean() grp['u_%s' % (units,)] = speed(grp['u'].values, 'KT').value(units.upper()) grp['v_%s' % (units,)] = speed(grp['u'].values, 'KT').value(units.upper()) grp['sped_%s' % (units,)] = speed(grp['sknt'].values, 'KT').value(units.upper()) drct = meteorology.drct(speed(grp['u'].values, 'KT'), speed(grp['v'].values, 'KT')) grp['drct'] = drct.value('DEG') maxval = grp['sped_%s' % (units,)].max() (fig, ax) = plt.subplots(1, 1) ax.barh(grp.index.values, grp['sped_%s' % (units,)].values, align='center') ax.set_xlabel("Average Wind Speed [%s]" % (UNITS[units],)) ax.set_yticks(grp.index.values) ax.set_yticklabels(calendar.month_abbr[1:]) ax.grid(True) ax.set_xlim(0, maxval * 1.2) for mon, row in grp.iterrows(): ax.text(maxval * 1.1, mon, drct2text(row['drct']), ha='center', va='center', bbox=dict(color='white')) ax.text(row['sped_%s' % (units,)] * 0.98, mon, "%.1f" % (row['sped_%s' % (units,)],), ha='right', va='center', bbox=dict(color='white', boxstyle='square,pad=0.03',)) ax.set_ylim(12.5, 0.5) ax.set_title(("[%s] %s [%s-%s]\nMonthly Average Wind Speed and" " Vector Average Direction" ) % (station, nt.sts[station]['name'], df['ts'].min().year, df['ts'].max().year)) return fig, grp
def do(valid, frame):
    """ Generate plot for a given timestamp """
    cursor.execute(
        """select turbineid, power, ST_x(geom), ST_y(geom), yaw, windspeed
        from sampled_data s JOIN turbines t on (t.id = s.turbineid)
        WHERE valid = %s and power is not null and yaw is not null
        and windspeed is not null""", (valid, ))
    lons = []
    lats = []
    vals = []
    u = []
    v = []
    for row in cursor:
        lons.append(row[2])
        lats.append(row[3])
        vals.append(row[1])
        a, b = uv(speed(row[5], 'MPS'), direction(row[4], 'deg'))
        u.append(a.value('MPS'))
        v.append(b.value('MPS'))
    vals = np.array(vals)
    avgv = np.average(vals)
    vals2 = vals - avgv
    print(valid, min(vals2), max(vals2))
    (fig, ax) = plt.subplots(1, 1)
    cmap = plt.cm.get_cmap('RdYlBu_r')
    cmap.set_under('white')
    cmap.set_over('black')
    clevs = np.arange(-300, 301, 50)
    norm = mpcolors.BoundaryNorm(clevs, cmap.N)
    ax.quiver(lons, lats, u, v, zorder=1)
    ax.scatter(lons, lats, c=vals2, vmin=-500, vmax=500, cmap=cmap, s=100,
               zorder=2)
    ax.set_title(
        "Pomeroy Farm Turbine Power [kW] Diff from Farm Avg "
        "(1min sampled dataset)\nValid: %s" % (
            valid.strftime("%d %b %Y %I:%M %p"), ))
    make_colorbar(clevs, norm, cmap)
    fig.savefig('power_movie/frame%05i.png' % (frame, ))
    plt.close()
def computeOthers(d): r = {} # Need something to compute other values needed for output for sid in d.keys(): ob = d[sid] ob["ticks"] = calendar.timegm(ob['utc_valid'].timetuple()) if ob['sknt'] is not None: ob["sped"] = ob["sknt"] * 1.17 if ob.get('tmpf') is not None and ob.get('dwpf') is not None: tmpf = temperature(ob['tmpf'], 'F') dwpf = temperature(ob['dwpf'], 'F') ob["relh"] = meteorology.relh(tmpf, dwpf).value('%') else: ob['relh'] = None if ob['relh'] == 'M': ob['relh'] = None if (ob.get('tmpf') is not None and ob.get('dwpf') is not None and ob.get('sped') is not None): tmpf = temperature(ob['tmpf'], 'F') dwpf = temperature(ob['dwpf'], 'F') sknt = speed(ob['sped'], 'MPH') ob["feel"] = meteorology.feelslike(tmpf, dwpf, sknt).value("F") else: ob['feel'] = None if ob['feel'] == 'M': ob['feel'] = None ob["altiTend"] = 'S' ob["drctTxt"] = util.drct2text(ob["drct"]) if ob["max_drct"] is None: ob["max_drct"] = 0 ob["max_drctTxt"] = util.drct2text(ob["max_drct"]) ob["20gu"] = 0 if ob['gust'] is not None: ob["gmph"] = ob["gust"] * 1.17 if ob['max_gust'] is not None: ob["max_sped"] = ob["max_gust"] * 1.17 else: ob['max_sped'] = 0 ob['pday'] = 0 if ob['pday'] is None else ob['pday'] ob['pmonth'] = 0 if ob['pmonth'] is None else ob['pmonth'] ob["gtim"] = "0000" ob["gtim2"] = "12:00 AM" if ob["max_gust_ts"] is not None and ob["max_gust_ts"] != "null": ob["gtim"] = ob["max_gust_ts"].strftime("%H%M") ob["gtim2"] = ob["max_gust_ts"].strftime("%-I:%M %p") r[sid] = ob return r
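# computeOthers() above converts knots to mph with a hard-coded 1.17 factor;
# the exact conversion factor is about 1.15078.  A quick check of the
# difference, assuming the same pyiem speed datatype used elsewhere in these
# snippets:
from pyiem.datatypes import speed

sknt = 40.0
print(sknt * 1.17)                     # approximation used above (~46.8 mph)
print(speed(sknt, "KT").value("MPH"))  # exact conversion (~46.0 mph)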
def windrose(station, database='asos', months=np.arange(1, 13), hours=np.arange(0, 24), sts=datetime.datetime(1970, 1, 1), ets=datetime.datetime(2050, 1, 1), units="mph", nsector=36, justdata=False, rmax=None, cursor=None, sname=None, sknt=None, drct=None, level=None, bins=[]): """Utility function that generates a windrose plot Args: station (str): station identifier to search database for database (str,optional): database name to look for data within months (list,optional): optional list of months to limit plot to hours (list,optional): optional list of hours to limit plot to sts (datetime,optional): start datetime ets (datetime,optional): end datetime units (str,optional): units to plot values as nsector (int,optional): number of bins to devide the windrose into justdata (boolean,optional): if True, write out the data only cursor (psycopg2.cursor,optional): provide a database cursor to run the query against. sname (str,optional): The name of this station, if not specified it will default to the ((`station`)) identifier sknt (list,optional): A list of wind speeds in knots already generated drct (list,optional): A list of wind directions (deg N) already generated level (int,optional): In case of RAOB, which level interests us (hPa) bins (list,optional): bins to use for the wind speed Returns: matplotlib.Figure instance or textdata """ monthinfo = _get_timeinfo(months, 'month', 12) hourinfo = _get_timeinfo(hours, 'hour', 24) if sknt is None or drct is None: (sknt, drct, minvalid, maxvalid) = _get_data(station, cursor, database, sts, ets, monthinfo, hourinfo, level) sknt = speed(sknt, 'KT').value(units.upper()) if justdata: return _make_textresult(station, sknt, drct, units, nsector, sname, minvalid, maxvalid, monthinfo, hourinfo, level, bins) if len(sknt) < 5 or np.max(sknt) < 1: fig = plt.figure(figsize=(6, 7), dpi=80, facecolor='w', edgecolor='w') fig.text(0.17, 0.89, 'Not enough data available to generate plot') return fig return _make_plot(station, sknt, drct, units, nsector, rmax, hours, months, sname, minvalid, maxvalid, level, bins)
def grid_day(nc, ts):
    """ """
    offset = iemre.daily_offset(ts)
    print(('cal hi/lo for %s [idx:%s]') % (ts, offset))
    sql = """
       SELECT ST_x(s.geom) as lon, ST_y(s.geom) as lat, s.state, s.name,
       s.id as station,
       (CASE WHEN pday >= 0 then pday else null end) as precipdata,
       (CASE WHEN max_tmpf > -50 and max_tmpf < 130
           then max_tmpf else null end) as highdata,
       (CASE WHEN min_tmpf > -50 and min_tmpf < 95
           then min_tmpf else null end) as lowdata,
       (CASE WHEN max_dwpf > -50 and max_dwpf < 130
           then max_dwpf else null end) as highdwpf,
       (CASE WHEN min_dwpf > -50 and min_dwpf < 95
           then min_dwpf else null end) as lowdwpf,
       (CASE WHEN avg_sknt >= 0 and avg_sknt < 100
           then avg_sknt else null end) as avgsknt
       from summary_%s c, stations s WHERE day = '%s' and
       s.network in ('IA_ASOS', 'MN_ASOS', 'WI_ASOS', 'IL_ASOS', 'MO_ASOS',
       'KS_ASOS', 'NE_ASOS', 'SD_ASOS', 'ND_ASOS', 'KY_ASOS', 'MI_ASOS',
       'OH_ASOS', 'AWOS') and c.iemid = s.iemid
       """ % (ts.year, ts.strftime("%Y-%m-%d"))
    df = read_sql(sql, pgconn)
    if len(df.index) > 4:
        res = generic_gridder(df, 'highdata')
        nc.variables['high_tmpk'][offset] = datatypes.temperature(
            res, 'F').value('K')
        res = generic_gridder(df, 'lowdata')
        nc.variables['low_tmpk'][offset] = datatypes.temperature(
            res, 'F').value('K')
        hres = generic_gridder(df, 'highdwpf')
        lres = generic_gridder(df, 'lowdwpf')
        nc.variables['avg_dwpk'][offset] = datatypes.temperature(
            (hres + lres) / 2., 'F').value('K')
        res = generic_gridder(df, 'avgsknt')
        res = np.where(res < 0, 0, res)
        nc.variables['wind_speed'][offset] = datatypes.speed(
            res, 'KT').value('MPS')
    else:
        print("%s has %02i entries, FAIL" % (
            ts.strftime("%Y-%m-%d"), len(df.index)))
def test_drct(): """Conversion of u and v to direction""" r = meteorology.drct( datatypes.speed(np.array([10, 20]), 'KT'), datatypes.speed(np.array([10, 20]), 'KT') ).value("DEG") assert r[0] == 225 r = meteorology.drct( datatypes.speed(-10, 'KT'), datatypes.speed(10, 'KT') ).value("DEG") assert r == 135 r = meteorology.drct( datatypes.speed(-10, 'KT'), datatypes.speed(-10, 'KT') ).value("DEG") assert r == 45 r = meteorology.drct( datatypes.speed(10, 'KT'), datatypes.speed(-10, 'KT') ).value("DEG") assert r == 315
def do_windalerts(obs): """Iterate through the obs and do wind alerts where appropriate""" for sid in obs: # Problem sites with lightning issues if sid in ['RBFI4', 'RTMI4', 'RWII4', 'RCAI4', 'RDYI4', 'RDNI4', 'RCDI4', 'RCII4', 'RCLI4']: continue ob = obs[sid] # screening if ob.get('gust', 0) < 40: continue if np.isnan(ob['gust']): continue smph = speed(ob['gust'], 'KT').value('MPH') if smph < 50: continue if smph > 100: print(('process_rwis did not relay gust %.1f MPH from %s' '') % (smph, sid)) continue # Use a hacky tmp file to denote a wind alert that was sent fn = "/tmp/iarwis.%s.%s" % (sid, ob['valid'].strftime("%Y%m%d%H%M")) if os.path.isfile(fn): continue o = open(fn, 'w') o.write(" ") o.close() lts = ob['valid'].astimezone(pytz.timezone("America/Chicago")) stname = NT.sts[sid]['name'] msg = ("At %s, a wind gust of %.1f mph (%.1f kts) was recorded " "at the %s (%s) Iowa RWIS station" "") % (lts.strftime("%I:%M %p %d %b %Y"), smph, ob['gust'], stname, sid) mt = MIMEText(msg) mt['From'] = '*****@*****.**' # mt['To'] = '*****@*****.**' mt['To'] = '*****@*****.**' mt['Subject'] = 'Iowa RWIS Wind Gust %.0f mph %s' % (smph, stname) s = smtplib.SMTP('mailhub.iastate.edu') s.sendmail(mt['From'], [mt['To']], mt.as_string()) s.quit()
def do(valid, frame):
    """ Generate plot for a given timestamp """
    cursor.execute("""select turbineid, power, ST_x(geom), ST_y(geom), yaw,
    windspeed from sampled_data s JOIN turbines t on (t.id = s.turbineid)
    WHERE valid = %s and power is not null and yaw is not null
    and windspeed is not null""", (valid,))
    lons = []
    lats = []
    vals = []
    u = []
    v = []
    for row in cursor:
        lons.append(row[2])
        lats.append(row[3])
        vals.append(row[1])
        a, b = uv(speed(row[5], 'MPS'), direction(row[4], 'deg'))
        u.append(a.value('MPS'))
        v.append(b.value('MPS'))
    vals = np.array(vals)
    avgv = np.average(vals)
    vals2 = vals - avgv
    print(valid, min(vals2), max(vals2))
    (fig, ax) = plt.subplots(1, 1)
    cmap = plt.cm.get_cmap('RdYlBu_r')
    cmap.set_under('white')
    cmap.set_over('black')
    clevs = np.arange(-300, 301, 50)
    norm = mpcolors.BoundaryNorm(clevs, cmap.N)
    ax.quiver(lons, lats, u, v, zorder=1)
    ax.scatter(lons, lats, c=vals2, vmin=-500, vmax=500, cmap=cmap, s=100,
               zorder=2)
    ax.set_title("Pomeroy Farm Turbine Power [kW] Diff from Farm Avg "
                 "(1min sampled dataset)\nValid: %s" % (
                     valid.strftime("%d %b %Y %I:%M %p"),))
    make_colorbar(clevs, norm, cmap)
    fig.savefig('power_movie/frame%05i.png' % (frame,))
    plt.close()
def test_vectorized():
    """See that heatindex and windchill can do lists"""
    temp = datatypes.temperature([0, 10], 'F')
    sknt = datatypes.speed([30, 40], 'MPH')
    val = meteorology.windchill(temp, sknt).value('F')
    assert abs(val[0] - -24.50) < 0.01

    t = datatypes.temperature([80.0, 90.0], 'F')
    td = datatypes.temperature([70.0, 60.0], 'F')
    hdx = meteorology.heatindex(t, td)
    assert abs(hdx.value("F")[0] - 83.93) < 0.01

    tmpf = np.array([80., 90.]) * units('degF')
    dwpf = np.array([70., 60.]) * units('degF')
    smps = np.array([10., 20.]) * units('meter per second')
    feels = meteorology.mcalc_feelslike(tmpf, dwpf, smps)
    assert abs(feels.to(units("degF")).magnitude[0] - 83.15) < 0.01

    tmpf = masked_array([80., np.nan], units('degF'), mask=[False, True])
    feels = meteorology.mcalc_feelslike(tmpf, dwpf, smps)
    assert abs(feels.to(units("degF")).magnitude[0] - 83.15) < 0.01
    assert feels.mask[1]
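The assertions above pin down the expected numbers for the list form. As a minimal, hedged sketch of the same calls with plain scalars (assuming only the pyiem package these tests exercise), each input is wrapped with its unit and the result can be read back in any supported unit:

from pyiem import meteorology, datatypes

# Wind chill for 0 F air and a 30 mph wind; roughly -24.5 F per the test
wc = meteorology.windchill(datatypes.temperature(0, 'F'),
                           datatypes.speed(30, 'MPH'))
print(round(wc.value('F'), 2))

# Heat index for 80 F air with a 70 F dew point, read back in Celsius
hdx = meteorology.heatindex(datatypes.temperature(80, 'F'),
                            datatypes.temperature(70, 'F'))
print(round(hdx.value('C'), 1))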
def sendWindAlert(txn, iemid, v, d, t, clean_metar):
    """ Send a wind alert please """
    speed = datatypes.speed(v, 'KT')
    print("ALERTING for [%s]" % (iemid,))
    txn.execute("""SELECT wfo, state, name, ST_x(geom) as lon,
        ST_y(geom) as lat, network from stations
        WHERE id = '%s' """ % (iemid, ))
    if txn.rowcount == 0:
        print("Could not find WFO for sid: %s" % (iemid,))
        return
    row = txn.fetchone()
    wfo = row['wfo']
    if wfo is None or wfo == '':
        log.msg("Unknown WFO for id: %s, skipping WindAlert" % (iemid,))
        return
    st = row['state']
    nm = row['name']
    extra = ""
    if clean_metar.find("$") > 0:
        extra = "(Caution: Maintenance Check Indicator)"
    jtxt = ("%s,%s (%s) ASOS %s reports gust of %.0f knots (%.1f mph) "
            "from %s @ %s\n%s"
            ) % (nm, st, iemid, extra, speed.value('KT'),
                 speed.value('MPH'), drct2dirTxt(d), t.strftime("%H%MZ"),
                 clean_metar)
    xtra = {'channels': wfo,
            'lat': str(row['lat']),
            'long': str(row['lon'])}
    xtra['twitter'] = ("%s,%s (%s) ASOS reports gust of %.1f knots "
                       "(%.1f mph) from %s @ %s"
                       ) % (nm, st, iemid, speed.value('KT'),
                            speed.value('MPH'), drct2dirTxt(d),
                            t.strftime("%H%MZ"))
    jabber.sendMessage(jtxt, "<p>%s</p>" % (jtxt,), xtra)
def process(sheets):
    resdf = pd.DataFrame({
        'precip': sheets['RainOut']['Rain_mm_Tot'],
        'tmpf': sheets['TempRHVPOut']['AirT_C_Avg'],
        'rh': sheets['TempRHVPOut']['RH'],
        'drct': sheets['WindOut']['WindDir_D1_WVT'],
        'sknt': sheets['WindOut']['WS_ms_S_WVT'],
        'srad': sheets['SolarRad1Out']['Slr_kW_Avg']})
    # Do unit conversion
    resdf['srad'] = resdf['srad'] * 1000.
    resdf['precip'] = distance(resdf['precip'], 'MM').value('IN')
    resdf['tmpf'] = temperature(resdf['tmpf'], 'C').value('F')
    resdf['dwpf'] = dewpoint(temperature(resdf['tmpf'], 'F'),
                             humidity(resdf['rh'], '%')).value('F')
    resdf['sknt'] = speed(resdf['sknt'], 'MPS').value('KT')
    print(resdf.describe())
    minval = resdf.index.min()
    maxval = resdf.index.max()
    cursor = pgconn.cursor()
    cursor.execute("""DELETE from weather_data_obs WHERE
        valid between '%s-06' and '%s-06' and station = 'HICKS.P'
        """ % (minval.strftime("%Y-%m-%d %H:%M"),
               maxval.strftime("%Y-%m-%d %H:%M")))
    print("DELETED %s rows between %s and %s" % (cursor.rowcount,
                                                 minval, maxval))
    for valid, row in resdf.iterrows():
        if pd.isnull(valid):
            continue
        cursor.execute("""INSERT into weather_data_obs
            (station, valid, tmpf, dwpf, drct, precip, srad, sknt)
            VALUES ('HICKS.P', %s, %s, %s, %s, %s, %s, %s)
            """, (valid.strftime("%Y-%m-%d %H:%M-06"), row['tmpf'],
                  row['dwpf'], row['drct'], row['precip'], row['srad'],
                  row['sknt']))
    cursor.close()
    pgconn.commit()
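The dew point line above is the one conversion in this block that combines two inputs. A standalone sketch of that call, using the pyiem helpers already seen in this document and purely illustrative sample values:

from pyiem.datatypes import temperature, humidity
from pyiem.meteorology import dewpoint

# 25 C air at 60% relative humidity; values are made up for illustration
t = temperature(25.0, 'C')
rh = humidity(60.0, '%')
dwp = dewpoint(t, rh)
# the result is a temperature datatype, readable in either unit
print(round(dwp.value('C'), 1), round(dwp.value('F'), 1))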
def grid_wind(rs):
    """
    Grid winds based on u and v components
    @param rs array of dicts
    @return uwnd, vwnd
    """
    lats = []
    lons = []
    udata = []
    vdata = []
    for row in rs:
        if row["sknt"] is None or row["drct"] is None:
            continue
        # mps
        (u, v) = meteorology.uv(dt.speed(row["sknt"], "KT"),
                                dt.direction(row["drct"], "DEG"))
        if v is not None:
            lats.append(nt.sts[row["station"]]["lat"])
            lons.append(nt.sts[row["station"]]["lon"])
            vdata.append(v.value("MPS"))
            udata.append(u.value("MPS"))
    if len(vdata) < 4:
        print("No wind data at all")
        return None
    xi, yi = np.meshgrid(iemre.XAXIS, iemre.YAXIS)
    nn = NearestNDInterpolator((lons, lats), np.array(udata))
    ugrid = nn(xi, yi)
    nn = NearestNDInterpolator((lons, lats), np.array(vdata))
    vgrid = nn(xi, yi)
    if ugrid is not None:
        ugt = ugrid
        vgt = vgrid
        return ugt, vgt
    else:
        return None, None
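Gridding the u and v components separately, as grid_wind does, avoids averaging or interpolating wind directions across the 0/360 degree wrap; speed and direction are recovered afterward from the gridded components. The following hedged sketch reproduces that pattern with made-up station data and a coarse stand-in for the iemre grid, using only numpy and scipy:

import numpy as np
from scipy.interpolate import NearestNDInterpolator

# Hypothetical station locations and u/v wind components [m/s]
lons = np.array([-95.0, -94.0, -93.5, -92.0])
lats = np.array([41.0, 42.0, 41.5, 43.0])
u = np.array([2.0, -1.0, 0.5, 3.0])
v = np.array([-4.0, 2.0, 1.5, -2.0])

# A coarse half-degree analysis grid standing in for iemre.XAXIS/YAXIS
xi, yi = np.meshgrid(np.arange(-96, -91.5, 0.5), np.arange(40, 44.5, 0.5))
pts = np.column_stack((lons, lats))
ugrid = NearestNDInterpolator(pts, u)(xi, yi)
vgrid = NearestNDInterpolator(pts, v)(xi, yi)

# Wind speed is derived from the gridded components, not gridded directly
sped = np.hypot(ugrid, vgrid)
print(sped.shape, sped.max())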
def wind_message(self):
    """Convert this into a Jabber style message"""
    drct = 0
    sknt = 0
    time = self.time.replace(tzinfo=pytz.UTC)
    if self.wind_gust:
        sknt = self.wind_gust.value("KT")
        if self.wind_dir:
            drct = self.wind_dir.value()
    if self.wind_speed_peak:
        v1 = self.wind_speed_peak.value("KT")
        d1 = self.wind_dir_peak.value()
        t1 = self.peak_wind_time.replace(tzinfo=pytz.UTC)
        if v1 > sknt:
            sknt = v1
            drct = d1
            time = t1
    key = "%s;%s;%s" % (self.station_id, sknt, time)
    if key not in WIND_ALERTS:
        WIND_ALERTS[key] = 1
        speed = datatypes.speed(sknt, 'KT')
        return ("gust of %.0f knots (%.1f mph) from %s @ %s"
                ) % (speed.value('KT'), speed.value('MPH'),
                     drct2text(drct), time.strftime("%H%MZ"))
def do_daily(fn):
    df = pd.read_table(fn, sep=' ')
    df['sknt'] = speed(df['WINDSPEED'], 'MPS').value('KT')
    df['high'] = temperature(df['TMAX'], 'C').value('F')
    df['low'] = temperature(df['TMIN'], 'C').value('F')
    df['pday'] = distance(df['PRECIP'], 'MM').value('IN')
    df['date'] = df[['YEAR', 'MONTH', 'DAY']].apply(
        lambda x: datetime.date(x[0], x[1], x[2]), axis=1)
    print("fn: %s valid: %s - %s" % (fn, df['date'].min(),
                                     df['date'].max()))
    cursor = pgconn.cursor()
    cursor.execute("""DELETE from weather_data_daily where station = 'DPAC'
        and valid >= %s and valid <= %s""",
                   (df['date'].min(), df['date'].max()))
    if cursor.rowcount > 0:
        print("Deleted %s rows" % (cursor.rowcount, ))
    for i, row in df.iterrows():
        cursor.execute("""INSERT into weather_data_daily
            (station, valid, high, low, precip, sknt)
            VALUES ('DPAC', %s, %s, %s, %s, %s)""",
                       (row['date'], row['high'], row['low'], row['pday'],
                        row['sknt']))
    print("Inserted %s rows..." % (i + 1, ))
    cursor.close()
    pgconn.commit()
def do_hourly(fn):
    df = pd.read_table(fn, sep=' ')
    df['sknt'] = speed(df['WINDSPEED'], 'MPS').value('KT')
    df['tmpf'] = temperature(df['TAIR'], 'C').value('F')
    df['precip'] = distance(df['PREC'], 'MM').value('IN')
    df['valid'] = df[['YEAR', 'MONTH', 'DAY', 'HOUR']].apply(
        lambda x: d(*x), axis=1)
    print("fn: %s valid: %s - %s" % (fn, df['valid'].min(),
                                     df['valid'].max()))
    cursor = pgconn.cursor()
    cursor.execute("""DELETE from weather_data_obs where station = 'DPAC'
        and valid >= %s and valid <= %s""",
                   (df['valid'].min(), df['valid'].max()))
    if cursor.rowcount > 0:
        print("Deleted %s rows" % (cursor.rowcount, ))
    for i, row in df.iterrows():
        cursor.execute("""INSERT into weather_data_obs
            (station, valid, tmpf, sknt, precip, srad)
            VALUES ('DPAC', %s, %s, %s, %s, %s)""",
                       (row['valid'].strftime("%Y-%m-%d %H:%M-05"),
                        row['tmpf'], row['sknt'], row['precip'],
                        row['RADIATION']))
    print("Inserted %s rows..." % (i + 1, ))
    cursor.close()
    pgconn.commit()
    and c2.iemid = s.iemid and (s.network ~* 'ASOS' or s.network = 'AWOS')
    and s.country = 'US' ORDER by lon, lat
""" % (now.year,)

lats = []
lons = []
vals = []
valmask = []
icursor.execute(sql)
for row in icursor:
    if row[4] == 0 or row[4] is None:
        continue
    lats.append(row[3])
    lons.append(row[2])
    vals.append(speed(row[4], 'KT').value('MPH'))
    valmask.append(row[1] in ['AWOS', 'IA_ASOS'])

if len(vals) < 5 or True not in valmask:
    sys.exit(0)

clevs = numpy.arange(0, 40, 2)
clevs = numpy.append(clevs, numpy.arange(40, 80, 5))
clevs = numpy.append(clevs, numpy.arange(80, 120, 10))

# Iowa
pqstr = "plot ac %s summary/today_gust.png iowa_wind_gust.png png" % (
    now.strftime("%Y%m%d%H%M"), )
m = MapPlot(title="Iowa ASOS/AWOS Peak Wind Speed Reports",
            subtitle="%s" % (now.strftime("%d %b %Y"), ),
            sector='iowa')
heat = "M" if tmpf != "M" and dwpf != "M": t = temperature(nc.variables['temperature'][idx], 'K') d = temperature(nc.variables['dewpoint'][idx], 'K') relh = meteorology.relh(t, d).value("%") heat = "%5.1f" % (meteorology.heatindex(t, d).value("F"),) drct = s2( nc.variables['windDir'][idx]) smps = s2( nc.variables['windSpeed'][idx]) sped = "M" if smps != "M": sped = "%5.1f" % (nc.variables['windSpeed'][idx] * 2.23694,) wcht = "M" if tmpf != "M" and sped != "M": t = temperature(nc.variables['temperature'][idx], 'K') sped = speed( nc.variables['windSpeed'][idx], 'MPS') wcht = "%5.1f" % (meteorology.windchill(t, sped).value("F"),) ts = indices[sid]['ts'] out.write("%5.5s %25.25s %8.4f %10.4f %02i %02i %5s %5s %5s %5s %5s %5s\n" % (sid, name, latitude, longitude, ts.hour, ts.minute, tmpf, dwpf, drct, sped, heat, wcht)) nc.close() out.close() pqstr = "data c 000000000000 wxc/wxc_%s.txt bogus txt" % (network.lower(),) subprocess.call("/home/ldm/bin/pqinsert -p '%s' %s" % ( pqstr, wxcfn), shell=True) os.remove(wxcfn)
""" http://www.isws.illinois.edu/warm/datatype.asp """ import pandas as pd import datetime from pyiem.datatypes import speed import psycopg2 pgconn = psycopg2.connect(database='sustainablecorn', host='iemdb') station = 'SEPAC' fn = 'sepac.xlsx' df = pd.read_excel(fn) print df.columns df['sknt'] = speed(pd.to_numeric(df['Wind Speed(mph)'], errors='coerse'), 'MPH').value('KT') df['high'] = pd.to_numeric(df['Maximum Air Temperature(degF)'], errors='coerse') df['low'] = pd.to_numeric(df['Minimum Air Temperature(degF)'], errors='coerse') df['pday'] = df['Precipitation(inch)'] df['srad'] = df['Solar Radiation(MJsqm)'] print("fn: %s valid: %s - %s" % (fn, df['date'].min(), df['date'].max())) cursor = pgconn.cursor() cursor.execute("""DELETE from weather_data_daily where station = %s and valid >= %s and valid <= %s """, (station, df['date'].min(), df['date'].max())) if cursor.rowcount > 0: print("Deleted %s rows" % (cursor.rowcount, )) for i, row in df.iterrows(): cursor.execute("""INSERT into weather_data_daily
# air temperature, RH, Radiation, WS, and precipitation
df2 = read_sql("""SELECT extract(month from valid) as month,
    extract(hour from valid + '10 minutes'::interval) as hour,
    extract(day from valid + '10 minutes'::interval) as day,
    c800 from hourly WHERE station = 'A130209' and
    extract(month from valid) in (10, 11)
    """, pgconn, index_col=None)
gdf2 = df2.groupby(by=['month', 'day', 'hour']).mean()
print(("MONTH,DAY,HOUR,AIRTEMP[C],"
       "RELHUMID[%],RADIATION[kC/m2],WINDSPEED[MPS],PRECIP[MM]"))
for month in (10, 11):
    for day in range(1, 32):
        if day == 31 and month == 11:
            continue
        for hour in range(24):
            print(("%s,%s,%s,%.2f,%.2f,%.2f,%.2f,%.2f"
                   ) % (month, day, hour,
                        temperature(gdf.at[(month, day, hour),
                                           'tmpf'], 'F').value('C'),
                        gdf.at[(month, day, hour), 'relh'],
                        gdf2.at[(month, day, hour), 'c800'],
                        speed(gdf.at[(month, day, hour),
                                     'sknt'], 'KT').value('MPS'),
                        distance(gdf.at[(month, day, hour),
                                        'p01i'], 'IN').value('MM')))