def test_press_conv():
    """Round-trip a pressure value between MB, IN, and HPA units."""
    press = datatypes.pressure(850.0, "HPA")
    # hPa and mb are the same unit, so this is exact
    assert press.value("MB") == 850.0
    assert abs(press.value("in") - 25.10) < 0.01
    press = datatypes.pressure(85000.0, "PA")
    assert abs(press.value("IN") - 25.10) < 0.01
def test_press_conv():
    """Round-trip a pressure value between MB, IN, and HPA units."""
    obs = datatypes.pressure(850.0, 'HPA')
    # hPa equals mb exactly
    assert 850.0 == obs.value('MB')
    assert abs(25.10 - obs.value('in')) < 0.01
    obs = datatypes.pressure(85000.0, 'PA')
    assert abs(25.10 - obs.value('IN')) < 0.01
def test_press_conv(self):
    """ Pressure convert from MB to IN to HPA"""
    hpa = datatypes.pressure(850.0, 'HPA')
    # assertEquals / assertAlmostEquals are deprecated aliases that were
    # removed in Python 3.12; use the canonical method names.
    self.assertEqual(850.0, hpa.value('MB'))
    self.assertAlmostEqual(25.10, hpa.value('in'), 2)
    hpa = datatypes.pressure(85000.0, 'PA')
    self.assertAlmostEqual(25.10, hpa.value('IN'), 2)
def test_press_conv(self):
    """ Pressure convert from MB to IN to HPA"""
    hpa = datatypes.pressure(850.0, 'HPA')
    # assertEquals / assertAlmostEquals are deprecated aliases that were
    # removed in Python 3.12; use the canonical method names.
    self.assertEqual(850.0, hpa.value('MB'))
    self.assertAlmostEqual(25.10, hpa.value('IN'), 2)
    hpa = datatypes.pressure(85000.0, 'PA')
    self.assertAlmostEqual(25.10, hpa.value('IN'), 2)
def main():
    """Plot KPHP (Philip, SD) ASOS 1-minute data for the 2018-06-14 heat
    burst event: temperature/dew point on top, wind and pressure below.

    Side effects: queries the 'asos' database and writes ``test.png``.
    """
    pgconn = get_dbconn('asos')
    # shift valid back one hour; presumably aligning archive time to CDT base
    df = read_sql("""
    SELECT valid - '1 hour'::interval as valid, drct, sknt, gust_sknt,
    pres1, tmpf, dwpf from t2018_1minute where station = %s
    and valid >= '2018-06-14 08:30' and valid <= '2018-06-14 10:15'
    ORDER by valid ASC
    """, pgconn, params=('PHP', ), index_col='valid')
    # Build a tick every 15 minutes, labelled in local time (UTC-5)
    xticks = []
    xticklabels = []
    for valid in df.index.values:
        if pd.to_datetime(valid).minute % 15 == 0:
            xticks.append(valid)
            ts = pd.to_datetime(valid) - datetime.timedelta(hours=5)
            xticklabels.append(ts.strftime("%-H:%M\n%p"))
    fig = plt.figure(figsize=(8, 9))
    ax = fig.add_axes([0.1, 0.55, 0.75, 0.35])
    ax.plot(df.index.values, df['tmpf'], label='Air Temp')
    ax.plot(df.index.values, df['dwpf'], label='Dew Point')
    ax.legend()
    ax.grid(True)
    # raw string: "\c" is not a valid escape sequence and warns on 3.6+
    ax.set_ylabel(r"Temperature $^\circ$F")
    ax.set_xticks(xticks)
    ax.set_xticklabels(xticklabels)
    ax.set_title(
        ("Philip, SD (KPHP) ASOS 1 Minute Interval Data for 14 Jun 2018\n"
         "Heat Burst Event, data missing in NCEI files 8:02 to 8:10 AM"))
    ax = fig.add_axes([0.1, 0.08, 0.75, 0.35])
    # bar width of 1/1440 day is one minute on a date axis
    ax.bar(df.index.values, speed(df['gust_sknt'], 'KT').value('MPH'),
           width=1/1440., color='red')
    ax.bar(df.index.values, speed(df['sknt'], 'KT').value('MPH'),
           width=1/1440., color='tan')
    ax.set_ylabel("Wind Speed (tan) & Gust (red) [mph]")
    ax.grid(True, zorder=5)
    ax.set_ylim(0, 60)
    ax2 = ax.twinx()
    ax2.plot(df.index.values, pressure(df['pres1'], 'IN').value('MB'),
             color='g', lw=2)
    ax2.set_ylabel("Air Pressure [hPa]", color='green')
    ax2.set_xticks(xticks)
    ax2.set_xticklabels(xticklabels)
    ax.set_xlabel("14 June 2018 MDT")
    ax2.set_ylim(923, 926)
    ax2.set_yticks(np.arange(923, 926.1, 0.5))
    fig.savefig('test.png')
def run_calcs(df):
    """Compute vapor pressure deficit columns, then average by year.

    Expects ``df`` to have 'tmpf', 'dwpf' (deg F) and a 'year' column;
    returns a new DataFrame indexed by year with an average dew point
    recomputed from the averaged mixing ratio.
    """
    # actual mixing ratio / vapor pressure from the dew point,
    # assuming a 1000 mb pressure level
    df['mixingratio'] = meteorology.mixing_ratio(
        temperature(df['dwpf'].values, 'F')).value('KG/KG')
    df['vapor_pressure'] = mcalc.vapor_pressure(
        1000. * units.mbar,
        df['mixingratio'].values * units('kg/kg')).to(units('kPa'))
    # saturation values come from the air temperature instead
    df['saturation_mixingratio'] = (meteorology.mixing_ratio(
        temperature(df['tmpf'].values, 'F')).value('KG/KG'))
    df['saturation_vapor_pressure'] = mcalc.vapor_pressure(
        1000. * units.mbar,
        df['saturation_mixingratio'].values * units('kg/kg')).to(
            units('kPa'))
    df['vpd'] = df['saturation_vapor_pressure'] - df['vapor_pressure']
    # collapse to per-year averages, then back out a representative
    # dew point from the averaged mixing ratio (not from averaged dwpf)
    group = df.groupby('year')
    df = group.aggregate(np.average)
    df['dwpf'] = meteorology.dewpoint_from_pq(
        pressure(1000, 'MB'),
        mixingratio(df['mixingratio'].values, 'KG/KG')).value('F')
    return df
def gen_metar(data):
    """Convert our parsed dictionary into a METAR.

    Builds wind, visibility, present weather, sky cover, temperature,
    altimeter, and RMK groups from the parsed record (DS3505-style keys,
    per the IEM_DS3505 remark) and stores the result in ``data["metar"]``.
    """
    mtr = "%s %sZ AUTO " % (data["call_id"], data["valid"].strftime("%d%H%M"))
    # wind direction
    if data.get("wind_code") == "C":
        # calm wind
        mtr += "00000KT "
    elif (data.get("drct_qc") in ["1", "5"]
          and data["wind_speed_mps"] is not None):
        if data["drct"] is None:
            mtr += "////"
        else:
            mtr += "%03.0f" % (data["drct"], )
        kts = speed(data["wind_speed_mps"], "MPS").value("KT")
        mtr += "%02.0f" % (kts, )
        # OC1 carries the gust, when present and positive
        if "OC1" in data["extra"]:
            val = data["extra"]["OC1"].get("speed", 0)
            if val is not None and val > 0:
                mtr += "G%02.0f" % (speed(val, "MPS").value("KT"), )
        mtr += "KT "
    # vis
    if data["vsby_m"] is not None:
        val = (units("meter") * data["vsby_m"]).to(units("mile")).m
        mtr += "%sSM " % (vsbyfmt(val), )
    # Present Weather Time
    combocode = ""
    for code in [
            "AU1", "AU2", "AU3", "AU4", "AU5", "AU6", "AU7", "AU8", "AU9",
    ]:
        if code not in data["extra"]:
            continue
        val = data["extra"][code]
        if val["combo"] == "1":  # lone
            if val["obscure"] == "1":
                mtr += "BR "
        elif val["combo"] == "2":  # start of dual code
            if val["descriptor"] == "7":
                combocode = "TS"
        elif val["combo"] == "3":  # end of dual code
            if val["proximity"] == "3" and val["precip"] == "02":
                mtr += "+%sRA " % (combocode, )
                combocode = ""
    # Clouds
    for code in ["GD1", "GD2", "GD3", "GD4", "GD5", "GD6"]:
        if code not in data["extra"]:
            continue
        val = data["extra"][code]
        skycode = SKY_STATE_CODES[val["state_code"]]
        height = val["height"]
        if skycode == "CLR":
            mtr += "CLR "
        elif height is None:
            continue
        else:
            # METAR encodes layer base in hundreds of feet
            hft = (units("meter") * height).to(units("feet")).m / 100.0
            mtr += "%s%03.0f " % (skycode, hft)
    # temperature
    tgroup = None
    if (data.get("airtemp_c_qc") not in ["2", "3"]
            and data["airtemp_c"] is not None):
        tmpc = data["airtemp_c"]
        dwpc = data["dewpointtemp_c"]
        mtr += "%s%02.0f/" % ("M" if tmpc < 0 else "", abs(tmpc))
        if dwpc is not None:
            mtr += "%s%02.0f" % ("M" if dwpc < 0 else "", abs(dwpc))
            # precise T-group (tenths of degC) goes into the remarks
            tgroup = "T%s%03i%s%03i" % (
                "1" if tmpc < 0 else "0",
                abs(tmpc) * 10.0,
                "1" if dwpc < 0 else "0",
                abs(dwpc) * 10.0,
            )
    mtr += " "
    # altimeter
    if ("MA1" in data["extra"]
            and data["extra"]["MA1"].get("altimeter") is not None):
        altimeter = pressure(data["extra"]["MA1"]["altimeter"],
                             "HPA").value("IN")
        mtr += "A%4.0f " % (altimeter * 100, )
    rmk = []
    # precipitation accumulation groups (AA1..AA4)
    for code in ["AA1", "AA2", "AA3", "AA4"]:
        if code not in data["extra"]:
            continue
        hours = data["extra"][code].get("hours")
        depth = data["extra"][code].get("depth")
        if hours is None or depth is None or hours == 12:
            continue
        if depth == 0 and data["extra"][code]["cond_code"] != "2":
            continue
        if hours in [3, 6]:
            prefix = "6"
        elif hours == 24:
            prefix = "7"
        elif hours == 1:
            prefix = "P"
        else:
            warnings.warn(f"Unknown precip hours {hours}")
            continue
        amount = (units("mm") * depth).to(units("inch")).m
        rmk.append("%s%04.0f" % (prefix, amount * 100))
    if data["mslp_hpa"] is not None:
        # SLP group keeps only the last three digits of tenths of hPa
        rmk.append("SLP%03.0f" % (data["mslp_hpa"] * 10 % 1000, ))
    if tgroup is not None:
        rmk.append(tgroup)
    # temperature groups
    group4 = {"M": "////", "N": "////"}
    for code in ["KA1", "KA2", "KA3", "KA4"]:
        if code not in data["extra"]:
            continue
        val = data["extra"][code]
        hours = val.get("hours")
        if hours is None:
            continue
        typ = val["code"]
        tmpc = val["tmpc"]
        if tmpc is None:
            continue
        if hours is None or hours == 12:
            continue
        elif hours == 6 and typ == "M":
            prefix = "1"
        elif hours == 6 and typ == "N":
            prefix = "2"
        elif hours == 24:
            # 24-hour max/min pair is emitted later as a single 4-group
            group4[typ] = "%s%03i" % ("1" if tmpc < 0 else "0",
                                      abs(tmpc) * 10)
            continue
        else:
            warnings.warn("Unknown temperature hours %s typ: %s" % (hours,
                                                                    typ))
            continue
        rmk.append("%s%s%03i" % (prefix, "1" if tmpc < 0 else "0",
                                 abs(tmpc) * 10))
    if group4["M"] != "////" or group4["N"] != "////":
        rmk.append("4%(M)s%(N)s" % group4)
    # 3-hour pressure tendency
    if ("MD1" in data["extra"]
            and data["extra"]["MD1"]["threehour"] is not None):
        rmk.append("5%s%03i" % (
            data["extra"]["MD1"]["code"],
            data["extra"]["MD1"]["threehour"] * 10,
        ))
    rmk.append("IEM_DS3505")
    mtr += "RMK %s " % (" ".join(rmk), )
    data["metar"] = mtr.strip()
def process(ncfn):
    """Process one HFMETAR netCDF file into IEM observations.

    Reads the listed variables (plus their QCR/QCD quality-control
    companions when present), converts units, synthesizes a METAR string
    per station record, and saves each observation to the 'iem' database.
    """
    pgconn = get_dbconn("iem")
    icursor = pgconn.cursor()
    # map station id -> network for later Observation construction
    xref = {}
    icursor.execute("SELECT id, network from stations where "
                    "network ~* 'ASOS' or network = 'AWOS' and "
                    "country = 'US'")
    for row in icursor:
        xref[row[0]] = row[1]
    icursor.close()
    nc = ncopen(ncfn)
    data = {}
    for vname in [
            "stationId",
            "observationTime",
            "temperature",
            "dewpoint",
            "altimeter",  # Pa
            "windDir",
            "windSpeed",  # mps
            "windGust",  # mps
            "visibility",  # m
            "precipAccum",
            "presWx",
            "skyCvr",
            "skyCovLayerBase",
            "autoRemark",
            "operatorRemark",
    ]:
        data[vname] = nc.variables[vname][:]
        # pull along any quality-control companions that exist
        for qc in ["QCR", "QCD"]:
            vname2 = vname + qc
            if vname2 in nc.variables:
                data[vname2] = nc.variables[vname2][:]
    # unit conversions; keep a Celsius copy for the METAR T-group
    for vname in ["temperature", "dewpoint"]:
        data[vname + "C"] = temperature(data[vname], "K").value("C")
        data[vname] = temperature(data[vname], "K").value("F")
    for vname in ["windSpeed", "windGust"]:
        data[vname] = (masked_array(data[vname], units("meter / second")).to(
            units("knots")).magnitude)
    data["altimeter"] = pressure(data["altimeter"], "PA").value("IN")
    data["skyCovLayerBase"] = distance(data["skyCovLayerBase"],
                                       "M").value("FT")
    data["visibility"] = distance(data["visibility"], "M").value("MI")
    data["precipAccum"] = distance(data["precipAccum"], "MM").value("IN")
    stations = chartostring(data["stationId"][:])
    presentwxs = chartostring(data["presWx"][:])
    skycs = chartostring(data["skyCvr"][:])
    autoremarks = chartostring(data["autoRemark"][:])
    opremarks = chartostring(data["operatorRemark"][:])

    def decision(i, fieldname, tolerance):
        """Our decision if we are going to take a HFMETAR value or not"""
        if data[fieldname][i] is np.ma.masked:
            return None
        # QCR == 0 means the value passed QC outright
        if data["%sQCR" % (fieldname, )][i] == 0:
            return data[fieldname][i]
        # Now we have work to do: accept if the worst QC departure is
        # within our tolerance
        departure = np.ma.max(np.ma.abs(data["%sQCD" % (fieldname, )][i, :]))
        # print("departure: %s tolerance: %s" % (departure, tolerance))
        if departure <= tolerance:
            return data[fieldname][i]
        return None

    for i, sid in tqdm(
            enumerate(stations),
            total=len(stations),
            disable=(not sys.stdout.isatty()),
    ):
        if len(sid) < 3:
            continue
        # strip the K-prefix for the IEM 3-character identifier
        sid3 = sid[1:] if sid.startswith("K") else sid
        ts = datetime.datetime(1970, 1, 1) + datetime.timedelta(
            seconds=data["observationTime"][i])
        ts = ts.replace(tzinfo=pytz.UTC)
        mtr = "%s %sZ AUTO " % (sid, ts.strftime("%d%H%M"))
        network = xref.get(sid3, "ASOS")
        iem = Observation(sid3, network, ts)
        # 06019G23KT
        val = decision(i, "windDir", 15)
        if val is not None:
            iem.data["drct"] = int(val)
            mtr += "%03i" % (iem.data["drct"], )
        else:
            mtr += "///"
        val = decision(i, "windSpeed", 10)
        if val is not None:
            iem.data["sknt"] = int(val)
            mtr += "%02i" % (iem.data["sknt"], )
        else:
            mtr += "//"
        val = decision(i, "windGust", 10)
        if val is not None and val > 0:
            iem.data["gust"] = int(val)
            mtr += "G%02i" % (iem.data["gust"], )
        mtr += "KT "
        val = decision(i, "visibility", 4)
        if val is not None:
            iem.data["vsby"] = float(val)
            mtr += "%sSM " % (vsbyfmt(iem.data["vsby"]), )
        presentwx = presentwxs[i]
        if presentwx != "":
            # database storage is comma delimited
            iem.data["wxcodes"] = presentwx.split(" ")
            mtr += "%s " % (presentwx, )
        for _i, (skyc, _l) in enumerate(zip(skycs[i],
                                            data["skyCovLayerBase"][i])):
            if skyc != "":
                iem.data["skyc%s" % (_i + 1, )] = skyc
                if skyc != "CLR":
                    iem.data["skyl%s" % (_i + 1, )] = int(_l)
                    # sky layer base encoded in hundreds of feet
                    mtr += "%s%03i " % (skyc, int(_l) / 100)
                else:
                    mtr += "CLR "
        t = ""
        tgroup = "T"
        val = decision(i, "temperature", 10)
        if val is not None:
            # Recall the pain enabling this
            # iem.data['tmpf'] = float(data['temperature'][i])
            tmpc = float(data["temperatureC"][i])
            t = "%s%02i/" % (
                "M" if tmpc < 0 else "",
                tmpc if tmpc > 0 else (0 - tmpc),
            )
            tgroup += "%s%03i" % (
                "1" if tmpc < 0 else "0",
                (tmpc if tmpc > 0 else (0 - tmpc)) * 10.0,
            )
        val = decision(i, "dewpoint", 10)
        if val is not None:
            # iem.data['dwpf'] = float(data['dewpoint'][i])
            tmpc = float(data["dewpointC"][i])
            # only emit dew point when temperature was accepted too
            if t != "":
                t = "%s%s%02i " % (
                    t,
                    "M" if tmpc < 0 else "",
                    tmpc if tmpc > 0 else 0 - tmpc,
                )
                tgroup += "%s%03i" % (
                    "1" if tmpc < 0 else "0",
                    (tmpc if tmpc > 0 else (0 - tmpc)) * 10.0,
                )
        if len(t) > 4:
            mtr += t
        val = decision(i, "altimeter", 20)
        if val is not None:
            iem.data["alti"] = float(round(val, 2))
            mtr += "A%4i " % (iem.data["alti"] * 100.0, )
        mtr += "RMK "
        val = decision(i, "precipAccum", 25)
        if val is not None:
            if val > 0.009:
                iem.data["phour"] = float(round(val, 2))
                mtr += "P%04i " % (iem.data["phour"] * 100.0, )
            elif val > 0:
                # Trace
                mtr += "P0000 "
                iem.data["phour"] = TRACE_VALUE
        if tgroup != "T":
            mtr += "%s " % (tgroup, )
        if autoremarks[i] != "" or opremarks[i] != "":
            mtr += "%s %s " % (autoremarks[i], opremarks[i])
        mtr += "MADISHF"
        # Eat our own dogfood: the METAR must parse before we store it
        try:
            Metar.Metar(mtr)
            iem.data["raw"] = mtr
        except Exception as exp:
            print("dogfooding extract_hfmetar %s resulted in %s" % (mtr, exp))
            continue
        # np.float32 values would be a database-adapter problem; flag them
        for key in iem.data:
            if isinstance(iem.data[key], np.float32):
                print("key: %s type: %s" % (key, type(iem.data[key])))
        icursor = pgconn.cursor()
        if not iem.save(icursor, force_current_log=True, skip_current=True):
            print(("extract_hfmetar: unknown station? %s %s %s\n%s")
                  % (sid3, network, ts, mtr))
        icursor.close()
    pgconn.commit()
def plotter(fdict):
    """Plot average dew point by whole-degree air temperature.

    Aggregates hourly ASOS observations into a mean mixing ratio per
    integer temperature, converts that back to a dew point, and returns
    the matplotlib Figure plus the summary DataFrame.
    """
    import matplotlib
    matplotlib.use('agg')
    import matplotlib.pyplot as plt
    # NOTE(review): hard-coded connection credentials; consider the
    # project's get_dbconn helper used elsewhere in this codebase
    pgconn = psycopg2.connect(database='asos', host='iemdb', user='******')
    cursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['zstation']
    network = ctx['network']
    month = ctx['month']
    nt = NetworkTable(network)
    if month == 'all':
        months = range(1, 13)
    elif month == 'fall':
        months = [9, 10, 11]
    elif month == 'winter':
        months = [12, 1, 2]
    elif month == 'spring':
        months = [3, 4, 5]
    elif month == 'summer':
        months = [6, 7, 8]
    else:
        # presumably month is an abbreviated name like 'jan' (%b) here
        ts = datetime.datetime.strptime("2000-"+month+"-01", '%Y-%b-%d')
        # make sure it is length two for the trick below in SQL
        months = [ts.month, 999]
    cursor.execute("""
        SELECT tmpf::int as t, dwpf from alldata where station = %s
        and tmpf is not null and dwpf is not null and dwpf <= tmpf
        and tmpf >= 0 and tmpf <= 140
        and extract(month from valid) in %s
        """, (station, tuple(months)))
    # accumulate mixing ratio sums and counts indexed by integer tmpf
    sums = np.zeros((140,), 'f')
    counts = np.zeros((140,), 'f')
    for row in cursor:
        r = mixing_ratio(temperature(row[1], 'F')).value('KG/KG')
        sums[row[0]] += r
        counts[row[0]] += 1
    rows = []
    for i in range(140):
        # require a minimum sample size at this temperature
        if counts[i] < 3:
            continue
        r = sums[i] / float(counts[i])
        # average mixing ratio -> dew point at an assumed 1000 mb
        d = dewpoint_from_pq(pressure(1000, 'MB'),
                             mixingratio(r, 'KG/KG')).value('F')
        rh = relh(temperature(i, 'F'), temperature(d, 'F')).value('%')
        rows.append(dict(tmpf=i, dwpf=d, rh=rh))
    df = pd.DataFrame(rows)
    tmpf = df['tmpf']
    dwpf = df['dwpf']
    rh = df['rh']
    (fig, ax) = plt.subplots(1, 1, figsize=(8, 6))
    ax.bar(tmpf-0.5, dwpf, ec='green', fc='green', width=1)
    ax.grid(True, zorder=11)
    ax.set_title(("%s [%s]\nAverage Dew Point by Air Temperature (month=%s) "
                  "(%s-%s)\n"
                  "(must have 3+ hourly observations at the given temperature)"
                  ) % (nt.sts[station]['name'], station, month.upper(),
                       nt.sts[station]['archive_begin'].year,
                       datetime.datetime.now().year), size=10)
    # 1:1 reference line (dew point cannot exceed temperature)
    ax.plot([0, 140], [0, 140], color='b')
    ax.set_ylabel("Dew Point [F]")
    y2 = ax.twinx()
    y2.plot(tmpf, rh, color='k')
    y2.set_ylabel("Relative Humidity [%] (black line)")
    y2.set_yticks([0, 5, 10, 25, 50, 75, 90, 95, 100])
    y2.set_ylim(0, 100)
    ax.set_ylim(0, max(tmpf)+2)
    ax.set_xlim(0, max(tmpf)+2)
    # NOTE(review): "\c" is an invalid escape sequence; a raw string
    # would be cleaner, behavior is unchanged today
    ax.set_xlabel("Air Temperature $^\circ$F")
    return fig, df
def plotter(fdict):
    """Build a three-panel recent time series for one station.

    Panel 1: air temperature and dew point; panel 2: wind direction and
    speed/gust; panel 3: visibility/ceiling or pressure, depending on the
    'p' plot-type option.  Returns (Figure, DataFrame).
    """
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['station']
    network = ctx['network']
    sdate = ctx.get('sdate')
    plot_type = ctx['p']
    nt = NetworkTable(network)
    if not nt.sts:
        raise ValueError(
            ("Network Identifier %s is unknown to IEM") % (network, ))
    if station not in nt.sts:
        raise ValueError(
            ("Station %s does not exist in network %s") % (station, network))
    tzname = nt.sts[station]['tzname']
    df = get_data(network, station, tzname, sdate)
    if df.empty:
        raise ValueError("No data was found!")
    # bounds once considered for QC of these fields:
    # if d1 is not None and d1 >= 0 and d1 <= 360:
    # if s is not None and s >= 0 and s < 200:
    # if t is not None and t >= -90 and t < 190:
    # if d is not None and d >= -90 and d < 190:
    # if v1 is not None and v1 >= 0 and v1 < 30:

    def ceilingfunc(row):
        """Our logic to compute a ceiling"""
        # returns the first OVC layer base in kft; implicitly None when
        # no overcast layer exists
        c = [row['skyc1'], row['skyc2'], row['skyc3'], row['skyc4']]
        if 'OVC' in c:
            pos = c.index('OVC')
            larr = [row['skyl1'], row['skyl2'], row['skyl3'], row['skyl4']]
            return larr[pos] / 1000.

    df['ceiling'] = df.apply(ceilingfunc, axis=1)
    fig = plt.figure(figsize=(9, 9))
    xalign = 0.1
    xwidth = 0.8
    ax = fig.add_axes([xalign, 0.7, xwidth, 0.25])
    xmin = df.index.min()
    xmax = df.index.max()
    # ____________PLOT 1___________________________
    df2 = df[df['tmpf'].notnull()]
    ax.plot(df2.index.values, df2['tmpf'], lw=2, label='Air Temp',
            color='#db6065', zorder=2)
    df2 = df[df['dwpf'].notnull()]
    ax.plot(df2.index.values, df2['dwpf'], lw=2, label='Dew Point',
            color='#346633', zorder=3)
    ax.set_title("[%s] %s\nRecent Time Series" % (station,
                                                  nt.sts[station]['name']))
    ax.grid(True)
    ax.text(-0.1, 0, "Temperature [F]", rotation=90,
            transform=ax.transAxes, verticalalignment='bottom')
    ax.set_ylim(bottom=(df['dwpf'].min() - 3))
    plt.setp(ax.get_xticklabels(), visible=True)
    date_ticker(ax, pytz.timezone(tzname))
    ax.set_xlim(xmin, xmax)
    ax.legend(loc='best', ncol=2)
    # _____________PLOT 2____________________________
    ax = fig.add_axes([xalign, 0.4, xwidth, 0.25])
    df2 = df[df['drct'].notnull()]
    ax2 = ax.twinx()
    # gust drawn first (lighter), sustained speed on top (darker)
    df2 = df[df['gust'].notnull()]
    if not df2.empty:
        ax2.fill_between(df2.index.values, 0,
                         dt.speed(df2['gust'], 'KT').value('MPH'),
                         color='#9898ff', zorder=2)
    df2 = df[df['sknt'].notnull()]
    if not df2.empty:
        ax2.fill_between(df2.index.values, 0,
                         dt.speed(df2['sknt'], 'KT').value('MPH'),
                         color='#373698', zorder=3)
    ax2.set_ylim(bottom=0)
    ax.set_yticks(range(0, 361, 45))
    ax.set_yticklabels(['N', 'NE', 'E', 'SE', 'S', 'SW', 'W', 'NW', "N"])
    ax.set_ylabel("Wind Direction")
    ax2.set_ylabel("Wind Speed [mph]")
    ax.set_ylim(0, 360.1)
    date_ticker(ax, pytz.timezone(tzname))
    # NOTE(review): df2 here is the sknt-filtered frame, not the
    # drct-filtered one computed above — confirm this is intended
    ax.scatter(df2.index.values, df2['drct'], facecolor='None',
               edgecolor='#b8bc74', zorder=4)
    # keep the direction scatter above the filled speed areas
    ax.set_zorder(ax2.get_zorder() + 1)
    ax.patch.set_visible(False)
    ax.set_xlim(xmin, xmax)
    # _________ PLOT 3 ____
    ax = fig.add_axes([xalign, 0.1, xwidth, 0.25])
    if plot_type == 'default':
        ax2 = ax.twinx()
        ax2.scatter(df.index.values, df['ceiling'], label='Visibility',
                    marker='o', s=40, color='g')
        ax2.set_ylabel("Overcast Ceiling [k ft]", color='g')
        ax2.set_ylim(bottom=0)
        ax.scatter(df.index.values, df['vsby'], label='Visibility',
                   marker='*', s=40, color='b')
        ax.set_ylabel("Visibility [miles]")
        ax.set_ylim(0, 14)
    elif plot_type == 'two':
        # keep only plausible altimeter values before converting to mb
        df2 = df[(df['alti'] > 20.) & (df['alti'] < 40.)]
        ax.grid(True)
        vals = dt.pressure(df2['alti'], 'IN').value('MB')
        ax.fill_between(df2.index.values, 0, vals, color='#a16334')
        ax.set_ylim(bottom=(vals.min() - 1), top=(vals.max() + 1))
        ax.set_ylabel("Pressure [mb]")
    ax.set_xlim(xmin, xmax)
    date_ticker(ax, pytz.timezone(tzname))
    ax.set_xlabel("Plot Time Zone: %s" % (tzname, ))
    return fig, df
def test_dewpoint_from_pq():
    """Dew point derived from pressure plus mixing ratio is sane."""
    press = datatypes.pressure(1013.25, "MB")
    ratio = datatypes.mixingratio(0.012, "kg/kg")
    computed = meteorology.dewpoint_from_pq(press, ratio)
    # expect roughly 16.84 C for this pressure/mixing-ratio pair
    assert abs(computed.value("C") - 16.84) < 0.01
def doit(opener, station, now):
    """Fetch one day of wunderground history for a station (Python 2).

    Uses an on-disk cache, parses the CSV-ish response for FullMetar
    records, and inserts them into the yearly table via the module-level
    ``acursor``.  Returns (rows_processed, used_cache).
    """
    usedcache = False
    processed = 0
    # wunderground wants the 4-character FAA identifier
    if len(station) == 3:
        faa = "K%s" % (station,)
    else:
        faa = station
    mydir = "/mesonet/ARCHIVE/wunder/cache/%s/%s/" % (station, now.year)
    if not os.path.isdir(mydir):
        os.makedirs(mydir)
    fn = "%s%s.txt" % (mydir, now.strftime("%Y%m%d"), )
    if os.path.isfile(fn):
        usedcache = True
        data = open(fn).read()
        # suspiciously small responses are treated as cache misses
        if len(data) < 140:
            usedcache = False
    if not usedcache:
        url = ("http://www.wunderground.com/history/airport/%s/%s/%-i/%-i/"
               "DailyHistory.html?req_city=NA&req_state=NA&"
               "req_statename=NA&format=1") % (faa, now.year, now.month,
                                               now.day)
        try:
            data = opener.open(url, timeout=30).read()
        except KeyboardInterrupt:
            sys.exit()
        except:
            # NOTE(review): bare except hides the failure cause; a
            # logged exception type would help debugging
            print "Download Fail STID: %s NOW: %s" % (station, now)
            return 0, False
        # Save raw data, since I am an idiot have of the time
        o = open(fn, 'w')
        o.write(data)
        o.close()
    lines = data.split("\n")
    headers = None
    for line in lines:
        line = line.replace("<br />", "").replace("\xff", "")
        if line.strip() == "":
            continue
        tokens = line.split(",")
        # first non-empty line is the CSV header row
        if headers is None:
            headers = {}
            for i in range(len(tokens)):
                headers[tokens[i]] = i
            continue
        if "FullMetar" in headers and len(tokens) >= headers["FullMetar"]:
            mstr = (tokens[headers["FullMetar"]]
                    ).strip().replace("'",
                                      "").replace("SPECI ",
                                                  "").replace("METAR ", "")
            ob = process_metar(mstr, now)
            if ob is None:
                continue
            # Account for SLP505 actually being 1050.5 and not 950.5 :(
            if SLP in headers:
                try:
                    pres = pressure(float(tokens[headers[SLP]]), "IN")
                    diff = pres.value("MB") - ob.mslp
                    if abs(diff) > 25:
                        oldval = ob.mslp
                        ob.mslp = "%.1f" % (pres.value("MB"),)
                        ob.alti = float(tokens[headers[SLP]])
                        print 'SETTING PRESSURE %s old: %s new: %s' % (
                            ob.valid.strftime("%Y/%m/%d %H%M"),
                            oldval, ob.mslp)
                except:
                    # best-effort correction; ignore unparsable values
                    pass
            sql = """
            INSERT into t""" + str(ob.valid.year) + """ (station, valid,
            tmpf, dwpf, vsby, drct, sknt, gust, p01i, alti, skyc1, skyc2,
            skyc3, skyc4, skyl1, skyl2, skyl3, skyl4, metar, mslp,
            presentwx, p03i, p06i, p24i, max_tmpf_6hr, max_tmpf_24hr,
            min_tmpf_6hr, min_tmpf_24hr, report_type) values (%s, %s, %s,
            %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s,
            %s,%s,%s, %s, %s, %s, %s, %s, %s, %s, %s, 2)
            """
            args = (station, ob.valid, ob.tmpf, ob.dwpf, ob.vsby, ob.drct,
                    ob.sknt, ob.gust, ob.p01i, ob.alti, ob.skyc1, ob.skyc2,
                    ob.skyc3, ob.skyc4, ob.skyl1, ob.skyl2, ob.skyl3,
                    ob.skyl4,
                    ob.metar.decode('utf-8', 'replace').encode('ascii',
                                                               'replace'),
                    ob.mslp, ob.presentwx, ob.p03i, ob.p06i, ob.p24i,
                    ob.max_tmpf_6hr, ob.max_tmpf_24hr, ob.min_tmpf_6hr,
                    ob.min_tmpf_24hr)
            acursor.execute(sql, args)
            processed += 1
    return processed, usedcache
def to_dwpf(val):
    """Convert a mixing ratio (kg/kg), assuming 1000 mb, to dew point (F)."""
    return meteorology.dewpoint_from_pq(pressure(1000, 'MB'),
                                        mixingratio(val, 'KG/KG')).value('F')
def gen_metar(data):
    """Convert our parsed dictionary into a METAR.

    Encodes wind, visibility, present weather, sky cover, temperature,
    altimeter, and RMK groups from the parsed record (DS3505-style keys,
    per the IEM_DS3505 remark) into ``data['metar']``.
    """
    mtr = "%s %sZ AUTO " % (data['call_id'],
                            data['valid'].strftime("%d%H%M"))
    # wind direction
    if data.get('wind_code') == 'C':
        # calm wind
        mtr += "00000KT "
    elif (data.get('drct_qc') in ["1", "5"] and
          data['wind_speed_mps'] is not None):
        if data['drct'] is None:
            mtr += "////"
        else:
            mtr += "%03.0f" % (data['drct'], )
        kts = speed(data['wind_speed_mps'], 'MPS').value('KT')
        mtr += "%02.0f" % (kts, )
        # OC1 carries the gust, when present and positive
        if 'OC1' in data['extra']:
            val = data['extra']['OC1'].get('speed', 0)
            if val is not None and val > 0:
                mtr += "G%02.0f" % (speed(val, 'MPS').value('KT'), )
        mtr += 'KT '
    # vis
    if data['vsby_m'] is not None:
        val = distance(data['vsby_m'], 'M').value('MI')
        mtr += "%sSM " % (vsbyfmt(val), )
    # Present Weather Time
    combocode = ""
    for code in ['AU1', 'AU2', 'AU3', 'AU4', 'AU5', 'AU6', 'AU7', 'AU8',
                 'AU9']:
        if code not in data['extra']:
            continue
        val = data['extra'][code]
        if val['combo'] == "1":  # lone
            if val['obscure'] == "1":
                mtr += "BR "
        elif val['combo'] == '2':  # start of dual code
            if val['descriptor'] == '7':
                combocode = "TS"
        elif val['combo'] == '3':  # end of dual code
            if val['proximity'] == '3' and val['precip'] == '02':
                mtr += "+%sRA " % (combocode, )
                combocode = ""
    # Clouds
    for code in ['GD1', 'GD2', 'GD3', 'GD4', 'GD5', 'GD6']:
        if code not in data['extra']:
            continue
        val = data['extra'][code]
        skycode = SKY_STATE_CODES[val['state_code']]
        height = val['height']
        if skycode == 'CLR':
            mtr += "CLR "
        elif height is None:
            continue
        else:
            # METAR encodes layer base in hundreds of feet
            hft = distance(height, 'M').value('FT') / 100.
            mtr += "%s%03.0f " % (skycode, hft)
    # temperature
    tgroup = None
    if (data.get('airtemp_c_qc') not in ["2", "3"] and
            data['airtemp_c'] is not None):
        tmpc = data['airtemp_c']
        dwpc = data['dewpointtemp_c']
        mtr += "%s%02.0f/" % ("M" if tmpc < 0 else "", abs(tmpc))
        if dwpc is not None:
            mtr += "%s%02.0f" % ("M" if dwpc < 0 else "", abs(dwpc))
            # precise T-group (tenths of degC) goes into the remarks
            tgroup = "T%s%03i%s%03i" % ("1" if tmpc < 0 else "0",
                                        abs(tmpc) * 10.,
                                        "1" if dwpc < 0 else "0",
                                        abs(dwpc) * 10.)
    mtr += " "
    # altimeter
    if ('MA1' in data['extra'] and
            data['extra']['MA1']['altimeter'] is not None):
        altimeter = pressure(data['extra']['MA1']['altimeter'],
                             'HPA').value("IN")
        mtr += "A%4.0f " % (altimeter * 100, )
    rmk = []
    # precipitation accumulation groups (AA1..AA4)
    for code in ['AA1', 'AA2', 'AA3', 'AA4']:
        if code not in data['extra']:
            continue
        hours = data['extra'][code].get('hours')
        depth = data['extra'][code].get('depth')
        if hours is None or depth is None or hours == 12:
            continue
        elif depth == 0 and data['extra'][code]['cond_code'] != '2':
            continue
        elif hours in [3, 6]:
            prefix = "6"
        elif hours == 24:
            prefix = "7"
        elif hours == 1:
            prefix = "P"
        else:
            warnings.warn("Unknown precip hours %s" % (hours, ))
            continue
        amount = distance(depth, 'MM').value('IN')
        rmk.append("%s%04.0f" % (prefix, amount * 100))
    if data['mslp_hpa'] is not None:
        # SLP group keeps only the last three digits of tenths of hPa
        rmk.append("SLP%03.0f" % (data['mslp_hpa'] * 10 % 1000, ))
    if tgroup is not None:
        rmk.append(tgroup)
    # temperature groups
    group4 = {'M': '////', 'N': '////'}
    for code in ['KA1', 'KA2', 'KA3', 'KA4']:
        if code not in data['extra']:
            continue
        val = data['extra'][code]
        hours = val.get('hours')
        if hours is None:
            continue
        typ = val['code']
        tmpc = val['tmpc']
        if tmpc is None:
            continue
        if hours is None or hours == 12:
            continue
        elif hours == 6 and typ == 'M':
            prefix = "1"
        elif hours == 6 and typ == 'N':
            prefix = "2"
        elif hours == 24:
            # 24-hour max/min pair is emitted later as a single 4-group
            group4[typ] = "%s%03i" % ("1" if tmpc < 0 else "0",
                                      abs(tmpc) * 10)
            continue
        else:
            warnings.warn("Unknown temperature hours %s typ: %s" % (hours,
                                                                    typ))
            continue
        rmk.append("%s%s%03i" % (prefix, "1" if tmpc < 0 else "0",
                                 abs(tmpc) * 10))
    if group4['M'] != '////' or group4['N'] != '////':
        rmk.append("4%(M)s%(N)s" % group4)
    # 3-hour pressure tendency
    if ('MD1' in data['extra'] and
            data['extra']['MD1']['threehour'] is not None):
        rmk.append("5%s%03i" % (data['extra']['MD1']['code'],
                                data['extra']['MD1']['threehour'] * 10))
    rmk.append("IEM_DS3505")
    mtr += "RMK %s " % (" ".join(rmk), )
    data['metar'] = mtr.strip()
def to_dwpf(val):
    """Unsure why I am doing this, like this"""
    press = pressure(1000, 'MB')
    ratio = mixingratio(val, 'KG/KG')
    return meteorology.dewpoint_from_pq(press, ratio).value('F')
def test_dewpoint_from_pq(self):
    """Dew point derived from pressure plus mixing ratio is sane."""
    press = datatypes.pressure(1013.25, "MB")
    ratio = datatypes.mixingratio(0.012, "kg/kg")
    result = meteorology.dewpoint_from_pq(press, ratio)
    # expect roughly 16.84 C for this pressure/mixing-ratio pair
    self.assertAlmostEqual(result.value("C"), 16.84, 2)
def process(ncfn):
    """Process one HFMETAR netCDF file into IEM observations.

    Reads the listed variables (plus QCR quality-control companions when
    present), converts units, synthesizes a METAR string per station
    record, and saves each observation to the 'iem' database.
    """
    IEM = psycopg2.connect(database="iem", host="iemdb")
    icursor = IEM.cursor()
    # map station id -> network for later Observation construction
    xref = {}
    icursor.execute(
        """SELECT id, network from stations where network ~* 'ASOS' or
        network = 'AWOS' and country = 'US'"""
    )
    for row in icursor:
        xref[row[0]] = row[1]
    icursor.close()
    nc = netCDF4.Dataset(ncfn)
    data = {}
    for vname in [
            "stationId",
            "observationTime",
            "temperature",
            "dewpoint",
            "altimeter",  # Pa
            "windDir",
            "windSpeed",  # mps
            "windGust",
            "visibility",  # m
            "precipAccum",
            "presWx",
            "skyCvr",
            "skyCovLayerBase",
            "autoRemark",
            "operatorRemark",
    ]:
        data[vname] = nc.variables[vname][:]
        # pull along the QC flag companion when it exists
        vname += "QCR"
        if vname in nc.variables:
            data[vname] = nc.variables[vname][:]
    # unit conversions; keep a Celsius copy for the METAR T-group
    for vname in ["temperature", "dewpoint"]:
        data[vname + "C"] = temperature(data[vname], "K").value("C")
        data[vname] = temperature(data[vname], "K").value("F")
    for vname in ["windSpeed", "windGust"]:
        data[vname] = speed(data[vname], "MPS").value("KT")
    data["altimeter"] = pressure(data["altimeter"], "PA").value("IN")
    data["skyCovLayerBase"] = distance(data["skyCovLayerBase"],
                                       "M").value("FT")
    data["visibility"] = distance(data["visibility"], "M").value("MI")
    data["precipAccum"] = distance(data["precipAccum"], "MM").value("IN")
    for i in range(len(data["stationId"])):
        sid = tostring(data["stationId"][i])
        # strip the K-prefix for the IEM 3-character identifier
        sid3 = sid[1:] if sid[0] == "K" else sid
        ts = datetime.datetime(1970, 1, 1) + datetime.timedelta(
            seconds=data["observationTime"][i])
        ts = ts.replace(tzinfo=pytz.timezone("UTC"))
        mtr = "%s %sZ AUTO " % (sid, ts.strftime("%d%H%M"))
        network = xref.get(sid3, "ASOS")
        iem = Observation(sid3, network,
                          ts.astimezone(TIMEZONES[LOC2TZ.get(sid3, None)]))
        # 06019G23KT
        # QCR == 0 means the value passed quality control
        if data["windDirQCR"][i] == 0 and data["windDir"][i] is not np.ma.masked:
            iem.data["drct"] = int(data["windDir"][i])
            mtr += "%03i" % (iem.data["drct"],)
        else:
            mtr += "///"
        if data["windSpeedQCR"][i] == 0 and data["windSpeed"][i] is not np.ma.masked:
            iem.data["sknt"] = int(data["windSpeed"][i])
            mtr += "%02i" % (iem.data["sknt"],)
        else:
            mtr += "//"
        if data["windGustQCR"][i] == 0 and data["windGust"][i] is not np.ma.masked and data["windGust"][i] > 0:
            iem.data["gust"] = int(data["windGust"][i])
            mtr += "G%02i" % (iem.data["gust"],)
        mtr += "KT "
        if data["visibilityQCR"][i] == 0 and data["visibility"][i] is not np.ma.masked:
            iem.data["vsby"] = float(data["visibility"][i])
            mtr += "%sSM " % (vsbyfmt(iem.data["vsby"]),)
        presentwx = tostring(data["presWx"][i])
        if presentwx != "":
            iem.data["presentwx"] = presentwx
            mtr += "%s " % (presentwx,)
        for _i, (_c, _l) in enumerate(zip(data["skyCvr"][i],
                                          data["skyCovLayerBase"][i])):
            if tostring(_c) != "":
                skyc = tostring(_c)
                iem.data["skyc%s" % (_i + 1,)] = skyc
                if skyc != "CLR":
                    iem.data["skyl%s" % (_i + 1,)] = int(_l)
                    # sky layer base encoded in hundreds of feet
                    mtr += "%s%03i " % (tostring(_c), int(_l) / 100)
                else:
                    mtr += "CLR "
        t = ""
        tgroup = "T"
        if data["temperatureQCR"][i] == 0 and data["temperature"][i] is not np.ma.masked:
            # iem.data['tmpf'] = float(data['temperature'][i])
            tmpc = float(data["temperatureC"][i])
            t = "%s%02i/" % ("M" if tmpc < 0 else "",
                             tmpc if tmpc > 0 else (0 - tmpc))
            tgroup += "%s%03i" % ("1" if tmpc < 0 else "0",
                                  (tmpc if tmpc > 0 else (0 - tmpc)) * 10.0)
        if data["dewpointQCR"][i] == 0 and data["dewpoint"][i] is not np.ma.masked:
            # iem.data['dwpf'] = float(data['dewpoint'][i])
            tmpc = float(data["dewpointC"][i])
            # only emit dew point when temperature was accepted too
            if t != "":
                t = "%s%s%02i " % (t, "M" if tmpc < 0 else "",
                                   tmpc if tmpc > 0 else 0 - tmpc)
                tgroup += "%s%03i" % ("1" if tmpc < 0 else "0",
                                      (tmpc if tmpc > 0 else
                                       (0 - tmpc)) * 10.0)
        if len(t) > 4:
            mtr += t
        if data["altimeterQCR"][i] == 0 and data["altimeter"][i] is not np.ma.masked:
            iem.data["alti"] = round(data["altimeter"][i], 2)
            mtr += "A%4i " % (iem.data["alti"] * 100.0,)
        mtr += "RMK "
        if data["precipAccumQCR"][i] == 0 and data["precipAccum"][i] is not np.ma.masked:
            if data["precipAccum"][i] >= 0.01:
                iem.data["phour"] = round(data["precipAccum"][i], 2)
                mtr += "P%04i " % (iem.data["phour"] * 100.0,)
            elif data["precipAccum"][i] < 0.01 and data["precipAccum"][i] > 0:
                # Trace
                mtr += "P0000 "
                iem.data["phour"] = 0.0001
        if tgroup != "T":
            mtr += "%s " % (tgroup,)
        autoremark = tostring(data["autoRemark"][i])
        opremark = tostring(data["operatorRemark"][i])
        if autoremark != "" or opremark != "":
            mtr += "%s %s " % (autoremark, opremark)
        mtr += "MADISHF"
        # Eat our own dogfood: only store the raw METAR when it parses
        try:
            Metar(mtr)
            iem.data["raw"] = mtr
        except:
            # NOTE(review): bare except silently drops the parse failure;
            # logging the exception would aid debugging
            pass
        icursor = IEM.cursor()
        if not iem.save(icursor, force_current_log=True, skip_current=True):
            print(("extract_hfmetar: unknown station? %s %s %s\n%s")
                  % (sid3, network, ts, mtr))
            pass
        icursor.close()
    IEM.commit()
def run_model(nc, initts, ncout, oldncout):
    """Run the METRo roadcast model over an Iowa subdomain of MM5 output.

    For each grid cell inside the Iowa bounding box, writes a forecast XML
    file (``isumm5.xml``) from the MM5 fields in `nc`, shells out to the
    METRo executable, parses the resulting ``roadcast.xml``, and copies the
    roadcast values into `ncout`.

    Args:
        nc: open netCDF file of MM5 model output (read).
        initts: datetime, model initialization time used for the roadcast
            start date and for offsetting the ``time`` coordinate.
        ncout: open netCDF file that receives the roadcast fields (written).
        oldncout: previous run's output, passed through to ``make_rwis``
            to build the observation input (``rwis.xml``).

    Side effects: writes ``isumm5.xml``/``roadcast.xml`` in the current
    directory, spawns a subprocess per grid cell, and calls ``sys.exit()``
    after more than 100 METRo failures.
    """
    # MM5 forecast fields needed to drive METRo
    t2 = nc.variables['t2']
    u10 = nc.variables['u10']
    v10 = nc.variables['v10']
    tm = nc.variables['time']
    lwdown = nc.variables['lwdown']
    swdown = nc.variables['swdown']
    q2 = nc.variables['q2']
    rc = nc.variables['rain_con']
    rn = nc.variables['rain_non']
    lats = nc.variables['latitcrs']
    lons = nc.variables['longicrs']
    # keep masking in-tact as we only write data below when we have it
    # (each output field is pulled into a local masked array, mutated cell
    # by cell, then written back wholesale at the end)
    otmpk = ma.array(ncout.variables['tmpk'][:])
    otmpk._sharedmask = False
    owmps = ma.array(ncout.variables['wmps'][:])
    owmps._sharedmask = False
    oswout = ma.array(ncout.variables['swout'][:])
    oswout._sharedmask = False
    oh = ma.array(ncout.variables['h'][:])
    oh._sharedmask = False
    olf = ma.array(ncout.variables['lf'][:])
    olf._sharedmask = False
    obdeckt = ma.array(ncout.variables['bdeckt'][:])
    obdeckt._sharedmask = False
    osubsfct = ma.array(ncout.variables['subsfct'][:])
    osubsfct._sharedmask = False
    oifrost = ma.array(ncout.variables['ifrost'][:])
    oifrost._sharedmask = False
    odwpk = ma.array(ncout.variables['dwpk'][:])
    odwpk._sharedmask = False
    ofrostd = ma.array(ncout.variables['frostd'][:])
    ofrostd._sharedmask = False
    oicond = ma.array(ncout.variables['icond'][:])
    oicond._sharedmask = False
    # mini = 200
    # minj = 200
    # maxi = 0
    # maxj = 0
    errorcount = 0
    # Command line for the METRo model; reused for every grid cell
    cmd = "/usr/bin/python model/usr/bin/metro "
    cmd += "--roadcast-start-date %s " % (initts.strftime("%Y-%m-%dT%H:%MZ"),)
    cmd += "--input-forecast isumm5.xml "
    cmd += "--input-observation rwis.xml "
    cmd += "--input-station station.xml "
    cmd += "--output-roadcast roadcast.xml "
    cmd += "--log-file /dev/null "
    # cmd += "--verbose-level 4 "
    cmd += "--use-solarflux-forecast --use-infrared-forecast"
    # We don't have pressure from MM5 (yet), so assume a constant 1000 mb
    pressure = dt.pressure(1000.0, 'MB')
    for i in range(len(nc.dimensions['i_cross'])):
        if errorcount > 100:
            print('Too many errors, aborting....')
            sys.exit()
        # loopstart = datetime.datetime.now()
        for j in range(len(nc.dimensions['j_cross'])):
            lat = lats[i, j]
            lon = lons[i, j]
            # Hey, we only care about Iowa data! -97 40 -90 43.75
            if lat < 40 or lat > 43.75 or lon < -97 or lon > -90:
                continue
            # Build the rwis.xml observation file for this cell
            make_rwis(i, j, initts, oldncout)
            o = open('isumm5.xml', 'w')
            o.write("""<?xml version="1.0"?>
<forecast>
  <header>
      <production-date>%s</production-date>
      <version>1.1</version>
      <filetype>forecast</filetype>
      <station-id>ofr</station-id>
  </header>
  <prediction-list>""" % (
                datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%MZ"),))
            # t starts at 1: the 0th time step is the initialization time
            for t in range(1, len(nc.dimensions['time'])):
                ts = initts + datetime.timedelta(minutes=int(tm[t]))
                tmpk = dt.temperature(t2[t, i, j], 'K')
                mr = dt.mixingratio(q2[t, i, j], 'KG/KG')
                dwp = met.dewpoint_from_pq(pressure, mr)
                # wind speed magnitude from the u/v components
                sped = dt.speed((u10[t, i, j]**2 + v10[t, i, j]**2)**.5,
                                'MPS')
                # sn - snow accumulation in cm
                # ap - surface pressure in mb
                o.write("""<prediction>
          <forecast-time>%s</forecast-time>
          <at>%.1f</at>
          <td>%.1f</td>
          <ra>%.1f</ra>
          <sn>0.0</sn>
          <ws>%.1f</ws>
          <ap>993.8</ap>
          <wd>300</wd>
          <cc>0</cc>
          <sf>%.1f</sf>
          <ir>%.1f</ir>
      </prediction>
      """ % (ts.strftime("%Y-%m-%dT%H:%MZ"), tmpk.value("C"),
             dwp.value("C"), (rn[t, i, j] + rc[t, i, j])*10.,
             sped.value("KMH"), swdown[t, i, j], lwdown[t, i, j])
                )
            o.write("</prediction-list></forecast>")
            o.close()
            proc = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,
                                    stderr=subprocess.PIPE)
            # Any stderr output is treated as a failed cell
            se = proc.stderr.read().decode("utf-8")
            if se != "":
                errorcount += 1
                print(('metro error i:%03i j:%03i stderr:|%s|'
                       ) % (i, j, se.strip()))
                continue
            # Starts at :20 minutes after start time
            tree = ET.parse('roadcast.xml')
            root = tree.getroot()
            tstep = 0
            for c in root.findall('./prediction-list/prediction'):
                # tstep pre-increments, so index 0 is never written
                tstep += 1
                # Road surface temperature st Celsius
                obdeckt[tstep, i, j] = float(c.find('./st').text) + 273.15
                # Road sub surface temperature* (40 cm) sst Celsius
                osubsfct[tstep, i, j] = float(c.find('./sst').text) + 273.15
                # Air temperature at Celsius
                otmpk[tstep, i, j] = float(c.find('./at').text) + 273.15
                # Dew point td Celsius
                odwpk[tstep, i, j] = float(c.find('./td').text) + 273.15
                # Wind speed ws km/h
                owmps[tstep, i, j] = float(c.find('./ws').text)
                # Quantity of snow or ice on the road sn cm
                # Quantity of rain on the road ra mm
                # Total (1 hr) snow precipitation qp-sn cm
                # Total (1 hr) rain precipitation qp-ra mm
                # Solar flux sf W/m2
                oswout[tstep, i, j] = float(c.find('./sf').text)
                # Incident infra-red flux ir W/m2
                # Vapor flux fv W/m2
                # Sensible heat fc W/m2
                # Anthropogenic flux fa W/m2
                # Ground exchange flux fg W/m2
                # Blackbody effect bb W/m2
                # Phase change fp W/m2
                # Road condition rc METRo code
                oicond[tstep, i, j] = int(c.find('./rc').text)
                # Octal cloud coverage** cc octal
    # Write everything back; wmps was accumulated in km/h (see above) and is
    # converted to m/s here
    ncout.variables['tmpk'][:] = otmpk
    ncout.variables['wmps'][:] = dt.speed(owmps, 'KMH').value('MPS')
    ncout.variables['swout'][:] = oswout
    ncout.variables['h'][:] = oh
    ncout.variables['lf'][:] = olf
    ncout.variables['bdeckt'][:] = obdeckt
    ncout.variables['subsfct'][:] = osubsfct
    ncout.variables['ifrost'][:] = oifrost
    ncout.variables['frostd'][:] = ofrostd
    ncout.variables['dwpk'][:] = odwpk
    ncout.variables['icond'][:] = oicond
def main():
    """Go Main Go

    Ingest flux-tower CSV files into per-year ``flux<year>`` database
    tables and mirror a subset of variables into the IEM summary database
    via ``Observation``.  Only rows newer than each station's current
    database maximum ``valid`` timestamp are inserted.
    """
    pgconn = get_dbconn('other')
    ipgconn = get_dbconn('iem')
    cursor = pgconn.cursor()
    # Figure out max valid times per station so we only ingest new rows
    maxts = {}
    cursor.execute("""
        SELECT station, max(valid) from flux_data GROUP by station
    """)
    for row in cursor:
        maxts[row[0]] = row[1]
    processed = 0
    for station, fns in FILENAMES.items():
        if station not in maxts:
            print("flux_ingest %s has no prior db archive" % (station, ))
            # No archive yet: accept everything back to 1980
            maxts[station] = datetime.datetime(
                1980, 1, 1).replace(tzinfo=pytz.utc)
        dfs = []
        for fn in fns:
            myfn = "%s%s" % (DIR, fn)
            if not os.path.isfile(myfn):
                print("flux_ingest.py missing file: %s" % (myfn, ))
                continue
            # skiprows: Campbell datalogger files carry extra header rows
            df = pd.read_csv(myfn, skiprows=[0, 2, 3], index_col=0,
                             na_values=['NAN', ])
            df.drop('RECORD', axis=1, inplace=True)
            if df.empty:
                print(('flux_ingest.py file: %s has no data') % (fn, ))
                continue
            dfs.append(df)
        if not dfs:
            print("flux_ingest no data for: %s" % (station, ))
            continue
        df = dfs[0]
        if len(dfs) > 1:
            # a station may split its variables across two files; join on
            # the timestamp index
            df = df.join(dfs[1]).copy()
        # get index back into a column
        df.reset_index(inplace=True)
        # lowercase all column names
        df.columns = [x.lower() for x in df.columns]
        df['timestamp'] = df['timestamp'].apply(make_time)
        df = df[df['timestamp'] > maxts[station]].copy()
        if df.empty:
            continue
        df.rename(columns=CONVERT, inplace=True)
        # We need a UTC year to allow for the database insert below to work
        df['utcyear'] = df['valid'].dt.tz_convert(pytz.utc).dt.year
        df['station'] = station
        for year, gdf in df.groupby('utcyear'):
            # Drop columns the flux tables don't have; warn on ones not in
            # the known-droppable list
            exclude = []
            for colname in gdf.columns:
                if colname not in DBCOLS:
                    exclude.append(colname)
                    if colname not in DROPCOLS:
                        print(("flux_ingest %s has additional cols: %s"
                               ) % (station, exclude))
            gdf2 = gdf[gdf.columns.difference(exclude)]
            processed += len(gdf2.index)
            # Bulk-load via COPY FROM for speed
            output = StringIO()
            gdf2.to_csv(output, sep="\t", header=False, index=False)
            cursor = pgconn.cursor()
            output.seek(0)
            cursor.copy_from(output, "flux%s" % (year, ),
                             columns=gdf2.columns, null="")
            cursor.close()
            pgconn.commit()
        icursor = ipgconn.cursor()
        for _i, row in df.iterrows():
            iemob = Observation(station, 'NSTLFLUX', row['valid'])
            # Convert units to what the IEM summary tables expect
            if 't_hmp_avg' in df.columns:
                iemob.data['tmpf'] = temperature(
                    row['t_hmp_avg'], 'C').value('F')
            if 'wnd_spd' in df.columns:
                iemob.data['sknt'] = speed(row['wnd_spd'], 'MPS').value('KT')
            if 'press_avg' in df.columns:
                # press_avg appears to be kPa -> Pa -> mb; TODO confirm units
                iemob.data['pres'] = pressure(
                    row['press_avg'] * 1000., 'PA').value('MB')
            # these copy straight through without unit conversion
            for cvar, ivar in zip(
                    ['solarrad_w_avg', 'rh_hmp_avg', 'wnd_dir_compass'],
                    ['srad', 'rh', 'drct']):
                if cvar in df.columns:
                    iemob.data[ivar] = row[cvar]
            iemob.save(icursor)
        icursor.close()
        ipgconn.commit()
def plotter(fdict):
    """ Go

    IEM autoplot entry point: chart the average dew point by air
    temperature for one ASOS station, optionally restricted to a month or
    season.  Averaging is done in mixing-ratio space (not by averaging
    dew points directly), then converted back to a dew point at 1000 mb.

    Args:
        fdict (dict): CGI-style parameters; keys ``zstation``, ``network``
            and ``month`` are used.

    Returns:
        (matplotlib.figure.Figure, pandas.DataFrame)
    """
    import matplotlib
    matplotlib.use('agg')
    import matplotlib.pyplot as plt
    pgconn = psycopg2.connect(database='asos', host='iemdb', user='******')
    cursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    station = fdict.get('zstation', 'AMW')
    network = fdict.get('network', 'IA_ASOS')
    month = fdict.get('month', 'all')
    nt = NetworkTable(network)
    if month == 'all':
        months = range(1, 13)
    elif month == 'fall':
        months = [9, 10, 11]
    elif month == 'winter':
        months = [12, 1, 2]
    elif month == 'spring':
        months = [3, 4, 5]
    elif month == 'summer':
        months = [6, 7, 8]
    else:
        # month is a three-letter abbreviation, e.g. 'jan'
        ts = datetime.datetime.strptime("2000-"+month+"-01", '%Y-%b-%d')
        # make sure it is length two for the trick below in SQL
        months = [ts.month, 999]
    cursor.execute("""
        SELECT tmpf::int as t, dwpf from alldata where station = %s
        and tmpf is not null and dwpf is not null and dwpf <= tmpf
        and tmpf >= 0 and tmpf <= 140
        and extract(month from valid) in %s
        """, (station, tuple(months)))
    # accumulate mixing ratio sums/counts binned by whole-degree temperature
    sums = np.zeros((140,), 'f')
    counts = np.zeros((140,), 'f')
    for row in cursor:
        r = mixing_ratio(temperature(row[1], 'F')).value('KG/KG')
        sums[row[0]] += r
        counts[row[0]] += 1
    rows = []
    for i in range(140):
        if counts[i] < 3:
            # require at least 3 observations per temperature bin
            continue
        r = sums[i] / float(counts[i])
        # mean mixing ratio -> dew point at an assumed 1000 mb pressure
        d = dewpoint_from_pq(pressure(1000, 'MB'),
                             mixingratio(r, 'KG/KG')
                             ).value('F')
        rh = relh(temperature(i, 'F'), temperature(d, 'F')).value('%')
        rows.append(dict(tmpf=i, dwpf=d, rh=rh))
    df = pd.DataFrame(rows)
    tmpf = df['tmpf']
    dwpf = df['dwpf']
    rh = df['rh']
    (fig, ax) = plt.subplots(1, 1)
    ax.bar(tmpf-0.5, dwpf, ec='green', fc='green', width=1)
    ax.grid(True, zorder=11)
    ax.set_title(("%s [%s]\nAverage Dew Point by Air Temperature (month=%s) "
                  "(%s-%s)\n"
                  "(must have 3+ hourly observations at the given temperature)"
                  ) % (nt.sts[station]['name'], station, month.upper(),
                       nt.sts[station]['archive_begin'].year,
                       datetime.datetime.now().year), size=10)
    # 1:1 reference line (dew point == temperature means saturation)
    ax.plot([0, 140], [0, 140], color='b')
    ax.set_ylabel("Dew Point [F]")
    # secondary axis for relative humidity
    y2 = ax.twinx()
    y2.plot(tmpf, rh, color='k')
    y2.set_ylabel("Relative Humidity [%] (black line)")
    y2.set_yticks([0, 5, 10, 25, 50, 75, 90, 95, 100])
    y2.set_ylim(0, 100)
    ax.set_ylim(0, max(tmpf)+2)
    ax.set_xlim(0, max(tmpf)+2)
    ax.set_xlabel("Air Temperature $^\circ$F")
    return fig, df
def doit(opener, station, now, valids):
    """ Fetch!

    Download (or read from local cache) one day of Weather Underground
    airport history CSV for `station`, parse the METAR column, and insert
    observations into the per-year ASOS database table.

    NOTE(review): this function uses Python 2 syntax (print statements,
    string exceptions) and writes via a module-level ``acursor``.

    Args:
        opener: urllib-style opener used to fetch the history page.
        station: 3- or 4-character station identifier.
        now: datetime.date/datetime for the day to fetch.
        valids (list): timestamps already processed; mutated in place to
            de-duplicate observations across calls.

    Returns:
        (processed, usedcache) tuple: number of rows inserted and whether
        the on-disk cache satisfied the request.
    """
    usedcache = False
    processed = 0
    # Wunderground wants the 4-char ICAO identifier
    if len(station) == 3:
        faa = "K%s" % (station,)
    else:
        faa = station
    mydir = "/mesonet/ARCHIVE/wunder/cache/%s/%s/" % (station, now.year)
    if not os.path.isdir(mydir):
        os.makedirs(mydir)
    fn = "%s%s.txt" % (mydir, now.strftime("%Y%m%d"), )
    if os.path.isfile(fn):
        usedcache = True
        data = open(fn).read()
        # suspiciously small cache files are likely error pages; refetch
        if len(data) < 140:
            usedcache = False
    if not usedcache:
        url = ("http://www.wunderground.com/history/airport/%s/%s/%-i/%-i/"
               "DailyHistory.html?req_city=NA&req_state=NA&"
               "req_statename=NA&format=1") % (faa, now.year, now.month,
                                               now.day)
        try:
            data = opener.open(url, timeout=30).read()
        except KeyboardInterrupt:
            sys.exit()
        except:
            print "Download Fail STID: %s NOW: %s" % (station, now)
            return 0, False
        # Save raw data, since I am an idiot have of the time
        o = open(fn, 'w')
        o.write(data)
        o.close()
    lines = data.split("\n")
    headers = None
    for line in lines:
        line = line.replace("<br />", "").replace("\xff", "")
        if line.strip() == "":
            continue
        tokens = line.split(",")
        if headers is None:
            # first non-empty line maps column name -> index
            headers = {}
            for i in range(len(tokens)):
                headers[tokens[i]] = i
            continue
        if "FullMetar" in headers and len(tokens) >= headers["FullMetar"]:
            mstr = (tokens[headers["FullMetar"]]
                    ).strip().replace("'", "").replace("SPECI ",
                                                       "").replace("METAR ",
                                                                   "")
            ob = process_metar(mstr, now)
            if ob is None or ob.valid in valids:
                continue
            valids.append(ob.valid)
            # Account for SLP505 actually being 1050.5 and not 950.5 :(
            # If the CSV sea-level pressure disagrees with the METAR-decoded
            # value by more than 25 mb, trust the CSV value.
            if SLP in headers:
                try:
                    pres = pressure(
                        float(tokens[headers[SLP]]), "IN")
                    diff = pres.value("MB") - ob.mslp
                    if abs(diff) > 25:
                        oldval = ob.mslp
                        ob.mslp = "%.1f" % (pres.value("MB"),)
                        ob.alti = float(tokens[headers[SLP]])
                        print 'SETTING PRESSURE %s old: %s new: %s' % (
                            ob.valid.strftime("%Y/%m/%d %H%M"),
                            oldval, ob.mslp)
                except:
                    pass
            # report_type=2 marks these as routine hourly observations
            sql = """INSERT into t""" + str(ob.valid.year) + """ (station,
            valid, tmpf, dwpf, vsby, drct, sknt, gust, p01i, alti, skyc1,
            skyc2, skyc3, skyc4, skyl1, skyl2, skyl3, skyl4, metar, mslp,
            presentwx, p03i, p06i, p24i, max_tmpf_6hr, max_tmpf_24hr,
            min_tmpf_6hr, min_tmpf_24hr, report_type)
            values (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s,
            %s, %s, %s, %s,%s,%s, %s, %s, %s, %s, %s, %s, %s, %s, 2)
            """
            args = (station, ob.valid, ob.tmpf, ob.dwpf, ob.vsby, ob.drct,
                    ob.sknt, ob.gust, ob.p01i, ob.alti, ob.skyc1, ob.skyc2,
                    ob.skyc3, ob.skyc4, ob.skyl1, ob.skyl2, ob.skyl3,
                    ob.skyl4,
                    ob.metar.decode('utf-8', 'replace').encode('ascii',
                                                               'replace'),
                    ob.mslp, ob.presentwx, ob.p03i, ob.p06i, ob.p24i,
                    ob.max_tmpf_6hr, ob.max_tmpf_24hr, ob.min_tmpf_6hr,
                    ob.min_tmpf_24hr)
            acursor.execute(sql, args)
            processed += 1
    return processed, usedcache
def plotter(fdict):
    """ Go

    IEM autoplot entry point: chart the average dew point by 10-degree
    wind direction for one ASOS station.  Averaging is done in
    mixing-ratio space, then converted back to a dew point at 1000 mb.

    Args:
        fdict (dict): CGI-style parameters; keys ``zstation``, ``network``
            and ``month`` are used.

    Returns:
        (matplotlib.figure.Figure, pandas.DataFrame)
    """
    import matplotlib
    matplotlib.use('agg')
    import matplotlib.pyplot as plt
    ASOS = psycopg2.connect(database='asos', host='iemdb', user='******')
    cursor = ASOS.cursor(cursor_factory=psycopg2.extras.DictCursor)
    station = fdict.get('zstation', 'AMW')
    network = fdict.get('network', 'IA_ASOS')
    month = fdict.get('month', 'all')
    nt = NetworkTable(network)
    if month == 'all':
        months = range(1, 13)
    elif month == 'fall':
        months = [9, 10, 11]
    elif month == 'winter':
        months = [12, 1, 2]
    elif month == 'spring':
        months = [3, 4, 5]
    elif month == 'summer':
        months = [6, 7, 8]
    else:
        # month is a three-letter abbreviation, e.g. 'jan'
        ts = datetime.datetime.strptime("2000-"+month+"-01", '%Y-%b-%d')
        # make sure it is length two for the trick below in SQL
        months = [ts.month, 999]
    # only consider winds > 3 knots reported on a 10-degree direction
    cursor.execute("""
        SELECT drct::int as t, dwpf from alldata where station = %s
        and drct is not null and dwpf is not null and dwpf <= tmpf
        and sknt > 3 and drct::int %% 10 = 0
        and extract(month from valid) in %s
        """, (station, tuple(months)))
    sums = np.zeros((361,), 'f')
    counts = np.zeros((361,), 'f')
    for row in cursor:
        r = mixing_ratio(temperature(row[1], 'F')).value('KG/KG')
        sums[row[0]] += r
        counts[row[0]] += 1
    # wrap 360 degrees around to 0 so north is plotted at both ends
    sums[0] = sums[360]
    counts[0] = counts[360]
    rows = []
    for i in range(361):
        if counts[i] < 3:
            # require at least 3 observations per direction bin
            continue
        r = sums[i] / float(counts[i])
        d = dewpoint_from_pq(pressure(1000, 'MB'),
                             mixingratio(r, 'KG/KG')
                             ).value('F')
        rows.append(dict(drct=i, dwpf=d))
    df = pd.DataFrame(rows)
    drct = df['drct']
    dwpf = df['dwpf']
    (fig, ax) = plt.subplots(1, 1)
    ax.bar(drct-5, dwpf, ec='green', fc='green', width=10)
    ax.grid(True, zorder=11)
    ax.set_title(("%s [%s]\nAverage Dew Point by Wind Direction (month=%s) "
                  "(%s-%s)\n"
                  "(must have 3+ hourly obs > 3 knots at given direction)"
                  ) % (nt.sts[station]['name'], station, month.upper(),
                       max([1973, nt.sts[station]['archive_begin'].year]),
                       datetime.datetime.now().year), size=10)
    ax.set_ylabel("Dew Point [F]")
    ax.set_ylim(min(dwpf)-5, max(dwpf)+5)
    ax.set_xlim(-5, 365)
    ax.set_xticks([0, 45, 90, 135, 180, 225, 270, 315, 360])
    ax.set_xticklabels(['N', 'NE', 'E', 'SE', 'S', 'SW', 'W', 'NW', 'N'])
    ax.set_xlabel("Wind Direction")
    return fig, df
def gen_metar(data):
    """Convert our parsed dictionary into a METAR

    Assembles a synthetic METAR string from a parsed NCEI DS3505 (ISH)
    record and stores it at ``data['metar']``.  Field groups are appended
    in METAR order: wind, visibility, present weather, clouds,
    temperature/dew point, altimeter, then a RMK section (precip, SLP,
    T-group, max/min temperatures, pressure tendency).

    Args:
        data (dict): parsed DS3505 record; reads ``call_id``, ``valid``,
            wind/vis/temp fields and the additional-data groups under
            ``data['extra']``.

    Side effects: mutates ``data`` by adding the ``metar`` key.
    """
    mtr = "%s %sZ AUTO " % (data['call_id'],
                            data['valid'].strftime("%d%H%M"))
    # wind direction
    if data.get('wind_code') == 'C':
        # calm winds
        mtr += "00000KT "
    elif (data.get('drct_qc') in ["1", "5"] and
          data['wind_speed_mps'] is not None):
        if data['drct'] is None:
            mtr += "////"
        else:
            mtr += "%03.0f" % (data['drct'], )
        kts = speed(data['wind_speed_mps'], 'MPS').value('KT')
        mtr += "%02.0f" % (kts, )
        # OC1 group carries the wind gust, if any
        if 'OC1' in data['extra']:
            val = data['extra']['OC1'].get('speed', 0)
            if val > 0:
                mtr += "G%02.0f" % (speed(val, 'MPS').value('KT'), )
        mtr += 'KT '
    # vis
    if data['vsby_m'] is not None:
        val = distance(data['vsby_m'], 'M').value('MI')
        mtr += "%sSM " % (vsbyfmt(val), )
    # Present Weather Time
    # AU groups may be standalone or form a two-part combination; only a
    # few specific codes are translated here (BR, TS, +RA / +TSRA)
    combocode = ""
    for code in ['AU1', 'AU2', 'AU3', 'AU4', 'AU5', 'AU6', 'AU7', 'AU8',
                 'AU9']:
        if code not in data['extra']:
            continue
        val = data['extra'][code]
        if val['combo'] == "1":  # lone
            if val['obscure'] == "1":
                mtr += "BR "
        elif val['combo'] == '2':  # start of dual code
            if val['descriptor'] == '7':
                combocode = "TS"
        elif val['combo'] == '3':  # end of dual code
            if val['proximity'] == '3' and val['precip'] == '02':
                mtr += "+%sRA " % (combocode, )
                combocode = ""
    # Clouds
    for code in ['GD1', 'GD2', 'GD3', 'GD4', 'GD5', 'GD6']:
        if code not in data['extra']:
            continue
        val = data['extra'][code]
        skycode = SKY_STATE_CODES[val['state_code']]
        height = val['height']
        if skycode == 'CLR':
            mtr += "CLR "
        elif height is None:
            # non-clear layer without a height cannot be encoded
            continue
        else:
            # METAR encodes cloud base in hundreds of feet
            hft = distance(height, 'M').value('FT') / 100.
            mtr += "%s%03.0f " % (skycode, hft)
    # temperature
    tgroup = None
    if (data.get('airtemp_c_qc') not in ["2", "3"] and
            data['airtemp_c'] is not None):
        tmpc = data['airtemp_c']
        dwpc = data['dewpointtemp_c']
        mtr += "%s%02.0f/" % ("M" if tmpc < 0 else "", abs(tmpc))
        if dwpc is not None:
            mtr += "%s%02.0f" % ("M" if dwpc < 0 else "", abs(dwpc))
            # tenths-precision T group for the remarks section
            tgroup = "T%s%03i%s%03i" % ("1" if tmpc < 0 else "0",
                                        abs(tmpc) * 10.,
                                        "1" if dwpc < 0 else "0",
                                        abs(dwpc) * 10.)
        mtr += " "
    # altimeter
    if ('MA1' in data['extra'] and
            data['extra']['MA1']['altimeter'] is not None):
        altimeter = pressure(data['extra']['MA1']['altimeter'],
                             'HPA').value("IN")
        mtr += "A%4.0f " % (altimeter * 100, )
    rmk = []
    # precipitation accumulation groups (6xxxx / 7xxxx / Pxxxx)
    for code in ['AA1', 'AA2', 'AA3', 'AA4']:
        if code not in data['extra']:
            continue
        hours = data['extra'][code]['hours']
        depth = data['extra'][code]['depth']
        if hours is None or depth is None or hours == 12:
            continue
        elif depth == 0 and data['extra'][code]['cond_code'] != '2':
            # zero depth only encoded for trace (cond_code 2)
            continue
        elif hours in [3, 6]:
            prefix = "6"
        elif hours == 24:
            prefix = "7"
        elif hours == 1:
            prefix = "P"
        else:
            warnings.warn("Unknown precip hours %s" % (hours, ))
            continue
        amount = distance(depth, 'MM').value('IN')
        rmk.append("%s%04.0f" % (prefix, amount * 100))
    if data['mslp_hpa'] is not None:
        # SLP group keeps tenths of the last three digits
        rmk.append("SLP%03.0f" % (data['mslp_hpa'] * 10 % 1000, ))
    if tgroup is not None:
        rmk.append(tgroup)
    # temperature groups
    # group4 collects the 24-hour max (M) and min (N) for the 4-group
    group4 = {'M': '////', 'N': '////'}
    for code in ['KA1', 'KA2', 'KA3', 'KA4']:
        if code not in data['extra']:
            continue
        val = data['extra'][code]
        hours = val['hours']
        typ = val['code']
        tmpc = val['tmpc']
        if tmpc is None:
            continue
        if hours is None or hours == 12:
            continue
        elif hours == 6 and typ == 'M':
            prefix = "1"
        elif hours == 6 and typ == 'N':
            prefix = "2"
        elif hours == 24:
            group4[typ] = "%s%03i" % ("1" if tmpc < 0 else "0",
                                      abs(tmpc) * 10)
            continue
        else:
            warnings.warn("Unknown temperature hours %s typ: %s" % (hours,
                                                                    typ))
            continue
        rmk.append("%s%s%03i" % (prefix, "1" if tmpc < 0 else "0",
                                 abs(tmpc) * 10))
    if group4['M'] != '////' or group4['N'] != '////':
        rmk.append("4%(M)s%(N)s" % group4)
    # 3-hour pressure tendency
    if ('MD1' in data['extra'] and
            data['extra']['MD1']['threehour'] is not None):
        rmk.append("5%s%03i" % (data['extra']['MD1']['code'],
                                data['extra']['MD1']['threehour'] * 10))
    # tag the product so downstream consumers know its origin
    rmk.append("IEM_DS3505")
    mtr += "RMK %s " % (" ".join(rmk), )
    data['metar'] = mtr.strip()
def to_dwpf(val):
    """Convert a mixing ratio (kg/kg) into a dew point in Fahrenheit.

    The conversion assumes a constant reference pressure of 1000 mb.

    Args:
        val: mixing ratio value in KG/KG.

    Returns:
        float: dew point temperature in degrees Fahrenheit.
    """
    ref_pressure = pressure(1000, 'MB')
    mixing = mixingratio(val, 'KG/KG')
    dewpoint = meteorology.dewpoint_from_pq(ref_pressure, mixing)
    return dewpoint.value('F')
def main():
    """Go Main Go

    Ingest flux-tower CSV files into per-year ``flux<year>`` database
    tables and mirror a subset of variables into the IEM summary database
    via ``Observation``.  Only rows newer than each station's current
    database maximum ``valid`` timestamp are inserted.
    """
    pgconn = get_dbconn("other")
    ipgconn = get_dbconn("iem")
    cursor = pgconn.cursor()
    # Figure out max valid times per station so we only ingest new rows
    maxts = {}
    cursor.execute(
        "SELECT station, max(valid) from flux_data GROUP by station")
    for row in cursor:
        maxts[row[0]] = row[1]
    processed = 0
    for station, fns in FILENAMES.items():
        if station not in maxts:
            LOG.info("%s has no prior db archive", station)
            # No archive yet: accept everything back to 1980
            maxts[station] = datetime.datetime(
                1980, 1, 1).replace(tzinfo=pytz.utc)
        dfs = []
        for fn in fns:
            myfn = "%s%s" % (DIR, fn)
            if not os.path.isfile(myfn):
                LOG.info("missing file: %s", myfn)
                continue
            # skiprows: Campbell datalogger files carry extra header rows
            df = pd.read_csv(myfn, skiprows=[0, 2, 3], index_col=0,
                             na_values=["NAN"])
            df.drop("RECORD", axis=1, inplace=True)
            if df.empty:
                LOG.info("file: %s has no data", fn)
                continue
            dfs.append(df)
        if not dfs:
            LOG.info("no data for: %s", station)
            continue
        df = dfs[0]
        if len(dfs) > 1:
            # a station may split its variables across two files; join on
            # the timestamp index
            df = df.join(dfs[1]).copy()
        # get index back into a column
        df.reset_index(inplace=True)
        # lowercase all column names
        df.columns = [x.lower() for x in df.columns]
        df["timestamp"] = df["timestamp"].apply(make_time)
        df = df[df["timestamp"] > maxts[station]].copy()
        if df.empty:
            continue
        df.rename(columns=CONVERT, inplace=True)
        # We need a UTC year to allow for the database insert below to work
        df["utcyear"] = df["valid"].dt.tz_convert(pytz.utc).dt.year
        df["station"] = station
        for year, gdf in df.groupby("utcyear"):
            # Drop columns the flux tables don't have; warn on ones not in
            # the known-droppable list
            exclude = []
            for colname in gdf.columns:
                if colname not in DBCOLS:
                    exclude.append(colname)
                    if colname not in DROPCOLS:
                        LOG.info("%s has more cols: %s", station, exclude)
            gdf2 = gdf[gdf.columns.difference(exclude)]
            processed += len(gdf2.index)
            # Bulk-load via COPY FROM for speed
            output = StringIO()
            gdf2.to_csv(output, sep="\t", header=False, index=False)
            cursor = pgconn.cursor()
            output.seek(0)
            cursor.copy_from(output, "flux%s" % (year, ),
                             columns=gdf2.columns, null="")
            cursor.close()
            pgconn.commit()
        icursor = ipgconn.cursor()
        for _i, row in df.iterrows():
            iemob = Observation(station, "NSTLFLUX", row["valid"])
            # Convert units to what the IEM summary tables expect
            if "t_hmp_avg" in df.columns:
                iemob.data["tmpf"] = temperature(row["t_hmp_avg"],
                                                 "C").value("F")
            if "wnd_spd" in df.columns:
                iemob.data["sknt"] = speed(row["wnd_spd"],
                                           "MPS").value("KT")
            if "press_avg" in df.columns:
                # press_avg appears to be kPa -> Pa -> mb; TODO confirm units
                iemob.data["pres"] = pressure(row["press_avg"] * 1000.0,
                                              "PA").value("MB")
            # these copy straight through without unit conversion
            for cvar, ivar in zip(
                    ["solarrad_w_avg", "rh_hmp_avg", "wnd_dir_compass"],
                    ["srad", "rh", "drct"],
            ):
                if cvar in df.columns:
                    iemob.data[ivar] = row[cvar]
            iemob.save(icursor)
        icursor.close()
        ipgconn.commit()
def plotter(fdict):
    """ Go

    IEM autoplot entry point: chart the average dew point by 10-degree
    wind direction for one ASOS station.  Averaging is done in
    mixing-ratio space, then converted back to a dew point at 1000 mb.

    Args:
        fdict (dict): CGI-style parameters, resolved through
            ``get_autoplot_context``; ``zstation``, ``network`` and
            ``month`` are used.

    Returns:
        (matplotlib.figure.Figure, pandas.DataFrame)
    """
    import matplotlib
    matplotlib.use('agg')
    import matplotlib.pyplot as plt
    pgconn = psycopg2.connect(database='asos', host='iemdb', user='******')
    cursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['zstation']
    network = ctx['network']
    month = ctx['month']
    nt = NetworkTable(network)
    if month == 'all':
        months = range(1, 13)
    elif month == 'fall':
        months = [9, 10, 11]
    elif month == 'winter':
        months = [12, 1, 2]
    elif month == 'spring':
        months = [3, 4, 5]
    elif month == 'summer':
        months = [6, 7, 8]
    else:
        # month is a three-letter abbreviation, e.g. 'jan'
        ts = datetime.datetime.strptime("2000-" + month + "-01", '%Y-%b-%d')
        # make sure it is length two for the trick below in SQL
        months = [ts.month, 999]
    # only consider winds > 3 knots reported on a 10-degree direction
    cursor.execute(
        """
        SELECT drct::int as t, dwpf from alldata where station = %s
        and drct is not null and dwpf is not null and dwpf <= tmpf
        and sknt > 3 and drct::int %% 10 = 0
        and extract(month from valid) in %s
        """, (station, tuple(months)))
    sums = np.zeros((361, ), 'f')
    counts = np.zeros((361, ), 'f')
    for row in cursor:
        r = mixing_ratio(temperature(row[1], 'F')).value('KG/KG')
        sums[row[0]] += r
        counts[row[0]] += 1
    # wrap 360 degrees around to 0 so north is plotted at both ends
    sums[0] = sums[360]
    counts[0] = counts[360]
    rows = []
    for i in range(361):
        if counts[i] < 3:
            # require at least 3 observations per direction bin
            continue
        r = sums[i] / float(counts[i])
        d = dewpoint_from_pq(pressure(1000, 'MB'),
                             mixingratio(r, 'KG/KG')).value('F')
        rows.append(dict(drct=i, dwpf=d))
    df = pd.DataFrame(rows)
    drct = df['drct']
    dwpf = df['dwpf']
    (fig, ax) = plt.subplots(1, 1)
    ax.bar(drct, dwpf, ec='green', fc='green', width=10, align='center')
    ax.grid(True, zorder=11)
    ax.set_title(("%s [%s]\nAverage Dew Point by Wind Direction (month=%s) "
                  "(%s-%s)\n"
                  "(must have 3+ hourly obs > 3 knots at given direction)") %
                 (nt.sts[station]['name'], station, month.upper(),
                  max([1973, nt.sts[station]['archive_begin'].year
                       ]), datetime.datetime.now().year), size=10)
    ax.set_ylabel("Dew Point [F]")
    ax.set_ylim(min(dwpf) - 5, max(dwpf) + 5)
    ax.set_xlim(-5, 365)
    ax.set_xticks([0, 45, 90, 135, 180, 225, 270, 315, 360])
    ax.set_xticklabels(['N', 'NE', 'E', 'SE', 'S', 'SW', 'W', 'NW', 'N'])
    ax.set_xlabel("Wind Direction")
    return fig, df
def process(ncfn):
    """Process this file

    Read one MADIS high-frequency METAR netCDF file, reconstruct a METAR
    string per observation (only QC-passed, unmasked fields are used), and
    save each observation into the IEM database via ``Observation``.

    Args:
        ncfn (str): path to the MADIS HFMETAR netCDF file.

    Side effects: database writes/commits per observation; prints a
    warning when a station is unknown to ``iem.save``.
    """
    pgconn = get_dbconn('iem')
    icursor = pgconn.cursor()
    # map station id -> network for ASOS/AWOS stations
    xref = {}
    icursor.execute("""SELECT id, network from stations
        where network ~* 'ASOS' or network = 'AWOS' and country = 'US'""")
    for row in icursor:
        xref[row[0]] = row[1]
    icursor.close()
    nc = netCDF4.Dataset(ncfn)
    data = {}
    for vname in ['stationId', 'observationTime', 'temperature', 'dewpoint',
                  'altimeter',  # Pa
                  'windDir', 'windSpeed',  # mps
                  'windGust', 'visibility',  # m
                  'precipAccum', 'presWx', 'skyCvr', 'skyCovLayerBase',
                  'autoRemark', 'operatorRemark']:
        data[vname] = nc.variables[vname][:]
        # also pull the QC-result companion variable when present
        vname += "QCR"
        if vname in nc.variables:
            data[vname] = nc.variables[vname][:]
    # Unit conversions from netCDF SI units to METAR/IEM units
    for vname in ['temperature', 'dewpoint']:
        data[vname + "C"] = temperature(data[vname], 'K').value('C')
        data[vname] = temperature(data[vname], 'K').value('F')
    for vname in ['windSpeed', 'windGust']:
        data[vname] = speed(data[vname], 'MPS').value('KT')
    data['altimeter'] = pressure(data['altimeter'], 'PA').value("IN")
    data['skyCovLayerBase'] = distance(data['skyCovLayerBase'],
                                       'M').value("FT")
    data['visibility'] = distance(data['visibility'], 'M').value("MI")
    data['precipAccum'] = distance(data['precipAccum'], 'MM').value("IN")
    for i in range(len(data['stationId'])):
        sid = tostring(data['stationId'][i])
        # IEM ids drop the leading K from ICAO identifiers
        sid3 = sid[1:] if sid[0] == 'K' else sid
        ts = datetime.datetime(1970, 1, 1) + datetime.timedelta(
            seconds=data['observationTime'][i])
        ts = ts.replace(tzinfo=pytz.timezone("UTC"))
        mtr = "%s %sZ AUTO " % (sid, ts.strftime("%d%H%M"))
        network = xref.get(sid3, 'ASOS')
        iem = Observation(sid3, network, ts)
        # 06019G23KT
        if (data['windDirQCR'][i] == 0 and
                data['windDir'][i] is not np.ma.masked):
            iem.data['drct'] = int(data['windDir'][i])
            mtr += "%03i" % (iem.data['drct'], )
        else:
            mtr += "///"
        if (data['windSpeedQCR'][i] == 0 and
                data['windSpeed'][i] is not np.ma.masked):
            iem.data['sknt'] = int(data['windSpeed'][i])
            mtr += "%02i" % (iem.data['sknt'], )
        else:
            mtr += "//"
        if (data['windGustQCR'][i] == 0 and
                data['windGust'][i] is not np.ma.masked and
                data['windGust'][i] > 0):
            iem.data['gust'] = int(data['windGust'][i])
            mtr += "G%02i" % (iem.data['gust'], )
        mtr += "KT "
        if (data['visibilityQCR'][i] == 0 and
                data['visibility'][i] is not np.ma.masked):
            iem.data['vsby'] = float(data['visibility'][i])
            mtr += "%sSM " % (vsbyfmt(iem.data['vsby']), )
        presentwx = tostring(data['presWx'][i])
        if presentwx != '':
            # database storage is comma delimited
            iem.data['wxcodes'] = presentwx.split(" ")
            mtr += "%s " % (presentwx, )
        # cloud layers: coverage code plus base height in hundreds of feet
        for _i, (_c, _l) in enumerate(
                zip(data['skyCvr'][i], data['skyCovLayerBase'][i])):
            if tostring(_c) != '':
                skyc = tostring(_c)
                iem.data['skyc%s' % (_i + 1, )] = skyc
                if skyc != 'CLR':
                    iem.data['skyl%s' % (_i + 1, )] = int(_l)
                    mtr += "%s%03i " % (tostring(_c), int(_l) / 100)
                else:
                    mtr += "CLR "
        t = ""
        tgroup = "T"
        if (data['temperatureQCR'][i] == 0 and
                data['temperature'][i] is not np.ma.masked):
            # iem.data['tmpf'] = float(data['temperature'][i])
            tmpc = float(data['temperatureC'][i])
            t = "%s%02i/" % ("M" if tmpc < 0 else "",
                             tmpc if tmpc > 0 else (0 - tmpc))
            tgroup += "%s%03i" % ("1" if tmpc < 0 else "0",
                                  (tmpc if tmpc > 0 else (0 - tmpc)) * 10.)
        if (data['dewpointQCR'][i] == 0 and
                data['dewpoint'][i] is not np.ma.masked):
            # iem.data['dwpf'] = float(data['dewpoint'][i])
            tmpc = float(data['dewpointC'][i])
            # only encode dew point when a temperature was also encoded
            if t != "":
                t = "%s%s%02i " % (t, "M" if tmpc < 0 else "",
                                   tmpc if tmpc > 0 else 0 - tmpc)
                tgroup += "%s%03i" % ("1" if tmpc < 0 else "0",
                                      (tmpc if tmpc > 0 else (0 - tmpc)
                                       ) * 10.)
        if len(t) > 4:
            mtr += t
        if (data['altimeterQCR'][i] == 0 and
                data['altimeter'][i] is not np.ma.masked):
            iem.data['alti'] = round(data['altimeter'][i], 2)
            mtr += "A%4i " % (iem.data['alti'] * 100., )
        mtr += "RMK "
        if (data['precipAccumQCR'][i] == 0 and
                data['precipAccum'][i] is not np.ma.masked):
            if data['precipAccum'][i] >= 0.01:
                iem.data['phour'] = round(data['precipAccum'][i], 2)
                mtr += "P%04i " % (iem.data['phour'] * 100., )
            elif data['precipAccum'][i] < 0.01 and data['precipAccum'][i] > 0:
                # Trace
                mtr += "P0000 "
                iem.data['phour'] = TRACE_VALUE
        if tgroup != "T":
            mtr += "%s " % (tgroup, )
        autoremark = tostring(data['autoRemark'][i])
        opremark = tostring(data['operatorRemark'][i])
        if autoremark != '' or opremark != '':
            mtr += "%s %s " % (autoremark, opremark)
        mtr += "MADISHF"
        # Eat our own dogfood: only store the raw METAR when it parses
        try:
            Metar.Metar(mtr)
            iem.data['raw'] = mtr
        except Exception as exp:
            pass
        icursor = pgconn.cursor()
        if not iem.save(icursor, force_current_log=True, skip_current=True):
            print(("extract_hfmetar: unknown station? %s %s %s\n%s"
                   ) % (sid3, network, ts, mtr))
        icursor.close()
        pgconn.commit()
def doit(jar, station, days):
    """ Fetch!

    Download one day of history at a time for `station`, parse each METAR
    and insert the observations into the per-year ASOS database table
    (module-level ``ASOS`` connection).

    Args:
        jar: cookie jar / session passed through to ``get_df``.
        station: station identifier.
        days: iterable of dates to fetch.

    Returns:
        int: number of rows inserted.
    """
    valids = []
    inserts = 0
    baddays = 0
    acursor = ASOS.cursor()
    for now in days:
        if now.month == 1 and now.day == 1:
            # new year means a new table; reset the de-dup list
            valids = []
        df = get_df(station, now, jar)
        if df is None:
            baddays += 1
            continue
        for _, row in df.iterrows():
            ob = process_metar(row['FullMetar'], now)
            if ob is None or ob.valid in valids:
                continue
            valids.append(ob.valid)
            # Account for SLP505 actually being 1050.5 and not 950.5 :(
            # If the CSV sea-level pressure disagrees with the decoded METAR
            # by more than 25 mb, trust the CSV value.
            if SLP in row:
                try:
                    pres = pressure(row['Sea Level PressureIn'].value, "IN")
                    diff = pres.value("MB") - ob.mslp
                    if abs(diff) > 25:
                        oldval = ob.mslp
                        ob.mslp = "%.1f" % (pres.value("MB"),)
                        ob.alti = row['Sea Level PressureIn'].value
                        print(('SETTING PRESSURE %s old: %s new: %s'
                               ) % (ob.valid.strftime("%Y/%m/%d %H%M"),
                                    oldval, ob.mslp))
                except Exception as _exp:
                    pass
            # report_type=2 marks these as routine hourly observations
            sql = """INSERT into t""" + str(ob.valid.year) + """ (station,
            valid, tmpf, dwpf, vsby, drct, sknt, gust, p01i, alti, skyc1,
            skyc2, skyc3, skyc4, skyl1, skyl2, skyl3, skyl4, metar, mslp,
            wxcodes, p03i, p06i, p24i, max_tmpf_6hr, max_tmpf_24hr,
            min_tmpf_6hr, min_tmpf_24hr, report_type)
            values (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s,
            %s, %s, %s, %s,%s,%s, %s, %s, %s, %s, %s, %s, %s, %s, 2)
            """
            # force the METAR to ASCII before insert
            try:
                cmtr = ob.metar.decode('utf-8', 'replace').encode('ascii',
                                                                  'replace')
            except Exception as exp:
                print(exp)
                print("Non-ASCII METAR? %s" % (repr(ob.metar),))
                continue
            args = (station, ob.valid, ob.tmpf, ob.dwpf, ob.vsby, ob.drct,
                    ob.sknt, ob.gust, ob.p01i, ob.alti, ob.skyc1, ob.skyc2,
                    ob.skyc3, ob.skyc4, ob.skyl1, ob.skyl2, ob.skyl3,
                    ob.skyl4, cmtr, ob.mslp, ob.wxcodes, ob.p03i, ob.p06i,
                    ob.p24i, ob.max_tmpf_6hr, ob.max_tmpf_24hr,
                    ob.min_tmpf_6hr, ob.min_tmpf_24hr)
            acursor.execute(sql, args)
            inserts += 1
            # commit in batches of 1000 to bound transaction size
            if inserts % 1000 == 0:
                acursor.close()
                ASOS.commit()
                acursor = ASOS.cursor()
    acursor.close()
    ASOS.commit()
    acursor = ASOS.cursor()
    print("%s Days:%s/%s Inserts: %s" % (station, len(days) - baddays,
                                         len(days), inserts))
    return inserts
def process(ncfn):
    """Process this file

    Read one MADIS HFMETAR netCDF file, quality-control each observation,
    synthesize a METAR string from the accepted values, and save each ob to
    the IEM database (current_log), committing after every station.

    Args:
      ncfn (str): filename of the netCDF file to process
    """
    pgconn = get_dbconn('iem')
    icursor = pgconn.cursor()
    # Map 3-char station id -> IEM network, so obs land in the right network.
    xref = {}
    icursor.execute("""SELECT id, network from stations where network ~* 'ASOS' or network = 'AWOS' and country = 'US'""")
    for row in icursor:
        xref[row[0]] = row[1]
    icursor.close()
    nc = ncopen(ncfn)
    data = {}
    # Pull each variable of interest plus its QC companions (QCR: overall
    # QC result flag, QCD: per-check departure values) when present.
    for vname in ['stationId', 'observationTime', 'temperature', 'dewpoint',
                  'altimeter',  # Pa
                  'windDir', 'windSpeed',  # mps
                  'windGust', 'visibility',  # m
                  'precipAccum', 'presWx', 'skyCvr', 'skyCovLayerBase',
                  'autoRemark', 'operatorRemark']:
        data[vname] = nc.variables[vname][:]
        for qc in ['QCR', 'QCD']:
            vname2 = vname + qc
            if vname2 in nc.variables:
                data[vname2] = nc.variables[vname2][:]
    # Convert to the units used downstream; keep a Celsius copy for the
    # METAR T-group while the primary arrays become Fahrenheit.
    for vname in ['temperature', 'dewpoint']:
        data[vname + "C"] = temperature(data[vname], 'K').value('C')
        data[vname] = temperature(data[vname], 'K').value('F')
    for vname in ['windSpeed', 'windGust']:
        data[vname] = speed(data[vname], 'MPS').value('KT')
    data['altimeter'] = pressure(data['altimeter'], 'PA').value("IN")
    data['skyCovLayerBase'] = distance(data['skyCovLayerBase'],
                                       'M').value("FT")
    data['visibility'] = distance(data['visibility'], 'M').value("MI")
    data['precipAccum'] = distance(data['precipAccum'], 'MM').value("IN")
    # Decode the char arrays into python strings.
    stations = chartostring(data['stationId'][:])
    presentwxs = chartostring(data['presWx'][:])
    skycs = chartostring(data['skyCvr'][:])
    autoremarks = chartostring(data['autoRemark'][:])
    opremarks = chartostring(data['operatorRemark'][:])

    def decision(i, fieldname, tolerance):
        """Our decision if we are going to take a HFMETAR value or not

        Returns the value for ob ``i`` when it is unmasked and either passed
        QC (QCR == 0) or its worst QC departure is within ``tolerance``
        (tolerance is in the field's converted units, per the calls below);
        otherwise returns None.
        """
        if data[fieldname][i] is np.ma.masked:
            return None
        if data["%sQCR" % (fieldname, )][i] == 0:
            return data[fieldname][i]
        # Now we have work to do
        departure = np.ma.max(np.ma.abs(data['%sQCD' % (fieldname, )][i, :]))
        # print("departure: %s tolerance: %s" % (departure, tolerance))
        if departure <= tolerance:
            return data[fieldname][i]
        return None

    for i, sid in tqdm(enumerate(stations), total=len(stations),
                       disable=(not sys.stdout.isatty())):
        # Strip the CONUS 'K' prefix to get the 3-char IEM identifier.
        sid3 = sid[1:] if sid[0] == 'K' else sid
        # observationTime is seconds since the unix epoch.
        ts = datetime.datetime(1970, 1, 1) + datetime.timedelta(
            seconds=data['observationTime'][i])
        ts = ts.replace(tzinfo=pytz.utc)
        # Build a synthetic METAR as we go; order of the += appends below
        # must follow METAR field order.
        mtr = "%s %sZ AUTO " % (sid, ts.strftime("%d%H%M"))
        network = xref.get(sid3, 'ASOS')
        iem = Observation(sid3, network, ts)
        # Wind group, e.g. 06019G23KT; '///' / '//' mark missing dir/speed.
        val = decision(i, 'windDir', 15)
        if val is not None:
            iem.data['drct'] = int(val)
            mtr += "%03i" % (iem.data['drct'], )
        else:
            mtr += "///"
        val = decision(i, 'windSpeed', 10)
        if val is not None:
            iem.data['sknt'] = int(val)
            mtr += "%02i" % (iem.data['sknt'], )
        else:
            mtr += "//"
        val = decision(i, 'windGust', 10)
        if val is not None and val > 0:
            iem.data['gust'] = int(val)
            mtr += "G%02i" % (iem.data['gust'], )
        mtr += "KT "
        # Visibility in statute miles, formatted to METAR fractions.
        val = decision(i, 'visibility', 4)
        if val is not None:
            iem.data['vsby'] = float(val)
            mtr += "%sSM " % (vsbyfmt(iem.data['vsby']), )
        presentwx = presentwxs[i]
        if presentwx != '':
            # database storage is comma delimited
            iem.data['wxcodes'] = presentwx.split(" ")
            mtr += "%s " % (presentwx, )
        # Sky coverage layers; level encoded in hundreds of feet.
        for _i, (skyc, _l) in enumerate(zip(skycs[i],
                                            data['skyCovLayerBase'][i])):
            if skyc != '':
                iem.data['skyc%s' % (_i + 1, )] = skyc
                if skyc != 'CLR':
                    iem.data['skyl%s' % (_i + 1, )] = int(_l)
                    mtr += "%s%03i " % (skyc, int(_l) / 100)
                else:
                    mtr += "CLR "
        # t is the body temperature group (e.g. 23/M04); tgroup is the RMK
        # tenths-precision T-group (e.g. T02331039).
        t = ""
        tgroup = "T"
        val = decision(i, 'temperature', 10)
        if val is not None:
            # Recall the pain enabling this
            # iem.data['tmpf'] = float(data['temperature'][i])
            tmpc = float(data['temperatureC'][i])
            t = "%s%02i/" % ("M" if tmpc < 0 else "",
                             tmpc if tmpc > 0 else (0 - tmpc))
            tgroup += "%s%03i" % ("1" if tmpc < 0 else "0",
                                  (tmpc if tmpc > 0 else (0 - tmpc)) * 10.)
        val = decision(i, 'dewpoint', 10)
        if val is not None:
            # iem.data['dwpf'] = float(data['dewpoint'][i])
            tmpc = float(data['dewpointC'][i])
            # Only emit dewpoint when a temperature group exists to pair it.
            if t != "":
                t = "%s%s%02i " % (t, "M" if tmpc < 0 else "",
                                   tmpc if tmpc > 0 else 0 - tmpc)
                tgroup += "%s%03i" % ("1" if tmpc < 0 else "0",
                                      (tmpc if tmpc > 0 else (0 - tmpc)) * 10.)
        # len > 4 implies both temperature and dewpoint made it into t.
        if len(t) > 4:
            mtr += t
        # Altimeter in inches; encoded as hundredths (A2992 style).
        val = decision(i, 'altimeter', 20)
        if val is not None:
            iem.data['alti'] = float(round(val, 2))
            mtr += "A%4i " % (iem.data['alti'] * 100., )
        mtr += "RMK "
        # Precip accumulation in inches; encoded as hundredths (Pnnnn).
        val = decision(i, 'precipAccum', 25)
        if val is not None:
            if val >= 0.01:
                iem.data['phour'] = float(round(val, 2))
                mtr += "P%04i " % (iem.data['phour'] * 100., )
            elif val < 0.01 and val > 0:
                # Trace
                mtr += "P0000 "
                iem.data['phour'] = TRACE_VALUE
        if tgroup != "T":
            mtr += "%s " % (tgroup, )
        if autoremarks[i] != '' or opremarks[i] != '':
            mtr += "%s %s " % (autoremarks[i], opremarks[i])
        mtr += "MADISHF"
        # Eat our own dogfood
        # Re-parse the synthesized METAR to ensure it is well formed before
        # storing it; skip the ob entirely if the parse fails.
        try:
            Metar.Metar(mtr)
            iem.data['raw'] = mtr
        except Exception as exp:
            print("dogfooding extract_hfmetar %s resulted in %s" % (mtr,
                                                                    exp))
            continue
        # Diagnostic: flag np.float32 leakage, which the DB layer may
        # mishandle.
        for key in iem.data:
            if isinstance(iem.data[key], np.float32):
                print("key: %s type: %s" % (key, type(iem.data[key])))
        icursor = pgconn.cursor()
        if not iem.save(icursor, force_current_log=True, skip_current=True):
            print(("extract_hfmetar: unknown station? %s %s %s\n%s"
                   ) % (sid3, network, ts, mtr))
        icursor.close()
        # Commit per ob so a crash mid-file loses at most one observation.
        pgconn.commit()