def computeOthers(d):
    """Compute derived values needed for output."""
    r = {}
    for sid in d.keys():
        ob = d[sid]
        ob["ticks"] = calendar.timegm(ob["utc_valid"].timetuple())
        if ob["sknt"] is not None:
            # NB: 1.17 is the approximate knots -> MPH factor used here
            # (the exact factor is ~1.15078)
            ob["sped"] = ob["sknt"] * 1.17
        if ob.get("tmpf") is not None and ob.get("dwpf") is not None:
            tmpf = temperature(ob["tmpf"], "F")
            dwpf = temperature(ob["dwpf"], "F")
            ob["relh"] = meteorology.relh(tmpf, dwpf).value("%")
        else:
            ob["relh"] = None
        if ob["relh"] == "M":
            ob["relh"] = None
        if (
            ob.get("tmpf") is not None
            and ob.get("dwpf") is not None
            and ob.get("sped") is not None
        ):
            ob["feel"] = meteorology.mcalc_feelslike(
                masked_array([ob["tmpf"]], units("degF"), mask=[False]),
                masked_array([ob["dwpf"]], units("degF"), mask=[False]),
                masked_array(
                    [ob["sped"]], units("mile per hour"), mask=[False]
                ),
            ).to(units("degF")).magnitude[0]
        else:
            ob["feel"] = None
        if ob["feel"] == "M":
            ob["feel"] = None
        ob["altiTend"] = "S"
        ob["drctTxt"] = util.drct2text(ob["drct"])
        if ob["max_drct"] is None:
            ob["max_drct"] = 0
        ob["max_drctTxt"] = util.drct2text(ob["max_drct"])
        ob["20gu"] = 0
        if ob["gust"] is not None:
            ob["gmph"] = ob["gust"] * 1.17
        if ob["max_gust"] is not None:
            ob["max_sped"] = ob["max_gust"] * 1.17
        else:
            ob["max_sped"] = 0
        ob["pday"] = 0 if ob["pday"] is None else ob["pday"]
        ob["pmonth"] = 0 if ob["pmonth"] is None else ob["pmonth"]
        ob["gtim"] = "0000"
        ob["gtim2"] = "12:00 AM"
        if ob["max_gust_ts"] is not None and ob["max_gust_ts"] != "null":
            ob["gtim"] = ob["max_gust_ts"].strftime("%H%M")
            ob["gtim2"] = ob["max_gust_ts"].strftime("%-I:%M %p")
        r[sid] = ob
    return r
def mcalc_feelslike(tmpf, dwpf, smps):
    """Compute a feels-like temperature.

    Args:
      tmpf (temperature): The dry bulb temperature
      dwpf (temperature): The dew point temperature
      smps (speed): The wind speed

    Returns:
      temperature: The feels-like temperature
    """
    is_not_scalar = isinstance(tmpf.m, (list, tuple, np.ndarray))
    rh = mcalc.relative_humidity_from_dewpoint(tmpf, dwpf)
    # NB: update this once metpy 0.11 is released
    app = mcalc.apparent_temperature(tmpf, rh, smps)
    if hasattr(app, "mask"):
        # Where apparent_temperature masked out a value, fall back to the
        # dry bulb temperature
        if is_not_scalar:
            app[app.mask] = tmpf[app.mask]
        else:
            app = tmpf
    elif hasattr(tmpf, "mask"):
        # Carry the input mask through to the result
        app = masked_array(app.m, app.units)
        app.mask = tmpf.mask
    return app
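# Usage sketch for mcalc_feelslike (mirroring the tests later in this
# section): pint quantities in, pint quantity out, with masked elements
# falling back to the dry bulb temperature. Values are illustrative only.
import numpy as np
from metpy.units import masked_array, units

tmpf = masked_array([85.0, np.nan], units("degF"), mask=[False, True])
dwpf = masked_array([70.0, 60.0], units("degF"), mask=[False, False])
smps = masked_array([5.0, 10.0], units("meter per second"), mask=[False, False])
feels = mcalc_feelslike(tmpf, dwpf, smps)
print(feels.to(units("degF")).m)  # second element stays masked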
def plot_maxmin(ts, field):
    """Generate our plot."""
    nc = ncopen(ts.strftime("/mesonet/data/ndfd/%Y%m%d%H_ndfd.nc"))
    if field == "high_tmpk":
        data = np.max(nc.variables[field][:], 0)
    elif field == "low_tmpk":
        data = np.min(nc.variables[field][:], 0)
    data = masked_array(data, units.degK).to(units.degF).m
    subtitle = (
        "Based on National Digital Forecast Database (NDFD) "
        "00 UTC Forecast made %s"
    ) % (ts.strftime("%-d %b %Y"),)
    mp = MapPlot(
        title="NWS NDFD 7 Day (%s through %s) %s Temperature"
        % (
            ts.strftime("%-d %b"),
            (ts + datetime.timedelta(days=6)).strftime("%-d %b"),
            "Maximum" if field == "high_tmpk" else "Minimum",
        ),
        subtitle=subtitle,
        sector="iailin",
    )
    mp.pcolormesh(
        nc.variables["lon"][:],
        nc.variables["lat"][:],
        data,
        np.arange(10, 121, 10),
        cmap=plt.get_cmap("jet"),
        units="Degrees F",
    )
    mp.drawcounties()
    pqstr = (
        "data c %s summary/cb_ndfd_7day_%s.png summary/cb_ndfd_7day_%s.png png"
    ) % (
        ts.strftime("%Y%m%d%H%M"),
        "max" if field == "high_tmpk" else "min",
        "max" if field == "high_tmpk" else "min",
    )
    mp.postprocess(pqstr=pqstr)
    mp.close()
    nc.close()
def generic_gridder(day, df, idx):
    """Generic gridding algorithm for easy variables."""
    data = df[idx].values
    coordinates = (df["lon"].values, df["lat"].values)
    region = [XAXIS[0], XAXIS[-1], YAXIS[0], YAXIS[-1]]
    projection = pyproj.Proj(proj="merc", lat_ts=df["lat"].mean())
    spacing = 0.5
    chain = vd.Chain(
        [
            ("mean", vd.BlockReduce(np.mean, spacing=spacing * 111e3)),
            ("spline", vd.Spline(damping=1e-10, mindist=100e3)),
        ]
    )
    train, test = vd.train_test_split(
        projection(*coordinates), data, random_state=0
    )
    chain.fit(*train)
    score = chain.score(*test)
    shape = (len(YAXIS), len(XAXIS))
    grid = chain.grid(
        region=region,
        shape=shape,
        projection=projection,
        dims=["latitude", "longitude"],
        data_names=["precip"],
    )
    res = grid.to_array()
    res = np.ma.where(res < 0, 0, res)
    print(
        "%s %s rows for %s column min:%.3f max:%.3f score: %.3f"
        % (day, len(df.index), idx, np.nanmin(res), np.nanmax(res), score)
    )
    return masked_array(res, mpunits("inch"))
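# Usage sketch for generic_gridder, assuming XAXIS/YAXIS are the 1-D grid
# axes (longitudes/latitudes) and the DataFrame carries lon, lat, and the
# target column; the verde chain block-averages stations onto ~55 km blocks
# before fitting the spline. The toy grid and station values below are
# hypothetical, for illustration only.
import numpy as np
import pandas as pd

XAXIS = np.arange(-104.0, -80.0, 0.25)
YAXIS = np.arange(36.0, 49.0, 0.25)
rng = np.random.default_rng(0)
df = pd.DataFrame(
    {
        "lon": rng.uniform(XAXIS[0], XAXIS[-1], 200),
        "lat": rng.uniform(YAXIS[0], YAXIS[-1], 200),
        "precipdata": rng.uniform(0.0, 2.0, 200),
    }
)
res = generic_gridder("2020-08-10", df, "precipdata")  # masked_array, inches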
def make_wind_plot(ctx, ptype):
    """Generate a wind plot, please."""
    df = ctx["df"]
    (fig, ax) = plt.subplots(1, 1)
    gust = df["gust_sknt"].values
    sknt = df["sknt"].values
    unit = "kt"
    if ptype == "wind":
        gust = masked_array(gust, units("knots")).to(units("miles per hour")).m
        sknt = masked_array(sknt, units("knots")).to(units("miles per hour")).m
        unit = "mph"
    ax.bar(
        df["local_valid"].values,
        gust,
        zorder=1,
        width=1.0 / 1440.0,
        label="Gust",
    )
    ax.bar(
        df["local_valid"].values,
        sknt,
        zorder=2,
        width=1.0 / 1440.0,
        label="Speed",
    )
    ax.set_ylabel(f"Wind Speed / Gust [{unit}]")
    ax.grid(True)
    ax.legend(loc="best")
    ax2 = ax.twinx()
    ax2.scatter(df["local_valid"].values, df["drct"].values, c="k")
    ax2.set_ylabel("Wind Direction")
    ax2.set_yticks(np.arange(0, 361, 45))
    ax2.set_yticklabels(["N", "NE", "E", "SE", "S", "SW", "W", "NW", "N"])
    ax2.set_ylim(-1, 361)
    ax.set_title(
        (
            f"{get_ttitle(df)} %s (%s)\n"
            "One Minute Interval Wind Speed + Direction, %s missing minutes\n"
            f"Peak Speed: %.1f {unit} Peak Gust: %.1f {unit}"
        )
        % (
            ctx["_nt"].sts[ctx["zstation"]]["name"],
            ctx["zstation"],
            df["sknt"].isna().sum(),
            np.nanmax(sknt),
            np.nanmax(gust),
        )
    )
    do_xaxis(ctx, ax)
    ax.set_xlim(df["local_valid"].min(), df["local_valid"].max())
    return fig
def main():
    """Go Main Go."""
    # prevent a clock drift issue
    sts = utc(2020, 8, 10, 16)
    ets = utc(2020, 8, 11, 3)
    edate = ets.strftime("%Y-%m-%dT%H:%M:%SZ")
    sdate = sts.strftime("%Y-%m-%dT%H:%M:%SZ")
    props = get_properties()
    apikey = props["dtn.apikey"]
    headers = {"accept": "application/json", "apikey": apikey}
    for nwsli in NT.sts:
        idot_id = NT.sts[nwsli]["remote_id"]
        if idot_id is None:
            continue
        URI = (
            f"https://api.dtn.com/weather/stations/IA{idot_id:03}/"
            f"atmospheric-observations?startDate={sdate}"
            f"&endDate={edate}&units=us&precision=0"
        )
        req = requests.get(URI, timeout=60, headers=headers)
        if req.status_code != 200:
            LOG.info("Fetch %s got status_code %s", URI, req.status_code)
            continue
        res = req.json()
        if not res:
            continue
        try:
            df = pd.DataFrame(res)
        except Exception as exp:
            LOG.info(
                "DataFrame construction failed with %s\n res: %s", exp, res
            )
            continue
        if df.empty:
            continue
        df = df.fillna(np.nan)
        df["valid"] = pd.to_datetime(df["utcTime"])
        df["gust"] = (
            masked_array(df["windGust"].values, units("miles per hour"))
            .to(units("knots"))
            .m
        )
        df["sknt"] = (
            masked_array(df["windSpeed"].values, units("miles per hour"))
            .to(units("knots"))
            .m
        )
        df = df.replace({np.nan: None})
        cursor = DBCONN.cursor()
        process(cursor, df, nwsli)
        cursor.close()
        DBCONN.commit()
def wrapper(*args, **kwds):
    nargs = []
    a_cube = None
    for arg in args:
        if isinstance(arg, Cube):
            # Remember a cube to source output coordinates from, preferring
            # a non-scalar one
            if arg.ndim > 0:
                a_cube = arg
            elif a_cube is None:
                a_cube = arg
            # Try each udunits format until one parses as pint units
            un = None
            for ut_format in set(cf_units.UT_FORMATS):
                try:
                    un = metunits.units(arg.units.format(ut_format))
                except Exception:
                    pass
            if un is None:
                raise UndefinedMetUnitError(
                    'Unable to convert cube units to pint units')
            elif isinstance(un, (int, float)):
                un *= metunits.units('dimensionless')
            if np.ma.is_masked(arg.data):
                q = metunits.masked_array(arg.data, data_units=un)
            else:
                q = arg.data * un
            nargs.append(q)
        else:
            nargs.append(arg)
    out = f(*nargs, **kwds)
    if isinstance(out, (tuple, list, set)):
        res = []
        for iout in out:
            ires = Cube(
                iout.magnitude,
                dim_coords_and_dims=[(c, a_cube.coord_dims(c))
                                     for c in a_cube.dim_coords],
                aux_coords_and_dims=[(c, a_cube.coord_dims(c))
                                     for c in a_cube.aux_coords],
                units=(str(iout.units)
                       .replace(' ** ', '^')
                       .replace(' * ', ' ')))
            res.append(ires)
        res = tuple(res)
    else:
        res = Cube(
            out.magnitude,
            dim_coords_and_dims=[(c, a_cube.coord_dims(c))
                                 for c in a_cube.dim_coords],
            aux_coords_and_dims=[(c, a_cube.coord_dims(c))
                                 for c in a_cube.aux_coords],
            units=(str(out.units)
                   .replace(' ** ', '^')
                   .replace(' * ', ' ')))
    return res
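# The wrapper above reads like the closure body of a decorator that bridges
# iris Cubes to pint-aware functions: the enclosing scope must supply `f`.
# A minimal sketch of how such a decorator could be assembled; the name
# cube_aware and the pass-through body are hypothetical scaffolding, not
# from the source.
import functools

def cube_aware(f):
    """Hypothetical decorator supplying `f` to a wrapper like the one above."""
    @functools.wraps(f)
    def wrapper(*args, **kwds):
        # ... unwrap Cubes to pint quantities, call f(*nargs, **kwds),
        # then rebuild Cubes from the result, as shown above ...
        return f(*args, **kwds)  # placeholder pass-through for the sketch
    return wrapper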
def test_vectorized():
    """See that heatindex and windchill can do lists"""
    temp = datatypes.temperature([0, 10], "F")
    sknt = datatypes.speed([30, 40], "MPH")
    val = meteorology.windchill(temp, sknt).value("F")
    assert abs(val[0] - -24.50) < 0.01
    t = datatypes.temperature([80.0, 90.0], "F")
    td = datatypes.temperature([70.0, 60.0], "F")
    hdx = meteorology.heatindex(t, td)
    assert abs(hdx.value("F")[0] - 83.93) < 0.01
    tmpf = np.array([80.0, 90.0]) * units("degF")
    dwpf = np.array([70.0, 60.0]) * units("degF")
    smps = np.array([10.0, 20.0]) * units("meter per second")
    feels = meteorology.mcalc_feelslike(tmpf, dwpf, smps)
    assert abs(feels.to(units("degF")).magnitude[0] - 83.15) < 0.01
    tmpf = masked_array([80.0, np.nan], units("degF"), mask=[False, True])
    feels = meteorology.mcalc_feelslike(tmpf, dwpf, smps)
    assert abs(feels.to(units("degF")).magnitude[0] - 83.15) < 0.01
    assert feels.mask[1]
def process(ncfn):
    """Process this file."""
    pgconn = get_dbconn("iem")
    icursor = pgconn.cursor()
    xref = {}
    icursor.execute(
        "SELECT id, network from stations where "
        "network ~* 'ASOS' or network = 'AWOS' and country = 'US'"
    )
    for row in icursor:
        xref[row[0]] = row[1]
    icursor.close()
    nc = ncopen(ncfn)
    data = {}
    for vname in [
        "stationId",
        "observationTime",
        "temperature",
        "dewpoint",
        "altimeter",  # Pa
        "windDir",
        "windSpeed",  # mps
        "windGust",  # mps
        "visibility",  # m
        "precipAccum",
        "presWx",
        "skyCvr",
        "skyCovLayerBase",
        "autoRemark",
        "operatorRemark",
    ]:
        data[vname] = nc.variables[vname][:]
        for qc in ["QCR", "QCD"]:
            vname2 = vname + qc
            if vname2 in nc.variables:
                data[vname2] = nc.variables[vname2][:]
    for vname in ["temperature", "dewpoint"]:
        data[vname + "C"] = temperature(data[vname], "K").value("C")
        data[vname] = temperature(data[vname], "K").value("F")
    for vname in ["windSpeed", "windGust"]:
        data[vname] = (
            masked_array(data[vname], units("meter / second"))
            .to(units("knots"))
            .magnitude
        )
    data["altimeter"] = pressure(data["altimeter"], "PA").value("IN")
    data["skyCovLayerBase"] = distance(data["skyCovLayerBase"], "M").value("FT")
    data["visibility"] = distance(data["visibility"], "M").value("MI")
    data["precipAccum"] = distance(data["precipAccum"], "MM").value("IN")
    stations = chartostring(data["stationId"][:])
    presentwxs = chartostring(data["presWx"][:])
    skycs = chartostring(data["skyCvr"][:])
    autoremarks = chartostring(data["autoRemark"][:])
    opremarks = chartostring(data["operatorRemark"][:])

    def decision(i, fieldname, tolerance):
        """Our decision if we are going to take a HFMETAR value or not."""
        if data[fieldname][i] is np.ma.masked:
            return None
        if data["%sQCR" % (fieldname,)][i] == 0:
            return data[fieldname][i]
        # Now we have work to do
        departure = np.ma.max(np.ma.abs(data["%sQCD" % (fieldname,)][i, :]))
        # print("departure: %s tolerance: %s" % (departure, tolerance))
        if departure <= tolerance:
            return data[fieldname][i]
        return None

    for i, sid in tqdm(
        enumerate(stations),
        total=len(stations),
        disable=(not sys.stdout.isatty()),
    ):
        if len(sid) < 3:
            continue
        sid3 = sid[1:] if sid.startswith("K") else sid
        ts = datetime.datetime(1970, 1, 1) + datetime.timedelta(
            seconds=data["observationTime"][i]
        )
        ts = ts.replace(tzinfo=pytz.UTC)
        mtr = "%s %sZ AUTO " % (sid, ts.strftime("%d%H%M"))
        network = xref.get(sid3, "ASOS")
        iem = Observation(sid3, network, ts)
        # 06019G23KT
        val = decision(i, "windDir", 15)
        if val is not None:
            iem.data["drct"] = int(val)
            mtr += "%03i" % (iem.data["drct"],)
        else:
            mtr += "///"
        val = decision(i, "windSpeed", 10)
        if val is not None:
            iem.data["sknt"] = int(val)
            mtr += "%02i" % (iem.data["sknt"],)
        else:
            mtr += "//"
        val = decision(i, "windGust", 10)
        if val is not None and val > 0:
            iem.data["gust"] = int(val)
            mtr += "G%02i" % (iem.data["gust"],)
        mtr += "KT "
        val = decision(i, "visibility", 4)
        if val is not None:
            iem.data["vsby"] = float(val)
            mtr += "%sSM " % (vsbyfmt(iem.data["vsby"]),)
        presentwx = presentwxs[i]
        if presentwx != "":
            # database storage is comma delimited
            iem.data["wxcodes"] = presentwx.split(" ")
            mtr += "%s " % (presentwx,)
        for _i, (skyc, _l) in enumerate(
            zip(skycs[i], data["skyCovLayerBase"][i])
        ):
            if skyc != "":
                iem.data["skyc%s" % (_i + 1,)] = skyc
                if skyc != "CLR":
                    iem.data["skyl%s" % (_i + 1,)] = int(_l)
                    mtr += "%s%03i " % (skyc, int(_l) / 100)
                else:
                    mtr += "CLR "
        t = ""
        tgroup = "T"
        val = decision(i, "temperature", 10)
        if val is not None:
            # Recall the pain enabling this
            # iem.data['tmpf'] = float(data['temperature'][i])
            tmpc = float(data["temperatureC"][i])
            t = "%s%02i/" % (
                "M" if tmpc < 0 else "",
                tmpc if tmpc > 0 else (0 - tmpc),
            )
            tgroup += "%s%03i" % (
                "1" if tmpc < 0 else "0",
                (tmpc if tmpc > 0 else (0 - tmpc)) * 10.0,
            )
        val = decision(i, "dewpoint", 10)
        if val is not None:
            # iem.data['dwpf'] = float(data['dewpoint'][i])
            tmpc = float(data["dewpointC"][i])
            if t != "":
                t = "%s%s%02i " % (
                    t,
                    "M" if tmpc < 0 else "",
                    tmpc if tmpc > 0 else 0 - tmpc,
                )
                tgroup += "%s%03i" % (
                    "1" if tmpc < 0 else "0",
                    (tmpc if tmpc > 0 else (0 - tmpc)) * 10.0,
                )
        if len(t) > 4:
            mtr += t
        val = decision(i, "altimeter", 20)
        if val is not None:
            iem.data["alti"] = float(round(val, 2))
            mtr += "A%4i " % (iem.data["alti"] * 100.0,)
        mtr += "RMK "
        val = decision(i, "precipAccum", 25)
        if val is not None:
            if val > 0.009:
                iem.data["phour"] = float(round(val, 2))
                mtr += "P%04i " % (iem.data["phour"] * 100.0,)
            elif val > 0:
                # Trace
                mtr += "P0000 "
                iem.data["phour"] = TRACE_VALUE
        if tgroup != "T":
            mtr += "%s " % (tgroup,)
        if autoremarks[i] != "" or opremarks[i] != "":
            mtr += "%s %s " % (autoremarks[i], opremarks[i])
        mtr += "MADISHF"
        # Eat our own dogfood
        try:
            Metar.Metar(mtr)
            iem.data["raw"] = mtr
        except Exception as exp:
            print("dogfooding extract_hfmetar %s resulted in %s" % (mtr, exp))
            continue
        for key in iem.data:
            if isinstance(iem.data[key], np.float32):
                print("key: %s type: %s" % (key, type(iem.data[key])))
        icursor = pgconn.cursor()
        if not iem.save(icursor, force_current_log=True, skip_current=True):
            print(
                "extract_hfmetar: unknown station? %s %s %s\n%s"
                % (sid3, network, ts, mtr)
            )
        icursor.close()
        pgconn.commit()
def plotter(fdict):
    """ Go """
    ctx = get_autoplot_context(fdict, get_description())
    ptype = ctx["ptype"]
    date = ctx["date"]
    varname = ctx["var"]
    csector = ctx["csector"]
    title = date.strftime("%-d %B %Y")
    mp = MapPlot(
        sector=("state" if len(csector) == 2 else csector),
        state=ctx["csector"],
        axisbg="white",
        nocaption=True,
        title="IEM Reanalysis of %s for %s" % (PDICT.get(varname), title),
        subtitle="Data derived from various NOAA datasets",
    )
    (west, east, south, north) = mp.ax.get_extent(ccrs.PlateCarree())
    i0, j0 = iemre.find_ij(west, south)
    i1, j1 = iemre.find_ij(east, north)
    jslice = slice(j0, j1)
    islice = slice(i0, i1)
    idx0 = iemre.daily_offset(date)
    ncfn = iemre.get_daily_ncname(date.year)
    if not os.path.isfile(ncfn):
        raise NoDataFound("No Data Found.")
    with ncopen(ncfn) as nc:
        lats = nc.variables["lat"][jslice]
        lons = nc.variables["lon"][islice]
        cmap = ctx["cmap"]
        if varname in ["rsds", "power_swdn"]:
            # Value is in W m**-2, we want MJ
            multi = (86400.0 / 1000000.0) if varname == "rsds" else 1
            data = nc.variables[varname][idx0, jslice, islice] * multi
            plot_units = "MJ d-1"
            clevs = np.arange(0, 37, 3.0)
            clevs[0] = 0.01
            clevstride = 1
        elif varname in ["wind_speed"]:
            data = (
                masked_array(
                    nc.variables[varname][idx0, jslice, islice],
                    units("meter / second"),
                )
                .to(units("mile / hour"))
                .m
            )
            plot_units = "mph"
            clevs = np.arange(0, 41, 2)
            clevs[0] = 0.01
            clevstride = 2
        elif varname in ["p01d", "p01d_12z", "snow_12z", "snowd_12z"]:
            # Value is in mm, we want inch
            data = (
                masked_array(
                    nc.variables[varname][idx0, jslice, islice], units("mm")
                )
                .to(units("inch"))
                .m
            )
            plot_units = "inch"
            clevs = np.arange(0, 0.25, 0.05)
            clevs = np.append(clevs, np.arange(0.25, 3.0, 0.25))
            clevs = np.append(clevs, np.arange(3.0, 10.0, 1))
            clevs[0] = 0.01
            clevstride = 1
            cmap = stretch_cmap(ctx["cmap"], clevs)
        elif varname in [
            "high_tmpk",
            "low_tmpk",
            "high_tmpk_12z",
            "low_tmpk_12z",
            "avg_dwpk",
        ]:
            # Value is in degK, we want degF
            data = (
                masked_array(
                    nc.variables[varname][idx0, jslice, islice], units("degK")
                )
                .to(units("degF"))
                .m
            )
            plot_units = "F"
            clevs = np.arange(-30, 120, 5)
            clevstride = 2
        elif varname in ["range_tmpk", "range_tmpk_12z"]:
            vname1 = "high_tmpk%s" % (
                "_12z" if varname == "range_tmpk_12z" else "",
            )
            vname2 = "low_tmpk%s" % (
                "_12z" if varname == "range_tmpk_12z" else "",
            )
            d1 = nc.variables[vname1][idx0, jslice, islice]
            d2 = nc.variables[vname2][idx0, jslice, islice]
            data = (
                masked_array(d1, units("degK")).to(units("degF")).m
                - masked_array(d2, units("degK")).to(units("degF")).m
            )
            plot_units = "F"
            clevs = np.arange(0, 61, 5)
            clevstride = 2
    if np.ma.is_masked(np.max(data)):
        raise NoDataFound("Data Unavailable")
    x, y = np.meshgrid(lons, lats)
    if ptype == "c":
        # in the case of contour, use the centroids on the grids
        mp.contourf(
            x + 0.125,
            y + 0.125,
            data,
            clevs,
            clevstride=clevstride,
            units=plot_units,
            ilabel=True,
            labelfmt="%.0f",
            cmap=cmap,
        )
    else:
        mp.pcolormesh(
            x,
            y,
            data,
            clevs,
            clevstride=clevstride,
            cmap=cmap,
            units=plot_units,
        )
    return mp.fig
def grid_day(ts, ds):
    """Do our gridding."""
    mybuf = 2.0
    if ts.year > 1927:
        sql = """
           SELECT ST_x(s.geom) as lon, ST_y(s.geom) as lat, s.state,
           s.name, s.id as station,
           (CASE WHEN pday >= 0 then pday else null end) as precipdata,
           (CASE WHEN max_tmpf > -50 and max_tmpf < 130
               then max_tmpf else null end) as highdata,
           (CASE WHEN min_tmpf > -50 and min_tmpf < 95
               then min_tmpf else null end) as lowdata,
           (CASE WHEN max_dwpf > -50 and max_dwpf < 130
               then max_dwpf else null end) as highdwpf,
           (CASE WHEN min_dwpf > -50 and min_dwpf < 95
               then min_dwpf else null end) as lowdwpf,
           (CASE WHEN avg_sknt >= 0 and avg_sknt < 100
               then avg_sknt else null end) as avgsknt,
           (CASE WHEN min_rh > 0 and min_rh < 101
               then min_rh else null end) as minrh,
           (CASE WHEN max_rh > 0 and max_rh < 101
               then max_rh else null end) as maxrh
           from summary_%s c, stations s WHERE day = '%s' and
           ST_Contains(
               ST_GeomFromEWKT('SRID=4326;POLYGON((%s %s, %s %s, %s %s, %s %s, %s %s))'),
               geom) and (s.network = 'AWOS' or s.network ~* 'ASOS') and
           c.iemid = s.iemid
           """ % (
            ts.year,
            ts.strftime("%Y-%m-%d"),
            iemre.WEST - mybuf,
            iemre.SOUTH - mybuf,
            iemre.WEST - mybuf,
            iemre.NORTH + mybuf,
            iemre.EAST + mybuf,
            iemre.NORTH + mybuf,
            iemre.EAST + mybuf,
            iemre.SOUTH - mybuf,
            iemre.WEST - mybuf,
            iemre.SOUTH - mybuf,
        )
        df = read_sql(sql, PGCONN)
    else:
        df = read_sql(
            """
            WITH mystations as (
                SELECT id, ST_X(geom) as lon, ST_Y(geom) as lat, state, name
                from stations where ST_Contains(
                    ST_GeomFromEWKT('SRID=4326;POLYGON((%s %s, %s %s, %s %s, %s %s, %s %s))'),
                    geom) and network ~* 'CLIMATE' and (temp24_hour is null or
                    temp24_hour between 4 and 10)
            )
            SELECT m.lon, m.lat, m.state, m.id as station, m.name as name,
            precip as precipdata, snow as snowdata, snowd as snowddata,
            high as highdata, low as lowdata, null as highdwpf,
            null as lowdwpf, null as avgsknt, null as minrh, null as maxrh
            from alldata a JOIN mystations m ON (a.station = m.id)
            WHERE a.day = %s
            """,
            COOP_PGCONN,
            params=(
                iemre.WEST - mybuf,
                iemre.SOUTH - mybuf,
                iemre.WEST - mybuf,
                iemre.NORTH + mybuf,
                iemre.EAST + mybuf,
                iemre.NORTH + mybuf,
                iemre.EAST + mybuf,
                iemre.SOUTH - mybuf,
                iemre.WEST - mybuf,
                iemre.SOUTH - mybuf,
                ts,
            ),
        )
    if len(df.index) < 4:
        LOG.info(
            "%s has %02i entries, FAIL", ts.strftime("%Y-%m-%d"), len(df.index)
        )
        return
    res = generic_gridder(df, "highdata")
    ds["high_tmpk"].values = datatypes.temperature(res, "F").value("K")
    res = generic_gridder(df, "lowdata")
    ds["low_tmpk"].values = datatypes.temperature(res, "F").value("K")
    hres = generic_gridder(df, "highdwpf")
    lres = generic_gridder(df, "lowdwpf")
    if hres is not None and lres is not None:
        ds["avg_dwpk"].values = datatypes.temperature(
            (hres + lres) / 2.0, "F"
        ).value("K")
    res = generic_gridder(df, "avgsknt")
    if res is not None:
        ds["wind_speed"].values = (
            masked_array(res, units.knots).to(units.meters / units.second).m
        )
        LOG.debug(
            "wind_speed min: %s max: %s",
            np.nanmin(ds["wind_speed"].values),
            np.nanmax(ds["wind_speed"].values),
        )
    res = generic_gridder(df, "minrh")
    if res is not None:
        ds["min_rh"].values = res
    res = generic_gridder(df, "maxrh")
    if res is not None:
        ds["max_rh"].values = res
def mm2inch(val):
    """Convert a value (or array) in millimeters to inches."""
    return masked_array(val, units("mm")).to(units("inch")).m
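# Quick check of mm2inch: because the conversion goes through MetPy's
# masked_array, masked (missing) entries survive the unit conversion.
import numpy as np

precip_mm = np.ma.masked_invalid([25.4, np.nan, 50.8])
print(mm2inch(precip_mm))  # [1.0 -- 2.0] with the mask preserved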
from metpy.units import masked_array, units
from netCDF4 import Dataset

###############################
# Download the data from the National Weather Service.
dt = datetime.utcnow() - timedelta(days=1)  # This should always be available
url = ('http://water.weather.gov/precip/downloads/{dt:%Y/%m/%d}/nws_precip_1day_'
       '{dt:%Y%m%d}_conus.nc'.format(dt=dt))
data = urlopen(url).read()

# Work around bug where it needs an existing netCDF file
Dataset('temp.nc', 'w').close()
nc = Dataset('temp.nc', 'r', memory=data)

###############################
# Pull the needed information out of the netCDF file
prcpvar = nc.variables['observation']
data = masked_array(prcpvar[:], units(prcpvar.units.lower())).to('mm')
x = nc.variables['x'][:]
y = nc.variables['y'][:]
proj_var = nc.variables[prcpvar.grid_mapping]

###############################
# Set up the projection information within CartoPy
globe = ccrs.Globe(semimajor_axis=proj_var.earth_radius)
proj = ccrs.Stereographic(
    central_latitude=90.0,
    central_longitude=proj_var.straight_vertical_longitude_from_pole,
    true_scale_latitude=proj_var.standard_parallel,
    globe=globe)

###############################
# Create the figure and plot the data

# create figure and axes instances
fig = plt.figure(figsize=(8, 8))
def get_df(network, station, date):
    """Figure out how to get the data being requested."""
    if date == datetime.date.today() and network not in ["ISUSM"]:
        # Use IEM Access
        pgconn = get_dbconn("iem")
        return read_sql(
            "SELECT distinct valid at time zone 'UTC' as utc_valid, "
            "valid at time zone t.tzname as local_valid, tmpf, dwpf, sknt, "
            "drct, vsby, skyc1, skyl1, skyc2, skyl2, skyc3, skyl3, skyc4, "
            "skyl4, relh, feel, alti, mslp, phour, p03i, p24i, "
            "phour as p01i, raw, gust, max_tmpf_6hr, min_tmpf_6hr, "
            "array_to_string(wxcodes, ' ') as wxcodes, snowdepth "
            "from current_log c JOIN stations t on (c.iemid = t.iemid) "
            "WHERE t.id = %s and t.network = %s and "
            "date(valid at time zone t.tzname) = %s ORDER by utc_valid ASC",
            pgconn,
            params=(station, network, date),
            index_col=None,
        )
    nt = NetworkTable(network, only_online=False)
    if station not in nt.sts:
        raise HTTPException(404, "Station + Network unknown to the IEM.")
    tzname = nt.sts[station]["tzname"]
    # This sucks, but alas we want easy datetime construction
    tz = ZoneInfo(tzname)
    sts = datetime.datetime(date.year, date.month, date.day, tzinfo=tz)
    ets = sts + datetime.timedelta(hours=24)
    tz = pytz.timezone(tzname)
    if network.find("_ASOS") > 0:
        # Use ASOS
        pgconn = get_dbconn("asos")
        return read_sql(
            "SELECT valid at time zone 'UTC' as utc_valid, "
            "valid at time zone %s as local_valid, tmpf, dwpf, sknt, drct, "
            "vsby, skyc1, skyl1, skyc2, skyl2, skyc3, skyl3, skyc4, skyl4, "
            "relh, feel, alti, mslp, p01i, p03i, p24i, metar as raw, "
            "p03i, p06i, p24i, max_tmpf_6hr, min_tmpf_6hr, gust, "
            "array_to_string(wxcodes, ' ') as wxcodes, snowdepth "
            "from alldata WHERE station = %s and "
            "valid >= %s and valid < %s ORDER by valid ASC",
            pgconn,
            params=(tzname, station, sts, ets),
            index_col=None,
        )
    if network.find("_RWIS") > 0:
        # Use RWIS
        pgconn = get_dbconn("rwis")
        return read_sql(
            "SELECT valid at time zone 'UTC' as utc_valid, "
            "valid at time zone %s as local_valid, tmpf, dwpf, sknt, drct, "
            "gust from alldata WHERE station = %s and "
            "valid >= %s and valid < %s ORDER by valid ASC",
            pgconn,
            params=(tzname, station, sts, ets),
            index_col=None,
        )
    if network in ["ISUSM", "ISUAG"]:
        # Use ISUAG
        pgconn = get_dbconn("isuag")
        df = read_sql(
            "SELECT valid at time zone 'UTC' as utc_valid, phour, "
            "valid at time zone %s as local_valid, tmpf, relh, sknt, drct "
            "from alldata WHERE station = %s and "
            "valid >= %s and valid < %s ORDER by valid ASC",
            pgconn,
            params=(tzname, station, sts, ets),
            index_col=None,
        )
        # Compute dew point
        if not df.empty:
            try:
                df["dwpf"] = (
                    dewpoint_from_relative_humidity(
                        masked_array(df["tmpf"].values, units("degF")),
                        masked_array(df["relh"].values, units("percent")),
                    )
                    .to(units("degF"))
                    .m
                )
            except TypeError:
                df["dwpf"] = np.nan
        return df
    if network == "OT":
        # Use the 'other' database
        pgconn = get_dbconn("other")
        return read_sql(
            "SELECT valid at time zone 'UTC' as utc_valid, "
            "valid at time zone %s as local_valid, tmpf, dwpf, sknt, drct "
            "from alldata WHERE station = %s and "
            "valid >= %s and valid < %s ORDER by valid ASC",
            pgconn,
            params=(tzname, station, sts, ets),
            index_col=None,
        )
    if network == "USCRN":
        pgconn = get_dbconn("other")
        df = read_sql(
            "SELECT valid at time zone 'UTC' as utc_valid, "
            "valid at time zone %s as local_valid, tmpc, rh, "
            "wind_mps from uscrn_alldata WHERE station = %s and "
            "valid >= %s and valid < %s ORDER by valid ASC",
            pgconn,
            params=(tzname, station, sts, ets),
            index_col=None,
        )
        if df.empty:
            return df
        # Do some unit work
        tmpc = masked_array(df["tmpc"].values, units("degC"))
        df["tmpf"] = tmpc.to(units("degF")).m
        df["dwpf"] = (
            dewpoint_from_relative_humidity(
                tmpc, masked_array(df["rh"].values, units("percent"))
            )
            .to(units("degF"))
            .m
        )
        df["sknt"] = (
            masked_array(df["wind_mps"], units("meters per second"))
            .to(units("knots"))
            .m
        )
        return df
    if network.find("_COOP") > 0 or network.find("_DCP") > 0:
        # Use HADS
        pgconn = get_dbconn("hads")
        df = read_sql(
            "SELECT distinct valid at time zone 'UTC' as utc_valid, "
            "key, value "
            f"from raw{date.strftime('%Y')} WHERE station = %s and "
            "valid >= %s and valid < %s ORDER by utc_valid ASC",
            pgconn,
            params=(station, sts, ets),
            index_col=None,
        )
        if df.empty:
            return df
        df = df.pivot_table(
            index="utc_valid", columns="key", values="value", aggfunc="first"
        )
        df = df.reset_index()
        # Query alldata too as it has the variable conversions done
        df2 = read_sql(
            "SELECT valid at time zone 'UTC' as utc_valid, "
            "tmpf, dwpf, sknt, drct "
            "from alldata WHERE station = %s and "
            "valid >= %s and valid < %s ORDER by utc_valid ASC",
            pgconn,
            params=(station, sts, ets),
            index_col=None,
        )
        df = df.merge(df2, on="utc_valid")
        # Generate the local_valid column
        df["local_valid"] = (
            df["utc_valid"]
            .dt.tz_localize(datetime.timezone.utc)
            .dt.tz_convert(tz)
        )
        return df
    return None
def grid_hour(ts):
    """
    I proctor the gridding of data on an hourly basis
    @param ts Timestamp of the analysis, we'll consider a 20 minute window
    """
    pprint("grid_hour called...")
    nc = ncopen(iemre.get_hourly_ncname(ts.year), "a", timeout=300)
    domain = nc.variables["hasdata"][:, :]
    nc.close()
    ts0 = ts - datetime.timedelta(minutes=10)
    ts1 = ts + datetime.timedelta(minutes=10)
    utcnow = datetime.datetime.utcnow()
    utcnow = utcnow.replace(tzinfo=pytz.utc) - datetime.timedelta(hours=36)

    # If we are near realtime, look in IEMAccess instead of ASOS database
    mybuf = 2.0
    params = (
        iemre.WEST - mybuf,
        iemre.SOUTH - mybuf,
        iemre.WEST - mybuf,
        iemre.NORTH + mybuf,
        iemre.EAST + mybuf,
        iemre.NORTH + mybuf,
        iemre.EAST + mybuf,
        iemre.SOUTH - mybuf,
        iemre.WEST - mybuf,
        iemre.SOUTH - mybuf,
        ts0,
        ts1,
    )
    if utcnow < ts:
        dbconn = get_dbconn("iem", user="******")
        sql = """SELECT t.id as station, ST_x(geom) as lon,
        ST_y(geom) as lat,
        max(case when tmpf > -60 and tmpf < 130 THEN tmpf else null end)
            as max_tmpf,
        max(case when sknt > 0 and sknt < 100 then sknt else 0 end)
            as max_sknt,
        max(getskyc(skyc1)) as max_skyc1,
        max(getskyc(skyc2)) as max_skyc2,
        max(getskyc(skyc3)) as max_skyc3,
        max(case when phour > 0 and phour < 1000 then phour else 0 end)
            as phour,
        max(case when dwpf > -60 and dwpf < 100 THEN dwpf else null end)
            as max_dwpf,
        max(case when sknt >= 0 then sknt else 0 end) as sknt,
        max(case when sknt >= 0 then drct else 0 end) as drct
        from current_log s JOIN stations t on (s.iemid = t.iemid)
        WHERE ST_Contains(
          ST_GeomFromEWKT('SRID=4326;POLYGON((%s %s, %s %s, %s %s, %s %s, %s %s))'),
          geom) and valid >= %s and valid < %s GROUP by station, lon, lat
        """
    else:
        dbconn = get_dbconn("asos", user="******")
        sql = """SELECT station, ST_x(geom) as lon, st_y(geom) as lat,
        max(case when tmpf > -60 and tmpf < 130 THEN tmpf else null end)
            as max_tmpf,
        max(case when sknt > 0 and sknt < 100 then sknt else 0 end)
            as max_sknt,
        max(getskyc(skyc1)) as max_skyc1,
        max(getskyc(skyc2)) as max_skyc2,
        max(getskyc(skyc3)) as max_skyc3,
        max(case when p01i > 0 and p01i < 1000 then p01i else 0 end)
            as phour,
        max(case when dwpf > -60 and dwpf < 100 THEN dwpf else null end)
            as max_dwpf,
        max(case when sknt >= 0 then sknt else 0 end) as sknt,
        max(case when sknt >= 0 then drct else 0 end) as drct
        from alldata a JOIN stations t on (a.station = t.id)
        WHERE ST_Contains(
          ST_GeomFromEWKT('SRID=4326;POLYGON((%s %s, %s %s, %s %s, %s %s, %s %s))'),
          geom) and (t.network ~* 'ASOS' or t.network = 'AWOS') and
        valid >= %s and valid < %s GROUP by station, lon, lat"""

    df = read_sql(sql, dbconn, params=params, index_col="station")
    pprint("got database results")
    if df.empty:
        print("%s has no entries, FAIL" % (ts.strftime("%Y-%m-%d %H:%M"),))
        return
    ures, vres = grid_wind(df, domain)
    pprint("grid_wind is done")
    if ures is None:
        print("iemre.hourly_analysis failure for uwnd at %s" % (ts,))
    else:
        write_grid(ts, "uwnd", ures)
        write_grid(ts, "vwnd", vres)
    tmpf = generic_gridder(df, "max_tmpf", domain)
    pprint("grid tmpf is done")
    if tmpf is None:
        print("iemre.hourly_analysis failure for tmpk at %s" % (ts,))
    else:
        dwpf = generic_gridder(df, "max_dwpf", domain)
        pprint("grid dwpf is done")
        # require that dwpk <= tmpk
        mask = ~np.isnan(dwpf)
        mask[mask] &= dwpf[mask] > tmpf[mask]
        dwpf = np.where(mask, tmpf, dwpf)
        write_grid(
            ts, "tmpk", masked_array(tmpf, data_units="degF").to("degK")
        )
        write_grid(
            ts, "dwpk", masked_array(dwpf, data_units="degF").to("degK")
        )
    res = grid_skyc(df, domain)
    pprint("grid skyc is done")
    if res is None:
        print("iemre.hourly_analysis failure for skyc at %s" % (ts,))
    else:
        write_grid(ts, "skyc", res)
def plotter(fdict):
    """ Go """
    ctx = get_autoplot_context(fdict, get_description())
    ts = ctx["ts"].replace(tzinfo=pytz.utc)
    hour = int(ctx["hour"])
    ilabel = ctx["ilabel"] == "yes"
    plot = MapPlot(
        sector=ctx["t"],
        continentalcolor="white",
        state=ctx["state"],
        cwa=ctx["wfo"],
        title=("NWS RFC %s Hour Flash Flood Guidance on %s UTC")
        % (hour, ts.strftime("%-d %b %Y %H")),
        subtitle=(
            "Estimated amount of %s Rainfall "
            "needed for non-urban Flash Flooding to commence"
        )
        % (HOURS[ctx["hour"]],),
    )
    cmap = plt.get_cmap(ctx["cmap"])
    bins = [
        0.01, 0.6, 0.8, 1.0, 1.2, 1.4, 1.6, 1.8,
        2.0, 2.25, 2.5, 2.75, 3.0, 3.5, 4.0, 5.0,
    ]
    if ts.year < 2019:
        column = "hour%02i" % (hour,)
        pgconn = get_dbconn("postgis")
        df = read_sql(
            """
            WITH data as (
                SELECT ugc, rank() OVER (PARTITION by ugc ORDER by valid DESC),
                hour01, hour03, hour06, hour12, hour24
                from ffg WHERE valid >= %s and valid <= %s)
            SELECT *, substr(ugc, 3, 1) as ztype from data where rank = 1
            """,
            pgconn,
            params=(ts - datetime.timedelta(hours=24), ts),
            index_col="ugc",
        )
        df2 = df[df["ztype"] == "C"]
        plot.fill_ugcs(
            df2[column].to_dict(),
            bins,
            cmap=cmap,
            plotmissing=False,
            ilabel=ilabel,
        )
        df2 = df[df["ztype"] == "Z"]
        plot.fill_ugcs(
            df2[column].to_dict(),
            bins,
            cmap=cmap,
            plotmissing=False,
            units="inches",
            ilabel=ilabel,
        )
    else:
        # use grib data
        ts -= datetime.timedelta(hours=(ts.hour % 6))
        ts = ts.replace(minute=0)
        fn = None
        for offset in range(0, 24, 4):
            ts2 = ts - datetime.timedelta(hours=offset)
            testfn = ts2.strftime(
                "/mesonet/ARCHIVE/data/%Y/%m/%d/model/ffg/"
                "5kmffg_%Y%m%d00.grib2"
            )
            if os.path.isfile(testfn):
                fn = testfn
                break
        if fn is None:
            raise NoDataFound("No valid grib data found!")
        grbs = pygrib.index(fn, "stepRange")
        grb = grbs.select(stepRange="0-%s" % (hour,))[0]
        lats, lons = grb.latlons()
        data = (
            masked_array(grb.values, data_units=units("mm"))
            .to(units("inch"))
            .m
        )
        plot.pcolormesh(lons, lats, data, bins, cmap=cmap)
    if ilabel:
        plot.drawcounties()
    df = pd.DataFrame()
    return plot.fig, df
def doday(ts, realtime):
    """
    Create a plot of precipitation stage4 estimates for some day
    We should total files from 1 AM to midnight local time
    """
    sts = ts.replace(hour=1)
    ets = sts + datetime.timedelta(hours=24)
    interval = datetime.timedelta(hours=1)
    now = sts
    total = None
    lts = None
    while now < ets:
        gmt = now.astimezone(pytz.utc)
        fn = gmt.strftime(
            "/mesonet/ARCHIVE/data/%Y/%m/%d/stage4/ST4.%Y%m%d%H.01h.grib"
        )
        if os.path.isfile(fn):
            lts = now
            grbs = pygrib.open(fn)
            if total is None:
                total = grbs[1]["values"]
                lats, lons = grbs[1].latlons()
            else:
                total += grbs[1]["values"]
            grbs.close()
        now += interval
    if lts is None:
        if ts.hour > 1:
            LOG.info("found no data for date: %s", ts)
        return
    lts = lts - datetime.timedelta(minutes=1)
    subtitle = "Total between 12:00 AM and %s" % (lts.strftime("%I:%M %p %Z"),)
    routes = "ac"
    if not realtime:
        routes = "a"
    total = masked_array(total, units("mm")).to(units("inch")).m
    for sector in ["iowa", "midwest", "conus"]:
        pqstr = "plot %s %s00 %s_stage4_1d.png %s_stage4_1d.png png" % (
            routes,
            ts.strftime("%Y%m%d%H"),
            sector,
            sector,
        )
        mp = MapPlot(
            sector=sector,
            title="%s NCEP Stage IV Today's Precipitation"
            % (ts.strftime("%-d %b %Y"),),
            subtitle=subtitle,
        )
        clevs = [0.01, 0.1, 0.3, 0.5, 1, 1.5, 2, 2.5, 3, 3.5, 4, 5, 6, 8, 10]
        mp.pcolormesh(
            lons, lats, total, clevs, cmap=nwsprecip(), units="inch"
        )
        # map.drawstates(zorder=2)
        if sector == "iowa":
            mp.drawcounties()
        mp.postprocess(pqstr=pqstr)
        mp.close()
def append_cfs(res, lon, lat):
    """Append on needed CFS data."""
    gridx, gridy = find_ij(lon, lat)
    lastyear = max(res["data"].keys())
    thisyear = datetime.date.today().year
    lastdate = datetime.date(thisyear, 8, 31)
    if lastyear != thisyear:
        # We don't have any data yet for this year, so we add some
        res["data"][thisyear] = {"dates": [], "high": [], "low": [], "rh": []}
    else:
        # shrug
        if res["data"][lastyear]["dates"]:
            lastdate = datetime.datetime.strptime(
                res["data"][thisyear]["dates"][-1], "%Y-%m-%d"
            ).date()
    # go find the most recent CFS 0z file
    valid = datetime.date.today()
    attempt = 0
    while True:
        testfn = valid.strftime("/mesonet/data/iemre/cfs_%Y%m%d00.nc")
        if os.path.isfile(testfn):
            break
        valid -= datetime.timedelta(hours=24)
        attempt += 1
        if attempt > 9:
            return None
    try:
        nc = ncopen(testfn, timeout=NCOPEN_TIMEOUT)
    except Exception as exp:
        LOG.error(exp)
        return None
    if nc is None:
        LOG.debug("Failing %s as nc is None", testfn)
        return None
    high = (
        masked_array(nc.variables["high_tmpk"][:, gridy, gridx], units.degK)
        .to(units.degF)
        .m
    )
    low = (
        masked_array(nc.variables["low_tmpk"][:, gridy, gridx], units.degK)
        .to(units.degF)
        .m
    )
    # RH hack
    # found ~20% bias with this value, so arb addition for now
    rh = (
        relative_humidity_from_dewpoint(
            masked_array(high, units.degF), masked_array(low, units.degF)
        ).m
        * 100.0
        + 20.0
    )
    rh = np.where(rh > 95, 95, rh)
    entry = res["data"][thisyear]
    # lastdate is either August 31 or a date after, so our first forecast
    # date is i+1
    tidx = daily_offset(lastdate + datetime.timedelta(days=1))
    for i in range(tidx, 365):
        lts = datetime.date(thisyear, 1, 1) + datetime.timedelta(days=i)
        if lts.month in [9, 10, 11]:
            entry["dates"].append(lts.strftime("%Y-%m-%d"))
            entry["high"].append(_i(high[i]))
            entry["low"].append(_i(low[i]))
            entry["rh"].append(_i(rh[i]))
    return res
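# For reference, MetPy's relative_humidity_from_dewpoint returns a
# dimensionless fraction, which is why the hack above scales by 100.
# A minimal check with plain pint quantities:
from metpy.calc import relative_humidity_from_dewpoint
from metpy.units import units

rh = relative_humidity_from_dewpoint(75.0 * units.degF, 60.0 * units.degF)
print(rh.m * 100)  # roughly 60 (%)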