def do_month(year, month, routes):
    """Generate and queue the monthly MRMS total precipitation map for Iowa.

    Args:
        year (int): year of interest
        month (int): month of interest
        routes (str): pqinsert routing flag(s) for product dissemination
    """
    sts = datetime.datetime(year, month, 1)
    # Jump ~a month forward, then snap back to the first of that month
    ets = (sts + datetime.timedelta(days=35)).replace(day=1)
    now = datetime.datetime.now()
    if ets > now:
        ets = now
    idx0 = iemre.daily_offset(sts)
    idx1 = iemre.daily_offset(ets)
    nc = ncopen(iemre.get_daily_mrms_ncname(year), 'r')
    lats = nc.variables['lat'][:]
    lons = nc.variables['lon'][:]
    mm_total = np.sum(nc.variables['p01d'][idx0:idx1, :, :], 0)
    nc.close()
    p01d = distance(mm_total, 'MM').value('IN')
    lastday = (ets - datetime.timedelta(days=1)).strftime("%-d %b %Y")
    mp = MapPlot(
        sector='iowa',
        title='MRMS %s - %s Total Precipitation' % (
            sts.strftime("%-d %b"), lastday),
        subtitle='Data from NOAA MRMS Project')
    x, y = np.meshgrid(lons, lats)
    bins = [0.01, 0.1, 0.5, 1, 1.5, 2, 3, 4, 5, 6, 7, 8, 12, 16, 20]
    mp.pcolormesh(x, y, p01d, bins, units='inches')
    mp.drawcounties()
    currentfn = "summary/iowa_mrms_q3_month.png"
    archivefn = sts.strftime("%Y/%m/summary/iowa_mrms_q3_month.png")
    pqstr = "plot %s %s00 %s %s png" % (
        routes, sts.strftime("%Y%m%d%H"), currentfn, archivefn)
    mp.postprocess(pqstr=pqstr)
def run(ts):
    """Merge one day of MRMS precipitation into the yearly netCDF file.

    Args:
        ts (datetime): timezone-aware timestamp of the day to process; the
            source image is stamped 24 hours later (end of accumulation).
    """
    # Resolve the target file and offset before nudging the timestamp
    # forward, so a Dec 31 run still writes into the correct year.
    ncfn = iemre.get_daily_mrms_ncname(ts.year)
    offset = iemre.daily_offset(ts)
    ts += datetime.timedelta(hours=24)
    gmtts = ts.astimezone(pytz.utc)
    fn = findfile(gmtts)
    if fn is None:
        print("merge_mrms_q2 failed to find file for time: %s" % (gmtts, ))
        return
    img = Image.open(fn)
    data = np.asarray(img)
    # data is 3500,7000 , starting at upper L
    data = np.flipud(data)
    # Anything over 254 is bad
    res = np.where(data > 254, 0, data)
    # Decode the 8-bit color ramp back into millimeters
    res = np.where(np.logical_and(data >= 0, data < 100), data * 0.25, res)
    res = np.where(
        np.logical_and(data >= 100, data < 180),
        25.0 + ((data - 100) * 1.25),
        res,
    )
    res = np.where(
        np.logical_and(data >= 180, data < 255),
        125.0 + ((data - 180) * 5.0),
        res,
    )
    # Subset the CONUS MRMS grid down to the IEMRE domain (0.01 deg cells)
    y1 = int((iemre.NORTH - mrms.SOUTH) * 100.0)
    y0 = int((iemre.SOUTH - mrms.SOUTH) * 100.0)
    x0 = int((iemre.WEST - mrms.WEST) * 100.0)
    x1 = int((iemre.EAST - mrms.WEST) * 100.0)
    # Open for append only once we know we have data to write, so the
    # early return above does not leak an open netCDF handle; close it
    # even if the assignment raises.
    nc = ncopen(ncfn, "a", timeout=300)
    try:
        nc.variables["p01d"][offset, :, :] = res[y0:y1, x0:x1]
    finally:
        nc.close()
def init_year(ts):
    """Retrofit cell-bounds variables onto the year's daily MRMS netCDF.

    Despite the name, this opens an existing file in append mode and adds
    the ``nv`` dimension plus ``lat_bnds``/``lon_bnds`` edge coordinates.

    Args:
        ts (datetime): any timestamp within the year of interest
    """
    fp = iemre.get_daily_mrms_ncname(ts.year)
    nc = ncopen(fp, 'a', timeout=300)
    nc.createDimension('nv', 2)
    lat = nc.variables['lat']
    lat.bounds = "lat_bnds"
    # Grid cell centers, 0.01 degree spacing
    lat[:] = np.arange(iemre.SOUTH + 0.005, iemre.NORTH, 0.01)
    # np.float was deprecated in NumPy 1.20 and removed in 1.24; it was
    # always an alias for the builtin float (C double), used here instead.
    lat_bnds = nc.createVariable('lat_bnds', float, ('lat', 'nv'))
    lat_bnds[:, 0] = np.arange(iemre.SOUTH, iemre.NORTH, 0.01)
    lat_bnds[:, 1] = np.arange(iemre.SOUTH + 0.01, iemre.NORTH + 0.01, 0.01)
    lon = nc.variables['lon']
    lon.bounds = "lon_bnds"
    lon[:] = np.arange(iemre.WEST, iemre.EAST, 0.01)
    lon_bnds = nc.createVariable('lon_bnds', float, ('lon', 'nv'))
    lon_bnds[:, 0] = np.arange(iemre.WEST, iemre.EAST, 0.01)
    lon_bnds[:, 1] = np.arange(iemre.WEST + 0.01, iemre.EAST + 0.01, 0.01)
    nc.close()
def doday(ts, realtime):
    """ Create a plot of precipitation stage4 estimates for some day """
    valid = utc(ts.year, ts.month, ts.day, 12)
    valid = valid.astimezone(pytz.timezone("America/Chicago"))
    # make assumptions about the last valid MRMS data
    if realtime:
        # Up until :59 after of the last hour
        valid = datetime.datetime.now() - datetime.timedelta(hours=1)
        valid = valid.replace(minute=59)
    else:
        valid = valid.replace(
            year=ts.year, month=ts.month, day=ts.day, hour=23, minute=59)
    idx = iemre.daily_offset(ts)
    ncfn = iemre.get_daily_mrms_ncname(ts.year)
    if not os.path.isfile(ncfn):
        LOG.info("File %s missing, abort.", ncfn)
        return
    with ncopen(ncfn, timeout=300) as nc:
        precip = nc.variables['p01d'][idx, :, :]
        lats = nc.variables['lat'][:]
        lons = nc.variables['lon'][:]
    subtitle = "Total between 12:00 AM and %s" % (
        valid.strftime("%I:%M %p %Z"), )
    routes = 'ac' if realtime else 'a'
    clevs = [
        0.01, 0.1, 0.25, 0.5, 0.75, 1, 1.5, 2, 2.5, 3, 3.5, 4, 5, 6, 8, 10
    ]
    (xx, yy) = np.meshgrid(lons, lats)
    for sector in ['iowa', 'midwest']:
        pqstr = ("plot %s %s00 %s_q2_1d.png %s_q2_1d.png png") % (
            routes, ts.strftime("%Y%m%d%H"), sector, sector)
        mp = MapPlot(
            title=("%s NCEP MRMS Q3 Today's Precipitation") % (
                ts.strftime("%-d %b %Y"), ),
            subtitle=subtitle, sector=sector)
        mp.pcolormesh(
            xx, yy, distance(precip, 'MM').value('IN'), clevs,
            cmap=nwsprecip(), units='inch')
        if sector == 'iowa':
            mp.drawcounties()
        mp.postprocess(pqstr=pqstr, view=False)
        mp.close()
def main():
    """CGI entry point: emit a JSON timeseries of daily IEMRE, climatology,
    PRISM and MRMS values for a point over an inclusive date range."""
    form = cgi.FieldStorage()
    ts1 = datetime.datetime.strptime(form.getfirst("date1"), "%Y-%m-%d")
    ts2 = datetime.datetime.strptime(form.getfirst("date2"), "%Y-%m-%d")
    if ts1 > ts2:
        send_error("date1 larger than date2")
    if ts1.year != ts2.year:
        send_error("multi-year query not supported yet...")
    # Make sure we aren't in the future
    tsend = datetime.date.today()
    if ts2.date() > tsend:
        ts2 = datetime.datetime.now() - datetime.timedelta(days=1)
    lat = float(form.getfirst("lat"))
    lon = float(form.getfirst("lon"))
    if lon < iemre.WEST or lon > iemre.EAST:
        send_error("lon value outside of bounds: %s to %s" % (iemre.WEST,
                                                              iemre.EAST))
    if lat < iemre.SOUTH or lat > iemre.NORTH:
        send_error("lat value outside of bounds: %s to %s" % (iemre.SOUTH,
                                                              iemre.NORTH))
    # fmt = form["format"][0]
    i, j = iemre.find_ij(lon, lat)
    offset1 = iemre.daily_offset(ts1)
    # +1 so the slice below includes the end date
    offset2 = iemre.daily_offset(ts2) + 1
    # Get our netCDF vars
    with ncopen(iemre.get_daily_ncname(ts1.year)) as nc:
        hightemp = datatypes.temperature(
            nc.variables['high_tmpk'][offset1:offset2, j, i], 'K').value("F")
        high12temp = datatypes.temperature(
            nc.variables['high_tmpk_12z'][offset1:offset2, j, i],
            'K').value("F")
        lowtemp = datatypes.temperature(
            nc.variables['low_tmpk'][offset1:offset2, j, i], 'K').value("F")
        low12temp = datatypes.temperature(
            nc.variables['low_tmpk_12z'][offset1:offset2, j, i],
            'K').value("F")
        # precip stored in mm; 25.4 converts to inches
        precip = nc.variables['p01d'][offset1:offset2, j, i] / 25.4
        precip12 = nc.variables['p01d_12z'][offset1:offset2, j, i] / 25.4
    # Get our climatology vars (climatology file is indexed on year 2000)
    c2000 = ts1.replace(year=2000)
    coffset1 = iemre.daily_offset(c2000)
    c2000 = ts2.replace(year=2000)
    coffset2 = iemre.daily_offset(c2000) + 1
    cnc = ncopen(iemre.get_dailyc_ncname())
    chigh = datatypes.temperature(
        cnc.variables['high_tmpk'][coffset1:coffset2, j, i], 'K').value("F")
    clow = datatypes.temperature(
        cnc.variables['low_tmpk'][coffset1:coffset2, j, i], 'K').value("F")
    cprecip = cnc.variables['p01d'][coffset1:coffset2, j, i] / 25.4
    cnc.close()
    # presumably the PRISM archive only starts in 1981 — TODO confirm
    if ts1.year > 1980:
        nc = ncopen("/mesonet/data/prism/%s_daily.nc" % (ts1.year, ))
        i2, j2 = prismutil.find_ij(lon, lat)
        prism_precip = nc.variables['ppt'][offset1:offset2, j2, i2] / 25.4
        nc.close()
    else:
        prism_precip = [None] * (offset2 - offset1)
    # presumably the MRMS archive only starts in 2011 — TODO confirm
    if ts1.year > 2010:
        nc = ncopen(iemre.get_daily_mrms_ncname(ts1.year))
        # MRMS grid is 0.01 degree, anchored at the domain SW corner
        j2 = int((lat - iemre.SOUTH) * 100.0)
        i2 = int((lon - iemre.WEST) * 100.0)
        mrms_precip = nc.variables['p01d'][offset1:offset2, j2, i2] / 25.4
        nc.close()
    else:
        mrms_precip = [None] * (offset2 - offset1)
    res = {'data': [], }
    for i in range(0, offset2 - offset1):
        now = ts1 + datetime.timedelta(days=i)
        res['data'].append({
            'date': now.strftime("%Y-%m-%d"),
            'mrms_precip_in': clean(mrms_precip[i]),
            'prism_precip_in': clean(prism_precip[i]),
            'daily_high_f': clean(hightemp[i]),
            '12z_high_f': clean(high12temp[i]),
            'climate_daily_high_f': clean(chigh[i]),
            'daily_low_f': clean(lowtemp[i]),
            '12z_low_f': clean(low12temp[i]),
            'climate_daily_low_f': clean(clow[i]),
            'daily_precip_in': clean(precip[i]),
            '12z_precip_in': clean(precip12[i]),
            'climate_daily_precip_in': clean(cprecip[i])
        })
    ssw('Content-type: application/json\n\n')
    ssw(json.dumps(res))
def main():
    """CGI entry point: emit a single day's IEMRE, climatology, PRISM and
    MRMS values for one point as JSON."""
    form = cgi.FieldStorage()
    ts = datetime.datetime.strptime(form.getfirst("date"), "%Y-%m-%d")
    lat = float(form.getfirst("lat"))
    lon = float(form.getfirst("lon"))
    fmt = form.getfirst("format")
    if fmt != 'json':
        ssw("Content-type: text/plain\n\n")
        ssw("ERROR: Service only emits json at this time")
        return
    i, j = iemre.find_ij(lon, lat)
    offset = iemre.daily_offset(ts)
    res = {'data': [], }
    fn = iemre.get_daily_ncname(ts.year)
    ssw('Content-type: application/json\n\n')
    # No file for the year: emit the empty result and stop
    if not os.path.isfile(fn):
        ssw(json.dumps(res))
        sys.exit()
    if i is None or j is None:
        ssw(json.dumps({'error': 'Coordinates outside of domain'}))
        return
    # presumably the PRISM archive only starts in 1981 — TODO confirm
    if ts.year > 1980:
        ncfn = "/mesonet/data/prism/%s_daily.nc" % (ts.year, )
        if not os.path.isfile(ncfn):
            prism_precip = None
        else:
            i2, j2 = prismutil.find_ij(lon, lat)
            with ncopen(ncfn) as nc:
                # mm to inch
                prism_precip = nc.variables['ppt'][offset, j2, i2] / 25.4
    else:
        prism_precip = None
    # presumably the MRMS archive only starts in 2011 — TODO confirm
    if ts.year > 2010:
        ncfn = iemre.get_daily_mrms_ncname(ts.year)
        if not os.path.isfile(ncfn):
            mrms_precip = None
        else:
            # MRMS grid is 0.01 degree, anchored at the domain SW corner
            j2 = int((lat - iemre.SOUTH) * 100.0)
            i2 = int((lon - iemre.WEST) * 100.0)
            with ncopen(ncfn) as nc:
                mrms_precip = nc.variables['p01d'][offset, j2, i2] / 25.4
    else:
        mrms_precip = None
    nc = ncopen(fn)
    # climatology file is indexed on year 2000
    c2000 = ts.replace(year=2000)
    coffset = iemre.daily_offset(c2000)
    cnc = ncopen(iemre.get_dailyc_ncname())
    res['data'].append({
        'prism_precip_in': myrounder(prism_precip, 2),
        'mrms_precip_in': myrounder(mrms_precip, 2),
        'daily_high_f': myrounder(
            datatypes.temperature(nc.variables['high_tmpk'][offset, j, i],
                                  'K').value('F'), 1),
        '12z_high_f': myrounder(
            datatypes.temperature(nc.variables['high_tmpk_12z'][offset, j, i],
                                  'K').value('F'), 1),
        'climate_daily_high_f': myrounder(
            datatypes.temperature(cnc.variables['high_tmpk'][coffset, j, i],
                                  'K').value("F"), 1),
        'daily_low_f': myrounder(
            datatypes.temperature(nc.variables['low_tmpk'][offset, j, i],
                                  'K').value("F"), 1),
        '12z_low_f': myrounder(
            datatypes.temperature(nc.variables['low_tmpk_12z'][offset, j, i],
                                  'K').value('F'), 1),
        'avg_dewpoint_f': myrounder(
            datatypes.temperature(nc.variables['avg_dwpk'][offset, j, i],
                                  'K').value('F'), 1),
        'climate_daily_low_f': myrounder(
            datatypes.temperature(cnc.variables['low_tmpk'][coffset, j, i],
                                  'K').value("F"), 1),
        'daily_precip_in': myrounder(
            nc.variables['p01d'][offset, j, i] / 25.4, 2),
        '12z_precip_in': myrounder(
            nc.variables['p01d_12z'][offset, j, i] / 25.4, 2),
        'climate_daily_precip_in': myrounder(
            cnc.variables['p01d'][coffset, j, i] / 25.4, 2),
        # W m-2 averaged over the day -> MJ m-2 per day
        'srad_mj': myrounder(
            nc.variables['rsds'][offset, j, i] * 86400. / 1000000., 2),
        'avg_windspeed_mps': myrounder(
            nc.variables['wind_speed'][offset, j, i], 2),
    })
    nc.close()
    cnc.close()
    ssw(json.dumps(res))
def plotter(fdict):
    """Autoplot entry: map accumulated precipitation (total, departure, or
    percent of normal) from MRMS, IEMRE, or PRISM over a date range."""
    ctx = util.get_autoplot_context(fdict, get_description())
    ptype = ctx['ptype']
    sdate = ctx['sdate']
    edate = ctx['edate']
    src = ctx['src']
    opt = ctx['opt']
    usdm = ctx['usdm']
    if sdate.year != edate.year:
        raise NoDataFound('Sorry, do not support multi-year plots yet!')
    days = (edate - sdate).days
    sector = ctx['sector']
    if sdate == edate:
        title = sdate.strftime("%-d %B %Y")
    else:
        title = "%s to %s (inclusive)" % (sdate.strftime("%-d %b"),
                                          edate.strftime("%-d %b %Y"))
    # default grid bounds cover the whole domain
    x0 = 0
    x1 = -1
    y0 = 0
    y1 = -1
    state = None
    # two-character sector is a state abbreviation
    if len(sector) == 2:
        state = sector
        sector = 'state'
    if src == 'mrms':
        ncfn = iemre.get_daily_mrms_ncname(sdate.year)
        clncfn = iemre.get_dailyc_mrms_ncname()
        ncvar = 'p01d'
        source = 'MRMS Q3'
        subtitle = 'NOAA MRMS Project, GaugeCorr and RadarOnly'
    elif src == 'iemre':
        ncfn = iemre.get_daily_ncname(sdate.year)
        clncfn = iemre.get_dailyc_ncname()
        ncvar = 'p01d_12z'
        source = 'IEM Reanalysis'
        subtitle = 'IEM Reanalysis is derived from various NOAA datasets'
    else:
        ncfn = "/mesonet/data/prism/%s_daily.nc" % (sdate.year, )
        clncfn = "/mesonet/data/prism/prism_dailyc.nc"
        ncvar = 'ppt'
        source = 'OSU PRISM'
        subtitle = ('PRISM Climate Group, Oregon State Univ., '
                    'http://prism.oregonstate.edu, created 4 Feb 2004.')
    mp = MapPlot(sector=sector, state=state, axisbg='white', nocaption=True,
                 title='%s:: %s Precip %s' % (source, title, PDICT3[opt]),
                 subtitle='Data from %s' % (subtitle, ), titlefontsize=14)
    idx0 = iemre.daily_offset(sdate)
    # +1 so the time slice includes the end date
    idx1 = iemre.daily_offset(edate) + 1
    if not os.path.isfile(ncfn):
        raise NoDataFound("No data for that year, sorry.")
    with util.ncopen(ncfn) as nc:
        if state is not None:
            x0, y0, x1, y1 = util.grid_bounds(nc.variables['lon'][:],
                                              nc.variables['lat'][:],
                                              state_bounds[state])
        elif sector in SECTORS:
            bnds = SECTORS[sector]
            x0, y0, x1, y1 = util.grid_bounds(
                nc.variables['lon'][:], nc.variables['lat'][:],
                [bnds[0], bnds[2], bnds[1], bnds[3]])
        lats = nc.variables['lat'][y0:y1]
        lons = nc.variables['lon'][x0:x1]
        if sdate == edate:
            p01d = distance(nc.variables[ncvar][idx0, y0:y1, x0:x1],
                            'MM').value('IN')
        elif (idx1 - idx0) < 32:
            p01d = distance(
                np.sum(nc.variables[ncvar][idx0:idx1, y0:y1, x0:x1], 0),
                'MM').value('IN')
        else:
            # Too much data can overwhelm this app, need to chunk it
            for i in range(idx0, idx1, 10):
                i2 = min([i + 10, idx1])
                if idx0 == i:
                    p01d = distance(
                        np.sum(nc.variables[ncvar][i:i2, y0:y1, x0:x1], 0),
                        'MM').value('IN')
                else:
                    p01d += distance(
                        np.sum(nc.variables[ncvar][i:i2, y0:y1, x0:x1], 0),
                        'MM').value('IN')
    # a fully masked max implies no data was present in the window
    if np.ma.is_masked(np.max(p01d)):
        raise NoDataFound("Data Unavailable")
    units = 'inches'
    cmap = plt.get_cmap(ctx['cmap'])
    cmap.set_bad('white')
    if opt == 'dep':
        # Do departure work now
        with util.ncopen(clncfn) as nc:
            climo = distance(
                np.sum(nc.variables[ncvar][idx0:idx1, y0:y1, x0:x1], 0),
                'MM').value('IN')
        p01d = p01d - climo
        # symmetric color range clipped at the 99th percentile magnitude
        [maxv] = np.percentile(np.abs(p01d), [99, ])
        clevs = np.around(np.linspace(0 - maxv, maxv, 11), decimals=2)
    elif opt == 'per':
        with util.ncopen(clncfn) as nc:
            climo = distance(
                np.sum(nc.variables[ncvar][idx0:idx1, y0:y1, x0:x1], 0),
                'MM').value('IN')
        p01d = p01d / climo * 100.
        cmap.set_under('white')
        cmap.set_over('black')
        clevs = [1, 10, 25, 50, 75, 100, 125, 150, 200, 300, 500]
        units = 'percent'
    else:
        p01d = np.where(p01d < 0.001, np.nan, p01d)
        cmap.set_under('white')
        # widen the contour levels as the period grows
        clevs = [0.01, 0.1, 0.3, 0.5, 1, 1.5, 2, 2.5, 3, 3.5, 4, 5, 6, 8, 10]
        if days > 6:
            clevs = [0.01, 0.3, 0.5, 1, 1.5, 2, 3, 4, 5, 6, 7, 8, 10, 15, 20]
        if days > 29:
            clevs = [0.01, 0.5, 1, 2, 3, 4, 5, 6, 8, 10, 15, 20, 25, 30, 35]
        if days > 90:
            clevs = [0.01, 1, 2, 3, 4, 5, 6, 8, 10, 15, 20, 25, 30, 35, 40]
    x2d, y2d = np.meshgrid(lons, lats)
    if ptype == 'c':
        mp.contourf(x2d, y2d, p01d, clevs, cmap=cmap, units=units,
                    iline=False)
    else:
        res = mp.pcolormesh(x2d, y2d, p01d, clevs, cmap=cmap, units=units)
        res.set_rasterized(True)
    if sector != 'midwest':
        mp.drawcounties()
        mp.drawcities()
    if usdm == 'yes':
        mp.draw_usdm(edate, filled=False, hatched=True)
    return mp.fig
def plotter(fdict):
    """Autoplot entry: bar chart of a state's areal coverage by trailing-day
    MRMS precipitation departure bin, plus coverage above a daily threshold."""
    ctx = util.get_autoplot_context(fdict, get_description())
    date = ctx['date']
    sector = ctx['sector']
    days = ctx['trailing']
    threshold = ctx['threshold']
    window_sts = date - datetime.timedelta(days=days)
    if window_sts.year != date.year:
        raise ValueError('Sorry, do not support multi-year plots yet!')
    if len(sector) != 2:
        raise ValueError("Sorry, this does not support multi-state plots yet.")
    idx0 = iemre.daily_offset(window_sts)
    idx1 = iemre.daily_offset(date)
    ncfn = iemre.get_daily_mrms_ncname(date.year)
    ncvar = 'p01d'
    if not os.path.isfile(ncfn):
        raise ValueError("No data for that year, sorry.")
    nc = util.ncopen(ncfn)
    # Get the state weight
    df = gpd.GeoDataFrame.from_postgis("""
    SELECT the_geom from states where state_abbr = %s
    """, util.get_dbconn('postgis'), params=(sector, ), index_col=None,
                                       geom_col='the_geom')
    czs = CachingZonalStats(iemre.MRMS_AFFINE)
    czs.gen_stats(
        np.zeros((nc.variables['lat'].size, nc.variables['lon'].size)),
        df['the_geom'])
    hasdata = None
    jslice = None
    islice = None
    # single state geometry, so one grid navigation entry is expected
    for nav in czs.gridnav:
        # mask of cells inside the state polygon
        hasdata = np.ones((nav.ysz, nav.xsz))
        hasdata[nav.mask] = 0.
        # careful here as y is flipped in this context
        jslice = slice(nc.variables['lat'].size - (nav.y0 + nav.ysz),
                       nc.variables['lat'].size - nav.y0)
        islice = slice(nav.x0, nav.x0 + nav.xsz)
    hasdata = np.flipud(hasdata)
    # precipitation on the target date itself, in inches
    today = distance(nc.variables[ncvar][idx1, jslice, islice],
                     'MM').value('IN')
    if (idx1 - idx0) < 32:
        p01d = distance(
            np.sum(nc.variables[ncvar][idx0:idx1, jslice, islice], 0),
            'MM').value('IN')
    else:
        # Too much data can overwhelm this app, need to chunk it
        for i in range(idx0, idx1, 10):
            i2 = min([i + 10, idx1])
            if idx0 == i:
                p01d = distance(
                    np.sum(nc.variables[ncvar][i:i2, jslice, islice], 0),
                    'MM').value('IN')
            else:
                p01d += distance(
                    np.sum(nc.variables[ncvar][i:i2, jslice, islice], 0),
                    'MM').value('IN')
    nc.close()
    # Get climatology
    nc = util.ncopen(iemre.get_dailyc_mrms_ncname())
    if (idx1 - idx0) < 32:
        c_p01d = distance(
            np.sum(nc.variables[ncvar][idx0:idx1, jslice, islice], 0),
            'MM').value('IN')
    else:
        # Too much data can overwhelm this app, need to chunk it
        for i in range(idx0, idx1, 10):
            i2 = min([i + 10, idx1])
            if idx0 == i:
                c_p01d = distance(
                    np.sum(nc.variables[ncvar][i:i2, jslice, islice], 0),
                    'MM').value('IN')
            else:
                c_p01d += distance(
                    np.sum(nc.variables[ncvar][i:i2, jslice, islice], 0),
                    'MM').value('IN')
    nc.close()
    # we actually don't care about weights at this fine of scale
    cells = np.sum(np.where(hasdata > 0, 1, 0))
    departure = p01d - c_p01d
    # Update departure and today to values unconsidered below when out of state
    departure = np.where(hasdata > 0, departure, -9999)
    today = np.where(hasdata > 0, today, 0)
    ranges = [[-99, -3], [-3, -2], [-2, -1], [-1, 0], [0, 1], [1, 2],
              [2, 3], [3, 99]]
    x = []
    x2 = []
    labels = []
    for (minv, maxv) in ranges:
        labels.append("%.0f to %.0f" % (minv, maxv))
        # How many departure cells in this range
        hits = np.logical_and(departure < maxv, departure > minv)
        # ... and of those, how many also exceeded today's threshold
        hits2 = np.logical_and(hits, today > threshold)
        x.append(np.sum(np.where(hits, 1, 0)) / float(cells) * 100.)
        x2.append(np.sum(np.where(hits2, 1, 0)) / float(cells) * 100.)
    (fig, ax) = plt.subplots(1, 1)
    ax.set_title(("%s NOAA MRMS %s %.2f inch Precip Coverage") % (
        state_names[sector], date.strftime("%-d %b %Y"), threshold))
    ax.bar(np.arange(8) - 0.2, x, align='center', width=0.4,
           label='Trailing %s Day Departure' % (days, ))
    ax.bar(np.arange(8) + 0.2, x2, align='center', width=0.4,
           label='%s Coverage (%.1f%% Tot)' % (
               date.strftime("%-d %b %Y"), sum(x2)))
    for i, (_x1, _x2) in enumerate(zip(x, x2)):
        ax.text(i - 0.2, _x1 + 1, "%.1f" % (_x1, ), ha='center')
        ax.text(i + 0.2, _x2 + 1, "%.1f" % (_x2, ), ha='center')
    ax.set_xticks(np.arange(8))
    ax.set_xticklabels(labels)
    ax.set_xlabel("Trailing %s Day Precip Departure [in]" % (days, ))
    ax.set_position([0.1, 0.2, 0.8, 0.7])
    ax.legend(loc=(0., -0.2), ncol=2)
    ax.set_ylabel("Areal Coverage of %s [%%]" % (state_names[sector], ))
    ax.grid(True)
    ax.set_xlim(-0.5, 7.5)
    ax.set_ylim(0, max([max(x2), max(x)]) + 5)
    return fig
def plotter(fdict):
    """Autoplot entry: map how many recent days were needed at each MRMS grid
    cell to accumulate a given precipitation threshold (90-day lookback)."""
    ctx = util.get_autoplot_context(fdict, get_description())
    date = ctx['date']
    sector = ctx['sector']
    threshold = ctx['threshold']
    threshold_mm = distance(threshold, 'IN').value('MM')
    window_sts = date - datetime.timedelta(days=90)
    if window_sts.year != date.year:
        raise NoDataFound('Sorry, do not support multi-year plots yet!')
    # idx0 = iemre.daily_offset(window_sts)
    idx1 = iemre.daily_offset(date)
    ncfn = iemre.get_daily_mrms_ncname(date.year)
    if not os.path.isfile(ncfn):
        raise NoDataFound("No data found.")
    ncvar = 'p01d'
    # Get the state weight
    df = gpd.GeoDataFrame.from_postgis("""
    SELECT the_geom from states where state_abbr = %s
    """, util.get_dbconn('postgis'), params=(sector, ), index_col=None,
                                       geom_col='the_geom')
    czs = CachingZonalStats(iemre.MRMS_AFFINE)
    with util.ncopen(ncfn) as nc:
        czs.gen_stats(
            np.zeros((nc.variables['lat'].size, nc.variables['lon'].size)),
            df['the_geom'])
        jslice = None
        islice = None
        # single state geometry, so one grid navigation entry is expected
        for nav in czs.gridnav:
            # careful here as y is flipped in this context
            jslice = slice(nc.variables['lat'].size - (nav.y0 + nav.ysz),
                           nc.variables['lat'].size - nav.y0)
            islice = slice(nav.x0, nav.x0 + nav.xsz)
        grid = np.zeros(
            (jslice.stop - jslice.start, islice.stop - islice.start))
        total = np.zeros(
            (jslice.stop - jslice.start, islice.stop - islice.start))
        # walk backwards from the valid date; once a cell's running total
        # crosses the threshold, record the day count and stop updating it
        for i, idx in enumerate(range(idx1, idx1 - 90, -1)):
            total += nc.variables[ncvar][idx, jslice, islice]
            grid = np.where(np.logical_and(grid == 0, total > threshold_mm),
                            i, grid)
        lon = nc.variables['lon'][islice]
        lat = nc.variables['lat'][jslice]
    mp = MapPlot(sector='state', state=sector, titlefontsize=14,
                 subtitlefontsize=12,
                 title=("NOAA MRMS Q3: Number of Recent Days "
                        "till Accumulating %s\" of Precip") % (threshold, ),
                 subtitle=("valid %s: based on per calendar day "
                           "estimated preciptation, GaugeCorr and "
                           "RadarOnly products") % (
                               date.strftime("%-d %b %Y"), ))
    x, y = np.meshgrid(lon, lat)
    cmap = plt.get_cmap(ctx['cmap'])
    cmap.set_over('k')
    cmap.set_under('white')
    mp.pcolormesh(x, y, grid, np.arange(0, 81, 10), cmap=cmap, units='days')
    mp.drawcounties()
    mp.drawcities()
    return mp.fig
def test_ncname():
    """Test the responses for get_names."""
    # each helper should hand back a usable filename for the year
    for getter in (
        iemre.get_daily_ncname,
        iemre.get_hourly_ncname,
        iemre.get_daily_mrms_ncname,
    ):
        assert getter(2020) is not None
def application(environ, start_response):
    """WSGI entry point: emit a single day's IEMRE, climatology, PRISM and
    MRMS values for one point as JSON."""
    form = parse_formvars(environ)
    ts = datetime.datetime.strptime(form.get("date", "2019-03-01"),
                                    "%Y-%m-%d")
    lat = float(form.get("lat", 41.99))
    lon = float(form.get("lon", -95.1))
    fmt = form.get("format", "json")
    if fmt != "json":
        headers = [("Content-type", "text/plain")]
        start_response("200 OK", headers)
        return [b"ERROR: Service only emits json at this time"]
    i, j = iemre.find_ij(lon, lat)
    offset = iemre.daily_offset(ts)
    res = {"data": []}
    fn = iemre.get_daily_ncname(ts.year)
    headers = [("Content-type", "application/json")]
    start_response("200 OK", headers)
    # No file for the year: emit the empty result
    if not os.path.isfile(fn):
        return [json.dumps(res).encode("ascii")]
    if i is None or j is None:
        data = {"error": "Coordinates outside of domain"}
        return [json.dumps(data).encode("ascii")]
    # presumably the PRISM archive only starts in 1981 — TODO confirm
    if ts.year > 1980:
        ncfn = "/mesonet/data/prism/%s_daily.nc" % (ts.year, )
        if not os.path.isfile(ncfn):
            prism_precip = None
        else:
            i2, j2 = prismutil.find_ij(lon, lat)
            with ncopen(ncfn) as nc:
                # mm to inch
                prism_precip = nc.variables["ppt"][offset, j2, i2] / 25.4
    else:
        prism_precip = None
    # presumably the MRMS archive only starts in 2011 — TODO confirm
    if ts.year > 2010:
        ncfn = iemre.get_daily_mrms_ncname(ts.year)
        if not os.path.isfile(ncfn):
            mrms_precip = None
        else:
            # MRMS grid is 0.01 degree, anchored at the domain SW corner
            j2 = int((lat - iemre.SOUTH) * 100.0)
            i2 = int((lon - iemre.WEST) * 100.0)
            with ncopen(ncfn) as nc:
                mrms_precip = nc.variables["p01d"][offset, j2, i2] / 25.4
    else:
        mrms_precip = None
    # climatology file is indexed on year 2000
    c2000 = ts.replace(year=2000)
    coffset = iemre.daily_offset(c2000)
    with ncopen(fn) as nc:
        with ncopen(iemre.get_dailyc_ncname()) as cnc:
            res["data"].append({
                "prism_precip_in":
                myrounder(prism_precip, 2),
                "mrms_precip_in":
                myrounder(mrms_precip, 2),
                "daily_high_f":
                myrounder(
                    datatypes.temperature(
                        nc.variables["high_tmpk"][offset, j, i],
                        "K").value("F"),
                    1,
                ),
                "12z_high_f":
                myrounder(
                    datatypes.temperature(
                        nc.variables["high_tmpk_12z"][offset, j, i],
                        "K").value("F"),
                    1,
                ),
                "climate_daily_high_f":
                myrounder(
                    datatypes.temperature(
                        cnc.variables["high_tmpk"][coffset, j, i],
                        "K").value("F"),
                    1,
                ),
                "daily_low_f":
                myrounder(
                    datatypes.temperature(
                        nc.variables["low_tmpk"][offset, j, i],
                        "K").value("F"),
                    1,
                ),
                "12z_low_f":
                myrounder(
                    datatypes.temperature(
                        nc.variables["low_tmpk_12z"][offset, j, i],
                        "K").value("F"),
                    1,
                ),
                "avg_dewpoint_f":
                myrounder(
                    datatypes.temperature(
                        nc.variables["avg_dwpk"][offset, j, i],
                        "K").value("F"),
                    1,
                ),
                "climate_daily_low_f":
                myrounder(
                    datatypes.temperature(
                        cnc.variables["low_tmpk"][coffset, j, i],
                        "K").value("F"),
                    1,
                ),
                "daily_precip_in":
                myrounder(nc.variables["p01d"][offset, j, i] / 25.4, 2),
                "12z_precip_in":
                myrounder(nc.variables["p01d_12z"][offset, j, i] / 25.4, 2),
                "climate_daily_precip_in":
                myrounder(cnc.variables["p01d"][coffset, j, i] / 25.4, 2),
                # W m-2 averaged over the day -> MJ m-2 per day
                "srad_mj":
                myrounder(
                    nc.variables["rsds"][offset, j, i] * 86400.0 / 1000000.0,
                    2,
                ),
                "avg_windspeed_mps":
                myrounder(nc.variables["wind_speed"][offset, j, i], 2),
            })
    return [json.dumps(res).encode("ascii")]
def application(environ, start_response):
    """WSGI entry point: emit a JSON timeseries of daily IEMRE, climatology,
    PRISM and MRMS values for a point over an inclusive date range."""
    form = parse_formvars(environ)
    ts1 = datetime.datetime.strptime(form.get("date1"), "%Y-%m-%d")
    ts2 = datetime.datetime.strptime(form.get("date2"), "%Y-%m-%d")
    if ts1 > ts2:
        return [send_error(start_response, "date1 larger than date2")]
    if ts1.year != ts2.year:
        return [
            send_error(start_response, "multi-year query not supported yet...")
        ]
    # Make sure we aren't in the future
    tsend = datetime.date.today()
    if ts2.date() > tsend:
        ts2 = datetime.datetime.now() - datetime.timedelta(days=1)
    lat = float(form.get("lat"))
    lon = float(form.get("lon"))
    if lon < iemre.WEST or lon > iemre.EAST:
        return [
            send_error(
                start_response,
                "lon value outside of bounds: %s to %s"
                % (iemre.WEST, iemre.EAST),
            )
        ]
    if lat < iemre.SOUTH or lat > iemre.NORTH:
        return [
            send_error(
                start_response,
                "lat value outside of bounds: %s to %s"
                % (iemre.SOUTH, iemre.NORTH),
            )
        ]
    # fmt = form["format"][0]
    i, j = iemre.find_ij(lon, lat)
    offset1 = iemre.daily_offset(ts1)
    # +1 so the slice below includes the end date
    offset2 = iemre.daily_offset(ts2) + 1
    # Get our netCDF vars
    with ncopen(iemre.get_daily_ncname(ts1.year)) as nc:
        hightemp = datatypes.temperature(
            nc.variables["high_tmpk"][offset1:offset2, j, i], "K"
        ).value("F")
        high12temp = datatypes.temperature(
            nc.variables["high_tmpk_12z"][offset1:offset2, j, i], "K"
        ).value("F")
        lowtemp = datatypes.temperature(
            nc.variables["low_tmpk"][offset1:offset2, j, i], "K"
        ).value("F")
        low12temp = datatypes.temperature(
            nc.variables["low_tmpk_12z"][offset1:offset2, j, i], "K"
        ).value("F")
        # precip stored in mm; 25.4 converts to inches
        precip = nc.variables["p01d"][offset1:offset2, j, i] / 25.4
        precip12 = nc.variables["p01d_12z"][offset1:offset2, j, i] / 25.4
    # Get our climatology vars (climatology file is indexed on year 2000)
    c2000 = ts1.replace(year=2000)
    coffset1 = iemre.daily_offset(c2000)
    c2000 = ts2.replace(year=2000)
    coffset2 = iemre.daily_offset(c2000) + 1
    with ncopen(iemre.get_dailyc_ncname()) as cnc:
        chigh = datatypes.temperature(
            cnc.variables["high_tmpk"][coffset1:coffset2, j, i], "K"
        ).value("F")
        clow = datatypes.temperature(
            cnc.variables["low_tmpk"][coffset1:coffset2, j, i], "K"
        ).value("F")
        cprecip = cnc.variables["p01d"][coffset1:coffset2, j, i] / 25.4
    # presumably the PRISM archive only starts in 1981 — TODO confirm
    if ts1.year > 1980:
        i2, j2 = prismutil.find_ij(lon, lat)
        with ncopen("/mesonet/data/prism/%s_daily.nc" % (ts1.year,)) as nc:
            prism_precip = nc.variables["ppt"][offset1:offset2, j2, i2] / 25.4
    else:
        prism_precip = [None] * (offset2 - offset1)
    # presumably the MRMS archive only starts in 2011 — TODO confirm
    if ts1.year > 2010:
        # MRMS grid is 0.01 degree, anchored at the domain SW corner
        j2 = int((lat - iemre.SOUTH) * 100.0)
        i2 = int((lon - iemre.WEST) * 100.0)
        with ncopen(iemre.get_daily_mrms_ncname(ts1.year)) as nc:
            mrms_precip = nc.variables["p01d"][offset1:offset2, j2, i2] / 25.4
    else:
        mrms_precip = [None] * (offset2 - offset1)
    res = {"data": []}
    for i in range(0, offset2 - offset1):
        now = ts1 + datetime.timedelta(days=i)
        res["data"].append(
            {
                "date": now.strftime("%Y-%m-%d"),
                "mrms_precip_in": clean(mrms_precip[i]),
                "prism_precip_in": clean(prism_precip[i]),
                "daily_high_f": clean(hightemp[i]),
                "12z_high_f": clean(high12temp[i]),
                "climate_daily_high_f": clean(chigh[i]),
                "daily_low_f": clean(lowtemp[i]),
                "12z_low_f": clean(low12temp[i]),
                "climate_daily_low_f": clean(clow[i]),
                "daily_precip_in": clean(precip[i]),
                "12z_precip_in": clean(precip12[i]),
                "climate_daily_precip_in": clean(cprecip[i]),
            }
        )
    start_response("200 OK", [("Content-type", "application/json")])
    return [json.dumps(res).encode("ascii")]
def service(
    fmt: SupportedFormatsNoGeoJSON,
    sdate: datetime.date = Query(..., description="Start Date."),
    edate: datetime.date = Query(..., description="End Date."),
    lon: float = Query(..., description="Longitude of point of interest"),
    lat: float = Query(..., description="Latitude of point of interest"),
):
    """Deliver a dataframe of daily IEMRE, climatology, PRISM and MRMS
    values for the given point over the inclusive date range."""
    # Make sure we aren't in the future
    tsend = datetime.date.today()
    if edate > tsend:
        edate = datetime.date.today() - datetime.timedelta(days=1)
    i, j = iemre.find_ij(lon, lat)
    offset1 = iemre.daily_offset(sdate)
    # +1 so the slice below includes the end date
    offset2 = iemre.daily_offset(edate) + 1
    # Get our netCDF vars
    with ncopen(iemre.get_daily_ncname(sdate.year)) as nc:
        hightemp = convert_value(
            nc.variables["high_tmpk"][offset1:offset2, j, i], "degK", "degF"
        )
        high12temp = convert_value(
            nc.variables["high_tmpk_12z"][offset1:offset2, j, i],
            "degK",
            "degF",
        )
        lowtemp = convert_value(
            nc.variables["low_tmpk"][offset1:offset2, j, i], "degK", "degF"
        )
        low12temp = convert_value(
            nc.variables["low_tmpk_12z"][offset1:offset2, j, i], "degK", "degF"
        )
        precip = mm2inch(nc.variables["p01d"][offset1:offset2, j, i])
        precip12 = mm2inch(nc.variables["p01d_12z"][offset1:offset2, j, i])
    # Get our climatology vars (climatology file is indexed on year 2000)
    c2000 = sdate.replace(year=2000)
    coffset1 = iemre.daily_offset(c2000)
    c2000 = edate.replace(year=2000)
    coffset2 = iemre.daily_offset(c2000) + 1
    with ncopen(iemre.get_dailyc_ncname()) as cnc:
        chigh = convert_value(
            cnc.variables["high_tmpk"][coffset1:coffset2, j, i], "degK", "degF"
        )
        clow = convert_value(
            cnc.variables["low_tmpk"][coffset1:coffset2, j, i], "degK", "degF"
        )
        cprecip = mm2inch(
            cnc.variables["p01d"][coffset1:coffset2, j, i],
        )
    if sdate.year > 1980:
        i2, j2 = prismutil.find_ij(lon, lat)
        with ncopen(f"/mesonet/data/prism/{sdate.year}_daily.nc") as nc:
            prism_precip = mm2inch(
                nc.variables["ppt"][offset1:offset2, j2, i2],
            )
    else:
        prism_precip = [None] * (offset2 - offset1)
    # NOTE(review): sibling services gate MRMS availability on year > 2010,
    # but this uses > 2000 — confirm which cutoff is intended.
    if sdate.year > 2000:
        # MRMS grid is 0.01 degree, anchored at the domain SW corner
        j2 = int((lat - iemre.SOUTH) * 100.0)
        i2 = int((lon - iemre.WEST) * 100.0)
        with ncopen(iemre.get_daily_mrms_ncname(sdate.year)) as nc:
            mrms_precip = mm2inch(
                nc.variables["p01d"][offset1:offset2, j2, i2],
            )
    else:
        mrms_precip = [None] * (offset2 - offset1)
    res = []
    for i in range(0, offset2 - offset1):
        now = sdate + datetime.timedelta(days=i)
        res.append(
            {
                "date": now.strftime("%Y-%m-%d"),
                "mrms_precip_in": clean(mrms_precip[i]),
                "prism_precip_in": clean(prism_precip[i]),
                "daily_high_f": clean(hightemp[i]),
                "12z_high_f": clean(high12temp[i]),
                "climate_daily_high_f": clean(chigh[i]),
                "daily_low_f": clean(lowtemp[i]),
                "12z_low_f": clean(low12temp[i]),
                "climate_daily_low_f": clean(clow[i]),
                "daily_precip_in": clean(precip[i]),
                "12z_precip_in": clean(precip12[i]),
                "climate_daily_precip_in": clean(cprecip[i]),
            }
        )
    return deliver_df(pd.DataFrame(res), fmt)
def plotter(fdict):
    """Autoplot entry: map accumulated precipitation (total, departure, or
    percent of normal) from MRMS, IEMRE, or PRISM over a date range."""
    ctx = util.get_autoplot_context(fdict, get_description())
    ptype = ctx["ptype"]
    sdate = ctx["sdate"]
    edate = ctx["edate"]
    src = ctx["src"]
    opt = ctx["opt"]
    usdm = ctx["usdm"]
    if sdate.year != edate.year:
        raise NoDataFound("Sorry, do not support multi-year plots yet!")
    days = (edate - sdate).days
    sector = ctx["sector"]
    # default grid bounds cover the whole domain
    x0 = 0
    x1 = -1
    y0 = 0
    y1 = -1
    state = None
    # two-character sector is a state abbreviation
    if len(sector) == 2:
        state = sector
        sector = "state"
    title = compute_title(src, sdate, edate)
    if src == "mrms":
        ncfn = iemre.get_daily_mrms_ncname(sdate.year)
        clncfn = iemre.get_dailyc_mrms_ncname()
        ncvar = "p01d"
        source = "MRMS Q3"
        subtitle = "NOAA MRMS Project, GaugeCorr and RadarOnly"
    elif src == "iemre":
        ncfn = iemre.get_daily_ncname(sdate.year)
        clncfn = iemre.get_dailyc_ncname()
        ncvar = "p01d_12z"
        source = "IEM Reanalysis"
        subtitle = "IEM Reanalysis is derived from various NOAA datasets"
    else:
        ncfn = "/mesonet/data/prism/%s_daily.nc" % (sdate.year, )
        clncfn = "/mesonet/data/prism/prism_dailyc.nc"
        ncvar = "ppt"
        source = "OSU PRISM"
        subtitle = ("PRISM Climate Group, Oregon State Univ., "
                    "http://prism.oregonstate.edu, created 4 Feb 2004.")
    mp = MapPlot(
        sector=sector,
        state=state,
        axisbg="white",
        nocaption=True,
        title="%s:: %s Precip %s" % (source, title, PDICT3[opt]),
        subtitle="Data from %s" % (subtitle, ),
        titlefontsize=14,
    )
    idx0 = iemre.daily_offset(sdate)
    # +1 so the time slice includes the end date
    idx1 = iemre.daily_offset(edate) + 1
    if not os.path.isfile(ncfn):
        raise NoDataFound("No data for that year, sorry.")
    with util.ncopen(ncfn) as nc:
        if state is not None:
            x0, y0, x1, y1 = util.grid_bounds(
                nc.variables["lon"][:],
                nc.variables["lat"][:],
                state_bounds[state],
            )
        elif sector in SECTORS:
            bnds = SECTORS[sector]
            x0, y0, x1, y1 = util.grid_bounds(
                nc.variables["lon"][:],
                nc.variables["lat"][:],
                [bnds[0], bnds[2], bnds[1], bnds[3]],
            )
        lats = nc.variables["lat"][y0:y1]
        lons = nc.variables["lon"][x0:x1]
        if sdate == edate:
            p01d = mm2inch(nc.variables[ncvar][idx0, y0:y1, x0:x1])
        elif (idx1 - idx0) < 32:
            p01d = mm2inch(
                np.sum(nc.variables[ncvar][idx0:idx1, y0:y1, x0:x1], 0))
        else:
            # Too much data can overwhelm this app, need to chunk it
            for i in range(idx0, idx1, 10):
                i2 = min([i + 10, idx1])
                if idx0 == i:
                    p01d = mm2inch(
                        np.sum(nc.variables[ncvar][i:i2, y0:y1, x0:x1], 0))
                else:
                    p01d += mm2inch(
                        np.sum(nc.variables[ncvar][i:i2, y0:y1, x0:x1], 0))
    # a fully masked max implies no data was present in the window
    if np.ma.is_masked(np.max(p01d)):
        raise NoDataFound("Data Unavailable")
    plot_units = "inches"
    cmap = get_cmap(ctx["cmap"])
    cmap.set_bad("white")
    if opt == "dep":
        # Do departure work now
        with util.ncopen(clncfn) as nc:
            climo = mm2inch(
                np.sum(nc.variables[ncvar][idx0:idx1, y0:y1, x0:x1], 0))
        p01d = p01d - climo
        # symmetric color range clipped at the 99th percentile magnitude
        [maxv] = np.percentile(np.abs(p01d), [99])
        clevs = np.around(np.linspace(0 - maxv, maxv, 11), decimals=2)
    elif opt == "per":
        with util.ncopen(clncfn) as nc:
            climo = mm2inch(
                np.sum(nc.variables[ncvar][idx0:idx1, y0:y1, x0:x1], 0))
        p01d = p01d / climo * 100.0
        cmap.set_under("white")
        cmap.set_over("black")
        clevs = [1, 10, 25, 50, 75, 100, 125, 150, 200, 300, 500]
        plot_units = "percent"
    else:
        p01d = np.where(p01d < 0.001, np.nan, p01d)
        cmap.set_under("white")
        # widen the contour levels as the period grows
        clevs = [0.01, 0.1, 0.3, 0.5, 1, 1.5, 2, 2.5, 3, 3.5, 4, 5, 6, 8, 10]
        if days > 6:
            clevs = [0.01, 0.3, 0.5, 1, 1.5, 2, 3, 4, 5, 6, 7, 8, 10, 15, 20]
        if days > 29:
            clevs = [0.01, 0.5, 1, 2, 3, 4, 5, 6, 8, 10, 15, 20, 25, 30, 35]
        if days > 90:
            clevs = [0.01, 1, 2, 3, 4, 5, 6, 8, 10, 15, 20, 25, 30, 35, 40]
    x2d, y2d = np.meshgrid(lons, lats)
    if ptype == "c":
        mp.contourf(x2d, y2d, p01d, clevs, cmap=cmap, units=plot_units,
                    iline=False)
    else:
        res = mp.pcolormesh(x2d, y2d, p01d, clevs, cmap=cmap,
                            units=plot_units)
        res.set_rasterized(True)
    if sector != "midwest":
        mp.drawcounties()
        mp.drawcities()
    if usdm == "yes":
        mp.draw_usdm(edate, filled=False, hatched=True)
    return mp.fig
def init_year(ts):
    """Create a new yearly NetCDF file for daily MRMS precipitation.

    Builds the 0.01-degree lat/lon grid over the IEMRE domain, a daily
    time axis for ``ts.year``, and a scaled uint16 ``p01d`` precip
    variable, then writes the file named by get_daily_mrms_ncname.

    Args:
        ts (datetime): any timestamp within the year to initialize; only
            the year (and days-in-year) is used.
    """
    fn = iemre.get_daily_mrms_ncname(ts.year)
    nc = ncopen(fn, "w")
    nc.title = "MRMS Daily Precipitation %s" % (ts.year, )
    nc.platform = "Grided Estimates"
    nc.description = "MRMS 0.01 degree grid"
    nc.institution = "Iowa State University, Ames, IA, USA"
    nc.source = "Iowa Environmental Mesonet"
    nc.project_id = "IEM"
    nc.realization = 1
    nc.Conventions = "CF-1.0"
    nc.contact = "Daryl Herzmann, [email protected], 515-294-5978"
    nc.history = "%s Generated" % (
        datetime.datetime.now().strftime("%d %B %Y"),
    )
    nc.comment = "No Comment at this time"

    # Setup Dimensions.  Sizes must be integers; round before truncating
    # so float noise in the domain arithmetic (e.g. 1399.9999...) cannot
    # silently drop a grid row or column.
    nc.createDimension("lat", int(round((iemre.NORTH - iemre.SOUTH) * 100.0)))
    nc.createDimension("lon", int(round((iemre.EAST - iemre.WEST) * 100.0)))
    # Handles leap years automatically (365 or 366)
    days = ((ts.replace(year=ts.year + 1)) - ts).days
    nc.createDimension("time", int(days))
    nc.createDimension("nv", 2)

    # Setup Coordinate Variables.  NB: np.float was removed in numpy 1.24;
    # np.float64 is the identical dtype.
    lat = nc.createVariable("lat", np.float64, ("lat", ))
    lat.units = "degrees_north"
    lat.long_name = "Latitude"
    lat.standard_name = "latitude"
    lat.bounds = "lat_bnds"
    lat.axis = "Y"
    # Grid centers sit half a cell (0.005 deg) inside the cell edges
    lat[:] = np.arange(iemre.SOUTH + 0.005, iemre.NORTH, 0.01)

    lat_bnds = nc.createVariable("lat_bnds", np.float64, ("lat", "nv"))
    lat_bnds[:, 0] = np.arange(iemre.SOUTH, iemre.NORTH, 0.01)
    lat_bnds[:, 1] = np.arange(iemre.SOUTH + 0.01, iemre.NORTH + 0.01, 0.01)

    lon = nc.createVariable("lon", np.float64, ("lon", ))
    lon.units = "degrees_east"
    lon.long_name = "Longitude"
    lon.standard_name = "longitude"
    lon.bounds = "lon_bnds"
    lon.axis = "X"
    lon[:] = np.arange(iemre.WEST, iemre.EAST, 0.01)

    lon_bnds = nc.createVariable("lon_bnds", np.float64, ("lon", "nv"))
    lon_bnds[:, 0] = np.arange(iemre.WEST, iemre.EAST, 0.01)
    lon_bnds[:, 1] = np.arange(iemre.WEST + 0.01, iemre.EAST + 0.01, 0.01)

    tm = nc.createVariable("time", np.float64, ("time", ))
    tm.units = "Days since %s-01-01 00:00:0.0" % (ts.year, )
    tm.long_name = "Time"
    tm.standard_name = "time"
    tm.axis = "T"
    tm.calendar = "gregorian"
    tm[:] = np.arange(0, int(days))

    # Precip stored as scaled uint16: 0 to 655.35 mm by 0.01 mm steps,
    # with 65535 reserved as the fill value
    p01d = nc.createVariable("p01d", np.uint16, ("time", "lat", "lon"),
                             fill_value=65535)
    p01d.units = "mm"
    p01d.scale_factor = 0.01
    p01d.long_name = "Precipitation"
    p01d.standard_name = "Precipitation"
    p01d.coordinates = "lon lat"
    p01d.description = "Precipitation accumulation for the day"
    nc.close()
def run(ts): """Update netcdf file with the MRMS data Args: ts (datetime): timestamptz at midnight central time and we are running forward in time """ nc = ncopen(iemre.get_daily_mrms_ncname(ts.year), "a", timeout=300) offset = iemre.daily_offset(ts) ncprecip = nc.variables["p01d"] gmtts = ts.astimezone(pytz.UTC) utcnow = utc() total = None lats = None for _ in range(1, 25): gmtts += datetime.timedelta(hours=1) if gmtts > utcnow: continue gribfn = None for prefix in ["GaugeCorr_QPE_01H", "RadarOnly_QPE_01H"]: fn = mrms.fetch(prefix, gmtts) if fn is None: continue fp = gzip.GzipFile(fn, "rb") (_, gribfn) = tempfile.mkstemp() tmpfp = open(gribfn, "wb") tmpfp.write(fp.read()) tmpfp.close() os.unlink(fn) break if gribfn is None: if gmtts < utcnow: print("merge_mrms_q3.py MISSING %s" % (gmtts,)) continue grbs = pygrib.open(gribfn) grb = grbs[1] if lats is None: lats, _ = grb.latlons() os.unlink(gribfn) val = grb["values"] # Anything less than zero, we set to zero val = np.where(val < 0, 0, val) if total is None: total = val else: total += val if lats is None: print("merge_mrms_q3 nodata for %s" % (ts.date(),)) return # CAREFUL HERE! The MRMS grid is North to South # set top (smallest y) y0 = int((lats[0, 0] - iemre.NORTH) * 100.0) y1 = int((lats[0, 0] - iemre.SOUTH) * 100.0) x0 = int((iemre.WEST - mrms.WEST) * 100.0) x1 = int((iemre.EAST - mrms.WEST) * 100.0) # print(('y0:%s y1:%s x0:%s x1:%s lat0:%s offset:%s ' # ) % (y0, y1, x0, x1, lats[0, 0], offset)) ncprecip[offset, :, :] = np.flipud(total[y0:y1, x0:x1]) nc.close()
def service(
    fmt: SupportedFormatsNoGeoJSON,
    date: datetime.date = Query(..., description="The date of interest."),
    lon: float = Query(..., description="Longitude of point of interest"),
    lat: float = Query(..., description="Latitude of point of interest"),
):
    """Return a one-row daily summary for a point.

    Samples the IEM Reanalysis grids at (lon, lat) for the given date,
    plus PRISM precip (years after 1980), MRMS precip (years after 2000),
    and year-2000 climatology values, and delivers the result as a
    single-row DataFrame in the requested format.
    """
    # Grid indices within the IEM Reanalysis domain
    i, j = iemre.find_ij(lon, lat)
    offset = iemre.daily_offset(date)
    res = []
    fn = iemre.get_daily_ncname(date.year)
    # PRISM daily files only exist for years after 1980
    if date.year > 1980:
        ncfn = f"/mesonet/data/prism/{date.year}_daily.nc"
        if not os.path.isfile(ncfn):
            prism_precip = None
        else:
            # PRISM uses its own grid; look up indices separately
            i2, j2 = prismutil.find_ij(lon, lat)
            with ncopen(ncfn) as nc:
                prism_precip = mm2inch(nc.variables["ppt"][offset, j2, i2])
    else:
        prism_precip = None
    # MRMS daily files only exist for years after 2000
    if date.year > 2000:
        ncfn = iemre.get_daily_mrms_ncname(date.year)
        if not os.path.isfile(ncfn):
            mrms_precip = None
        else:
            # MRMS is a 0.01-degree grid anchored at the domain SW corner
            j2 = int((lat - iemre.SOUTH) * 100.0)
            i2 = int((lon - iemre.WEST) * 100.0)
            with ncopen(ncfn) as nc:
                mrms_precip = mm2inch(nc.variables["p01d"][offset, j2, i2])
    else:
        mrms_precip = None
    # Climatology is indexed by the same calendar day in year 2000
    c2000 = date.replace(year=2000)
    coffset = iemre.daily_offset(c2000)
    with ncopen(fn) as nc:
        with ncopen(iemre.get_dailyc_ncname()) as cnc:
            res.append({
                "prism_precip_in": myrounder(prism_precip, 2),
                "mrms_precip_in": myrounder(mrms_precip, 2),
                # Temperatures stored in Kelvin; convert to Fahrenheit
                "daily_high_f": myrounder(
                    convert_value(
                        nc.variables["high_tmpk"][offset, j, i],
                        "degK",
                        "degF",
                    ),
                    1,
                ),
                "12z_high_f": myrounder(
                    convert_value(
                        nc.variables["high_tmpk_12z"][offset, j, i],
                        "degK",
                        "degF",
                    ),
                    1,
                ),
                "climate_daily_high_f": myrounder(
                    convert_value(
                        cnc.variables["high_tmpk"][coffset, j, i],
                        "degK",
                        "degF",
                    ),
                    1,
                ),
                "daily_low_f": myrounder(
                    convert_value(
                        nc.variables["low_tmpk"][offset, j, i],
                        "degK",
                        "degF",
                    ),
                    1,
                ),
                "12z_low_f": myrounder(
                    convert_value(
                        nc.variables["low_tmpk_12z"][offset, j, i],
                        "degK",
                        "degF",
                    ),
                    1,
                ),
                "avg_dewpoint_f": myrounder(
                    convert_value(
                        nc.variables["avg_dwpk"][offset, j, i],
                        "degK",
                        "degF",
                    ),
                    1,
                ),
                "climate_daily_low_f": myrounder(
                    convert_value(
                        cnc.variables["low_tmpk"][coffset, j, i],
                        "degK",
                        "degF",
                    ),
                    1,
                ),
                "daily_precip_in":
                myrounder(mm2inch(nc.variables["p01d"][offset, j, i]), 2),
                "12z_precip_in":
                myrounder(mm2inch(nc.variables["p01d_12z"][offset, j, i]), 2),
                "climate_daily_precip_in":
                myrounder(mm2inch(cnc.variables["p01d"][coffset, j, i]), 2),
                # W m-2 -> MJ m-2 day-1: times 86400 s, divided by 1e6
                "srad_mj": myrounder(
                    nc.variables["rsds"][offset, j, i] * 86400.0 / 1000000.0,
                    2,
                ),
                "avg_windspeed_mps":
                myrounder(nc.variables["wind_speed"][offset, j, i], 2),
            })
    return deliver_df(pd.DataFrame(res), fmt)