def do_month(year, month, routes):
    """ Generate a MRMS plot for the month!"""
    sts = datetime.datetime(year, month, 1)
    ets = sts + datetime.timedelta(days=35)
    ets = ets.replace(day=1)
    today = datetime.datetime.now()
    if ets > today:
        ets = today
    idx0 = iemre.daily_offset(sts)
    idx1 = iemre.daily_offset(ets)
    nc = ncopen(iemre.get_daily_mrms_ncname(year), 'r')
    lats = nc.variables['lat'][:]
    lons = nc.variables['lon'][:]
    p01d = distance(np.sum(nc.variables['p01d'][idx0:idx1, :, :], 0),
                    'MM').value('IN')
    nc.close()

    mp = MapPlot(sector='iowa',
                 title='MRMS %s - %s Total Precipitation' % (
                     sts.strftime("%-d %b"),
                     (ets - datetime.timedelta(days=1)).strftime("%-d %b %Y")),
                 subtitle='Data from NOAA MRMS Project')
    x, y = np.meshgrid(lons, lats)
    bins = [0.01, 0.1, 0.5, 1, 1.5, 2, 3, 4, 5, 6, 7, 8, 12, 16, 20]
    mp.pcolormesh(x, y, p01d, bins, units='inches')
    mp.drawcounties()
    currentfn = "summary/iowa_mrms_q3_month.png"
    archivefn = sts.strftime("%Y/%m/summary/iowa_mrms_q3_month.png")
    pqstr = "plot %s %s00 %s %s png" % (
        routes, sts.strftime("%Y%m%d%H"), currentfn, archivefn)
    mp.postprocess(pqstr=pqstr)
def main(): """Go Main""" total = None years = 0. for yr in range(1981, 2018): print(yr) ncfn = "/mesonet/data/prism/%s_daily.nc" % (yr, ) nc = netCDF4.Dataset(ncfn) if total is None: lons = nc.variables['lon'][:] lats = nc.variables['lat'][:] total = np.zeros(nc.variables['tmax'].shape[1:], np.float) days = np.zeros(nc.variables['tmax'].shape[1:], np.float) sidx = daily_offset(datetime.date(yr, 1, 1)) eidx = daily_offset(datetime.date(yr, 7, 4)) for idx in range(sidx, eidx): days += np.where(nc.variables['tmax'][idx, :, :] > THRESHOLD, 1, 0) nc.close() years += 1. total += days val = days - (total / years) print(np.max(val)) print(np.min(val)) mp = MapPlot(sector='conus', title=("OSU PRISM 2017 Days with High >= 90$^\circ$F " "Departure"), subtitle=("2017 thru 4 July against 1981-2016 " "Year to Date Average")) mp.contourf(lons, lats, val, np.arange(-25, 26, 5), units='days', cmap=plt.get_cmap('seismic')) mp.postprocess(filename='test.png')
def do_month(year, month, routes):
    """ Generate a MRMS plot for the month!"""
    sts = datetime.datetime(year, month, 1)
    ets = sts + datetime.timedelta(days=35)
    ets = ets.replace(day=1)
    today = datetime.datetime.now()
    if ets > today:
        ets = today
    idx0 = iemre.daily_offset(sts)
    idx1 = iemre.daily_offset(ets)
    nc = netCDF4.Dataset("/mesonet/data/iemre/%s_mw_mrms_daily.nc" % (year,),
                         'r')
    lats = nc.variables['lat'][:]
    lons = nc.variables['lon'][:]
    # p01d is stored in millimeters, convert to inches (25.4 mm per inch)
    p01d = np.sum(nc.variables['p01d'][idx0:idx1, :, :], 0) / 25.4
    nc.close()

    m = MapPlot(sector='iowa',
                title='MRMS %s - %s Total Precipitation' % (
                    sts.strftime("%-d %b"),
                    (ets - datetime.timedelta(days=1)).strftime("%-d %b %Y")),
                subtitle='Data from NOAA MRMS Project')
    x, y = np.meshgrid(lons, lats)
    bins = [0.01, 0.1, 0.5, 1, 1.5, 2, 3, 4, 5, 6, 7, 8, 12, 16, 20]
    m.pcolormesh(x, y, p01d, bins, units='inches')
    m.drawcounties()
    currentfn = "summary/iowa_mrms_q3_month.png"
    archivefn = sts.strftime("%Y/%m/summary/iowa_mrms_q3_month.png")
    pqstr = "plot %s %s00 %s %s png" % (
        routes, sts.strftime("%Y%m%d%H"), currentfn, archivefn)
    m.postprocess(pqstr=pqstr)
def main():
    """Do Something Fun!"""
    form = cgi.FormContent()
    ts = datetime.datetime.strptime(form["date"][0], "%Y-%m-%d")
    lat = float(form["lat"][0])
    lon = float(form["lon"][0])
    fmt = form["format"][0]
    if fmt != 'json':
        sys.stdout.write("Content-type: text/plain\n\n")
        sys.stdout.write("ERROR: Service only emits json at this time")
        return

    i, j = iemre.find_ij(lon, lat)
    offset = iemre.daily_offset(ts)

    res = {'data': [], }

    fn = "/mesonet/data/iemre/%s_mw_daily.nc" % (ts.year,)

    sys.stdout.write('Content-type: application/json\n\n')
    if not os.path.isfile(fn):
        sys.stdout.write(json.dumps(res))
        sys.exit()

    if i is None or j is None:
        sys.stdout.write(json.dumps({'error': 'Coordinates outside of domain'}
                                    ))
        return

    nc = netCDF4.Dataset(fn, 'r')

    c2000 = ts.replace(year=2000)
    coffset = iemre.daily_offset(c2000)

    cnc = netCDF4.Dataset("/mesonet/data/iemre/mw_dailyc.nc", 'r')

    res['data'].append({
        'daily_high_f': myrounder(
            datatypes.temperature(
                nc.variables['high_tmpk'][offset, j, i], 'K').value('F'), 1),
        'climate_daily_high_f': myrounder(
            datatypes.temperature(
                cnc.variables['high_tmpk'][coffset, j, i], 'K').value("F"), 1),
        'daily_low_f': myrounder(
            datatypes.temperature(
                nc.variables['low_tmpk'][offset, j, i], 'K').value("F"), 1),
        'climate_daily_low_f': myrounder(
            datatypes.temperature(
                cnc.variables['low_tmpk'][coffset, j, i], 'K').value("F"), 1),
        'daily_precip_in': myrounder(
            nc.variables['p01d'][offset, j, i] / 25.4, 2),
        'climate_daily_precip_in': myrounder(
            cnc.variables['p01d'][coffset, j, i] / 25.4, 2),
    })
    nc.close()
    cnc.close()

    sys.stdout.write(json.dumps(res))
def main():
    """Go Main Go"""
    ets = datetime.datetime.now() - datetime.timedelta(days=1)
    sts = datetime.datetime(ets.year, 1, 1)

    # Get the normal accumm
    with ncopen(iemre.get_dailyc_ncname()) as cnc:
        lons = cnc.variables["lon"][:]
        lats = cnc.variables["lat"][:]
        index0 = iemre.daily_offset(sts)
        index1 = iemre.daily_offset(ets)
        clprecip = np.sum(cnc.variables["p01d"][index0:index1, :, :], 0)

    with ncopen(iemre.get_daily_ncname(sts.year)) as nc:
        obprecip = np.sum(nc.variables["p01d"][index0:index1, :, :], 0)

    lons, lats = np.meshgrid(lons, lats)

    # Plot departure from normal
    mp = MapPlot(
        sector="midwest",
        title=("Precipitation Departure %s - %s")
        % (sts.strftime("%b %d %Y"), ets.strftime("%b %d %Y")),
        subtitle="based on IEM Estimates",
    )
    mp.pcolormesh(
        lons, lats, (obprecip - clprecip) / 25.4, np.arange(-10, 10, 1)
    )
    mp.postprocess(
        pqstr="plot c 000000000000 summary/year/stage4_diff.png bogus png"
    )
    mp.close()

    # Plot normals
    mp = MapPlot(
        sector="midwest",
        title=("Normal Precipitation:: %s - %s")
        % (sts.strftime("%b %d %Y"), ets.strftime("%b %d %Y")),
        subtitle="based on IEM Estimates",
    )
    mp.pcolormesh(lons, lats, (clprecip) / 25.4, np.arange(0, 30, 2))
    mp.postprocess(
        pqstr="plot c 000000000000 summary/year/stage4_normals.png bogus png"
    )
    mp.close()

    # Plot Obs
    mp = MapPlot(
        sector="midwest",
        title=("Estimated Precipitation:: %s - %s")
        % (sts.strftime("%b %d %Y"), ets.strftime("%b %d %Y")),
        subtitle="based on IEM Estimates",
    )
    mp.pcolormesh(lons, lats, (obprecip) / 25.4, np.arange(0, 30, 2))
    mp.postprocess(
        pqstr="plot c 000000000000 summary/year/stage4obs.png bogus png"
    )
    mp.close()
def plotter(fdict):
    """ Go """
    import matplotlib
    matplotlib.use('agg')
    import matplotlib.pyplot as plt
    from pyiem.plot.geoplot import MapPlot
    ctx = util.get_autoplot_context(fdict, get_description())
    date = ctx['date']
    sector = ctx['sector']
    threshold = ctx['threshold']
    threshold_mm = distance(threshold, 'IN').value('MM')
    window_sts = date - datetime.timedelta(days=90)
    if window_sts.year != date.year:
        raise Exception('Sorry, do not support multi-year plots yet!')

    idx0 = iemre.daily_offset(window_sts)
    idx1 = iemre.daily_offset(date)
    ncfn = "/mesonet/data/iemre/%s_mw_mrms_daily.nc" % (date.year, )
    ncvar = 'p01d'
    if not os.path.isfile(ncfn):
        raise Exception("No data for that year, sorry.")
    nc = netCDF4.Dataset(ncfn, 'r')
    grid = np.zeros((len(nc.dimensions['lat']),
                     len(nc.dimensions['lon'])))
    total = np.zeros((len(nc.dimensions['lat']),
                      len(nc.dimensions['lon'])))
    for i, idx in enumerate(range(idx1, idx1 - 90, -1)):
        total += nc.variables[ncvar][idx, :, :]
        grid = np.where(np.logical_and(grid == 0,
                                       total > threshold_mm), i, grid)
    lon = np.append(nc.variables['lon'][:], [-80.5])
    lat = np.append(nc.variables['lat'][:], [49.])
    nc.close()

    mp = MapPlot(sector='state', state=sector, titlefontsize=14,
                 subtitlefontsize=12,
                 title=("NOAA MRMS Q3: Number of Recent Days "
                        "till Accumulating %s\" of Precip"
                        ) % (threshold, ),
                 subtitle=("valid %s: based on per calendar day "
                           "estimated precipitation, GaugeCorr and "
                           "RadarOnly products"
                           ) % (date.strftime("%-d %b %Y"), ))
    x, y = np.meshgrid(lon, lat)
    cmap = plt.get_cmap('terrain')
    cmap.set_over('k')
    cmap.set_under('white')
    mp.pcolormesh(x, y, grid, np.arange(0, 81, 10), cmap=cmap, units='days')
    mp.drawcounties()
    mp.drawcities()

    return mp.fig
def test_daily_offset():
    """ Compute the offsets """
    ts = utc(2013, 1, 1, 0, 0)
    offset = iemre.daily_offset(ts)
    assert offset == 0

    ts = datetime.date(2013, 2, 1)
    offset = iemre.daily_offset(ts)
    assert offset == 31

    ts = utc(2013, 1, 5, 12, 0)
    offset = iemre.daily_offset(ts)
    assert offset == 4
def test_daily_offset(self):
    """ Compute the offsets """
    ts = datetime.datetime(2013, 1, 1, 0, 0)
    ts = ts.replace(tzinfo=pytz.timezone("UTC"))
    offset = iemre.daily_offset(ts)
    self.assertEqual(offset, 0)

    ts = datetime.date(2013, 2, 1)
    offset = iemre.daily_offset(ts)
    self.assertEqual(offset, 31)

    ts = datetime.datetime(2013, 1, 5, 12, 0)
    ts = ts.replace(tzinfo=pytz.timezone("UTC"))
    offset = iemre.daily_offset(ts)
    self.assertEqual(offset, 4)
def replace_cfs(nc, valid, islice, jslice):
    """Copy CFS data into the given year."""
    tidx0 = (valid - datetime.date(valid.year, 1, 1)).days
    tidx1 = (
        datetime.date(valid.year, 12, 31) - datetime.date(valid.year, 1, 1)
    ).days
    cfsnc = ncopen(valid.strftime("/mesonet/data/iemre/cfs_%Y%m%d%H.nc"))
    tidx = iemre.daily_offset(valid + datetime.timedelta(days=1))
    tslice = slice(tidx0 + 1, tidx1 + 1)
    # print("replace_cfs filling %s from %s" % (tslice, tidx))
    # CFS is W m-2, we want MJ
    nc.variables["srad"][tslice, :, :] = (
        cfsnc.variables["srad"][tidx:, jslice, islice] * 86400.0 / 1000000.0
    )
    highc = temperature(
        cfsnc.variables["high_tmpk"][tidx:, jslice, islice], "K"
    ).value("C")
    lowc = temperature(
        cfsnc.variables["low_tmpk"][tidx:, jslice, islice], "K"
    ).value("C")
    nc.variables["tmax"][tslice, :, :] = highc
    nc.variables["tmin"][tslice, :, :] = lowc
    nc.variables["gdd_f"][tslice, :, :] = gdd(
        temperature(highc, "C"), temperature(lowc, "C")
    )
    nc.variables["prcp"][tslice, :, :] = cfsnc.variables["p01d"][
        tidx:, jslice, islice
    ]
    cfsnc.close()
def do_coop(ts):
    """Use COOP solar radiation data"""
    pgconn = psycopg2.connect(database='coop', host='iemdb', user='******')
    cursor = pgconn.cursor()

    cursor.execute(
        """SELECT ST_x(geom), ST_y(geom),
        coalesce(narr_srad, merra_srad) from alldata a JOIN stations t
        ON (a.station = t.id) WHERE day = %s and t.network ~* 'CLIMATE'
        and substr(id, 3, 1) != 'C' and substr(id, 3, 4) != '0000'
        """, (ts.strftime("%Y-%m-%d"), ))
    lons = []
    lats = []
    vals = []
    for row in cursor:
        if row[2] is None or row[2] < 0:
            continue
        lons.append(row[0])
        lats.append(row[1])
        vals.append(row[2])

    nn = NearestNDInterpolator((np.array(lons), np.array(lats)),
                               np.array(vals))
    xi, yi = np.meshgrid(iemre.XAXIS, iemre.YAXIS)

    nc = netCDF4.Dataset("/mesonet/data/iemre/%s_mw_daily.nc" % (ts.year, ),
                         'a')
    offset = iemre.daily_offset(ts)
    # Data above is MJ / d / m-2, we want W / m-2
    nc.variables['rsds'][offset, :, :] = nn(xi, yi) * 1000000. / 86400.
    nc.close()
def grid_day(nc, ts):
    """ """
    offset = iemre.daily_offset(ts)
    icursor.execute("""
       SELECT ST_x(s.geom) as lon, ST_y(s.geom) as lat,
       (CASE WHEN pday >= 0 then pday else null end) as precipdata,
       (CASE WHEN max_tmpf > -50 and max_tmpf < 130 then max_tmpf
            else null end) as highdata,
       (CASE WHEN min_tmpf > -50 and min_tmpf < 95 then min_tmpf
            else null end) as lowdata
       from summary_%s c, stations s WHERE day = '%s' and
       s.network in ('IA_ASOS', 'MN_ASOS', 'WI_ASOS', 'IL_ASOS', 'MO_ASOS',
        'KS_ASOS', 'NE_ASOS', 'SD_ASOS', 'ND_ASOS', 'KY_ASOS', 'MI_ASOS',
        'OH_ASOS', 'AWOS') and c.iemid = s.iemid
        """ % (ts.year, ts.strftime("%Y-%m-%d")))
    if icursor.rowcount > 4:
        res = generic_gridder(icursor, 'highdata')
        nc.variables['high_tmpk'][offset] = datatypes.temperature(
            res, 'F').value('K')
        icursor.scroll(0, mode='absolute')
        res = generic_gridder(icursor, 'lowdata')
        nc.variables['low_tmpk'][offset] = datatypes.temperature(
            res, 'F').value('K')
        icursor.scroll(0, mode='absolute')
        # res = generic_gridder(icursor, 'precipdata')
        # nc.variables['p01d'][offset] = res * 25.4
    else:
        print("%s has %02i entries, FAIL" % (ts.strftime("%Y-%m-%d"),
                                             icursor.rowcount))
def do_precip12(nc, ts):
    """Compute the 24 Hour precip at 12 UTC, we do some more tricks though"""
    offset = iemre.daily_offset(ts)
    ets = datetime.datetime(ts.year, ts.month, ts.day, 12)
    ets = ets.replace(tzinfo=pytz.timezone("UTC"))
    sts = ets - datetime.timedelta(hours=24)
    offset1 = iemre.hourly_offset(sts)
    offset2 = iemre.hourly_offset(ets)
    if ts.month == 1 and ts.day == 1:
        print(("p01d_12z for %s [idx:%s] %s(%s)->%s(%s) SPECIAL"
               ) % (ts, offset, sts.strftime("%Y%m%d%H"), offset1,
                    ets.strftime("%Y%m%d%H"), offset2))
        ncfn = "/mesonet/data/iemre/%s_mw_hourly.nc" % (ets.year,)
        if not os.path.isfile(ncfn):
            print("Missing %s" % (ncfn,))
            return
        hnc = netCDF4.Dataset(ncfn)
        phour = np.sum(hnc.variables['p01m'][:offset2, :, :], 0)
        hnc.close()
        hnc = netCDF4.Dataset("/mesonet/data/iemre/%s_mw_hourly.nc" % (
            sts.year,))
        phour += np.sum(hnc.variables['p01m'][offset1:, :, :], 0)
        hnc.close()
    else:
        print(("p01d_12z for %s [idx:%s] %s(%s)->%s(%s)"
               ) % (ts, offset, sts.strftime("%Y%m%d%H"), offset1,
                    ets.strftime("%Y%m%d%H"), offset2))
        ncfn = "/mesonet/data/iemre/%s_mw_hourly.nc" % (ts.year,)
        if not os.path.isfile(ncfn):
            print("Missing %s" % (ncfn,))
            return
        hnc = netCDF4.Dataset(ncfn)
        phour = np.sum(hnc.variables['p01m'][offset1:offset2, :, :], 0)
        hnc.close()
    nc.variables['p01d_12z'][offset] = phour
def grid_day(nc, ts):
    """
    I proctor the gridding of data on an hourly basis
    @param ts Timestamp of the analysis, we'll consider a 20 minute window
    """
    cursor = COOP.cursor(cursor_factory=psycopg2.extras.DictCursor)
    offset = iemre.daily_offset(ts)
    if ts.day == 29 and ts.month == 2:
        ts = datetime.datetime(2000, 3, 1)

    sql = """SELECT * from ncdc_climate71 WHERE valid = '%s' and
             substr(station,3,4) != '0000' and substr(station,3,1) != 'C'
             """ % (ts.strftime("%Y-%m-%d"), )
    cursor.execute(sql)
    if cursor.rowcount > 4:
        res = generic_gridder(nc, cursor, 'high')
        if res is not None:
            nc.variables['tmax'][offset] = datatypes.temperature(
                res, 'F').value('C')
        cursor.scroll(0, mode='absolute')
        res = generic_gridder(nc, cursor, 'low')
        if res is not None:
            nc.variables['tmin'][offset] = datatypes.temperature(
                res, 'F').value('C')
        cursor.scroll(0, mode='absolute')
        res = generic_gridder(nc, cursor, 'precip')
        if res is not None:
            nc.variables['ppt'][offset] = datatypes.distance(
                res, 'IN').value('MM')
    else:
        print(("%s has %02i entries, FAIL") % (ts.strftime("%Y-%m-%d"),
                                               cursor.rowcount))
def estimate_precip(df, ts):
    """Estimate precipitation based on IEMRE"""
    idx = iemre.daily_offset(ts)
    nc = ncopen(iemre.get_daily_ncname(ts.year), 'r', timeout=300)
    grid12 = distance(nc.variables['p01d_12z'][idx, :, :],
                      'MM').value("IN").filled(0)
    grid00 = distance(nc.variables['p01d'][idx, :, :],
                      "MM").value("IN").filled(0)
    nc.close()

    for sid, row in df.iterrows():
        if not pd.isnull(row['precip']):
            continue
        if row['precip24_hour'] in [0, 22, 23]:
            precip = grid00[row['gridj'], row['gridi']]
        else:
            precip = grid12[row['gridj'], row['gridi']]
        # denote trace
        if precip > 0 and precip < 0.01:
            df.at[sid, 'precip'] = TRACE_VALUE
        elif precip < 0:
            df.at[sid, 'precip'] = 0
        elif np.isnan(precip) or np.ma.is_masked(precip):
            df.at[sid, 'precip'] = 0
        else:
            df.at[sid, 'precip'] = "%.2f" % (precip,)
def estimate_hilo(df, ts):
    """Estimate the High and Low Temperature based on gridded data"""
    idx = iemre.daily_offset(ts)
    nc = ncopen(iemre.get_daily_ncname(ts.year), 'r', timeout=300)
    highgrid12 = temperature(nc.variables['high_tmpk_12z'][idx, :, :],
                             'K').value('F')
    lowgrid12 = temperature(nc.variables['low_tmpk_12z'][idx, :, :],
                            'K').value('F')
    highgrid00 = temperature(nc.variables['high_tmpk'][idx, :, :],
                             'K').value('F')
    lowgrid00 = temperature(nc.variables['low_tmpk'][idx, :, :],
                            'K').value('F')
    nc.close()

    for sid, row in df.iterrows():
        if pd.isnull(row['high']):
            if row['temp24_hour'] in [0, 22, 23]:
                val = highgrid00[row['gridj'], row['gridi']]
            else:
                val = highgrid12[row['gridj'], row['gridi']]
            if sid == 'IA1402':
                print(row['temp24_hour'])
            if not np.ma.is_masked(val):
                df.at[sid, 'high'] = val

        if pd.isnull(row['low']):
            if row['temp24_hour'] in [0, 22, 23]:
                val = lowgrid00[row['gridj'], row['gridi']]
            else:
                val = lowgrid12[row['gridj'], row['gridi']]
            if not np.ma.is_masked(val):
                df.at[sid, 'low'] = val
def run(ts):
    ''' Actually do the work, please '''
    nc = netCDF4.Dataset(
        '/mesonet/data/iemre/%s_mw_mrms_daily.nc' % (ts.year, ), 'a')
    offset = iemre.daily_offset(ts)
    ncprecip = nc.variables['p01d']

    ts += datetime.timedelta(hours=24)
    gmtts = ts.astimezone(pytz.timezone("UTC"))

    fn = gmtts.strftime(("/mesonet/ARCHIVE/data/%Y/%m/%d/GIS/q2/"
                         "p24h_%Y%m%d%H00.png"))
    img = Image.open(fn)
    data = np.asarray(img)
    # data is 3500,7000 , starting at upper L
    data = np.flipud(data)
    # Anything over 254 is bad
    res = np.where(data > 254, 0, data)
    res = np.where(np.logical_and(data >= 0, data < 100), data * 0.25, res)
    res = np.where(np.logical_and(data >= 100, data < 180),
                   25. + ((data - 100) * 1.25), res)
    res = np.where(np.logical_and(data >= 180, data < 255),
                   125. + ((data - 180) * 5.), res)

    # cast the grid bounds to int so they can be used as array indices
    y1 = int((iemre.NORTH - mrms.SOUTH) * 100.0)
    y0 = int((iemre.SOUTH - mrms.SOUTH) * 100.0)
    x0 = int((iemre.WEST - mrms.WEST) * 100.0)
    x1 = int((iemre.EAST - mrms.WEST) * 100.0)
    ncprecip[offset, :, :] = res[y0:y1, x0:x1]
    nc.close()
def grid_day(nc, ts):
    """ """
    offset = iemre.daily_offset(ts)
    print(('cal hi/lo for %s [idx:%s]') % (ts, offset))
    sql = """
       SELECT ST_x(s.geom) as lon, ST_y(s.geom) as lat, s.state, s.name,
       s.id as station,
       (CASE WHEN pday >= 0 then pday else null end) as precipdata,
       (CASE WHEN max_tmpf > -50 and max_tmpf < 130 then max_tmpf
            else null end) as highdata,
       (CASE WHEN min_tmpf > -50 and min_tmpf < 95 then min_tmpf
            else null end) as lowdata
       from summary_%s c, stations s WHERE day = '%s' and
       s.network in ('IA_ASOS', 'MN_ASOS', 'WI_ASOS', 'IL_ASOS', 'MO_ASOS',
        'KS_ASOS', 'NE_ASOS', 'SD_ASOS', 'ND_ASOS', 'KY_ASOS', 'MI_ASOS',
        'OH_ASOS', 'AWOS') and c.iemid = s.iemid
        """ % (ts.year, ts.strftime("%Y-%m-%d"))
    df = read_sql(sql, pgconn)

    if len(df.index) > 4:
        res = generic_gridder(df, 'highdata')
        nc.variables['high_tmpk'][offset] = datatypes.temperature(
            res, 'F').value('K')
        res = generic_gridder(df, 'lowdata')
        nc.variables['low_tmpk'][offset] = datatypes.temperature(
            res, 'F').value('K')
    else:
        # report how many rows the dataframe had; a cursor is not used here
        print("%s has %02i entries, FAIL" % (ts.strftime("%Y-%m-%d"),
                                             len(df.index)))
def run(ts):
    """Actually do the work, please"""
    nc = ncopen(iemre.get_daily_mrms_ncname(ts.year), "a", timeout=300)
    offset = iemre.daily_offset(ts)
    ncprecip = nc.variables["p01d"]

    ts += datetime.timedelta(hours=24)
    gmtts = ts.astimezone(pytz.utc)

    fn = findfile(gmtts)
    if fn is None:
        print("merge_mrms_q2 failed to find file for time: %s" % (gmtts, ))
        return
    img = Image.open(fn)
    data = np.asarray(img)
    # data is 3500,7000 , starting at upper L
    data = np.flipud(data)
    # Anything over 254 is bad
    res = np.where(data > 254, 0, data)
    res = np.where(np.logical_and(data >= 0, data < 100), data * 0.25, res)
    res = np.where(
        np.logical_and(data >= 100, data < 180),
        25.0 + ((data - 100) * 1.25),
        res,
    )
    res = np.where(
        np.logical_and(data >= 180, data < 255),
        125.0 + ((data - 180) * 5.0),
        res,
    )

    y1 = int((iemre.NORTH - mrms.SOUTH) * 100.0)
    y0 = int((iemre.SOUTH - mrms.SOUTH) * 100.0)
    x0 = int((iemre.WEST - mrms.WEST) * 100.0)
    x1 = int((iemre.EAST - mrms.WEST) * 100.0)
    ncprecip[offset, :, :] = res[y0:y1, x0:x1]
    nc.close()
def grid_day(nc, ts):
    """
    I proctor the gridding of data on an hourly basis
    @param ts Timestamp of the analysis, we'll consider a 20 minute window
    """
    offset = iemre.daily_offset(ts)
    if ts.day == 29 and ts.month == 2:
        ts = datetime.datetime(2000, 3, 1)

    sql = """SELECT * from ncdc_climate71 WHERE valid = '%s' and
             substr(station,3,4) != '0000' and substr(station,3,1) != 'C'
             """ % (ts.strftime("%Y-%m-%d"), )
    cursor.execute(sql)
    if cursor.rowcount > 4:
        res = generic_gridder(cursor, 'high')
        if res is not None:
            nc.variables['high_tmpk'][offset] = datatypes.temperature(
                res, 'F').value('K')
        cursor.scroll(0, mode='absolute')
        res = generic_gridder(cursor, 'low')
        if res is not None:
            nc.variables['low_tmpk'][offset] = datatypes.temperature(
                res, 'F').value('K')
        cursor.scroll(0, mode='absolute')
        res = generic_gridder(cursor, 'precip')
        if res is not None:
            nc.variables['p01d'][offset] = res * 25.4
    else:
        print("%s has %02i entries, FAIL" % (ts.strftime("%Y-%m-%d"),
                                             cursor.rowcount))
def do_precip12(nc, ts):
    """Compute the 24 Hour precip at 12 UTC, we do some more tricks though"""
    offset = iemre.daily_offset(ts)
    ets = datetime.datetime(ts.year, ts.month, ts.day, 12)
    ets = ets.replace(tzinfo=pytz.timezone("UTC"))
    sts = ets - datetime.timedelta(hours=24)
    offset1 = iemre.hourly_offset(sts)
    offset2 = iemre.hourly_offset(ets)
    if ts.month == 1 and ts.day == 1:
        print(("p01d_12z for %s [idx:%s] %s(%s)->%s(%s) SPECIAL"
               ) % (ts, offset, sts.strftime("%Y%m%d%H"), offset1,
                    ets.strftime("%Y%m%d%H"), offset2))
        hnc = netCDF4.Dataset("/mesonet/data/iemre/%s_mw_hourly.nc" % (
            ets.year, ))
        phour = np.sum(hnc.variables['p01m'][:offset2, :, :], 0)
        hnc.close()
        hnc = netCDF4.Dataset("/mesonet/data/iemre/%s_mw_hourly.nc" % (
            sts.year, ))
        phour += np.sum(hnc.variables['p01m'][offset1:, :, :], 0)
        hnc.close()
    else:
        print(("p01d_12z for %s [idx:%s] %s(%s)->%s(%s)"
               ) % (ts, offset, sts.strftime("%Y%m%d%H"), offset1,
                    ets.strftime("%Y%m%d%H"), offset2))
        hnc = netCDF4.Dataset("/mesonet/data/iemre/%s_mw_hourly.nc" % (
            ts.year, ))
        phour = np.sum(hnc.variables['p01m'][offset1:offset2, :, :], 0)
        hnc.close()
    nc.variables['p01d_12z'][offset] = phour
def run(ts):
    ''' Actually do the work, please '''
    nc = netCDF4.Dataset('/mesonet/data/iemre/%s_mw_mrms_daily.nc' % (
        ts.year,), 'a')
    offset = iemre.daily_offset(ts)
    ncprecip = nc.variables['p01d']

    ts += datetime.timedelta(hours=24)
    gmtts = ts.astimezone(pytz.timezone("UTC"))

    fn = gmtts.strftime(("/mesonet/ARCHIVE/data/%Y/%m/%d/GIS/q2/"
                         "p24h_%Y%m%d%H00.png"))
    img = Image.open(fn)
    data = np.asarray(img)
    # data is 3500,7000 , starting at upper L
    data = np.flipud(data)
    # Anything over 254 is bad
    res = np.where(data > 254, 0, data)
    res = np.where(np.logical_and(data >= 0, data < 100), data * 0.25, res)
    res = np.where(np.logical_and(data >= 100, data < 180),
                   25. + ((data - 100) * 1.25), res)
    res = np.where(np.logical_and(data >= 180, data < 255),
                   125. + ((data - 180) * 5.), res)

    # cast the grid bounds to int so they can be used as array indices
    y1 = int((iemre.NORTH - mrms.SOUTH) * 100.0)
    y0 = int((iemre.SOUTH - mrms.SOUTH) * 100.0)
    x0 = int((iemre.WEST - mrms.WEST) * 100.0)
    x1 = int((iemre.EAST - mrms.WEST) * 100.0)
    ncprecip[offset, :, :] = res[y0:y1, x0:x1]
    nc.close()
def estimate_hilo(ts):
    """Estimate the High and Low Temperature based on gridded data"""
    idx = iemre.daily_offset(ts)
    nc = netCDF4.Dataset("/mesonet/data/iemre/%s_mw_daily.nc" % (ts.year, ),
                         'r')
    highgrid12 = temperature(nc.variables['high_tmpk_12z'][idx, :, :],
                             'K').value('F')
    lowgrid12 = temperature(nc.variables['low_tmpk_12z'][idx, :, :],
                            'K').value('F')
    highgrid00 = temperature(nc.variables['high_tmpk'][idx, :, :],
                             'K').value('F')
    lowgrid00 = temperature(nc.variables['low_tmpk'][idx, :, :],
                            'K').value('F')
    nc.close()

    for sid in nt.sts.keys():
        if nt.sts[sid]['temp24_hour'] in [0, 22, 23]:
            val = highgrid00[nt.sts[sid]['gridj'], nt.sts[sid]['gridi']]
        else:
            val = highgrid12[nt.sts[sid]['gridj'], nt.sts[sid]['gridi']]
        if val > -80 and val < 140:
            nt.sts[sid]['high'] = "%.0f" % (val, )

        if nt.sts[sid]['temp24_hour'] in [0, 22, 23]:
            val = lowgrid00[nt.sts[sid]['gridj'], nt.sts[sid]['gridi']]
        else:
            val = lowgrid12[nt.sts[sid]['gridj'], nt.sts[sid]['gridi']]
        if val > -80 and val < 140:
            nt.sts[sid]['low'] = "%.0f" % (val, )
def plot_daily(date, interval, plotvar, mc, mckey):
    """ Generate the plot, please """
    opts = PLOT_OPS[plotvar]
    offset = iemre.daily_offset(date)
    nc = netCDF4.Dataset("/mesonet/data/iemre/%s_mw_daily.nc" % (date.year, ))
    data = nc.variables[opts['ncvar_daily']][offset] / 25.4  # inches
    lons = nc.variables['lon'][:]
    lats = nc.variables['lat'][:]
    extra = lons[-1] + (lons[-1] - lons[-2])
    lons = np.concatenate([lons, [extra, ]])
    extra = lats[-1] + (lats[-1] - lats[-2])
    lats = np.concatenate([lats, [extra, ]])
    x, y = np.meshgrid(lons, lats)
    nc.close()
    p = plot.MapPlot(
        sector='midwest',
        title='%s IEM Reanalysis %s [%s]' % (date.strftime("%-d %b %Y"),
                                             opts['title'], opts['units']))
    p.pcolormesh(x, y, data, opts['clevs'], units=opts['units'])
    p.postprocess(web=True, memcache=mc, memcachekey=mckey, memcacheexpire=0)
def do_hrrr(ts):
    """Convert the hourly HRRR data to IEMRE grid"""
    total = None
    xaxis = None
    yaxis = None
    for hr in range(5, 23):  # Only need 5 AM to 10 PM for solar
        utc = ts.replace(hour=hr).astimezone(pytz.timezone("UTC"))
        fn = utc.strftime(("/mesonet/ARCHIVE/data/%Y/%m/%d/model/hrrr/%H/"
                           "hrrr.t%Hz.3kmf00.grib2"))
        if not os.path.isfile(fn):
            # print 'HRRR file %s missing' % (fn,)
            continue
        grbs = pygrib.open(fn)
        try:
            if utc >= SWITCH_DATE:
                grb = grbs.select(name='Downward short-wave radiation flux')
            else:
                grb = grbs.select(parameterNumber=192)
        except ValueError:
            print('coop/hrrr_solarrad.py %s had no solar rad' % (fn,))
            continue
        if len(grb) == 0:
            print('Could not find SWDOWN in HRR %s' % (fn,))
            continue
        g = grb[0]
        if total is None:
            total = g.values
            lat1 = g['latitudeOfFirstGridPointInDegrees']
            lon1 = g['longitudeOfFirstGridPointInDegrees']
            llcrnrx, llcrnry = LCC(lon1, lat1)
            nx = g['Nx']
            ny = g['Ny']
            dx = g['DxInMetres']
            dy = g['DyInMetres']
            xaxis = llcrnrx + dx * np.arange(nx)
            yaxis = llcrnry + dy * np.arange(ny)
        else:
            total += g.values

    if total is None:
        print('coop/hrrr_solarrad.py found no HRRR data for %s' % (
            ts.strftime("%d %b %Y"), ))
        return

    # We wanna store as W m-2, so we just average out the data by hour
    total = total / 24.0

    nc = netCDF4.Dataset("/mesonet/data/iemre/%s_mw_daily.nc" % (ts.year,),
                         'a')
    offset = iemre.daily_offset(ts)
    data = nc.variables['rsds'][offset, :, :]
    for i, lon in enumerate(iemre.XAXIS):
        for j, lat in enumerate(iemre.YAXIS):
            (x, y) = LCC(lon, lat)
            i2 = np.digitize([x], xaxis)[0]
            j2 = np.digitize([y], yaxis)[0]
            data[j, i] = total[j2, i2]

    nc.variables['rsds'][offset] = data
    nc.close()
def do_coop(ts):
    """Use COOP solar radiation data"""
    pgconn = psycopg2.connect(database='coop', host='iemdb', user='******')
    cursor = pgconn.cursor()

    cursor.execute("""SELECT ST_x(geom), ST_y(geom),
        coalesce(narr_srad, merra_srad) from alldata a JOIN stations t
        ON (a.station = t.id) WHERE day = %s and t.network ~* 'CLIMATE'
        and substr(id, 3, 1) != 'C' and substr(id, 3, 4) != '0000'
        """, (ts.strftime("%Y-%m-%d"), ))
    lons = []
    lats = []
    vals = []
    for row in cursor:
        if row[2] is None or row[2] < 0:
            continue
        lons.append(row[0])
        lats.append(row[1])
        vals.append(row[2])

    nn = NearestNDInterpolator((np.array(lons), np.array(lats)),
                               np.array(vals))
    xi, yi = np.meshgrid(iemre.XAXIS, iemre.YAXIS)

    nc = netCDF4.Dataset("/mesonet/data/iemre/%s_mw_daily.nc" % (ts.year,),
                         'a')
    offset = iemre.daily_offset(ts)
    # Data above is MJ / d / m-2, we want W / m-2
    nc.variables['rsds'][offset, :, :] = nn(xi, yi) * 1000000. / 86400.
    nc.close()
def do_precip(nc, ts):
    """Compute the 6 UTC to 6 UTC precip

    We need to be careful here as the timestamp sent to this app is today,
    we are actually creating the analysis for yesterday
    """
    sts = datetime.datetime(ts.year, ts.month, ts.day, 6)
    sts = sts.replace(tzinfo=pytz.timezone("UTC"))
    ets = sts + datetime.timedelta(hours=24)
    offset = iemre.daily_offset(ts)
    offset1 = iemre.hourly_offset(sts)
    offset2 = iemre.hourly_offset(ets)
    if ts.month == 12 and ts.day == 31:
        print(("p01d for %s [idx:%s] %s(%s)->%s(%s) SPECIAL"
               ) % (ts, offset, sts.strftime("%Y%m%d%H"), offset1,
                    ets.strftime("%Y%m%d%H"), offset2))
        hnc = netCDF4.Dataset("/mesonet/data/iemre/%s_mw_hourly.nc" % (
            ets.year, ))
        phour = np.sum(hnc.variables['p01m'][:offset2, :, :], 0)
        hnc.close()
        hnc = netCDF4.Dataset("/mesonet/data/iemre/%s_mw_hourly.nc" % (
            sts.year, ))
        phour += np.sum(hnc.variables['p01m'][offset1:, :, :], 0)
        hnc.close()
    else:
        print(("p01d for %s [idx:%s] %s(%s)->%s(%s)"
               ) % (ts, offset, sts.strftime("%Y%m%d%H"), offset1,
                    ets.strftime("%Y%m%d%H"), offset2))
        hnc = netCDF4.Dataset("/mesonet/data/iemre/%s_mw_hourly.nc" % (
            sts.year, ))
        phour = np.sum(hnc.variables['p01m'][offset1:offset2, :, :], 0)
        hnc.close()
    nc.variables['p01d'][offset] = phour
def do_precip12(ts, ds):
    """Compute the 24 Hour precip at 12 UTC, we do some more tricks though"""
    offset = iemre.daily_offset(ts)
    ets = utc(ts.year, ts.month, ts.day, 12)
    sts = ets - datetime.timedelta(hours=24)
    offset1 = iemre.hourly_offset(sts)
    offset2 = iemre.hourly_offset(ets)
    if ts.month == 1 and ts.day == 1:
        if sts.year >= 1900:
            LOG.warning(
                "p01d_12z for %s [idx:%s] %s(%s)->%s(%s) SPECIAL",
                ts,
                offset,
                sts.strftime("%Y%m%d%H"),
                offset1,
                ets.strftime("%Y%m%d%H"),
                offset2,
            )
        ncfn = iemre.get_hourly_ncname(ets.year)
        if not os.path.isfile(ncfn):
            LOG.warning("Missing %s", ncfn)
            return
        with ncopen(ncfn, timeout=600) as hnc:
            phour = np.sum(hnc.variables["p01m"][:offset2, :, :], 0)
        with ncopen(iemre.get_hourly_ncname(sts.year), timeout=600) as hnc:
            phour += np.sum(hnc.variables["p01m"][offset1:, :, :], 0)
    else:
        ncfn = iemre.get_hourly_ncname(ts.year)
        if not os.path.isfile(ncfn):
            LOG.warning("Missing %s", ncfn)
            return
        with ncopen(ncfn, timeout=600) as hnc:
            phour = np.sum(hnc.variables["p01m"][offset1:offset2, :, :], 0)
    ds["p01d_12z"].values = np.where(phour < 0, 0, phour)
def plot_daily(date, interval, plotvar, mc, mckey):
    """ Generate the plot, please """
    opts = PLOT_OPS[plotvar]
    offset = iemre.daily_offset(date)
    nc = netCDF4.Dataset("/mesonet/data/iemre/%s_mw_daily.nc" % (date.year,))
    data = nc.variables[opts['ncvar_daily']][offset] / 25.4  # inches
    lons = nc.variables['lon'][:]
    lats = nc.variables['lat'][:]
    extra = lons[-1] + (lons[-1] - lons[-2])
    lons = np.concatenate([lons, [extra, ]])
    extra = lats[-1] + (lats[-1] - lats[-2])
    lats = np.concatenate([lats, [extra, ]])
    x, y = np.meshgrid(lons, lats)
    nc.close()
    p = plot.MapPlot(sector='midwest',
                     title='%s IEM Reanalysis %s [%s]' % (
                         date.strftime("%-d %b %Y"), opts['title'],
                         opts['units']))
    p.pcolormesh(x, y, data, opts['clevs'], units=opts['units'])
    p.postprocess(web=True, memcache=mc, memcachekey=mckey, memcacheexpire=0)
def do_precip(nc, ts):
    """Compute the 6 UTC to 6 UTC precip

    We need to be careful here as the timestamp sent to this app is today,
    we are actually creating the analysis for yesterday
    """
    sts = datetime.datetime(ts.year, ts.month, ts.day, 6)
    sts = sts.replace(tzinfo=pytz.timezone("UTC"))
    ets = sts + datetime.timedelta(hours=24)
    offset = iemre.daily_offset(ts)
    offset1 = iemre.hourly_offset(sts)
    offset2 = iemre.hourly_offset(ets)
    if ts.month == 12 and ts.day == 31:
        print(("p01d for %s [idx:%s] %s(%s)->%s(%s) SPECIAL"
               ) % (ts, offset, sts.strftime("%Y%m%d%H"), offset1,
                    ets.strftime("%Y%m%d%H"), offset2))
        hnc = netCDF4.Dataset("/mesonet/data/iemre/%s_mw_hourly.nc" % (
            ets.year,))
        phour = np.sum(hnc.variables['p01m'][:offset2, :, :], 0)
        hnc.close()
        hnc = netCDF4.Dataset("/mesonet/data/iemre/%s_mw_hourly.nc" % (
            sts.year,))
        phour += np.sum(hnc.variables['p01m'][offset1:, :, :], 0)
        hnc.close()
    else:
        print(("p01d for %s [idx:%s] %s(%s)->%s(%s)"
               ) % (ts, offset, sts.strftime("%Y%m%d%H"), offset1,
                    ets.strftime("%Y%m%d%H"), offset2))
        hnc = netCDF4.Dataset("/mesonet/data/iemre/%s_mw_hourly.nc" % (
            sts.year,))
        phour = np.sum(hnc.variables['p01m'][offset1:offset2, :, :], 0)
        hnc.close()
    nc.variables['p01d'][offset] = phour
def do_hrrr(ts):
    """Convert the hourly HRRR data to IEMRE grid"""
    total = None
    xaxis = None
    yaxis = None
    for hr in range(5, 23):  # Only need 5 AM to 10 PM for solar
        utc = ts.replace(hour=hr).astimezone(pytz.timezone("UTC"))
        fn = utc.strftime(("/mesonet/ARCHIVE/data/%Y/%m/%d/model/hrrr/%H/"
                           "hrrr.t%Hz.3kmf00.grib2"))
        if not os.path.isfile(fn):
            # print 'HRRR file %s missing' % (fn,)
            continue
        grbs = pygrib.open(fn)
        try:
            if utc >= SWITCH_DATE:
                grb = grbs.select(name='Downward short-wave radiation flux')
            else:
                grb = grbs.select(parameterNumber=192)
        except ValueError:
            print('coop/hrrr_solarrad.py %s had no solar rad' % (fn, ))
            continue
        if len(grb) == 0:
            print('Could not find SWDOWN in HRR %s' % (fn, ))
            continue
        g = grb[0]
        if total is None:
            total = g.values
            lat1 = g['latitudeOfFirstGridPointInDegrees']
            lon1 = g['longitudeOfFirstGridPointInDegrees']
            llcrnrx, llcrnry = LCC(lon1, lat1)
            nx = g['Nx']
            ny = g['Ny']
            dx = g['DxInMetres']
            dy = g['DyInMetres']
            xaxis = llcrnrx + dx * np.arange(nx)
            yaxis = llcrnry + dy * np.arange(ny)
        else:
            total += g.values

    if total is None:
        print('coop/hrrr_solarrad.py found no HRRR data for %s' % (
            ts.strftime("%d %b %Y"), ))
        return

    # We wanna store as W m-2, so we just average out the data by hour
    total = total / 24.0

    nc = netCDF4.Dataset("/mesonet/data/iemre/%s_mw_daily.nc" % (ts.year, ),
                         'a')
    offset = iemre.daily_offset(ts)
    data = nc.variables['rsds'][offset, :, :]
    for i, lon in enumerate(iemre.XAXIS):
        for j, lat in enumerate(iemre.YAXIS):
            (x, y) = LCC(lon, lat)
            i2 = np.digitize([x], xaxis)[0]
            j2 = np.digitize([y], yaxis)[0]
            data[j, i] = total[j2, i2]

    nc.variables['rsds'][offset] = data
    nc.close()
def grid_day(nc, ts):
    """
    I proctor the gridding of data on an hourly basis
    @param ts Timestamp of the analysis, we'll consider a 20 minute window
    """
    cursor = COOP.cursor(cursor_factory=psycopg2.extras.DictCursor)
    offset = iemre.daily_offset(ts)
    if ts.day == 29 and ts.month == 2:
        ts = datetime.datetime(2000, 3, 1)

    sql = """SELECT * from climate51 WHERE valid = '%s' and
             substr(station,3,4) != '0000' and substr(station,3,1) != 'C'
             """ % (ts.strftime("%Y-%m-%d"), )
    cursor.execute(sql)
    res = generic_gridder(nc, cursor, "high")
    nc.variables["high_tmpk"][offset] = datatypes.temperature(
        res, "F").value("K")
    cursor.scroll(0, mode="absolute")
    res = generic_gridder(nc, cursor, "low")
    nc.variables["low_tmpk"][offset] = datatypes.temperature(
        res, "F").value("K")
    cursor.scroll(0, mode="absolute")
    res = generic_gridder(nc, cursor, "precip")
    nc.variables["p01d"][offset] = datatypes.distance(res, "IN").value("MM")
    cursor.scroll(0, mode="absolute")
    res = generic_gridder(nc, cursor, "gdd50")
    nc.variables["gdd50"][offset] = res
def merge(nc, valid, gribname, vname):
    """Merge in the grib data"""
    fn = valid.strftime(("/mesonet/ARCHIVE/data/%Y/%m/%d/model/cfs/%H/" +
                         gribname + ".01.%Y%m%d%H.daily.grib2"))
    if not os.path.isfile(fn):
        print("cfs2iemre missing %s, abort" % (fn, ))
        sys.exit()
    grbs = pygrib.open(fn)
    lats = None
    lons = None
    xi, yi = np.meshgrid(iemre.XAXIS, iemre.YAXIS)
    for grib in tqdm(grbs, total=grbs.messages, desc=vname,
                     disable=not sys.stdout.isatty()):
        ftime = valid + datetime.timedelta(hours=grib.forecastTime)
        # move us safely back to get into the proper date
        cst = ftime - datetime.timedelta(hours=7)
        if cst.year != valid.year:
            continue
        if lats is None:
            lats, lons = grib.latlons()
        vals = grib.values
        nn = NearestNDInterpolator((lons.flat, lats.flat), vals.flat)
        vals = nn(xi, yi)
        tstep = iemre.daily_offset(cst.date())
        current = nc.variables[vname][tstep, :, :]
        if current.mask.all():
            current[:, :] = DEFAULTS[vname]
        nc.variables[vname][tstep, :, :] = AGGFUNC[vname](current, vals)
def do_precip12(ts):
    """Compute the 24 Hour precip at 12 UTC, we do some more tricks though"""
    offset = iemre.daily_offset(ts)
    ets = utc(ts.year, ts.month, ts.day, 12)
    sts = ets - datetime.timedelta(hours=24)
    offset1 = iemre.hourly_offset(sts)
    offset2 = iemre.hourly_offset(ets)
    if ts.month == 1 and ts.day == 1:
        if sts.year >= 1900:
            print(("p01d_12z for %s [idx:%s] %s(%s)->%s(%s) SPECIAL"
                   ) % (ts, offset, sts.strftime("%Y%m%d%H"), offset1,
                        ets.strftime("%Y%m%d%H"), offset2))
        ncfn = iemre.get_hourly_ncname(ets.year)
        if not os.path.isfile(ncfn):
            print("Missing %s" % (ncfn, ))
            return
        hnc = ncopen(ncfn, timeout=600)
        phour = np.sum(hnc.variables['p01m'][:offset2, :, :], 0)
        hnc.close()
        hnc = ncopen(iemre.get_hourly_ncname(sts.year), timeout=600)
        phour += np.sum(hnc.variables['p01m'][offset1:, :, :], 0)
        hnc.close()
    else:
        ncfn = iemre.get_hourly_ncname(ts.year)
        if not os.path.isfile(ncfn):
            print("Missing %s" % (ncfn, ))
            return
        hnc = ncopen(ncfn, timeout=600)
        phour = np.sum(hnc.variables['p01m'][offset1:offset2, :, :], 0)
        hnc.close()
    write_grid(ts, 'p01d_12z', np.where(phour < 0, 0, phour))
def do_var(varname):
    """
    Run our estimator for a given variable
    """
    currentnc = None
    sql = """select day, station from alldata_%s WHERE %s is null
        and day >= '1893-01-01' ORDER by day ASC""" % (state.lower(), varname)
    ccursor.execute(sql)
    for row in ccursor:
        day = row[0]
        station = row[1]
        if station not in nt.sts:
            continue
        sql = """
            SELECT station, %s from alldata_%s WHERE %s is not NULL and
            station in %s and day = '%s'
            """ % (varname, state, varname, tuple(friends[station]), day)
        ccursor2.execute(sql)
        weight = []
        value = []
        for row2 in ccursor2:
            idx = friends[station].index(row2[0])
            weight.append(weights[station][idx])
            value.append(row2[1])

        if len(weight) < 3:
            # Nearest neighbors failed, so lets look at our grided analysis
            # and sample from it
            if currentnc is None or currentnc.title.find(str(day.year)) == -1:
                currentnc = netCDF4.Dataset(("/mesonet/data/iemre/"
                                             "%s_mw_daily.nc") % (day.year, ))
            tidx = iemre.daily_offset(
                datetime.datetime(day.year, day.month, day.day))
            iidx, jidx = iemre.find_ij(nt.sts[station]['lon'],
                                       nt.sts[station]['lat'])
            iemreval = currentnc.variables[vnameconv[varname]][tidx, jidx,
                                                               iidx]
            if varname in ('high', 'low'):
                interp = temperature(iemreval, 'K').value('F')
            else:
                # precip is stored in mm, convert to inches (25.4 mm per inch)
                interp = iemreval / 25.4
            print('--> Neighbor failure, %s %s %s' % (station, day, varname))
        else:
            mass = sum(weight)
            interp = np.sum(np.array(weight) * np.array(value) / mass)

        dataformat = '%.2f'
        if varname in ['high', 'low']:
            dataformat = '%.0f'
        print(('Set station: %s day: %s varname: %s value: %s'
               ) % (station, day, varname, dataformat % (interp, )))
        sql = """
            UPDATE alldata_%s SET estimated = true, %s = %s WHERE
            station = '%s' and day = '%s'
            """ % (state.lower(), varname, dataformat % (interp, ),
                   station, day)
        sql = sql.replace(' nan ', ' null ')
        ccursor2.execute(sql)
def doday(ts, realtime):
    """
    Create a plot of precipitation stage4 estimates for some day
    """
    lts = utc(ts.year, ts.month, ts.day, 12)
    lts = lts.astimezone(pytz.timezone("America/Chicago"))
    # make assumptions about the last valid MRMS data
    if realtime:
        # Up until :59 after of the last hour
        lts = (datetime.datetime.now() -
               datetime.timedelta(hours=1)).replace(minute=59)
    else:
        lts = lts.replace(year=ts.year, month=ts.month, day=ts.day,
                          hour=23, minute=59)

    idx = iemre.daily_offset(ts)
    ncfn = iemre.get_daily_mrms_ncname(ts.year)
    if not os.path.isfile(ncfn):
        LOG.info("File %s missing, abort.", ncfn)
        return
    with ncopen(ncfn, timeout=300) as nc:
        precip = nc.variables['p01d'][idx, :, :]
        lats = nc.variables['lat'][:]
        lons = nc.variables['lon'][:]
    subtitle = "Total between 12:00 AM and %s" % (
        lts.strftime("%I:%M %p %Z"), )
    routes = 'ac'
    if not realtime:
        routes = 'a'

    # clevs = np.arange(0, 0.25, 0.05)
    # clevs = np.append(clevs, np.arange(0.25, 3., 0.25))
    # clevs = np.append(clevs, np.arange(3., 10.0, 1))
    clevs = [
        0.01, 0.1, 0.25, 0.5, 0.75, 1, 1.5, 2, 2.5, 3, 3.5, 4, 5, 6, 8, 10
    ]

    (xx, yy) = np.meshgrid(lons, lats)
    for sector in ['iowa', 'midwest']:
        pqstr = ("plot %s %s00 %s_q2_1d.png %s_q2_1d.png png") % (
            routes, ts.strftime("%Y%m%d%H"), sector, sector)
        mp = MapPlot(title=("%s NCEP MRMS Q3 Today's Precipitation"
                            ) % (ts.strftime("%-d %b %Y"), ),
                     subtitle=subtitle, sector=sector)
        mp.pcolormesh(xx, yy, distance(precip, 'MM').value('IN'),
                      clevs, cmap=nwsprecip(), units='inch')
        if sector == 'iowa':
            mp.drawcounties()
        mp.postprocess(pqstr=pqstr, view=False)
        mp.close()
def workflow(valid):
    """Our workflow"""
    if valid.month == 1 and valid.day == 1:
        print("prism_adjust_stage4, sorry Jan 1 processing is a TODO!")
        return
    # read prism
    tidx = daily_offset(valid)
    nc = netCDF4.Dataset("/mesonet/data/prism/%s_daily.nc" % (valid.year, ),
                         'r')
    ppt = nc.variables['ppt'][tidx, :, :]
    # missing as zero
    ppt = np.where(ppt.mask, 0, ppt)
    lons = nc.variables['lon'][:]
    lats = nc.variables['lat'][:]
    nc.close()
    (lons, lats) = np.meshgrid(lons, lats)

    # Interpolate this onto the stage4 grid
    nc = netCDF4.Dataset(
        ("/mesonet/data/stage4/%s_stage4_hourly.nc") % (valid.year, ), 'a')
    p01m = nc.variables['p01m']
    p01m_status = nc.variables['p01m_status']
    s4lons = nc.variables['lon'][:]
    s4lats = nc.variables['lat'][:]
    # Values are in the hourly arrears, so start at -23 and thru current hour
    sts_tidx = hourly_offset(valid - datetime.timedelta(hours=23))
    ets_tidx = hourly_offset(valid + datetime.timedelta(hours=1))
    s4total = np.sum(p01m[sts_tidx:ets_tidx, :, :], axis=0)
    # make sure the s4total does not have zeros
    s4total = np.where(s4total < 0.001, 0.001, s4total)

    nn = NearestNDInterpolator((lons.flat, lats.flat), ppt.flat)
    prism_on_s4grid = nn(s4lons, s4lats)
    multiplier = prism_on_s4grid / s4total

    # Do the work now, we should not have to worry about the scale factor
    for tidx in range(sts_tidx, ets_tidx):
        newval = p01m[tidx, :, :] * multiplier
        p01m[tidx, :, :] = newval
        # make sure have data
        if np.ma.max(newval) > 0:
            p01m_status[tidx] = 2
        else:
            print(("prism_adjust_stage4 NOOP for time %s[idx:%s]"
                   ) % ((datetime.datetime(valid.year, 1, 1, 0) +
                         datetime.timedelta(hours=tidx)
                         ).strftime("%Y-%m-%dT%H"), tidx))

    """
    s4total_v2 = np.sum(p01m[sts_tidx:ets_tidx, :, :], axis=0)
    from pyiem.plot.geoplot import MapPlot
    import matplotlib.pyplot as plt
    mp = MapPlot(sector='iowa')
    mp.pcolormesh(s4lons, s4lats, s4total_v2, np.arange(-10, 11, 1),
                  cmap=plt.get_cmap("BrBG"))
    mp.postprocess(filename='test.png')
    mp.close()
    """

    nc.close()
def do_var(varname):
    """
    Run our estimator for a given variable
    """
    currentnc = None
    sql = """select day, station from alldata_%s WHERE %s is null
        and day >= '1893-01-01' ORDER by day ASC""" % (state.lower(), varname)
    ccursor.execute(sql)
    for row in ccursor:
        day = row[0]
        station = row[1]
        if station not in nt.sts:
            continue
        sql = """
            SELECT station, %s from alldata_%s WHERE %s is not NULL and
            station in %s and day = '%s'
            """ % (varname, state, varname, tuple(friends[station]), day)
        ccursor2.execute(sql)
        weight = []
        value = []
        for row2 in ccursor2:
            idx = friends[station].index(row2[0])
            weight.append(weights[station][idx])
            value.append(row2[1])

        if len(weight) < 3:
            # Nearest neighbors failed, so lets look at our grided analysis
            # and sample from it
            if currentnc is None or currentnc.title.find(str(day.year)) == -1:
                currentnc = netCDF4.Dataset(("/mesonet/data/iemre/"
                                             "%s_mw_daily.nc") % (day.year,))
            tidx = iemre.daily_offset(datetime.datetime(day.year, day.month,
                                                        day.day))
            iidx, jidx = iemre.find_ij(nt.sts[station]['lon'],
                                       nt.sts[station]['lat'])
            iemreval = currentnc.variables[vnameconv[varname]][tidx, jidx,
                                                               iidx]
            if varname in ('high', 'low'):
                interp = temperature(iemreval, 'K').value('F')
            else:
                interp = distance(iemreval, 'MM').value('IN')
            print('--> Neighbor failure, %s %s %s' % (station, day, varname))
        else:
            mass = sum(weight)
            interp = np.sum(np.array(weight) * np.array(value) / mass)

        dataformat = '%.2f'
        if varname in ['high', 'low']:
            dataformat = '%.0f'
        print(('Set station: %s day: %s varname: %s value: %s'
               ) % (station, day, varname, dataformat % (interp,)))
        sql = """
            UPDATE alldata_%s SET estimated = true, %s = %s WHERE
            station = '%s' and day = '%s'
            """ % (state.lower(), varname, dataformat % (interp,),
                   station, day)
        sql = sql.replace(' nan ', ' null ')
        ccursor2.execute(sql)
def do_process(valid, fn):
    """Process this file, please """
    # shape of data is (1, 621, 1405)
    data = rasterio.open(fn).read()
    varname = fn.split("_")[1]
    idx = daily_offset(valid)
    with ncopen("/mesonet/data/prism/%s_daily.nc" % (valid.year, ),
                'a') as nc:
        nc.variables[varname][idx] = np.flipud(data[0])
def do_process(valid, fn):
    """Process this file, please """
    # shape of data is (1, 621, 1405)
    data = rasterio.open(fn).read()
    varname = fn.split("_")[1]
    nc = netCDF4.Dataset("/mesonet/data/prism/%s_daily.nc" % (valid.year,),
                         'a')
    idx = daily_offset(valid)
    nc.variables[varname][idx] = np.flipud(data[0])
    nc.close()
def doday(ts, realtime):
    """
    Create a plot of precipitation stage4 estimates for some day
    """
    lts = datetime.datetime.utcnow().replace(tzinfo=pytz.timezone("UTC"))
    lts = lts.astimezone(pytz.timezone("America/Chicago"))
    # make assumptions about the last valid MRMS data
    if realtime:
        # Up until :59 after of the last hour
        lts = (lts - datetime.timedelta(hours=1)).replace(minute=59)
    else:
        lts = lts.replace(year=ts.year, month=ts.month, day=ts.day,
                          hour=23, minute=59)

    idx = iemre.daily_offset(ts)
    ncfn = "/mesonet/data/iemre/%s_mw_mrms_daily.nc" % (ts.year, )
    nc = netCDF4.Dataset(ncfn)
    precip = nc.variables['p01d'][idx, :, :]
    lats = nc.variables['lat'][:]
    lons = nc.variables['lon'][:]
    subtitle = "Total between 12:00 AM and %s" % (
        lts.strftime("%I:%M %p %Z"), )
    routes = 'ac'
    if not realtime:
        routes = 'a'

    # clevs = np.arange(0, 0.25, 0.05)
    # clevs = np.append(clevs, np.arange(0.25, 3., 0.25))
    # clevs = np.append(clevs, np.arange(3., 10.0, 1))
    clevs = [
        0.01, 0.1, 0.25, 0.5, 0.75, 1, 1.5, 2, 2.5, 3, 3.5, 4, 5, 6, 8, 10
    ]

    sector = 'iowa'
    pqstr = ("plot %s %s00 %s_q2_1d.png %s_q2_1d.png png") % (
        routes, ts.strftime("%Y%m%d%H"), sector, sector)
    m = MapPlot(title=("%s NCEP MRMS Q3 Today's Precipitation"
                       ) % (ts.strftime("%-d %b %Y"), ),
                subtitle=subtitle, sector=sector)
    (x, y) = np.meshgrid(lons, lats)
    m.pcolormesh(x, y, distance(precip, 'MM').value('IN'), clevs,
                 cmap=nwsprecip(), units='inch')
    m.drawcounties()
    m.postprocess(pqstr=pqstr, view=False)
    m.close()
def plotter(fdict):
    """ Go """
    import matplotlib
    matplotlib.use('agg')
    from pyiem.plot import MapPlot
    ptype = fdict.get('ptype', 'c')
    date = datetime.datetime.strptime(fdict.get('date', '2015-01-01'),
                                      '%Y-%m-%d')
    varname = fdict.get('var', 'rsds')

    idx0 = iemre.daily_offset(date)
    nc = netCDF4.Dataset(("/mesonet/data/iemre/%s_mw_daily.nc"
                          ) % (date.year, ), 'r')
    lats = nc.variables['lat'][:]
    lons = nc.variables['lon'][:]
    if varname == 'rsds':
        # Value is in W m**-2, we want MJ
        data = nc.variables[varname][idx0, :, :] * 86400. / 1000000.
        units = 'MJ d-1'
        clevs = np.arange(0, 37, 3.)
        clevs[0] = 0.01
        clevstride = 1
    elif varname in ['p01d', 'p01d_12z']:
        # Value is in mm, we want inches
        data = nc.variables[varname][idx0, :, :] / 25.4
        units = 'inch'
        clevs = np.arange(0, 0.25, 0.05)
        clevs = np.append(clevs, np.arange(0.25, 3., 0.25))
        clevs = np.append(clevs, np.arange(3., 10.0, 1))
        clevs[0] = 0.01
        clevstride = 1
    elif varname in ['high_tmpk', 'low_tmpk', 'high_tmpk_12z',
                     'low_tmpk_12z']:
        # Value is in Kelvin, we want Fahrenheit
        data = temperature(nc.variables[varname][idx0, :, :],
                           'K').value('F')
        units = 'F'
        clevs = np.arange(-30, 120, 2)
        clevstride = 5
    nc.close()

    title = date.strftime("%-d %B %Y")
    m = MapPlot(sector='midwest', axisbg='white', nocaption=True,
                title='IEM Reanalysis of %s for %s' % (PDICT.get(varname),
                                                       title),
                subtitle='Data derived from various NOAA datasets'
                )
    if np.ma.is_masked(np.max(data)):
        return 'Data Unavailable'
    x, y = np.meshgrid(lons, lats)
    if ptype == 'c':
        m.contourf(x, y, data, clevs, clevstride=clevstride, units=units)
    else:
        m.pcolormesh(x, y, data, clevs, clevstride=clevstride, units=units)
    return m.fig
def do_precip(nc, ts):
    """Compute the precip totals based on the hourly analysis totals"""
    offset = iemre.daily_offset(ts)
    ets = ts.replace(hour=12, tzinfo=pytz.timezone("UTC"))
    sts = ets - datetime.timedelta(hours=24)
    offset1 = iemre.hourly_offset(sts)
    offset2 = iemre.hourly_offset(ets)
    hnc = netCDF4.Dataset("/mesonet/data/iemre/%s_mw_hourly.nc" % (ts.year,))
    phour = np.sum(hnc.variables['p01m'][offset1:offset2, :, :], 0)
    nc.variables['p01d'][offset] = phour
    hnc.close()
def main(argv):
    """Go Main Go."""
    year = int(argv[1])
    ets = min([datetime.date(year, 12, 31), datetime.date.today()])
    queue = []
    for x0 in np.arange(iemre.WEST, iemre.EAST, 5.):
        for y0 in np.arange(iemre.SOUTH, iemre.NORTH, 5.):
            queue.append([x0, y0])
    for x0, y0 in tqdm(queue, disable=not sys.stdout.isatty()):
        url = (
            "https://power.larc.nasa.gov/cgi-bin/v1/DataAccess.py?"
            "request=execute&identifier=Regional&"
            "parameters=ALLSKY_SFC_SW_DWN&"
            "startDate=%s0101&endDate=%s&userCommunity=SSE&"
            "tempAverage=DAILY&bbox=%s,%s,%s,%s&user=anonymous&"
            "outputList=NETCDF"
        ) % (year, ets.strftime("%Y%m%d"), y0, x0,
             min([y0 + 5., iemre.NORTH]) - 0.1,
             min([x0 + 5., iemre.EAST]) - 0.1)
        req = requests.get(url, timeout=60)
        js = req.json()
        if 'outputs' not in js:
            print(url)
            print(js)
            continue
        fn = js['outputs']['netcdf']
        req = requests.get(fn, timeout=60, stream=True)
        ncfn = '/tmp/power%s.nc' % (year, )
        with open(ncfn, 'wb') as fh:
            for chunk in req.iter_content(chunk_size=1024):
                if chunk:
                    fh.write(chunk)
            fh.close()
        nc = ncopen(ncfn)
        for day, _ in enumerate(nc.variables['time'][:]):
            date = datetime.date(year, 1, 1) + datetime.timedelta(days=day)
            # kwh to MJ/d  3600 * 1000 / 1e6
            data = nc.variables['ALLSKY_SFC_SW_DWN'][day, :, :] * 3.6
            # Sometimes there are missing values?
            if np.ma.is_masked(data):
                data[data.mask] = np.mean(data)
            i, j = iemre.find_ij(x0, y0)
            # resample data is 0.5, iemre is 0.125
            data = np.repeat(np.repeat(data, 4, axis=0), 4, axis=1)
            shp = np.shape(data)
            # print("i: %s j: %s shp: %s" % (i, j, shp))
            renc = ncopen(iemre.get_daily_ncname(year), 'a')
            renc.variables['power_swdn'][
                iemre.daily_offset(date),
                slice(j, j + shp[0]), slice(i, i + shp[1])
            ] = data
            renc.close()
        nc.close()
def write_grid(valid, vname, grid):
    """Write data to backend netcdf"""
    offset = iemre.daily_offset(valid)
    nc = ncopen(iemre.get_daily_ncname(valid.year), 'a', timeout=600)
    if nc is None:
        print("daily_analysis#write_grid first open attempt failed, try #2")
        nc = ncopen(iemre.get_daily_ncname(valid.year), 'a', timeout=600)
    print(("%13s [idx:%s] min: %6.2f max: %6.2f [%s]"
           ) % (vname, offset, np.nanmin(grid), np.nanmax(grid),
                nc.variables[vname].units))
    nc.variables[vname][offset] = grid
    nc.close()
def run(ts):
    """ Actually do the work, please """
    nc = netCDF4.Dataset(('/mesonet/data/iemre/%s_mw_mrms_daily.nc'
                          '') % (ts.year,), 'a')
    offset = iemre.daily_offset(ts)
    ncprecip = nc.variables['p01d']

    # We want this mrms variable to replicate the netcdf file, so the
    # origin is the southwestern corner
    ts += datetime.timedelta(hours=24)
    gmtts = ts.astimezone(pytz.timezone("UTC"))

    gribfn = gmtts.strftime(("/mnt/a4/data/%Y/%m/%d/mrms/ncep/"
                             "RadarOnly_QPE_24H/"
                             "RadarOnly_QPE_24H_00.00_%Y%m%d-%H%M00.grib2.gz"))
    if not os.path.isfile(gribfn):
        print("merge_mrms_q3.py MISSING %s" % (gribfn,))
        return

    fp = gzip.GzipFile(gribfn, 'rb')
    (_, tmpfn) = tempfile.mkstemp()
    tmpfp = open(tmpfn, 'wb')
    tmpfp.write(fp.read())
    tmpfp.close()
    grbs = pygrib.open(tmpfn)
    grb = grbs[1]
    lats, _ = grb.latlons()
    os.unlink(tmpfn)

    val = grb['values']
    # Anything less than zero, we set to zero
    val = np.where(val < 0, 0, val)

    # CAREFUL HERE!  The MRMS grid is North to South
    # set top (smallest y)
    y0 = int((lats[0, 0] - iemre.NORTH) * 100.0)
    y1 = int((lats[0, 0] - iemre.SOUTH) * 100.0)
    x0 = int((iemre.WEST - mrms.WEST) * 100.0)
    x1 = int((iemre.EAST - mrms.WEST) * 100.0)
    # print 'y0:%s y1:%s x0:%s x1:%s' % (y0, y1, x0, x1)
    ncprecip[offset, :, :] = np.flipud(val[y0:y1, x0:x1])
    # m = MapPlot(sector='midwest')
    # x, y = np.meshgrid(nc.variables['lon'][:], nc.variables['lat'][:])
    # m.pcolormesh(x, y, ncprecip[offset,:,:], range(10), latlon=True)
    # m.postprocess(filename='test.png')
    # (fig, ax) = plt.subplots()
    # ax.imshow(mrms)
    # fig.savefig('test.png')
    # (fig, ax) = plt.subplots()
    # ax.imshow(mrms[y0:y1,x0:x1])
    # fig.savefig('test2.png')
    nc.close()
def load_iemre():
    """Use IEM Reanalysis for non-precip data

    24km product is smoothed down to the 0.01 degree grid
    """
    printt("load_iemre() called")
    xaxis = np.arange(MYWEST, MYEAST, 0.01)
    yaxis = np.arange(MYSOUTH, MYNORTH, 0.01)
    xi, yi = np.meshgrid(xaxis, yaxis)

    fn = iemre.get_daily_ncname(VALID.year)
    if not os.path.isfile(fn):
        printt("Missing %s for load_solar, aborting" % (fn, ))
        sys.exit()
    with ncopen(fn) as nc:
        offset = iemre.daily_offset(VALID)
        lats = nc.variables["lat"][:]
        lons = nc.variables["lon"][:]
        lons, lats = np.meshgrid(lons, lats)

        # Storage is W m-2, we want langleys per day
        data = nc.variables["rsds"][offset, :, :] * 86400.0 / 1000000.0 * 23.9
        # Default to a value of 300 when this data is missing, for some reason
        nn = NearestNDInterpolator(
            (np.ravel(lons), np.ravel(lats)), np.ravel(data)
        )
        SOLAR[:] = iemre_bounds_check("rsds", nn(xi, yi), 0, 1000)

        data = temperature(
            nc.variables["high_tmpk"][offset, :, :], "K"
        ).value("C")
        nn = NearestNDInterpolator(
            (np.ravel(lons), np.ravel(lats)), np.ravel(data)
        )
        HIGH_TEMP[:] = iemre_bounds_check("high_tmpk", nn(xi, yi), -60, 60)

        data = temperature(
            nc.variables["low_tmpk"][offset, :, :], "K"
        ).value("C")
        nn = NearestNDInterpolator(
            (np.ravel(lons), np.ravel(lats)), np.ravel(data)
        )
        LOW_TEMP[:] = iemre_bounds_check("low_tmpk", nn(xi, yi), -60, 60)

        data = temperature(
            nc.variables["avg_dwpk"][offset, :, :], "K"
        ).value("C")
        nn = NearestNDInterpolator(
            (np.ravel(lons), np.ravel(lats)), np.ravel(data)
        )
        DEWPOINT[:] = iemre_bounds_check("avg_dwpk", nn(xi, yi), -60, 60)

        data = nc.variables["wind_speed"][offset, :, :]
        nn = NearestNDInterpolator(
            (np.ravel(lons), np.ravel(lats)), np.ravel(data)
        )
        WIND[:] = iemre_bounds_check("wind_speed", nn(xi, yi), 0, 30)

    printt("load_iemre() finished")
def do_day(valid):
    """ Process a day please """
    idx = iemre.daily_offset(valid)
    nc = netCDF4.Dataset("/mesonet/data/iemre/%s_mw_daily.nc" % (valid.year, ),
                         'r')
    high = temperature(nc.variables['high_tmpk_12z'][idx, :, :],
                       'K').value('F')
    low = temperature(nc.variables['low_tmpk_12z'][idx, :, :],
                      'K').value('F')
    precip = nc.variables['p01d_12z'][idx, :, :] / 25.4
    nc.close()

    for state in ('IA', 'NE', 'MN', 'WI', 'MI', 'OH', 'IN', 'IL', 'MO',
                  'KS', 'KY', 'ND', 'SD'):
        do_state_day(state, valid, high, low, precip)
        do_climdiv_day(state, valid, high, low, precip)
def load_iemre():
    """Use IEM Reanalysis for non-precip data

    24km product is smoothed down to the 0.01 degree grid
    """
    printt("load_iemre() called")
    xaxis = np.arange(MYWEST, MYEAST, 0.01)
    yaxis = np.arange(MYSOUTH, MYNORTH, 0.01)
    xi, yi = np.meshgrid(xaxis, yaxis)

    fn = iemre.get_daily_ncname(VALID.year)
    if not os.path.isfile(fn):
        printt("Missing %s for load_solar, aborting" % (fn,))
        sys.exit()
    nc = netCDF4.Dataset(fn, 'r')
    offset = iemre.daily_offset(VALID)
    lats = nc.variables['lat'][:]
    lons = nc.variables['lon'][:]
    lons, lats = np.meshgrid(lons, lats)

    # Storage is W m-2, we want langleys per day
    data = nc.variables['rsds'][offset, :, :] * 86400. / 1000000. * 23.9
    # Default to a value of 300 when this data is missing, for some reason
    nn = NearestNDInterpolator((np.ravel(lons), np.ravel(lats)),
                               np.ravel(data))
    SOLAR[:] = iemre_bounds_check('rsds', nn(xi, yi), 0, 1000)

    data = temperature(nc.variables['high_tmpk'][offset, :, :],
                       'K').value('C')
    nn = NearestNDInterpolator((np.ravel(lons), np.ravel(lats)),
                               np.ravel(data))
    HIGH_TEMP[:] = iemre_bounds_check('high_tmpk', nn(xi, yi), -60, 60)

    data = temperature(nc.variables['low_tmpk'][offset, :, :],
                       'K').value('C')
    nn = NearestNDInterpolator((np.ravel(lons), np.ravel(lats)),
                               np.ravel(data))
    LOW_TEMP[:] = iemre_bounds_check('low_tmpk', nn(xi, yi), -60, 60)

    data = temperature(nc.variables['avg_dwpk'][offset, :, :],
                       'K').value('C')
    nn = NearestNDInterpolator((np.ravel(lons), np.ravel(lats)),
                               np.ravel(data))
    DEWPOINT[:] = iemre_bounds_check('avg_dwpk', nn(xi, yi), -60, 60)

    data = nc.variables['wind_speed'][offset, :, :]
    nn = NearestNDInterpolator((np.ravel(lons), np.ravel(lats)),
                               np.ravel(data))
    WIND[:] = iemre_bounds_check('wind_speed', nn(xi, yi), 0, 30)

    nc.close()
    printt("load_iemre() finished")
def do_precip12(nc, ts):
    """Compute the 24 Hour precip at 12 UTC, we do some more tricks though"""
    offset = iemre.daily_offset(ts)
    ets = datetime.datetime(ts.year, ts.month, ts.day, 12)
    ets = ets.replace(tzinfo=pytz.timezone("UTC"))
    sts = ets - datetime.timedelta(hours=24)
    offset1 = iemre.hourly_offset(sts)
    offset2 = iemre.hourly_offset(ets)
    print(("p01d_12z for %s [idx:%s] %s(%s)->%s(%s)"
           ) % (ts, offset, sts.strftime("%Y%m%d%H"), offset1,
                ets.strftime("%Y%m%d%H"), offset2))
    hnc = netCDF4.Dataset("/mesonet/data/iemre/%s_mw_hourly.nc" % (ts.year,))
    phour = np.sum(hnc.variables['p01m'][offset1:offset2, :, :], 0)
    nc.variables['p01d_12z'][offset] = phour
    hnc.close()
def estimate_snow(ts):
    """Estimate the Snow based on COOP reports"""
    idx = iemre.daily_offset(ts)
    nc = netCDF4.Dataset("/mesonet/data/iemre/%s_mw_daily.nc" % (ts.year, ),
                         'r')
    snowgrid12 = nc.variables['snow_12z'][idx, :, :] / 25.4
    snowdgrid12 = nc.variables['snowd_12z'][idx, :, :] / 25.4
    nc.close()

    for sid in nt.sts.keys():
        val = snowgrid12[nt.sts[sid]['gridj'], nt.sts[sid]['gridi']]
        if val >= 0 and val < 100:
            nt.sts[sid]['snow'] = "%.1f" % (val, )
        val = snowdgrid12[nt.sts[sid]['gridj'], nt.sts[sid]['gridi']]
        if val >= 0 and val < 140:
            nt.sts[sid]['snowd'] = "%.1f" % (val, )
def doday(ts, realtime):
    """
    Create a plot of precipitation stage4 estimates for some day
    """
    lts = datetime.datetime.utcnow().replace(
        tzinfo=pytz.timezone("UTC"))
    lts = lts.astimezone(pytz.timezone("America/Chicago"))
    # make assumptions about the last valid MRMS data
    if realtime:
        # Up until :59 after of the last hour
        lts = (lts - datetime.timedelta(hours=1)).replace(minute=59)
    else:
        lts = lts.replace(year=ts.year, month=ts.month, day=ts.day,
                          hour=23, minute=59)

    idx = iemre.daily_offset(ts)
    ncfn = "/mesonet/data/iemre/%s_mw_mrms_daily.nc" % (ts.year,)
    nc = netCDF4.Dataset(ncfn)
    precip = nc.variables['p01d'][idx, :, :]
    lats = nc.variables['lat'][:]
    lons = nc.variables['lon'][:]
    subtitle = "Total between 12:00 AM and %s" % (
        lts.strftime("%I:%M %p %Z"),)
    routes = 'ac'
    if not realtime:
        routes = 'a'

    # clevs = np.arange(0, 0.25, 0.05)
    # clevs = np.append(clevs, np.arange(0.25, 3., 0.25))
    # clevs = np.append(clevs, np.arange(3., 10.0, 1))
    clevs = [0.01, 0.1, 0.25, 0.5, 0.75, 1, 1.5, 2, 2.5, 3, 3.5, 4, 5, 6,
             8, 10]

    sector = 'iowa'
    pqstr = ("plot %s %s00 %s_q2_1d.png %s_q2_1d.png png"
             ) % (routes, ts.strftime("%Y%m%d%H"), sector, sector)
    m = MapPlot(title=("%s NCEP MRMS Q3 Today's Precipitation"
                       ) % (ts.strftime("%-d %b %Y"),),
                subtitle=subtitle, sector=sector)
    (x, y) = np.meshgrid(lons, lats)
    m.pcolormesh(x, y, distance(precip, 'MM').value('IN'), clevs,
                 cmap=nwsprecip(), units='inch')
    m.drawcounties()
    m.postprocess(pqstr=pqstr, view=False)
    m.close()
def plotter(fdict):
    """ Go """
    import matplotlib
    matplotlib.use('agg')
    import matplotlib.pyplot as plt
    import matplotlib.dates as mdates
    year = int(fdict.get('year', 2014))
    threshold = float(fdict.get('threshold', 1))
    period = int(fdict.get('period', 7))
    state = fdict.get('state', 'IA')[:2]

    nc2 = netCDF4.Dataset("/mesonet/data/iemre/state_weights.nc")
    iowa = nc2.variables[state][:]
    iowapts = np.sum(np.where(iowa > 0, 1, 0))
    nc2.close()

    nc = netCDF4.Dataset('/mesonet/data/iemre/%s_mw_daily.nc' % (year, ))
    precip = nc.variables['p01d']

    now = datetime.datetime(year, 1, 1)
    now += datetime.timedelta(days=(period - 1))
    ets = datetime.datetime(year, 12, 31)
    days = []
    coverage = []
    while now < ets:
        idx = iemre.daily_offset(now)
        sevenday = np.sum(precip[(idx - period):idx, :, :], 0)
        pday = np.where(iowa > 0, sevenday[:, :], -1)
        tots = np.sum(np.where(pday >= (threshold * 25.4), 1, 0))
        days.append(now)
        coverage.append(tots / float(iowapts) * 100.0)

        now += datetime.timedelta(days=1)
    df = pd.DataFrame(dict(day=pd.Series(days),
                           coverage=pd.Series(coverage)))

    (fig, ax) = plt.subplots(1, 1)
    ax.bar(days, coverage, fc='g', ec='g')
    ax.set_title(("IEM Estimated Areal Coverage Percent of %s\n"
                  " receiving %.2f inches of rain over trailing %s day period"
                  ) % (STATES[state], threshold, period))
    ax.set_ylabel("Areal Coverage [%]")
    ax.xaxis.set_major_formatter(mdates.DateFormatter('%b\n%Y'))
    ax.grid(True)
    return fig, df
def grid_day12(nc, ts):
    """Use the COOP data for gridding"""
    offset = iemre.daily_offset(ts)
    print(('12z hi/lo for %s [idx:%s]') % (ts, offset))
    sql = """
       SELECT ST_x(s.geom) as lon, ST_y(s.geom) as lat, s.state,
       s.id as station, s.name as name,
       (CASE WHEN pday >= 0 then pday else null end) as precipdata,
       (CASE WHEN snow >= 0 then snow else null end) as snowdata,
       (CASE WHEN snowd >= 0 then snowd else null end) as snowddata,
       (CASE WHEN max_tmpf > -50 and max_tmpf < 130 then max_tmpf
            else null end) as highdata,
       (CASE WHEN min_tmpf > -50 and min_tmpf < 95 then min_tmpf
            else null end) as lowdata
       from summary_%s c, stations s WHERE day = '%s' and
       s.network in ('IA_COOP', 'MN_COOP', 'WI_COOP', 'IL_COOP', 'MO_COOP',
        'KS_COOP', 'NE_COOP', 'SD_COOP', 'ND_COOP', 'KY_COOP', 'MI_COOP',
        'OH_COOP') and c.iemid = s.iemid and
        extract(hour from c.coop_valid) between 4 and 11
        """ % (ts.year, ts.strftime("%Y-%m-%d"))
    df = read_sql(sql, pgconn)

    if len(df.index) > 4:
        res = generic_gridder(df, 'highdata')
        nc.variables['high_tmpk_12z'][offset] = datatypes.temperature(
            res, 'F').value('K')
        res = generic_gridder(df, 'lowdata')
        nc.variables['low_tmpk_12z'][offset] = datatypes.temperature(
            res, 'F').value('K')
        res = generic_gridder(df, 'snowdata')
        nc.variables['snow_12z'][offset] = res * 25.4
        res = generic_gridder(df, 'snowddata')
        nc.variables['snowd_12z'][offset] = res * 25.4
    else:
        print("%s has %02i entries, FAIL" % (ts.strftime("%Y-%m-%d"),
                                             len(df.index)))
def plotter(fdict):
    """ Go """
    import matplotlib
    matplotlib.use('agg')
    from pyiem.plot import MapPlot
    ptype = fdict.get('ptype', 'c')
    date = datetime.datetime.strptime(fdict.get('date', '2015-01-01'),
                                      '%Y-%m-%d')
    varname = fdict.get('var', 'rsds')

    idx0 = iemre.daily_offset(date)
    nc = netCDF4.Dataset(("/mesonet/data/iemre/%s_mw_daily.nc"
                          ) % (date.year, ), 'r')
    lats = nc.variables['lat'][:]
    lons = nc.variables['lon'][:]
    if varname == 'rsds':
        # Value is in W m**-2, we want MJ
        data = nc.variables['rsds'][idx0, :, :] * 86400. / 1000000.
        units = 'MJ d-1'
    nc.close()

    title = date.strftime("%-d %B %Y")
    m = MapPlot(sector='midwest', axisbg='white', nocaption=True,
                title='IEM Reanalysis of %s for %s' % (PDICT.get(varname),
                                                       title),
                subtitle='Data derived from various NOAA datasets'
                )
    if np.ma.is_masked(np.max(data)):
        return 'Data Unavailable'
    clevs = np.arange(0, 37, 3.)
    clevs[0] = 0.01
    x, y = np.meshgrid(lons, lats)
    if ptype == 'c':
        m.contourf(x, y, data, clevs, units=units)
    else:
        m.pcolormesh(x, y, data, clevs, units=units)
    return m.fig
def doday(ts, realtime):
    """
    Create a plot of precipitation stage4 estimates for some day
    """
    nc = netCDF4.Dataset("/mesonet/data/iemre/%s_ifc_daily.nc" % (ts.year,))
    idx = daily_offset(ts)
    xaxis = nc.variables['lon'][:]
    yaxis = nc.variables['lat'][:]
    total = nc.variables['p01d'][idx, :, :]
    nc.close()
    lastts = datetime.datetime(ts.year, ts.month, ts.day, 23, 59)
    if realtime:
        now = datetime.datetime.now() - datetime.timedelta(minutes=60)
        lastts = now.replace(minute=59)
    subtitle = "Total between 12:00 AM and %s" % (
        lastts.strftime("%I:%M %p"),)
    routes = 'ac'
    if not realtime:
        routes = 'a'

    clevs = [0.01, 0.1, 0.25, 0.5, 0.75, 1, 1.5, 2, 2.5, 3, 3.5, 4, 5, 6,
             8, 10]

    pqstr = ("plot %s %s00 iowa_ifc_1d.png iowa_ifc_1d.png png"
             ) % (routes, ts.strftime("%Y%m%d%H"))
    m = MapPlot(title=("%s Iowa Flood Center Today's Precipitation"
                       ) % (ts.strftime("%-d %b %Y"),),
                subtitle=subtitle, sector='custom',
                west=xaxis[0], east=xaxis[-1],
                south=yaxis[0], north=yaxis[-1])
    (x, y) = np.meshgrid(xaxis, yaxis)
    m.pcolormesh(x, y, distance(total, 'MM').value("IN"), clevs,
                 cmap=nwsprecip(), units='inch')
    m.drawcounties()
    m.postprocess(pqstr=pqstr, view=False)
    m.close()
def estimate_precip(ts):
    """Estimate precipitation based on IEMRE"""
    idx = iemre.daily_offset(ts)
    nc = netCDF4.Dataset("/mesonet/data/iemre/%s_mw_daily.nc" % (ts.year, ),
                         'r')
    grid12 = nc.variables['p01d_12z'][idx, :, :] / 25.4
    grid00 = nc.variables['p01d'][idx, :, :] / 25.4
    nc.close()

    for sid in nt.sts.keys():
        if nt.sts[sid]['precip24_hour'] in [0, 22, 23]:
            precip = grid00[nt.sts[sid]['gridj'], nt.sts[sid]['gridi']]
        else:
            precip = grid12[nt.sts[sid]['gridj'], nt.sts[sid]['gridi']]
        # denote trace
        if precip > 0 and precip < 0.01:
            nt.sts[sid]['precip'] = 0.0001
        elif precip < 0:
            nt.sts[sid]['precip'] = 0
        elif np.isnan(precip) or np.ma.is_masked(precip):
            nt.sts[sid]['precip'] = None
        else:
            nt.sts[sid]['precip'] = "%.2f" % (precip,)