Example #1
def workflow(ts, irealtime, justprecip):
    """Do Work"""
    # Load up our netcdf file!
    ncfn = iemre.get_daily_ncname(ts.year)
    if not os.path.isfile(ncfn):
        print("will create %s" % (ncfn, ))
        cmd = "python init_daily.py %s" % (ts.year, )
        subprocess.call(cmd, shell=True)
    nc = ncopen(ncfn, 'a', timeout=600)
    domain = nc.variables['hasdata'][:, :]
    nc.close()
    # For this date, the 12 UTC COOP obs will match the date
    if not justprecip:
        grid_day12(ts, domain)
    do_precip12(ts)
    # This is actually yesterday!
    if irealtime:
        ts -= datetime.timedelta(days=1)
    ncfn = iemre.get_daily_ncname(ts.year)
    if not os.path.isfile(ncfn):
        print("will create %s" % (ncfn, ))
        cmd = "python init_daily.py %s" % (ts.year, )
        subprocess.call(cmd, shell=True)
    if not justprecip:
        grid_day(ts, domain)
    do_precip(ts)
Example #2
def write_grid(valid, vname, grid):
    """Write data to backend netcdf"""
    offset = iemre.daily_offset(valid)
    nc = ncopen(iemre.get_daily_ncname(valid.year), 'a', timeout=600)
    if nc is None:
        print("daily_analysis#write_grid first open attempt failed, try #2")
        nc = ncopen(iemre.get_daily_ncname(valid.year), 'a', timeout=600)
    print(("%13s [idx:%s] min: %6.2f max: %6.2f [%s]") %
          (vname, offset, np.nanmin(grid), np.nanmax(grid),
           nc.variables[vname].units))
    nc.variables[vname][offset] = grid
    nc.close()
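
The time index used by write_grid comes from iemre.daily_offset. Below is a minimal sketch of that day-of-year arithmetic, assuming (as the explicit subtraction in the copy_iemre examples further down suggests) that it simply counts days since January 1 of the same year; daily_offset_sketch is a hypothetical stand-in, not the library function:

import datetime

def daily_offset_sketch(valid):
    """Days since January 1 of valid's year, used as the netcdf time index."""
    return (valid - datetime.date(valid.year, 1, 1)).days

# 2 March of a non-leap year maps to index 60 (0-based)
assert daily_offset_sketch(datetime.date(2019, 3, 2)) == 60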
Example #3
def main():
    """go Main go"""
    for yr in range(1893, 2016):
        fn = iemre.get_daily_ncname(yr)
        if not os.path.isfile(fn):
            print("Miss %s" % (fn, ))
            continue
        print(fn)
        nc = ncopen(fn, 'a', timeout=300)
        v1 = nc.createVariable('avg_dwpk', float, ('time', 'lat', 'lon'),
                               fill_value=1.e20)
        v1.units = 'K'
        v1.long_name = '2m Average Dew Point Temperature'
        v1.standard_name = 'Dewpoint'
        v1.coordinates = "lon lat"
        v1.description = ("Dew Point average computed "
                          "by averaging mixing ratios")

        v2 = nc.createVariable('wind_speed', float, ('time', 'lat', 'lon'),
                               fill_value=1.e20)
        v2.units = 'm s-1'
        v2.long_name = 'Wind Speed'
        v2.standard_name = 'Wind Speed'
        v2.coordinates = "lon lat"
        v2.description = "Daily averaged wind speed magnitude"

        nc.sync()
        nc.close()
Example #4
def estimate_hilo(df, ts):
    """Estimate the High and Low Temperature based on gridded data"""
    idx = iemre.daily_offset(ts)
    nc = ncopen(iemre.get_daily_ncname(ts.year), 'r', timeout=300)
    highgrid12 = temperature(nc.variables['high_tmpk_12z'][idx, :, :],
                             'K').value('F')
    lowgrid12 = temperature(nc.variables['low_tmpk_12z'][idx, :, :],
                            'K').value('F')
    highgrid00 = temperature(nc.variables['high_tmpk'][idx, :, :],
                             'K').value('F')
    lowgrid00 = temperature(nc.variables['low_tmpk'][idx, :, :],
                            'K').value('F')
    nc.close()

    for sid, row in df.iterrows():
        if pd.isnull(row['high']):
            if row['temp24_hour'] in [0, 22, 23]:
                val = highgrid00[row['gridj'], row['gridi']]
            else:
                val = highgrid12[row['gridj'], row['gridi']]
            if sid == 'IA1402':
                print(row['temp24_hour'])
            if not np.ma.is_masked(val):
                df.at[sid, 'high'] = val
        if pd.isnull(row['low']):
            if row['temp24_hour'] in [0, 22, 23]:
                val = lowgrid00[row['gridj'], row['gridi']]
            else:
                val = lowgrid12[row['gridj'], row['gridi']]
            if not np.ma.is_masked(val):
                df.at[sid, 'low'] = val
Example #5
def estimate_precip(df, ts):
    """Estimate precipitation based on IEMRE"""
    idx = iemre.daily_offset(ts)
    nc = ncopen(iemre.get_daily_ncname(ts.year), 'r', timeout=300)
    grid12 = distance(nc.variables['p01d_12z'][idx, :, :],
                      'MM').value("IN").filled(0)
    grid00 = distance(nc.variables['p01d'][idx, :, :],
                      "MM").value("IN").filled(0)
    nc.close()

    for sid, row in df.iterrows():
        if not pd.isnull(row['precip']):
            continue
        if row['precip24_hour'] in [0, 22, 23]:
            precip = grid00[row['gridj'], row['gridi']]
        else:
            precip = grid12[row['gridj'], row['gridi']]
        # denote trace
        if precip > 0 and precip < 0.01:
            df.at[sid, 'precip'] = TRACE_VALUE
        elif precip < 0:
            df.at[sid, 'precip'] = 0
        elif np.isnan(precip) or np.ma.is_masked(precip):
            df.at[sid, 'precip'] = 0
        else:
            df.at[sid, 'precip'] = "%.2f" % (precip,)
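
The precipitation branching above is easy to restate as a pure function. Here is a small sketch that reproduces the same outcomes; TRACE_VALUE is a hypothetical placeholder for the constant the original imports:

import numpy as np

TRACE_VALUE = 0.0001  # hypothetical placeholder; the real constant is imported


def precip_to_report(precip_in):
    """Return the value estimate_precip would store for one station."""
    if np.isnan(precip_in) or np.ma.is_masked(precip_in):
        return 0
    if 0 < precip_in < 0.01:
        return TRACE_VALUE  # denote trace
    if precip_in < 0:
        return 0
    return float("%.2f" % (precip_in, ))


print([precip_to_report(v) for v in (-1.0, 0.004, 0.126, float("nan"))])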
Example #6
def do_coop(ts):
    """Use COOP solar radiation data"""
    pgconn = get_dbconn('coop', user='******')
    cursor = pgconn.cursor()

    cursor.execute(
        """SELECT ST_x(geom), ST_y(geom),
        coalesce(narr_srad, merra_srad) from alldata a JOIN stations t
        ON (a.station = t.id) WHERE
        day = %s and t.network ~* 'CLIMATE' and substr(id, 3, 1) != 'C'
        and substr(id, 3, 4) != '0000'
    """, (ts.strftime("%Y-%m-%d"), ))
    lons = []
    lats = []
    vals = []
    for row in cursor:
        if row[2] is None or row[2] < 0:
            continue
        lons.append(row[0])
        lats.append(row[1])
        vals.append(row[2])

    nn = NearestNDInterpolator((np.array(lons), np.array(lats)),
                               np.array(vals))
    xi, yi = np.meshgrid(iemre.XAXIS, iemre.YAXIS)

    nc = ncopen(iemre.get_daily_ncname(ts.year), 'a', timeout=300)
    offset = iemre.daily_offset(ts)
    # Data above is MJ m-2 day-1, we want W m-2
    nc.variables['rsds'][offset, :, :] = nn(xi, yi) * 1000000. / 86400.
    nc.close()
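
The only unit handling in do_coop is the conversion from a daily shortwave total to a mean flux. A self-contained check of that arithmetic (the function name is illustrative):

def mj_per_day_to_wm2(mj):
    """Convert MJ m-2 day-1 to an average W m-2 (1 MJ = 1e6 J, 1 day = 86400 s)."""
    return mj * 1000000.0 / 86400.0

# A clear-sky summer day of ~25 MJ m-2 averages to roughly 289 W m-2
print(round(mj_per_day_to_wm2(25.0), 1))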
Example #7
def main():
    """Go Main Go"""
    ets = datetime.datetime.now() - datetime.timedelta(days=1)
    sts = datetime.datetime(ets.year, 1, 1)

    # Get the normal accumm
    with ncopen(iemre.get_dailyc_ncname()) as cnc:
        lons = cnc.variables["lon"][:]
        lats = cnc.variables["lat"][:]
        index0 = iemre.daily_offset(sts)
        index1 = iemre.daily_offset(ets)
        clprecip = np.sum(cnc.variables["p01d"][index0:index1, :, :], 0)

    with ncopen(iemre.get_daily_ncname(sts.year)) as nc:
        obprecip = np.sum(nc.variables["p01d"][index0:index1, :, :], 0)

    lons, lats = np.meshgrid(lons, lats)

    # Plot departure from normal
    mp = MapPlot(
        sector="midwest",
        title=("Precipitation Departure %s - %s") %
        (sts.strftime("%b %d %Y"), ets.strftime("%b %d %Y")),
        subtitle="based on IEM Estimates",
    )

    mp.pcolormesh(lons, lats, (obprecip - clprecip) / 25.4,
                  np.arange(-10, 10, 1))
    mp.postprocess(
        pqstr="plot c 000000000000 summary/year/stage4_diff.png bogus png")
    mp.close()

    # Plot normals
    mp = MapPlot(
        sector="midwest",
        title=("Normal Precipitation:: %s - %s") %
        (sts.strftime("%b %d %Y"), ets.strftime("%b %d %Y")),
        subtitle="based on IEM Estimates",
    )

    mp.pcolormesh(lons, lats, (clprecip) / 25.4, np.arange(0, 30, 2))
    mp.postprocess(
        pqstr="plot c 000000000000 summary/year/stage4_normals.png bogus png")
    mp.close()

    # Plot Obs
    mp = MapPlot(
        sector="midwest",
        title=("Estimated Precipitation:: %s - %s") %
        (sts.strftime("%b %d %Y"), ets.strftime("%b %d %Y")),
        subtitle="based on IEM Estimates",
    )

    mp.pcolormesh(lons, lats, (obprecip) / 25.4, np.arange(0, 30, 2))
    mp.postprocess(
        pqstr="plot c 000000000000 summary/year/stage4obs.png bogus png")
    mp.close()
Example #8
def copy_iemre(nc, fromyear, ncdate0, ncdate1, islice, jslice):
    """Copy IEMRE data from a given year to **inclusive** dates."""
    rencfn = iemre.get_daily_ncname(fromyear)
    if not os.path.isfile(rencfn):
        print("reanalysis fn %s missing" % (rencfn,))
        return
    renc = ncopen(rencfn)
    tidx0 = (ncdate0 - datetime.date(fromyear, 1, 1)).days
    tidx1 = (ncdate1 - datetime.date(fromyear, 1, 1)).days
    tslice = slice(tidx0, tidx1 + 1)
    # time steps to fill
    tsteps = (tidx1 - tidx0) + 1
    # figure out the slice
    if ncdate0.strftime("%m%d") == "0101":
        retslice = slice(0, tsteps)
    else:
        retslice = slice(0 - tsteps, None)
    # print("copy_iemre from %s filling %s steps nc: %s iemre: %s" % (
    #    fromyear, tsteps, tslice, retslice
    # ))
    highc = temperature(
        renc.variables["high_tmpk"][retslice, jslice, islice], "K"
    ).value("C")
    lowc = temperature(
        renc.variables["low_tmpk"][retslice, jslice, islice], "K"
    ).value("C")
    nc.variables["tmax"][tslice, :, :] = highc
    nc.variables["tmin"][tslice, :, :] = lowc
    nc.variables["gdd_f"][tslice, :, :] = gdd(
        temperature(highc, "C"), temperature(lowc, "C")
    )
    nc.variables["prcp"][tslice, :, :] = renc.variables["p01d"][
        retslice, jslice, islice
    ]
    for rt, nt in zip(
        list(
            range(
                retslice.start, 0 if retslice.stop is None else retslice.stop
            )
        ),
        list(range(tslice.start, tslice.stop)),
    ):
        # IEMRE power_swdn is MJ, test to see if data exists
        srad = renc.variables["power_swdn"][rt, jslice, islice]
        if srad.mask.any():
            # IEMRE rsds uses W m-2, we want MJ
            srad = (
                renc.variables["rsds"][rt, jslice, islice]
                * 86400.0
                / 1000000.0
            )
        nc.variables["srad"][nt, :, :] = srad
    renc.close()
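
The slice bookkeeping in copy_iemre can be exercised in isolation. Below is a minimal sketch of the same index selection, assuming the target file's time axis also starts on January 1 of fromyear; pick_slices is a hypothetical helper:

import datetime

def pick_slices(fromyear, date0, date1):
    """Return (target slice, reanalysis slice) for inclusive date0..date1."""
    tidx0 = (date0 - datetime.date(fromyear, 1, 1)).days
    tidx1 = (date1 - datetime.date(fromyear, 1, 1)).days
    tsteps = tidx1 - tidx0 + 1
    if date0.strftime("%m%d") == "0101":
        # copies anchored at Jan 1 read the front of the reanalysis year
        retslice = slice(0, tsteps)
    else:
        # otherwise read the trailing tsteps of it
        retslice = slice(-tsteps, None)
    return slice(tidx0, tidx1 + 1), retslice

print(pick_slices(2019, datetime.date(2019, 12, 1), datetime.date(2019, 12, 31)))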
Example #9
def main(argv):
    """Go Main Go."""
    year = int(argv[1])
    ets = min([datetime.date(year, 12, 31), datetime.date.today()])
    queue = []
    for x0 in np.arange(iemre.WEST, iemre.EAST, 5.):
        for y0 in np.arange(iemre.SOUTH, iemre.NORTH, 5.):
            queue.append([x0, y0])
    for x0, y0 in tqdm(queue, disable=not sys.stdout.isatty()):
        url = (
            "https://power.larc.nasa.gov/cgi-bin/v1/DataAccess.py?"
            "request=execute&identifier=Regional&"
            "parameters=ALLSKY_SFC_SW_DWN&"
            "startDate=%s0101&endDate=%s&userCommunity=SSE&"
            "tempAverage=DAILY&bbox=%s,%s,%s,%s&user=anonymous&"
            "outputList=NETCDF"
        ) % (year, ets.strftime("%Y%m%d"), y0, x0,
             min([y0 + 5., iemre.NORTH]) - 0.1,
             min([x0 + 5., iemre.EAST]) - 0.1)
        req = requests.get(url, timeout=60)
        js = req.json()
        if 'outputs' not in js:
            print(url)
            print(js)
            continue
        fn = js['outputs']['netcdf']
        req = requests.get(fn, timeout=60, stream=True)
        ncfn = '/tmp/power%s.nc' % (year, )
        with open(ncfn, 'wb') as fh:
            for chunk in req.iter_content(chunk_size=1024):
                if chunk:
                    fh.write(chunk)
        nc = ncopen(ncfn)
        for day, _ in enumerate(nc.variables['time'][:]):
            date = datetime.date(year, 1, 1) + datetime.timedelta(days=day)
            # kwh to MJ/d  3600 * 1000 / 1e6
            data = nc.variables['ALLSKY_SFC_SW_DWN'][day, :, :] * 3.6
            # Sometimes there are missing values?
            if np.ma.is_masked(data):
                data[data.mask] = np.mean(data)
            i, j = iemre.find_ij(x0, y0)
            # resample data is 0.5, iemre is 0.125
            data = np.repeat(np.repeat(data, 4, axis=0), 4, axis=1)
            shp = np.shape(data)
            # print("i: %s j: %s shp: %s" % (i, j, shp))
            renc = ncopen(iemre.get_daily_ncname(year), 'a')
            renc.variables['power_swdn'][
                iemre.daily_offset(date),
                slice(j, j+shp[0]), slice(i, i+shp[1])
            ] = data
            renc.close()
        nc.close()
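
The regridding step above is a plain block upsample from the 0.5 degree POWER grid to the 0.125 degree IEMRE grid. A short sketch of what np.repeat does to a coarse block (values are made up):

import numpy as np

coarse = np.array([[1.0, 2.0],
                   [3.0, 4.0]])  # 0.5 degree cells
fine = np.repeat(np.repeat(coarse, 4, axis=0), 4, axis=1)  # 0.125 degree cells
print(fine.shape)   # (8, 8): every coarse cell becomes a 4x4 block
print(fine[0, :5])  # [1. 1. 1. 1. 2.]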
Example #10
def load_iemre():
    """Use IEM Reanalysis for non-precip data

    24km product is smoothed down to the 0.01 degree grid
    """
    printt("load_iemre() called")
    xaxis = np.arange(MYWEST, MYEAST, 0.01)
    yaxis = np.arange(MYSOUTH, MYNORTH, 0.01)
    xi, yi = np.meshgrid(xaxis, yaxis)

    fn = iemre.get_daily_ncname(VALID.year)
    if not os.path.isfile(fn):
        printt("Missing %s for load_solar, aborting" % (fn, ))
        sys.exit()
    with ncopen(fn) as nc:
        offset = iemre.daily_offset(VALID)
        lats = nc.variables["lat"][:]
        lons = nc.variables["lon"][:]
        lons, lats = np.meshgrid(lons, lats)

        # Storage is W m-2, we want langleys per day
        data = nc.variables["rsds"][offset, :, :] * 86400.0 / 1000000.0 * 23.9
        # Default to a value of 300 when this data is missing, for some reason
        nn = NearestNDInterpolator((np.ravel(lons), np.ravel(lats)),
                                   np.ravel(data))
        SOLAR[:] = iemre_bounds_check("rsds", nn(xi, yi), 0, 1000)

        data = temperature(nc.variables["high_tmpk"][offset, :, :],
                           "K").value("C")
        nn = NearestNDInterpolator((np.ravel(lons), np.ravel(lats)),
                                   np.ravel(data))
        HIGH_TEMP[:] = iemre_bounds_check("high_tmpk", nn(xi, yi), -60, 60)

        data = temperature(nc.variables["low_tmpk"][offset, :, :],
                           "K").value("C")
        nn = NearestNDInterpolator((np.ravel(lons), np.ravel(lats)),
                                   np.ravel(data))
        LOW_TEMP[:] = iemre_bounds_check("low_tmpk", nn(xi, yi), -60, 60)

        data = temperature(nc.variables["avg_dwpk"][offset, :, :],
                           "K").value("C")
        nn = NearestNDInterpolator((np.ravel(lons), np.ravel(lats)),
                                   np.ravel(data))
        DEWPOINT[:] = iemre_bounds_check("avg_dwpk", nn(xi, yi), -60, 60)

        data = nc.variables["wind_speed"][offset, :, :]
        nn = NearestNDInterpolator((np.ravel(lons), np.ravel(lats)),
                                   np.ravel(data))
        WIND[:] = iemre_bounds_check("wind_speed", nn(xi, yi), 0, 30)
    printt("load_iemre() finished")
Example #11
def load_iemre():
    """Use IEM Reanalysis for non-precip data

    24km product is smoothed down to the 0.01 degree grid
    """
    printt("load_iemre() called")
    xaxis = np.arange(MYWEST, MYEAST, 0.01)
    yaxis = np.arange(MYSOUTH, MYNORTH, 0.01)
    xi, yi = np.meshgrid(xaxis, yaxis)

    fn = iemre.get_daily_ncname(VALID.year)
    if not os.path.isfile(fn):
        printt("Missing %s for load_solar, aborting" % (fn,))
        sys.exit()
    nc = netCDF4.Dataset(fn, 'r')
    offset = iemre.daily_offset(VALID)
    lats = nc.variables['lat'][:]
    lons = nc.variables['lon'][:]
    lons, lats = np.meshgrid(lons, lats)

    # Storage is W m-2, we want langleys per day
    data = nc.variables['rsds'][offset, :, :] * 86400. / 1000000. * 23.9
    # Default to a value of 300 when this data is missing, for some reason
    nn = NearestNDInterpolator((np.ravel(lons), np.ravel(lats)),
                               np.ravel(data))
    SOLAR[:] = iemre_bounds_check('rsds', nn(xi, yi), 0, 1000)

    data = temperature(nc.variables['high_tmpk'][offset, :, :], 'K').value('C')
    nn = NearestNDInterpolator((np.ravel(lons), np.ravel(lats)),
                               np.ravel(data))
    HIGH_TEMP[:] = iemre_bounds_check('high_tmpk', nn(xi, yi), -60, 60)

    data = temperature(nc.variables['low_tmpk'][offset, :, :], 'K').value('C')
    nn = NearestNDInterpolator((np.ravel(lons), np.ravel(lats)),
                               np.ravel(data))
    LOW_TEMP[:] = iemre_bounds_check('low_tmpk', nn(xi, yi), -60, 60)

    data = temperature(nc.variables['avg_dwpk'][offset, :, :], 'K').value('C')
    nn = NearestNDInterpolator((np.ravel(lons), np.ravel(lats)),
                               np.ravel(data))
    DEWPOINT[:] = iemre_bounds_check('avg_dwpk', nn(xi, yi), -60, 60)

    data = nc.variables['wind_speed'][offset, :, :]
    nn = NearestNDInterpolator((np.ravel(lons), np.ravel(lats)),
                               np.ravel(data))
    WIND[:] = iemre_bounds_check('wind_speed', nn(xi, yi), 0, 30)

    nc.close()
    printt("load_iemre() finished")
Example #12
def load_iemre():
    """Use IEM Reanalysis for non-precip data

    24km product is smoothed down to the 0.01 degree grid
    """
    printt("load_iemre() called")
    xaxis = np.arange(WEST, EAST, 0.01)
    yaxis = np.arange(SOUTH, NORTH, 0.01)
    xi, yi = np.meshgrid(xaxis, yaxis)

    fn = iemre.get_daily_ncname(VALID.year)
    if not os.path.isfile(fn):
        printt("Missing %s for load_solar, aborting" % (fn, ))
        sys.exit()
    nc = netCDF4.Dataset(fn, 'r')
    offset = iemre.daily_offset(VALID)
    lats = nc.variables['lat'][:]
    lons = nc.variables['lon'][:]
    lons, lats = np.meshgrid(lons, lats)

    # Storage is W m-2, we want langleys per day
    data = nc.variables['rsds'][offset, :, :] * 86400. / 1000000. * 23.9
    # Default to a value of 300 when this data is missing, for some reason
    nn = NearestNDInterpolator((np.ravel(lons), np.ravel(lats)),
                               np.ravel(data))
    SOLAR[:] = iemre_bounds_check('rsds', nn(xi, yi), 0, 1000)

    data = temperature(nc.variables['high_tmpk'][offset, :, :], 'K').value('C')
    nn = NearestNDInterpolator((np.ravel(lons), np.ravel(lats)),
                               np.ravel(data))
    HIGH_TEMP[:] = iemre_bounds_check('high_tmpk', nn(xi, yi), -60, 60)

    data = temperature(nc.variables['low_tmpk'][offset, :, :], 'K').value('C')
    nn = NearestNDInterpolator((np.ravel(lons), np.ravel(lats)),
                               np.ravel(data))
    LOW_TEMP[:] = iemre_bounds_check('low_tmpk', nn(xi, yi), -60, 60)

    data = temperature(nc.variables['avg_dwpk'][offset, :, :], 'K').value('C')
    nn = NearestNDInterpolator((np.ravel(lons), np.ravel(lats)),
                               np.ravel(data))
    DEWPOINT[:] = iemre_bounds_check('avg_dwpk', nn(xi, yi), -60, 60)

    data = nc.variables['wind_speed'][offset, :, :]
    nn = NearestNDInterpolator((np.ravel(lons), np.ravel(lats)),
                               np.ravel(data))
    WIND[:] = iemre_bounds_check('wind_speed', nn(xi, yi), 0, 30)

    nc.close()
    printt("load_iemre() finished")
Example #13
def estimate_snow(df, ts):
    """Estimate the Snow based on COOP reports"""
    idx = iemre.daily_offset(ts)
    nc = ncopen(iemre.get_daily_ncname(ts.year), 'r', timeout=300)
    snowgrid12 = distance(nc.variables['snow_12z'][idx, :, :],
                          'MM').value('IN').filled(0)
    snowdgrid12 = distance(nc.variables['snowd_12z'][idx, :, :],
                           'MM').value('IN').filled(0)
    nc.close()

    for sid, row in df.iterrows():
        if pd.isnull(row['snow']):
            df.at[sid, 'snow'] = snowgrid12[row['gridj'], row['gridi']]
        if pd.isnull(row['snowd']):
            df.at[sid, 'snowd'] = snowdgrid12[row['gridj'], row['gridi']]
Example #14
def main():
    """Go Main Go"""
    for year in range(1893, 2018):
        nc = ncopen(get_daily_ncname(year), 'a', timeout=600)
        gridsize = nc.dimensions['lat'].size * nc.dimensions['lon'].size
        for vname in ['p01d', 'high_tmpk', 'low_tmpk']:
            for i in range(nc.variables[vname].shape[0]):
                calday = nc.variables[vname][i, :, :]
                # Get a count of missing values
                missing = np.sum(calday.mask) / float(gridsize)
                if missing > 0.5:
                    print("%s_12z->%s %s %.2f" % (vname, vname, i, missing))
                    nc.variables[vname][i, :, :] = (
                        nc.variables[vname + '_12z'][i, :, :]
                    )

        nc.close()
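
The missing-data test in this example reduces to counting masked cells. A tiny check with a toy grid:

import numpy as np

calday = np.ma.masked_invalid([[1.0, np.nan], [np.nan, np.nan]])
missing = np.sum(calday.mask) / float(calday.size)
print(missing)  # 0.75, above the 0.5 threshold, so the _12z field would be copied in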
Example #15
def get_data(ctx):
    """Do the processing work, please"""
    pgconn = get_dbconn("postgis")
    states = gpd.GeoDataFrame.from_postgis(
        """
    SELECT the_geom, state_abbr from states where state_abbr = %s
    """,
        pgconn,
        params=(ctx["state"], ),
        index_col="state_abbr",
        geom_col="the_geom",
    )
    if states.empty:
        raise NoDataFound("No data was found.")

    with ncopen(iemre.get_daily_ncname(ctx["year"])) as nc:
        precip = nc.variables["p01d"]
        czs = CachingZonalStats(iemre.AFFINE)
        hasdata = np.zeros(
            (nc.dimensions["lat"].size, nc.dimensions["lon"].size))
        czs.gen_stats(hasdata, states["the_geom"])
        for nav in czs.gridnav:
            grid = np.ones((nav.ysz, nav.xsz))
            grid[nav.mask] = 0.0
            jslice = slice(nav.y0, nav.y0 + nav.ysz)
            islice = slice(nav.x0, nav.x0 + nav.xsz)
            hasdata[jslice, islice] = np.where(grid > 0, 1, hasdata[jslice,
                                                                    islice])
        ctx["iowa"] = np.flipud(hasdata)
        ctx["iowapts"] = float(np.sum(np.where(hasdata > 0, 1, 0)))

        now = datetime.datetime(ctx["year"], 1, 1)
        now += datetime.timedelta(days=(ctx["period"] - 1))
        ets = datetime.datetime(ctx["year"], 12, 31)
        today = datetime.datetime.now()
        if ets > today:
            ets = today - datetime.timedelta(days=1)
        ctx["days"] = []
        rows = []
        trailthres = ((ctx["trailthres"] * units("inch")).to(
            units("mm")).magnitude)
        daythres = (ctx["daythres"] * units("inch")).to(units("mm")).magnitude
        while now < ets:
            rows.append(do_date(ctx, now, precip, daythres, trailthres))
            now += datetime.timedelta(days=1)
    return pd.DataFrame(rows)
Example #16
def workflow(fn):
    """Do the copy work"""
    oldnc = netCDF4.Dataset(fn)
    newnc = netCDF4.Dataset(iemre.get_daily_ncname(fn[:4]), 'a')
    newnc.set_auto_scale(True)
    i, j = iemre.find_ij(oldnc.variables['lon'][0], oldnc.variables['lat'][0])
    jslice = slice(j, j + oldnc.dimensions['lat'].size * 2)
    islice = slice(i, i + oldnc.dimensions['lon'].size * 2)
    # print("i:%s j:%s %s %s" % (i, j, islice, jslice))
    for vname in tqdm(oldnc.variables):
        if vname in ['time', 'lat', 'lon']:
            continue
        for tstep in oldnc.variables['time'][:]:
            oldgrid = np.repeat(oldnc.variables[vname][tstep, :, :],
                                2, 0).repeat(2, 1)
            newnc.variables[vname][tstep, jslice, islice] = oldgrid
    newnc.close()
Example #17
def main(argv):
    """Go Main Go."""
    log = logger()
    if len(argv) == 6:
        valid = utc(int(argv[1]), int(argv[2]), int(argv[3]), int(argv[4]))
        ncfn = iemre.get_hourly_ncname(valid.year)
        idx = iemre.hourly_offset(valid)
    else:
        valid = datetime.date(int(argv[1]), int(argv[2]), int(argv[3]))
        ncfn = iemre.get_daily_ncname(valid.year)
        idx = iemre.daily_offset(valid)
    ds = iemre.get_grids(valid)
    with ncopen(ncfn, 'a', timeout=600) as nc:
        for vname in ds:
            if vname not in nc.variables:
                continue
            log.debug("copying database var %s to netcdf", vname)
            nc.variables[vname][idx, :, :] = ds[vname].values
Example #18
def tile_extraction(nc, valid, west, south, isnewfile):
    """Do our tile extraction"""
    # update model metadata
    nc.valid = "CFS model: %s" % (valid.strftime("%Y-%m-%dT%H:%M:%SZ"), )
    i, j = iemre.find_ij(west, south)
    islice = slice(i, i + 16)
    jslice = slice(j, j + 16)
    for year in range(1980 if isnewfile else valid.year, valid.year + 1):
        tidx0 = (datetime.date(year, 1, 1) - datetime.date(1980, 1, 1)).days
        tidx1 = (datetime.date(year + 1, 1, 1) -
                 datetime.date(1980, 1, 1)).days
        tslice = slice(tidx0, tidx1)
        ncfn = iemre.get_daily_ncname(year)
        if not os.path.isfile(ncfn):
            continue
        renc = ncopen(ncfn)
        # print("tslice: %s jslice: %s islice: %s" % (tslice, jslice, islice))
        nc.variables['tmax'][tslice, :, :] = temperature(
            renc.variables['high_tmpk'][:, jslice, islice], 'K').value('C')
        nc.variables['tmin'][tslice, :, :] = temperature(
            renc.variables['low_tmpk'][:, jslice, islice], 'K').value('C')
        nc.variables['prcp'][tslice, :, :] = (renc.variables['p01d'][:, jslice,
                                                                     islice])
        # MJ/d back to average W/m2
        nc.variables['srad'][tslice, :, :] = (renc.variables['rsds'][:, jslice,
                                                                     islice])
        renc.close()
        if year != valid.year:
            continue
        # replace CFS!
        renc = ncopen(valid.strftime("/mesonet/data/iemre/cfs_%Y%m%d.nc"))
        tidx = iemre.daily_offset(valid + datetime.timedelta(days=1))
        tslice = slice(tidx0 + tidx, tidx1)
        nc.variables['srad'][tslice, :, :] = (
            renc.variables['srad'][tidx:, jslice, islice] * 1000000. / 86400.)
        nc.variables['tmax'][tslice, :, :] = temperature(
            renc.variables['high_tmpk'][tidx:, jslice, islice], 'K').value('C')
        nc.variables['tmin'][tslice, :, :] = temperature(
            renc.variables['low_tmpk'][tidx:, jslice, islice], 'K').value('C')
        nc.variables['prcp'][tslice, :, :] = (renc.variables['p01d'][tidx:,
                                                                     jslice,
                                                                     islice])
        renc.close()
Example #19
def main(argv):
    """Do work please"""
    day = datetime.date(int(argv[1]), int(argv[2]), int(argv[3]))
    pgconn = get_dbconn('coop')
    df = read_sql("""
        SELECT a.precip, st_x(t.geom) as lon, st_y(t.geom) as lat
        from alldata a JOIN stations t ON (a.station = t.id)
        WHERE a.day = %s and t.network ~* 'CLIMATE' and
        substr(a.station,3,4) != '0000' and substr(station,3,1) != 'C'
        and precip >= 0 and precip < 50
    """,
                  pgconn,
                  params=(day, ))
    nc = ncopen(get_daily_ncname(day.year), 'a')
    res = generic_gridder(day, nc, df, 'precip')
    if res is not None:
        offset = daily_offset(day)
        nc.variables['p01d_12z'][offset] = res.to(mpunits('mm')).magnitude
    nc.close()
Example #20
def compute_hasdata(year):
    """Compute the has_data grid"""
    nc = ncopen(iemre.get_daily_ncname(year), 'a', timeout=300)
    czs = CachingZonalStats(iemre.AFFINE)
    pgconn = get_dbconn('postgis')
    states = gpd.GeoDataFrame.from_postgis("""
    SELECT the_geom, state_abbr from states
    where state_abbr not in ('AK', 'HI')
    """, pgconn, index_col='state_abbr', geom_col='the_geom')
    data = np.flipud(nc.variables['hasdata'][:, :])
    czs.gen_stats(data, states['the_geom'])
    for nav in czs.gridnav:
        grid = np.ones((nav.ysz, nav.xsz))
        grid[nav.mask] = 0.
        jslice = slice(nav.y0, nav.y0 + nav.ysz)
        islice = slice(nav.x0, nav.x0 + nav.xsz)
        data[jslice, islice] = np.where(grid > 0, 1, data[jslice, islice])
    nc.variables['hasdata'][:, :] = np.flipud(data)
    nc.close()
Example #21
def get_data(ctx):
    """Do the processing work, please"""
    pgconn = get_dbconn('postgis')
    states = gpd.GeoDataFrame.from_postgis("""
    SELECT the_geom, state_abbr from states where state_abbr = %s
    """,
                                           pgconn,
                                           params=(ctx['state'], ),
                                           index_col='state_abbr',
                                           geom_col='the_geom')

    with ncopen(iemre.get_daily_ncname(ctx['year'])) as nc:
        precip = nc.variables['p01d']
        czs = CachingZonalStats(iemre.AFFINE)
        hasdata = np.zeros(
            (nc.dimensions['lat'].size, nc.dimensions['lon'].size))
        czs.gen_stats(hasdata, states['the_geom'])
        for nav in czs.gridnav:
            grid = np.ones((nav.ysz, nav.xsz))
            grid[nav.mask] = 0.
            jslice = slice(nav.y0, nav.y0 + nav.ysz)
            islice = slice(nav.x0, nav.x0 + nav.xsz)
            hasdata[jslice, islice] = np.where(grid > 0, 1, hasdata[jslice,
                                                                    islice])
        ctx['iowa'] = np.flipud(hasdata)
        ctx['iowapts'] = float(np.sum(np.where(hasdata > 0, 1, 0)))

        now = datetime.datetime(ctx['year'], 1, 1)
        now += datetime.timedelta(days=(ctx['period'] - 1))
        ets = datetime.datetime(ctx['year'], 12, 31)
        today = datetime.datetime.now()
        if ets > today:
            ets = today - datetime.timedelta(days=1)
        ctx['days'] = []
        rows = []
        trailthres = (ctx['trailthres'] * units('inch')).to(
            units('mm')).magnitude
        daythres = (ctx['daythres'] * units('inch')).to(units('mm')).magnitude
        while now < ets:
            rows.append(do_date(ctx, now, precip, daythres, trailthres))
            now += datetime.timedelta(days=1)
    return pd.DataFrame(rows)
Example #22
def copy_iemre(nc, ncdate0, ncdate1, islice, jslice):
    """Copy IEMRE data from a given year to **inclusive** dates."""
    rencfn = iemre.get_daily_ncname(ncdate0.year)
    if not os.path.isfile(rencfn):
        LOG.info("reanalysis fn %s missing", rencfn)
        return
    with ncopen(rencfn) as renc:
        # Compute offsets for yieldfx file
        tidx0 = (ncdate0 - datetime.date(1980, 1, 1)).days
        tidx1 = (ncdate1 - datetime.date(1980, 1, 1)).days
        yfx_slice = slice(tidx0, tidx1 + 1)
        # Compute offsets for the reanalysis file
        tidx0 = (ncdate0 - datetime.date(ncdate0.year, 1, 1)).days
        tidx1 = (ncdate1 - datetime.date(ncdate0.year, 1, 1)).days
        re_slice = slice(tidx0, tidx1 + 1)

        # LOG.debug("filling nc: %s iemre: %s", yfx_slice, re_slice)
        highc = temperature(
            renc.variables["high_tmpk"][re_slice, jslice, islice],
            "K").value("C")
        lowc = temperature(
            renc.variables["low_tmpk"][re_slice, jslice, islice],
            "K").value("C")
        nc.variables["tmax"][yfx_slice, :, :] = highc
        nc.variables["tmin"][yfx_slice, :, :] = lowc
        nc.variables["gdd_f"][yfx_slice, :, :] = gdd(temperature(highc, "C"),
                                                     temperature(lowc, "C"))
        nc.variables["prcp"][yfx_slice, :, :] = renc.variables["p01d"][
            re_slice, jslice, islice]
        # Special care needed for solar radiation filling
        for rt, nt in zip(
                list(range(re_slice.start, re_slice.stop)),
                list(range(yfx_slice.start, yfx_slice.stop)),
        ):
            # IEMRE power_swdn is MJ, test to see if data exists
            srad = renc.variables["power_swdn"][rt, jslice, islice]
            if srad.mask.any():
                # IEMRE rsds uses W m-2, we want MJ
                srad = (renc.variables["rsds"][rt, jslice, islice] * 86400.0 /
                        1000000.0)
            nc.variables["srad"][nt, :, :] = srad
Example #23
def try_merra(ts):
    """Attempt to use MERRA data."""
    # Our files are UTC date based :/
    ncfn1 = ts.strftime("/mesonet/merra2/%Y/%Y%m%d.nc")
    ncfn2 = (
        ts + datetime.timedelta(days=1)
    ).strftime("/mesonet/merra2/%Y/%Y%m%d.nc")
    if not os.path.isfile(ncfn1) or not os.path.isfile(ncfn2):
        return False
    nc = ncopen(ncfn1)
    # Total up from 6z to end of file for today
    total = np.sum(nc.variables['SWGDN'][5:, :, :], axis=0)
    nc.close()
    nc = ncopen(ncfn2)
    lat1d = nc.variables['lat'][:]
    lon1d = nc.variables['lon'][:]
    # Total up to 6z
    total += np.sum(nc.variables['SWGDN'][:6, :, :], axis=0)
    nc.close()

    # We wanna store as W m-2, so we just average out the data by hour
    total = total / 24.0

    lons, lats = np.meshgrid(lon1d, lat1d)
    nn = NearestNDInterpolator(
        (lons.flatten(), lats.flatten()), total.flatten()
    )
    xi, yi = np.meshgrid(iemre.XAXIS, iemre.YAXIS)

    nc = ncopen(iemre.get_daily_ncname(ts.year), 'a', timeout=300)
    offset = iemre.daily_offset(ts)
    # Data above is W m-2
    nc.variables['rsds'][offset, :, :] = nn(xi, yi)
    nc.close()

    return True
Example #24
def plotter(fdict):
    """ Go """
    ctx = get_autoplot_context(fdict, get_description())
    ptype = ctx["ptype"]
    date = ctx["date"]
    varname = ctx["var"]
    csector = ctx["csector"]
    title = date.strftime("%-d %B %Y")
    mp = MapPlot(
        sector=("state" if len(csector) == 2 else csector),
        state=ctx["csector"],
        axisbg="white",
        nocaption=True,
        title="IEM Reanalysis of %s for %s" % (PDICT.get(varname), title),
        subtitle="Data derived from various NOAA datasets",
    )
    (west, east, south, north) = mp.ax.get_extent(ccrs.PlateCarree())
    i0, j0 = iemre.find_ij(west, south)
    i1, j1 = iemre.find_ij(east, north)
    jslice = slice(j0, j1)
    islice = slice(i0, i1)

    idx0 = iemre.daily_offset(date)
    ncfn = iemre.get_daily_ncname(date.year)
    if not os.path.isfile(ncfn):
        raise NoDataFound("No Data Found.")
    with ncopen(ncfn) as nc:
        lats = nc.variables["lat"][jslice]
        lons = nc.variables["lon"][islice]
        cmap = ctx["cmap"]
        if varname in ["rsds", "power_swdn"]:
            # Value is in W m**-2, we want MJ
            multi = (86400.0 / 1000000.0) if varname == "rsds" else 1
            data = nc.variables[varname][idx0, jslice, islice] * multi
            plot_units = "MJ d-1"
            clevs = np.arange(0, 37, 3.0)
            clevs[0] = 0.01
            clevstride = 1
        elif varname in ["wind_speed"]:
            data = (masked_array(
                nc.variables[varname][idx0, jslice, islice],
                units("meter / second"),
            ).to(units("mile / hour")).m)
            plot_units = "mph"
            clevs = np.arange(0, 41, 2)
            clevs[0] = 0.01
            clevstride = 2
        elif varname in ["p01d", "p01d_12z", "snow_12z", "snowd_12z"]:
            # Value is in mm, we want inches
            data = (masked_array(nc.variables[varname][idx0, jslice, islice],
                                 units("mm")).to(units("inch")).m)
            plot_units = "inch"
            clevs = np.arange(0, 0.25, 0.05)
            clevs = np.append(clevs, np.arange(0.25, 3.0, 0.25))
            clevs = np.append(clevs, np.arange(3.0, 10.0, 1))
            clevs[0] = 0.01
            clevstride = 1
            cmap = stretch_cmap(ctx["cmap"], clevs)
        elif varname in [
                "high_tmpk",
                "low_tmpk",
                "high_tmpk_12z",
                "low_tmpk_12z",
                "avg_dwpk",
        ]:
            # Value is in K, we want F
            data = (masked_array(nc.variables[varname][idx0, jslice, islice],
                                 units("degK")).to(units("degF")).m)
            plot_units = "F"
            clevs = np.arange(-30, 120, 5)
            clevstride = 2
        elif varname in ["range_tmpk", "range_tmpk_12z"]:
            vname1 = "high_tmpk%s" % ("_12z"
                                      if varname == "range_tmpk_12z" else "", )
            vname2 = "low_tmpk%s" % ("_12z"
                                     if varname == "range_tmpk_12z" else "", )
            d1 = nc.variables[vname1][idx0, jslice, islice]
            d2 = nc.variables[vname2][idx0, jslice, islice]
            data = (masked_array(d1, units("degK")).to(units("degF")).m -
                    masked_array(d2, units("degK")).to(units("degF")).m)
            plot_units = "F"
            clevs = np.arange(0, 61, 5)
            clevstride = 2

    if np.ma.is_masked(np.max(data)):
        raise NoDataFound("Data Unavailable")
    x, y = np.meshgrid(lons, lats)
    if ptype == "c":
        # in the case of contour, use the centroids on the grids
        mp.contourf(
            x + 0.125,
            y + 0.125,
            data,
            clevs,
            clevstride=clevstride,
            units=plot_units,
            ilabel=True,
            labelfmt="%.0f",
            cmap=cmap,
        )
    else:
        x, y = np.meshgrid(lons, lats)
        mp.pcolormesh(
            x,
            y,
            data,
            clevs,
            clevstride=clevstride,
            cmap=cmap,
            units=plot_units,
        )

    return mp.fig
Example #25
def plotter(fdict):
    """ Go """
    ctx = util.get_autoplot_context(fdict, get_description())
    ptype = ctx["ptype"]
    sdate = ctx["sdate"]
    edate = ctx["edate"]
    src = ctx["src"]
    opt = ctx["opt"]
    usdm = ctx["usdm"]
    if sdate.year != edate.year:
        raise NoDataFound("Sorry, do not support multi-year plots yet!")
    days = (edate - sdate).days
    sector = ctx["sector"]

    x0 = 0
    x1 = -1
    y0 = 0
    y1 = -1
    state = None
    if len(sector) == 2:
        state = sector
        sector = "state"

    title = compute_title(src, sdate, edate)
    if src == "mrms":
        ncfn = iemre.get_daily_mrms_ncname(sdate.year)
        clncfn = iemre.get_dailyc_mrms_ncname()
        ncvar = "p01d"
        source = "MRMS Q3"
        subtitle = "NOAA MRMS Project, GaugeCorr and RadarOnly"
    elif src == "iemre":
        ncfn = iemre.get_daily_ncname(sdate.year)
        clncfn = iemre.get_dailyc_ncname()
        ncvar = "p01d_12z"
        source = "IEM Reanalysis"
        subtitle = "IEM Reanalysis is derived from various NOAA datasets"
    else:
        ncfn = "/mesonet/data/prism/%s_daily.nc" % (sdate.year, )
        clncfn = "/mesonet/data/prism/prism_dailyc.nc"
        ncvar = "ppt"
        source = "OSU PRISM"
        subtitle = ("PRISM Climate Group, Oregon State Univ., "
                    "http://prism.oregonstate.edu, created 4 Feb 2004.")

    mp = MapPlot(
        sector=sector,
        state=state,
        axisbg="white",
        nocaption=True,
        title="%s:: %s Precip %s" % (source, title, PDICT3[opt]),
        subtitle="Data from %s" % (subtitle, ),
        titlefontsize=14,
    )

    idx0 = iemre.daily_offset(sdate)
    idx1 = iemre.daily_offset(edate) + 1
    if not os.path.isfile(ncfn):
        raise NoDataFound("No data for that year, sorry.")
    with util.ncopen(ncfn) as nc:
        if state is not None:
            x0, y0, x1, y1 = util.grid_bounds(
                nc.variables["lon"][:],
                nc.variables["lat"][:],
                state_bounds[state],
            )
        elif sector in SECTORS:
            bnds = SECTORS[sector]
            x0, y0, x1, y1 = util.grid_bounds(
                nc.variables["lon"][:],
                nc.variables["lat"][:],
                [bnds[0], bnds[2], bnds[1], bnds[3]],
            )
        lats = nc.variables["lat"][y0:y1]
        lons = nc.variables["lon"][x0:x1]
        if sdate == edate:
            p01d = mm2inch(nc.variables[ncvar][idx0, y0:y1, x0:x1])
        elif (idx1 - idx0) < 32:
            p01d = mm2inch(
                np.sum(nc.variables[ncvar][idx0:idx1, y0:y1, x0:x1], 0))
        else:
            # Too much data can overwhelm this app, need to chunk it
            for i in range(idx0, idx1, 10):
                i2 = min([i + 10, idx1])
                if idx0 == i:
                    p01d = mm2inch(
                        np.sum(nc.variables[ncvar][i:i2, y0:y1, x0:x1], 0))
                else:
                    p01d += mm2inch(
                        np.sum(nc.variables[ncvar][i:i2, y0:y1, x0:x1], 0))
    if np.ma.is_masked(np.max(p01d)):
        raise NoDataFound("Data Unavailable")
    plot_units = "inches"
    cmap = get_cmap(ctx["cmap"])
    cmap.set_bad("white")
    if opt == "dep":
        # Do departure work now
        with util.ncopen(clncfn) as nc:
            climo = mm2inch(
                np.sum(nc.variables[ncvar][idx0:idx1, y0:y1, x0:x1], 0))
        p01d = p01d - climo
        [maxv] = np.percentile(np.abs(p01d), [99])
        clevs = np.around(np.linspace(0 - maxv, maxv, 11), decimals=2)
    elif opt == "per":
        with util.ncopen(clncfn) as nc:
            climo = mm2inch(
                np.sum(nc.variables[ncvar][idx0:idx1, y0:y1, x0:x1], 0))
        p01d = p01d / climo * 100.0
        cmap.set_under("white")
        cmap.set_over("black")
        clevs = [1, 10, 25, 50, 75, 100, 125, 150, 200, 300, 500]
        plot_units = "percent"
    else:
        p01d = np.where(p01d < 0.001, np.nan, p01d)
        cmap.set_under("white")
        clevs = [0.01, 0.1, 0.3, 0.5, 1, 1.5, 2, 2.5, 3, 3.5, 4, 5, 6, 8, 10]
        if days > 6:
            clevs = [0.01, 0.3, 0.5, 1, 1.5, 2, 3, 4, 5, 6, 7, 8, 10, 15, 20]
        if days > 29:
            clevs = [0.01, 0.5, 1, 2, 3, 4, 5, 6, 8, 10, 15, 20, 25, 30, 35]
        if days > 90:
            clevs = [0.01, 1, 2, 3, 4, 5, 6, 8, 10, 15, 20, 25, 30, 35, 40]

    x2d, y2d = np.meshgrid(lons, lats)
    if ptype == "c":
        mp.contourf(x2d,
                    y2d,
                    p01d,
                    clevs,
                    cmap=cmap,
                    units=plot_units,
                    iline=False)
    else:
        res = mp.pcolormesh(x2d, y2d, p01d, clevs, cmap=cmap, units=plot_units)
        res.set_rasterized(True)
    if sector != "midwest":
        mp.drawcounties()
        mp.drawcities()
    if usdm == "yes":
        mp.draw_usdm(edate, filled=False, hatched=True)

    return mp.fig
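
The chunked accumulation near the end of this example (summing ten time steps at a time instead of loading the whole period) gives the same total as one big sum. A toy check of that pattern with random data:

import numpy as np

data = np.random.rand(365, 50, 60)  # a year of daily grids, toy values
total = np.zeros(data.shape[1:])
for i in range(0, data.shape[0], 10):  # read ten days at a time
    total += data[i:i + 10].sum(axis=0)
assert np.allclose(total, data.sum(axis=0))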
Example #26
def application(environ, start_response):
    """Go main go"""
    os.chdir("/tmp")

    form = parse_formvars(environ)
    ts0 = datetime.datetime.strptime(form.get("date0"), "%Y-%m-%d")
    ts1 = datetime.datetime.strptime(form.get("date1"), "%Y-%m-%d")
    base = int(form.get("base", 50))
    ceil = int(form.get("ceil", 86))
    # Make sure we aren't in the future
    tsend = datetime.date.today()
    if ts1.date() >= tsend:
        ts1 = tsend - datetime.timedelta(days=1)
        ts1 = datetime.datetime(ts1.year, ts1.month, ts1.day)
    fmt = form.get("format")

    offset0 = iemre.daily_offset(ts0)
    offset1 = iemre.daily_offset(ts1)

    with ncopen(iemre.get_daily_ncname(ts0.year)) as nc:

        # 2-D precipitation, inches
        precip = np.sum(nc.variables["p01d"][offset0:offset1, :, :] / 25.4,
                        axis=0)

        # GDD
        H = datatypes.temperature(nc.variables["high_tmpk"][offset0:offset1],
                                  "K").value("F")
        H = np.where(H < base, base, H)
        H = np.where(H > ceil, ceil, H)
        L = datatypes.temperature(nc.variables["low_tmpk"][offset0:offset1],
                                  "K").value("F")
        L = np.where(L < base, base, L)
        gdd = np.sum((H + L) / 2.0 - base, axis=0)

    if fmt == "json":
        # For example: 19013
        ugc = "IAC" + form.get("county")[2:]
        # Go figure out where this is!
        postgis = get_dbconn("postgis")
        pcursor = postgis.cursor()
        pcursor.execute(
            """
        SELECT ST_x(ST_Centroid(geom)), ST_y(ST_Centroid(geom)) from ugcs WHERE
        ugc = %s and end_ts is null
        """,
            (ugc, ),
        )
        row = pcursor.fetchone()
        lat = row[1]
        lon = row[0]
        (i, j) = iemre.find_ij(lon, lat)
        myGDD = gdd[j, i]
        myPrecip = precip[j, i]
        res = {"data": []}
        res["data"].append({
            "gdd": "%.0f" % (myGDD, ),
            "precip": "%.1f" % (myPrecip, ),
            "latitude": "%.4f" % (lat, ),
            "longitude": "%.4f" % (lon, ),
        })
        headers = [("Content-type", "application/json")]
        start_response("200 OK", headers)
        return [json.dumps(res).encode("ascii")]

    # Time to create the shapefiles
    basefn = "iemre_%s_%s" % (ts0.strftime("%Y%m%d"), ts1.strftime("%Y%m"))
    w = shapefile.Writer(basefn)
    w.field("GDD", "F", 10, 2)
    w.field("PREC_IN", "F", 10, 2)

    for x in iemre.XAXIS:
        for y in iemre.YAXIS:
            w.poly([[
                (x, y),
                (x, y + iemre.DY),
                (x + iemre.DX, y + iemre.DY),
                (x + iemre.DX, y),
                (x, y),
            ]])

    for i in range(len(iemre.XAXIS)):
        for j in range(len(iemre.YAXIS)):
            w.record(gdd[j, i], precip[j, i])
    w.close()
    # Create zip file, send it back to the clients
    shutil.copyfile("/opt/iem/data/gis/meta/4326.prj", "%s.prj" % (basefn, ))
    z = zipfile.ZipFile("%s.zip" % (basefn, ), "w", zipfile.ZIP_DEFLATED)
    for suffix in ["shp", "shx", "dbf", "prj"]:
        z.write("%s.%s" % (basefn, suffix))
    z.close()

    headers = [
        ("Content-type", "application/octet-stream"),
        ("Content-Disposition", "attachment; filename=%s.zip" % (basefn, )),
    ]
    start_response("200 OK", headers)
    content = open(basefn + ".zip", "rb").read()
    for suffix in ["zip", "shp", "shx", "dbf", "prj"]:
        os.unlink("%s.%s" % (basefn, suffix))

    return [content]
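
The growing degree day accumulation in this example is element-wise capping and averaging. A self-contained restatement of the same rules with made-up temperatures; gdd_sum is an illustrative name:

import numpy as np

def gdd_sum(highs_f, lows_f, base=50, ceil=86):
    """Cap highs at ceil, floor both at base, average, subtract base, sum."""
    H = np.clip(np.asarray(highs_f, dtype=float), base, ceil)
    L = np.maximum(np.asarray(lows_f, dtype=float), base)
    return np.sum((H + L) / 2.0 - base)

# three made-up days: 90/70, 60/40, 45/30 F -> 28 + 5 + 0 = 33 GDD
print(gdd_sum([90, 60, 45], [70, 40, 30]))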
Example #27
def do_day(valid):
    """ Process a day please """
    idx = iemre.daily_offset(valid)
    nc = ncopen(iemre.get_daily_ncname(valid.year), 'r', timeout=300)
    high = temperature(nc.variables['high_tmpk_12z'][idx, :, :],
                       'K').value('F')
    low = temperature(nc.variables['low_tmpk_12z'][idx, :, :], 'K').value('F')
    precip = distance(nc.variables['p01d_12z'][idx, :, :], 'MM').value("IN")
    snow = distance(nc.variables['snow_12z'][idx, :, :], 'MM').value("IN")
    snowd = distance(nc.variables['snowd_12z'][idx, :, :], 'MM').value("IN")
    nc.close()

    # build out the state mappers
    pgconn = get_dbconn('postgis')
    states = gpd.GeoDataFrame.from_postgis("""
    SELECT the_geom, state_abbr from states
    where state_abbr not in ('AK', 'HI', 'DC')
    """,
                                           pgconn,
                                           index_col='state_abbr',
                                           geom_col='the_geom')
    czs = CachingZonalStats(iemre.AFFINE)
    sthigh = czs.gen_stats(np.flipud(high), states['the_geom'])
    stlow = czs.gen_stats(np.flipud(low), states['the_geom'])
    stprecip = czs.gen_stats(np.flipud(precip), states['the_geom'])
    stsnow = czs.gen_stats(np.flipud(snow), states['the_geom'])
    stsnowd = czs.gen_stats(np.flipud(snowd), states['the_geom'])

    statedata = {}
    for i, state in enumerate(states.index.values):
        statedata[state] = dict(high=sthigh[i],
                                low=stlow[i],
                                precip=stprecip[i],
                                snow=stsnow[i],
                                snowd=stsnowd[i])
        update_database(state + "0000", valid, statedata[state])

    # build out climate division mappers
    climdiv = gpd.GeoDataFrame.from_postgis("""
    SELECT geom, iemid from climdiv
    where st_abbrv not in ('AK', 'HI', 'DC')
    """,
                                            pgconn,
                                            index_col='iemid',
                                            geom_col='geom')
    czs = CachingZonalStats(iemre.AFFINE)
    sthigh = czs.gen_stats(np.flipud(high), climdiv['geom'])
    stlow = czs.gen_stats(np.flipud(low), climdiv['geom'])
    stprecip = czs.gen_stats(np.flipud(precip), climdiv['geom'])
    stsnow = czs.gen_stats(np.flipud(snow), climdiv['geom'])
    stsnowd = czs.gen_stats(np.flipud(snowd), climdiv['geom'])

    for i, iemid in enumerate(climdiv.index.values):
        row = dict(high=sthigh[i],
                   low=stlow[i],
                   precip=stprecip[i],
                   snow=stsnow[i],
                   snowd=stsnowd[i])
        # we must have temperature data
        if row['high'] is np.ma.masked or row['low'] is np.ma.masked:
            print(
                ("compute_0000 %s has missing temperature data, using state") %
                (iemid, ))
            row = statedata[iemid[:2]]
        update_database(iemid, valid, row)
Example #28
def init_year(ts):
    """
    Create a new NetCDF file for a year of our specification!
    """

    fn = iemre.get_daily_ncname(ts.year)
    nc = ncopen(fn, 'w')
    nc.title = "IEM Daily Reanalysis %s" % (ts.year, )
    nc.platform = "Gridded Observations"
    nc.description = "IEM daily analysis on a 0.125 degree grid"
    nc.institution = "Iowa State University, Ames, IA, USA"
    nc.source = "Iowa Environmental Mesonet"
    nc.project_id = "IEM"
    nc.realization = 1
    nc.Conventions = 'CF-1.0'
    nc.contact = "Daryl Herzmann, [email protected], 515-294-5978"
    nc.history = ("%s Generated") % (
        datetime.datetime.now().strftime("%d %B %Y"), )
    nc.comment = "No Comment at this time"

    # Setup Dimensions
    nc.createDimension('lat', iemre.NY)
    nc.createDimension('lon', iemre.NX)
    days = ((ts.replace(year=ts.year + 1)) - ts).days
    nc.createDimension('time', int(days))

    # Setup Coordinate Variables
    lat = nc.createVariable('lat', float, ('lat', ))
    lat.units = "degrees_north"
    lat.long_name = "Latitude"
    lat.standard_name = "latitude"
    lat.axis = "Y"
    lat[:] = iemre.YAXIS

    lon = nc.createVariable('lon', float, ('lon', ))
    lon.units = "degrees_east"
    lon.long_name = "Longitude"
    lon.standard_name = "longitude"
    lon.axis = "X"
    lon[:] = iemre.XAXIS

    tm = nc.createVariable('time', float, ('time', ))
    tm.units = "Days since %s-01-01 00:00:0.0" % (ts.year, )
    tm.long_name = "Time"
    tm.standard_name = "time"
    tm.axis = "T"
    tm.calendar = "gregorian"
    tm[:] = np.arange(0, int(days))

    # Tracked variables
    hasdata = nc.createVariable('hasdata', np.int8, ('lat', 'lon'))
    hasdata.units = '1'
    hasdata.long_name = 'Analysis Available for Grid Cell'
    hasdata.coordinates = "lon lat"
    hasdata[:] = 0

    high = nc.createVariable('high_tmpk',
                             np.uint16, ('time', 'lat', 'lon'),
                             fill_value=65535)
    high.units = "K"
    high.scale_factor = 0.01
    high.long_name = "2m Air Temperature Daily High"
    high.standard_name = "2m Air Temperature"
    high.coordinates = "lon lat"

    low = nc.createVariable('low_tmpk',
                            np.uint16, ('time', 'lat', 'lon'),
                            fill_value=65535)
    low.units = "K"
    low.scale_factor = 0.01
    low.long_name = "2m Air Temperature Daily Low"
    low.standard_name = "2m Air Temperature"
    low.coordinates = "lon lat"

    high12 = nc.createVariable('high_tmpk_12z',
                               np.uint16, ('time', 'lat', 'lon'),
                               fill_value=65535)
    high12.units = "K"
    high12.scale_factor = 0.01
    high12.long_name = "2m Air Temperature 24 Hour Max at 12 UTC"
    high12.standard_name = "2m Air Temperature"
    high12.coordinates = "lon lat"

    low12 = nc.createVariable('low_tmpk_12z',
                              np.uint16, ('time', 'lat', 'lon'),
                              fill_value=65535)
    low12.units = "K"
    low12.scale_factor = 0.01
    low12.long_name = "2m Air Temperature 12 Hour Min at 12 UTC"
    low12.standard_name = "2m Air Temperature"
    low12.coordinates = "lon lat"

    p01d = nc.createVariable('p01d',
                             np.uint16, ('time', 'lat', 'lon'),
                             fill_value=65535)
    p01d.units = 'mm'
    p01d.scale_factor = 0.01
    p01d.long_name = 'Precipitation'
    p01d.standard_name = 'Precipitation'
    p01d.coordinates = "lon lat"
    p01d.description = "Precipitation accumulation for the day"

    p01d12 = nc.createVariable('p01d_12z',
                               np.uint16, ('time', 'lat', 'lon'),
                               fill_value=65535)
    p01d12.units = 'mm'
    p01d12.scale_factor = 0.01
    p01d12.long_name = 'Precipitation'
    p01d12.standard_name = 'Precipitation'
    p01d12.coordinates = "lon lat"
    p01d12.description = "24 Hour Precipitation Ending 12 UTC"

    # 0 -> 65535  so 0 to 6553.5
    rsds = nc.createVariable('rsds',
                             np.uint16, ('time', 'lat', 'lon'),
                             fill_value=65535)
    rsds.units = "W m-2"
    rsds.scale_factor = 0.1
    rsds.long_name = 'surface_downwelling_shortwave_flux_in_air'
    rsds.standard_name = 'surface_downwelling_shortwave_flux_in_air'
    rsds.coordinates = "lon lat"
    rsds.description = "Global Shortwave Irradiance"

    snow = nc.createVariable('snow_12z',
                             np.uint16, ('time', 'lat', 'lon'),
                             fill_value=65535)
    snow.units = 'mm'
    snow.scale_factor = 0.01
    snow.long_name = 'Snowfall'
    snow.standard_name = 'Snowfall'
    snow.coordinates = "lon lat"
    snow.description = "Snowfall accumulation for the day"

    # 0 to 6553.5
    snowd = nc.createVariable('snowd_12z',
                              np.uint16, ('time', 'lat', 'lon'),
                              fill_value=65535)
    snowd.units = 'mm'
    snowd.scale_factor = 0.1
    snowd.long_name = 'Snow Depth'
    snowd.standard_name = 'Snow Depth'
    snowd.coordinates = "lon lat"
    snowd.description = "Snow depth at time of observation"

    v1 = nc.createVariable('avg_dwpk',
                           np.uint16, ('time', 'lat', 'lon'),
                           fill_value=65535)
    v1.units = 'K'
    v1.scale_factor = 0.01
    v1.long_name = '2m Average Dew Point Temperature'
    v1.standard_name = 'Dewpoint'
    v1.coordinates = "lon lat"
    v1.description = "Dew Point average computed by averaging mixing ratios"

    v2 = nc.createVariable('wind_speed',
                           np.uint16, ('time', 'lat', 'lon'),
                           fill_value=65535)
    v2.units = 'm s-1'
    v2.scale_factor = 0.001
    v2.long_name = 'Wind Speed'
    v2.standard_name = 'Wind Speed'
    v2.coordinates = "lon lat"
    v2.description = "Daily averaged wind speed magnitude"

    nc.close()
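
The uint16 storage above relies on CF-style packing: the library divides by scale_factor on write and multiplies on read, with 65535 reserved as the fill value. A rough sketch of that arithmetic for the 0.01 K temperature fields (netCDF4 normally does this transparently when auto-scaling is on):

import numpy as np

SCALE = 0.01  # high_tmpk / low_tmpk scale_factor above
FILL = 65535  # unsigned 16-bit fill value

def pack(value_k):
    """Roughly what the library stores for a Kelvin temperature."""
    return np.uint16(np.round(value_k / SCALE))

def unpack(raw):
    """Recover the physical value, treating the fill value as missing."""
    return np.nan if raw == FILL else raw * SCALE

raw = pack(302.37)
print(raw, unpack(raw))         # 30237 and ~302.37 recovered
print(unpack(np.uint16(FILL)))  # nan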
Example #29
def main():
    """Go Main Go"""
    form = cgi.FieldStorage()
    ts1 = datetime.datetime.strptime(form.getfirst("date1"), "%Y-%m-%d")
    ts2 = datetime.datetime.strptime(form.getfirst("date2"), "%Y-%m-%d")
    if ts1 > ts2:
        send_error("date1 larger than date2")
    if ts1.year != ts2.year:
        send_error("multi-year query not supported yet...")
    # Make sure we aren't in the future
    tsend = datetime.date.today()
    if ts2.date() > tsend:
        ts2 = datetime.datetime.now() - datetime.timedelta(days=1)

    lat = float(form.getfirst("lat"))
    lon = float(form.getfirst("lon"))
    if lon < iemre.WEST or lon > iemre.EAST:
        send_error("lon value outside of bounds: %s to %s" %
                   (iemre.WEST, iemre.EAST))
    if lat < iemre.SOUTH or lat > iemre.NORTH:
        send_error("lat value outside of bounds: %s to %s" %
                   (iemre.SOUTH, iemre.NORTH))
    # fmt = form["format"][0]

    i, j = iemre.find_ij(lon, lat)
    offset1 = iemre.daily_offset(ts1)
    offset2 = iemre.daily_offset(ts2) + 1

    # Get our netCDF vars
    with ncopen(iemre.get_daily_ncname(ts1.year)) as nc:
        hightemp = datatypes.temperature(
            nc.variables['high_tmpk'][offset1:offset2, j, i], 'K').value("F")
        high12temp = datatypes.temperature(
            nc.variables['high_tmpk_12z'][offset1:offset2, j, i],
            'K').value("F")
        lowtemp = datatypes.temperature(
            nc.variables['low_tmpk'][offset1:offset2, j, i], 'K').value("F")
        low12temp = datatypes.temperature(
            nc.variables['low_tmpk_12z'][offset1:offset2, j, i],
            'K').value("F")
        precip = nc.variables['p01d'][offset1:offset2, j, i] / 25.4
        precip12 = nc.variables['p01d_12z'][offset1:offset2, j, i] / 25.4

    # Get our climatology vars
    c2000 = ts1.replace(year=2000)
    coffset1 = iemre.daily_offset(c2000)
    c2000 = ts2.replace(year=2000)
    coffset2 = iemre.daily_offset(c2000) + 1
    cnc = ncopen(iemre.get_dailyc_ncname())
    chigh = datatypes.temperature(
        cnc.variables['high_tmpk'][coffset1:coffset2, j, i], 'K').value("F")
    clow = datatypes.temperature(
        cnc.variables['low_tmpk'][coffset1:coffset2, j, i], 'K').value("F")
    cprecip = cnc.variables['p01d'][coffset1:coffset2, j, i] / 25.4
    cnc.close()

    if ts1.year > 1980:
        nc = ncopen("/mesonet/data/prism/%s_daily.nc" % (ts1.year, ))
        i2, j2 = prismutil.find_ij(lon, lat)
        prism_precip = nc.variables['ppt'][offset1:offset2, j2, i2] / 25.4
        nc.close()
    else:
        prism_precip = [None] * (offset2 - offset1)

    if ts1.year > 2010:
        nc = ncopen(iemre.get_daily_mrms_ncname(ts1.year))
        j2 = int((lat - iemre.SOUTH) * 100.0)
        i2 = int((lon - iemre.WEST) * 100.0)
        mrms_precip = nc.variables['p01d'][offset1:offset2, j2, i2] / 25.4
        nc.close()
    else:
        mrms_precip = [None] * (offset2 - offset1)

    res = {
        'data': [],
    }

    for i in range(0, offset2 - offset1):
        now = ts1 + datetime.timedelta(days=i)
        res['data'].append({
            'date': now.strftime("%Y-%m-%d"),
            'mrms_precip_in': clean(mrms_precip[i]),
            'prism_precip_in': clean(prism_precip[i]),
            'daily_high_f': clean(hightemp[i]),
            '12z_high_f': clean(high12temp[i]),
            'climate_daily_high_f': clean(chigh[i]),
            'daily_low_f': clean(lowtemp[i]),
            '12z_low_f': clean(low12temp[i]),
            'climate_daily_low_f': clean(clow[i]),
            'daily_precip_in': clean(precip[i]),
            '12z_precip_in': clean(precip12[i]),
            'climate_daily_precip_in': clean(cprecip[i])
        })

    ssw('Content-type: application/json\n\n')
    ssw(json.dumps(res))
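If this CGI entry point is published at a web URL (the address below is a placeholder, not taken from the original), a client could pull a multi-day series and walk the returned 'data' list; a sketch assuming the requests package is available:

import requests

# placeholder URL; substitute the real location of the CGI script
URL = 'https://example.com/cgi-bin/iemre/multiday.py'
params = {'date1': '2018-06-01', 'date2': '2018-06-07',
          'lat': 41.99, 'lon': -93.62}
resp = requests.get(URL, params=params, timeout=30)
resp.raise_for_status()
for row in resp.json()['data']:
    print(row['date'], row['daily_high_f'], row['daily_precip_in'])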
Example #30
def plotter(fdict):
    """ Go """
    ctx = get_autoplot_context(fdict, get_description())
    year = ctx['year']
    thres = ctx['thres']
    metric = distance(thres, 'IN').value('MM')
    state = ctx['state'][:2]

    sts = datetime.datetime(year, 10, 1)
    ets = datetime.datetime(year + 1, 5, 1)
    rows = []

    pgconn = get_dbconn('postgis')
    states = gpd.GeoDataFrame.from_postgis("""
    SELECT the_geom, state_abbr from states where state_abbr = %s
    """, pgconn, params=(state, ), index_col='state_abbr',
                                           geom_col='the_geom')

    sidx = iemre.daily_offset(sts)
    ncfn = iemre.get_daily_ncname(sts.year)
    if not os.path.isfile(ncfn):
        raise ValueError("Data for year %s not found" % (sts.year, ))
    nc = ncopen(ncfn)
    czs = CachingZonalStats(iemre.AFFINE)
    hasdata = np.zeros((nc.dimensions['lat'].size,
                        nc.dimensions['lon'].size))
    czs.gen_stats(hasdata, states['the_geom'])
    for nav in czs.gridnav:
        grid = np.ones((nav.ysz, nav.xsz))
        grid[nav.mask] = 0.
        jslice = slice(nav.y0, nav.y0 + nav.ysz)
        islice = slice(nav.x0, nav.x0 + nav.xsz)
        hasdata[jslice, islice] = np.where(grid > 0, 1,
                                           hasdata[jslice, islice])
    st = np.flipud(hasdata)
    stpts = np.sum(np.where(hasdata > 0, 1, 0))

    snowd = nc.variables['snowd_12z'][sidx:, :, :]
    nc.close()
    for i in range(snowd.shape[0]):
        rows.append({
            'valid': sts + datetime.timedelta(days=i),
            'coverage': f(st, snowd[i], metric, stpts),
        })

    eidx = iemre.daily_offset(ets)
    nc = ncopen(iemre.get_daily_ncname(ets.year))
    snowd = nc.variables['snowd_12z'][:eidx, :, :]
    nc.close()
    for i in range(snowd.shape[0]):
        rows.append({
            'valid': (datetime.date(ets.year, 1, 1) +
                      datetime.timedelta(days=i)),
            'coverage': f(st, snowd[i], metric, stpts),
        })
    df = pd.DataFrame(rows)
    df = df[np.isfinite(df['coverage'])]

    (fig, ax) = plt.subplots(1, 1, sharex=True, figsize=(8, 6))
    ax.bar(df['valid'].values, df['coverage'].values, fc='tan', ec='tan',
           align='center')
    ax.set_title(("IEM Estimated Areal Snow Coverage Percent of %s\n"
                  " percentage of state reporting at least  %.2fin snow"
                  " cover"
                  ) % (reference.state_names[state], thres))
    ax.set_ylabel("Areal Coverage [%]")
    ax.xaxis.set_major_locator(mdates.DayLocator([1, 15]))
    ax.xaxis.set_major_formatter(mdates.DateFormatter("%-d %b\n%Y"))
    ax.set_yticks(range(0, 101, 25))
    ax.grid(True)

    return fig, df
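The coverage values above come from a helper f() that is not part of this snippet. Judging only from how it is called (state mask grid, one day's snowd_12z field, a threshold in millimeters, and the count of in-state cells), a plausible stand-in might look like the sketch below; this is a guess at its intent, not the original helper:

import numpy as np

def coverage_percent(statemask, snowd_mm, threshold_mm, statepts):
    """Hypothetical stand-in for f(): percent of in-state grid cells whose
    snow depth meets or exceeds the threshold."""
    hits = np.logical_and(statemask > 0, snowd_mm >= threshold_mm)
    return np.sum(hits) / float(statepts) * 100.0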
Example #31
def main():
    """Do Something Fun!"""
    form = cgi.FieldStorage()
    ts = datetime.datetime.strptime(form.getfirst("date"), "%Y-%m-%d")
    lat = float(form.getfirst("lat"))
    lon = float(form.getfirst("lon"))
    fmt = form.getfirst("format")
    if fmt != 'json':
        ssw("Content-type: text/plain\n\n")
        ssw("ERROR: Service only emits json at this time")
        return

    i, j = iemre.find_ij(lon, lat)
    offset = iemre.daily_offset(ts)

    res = {
        'data': [],
    }

    fn = iemre.get_daily_ncname(ts.year)

    ssw('Content-type: application/json\n\n')
    if not os.path.isfile(fn):
        ssw(json.dumps(res))
        sys.exit()

    if i is None or j is None:
        ssw(json.dumps({'error': 'Coordinates outside of domain'}))
        return

    if ts.year > 1980:
        ncfn = "/mesonet/data/prism/%s_daily.nc" % (ts.year, )
        if not os.path.isfile(ncfn):
            prism_precip = None
        else:
            i2, j2 = prismutil.find_ij(lon, lat)
            with ncopen(ncfn) as nc:
                prism_precip = nc.variables['ppt'][offset, j2, i2] / 25.4
    else:
        prism_precip = None

    if ts.year > 2010:
        ncfn = iemre.get_daily_mrms_ncname(ts.year)
        if not os.path.isfile(ncfn):
            mrms_precip = None
        else:
            j2 = int((lat - iemre.SOUTH) * 100.0)
            i2 = int((lon - iemre.WEST) * 100.0)
            with ncopen(ncfn) as nc:
                mrms_precip = nc.variables['p01d'][offset, j2, i2] / 25.4
    else:
        mrms_precip = None

    nc = ncopen(fn)

    c2000 = ts.replace(year=2000)
    coffset = iemre.daily_offset(c2000)

    cnc = ncopen(iemre.get_dailyc_ncname())

    res['data'].append({
        'prism_precip_in':
        myrounder(prism_precip, 2),
        'mrms_precip_in':
        myrounder(mrms_precip, 2),
        'daily_high_f':
        myrounder(
            datatypes.temperature(nc.variables['high_tmpk'][offset, j, i],
                                  'K').value('F'), 1),
        '12z_high_f':
        myrounder(
            datatypes.temperature(nc.variables['high_tmpk_12z'][offset, j, i],
                                  'K').value('F'), 1),
        'climate_daily_high_f':
        myrounder(
            datatypes.temperature(cnc.variables['high_tmpk'][coffset, j, i],
                                  'K').value("F"), 1),
        'daily_low_f':
        myrounder(
            datatypes.temperature(nc.variables['low_tmpk'][offset, j, i],
                                  'K').value("F"), 1),
        '12z_low_f':
        myrounder(
            datatypes.temperature(nc.variables['low_tmpk_12z'][offset, j, i],
                                  'K').value('F'), 1),
        'avg_dewpoint_f':
        myrounder(
            datatypes.temperature(nc.variables['avg_dwpk'][offset, j, i],
                                  'K').value('F'), 1),
        'climate_daily_low_f':
        myrounder(
            datatypes.temperature(cnc.variables['low_tmpk'][coffset, j, i],
                                  'K').value("F"), 1),
        'daily_precip_in':
        myrounder(nc.variables['p01d'][offset, j, i] / 25.4, 2),
        '12z_precip_in':
        myrounder(nc.variables['p01d_12z'][offset, j, i] / 25.4, 2),
        'climate_daily_precip_in':
        myrounder(cnc.variables['p01d'][coffset, j, i] / 25.4, 2),
        'srad_mj':
        myrounder(nc.variables['rsds'][offset, j, i] * 86400. / 1000000., 2),
        'avg_windspeed_mps':
        myrounder(nc.variables['wind_speed'][offset, j, i], 2),
    })
    nc.close()
    cnc.close()

    ssw(json.dumps(res))
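Both services above map a lon/lat point onto the MRMS grid inline with int((lat - iemre.SOUTH) * 100.0). Factored into a helper, and assuming the grid really is anchored at (iemre.WEST, iemre.SOUTH) with an exact 0.01 degree spacing, that lookup reduces to:

def mrms_find_ij(lon, lat, west, south):
    """Sketch of the inline MRMS index math used above (assumes a regular
    0.01 degree grid anchored at the given west/south corner)."""
    i = int((lon - west) * 100.0)
    j = int((lat - south) * 100.0)
    return i, j

# usage mirroring the snippets above:
# i2, j2 = mrms_find_ij(lon, lat, iemre.WEST, iemre.SOUTH)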
Example #32
def plotter(fdict):
    """ Go """
    ctx = util.get_autoplot_context(fdict, get_description())
    ptype = ctx['ptype']
    sdate = ctx['sdate']
    edate = ctx['edate']
    src = ctx['src']
    opt = ctx['opt']
    usdm = ctx['usdm']
    if sdate.year != edate.year:
        raise NoDataFound('Sorry, do not support multi-year plots yet!')
    days = (edate - sdate).days
    sector = ctx['sector']

    if sdate == edate:
        title = sdate.strftime("%-d %B %Y")
    else:
        title = "%s to %s (inclusive)" % (sdate.strftime("%-d %b"),
                                          edate.strftime("%-d %b %Y"))
    x0 = 0
    x1 = -1
    y0 = 0
    y1 = -1
    state = None
    if len(sector) == 2:
        state = sector
        sector = 'state'

    if src == 'mrms':
        ncfn = iemre.get_daily_mrms_ncname(sdate.year)
        clncfn = iemre.get_dailyc_mrms_ncname()
        ncvar = 'p01d'
        source = 'MRMS Q3'
        subtitle = 'NOAA MRMS Project, GaugeCorr and RadarOnly'
    elif src == 'iemre':
        ncfn = iemre.get_daily_ncname(sdate.year)
        clncfn = iemre.get_dailyc_ncname()
        ncvar = 'p01d_12z'
        source = 'IEM Reanalysis'
        subtitle = 'IEM Reanalysis is derived from various NOAA datasets'
    else:
        ncfn = "/mesonet/data/prism/%s_daily.nc" % (sdate.year, )
        clncfn = "/mesonet/data/prism/prism_dailyc.nc"
        ncvar = 'ppt'
        source = 'OSU PRISM'
        subtitle = ('PRISM Climate Group, Oregon State Univ., '
                    'http://prism.oregonstate.edu, created 4 Feb 2004.')

    mp = MapPlot(sector=sector,
                 state=state,
                 axisbg='white',
                 nocaption=True,
                 title='%s:: %s Precip %s' % (source, title, PDICT3[opt]),
                 subtitle='Data from %s' % (subtitle, ),
                 titlefontsize=14)

    idx0 = iemre.daily_offset(sdate)
    idx1 = iemre.daily_offset(edate) + 1
    if not os.path.isfile(ncfn):
        raise NoDataFound("No data for that year, sorry.")
    with util.ncopen(ncfn) as nc:
        if state is not None:
            x0, y0, x1, y1 = util.grid_bounds(nc.variables['lon'][:],
                                              nc.variables['lat'][:],
                                              state_bounds[state])
        elif sector in SECTORS:
            bnds = SECTORS[sector]
            x0, y0, x1, y1 = util.grid_bounds(
                nc.variables['lon'][:], nc.variables['lat'][:],
                [bnds[0], bnds[2], bnds[1], bnds[3]])
        lats = nc.variables['lat'][y0:y1]
        lons = nc.variables['lon'][x0:x1]
        if sdate == edate:
            p01d = distance(nc.variables[ncvar][idx0, y0:y1, x0:x1],
                            'MM').value('IN')
        elif (idx1 - idx0) < 32:
            p01d = distance(
                np.sum(nc.variables[ncvar][idx0:idx1, y0:y1, x0:x1], 0),
                'MM').value('IN')
        else:
            # Too much data can overwhelm this app, need to chunk it
            for i in range(idx0, idx1, 10):
                i2 = min([i + 10, idx1])
                if idx0 == i:
                    p01d = distance(
                        np.sum(nc.variables[ncvar][i:i2, y0:y1, x0:x1], 0),
                        'MM').value('IN')
                else:
                    p01d += distance(
                        np.sum(nc.variables[ncvar][i:i2, y0:y1, x0:x1], 0),
                        'MM').value('IN')
    if np.ma.is_masked(np.max(p01d)):
        raise NoDataFound("Data Unavailable")
    units = 'inches'
    cmap = plt.get_cmap(ctx['cmap'])
    cmap.set_bad('white')
    if opt == 'dep':
        # Do departure work now
        with util.ncopen(clncfn) as nc:
            climo = distance(
                np.sum(nc.variables[ncvar][idx0:idx1, y0:y1, x0:x1], 0),
                'MM').value('IN')
        p01d = p01d - climo
        maxv = np.percentile(np.abs(p01d), 99)
        clevs = np.around(np.linspace(0 - maxv, maxv, 11), decimals=2)
    elif opt == 'per':
        with util.ncopen(clncfn) as nc:
            climo = distance(
                np.sum(nc.variables[ncvar][idx0:idx1, y0:y1, x0:x1], 0),
                'MM').value('IN')
        p01d = p01d / climo * 100.
        cmap.set_under('white')
        cmap.set_over('black')
        clevs = [1, 10, 25, 50, 75, 100, 125, 150, 200, 300, 500]
        units = 'percent'
    else:
        p01d = np.where(p01d < 0.001, np.nan, p01d)
        cmap.set_under('white')
        clevs = [0.01, 0.1, 0.3, 0.5, 1, 1.5, 2, 2.5, 3, 3.5, 4, 5, 6, 8, 10]
        if days > 6:
            clevs = [0.01, 0.3, 0.5, 1, 1.5, 2, 3, 4, 5, 6, 7, 8, 10, 15, 20]
        if days > 29:
            clevs = [0.01, 0.5, 1, 2, 3, 4, 5, 6, 8, 10, 15, 20, 25, 30, 35]
        if days > 90:
            clevs = [0.01, 1, 2, 3, 4, 5, 6, 8, 10, 15, 20, 25, 30, 35, 40]

    x2d, y2d = np.meshgrid(lons, lats)
    if ptype == 'c':
        mp.contourf(x2d, y2d, p01d, clevs, cmap=cmap, units=units, iline=False)
    else:
        res = mp.pcolormesh(x2d, y2d, p01d, clevs, cmap=cmap, units=units)
        res.set_rasterized(True)
    if sector != 'midwest':
        mp.drawcounties()
        mp.drawcities()
    if usdm == 'yes':
        mp.draw_usdm(edate, filled=False, hatched=True)

    return mp.fig
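The long-period branch above reads the netCDF variable in ten-day slabs so a season-long request never has to be pulled into memory in one slice. The same idea as a standalone helper, assuming var is a (time, lat, lon) netCDF variable and the y/x slices are precomputed:

import numpy as np

def chunked_total(var, idx0, idx1, yslice, xslice, chunk=10):
    """Sum a (time, lat, lon) variable over the time axis in small slabs,
    mirroring the chunked accumulation in the plotter above."""
    total = None
    for i in range(idx0, idx1, chunk):
        i2 = min(i + chunk, idx1)
        slab = np.sum(var[i:i2, yslice, xslice], 0)
        total = slab if total is None else total + slab
    return total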
Example #33
def plotter(fdict):
    """ Go """
    ctx = get_autoplot_context(fdict, get_description())
    year = ctx['year']
    threshold = ctx['threshold']
    period = ctx['period']
    state = ctx['state']

    pgconn = get_dbconn('postgis')
    states = gpd.GeoDataFrame.from_postgis("""
    SELECT the_geom, state_abbr from states where state_abbr = %s
    """,
                                           pgconn,
                                           params=(state, ),
                                           index_col='state_abbr',
                                           geom_col='the_geom')

    nc = ncopen(iemre.get_daily_ncname(year))
    precip = nc.variables['p01d']
    czs = CachingZonalStats(iemre.AFFINE)
    hasdata = np.zeros((nc.dimensions['lat'].size, nc.dimensions['lon'].size))
    czs.gen_stats(hasdata, states['the_geom'])
    for nav in czs.gridnav:
        grid = np.ones((nav.ysz, nav.xsz))
        grid[nav.mask] = 0.
        jslice = slice(nav.y0, nav.y0 + nav.ysz)
        islice = slice(nav.x0, nav.x0 + nav.xsz)
        hasdata[jslice, islice] = np.where(
            grid > 0, 1, hasdata[jslice, islice])
    hasdata = np.flipud(hasdata)
    datapts = np.sum(np.where(hasdata > 0, 1, 0))

    now = datetime.date(year, 1, 1)
    now += datetime.timedelta(days=(period - 1))
    ets = datetime.date(year, 12, 31)
    today = datetime.date.today()
    if ets > today:
        ets = today
    days = []
    coverage = []
    while now <= ets:
        idx = iemre.daily_offset(now)
        sevenday = np.sum(precip[(idx - period):idx, :, :], 0)
        pday = np.where(hasdata > 0, sevenday[:, :], -1)
        tots = np.sum(np.where(pday >= (threshold * 25.4), 1, 0))
        days.append(now)
        coverage.append(tots / float(datapts) * 100.0)

        now += datetime.timedelta(days=1)
    nc.close()
    df = pd.DataFrame(dict(day=pd.Series(days), coverage=pd.Series(coverage)))

    (fig, ax) = plt.subplots(1, 1)
    ax.bar(days, coverage, fc='g', ec='g')
    ax.set_title(
        ("%s IEM Estimated Areal Coverage Percent of %s\n"
         "receiving %.2f inches of rain over trailing %s day period") %
        (year, reference.state_names[state], threshold, period))
    ax.set_ylabel("Areal Coverage [%]")
    ax.xaxis.set_major_formatter(mdates.DateFormatter('%b\n%-d'))
    ax.set_yticks(range(0, 101, 25))
    ax.grid(True)
    return fig, df
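The loop above re-sums the full trailing window for every day, which is simple but re-reads period days of the grid on each step. A rolling variant (a sketch of an alternative, not the original approach) keeps a running total and touches only two days per step:

import numpy as np

def trailing_sums(precip, start_idx, end_idx, period):
    """Rolling variant of the trailing-window sum above: add the newest day
    and drop the day that just left the window instead of re-summing it."""
    total = np.sum(precip[start_idx - period:start_idx, :, :], 0)
    yield total
    for idx in range(start_idx + 1, end_idx + 1):
        total = (total + precip[idx - 1, :, :]
                 - precip[idx - period - 1, :, :])
        yield total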