Example #1
def workflow(ts, irealtime, justprecip):
    """Do Work"""
    # load up our current data
    ds = iemre.get_grids(ts)
    LOG.debug("loaded %s variables from IEMRE database", len(ds))
    # For this date, the 12 UTC COOP obs will match the date
    if not justprecip:
        LOG.debug("doing 12z logic for %s", ts)
        grid_day12(ts, ds)
    do_precip12(ts, ds)
    # In realtime mode, persist and export today's grids, then step back one
    # day so the calendar-day logic below runs on what is actually yesterday.
    if irealtime:
        iemre.set_grids(ts, ds)
        subprocess.call(
            "python db_to_netcdf.py %s" % (ts.strftime("%Y %m %d"),),
            shell=True,
        )
        ts -= datetime.timedelta(days=1)
        ds = iemre.get_grids(ts)
    if not justprecip:
        LOG.debug("doing calendar day logic for %s", ts)
        grid_day(ts, ds)
    do_precip(ts, ds)
    LOG.debug("calling iemre.set_grids()")
    iemre.set_grids(ts, ds)
    subprocess.call(
        "python db_to_netcdf.py %s" % (ts.strftime("%Y %m %d"),), shell=True
    )
Example #2
def test_get_grids():
    """Can we get grids?"""
    pgconn = get_dbconn('iemre')
    cursor = pgconn.cursor()
    valid = utc(2019, 12, 1, 1)
    cursor.execute(
        """
        DELETE from iemre_hourly_201912 WHERE valid = %s
    """, (valid, ))
    cursor.execute(
        """
        DELETE from iemre_hourly_201912 WHERE valid = %s
    """, (valid + datetime.timedelta(days=1), ))
    cursor.execute(
        """
        INSERT into iemre_hourly_201912
        (gid, valid, tmpk, dwpk, uwnd, vwnd, p01m)
        select gid, %s, random(), null, random(),
        random(), random() from iemre_grid
    """, (valid, ))
    ds = iemre.get_grids(valid, varnames='tmpk', cursor=cursor)
    assert 'tmpk' in ds
    assert 'bogus' not in ds
    ds = iemre.get_grids(valid, cursor=cursor)
    assert np.isnan(ds['dwpk'].values.max())

    iemre.set_grids(valid, ds, cursor=cursor)
    iemre.set_grids(valid + datetime.timedelta(days=1), ds, cursor=cursor)
Example #3
def try_merra(ts):
    """Attempt to use MERRA data."""
    # Our files are UTC date based :/
    ncfn1 = ts.strftime("/mesonet/merra2/%Y/%Y%m%d.nc")
    ncfn2 = (
        ts + datetime.timedelta(days=1)
    ).strftime("/mesonet/merra2/%Y/%Y%m%d.nc")
    if not os.path.isfile(ncfn1) or not os.path.isfile(ncfn2):
        return False
    with ncopen(ncfn1) as nc:
        # Total up from 6z to end of file for today
        total = np.sum(nc.variables['SWGDN'][5:, :, :], axis=0)
    with ncopen(ncfn2) as nc:
        lat1d = nc.variables['lat'][:]
        lon1d = nc.variables['lon'][:]
        # Total up to 6z
        total += np.sum(nc.variables['SWGDN'][:6, :, :], axis=0)

    # We want to store W m-2, so average the hourly totals over the day
    total = total / 24.0

    lons, lats = np.meshgrid(lon1d, lat1d)
    nn = NearestNDInterpolator(
        (lons.flatten(), lats.flatten()), total.flatten()
    )
    xi, yi = np.meshgrid(iemre.XAXIS, iemre.YAXIS)

    ds = iemre.get_grids(ts.date(), varnames='rsds')
    ds['rsds'].values = nn(xi, yi)
    iemre.set_grids(ts.date(), ds)
    subprocess.call(
        "python db_to_netcdf.py %s" % (ts.strftime("%Y %m %d"), ),
        shell=True)

    return True
Example #4
def do_coop(ts):
    """Use COOP solar radiation data"""
    pgconn = get_dbconn('coop', user='******')
    cursor = pgconn.cursor()

    cursor.execute("""SELECT ST_x(geom), ST_y(geom),
        coalesce(narr_srad, merra_srad) from alldata a JOIN stations t
        ON (a.station = t.id) WHERE
        day = %s and t.network ~* 'CLIMATE' and substr(id, 3, 1) != 'C'
        and substr(id, 3, 4) != '0000'
    """, (ts.strftime("%Y-%m-%d"), ))
    lons = []
    lats = []
    vals = []
    for row in cursor:
        if row[2] is None or row[2] < 0:
            continue
        lons.append(row[0])
        lats.append(row[1])
        vals.append(row[2])

    nn = NearestNDInterpolator((np.array(lons), np.array(lats)),
                               np.array(vals))
    xi, yi = np.meshgrid(iemre.XAXIS, iemre.YAXIS)

    ds = iemre.get_grids(ts.date(), varnames='rsds')
    # Convert MJ m-2 day-1 to W m-2
    ds['rsds'].values = nn(xi, yi) * 1000000. / 86400.
    iemre.set_grids(ts.date(), ds)
    subprocess.call(
        "python db_to_netcdf.py %s" % (ts.strftime("%Y %m %d"), ),
        shell=True)
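
Examples #3 and #4 share the same regridding step: build a NearestNDInterpolator from scattered values and evaluate it on the IEMRE longitude/latitude mesh before writing the result back with set_grids. Below is a minimal sketch of only that step, assuming NearestNDInterpolator comes from scipy.interpolate and that iemre exposes XAXIS and YAXIS as shown above; the helper name regrid_to_iemre is hypothetical.

import numpy as np
from scipy.interpolate import NearestNDInterpolator

from pyiem import iemre


def regrid_to_iemre(lons, lats, vals):
    """Nearest-neighbor resample scattered point values onto the IEMRE grid."""
    nn = NearestNDInterpolator(
        (np.asarray(lons), np.asarray(lats)), np.asarray(vals)
    )
    # iemre.XAXIS / iemre.YAXIS are the 1-D grid coordinates used above
    xi, yi = np.meshgrid(iemre.XAXIS, iemre.YAXIS)
    return nn(xi, yi)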
Example #5
def test_forecast_grids():
    """Test getting and setting grids from the future."""
    pgconn = get_dbconn("iemre")
    cursor = pgconn.cursor()
    valid = datetime.date(2029, 12, 1)
    cursor.execute(
        """
        DELETE from iemre_daily_forecast WHERE valid = %s
    """,
        (valid, ),
    )
    cursor.execute(
        """
        INSERT into iemre_daily_forecast
        (gid, valid, high_tmpk, low_tmpk, p01d, rsds)
        select gid, %s, random(), random(),
        random(), random() from iemre_grid LIMIT 100
    """,
        (valid, ),
    )
    ds = iemre.get_grids(valid, cursor=cursor, table="iemre_daily_forecast")
    assert "high_tmpk" in ds
    assert "bogus" not in ds

    iemre.set_grids(valid, ds, cursor=cursor, table="iemre_daily_forecast")
Example #6
def main(argv):
    """Do work please"""
    day = datetime.date(int(argv[1]), int(argv[2]), int(argv[3]))
    pgconn = get_dbconn('coop')
    df = read_sql("""
        SELECT a.precip, st_x(t.geom) as lon, st_y(t.geom) as lat
        from alldata a JOIN stations t ON (a.station = t.id)
        WHERE a.day = %s and t.network ~* 'CLIMATE' and
        substr(a.station,3,4) != '0000' and substr(station,3,1) != 'C'
        and precip >= 0 and precip < 50
    """, pgconn, params=(day, ))
    res = generic_gridder(day, df, 'precip')
    if res is not None:
        ds = get_grids(day, varnames='p01d_12z')
        ds['p01d_12z'].values = res.to(mpunits('mm')).magnitude[0, :, :]
        # Persist the updated grid before exporting it, as the other examples do
        set_grids(day, ds)
        subprocess.call("python db_to_netcdf.py %s" % (
            day.strftime("%Y %m %d"), ), shell=True)
Example #7
def main(argv):
    """Go Main Go."""
    log = logger()
    if len(argv) == 6:
        valid = utc(int(argv[1]), int(argv[2]), int(argv[3]), int(argv[4]))
        ncfn = iemre.get_hourly_ncname(valid.year)
        idx = iemre.hourly_offset(valid)
    else:
        valid = datetime.date(int(argv[1]), int(argv[2]), int(argv[3]))
        ncfn = iemre.get_daily_ncname(valid.year)
        idx = iemre.daily_offset(valid)
    ds = iemre.get_grids(valid)
    with ncopen(ncfn, 'a', timeout=600) as nc:
        for vname in ds:
            if vname not in nc.variables:
                continue
            log.debug("copying database var %s to netcdf", vname)
            nc.variables[vname][idx, :, :] = ds[vname].values
Example #8
def main(argv):
    """Go Main Go."""
    date = datetime.date(int(argv[1]), int(argv[2]), int(argv[3]))
    ds = iemre.get_grids(date)
    pgconn = get_dbconn("coop")
    for state in state_names:
        if state in ["AK", "HI", "DC"]:
            continue
        table = "alldata_%s" % (state, )
        cursor = pgconn.cursor()
        df = load_table(state, date)
        df = merge_network_obs(df, "%s_COOP" % (state, ), date)
        df = merge_network_obs(df, "%s_ASOS" % (state, ), date)
        estimate_hilo(df, ds)
        estimate_precip(df, ds)
        estimate_snow(df, ds)
        if not commit(cursor, table, df, date):
            return
        cursor.close()
        pgconn.commit()
Example #9
def do_hrrr(ts):
    """Convert the hourly HRRR data to IEMRE grid"""
    LCC = pyproj.Proj(
        ("+lon_0=-97.5 +y_0=0.0 +R=6367470. +proj=lcc +x_0=0.0"
         " +units=m +lat_2=38.5 +lat_1=38.5 +lat_0=38.5")
    )
    total = None
    xaxis = None
    yaxis = None
    # IEMRE stores data coast to coast, so be aggressive and run over the
    # entire calendar date
    now = ts.replace(hour=1)
    for _ in range(24):
        now += datetime.timedelta(hours=1)
        utcnow = now.astimezone(pytz.UTC)
        fn = utcnow.strftime(
            ("/mesonet/ARCHIVE/data/%Y/%m/%d/model/hrrr/%H/"
             "hrrr.t%Hz.3kmf00.grib2")
        )
        if not os.path.isfile(fn):
            continue
        grbs = pygrib.open(fn)
        try:
            if utcnow >= SWITCH_DATE:
                grb = grbs.select(name='Downward short-wave radiation flux')
            else:
                grb = grbs.select(parameterNumber=192)
        except ValueError:
            # don't complain about late evening no-solar
            if utcnow.hour > 10 and utcnow.hour < 24:
                print('iemre/grid_rsds.py %s had no solar rad' % (fn,))
            continue
        if not grb:
            print('Could not find SWDOWN in HRRR %s' % (fn,))
            continue
        g = grb[0]
        if total is None:
            total = g.values
            lat1 = g['latitudeOfFirstGridPointInDegrees']
            lon1 = g['longitudeOfFirstGridPointInDegrees']
            llcrnrx, llcrnry = LCC(lon1, lat1)
            nx = g['Nx']
            ny = g['Ny']
            dx = g['DxInMetres']
            dy = g['DyInMetres']
            xaxis = llcrnrx + dx * np.arange(nx)
            yaxis = llcrnry + dy * np.arange(ny)
        else:
            total += g.values

    if total is None:
        print(('iemre/grid_rsds.py found no HRRR data for %s'
               ) % (ts.strftime("%d %b %Y"), ))
        return

    # We want to store W m-2, so average the hourly totals over the day
    total = total / 24.0

    ds = iemre.get_grids(ts.date(), varnames='rsds')
    for i, lon in enumerate(iemre.XAXIS):
        for j, lat in enumerate(iemre.YAXIS):
            (x, y) = LCC(lon, lat)
            i2 = np.digitize([x], xaxis)[0]
            j2 = np.digitize([y], yaxis)[0]
            try:
                ds['rsds'].values[j, i] = total[j2, i2]
            except IndexError:
                continue

    iemre.set_grids(ts.date(), ds)
    subprocess.call(
        "python db_to_netcdf.py %s" % (ts.strftime("%Y %m %d"), ),
        shell=True)
Example #10
def main(argv):
    """Go Main Go."""
    year = int(argv[1])
    sts = datetime.date(year, 1, 1)
    ets = min([datetime.date(year, 12, 31), datetime.date.today()])
    current = {}
    now = ets
    while now >= sts:
        ds = iemre.get_grids(now, varnames="power_swdn")
        maxval = ds["power_swdn"].values.max()
        if np.isnan(maxval) or maxval < 0:
            LOG.debug("adding %s as currently empty", now)
            current[now] = {"data": ds, "dirty": False}
        now -= datetime.timedelta(days=1)
    sts = min(list(current.keys()))
    ets = max(list(current.keys()))
    LOG.debug("running between %s and %s", sts, ets)

    queue = []
    for x0 in np.arange(iemre.WEST, iemre.EAST, 5.0):
        for y0 in np.arange(iemre.SOUTH, iemre.NORTH, 5.0):
            queue.append([x0, y0])
    for x0, y0 in tqdm(queue, disable=not sys.stdout.isatty()):
        url = (
            "https://power.larc.nasa.gov/cgi-bin/v1/DataAccess.py?"
            "request=execute&identifier=Regional&"
            "parameters=ALLSKY_SFC_SW_DWN&"
            "startDate=%s&endDate=%s&userCommunity=SSE&"
            "tempAverage=DAILY&bbox=%s,%s,%s,%s&user=anonymous&"
            "outputList=NETCDF"
        ) % (
            sts.strftime("%Y%m%d"),
            ets.strftime("%Y%m%d"),
            y0,
            x0,
            min([y0 + 5.0, iemre.NORTH]) - 0.1,
            min([x0 + 5.0, iemre.EAST]) - 0.1,
        )
        req = exponential_backoff(requests.get, url, timeout=60)
        js = req.json()
        if "outputs" not in js:
            LOG.debug(url)
            LOG.debug(str(js))
            continue
        fn = js["outputs"]["netcdf"]
        req = requests.get(fn, timeout=60, stream=True)
        ncfn = "/tmp/power%s.nc" % (year,)
        with open(ncfn, "wb") as fh:
            for chunk in req.iter_content(chunk_size=1024):
                if chunk:
                    fh.write(chunk)
            fh.close()
        with ncopen(ncfn) as nc:
            for day, _ in enumerate(nc.variables["time"][:]):
                date = sts + datetime.timedelta(days=day)
                if date not in current:
                    continue
                # kWh m-2 day-1 to MJ m-2 day-1: 3600 * 1000 / 1e6 = 3.6
                data = nc.variables["ALLSKY_SFC_SW_DWN"][day, :, :] * 3.6
                # Sometimes there are missing values?
                if np.ma.is_masked(data):
                    data[data.mask] = np.mean(data)
                i, j = iemre.find_ij(x0, y0)
                # resample: source data is on a 0.5 degree grid, IEMRE is 0.125 degree
                data = np.repeat(np.repeat(data, 4, axis=0), 4, axis=1)
                data = np.where(data < 0, np.nan, data)
                shp = np.shape(data)
                jslice = slice(j, j + shp[0])
                islice = slice(i, i + shp[1])
                # get the current data
                present = current[date]["data"]["power_swdn"].values[
                    jslice, islice
                ]
                if present.mean() == data.mean():
                    continue
                current[date]["data"]["power_swdn"].values[
                    jslice, islice
                ] = data
                current[date]["dirty"] = True
    for date in current:
        if not current[date]["dirty"]:
            continue
        LOG.debug("saving %s", date)
        iemre.set_grids(date, current[date]["data"])
        subprocess.call(
            "python ../iemre/db_to_netcdf.py %s"
            % (date.strftime("%Y %m %d"),),
            shell=True,
        )
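
All of the examples above follow the same round trip: get_grids loads a Dataset of grids for a date or hour, the caller edits the .values arrays in place, and set_grids writes the result back, usually followed by the external db_to_netcdf.py script. Below is a minimal sketch of that round trip, assuming iemre is importable from pyiem and that db_to_netcdf.py takes a "YYYY MM DD" argument and lives in the working directory, as in the examples; the function name scale_rsds and the scaling factor are hypothetical.

import datetime
import subprocess

from pyiem import iemre


def scale_rsds(date, factor=0.9):
    """Fetch one day's solar radiation grid, scale it, and persist it."""
    ds = iemre.get_grids(date, varnames="rsds")
    # Edit the grid values in place before writing them back
    ds["rsds"].values = ds["rsds"].values * factor
    iemre.set_grids(date, ds)
    # Mirror the updated database grids into the netCDF archive, as the
    # examples above do after each set_grids() call
    subprocess.call(
        "python db_to_netcdf.py %s" % (date.strftime("%Y %m %d"),),
        shell=True,
    )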