Example #1
def estimate_hilo(ts):
    """Estimate the High and Low Temperature based on gridded data"""
    idx = iemre.daily_offset(ts)
    nc = netCDF4.Dataset("/mesonet/data/iemre/%s_mw_daily.nc" % (ts.year, ),
                         'r')
    highgrid12 = temperature(nc.variables['high_tmpk_12z'][idx, :, :],
                             'K').value('F')
    lowgrid12 = temperature(nc.variables['low_tmpk_12z'][idx, :, :],
                            'K').value('F')
    highgrid00 = temperature(nc.variables['high_tmpk'][idx, :, :],
                             'K').value('F')
    lowgrid00 = temperature(nc.variables['low_tmpk'][idx, :, :],
                            'K').value('F')
    nc.close()

    for sid in nt.sts:
        if nt.sts[sid]['temp24_hour'] in [0, 22, 23]:
            val = highgrid00[nt.sts[sid]['gridj'], nt.sts[sid]['gridi']]
        else:
            val = highgrid12[nt.sts[sid]['gridj'], nt.sts[sid]['gridi']]
        if val > -80 and val < 140:
            nt.sts[sid]['high'] = "%.0f" % (val, )

        if nt.sts[sid]['temp24_hour'] in [0, 22, 23]:
            val = lowgrid00[nt.sts[sid]['gridj'], nt.sts[sid]['gridi']]
        else:
            val = lowgrid12[nt.sts[sid]['gridj'], nt.sts[sid]['gridi']]
        if val > -80 and val < 140:
            nt.sts[sid]['low'] = "%.0f" % (val, )
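
A minimal sketch of the grid-to-station pattern above, with an invented Kelvin grid (pyiem.datatypes assumed available); the -80/140 F sanity bounds are the same ones used in the example:

import numpy as np
from pyiem.datatypes import temperature

highgrid_k = np.ma.array([[265.0, 300.0], [310.0, 400.0]])
highgrid_f = temperature(highgrid_k, 'K').value('F')
gridj, gridi = 0, 1                    # hypothetical station grid indices
val = highgrid_f[gridj, gridi]
if val > -80 and val < 140:
    print("high: %.0f" % (val, ))      # high: 80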
Example #2
def grid_day(nc, ts):
    """
    """
    offset = iemre.daily_offset(ts)
    icursor.execute("""
       SELECT ST_x(s.geom) as lon, ST_y(s.geom) as lat, 
       (CASE WHEN pday >= 0 then pday else null end) as precipdata,
       (CASE WHEN max_tmpf > -50 and max_tmpf < 130 then max_tmpf else null end) as highdata,
       (CASE WHEN min_tmpf > -50 and min_tmpf < 95 then min_tmpf else null end) as lowdata 
       from summary_%s c, stations s WHERE day = '%s' and 
       s.network in ('IA_ASOS', 'MN_ASOS', 'WI_ASOS', 'IL_ASOS', 'MO_ASOS',
        'KS_ASOS', 'NE_ASOS', 'SD_ASOS', 'ND_ASOS', 'KY_ASOS', 'MI_ASOS',
        'OH_ASOS', 'AWOS') and c.iemid = s.iemid
        """ % (ts.year, ts.strftime("%Y-%m-%d")))

    if icursor.rowcount > 4:
        res = generic_gridder(icursor, 'highdata')
        nc.variables['high_tmpk'][offset] = datatypes.temperature(res, 'F').value('K')
        icursor.scroll(0, mode='absolute')
        res = generic_gridder(icursor, 'lowdata')
        nc.variables['low_tmpk'][offset] = datatypes.temperature(res, 'F').value('K')
        icursor.scroll(0, mode='absolute')
        #res = generic_gridder(icursor, 'precipdata')
        #nc.variables['p01d'][offset] = res * 25.4
    else:
        print "%s has %02i entries, FAIL" % (ts.strftime("%Y-%m-%d"), 
            icursor.rowcount)
Example #3
def do(lon, lat, station):
    """ Process this station and geography """
    idx = np.digitize([lon, ], lons)[0]
    jdx = np.digitize([lat, ], lats)[0]
    print("--> Processing %s i:%s j:%s" % (station, idx, jdx))

    pdata = pr_nc.variables['pr'][:, jdx, idx]
    xdata = tasmax_nc.variables['tmax'][:, jdx, idx]
    ndata = tasmin_nc.variables['tmin'][:, jdx, idx]

    highs = temperature(xdata, 'C').value('F')
    lows = temperature(ndata, 'C').value('F')
    precips = distance(pdata, 'MM').value('IN')

    now = basets
    high = low = precip = None
    for k, _ in enumerate(tmdata):
        now += datetime.timedelta(days=1)
        if now.month == 2 and now.day == 29:
            # Insert missing data
            insert(station, now, high, low, precip)
            now += datetime.timedelta(days=1)
        high = fix(highs[k])
        low = fix(lows[k])
        if low is not None and high is not None and low > high:
            # Swap, sigh
            print(('%s %s high: %.1f low: %.1f was swapped'
                   ) % (now.strftime("%m-%d-%Y"), station, high, low))
            high, low = low, high
        precip = fix(precips[k])
        insert(station, now, high, low, precip)
Example #4
def replace_forecast(df, location):
    """Replace dataframe data with forecast for this location"""
    pgconn = psycopg2.connect(database='coop', host='iemdb', user='******')
    cursor = pgconn.cursor()
    today = datetime.date.today()
    nextjan1 = datetime.date(today.year + 1, 1, 1)
    coop = XREF[location]['climodat']
    years = [int(y) for y in np.arange(df.index.values.min().year,
                                       df.index.values.max().year + 1)]
    cursor.execute("""
        SELECT day, high, low, precip from alldata_forecast WHERE
        modelid = (SELECT id from forecast_inventory WHERE model = 'NDFD'
        ORDER by modelts DESC LIMIT 1) and station = %s and day >= %s
    """, (coop, today))
    rcols = ['maxt', 'mint', 'rain']
    for row in cursor:
        valid = row[0]
        maxc = temperature(row[1], 'F').value('C')
        minc = temperature(row[2], 'F').value('C')
        rain = distance(row[3], 'IN').value('MM')
        for year in years:
            df.loc[valid.replace(year=year), rcols] = (maxc, minc, rain)

    # Need to get radiation from CFS
    cursor.execute("""
        SELECT day, srad from alldata_forecast WHERE
        modelid = (SELECT id from forecast_inventory WHERE model = 'CFS'
        ORDER by modelts DESC LIMIT 1) and station = %s and day >= %s
        and day < %s
    """, (coop, today, nextjan1))
    for row in cursor:
        valid = row[0]
        for year in years:
            df.loc[valid.replace(year=year), 'radn'] = row[1]
Example #5
def grid_day(nc, ts):
    """
    I proctor the gridding of climatology data for a given date
    @param ts Timestamp (date) of the analysis
    """
    offset = iemre.daily_offset(ts)
    if ts.day == 29 and ts.month == 2:
        ts = datetime.datetime(2000, 3, 1)

    sql = """SELECT * from ncdc_climate71 WHERE valid = '%s' and
             substr(station,3,4) != '0000' and substr(station,3,1) != 'C'
             """ % (ts.strftime("%Y-%m-%d"), )
    cursor.execute(sql)
    if cursor.rowcount > 4:
        res = generic_gridder(cursor, 'high')
        if res is not None:
            nc.variables['high_tmpk'][offset] = datatypes.temperature(res, 'F').value('K')
        cursor.scroll(0, mode='absolute')
        res = generic_gridder(cursor, 'low')
        if res is not None:
            nc.variables['low_tmpk'][offset] = datatypes.temperature(res, 'F').value('K')
        cursor.scroll(0, mode='absolute')
        res = generic_gridder(cursor, 'precip')
        if res is not None:
            nc.variables['p01d'][offset] = res * 25.4
    else:
        print "%s has %02i entries, FAIL" % (ts.strftime("%Y-%m-%d"),
                                             cursor.rowcount)
Example #6
def estimate_hilo(df, ts):
    """Estimate the High and Low Temperature based on gridded data"""
    idx = iemre.daily_offset(ts)
    nc = ncopen(iemre.get_daily_ncname(ts.year), 'r', timeout=300)
    highgrid12 = temperature(nc.variables['high_tmpk_12z'][idx, :, :],
                             'K').value('F')
    lowgrid12 = temperature(nc.variables['low_tmpk_12z'][idx, :, :],
                            'K').value('F')
    highgrid00 = temperature(nc.variables['high_tmpk'][idx, :, :],
                             'K').value('F')
    lowgrid00 = temperature(nc.variables['low_tmpk'][idx, :, :],
                            'K').value('F')
    nc.close()

    for sid, row in df.iterrows():
        if pd.isnull(row['high']):
            if row['temp24_hour'] in [0, 22, 23]:
                val = highgrid00[row['gridj'], row['gridi']]
            else:
                val = highgrid12[row['gridj'], row['gridi']]
            if sid == 'IA1402':
                print(row['temp24_hour'])
            if not np.ma.is_masked(val):
                df.at[sid, 'high'] = val
        if pd.isnull(row['low']):
            if row['temp24_hour'] in [0, 22, 23]:
                val = lowgrid00[row['gridj'], row['gridi']]
            else:
                val = lowgrid12[row['gridj'], row['gridi']]
            if not np.ma.is_masked(val):
                df.at[sid, 'low'] = val
Example #7
File: p141.py Project: akrherz/iem
def load(dirname, location, sdate):
    """ Read a file please """
    data = []
    idx = []
    mindoy = int(sdate.strftime("%j"))
    for line in open("%s/%s.met" % (dirname, location)):
        line = line.strip()
        if not line.startswith('19') and not line.startswith('20'):
            continue
        tokens = line.split()
        if int(tokens[1]) < mindoy:
            continue
        data.append(tokens)
        ts = (datetime.date(int(tokens[0]), 1, 1) +
              datetime.timedelta(days=int(tokens[1])-1))
        idx.append(ts)
    if len(data[0]) < 10:
        cols = ['year', 'doy', 'radn', 'maxt', 'mint', 'rain']
    else:
        cols = ['year', 'doy', 'radn', 'maxt', 'mint',
                'rain', 'gdd', 'st4', 'st12', 'st24',
                'st50', 'sm12', 'sm24', 'sm50']
    df = pd.DataFrame(data, index=idx,
                      columns=cols)
    for col in cols:
        df[col] = pd.to_numeric(df[col], errors='coerce')
    if len(data[0]) < 10:
        df['gdd'] = gdd(temperature(df['maxt'].values, 'C'),
                        temperature(df['mint'].values, 'C'))
    df['gddcum'] = df.groupby(['year'])['gdd'].apply(lambda x: x.cumsum())
    df['raincum'] = distance(
        df.groupby(['year'])['rain'].apply(lambda x: x.cumsum()),
        'MM').value('IN')
    return df
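
The per-year cumulative sums above follow a simple groupby pattern; a tiny self-contained sketch with made-up data (pandas and pyiem assumed), using the equivalent cumsum() on the grouped column:

import pandas as pd
from pyiem.datatypes import distance

df = pd.DataFrame({"year": [1981, 1981, 1982], "rain": [5.0, 3.0, 2.0]})  # mm
raincum_mm = df.groupby("year")["rain"].cumsum().values  # [5., 8., 2.]
df["raincum"] = distance(raincum_mm, "MM").value("IN")
print(df)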
Example #8
def figure(val, qcval):
    if qcval > 1000:
        return None
    if np.ma.is_masked(val) or np.ma.is_masked(qcval):
        return None
    return temperature(val + qcval,
                       'K').value('F') - temperature(val, 'K').value('F')
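
A quick usage sketch for figure() above (inputs invented): a Kelvin QC adjustment comes back expressed as a Fahrenheit difference, while sentinel or masked inputs return None.

# assumes the figure() definition above plus numpy imported as np
print(figure(290.0, 1.5))         # ~2.7, a 1.5 K adjustment is 2.7 F degrees
print(figure(290.0, 99999.0))     # None, sentinel QC flag
print(figure(np.ma.masked, 1.5))  # None, masked observation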
Example #9
def replace_cfs(nc, valid, islice, jslice):
    """Copy CFS data into the given year."""
    tidx0 = (valid - datetime.date(valid.year, 1, 1)).days
    tidx1 = (
        datetime.date(valid.year, 12, 31) - datetime.date(valid.year, 1, 1)
    ).days
    cfsnc = ncopen(valid.strftime("/mesonet/data/iemre/cfs_%Y%m%d%H.nc"))
    tidx = iemre.daily_offset(valid + datetime.timedelta(days=1))
    tslice = slice(tidx0 + 1, tidx1 + 1)
    # print("replace_cfs filling %s from %s" % (tslice, tidx))
    # CFS is W m-2, we want MJ
    nc.variables["srad"][tslice, :, :] = (
        cfsnc.variables["srad"][tidx:, jslice, islice] * 86400.0 / 1000000.0
    )
    highc = temperature(
        cfsnc.variables["high_tmpk"][tidx:, jslice, islice], "K"
    ).value("C")
    lowc = temperature(
        cfsnc.variables["low_tmpk"][tidx:, jslice, islice], "K"
    ).value("C")
    nc.variables["tmax"][tslice, :, :] = highc
    nc.variables["tmin"][tslice, :, :] = lowc
    nc.variables["gdd_f"][tslice, :, :] = gdd(
        temperature(highc, "C"), temperature(lowc, "C")
    )
    nc.variables["prcp"][tslice, :, :] = cfsnc.variables["p01d"][
        tidx:, jslice, islice
    ]
    cfsnc.close()
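
The "CFS is W m-2, we want MJ" step above is plain unit arithmetic: multiply a daily-average flux by the seconds in a day and divide by one million. A self-contained sketch with made-up values:

import numpy as np

SECONDS_PER_DAY = 86400.0
srad_wm2 = np.array([250.0, 300.0])            # daily-average shortwave, W m-2
srad_mj = srad_wm2 * SECONDS_PER_DAY / 1e6     # MJ m-2 day-1
print(srad_mj)                                 # [21.6 25.92]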
Example #10
def figure(val, qcval):
    if qcval > 1000:
        return None
    if np.ma.is_masked(val) or np.ma.is_masked(qcval):
        return None
    return temperature(val + qcval, 'K').value('F') - temperature(
        val, 'K').value('F')
Example #11
def replace_forecast(df, location):
    """Replace dataframe data with forecast for this location"""
    pgconn = psycopg2.connect(database='coop', host='iemdb', user='******')
    cursor = pgconn.cursor()
    today = datetime.date.today()
    nextjan1 = datetime.date(today.year + 1, 1, 1)
    coop = XREF[location]['climodat']
    years = [int(y) for y in np.arange(df.index.values.min().year,
                                       df.index.values.max().year + 1)]
    cursor.execute("""
        SELECT day, high, low, precip from alldata_forecast WHERE
        modelid = (SELECT id from forecast_inventory WHERE model = 'NDFD'
        ORDER by modelts DESC LIMIT 1) and station = %s and day >= %s
    """, (coop, today))
    rcols = ['maxt', 'mint', 'rain']
    for row in cursor:
        valid = row[0]
        maxc = temperature(row[1], 'F').value('C')
        minc = temperature(row[2], 'F').value('C')
        rain = distance(row[3], 'IN').value('MM')
        for year in years:
            df.loc[valid.replace(year=year), rcols] = (maxc, minc, rain)

    # Need to get radiation from CFS
    cursor.execute("""
        SELECT day, srad from alldata_forecast WHERE
        modelid = (SELECT id from forecast_inventory WHERE model = 'CFS'
        ORDER by modelts DESC LIMIT 1) and station = %s and day >= %s
        and day < %s
    """, (coop, today, nextjan1))
    for row in cursor:
        valid = row[0]
        for year in years:
            df.loc[valid.replace(year=year), 'radn'] = row[1]
Example #12
def grid_day(nc, ts):
    """
    I proctor the gridding of climatology data for a given date
    @param ts Timestamp (date) of the analysis
    """
    cursor = COOP.cursor(cursor_factory=psycopg2.extras.DictCursor)
    offset = iemre.daily_offset(ts)
    if ts.day == 29 and ts.month == 2:
        ts = datetime.datetime(2000, 3, 1)

    sql = """SELECT * from climate51 WHERE valid = '%s' and
             substr(station,3,4) != '0000' and substr(station,3,1) != 'C'
             """ % (ts.strftime("%Y-%m-%d"), )
    cursor.execute(sql)
    res = generic_gridder(nc, cursor, 'high')
    nc.variables['high_tmpk'][offset] = datatypes.temperature(res,
                                                              'F').value('K')

    cursor.scroll(0, mode='absolute')
    res = generic_gridder(nc, cursor, 'low')
    nc.variables['low_tmpk'][offset] = datatypes.temperature(res,
                                                             'F').value('K')

    cursor.scroll(0, mode='absolute')
    res = generic_gridder(nc, cursor, 'precip')
    nc.variables['p01d'][offset] = datatypes.distance(res, 'IN').value('MM')

    cursor.scroll(0, mode='absolute')
    res = generic_gridder(nc, cursor, 'gdd50')
    nc.variables['gdd50'][offset] = res
Example #13
def one():
    """option 1"""
    icursor = ISUAG.cursor()
    iemcursor = IEM.cursor()
    icursor.execute(
        """
        SELECT station, valid, ws_mps_s_wvt, winddir_d1_wvt, rain_mm_tot,
        tair_c_max, tair_c_min
        from sm_daily
    """
    )

    for row in icursor:
        avg_sknt = speed(row[2], "MPS").value("KT")
        avg_drct = row[3]
        pday = distance(row[4], "MM").value("IN")
        high = temperature(row[5], "C").value("F")
        low = temperature(row[6], "C").value("F")
        iemcursor.execute(
            """
        UPDATE summary SET avg_sknt = %s, vector_avg_drct = %s, pday = %s,
        max_tmpf = %s, min_tmpf = %s
        WHERE
        iemid = (select iemid from stations WHERE network = 'ISUSM' and
        id = %s) and day = %s
        """,
            (avg_sknt, avg_drct, pday, high, low, row[0], row[1]),
        )
    iemcursor.close()
    IEM.commit()
    IEM.close()
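
A minimal sketch of the unit conversions used in the UPDATE above, with invented inputs (pyiem.datatypes assumed):

from pyiem.datatypes import distance, speed, temperature

avg_sknt = speed(5.0, "MPS").value("KT")    # wind: m/s -> knots (~9.7)
pday = distance(12.7, "MM").value("IN")     # precip: mm -> inch (0.5)
high = temperature(30.0, "C").value("F")    # temperature: C -> F (86.0)
print(avg_sknt, pday, high)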
Example #14
def replace_cfs(df, location):
    """Replace the CFS data for this year!"""
    pgconn = psycopg2.connect(database='coop', host='iemdb', user='******')
    cursor = pgconn.cursor()
    coop = XREF[location]['climodat']
    today = datetime.date.today() + datetime.timedelta(days=3)
    dec31 = today.replace(day=31, month=12)
    cursor.execute("""
        SELECT day, high, low, precip, srad from alldata_forecast WHERE
        modelid = (SELECT id from forecast_inventory WHERE model = 'CFS'
        ORDER by modelts DESC LIMIT 1) and station = %s and day >= %s
        and day <= %s ORDER by day ASC
    """, (coop, today, dec31))
    rcols = ['maxt', 'mint', 'rain', 'radn']
    for row in cursor:
        maxt = temperature(row[1], 'F').value('C')
        mint = temperature(row[2], 'F').value('C')
        rain = distance(row[3], 'IN').value('MM')
        radn = row[4]
        df.loc[row[0], rcols] = [maxt, mint, rain, radn]

    if row[0] == dec31:
        return
    now = row[0] + datetime.timedelta(days=1)
    # OK, if our last row does not equal dec31, we have some more work to do
    print(("  Replacing %s->%s with previous year's data"
           ) % (now, dec31))
    while now <= dec31:
        lastyear = now.replace(year=(now.year - 1))
        df.loc[now, rcols] = df.loc[lastyear, rcols]
        now += datetime.timedelta(days=1)
Example #15
def main():
    """Go Main Go"""
    os.chdir("baseline")
    for fn in glob.glob("*.met"):
        location = fn[:-4]
        output = open("%s.csv" % (location, ), "w")
        output.write("date,high[F],low[F],precip[inch],gdd[F]\n")
        for line in open(fn):
            line = line.strip()
            if (not line.startswith("2012") and not line.startswith("2015")
                    and not line.startswith("2016")):
                continue
            tokens = line.split()
            valid = datetime.date(int(
                tokens[0]), 1, 1) + datetime.timedelta(days=int(tokens[1]) - 1)
            high = temperature(float(tokens[3]), "C")
            low = temperature(float(tokens[4]), "C")
            gdd = met.gdd(high, low, 50, 86)
            precip = distance(float(tokens[5]), "MM")
            output.write(("%s,%.1f,%.1f,%.2f,%.1f\n") % (
                valid.strftime("%Y-%m-%d"),
                high.value("F"),
                low.value("F"),
                precip.value("IN"),
                gdd,
            ))
        output.close()
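
A minimal sketch of the growing degree day call used above, a 50/86 F base/ceiling with temperature objects (pyiem assumed; the inputs are invented):

from pyiem import meteorology as met
from pyiem.datatypes import temperature

high = temperature(30.0, "C")        # 86 F
low = temperature(20.0, "C")         # 68 F
print(met.gdd(high, low, 50, 86))    # (86 + 68) / 2 - 50 = 27.0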
Example #16
def grid_day(nc, ts):
    """
    """
    offset = iemre.daily_offset(ts)
    print(('cal hi/lo for %s [idx:%s]') % (ts, offset))
    sql = """
       SELECT ST_x(s.geom) as lon, ST_y(s.geom) as lat, s.state,
       s.name, s.id as station,
       (CASE WHEN pday >= 0 then pday else null end) as precipdata,
       (CASE WHEN max_tmpf > -50 and max_tmpf < 130
           then max_tmpf else null end) as highdata,
       (CASE WHEN min_tmpf > -50 and min_tmpf < 95
           then min_tmpf else null end) as lowdata
       from summary_%s c, stations s WHERE day = '%s' and
       s.network in ('IA_ASOS', 'MN_ASOS', 'WI_ASOS', 'IL_ASOS', 'MO_ASOS',
        'KS_ASOS', 'NE_ASOS', 'SD_ASOS', 'ND_ASOS', 'KY_ASOS', 'MI_ASOS',
        'OH_ASOS', 'AWOS') and c.iemid = s.iemid
        """ % (ts.year, ts.strftime("%Y-%m-%d"))
    df = read_sql(sql, pgconn)

    if len(df.index) > 4:
        res = generic_gridder(df, 'highdata')
        nc.variables['high_tmpk'][offset] = datatypes.temperature(
                                                res, 'F').value('K')
        res = generic_gridder(df, 'lowdata')
        nc.variables['low_tmpk'][offset] = datatypes.temperature(
                                            res, 'F').value('K')
    else:
        print "%s has %02i entries, FAIL" % (ts.strftime("%Y-%m-%d"),
                                             cursor.rowcount)
Example #17
def main():
    """Go Main Go"""
    pgconn = get_dbconn('scan')
    for station in ['S2004', 'S2196', 'S2002', 'S2072', 'S2068',
                    'S2031', 'S2001', 'S2047']:
        df = read_sql("""
        select extract(year from valid + '2 months'::interval) as wy,
        tmpf, dwpf from alldata where station = %s and tmpf is not null
        and dwpf is not null
        """, pgconn, params=(station, ), index_col=None)
        df['mixingratio'] = meteorology.mixing_ratio(
            temperature(df['dwpf'].values, 'F')).value('KG/KG')
        df['vapor_pressure'] = mcalc.vapor_pressure(
            1000. * units.mbar,
            df['mixingratio'].values * units('kg/kg')).to(units('kPa'))
        df['saturation_mixingratio'] = (
            meteorology.mixing_ratio(
                temperature(df['tmpf'].values, 'F')).value('KG/KG'))
        df['saturation_vapor_pressure'] = mcalc.vapor_pressure(
            1000. * units.mbar,
            df['saturation_mixingratio'].values * units('kg/kg')).to(units('kPa'))
        df['vpd'] = df['saturation_vapor_pressure'] - df['vapor_pressure']
        means = df.groupby('wy').mean()
        counts = df.groupby('wy').count()
        for yr, row in means.iterrows():
            print(("%s,%s,%.0f,%.3f"
                   ) % (yr, station, counts.at[yr, 'vpd'], row['vpd']))
Example #18
def get(station):
    cursor.execute(
        """
      SELECT valid, sknt, tmpf, dwpf from alldata where station = %s 
      and tmpf is not null and dwpf is not null 
      and valid > '1971-01-01' ORDER by valid ASC 
    """,
        (station,),
    )

    hits = {}
    running = False
    startr = None
    for row in cursor:
        relh = met.relh(temperature(row[2], "F"), temperature(row[3], "F")).value("%")
        if relh > 25 or row[1] < (25.0 / 1.15):
            if running:
                delta = (row[0] - startr).total_seconds()
                if delta >= 60 * 60 * 1:
                    # print station, delta, row
                    hits[row[0].strftime("%Y%m%d")] = 1
            running = False
        else:
            running = True
            startr = row[0]

    return len(hits.keys())
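
A minimal sketch of the relative humidity threshold used above (pyiem's met.relh assumed; the observation values are invented). The example flags an hour as windy and dry when RH is at or below 25 percent and the sustained wind is at least 25 mph (25 / 1.15 kt):

from pyiem import meteorology as met
from pyiem.datatypes import temperature

tmpf, dwpf, sknt = 95.0, 40.0, 30.0
relh = met.relh(temperature(tmpf, "F"), temperature(dwpf, "F")).value("%")
windy_and_dry = relh <= 25 and sknt >= (25.0 / 1.15)
print(round(relh, 1), windy_and_dry)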
Example #19
def grid_day(nc, ts):
    """
    I proctor the gridding of climatology data for a given date
    @param ts Timestamp (date) of the analysis
    """
    cursor = COOP.cursor(cursor_factory=psycopg2.extras.DictCursor)
    offset = iemre.daily_offset(ts)
    if ts.day == 29 and ts.month == 2:
        ts = datetime.datetime(2000, 3, 1)

    sql = """SELECT * from ncdc_climate71 WHERE valid = '%s' and
             substr(station,3,4) != '0000' and substr(station,3,1) != 'C'
             """ % (ts.strftime("%Y-%m-%d"), )
    cursor.execute(sql)
    if cursor.rowcount > 4:
        if 'high_tmpk' in nc.variables:
            res = generic_gridder(nc, cursor, 'high')
            if res is not None:
                nc.variables['high_tmpk'][offset] = datatypes.temperature(
                    res, 'F').value('K')
            cursor.scroll(0, mode='absolute')
            res = generic_gridder(nc, cursor, 'low')
            if res is not None:
                nc.variables['low_tmpk'][offset] = datatypes.temperature(
                    res, 'F').value('K')
            cursor.scroll(0, mode='absolute')
        res = generic_gridder(nc, cursor, 'precip')
        if res is not None:
            nc.variables['p01d'][offset] = res * 25.4
    else:
        print(("%s has %02i entries, FAIL") %
              (ts.strftime("%Y-%m-%d"), cursor.rowcount))
Example #20
def estimate_hilo(ts):
    """Estimate the High and Low Temperature based on gridded data"""
    idx = iemre.daily_offset(ts)
    nc = netCDF4.Dataset("/mesonet/data/iemre/%s_mw_daily.nc" % (ts.year, ),
                         'r')
    highgrid12 = temperature(nc.variables['high_tmpk_12z'][idx, :, :],
                             'K').value('F')
    lowgrid12 = temperature(nc.variables['low_tmpk_12z'][idx, :, :],
                            'K').value('F')
    highgrid00 = temperature(nc.variables['high_tmpk'][idx, :, :],
                             'K').value('F')
    lowgrid00 = temperature(nc.variables['low_tmpk'][idx, :, :],
                            'K').value('F')
    nc.close()

    for sid in nt.sts.keys():
        if nt.sts[sid]['temp24_hour'] in [0, 22, 23]:
            val = highgrid00[nt.sts[sid]['gridj'], nt.sts[sid]['gridi']]
        else:
            val = highgrid12[nt.sts[sid]['gridj'], nt.sts[sid]['gridi']]
        if val > -80 and val < 140:
            nt.sts[sid]['high'] = "%.0f" % (val, )

        if nt.sts[sid]['temp24_hour'] in [0, 22, 23]:
            val = lowgrid00[nt.sts[sid]['gridj'], nt.sts[sid]['gridi']]
        else:
            val = lowgrid12[nt.sts[sid]['gridj'], nt.sts[sid]['gridi']]
        if val > -80 and val < 140:
            nt.sts[sid]['low'] = "%.0f" % (val, )
Example #21
def process(model, lon, lat):
    """ Generate a plot for this given combination """
    fig = plt.figure()
    ax = fig.add_axes([0.1, 0.1, 0.7, 0.8])
    modelts = get_latest_time(model)
    if modelts is None:
        ax.text(0.5, 0.5, "No Data Found to Plot!", ha='center')
        sys.stdout.write("Content-Type: image/png\n\n")
        fig.savefig(sys.stdout, format="png")
        return
    nc = netCDF4.Dataset(
            modelts.strftime(("/mesonet/share/frost/" +
                              model + "/%Y%m%d%H%M_iaoutput.nc")), 'r')
    times = get_times(nc)
    i, j = get_ij(lon, lat, nc)

    ax.plot(times,
            temperature(nc.variables['bdeckt'][:, i, j], 'K').value('F'),
            color='k',
            label='Bridge Deck Temp' if model == 'bridget' else 'Pavement')
    ax.plot(times, temperature(nc.variables['tmpk'][:, i, j], 'K').value("F"),
            color='r', label='Air Temp')
    ax.plot(times, temperature(nc.variables['dwpk'][:, i, j], 'K').value("F"),
            color='g', label='Dew Point')
    # ax.set_ylim(-30,150)
    ax.set_title(("ISUMM5 %s Timeseries\n"
                  "i: %s j:%s lon: %.2f lat: %.2f Model Run: %s"
                  ) % (model, i, j, nc.variables['lon'][i, j],
                       nc.variables['lat'][i, j],
                       modelts.astimezone(pytz.timezone("America/Chicago")
                                          ).strftime("%-d %b %Y %-I:%M %p")))

    ax.xaxis.set_major_locator(
        mdates.DayLocator(interval=1, tz=pytz.timezone("America/Chicago")))
    ax.xaxis.set_major_formatter(
        mdates.DateFormatter('%d %b\n%Y', tz=pytz.timezone("America/Chicago")))
    ax.axhline(32, linestyle='-.')
    ax.grid(True)
    ax.set_ylabel(r"Temperature $^\circ$F")

    (ymin, ymax) = ax.get_ylim()

    for i2, ifrost in enumerate(nc.variables['ifrost'][:-1, i, j]):
        ax.barh(ymax-1, 1.0/24.0/4.0, left=times[i2],
                fc=get_ifrost_color(ifrost), ec='none')
    for i2, icond in enumerate(nc.variables['icond'][:-1, i, j]):
        ax.barh(ymax-2, 1.0/24.0/4.0, left=times[i2],
                fc=get_icond_color(model, icond), ec='none')

    # Shrink current axis's height by 10% on the bottom
    box = ax.get_position()
    ax.set_position([box.x0, box.y0 + box.height * 0.1,
                    box.width, box.height * 0.9])
    ax.legend(loc='upper center',
              bbox_to_anchor=(0.5, -0.12), fancybox=True, shadow=True, ncol=3)
    add_labels(fig)

    sys.stdout.write("Content-Type: image/png\n\n")
    fig.savefig(sys.stdout, format="png")
Example #22
def replace_obs_iem(df, location):
    """Replace dataframe data with obs for this location

    Tricky part, if the baseline already provides data for this year, we should
    use it!
    """
    pgconn = get_dbconn("iem", user="******")
    cursor = pgconn.cursor()
    station = XREF[location]["station"]
    today = datetime.date.today()
    jan1 = today.replace(month=1, day=1)
    years = [
        int(y) for y in np.arange(df.index.values.min().year,
                                  df.index.values.max().year + 1)
    ]

    table = "summary_%s" % (jan1.year, )
    cursor.execute(
        """
        select day, max_tmpf, min_tmpf, srad_mj, pday
        from """ + table + """ s JOIN stations t on (s.iemid = t.iemid)
        WHERE t.id = %s and max_tmpf is not null
        and day < 'TODAY' ORDER by day ASC
        """,
        (station, ),
    )
    rcols = ["maxt", "mint", "radn", "gdd", "rain"]
    replaced = []
    for row in cursor:
        valid = row[0]
        # Does our df currently have data for this date?  If so, we shall do
        # no more
        dont_replace = not np.isnan(df.at[valid, "mint"])
        if not dont_replace:
            replaced.append(valid)
        _gdd = gdd(temperature(row[1], "F"), temperature(row[2], "F"))
        for year in years:
            if valid.month == 2 and valid.day == 29 and year % 4 != 0:
                continue
            if dont_replace:
                df.loc[valid.replace(year=year), rcols[3:]] = (
                    _gdd,
                    distance(row[4], "in").value("mm"),
                )
                continue
            df.loc[valid.replace(year=year), rcols] = (
                temperature(row[1], "F").value("C"),
                temperature(row[2], "F").value("C"),
                row[3],
                _gdd,
                distance(row[4], "in").value("mm"),
            )
    if replaced:
        LOG.info(
            "  used IEM Access %s from %s->%s",
            station,
            replaced[0],
            replaced[-1],
        )
Example #23
def figure(val, qcval):
    """hack"""
    if qcval > 1000:
        return None
    if np.ma.is_masked(val) or np.ma.is_masked(qcval):
        return None
    return temperature(val + qcval, "K").value("F") - temperature(
        val, "K").value("F")
Example #24
 def test_gdd_with_nans(self):
     """Can we properly deal with nan's and not emit warnings?"""
     highs = np.ma.array([70, 80, np.nan, 90],
                         mask=[False, False, True, False])
     lows = highs - 10
     r = meteorology.gdd(datatypes.temperature(highs, 'F'),
                         datatypes.temperature(lows, 'F'), 50, 86)
     self.assertTrue(np.ma.is_masked(r[2]))
Example #25
def main():
    """Do Something Fun!"""
    form = cgi.FieldStorage()
    ts = datetime.datetime.strptime(form.getfirst("date"), "%Y-%m-%d")
    lat = float(form.getfirst("lat"))
    lon = float(form.getfirst("lon"))
    fmt = form.getfirst("format")
    if fmt != 'json':
        sys.stdout.write("Content-type: text/plain\n\n")
        sys.stdout.write("ERROR: Service only emits json at this time")
        return

    i, j = iemre.find_ij(lon, lat)
    offset = iemre.daily_offset(ts)

    res = {'data': [], }

    fn = "/mesonet/data/iemre/%s_mw_daily.nc" % (ts.year,)

    sys.stdout.write('Content-type: application/json\n\n')
    if not os.path.isfile(fn):
        sys.stdout.write(json.dumps(res))
        sys.exit()

    if i is None or j is None:
        sys.stdout.write(json.dumps({'error': 'Coordinates outside of domain'}
                                    ))
        return

    nc = netCDF4.Dataset(fn, 'r')

    c2000 = ts.replace(year=2000)
    coffset = iemre.daily_offset(c2000)

    cnc = netCDF4.Dataset("/mesonet/data/iemre/mw_dailyc.nc", 'r')

    res['data'].append({
        'daily_high_f': myrounder(
           datatypes.temperature(
                nc.variables['high_tmpk'][offset, j, i], 'K').value('F'), 1),
        'climate_daily_high_f': myrounder(
           datatypes.temperature(
                cnc.variables['high_tmpk'][coffset, j, i], 'K').value("F"), 1),
        'daily_low_f': myrounder(
           datatypes.temperature(
                nc.variables['low_tmpk'][offset, j, i], 'K').value("F"), 1),
        'climate_daily_low_f': myrounder(
           datatypes.temperature(
                cnc.variables['low_tmpk'][coffset, j, i], 'K').value("F"), 1),
        'daily_precip_in': myrounder(
           nc.variables['p01d'][offset, j, i] / 25.4, 2),
        'climate_daily_precip_in': myrounder(
           cnc.variables['p01d'][coffset, j, i] / 25.4, 2),
      })
    nc.close()
    cnc.close()

    sys.stdout.write(json.dumps(res))
Example #26
def dbsave(ts, data):
    """Save the data! """
    pgconn = psycopg2.connect(database="coop", host="iemdb")
    cursor = pgconn.cursor()
    # Check to see if we already have data for this date
    cursor.execute(
        """SELECT id from forecast_inventory
      WHERE model = 'CFS' and modelts = %s""",
        (ts,),
    )
    if cursor.rowcount > 0:
        modelid = cursor.fetchone()[0]
        cursor.execute(
            """DELETE from alldata_forecast where
        modelid = %s""",
            (modelid,),
        )
        if cursor.rowcount > 0:
            print("Removed %s previous entries" % (cursor.rowcount,))
    else:
        cursor.execute(
            """INSERT into forecast_inventory(model, modelts)
        VALUES ('CFS', %s) RETURNING id""",
            (ts,),
        )
        modelid = cursor.fetchone()[0]

    for date in data["fx"].keys():
        d = data["fx"][date]
        if d["high"] is None or d["low"] is None or d["precip"] is None or d["srad"] is None:
            print("Missing data for date: %s" % (date,))
            del (data["fx"][date])

    for sid in nt.sts.keys():
        # Skip virtual stations
        if sid[2:] == "0000" or sid[2] == "C":
            continue
        # Careful here, lon is 0-360 for this file
        i = np.digitize([nt.sts[sid]["lon"] + 360], data["x"])[0]
        j = np.digitize([nt.sts[sid]["lat"]], data["y"])[0]
        for date in data["fx"]:
            d = data["fx"][date]
            high = bnds(temperature(d["high"][j, i], "K").value("F"), -70, 140)
            low = bnds(temperature(d["low"][j, i], "K").value("F"), -90, 120)
            precip = bnds(round(float(d["precip"][j, i] / 25.4), 2), 0, 30)
            srad = bnds(d["srad"][j, i] / 1000000.0, 0, 50)
            if high is None or low is None or precip is None or srad is None:
                continue
            cursor.execute(
                """INSERT into alldata_forecast(modelid,
            station, day, high, low, precip, srad)
            VALUES (%s, %s, %s, %s, %s, %s, %s)
            """,
                (modelid, sid, date, high, low, precip, srad),
            )
    cursor.close()
    pgconn.commit()
Example #27
def test_gdd_with_nans():
    """Can we properly deal with nan's and not emit warnings?"""
    highs = np.ma.array([70, 80, np.nan, 90],
                        mask=[False, False, True, False])
    lows = highs - 10
    r = meteorology.gdd(datatypes.temperature(highs, 'F'),
                        datatypes.temperature(lows, 'F'),
                        50, 86)
    assert np.ma.is_masked(r[2])
Example #28
File: coop.py Project: akrherz/iem
def do_salus(ctx):
    """ Generate SALUS
    StationID, Year, DOY, SRAD, Tmax, Tmin, Rain, DewP, Wind, Par, dbnum
    CTRL, 1981, 1, 5.62203, 2.79032, -3.53361, 5.43766, NaN, NaN, NaN, 2
    CTRL, 1981, 2, 3.1898, 1.59032, -6.83361, 1.38607, NaN, NaN, NaN, 3
    """
    if len(ctx['stations']) > 1:
        ssw(("ERROR: SALUS output is only "
             "permitted for one station at a time."))
        return

    dbconn = get_database()
    cursor = dbconn.cursor(cursor_factory=psycopg2.extras.DictCursor)

    scenario_year = 2030
    asts = datetime.date(2030, 1, 1)
    if ctx['scenario'] == 'yes':
        # Tricky!
        scenario_year = ctx['scenario_year']
        today = datetime.date.today()
        asts = datetime.date(scenario_year, today.month, today.day)

    table = get_tablename(ctx['stations'])
    station = ctx['stations'][0]
    thisyear = datetime.datetime.now().year
    cursor.execute("""
    WITH scenario as (
        SELECT
 ('""" + str(thisyear) + """-'||month||'-'||extract(day from day))::date as day,
        high, low, precip, station,
        coalesce(narr_srad, merra_srad, hrrr_srad) as srad
        from """ + table + """ WHERE station = %s and
        day >= %s and year = %s
    ), obs as (
        SELECT day,
        high, low, precip,  station,
        coalesce(narr_srad, merra_srad, hrrr_srad) as srad
        from """ + table + """ WHERE station = %s and
        day >= %s and day <= %s ORDER by day ASC
    ), total as (
        SELECT *, extract(doy from day) as doy from obs
        UNION SELECT * from scenario
    )

    SELECT * from total ORDER by day ASC
    """, (station, asts, scenario_year, station, ctx['sts'], ctx['ets']))
    ssw(("StationID, Year, DOY, SRAD, Tmax, Tmin, Rain, DewP, "
         "Wind, Par, dbnum\n"))
    for i, row in enumerate(cursor):
        srad = -99 if row['srad'] is None else row['srad']
        ssw(("%s, %s, %s, %.4f, %.2f, %.2f, %.2f, , , , %s\n"
             ) % (
                  station[:4], row["day"].year,
                  int(row["doy"]), srad,
                  temperature(row["high"], 'F').value('C'),
                  temperature(row["low"], 'F').value('C'),
                  row["precip"] * 25.4, i + 2))
Example #29
def do_salus(ctx):
    """ Generate SALUS
    StationID, Year, DOY, SRAD, Tmax, Tmin, Rain, DewP, Wind, Par, dbnum
    CTRL, 1981, 1, 5.62203, 2.79032, -3.53361, 5.43766, NaN, NaN, NaN, 2
    CTRL, 1981, 2, 3.1898, 1.59032, -6.83361, 1.38607, NaN, NaN, NaN, 3
    """
    if len(ctx['stations']) > 1:
        ssw(("ERROR: SALUS output is only "
             "permitted for one station at a time."))
        return

    dbconn = get_database()
    cursor = dbconn.cursor(cursor_factory=psycopg2.extras.DictCursor)

    scenario_year = 2030
    asts = datetime.date(2030, 1, 1)
    if ctx['scenario'] == 'yes':
        # Tricky!
        scenario_year = ctx['scenario_year']
        today = datetime.date.today()
        asts = datetime.date(scenario_year, today.month, today.day)

    table = get_tablename(ctx['stations'])
    station = ctx['stations'][0]
    thisyear = datetime.datetime.now().year
    cursor.execute(
        """
    WITH scenario as (
        SELECT
 ('""" + str(thisyear) +
        """-'||month||'-'||extract(day from day))::date as day,
        high, low, precip, station,
        coalesce(narr_srad, merra_srad, hrrr_srad) as srad
        from """ + table + """ WHERE station = %s and
        day >= %s and year = %s
    ), obs as (
        SELECT day,
        high, low, precip,  station,
        coalesce(narr_srad, merra_srad, hrrr_srad) as srad
        from """ + table + """ WHERE station = %s and
        day >= %s and day <= %s ORDER by day ASC
    ), total as (
        SELECT *, extract(doy from day) as doy from obs
        UNION SELECT * from scenario
    )

    SELECT * from total ORDER by day ASC
    """, (station, asts, scenario_year, station, ctx['sts'], ctx['ets']))
    ssw(("StationID, Year, DOY, SRAD, Tmax, Tmin, Rain, DewP, "
         "Wind, Par, dbnum\n"))
    for i, row in enumerate(cursor):
        srad = -99 if row['srad'] is None else row['srad']
        ssw(("%s, %s, %s, %.4f, %.2f, %.2f, %.2f, , , , %s\n") %
            (station[:4], row["day"].year, int(
                row["doy"]), srad, temperature(row["high"], 'F').value('C'),
             temperature(row["low"],
                         'F').value('C'), row["precip"] * 25.4, i + 2))
Example #30
File: p143.py Project: akrherz/iem
def load(dirname, location, sdate):
    """ Read a file please """
    data = []
    idx = []
    for line in open("%s/%s.met" % (dirname, location)):
        line = line.strip()
        if not line.startswith("19") and not line.startswith("20"):
            continue
        tokens = line.split()
        data.append(tokens)
        ts = datetime.date(int(tokens[0]), 1, 1) + datetime.timedelta(days=int(tokens[1]) - 1)
        idx.append(ts)
    if len(data[0]) < 10:
        cols = ["year", "doy", "radn", "maxt", "mint", "rain"]
    else:
        cols = [
            "year",
            "doy",
            "radn",
            "maxt",
            "mint",
            "rain",
            "gdd",
            "st4",
            "st12",
            "st24",
            "st50",
            "sm12",
            "sm24",
            "sm50",
        ]
    df = pd.DataFrame(data, index=idx, columns=cols)
    for col in cols:
        df[col] = pd.to_numeric(df[col], errors="coerce")
    if len(data[0]) < 10:
        df["gdd"] = gdd(temperature(df["maxt"].values, "C"), temperature(df["mint"].values, "C"))
    bins = []
    today = datetime.date.today()
    for valid, _ in df.iterrows():
        if valid >= today:
            bins.append(0)
            continue
        if sdate == "nov1" and valid.month >= 11:
            bins.append(valid.year + 1)
            continue
        if valid.month < today.month:
            bins.append(valid.year)
            continue
        if valid.month == today.month and valid.day < today.day:
            bins.append(valid.year)
            continue
        bins.append(0)
    df["bin"] = bins
    df["rain"] = distance(df["rain"].values, "MM").value("IN")
    df["avgt"] = temperature((df["maxt"] + df["mint"]) / 2.0, "C").value("F")
    return df
Example #31
def test_heatindex():
    ''' Test our heat index calculations '''
    t = datatypes.temperature(80.0, 'F')
    td = datatypes.temperature(70.0, 'F')
    hdx = meteorology.heatindex(t, td)
    assert abs(hdx.value("F") - 83.93) < 0.01

    t = datatypes.temperature(30.0, 'F')
    hdx = meteorology.heatindex(t, td)
    assert abs(hdx.value("F") - 30.00) < 0.01
Example #32
    def test_heatindex(self):
        ''' Test our heat index calculations '''
        t = datatypes.temperature(80.0, 'F')
        td = datatypes.temperature(70.0, 'F')
        hdx = meteorology.heatindex(t, td)
        self.assertAlmostEqual(hdx.value("F"), 83.93, 2)

        t = datatypes.temperature(30.0, 'F')
        hdx = meteorology.heatindex(t, td)
        self.assertAlmostEqual(hdx.value("F"), 30.00, 2)
Example #33
def test_heatindex():
    """ Test our heat index calculations """
    t = datatypes.temperature(80.0, "F")
    td = datatypes.temperature(70.0, "F")
    hdx = meteorology.heatindex(t, td)
    assert abs(hdx.value("F") - 83.93) < 0.01

    t = datatypes.temperature(30.0, "F")
    hdx = meteorology.heatindex(t, td)
    assert abs(hdx.value("F") - 30.00) < 0.01
Example #34
    def test_heatindex(self):
        ''' Test our heat index calculations '''
        t = datatypes.temperature(80.0, 'F')
        td = datatypes.temperature(70.0, 'F')
        hdx = meteorology.heatindex(t, td)
        self.assertAlmostEqual(hdx.value("F"), 83.93, 2)

        t = datatypes.temperature(30.0, 'F')
        hdx = meteorology.heatindex(t, td)
        self.assertAlmostEqual(hdx.value("F"), 30.00, 2)
Example #35
def process(model, lon, lat):
    """ Generate a plot for this given combination """
    (fig, ax) = plt.subplots(1,1)
    modelts = get_latest_time(model)
    if modelts is None:
        ax.text(0.5, 0.5, "No Data Found to Plot!", ha='center')
        sys.stdout.write("Content-Type: image/png\n\n")
        fig.savefig( sys.stdout, format="png")
        return
    nc = netCDF4.Dataset(
            modelts.strftime("/mesonet/share/frost/"+model+"/%Y%m%d%H%M_iaoutput.nc"),'r')
    times = get_times(nc)
    i, j = get_ij(lon, lat, nc)
    
    ax.plot(times, temperature(nc.variables['bdeckt'][:,i,j],'K').value('F'),
            color='k', label='Bridge Deck Temp' if model == 'bridget' else 'Pavement')
    ax.plot(times, temperature(nc.variables['tmpk'][:,i,j], 'K').value("F"),
            color='r', label='Air Temp')
    ax.plot(times, temperature(nc.variables['dwpk'][:,i,j], 'K').value("F"),
            color='g', label='Dew Point')
    #ax.set_ylim(-30,150)
    ax.set_title(("ISUMM5 %s Timeseries\n"
                 +"i: %s j:%s lon: %.2f lat: %.2f Model Run: %s") % (model,
                    i, j, nc.variables['lon'][i,j], nc.variables['lat'][i,j],
                    modelts.astimezone(pytz.timezone("America/Chicago")).strftime(
                            "%-d %b %Y %-I:%M %p")))
    
    ax.xaxis.set_major_locator(
                               mdates.DayLocator(interval=1,
                                        tz=pytz.timezone("America/Chicago"))
                               )
    ax.xaxis.set_major_formatter(mdates.DateFormatter('%d %b\n%Y',
                                        tz=pytz.timezone("America/Chicago")))
    ax.axhline(32, linestyle='-.')
    ax.grid(True)
    ax.set_ylabel("Temperature $^\circ$F")

    (ymin, ymax) = ax.get_ylim()

    for i2, ifrost in enumerate(nc.variables['ifrost'][:-1,i,j]):
        ax.barh(ymax-1, 1.0/24.0/4.0, left=times[i2], 
                fc=get_ifrost_color(ifrost), ec='none')
    for i2, icond in enumerate(nc.variables['icond'][:-1,i,j]):
        ax.barh(ymax-2, 1.0/24.0/4.0, left=times[i2], 
                fc=get_icond_color(model, icond), ec='none')

    # Shrink current axis's height by 10% on the bottom
    box = ax.get_position()
    ax.set_position([box.x0, box.y0 + box.height * 0.1,
                 box.width, box.height * 0.9])
    ax.legend(loc='upper center',
              bbox_to_anchor=(0.5, -0.08),fancybox=True, shadow=True, ncol=3)
    
    sys.stdout.write("Content-Type: image/png\n\n")
    fig.savefig( sys.stdout, format="png")
Example #36
def make_daily_plot(ctx):
    """Generate a daily plot of max/min 4 inch soil temps"""
    df = read_sql(
        """
        SELECT date(valid), min(tsoil_c_avg_qc),
        max(tsoil_c_avg_qc), avg(tsoil_c_avg_qc) from sm_hourly
        where station = %s and valid >= %s and valid < %s
        and tsoil_c_avg is not null GROUP by date ORDER by date ASC
    """,
        ctx["pgconn"],
        params=(
            ctx["station"],
            ctx["sts"].strftime("%Y-%m-%d 00:00"),
            ctx["ets"].strftime("%Y-%m-%d 23:59"),
        ),
        index_col="date",
    )
    if df.empty:
        raise NoDataFound("No Data Found for Query")

    mins = temperature(df["min"].values, "C").value("F")
    maxs = temperature(df["max"].values, "C").value("F")
    avgs = temperature(df["avg"].values, "C").value("F")
    (fig, ax) = plt.subplots(1, 1)
    ax.bar(
        df.index.values,
        maxs - mins,
        bottom=mins,
        fc="tan",
        ec="brown",
        zorder=2,
        align="center",
        label="Max/Min",
    )
    ax.scatter(
        df.index.values,
        avgs,
        marker="*",
        s=30,
        zorder=3,
        color="brown",
        label="Hourly Avg",
    )
    ax.axhline(50, lw=1.5, c="k")
    ax.grid(True)
    ax.set_ylabel(r"4 inch Soil Temperature $^\circ$F")
    ax.set_title(("ISUSM Station: %s Timeseries\n"
                  "Daily Max/Min/Avg 4 inch Soil Temperatures") %
                 (ctx["_nt"].sts[ctx["station"]]["name"], ))
    ax.xaxis.set_major_formatter(mdates.DateFormatter("%-d %b\n%Y"))
    interval = int(len(df.index) / 7.0 + 1)
    ax.xaxis.set_major_locator(mdates.DayLocator(interval=interval))
    ax.legend(loc="best", ncol=2, fontsize=10)
    return fig, df
Example #37
def make_daily_plot(ctx):
    """Generate a daily plot of max/min 4 inch soil temps"""
    import matplotlib
    matplotlib.use('agg')
    import matplotlib.pyplot as plt
    import matplotlib.dates as mdates
    icursor = ctx['pgconn'].cursor(cursor_factory=psycopg2.extras.DictCursor)
    icursor.execute("""SELECT date(valid), min(tsoil_c_avg_qc),
    max(tsoil_c_avg_qc), avg(tsoil_c_avg_qc) from sm_hourly
    where station = '%s' and valid >= '%s 00:00' and valid < '%s 23:56'
    and tsoil_c_avg is not null GROUP by date ORDER by date ASC
    """ % (ctx['station'], ctx['sts'].strftime("%Y-%m-%d"),
           ctx['ets'].strftime("%Y-%m-%d")))
    dates = []
    mins = []
    maxs = []
    avgs = []
    for row in icursor:
        dates.append(row[0])
        mins.append(row[1])
        maxs.append(row[2])
        avgs.append(row[3])

    mins = temperature(np.array(mins), 'C').value('F')
    maxs = temperature(np.array(maxs), 'C').value('F')
    avgs = temperature(np.array(avgs), 'C').value('F')
    df = pd.DataFrame(dict(mins=mins, maxs=maxs, avgs=avgs, dates=dates))
    (fig, ax) = plt.subplots(1, 1)
    ax.bar(dates,
           maxs - mins,
           bottom=mins,
           fc='tan',
           ec='brown',
           zorder=2,
           align='center',
           label='Max/Min')
    ax.scatter(dates,
               avgs,
               marker='*',
               s=30,
               zorder=3,
               color='brown',
               label='Hourly Avg')
    ax.axhline(50, lw=1.5, c='k')
    ax.grid(True)
    ax.set_ylabel(r"4 inch Soil Temperature $^\circ$F")
    ax.set_title(("ISUSM Station: %s Timeseries\n"
                  "Daily Max/Min/Avg 4 inch Soil Temperatures") %
                 (ctx['nt'].sts[ctx['station']]['name'], ))
    ax.xaxis.set_major_formatter(mdates.DateFormatter('%-d %b\n%Y'))
    interval = int(len(dates) / 7) + 1
    ax.xaxis.set_major_locator(mdates.DayLocator(interval=interval))
    ax.legend(loc='best', ncol=2, fontsize=10)
    return fig, df
Example #38
def hourly_process(nwsli, maxts):
    """ Process the hourly file """
    fn = "%s/%s_HrlySI.dat" % (BASE, STATIONS[nwsli])
    df = common_df_logic(fn, maxts, nwsli, "sm_hourly")
    if df is None:
        return 0
    processed = 0
    LOG.debug("processing %s rows from %s", len(df.index), fn)
    acursor = ACCESS.cursor()
    for _i, row in df.iterrows():
        # Update IEMAccess
        ob = Observation(nwsli, "ISUSM", row["valid"])
        tmpc = temperature(row["tair_c_avg_qc"], "C")
        if tmpc.value("F") > -50 and tmpc.value("F") < 140:
            ob.data["tmpf"] = tmpc.value("F")
            relh = humidity(row["rh_qc"], "%")
            ob.data["relh"] = relh.value("%")
            ob.data["dwpf"] = met.dewpoint(tmpc, relh).value("F")
        ob.data["srad"] = row["slrkw_avg_qc"]
        ob.data["phour"] = round(
            distance(row["rain_mm_tot_qc"], "MM").value("IN"), 2
        )
        ob.data["sknt"] = speed(row["ws_mps_s_wvt_qc"], "MPS").value("KT")
        if "ws_mph_max" in df.columns:
            ob.data["gust"] = speed(row["ws_mph_max_qc"], "MPH").value("KT")
            ob.data["max_gust_ts"] = row["ws_mph_tmx"]
        ob.data["drct"] = row["winddir_d1_wvt_qc"]
        if "tsoil_c_avg" in df.columns:
            ob.data["c1tmpf"] = temperature(row["tsoil_c_avg_qc"], "C").value(
                "F"
            )
        if "t12_c_avg_qc" in df.columns:
            ob.data["c2tmpf"] = temperature(row["t12_c_avg_qc"], "C").value(
                "F"
            )
        if "t24_c_avg_qc" in df.columns:
            ob.data["c3tmpf"] = temperature(row["t24_c_avg_qc"], "C").value(
                "F"
            )
        if "t50_c_avg" in df.columns:
            ob.data["c4tmpf"] = temperature(row["t50_c_avg_qc"], "C").value(
                "F"
            )
        if "calc_vwc_12_avg" in df.columns:
            ob.data["c2smv"] = row["calc_vwc_12_avg_qc"] * 100.0
        if "calc_vwc_24_avg" in df.columns:
            ob.data["c3smv"] = row["calc_vwc_24_avg_qc"] * 100.0
        if "calc_vwc_50_avg" in df.columns:
            ob.data["c4smv"] = row["calc_vwc_50_avg_qc"] * 100.0
        ob.save(acursor)
        processed += 1
    acursor.close()
    ACCESS.commit()
    return processed
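
A minimal sketch of the temperature, humidity and dew point handling in the hourly processor above (pyiem assumed; the observation values are invented):

from pyiem import meteorology as met
from pyiem.datatypes import humidity, temperature

tmpc = temperature(25.0, "C")
relh = humidity(60.0, "%")
if tmpc.value("F") > -50 and tmpc.value("F") < 140:   # same sanity bounds as above
    print(round(tmpc.value("F"), 1))                  # 77.0
    print(round(met.dewpoint(tmpc, relh).value("F"), 1))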
Example #39
    def test_vectorized(self):
        """See that heatindex and windchill can do lists"""
        temp = datatypes.temperature([0, 10], 'F')
        sknt = datatypes.speed([30, 40], 'MPH')
        val = meteorology.windchill(temp, sknt).value('F')
        self.assertAlmostEqual(val[0], -24.50, 2)

        t = datatypes.temperature([80.0, 90.0], 'F')
        td = datatypes.temperature([70.0, 60.0], 'F')
        hdx = meteorology.heatindex(t, td)
        self.assertAlmostEqual(hdx.value("F")[0], 83.93, 2)
Example #40
def daily_process(nwsli, maxts):
    """ Process the daily file """
    # print '-------------- DAILY PROCESS ----------------'
    fn = "%s%s" % (BASE, STATIONS[nwsli]['daily'].split("/")[1])
    if not os.path.isfile(fn):
        return 0
    lines = open(fn).readlines()
    if len(lines) < 5:
        return 0
    # Read header....
    headers = []
    for col in lines[1].strip().replace('"', '').split(","):
        headers.append(VARCONV.get(col.lower(), col.lower()))
    # Read data
    processed = 0
    for i in range(len(lines)-1, 3, -1):
        tokens = lines[i].strip().replace('"','').split(",")
        if len(tokens) != len(headers):
            continue
        valid = datetime.datetime.strptime(tokens[ headers.index('timestamp')][:10],
                                           '%Y-%m-%d')
        valid = valid.date() - datetime.timedelta(days=1)
        if valid < maxts:
            break
        if valid == maxts: # Reprocess
            icursor.execute("""DELETE from sm_daily WHERE valid = '%s' and
            station = '%s' """ % (valid.strftime("%Y-%m-%d"), nwsli))
        # We are ready for dbinserting!
        dbcols = "station,valid," + ",".join(headers[2:])
        dbvals = "'%s','%s'," % (nwsli, valid.strftime("%Y-%m-%d"))
        for v in tokens[2:]:
            dbvals += "%s," % (formatter(v),)
        sql = "INSERT into sm_daily (%s) values (%s)" % (dbcols, dbvals[:-1])
        icursor.execute(sql)

        # Need a timezone
        valid = datetime.datetime(valid.year, valid.month, valid.day, 12, 0)
        valid = valid.replace(tzinfo=pytz.timezone("America/Chicago"))
        ob = Observation(nwsli, 'ISUSM', valid)
        ob.data['max_tmpf'] = temperature(
                    float(tokens[headers.index('tair_c_max')]), 'C').value('F')
        ob.data['min_tmpf'] = temperature(
                    float(tokens[headers.index('tair_c_min')]), 'C').value('F')
        ob.data['pday'] = round(
            float(tokens[headers.index('rain_mm_tot')]) / 24.5, 2)
        ob.data['et_inch'] = float(tokens[headers.index('dailyet')]) / 24.5
        ob.data['srad_mj'] = float(tokens[headers.index('slrmj_tot')])
        ob.data['max_sknt'] = float(tokens[headers.index('ws_mps_max')]) * 1.94
        ob.save(accesstxn)
        #    print 'soilm_ingest.py station: %s ts: %s daily updated no data?' % (
        #                                nwsli, valid.strftime("%Y-%m-%d"))
        processed += 1
    return processed
Example #41
def copy_iemre(nc, fromyear, ncdate0, ncdate1, islice, jslice):
    """Copy IEMRE data from a given year to **inclusive** dates."""
    rencfn = iemre.get_daily_ncname(fromyear)
    if not os.path.isfile(rencfn):
        print("reanalysis fn %s missing" % (rencfn,))
        return
    renc = ncopen(rencfn)
    tidx0 = (ncdate0 - datetime.date(fromyear, 1, 1)).days
    tidx1 = (ncdate1 - datetime.date(fromyear, 1, 1)).days
    tslice = slice(tidx0, tidx1 + 1)
    # time steps to fill
    tsteps = (tidx1 - tidx0) + 1
    # figure out the slice
    if ncdate0.strftime("%m%d") == "0101":
        retslice = slice(0, tsteps)
    else:
        retslice = slice(0 - tsteps, None)
    # print("copy_iemre from %s filling %s steps nc: %s iemre: %s" % (
    #    fromyear, tsteps, tslice, retslice
    # ))
    highc = temperature(
        renc.variables["high_tmpk"][retslice, jslice, islice], "K"
    ).value("C")
    lowc = temperature(
        renc.variables["low_tmpk"][retslice, jslice, islice], "K"
    ).value("C")
    nc.variables["tmax"][tslice, :, :] = highc
    nc.variables["tmin"][tslice, :, :] = lowc
    nc.variables["gdd_f"][tslice, :, :] = gdd(
        temperature(highc, "C"), temperature(lowc, "C")
    )
    nc.variables["prcp"][tslice, :, :] = renc.variables["p01d"][
        retslice, jslice, islice
    ]
    for rt, nt in zip(
        list(
            range(
                retslice.start, 0 if retslice.stop is None else retslice.stop
            )
        ),
        list(range(tslice.start, tslice.stop)),
    ):
        # IEMRE power_swdn is MJ, test to see if data exists
        srad = renc.variables["power_swdn"][rt, jslice, islice]
        if srad.mask.any():
            # IEMRE rsds uses W m-2, we want MJ
            srad = (
                renc.variables["rsds"][rt, jslice, islice]
                * 86400.0
                / 1000000.0
            )
        nc.variables["srad"][nt, :, :] = srad
    renc.close()
Example #42
def daily_process(nwsli, maxts):
    """ Process the daily file """
    # print '-------------- DAILY PROCESS ----------------'
    fn = "%s%s" % (BASE, STATIONS[nwsli]['daily'].split("/")[1])
    if not os.path.isfile(fn):
        return 0
    lines = open(fn).readlines()
    if len(lines) < 5:
        return 0
    # Read header....
    headers = []
    for col in lines[1].strip().replace('"', '').split(","):
        headers.append(VARCONV.get(col.lower(), col.lower()))
    # Read data
    processed = 0
    for i in range(len(lines) - 1, 3, -1):
        tokens = lines[i].strip().replace('"', '').split(",")
        if len(tokens) != len(headers):
            continue
        valid = datetime.datetime.strptime(
            tokens[headers.index('timestamp')][:10], '%Y-%m-%d')
        valid = valid.date() - datetime.timedelta(days=1)
        if valid < maxts:
            break
        if valid == maxts:  # Reprocess
            icursor.execute("""DELETE from sm_daily WHERE valid = '%s' and
            station = '%s' """ % (valid.strftime("%Y-%m-%d"), nwsli))
        # We are ready for dbinserting!
        dbcols = "station,valid," + ",".join(headers[2:])
        dbvals = "'%s','%s'," % (nwsli, valid.strftime("%Y-%m-%d"))
        for v in tokens[2:]:
            dbvals += "%s," % (formatter(v), )
        sql = "INSERT into sm_daily (%s) values (%s)" % (dbcols, dbvals[:-1])
        icursor.execute(sql)

        # Need a timezone
        valid = datetime.datetime(valid.year, valid.month, valid.day, 12, 0)
        valid = valid.replace(tzinfo=pytz.timezone("America/Chicago"))
        ob = Observation(nwsli, 'ISUSM', valid)
        ob.data['max_tmpf'] = temperature(
            float(tokens[headers.index('tair_c_max')]), 'C').value('F')
        ob.data['min_tmpf'] = temperature(
            float(tokens[headers.index('tair_c_min')]), 'C').value('F')
        # rain_mm_tot and dailyet are millimeters; 25.4 mm per inch
        ob.data['pday'] = round(
            float(tokens[headers.index('rain_mm_tot')]) / 25.4, 2)
        ob.data['et_inch'] = float(tokens[headers.index('dailyet')]) / 25.4
        ob.data['srad_mj'] = float(tokens[headers.index('slrmj_tot')])
        # ws_mps_max is m s-1; roughly 1.94 knots per m s-1
        ob.data['max_sknt'] = float(tokens[headers.index('ws_mps_max')]) * 1.94
        ob.save(accesstxn)
        #    print 'soilm_ingest.py station: %s ts: %s daily updated no data?' % (
        #                                nwsli, valid.strftime("%Y-%m-%d"))
        processed += 1
    return processed
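The DELETE and INSERT above interpolate values directly into the SQL string. Below is a hedged alternative sketch using driver-side parameter binding; it assumes a DB-API cursor such as psycopg2's with %s placeholders and that the row values have already been converted to Python types (None for missing data). insert_daily_row is a hypothetical helper, not part of the original script.

# Hypothetical sketch: the same sm_daily insert with parameter binding.
# Assumes a psycopg2-style DB-API cursor (%s placeholders) and values
# already converted to Python types (None for missing data).
def insert_daily_row(cursor, nwsli, valid, headers, values):
    """Insert one daily row; only the column names are interpolated."""
    cols = ['station', 'valid'] + headers[2:]
    placeholders = ','.join(['%s'] * (len(values) + 2))
    sql = "INSERT into sm_daily (%s) values (%s)" % (
        ','.join(cols), placeholders)
    cursor.execute(sql, [nwsli, valid] + list(values))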
Example #43
0
def replace_obs(df, location):
    """Replace dataframe data with obs for this location

    The tricky part: if the baseline already provides data for this year,
    we should use it!
    """
    pgconn = get_dbconn('isuag', user='******')
    cursor = pgconn.cursor()
    isusm = XREF[location]['isusm']
    today = datetime.date.today()
    jan1 = today.replace(month=1, day=1)
    years = [
        int(y) for y in np.arange(df.index.values.min().year,
                                  df.index.values.max().year + 1)
    ]

    cursor.execute(
        """
        select valid, tair_c_max_qc, tair_c_min_qc, slrmj_tot_qc,
        vwc_12_avg_qc,
        vwc_24_avg_qc, vwc_50_avg_qc, tsoil_c_avg_qc, t12_c_avg_qc,
        t24_c_avg_qc, t50_c_avg_qc,
        rain_mm_tot_qc from sm_daily WHERE station = %s and valid >= %s
        and tair_c_max_qc is not null and tair_c_min_qc is not null
        ORDER by valid
        """, (isusm, jan1))
    rcols = [
        'maxt', 'mint', 'radn', 'gdd', 'sm12', 'sm24', 'sm50', 'st4', 'st12',
        'st24', 'st50', 'rain'
    ]
    replaced = []
    for row in cursor:
        valid = row[0]
        # Does our df currently have data for this date?  If so, we shall do
        # no more
        dont_replace = not np.isnan(df.at[valid, 'mint'])
        if not dont_replace:
            replaced.append(valid)
        _gdd = gdd(temperature(row[1], 'C'), temperature(row[2], 'C'))
        for year in years:
            if valid.month == 2 and valid.day == 29 and year % 4 != 0:
                continue
            if dont_replace:
                df.loc[valid.replace(year=year),
                       rcols[3:-1]] = (_gdd, row[4], row[5], row[6], row[7],
                                       row[8], row[9], row[10])
                continue
            df.loc[valid.replace(year=year),
                   rcols] = (row[1], row[2], row[3], _gdd, row[4], row[5],
                             row[6], row[7], row[8], row[9], row[10], row[11])
    if replaced:
        print(("  replaced with obs from %s for %s->%s") %
              (isusm, replaced[0], replaced[-1]))
Example #44
0
def dowork(form):
    """Do work!"""
    dates = compute_dates(form.getfirst('valid'))
    lat = float(form.getfirst("lat"))
    lon = float(form.getfirst("lon"))

    i, j = prism.find_ij(lon, lat)

    # Check the grid indices before using them; int(None) would raise
    if i is None or j is None:
        ssw(json.dumps({'error': 'Coordinates outside of domain'}))
        return

    res = {
        'gridi': int(i),
        'gridj': int(j),
        'data': [],
        'disclaimer': ("PRISM Climate Group, Oregon State University, "
                       "http://prism.oregonstate.edu, created 4 Feb 2004.")
    }

    for dpair in dates:
        sts = dpair[0]
        ets = dpair[-1]
        sidx = prism.daily_offset(sts)
        eidx = prism.daily_offset(ets) + 1

        ncfn = "/mesonet/data/prism/%s_daily.nc" % (sts.year, )
        if not os.path.isfile(ncfn):
            continue
        nc = ncopen(ncfn)

        tmax = nc.variables['tmax'][sidx:eidx, j, i]
        tmin = nc.variables['tmin'][sidx:eidx, j, i]
        ppt = nc.variables['ppt'][sidx:eidx, j, i]
        nc.close()

        for tx, (mt, nt, pt) in enumerate(zip(tmax, tmin, ppt)):
            valid = sts + datetime.timedelta(days=tx)
            res['data'].append({
                'valid': valid.strftime("%Y-%m-%dT12:00:00Z"),
                'high_f': myrounder(
                    datatypes.temperature(mt, 'C').value('F'), 1),
                'low_f': myrounder(
                    datatypes.temperature(nt, 'C').value('F'), 1),
                'precip_in': myrounder(
                    datatypes.distance(pt, 'MM').value('IN'), 2)
            })

    return json.dumps(res)
Example #45
0
def replace_forecast(df, location):
    """Replace dataframe data with forecast for this location"""
    pgconn = get_dbconn("coop", user="******")
    cursor = pgconn.cursor()
    today = datetime.date.today()
    nextjan1 = datetime.date(today.year + 1, 1, 1)
    coop = XREF[location]["climodat"]
    years = [
        int(y) for y in np.arange(df.index.values.min().year,
                                  df.index.values.max().year + 1)
    ]
    cursor.execute(
        """
        SELECT day, high, low, precip from alldata_forecast WHERE
        modelid = (SELECT id from forecast_inventory WHERE model = 'NDFD'
        ORDER by modelts DESC LIMIT 1) and station = %s and day >= %s
    """,
        (coop, today),
    )
    rcols = ["maxt", "mint", "rain"]
    for row in cursor:
        valid = row[0]
        maxc = temperature(row[1], "F").value("C")
        minc = temperature(row[2], "F").value("C")
        rain = distance(row[3], "IN").value("MM")
        for year in years:
            # This fails for leap year, just silently skip it when it does.
            try:
                idx = valid.replace(year=year)
            except ValueError:
                continue
            df.loc[idx, rcols] = (maxc, minc, rain)

    # Need to get radiation from CFS
    cursor.execute(
        """
        SELECT day, srad from alldata_forecast WHERE
        modelid = (SELECT id from forecast_inventory WHERE model = 'CFS'
        ORDER by modelts DESC LIMIT 1) and station = %s and day >= %s
        and day < %s
    """,
        (coop, today, nextjan1),
    )
    for row in cursor:
        valid = row[0]
        for year in years:
            # This fails for leap year, just silently skip it when it does.
            try:
                idx = valid.replace(year=year)
            except ValueError:
                continue
            df.loc[idx, "radn"] = row[1]
Example #46
0
def hourly_process(nwsli, maxts):
    """ Process the hourly file """
    fn = "%s/%s_HrlySI.dat" % (BASE, STATIONS[nwsli])
    df = common_df_logic(fn, maxts, nwsli, "sm_hourly")
    if df is None:
        return 0
    processed = 0
    LOG.debug("processing %s rows from %s", len(df.index), fn)
    acursor = ACCESS.cursor()
    for _i, row in df.iterrows():
        # Update IEMAccess
        # print nwsli, valid
        ob = Observation(nwsli, 'ISUSM', row['valid'])
        tmpc = temperature(row['tair_c_avg_qc'], 'C')
        if tmpc.value('F') > -50 and tmpc.value('F') < 140:
            ob.data['tmpf'] = tmpc.value('F')
            relh = humidity(row['rh_qc'], '%')
            ob.data['relh'] = relh.value('%')
            ob.data['dwpf'] = met.dewpoint(tmpc, relh).value('F')
        ob.data['srad'] = row['slrkw_avg_qc']
        ob.data['phour'] = round(distance(row['rain_mm_tot_qc'],
                                          'MM').value('IN'), 2)
        ob.data['sknt'] = speed(row['ws_mps_s_wvt_qc'], 'MPS').value("KT")
        if 'ws_mph_max' in df.columns:
            ob.data['gust'] = speed(row['ws_mph_max_qc'], 'MPH').value('KT')
            ob.data['max_gust_ts'] = row['ws_mph_tmx']
        ob.data['drct'] = row['winddir_d1_wvt_qc']
        if 'tsoil_c_avg' in df.columns:
            ob.data['c1tmpf'] = temperature(row['tsoil_c_avg_qc'],
                                            'C').value('F')
        if 't12_c_avg_qc' in df.columns:
            ob.data['c2tmpf'] = temperature(
                row['t12_c_avg_qc'], 'C').value('F')
        if 't24_c_avg_qc' in df.columns:
            ob.data['c3tmpf'] = temperature(
                row['t24_c_avg_qc'], 'C').value('F')
        if 't50_c_avg' in df.columns:
            ob.data['c4tmpf'] = temperature(row['t50_c_avg_qc'],
                                            'C').value('F')
        if 'calc_vwc_12_avg' in df.columns:
            ob.data['c2smv'] = row['calc_vwc_12_avg_qc'] * 100.0
        if 'calc_vwc_24_avg' in df.columns:
            ob.data['c3smv'] = row['calc_vwc_24_avg_qc'] * 100.0
        if 'calc_vwc_50_avg' in df.columns:
            ob.data['c4smv'] = row['calc_vwc_50_avg_qc'] * 100.0
        ob.save(acursor)
        # print 'soilm_ingest.py station: %s ts: %s hrly updated no data?' % (
        #                                        nwsli, valid)
        processed += 1
    acursor.close()
    ACCESS.commit()
    return processed
Example #47
0
def load(dirname, location, sdate):
    """ Read a file please """
    data = []
    idx = []
    mindoy = int(sdate.strftime("%j"))
    fn = "%s/%s.met" % (dirname, location)
    if not os.path.isfile(fn):
        raise NoDataFound("Data file was not found.")
    for line in open(fn):
        line = line.strip()
        if not line.startswith("19") and not line.startswith("20"):
            continue
        tokens = line.split()
        if int(tokens[1]) < mindoy:
            continue
        data.append(tokens)
        ts = datetime.date(int(tokens[0]), 1, 1) + datetime.timedelta(
            days=int(tokens[1]) - 1
        )
        idx.append(ts)
    if len(data[0]) < 10:
        cols = ["year", "doy", "radn", "maxt", "mint", "rain"]
    else:
        cols = [
            "year",
            "doy",
            "radn",
            "maxt",
            "mint",
            "rain",
            "gdd",
            "st4",
            "st12",
            "st24",
            "st50",
            "sm12",
            "sm24",
            "sm50",
        ]
    df = pd.DataFrame(data, index=idx, columns=cols)
    for col in cols:
        df[col] = pd.to_numeric(df[col], errors="coerce")
    if len(data[0]) < 10:
        df["gdd"] = gdd(
            temperature(df["maxt"].values, "C"),
            temperature(df["mint"].values, "C"),
        )
    df["gddcum"] = df.groupby(["year"])["gdd"].apply(lambda x: x.cumsum())
    df["raincum"] = distance(
        df.groupby(["year"])["rain"].apply(lambda x: x.cumsum()), "MM"
    ).value("IN")
    return df
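A short, hypothetical usage sketch for load() above; the directory, location name, and start date are placeholders, and it assumes a whitespace-delimited .met file whose rows begin with year and day-of-year, as the parser expects:

# Hypothetical usage of load(); the path, location, and date are
# placeholders for a .met file in the layout the parser expects.
import datetime

df = load("/path/to/metfiles", "ames", datetime.date(2019, 5, 1))
print(df[["maxt", "mint", "gdd", "gddcum", "raincum"]].tail())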
Example #48
0
def load(dirname, location, sdate):
    """ Read a file please """
    data = []
    idx = []
    fn = "%s/%s.met" % (dirname, location)
    if not os.path.isfile(fn):
        raise NoDataFound("File was not found.")
    for line in open(fn):
        line = line.strip()
        if not line.startswith('19') and not line.startswith('20'):
            continue
        tokens = line.split()
        if float(tokens[5]) > 90:
            continue
        data.append(tokens)
        ts = (datetime.date(int(tokens[0]), 1, 1) +
              datetime.timedelta(days=int(tokens[1]) - 1))
        idx.append(ts)
    if len(data[0]) < 10:
        cols = ['year', 'doy', 'radn', 'maxt', 'mint', 'rain']
    else:
        cols = [
            'year', 'doy', 'radn', 'maxt', 'mint', 'rain', 'gdd', 'st4',
            'st12', 'st24', 'st50', 'sm12', 'sm24', 'sm50'
        ]
    df = pd.DataFrame(data, index=idx, columns=cols)
    for col in cols:
        df[col] = pd.to_numeric(df[col], errors='coerce')
    if len(data[0]) < 10:
        df['gdd'] = gdd(temperature(df['maxt'].values, 'C'),
                        temperature(df['mint'].values, 'C'))
    bins = []
    today = datetime.date.today()
    for valid, _ in df.iterrows():
        if valid >= today:
            bins.append(0)
            continue
        if sdate == 'nov1' and valid.month >= 11:
            bins.append(valid.year + 1)
            continue
        if valid.month < today.month:
            bins.append(valid.year)
            continue
        if valid.month == today.month and valid.day < today.day:
            bins.append(valid.year)
            continue
        bins.append(0)
    df['bin'] = bins
    df['rain'] = distance(df['rain'].values, 'MM').value('IN')
    df['avgt'] = temperature((df['maxt'] + df['mint']) / 2.0, 'C').value('F')
    return df
Example #49
0
def computeOthers(d):
    r = {}
    # Need something to compute other values needed for output
    for sid in d.keys():
        ob = d[sid]
        ob["ticks"] = calendar.timegm(ob['utc_valid'].timetuple())
        if ob['sknt'] is not None:
            ob["sped"] = ob["sknt"] * 1.17
        if ob.get('tmpf') is not None and ob.get('dwpf') is not None:
            tmpf = temperature(ob['tmpf'], 'F')
            dwpf = temperature(ob['dwpf'], 'F')
            ob["relh"] = meteorology.relh(tmpf, dwpf).value('%')
        else:
            ob['relh'] = None
        if ob['relh'] == 'M':
            ob['relh'] = None

        if (ob.get('tmpf') is not None and ob.get('dwpf') is not None and
                ob.get('sped') is not None):
            ob['feel'] = meteorology.mcalc_feelslike(
                masked_array([ob['tmpf'], ], units('degF'), mask=[False, ]),
                masked_array([ob['dwpf'], ],  units('degF'), mask=[False, ]),
                masked_array([ob['sped'], ], units('mile per hour'),
                             mask=[False, ])
                ).to(units('degF')).magnitude[0]
        else:
            ob['feel'] = None
        if ob['feel'] == 'M':
            ob['feel'] = None

        ob["altiTend"] = 'S'
        ob["drctTxt"] = util.drct2text(ob["drct"])
        if ob["max_drct"] is None:
            ob["max_drct"] = 0
        ob["max_drctTxt"] = util.drct2text(ob["max_drct"])
        ob["20gu"] = 0
        if ob['gust'] is not None:
            ob["gmph"] = ob["gust"] * 1.17
        if ob['max_gust'] is not None:
            ob["max_sped"] = ob["max_gust"] * 1.17
        else:
            ob['max_sped'] = 0
        ob['pday'] = 0 if ob['pday'] is None else ob['pday']
        ob['pmonth'] = 0 if ob['pmonth'] is None else ob['pmonth']
        ob["gtim"] = "0000"
        ob["gtim2"] = "12:00 AM"
        if ob["max_gust_ts"] is not None and ob["max_gust_ts"] != "null":
            ob["gtim"] = ob["max_gust_ts"].strftime("%H%M")
            ob["gtim2"] = ob["max_gust_ts"].strftime("%-I:%M %p")
        r[sid] = ob
    return r
Example #50
0
def make_rwis(i, j, initts, oldncout):
    """ Generate spinup file """
    i = i - IOFFSET
    j = j - JOFFSET

    o = open('rwis.xml', 'w')
    o.write("""<?xml version="1.0"?>
<observation>
 <header>
  <filetype>rwis-observation</filetype>
  <version>1.0</version>
  <road-station>oaa</road-station>
  </header>
  <measure-list>""")
    if oldncout is None:
        fake_rwis(o, initts)
        return

    ts0 = find_initts(oldncout)
    # at Air Temp in C
    tmpc = dt.temperature(oldncout.variables['tmpk'][:, i, j], 'K').value('C')
    # td Dew point in C
    dwpc = dt.temperature(oldncout.variables['dwpk'][:, i, j], 'K').value('C')
    # pi presence of precipitation 0: No -- 1: Yes
    # ws wind speed in km / hr
    ws = dt.speed(oldncout.variables['wmps'][:, i, j], 'MPS').value('KMH')
    # sc condition code  1=DryCond 2=Wet 3=Ice 4=MixWaterSnow
    #                    5=dew 6=Meltsnow 7=Frost 8=Ice
    # Was set to 33 for SSI ?
    icond = oldncout.variables['icond'][:, i, j]
    # st road surface temp
    bridgec = dt.temperature(
        oldncout.variables['bdeckt'][:, i, j], 'K').value('C')
    # sst sub surface temp
    subsfc = dt.temperature(
        oldncout.variables['subsfct'][:, i, j], 'K').value('C')
    t1 = initts + datetime.timedelta(hours=12)
    for tstep in range(4, len(oldncout.dimensions['time']), 4):
        ts = ts0 + datetime.timedelta(
            minutes=int(oldncout.variables['time'][tstep]))
        if ts > t1:
            break
        o.write("""<measure><observation-time>%s</observation-time>
<at>%.2f</at><td>%.2f</td><pi>0</pi><ws>%.2f</ws><sc>%s</sc><st>%.2f</st>
<sst>%.2f</sst></measure>
      """ % (ts.strftime("%Y-%m-%dT%H:%MZ"), tmpc[tstep], dwpc[tstep],
             ws[tstep], icond[tstep], bridgec[tstep], subsfc[tstep]))

    o.write("</measure-list></observation>")
    o.close()
Example #51
0
def computeOthers(d):
    r = {}
    # Need something to compute other values needed for output
    for sid in d.keys():
        ob = d[sid]
        ob["ticks"] = calendar.timegm(ob['utc_valid'].timetuple())
        if ob['sknt'] is not None:
            ob["sped"] = ob["sknt"] * 1.17
        if ob.get('tmpf') is not None and ob.get('dwpf') is not None:
            tmpf = temperature(ob['tmpf'], 'F')
            dwpf = temperature(ob['dwpf'], 'F')
            ob["relh"] = meteorology.relh(tmpf, dwpf).value('%')
        else:
            ob['relh'] = None
        if ob['relh'] == 'M':
            ob['relh'] = None

        if (ob.get('tmpf') is not None and ob.get('dwpf') is not None and
                ob.get('sped') is not None):
            tmpf = temperature(ob['tmpf'], 'F')
            dwpf = temperature(ob['dwpf'], 'F')
            sknt = speed(ob['sped'], 'MPH')
            ob["feel"] = meteorology.feelslike(tmpf, dwpf, sknt).value("F")
        else:
            ob['feel'] = None
        if ob['feel'] == 'M':
            ob['feel'] = None

        ob["altiTend"] = 'S'
        ob["drctTxt"] = util.drct2text(ob["drct"])
        if ob["max_drct"] is None:
            ob["max_drct"] = 0
        ob["max_drctTxt"] = util.drct2text(ob["max_drct"])
        ob["20gu"] = 0
        if ob['gust'] is not None:
            ob["gmph"] = ob["gust"] * 1.17
        if ob['max_gust'] is not None:
            ob["max_sped"] = ob["max_gust"] * 1.17
        else:
            ob['max_sped'] = 0
        ob['pday'] = 0 if ob['pday'] is None else ob['pday']
        ob['pmonth'] = 0 if ob['pmonth'] is None else ob['pmonth']
        ob["gtim"] = "0000"
        ob["gtim2"] = "12:00 AM"
        if ob["max_gust_ts"] is not None and ob["max_gust_ts"] != "null":
            ob["gtim"] = ob["max_gust_ts"].strftime("%H%M")
            ob["gtim2"] = ob["max_gust_ts"].strftime("%-I:%M %p")
        r[sid] = ob
    return r
Example #52
0
def load_iemre():
    """Use IEM Reanalysis for non-precip data

    24km product is smoothed down to the 0.01 degree grid
    """
    printt("load_iemre() called")
    xaxis = np.arange(MYWEST, MYEAST, 0.01)
    yaxis = np.arange(MYSOUTH, MYNORTH, 0.01)
    xi, yi = np.meshgrid(xaxis, yaxis)

    fn = iemre.get_daily_ncname(VALID.year)
    if not os.path.isfile(fn):
        printt("Missing %s for load_solar, aborting" % (fn,))
        sys.exit()
    nc = netCDF4.Dataset(fn, 'r')
    offset = iemre.daily_offset(VALID)
    lats = nc.variables['lat'][:]
    lons = nc.variables['lon'][:]
    lons, lats = np.meshgrid(lons, lats)

    # Storage is W m-2, we want langleys per day
    data = nc.variables['rsds'][offset, :, :] * 86400. / 1000000. * 23.9
    # Default to a value of 300 when this data is missing, for some reason
    nn = NearestNDInterpolator((np.ravel(lons), np.ravel(lats)),
                               np.ravel(data))
    SOLAR[:] = iemre_bounds_check('rsds', nn(xi, yi), 0, 1000)

    data = temperature(nc.variables['high_tmpk'][offset, :, :], 'K').value('C')
    nn = NearestNDInterpolator((np.ravel(lons), np.ravel(lats)),
                               np.ravel(data))
    HIGH_TEMP[:] = iemre_bounds_check('high_tmpk', nn(xi, yi), -60, 60)

    data = temperature(nc.variables['low_tmpk'][offset, :, :], 'K').value('C')
    nn = NearestNDInterpolator((np.ravel(lons), np.ravel(lats)),
                               np.ravel(data))
    LOW_TEMP[:] = iemre_bounds_check('low_tmpk', nn(xi, yi), -60, 60)

    data = temperature(nc.variables['avg_dwpk'][offset, :, :], 'K').value('C')
    nn = NearestNDInterpolator((np.ravel(lons), np.ravel(lats)),
                               np.ravel(data))
    DEWPOINT[:] = iemre_bounds_check('avg_dwpk', nn(xi, yi), -60, 60)

    data = nc.variables['wind_speed'][offset, :, :]
    nn = NearestNDInterpolator((np.ravel(lons), np.ravel(lats)),
                               np.ravel(data))
    WIND[:] = iemre_bounds_check('wind_speed', nn(xi, yi), 0, 30)

    nc.close()
    printt("load_iemre() finished")
Example #53
0
def do_day(valid):
    """ Process a day please """
    idx = iemre.daily_offset(valid)
    nc = netCDF4.Dataset("/mesonet/data/iemre/%s_mw_daily.nc" % (valid.year, ),
                         'r')
    high = temperature(nc.variables['high_tmpk_12z'][idx, :, :],
                       'K').value('F')
    low = temperature(nc.variables['low_tmpk_12z'][idx, :, :],
                      'K').value('F')
    precip = nc.variables['p01d_12z'][idx, :, :] / 25.4
    nc.close()
    for state in ('IA', 'NE', 'MN', 'WI', 'MI', 'OH', 'IN', 'IL', 'MO',
                  'KS', 'KY', 'ND', 'SD'):
        do_state_day(state, valid, high, low, precip)
        do_climdiv_day(state, valid, high, low, precip)
Example #54
0
def test_dewpoint():
    """ test out computation of dew point """
    for t0, r0, a0 in [[80, 80, 73.42], [80, 20, 35.87]]:
        t = datatypes.temperature(t0, 'F')
        rh = datatypes.humidity(r0, '%')
        dwpk = meteorology.dewpoint(t, rh)
        assert abs(dwpk.value("F") - a0) < 0.01
Example #55
0
def get_currents():
    ''' Return dict of current values '''
    dbconn = psycopg2.connect(database='iem', host='iemdb', user='******')
    cursor = dbconn.cursor()
    dbconn2 = psycopg2.connect(database='isuag', host='iemdb', user='******')
    cursor2 = dbconn2.cursor()
    data = {}
    cursor.execute("""
    SELECT id, valid, tmpf, relh from current c JOIN stations t on
    (t.iemid = c.iemid) WHERE valid > now() - '3 hours'::interval and
    t.network = 'ISUSM'
    """)
    valid = None
    for row in cursor:
        data[row[0]] = {'tmpf': row[2],
                        'rh': row[3],
                        'valid': row[1],
                        'high': None}
        if valid is None:
            valid = row[1]

    # Go get daily values
    cursor2.execute("""SELECT station, tair_c_max from sm_daily
    where valid = %s
    """, (valid,))
    for row in cursor2:
        if row[0] in data:
            data[row[0]]['high'] = temperature(row[1], 'C').value('F')

    cursor.close()
    cursor2.close()
    dbconn.close()
    dbconn2.close()
    return data
Example #56
0
def make_plots(nc):
    ''' Generate some plots '''
    sts = compute_sts(nc)
    lats = nc.variables['lat'][:]
    lons = nc.variables['lon'][:]
    rts = sts.astimezone(pytz.timezone("America/Chicago")).strftime(
        "%d %b %Y %H %p")
    for i, tm in enumerate(nc.variables['time'][:]):
        dt = sts + datetime.timedelta(minutes=float(tm))
        if dt.minute != 0:
            continue
        fhour = int(tm / 60.0)
        fts = dt.astimezone(pytz.timezone("America/Chicago")).strftime(
            "%d %b %Y %H %p")
        for pvar in PVARS:
            m = MapPlot(
                title='ISUMM5/Bridget Modelled %s' % (PVARS[pvar]['title'],),
                subtitle='Model Run: %s Forecast Valid: %s' % (rts, fts))
            vals = nc.variables[pvar][i, :, :]
            if pvar == 'bdeckt':
                vals = temperature(vals, 'K').value('F')
            m.pcolormesh(lons, lats, vals, PVARS[pvar]['levels'], units='mm')
            pqstr = ("plot c %s model/frost/bridget/%02i/%s_%02i_f%03i.png "
                     "bogus png") % (sts.strftime("%Y%m%d%H%M"), sts.hour,
                                     pvar, sts.hour, fhour)
            m.postprocess(pqstr=pqstr)
            m.close()
Example #57
0
def test_dewpoint(self):
    """ test out computation of dew point """
    for t0, r0, a0 in [[80, 80, 73.42], [80, 20, 35.87]]:
        t = datatypes.temperature(t0, 'F')
        rh = datatypes.humidity(r0, '%')
        dwpk = meteorology.dewpoint(t, rh)
        self.assertAlmostEqual(dwpk.value("F"), a0, 2)
Example #58
0
def heatindex(temperature, polyarg):
    """
    Compute the heat index based on

    Stull, Richard (2000). Meteorology for Scientists and Engineers,
    Second Edition. Brooks/Cole. p. 60. ISBN 9780534372149.

    Another opinion on appropriate equation:
    http://www.wpc.ncep.noaa.gov/html/heatindex_equation.shtml

    http://www.weather.gov/media/ffc/ta_htindx.PDF
    """
    if not isinstance(temperature, dt.temperature):
        raise InvalidArguments("heatindex() needs temperature obj as arg")
    if isinstance(polyarg, dt.temperature):  # We have dewpoint
        polyarg = relh(temperature, polyarg)
    rh = polyarg.value("%")
    t = temperature.value("F")
    t2 = t ** 2
    t3 = t ** 3
    rh2 = rh ** 2
    rh3 = rh ** 3
    hdx = (16.923 + ((1.85212e-1) * t) + (5.37941 * rh) -
           ((1.00254e-1) * t * rh) + ((9.41695e-3) * t2) +
           ((7.28898e-3) * rh2) +
           ((3.45372e-4) * t2 * rh) - ((8.14971e-4) * t * rh2) +
           ((1.02102e-5) * t2 * rh2) - ((3.8646e-5) * t3) +
           ((2.91583e-5) * rh3) + ((1.42721e-6) * t3 * rh) +
           ((1.97483e-7) * t * rh3) - ((2.18429e-8) * t3 * rh2) +
           ((8.43296e-10) * t2 * rh3) - ((4.81975e-11) * t3 * rh3))
    hdx = np.where(np.logical_or(np.less(t, 80),
                                 np.greater(t, 120)), t, hdx)
    return dt.temperature(hdx, 'F')
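A minimal usage sketch for heatindex() above; it assumes dt refers to pyiem's datatypes module (as used in the function body) and that it provides temperature and humidity classes like those appearing elsewhere in these examples. The numeric inputs are illustrative only.

# Minimal usage sketch; assumes dt is pyiem.datatypes as imported by the
# module that defines heatindex().  Inputs are illustrative only.
t = dt.temperature(92.0, 'F')
rh = dt.humidity(60.0, '%')
hdx = heatindex(t, rh)
print("heat index: %.1f F" % (hdx.value('F'),))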
Example #59
0
def s(val):
    """Format a Kelvin value as Fahrenheit, or 'M' when masked/missing."""
    try:
        if val.mask:
            return 'M'
    except AttributeError:
        # Plain floats have no mask attribute
        pass
    return "%5.1f" % (temperature(val, 'K').value('F'),)
Example #60
0
def roadtmpc(grids, valid, iarchive):
    """ Do the RWIS Road times grid"""
    if iarchive:
        nt = NetworkTable(['IA_RWIS', 'MN_RWIS', 'WI_RWIS', 'IL_RWIS',
                           'MO_RWIS', 'KS_RWIS', 'NE_RWIS', 'SD_RWIS'])
        pgconn = psycopg2.connect(database='rwis', host='iemdb', user='******')
        df = read_sql("""
            SELECT station, tfs0 as tsf0
            from alldata WHERE valid >= %s and valid < %s and
            tfs0 >= -50 and tfs0 < 150
            """, pgconn,  params=((valid - datetime.timedelta(minutes=30)),
                                  (valid + datetime.timedelta(minutes=30))),
                      index_col=None)
        df['lat'] = df['station'].apply(lambda x: nt.sts.get(x, {}).get('lat',
                                                                        0))
        df['lon'] = df['station'].apply(lambda x: nt.sts.get(x, {}).get('lon',
                                                                        0))
    else:
        pgconn = psycopg2.connect(database='iem', host='iemdb', user='******')
        df = read_sql("""
            SELECT ST_x(geom) as lon, ST_y(geom) as lat,
            tsf0
            from current c JOIN stations t on (c.iemid = t.iemid)
            WHERE c.valid > now() - '2 hours'::interval and
            t.network in ('IA_RWIS', 'MN_RWIS', 'WI_RWIS', 'IL_RWIS',
            'MO_RWIS', 'KS_RWIS', 'NE_RWIS', 'SD_RWIS') and tsf0 >= -50
            and tsf0 < 150
            """, pgconn, index_col=None)

    nn = NearestNDInterpolator((df['lon'].values, df['lat'].values),
                               temperature(df['tsf0'].values, 'F').value('C'))
    grids['roadtmpc'] = nn(XI, YI)