Example #1
from pyiem.util import get_dbconn, logger


def main():
    """Go Main"""
    log = logger()
    mesosite = get_dbconn("mesosite")
    postgis = get_dbconn("postgis")
    mcursor = mesosite.cursor()
    mcursor2 = mesosite.cursor()
    pcursor = postgis.cursor()

    # Find sites we need to check on
    mcursor.execute("""
        select s.id, s.iemid, s.network, st_x(geom) as lon, st_y(geom) as lat
        from stations s WHERE
        (s.wfo IS NULL or s.wfo = '') and s.country = 'US'
    """)

    for row in mcursor:
        sid = row[0]
        iemid = row[1]
        network = row[2]
        # Look for matching WFO
        pcursor.execute(
            """
            WITH s as (
                SELECT
                ST_SetSrid(ST_GeomFromEWKT('POINT(%s %s)'), 4326) as geom
            )
            select u.wfo, ST_Distance(u.geom, s.geom) as dist
            from s, ugcs u WHERE ST_Intersects(u.geom, s.geom) and
            u.end_ts is null and wfo is not null ORDER by dist ASC LIMIT 1
        """,
            (row[3], row[4]),
        )
        if pcursor.rowcount > 0:
            row2 = pcursor.fetchone()
            wfo = row2[0][:3]
            log.info(
                "Assinging WFO: %s to IEMID: %s ID: %s NETWORK: %s",
                wfo,
                iemid,
                sid,
                network,
            )
            mcursor2.execute("""
                UPDATE stations SET wfo = '%s' WHERE iemid = %s
            """ % (wfo, iemid))
        else:
            log.info(
                "ERROR assigning WFO to IEMID: %s ID: %s NETWORK: %s",
                iemid,
                sid,
                network,
            )

    mcursor.close()
    mcursor2.close()
    mesosite.commit()
    mesosite.close()
Example #2
import datetime
import os
import time

import pika
from pyiem.util import get_dbconn, logger

# WeppRun comes from the surrounding project and is assumed in scope


def main(argv):
    """Go main Go."""
    scenario = int(argv[1])
    log = logger()
    myhucs = []
    if os.path.isfile("myhucs.txt"):
        log.warning("Using myhucs.txt to filter job submission")
        with open("myhucs.txt") as fh:
            myhucs = [s.strip() for s in fh]
    idep = get_dbconn("idep")
    icursor = idep.cursor()

    icursor.execute("""
        SELECT huc_12, fpath, climate_file
        from flowpaths where scenario = %s
    """ % (scenario, ))
    totaljobs = icursor.rowcount
    connection = pika.BlockingConnection(
        pika.ConnectionParameters(host="iem-rabbitmq.local"))
    channel = connection.channel()
    channel.queue_declare(queue="dep", durable=True)
    sts = datetime.datetime.now()
    for row in icursor:
        if myhucs and row[0] not in myhucs:
            continue
        wr = WeppRun(row[0], row[1], row[2], scenario)
        channel.basic_publish(
            exchange="",
            routing_key="dep",
            body=wr.make_runfile(),
            properties=pika.BasicProperties(
                delivery_mode=2  # make message persistent
            ),
        )
    # Wait a few seconds for the dust to settle
    time.sleep(10)
    percentile = 1.0001
    while True:
        now = datetime.datetime.now()
        cnt = channel.queue_declare(queue="dep",
                                    durable=True).method.message_count
        done = totaljobs - cnt
        if (cnt / float(totaljobs)) < percentile:
            log.info(
                "%6i/%s [%.3f /s]",
                cnt,
                totaljobs,
                done / (now - sts).total_seconds(),
            )
            percentile -= 0.1
        if (now - sts).total_seconds() > 36000:
            log.error("ERROR, 10 Hour Job Limit Hit")
            break
        if cnt == 0:
            log.info("%s Done!", now.strftime("%H:%M"))
            break
        time.sleep(30)

    connection.close()
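
The publisher above only fills the durable "dep" queue. For context, a
minimal consumer sketch, assuming a worker process pointed at the same
broker; the job-execution step is hypothetical and project-specific:

import pika


def on_message(channel, method, properties, body):
    """Run the WEPP job carried in body (project-specific), then ack."""
    # ... execute the run file contained in `body` here ...
    channel.basic_ack(delivery_tag=method.delivery_tag)


connection = pika.BlockingConnection(
    pika.ConnectionParameters(host="iem-rabbitmq.local"))
channel = connection.channel()
channel.queue_declare(queue="dep", durable=True)
# Deliver one message at a time so long jobs don't starve other workers
channel.basic_qos(prefetch_count=1)
channel.basic_consume(queue="dep", on_message_callback=on_message)
channel.start_consuming()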
Example #3
import datetime

import requests
from pyiem.util import get_dbconn, logger


def main():
    """Go Main Go."""
    log = logger()
    pgconn = get_dbconn("postgis")
    cursor = pgconn.cursor()

    avwx = requests.get(
        "http://aviationweather.gov/gis/scripts/AirepJSON.php", timeout=60
    )
    avwx = avwx.json()

    mine = {}
    cursor.execute("""
        SELECT valid at time zone 'UTC', ST_x(geom::geometry),
        ST_y(geom::geometry), report
        from pireps WHERE valid > (now() - '60 minutes'::interval)
    """)
    for row in cursor:
        key = "/".join(row[3].replace(" ", "").split("/")[:3])
        mine[key] = row

    floor = None
    for feature in avwx["features"]:
        if feature["properties"]["airepType"] != "PIREP":
            continue
        ts = datetime.datetime.strptime(feature["properties"]["obsTime"],
                                        "%Y-%m-%dT%H:%M:%SZ")
        if floor is None:
            floor = ts
        lon, lat = feature["geometry"]["coordinates"]
        key = "/".join(feature["properties"]["rawOb"].replace(
            " ", "").split("/")[:3])
        if key not in mine:
            log.info("IEM  MISS %s %s", ts, feature["properties"]["rawOb"])
        else:
            error = ((mine[key][1] - lon)**2 + (mine[key][2] - lat)**2)**0.5
            if error > 0.1:
                location = "/".join(
                    feature["properties"]["rawOb"].split("/")[:2])
                log.info(
                    "ERROR: %5.2f IEM: %8.3f %6.3f AWX: %7.2f %5.2f %s",
                    error,
                    mine[key][1],
                    mine[key][2],
                    lon,
                    lat,
                    location,
                )
            del mine[key]

    for key in mine:
        if mine[key][0] < floor:
            continue
        log.info("AVWX MISS %s %s", mine[key][0], mine[key][3])
Example #4
import datetime

from pyiem import iemre
from pyiem.util import logger, ncopen, utc


def main(argv):
    """Go Main Go."""
    log = logger()
    if len(argv) == 6:
        valid = utc(int(argv[1]), int(argv[2]), int(argv[3]), int(argv[4]))
        ncfn = iemre.get_hourly_ncname(valid.year)
        idx = iemre.hourly_offset(valid)
    else:
        valid = datetime.date(int(argv[1]), int(argv[2]), int(argv[3]))
        ncfn = iemre.get_daily_ncname(valid.year)
        idx = iemre.daily_offset(valid)
    ds = iemre.get_grids(valid)
    with ncopen(ncfn, "a", timeout=600) as nc:
        for vname in ds:
            if vname not in nc.variables:
                continue
            log.debug("copying database var %s to netcdf", vname)
            nc.variables[vname][idx, :, :] = ds[vname].values
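
For reference, a small sketch of what the computed offsets mean: they
index the year-long time axis of the netcdf file. The helper names are
the pyiem.iemre functions used above; the dates are arbitrary examples:

import datetime

from pyiem import iemre
from pyiem.util import utc

# Jan 2 06Z is 30 hours after Jan 1 00Z -> hourly index 30
print(iemre.hourly_offset(utc(2021, 1, 2, 6)))
# Jan 2 is the second day of the year -> daily index 1
print(iemre.daily_offset(datetime.date(2021, 1, 2)))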
Example #5
"""
 Main script that adds a site into the appropriate tables
 called from SYNC_STATIONS.sh
"""
import datetime

import psycopg2.extras
from pyiem.util import get_dbconn, logger

LOG = logger()


def add_summary(cursor, date, iemid):
    """Add a summary entry for the given date."""
    table = "summary_%s" % (date.year, )
    cursor.execute(
        """
        SELECT iemid from """ + table + """ WHERE day = %s and iemid = %s
    """,
        (date, iemid),
    )
    if cursor.rowcount == 1:
        LOG.info("%s entry already exists for date %s", table, date)
        return
    cursor.execute(
        """
        INSERT into """ + table + """ (day, iemid) values (%s, %s)
    """,
        (date, iemid),
    )
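
A minimal driver sketch for add_summary(); the "iem" database name
follows pyiem convention here, while the date and iemid values are
made-up examples:

def example_call():
    """Hypothetical caller; values are illustrative only."""
    conn = get_dbconn("iem")
    cursor = conn.cursor()
    add_summary(cursor, datetime.date(2024, 1, 1), 12345)
    cursor.close()
    conn.commit()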
Example #6
from pyiem import util


def test_logger():
    """Can we emit logs."""
    log = util.logger()
    assert log is not None
Example #7
import requests
from pyiem.util import get_dbconn, logger


def main(argv):
    """Go Main"""
    log = logger()
    if len(argv) < 2:
        print("USAGE: python merge_hvtec_nwsli.py FILENAME")
        return

    dbconn = get_dbconn("postgis", user="******")
    cursor = dbconn.cursor()
    log.info(" - Connected to database: postgis")

    fn = argv[1]
    uri = "https://www.weather.gov/media/vtec/%s" % (fn,)

    log.info(" - Fetching file: %s", uri)
    req = requests.get(uri, timeout=60)
    updated = 0
    new = 0
    bad = 0
    for linenum, line in enumerate(req.content.decode("ascii").split("\n")):
        if line.strip() == "":
            continue
        tokens = line.strip().split(",")
        if len(tokens) != 7:
            log.info(
                " + Linenum %s had %s tokens, instead of 7\n%s",
                linenum + 1,
                len(tokens),
                line,
            )
            bad += 1
            continue
        (nwsli, river_name, proximity, name, state, lat, lon) = tokens
        if len(nwsli) != 5:
            log.info(
                ' + Linenum %s had a NWSLI "%s" not of 5 character length\n%s',
                linenum + 1,
                nwsli,
                line,
            )
            bad += 1
            continue
        cursor.execute(
            """
            DELETE from hvtec_nwsli WHERE nwsli = %s
        """,
            (nwsli,),
        )
        if cursor.rowcount == 1:
            updated += 1
        else:
            new += 1
        sql = """
            INSERT into hvtec_nwsli (nwsli, river_name, proximity, name,
             state, geom) values (%s, %s, %s, %s, %s,
             'SRID=4326;POINT(%s %s)')
             """
        args = (
            nwsli,
            river_name,
            proximity,
            name,
            state,
            # The source file appears to list longitude in positive
            # degrees west (an assumption based on this sign flip), so
            # negate it for EPSG:4326
            0 - float(lon),
            float(lat),
        )
        cursor.execute(sql, args)

    cursor.close()
    dbconn.commit()
    log.info(" - DONE! %s updated %s new, %s bad entries", updated, new, bad)
Example #8
"""
Site Identifier
    CameraID (may have more than one camera in the future)
        ViewID
            GMT Timestamp....
"""
import os
import re
import subprocess

# third party
from PIL import Image, ImageDraw, ImageFont
import psycopg2.extras
import pytz
import pyiem.util as util

LOG = util.logger()
FONT = ImageFont.truetype("veramono.ttf", 10)


def do_imagework(cameras, cid, tokens, now):
    """Process the images"""
    # Hard coded...
    drct = cameras[cid]["pan0"]
    drct_text = util.drct2text(drct)

    # Create 320x240 variant
    fn = "165.206.203.34/rwis_images/Vid-000512%s.jpg" % ("-".join(tokens), )
    try:
        i0 = Image.open(fn)
        i320 = i0.resize((320, 240), Image.ANTIALIAS)
    except Exception:
        # The source snippet is truncated here; a minimal fallback so
        # the function remains runnable
        LOG.exception("failed to open %s", fn)
        return None
Example #9
import datetime
import subprocess
import sys

import numpy as np
import requests
from pyiem import iemre
from pyiem.util import logger, ncopen
from tqdm import tqdm


def main(argv):
    """Go Main Go."""
    log = logger()
    year = int(argv[1])
    sts = datetime.date(year, 1, 1)
    ets = min([datetime.date(year, 12, 31), datetime.date.today()])
    current = {}
    now = ets
    while now >= sts:
        ds = iemre.get_grids(now, varnames="power_swdn")
        maxval = ds["power_swdn"].values.max()
        if np.isnan(maxval) or maxval < 0:
            log.debug("adding %s as currently empty", now)
            current[now] = {"data": ds, "dirty": False}
        now -= datetime.timedelta(days=1)
    sts = min(list(current.keys()))
    ets = max(list(current.keys()))
    log.debug("running between %s and %s", sts, ets)

    queue = []
    for x0 in np.arange(iemre.WEST, iemre.EAST, 5.0):
        for y0 in np.arange(iemre.SOUTH, iemre.NORTH, 5.0):
            queue.append([x0, y0])
    for x0, y0 in tqdm(queue, disable=not sys.stdout.isatty()):
        url = ("https://power.larc.nasa.gov/cgi-bin/v1/DataAccess.py?"
               "request=execute&identifier=Regional&"
               "parameters=ALLSKY_SFC_SW_DWN&"
               "startDate=%s&endDate=%s&userCommunity=SSE&"
               "tempAverage=DAILY&bbox=%s,%s,%s,%s&user=anonymous&"
               "outputList=NETCDF") % (
                   sts.strftime("%Y%m%d"),
                   ets.strftime("%Y%m%d"),
                   y0,
                   x0,
                   min([y0 + 5.0, iemre.NORTH]) - 0.1,
                   min([x0 + 5.0, iemre.EAST]) - 0.1,
               )
        req = requests.get(url, timeout=60)
        js = req.json()
        if "outputs" not in js:
            print(url)
            print(js)
            continue
        fn = js["outputs"]["netcdf"]
        req = requests.get(fn, timeout=60, stream=True)
        ncfn = "/tmp/power%s.nc" % (year, )
        with open(ncfn, "wb") as fh:
            for chunk in req.iter_content(chunk_size=1024):
                if chunk:
                    fh.write(chunk)
        with ncopen(ncfn) as nc:
            for day, _ in enumerate(nc.variables["time"][:]):
                date = sts + datetime.timedelta(days=day)
                if date not in current:
                    continue
                # kwh to MJ/d  3600 * 1000 / 1e6
                data = nc.variables["ALLSKY_SFC_SW_DWN"][day, :, :] * 3.6
                # Sometimes there are missing values?
                if np.ma.is_masked(data):
                    data[data.mask] = np.mean(data)
                i, j = iemre.find_ij(x0, y0)
                # resample data is 0.5, iemre is 0.125
                data = np.repeat(np.repeat(data, 4, axis=0), 4, axis=1)
                data = np.where(data < 0, np.nan, data)
                shp = np.shape(data)
                jslice = slice(j, j + shp[0])
                islice = slice(i, i + shp[1])
                # get currentdata
                present = current[date]["data"]["power_swdn"].values[jslice,
                                                                     islice]
                if present.mean() == data.mean():
                    continue
                current[date]["data"]["power_swdn"].values[jslice,
                                                           islice] = data
                current[date]["dirty"] = True
    for date in current:
        if not current[date]["dirty"]:
            continue
        log.debug("saving %s", date)
        iemre.set_grids(date, current[date]["data"])
        subprocess.call(
            "python ../iemre/db_to_netcdf.py %s" %
            (date.strftime("%Y %m %d"), ),
            shell=True,
        )
Example #10
import pyiem.util as util


def logger():
    """Get a logger"""
    return util.logger()
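
The wrapper returns a stock logging.Logger, so usage is the familiar
stdlib pattern; the messages below are illustrative:

LOG = logger()
LOG.info("started processing")
LOG.warning("input file was empty")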