Example #1
def load_table(state, date):
    """Update the station table"""
    nt = NetworkTable("%sCLIMATE" % (state, ))
    rows = []
    istoday = date == datetime.date.today()
    for sid in nt.sts:
        # handled by compute_0000
        if sid[2:] == "0000" or sid[2] == "C":
            continue
        if istoday and nt.sts[sid]["temp24_hour"] not in range(3, 12):
            continue
        i, j = iemre.find_ij(nt.sts[sid]["lon"], nt.sts[sid]["lat"])
        nt.sts[sid]["gridi"] = i
        nt.sts[sid]["gridj"] = j
        rows.append({
            "station": sid,
            "gridi": i,
            "gridj": j,
            "temp24_hour": nt.sts[sid]["temp24_hour"],
            "precip24_hour": nt.sts[sid]["precip24_hour"],
            "tracks": nt.sts[sid]["attributes"].get(
                "TRACKS_STATION", "|").split("|")[0],
        })
    if not rows:
        return
    df = pd.DataFrame(rows)
    df.set_index("station", inplace=True)
    for key in ["high", "low", "precip", "snow", "snowd"]:
        df[key] = None
    return df
Example #2
def load_table(state, date):
    """Update the station table"""
    nt = NetworkTable("%sCLIMATE" % (state, ))
    rows = []
    istoday = (date == datetime.date.today())
    for sid in nt.sts:
        # handled by compute_0000
        if sid[2:] == '0000' or sid[2] == 'C':
            continue
        if istoday and nt.sts[sid]['temp24_hour'] not in range(3, 12):
            # print('skipping %s as is_today' % (sid, ))
            continue
        i, j = iemre.find_ij(nt.sts[sid]['lon'], nt.sts[sid]['lat'])
        nt.sts[sid]['gridi'] = i
        nt.sts[sid]['gridj'] = j
        rows.append(
            {'station': sid, 'gridi': i, 'gridj': j,
             'temp24_hour': nt.sts[sid]['temp24_hour'],
             'precip24_hour': nt.sts[sid]['precip24_hour'],
             'tracks': nt.sts[sid]['attributes'].get(
                 'TRACKS_STATION', '|').split("|")[0]}
        )
    if not rows:
        return
    df = pd.DataFrame(rows)
    df.set_index('station', inplace=True)
    for key in ['high', 'low', 'precip', 'snow', 'snowd']:
        df[key] = None
    return df
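A minimal usage sketch for the load_table() helper above (an illustration, not part of the original example); it assumes the snippet's module already imports datetime, pandas as pd, NetworkTable and iemre:

# Hypothetical driver appended to the same module
if __name__ == "__main__":
    df = load_table("IA", datetime.date.today())
    if df is None:
        print("no stations passed the filters")
    else:
        print(df[["gridi", "gridj", "temp24_hour"]].head())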
Example #3
def do(netname, pname):
    """Do something please"""
    mcursor = MESOSITE.cursor()
    mcursor.execute("""
        SELECT id, network, name from webcams where
        network = %s
        and online ORDER by id ASC
    """, (netname, ))
    NT = NetworkTable(None)
    obs = {}
    missing = 0
    for row in mcursor:
        NT.sts[row[0]] = dict(id=row[0], network=row[1], name=row[2],
                              tzname='America/Chicago')
        fn = "%s/%s.jpg" % (mydir, row[0])
        if not os.path.isfile(fn):
            missing += 1
            if missing > 1:
                print('Missing webcam file: %s' % (fn,))
            continue
        ticks = os.stat(fn)[stat.ST_MTIME]
        valid = (datetime.datetime(1970, 1, 1) +
                 datetime.timedelta(seconds=ticks))
        valid = valid.replace(tzinfo=pytz.timezone("UTC"))
        obs[row[0]] = dict(valid=valid)
    # Abort out if no obs are found
    if len(obs) == 0:
        return

    tracker = TrackerEngine(IEM.cursor(), PORTFOLIO.cursor(), 10)
    tracker.process_network(obs, pname, NT, threshold)
    tracker.send_emails()
    IEM.commit()
    PORTFOLIO.commit()
Example #4
def do(netname, pname):
    """Do something please"""
    mcursor = MESOSITE.cursor()
    mcursor.execute("""
        SELECT id, network, name from webcams where
        network = %s
        and online ORDER by id ASC
    """, (netname, ))
    NT = NetworkTable(None)
    obs = {}
    for row in mcursor:
        NT.sts[row[0]] = dict(id=row[0], network=row[1], name=row[2],
                              tzname='America/Chicago')
        fn = "%s/%s.jpg" % (mydir, row[0])
        if not os.path.isfile(fn):
            print('Missing webcam file: %s' % (fn,))
            continue
        ticks = os.stat(fn)[stat.ST_MTIME]
        valid = (datetime.datetime(1970, 1, 1) +
                 datetime.timedelta(seconds=ticks))
        valid = valid.replace(tzinfo=pytz.timezone("UTC"))
        obs[row[0]] = dict(valid=valid)
    # Abort out if no obs are found
    if len(obs) == 0:
        return

    tracker = TrackerEngine(IEM.cursor(), PORTFOLIO.cursor(), 10)
    tracker.process_network(obs, pname, NT, threshold)
    tracker.send_emails()
    IEM.commit()
    PORTFOLIO.commit()
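A hypothetical invocation sketch for do() above (not part of the original example); it assumes the module-level globals it references (MESOSITE, IEM, PORTFOLIO, mydir, threshold) are created elsewhere in the script:

# Hypothetical driver; 'KCCI' / 'kcci' are placeholder network and portfolio names
if __name__ == "__main__":
    do("KCCI", "kcci")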
Example #5
def plotter(fdict):
    """ Go """
    import seaborn as sns
    pgconn = get_dbconn('postgis')
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['station']
    phenomena = ctx['phenomena']
    significance = ctx['significance']
    opt = ctx['opt']
    state = ctx['state']

    nt = NetworkTable('WFO')
    nt.sts['_ALL'] = {'name': 'All Offices'}

    wfo_limiter = (" and wfo = '%s' "
                   ) % (station if len(station) == 3 else station[1:],)
    if station == '_ALL':
        wfo_limiter = ''
    if opt == 'state':
        wfo_limiter = " and substr(ugc, 1, 2) = '%s'" % (state, )

    # NB we added a hack here that may lead to some false positives when events
    # cross over months, sigh, recall the 2017 eventid pain
    df = read_sql("""
        with data as (
            SELECT distinct
            extract(year from issue)::int as yr,
            extract(month from issue)::int as mo, wfo, eventid
            from warnings where phenomena = %s and significance = %s
            """ + wfo_limiter + """
            GROUP by yr, mo, wfo, eventid)

        SELECT yr, mo, count(*) from data GROUP by yr, mo ORDER by yr, mo ASC
      """, pgconn, params=(phenomena, significance), index_col=None)

    if df.empty:
        raise ValueError("Sorry, no data found!")
    (fig, ax) = plt.subplots(1, 1, figsize=(8, 8))

    df2 = df.pivot('yr', 'mo', 'count')
    df2 = df2.reindex(
        index=range(df2.index.min(), df2.index.max() + 1),
        columns=range(1, 13))

    title = "NWS %s" % (nt.sts[station]['name'], )
    if opt == 'state':
        title = ("NWS Issued for Counties/Zones for State of %s"
                 ) % (reference.state_names[state],)
    title += ("\n%s (%s.%s) Issued by Year,Month"
              ) % (vtec.get_ps_string(phenomena, significance),
                   phenomena, significance)
    ax.set_title(title)
    sns.heatmap(df2, annot=True, fmt=".0f", linewidths=.5, ax=ax, vmin=1)
    ax.set_xticks(np.arange(12) + 0.5)
    ax.set_xticklabels(calendar.month_abbr[1:])
    ax.set_ylabel("Year")
    ax.set_xlabel("Month")

    return fig, df
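A hedged usage sketch for the autoplot-style plotter() above (an illustration only); the fdict keys are taken from the snippet itself, and the figure is saved with plain matplotlib, which the real autoplot framework may wrap differently:

# Hypothetical call: render the heatmap for one office and save it to disk
fdict = dict(station="DMX", phenomena="SV", significance="W",
             opt="wfo", state="IA")
fig, df = plotter(fdict)
fig.savefig("vtec_heatmap.png")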
Example #6
def plotter(fdict):
    """ Go """
    import matplotlib
    matplotlib.use('agg')
    import matplotlib.pyplot as plt
    pgconn = psycopg2.connect(database='postgis', host='iemdb', user='******')
    cursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)

    station = fdict.get('station', 'DMX')
    limit = fdict.get('limit', 'no')
    phenomena = fdict.get('phenomena', 'FF')
    significance = fdict.get('significance', 'W')

    nt = NetworkTable('WFO')
    nt.sts['_ALL'] = {'name': 'All Offices'}

    lastdoy = 367
    if limit.lower() == 'yes':
        lastdoy = int(datetime.datetime.today().strftime("%j")) + 1

    wfo_limiter = " and wfo = '%s' " % (station if len(station) == 3 else station[1:],)
    if station == '_ALL':
        wfo_limiter = ''
    doy_limiter = ''
    if limit == 'yes':
        doy_limiter = (" and extract(doy from issue) < "
                       "extract(doy from 'TODAY'::date) ")

    cursor.execute("""
        with data as (
            SELECT distinct extract(year from issue) as yr, wfo, eventid
            from warnings where phenomena = %s and significance = %s
            """ + wfo_limiter + doy_limiter + """)

        SELECT yr, count(*) from data GROUP by yr ORDER by yr ASC
      """, (phenomena, significance))

    years = []
    counts = []
    for row in cursor:
        years.append(row[0])
        counts.append(row[1])
    if len(years) == 0:
        return("Sorry, no data found!")

    (fig, ax) = plt.subplots(1, 1)
    ax.bar(np.array(years)-0.4, counts)
    ax.set_xlim(min(years)-0.5, max(years)+0.5)
    ax.grid(True)
    ax.set_ylabel("Yearly Count")
    ax.set_title(("NWS %s\n%s %s (%s.%s) Count"
                  ) % (nt.sts[station]['name'], vtec._phenDict[phenomena],
                       vtec._sigDict[significance], phenomena, significance))
    if limit == 'yes':
        ax.set_xlabel(("thru approximately %s"
                       ) % (datetime.date.today().strftime("%-d %b"), ))

    return fig
Example #7
def plotter(fdict):
    """ Go """
    pgconn = psycopg2.connect(database="postgis", host="iemdb", user="******")
    cursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)

    station = fdict.get("station", "DMX")
    limit = fdict.get("limit", "no")
    phenomena = fdict.get("phenomena", "FF")
    significance = fdict.get("significance", "W")

    nt = NetworkTable("WFO")
    nt.sts["_ALL"] = {"name": "All Offices"}

    lastdoy = 367
    if limit.lower() == "yes":
        lastdoy = int(datetime.datetime.today().strftime("%j")) + 1

    wfo_limiter = " and wfo = '%s' " % (station,)
    if station == "_ALL":
        wfo_limiter = ""
    doy_limiter = ""
    if limit == "yes":
        doy_limiter = " and extract(doy from issue) < " "extract(doy from 'TODAY'::date) "

    cursor.execute(
        """
        with data as (
            SELECT distinct extract(year from issue) as yr, wfo, eventid
            from warnings where phenomena = %s and significance = %s
            """
        + wfo_limiter
        + doy_limiter
        + """)

        SELECT yr, count(*) from data GROUP by yr ORDER by yr ASC
      """,
        (phenomena, significance),
    )

    years = []
    counts = []
    for row in cursor:
        years.append(row[0])
        counts.append(row[1])

    (fig, ax) = plt.subplots(1, 1)
    ax.bar(np.array(years) - 0.4, counts)
    ax.set_xlim(min(years) - 0.5, max(years) + 0.5)
    ax.grid(True)
    ax.set_ylabel("Yearly Count")
    ax.set_title(
        ("NWS %s\n%s %s (%s.%s) Count")
        % (nt.sts[station]["name"], vtec._phenDict[phenomena], vtec._sigDict[significance], phenomena, significance)
    )
    if limit == "yes":
        ax.set_xlabel(("thru approximately %s") % (datetime.date.today().strftime("%-d %b"),))

    return fig
Example #8
def runYear(year):
    """Do Work."""
    # Grab the data
    now = datetime.datetime.now()
    nt = NetworkTable("IACLIMATE")
    # Help plot readability
    nt.sts["IA0200"]["lon"] = -93.4
    nt.sts["IA5992"]["lat"] = 41.65
    pgconn = get_dbconn("coop", user="******")
    ccursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    sql = """SELECT station, sum(precip) as total, max(day)
           from alldata_ia WHERE year = %s and
           station != 'IA0000' and
           substr(station,3,1) != 'C' and
           precip is not null GROUP by station""" % (
        year,
    )

    lats = []
    lons = []
    vals = []
    labels = []
    ccursor.execute(sql)
    for row in ccursor:
        sid = row["station"]
        if sid not in nt.sts:
            continue
        labels.append(sid[2:])
        lats.append(nt.sts[sid]["lat"])
        lons.append(nt.sts[sid]["lon"])
        vals.append(row["total"])
        maxday = row["max"]

    # pre-1900 dates cause troubles
    lastday = "31 December"
    if now.year == maxday.year:
        lastday = maxday.strftime("%d %B")
    mp = MapPlot(
        title="Total Precipitation [inch] (%s)" % (year,),
        subtitle="1 January - %s" % (lastday,),
        axisbg="white",
    )
    mp.plot_values(
        lons,
        lats,
        vals,
        labels=labels,
        fmt="%.2f",
        labeltextsize=8,
        labelcolor="tan",
    )
    pqstr = "plot m %s bogus %s/summary/total_precip.png png" % (
        now.strftime("%Y%m%d%H%M"),
        year,
    )
    mp.postprocess(pqstr=pqstr)
Example #9
def plotter(fdict):
    """ Go """
    import matplotlib
    matplotlib.use('agg')
    import matplotlib.pyplot as plt
    pgconn = get_dbconn('postgis')
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['station']
    limit = ctx['limit']
    phenomena = ctx['phenomena']
    significance = ctx['significance']

    nt = NetworkTable('WFO')
    nt.sts['_ALL'] = {'name': 'All Offices'}

    wfo_limiter = (" and wfo = '%s' ") % (station if len(station) == 3 else
                                          station[1:], )
    if station == '_ALL':
        wfo_limiter = ''
    doy_limiter = ''
    title = "Entire Year"
    if limit.lower() == 'yes':
        title = "thru ~%s" % (datetime.date.today().strftime("%-d %b"), )
        doy_limiter = (" and extract(doy from issue) <= "
                       "extract(doy from 'TODAY'::date) ")

    df = read_sql("""
        with data as (
            SELECT distinct extract(year from issue) as yr, wfo, eventid
            from warnings where phenomena = %s and significance = %s
            """ + wfo_limiter + doy_limiter + """)

        SELECT yr, count(*) from data GROUP by yr ORDER by yr ASC
      """,
                  pgconn,
                  params=(phenomena, significance))

    if df.empty:
        raise ValueError("Sorry, no data found!")

    (fig, ax) = plt.subplots(1, 1)
    ax.bar(df['yr'], df['count'], align='center')
    ax.set_xlim(df['yr'].min() - 0.5, df['yr'].max() + 0.5)
    ax.grid(True)
    ax.set_ylabel("Yearly Count")
    ax.set_title(
        ("NWS %s [%s]\n%s (%s.%s) Count") %
        (nt.sts[station]['name'], title,
         vtec.get_ps_string(phenomena, significance), phenomena, significance))
    if limit == 'yes':
        ax.set_xlabel(("thru approximately %s") %
                      (datetime.date.today().strftime("%-d %b"), ))

    return fig, df
Example #10
def runYear(year):
    """Do as I say"""
    # Grab the data
    now = datetime.datetime.now()
    nt = NetworkTable("IACLIMATE")
    nt.sts["IA0200"]["lon"] = -93.4
    nt.sts["IA5992"]["lat"] = 41.65
    pgconn = get_dbconn("coop", user="******")
    ccursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)

    lats = []
    lons = []
    vals = []
    labels = []
    ccursor.execute(
        """
        SELECT station,
        sum(case when precip > 0.009 then 1 else 0 end) as days, max(day)
        from alldata_ia WHERE year = %s and substr(station,3,1) != 'C'
        and station != 'IA0000' GROUP by station
    """,
        (year, ),
    )
    for row in ccursor:
        sid = row["station"].upper()
        if sid not in nt.sts:
            continue
        labels.append(sid[2:])
        lats.append(nt.sts[sid]["lat"])
        lons.append(nt.sts[sid]["lon"])
        vals.append(row["days"])
        maxday = row["max"]

    mp = MapPlot(
        title="Days with Measurable Precipitation (%s)" % (year, ),
        subtitle="Map valid January 1 - %s" % (maxday.strftime("%b %d")),
        axisbg="white",
    )
    mp.plot_values(
        lons,
        lats,
        vals,
        fmt="%.0f",
        labels=labels,
        labeltextsize=8,
        labelcolor="tan",
    )
    mp.drawcounties()
    pqstr = "plot m %s bogus %s/summary/precip_days.png png" % (
        now.strftime("%Y%m%d%H%M"),
        year,
    )
    mp.postprocess(pqstr=pqstr)
Example #11
def test_workflow(pcursor, icursor):
    """ Test that we can do stuff! """
    sid1 = 'XXX'
    sid2 = 'YYY'
    pnetwork = 'xxxxxx'
    nt = NetworkTable(None)
    nt.sts[sid1] = dict(name='XXX Site Name', network='IA_XXXX',
                        tzname='America/Chicago')
    nt.sts[sid2] = dict(name='YYY Site Name', network='IA_XXXX',
                        tzname='America/Chicago')
    valid = datetime.datetime.utcnow()
    valid = valid.replace(tzinfo=pytz.timezone("UTC"))
    threshold = valid - datetime.timedelta(hours=3)
    obs = {sid1: {'valid': valid},
           sid2: {'valid': valid - datetime.timedelta(hours=6)}}
    # Create dummy iem_site_contacts
    pcursor.execute("""
        INSERT into iem_site_contacts
        (portfolio, s_mid, email) VALUES (%s, %s, %s)
    """, (pnetwork, sid1, 'akrherz@localhost'))
    pcursor.execute("""
        INSERT into iem_site_contacts
        (portfolio, s_mid, email) VALUES (%s, %s, %s)
    """, (pnetwork, sid2, 'root@localhost'))
    # Create some dummy tickets
    pcursor.execute("""
        INSERT into tt_base (portfolio, s_mid, subject,
        status, author) VALUES (%s, %s, %s, %s, %s) RETURNING id
    """, (pnetwork, sid1, 'FIXME PLEASE OPEN', 'OPEN', 'mesonet'))
    pcursor.execute("""
        INSERT into tt_base (portfolio, s_mid, subject,
        status, author) VALUES (%s, %s, %s, %s, %s) RETURNING id
    """, (pnetwork, sid1, 'FIXME PLEASE CLOSED', 'CLOSED', 'mesonet'))
    tracker = TrackerEngine(icursor, pcursor)
    tracker.process_network(obs, pnetwork, nt, threshold)
    tracker.send_emails(really_send=False)
    assert len(tracker.emails) == 1

    tracker.emails = {}
    obs[sid1]['valid'] = valid - datetime.timedelta(hours=6)
    obs[sid2]['valid'] = valid
    tracker.process_network(obs, pnetwork, nt, threshold)
    tracker.send_emails(really_send=False)
    assert len(tracker.emails) == 2

    tracker.emails = {}
    obs[sid1]['valid'] = valid - datetime.timedelta(hours=6)
    obs[sid2]['valid'] = valid
    tracker.process_network(obs, pnetwork, nt, threshold)
    tracker.send_emails(really_send=False)
    assert not tracker.emails
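test_workflow() above expects database cursors via the pcursor and icursor arguments. A minimal pytest fixture sketch, assuming throwaway local 'portfolio' and 'iem' test databases (the DSNs are placeholders) and rolling everything back after each test:

import psycopg2
import pytest


@pytest.fixture
def pcursor():
    """Cursor against a scratch portfolio database, rolled back afterwards."""
    conn = psycopg2.connect(dbname="portfolio")  # placeholder DSN
    yield conn.cursor()
    conn.rollback()
    conn.close()


@pytest.fixture
def icursor():
    """Cursor against a scratch iem database, rolled back afterwards."""
    conn = psycopg2.connect(dbname="iem")  # placeholder DSN
    yield conn.cursor()
    conn.rollback()
    conn.close()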
Example #12
def plotter(fdict):
    """ Go """
    pgconn = psycopg2.connect(database='postgis', host='iemdb', user='******')
    cursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)

    station = fdict.get('station', 'DMX')[:4]

    nt = NetworkTable('WFO')
    nt.sts['_ALL'] = {'name': 'All Offices'}

    (fig, ax) = plt.subplots(1, 1, sharex=True)

    if station == '_ALL':
        cursor.execute("""
            with obs as (
                SELECT distinct extract(year from issue) as yr,
                phenomena, significance from warnings WHERE
                phenomena is not null and significance is not null and
                issue > '2005-01-01' and issue is not null
            )
            SELECT yr, count(*) from obs GROUP by yr ORDER by yr ASC
            """)
    else:
        cursor.execute("""
            with obs as (
                SELECT distinct extract(year from issue) as yr,
                phenomena, significance from warnings WHERE
                wfo = %s and phenomena is not null and significance is not null
                and issue > '2005-01-01' and issue is not null
            )
            SELECT yr, count(*) from obs GROUP by yr ORDER by yr ASC
            """, (station, ))

    years = []
    count = []
    for row in cursor:
        years.append(int(row[0]))
        count.append(int(row[1]))

    ax.bar(np.array(years)-0.4, count, width=0.8, fc='b', ec='b')
    for yr, val in zip(years, count):
        ax.text(yr, val+1, "%s" % (val,), ha='center')
    ax.set_title(("[%s] NWS %s\nCount of Distinct VTEC Phenomena/"
                  "Significance - %i to %i"
                  ) % (station, nt.sts[station]['name'],
                       years[0], years[-1]))
    ax.grid()
    ax.set_ylabel("Count")

    return fig
Example #13
File: p68.py Project: nbackas/iem
def plotter(fdict):
    """ Go """
    import matplotlib
    matplotlib.use('agg')
    import matplotlib.pyplot as plt
    pgconn = psycopg2.connect(database='postgis', host='iemdb', user='******')

    station = fdict.get('station', 'DMX')[:4]

    nt = NetworkTable('WFO')
    nt.sts['_ALL'] = {'name': 'All Offices'}

    (fig, ax) = plt.subplots(1, 1, sharex=True)

    if station == '_ALL':
        df = read_sql("""
            with obs as (
                SELECT distinct extract(year from issue) as yr,
                phenomena, significance from warnings WHERE
                phenomena is not null and significance is not null and
                issue > '2005-01-01' and issue is not null
            )
            SELECT yr as year, count(*) from obs GROUP by yr ORDER by yr ASC
            """, pgconn, index_col=None)
    else:
        df = read_sql("""
            with obs as (
                SELECT distinct extract(year from issue) as yr,
                phenomena, significance from warnings WHERE
                wfo = %s and phenomena is not null and significance is not null
                and issue > '2005-01-01' and issue is not null
            )
            SELECT yr as year, count(*) from obs GROUP by yr ORDER by yr ASC
            """, pgconn, params=(station, ), index_col=None)

    df['wfo'] = station
    df['year'] = df['year'].astype('i')

    ax.bar(df['year']-0.4, df['count'], width=0.8, fc='b', ec='b')
    for yr, val in zip(df['year'], df['count']):
        ax.text(yr, val+1, "%s" % (val,), ha='center')
    ax.set_title(("[%s] NWS %s\nCount of Distinct VTEC Phenomena/"
                  "Significance - %i to %i"
                  ) % (station, nt.sts[station]['name'],
                       df['year'].min(), df['year'].max()))
    ax.grid()
    ax.set_ylabel("Count")

    return fig, df
Example #14
File: p73.py Project: nbackas/iem
def plotter(fdict):
    """ Go """
    import matplotlib
    matplotlib.use('agg')
    import matplotlib.pyplot as plt
    pgconn = psycopg2.connect(database='postgis', host='iemdb', user='******')

    station = fdict.get('station', 'DMX')
    limit = fdict.get('limit', 'no')
    phenomena = fdict.get('phenomena', 'FF')
    significance = fdict.get('significance', 'W')

    nt = NetworkTable('WFO')
    nt.sts['_ALL'] = {'name': 'All Offices'}

    wfo_limiter = (" and wfo = '%s' "
                   ) % (station if len(station) == 3 else station[1:],)
    if station == '_ALL':
        wfo_limiter = ''
    doy_limiter = ''
    if limit.lower() == 'yes':
        doy_limiter = (" and extract(doy from issue) < "
                       "extract(doy from 'TODAY'::date) ")

    df = read_sql("""
        with data as (
            SELECT distinct extract(year from issue) as yr, wfo, eventid
            from warnings where phenomena = %s and significance = %s
            """ + wfo_limiter + doy_limiter + """)

        SELECT yr, count(*) from data GROUP by yr ORDER by yr ASC
      """, pgconn, params=(phenomena, significance))

    if len(df.index) == 0:
        return("Sorry, no data found!")

    (fig, ax) = plt.subplots(1, 1)
    ax.bar(df['yr']-0.4, df['count'])
    ax.set_xlim(df['yr'].min()-0.5, df['yr'].max()+0.5)
    ax.grid(True)
    ax.set_ylabel("Yearly Count")
    ax.set_title(("NWS %s\n%s %s (%s.%s) Count"
                  ) % (nt.sts[station]['name'], vtec._phenDict[phenomena],
                       vtec._sigDict[significance], phenomena, significance))
    if limit == 'yes':
        ax.set_xlabel(("thru approximately %s"
                       ) % (datetime.date.today().strftime("%-d %b"), ))

    return fig, df
Example #15
def workflow(netname, pname):
    """Do something please"""
    pgconn_iem = get_dbconn("iem")
    pgconn_mesosite = get_dbconn("mesosite")
    pgconn_portfolio = get_dbconn("portfolio")

    # Now lets check files
    mydir = "/home/ldm/data/camera/stills"

    threshold = datetime.datetime.utcnow() - datetime.timedelta(hours=2)
    threshold = threshold.replace(tzinfo=pytz.UTC)
    mcursor = pgconn_mesosite.cursor()
    mcursor.execute(
        """
        SELECT id, network, name from webcams where
        network = %s
        and online ORDER by id ASC
    """,
        (netname, ),
    )
    nt = NetworkTable(None)
    obs = {}
    missing = 0
    for row in mcursor:
        nt.sts[row[0]] = dict(id=row[0],
                              network=row[1],
                              name=row[2],
                              tzname="America/Chicago")
        fn = "%s/%s.jpg" % (mydir, row[0])
        if not os.path.isfile(fn):
            missing += 1
            if missing > 1:
                print("Missing webcam file: %s" % (fn, ))
            continue
        ticks = os.stat(fn)[stat.ST_MTIME]
        valid = datetime.datetime(1970, 1,
                                  1) + datetime.timedelta(seconds=ticks)
        valid = valid.replace(tzinfo=pytz.UTC)
        obs[row[0]] = dict(valid=valid)
    # Abort out if no obs are found
    if not obs:
        return

    tracker = TrackerEngine(pgconn_iem.cursor(), pgconn_portfolio.cursor(), 10)
    tracker.process_network(obs, pname, nt, threshold)
    tracker.send_emails()
    pgconn_iem.commit()
    pgconn_portfolio.commit()
Example #16
def main():
    """Go Main Go"""
    today = datetime.date.today()
    now = today.replace(year=2000)
    nt = NetworkTable("IACLIMATE")
    nt.sts["IA0200"]["lon"] = -93.6
    nt.sts["IA5992"]["lat"] = 41.65
    coop = get_dbconn("coop", user="******")

    obs = []
    cursor = coop.cursor()
    cursor.execute(
        "SELECT station, max_high, min_low from climate WHERE valid = %s "
        "and substr(station,0,3) = 'IA'",
        (now,),
    )
    for row in cursor:
        sid = row[0]
        if sid[2] == "C" or sid[2:] == "0000" or sid not in nt.sts:
            continue
        obs.append(
            dict(
                id=sid[2:],
                lat=nt.sts[sid]["lat"],
                lon=nt.sts[sid]["lon"],
                tmpf=row[1],
                dwpf=row[2],
            )
        )

    mp = MapPlot(
        title=("Record High + Low Temperature [F] (1893-%s)") % (today.year,),
        subtitle="For Date: %s" % (now.strftime("%d %b"),),
        continentalcolor="white",
    )
    mp.drawcounties()
    mp.plot_station(obs)
    pqstr = (
        "plot ac %s0000 climate/iowa_today_rec_hilo_pt.png "
        "coop_rec_temp.png png"
    ) % (today.strftime("%Y%m%d"),)
    mp.postprocess(view=False, pqstr=pqstr)
    mp.close()
Example #17
def main():
    """Go Main Go"""
    now = datetime.datetime.now()
    nt = NetworkTable("IACLIMATE")
    nt.sts["IA0200"]["lon"] = -93.6
    nt.sts["IA5992"]["lat"] = 41.65
    coop = get_dbconn("coop", user="******")

    # Compute normal from the climate database
    sql = """
        SELECT station, high, low from climate WHERE valid = '2000-%s'
        and substr(station,0,3) = 'IA'
    """ % (now.strftime("%m-%d"), )

    obs = []
    cursor = coop.cursor(cursor_factory=psycopg2.extras.DictCursor)
    cursor.execute(sql)
    for row in cursor:
        sid = row["station"]
        if sid[2] == "C" or sid[2:] == "0000" or sid not in nt.sts:
            continue
        obs.append(
            dict(
                id=sid[2:],
                lat=nt.sts[sid]["lat"],
                lon=nt.sts[sid]["lon"],
                tmpf=row["high"],
                dwpf=row["low"],
            ))

    mp = MapPlot(
        title=("Average High + Low Temperature [F] (1893-%s)") % (now.year, ),
        subtitle="For Date: %s" % (now.strftime("%d %b"), ),
        axisbg="white",
    )
    mp.drawcounties()
    mp.plot_station(obs)
    pqstr = ("plot ac %s0000 climate/iowa_today_avg_hilo_pt.png "
             "coop_avg_temp.png png") % (now.strftime("%Y%m%d"), )
    mp.postprocess(view=False, pqstr=pqstr)
    mp.close()
Example #18
def main():
    """Go Main Go"""
    now = datetime.datetime.now()
    nt = NetworkTable('IACLIMATE')
    nt.sts["IA0200"]["lon"] = -93.6
    nt.sts["IA5992"]["lat"] = 41.65
    coop = get_dbconn('coop', user='******')

    # Compute normal from the climate database
    sql = """
        SELECT station, max_high, min_low from climate WHERE valid = '2000-%s'
        and substr(station,0,3) = 'IA'
    """ % (now.strftime("%m-%d"), )

    obs = []
    cursor = coop.cursor()
    cursor.execute(sql)
    for row in cursor:
        sid = row[0]
        if sid[2] == 'C' or sid[2:] == '0000' or sid not in nt.sts:
            continue
        obs.append(
            dict(id=sid[2:],
                 lat=nt.sts[sid]['lat'],
                 lon=nt.sts[sid]['lon'],
                 tmpf=row[1],
                 dwpf=row[2]))

    mp = MapPlot(title=("Record High + Low Temperature [F] (1893-%s)") %
                 (now.year, ),
                 subtitle="For Date: %s" % (now.strftime("%d %b"), ),
                 continentalcolor='white')
    mp.drawcounties()
    mp.plot_station(obs)
    pqstr = ("plot ac %s0000 climate/iowa_today_rec_hilo_pt.png "
             "coop_rec_temp.png png") % (now.strftime("%Y%m%d"), )
    mp.postprocess(view=False, pqstr=pqstr)
    mp.close()
Example #19
def plotter(fdict):
    """ Go """
    import matplotlib
    matplotlib.use('agg')
    import matplotlib.pyplot as plt
    pgconn = psycopg2.connect(database='postgis', host='iemdb', user='******')
    cursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)

    station = fdict.get('station', 'DMX')
    limit = fdict.get('limit', 'no')
    combo = fdict.get('c', 'svrtor')
    phenomena = fdict.get('phenomena', 'TO')
    significance = fdict.get('significance', 'W')

    nt = NetworkTable('WFO')
    nt.sts['_ALL'] = {'name': 'All Offices'}

    lastdoy = 367
    if limit.lower() == 'yes':
        lastdoy = int(datetime.datetime.today().strftime("%j")) + 1

    if combo == 'svrtor':
        if station == '_ALL':
            cursor.execute("""
                with counts as (
                    select extract(year from issue) as yr,
                    extract(doy from issue) as doy, count(*) from sbw
                    where status = 'NEW' and phenomena in ('SV', 'TO')
                    and significance = 'W' and issue > '2003-01-01'
                    and extract(doy from issue) < %s
                    GROUP by yr, doy)

                SELECT yr, doy, sum(count) OVER (PARTITION by yr
                ORDER by doy ASC)
                from counts ORDER by yr ASC, doy ASC
              """, (lastdoy, ))

        else:
            cursor.execute("""
                with counts as (
                    select extract(year from issue) as yr,
                    extract(doy from issue) as doy, count(*) from sbw
                    where status = 'NEW' and phenomena in ('SV', 'TO')
                    and significance = 'W' and wfo = %s
                    and issue > '2003-01-01'
                    and extract(doy from issue) < %s
                    GROUP by yr, doy)

                SELECT yr, doy, sum(count) OVER (PARTITION by yr
                ORDER by doy ASC)
                from counts ORDER by yr ASC, doy ASC
              """, (station, lastdoy))
    else:
        if station == '_ALL':
            cursor.execute("""
            WITH data as (
                SELECT extract(year from issue) as yr,
                issue, eventid, wfo from warnings WHERE
                phenomena = %s and significance = %s
                and extract(doy from issue) < %s
                and issue > '2003-01-01'),
            agg1 as (
                SELECT yr, min(issue) as min_issue, eventid, wfo from data
                GROUP by yr, eventid, wfo),
            agg2 as (
                SELECT yr, extract(doy from min_issue) as doy, count(*)
                from agg1 GROUP by yr, doy)
            SELECT yr, doy, sum(count) OVER (PARTITION by yr ORDER by doy ASC)
            from agg2 ORDER by yr ASC, doy ASC
            """, (phenomena, significance, lastdoy))
        else:
            cursor.execute("""
            WITH data as (
                SELECT extract(year from issue) as yr,
                issue, eventid, wfo from warnings WHERE
                phenomena = %s and significance = %s and wfo = %s
                and extract(doy from issue) < %s
                and issue > '2003-01-01'),
            agg1 as (
                SELECT yr, min(issue) as min_issue, eventid, wfo from data
                GROUP by yr, eventid, wfo),
            agg2 as (
                SELECT yr, extract(doy from min_issue) as doy, count(*)
                from agg1 GROUP by yr, doy)
            SELECT yr, doy, sum(count) OVER (PARTITION by yr ORDER by doy ASC)
            from agg2 ORDER by yr ASC, doy ASC
            """, (phenomena, significance, station, lastdoy))

    data = {}
    for yr in range(2003, datetime.datetime.now().year + 1):
        data[yr] = {'doy': [], 'counts': []}
    rows = []
    for row in cursor:
        data[row[0]]['doy'].append(row[1])
        data[row[0]]['counts'].append(row[2])
        rows.append(dict(year=row[0], day_of_year=row[1], count=row[2]))
    # append on a lastdoy value so all the plots go to the end
    for yr in range(2003, datetime.datetime.now().year):
        if len(data[yr]['doy']) == 0 or data[yr]['doy'][-1] >= lastdoy:
            continue
        data[yr]['doy'].append(lastdoy)
        data[yr]['counts'].append(data[yr]['counts'][-1])
    data[datetime.datetime.now().year]['doy'].append(
        int(datetime.datetime.today().strftime("%j")) + 1)
    data[datetime.datetime.now().year]['counts'].append(
        data[datetime.datetime.now().year]['counts'][-1])
    df = pd.DataFrame(rows)

    (fig, ax) = plt.subplots(1, 1)
    ann = []
    for yr in range(2003, datetime.datetime.now().year + 1):
        if len(data[yr]['doy']) < 2:
            continue
        lp = ax.plot(data[yr]['doy'], data[yr]['counts'], lw=2,
                     label="%s (%s)" % (str(yr), data[yr]['counts'][-1]),
                     drawstyle='steps-post')
        ann.append(
            ax.text(data[yr]['doy'][-1]+1, data[yr]['counts'][-1],
                    "%s" % (yr,), color='w', va='center',
                    fontsize=10, bbox=dict(facecolor=lp[0].get_color(),
                                           edgecolor=lp[0].get_color()))
            )

    mask = np.zeros(fig.canvas.get_width_height(), bool)
    fig.canvas.draw()

    attempts = 10
    while len(ann) > 0 and attempts > 0:
        attempts -= 1
        removals = []
        for a in ann:
            bbox = a.get_window_extent()
            x0 = int(bbox.x0)
            x1 = int(math.ceil(bbox.x1))
            y0 = int(bbox.y0)
            y1 = int(math.ceil(bbox.y1))

            s = np.s_[x0:x1+1, y0:y1+1]
            if np.any(mask[s]):
                a.set_position([a._x-int(lastdoy/14), a._y])
            else:
                mask[s] = True
                removals.append(a)
        for rm in removals:
            ann.remove(rm)

    ax.legend(loc=2, ncol=2, fontsize=10)
    ax.set_xlim(1, 367)
    ax.set_xticks((1, 32, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335, 365))
    ax.set_xticklabels(calendar.month_abbr[1:])
    ax.grid(True)
    ax.set_ylabel("Accumulated Count")
    title = "%s %s" % (vtec._phenDict[phenomena], vtec._sigDict[significance])
    if combo == 'svrtor':
        title = "Severe Thunderstorm + Tornado Warning"
    ax.set_title(("NWS %s\n %s Count"
                  ) % (nt.sts[station]['name'], title))
    ax.set_xlim(0, lastdoy)
    if lastdoy < 367:
        ax.set_xlabel(("thru approximately %s"
                       ) % (datetime.date.today().strftime("%-d %b"), ))

    return fig, df
Example #20
    'IA2070': 'DVN',
    'IA2203': 'DSM',
    'IA2367': 'DBQ',
    'IA2723': 'EST',
    'IA4106': 'IOW',
    'IA4587': 'LWD',
    'IA5199': 'MIW',
    'IA5235': 'MCW',
    'IA6389': 'OTM',
    'IA7708': 'SUX',
    'IA7844': 'SPW',
    'IA8706': 'ALO',
    }

# Pre-compute the grid location of each climate site
nt = NetworkTable("%sCLIMATE" % (state.upper(),))
for sid in nt.sts.keys():
    i, j = iemre.find_ij(nt.sts[sid]['lon'], nt.sts[sid]['lat'])
    nt.sts[sid]['gridi'] = i
    nt.sts[sid]['gridj'] = j
    for key in ['high', 'low', 'precip', 'snow', 'snowd']:
        nt.sts[sid][key] = None


def estimate_precip(ts):
    """Estimate precipitation based on IEMRE"""
    idx = iemre.daily_offset(ts)
    nc = netCDF4.Dataset("/mesonet/data/iemre/%s_mw_daily.nc" % (ts.year, ),
                         'r')
    grid12 = nc.variables['p01d_12z'][idx, :, :] / 25.4
    grid00 = nc.variables['p01d'][idx, :, :] / 25.4
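    # --- Illustrative sketch only: the original snippet is truncated here. ---
    # One plausible continuation would sample each station's pre-computed grid
    # cell from the analysis; the [j, i] indexing order and the use of the 12z
    # grid are assumptions, not the original code.
    for sid in nt.sts:
        val = grid12[nt.sts[sid]['gridj'], nt.sts[sid]['gridi']]
        if val >= 0:  # masked or missing cells evaluate False here
            nt.sts[sid]['precip'] = float(val)
    nc.close()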
Example #21
    'SD6597': 'PIR',
    'SD6947': 'RAP',
    # SD6947       | KUNR | RAPID CITY 4NW       | Rapid City

    # Wisconsin
    'WI5479': 'MKE',
    'WI3269': 'GRB',
    'WI7113': 'RHI',
    'WI2428': 'EAU',
    'WI4961': 'MSN',
    'WI4370': 'LSE',
    'WI2428': 'AUW',  # NOTE: duplicate key; this silently overrides the 'EAU' entry above
    }

# Pre-compute the grid location of each climate site
nt = NetworkTable("%sCLIMATE" % (state.upper(),))
for sid in nt.sts.keys():
    i, j = iemre.find_ij(nt.sts[sid]['lon'], nt.sts[sid]['lat'])
    nt.sts[sid]['gridi'] = i
    nt.sts[sid]['gridj'] = j
    for key in ['high', 'low', 'precip', 'snow', 'snowd']:
        nt.sts[sid][key] = None


def estimate_precip(ts):
    """Estimate precipitation based on IEMRE"""
    idx = iemre.daily_offset(ts)
    nc = netCDF4.Dataset("/mesonet/data/iemre/%s_mw_daily.nc" % (ts.year, ),
                         'r')
    grid12 = nc.variables['p01d_12z'][idx, :, :] / 25.4
    grid00 = nc.variables['p01d'][idx, :, :] / 25.4
Example #22
"""
 Generate a map of Number of days with precip
"""

import sys
from pyiem.plot import MapPlot
import datetime
now = datetime.datetime.now()

from pyiem.network import Table as NetworkTable
nt = NetworkTable("IACLIMATE")
nt.sts["IA0200"]["lon"] = -93.4
nt.sts["IA5992"]["lat"] = 41.65
import psycopg2.extras
COOP = psycopg2.connect(database='coop', host='iemdb', user='******')
ccursor = COOP.cursor(cursor_factory=psycopg2.extras.DictCursor)

def runYear(year):
    """Generate the days-with-precip map for the given year."""
    # Grab the data
    sql = """SELECT station,
        sum(case when precip >= 0.01 then 1 else 0 end) as days, max(day)
        from alldata_ia WHERE year = %s and substr(station,3,1) != 'C' 
        and station != 'IA0000' GROUP by station""" % (year,)

    lats = []
    lons = []
    vals = []
    labels = []
    ccursor.execute(sql)
    for row in ccursor:
        sid = row['station'].upper()
Example #23
import datetime
import subprocess
import tempfile
import calendar

import psycopg2.extras
import pytz
from pyiem.network import Table as NetworkTable

utc = datetime.datetime.utcnow()
utc = utc.replace(tzinfo=pytz.timezone("UTC"))
tstr = utc.strftime("%Y%m%d%H%M")

now = utc.astimezone(pytz.timezone("America/Chicago"))

IEM = psycopg2.connect(database='iem', host='iemdb', user='******')
icursor = IEM.cursor(cursor_factory=psycopg2.extras.DictCursor)

st = NetworkTable(['KCCI', 'KELO', 'KIMT'])

st.sts["SMAI4"]["plot_name"] = "M-town"
st.sts["SBZI4"]["plot_name"] = "Zoo"
st.sts["SMSI4"]["plot_name"] = "Barnum"
st.sts["STQI4"]["plot_name"] = "Tama"
st.sts["SBOI4"]["plot_name"] = "Boone"


def altiTxt(d):
    """Return R(ising), F(alling) or S(teady) for an altimeter change."""
    if d == "":
        return "S"
    if d < 0:
        return "F"
    if d > 0:
        return "R"
Example #24
def plotter(fdict):
    """ Go """
    import matplotlib
    matplotlib.use('agg')
    import matplotlib.pyplot as plt
    import matplotlib.colors as mpcolors
    import matplotlib.patheffects as PathEffects
    pgconn = psycopg2.connect(database='postgis', host='iemdb', user='******')
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['station']
    phenomena = ctx['phenomena']
    significance = ctx['significance']
    opt = ctx['opt']
    state = ctx['state']

    nt = NetworkTable('WFO')
    nt.sts['_ALL'] = {'name': 'All Offices'}

    wfo_limiter = (" and wfo = '%s' ") % (station if len(station) == 3 else
                                          station[1:], )
    if station == '_ALL':
        wfo_limiter = ''
    if opt == 'state':
        wfo_limiter = " and substr(ugc, 1, 2) = '%s'" % (state, )

    df = read_sql("""
        with data as (
            SELECT distinct extract(year from issue) as yr2,
            min(issue) as i, wfo, eventid
            from warnings where phenomena = %s and significance = %s
            """ + wfo_limiter + """ and issue is not null
            GROUP by yr2, wfo, eventid)

        SELECT extract(year from i) as yr, extract(month from i) as mo,
        count(*) from data GROUP by yr, mo ORDER by yr, mo ASC
      """,
                  pgconn,
                  params=(phenomena, significance),
                  index_col=None)

    if df.empty:
        raise ValueError("Sorry, no data found!")
    (fig, ax) = plt.subplots(1, 1, figsize=(8, 8))

    minyear = df['yr'].min()
    maxyear = df['yr'].max()
    data = np.zeros((int(maxyear - minyear + 1), 12))
    for _, row in df.iterrows():
        data[int(row['yr'] - minyear), int(row['mo'] - 1)] = row['count']
        txt = ax.text(row['mo'],
                      row['yr'],
                      "%.0f" % (row['count'], ),
                      va='center',
                      ha='center',
                      color='white')
        txt.set_path_effects(
            [PathEffects.withStroke(linewidth=2, foreground="k")])
    cmap = plt.get_cmap('jet')
    cmap.set_under('white')
    maxval = max([df['count'].max(), 11])
    bounds = np.linspace(1, maxval, 10, dtype='i')
    norm = mpcolors.BoundaryNorm(bounds, cmap.N)
    res = ax.imshow(data,
                    extent=[0.5, 12.5, maxyear + 0.5, minyear - 0.5],
                    interpolation='nearest',
                    aspect='auto',
                    norm=norm,
                    cmap=cmap)
    fig.colorbar(res, label='count')
    ax.grid(True)
    ax.set_xticks(range(1, 13))
    ax.set_xticklabels(calendar.month_abbr[1:])

    title = "NWS %s" % (nt.sts[station]['name'], )
    if opt == 'state':
        title = ("NWS Issued for Counties/Zones for State of %s") % (
            reference.state_names[state], )
    title += ("\n%s %s (%s.%s) Issued by Year,Month") % (
        vtec._phenDict[phenomena], vtec._sigDict[significance], phenomena,
        significance)
    ax.set_title(title)

    return fig, df
Example #25
File: p68.py Project: akrherz/iem
def plotter(fdict):
    """ Go """
    import matplotlib
    matplotlib.use('agg')
    import matplotlib.pyplot as plt
    pgconn = psycopg2.connect(database='postgis', host='iemdb', user='******')

    station = fdict.get('station', 'DMX')[:4]

    nt = NetworkTable('WFO')
    nt.sts['_ALL'] = {'name': 'All Offices'}

    fig = plt.figure(figsize=(8,
                              14 if station != '_ALL' else 21))
    ax = [None, None]
    ax[0] = plt.axes([0.1, 0.75, 0.85, 0.2])
    ax[1] = plt.axes([0.1, 0.05, 0.85, 0.65])

    if station == '_ALL':
        df = read_sql("""
            SELECT distinct extract(year from issue) as year,
                phenomena, significance from warnings WHERE
                phenomena is not null and significance is not null and
                issue > '2005-01-01' and issue is not null
            """, pgconn, index_col=None)
    else:
        df = read_sql("""
            SELECT distinct extract(year from issue) as year,
            phenomena, significance from warnings WHERE
            wfo = %s and phenomena is not null and significance is not null
            and issue > '2005-01-01' and issue is not null
            """, pgconn, params=(station, ), index_col=None)

    df['wfo'] = station
    df['year'] = df['year'].astype('i')
    gdf = df.groupby('year').count()

    ax[0].bar(gdf.index.values, gdf['wfo'], width=0.8, fc='b', ec='b',
              align='center')
    for yr, row in gdf.iterrows():
        ax[0].text(yr, row['wfo'] + 1, "%s" % (row['wfo'],), ha='center')
    ax[0].set_title(("[%s] NWS %s\nCount of Distinct VTEC Phenomena/"
                     "Significance - %i to %i"
                     ) % (station, nt.sts[station]['name'],
                          df['year'].min(), df['year'].max()))
    ax[0].grid()
    ax[0].set_ylabel("Count")
    ax[0].set_xlim(gdf.index.values.min() - 0.5,
                   gdf.index.values.max() + 0.5)

    pos = {}
    i = 1
    df.sort_values(['phenomena', 'significance'], inplace=True)
    for _, row in df.iterrows():
        key = "%s.%s" % (row['phenomena'], row['significance'])
        if key not in pos:
            pos[key] = i
            i += 1
        ax[1].text(row['year'], pos[key], key, ha='center',
                   va='center', fontsize=10,
                   bbox=dict(color='white'))

    ax[1].set_title("VTEC <Phenomena.Significance> Issued by Year")
    ax[1].set_ylim(0, i)
    ax[1].grid(True)
    ax[1].set_xlim(gdf.index.values.min() - 0.5,
                   gdf.index.values.max() + 0.5)
    return fig, df
Example #26
def runYear(year):
    """Hack"""
    now = datetime.datetime.now()
    nt = NetworkTable("IACLIMATE")
    nt.sts["IA0200"]["lon"] = -93.4
    nt.sts["IA5992"]["lat"] = 41.65
    pgconn = get_dbconn("coop", user="******")
    ccursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    sql = """SELECT station, avg(high) as avg_high, avg(low) as avg_low,
           avg( (high+low)/2 ) as avg_tmp, max(day)
           from alldata_ia WHERE year = %s and station != 'IA0000' and
           high is not Null and low is not Null and substr(station,3,1) != 'C'
           GROUP by station""" % (
        year,
    )
    ccursor.execute(sql)
    # Plot Average Highs
    lats = []
    lons = []
    vals = []
    labels = []
    for row in ccursor:
        sid = row["station"].upper()
        if sid not in nt.sts:
            continue
        labels.append(sid[2:])
        lats.append(nt.sts[sid]["lat"])
        lons.append(nt.sts[sid]["lon"])
        vals.append(row["avg_high"])
        maxday = row["max"]

    # ---------- Plot the points
    mp = MapPlot(
        title="Average Daily High Temperature [F] (%s)" % (year,),
        subtitle="1 January - %s" % (maxday.strftime("%d %B"),),
        axisbg="white",
    )
    mp.plot_values(
        lons,
        lats,
        vals,
        labels=labels,
        labeltextsize=8,
        labelcolor="tan",
        fmt="%.1f",
    )
    pqstr = "plot m %s bogus %s/summary/avg_high.png png" % (
        now.strftime("%Y%m%d%H%M"),
        year,
    )
    mp.postprocess(pqstr=pqstr)
    mp.close()

    # Plot Average Lows
    lats = []
    lons = []
    vals = []
    labels = []
    ccursor.execute(sql)
    for row in ccursor:
        sid = row["station"].upper()
        if sid not in nt.sts:
            continue
        labels.append(sid[2:])
        lats.append(nt.sts[sid]["lat"])
        lons.append(nt.sts[sid]["lon"])
        vals.append(row["avg_low"])

    # ---------- Plot the points
    mp = MapPlot(
        title="Average Daily Low Temperature [F] (%s)" % (year,),
        subtitle="1 January - %s" % (maxday.strftime("%d %B"),),
        axisbg="white",
    )
    mp.plot_values(
        lons,
        lats,
        vals,
        labels=labels,
        labeltextsize=8,
        labelcolor="tan",
        fmt="%.1f",
    )
    pqstr = "plot m %s bogus %s/summary/avg_low.png png" % (
        now.strftime("%Y%m%d%H%M"),
        year,
    )
    mp.postprocess(pqstr=pqstr)
    mp.close()

    # Plot Average Highs
    lats = []
    lons = []
    vals = []
    labels = []
    ccursor.execute(sql)
    for row in ccursor:
        sid = row["station"].upper()
        if sid not in nt.sts:
            continue
        labels.append(sid[2:])
        lats.append(nt.sts[sid]["lat"])
        lons.append(nt.sts[sid]["lon"])
        vals.append(row["avg_tmp"])

    # ---------- Plot the points
    mp = MapPlot(
        title="Average Daily Temperature [F] (%s)" % (year,),
        subtitle="1 January - %s" % (maxday.strftime("%d %B"),),
        axisbg="white",
    )
    mp.plot_values(
        lons,
        lats,
        vals,
        labels=labels,
        labeltextsize=8,
        labelcolor="tan",
        fmt="%.1f",
    )
    pqstr = "plot m %s bogus %s/summary/avg_temp.png png" % (
        now.strftime("%Y%m%d%H%M"),
        year,
    )
    mp.postprocess(pqstr=pqstr)
    mp.close()
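A hypothetical command-line entry point for the runYear() scripts above (the argument handling is an assumption; the original drivers are not shown):

import sys


def main(argv):
    """Generate the maps for the requested year."""
    runYear(int(argv[1]))


if __name__ == "__main__":
    main(sys.argv)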
Example #27
def plotter(fdict):
    """ Go """
    pgconn = get_dbconn('postgis')
    cursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['station']
    limit = ctx['limit']
    combo = ctx['c']
    phenomena = ctx['phenomena'][:2]
    significance = ctx['significance'][:2]
    opt = ctx['opt']
    state = ctx['state'][:2]
    syear = ctx['syear']
    eyear = ctx['eyear']

    nt = NetworkTable('WFO')
    nt.sts['_ALL'] = {'name': 'All Offices'}

    lastdoy = 367
    if limit.lower() == 'yes':
        lastdoy = int(datetime.datetime.today().strftime("%j")) + 1
    wfolimiter = " and wfo = '%s' " % (station, )
    if opt == 'state':
        wfolimiter = " and substr(ugc, 1, 2) = '%s' " % (state, )
    if opt == 'wfo' and station == '_ALL':
        wfolimiter = ''
    eventlimiter = ""
    if combo == 'svrtor':
        eventlimiter = " or (phenomena = 'SV' and significance = 'W') "
        phenomena = 'TO'
        significance = 'W'

    cursor.execute(
        """
    WITH data as (
        SELECT extract(year from issue) as yr,
        issue, phenomena, significance, eventid, wfo from warnings WHERE
        ((phenomena = %s and significance = %s) """ + eventlimiter + """)
        and extract(year from issue) >= %s and
        extract(year from issue) <= %s
        and extract(doy from issue) <= %s """ + wfolimiter + """),
    agg1 as (
        SELECT yr, min(issue) as min_issue, eventid, wfo, phenomena,
        significance from data
        GROUP by yr, eventid, wfo, phenomena, significance),
    agg2 as (
        SELECT yr, extract(doy from min_issue) as doy, count(*)
        from agg1 GROUP by yr, doy)
    SELECT yr, doy, sum(count) OVER (PARTITION by yr ORDER by doy ASC)
    from agg2 ORDER by yr ASC, doy ASC
    """, (phenomena, significance, syear, eyear, lastdoy))

    data = {}
    for yr in range(syear, eyear + 1):
        data[yr] = {'doy': [0], 'counts': [0]}
    rows = []
    for row in cursor:
        data[row[0]]['doy'].append(row[1])
        data[row[0]]['counts'].append(row[2])
        rows.append(dict(year=row[0], day_of_year=row[1], count=row[2]))
    # append on a lastdoy value so all the plots go to the end
    for yr in range(syear, eyear):
        if len(data[yr]['doy']) == 1 or data[yr]['doy'][-1] >= lastdoy:
            continue
        data[yr]['doy'].append(lastdoy)
        data[yr]['counts'].append(data[yr]['counts'][-1])
    if data[eyear]['doy']:
        data[eyear]['doy'].append(
            int(datetime.datetime.today().strftime("%j")) + 1)
        data[eyear]['counts'].append(data[eyear]['counts'][-1])
    df = pd.DataFrame(rows)

    (fig, ax) = plt.subplots(1, 1, figsize=(8, 6))
    ann = []
    for yr in range(syear, eyear + 1):
        if len(data[yr]['doy']) < 2:
            continue
        lp = ax.plot(data[yr]['doy'],
                     data[yr]['counts'],
                     lw=2,
                     label="%s (%s)" % (str(yr), data[yr]['counts'][-1]),
                     drawstyle='steps-post')
        ann.append(
            ax.text(data[yr]['doy'][-1] + 1,
                    data[yr]['counts'][-1],
                    "%s" % (yr, ),
                    color='w',
                    va='center',
                    fontsize=10,
                    bbox=dict(facecolor=lp[0].get_color(),
                              edgecolor=lp[0].get_color())))

    mask = np.zeros(fig.canvas.get_width_height(), bool)
    fig.canvas.draw()

    attempts = 10
    while ann and attempts > 0:
        attempts -= 1
        removals = []
        for a in ann:
            bbox = a.get_window_extent()
            x0 = int(bbox.x0)
            x1 = int(math.ceil(bbox.x1))
            y0 = int(bbox.y0)
            y1 = int(math.ceil(bbox.y1))

            s = np.s_[x0:x1 + 1, y0:y1 + 1]
            if np.any(mask[s]):
                a.set_position([a._x - int(lastdoy / 14), a._y])
            else:
                mask[s] = True
                removals.append(a)
        for rm in removals:
            ann.remove(rm)

    ax.legend(loc=2, ncol=2, fontsize=10)
    ax.set_xlim(1, 367)
    ax.set_xticks((1, 32, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335, 365))
    ax.set_xticklabels(calendar.month_abbr[1:])
    ax.grid(True)
    ax.set_ylabel("Accumulated Count")
    ax.set_ylim(bottom=0)
    title = vtec.get_ps_string(phenomena, significance)
    if combo == 'svrtor':
        title = "Severe Thunderstorm + Tornado Warning"
    ptitle = "%s" % (nt.sts[station]['name'], )
    if opt == 'state':
        ptitle = ("NWS Issued for Counties/Parishes in %s") % (
            reference.state_names[state], )
    ax.set_title(("%s\n %s Count") % (ptitle, title))
    ax.set_xlim(0, lastdoy)
    if lastdoy < 367:
        ax.set_xlabel(("thru approximately %s") %
                      (datetime.date.today().strftime("%-d %B"), ))

    return fig, df
Example #28
def plotter(fdict):
    """ Go """
    pgconn = get_dbconn('postgis')
    nt = NetworkTable('WFO')
    nt.sts['_ALL'] = {'name': 'All Offices'}

    ctx = get_autoplot_context(fdict, get_description())
    opt = ctx['opt']
    station = ctx['station']
    state = ctx['state']
    date1 = ctx.get('date1', datetime.date(2010, 4, 1))
    date2 = ctx.get(
        'date2', datetime.date.today() + datetime.timedelta(days=1))
    pgconn = get_dbconn('postgis')
    wfo_limiter = ("and wfo = '%s' "
                   ) % (station if len(station) == 3 else station[1:],)
    if station == '_ALL':
        wfo_limiter = ''
    sql = """
    select windtag, hailtag,
    min(issue) as min_issue, max(issue) as max_issue, count(*)
    from sbw WHERE issue >= '%s' and issue <= '%s'
    %s
    and (windtag > 0 or hailtag > 0)
    and status = 'NEW' and phenomena = 'SV'
    GROUP by windtag, hailtag
    """ % (date1, date2, wfo_limiter)
    supextra = ""
    if opt == 'wfo' and station != '_ALL':
        supextra = "For warnings issued by %s %s.\n" % (
            station, nt.sts[station]['name'])
    if opt == 'state':
        supextra = (
            "For warnings that covered some portion of %s.\n"
        ) % (state_names[state], )

        sql = """
        SELECT windtag, hailtag,
        min(issue) as min_issue, max(issue) as max_issue, count(*)
        from sbw w, states s
        WHERE issue >= '%s' and issue <= '%s' and
        s.state_abbr = '%s' and ST_Intersects(s.the_geom, w.geom) and
        (windtag > 0 or hailtag > 0)
        and status = 'NEW' and phenomena = 'SV'
        GROUP by windtag, hailtag
        """ % (date1, date2, state)

    df = read_sql(sql, pgconn, index_col=None)
    minvalid = df['min_issue'].min()
    maxvalid = df['max_issue'].max()
    df.fillna(0, inplace=True)
    total = df['count'].sum()
    uniquehail = df['hailtag'].unique().tolist()
    uniquehail.sort()
    uniquehail = uniquehail[::-1]
    uniquewind = df['windtag'].astype(int).unique().tolist()
    uniquewind.sort()

    gdf = df.set_index(['hailtag', 'windtag'])

    (fig, ax) = plt.subplots(figsize=(8, 6))
    for (hailtag, windtag), row in gdf.iterrows():
        y = uniquehail.index(hailtag)
        x = uniquewind.index(windtag)
        val = row['count'] / total * 100.
        ax.text(x, y, "%.2f" % (val, ), ha='center', fontsize=FONTSIZE,
                color='r' if val >= 10 else 'k',
                va='center', bbox=dict(color='white', boxstyle='square,pad=0'))

    for hailtag, row in df.groupby('hailtag').sum().iterrows():
        y = uniquehail.index(hailtag)
        x = len(uniquewind)
        val = row['count'] / total * 100.
        ax.text(x, y, "%.2f" % (val, ), ha='center', fontsize=FONTSIZE,
                color='r' if val >= 10 else 'k',
                va='center', bbox=dict(color='white', boxstyle='square,pad=0'))

    for windtag, row in df.groupby('windtag').sum().iterrows():
        y = -1
        x = uniquewind.index(windtag)
        val = row['count'] / total * 100.
        ax.text(x, y, "%.2f" % (val, ), ha='center', fontsize=FONTSIZE,
                color='r' if val >= 10 else 'k',
                va='center', bbox=dict(color='white', boxstyle='square,pad=0'))

    ax.set_xticks(range(len(uniquewind) + 1))
    ax.set_yticks(range(-1, len(uniquehail) + 1))
    ax.set_xlim(-0.5, len(uniquewind) + 0.5)
    ax.set_ylim(-1.5, len(uniquehail) - 0.5)
    ax.set_xticklabels(uniquewind + ['Total'], fontsize=14)
    ax.set_yticklabels(['Total'] + uniquehail, fontsize=14)
    ax.xaxis.tick_top()
    ax.set_xlabel("Wind Speed [mph]", fontsize=14)
    ax.set_ylabel("Hail Size [inch]", fontsize=14)
    ax.xaxis.set_label_position('top')
    plt.tick_params(top=False, bottom=False, left=False, right=False)
    fig.suptitle(
        ("Frequency [%%] of NWS Wind/Hail Tags for "
         "Severe Thunderstorm Warning Issuance\n"
         "%s through %s, %.0f warnings\n%s"
         "Values larger than 10%% in red"
         ) % (minvalid.strftime("%d %b %Y"),
              maxvalid.strftime("%d %b %Y"), df['count'].sum(), supextra))
    ax.set_position([0.15, 0.05, 0.8, 0.72])

    return fig, df
Example #29
def plotter(fdict):
    """ Go """
    pgconn = get_dbconn('postgis')
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['station']
    phenomena = ctx['phenomena']
    significance = ctx['significance']
    opt = ctx['opt']
    state = ctx['state']

    nt = NetworkTable('WFO')
    nt.sts['_ALL'] = {'name': 'All Offices'}

    wfo_limiter = (" and wfo = '%s' ") % (station if len(station) == 3 else
                                          station[1:], )
    if station == '_ALL':
        wfo_limiter = ''
    if opt == 'state':
        wfo_limiter = " and substr(ugc, 1, 2) = '%s'" % (state, )

    # NB: grouping by month is a hack that can produce false positives when an
    # event crosses a month boundary (recall the 2017 eventid issues)
    df = read_sql("""
        with data as (
            SELECT distinct
            extract(year from issue) as yr,
            extract(month from issue) as mo, wfo, eventid
            from warnings where phenomena = %s and significance = %s
            """ + wfo_limiter + """
            GROUP by yr, mo, wfo, eventid)

        SELECT yr, mo, count(*) from data GROUP by yr, mo ORDER by yr, mo ASC
      """,
                  pgconn,
                  params=(phenomena, significance),
                  index_col=None)

    if df.empty:
        raise ValueError("Sorry, no data found!")
    (fig, ax) = plt.subplots(1, 1, figsize=(8, 8))

    minyear = df['yr'].min()
    maxyear = df['yr'].max()
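    # Year (rows) by month (columns) grid of event counts for the heatmap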
    data = np.zeros((int(maxyear - minyear + 1), 12), 'i')
    for _, row in df.iterrows():
        data[int(row['yr'] - minyear), int(row['mo'] - 1)] = row['count']
        txt = ax.text(row['mo'],
                      row['yr'],
                      "%.0f" % (row['count'], ),
                      va='center',
                      ha='center',
                      color='white')
        txt.set_path_effects(
            [PathEffects.withStroke(linewidth=2, foreground="k")])
    cmap = plt.get_cmap('jet')
    cmap.set_under('white')
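    # Span the color scale to at least 11 so low counts get distinct bins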
    maxval = max([df['count'].max(), 11])
    bounds = np.linspace(1, maxval, 10, dtype='i')
    norm = mpcolors.BoundaryNorm(bounds, cmap.N)
    res = ax.imshow(data,
                    extent=[0.5, 12.5, maxyear + 0.5, minyear - 0.5],
                    interpolation='nearest',
                    aspect='auto',
                    norm=norm,
                    cmap=cmap)
    fig.colorbar(res, label='count')
    ax.grid(True)
    ax.set_xticks(range(1, 13))
    ax.set_xticklabels(calendar.month_abbr[1:])

    title = "NWS %s" % (nt.sts[station]['name'], )
    if opt == 'state':
        title = ("NWS Issued for Counties/Zones for State of %s") % (
            reference.state_names[state], )
    title += ("\n%s (%s.%s) Issued by Year,Month") % (vtec.get_ps_string(
        phenomena, significance), phenomena, significance)
    ax.set_title(title)

    return fig, df
Example #30
0
def plotter(fdict):
    """ Go """
    import matplotlib
    matplotlib.use('agg')
    pgconn = get_dbconn('postgis')
    ctx = get_autoplot_context(fdict, get_description())
    sts = ctx['sdate']
    ets = ctx['edate']
    wfo = ctx['wfo']
    p1 = ctx['phenomenav1']
    p2 = ctx['phenomenav2']
    p3 = ctx['phenomenav3']
    p4 = ctx['phenomenav4']
    phenomena = []
    for p in [p1, p2, p3, p4]:
        if p is not None:
            phenomena.append(p[:2])
    s1 = ctx['significancev1']
    s2 = ctx['significancev2']
    s3 = ctx['significancev3']
    s4 = ctx['significancev4']
    significance = []
    for s in [s1, s2, s3, s4]:
        if s is not None:
            significance.append(s[0])

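    # Build an OR'd SQL filter over the selected (phenomena, significance)
    # pairs along with a matching title string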
    pstr = []
    title = ""
    for i, (p, s) in enumerate(zip(phenomena, significance)):
        pstr.append("(phenomena = '%s' and significance = '%s')" % (p, s))
        if i == 2:
            title += "\n"
        title += "%s %s.%s, " % (vtec.get_ps_string(p, s), p, s)
    pstr = " or ".join(pstr)
    pstr = "(%s)" % (pstr, )

    if ctx['w'] == 'wfo':
        nt = NetworkTable("WFO")
        nt.sts['_ALL'] = {'name': 'All Offices', 'tzname': 'America/Chicago'}
        wfo_limiter = (" and wfo = '%s' ") % (wfo
                                              if len(wfo) == 3 else wfo[1:], )
        if wfo == '_ALL':
            wfo_limiter = ''
        tzname = nt.sts[wfo]['tzname']
    else:
        wfo_limiter = " and substr(ugc, 1, 2) = '%s' " % (ctx['state'], )
        tzname = 'America/Chicago'

    df = read_sql("""
with events as (
  select wfo, min(issue at time zone %s) as localissue,
  extract(year from issue) as year,
  phenomena, significance, eventid from warnings
  where """ + pstr + """ """ + wfo_limiter + """ and
  issue >= %s and issue < %s GROUP by wfo, year, phenomena, significance,
  eventid
)

SELECT date(localissue), count(*) from events GROUP by date(localissue)
    """,
                  pgconn,
                  params=(tzname, sts - datetime.timedelta(days=2),
                          ets + datetime.timedelta(days=2)),
                  index_col='date')

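    # Seed each day in the window with zero, then overlay the observed counts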
    data = {}
    now = sts
    while now <= ets:
        data[now] = {'val': 0}
        now += datetime.timedelta(days=1)
    for date, row in df.iterrows():
        data[date] = {'val': row['count']}
    fig = calendar_plot(sts, ets, data, heatmap=(ctx['heatmap'] == 'yes'))
    if ctx['w'] == 'wfo':
        title2 = "NWS %s [%s]" % (nt.sts[wfo]['name'], wfo)
        if wfo == '_ALL':
            title2 = "All NWS Offices"
    else:
        title2 = state_names[ctx['state']]
    fig.text(
        0.5,
        0.95,
        ("Number of VTEC Events for %s by Local Calendar Date"
         "\nValid %s - %s for %s") %
        (title2, sts.strftime("%d %b %Y"), ets.strftime("%d %b %Y"), title),
        ha='center',
        va='center')

    return fig, df
Example #31
0
def main(argv):
    """Go Main Go"""
    nt = Table("ISUSM")
    qdict = loadqc()

    idbconn = get_dbconn("isuag", user="******")
    pdbconn = get_dbconn("postgis", user="******")

    day_ago = int(argv[1])
    ts = datetime.date.today() - datetime.timedelta(days=day_ago)
    hlons, hlats, hvals = do_nam(ts)
    nam = temperature(hvals, "K").value("F")
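    # Smooth the NAM grid with a 3x3 mean filter to reduce pixel noise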
    window = np.ones((3, 3))
    nam = convolve2d(nam, window / window.sum(), mode="same", boundary="symm")

    # mp = MapPlot(sector='midwest')
    # mp.pcolormesh(hlons, hlats, nam,
    #              range(20, 90, 5))
    # mp.postprocess(filename='test.png')
    # sys.exit()

    # Query out the data
    df = read_sql(
        """
        WITH ranges as (
            select station, count(*), min(tsoil_c_avg_qc),
            max(tsoil_c_avg_qc) from sm_hourly WHERE
            valid >= %s and valid < %s and tsoil_c_avg_qc > -40
            and tsoil_c_avg_qc < 50 GROUP by station
        )
        SELECT d.station, d.tsoil_c_avg_qc,
        r.max as hourly_max_c, r.min as hourly_min_c, r.count
         from sm_daily d JOIN ranges r on (d.station = r.station)
        where valid = %s and tsoil_c_avg_qc > -40 and r.count > 19
    """,
        idbconn,
        params=(ts, ts + datetime.timedelta(days=1), ts),
        index_col="station",
    )
    for col, newcol in zip(
        ["tsoil_c_avg_qc", "hourly_min_c", "hourly_max_c"],
        ["ob", "min", "max"],
    ):
        df[newcol] = temperature(df[col].values, "C").value("F")
        df.drop(col, axis=1, inplace=True)

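    # Attach QC flag, sampled NAM value, and coordinates for each station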
    for stid, row in df.iterrows():
        df.at[stid, "ticket"] = qdict.get(stid, {}).get("soil4", False)
        x, y = get_idx(hlons, hlats, nt.sts[stid]["lon"], nt.sts[stid]["lat"])
        df.at[stid, "nam"] = nam[x, y]
        df.at[stid, "lat"] = nt.sts[stid]["lat"]
        df.at[stid, "lon"] = nt.sts[stid]["lon"]
    # 'ticket' is an object-dtype column, so cast to bool before filtering
    df = df[~df["ticket"].astype("bool")]
    df["diff"] = df["ob"] - df["nam"]
    bias = df["diff"].mean()
    nam = nam + bias
    print("fancy_4inch NAM bias correction of: %.2fF applied" % (bias, ))
    # apply nam bias to sampled data
    df["nam"] += bias
    df["diff"] = df["ob"] - df["nam"]
    # require each ob to be within one standard deviation of the sampled NAM
    # values, with a floor of 5 degrees
    std = 5.0 if df["nam"].std() < 5.0 else df["nam"].std()
    for station in df[df["diff"].abs() > std].index.values:
        print(("fancy_4inch %s QC'd %s out std: %.2f, ob:%.1f nam:%.1f") % (
            ts.strftime("%Y%m%d"),
            station,
            std,
            df.at[station, "ob"],
            df.at[station, "nam"],
        ))
        df.drop(station, inplace=True)

    # Query out centroids of counties...
    cdf = read_sql(
        """SELECT ST_x(ST_centroid(the_geom)) as lon,
        ST_y(ST_centroid(the_geom)) as lat
        from uscounties WHERE state_name = 'Iowa'
    """,
        pdbconn,
        index_col=None,
    )
    for i, row in cdf.iterrows():
        x, y = get_idx(hlons, hlats, row["lon"], row["lat"])
        cdf.at[i, "nam"] = nam[x, y]

    mp = MapPlot(
        sector="iowa",
        title=("Average 4 inch Depth Soil Temperatures for %s") %
        (ts.strftime("%b %d, %Y"), ),
        subtitle=("County est. based on bias adj. "
                  "NWS NAM Model (black numbers), "
                  "ISUSM network observations (red numbers)"),
    )
    mp.pcolormesh(
        hlons,
        hlats,
        nam,
        np.arange(10, 101, 5),
        cmap=cm.get_cmap("jet"),
        units=r"$^\circ$F",
    )
    mp.plot_values(df["lon"],
                   df["lat"],
                   df["ob"],
                   fmt="%.0f",
                   color="r",
                   labelbuffer=5)
    mp.plot_values(
        cdf["lon"],
        cdf["lat"],
        cdf["nam"],
        fmt="%.0f",
        textsize=11,
        labelbuffer=5,
    )
    mp.drawcounties()
    routes = "a" if day_ago >= 4 else "ac"
    pqstr = ("plot %s %s0000 soilt_day%s.png isuag_county_4inch_soil.png png"
             ) % (routes, ts.strftime("%Y%m%d"), day_ago)
    mp.postprocess(pqstr=pqstr)
    mp.close()
Example #32
0
 PAJN   | OSO
 PHFO   | OSO
 PACR   | RVF
 PAMC   | SCD
 PAFA   | SCD
 PANC   | SHP
 PTSA   | SSM
 PAER   | STQ
 PALU   | STQ
 PAAQ   | TST
 PHEB   | TST
 PAFG   | ZFP"""

from pyiem.network import Table

nt = Table("WFO")

data = {}
labels = {}
uniq = []
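# Parse "WFO | product" pairs, keeping only offices known to the network table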
for line in text.split("\n"):
    tokens = line.replace(" ", "").split("|")
    wfo = tokens[0][1:]
    if tokens[0][0] == 'P':
        wfo = tokens[0]
    key = "%s" % (tokens[1], )
    if wfo not in nt.sts:
        continue
    # P
    wfo = tokens[0][1:]
    if key not in uniq:
        uniq.append(key)
Example #33
0
def plotter(fdict):
    """ Go """
    pgconn = psycopg2.connect(database='postgis', host='iemdb', user='******')
    cursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)

    station = fdict.get('station', 'DMX')
    limit = fdict.get('limit', 'no')

    nt = NetworkTable('WFO')
    nt.sts['_ALL'] = {'name': 'All Offices'}

    lastdoy = 367
    if limit.lower() == 'yes':
        lastdoy = int(datetime.datetime.today().strftime("%j")) + 1

    if station == '_ALL':
        cursor.execute("""
            with counts as (
                select extract(year from issue) as yr,
                extract(doy from issue) as doy, count(*) from sbw
                where status = 'NEW' and phenomena in ('SV', 'TO')
                and significance = 'W' and issue > '2003-01-01'
                and extract(doy from issue) < %s
                GROUP by yr, doy)

            SELECT yr, doy, sum(count) OVER (PARTITION by yr ORDER by doy ASC)
            from counts ORDER by yr ASC, doy ASC
          """, (lastdoy, ))

    else:
        cursor.execute("""
            with counts as (
                select extract(year from issue) as yr,
                extract(doy from issue) as doy, count(*) from sbw
                where status = 'NEW' and phenomena in ('SV', 'TO')
                and significance = 'W' and wfo = %s and issue > '2003-01-01'
                and extract(doy from issue) < %s
                GROUP by yr, doy)

            SELECT yr, doy, sum(count) OVER (PARTITION by yr ORDER by doy ASC)
            from counts ORDER by yr ASC, doy ASC
          """, (station, lastdoy))

    data = {}
    for yr in range(2003, datetime.datetime.now().year + 1):
        data[yr] = {'doy': [], 'counts': []}
    for row in cursor:
        data[row[0]]['doy'].append(row[1])
        data[row[0]]['counts'].append(row[2])

    (fig, ax) = plt.subplots(1, 1)
    ann = []
    for yr in range(2003, datetime.datetime.now().year + 1):
        if len(data[yr]['doy']) < 2:
            continue
        l = ax.plot(data[yr]['doy'], data[yr]['counts'], lw=2,
                    label="%s (%s)" % (str(yr), data[yr]['counts'][-1]))
        ann.append(
            ax.text(data[yr]['doy'][-1]+1, data[yr]['counts'][-1],
                    "%s" % (yr,), color='w', va='center',
                    fontsize=10, bbox=dict(facecolor=l[0].get_color(),
                                           edgecolor=l[0].get_color()))
            )

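    # Nudge overlapping year labels left until their bounding boxes are clear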
    mask = np.zeros(fig.canvas.get_width_height(), bool)
    fig.canvas.draw()

    attempts = 10
    while len(ann) > 0 and attempts > 0:
        attempts -= 1
        removals = []
        for a in ann:
            bbox = a.get_window_extent()
            x0 = int(bbox.x0)
            x1 = int(math.ceil(bbox.x1))
            y0 = int(bbox.y0)
            y1 = int(math.ceil(bbox.y1))

            s = np.s_[x0:x1+1, y0:y1+1]
            if np.any(mask[s]):
                a.set_position([a._x-int(lastdoy/14), a._y])
            else:
                mask[s] = True
                removals.append(a)
        for rm in removals:
            ann.remove(rm)

    ax.legend(loc=2, ncol=2, fontsize=10)
    ax.set_xlim(1, 367)
    ax.set_xticks((1, 32, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335, 365))
    ax.set_xticklabels(calendar.month_abbr[1:])
    ax.grid(True)
    ax.set_ylabel("Accumulated Count")
    ax.set_title(("NWS %s\nSevere Thunderstorm + Tornado Warning Count"
                  ) % (nt.sts[station]['name'],))
    ax.set_xlim(0, lastdoy)
    if lastdoy < 367:
        ax.set_xlabel(("thru approximately %s"
                       ) % (datetime.date.today().strftime("%-d %b"), ))

    return fig
Example #34
0
def plotter(fdict):
    """ Go """
    import matplotlib
    matplotlib.use('agg')
    import matplotlib.pyplot as plt
    import matplotlib.colors as mpcolors
    import matplotlib.patheffects as PathEffects
    pgconn = psycopg2.connect(database='postgis', host='iemdb', user='******')
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['station']
    phenomena = ctx['phenomena']
    significance = ctx['significance']

    nt = NetworkTable('WFO')
    nt.sts['_ALL'] = {'name': 'All Offices'}

    wfo_limiter = (" and wfo = '%s' "
                   ) % (station if len(station) == 3 else station[1:],)
    if station == '_ALL':
        wfo_limiter = ''

    df = read_sql("""
        with data as (
            SELECT distinct extract(year from issue) as yr2,
            min(issue) as i, wfo, eventid
            from warnings where phenomena = %s and significance = %s
            """ + wfo_limiter + """ and issue is not null
            GROUP by yr2, wfo, eventid)

        SELECT extract(year from i) as yr, extract(month from i) as mo,
        count(*) from data GROUP by yr, mo ORDER by yr, mo ASC
      """, pgconn, params=(phenomena, significance), index_col=None)
    if len(df.index) == 0:
        return("Sorry, no data found!")
    (fig, ax) = plt.subplots(1, 1, figsize=(8, 8))

    minyear = df['yr'].min()
    maxyear = df['yr'].max()
    data = np.zeros((int(maxyear - minyear + 1), 12))
    for _, row in df.iterrows():
        data[int(row['yr'] - minyear), int(row['mo'] - 1)] = row['count']
        txt = ax.text(row['mo'], row['yr'], "%.0f" % (row['count'],),
                      va='center', ha='center', color='white')
        txt.set_path_effects([PathEffects.withStroke(linewidth=2,
                                                     foreground="k")])
    cmap = plt.get_cmap('jet')
    cmap.set_under('white')
    maxval = max([df['count'].max(), 11])
    bounds = np.linspace(1, maxval, 10, dtype='i')
    norm = mpcolors.BoundaryNorm(bounds, cmap.N)
    res = ax.imshow(data, extent=[0.5, 12.5, maxyear + 0.5, minyear - 0.5],
                    interpolation='nearest', aspect='auto', norm=norm,
                    cmap=cmap)
    fig.colorbar(res, label='count')
    ax.grid(True)
    ax.set_xticks(range(1, 13))
    ax.set_xticklabels(calendar.month_abbr[1:])
    ax.set_title(("NWS %s\n%s %s (%s.%s) Issued by Year,Month"
                  ) % (nt.sts[station]['name'], vtec._phenDict[phenomena],
                       vtec._sigDict[significance], phenomena, significance))

    return fig, df
Example #35
0
import datetime
import subprocess
import tempfile
import calendar

import psycopg2.extras
import pytz

from pyiem.network import Table as NetworkTable

utc = datetime.datetime.utcnow()
utc = utc.replace(tzinfo=pytz.timezone("UTC"))
tstr = utc.strftime("%Y%m%d%H%M")

now = utc.astimezone(pytz.timezone("America/Chicago"))

IEM = psycopg2.connect(database='iem', host='iemdb', user='******')
icursor = IEM.cursor(cursor_factory=psycopg2.extras.DictCursor)

st = NetworkTable(['KCCI', 'KIMT'])

st.sts["SMAI4"]["plot_name"] = "M-town"
st.sts["SBZI4"]["plot_name"] = "Zoo"
st.sts["SMSI4"]["plot_name"] = "Barnum"
st.sts["STQI4"]["plot_name"] = "Tama"
st.sts["SBOI4"]["plot_name"] = "Boone"


def altiTxt(d):
    if d == "":
        return "S"
    if d < 0:
        return "F"
    if d > 0:
        return "R"
Example #36
0
def test_workflow(pcursor, icursor):
    """ Test that we can do stuff! """
    sid1 = 'XXX'
    sid2 = 'YYY'
    pnetwork = 'xxxxxx'
    nt = NetworkTable(None)
    nt.sts[sid1] = dict(name='XXX Site Name',
                        network='IA_XXXX',
                        tzname='America/Chicago')
    nt.sts[sid2] = dict(name='YYY Site Name',
                        network='IA_XXXX',
                        tzname='America/Chicago')
    valid = datetime.datetime.utcnow()
    valid = valid.replace(tzinfo=pytz.timezone("UTC"))
    threshold = valid - datetime.timedelta(hours=3)
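    # sid1 has a fresh ob; sid2 is six hours old, beyond the 3 hour threshold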
    obs = {
        sid1: {
            'valid': valid
        },
        sid2: {
            'valid': valid - datetime.timedelta(hours=6)
        }
    }
    # Create dummy iem_site_contacts
    pcursor.execute(
        """
        INSERT into iem_site_contacts
        (portfolio, s_mid, email) VALUES (%s, %s, %s)
    """, (pnetwork, sid1, 'akrherz@localhost'))
    pcursor.execute(
        """
        INSERT into iem_site_contacts
        (portfolio, s_mid, email) VALUES (%s, %s, %s)
    """, (pnetwork, sid2, 'root@localhost'))
    # Create some dummy tickets
    pcursor.execute(
        """
        INSERT into tt_base (portfolio, s_mid, subject,
        status, author) VALUES (%s, %s, %s, %s, %s) RETURNING id
    """, (pnetwork, sid1, 'FIXME PLEASE OPEN', 'OPEN', 'mesonet'))
    pcursor.execute(
        """
        INSERT into tt_base (portfolio, s_mid, subject,
        status, author) VALUES (%s, %s, %s, %s, %s) RETURNING id
    """, (pnetwork, sid1, 'FIXME PLEASE CLOSED', 'CLOSED', 'mesonet'))
    tracker = TrackerEngine(icursor, pcursor)
    tracker.process_network(obs, pnetwork, nt, threshold)
    tracker.send_emails(really_send=False)
    assert len(tracker.emails) == 1

    tracker.emails = {}
    obs[sid1]['valid'] = valid - datetime.timedelta(hours=6)
    obs[sid2]['valid'] = valid
    tracker.process_network(obs, pnetwork, nt, threshold)
    tracker.send_emails(really_send=False)
    assert len(tracker.emails) == 2

    tracker.emails = {}
    obs[sid1]['valid'] = valid - datetime.timedelta(hours=6)
    obs[sid2]['valid'] = valid
    tracker.process_network(obs, pnetwork, nt, threshold)
    tracker.send_emails(really_send=False)
    assert not tracker.emails
Example #37
0
def plotter(fdict):
    """ Go """
    import matplotlib
    matplotlib.use('agg')
    import matplotlib.pyplot as plt
    pgconn = psycopg2.connect(database='postgis', host='iemdb', user='******')
    cursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)

    station = fdict.get('station', 'DMX')
    limit = fdict.get('limit', 'no')
    combo = fdict.get('c', 'svrtor')
    phenomena = fdict.get('phenomena', 'TO')
    significance = fdict.get('significance', 'W')

    nt = NetworkTable('WFO')
    nt.sts['_ALL'] = {'name': 'All Offices'}

    lastdoy = 367
    if limit.lower() == 'yes':
        lastdoy = int(datetime.datetime.today().strftime("%j")) + 1

    if combo == 'svrtor':
        if station == '_ALL':
            cursor.execute(
                """
                with counts as (
                    select extract(year from issue) as yr,
                    extract(doy from issue) as doy, count(*) from sbw
                    where status = 'NEW' and phenomena in ('SV', 'TO')
                    and significance = 'W' and issue > '2003-01-01'
                    and extract(doy from issue) < %s
                    GROUP by yr, doy)

                SELECT yr, doy, sum(count) OVER (PARTITION by yr
                ORDER by doy ASC)
                from counts ORDER by yr ASC, doy ASC
              """, (lastdoy, ))

        else:
            cursor.execute(
                """
                with counts as (
                    select extract(year from issue) as yr,
                    extract(doy from issue) as doy, count(*) from sbw
                    where status = 'NEW' and phenomena in ('SV', 'TO')
                    and significance = 'W' and wfo = %s
                    and issue > '2003-01-01'
                    and extract(doy from issue) < %s
                    GROUP by yr, doy)

                SELECT yr, doy, sum(count) OVER (PARTITION by yr
                ORDER by doy ASC)
                from counts ORDER by yr ASC, doy ASC
              """, (station, lastdoy))
    else:
        if station == '_ALL':
            cursor.execute(
                """
            WITH data as (
                SELECT extract(year from issue) as yr,
                issue, eventid, wfo from warnings WHERE
                phenomena = %s and significance = %s
                and extract(doy from issue) < %s
                and issue > '2003-01-01'),
            agg1 as (
                SELECT yr, min(issue) as min_issue, eventid, wfo from data
                GROUP by yr, eventid, wfo),
            agg2 as (
                SELECT yr, extract(doy from min_issue) as doy, count(*)
                from agg1 GROUP by yr, doy)
            SELECT yr, doy, sum(count) OVER (PARTITION by yr ORDER by doy ASC)
            from agg2 ORDER by yr ASC, doy ASC
            """, (phenomena, significance, lastdoy))
        else:
            cursor.execute(
                """
            WITH data as (
                SELECT extract(year from issue) as yr,
                issue, eventid, wfo from warnings WHERE
                phenomena = %s and significance = %s and wfo = %s
                and extract(doy from issue) < %s
                and issue > '2003-01-01'),
            agg1 as (
                SELECT yr, min(issue) as min_issue, eventid, wfo from data
                GROUP by yr, eventid, wfo),
            agg2 as (
                SELECT yr, extract(doy from min_issue) as doy, count(*)
                from agg1 GROUP by yr, doy)
            SELECT yr, doy, sum(count) OVER (PARTITION by yr ORDER by doy ASC)
            from agg2 ORDER by yr ASC, doy ASC
            """, (phenomena, significance, station, lastdoy))

    data = {}
    for yr in range(2003, datetime.datetime.now().year + 1):
        data[yr] = {'doy': [], 'counts': []}
    rows = []
    for row in cursor:
        data[row[0]]['doy'].append(row[1])
        data[row[0]]['counts'].append(row[2])
        rows.append(dict(year=row[0], day_of_year=row[1], count=row[2]))
    # append a final point at lastdoy so each year's trace extends to the end
    for yr in range(2003, datetime.datetime.now().year):
        if len(data[yr]['doy']) == 0 or data[yr]['doy'][-1] >= lastdoy:
            continue
        data[yr]['doy'].append(lastdoy)
        data[yr]['counts'].append(data[yr]['counts'][-1])
    if len(data[datetime.datetime.now().year]['doy']) > 0:
        data[datetime.datetime.now().year]['doy'].append(
            int(datetime.datetime.today().strftime("%j")) + 1)
        data[datetime.datetime.now().year]['counts'].append(
            data[datetime.datetime.now().year]['counts'][-1])
    df = pd.DataFrame(rows)

    (fig, ax) = plt.subplots(1, 1)
    ann = []
    for yr in range(2003, datetime.datetime.now().year + 1):
        if len(data[yr]['doy']) < 2:
            continue
        l = ax.plot(data[yr]['doy'],
                    data[yr]['counts'],
                    lw=2,
                    label="%s (%s)" % (str(yr), data[yr]['counts'][-1]),
                    drawstyle='steps-post')
        ann.append(
            ax.text(data[yr]['doy'][-1] + 1,
                    data[yr]['counts'][-1],
                    "%s" % (yr, ),
                    color='w',
                    va='center',
                    fontsize=10,
                    bbox=dict(facecolor=l[0].get_color(),
                              edgecolor=l[0].get_color())))

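    # Shift colliding year labels leftward until their boxes no longer overlap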
    mask = np.zeros(fig.canvas.get_width_height(), bool)
    fig.canvas.draw()

    attempts = 10
    while len(ann) > 0 and attempts > 0:
        attempts -= 1
        removals = []
        for a in ann:
            bbox = a.get_window_extent()
            x0 = int(bbox.x0)
            x1 = int(math.ceil(bbox.x1))
            y0 = int(bbox.y0)
            y1 = int(math.ceil(bbox.y1))

            s = np.s_[x0:x1 + 1, y0:y1 + 1]
            if np.any(mask[s]):
                a.set_position([a._x - int(lastdoy / 14), a._y])
            else:
                mask[s] = True
                removals.append(a)
        for rm in removals:
            ann.remove(rm)

    ax.legend(loc=2, ncol=2, fontsize=10)
    ax.set_xlim(1, 367)
    ax.set_xticks((1, 32, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335, 365))
    ax.set_xticklabels(calendar.month_abbr[1:])
    ax.grid(True)
    ax.set_ylabel("Accumulated Count")
    title = "%s %s" % (vtec._phenDict[phenomena], vtec._sigDict[significance])
    if combo == 'svrtor':
        title = "Severe Thunderstorm + Tornado Warning"
    ax.set_title(("NWS %s\n %s Count") % (nt.sts[station]['name'], title))
    ax.set_xlim(0, lastdoy)
    if lastdoy < 367:
        ax.set_xlabel(("thru approximately %s") %
                      (datetime.date.today().strftime("%-d %b"), ))

    return fig, df
Example #38
0
"""
 Generate a map of Number of days with precip
"""

import sys
from pyiem.plot import MapPlot
import datetime

from pyiem.network import Table as NetworkTable
import psycopg2.extras
now = datetime.datetime.now()
nt = NetworkTable("IACLIMATE")
nt.sts["IA0200"]["lon"] = -93.4
nt.sts["IA5992"]["lat"] = 41.65
COOP = psycopg2.connect(database='coop', host='iemdb', user='******')
ccursor = COOP.cursor(cursor_factory=psycopg2.extras.DictCursor)


def runYear(year):
    # Grab the data
    sql = """SELECT station,
        sum(case when precip >= 0.01 then 1 else 0 end) as days, max(day)
        from alldata_ia WHERE year = %s and substr(station,3,1) != 'C'
        and station != 'IA0000' GROUP by station""" % (year, )

    lats = []
    lons = []
    vals = []
    labels = []
    ccursor.execute(sql)
    for row in ccursor:
Example #39
0
def main(argv):
    """Go Main Go"""
    nt = Table("ISUSM")
    qdict = loadqc()

    idbconn = get_dbconn('isuag', user='******')
    icursor = idbconn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    pdbconn = get_dbconn('postgis', user='******')
    pcursor = pdbconn.cursor(cursor_factory=psycopg2.extras.DictCursor)

    day_ago = int(argv[1])
    ts = datetime.datetime.now() - datetime.timedelta(days=day_ago)

    # Query out the data
    soil_obs = []
    lats = []
    lons = []
    icursor.execute("""
        SELECT station, tsoil_c_avg_qc from sm_daily
        where valid = '%s' and tsoil_c_avg_qc > -40
        and station not in ('AHTI4', 'FRUI4')
    """ % (ts.strftime("%Y-%m-%d"), ))
    for row in icursor:
        stid = row['station']
        if qdict.get(stid, {}).get('soil4', False):
            # print '%s was QCd out' % (stid,)
            continue
        soil_obs.append(temperature(row['tsoil_c_avg_qc'], 'C').value('F'))
        lats.append(nt.sts[stid]['lat'])
        lons.append(nt.sts[stid]['lon'])

    if len(lats) < 5:
        print(("isuag/fancy_4inch found %s obs for %s") %
              (len(lats), ts.strftime("%Y-%m-%d")))
        return

    # Grid it
    # numxout = 40
    # numyout = 40
    # xmin = min(lons) - 2.
    # ymin = min(lats) - 2.
    # xmax = max(lons) + 2.
    # ymax = max(lats) + 2.
    # xc = (xmax-xmin)/(numxout-1)
    # yc = (ymax-ymin)/(numyout-1)

    # xo = xmin + xc * np.arange(0, numxout)
    # yo = ymin + yc * np.arange(0, numyout)

    # analysis = griddata((lons, lats), soil_obs, (xo, yo) )
    # rbfi = Rbf(lons, lats, soil_obs, function='cubic')
    # analysis = rbfi(xo, yo)
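    # Nearest-neighbor interpolator of station obs, sampled later at centroids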
    nn = NearestNDInterpolator((lons, lats), np.array(soil_obs))
    # analysis = nn(xo, yo)

    # Query out centroids of counties...
    pcursor.execute("""SELECT ST_x(ST_centroid(the_geom)) as lon,
        ST_y(ST_centroid(the_geom)) as lat
        from uscounties WHERE state_name = 'Iowa'
    """)
    clons = []
    clats = []
    for row in pcursor:
        clats.append(row['lat'])
        clons.append(row['lon'])

    cobs = nn(clons, clats)

    mp = MapPlot(sector='iowa',
                 title=("Iowa Average 4 inch Soil Temperatures %s") %
                 (ts.strftime("%b %d %Y"), ),
                 subtitle=("Based on gridded analysis (black numbers) of "
                           "ISUSM network observations (red numbers)"))
    mp.contourf(clons,
                clats,
                cobs,
                np.arange(10, 101, 5),
                cmap=cm.get_cmap('jet'),
                units=r'$^\circ$F')
    mp.plot_values(lons, lats, soil_obs, fmt='%.0f', color='r', labelbuffer=5)
    mp.plot_values(clons, clats, cobs, fmt='%.0f', textsize=11, labelbuffer=5)
    # for lo, la, ob in zip(clons, clats, cobs):
    #    xi, yi = m.map(lo, la)
    #    txt = m.ax.text(xi, yi, "%.0f" % (ob,))
    mp.drawcounties()
    routes = "a" if day_ago >= 4 else "ac"
    pqstr = ("plot %s %s0000 soilt_day%s.png isuag_county_4inch_soil.png png"
             ) % (routes, ts.strftime("%Y%m%d"), day_ago)
    mp.postprocess(pqstr=pqstr)
    mp.close()
Example #40
0
def plotter(fdict):
    """ Go """
    import matplotlib
    matplotlib.use('agg')
    import matplotlib.pyplot as plt
    pgconn = psycopg2.connect(database='postgis', host='iemdb', user='******')

    station = fdict.get('station', 'DMX')[:4]

    nt = NetworkTable('WFO')
    nt.sts['_ALL'] = {'name': 'All Offices'}

    fig = plt.figure(figsize=(8, 14 if station != '_ALL' else 21))
    ax = [None, None]
    ax[0] = plt.axes([0.1, 0.75, 0.85, 0.2])
    ax[1] = plt.axes([0.1, 0.05, 0.85, 0.65])

    if station == '_ALL':
        df = read_sql("""
            SELECT distinct extract(year from issue) as year,
                phenomena, significance from warnings WHERE
                phenomena is not null and significance is not null and
                issue > '2005-01-01' and issue is not null
            """,
                      pgconn,
                      index_col=None)
    else:
        df = read_sql("""
            SELECT distinct extract(year from issue) as year,
            phenomena, significance from warnings WHERE
            wfo = %s and phenomena is not null and significance is not null
            and issue > '2005-01-01' and issue is not null
            """,
                      pgconn,
                      params=(station, ),
                      index_col=None)

    df['wfo'] = station
    df['year'] = df['year'].astype('i')
    gdf = df.groupby('year').count()

    ax[0].bar(gdf.index.values,
              gdf['wfo'],
              width=0.8,
              fc='b',
              ec='b',
              align='center')
    for yr, row in gdf.iterrows():
        ax[0].text(yr, row['wfo'] + 1, "%s" % (row['wfo'], ), ha='center')
    ax[0].set_title(
        ("[%s] NWS %s\nCount of Distinct VTEC Phenomena/"
         "Significance - %i to %i") %
        (station, nt.sts[station]['name'], df['year'].min(), df['year'].max()))
    ax[0].grid()
    ax[0].set_ylabel("Count")
    ax[0].set_xlim(gdf.index.values.min() - 0.5, gdf.index.values.max() + 0.5)

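    # Give each phenomena.significance pair a row; label each year it appeared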
    pos = {}
    i = 1
    df.sort_values(['phenomena', 'significance'], inplace=True)
    for _, row in df.iterrows():
        key = "%s.%s" % (row['phenomena'], row['significance'])
        if key not in pos:
            pos[key] = i
            i += 1
        ax[1].text(row['year'],
                   pos[key],
                   key,
                   ha='center',
                   va='center',
                   fontsize=10,
                   bbox=dict(color='white'))

    ax[1].set_title("VTEC <Phenomena.Significance> Issued by Year")
    ax[1].set_ylim(0, i)
    ax[1].grid(True)
    ax[1].set_xlim(gdf.index.values.min() - 0.5, gdf.index.values.max() + 0.5)
    return fig, df
Example #41
0
import datetime
import sys

# thirdparty
import numpy as np
from scipy.interpolate import NearestNDInterpolator
import matplotlib.cm as cm
import psycopg2.extras

# pyiem
from pyiem.plot import MapPlot
from pyiem.datatypes import temperature
from pyiem.tracker import loadqc
from pyiem.network import Table

nt = Table("ISUSM")
qdict = loadqc()

ISUAG = psycopg2.connect(database='isuag', host='iemdb', user='******')
icursor = ISUAG.cursor(cursor_factory=psycopg2.extras.DictCursor)
POSTGIS = psycopg2.connect(database='postgis', host='iemdb', user='******')
pcursor = POSTGIS.cursor(cursor_factory=psycopg2.extras.DictCursor)

day_ago = int(sys.argv[1])
ts = datetime.datetime.now() - datetime.timedelta(days=day_ago)

# Query out the data
soil_obs = []
lats = []
lons = []
icursor.execute("""