Example #1
File: p173.py Project: tutuhuang/iem
def get_context(fdict):
    """get context for both output types"""
    pgconn = get_dbconn('asos')
    ctx = get_autoplot_context(fdict, get_description())

    station = ctx['zstation']
    ctx['station'] = station
    network = ctx['network']
    units = ctx['units']
    p1 = ctx.get('p1')
    p2 = ctx.get('p2')
    p3 = ctx.get('p3')
    p4 = ctx.get('p4')
    p5 = ctx.get('p5')
    p6 = ctx.get('p6')
    y1 = ctx.get('y1')
    y2 = ctx.get('y2')
    ctx['nt'] = NetworkTable(network)

    ylimiter = ""
    if y1 is not None and y2 is not None:
        ylimiter = (" and extract(year from valid) >= %s and "
                    "extract(year from valid) <= %s ") % (y1, y2)
    else:
        y1 = ctx['nt'].sts[station]['archive_begin'].year
        y2 = datetime.date.today().year

    df = read_sql("""
    WITH obs as (
        SELECT (valid + '10 minutes'::interval) at time zone %s as ts,
        sknt from alldata where station = %s and sknt >= 0 and sknt < 150
        and report_type = 2 """ + ylimiter + """)

    select extract(month from ts)::int as month,
    extract(hour from ts)::int as hour, extract(day from ts)::int as day,
    avg(sknt) as avg_sknt from obs GROUP by month, day, hour
    ORDER by month, day, hour
        """,
                  pgconn,
                  params=(ctx['nt'].sts[station]['tzname'], station),
                  index_col=None)
    # Figure out which mode we are going to do
    if all([a is None for a in [p1, p2, p3, p4, p5, p6]]):
        del df['day']
        df = df.groupby(['month', 'hour']).mean()
        df.reset_index(inplace=True)
        ctx['ncols'] = 6
        ctx['labels'] = calendar.month_abbr
        ctx['subtitle'] = "Monthly Average Wind Speed by Hour"
    else:
        ctx['ncols'] = 3
        df['fake_date'] = pd.to_datetime({
            'year': 2000,
            'month': df['month'],
            'day': df['day']
        })
        df.set_index('fake_date', inplace=True)
        dfs = []
        ctx['labels'] = [None]
        for p in [p1, p2, p3, p4, p5, p6]:
            if p is None:
                continue
            tokens = p.split("-")
            sts = datetime.datetime.strptime("2000" + tokens[0].strip(),
                                             '%Y%m%d')
            ets = datetime.datetime.strptime("2000" + tokens[1].strip(),
                                             '%Y%m%d')
            ldf = df[['hour', 'avg_sknt'
                      ]].loc[sts.date():ets.date()].groupby('hour').mean()
            ldf.reset_index(inplace=True)
            ldf['month'] = len(dfs) + 1
            dfs.append(ldf)
            ctx['labels'].append("%s-%s" %
                                 (sts.strftime("%b%d"), ets.strftime("%b%d")))
            ctx['subtitle'] = "Period Average Wind Speed by Hour"
        df = pd.concat(dfs)

    df['avg_%s' % (units, )] = speed(df['avg_sknt'].values,
                                     'KT').value(units.upper())
    ctx['df'] = df
    ctx['ylabel'] = "Average Wind Speed [%s]" % (UNITS[units], )
    ctx['xlabel'] = ("Hour of Day (timezone: %s)") % (
        ctx['nt'].sts[station]['tzname'], )
    ctx['title'] = ("[%s] %s [%s-%s]") % (
        ctx['station'], ctx['nt'].sts[station]['name'], y1, y2)
    return ctx
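The p1 through p6 inputs in get_context are period strings of the form "MMDD-MMDD", anchored to the year 2000 before the date-indexed frame is sliced. A minimal standalone sketch of that parsing step (the sample period value is made up):

import datetime

def parse_period(period):
    """Split a 'MMDD-MMDD' period string into 2000-anchored dates."""
    start_tok, end_tok = [tok.strip() for tok in period.split("-")]
    sts = datetime.datetime.strptime("2000" + start_tok, "%Y%m%d")
    ets = datetime.datetime.strptime("2000" + end_tok, "%Y%m%d")
    return sts.date(), ets.date()

# Hypothetical period covering mid June through mid August
print(parse_period("0615-0815"))  # (datetime.date(2000, 6, 15), datetime.date(2000, 8, 15))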
Example #2
File: p102.py Project: tutuhuang/iem
def plotter(fdict):
    """ Go """
    import matplotlib
    matplotlib.use('agg')
    import matplotlib.pyplot as plt
    pgconn = util.get_dbconn('postgis')
    ctx = util.get_autoplot_context(fdict, get_description())
    station = ctx['station'][:4]
    syear = ctx['year']
    eyear = ctx['eyear']
    nt = NetworkTable('WFO')
    wfo_limiter = " and wfo = '%s' " % (station if len(station) == 3 else
                                        station[1:], )
    if station == '_ALL':
        wfo_limiter = ''

    df = read_sql(
        """
        select extract(year from valid)::int as yr, upper(source) as src,
        count(*) from lsrs
        where valid > '""" + str(syear) + """-01-01' and
        valid < '""" + str(eyear + 1) + """-01-01' """ + wfo_limiter + """
        GROUP by yr, src
    """, pgconn)
    df['rank'] = df.groupby(['yr'])['count'].rank(ascending=False,
                                                  method='first')
    (fig, ax) = plt.subplots(1, 1, figsize=(8, 6))
    # Do 2006 as left side
    dyear = df[df['yr'] == syear].sort_values(by=['rank'], ascending=True)
    i = 1
    ylabels = []
    for _, row in dyear.iterrows():
        src = row['src']
        ylabels.append(src)
        d = df[df['src'] == src].sort_values(by=['yr'])
        ax.plot(np.array(d['yr']),
                np.array(d['rank']),
                lw=2,
                label=src,
                marker=MARKERS[i % len(MARKERS)])
        i += 1
        if i > 20:
            break
    ax.set_yticks(range(1, len(ylabels) + 1))
    ax.set_yticklabels(["%s %s" % (s, i + 1) for i, s in enumerate(ylabels)])
    ax.set_ylim(0.5, 20.5)

    ax2 = ax.twinx()
    # Do last year as right side
    dyear = df[df['yr'] == eyear].sort_values(by=['rank'], ascending=True)
    i = 0
    y2labels = []
    for _, row in dyear.iterrows():
        i += 1
        if i > 20:
            break
        src = row['src']
        y2labels.append(src)
        if src in ylabels:
            continue
        ylabels.append(src)
        d = df[df['src'] == src].sort_values(by=['yr'])
        ax.plot(np.array(d['yr']),
                np.array(d['rank']),
                lw=2,
                label=src,
                marker=MARKERS[i % len(MARKERS)])

    ax2.set_yticks(range(1, len(y2labels) + 1))
    ax2.set_yticklabels(["%s %s" % (i + 1, s) for i, s in enumerate(y2labels)])
    ax2.set_ylim(0.5, 20.5)

    ax.set_position([0.3, 0.15, 0.4, 0.75])
    ax2.set_position([0.3, 0.15, 0.4, 0.75])
    ax.set_xticks(range(df['yr'].min(), df['yr'].max(), 2))
    for tick in ax.get_xticklabels():
        tick.set_rotation(90)
    ax.grid()

    fig.text(0.15, 0.9, "%s" % (syear, ), fontsize=14, ha='center')
    fig.text(0.85, 0.9, "%s" % (eyear, ), fontsize=14, ha='center')

    fig.text(0.5,
             0.95,
             "NWS %s Local Storm Report Sources Ranks" %
             ("ALL WFOs" if station == '_ALL' else nt.sts[station]['name'], ),
             ha='center')

    return fig, df
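The per-year ranking in this plotter comes from pandas' grouped rank with ties broken by row order (method='first'). A toy illustration of that one call, with invented report counts:

import pandas as pd

df = pd.DataFrame({
    "yr": [2006, 2006, 2006, 2020, 2020],
    "src": ["TRAINED SPOTTER", "PUBLIC", "LAW ENFORCEMENT", "PUBLIC", "COOP OBSERVER"],
    "count": [120, 80, 80, 200, 150],
})
# rank 1 = the source with the most reports within each year
df["rank"] = df.groupby(["yr"])["count"].rank(ascending=False, method="first")
print(df)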
Example #3
File: mywindrose.py Project: trentford/iem
def main():
    """ Query out the CGI variables"""
    form = cgi.FieldStorage()
    try:
        sts, ets = get_times(form)
    except Exception as _:
        send_error(form, "Invalid Times Selected, please try again")
        return

    if "hour1" in form and "hourlimit" in form:
        hours = numpy.array((int(form["hour1"].value), ))
    elif "hour1" in form and "hour2" in form and "hourrangelimit" in form:
        if sts.hour > ets.hour:  # over midnight
            hours = numpy.arange(sts.hour, 24)
            hours = numpy.append(hours, numpy.arange(0, ets.hour))
        else:
            if sts.hour == ets.hour:
                ets += datetime.timedelta(hours=1)
            hours = numpy.arange(sts.hour, ets.hour)
    else:
        hours = numpy.arange(0, 24)

    if "units" in form and form["units"].value in ['mph', 'kts', 'mps', 'kph']:
        units = form["units"].value
    else:
        units = "mph"

    if "month1" in form and "monthlimit" in form:
        months = numpy.array((int(form["month1"].value), ))
    else:
        months = numpy.arange(1, 13)

    database = 'asos'
    if form["network"].value in ('KCCI', 'KELO', 'KIMT'):
        database = 'snet'
    elif form["network"].value in ('IA_RWIS', ):
        database = 'rwis'
    elif form["network"].value in ('ISUSM', ):
        database = 'isuag'
    elif form["network"].value in ('RAOB', ):
        database = 'postgis'
    elif form["network"].value.find("_DCP") > 0:
        database = 'hads'

    try:
        nsector = int(form['nsector'].value)
    except Exception as _:
        nsector = 36

    rmax = None
    if "staticrange" in form and form["staticrange"].value == "1":
        rmax = 100

    nt = NetworkTable(form['network'].value)
    bins = []
    if 'bins' in form:
        bins = [float(v) for v in form.getfirst('bins').split(",")]
        bins.insert(0, 0)
    res = windrose(form["station"].value,
                   database=database,
                   sts=sts,
                   ets=ets,
                   months=months,
                   hours=hours,
                   units=units,
                   nsector=nsector,
                   justdata=("justdata" in form),
                   rmax=rmax,
                   sname=nt.sts[form['station'].value]['name'],
                   level=form.getfirst('level', None),
                   bins=bins)
    if 'justdata' in form:
        # We want text
        ssw("Content-type: text/plain\n\n")
        ssw(res)
    else:
        fmt = form.getfirst('fmt', 'png')
        if fmt == 'png':
            ct = "image/png"
        elif fmt == 'pdf':
            ct = "application/pdf"
        elif fmt == 'svg':
            ct = "image/svg+xml"
        else:
            ssw("Content-type: text/plain\n\n")
            ssw("Invalid fmt set")
            sys.exit(0)
        ssw("Content-type: %s\n\n" % (ct, ))
        res.savefig(getattr(sys.stdout, 'buffer', sys.stdout),
                    format=fmt,
                    dpi=int(form.getfirst('dpi', 100)))
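The hour-range branch in main() has to cope with windows that wrap past midnight. A self-contained sketch of that logic with plain integers (the helper name is mine, not part of mywindrose.py):

import numpy

def hour_window(start_hour, end_hour):
    """Hours covered by [start_hour, end_hour), wrapping past midnight."""
    if start_hour > end_hour:  # over midnight
        hours = numpy.arange(start_hour, 24)
        return numpy.append(hours, numpy.arange(0, end_hour))
    if start_hour == end_hour:
        end_hour += 1
    return numpy.arange(start_hour, end_hour)

print(hour_window(22, 3))  # [22 23  0  1  2]
print(hour_window(9, 9))   # [9]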
Example #4
def plotter(fdict):
    """ Go """
    import matplotlib
    matplotlib.use('agg')
    import matplotlib.pyplot as plt
    pgconn = psycopg2.connect(database='asos', host='iemdb', user='******')

    station = fdict.get('zstation', 'AMW')
    network = fdict.get('network', 'IA_ASOS')
    nt = NetworkTable(network)

    df = read_sql("""
    WITH obs as (
        SELECT date_trunc('hour', valid) as t, avg(tmpf) as avgt from alldata
        WHERE station = %s and p01i >= 0.01 and tmpf is not null
        GROUP by t
    )

    SELECT extract(week from t) as week, avgt from obs
    """,
                  pgconn,
                  params=(station, ),
                  index_col=None)

    sts = datetime.datetime(2012, 1, 1)
    xticks = []
    for i in range(1, 13):
        ts = sts.replace(month=i)
        xticks.append(int(ts.strftime("%j")))

    (fig, ax) = plt.subplots(1, 1)

    bins = np.arange(df['avgt'].min() - 5, df['avgt'].max() + 5, 2)
    H, xedges, yedges = np.histogram2d(df['week'].values, df['avgt'].values,
                                       [range(0, 54), bins])
    rows = []
    for i, x in enumerate(xedges[:-1]):
        for j, y in enumerate(yedges[:-1]):
            rows.append(dict(tmpf=y, week=x, count=H[i, j]))
    resdf = pd.DataFrame(rows)

    years = datetime.date.today().year - nt.sts[station]['archive_begin'].year
    H = np.ma.array(H) / float(years)
    H.mask = np.ma.where(H < 0.1, True, False)
    res = ax.pcolormesh((xedges - 1) * 7,
                        yedges,
                        H.transpose(),
                        cmap=plt.get_cmap("jet"))
    fig.colorbar(res, label='Hours per week per year')
    ax.set_xticks(xticks)
    ax.set_xticklabels(calendar.month_abbr[1:])
    ax.set_xlim(0, 366)

    y = []
    for i in range(np.shape(H)[0]):
        y.append(np.ma.sum(H[i, :] * (bins[:-1] + 0.5)) / np.ma.sum(H[i, :]))

    ax.plot(xedges[:-1] * 7, y, zorder=3, lw=3, color='w')
    ax.plot(xedges[:-1] * 7, y, zorder=3, lw=1, color='k', label='Average')
    ax.legend(loc=2)

    ax.set_title(
        ("[%s] %s (%s-%s)\n"
         "Temperature Frequency During Precipitation by Week") %
        (station, nt.sts[station]['name'],
         nt.sts[station]['archive_begin'].year, datetime.date.today().year))
    ax.grid(True)
    ax.set_ylabel("Temperature [$^\circ$F]")

    return fig, resdf
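The week-by-temperature grid above is a plain numpy.histogram2d over (week, temperature) pairs, later divided by the archive length to get hours per week per year. A toy version on synthetic data:

import numpy as np

rng = np.random.default_rng(0)
weeks = rng.integers(1, 53, size=1000)   # week of year for each wet hour
temps = rng.normal(50, 15, size=1000)    # hourly temperature [F]

tbins = np.arange(temps.min() - 5, temps.max() + 5, 2)
H, xedges, yedges = np.histogram2d(weeks, temps, [range(0, 54), tbins])

years = 10.0        # hypothetical archive length in years
H = H / years       # hours per week per year
print(H.shape, H.max())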
Example #5
def plotter(fdict):
    """ Go """
    import matplotlib
    matplotlib.use('agg')
    pgconn = psycopg2.connect(database='coop', host='iemdb', user='******')
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['station']

    table = "alldata_%s" % (station[:2], )
    nt = NetworkTable("%sCLIMATE" % (station[:2], ))
    res = ("# IEM Climodat http://mesonet.agron.iastate.edu/climodat/\n"
           "# Report Generated: %s\n"
           "# Climate Record: %s -> %s\n"
           "# Site Information: [%s] %s\n"
           "# Contact Information: Daryl Herzmann "
           "[email protected] 515.294.5978\n") % (
               datetime.date.today().strftime("%d %b %Y"),
               nt.sts[station]['archive_begin'].date(), datetime.date.today(),
               station, nt.sts[station]['name'])
    res += ("# THESE ARE THE HEAT STRESS VARIABLES FOR STATION #  %s\n") % (
        station, )

    s = nt.sts[station]['archive_begin']
    e = datetime.date.today().year + 1

    df = read_sql("""
        SELECT year, month, sum(case when high > 86 then 1 else 0 end) as days,
        sum(case when high > 86 then high - 86 else 0 end) as sdd
        from """ + table + """ WHERE
        station = %s GROUP by year, month
    """,
                  pgconn,
                  params=(station, ),
                  index_col=None)
    sdd = df.pivot('year', 'month', 'sdd')
    days = df.pivot('year', 'month', 'days')
    df = sdd.join(days, lsuffix='sdd', rsuffix='days')

    res += ("             # OF DAYS MAXT >86                     "
            "ACCUMULATED (MAXT - 86 )\n"
            " YEAR   MAY  JUNE  JULY   AUG  SEPT TOTAL      "
            "MAY  JUNE  JULY   AUG  SEPT TOTAL\n")

    yrCnt = 0
    for yr in range(s.year, e):
        yrCnt += 1
        res += ("%5s" % (yr, ))
        total = 0
        for mo in range(5, 10):
            val = df.at[yr, "%sdays" % (mo, )]
            if np.isnan(val):
                res += ("%6s" % ("M", ))
            else:
                res += ("%6i" % (val, ))
                total += val
        res += ("%6i   " % (total, ))
        total = 0
        for mo in range(5, 10):
            val = df.at[yr, "%ssdd" % (mo, )]
            if np.isnan(val):
                res += ("%6s" % ("M", ))
            else:
                res += ("%6i" % (val, ))
                total += val
        res += ("%6i   \n" % (total, ))

    res += (" **************************************************************"
            "************************\n")

    res += ("MEANS")
    tot = 0
    for mo in range(5, 10):
        val = df["%sdays" % (mo, )].mean()
        tot += val
        res += ("%6.1f" % (val, ))
    res += ("%6.1f   " % (tot, ))
    tot = 0
    for mo in range(5, 10):
        val = df["%ssdd" % (mo, )].mean()
        tot += val
        res += ("%6.1f" % (val, ))
    res += ("%6.1f\n" % (tot, ))

    return None, df, res
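The reshaping step pivots the long (year, month) rows into one wide row per year and then joins the two pivots, so the column names become things like '5days' and '5sdd'. A tiny reproduction on fabricated numbers:

import pandas as pd

df = pd.DataFrame({
    "year": [1990, 1990, 1991, 1991],
    "month": [5, 6, 5, 6],
    "days": [3, 7, 1, 9],
    "sdd": [12, 40, 2, 55],
})
sdd = df.pivot(index="year", columns="month", values="sdd")
days = df.pivot(index="year", columns="month", values="days")
wide = sdd.join(days, lsuffix="sdd", rsuffix="days")
print(wide.columns.tolist())   # ['5sdd', '6sdd', '5days', '6days']
print(wide.at[1990, "5days"])  # 3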
Example #6
File: wxc_moon.py Project: Xawwell/iem
"""
 Generate a WXC formatted file with moon conditions for Iowa sites.  I am
 unsure if the TV folks are still using this or not.  It's easy to generate
"""
from __future__ import print_function
import datetime
import os
import subprocess

import ephem
import pytz
from pyiem.network import Table as NetworkTable

nt = NetworkTable(("AWOS", "IA_ASOS"))


def figurePhase(p1, p2):
    """ Return a string of the moon phase! """
    if p2 > p1:  # Waning!
        if p1 < 0.1:
            return "New Moon"
        if p1 < 0.4:
            return "Waning Crescent"
        if p1 < 0.6:
            return "Last Quarter"
        if p1 < 0.9:
            return "Waning_Gibbous"
        else:
            return "Full Moon"

    else:  # Waxing!
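The listing cuts off before the waxing branch; by symmetry with the waning side it presumably steps through the same thresholds up to "Full Moon". A hedged sketch of what that branch plausibly looks like, plus a call with plain illumination fractions (the phase strings are my guess, not the original wxc_moon.py):

def waxing_phase(p1):
    """Guessed mirror of the truncated waxing branch (not the original code)."""
    if p1 < 0.1:
        return "New Moon"
    if p1 < 0.4:
        return "Waxing Crescent"
    if p1 < 0.6:
        return "First Quarter"
    if p1 < 0.9:
        return "Waxing Gibbous"
    return "Full Moon"

# figurePhase() compares two successive illumination fractions; with p2 <= p1
# it takes the waxing side, which this sketch would classify as:
print(waxing_phase(0.55))  # First Quarter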
Example #7
File: coop.py Project: trentford/iem
def do_apsim(ctx):
    """
    [weather.met.weather]
    latitude = 42.1 (DECIMAL DEGREES)
    tav = 9.325084 (oC) ! annual average ambient temperature
    amp = 29.57153 (oC) ! annual amplitude in mean monthly temperature
    year          day           radn          maxt          mint          rain
    ()            ()            (MJ/m^2)      (oC)          (oC)          (mm)
     1986          1             7.38585       0.8938889    -7.295556      0
     """
    if len(ctx['stations']) > 1:
        ssw(("ERROR: APSIM output is only "
             "permitted for one station at a time."))
        return

    dbconn = get_database()
    cursor = dbconn.cursor(cursor_factory=psycopg2.extras.DictCursor)

    station = ctx['stations'][0]
    table = get_tablename(ctx['stations'])
    network = "%sCLIMATE" % (station[:2], )
    nt = NetworkTable(network)

    thisyear = datetime.datetime.now().year
    extra = {}
    if ctx['scenario'] == 'yes':
        sts = datetime.datetime(int(ctx['scenario_year']), 1, 1)
        ets = datetime.datetime(int(ctx['scenario_year']), 12, 31)
        febtest = datetime.date(thisyear, 3, 1) - datetime.timedelta(days=1)
        sdaylimit = ''
        if febtest.day == 28:
            sdaylimit = " and sday != '0229'"
        cursor.execute(
            """
            SELECT day, high, low, precip, 1 as doy,
            coalesce(narr_srad, merra_srad, hrrr_srad) as srad
            from """ + table + """ WHERE station = %s
            and day >= %s and day <= %s """ + sdaylimit + """
            """, (ctx['stations'][0], sts, ets))
        for row in cursor:
            ts = row[0].replace(year=thisyear)
            extra[ts] = row
            extra[ts]['doy'] = int(ts.strftime("%j"))
        if febtest not in extra:
            feb28 = datetime.date(thisyear, 2, 28)
            extra[febtest] = extra[feb28]

    ssw("! Iowa Environmental Mesonet -- NWS Cooperative Data\n")
    ssw("! Created: %s UTC\n" %
        (datetime.datetime.utcnow().strftime("%d %b %Y %H:%M:%S"), ))
    ssw("! Contact: daryl herzmann [email protected] 515-294-5978\n")
    ssw("! Station: %s %s\n" % (station, nt.sts[station]['name']))
    ssw("! Data Period: %s - %s\n" % (ctx['sts'], ctx['ets']))
    if ctx['scenario'] == 'yes':
        ssw("! !SCENARIO DATA! inserted after: %s replicating year: %s\n" %
            (ctx['ets'], ctx['scenario_year']))

    ssw("[weather.met.weather]\n")
    ssw("latitude = %.1f (DECIMAL DEGREES)\n" % (nt.sts[station]["lat"], ))

    # Compute average temperature!
    cursor.execute(
        """
        SELECT avg((high+low)/2) as avgt from climate51 WHERE station = %s
        """, (station, ))
    row = cursor.fetchone()
    ssw("tav = %.3f (oC) ! annual average ambient temperature\n" %
        (temperature(row['avgt'], 'F').value('C'), ))

    # Compute the annual amplitude in temperature
    cursor.execute(
        """
        select max(avg) as h, min(avg) as l from
            (SELECT extract(month from valid) as month, avg((high+low)/2.)
             from climate51
             WHERE station = %s GROUP by month) as foo
             """, (station, ))
    row = cursor.fetchone()
    ssw(("amp = %.3f (oC) ! annual amplitude in mean monthly temperature\n") %
        ((temperature(row['h'], 'F').value('C') -
          temperature(row['l'], 'F').value('C'))))

    ssw("""year        day       radn       maxt       mint      rain
  ()         ()   (MJ/m^2)       (oC)       (oC)       (mm)\n""")

    if ctx.get('hayhoe_model') is not None:
        cursor.execute(
            """
            SELECT day, high, low, precip,
            extract(doy from day) as doy,
            0 as srad
            from hayhoe_daily WHERE station = %s
            and day >= %s and scenario = %s and model = %s
            ORDER by day ASC
        """, (ctx['stations'][0], ctx['sts'], ctx['hayhoe_scenario'],
              ctx['hayhoe_model']))
    else:
        cursor.execute(
            """
            SELECT day, high, low, precip,
            extract(doy from day) as doy,
            coalesce(narr_srad, merra_srad, hrrr_srad) as srad
            from """ + table + """
            WHERE station = %s and
            day >= %s and day <= %s ORDER by day ASC
            """, (station, ctx['sts'], ctx['ets']))
    for row in cursor:
        srad = -99 if row['srad'] is None else row['srad']
        ssw(("%4s %10.0f %10.3f %10.1f %10.1f %10.2f\n") %
            (row["day"].year, int(
                row["doy"]), srad, temperature(row["high"], 'F').value('C'),
             temperature(row["low"], 'F').value('C'), row["precip"] * 25.4))

    if extra:
        dec31 = datetime.date(thisyear, 12, 31)
        now = row['day']
        while now <= dec31:
            row = extra[now]
            srad = -99 if row['srad'] is None else row['srad']
            ssw(("%4s %10.0f %10.3f %10.1f %10.1f %10.2f\n") %
                (now.year, int(row['doy']), srad, temperature(
                    row["high"], 'F').value('C'), temperature(
                        row["low"], 'F').value('C'), row["precip"] * 25.4))
            now += datetime.timedelta(days=1)
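The tav and amp header lines come from Fahrenheit monthly means converted with pyiem.datatypes.temperature: tav is an annual mean and amp is the spread between the warmest and coldest monthly mean. A small sketch of that arithmetic on made-up monthly values, with no database involved:

from pyiem.datatypes import temperature

# Hypothetical monthly mean temperatures [F], Jan..Dec
monthly_f = [22, 27, 39, 52, 63, 72, 76, 74, 66, 54, 39, 26]

tav_c = temperature(sum(monthly_f) / 12.0, 'F').value('C')
amp_c = (temperature(max(monthly_f), 'F').value('C') -
         temperature(min(monthly_f), 'F').value('C'))
print("tav = %.3f (oC) ! annual average ambient temperature" % (tav_c, ))
print("amp = %.3f (oC) ! annual amplitude in mean monthly temperature" % (amp_c, ))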
Example #8
def plotter(fdict):
    """ Go """
    import matplotlib
    matplotlib.use('agg')
    import matplotlib.pyplot as plt
    pgconn = psycopg2.connect(database='coop', host='iemdb', user='******')
    ctx = get_autoplot_context(fdict, get_description())
    station1 = ctx['station1'].upper()
    station2 = ctx['station2'].upper()
    table1 = "alldata_%s" % (station1[:2], )
    table2 = "alldata_%s" % (station2[:2], )
    nt1 = NetworkTable("%sCLIMATE" % (station1[:2],))
    nt2 = NetworkTable("%sCLIMATE" % (station2[:2],))
    varname = ctx['var']

    df = read_sql("""WITH one as (
      SELECT year, sum(precip) as one_total_precip,
      avg(high) as one_avg_high, avg(low) as one_avg_low,
      avg((high+low)/2.) as one_avg_temp, max(high) as one_max_high,
      min(low) as one_min_low from """ + table1 + """ WHERE
      station = %s GROUP by year),
    two as (
      SELECT year, sum(precip) as two_total_precip,
      avg(high) as two_avg_high, avg(low) as two_avg_low,
      avg((high+low)/2.) as two_avg_temp, max(high) as two_max_high,
      min(low) as two_min_low from """ + table2 + """ WHERE
      station = %s GROUP by year
    )

    SELECT o.year, one_total_precip, one_avg_high, one_avg_low,
    one_avg_temp, one_max_high, one_min_low, two_total_precip, two_avg_high,
    two_avg_low, two_avg_temp, two_max_high, two_min_low from one o JOIN two t
    on (o.year = t.year) ORDER by o.year ASC
    """, pgconn, params=(station1, station2), index_col='year')
    df['one_station'] = station1
    df['two_station'] = station2
    for col in ['total_precip', 'avg_high', 'avg_low', 'max_high', 'min_low',
                'avg_temp']:
        df['diff_'+col] = df['one_'+col] - df['two_'+col]

    (fig, ax) = plt.subplots(1, 1)
    color_above = 'b' if varname in ['total_precip', ] else 'r'
    color_below = 'r' if color_above == 'b' else 'b'

    bars = ax.bar(df.index, df['diff_'+varname], fc=color_above,
                  ec=color_above)
    for bar, val in zip(bars, df['diff_'+varname].values):
        if val < 0:
            bar.set_facecolor(color_below)
            bar.set_edgecolor(color_below)

    ax.set_title(("Yearly %s [%s] %s\nminus [%s] %s"
                  ) % (PDICT[varname], station1,
                       nt1.sts[station1]['name'], station2,
                       nt2.sts[station2]['name']))
    units = 'inch' if varname in ['total_precip', ] else 'F'
    lbl = 'wetter' if units == 'inch' else 'warmer'
    wins = len(df[df['diff_'+varname] > 0].index)
    ax.text(0.5, 0.95, "%s %s (%s/%s)" % (nt1.sts[station1]['name'], lbl,
                                          wins, len(df.index)),
            transform=ax.transAxes, ha='center')
    wins = len(df[df['diff_'+varname] < 0].index)
    ax.text(0.5, 0.05, "%s %s (%s/%s)" % (nt1.sts[station2]['name'], lbl,
                                          wins, len(df.index)),
            transform=ax.transAxes, ha='center')
    ax.axhline(df['diff_'+varname].mean(), lw=2, color='k')
    ax.set_ylabel("%s [%s] Avg: %.2f" % (PDICT[varname], units,
                                         df['diff_'+varname].mean()))
    ax.grid(True)
    ax.set_xlim(df.index.min()-1, df.index.max()+1)
    ymax = df['diff_'+varname].abs().max() * 1.1
    ax.set_ylim(0 - ymax, ymax)
    return fig, df
Example #9
# Month percentile
import psycopg2
from pyiem.plot import MapPlot
import matplotlib.cm as cm
from pyiem.network import Table as NetworkTable
import numpy as np

nt = NetworkTable("IACLIMATE")
COOP = psycopg2.connect(database="coop", host='iemdb', user='******')
ccursor = COOP.cursor()
ccursor.execute("""
  SELECT station, max(case when year = 2015 then rank else -1 end) as ma, count(*),
  max(case when year = 2015 then  m else -1 end) from 
  (SELECT station, year, m, rank() over (partition by station ORDER by m ASC)
  from
    (SELECT station, year, max(high) as m from alldata_ia where
     sday < '0520' and year > 1950 GROUP by station, year) as foo
  ) as foo2 WHERE station != 'IA0000' and substr(station,3,1) != 'C' 
  GROUP by station ORDER by ma DESC
""")
lats = []
lons = []
vals = []
for row in ccursor:
    if row[0] not in nt.sts:
        continue
    if row[1] > 30:
        continue
    print(row)
    vals.append(row[1] / float(row[2]) * 100.0)
    lats.append(nt.sts[row[0]]['lat'])
Example #10
File: p113.py Project: trentford/iem
def plotter(fdict):
    """ Go """
    pgconn = get_dbconn('coop')
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['station']
    varname = ctx['var']

    nt = NetworkTable("%sCLIMATE" % (station[:2], ))
    res = """\
# IEM Climodat https://mesonet.agron.iastate.edu/climodat/
# Report Generated: %s
# Climate Record: %s -> %s
# Site Information: [%s] %s
# Contact Information: Daryl Herzmann [email protected] 515.294.5978
""" % (datetime.date.today().strftime("%d %b %Y"),
       nt.sts[station]['archive_begin'].date(), datetime.date.today(), station,
       nt.sts[station]['name'])
    if varname == 'maxmin':
        res += """\
# DAILY RECORD HIGHS AND LOWS OCCURRING DURING %s-%s FOR STATION NUMBER  %s
     JAN     FEB     MAR     APR     MAY     JUN     JUL     AUG     SEP     OCT     NOV     DEC
 DY  MX  MN  MX  MN  MX  MN  MX  MN  MX  MN  MX  MN  MX  MN  MX  MN  MX  MN  MX  MN  MX  MN  MX  MN
""" % (nt.sts[station]['archive_begin'].year, datetime.date.today().year,
       station)
    elif varname == 'means':
        res += """\
# DAILY MEAN HIGHS AND LOWS FOR STATION NUMBER  %s
     JAN     FEB     MAR     APR     MAY     JUN     JUL     AUG     SEP     OCT     NOV     DEC
 DY  MX  MN  MX  MN  MX  MN  MX  MN  MX  MN  MX  MN  MX  MN  MX  MN  MX  MN  MX  MN  MX  MN  MX  MN
""" % (station, )
    elif varname == 'range':
        res += """\
# RECORD LARGEST AND SMALLEST DAILY RANGES (MAX-MIN) FOR STATION NUMBER  %s
     JAN     FEB     MAR     APR     MAY     JUN     JUL     AUG     SEP     OCT     NOV     DEC
 DY  MX  MN  MX  MN  MX  MN  MX  MN  MX  MN  MX  MN  MX  MN  MX  MN  MX  MN  MX  MN  MX  MN  MX  MN
""" % (station, )
    else:
        res += """\
# DAILY MAXIMUM PRECIPITATION FOR STATION NUMBER %s
     JAN   FEB   MAR   APR   MAY   JUN   JUL   AUG   SEP   OCT   NOV   DEC
""" % (station, )

    df = read_sql("SELECT * from climate WHERE station = %s",
                  pgconn, params=(station, ), index_col='valid')

    bad = "  ****" if varname == 'precip' else ' *** ***'
    for day in range(1, 32):
        res += "%3i" % (day,)
        for mo in range(1, 13):
            try:
                ts = datetime.date(2000, mo, day)
                if ts not in df.index:
                    res += bad
                    continue
            except Exception as _:
                res += bad
                continue
            row = df.loc[ts]
            if (row['max_high'] is None or
                    row['min_low'] is None):
                res += bad
                continue
            if varname == 'maxmin':
                res += ("%4i%4i" % (row["max_high"], row["min_low"]))
            elif varname == 'range':
                res += ("%4i%4i" % (row["max_range"], row["min_range"]))
            elif varname == 'means':
                res += ("%4i%4i" % (row["high"], row["low"]))
            else:
                res += "%6.2f" % (row["max_precip"], )
        res += ("\n")

    return None, df, res
Example #11
import datetime

import pyiem.meteorology as meteorology
from pyiem.datatypes import temperature, speed
from pyiem.network import Table as NetworkTable
import pyiem.util as util
import psycopg2.extras
import pytz

utc = datetime.datetime.utcnow()
utc = utc.replace(tzinfo=pytz.timezone("UTC"))
tstr = utc.strftime("%Y%m%d%H%M")

now = utc.astimezone(pytz.timezone("America/Chicago"))

IEM = util.get_dbconn('iem', user='******')
icursor = IEM.cursor(cursor_factory=psycopg2.extras.DictCursor)

st = NetworkTable(['KCCI', 'KIMT'])

st.sts["SMAI4"]["plot_name"] = "M-town"
st.sts["SBZI4"]["plot_name"] = "Zoo"
st.sts["SMSI4"]["plot_name"] = "Barnum"
st.sts["STQI4"]["plot_name"] = "Tama"
st.sts["SBOI4"]["plot_name"] = "Boone"


def altiTxt(d):
    if d == "":
        return "S"
    if d < 0:
        return "F"
    if d > 0:
        return "R"
Example #12
File: p171.py Project: tutuhuang/iem
def plotter(fdict):
    """ Go """
    import matplotlib
    matplotlib.use('agg')
    import matplotlib.pyplot as plt
    import matplotlib.colors as mpcolors
    import matplotlib.patheffects as PathEffects
    pgconn = get_dbconn('postgis')
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['station']
    phenomena = ctx['phenomena']
    significance = ctx['significance']
    opt = ctx['opt']
    state = ctx['state']

    nt = NetworkTable('WFO')
    nt.sts['_ALL'] = {'name': 'All Offices'}

    wfo_limiter = (" and wfo = '%s' ") % (station if len(station) == 3 else
                                          station[1:], )
    if station == '_ALL':
        wfo_limiter = ''
    if opt == 'state':
        wfo_limiter = " and substr(ugc, 1, 2) = '%s'" % (state, )

    df = read_sql("""
        with data as (
            SELECT distinct extract(year from issue) as yr2,
            min(issue) as i, wfo, eventid
            from warnings where phenomena = %s and significance = %s
            """ + wfo_limiter + """
            GROUP by yr2, wfo, eventid)

        SELECT extract(year from i) as yr, extract(month from i) as mo,
        count(*) from data GROUP by yr, mo ORDER by yr, mo ASC
      """,
                  pgconn,
                  params=(phenomena, significance),
                  index_col=None)

    if df.empty:
        raise ValueError("Sorry, no data found!")
    (fig, ax) = plt.subplots(1, 1, figsize=(8, 8))

    minyear = df['yr'].min()
    maxyear = df['yr'].max()
    data = np.zeros((int(maxyear - minyear + 1), 12))
    for _, row in df.iterrows():
        data[int(row['yr'] - minyear), int(row['mo'] - 1)] = row['count']
        txt = ax.text(row['mo'],
                      row['yr'],
                      "%.0f" % (row['count'], ),
                      va='center',
                      ha='center',
                      color='white')
        txt.set_path_effects(
            [PathEffects.withStroke(linewidth=2, foreground="k")])
    cmap = plt.get_cmap('jet')
    cmap.set_under('white')
    maxval = max([df['count'].max(), 11])
    bounds = np.linspace(1, maxval, 10, dtype='i')
    norm = mpcolors.BoundaryNorm(bounds, cmap.N)
    res = ax.imshow(data,
                    extent=[0.5, 12.5, maxyear + 0.5, minyear - 0.5],
                    interpolation='nearest',
                    aspect='auto',
                    norm=norm,
                    cmap=cmap)
    fig.colorbar(res, label='count')
    ax.grid(True)
    ax.set_xticks(range(1, 13))
    ax.set_xticklabels(calendar.month_abbr[1:])

    title = "NWS %s" % (nt.sts[station]['name'], )
    if opt == 'state':
        title = ("NWS Issued for Counties/Zones for State of %s") % (
            reference.state_names[state], )
    title += ("\n%s (%s.%s) Issued by Year,Month") % (vtec.get_ps_string(
        phenomena, significance), phenomena, significance)
    ax.set_title(title)

    return fig, df
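The heat map itself is just imshow over a (year, month) count matrix with a BoundaryNorm, so cells below the first bound render white via cmap.set_under. A compact standalone version on a random matrix (the counts are synthetic):

import numpy as np
import matplotlib
matplotlib.use('agg')
import matplotlib.pyplot as plt
import matplotlib.colors as mpcolors

rng = np.random.default_rng(1)
minyear, maxyear = 2000, 2010
data = rng.integers(0, 15, size=(maxyear - minyear + 1, 12)).astype(float)

cmap = plt.get_cmap('jet')  # newer matplotlib may require cmap = cmap.copy() first
cmap.set_under('white')
bounds = np.linspace(1, max(data.max(), 11), 10, dtype='i')
norm = mpcolors.BoundaryNorm(bounds, cmap.N)

(fig, ax) = plt.subplots(1, 1)
res = ax.imshow(data, extent=[0.5, 12.5, maxyear + 0.5, minyear - 0.5],
                interpolation='nearest', aspect='auto', norm=norm, cmap=cmap)
fig.colorbar(res, label='count')
fig.savefig('heatmap_sketch.png')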
Example #13
def plotter(fdict):
    """ Go """
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['station']
    network = ctx['network']
    year = ctx['year']
    season = ctx['season']
    nt = NetworkTable(network)
    table = "alldata_%s" % (station[:2],)

    pgconn = get_dbconn('coop')

    # Have to do a redundant query to get the running values
    obs = read_sql("""
    WITH trail as (
        SELECT day, year,
        avg((high+low)/2.) OVER (ORDER by day ASC ROWS 91 PRECEDING) as avgt
        from """ + table + """ WHERE station = %s)

    SELECT day, avgt from trail WHERE year between %s and %s ORDER by day ASC
    """, pgconn, params=(station, year, year + 2), index_col='day')

    df = read_sql("""
    WITH trail as (
        SELECT day, year,
        avg((high+low)/2.) OVER (ORDER by day ASC ROWS 91 PRECEDING) as avgt
        from """ + table + """ WHERE station = %s),
    extremes as (
        SELECT day, year, avgt,
        rank() OVER (PARTITION by year ORDER by avgt ASC) as minrank,
        rank() OVER (PARTITION by year ORDER by avgt DESC) as maxrank
        from trail),
    yearmax as (
        SELECT year, min(day) as summer_end, min(avgt) as summer
        from extremes where maxrank = 1 GROUP by year),
    yearmin as (
        SELECT year, min(day) as winter_end, min(avgt) as winter
        from extremes where minrank = 1 GROUP by year)

    SELECT x.year, winter_end, winter, summer_end, summer,
    extract(doy from winter_end)::int as winter_end_doy,
    extract(doy from summer_end)::int as summer_end_doy
    from yearmax x JOIN yearmin n on (x.year = n.year) ORDER by x.year ASC
    """, pgconn, params=(station, ), index_col='year')
    # Throw out spring of the first year
    for col in ['winter', 'winter_end_doy', 'winter_end']:
        df.at[df.index.min(), col] = None

    # Need to cull current year
    if datetime.date.today().month < 8:
        for col in ['summer', 'summer_end_doy', 'summer_end']:
            df.at[datetime.date.today().year, col] = None
    if datetime.date.today().month < 2:
        for col in ['winter', 'winter_end_doy', 'winter_end']:
            df.at[datetime.date.today().year, col] = None
    df['spring_length'] = df['summer_end_doy'] - 91 - df['winter_end_doy']
    # fall is a bit trickier
    df['fall_length'] = None
    df['fall_length'].values[:-1] = ((df['winter_end_doy'].values[1:] + 365) -
                                     91 - df['summer_end_doy'].values[:-1])

    df['fall_length'] = pd.to_numeric(df['fall_length'])
    (fig, ax) = plt.subplots(3, 1, figsize=(8, 9))

    ax[0].plot(obs.index.values, obs['avgt'].values)
    ax[0].set_ylim(obs['avgt'].min() - 8, obs['avgt'].max() + 8)
    ax[0].set_title(("%s-%s [%s] %s\n91 Day Average Temperatures"
                     ) % (nt.sts[station]['archive_begin'].year,
                          year + 3, station, nt.sts[station]['name']))
    ax[0].set_ylabel(r"Trailing 91 Day Avg T $^{\circ}$F")
    ax[0].xaxis.set_major_formatter(mdates.DateFormatter('%b\n%Y'))
    ax[0].grid(True)

    # Label the maxes and mins
    for yr in range(year, year+3):
        if yr not in df.index:
            continue
        date = df.at[yr, 'winter_end']
        val = df.at[yr, 'winter']
        if date is not None:
            ax[0].text(
                date, val - 1,
                r"%s %.1f$^\circ$F" % (date.strftime("%-d %b"), val),
                ha='center', va='top',
                bbox=dict(color='white', boxstyle='square,pad=0')
            )
        date = df.at[yr, 'summer_end']
        val = df.at[yr, 'summer']
        if date is not None:
            ax[0].text(
                date, val + 1,
                r"%s %.1f$^\circ$F" % (date.strftime("%-d %b"), val),
                ha='center', va='bottom',
                bbox=dict(color='white', boxstyle='square,pad=0')
            )

    df2 = df.dropna()
    p2col = 'winter_end_doy' if season == 'spring' else 'summer_end_doy'
    slp, intercept, r, _, _ = stats.linregress(df2.index.values,
                                               df2[p2col].values)
    ax[1].scatter(df.index.values, df[p2col].values)
    ax[1].grid(True)
    # Do labelling
    yticks = []
    yticklabels = []
    for doy in range(int(df[p2col].min()),
                     int(df[p2col].max())):
        date = datetime.date(2000, 1, 1) + datetime.timedelta(days=(doy - 1))
        if date.day in [1, 15]:
            yticks.append(doy)
            yticklabels.append(date.strftime("%-d %b"))
    ax[1].set_yticks(yticks)
    ax[1].set_yticklabels(yticklabels)
    lbl = ("Date of Minimum (Spring Start)" if season == 'spring'
           else "Date of Maximum (Fall Start)")
    ax[1].set_ylabel(lbl)
    ax[1].set_xlim(df.index.min() - 1, df.index.max() + 1)
    avgv = df[p2col].mean()
    ax[1].axhline(avgv, color='r')
    ax[1].plot(df.index.values, intercept + (df.index.values * slp))
    d = (datetime.date(2000, 1, 1) +
         datetime.timedelta(days=int(avgv))).strftime("%-d %b")
    ax[1].text(0.02, 0.02,
               r"$\frac{\Delta days}{decade} = %.2f,R^2=%.2f, avg = %s$" % (
                    slp * 10.0, r ** 2, d), va='bottom',
               transform=ax[1].transAxes)
    ax[1].set_ylim(bottom=(ax[1].get_ylim()[0] - 10))

    p3col = 'spring_length' if season == 'spring' else 'fall_length'
    slp, intercept, r, _, _ = stats.linregress(df2.index.values,
                                               df2[p3col])
    ax[2].scatter(df.index.values, df[p3col])
    ax[2].set_xlim(df.index.min() - 1, df.index.max() + 1)
    ax[2].set_ylabel("Length of '%s' [days]" % (season.capitalize(),))
    ax[2].grid(True)
    avgv = df[p3col].mean()
    ax[2].axhline(avgv, color='r')
    ax[2].plot(df.index.values, intercept + (df.index.values * slp))
    ax[2].text(0.02, 0.02,
               r"$\frac{\Delta days}{decade} = %.2f,R^2=%.2f, avg = %.1fd$" % (
                slp * 10.0, r ** 2, avgv),
               va='bottom', transform=ax[2].transAxes)
    ax[2].set_ylim(bottom=(ax[2].get_ylim()[0] - 15))

    return fig, df
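The SQL window 'ROWS 91 PRECEDING' is a trailing average over the current day plus the 91 before it, i.e. a 92-row window. The same smoothing can be reproduced in pandas as a sanity check; the temperature series below is random:

import numpy as np
import pandas as pd

rng = np.random.default_rng(2)
days = pd.date_range("2015-01-01", "2017-12-31", freq="D")
avgt = pd.Series(50 + 30 * np.sin(np.arange(len(days)) / 58.0) +
                 rng.normal(0, 5, len(days)), index=days)

trail = avgt.rolling(92, min_periods=1).mean()        # 92 rows = day + 91 preceding
print(trail.idxmin().date(), round(trail.min(), 1))   # coldest point of the smoothed curve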
Example #14
"""
 Generate and email a report to the IASS folks with summarized IEM estimated
 COOP data included...
"""
import sys
import psycopg2.extras
from pyiem.network import Table as NetworkTable
import cStringIO
import datetime
import smtplib
from email import encoders
from email.mime.base import MIMEBase
from email.mime.multipart import MIMEMultipart

nt = NetworkTable("IA_COOP")

districts = [
             'North West',
             'North Central',
             'North East',
             'West Central',
             'Central',
             'East Central',
             'South West',
             'South Central',
             'South East',
            ]
stids = [
[
 'Rock Rapids    RKRI4',
 'Sheldon    SHDI4',
Example #15
File: p63.py Project: theweathermanda/iem
def plotter(fdict):
    """ Go """
    pgconn = get_dbconn('coop')
    cursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['station']

    table = "alldata_%s" % (station[:2], )
    nt = NetworkTable("%sCLIMATE" % (station[:2], ))
    sts = nt.sts[station]['archive_begin']
    syear = sts.year if sts.month == 1 and sts.day == 1 else (sts.year + 1)
    syear = max(syear, 1893)
    eyear = datetime.datetime.now().year

    cursor.execute(
        """
        SELECT sday, year, high, low, precip, day from """ + table + """
        where station = %s and sday != '0229'
        and year >= %s ORDER by day ASC
    """, (station, syear))

    hrecords = {}
    hyears = [0] * (eyear - syear)
    lrecords = {}
    lyears = [0] * (eyear - syear)
    precords = {}
    pyears = [0] * (eyear - syear)
    expect = [0] * (eyear - syear)

    # hstraight = 0
    for row in cursor:
        sday = row[0]
        year = row[1]
        high = row[2]
        low = row[3]
        precip = row[4]
        if year == syear:
            hrecords[sday] = high
            lrecords[sday] = low
            precords[sday] = precip
            continue
        if precip > precords[sday]:
            precords[sday] = row['precip']
            pyears[year - syear - 1] += 1
        if high > hrecords[sday]:
            hrecords[sday] = row['high']
            hyears[year - syear - 1] += 1
            # hstraight += 1
            # if hstraight > 3:
            #    print hstraight, sday, row[4]
        # else:
        #     hstraight = 0
        if low < lrecords[sday]:
            lrecords[sday] = low
            lyears[year - syear - 1] += 1

    years = range(syear + 1, eyear + 1)
    for i, year in enumerate(years):
        expect[i] = 365.0 / float(year - syear + 1)

    df = pd.DataFrame(
        dict(expected=pd.Series(expect),
             highs=pd.Series(hyears),
             lows=pd.Series(lyears),
             precip=pd.Series(pyears),
             year=years))

    (fig, ax) = plt.subplots(3, 1, figsize=(7, 8), sharex=True, sharey=True)
    rects = ax[0].bar(years, hyears, facecolor='b', edgecolor='b')
    for i, rect in enumerate(rects):
        if rect.get_height() > expect[i]:
            rect.set_facecolor('r')
            rect.set_edgecolor('r')
    ax[0].plot(years, expect, color='black', label="$365/n$")
    ax[0].set_ylim(0, 50)
    ax[0].set_xlim(syear, eyear + 1)
    ax[0].grid(True)
    ax[0].legend()
    ax[0].set_ylabel("Records set per year")
    ax[0].set_title(("[%s] %s\nDaily Records Set Per Year "
                     "%s sets record then accumulate (%s-%s)\n"
                     "events/year value is long term average, total events / "
                     "%.0f years") % (station, nt.sts[station]['name'], syear,
                                      syear + 1, eyear, eyear - syear - 1))
    rate = sum(hyears) / float(len(hyears))
    ax[0].text(eyear - 70,
               32,
               "Max High Temperature, %.1f events/year" % (rate, ),
               bbox=dict(color='white'))

    rects = ax[1].bar(years, lyears, facecolor='r', edgecolor='r')
    for i, rect in enumerate(rects):
        if rect.get_height() > expect[i]:
            rect.set_facecolor('b')
            rect.set_edgecolor('b')
    ax[1].plot(years, expect, color='black', label="$365/n$")
    ax[1].grid(True)
    ax[1].legend()
    ax[1].set_ylabel("Records set per year")
    rate = sum(lyears) / float(len(lyears))
    ax[1].text(eyear - 70,
               32,
               "Min Low Temperature, %.1f events/year" % (rate, ),
               bbox=dict(color='white'))

    rects = ax[2].bar(years, pyears, facecolor='r', edgecolor='r')
    for i, rect in enumerate(rects):
        if rect.get_height() > expect[i]:
            rect.set_facecolor('b')
            rect.set_edgecolor('b')
    ax[2].plot(years, expect, color='black', label="$365/n$")
    ax[2].grid(True)
    ax[2].legend()
    ax[2].set_ylabel("Records set per year")
    rate = sum(pyears) / float(len(pyears))
    ax[2].text(eyear - 50,
               32,
               "Precipitation, %.1f events/year" % (rate, ),
               bbox=dict(color='white'))

    return fig, df
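The black curve is the standard record-statistics expectation: in a stationary climate the n-th year of observations has a 1/n chance of setting a new record on any given day, so roughly 365/n record days are expected per year. A quick synthetic check of that claim:

import numpy as np

rng = np.random.default_rng(3)
years, ndays = 100, 365
obs = rng.normal(size=(years, ndays))      # one synthetic daily value per year

running_max = np.maximum.accumulate(obs, axis=0)
# A record in year n means beating every prior year for that calendar day
records = (obs[1:] > running_max[:-1]).sum(axis=1)
print(records[:5])                                   # simulated counts for years 2..6
print([round(365.0 / n, 1) for n in range(2, 7)])    # expectation: 182.5, 121.7, ...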
Example #16
def get_context(fdict):
    """ Get the raw infromations we need"""
    ctx = {}
    pgconn = psycopg2.connect(database='coop', host='iemdb', user='******')
    cursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)

    today = datetime.date.today()
    thisyear = today.year
    year = int(fdict.get('year', thisyear))
    station = fdict.get('station', 'IA0200')
    varname = fdict.get('var', 'low')
    half = fdict.get('half', 'fall')
    table = "alldata_%s" % (station[:2], )
    nt = NetworkTable("%sCLIMATE" % (station[:2], ))

    startyear = int(nt.sts[station]['archive_begin'].year)
    data = np.ma.ones((thisyear - startyear + 1, 366)) * 199
    if half == 'fall':
        cursor.execute(
            """SELECT extract(doy from day), year,
            """ + varname + """ from
            """ + table + """ WHERE station = %s and low is not null and
            high is not null and year >= %s""", (station, startyear))
    else:
        cursor.execute(
            """SELECT extract(doy from day), year,
            """ + varname + """ from
            """ + table + """ WHERE station = %s and high is not null and
            low is not null and year >= %s""", (station, startyear))
    for row in cursor:
        data[row[1] - startyear, row[0] - 1] = row[2]

    data.mask = np.where(data == 199, True, False)

    doys = []
    avg = []
    p25 = []
    p2p5 = []
    p75 = []
    p97p5 = []
    mins = []
    maxs = []
    dyear = []
    idx = year - startyear
    last_doy = int(today.strftime("%j"))
    if half == 'fall':
        for doy in range(181, 366):
            l = np.ma.min(data[:-1, 180:doy], 1)
            avg.append(np.ma.average(l))
            mins.append(np.ma.min(l))
            maxs.append(np.ma.max(l))
            p = np.percentile(l, [2.5, 25, 75, 97.5])
            p2p5.append(p[0])
            p25.append(p[1])
            p75.append(p[2])
            p97p5.append(p[3])
            doys.append(doy)
            if year == thisyear and doy > last_doy:
                continue
            dyear.append(np.ma.min(data[idx, 180:doy]))
    else:
        for doy in range(1, 181):
            l = np.ma.max(data[:-1, :doy], 1)
            avg.append(np.ma.average(l))
            mins.append(np.ma.min(l))
            maxs.append(np.ma.max(l))
            p = np.percentile(l, [2.5, 25, 75, 97.5])
            p2p5.append(p[0])
            p25.append(p[1])
            p75.append(p[2])
            p97p5.append(p[3])
            doys.append(doy)
            if year == thisyear and doy > last_doy:
                continue
            dyear.append(np.ma.max(data[idx, :doy]))

    # http://stackoverflow.com/questions/19736080
    d = dict(doy=pd.Series(doys),
             mins=pd.Series(mins),
             maxs=pd.Series(maxs),
             p2p5=pd.Series(p2p5),
             p97p5=pd.Series(p97p5),
             p25=pd.Series(p25),
             p75=pd.Series(p75),
             avg=pd.Series(avg),
             thisyear=pd.Series(dyear))
    df = pd.DataFrame(d)
    sts = datetime.date(2000, 1, 1) + datetime.timedelta(days=doys[0] - 1)
    df['dates'] = pd.date_range(sts, periods=len(doys))
    df.set_index('doy', inplace=True)
    ctx['df'] = df
    ctx['year'] = year
    ctx['half'] = half
    if ctx['half'] == 'fall':
        title = "Minimum Daily %s Temperature after 1 July"
    else:
        title = "Maximum Daily %s Temperature before 1 July"
    title = title % (varname.capitalize(), )
    ctx['ylabel'] = title
    ctx['title'] = "%s-%s %s %s\n%s" % (startyear, thisyear - 1, station,
                                        nt.sts[station]['name'], title)
    return ctx
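For the fall half, the loop recomputes a masked minimum "to date" for every day of year, which is quadratic in the season length. An equivalent formulation uses a cumulative minimum along the day axis; a sketch on random data:

import numpy as np

rng = np.random.default_rng(4)
nyears, ndays = 30, 366
data = rng.normal(50, 15, size=(nyears, ndays))    # synthetic daily lows [F]

fall = data[:, 180:]                               # on/after 1 July (doy 181)
min_to_date = np.minimum.accumulate(fall, axis=1)  # coldest so far, per year

# Percentile envelope across years, e.g. 60 days into the fall
print(np.percentile(min_to_date[:, 60], [2.5, 25, 75, 97.5]))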
Example #17
File: catSNET.py Project: stormchas4/iem
"""
Generate web output for precip data
"""
from io import StringIO
import datetime

import psycopg2.extras
from paste.request import parse_formvars
from pyiem.network import Table as NetworkTable
from pyiem.util import get_dbconn

nt = NetworkTable(("KCCI", "KIMIT", "KELO"))
IEM = get_dbconn("iem")
icursor = IEM.cursor(cursor_factory=psycopg2.extras.DictCursor)

requireHrs = [0] * 25
stData = {}
totp = {}


# Return the Date we will be looking for...
def doHeader(environ, start_response, sio):
    """header please"""
    start_response("200 OK", [("Content-type", "text/html")])
    sio.write("""
<html>
<head>
  <title>IEM | Hourly Precip Grid</title>
</head>
<body bgcolor="white">
<a href="/index.php">Iowa Mesonet</a> > 
Example #18
def plotter(fdict):
    """ Go """
    import matplotlib
    matplotlib.use('agg')
    import matplotlib.pyplot as plt
    pgconn = get_dbconn('asos')
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['zstation']
    network = ctx['network']
    year = ctx['year']
    pweather = ctx['var']
    if pweather == 'PSN':
        pweather = "+SN"
        PDICT['+SN'] = PDICT['PSN']

    nt = NetworkTable(network)
    tzname = nt.sts[station]['tzname']
    syear = max([1973, nt.sts[station]['archive_begin'].year])
    limiter = "array_to_string(wxcodes, '') LIKE '%%" + pweather + "%%'"
    if pweather == "1":
        # Special in the case of non-VCTS
        limiter = ("ARRAY['TS'::varchar, '-TSRA'::varchar, 'TSRA'::varchar, "
                   "'-TS'::varchar, '+TSRA'::varchar, '+TSSN'::varchar,"
                   "'-TSSN'::varchar, '-TSDZ'::varchar] && wxcodes")
    df = read_sql("""
    WITH data as (
        SELECT distinct date(valid at time zone %s) from alldata
        where station = %s and
        """ + limiter + """
        and valid > '1973-01-01' and report_type = 2)

    SELECT extract(year from date)::int as year,
    extract(month from date)::int as month,
    count(*) from data GROUP by year, month ORDER by year, month
    """,
                  pgconn,
                  params=(tzname, station),
                  index_col=None)

    if df.empty:
        raise ValueError("No database entries found for station, sorry!")
    (fig, ax) = plt.subplots(1, 1)
    ax.set_title(
        ("[%s] %s %s Events\n"
         "(%s-%s) Distinct Calendar Days with '%s' Reported") %
        (station, nt.sts[station]['name'], PDICT[pweather], syear,
         datetime.date.today().year, pweather if pweather != '1' else 'TS'))
    df2 = df[df['year'] == year]
    if not df2.empty:
        ax.bar(df2['month'].values - 0.2,
               df2['count'].values,
               width=0.4,
               fc='r',
               ec='r',
               label='%s' % (year, ))
    df2 = df.groupby('month').sum()
    years = (datetime.date.today().year - syear) + 1
    yvals = df2['count'] / years
    ax.bar(df2.index.values + 0.2,
           yvals,
           width=0.4,
           fc='b',
           ec='b',
           label='Avg')
    for x, y in zip(df2.index.values, yvals):
        ax.text(x, y + 0.2, "%.1f" % (y, ))
    ax.set_xlim(0.5, 12.5)
    ax.set_xticklabels(calendar.month_abbr[1:])
    ax.set_xticks(range(1, 13))
    ax.set_ylabel("Days Per Month")
    ax.set_ylim(top=(ax.get_ylim()[1] + 2))
    ax.legend(loc='best')
    ax.grid(True)

    return fig, df
Example #19
File: coop.py Project: trentford/iem
def do_century(ctx):
    """ Materialize the data in Century Format
    * Century format  (precip cm, avg high C, avg low C)
    prec  1980   2.60   6.40   0.90   1.00   0.70   0.00
    tmin  1980  14.66  12.10   7.33  -0.89  -5.45  -7.29
    tmax  1980  33.24  30.50  27.00  18.37  11.35   9.90
    prec  1981  12.00   7.20   0.60   4.90   1.10   0.30
    tmin  1981  14.32  12.48   8.17   0.92  -3.25  -8.90
    tmax  1981  30.84  28.71  27.02  16.84  12.88   6.82
    """
    if len(ctx['stations']) > 1:
        ssw(("ERROR: Century output is only "
             "permitted for one station at a time."))
        return

    station = ctx['stations'][0]
    network = "%sCLIMATE" % (station[:2], )
    nt = NetworkTable(network)

    dbconn = get_database()
    cursor = dbconn.cursor(cursor_factory=psycopg2.extras.DictCursor)

    # Automatically set dates to start and end of year to make output clean
    sts = datetime.date(ctx['sts'].year, 1, 1)
    ets = datetime.date(ctx['ets'].year, 12, 31)
    if ets >= datetime.date.today():
        ets = datetime.date.today() - datetime.timedelta(days=1)

    table = get_tablename(ctx['stations'])
    thisyear = datetime.datetime.now().year
    cursor.execute(
        """
    WITH scenario as (
        SELECT """ + str(thisyear) + """::int as year, month, high, low, precip
        from """ + table + """
        WHERE station = %s and day > %s and day <= %s and sday != '0229'
    ), obs as (
      select year, month, high, low, precip from """ + table + """
      WHERE station = %s and day >= %s and day <= %s
    ), data as (
      SELECT * from obs UNION select * from scenario
    )

    SELECT year, month, avg(high) as tmax, avg(low) as tmin,
    sum(precip) as prec from data GROUP by year, month
    """,
        (station, ctx['scenario_sts'], ctx['scenario_ets'], station, sts, ets))
    data = {}
    for row in cursor:
        if row['year'] not in data:
            data[row['year']] = {}
            for mo in range(1, 13):
                data[row['year']][mo] = {'prec': -99, 'tmin': -99, 'tmax': -99}

        data[row['year']][row['month']] = {
            'prec': distance(row['prec'], 'IN').value('MM'),
            'tmin': temperature(float(row['tmin']), 'F').value('C'),
            'tmax': temperature(float(row['tmax']), 'F').value('C'),
        }

    ssw("# Iowa Environmental Mesonet -- NWS Cooperative Data\n")
    ssw("# Created: %s UTC\n" %
        (datetime.datetime.utcnow().strftime("%d %b %Y %H:%M:%S"), ))
    ssw("# Contact: daryl herzmann [email protected] 515-294-5978\n")
    ssw("# Station: %s %s\n" % (station, nt.sts[station]['name']))
    ssw("# Data Period: %s - %s\n" % (sts, ets))
    if ctx['scenario'] == 'yes':
        ssw("# !SCENARIO DATA! inserted after: %s replicating year: %s\n" %
            (ctx['ets'], ctx['scenario_year']))
    idxs = ["prec", "tmin", "tmax"]
    for year in range(sts.year, ets.year + 1):
        for idx in idxs:
            ssw(("%s  %s%7.2f%7.2f%7.2f%7.2f%7.2f%7.2f%7.2f"
                 "%7.2f%7.2f%7.2f%7.2f%7.2f\n") %
                (idx, year, data[year][1][idx], data[year][2][idx],
                 data[year][3][idx], data[year][4][idx], data[year][5][idx],
                 data[year][6][idx], data[year][7][idx], data[year][8][idx],
                 data[year][9][idx], data[year][10][idx], data[year][11][idx],
                 data[year][12][idx]))
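Each Century value is converted from the database's inch and Fahrenheit units with pyiem.datatypes; the function above asks for the 'MM' and 'C' targets. The two conversions in isolation, on arbitrary numbers:

from pyiem.datatypes import distance, temperature

prec = distance(1.25, 'IN').value('MM')      # precipitation, inches -> mm
tmax = temperature(86.0, 'F').value('C')     # mean high, F -> C
print("prec %7.2f   tmax %7.2f" % (prec, tmax))  # prec   31.75   tmax   30.00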
Example #20
def plotter(fdict):
    """ Go """
    import matplotlib
    matplotlib.use('agg')
    import matplotlib.pyplot as plt
    pgconn = get_dbconn('asos', user='******')

    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['zstation']
    network = ctx['network']
    month = int(ctx['month'])
    thres = ctx['t']
    mydir = ctx['dir']

    nt = NetworkTable(network)
    tzname = nt.sts[station]['tzname']

    df = read_sql("""
    WITH data as (
        SELECT valid at time zone %s  + '10 minutes'::interval as v, tmpf
        from alldata where station = %s and tmpf > -90 and tmpf < 150
        and extract(month from valid) = %s and report_type = 2)

    SELECT extract(hour from v) as hour,
    min(v) as min_valid, max(v) as max_valid,
    sum(case when tmpf::int < %s THEN 1 ELSE 0 END) as below,
    sum(case when tmpf::int >= %s THEN 1 ELSE 0 END) as above,
    count(*) from data
    GROUP by hour ORDER by hour ASC
    """,
                  pgconn,
                  params=(tzname, station, month, thres, thres),
                  index_col='hour')

    df['below_freq'] = df['below'].values.astype('f') / df['count'] * 100.
    df['above_freq'] = df['above'].values.astype('f') / df['count'] * 100.

    freq = df[mydir + "_freq"].values
    hours = df.index.values

    (fig, ax) = plt.subplots(1, 1, figsize=(8, 6))
    bars = ax.bar(hours, freq, fc='blue', align='center')
    for i, mybar in enumerate(bars):
        ax.text(i,
                mybar.get_height() + 3,
                "%.0f" % (mybar.get_height(), ),
                ha='center',
                fontsize=10)
    ax.set_xticks(range(0, 25, 3))
    ax.set_xticklabels(
        ['Mid', '3 AM', '6 AM', '9 AM', 'Noon', '3 PM', '6 PM', '9 PM'])
    ax.grid(True)
    ax.set_ylim(0, 100)
    ax.set_yticks([0, 25, 50, 75, 100])
    ax.set_ylabel("Frequency [%]")
    ax.set_xlabel("Hour Timezone: %s" % (tzname, ))
    ax.set_xlim(-0.5, 23.5)
    ax.set_title(("(%s - %s) %s [%s]\nFrequency of %s Hour, %s: %s$^\circ$F") %
                 (df['min_valid'].min().year, df['max_valid'].max().year,
                  nt.sts[station]['name'], station, calendar.month_name[month],
                  PDICT[mydir], thres))

    return fig, df
Example #21
File: p184.py Project: tutuhuang/iem
def plotter(fdict):
    """ Go """
    import matplotlib
    matplotlib.use('agg')
    import matplotlib.pyplot as plt
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['station']
    nt = NetworkTable(ctx['network'])
    year = ctx['year']
    limit = ctx['limit']
    limitsql = ""
    limittitle = ""
    today = datetime.date.today()
    if limit == 'ytd':
        limittitle = "(Jan 1 - %s)" % (today.strftime("%b %-d"), )
        limitsql = " and extract(doy from day) <= %s" % (
            today.strftime("%j"), )

    dbconn = get_dbconn('coop')

    table = "alldata_%s" % (station[:2], )
    df = read_sql("""
        SELECT year, day, high from """ + table + """ WHERE
        station = %s and high is not null
        """ + limitsql + """
        ORDER by day ASC
     """,
                  dbconn,
                  params=(station, ),
                  index_col='day')
    res = []
    for level in range(70, 106):
        gdf = df[df['high'] >= level].groupby('year').count()
        maxval = gdf.max()[0]
        label = ",".join(
            [str(s) for s in list(gdf[gdf['high'] == maxval].index.values)])
        thisyear = 0
        if year in gdf.index.values:
            thisyear = gdf.at[year, 'high']
        res.append(
            dict(level=level, label=label, max=maxval, thisyear=thisyear))

    df = pd.DataFrame(res)
    df.set_index('level', inplace=True)

    (fig, ax) = plt.subplots(1, 1, figsize=(6, 8))
    ax.barh(df.index.values, df['max'].values, label="Max", zorder=2)
    ax.barh(df.index.values,
            df['thisyear'].values,
            label="%s" % (year, ),
            zorder=3)
    for level, row in df.iterrows():
        ax.text(row['max'] + 1,
                level,
                "%s - %s" % (row['max'], row['label']),
                va='center')

    ax.grid(True, color='#EEEEEE', linewidth=1)
    ax.legend(loc='best')
    ax.set_xlim(0, df['max'].max() * 1.2)
    ax.set_ylim(69, 106)
    ax.set_title(("%s Max Days per Year %s\n"
                  "at or above given high temperature threshold") %
                 (nt.sts[station]['name'], limittitle))
    ax.set_ylabel(r"High Temperature $^\circ$F")
    if year == datetime.date.today().year:
        ax.set_xlabel(("Days, %s data through %s") % (year, today))
    else:
        ax.set_xlabel("Days")

    return fig, df
Example #22
def plotter(fdict):
    """ Go """
    import matplotlib
    matplotlib.use('agg')
    import matplotlib.pyplot as plt
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['station']
    varname = ctx['var']
    network = 'RAOB'
    ts = ctx['date']
    hour = int(ctx['hour'])
    ts = datetime.datetime(ts.year, ts.month, ts.day, hour)
    ts = ts.replace(tzinfo=pytz.utc)
    which = ctx['which']
    vlimit = ''
    if which == 'month':
        vlimit = (" and extract(month from f.valid) = %s "
                  ) % (ts.month,)
    nt = NetworkTable(network)
    name = nt.sts[station]['name']
    stations = [station, ]
    if station.startswith("_"):
        name = nt.sts[station]['name'].split("--")[0]
        stations = nt.sts[station]['name'].split("--")[1].strip().split(" ")
    pgconn = get_dbconn('postgis')

    df = read_sql("""
    with data as (
        select f.valid, p.pressure, count(*) OVER (PARTITION by p.pressure),
        min(valid) OVER () as min_valid, max(valid) OVER () as max_valid,
        p.tmpc,
        rank() OVER (PARTITION by p.pressure ORDER by p.tmpc ASC) as tmpc_rank,
        min(p.tmpc) OVER (PARTITION by p.pressure) as tmpc_min,
        max(p.tmpc) OVER (PARTITION by p.pressure) as tmpc_max,
        p.dwpc,
        rank() OVER (PARTITION by p.pressure ORDER by p.dwpc ASC) as dwpc_rank,
        min(p.dwpc) OVER (PARTITION by p.pressure) as dwpc_min,
        max(p.dwpc) OVER (PARTITION by p.pressure) as dwpc_max,
        p.height as hght,
        rank() OVER (
            PARTITION by p.pressure ORDER by p.height ASC) as hght_rank,
        min(p.height) OVER (PARTITION by p.pressure) as hght_min,
        max(p.height) OVER (PARTITION by p.pressure) as hght_max,
        p.smps,
        rank() OVER (PARTITION by p.pressure ORDER by p.smps ASC) as smps_rank,
        min(p.smps) OVER (PARTITION by p.pressure) as smps_min,
        max(p.smps) OVER (PARTITION by p.pressure) as smps_max
        from raob_flights f JOIN raob_profile p on (f.fid = p.fid)
        WHERE f.station in %s
        and extract(hour from f.valid at time zone 'UTC') = %s
        """ + vlimit + """
        and p.pressure in (925, 850, 700, 500, 400, 300, 250, 200,
        150, 100, 70, 50, 10))

    select * from data where valid = %s ORDER by pressure DESC
    """, pgconn, params=(tuple(stations), hour, ts),
                  index_col='pressure')
    if len(df.index) == 0:
        raise ValueError(("Sounding for %s was not found!"
                         ) % (ts.strftime("%Y-%m-%d %H:%M"),))
    for key in PDICT3.keys():
        df[key+'_percentile'] = df[key+'_rank'] / df['count'] * 100.
        # manual hackery to get 0 and 100th percentile
        df.loc[df[key] == df[key+'_max'], key+'_percentile'] = 100.
        df.loc[df[key] == df[key+'_min'], key+'_percentile'] = 0.

    ax = plt.axes([0.1, 0.12, 0.65, 0.75])
    bars = ax.barh(range(len(df.index)), df[varname+'_percentile'],
                   align='center')
    y2labels = []
    fmt = '%.1f' if varname not in ['hght', ] else '%.0f'
    for i, bar in enumerate(bars):
        ax.text(bar.get_width() + 1, i, '%.1f' % (bar.get_width(),),
                va='center', bbox=dict(color='white'))
        y2labels.append((fmt + ' (' + fmt + ' ' + fmt + ')'
                         ) % (df.iloc[i][varname],
                              df.iloc[i][varname+"_min"],
                              df.iloc[i][varname+"_max"]))
    ax.set_yticks(range(len(df.index)))
    ax.set_yticklabels(['%.0f' % (a, ) for a in df.index.values])
    ax.set_ylim(-0.5, len(df.index) - 0.5)
    ax.set_xlabel("Percentile [100 = highest]")
    ax.set_ylabel("Mandatory Pressure Level (hPa)")
    plt.gcf().text(0.5, 0.9,
                   ("%s %s %s Sounding\n"
                    "(%s-%s) Percentile Ranks (%s) for %s"
                    ) % (station, name,
                         ts.strftime("%Y/%m/%d %H UTC"),
                         df.iloc[0]['min_valid'].year,
                         df.iloc[0]['max_valid'].year,
                         ("All Year" if which == 'none'
                          else calendar.month_name[ts.month]),
                         PDICT3[varname]),
                   ha='center', va='bottom')
    ax.grid(True)
    ax.set_xticks([0, 5, 10, 25, 50, 75, 90, 95, 100])
    ax.set_xlim(0, 110)
    ax.text(1.02, 1, 'Ob  (Min  Max)', transform=ax.transAxes)

    ax2 = ax.twinx()
    ax2.set_ylim(-0.5, len(df.index) - 0.5)
    ax2.set_yticks(range(len(df.index)))
    ax2.set_yticklabels(y2labels)
    return plt.gcf(), df
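
The percentile columns above come straight from the SQL window functions: the
rank of the observation within its pressure level divided by the per-level
count, with the minimum and maximum observations then pinned to exactly 0 and
100. A pandas-only sketch of the same idea on made-up numbers:

import pandas as pd

demo = pd.DataFrame({'pressure': [500] * 4,
                     'tmpc': [-25.3, -22.0, -20.1, -18.5]})
demo['tmpc_rank'] = demo.groupby('pressure')['tmpc'].rank(method='min')
demo['count'] = demo.groupby('pressure')['tmpc'].transform('size')
demo['tmpc_percentile'] = demo['tmpc_rank'] / demo['count'] * 100.
# pin the coldest and warmest observations to exactly 0 and 100
demo.loc[demo['tmpc'] == demo['tmpc'].min(), 'tmpc_percentile'] = 0.
demo.loc[demo['tmpc'] == demo['tmpc'].max(), 'tmpc_percentile'] = 100.
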
Example #23
File: p167.py Project: trentford/iem
def plotter(fdict):
    """ Go """
    pgconn = get_dbconn('asos')
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['zstation']
    network = ctx['network']
    year = ctx['year']
    month = ctx['month']

    nt = NetworkTable(network)
    tzname = nt.sts[station]['tzname']
    tzinfo = pytz.timezone(tzname)

    # Figure out the first and last day of this month in the local time zone
    sts = datetime.datetime(year, month, 3, 0, 0)
    sts = sts.replace(tzinfo=pytz.utc)
    sts = sts.astimezone(tzinfo).replace(day=1, hour=0, minute=0)
    ets = (sts + datetime.timedelta(days=35)).replace(day=1)
    days = (ets-sts).days
    data = np.zeros((24, days))

    df = read_sql("""
    SELECT valid at time zone %s as ts,
    skyc1, skyc2, skyc3, skyc4, skyl1, skyl2, skyl3, skyl4,
    vsby
    from alldata where station = %s and valid BETWEEN %s and %s
    and vsby is not null and report_type = 2
    ORDER by valid ASC
    """, pgconn, params=(tzname, station, sts, ets), index_col=None)

    if df.empty:
        raise ValueError("No database entries found for station, sorry!")

    # 0 Unknown
    # 1 VFR: Ceiling >3000' AGL and visibility >5 statute miles (green)
    # 2 MVFR: 1000-3000' and/or 3-5 statute miles, inclusive (blue)
    # 3 IFR: 500 - <1000' and/or 1 to <3 statute miles (red)
    # 4 LIFR: < 500' AGL and/or < 1 mile (magenta)
    lookup = {4: 'LIFR', 3: 'IFR', 2: 'MVFR', 1: 'VFR', 0: 'UNKNOWN'}
    conds = []
    for _, row in df.iterrows():
        x = row['ts'].day - 1
        y = row['ts'].hour
        val = 1
        level = 100000  # arb high number
        coverages = [row['skyc1'], row['skyc2'], row['skyc3'], row['skyc4']]
        if 'OVC' in coverages:
            idx = coverages.index('OVC')
            level = [row['skyl1'], row['skyl2'], row['skyl3'], row['skyl4']
                     ][idx]
        if level < 500 or row['vsby'] < 1:
            val = 4
        elif (level < 1000 and level >= 500) or row['vsby'] < 3:
            val = 3
        elif (level < 3000 and level >= 1000) or row['vsby'] < 5:
            val = 2
        elif level >= 3000 and row['vsby'] >= 5:
            val = 1
        else:
            val = 0
        data[y, x] = max(data[y, x], val)
        conds.append(lookup[val])
        # print row['ts'], y, x, val, data[y, x], level, row['vsby']

    df['flstatus'] = conds

    (fig, ax) = plt.subplots(1, 1, figsize=(8, 6))

    ax.set_facecolor('skyblue')

    ax.set_title(('[%s] %s %s Flight Category\n'
                  'based on Hourly METAR Cloud Amount/Level'
                  ' and Visibility Reports'
                  ) % (station, nt.sts[station]['name'],
                       sts.strftime("%b %Y")))

    colors = ['#EEEEEE', 'green', 'blue', 'red', 'magenta']
    cmap = mpcolors.ListedColormap(colors)
    norm = mpcolors.BoundaryNorm(boundaries=range(6), ncolors=5)
    ax.imshow(np.flipud(data), aspect='auto', extent=[0.5, days + 0.5, -0.5,
                                                      23.5],
              cmap=cmap, interpolation='nearest', norm=norm)
    ax.set_yticks(range(0, 24, 3))
    ax.set_yticklabels(['Mid', '3 AM', '6 AM', '9 AM', 'Noon',
                        '3 PM', '6 PM', '9 PM'])
    ax.set_xticks(range(1, days+1))
    ax.set_ylabel("Hour of Local Day (%s)" % (tzname, ))
    ax.set_xlabel("Day of %s" % (sts.strftime("%b %Y"),))

    rects = []
    for color in colors:
        rects.append(Rectangle((0, 0), 1, 1, fc=color))

    ax.grid(True)
    # Shrink current axis's height by 10% on the bottom
    box = ax.get_position()
    ax.set_position([box.x0, box.y0 + box.height * 0.1,
                     box.width, box.height * 0.9])

    ax.legend(rects, ['Unknown', 'VFR', 'MVFR', "IFR", "LIFR"],
              loc='upper center', fontsize=14,
              bbox_to_anchor=(0.5, -0.09), fancybox=True, shadow=True, ncol=5)

    return fig, df
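
The ceiling/visibility thresholds in the loop above amount to a small
standalone classifier; a sketch, assuming ceiling is the overcast level in feet
AGL (or None when no OVC layer is reported) and vsby is in statute miles:

def flight_category(ceiling, vsby):
    """Mirror the LIFR/IFR/MVFR/VFR thresholds used in the example above."""
    level = ceiling if ceiling is not None else 100000  # arbitrarily high
    if level < 500 or vsby < 1:
        return 'LIFR'
    if level < 1000 or vsby < 3:
        return 'IFR'
    if level < 3000 or vsby < 5:
        return 'MVFR'
    return 'VFR'
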
Example #24
File: p45.py Project: iny/iem
def plotter(fdict):
    """ Go """
    import matplotlib
    matplotlib.use('agg')
    import matplotlib.pyplot as plt
    pgconn = psycopg2.connect(database='asos', host='iemdb', user='******')
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['zstation']
    network = ctx['network']
    hour = ctx['hour']
    year = ctx['year']
    month = ctx['month']

    nt = NetworkTable(network)
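    # the "+ '10 minutes'" shift in the query below nudges routine METARs filed
    # just before the top of the hour (e.g. :53 or :54) into the hour they
    # represent before that hour is extracted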

    df = read_sql("""
        WITH obs as (
            SELECT to_char(valid, 'YYYYmmdd') as yyyymmdd,
            SUM(case when (skyc1 = 'OVC' or skyc2 = 'OVC' or skyc3 = 'OVC'
                        or skyc4 = 'OVC') then 1 else 0 end)
            from alldata where station = %s
            and valid > '1951-01-01'
            and extract(hour from (valid at time zone %s) +
                        '10 minutes'::interval ) = %s
            GROUP by yyyymmdd)

        SELECT substr(o.yyyymmdd,1,4)::int as year,
        substr(o.yyyymmdd,5,2)::int as month,
        sum(case when o.sum >= 1 then 1 else 0 end) as hits, count(*)
        from obs o GROUP by year, month ORDER by year ASC, month ASC
      """,
                  pgconn,
                  params=(station, nt.sts[station]['tzname'], hour),
                  index_col=None)
    df['freq'] = df['hits'] / df['count'] * 100.
    climo = df.groupby('month').sum()
    climo['freq'] = climo['hits'] / climo['count'] * 100.

    (fig, ax) = plt.subplots(2, 1)
    ax[0].bar(climo.index.values - 0.2,
              climo['freq'].values,
              fc='red',
              ec='red',
              width=0.4,
              label='Climatology',
              align='center')
    for i, row in climo.iterrows():
        ax[0].text(i - 0.2,
                   row['freq'] + 1,
                   "%.0f" % (row['freq'], ),
                   ha='center')

    thisyear = df[df['year'] == year]
    if len(thisyear.index) > 0:
        ax[0].bar(thisyear['month'].values + 0.2,
                  thisyear['freq'].values,
                  fc='blue',
                  ec='blue',
                  width=0.4,
                  label=str(year),
                  align='center')
    for i, row in thisyear.iterrows():
        ax[0].text(row['month'] + 0.2,
                   row['freq'] + 1,
                   "%.0f" % (row['freq'], ),
                   ha='center')
    ax[0].set_ylim(0, 100)
    ax[0].set_xlim(0.5, 12.5)
    ax[0].legend(ncol=2)
    ax[0].set_yticks([0, 10, 25, 50, 75, 90, 100])
    ax[0].set_xticks(range(1, 13))
    ax[0].grid(True)
    ax[0].set_xticklabels(calendar.month_abbr[1:])
    ax[0].set_ylabel("Frequency [%]")
    ax[0].set_title(("%.0f-%s [%s] %s\n"
                     "Frequency of %s Cloud Observation of Overcast") %
                    (df['year'].min(), datetime.datetime.now().year, station,
                     nt.sts[station]['name'],
                     datetime.datetime(2000, 1, 1, hour, 0).strftime("%I %p")))

    # Plot second one now
    obs = df[df['month'] == month]
    ax[1].bar(obs['year'].values, obs['freq'].values, fc='tan', ec='orange')
    ax[1].set_ylim(0, 100)
    ax[1].grid(True)
    ax[1].set_yticks([0, 10, 25, 50, 75, 90, 100])
    ax[1].axhline(obs['freq'].mean())
    ax[1].set_ylabel("%s Frequency [%%]" % (calendar.month_abbr[month], ))
    ax[1].set_xlim(obs['year'].min() - 2, obs['year'].max() + 2)
    return fig, df
Example #25
def plotter(fdict):
    """ Go """
    pgconn = get_dbconn('postgis')
    ctx = get_autoplot_context(fdict, get_description())
    sts = ctx['sdate']
    ets = ctx['edate']
    wfo = ctx['wfo']
    p1 = ctx['phenomenav1']
    p2 = ctx['phenomenav2']
    p3 = ctx['phenomenav3']
    p4 = ctx['phenomenav4']
    phenomena = []
    for p in [p1, p2, p3, p4]:
        if p is not None:
            phenomena.append(p[:2])
    s1 = ctx['significancev1']
    s2 = ctx['significancev2']
    s3 = ctx['significancev3']
    s4 = ctx['significancev4']
    significance = []
    for s in [s1, s2, s3, s4]:
        if s is not None:
            significance.append(s[0])

    pstr = []
    title = ""
    for i, (p, s) in enumerate(zip(phenomena, significance)):
        pstr.append("(phenomena = '%s' and significance = '%s')" % (p, s))
        if i == 2:
            title += "\n"
        title += "%s %s.%s, " % (vtec.get_ps_string(p, s), p, s)
    pstr = " or ".join(pstr)
    pstr = "(%s)" % (pstr, )

    if ctx['w'] == 'wfo':
        nt = NetworkTable("WFO")
        nt.sts['_ALL'] = {'name': 'All Offices', 'tzname': 'America/Chicago'}
        wfo_limiter = (" and wfo = '%s' ") % (wfo
                                              if len(wfo) == 3 else wfo[1:], )
        if wfo == '_ALL':
            wfo_limiter = ''
        tzname = nt.sts[wfo]['tzname']
    else:
        wfo_limiter = " and substr(ugc, 1, 2) = '%s' " % (ctx['state'], )
        tzname = 'America/Chicago'
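    # the query window below is padded by two days on each side so that events
    # whose UTC issue time falls on an adjacent local calendar date are still
    # counted after the time-zone conversion done in the query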

    df = read_sql("""
with events as (
  select wfo, min(issue at time zone %s) as localissue,
  extract(year from issue) as year,
  phenomena, significance, eventid from warnings
  where """ + pstr + """ """ + wfo_limiter + """ and
  issue >= %s and issue < %s GROUP by wfo, year, phenomena, significance,
  eventid
)

SELECT date(localissue), count(*) from events GROUP by date(localissue)
    """,
                  pgconn,
                  params=(tzname, sts - datetime.timedelta(days=2),
                          ets + datetime.timedelta(days=2)),
                  index_col='date')

    data = {}
    now = sts
    while now <= ets:
        data[now] = {'val': 0}
        now += datetime.timedelta(days=1)
    for date, row in df.iterrows():
        data[date] = {'val': row['count']}
    fig = calendar_plot(sts, ets, data, heatmap=(ctx['heatmap'] == 'yes'))
    if ctx['w'] == 'wfo':
        title2 = "NWS %s [%s]" % (nt.sts[wfo]['name'], wfo)
        if wfo == '_ALL':
            title2 = "All NWS Offices"
    else:
        title2 = state_names[ctx['state']]
    fig.text(
        0.5,
        0.95,
        ("Number of VTEC Events for %s by Local Calendar Date"
         "\nValid %s - %s for %s") %
        (title2, sts.strftime("%d %b %Y"), ets.strftime("%d %b %Y"), title),
        ha='center',
        va='center')

    return fig, df
Example #26
File: p32.py Project: nbackas/iem
def plotter(fdict):
    """ Go """
    import matplotlib
    matplotlib.use('agg')
    import matplotlib.pyplot as plt
    pgconn = psycopg2.connect(database='coop', host='iemdb', user='******')
    cursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)

    station = fdict.get('station', 'IA0200')
    year = int(fdict.get('year', 2014))
    varname = fdict.get('var', 'high')

    table = "alldata_%s" % (station[:2], )
    nt = NetworkTable("%sCLIMATE" % (station[:2], ))

    cursor.execute(
        """
    WITH data as (
     select day, high, low, (high+low)/2. as avg, sday
     from """ + table + """ where station = %s and year = %s
    ), climo as (
     SELECT valid, high, low, (high+low)/2. as avg from ncdc_climate81
     WHERE station = %s
    )
    SELECT extract(doy from d.day) as doy, d.high - c.high,
    d.low - c.low, d.avg - c.avg, d.high, c.high, d.low, c.low,
    d.avg, c.avg, d.day from
    data d JOIN climo c on
    (to_char(c.valid, 'mmdd') = d.sday) ORDER by doy ASC
    """, (station, year, nt.sts[station]['ncdc81']))

    rows = []
    for row in cursor:
        rows.append(
            dict(jday=row[0],
                 high_diff=row[1],
                 low_diff=row[2],
                 avg_diff=row[3],
                 high=row[4],
                 climate_high=row[5],
                 low=row[6],
                 climate_low=row[7],
                 avg=row[8],
                 climate_avg=row[9],
                 day=row[10]))
    df = pd.DataFrame(rows)
    (fig, ax) = plt.subplots(1, 1, sharex=True)
    diff = df[varname + '_diff']
    bars = ax.bar(df['jday'], diff, fc='b', ec='b')
    for i, bar in enumerate(bars):
        if diff[i] > 0:
            bar.set_facecolor('r')
            bar.set_edgecolor('r')
    ax.grid(True)
    ax.set_ylabel("Temperature Departure $^\circ$F")
    ax.set_title(("%s %s\nYear %s %s Departure") %
                 (station, nt.sts[station]['name'], year, PDICT[varname]))
    ax.set_xticks((1, 32, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335))
    ax.set_xticklabels(calendar.month_abbr[1:])
    ax.set_xlim(0, 366)

    return fig, df
Example #27
def get_data(ts):
    """ Get the data for this timestamp """
    iemcursor = IEM.cursor()
    cursor = ISUAG.cursor(cursor_factory=psycopg2.extras.DictCursor)
    qcdict = loadqc()
    nt = NetworkTable("ISUSM", only_online=False)
    data = {"type": "FeatureCollection", "features": []}
    # Fetch the daily values
    iemcursor.execute(
        """
    SELECT id, pday, max_tmpf, min_tmpf from summary s JOIN stations t
    on (t.iemid = s.iemid) WHERE t.network = 'ISUSM' and day = %s
    """,
        (ts.date(),),
    )
    daily = {}
    for row in iemcursor:
        daily[row[0]] = {
            "pday": row[1],
            "max_tmpf": row[2],
            "min_tmpf": row[3],
        }
    cursor.execute(
        """
    SELECT h.station,
        h.encrh_avg,
        coalesce(m.rh_avg_qc, h.rh_qc) as rh,
        h.rain_mm_tot,
        etalfalfa,
        battv_min,
        coalesce(m.slrkj_tot_qc * 3600 / 1000000, h.slrmj_tot_qc) as slrmj_tot,
        coalesce(m.tair_c_avg, h.tair_c_avg) as tair_c_avg,
        coalesce(m.tsoil_c_avg_qc, h.tsoil_c_avg_qc) as tsoil_c_avg_qc,
        coalesce(m.t12_c_avg_qc, h.t12_c_avg_qc) as t12_c_avg_qc,
        coalesce(m.t24_c_avg_qc, h.t24_c_avg_qc) as t24_c_avg_qc,
        coalesce(m.t50_c_avg_qc, h.t50_c_avg_qc) as t50_c_avg_qc,
        coalesce(m.calcvwc12_avg_qc, h.calc_vwc_12_avg_qc)
            as calc_vwc_12_avg_qc,
        coalesce(m.calcvwc24_avg_qc, h.calc_vwc_24_avg_qc)
            as calc_vwc_24_avg_qc,
        coalesce(m.calcvwc50_avg_qc, h.calc_vwc_50_avg_qc)
            as calc_vwc_50_avg_qc,
        coalesce(m.ws_mph_max, h.ws_mph_max) as ws_mph_max,
        coalesce(m.winddir_d1_wvt, h.winddir_d1_wvt) as winddir_d1_wvt,
        coalesce(m.ws_mph_s_wvt * 0.447, h.ws_mps_s_wvt) as ws_mps_s_wvt
    from sm_hourly h LEFT JOIN sm_minute m on (h.station = m.station and
    h.valid = m.valid)
    where h.valid = %s
    """,
        (ts,),
    )
    for row in cursor:
        sid = row["station"]
        if sid not in nt.sts:
            continue
        lon = nt.sts[sid]["lon"]
        lat = nt.sts[sid]["lat"]
        q = qcdict.get(sid, {})
        data["features"].append(
            {
                "type": "Feature",
                "id": sid,
                "properties": {
                    "encrh_avg": (
                        "%s%%" % safe(row["encrh_avg"], 1)
                        if row["encrh_avg"] is not None
                        and row["encrh_avg"] > 0
                        else "M"
                    ),
                    "rh": "%s%%" % (safe(row["rh"], 0),),
                    "hrprecip": (
                        safe_p(row["rain_mm_tot"])
                        if not q.get("precip", False)
                        else "M"
                    ),
                    "et": safe_p(row["etalfalfa"]),
                    "bat": safe(row["battv_min"], 2),
                    "radmj": safe(row["slrmj_tot"], 2),
                    "tmpf": safe_t(row["tair_c_avg"]),
                    "high": safe_t(
                        daily.get(sid, {}).get("max_tmpf", None), "F"
                    ),
                    "low": safe_t(
                        daily.get(sid, {}).get("min_tmpf", None), "F"
                    ),
                    "pday": (
                        safe(daily.get(sid, {}).get("pday", None), 2)
                        if not q.get("precip", False)
                        else "M"
                    ),
                    "soil04t": (
                        safe_t(row["tsoil_c_avg_qc"])
                        if not q.get("soil4", False)
                        else "M"
                    ),
                    "soil12t": (
                        safe_t(row["t12_c_avg_qc"])
                        if not q.get("soil12", False)
                        else "M"
                    ),
                    "soil24t": (
                        safe_t(row["t24_c_avg_qc"])
                        if not q.get("soil24", False)
                        else "M"
                    ),
                    "soil50t": (
                        safe_t(row["t50_c_avg_qc"])
                        if not q.get("soil50", False)
                        else "M"
                    ),
                    "soil12m": (
                        safe_m(row["calc_vwc_12_avg_qc"])
                        if not q.get("soil12", False)
                        else "M"
                    ),
                    "soil24m": (
                        safe_m(row["calc_vwc_24_avg_qc"])
                        if not q.get("soil24", False)
                        else "M"
                    ),
                    "soil50m": (
                        safe_m(row["calc_vwc_50_avg_qc"])
                        if not q.get("soil50", False)
                        else "M"
                    ),
                    "gust": safe(row["ws_mph_max"], 1),
                    "wind": ("%s@%.0f")
                    % (
                        drct2text(row["winddir_d1_wvt"]),
                        row["ws_mps_s_wvt"] * 2.23,
                    ),
                    "name": nt.sts[sid]["name"],
                },
                "geometry": {"type": "Point", "coordinates": [lon, lat]},
            }
        )
    return json.dumps(data)
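
A hypothetical consumer of get_data(): the timestamp is a placeholder and must
line up with an hourly record (and match the valid column's timezone handling),
and the module-level database connections and helpers (IEM, ISUAG, safe, ...)
are assumed to already be set up as in the rest of the file.

import datetime
import json

geojson = json.loads(get_data(datetime.datetime(2021, 7, 1, 12, 0)))
for feature in geojson['features']:
    props = feature['properties']
    print(feature['id'], props['name'], props['tmpf'], props['soil04t'])
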
Example #28
File: p32.py Project: iny/iem
def plotter(fdict):
    """ Go """
    import matplotlib
    matplotlib.use('agg')
    import matplotlib.pyplot as plt
    import matplotlib.dates as mdates
    pgconn = psycopg2.connect(database='coop', host='iemdb', user='******')
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['station']
    year = ctx['year']
    varname = ctx['var']
    how = ctx['how']

    table = "alldata_%s" % (station[:2],)
    nt = NetworkTable("%sCLIMATE" % (station[:2],))

    df = read_sql("""
    WITH data as (
     select day, high, low, (high+low)/2. as temp, sday
     from """+table+""" where station = %s and year = %s
    ), climo as (
     SELECT sday, avg(high) as avg_high, avg(low) as avg_low,
     avg((high+low)/2.) as avg_temp, stddev(high) as stddev_high,
     stddev(low) as stddev_low, stddev((high+low)/2.) as stddev_temp
     from """ + table + """ WHERE station = %s GROUP by sday
    )
    SELECT day,
    d.high - c.avg_high as high_diff,
    (d.high - c.avg_high) / c.stddev_high as high_sigma,
    d.low - c.avg_low as low_diff,
    (d.low - c.avg_low) / c.stddev_low as low_sigma,
    d.temp - c.avg_temp as avg_diff,
    (d.temp - c.avg_temp) / c.stddev_temp as avg_sigma,
    d.high,
    c.avg_high,
    d.low,
    c.avg_low,
    d.temp,
    c.avg_temp from
    data d JOIN climo c on
    (c.sday = d.sday) ORDER by day ASC
    """, pgconn, params=(station, year, station),
                  index_col=None)

    (fig, ax) = plt.subplots(1, 1)
    diff = df[varname+'_' + how]
    bars = ax.bar(df['day'].values, diff,  fc='b', ec='b', align='center')
    for i, bar in enumerate(bars):
        if diff[i] > 0:
            bar.set_facecolor('r')
            bar.set_edgecolor('r')
    ax.grid(True)
    if how == 'diff':
        ax.set_ylabel("Temperature Departure $^\circ$F")
    else:
        ax.set_ylabel("Temperature Std Dev Departure ($\sigma$)")
    ax.set_title(("%s %s\nYear %s %s Departure"
                  ) % (station, nt.sts[station]['name'], year,
                       PDICT[varname]))
    ax.xaxis.set_major_formatter(mdates.DateFormatter('%b'))
    ax.xaxis.set_major_locator(mdates.DayLocator(1))

    return fig, df
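
The *_sigma columns computed in the SQL above are plain z-scores: the day's
departure from the long-term mean for that calendar day (sday) divided by the
long-term standard deviation. A toy pandas equivalent with fabricated numbers:

import pandas as pd

toy = pd.DataFrame({'high': [88, 91, 79],
                    'avg_high': [84.2, 84.5, 84.7],
                    'stddev_high': [7.1, 7.0, 6.9]})
toy['high_sigma'] = (toy['high'] - toy['avg_high']) / toy['stddev_high']
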
Example #29
def get_ctx(fdict):
    """Get the plotting context """
    pgconn = psycopg2.connect(database='coop', host='iemdb', user='******')
    cursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['station']
    date = ctx['date']

    table = "alldata_%s" % (station[:2], )
    nt = NetworkTable("%sCLIMATE" % (station[:2], ))

    cursor.execute(
        """
    SELECT year,  extract(doy from day) as doy, precip
    from """ + table + """ where station = %s and precip is not null
    """, (station, ))

    baseyear = nt.sts[station]['archive_begin'].year - 1
    years = (datetime.datetime.now().year - baseyear) + 1

    data = np.zeros((years, 367 * 2))
    # each row of "data" holds two consecutive calendar years side by side,
    # e.g. row 0 -> 1892 | 1893, row 1 -> 1893 | 1894, row 2 -> 1894 | 1895,
    # so a trailing window ending early in a year can reach back into the
    # prior year without wrapping around the array edge

    for row in cursor:
        # left hand
        data[int(row['year'] - baseyear), int(row['doy'])] = row['precip']
        # right hand
        data[int(row['year'] - baseyear - 1),
             int(row['doy']) + 366] = row['precip']

    _temp = date.replace(year=2000)
    _doy = int(_temp.strftime("%j"))
    xticks = []
    xticklabels = []
    for i in range(-366, 0):
        ts = _temp + datetime.timedelta(days=i)
        if ts.day == 1:
            xticks.append(i)
            xticklabels.append(ts.strftime("%b"))
    ranks = []
    totals = []
    maxes = []
    avgs = []
    myyear = date.year - baseyear - 1
    for days in range(1, 366):
        idx0 = _doy + 366 - days
        idx1 = _doy + 366
        sums = np.sum(data[:, idx0:idx1], 1)
        thisyear = sums[myyear]
        sums = np.sort(sums)
        a = np.digitize([
            thisyear,
        ], sums)
        rank = years - a[0] + 1
        ranks.append(rank)
        totals.append(thisyear)
        maxes.append(sums[-1])
        avgs.append(np.nanmean(sums))

    ctx['sdate'] = date - datetime.timedelta(days=360)
    ctx['title'] = "%s %s" % (station, nt.sts[station]['name'])
    ctx['subtitle'] = ("Trailing Days Precipitation Rank [%s-%s] to %s") % (
        baseyear + 2, datetime.datetime.now().year, date.strftime("%-d %b %Y"))

    ctx['ranks'] = ranks
    ctx['totals'] = totals
    ctx['maxes'] = maxes
    ctx['avgs'] = avgs
    ctx['xticks'] = xticks
    ctx['xticklabels'] = xticklabels
    ctx['station'] = station
    return ctx
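
The rank arithmetic above (years - digitize(...) + 1) is equivalent to counting
how many years had a strictly larger trailing total and adding one; a compact
numpy sketch on fabricated data:

import numpy as np

fake = np.random.random((30, 367 * 2))  # 30 "years" in the doubled-calendar layout
_doy, days, myyear = 200, 30, 17        # arbitrary end day, window length, year row
sums = np.sum(fake[:, _doy + 366 - days:_doy + 366], 1)
rank = int(np.sum(sums > sums[myyear])) + 1  # 1 == wettest trailing period on record
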
Example #30
def plotter(fdict):
    """ Go """
    import matplotlib
    matplotlib.use('agg')
    import matplotlib.pyplot as plt
    import matplotlib.patheffects as PathEffects
    pgconn = psycopg2.connect(database='coop', host='iemdb', user='******')
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['station']
    varname = ctx['varname']
    year = ctx['year']
    network = "%sCLIMATE" % (station[:2], )
    nt = NetworkTable(network)

    table = "alldata_%s" % (station[:2], )

    df = read_sql("""
    with obs as
    (select month, year, high, lag(high) OVER (ORDER by day ASC) as lhigh,
    low, lag(low) OVER (ORDER by day ASC) as llow from """ + table + """
    where station = %s)

    SELECT year, month,
    sum(case when high > lhigh then 1 else 0 end)::numeric as high_greater,
    sum(case when high = lhigh then 1 else 0 end)::numeric as high_unch,
    sum(case when high < lhigh then 1 else 0 end)::numeric as high_lower,
    sum(case when low > llow then 1 else 0 end)::numeric as low_greater,
    sum(case when low = llow then 1 else 0 end)::numeric as low_unch,
    sum(case when low < llow then 1 else 0 end)::numeric as low_lower
    from obs GROUP by year, month ORDER by year, month
    """,
                  pgconn,
                  params=(station, ),
                  index_col=None)
    gdf = df.groupby('month').sum()
    gyear = df[df['year'] == year].groupby('month').sum()
    increase = gdf[varname + '_greater']
    nochange = gdf[varname + '_unch']
    decrease = gdf[varname + '_lower']
    increase2 = gyear[varname + '_greater']
    nochange2 = gyear[varname + '_unch']
    decrease2 = gyear[varname + '_lower']

    (fig, ax) = plt.subplots(1, 1, figsize=(8, 6))

    total = decrease + nochange + increase
    total2 = decrease2 + nochange2 + increase2

    ax.bar(total.index.values - 0.2,
           decrease / total * 100.0,
           fc='b',
           label='Decrease',
           width=0.4,
           align='center')
    ax.bar(total2.index.values + 0.2,
           decrease2 / total2 * 100.0,
           fc='lightblue',
           width=0.4,
           label="%s ''" % (year, ),
           align='center')
    ax.bar(total.index.values - 0.2,
           nochange / total * 100.0,
           bottom=(decrease / total * 100.0),
           fc='g',
           label="No Change",
           width=0.4,
           align='center')
    ax.bar(total2.index.values + 0.2,
           nochange2 / total2 * 100.0,
           bottom=(decrease2 / total2 * 100.0),
           fc='lightgreen',
           width=0.4,
           label="%s ''" % (year, ),
           align='center')
    ax.bar(total.index.values - 0.2,
           increase / total * 100.0,
           bottom=(decrease + nochange) / total * 100.0,
           fc='r',
           width=0.4,
           label="Increase",
           align='center')
    ax.bar(total2.index.values + 0.2,
           increase2 / total2 * 100.0,
           bottom=(decrease2 + nochange2) / total2 * 100.0,
           fc='pink',
           width=0.4,
           label="%s ''" % (year, ),
           align='center')

    offset = -0.2
    for _df in [gdf, gyear]:
        increase = _df[varname + '_greater']
        nochange = _df[varname + '_unch']
        decrease = _df[varname + '_lower']
        total = decrease + nochange + increase
        for i, _ in _df.iterrows():
            txt = ax.text(i + offset,
                          decrease[i] / total[i] * 100.0 - 5,
                          "%.0f" % (decrease[i] / total[i] * 100.0),
                          ha='center',
                          fontsize=10)
            txt.set_path_effects(
                [PathEffects.withStroke(linewidth=2, foreground="white")])
            ymid = (decrease[i] + (nochange[i] / 2.)) / total[i] * 100.
            txt = ax.text(i + offset,
                          ymid,
                          "%.0f" % (nochange[i] / total[i] * 100.0),
                          ha='center',
                          va='center',
                          fontsize=10)
            txt.set_path_effects(
                [PathEffects.withStroke(linewidth=2, foreground="white")])
            txt = ax.text(i + offset,
                          (decrease[i] + nochange[i]) / total[i] * 100.0 + 2,
                          "%.0f" % (increase[i] / total[i] * 100.0),
                          ha='center',
                          fontsize=10)
            txt.set_path_effects(
                [PathEffects.withStroke(linewidth=2, foreground="white")])
        offset += 0.4

    ax.set_xticks(np.arange(1, 13))
    ax.set_xticklabels(calendar.month_abbr[1:])
    ax.legend(ncol=3, fontsize=12, loc=9, framealpha=1)
    ax.set_xlim(0.5, 12.5)
    ax.set_ylim(0, 100)
    ax.set_yticks([0, 5, 10, 25, 50, 75, 90, 95, 100])
    ax.set_ylabel("Percentage of Days [%]")
    ax.set_xlabel(("Dark Shades are long term averages, lighter are %s "
                   "actuals") % (year, ))
    ax.set_title(("%s [%s]\nDay to Day %s Temperature Change") %
                 (nt.sts[station]['name'], station, varname.title()))

    return fig, df