Example #1
def plotter(fdict):
    """ Go """
    pgconn = get_dbconn("coop")
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx["station"]
    binsize = ctx["binsize"]
    month = ctx["month"]
    year = ctx.get("year")
    table = "alldata_%s" % (station[:2], )
    nt = network.Table("%sCLIMATE" % (station[:2], ))
    if month == "all":
        months = range(1, 13)
    elif month == "fall":
        months = [9, 10, 11]
    elif month == "winter":
        months = [12, 1, 2]
    elif month == "spring":
        months = [3, 4, 5]
    elif month == "summer":
        months = [6, 7, 8]
    else:
        ts = datetime.datetime.strptime("2000-" + month + "-01", "%Y-%b-%d")
        # make sure it is length two for the trick below in SQL
        months = [ts.month, 999]
    ddf = read_sql(
        f"SELECT high, low, year, month from {table} WHERE station = %s "
        "and year > 1892 and high >= low and month in %s",
        pgconn,
        params=(station, tuple(months)),
        index_col=None,
    )
    if ddf.empty:
        raise NoDataFound("No Data Found.")

    bins = np.arange(-40, 121, binsize)

    hist, xedges, yedges = np.histogram2d(ddf["low"], ddf["high"], bins)
    rows = []
    for i, xedge in enumerate(xedges[:-1]):
        for j, yedge in enumerate(yedges[:-1]):
            rows.append(dict(high=yedge, low=xedge, count=hist[i, j]))
    df = pd.DataFrame(rows)
    ab = nt.sts[station]["archive_begin"]
    if ab is None:
        raise NoDataFound("Unknown station metadata.")
    years = float(datetime.datetime.now().year - ab.year)
    hist = np.ma.array(hist / years)
    hist.mask = np.where(hist < (1.0 / years), True, False)
    ar = np.argwhere(hist.max() == hist)

    (fig, ax) = plt.subplots(1, 1, figsize=(8, 6))
    res = ax.pcolormesh(xedges, yedges, hist.T)
    fig.colorbar(res, label="Days per Year")
    ax.grid(True)
    ax.set_title(("%s [%s]\n"
                  "Daily High vs Low Temp Histogram (month=%s)") %
                 (nt.sts[station]["name"], station, month.upper()))
    ax.set_ylabel(r"High Temperature $^{\circ}\mathrm{F}$")
    ax.set_xlabel(r"Low Temperature $^{\circ}\mathrm{F}$")

    xmax = ar[0][0]
    ymax = ar[0][1]
    ax.text(
        0.65,
        0.15,
        ("Largest Frequency: %.1f days\n"
         "High: %.0f-%.0f Low: %.0f-%.0f") % (
             hist[xmax, ymax],
             yedges[ymax],
             yedges[ymax + 1],
             xedges[xmax],
             xedges[xmax + 1],
         ),
        ha="center",
        va="center",
        transform=ax.transAxes,
        bbox=dict(color="white"),
    )
    ax.axhline(32, linestyle="-", lw=1, color="k")
    ax.text(
        120,
        32,
        r"32$^\circ$F",
        va="center",
        ha="right",
        color="white",
        bbox=dict(color="k"),
        fontsize=8,
    )
    ax.axvline(32, linestyle="-", lw=1, color="k")
    ax.text(
        32,
        117,
        r"32$^\circ$F",
        va="top",
        ha="center",
        color="white",
        bbox=dict(facecolor="k", edgecolor="none"),
        fontsize=8,
    )
    if year:
        label = str(year)
        if month == "winter":
            ddf["year"] = (ddf[((ddf["month"] == 1) |
                                (ddf["month"] == 2))]["year"] - 1)
            label = "Dec %s - Feb %s" % (year, year + 1)
        ddf2 = ddf[ddf["year"] == year]
        ax.scatter(
            ddf2["low"],
            ddf2["high"],
            marker="x",
            label=label,
            edgecolor="white",
            facecolor="red",
        )
        ax.legend()

    return fig, df
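
The month-selector chain at the top of this plotter (and repeated in several later examples) maps the form value to a list of month numbers, padding single months with a dummy 999 so the tuple handed to psycopg2 for the SQL "month in %s" clause always has at least two members. A minimal consolidation sketch, using the hypothetical helper name month_filter (not part of the IEM code):

# Sketch only: hypothetical helper consolidating the repeated month-selection logic.
import datetime

SEASONS = {
    "fall": [9, 10, 11],
    "winter": [12, 1, 2],
    "spring": [3, 4, 5],
    "summer": [6, 7, 8],
}


def month_filter(month):
    """Return a tuple of month numbers usable with a SQL 'month in %s' clause."""
    if month == "all":
        return tuple(range(1, 13))
    if month in SEASONS:
        return tuple(SEASONS[month])
    # single month given as an abbreviation, e.g. "jul"
    ts = datetime.datetime.strptime("2000-" + month + "-01", "%Y-%b-%d")
    # pad with a dummy value so the rendered SQL tuple never has a single element
    return (ts.month, 999)


print(month_filter("jul"))   # (7, 999)
print(month_filter("fall"))  # (9, 10, 11)
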
Example #2
def plotter(fdict):
    """ Go """
    pgconn = get_dbconn("asos")
    cursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx["zstation"]
    hours = ctx["hours"]
    interval = ctx["interval"]
    varname = ctx["var"]
    if interval > 10 or interval < 0.1:
        raise NoDataFound(
            "Invalid interval provided; must be a positive number between 0.1 and 10."
        )

    cursor.execute(
        """
    WITH one as (
        select valid, """
        + varname
        + """ as t from alldata where
        station = %s and """
        + varname
        + """ is not null
        ),
        two as (SELECT valid + '%s hours'::interval as v, t from one
        )

    SELECT extract(week from one.valid), two.t - one.t
    from one JOIN two on (one.valid = two.v)
    """,
        (station, hours),
    )
    weeks = []
    deltas = []
    for row in cursor:
        weeks.append(row[0])
        deltas.append(float(row[1]))

    sts = datetime.datetime(2012, 1, 1)
    xticks = []
    for i in range(1, 13):
        ts = sts.replace(month=i)
        xticks.append(int(ts.strftime("%j")))

    # We want bins centered on zero
    bins = compute_bins(interval)

    hist, xedges, yedges = np.histogram2d(weeks, deltas, [range(0, 54), bins])
    ab = ctx["_nt"].sts[station]["archive_begin"]
    if ab is None:
        raise NoDataFound("Unknown station metadata.")
    years = float(datetime.datetime.now().year - ab.year)
    hist = np.ma.array(hist / years / 7.0)
    hist.mask = np.where(hist < (1.0 / years), True, False)

    (fig, ax) = plt.subplots(1, 1)
    res = ax.pcolormesh((xedges - 1) * 7, yedges, hist.transpose())
    fig.colorbar(res, label="Hours per Day")
    ax.grid(True)
    ax.set_title(
        ("%s [%s] Histogram\n(bin=%s) of %s Hour %s Change")
        % (
            ctx["_nt"].sts[station]["name"],
            station,
            interval,
            hours,
            PDICT[varname],
        )
    )
    ax.set_ylabel("%s Change" % (PDICT[varname],))

    ax.set_xticks(xticks)
    ax.set_xticklabels(calendar.month_abbr[1:])
    ax.set_xlim(0, 366)

    rng = max([max(deltas), 0 - min(deltas)])
    ax.set_ylim(0 - rng * 1.3, rng * 1.3)

    return fig
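
Both plotters above turn the raw 2-D histogram into an average-per-year rate and mask bins that occur less than once per year before drawing them with pcolormesh. A self-contained sketch of that normalize-and-mask step on synthetic data (values and bin width are arbitrary):

# Sketch: per-year normalization and masking of a 2-D histogram (synthetic data).
import numpy as np
import matplotlib.pyplot as plt

rng = np.random.default_rng(42)
lows = rng.normal(50, 15, 20000)
highs = lows + rng.normal(20, 8, 20000)
years = 50.0  # length of the hypothetical period of record

bins = np.arange(-40, 121, 5)
hist, xedges, yedges = np.histogram2d(lows, highs, bins)
hist = np.ma.array(hist / years)      # average days per year in each bin
hist.mask = hist < (1.0 / years)      # hide bins seen less than once per year

fig, ax = plt.subplots()
res = ax.pcolormesh(xedges, yedges, hist.T)  # transpose so lows run along x
fig.colorbar(res, label="Days per Year")
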
Example #3
def plotter(fdict):
    """ Go """
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx["station"]
    if station not in ctx["_nt"].sts:  # This is needed.
        raise NoDataFound("Unknown station metadata.")
    varname = ctx["var"]
    hour = int(ctx["hour"])
    month = ctx["month"]
    level = ctx["level"]
    agg = ctx["agg"]
    offset = 0
    if month == "all":
        months = range(1, 13)
    elif month == "fall":
        months = [9, 10, 11]
    elif month == "winter":
        months = [12, 1, 2]
        offset = 32
    elif month == "spring":
        months = [3, 4, 5]
    elif month == "mjj":
        months = [5, 6, 7]
    elif month == "summer":
        months = [6, 7, 8]
    else:
        ts = datetime.datetime.strptime("2000-" + month + "-01", "%Y-%b-%d")
        # single month; a one-element tuple still works for the SQL IN clause below
        months = [ts.month]

    name = ctx["_nt"].sts[station]["name"]
    stations = [station]
    if station.startswith("_"):
        name = ctx["_nt"].sts[station]["name"].split("--")[0]
        stations = (
            ctx["_nt"].sts[station]["name"].split("--")[1].strip().split(" ")
        )
    pgconn = get_dbconn("postgis")

    if varname in ["tmpc", "dwpc", "height", "smps"]:
        leveltitle = " @ %s hPa" % (level,)
        dfin = read_sql(
            """
            select extract(year from f.valid + '%s days'::interval) as year,
            avg("""
            + varname
            + """) as avg_"""
            + varname
            + """,
            min("""
            + varname
            + """) as min_"""
            + varname
            + """,
            max("""
            + varname
            + """) as max_"""
            + varname
            + """,
            count(*)
            from raob_profile p JOIN raob_flights f on (p.fid = f.fid)
            WHERE f.station in %s and p.pressure = %s and
            extract(hour from f.valid at time zone 'UTC') = %s and
            extract(month from f.valid) in %s
            GROUP by year ORDER by year ASC
        """,
            pgconn,
            params=(offset, tuple(stations), level, hour, tuple(months)),
            index_col="year",
        )
    else:
        leveltitle = ""
        dfin = read_sql(
            """
            select extract(year from f.valid + '%s days'::interval) as year,
            count(*),
            avg("""
            + varname
            + """) as avg_"""
            + varname
            + """,
            min("""
            + varname
            + """) as min_"""
            + varname
            + """,
            max("""
            + varname
            + """) as max_"""
            + varname
            + """
            from raob_flights f
            WHERE f.station in %s and
            extract(hour from f.valid at time zone 'UTC') = %s and
            extract(month from f.valid) in %s
            GROUP by year ORDER by year ASC
        """,
            pgconn,
            params=(offset, tuple(stations), hour, tuple(months)),
            index_col="year",
        )
    # need quorums
    df = dfin[dfin["count"] > ((len(months) * 28) * 0.75)]
    if df.empty:
        raise NoDataFound("No data was found!")
    colname = "%s_%s" % (agg, varname)
    fig, ax = plt.subplots(1, 1)
    avgv = df[colname].mean()
    bars = ax.bar(df.index.values, df[colname], align="center")
    for i, _bar in enumerate(bars):
        val = df.iloc[i][colname]
        if val < avgv:
            _bar.set_color("blue")
        else:
            _bar.set_color("red")
    ax.set_xlim(df.index.min() - 1, df.index.max() + 1)
    rng = df[colname].max() - df[colname].min()
    ax.set_ylim(df[colname].min() - rng * 0.1, df[colname].max() + rng * 0.1)
    ax.axhline(avgv, color="k")
    ax.text(df.index.values[-1] + 2, avgv, "Avg:\n%.1f" % (avgv,))
    ax.set_xlabel("Year")
    ax.set_ylabel("%s %s%s" % (PDICT4[agg], PDICT3[varname], leveltitle))
    plt.gcf().text(
        0.5,
        0.9,
        ("%s %s %02i UTC Sounding\n" "%s %s%s over %s")
        % (
            station,
            name,
            hour,
            PDICT4[agg],
            PDICT3[varname],
            leveltitle,
            MDICT[month],
        ),
        ha="center",
        va="bottom",
    )
    ax.grid(True)

    return fig, df
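
The bar styling above colors each year red or blue depending on whether its value falls above or below the long-term mean, then marks the mean with a horizontal line. A minimal matplotlib sketch of the same idea on synthetic yearly values:

# Sketch: color bars by their position relative to the long-term mean (synthetic data).
import numpy as np
import matplotlib.pyplot as plt

years = np.arange(1980, 2020)
vals = np.random.default_rng(0).normal(10.0, 2.0, years.size)
avgv = vals.mean()

fig, ax = plt.subplots()
bars = ax.bar(years, vals, align="center")
for val, _bar in zip(vals, bars):
    _bar.set_color("red" if val >= avgv else "blue")
ax.axhline(avgv, color="k")
ax.text(years[-1] + 2, avgv, "Avg:\n%.1f" % (avgv,))
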
Example #4
File: p39.py  Project: smartparrot/iem
def plotter(fdict):
    """ Go """
    pgconn = get_dbconn("coop")
    cursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)

    ctx = get_autoplot_context(fdict, get_description())
    station = ctx["station"]
    year = ctx["year"]
    month = ctx["month"]
    effective_date = ctx["date"]
    table = "alldata_%s" % (station[:2], )

    oldmonth = datetime.date(year, month, 1)
    sts = datetime.date(effective_date.year, effective_date.month, 1)
    ets = (sts + datetime.timedelta(days=35)).replace(day=1)
    days = int((ets - sts).days)

    # beat month
    cursor.execute(
        """
        SELECT extract(day from day), (high+low)/2. from
        """ + table + """ WHERE station = %s and year = %s and month = %s
        ORDER by day ASC
    """,
        (station, year, month),
    )
    if cursor.rowcount == 0:
        raise NoDataFound("No Data Found.")

    prevmonth = []
    for row in cursor:
        prevmonth.append(float(row[1]))

    # build history
    cursor.execute(
        """
        SELECT year, day, (high+low)/2. from
        """ + table + """ WHERE station = %s and month = %s and
        extract(day from day) <= %s and day < %s
        ORDER by day ASC
    """,
        (station, effective_date.month, days, ets),
    )

    for i, row in enumerate(cursor):
        if i == 0:
            baseyear = row[0]
            data = np.ma.ones((effective_date.year - row[0] + 1, days)) * -99
        data[row[0] - baseyear, row[1].day - 1] = row[2]

    # overwrite our current month's data
    currentdata = data[-1, :effective_date.day - 1]
    for i in range(np.shape(data)[0] - 1):
        data[i, :effective_date.day - 1] = currentdata
    data.mask = data < -98
    avgs = np.ma.zeros(np.shape(data))
    days = np.shape(data)[1]
    prevavg = []
    for i in range(days):
        avgs[:, i] = np.sum(data[:, :i + 1], 1) / float(i + 1)
        prevavg.append(np.sum(prevmonth[:i + 1]) / float(i + 1))
    avgs.mask = data.mask

    (fig, ax) = plt.subplots(1, 1)

    beats = 0
    for yr in range(np.shape(data)[0] - 1):
        if avgs[yr, -1] > prevavg[-1]:
            beats += 1
        ax.plot(np.arange(1, days + 1), avgs[yr, :], zorder=1, color="tan")

    lv = avgs[-1, effective_date.day - 1]
    if np.ma.is_masked(lv):
        lv = avgs[-1, effective_date.day - 2]
    ax.plot(
        np.arange(1, effective_date.day),
        avgs[-1, :effective_date.day - 1],
        zorder=3,
        lw=2,
        color="brown",
        label=r"%s %s, %.2f$^\circ$F" %
        (calendar.month_abbr[effective_date.month], effective_date.year, lv),
    )
    # For historical, we can additionally plot the month values
    today = datetime.date.today().replace(day=1)
    if effective_date < today:
        ax.plot(
            np.arange(1, days + 1),
            avgs[-1, :],
            lw=2,
            color="brown",
            linestyle="-.",
            zorder=2,
            label=r"%s %s Final, %.2f$^\circ$F" % (
                calendar.month_abbr[effective_date.month],
                effective_date.year,
                avgs[-1, -1],
            ),
        )
    ax.plot(
        np.arange(1,
                  len(prevavg) + 1),
        prevavg,
        lw=2,
        color="k",
        zorder=3,
        label=r"%s %s, %.2f$^\circ$F" %
        (calendar.month_abbr[oldmonth.month], oldmonth.year, prevavg[-1]),
    )

    ax.set_title(("[%s] %s scenarios for %s\n"
                  "1-%s [%s] + %s-%s [%s-%s] beats %s %s %s/%s (%.1f%%)") % (
                      station,
                      ctx["_nt"].sts[station]["name"],
                      effective_date.strftime("%b %Y"),
                      effective_date.day,
                      effective_date.year,
                      effective_date.day + 1,
                      days,
                      baseyear,
                      effective_date.year - 1,
                      calendar.month_abbr[oldmonth.month],
                      oldmonth.year,
                      beats,
                      np.shape(data)[0] - 1,
                      beats / float(np.shape(data)[0] - 1) * 100.0,
                  ))
    ax.set_xlim(1, days)
    ax.set_ylabel(r"Month to Date Average Temp $^\circ$F")
    ax.set_xlabel("Day of Month")
    ax.grid(True)
    ax.legend(loc="best", fontsize=10)

    return fig
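
The month-to-date averages above are assembled in a loop of partial sums over the day axis. An equivalent vectorized formulation (same arithmetic, not the author's code) divides a cumulative sum by the running day count:

# Sketch: month-to-date running mean via cumulative sums (synthetic data).
import numpy as np

rng = np.random.default_rng(1)
data = rng.normal(40.0, 10.0, (30, 31))   # (years, days) of daily mean temperatures
daycount = np.arange(1, data.shape[1] + 1)

# running mean along the day axis; same as np.sum(data[:, :i + 1], 1) / (i + 1)
avgs = np.cumsum(data, axis=1) / daycount

# the single previous-month trace works the same way in one dimension
prevmonth = rng.normal(35.0, 10.0, 31)
prevavg = np.cumsum(prevmonth) / daycount
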
Example #5
File: p202.py  Project: smartparrot/iem
def plotter(fdict):
    """ Go """
    pgconn = get_dbconn("asos")

    ctx = get_autoplot_context(fdict, get_description())
    station = ctx["zstation"]
    h1 = int(ctx["h1"])
    h2 = int(ctx["h2"])
    varname = ctx["v"]

    tzname = ctx["_nt"].sts[station]["tzname"]

    df = read_sql(
        """
    WITH data as (
        SELECT valid at time zone %s + '10 minutes'::interval as localvalid,
        date_trunc(
             'hour', valid at time zone %s  + '10 minutes'::interval) as v,
        tmpf, dwpf, sknt, drct, alti, relh, random() as r,
        coalesce(mslp, alti * 33.8639, 1013.25) as slp
        from alldata where station = %s and report_type = 2
        and extract(hour from valid at time zone %s + '10 minutes'::interval)
        in (%s, %s)),
     agg as (
          select *, extract(hour from v) as hour,
          rank() OVER (PARTITION by v ORDER by localvalid ASC, r ASC) from data
     )

     SELECT *, date(
         case when hour = %s
         then date(v - '1 day'::interval)
         else date(v) end) from agg WHERE rank = 1
    """,
        pgconn,
        params=(
            tzname,
            tzname,
            station,
            tzname,
            h1,
            h2,
            h2 if h2 < h1 else -1,
        ),
        index_col=None,
    )
    if df.empty:
        raise NoDataFound("No data was found.")
    if varname == "q":
        df["pressure"] = mcalc.add_height_to_pressure(
            df["slp"].values * units("millibars"),
            ctx["_nt"].sts[station]["elevation"] * units("m"),
        ).to(units("millibar"))
        # compute mixing ratio
        df["q"] = (mcalc.mixing_ratio_from_relative_humidity(
            df["relh"].values * units("percent"),
            df["tmpf"].values * units("degF"),
            df["pressure"].values * units("millibars"),
        ) * 1000.0)

    # pivot
    df = df.pivot(index="date", columns="hour", values=varname).reset_index()
    df = df.dropna()
    df["doy"] = pd.to_numeric(pd.to_datetime(df["date"]).dt.strftime("%j"))
    df["year"] = pd.to_datetime(df["date"]).dt.year
    df["week"] = (df["doy"] / 7).astype(int)
    df["delta"] = df[h2] - df[h1]

    (fig, ax) = plt.subplots(1, 1)
    if ctx["opt"] == "no":
        ax.set_xlabel("Plotted lines are smoothed over %.0f days" %
                      (ctx["smooth"], ))
    ax.set_ylabel(
        "%s %s Difference" %
        (PDICT[varname], "Accumulated Sum" if ctx["opt"] == "yes" else ""))

    if ctx["opt"] == "no":
        # Histogram
        H, xedges, yedges = np.histogram2d(df["doy"].values,
                                           df["delta"].values,
                                           bins=(50, 50))
        ax.pcolormesh(
            xedges,
            yedges,
            H.transpose(),
            cmap=plt.get_cmap(ctx["cmap"]),
            alpha=0.5,
        )

    # Plot an average line
    gdf = (df.groupby("doy").mean().rolling(ctx["smooth"],
                                            min_periods=1,
                                            center=True).mean())
    y = gdf["delta"] if ctx["opt"] == "no" else gdf["delta"].cumsum()
    ax.plot(
        gdf.index.values,
        y,
        label="Average",
        zorder=6,
        lw=2,
        color="k",
        linestyle="-.",
    )

    # Plot selected year
    for i in range(1, 5):
        year = ctx.get("y%s" % (i, ))
        if year is None:
            continue
        df2 = df[df["year"] == year]
        if not df2.empty:
            gdf = (df2.groupby("doy").mean().rolling(ctx["smooth"],
                                                     min_periods=1,
                                                     center=True).mean())
            y = gdf["delta"] if ctx["opt"] == "no" else gdf["delta"].cumsum()
            ax.plot(gdf.index.values, y, label=str(year), lw=2, zorder=10)

    ax.set_xticks((1, 32, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335, 365))
    ax.set_xticklabels(calendar.month_abbr[1:])
    ax.set_xlim(1, 366)
    ax.grid(True)
    ax.legend(loc="best", ncol=5)
    sts = datetime.datetime(2000, 6, 1, h1)
    ets = datetime.datetime(2000, 6, 1, h2)
    title = ("%s [%s] %s Difference (%.0f-%.0f)\n"
             "%s minus %s (%s) (timezone: %s)") % (
                 ctx["_nt"].sts[station]["name"],
                 station,
                 PDICT[varname],
                 df["year"].min(),
                 df["year"].max(),
                 ets.strftime("%-I %p"),
                 sts.strftime("%-I %p"),
                 "same day" if h2 > h1 else "previous day",
                 tzname,
             )
    fitbox(fig, title, 0.05, 0.95, 0.91, 0.99, ha="center")

    return fig, df
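
The hour-to-hour difference above comes from pivoting the two requested observation hours into columns of one row per date and subtracting them. A small pandas sketch of that reshape on toy data (column names are illustrative):

# Sketch: pivot two observation hours into columns, then difference them.
import pandas as pd

obs = pd.DataFrame(
    {
        "date": ["2024-06-01", "2024-06-01", "2024-06-02", "2024-06-02"],
        "hour": [6, 18, 6, 18],
        "tmpf": [61.0, 84.0, 58.0, 79.0],
    }
)
wide = obs.pivot(index="date", columns="hour", values="tmpf").reset_index()
wide = wide.dropna()
wide["delta"] = wide[18] - wide[6]   # afternoon minus morning temperature
print(wide)
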
Example #6
File: p45.py  Project: stormchas4/iem
def plotter(fdict):
    """ Go """
    pgconn = get_dbconn("asos")
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx["zstation"]
    hour = ctx["hour"]
    year = ctx["year"]
    month = ctx["month"]

    df = read_sql(
        """
        WITH obs as (
            SELECT to_char(valid, 'YYYYmmdd') as yyyymmdd,
            SUM(case when (skyc1 = 'OVC' or skyc2 = 'OVC' or skyc3 = 'OVC'
                        or skyc4 = 'OVC') then 1 else 0 end)
            from alldata where station = %s
            and valid > '1951-01-01'
            and extract(hour from (valid at time zone %s) +
                        '10 minutes'::interval ) = %s
            GROUP by yyyymmdd)

        SELECT substr(o.yyyymmdd,1,4)::int as year,
        substr(o.yyyymmdd,5,2)::int as month,
        sum(case when o.sum >= 1 then 1 else 0 end) as hits, count(*)
        from obs o GROUP by year, month ORDER by year ASC, month ASC
      """,
        pgconn,
        params=(station, ctx["_nt"].sts[station]["tzname"], hour),
        index_col=None,
    )
    if df.empty:
        raise NoDataFound("No Data Found.")
    df["freq"] = df["hits"] / df["count"] * 100.0
    climo = df.groupby("month").sum()
    climo["freq"] = climo["hits"] / climo["count"] * 100.0

    (fig, ax) = plt.subplots(2, 1)
    ax[0].bar(
        climo.index.values - 0.2,
        climo["freq"].values,
        fc="red",
        ec="red",
        width=0.4,
        label="Climatology",
        align="center",
    )
    for i, row in climo.iterrows():
        ax[0].text(i - 0.2,
                   row["freq"] + 1,
                   "%.0f" % (row["freq"], ),
                   ha="center")

    thisyear = df[df["year"] == year]
    if not thisyear.empty:
        ax[0].bar(
            thisyear["month"].values + 0.2,
            thisyear["freq"].values,
            fc="blue",
            ec="blue",
            width=0.4,
            label=str(year),
            align="center",
        )
    for i, row in thisyear.iterrows():
        ax[0].text(
            row["month"] + 0.2,
            row["freq"] + 1,
            "%.0f" % (row["freq"], ),
            ha="center",
        )
    ax[0].set_ylim(0, 100)
    ax[0].set_xlim(0.5, 12.5)
    ax[0].legend(ncol=2)
    ax[0].set_yticks([0, 10, 25, 50, 75, 90, 100])
    ax[0].set_xticks(range(1, 13))
    ax[0].grid(True)
    ax[0].set_xticklabels(calendar.month_abbr[1:])
    ax[0].set_ylabel("Frequency [%]")
    ax[0].set_title(
        ("%.0f-%s [%s] %s\n"
         "Frequency of %s Cloud Observation of Overcast") % (
             df["year"].min(),
             datetime.datetime.now().year,
             station,
             ctx["_nt"].sts[station]["name"],
             datetime.datetime(2000, 1, 1, hour, 0).strftime("%I %p"),
         ))

    # Plot second one now
    obs = df[df["month"] == month]
    ax[1].bar(obs["year"].values, obs["freq"].values, fc="tan", ec="orange")
    ax[1].set_ylim(0, 100)
    ax[1].grid(True)
    ax[1].set_yticks([0, 10, 25, 50, 75, 90, 100])
    ax[1].axhline(obs["freq"].mean())
    ax[1].set_ylabel("%s Frequency [%%]" % (calendar.month_abbr[month], ))
    ax[1].set_xlim(obs["year"].min() - 2, obs["year"].max() + 2)
    return fig, df
Example #7
File: p196.py  Project: stormchas4/iem
def get_df(ctx):
    """Figure out what data we need to fetch here"""
    ctx["ugc"] = ctx["_nt"].sts[ctx["station"]]["ugc_zone"]
    pgconn = get_dbconn("postgis")
    ctx["s1"] = "Y"
    ctx["s2"] = "W"
    if ctx["var"] == "heat":
        ctx["p1"] = "HT"
        ctx["p2"] = "EH"
    else:
        ctx["p1"] = "WC"
        ctx["p2"] = "WC"
    # Thankfully, all the above are zone based
    events = read_sql(
        """
        SELECT generate_series(issue, expire, '1 minute'::interval) as valid,
        (phenomena ||'.'|| significance) as vtec
        from warnings WHERE ugc = %s and (
            (phenomena = %s and significance = %s) or
            (phenomena = %s and significance = %s)
        ) ORDER by issue ASC
    """,
        pgconn,
        params=(ctx["ugc"], ctx["p1"], ctx["s1"], ctx["p2"], ctx["s2"]),
        index_col="valid",
    )
    if events.empty:
        raise NoDataFound("No Alerts were found for UGC: %s" % (ctx["ugc"],))
    pgconn = get_dbconn("asos")
    thres = "tmpf > 70" if ctx["var"] == "heat" else "tmpf < 40"
    obs = read_sql(
        """
        SELECT valid, tmpf::int as tmpf, feel
        from alldata where station = %s
        and valid > %s and """
        + thres
        + """
        and feel is not null ORDER by valid
    """,
        pgconn,
        params=(ctx["station"], str(events.index.values[0])),
        index_col="valid",
    )
    ctx["title"] = (
        "%s [%s] (%s to %s)\n" "Frequency of NWS Headline for %s by %s"
    ) % (
        ctx["_nt"].sts[ctx["station"]]["name"],
        ctx["station"],
        str(events.index.values[0])[:10],
        str(obs.index.values[-1])[:10],
        ctx["ugc"],
        PDICT2[ctx["var"]],
    )
    if ctx["opt"] == "yes":
        if ctx["var"] == "heat":
            obs = obs[obs["feel"] > obs["tmpf"]]
        else:
            obs = obs[obs["feel"] < obs["tmpf"]]
    obs["feel"] = obs["feel"].round(0)
    res = obs.join(events)
    res.fillna("None", inplace=True)
    counts = res[["feel", "vtec"]].groupby(["feel", "vtec"]).size()
    df = pd.DataFrame(counts)
    df.columns = ["count"]
    df.reset_index(inplace=True)
    ctx["df"] = df.pivot(index="feel", columns="vtec", values="count")
    ctx["df"].fillna(0, inplace=True)
    ctx["df"]["Total"] = ctx["df"].sum(axis=1)
    for vtec in [
        "%s.%s" % (ctx["p1"], ctx["s1"]),
        "%s.%s" % (ctx["p2"], ctx["s2"]),
        "None",
    ]:
        if vtec not in ctx["df"].columns:
            ctx["df"][vtec] = 0.0
        ctx["df"][vtec + "%"] = ctx["df"][vtec] / ctx["df"]["Total"] * 100.0
Example #8
File: p132.py  Project: jamayfieldjr/iem
def plotter(fdict):
    """ Go """
    pgconn = get_dbconn('coop')
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['station']
    month = ctx['month']
    varname = ctx['var']
    days = ctx['days']

    table = "alldata_%s" % (station[:2], )

    if month == 'all':
        months = range(1, 13)
    elif month == 'fall':
        months = [9, 10, 11]
    elif month == 'winter':
        months = [12, 1, 2]
    elif month == 'spring':
        months = [3, 4, 5]
    elif month == 'summer':
        months = [6, 7, 8]
    elif month == 'octmar':
        months = [10, 11, 12, 1, 2, 3]
    else:
        ts = datetime.datetime.strptime("2000-" + month + "-01", '%Y-%b-%d')
        # make sure it is length two for the trick below in SQL
        months = [ts.month, 999]

    sorder = 'ASC' if varname in [
        'min_greatest_low',
    ] else 'DESC'
    df = read_sql("""WITH data as (
        SELECT month, day, day - '%s days'::interval as start_date,
        count(*) OVER (ORDER by day ASC ROWS BETWEEN %s preceding and
        current row) as count,
        sum(precip) OVER (ORDER by day ASC ROWS BETWEEN %s preceding and
        current row) as total_precip,
        min(high) OVER (ORDER by day ASC ROWS BETWEEN %s preceding and
        current row) as max_least_high,
        max(low) OVER (ORDER by day ASC ROWS BETWEEN %s preceding and
        current row) as min_greatest_low
        from """ + table + """ WHERE station = %s)

        SELECT day as end_date, start_date, """ + varname + """ from data WHERE
        month in %s and
        extract(month from start_date) in %s and count = %s
        ORDER by """ + varname + """ """ + sorder + """ LIMIT 10
        """,
                  pgconn,
                  params=(days - 1, days - 1, days - 1, days - 1, days - 1,
                          station, tuple(months), tuple(months), days),
                  index_col=None)
    if df.empty:
        raise NoDataFound('Error, no results returned!')
    ylabels = []
    fmt = '%.2f' if varname in [
        'total_precip',
    ] else '%.0f'
    for _, row in df.iterrows():
        # no strftime support for old days, so we hack at it
        lbl = fmt % (row[varname], )
        if days > 1:
            sts = row['end_date'] - datetime.timedelta(days=(days - 1))
            if sts.month == row['end_date'].month:
                lbl += " -- %s %s-%s, %s" % (calendar.month_abbr[sts.month],
                                             sts.day, row['end_date'].day,
                                             sts.year)
            else:
                lbl += " -- %s %s, %s to\n          %s %s, %s" % (
                    calendar.month_abbr[sts.month], sts.day, sts.year,
                    calendar.month_abbr[row['end_date'].month],
                    row['end_date'].day, row['end_date'].year)
        else:
            lbl += " -- %s %s, %s" % (
                calendar.month_abbr[row['end_date'].month],
                row['end_date'].day, row['end_date'].year)
        ylabels.append(lbl)

    ax = plt.axes([0.1, 0.1, 0.5, 0.8])
    plt.gcf().set_size_inches(8, 6)
    ax.barh(range(10, 0, -1),
            df[varname],
            ec='green',
            fc='green',
            height=0.8,
            align='center')
    ax2 = ax.twinx()
    ax2.set_ylim(0.5, 10.5)
    ax.set_ylim(0.5, 10.5)
    ax2.set_yticks(range(1, 11))
    ax.set_yticks(range(1, 11))
    ax.set_yticklabels(["#%s" % (x, ) for x in range(1, 11)][::-1])
    ax2.set_yticklabels(ylabels[::-1])
    ax.grid(True, zorder=11)
    ax.set_xlabel(("Precipitation [inch]" if varname in ['total_precip'] else
                   r'Temperature $^\circ$F'))
    ab = ctx['_nt'].sts[station]['archive_begin']
    if ab is None:
        raise NoDataFound("Unknown station metadata.")
    ax.set_title(("%s [%s] Top 10 Events\n"
                  "%s [days=%s] (%s) "
                  "(%s-%s)") %
                 (ctx['_nt'].sts[station]['name'], station, METRICS[varname],
                  days, MDICT[month], ab.year, datetime.datetime.now().year),
                 size=12)

    return plt.gcf(), df
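
The SQL above builds trailing n-day totals and extremes with window frames (ROWS BETWEEN n PRECEDING AND CURRENT ROW). The same trailing aggregates can be sketched in pandas with rolling windows; this is an equivalent formulation on synthetic data, not the project's query:

# Sketch: trailing n-day aggregates with pandas rolling windows (synthetic data).
import numpy as np
import pandas as pd

days = 7
idx = pd.date_range("2024-01-01", periods=60, freq="D")
rng = np.random.default_rng(2)
df = pd.DataFrame(
    {
        "precip": rng.gamma(0.5, 0.3, idx.size),
        "high": rng.normal(40, 12, idx.size),
        "low": rng.normal(25, 12, idx.size),
    },
    index=idx,
)
df["total_precip"] = df["precip"].rolling(days).sum()
df["max_least_high"] = df["high"].rolling(days).min()   # coolest daily high in the window
df["min_greatest_low"] = df["low"].rolling(days).max()  # warmest daily low in the window
print(df.nlargest(10, "total_precip")[["total_precip"]])
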
Example #9
def plotter(fdict):
    """ Go """
    pgconn = get_dbconn('coop')
    ctx = get_autoplot_context(fdict, get_description())

    station = ctx['station']
    sdate = ctx['sdate']
    edate = ctx['edate']
    year2 = ctx.get('year2', 0)
    year3 = ctx.get('year3', 0)
    year4 = ctx.get('year4', 0)
    wantedyears = [sdate.year, year2, year3, year4]
    yearcolors = ['r', 'g', 'b', 'purple']
    gddbase = ctx['base']
    gddceil = ctx['ceil']
    whichplots = ctx['which']
    glabel = "gdd%s%s" % (gddbase, gddceil)

    table = "alldata_%s" % (station[:2], )
    df = read_sql("""
    WITH avgs as (
        SELECT sday, avg(gddxx(%s, %s, high, low)) as c""" + glabel + """,
        avg(sdd86(high, low)) as csdd86, avg(precip) as cprecip
        from """ + table + """
        WHERE station = %s GROUP by sday
    )
    SELECT day, gddxx(%s, %s, high, low) as o""" + glabel + """,
    c""" + glabel + """, o.precip as oprecip, cprecip,
    sdd86(o.high, o.low) as osdd86, csdd86 from """ + table + """ o
    JOIN avgs a on (o.sday = a.sday)
    WHERE station = %s and o.sday != '0229' ORDER by day ASC
    """,
                  pgconn,
                  params=(gddbase, gddceil, station, gddbase, gddceil,
                          station),
                  index_col='day')
    df["precip_diff"] = df["oprecip"] - df["cprecip"]
    df[glabel + "_diff"] = df["o" + glabel] - df["c" + glabel]

    xlen = int((edate - sdate).days) + 1  # In case of leap day
    ab = ctx['_nt'].sts[station]['archive_begin']
    if ab is None:
        raise NoDataFound("Unknown station metadata.")
    years = (datetime.datetime.now().year - ab.year) + 1
    acc = np.zeros((years, xlen))
    acc[:] = np.nan
    pacc = np.zeros((years, xlen))
    pacc[:] = np.nan
    sacc = np.zeros((years, xlen))
    sacc[:] = np.nan
    if whichplots == 'all':
        fig = plt.figure(figsize=(9, 12))
        ax1 = fig.add_axes([0.1, 0.7, 0.8, 0.2])
        ax2 = fig.add_axes([0.1, 0.6, 0.8, 0.1],
                           sharex=ax1,
                           facecolor='#EEEEEE')
        ax3 = fig.add_axes([0.1, 0.35, 0.8, 0.2], sharex=ax1)
        ax4 = fig.add_axes([0.1, 0.1, 0.8, 0.2], sharex=ax1)
        title = ("GDD(base=%.0f,ceil=%.0f), Precip, & "
                 "SDD(base=86)") % (gddbase, gddceil)
    elif whichplots == 'gdd':
        fig = plt.figure()
        ax1 = fig.add_axes([0.14, 0.31, 0.8, 0.57])
        ax2 = fig.add_axes([0.14, 0.11, 0.8, 0.2],
                           sharex=ax1,
                           facecolor='#EEEEEE')
        title = ("GDD(base=%.0f,ceil=%.0f)") % (gddbase, gddceil)
    elif whichplots == 'precip':
        fig = plt.figure()
        ax3 = fig.add_axes([0.1, 0.11, 0.8, 0.75])
        ax1 = ax3
        title = "Precipitation"
    elif whichplots == 'sdd':
        fig = plt.figure()
        ax4 = fig.add_axes([0.1, 0.1, 0.8, 0.8])
        ax1 = ax4
        title = "Stress Degree Days (base=86)"

    ax1.set_title(("Accumulated %s\n%s %s") %
                  (title, station, ctx['_nt'].sts[station]['name']),
                  fontsize=18 if whichplots == 'all' else 14)

    ab = ctx['_nt'].sts[station]['archive_begin']
    if ab is None:
        raise NoDataFound("Unknown station metadata.")
    for year in range(ab.year, datetime.datetime.now().year + 1):
        sts = sdate.replace(year=year)
        ets = sts + datetime.timedelta(days=(xlen - 1))
        x = df.loc[sts:ets, 'o' + glabel].cumsum()
        if x.empty:
            continue
        acc[(year - sdate.year), :len(x.index)] = x.values
        x = df.loc[sts:ets, 'oprecip'].cumsum()
        pacc[(year - sdate.year), :len(x.index)] = x.values
        x = df.loc[sts:ets, 'osdd86'].cumsum()
        sacc[(year - sdate.year), :len(x.index)] = x.values

        if year not in wantedyears:
            continue
        color = yearcolors[wantedyears.index(year)]
        yearlabel = sts.year
        if sts.year != ets.year:
            yearlabel = "%s-%s" % (sts.year, ets.year)
        if whichplots in ['gdd', 'all']:
            ax1.plot(range(len(x.index)),
                     df.loc[sts:ets, "o" + glabel].cumsum().values,
                     zorder=6,
                     color=color,
                     label='%s' % (yearlabel, ),
                     lw=2)
        # Get cumulated precip
        p = df.loc[sts:ets, 'oprecip'].cumsum()
        if whichplots in ['all', 'precip']:
            ax3.plot(range(len(p.index)),
                     p.values,
                     color=color,
                     lw=2,
                     zorder=6,
                     label='%s' % (yearlabel, ))
        p = df.loc[sts:ets, 'osdd86'].cumsum()
        if whichplots in ['all', 'sdd']:
            ax4.plot(range(len(p.index)),
                     p.values,
                     color=color,
                     lw=2,
                     zorder=6,
                     label='%s' % (yearlabel, ))

        # Plot Climatology
        if wantedyears.index(year) == 0:
            x = df.loc[sts:ets, "c" + glabel].cumsum()
            if whichplots in ['all', 'gdd']:
                ax1.plot(range(len(x.index)),
                         x.values,
                         color='k',
                         label='Climatology',
                         lw=2,
                         zorder=5)
            x = df.loc[sts:ets, "cprecip"].cumsum()
            if whichplots in ['all', 'precip']:
                ax3.plot(range(len(x.index)),
                         x.values,
                         color='k',
                         label='Climatology',
                         lw=2,
                         zorder=5)
            x = df.loc[sts:ets, "csdd86"].cumsum()
            if whichplots in ['all', 'sdd']:
                ax4.plot(range(len(x.index)),
                         x.values,
                         color='k',
                         label='Climatology',
                         lw=2,
                         zorder=5)

        x = df.loc[sts:ets, glabel + "_diff"].cumsum()
        if whichplots in ['all', 'gdd']:
            ax2.plot(range(len(x.index)),
                     x.values,
                     color=color,
                     linewidth=2,
                     linestyle='--')

    xmin = np.nanmin(acc, 0)
    xmax = np.nanmax(acc, 0)
    if whichplots in ['all', 'gdd']:
        ax1.fill_between(range(len(xmin)), xmin, xmax, color='lightblue')
        ax1.grid(True)
        ax2.grid(True)
    xmin = np.nanmin(pacc, 0)
    xmax = np.nanmax(pacc, 0)
    if whichplots in ['all', 'precip']:
        ax3.fill_between(range(len(xmin)), xmin, xmax, color='lightblue')
        ax3.set_ylabel("Precipitation [inch]", fontsize=16)
        ax3.grid(True)
    xmin = np.nanmin(sacc, 0)
    xmax = np.nanmax(sacc, 0)
    if whichplots in ['all', 'sdd']:
        ax4.fill_between(range(len(xmin)), xmin, xmax, color='lightblue')
        ax4.set_ylabel(r"SDD Base 86 $^{\circ}\mathrm{F}$", fontsize=16)
        ax4.grid(True)

    if whichplots in ['all', 'gdd']:
        ax1.set_ylabel((r"GDD Base %.0f Ceil %.0f $^{\circ}\mathrm{F}$") %
                       (gddbase, gddceil),
                       fontsize=16)

        ax1.text(0.5,
                 0.9,
                 "%s/%s - %s/%s" %
                 (sdate.month, sdate.day, edate.month, edate.day),
                 transform=ax1.transAxes,
                 ha='center')

        ylim = ax2.get_ylim()
        spread = max([abs(ylim[0]), abs(ylim[1])]) * 1.1
        ax2.set_ylim(0 - spread, spread)
        ax2.text(0.02,
                 0.1,
                 " Accumulated Departure ",
                 transform=ax2.transAxes,
                 bbox=dict(facecolor='white', ec='#EEEEEE'))
        ax2.yaxis.tick_right()

    xticks = []
    xticklabels = []
    wanted = [
        1,
    ] if xlen > 31 else [1, 7, 15, 22, 29]
    now = sdate
    i = 0
    while now <= edate:
        if now.day in wanted:
            xticks.append(i)
            xticklabels.append(now.strftime("%-d\n%b"))
        now += datetime.timedelta(days=1)
        i += 1
    if whichplots in ['all', 'gdd']:
        ax2.set_xticks(xticks)
        ax2.set_xticklabels(xticklabels)
        ax1.legend(loc=2, prop={'size': 12})
        # Remove ticks on the top most plot
        for label in ax1.get_xticklabels():
            label.set_visible(False)

        ax1.set_xlim(0, xlen + 1)
    if whichplots in ['all', 'precip']:
        ax3.set_xticks(xticks)
        ax3.set_xticklabels(xticklabels)
        ax3.legend(loc=2, prop={'size': 10})
        ax3.set_xlim(0, xlen + 1)
    if whichplots in ['all', 'sdd']:
        ax4.set_xticks(xticks)
        ax4.set_xticklabels(xticklabels)
        ax4.legend(loc=2, prop={'size': 10})
        ax4.set_xlim(0, xlen + 1)

    return fig, df
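
The shaded band behind the yearly traces above is the min/max envelope of the accumulated values across all years, drawn with fill_between over nan-aware reductions. A compact sketch of that envelope on synthetic accumulations:

# Sketch: shade the min/max envelope of accumulated values across years (synthetic data).
import numpy as np
import matplotlib.pyplot as plt

rng = np.random.default_rng(5)
acc = np.cumsum(rng.gamma(2.0, 5.0, (40, 120)), axis=1)  # (years, days) accumulations
acc[3, 80:] = np.nan                                     # an incomplete year

xmin = np.nanmin(acc, 0)
xmax = np.nanmax(acc, 0)

fig, ax = plt.subplots()
ax.fill_between(range(acc.shape[1]), xmin, xmax, color="lightblue")
ax.plot(range(acc.shape[1]), acc[-1, :], color="r", lw=2, label="latest year")
ax.legend(loc=2)
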
Example #10
def get_data(ctx):
    """Build out our data."""
    pgconn = get_dbconn("iem")
    ctx["nt"] = NetworkTable("NWSCLI")
    varname = ctx["var"]

    today = ctx["sdate"]
    yesterday = today - datetime.timedelta(days=1)
    d180 = today - datetime.timedelta(days=180)

    df = read_sql(
        """
     with obs as (
      select station, valid,
      (case when low > low_normal then 1 else 0 end) as low_hit,
      (case when high > high_normal then 1 else 0 end) as high_hit,
      (case when precip > 0.009 then 1 else 0 end) as precip_hit
      from cli_data
      where high is not null
      and high_normal is not null and low is not null and
      low_normal is not null and precip is not null
      and valid > %s and valid <= %s),

      totals as (
      SELECT station,
      max(case when low_hit = 0 then valid else %s end) as last_low_below,
      max(case when low_hit = 1 then valid else %s end) as last_low_above,
      max(case when high_hit = 0 then valid else %s end) as last_high_below,
      max(case when high_hit = 1 then valid else %s end) as last_high_above,
      max(case when precip_hit = 0 then valid else %s end) as last_dry,
      max(case when precip_hit = 1 then valid else %s end) as last_wet,
      count(*) as count from obs GROUP by station)

      SELECT station, last_low_below, last_low_above, last_high_below,
      last_high_above, last_dry, last_wet
      from totals where count > 170
    """,
        pgconn,
        params=(d180, today, d180, d180, d180, d180, d180, d180),
        index_col="station",
    )
    if df.empty:
        raise NoDataFound("No Data Found.")

    df["lat"] = None
    df["lon"] = None
    df["val"] = None
    df["color"] = ""
    df["label"] = ""
    df["precip_days"] = (df["last_dry"] - df["last_wet"]).dt.days
    df["low_days"] = (df["last_low_above"] - df["last_low_below"]).dt.days
    df["high_days"] = (df["last_high_above"] - df["last_high_below"]).dt.days
    # reorder the frame so that the largest values come first
    df = df.reindex(
        df[varname + "_days"].abs().sort_values(ascending=False).index
    )

    for station, row in df.iterrows():
        if station not in ctx["nt"].sts:
            continue
        df.at[station, "lat"] = ctx["nt"].sts[station]["lat"]
        df.at[station, "lon"] = ctx["nt"].sts[station]["lon"]
        if varname == "precip":
            last_wet = row["last_wet"]
            days = 0 if last_wet in [today, yesterday] else row["precip_days"]
        else:
            days = row[varname + "_days"]
        df.at[station, "val"] = days
        df.at[station, "color"] = "#FF0000" if days > 0 else "#0000FF"
        df.at[station, "label"] = station[1:]
    df = df[pd.notnull(df["lon"])]
    ctx["df"] = gpd.GeoDataFrame(
        df, geometry=gpd.points_from_xy(df["lon"], df["lat"])
    )
    ctx["subtitle"] = (
        "based on NWS CLI Sites, map approximately " "valid for %s"
    ) % (today.strftime("%-d %b %Y"),)
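
get_data above finishes by wrapping the station frame in a GeoDataFrame built from its lon/lat columns with points_from_xy. A minimal sketch of that construction on made-up stations (assumes geopandas is installed; the coordinates are illustrative):

# Sketch: build a GeoDataFrame from plain lon/lat columns (illustrative values).
import geopandas as gpd
import pandas as pd

df = pd.DataFrame(
    {
        "station": ["KDSM", "KOMA"],
        "lon": [-93.66, -95.90],
        "lat": [41.53, 41.30],
        "val": [3, -2],
    }
).set_index("station")
gdf = gpd.GeoDataFrame(
    df, geometry=gpd.points_from_xy(df["lon"], df["lat"]), crs="EPSG:4326"
)
print(gdf.geometry.head())
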
Example #11
def plotter(fdict):
    """ Go """
    pgconn = get_dbconn("asos")
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx["zstation"]
    hours = ctx["hours"]
    mydir = ctx["dir"]
    month = ctx["month"]

    if month == "all":
        months = range(1, 13)
    elif month == "fall":
        months = [9, 10, 11]
    elif month == "winter":
        months = [12, 1, 2]
    elif month == "spring":
        months = [3, 4, 5]
    elif month == "summer":
        months = [6, 7, 8]
    else:
        ts = datetime.datetime.strptime("2000-" + month + "-01", "%Y-%b-%d")
        # make sure it is length two for the trick below in SQL
        months = [ts.month, 999]

    tzname = ctx["_nt"].sts[station]["tzname"]

    # backwards intuitive
    sortdir = "ASC" if mydir == "warm" else "DESC"
    df = read_sql(
        """
    WITH data as (
        SELECT valid at time zone %s as valid, tmpf from alldata
        where station = %s and tmpf between -100 and 150
        and extract(month from valid) in %s),
    doffset as (
        SELECT valid - '%s hours'::interval as valid, tmpf from data),
    agg as (
        SELECT d.valid, d.tmpf as tmpf1, o.tmpf as tmpf2
        from data d JOIN doffset o on (d.valid = o.valid))
    SELECT valid as valid1, valid + '%s hours'::interval as valid2,
    tmpf1, tmpf2 from agg
    ORDER by (tmpf1 - tmpf2) """
        + sortdir
        + """ LIMIT 50
    """,
        pgconn,
        params=(tzname, station, tuple(months), hours, hours),
        index_col=None,
    )
    df["diff"] = (df["tmpf1"] - df["tmpf2"]).abs()

    if df.empty:
        raise NoDataFound("No database entries found for station, sorry!")

    fig = plt.figure()
    ax = plt.axes([0.55, 0.1, 0.4, 0.8])

    ab = ctx["_nt"].sts[station]["archive_begin"]
    if ab is None:
        raise NoDataFound("Unknown station metadata.")
    fig.text(
        0.5,
        0.95,
        ("[%s] %s Top 10 %s\n" "Over %s Hour Period (%s-%s) [%s]")
        % (
            station,
            ctx["_nt"].sts[station]["name"],
            MDICT[mydir],
            hours,
            ab.year,
            datetime.date.today().year,
            MDICT2[month],
        ),
        ha="center",
        va="center",
    )

    labels = []
    for i in range(10):
        row = df.iloc[i]
        ax.barh(i + 1, row["diff"], color="b", align="center")
        sts = row["valid1"]
        ets = row["valid2"]
        labels.append(
            ("%.0f to %.0f -> %.0f\n%s - %s")
            % (
                row["tmpf1"],
                row["tmpf2"],
                row["diff"],
                sts.strftime("%-d %b %Y %I:%M %p"),
                ets.strftime("%-d %b %Y %I:%M %p"),
            )
        )
    ax.set_yticks(range(1, 11))
    ax.set_yticklabels(labels)
    ax.set_ylim(10.5, 0.5)
    ax.grid(True)
    return fig, df
Example #12
File: p194.py  Project: stormchas4/iem
def plotter(fdict):
    """ Go """
    ctx = get_autoplot_context(fdict, get_description())
    csector = ctx["csector"]
    sdate = make_tuesday(ctx["sdate"])
    edate = make_tuesday(ctx["edate"])
    dlevel = ctx["d"]

    griddelta = 0.1
    mp = MapPlot(
        sector=("state" if len(csector) == 2 else csector),
        state=ctx["csector"],
        title=('%s at or above "%s" %s - %s') % (
            PDICT2[ctx["w"]],
            PDICT[dlevel],
            sdate.strftime("%b %-d, %Y"),
            edate.strftime("%b %-d, %Y"),
        ),
        subtitle=("based on weekly US Drought Monitor Analysis, "
                  "%.2f$^\circ$ grid analysis") % (griddelta, ),
        continentalcolor="white",
        titlefontsize=14,
    )

    # compute the affine
    (west, east, south, north) = mp.ax.get_extent(ccrs.PlateCarree())
    raster = np.zeros((int(
        (north - south) / griddelta), int((east - west) / griddelta)))
    lons = np.arange(raster.shape[1]) * griddelta + west
    lats = np.arange(0, 0 - raster.shape[0], -1) * griddelta + north
    lats = lats[::-1]
    affine = Affine(griddelta, 0.0, west, 0.0, 0 - griddelta, north)
    # get the geopandas data
    pgconn = get_dbconn("postgis")
    df = read_postgis(
        """
    with d as (
        select valid, (ST_Dump(st_simplify(geom, 0.01))).geom from usdm where
        valid >= %s and valid <= %s and dm >= %s and
        ST_Intersects(geom, ST_GeomFromEWKT('SRID=4326;POLYGON((%s %s, %s %s,
         %s %s, %s %s, %s %s))'))
    )
    select valid, st_collect(geom) as the_geom from d GROUP by valid
    """,
        pgconn,
        params=(
            sdate,
            edate,
            dlevel,
            west,
            south,
            west,
            north,
            east,
            north,
            east,
            south,
            west,
            south,
        ),
        geom_col="the_geom",
    )
    if df.empty:
        raise NoDataFound("No Data Found, sorry!")
    # loop over the cached stats
    czs = CachingZonalStats(affine)
    czs.compute_gridnav(df["the_geom"], raster)
    for nav in czs.gridnav:
        if nav is None:
            continue
        grid = np.ones((nav.ysz, nav.xsz))
        grid[nav.mask] = 0.0
        jslice = slice(nav.y0, nav.y0 + nav.ysz)
        islice = slice(nav.x0, nav.x0 + nav.xsz)
        raster[jslice, islice] += grid

    maxval = 10 if np.max(raster) < 11 else np.max(raster)
    ramp = np.linspace(1, maxval + 1, 11, dtype="i")
    if ctx["w"] == "percent":
        ramp = np.arange(0, 101, 10)
        ramp[0] = 1.0
        ramp[-1] = 100.1
        # we add one since we are rectified to tuesdays, so we have an extra
        # week in there
        raster = raster / ((edate - sdate).days / 7.0 + 1.0) * 100.0
    # plot
    cmap = stretch_cmap(ctx["cmap"], ramp)
    cmap.set_under("white")
    cmap.set_bad("white")
    mp.pcolormesh(
        lons,
        lats,
        np.flipud(raster),
        ramp,
        cmap=cmap,
        units="count" if ctx["w"] == "weeks" else "Percent",
    )
    if len(csector) == 2:
        mp.drawcounties()
        mp.drawcities()

    rows = []
    for j in range(raster.shape[0]):
        for i in range(raster.shape[1]):
            rows.append(dict(lon=lons[i], lat=lats[j], value=raster[j, i]))

    return mp.fig, pd.DataFrame(rows)
Example #13
File: p14.py  Project: jamayfieldjr/iem
def plotter(fdict):
    """ Go """
    pgconn = get_dbconn('coop')
    cursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['station']
    today = datetime.datetime.now()
    year = ctx['year']
    jdaylimit = 367
    if year == today.year:
        jdaylimit = int(today.strftime("%j"))

    table = "alldata_%s" % (station[:2], )
    endyear = int(datetime.datetime.now().year) + 1
    (fig, ax) = plt.subplots(1, 1, figsize=(8, 6))

    cursor.execute(
        """
        select precip, sum(precip) OVER (ORDER by precip ASC) as rsum,
        sum(precip) OVER () as tsum,
        min(year) OVER () as minyear from """ + table + """ where
        station = %s and precip >= 0.01 and extract(doy from day) < %s and
        year < extract(year from now()) ORDER by precip ASC
    """, (station, jdaylimit))
    if cursor.rowcount == 0:
        raise NoDataFound("No Data Found.")
    total = None
    base = None
    bins = [
        0.01,
    ]
    minyear = None
    row = None
    for i, row in enumerate(cursor):
        if i == 0:
            minyear = row['minyear']
            total = row['tsum']
            onefifth = total / 5.0
            base = onefifth
        if row['rsum'] > base:
            bins.append(row['precip'])
            base += onefifth

    normal = total / float(endyear - minyear - 1)
    # A rounding edge case
    if row['precip'] != bins[-1]:
        bins.append(row['precip'])

    df = pd.DataFrame(
        {
            'bin': range(1, 6),
            'lower': bins[0:-1],
            'upper': bins[1:]
        },
        index=range(1, 6))

    yearlybins = np.zeros((endyear - minyear, 5), 'f')
    yearlytotals = np.zeros((endyear - minyear, 5), 'f')

    cursor.execute(
        """
    SELECT year,
    sum(case when precip >= %s and precip < %s then 1 else 0 end) as bin0,
    sum(case when precip >= %s and precip < %s then 1 else 0 end) as bin1,
    sum(case when precip >= %s and precip < %s then 1 else 0 end) as bin2,
    sum(case when precip >= %s and precip < %s then 1 else 0 end) as bin3,
    sum(case when precip >= %s and precip < %s then 1 else 0 end) as bin4,
    sum(case when precip >= %s and precip < %s then precip else 0 end) as tot0,
    sum(case when precip >= %s and precip < %s then precip else 0 end) as tot1,
    sum(case when precip >= %s and precip < %s then precip else 0 end) as tot2,
    sum(case when precip >= %s and precip < %s then precip else 0 end) as tot3,
    sum(case when precip >= %s and precip < %s then precip else 0 end) as tot4
    from """ + table + """ where extract(doy from day) < %s and
    station = %s and precip > 0 and year > 1879 GROUP by year
    """, (bins[0], bins[1], bins[1], bins[2], bins[2], bins[3], bins[3],
          bins[4], bins[4], bins[5], bins[0], bins[1], bins[1], bins[2],
          bins[2], bins[3], bins[3], bins[4], bins[4], bins[5], jdaylimit,
          station))
    for row in cursor:
        for i in range(5):
            yearlybins[int(row[0]) - minyear, i] = row['bin%s' % (i, )]
            yearlytotals[int(row[0]) - minyear, i] = row['tot%s' % (i, )]

    avgs = np.average(yearlybins, 0)
    df['avg_days'] = avgs
    dlast = yearlybins[year - minyear, :]
    df['days_%s' % (year, )] = dlast
    df['precip_%s' % (year, )] = yearlytotals[year - minyear, :]
    df['normal_%s' % (year, )] = normal / 5.

    ybuffer = (max([max(avgs), max(dlast)]) + 2) * 0.05

    bars = ax.bar(np.arange(5) - 0.2,
                  avgs,
                  width=0.4,
                  fc='b',
                  align='center',
                  label='Average = %.2f"' % (normal, ))
    for i, _bar in enumerate(bars):
        ax.text(_bar.get_x() + 0.2,
                avgs[i] + ybuffer,
                "%.1f" % (avgs[i], ),
                ha='center',
                zorder=2)
        delta = yearlytotals[year - minyear, i] / normal * 100.0 - 20.0
        ax.text(i,
                max(avgs[i], dlast[i]) + 2 * ybuffer,
                "%s%.1f%%" % (
                    "+" if delta > 0 else "",
                    delta,
                ),
                ha='center',
                color='r',
                bbox=dict(pad=0, facecolor='white', edgecolor='white'))

    bars = ax.bar(np.arange(5) + 0.2,
                  dlast,
                  width=0.4,
                  fc='r',
                  align='center',
                  label='%s = %.2f"' %
                  (year, np.sum(yearlytotals[year - minyear, :])))
    for i, _bar in enumerate(bars):
        ax.text(_bar.get_x() + 0.2,
                dlast[i] + ybuffer,
                "%.0f" % (dlast[i], ),
                ha='center')

    ax.text(0.7,
            0.8,
            ("Red text represents %s bin total\nprecip "
             "departure from average") % (year, ),
            transform=ax.transAxes,
            color='r',
            ha='center',
            va='top',
            bbox=dict(facecolor='white', edgecolor='white'))
    ax.legend()
    ax.grid(True)
    ax.set_ylabel("Days")
    ax.text(0.5,
            -0.05,
            ("Precipitation Bins [inch], split into equal 20%%"
             " by rain volume (%.2fin)") % (normal / 5.0, ),
            transform=ax.transAxes,
            va='top',
            ha='center')
    addl = ""
    if jdaylimit < 367:
        addl = " thru %s" % (today.strftime("%-d %b"), )
    ax.set_title(
        ("%s [%s] [%s-%s]\nDaily Precipitation Contributions%s") %
        (ctx['_nt'].sts[station]['name'], station, minyear, endyear - 2, addl))
    ax.set_xticks(np.arange(0, 5))
    xlabels = []
    for i in range(5):
        xlabels.append("%.2f-%.2f" % (bins[i], bins[i + 1]))
    ax.set_xticklabels(xlabels)
    ax.set_ylim(top=ax.get_ylim()[1] * 1.1)

    return fig, df
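
The first query above derives five precipitation bins that each hold roughly 20% of the total rain volume by walking a running sum of the sorted daily values. The same thresholds can be sketched with a cumulative sum and searchsorted; an equivalent vectorized formulation on synthetic data:

# Sketch: split daily precipitation into five equal-volume bins (synthetic data).
import numpy as np

precip = np.sort(np.random.default_rng(6).gamma(0.4, 0.5, 5000) + 0.01)
csum = np.cumsum(precip)
total = csum[-1]
# first indices where the running volume crosses 20%, 40%, 60% and 80% of the total
idx = np.searchsorted(csum, total * np.array([0.2, 0.4, 0.6, 0.8]))
bins = np.concatenate(([precip[0]], precip[idx], [precip[-1]]))
print(bins)   # six edges -> five bins, each holding ~20% of the rain volume
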
Example #14
File: p102.py  Project: stormchas4/iem
def plotter(fdict):
    """ Go """
    pgconn = util.get_dbconn("postgis")
    ctx = util.get_autoplot_context(fdict, get_description())
    ctx["_nt"].sts["_ALL"] = dict(name="All WFOs")
    station = ctx["station"][:4]
    syear = ctx["year"]
    eyear = ctx["eyear"]
    # optional parameter, this could return null
    ltype = ctx.get("ltype")
    wfo_limiter = " and wfo = '%s' " % (station if len(station) == 3 else
                                        station[1:], )
    if station == "_ALL":
        wfo_limiter = ""
    typetext_limiter = ""
    if ltype:
        if len(ltype) == 1:
            typetext_limiter = " and typetext = '%s'" % (ltype[0], )
        else:
            typetext_limiter = " and typetext in %s" % (tuple(ltype), )

    df = read_sql(
        f"""
        select extract(year from valid)::int as yr, upper(source) as src,
        count(*) from lsrs
        where valid > '{syear}-01-01' and
        valid < '{eyear + 1}-01-01' {wfo_limiter} {typetext_limiter}
        GROUP by yr, src
    """,
        pgconn,
    )
    if df.empty:
        raise NoDataFound("No data found")
    # pivot the table so that we can fill out zeros
    df = df.pivot(index="yr", columns="src", values="count")
    df = df.fillna(0).reset_index()
    df = df.melt(id_vars="yr", value_name="count")
    df["rank"] = df.groupby(["yr"])["count"].rank(ascending=False,
                                                  method="first")
    (fig, ax) = plt.subplots(1, 1, figsize=(8, 6))
    # Do syear as left side
    for year in range(syear, eyear):
        dyear = df[df["yr"] == year].sort_values(by=["rank"], ascending=True)
        if not dyear.empty:
            break
    syear = year
    i = 1
    ylabels = []
    leftsrcs = []
    usedline = 0
    for _, row in dyear.iterrows():
        src = row["src"]
        leftsrcs.append(src)
        ylabels.append("%s (%.0f)" % (src, row["count"]))
        d = df[df["src"] == src].sort_values(by=["yr"])
        ax.plot(
            np.array(d["yr"]),
            np.array(d["rank"]),
            lw=2,
            label=src,
            marker=MARKERS[usedline % len(MARKERS)],
        )
        i += 1
        usedline += 1
        if i > 20:
            break
    ax.set_yticks(range(1, len(ylabels) + 1))
    ax.set_yticklabels(["%s %s" % (s, i + 1) for i, s in enumerate(ylabels)])
    ax.set_ylim(0.5, 20.5)

    ax2 = ax.twinx()
    # Do last year as right side
    dyear = df[df["yr"] == eyear].sort_values(by=["rank"], ascending=True)
    i = 0
    y2labels = []
    for _, row in dyear.iterrows():
        i += 1
        if i > 20:
            break
        src = row["src"]
        y2labels.append("%s (%.0f)" % (src, row["count"]))
        if src not in leftsrcs:
            d = df[df["src"] == src].sort_values(by=["yr"])
            ax.plot(
                np.array(d["yr"]),
                np.array(d["rank"]),
                lw=2,
                label=src,
                marker=MARKERS[usedline % len(MARKERS)],
            )
            usedline += 1

    ax2.set_yticks(range(1, len(y2labels) + 1))
    ax2.set_yticklabels(["%s %s" % (i + 1, s) for i, s in enumerate(y2labels)])
    ax2.set_ylim(0.5, 20.5)

    ax.set_position([0.3, 0.13, 0.4, 0.75])
    ax2.set_position([0.3, 0.13, 0.4, 0.75])
    ax.set_xticks(range(df["yr"].min(), df["yr"].max(), 2))
    for tick in ax.get_xticklabels():
        tick.set_rotation(90)
    ax.grid()

    fig.text(0.15, 0.88, "%s" % (syear, ), fontsize=14, ha="center")
    fig.text(0.85, 0.88, "%s" % (eyear, ), fontsize=14, ha="center")

    fig.text(
        0.5,
        0.97,
        "NWS %s Local Storm Report Sources Ranks" %
        (ctx["_nt"].sts[station]["name"], ),
        ha="center",
    )
    if ltype:
        label = "For LSR Types: %s" % (repr(ltype), )
        if len(label) > 90:
            label = "%s..." % (label[:90], )
        fig.text(0.5, 0.93, label, ha="center")

    return fig, df
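
The pivot → fillna → melt → rank chain above is what densifies the per-source counts before ranking them within each year. A minimal standalone sketch of the same pandas idiom on toy data (the source names and counts here are invented):

import pandas as pd

# toy LSR counts per (year, source); one (yr, src) pair is deliberately missing
raw = pd.DataFrame({
    "yr": [2019, 2019, 2020],
    "src": ["KCCI", "KWWL", "KCCI"],
    "count": [40, 25, 30],
})
# pivot so missing pairs surface as NaN, zero-fill them, then melt back to long form
wide = raw.pivot(index="yr", columns="src", values="count").fillna(0).reset_index()
tidy = wide.melt(id_vars="yr", value_name="count")
# rank sources within each year; method="first" breaks ties in row order
tidy["rank"] = tidy.groupby("yr")["count"].rank(ascending=False, method="first")
print(tidy.sort_values(["yr", "rank"]))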
Example #15
def plotter(fdict):
    """ Go """
    from seaborn import heatmap

    ctx = get_autoplot_context(fdict, get_description())
    pgconn = get_dbconn("coop")
    table = "alldata_%s" % (ctx["station"][:2], )
    df = read_sql(
        """
        select day, sday, precip, high,
        extract(doy from day)::int as doy, year
        from """ + table + """  WHERE
        station = %s ORDER by day ASC
    """,
        pgconn,
        params=(ctx["station"], ),
        index_col="day",
        parse_dates="day",
    )
    if df.empty:
        raise NoDataFound("Did not find any data for station!")
    if ctx["var"] == "trail_precip_percent":
        climo = df[["precip", "sday"]].groupby("sday").mean()
        df["precip_avg"] = df.merge(climo,
                                    left_on="sday",
                                    right_index=True,
                                    suffixes=("", "_avg"))["precip_avg"]
        df["trail_precip_percent"] = (
            df["precip"].rolling(ctx["days"]).sum() /
            df["precip_avg"].rolling(ctx["days"]).sum() * 100.0)
        levels = [0, 25, 50, 75, 100, 150, 200, 250, 300]
        label = "Percent"
    elif ctx["var"] == "daily_high_depart":
        climo = df[["high", "sday"]].groupby("sday").mean()
        df["high_avg"] = df.merge(climo,
                                  left_on="sday",
                                  right_index=True,
                                  suffixes=("", "_avg"))["high_avg"]
        df["daily_high_depart"] = df["high"] - df["high_avg"]
        levels = list(range(-20, 21, 4))
        label = "Temperature [F] Departure"

    baseyear = max([df["year"].min(), ctx["syear"]])
    endyear = min([df["year"].max(), ctx["eyear"]])
    years = endyear - baseyear + 1
    cmap = plt.get_cmap(ctx["cmap"])
    norm = mpcolors.BoundaryNorm(levels, cmap.N)
    data = np.full((years, 366), np.nan)
    df2 = df[(df["year"] >= baseyear) & (df["year"] <= endyear)]
    for day, row in df2.iterrows():
        data[day.year - baseyear, row["doy"] - 1] = row[ctx["var"]]

    fig, ax = plt.subplots(1, 1)
    heatmap(
        data,
        cmap=cmap,
        norm=norm,
        ax=ax,
        cbar_kws={
            "spacing": "proportional",
            "label": label
        },
    )
    ax.set_xticks((1, 32, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335, 365))
    ax.set_xticklabels(calendar.month_abbr[1:], rotation=0)
    yticks = []
    yticklabels = []
    delta = 5 if (endyear - baseyear) < 30 else 10
    for i, year in enumerate(range(baseyear, endyear + 1)):
        if year % delta == 0:
            yticks.append(i + 0.5)
            yticklabels.append(year)
    ax.set_yticks(yticks[::-1])
    ax.set_yticklabels(yticklabels[::-1], rotation=0)
    ax.xaxis.grid(True, color="k")
    ax.set_title("[%s] %s (%s-%s)\n%s" % (
        ctx["station"],
        ctx["_nt"].sts[ctx["station"]]["name"],
        ctx["syear"],
        ctx["eyear"],
        PDICT[ctx["var"]].replace("XX", str(ctx["days"])),
    ))

    return fig, df
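
The discrete shading in this heatmap comes from pairing a colormap with matplotlib's BoundaryNorm, which buckets continuous values into the fixed `levels` bins. A minimal sketch of that pairing on synthetic data (the colormap name and values are placeholders):

import numpy as np
import matplotlib.pyplot as plt
import matplotlib.colors as mpcolors

levels = [0, 25, 50, 75, 100, 150, 200, 250, 300]
cmap = plt.get_cmap("viridis")
norm = mpcolors.BoundaryNorm(levels, cmap.N)  # value -> bin -> discrete color

data = np.random.uniform(0, 300, size=(20, 40))
fig, ax = plt.subplots()
mesh = ax.pcolormesh(data, cmap=cmap, norm=norm)
fig.colorbar(mesh, ax=ax, spacing="proportional", label="Percent")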
Example #16
File: p143.py Project: stormchas4/iem
def load(dirname, location, sdate):
    """ Read a file please """
    data = []
    idx = []
    fn = "%s/%s.met" % (dirname, location)
    if not os.path.isfile(fn):
        raise NoDataFound("File was not found.")
    for line in open(fn):
        line = line.strip()
        if not line.startswith("19") and not line.startswith("20"):
            continue
        tokens = line.split()
        if float(tokens[5]) > 90:
            continue
        data.append(tokens)
        ts = datetime.date(int(tokens[0]), 1,
                           1) + datetime.timedelta(days=int(tokens[1]) - 1)
        idx.append(ts)
    if len(data[0]) < 10:
        cols = ["year", "doy", "radn", "maxt", "mint", "rain"]
    else:
        cols = [
            "year",
            "doy",
            "radn",
            "maxt",
            "mint",
            "rain",
            "gdd",
            "st4",
            "st12",
            "st24",
            "st50",
            "sm12",
            "sm24",
            "sm50",
        ]
    df = pd.DataFrame(data, index=idx, columns=cols)
    for col in cols:
        df[col] = pd.to_numeric(df[col], errors="coerce")
    if len(data[0]) < 10:
        df["gdd"] = gdd(
            temperature(df["maxt"].values, "C"),
            temperature(df["mint"].values, "C"),
        )
    bins = []
    today = datetime.date.today()
    for valid, _ in df.iterrows():
        if valid >= today:
            bins.append(0)
            continue
        if sdate == "nov1" and valid.month >= 11:
            bins.append(valid.year + 1)
            continue
        if valid.month < today.month:
            bins.append(valid.year)
            continue
        if valid.month == today.month and valid.day < today.day:
            bins.append(valid.year)
            continue
        bins.append(0)
    df["bin"] = bins
    df["rain"] = distance(df["rain"].values, "MM").value("IN")
    df["avgt"] = temperature((df["maxt"] + df["mint"]) / 2.0, "C").value("F")
    return df
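
A hedged usage sketch for load(): the directory, location name, and season flag below are placeholders, assuming a folder of <location>.met files in the format the parser above expects.

# hypothetical inputs: a directory of .met files, a location name, and the sdate flag
df = load("/path/to/met", "ames", "nov1")
# rows tagged bin == 0 were excluded; seasonal rainfall totals then fall out of a groupby
seasonal_rain = df[df["bin"] > 0].groupby("bin")["rain"].sum()
print(seasonal_rain.tail())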
Example #17
def plotter(fdict):
    """ Go """
    ctx = util.get_autoplot_context(fdict, get_description())
    state = ctx["state"]
    syear = ctx["syear"]
    eyear = ctx["eyear"]

    fips = ""
    for key in state_fips:
        if state_fips[key] == state:
            fips = key
    payload = "{'area':'%s', 'type':'state', 'statstype':'2'}" % (fips, )
    headers = {}
    headers["Accept"] = "application/json, text/javascript, */*; q=0.01"
    headers["Content-Type"] = "application/json; charset=UTF-8"
    req = util.exponential_backoff(requests.post,
                                   SERVICE,
                                   payload,
                                   headers=headers)
    if req is None:
        raise NoDataFound("Drought Web Service failed to deliver data.")
    jdata = req.json()
    if "d" not in jdata:
        raise NoDataFound("Data Not Found.")
    df = pd.DataFrame(jdata["d"])
    df["Date"] = pd.to_datetime(df["ReleaseDate"])
    df.sort_values("Date", ascending=True, inplace=True)
    df["x"] = df["Date"] + datetime.timedelta(hours=(3.5 * 24))

    fig = plt.figure(figsize=(7, 9))
    ax = fig.add_axes([0.1, 0.1, 0.87, 0.84])
    lastrow = None
    for year, gdf in df.groupby(df.Date.dt.year):
        if year < syear or year > eyear:
            continue
        xs = []
        ys = []
        for _, row in gdf.iterrows():
            if lastrow is None:
                lastrow = row
            delta = ((lastrow["D4"] - row["D4"]) * 5.0 +
                     (lastrow["D3"] - row["D3"]) * 4.0 +
                     (lastrow["D2"] - row["D2"]) * 3.0 +
                     (lastrow["D1"] - row["D1"]) * 2.0 +
                     (lastrow["D0"] - row["D0"]))
            xs.append(int(row["Date"].strftime("%j")))
            ys.append(year + (0 - delta) / 100.0)
            lastrow = row
        if len(xs) < 4:
            continue
        fcube = interp1d(xs, ys, kind="cubic")
        xnew = np.arange(xs[0], xs[-1])
        yval = np.ones(len(xnew)) * year
        ynew = fcube(xnew)
        ax.fill_between(
            xnew,
            yval,
            ynew,
            where=(ynew < yval),
            facecolor="blue",
            interpolate=True,
        )
        ax.fill_between(
            xnew,
            yval,
            ynew,
            where=(ynew >= yval),
            facecolor="red",
            interpolate=True,
        )

    ax.set_ylim(eyear + 1, syear - 1)
    ax.set_xlim(0, 366)
    ax.set_xlabel(("curve height of 1 year is 1 effective drought category "
                   "change over area of %s") % (state_names[state], ))
    ax.set_ylabel("Year, thru %s" % (df.Date.max().strftime("%d %b %Y"), ))
    ax.set_title(("%.0f-%.0f US Drought Monitor Weekly Change for %s\n"
                  "curve height represents change in intensity + coverage") %
                 (syear, eyear, state_names[state]))

    ax.grid(True)
    ax.set_xticks((1, 32, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335, 365))
    ax.set_xticklabels(calendar.month_abbr[1:])

    ax.set_yticks(
        np.arange(ax.get_ylim()[0] - 1, ax.get_ylim()[1], -1, dtype="i"))
    fig.text(0.02, 0.03, "Blue areas are improving conditions", color="b")
    fig.text(0.4, 0.03, "Red areas are degrading conditions", color="r")

    return fig, df[["Date", "NONE", "D0", "D1", "D2", "D3", "D4"]]
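
The shaded drought "curves" above are built by cubic-interpolating the weekly deltas with scipy's interp1d and then splitting fill_between on a where= mask, blue below the baseline year and red above. A standalone sketch of that technique on synthetic values:

import numpy as np
import matplotlib.pyplot as plt
from scipy.interpolate import interp1d

xs = np.array([1, 60, 120, 180, 240, 300, 360])  # day-of-year sample points
ys = 2015 + np.array([0.0, -0.2, 0.1, 0.4, -0.3, 0.2, 0.0])
baseline = 2015.0

fcube = interp1d(xs, ys, kind="cubic")
xnew = np.arange(xs[0], xs[-1])
ynew = fcube(xnew)
yval = np.full(len(xnew), baseline)

fig, ax = plt.subplots()
# below the baseline in blue, above it in red, as in the plot above
ax.fill_between(xnew, yval, ynew, where=(ynew < yval), facecolor="blue", interpolate=True)
ax.fill_between(xnew, yval, ynew, where=(ynew >= yval), facecolor="red", interpolate=True)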
Example #18
def plotter(fdict):
    """ Go """
    font0 = FontProperties()
    font0.set_family("monospace")
    font0.set_size(16)
    pgconn = get_dbconn("iem")
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx["zstation"]
    month = ctx["month"]

    if month == "all":
        months = range(1, 13)
    elif month == "fall":
        months = [9, 10, 11]
    elif month == "winter":
        months = [12, 1, 2]
    elif month == "spring":
        months = [3, 4, 5]
    elif month == "summer":
        months = [6, 7, 8]
    else:
        ts = datetime.datetime.strptime("2000-" + month + "-01", "%Y-%b-%d")
        # make sure it is length two for the trick below in SQL
        months = [ts.month, 999]

    df = read_sql(
        """
        SELECT day as date, max_tmpf as max, min_tmpf as min,
        max_tmpf::int - min_tmpf::int as difference
        from summary s JOIN stations t on (s.iemid = t.iemid)
        where t.id = %s and t.network = %s
        and extract(month from day) in %s
        and max_tmpf is not null and min_tmpf is not null
        ORDER by difference DESC, date DESC LIMIT 10
    """,
        pgconn,
        params=(station, ctx["network"], tuple(months)),
        parse_dates=("date",),
        index_col=None,
    )
    if df.empty:
        raise NoDataFound("No Data Found,")
    df["rank"] = df["difference"].rank(ascending=False, method="min")
    fig = plt.figure(figsize=(5.5, 4))
    ab = ctx["_nt"].sts[station]["archive_begin"]
    if ab is None:
        raise NoDataFound("Unknown station metadata.")
    fig.text(
        0.5,
        0.9,
        (
            "%s [%s] %s-%s\n"
            "Top 10 Local Calendar Day [%s] "
            "Temperature Differences"
        )
        % (
            ctx["_nt"].sts[station]["name"],
            station,
            ab.year,
            datetime.date.today().year,
            month.capitalize(),
        ),
        ha="center",
    )
    fig.text(
        0.1, 0.81, " #  Date         Diff   Low High", fontproperties=font0
    )
    y = 0.74
    for _, row in df.iterrows():
        fig.text(
            0.1,
            y,
            ("%2.0f  %11s   %3.0f   %3.0f  %3.0f")
            % (
                row["rank"],
                row["date"].strftime("%d %b %Y"),
                row["difference"],
                row["min"],
                row["max"],
            ),
            fontproperties=font0,
        )
        y -= 0.07
    return fig, df
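
The column alignment in that top-10 listing only works because every fig.text call uses a monospace FontProperties object; a minimal sketch of the trick (the strings are made up):

import matplotlib.pyplot as plt
from matplotlib.font_manager import FontProperties

mono = FontProperties(family="monospace", size=12)
fig = plt.figure(figsize=(5.5, 2))
fig.text(0.1, 0.7, " #  Date         Diff   Low High", fontproperties=mono)
fig.text(0.1, 0.5, " 1  03 Jan 1999    55   -20   35", fontproperties=mono)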
Example #19
File: p101.py Project: stormchas4/iem
def plotter(fdict):
    """ Go """
    pgconn = get_dbconn("postgis")
    pcursor = pgconn.cursor()
    ctx = get_autoplot_context(fdict, get_description())
    ctx["_nt"].sts["_ALL"] = dict(name="ALL WFOs")
    syear = ctx["syear"]
    eyear = ctx["eyear"] + 1
    station = ctx["station"][:4]
    sts = datetime.date(syear, 1, 1)
    ets = datetime.date(eyear, 1, 1)
    wfo_limiter = " and wfo = '%s' " % (station if len(station) == 3 else
                                        station[1:], )
    if station == "_ALL":
        wfo_limiter = ""

    pcursor.execute(
        f"""
        select phenomena, significance, min(issue), count(*) from warnings
        where ugc is not null and issue > %s
        and issue < %s {wfo_limiter}
        GROUP by phenomena, significance ORDER by count DESC
    """,
        (sts, ets),
    )
    if pcursor.rowcount == 0:
        raise NoDataFound("No data found.")
    labels = []
    vals = []
    cnt = 1
    rows = []
    for row in pcursor:
        label = ("%s. %s (%s.%s)") % (
            cnt,
            vtec.get_ps_string(row[0], row[1]),
            row[0],
            row[1],
        )
        if cnt < 26:
            labels.append(label)
            vals.append(row[3])
        rows.append(
            dict(
                phenomena=row[0],
                significance=row[1],
                count=row[3],
                wfo=station,
            ))
        cnt += 1
    df = pd.DataFrame(rows)
    (fig, ax) = plt.subplots(1, 1, figsize=(7, 10))
    vals = np.array(vals)

    ax.barh(np.arange(len(vals)),
            vals / float(vals[0]) * 100.0,
            align="center")
    for i in range(1, len(vals)):
        y = vals[i] / float(vals[0]) * 100.0
        ax.text(y + 1, i, "%.1f%%" % (y, ), va="center")
    fig.text(
        0.5,
        0.95,
        "%s-%s NWS %s Watch/Warning/Advisory Totals" % (
            syear,
            eyear - 1 if (eyear - 1 != syear) else "",
            ctx["_nt"].sts[station]["name"],
        ),
        ha="center",
    )
    fig.text(
        0.5,
        0.05,
        "Event+County/Zone Count, Relative to #%s" % (labels[0], ),
        ha="center",
        fontsize=10,
    )
    ax.set_ylim(len(vals), -0.5)
    ax.grid(True)
    ax.set_yticklabels(labels)
    ax.set_yticks(np.arange(len(vals)))
    ax.set_position([0.5, 0.1, 0.45, 0.83])
    ax.set_xticks([0, 10, 25, 50, 75, 90, 100])

    return fig, df
Example #20
def plotter(fdict):
    """ Go """
    pgconn = get_dbconn('asos', user='******')
    cursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)

    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['zstation']
    threshold = ctx['threshold']
    mydir = ctx['dir']
    hours = ctx['hours']
    varname = ctx['var']
    month = ctx['m'] if fdict.get('month') is None else fdict.get('month')

    year_limiter = ""
    y1, y2 = None, None
    if 'yrange' in ctx:
        y1, y2 = ctx['yrange'].split("-")
        year_limiter = (" and valid >= '%s-01-01' and valid < '%s-01-01' ") % (
            int(y1), int(y2))
    if month == 'all':
        months = range(1, 13)
        sts = datetime.datetime(2000, 1, 1)
        ets = datetime.datetime(2000, 12, 31)
    elif month == 'fall':
        months = [9, 10, 11]
        sts = datetime.datetime(2000, 9, 1)
        ets = datetime.datetime(2000, 11, 30)
    elif month == 'spring':
        months = [3, 4, 5]
        sts = datetime.datetime(2000, 3, 1)
        ets = datetime.datetime(2000, 5, 31)
    elif month == 'summer':
        months = [6, 7, 8]
        sts = datetime.datetime(2000, 6, 1)
        ets = datetime.datetime(2000, 8, 31)
    else:
        ts = datetime.datetime.strptime("2000-" + month + "-01", '%Y-%b-%d')
        # make sure it is length two for the trick below in SQL
        months = [ts.month, 999]
        sts = datetime.datetime(2000, ts.month, 1)
        ets = sts + datetime.timedelta(days=35)
        ets = ets.replace(day=1)

    cursor.execute(
        """
        SELECT valid, round(""" + varname + """::numeric,0)
        from alldata where station = %s """ + year_limiter + """
        and """ + varname + """ is not null and
        extract(month from valid) in %s
        ORDER by valid ASC
    """, (station, tuple(months)))

    (fig, ax) = plt.subplots(1, 1, figsize=(9, 6))
    interval = datetime.timedelta(hours=hours)

    valid = []
    tmpf = []
    year = 0
    lines = []
    for row in cursor:
        if ((month != 'all' and year != row[0].year) or
            (valid and (row[0] - valid[-1]) > datetime.timedelta(hours=3))):
            year = row[0].year
            lines = plot(ax, interval, valid, tmpf, lines, mydir, month)
            valid = []
            tmpf = []
        if ((mydir == 'above' and row[1] >= threshold)
                or (mydir == 'below' and row[1] < threshold)):
            valid.append(row[0])
            tmpf.append(row[1])
        if ((mydir == 'above' and row[1] < threshold)
                or (mydir == 'below' and row[1] >= threshold)):
            valid.append(row[0])
            tmpf.append(row[1])
            lines = plot(ax, interval, valid, tmpf, lines, mydir, month)
            valid = []
            tmpf = []

    lines = plot(ax, interval, valid, tmpf, lines, mydir, month)
    compute_xlabels(ax)
    rows = []
    for line in lines:
        # Ensure we don't send datetimes to pandas
        rows.append(
            dict(start=line.period_start.strftime("%Y-%m-%d %H:%M"),
                 end=line.period_end.strftime("%Y-%m-%d %H:%M"),
                 hours=line.hours,
                 days=line.days))
    df = pd.DataFrame(rows)

    ax.grid(True)
    ax.set_ylabel(r"%s $^\circ$F" % (PDICT2.get(varname), ))
    ab = ctx['_nt'].sts[station]['archive_begin']
    if ab is None:
        raise NoDataFound("Unknown station metadata.")
    ax.set_title(("%s-%s [%s] %s\n"
                  r"%s :: %.0fd%.0fh+ Streaks %s %s$^\circ$F") %
                 (y1 if y1 is not None else ab.year,
                  y2 if y2 is not None else datetime.datetime.now().year,
                  station, ctx['_nt'].sts[station]['name'], MDICT.get(month),
                  hours / 24, hours % 24, mydir, threshold))
    # ax.axhline(32, linestyle='-.', linewidth=2, color='k')
    # ax.set_ylim(bottom=43)
    ax.set_xlabel(("* Due to timezones and leapday, there is some ambiguity"
                   " with the plotted dates"))
    ax.set_position([0.1, 0.25, 0.85, 0.65])
    ax.legend(loc='upper center',
              bbox_to_anchor=(0.5, -0.165),
              fancybox=True,
              shadow=True,
              ncol=5,
              fontsize=12,
              columnspacing=1)
    return fig, df
Example #21
File: p76.py Project: stormchas4/iem
def get_data(ctx, startyear):
    """Get data"""
    pgconn = get_dbconn("asos")
    today = datetime.datetime.now()
    lastyear = today.year
    deltadays = 0
    if ctx["season"] == "all":
        months = range(1, 13)
    elif ctx["season"] == "water_year":
        deltadays = 92
        months = range(1, 13)
    elif ctx["season"] == "spring":
        months = [3, 4, 5]
        if today.month > 5:
            lastyear += 1
    elif ctx["season"] == "spring2":
        months = [4, 5, 6]
        if today.month > 6:
            lastyear += 1
    elif ctx["season"] == "fall":
        months = [9, 10, 11]
        if today.month > 11:
            lastyear += 1
    elif ctx["season"] == "summer":
        months = [6, 7, 8]
        if today.month > 8:
            lastyear += 1
    elif ctx["season"] == "winter":
        deltadays = 33
        months = [12, 1, 2]
        if today.month > 2:
            lastyear += 1
    else:
        ts = datetime.datetime.strptime("2000-" + ctx["season"] + "-01",
                                        "%Y-%b-%d")
        # make sure it is length two for the trick below in SQL
        months = [ts.month, 999]
        lastyear += 1
    hours = range(24)
    if ctx.get("hours"):
        try:
            tokens = [int(i.strip()) for i in ctx["hours"].split("-")]
            hours = range(tokens[0], tokens[1] + 1)
        except ValueError:
            raise Exception("malformed hour limiter, sorry.")
        ctx["hour_limiter"] = "[%s-%s]" % (
            utc(2017, 1, 1, tokens[0]).strftime("%-I %p"),
            utc(2017, 1, 1, tokens[1]).strftime("%-I %p"),
        )

    df = read_sql(
        """
        WITH obs as (
            SELECT valid at time zone %s as valid, tmpf, dwpf, relh,
            coalesce(mslp, alti * 33.8639, 1013.25) as slp
            from alldata WHERE station = %s and dwpf > -90
            and dwpf < 100 and tmpf >= dwpf and
            extract(month from valid) in %s and
            extract(hour from valid at time zone %s) in %s
            and report_type = 2
        )
      SELECT valid,
      extract(year from valid + '%s days'::interval)::int as year,
      tmpf, dwpf, slp, relh from obs
    """,
        pgconn,
        params=(
            ctx["_nt"].sts[ctx["station"]]["tzname"],
            ctx["station"],
            tuple(months),
            ctx["_nt"].sts[ctx["station"]]["tzname"],
            tuple(hours),
            deltadays,
        ),
        index_col=None,
    )
    if df.empty:
        raise NoDataFound("No data found.")
    df = df[(df["year"] >= startyear) & (df["year"] < lastyear)]
    return df
Example #22
File: p48.py Project: stormchas4/iem
def plotter(fdict):
    """ Go """
    pgconn = get_dbconn("postgis")
    cursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    ctx = get_autoplot_context(fdict, get_description())

    ugc = ctx["ugc"]
    phenomena = ctx["phenomena"]
    significance = ctx["significance"]

    (fig, ax) = plt.subplots(1, 1, figsize=(8, 6))

    cursor.execute(
        """
    SELECT s.wfo, s.tzname, u.name from ugcs u
    JOIN stations s on (u.wfo = s.id)
    where ugc = %s and end_ts is null and s.network = 'WFO'
    """,
        (ugc, ),
    )
    wfo = None
    tzname = None
    name = ""
    if cursor.rowcount == 1:
        row = cursor.fetchone()
        tzname = row[1]
        wfo = row[0]
        name = row[2]

    cursor.execute(
        """
     SELECT count(*), min(issue at time zone %s), max(issue at time zone %s)
     from warnings WHERE ugc = %s and phenomena = %s and significance = %s
     and wfo = %s
    """,
        (tzname, tzname, ugc, phenomena, significance, wfo),
    )
    row = cursor.fetchone()
    cnt = row[0]
    sts = row[1]
    ets = row[2]
    if sts is None:
        raise NoDataFound("No Results Found, try flipping zone/county")

    cursor.execute(
        """
     WITH coverage as (
        SELECT extract(year from issue) as yr, eventid,
        generate_series(issue at time zone %s,
                        expire at time zone %s, '1 minute'::interval) as s
                        from warnings where
        ugc = %s and phenomena = %s and significance = %s and wfo = %s),
      minutes as (SELECT distinct yr, eventid,
        (extract(hour from s)::numeric * 60. +
         extract(minute from s)::numeric) as m
        from coverage)

    SELECT minutes.m, count(*) from minutes GROUP by m
          """,
        (tzname, tzname, ugc, phenomena, significance, wfo),
    )

    data = np.zeros((1440, ), "f")
    for row in cursor:
        data[int(row[0])] = row[1]

    df = pd.DataFrame(
        dict(minute=pd.Series(np.arange(1440)), events=pd.Series(data)))

    vals = data / float(cnt) * 100.0
    ax.bar(np.arange(1440), vals, ec="b", fc="b")
    if np.max(vals) > 50:
        ax.set_ylim(0, 100)
        ax.set_yticks([0, 10, 25, 50, 75, 90, 100])
    ax.grid()
    ax.set_xticks(range(0, 1441, 60))
    ax.set_xticklabels([
        "Mid",
        "",
        "",
        "3 AM",
        "",
        "",
        "6 AM",
        "",
        "",
        "9 AM",
        "",
        "",
        "Noon",
        "",
        "",
        "3 PM",
        "",
        "",
        "6 PM",
        "",
        "",
        "9 PM",
        "",
        "",
        "Mid",
    ])
    ax.set_xlabel("Timezone: %s (Daylight or Standard)" % (tzname, ))
    ax.set_ylabel("Frequency [%%] out of %s Events" % (cnt, ))
    ax.set_title(("[%s] %s :: %s (%s.%s)\n%s Events - %s to %s") % (
        ugc,
        name,
        vtec.get_ps_string(phenomena, significance),
        phenomena,
        significance,
        cnt,
        sts.strftime("%Y-%m-%d %I:%M %p"),
        ets.strftime("%Y-%m-%d %I:%M %p"),
    ))
    ax.set_xlim(0, 1441)
    return fig, df
Example #23
def plotter(fdict):
    """ Go """
    pgconn = get_dbconn("coop")
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx["station"]
    varname = ctx["var"]

    bs = ctx["_nt"].sts[station]["archive_begin"]
    if bs is None:
        raise NoDataFound("No Metadata found.")
    res = """\
# IEM Climodat https://mesonet.agron.iastate.edu/climodat/
# Report Generated: %s
# Climate Record: %s -> %s
# Site Information: [%s] %s
# Contact Information: Daryl Herzmann [email protected] 515.294.5978
""" % (
        datetime.date.today().strftime("%d %b %Y"),
        bs.date(),
        datetime.date.today(),
        station,
        ctx["_nt"].sts[station]["name"],
    )
    if varname == "maxmin":
        res += (
            "# DAILY RECORD HIGHS AND LOWS OCCURRING DURING %s-%s FOR "
            "STATION NUMBER  %s\n"
            "     JAN     FEB     MAR     APR     MAY     JUN     JUL     "
            "AUG     SEP     OCT     NOV     DEC\n"
            " DY  MX  MN  MX  MN  MX  MN  MX  MN  MX  MN  MX  MN  MX  MN  MX  "
            "MN  MX  MN  MX  MN  MX  MN  MX  MN\n") % (
                bs.year, datetime.date.today().year, station)
    elif varname == "means":
        res += ("# DAILY MEAN HIGHS AND LOWS FOR STATION NUMBER  %s\n"
                "     JAN     FEB     MAR     APR     MAY     JUN     JUL     "
                "AUG     SEP     OCT     NOV     DEC\n"
                " DY  MX  MN  MX  MN  MX  MN  MX  MN  MX  MN  MX  MN  MX  MN  "
                "MX  MN  MX  MN  MX  MN  MX  MN  MX  MN\n") % (station, )
    elif varname == "range":
        res += ("# RECORD LARGEST AND SMALLEST DAILY RANGES (MAX-MIN) "
                "FOR STATION NUMBER  %s\n"
                "     JAN     FEB     MAR     APR     MAY     JUN     JUL     "
                "AUG     SEP     OCT     NOV     DEC\n"
                " DY  MX  MN  MX  MN  MX  MN  MX  MN  MX  MN  MX  MN  MX  MN  "
                "MX  MN  MX  MN  MX  MN  MX  MN  MX  MN\n") % (station, )
    else:
        res += (f"# DAILY MAXIMUM PRECIPITATION FOR STATION NUMBER {station}\n"
                "     JAN   FEB   MAR   APR   MAY   JUN   JUL   "
                "AUG   SEP   OCT   NOV   DEC\n")

    df = read_sql(
        "SELECT * from climate WHERE station = %s",
        pgconn,
        params=(station, ),
        index_col="valid",
    )

    bad = "  ****" if varname == "precip" else " *** ***"
    for day in range(1, 32):
        res += "%3i" % (day, )
        for mo in range(1, 13):
            try:
                ts = datetime.date(2000, mo, day)
                if ts not in df.index:
                    res += bad
                    continue
            except Exception:
                res += bad
                continue
            row = df.loc[ts]
            if row["max_high"] is None or row["min_low"] is None:
                res += bad
                continue
            if varname == "maxmin":
                res += "%4i%4i" % (row["max_high"], row["min_low"])
            elif varname == "range":
                res += "%4i%4i" % (row["max_range"], row["min_range"])
            elif varname == "means":
                res += "%4i%4i" % (row["high"], row["low"])
            else:
                res += "%6.2f" % (row["max_precip"], )
        res += "\n"

    return None, df, res
Example #24
File: p152.py Project: smartparrot/iem
def plotter(fdict):
    """ Go """
    pgconn = get_dbconn("coop")
    ctx = get_autoplot_context(fdict, get_description())
    state = ctx["state"][:2]
    sector = ctx["sector"]
    opt = ctx["opt"]
    p1syear = ctx["p1syear"]
    p1eyear = ctx["p1eyear"]
    p2syear = ctx["p2syear"]
    p2eyear = ctx["p2eyear"]
    varname = ctx["var"]

    table = "alldata"
    if sector == "state":
        table = "alldata_%s" % (state, )

    df = read_sql(
        """
    WITH season1 as (
        SELECT station, year,
        min(case when month > 7 and low < 32 then
            extract(doy from day) else 366 end) as first_freeze,
        max(case when month < 7 and low < 32 then
            extract(doy from day) else 0 end) as last_freeze
        from """ + table + """ WHERE
        year >= %s and year <= %s GROUP by station, year),
    season2 as (
        SELECT station, year,
        min(case when month > 7 and low < 32 then
            extract(doy from day) else 366 end) as first_freeze,
        max(case when month < 7 and low < 32 then
            extract(doy from day) else 0 end) as last_freeze
        from """ + table + """ WHERE
        year >= %s and year <= %s GROUP by station, year),
    agg as (
        SELECT p1.station, avg(p1.first_freeze) as p1_first_fall,
        avg(p1.last_freeze) as p1_last_spring,
        avg(p2.first_freeze) as p2_first_fall,
        avg(p2.last_freeze) as p2_last_spring
        from season1 as p1 JOIN season2 as p2 on (p1.station = p2.station)
        GROUP by p1.station)

    SELECT station, ST_X(geom) as lon, ST_Y(geom) as lat,
    d.* from agg d JOIN stations t ON (d.station = t.id)
    WHERE t.network ~* 'CLIMATE'
    and substr(station, 3, 1) != 'C' and substr(station, 3, 4) != '0000'
    """,
        pgconn,
        params=[p1syear, p1eyear, p2syear, p2eyear],
        index_col="station",
    )
    if df.empty:
        raise NoDataFound("No Data Found")
    df["p1_season"] = df["p1_first_fall"] - df["p1_last_spring"]
    df["p2_season"] = df["p2_first_fall"] - df["p2_last_spring"]
    df["season_delta"] = df["p2_season"] - df["p1_season"]
    df["spring_delta"] = df["p2_last_spring"] - df["p1_last_spring"]
    df["fall_delta"] = df["p2_first_fall"] - df["p1_first_fall"]
    # Reindex so that most extreme values are first
    df = df.reindex(df[varname +
                       "_delta"].abs().sort_values(ascending=False).index)

    title = PDICT3[varname]
    mp = MapPlot(
        sector=sector,
        state=state,
        axisbg="white",
        title=("%.0f-%.0f minus %.0f-%.0f %s Difference") %
        (p2syear, p2eyear, p1syear, p1eyear, title),
        subtitle=("based on IEM Archives"),
        titlefontsize=14,
    )
    # Create 9 levels centered on zero
    abval = df[varname + "_delta"].abs().max()
    levels = centered_bins(abval)
    if opt in ["both", "contour"]:
        mp.contourf(
            df["lon"].values,
            df["lat"].values,
            df[varname + "_delta"].values,
            levels,
            cmap=plt.get_cmap(ctx["cmap"]),
            units="days",
        )
    if sector == "state":
        mp.drawcounties()
    if opt in ["both", "values"]:
        mp.plot_values(
            df["lon"].values,
            df["lat"].values,
            df[varname + "_delta"].values,
            fmt="%.1f",
            labelbuffer=5,
        )

    return mp.fig, df
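
The reindex call above orders stations by the magnitude of their change, presumably so the most extreme values win when overlapping labels are dropped by labelbuffer. A small sketch of that idiom with made-up station ids:

import pandas as pd

df = pd.DataFrame({"season_delta": [3.0, -7.5, 1.2]},
                  index=["IA0001", "IA0002", "IA0003"])
# most extreme |delta| first
df = df.reindex(df["season_delta"].abs().sort_values(ascending=False).index)
print(df)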
Example #25
def plotter(fdict):
    """ Go """
    ctx = get_autoplot_context(fdict, get_description())
    varname = ctx["v"]

    df = get_df(ctx)
    if df.empty:
        raise NoDataFound("No data was found for your query")
    mp = MapPlot(
        sector=("state" if ctx["t"] == "state" else "cwa"),
        state=ctx["state"],
        cwa=(ctx["wfo"] if len(ctx["wfo"]) == 3 else ctx["wfo"][1:]),
        axisbg="white",
        title="%s for %s on %s" % (PDICT2[ctx["v"]], ctx["title"], ctx["day"]),
        nocaption=True,
        titlefontsize=16,
    )
    ramp = None
    cmap = get_cmap(ctx["cmap"])
    extend = "both"
    if varname in ["max_gust", "max_sknt"]:
        extend = "max"
        ramp = np.arange(0, 40, 4)
        ramp = np.append(ramp, np.arange(40, 80, 10))
        ramp = np.append(ramp, np.arange(80, 120, 20))
    # Data QC, cough
    if ctx.get("above"):
        df = df[df[varname] < ctx["above"]]
    if ctx.get("below"):
        df = df[df[varname] > ctx["below"]]
    # with QC done, we compute ramps
    if ramp is None:
        ramp = np.linspace(df[varname].min() - 5,
                           df[varname].max() + 5,
                           10,
                           dtype="i")

    if ctx["p"] == "both":
        mp.contourf(
            df["lon"].values,
            df["lat"].values,
            df[varname].values,
            ramp,
            units=VARUNITS[varname],
            cmap=cmap,
            spacing="proportional",
            extend=extend,
        )
    if ctx["t"] == "state":
        df2 = df[df[ctx["t"]] == ctx[ctx["t"]]]
    else:
        df2 = df[df["wfo"] == ctx["wfo"]]

    mp.plot_values(
        df2["lon"].values,
        df2["lat"].values,
        df2[varname].values,
        "%.1f" if varname in ["max_gust", "max_sknt"] else "%.0f",
        labelbuffer=3,
    )
    mp.drawcounties()
    if ctx["t"] == "cwa":
        mp.draw_cwas()

    return mp.fig, df
Example #26
File: p9.py Project: stormchas4/iem
def plotter(fdict):
    """ Go """
    pgconn = get_dbconn("coop", user="******")
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx["station"]
    thisyear = datetime.datetime.now().year
    year = ctx["year"]
    base = ctx["base"]
    ceiling = ctx["ceiling"]
    varname = ctx["var"]

    table = "alldata_%s" % (station[:2], )
    nt = network.Table("%sCLIMATE" % (station[:2], ))
    ab = nt.sts[station]["archive_begin"]
    if ab is None:
        raise NoDataFound("Unknown Station Metadata.")
    syear = max(ab.year, 1893)

    glabel = "%s%s%s" % (varname, base, ceiling)
    gfunc = "gddxx(%s, %s, high, low)" % (base, ceiling)
    title = "base=%s/ceil=%s" % (base, ceiling)
    if varname in ["hdd", "cdd"]:
        gfunc = "%s(high, low, %s)" % (varname, base)
        title = "base=%s" % (base, )
    elif varname == "sdd":
        gfunc = "case when high > %s then high - %s else 0 end" % (
            ceiling,
            ceiling,
        )
        title = "base=%s" % (ceiling, )

    df = read_sql(
        """
        SELECT year, sday,
        """ + gfunc + """ as """ + glabel + """
        from """ + table + """ WHERE station = %s and year > 1892
        and sday != '0229'
    """,
        pgconn,
        params=(station, ),
    )
    if df.empty:
        raise NoDataFound("No data Found.")

    # Do some magic!
    df2 = (df[[
        "sday", glabel
    ]].groupby("sday").describe(percentiles=[0.05, 0.25, 0.75, 0.95]))
    df2 = df2.unstack(level=-1)
    (fig, ax) = plt.subplots(1, 1, figsize=(8, 6))
    ax.plot(
        np.arange(1, 366),
        df2[(glabel, "mean")],
        color="r",
        zorder=2,
        lw=2.0,
        label="Average",
    )
    _data = df[df["year"] == year][[glabel, "sday"]]
    _data.sort_values(by="sday", inplace=True)
    ax.scatter(
        np.arange(1, _data[glabel].shape[0] + 1),
        _data[glabel],
        color="b",
        zorder=2,
        label="%s" % (year, ),
    )
    ax.bar(
        np.arange(1, 366),
        df2[(glabel, "95%")] - df2[(glabel, "5%")],
        bottom=df2[(glabel, "5%")],
        ec="tan",
        fc="tan",
        zorder=1,
        label="5-95 Percentile",
    )
    ax.bar(
        np.arange(1, 366),
        df2[(glabel, "75%")] - df2[(glabel, "25%")],
        bottom=df2[(glabel, "25%")],
        ec="lightblue",
        fc="lightblue",
        zorder=1,
        label="25-75 Percentile",
    )
    ax.set_xlim(0, 367)
    if varname == "gdd":
        ax.set_ylim(-0.25, 40)
    ax.grid(True)
    ax.set_title("%s-%s %s [%s]\n%s %s (%s)" % (
        syear,
        thisyear,
        nt.sts[station]["name"],
        station,
        year,
        PDICT[varname],
        title,
    ))
    ax.set_ylabel(r"Daily Accumulation $^{\circ}\mathrm{F}$")
    ax.set_xticks((1, 32, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335))
    ax.legend(ncol=2)
    ax.set_xticklabels(calendar.month_abbr[1:])

    # collapse the multiindex for columns
    df = pd.DataFrame(df2)
    return fig, df
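
The climatology bands above come from groupby().describe() with custom percentiles, whose statistics are keyed by (column, statistic) pairs such as (glabel, "mean") or (glabel, "95%"). A small sketch of that access pattern on toy data (the column name is invented):

import pandas as pd

df = pd.DataFrame({
    "sday": ["0101", "0101", "0102", "0102"],
    "gdd5086": [0.0, 2.0, 1.0, 3.0],
})
stats = df.groupby("sday").describe(percentiles=[0.05, 0.25, 0.75, 0.95])
# columns look like ("gdd5086", "mean"), ("gdd5086", "5%"), ("gdd5086", "95%"), ...
print(stats[("gdd5086", "mean")])
print(stats[("gdd5086", "95%")] - stats[("gdd5086", "5%")])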
Example #27
File: p69.py Project: smartparrot/iem
def plotter(fdict):
    """ Go """
    pgconn = get_dbconn("coop")
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx["station"]
    varname = ctx["var"]
    month = ctx["month"]

    table = "alldata_%s" % (station[:2], )
    nt = network.Table("%sCLIMATE" % (station[:2], ))

    yr = "year as yr"
    if month == "all":
        months = range(1, 13)
    elif month == "fall":
        months = [9, 10, 11]
    elif month == "winter":
        months = [12, 1, 2]
        yr = "extract(year from o.day - '60 days'::interval) as yr"
    elif month == "spring":
        months = [3, 4, 5]
    elif month == "summer":
        months = [6, 7, 8]
    else:
        ts = datetime.datetime.strptime("2000-" + month + "-01", "%Y-%b-%d")
        # make sure it is length two for the trick below in SQL
        months = [ts.month, 999]

    df = read_sql(
        """
      WITH avgs as (
        SELECT sday, avg(high) as avg_high,
        avg(low) as avg_low,
        avg((high+low)/2.) as avg_temp from """ + table + """ WHERE
        station = %s GROUP by sday)

      SELECT """ + yr + """,
      sum(case when o.high > a.avg_high then 1 else 0 end) as high_above,
      sum(case when o.low > a.avg_low then 1 else 0 end) as low_above,
      sum(case when (o.high+o.low)/2. > a.avg_temp then 1 else 0 end)
          as avg_above,
      count(*) as days from """ + table + """ o, avgs a WHERE o.station = %s
      and o.sday = a.sday and month in %s
      GROUP by yr ORDER by yr ASC
    """,
        pgconn,
        params=(station, station, tuple(months)),
        index_col="yr",
    )
    if df.empty:
        raise NoDataFound("No Data Found.")

    df["high_freq"] = df["high_above"] / df["days"].astype("f") * 100.0
    df["low_freq"] = df["low_above"] / df["days"].astype("f") * 100.0
    df["avg_freq"] = df["avg_above"] / df["days"].astype("f") * 100.0

    (fig, ax) = plt.subplots(1, 1, figsize=(8, 6))
    avgv = df[varname + "_freq"].mean()

    colorabove = "r"
    colorbelow = "b"
    data = df[varname + "_freq"].values
    bars = ax.bar(df.index.values,
                  data,
                  fc=colorabove,
                  ec=colorabove,
                  align="center")
    for i, bar in enumerate(bars):
        if data[i] < avgv:
            bar.set_facecolor(colorbelow)
            bar.set_edgecolor(colorbelow)
    ax.axhline(avgv, lw=2, color="k", zorder=2)
    txt = ax.text(
        bars[10].get_x(),
        avgv,
        "Avg: %.1f%%" % (avgv, ),
        color="yellow",
        fontsize=14,
        va="center",
    )
    txt.set_path_effects([PathEffects.withStroke(linewidth=5, foreground="k")])
    ax.set_ylim(0, 100)
    ax.set_yticks([0, 10, 25, 50, 75, 90, 100])
    ax.set_xlabel("Year")
    ax.set_xlim(bars[0].get_x() - 1, bars[-1].get_x() + 1)
    ax.set_ylabel("Frequency [%]")
    ax.grid(True)
    msg = ("[%s] %s %s-%s Percentage of Days with %s Above Average (month=%s)"
           ) % (
               station,
               nt.sts[station]["name"],
               df.index.values.min(),
               df.index.values.max(),
               PDICT[varname],
               month.upper(),
           )
    tokens = msg.split()
    sz = int(len(tokens) / 2)
    ax.set_title(" ".join(tokens[:sz]) + "\n" + " ".join(tokens[sz:]))

    return fig, df
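
The yellow "Avg" annotation stays legible over both red and blue bars because of the withStroke path effect, which draws an outline behind the text. A minimal sketch:

import matplotlib.pyplot as plt
import matplotlib.patheffects as PathEffects

fig, ax = plt.subplots()
ax.bar(range(5), [60, 70, 55, 80, 65], fc="r", ec="r")
txt = ax.text(1, 66, "Avg: 66.0%", color="yellow", fontsize=14, va="center")
# black stroke behind the yellow glyphs keeps the label readable on any background
txt.set_path_effects([PathEffects.withStroke(linewidth=5, foreground="k")])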
Example #28
File: p55.py Project: stormchas4/iem
def plotter(fdict):
    """ Go """
    pgconn = get_dbconn("coop")
    cursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx["station"]
    month = ctx["month"]

    table = "alldata_%s" % (station[:2],)

    # beat month
    cursor.execute(
        """
    with obs as (
     SELECT sday, avg(high) as avgh, avg(low) as avgl,
     avg((high+low)/2.) as avgt from """
        + table
        + """
     WHERE station = %s and month = %s GROUP by sday
    ), c81 as (
     SELECT to_char(valid, 'mmdd') as sday, high, low, (high+low)/2. as avgt
     from ncdc_climate81 where station = %s
    ), c71 as (
     SELECT to_char(valid, 'mmdd') as sday, high, low, (high+low)/2. as avgt
     from ncdc_climate71 where station = %s
    )

    SELECT o.sday, o.avgh, c81.high, c71.high,
    o.avgl, c81.low, c71.low,
    o.avgt, c81.avgt, c71.avgt from
    obs o, c81, c71 where o.sday = c81.sday and o.sday = c71.sday
    ORDER by o.sday ASC
    """,
        (station, month, ctx["_nt"].sts[station]["ncdc81"], station),
    )
    if cursor.rowcount == 0:
        raise NoDataFound("No Data Found.")

    o_avgh = []
    o_avgl = []
    o_avgt = []
    c81_avgh = []
    c81_avgl = []
    c81_avgt = []
    c71_avgh = []
    c71_avgl = []
    c71_avgt = []
    days = []
    for row in cursor:
        days.append(int(row[0][2:]))
        o_avgh.append(float(row[1]))
        o_avgl.append(float(row[4]))
        o_avgt.append(float(row[7]))

        c81_avgh.append(row[2])
        c81_avgl.append(row[5])
        c81_avgt.append(row[8])

        c71_avgh.append(row[3])
        c71_avgl.append(row[6])
        c71_avgt.append(row[9])

    df = pd.DataFrame(
        dict(
            day=pd.Series(days),
            iem_avgh=pd.Series(o_avgh),
            iem_avgl=pd.Series(o_avgl),
            iem_avgt=pd.Series(o_avgt),
            ncei81_avgh=pd.Series(c81_avgh),
            ncei81_avgl=pd.Series(c81_avgl),
            ncei81_avgt=pd.Series(c81_avgt),
            ncei71_avgh=pd.Series(c71_avgh),
            ncei71_avgl=pd.Series(c71_avgl),
            ncei71_avgt=pd.Series(c71_avgt),
        )
    )

    days = np.array(days)

    (fig, ax) = plt.subplots(3, 1, sharex=True, figsize=(8, 6))

    ab = ctx["_nt"].sts[station]["archive_begin"]
    if ab is None:
        raise NoDataFound("Unknown station metadata.")
    ax[0].set_title(
        ("%s %s Daily Climate Comparison\n" "Observation Period: %s-%s for %s")
        % (
            station,
            ctx["_nt"].sts[station]["name"],
            ab.year,
            datetime.datetime.now().year,
            calendar.month_name[month],
        ),
        fontsize=12,
    )

    ax[0].bar(days, o_avgh, width=0.8, fc="tan", align="center")
    ax[0].plot(days, c81_avgh, lw=2, zorder=2, color="g")
    ax[0].plot(days, c71_avgh, lw=2, zorder=2, color="r")
    ax[0].grid(True)
    ax[0].set_ylabel(r"High Temp $^\circ$F")
    ax[0].set_ylim(bottom=min([min(o_avgh), min(c71_avgh), min(c81_avgh)]) - 2)

    ax[1].bar(days, o_avgl, width=0.8, fc="tan", align="center")
    ax[1].plot(days, c81_avgl, lw=2, zorder=2, color="g")
    ax[1].plot(days, c71_avgl, lw=2, zorder=2, color="r")
    ax[1].grid(True)
    ax[1].set_ylabel(r"Low Temp $^\circ$F")
    ax[1].set_ylim(bottom=min([min(o_avgl), min(c71_avgl), min(c81_avgl)]) - 2)

    ax[2].bar(
        days,
        o_avgt,
        width=0.8,
        fc="tan",
        align="center",
        label="IEM Observered Avg",
    )
    ax[2].plot(
        days, c81_avgt, lw=2, zorder=2, color="g", label="NCEI 1981-2010"
    )
    ax[2].plot(
        days, c71_avgt, lw=2, zorder=2, color="r", label="NCEI 1971-2000"
    )
    ax[2].grid(True)
    ax[2].legend(
        loc="lower center",
        bbox_to_anchor=(0.5, 0.95),
        fancybox=True,
        shadow=True,
        ncol=3,
        scatterpoints=1,
        fontsize=10,
    )

    ax[2].set_ylabel(r"Average Temp $^\circ$F")
    ax[2].set_ylim(bottom=min([min(o_avgt), min(c71_avgt), min(c81_avgt)]) - 2)
    ax[2].set_xlabel("Day of %s" % (calendar.month_name[month],))
    ax[2].set_xlim(0.5, len(days) + 0.5)

    return fig, df
Example #29
def plot1(ctx):
    """Do main plotting logic"""
    df = read_sql(
        """
        SELECT * from sm_hourly WHERE
        station = %s and valid BETWEEN %s and %s ORDER by valid ASC
    """,
        ctx["pgconn"],
        params=(ctx["station"], ctx["sts"], ctx["ets"]),
        index_col="valid",
    )
    if df.empty:
        raise NoDataFound("No Data Found for This Plot.")
    slrkw = df["slrkw_avg_qc"]
    d12sm = df["calc_vwc_12_avg_qc"]
    d12t = df["t12_c_avg_qc"]
    d24t = df["t24_c_avg_qc"]
    d50t = df["t50_c_avg_qc"]
    d24sm = df["calc_vwc_24_avg_qc"]
    d50sm = df["calc_vwc_50_avg_qc"]
    rain = df["rain_mm_tot_qc"]
    tair = df["tair_c_avg_qc"]
    tsoil = df["tsoil_c_avg_qc"]
    valid = df.index.values

    (fig, ax) = plt.subplots(3, 1, sharex=True, figsize=(8, 8))
    ax[0].grid(True)
    ax2 = ax[0].twinx()
    ax[0].set_zorder(ax2.get_zorder() + 1)
    ax[0].patch.set_visible(False)
    # arange leads to funky values
    ax2.set_yticks([-0.6, -0.5, -0.4, -0.3, -0.2, -0.1, 0])
    ax2.set_yticklabels([0.6, 0.5, 0.4, 0.3, 0.2, 0.1, 0])
    ax2.set_ylim(-0.6, 0)
    ax2.set_ylabel("Hourly Precipitation [inch]")
    b1 = ax2.bar(valid, 0 - rain / 25.4, width=0.04, fc="b", ec="b", zorder=4)

    l1 = None
    l2 = None
    l3 = None
    if not d12sm.isnull().all():
        l1, = ax[0].plot(valid,
                         d12sm * 100.0,
                         linewidth=2,
                         color="r",
                         zorder=5)
    if not d24sm.isnull().all():
        l2, = ax[0].plot(valid,
                         d24sm * 100.0,
                         linewidth=2,
                         color="purple",
                         zorder=5)
    if not d50sm.isnull().all():
        l3, = ax[0].plot(valid,
                         d50sm * 100.0,
                         linewidth=2,
                         color="black",
                         zorder=5)
    ax[0].set_ylabel("Volumetric Soil Water Content [%]", fontsize=10)

    days = (ctx["ets"] - ctx["sts"]).days
    if days >= 3:
        interval = max(int(days / 7), 1)
        ax[0].xaxis.set_major_locator(
            mdates.DayLocator(interval=interval,
                              tz=pytz.timezone("America/Chicago")))
        ax[0].xaxis.set_major_formatter(
            mdates.DateFormatter("%-d %b\n%Y",
                                 tz=pytz.timezone("America/Chicago")))
    else:
        ax[0].xaxis.set_major_locator(
            mdates.AutoDateLocator(maxticks=10,
                                   tz=pytz.timezone("America/Chicago")))
        ax[0].xaxis.set_major_formatter(
            mdates.DateFormatter("%-I %p\n%d %b",
                                 tz=pytz.timezone("America/Chicago")))

    ax[0].set_title(("ISUSM Station: %s Timeseries") %
                    (ctx["_nt"].sts[ctx["station"]]["name"], ))
    box = ax[0].get_position()
    ax[0].set_position(
        [box.x0, box.y0 + box.height * 0.05, box.width, box.height * 0.95])
    box = ax2.get_position()
    ax2.set_position(
        [box.x0, box.y0 + box.height * 0.05, box.width, box.height * 0.95])
    if None not in [l1, l2, l3]:
        ax[0].legend(
            [l1, l2, l3, b1],
            ["12 inch", "24 inch", "50 inch", "Hourly Precip"],
            bbox_to_anchor=(0.5, -0.15),
            ncol=4,
            loc="center",
            fontsize=12,
        )

    # ----------------------------------------
    if not d12t.isnull().all():
        ax[1].plot(
            valid,
            temperature(d12t, "C").value("F"),
            linewidth=2,
            color="r",
            label="12in",
        )
    if not d24t.isnull().all():
        ax[1].plot(
            valid,
            temperature(d24t, "C").value("F"),
            linewidth=2,
            color="purple",
            label="24in",
        )
    if not d50t.isnull().all():
        ax[1].plot(
            valid,
            temperature(d50t, "C").value("F"),
            linewidth=2,
            color="black",
            label="50in",
        )
    ax[1].grid(True)
    ax[1].set_ylabel(r"Temperature $^\circ$F")
    box = ax[1].get_position()
    ax[1].set_position(
        [box.x0, box.y0 + box.height * 0.05, box.width, box.height * 0.95])

    # ------------------------------------------------------

    ax2 = ax[2].twinx()
    l3, = ax2.plot(valid, slrkw, color="g", zorder=1, lw=2)
    ax2.set_ylabel("Solar Radiation [W/m^2]", color="g")

    l1, = ax[2].plot(
        valid,
        temperature(tair, "C").value("F"),
        linewidth=2,
        color="blue",
        zorder=2,
    )
    l2, = ax[2].plot(
        valid,
        temperature(tsoil, "C").value("F"),
        linewidth=2,
        color="brown",
        zorder=2,
    )
    ax[2].grid(True)
    ax[2].legend(
        [l1, l2, l3],
        ["Air", '4" Soil', "Solar Radiation"],
        bbox_to_anchor=(0.5, 1.1),
        loc="center",
        ncol=3,
    )
    ax[2].set_ylabel(r"Temperature $^\circ$F")

    ax[2].set_zorder(ax2.get_zorder() + 1)
    ax[2].patch.set_visible(False)
    ax[0].set_xlim(df.index.min(), df.index.max())
    return fig, df
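
Both the soil-moisture and the air/soil temperature panels lean on the same twinx trick: raise the primary axis' zorder above its twin and hide its patch so the lines draw over the bars without blanking them out. A minimal sketch with synthetic series:

import numpy as np
import matplotlib.pyplot as plt

x = np.arange(48)
fig, ax = plt.subplots()
ax2 = ax.twinx()
ax2.bar(x, np.random.uniform(0, 0.3, x.size), color="b")  # e.g. hourly precip
ax.plot(x, 20 + 10 * np.sin(x / 8.0), color="r", lw=2)    # e.g. soil moisture
# lift the line axis above the bar axis and make its background transparent
ax.set_zorder(ax2.get_zorder() + 1)
ax.patch.set_visible(False)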
Example #30
def plotter(fdict):
    """ Go """
    pgconn = get_dbconn('coop')
    ctx = get_autoplot_context(fdict, get_description())

    station = ctx['station']
    varname = ctx['varname']
    agg = ctx['agg']

    table = "alldata_%s" % (station[:2], )

    lastday = datetime.date.today()
    if varname == 'total_precip' and agg == 'max':
        lastday += datetime.timedelta(days=1)
    else:
        lastday = lastday.replace(day=1)

    df = read_sql("""SELECT year, month, avg((high+low)/2.) as avg_temp,
      avg(high) as avg_high_temp, avg(low) as avg_low_temp,
      sum(precip) as total_precip
      from """ + table +
                  """ where station = %s and day < %s GROUP by year, month
      """,
                  pgconn,
                  params=(station, lastday))
    if df.empty:
        raise NoDataFound("No Data Found.")

    resdf = pd.DataFrame(
        dict(monthname=pd.Series(calendar.month_abbr[1:], index=range(1, 13))),
        index=pd.Series(range(1, 13), name='month'))
    for _varname in PDICT:
        for _agg in [min, max]:
            df2 = df[[_varname, 'month', 'year']]
            df2 = df2[df[_varname] == df.groupby('month')[_varname].transform(
                _agg)].copy()
            df2.rename(columns={
                'year': '%s_%s_year' % (_agg.__name__, _varname),
                _varname: '%s_%s' % (_agg.__name__, _varname)
            },
                       inplace=True)
            df2.set_index('month', inplace=True)
            resdf = resdf.join(df2)

    # The above can end up with duplicates
    resdf = resdf.groupby(level=0)
    resdf = resdf.last()

    (fig, ax) = plt.subplots(1, 1, figsize=(8, 6))

    col = "%s_%s" % (agg, varname)
    ax.bar(np.arange(1, 13), resdf[col], fc='pink', align='center')
    for month, row in resdf.iterrows():
        if np.isnan(row[col]):
            continue
        ax.text(month,
                row[col],
                "%.0f\n%.2f" % (row[col + '_year'], row[col]),
                ha='center',
                va='bottom')
    ax.set_xlim(0.5, 12.5)
    ax.set_ylim(top=resdf[col].max() * 1.2)
    ab = ctx['_nt'].sts[station]['archive_begin']
    if ab is None:
        raise NoDataFound("Unknown Station Metadata.")
    ax.set_title(("[%s] %s\n%s %s [%s-%s]\n") %
                 (station, ctx['_nt'].sts[station]['name'], PDICT2[agg],
                  PDICT[varname], ab.year, lastday.year))
    ylabel = r"Temperature $^\circ$F"
    if varname in ['total_precip']:
        ylabel = 'Precipitation [inch]'
    ax.set_ylabel(ylabel)
    ax.grid(True)
    ax.set_xticklabels(calendar.month_abbr[1:])
    ax.set_xticks(np.arange(1, 13))

    box = ax.get_position()
    ax.set_position([box.x0, box.y0, box.width, box.height * 0.95])

    return fig, resdf
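
The transform comparison above is the idiom that keeps, for every month, the row(s) from the year that produced the extreme value. A compact sketch of it on toy data:

import pandas as pd

df = pd.DataFrame({
    "year": [2001, 2002, 2001, 2002],
    "month": [1, 1, 2, 2],
    "total_precip": [1.2, 3.4, 0.8, 0.5],
})
# keep rows whose value equals the per-month maximum
extremes = df[df["total_precip"] == df.groupby("month")["total_precip"].transform("max")]
print(extremes)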