Example #1
def plotter(fdict):
    """ Go """
    ctx = get_autoplot_context(fdict, get_description())
    if ctx["p"] == "day":
        get_daily_data(ctx)
    else:
        get_monthly_data(ctx)
    ctx["lastyear"] = datetime.date.today().year
    ctx["years"] = ctx["lastyear"] - 1893 + 1
    csector = ctx["csector"]

    subtitle = ("Based on IEM Estimates, "
                "1 is %s out of %s total years (1893-%s)") % (
                    "wettest" if ctx["var"] == "precip" else "hottest",
                    ctx["years"],
                    ctx["lastyear"],
                )
    if ctx["var"] == "arridity":
        subtitle = "Std Average High Temp Departure minus Std Precip Departure"
    mp = MapPlot(
        sector=("state" if len(csector) == 2 else csector),
        state=ctx["csector"],
        continentalcolor="white",
        title="%s %s %sby Climate District" % (
            ctx["label"],
            PDICT[ctx["var"]],
            "Ranks " if ctx["var"] != "arridity" else "",
        ),
        subtitle=subtitle,
        titlefontsize=14,
    )
    cmap = get_cmap(ctx["cmap"])
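    # rank bins: finer resolution near both ends so the extreme ranks stand out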
    bins = [
        1,
        5,
        10,
        25,
        50,
        75,
        100,
        ctx["years"] - 10,
        ctx["years"] - 5,
        ctx["years"],
    ]
    pvar = ctx["var"] + "_rank"
    fmt = "%.0f"
    if ctx["var"] == "arridity":
        bins = np.arange(-4, 4.1, 1)
        pvar = ctx["var"]
        fmt = "%.1f"
    mp.fill_climdiv(
        ctx["df"][pvar],
        ilabel=True,
        plotmissing=False,
        lblformat=fmt,
        bins=bins,
        cmap=cmap,
    )

    return mp.fig, ctx["df"]
Example #2
def magic(ax, df, colname, title, ctx):
    """You can do magic"""
    df2 = df[df[colname] == 1]

    ax.text(0, 1.02, title, transform=ax.transAxes)
    ax.set_xlim(0, 367)
    ax.grid(True)
    ax.set_xticks((1, 32, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335))
    ax.set_xticklabels(calendar.month_abbr[1:])

    bbox = ax.get_position()
    sideax = plt.axes([bbox.x1 + 0.01, bbox.y0, 0.09, 0.35])
    ylim = [df["year"].min(), df["year"].max()]
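    # round the year range outward to decade boundaries for the color norm and histogram bins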
    year0 = ylim[0] - (ylim[0] % 10)
    year1 = ylim[1] + (10 - ylim[1] % 10)
    cmap = get_cmap(ctx["cmap"])
    norm = mpcolors.BoundaryNorm(np.arange(year0, year1 + 1, 10), cmap.N)
    ax.scatter(df2["doy"], df2["year"], color=cmap(norm(df2["year"].values)))
    ax.set_yticks(np.arange(year0, year1, 20))
    ax.set_ylim(*ylim)
    cnts, edges = np.histogram(df2["year"].values,
                               np.arange(year0, year1 + 1, 10))
    sideax.barh(edges[:-1],
                cnts,
                height=10,
                align="edge",
                color=cmap(norm(edges[:-1])))
    sideax.set_yticks(np.arange(year0, year1, 20))
    sideax.set_yticklabels([])
    sideax.set_ylim(*ylim)
    sideax.grid(True)
    sideax.set_xlabel("Decade Count")
Example #3
def plotter(fdict):
    """ Go """
    pgconn = get_dbconn("postgis")
    ctx = get_autoplot_context(fdict, get_description())
    sts = ctx["sdate"]
    sts = sts.replace(tzinfo=pytz.utc)
    ets = ctx["edate"]
    by = ctx["by"]
    ets = ets.replace(tzinfo=pytz.utc)
    myfilter = ctx["filter"]
    if myfilter == "NONE":
        tlimiter = ""
    elif myfilter == "NRS":
        tlimiter = " and typetext not in ('HEAVY RAIN', 'SNOW', 'HEAVY SNOW') "
    elif myfilter == "CON":
        tlimiter = (" and typetext in ('TORNADO', 'HAIL', 'TSTM WND GST', "
                    "'TSTM WND DMG') ")
    else:
        tlimiter = " and typetext = '%s' " % (myfilter, )

    df = read_sql(
        """
    WITH data as (
        SELECT distinct wfo, state, valid, type, magnitude, geom from lsrs
        where valid >= %s and valid < %s """ + tlimiter + """
    )
    SELECT """ + by + """, count(*) from data GROUP by """ + by + """
    """,
        pgconn,
        params=(sts, ets),
        index_col=by,
    )
    data = {}
    for idx, row in df.iterrows():
        if idx == "JSJ":
            idx = "SJU"
        data[idx] = row["count"]
    maxv = df["count"].max()
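    # twelve integer bins from 1 to the max count; widen the top bin so the max value is included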
    bins = np.linspace(1, maxv, 12, dtype="i")
    bins[-1] += 1
    mp = MapPlot(
        sector="nws",
        axisbg="white",
        title=("Preliminary/Unfiltered Local Storm Report Counts %s") %
        (PDICT[by], ),
        subtitlefontsize=10,
        subtitle=("Valid %s - %s UTC, type limiter: %s") % (
            sts.strftime("%d %b %Y %H:%M"),
            ets.strftime("%d %b %Y %H:%M"),
            MDICT.get(myfilter),
        ),
    )
    cmap = get_cmap(ctx["cmap"])
    if by == "wfo":
        mp.fill_cwas(data, bins=bins, cmap=cmap, ilabel=True)
    else:
        mp.fill_states(data, bins=bins, cmap=cmap, ilabel=True)

    return mp.fig, df
Example #4
def plotter(fdict):
    """ Go """
    pgconn = get_dbconn("coop")
    ctx = get_autoplot_context(fdict, get_description())
    date1 = ctx["date1"]
    date2 = ctx["date2"]
    date1 = date1.replace(year=2000)
    date2 = date2.replace(year=2000)

    varname = ctx["varname"]

    df = read_sql(
        """
    WITH t2 as (
         SELECT station, high, low from ncdc_climate81 WHERE
         valid = %s
    ), t1 as (
        SELECT station, high, low from ncdc_climate81 where
        valid = %s
    ), data as (
        SELECT t2.station, t1.high as t1_high, t2.high as t2_high,
        t1.low as t1_low, t2.low as t2_low from t1 JOIN t2 on
        (t1.station = t2.station)
    )
    SELECT d.station, ST_x(geom) as lon, ST_y(geom) as lat,
    t2_high -  t1_high as high, t2_low - t1_low as low from data d JOIN
    stations s on (s.id = d.station) where s.network = 'NCDC81'
    and s.state not in ('HI', 'AK')
    """,
        pgconn,
        params=(date2, date1),
        index_col="station",
    )
    if df.empty:
        raise NoDataFound("No Data Found.")

    days = int((date2 - date1).days)
    extent = int(df[varname].abs().max())
    mp = MapPlot(
        sector="conus",
        title=("%s Day Change in %s NCDC 81 Climatology") %
        (days, PDICT[varname]),
        subtitle="from %s to %s" %
        (date1.strftime("%-d %B"), date2.strftime("%-d %B")),
    )
    mp.contourf(
        df["lon"].values,
        df["lat"].values,
        df[varname].values,
        np.arange(0 - extent, extent + 1, 2),
        cmap=get_cmap(ctx["cmap"]),
        units="F",
    )

    return mp.fig, df
Example #5
def plot_vsby(days, vsby, station, ctx, sts):
    """Sky plot variant."""
    fig = plt.figure(figsize=(8, 6))

    # need to convert vsby to 2-d
    data = np.ones((100, days * 24)) * -3
    for i in range(days * 24):
        val = vsby[0, i]
        if np.ma.is_masked(val):
            continue
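        # visibility in tenths of a mile, capped at 10 miles (row index 100)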
        val = min([int(val * 10), 100])
        data[val:, i] = val / 10.0
        data[:val, i] = -1
    data = np.ma.array(data, mask=np.where(data < -1, True, False))

    # clouds
    ax = plt.axes([0.1, 0.1, 0.8, 0.8])
    ax.set_facecolor("skyblue")
    ax.set_xticks(np.arange(0, days * 24 + 1, 24))
    ax.set_xticklabels(np.arange(1, days + 1))

    fig.text(
        0.5,
        0.935,
        ("[%s] %s %s Visibility\nbased on hourly ASOS METAR Visibility Reports"
         ) % (station, ctx["_nt"].sts[station]["name"], sts.strftime("%b %Y")),
        ha="center",
        fontsize=14,
    )

    cmap = get_cmap("gray")
    cmap.set_bad("white")
    cmap.set_under("skyblue")
    res = ax.imshow(
        np.flipud(data),
        aspect="auto",
        extent=[0, days * 24, 0, 100],
        cmap=cmap,
        vmin=0,
        vmax=10,
    )
    cax = plt.axes([0.915, 0.08, 0.035, 0.2])
    fig.colorbar(res, cax=cax)
    ax.set_yticks(range(0, 101, 10))
    ax.set_yticklabels(range(0, 11, 1))
    ax.set_ylabel("Visibility [miles]")
    fig.text(0.45, 0.02,
             "Day of %s (UTC Timezone)" % (sts.strftime("%b %Y"), ))

    ax.grid(True)

    return fig
Example #6
def plot_gdd(ts):
    """Generate our plot."""
    nc = ncopen(ts.strftime("/mesonet/data/ndfd/%Y%m%d%H_ndfd.nc"))
    # compute our daily GDDs
    gddtot = np.zeros(np.shape(nc.variables["lon"][:]))
    for i in range(7):
        gddtot += gdd(
            temperature(nc.variables["high_tmpk"][i, :, :], "K"),
            temperature(nc.variables["low_tmpk"][i, :, :], "K"),
        )
    cnc = ncopen("/mesonet/data/ndfd/ndfd_dailyc.nc")
    offset = daily_offset(ts)
    avggdd = np.sum(cnc.variables["gdd50"][offset:offset + 7], 0)
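    # departure from climatology; floor the climatology at 1 GDD to avoid near-zero values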
    data = gddtot - np.where(avggdd < 1, 1, avggdd)

    subtitle = ("Based on National Digital Forecast Database (NDFD) "
                "00 UTC Forecast made %s") % (ts.strftime("%-d %b %Y"), )
    mp = MapPlot(
        title="NWS NDFD 7 Day (%s through %s) GDD50 Departure from Avg" % (
            ts.strftime("%-d %b"),
            (ts + datetime.timedelta(days=6)).strftime("%-d %b"),
        ),
        subtitle=subtitle,
        sector="iailin",
    )
    mp.pcolormesh(
        nc.variables["lon"][:],
        nc.variables["lat"][:],
        data,
        np.arange(-80, 81, 20),
        cmap=get_cmap("RdBu_r"),
        units=r"$^\circ$F",
        spacing="proportional",
    )
    mp.drawcounties()
    pqstr = (
        "data c %s summary/cb_ndfd_7day_gdd.png summary/cb_ndfd_7day_gdd.png "
        "png") % (ts.strftime("%Y%m%d%H%M"), )
    mp.postprocess(pqstr=pqstr)
    mp.close()
    nc.close()
Example #7
def plot_maxmin(ts, field):
    """Generate our plot."""
    nc = ncopen(ts.strftime("/mesonet/data/ndfd/%Y%m%d%H_ndfd.nc"))
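    # collapse the 7-day forecast into one grid: period maximum for highs, minimum for lows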
    if field == "high_tmpk":
        data = np.max(nc.variables[field][:], 0)
    elif field == "low_tmpk":
        data = np.min(nc.variables[field][:], 0)
    data = masked_array(data, units.degK).to(units.degF).m

    subtitle = ("Based on National Digital Forecast Database (NDFD) "
                "00 UTC Forecast made %s") % (ts.strftime("%-d %b %Y"), )
    mp = MapPlot(
        title="NWS NDFD 7 Day (%s through %s) %s Temperature" % (
            ts.strftime("%-d %b"),
            (ts + datetime.timedelta(days=6)).strftime("%-d %b"),
            "Maximum" if field == "high_tmpk" else "Minimum",
        ),
        subtitle=subtitle,
        sector="iailin",
    )
    mp.pcolormesh(
        nc.variables["lon"][:],
        nc.variables["lat"][:],
        data,
        np.arange(10, 121, 10),
        cmap=get_cmap("jet"),
        units="Degrees F",
    )
    mp.drawcounties()
    pqstr = (
        "data c %s summary/cb_ndfd_7day_%s.png summary/cb_ndfd_7day_%s.png "
        "png") % (
            ts.strftime("%Y%m%d%H%M"),
            "max" if field == "high_tmpk" else "min",
            "max" if field == "high_tmpk" else "min",
        )
    mp.postprocess(pqstr=pqstr)
    mp.close()
    nc.close()
Example #8
def plotter(fdict):
    """ Go """
    ctx = get_autoplot_context(fdict, get_description())

    get_df(ctx)
    labels = {}
    data = {}
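    # per-state fill values (departures) and two-line labels: this year's value over the departure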
    for state, row in ctx["df"].iterrows():
        val = row["departure"]
        data[state] = val
        if pd.isna(val):
            if pd.isna(row["avg"]):
                subscript = "M"
            else:
                subscript = "[-%.0f]" % (row["avg"], )
                data[state] = 0 - row["avg"]
        else:
            subscript = "[%s%.0f]" % ("+" if val > 0 else "", val)
            subscript = "[0]" if subscript in ["[-0]", "[+0]"] else subscript
        labels[state] = "%s\n%s" % (
            "M" if pd.isna(row["thisval"]) else int(row["thisval"]),
            subscript,
        )

    mp = MapPlot(sector="conus", title=ctx["title"], subtitle=ctx["subtitle"])
    levels = range(-40, 41, 10)
    cmap = get_cmap(ctx["cmap"])
    cmap.set_bad("white")
    mp.fill_states(
        data,
        ilabel=True,
        labels=labels,
        bins=levels,
        cmap=cmap,
        units="Absolute %",
        labelfontsize=16,
    )

    return mp.fig, ctx["df"]
Example #9
def plotter(fdict):
    """ Go """
    pgconn = get_dbconn("coop")
    ctx = get_autoplot_context(fdict, get_description())
    state = ctx["state"][:2]
    unit_desc = ctx["unit_desc"].upper()
    commodity_desc = ctx["commodity_desc"].upper()

    util_practice_desc = (
        "GRAIN"
        if (unit_desc == "PCT HARVESTED" and commodity_desc == "CORN")
        else "ALL UTILIZATION PRACTICES"
    )

    df = read_sql(
        """
        select year, week_ending, num_value,
        extract(doy from week_ending)::int as day_of_year from nass_quickstats
        where commodity_desc = %s and statisticcat_desc = 'PROGRESS'
        and unit_desc = %s and state_alpha = %s and
        util_practice_desc = %s and num_value is not null
        ORDER by week_ending ASC
    """,
        pgconn,
        params=(commodity_desc, unit_desc, state, util_practice_desc),
        index_col=None,
    )
    if df.empty:
        raise NoDataFound("ERROR: No data found!")
    df["yeari"] = df["year"] - df["year"].min()

    (fig, ax) = plt.subplots(1, 1)

    year0 = int(df["year"].min())
    lastyear = int(df["year"].max())
    data = np.ma.ones((df["yeari"].max() + 1, 366), "f") * -1
    data.mask = np.where(data == -1, True, False)

    lastrow = None
    for _, row in df.iterrows():
        if lastrow is None:
            lastrow = row
            continue

        date = row["week_ending"]
        ldate = lastrow["week_ending"]
        val = int(row["num_value"])
        lval = int(lastrow["num_value"])
        d0 = int(ldate.strftime("%j"))
        d1 = int(date.strftime("%j"))
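        # linearly interpolate daily values between consecutive weekly reports within a year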
        if ldate.year == date.year:
            delta = (val - lval) / float(d1 - d0)
            for i, jday in enumerate(range(d0, d1 + 1)):
                data[date.year - year0, jday] = lval + i * delta
        else:
            data[ldate.year - year0, d0:] = 100

        lastrow = row

    dlast = np.max(data[-1, :])
    for year in range(year0, lastyear):
        idx = np.digitize([dlast], data[year - year0, :])
        ax.text(idx[0], year, "X", va="center", zorder=2, color="white")

    cmap = get_cmap(ctx["cmap"])
    res = ax.imshow(
        data,
        extent=[1, 367, lastyear + 0.5, year0 - 0.5],
        aspect="auto",
        interpolation="none",
        cmap=cmap,
    )
    fig.colorbar(res)
    ax.set_xticks((1, 32, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335))
    ax.set_xticklabels(calendar.month_abbr[1:])
    # We need to compute the domain of this plot
    maxv = np.max(data, 0)
    minv = np.min(data, 0)
    ax.set_xlim(np.argmax(maxv > 0) - 7, np.argmax(minv > 99) + 7)
    ax.set_ylim(lastyear + 0.5, year0 - 0.5)
    ax.yaxis.set_major_locator(ticker.MaxNLocator(integer=True))
    ax.grid(True)
    lastweek = df["week_ending"].max()
    ax.set_xlabel(
        "X denotes %s value of %.0f%%" % (lastweek.strftime("%d %b %Y"), dlast)
    )
    ax.set_title(
        (
            "USDA NASS %i-%i %s %s %s Progress\n"
            "Daily Linear Interpolated Values Between Weekly Reports"
        )
        % (
            year0,
            lastyear,
            state,
            PDICT2.get(commodity_desc),
            PDICT.get(unit_desc),
        )
    )

    return fig, df
Example #10
def plotter(fdict):
    """ Go """
    pgconn = get_dbconn("coop")
    ccursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)

    ctx = get_autoplot_context(fdict, get_description())
    station = ctx["station"]
    year = ctx["year"]
    gdd1 = ctx["gdd1"]
    gdd2 = ctx["gdd2"]
    table = "alldata_%s" % (station[:2], )
    nt = network.Table("%sCLIMATE" % (station[:2], ))

    ccursor.execute(
        """
    SELECT day, gddxx(%s, %s, high, low) as gdd
    from """ + table + """ WHERE year = %s and station = %s
    ORDER by day ASC
    """,
        (ctx["gddbase"], ctx["gddceil"], year, station),
    )
    days = []
    gdds = []
    for row in ccursor:
        gdds.append(float(row["gdd"]))
        days.append(row["day"])

    yticks = []
    yticklabels = []
    jan1 = datetime.datetime(year, 1, 1)
    for i in range(110, 330):
        ts = jan1 + datetime.timedelta(days=i)
        if ts.day == 1 or ts.day % 12 == 1:
            yticks.append(i)
            yticklabels.append(ts.strftime("%-d %b"))

    gdds = np.array(gdds)
    sts = datetime.datetime(year, 4, 1)
    ets = datetime.datetime(year, 6, 10)
    now = sts
    sz = len(gdds)

    days2 = []
    starts = []
    heights = []
    success = []
    rows = []
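    # step the planting date forward a day at a time, accumulating GDDs to reach gdd1 and then gdd2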
    while now < ets:
        idx = int(now.strftime("%j")) - 1
        running = 0
        while idx < sz and running < gdd1:
            running += gdds[idx]
            idx += 1
        idx0 = idx
        while idx < sz and running < gdd2:
            running += gdds[idx]
            idx += 1
        success.append(running >= gdd2)
        idx1 = idx
        days2.append(now)
        starts.append(idx0)
        heights.append(idx1 - idx0)
        rows.append(
            dict(
                plant_date=now,
                start_doy=idx0,
                end_doy=idx1,
                success=success[-1],
            ))
        now += datetime.timedelta(days=1)

    if True not in success:
        raise NoDataFound("No data, pick lower GDD values")
    df = pd.DataFrame(rows)
    heights = np.array(heights)
    success = np.array(success)
    starts = np.array(starts)

    cmap = get_cmap(ctx["cmap"])
    bmin = min(heights[success]) - 1
    bmax = max(heights[success]) + 1
    bins = np.arange(bmin, bmax + 1.1)
    norm = mpcolors.BoundaryNorm(bins, cmap.N)

    ax = plt.axes([0.125, 0.125, 0.75, 0.75])
    bars = ax.bar(days2, heights, bottom=starts, fc="#EEEEEE")
    for i, mybar in enumerate(bars):
        if success[i]:
            mybar.set_facecolor(cmap(norm([heights[i]])[0]))
    ax.grid(True)
    ax.set_yticks(yticks)
    ax.set_yticklabels(yticklabels)

    ax.set_ylim(min(starts) - 7, max(starts + heights) + 7)

    ax.xaxis.set_major_formatter(mdates.DateFormatter("%-d\n%b"))
    ax.set_xlabel("Planting Date")
    ax.set_title(("%s [%s] %s GDD [base=%s,ceil=%s]\n"
                  "Period between GDD %s and %s, gray bars incomplete") % (
                      nt.sts[station]["name"],
                      station,
                      year,
                      ctx["gddbase"],
                      ctx["gddceil"],
                      gdd1,
                      gdd2,
                  ))

    ax2 = plt.axes([0.92, 0.1, 0.07, 0.8], frameon=False, yticks=[], xticks=[])
    ax2.set_xlabel("Days")
    for i, mybin in enumerate(bins):
        ax2.text(0.52, i, "%g" % (mybin, ), ha="left", va="center", color="k")
        # txt.set_path_effects([PathEffects.withStroke(linewidth=2,
        #                                             foreground="k")])
    ax2.barh(
        np.arange(len(bins[:-1])),
        [0.5] * len(bins[:-1]),
        height=1,
        color=cmap(norm(bins[:-1])),
        ec="None",
    )
    ax2.set_xlim(0, 1)

    return plt.gcf(), df
Example #11
def plotter(fdict):
    """ Go """
    ctx = get_autoplot_context(fdict, get_description())
    # Convert datetime to UTC
    ctx["sdate"] = ctx["sdate"].replace(tzinfo=pytz.utc)
    ctx["edate"] = ctx["edate"].replace(tzinfo=pytz.utc)
    state = ctx["state"]
    phenomena = ctx["phenomena"]
    significance = ctx["significance"]
    station = ctx["station"][:4]
    t = ctx["t"]
    ilabel = ctx["ilabel"] == "yes"
    geo = ctx["geo"]
    if geo == "ugc":
        do_ugc(ctx)
    elif geo == "polygon":
        do_polygon(ctx)

    subtitle = "based on IEM Archives %s" % (ctx.get("subtitle", ""), )
    if t == "cwa":
        subtitle = "Plotted for %s (%s), %s" % (
            ctx["_nt"].sts[station]["name"],
            station,
            subtitle,
        )
    else:
        subtitle = "Plotted for %s, %s" % (state_names[state], subtitle)
    m = MapPlot(
        sector=("state" if t == "state" else "cwa"),
        state=state,
        cwa=(station if len(station) == 3 else station[1:]),
        axisbg="white",
        title=("%s %s (%s.%s)") % (
            ctx["title"],
            vtec.get_ps_string(phenomena, significance),
            phenomena,
            significance,
        ),
        subtitle=subtitle,
        nocaption=True,
        titlefontsize=16,
    )
    cmap = get_cmap(ctx["cmap"])
    cmap.set_under("white")
    cmap.set_over("white")
    if geo == "ugc":
        if ctx["v"] == "hour":
            cl = [
                "Mid",
                "",
                "2 AM",
                "",
                "4 AM",
                "",
                "6 AM",
                "",
                "8 AM",
                "",
                "10 AM",
                "",
                "Noon",
                "",
                "2 PM",
                "",
                "4 PM",
                "",
                "6 PM",
                "",
                "8 PM",
                "",
                "10 PM",
                "",
            ]
            m.fill_ugcs(
                ctx["data"],
                ctx["bins"],
                cmap=cmap,
                ilabel=ilabel,
                labels=ctx["labels"],
                clevstride=2,
                clevlabels=cl,
                labelbuffer=1,  # Texas yall
                extend="neither",
            )
        else:
            m.fill_ugcs(
                ctx["data"],
                ctx["bins"],
                cmap=cmap,
                ilabel=ilabel,
                labelbuffer=1,  # Texas yall
            )
    else:
        res = m.pcolormesh(
            ctx["lons"],
            ctx["lats"],
            ctx["data"],
            ctx["bins"],
            cmap=cmap,
            units=ctx["units"],
            extend=ctx.get("extend", "both"),
        )
        # Cut down on SVG et al size
        res.set_rasterized(True)
        if ctx["drawc"] == "yes":
            m.drawcounties()

    return m.fig, ctx["df"]
Example #12
def doit(ts):
    """
    Generate hourly plot of stage4 data
    """
    gmtnow = datetime.datetime.utcnow()
    gmtnow = gmtnow.replace(tzinfo=pytz.utc)
    routes = "a"
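    # add the "c" (current) route when the timestamp is within the past two hours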
    if ((gmtnow - ts).days * 86400.0 + (gmtnow - ts).seconds) < 7200:
        routes = "ac"

    fn = "/mesonet/ARCHIVE/data/%s/stage4/ST4.%s.01h.grib" % (
        ts.strftime("%Y/%m/%d"),
        ts.strftime("%Y%m%d%H"),
    )
    if not os.path.isfile(fn):
        LOG.info("Missing stage4 %s", fn)
        return

    grbs = pygrib.open(fn)
    grib = grbs[1]
    lats, lons = grib.latlons()
    vals = grib.values / 25.4

    cmap = get_cmap("jet")
    cmap.set_under("white")
    cmap.set_over("black")
    clevs = [
        0.01,
        0.05,
        0.1,
        0.2,
        0.3,
        0.4,
        0.5,
        0.6,
        0.7,
        0.8,
        0.9,
        1,
        1.5,
        2,
        3,
    ]
    localtime = ts.astimezone(pytz.timezone("America/Chicago"))

    for sector in ["iowa", "midwest", "conus"]:
        mp = MapPlot(
            sector=sector,
            title="Stage IV One Hour Precipitation",
            subtitle="Hour Ending %s" %
            (localtime.strftime("%d %B %Y %I %p %Z"), ),
        )
        mp.pcolormesh(lons, lats, vals, clevs, units="inch")
        pqstr = "plot %s %s00 %s_stage4_1h.png %s_stage4_1h_%s.png png" % (
            routes,
            ts.strftime("%Y%m%d%H"),
            sector,
            sector,
            ts.strftime("%H"),
        )
        if sector == "iowa":
            mp.drawcounties()
        mp.postprocess(view=False, pqstr=pqstr)
        mp.close()
Example #13
def plotter(fdict):
    """ Go """
    pgconn = get_dbconn("coop")
    ctx = get_autoplot_context(fdict, get_description())
    state = ctx["state"][:2]
    sector = ctx["sector"]
    opt = ctx["opt"]
    p1syear = ctx["p1syear"]
    p1eyear = ctx["p1eyear"]
    p2syear = ctx["p2syear"]
    p2eyear = ctx["p2eyear"]
    varname = ctx["var"]

    table = "alldata"
    if sector == "state":
        table = "alldata_%s" % (state,)

    df = read_sql(
        """
    WITH season1 as (
        SELECT station, year,
        min(case when month > 7 and low < 32 then
            extract(doy from day) else 366 end) as first_freeze,
        max(case when month < 7 and low < 32 then
            extract(doy from day) else 0 end) as last_freeze
        from """
        + table
        + """ WHERE
        year >= %s and year <= %s GROUP by station, year),
    season2 as (
        SELECT station, year,
        min(case when month > 7 and low < 32 then
            extract(doy from day) else 366 end) as first_freeze,
        max(case when month < 7 and low < 32 then
            extract(doy from day) else 0 end) as last_freeze
        from """
        + table
        + """ WHERE
        year >= %s and year <= %s GROUP by station, year),
    agg as (
        SELECT p1.station, avg(p1.first_freeze) as p1_first_fall,
        avg(p1.last_freeze) as p1_last_spring,
        avg(p2.first_freeze) as p2_first_fall,
        avg(p2.last_freeze) as p2_last_spring
        from season1 as p1 JOIN season2 as p2 on (p1.station = p2.station)
        GROUP by p1.station)

    SELECT station, ST_X(geom) as lon, ST_Y(geom) as lat,
    d.* from agg d JOIN stations t ON (d.station = t.id)
    WHERE t.network ~* 'CLIMATE'
    and substr(station, 3, 1) != 'C' and substr(station, 3, 4) != '0000'
    """,
        pgconn,
        params=[p1syear, p1eyear, p2syear, p2eyear],
        index_col="station",
    )
    if df.empty:
        raise NoDataFound("No Data Found")
    df["p1_season"] = df["p1_first_fall"] - df["p1_last_spring"]
    df["p2_season"] = df["p2_first_fall"] - df["p2_last_spring"]
    df["season_delta"] = df["p2_season"] - df["p1_season"]
    df["spring_delta"] = df["p2_last_spring"] - df["p1_last_spring"]
    df["fall_delta"] = df["p2_first_fall"] - df["p1_first_fall"]
    # Reindex so that most extreme values are first
    df = df.reindex(
        df[varname + "_delta"].abs().sort_values(ascending=False).index
    )

    title = PDICT3[varname]
    mp = MapPlot(
        sector=sector,
        state=state,
        axisbg="white",
        title=("%.0f-%.0f minus %.0f-%.0f %s Difference")
        % (p2syear, p2eyear, p1syear, p1eyear, title),
        subtitle=("based on IEM Archives"),
        titlefontsize=14,
    )
    # Create 9 levels centered on zero
    abval = df[varname + "_delta"].abs().max()
    levels = centered_bins(abval)
    if opt in ["both", "contour"]:
        mp.contourf(
            df["lon"].values,
            df["lat"].values,
            df[varname + "_delta"].values,
            levels,
            cmap=get_cmap(ctx["cmap"]),
            units="days",
        )
    if sector == "state":
        mp.drawcounties()
    if opt in ["both", "values"]:
        mp.plot_values(
            df["lon"].values,
            df["lat"].values,
            df[varname + "_delta"].values,
            fmt="%.1f",
            labelbuffer=5,
        )

    return mp.fig, df
Example #14
def plotter(fdict):
    """ Go """
    pgconn = get_dbconn("coop")
    ccursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx["station"]
    lagmonths = ctx["lag"]
    months = ctx["months"]
    month = ctx["month"]
    highyears = [int(x) for x in ctx["year"].split(",")]
    h = ctx["h"]

    wantmonth = month + lagmonths
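    # if the lag pushes the target month before January, wrap into the previous calendar year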
    yearoffset = 0
    if month + lagmonths < 1:
        wantmonth = 12 - (month + lagmonths)
        yearoffset = 1

    wanted = []
    deltas = []
    for m in range(month, month + months):
        if m < 13:
            wanted.append(m)
            deltas.append(0)
        else:
            wanted.append(m - 12)
            deltas.append(-1)

    table = "alldata_%s" % (station[:2], )
    nt = network.Table("%sCLIMATE" % (station[:2], ))

    elnino = {}
    ccursor.execute("""SELECT monthdate, soi_3m, anom_34 from elnino""")
    for row in ccursor:
        if row[0].month != wantmonth:
            continue
        elnino[row[0].year + yearoffset] = dict(soi_3m=row[1], anom_34=row[2])

    ccursor.execute(
        "SELECT year, month, sum(precip), avg((high+low)/2.) "
        f"from {table} where station = %s GROUP by year, month",
        (station, ),
    )
    if ccursor.rowcount == 0:
        raise NoDataFound("No Data Found.")
    yearly = {}
    for row in ccursor:
        (_year, _month, _precip, _temp) = row
        if _month not in wanted:
            continue
        effectiveyear = _year + deltas[wanted.index(_month)]
        nino = elnino.get(effectiveyear, {}).get("soi_3m", None)
        if nino is None:
            continue
        data = yearly.setdefault(effectiveyear,
                                 dict(precip=0, temp=[], nino=nino))
        data["precip"] += _precip
        data["temp"].append(float(_temp))

    fig = plt.figure(figsize=(10, 6))
    ax = plt.axes([0.1, 0.12, 0.5, 0.75])
    msg = ("[%s] %s\n%s\n%s SOI (3 month average)") % (
        station,
        nt.sts[station]["name"],
        title(wanted),
        datetime.date(2000, wantmonth, 1).strftime("%B"),
    )
    ax.set_title(msg)

    cmap = get_cmap(ctx["cmap"])
    zdata = np.arange(-2.0, 2.1, 0.5)
    norm = mpcolors.BoundaryNorm(zdata, cmap.N)
    rows = []
    xs = []
    ys = []
    for year in yearly:
        x = yearly[year]["precip"]
        y = np.average(yearly[year]["temp"])
        xs.append(x)
        ys.append(y)
        val = yearly[year]["nino"]
        c = cmap(norm([val])[0])
        if h == "hide" and val > -0.5 and val < 0.5:
            ax.scatter(
                x,
                y,
                facecolor="#EEEEEE",
                edgecolor="#EEEEEE",
                s=30,
                zorder=2,
                marker="s",
            )
        else:
            ax.scatter(x,
                       y,
                       facecolor=c,
                       edgecolor="k",
                       s=60,
                       zorder=3,
                       marker="o")
        if year in highyears:
            ax.text(x,
                    y + 0.2,
                    "%s" % (year, ),
                    ha="center",
                    va="bottom",
                    zorder=5)
        rows.append(dict(year=year, precip=x, tmpf=y, soi3m=val))

    ax.axhline(np.average(ys), lw=2, color="k", linestyle="-.", zorder=2)
    ax.axvline(np.average(xs), lw=2, color="k", linestyle="-.", zorder=2)

    sm = plt.cm.ScalarMappable(norm, cmap)
    sm.set_array(zdata)
    cb = plt.colorbar(sm, extend="both")
    cb.set_label("<-- El Nino :: SOI :: La Nina -->")

    ax.grid(True)
    ax.set_xlim(left=-0.01)
    ax.set_xlabel("Total Precipitation [inch], Avg: %.2f" % (np.average(xs), ))
    ax.set_ylabel((r"Average Temperature $^\circ$F, "
                   "Avg: %.1f") % (np.average(ys), ))
    df = pd.DataFrame(rows)
    ax2 = plt.axes([0.67, 0.6, 0.28, 0.35])
    ax2.scatter(df["soi3m"].values, df["tmpf"].values)
    ax2.set_xlabel("<-- El Nino :: SOI :: La Nina -->")
    ax2.set_ylabel(r"Avg Temp $^\circ$F")
    slp, intercept, r_value, _, _ = stats.linregress(df["soi3m"].values,
                                                     df["tmpf"].values)
    y1 = -2.0 * slp + intercept
    y2 = 2.0 * slp + intercept
    ax2.plot([-2, 2], [y1, y2])
    ax2.text(
        0.97,
        0.9,
        "R$^2$=%.2f" % (r_value**2, ),
        ha="right",
        transform=ax2.transAxes,
        bbox=dict(color="white"),
    )
    ax2.grid(True)

    ax3 = plt.axes([0.67, 0.1, 0.28, 0.35])
    ax3.scatter(df["soi3m"].values, df["precip"].values)
    ax3.set_xlabel("<-- El Nino :: SOI :: La Nina -->")
    ax3.set_ylabel("Total Precip [inch]")
    slp, intercept, r_value, _, _ = stats.linregress(df["soi3m"].values,
                                                     df["precip"].values)
    y1 = -2.0 * slp + intercept
    y2 = 2.0 * slp + intercept
    ax3.plot([-2, 2], [y1, y2])
    ax3.text(
        0.97,
        0.9,
        "R$^2$=%.2f" % (r_value**2, ),
        ha="right",
        transform=ax3.transAxes,
        bbox=dict(color="white"),
    )
    ax3.grid(True)

    return fig, df
Example #15
def makeplot(ts, routes="ac"):
    """
    Generate two plots for a given time GMT
    """
    pgconn = get_dbconn("smos", user="******")
    df = read_sql(
        """
    WITH obs as (
        SELECT grid_idx, avg(soil_moisture) * 100. as sm,
        avg(optical_depth) as od from data where valid BETWEEN %s and %s
        GROUP by grid_idx)

    SELECT ST_x(geom) as lon, ST_y(geom) as lat,
    CASE WHEN sm is Null THEN -1 ELSE sm END as sm,
    CASE WHEN od is Null THEN -1 ELSE od END as od
    from obs o JOIN grid g ON (o.grid_idx = g.idx)
    """,
        pgconn,
        params=(
            ts - datetime.timedelta(hours=6),
            ts + datetime.timedelta(hours=6),
        ),
        index_col=None,
    )

    if df.empty:
        LOG.info(
            "Did not find SMOS data for: %s-%s",
            ts - datetime.timedelta(hours=6),
            ts + datetime.timedelta(hours=6),
        )
        return

    for sector in ["midwest", "iowa"]:
        clevs = np.arange(0, 71, 5)
        mp = MapPlot(
            sector=sector,
            axisbg="white",
            title="SMOS Satellite: Soil Moisture (0-5cm)",
            subtitle="Satellite passes around %s UTC" %
            (ts.strftime("%d %B %Y %H"), ),
        )
        if sector == "iowa":
            mp.drawcounties()
        cmap = get_cmap("jet_r")
        cmap.set_under("#EEEEEE")
        cmap.set_over("k")
        mp.hexbin(
            df["lon"].values,
            df["lat"].values,
            df["sm"],
            clevs,
            units="%",
            cmap=cmap,
        )
        pqstr = "plot %s %s00 smos_%s_sm%s.png smos_%s_sm%s.png png" % (
            routes,
            ts.strftime("%Y%m%d%H"),
            sector,
            ts.strftime("%H"),
            sector,
            ts.strftime("%H"),
        )
        mp.postprocess(pqstr=pqstr)
        mp.close()

    for sector in ["midwest", "iowa"]:
        clevs = np.arange(0, 1.001, 0.05)
        mp = MapPlot(
            sector=sector,
            axisbg="white",
            title=("SMOS Satellite: Land Cover Optical Depth "
                   "(microwave L-band)"),
            subtitle="Satellite passes around %s UTC" %
            (ts.strftime("%d %B %Y %H"), ),
        )
        if sector == "iowa":
            mp.drawcounties()
        cmap = get_cmap("jet")
        cmap.set_under("#EEEEEE")
        cmap.set_over("k")
        mp.hexbin(df["lon"].values,
                  df["lat"].values,
                  df["od"],
                  clevs,
                  cmap=cmap)
        pqstr = "plot %s %s00 smos_%s_od%s.png smos_%s_od%s.png png" % (
            routes,
            ts.strftime("%Y%m%d%H"),
            sector,
            ts.strftime("%H"),
            sector,
            ts.strftime("%H"),
        )
        mp.postprocess(pqstr=pqstr)
        mp.close()
Example #16
def do(valid, yawsource):
    """ Generate plot for a given timestamp """
    if yawsource not in ["yaw", "yaw2", "yaw3"]:
        return
    yawdict = {"yaw": "Original", "yaw2": "daryl corrected", "yaw3": "daryl v2"}
    pgconn = get_dbconn("mec")
    cursor = pgconn.cursor()

    cursor.execute(
        """select turbineid, power, ST_x(geom), ST_y(geom),
    """ + yawsource + """, windspeed, pitch
     from sampled_data s JOIN turbines t on (t.id = s.turbineid)
     WHERE valid = %s and power is not null and """ + yawsource +
        """ is not null
     and windspeed is not null and pitch is not null""",
        (valid, ),
    )
    lons = []
    lats = []
    vals = []
    u = []
    v = []
    ws = []
    yaw = []
    pitch = []
    for row in cursor:
        lons.append(row[2])
        lats.append(row[3])
        vals.append(row[1])
        ws.append(row[5])
        yaw.append(row[4])
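        # convert wind speed and yaw into u/v components for the quiver arrows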
        a, b = uv(speed(row[5], "MPS"), direction(row[4], "deg"))
        u.append(a.value("MPS"))
        v.append(b.value("MPS"))
        pitch.append(row[6])
    pitch = np.array(pitch)
    vals = np.array(vals)
    avgv = np.average(vals)
    # vals2 = vals - avgv
    fig = plt.figure(figsize=(12.8, 7.2))
    ax = fig.add_axes([0.14, 0.1, 0.52, 0.8])

    cmap = get_cmap("jet")
    cmap.set_under("tan")
    cmap.set_over("black")
    clevs = np.arange(0, 1651, 150)
    norm = mpcolors.BoundaryNorm(clevs, cmap.N)
    ax.quiver(lons, lats, u, v, zorder=1)
    ax.scatter(
        lons,
        lats,
        c=vals,
        norm=norm,
        edgecolor="none",
        cmap=cmap,
        s=100,
        zorder=2,
    )
    ax.get_yaxis().get_major_formatter().set_useOffset(False)
    ax.get_xaxis().get_major_formatter().set_useOffset(False)
    ax.set_title(("Farm Turbine Power [kW] (1min sampled dataset)\n"
                  "Valid: %s, yaw source: %s") % (
                      valid.strftime("%d %b %Y %I:%M %p"),
                      yawdict.get(yawsource, yawsource),
                  ))
    make_colorbar(clevs, norm, cmap)

    ax.text(
        0.05,
        0.05,
        r"Turbine Power: $\mu$= %.1f $\sigma$= %.1f kW" % (avgv, np.std(vals)),
        transform=ax.transAxes,
    )
    ax.text(
        0.05,
        0.01,
        r"Wind $\mu$= %.1f $\sigma$= %.1f $ms^{-1}$" %
        (np.average(ws), np.std(ws)),
        transform=ax.transAxes,
    )
    ax.set_xlabel(r"Longitude $^\circ$E")
    ax.set_ylabel(r"Latitude $^\circ$N")
    ax.set_xlim(-94.832, -94.673)
    ax.set_ylim(42.545, 42.671)
    ax.get_xaxis().set_ticks([])
    ax.get_yaxis().set_ticks([])

    # Next plot
    ax2 = fig.add_axes([0.7, 0.80, 0.28, 0.18])
    ax2.scatter(ws, vals, edgecolor="k", c="k")
    ax2.text(
        0.5,
        -0.25,
        "Wind Speed $ms^{-1}$",
        transform=ax2.transAxes,
        ha="center",
    )
    ax2.set_xlim(0, 20)
    # ax2.set_ylabel("Power kW")
    ax2.grid(True)

    # Next plot
    ax3 = fig.add_axes([0.7, 0.57, 0.28, 0.18], sharey=ax2)
    ax3.scatter(yaw, vals, edgecolor="k", c="k")
    ax3.text(0.5, -0.25, "Yaw", transform=ax3.transAxes, ha="center")
    # ax3.set_ylabel("Power kW")
    ax3.set_xlim(0, 360)
    ax3.set_xticks(np.arange(0, 361, 45))
    ax3.set_xticklabels(["N", "NE", "E", "SE", "S", "SW", "W", "NW", "N"])
    ax3.grid(True)

    # Next plot
    ax4 = fig.add_axes([0.7, 0.32, 0.28, 0.18], sharey=ax2)
    ax4.scatter(pitch, vals, edgecolor="k", c="k")
    ax4.text(0.5,
             -0.25,
             r"Pitch $^\circ$",
             transform=ax4.transAxes,
             ha="center")
    ax4.set_ylim(-10, 1600)
    ax4.grid(True)

    # Next plot
    ax5 = fig.add_axes([0.7, 0.07, 0.28, 0.18], sharex=ax4)
    ax5.scatter(pitch, ws, edgecolor="k", c="k")
    ax5.text(0.5,
             -0.25,
             r"Pitch $^\circ$",
             transform=ax5.transAxes,
             ha="center")
    ax5.grid(True)
    ax5.set_ylim(bottom=-10)
    # maxpitch = max(np.where(pitch > 20, 0, pitch))
    # ax5.set_xlim(np.ma.minimum(pitch)-0.5, maxpitch+0.5)
    ax5.set_xlim(-3, 20.1)
    ax5.set_ylim(0, 20)
    ax5.text(
        -0.1,
        0.5,
        "Wind Speed $ms^{-1}$",
        transform=ax5.transAxes,
        ha="center",
        va="center",
        rotation=90,
    )
Example #17
def do(valid):
    """ Generate plot for a given timestamp """
    pgconn = get_dbconn("scada")
    cursor = pgconn.cursor()

    cursor.execute(
        """select turbine_id, power, lon, lat,
    yawangle, windspeed, alpha1
     from data s JOIN turbines t on (t.id = s.turbine_id)
     WHERE valid = %s and power is not null and yawangle is not null
     and windspeed is not null and alpha1 is not null""",
        (valid, ),
    )
    lons = []
    lats = []
    vals = []
    u = []
    v = []
    ws = []
    yaw = []
    pitch = []
    for row in cursor:
        lons.append(row[2])
        lats.append(row[3])
        vals.append(row[1])
        ws.append(row[5])
        yaw.append(row[4])
        a, b = uv(speed(row[5], "MPS"), direction(row[4], "deg"))
        u.append(a.value("MPS"))
        v.append(b.value("MPS"))
        pitch.append(row[6])
    pitch = np.array(pitch)
    vals = np.array(vals)
    avgv = np.average(vals)
    # vals2 = vals - avgv
    fig = plt.figure(figsize=(12.8, 7.2))
    ax = fig.add_axes([0.14, 0.1, 0.52, 0.8])

    cmap = get_cmap("jet")
    cmap.set_under("tan")
    cmap.set_over("black")
    # cmap = get_cmap('seismic')
    # clevs = np.arange(-250, 251, 50)
    clevs = np.arange(0, 1501, 150)
    norm = mpcolors.BoundaryNorm(clevs, cmap.N)
    ax.quiver(lons, lats, u, v, zorder=1)
    ax.scatter(
        lons,
        lats,
        c=vals,
        norm=norm,
        edgecolor="none",
        cmap=cmap,
        s=100,
        zorder=2,
    )
    ax.get_yaxis().get_major_formatter().set_useOffset(False)
    ax.get_xaxis().get_major_formatter().set_useOffset(False)
    ax.xaxis.set_major_formatter(plt.NullFormatter())
    ax.yaxis.set_major_formatter(plt.NullFormatter())
    ax.set_title(("Turbine Power [kW]\n"
                  "Valid: %s") % (valid.strftime("%d %b %Y %I:%M %p")))
    make_colorbar(clevs, norm, cmap)

    ax.text(
        0.05,
        0.05,
        r"Turbine Power: $\mu$= %.1f $\sigma$= %.1f kW" % (avgv, np.std(vals)),
        transform=ax.transAxes,
    )
    ax.text(
        0.05,
        0.01,
        r"Wind $\mu$= %.1f $\sigma$= %.1f $ms^{-1}$" %
        (np.average(ws), np.std(ws)),
        transform=ax.transAxes,
    )
    ax.set_xlabel(r"Longitude $^\circ$E")
    ax.set_ylabel(r"Latitude $^\circ$N")
    ax.set_xlim(-93.475, -93.328)
    ax.set_ylim(42.20, 42.31)

    # Next plot
    ax2 = fig.add_axes([0.7, 0.80, 0.28, 0.18])
    ax2.scatter(ws, vals, edgecolor="k", c="k")
    ax2.text(
        0.5,
        -0.25,
        "Wind Speed $ms^{-1}$",
        transform=ax2.transAxes,
        ha="center",
    )
    ax2.set_xlim(0, 20)
    # ax2.set_ylabel("Power kW")
    ax2.grid(True)

    # Next plot
    ax3 = fig.add_axes([0.7, 0.57, 0.28, 0.18], sharey=ax2)
    ax3.scatter(yaw, vals, edgecolor="k", c="k")
    ax3.text(0.5, -0.25, "Yaw", transform=ax3.transAxes, ha="center")
    # ax3.set_ylabel("Power kW")
    ax3.set_xlim(0, 360)
    ax3.set_xticks(np.arange(0, 361, 45))
    ax3.set_xticklabels(["N", "NE", "E", "SE", "S", "SW", "W", "NW", "N"])
    ax3.grid(True)

    # Next plot
    ax4 = fig.add_axes([0.7, 0.32, 0.28, 0.18], sharey=ax2)
    ax4.scatter(pitch, vals, edgecolor="k", c="k")
    ax4.text(0.5,
             -0.25,
             r"Pitch $^\circ$",
             transform=ax4.transAxes,
             ha="center")
    ax4.set_ylim(-10, 1600)
    ax4.grid(True)

    # Next plot
    ax5 = fig.add_axes([0.7, 0.07, 0.28, 0.18], sharex=ax4)
    ax5.scatter(pitch, ws, edgecolor="k", c="k")
    ax5.text(0.5,
             -0.25,
             r"Pitch $^\circ$",
             transform=ax5.transAxes,
             ha="center")
    ax5.grid(True)
    ax5.set_ylim(bottom=-10)
    # maxpitch = max(np.where(pitch > 20, 0, pitch))
    # ax5.set_xlim(np.ma.minimum(pitch)-0.5, maxpitch+0.5)
    ax5.set_xlim(-3, 20.1)
    ax5.set_ylim(0, 20)
    ax5.text(
        -0.1,
        0.5,
        "Wind Speed $ms^{-1}$",
        transform=ax5.transAxes,
        ha="center",
        va="center",
        rotation=90,
    )
Example #18
def plotter(fdict):
    """ Go """
    pgconn = get_dbconn("coop")
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx["station"]
    year = ctx["year"]
    varname = ctx["var"]
    how = ctx["how"]
    gddbase = ctx["gddbase"]
    gddceil = ctx["gddceil"]

    table = "alldata_%s" % (station[:2], )

    df = read_sql(
        """
    WITH data as (
     select day, year, sday,
     high,
     low,
     (high+low)/2. as temp,
     gddxx(%s, %s, high, low) as gdd,
     rank() OVER (PARTITION by sday ORDER by high ASC) as high_ptile,
     rank() OVER (PARTITION by sday ORDER by (high+low)/2. ASC) as temp_ptile,
     rank() OVER (PARTITION by sday ORDER by low ASC) as low_ptile,
     rank() OVER (PARTITION by sday
        ORDER by gddxx(%s, %s, high, low) ASC) as gdd_ptile
     from """ + table + """ where station = %s
    ), climo as (
     SELECT sday, avg(high) as avg_high, avg(low) as avg_low,
     avg((high+low)/2.) as avg_temp, stddev(high) as stddev_high,
     stddev(low) as stddev_low, stddev((high+low)/2.) as stddev_temp,
     avg(gddxx(%s, %s, high, low)) as avg_gdd,
     stddev(gddxx(%s, %s, high, low)) as stddev_gdd,
     count(*)::float as years
     from """ + table + """ WHERE station = %s GROUP by sday
    )
    SELECT day,
    d.high - c.avg_high as high_diff,
    (d.high - c.avg_high) / c.stddev_high as high_sigma,
    d.low - c.avg_low as low_diff,
    (d.low - c.avg_low) / c.stddev_low as low_sigma,
    d.temp - c.avg_temp as avg_diff,
    (d.temp - c.avg_temp) / c.stddev_temp as avg_sigma,
    d.gdd - c.avg_gdd as gdd_diff,
    (d.gdd - c.avg_gdd) / greatest(c.stddev_gdd, 0.1) as gdd_sigma,
    d.high,
    c.avg_high,
    d.low,
    c.avg_low,
    d.temp,
    c.avg_temp,
    d.gdd,
    c.avg_gdd,
    high_ptile / years * 100. as high_ptile,
    low_ptile / years * 100. as low_ptile,
    temp_ptile / years * 100. as temp_ptile,
    gdd_ptile / years * 100. as gdd_ptile
    from data d JOIN climo c on
    (c.sday = d.sday) WHERE d.year = %s ORDER by day ASC
    """,
        pgconn,
        params=(
            gddbase,
            gddceil,
            gddbase,
            gddceil,
            station,
            gddbase,
            gddceil,
            gddbase,
            gddceil,
            station,
            year,
        ),
        index_col=None,
    )

    (fig, ax) = plt.subplots(1, 1)
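    # pick the departure, sigma, or percentile column for the chosen variable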
    diff = df[varname + "_" + how].values
    if how == "ptile" and "cmap" in ctx:
        bins = range(0, 101, 10)
        cmap = get_cmap(ctx["cmap"])
        norm = mpcolors.BoundaryNorm(bins, cmap.N)
        colors = cmap(norm(diff))
        ax.bar(df["day"].values, diff, color=colors, align="center")
        ax.set_yticks(bins)
    else:
        bars = ax.bar(df["day"].values, diff, fc="b", ec="b", align="center")
        for i, _bar in enumerate(bars):
            if diff[i] > 0:
                _bar.set_facecolor("r")
                _bar.set_edgecolor("r")
    ax.grid(True)
    if how == "diff":
        ax.set_ylabel(r"%s Departure $^\circ$F" % (PDICT[varname], ))
    elif how == "ptile":
        ax.set_ylabel("%s Percentile (100 highest)" % (PDICT[varname], ))
    else:
        ax.set_ylabel(r"%s Std Dev Departure ($\sigma$)" % (PDICT[varname], ))
    if varname == "gdd":
        ax.set_xlabel("Growing Degree Day Base: %s Ceiling: %s" %
                      (gddbase, gddceil))
    ax.set_title(("%s %s\nYear %s Daily %s %s") % (
        station,
        ctx["_nt"].sts[station]["name"],
        year,
        PDICT[varname],
        "Departure" if how != "ptile" else "Percentile",
    ))
    ax.xaxis.set_major_formatter(mdates.DateFormatter("%b"))
    ax.xaxis.set_major_locator(mdates.DayLocator(1))

    return fig, df
Example #19
def main(argv):
    """Go Main Go"""
    nt = Table("ISUSM")
    qdict = loadqc()

    idbconn = get_dbconn("isuag", user="******")
    pdbconn = get_dbconn("postgis", user="******")

    day_ago = int(argv[1])
    ts = datetime.date.today() - datetime.timedelta(days=day_ago)
    hlons, hlats, hvals = do_nam(ts)
    nam = temperature(hvals, "K").value("F")
    window = np.ones((3, 3))
    nam = convolve2d(nam, window / window.sum(), mode="same", boundary="symm")

    # mp = MapPlot(sector='midwest')
    # mp.pcolormesh(hlons, hlats, nam,
    #              range(20, 90, 5))
    # mp.postprocess(filename='test.png')
    # sys.exit()

    # Query out the data
    df = read_sql(
        """
        WITH ranges as (
            select station, count(*), min(tsoil_c_avg_qc),
            max(tsoil_c_avg_qc) from sm_hourly WHERE
            valid >= %s and valid < %s and tsoil_c_avg_qc > -40
            and tsoil_c_avg_qc < 50 GROUP by station
        )
        SELECT d.station, d.tsoil_c_avg_qc,
        r.max as hourly_max_c, r.min as hourly_min_c, r.count
         from sm_daily d JOIN ranges r on (d.station = r.station)
        where valid = %s and tsoil_c_avg_qc > -40 and r.count > 19
    """,
        idbconn,
        params=(ts, ts + datetime.timedelta(days=1), ts),
        index_col="station",
    )
    for col, newcol in zip(
        ["tsoil_c_avg_qc", "hourly_min_c", "hourly_max_c"],
        ["ob", "min", "max"],
    ):
        df[newcol] = temperature(df[col].values, "C").value("F")
        df.drop(col, axis=1, inplace=True)

    for stid, row in df.iterrows():
        df.at[stid, "ticket"] = qdict.get(stid, {}).get("soil4", False)
        x, y = get_idx(hlons, hlats, nt.sts[stid]["lon"], nt.sts[stid]["lat"])
        df.at[stid, "nam"] = nam[x, y]
        df.at[stid, "lat"] = nt.sts[stid]["lat"]
        df.at[stid, "lon"] = nt.sts[stid]["lon"]
    # ticket is an object type from above
    df = df[~df["ticket"].astype("bool")]
    df["diff"] = df["ob"] - df["nam"]
    bias = df["diff"].mean()
    nam = nam + bias
    print("fancy_4inch NAM bias correction of: %.2fF applied" % (bias, ))
    # apply nam bias to sampled data
    df["nam"] += bias
    df["diff"] = df["ob"] - df["nam"]
    # we are going to require data be within 1 SD of sampled or 5 deg
    std = 5.0 if df["nam"].std() < 5.0 else df["nam"].std()
    for station in df[df["diff"].abs() > std].index.values:
        print(("fancy_4inch %s QC'd %s out std: %.2f, ob:%.1f nam:%.1f") % (
            ts.strftime("%Y%m%d"),
            station,
            std,
            df.at[station, "ob"],
            df.at[station, "nam"],
        ))
        df.drop(station, inplace=True)

    # Query out centroids of counties...
    cdf = read_sql(
        """SELECT ST_x(ST_centroid(the_geom)) as lon,
        ST_y(ST_centroid(the_geom)) as lat
        from uscounties WHERE state_name = 'Iowa'
    """,
        pdbconn,
        index_col=None,
    )
    for i, row in cdf.iterrows():
        x, y = get_idx(hlons, hlats, row["lon"], row["lat"])
        cdf.at[i, "nam"] = nam[x, y]

    mp = MapPlot(
        sector="iowa",
        title=("Average 4 inch Depth Soil Temperatures for %s") %
        (ts.strftime("%b %d, %Y"), ),
        subtitle=("County est. based on bias adj. "
                  "NWS NAM Model (black numbers), "
                  "ISUSM network observations (red numbers)"),
    )
    mp.pcolormesh(
        hlons,
        hlats,
        nam,
        np.arange(10, 101, 5),
        cmap=get_cmap("jet"),
        units=r"$^\circ$F",
    )
    mp.plot_values(df["lon"],
                   df["lat"],
                   df["ob"],
                   fmt="%.0f",
                   color="r",
                   labelbuffer=5)
    mp.plot_values(
        cdf["lon"],
        cdf["lat"],
        cdf["nam"],
        fmt="%.0f",
        textsize=11,
        labelbuffer=5,
    )
    mp.drawcounties()
    routes = "a" if day_ago >= 4 else "ac"
    pqstr = ("plot %s %s0000 soilt_day%s.png isuag_county_4inch_soil.png png"
             ) % (routes, ts.strftime("%Y%m%d"), day_ago)
    mp.postprocess(pqstr=pqstr)
    mp.close()
Example #20
def plotter(fdict):
    """ Go """
    pgconn = get_dbconn("asos")

    ctx = get_autoplot_context(fdict, get_description())
    station = ctx["zstation"]
    h1 = int(ctx["h1"])
    h2 = int(ctx["h2"])
    varname = ctx["v"]

    tzname = ctx["_nt"].sts[station]["tzname"]

    df = read_sql(
        """
    WITH data as (
        SELECT valid at time zone %s + '10 minutes'::interval as localvalid,
        date_trunc(
             'hour', valid at time zone %s  + '10 minutes'::interval) as v,
        tmpf, dwpf, sknt, drct, alti, relh, random() as r,
        coalesce(mslp, alti * 33.8639, 1013.25) as slp
        from alldata where station = %s and report_type = 2
        and extract(hour from valid at time zone %s + '10 minutes'::interval)
        in (%s, %s)),
     agg as (
          select *, extract(hour from v) as hour,
          rank() OVER (PARTITION by v ORDER by localvalid ASC, r ASC) from data
     )

     SELECT *, date(
         case when hour = %s
         then date(v - '1 day'::interval)
         else date(v) end) from agg WHERE rank = 1
    """,
        pgconn,
        params=(
            tzname,
            tzname,
            station,
            tzname,
            h1,
            h2,
            h2 if h2 < h1 else -1,
        ),
        index_col=None,
    )
    if df.empty:
        raise NoDataFound("No data was found.")
    if varname == "q":
        df["pressure"] = mcalc.add_height_to_pressure(
            df["slp"].values * units("millibars"),
            ctx["_nt"].sts[station]["elevation"] * units("m"),
        ).to(units("millibar"))
        # compute mixing ratio
        df["q"] = (mcalc.mixing_ratio_from_relative_humidity(
            df["relh"].values * units("percent"),
            df["tmpf"].values * units("degF"),
            df["pressure"].values * units("millibars"),
        ) * 1000.0)

    # pivot
    df = df.pivot(index="date", columns="hour", values=varname).reset_index()
    df = df.dropna()
    df["doy"] = pd.to_numeric(pd.to_datetime(df["date"]).dt.strftime("%j"))
    df["year"] = pd.to_datetime(df["date"]).dt.year
    df["week"] = (df["doy"] / 7).astype(int)
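    # difference between the two observation hours for each date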
    df["delta"] = df[h2] - df[h1]

    (fig, ax) = plt.subplots(1, 1)
    if ctx["opt"] == "no":
        ax.set_xlabel("Plotted lines are smoothed over %.0f days" %
                      (ctx["smooth"], ))
    ax.set_ylabel(
        "%s %s Difference" %
        (PDICT[varname], "Accumulated Sum" if ctx["opt"] == "yes" else ""))

    if ctx["opt"] == "no":
        # Histogram
        H, xedges, yedges = np.histogram2d(df["doy"].values,
                                           df["delta"].values,
                                           bins=(50, 50))
        ax.pcolormesh(
            xedges,
            yedges,
            H.transpose(),
            cmap=get_cmap(ctx["cmap"]),
            alpha=0.5,
        )

    # Plot an average line
    gdf = (df.groupby("doy").mean().rolling(ctx["smooth"],
                                            min_periods=1,
                                            center=True).mean())
    y = gdf["delta"] if ctx["opt"] == "no" else gdf["delta"].cumsum()
    ax.plot(
        gdf.index.values,
        y,
        label="Average",
        zorder=6,
        lw=2,
        color="k",
        linestyle="-.",
    )

    # Plot selected year
    for i in range(1, 5):
        year = ctx.get("y%s" % (i, ))
        if year is None:
            continue
        df2 = df[df["year"] == year]
        if not df2.empty:
            gdf = (df2.groupby("doy").mean().rolling(ctx["smooth"],
                                                     min_periods=1,
                                                     center=True).mean())
            y = gdf["delta"] if ctx["opt"] == "no" else gdf["delta"].cumsum()
            ax.plot(gdf.index.values, y, label=str(year), lw=2, zorder=10)

    ax.set_xticks((1, 32, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335))
    ax.set_xticklabels(calendar.month_abbr[1:])
    ax.set_xlim(1, 366)
    ax.grid(True)
    ax.legend(loc="best", ncol=5)
    sts = datetime.datetime(2000, 6, 1, h1)
    ets = datetime.datetime(2000, 6, 1, h2)
    title = ("%s [%s] %s Difference (%.0f-%.0f)\n"
             "%s minus %s (%s) (timezone: %s)") % (
                 ctx["_nt"].sts[station]["name"],
                 station,
                 PDICT[varname],
                 df["year"].min(),
                 df["year"].max(),
                 ets.strftime("%-I %p"),
                 sts.strftime("%-I %p"),
                 "same day" if h2 > h1 else "previous day",
                 tzname,
             )
    fitbox(fig, title, 0.05, 0.95, 0.91, 0.99, ha="center")

    return fig, df
Example #21
def plotter(fdict):
    """ Go """
    pgconn = get_dbconn("asos")
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx["zstation"]

    df = read_sql(
        """
    WITH obs as (
        SELECT date_trunc('hour', valid) as t, avg(tmpf) as avgt from alldata
        WHERE station = %s and p01i > 0.009 and tmpf is not null
        GROUP by t
    )

    SELECT extract(week from t) as week, avgt from obs
    """,
        pgconn,
        params=(station, ),
        index_col=None,
    )
    if df.empty:
        raise NoDataFound("No data found.")

    sts = datetime.datetime(2012, 1, 1)
    xticks = []
    for i in range(1, 13):
        ts = sts.replace(month=i)
        xticks.append(int(ts.strftime("%j")))

    (fig, ax) = plt.subplots(1, 1, figsize=(8, 6))

    bins = np.arange(df["avgt"].min() - 5, df["avgt"].max() + 5, 2)
    H, xedges, yedges = np.histogram2d(df["week"].values, df["avgt"].values,
                                       [range(0, 54), bins])
    rows = []
    for i, x in enumerate(xedges[:-1]):
        for j, y in enumerate(yedges[:-1]):
            rows.append(dict(tmpf=y, week=x, count=H[i, j]))
    resdf = pd.DataFrame(rows)

    ab = ctx["_nt"].sts[station]["archive_begin"]
    if ab is None:
        raise NoDataFound("Unknown station metadata.")
    years = datetime.date.today().year - ab.year
    H = np.ma.array(H) / float(years)
    H.mask = np.ma.where(H < 0.1, True, False)
    res = ax.pcolormesh((xedges - 1) * 7,
                        yedges,
                        H.transpose(),
                        cmap=get_cmap(ctx["cmap"]))
    fig.colorbar(res, label="Hours per week per year")
    ax.set_xticks(xticks)
    ax.set_xticklabels(calendar.month_abbr[1:])
    ax.set_xlim(0, 366)

    y = []
    for i in range(np.shape(H)[0]):
        y.append(np.ma.sum(H[i, :] * (bins[:-1] + 0.5)) / np.ma.sum(H[i, :]))

    ax.plot(xedges[:-1] * 7, y, zorder=3, lw=3, color="w")
    ax.plot(xedges[:-1] * 7, y, zorder=3, lw=1, color="k", label="Average")
    ax.legend(loc=2)

    ax.set_title(("[%s] %s (%s-%s)\n"
                  "Temperature Frequency During Precipitation by Week") % (
                      station,
                      ctx["_nt"].sts[station]["name"],
                      ab.year,
                      datetime.date.today().year,
                  ))
    ax.grid(True)
    ax.set_ylabel(r"Temperature [$^\circ$F]")

    return fig, resdf
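For reference, a hedged, self-contained sketch of how the 2-D histogram above is flattened into the long-form DataFrame that gets returned; the synthetic observations and the 2-degree bin width are assumptions.

import numpy as np
import pandas as pd

rng = np.random.default_rng(0)
week = rng.integers(1, 53, 5000)          # fake week-of-year values
avgt = rng.normal(50, 15, 5000)           # fake hourly average temperatures

bins_t = np.arange(avgt.min() - 5, avgt.max() + 5, 2)
H, xedges, yedges = np.histogram2d(week, avgt, [np.arange(0, 54), bins_t])

# One row per (week, temperature-bin) cell, mirroring the resdf construction
rows = [
    dict(week=x, tmpf=y, count=H[i, j])
    for i, x in enumerate(xedges[:-1])
    for j, y in enumerate(yedges[:-1])
]
resdf = pd.DataFrame(rows)
print(resdf.loc[resdf["count"] > 0].head())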
Ejemplo n.º 22
0
def plotter(fdict):
    """ Go """
    ctx = util.get_autoplot_context(fdict, get_description())
    ptype = ctx["ptype"]
    sdate = ctx["sdate"]
    edate = ctx["edate"]
    src = ctx["src"]
    opt = ctx["opt"]
    usdm = ctx["usdm"]
    if sdate.year != edate.year:
        raise NoDataFound("Sorry, do not support multi-year plots yet!")
    days = (edate - sdate).days
    sector = ctx["sector"]

    x0 = 0
    x1 = -1
    y0 = 0
    y1 = -1
    state = None
    if len(sector) == 2:
        state = sector
        sector = "state"

    title = compute_title(src, sdate, edate)
    if src == "mrms":
        ncfn = iemre.get_daily_mrms_ncname(sdate.year)
        clncfn = iemre.get_dailyc_mrms_ncname()
        ncvar = "p01d"
        source = "MRMS Q3"
        subtitle = "NOAA MRMS Project, GaugeCorr and RadarOnly"
    elif src == "iemre":
        ncfn = iemre.get_daily_ncname(sdate.year)
        clncfn = iemre.get_dailyc_ncname()
        ncvar = "p01d_12z"
        source = "IEM Reanalysis"
        subtitle = "IEM Reanalysis is derived from various NOAA datasets"
    else:
        ncfn = "/mesonet/data/prism/%s_daily.nc" % (sdate.year, )
        clncfn = "/mesonet/data/prism/prism_dailyc.nc"
        ncvar = "ppt"
        source = "OSU PRISM"
        subtitle = ("PRISM Climate Group, Oregon State Univ., "
                    "http://prism.oregonstate.edu, created 4 Feb 2004.")

    mp = MapPlot(
        sector=sector,
        state=state,
        axisbg="white",
        nocaption=True,
        title="%s:: %s Precip %s" % (source, title, PDICT3[opt]),
        subtitle="Data from %s" % (subtitle, ),
        titlefontsize=14,
    )

    idx0 = iemre.daily_offset(sdate)
    idx1 = iemre.daily_offset(edate) + 1
    if not os.path.isfile(ncfn):
        raise NoDataFound("No data for that year, sorry.")
    with util.ncopen(ncfn) as nc:
        if state is not None:
            x0, y0, x1, y1 = util.grid_bounds(
                nc.variables["lon"][:],
                nc.variables["lat"][:],
                state_bounds[state],
            )
        elif sector in SECTORS:
            bnds = SECTORS[sector]
            x0, y0, x1, y1 = util.grid_bounds(
                nc.variables["lon"][:],
                nc.variables["lat"][:],
                [bnds[0], bnds[2], bnds[1], bnds[3]],
            )
        lats = nc.variables["lat"][y0:y1]
        lons = nc.variables["lon"][x0:x1]
        if sdate == edate:
            p01d = mm2inch(nc.variables[ncvar][idx0, y0:y1, x0:x1])
        elif (idx1 - idx0) < 32:
            p01d = mm2inch(
                np.sum(nc.variables[ncvar][idx0:idx1, y0:y1, x0:x1], 0))
        else:
            # Too much data can overwhelm this app, need to chunk it
            for i in range(idx0, idx1, 10):
                i2 = min([i + 10, idx1])
                if idx0 == i:
                    p01d = mm2inch(
                        np.sum(nc.variables[ncvar][i:i2, y0:y1, x0:x1], 0))
                else:
                    p01d += mm2inch(
                        np.sum(nc.variables[ncvar][i:i2, y0:y1, x0:x1], 0))
    if np.ma.is_masked(np.max(p01d)):
        raise NoDataFound("Data Unavailable")
    plot_units = "inches"
    cmap = get_cmap(ctx["cmap"])
    cmap.set_bad("white")
    if opt == "dep":
        # Do departure work now
        with util.ncopen(clncfn) as nc:
            climo = mm2inch(
                np.sum(nc.variables[ncvar][idx0:idx1, y0:y1, x0:x1], 0))
        p01d = p01d - climo
        [maxv] = np.percentile(np.abs(p01d), [99])
        clevs = np.around(np.linspace(0 - maxv, maxv, 11), decimals=2)
    elif opt == "per":
        with util.ncopen(clncfn) as nc:
            climo = mm2inch(
                np.sum(nc.variables[ncvar][idx0:idx1, y0:y1, x0:x1], 0))
        p01d = p01d / climo * 100.0
        cmap.set_under("white")
        cmap.set_over("black")
        clevs = [1, 10, 25, 50, 75, 100, 125, 150, 200, 300, 500]
        plot_units = "percent"
    else:
        p01d = np.where(p01d < 0.001, np.nan, p01d)
        cmap.set_under("white")
        clevs = [0.01, 0.1, 0.3, 0.5, 1, 1.5, 2, 2.5, 3, 3.5, 4, 5, 6, 8, 10]
        if days > 6:
            clevs = [0.01, 0.3, 0.5, 1, 1.5, 2, 3, 4, 5, 6, 7, 8, 10, 15, 20]
        if days > 29:
            clevs = [0.01, 0.5, 1, 2, 3, 4, 5, 6, 8, 10, 15, 20, 25, 30, 35]
        if days > 90:
            clevs = [0.01, 1, 2, 3, 4, 5, 6, 8, 10, 15, 20, 25, 30, 35, 40]

    x2d, y2d = np.meshgrid(lons, lats)
    if ptype == "c":
        mp.contourf(x2d,
                    y2d,
                    p01d,
                    clevs,
                    cmap=cmap,
                    units=plot_units,
                    iline=False)
    else:
        res = mp.pcolormesh(x2d, y2d, p01d, clevs, cmap=cmap, units=plot_units)
        res.set_rasterized(True)
    if sector != "midwest":
        mp.drawcounties()
        mp.drawcities()
    if usdm == "yes":
        mp.draw_usdm(edate, filled=False, hatched=True)

    return mp.fig
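The chunked read in the last branch above keeps memory bounded when many days are requested. Below is a rough sketch of that pattern with a plain numpy array standing in for the netCDF variable; mm2inch here is an assumption matching pyiem's helper in spirit.

import numpy as np

def mm2inch(val):
    # assumed conversion helper; pyiem provides an equivalent
    return val / 25.4

data = np.random.default_rng(1).gamma(1.0, 3.0, (120, 50, 60))  # synthetic mm
idx0, idx1 = 0, 120
p01d = None
for i in range(idx0, idx1, 10):
    i2 = min(i + 10, idx1)
    # with a real netCDF variable, this slice reads only ten time steps
    chunk = mm2inch(np.sum(data[i:i2, :, :], 0))
    p01d = chunk if p01d is None else p01d + chunk
print(p01d.shape, round(float(p01d.max()), 2))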
Ejemplo n.º 23
0
def plotter(fdict):
    """ Go """
    pgconn = get_dbconn("postgis")
    ctx = get_autoplot_context(fdict, get_description())
    sts = ctx["sdate"]
    sts = sts.replace(tzinfo=pytz.UTC)
    ets = ctx["edate"]
    ets = ets.replace(tzinfo=pytz.UTC)
    p1 = ctx["phenomenav1"]
    p2 = ctx["phenomenav2"]
    p3 = ctx["phenomenav3"]
    p4 = ctx["phenomenav4"]
    varname = ctx["var"]
    phenomena = []
    for p in [p1, p2, p3, p4]:
        if p is not None:
            phenomena.append(p[:2])
    s1 = ctx["significancev1"]
    s2 = ctx["significancev2"]
    s3 = ctx["significancev3"]
    s4 = ctx["significancev4"]
    significance = []
    for s in [s1, s2, s3, s4]:
        if s is not None:
            significance.append(s[0])

    pstr = []
    subtitle = ""
    title = ""
    for p, s in zip(phenomena, significance):
        pstr.append("(phenomena = '%s' and significance = '%s')" % (p, s))
        subtitle += "%s.%s " % (p, s)
        title += vtec.get_ps_string(p, s)
    if len(phenomena) > 1:
        title = "VTEC Unique Event"
    pstr = " or ".join(pstr)
    pstr = "(%s)" % (pstr,)
    cmap = get_cmap(ctx["cmap"])

    if varname == "count":
        df = read_sql(
            """
    with total as (
    select distinct wfo, extract(year from issue at time zone 'UTC') as year,
    phenomena, significance, eventid from warnings
    where """
            + pstr
            + """ and
    issue >= %s and issue < %s
    )

    SELECT wfo, phenomena, significance, year, count(*) from total
    GROUP by wfo, phenomena, significance, year
        """,
            pgconn,
            params=(sts, ets),
        )

        df2 = df.groupby("wfo")["count"].sum()
        maxv = df2.max()
        bins = [0, 1, 2, 3, 5, 10, 15, 20, 25, 30, 40, 50, 75, 100, 200]
        if maxv > 5000:
            bins = [0, 5, 10, 50, 100, 250, 500, 750, 1000, 1500, 2000,
                    3000, 5000, 7500, 10000]
        elif maxv > 1000:
            bins = [0, 1, 5, 10, 50, 100, 150, 200, 250, 500, 750, 1000,
                    1250, 1500, 2000]
        elif maxv > 200:
            bins = [0, 1, 3, 5, 10, 20, 35, 50, 75, 100, 150, 200, 250,
                    500, 750, 1000]
        units = "Count"
        lformat = "%.0f"
    elif varname == "days":
        df = read_sql(
            """
        WITH data as (
            SELECT distinct wfo, generate_series(greatest(issue, %s),
            least(expire, %s), '1 minute'::interval) as ts from warnings
            WHERE issue > %s and expire < %s and """
            + pstr
            + """
        ), agg as (
            SELECT distinct wfo, date(ts) from data
        )
        select wfo, count(*) as days from agg
        GROUP by wfo ORDER by days DESC
        """,
            pgconn,
            params=(
                sts,
                ets,
                sts - datetime.timedelta(days=90),
                ets + datetime.timedelta(days=90),
            ),
            index_col="wfo",
        )

        df2 = df["days"]
        if df2.max() < 10:
            bins = list(range(1, 11, 1))
        else:
            bins = np.linspace(1, df["days"].max() + 11, 10, dtype="i")
        units = "Days"
        lformat = "%.0f"
        cmap.set_under("white")
        cmap.set_over("#EEEEEE")
    else:
        total_minutes = (ets - sts).total_seconds() / 60.0
        df = read_sql(
            """
        WITH data as (
            SELECT distinct wfo, generate_series(greatest(issue, %s),
            least(expire, %s), '1 minute'::interval) as ts from warnings
            WHERE issue > %s and expire < %s and """
            + pstr
            + """
        )
        select wfo, count(*) / %s * 100. as tpercent from data
        GROUP by wfo ORDER by tpercent DESC
        """,
            pgconn,
            params=(
                sts,
                ets,
                sts - datetime.timedelta(days=90),
                ets + datetime.timedelta(days=90),
                total_minutes,
            ),
            index_col="wfo",
        )

        df2 = df["tpercent"]
        bins = list(range(0, 101, 10))
        if df2.max() < 5:
            bins = np.arange(0, 5.1, 0.5)
        elif df2.max() < 10:
            bins = list(range(0, 11, 1))
        units = "Percent"
        lformat = "%.1f"

    nt = NetworkTable("WFO")
    for sid in nt.sts:
        sid = sid[-3:]
        if sid not in df2:
            df2[sid] = 0

    mp = MapPlot(
        sector="nws",
        axisbg="white",
        title="%s %s by NWS Office" % (title, PDICT[varname]),
        subtitle=("Valid %s - %s UTC, based on VTEC: %s")
        % (
            sts.strftime("%d %b %Y %H:%M"),
            ets.strftime("%d %b %Y %H:%M"),
            subtitle,
        ),
    )
    mp.fill_cwas(
        df2, bins=bins, ilabel=True, units=units, lblformat=lformat, cmap=cmap
    )

    return mp.fig, df
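Two small post-processing steps above are easy to miss: the bin ramp adapts to the largest count, and offices with no events are back-filled with zero so they still plot. A hedged sketch with made-up office IDs:

import pandas as pd

counts = pd.Series({"DMX": 1250, "OAX": 310, "FSD": 75})

maxv = counts.max()
if maxv > 5000:
    bins = [0, 5, 10, 50, 100, 250, 500, 750, 1000, 1500, 2000, 3000, 5000,
            7500, 10000]
elif maxv > 1000:
    bins = [0, 1, 5, 10, 50, 100, 150, 200, 250, 500, 750, 1000, 1250, 1500,
            2000]
else:
    bins = [0, 1, 2, 3, 5, 10, 15, 20, 25, 30, 40, 50, 75, 100, 200]

for sid in ["DMX", "OAX", "FSD", "ARX"]:  # hypothetical full office list
    if sid not in counts:
        counts[sid] = 0
print(bins[-1], counts.sort_index().to_dict())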
Ejemplo n.º 24
0
def plotter(fdict):
    """ Go """
    ctx = util.get_autoplot_context(fdict, get_description())
    date = ctx["date"]
    sector = ctx["sector"]
    threshold = ctx["threshold"]
    threshold_mm = distance(threshold, "IN").value("MM")
    window_sts = date - datetime.timedelta(days=90)
    if window_sts.year != date.year:
        raise NoDataFound("Sorry, do not support multi-year plots yet!")

    # idx0 = iemre.daily_offset(window_sts)
    idx1 = iemre.daily_offset(date)
    ncfn = iemre.get_daily_mrms_ncname(date.year)
    if not os.path.isfile(ncfn):
        raise NoDataFound("No data found.")
    ncvar = "p01d"

    # Get the state weight
    df = gpd.GeoDataFrame.from_postgis(
        """
    SELECT the_geom from states where state_abbr = %s
    """,
        util.get_dbconn("postgis"),
        params=(sector, ),
        index_col=None,
        geom_col="the_geom",
    )
    czs = CachingZonalStats(iemre.MRMS_AFFINE)
    with util.ncopen(ncfn) as nc:
        czs.gen_stats(
            np.zeros((nc.variables["lat"].size, nc.variables["lon"].size)),
            df["the_geom"],
        )
        jslice = None
        islice = None
        for nav in czs.gridnav:
            # careful here as y is flipped in this context
            jslice = slice(
                nc.variables["lat"].size - (nav.y0 + nav.ysz),
                nc.variables["lat"].size - nav.y0,
            )
            islice = slice(nav.x0, nav.x0 + nav.xsz)

        grid = np.zeros(
            (jslice.stop - jslice.start, islice.stop - islice.start))
        total = np.zeros(
            (jslice.stop - jslice.start, islice.stop - islice.start))
        for i, idx in enumerate(range(idx1, idx1 - 90, -1)):
            total += nc.variables[ncvar][idx, jslice, islice]
            grid = np.where(np.logical_and(grid == 0, total > threshold_mm), i,
                            grid)
        lon = nc.variables["lon"][islice]
        lat = nc.variables["lat"][jslice]

    mp = MapPlot(
        sector="state",
        state=sector,
        titlefontsize=14,
        subtitlefontsize=12,
        title=("NOAA MRMS Q3: Number of Recent Days "
               'till Accumulating %s" of Precip') % (threshold, ),
        subtitle=("valid %s: based on per calendar day "
                  "estimated preciptation, MultiSensorPass2 and "
                  "RadarOnly products") % (date.strftime("%-d %b %Y"), ),
    )
    x, y = np.meshgrid(lon, lat)
    cmap = get_cmap(ctx["cmap"])
    cmap.set_over("k")
    cmap.set_under("white")
    mp.pcolormesh(x, y, grid, np.arange(0, 81, 10), cmap=cmap, units="days")
    mp.drawcounties()
    mp.drawcities()

    return mp.fig
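A standalone sketch of the "days until the threshold accumulates" loop above: walk backwards one day at a time, keep a running total per grid cell, and record the first step on which each cell crosses the threshold. The precipitation grid here is synthetic; the real code slices daily MRMS data from netCDF.

import numpy as np

rng = np.random.default_rng(7)
daily_mm = rng.gamma(0.6, 4.0, (90, 20, 30))   # 90 days of fake precip [mm]
threshold_mm = 25.4                            # one inch

grid = np.zeros((20, 30))
total = np.zeros((20, 30))
for i in range(90):                            # i = days back from the map date
    total += daily_mm[i]
    grid = np.where(np.logical_and(grid == 0, total > threshold_mm), i, grid)
print(int(grid.min()), int(grid.max()))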
Ejemplo n.º 25
0
def plotter(fdict):
    """ Go """
    pgconn = get_dbconn("asos")
    ctx = get_autoplot_context(fdict, get_description())

    station = ctx["zstation"]
    df = read_sql(
        """
        select extract(doy from valid) as doy,
        greatest(skyl1, skyl2, skyl3, skyl4) as sky from alldata
        WHERE station = %s and
        (skyc1 = 'OVC' or skyc2 = 'OVC' or skyc3 = 'OVC' or skyc4 = 'OVC')
        and valid > '1973-01-01' and (extract(minute from valid) = 0 or
        extract(minute from valid) > 50) and report_type = 2
    """,
        pgconn,
        params=(station, ),
        index_col=None,
    )
    if df.empty:
        raise NoDataFound("Error, no results returned!")

    w = np.arange(1, 366, 7)
    # Ceiling bins [ft AGL]: every 100 ft to 5,000, every 500 ft to 10,000,
    # then every 1,000 ft to 31,000
    z = np.concatenate((
        np.arange(100, 5001, 100),
        np.arange(5500, 10001, 500),
        np.arange(11000, 31001, 1000),
    ))

    H, xedges, yedges = np.histogram2d(df["sky"].values,
                                       df["doy"].values,
                                       bins=(z, w))
    rows = []
    for i, x in enumerate(xedges[:-1]):
        for j, y in enumerate(yedges[:-1]):
            rows.append(dict(ceiling=x, doy=y, count=H[i, j]))
    resdf = pd.DataFrame(rows)

    H = ma.array(H)
    H.mask = np.where(H < 1, True, False)

    (fig, ax) = plt.subplots(1, 1)

    bounds = np.arange(0, 1.2, 0.1)
    bounds = np.concatenate((bounds, np.arange(1.2, 2.2, 0.2)))
    cmap = get_cmap(ctx["cmap"])
    cmap.set_under("#F9CCCC")
    norm = mpcolors.BoundaryNorm(bounds, cmap.N)

    ab = ctx["_nt"].sts[station]["archive_begin"]
    if ab is None:
        raise NoDataFound("Unknown station metadata.")
    syear = max([1973, ab.year])
    years = (datetime.date.today().year - syear) + 1.0
    c = ax.imshow(H / years,
                  aspect="auto",
                  interpolation="nearest",
                  norm=norm,
                  cmap=cmap)
    ax.set_ylim(-0.5, len(z) - 0.5)
    idx = [0, 4, 9, 19, 29, 39, 49, 54, 59, 64, 69, 74, 79]
    ax.set_yticks(idx)
    ax.set_yticklabels(z[idx])
    ax.set_title(("%s-%s [%s %s Ceilings Frequency\n"
                  "Level at which Overcast Conditions Reported") % (
                      syear,
                      datetime.date.today().year,
                      station,
                      ctx["_nt"].sts[station]["name"],
                  ))
    ax.set_ylabel("Overcast Level [ft AGL], irregular scale")
    ax.set_xlabel("Week of the Year")
    ax.set_xticks(np.arange(1, 55, 7))
    ax.set_xticklabels((
        "Jan 1",
        "Feb 19",
        "Apr 8",
        "May 27",
        "Jul 15",
        "Sep 2",
        "Oct 21",
        "Dec 9",
    ))
    b = fig.colorbar(c)
    b.set_label("Hourly Obs per week per year")
    return fig, resdf
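The masking and BoundaryNorm above are what blank out empty cells and keep the colorbar aligned with the irregular frequency bins. A hedged, self-contained sketch using random counts and a stock matplotlib colormap (the real code gets its colormap from pyiem):

import numpy as np
import numpy.ma as ma
import matplotlib.colors as mpcolors
import matplotlib.pyplot as plt

H = np.random.default_rng(3).poisson(1.0, (80, 52)).astype(float)
H = ma.array(H)
H.mask = np.where(H < 1, True, False)          # hide cells with no observations

bounds = np.concatenate((np.arange(0, 1.2, 0.1), np.arange(1.2, 2.2, 0.2)))
cmap = plt.get_cmap("viridis").copy()
cmap.set_under("#F9CCCC")
norm = mpcolors.BoundaryNorm(bounds, cmap.N)

fig, ax = plt.subplots()
c = ax.imshow(H / 10.0, aspect="auto", interpolation="nearest",
              norm=norm, cmap=cmap)
fig.colorbar(c, label="Hourly Obs per week per year")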
Ejemplo n.º 26
0
def plotter(fdict):
    """ Go """
    ctx = get_autoplot_context(fdict, get_description())
    varname = ctx["v"]

    df = get_df(ctx)
    if df.empty:
        raise NoDataFound("No data was found for your query")
    mp = MapPlot(
        sector=("state" if ctx["t"] == "state" else "cwa"),
        state=ctx["state"],
        cwa=(ctx["wfo"] if len(ctx["wfo"]) == 3 else ctx["wfo"][1:]),
        axisbg="white",
        title="%s for %s on %s" % (PDICT2[ctx["v"]], ctx["title"], ctx["day"]),
        nocaption=True,
        titlefontsize=16,
    )
    ramp = None
    cmap = get_cmap(ctx["cmap"])
    extend = "both"
    if varname in ["max_gust", "max_sknt"]:
        extend = "max"
        ramp = np.arange(0, 40, 4)
        ramp = np.append(ramp, np.arange(40, 80, 10))
        ramp = np.append(ramp, np.arange(80, 120, 20))
    # Data QC, cough
    if ctx.get("above"):
        df = df[df[varname] < ctx["above"]]
    if ctx.get("below"):
        df = df[df[varname] > ctx["below"]]
    # with QC done, we compute ramps
    if ramp is None:
        ramp = np.linspace(df[varname].min() - 5,
                           df[varname].max() + 5,
                           10,
                           dtype="i")

    if ctx["p"] == "both":
        mp.contourf(
            df["lon"].values,
            df["lat"].values,
            df[varname].values,
            ramp,
            units=VARUNITS[varname],
            cmap=cmap,
            spacing="proportional",
            extend=extend,
        )
    if ctx["t"] == "state":
        df2 = df[df[ctx["t"]] == ctx[ctx["t"]]]
    else:
        df2 = df[df["wfo"] == ctx["wfo"]]

    mp.plot_values(
        df2["lon"].values,
        df2["lat"].values,
        df2[varname].values,
        "%.1f" if varname in ["max_gust", "max_sknt"] else "%.0f",
        labelbuffer=3,
    )
    mp.drawcounties()
    if ctx["t"] == "cwa":
        mp.draw_cwas()

    return mp.fig, df
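The wind-speed ramp above is built piecewise so contour levels stay fine at low speeds and coarse at high speeds; a quick sketch of just that construction:

import numpy as np

ramp = np.arange(0, 40, 4)
ramp = np.append(ramp, np.arange(40, 80, 10))
ramp = np.append(ramp, np.arange(80, 120, 20))
print(ramp)  # [0 4 8 ... 36 40 50 60 70 80 100]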
Ejemplo n.º 27
0
def plotter(fdict):
    """ Go """
    ctx = get_autoplot_context(fdict, get_description())
    varname = ctx["v"]

    if ctx["t"] == "state":
        bnds = reference.state_bounds[ctx["state"]]
        title = reference.state_names[ctx["state"]]
    else:
        bnds = reference.wfo_bounds[ctx["wfo"]]
        title = "NWS CWA %s [%s]" % (
            ctx["_nt"].sts[ctx["wfo"]]["name"],
            ctx["wfo"],
        )
    df, valid = get_df(ctx, bnds)
    if df.empty:
        raise NoDataFound("No data was found for your query")
    mp = MapPlot(
        sector=("state" if ctx["t"] == "state" else "cwa"),
        state=ctx["state"],
        cwa=(ctx["wfo"] if len(ctx["wfo"]) == 3 else ctx["wfo"][1:]),
        axisbg="white",
        title="%s for %s" % (PDICT2[ctx["v"]], title),
        subtitle=("Map valid: %s UTC") % (valid.strftime("%d %b %Y %H:%M"),),
        nocaption=True,
        titlefontsize=16,
    )
    if varname == "vsby":
        ramp = np.array([0.01, 0.1, 0.25, 0.5, 1, 2, 3, 5, 8, 9.9])
        valunit = "miles"
    elif varname == "feel":
        valunit = "F"
        df["feel"] = (
            apparent_temperature(
                df["tmpf"].values * units("degF"),
                df["relh"].values * units("percent"),
                df["sknt"].values * units("knots"),
            )
            .to(units("degF"))
            .m
        )
    # Data QC, cough
    if ctx.get("above"):
        df = df[df[varname] < ctx["above"]]
    if ctx.get("below"):
        df = df[df[varname] > ctx["below"]]
    # with QC done, we compute ramps
    if varname != "vsby":
        ramp = np.linspace(
            df[varname].min() - 5, df[varname].max() + 5, 10, dtype="i"
        )

    mp.contourf(
        df["lon"].values,
        df["lat"].values,
        df[varname].values,
        ramp,
        units=valunit,
        cmap=get_cmap(ctx["cmap"]),
    )
    if ctx["t"] == "state":
        df2 = df[df["state"] == ctx["state"]]
    else:
        df2 = df[df["wfo"] == ctx["wfo"]]

    mp.plot_values(
        df2["lon"].values,
        df2["lat"].values,
        df2[varname].values,
        "%.1f",
        labelbuffer=10,
    )
    mp.drawcounties()
    if ctx["t"] == "cwa":
        mp.draw_cwas()

    return mp.fig, df
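The "feels like" column above comes from MetPy's apparent_temperature, which blends heat index and wind chill. A small sketch of the same call on illustrative values, assuming the MetPy API behaves as it is used above:

import numpy as np
from metpy.calc import apparent_temperature
from metpy.units import units

tmpf = np.array([88.0, 15.0, 60.0])   # degF
relh = np.array([70.0, 50.0, 40.0])   # percent
sknt = np.array([8.0, 20.0, 5.0])     # knots

feel = (
    apparent_temperature(
        tmpf * units("degF"),
        relh * units("percent"),
        sknt * units("knots"),
    )
    .to(units("degF"))
    .m
)
print(feel)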
Ejemplo n.º 28
0
def plotter(fdict):
    """ Go """
    pgconn = get_dbconn("asos")

    ctx = get_autoplot_context(fdict, get_description())
    station = ctx["zstation"]
    date = ctx["date"]
    opt = ctx["opt"]
    varname = ctx["v"]

    tzname = ctx["_nt"].sts[station]["tzname"]

    # Resolve how to limit the query data
    limiter = ""
    if opt == "day":
        limiter = (f" and to_char(valid at time zone '{tzname}', 'mmdd') = "
                   f"'{date.strftime('%m%d')}' ")
        subtitle = (f"For Date of {date.strftime('%-d %b')}, "
                    f"{date.strftime('%-d %b %Y')} plotted in bottom panel")
        datefmt = "%I %p"
    elif opt == "week":
        limiter = f" and extract(week from valid) = {date.strftime('%V')} "
        subtitle = (
            f"For ISO Week of {date.strftime('%V')}, "
            f"week of {date.strftime('%-d %b %Y')} plotted in bottom panel")
        datefmt = "%-d %b"
    elif opt == "month":
        limiter = f" and extract(month from valid) = {date.strftime('%m')} "
        subtitle = (f"For Month of {date.strftime('%B')}, "
                    f"{date.strftime('%b %Y')} plotted in bottom panel")
        datefmt = "%-d"
    else:
        subtitle = f"All Year, {date.year} plotted in bottom panel"
        datefmt = "%-d %b"

    # Load up all the values, since we need pandas to do some heavy lifting
    obsdf = read_sql(
        f"""
        select valid at time zone 'UTC' as utc_valid,
        extract(year from valid at time zone %s)  as year,
        extract(hour from valid at time zone %s +
            '10 minutes'::interval)::int as hr, {varname}
        from alldata WHERE station = %s and {varname} is not null {limiter}
        and report_type = 2 ORDER by valid ASC
    """,
        pgconn,
        params=(tzname, tzname, station),
        index_col=None,
    )
    if obsdf.empty:
        raise NoDataFound("No data was found.")

    # Assign percentiles
    obsdf["quantile"] = obsdf[["hr", varname]].groupby("hr").rank(pct=True)
    # Compute actual percentiles
    qtile = (obsdf[["hr", varname
                    ]].groupby("hr").quantile(np.arange(0, 1.01,
                                                        0.05)).reset_index())
    qtile = qtile.rename(columns={"level_1": "quantile"})
    (fig, ax) = plt.subplots(2, 1)
    cmap = get_cmap(ctx["cmap"])
    for hr, gdf in qtile.groupby("hr"):
        ax[0].plot(
            gdf["quantile"].values * 100.0,
            gdf[varname].values,
            color=cmap(hr / 23.0),
            label=str(hr),
        )
    ax[0].set_xlim(0, 100)
    ax[0].grid(True)
    ax[0].set_ylabel(PDICT[varname])
    ax[0].set_xlabel("Percentile")
    ax[0].set_position([0.13, 0.55, 0.71, 0.34])
    cax = plt.axes([0.86, 0.55, 0.03, 0.33],
                   frameon=False,
                   yticks=[],
                   xticks=[])
    cb = ColorbarBase(cax, cmap=cmap)
    cb.set_ticks(np.arange(0, 1, 4.0 / 24.0))
    cb.set_ticklabels(["Mid", "4 AM", "8 AM", "Noon", "4 PM", "8 PM"])
    cb.set_label("Local Hour")

    thisyear = obsdf[obsdf["year"] == date.year]
    if not thisyear.empty:
        ax[1].plot(thisyear["utc_valid"].values,
                   thisyear["quantile"].values * 100.0)
        ax[1].grid(True)
        ax[1].set_ylabel("Percentile")
        ax[1].set_ylim(-1, 101)
        ax[1].xaxis.set_major_formatter(
            mdates.DateFormatter(datefmt, tz=pytz.timezone(tzname)))
        if opt == "day":
            ax[1].set_xlabel(f"Timezone: {tzname}")
    title = ("%s %s %s Percentiles\n%s") % (
        station,
        ctx["_nt"].sts[station]["name"],
        PDICT[varname],
        subtitle,
    )
    fitbox(fig, title, 0.01, 0.99, 0.91, 0.99, ha="center", va="center")
    return fig, qtile
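A self-contained sketch of the percentile machinery above: rank each observation within its local hour (pct=True yields 0-1 percentiles) and compute the hour-by-hour quantile curves. The synthetic temperatures and column names are assumptions.

import numpy as np
import pandas as pd

rng = np.random.default_rng(5)
obsdf = pd.DataFrame({
    "hr": np.tile(np.arange(24), 200),
    "tmpf": rng.normal(55, 12, 24 * 200),
})

# Percentile of each observation among all observations at the same hour
obsdf["quantile"] = obsdf.groupby("hr")["tmpf"].rank(pct=True)

# The 0, 5, ..., 100th percentile curve for every hour
qtile = (
    obsdf.groupby("hr")["tmpf"]
    .quantile(np.arange(0, 1.01, 0.05))
    .reset_index()
    .rename(columns={"level_1": "quantile"})
)
print(qtile.head())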
Ejemplo n.º 29
0
def plotter(fdict):
    """ Go """
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx["station"]
    gddbase = ctx["gddbase"]
    base = ctx["base"]
    ceil = ctx["ceil"]
    today = ctx["date"]
    bs = ctx["_nt"].sts[station]["archive_begin"]
    if bs is None:
        raise NoDataFound("Unknown station metadata.")
    byear = bs.year
    eyear = today.year + 1
    pgconn = get_dbconn("coop")
    cursor = pgconn.cursor()
    table = "alldata_%s" % (station[:2], )
    cursor.execute(
        """
        SELECT year, extract(doy from day), gddxx(%s, %s, high,low), low
        from """ + table + """ where station = %s and year > %s
        and day < %s
    """,
        (base, ceil, station, byear, today),
    )

    gdd = np.zeros((eyear - byear, 366), "f")
    freezes = np.zeros((eyear - byear), "f")
    freezes[:] = 400.0

    for row in cursor:
        gdd[int(row[0]) - byear, int(row[1]) - 1] = row[2]
        if row[1] > 180 and row[3] < 32 and row[1] < freezes[row[0] - byear]:
            freezes[int(row[0]) - byear] = row[1]

    for i, freeze in enumerate(freezes):
        gdd[i, int(freeze):] = 0.0

    idx = int(today.strftime("%j")) - 1
    apr1 = int(datetime.datetime(2000, 4, 1).strftime("%j")) - 1
    jun30 = int(datetime.datetime(2000, 6, 30).strftime("%j")) - 1
    sep1 = int(datetime.datetime(2000, 9, 1).strftime("%j")) - 1
    oct31 = int(datetime.datetime(2000, 10, 31).strftime("%j")) - 1

    # Replace all years with the last year's data
    scenario_gdd = gdd * 1
    scenario_gdd[:-1, :idx] = gdd[-1, :idx]

    # store our probs
    probs = np.zeros((oct31 - sep1, jun30 - apr1), "f")
    scenario_probs = np.zeros((oct31 - sep1, jun30 - apr1), "f")

    rows = []
    for x in range(apr1, jun30):
        for y in range(sep1, oct31):
            sums = np.where(np.sum(gdd[:-1, x:y], 1) >= gddbase, 1, 0)
            probs[y - sep1, x - apr1] = sum(sums) / float(len(sums)) * 100.0
            sums = np.where(np.sum(scenario_gdd[:-1, x:y], 1) >= gddbase, 1, 0)
            scenario_probs[y - sep1,
                           x - apr1] = (sum(sums) / float(len(sums)) * 100.0)
            rows.append(
                dict(
                    x=x,
                    y=y,
                    prob=probs[y - sep1, x - apr1],
                    scenario_probs=scenario_probs[y - sep1, x - apr1],
                ))
    df = pd.DataFrame(rows)

    probs = np.where(probs < 0.1, -1, probs)
    scenario_probs = np.where(scenario_probs < 0.1, -1, scenario_probs)

    (fig, ax) = plt.subplots(1, 2, sharey=True, figsize=(8, 6))

    cmap = get_cmap(ctx["cmap"])
    cmap.set_under("white")
    norm = mpcolors.BoundaryNorm(np.arange(0, 101, 5), cmap.N)

    ax[0].imshow(
        np.flipud(probs),
        aspect="auto",
        extent=[apr1, jun30, sep1, oct31],
        interpolation="nearest",
        vmin=0,
        vmax=100,
        cmap=cmap,
        norm=norm,
    )
    ax[0].grid(True)
    ax[0].set_title("Overall Frequencies")
    ax[0].set_xticks((91, 106, 121, 136, 152, 167))
    ax[0].set_ylabel("Growing Season End Date")
    ax[0].set_xlabel("Growing Season Begin Date")
    ax[0].set_xticklabels(("Apr 1", "15", "May 1", "15", "Jun 1", "15"))
    ax[0].set_yticks((244, 251, 258, 265, 274, 281, 288, 295, 305))
    ax[0].set_yticklabels((
        "Sep 1",
        "Sep 8",
        "Sep 15",
        "Sep 22",
        "Oct 1",
        "Oct 8",
        "Oct 15",
        "Oct 22",
        "Nov",
    ))

    res = ax[1].imshow(
        np.flipud(scenario_probs),
        aspect="auto",
        extent=[apr1, jun30, sep1, oct31],
        interpolation="nearest",
        vmin=0,
        vmax=100,
        cmap=cmap,
        norm=norm,
    )
    ax[1].grid(True)
    ax[1].set_title("Scenario after %s" % (today.strftime("%-d %B %Y"), ))
    ax[1].set_xticks((91, 106, 121, 136, 152, 167))
    ax[1].set_xticklabels(("Apr 1", "15", "May 1", "15", "Jun 1", "15"))
    ax[1].set_xlabel("Growing Season Begin Date")

    fig.subplots_adjust(bottom=0.20, top=0.85)
    cbar_ax = fig.add_axes([0.05, 0.06, 0.85, 0.05])
    fig.colorbar(res, cax=cbar_ax, orientation="horizontal")

    fig.text(
        0.5,
        0.90,
        ("%s-%s %s GDDs\n"
         "Frequency [%%] of reaching %.0f GDDs (%.0f/%.0f) "
         "prior to first freeze") % (
             byear,
             eyear - 1,
             ctx["_nt"].sts[station]["name"],
             gddbase,
             base,
             ceil,
         ),
        fontsize=14,
        ha="center",
    )

    return fig, df
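The probability grid above is a frequency count over historical years: for each (season start, season end) pair, sum that year's GDDs across the window and ask how often the sum reaches the target. A rough sketch with a random GDD matrix and a hypothetical target:

import numpy as np

rng = np.random.default_rng(11)
gdd = rng.uniform(0, 25, (40, 366))    # 40 fake years of daily GDD values
gddbase = 2000                         # hypothetical accumulation target

apr1, jun30 = 91, 181
sep1, oct31 = 244, 304
probs = np.zeros((oct31 - sep1, jun30 - apr1))
for x in range(apr1, jun30):
    for y in range(sep1, oct31):
        hits = np.where(np.sum(gdd[:, x:y], 1) >= gddbase, 1, 0)
        probs[y - sep1, x - apr1] = hits.sum() / float(len(hits)) * 100.0
print(probs.min(), probs.max())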
Ejemplo n.º 30
0
def plotter(fdict):
    """ Go """
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx["station"]
    days = ctx["days"]
    varname = ctx["var"]
    df = get_data(ctx)
    if df.empty:
        raise NoDataFound("Error, no results returned!")

    # Don't plot zeros for precip
    if varname == "wettest":
        df = df[df["sum_precip"] > 0]
    fig = plt.figure(figsize=(8, 6))
    ax = fig.add_axes([0.1, 0.3, 0.75, 0.6])
    lax = fig.add_axes([0.1, 0.1, 0.75, 0.2])
    cax = fig.add_axes([0.87, 0.3, 0.03, 0.6])
    title = PDICT.get(varname)
    if days == 1:
        title = title.replace("Average ", "")
    ax.set_title(("%s [%s]\n%i Day Period with %s") %
                 (ctx["_nt"].sts[station]["name"], station, days, title))
    cmap = get_cmap(ctx["cmap"])
    minval = df[XREF[varname]].min() - 1.0
    if varname == "wettest" and minval < 0:
        minval = 0
    maxval = df[XREF[varname]].max() + 1.0
    ramp = np.linspace(minval,
                       maxval,
                       min([int(maxval - minval), 10]),
                       dtype="i")
    norm = mpcolors.BoundaryNorm(ramp, cmap.N)
    cb = ColorbarBase(cax, norm=norm, cmap=cmap)
    cb.set_label("inch" if varname == "wettest" else r"$^\circ$F")
    ax.barh(
        df.index.values,
        [days] * len(df.index),
        left=df["doy"].values,
        color=cmap(norm(df[XREF[varname]].values)),
    )
    ax.grid(True)
    lax.grid(True)
    xticks = []
    xticklabels = []
    for i in np.arange(df["doy"].min() - 5, df["doy"].max() + 5, 1):
        ts = datetime.datetime(2000, 1, 1) + datetime.timedelta(days=int(i))
        if ts.day == 1:
            xticks.append(i)
            xticklabels.append(ts.strftime("%-d %b"))
    ax.set_xticks(xticks)
    lax.set_xticks(xticks)
    lax.set_xticklabels(xticklabels)

    counts = np.zeros(366 * 2)
    for _, row in df.iterrows():
        counts[int(row["doy"]):int(row["doy"] + days)] += 1

    lax.bar(np.arange(366 * 2), counts, edgecolor="blue", facecolor="blue")
    lax.set_ylabel("Years")
    lax.text(
        0.02,
        0.9,
        "Frequency of Day\nwithin period",
        transform=lax.transAxes,
        va="top",
    )
    ax.set_ylim(df.index.values.min() - 3, df.index.values.max() + 3)

    ax.set_xlim(df["doy"].min() - 10, df["doy"].max() + 10)
    lax.set_xlim(df["doy"].min() - 10, df["doy"].max() + 10)
    ax.yaxis.set_major_locator(MaxNLocator(prune="lower"))
    return fig, df
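Finally, a minimal sketch of the colored-bar technique in this last example: pushing each bar's value through a BoundaryNorm so the bar colors line up with a shared colorbar. The values, ramp, and colormap name are placeholders.

import numpy as np
import matplotlib.colors as mpcolors
import matplotlib.pyplot as plt
from matplotlib.colorbar import ColorbarBase

values = np.array([1.3, 2.7, 0.4, 3.9, 2.2])   # e.g. period precip totals [inch]
ramp = np.linspace(0, 4, 9)
cmap = plt.get_cmap("BrBG").copy()
norm = mpcolors.BoundaryNorm(ramp, cmap.N)

fig = plt.figure(figsize=(8, 6))
ax = fig.add_axes([0.1, 0.3, 0.75, 0.6])
cax = fig.add_axes([0.87, 0.3, 0.03, 0.6])
ColorbarBase(cax, norm=norm, cmap=cmap).set_label("inch")
ax.barh(np.arange(len(values)), values, color=cmap(norm(values)))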