Example #1
def handler(cccc, date):
    """Handle the request, return df."""
    sts = utc(date.year, date.month, date.day)
    ets = sts + datetime.timedelta(days=1)
    # Cull out "faked" MOS for now
    plimiter = ""
    if cccc == "KWNO":
        plimiter = (
            "and substr(pil, 1, 3) not in ('NBS', 'NBX', 'NBE', 'NBH', "
            "'NBP', 'MAV', 'MET', 'MEX', 'LAV', 'LEV')"
        )
    with get_sqlalchemy_conn("afos") as conn:
        # We don't auto-list some internal products like WRK LLL
        df = read_sql(
            f"""
            select entered at time zone 'UTC' as entered, trim(pil) as pil,
            to_char(entered at time zone 'UTC', 'YYYYmmddHH24MI') || '-' ||
            source || '-' || wmo || '-' || trim(pil) ||
            (case when bbb is not null then '-' || bbb else '' end)
            as product_id
            from products where source = %s and entered >= %s
            and entered < %s and substr(pil, 1, 3) not in ('WRK', 'LLL')
            {plimiter} ORDER by entered ASC
            """,
            conn,
            params=(cccc, sts, ets),
            index_col=None,
        )
    return df
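A minimal invocation sketch for the handler above, assuming the imports it relies on (datetime, pandas' read_sql, and pyiem's utc and get_sqlalchemy_conn helpers) plus access to the AFOS database; the source identifier and date below are hypothetical:

import datetime

# Hypothetical call: list products issued by KDMX during the UTC day of 5 March 2022
df = handler("KDMX", datetime.date(2022, 3, 5))
print(df[["pil", "product_id"]].head())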
Example #2
def gen(huc12s, sdate, edate):
    """Make the map"""
    with get_sqlalchemy_conn("idep") as conn:
        # Check that we have data for this date!
        df = pd.read_sql(
            text(
                """
            SELECT huc_12,
            sum(avg_loss) * 4.463 as avg_loss_ton_acre,
            sum(avg_delivery) * 4.463 as avg_delivery_ton_acre,
            sum(qc_precip) / 25.4 as rain_inch
            from results_by_huc12
            WHERE huc_12 in :h and scenario = 0
            and valid >= :sdate and valid <= :edate
            GROUP by huc_12 ORDER by huc_12
        """
            ),
            conn,
            params={
                "h": tuple(huc12s),
                "sdate": sdate,
                "edate": edate,
            },
        )
    sio = StringIO()
    df.to_csv(sio, index=False, float_format="%.2f")
    return sio.getvalue()
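A usage sketch under the same assumptions (pandas, sqlalchemy's text, StringIO, and the idep database); the HUC12 code below is hypothetical. Binding a Python tuple to the :h parameter appears to rely on the database driver rendering it as a SQL IN list.

import datetime

# Hypothetical call: CSV of April 2022 totals for a single (made-up) HUC12
csv_text = gen(
    ("070801050101",),  # hypothetical HUC12 identifier
    datetime.date(2022, 4, 1),
    datetime.date(2022, 4, 30),
)
print(csv_text.splitlines()[0])  # header row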
Example #3
def main():
    """Do Things."""
    with get_sqlalchemy_conn("id3b") as conn:
        df = pd.read_sql(
            "select valid_at at time zone 'UTC' as valid, wmo_source, "
            "valid_at - wmo_valid_at as latency from mar5 "
            "WHERE ldm_feedtype = 11 and entered_at > '2022-03-5 13:00' "
            "and entered_at < '2022-03-5 19:00' and "
            "extract(minute from wmo_valid_at) > 0 and "
            "wmo_source not in ('KWAL', 'KWOH', 'KWBC', 'KWNB', 'KWNO', 'KAWN') and "
            "substr(wmo_source, 1, 1) = 'K' "
            "and substr(awips_id, 1, 3) not in ('RR3', 'HML', 'RR2', 'RRS', 'LSR', 'LLL') "
            "ORDER by valid ASC",
            conn,
            index_col="valid",
        )
    df["seconds"] = df["latency"] / np.timedelta64(1, "s")
    (fig, ax) = figure_axes(
        title="NWS Text Product Dissemination Latency",
        subtitle=(
            "Based on difference between NOAAPort receipt time "
            "and product WMO valid time."
        ),
        apctx={"_r": "43"},
    )
    ax.scatter(df.index.values, df["seconds"].values, alpha=1)
    # df2 = df[df['wmo_source'] == 'KSGX']
    # ax.scatter(df2.index.values, df2["seconds"].values, alpha=1, color='b')
    df2 = df[df["wmo_source"] == "KDMX"]
    ax.scatter(df2.index.values, df2["seconds"].values, alpha=1, color="r")
    ax.set_ylim(-10, 480)
    ax.set_ylabel("Latency (minutes)")
    ax.set_yticks(np.arange(0, 481, 60))
    ax.set_yticklabels(np.arange(0, 9, 1))
    ax.grid(True)
    ax.xaxis.set_major_locator(mdates.HourLocator(interval=1))
    ax.xaxis.set_major_formatter(mdates.DateFormatter("%-I %p", tz=CST))
    ax.set_xlim(df.index.values[0], df.index.values[-1])
    ax.set_xlabel("5 March 2022 Central Standard Time")
    fig.text(
        0.05,
        0.02,
        "* Latencies are not exact due to vagaries of how WMO timestamps work"
        ", @akrherz 9 Mar 2022",
    )
    ax.legend(
        handles=[
            Line2D([0], [0], ls="", marker="o", color="b", label="All NWS"),
            Line2D([0], [0], ls="", marker="o", color="r", label="NWS Des Moines"),
        ],
        loc=(0.9, 0.9),
    )
    fig.savefig("test.png")
Example #4
def do():
    """Do work"""
    with get_sqlalchemy_conn("idep") as conn:
        df = gpd.read_postgis(
            "SELECT ST_ReducePrecision(ST_Transform(simple_geom, 4326), "
            " 0.0001) as geo, dominant_tillage as dt, "
            "huc_12, hu_12_name as name from huc12 WHERE scenario = 0",
            conn,
            index_col="huc_12",
            geom_col="geo",
        )
    return df.to_json()
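A sketch of consuming the returned string, assuming geopandas, get_sqlalchemy_conn, and the idep database are available as above. A GeoDataFrame serializes via to_json() into a GeoJSON FeatureCollection:

import json

# Parse the GeoJSON text back into a dict and count the features
collection = json.loads(do())
print(len(collection["features"]), "HUC12 polygons returned")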
Example #5
def do(huc12, mode, fmt):
    """Do work"""
    utcnow = datetime.datetime.utcnow()
    if mode == "daily":
        with get_sqlalchemy_conn("idep") as conn:
            df = pd.read_sql(
                """
                SELECT valid,
                avg_loss * 4.463 as avg_loss,
                avg_delivery * 4.463 as avg_delivery,
                qc_precip / 25.4 as qc_precip,
                avg_runoff / 25.4 as avg_runoff,
                1 as avg_loss_events,
                1 as avg_delivery_events,
                1 as qc_precip_events,
                1 as avg_runoff_events
                from results_by_huc12 where huc_12 = %s and scenario = 0 ORDER
                by valid ASC
            """,
                conn,
                params=(huc12,),
                index_col=None,
            )
    else:
        with get_sqlalchemy_conn("idep") as conn:
            df = pd.read_sql(
                """
                SELECT extract(year from valid)::int as yr,
                sum(avg_loss) * 4.463 as avg_loss,
                sum(avg_delivery) * 4.463 as avg_delivery,
                sum(qc_precip) / 25.4 as qc_precip,
                sum(avg_runoff) / 25.4 as avg_runoff,
                sum(case when avg_loss > 0 then 1 else 0 end)
                    as avg_loss_events,
                sum(case when avg_delivery > 0 then 1 else 0 end)
                    as avg_delivery_events,
                sum(case when qc_precip > 0 then 1 else 0 end)
                    as qc_precip_events,
                sum(case when avg_runoff > 0 then 1 else 0 end)
                    as avg_runoff_events
                from results_by_huc12 where huc_12 = %s and scenario = 0
                GROUP by yr ORDER by yr ASC
            """,
                conn,
                params=(huc12,),
                index_col=None,
            )
            df["valid"] = pd.to_datetime(
                {"year": df["yr"], "month": 1, "day": 1}
            )
    if fmt == "xlsx":
        bio = BytesIO()
        # pylint: disable=abstract-class-instantiated
        writer = pd.ExcelWriter(bio, engine="xlsxwriter")
        df.to_excel(writer, sheet_name=f"{huc12} Data", index=False)
        writer.close()
        return bio.getvalue()

    res = {
        "results": [],
        "huc12": huc12,
        "generation_time": utcnow.strftime("%Y-%m-%dT%H:%M:%SZ"),
    }
    for _, row in df.iterrows():
        res["results"].append(
            dict(
                date=row["valid"].strftime("%Y-%m-%d"),
                avg_loss=row["avg_loss"],
                avg_loss_events=row["avg_loss_events"],
                avg_delivery=row["avg_delivery"],
                avg_delivery_events=row["avg_delivery_events"],
                qc_precip=row["qc_precip"],
                qc_precip_events=row["qc_precip_events"],
                avg_runoff=row["avg_runoff"],
                avg_runoff_events=row["avg_runoff_events"],
            )
        )
    return json.dumps(res)
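A sketch of calling do() in both output modes, assuming the surrounding imports (pandas, json, BytesIO, get_sqlalchemy_conn) and the idep database; the HUC12 value is hypothetical:

import json

# Hypothetical yearly summary returned as JSON text
payload = json.loads(do("070801050101", "yearly", "json"))
print(payload["huc12"], len(payload["results"]), "years of results")

# Hypothetical daily time series returned as Excel bytes
with open("huc12_daily.xlsx", "wb") as fh:
    fh.write(do("070801050101", "daily", "xlsx"))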
Example #6
def make_overviewmap(form):
    """Draw a pretty map of just the HUC."""
    huc = form.get("huc")
    plt.close()
    projection = EPSG[5070]
    # Default to no limiter so the variable is defined even for a missing or short huc
    huclimiter = ""
    if huc is not None and len(huc) >= 8:
        huclimiter = " and substr(huc_12, 1, 8) = '%s' " % (huc[:8],)
    with get_sqlalchemy_conn("idep") as conn:
        df = read_postgis(
            f"""
            SELECT simple_geom as geom, huc_12,
            ST_x(ST_Transform(ST_Centroid(geom), 4326)) as centroid_x,
            ST_y(ST_Transform(ST_Centroid(geom), 4326)) as centroid_y,
            hu_12_name
            from huc12 i WHERE i.scenario = 0 {huclimiter}
        """,
            conn,
            geom_col="geom",
            index_col="huc_12",
        )
    minx, miny, maxx, maxy = df["geom"].total_bounds
    buf = float(form.get("zoom", 10.0)) * 1000.0  # 10km
    hucname = "" if huc not in df.index else df.at[huc, "hu_12_name"]
    subtitle = "The HUC8 is in tan"
    if huc is not None and len(huc) == 12:
        subtitle = "HUC12 highlighted in red, the HUC8 it resides in is in tan"
    m = MapPlot(
        axisbg="#EEEEEE",
        logo="dep",
        sector="custom",
        south=miny - buf,
        north=maxy + buf,
        west=minx - buf,
        east=maxx + buf,
        projection=projection,
        continentalcolor="white",
        title="DEP HUC %s:: %s" % (huc, hucname),
        subtitle=subtitle,
        titlefontsize=20,
        subtitlefontsize=18,
        caption="Daily Erosion Project",
    )
    for _huc12, row in df.iterrows():
        p = Polygon(
            row["geom"].exterior.coords,
            fc="red" if _huc12 == huc else "tan",
            ec="k",
            zorder=Z_OVERLAY2,
            lw=0.1,
        )
        m.ax.add_patch(p)
        # If this is our HUC, add some text to prevent cities overlay overlap
        if _huc12 == huc:
            m.plot_values(
                [row["centroid_x"]],
                [row["centroid_y"]],
                ["    .    "],
                color="None",
                outlinecolor="None",
            )
    if huc is not None:
        m.drawcounties()
        m.drawcities()
    ram = BytesIO()
    plt.savefig(ram, format="png", dpi=100)
    plt.close()
    ram.seek(0)
    return ram.read(), True
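A rendering sketch, assuming matplotlib, pyiem's MapPlot and EPSG, and the other names referenced by the function are in scope; the HUC12 and zoom values are hypothetical:

# Hypothetical call: overview map for one HUC12 with a 15 km buffer, saved to disk
png, _ok = make_overviewmap({"huc": "070801050101", "zoom": "15"})
with open("overview.png", "wb") as fh:
    fh.write(png)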
Example #7
def make_map(huc, ts, ts2, scenario, v, form):
    """Make the map"""
    projection = EPSG[5070]
    plt.close()
    # suggested for runoff and precip
    if v in ["qc_precip", "avg_runoff"]:
        # c = ['#ffffa6', '#9cf26d', '#76cc94', '#6399ba', '#5558a1']
        cmap = james()
    # suggested for detachment
    elif v in ["avg_loss"]:
        # c =['#cbe3bb', '#c4ff4d', '#ffff4d', '#ffc44d', '#ff4d4d', '#c34dee']
        cmap = dep_erosion()
    # suggested for delivery
    elif v in ["avg_delivery"]:
        # c =['#ffffd2', '#ffff4d', '#ffe0a5', '#eeb74d', '#ba7c57', '#96504d']
        cmap = dep_erosion()

    pgconn = get_dbconn("idep")
    cursor = pgconn.cursor()

    title = "for %s" % (ts.strftime("%-d %B %Y"),)
    if ts != ts2:
        title = "for period between %s and %s" % (
            ts.strftime("%-d %b %Y"),
            ts2.strftime("%-d %b %Y"),
        )
        if "averaged" in form:
            title = "averaged between %s and %s (2008-2017)" % (
                ts.strftime("%-d %b"),
                ts2.strftime("%-d %b"),
            )

    # Check that we have data for this date!
    cursor.execute(
        "SELECT value from properties where key = 'last_date_0'",
    )
    lastts = datetime.datetime.strptime(cursor.fetchone()[0], "%Y-%m-%d")
    floor = datetime.date(2007, 1, 1)
    if ts > lastts.date() or ts2 > lastts.date() or ts < floor:
        plt.text(
            0.5,
            0.5,
            "Data Not Available\nPlease Check Back Later!",
            fontsize=20,
            ha="center",
        )
        ram = BytesIO()
        plt.savefig(ram, format="png", dpi=100)
        plt.close()
        ram.seek(0)
        return ram.read(), False
    if huc is None:
        huclimiter = ""
    elif len(huc) == 8:
        huclimiter = " and substr(i.huc_12, 1, 8) = '%s' " % (huc,)
    elif len(huc) == 12:
        huclimiter = " and i.huc_12 = '%s' " % (huc,)
    if "iowa" in form:
        huclimiter += " and i.states ~* 'IA' "
    if "mn" in form:
        huclimiter += " and i.states ~* 'MN' "
    if "averaged" in form:
        # 11 years of data is standard
        # 10 years is for the switchgrass one-off
        with get_sqlalchemy_conn("idep") as conn:
            df = read_postgis(
                f"""
            WITH data as (
            SELECT huc_12, sum({v}) / 10. as d from results_by_huc12
            WHERE scenario = %s and to_char(valid, 'mmdd') between %s and %s
            and valid between '2008-01-01' and '2018-01-01'
            GROUP by huc_12)

            SELECT simple_geom as geom,
            coalesce(d.d, 0) * %s as data
            from huc12 i LEFT JOIN data d
            ON (i.huc_12 = d.huc_12) WHERE i.scenario = %s {huclimiter}
            """,
                conn,
                params=(
                    scenario,
                    ts.strftime("%m%d"),
                    ts2.strftime("%m%d"),
                    V2MULTI[v],
                    0,
                ),
                geom_col="geom",
            )

    else:
        with get_sqlalchemy_conn("idep") as conn:
            df = read_postgis(
                f"""
            WITH data as (
            SELECT huc_12, sum({v})  as d from results_by_huc12
            WHERE scenario = %s and valid between %s and %s
            GROUP by huc_12)

            SELECT simple_geom as geom,
            coalesce(d.d, 0) * %s as data
            from huc12 i LEFT JOIN data d
            ON (i.huc_12 = d.huc_12) WHERE i.scenario = %s {huclimiter}
            """,
                conn,
                params=(
                    scenario,
                    ts.strftime("%Y-%m-%d"),
                    ts2.strftime("%Y-%m-%d"),
                    V2MULTI[v],
                    0,
                ),
                geom_col="geom",
            )
    minx, miny, maxx, maxy = df["geom"].total_bounds
    buf = 10000.0  # 10km
    m = MapPlot(
        axisbg="#EEEEEE",
        logo="dep",
        sector="custom",
        south=miny - buf,
        north=maxy + buf,
        west=minx - buf,
        east=maxx + buf,
        projection=projection,
        title="DEP %s by HUC12 %s" % (V2NAME[v], title),
        titlefontsize=16,
        caption="Daily Erosion Project",
    )
    if ts == ts2:
        # Daily
        bins = RAMPS["english"][0]
    else:
        bins = RAMPS["english"][1]
    norm = mpcolors.BoundaryNorm(bins, cmap.N)
    for _, row in df.iterrows():
        p = Polygon(
            row["geom"].exterior.coords,
            fc=cmap(norm([row["data"]]))[0],
            ec="k",
            zorder=5,
            lw=0.1,
        )
        m.ax.add_patch(p)

    label_scenario(m.ax, scenario, pgconn)

    lbl = [round(_, 2) for _ in bins]
    if huc is not None:
        m.drawcounties()
        m.drawcities()
    m.draw_colorbar(
        bins, cmap, norm, units=V2UNITS[v], clevlabels=lbl, spacing="uniform"
    )
    if "progressbar" in form:
        fig = plt.gcf()
        avgval = df["data"].mean()
        fig.text(
            0.01,
            0.905,
            "%s: %4.1f T/a"
            % (ts.year if "averaged" not in form else "Avg", avgval),
            fontsize=14,
        )
        bar_width = 0.758
        # note: slightly off for leap years having 366 days
        proportion = (ts2 - ts).days / 365.0 * bar_width
        rect1 = Rectangle(
            (0.15, 0.905),
            bar_width,
            0.02,
            color="k",
            zorder=40,
            transform=fig.transFigure,
            figure=fig,
        )
        fig.patches.append(rect1)
        rect2 = Rectangle(
            (0.151, 0.907),
            proportion,
            0.016,
            color=cmap(norm([avgval]))[0],
            zorder=50,
            transform=fig.transFigure,
            figure=fig,
        )
        fig.patches.append(rect2)
    if "cruse" in form:
        # Crude conversion of T/a to mm depth
        depth = avgval / 5.0
        m.ax.text(
            0.9,
            0.92,
            "%.2fmm" % (depth,),
            zorder=1000,
            fontsize=24,
            transform=m.ax.transAxes,
            ha="center",
            va="center",
            bbox=dict(color="k", alpha=0.5, boxstyle="round,pad=0.1"),
            color="white",
        )
    ram = BytesIO()
    plt.savefig(ram, format="png", dpi=100)
    plt.close()
    ram.seek(0)
    return ram.read(), True
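A sketch of a daily-map call, assuming the module-level lookups (V2NAME, V2MULTI, V2UNITS, RAMPS, EPSG) and helpers (james, dep_erosion, MapPlot, label_scenario, get_dbconn) referenced above are importable; the date is hypothetical:

import datetime

# Hypothetical call: baseline-scenario (0) soil detachment map for a single day
png, ok = make_map(
    None, datetime.date(2022, 5, 5), datetime.date(2022, 5, 5), 0, "avg_loss", {}
)
if ok:  # ok is False when the requested date falls outside the available data
    with open("dep_map.png", "wb") as fh:
        fh.write(png)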