Example #1
def main():
    """Go Main"""
    net = sys.argv[1]
    nt = NetworkTable(net)
    sid = sys.argv[2]

    database = 'asos'
    if net in ('KCCI', 'KELO', 'KIMT'):
        database = 'snet'
    elif net in ('IA_RWIS', ):
        database = 'rwis'
    elif net in ('ISUSM', ):
        database = 'isuag'
    elif net.find('_DCP') > 0:
        database = 'hads'

    fn = "/mesonet/share/windrose/climate/yearly/%s_yearly.png" % (sid, )
    print("%4s %-20.20s -- YR" % (sid, nt.sts[sid]['name']), end='')
    res = windrose(sid, database=database, sname=nt.sts[sid]['name'])
    res.savefig(fn)
    plt.close()
    for month in range(1, 13):
        fn = ("/mesonet/share/windrose/climate/monthly/%02i/%s_%s.png") % (
            month, sid, datetime.datetime(2000, month,
                                          1).strftime("%b").lower())
        print(" %s" % (month, ), end='')
        res = windrose(sid,
                       months=(month, ),
                       database=database,
                       sname=nt.sts[sid]['name'])
        res.savefig(fn)
        plt.close()

    print()
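
Examples #1 and #3 are meant to run as command-line scripts; a minimal preamble they appear to assume is sketched below. The pyiem module paths are assumptions inferred from the names used in the snippets, not confirmed imports.

# Hypothetical preamble for Examples #1 and #3 (pyiem paths are assumed)
import sys
import datetime

from pyiem.network import Table as NetworkTable  # assumption
from pyiem.plot.use_agg import plt  # matches the import shown in Example #9
from pyiem.windrose_utils import windrose  # assumption

if __name__ == "__main__":
    main()  # e.g. python make_windroses.py IA_ASOS AMW (script name is hypothetical)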
Example #2
def plot():
    """Go Plot Go."""
    dfs = {}
    for scenario in [0, 36, 37, 38]:
        dfs[scenario] = pd.read_csv(
            '/tmp/s%s.csv' % (scenario, )).set_index('sday')
        dfs[scenario]['accum'] = dfs[scenario]['avg'].cumsum()

    for i, sday in enumerate(dfs[0].index.values):
        if i == 0:
            continue
        (fig, ax) = plt.subplots(1, 1)
        for scenario in [0, 36, 37, 38]:
            df = dfs[scenario]
            ax.plot(
                range(i), df.iloc[:i]['avg'],
                label=SCENARIOS[scenario], lw=2)
        ax.set_xlim(0, 366)
        ax.set_ylim(0, 0.2)
        ax.grid(True)
        ax.legend(loc=2)
        ax.set_xticks((1, 32, 60, 91, 121, 152, 182, 213, 244, 274, 305,
                       335, 365))
        ax.set_xticklabels(calendar.month_abbr[1:])
        ax.set_ylabel("Hillslope Soil Loss [T/a/day]")
        ax.set_title("2008-2017 DEP Daily Average Hillslope Soil Loss")

        fig.savefig('/tmp/frames/%05i.png' % (i - 1, ))
        plt.close()
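
Example #2 relies on a module-level SCENARIOS lookup that is not shown; a minimal stand-in with illustrative labels could look like the sketch below, and the numbered frames it writes are convenient for stitching into an animation.

# Hypothetical stand-in for the SCENARIOS labels referenced in Example #2
SCENARIOS = {
    0: "Baseline",  # label text is illustrative only
    36: "Scenario 36",
    37: "Scenario 37",
    38: "Scenario 38",
}
# The frames written to /tmp/frames/ can then be combined with e.g.
#   ffmpeg -i /tmp/frames/%05d.png soil_loss.mp4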
Example #3
def main():
    """Go Main"""
    net = sys.argv[1]
    nt = NetworkTable(net)
    sid = sys.argv[2]

    database = "asos"
    if net in ("KCCI", "KELO", "KIMT"):
        database = "snet"
    elif net in ("IA_RWIS", ):
        database = "rwis"
    elif net in ("ISUSM", ):
        database = "isuag"
    elif net.find("_DCP") > 0:
        database = "hads"

    mydir = "%s/%s/%s" % (CACHE_DIR, net, sid)
    if not os.path.isdir(mydir):
        os.makedirs(mydir)
    fn = "%s/%s_yearly.png" % (mydir, sid)
    res = windrose(sid, database=database, sname=nt.sts[sid]["name"])
    res.savefig(fn)
    plt.close()
    for month in range(1, 13):
        fn = ("%s/%s_%s.png") % (
            mydir,
            sid,
            datetime.datetime(2000, month, 1).strftime("%b").lower(),
        )
        res = windrose(sid,
                       months=(month, ),
                       database=database,
                       sname=nt.sts[sid]["name"])
        res.savefig(fn)
        plt.close()
Example #4
def error_image(message, fmt):
    """Create an error image"""
    plt.close()
    _, ax = plt.subplots(1, 1)
    msg = "IEM Autoplot generation resulted in an error\n%s" % (message,)
    ax.text(0.5, 0.5, msg, transform=ax.transAxes, ha="center", va="center")
    ram = BytesIO()
    plt.axis("off")
    plt.savefig(ram, format=fmt, dpi=100)
    ram.seek(0)
    plt.close()
    return ram.read()
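
A quick way to exercise error_image() from Example #4; the message and output path are just for illustration.

# Hypothetical usage of error_image(): write the generated PNG to disk
png_bytes = error_image("database timeout", "png")
with open("/tmp/autoplot_error.png", "wb") as fh:
    fh.write(png_bytes)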
Example #5
def main(argv):
    """Go Main Go"""
    pgconn = get_dbconn('postgis')
    pcursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    pcursor2 = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)

    # Preparation
    sortOpt = argv[1]
    ts = datetime.datetime.utcnow() - datetime.timedelta(hours=1)
    sts = ts.replace(tzinfo=pytz.utc,
                     hour=0,
                     minute=0,
                     second=0,
                     microsecond=0)
    if len(argv) == 5:
        sts = sts.replace(year=int(argv[1]),
                          month=int(argv[2]),
                          day=int(argv[3]))
        sortOpt = argv[4]

    ets = sts + datetime.timedelta(hours=24)

    opts = {
        'W': {
            'fnadd': '-wfo',
            'sortby': 'wfo ASC, phenomena ASC, eventid ASC'
        },
        'S': {
            'fnadd': '',
            'sortby': 'size DESC'
        },
        'T': {
            'fnadd': '-time',
            'sortby': 'issue ASC'
        }
    }

    # Defaults
    thumbpx = 100
    cols = 10

    # Find largest polygon either in height or width
    sql = """SELECT *, ST_area2d(ST_transform(geom,2163)) as size,
      (ST_xmax(ST_transform(geom,2163)) -
       ST_xmin(ST_transform(geom,2163))) as width,
      (ST_ymax(ST_transform(geom,2163)) -
       ST_ymin(ST_transform(geom,2163))) as height
      from sbw_%s WHERE status = 'NEW' and issue >= '%s' and issue < '%s' and
      phenomena IN ('TO','SV') """ % (sts.year, sts, ets)
    pcursor.execute(sql)

    maxDimension = 0
    mybuffer = 10000
    i = 0
    torCount = 0
    torSize = 0
    svrCount = 0
    svrSize = 0
    for row in pcursor:
        w = float(row['width'])
        h = float(row['height'])
        if w > maxDimension:
            maxDimension = w
        if h > maxDimension:
            maxDimension = h

        if row['phenomena'] == "SV":
            svrCount += 1
            svrSize += float(row['size'])
        if row['phenomena'] == "TO":
            torCount += 1
            torSize += float(row['size'])
        i += 1

    sql = """
        SELECT phenomena, sum( ST_area2d(ST_transform(u.geom,2163)) ) as size
        from warnings_%s w JOIN ugcs u on (u.gid = w.gid)
        WHERE issue >= '%s' and issue < '%s' and
        significance = 'W' and phenomena IN ('TO','SV') GROUP by phenomena
    """ % (sts.year, sts, ets)

    pcursor.execute(sql)
    for row in pcursor:
        if row['phenomena'] == "TO":
            totalTorCar = 100.0 * (1.0 - (torSize / float(row['size'])))
        if row['phenomena'] == "SV":
            totalSvrCar = 100.0 * (1.0 - (svrSize / float(row['size'])))

    # Make mosaic image
    header = 35
    mosaic = Image.new('RGB', (thumbpx * cols,
                               ((int(i / cols) + 1) * thumbpx) + header))
    draw = ImageDraw.Draw(mosaic)

    imagemap = open('imap.txt', 'w')
    utcnow = datetime.datetime.utcnow()
    imagemap.write("<!-- %s %s -->\n" %
                   (utcnow.strftime("%Y-%m-%d %H:%M:%S"), sortOpt))
    imagemap.write("<map name='mymap'>\n")

    # Find my polygons
    gdf = read_postgis("""
        SELECT *, ST_area2d(ST_transform(geom,2163)) as size,
        (ST_xmax(ST_transform(geom,2163)) +
         ST_xmin(ST_transform(geom,2163))) /2.0 as xc,
        (ST_ymax(ST_transform(geom,2163)) +
         ST_ymin(ST_transform(geom,2163))) /2.0 as yc,
         ST_transform(geom, 2163) as utmgeom
        from sbw_""" + str(sts.year) + """ WHERE
        status = 'NEW' and issue >= %s and issue < %s and
        phenomena IN ('TO','SV') and eventid is not null
        ORDER by """ + opts[sortOpt]['sortby'] + """
    """,
                       pgconn,
                       params=(sts, ets),
                       geom_col='utmgeom',
                       index_col=None)

    # Write metadata to image
    tmp = Image.open("logo_small.png")
    mosaic.paste(tmp, (3, 2))
    s = "IEM Summary of NWS Storm Based Warnings issued %s UTC" % (
        sts.strftime("%d %b %Y"), )
    (w, h) = FONT2.getsize(s)
    draw.text((54, 3), s, font=FONT2)

    s = "Generated: %s UTC" % (
        datetime.datetime.utcnow().strftime("%d %b %Y %H:%M:%S"), )
    draw.text((54, 3 + h), s, font=FONT10)

    if svrCount > 0:
        s = ("%3i SVR: Avg Size %5.0f km^2 CAR: %.0f%%") % (
            svrCount, (svrSize / float(svrCount)) / 1000000, totalSvrCar)
        draw.text((54 + w + 10, 8), s, font=FONT10, fill="#ffff00")

    if torCount > 0:
        s = ("%3i TOR: Avg Size %5.0f km^2 CAR: %.0f%%") % (
            torCount, (torSize / float(torCount)) / 1000000, totalTorCar)
        draw.text((54 + w + 10, 22), s, font=FONT10, fill="#ff0000")

    if pcursor.rowcount == 0:
        s = "No warnings in database for this date"
        draw.text((100, 78), s, font=FONT2, fill="#ffffff")

    i = 0
    for _, row in gdf.iterrows():
        # - Map each polygon
        x0 = float(row['xc']) - (maxDimension / 2.0) - mybuffer
        x1 = float(row['xc']) + (maxDimension / 2.0) + mybuffer
        y0 = float(row['yc']) - (maxDimension / 2.0) - 1.75 * mybuffer
        y1 = float(row['yc']) + (maxDimension / 2.0) + 0.25 * mybuffer

        fig = plt.figure(figsize=(thumbpx / 100., thumbpx / 100.))
        ax = plt.axes([0, 0, 1, 1], facecolor='black')
        ax.set_xlim(x0, x1)
        ax.set_ylim(y0, y1)
        for poly in row['utmgeom']:
            xs, ys = poly.exterior.xy
            color = 'r' if row['phenomena'] == 'TO' else 'yellow'
            ax.plot(xs, ys, color=color, lw=2)
        fig.savefig('tmp.png')
        plt.close()

        my = int(i / cols) * thumbpx + header
        mx0 = (i % cols) * thumbpx
        # - Add each polygon to mosaic
        tmp = Image.open("tmp.png")
        mosaic.paste(tmp, (mx0, my))
        del tmp
        os.remove("tmp.png")

        # Compute CAR!
        sql = """
            select sum(ST_area2d(ST_transform(u.geom,2163))) as csize
            from warnings_%s w
            JOIN ugcs u on (u.gid = w.gid) WHERE
            phenomena = '%s' and significance = '%s' and eventid = %s
            and w.wfo = '%s'
            """ % (row['issue'].year, row['phenomena'], row['significance'],
                   row['eventid'], row['wfo'])

        pcursor2.execute(sql)
        row2 = pcursor2.fetchone()
        car = "NA"
        carColor = (255, 255, 255)
        if row2 and row2['csize'] is not None:
            csize = float(row2['csize'])
            carF = 100.0 * (1.0 - (row['size'] / csize))
            car = "%.0f" % (carF, )
            if carF > 75:
                carColor = (0, 255, 0)
            if carF < 25:
                carColor = (255, 0, 0)

        # Draw Text!
        issue = row['issue']
        s = "%s.%s.%s.%s" % (row['wfo'], row['phenomena'], row['eventid'],
                             issue.strftime("%H%M"))
        # (w, h) = font10.getsize(s)
        # print s, h
        draw.text((mx0 + 2, my + thumbpx - 10), s, font=FONT10)
        s = "%.0f sq km %s%%" % (row['size'] / 1000000.0, car)
        draw.text((mx0 + 2, my + thumbpx - (20)),
                  s,
                  font=FONT10,
                  fill=carColor)

        # Image map
        url = ("/vtec/#%s-O-NEW-K%s-%s-%s-%04i") % (
            ts.year, row['wfo'], row['phenomena'], row['significance'],
            row['eventid'])
        altxt = "Click for text/image"
        imagemap.write(
            ("<area href=\"%s\" alt=\"%s\" title=\"%s\" "
             "shape=\"rect\" coords=\"%s,%s,%s,%s\">\n") %
            (url, altxt, altxt, mx0, my, mx0 + thumbpx, my + thumbpx))
        i += 1

    for i in range(len(gdf.index)):
        my = int(i / cols) * thumbpx + header
        mx0 = (i % cols) * thumbpx
        if mx0 == 0:
            draw.line(
                (0, my + thumbpx + 2, (thumbpx * cols), my + thumbpx + 2),
                (0, 120, 200))

    mosaic.save("test.png")
    del mosaic

    imagemap.write("</map>")
    imagemap.close()

    cmd = ("/home/ldm/bin/pqinsert -p "
           "'plot a %s0000 blah sbwsum%s.png png' test.png") % (
               sts.strftime("%Y%m%d"), opts[sortOpt]['fnadd'])
    subprocess.call(cmd, shell=True)

    cmd = ("/home/ldm/bin/pqinsert -p "
           "'plot a %s0000 blah sbwsum-imap%s.txt txt' imap.txt") % (
               sts.strftime("%Y%m%d"), opts[sortOpt]['fnadd'])
    subprocess.call(cmd, shell=True)

    os.remove("test.png")
    os.remove("imap.txt")
Example #6
def main(argv):
    """Do things"""
    dfs = []
    for fn in glob.glob("/i/0/wb/07100004/0704/*"):
        df = read_wb(fn)
        df["fpath"] = int(fn.split("_")[1][:-3])
        dfs.append(df)
    df = pd.concat(dfs)
    ranges = df.groupby(["fpath", "ofe"]).describe()
    year = 2018
    for doy in tqdm(range(1, 365)):
        date = datetime.date(year, 1, 1) + datetime.timedelta(days=(doy - 1))
        wb = df[(df["year"] == year) & (df["jday"] == doy)].copy()
        wb = wb.set_index(["fpath", "ofe"])
        for f2 in ["sw1", "sw2", "sw"]:
            for f1 in ["min", "max"]:
                wb["%s_%s" % (f2, f1)] = ranges[f2, f1]
            wb["%s_range" % (f2, )] = (wb["%s_max" % (f2, )] - wb["%s_min" %
                                                                  (f2, )])
            wb["%s_percent" % (f2, )] = ((wb[f2] - wb["%s_min" % (f2, )]) /
                                         wb["%s_range" % (f2, )] * 100.0)

        sns.set(style="white", palette="muted", color_codes=True)
        (fig, ax) = plt.subplots(3, 2, figsize=(7, 7))
        sns.despine(left=True)
        fig.text(
            0.5,
            0.98,
            "%s :: Water Balance for 071000040704" %
            (date.strftime("%d %B %Y"), ),
            ha="center",
        )

        # ---------------------------------------------------
        myax = ax[0, 0]
        sns.distplot(wb["sw1"], hist=False, color="g", ax=myax, rug=True)
        myax.set_xlabel("0-10cm Soil Water [mm]")
        myax.axvline(wb["sw1"].mean(), color="r")
        myax.set_xlim(0, 60)

        myax = ax[0, 1]
        sns.distplot(wb["sw1_percent"],
                     hist=False,
                     color="g",
                     ax=myax,
                     rug=True)
        myax.set_xlabel("0-10cm Soil Water of Capacity [%]")
        myax.axvline(wb["sw1_percent"].mean(), color="r")
        myax.set_xlim(0, 100)

        # ---------------------------------------------------------
        myax = ax[1, 0]
        sns.distplot(wb["sw2"], hist=False, color="g", ax=myax, rug=True)
        myax.set_xlabel("10-20cm Soil Water [mm]")
        myax.axvline(wb["sw2"].mean(), color="r")
        myax.set_xlim(0, 60)

        myax = ax[1, 1]
        sns.distplot(wb["sw2_percent"],
                     hist=False,
                     color="g",
                     ax=myax,
                     rug=True)
        myax.set_xlabel("10-20cm Soil Water of Capacity [%]")
        myax.axvline(wb["sw2_percent"].mean(), color="r")
        myax.set_xlim(0, 100)

        # -------------------------------------------------------
        myax = ax[2, 0]
        sns.distplot(wb["sw"], hist=False, color="g", ax=myax, rug=True)
        myax.set_xlabel("Total Soil Water [mm]")
        myax.axvline(wb["sw"].mean(), color="r")
        myax.set_xlim(150, 650)

        myax = ax[2, 1]
        sns.distplot(wb["sw_percent"],
                     hist=False,
                     color="g",
                     ax=myax,
                     rug=True)
        myax.set_xlabel("Total Soil Water of Capacity [%]")
        myax.axvline(wb["sw_percent"].mean(), color="r")
        myax.set_xlim(0, 100)

        plt.setp(ax, yticks=[])
        plt.tight_layout()
        fig.savefig("frames/%05i.png" % (doy - 1, ))
        plt.close()
Example #7
    def close(self):
        ''' Close the figure in the case of batch processing '''
        plt.close()
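
Example #7 is a method lifted out of a class; the sketch below shows one plausible shape of such a wrapper for batch plotting. The class name and the other methods are invented for illustration.

import matplotlib.pyplot as plt  # other snippets here use pyiem's Agg-backed plt


class BatchPlot:
    """Hypothetical wrapper showing where a close() like Example #7 could live."""

    def __init__(self, title):
        (self.fig, self.ax) = plt.subplots(1, 1)
        self.ax.set_title(title)

    def save(self, filename):
        """Write the current figure to disk."""
        self.fig.savefig(filename)

    def close(self):
        ''' Close the figure in the case of batch processing '''
        plt.close()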
Example #8
def workflow(environ, form, fmt):
    """we need to return a status and content"""
    # q is the full query string that was rewritten to use by apache
    q = form.get("q", "")
    fdict = parser(q)
    # p=number is the python backend code called by this framework
    scriptnum = int(form.get("p", 0))
    dpi = int(fdict.get("dpi", 100))

    # memcache keys can not have spaces
    mckey = get_mckey(scriptnum, fdict, fmt)
    mc = memcache.Client(["iem-memcached:11211"], debug=0)
    # Don't fetch memcache when we have _cb set for an inbound CGI
    res = mc.get(mckey) if fdict.get("_cb") is None else None
    if res:
        return HTTP200, res
    # memcache failed to save us work, so work we do!
    start_time = datetime.datetime.utcnow()
    # res should be a 3 length tuple
    try:
        res, meta = get_res_by_fmt(scriptnum, fmt, fdict)
    except NoDataFound as exp:
        return HTTP400, handle_error(exp, fmt, environ.get("REQUEST_URI"))
    except Exception as exp:
        # Everything else should be considered fatal
        return HTTP500, handle_error(exp, fmt, environ.get("REQUEST_URI"))
    end_time = datetime.datetime.utcnow()
    sys.stderr.write(
        ("Autoplot[%3s] Timing: %7.3fs Key: %s\n")
        % (scriptnum, (end_time - start_time).total_seconds(), mckey)
    )

    [mixedobj, df, report] = res
    # Our output content
    content = ""
    if fmt == "js" and isinstance(mixedobj, dict):
        content = ('$("#ap_container").highcharts(%s);') % (
            json.dumps(mixedobj),
        )
    elif fmt in ["js", "mapbox"]:
        content = mixedobj
    elif fmt in ["svg", "png", "pdf"] and isinstance(mixedobj, plt.Figure):
        # if our content is a figure, then add some fancy metadata to plot
        if meta.get("plotmetadata", True):
            plot_metadata(mixedobj, start_time, end_time, scriptnum)
        ram = BytesIO()
        plt.savefig(ram, format=fmt, dpi=dpi)
        plt.close()
        ram.seek(0)
        content = ram.read()
        del ram
    elif fmt in ["svg", "png", "pdf"] and mixedobj is None:
        return (
            HTTP400,
            error_image(
                ("plot requested but backend " "does not support plots"), fmt
            ),
        )
    elif fmt == "txt" and report is not None:
        content = report
    elif fmt in ["csv", "xlsx"] and df is not None:
        if fmt == "csv":
            content = df.to_csv(index=(df.index.name is not None), header=True)
        elif fmt == "xlsx":
            # Can't write to ram buffer yet, unimplemented upstream
            (_, tmpfn) = tempfile.mkstemp()
            df.index.name = None
            # Need to set engine as xlsx/xls can't be inferred
            with pd.ExcelWriter(tmpfn, engine="openpyxl") as writer:
                df.to_excel(writer, encoding="latin-1", sheet_name="Sheet1")
            content = open(tmpfn, "rb").read()
            os.unlink(tmpfn)
        del df
    else:
        sys.stderr.write(
            ("Undefined edge case: fmt: %s uri: %s\n")
            % (fmt, environ.get("REQUEST_URI"))
        )
        raise Exception("Undefined autoplot action |%s|" % (fmt,))

    try:
        mc.set(mckey, content, meta.get("cache", 43200))
    except Exception as exp:
        sys.stderr.write(
            "Exception while writting key: %s\n%s\n" % (mckey, exp)
        )
    if isinstance(mixedobj, plt.Figure):
        plt.close()
    return HTTP200, content
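
Stripped of the autoplot specifics, the caching logic at the top and bottom of Example #8 is a plain check-compute-store pattern; a minimal sketch, with an illustrative helper name and TTL, follows.

# Minimal sketch of the memcache pattern used in Example #8 (helper name is hypothetical)
import memcache


def fetch_or_compute(mckey, compute, ttl=43200):
    """Return the cached value for mckey, computing and storing it on a miss."""
    mc = memcache.Client(["iem-memcached:11211"], debug=0)
    res = mc.get(mckey)
    if res:
        return res
    res = compute()  # the expensive plot/data generation step
    mc.set(mckey, res, ttl)  # Example #8 wraps this in try/except and logs failures
    return res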
Example #9
def application(environ, start_response):
    """Process this request

    This should look something like "/onsite/features/2016/11/161125.png"
    """
    headers = [("Accept-Ranges", "bytes")]
    uri = environ.get("REQUEST_URI")
    # Option 1, no URI is provided.
    if uri is None:
        headers.append(get_content_type("text"))
        start_response("500 Internal Server Error", headers)
        return [b"ERROR!"]
    match = PATTERN.match(uri)
    # Option 2, the URI pattern is unknown.
    if match is None:
        headers.append(get_content_type("text"))
        start_response("500 Internal Server Error", headers)
        sys.stderr.write("feature content failure: %s\n" % (repr(uri), ))
        return [b"ERROR!"]

    data = match.groupdict()
    fn = ("/mesonet/share/features/%(yyyy)s/%(mm)s/"
          "%(yymmdd)s%(extra)s.%(suffix)s") % data
    # Option 3, we have no file.
    if not os.path.isfile(fn):
        # lazy import to save the expense of firing this up when this loads
        # pylint: disable=import-outside-toplevel
        from pyiem.plot.use_agg import plt

        headers.append(get_content_type("png"))
        (_, ax) = plt.subplots(1, 1)
        ax.text(
            0.5,
            0.5,
            "Feature Image was not Found!",
            transform=ax.transAxes,
            ha="center",
        )
        plt.axis("off")
        ram = BytesIO()
        plt.savefig(ram, format="png")
        plt.close()
        ram.seek(0)
        start_response("404 Not Found", headers)
        return [ram.read()]

    # Option 4, we can support this request.
    headers.append(get_content_type(data["suffix"]))
    rng = environ.get("HTTP_RANGE", "bytes=0-")
    tokens = rng.replace("bytes=", "").split("-", 1)
    resdata = open(fn, "rb").read()
    totalsize = len(resdata)
    stripe = slice(
        int(tokens[0]),
        totalsize if tokens[-1] == "" else (int(tokens[-1]) + 1),
    )
    status = "200 OK"
    if totalsize != (stripe.stop - stripe.start):
        status = "206 Partial Content"
    headers.append(("Content-Length", "%.0f" % (stripe.stop - stripe.start, )))
    if environ.get("HTTP_RANGE") and stripe is not None:
        secondval = ("" if environ.get("HTTP_RANGE") == "bytes=0-" else
                     (stripe.stop - 1))
        headers.append((
            "Content-Range",
            "bytes %s-%s/%s" % (stripe.start, secondval, totalsize),
        ))
    dblog(data["yymmdd"])
    start_response(status, headers)
    return [resdata[stripe]]
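
Example #9 depends on a module-level PATTERN regex with named groups yyyy, mm, yymmdd, extra and suffix. Based on the format string and the example URI in the docstring, it is presumably something along these lines; this is a reconstruction, not the actual definition.

# Hypothetical reconstruction of the PATTERN used in Example #9
import re

PATTERN = re.compile(
    r"^/onsite/features/(?P<yyyy>\d{4})/(?P<mm>\d{2})/"
    r"(?P<yymmdd>\d{6})(?P<extra>[^.]*)\.(?P<suffix>\w+)$"
)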
Example #10
def run(nexrad, name, network, cname):
    """Do some work!"""
    cmap = get_cmap(cname)
    cmap.set_bad("white")

    today = utc()

    pgconn = get_dbconn("radar", user="******")
    df = read_sql(
        """
    SELECT drct, sknt, extract(doy from valid) as doy, valid
    from nexrad_attributes_log WHERE nexrad = %s and sknt > 0
    """,
        pgconn,
        params=(nexrad, ),
        index_col=None,
    )
    if df.empty:
        print("No results for %s" % (nexrad, ))
        return
    minvalid = df["valid"].min()

    years = (today - minvalid).days / 365.25
    fig = plt.figure(figsize=(10.24, 7.68), dpi=100)
    ax = [None, None]
    ax[0] = fig.add_axes([0.06, 0.53, 0.99, 0.39])
    ax[1] = fig.add_axes([0.06, 0.06, 0.99, 0.39])

    H2, xedges, yedges = np.histogram2d(
        df["drct"].values,
        df["sknt"].values,
        bins=(36, 15),
        range=[[0, 360], [0, 70]],
    )
    H2 = np.ma.array(H2 / years)
    H2.mask = np.where(H2 < 1, True, False)
    res = ax[0].pcolormesh(xedges, yedges, H2.transpose(), cmap=cmap)
    fig.colorbar(res, ax=ax[0], extend="neither")
    ax[0].set_xlim(0, 360)
    ax[0].set_ylabel("Storm Speed [kts]")
    ax[0].set_xlabel("Movement Direction (from)")
    ax[0].set_xticks((0, 90, 180, 270, 360))
    ax[0].set_xticklabels(("N", "E", "S", "W", "N"))
    ax[0].set_title(("Storm Attributes Histogram\n%s - %s K%s %s (%s)\n"
                     "%s total attrs, units are ~ (attrs+scans)/year") % (
                         minvalid.strftime("%d %b %Y"),
                         today.strftime("%d %b %Y"),
                         nexrad,
                         name,
                         network,
                         len(df.index),
                     ))
    ax[0].grid(True)

    H2, xedges, yedges = np.histogram2d(
        df["doy"].values,
        df["drct"].values,
        bins=(36, 36),
        range=[[0, 365], [0, 360]],
    )
    H2 = np.ma.array(H2 / years)
    H2.mask = np.where(H2 < 1, True, False)
    res = ax[1].pcolormesh(xedges, yedges, H2.transpose(), cmap=cmap)
    fig.colorbar(res, ax=ax[1], extend="neither")
    ax[1].set_ylim(0, 360)
    ax[1].set_ylabel("Movement Direction (from)")
    ax[1].set_yticks((0, 90, 180, 270, 360))
    ax[1].set_yticklabels(("N", "E", "S", "W", "N"))
    ax[1].set_xticks(
        (1, 32, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335, 365))
    ax[1].set_xticklabels(calendar.month_abbr[1:])
    ax[1].set_xlim(0, 365)
    ax[1].grid(True)

    ax[1].set_xlabel(("Generated %s by Iowa Environmental Mesonet") %
                     (today.strftime("%d %b %Y"), ))

    fig.savefig("%s_histogram.png" % (nexrad, ))
    plt.close()
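
A hypothetical call of run() from Example #10; the radar identifier, labels, and colormap name are examples, not values taken from the original script.

# Hypothetical invocation of run() from Example #10
run("DMX", "Des Moines", "NEXRAD", "viridis")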
Example #11
def make_overviewmap(form):
    """Draw a pretty map of just the HUC."""
    huc = form.get("huc")
    plt.close()
    projection = EPSG[5070]
    huclimiter = ""
    if huc is not None and len(huc) >= 8:
        huclimiter = " and substr(huc_12, 1, 8) = '%s' " % (huc[:8],)
    with get_sqlalchemy_conn("idep") as conn:
        df = read_postgis(
            f"""
            SELECT simple_geom as geom, huc_12,
            ST_x(ST_Transform(ST_Centroid(geom), 4326)) as centroid_x,
            ST_y(ST_Transform(ST_Centroid(geom), 4326)) as centroid_y,
            hu_12_name
            from huc12 i WHERE i.scenario = 0 {huclimiter}
        """,
            conn,
            geom_col="geom",
            index_col="huc_12",
        )
    minx, miny, maxx, maxy = df["geom"].total_bounds
    buf = float(form.get("zoom", 10.0)) * 1000.0  # 10km
    hucname = "" if huc not in df.index else df.at[huc, "hu_12_name"]
    subtitle = "The HUC8 is in tan"
    if huc is not None and len(huc) == 12:
        subtitle = "HUC12 highlighted in red, the HUC8 it resides in is in tan"
    m = MapPlot(
        axisbg="#EEEEEE",
        logo="dep",
        sector="custom",
        south=miny - buf,
        north=maxy + buf,
        west=minx - buf,
        east=maxx + buf,
        projection=projection,
        continentalcolor="white",
        title="DEP HUC %s:: %s" % (huc, hucname),
        subtitle=subtitle,
        titlefontsize=20,
        subtitlefontsize=18,
        caption="Daily Erosion Project",
    )
    for _huc12, row in df.iterrows():
        p = Polygon(
            row["geom"].exterior.coords,
            fc="red" if _huc12 == huc else "tan",
            ec="k",
            zorder=Z_OVERLAY2,
            lw=0.1,
        )
        m.ax.add_patch(p)
        # If this is our HUC, add some text to prevent cities overlay overlap
        if _huc12 == huc:
            m.plot_values(
                [row["centroid_x"]],
                [row["centroid_y"]],
                ["    .    "],
                color="None",
                outlinecolor="None",
            )
    if huc is not None:
        m.drawcounties()
        m.drawcities()
    ram = BytesIO()
    plt.savefig(ram, format="png", dpi=100)
    plt.close()
    ram.seek(0)
    return ram.read(), True
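
make_overviewmap() from Example #11 takes a CGI-style form dict; a hypothetical call for a single HUC12 (the identifier below is illustrative) looks like:

# Hypothetical invocation of make_overviewmap() from Example #11
png_bytes, _cacheable = make_overviewmap({"huc": "070801050306", "zoom": 20})
with open("/tmp/huc12_overview.png", "wb") as fh:
    fh.write(png_bytes)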
Example #12
def make_map(huc, ts, ts2, scenario, v, form):
    """Make the map"""
    projection = EPSG[5070]
    plt.close()
    # suggested for runoff and precip
    if v in ["qc_precip", "avg_runoff"]:
        # c = ['#ffffa6', '#9cf26d', '#76cc94', '#6399ba', '#5558a1']
        cmap = james()
    # suggested for detachment
    elif v in ["avg_loss"]:
        # c =['#cbe3bb', '#c4ff4d', '#ffff4d', '#ffc44d', '#ff4d4d', '#c34dee']
        cmap = dep_erosion()
    # suggested for delivery
    elif v in ["avg_delivery"]:
        # c =['#ffffd2', '#ffff4d', '#ffe0a5', '#eeb74d', '#ba7c57', '#96504d']
        cmap = dep_erosion()

    pgconn = get_dbconn("idep")
    cursor = pgconn.cursor()

    title = "for %s" % (ts.strftime("%-d %B %Y"),)
    if ts != ts2:
        title = "for period between %s and %s" % (
            ts.strftime("%-d %b %Y"),
            ts2.strftime("%-d %b %Y"),
        )
        if "averaged" in form:
            title = "averaged between %s and %s (2008-2017)" % (
                ts.strftime("%-d %b"),
                ts2.strftime("%-d %b"),
            )

    # Check that we have data for this date!
    cursor.execute(
        "SELECT value from properties where key = 'last_date_0'",
    )
    lastts = datetime.datetime.strptime(cursor.fetchone()[0], "%Y-%m-%d")
    floor = datetime.date(2007, 1, 1)
    if ts > lastts.date() or ts2 > lastts.date() or ts < floor:
        plt.text(
            0.5,
            0.5,
            "Data Not Available\nPlease Check Back Later!",
            fontsize=20,
            ha="center",
        )
        ram = BytesIO()
        plt.savefig(ram, format="png", dpi=100)
        plt.close()
        ram.seek(0)
        return ram.read(), False
    if huc is None:
        huclimiter = ""
    elif len(huc) == 8:
        huclimiter = " and substr(i.huc_12, 1, 8) = '%s' " % (huc,)
    elif len(huc) == 12:
        huclimiter = " and i.huc_12 = '%s' " % (huc,)
    if "iowa" in form:
        huclimiter += " and i.states ~* 'IA' "
    if "mn" in form:
        huclimiter += " and i.states ~* 'MN' "
    if "averaged" in form:
        # 11 years of data is standard
        # 10 years is for the switchgrass one-off
        with get_sqlalchemy_conn("idep") as conn:
            df = read_postgis(
                f"""
            WITH data as (
            SELECT huc_12, sum({v}) / 10. as d from results_by_huc12
            WHERE scenario = %s and to_char(valid, 'mmdd') between %s and %s
            and valid between '2008-01-01' and '2018-01-01'
            GROUP by huc_12)

            SELECT simple_geom as geom,
            coalesce(d.d, 0) * %s as data
            from huc12 i LEFT JOIN data d
            ON (i.huc_12 = d.huc_12) WHERE i.scenario = %s {huclimiter}
            """,
                conn,
                params=(
                    scenario,
                    ts.strftime("%m%d"),
                    ts2.strftime("%m%d"),
                    V2MULTI[v],
                    0,
                ),
                geom_col="geom",
            )

    else:
        with get_sqlalchemy_conn("idep") as conn:
            df = read_postgis(
                f"""
            WITH data as (
            SELECT huc_12, sum({v})  as d from results_by_huc12
            WHERE scenario = %s and valid between %s and %s
            GROUP by huc_12)

            SELECT simple_geom as geom,
            coalesce(d.d, 0) * %s as data
            from huc12 i LEFT JOIN data d
            ON (i.huc_12 = d.huc_12) WHERE i.scenario = %s {huclimiter}
            """,
                conn,
                params=(
                    scenario,
                    ts.strftime("%Y-%m-%d"),
                    ts2.strftime("%Y-%m-%d"),
                    V2MULTI[v],
                    0,
                ),
                geom_col="geom",
            )
    minx, miny, maxx, maxy = df["geom"].total_bounds
    buf = 10000.0  # 10km
    m = MapPlot(
        axisbg="#EEEEEE",
        logo="dep",
        sector="custom",
        south=miny - buf,
        north=maxy + buf,
        west=minx - buf,
        east=maxx + buf,
        projection=projection,
        title="DEP %s by HUC12 %s" % (V2NAME[v], title),
        titlefontsize=16,
        caption="Daily Erosion Project",
    )
    if ts == ts2:
        # Daily
        bins = RAMPS["english"][0]
    else:
        bins = RAMPS["english"][1]
    norm = mpcolors.BoundaryNorm(bins, cmap.N)
    for _, row in df.iterrows():
        p = Polygon(
            row["geom"].exterior.coords,
            fc=cmap(norm([row["data"]]))[0],
            ec="k",
            zorder=5,
            lw=0.1,
        )
        m.ax.add_patch(p)

    label_scenario(m.ax, scenario, pgconn)

    lbl = [round(_, 2) for _ in bins]
    if huc is not None:
        m.drawcounties()
        m.drawcities()
    m.draw_colorbar(
        bins, cmap, norm, units=V2UNITS[v], clevlabels=lbl, spacing="uniform"
    )
    if "progressbar" in form:
        fig = plt.gcf()
        avgval = df["data"].mean()
        fig.text(
            0.01,
            0.905,
            "%s: %4.1f T/a"
            % (ts.year if "averaged" not in form else "Avg", avgval),
            fontsize=14,
        )
        bar_width = 0.758
        # yes, a small one off with years having 366 days
        proportion = (ts2 - ts).days / 365.0 * bar_width
        rect1 = Rectangle(
            (0.15, 0.905),
            bar_width,
            0.02,
            color="k",
            zorder=40,
            transform=fig.transFigure,
            figure=fig,
        )
        fig.patches.append(rect1)
        rect2 = Rectangle(
            (0.151, 0.907),
            proportion,
            0.016,
            color=cmap(norm([avgval]))[0],
            zorder=50,
            transform=fig.transFigure,
            figure=fig,
        )
        fig.patches.append(rect2)
    if "cruse" in form:
        # Crude conversion of T/a to mm depth
        depth = avgval / 5.0
        m.ax.text(
            0.9,
            0.92,
            "%.2fmm" % (depth,),
            zorder=1000,
            fontsize=24,
            transform=m.ax.transAxes,
            ha="center",
            va="center",
            bbox=dict(color="k", alpha=0.5, boxstyle="round,pad=0.1"),
            color="white",
        )
    ram = BytesIO()
    plt.savefig(ram, format="png", dpi=100)
    plt.close()
    ram.seek(0)
    return ram.read(), True
Example #13
def main(argv):
    """Run main Run."""
    fn = argv[1]
    df = pd.read_csv(fn)
    sdate = datetime.datetime.strptime(df.columns[0], '%Y%m%d')
    df.columns = ['precip_mm']
    df['date'] = pd.date_range(sdate, periods=len(df.index))
    df.set_index('date', inplace=True)
    gdf = df.groupby([df.index.year, df.index.month]).sum().copy()
    gdf.reset_index(level=1, inplace=True)
    gdf.columns = ['month', 'precip_mm']
    gdf.reset_index(inplace=True)
    gdf.columns = ['year', 'month', 'precip_mm']
    gdf = pd.pivot_table(gdf, index='year', values='precip_mm',
                         columns='month')
    print(gdf)
    fig, ax = plt.subplots(figsize=(9, 6))
    sns.heatmap(gdf, annot=True, fmt='.0f', cmap='YlGnBu', linewidths=.5,
                ax=ax)
    ax.set_xticklabels(calendar.month_abbr[1:])
    tokens = fn.split("/")
    ax.set_title("Monthly Precipitation [mm] %s %s" % (tokens[-3], tokens[-1]))
    fig.savefig('%s_monthly_total.png' % (tokens[-1][:-4], ))
    plt.close()

    # -------------------------------------
    for threshold in [0.25, 25]:
        df2 = df[df['precip_mm'] > threshold]
        gdf = df2.groupby(
            [df2.index.year, df2.index.month]).count().copy()
        gdf.reset_index(level=1, inplace=True)
        gdf.columns = ['month', 'precip_mm']
        gdf.reset_index(inplace=True)
        gdf.columns = ['year', 'month', 'precip_mm']
        gdf = pd.pivot_table(gdf, index='year', values='precip_mm',
                             columns='month')
        print(gdf)
        fig, ax = plt.subplots(figsize=(9, 6))
        sns.heatmap(gdf, annot=True, fmt='.0f', cmap='YlGnBu', linewidths=.5,
                    ax=ax)
        ax.set_xticklabels(calendar.month_abbr[1:])
        tokens = fn.split("/")
        ax.set_title("Daily Events >= %smm %s %s" % (
            threshold, tokens[-3], tokens[-1]))
        fig.savefig('%s_%s_counts.png' % (tokens[-1][:-4], threshold))
        plt.close()

    # -------------------------------------
    gdf = df.groupby(
        [df.index.year, df.index.month]).max().copy()
    gdf.reset_index(level=1, inplace=True)
    gdf.columns = ['month', 'precip_mm']
    gdf.reset_index(inplace=True)
    gdf.columns = ['year', 'month', 'precip_mm']
    gdf = pd.pivot_table(gdf, index='year', values='precip_mm',
                         columns='month')
    print(gdf)
    fig, ax = plt.subplots(figsize=(9, 6))
    sns.heatmap(gdf, annot=True, fmt='.0f', cmap='YlGnBu', linewidths=.5,
                ax=ax)
    ax.set_xticklabels(calendar.month_abbr[1:])
    tokens = fn.split("/")
    ax.set_title("Max Daily Precip [mm] %s %s" % (tokens[-3], tokens[-1]))
    fig.savefig('%s_monthly_max.png' % (tokens[-1][:-4], ))
    plt.close()
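
Example #13 expects one command-line argument: a CSV whose single column holds daily precipitation in millimeters and whose header encodes the start date as YYYYMMDD. A hypothetical invocation, with the directory layout assumed from how tokens[-3] and tokens[-1] are used in the plot titles:

# Hypothetical invocation of Example #13 (script name and path layout are assumed)
#   python precip_heatmaps.py /data/hillslopes/station_x/daily/precip.csv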