def main():
    """Emit a PNG soil-loss plot for the requested HUC12/scenario."""
    fields = cgi.FieldStorage()
    watershed = fields.getfirst('huc12', '000000000000')[:12]
    scenario_id = int(fields.getfirst('scenario', 0))
    ssw("Content-type: image/png\n\n")
    ssw(make_plot(watershed, scenario_id))
def check_auth(form):
    """Make sure the request carries the shared-key hash; exit otherwise.

    Uses hmac.compare_digest so the comparison runs in constant time and
    the shared key cannot be probed via response-timing differences; a
    missing hash parameter is treated as the empty string rather than
    raising a TypeError inside the comparison.
    """
    import hmac  # local import: file-level import block is out of view
    supplied = form.getfirst('hash')
    if not hmac.compare_digest(supplied or '',
                               config['appauth']['sharedkey']):
        ssw("Content-type: text/plain\n\n")
        ssw("Unauthorized request!")
        sys.stderr.write(("Unauthorized CSCAP hash=%s") % (supplied,))
        sys.exit()
def main():
    """See how we are called"""
    form = cgi.FieldStorage()
    # default to a demo watershed; ishuc12 records whether a full
    # 12-character id was supplied (selects the caption wording below)
    huc12 = form.getfirst('huc', '070801050306')[:12]
    ishuc12 = (len(huc12) == 12)
    bio = BytesIO()
    styles = getSampleStyleSheet()
    doc = SimpleDocTemplate(bio, pagesize=letter, topMargin=(inch * 1.5))
    story = []
    story.append(Paragraph(INTROTEXT, styles['Normal']))
    story.append(Spacer(inch, inch * 0.25))
    story.append(Paragraph('Geographic Location', styles['Heading1']))
    # two stacked map renders (wide overview, then zoomed) each paired
    # with its localized caption
    story.append(Table([
        [[Image(get_image_bytes(
            ('http://dailyerosion.local/'
             'auto/map.wsgi?overview=1&huc=%s&zoom=250') % (huc12,)),
                width=3.6*inch, height=2.4*inch),
          Paragraph(LOCALIZATION['F1'][int(ishuc12)], styles['Normal'])],
         [Image(get_image_bytes(
             ('http://dailyerosion.local/'
              'auto/map.wsgi?overview=1&huc=%s&zoom=11') % (huc12,)),
                width=3.6*inch, height=2.4*inch),
          Paragraph(LOCALIZATION['F2'][int(ishuc12)], styles['Normal'])]]
    ]))
    story.append(Spacer(inch, inch * 0.25))
    story.append(Paragraph('DEP Input Data', styles['Heading1']))
    story.extend(generate_run_metadata(huc12))
    story.append(PageBreak())
    story.append(Spacer(inch, inch * 0.25))
    story.append(Paragraph('Yearly Summary', styles['Heading1']))
    story.append(generate_summary_table(huc12))
    story.append(Paragraph(('Table 2: Average value does not include the '
                            'current year. Events column are the number of '
                            'days with non-zero soil loss. '
                            '(* year to date total)'
                            ), styles['Normal']))
    story.append(PageBreak())
    story.append(Spacer(inch, inch * 0.25))
    story.append(Paragraph('Monthly Summary', styles['Heading1']))
    story.append(generate_monthly_summary_table(huc12))
    story.append(Paragraph(
        ('Table 3: Monthly Totals. Events column are the number of '
         'days with non-zero soil loss. (* month to date total)'
         ), styles['Normal']))

    def pagecb(canvas, doc):
        """Proxy to our draw_header func"""
        draw_header(canvas, doc, huc12)

    doc.build(story, onFirstPage=pagecb, onLaterPages=pagecb)
    ssw('Content-type: application/pdf\n\n')
    ssw(bio.getvalue())
def main():
    """Authenticate the caller, then dispatch the requested report."""
    form = cgi.FieldStorage()
    check_auth(form)
    which = form.getfirst('report', 'ag1')
    if which == 'ag1':
        ssw("Content-type: text/plain\n\n")
        ssw(get_agdata())
    elif which == 'dl':
        # coming from internal website
        ssw(get_dl(form))
    else:
        ssw("Content-type: text/plain\n\n")
        ssw(get_nitratedata())
def main():
    """Apply a quality-control edit to a data table and log the edit.

    The table and column names arrive from the client and cannot be
    bound as query parameters (psycopg2 only parameterizes values), so
    they are validated against a strict identifier pattern before being
    spliced into the SQL to prevent injection.
    """
    import re  # local import: file-level import block is out of view
    form = cgi.FieldStorage()
    remote_user = os.environ.get('REMOTE_USER', 'anonymous')
    ssw("Content-type: application/json\n\n")
    # Figure out what we are editing
    table = form.getfirst('table')
    valid = datetime.datetime.strptime(form.getfirst('valid')[:19],
                                       '%Y-%m-%dT%H:%M:%S')
    valid = valid.replace(tzinfo=pytz.utc)
    column = form.getfirst('column')
    uniqueid = form.getfirst('uniqueid')
    plotid = form.getfirst('plotid')
    value = form.getfirst('value')
    comment = form.getfirst('comment')
    if value == 'null':
        value = None
    if table == "decagon_data":
        # We have to do some hackery straighten this out
        uniqueid, plotid, column = decagon_logic(uniqueid, plotid)
    # Reject anything that is not a plain lowercase SQL identifier
    ident = re.compile(r'^[a-z][a-z0-9_]*$')
    if (table is None or column is None or
            not ident.match(table) or not ident.match(column)):
        ssw(json.dumps({'status': 'ERROR: invalid table or column'}))
        return
    dbname = ('sustainablecorn'
              if os.environ.get('DATATEAM_APP') == 'cscap' else 'td')
    pgconn = get_dbconn(dbname)
    cursor = pgconn.cursor()
    cursor.execute("""UPDATE """ + table + """
        SET """ + column + """_qc = %s, """ + column + """_qcflag = 'w'
        WHERE uniqueid = %s and plotid = %s and valid = %s
        """, (value, uniqueid, plotid, valid))
    res = {}
    if cursor.rowcount == 1:
        res['status'] = 'OK'
    else:
        sys.stderr.write(repr(uniqueid) + repr(plotid) + repr(valid))
        res['status'] = 'ERROR: Failed to find database entry'
    # audit trail of every accepted edit
    cursor.execute("""INSERT into website_edits(
        username, edit_table, uniqueid, plotid, valid,
        edit_column, newvalue, comment)
        VALUES (%s, %s, %s, %s, %s, %s, %s, %s)
        """, (remote_user, table, uniqueid, plotid, valid,
              column, value, comment))
    cursor.close()
    pgconn.commit()
    ssw(json.dumps(res))
def dowork(form):
    """Build the PRISM daily-climate JSON response for a point.

    Returns the JSON string on success; emits an error payload and
    returns None when the point is outside the PRISM grid.
    """
    dates = compute_dates(form.getfirst('valid'))
    lat = float(form.getfirst("lat"))
    lon = float(form.getfirst("lon"))
    i, j = prism.find_ij(lon, lat)
    # Guard BEFORE building the response: the original called int(i)
    # first, so an out-of-domain point raised TypeError(int(None))
    # instead of returning the friendly error message
    if i is None or j is None:
        ssw(json.dumps({'error': 'Coordinates outside of domain'}))
        return
    res = {'gridi': int(i), 'gridj': int(j), 'data': [],
           'disclaimer': ("PRISM Climate Group, Oregon State University, "
                          "http://prism.oregonstate.edu, created "
                          "4 Feb 2004.")}
    for dpair in dates:
        sts = dpair[0]
        ets = dpair[-1]
        sidx = prism.daily_offset(sts)
        eidx = prism.daily_offset(ets) + 1
        ncfn = "/mesonet/data/prism/%s_daily.nc" % (sts.year, )
        if not os.path.isfile(ncfn):
            continue
        with ncopen(ncfn) as nc:
            tmax = nc.variables['tmax'][sidx:eidx, j, i]
            tmin = nc.variables['tmin'][sidx:eidx, j, i]
            ppt = nc.variables['ppt'][sidx:eidx, j, i]
        for tx, (mt, nt, pt) in enumerate(zip(tmax, tmin, ppt)):
            valid = sts + datetime.timedelta(days=tx)
            res['data'].append({
                'valid': valid.strftime("%Y-%m-%dT12:00:00Z"),
                'high_f': myrounder(
                    datatypes.temperature(mt, 'C').value('F'), 1),
                'low_f': myrounder(
                    datatypes.temperature(nt, 'C').value('F'), 1),
                'precip_in': myrounder(
                    datatypes.distance(pt, 'MM').value('IN'), 2)
            })
    return json.dumps(res)
def main():
    """Stream one hour of archived METARs for the requested valid hour."""
    pgconn = get_dbconn('asos', user='******')
    check_load(pgconn.cursor())
    acursor = pgconn.cursor("streamer")
    ssw("Content-type: text/plain\n\n")
    form = cgi.FieldStorage()
    stamp = datetime.datetime.strptime(
        form.getfirst('valid', '2016010100')[:10], '%Y%m%d%H')
    stamp = stamp.replace(tzinfo=pytz.utc)
    # the archive is partitioned by year
    table = "t%s" % (stamp.year, )
    acursor.execute(
        """ SELECT metar from """ + table + """
        WHERE valid >= %s and valid < %s and metar is not null
        ORDER by valid ASC """,
        (stamp, stamp + datetime.timedelta(hours=1)))
    for record in acursor:
        ssw("%s\n" % (record[0].replace("\n", " "), ))
def main():
    """Serve (memcached) GR level-3 attribute content for a radar site."""
    form = cgi.FieldStorage()
    site = form.getfirst('nexrad', '').upper()[:3]
    if site == '':
        # no site given: try to match the supplied longitude against
        # the RADARS lookup text
        lon = form.getfirst('lon')
        if lon is not None:
            for entry in RADARS.split("\n"):
                if entry.find(lon) > 0:
                    site = entry[1:4].upper()
    mckey = "/request/grx/i3attr|%s" % (site, )
    mc = memcache.Client(['iem-memcached:11211'], debug=0)
    res = mc.get(mckey)
    if not res:
        res = produce_content(site)
        mc.set(mckey, res, 60)
    ssw("Content-type: text/plain\n\n")
    ssw(res)
def get_time_bounds(form, tzinfo):
    """ Figure out the exact time bounds desired

    Returns (sts, ets) localized via *tzinfo*; a zero-length interval is
    widened to one full day.  Exits with a friendly message on bad input.
    """
    # Here lie dragons, so tricky to get a proper timestamp
    try:
        # the int() conversions live INSIDE the try so that non-numeric
        # CGI values also produce the friendly error instead of an
        # uncaught traceback (matches the sibling asos.py variant)
        y1 = int(form.getfirst('year1'))
        y2 = int(form.getfirst('year2'))
        m1 = int(form.getfirst('month1'))
        m2 = int(form.getfirst('month2'))
        d1 = int(form.getfirst('day1'))
        d2 = int(form.getfirst('day2'))
        sts = tzinfo.localize(datetime.datetime(y1, m1, d1))
        ets = tzinfo.localize(datetime.datetime(y2, m2, d2))
    except Exception as _exp:
        ssw("ERROR: Malformed Date!")
        sys.exit()
    if sts == ets:
        ets += datetime.timedelta(days=1)
    return sts, ets
def main():
    """ Do something, one time

    Resolve the requested camera id and time window, then emit the JSON
    produced by dance().
    """
    form = cgi.FieldStorage()
    cid = form.getvalue("cid", 'KCCI-016')
    start_ts = form.getvalue('start_ts', None)
    end_ts = form.getvalue('end_ts', None)
    date = form.getvalue('date', None)
    if date is not None:
        start_ts = datetime.datetime.strptime(date, '%Y%m%d')
        # pytz gotcha: replace(tzinfo=pytz.timezone(...)) attaches the
        # zone's LMT offset (-05:51 for Chicago); localize() applies the
        # correct wall-clock offset for this date
        start_ts = pytz.timezone("America/Chicago").localize(start_ts)
        end_ts = start_ts + datetime.timedelta(days=1)
    else:
        start_ts = datetime.datetime.strptime(start_ts, '%Y%m%d%H%M')
        start_ts = start_ts.replace(tzinfo=pytz.utc)
        end_ts = datetime.datetime.strptime(end_ts, '%Y%m%d%H%M')
        end_ts = end_ts.replace(tzinfo=pytz.utc)
    ssw("Content-type: application/json\n\n")
    ssw(json.dumps(dance(cid, start_ts, end_ts)))
def get_time_bounds(form, tzinfo):
    """Resolve the requested start/end timestamps in the given timezone.

    Returns (sts, ets); exits with a friendly message on malformed
    input, and widens a zero-length interval to a full day.
    """
    # Here lie dragons, so tricky to get a proper timestamp
    try:
        (y1, y2, m1, m2, d1, d2) = [
            int(form.getfirst(key))
            for key in ("year1", "year2", "month1", "month2",
                        "day1", "day2")]
        sts = tzinfo.localize(datetime.datetime(y1, m1, d1))
        ets = tzinfo.localize(datetime.datetime(y2, m2, d2))
    except Exception as exp:
        sys.stderr.write("asos.py malformed date: %s\n" % (exp, ))
        ssw("ERROR: Malformed Date!")
        sys.exit()
    if sts == ets:
        ets += datetime.timedelta(days=1)
    return sts, ets
def get_climate(network, stations):
    """Fetch the NCDC-81 climatology for the given stations.

    Returns {station: {"MMDD": {high, low, precip}}} covering every day
    of a (leap) year, with 'M' entries where climatology is missing.
    """
    nt = NetworkTable(network)
    if not nt.sts:
        ssw("ERROR: Invalid network specified")
        return
    data = dict()
    cldata = dict()
    clisites = []
    for station in stations:
        site = nt.sts[station]['ncdc81']
        cldata[site] = dict()
        clisites.append(site)
    if not clisites:
        return data
    # NOTE(review): a lone site gets a dummy 'XX' companion, presumably
    # to keep the "in %s" tuple rendering happy -- confirm
    if len(clisites) == 1:
        clisites.append('XX')
    mesosite = get_dbconn('coop')
    cursor = mesosite.cursor()
    cursor.execute(
        """ SELECT station, valid, high, low, precip from ncdc_climate81
        where station in %s """, (tuple(clisites), ))
    for (site, day, high, low, precip) in cursor:
        cldata[site][day.strftime("%m%d")] = {
            'high': high,
            'low': low,
            'precip': precip
        }
    sts = datetime.datetime(2000, 1, 1)
    ets = datetime.datetime(2001, 1, 1)
    for stid in stations:
        data[stid] = dict()
        clsite = nt.sts[stid]['ncdc81']
        now = sts
        while now < ets:
            key = now.strftime("%m%d")
            data[stid][key] = cldata[clsite].get(
                key, dict(high='M', low='M', precip='M'))
            now += datetime.timedelta(days=1)
    return data
def main():
    """ go main go """
    form = cgi.FieldStorage()
    ctx = {}
    ctx['stations'] = get_cgi_stations(form)
    ctx['sts'], ctx['ets'] = get_cgi_dates(form)
    ctx['myvars'] = form.getlist("vars[]")
    # Model specification trumps vars[]
    if form.getfirst('model') is not None:
        ctx['myvars'] = [form.getfirst('model')]
    for key, cgikey, default in (
            ('what', 'what', 'view'),
            ('delim', 'delim', 'comma'),
            ('inclatlon', 'gis', 'no'),
            ('scenario', 'scenario', 'no'),
            ('hayhoe_scenario', 'hayhoe_scenario', None),
            ('hayhoe_model', 'hayhoe_model', None)):
        ctx[key] = form.getfirst(cgikey, default)
    ctx['scenario_year'] = 2099
    if ctx['scenario'] == 'yes':
        ctx['scenario_year'] = int(form.getfirst('scenario_year', 2099))
    ctx['scenario_sts'], ctx['scenario_ets'] = get_scenario_period(ctx)
    # TODO: this code stinks and is likely buggy
    if "apsim" in ctx['myvars']:
        ssw("Content-type: text/plain\n\n")
    elif "dndc" not in ctx['myvars'] and ctx['what'] != 'excel':
        if ctx['what'] == 'download':
            ssw("Content-type: application/octet-stream\n")
            dlfn = "changeme.txt"
            if len(ctx['stations']) < 10:
                dlfn = "%s.txt" % ("_".join(ctx['stations']), )
            ssw(("Content-Disposition: attachment; "
                 "filename=%s\n\n" % (dlfn, )))
        else:
            ssw("Content-type: text/plain\n\n")
    # OK, now we fret -- first matching model wins, else the simple path
    for model, handler in (("daycent", do_daycent), ("century", do_century),
                           ("apsim", do_apsim), ("dndc", do_dndc),
                           ("salus", do_salus), ("swat", do_swat)):
        if model in ctx['myvars']:
            handler(ctx)
            break
    else:
        do_simple(ctx)
def main():
    """Serve (memcached) HUC12 GeoJSON for one date or a date range."""
    ssw("Content-Type: application/vnd.geo+json\n\n")
    form = cgi.FieldStorage()
    callback = form.getfirst('callback', None)
    domain = form.getfirst('domain', None)
    ts = datetime.datetime.strptime(
        form.getfirst('date', '2015-05-05'), '%Y-%m-%d')
    ts2 = None
    if form.getfirst('date2', None) is not None:
        ts2 = datetime.datetime.strptime(
            form.getfirst('date2'), '%Y-%m-%d')
    mckey = ("/geojson/huc12/%s/%s/%s") % (
        ts.strftime("%Y%m%d"),
        '' if ts2 is None else ts2.strftime("%Y%m%d"),
        '' if domain is None else domain)
    mc = memcache.Client(['iem-memcached:11211'], debug=0)
    res = mc.get(mckey)
    if not res:
        res = do(ts, ts2, domain)
        mc.set(mckey, res, 3600)
    if callback is None:
        ssw(res)
    else:
        ssw("%s(%s)" % (callback, res))
def main():
    """Search NWS text products by awipsid over a time window (cached)."""
    ssw("Content-type: application/json\n\n")
    form = cgi.FieldStorage()
    awipsid = form.getfirst('awipsid')[:6]
    sts = form.getfirst('sts')
    ets = form.getfirst('ets')
    callback = form.getfirst('callback', None)
    mckey = "/json/nwstext_search/%s/%s/%s?callback=%s" % (
        sts, ets, awipsid, callback)
    mc = memcache.Client(['iem-memcached:11211'], debug=0)
    res = mc.get(mckey)
    if not res:
        sts = datetime.datetime.strptime(sts, '%Y-%m-%dT%H:%MZ')
        sts = sts.replace(tzinfo=pytz.utc)
        ets = datetime.datetime.strptime(ets, '%Y-%m-%dT%H:%MZ')
        ets = ets.replace(tzinfo=pytz.utc)
        now = datetime.datetime.utcnow().replace(tzinfo=pytz.utc)
        # a window that is fully in the past can never change, so cache
        # it forever (0); otherwise only briefly
        cacheexpire = 0 if ets < now else 120
        res = run(sts, ets, awipsid)
        mc.set(mckey, res, cacheexpire)
    if callback is None:
        ssw(res)
    else:
        ssw("%s(%s)" % (callback, res))
def main():
    """Serve (memcached) VTEC event listings as JSON."""
    ssw("Content-type: application/json\n\n")
    form = cgi.FieldStorage()
    wfo = form.getfirst("wfo", "MPX")
    if len(wfo) == 4:
        # 4-character identifiers: drop the first character
        wfo = wfo[1:]
    year = int(form.getfirst("year", 2015))
    phenomena = form.getfirst('phenomena')
    significance = form.getfirst('significance')
    callback = form.getfirst("callback", None)
    mckey = "/json/vtec_events/%s/%s/%s/%s" % (
        wfo, year, phenomena, significance)
    mc = memcache.Client(['iem-memcached:11211'], debug=0)
    res = mc.get(mckey)
    if not res:
        res = run(wfo, year, phenomena, significance)
        mc.set(mckey, res, 60)
    if callback is None:
        ssw(res)
    else:
        ssw("%s(%s)" % (callback, res))
def main():
    """Serve SPC outlook GeoJSON for a point, with memcached fronting."""
    ssw("Content-type: application/vnd.geo+json\n\n")
    form = cgi.FieldStorage()
    lat = float(form.getfirst('lat', 42.0))
    lon = float(form.getfirst('lon', -95.0))
    time = form.getfirst('time')
    last = int(form.getfirst('last', 0))
    day = int(form.getfirst('day', 1))
    cat = form.getfirst('cat', 'categorical').upper()
    callback = form.getfirst('callback', None)
    hostname = os.environ.get("SERVER_NAME", "")
    mckey = ("/json/spcoutlook/%.4f/%.4f/%s/%s/%s/%s"
             ) % (lon, lat, last, day, cat, time)
    mc = memcache.Client(['iem-memcached:11211'], debug=0)
    # the local development host always bypasses the cache
    res = None if hostname == 'iem.local' else mc.get(mckey)
    if not res:
        if time is not None:
            res = dotime(time, lon, lat, day, cat)
        else:
            res = dowork(lon, lat, last, day, cat)
        mc.set(mckey, res, 3600)
    if callback is None:
        ssw(res)
    else:
        ssw("%s(%s)" % (cgi.escape(callback, quote=True), res))
def main():
    """Serve (memcached) a single VTEC event as GeoJSON."""
    ssw("Content-type: application/vnd.geo+json\n\n")
    form = cgi.FieldStorage()
    wfo = form.getfirst("wfo", "MPX")
    if len(wfo) == 4:
        # 4-character identifiers: drop the first character
        wfo = wfo[1:]
    year = int(form.getfirst("year", 2015))
    phenomena = form.getfirst('phenomena', 'SV')[:2]
    significance = form.getfirst('significance', 'W')[:1]
    etn = int(form.getfirst('etn', 1))
    callback = form.getfirst("callback", None)
    mckey = "/json/vtec_event/%s/%s/%s/%s/%s" % (
        wfo, year, phenomena, significance, etn)
    mc = memcache.Client(['iem-memcached:11211'], debug=0)
    res = mc.get(mckey)
    if not res:
        res = run(wfo, year, phenomena, significance, etn)
        mc.set(mckey, res, 300)
    if callback is None:
        ssw(res)
    else:
        ssw("%s(%s)" % (cgi.escape(callback, quote=True), res))
def run(sts, ets):
    """ Get data!

    Query the daily and hourly FEEL tables for [sts, ets) and stream an
    Excel workbook back to the client.
    """
    dbconn = get_dbconn("other", user="******")
    # parameterized queries: never interpolate timestamps into SQL
    sql = """SELECT * from feel_data_daily WHERE
    valid >= %s and valid < %s ORDER by valid ASC"""
    df = pd.read_sql(sql, dbconn, params=(sts, ets))
    sql = """SELECT * from feel_data_hourly WHERE
    valid >= %s and valid < %s ORDER by valid ASC"""
    df2 = pd.read_sql(sql, dbconn, params=(sts, ets))

    def fmt(val):
        """Lovely hack."""
        return val.strftime("%Y-%m-%d %H:%M")

    df2["valid"] = df2["valid"].apply(fmt)
    with pd.ExcelWriter("/tmp/ss.xlsx") as writer:
        df.to_excel(writer, "Daily Data", index=False)
        df2.to_excel(writer, "Hourly Data", index=False)
    ssw("Content-type: application/vnd.ms-excel\n")
    ssw("Content-Disposition: attachment;Filename=feel.xls\n\n")
    # close the handle promptly instead of leaking it
    with open("/tmp/ss.xlsx", "rb") as fh:
        ssw(fh.read())
    os.unlink("/tmp/ss.xlsx")
def do_work(valid, prod):
    """Convert an archived RASTER into a netCDF file and send it.

    Looks up the product's value table, reads the PNG raster for the
    given timestamp, writes a temporary netCDF file and streams it as
    an attachment, then removes the temp file.
    """
    # Get lookup table
    xref, template, units, long_name = get_table(prod)
    # Get RASTER
    fn = valid.strftime(template)
    if not os.path.isfile(fn):
        send_error("ERROR: The IEM Archives do not have this file available")
    raster = np.flipud(np.array(Image.open(fn)))
    (ypoints, xpoints) = raster.shape
    # build lat, lon arrays
    lons, lats = get_gridinfo(fn, xpoints, ypoints)
    # create netcdf file
    tmpname = make_netcdf(xpoints, ypoints, lons, lats)
    with ncopen(tmpname, 'a') as nc:
        # np.float was a deprecated alias of the builtin float and is
        # removed in numpy >= 1.24; float64 is the explicit equivalent
        ncvar = nc.createVariable(
            prod, np.float64, ('lat', 'lon'), zlib=True, fill_value=1.e20)
        ncvar.units = units
        ncvar.long_name = long_name
        ncvar.coordinates = "lon lat"
        # convert RASTER via lookup table
        ncvar[:] = xref[raster]
    # send data to user
    ssw("Content-type: application/octet-stream\n")
    ssw("Content-Disposition: attachment; filename=res.nc\n\n")
    # close the handle promptly instead of leaking it
    with open(tmpname, 'rb') as fh:
        ssw(fh.read())
    # remove tmp netcdf file
    os.unlink(tmpname)
def main():
    """Geocode a street address via Google and print "lat,lng"."""
    props = get_properties()
    form = cgi.FieldStorage()
    if "address" in form:
        address = form["address"].value
    elif "street" in form and "city" in form:
        address = "%s, %s" % (form["street"].value, form["city"].value)
    else:
        ssw("APIFAIL")
        return
    req = requests.get(
        SERVICE,
        params=dict(address=address, key=props["google.maps.key2"],
                    sensor="true"),
        timeout=10,
    )
    data = req.json()
    if not data["results"]:
        ssw("ERROR")
        return
    location = data["results"][0]["geometry"]["location"]
    ssw("%s,%s" % (location["lat"], location["lng"]))
def send_error(viewopt, msg):
    """Report a failure to the client and terminate.

    The 'js' view emits an alert() carrying the module-level ERRMSG
    (not *msg*); every other view renders *msg* onto a PNG.
    """
    if viewopt == 'js':
        ssw("Content-type: application/javascript\n\n")
        ssw("alert('" + ERRMSG + "');")
        sys.exit()
    ram = BytesIO()
    fig, ax = plt.subplots(1, 1)
    ax.text(0.5, 0.5, msg, transform=ax.transAxes, ha='center')
    ssw("Content-type: image/png\n\n")
    fig.savefig(ram, format='png')
    ram.seek(0)
    ssw(ram.read())
    sys.exit()
def main():
    """Serve a VTEC event (optionally SBW polygon or LSRs) as GeoJSON."""
    ssw("Content-type: application/vnd.geo+json\n\n")
    form = cgi.FieldStorage()
    wfo = form.getfirst("wfo", "MPX")
    if len(wfo) == 4:
        # 4-character identifiers: drop the first character
        wfo = wfo[1:]
    year = int(form.getfirst("year", 2015))
    phenomena = form.getfirst('phenomena', 'SV')[:2]
    significance = form.getfirst('significance', 'W')[:1]
    etn = int(form.getfirst('etn', 1))
    sbw = int(form.getfirst('sbw', 0))
    lsrs = int(form.getfirst('lsrs', 0))
    callback = form.getfirst("callback", None)
    mckey = ("/geojson/vtec_event/%s/%s/%s/%s/%s/%s/%s") % (
        wfo, year, phenomena, significance, etn, sbw, lsrs)
    mc = memcache.Client(['iem-memcached:11211'], debug=0)
    res = mc.get(mckey)
    if not res:
        if lsrs == 1:
            res = run_lsrs(wfo, year, phenomena, significance, etn, sbw)
        elif sbw == 1:
            res = run_sbw(wfo, year, phenomena, significance, etn)
        else:
            res = run(wfo, year, phenomena, significance, etn)
        mc.set(mckey, res, 3600)
    if callback is None:
        ssw(res)
    else:
        ssw("%s(%s)" % (callback, res))
def main():
    """Serve SPC watch GeoJSON, by point when lat/lon given, else by time."""
    ssw("Content-type: application/vnd.geo+json\n\n")
    form = cgi.FieldStorage()
    raw_ts = form.getfirst('ts', None)
    lat = float(form.getfirst('lat', 0))
    lon = float(form.getfirst('lon', 0))
    if raw_ts is None:
        ts = datetime.datetime.utcnow()
    else:
        ts = datetime.datetime.strptime(raw_ts, '%Y%m%d%H%M')
        ts = ts.replace(tzinfo=pytz.utc)
    callback = form.getfirst('callback', None)
    pointmode = lat != 0 and lon != 0
    if pointmode:
        mckey = ("/json/spcwatch/%.4f/%.4f") % (lon, lat)
    else:
        mckey = "/json/spcwatch/%s" % (ts.strftime("%Y%m%d%H%M"), )
    mc = memcache.Client(['iem-memcached:11211'], debug=0)
    res = mc.get(mckey)
    if not res:
        res = pointquery(lon, lat) if pointmode else dowork(ts)
        mc.set(mckey, res)
    if callback is None:
        ssw(res)
    else:
        ssw("%s(%s)" % (callback, res))
def main():
    """Emit the AFOS text product(s) matching a product id, as JSON(P)."""
    pgconn = get_dbconn('afos')
    acursor = pgconn.cursor()
    form = cgi.FieldStorage()
    pid = form.getvalue('product_id', '201302241937-KSLC-NOUS45-PNSSLC')
    callback = form.getvalue('callback', None)
    # the product id encodes the UTC timestamp (first 12 chars) and the
    # PIL (last 6 chars)
    utc = datetime.datetime.strptime(pid[:12], '%Y%m%d%H%M')
    utc = utc.replace(tzinfo=pytz.utc)
    pil = pid[-6:]
    acursor.execute(
        """ SELECT data from products where pil = %s and entered = %s """,
        (pil, utc))
    root = {'products': [{'data': row[0]} for row in acursor]}
    ssw("Content-type: application/javascript\n\n")
    payload = json.dumps(root)
    if callback is None:
        ssw(payload)
    else:
        ssw("%s(%s)" % (cgi.escape(callback, quote=True), payload))
def send_error(viewopt, msg):
    """Send an error back to the client, then exit.

    The 'js' view alerts with the module-level ERRMSG (not *msg*);
    any other view draws *msg* on a PNG.
    """
    if viewopt == 'js':
        ssw("Content-type: application/javascript\n\n")
        ssw("alert('"+ERRMSG+"');")
        sys.exit()
    fig, ax = plt.subplots(1, 1)
    ax.text(0.5, 0.5, msg, transform=ax.transAxes, ha='center')
    ssw("Content-type: image/png\n\n")
    buf = BytesIO()
    fig.savefig(buf, format='png')
    buf.seek(0)
    ssw(buf.read())
    sys.exit()
def main():
    """ go main go """
    form = cgi.FieldStorage()
    ctx = {}
    ctx["stations"] = get_cgi_stations(form)
    ctx["sts"], ctx["ets"] = get_cgi_dates(form)
    ctx["myvars"] = form.getlist("vars[]")
    # Model specification trumps vars[]
    model = form.getfirst("model")
    if model is not None:
        ctx["myvars"] = [model]
    ctx["what"] = form.getfirst("what", "view")
    ctx["delim"] = form.getfirst("delim", "comma")
    ctx["inclatlon"] = form.getfirst("gis", "no")
    ctx["scenario"] = form.getfirst("scenario", "no")
    ctx["hayhoe_scenario"] = form.getfirst("hayhoe_scenario")
    ctx["hayhoe_model"] = form.getfirst("hayhoe_model")
    ctx["scenario_year"] = (
        int(form.getfirst("scenario_year", 2099))
        if ctx["scenario"] == "yes" else 2099)
    ctx["scenario_sts"], ctx["scenario_ets"] = get_scenario_period(ctx)
    # TODO: this code stinks and is likely buggy
    if "apsim" in ctx["myvars"]:
        ssw("Content-type: text/plain\n\n")
    elif "dndc" not in ctx["myvars"] and ctx["what"] != "excel":
        if ctx["what"] == "download":
            ssw("Content-type: application/octet-stream\n")
            dlfn = "changeme.txt"
            if len(ctx["stations"]) < 10:
                dlfn = "%s.txt" % ("_".join(ctx["stations"]), )
            ssw(("Content-Disposition: attachment; "
                 "filename=%s\n\n" % (dlfn, )))
        else:
            ssw("Content-type: text/plain\n\n")
    # OK, now we fret -- first matching model wins, else the simple path
    handlers = (("daycent", do_daycent), ("century", do_century),
                ("apsim", do_apsim), ("dndc", do_dndc),
                ("salus", do_salus), ("swat", do_swat))
    for name, func in handlers:
        if name in ctx["myvars"]:
            func(ctx)
            break
    else:
        do_simple(ctx)
def main():
    """Go Main

    Serve the (memcached) tms.json payload, rejecting non-GET/POST
    requests with a plain-text error.
    """
    # Validate the method BEFORE any headers are written: the original
    # emitted the JSON Content-Type first, so the error branch produced
    # a second header block inside the response body.  .get() also
    # avoids a KeyError when REQUEST_METHOD is unset.
    if os.environ.get('REQUEST_METHOD') not in ['GET', 'POST']:
        ssw("Content-type: text/plain\n\n")
        ssw("HTTP METHOD NOT ALLOWED")
        return
    ssw("Content-type: application/json\n\n")
    form = cgi.FieldStorage()
    cb = form.getfirst('callback', None)
    mckey = "/json/tms.json"
    mc = memcache.Client(['iem-memcached:11211'], debug=0)
    res = mc.get(mckey)
    if not res:
        res = run()
        mc.set(mckey, res, 15)
    if cb is None:
        ssw(res)
    else:
        ssw("%s(%s)" % (cb, res))
def do(vote):
    """ Do Something, yes do something """
    # the foid cookie records which feature (YYmmdd of its valid date)
    # this browser last voted on
    cookie = SimpleCookie(os.environ.get("HTTP_COOKIE", ''))
    myoid = 0
    if 'foid' in cookie:
        myoid = int(cookie['foid'].value)
    pgconn = get_dbconn('mesosite')
    cursor = pgconn.cursor()
    # current feature and its running tallies
    cursor.execute("""
        SELECT to_char(valid, 'YYmmdd')::int as oid, good, bad, abstain
        from feature ORDER by valid DESC LIMIT 1
    """)
    row = cursor.fetchone()
    foid = row[0]
    d = {'good': row[1], 'bad': row[2], 'abstain': row[3],
         'can_vote': True}
    if myoid == foid:
        # cookie says this browser already voted on the current feature
        d['can_vote'] = False
    if myoid < foid and vote in ['good', 'bad', 'abstain']:
        # Allow this vote
        d[vote] += 1
        # NOTE: vote is whitelisted above, so splicing it into the SQL
        # as a column name is safe here
        cursor.execute("""UPDATE feature SET """+vote+""" = """+vote+""" + 1
        WHERE to_char(valid, 'YYmmdd')::int = %s """, (foid,))
        # Now we set a cookie
        expiration = datetime.datetime.now() + datetime.timedelta(days=4)
        cookie = SimpleCookie()
        cookie["foid"] = foid
        cookie["foid"]["path"] = "/onsite/features/"
        cookie["foid"]["expires"] = expiration.strftime(
            "%a, %d-%b-%Y %H:%M:%S CST")
        ssw(cookie.output() + "\n")
        cursor.close()
        pgconn.commit()
        d['can_vote'] = False
    return d
def main():
    """Emit every turbine as a GeoJSON FeatureCollection."""
    dbconn = get_dbconn("scada")
    cursor = dbconn.cursor()
    ssw("Content-type: application/vnd.geo+json\n\n")
    cursor.execute("""SELECT lon, lat, 'n/a', 'n/a', 'n/a', 'n/a', 'n/a',
    id from turbines""")
    features = []
    for fid, row in enumerate(cursor):
        features.append({
            "type": "Feature",
            "id": fid,
            "properties": {
                "id": row[7],
                "wakes": -1,
                "farmname": row[2],
                "expansion": row[3],
                "unitnumber": row[4],
                "farmnumber": row[5],
                "turbinename": row[6],
            },
            "geometry": {
                "type": "Point",
                "coordinates": [row[0], row[1]]
            },
        })
    data = {
        "type": "FeatureCollection",
        "crs": {
            "type": "EPSG",
            "properties": {
                "code": 4326,
                "coordinate_order": [1, 0]
            },
        },
        "features": features,
    }
    ssw(json.dumps(data))
def send_content_type(val):
    """Emit the Content-type header matching the requested format."""
    if val in ('png', 'gif', 'jpg'):
        ssw("Content-type: image/%s\n\n" % (val, ))
    else:
        # 'text' and anything unrecognized fall back to plain text
        ssw("Content-type: text/plain\n\n")
def main():
    """Record the submitted feature vote and emit the tallies as JSON."""
    form = cgi.FieldStorage()
    choice = form.getfirst('vote', 'missing')
    ssw("Content-type: application/json\n")
    result = do(choice)
    ssw("\n")  # Finalize headers
    ssw(json.dumps(result))
def run():
    """ Do Stuff """
    pgconn = get_dbconn('afos', user='******')
    cursor = pgconn.cursor()
    ssw("Content-type:text/plain\n\n")
    metas = []
    meats = []
    # gather header and data portions per office, then emit them grouped
    for wfo in ['OKX', 'ALY', 'BTV', 'BUF', 'BGM']:
        meta, meat = do(cursor, wfo)
        metas.append(meta)
        meats.append(meat)
    ssw("".join(metas))
    ssw("id,timestamp,value,lat,lon,name,source\n")
    ssw("".join(meats))
def main():
    """Route the requested app name and stream its frame as CSV."""
    form = cgi.FieldStorage()
    appname = form.getfirst('q')
    frame = router(appname)
    ssw("Content-type: text/plain\n\n")
    ssw(frame.to_csv(None, index=False))
    ssw("\n")
def run():
    """ Do Stuff """
    pgconn = get_dbconn("afos", user="******")
    cursor = pgconn.cursor()
    ssw("Content-type:text/plain\n\n")
    # collect per-office (meta, meat) pairs, then emit metas first
    pairs = [do(cursor, wfo)
             for wfo in ["OKX", "ALY", "BTV", "BUF", "BGM"]]
    ssw("".join(meta for meta, _meat in pairs))
    ssw("id,timestamp,value,lat,lon,name,source\n")
    ssw("".join(meat for _meta, meat in pairs))
def postprocess(self, view=False, filename=None, web=False, memcache=None,
                memcachekey=None, memcacheexpire=300, pqstr=None):
    """ postprocess into a slim and trim PNG

    Renders the current matplotlib figure, palettizes it, then serves
    it over the web, stores it in memcache, inserts it via pqinsert,
    previews it, and/or copies it to *filename* per the flags.
    """
    ram = BytesIO()
    plt.savefig(ram, format='png')
    ram.seek(0)
    im = Image.open(ram)
    # palettize to shrink the PNG
    im2 = im.convert('RGB').convert('P', palette=Image.ADAPTIVE)
    if memcache and memcachekey:
        ram = BytesIO()
        im2.save(ram, format='png')
        ram.seek(0)
        r = ram.read()
        memcache.set(memcachekey, r, time=memcacheexpire)
        sys.stderr.write("memcached key %s set time %s" % (memcachekey,
                                                           memcacheexpire))
    if web:
        ssw("Content-Type: image/png\n\n")
        im2.save(getattr(sys.stdout, 'buffer', sys.stdout), format='png')
        return
    # mkstemp (not the race-prone mktemp) creates the file atomically;
    # close our handle and let PIL reopen the path
    (tmpfd, tmpfn) = tempfile.mkstemp()
    os.close(tmpfd)
    im2.save(tmpfn, format='PNG')
    if pqstr is not None:
        subprocess.call("/home/ldm/bin/pqinsert -p '%s' %s" % (pqstr, tmpfn),
                        shell=True)
    if view:
        subprocess.call("xv %s" % (tmpfn, ), shell=True)
    if filename is not None:
        shutil.copyfile(tmpfn, filename)
    os.unlink(tmpfn)
def test_ssw():
    """Does pyiem.util.ssw work?"""
    with mock.patch('sys.stdout', new=BytesIO()) as fake_out:
        # str, bytes and unicode payloads must all land as the same bytes
        for payload in ("Hello Daryl!", b"Hello Daryl!", u"Hello Daryl!"):
            util.ssw(payload)
            assert fake_out.getvalue() == b'Hello Daryl!'
            fake_out.seek(0)
def do_work(form):
    """Stream an Excel workbook of daily weather for the chosen stations.

    Fetches daily observations, appends metric conversions, prepends
    description/units metadata rows, and sends the result as an
    attachment.
    """
    pgconn = get_dbconn('sustainablecorn')
    stations = form.getlist('stations')
    if not stations:
        stations.append("XXX")
    sts, ets = get_cgi_dates(form)
    df = read_sql("""
    SELECT station as uniqueid, valid as day,
    extract(doy from valid) as doy,
    high, low, precip, sknt, srad_mj, drct from weather_data_daily
    WHERE station in %s and valid >= %s and valid <= %s
    ORDER by station ASC, valid ASC
    """, pgconn, params=(tuple(stations), sts, ets), index_col=None)
    df['highc'] = temperature(df['high'].values, 'F').value('C')
    df['lowc'] = temperature(df['low'].values, 'F').value('C')
    df['precipmm'] = distance(df['precip'].values, 'IN').value('MM')
    # build two metadata rows (descriptions, units) to sit above the data
    metarows = [{}, {}]
    cols = df.columns
    for i, colname in enumerate(cols):
        if i == 0:
            metarows[0][colname] = 'description'
            metarows[1][colname] = 'units'
            continue
        metarows[0][colname] = VARDF.get(colname, '')
        metarows[1][colname] = UVARDF.get(colname, '')
    df = pd.concat([pd.DataFrame(metarows), df], ignore_index=True)
    # re-establish the correct column sorting; reindex_axis was
    # deprecated in pandas 0.21 and removed in 1.0
    df = df.reindex(columns=cols)
    writer = pd.ExcelWriter("/tmp/ss.xlsx", engine='xlsxwriter')
    df.to_excel(writer, 'Daily Weather', index=False)
    worksheet = writer.sheets['Daily Weather']
    worksheet.freeze_panes(3, 0)
    writer.close()
    fn = ",".join(stations)
    ssw("Content-type: application/vnd.ms-excel\n")
    ssw(("Content-Disposition: attachment;Filename=wx_%s.xls\n\n"
         ) % (fn, ))
    # close the handle promptly instead of leaking it
    with open('/tmp/ss.xlsx', 'rb') as fh:
        ssw(fh.read())
    os.unlink('/tmp/ss.xlsx')
def send_error():
    """Emit a javascript alert() carrying the generic ERRMSG, then exit."""
    ssw("Content-type: application/javascript\n\n")
    payload = "alert('" + ERRMSG + "');"
    ssw(payload)
    sys.exit()
def make_plot(form):
    """Make the plot.

    Emits Decagon soil temperature + moisture for one site/plot as
    HTML, CSV, Excel, or (default) a Highcharts javascript payload,
    depending on the ``view`` CGI parameter.
    """
    # 'site' arrives as "<uniqueid>::<plotid>"
    (uniqueid, plotid) = form.getfirst('site', 'ISUAG::302E').split("::")
    # NOTE(review): this mutates the module-level DEPTHS table, so the
    # relabeling persists across calls within one process — confirm
    # that is acceptable for this deployment
    if uniqueid in ['KELLOGG', 'MASON']:
        DEPTHS[1] = '-'
        DEPTHS[5] = '80 cm'
    elif uniqueid == 'NAEW':
        DEPTHS[1] = '5 cm'
        DEPTHS[2] = '10 cm'
        DEPTHS[3] = '20 cm'
        DEPTHS[4] = '30 cm'
        DEPTHS[5] = '50 cm'
    sts = datetime.datetime.strptime(form.getfirst('date', '2014-06-10'),
                                     '%Y-%m-%d')
    days = int(form.getfirst('days', 1))
    ets = sts + datetime.timedelta(days=days)
    pgconn = get_dbconn('sustainablecorn')
    tzname = 'America/Chicago' if uniqueid in [
        'ISUAG', 'SERF', 'GILMORE'] else 'America/New_York'
    viewopt = form.getfirst('view', 'js')
    ptype = form.getfirst('ptype', '1')
    # NOTE(review): plotid comes straight from the CGI form and is
    # string-interpolated into the SQL below — SQL injection risk;
    # should be a bound parameter
    plotid_limit = "and plotid = '%s'" % (plotid, )
    depth = form.getfirst('depth', 'all')
    if depth != 'all':
        # single-depth mode plots every plotid, so drop the limit
        plotid_limit = ""
    if ptype == '1':
        # raw observations
        df = read_sql("""SELECT uniqueid, plotid, valid as v,
        d1temp_qc as d1t, d2temp_qc as d2t, d3temp_qc as d3t,
        d4temp_qc as d4t, d5temp_qc as d5t,
        d1moisture_qc as d1m, d2moisture_qc as d2m,
        d3moisture_qc as d3m, d4moisture_qc as d4m,
        d5moisture_qc as d5m from decagon_data
        WHERE uniqueid = %s """+plotid_limit+"""
        and valid between %s and %s ORDER by valid ASC
        """, pgconn, params=(uniqueid, sts.date(), ets.date()))
        df['v'] = df['v'].apply(
            lambda x: x.astimezone(pytz.timezone(tzname)))
    elif ptype in ['3', '4']:
        # hourly ('3') or weekly ('4') averages, truncated in UTC
        res = 'hour' if ptype == '3' else 'week'
        df = read_sql("""SELECT uniqueid, plotid,
        timezone('UTC',
                 date_trunc('"""+res+"""', valid at time zone 'UTC')) as v,
        avg(d1temp_qc) as d1t, avg(d2temp_qc) as d2t,
        avg(d3temp_qc) as d3t, avg(d4temp_qc) as d4t,
        avg(d5temp_qc) as d5t,
        avg(d1moisture_qc) as d1m, avg(d2moisture_qc) as d2m,
        avg(d3moisture_qc) as d3m, avg(d4moisture_qc) as d4m,
        avg(d5moisture_qc) as d5m from decagon_data
        WHERE uniqueid = %s """+plotid_limit+"""
        and valid between %s and %s GROUP by uniqueid, v, plotid
        ORDER by v ASC
        """, pgconn, params=(uniqueid, sts.date(), ets.date()))
        df['v'] = pd.to_datetime(df['v'], utc=True)
    else:
        # daily averages, truncated in the site's local timezone
        df = read_sql("""SELECT uniqueid, plotid,
        timezone('UTC',
                 date_trunc('day', valid at time zone %s)) as v,
        avg(d1temp_qc) as d1t, avg(d2temp_qc) as d2t,
        avg(d3temp_qc) as d3t, avg(d4temp_qc) as d4t,
        avg(d5temp_qc) as d5t,
        avg(d1moisture_qc) as d1m, avg(d2moisture_qc) as d2m,
        avg(d3moisture_qc) as d3m, avg(d4moisture_qc) as d4m,
        avg(d5moisture_qc) as d5m from decagon_data
        WHERE uniqueid = %s """+plotid_limit+"""
        and valid between %s and %s GROUP by uniqueid, v, plotid
        ORDER by v ASC
        """, pgconn, params=(tzname, uniqueid, sts.date(), ets.date()))
        df['v'] = pd.to_datetime(df['v'], utc=True)
    if len(df.index) < 3:
        # not enough data to plot; send_error() exits the process
        send_error()
    if ptype not in ['2']:
        # present timestamps in local time (daily '2' stays as-is)
        df['v'] = df['v'].apply(
            lambda x: x.tz_convert(tzname))
    if viewopt != 'js':
        # human-friendly column labels for the tabular outputs
        df.rename(columns=dict(
            v='timestamp',
            d1t='%s Temp (C)' % (DEPTHS[1], ),
            d2t='%s Temp (C)' % (DEPTHS[2], ),
            d3t='%s Temp (C)' % (DEPTHS[3], ),
            d4t='%s Temp (C)' % (DEPTHS[4], ),
            d5t='%s Temp (C)' % (DEPTHS[5], ),
            d1m='%s Moisture (cm3/cm3)' % (DEPTHS[1], ),
            d2m='%s Moisture (cm3/cm3)' % (DEPTHS[2], ),
            d3m='%s Moisture (cm3/cm3)' % (DEPTHS[3], ),
            d4m='%s Moisture (cm3/cm3)' % (DEPTHS[4], ),
            d5m='%s Moisture (cm3/cm3)' % (DEPTHS[5], ),
        ), inplace=True)
        if viewopt == 'html':
            ssw("Content-type: text/html\n\n")
            ssw(df.to_html(index=False))
            return
        if viewopt == 'csv':
            ssw('Content-type: application/octet-stream\n')
            ssw(('Content-Disposition: attachment; '
                 'filename=%s_%s_%s_%s.csv\n\n'
                 ) % (uniqueid, plotid, sts.strftime("%Y%m%d"),
                      ets.strftime("%Y%m%d")))
            ssw(df.to_csv(index=False))
            return
        if viewopt == 'excel':
            ssw('Content-type: application/octet-stream\n')
            ssw(('Content-Disposition: attachment; '
                 'filename=%s_%s_%s_%s.xlsx\n\n'
                 ) % (uniqueid, plotid, sts.strftime("%Y%m%d"),
                      ets.strftime("%Y%m%d")))
            writer = pd.ExcelWriter('/tmp/ss.xlsx',
                                    options={'remove_timezone': True})
            df.to_excel(writer, 'Data', index=False)
            writer.save()
            ssw(open('/tmp/ss.xlsx', 'rb').read())
            os.unlink('/tmp/ss.xlsx')
            return
    # Begin highcharts output
    lbl = "Plot:%s" % (plotid,)
    if depth != 'all':
        lbl = "Depth:%s" % (DEPTHS[int(depth)],)
    title = ("Decagon Temperature + Moisture for "
             "Site:%s %s Period:%s to %s"
             ) % (uniqueid, lbl, sts.date(), ets.date())
    ssw("Content-type: application/javascript\n\n")
    # shared Highcharts options: synchronized zoom + tooltips between
    # the temperature and moisture charts
    ssw("""
/**
 * In order to synchronize tooltips and crosshairs, override the
 * built-in events with handlers defined on the parent element.
 */
var charts = [], options;

/**
 * Synchronize zooming through the setExtremes event handler.
 */
function syncExtremes(e) {
    var thisChart = this.chart;
    if (e.trigger !== 'syncExtremes') { // Prevent feedback loop
        Highcharts.each(Highcharts.charts, function (chart) {
            if (chart !== thisChart) {
                if (chart.xAxis[0].setExtremes) { // It is null while updating
                    chart.xAxis[0].setExtremes(e.min, e.max, undefined,
                    false, { trigger: 'syncExtremes' });
                }
            }
        });
    }
}
function syncTooltip(container, p) {
    var i = 0;
    for (; i < charts.length; i++) {
        if (container.id != charts[i].container.id) {
            var d = [];
            for (j=0; j < charts[i].series.length; j++){
                d[j] = charts[i].series[j].data[p];
            }
            charts[i].tooltip.refresh(d);
        }
    }
}

options = {
    chart: {zoomType: 'x'},
    plotOptions: {
        series: {
            cursor: 'pointer',
            allowPointSelect: true,
            point: {
                events: {
                    click: function() { editPoint(this); },
                    mouseOver: function () {
                        // Note, I converted this.x to this.index
                        syncTooltip(this.series.chart.container, this.index);
                    }
                }
            }
        }
    },
    tooltip: { shared: true, crosshairs: true },
    xAxis: {
        type: 'datetime',
        crosshair: true,
        events: { setExtremes: syncExtremes }
    }
};
""")
    # to_json can't handle serialization of dt
    df['ticks'] = df['v'].astype(np.int64) // 10 ** 6
    lines = []
    lines2 = []
    if depth == 'all':
        # one series per depth; lines = temps, lines2 = moisture
        for i, n in enumerate(['d1t', 'd2t', 'd3t', 'd4t', 'd5t']):
            v = df[['ticks', n]].to_json(orient='values')
            lines.append("""{
            name: '"""+DEPTHS[i+1]+""" Temp',
            type: 'line',
            connectNulls: true,
            tooltip: {valueDecimal: 1},
            data: """+v+"""
            }
            """)
        for i, n in enumerate(['d1m', 'd2m', 'd3m', 'd4m', 'd5m']):
            v = df[['ticks', n]].to_json(orient='values')
            lines2.append("""{
            name: '"""+DEPTHS[i+1]+""" VSM',
            type: 'line',
            connectNulls: true,
            tooltip: {valueDecimal: 3},
            data: """+v+"""
            }
            """)
    else:
        # single depth: one series per plotid instead
        dlevel = "d%st" % (depth, )
        plot_ids = df['plotid'].unique()
        plot_ids.sort()
        for i, plotid in enumerate(plot_ids):
            df2 = df[df['plotid'] == plotid]
            v = df2[['ticks', dlevel]].to_json(orient='values')
            lines.append("""{
            name: '"""+plotid+"""',
            type: 'line',
            connectNulls: true,
            tooltip: {valueDecimal: 3},
            data: """+v+"""
            }
            """)
        dlevel = "d%sm" % (depth, )
        plot_ids = df['plotid'].unique()
        plot_ids.sort()
        for i, plotid in enumerate(plot_ids):
            df2 = df[df['plotid'] == plotid]
            v = df2[['ticks', dlevel]].to_json(orient='values')
            lines2.append("""{
            name: '"""+plotid+"""',
            type: 'line',
            connectNulls: true,
            tooltip: {valueDecimal: 3},
            data: """+v+"""
            }
            """)
    series = ",".join(lines)
    series2 = ",".join(lines2)
    ssw("""
charts[0] = new Highcharts.Chart($.extend(true, {}, options, {
    chart: { renderTo: 'hc1'},
    title: {text: '"""+title+"""'},
    yAxis: {title: {text: 'Temperature [C]'}},
    series: ["""+series+"""]
}));
charts[1] = new Highcharts.Chart($.extend(true, {}, options, {
    chart: { renderTo: 'hc2'},
    title: {text: '"""+title+"""'},
    yAxis: {title: {text: 'Volumetric Soil Moisture [cm3/cm3]'}},
    series: ["""+series2+"""]
}));
    """)
def make_plot(form):
    """Make the plot.

    Emits water-quality observations for one site as HTML, CSV,
    Excel, or (default) a Highcharts javascript payload, depending
    on the ``view`` CGI parameter.
    """
    uniqueid = form.getfirst('site', 'ISUAG').split("::")[0]
    pgconn = get_dbconn('sustainablecorn')
    viewopt = form.getfirst('view', 'plot')
    varname = form.getfirst('varname', 'WAT2')
    df = read_sql("""
    SELECT uniqueid, plotid, valid at time zone 'UTC' as v, value
    from waterquality_data WHERE uniqueid = %s and varname = %s
    ORDER by valid ASC
    """, pgconn, params=(uniqueid, varname))
    if viewopt not in ['plot', 'js']:
        # tabular output: friendly column names + extra metadata
        newcolname = "%s, %s" % (VARDICT[varname]['title'],
                                 VARDICT[varname]['units'])
        df.rename(columns=dict(v='timestamp',
                               value=newcolname
                               ), inplace=True)
        df = add_bling(pgconn, df, 'Water')
        if viewopt == 'html':
            ssw("Content-type: text/html\n\n")
            ssw(df.to_html(index=False))
            return
        if viewopt == 'csv':
            ssw('Content-type: application/octet-stream\n')
            ssw(('Content-Disposition: attachment; '
                 'filename=%s.csv\n\n') % (uniqueid, ))
            ssw(df.to_csv(index=False))
            return
        if viewopt == 'excel':
            ssw('Content-type: application/octet-stream\n')
            ssw(('Content-Disposition: attachment; '
                 'filename=%s.xlsx\n\n') % (uniqueid, ))
            writer = pd.ExcelWriter('/tmp/ss.xlsx',
                                    options={'remove_timezone': True})
            df.to_excel(writer, 'Data', index=False)
            worksheet = writer.sheets['Data']
            worksheet.freeze_panes(3, 0)
            writer.save()
            ssw(open('/tmp/ss.xlsx', 'rb').read())
            os.unlink('/tmp/ss.xlsx')
            return
    # Begin highcharts output
    ssw("Content-type: application/javascript\n\n")
    title = ("Water Quality for Site: %s"
             ) % (uniqueid, )
    splots = []
    plot_ids = df['plotid'].unique()
    plot_ids.sort()
    # epoch milliseconds for the Highcharts datetime axis
    df['ticks'] = df['v'].astype(np.int64) // 10 ** 6
    for plotid in plot_ids:
        df2 = df[df['plotid'] == plotid]
        # None/nan must become javascript null in the emitted data
        splots.append(("""{type: 'scatter',
            name: '"""+plotid+"""',
            data: """ + str([
                [a, b] for a, b in zip(df2['ticks'].values,
                                       df2['value'].values)]) + """
        }""").replace("None", "null").replace("nan", "null"))
    series = ",".join(splots)
    ssw("""
$("#hc").highcharts({
    title: {text: '"""+title+"""'},
    chart: {zoomType: 'x'},
    yAxis: {title: {text: '""" + VARDICT[varname]["title"] + """ """ +
        VARDICT[varname]["units"] + """'}
    },
    plotOptions: {line: {turboThreshold: 0}
    },
    xAxis: {
        type: 'datetime'
    },
    tooltip: {
        pointFormat: 'date: <b>{point.x:%b %e %Y, %H:%M}</b>' +
                     '<br/>value: <b>{point.y}</b><br/>',
        shared: true,
        valueDecimals: 2,
        valueSuffix: '""" + VARDICT[varname]["units"] + """'
    },
    series: ["""+series+"""]
});
    """)
def do_site(site):
    """Print out a simple listing of trouble"""
    # gather missing/placeholder values from both data tables
    df = read_sql("""
    with ag as (
        select year, varname, value, count(*) from agronomic_data
        where uniqueid = %s and (value is null or value in ('', '.'))
        GROUP by year, varname, value),
    soil as (
        select year, varname, value, count(*) from soil_data
        where uniqueid = %s and (value is null or value in ('', '.'))
        GROUP by year, varname, value)
    SELECT * from ag UNION select * from soil
    ORDER by year ASC, varname ASC
    """, DBCONN, params=(site, site), index_col=None)
    ssw("Content-type: text/plain\n\n")
    ssw("CSCAP Variable Progress Report\n")
    ssw("Site: %s\n" % (site,))
    ssw("Generated: %s\n" % (
        datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ"), ))
    ssw("Total Missing: %s\n" % (df['count'].sum(),))
    ssw("%4s %-10s %-10s %-6s\n" % ('YEAR', 'VARNAME', 'VALUE', 'COUNT'))

    def nice(val):
        """Label the placeholder values in a human-readable way."""
        if val in (None, ''):
            return 'Empty'
        if val == '.':
            return "Period"
        return val

    for _, row in df.iterrows():
        ssw("%s %-10s %-10s %-6s\n" % (
            row['year'], row['varname'], nice(row['value']), row['count']))
def do_work(form):
    """do great things

    Build a multi-sheet Excel export of the requested CSCAP data,
    stage it on the web server, and email the requester a download
    link. Records the email address in website_downloads.
    """
    agree = form.getfirst('agree')
    if agree != 'AGREE':
        ssw("Content-type: text/plain\n\n")
        ssw("You did not agree to download terms.")
        return
    email = form.getfirst('email')
    sites = form.getlist('sites[]')
    if not sites:
        # sentinel so downstream IN clauses stay valid
        sites.append("XXX")
    # treatments = form.getlist('treatments[]')
    agronomic = redup(form.getlist('agronomic[]'))
    soil = redup(form.getlist('soil[]'))
    ghg = redup(form.getlist('ghg[]'))
    # water = redup(form.getlist('water[]'))
    ipm = redup(form.getlist('ipm[]'))
    years = redup(form.getlist('year[]'))
    if not years:
        years = ['2011', '2012', '2013', '2014', '2015']
    shm = redup(form.getlist('shm[]'))
    missing = form.getfirst('missing', "M")
    if missing == '__custom__':
        missing = form.getfirst('custom_missing', 'M')
    pprint("Missing is %s" % (missing, ))
    # NOTE(review): `years` is always truthy here (the default above
    # guarantees it), so the user's year selection is unconditionally
    # replaced with 2011-2015; looks like it should read `if not
    # years:` — confirm intent before changing
    if years:
        years = [str(s) for s in range(2011, 2016)]
    detectlimit = form.getfirst('detectlimit', "1")
    writer = pd.ExcelWriter("/tmp/cscap.xlsx", engine='xlsxwriter')
    # First sheet is Data Dictionary
    if 'SHM5' in shm:
        do_dictionary(writer)
        pprint("do_dictionary() is done")
    # Sheet two is plot IDs
    if 'SHM4' in shm:
        do_plotids(writer, sites)
        pprint("do_plotids() is done")
    # Measurement Data
    if agronomic:
        do_agronomic(writer, sites, agronomic, years, detectlimit, missing)
        pprint("do_agronomic() is done")
    if soil:
        do_soil(writer, sites, soil, years, detectlimit, missing)
        pprint("do_soil() is done")
    if ghg:
        do_ghg(writer, sites, ghg, years, missing)
        pprint("do_ghg() is done")
    if ipm:
        do_ipm(writer, sites, ipm, years, missing)
        pprint("do_ipm() is done")
    # Management
    # Field Operations
    if "SHM1" in shm:
        do_operations(writer, sites, years, missing)
        pprint("do_operations() is done")
    # Pesticides
    if 'SHM2' in shm:
        do_pesticides(writer, sites, years)
        pprint("do_pesticides() is done")
    # Residue and Irrigation
    if 'SHM3' in shm:
        do_management(writer, sites, years)
        pprint("do_management() is done")
    # Site Metadata
    if 'SHM8' in shm:
        do_metadata_master(writer, sites, missing)
        pprint("do_metadata_master() is done")
    # Drainage Management
    if 'SHM7' in shm:
        do_dwm(writer, sites, missing)
        pprint("do_dwm() is done")
    # Notes
    if 'SHM6' in shm:
        do_notes(writer, sites, missing)
        pprint("do_notes() is done")
    # Send to client
    writer.close()
    msg = MIMEMultipart()
    msg['Subject'] = "Sustainable Corn CAP Dataset"
    msg['From'] = 'ISU Data Team <*****@*****.**>'
    msg["To"] = email
    msg.preamble = 'Data'
    # conservative limit of 8 MB
    # if os.stat('/tmp/cscap.xlsx').st_size > 8000000:
    # always deliver by link (direct attachment path is disabled below)
    tmpfn = ('cscap_%s.xlsx'
             ) % (datetime.datetime.utcnow().strftime("%Y%m%d%H%M%S"), )
    shutil.copyfile('/tmp/cscap.xlsx', '/var/webtmp/%s' % (tmpfn, ))
    uri = "https://datateam.agron.iastate.edu/tmp/%s" % (tmpfn, )
    text = EMAILTEXT % (datetime.datetime.utcnow(
        ).strftime("%d %B %Y %H:%M:%S"), uri)
    msg.attach(MIMEText(text))
    # else:
    #     msg.attach(MIMEText(EMAILTEXT))
    #     part = MIMEBase('application', "octet-stream")
    #     part.set_payload(open('/tmp/cscap.xlsx', 'rb').read())
    #     encoders.encode_base64(part)
    #     part.add_header('Content-Disposition',
    #                     'attachment; filename="cscap.xlsx"')
    #     msg.attach(part)
    _s = smtplib.SMTP('localhost')
    _s.sendmail(msg['From'], msg['To'], msg.as_string())
    _s.quit()
    os.unlink('/tmp/cscap.xlsx')
    ssw("Content-type: text/plain\n\n")
    ssw("Email Delivered!")
    cursor = PGCONN.cursor()
    cursor.execute("""INSERT into website_downloads(email) values (%s)
    """, (email, ))
    cursor.close()
    PGCONN.commit()
    pprint("is done!!!")
def main():
    """Emit an HTML table of climate-site temperature/precip summaries.

    One row per network site: yearly (2011-2015) average temperature
    and precip totals, the 2011-2015 mean, and the 1951-2015
    climatology.
    """
    ssw("Content-type: text/html\n\n")
    pgconn = get_dbconn('coop')
    # unique climate sites referenced by the network table
    cids = []
    for sid in nt.sts.keys():
        csite = nt.sts[sid]['climate_site']
        if csite not in cids:
            cids.append(csite)
    df = read_sql("""
    SELECT station, year, avg((high+low)/2.) as avgt, sum(precip) as p
    from alldata where station in %s and year < 2016 and year > 1950
    GROUP by station, year
    """, pgconn, params=(tuple(cids),), index_col=None)
    # df2 keeps flat columns for boolean filtering; df is indexed for
    # the per-(station, year) .at lookups
    df2 = df.copy()
    df.set_index(['station', 'year'], inplace=True)
    table = ""
    ids = list(nt.sts.keys())
    ids.sort()
    for sid in ids:
        cid = nt.sts[sid]['climate_site']
        table += "<tr><th>%s</th>" % (nt.sts[sid]['name'], )
        # yearly columns 2011..2015
        for yr in range(2011, 2016):
            for col in ['avgt', 'p']:
                table += "<td>%.2f</td>" % (df.at[(cid, yr), col],)
        # 2011-2015 average
        df3 = df2[(df2['station'] == cid) & (df2['year'] > 2010)].mean()
        for col in ['avgt', 'p']:
            table += "<td>%.2f</td>" % (df3[col],)
        # full-period climatology
        df4 = df2[(df2['station'] == cid)].mean()
        for col in ['avgt', 'p']:
            table += "<td>%.2f</td>" % (df4[col],)
        table += "</tr>\n"
    ssw("""<!DOCTYPE html>
<html lang='en'>
<head>
<link href="/vendor/bootstrap/3.3.5/css/bootstrap.min.css"
 rel="stylesheet">
<link href="/css/bootstrap-override.css" rel="stylesheet">
</head>
<body>

<table class="table table-striped table-bordered">
<thead>
<tr>
 <th rowspan="2">Site</th>
 <th colspan="2">2011</th>
 <th colspan="2">2012</th>
 <th colspan="2">2013</th>
 <th colspan="2">2014</th>
 <th colspan="2">2015</th>
 <th colspan="2">2011-2015 Avg</th>
 <th colspan="2">Climatology</th>
</tr>
<tr>
 <th>Avg Temp</th><th>Precip</th>
 <th>Avg Temp</th><th>Precip</th>
 <th>Avg Temp</th><th>Precip</th>
 <th>Avg Temp</th><th>Precip</th>
 <th>Avg Temp</th><th>Precip</th>
 <th>Avg Temp</th><th>Precip</th>
 <th>Avg Temp</th><th>Precip</th>
</tr>
</thead>
%s
</table>
</body>
</html>
    """ % (table, ))
def main():
    """Render the CSCAP agronomic+soils data-progress overview page.

    With a ``site`` CGI parameter, delegates to do_site() for a
    plain-text per-site report instead.
    """
    form = cgi.FieldStorage()
    if 'site' in form:
        do_site(form.getfirst('site'))
        return
    # mode = form.getfirst('mode', 'agronomic')
    show_has = (form.getfirst('has', '0') == '1')
    show_period = (form.getfirst('period', '0') == '1')
    show_dnc = (form.getfirst('dnc', '0') == '1')
    show_no = (form.getfirst('no', '0') == '1')
    if form.getfirst('a') is None:
        show_has = True
        show_period = True
        show_dnc = True
        show_no = True
    # Forget the above, we hard code things like so
    show_has = True
    show_period = True
    show_dnc = True
    show_no = True
    data = {}
    arr = [show_has, show_period, show_dnc, show_no]
    get_data('agronomic', data, arr)
    get_data('soils', data, arr)
    sites = list(data.keys())
    sites.sort()
    ssw('Content-type: text/html\n\n')
    # page header; the checkbox form is kept but commented out in the
    # markup since the statuses are hard coded above
    ssw("""<!DOCTYPE html>
<html lang='en'>
<head>
<link href="/vendor/bootstrap/3.3.5/css/bootstrap.min.css"
 rel="stylesheet">
<title>CSCAP Research Site Agronomic+Soils Data Progress</title>
</head>
<body>
<style>
.progress{
 margin-bottom: 0px;
}
.progress-bar {
 z-index: 1;
}
.progress span {
 color: black;
 z-index: 2;
}
</style>
<!--
<form method="GET" name="c">
<p><strong>Which statuses to show?</strong>
<input type="hidden" name="a" value="b">
<input type="checkbox" name="has" value="1"%s>has data
<input type="checkbox" name="period" value="1"%s>periods (missing)
<input type="checkbox" name="dnc" value="1"%s>did not collect
<input type="checkbox" name="no" value="1"%s>no entry / empty
<input type="submit" value="Update Page">
</p>
</form>
-->
<p><span>Key:</span>
<span class="btn btn-success">has data</span>
<span class="btn btn-info">periods (missing)</span><!--
<span class="btn btn-warning">did not collect</span> -->
<span class="btn btn-danger">no entry / empty</span>

<p>This page lists the data progress for Agronomic + Soils variables
collected by the Google Spreadsheets.  These values are valid for the
duration of the project 2011-2015.
This page DOES NOT list data progress for management metadata,
greenhouse gas, drainage, soil moisture, nor pest (IPM) data.</p>

<table class='table table-striped table-bordered'>
<thead><tr>
<th width="20%%">SiteID</th>
<th width="60%%">Progress</th>
<th width="10%%">Count</th>
<th width="10%%">Percent Done</th>
</tr></thead>
    """ % ('' if not show_has else ' checked="checked"',
           '' if not show_period else ' checked="checked"',
           '' if not show_dnc else ' checked="checked"',
           '' if not show_no else ' checked="checked"'
           ))
    # one row per site; _ALL aggregate is rendered last
    for sid in sites:
        if sid == '_ALL':
            continue
        ssw("""
<tr><th>
<a href="siteprogress.py?site=%s">
<i class="glyphicon glyphicon-search"></i> %s</a></th>
        """ % (sid, sid))
        row = data[sid]
        ssw('<td>%s</td>' % (make_progress(row)))
        ssw("<td>%.0f</td>" % (row['tot'], ))
        ssw("<td>%.0f%%</td>" % (((row['hits2']) /
                                  float(row['all'])) * 100.))
        ssw("</tr>\n\n")
    sid = "_ALL"
    ssw("""<tr><th>%s</th>""" % (sid,))
    row = data[sid]
    ssw('<td>%s</td>' % (make_progress(row)))
    ssw("<td>%.0f</td>" % (row['tot'], ))
    ssw("<td>%.0f%%</td>" % (((row['hits2']) /
                              float(row['all'])) * 100.))
    ssw("</tr>\n\n")
    ssw("</table>")
def main():
    """Go Main Go

    Render the per-year, per-variable data-progress table for either
    agronomic or soil mode, driven by the ``year``, ``mode`` and
    ``ids`` CGI parameters.
    """
    ssw('Content-type: text/html\n\n')
    form = cgi.FieldStorage()
    year = int(form.getfirst('year', 2011))
    mode = form.getfirst('mode', 'agronomic')
    build_vars(mode)
    data, dvars = get_data(year, mode)
    sites = list(data.keys())
    sites.sort()
    # page header + mode/year selection form
    ssw("""<!DOCTYPE html>
<html lang='en'>
<head>
<link href="/vendor/bootstrap/3.3.5/css/bootstrap.min.css"
 rel="stylesheet">
<link href="/css/bootstrap-override.css" rel="stylesheet">
</head>
<body>
<style>
.progress{
 margin-bottom: 0px;
}
.progress-bar {
 z-index: 1;
}
.progress span {
 color: black;
 z-index: 2;
}
</style>
<div class="row well">
<div class="col-md-4 col-sm-4">Select Mode:</div>
<div class="col-md-4 col-sm-4">
<a href="dataprogress.py?mode=agronomic">Agronomic Data</a>
</div>
<div class="col-md-4 col-sm-4">
<a href="dataprogress.py?mode=soil">Soil Data</a>
</div>
</div>
<form method="GET" name='theform'>
<input type="hidden" name="mode" value="%s" />
Select Year; <select name="year">
    """ % (mode,))
    for yr in range(2011, 2016):
        checked = ''
        if year == yr:
            checked = " selected='selected'"
        ssw("""<option value="%s" %s>%s</option>\n""" % (yr, checked, yr))
    ssw("</select><br />")
    # a subset of variables may be requested via the ids checkboxes;
    # note this replaces the dvars returned by get_data() above
    ids = form.getlist('ids')
    dvars = varorder
    if ids:
        dvars = ids
    for varid in varorder:
        checked = ""
        if varid in ids:
            checked = "checked='checked'"
        ssw("""<input type='checkbox' name='ids'
        value='%s'%s><abbr title="%s">%s</abbr></input> """ % (
            varid, checked, varlookup[varid], varid))
    ssw("""
<input type="submit" value="Generate Table">
</form>
<span>Key:</span>
<span class="btn btn-success">has data</span>
<span class="btn btn-info">periods</span>
<span class="btn btn-warning">did not collect</span>
<span class="btn btn-danger">no entry / empty</span>
<table class='table table-striped table-bordered'>
    """)
    ssw("<thead><tr><th>SiteID</th>")
    for dv in dvars:
        ssw("""<th><abbr title="%s">%s</abbr></th>""" % (
            varlookup[dv], dv))
    ssw("</tr></thead>")
    # one row per site, one progress cell per selected variable
    for sid in sites:
        ssw("""<tr><th>%s</th>""" % (sid,))
        for datavar in dvars:
            row = data[sid].get(datavar, None)
            ssw('<td>%s</td>' % (make_progress(row)))
        ssw("</tr>\n\n")
    ssw("</table>")
    # all-sites aggregate summary
    ssw("""
<h3>Data summary for all sites included</h3>
<p>
<span>Key:</span>
<span class="btn btn-success">has data</span>
<span class="btn btn-info">periods</span>
<span class="btn btn-warning">DNC empty</span>
<span class="btn btn-danger">no entry</span>
<table class='table table-striped table-bordered'>
<thead><tr><th width="33%%">Variable</th><th width="66%%">%s</th></tr>
    """ % (ALL,))
    for datavar in dvars:
        row = data[ALL].get(datavar, None)
        ssw(("<tr><th>%s %s</th><td>%s</td></tr>"
             ) % (datavar, varlookup[datavar], make_progress(row)))
    ssw('</table></p>')
def make_plot(form):
    """Make the plot.

    Emits tile-flow discharge for one site as HTML, CSV, Excel, or
    (default) a Highcharts javascript payload, depending on the
    ``view`` CGI parameter. ``ptype`` selects raw ('1'), monthly
    ('2'), or daily ('3') aggregation; ``group=1`` averages plots by
    treatment.
    """
    pgconn = get_dbconn('td')
    uniqueid = form.getfirst('site', 'ISUAG')
    sts = datetime.datetime.strptime(form.getfirst('date', '2014-01-01'),
                                     '%Y-%m-%d')
    days = int(form.getfirst('days', 1))
    group = int(form.getfirst('group', 0))
    ets = sts + datetime.timedelta(days=days)
    wxdf = get_weather(pgconn, uniqueid, sts, ets)
    tzname = 'America/Chicago' if uniqueid in [
        'ISUAG', 'SERF', 'GILMORE'] else 'America/New_York'
    viewopt = form.getfirst('view', 'plot')
    ptype = form.getfirst('ptype', '1')
    if ptype == '1':
        # raw observations
        df = read_sql("""SELECT valid at time zone 'UTC' as v, plotid,
        discharge_mm_qc as discharge,
        coalesce(discharge_mm_qcflag, '') as discharge_f
        from tileflow_data WHERE uniqueid = %s
        and valid between %s and %s ORDER by valid ASC
        """, pgconn, params=(uniqueid, sts.date(), ets.date()))
    elif ptype == '2':
        # resample the weather data to monthly totals as well
        if len(wxdf.index) > 0:
            wxdf = wxdf.resample(
                'M', loffset=datetime.timedelta(days=-27)).sum()
            wxdf['ticks'] = wxdf.index.values.astype(
                'datetime64[ns]').astype(np.int64) // 10 ** 6
        df = read_sql("""SELECT
        date_trunc('month', valid at time zone 'UTC') as v, plotid,
        sum(discharge_mm_qc) as discharge
        from tileflow_data WHERE uniqueid = %s
        and valid between %s and %s GROUP by v, plotid
        ORDER by v ASC
        """, pgconn, params=(uniqueid, sts.date(), ets.date()))
        df["discharge_f"] = '-'
    elif ptype == '3':
        # Daily Aggregate
        df = read_sql("""SELECT
        date_trunc('day', valid at time zone 'UTC') as v, plotid,
        sum(discharge_mm_qc) as discharge
        from tileflow_data WHERE uniqueid = %s
        and valid between %s and %s GROUP by v, plotid
        ORDER by v ASC
        """, pgconn, params=(uniqueid, sts.date(), ets.date()))
        df["discharge_f"] = '-'
    if len(df.index) < 3:
        send_error(viewopt, "No / Not Enough Data Found, sorry!")
    linecol = 'plotid'
    if group == 1:
        # Generate the plotid lookup table so plots can be averaged
        # by their per-year treatment label
        plotdf = read_sql("""
        SELECT * from plotids where siteid = %s
        """, pgconn, params=(uniqueid, ), index_col='plotid')

        def lookup(row):
            """Map a plotid to its treatment for the row's year."""
            try:
                return plotdf.loc[row['plotid'],
                                  "y%s" % (row['v'].year, )]
            except KeyError:
                return row['plotid']

        df['treatment'] = df.apply(lambda row: lookup(row), axis=1)
        del df['plotid']
        df = df.groupby(['treatment', 'v']).mean()
        df.reset_index(inplace=True)
        linecol = 'treatment'
    if ptype not in ['2', '3']:
        # present raw timestamps in the site's local timezone
        df['v'] = df['v'].apply(
            lambda x: x.tz_localize('UTC').tz_convert(tzname))
    if viewopt not in ['plot', 'js']:
        df.rename(columns=dict(v='timestamp',
                               discharge='Discharge (mm)'
                               ), inplace=True)
        if viewopt == 'html':
            ssw("Content-type: text/html\n\n")
            ssw(df.to_html(index=False))
            return
        if viewopt == 'csv':
            ssw('Content-type: application/octet-stream\n')
            ssw(('Content-Disposition: attachment; '
                 'filename=%s_%s_%s.csv\n\n'
                 ) % (uniqueid, sts.strftime("%Y%m%d"),
                      ets.strftime("%Y%m%d")))
            ssw(df.to_csv(index=False))
            return
        if viewopt == 'excel':
            ssw('Content-type: application/octet-stream\n')
            ssw(('Content-Disposition: attachment; '
                 'filename=%s_%s_%s.xlsx\n\n'
                 ) % (uniqueid, sts.strftime("%Y%m%d"),
                      ets.strftime("%Y%m%d")))
            writer = pd.ExcelWriter('/tmp/ss.xlsx',
                                    options={'remove_timezone': True})
            df.to_excel(writer, 'Data', index=False)
            writer.save()
            ssw(open('/tmp/ss.xlsx', 'rb').read())
            os.unlink('/tmp/ss.xlsx')
            return
    # Begin highcharts output
    ssw("Content-type: application/javascript\n\n")
    title = ("Tile Flow for Site: %s (%s to %s)"
             ) % (uniqueid, sts.strftime("%-d %b %Y"),
                  ets.strftime("%-d %b %Y"))
    s = []
    plot_ids = df[linecol].unique()
    plot_ids.sort()
    # BUGFIX: group is an int; the previous `group == '1'` string
    # comparison was always False, so the reversal never happened
    if group == 1:
        plot_ids = plot_ids[::-1]
    # epoch milliseconds for the Highcharts datetime axis
    df['ticks'] = df['v'].astype(np.int64) // 10 ** 6
    seriestype = 'line' if ptype in ['1', '3'] else 'column'
    for i, plotid in enumerate(plot_ids):
        df2 = df[df[linecol] == plotid]
        # None/nan must become javascript null in the emitted data
        s.append(("""{type: '""" + seriestype + """',
            """ + getColor(plotid, i) + """,
            name: '""" + CODES.get(plotid, plotid) + """',
            data: """ + str([
                [a, b] for a, b in zip(df2['ticks'].values,
                                       df2['discharge'].values)]) + """
        }""").replace("None", "null").replace("nan", "null"))
    if len(wxdf.index) > 0:
        # precip plotted inverted on a secondary axis
        s.append(("""{type: 'column',
            name: 'Precip', color: '#0000ff', yAxis: 1,
            data: """ + str([
                [a, b] for a, b in zip(wxdf['ticks'].values,
                                       wxdf['precip_mm'].values)]) + """
        }""").replace("None", "null").replace("nan", "null"))
    series = ",".join(s)
    ssw("""
$("#hc").highcharts({
    title: {text: '"""+title+"""'},
    chart: {zoomType: 'x'},
    yAxis: [
        {title: {text: 'Discharge (mm)'}},
        {title: {text: 'Daily Precipitation (mm)'},
         reversed: true,
         maxPadding: 1,
         opposite: true},
    ],
    plotOptions: {
        line: {turboThreshold: 0},
        series: {
            cursor: 'pointer',
            allowPointSelect: true,
            point: {
                events: {
                    click: function() { editPoint(this); }
                }
            }
        }
    },
    xAxis: {
        type: 'datetime'
    },
    tooltip: {
        dateTimeLabelFormats: {
            hour: "%b %e %Y, %H:%M",
            minute: "%b %e %Y, %H:%M"
        },
        shared: true,
        valueDecimals: 0,
        valueSuffix: ' mm'
    },
    series: ["""+series+"""]
});
    """)
def main():
    """Render the management-table HTML report.

    Builds per-site, per-cropyear summaries of field operations
    (harvest, cover-crop seeding/sampling/termination, cash-crop
    planting, synthetic fertilizer) from the ``operations`` table and
    emits one HTML page with six summary tables.  A ``reload`` CGI
    parameter triggers a re-sync of Google data first.

    Bug fix: the per-year template dict used the misspelled key
    ``'fal_sample_covercrop_soy'`` while every read/write below uses
    ``'fall_sample_covercrop_soy'`` — the first fall cover-crop sample
    for soy raised KeyError.  The key is now spelled correctly.
    """
    ssw('Content-type: text/html\n\n')
    form = cgi.FieldStorage()
    reloadres = ""
    if form.getfirst('reload') is not None:
        reloadres += reload_data()
    cursor.execute("""
    SELECT uniqueid, valid, cropyear, operation, biomassdate1, biomassdate2,
    fertilizercrop, cashcrop from operations
    WHERE operation in ('harvest_corn', 'harvest_soy', 'plant_rye',
    'plant_rye-corn-res', 'plant_rye-soy-res', 'sample_soilnitrate',
    'sample_covercrop', 'termination_rye_corn', 'termination_rye_soy',
    'plant_corn', 'plant_soy', 'fertilizer_synthetic')
    and cropyear != '2016' and valid is not null
    ORDER by operation DESC, valid ASC
    """)
    data = {}
    for row in cursor:
        site = row[0]
        valid = row[1]  # datetime!
        cropyear = str(row[2])
        operation = row[3]
        biomassdate1 = row[4]
        # biomassdate2 = row[5]
        fertilizercrop = row[6]
        # cashcrop = row[7]
        if site not in data:
            # Seed every tracked crop year with an empty template so later
            # lookups never have to guard for missing keys.
            data[site] = {}
            for cy in ['2011', '2012', '2013', '2014', '2015']:
                data[site][cy] = {
                    'harvest_soy': '', 'harvest_corn': '',
                    'plant_rye': '',
                    'plant_rye-corn-res': '', 'plant_rye-soy-res': '',
                    'plant_corn': None, 'plant_soy': None,
                    'fall_sample_soilnitrate_corn': '',
                    'fall_sample_soilnitrate_soy': '',
                    'spring_sample_soilnitrate_corn': '',
                    'spring_sample_soilnitrate_soy': '',
                    'termination_rye_corn': '',
                    'termination_rye_soy': '',
                    'fertilizer_synthetic_starter': '',
                    'fertilizer_synthetic_sidedress': '',
                    'fertilizer_synthetic_preplant': '',
                    'fertilizer_synthetic_fall': '',
                    'spring_sample_covercrop_corn': '',
                    'spring_sample_covercrop_soy': '',
                    'fall_sample_covercrop_corn': '',
                    # was misspelled 'fal_sample_covercrop_soy'
                    'fall_sample_covercrop_soy': ''}
        _d = data[site][cropyear]
        if operation == 'plant_rye':
            # A generic rye planting counts as seeding ahead of both crops
            for op2 in ['plant_rye-soy-res', 'plant_rye-corn-res']:
                _d[op2] = valid
        elif operation.startswith('termination_rye'):
            # Termination rows double as the spring cover-crop sampling
            # date when a biomass date was recorded.
            if operation.endswith('soy') and biomassdate1 is not None:
                _d['spring_sample_covercrop_soy'] = biomassdate1
            elif biomassdate1 is not None:
                _d['spring_sample_covercrop_corn'] = biomassdate1
            _d[operation] = valid
        elif (operation == 'fertilizer_synthetic'
              and fertilizercrop in [None, 'multiple', 'corn', 'other']):
            # Classify the application timing relative to corn planting
            plantcorndate = _d['plant_corn']
            # sys.stderr.write("%s %s %s %s %s\n" % (plantcorndate, valid,
            #                                        fertilizercrop, site,
            #                                        cropyear))
            if plantcorndate is None:
                sys.stderr.write(("ERROR! No plant corn for %s %s\n"
                                  ) % (site, cropyear))
                continue
            if valid.year < plantcorndate.year:
                _d[operation + "_fall"] = valid
            elif valid == plantcorndate:
                _d[operation + "_starter"] = valid
            elif valid < (plantcorndate + D7):
                _d[operation + "_preplant"] = valid
            else:
                _d[operation + "_sidedress"] = valid
        elif operation in ['sample_soilnitrate', 'sample_covercrop']:
            # We only want 'fall' events; Jun-Aug samples are skipped and
            # pre-June samples are classified as spring.
            season = 'fall_'
            if valid.month in [6, 7, 8]:
                continue
            elif valid.month < 6:
                season = 'spring_'
            # First sample of a season is attributed to soy, second to corn
            # (ordering comes from the SQL "valid ASC" sort).
            if _d[season + operation + '_soy'] != '':
                _d[season + operation + '_corn'] = valid
            else:
                data[site][cropyear][season + operation + '_soy'] = valid
        else:
            data[site][cropyear][operation] = valid

    # Sites metadata table (one row per site, primary plot's soils)
    table0 = ""
    df = read_sql("""
    WITH sites as (
        SELECT uniqueid, latitude, longitude, officialfarmname
        from metadata_master),
    plots as (
        SELECT uniqueid, soilseriesname1, soiltaxonomicclass1,
        soilseriesname2, soiltaxonomicclass2,
        row_number() OVER (PARTITION by uniqueid ORDER by soiltaxonomicclass1)
        from plotids),
    plots2 as (select * from plots where row_number = 1)

    SELECT s.uniqueid, s.latitude, s.longitude, s.officialfarmname,
    p.soilseriesname1, p.soiltaxonomicclass1,
    p.soilseriesname2, p.soiltaxonomicclass2
    from sites s JOIN plots2 p on (s.uniqueid = p.uniqueid)
    ORDER by s.uniqueid ASC
    """, DBCONN, index_col='uniqueid')
    for uniqueid, row in df.iterrows():
        if uniqueid not in COVER_SITES:
            continue
        table0 += ("<tr><td>%s</td><td>%s</td><td>%s</td>"
                   "<td>%s</td><td>%s</td><td>%s</td><td>%s</td><td>%s</td>"
                   "</tr>"
                   ) % (uniqueid, row['officialfarmname'], row['latitude'],
                        row['longitude'], row['soilseriesname1'],
                        row['soiltaxonomicclass1'],
                        row['soilseriesname2'] or '--',
                        row['soiltaxonomicclass2'] or '--')

    # Sub Table 1: cash harvest + cover seeding (seeding is next cropyear)
    table = ""
    for site in COVER_SITES:  # data.keys():
        table += "<tr><td>%s</td>" % (site,)
        for yr in ['2011', '2012', '2013', '2014', '2015']:
            for op in ['harvest_corn', 'harvest_soy']:
                table += "<td>%s</td>" % (
                    data[site].get(yr, {}).get(op, ''),)
            yr2 = str(int(yr) + 1)
            if yr != '2015':
                for op in ['plant_rye-corn-res', 'plant_rye-soy-res']:
                    table += "<td>%s</td>" % (
                        data[site].get(yr2, {}).get(op, ''),)
        table += "</tr>"
    # ---------------------------------------------------------------
    # Sub Table 2: fall soil nitrate + fall cover crop samples
    table2 = ""
    for site in COVER_SITES:  # data.keys():
        table2 += "<tr><td>%s</td>" % (site,)
        for yr in ['2011', '2012', '2013', '2014', '2015']:
            for op in ['fall_sample_soilnitrate_corn',
                       'fall_sample_soilnitrate_soy']:
                table2 += "<td>%s</td>" % (
                    data[site].get(yr, {}).get(op, ''),)
            yr2 = str(int(yr) + 1)
            for op in ['fall_sample_covercrop_corn',
                       'fall_sample_covercrop_soy']:
                table2 += "<td>%s</td>" % (
                    data[site].get(yr2, {}).get(op, ''),)
        table2 += "</tr>"
    # ---------------------------------------------------------------
    # Sub Table 3: spring sampling + termination
    table3 = ""
    for site in COVER_SITES:  # data.keys():
        table3 += "<tr><td>%s</td>" % (site,)
        for yr in ['2012', '2013', '2014', '2015']:
            for op in ['spring_sample_covercrop_corn',
                       'spring_sample_covercrop_soy']:
                table3 += "<td>%s</td>" % (
                    data[site].get(yr, {}).get(op, ''),)
            for op in ['spring_sample_soilnitrate_corn',
                       'spring_sample_soilnitrate_soy']:
                table3 += "<td>%s</td>" % (
                    data[site].get(yr, {}).get(op, ''),)
            for op in ['termination_rye_corn', 'termination_rye_soy']:
                table3 += "<td>%s</td>" % (
                    data[site].get(yr, {}).get(op, ''),)
        table3 += "</tr>"
    # ---------------------------------------------------------------
    # Cash crop planting dates
    table4 = ""
    for site in COVER_SITES:  # data.keys():
        table4 += "<tr><td>%s</td>" % (site,)
        for yr in ['2011', '2012', '2013', '2014', '2015']:
            for op in ['plant_corn', 'plant_soy']:
                table4 += "<td>%s</td>" % (
                    data[site].get(yr, {}).get(op, ''),)
        table4 += "</tr>"
    # ---------------------------------------------------------------
    # Fertilizer N application timing
    table5 = ""
    for site in COVER_SITES:  # data.keys():
        table5 += "<tr><td>%s</td>" % (site,)
        for yr in ['2011', '2012', '2013', '2014', '2015']:
            for op in ['fertilizer_synthetic_fall',
                       'fertilizer_synthetic_preplant',
                       'fertilizer_synthetic_starter',
                       'fertilizer_synthetic_sidedress']:
                table5 += "<td>%s</td>" % (
                    data[site].get(yr, {}).get(op, ''),)
        table5 += "</tr>"

    ssw("""<!DOCTYPE html>
<html lang='en'>
<head>
<link href="/vendor/bootstrap/3.3.5/css/bootstrap.min.css" rel="stylesheet">
<link href="/css/bootstrap-override.css" rel="stylesheet">
</head>
<body>

<p>The data presented on this page is current as of the last sync of Google
Data to the ISU Database Server. You can
<br /><a href="mantable.py?reload=yes" class="btn btn-info"><i
 class="glyphicon glyphicon-cloud-download"></i>
 Request Sync of Google Data</a>
<br />and a script will run to sync the database.
%s

<h3>Sites</h3>
<table class="table table-striped table-bordered">
<thead>
<tr>
 <th>Site</th>
 <th>Name</th>
 <th>Latitude</th>
 <th>Longitude</th>
 <th>Primary Soil Series</th>
 <th>Primary Soil Taxonomic Class</th>
 <th>Secondary Soil Series</th>
 <th>Secondary Soil Taxonomic Class</th>
</tr>
</thead>
%s
</table>

<h3>Sub Table 1</h3>
<table class="table table-striped table-bordered">
<thead>
<tr>
 <th rowspan="3">Site</th>
 <th colspan="4">Fall 2011</th>
 <th colspan="4">Fall 2012</th>
 <th colspan="4">Fall 2013</th>
 <th colspan="4">Fall 2014</th>
 <th colspan="2">Fall 2015</th>
</tr>
<tr>
 <th colspan="2">cash harvest</th>
 <th colspan="2">cover seeding</th>
 <th colspan="2">cash harvest</th>
 <th colspan="2">cover seeding</th>
 <th colspan="2">cash harvest</th>
 <th colspan="2">cover seeding</th>
 <th colspan="2">cash harvest</th>
 <th colspan="2">cover seeding</th>
 <th colspan="2">cash harvest</th>
</tr>
<tr>
 <th>Corn</th><th>Soybean</th>
 <th>Corn</th><th>Soybean</th>
 <th>Corn</th><th>Soybean</th>
 <th>Corn</th><th>Soybean</th>
 <th>Corn</th><th>Soybean</th>
 <th>Corn</th><th>Soybean</th>
 <th>Corn</th><th>Soybean</th>
 <th>Corn</th><th>Soybean</th>
 <th>Corn</th><th>Soybean</th>
</tr>
</thead>
%s
</table>

<h3>Sub Table 2</h3>
<table class="table table-striped table-bordered">
<thead>
<tr>
 <th rowspan="3">Site</th>
 <th colspan="4">Fall 2011</th>
 <th colspan="4">Fall 2012</th>
 <th colspan="4">Fall 2013</th>
 <th colspan="4">Fall 2014</th>
 <th colspan="4">Fall 2015</th>
</tr>
<tr>
 <th colspan="2">Fall Soil Nitrate</th>
 <th colspan="2">Fall Cover Crop Sample</th>
 <th colspan="2">Fall Soil Nitrate</th>
 <th colspan="2">Fall Cover Crop Sample</th>
 <th colspan="2">Fall Soil Nitrate</th>
 <th colspan="2">Fall Cover Crop Sample</th>
 <th colspan="2">Fall Soil Nitrate</th>
 <th colspan="2">Fall Cover Crop Sample</th>
 <th colspan="2">Fall Soil Nitrate</th>
 <th colspan="2">Fall Cover Crop Sample</th>
</tr>
<tr>
 <th>after C</th><th>after S</th>
 <th>after C</th><th>after S</th>
 <th>after C</th><th>after S</th>
 <th>after C</th><th>after S</th>
 <th>after C</th><th>after S</th>
 <th>after C</th><th>after S</th>
 <th>after C</th><th>after S</th>
 <th>after C</th><th>after S</th>
 <th>after C</th><th>after S</th>
 <th>after C</th><th>after S</th>
</tr>
</thead>
%s
</table>

<h3>Sub Table 3</h3>
<table class="table table-striped table-bordered">
<thead>
<tr>
 <th rowspan="3">Site</th>
 <th colspan="6">Spring 2012</th>
 <th colspan="6">Spring 2013</th>
 <th colspan="6">Spring 2014</th>
 <th colspan="6">Spring 2015</th>
</tr>
<tr>
 <th colspan="2">Rye Sampling (spring)</th>
 <th colspan="2">Soil N Sampling (spring)</th>
 <th colspan="2">Termination</th>
 <th colspan="2">Rye Sampling (spring)</th>
 <th colspan="2">Soil N Sampling (spring)</th>
 <th colspan="2">Termination</th>
 <th colspan="2">Rye Sampling (spring)</th>
 <th colspan="2">Soil N Sampling (spring)</th>
 <th colspan="2">Termination</th>
 <th colspan="2">Rye Sampling (spring)</th>
 <th colspan="2">Soil N Sampling (spring)</th>
 <th colspan="2">Termination</th>
</tr>
<tr>
 <th>before C</th><th>before S</th>
 <th>before C</th><th>before S</th>
 <th>before C</th><th>before S</th>
 <th>before C</th><th>before S</th>
 <th>before C</th><th>before S</th>
 <th>before C</th><th>before S</th>
 <th>before C</th><th>before S</th>
 <th>before C</th><th>before S</th>
 <th>before C</th><th>before S</th>
 <th>before C</th><th>before S</th>
 <th>before C</th><th>before S</th>
 <th>before C</th><th>before S</th>
</tr>
</thead>
%s
</table>

<h3>Cash Crop Planting</h3>
<table class="table table-striped table-bordered">
<thead>
<tr>
 <th rowspan="3">Site</th>
 <th colspan="2">2011</th>
 <th colspan="2">2012</th>
 <th colspan="2">2013</th>
 <th colspan="2">2014</th>
 <th colspan="2">2015</th>
</tr>
<tr>
 <th>Corn</th><th>Soybean</th>
 <th>Corn</th><th>Soybean</th>
 <th>Corn</th><th>Soybean</th>
 <th>Corn</th><th>Soybean</th>
 <th>Corn</th><th>Soybean</th>
</tr>
</thead>
%s
</table>

<h3>Fertilizer N Application</h3>
<table class="table table-striped table-bordered">
<thead>
<tr>
 <th rowspan="3">Site</th>
 <th colspan="4">2011 Cash Crop</th>
 <th colspan="4">2012 Cash Crop</th>
 <th colspan="4">2013 Cash Crop</th>
 <th colspan="4">2014 Cash Crop</th>
 <th colspan="4">2015 Cash Crop</th>
</tr>
<tr>
 <th>Fall</th><th>Pre-Plant</th><th>Starter</th><th>Side Dress</th>
 <th>Fall</th><th>Pre-Plant</th><th>Starter</th><th>Side Dress</th>
 <th>Fall</th><th>Pre-Plant</th><th>Starter</th><th>Side Dress</th>
 <th>Fall</th><th>Pre-Plant</th><th>Starter</th><th>Side Dress</th>
 <th>Fall</th><th>Pre-Plant</th><th>Starter</th><th>Side Dress</th>
</tr>
</thead>
%s
</table>

</body>
</html>
""" % (reloadres, table0, table, table2, table3, table4, table5))
def main():
    """Answer the CGI request with a JSON document of filter results."""
    ssw("Content-type: application/json\n\n")
    fields = cgi.FieldStorage()
    ssw(json.dumps(do_filter(fields)))
def make_plot(form):
    """Emit nitrate-load output for one site as HTML, CSV, Excel, or JS.

    Reads ``nitrateload_data`` for the requested site/date window and,
    depending on the ``view`` CGI parameter, writes an HTML table, a CSV
    or Excel attachment, or a Highcharts javascript snippet.

    Bug fix: the grouped-series reversal tested ``group == '1'`` even
    though ``group`` is parsed with ``int()`` above, so the branch could
    never fire; it now compares against the integer ``1`` (matching the
    ``group == 1`` test earlier in this function).
    """
    pgconn = get_dbconn('td')
    (uniqueid, plotid) = form.getfirst('site', 'ISUAG::302E').split("::")
    sts = datetime.datetime.strptime(form.getfirst('date', '2014-01-01'),
                                     '%Y-%m-%d')
    days = int(form.getfirst('days', 1))
    group = int(form.getfirst('group', 0))
    ets = sts + datetime.timedelta(days=days)
    # NOTE(review): assumes these three sites are Central time, all
    # others Eastern -- confirm against site metadata
    tzname = 'America/Chicago' if uniqueid in [
        'ISUAG', 'SERF', 'GILMORE'] else 'America/New_York'
    viewopt = form.getfirst('view', 'plot')
    ptype = form.getfirst('ptype', '1')
    if ptype == '1':
        # Raw observations
        df = read_sql("""SELECT valid at time zone 'UTC' as v, plotid,
        wat20 as load from nitrateload_data WHERE uniqueid = %s
        and valid between %s and %s ORDER by valid ASC
        """, pgconn, params=(uniqueid, sts.date(), ets.date()))
    elif ptype == '2':
        # Monthly totals
        df = read_sql("""SELECT
        date_trunc('month', valid at time zone 'UTC') as v, plotid,
        sum(wat20) as load from nitrateload_data WHERE uniqueid = %s
        and valid between %s and %s GROUP by v, plotid ORDER by v ASC
        """, pgconn, params=(uniqueid, sts.date(), ets.date()))
    if len(df.index) < 3:
        # presumably send_error terminates the request; confirm
        send_error(viewopt, "No / Not Enough Data Found, sorry!")
    linecol = 'plotid'
    if group == 1:
        # Generate the plotid lookup table
        plotdf = read_sql("""
            SELECT * from plotids where siteid = %s
        """, pgconn, params=(uniqueid, ), index_col='plotid')

        def lookup(row):
            """Map a plotid to its treatment for that year, else itself."""
            try:
                return plotdf.loc[row['plotid'], "y%s" % (row['v'].year, )]
            except KeyError:
                return row['plotid']

        df['treatment'] = df.apply(lambda row: lookup(row), axis=1)
        del df['plotid']
        df = df.groupby(['treatment', 'v']).mean()
        df.reset_index(inplace=True)
        linecol = 'treatment'
    if ptype not in ['2', ]:
        # Monthly truncation is already naive; others get localized
        df['v'] = df['v'].apply(
            lambda x: x.tz_localize('UTC').tz_convert(tzname))
    if viewopt not in ['plot', 'js']:
        df.rename(columns=dict(v='timestamp',
                               load='Load (kg ha-1)'
                               ), inplace=True)
        if viewopt == 'html':
            ssw("Content-type: text/html\n\n")
            ssw(df.to_html(index=False))
            return
        if viewopt == 'csv':
            ssw('Content-type: application/octet-stream\n')
            ssw(('Content-Disposition: attachment; '
                 'filename=%s_%s_%s_%s.csv\n\n'
                 ) % (uniqueid, plotid, sts.strftime("%Y%m%d"),
                      ets.strftime("%Y%m%d")))
            ssw(df.to_csv(index=False))
            return
        if viewopt == 'excel':
            ssw('Content-type: application/octet-stream\n')
            ssw(('Content-Disposition: attachment; '
                 'filename=%s_%s_%s_%s.xlsx\n\n'
                 ) % (uniqueid, plotid, sts.strftime("%Y%m%d"),
                      ets.strftime("%Y%m%d")))
            writer = pd.ExcelWriter('/tmp/ss.xlsx',
                                    options={'remove_timezone': True})
            df.to_excel(writer, 'Data', index=False)
            writer.save()
            ssw(open('/tmp/ss.xlsx', 'rb').read())
            os.unlink('/tmp/ss.xlsx')
            return
    # Begin highcharts output
    ssw("Content-type: application/javascript\n\n")
    title = ("Nitrate Load for Site: %s (%s to %s)"
             ) % (uniqueid, sts.strftime("%-d %b %Y"),
                  ets.strftime("%-d %b %Y"))
    s = []
    plot_ids = df[linecol].unique()
    plot_ids.sort()
    if group == 1:  # was: group == '1', which never matched the int
        plot_ids = plot_ids[::-1]
    # epoch milliseconds for the Highcharts datetime axis
    df['ticks'] = df['v'].astype(np.int64) // 10 ** 6
    seriestype = 'line' if ptype == '1' else 'column'
    for i, plotid in enumerate(plot_ids):
        df2 = df[df[linecol] == plotid]
        s.append(("""{type: '""" + seriestype + """',
            """ + getColor(plotid, i) + """,
            name: '""" + CODES.get(plotid, plotid) + """',
            data: """ + str([[a, b] for a, b in zip(df2['ticks'].values,
                                                    df2['load'].values)
                             ]) + """
        }""").replace("None", "null").replace("nan", "null"))
    series = ",".join(s)
    ssw("""
$("#hc").highcharts({
    title: {text: '""" + title + """'},
    chart: {zoomType: 'x'},
    yAxis: {title: {text: 'Load (kg ha-1)'}
    },
    plotOptions: {line: {turboThreshold: 0}},
    xAxis: {
        type: 'datetime'
    },
    tooltip: {
        dateTimeLabelFormats: {
            hour: "%b %e %Y, %H:%M",
            minute: "%b %e %Y, %H:%M"
        },
        shared: true,
        valueDecimals: 4,
        valueSuffix: ' kg ha-1'
    },
    series: [""" + series + """]
});
""")
def main():
    """Serve the CSCAP equation-evaluation page.

    Evaluates the user-supplied ``equation`` (uppercased; variables are
    agronomic variable codes like AGR33) via ``get_df`` and returns the
    result as Excel, CSV, or an HTML page with the entry form.
    """
    form = cgi.FieldStorage()
    # Uppercase so variable codes match regardless of how they were typed
    equation = form.getfirst('equation', 'AGR33 / AGR4').upper()
    fmt = form.getfirst('fmt', 'html')
    df = get_df(equation)
    if fmt == 'excel':
        ssw('Content-type: application/octet-stream\n')
        # Timestamped filename so repeated downloads do not collide
        ssw(('Content-Disposition: attachment; filename=cscap_%s.xlsx\n\n'
             ) % (datetime.datetime.now().strftime("%Y%m%d%H%M"), ))
        writer = pd.ExcelWriter('/tmp/ss.xlsx',
                                options={'remove_timezone': True})
        df.to_excel(writer, 'Data', index=False)
        writer.save()
        ssw(open('/tmp/ss.xlsx', 'rb').read())
        os.unlink('/tmp/ss.xlsx')
        return
    if fmt == 'csv':
        ssw('Content-type: application/octet-stream\n')
        ssw(('Content-Disposition: attachment; filename=cscap_%s.csv\n\n'
             ) % (datetime.datetime.now().strftime("%Y%m%d%H%M"), ))
        ssw(df.to_csv(index=False))
        return
    # Default: render the form plus the result table; NaN shown as 'M'
    ssw('Content-type: text/html\n\n')
    ssw("""<!DOCTYPE html>
<html lang='en'>
<head>
<link href="/vendor/bootstrap/3.3.5/css/bootstrap.min.css" rel="stylesheet">
<link href="/css/bootstrap-override.css" rel="stylesheet">
<style>
table {border-collapse: collapse;}
td {padding: 6px;}
body {padding: 30px;}
</style>
</head>
<body>

<form method="GET">
<table>
<thead><tr><th>Enter Equation</th><th>Output Format</th></tr></thead>
<tbody><tr><td>
<input name="equation" type="text" size="80"
 value="AGR33 / (AGR4 + AGR33)">
</td>
<td><select name="fmt">
 <option value="html">HTML Table</option>
 <option value="excel">Excel</option>
 <option value="csv">Comma Delimited</option>
</select></td></tr></tbody></table>
<input type="submit">
</form>

<br /><br />

%s

</body>
</html>
""" % (df.to_html(index=False).replace("NaN", "M"), ))
def make_plot(form):
    """Emit water-table depth output as HTML, CSV, Excel, or Highcharts JS.

    Queries ``watertable_data`` for one site over a date window; ``ptype``
    selects the temporal resolution (1=raw, 3=hourly avg, 4=weekly avg,
    otherwise daily avg in local time), and ``view`` selects the output
    format.
    """
    uniqueid = form.getfirst('site', 'ISUAG')
    sts = datetime.datetime.strptime(form.getfirst('date', '2014-01-01'),
                                     '%Y-%m-%d')
    days = int(form.getfirst('days', 1))
    ets = sts + datetime.timedelta(days=days)
    pgconn = get_dbconn('sustainablecorn')
    # NOTE(review): assumes these three sites are Central time, all others
    # Eastern -- confirm against site metadata
    tzname = 'America/Chicago' if uniqueid in [
        'ISUAG', 'SERF', 'GILMORE'] else 'America/New_York'
    viewopt = form.getfirst('view', 'plot')
    ptype = form.getfirst('ptype', '1')
    if ptype == '1':
        # Raw QC'd depth observations, timestamps shifted to UTC
        df = read_sql("""
        SELECT uniqueid, plotid, valid at time zone 'UTC' as v,
        depth_mm_qc as depth
        from watertable_data WHERE uniqueid = %s
        and valid between %s and %s ORDER by valid ASC
        """, pgconn, params=(uniqueid, sts.date(), ets.date()))
    elif ptype in ['3', '4']:
        # Hourly ('3') or weekly ('4') averages
        res = 'hour' if ptype == '3' else 'week'
        df = read_sql("""SELECT uniqueid, plotid,
        date_trunc('""" + res + """', valid at time zone 'UTC') as v,
        avg(depth_mm_qc) as depth from watertable_data
        WHERE uniqueid = %s and valid between %s and %s
        GROUP by uniqueid, v, plotid ORDER by v ASC
        """, pgconn, params=(uniqueid, sts.date(), ets.date()))
    else:
        # Daily averages computed in the site's local timezone
        df = read_sql("""
        SELECT uniqueid, plotid, date(valid at time zone %s) as v,
        avg(depth_mm_qc) as depth from watertable_data
        WHERE uniqueid = %s and valid between %s and %s
        GROUP by uniqueid, v, plotid ORDER by v ASC
        """, pgconn, params=(tzname, uniqueid, sts.date(), ets.date()))
    if len(df.index) < 3:
        # presumably send_error terminates the request; confirm
        send_error(viewopt, "No / Not Enough Data Found, sorry!")
    if ptype not in ['2', ]:
        # Daily ('2') values are plain dates; others get localized
        df['v'] = df['v'].apply(
            lambda x: x.tz_localize('UTC').tz_convert(tzname))
    if viewopt not in ['plot', 'js']:
        # Tabular outputs get friendlier column labels and extra metadata
        df.rename(columns=dict(v='timestamp',
                               depth='Depth (mm)'
                               ), inplace=True)
        df = add_bling(pgconn, df, 'Water')
        if viewopt == 'html':
            ssw("Content-type: text/html\n\n")
            ssw(df.to_html(index=False))
            return
        if viewopt == 'csv':
            ssw('Content-type: application/octet-stream\n')
            ssw(('Content-Disposition: attachment; '
                 'filename=%s_%s_%s.csv\n\n'
                 ) % (uniqueid, sts.strftime("%Y%m%d"),
                      ets.strftime("%Y%m%d")))
            ssw(df.to_csv(index=False))
            return
        if viewopt == 'excel':
            ssw('Content-type: application/octet-stream\n')
            ssw(('Content-Disposition: attachment; '
                 'filename=%s_%s_%s.xlsx\n\n'
                 ) % (uniqueid, sts.strftime("%Y%m%d"),
                      ets.strftime("%Y%m%d")))
            writer = pd.ExcelWriter('/tmp/ss.xlsx',
                                    options={'remove_timezone': True})
            df.to_excel(writer, 'Data', index=False)
            worksheet = writer.sheets['Data']
            # Keep the metadata/header rows visible while scrolling
            worksheet.freeze_panes(3, 0)
            writer.save()
            ssw(open('/tmp/ss.xlsx', 'rb').read())
            os.unlink('/tmp/ss.xlsx')
            return
    # Begin highcharts output
    ssw("Content-type: application/javascript\n\n")
    title = ("Water Table Depth for Site: %s (%s to %s)"
             ) % (uniqueid, sts.strftime("%-d %b %Y"),
                  ets.strftime("%-d %b %Y"))
    s = []
    plot_ids = df['plotid'].unique()
    plot_ids.sort()
    # epoch milliseconds for the Highcharts datetime axis
    df['ticks'] = pd.to_datetime(df['v']).astype(np.int64) // 10 ** 6
    for plotid in plot_ids:
        df2 = df[df['plotid'] == plotid]
        v = df2[['ticks', 'depth']].to_json(orient='values')
        s.append("""{
        name: '""" + plotid + """', data: """ + v + """
        }""")
    series = ",".join(s)
    ssw("""
$("#hc").highcharts({
    title: {text: '""" + title + """'},
    chart: {zoomType: 'x'},
    yAxis: {title: {text: 'Depth below ground (mm)'},
            reversed: true
    },
    plotOptions: {line: {turboThreshold: 0},
        series: {
            allowPointSelect: true,
            cursor: 'pointer',
            point: {
                events: {
                    click: function () {
                        editPoint(this);
                    }
                }
            }
        }
    },
    xAxis: {
        type: 'datetime'
    },
    tooltip: {
        dateTimeLabelFormats: {
            hour: "%b %e %Y, %H:%M",
            minute: "%b %e %Y, %H:%M"
        },
        shared: true,
        valueDecimals: 0,
        valueSuffix: ' mm'
    },
    series: [""" + series + """]
});
""")
def make_plot(form):
    """Emit agronomic-variable output as HTML, CSV, Excel, or Highcharts JS.

    Pulls yearly ``agronomic_data`` values for one site/variable,
    optionally averages plots into treatments (``group=1``), and answers
    per the ``view`` CGI parameter.

    Bug fix: ``pd.to_numeric`` was called with ``errors='coerse'`` — an
    invalid value (valid choices are 'ignore'/'raise'/'coerce'), which
    raises ValueError at runtime.  Corrected to ``errors='coerce'`` so
    non-numeric values become NaN as intended.
    """
    pgconn = get_dbconn('td')
    uniqueid = form.getfirst('site', 'ISUAG')
    varname = form.getfirst('varname', 'AGR17')
    (varlabel, varunits) = get_vardesc(varname)
    group = int(form.getfirst('group', 0))
    viewopt = form.getfirst('view', 'plot')
    df = read_sql("""SELECT value, year, plotid from agronomic_data
        WHERE uniqueid = %s and varname = %s and value is not null
        and value not in ('did not collect') ORDER by plotid, year ASC
        """, pgconn, params=(uniqueid, varname), index_col=None)
    if df.empty:
        # presumably send_error terminates the request; confirm
        send_error(viewopt, "No / Not Enough Data Found, sorry!")
    # was errors='coerse' (typo) -> ValueError from pandas
    df['value'] = pd.to_numeric(df['value'], errors='coerce')
    linecol = 'plotid'
    if group == 1:
        # Generate the plotid lookup table
        plotdf = read_sql("""
            SELECT * from plotids where siteid = %s
        """, pgconn, params=(uniqueid, ), index_col='plotid')

        def lookup(row):
            """Map a plotid to its treatment for that year, else itself."""
            try:
                return plotdf.loc[row['plotid'], "y%s" % (row['year'], )]
            except KeyError:
                return row['plotid']

        df['treatment'] = df.apply(lambda row: lookup(row), axis=1)
        del df['plotid']
        df = df.groupby(['treatment', 'year']).mean()
        df.reset_index(inplace=True)
        linecol = 'treatment'
    if viewopt not in ['plot', 'js']:
        df.rename(columns=dict(value=varname
                               ), inplace=True)
        if viewopt == 'html':
            ssw("Content-type: text/html\n\n")
            ssw(df.to_html(index=False))
            return
        if viewopt == 'csv':
            ssw('Content-type: application/octet-stream\n')
            ssw(('Content-Disposition: attachment; '
                 'filename=%s_%s.csv\n\n'
                 ) % (uniqueid, varname))
            ssw(df.to_csv(index=False))
            return
        if viewopt == 'excel':
            ssw('Content-type: application/octet-stream\n')
            ssw(('Content-Disposition: attachment; '
                 'filename=%s_%s.xlsx\n\n'
                 ) % (uniqueid, varname))
            writer = pd.ExcelWriter('/tmp/ss.xlsx',
                                    options={'remove_timezone': True})
            df.to_excel(writer, 'Data', index=False)
            writer.save()
            ssw(open('/tmp/ss.xlsx', 'rb').read())
            os.unlink('/tmp/ss.xlsx')
            return
    # Begin highcharts output
    ssw("Content-type: application/javascript\n\n")
    title = "Agronomic Data for Site: %s" % (uniqueid, )
    arr = []
    plot_ids = df[linecol].unique()
    plot_ids.sort()
    if group == 1:
        plot_ids = plot_ids[::-1]
    for i, plotid in enumerate(plot_ids):
        df2 = df[df[linecol] == plotid]
        arr.append(("""{type: 'column',
            """ + getColor(plotid, i) + """,
            name: '""" + CODES.get(plotid, plotid) + """',
            data: """ + str([[a, b] for a, b in zip(df2['year'].values,
                                                    df2['value'].values)
                             ]) + """
        }""").replace("None", "null").replace("nan", "null"))
    series = ",".join(arr)
    ssw("""
$("#hc").highcharts({
    title: {text: '""" + title + """'},
    subtitle: {text: '""" + varlabel + """ (""" + varunits + """)'},
    chart: {zoomType: 'x'},
    xAxis: {tickInterval: 1},
    yAxis: [
        {title: {text: '""" + varlabel + """ (""" + varunits + """)'}}
    ],
    plotOptions: {line: {turboThreshold: 0}},
    tooltip: {
        shared: true,
        valueDecimals: 0
    },
    series: [""" + series + """]
});
""")
def get_dl(form):
    """Process the download form from the Internal website.

    Builds a wide table (one row per site/plot/year/etc label, one column
    per variable) from agronomic and optionally soil data, and returns the
    CSV / tab / Excel payload bytes.

    Bug fixes:
    * The site filter previously assigned ``treatlimiter`` (clobbering any
      treatment filter and leaving ``sitelimiter`` at "1=1", so sites were
      never filtered).  It now assigns ``sitelimiter``.
    * Pruning unavailable columns mutated ``cols`` while iterating it,
      which silently skips elements; a list comprehension is used instead.
    """
    pgconn = get_dbconn('sustainablecorn')
    years = form.getlist('years')
    if len(years) == 1:
        # Pad so str(tuple(...)) never yields a one-element "('x',)" form
        years.append('9')
    if "all" in years or len(years) == 0:
        yrlist = "('2011', '2012', '2013', '2014', '2015')"
    else:
        yrlist = str(tuple(years))
    treatlimiter = "1=1"
    treatments = form.getlist('treatments')
    if len(treatments) > 0 and 'all' not in treatments:
        if len(treatments) == 1:
            treatments.append('ZZ')
        s = str(tuple(treatments))
        treatlimiter = """(tillage in %s or rotation in %s or
            nitrogen in %s or landscape in %s)""" % (s, s, s, s)
    sitelimiter = "1=1"
    sites = form.getlist('sites')
    if len(sites) > 0 and 'all' not in sites:
        if len(sites) == 1:
            sites.append('ZZ')
        s = str(tuple(sites))
        # was: treatlimiter = ... which overwrote the treatments filter
        sitelimiter = """t.site in %s""" % (s,)
    # columns!
    cols = ['year', 'site', 'plotid', 'depth', 'subsample', 'rep',
            'rotation', 'crop', 'tillage', 'drainage', 'nitrogen',
            'landscape', 'herbicide', 'sampledate']
    dvars = form.getlist("data")
    wants_soil = False
    for dv in dvars:
        if dv.startswith('SOIL') or dv == 'all':
            wants_soil = True
            break
    sys.stderr.write("1. %s\n" % (datetime.datetime.now(), ))
    if wants_soil:
        # Union agronomic + soil observations; the soil query is the
        # expensive part, so it is only included when requested
        sql = """
        WITH ad as (
            SELECT site, plotid, ''::text as depth, varname, year, value,
            '1'::text as subsample, null::date as sampledate
            from agronomic_data WHERE year in %s),
        sd as (
            SELECT site, plotid, depth, varname, year, value, subsample,
            sampledate from soil_data WHERE year in %s),
        tot as (SELECT * from ad UNION select * from sd)

        SELECT site || '|' || p.plotid || '|' || coalesce(depth,'') || '|' ||
        coalesce(subsample, '') || '|' || year || '|' ||
        coalesce(rep, '') || '|' || coalesce(rotation, '') || '|' ||
        coalesce(tillage, '') || '|' || coalesce(drainage, '') || '|' ||
        coalesce(nitrogen, '') || '|' || coalesce(landscape, '') || '|' ||
        coalesce(herbicide, '') || '|' ||
        (case when sampledate is null then ''
         else sampledate::text end) as lbl,
        varname, value from tot t JOIN plotids p on
        (t.site = p.uniqueid and t.plotid = p.plotid)
        WHERE 1=1 and %s and %s
        """ % (yrlist, yrlist, treatlimiter, sitelimiter)
    else:
        sql = """
        WITH ad as (
            SELECT site, plotid, ''::text as depth, varname, year, value,
            ''::text as subsample, ''::text as sampledate
            from agronomic_data WHERE year in %s),
        tot as (SELECT * from ad)

        SELECT site || '|' || p.plotid || '|' || coalesce(depth,'') || '|' ||
        coalesce(subsample, '') || '|' || year || '|' ||
        coalesce(rep, '') || '|' || coalesce(rotation, '') || '|' ||
        coalesce(tillage, '') || '|' || coalesce(drainage, '') || '|' ||
        coalesce(nitrogen, '') || '|' || coalesce(landscape, '') || '|' ||
        coalesce(herbicide, '') || '|' ||
        coalesce(sampledate, '') as lbl,
        varname, value from tot t JOIN plotids p on
        (t.site = p.uniqueid and t.plotid = p.plotid)
        WHERE 1=1 and %s and %s
        """ % (yrlist, treatlimiter, sitelimiter)
    df = pdsql.read_sql(sql, pgconn)
    sys.stderr.write("2. %s\n" % (datetime.datetime.now(), ))
    # sys.stderr.write(str(df.columns))
    dnc = form.getfirst('dnc', 'DNC')
    missing = form.getfirst('missing', '.')

    def cleaner(val):
        """Normalize empty/placeholder cell values for output."""
        if val is None or val.strip() == '' or val.strip() == '.':
            return missing
        if val.strip().lower() == 'did not collect':
            return dnc
        if val.strip() == 'n/a':
            return "N/A"
        return val

    df['value'] = df['value'].apply(cleaner)
    # Pivot long -> wide: one column per varname, indexed by the composite
    # '|'-delimited label built in SQL
    df2 = df.pivot('lbl', 'varname', 'value')
    allcols = df2.columns.values.tolist()
    if 'all' in dvars:
        cols = cols + allcols
    else:
        cols = cols + dvars
    # sys.stderr.write(str(cols))
    # Explode the composite index back into metadata columns
    (df2['site'], df2['plotid'], df2['depth'], df2['subsample'],
     df2['year'], df2['rep'], df2['rotation'], df2['tillage'],
     df2['drainage'], df2['nitrogen'], df2['landscape'], df2['herbicide'],
     df2['sampledate']
     ) = zip(*[item.split('|') for item in df2.index])
    df2['crop'] = None
    sys.stderr.write("3. %s\n" % (datetime.datetime.now(), ))
    # Drop requested columns that are not actually present; the original
    # removed from `cols` while iterating it, which skips entries
    df2cols = df2.columns.values.tolist()
    cols = [col for col in cols if col in df2cols]
    # Assign in Rotations
    rotdf = pdsql.read_sql("""
        SELECT * from xref_rotation
    """, pgconn, index_col='code')

    def find_rotation(rotation, year):
        """Look up the crop grown for a rotation code in a given year."""
        try:
            return rotdf.at[rotation, 'y%s' % (year, )]
        except Exception as _exp:
            return ''

    df2['crop'] = df2[['rotation', 'year']].apply(
        lambda x: find_rotation(x[0], x[1]), axis=1)
    sys.stderr.write("4. %s\n" % (datetime.datetime.now(), ))
    fmt = form.getfirst('format', 'csv')
    if fmt == 'excel':
        ssw("Content-type: application/vnd.ms-excel\n")
        ssw(("Content-Disposition: attachment;Filename=cscap.xlsx\n\n"))
        writer = pd.ExcelWriter("/tmp/cscap.xlsx", engine='xlsxwriter')
        df2.to_excel(writer, columns=cols, index=False, encoding='latin-1',
                     sheet_name='Sheet1')
        workbook = writer.book
        worksheet = writer.sheets['Sheet1']
        # Force text formatting on the label columns
        format2 = workbook.add_format({'num_format': '@'})
        worksheet.set_column('B:E', None, format2)
        writer.close()
        return open('/tmp/cscap.xlsx', 'rb').read()
    elif fmt == 'tab':
        ssw('Content-type: application/octet-stream\n')
        ssw(('Content-Disposition: attachment; filename=%s\n\n'
             ) % ('cscap.txt',))
        return df2.to_csv(columns=cols, sep='\t', index=False)
    ssw('Content-type: application/octet-stream\n')
    ssw('Content-Disposition: attachment; filename=%s\n\n' % ('cscap.csv',))
    sys.stderr.write("5. %s\n" % (datetime.datetime.now(), ))
    return df2.to_csv(columns=cols, index=False)