def upload_summary_plots():
    """Fetch autoplot summary PNGs for each site and upload them to Dropbox.

    Fixes over previous revision: binary-mode read of the PNG payload,
    narrow exception handling instead of a bare ``except``, a request
    timeout so a wedged webservice cannot hang the job, and py3-safe
    ``print()``.
    """
    props = get_properties()
    dbx = dropbox.Dropbox(props.get('dropbox.token'))
    for location in XREF:
        for interval in ['mar15', 'nov1']:
            (tmpfd, tmpfn) = tempfile.mkstemp()
            uri = ("http://iem.local/plotting/auto/plot/143/"
                   "location:%s::s:%s::dpi:100.png") % (location, interval)
            # timeout prevents an unresponsive autoplot service hanging us
            res = requests.get(uri, timeout=300)
            os.write(tmpfd, res.content)
            os.close(tmpfd)
            today = datetime.date.today()
            remotefn = "%s_%s_%s.png" % (location,
                                         today.strftime("%Y%m%d"), interval)
            if DO_UPLOAD:
                try:
                    # PNG is binary, must read in 'rb' mode
                    with open(tmpfn, 'rb') as fh:
                        payload = fh.read()
                    dbx.files_upload(
                        payload,
                        ("/YieldForecast/Daryl/2016 vs other years plots/%s"
                         ) % (remotefn, ),
                        mode=dropbox.files.WriteMode.overwrite)
                except Exception:
                    # best-effort upload; keep processing other plots
                    print('dropbox fail')
            os.unlink(tmpfn)
def application(environ, start_response):
    """WSGI geocoding endpoint: form address -> "lat,lng" plain text."""
    props = get_properties()
    form = parse_formvars(environ)
    headers = [("Content-type", "text/plain")]
    # Accept either a full address or a street + city pair
    if "address" in form:
        address = form["address"]
    elif "street" in form and "city" in form:
        address = "%s, %s" % (form["street"], form["city"])
    else:
        start_response("200 OK", headers)
        return [b"APIFAIL"]
    req = requests.get(
        SERVICE,
        params={
            "address": address,
            "key": props["google.maps.key2"],
            "sensor": "true",
        },
        timeout=10,
    )
    data = req.json()
    if data["results"]:
        loc = data["results"][0]["geometry"]["location"]
        payload = "%s,%s" % (loc["lat"], loc["lng"])
    else:
        payload = "ERROR"
    start_response("200 OK", headers)
    return [payload.encode("ascii")]
def main():
    """Fetch recent DTN traffic observations and store them in the database.

    Adds a request timeout and a status-code guard (matching the sibling
    atmospheric-observations script) so one bad station/endpoint does not
    hang or abort the whole run.
    """
    # prevent a clock drift issue
    ets = utc() - datetime.timedelta(minutes=1)
    sts = ets - datetime.timedelta(hours=4)
    edate = ets.strftime("%Y-%m-%dT%H:%M:%SZ")
    sdate = sts.strftime("%Y-%m-%dT%H:%M:%SZ")
    meta = load_metadata()
    props = get_properties()
    apikey = props["dtn.apikey"]
    headers = {"accept": "application/json", "apikey": apikey}
    for nwsli in NT.sts:
        idot_id = NT.sts[nwsli]["remote_id"]
        if idot_id is None:
            continue
        uri = (f"https://api.dtn.com/weather/stations/IA{idot_id:03}/"
               f"traffic-observations?startDate={sdate}"
               f"&endDate={edate}&units=us&precision=0")
        req = requests.get(uri, timeout=60, headers=headers)
        if req.status_code != 200:
            # skip this station rather than crash on .json()
            continue
        res = req.json()
        if not res:
            continue
        df = pd.DataFrame(res)
        cursor = DBCONN.cursor()
        process(cursor, df, meta)
        cursor.close()
        DBCONN.commit()
def main():
    """CGI geocoder: emit "lat,lng" for the requested address."""
    props = get_properties()
    form = cgi.FieldStorage()
    # Two accepted input shapes: full address, or street + city
    if "address" in form:
        address = form["address"].value
    elif "street" in form and "city" in form:
        address = "%s, %s" % (form["street"].value, form["city"].value)
    else:
        ssw("APIFAIL")
        return
    params = {
        "address": address,
        "key": props["google.maps.key2"],
        "sensor": "true",
    }
    data = requests.get(SERVICE, params=params, timeout=10).json()
    results = data["results"]
    if not results:
        ssw("ERROR")
        return
    location = results[0]["geometry"]["location"]
    ssw("%s,%s" % (location["lat"], location["lng"]))
def upload_summary_plots():
    """Fetch autoplot summary PNGs for each site and upload to Dropbox.

    Fixes: binary-mode read of the PNG, narrow exception handling instead
    of a bare ``except``, a request timeout, and py3-safe ``print()``.
    """
    props = get_properties()
    dbx = dropbox.Dropbox(props.get('dropbox.token'))
    for location in XREF:
        for interval in ['mar15', 'nov1']:
            (tmpfd, tmpfn) = tempfile.mkstemp()
            uri = ("http://iem.local/plotting/auto/plot/143/"
                   "location:%s::s:%s::dpi:100.png"
                   ) % (location, interval)
            # timeout prevents an unresponsive autoplot service hanging us
            res = requests.get(uri, timeout=300)
            os.write(tmpfd, res.content)
            os.close(tmpfd)
            today = datetime.date.today()
            remotefn = "%s_%s_%s.png" % (location,
                                         today.strftime("%Y%m%d"), interval)
            if DO_UPLOAD:
                try:
                    # PNG is binary, must read in 'rb' mode
                    with open(tmpfn, 'rb') as fh:
                        payload = fh.read()
                    dbx.files_upload(
                        payload,
                        ("/YieldForecast/Daryl/2016 vs other years plots/%s"
                         ) % (remotefn, ),
                        mode=dropbox.files.WriteMode.overwrite)
                except Exception:
                    # best-effort upload; keep processing other plots
                    print('dropbox fail')
            os.unlink(tmpfn)
def main():
    """Backfill the twitter user_id for oauth rows currently lacking one."""
    config = get_properties()
    access_token = '...'
    api = twitter.Api(
        consumer_key=config['bot.twitter.consumerkey'],
        consumer_secret=config['bot.twitter.consumersecret'],
        access_token_key=access_token,
        access_token_secret='...')
    pgconn = get_dbconn('mesosite')
    # Separate cursors: one streams the SELECT, the other issues UPDATEs
    readcursor = pgconn.cursor()
    writecursor = pgconn.cursor()
    readcursor.execute("""
    SELECT screen_name from iembot_twitter_oauth where user_id is null
    """)
    for (screen_name,) in readcursor:
        try:
            user_id = api.UsersLookup(screen_name=screen_name)[0].id
        except Exception as _exp:
            print("FAIL %s" % (screen_name, ))
            continue
        print("%s -> %s" % (screen_name, user_id))
        writecursor.execute("""
        UPDATE iembot_twitter_oauth SET user_id = %s where screen_name = %s
        """, (user_id, screen_name))
    writecursor.close()
    pgconn.commit()
def upload_summary_plots():
    """Fetch jan1 autoplot PNGs (with/without yields) and upload to Dropbox.

    Fixes: close the uploaded file handle via ``with`` (was leaked) and add
    a request timeout so a wedged autoplot service cannot hang the job.
    """
    props = get_properties()
    dbx = dropbox.Dropbox(props.get('dropbox.token'))
    year = datetime.date.today().year
    interval = 'jan1'
    for opt in ['yes', 'no']:
        for location in XREF:
            (tmpfd, tmpfn) = tempfile.mkstemp()
            uri = ("http://iem.local/plotting/auto/plot/143/"
                   "location:%s::s:%s::opt:%s::dpi:100.png"
                   ) % (location, interval, opt)
            res = requests.get(uri, timeout=300)
            os.write(tmpfd, res.content)
            os.close(tmpfd)
            today = datetime.date.today()
            remotefn = ("%s_%s_%s%s.png") % (
                location, today.strftime("%Y%m%d"), interval,
                '_yields' if opt == 'yes' else '')
            if DO_UPLOAD:
                try:
                    # with-block closes the handle (previously leaked)
                    with open(tmpfn, 'rb') as fh:
                        payload = fh.read()
                    dbx.files_upload(
                        payload,
                        ("/YieldForecast/Daryl/%s vs other years plots%s/%s"
                         ) % (year,
                              ' with yields' if opt == 'yes' else '',
                              remotefn),
                        mode=dropbox.files.WriteMode.overwrite)
                except Exception as _:
                    print('dropbox fail')
            os.unlink(tmpfn)
def fetch_files():
    """Download the RWIS atmospheric + surface files via FTP, insert into LDM.

    Returns:
        tuple(str, str): local paths of the atmospheric and surface files.

    Fix: the ``open(..., "wb").write`` handles given to ``retrbinary`` were
    never closed, so data could remain unflushed when pqinsert ran; the
    ``with`` blocks guarantee flush/close first.
    """
    props = util.get_properties()
    # get atmosfn
    atmosfn = "%s/rwis.txt" % (INCOMING, )
    try:
        ftp = ftplib.FTP("165.206.203.34")
    except TimeoutError:
        print("process_rwis FTP Server Timeout")
        sys.exit()
    ftp.login("rwis", props["rwis_ftp_password"])
    with open(atmosfn, "wb") as fh:
        ftp.retrbinary("RETR ExpApAirData.txt", fh.write)
    # Insert into LDM
    pqstr = "plot ac %s rwis.txt raw/rwis/%sat.txt txt" % (GTS, GTS)
    subprocess.call(("pqinsert -i -p '%s' %s "
                     "") % (pqstr, atmosfn), shell=True)
    # get sfcfn
    sfcfn = "%s/rwis_sf.txt" % (INCOMING, )
    with open(sfcfn, "wb") as fh:
        ftp.retrbinary("RETR ExpSfData.txt", fh.write)
    ftp.close()
    # Insert into LDM
    pqstr = "plot ac %s rwis_sf.txt raw/rwis/%ssf.txt txt" % (GTS, GTS)
    subprocess.call(("pqinsert -i -p '%s' %s "
                     "") % (pqstr, sfcfn), shell=True)
    return atmosfn, sfcfn
def fetch_files():
    """Download the RWIS atmospheric + surface files via FTP, insert into LDM.

    Returns:
        tuple(str, str): local paths of the atmospheric and surface files.

    Fix: close/flush the download targets (previously leaked open handles)
    before handing the files to pqinsert.
    """
    props = util.get_properties()
    # get atmosfn
    atmosfn = "%s/rwis.txt" % (INCOMING, )
    ftp = ftplib.FTP('165.206.203.34')
    ftp.login('rwis', props['rwis_ftp_password'])
    with open(atmosfn, 'wb') as fh:
        ftp.retrbinary('RETR ExpApAirData.txt', fh.write)
    # Insert into LDM
    pqstr = "plot ac %s rwis.txt raw/rwis/%sat.txt txt" % (GTS, GTS)
    subprocess.call(("/home/ldm/bin/pqinsert -i -p '%s' %s "
                     "") % (pqstr, atmosfn), shell=True)
    # get sfcfn
    sfcfn = "%s/rwis_sf.txt" % (INCOMING, )
    with open(sfcfn, 'wb') as fh:
        ftp.retrbinary('RETR ExpSfData.txt', fh.write)
    ftp.close()
    # Insert into LDM
    pqstr = "plot ac %s rwis_sf.txt raw/rwis/%ssf.txt txt" % (GTS, GTS)
    subprocess.call(("/home/ldm/bin/pqinsert -i -p '%s' %s "
                     "") % (pqstr, sfcfn), shell=True)
    return atmosfn, sfcfn
def fetch_files():
    """Download the RWIS atmospheric + surface files, insert into LDM.

    Returns:
        tuple(str, str): local paths of the atmospheric and surface files.

    Fix: replace manual open/write/close triples with ``with`` blocks so the
    files are always closed, even if a write raises.
    """
    props = util.get_properties()
    # get atmosfn
    atmosfn = "%s/rwis.txt" % (INCOMING, )
    data = get_file(("ftp://*****:*****@165.206.203.34/ExpApAirData.txt"
                     "") % (props['rwis_ftp_password'],))
    if data is None or data == "":
        print('RWIS Download of ExpApAirData.txt failed, aborting')
        sys.exit()
    with open(atmosfn, 'w') as fp:
        fp.write(data)
    # Insert into LDM
    pqstr = "plot ac %s rwis.txt raw/rwis/%sat.txt txt" % (GTS, GTS)
    subprocess.call(("/home/ldm/bin/pqinsert -i -p '%s' %s "
                     "") % (pqstr, atmosfn), shell=True)
    # get sfcfn
    sfcfn = "%s/rwis_sf.txt" % (INCOMING, )
    data = get_file(("ftp://*****:*****@165.206.203.34/ExpSfData.txt"
                     "") % (props['rwis_ftp_password'],))
    if data is None or data == "":
        print('RWIS Download of ExpSfData.txt failed, aborting')
        sys.exit()
    with open(sfcfn, 'w') as fp:
        fp.write(data)
    # Insert into LDM
    pqstr = "plot ac %s rwis_sf.txt raw/rwis/%ssf.txt txt" % (GTS, GTS)
    subprocess.call(("/home/ldm/bin/pqinsert -i -p '%s' %s "
                     "") % (pqstr, sfcfn), shell=True)
    return atmosfn, sfcfn
def fetch_files():
    """Fetch the AWOS METAR file from the IDOT FTP server.

    Returns:
        str: local path of the downloaded file.

    Fix: the ``open(fn, 'wb').write`` handle passed to ``retrbinary`` was
    never closed; use a ``with`` block so it is flushed and closed.
    """
    props = util.get_properties()
    fn = "%s/iaawos_metar.txt" % (INCOMING, )
    ftp = ftplib.FTP('165.206.203.34')
    ftp.login('rwis', props['rwis_ftp_password'])
    with open(fn, 'wb') as fh:
        ftp.retrbinary('RETR METAR.txt', fh.write)
    ftp.close()
    return fn
def main():
    """Run the interactive Box OAuth2 bootstrap and print the current user."""
    conf = get_properties()
    oauth = OAuth2(
        client_id=conf['boxclient.client_id'],
        client_secret=conf['boxclient.client_secret'],
        store_tokens=_store_tokens,
    )
    # Operator visits this URL, approves access, and pastes the code back
    auth_url = oauth.get_authorization_url('https://mesonet.agron.iastate.edu')
    print(auth_url)
    code = input("What was the code? ")
    oauth.authenticate(code)
    box = Client(oauth)
    print(box.user(user_id='me').get())
def fetch_files():
    """Fetch the AWOS METAR file from the IDOT FTP server.

    Returns:
        str: local path of the downloaded file.

    Fix: write the download via a ``with`` block instead of a manual
    open/write/close triple, guaranteeing the handle is closed.
    """
    props = util.get_properties()
    fn = "%s/iaawos_metar.txt" % (INCOMING, )
    data = urllib2.urlopen(("ftp://*****:*****@165.206.203.34/METAR.txt"
                            "") % (props['rwis_ftp_password'], ),
                           timeout=30).read()
    with open(fn, 'w') as fp:
        fp.write(data)
    return fn
def fetch_files():
    """Fetch the AWOS METAR file from the IDOT FTP server.

    Returns:
        str: local path of the downloaded file.

    Fix: use ``with`` for the output file instead of manual open/close.
    """
    props = util.get_properties()
    fn = "%s/iaawos_metar.txt" % (INCOMING, )
    data = urllib2.urlopen(("ftp://*****:*****@165.206.203.34/METAR.txt"
                            "") % (props['rwis_ftp_password'],),
                           timeout=30).read()
    with open(fn, 'w') as fp:
        fp.write(data)
    return fn
def fetch(cid):
    """Fetch a webcam's current image, stamp a title bar, return JPEG bytes.

    Args:
        cid: webcam id in the mesosite ``webcams`` table.

    Returns:
        bytes or None: JPEG payload, or None if the camera is unknown,
        offline, scrape-only, or the HTTP fetch fails.

    Fix: the database connection was leaked on the ``rowcount != 1`` early
    return; it is now closed on every path out of the metadata lookup.
    """
    # Get camera metadata
    pgconn = get_dbconn("mesosite")
    cursor = pgconn.cursor()
    cursor.execute(
        """
        SELECT ip, fqdn, online, name, port, is_vapix, scrape_url, network
        from webcams WHERE id = %s
        """,
        (cid, ),
    )
    if cursor.rowcount != 1:
        pgconn.close()  # was leaked on this early-return path
        return
    (ip, fqdn, online, name, port, is_vapix, scrape_url, network,
     ) = cursor.fetchone()
    pgconn.close()
    if scrape_url is not None or not online:
        return
    # Get IEM properties
    iemprops = get_properties()
    user = iemprops.get("webcam.%s.user" % (network.lower(), ))
    passwd = iemprops.get("webcam.%s.pass" % (network.lower(), ))
    # Construct URI; vapix (Axis) cameras use a different snapshot endpoint
    uribase = "http://%s:%s/-wvhttp-01-/GetOneShot"
    if is_vapix:
        uribase = "http://%s:%s/axis-cgi/jpg/image.cgi"
    uri = uribase % (ip if ip is not None else fqdn, port)
    req = requests.get(uri, auth=HTTPDigestAuth(user, passwd), timeout=15)
    if req.status_code != 200:
        return
    image = Image.open(BytesIO(req.content))
    (width, height) = image.size
    # Draw black box for the caption bar along the bottom edge
    draw = ImageDraw.Draw(image)
    draw.rectangle([0, height - 12, width, height], fill="#000000")
    # NOTE(review): %P is a glibc extension (lowercase am/pm) and is not
    # portable strftime -- confirm intended over %p
    stamp = datetime.datetime.now().strftime("%d %b %Y %I:%M:%S %P")
    title = "%s - %s Webcam Live Image at %s" % (name, network, stamp)
    draw.text((5, height - 12), title)
    buf = BytesIO()
    image.save(buf, format="JPEG")
    return buf.getvalue()
def write_and_upload(df, location):
    """Write the apsim .met file for location; upload to Dropbox + pickup dir.

    Args:
        df (DataFrame): daily rows indexed by date with radn/maxt/... columns.
        location (str): site identifier; keys the baseline file and filenames.

    Fixes: bare ``except`` -> ``except Exception``, py2 print statement ->
    ``print()``, ``0644`` -> ``0o644`` octal literal, and binary-mode read
    of the upload payload.
    """
    props = get_properties()
    dbx = dropbox.Dropbox(props.get('dropbox.token'))
    (tmpfd, tmpfn) = tempfile.mkstemp()
    # Copy the baseline header verbatim, up to the data section
    for line in open("baseline/%s.met" % (location, )):
        if line.startswith("year"):
            break
        os.write(tmpfd, line.strip()+"\r\n")
    os.write(tmpfd, ('! auto-generated at %sZ by daryl [email protected]\r\n'
                     ) % (datetime.datetime.utcnow().isoformat(),))
    fmt = ("%-10s%-10s%-10s%-10s%-10s%-10s"
           "%-10s%-10s%-10s%-10s%-10s%-10s%-10s%-10s\r\n")
    os.write(tmpfd, fmt % ('year', 'day', 'radn', 'maxt', 'mint', 'rain',
                           'gdd', 'st4', 'st12', 'st24', 'st50', 'sm12',
                           'sm24', 'sm50'))
    os.write(tmpfd, fmt % ('()', '()', '(MJ/m^2)', '(oC)', '(oC)', '(mm)',
                           '(oF)', '(oC)', '(oC)', '(oC)', '(oC)',
                           '(mm/mm)', '(mm/mm)', '(mm/mm)'))
    fmt = (" %-9i%-10i%-10s%-10s%-10s%-10s%-10s"
           "%-10s%-10s%-10s%-10s%-10s%-10s%-10s\r\n")
    for valid, row in df.iterrows():
        os.write(tmpfd, fmt % (valid.year, int(valid.strftime("%j")),
                               p(row['radn'], 3), p(row['maxt'], 1),
                               p(row['mint'], 1), p(row['rain'], 2),
                               p(row['gdd'], 1), p(row['st4'], 2),
                               p(row['st12'], 2), p(row['st24'], 2),
                               p(row['st50'], 2), p(row['sm12'], 2),
                               p(row['sm24'], 2), p(row['sm50'], 2)))
    os.close(tmpfd)
    today = datetime.date.today()
    remotefn = "%s_%s.met" % (location, today.strftime("%Y%m%d"))
    if DO_UPLOAD:
        try:
            with open(tmpfn, 'rb') as fh:
                payload = fh.read()
            dbx.files_upload(
                payload,
                "/YieldForecast/Daryl/%s" % (remotefn, ),
                mode=dropbox.files.WriteMode.overwrite)
        except Exception:
            print('dropbox fail')
    # Save file for usage by web plotting...
    os.chmod(tmpfn, 0o644)
    # os.rename fails here due to cross device link bug
    subprocess.call(("mv %s /mesonet/share/pickup/yieldfx/%s.met"
                     ) % (tmpfn, location), shell=True)
def main():
    """Go Main Go

    Mirrors the IDOT RWIS webcam image tree via wget, then processes every
    newly RETRieved image (parsed out of wget's stderr log) into the
    mesosite database via process().
    """
    pgconn = util.get_dbconn("mesosite")
    mcursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    props = util.get_properties()
    ftp_pass = props["rwis_ftp_password"]
    utcnow = util.utc()
    # we work from here
    os.chdir("/mesonet/data/dotcams")
    # Every three hours, clean up after ourselves :)
    # (tmpwatch purges mirrored files older than 6 hours)
    if utcnow.hour % 3 == 0 and utcnow.minute < 5:
        subprocess.call("/usr/sbin/tmpwatch 6 165.206.203.34/rwis_images",
                        shell=True)
    # Make dictionary of webcams we are interested in
    cameras = {}
    mcursor.execute("SELECT * from webcams WHERE network = 'IDOT'")
    for row in mcursor:
        cameras[row["id"]] = row
    # wget -m (mirror) only fetches new/changed files; its stderr tells us
    # which files were actually transferred this run
    proc = subprocess.Popen(
        ("wget --timeout=20 -m --ftp-user=rwis "
         "--ftp-password=%s "
         "ftp://165.206.203.34/rwis_images/*.jpg") % (ftp_pass, ),
        shell=True,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
    )
    _stdout, stderr = proc.communicate()
    stderr = stderr.decode("utf-8")
    lines = stderr.split("\n")
    for line in lines:
        # Look for RETR (.*) -- capture groups are camera id digits and the
        # timestamp components embedded in the filename
        tokens = re.findall(
            ("RETR Vid-000512([0-9]{3})-([0-9][0-9])-([0-9][0-9])"
             "-([0-9]{4})-([0-9][0-9])-([0-9][0-9])-([0-9][0-9])-"
             "([0-9][0-9]).jpg"),
            line,
        )
        if not tokens:
            continue
        process(tokens[0], cameras, mcursor)
    mcursor.close()
    pgconn.commit()
    pgconn.close()
def write_and_upload(df, location):
    """Write the apsim .met file for location; upload to Dropbox + pickup dir.

    Args:
        df (DataFrame): daily rows indexed by date with radn/maxt/... columns.
        location (str): site identifier; keys the baseline file and filenames.

    Fixes: ``0644`` -> ``0o644`` (the old literal is a SyntaxError on py3)
    and binary-mode read of the Dropbox upload payload.
    """
    props = get_properties()
    dbx = dropbox.Dropbox(props.get('dropbox.token'))
    (tmpfd, tmpfn) = tempfile.mkstemp()
    # Copy the baseline header verbatim, up to the data section
    for line in open("baseline/%s.met" % (location, )):
        if line.startswith("year"):
            break
        os.write(tmpfd, line.strip()+"\r\n")
    os.write(tmpfd, ('! auto-generated at %sZ by daryl [email protected]\r\n'
                     ) % (datetime.datetime.utcnow().isoformat(),))
    fmt = ("%-10s%-10s%-10s%-10s%-10s%-10s"
           "%-10s%-10s%-10s%-10s%-10s%-10s%-10s%-10s\r\n")
    os.write(tmpfd, fmt % ('year', 'day', 'radn', 'maxt', 'mint', 'rain',
                           'gdd', 'st4', 'st12', 'st24', 'st50', 'sm12',
                           'sm24', 'sm50'))
    os.write(tmpfd, fmt % ('()', '()', '(MJ/m^2)', '(oC)', '(oC)', '(mm)',
                           '(oF)', '(oC)', '(oC)', '(oC)', '(oC)',
                           '(mm/mm)', '(mm/mm)', '(mm/mm)'))
    fmt = (" %-9i%-10i%-10s%-10s%-10s%-10s%-10s"
           "%-10s%-10s%-10s%-10s%-10s%-10s%-10s\r\n")
    for valid, row in df.iterrows():
        os.write(tmpfd, fmt % (valid.year, int(valid.strftime("%j")),
                               p(row['radn'], 3), p(row['maxt'], 1),
                               p(row['mint'], 1), p(row['rain'], 2),
                               p(row['gdd'], 1), p(row['st4'], 2),
                               p(row['st12'], 2), p(row['st24'], 2),
                               p(row['st50'], 2), p(row['sm12'], 2),
                               p(row['sm24'], 2), p(row['sm50'], 2)))
    os.close(tmpfd)
    today = datetime.date.today()
    remotefn = "%s_%s.met" % (location, today.strftime("%Y%m%d"))
    if DO_UPLOAD:
        try:
            with open(tmpfn, 'rb') as fh:
                payload = fh.read()
            dbx.files_upload(
                payload,
                "/YieldForecast/Daryl/%s" % (remotefn, ),
                mode=dropbox.files.WriteMode.overwrite)
        except Exception as _:
            print('dropbox fail')
    # Save file for usage by web plotting...
    os.chmod(tmpfn, 0o644)
    # os.rename fails here due to cross device link bug
    subprocess.call(("mv %s /mesonet/share/pickup/yieldfx/%s.met"
                     ) % (tmpfn, location), shell=True)
def fetch_files():
    """Fetch the AWOS METAR file from the IDOT FTP server.

    Returns:
        str: local path of the downloaded file.

    Fix: close the download target via ``with`` (the open handle passed to
    ``retrbinary`` was previously never closed).
    """
    props = util.get_properties()
    fn = "%s/iaawos_metar.txt" % (INCOMING, )
    try:
        ftp = ftplib.FTP('165.206.203.34')
    except TimeoutError as _exp:
        print("process_idot_awos FTP server timeout error")
        sys.exit()
    ftp.login('rwis', props['rwis_ftp_password'])
    with open(fn, 'wb') as fh:
        ftp.retrbinary('RETR METAR.txt', fh.write)
    ftp.close()
    return fn
def test_properties(cursor):
    """ Try the properties function"""
    # Insert a random name/value pair, then assert it round-trips
    alphabet = string.ascii_uppercase + string.digits
    tmpname = ''.join(random.choice(alphabet) for _ in range(7))
    tmpval = ''.join(random.choice(alphabet) for _ in range(7))
    cursor.execute(
        """
        INSERT into properties(propname, propvalue) VALUES (%s, %s)
    """, (tmpname, tmpval))
    prop = util.get_properties(cursor)
    assert isinstance(prop, dict)
    assert prop[tmpname] == tmpval
def fetch_files():
    """Fetch the AWOS METAR file from the IDOT FTP server.

    Returns:
        str: local path of the downloaded file.

    Fix: close the download target via ``with`` (the open handle passed to
    ``retrbinary`` was previously never closed).
    """
    props = util.get_properties()
    fn = "%s/iaawos_metar.txt" % (INCOMING, )
    try:
        ftp = ftplib.FTP("165.206.203.34")
    except TimeoutError:
        print("process_idot_awos FTP server timeout error")
        sys.exit()
    ftp.login("rwis", props["rwis_ftp_password"])
    with open(fn, "wb") as fh:
        ftp.retrbinary("RETR METAR.txt", fh.write)
    ftp.close()
    return fn
def test_properties(cursor):
    """ Try the properties function"""

    def randstring(length=7):
        # random uppercase/digit identifier for a throwaway property row
        charset = string.ascii_uppercase + string.digits
        return ''.join(random.choice(charset) for _ in range(length))

    tmpname = randstring()
    tmpval = randstring()
    cursor.execute("""
        INSERT into properties(propname, propvalue) VALUES (%s, %s)
    """, (tmpname, tmpval))
    prop = util.get_properties(cursor)
    assert isinstance(prop, dict)
    assert prop[tmpname] == tmpval
def handler(
    key, time, tmpf, max_tmpf, min_tmpf, dwpf, relh, sknt, pday, alti, drct
):
    """Ingest one meteobridge observation; return "OK" on success.

    Args:
        key (str): shared-secret API key mapping to a station id.
        time (str): optional YYYYMMDDHHMISS UTC timestamp; now() otherwise.
        remaining args: observation values, "M" meaning missing.

    Raises:
        HTTPException: 404 when the key matches no configured station.

    Fix: replace the fragile ``vars()`` frame introspection with an
    explicit name -> value mapping.
    """
    if not PROPS:
        PROPS.update(get_properties())
    lookup = {}
    for sid in ["OT0013", "OT0014", "OT0015", "OT0016"]:
        lookup[PROPS.get("meteobridge.key." + sid)] = sid
    if key not in lookup:
        raise HTTPException(status_code=404, detail="BAD_KEY")
    sid = lookup[key]
    if len(time) == 14:
        _t = time
        now = utc(
            int(_t[:4]),
            int(_t[4:6]),
            int(_t[6:8]),
            int(_t[8:10]),
            int(_t[10:12]),
            int(_t[12:14]),
        )
    else:
        now = datetime.datetime.utcnow()
        now = now.replace(tzinfo=pytz.UTC)
    ob = Observation(sid, "OT", now)
    # Explicit mapping replaces the previous vars()[fname] introspection
    fields = {
        "tmpf": tmpf,
        "max_tmpf": max_tmpf,
        "min_tmpf": min_tmpf,
        "dwpf": dwpf,
        "relh": relh,
        "sknt": sknt,
        "pday": pday,
        "alti": alti,
        "drct": drct,
    }
    for fname, value in fields.items():
        if value == "M":  # "M" denotes missing data
            continue
        ob.data[fname] = float(value)
    pgconn = get_dbconn("iem")
    cursor = pgconn.cursor()
    ob.save(cursor)
    cursor.close()
    pgconn.commit()
    return "OK"
def main(): """Go Main Go.""" # prevent a clock drift issue sts = utc(2020, 8, 10, 16) ets = utc(2020, 8, 11, 3) edate = ets.strftime("%Y-%m-%dT%H:%M:%SZ") sdate = sts.strftime("%Y-%m-%dT%H:%M:%SZ") props = get_properties() apikey = props["dtn.apikey"] headers = {"accept": "application/json", "apikey": apikey} for nwsli in NT.sts: idot_id = NT.sts[nwsli]["remote_id"] if idot_id is None: continue URI = (f"https://api.dtn.com/weather/stations/IA{idot_id:03}/" f"atmospheric-observations?startDate={sdate}" f"&endDate={edate}&units=us&precision=0") req = requests.get(URI, timeout=60, headers=headers) if req.status_code != 200: LOG.info("Fetch %s got status_code %s", URI, req.status_code) continue res = req.json() if not res: continue try: df = pd.DataFrame(res) except Exception as exp: LOG.info("DataFrame construction failed with %s\n res: %s", exp, res) continue if df.empty: continue df = df.fillna(np.nan) df["valid"] = pd.to_datetime(df["utcTime"]) df["gust"] = (masked_array(df["windGust"].values, units("miles per hour")).to( units("knots")).m) df["sknt"] = (masked_array(df["windSpeed"].values, units("miles per hour")).to( units("knots")).m) df = df.replace({np.nan: None}) cursor = DBCONN.cursor() process(cursor, df, nwsli) cursor.close() DBCONN.commit()
def main():
    """Build the monthly deliverables report and email it to subscribers."""
    props = get_properties()
    recipients = props['nmp_monthly_email_list'].split(",")
    # Reporting window: the 17th of the prior month through the 16th
    end_date = datetime.date.today().replace(day=16)
    start_date = (end_date - datetime.timedelta(days=40)).replace(day=17)
    msg = MIMEText(generate_report(start_date, end_date))
    msg['Subject'] = "[IEM] 404-41-12 Synoptic Contract Deliverables Report"
    msg['From'] = 'IEM Automation <*****@*****.**>'
    msg['To'] = ', '.join(recipients)
    msg.add_header('reply-to', '*****@*****.**')
    # Send the email via our own SMTP server.
    server = smtplib.SMTP('mailhub.iastate.edu')
    server.sendmail(msg['From'], msg['To'], msg.as_string())
    server.quit()
def main():
    """Build the monthly deliverables report and email it to subscribers."""
    recipients = get_properties()["nmp_monthly_email_list"].split(",")
    # Reporting window: the 7th of the prior month through the 6th
    end_date = datetime.date.today().replace(day=6)
    start_date = (end_date - datetime.timedelta(days=40)).replace(day=7)
    body = generate_report(start_date, end_date)
    msg = MIMEText(body)
    msg["Subject"] = "[IEM] Synoptic Contract Deliverables Report"
    msg["From"] = "IEM Automation <*****@*****.**>"
    msg["To"] = ", ".join(recipients)
    msg.add_header("reply-to", "*****@*****.**")
    # Send the email via our own SMTP server.
    relay = smtplib.SMTP("mailhub.iastate.edu")
    relay.sendmail(msg["From"], msg["To"], msg.as_string())
    relay.quit()
def load_ignorelist():
    """Sync what the properties database has for sites to ignore."""
    try:
        raw = get_properties().get("pywwa_metar_ignorelist", "")
        IGNORELIST.clear()
        # comma-delimited list of 4-char station ids
        for sid in (token.strip() for token in raw.split(",")):
            if not sid:
                continue
            if len(sid) != 4:
                log.msg(f"Not adding {sid} to IGNORELIST as not 4 char id")
                continue
            IGNORELIST.append(sid)
    except Exception as exp:
        # best-effort refresh; keep the previous list on failure
        log.err(exp)
    log.msg(f"Updated ignorelist is now {len(IGNORELIST)} long")
    # Call every 15 minutes
    reactor.callLater(15 * 60, load_ignorelist)
def fetch_rda(year, month):
    """Get data please from RDA"""
    props = get_properties()
    login = requests.post(
        "https://rda.ucar.edu/cgi-bin/login",
        {
            "email": props["rda.user"],
            "passwd": props["rda.password"],
            "action": "login",
        },
        timeout=30,
    )
    if login.status_code != 200:
        print(
            "download_narr RDA login failed with code %s"
            % (login.status_code,)
        )
        return
    cookies = login.cookies
    # Three tarballs cover the month: days 1-9, 10-19, and 20 through EOM
    lastday = (
        datetime.date(year, month, 1) + datetime.timedelta(days=35)
    ).replace(day=1) - datetime.timedelta(days=1)
    days = ["0109", "1019", "20%s" % (lastday.day,)]
    for day in days:
        uri = (
            "https://rda.ucar.edu/data/ds608.0/3HRLY/"
            "%i/NARRsfc_%i%02i_%s.tar"
        ) % (year, year, month, day)
        resp = requests.get(uri, timeout=30, cookies=cookies, stream=True)
        tmpfn = "%s/narr.tar" % (TMP,)
        with open(tmpfn, "wb") as fh:
            for chunk in resp.iter_content(chunk_size=1024):
                if chunk:
                    fh.write(chunk)
        process(tmpfn)
        os.unlink(tmpfn)
    # Now call coop script
    subprocess.call(
        ("python /opt/iem/scripts/climodat/narr_solarrad.py %s %s")
        % (year, month),
        shell=True,
    )
def main():
    """CGI geocoder: emit "lat,lng" for the requested address."""
    props = get_properties()
    form = cgi.FieldStorage()
    address = None
    if 'address' in form:
        address = form["address"].value
    elif 'street' in form and 'city' in form:
        address = "%s, %s" % (form["street"].value, form["city"].value)
    if address is None:
        ssw("APIFAIL")
        return
    response = requests.get(
        SERVICE,
        params=dict(address=address, key=props['google.maps.key2'],
                    sensor='true'),
        timeout=10)
    data = response.json()
    if not data['results']:
        ssw("ERROR")
        return
    location = data['results'][0]['geometry']['location']
    ssw("%s,%s" % (location['lat'], location['lng']))
def upload_summary_plots():
    """Fetch jan1 autoplot PNGs (with/without yields) and upload to Dropbox.

    Fixes: close the uploaded file handle via ``with`` (was leaked) and add
    a request timeout so a wedged autoplot service cannot hang the job.
    """
    props = get_properties()
    dbx = dropbox.Dropbox(props.get("dropbox.token"))
    year = datetime.date.today().year
    interval = "jan1"
    for opt in ["yes", "no"]:
        for location in XREF:
            (tmpfd, tmpfn) = tempfile.mkstemp()
            uri = ("http://iem.local/plotting/auto/plot/143/"
                   "location:%s::s:%s::opt:%s::dpi:100.png"
                   ) % (location, interval, opt)
            res = requests.get(uri, timeout=300)
            os.write(tmpfd, res.content)
            os.close(tmpfd)
            today = datetime.date.today()
            remotefn = ("%s_%s_%s%s.png") % (
                location,
                today.strftime("%Y%m%d"),
                interval,
                "_yields" if opt == "yes" else "",
            )
            if DO_UPLOAD:
                try:
                    # with-block closes the handle (previously leaked)
                    with open(tmpfn, "rb") as fh:
                        payload = fh.read()
                    dbx.files_upload(
                        payload,
                        ("/YieldForecast/Daryl/%s vs other years plots%s/%s")
                        % (
                            year,
                            " with yields" if opt == "yes" else "",
                            remotefn,
                        ),
                        mode=dropbox.files.WriteMode.overwrite,
                    )
                except Exception:
                    LOG.info("dropbox fail")
            os.unlink(tmpfn)
def fetch_files():
    """Download the RWIS atmospheric + surface files, insert into LDM.

    Returns:
        tuple(str, str): local paths of the atmospheric and surface files.

    Fix: replace manual open/write/close triples with ``with`` blocks so
    the output files are always closed, even on a write error.
    """
    props = util.get_properties()
    # get atmosfn
    atmosfn = "%s/rwis.txt" % (INCOMING, )
    data = get_file(("ftp://*****:*****@165.206.203.34/ExpApAirData.txt"
                     "") % (props['rwis_ftp_password'], ))
    if data is None or data == "":
        print('RWIS Download of ExpApAirData.txt failed, aborting')
        sys.exit()
    with open(atmosfn, 'w') as fp:
        fp.write(data)
    # Insert into LDM
    pqstr = "plot ac %s rwis.txt raw/rwis/%sat.txt txt" % (GTS, GTS)
    subprocess.call(("/home/ldm/bin/pqinsert -i -p '%s' %s "
                     "") % (pqstr, atmosfn), shell=True)
    # get sfcfn
    sfcfn = "%s/rwis_sf.txt" % (INCOMING, )
    data = get_file(("ftp://*****:*****@165.206.203.34/ExpSfData.txt"
                     "") % (props['rwis_ftp_password'], ))
    if data is None or data == "":
        print('RWIS Download of ExpSfData.txt failed, aborting')
        sys.exit()
    with open(sfcfn, 'w') as fp:
        fp.write(data)
    # Insert into LDM
    pqstr = "plot ac %s rwis_sf.txt raw/rwis/%ssf.txt txt" % (GTS, GTS)
    subprocess.call(("/home/ldm/bin/pqinsert -i -p '%s' %s "
                     "") % (pqstr, sfcfn), shell=True)
    return atmosfn, sfcfn
# Top-level script fragment: reads the latest yieldfx .met dump per site to
# assemble this year's daily values, keyed by "%m%d" date strings.
import psycopg2
import datetime
import subprocess
import dropbox
from pyiem.network import Table as NetworkTable
from pyiem.datatypes import speed
from pyiem.util import get_properties

nt = NetworkTable("ISUSM")
# sites and their corresponding ISUSM station identifiers (parallel lists)
SITES = ['ames', 'nashua', 'sutherland', 'crawfordsville', 'lewis']
XREF = ['BOOI4', 'NASI4', 'CAMI4', 'CRFI4', 'OKLI4']
pgconn = psycopg2.connect(database='coop', host='iemdb', user='******')
cursor = pgconn.cursor()
ipgconn = psycopg2.connect(database='iem', host='iemdb', user='******')
icursor = ipgconn.cursor()
props = get_properties()
dbx = dropbox.Dropbox(props.get('dropbox.token'))
today = datetime.date.today()
for i, site in enumerate(SITES):
    # Need to figure out this year's data
    thisyear = {}
    # get values from latest yieldfx dump
    for line in open('/mesonet/share/pickup/yieldfx/%s.met' % (site,)):
        line = line.strip()
        # only the 2016 data rows are of interest (hard-coded year)
        if not line.startswith('2016'):
            continue
        tokens = line.split()
        # tokens[0]=year, tokens[1]=day-of-year -> calendar date
        valid = (datetime.date(int(tokens[0]), 1, 1) +
                 datetime.timedelta(days=int(tokens[1])-1))
        # stop at today; remaining rows are forecast/filler
        if valid >= today:
            break
RSAI4 RLRI4 Run from RUN_1MIN """ import datetime import sys import pytz import pyiem.util as util # Run every 3 minutes now = datetime.datetime.now() if now.minute % 4 != 0 and len(sys.argv) < 2: sys.exit(0) from pyiem.observation import Observation props = util.get_properties() import urllib2 import psycopg2 import subprocess IEM = psycopg2.connect(database='iem', host='iemdb') icursor = IEM.cursor() csv = open('/tmp/ctre.txt', 'w') # Get Saylorville try: req = urllib2.Request(("ftp://%s:%[email protected]/Saylorville_" "Table3Min_current.dat" "") % (props['ctre_ftpuser'], props['ctre_ftppass'])) data = urllib2.urlopen(req, timeout=30).readlines()
def sendfiles2box(remotepath, filenames, remotefilenames=None,
                  overwrite=False):
    """Send a file(s) to Box.

    Args:
      remotepath (str): remote directory to send file(s) to
      filenames (str or list): local files to send to box
      remotefilenames (str or list, optional): same size as filenames and
        optional as to if they should have different names or not
      overwrite (bool): should this overwrite existing files, default `False`

    Returns:
      list of ids of the uploaded content
    """
    # Normalize both filename args to lists
    if isinstance(filenames, string_types):
        filenames = [filenames, ]
    if isinstance(remotefilenames, string_types):
        remotefilenames = [remotefilenames, ]
    if remotefilenames is None:
        remotefilenames = [os.path.basename(f) for f in filenames]
    iemprops = get_properties()
    oauth = OAuth2(
        client_id=iemprops['boxclient.client_id'],
        client_secret=iemprops['boxclient.client_secret'],
        access_token=iemprops['boxclient.access_token'],
        refresh_token=iemprops['boxclient.refresh_token'],
        store_tokens=_store_tokens
    )
    client = Client(oauth)
    # Walk remotepath one component at a time, resolving (or creating)
    # each folder id; Box paginates folder listings 100 items at a time.
    folder_id = 0  # 0 is the Box root folder
    for token in remotepath.split("/"):
        if token.strip() == '':
            continue
        offset = 0
        found = False
        while not found:
            LOG.debug("folder(%s).get_items(offset=%s)", folder_id, offset)
            items = client.folder(
                folder_id=folder_id).get_items(limit=100, offset=offset)
            for item in items:
                # case-insensitive match on the folder name
                if (item.type == 'folder' and
                        item.name.lower() == token.lower()):
                    folder_id = item.id
                    found = True
                    break
            # fewer than a full page means we exhausted the listing
            if len(items) != 100:
                break
            offset += 100
        if not found:
            LOG.debug("Creating folder %s inside of %s", token, folder_id)
            item = client.folder(folder_id=folder_id).create_subfolder(token)
            folder_id = item.id
    LOG.debug("Now we upload to folder_id: %s", folder_id)
    res = []
    for localfn, remotefn in zip(filenames, remotefilenames):
        LOG.debug("uploading %s", localfn)
        try:
            item = client.folder(folder_id=folder_id).upload(localfn,
                                                             remotefn)
            res.append(item.id)
        except Exception as exp:
            # A name-conflict error carries the existing file's id in
            # context_info; with overwrite=True we update that file instead.
            if overwrite and hasattr(exp, 'context_info'):
                _fileid = exp.context_info['conflicts']['id']
                LOG.info("overwriting %s fid: %s", remotefn, _fileid)
                try:
                    item = client.file(_fileid).update_contents(localfn)
                    res.append(_fileid)
                    continue
                except Exception as exp2:
                    LOG.debug(
                        "Upload_Contents of %s resulted in exception: %s",
                        localfn, exp2
                    )
                    continue
            LOG.debug(
                "Upload of %s resulted in exception: %s", localfn, exp
            )
            # keep list positions aligned with filenames
            res.append(None)
    return res
def write_and_upload(df, location):
    """ We are done, whew!

    Render the apsim-style .met file for ``location`` (baseline header +
    one formatted row per day of ``df``), upload it to Dropbox when
    DO_UPLOAD is set, and stage a copy for web plotting.
    """
    props = get_properties()
    dbx = dropbox.Dropbox(props.get("dropbox.token"))
    (tmpfd, tmpfn) = tempfile.mkstemp(text=True)
    # Buffer everything in a StringIO, write once at the end
    sio = StringIO()
    # Copy the baseline header verbatim, up to the data section
    for line in open("baseline/%s.met" % (location, )):
        if line.startswith("year"):
            break
        sio.write(line.strip() + "\r\n")
    sio.write(("! auto-generated at %sZ by daryl [email protected]\r\n")
              % (datetime.datetime.utcnow().isoformat(), ))
    # Column header + units rows share one fixed-width format
    fmt = ("%-10s%-10s%-10s%-10s%-10s%-10s"
           "%-10s%-10s%-10s%-10s%-10s%-10s%-10s%-10s\r\n")
    sio.write(fmt % (
        "year", "day", "radn", "maxt", "mint", "rain", "gdd",
        "st4", "st12", "st24", "st50", "sm12", "sm24", "sm50",
    ))
    sio.write(fmt % (
        "()", "()", "(MJ/m^2)", "(oC)", "(oC)", "(mm)", "(oF)",
        "(oC)", "(oC)", "(oC)", "(oC)", "(mm/mm)", "(mm/mm)", "(mm/mm)",
    ))
    # Data rows: p() formats each value to the given decimal places
    fmt = (" %-9i%-10i%-10s%-10s%-10s%-10s%-10s"
           "%-10s%-10s%-10s%-10s%-10s%-10s%-10s\r\n")
    for valid, row in df.iterrows():
        sio.write(fmt % (
            valid.year,
            int(valid.strftime("%j")),
            p(row["radn"], 3),
            p(row["maxt"], 1),
            p(row["mint"], 1),
            p(row["rain"], 2),
            p(row["gdd"], 1),
            p(row["st4"], 2),
            p(row["st12"], 2),
            p(row["st24"], 2),
            p(row["st50"], 2),
            p(row["sm12"], 2),
            p(row["sm24"], 2),
            p(row["sm50"], 2),
        ))
    os.write(tmpfd, sio.getvalue().encode("utf-8"))
    os.close(tmpfd)
    today = datetime.date.today()
    remotefn = "%s_%s.met" % (location, today.strftime("%Y%m%d"))
    if DO_UPLOAD:
        try:
            dbx.files_upload(
                open(tmpfn, "rb").read(),
                "/YieldForecast/Daryl/%s" % (remotefn, ),
                mode=dropbox.files.WriteMode.overwrite,
            )
        except Exception as _:
            # best-effort upload; the local copy below still happens
            print("dropbox fail")
    # Save file for usage by web plotting...
    os.chmod(tmpfn, 0o644)
    # os.rename fails here due to cross device link bug
    subprocess.call(
        ("mv %s /mesonet/share/pickup/yieldfx/%s.met") % (tmpfn, location),
        shell=True,
    )
"""Need to set a profile string for my bots."""
import twitter
from pandas.io.sql import read_sql
from pyiem.network import Table as NetworkTable
from pyiem.util import get_dbconn, get_properties

PROPS = get_properties()


def main():
    """Go Main Go.

    Iterate the WFO/CWSU bot accounts that have oauth credentials and build
    an authenticated twitter API handle for each.
    NOTE(review): the loop body appears truncated here (``api`` is unused in
    the visible code) -- presumably the profile update follows.
    """
    nt = NetworkTable(["WFO", "CWSU"])
    df = read_sql("""
        SELECT screen_name, access_token, access_token_secret from
        iembot_twitter_oauth WHERE access_token is not null
        """, get_dbconn('mesosite'), index_col='screen_name')
    wfos = list(nt.sts.keys())
    wfos.sort()
    for wfo in wfos:
        # NOTE(review): the format string below is redacted ("******") in
        # this copy; it presumably built a bot screen_name from the 3-char id
        username = "******" % (wfo.lower()[-3:], )
        if username not in df.index:
            print("%s is unknown?" % (username, ))
            continue
        api = twitter.Api(
            consumer_key=PROPS['bot.twitter.consumerkey'],
            consumer_secret=PROPS['bot.twitter.consumersecret'],
            access_token_key=df.at[username, 'access_token'],
            access_token_secret=df.at[username, 'access_token_secret'])
def main():
    """Print the RWIS FTP password from the IEM properties store."""
    password = get_properties()['rwis_ftp_password']
    print(password)
def main():
    """Go Main Go

    Build a Hybrid Maize ``.wth`` weather file for each yieldfx site:
    daily radiation/temperature/rain come from the latest yieldfx ``.met``
    dump, wind and relative humidity are supplemented from the IEM daily
    summary table, and the result is uploaded to Dropbox plus copied to the
    web pickup directory.
    """
    nt = NetworkTable("ISUSM")
    ipgconn = get_dbconn('iem', user='******')
    icursor = ipgconn.cursor()
    props = get_properties()
    dbx = dropbox.Dropbox(props.get('dropbox.token'))
    today = datetime.date.today()
    for i, site in enumerate(SITES):
        # Need to figure out this year's data
        thisyear = {}
        # get values from latest yieldfx dump (year is hard-coded to 2016)
        with open('/mesonet/share/pickup/yieldfx/%s.met' % (site, )) as metfh:
            for line in metfh:
                line = line.strip()
                if not line.startswith('2016'):
                    continue
                tokens = line.split()
                valid = (datetime.date(int(tokens[0]), 1, 1) +
                         datetime.timedelta(days=int(tokens[1]) - 1))
                if valid >= today:
                    break
                thisyear[valid.strftime("%m%d")] = {
                    'radn': float(tokens[2]),
                    'maxt': float(tokens[3]),
                    'mint': float(tokens[4]),
                    'rain': float(tokens[5]),
                    'windspeed': None,
                    'rh': None}
        # Supplement with DSM data
        icursor.execute("""
            select day, avg_sknt, avg_rh from summary
            where iemid = 37004 and day >= '2016-01-01' ORDER by day ASC""")
        for row in icursor:
            if row[1] is None or row[2] is None:
                continue
            key = row[0].strftime("%m%d")
            # guard: the summary table may have days the met dump does not
            # (e.g. today), which previously raised KeyError
            if key not in thisyear:
                continue
            thisyear[key]['windspeed'] = speed(row[1], 'KTS').value('MPS')
            thisyear[key]['rh'] = row[2]
        fn = "%s_HM_%s.wth" % (site, today.strftime("%Y%m%d"))
        with open(fn, 'w') as fh:
            # bug fix: arguments were (site, lat, lon, lat, elevation), so
            # Elev.(m) printed latitude and the "(Lat.)" line printed
            # elevation; correct order is lat, lon, elevation, lat.
            # NOTE(review): column spacing in this header may have been
            # wider in the original source -- confirm against a sample file.
            fh.write(
                ("%s IA Lat.(deg)= %.2f Long.(deg)=%.2f Elev.(m)=%.0f.\r\n"
                 "%.2f (Lat.)\r\n"
                 "year day Solar T-High T-Low RelHum Precip WndSpd\r\n"
                 "MJ/m2 oC oC %% mm km/hr\r\n"
                 ) % (site.upper(), nt.sts[XREF[i]]['lat'],
                      nt.sts[XREF[i]]['lon'], nt.sts[XREF[i]]['elevation'],
                      nt.sts[XREF[i]]['lat']))
            # Get the baseline obs
            sts = datetime.date(2016, 1, 1)
            ets = today
            now = sts
            while now < ets:
                idx = now.strftime("%m%d")
                row = [now, None, None, None, None, None, None]
                for j, key in enumerate(
                        ['radn', 'maxt', 'mint', 'rh', 'rain', 'windspeed']):
                    row[j + 1] = thisyear[idx][key]
                # NOTE(review): windspeed/rh may still be None when the DSM
                # had no data for that day; speed(None) would raise here.
                fh.write(
                    ("%s\t%4s\t%.3f\t%.1f\t%.1f\t%.0f\t%.1f\t%.1f\r\n"
                     ) % (row[0].year, int(row[0].strftime("%j")), row[1],
                          row[2], row[3], row[4], row[5],
                          speed(row[6], 'MPS').value('KMH')))
                now += datetime.timedelta(days=1)
        try:
            # files_upload requires bytes, so read in binary mode (was a
            # text-mode read, which yields str)
            with open(fn, 'rb') as wthfh:
                payload = wthfh.read()
            dbx.files_upload(payload,
                             "/Hybrid-Maize-Metfiles/%s" % (fn, ),
                             mode=dropbox.files.WriteMode.overwrite)
            # argv list instead of shell=True string interpolation
            subprocess.call(
                ["mv", fn, "/mesonet/share/pickup/yieldfx/%s.wth" % (site, )])
        except Exception as exp:
            print('fail')
            print(exp)
NOTE: we need to have a ~/.netrc file to make this script happy.
"""
from __future__ import print_function
import datetime
import os
import sys
import logging
import subprocess
from pyiem.util import get_properties

logging.basicConfig(level=logging.DEBUG)
# IEM property store is read once at import time
PROPS = get_properties()


def trans(now):
    """ Hacky hack hack """
    # Bucket an observation timestamp by decade-ish era:
    #   year < 1992        -> "100"
    #   1992 <= year < 2001 -> "200"
    #   2001 <= year < 2011 -> "300"
    #   2011 and later      -> "400"
    # NOTE(review): the meaning of these codes (directory? dataset
    # version?) is not visible in this chunk -- confirm with the caller.
    if now.year < 1992:
        return "100"
    if now.year < 2001:
        return "200"
    if now.year < 2011:
        return "300"
    return "400"


def do_month(sts):
    """ Run for a given month """
    # NOTE(review): function body is not visible in this chunk -- the
    # source is truncated here.
def _fetch_table(props, filename):
    """Download one CTRE 3-minute table via FTP and parse its first row.

    Args:
        props (dict): IEM properties supplying ``ctre_ftpuser``/``ctre_ftppass``.
        filename (str): remote file name to RETR.

    Returns:
        dict mapping column name -> string value from the first data row,
        or ``None`` when fewer than two lines were received.

    Raises:
        ftplib.all_errors / OSError on connection or transfer failure.
    """
    bio = BytesIO()
    ftp = ftplib.FTP("129.186.224.167")
    ftp.login(props["ctre_ftpuser"], props["ctre_ftppass"])
    ftp.retrbinary("RETR %s" % (filename, ), bio.write)
    ftp.close()
    data = bio.getvalue().decode("ascii").split("\n")
    if len(data) < 2:
        return None
    keys = data[0].strip().replace('"', "").split(",")
    vals = data[1].strip().replace('"', "").split(",")
    # zip() pairs columns with values (and, unlike the previous index
    # loop, cannot IndexError on a ragged row)
    return dict(zip(keys, vals))


def _save_ob(icursor, csvfh, d, nwsli):
    """Convert one parsed table row into an Observation and a CSV line.

    Args:
        icursor: iem database cursor the Observation is saved through.
        csvfh: open text file the summary CSV line is appended to.
        d (dict): parsed row from :func:`_fetch_table`.
        nwsli (str): station identifier to file the observation under.
    """
    # Ob times are always CDT; shift +5 hours to get UTC
    ts = datetime.datetime.strptime(d["TIMESTAMP"], "%Y-%m-%d %H:%M:%S")
    gts = ts + datetime.timedelta(hours=5)
    gts = gts.replace(tzinfo=pytz.UTC)
    lts = gts.astimezone(pytz.timezone("America/Chicago"))
    iem = Observation(nwsli, "OT", lts)
    drct = d["WindDir"]
    iem.data["drct"] = drct
    # sensor reports mph; /1.15 converts to knots
    sknt = float(d["WS_mph_S_WVT"]) / 1.15
    iem.data["sknt"] = sknt
    gust = float(d["WS_mph_Max"]) / 1.15
    iem.data["gust"] = gust
    iem.save(icursor)
    csvfh.write("%s,%s,%s,%.1f,%.1f\n" % (
        nwsli, gts.strftime("%Y/%m/%d %H:%M:%S"), drct, sknt, gust))


def main():
    """Go Main Go

    Fetch the Saylorville and Red Rock bridge tables from the CTRE FTP
    server, save each as an iem Observation, emit a summary CSV and hand
    it to pqinsert.
    """
    # Run every 3 minutes
    now = datetime.datetime.now()
    if now.minute % 4 != 0 and len(sys.argv) < 2:
        return
    props = util.get_properties()
    pgconn = util.get_dbconn("iem")
    icursor = pgconn.cursor()
    # with-block fixes a leaked handle on the early-return paths; the
    # local is no longer named ``csv`` (shadowed the stdlib module)
    with open("/tmp/ctre.txt", "w") as csvfh:
        for nwsli, filename in [
                ("RSAI4", "Saylorville_Table3Min_current.dat"),
                ("RLRI4", "Red Rock_Table3Min_current.dat")]:
            try:
                d = _fetch_table(props, filename)
            except Exception as exp:
                # only complain every 15 minutes to keep cron mail quiet
                if now.minute % 15 == 0:
                    print("Download CTRE Bridge Data Failed!!!\n%s" % (exp, ))
                return
            if d is None:
                return
            _save_ob(icursor, csvfh, d, nwsli)
    cmd = ("pqinsert -i -p 'data c %s csv/ctre.txt "
           "bogus txt' /tmp/ctre.txt") % (now.strftime("%Y%m%d%H%M"), )
    subprocess.call(cmd, shell=True)
    icursor.close()
    pgconn.commit()
    pgconn.close()