def update_news(url):
    """
    Deletes the entries in the 'news' table of the 'cache.db' database,
    then uploads the 10 most recent articles from the provided RSS feed url.

    ARGS:
        url: A url to an RSS feed in string form, e.g. "http://sampleRSS.com"
    RETURNS:
        0 for success
        1 for error
    RAISES:
        Nothing; all exceptions are caught, logged, and reported via the
        return code so the maintainer knows there is maintaining to do.
    """
    # get news
    news = get_news(url)
    try:
        # connect to database
        database = "cache.db"
        conn, db = con(database)
        # delete old news from table
        db.execute("DELETE FROM news")
        for entry in news:
            # insert news entry into table; a parameterized query avoids
            # SQL injection and the quoting errors of string concatenation
            db.execute(
                "INSERT INTO news (link, title, date, summary) VALUES (?, ?, ?, ?)",
                (entry["link"], entry["title"], entry["date"], entry["summary"]))
        # update time stamp in 'news_last_updated' table
        db.execute("DELETE FROM news_last_updated")
        db.execute("INSERT INTO news_last_updated (last_update) VALUES (?)",
                   (get_time(),))
        # success
        conn.commit()
        return 0
    # error
    except Exception as e:
        dt = get_time()
        logging.error(dt + " An error has occurred during news cache update.\n")
        logging.error(e)
        return 1

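# The con() helper used above isn't shown in this file. A minimal sketch of
# what it presumably does, assuming a SQLite backend (the cache.db filename,
# db.execute, and conn.commit() all point that way); hypothetical, not the
# actual implementation:
import sqlite3

def con(database):
    # open a SQLite connection and hand back both the connection
    # (needed for commit) and a cursor (needed for execute)
    conn = sqlite3.connect(database)
    return conn, conn.cursor()
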
def main():
    Fox.browser.delete_all_cookies()
    start_time = get_time()
    perf_logs = Scraper().run()
    if len(Proxies.list):
        if not DEBUG:
            Proxies.insert()
    else:
        Proxies.errors.append('Got 0 proxies')
    finish_time = get_time()
    data = {'start_time': start_time,
            'finish_time': finish_time,
            'proxy_count': len(Proxies.list),
            'perf_logs': perf_logs,
            'errors': Proxies.errors,
            'duplicates': Proxies.duplicates}
    print(json.dumps(data))

def main(workdir, dataset, identifier, numtopics, passes, lang):
    print("==", "starting", "==", "\n==", helpers.get_time(), "==")
    helpers.make_dirs(workdir, identifier)
    preprocessing.main(workdir, dataset, identifier, lang)
    build_corpus.main(workdir, identifier)
    modeling.main(workdir, identifier, numtopics, passes)
    postprocessing.main(workdir, dataset, identifier, numtopics)
    make_overview.main(workdir, identifier)

def main(workdir, identifier): print("\n== visualize_model ==") model = helpers.load_model(workdir, identifier) vizfile = join(workdir, "results", identifier, "visualization.html") dictcorpus = helpers.load_pickle(workdir, identifier, "dictcorpus.pickle") vectorcorpus = helpers.load_pickle(workdir, identifier, "vectorcorpus.pickle") visualize_model(model, dictcorpus, vectorcorpus, vizfile) print("==", helpers.get_time(), "done visualizing", "==")
def main(workdir, identifier): print("\n== text2corpus ==") allprepared = helpers.load_pickle(workdir, identifier, "allprepared.pickle") dictcorpus, vectorcorpus = build_vectorcorpus(allprepared) helpers.save_pickle(dictcorpus, workdir, identifier, "dictcorpus.pickle") helpers.save_pickle(vectorcorpus, workdir, identifier, "vectorcorpus.pickle") print("==", helpers.get_time(), "done building corpus", "==")
def main(workdir, dataset, identifier, numtopics):
    print("\n== postprocessing ==")
    model = helpers.load_model(workdir, identifier)
    vectorcorpus = helpers.load_pickle(workdir, identifier, "vectorcorpus.pickle")
    resultsfolder = join(workdir, "results", identifier)
    get_topics(model, numtopics, resultsfolder)
    get_topicwords(model, numtopics, resultsfolder)
    get_doc_topic_matrix(vectorcorpus, model, resultsfolder)
    make_mastermatrix(workdir, dataset, identifier)
    print("==", helpers.get_time(), "done postprocessing", "==")

def itpl():
    "Interpolate 4D (true) temp and (mass level) geopotential fields to stations."
    nc = Dataset('../../data/WRF/2d/d02_2014-09-10_transf.nc')
    T = nc.variables['temp'][:]
    GP = nc.variables['ghgt'][:]
    ma = basemap(nc)
    x, y = ma.xy()
    ij = ma(*hh.lonlat(sta))
    t = hh.get_time(nc)
    Ti = interp4D((x, y), T, ij, sta.index, t, method='linear')
    Gi = interp4D((x, y), GP, ij, sta.index, t, method='linear')

def review():
    """Submit a review"""
    book_id = request.form.get("book_id")
    rating = request.form.get("rating")
    review = request.form.get("review")
    try:
        q = db.execute(
            "SELECT * FROM reviews WHERE book_id = :book_id AND user_id = :user_id",
            {"book_id": book_id, "user_id": session["user_id"]}).fetchall()
        if q:
            return apology(message="you can only submit 1 review per book")
    except Exception:
        return apology(message="something went wrong")
    if not rating or not review:
        return apology(message="please fill the form correctly")
    # if the book is on the user's wishlist, remove it now that it is reviewed
    q = db.execute(
        "SELECT * FROM wishlist WHERE user_id = :u_id AND book_id = :b_id",
        {"u_id": session["user_id"], "b_id": book_id}).fetchall()
    if q:
        db.execute(
            "DELETE FROM wishlist WHERE user_id = :u_id AND book_id = :b_id",
            {"u_id": session["user_id"], "b_id": book_id})
        db.commit()
    book = db.execute("SELECT title FROM books WHERE id = :id",
                      {"id": book_id}).fetchone()
    time = get_time()
    db.execute(
        "INSERT INTO reviews(book_id, user_id, rating, review, time) "
        "VALUES(:book_id, :user_id, :rating, :review, :time)",
        {"book_id": book_id, "user_id": session["user_id"],
         "rating": rating, "review": review, "time": time})
    try:
        db.commit()
    except Exception:
        return apology(message="something went wrong")
    flash("Review submitted!")
    return redirect(f'/books/{book[0].replace(" ", "_")}')

def run(self):
    methods = dir(self)
    for method in methods:
        if (method.startswith('ProxySite') and not SITE) or (SITE and method.endswith(SITE)):
            cur_proxy_count = len(Proxies.list)
            start_time = get_time()
            try:
                debug_println('\nstarted ' + method)
                getattr(getattr(self, method)(), 'go')()
            except Exception:
                # str() is needed: sys.exc_info()[0] is an exception class,
                # and concatenating it to a str raises a TypeError
                Proxies.errors.append(method + ': uncaught exception ' + str(sys.exc_info()[0]))
            end_time = get_time()
            new_proxy_count = len(Proxies.list) - cur_proxy_count
            if not new_proxy_count:
                Proxies.errors.append(method + ' returned 0 proxies')
            self.perf_logs[method] = {'start_time': start_time,
                                      'end_time': end_time,
                                      'count': new_proxy_count}
    return self.perf_logs

def __init__(self, nc, var='slp', timedelta='-4H'):
    self.map = basemap.Basemap(
        projection='merc',
        llcrnrlon=-180, llcrnrlat=-70,
        urcrnrlon=-60, urcrnrlat=10
    )
    lon, lat = hh.lonlat(nc)
    self._i, self._j = self.map(lon - 360, lat)
    slp = nc.variables[var][:]
    s = pd.DataFrame(slp.reshape((slp.shape[0], -1)))
    # shift timestamps (e.g. UTC to local) before averaging by calendar day
    t = pd.DatetimeIndex(hh.get_time(nc) + pd.Timedelta(timedelta))
    self.slp = s.groupby(t.date).mean()
    self.t = pd.DatetimeIndex(self.slp.index)

def main(workdir, dataset, identifier, lang):
    print("\n== preprocessing ==")
    alltextids = []
    allprepared = []
    stoplist = load_stoplist(lang)
    textpath = join(workdir, "datasets", dataset, "txt", "*.txt")
    for textfile in sorted(glob.glob(textpath)):
        textid = basename(textfile).split(".")[0]
        alltextids.append(textid)
        text = load_text(textfile)
        prepared = prepare_text(text, lang, stoplist)
        allprepared.append(prepared)
        #print("done with:", textid)
        #print(prepared[0:10])
    helpers.save_pickle(allprepared, workdir, identifier, "allprepared.pickle")
    print("files processed:", len(allprepared))
    print("==", helpers.get_time(), "done preprocessing", "==")

def whishlist():
    """Display user wishlist"""
    if request.method == "GET":
        books = db.execute(
            "SELECT books.title, books.isbn, wishlist.time FROM wishlist "
            "JOIN books ON wishlist.book_id = books.id WHERE user_id = :id",
            {"id": session["user_id"]}).fetchall()
        return render_template("wishlist.html", books=books)
    else:
        book_id = request.form.get("book_id")
        if not book_id:
            return apology(message="something went perfectly well, hacker!")
        q = db.execute("SELECT title FROM books WHERE id = :id",
                       {"id": book_id}).fetchone()
        if not q:
            return apology(message="book not found in database, hacker!")
        q = db.execute(
            "SELECT user_id, book_id FROM wishlist WHERE user_id = :id AND book_id = :book",
            {"id": session["user_id"], "book": book_id}).fetchall()
        # don't insert a duplicate wishlist entry
        if q:
            return apology(message="book already in wishlist")
        time = get_time()
        try:
            db.execute(
                "INSERT INTO wishlist (user_id, book_id, time) "
                "VALUES(:user_id, :book_id, :time)",
                {"user_id": session["user_id"], "book_id": book_id, "time": time})
            db.commit()
        except Exception:
            return apology(message="something went wrong in wishlist")
        flash("Book added to wishlist!")
        return redirect("/wishlist")

def buy():
    """Buy shares of stock"""
    if request.method == "GET":
        return render_template("buy.html")
    else:
        symbol = request.form.get("symbol")
        try:
            shares = int(request.form.get("shares"))
            if shares < 1:
                return apology(message="please enter a positive number")
        except (TypeError, ValueError):
            return apology(message="please enter a positive integer")
        if not symbol:
            return apology(message="please enter a symbol and a share")
        quote = lookup(symbol)
        if not quote:
            return apology(message="invalid symbol")
        query = db.execute("SELECT cash FROM users WHERE id = :id",
                           {"id": session["user_id"]}).fetchone()
        cash = float(query['cash'])
        price = float(quote["price"] * shares)
        if cash < price:
            return apology(message="not enough cash")
        transacted = get_time()
        db.execute(
            "INSERT INTO transactions(id, symbol, shares, price, transacted) "
            "VALUES(:id, :symbol, :shares, :price, :transacted)",
            {'id': session["user_id"], 'symbol': symbol.strip().upper(),
             'shares': shares, 'price': price, 'transacted': transacted})
        cash -= price
        db.execute("UPDATE users SET cash = :cash WHERE id = :id",
                   {'cash': cash, 'id': session["user_id"]})
        db.commit()
        flash("Bought!")
        return redirect("/")

def interp_nc(nc, var, stations, time=True, tz=False, method='linear', map=None):
    m = mp.basemap(nc) if map is None else map
    xy = m.xy()
    ij = m(*hh.lonlat(stations))
    x = nc.variables[var][:].squeeze()
    if time:
        t = pd.DatetimeIndex(hh.get_time(nc))
        if tz:
            t = t.tz_localize('UTC').tz_convert(hh.CEAZAMetTZ())
        else:
            t -= np.timedelta64(4, 'h')
        return sp.grid_interp(xy, x, ij, stations.index, t, method=method)
    else:
        return sp.grid_interp(xy, hh.g2d(x), ij, stations.index, method=method)

def interp4D():
    nc = Dataset('data/wrf/d02_2014-09-10.nc')
    m = mp.basemap(nc)
    xy = m(*hh.lonlat(nc))
    ij = m(*hh.lonlat(sta))
    t = hh.get_time(nc) - np.timedelta64(4, 'h')
    z = sp.grid_interp(xy, nc.variables['HGT'][:], ij, sta.index, method='linear')
    TH2 = sp.grid_interp(xy, nc.variables['TH2'][:], ij, sta.index, t, method='linear')
    TSK = sp.grid_interp(xy, nc.variables['TSK'][:], ij, sta.index, t, method='linear')
    GP = sp.interp4D(xy, nc.variables['PH'][:], ij, sta.index, t, 'linear')
    GPB = sp.interp4D(xy, nc.variables['PHB'][:], ij, sta.index, t, 'linear')
    P = sp.interp4D(xy, nc.variables['P'][:], ij, sta.index, t, 'linear')
    PB = sp.interp4D(xy, nc.variables['PB'][:], ij, sta.index, t, 'linear')
    TH = sp.interp4D(xy, nc.variables['T'][:], ij, sta.index, t, 'linear')
    V = pd.HDFStore('data/d02_4D.h5')
    # WRF splits geopotential (PH/PHB) and pressure (P/PB) into
    # perturbation and base-state parts; sum them to get the full fields
    V['GP'] = GPB.add(GP)
    V['P'] = PB.add(P)
    V['T'] = TH
    V['TH2'] = TH2
    V['TSK'] = TSK
    V['z'] = z
    V.close()

def _wrapper(*args, **kwargs):
    start = get_time()
    res = func(*args, **kwargs)
    elapsed_time = get_time() - start
    Metric.timing(name, elapsed_time)
    return res

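# _wrapper closes over func and name, so it presumably lives inside a
# decorator factory. A minimal sketch of that outer function, assuming the
# factory is called timed() (the name and exact shape are assumptions; only
# _wrapper itself is from the source):
import functools

def timed(name):
    def decorator(func):
        @functools.wraps(func)  # preserve the wrapped function's metadata
        def _wrapper(*args, **kwargs):
            start = get_time()
            res = func(*args, **kwargs)
            elapsed_time = get_time() - start
            Metric.timing(name, elapsed_time)  # report duration under the metric name
            return res
        return _wrapper
    return decorator

# usage sketch: place @timed('users.fetch') above a function definition
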
lat = g.variables['XLAT_M'][:].squeeze()

# nc = Dataset('d02_e_pint_lin.nc')
# e = nc.variables['e'][:]
# p = nc.variables['level'][:]
# time = hh.get_time(nc).astype(datetime)
# # use only one year of data to avoid seasonal bias
# k = np.where(time==datetime(2014,12,31,00))[0][0]
# time = time[k:]
# e = e[k:,:,:,:]
# em = np.nanmean(e.transpose([1,0,2,3]).reshape((len(p),-1)),1)

nc = Dataset('d02_rh_pint_lin.nc')
r = nc.variables['rh'][:]
p = nc.variables['level'][:]
time = hh.get_time(nc).astype(datetime)

# use only one year of data to avoid seasonal bias
k = np.where(time == datetime(2014, 12, 31, 0))[0][0]
time = time[k:]
r = r[k:, :, :, :]
rm = np.nanmean(r.transpose([1, 0, 2, 3]).reshape((len(p), -1)), 1)
nc.close()

coords = {'stdomingo': (-71.6144, -33.6547), 'mendoza': (-68.7833, -32.8333)}

fig, ax = plt.subplots()
for x in ['stdomingo', 'mendoza']:
    for h in v[x].items:
        # q = v[x][h].mean().dropna()
        q = rh[x][h].mean().dropna()
        pl = plt.plot(q, q.index, '-s', label='{} {:02d}h obs'.format(x, h))[0]
        i, j, d = hh.nearest(lon, lat, *coords[x])

"""email_listener: Listen in an email folder and process incoming emails.""" import email import html2text from imapclient import IMAPClient import os import time import datetime from helpers import ( calc_timeout, get_time, ) t1 = get_time() print(t1) print(datetime.datetime.utcnow().timestamp()) t2 = datetime.datetime.now() dt = datetime.datetime.fromtimestamp(t2.timestamp()) delta = datetime.timedelta(minutes=10) print(t2) print(dt) print(t2 + delta)
S = pd.HDFStore('../data/IGRA/IGRAraw.h5')
sta = S['sta']

def ts(x):
    # need to do it this way, since dropna above doesn't delete the index properly
    # https://github.com/pandas-dev/pandas/issues/2770
    return np.array(x.unstack().index, dtype='datetime64[h]')

nc = Dataset('../data/wrf/d02_2014-09-10_transf.nc')
ma = basemap(nc)
x, y = ma.xy()
ij = ma(*hh.lonlat(sta.iloc[:2]))
t = hh.get_time(nc)
T = nc.variables['temp'][:]
P = nc.variables['press'][:]
Ti = interp4D((x, y), T, ij, sta.iloc[:2].index, t, method='linear')
Pi = interp4D((x, y), P, ij, sta.iloc[:2].index, t, method='linear')

p = [1000, 925, 850, 700, 500, 400, 300, 250, 200, 150, 100, 70, 50]
pl = np.log(p) + np.log(100)

def ints(pl, x):
    try:
        return interp1d(np.log(x.index), x, 'linear', bounds_error=False)(pl) * .1 + hh.K
    except Exception:
        # fallback body is an assumption (the snippet is cut off here):
        # return NaNs when interpolation fails, matching bounds_error=False
        return np.full(len(pl), np.nan)

def register():
    """Register user"""
    if request.method == "GET":
        return render_template("register.html")
    else:
        username = request.form.get("username")
        password = request.form.get("password")
        confirmation = request.form.get("confirmation")
        email = request.form.get("email")
        if not username or not password or not confirmation or password != confirmation:
            return apology(message="please fill the form correctly to register.")
        # Checking for username
        c = db.execute("SELECT username FROM users WHERE username = :username",
                       {"username": username}).fetchall()
        if c:
            return apology("username already taken")
        # Specifications for password
        # password length
        if len(password) < 6:
            return apology(message="password must be at least 6 characters long")
        # password must contain numbers
        if password.isalpha():
            return apology(message="password must contain numbers")
        # password must contain letters
        if password.isdigit():
            return apology(message="password must contain letters")
        # username may only contain letters, digits, and underscores
        for ch in username:
            if not ch.isalpha() and not ch.isdigit() and ch != "_":
                return apology(message="Please enter a valid username.")
        if len(username) < 1:
            return apology(message="please enter a username.")
        hash_pw = generate_password_hash(password)
        time = get_time()
        try:
            if email:
                q = db.execute("SELECT email FROM users WHERE email = :email",
                               {"email": email}).fetchone()
                if q:
                    return apology(message="this email already exists")
                db.execute(
                    "INSERT INTO users(username, hash, email, time) "
                    "VALUES(:username, :hash_pw, :email, :time)",
                    {"username": username, "hash_pw": hash_pw,
                     "email": email, "time": time})
                db.commit()
                message = "Congratulations!\n You're now registered on AAA Books!"
                #send_email(email, username, message)
            else:
                db.execute(
                    "INSERT INTO users(username, hash, time) "
                    "VALUES(:username, :hash_pw, :time)",
                    {"username": username, "hash_pw": hash_pw, "time": time})
                db.commit()
        except Exception:
            return apology(message="something went wrong with the database.")
        rows = db.execute(
            "SELECT id, username, email FROM users WHERE username = :username",
            {"username": username}).fetchone()
        session["user_id"] = rows["id"]
        session["username"] = rows["username"]
        session["email"] = rows["email"]
        flash("You're now registered!")
        return redirect("/")

# v = nc.variables['V'][:]
# w = nc.variables['W'][:]
# gp = (nc.variables['PHB']+nc.variables['PH'])/9.81
#
# uT = .5 * u[:,:,1:-1,1:-1]*(T[:,:,1:-1,:-1]+T[:,:,1:-1,1:])
# vT = .5 * v[:,:,1:-1,1:-1]*(T[:,:,:-1,1:-1]+T[:,:,1:,1:-1])
# wT = .5 * w[:,1:-1,1:-1,1:-1]*(T[:,:-1,1:-1,1:-1]+T[:,1:,1:-1,1:-1])
#
# ad = np.diff(uT,1,3)/10000 + np.diff(vT,1,2)/10000
# dw = np.diff(wT,1,1)/np.diff(gp[:,1:-1,1:-1,1:-1],1,1)

D = pd.HDFStore('../data/station_data.h5')
S = pd.HDFStore('../data/LinearLinear.h5')
T = hh.extract(D['ta_c'], 'prom', 1)
Tm = S['T2']['d02']
sta = D['sta']
b = Tm - T

nc = Dataset('../data/wrf/d02_2014-09-10.nc')
t = pd.DatetimeIndex(hh.get_time(nc)) - np.timedelta64(4, 'h')
ma = basemap(nc)
ij = ma(*hh.lonlat(sta))
lon, lat = ma.lonlat()
xy = (lon[1:-1, 1:-1], lat[1:-1, 1:-1])
ad = sp.grid_interp(xy, np.load('adv.npy'), ij, sta.index, t, method='linear')

d = pd.Panel({'adv': ad, 'bias': b})
dm = d.groupby(d.major_axis.date, 'major').mean().to_frame()

def binned_plot2(self, x, values, color=None, label=None):
    me, b, n = binned_statistic(x, values, 'mean', 50)
    std = binned_statistic(x, values, np.nanstd, 50)[0]
    xc = (b[:-1] + b[1:]) / 2  # bin centers
    self.fill_betweenx(xc, me - 2 * std, me + 2 * std, color=color, alpha=.4)
    self.plot(me, xc, color=color, label=label)

Axes.binned_plot = binned_plot2

# model, complete field
nc = Dataset(dd('wrf/d02_2014-09-10.nc'))
z = nc.variables['HGT'][:].flatten()
# use only one year of data so as to not bias towards a particular season
j = np.where(hh.get_time(nc) == np.datetime64('2014-12-31T00'))[0][0]
T2 = nc.variables['T2'][:]
T2 = T2[j:, :, :]
T2 = np.mean(T2, 0).flatten()
nc.close()

# model, field interpolated to station locations
S = pd.HDFStore(dd('LinearLinear.h5'))
Tm = S['T2'].minor_xs('d02').dropna(0, 'all')
# use only one year of data, s.a.
Tm = Tm[Tm.index >= '2014-12-31'].mean()
Z = S['z']
# lr = S['lapse']['all']
lr = -6.5 / 1000  # standard atmospheric lapse rate, K per m

def __init__(self, name):
    self.name = name
    self.start = get_time()

def intro_message():
    """
    Prints the intro message.

    ARGS:
        None
    RETURNS:
        None
    RAISES:
        None
    """
    print("\n" * 5)
    print("Now running cache updater. Press CTRL+C to quit.")
    print("\nThis program will update the cache once daily.")
    print("\n" * 5)
    return

# run main() once daily
while True:
    intro_message()
    if main() == 1:
        break
    dt = get_time()
    print("\n" + dt + " Cache update complete.\nGoing back to sleep...\n\n")
    sleep(86400)

def done(self):
    if not self.start:
        return  # already submitted once
    elapsed_time = get_time() - self.start
    Metric.timing(self.name, elapsed_time)
    self.start = None  # prevent further submitting

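# __init__ and done() together suggest a one-shot timer object. A usage
# sketch, assuming the class is named Timer and takes the metric name
# (the class name and the workload below are hypothetical):
t = Timer('db.fetch_users')
rows = fetch_users()  # assumed workload being measured
t.done()              # submits the elapsed time to Metric.timing once
t.done()              # no-op: self.start was cleared by the first call
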
def sell():
    """Sell shares of stock"""
    if request.method == "GET":
        # Getting symbols user owns to display
        stocks = db.execute(
            "SELECT symbol FROM transactions WHERE id = :id "
            "GROUP BY symbol HAVING SUM(shares) > 0 ORDER BY symbol",
            {'id': session["user_id"]}).fetchall()
        return render_template("sell.html", stocks=stocks)
    else:
        symbol = request.form.get("symbol")
        shares = request.form.get("shares")
        if not symbol or not shares:
            return apology(message="please enter a symbol and a share")
        stocks = db.execute(
            "SELECT SUM(shares) AS sum_shares FROM transactions "
            "WHERE id = :id AND symbol = :symbol "
            "GROUP BY symbol HAVING SUM(shares) > 0",
            {'id': session["user_id"], 'symbol': symbol}).fetchone()
        # fetchone() returns None if the user owns no shares of this symbol
        if not stocks:
            return apology(message="you don't have enough shares")
        shares_owned = stocks["sum_shares"]
        try:
            shares = int(shares)
            if shares < 1:
                return apology(message="please enter a positive number")
        except ValueError:
            return apology(message="please enter a positive integer")
        if shares > shares_owned:
            return apology(message="you don't have enough shares")
        quote = lookup(symbol.strip().lower())
        if not quote:
            return apology(message="invalid symbol")
        # Getting the user's cash balance
        query = db.execute("SELECT cash FROM users WHERE id = :id",
                           {'id': session["user_id"]}).fetchone()
        cash = float(query['cash'])
        # Getting the result of stock * shares to add to user's cash
        price = quote["price"] * shares
        # Record the sale as a negative share count in the transactions table
        shares = shares * -1
        transacted = get_time()
        db.execute(
            "INSERT INTO transactions(id, symbol, shares, price, transacted) "
            "VALUES(:id, :symbol, :shares, :price, :transacted)",
            {'id': session["user_id"], 'symbol': symbol, 'shares': shares,
             'price': price, 'transacted': transacted})
        flash("Sold!")
        # Updating the user's cash balance
        cash += float(price)
        db.execute("UPDATE users SET cash = :cash WHERE id = :id",
                   {'cash': cash, 'id': session["user_id"]})
        db.commit()
        return redirect("/")