def register():
    """Register a new user, then redirect them to the login page.

    GET renders the registration form; POST validates the submitted
    fields, rejects duplicates, and stores only the password hash.
    """
    if request.method != 'POST':
        return render_template('register.html')

    username = sanitize(request.form.get("username"))
    password = sanitize(request.form.get("password"))
    password_repeat = sanitize(request.form.get("confirmation"))

    # Validate the form before touching the database.
    if username == '' or password == '' or password_repeat == '':
        return apology("Fields cannot be blank.")
    if password != password_repeat:
        return apology("Passwords didn't match.")

    # Reject duplicate usernames.
    user = db.execute('SELECT * FROM users WHERE username = :user',
                      user=username)
    if user:
        return apology("User already exists.")

    # Store only the hash, never the plaintext password.
    db.execute(
        'INSERT INTO users (username, hash) VALUES (:username, :hash)',
        username=username, hash=generate_password_hash(password))
    # The user is NOT logged in automatically; they must log in next.
    return redirect("/login")
def login():
    """Log user in"""
    # Drop any existing session before authenticating.
    session.clear()

    # GET (link or redirect): just show the login form.
    if request.method != "POST":
        return render_template("login.html")

    # Both fields are mandatory.
    if not request.form.get("username"):
        return apology("must provide username", 403)
    if not request.form.get("password"):
        return apology("must provide password", 403)

    # Look the user up by (sanitized) username.
    rows = db.execute("SELECT * FROM users WHERE username = :username",
                      username=sanitize(request.form.get("username")))

    # Exactly one matching row whose stored hash verifies, or reject.
    bad_user = len(rows) != 1
    if bad_user or not check_password_hash(rows[0]["hash"],
                                           sanitize(request.form.get("password"))):
        return apology("invalid username and/or password", 403)

    # Remember which user has logged in, then go to the home page.
    session["user_id"] = rows[0]["id"]
    return redirect("/")
def buy():
    """Buy shares of stock.

    Fixes over the original: input is validated BEFORE it is used —
    the old code called int() on the raw shares field (crashing on blank
    or non-numeric input, making its own `shares == ''` check dead) and
    indexed lookup()'s result before checking it for None. The debug
    print of the total price is removed.
    """
    message = ""
    if request.method == 'POST':
        symbol = sanitize(request.form.get("symbol"))
        shares_raw = sanitize(request.form.get("shares"))

        if symbol == '' or shares_raw == '':
            return apology("Fields cannot be blank")

        # Convert defensively; reject non-numeric or non-positive counts.
        try:
            shares = int(shares_raw)
        except ValueError:
            return apology("Shares amount has to be positive number")
        if shares < 1:
            return apology("Shares amount has to be positive number")

        # Check the symbol exists before computing a price from it.
        stock_check = lookup(symbol)
        if stock_check is None:
            return apology("Stock not found")

        username = db.execute('SELECT username FROM users WHERE id = :id',
                              id=session["user_id"])[0]["username"]
        cash = db.execute('SELECT cash FROM users WHERE username = :user',
                          user=username)[0]["cash"]

        total_price = shares * stock_check["price"]
        if total_price > cash:
            return apology("Not enough money")

        # Record the purchase and deduct the cost.
        db.execute(
            'INSERT INTO purchases (symbol, amount, owner) VALUES (:symbol, :amount, :user)',
            symbol=symbol, amount=shares, user=username)
        db.execute('UPDATE users SET cash = :cash WHERE username = :user',
                   cash=round(cash - total_price, 2), user=username)
        message = "You successfuly bought %i shares of %s for %s" % (
            shares, symbol, usd(total_price))
    return render_template("buy.html", message=message)
def submit(request):
    """Store exercise key/value answers from the GET query string.

    Fix: SELECT MAX(sid) returns NULL (None) on an empty table, which
    made `+ 1` raise TypeError; treat that case as sid 1.
    """
    request_dict = request.GET.copy()
    try:
        exercise = request_dict.pop('exercise')[0]
    except KeyError:
        return http.HttpResponseBadRequest()
    cursor = connections['default'].dict_cursor()
    if exercise in ('butte_diabetes', 'selection', 'assimes_cad',
                    'neandertal', 'eqtl', 'snyder_binding',
                    'mignot_narcolepsy', 'longevity', 'kim_aging'):
        population = helpers.check_population(
            request_dict.pop('population')[0])
        cursor.execute('SELECT MAX(sid) FROM interpretome_exercises.unified;')
        max_sid = cursor.fetchone().values()[0]
        sid = (max_sid or 0) + 1
        statements = []
        for k, v in request_dict.items():
            # NOTE(review): values are run through helpers.sanitize but the
            # query is still built by string interpolation; prefer a
            # parameterized executemany if the driver supports it.
            string = "('%s', '%s', '%s', '%s', %d)" % (helpers.sanitize(
                k), helpers.sanitize(v), population, exercise, int(sid))
            statements.append(string)
        query = '''
      INSERT INTO interpretome_exercises.unified
      (`key`, `value`, population, exercise, sid)
      VALUES %s;''' % ', '.join(statements)
        cursor.execute(query)
    return http.HttpResponse()
async def submit_message(reader, writer, message):
    """Send one sanitized message (blank-line terminated) and log the reply."""
    payload = "{}\n\n".format(sanitize(message))
    writer.write(payload.encode())
    await writer.drain()
    # Note: the payload has already been sanitized once; this matches the
    # original's double application when logging.
    logging.info("Sent message: {}".format(sanitize(payload)))
    reply = await reader.readline()
    logging.info("Received: {}".format(reply.decode()))
def _metadata(self):
    """Build the metadata mapping for this experiment's track."""
    exp = self.exp
    return {
        "age": Helpers.sanitize(Helpers.getOrUnknown(exp.age_display)),
        "sex": Helpers.getOrUnknown(exp.donor_sex),
        "accession": exp.encodeID,
        "description": Helpers.sanitize(self._desc()),
        "donor": exp.donor_id,
        "view": self.view,
    }
def load(self): logging.main_logger.debug("[sim] 'load' called") # return if not a directory if not os.path.isdir(self.path): logging.main_logger.warning("[sim] sim %s not found" % (self.path)) return False # check if there is a bin folder if os.path.isdir(self.path + '/bin'): self.has_bin_folder = True # check if there is an OpenSim.ini file if os.path.isfile(self.path + '/bin/OpenSim.exe'): self.has_opensim_exe = True # check if there is an OpenSim.ini file if os.path.isfile(self.path + '/bin/OpenSim.ini'): self.has_opensim_ini = True # check if there is an OpenSim.log file if os.path.isfile(self.path + '/log/OpenSim.log'): self.has_opensim_log = True # check if there is an OpenSim.log file if os.path.isfile(self.path + '/log/tmux.log'): self.has_tmux_log = True # check if there is a Regions.ini file if os.path.isfile(self.path + '/bin/Regions/Regions.ini'): self.has_regions_ini = True # check if RAdmin is enabled if self.has_opensim_ini: from ConfigParser import ConfigParser from helpers import sanitize opensim_ini = ConfigParser() opensim_ini.read(self.path + '/bin/OpenSim.ini') if opensim_ini.has_section('RemoteAdmin'): if opensim_ini.has_option('RemoteAdmin', 'enabled'): if opensim_ini.get('RemoteAdmin', 'enabled').lower() == 'true': if opensim_ini.has_option('RemoteAdmin', 'access_password'): self.radmin_password = sanitize(opensim_ini.get('RemoteAdmin', 'access_password')) self.radmin_ready = True if opensim_ini.has_section('Network'): if opensim_ini.has_option('Network', 'http_listener_port'): self.port = sanitize(opensim_ini.get('Network', 'http_listener_port')) if opensim_ini.has_section('Startup'): if opensim_ini.has_option('Startup', 'PIDFile'): self.pid_file = sanitize(opensim_ini.get('Startup', 'PIDFile')[1:-1]) self.valid = self.has_bin_folder and self.pid_file != "" and self.has_opensim_exe and self.has_opensim_log and self.has_opensim_ini and self.has_regions_ini and self.has_tmux_log and self.radmin_ready return True
def run_make_action(docker_home, service, action):
    """
    go and run the make tasks on the folder of the container.
    docker_home is needed while we find a better way of retrieving it,
    service is the name of the folder, action is the task to be run.
    """
    service = sanitize(service)
    action = sanitize(action)
    base_dir = join(docker_home, service)
    # NOTE(review): the command is built by string interpolation and handed
    # to a shell; this relies entirely on sanitize() stripping shell
    # metacharacters. A list-argv, shell=False runner would be safer.
    output = do_run("cd {0}; make {1}".format(base_dir, action))
    return output
def register():
    """Registers new users.

    Fixes over the original: the sqlite connection is no longer opened
    for GET requests (it leaked there and on every validation failure —
    now closed via try/finally), and the debug SELECT + print of the
    freshly inserted row (which logged the password hash) is removed.
    """
    # Renders the register page if GET request — no DB work needed.
    if request.method == "GET":
        return render_template("register.html")

    # Connect to SQL Database only for the POST path.
    conn = sqlite3.connect('finance.db')
    db = conn.cursor()
    try:
        # Checks if the user inputted a username
        username = sanitize(request.form.get("username"))
        if not username:
            return apology("Please enter a username")

        # Checks if the username already exists
        db.execute("SELECT * FROM users WHERE username = (?)", [username])
        if db.fetchone() is not None:
            return apology("That username already exists. Please enter a different username")

        # Checks if the passwords are the same and if the passwords were inputted
        password = sanitize(request.form.get("password"))
        confirmation = request.form.get("confirmation")
        if not password or not confirmation:
            return apology("Please enter both passwords")
        if password != confirmation:
            return apology("Please enter passwords that match")

        # Store only the hash of the password.
        phash = generate_password_hash(password)
        db.execute("INSERT INTO users (username, hash) VALUES((?), (?))",
                   (username, phash))
        conn.commit()
    finally:
        # Always release the connection, whatever path we took above.
        conn.close()

    return render_template("success.html")
def urban_dictionary(self, xmpp_message, room, nick, term):
    """
    Lookup a term on Urban Dictionary.
    Usage: .ud <term>
    Alias: urban_dictionary
    """
    # term is required.
    if not term:
        return

    URL = "http://api.urbandictionary.com/v0/define?term=%s"
    HDRS = {"Host": "api.urbandictionary.com"}

    try:
        data = requests.get(URL % requests.utils.quote(term), headers=HDRS).content
        data = simplejson.loads(helpers.sanitize(data))
    # Narrowed from a bare except so KeyboardInterrupt/SystemExit propagate.
    except Exception:
        return "(facepalm) sorry. I encounted an error."

    # pick a random definition; the API may return no "list" or an empty one.
    try:
        ud = random.choice(data["list"])
        return [ud["permalink"], "%s\nExample: %s" % (ud["definition"], ud["example"])]
    except (KeyError, IndexError):
        return "No definition found for '%s'" % term
def search():
    """Search YouTube for the requested song and offer the top 3 results.

    Fix: the original stored data['items'][0]'s videoId on every loop
    iteration, so all three saved ids pointed at the first result; each
    iteration now stores its own result's id.
    """
    song_req = request.args['song_req']
    # search for the songs
    q = sanitize(song_req)  # remove all spaces
    url = "https://www.googleapis.com/youtube/v3/search?part=snippet&maxResults=3&q={}&key={}".format(
        q, API_KEY)
    response = urllib.urlopen(url)
    data = json.loads(response.read())

    # start storing lists
    session['songIds'] = []
    session['songNames'] = []
    reply = "We found three songs for you!\n"

    # add to storing lists
    for i in range(3):
        item = data['items'][i]
        title = item["snippet"]['title']
        author = item["snippet"]['channelTitle']
        reply += "{}. Song {} by {}\n".format(i, title, author)
        # BUG FIX: take the id of result i, not always result 0.
        videoId = item['id']['videoId']
        session['songIds'].append(videoId)
        session['songNames'].append(title)

    # construct the response
    resp = MessagingResponse()
    resp.message(reply)
    session['state'] = 1
    return str(resp)
def scrap_series(html):
    """Get useful info from the series list."""
    soup = bs4.BeautifulSoup(helpers.sanitize(html))
    listing = soup.find('ul', id='listaEpisodios')
    results = []
    for season in listing.find_all('li', class_='temporada'):
        # A season may lack a title anchor entirely.
        title_tag = season.find('a', class_='temporada-titulo')
        season_title = None if title_tag is None else helpers.clean_html(title_tag.text)
        for episode in season.find_all('li'):
            anchor = episode.find('a')
            link = anchor['href']
            # before getting the text, remove a posible span text
            span = anchor.find('span')
            if span is not None:
                span.clear()
            ep_title = helpers.enhance_number(helpers.clean_html(anchor.text))
            results.append((season_title, ep_title, link))
    return results
def scrap_programa(html):
    """Get useful info from a program."""
    soup = bs4.BeautifulSoup(helpers.sanitize(html))
    listing = soup.find('ul', id='listaEpisodios')

    episodes_result = []
    if listing is not None:
        season_title = helpers.clean_html(listing.find('h2').text)
        # first episode is html weirdy — skip it
        for episode in listing.find_all('li')[1:]:
            anchor = episode.find('a')
            episodes_result.append(
                (season_title, helpers.clean_html(anchor.text), anchor['href']))

    # get only duration from the metadata body (may be absent)
    duration_result = None
    metadata = soup.find('div', class_="cuerpoMetadata informacion")
    if metadata is not None:
        duration_tag = metadata.find('p', class_='duracion')
        if duration_tag is not None:
            duration_result = int(duration_tag.text.split()[1])

    return duration_result, episodes_result
def _subgroups(self):
    """Build the subGroups mapping for this track and fill in
    self.presentation with (raw, display) pairs for each key. Returns
    the subGroups dict."""
    assay = self.exp.assay_term_name
    # RNA-seq experiments use the (more specific) assay title instead.
    if "RNA-seq" == assay:
        assay = self.exp.assay_title
    target_label = ' '.join([self.exp.assay_term_name, self.exp.target, self.exp.label]).strip()
    s = {}
    s["donor"] = Helpers.getOrUnknown(self.exp.donor_id)
    s["assay"] = Helpers.getOrUnknown(assay)
    s["label"] = Helpers.getOrUnknown(self.exp.tf)
    s["target_label"] = Helpers.getOrUnknown(target_label)
    s["biosample"] = Helpers.getOrUnknown(self.exp.biosample_term_name)
    # Round-trip through ASCII to drop any non-ASCII characters.
    s["biosample_summary"] = Helpers.getOrUnknown(self.exp.biosample_summary).encode('ascii', 'ignore').decode('ascii')
    # 'a' prefix keeps the age subgroup value starting with a letter.
    s["age"] = 'a' + Helpers.sanitize(Helpers.getOrUnknown(self.exp.age_display))
    s["sex"] = Helpers.getOrUnknown(self.exp.donor_sex)
    age_sex = ' '.join([e for e in [self.exp.age_display, self.exp.donor_sex] if e]).strip()
    s["age_sex"] = Helpers.getOrUnknown(age_sex)
    s["view"] = self.view
    # presentation maps key -> (subgroup value, human-readable display).
    self.presentation["label"] = (s["label"], Helpers.html_escape(Helpers.getOrUnknown(self.exp.tf)))
    self.presentation["assay"] = (s["assay"], s["assay"])
    self.presentation["donor"] = (s["donor"], s["donor"])
    self.presentation["target_label"] = (s["target_label"], s["target_label"])
    self.presentation["age"] = (s["age"], Helpers.html_escape(Helpers.getOrUnknown(self.exp.age_display)))
    self.presentation["view"] = (s["view"], s["view"])
    self.presentation["sex"] = (s["sex"], s["sex"])
    self.presentation["age_sex"] = (s["age_sex"], s["age_sex"])
    self.presentation["biosample"] = (s["biosample"], s["biosample"])
    self.presentation["biosample_summary"] = (s["biosample_summary"], s["biosample_summary"])
    # "tissue" is presented identically to "biosample".
    self.presentation["tissue"] = self.presentation["biosample"]
    return s
def on_get(self, request, response, **params):
    """Return `num` World rows as JSON, with Date/Server headers set."""
    count = sanitize(params.get("num", "1"))
    worlds = [World[ident].to_dict() for ident in generate_ids(count)]
    response.set_header('Date',
                        formatdate(timeval=None, localtime=False, usegmt=True))
    response.set_header('Server', server_info)
    response.media = worlds
def scrap_page(html):
    """Scrap the page."""
    contents = []
    soup = BeautifulSoup(helpers.sanitize(html))
    for node in soup.findAll("div", {"class": "video_muestra_catalogo"}):
        # Keep only entries that carry a playable-video link.
        has_video = False
        for anchor in node.find_all("a"):
            if anchor.get("onclick", "").startswith("javascript:verVideo"):
                has_video = True
                break
        if not has_video:
            # video not really present for this program
            continue
        title = node.h4.contents[0].title().strip()
        sinopsis_parts = node.find("h5", {"class": "sinopsis_cat"}).contents
        sinopsis = sinopsis_parts[0] if sinopsis_parts else u""
        id_video = node.findAll("li")[1].a['href'].split("=")[1]
        contents.append({
            "duration": "?",
            "channel": "Bacua",
            "section": "Micro",
            "description": sinopsis,
            "title": title,
            "url": BACKEND % (id_video,),
            "episode_id": 'bacua_' + id_video,
            "image_url": IMG_URL % (id_video,),
            "season": None,
        })
    return contents
def scrap_page(html):
    """Scrap the page."""
    contents = []
    soup = BeautifulSoup(helpers.sanitize(html))
    for entry in soup.findAll("div", {"class": "video_muestra_catalogo"}):
        # video not really present for this program unless some anchor
        # triggers the verVideo javascript.
        anchors = entry.find_all("a")
        if not any(a.get("onclick", "").startswith("javascript:verVideo")
                   for a in anchors):
            continue

        title = entry.h4.contents[0].title().strip()
        sinopsis_contents = entry.find("h5", {"class": "sinopsis_cat"}).contents
        sinopsis = sinopsis_contents[0] if sinopsis_contents else u""
        id_video = entry.findAll("li")[1].a['href'].split("=")[1]

        record = {
            "duration": "?",
            "channel": "Bacua",
            "section": "Micro",
            "description": sinopsis,
            "title": title,
            "url": BACKEND % (id_video, ),
            "episode_id": 'bacua_' + id_video,
            "image_url": IMG_URL % (id_video, ),
            "season": None
        }
        contents.append(record)
    return contents
def urban_dictionary(self, xmpp_message, room, nick, term):
    """
    Lookup a term on Urban Dictionary.
    Usage: .ud <term>
    Alias: urban_dictionary
    """
    # term is required.
    if not term:
        return

    URL = "http://www.urbandictionary.com/iphone/search/define?term=%s"
    HDRS = {"Host": "www.urbandictionary.com"}

    try:
        data = requests.get(URL % requests.utils.quote(term), headers=HDRS).content
        data = simplejson.loads(helpers.sanitize(data))
    # Narrowed from a bare except so KeyboardInterrupt/SystemExit propagate.
    except Exception:
        return "(facepalm) sorry. I encounted an error."

    # pick a random definition; guard against a missing or empty result
    # list instead of crashing (the v0-API variant in this file already
    # handled that case).
    try:
        ud = random.choice(data["list"])
    except (KeyError, IndexError):
        return "No definition found for '%s'" % term
    return [ud["permalink"], "%s\nExample: %s" % (ud["definition"], ud["example"])]
def scrap_video(html):
    """Get useful info from the video page."""
    soup = bs4.BeautifulSoup(helpers.sanitize(html))
    duration_tag = soup.find('p', class_='duracion')
    if duration_tag is None:
        # No duration paragraph: nothing to report.
        return None
    # Second whitespace-separated token holds the number of minutes.
    return int(duration_tag.text.split()[1])
def google_calculator(self, xmpp_message, room, nick, expression):
    """
    (DEPRECATED! Google API has vanished.) Help humans calculate.
    Use 'result', 'res', 'answer', 'ans' or '_' to reference the result
    of the calculation. Results are tracked individually per person,
    room and calculator type.
    Usage: .gcalc <expression>
    Alias: gc
    """
    URL = "https://www.google.com/ig/calculator?hl=en&q="
    # an expression is required.
    if not expression:
        return
    # ensure local storage exists for this user. (dict.has_key: Python 2)
    if not self.gcalc_answers.has_key(nick):
        self.gcalc_answers[nick] = {}
    # ...and in this specific room.
    if not self.gcalc_answers[nick].has_key(room):
        self.gcalc_answers[nick][room] = 0
    # normalize ans -> answer -> _
    expression = expression.replace("answer", "_").replace("ans", "_")
    # splice in the last value.
    if "_" in expression:
        expression = expression.replace(
            "_", str(self.gcalc_answers[nick][room]))
    try:
        data = requests.get(URL + requests.utils.quote(expression)).content
        data = helpers.sanitize(data)
    except:
        return "(facepalm) sorry. I encounted a JSON parsing error."
    try:
        # normalize the JavaScript JSON into properly quoted JSON.
        # ex: {lhs: "200 pounds",rhs: "90.718474 kilograms",error: "",icc: false}
        for token in ["lhs", "rhs", "error", "icc"]:
            data = data.replace(token + ":", '"%s":' % token)
        data = simplejson.loads(data)
        ans = data["rhs"]
        if ans:
            # remember the answer for the next '_' reference.
            self.gcalc_answers[nick][room] = ans
            return "%s" % ans
        else:
            return "(thumbsdown) could not compute."
    except:
        return "(facepalm) oops. I encountered an error."
def on_get(self, request, response, **params):
    """Fetch `num` Worlds, refresh each randomNumber, return them as JSON."""
    count = sanitize(params.get("num", "1"))
    ids = generate_ids(count)
    ids.sort()
    payload = []
    for ident in ids:
        world = World[ident]
        # Assign a fresh random number before serializing the row.
        world.randomNumber = randint(1, 10000)
        payload.append({"id": world.id, "randomNumber": world.randomNumber})
    response.media = payload
def google_calculator(self, xmpp_message, room, nick, expression):
    """
    Help humans calculate. Use 'result', 'res', 'answer', 'ans' or '_'
    to reference the result of the calculation. Results are tracked
    individually per person, room and calculator type.
    Usage: .gcalc <expression>
    Alias: gc
    """
    URL = "https://www.google.com/ig/calculator?hl=en&q="
    # an expression is required.
    if not expression:
        return
    # ensure local storage exists for this user. (dict.has_key: Python 2)
    if not self.gcalc_answers.has_key(nick):
        self.gcalc_answers[nick] = {}
    # ...and in this specific room.
    if not self.gcalc_answers[nick].has_key(room):
        self.gcalc_answers[nick][room] = 0
    # normalize ans -> answer -> _
    expression = expression.replace("answer", "_").replace("ans", "_")
    # splice in the last value.
    if "_" in expression:
        expression = expression.replace("_", str(self.gcalc_answers[nick][room]))
    try:
        data = requests.get(URL + requests.utils.quote(expression)).content
        data = helpers.sanitize(data)
    except:
        return "(facepalm) sorry. I encounted a JSON parsing error."
    try:
        # normalize the JavaScript JSON into properly quoted JSON.
        # ex: {lhs: "200 pounds",rhs: "90.718474 kilograms",error: "",icc: false}
        for token in ["lhs", "rhs", "error", "icc"]:
            data = data.replace(token + ":", '"%s":' % token)
        data = simplejson.loads(data)
        ans = data["rhs"]
        if ans:
            # remember the answer for the next '_' reference.
            self.gcalc_answers[nick][room] = ans
            return "%s" % ans
        else:
            return "(thumbsdown) could not compute."
    except:
        return "(facepalm) oops. I encountered an error."
async def register(reader, writer, username):
    """Register a username with the chat server and persist the returned token."""
    logging.info("Register: Try username {}".format(username))

    # Send a bare newline, then consume one line from the server.
    writer.write("\n".encode())
    await writer.drain()
    await reader.readline()

    # Submit the sanitized username.
    writer.write("{}\n".format(sanitize(username)).encode())
    await writer.drain()

    # The server answers with JSON containing our account hash; store it
    # as TOKEN in the .env file.
    raw = await reader.readline()
    response = json.loads(raw.decode())
    set_key(find_dotenv(), "TOKEN", response["account_hash"])
    logging.info('Register: Username "{}" registered with token {}'.format(
        sanitize(username), response["account_hash"]))

    # Consume one trailing line before returning.
    await reader.readline()
def on_get(self, request, response, **params):
    """Fetch `num` Worlds, refresh each randomNumber, return JSON with
    Date/Server headers set."""
    count = sanitize(params.get("num", "1"))
    ids = generate_ids(count)
    ids.sort()

    payload = []
    for ident in ids:
        world = World[ident]
        # Assign a fresh random number before serializing the row.
        world.randomNumber = randint(1, 10000)
        payload.append({"id": world.id, "randomNumber": world.randomNumber})

    response.set_header('Date',
                        formatdate(timeval=None, localtime=False, usegmt=True))
    response.set_header('Server', server_info)
    response.media = payload
def update(self, start=None, end=None, statusFilter=None, updateName=None, updateID=None, up=None, down=None, remove=None):
    """Queue management endpoint: move/delete/rename pending items when
    the corresponding parameter is given, otherwise return a JSON slice
    [start:end) of the merged queue plus a summary record."""
    # int(None) raises TypeError — these defaults apply when the three
    # paging params are omitted. (A non-numeric string would raise
    # ValueError, which is NOT caught here.)
    try:
        start = int(start)
        end = int(end)
        statusFilter = int(statusFilter)
    except TypeError:
        start = 0
        end = 20
        statusFilter = -1
    # Move an item up in the pending queue (best-effort: any failure
    # is reported as "Nope.").
    if not up is None:
        try:
            pacvert.thequeue.movePending(int(up), -1)
            return "OK."
        except:
            return "Nope."
    # Move an item down in the pending queue.
    if not down is None:
        try:
            pacvert.thequeue.movePending(int(down), 1)
            return "OK."
        except:
            return "Nope."
    # Delete a pending item.
    if not remove is None:
        try:
            pacvert.thequeue.deletePending(int(remove))
            return "OK."
        except:
            return "Nope."
    # Rename a queue element; falls through to the listing below on
    # ValueError (e.g. non-numeric updateID).
    if not updateName is None:
        try:
            updateName = replace_illegal_chars(sanitize(str(updateName)))
            if (len(updateName) < 2):
                return "Illegal character detected."
            updateID = int(updateID)
            returnQueueElementByFileID(updateID).setRename(updateName)
            return "OK."
        except ValueError:
            logger.error("Can't update name of file.")
    # No action requested: return the [start:end) window of the merged
    # queue, clamped to its length, plus a trailing summary dict.
    retValue = []
    tempQueue = pacvert.thequeue.getMerged(statusFilter)
    if len(tempQueue) > 0:
        for i in range(min(start, len(tempQueue)), min(len(tempQueue),end)):
            retValue.append(tempQueue[i].getAsDict())
    retValue.append({'queue_length': len(tempQueue), 'commits_behind': pacvert.COMMITS_BEHIND})
    return json.dumps(retValue)
def quote():
    """Get stock quote."""
    symbol = ""
    price = 0
    if request.method == "POST":
        raw_symbol = request.form.get("symbol")
        if raw_symbol == "":
            return apology("Field cannot be blank")
        stock = lookup(sanitize(raw_symbol))
        if not stock:
            return apology("Not found")
        # Found: show formatted price and the canonical symbol.
        price = usd(stock["price"])
        symbol = stock["symbol"]
    return render_template("quote.html", symbol=symbol, price=price)
def submit(request):
    """Store exercise key/value answers from the GET query string.

    Fix: SELECT MAX(sid) returns NULL (None) on an empty table, which
    made `+ 1` raise TypeError; treat that case as sid 1.
    """
    request_dict = request.GET.copy()
    try:
        exercise = request_dict.pop('exercise')[0]
    except KeyError:
        return http.HttpResponseBadRequest()
    cursor = connections['default'].dict_cursor()
    if exercise in (
        'butte_diabetes', 'selection', 'assimes_cad', 'neandertal', 'eqtl',
        'snyder_binding', 'mignot_narcolepsy', 'longevity', 'kim_aging'
    ):
        population = helpers.check_population(request_dict.pop('population')[0])
        cursor.execute('SELECT MAX(sid) FROM interpretome_exercises.unified;')
        max_sid = cursor.fetchone().values()[0]
        sid = (max_sid or 0) + 1
        statements = []
        for k, v in request_dict.items():
            # NOTE(review): values are sanitized but the query is still
            # assembled by string interpolation; prefer parameterized SQL.
            string = "('%s', '%s', '%s', '%s', %d)" % (helpers.sanitize(k),
                helpers.sanitize(v), population, exercise, int(sid))
            statements.append(string)
        query = '''
      INSERT INTO interpretome_exercises.unified
      (`key`, `value`, population, exercise, sid)
      VALUES %s;''' % ', '.join(statements)
        cursor.execute(query)
    return http.HttpResponse()
def _desc(self):
    """Compose the human-readable description line for this experiment."""
    exp = self.exp
    parts = [exp.encodeID]

    # Prefer the biosample summary; fall back to the free-text
    # description, then the assay name.
    if exp.biosample_summary:
        parts.append(Helpers.sanitize(exp.biosample_summary.strip()))
    elif exp.description:
        parts.append(exp.description)
    else:
        parts.append(exp.assay_term_name)

    if exp.tf:
        parts.append(exp.tf)

    age = exp.age_display
    if age and "unknown" != age:
        parts.append(age)

    # Always end with the file's output type in parentheses.
    parts.append('(%s)' % self.f.output_type)
    return " ".join(parts)
def _init(self):
    """Assemble the ordered trackhub parameter stanza for this bigBed track."""
    params = OrderedDict()
    # Track id: parent initials + sanitized experiment/file accessions.
    params["track"] = self.parent.initials() + Helpers.sanitize(self.f.expID + '_' + self.f.fileID)
    params["parent"] = self.parent.param(self.parent.on)
    params["subGroups"] = Helpers.unrollEquals(self._subgroups())
    params["bigDataUrl"] = self._url()
    params["visibility"] = Helpers.viz("dense", self.active)
    params["type"] = "bigBed"
    params["shortLabel"] = Helpers.makeShortLabel(self.exp.assay_term_name, self.exp.tf)
    params["longLabel"] = Helpers.makeLongLabel(self._desc())
    params["itemRgb"] = "On"
    params["color"] = Helpers.colorize(self.exp)
    params["darkerLabels"] = "on"
    params["metadata"] = Helpers.unrollEquals(self._metadata())
    params["view"] = self.exp.encodeID
    return params
def _init(self):
    """Assemble the ordered trackhub parameter stanza for this bigWig track."""
    cfg = OrderedDict()
    # Track id: parent initials + sanitized experiment/file accessions.
    cfg["track"] = self.parent.initials() + Helpers.sanitize(self.f.expID + '_' + self.f.fileID)
    cfg["parent"] = self.parent.param(self.active)
    cfg["subGroups"] = Helpers.unrollEquals(self._subgroups())
    cfg["bigDataUrl"] = self._url()
    cfg["visibility"] = Helpers.viz("full", self.active)
    cfg["type"] = "bigWig"
    cfg["color"] = Helpers.colorize(self.exp)
    cfg["height"] = "maxHeightPixels 64:12:8"
    cfg["shortLabel"] = Helpers.makeShortLabel(self.exp.assay_term_name, self.exp.biosample_term_name)
    cfg["longLabel"] = Helpers.makeLongLabel(self.exp.assay_term_name + ' ' + self._desc())
    cfg["itemRgb"] = "On"
    cfg["darkerLabels"] = "on"
    cfg["metadata"] = Helpers.unrollEquals(self._metadata())
    cfg["view"] = self.view
    return cfg
def quote():
    """Get stock quote."""
    # GET: just show the lookup form.
    if request.method == "GET":
        return render_template("quote.html")

    # POST: resolve the symbol; reject unknown ones.
    stock = lookup(sanitize(request.form.get("symbol")))
    if stock is None:
        return apology("Invalid Symbol")

    # Show the result with the price formatted as USD.
    return render_template("quoted.html", name=stock['name'],
                           price=usd(stock['price']), symbol=stock['symbol'])
def login():
    """Log user in.

    Fixes over the original: the sqlite connection was opened before the
    method check and never closed on the GET path, the missing-field
    paths, or the invalid-login path — it is now opened only when needed
    and always closed via try/finally. The no-op commit after a SELECT
    is dropped.
    """
    # Forget any user_id
    session.clear()

    # User reached route via GET (as by clicking a link or via redirect)
    if request.method != "POST":
        return render_template("login.html")

    # Ensure username and password were submitted.
    if not request.form.get("username"):
        return apology("Must provide username", 403)
    if not request.form.get("password"):
        return apology("Must provide password", 403)

    # Query database for username.
    conn = sqlite3.connect('finance.db')
    db = conn.cursor()
    try:
        db.execute("SELECT * FROM users WHERE username = (?)",
                   (sanitize(request.form.get("username")),))
        row = db.fetchone()
    finally:
        conn.close()

    # Checks if the username exists and if the password is correct
    # (row[2] holds the stored password hash).
    if row is None or not check_password_hash(row[2], request.form.get("password")):
        return apology("Invalid username and/or password", 403)

    # Remember which user has logged in (row[0] is the user id).
    session["user_id"] = row[0]
    return redirect("/")
def get_individuals(request):
    """Return SNP similarity data for the requested individuals as JSON.

    Query params: numsnps (int, required), individuals (comma-separated
    names, '210-2011-staff' expands to the staff list)."""
    numsnps = helpers.check_int(request.GET.get('numsnps', None))
    if numsnps is None:
        return http.HttpResponseBadRequest()
    # NOTE(review): if 'individuals' is missing this passes None into
    # helpers.sanitize; depending on its behavior this may raise rather
    # than return a 400 — confirm and guard if needed.
    individuals = helpers.sanitize(request.GET.get('individuals', None)).split(',')
    individual_select = set()
    for individual in individuals:
        # The class alias expands to the named staff members.
        if individual == '210-2011-staff':
            individual_select.update(['Konrad', 'Nick', 'Noah', 'Rob', 'Stuart'])
        else:
            individual_select.add(individual)
    if len(individual_select) == 0:
        return http.HttpResponseBadRequest()
    cursor = connections['default'].dict_cursor()
    # NOTE(review): column names are interpolated directly into the SQL;
    # safety rests entirely on helpers.sanitize. Prefer whitelisting the
    # allowed individual names.
    query = '''
      SELECT dbsnp, %s FROM interpretome_ancestry.similarity LIMIT %s;
    ''' % (",".join([str(i) for i in individual_select]), numsnps)
    cursor.execute(query)
    output = helpers.create_snp_dict(cursor.fetchall())
    return http.HttpResponse(simplejson.dumps(output), mimetype = 'application/json')
def add():
    """Add cash to user's account.

    Fixes over the original: the sqlite connection was opened before the
    GET check and leaked on the GET path and on both validation-failure
    paths — it is now opened only once input is valid and always closed
    via try/finally.
    """
    # Renders the add page if a GET request — no DB work needed.
    if request.method == "GET":
        return render_template("add.html")

    # Current logged-in user
    user = session['user_id']

    # Retrieves the added cash and validates it before touching the DB.
    add = sanitize(request.form.get("added-cash"))
    if not add.isdigit():
        return apology("Invalid cash amount")
    add = float(add)
    if add > 1000:
        return apology("Invalid cash amount")

    # Connect to SQL Database and apply the update.
    conn = sqlite3.connect('finance.db')
    db = conn.cursor()
    try:
        db.execute("SELECT cash FROM users WHERE id == (?)", (user,))
        row = db.fetchone()
        new_cash = row[0] + add
        db.execute("UPDATE users SET cash = (?) WHERE id == (?)", (new_cash, user))
        conn.commit()
    finally:
        conn.close()

    # Redirects user to homepage
    return redirect("/")
def _subgroups(self):
    """Build the subGroups mapping for this (chromatin-state) track and
    reset self.presentation with (raw, display) pairs. Returns the
    subGroups dict."""
    s = {}
    s["donor"] = Helpers.getOrUnknown(self.exp.donor_id)
    s["assay"] = Helpers.getOrUnknown(self.stateType)
    s["label"] = Helpers.getOrUnknown(self.exp.tf)
    s["biosample"] = Helpers.getOrUnknown(self.exp.biosample_term_name)
    # 'a' prefix keeps the age subgroup value starting with a letter.
    s["age"] = 'a' + Helpers.sanitize(Helpers.getOrUnknown(self.exp.age_display))
    s["view"] = self.exp.encodeID
    # presentation maps key -> (subgroup value, human-readable display);
    # unlike the sibling implementation this rebuilds it from scratch.
    self.presentation = {}
    self.presentation["label"] = (s["label"], Helpers.html_escape(Helpers.getOrUnknown(self.exp.tf)))
    self.presentation["assay"] = (s["assay"], s["assay"])
    self.presentation["donor"] = (s["donor"], s["donor"])
    self.presentation["age"] = (s["age"], Helpers.html_escape(Helpers.getOrUnknown(self.exp.age_display)))
    self.presentation["view"] = (s["view"], s["view"])
    self.presentation["biosample"] = (s["biosample"], s["biosample"])
    # sex / age_sex have no subgroup values here; present them empty.
    self.presentation["sex"] = ('', '')
    self.presentation["age_sex"] = ('', '')
    self.presentation["target_label"] = (s["assay"], s["assay"])
    self.presentation["biosample_summary"] = (s["biosample"], s["biosample"])
    self.presentation["tissue"] = self.presentation["biosample"]
    return s
def get_synced_items(self, machine_id=None, user_id=None):
    """Fetch the plex.tv sync list for a server and flatten it into a
    list of per-item dicts, optionally filtered by user_id. Returns []
    when the XML cannot be parsed."""
    sync_list = self.get_plextv_sync_lists(machine_id)
    user_data = users.Users()

    synced_items = []

    try:
        xml_parse = minidom.parseString(sync_list)
    except Exception as e:
        logger.warn(u"PlexPy PlexTV :: Unable to parse XML for get_synced_items: %s" % e)
        return []
    # Catches BaseExceptions not covered by the clause above (e.g. a
    # non-Exception raise); kept for safety.
    except:
        logger.warn(u"PlexPy PlexTV :: Unable to parse XML for get_synced_items.")
        return []

    xml_head = xml_parse.getElementsByTagName('SyncList')

    if not xml_head:
        logger.warn(u"PlexPy PlexTV :: Unable to parse XML for get_synced_items.")
    else:
        for a in xml_head:
            client_id = helpers.get_xml_attr(a, 'id')
            sync_device = a.getElementsByTagName('Device')
            for device in sync_device:
                device_user_id = helpers.get_xml_attr(device, 'userID')
                # Best-effort username lookup; unknown users get blanks.
                try:
                    device_username = user_data.get_details(user_id=device_user_id)['username']
                    device_friendly_name = user_data.get_details(user_id=device_user_id)['friendly_name']
                except:
                    device_username = ''
                    device_friendly_name = ''
                device_name = helpers.get_xml_attr(device, 'name')
                device_product = helpers.get_xml_attr(device, 'product')
                device_product_version = helpers.get_xml_attr(device, 'productVersion')
                device_platform = helpers.get_xml_attr(device, 'platform')
                device_platform_version = helpers.get_xml_attr(device, 'platformVersion')
                device_type = helpers.get_xml_attr(device, 'device')
                device_model = helpers.get_xml_attr(device, 'model')
                device_last_seen = helpers.get_xml_attr(device, 'lastSeenAt')

                # Filter by user_id
                if user_id and user_id != device_user_id:
                    continue

                for synced in a.getElementsByTagName('SyncItems'):
                    sync_item = synced.getElementsByTagName('SyncItem')
                    for item in sync_item:
                        sync_id = helpers.get_xml_attr(item, 'id')
                        sync_version = helpers.get_xml_attr(item, 'version')
                        sync_root_title = helpers.get_xml_attr(item, 'rootTitle')
                        sync_title = helpers.get_xml_attr(item, 'title')
                        sync_metadata_type = helpers.get_xml_attr(item, 'metadataType')
                        sync_content_type = helpers.get_xml_attr(item, 'contentType')

                        # These loops leave the variables holding the values
                        # of the LAST Status/MediaSettings/Location element.
                        for status in item.getElementsByTagName('Status'):
                            status_failure_code = helpers.get_xml_attr(status, 'failureCode')
                            status_failure = helpers.get_xml_attr(status, 'failure')
                            status_state = helpers.get_xml_attr(status, 'state')
                            status_item_count = helpers.get_xml_attr(status, 'itemsCount')
                            status_item_complete_count = helpers.get_xml_attr(status, 'itemsCompleteCount')
                            status_item_downloaded_count = helpers.get_xml_attr(status, 'itemsDownloadedCount')
                            status_item_ready_count = helpers.get_xml_attr(status, 'itemsReadyCount')
                            status_item_successful_count = helpers.get_xml_attr(status, 'itemsSuccessfulCount')
                            status_total_size = helpers.get_xml_attr(status, 'totalSize')
                            status_item_download_percent_complete = helpers.get_percent(
                                status_item_downloaded_count, status_item_count)

                        for settings in item.getElementsByTagName('MediaSettings'):
                            settings_audio_boost = helpers.get_xml_attr(settings, 'audioBoost')
                            settings_music_bitrate = helpers.get_xml_attr(settings, 'musicBitrate')
                            settings_photo_quality = helpers.get_xml_attr(settings, 'photoQuality')
                            settings_photo_resolution = helpers.get_xml_attr(settings, 'photoResolution')
                            settings_video_quality = helpers.get_xml_attr(settings, 'videoQuality')
                            settings_video_resolution = helpers.get_xml_attr(settings, 'videoResolution')

                        for location in item.getElementsByTagName('Location'):
                            # URI is percent-encoded; split on the encoded '/'.
                            clean_uri = helpers.get_xml_attr(location, 'uri').split('%2F')

                            # rating_key is the path segment right after
                            # 'metadata'. The genexp's `item` is local to the
                            # generator scope and does not clobber the outer
                            # SyncItem `item`.
                            rating_key = next((clean_uri[(idx + 1) % len(clean_uri)]
                                               for idx, item in enumerate(clean_uri) if item == 'metadata'), None)

                        sync_details = {"device_name": helpers.sanitize(device_name),
                                        "platform": helpers.sanitize(device_platform),
                                        "username": helpers.sanitize(device_username),
                                        "friendly_name": helpers.sanitize(device_friendly_name),
                                        "user_id": device_user_id,
                                        "root_title": helpers.sanitize(sync_root_title),
                                        "title": helpers.sanitize(sync_title),
                                        "metadata_type": sync_metadata_type,
                                        "content_type": sync_content_type,
                                        "rating_key": rating_key,
                                        "state": status_state,
                                        "item_count": status_item_count,
                                        "item_complete_count": status_item_complete_count,
                                        "item_downloaded_count": status_item_downloaded_count,
                                        "item_downloaded_percent_complete": status_item_download_percent_complete,
                                        "music_bitrate": settings_music_bitrate,
                                        "photo_quality": settings_photo_quality,
                                        "video_quality": settings_video_quality,
                                        "total_size": status_total_size,
                                        "failure": status_failure,
                                        "sync_id": sync_id
                                        }

                        synced_items.append(sync_details)

    return session.filter_session_info(synced_items, filter_key='user_id')
def convert(self):
    """Render this document's markdown to HTML and sanitize it before returning."""
    # Render first, then pass the raw HTML through the sanitizer so callers
    # never see unsanitized markup.
    return h.sanitize(markdown.Markdown.convert(self))
def _xmpp_on_message (self, xmpp_message):
    """
    Called for each message both sent and received by the bot.

    The logic for parsing messages and appropriately multiplexing out to
    handlers happens here:
      1. every "any" handler sees every message (including the bot's own),
      2. self-messages are then dropped so command/regex handlers never
         answer the bot itself,
      3. the FIRST matching "command" or "regex" handler is called and its
         return value is spoken; "command" takes precedence over "regex".
    """
    room, nick, message = xmpp_message["mucroom"], xmpp_message["mucnick"], xmpp_message["body"]

    # process all handlers bound to "any" — these fire on every message, raw.
    for callback in self.triggers["any"]:
        try:
            # process callback and speak results.
            self.speak(xmpp_message, callback(xmpp_message, room, nick, message))
        except Exception as e:
            # fatal: an "any" handler blowing up takes the bot down.
            self._exception_handler("exception in handler-all-%s()." % callback.__name__, e, fatal=True)

    # ensure the other handlers don't talk to themselves.
    if nick == config.NICKNAME:
        return

    # sanitize any non-printables out of message. handlers can access the raw
    # message via xmpp_message if they wish.
    message = helpers.sanitize(message)

    # make a lower case copy of the message as we'll use this a few times below
    # (matching is case-insensitive, extracted arguments preserve original case).
    message_lower = message.lower()

    # the first handler to get triggered is called. "command" takes precedence over "regex".
    for category in ["command", "regex"]:
        for callback, trigger in self.triggers[category]:
            trigger_lower = trigger.lower()  # lower case version of trigger.
            trigger_match = False            # whether or not a trigger was matched.
            arguments = None                 # trigger arguments, extracted below.

            # command triggers are either prefixed with a dot (or slash)...
            if category == "command" and \
                ( message_lower.startswith("." + trigger_lower + " ") or \
                  message_lower.endswith ("." + trigger_lower) or \
                  message_lower.startswith("/" + trigger_lower + " ") or \
                  message_lower.endswith ("/" + trigger_lower) ):
                # the arguments begin after the trigger, preserve the original case.
                # NOTE(review): this slice assumes the trigger sits at the START of
                # the message; when only the endswith() branch matched (trigger at
                # the end of a longer message) the slice yields mid-message text —
                # confirm whether that is intended.
                arguments = message[len(trigger) + 1:].strip()  # +1 for the dot or slash (./)
                trigger_match = True

            # ...or triggers prefixed with an @mention.
            elif category == "command" and config.AT_NAME in message_lower:
                # the trigger and arguments begin after the @mention.
                remainder = message[message_lower.index(config.AT_NAME) + len(config.AT_NAME):].strip().lstrip("./")
                # if the remainder starts with our trigger (no dot prefix required here).
                if remainder.lower().startswith(trigger_lower):
                    # the arguments begin after the trigger, preserve the original case.
                    arguments = remainder[len(trigger):].strip()
                    trigger_match = True

            # ...or triggers prefixed with a generic mention of @bot.
            elif category == "command" and "@bot" in message_lower:
                # the trigger and arguments begin after the @mention ("@bot" is 4 chars).
                remainder = message[message_lower.index("@bot") + 4:].strip().lstrip("./")
                # if the remainder starts with our trigger (no dot prefix required here).
                if remainder.lower().startswith(trigger_lower):
                    # the arguments begin after the trigger, preserve the original case.
                    arguments = remainder[len(trigger):].strip()
                    trigger_match = True

            # look for regular expression match (not search, want to be more strict here).
            elif category == "regex" and re.match(trigger, message):
                # the entire message is the argument.
                arguments = message
                trigger_match = True

            # if a trigger was matched, launch the callback.
            if trigger_match:
                try:
                    # process callback, speak the results and return.
                    self.speak(xmpp_message, callback(xmpp_message, room, nick, arguments))
                    return
                except Exception as e:
                    # fatal exception.
                    self._exception_handler("handler %s-%s()." % (category, callback.__name__), e, fatal=True)
def ssp_query(self, table_name=None, table_name_union=None,
              columns=None, columns_union=None,
              custom_where=None, custom_where_union=None,
              group_by=None, group_by_union=None,
              join_types=None, join_tables=None, join_evals=None,
              kwargs=None):
    """Build and run a jQuery DataTables server-side-processing query,
    optionally UNIONed with a second table.

    Clause construction is delegated to the build_* helpers.  kwargs must
    carry 'json_data', the serialized DataTables request (draw, start,
    length, order, search, columns).

    Returns a dict with the paginated 'result' rows plus the DataTables
    bookkeeping fields 'draw', 'filteredCount' and 'totalCount', or None
    when table_name or the json_data parameters are missing.
    """
    if not table_name:
        logger.error('PlexPy DataTables :: No table name received.')
        return None

    # None defaults avoid the shared mutable-default-argument pitfall the
    # previous [] defaults had; normalize to fresh empty containers here.
    columns = columns or []
    columns_union = columns_union or []
    custom_where = custom_where or []
    custom_where_union = custom_where_union or []
    group_by = group_by or []
    group_by_union = group_by_union or []
    join_types = join_types or []
    join_tables = join_tables or []
    join_evals = join_evals or []
    # Guard: kwargs defaulted to None but was dereferenced with .get(),
    # which raised AttributeError when the caller omitted it.
    kwargs = kwargs or {}

    # Fetch all our parameters
    if kwargs.get('json_data'):
        parameters = helpers.process_json_kwargs(json_kwargs=kwargs.get('json_data'))
    else:
        logger.error('PlexPy DataTables :: Parameters for Datatables must be sent as a serialised json object '
                     'named json_data.')
        return None

    extracted_columns = self.extract_columns(columns=columns)
    join = self.build_join(join_types, join_tables, join_evals)
    group = self.build_grouping(group_by)
    c_where, cw_args = self.build_custom_where(custom_where)
    order = self.build_order(parameters['order'],
                             extracted_columns['column_named'],
                             parameters['columns'])
    where, w_args = self.build_where(parameters['search']['value'],
                                     extracted_columns['column_named'],
                                     parameters['columns'])

    # Build union parameters
    if table_name_union:
        extracted_columns_union = self.extract_columns(columns=columns_union)
        group_u = self.build_grouping(group_by_union)
        c_where_u, cwu_args = self.build_custom_where(custom_where_union)
        union = 'UNION SELECT %s FROM %s %s %s' % (extracted_columns_union['column_string'],
                                                   table_name_union,
                                                   c_where_u,
                                                   group_u)
    else:
        union = ''
        cwu_args = []

    # Placeholder args must follow clause order in the final SQL:
    # custom where, union's custom where, then the search where.
    args = cw_args + cwu_args + w_args

    # Build the query — search/order wrap the inner SELECT so they operate
    # on the already joined/filtered/unioned rows.
    query = 'SELECT * FROM (SELECT %s FROM %s %s %s %s %s) %s %s' \
            % (extracted_columns['column_string'], table_name, join, c_where, group, union,
               where, order)

    # logger.debug(u"Query: %s" % query)

    # Execute the query
    filtered = self.ssp_db.select(query, args=args)

    # Remove NULL rows
    filtered = [row for row in filtered if not all(v is None for v in row.values())]

    # Build grand totals
    totalcount = self.ssp_db.select('SELECT COUNT(id) as total_count from %s' % table_name)[0]['total_count']

    # Get draw counter
    draw_counter = int(parameters['draw'])

    # Paginate results
    result = filtered[parameters['start']:(parameters['start'] + parameters['length'])]

    # Sanitize on the way out (Python 2 idioms: basestring / iteritems)
    result = [{k: helpers.sanitize(v) if isinstance(v, basestring) else v
               for k, v in row.iteritems()} for row in result]

    output = {'result': result,
              'draw': draw_counter,
              'filteredCount': len(filtered),
              'totalCount': totalcount}

    return output
def ssp_query(self, table_name=None, columns=None, custom_where=None, group_by=None,
              join_types=None, join_tables=None, join_evals=None, kwargs=None):
    """Build and run a jQuery DataTables server-side-processing query.

    Parameters
    ----------
    table_name : str
        Table to select from (required).
    columns : list of str
        Column expressions to select.
    custom_where : list of (column, value) pairs
        Extra equality filters; a list/tuple value produces an OR group,
        and a None value matches NULL.
    group_by : list of str
        GROUP BY columns.
    join_types / join_tables / join_evals : parallel lists
        JOIN clauses: type, joined table, and (left, right) join condition.
    kwargs : dict
        Must contain 'json_data', the serialized DataTables request
        (draw, start, length, order, search, columns).

    Returns
    -------
    dict with 'result', 'draw', 'filteredCount', 'totalCount',
    or None on missing table name / parameters.
    """
    if not table_name:
        logger.error('PlexPy DataTables :: No table name received.')
        return None

    # None defaults avoid the shared mutable-default-argument pitfall the
    # previous [] defaults had; normalize to fresh empty containers here.
    columns = columns or []
    custom_where = custom_where or []
    group_by = group_by or []
    join_types = join_types or []
    join_tables = join_tables or []
    join_evals = join_evals or []
    # Guard: kwargs defaulted to None but was dereferenced with .get(),
    # which raised AttributeError when the caller omitted it.
    kwargs = kwargs or {}

    # Set default variable values
    args = []
    order = ''
    where = ''
    join = ''
    c_where = ''

    # Fetch all our parameters
    if kwargs.get('json_data'):
        parameters = helpers.process_json_kwargs(json_kwargs=kwargs.get('json_data'))
    else:
        logger.error('PlexPy DataTables :: Parameters for Datatables must be sent as a serialised json object '
                     'named json_data.')
        return None

    dt_columns = parameters['columns']
    extracted_columns = self.extract_columns(columns=columns)

    # Build grouping
    group = 'GROUP BY ' + ', '.join(group_by) if group_by else ''
    grouping = bool(group)

    # Build join parameters
    for join_type, join_table, join_eval in zip(join_types, join_tables, join_evals):
        join_type = join_type.upper()
        if join_type == 'LEFT OUTER JOIN':
            join += 'LEFT OUTER JOIN %s ON %s = %s ' % (join_table, join_eval[0], join_eval[1])
        elif join_type in ('JOIN', 'INNER JOIN'):
            join += 'JOIN %s ON %s = %s ' % (join_table, join_eval[0], join_eval[1])
        # unknown join types are silently skipped, as before

    # Build custom where parameters
    for w in custom_where:
        if isinstance(w[1], (list, tuple)) and len(w[1]):
            # A sequence of values becomes an OR group: (col = ? OR col = ? ...)
            c_where += '('
            for w_ in w[1]:
                if w_ is None:
                    c_where += w[0] + ' IS NULL OR '
                else:
                    c_where += w[0] + ' = ? OR '
                    args.append(w_)
            c_where = c_where.rstrip(' OR ') + ') AND '
        else:
            if w[1] is None:
                c_where += w[0] + ' IS NULL AND '
            else:
                c_where += w[0] + ' = ? AND '
                args.append(w[1])
    if c_where:
        c_where = 'WHERE ' + c_where.rstrip(' AND ')

    # Build ordering.  Collecting terms in a list fixes two defects of the
    # old string-append version: a skipped (bogus) column no longer leaves a
    # dangling ',' in the clause (which produced 'ORDER BY , col'), and the
    # index-fallback branch now honors the requested sort direction instead
    # of silently ignoring it.
    order_terms = []
    for o in parameters['order']:
        sort_order = ' COLLATE NOCASE'
        if o['dir'] == 'desc':
            sort_order = ' COLLATE NOCASE DESC'
        column_data = dt_columns[int(o['column'])]['data']
        # We first see if a name was sent through for the column sort.
        if column_data:
            # We have a name; only accept it if it matches a known column so
            # we don't inject a client-supplied value into the SQL.
            if any(d.lower() == column_data.lower() for d in extracted_columns['column_named']):
                order_terms.append(column_data + sort_order)
            # if we receive a bogus name, rather not sort at all.
        else:
            # If no name exists for the column, just use the column index to sort
            order_terms.append(extracted_columns['column_named'][int(o['column'])] + sort_order)
    if order_terms:
        order = 'ORDER BY ' + ', '.join(order_terms)

    # Build where parameters from the global DataTables search box.
    if parameters['search']['value']:
        like_value = '%' + parameters['search']['value'] + '%'
        for counter, s in enumerate(parameters['columns']):
            if not s['searchable']:
                continue
            # We first see if a name was sent through for the column search.
            if s['data']:
                # We have a name; only accept it if it matches a known column
                # so we don't inject a client-supplied value into the SQL.
                if any(d.lower() == s['data'].lower() for d in extracted_columns['column_named']):
                    where += s['data'] + ' LIKE ? OR '
                    args.append(like_value)
                # if we receive a bogus name, rather not search at all.
            else:
                # If no name exists for the column, just use the column index to search
                where += extracted_columns['column_named'][counter] + ' LIKE ? OR '
                args.append(like_value)
    if where:
        where = 'WHERE ' + where.rstrip(' OR ')

    # Build our queries.  With grouping or custom filters, search/order wrap
    # the inner SELECT so they operate on the grouped/filtered rows.
    if grouping:
        if c_where == '':
            query = 'SELECT * FROM (SELECT %s FROM %s %s %s) %s %s' \
                    % (extracted_columns['column_string'], table_name, join, group,
                       where, order)
        else:
            query = 'SELECT * FROM (SELECT %s FROM %s %s %s %s) %s %s' \
                    % (extracted_columns['column_string'], table_name, join, c_where, group,
                       where, order)
    else:
        if c_where == '':
            query = 'SELECT %s FROM %s %s %s %s' \
                    % (extracted_columns['column_string'], table_name, join,
                       where, order)
        else:
            query = 'SELECT * FROM (SELECT %s FROM %s %s %s %s) %s' \
                    % (extracted_columns['column_string'], table_name, join,
                       where, order, c_where)

    # logger.debug(u"Query: %s" % query)

    # Execute the query
    filtered = self.ssp_db.select(query, args=args)

    # Build grand totals
    totalcount = self.ssp_db.select('SELECT COUNT(id) as total_count from %s' % table_name)[0]['total_count']

    # Get draw counter
    draw_counter = int(parameters['draw'])

    # Paginate results
    result = filtered[parameters['start']:(parameters['start'] + parameters['length'])]

    # Sanitize on the way out (Python 2 idioms: basestring / iteritems)
    result = [{k: helpers.sanitize(v) if isinstance(v, basestring) else v
               for k, v in row.iteritems()} for row in result]

    output = {'result': result,
              'draw': draw_counter,
              'filteredCount': len(filtered),
              'totalCount': totalcount}

    return output
def load(self):
    """Populate this region's attributes from the sim's Regions.ini file.

    Returns True when the region section was found and read, False when the
    ini file is missing, the region has no name, or the section is absent.
    Only options present in the file overwrite the corresponding attribute.
    """
    logging.main_logger.debug("[region] 'load' called")

    # get the regions.ini file
    ini_path = self.sim_path + '/bin/Regions/Regions.ini'
    if not os.path.isfile(ini_path):
        logging.main_logger.warning("[region] no Regions.ini file found : %s" % (ini_path))
        return False

    # check the region name
    if self.name == '':
        logging.main_logger.warning("[region] no name defined")
        return False

    from ConfigParser import ConfigParser
    from helpers import sanitize

    regions_ini = ConfigParser()
    regions_ini.read(ini_path)

    # check the region in Regions.ini
    if not self.name in regions_ini.sections():
        logging.main_logger.warning("[region] region not found in Regions.ini file")
        return False

    # Map ini option names to the instance attributes they populate; every
    # value is passed through sanitize() on the way in.
    option_to_attr = (
        ('RegionUUID',          'region_uuid'),
        ('Location',            'location'),
        ('SizeX',               'sizex'),
        ('SizeY',               'sizey'),
        ('InternalAddress',     'internal_address'),
        ('InternalPort',        'internal_port'),
        ('AllowAlternatePorts', 'allow_alternate_ports'),
        ('ExternalHostName',    'external_host_name'),
        ('NonphysicalPrimMax',  'nonphysical_prim_max'),
        ('PhysicalPrimMax',     'physical_prim_max'),
        ('MaxPrims',            'max_prims'),
        ('MaxAgents',           'max_agents'),
        ('ScopeID',             'scope_id'),
        ('RegionType',          'region_type'),
        ('MaptileStaticUUID',   'maptile_static_uuid'),
    )
    for option, attr in option_to_attr:
        if regions_ini.has_option(self.name, option):
            setattr(self, attr, sanitize(regions_ini.get(self.name, option)))

    return True