def set_password(self, password):
    """Sets a password on the inbox that this instance represents.

    password -- plaintext string representation of the password
    """
    if self.has_password():
        # We already have a password and a salt ^_^
        hash, salt = parse_hash(self.password_hash)
        new_hash = make_hash(settings.INBOX_SETTINGS['hash_secret'], password, salt)
        self.password_hash = new_hash
        self.save()
    else:
        # We never had a password or don't have one set atm
        new_hash = make_hash(settings.INBOX_SETTINGS['hash_secret'], password)
        #i = EmailUser(email_address = self.email_address, password_protected = True, password_hash = new_hash)
        #i.save()
        self.password_protected = True
        self.password_hash = new_hash
        self.save()
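None of these examples include make_hash itself, and its signature clearly varies between projects. For the (secret, password, salt) call pattern in the snippet above, a minimal sketch could look like the following; the SHA-256 digest and the "salt$digest" storage format are assumptions made for illustration, not the original utils code.

import hashlib
import os


def make_hash(secret, password, salt=None):
    # Assumed implementation: derive a fresh salt when none is supplied,
    # hash secret + salt + password with SHA-256, and return "salt$digest"
    # so the salt can be recovered later.
    if salt is None:
        salt = hashlib.sha256(os.urandom(16)).hexdigest()
    digest = hashlib.sha256((secret + salt + password).encode("utf-8")).hexdigest()
    return "%s$%s" % (salt, digest)


def parse_hash(stored_hash):
    # Assumed counterpart: split the stored value back into (digest, salt),
    # matching the "hash, salt = parse_hash(...)" unpacking above.
    salt, digest = stored_hash.split("$", 1)
    return digest, salt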
def wrapper(self, *args, **kwargs):
    #print("Inside wrapped_f()")
    #print("Decorator arguments:", arg1, arg2, arg3)
    macro = args[0]
    if 'cache_mode' in kwargs:
        pass
        #print(kwargs['cache_mode'])
    print(make_hash(macro))
    output = f(self, *args, **kwargs)
    return output
def create_user(username, email, password):
    '''Register user and return user object'''
    password_hash = utils.make_hash(password)
    new_user = User(username=username, password=password_hash, email=email)
    new_user.is_admin = False
    try:
        new_user.put()
    except:
        return None
    new_user.user_id = new_user.key().id()
    return new_user
def get_poly_quadkeys(poly, zoom, override=False, name=None, **kwargs):
    if name is None:
        name = utils.make_hash(':'.join([str(poly), str(kwargs)]))
    filename = '_'.join(['poly', name, 'quadkeys', str(zoom)]) + '.pkl'
    filePath = os.path.join(repoPath, 'resources', filename)
    if os.path.isfile(filePath) and not override:
        with open(filePath, 'rb') as f:
            return pickle.load(f)
    else:
        out = make_poly_quadkey(poly, zoom, **kwargs)
        with open(filePath, 'wb') as f:
            pickle.dump(out, f)
        return out
def post(self):
    data = {}
    for attr in self.request.arguments:
        # Look up the current argument; only hash the password field,
        # all other fields are stored as plain strings.
        data_attr = self.get_argument(attr, None)
        if data_attr:
            if attr == 'password':
                data.update({'password': utils.make_hash(self.get_argument("password"))})
            else:
                data.update({attr: str(self.get_argument(attr))})
    if data:
        user = User(**data)
        user.save()
    self.redirect(self.reverse_url('login'))
def register():
    error = None
    username = ""
    password1 = ""
    password2 = ""
    nickname = ""
    gender = ""
    if request.method == 'POST':
        username = request.form['username']
        password1 = request.form['password1']
        password2 = request.form['password2']
        nickname = request.form['nickname']
        gender = request.form['gender']
        if not username:
            error = 'username is required'
        elif not nickname:
            error = 'nickname is required'
        elif not gender:
            error = 'gender is required'
        elif not password1 or not password2:
            error = 'password is required'
        elif password1 != password2:
            error = 'password is not match'
        else:
            cur = g.db.execute("SELECT username FROM accounts WHERE username=?", (username,))
            row = cur.fetchone()
            print row
            if row and row[0]:
                error = 'username is already exists'
            else:
                created_at = datetime.strftime(datetime.now(), app.config['DATE_FORMAT'])
                p_hash = utils.make_hash(username, app.config['SECRET_KEY'], password2)
                print 'register,username='******'password='******'hash=', p_hash
                g.db.execute(
                    "INSERT INTO accounts (username,password,nickname,gender,created_at,created_ip) "
                    "VALUES (?,?,?,?,?,?)",
                    (username, p_hash, nickname, gender, created_at, request.remote_addr,))
                g.db.commit()
                result = g.db.execute(
                    'SELECT id,username,nickname FROM accounts WHERE username=?',
                    (username,)).fetchone()
                print 'register,result=', result
                if result:
                    session['logged_in'] = True
                    session['logged_user'] = request.form['username']
                    flash('You were logged in')
                    return redirect(url_for('show_entries'))
                else:
                    error = 'register failed'
    return render_template('register.html', error=error, username=username,
                           nickname=nickname, gender=gender)
def get_intersection_weights(fromFrm, toFrm, override=False, name=None, **kwargs):
    if name is None:
        name = '_'.join([
            utils.make_hash(str(pickle.dumps(frm)) + ':' + str(kwargs))
            for frm in [fromFrm, toFrm]
        ])
    filename = '_'.join(['intersectionWeights', name]) + '.pkl'
    filePath = os.path.join(repoPath, 'resources', filename)
    if os.path.isfile(filePath) and not override:
        with open(filePath, 'rb') as f:
            return pickle.load(f)
    else:
        out = make_intersection_weights(fromFrm, toFrm)
        with open(filePath, 'wb') as f:
            pickle.dump(out, f)
        return out
def get_frm_poly(frm, override=False, name=None, **kwargs):
    if name is None:
        geoms = list(frm['geometry'])
        geomSizes = [geom.area for geom in geoms]
        geoms = [
            pair[0]
            for pair in sorted(zip(geoms, geomSizes), key=lambda x: x[1])
        ]
        strGeoms = ';'.join(str(geom) for geom in geoms)
        strGeoms += ':' + str(kwargs)
        name = utils.make_hash(strGeoms)
    filename = '_'.join(['poly', name]) + '.pkl'
    filePath = os.path.join(repoPath, 'resources', filename)
    if os.path.isfile(filePath) and not override:
        with open(filePath, 'rb') as f:
            return pickle.load(f)
    else:
        out = make_frm_poly(frm, **kwargs)
        with open(filePath, 'wb') as f:
            pickle.dump(out, f)
        return out
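In the three caching helpers above, make_hash only has to turn an argument string into a short, stable token for a pickle filename. A minimal sketch under that assumption (the SHA-1 digest and the 16-character truncation are illustrative choices, not the project's actual utils.make_hash):

import hashlib


def make_hash(value, length=16):
    # Hypothetical stand-in for utils.make_hash as used in the caching
    # helpers above: map an arbitrary string to a short, deterministic,
    # filesystem-safe token.
    digest = hashlib.sha1(str(value).encode("utf-8")).hexdigest()
    return digest[:length]

Any stable digest works here; the only requirement is that identical inputs always map to the same filename so previously cached results are found again.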
def __init__(self, text, settings, init_from_hash=None):
    """
    Creates the necessary directories and the makefile
    for this text and the JSON settings.
    """
    self.status = None
    self.queues = []

    if init_from_hash:
        self.build_hash = init_from_hash
    else:
        self.text = text
        self.makefile_contents = makefile(settings)
        self.build_hash = make_hash(self.text, self.makefile_contents)
        self.settings = settings

    # Directories
    self.directory = os.path.join(Config.directory, self.build_hash)
    self.original_dir = os.path.join(self.directory, 'original')
    self.annotations_dir = os.path.join(self.directory, 'annotations')
    self.export_dir = os.path.join(self.directory, 'export')
    self.warnings_log_file = os.path.join(self.directory, 'warnings.log')

    # and files
    self.makefile = os.path.join(self.directory, 'Makefile')
    self.settings_file = os.path.join(self.directory, 'settings.json')
    self.text_file = os.path.join(self.original_dir, 'text.xml')

    # Deem this build as accessed now.
    self.access()

    # Output from make, line by line
    self.make_out = []

    # Set increments to dummy values
    self.command = ""
    self.steps = 0
    self.step = 0
def createSessionCookie(self, userId):
    userHash = utils.make_hash(userId, userId)
    self.response.headers.add_header("Set-Cookie",
                                     "userId = %s; Path = /" % userHash)
    return userHash
def __init__(self, text, settings, files=None, init_from_hash=None, resuming=False):
    """
    Create the necessary directories and the makefile
    for this text and the JSON settings.
    """
    self.status = None
    self.queues = []
    self.files = files

    if init_from_hash:
        self.build_hash = init_from_hash
        # File upload:
        if init_from_hash.endswith(Config.fileupload_ext):
            original_dir = os.path.join(os.path.join(Config.builds_dir, self.build_hash), 'original')
            filelist = []
            for root, dirs, files in os.walk(original_dir):
                for infile in files:
                    with open(os.path.join(root, infile), "r") as f:
                        text = f.read()
                    fname = infile[:infile.rfind(".")]
                    filelist.append((fname, text))
            self.files = filelist
        else:
            self.text = text
            self.filename = 'text'
    else:
        self.makefile_contents = makefile(settings)
        self.settings = settings

        # File upload
        if files:
            self.text = "\n".join(text for _fn, text in files)
            filenames = " ".join(fn for fn, _text in files)
            self.build_hash = make_hash(self.text, self.makefile_contents, filenames) + Config.fileupload_ext
        else:
            self.text = text
            self.filename = 'text'
            self.build_hash = make_hash(self.text, self.makefile_contents)

    # Directories
    self.directory = os.path.join(Config.builds_dir, self.build_hash)
    self.original_dir = os.path.join(self.directory, 'original')
    self.annotations_dir = os.path.join(self.directory, 'annotations')
    self.export_dir = os.path.join(self.directory, 'export.original')

    # Files
    self.makefile = os.path.join(self.directory, 'Makefile')
    self.warnings_log_file = os.path.join(self.directory, 'warnings.log')
    self.accessed_file = os.path.join(self.directory, 'accessed')
    self.settings_file = os.path.join(self.directory, 'settings.json')
    self.zipfpath = os.path.join(self.directory, "export.zip")
    self.zipfile = "export.zip"
    if not files:
        self.text_file = os.path.join(self.original_dir, self.filename + '.xml')
        self.result_file_path = os.path.join(self.export_dir, self.filename + '.xml')
        self.result_file = self.filename + '.xml'

    # Deem this build as accessed now, unless resuming an old build
    self.access(resuming)

    # Output from make, line by line
    self.make_out = []

    # Set increments to dummy values
    self.command = ""
    self.steps = 0
    self.step = 0
def get_hash(self):
    if not self.hash:
        self.hash = make_hash(self.url)
    return self.hash
def hashPass(self, password):
    hashedPass = utils.make_hash(password)
    return hashedPass
logging.info(f"Command executed: {' '.join(sys.argv)}") logging.info("Starting outside variant pipeline analysis") file1 = args.case_gen file2 = args.control_gen pairing = args.SNP_pairs init_file = args.init_file p_file = args.output_folder override_folder = args.override odds_file = "" logging.info("Initializing pipeline. This might take a few seconds.") args.exec_dir = os.getcwd() with cd(args.input_folder_path): pipe = Pipeline.init_from_file( init_file, file1, file2, pairing, p_file, odds_file, args) logging.info("Making output directory") working_dir = make_working_dir(p_file, override_folder) pipe.working_dir = working_dir pipe.p_value_filename = p_file.split("/")[-1] pipe.hash = make_hash(args.input_folder_path, init_file, file1, file2, pairing, args.unique_identifier) with cd(args.input_folder_path): pipe.read_input_files() logging.info("Running pipeline...") with cd(pipe.working_dir): pipe.run()
job_dir = "%s/.job" % (output_dir) out_dir = "%s/.out" % (output_dir) for new_dir in [output_dir, job_dir, out_dir]: if not os.path.exists(new_dir): os.mkdir(new_dir) files = [ os.path.join(trajectory_folder, f) for f in os.listdir(trajectory_folder) if f[-3:] == 'pkl' ] file_lists = files_by_params(files, bools, duration=50., max_seed=0) for file_list in file_lists: traj = ",".join(sorted(file_list)) hash_code = make_hash(traj) output = '%s/diffusion_map_%s.pkl' % (diffusion_maps_folder, hash_code) filey_loc = os.path.join(job_dir, "diff_map.job") filey = open(filey_loc, "w") filey.writelines("#!/bin/bash\n") filey.writelines("#SBATCH --job-name=diff_map\n") filey.writelines("#SBATCH --output=%s/diff_map_%s.out\n" % (out_dir, hash_code)) filey.writelines("#SBATCH --error=%s/diff_map_%s.err\n" % (out_dir, hash_code)) filey.writelines("#SBATCH --time=2-00:00\n") filey.writelines("#SBATCH --mem=%s\n" % (memory)) if os.path.exists(output) and overwrite is False: print("File exists and overwrite is False! Aborting diffusion map:\n" +
def cache_file(self):
    # todo make this filename the hash of the url
    h = utils.make_hash(self.url)
    return "{}/debate_{}.json".format(self.cache_folder, h)
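Assuming a digest-style make_hash like the sketch earlier, the cache path above is fully determined by the URL, so repeated lookups for the same debate reuse one file. A hypothetical usage (the DebateScraper class name, folder, and URL are invented for illustration):

# Illustrative only: class name, folder, and URL are not from the original project.
scraper = DebateScraper(url="https://example.org/debate/42", cache_folder="cache")
print(scraper.cache_file())  # e.g. "cache/debate_3f2b9c1a.json"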