def test_multichange():
    # timing for performance test
    t0 = time.time()
    # sending data to main function
    data_path = "../data/result/test2_result"
    write_path = "../data/result/test2_result"
    strain_num = 6268
    # calling the main function
    reader(data_path, write_path, strain_num)
    # call(["bgzip", "../data/result/test2_result_result.vcf"])
    # generating checksums
    crossval_hash = hash("../data/test/test2.vcf.gz")
    result_hash = hash("../data/result/test2_result_result.vcf.gz")
    # validating checksums
    assert crossval_hash == result_hash
    # printing time outputs
    t1 = time.time()
    print "Extra test 1: " + str(t1 - t0)
def test_multiChange():
    # timing the function for performance
    t0 = time.time()
    # sending data to main function
    data_path = "../data/result/test1_result"
    write_path = "../data/result/test1_result"
    strain_num = 6268
    # calling main function
    reader(data_path, write_path, strain_num)
    # compressing the results to .vcf.gz
    call(["bgzip", "../data/result/test1_result_result.vcf"])
    # generating checksum for validation
    test_hash = hash("../data/test/test1.vcf.gz")
    result_hash = hash("../data/result/test1_result_result.vcf.gz")
    # asserting test validation
    assert test_hash == result_hash
    # printing time results
    t1 = time.time()
    print "SNP Multichange time: " + str(t1 - t0)
def test1_test():
    # timing the function for performance testing
    t0 = time.time()
    # sending data to main function
    data_path = "../data/test/test1"
    write_path = "../data/result/test1"
    strain_num = 6268
    # calling main function
    reader(data_path, write_path, strain_num)
    # compressing the file
    call(["bgzip", "../data/result/test1_result.vcf"])
    # asserting the results
    crossval_hash = hash("../data/cross_validation/test1_crossval.vcf.gz")
    result_hash = hash("../data/result/test1_result.vcf.gz")
    assert crossval_hash == result_hash
    # printing time results
    t1 = time.time()
    print "Time for snp change: " + str(t1 - t0)
def test2_test():
    # timing for performance test
    t0 = time.time()
    # sending data to main function
    data_path = "../data/test/test2"
    write_path = "../data/result/test2"
    strain_num = 6268
    # calling main function
    reader(data_path, write_path, strain_num)
    # compressing file to .vcf.gz
    call(["bgzip", "../data/result/test2_result.vcf"])
    # generating checksums
    crossval_hash = hash("../data/cross_validation/test2_crossval.vcf.gz")
    result_hash = hash("../data/result/test2_result.vcf.gz")
    # validating checksums
    assert crossval_hash == result_hash
    # printing time results
    t1 = time.time()
    print "Indel test time: " + str(t1 - t0)
def userSignup(args, sessid=0):
    # password or site-precookie
    # password is manually chosen, but the site pre-cookie should be computed
    # from hash(siteid + rootcookie)
    if not MULTIUSER and sessid > 0:
        raise ValueError("Multiuser not enabled, sessid > 0 not allowed.")
    password = args.password
    # global userid
    salt = randstr(SALTLEN)
    passwordhash = hash(password)
    # always compute both checks to prevent timing attacks
    # that could guess whether a password is in use.
    not_new = passwordhash in data.salts
    not_rare = ALLOWUSERPASSWORDS and not rare(password)
    if not_new or not_rare:
        raise ValueError(SIGNUPFAIL)
    userid = randstr(IDLEN)
    while userid in data.users:
        # theoretically this could infinite loop, but when are we gonna have that many users?
        userid = randstr(IDLEN)
    username = args.username or ""
    email = args.email or ""
    if username in data.names:
        raise ValueError("Sorry, that username is taken.")
    if email in data.emails:
        raise ValueError("Sorry, that email address is taken.")
    sitecookie = hash(password + salt)
    # store this entire salt, because it is strictly private on the server.
    data.authhashes.addRow(*authHashes(userid, password, AUTHHASHES))
    data.authhashes.save()
    data.cookies.addRow(sitecookie, userid, salt)
    data.cookies.save()
    data.salts.addRow(passwordhash, salt)
    data.salts.save()
    data.users.addRow(userid, username, email, sitecookie)
    data.users.save()
    if len(username):
        data.names.addRow(username, userid)
        data.names.save()
    if len(email):
        data.emails.addRow(email, userid, False)
        data.emails.save()
    print(" New user created.")
    print(f" User Id: '{userid}'")
    print(f" SiteCookie: '{sitecookie}'")
    name = username
    if len(name) == 0:
        name = userid
def estimateEdgeWeight(self, a, b):
    estimated_weight = math.inf
    for i in range(self.d):
        node_a = hash(a, self.order, i)
        node_b = hash(b, self.order, i)
        if self.matrices[i][node_a][node_b] < estimated_weight:
            estimated_weight = self.matrices[i][node_a][node_b]
    return estimated_weight
def autoSignup(args, sessid=0):
    password = randstr(AUTOPASSLEN)
    passwordhash = hash(password)
    while passwordhash in data.salts:
        # theoretically this could infinite loop, but when are we gonna have that many users?
        password = randstr(AUTOPASSLEN)
        passwordhash = hash(password)
    SignupArguments = namedtuple('Arguments', "username password email")
    signupargs = SignupArguments(args.username, password, args.email)
    userSignup(signupargs, sessid)
    print(" Password: " + password)
def authHashes(userid, password, n):
    result = []
    result.append(userid)
    for i in range(n):
        salt = randstr(SALTLEN)
        authcookie = hash(password + salt)
        authhash = hash(authcookie)
        publicsalt = salt[:SALTLEN - SALTSECRET]
        result.append(publicsalt)
        result.append(authhash)
    return result
def updatePublicAccount(userid=None, currencylookup=None, newbalance=None, balancechange=None):
    # TODO test this
    if userid == None:
        raise ValueError("cannot update account for 'None' user.")
    if currencylookup == None:
        raise ValueError("cannot update a 'None' account.")
    if newbalance == None and balancechange == None:
        raise ValueError("newbalance or balancechange required to update account.")
    if newbalance != None and balancechange != None:
        raise ValueError("updatePublicAccount: newbalance and balancechange were both specified. Choose one, not both.")
    if not currencylookup in data.currencylookup:
        raise ValueError("updatePublicAccount: there was no matching currency for provided 'currencylookup'")
    currencyid = data.currencylookup[currencylookup].CurrencyId
    privacctid = userid + ":" + currencyid
    exists = privacctid in data.privaccts
    balance = 0
    pubacctid = None
    if exists:
        pubacctid = data.privaccts[privacctid].AcctId
        acct = data.pubaccts[pubacctid]
        balance = int(acct.Balance)
    else:
        pubacctid = randstr(IDLEN)
        while pubacctid in data.pubaccts:
            # theoretically this could infinite loop, but when are we gonna have that many users?
            pubacctid = randstr(IDLEN)
    if balancechange != None:
        assert isinstance(balancechange, int), "balancechange was not an integer"
        newbalance = balance + balancechange
    assert isinstance(newbalance, int), "newbalance was not an integer"
    if newbalance < 0:
        raise ValueError("balance may not be less than zero.")
    user = data.users[userid]
    sitecookie = user.SiteCookie
    sitepostcookie = hash(sitecookie)
    acctversion = randstr(IDLEN)
    acctsecret = hash(pubacctid + ":" + acctversion + ":" + sitepostcookie)
    accthash = hash(pubacctid + ":" + userid + ":" + acctsecret)
    if exists:
        del data.pubaccts[pubacctid]
    else:
        data.privaccts.addRow(privacctid, pubacctid)
    data.pubaccts.addRow(pubacctid, acctversion, accthash, currencyid, newbalance)
def addEdge(self, a, b, weight):
    for i in range(self.d):
        hashed_a = hash(a, self.order, i)
        hashed_b = hash(b, self.order, i)
        if hashed_a not in self.matrices[i]:
            self.matrices[i][hashed_a] = {}
        if hashed_b not in self.matrices[i][hashed_a]:
            self.matrices[i][hashed_a][hashed_b] = 0
        self.matrices[i][hashed_a][hashed_b] += weight
        if not self.directed:
            if hashed_b not in self.matrices[i]:
                self.matrices[i][hashed_b] = {}
            if hashed_a not in self.matrices[i][hashed_b]:
                self.matrices[i][hashed_b][hashed_a] = 0
            self.matrices[i][hashed_b][hashed_a] += weight
def userLoginPassword(args, sessid=0):
    if not MULTIUSER and sessid > 0:
        raise ValueError("Multiuser not enabled, sessid > 0 not allowed.")
    password = args.password
    passwordhash = hash(password)
    if not passwordhash in data.salts:
        print("Unknown user password.")
        return
    salt = data.salts[passwordhash].Salt
    cookie = hash(password + salt)
    # print("salt", salt)
    # print("cookie", cookie)
    CookieArguments = namedtuple('Arguments', "cookie")
    cookieargs = CookieArguments(cookie)
    userLoginCookie(cookieargs, sessid)
def add(db, hash_value, file_path, label, verbose):
    if hash_value is None:
        # encode the label once so the stored value and the log message agree
        # (decoding `label` directly fails when it is a plain str or None)
        entry_label = bytes(file_path.stem if label is None else label, "utf-8")
        db.put(hash(file_path), entry_label)
        if verbose:
            sys.stderr.write('New entry: "' + str(file_path) + '" with label "'
                             + entry_label.decode("utf-8") + '"\n')
def append(self, node):
    new_head = 1
    if self.empty:
        self.empty = False
    else:
        new_head = self.head + 1
    self.data[new_head] = {}
    self.data[new_head]['pointer'] = self.head
    self.data[new_head]['node'] = node
    if new_head == 1:
        self.data[new_head]['hash'] = None
    else:
        self.data[new_head]['hash'] = hash(
            int(dict_to_binary(self.data[self.head])),
            self.generator, self.prime, self.length)
    node_to_sign = dict_to_text(self.data[new_head])
    z, c, t = sign(node_to_sign, self.pri_key, self.length, self.generator, self.prime)
    self.data[new_head]['sign'] = (z, c, t)
    self.head = new_head
def __init__(self, key, block_size=64):
    self.block_size = block_size
    ipadblock = 0x36
    opadblock = 0x5c
    # for i in range(0, block_size):
    #     ipad = (ipad << 8) + 0x36
    #     opad = (opad << 8) + 0x5c
    self.key = key
    self.keylen = 0
    self.keystr = ""
    while key != 0:
        self.keystr = chr(key % 256) + self.keystr
        key >>= 8
        self.keylen += 1
    if self.keylen > self.block_size:
        self.keyhash = hash()
        self.k0str = self.keyhash.parsing(self.keystr)
        self.k0str += chr(0) * (self.block_size - len(self.k0str))
    else:
        self.k0str = self.keystr + chr(0) * (self.block_size - len(self.keystr))
    self.kistr = ""
    self.kostr = ""
    for i in range(0, len(self.k0str)):
        self.kistr += chr(ord(self.k0str[i]) ^ ipadblock)
        self.kostr += chr(ord(self.k0str[i]) ^ opadblock)
def create_content(db, id, name):
    pass_char_options = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890!£$%&*@#-_+=?'
    password = ''
    salt = ''
    for i in range(16):
        password += pass_char_options[random.randint(1, len(pass_char_options) - 1)]
    for i in range(8):
        salt += pass_char_options[random.randint(1, len(pass_char_options) - 1)]
    hashed_pass = hash.hash(password + salt)
    c = db.cursor()
    username = '******' % (name.lower()[0], name.lower()[name.index(' ') + 1:])
    email = '*****@*****.**' % (
        (name.lower()[0], name.lower()[name.index(' ') + 1:]))
    c.execute(
        'INSERT INTO users (userid, username, name, hash, salt, email) VALUES (?,?,?,?,?,?)',
        (id, username, name, hashed_pass, salt, email))
    date = datetime.datetime.now() - datetime.timedelta(28)
    for i in range(random.randrange(4, 8)):
        content = 'Some random text for item %d' % (i)
        title = 'Item %d' % (i)
        date = date + datetime.timedelta(random.randrange(1, 3),
                                         minutes=random.randrange(1, 120),
                                         hours=random.randrange(0, 6))
        c.execute(
            'INSERT INTO posts (creator,date,title,content) VALUES (?,?,?,?)',
            (id, date.timestamp(), title, content))
def processURL(url):
    # Hash URL
    token = hash(url)
    siv.v2json(url, token)
    return token
def main(verbose, database, mode, timestamp, label, path):
    """ Tool to look up known file hashes """
    mode = 'c' if mode is None else mode
    if database == DEFAULT_DB_PATH:
        database_folder = database.split("/").pop()
        database = environ['HOME'] + "/" + database_folder
    db = plyvel.DB(database, create_if_missing=True)
    path = set(path)
    if len(path) == 0:
        path.add(".")
    for path in path:
        for file_path in walk_paths(path):
            hash_value = db.get(hash(file_path))
            if mode == 'a':
                add(db, hash_value, file_path, label, verbose)
            elif mode == 'c':
                check(timestamp, hash_value, file_path)
            elif mode == 'u':
                update(db, hash_value, file_path, verbose)
    db.close()
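The lookup tool above keys the plyvel database on hash(file_path), so that helper has to return bytes. Below is a minimal sketch of such a helper, assuming a chunked SHA-256 digest of the file contents; the digest choice and chunk size are assumptions, only the call shape comes from the db.get/db.put usage.

import hashlib

def hash(file_path):
    # digest the file in chunks so large files do not need to fit in memory
    digest = hashlib.sha256()
    with open(file_path, "rb") as handle:
        for chunk in iter(lambda: handle.read(65536), b""):
            digest.update(chunk)
    # return raw bytes, suitable as a plyvel key
    return digest.digest()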
def constructGraph(self, input_graph):
    # apply hash function on all nodes in graph
    # get buckets of nodes
    # convert the graph into its new representation
    for i in range(self.d):
        self.matrices[i] = {}
        matrix = self.matrices[i]
        for node in input_graph:
            hashed_node = hash(node, self.w, i)
            if hashed_node not in matrix:
                matrix[hashed_node] = {}
            for adjacent_node in input_graph[node]:
                hashed_adjacent_node = hash(adjacent_node, self.l, i)
                if hashed_adjacent_node not in matrix[hashed_node]:
                    matrix[hashed_node][hashed_adjacent_node] = 0
                matrix[hashed_node][hashed_adjacent_node] += input_graph[node][adjacent_node]
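The graph-sketch methods here (constructGraph, addEdge, estimateEdgeWeight) all call a seeded bucket hash of the form hash(value, size, i), one effectively independent function per table index i. Below is a minimal sketch of what such a family might look like; the SHA-256 construction is an assumption, only the signature is taken from the calls above.

import hashlib

def hash(value, size, i):
    # mix the table index i into the digest so each of the d tables
    # behaves like an independent hash function
    digest = hashlib.sha256((str(i) + ":" + str(value)).encode("utf-8")).hexdigest()
    # map the digest into one of `size` buckets
    return int(digest, 16) % size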
def find_most_similar(image, resource_dir='resources'):
    """
    find_most_similar
    :param image - PIL resource
    :param resource_dir Resource directory
    :return Name of what the image probably is
    """
    lst = []
    # Hash the image
    hashed_image = hash(image)
    # Go through all resources in the resource directory
    for name in os.listdir(resource_dir):
        # Join the path names
        path = os.path.join(resource_dir, name)
        # Go through all images in the new path
        for fp in os.listdir(path):
            # Load the other image
            compared = Image.open(os.path.join(path, fp))
            # If the images are the same, return the name of the image
            if image == compared:
                return name
            else:
                # Otherwise append the name and the hash difference to the list
                lst.append({
                    'name': name,
                    'value': hash(compared) - hashed_image
                })

    def comp(a):
        """
        comp
        What key is the comparison being run on?
        """
        return a['value']

    # Sort list with custom key
    lst.sort(key=comp)
    # Return the most likely match
    return lst[0]['name']
def generateProductMapping(product, productCodeMap={}, duplicateList=[]):
    key = hash.hash(product)
    if key in productCodeMap and productCodeMap[key] != [product]:
        productCodeMap[key].append(product)
        duplicateList.append(key)
        return 1
    else:
        productCodeMap[key] = [product]
        return 0
def titles_hash(data_path):
    movie_titles = hash(2**15)
    with open(data_path, 'r') as movies_csv:
        csv_reader = csv.reader(movies_csv)
        next(csv_reader, None)
        for row in csv_reader:
            movie_titles.add(int(row[0]), row[1])
    return movie_titles
def signup(email, password, push_token):
    salt = str(uuid.uuid4())
    password = hash.hash(password, salt)
    sql = """
        INSERT INTO User (email, salt, password, push_token)
        VALUES(?, ?, ?, ?);
    """
    query_db(sql, [email, salt, password, push_token])
    return query_db("SELECT * FROM User WHERE email=?;", [email], one=True)
def read_ratings(movies_data, ratings_data):
    movies_info = hash(2**15)
    users_info = hash(2**18)
    with open(movies_data, 'r') as movies_csv:
        csv_reader = csv.reader(movies_csv)
        next(csv_reader, None)
        for row in csv_reader:
            movies_info.add(int(row[0]), [row[2].split('|'), [], 0])
    with open(ratings_data, 'r') as ratings_csv:
        csv_reader = csv.reader(ratings_csv)
        next(csv_reader, None)
        for row in csv_reader:
            # movies_info operations
            data = movies_info.get(int(row[1]))[0]
            data[1].append(float(row[2]))
            data[2] += 1
            movies_info.update(int(row[1]), data)
            # users_info operations
            data = users_info.get(int(row[0]))[0]
            if not data:
                users_info.add(int(row[0]), [[int(row[1])], [float(row[2])]])
            else:
                data[0].append(row[1])
                data[1].append(row[2])
                users_info.update(int(row[0]), data)
    for cell in movies_info.data:
        if cell != None:
            if cell[1][2]:
                sum = 0
                for rating in cell[1][1]:
                    sum += rating
                cell[1][1] = sum / cell[1][2]
            else:
                cell[1][1] = 0
    return movies_info, users_info
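titles_hash and read_ratings treat hash(size) as a fixed-size hash-table class with add, get, update and a .data array of cells. Below is a minimal sketch consistent with that usage; the linear-probing scheme and the [key, value] cell layout are assumptions inferred from how get(key)[0] and cell[1] are accessed above.

class hash:
    def __init__(self, size):
        self.size = size
        self.data = [None] * size      # each cell is None or [key, value]

    def _index(self, key):
        # linear probing from the key's home slot; assumes the table never fills up
        i = key % self.size
        while self.data[i] is not None and self.data[i][0] != key:
            i = (i + 1) % self.size
        return i

    def add(self, key, value):
        self.data[self._index(key)] = [key, value]

    def get(self, key):
        cell = self.data[self._index(key)]
        # wrap the value in a list so callers can write get(key)[0]
        return [cell[1]] if cell is not None else [None]

    def update(self, key, value):
        self.data[self._index(key)] = [key, value]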
def verify(message, z, c, t, length, generator, prime, pub_key):
    print("Verifying the signature")
    m_binary = ''.join(format(ord(i), 'b') for i in message)
    if c == hash(int(bin(t) + m_binary, 2), generator, prime, length):
        if (pow(pub_key, c, prime) * t) % prime == pow(generator, z, prime):
            return True
        else:
            return False
    else:
        return False
def verify(message, z, c, t, length, generator, prime, pub_key):
    print("Verifying the signature")
    m_binary = ''.join(format(ord(i), 'b') for i in message)
    if c == hash(int(bin(t) + m_binary, 2), generator, prime, length):
        if (pow(pub_key, c, prime) * t) % prime == pow(generator, z, prime):
            return "Signs and message match"
        else:
            return "Signs are incorrect"
    else:
        return "Message is not the same"
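Both verify variants, and the signed linked-list append earlier, rely on an integer hash of the form hash(value, generator, prime, length). Below is a minimal sketch, assuming a SHA-256 digest truncated to length bits; only the signature is taken from the calls, the construction itself is an assumption.

import hashlib

def hash(value, generator, prime, length):
    # bind the group parameters into the hashed material
    material = "%d:%d:%d" % (value, generator, prime)
    digest = int(hashlib.sha256(material.encode("utf-8")).hexdigest(), 16)
    # keep only `length` bits so the challenge fits the chosen group size
    return digest % (1 << length)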
def webtext(url):
    i = url
    nr = requests.get(i)
    nowtext = hash.hash(nr.text)
    print(deftext)
    print(nowtext)
    if nowtext != deftext:
        return ('There is a possibility that this page has been tampered with.')
    else:
        return ('OK')
def verify(message, signature, public_key):
    # hash the message
    h = hash(message)
    # load the verification module
    verifier = DSS.new(public_key, 'deterministic-rfc6979')
    try:
        verifier.verify(h, signature)
        return True
    except ValueError:
        return False
def brute_force_random_find(h, l):
    c = 0
    s = ""
    while h != hash.hash(s):
        s = new_random_guess(l)
        c += 1
        if c % 1000000 == 0:
            sys.stdout.write(".")
            sys.stdout.flush()
    sys.stdout.write("\n")
    return c, s
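brute_force_random_find assumes a new_random_guess(l) helper that produces candidate strings of length l. Below is a minimal sketch, assuming a lowercase alphabet and random.choice; only the name and call shape come from the loop above.

import random
import string

def new_random_guess(l):
    # draw l independent lowercase letters; the alphabet is an assumption
    return ''.join(random.choice(string.ascii_lowercase) for _ in range(l))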
def test3_test():
    # timing for performance test
    t0 = time.time()
    data_path = "../data/test/test3"
    write_path = "../data/result/test3"
    strain_num = 6268
    print reader(data_path, write_path, strain_num)
    call(["bgzip", "../data/result/test3_result.vcf"])
    crossval_hash = hash("../data/cross_validation/test3_crossval.vcf.gz")
    result_hash = hash("../data/result/test3_result.vcf.gz")
    assert crossval_hash == result_hash
    t1 = time.time()
    print "Time for test 3: " + str(t1 - t0)
def createCheck(args, sessid=0):
    userid = getUser(sessid)
    if userid == None:
        raise ValueError("You must be logged in to issue a check.")
    lookup = args.currency + ":" + NAMESPACE
    currencyid = getCurrencyId(lookup)
    amt = int(args.amount)
    updatePublicAccount(userid, lookup, balancechange=-amt)
    checksecret = randstr(IDLEN)
    checkhash = hash(checksecret)
    while checkhash in data.checks:
        checksecret = randstr(IDLEN)
        checkhash = hash(checksecret)
    data.checks.addRow(checkhash, currencyid, amt)
    data.checks.save()
    data.pubaccts.save()
    print(f"Check Secret: \"{checksecret}\"")
    print(f"Check issued for {amt} units of {lookup}")
def login():
    if request.method == "GET":
        return render_template("login.html")
    else:
        name = request.form["name"]
        password = request.form["password"]
        user = db_session.query(User).filter_by(name=name).first()
        if user is None or user.password != hash(password):
            return render_template("login.html", error="Invalid Username or Password")
        session["name"] = name
        return redirect(url_for("dashboard"))
def remove_multiples(p, n):
    print('removing multiples')
    prime = []
    htsize = n
    # build each bucket as a distinct list
    # ([[]] * htsize would alias every bucket to the same list)
    hash_table = [[] for _ in range(htsize)]
    for py in p:
        # print('processing ', py)
        py_hash = hash.hash(py[0], py[1], htsize)
        # print('checking entry ', py_hash, hash_table[py_hash])
        if py in hash_table[py_hash]:
            # print('skipping')
            continue
        # print('appending ', py)
        prime.append(list(py))
        py_mult = list(py)
        while py_mult[0] < n:
            # print('in while: ', py_mult, n)
            h = hash.hash(py_mult[0], py_mult[1], htsize)
            hash_table[h].append(list(py_mult))
            # print('appending ', py_mult, ' in position ', h)
            py_mult[0] += py[0]
            py_mult[1] += py[1]
    return prime
def sign(message, key_storage='privatekey.pem'):
    # load the private key for signing
    f = open(key_storage, 'rt')
    private_key = ECC.import_key(f.read())
    # hash the message
    h = hash(message)
    # sign with the private key
    signer = DSS.new(private_key, 'deterministic-rfc6979')
    signature = signer.sign(h)
    # return the signature
    return signature
def execucao_hash(pag_tam, pag_quant):
    # runs the hash structure over the csv workload and times it
    arquivo = open('dados.csv')
    linhas = csv.reader(arquivo)
    ini = time.time()
    auxhash = hash(pag_tam, pag_quant)
    for linha in linhas:
        if linha[0] == '+':
            auxhash.inserir(int(linha[1]))
    # rewind before the second pass, otherwise the removals read an exhausted file
    arquivo.seek(0)
    linhas = csv.reader(arquivo)
    for linha in linhas:
        if linha[0] == '-':
            auxhash.remover(int(linha[1]))
    fim = time.time()
    arquivo.close()
    return (fim - ini)
def generateProductMappingFromCSV(filename, productCodeMap={}, duplicateList=[]):
    import csv
    with open(filename, 'rU') as productsFile:
        productsList = csv.reader(productsFile, delimiter=' ', quotechar='|')
        for row in productsList:
            product = ' '.join(row)
            key = hash.hash(product)
            if key in productCodeMap and productCodeMap[key] != [product]:
                productCodeMap[key].append(product)
                duplicateList.append(key)
            else:
                productCodeMap[key] = [product]
    return productCodeMap, duplicateList
def test_known(self):
    self.assertEqual(680131659347, hash.hash('leepadg'))
def update(self, mess):
    self.hash1 = hash()
    self.hash2 = hash()
    return self.hash2.parsing(self.kostr + self.hash1.parsing(self.kistr + mess))
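__init__ and update above implement an HMAC-style construction with inner and outer pads over a hash() object whose parsing() method is assumed to return a digest string. For comparison, here is a minimal sketch of the same keyed-hash idea using the standard library's hmac module over bytes; this is not the author's hash()/parsing() helper.

import hashlib
import hmac

# stock HMAC-SHA-256 over byte strings; key and message are illustrative values
tag = hmac.new(b"\x0b" * 16, b"The quick brown fox", hashlib.sha256).hexdigest()
print(tag)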
def hash_function(self, key, size):
    return hash(key, size)