def reset_password(request):
    # Reset the password of the user whose sha512(login) matches the
    # 'loginhash' URL segment, then e-mail the new password to that user.
    loginhash = request.matchdict['loginhash']
    users = User().queryObject().all()
    for user in users:
        # NOTE(review): assumes user.login is a byte string (Python 2);
        # sha512() of a str would raise under Python 3 — TODO confirm.
        if sha512(user.login).hexdigest() == loginhash:
            new_password = rand_string(10)
            # WARNING(review): this logs the plain-text password.
            logger.info('Password of user ' + user.login +
                        ' has been reset to ' + new_password)
            # Stored form is an unsalted sha512 hex digest.
            user.password = sha512(new_password).hexdigest()
            user.save()
            localizer = get_localizer(request)
            # Create the body of the message (a plain-text and an HTML version)
            url = request.route_url('login')
            text_ts = _('plain_password_reset_mail', mapping={
                'name': user.name,
                'password': new_password,
                'url': url}, domain='Ondestan')
            html_ts = _('html_password_reset_mail', mapping={'name': user.name,
                        'password': new_password,
                        'url': url}, domain='Ondestan')
            subject_ts = _('subject_password_reset_mail', domain='Ondestan')
            text = localizer.translate(text_ts)
            html = localizer.translate(html_ts)
            subject = localizer.translate(subject_ts)
            send_mail(html, text, subject, user.email)
            # At most one user can match the hash; stop scanning.
            break
def _verify_token_in_couch(self, uuid, token):
    """
    Query couchdb to decide if C{token} is valid for C{uuid}.

    @param uuid: The user uuid.
    @type uuid: str
    @param token: The token.
    @type token: str

    @raise InvalidAuthTokenError: Raised when token received from user is
        either missing in the tokens db or is invalid.
    """
    server = Server(url=self._app.state.couch_url)
    dbname = self.TOKENS_DB
    db = server[dbname]
    # lookup key is a hash of the token to prevent timing attacks.
    # NOTE: 'token' is rebound here to the couch document (a dict), not the
    # raw token string the caller passed in.
    token = db.get(sha512(token).hexdigest())
    if token is None:
        raise InvalidAuthTokenError()
    # we compare uuid hashes to avoid possible timing attacks that
    # might exploit python's builtin comparison operator behaviour,
    # which fails immediatelly when non-matching bytes are found.
    couch_uuid_hash = sha512(token[self.TOKENS_USER_ID_KEY]).digest()
    req_uuid_hash = sha512(uuid).digest()
    if token[self.TOKENS_TYPE_KEY] != self.TOKENS_TYPE_DEF \
            or couch_uuid_hash != req_uuid_hash:
        raise InvalidAuthTokenError()
    return True
def login(self, username, password, session):
    # Authenticate username/password against the users collection. On
    # success create a server-side session record and set the caller's
    # SESSION_KEY; on a wrong password bump the failure counter and disable
    # the account once self.password_tries is reached.
    # Returns True on success, False otherwise.
    user_results = list(self.users.find({
        "USERNAME": username,
        "ENABLED": "true"
    }))
    if len(user_results) == 1:
        # Salted sha512 of the supplied password (Python 2 str concat).
        hashed = hashlib.sha512(str(password) +
                                str(user_results[0]['SALT'])).hexdigest()
        if 'PASSWORD' in user_results[0]:
            if user_results[0]['PASSWORD'] == hashed:
                # Session id: sha512 over time + username + server salt.
                session_hash = hashlib.sha512('%s-%s-%s' % (
                    str(time.time()), str(username),
                    self.session_salt)).hexdigest()
                self.sessions.insert({
                    "session_id": session_hash,
                    "last_accessed": datetime.datetime.utcnow(),
                    "USERNAME": username})
                session['SESSION_KEY'] = session_hash
                # Reset the failure counter and stamp the login time.
                self.users.update({
                    "USERNAME": username
                }, {
                    "$set": {
                        "pass_failed": 0,
                        "last_login": datetime.datetime.utcnow()}})
                return True
            else:
                # Wrong password: count the attempt; lock out at the limit.
                if 'pass_failed' in user_results[0]:
                    tries = int(user_results[0]['pass_failed'])
                else:
                    tries = 0
                tries = int(tries) + 1
                if tries >= int(self.password_tries):
                    self.users.update({"USERNAME": username}, {
                        "$set": {
                            "ENABLED": "false",
                            "pass_failed": tries}})
                else:
                    self.users.update({"USERNAME": username}, {
                        "$set": {"pass_failed": tries}})
    return False
def terminal(): print """ Welcome to the HACTT Terminal! """ welcome = raw_input(' [login] [signup]: ') if welcome == 'login': userauth = raw_input('Username: '******'HACTTusers.txt', 'a+').read() storedpass = open('HACTThashes.txt', 'a+').read() if re.search(pwauth, storedpass) and re.search(userauth, storedusers): print 'Successfully Logged In!\n' print 'Type [help] for a list of commands' while True: prompt = raw_input('>>> ') if prompt in COMMANDS.keys(): COMMANDS[prompt]() else: print 'Error invalid command.' else: print 'Login failed' elif welcome == 'signup': username = raw_input('Username: '******'Password: '******'HACTThashes.txt', 'a+') usernames = open('HACTTusers.txt', 'a+') hashes.write(password + '\n') usernames.write(username + '\n') print 'Success!' usernames.close()
def hash_directory(cls, sourcePath, salt=""):
    """Return a sha512 hex digest over the contents of every file found
    under ``sourcePath``, optionally salted.

    Args:
        sourcePath: directory to scan (passed through to ``cls.find_files``).
        salt: optional salt folded into the final digest by rehashing.

    Returns:
        Hex digest string of the combined file contents, or -1 when a file
        cannot be read (legacy sentinel kept for backward compatibility).
    """
    filepaths = cls.find_files(sourcePath=sourcePath, exclude_settings=True,
                               include_examples=False)
    # Normalise path separators on Windows so the sort order (and therefore
    # the resulting hash) is identical across platforms.
    if sys.platform == "win32" or sys.platform == "cygwin":
        filepaths = sorted(fp.replace('\\', '/') for fp in filepaths)
    else:
        filepaths.sort()
    SHA512hash = hashlib.sha512()
    for filepath in filepaths:
        try:
            with open(filepath, 'rb') as fh:
                # Stream in 4 KiB chunks to keep memory bounded.
                while True:
                    buf = fh.read(4096)
                    if not buf:
                        break
                    SHA512hash.update(buf)
        except (IOError, OSError):
            # Narrowed from a bare except: only I/O failures map to the
            # legacy -1 sentinel; programming errors now propagate.
            return -1
    totalHash = SHA512hash.hexdigest()
    if salt:
        # Fold the salt in by rehashing the combined digest. The encode()
        # calls keep this working under Python 3 (hexdigest is ASCII, so
        # Python 2 behaviour is unchanged).
        saltedSHA512hash = hashlib.sha512(totalHash.encode('utf-8'))
        saltedSHA512hash.update(
            salt if isinstance(salt, bytes) else salt.encode('utf-8'))
        totalHash = saltedSHA512hash.hexdigest()
    return totalHash
def derive_password(password, salt, length, iteration=10000):
    '''Use pbkdf2 to create a new password.'''
    def finalize_password(encoded_password):
        '''Pad length and add special characters/numbers to passwords.

        Necessary when desired length is not a valid base64 length.
        '''
        # substitute letters for special characters or numbers
        encoded_password = encoded_password.replace("i", "!")
        encoded_password = encoded_password.replace("a", "@")
        encoded_password = encoded_password.replace("s", "$")
        encoded_password = encoded_password.replace("I", "1")
        encoded_password = encoded_password.replace("O", "0")
        # Pad with '+' when the base64 output is shorter than requested.
        difference = length - len(encoded_password)
        final_password = encoded_password + "+" * difference
        return final_password[0:length]  # chop off any excess

    # hash the password because why not?
    hashed_password = sha512(password).hexdigest()
    # and the salt
    hashed_salt = sha512(salt).hexdigest()
    # calc_keysize maps the requested output length to a PBKDF2 key size.
    keylen = calc_keysize(length)
    key = pbkdf2_bin(hashed_password, hashed_salt, keylen=keylen,
                     iterations=iteration)
    new_password = b64encode(key)
    return finalize_password(new_password)
def _pool_worker(nonce, initialHash, target, pool_size):
    """Proof-of-work search over one pool worker's nonce stride.

    The nonce advances by ``pool_size`` each round, so workers started at
    different offsets probe disjoint arithmetic progressions. Iterates
    until the double-sha512 trial value is <= ``target``.

    Returns:
        [trialValue, nonce] for the first qualifying nonce.
    """
    _set_idle()
    trialValue = 99999999999999999999
    while trialValue > target:
        nonce += pool_size
        inner = hashlib.sha512(pack('>Q', nonce) + initialHash).digest()
        trialValue, = unpack('>Q', hashlib.sha512(inner).digest()[0:8])
    return [trialValue, nonce]
def populateDefaultUsers():
    # Seed the user service with a default 'admin' (administrator role) and
    # two sample collaborator accounts, skipping any that already exist.
    userService = support.entityFor(IUserService)
    assert isinstance(userService, IUserService)
    userRbacService = support.entityFor(IUserRbacService)
    assert isinstance(userRbacService, IUserRbacService)

    users = userService.getAll(limit=1, q=QUser(name='admin'))
    if not users:
        user = User()
        user.FirstName = 'Janet'
        user.LastName = 'Editor'
        user.EMail = '*****@*****.**'
        user.Name = 'admin'
        # Default password is sha512('a') — development seeding only.
        user.Password = hashlib.sha512(b'a').hexdigest()
        user.Id = userService.insert(user)
    else:
        user = next(iter(users))
    userRbacService.assignRole(user.Id, blogRoleAdministratorId())

    for name in (('Andrew', 'Reporter'), ('Christine', 'Journalist')):
        # Login name is the lower-cased last name (e.g. 'reporter').
        loginName = name[1].lower()
        users = userService.getAll(limit=1, q=QUser(name=loginName))
        if not users:
            user = User()
            user.FirstName = name[0]
            user.LastName = name[1]
            user.EMail = '*****@*****.**' % name
            user.Name = loginName
            user.Password = hashlib.sha512(b'a').hexdigest()
            user.Id = userService.insert(user)
        else:
            user = next(iter(users))
        userRbacService.assignRole(user.Id, blogRoleCollaboratorId())
def test():
    # CLI smoke test: the zstack-cli prompt must display the logged-in
    # account name, and '-' after logout.
    import uuid
    login_output = login_cli_by_account('admin', 'password')
    if login_output.find('%s >>>' % ('admin')) < 0:
        test_util.test_fail('zstack-cli is not display correct name for logined account: %s' % (login_output))

    account_name1 = uuid.uuid1().get_hex()
    # NOTE(review): uuid.get_hex() exists only on Python 2.
    account_pass1 = hashlib.sha512(account_name1).hexdigest()
    test_account1 = test_account.ZstackTestAccount()
    test_account1.create(account_name1, account_pass1)
    test_obj_dict.add_account(test_account1)
    # Login uses the raw account name as the password although the account
    # was created with its hash — presumably the CLI hashes it before
    # comparing; TODO confirm.
    login_output = login_cli_by_account(account_name1, account_name1)
    if login_output.find('%s >>>' % (account_name1)) < 0:
        test_util.test_fail('zstack-cli is not display correct name for logined account: %s' % (login_output))

    account_name2 = uuid.uuid1().get_hex()
    account_pass2 = hashlib.sha512(account_name2).hexdigest()
    test_account2 = test_account.ZstackTestAccount()
    test_account2.create(account_name2, account_pass2)
    test_obj_dict.add_account(test_account2)
    test_account_uuid2 = test_account2.get_account().uuid
    login_output = login_cli_by_account(account_name2, account_name2)
    if login_output.find('%s >>>' % (account_name2)) < 0:
        test_util.test_fail('zstack-cli is not display correct name for logined account %s' % (login_output))

    # After logout the prompt shows '-' in place of the account name.
    logout_output = logout_cli()
    if logout_output.find('- >>>') < 0:
        test_util.test_fail('zstack-cli is not display correct after logout: %s' % (login_output))

    # Cleanup of the throwaway accounts.
    test_account1.delete()
    test_account2.delete()
    test_obj_dict.rm_account(test_account1)
    test_obj_dict.rm_account(test_account2)
def hash_sha512(password, salt='', n=12):
    # Produce a salted, iterated sha512 password hash in the form
    # '$sha512$<rounds>$<salt>$<hash>'. Python 2 only (xrange, str hashing,
    # and the module-level `sha` helper used for salt generation).
    from hashlib import sha512
    # When no salt is given, derive one from 40 random bytes.
    salt = salt or sha(urandom(40)).hexdigest()
    hashed = sha512(salt + password).hexdigest()
    # Strengthen by rehashing n times with the salt appended.
    for i in xrange(n):
        hashed = sha512(hashed + salt).hexdigest()
    return '$sha512$%i$%s$%s' % (n, salt, hashed)
def _doSafePoW(target, initialHash): nonce = 0 trialValue = float('inf') while trialValue > target: nonce += 1 trialValue, = unpack('>Q',hashlib.sha512(hashlib.sha512(pack('>Q',nonce) + initialHash).digest()).digest()[0:8]) return [trialValue, nonce]
def sign_up(request):
    # Sign-up view: create an inactive user, store sha512 hashes of the
    # confirmation code and username, and e-mail the code for activation.
    # Protect against CSRF attacks
    if request.method == 'POST':
        # If the form has been submitted...
        form = SignUpForm(request.POST)
        if form.is_valid():
            m1 = hashlib.sha512()
            m2 = hashlib.sha512()
            username = form.cleaned_data['username']
            password = form.cleaned_data['password']
            first_name = form.cleaned_data['first_name']
            last_name = form.cleaned_data['last_name']
            email = form.cleaned_data['email']
            u = User.objects.create_user(username=username,
                                         password=password,
                                         first_name=first_name,
                                         last_name=last_name,
                                         email=email
                                         )
            # Account stays inactive until the e-mailed code is confirmed.
            u.is_active = False
            u.save()
            code = sendmail.generate_code()
            # Only hashes of the code and username are persisted.
            m1.update(code)
            m2.update(username)
            ConfirmationCode.objects.create(code=m1.hexdigest(),
                                            uid=m2.hexdigest())
            sendmail.sendmail(email, code)
            return HttpResponseRedirect('/activate/')  # Redirect after POST
    else:
        form = SignUpForm()  # An unbound form
    return render(request, 'signup/sign_up.html', {
        'form': form,
    })
def activate(request):
    # Account-activation view (Python 2): look up the sha512 hashes of the
    # submitted code and username; on a match, activate the user and delete
    # the confirmation code record.
    c = {}
    c.update(csrf(request))
    state = "Activate above"
    if request.method == 'POST':
        code = request.POST.get('code')
        username = request.POST.get('username')
        print code
        print username
        m1 = hashlib.sha512()
        m2 = hashlib.sha512()
        #code = form.cleaned_data['code']
        #uid = form.cleaned_data['uid']
        m1.update(code)
        m2.update(username)
        code_obj = ConfirmationCode.objects.filter(code=m1.hexdigest(),
                                                   uid=m2.hexdigest())
        print code_obj
        if code_obj:
            user = User.objects.get(username=username)
            user.is_active = True
            user.save()
            messages.success(request, "You have successfully activated account. You can now login")
            # One-shot code: remove it once consumed.
            ConfirmationCode.objects.get(code=m1.hexdigest(),
                                         uid=m2.hexdigest()).delete()
            return HttpResponseRedirect("/")
        else:
            state = "That was incorrect"
    return render_to_response("signup/activate.html", {'state': state},
                              context_instance=RequestContext(request))
def validate(self):
    # Sign-up form validation: base checks, uniqueness of email/username,
    # minimum password length. On the long-enough path both password fields
    # are replaced in-place by salted sha512 digests before comparison.
    if not Form.validate(self):
        return False
    valid=True #assume success
    email = mongo.db.users.find({'email':self.email.data.lower()}).count()
    username = mongo.db.users.find({'username':self.username.data.lower()}).count()
    if email>0:
        self.email.errors.append("That email is already taken")
        valid=False
    if username>0:
        self.username.errors.append("That username is already taken")
        valid=False
    if(len(self.password.data)<8):
        self.password.errors.append("Passwords must be at least 8 characters long")
        self.password1.errors.append("")
        valid=False
    else:
        # Both entries are hashed with the same fresh salt so the equality
        # check below still detects mismatched passwords.
        self.salt = uuid.uuid4().bytes
        self.password.data = hashlib.sha512(self.password.data.encode('utf-8') + self.salt).digest()
        self.password1.data = hashlib.sha512(self.password1.data.encode('utf-8') + self.salt).digest()
        if(self.password.data!=self.password1.data):
            self.password.errors.append("The passwords do not match")
            self.password1.errors.append("")
            valid=False
    return valid
def update_password(email, password):
    """Create or replace the stored password hash for the user with the
    given e-mail address.

    The stored value is sha512 iterated 413 times over
    ``salt2 + password + salt1``, where salt1 is a fresh uuid4 hex string
    and salt2 is derived from the user's e-mail (characters 1..6).

    Args:
        email: e-mail address identifying the user.
        password: new plain-text password.
    """
    user = DBSession.query(User).filter(User.email == email).first()
    cur_password = DBSession.query(Password).filter(
        Password.userid == user.id).first()

    # The salting/hashing is identical for the create and update paths, so
    # compute it once up front (it was previously duplicated verbatim in
    # both branches).
    salt1 = uuid.uuid4().hex
    salt2 = user.email[1:7]
    #combo = (salt2 + password + salt1)[0:32]
    hashed_password = salt2 + password + salt1
    for i in range(0, 413):
        hashed_password = hashlib.sha512(hashed_password.encode()).hexdigest()

    if cur_password is None:
        # First password for this user: create the row.
        DBSession.add(Password(userid=user.id,
                               salt1=salt1,
                               salt2=salt2,
                               password=hashed_password,
                               version=1,
                               ))
    else:
        # Replace the existing credentials in place.
        cur_password.salt1 = salt1
        cur_password.salt2 = salt2
        cur_password.password = hashed_password
def promjeniLozinku():
    # "Change password" endpoint for a faculty account: verifies the
    # current password (sha512 hex digests), stores the new one, and writes
    # an audit-log (Dnevnik) entry. Returns a JSON {status, message}
    # response (messages are user-facing Croatian strings).
    result = {
        "status": False,
        "message": "Neuspješna promjena lozinke!"
    }
    fakultet = baseSession.query(Fakultet).filter(
        Fakultet.idFakulteta == session["user_data"]["idFakulteta"]).first()
    if fakultet != None:
        if fakultet.lozinka == hashlib.sha512((request.form['trenutnaLozinka']).encode()).hexdigest():
            fakultet.lozinka = hashlib.sha512((request.form['novaLozinka']).encode()).hexdigest()
            baseSession.add(fakultet)
            baseSession.commit()
            result["status"] = True
            result["message"] = "Lozinka uspješno promjenjena!"
            # Audit logging is best-effort: a failure here must not undo
            # the already-committed password change.
            try:
                dnevnik = Dnevnik(session["user_data"]["idFakulteta"],
                                  "Fakultet", "Promjena lozinke")
                baseSession.add(dnevnik)
                baseSession.commit()
            except (BaseException, KeyError):
                baseSession.rollback()
                pass
        else:
            result["message"] += " Pogrešna lozinka!"
    return Response(json.dumps(result),
                    mimetype='application/json; charset=UTF-8')
def generate_token(uid, pkey):
    """
    Generate capsule signature from capsule destination uid and peer
    public key.
    """
    # sha256 over the concatenated sha512 digests of uid and pkey.
    # NOTE(review): Python 2 only — "{}{}".format(...) of two byte digests
    # yields a str that sha256() accepts in py2 but not in py3.
    return sha256("{}{}".format(sha512(uid).digest(),
                                sha512(pkey).digest())).digest()
def get_process_block_ordering(salt, num_process_blocks):
    """Derive a deterministic permutation of the block indices
    [0, num_process_blocks) from ``salt``.

    A stream of sha512 hex digests of four salt variants is folded into a
    running character-code sum; each previously-unseen value of that sum
    (mod num_process_blocks) is appended until every index appears once.

    Args:
        salt: text used to seed the ordering.
        num_process_blocks: number of indices in the permutation.

    Returns:
        List of the ints 0..num_process_blocks-1 in derived order.

    Raises:
        ValueError: if the digest stream is exhausted before a full
            permutation is produced.
    """
    order = [""]*num_process_blocks
    i = 0
    xor_c = 0
    # Four salt variants give 512 hex characters of deterministic entropy.
    ordering_hash = hashlib.sha512(bytes(salt, 'UTF-8')).hexdigest()
    ordering_hash += hashlib.sha512(bytes(salt[::-1], 'UTF-8')).hexdigest()
    ordering_hash += hashlib.sha512(bytes(salt+salt[::-1], 'UTF-8')).hexdigest()
    ordering_hash += hashlib.sha512(bytes(salt[::-1]+salt, 'UTF-8')).hexdigest()
    # Track already-emitted indices in a set: O(1) membership instead of
    # the original O(n) order.__contains__(...) list scan.
    seen = set()
    for c in ordering_hash:
        xor_c += ord(c)
        if xor_c >= num_process_blocks:
            xor_c %= num_process_blocks
        if xor_c not in seen:
            seen.add(xor_c)
            order[i] = xor_c
            i += 1
            if i >= num_process_blocks:
                break
    if i < num_process_blocks:
        raise ValueError("Unable to create process block ordering (max={}) from salt: {}".format(num_process_blocks, ordering_hash))
    if cmd_options.debug:
        print("Generated process block ordering: {}".format(order))
    return order
def register():
    """Register a new user from a JSON body of phone_number/name/password.

    Creates the user row with a fresh random salt and sha512(salt+password)
    hash, opens a session, and returns its id/expiry as a dict. Any failure
    aborts the request with HTTP 400.
    """
    try:
        phone = int(request.json['phone_number'])
        name = request.json['name']
        pword = request.json['password']
        user = phone
        if userID(user) >= 0:
            abort(400, 'Phone number already in use.')
        print('register() : phone=%s, name=%s' % (phone, name))
        # TODO sanity check ?
        pwd_salt = hashlib.sha512(urandom(128)).hexdigest()
        pwd_hash = hashlib.sha512(pwd_salt + pword).hexdigest()
        try:
            _dbcon.execute('INSERT INTO users VALUES(?, ?, ?, ?, ?)',
                           (None, name, phone, pwd_salt, pwd_hash))
            _dbcon.commit()
        except Exception:
            # Roll back only on failure. The original rolled back in a
            # `finally:` clause, i.e. unconditionally even after a
            # successful commit.
            _dbcon.rollback()
            raise
        # init session
        s = Session(userID(user))
        _sessions[s.id] = s
        # return session 'cookie'
        return {'session_id': s.id.hex, 'session_expires': s.expires}
    except:
        # NOTE(review): this boundary handler also swallows the HTTP error
        # raised by abort(400, 'Phone number already in use.') above and
        # replaces its message — confirm whether that is intended.
        print('Bad register request:')
        traceback.print_exc()
        abort(400, 'Bad register request.')
def post(self):
    # Open a "covert box": derive the retrieval key from the user key via
    # two sha512 passes (salted with the key itself), fetch the unexpired
    # CovertBox entries under that ancestor key, decode their messages and
    # render the 'opened' page.
    page = JINJA_ENVIRONMENT.get_template('pages/opened.html')
    user_key = self.request.get('user_key')
    salt = hashlib.sha512(user_key + user_key).hexdigest()
    retrieval_key = hashlib.sha512(salt + user_key + salt).hexdigest()
    box_query = CovertBox.query(
        ancestor=ndb.Key('retrieval_key', str(retrieval_key)),
        filters=CovertBox.expiry_date > datetime.now())
    box_list = box_query.fetch()
    for i in box_list:
        # Messages are stored base64-encoded; fall back to the raw value
        # when decoding is not possible.
        try:
            i.msg = base64.decodestring(i.msg)
        except google.appengine.ext.db.BadValueError:
            pass
        except UnicodeEncodeError:
            i.msg = i.msg.encode('utf-8')
        except binascii.Error:
            pass
    page_value = {
        'list': box_list,
        'time': datetime.now()
    }
    self.response.out.write(page.render(page_value))
def pubkey_to_ipv6(key, _cjdns_b32_map = [ # directly from util/Base32.h
        99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,
        99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,
        99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,
         0, 1, 2, 3, 4, 5, 6, 7, 8, 9,99,99,99,99,99,99,
        99,99,10,11,12,99,13,14,15,99,16,17,18,19,20,99,
        21,22,23,24,25,26,27,28,29,30,31,99,99,99,99,99,
        99,99,10,11,12,99,13,14,15,99,16,17,18,19,20,99,
        21,22,23,24,25,26,27,28,29,30,31,99,99,99,99,99 ]):
    """Convert a cjdns base32 public key (optionally ending in '.k') to its
    IPv6 address: the first 16 bytes of sha512(sha512(decoded_key)), hex
    encoded. The map default is a lookup table, never mutated.

    Raises:
        ValueError: on an invalid base32 character, an invalid key length
            (leftover bits), or an address not starting with 0xfc.
    """
    if key.endswith('.k'):
        key = key[:-2]
    # Decode little-endian base32: accumulate 5 bits per character and emit
    # full bytes. A bytearray (instead of chr()-built str) keeps this
    # working identically under both Python 2 and Python 3.
    bits, byte, res = 0, 0, bytearray()
    for c in key:
        n = _cjdns_b32_map[ord(c)]
        if n > 31:
            raise ValueError('Invalid key: {!r}, char: {!r}'.format(key, n))
        byte |= n << bits
        bits += 5
        if bits >= 8:
            bits -= 8
            res.append(byte & 0xff)
            byte >>= 8
    if bits >= 5 or byte:
        raise ValueError('Invalid key length: {!r} (leftover bits: {})'.format(key, bits))
    res = bytes(res)  # py2: str; py3: bytes — sha512 accepts either
    addr = sha512(sha512(res).digest()).hexdigest()[:32]
    # All cjdns addresses live in the fc00::/8 range.
    if addr[:2] != 'fc':
        raise ValueError(
            'Invalid cjdns key (first'
            ' addr byte is not 0xfc, addr: {!r}): {!r}'.format(addr, key))
    return addr
def main():
    # CLI entry point (Python 2): deterministically derive a site password
    # from a local key file, the login name and a prompted master password.
    parser = argparse.ArgumentParser(description="Password scrambler")
    parser.add_argument('--file', dest="file", default=None, help="File used to initialize generation", required=True)
    parser.add_argument('--login', dest="login", default=None, help="Login for which you want to use the password", required=True)
    parser.add_argument('--special', dest="special", default="_&#", help="Whitelist of special characters, i.e: '_&#'")
    parser.add_argument('--length', dest="length", default=30, help="Length of the password, default=30", type=int)
    args = parser.parse_args()
    # Master password is prompted, never passed on the command line.
    password = getpass.getpass()
    key = scramble(password)
    vec = scramble(args.login)
    raw = get_raw_bytes(args.file)
    # First AES pass over the key file, keyed by the scrambled credentials.
    aes = AESCipher(vec, key)
    aes_out1 = aes.encrypt(raw)
    del aes
    sha_digest = hashlib.sha512(aes_out1).digest()
    # A password-length-dependent 32-byte window of the digest keys pass 2.
    passlen = len(password) % len(sha_digest)
    key2 = sha_digest[passlen: passlen+32]
    aes = AESCipher(key, key2)
    aes_out2 = aes.encrypt(aes_out1)
    del aes
    # Hash a key-dependent suffix of the second ciphertext, base64-encode,
    # then trim and map into the requested length / character set.
    start = ord(key[0]) % len(aes_out2)
    portion = aes_out2[start:]
    result = hashlib.sha512(portion).digest()
    longpass = base64.b64encode(result)
    longpass = longpass[0:args.length]
    longpass = convert_to_charset(longpass, args.special)
    print "---"
    print longpass
    print "---"
def update_winapp2(url, hash_expected, append_text, cb_success):
    """Download latest winapp2.ini file.  Hash is sha512 or None to disable
    checks.

    Args:
        url: download URL for the new winapp2.ini.
        hash_expected: expected sha512 hex digest of the remote file, or a
            falsy value to skip both comparisons.
        append_text: callback for reporting status text to the UI.
        cb_success: callback invoked after a successful update.

    Raises:
        RuntimeError: when the download does not match hash_expected.
    """
    # first, determine whether an update is necessary
    from bleachbit import personal_cleaners_dir
    fn = os.path.join(personal_cleaners_dir, 'winapp2.ini')
    delete_current = False
    if os.path.exists(fn):
        # 'with' guarantees the handle is closed on the early return too —
        # the original leaked it on the "already up to date" path.
        with open(fn, 'r') as f:
            hash_current = hashlib.sha512(f.read()).hexdigest()
        if not hash_expected or hash_current == hash_expected:
            # update is same as current
            return
        delete_current = True
    # download update
    opener = build_opener()
    opener.addheaders = [('User-Agent', user_agent())]
    doc = opener.open(fullurl=url, timeout=20).read()
    # verify hash
    hash_actual = hashlib.sha512(doc).hexdigest()
    if hash_expected and not hash_actual == hash_expected:
        raise RuntimeError("hash for %s actually %s instead of %s" %
                           (url, hash_actual, hash_expected))
    # delete current
    if delete_current:
        from bleachbit.FileUtilities import delete
        delete(fn, True)
    # write file
    if not os.path.exists(personal_cleaners_dir):
        os.mkdir(personal_cleaners_dir)
    # 'with' also guarantees the new file is flushed and closed (the
    # original never closed the write handle).
    with open(fn, 'w') as f:
        f.write(doc)
    append_text(_('New winapp2.ini was downloaded.'))
    cb_success()
def Obliviously_Send(self, M0,M1,num_of_OT, lam):
    # 1-out-of-2 oblivious-transfer sender (Python 2): for each round,
    # exchange Diffie-Hellman-style values with the receiver and send both
    # messages XOR-masked with sha512-derived pads so the receiver can
    # unmask only the one matching its choice bit.
    # WARNING(review): random.randrange is not a CSPRNG; a production OT
    # implementation should use a cryptographically secure source.
    conn = self.conn
    for i in xrange(num_of_OT):
        print i
        r=random.randrange(self.q)
        c=random.randrange(self.q)
        conn.send(str(c))
        pk0 = conn.recv(200)
        #you should receive pk0 from the receiver via the sender_receiver channel
        pk0r=modifiedPower(int(pk0),r,self.q)
        gr=modifiedPower(self.g,r,self.q)
        # Pad for message 0, repeated to cover num_of_OT bytes.
        r0=(hashlib.sha512(str(pk0r)+str(0)).digest())
        r00=str(r0)* int(math.ceil(num_of_OT/len(r0))+1)
        cr=modifiedPower(c,r,self.q)
        # pk1r = c^r * (pk0^r)^-1 mod q: find the modular inverse of pk0r
        # via the extended Euclidean algorithm.
        gcd=inv=-1
        while gcd != 1 and inv <= 0 :
            gcd,inv,k = euclid(pk0r,self.q)
        pk1r=cr*inv%self.q
        r1=(hashlib.sha512(str(pk1r)+str(1)).digest())
        r10=str(r1) * int(math.ceil(num_of_OT/len(r1))+1)
        # Mask element i of each message with the matching pad byte, sent
        # as a zero-filled binary string of width lam.
        E0=bin(int(M0[i])^ord(r00[i]))[2:].zfill(lam)
        E1=bin(int(M1[i])^ord(r10[i]))[2:].zfill(lam)
        conn.send(str(gr))
        conn.send(str(E0))
        conn.send(str(E1))
    return True
def sign(self, msg):
    # DAGA-style linkable ring signature over `msg`: builds the challenge
    # chain c and responses s around the ring, closing it at self.index
    # with the real private key. Returns (final challenge, responses, tag).
    h = sha512()
    h.update(daga.elem_to_bytes(self.group_gen))
    h.update(daga.elem_to_bytes(self.tag))
    h.update(msg.encode("utf-8"))
    precompute = int.from_bytes(h.digest(), 'big')
    # Real commitment: random u; hash of (precompute, G^u, group_gen^u)
    # becomes this member's challenge.
    u = Rand.randrange(Q)
    s = [0] * len(self.pub_keys)
    c = [0] * len(self.pub_keys)
    h = sha512()
    h.update(daga.elem_to_bytes(precompute))
    h.update(daga.elem_to_bytes(pow(G, u, P)))
    h.update(daga.elem_to_bytes(pow(self.group_gen, u, P)))
    c[self.index] = int.from_bytes(h.digest(), 'big') % Q
    count = len(self.pub_keys)
    # Walk the ring from index+1 .. index-1, simulating every other member
    # with a random response s[idx] and deriving the next challenge from
    # the previous one.
    for i in range(1, count):
        idx = (i + self.index) % count
        s[idx] = Rand.randrange(Q)
        h = sha512()
        h.update(daga.elem_to_bytes(precompute))
        tmp = pow(G, s[idx], P) * pow(self.pub_keys[idx], c[idx-1], P) % P
        h.update(daga.elem_to_bytes(tmp))
        tmp = pow(self.group_gen, s[idx], P) * pow(self.tag, c[idx-1], P) % P
        h.update(daga.elem_to_bytes(tmp))
        c[idx] = int.from_bytes(h.digest(), 'big') % Q
    # Close the ring: derive the real response from the private key.
    s[self.index] = (u - self.priv_key * c[self.index-1]) % Q
    return (c[-1], s, self.tag)
def print_config(cfg, hide_password=True, history=False):
    """Returns a string representing the config of this ShutIt run.

    Args:
        cfg: two-level dict mapping section name -> {key: value}. Entries
            whose key is not a str or whose value is not a dict are skipped.
        hide_password: when True, 'password'/'passphrase' values are
            replaced by a repeatedly-rehashed sha512 digest.
        history: unused; kept for interface compatibility.

    Returns:
        Multi-line string of '[section]' headers and 'key:value' lines,
        emitted in sorted order for stable output.
    """
    def _as_bytes(val):
        # sha512 needs bytes on Python 3; Python 2 str passes through
        # unchanged because str is bytes there.
        return val if isinstance(val, bytes) else val.encode('utf-8')

    r = ''
    for k in sorted(cfg.keys()):
        if type(k) == str and type(cfg[k]) == dict:
            r = r + '\n[' + k + ']\n'
            for k1 in sorted(cfg[k].keys()):
                r = r + k1 + ':'
                if hide_password and (k1 == 'password' or k1 == 'passphrase'):
                    # Obfuscate secrets: hash once, then rehash 27 more
                    # times (28 total, matching the original countdown).
                    s = hashlib.sha512(_as_bytes(cfg[k][k1])).hexdigest()
                    for _ in range(27):
                        s = hashlib.sha512(_as_bytes(s)).hexdigest()
                    r = r + s
                else:
                    v = cfg[k][k1]
                    # BUG FIX: the original tested `type(v == bool)`, which
                    # is always truthy, so every value silently went through
                    # str(). The output below is identical for bool and str
                    # values, but the intent is now expressed correctly.
                    if isinstance(v, bool):
                        r = r + str(v)
                    elif isinstance(v, str):
                        r = r + v
                    else:
                        r = r + str(v)
                r = r + '\n'
    return r
def modify_password(username, password, new_password, confirm_password):
    # Change a user's password after verifying the old credentials.
    # Returns an error-message string on failure, or None on success.
    # Create the connection and cursor for the SQLite database.
    # NOTE(review): despite the comment above, this connects to MongoDB,
    # not SQLite.
    connection = MongoClient()
    c = connection['data']
    # Check if the passwords match.
    if new_password != confirm_password:
        return 'Passwords do not match.'
    # Check if the new password is valid.
    if len(new_password) < 8:
        return 'Password must be at least 8 characters long.'
    if not (
        bool(search(r'\d', new_password)) and
        bool(search('[a-zA-Z]', new_password))
    ):
        return 'Password must contain both letters and digits.'
    # If the user_info table doesn't exist, return an error message.
    if not "user_info" in c.collection_names():
        return 'Incorrect username or password.'
    # If the table does exist, check the old username and password.
    salt_n_hash = c.user_info.find_one({'username':username})
    if not (
        bool(salt_n_hash) and
        sha512((password + salt_n_hash['salt']) * 10000).hexdigest() ==
        salt_n_hash['hash_value']
    ):
        return 'Incorrect username or password.'
    # Create a random salt to add to the hash.
    salt = uuid4().hex
    # Create a hash, and use string concatenation to make the hash function slow
    # for added security.
    hash_value = sha512((new_password + salt) * 10000).hexdigest()
    # Change the old salt and hash_value to the new ones and return None.
    c.user_info.update({'username':username},
                       {"$set":{'salt':salt,'hash_value':hash_value}})
    return None
def testGridFsAssetstore(self): """ Test usage of the GridFS assetstore type. """ # Clear the assetstore database conn = getDbConnection() conn.drop_database('girder_assetstore_test') self.model('assetstore').remove(self.model('assetstore').getCurrent()) assetstore = self.model('assetstore').createGridFsAssetstore( name='Test', db='girder_assetstore_test') self.assetstore = assetstore chunkColl = conn['girder_assetstore_test']['chunk'] # Upload the two-chunk file file = self._testUploadFile('helloWorld1.txt') hash = sha512(chunk1 + chunk2).hexdigest() self.assertEqual(hash, file['sha512']) # We should have two chunks in the database self.assertEqual(chunkColl.find({'uuid': file['chunkUuid']}).count(), 2) self._testDownloadFile(file, chunk1 + chunk2) self._testDownloadFolder() # Delete the file, make sure chunks are gone from database self._testDeleteFile(file) self.assertEqual(chunkColl.find({'uuid': file['chunkUuid']}).count(), 0) empty = self._testEmptyUpload('empty.txt') self.assertEqual(sha512().hexdigest(), empty['sha512']) self._testDownloadFile(empty, '') self._testDeleteFile(empty)
def generate_secret_key(length=16):
    """Generate random 16 character base 32 secret key.

    Arguments:

    .. csv-table::
        :header: "argument", "type", "value"
        :widths: 7, 7, 40

        "*length*", "int", "Length of secret key, min 8, max 128."

    Returns:
        Random base 32 secret key string of the requested length.

    Raises:
        TypeError: when length is outside [8, 128] (kept for backward
            compatibility with the original interface).

    Usage::

        import googauth
        print googauth.generate_secret_key()
    """
    if length < 8 or length > 128:
        raise TypeError('Secret key length is invalid.')

    # Generate sha512 hash of 8192 random bytes
    sha_hash = hashlib.sha512()
    sha_hash.update(os.urandom(8192))
    hexhash = sha_hash.hexdigest()

    # Rehash 6 more times. encode('ascii') makes this work under Python 3
    # (b32encode and sha512 require bytes); hexdigest is ASCII so Python 2
    # behaviour is unchanged.
    for i in range(6):
        hexhash = hashlib.sha512(hexhash.encode('ascii')).hexdigest()

    return base64.b32encode(hexhash.encode('ascii')).decode('ascii')[:length]
def handlePassword(self):
    # Change-password handler: validates the current password (double
    # sha512 plus a per-user numeric salt), applies policy checks to the
    # new password, updates the user record and refreshes the login cookie.
    if 'current-pass' not in request.form or 'new-pass' not in request.form or 'confirm-pass' not in request.form:
        self.json['error_message'].append(self.responses['PASS_REQUIRED'])
    else:
        current_pass = request.form['current-pass']
        # Stored format: sha512(sha512(password).hexdigest() + str(salt)).
        current_hash = hashlib.sha512(hashlib.sha512(current_pass).hexdigest() + str(int(self.user['salt']))).hexdigest()
        new_pass = request.form['new-pass']
        confirm_pass = request.form['confirm-pass']
        if current_hash != self.user['password']:
            self.json['error_message'].append(self.responses['INCOR_PASS'])
        if len(new_pass) < 7 or len(new_pass) > 25:
            self.json['error_message'].append(self.responses['INVAL_PASS'])
        if new_pass != confirm_pass:
            self.json['error_message'].append(self.responses['MISMATCH_PASS'])
    # validate our inputs
    if len(self.json['error_message']) > 0:
        self.json['error'] = True
    else:
        self.json['error'] = False
        self.json['success_message'] = self.responses['SUCCESS_PASS']
        new_pass_hash = hashlib.sha512(hashlib.sha512(new_pass).hexdigest() + str(int(self.user['salt']))).hexdigest()
        newdata = {'$set': {'password': new_pass_hash}}
        # update the record in our mongodb
        config.db.users.update({'account': self.username}, newdata)
        response = app.make_response(json.dumps(self.json))
        # over write the cookie so they don't get logged out
        response.set_cookie('login-pass-hash', new_pass_hash)
        return response
    # NOTE(review): on the error path no response appears to be returned
    # (and new_pass_hash would be undefined); the original statement
    # placement is ambiguous in the source — TODO confirm against callers.
"amount": 35 }, "procurementMethodType": "competitiveDialogueEU", "value": { "currency": "UAH", "amount": 500 }, "procuringEntity": procuring_entity_en, "items": items_en_unit, "milestones": test_milestones, "mainProcurementCategory": "services", } tender_stage2_multiple_lots = { "procurementMethod": "selective", "dialogue_token": sha512('secret').hexdigest(), "title": "Послуги шкільних їдалень", "title_en": "Services in school canteens", "minimalStep": { "currency": "UAH", "amount": 35 }, "procurementMethodType": "competitiveDialogueEU.stage2", "value": { "currency": "UAH", "amount": 500 }, "shortlistedFirms": shortlisted_firms, "owner": "broker", "procuringEntity": procuring_entity_en, "items": items_en_unit
def create(self, request):
    # Image-upload endpoint: accepts plain image files or zip archives,
    # deduplicates by filename/checksum, converts unreadable files to
    # pyramidal TIFF via pyvips, creates Image records and returns the
    # (optionally paginated) serialized result plus per-file error flags.
    image_type = int(request.POST.get('image_type', 0))
    image_set_id = int(request.POST.get('image_set', 0))
    imageset = get_object_or_404(models.ImageSet, id=image_set_id)
    if request.FILES is None:
        return HttpResponseBadRequest('Must have files attached!')
    images = []
    errors = []
    for f in list(request.FILES.values()):
        error = {
            'duplicates': 0,
            'damaged': False,
            'directories': False,
            'exists': False,
            'unsupported': False,
            'zip': False,
            'convert': False
        }
        # Sniff the first four bytes to detect a zip archive.
        magic_number = f.read(4)
        f.seek(0)  # reset file cursor to the beginning of the file
        file_list = {}
        if magic_number == b'PK\x03\x04':
            # Save the upload under a random name, unpack it in the image
            # set's directory, then delete the archive.
            zipname = ''.join(random.choice(string.ascii_uppercase + string.ascii_lowercase + string.digits) for _ in range(6)) + '.zip'
            with open(os.path.join(imageset.root_path(), zipname), 'wb') as out:
                for chunk in f.chunks():
                    out.write(chunk)
            # unpack zip-file
            zip_ref = zipfile.ZipFile(os.path.join(imageset.root_path(), zipname), 'r')
            zip_ref.extractall(os.path.join(imageset.root_path()))
            zip_ref.close()
            # delete zip-file
            os.remove(os.path.join(imageset.root_path(), zipname))
            filenames = [f.filename for f in zip_ref.filelist]
            filenames.sort()
            duplicat_count = 0
            for filename in filenames:
                file_path = os.path.join(imageset.root_path(), filename)
                if models.Image.objects.filter(Q(filename=filename)|Q(name=f.name), image_set=imageset).count() == 0:
                    try:
                        if open_slide(file_path):
                            # creates a checksum for image
                            fchecksum = hashlib.sha512()
                            with open(file_path, 'rb') as fil:
                                while True:
                                    buf = fil.read(10000)
                                    if not buf:
                                        break
                                    fchecksum.update(buf)
                            fchecksum = fchecksum.digest()
                            # check if vms is in any images then just save the vms files
                            # else for each jpg a new image will be created in the databse
                            if any(".vms" in f for f in filenames) and ".vms" in filename:
                                file_list[file_path] = fchecksum
                            elif(any(".vms" in f for f in filenames) == False):
                                file_list[file_path] = fchecksum
                            else:
                                error['unsupported'] = True
                    except IsADirectoryError:
                        error['directories'] = True
                    except:
                        error['unsupported'] = True
                else:
                    duplicat_count += 1
            if duplicat_count > 0:
                error['duplicates'] = duplicat_count
        else:
            # Single (non-zip) file upload.
            # creates a checksum for image
            fchecksum = hashlib.sha512()
            for chunk in f.chunks():
                fchecksum.update(chunk)
            fchecksum = fchecksum.digest()
            filename = os.path.join(imageset.root_path(), f.name)
            # tests for duplicats in imageset
            image = models.Image.objects.filter(Q(filename=filename)|Q(name=f.name), checksum=fchecksum, image_set=imageset).first()
            if image is None:
                with open(filename, 'wb') as out:
                    for chunk in f.chunks():
                        out.write(chunk)
                file_list[filename] = fchecksum
            else:
                error['exists'] = True
                error['exists_id'] = image.id
        # Register every collected file as an Image record; convert files
        # OpenSlide cannot read into pyramidal TIFF first.
        for path in file_list:
            try:
                fchecksum = file_list[path]
                path = Path(path)
                name = path.name
                # check if the file can be opened by OpenSlide if not convert it
                try:
                    osr = OpenSlide(str(path))
                except:
                    old_path = path
                    path = Path(path).with_suffix('.tiff')
                    try:
                        import pyvips
                        vi = pyvips.Image.new_from_file(str(old_path))
                        vi.tiffsave(str(path), tile=True, compression='lzw', bigtiff=True, pyramid=True, tile_width=256, tile_height=256)
                    except:
                        error['unsupported'] = True
                image = models.Image(
                    name=name,
                    image_set=imageset,
                    filename=path.name,
                    image_type=image_type,
                    checksum=fchecksum)
                osr = OpenSlide(image.path())
                image.width, image.height = osr.level_dimensions[0]
                # Microns-per-pixel: average of X/Y when present, else 0.
                try:
                    mpp_x = osr.properties[openslide.PROPERTY_NAME_MPP_X]
                    mpp_y = osr.properties[openslide.PROPERTY_NAME_MPP_Y]
                    image.mpp = (float(mpp_x) + float(mpp_y)) / 2
                except (KeyError, ValueError):
                    image.mpp = 0
                try:
                    image.objectivePower = osr.properties[openslide.PROPERTY_NAME_OBJECTIVE_POWER]
                except (KeyError, ValueError):
                    image.objectivePower = 1
                image.save()
                images.append(image)
            except:
                # Any failure marks the file unsupported and removes it.
                error['unsupported'] = True
                os.remove(str(path))
        errors.append(error)
    queryset = self.get_queryset().filter(id__in=[image.id for image in images])
    page = self.paginate_queryset(queryset)
    if page is not None:
        serializer = self.get_serializer(page, many=True)
        return self.get_paginated_response(serializer.data)
    serializer = self.get_serializer(queryset, many=True)
    return Response(serializer.data)
def hash(self):
    """Return the sha512 hex digest of this object's attributes,
    serialized as canonical (key-sorted) JSON so the hash is stable."""
    canonical = json.dumps(self.__dict__, sort_keys=True)
    return hashlib.sha512(canonical.encode()).hexdigest()
def check_password(password: str) -> bool:
    """Return True when the sha512 hex digest of ``password`` matches the
    hash stored under StoreKey.PASSWORD_HASH, False otherwise."""
    given_hash = hashlib.sha512(password.encode(ENCODING)).hexdigest()
    stored_hash = Store.get(StoreKey.PASSWORD_HASH)
    return given_hash == stored_hash
def hash_passwd(passwd):
    """Return the sha512 digest of ``passwd`` as a hex string."""
    digest = hashlib.sha512(passwd)
    return digest.hexdigest()
def _sha512(data): return hashlib.sha512(data).hexdigest()
def scramble(cls, plain, salt):
    '''Combine plain and salt via cls.salted() and return the sha512 hex
    digest of the UTF-8 encoded result.'''
    combined = cls.salted(plain, salt)
    return hashlib.sha512(combined.encode('UTF-8')).hexdigest()
def _ECIES_KDF(self, R):
    """ECIES key derivation: raw sha512 digest over the binary
    serializations of the shared secret self.x and the point R's x and y
    coordinates, in that order."""
    elem_len = self.curve.elem_len_bin
    kdf = hashlib.sha512()
    for value in (self.x, R.x, R.y):
        kdf.update(serialize_number(value, SER_BINARY, elem_len))
    return kdf.digest()
def getSHA512(secret):
    """Return the raw (binary) SHA-512 digest of *secret*."""
    return hashlib.sha512(secret).digest()
def __init__(self, bind_network, bind_component, endpoint, peering,
             seeds_list, peer_list, data_dir, config_dir, identity_signer,
             scheduler_type, permissions, minimum_peer_connectivity,
             maximum_peer_connectivity, network_public_key=None,
             network_private_key=None, roles=None):
    """Constructs a validator instance.

    Args:
        bind_network (str): the network endpoint
        bind_component (str): the component endpoint
        endpoint (str): the zmq-style URI of this validator's
            publically reachable endpoint
        peering (str): The type of peering approach. Either 'static'
            or 'dynamic'. In 'static' mode, no attempted topology
            buildout occurs -- the validator only attempts to initiate
            peering connections with endpoints specified in the
            peer_list. In 'dynamic' mode, the validator will first
            attempt to initiate peering connections with endpoints
            specified in the peer_list and then attempt to do a
            topology buildout starting with peer lists obtained from
            endpoints in the seeds_list. In either mode, the validator
            will accept incoming peer requests up to max_peers.
        seeds_list (list of str): a list of addresses to connect
            to in order to perform the initial topology buildout
        peer_list (list of str): a list of peer addresses
        data_dir (str): path to the data directory
        config_dir (str): path to the config directory
        identity_signer (str): cryptographic signer the validator uses for
            signing
        scheduler_type (str): transaction scheduler type passed through to
            the TransactionExecutor
        permissions: permission configuration for the PermissionVerifier
        minimum_peer_connectivity / maximum_peer_connectivity: bounds on
            peer count, passed through to Gossip
        network_public_key / network_private_key: when both are set, the
            network Interconnect is created in secured mode
        roles: authorization roles passed to the network Interconnect
    """
    # -- Setup Global State Database and Factory -- #
    global_state_db_filename = os.path.join(
        data_dir, 'merkle-{}.lmdb'.format(bind_network[-2:]))
    LOGGER.debug(
        'global state database file is %s', global_state_db_filename)
    global_state_db = NativeLmdbDatabase(
        global_state_db_filename,
        indexes=MerkleDatabase.create_index_configuration())
    state_view_factory = StateViewFactory(global_state_db)

    # -- Setup Receipt Store -- #
    receipt_db_filename = os.path.join(
        data_dir, 'txn_receipts-{}.lmdb'.format(bind_network[-2:]))
    LOGGER.debug('txn receipt store file is %s', receipt_db_filename)
    receipt_db = LMDBNoLockDatabase(receipt_db_filename, 'c')
    receipt_store = TransactionReceiptStore(receipt_db)

    # -- Setup Block Store -- #
    block_db_filename = os.path.join(
        data_dir, 'block-{}.lmdb'.format(bind_network[-2:]))
    LOGGER.debug('block store file is %s', block_db_filename)
    block_db = IndexedDatabase(
        block_db_filename,
        BlockStore.serialize_block,
        BlockStore.deserialize_block,
        flag='c',
        indexes=BlockStore.create_index_configuration())
    block_store = BlockStore(block_db)
    # The cache keep time for the journal's block cache must be greater
    # than the cache keep time used by the completer.
    base_keep_time = 1200
    block_cache = BlockCache(
        block_store,
        keep_time=int(base_keep_time * 9 / 8),  # 9/8 of the completer's
        purge_frequency=30)

    # -- Setup Thread Pools -- #
    component_thread_pool = InstrumentedThreadPoolExecutor(
        max_workers=10, name='Component')
    network_thread_pool = InstrumentedThreadPoolExecutor(
        max_workers=10, name='Network')
    client_thread_pool = InstrumentedThreadPoolExecutor(
        max_workers=5, name='Client')
    sig_pool = InstrumentedThreadPoolExecutor(
        max_workers=3, name='Signature')

    # -- Setup Dispatchers -- #
    component_dispatcher = Dispatcher()
    network_dispatcher = Dispatcher()

    # -- Setup Services -- #
    component_service = Interconnect(
        bind_component,
        component_dispatcher,
        secured=False,
        heartbeat=False,
        max_incoming_connections=20,
        monitor=True,
        max_future_callback_workers=10)

    # Truncated to 23 chars to fit zmq's identity-size limit.
    zmq_identity = hashlib.sha512(
        time.time().hex().encode()).hexdigest()[:23]

    secure = False
    if network_public_key is not None and network_private_key is not None:
        secure = True

    network_service = Interconnect(
        bind_network,
        dispatcher=network_dispatcher,
        zmq_identity=zmq_identity,
        secured=secure,
        server_public_key=network_public_key,
        server_private_key=network_private_key,
        heartbeat=True,
        public_endpoint=endpoint,
        connection_timeout=120,
        max_incoming_connections=100,
        max_future_callback_workers=10,
        authorize=True,
        signer=identity_signer,
        roles=roles)

    # -- Setup Transaction Execution Platform -- #
    context_manager = ContextManager(global_state_db)

    batch_tracker = BatchTracker(block_store)

    settings_cache = SettingsCache(
        SettingsViewFactory(state_view_factory),
    )

    transaction_executor = TransactionExecutor(
        service=component_service,
        context_manager=context_manager,
        settings_view_factory=SettingsViewFactory(state_view_factory),
        scheduler_type=scheduler_type,
        invalid_observers=[batch_tracker])

    component_service.set_check_connections(
        transaction_executor.check_connections)

    event_broadcaster = EventBroadcaster(
        component_service, block_store, receipt_store)

    # -- Setup P2P Networking -- #
    gossip = Gossip(
        network_service,
        settings_cache,
        lambda: block_store.chain_head,
        block_store.chain_head_state_root,
        endpoint=endpoint,
        peering_mode=peering,
        initial_seed_endpoints=seeds_list,
        initial_peer_endpoints=peer_list,
        minimum_peer_connectivity=minimum_peer_connectivity,
        maximum_peer_connectivity=maximum_peer_connectivity,
        topology_check_frequency=1)

    completer = Completer(
        block_store,
        gossip,
        cache_keep_time=base_keep_time,
        cache_purge_frequency=30,
        requested_keep_time=300)

    block_sender = BroadcastBlockSender(completer, gossip)
    batch_sender = BroadcastBatchSender(completer, gossip)
    chain_id_manager = ChainIdManager(data_dir)

    identity_view_factory = IdentityViewFactory(
        StateViewFactory(global_state_db))
    id_cache = IdentityCache(identity_view_factory)

    # -- Setup Permissioning -- #
    permission_verifier = PermissionVerifier(
        permissions, block_store.chain_head_state_root, id_cache)

    identity_observer = IdentityObserver(
        to_update=id_cache.invalidate,
        forked=id_cache.forked)

    settings_observer = SettingsObserver(
        to_update=settings_cache.invalidate,
        forked=settings_cache.forked)

    # -- Setup Journal -- #
    batch_injector_factory = DefaultBatchInjectorFactory(
        block_store=block_store,
        state_view_factory=state_view_factory,
        signer=identity_signer)

    block_publisher = BlockPublisher(
        transaction_executor=transaction_executor,
        block_cache=block_cache,
        state_view_factory=state_view_factory,
        settings_cache=settings_cache,
        block_sender=block_sender,
        batch_sender=batch_sender,
        squash_handler=context_manager.get_squash_handler(),
        chain_head=block_store.chain_head,
        identity_signer=identity_signer,
        data_dir=data_dir,
        config_dir=config_dir,
        permission_verifier=permission_verifier,
        check_publish_block_frequency=0.1,
        batch_observers=[batch_tracker],
        batch_injector_factory=batch_injector_factory)

    block_validator = BlockValidator(
        block_cache=block_cache,
        state_view_factory=state_view_factory,
        transaction_executor=transaction_executor,
        squash_handler=context_manager.get_squash_handler(),
        identity_signer=identity_signer,
        data_dir=data_dir,
        config_dir=config_dir,
        permission_verifier=permission_verifier)

    chain_controller = ChainController(
        block_cache=block_cache,
        block_validator=block_validator,
        state_view_factory=state_view_factory,
        chain_head_lock=block_publisher.chain_head_lock,
        on_chain_updated=block_publisher.on_chain_updated,
        chain_id_manager=chain_id_manager,
        data_dir=data_dir,
        config_dir=config_dir,
        chain_observers=[
            event_broadcaster,
            receipt_store,
            batch_tracker,
            identity_observer,
            settings_observer
        ])

    genesis_controller = GenesisController(
        context_manager=context_manager,
        transaction_executor=transaction_executor,
        completer=completer,
        block_store=block_store,
        state_view_factory=state_view_factory,
        identity_signer=identity_signer,
        data_dir=data_dir,
        config_dir=config_dir,
        chain_id_manager=chain_id_manager,
        batch_sender=batch_sender)

    responder = Responder(completer)

    # Wire the completer's callbacks into the journal components.
    completer.set_on_batch_received(block_publisher.queue_batch)
    completer.set_on_block_received(chain_controller.queue_block)
    completer.set_chain_has_block(chain_controller.has_block)

    # -- Register Message Handler -- #
    network_handlers.add(
        network_dispatcher, network_service, gossip, completer,
        responder, network_thread_pool, sig_pool,
        chain_controller.has_block, block_publisher.has_batch,
        permission_verifier, block_publisher)

    component_handlers.add(
        component_dispatcher, gossip, context_manager,
        transaction_executor, completer, block_store, batch_tracker,
        global_state_db, self.get_chain_head_state_root_hash,
        receipt_store, event_broadcaster, permission_verifier,
        component_thread_pool, client_thread_pool, sig_pool,
        block_publisher)

    # -- Store Object References -- #
    self._component_dispatcher = component_dispatcher
    self._component_service = component_service
    self._component_thread_pool = component_thread_pool

    self._network_dispatcher = network_dispatcher
    self._network_service = network_service
    self._network_thread_pool = network_thread_pool

    self._client_thread_pool = client_thread_pool
    self._sig_pool = sig_pool

    self._context_manager = context_manager
    self._transaction_executor = transaction_executor
    self._genesis_controller = genesis_controller
    self._gossip = gossip

    self._block_publisher = block_publisher
    self._chain_controller = chain_controller
    self._block_validator = block_validator
# Prompt the user for a plain-text password (input hidden via getpass)
# and print its SHA-512 hex digest.
from hashlib import sha512
from getpass import getpass

print('Input your plain text password below!')
plaintext_pwd = getpass()
hash_str = sha512(plaintext_pwd.encode()).hexdigest()
print('This is your encrypted password using sha512 method:', hash_str)
def generate_hash(self, hash_string):
    """Return the lowercase SHA-512 hex digest of *hash_string* (a str)."""
    digest = hashlib.sha512(hash_string.encode('utf-8')).hexdigest()
    # hexdigest() is already lowercase; .lower() kept for parity with callers.
    return digest.lower()
def _check_password(self):
    """Run the full password / secondary-factor check for this connection.

    Order of checks:
      1. Skip entirely for stress tests, linked servers, the bypass flag,
         a cached auth token, or a whitelisted client network.
      2. Rate-limit authentication attempts per user.
      3. Depending on the configured SSO mode, verify a Duo/OneLogin/Okta
         passcode, a YubiKey OTP, or a 6-digit OTP code, each with an
         optional success cache.
      4. Finally verify the user's pin (or deny if pin_mode requires one
         and none is set).

    Raises AuthError on any failure; returns None on success.
    """
    # Fast-path exits: no password checking needed at all.
    if settings.vpn.stress_test or self.user.link_server_id:
        return

    if self.user.bypass_secondary:
        logger.info(
            'Bypass secondary enabled, skipping password',
            'sso',
            user_name=self.user.name,
            org_name=self.user.org.name,
            server_name=self.server.name,
        )
        return

    if self.has_token:
        logger.info(
            'Client authentication cached, skipping password',
            'sso',
            user_name=self.user.name,
            org_name=self.user.org.name,
            server_name=self.server.name,
        )
        return

    if self.whitelisted:
        logger.info(
            'Client network whitelisted, skipping password',
            'sso',
            user_name=self.user.name,
            org_name=self.user.org.name,
            server_name=self.server.name,
        )
        return

    # Rate limit before doing any credential work.
    if not limiter.auth_check(self.user.id):
        self.user.audit_event(
            'user_connection',
            ('User connection to "%s" denied. Too many ' +
                'authentication attempts') % (self.server.name),
            remote_addr=self.remote_ip,
        )
        raise AuthError('Too many authentication attempts')

    sso_mode = settings.app.sso or ''
    duo_mode = settings.app.sso_duo_mode
    onelogin_mode = utils.get_onelogin_mode()
    okta_mode = utils.get_okta_mode()
    auth_type = self.user.auth_type or ''

    # A secondary passcode applies only when the provider is both the
    # configured SSO mode and part of the user's auth type, in passcode mode.
    has_duo_passcode = DUO_AUTH in sso_mode and \
        DUO_AUTH in auth_type and duo_mode == 'passcode'
    has_onelogin_passcode = SAML_ONELOGIN_AUTH == sso_mode and \
        SAML_ONELOGIN_AUTH in auth_type and onelogin_mode == 'passcode'
    has_okta_passcode = SAML_OKTA_AUTH == sso_mode and \
        SAML_OKTA_AUTH in auth_type and okta_mode == 'passcode'

    if has_duo_passcode or has_onelogin_passcode or has_okta_passcode:
        # Empty password + challenge support + pin set: challenge for pin.
        if not self.password and self.has_challenge() and \
                self.user.has_pin():
            self.user.audit_event(
                'user_connection',
                ('User connection to "%s" denied. ' +
                    'User failed pin authentication') % (self.server.name),
                remote_addr=self.remote_ip,
            )
            self.set_challenge(None, 'Enter Pin', False)
            raise AuthError('Challenge pin')

        # A prior challenge response is prepended to the password.
        challenge = self.get_challenge()
        if challenge:
            self.password = challenge + self.password

        # The passcode is the trailing N chars; the rest stays as password.
        passcode_len = settings.app.sso_duo_passcode_length
        orig_password = self.password
        passcode = self.password[-passcode_len:]
        self.password = self.password[:-passcode_len]

        allow = False
        if settings.app.sso_cache and not self.auth_token:
            # Cache hit: the same device already used this passcode; refresh
            # the timestamp and skip the provider round-trip.
            doc = self.sso_passcode_cache_collection.find_one({
                'user_id': self.user.id,
                'server_id': self.server.id,
                'remote_ip': self.remote_ip,
                'mac_addr': self.mac_addr,
                'platform': self.platform,
                'device_id': self.device_id,
                'device_name': self.device_name,
                'passcode': passcode,
            })
            if doc:
                self.sso_passcode_cache_collection.update({
                    'user_id': self.user.id,
                    'server_id': self.server.id,
                    'remote_ip': self.remote_ip,
                    'mac_addr': self.mac_addr,
                    'platform': self.platform,
                    'device_id': self.device_id,
                    'device_name': self.device_name,
                    'passcode': passcode,
                }, {
                    'user_id': self.user.id,
                    'server_id': self.server.id,
                    'remote_ip': self.remote_ip,
                    'mac_addr': self.mac_addr,
                    'platform': self.platform,
                    'device_id': self.device_id,
                    'device_name': self.device_name,
                    'passcode': passcode,
                    'timestamp': utils.now(),
                })

                allow = True
                logger.info(
                    'Authentication cached, skipping secondary passcode',
                    'sso',
                    user_name=self.user.name,
                    org_name=self.user.org.name,
                    server_name=self.server.name,
                )

        if not allow:
            # Verify the passcode with whichever provider is active.
            if DUO_AUTH in sso_mode:
                label = 'Duo'
                duo_auth = sso.Duo(
                    username=self.user.name,
                    factor=duo_mode,
                    remote_ip=self.remote_ip,
                    auth_type='Connection',
                    passcode=passcode,
                )
                allow = duo_auth.authenticate()
            elif SAML_ONELOGIN_AUTH == sso_mode:
                label = 'OneLogin'
                allow = sso.auth_onelogin_secondary(
                    username=self.user.name,
                    passcode=passcode,
                    remote_ip=self.remote_ip,
                    onelogin_mode=onelogin_mode,
                )
            elif SAML_OKTA_AUTH == sso_mode:
                label = 'Okta'
                allow = sso.auth_okta_secondary(
                    username=self.user.name,
                    passcode=passcode,
                    remote_ip=self.remote_ip,
                    okta_mode=okta_mode,
                )
            else:
                raise AuthError('Unknown secondary passcode challenge')

            if not allow:
                self.user.audit_event(
                    'user_connection',
                    ('User connection to "%s" denied. ' +
                        'User failed %s passcode authentication') % (
                        self.server.name, label),
                    remote_addr=self.remote_ip,
                )
                if self.has_challenge():
                    # Re-challenge: keep the original password only when the
                    # user also has a server password to re-enter.
                    if self.user.has_password(self.server):
                        self.set_challenge(
                            orig_password,
                            'Enter %s Passcode' % label, True)
                    else:
                        self.set_challenge(
                            None, 'Enter %s Passcode' % label, True)
                    raise AuthError('Challenge secondary passcode')
                raise AuthError('Invalid secondary passcode')

        if settings.app.sso_cache and not self.auth_token:
            # Record the successful passcode for this device (upsert keyed
            # without remote_ip/platform so a device keeps a single entry).
            self.sso_passcode_cache_collection.update({
                'user_id': self.user.id,
                'server_id': self.server.id,
                'mac_addr': self.mac_addr,
                'device_id': self.device_id,
                'device_name': self.device_name,
            }, {
                'user_id': self.user.id,
                'server_id': self.server.id,
                'remote_ip': self.remote_ip,
                'mac_addr': self.mac_addr,
                'platform': self.platform,
                'device_id': self.device_id,
                'device_name': self.device_name,
                'passcode': passcode,
                'timestamp': utils.now(),
            }, upsert=True)
    elif YUBICO_AUTH in sso_mode and YUBICO_AUTH in auth_type:
        if not self.password and self.has_challenge() and \
                self.user.has_pin():
            self.user.audit_event(
                'user_connection',
                ('User connection to "%s" denied. ' +
                    'User failed pin authentication') % (self.server.name),
                remote_addr=self.remote_ip,
            )
            self.set_challenge(None, 'Enter Pin', False)
            raise AuthError('Challenge pin')

        challenge = self.get_challenge()
        if challenge:
            self.password = challenge + self.password

        # A YubiKey OTP is always the trailing 44 characters.
        orig_password = self.password
        yubikey = self.password[-44:]
        self.password = self.password[:-44]

        # Cache stores a base64 SHA-512 of the OTP, not the OTP itself.
        yubikey_hash = hashlib.sha512()
        yubikey_hash.update(yubikey)
        yubikey_hash = base64.b64encode(yubikey_hash.digest())

        allow = False
        if settings.app.sso_cache and not self.auth_token:
            doc = self.sso_passcode_cache_collection.find_one({
                'user_id': self.user.id,
                'server_id': self.server.id,
                'remote_ip': self.remote_ip,
                'mac_addr': self.mac_addr,
                'platform': self.platform,
                'device_id': self.device_id,
                'device_name': self.device_name,
                'passcode': yubikey_hash,
            })
            if doc:
                self.sso_passcode_cache_collection.update(
                    {
                        'user_id': self.user.id,
                        'server_id': self.server.id,
                        'remote_ip': self.remote_ip,
                        'mac_addr': self.mac_addr,
                        'platform': self.platform,
                        'device_id': self.device_id,
                        'device_name': self.device_name,
                        'passcode': yubikey_hash,
                    }, {
                        'user_id': self.user.id,
                        'server_id': self.server.id,
                        'remote_ip': self.remote_ip,
                        'mac_addr': self.mac_addr,
                        'platform': self.platform,
                        'device_id': self.device_id,
                        'device_name': self.device_name,
                        'passcode': yubikey_hash,
                        'timestamp': utils.now(),
                    })

                allow = True
                logger.info(
                    'Authentication cached, skipping Yubikey',
                    'sso',
                    user_name=self.user.name,
                    org_name=self.user.org.name,
                    server_name=self.server.name,
                )

        if not allow:
            valid, yubico_id = sso.auth_yubico(yubikey)
            # The OTP must also come from the key registered to this user.
            if yubico_id != self.user.yubico_id:
                valid = False

            if not valid:
                self.user.audit_event(
                    'user_connection',
                    ('User connection to "%s" denied. ' +
                        'User failed Yubico authentication') % (
                        self.server.name),
                    remote_addr=self.remote_ip,
                )
                if self.has_challenge():
                    if self.user.has_password(self.server):
                        self.set_challenge(
                            orig_password, 'YubiKey', True)
                    else:
                        self.set_challenge(None, 'YubiKey', True)
                    raise AuthError('Challenge YubiKey')
                raise AuthError('Invalid YubiKey')

        if settings.app.sso_cache and not self.auth_token:
            self.sso_passcode_cache_collection.update({
                'user_id': self.user.id,
                'server_id': self.server.id,
                'mac_addr': self.mac_addr,
                'device_id': self.device_id,
                'device_name': self.device_name,
            }, {
                'user_id': self.user.id,
                'server_id': self.server.id,
                'remote_ip': self.remote_ip,
                'mac_addr': self.mac_addr,
                'platform': self.platform,
                'device_id': self.device_id,
                'device_name': self.device_name,
                'passcode': yubikey_hash,
                'timestamp': utils.now(),
            }, upsert=True)
    elif self.server.otp_auth and self.user.type == CERT_CLIENT:
        if not self.password and self.has_challenge() and \
                self.user.has_pin():
            self.user.audit_event(
                'user_connection',
                ('User connection to "%s" denied. ' +
                    'User failed pin authentication') % (self.server.name),
                remote_addr=self.remote_ip,
            )
            self.set_challenge(None, 'Enter Pin', False)
            raise AuthError('Challenge pin')

        challenge = self.get_challenge()
        if challenge:
            self.password = challenge + self.password

        # The OTP is the trailing 6 digits of the submitted password.
        orig_password = self.password
        otp_code = self.password[-6:]
        self.password = self.password[:-6]

        allow = False
        if settings.app.sso_cache:
            doc = self.otp_cache_collection.find_one({
                'user_id': self.user.id,
                'server_id': self.server.id,
                'remote_ip': self.remote_ip,
                'mac_addr': self.mac_addr,
                'platform': self.platform,
                'device_id': self.device_id,
                'device_name': self.device_name,
                'passcode': otp_code,
            })
            if doc:
                self.otp_cache_collection.update(
                    {
                        'user_id': self.user.id,
                        'server_id': self.server.id,
                        'remote_ip': self.remote_ip,
                        'mac_addr': self.mac_addr,
                        'platform': self.platform,
                        'device_id': self.device_id,
                        'device_name': self.device_name,
                        'passcode': otp_code,
                    }, {
                        'user_id': self.user.id,
                        'server_id': self.server.id,
                        'remote_ip': self.remote_ip,
                        'mac_addr': self.mac_addr,
                        'platform': self.platform,
                        'device_id': self.device_id,
                        'device_name': self.device_name,
                        'passcode': otp_code,
                        'timestamp': utils.now(),
                    })

                allow = True
                logger.info(
                    'Authentication cached, skipping OTP',
                    'sso',
                    user_name=self.user.name,
                    org_name=self.user.org.name,
                    server_name=self.server.name,
                )

        if not allow:
            if not self.user.verify_otp_code(otp_code):
                self.user.audit_event(
                    'user_connection',
                    ('User connection to "%s" denied. ' +
                        'User failed two-step authentication') % (
                        self.server.name),
                    remote_addr=self.remote_ip,
                )
                if self.has_challenge():
                    if self.user.has_password(self.server):
                        self.set_challenge(
                            orig_password, 'Enter OTP Code', True)
                    else:
                        self.set_challenge(None, 'Enter OTP Code', True)
                    raise AuthError('Challenge OTP code')
                raise AuthError('Invalid OTP code')

        # NOTE(review): the read path above is gated by settings.app.sso_cache
        # but this write-back is gated by settings.vpn.otp_cache — confirm the
        # asymmetry is intentional.
        if settings.vpn.otp_cache:
            self.otp_cache_collection.update({
                'user_id': self.user.id,
                'server_id': self.server.id,
                'mac_addr': self.mac_addr,
                'device_id': self.device_id,
                'device_name': self.device_name,
            }, {
                'user_id': self.user.id,
                'server_id': self.server.id,
                'remote_ip': self.remote_ip,
                'mac_addr': self.mac_addr,
                'platform': self.platform,
                'device_id': self.device_id,
                'device_name': self.device_name,
                'passcode': otp_code,
                'timestamp': utils.now(),
            }, upsert=True)

    # Final check: the remaining password must match the user's pin.
    if self.user.has_pin():
        if not self.user.check_pin(self.password):
            self.user.audit_event(
                'user_connection',
                ('User connection to "%s" denied. ' +
                    'User failed pin authentication') % (self.server.name),
                remote_addr=self.remote_ip,
            )
            if self.has_challenge():
                self.set_challenge(None, 'Enter Pin', False)
                raise AuthError('Challenge pin')
            raise AuthError('Invalid pin')
    elif settings.user.pin_mode == PIN_REQUIRED:
        self.user.audit_event(
            'user_connection',
            ('User connection to "%s" denied. ' +
                'User does not have a pin set') % (self.server.name),
            remote_addr=self.remote_ip,
        )
        raise AuthError('User does not have a pin set')
def hashing_method(passwd_hash):
    """Print (with ANSI coloring) and return the SHA-512 hex digest.

    Fixes: the original used a Python 2 ``print`` statement while the rest
    of this file targets Python 3, and passing a ``str`` to hashlib raises
    TypeError on Python 3 — str input is now UTF-8 encoded first. The
    digest is also returned (the original returned None, so callers are
    unaffected).
    """
    if isinstance(passwd_hash, str):
        passwd_hash = passwd_hash.encode('utf-8')
    hash3 = hashlib.sha512(passwd_hash)
    print("\033[0;97mYOUR HASHED PASSWORD IS:\033[0;91m ", hash3.hexdigest())
    return hash3.hexdigest()
# Copyright 2018 Contributors to Hyperledger Sawtooth
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -----------------------------------------------------------------------------
from hashlib import sha512
import re

FAMILY_NAME = "rbac"
FAMILY_VERSION = "1.0"
# Family namespace: first 6 hex chars of sha512(family name).
NAMESPACE = sha512(FAMILY_NAME.encode()).hexdigest()[:6]
ADDRESS_LENGTH = 70
ADDRESS_PATTERN = re.compile(r"^[0-9a-f]{70}$")
FAMILY_PATTERN = re.compile(r"^9f4448[0-9a-f]{64}$")


def namespace_ok(address):
    """Address belongs to this family namespace"""
    return address.startswith(NAMESPACE)
def open(self, url: str, mode: str = "rb") -> Any:
    """Returns a file-like object for a particular URL opened in mode.

    If the file is remote, it will be downloaded and locally cached.

    Args:
        url: URL to open (any scheme urllib supports; 'file' URLs are
            re-opened directly rather than cached)
        mode: mode for the decompression wrappers / cache file

    Returns:
        A file-like object, possibly wrapped in an xz/bz2/gzip
        decompressor when the content (or file extension) indicates one.

    Raises:
        ValueError: if no openable file results from the URL.
    """
    urllib.request.install_opener(
        urllib.request.build_opener(*self._handlers))

    try:
        fp = urllib.request.urlopen(url, context=self._context)
    except error.URLError as excp:
        if excp.args:
            # TODO: As of python3.7 this can be removed
            unverified_retrieval = (
                hasattr(ssl, "SSLCertVerificationError")
                and isinstance(excp.args[0], ssl.SSLCertVerificationError)
            ) or (isinstance(excp.args[0], ssl.SSLError)
                  and excp.args[0].reason == "CERTIFICATE_VERIFY_FAILED")
            if unverified_retrieval:
                # NOTE(review): deliberate fallback to an UNVERIFIED SSL
                # context — susceptible to MITM; the warning makes it visible.
                vollog.warning(
                    "SSL certificate verification failed: attempting UNVERIFIED retrieval"
                )
                non_verifying_ctx = ssl.SSLContext()
                non_verifying_ctx.check_hostname = False
                non_verifying_ctx.verify_mode = ssl.CERT_NONE
                fp = urllib.request.urlopen(url, context=non_verifying_ctx)
            else:
                raise excp
        else:
            raise excp

    with contextlib.closing(fp) as fp:
        # Cache the file locally
        parsed_url = urllib.parse.urlparse(url)

        if parsed_url.scheme == 'file':
            # ZipExtFiles (files in zips) cannot seek, so must be cached in order to use and/or decompress
            curfile = urllib.request.urlopen(url, context=self._context)
        else:
            # TODO: find a way to check if we already have this file (look at http headers?)
            block_size = 1028 * 8
            # Cache filename is derived from a SHA-512 of the URL itself.
            temp_filename = os.path.join(
                constants.CACHE_PATH,
                "data_" + hashlib.sha512(bytes(url, 'latin-1')).hexdigest() +
                ".cache")

            if not os.path.exists(temp_filename):
                vollog.debug("Caching file at: {}".format(temp_filename))

                try:
                    content_length = fp.info().get('Content-Length', -1)
                except AttributeError:
                    # If our fp doesn't have an info member, carry on gracefully
                    content_length = -1
                cache_file = open(temp_filename, "wb")

                count = 0
                block = fp.read(block_size)
                while block:
                    count += len(block)
                    if self._progress_callback:
                        self._progress_callback(
                            count * 100 / max(count, int(content_length)),
                            "Reading file {}".format(url))
                    cache_file.write(block)
                    block = fp.read(block_size)
                cache_file.close()
            # Re-open the cache with a different mode
            curfile = open(temp_filename, mode="rb")

        # Determine whether the file is a particular type of file, and if so, open it as such
        IMPORTED_MAGIC = False
        if HAS_MAGIC:
            stop = False
            while not stop:
                detected = None
                try:
                    # Detect the content
                    detected = magic.detect_from_fobj(curfile)
                    IMPORTED_MAGIC = True
                    # This is because python-magic and file provide a magic module
                    # Only file's python has magic.detect_from_fobj
                except AttributeError:
                    pass
                except:
                    # Best-effort detection only: any magic failure falls
                    # through to the extension-based path below.
                    pass

                if detected:
                    if detected.mime_type == 'application/x-xz':
                        curfile = cascadeCloseFile(
                            lzma.LZMAFile(curfile, mode), curfile)
                    elif detected.mime_type == 'application/x-bzip2':
                        curfile = cascadeCloseFile(
                            bz2.BZ2File(curfile, mode), curfile)
                    elif detected.mime_type == 'application/x-gzip':
                        curfile = cascadeCloseFile(
                            gzip.GzipFile(fileobj=curfile, mode=mode),
                            curfile)
                    if detected.mime_type in [
                            'application/x-xz', 'application/x-bzip2',
                            'application/x-gzip'
                    ]:
                        # Read and rewind to ensure we're inside any compressed file layers
                        curfile.read(1)
                        curfile.seek(0)
                    else:
                        stop = True
                else:
                    stop = True

        if not IMPORTED_MAGIC:
            # Somewhat of a hack, but prevents a hard dependency on the magic module
            url_path = parsed_url.path
            stop = False
            while not stop:
                # Strip extensions right-to-left, wrapping a decompressor
                # for each compression suffix encountered.
                url_path_split = url_path.split(".")
                url_path, extension = url_path_split[:-1], url_path_split[-1]
                url_path = ".".join(url_path)
                if extension == "xz":
                    curfile = cascadeCloseFile(
                        lzma.LZMAFile(curfile, mode), curfile)
                elif extension == "bz2":
                    curfile = cascadeCloseFile(
                        bz2.BZ2File(curfile, mode), curfile)
                elif extension == "gz":
                    curfile = cascadeCloseFile(
                        gzip.GzipFile(fileobj=curfile, mode=mode), curfile)
                else:
                    stop = True

        # Fallback in case the file doesn't exist
        if curfile is None:
            raise ValueError("URL does not reference an openable file")
        return curfile
def hash_file(path):
    """
    Shortcut to properly hash a file.

    Reads in fixed-size chunks so arbitrarily large files are hashed
    without loading the whole file into memory (the original buffered
    the entire file in one read).

    :param path: path of the file to hash
    :return: SHA-512 hex digest of the file contents
    """
    digest = hashlib.sha512()
    with open(path, 'rb') as hf:
        for chunk in iter(lambda: hf.read(1 << 20), b''):
            digest.update(chunk)
    return digest.hexdigest()
def H(m):
    """Raw SHA-512 digest of *m* (bytes-like)."""
    hasher = hashlib.sha512()
    hasher.update(m)
    return hasher.digest()
def generate_hash(pattern):
    """SHA-512 hex digest of *pattern*'s %s string form, UTF-8 encoded."""
    # Keep the original '%s' % (pattern) formatting so edge cases
    # (e.g. tuple inputs) behave exactly as before.
    text = '%s' % (pattern)
    return hashlib.sha512(text.encode('utf-8')).hexdigest()
def validatesocket(sock):
    """Validate a socket meets security requirements.

    The passed socket must have been created with ``wrapsocket()``.

    Validation order: pinned certificate fingerprints (if configured),
    then loaded-CA verification via ``_verifycert``. Raises
    ``error.Abort`` on any failure; returns None when the connection is
    acceptable (or verification is explicitly disabled).
    """
    host = sock._hgstate["hostname"]
    ui = sock._hgstate["ui"]
    settings = sock._hgstate["settings"]

    try:
        peercert = sock.getpeercert(True)
        peercert2 = sock.getpeercert()
    except AttributeError:
        raise error.Abort(_("%s ssl connection error") % host)

    if not peercert:
        raise error.Abort(
            _("%s certificate error: "
              "no certificate received") % host)

    if settings["disablecertverification"]:
        # We don't print the certificate fingerprint because it shouldn't
        # be necessary: if the user requested certificate verification be
        # disabled, they presumably already saw a message about the inability
        # to verify the certificate and this message would have printed the
        # fingerprint. So printing the fingerprint here adds little to no
        # value.
        ui.warn(
            _("warning: connection security to %s is disabled per current "
              "settings; communication is susceptible to eavesdropping "
              "and tampering\n") % host)
        return

    # If a certificate fingerprint is pinned, use it and only it to
    # validate the remote cert.
    peerfingerprints = {
        "sha1": hashlib.sha1(peercert).hexdigest(),
        "sha256": hashlib.sha256(peercert).hexdigest(),
        "sha512": hashlib.sha512(peercert).hexdigest(),
    }

    def fmtfingerprint(s):
        # "abcd..." -> "ab:cd:..." display form
        return ":".join([s[x:x + 2] for x in range(0, len(s), 2)])

    nicefingerprint = "sha256:%s" % fmtfingerprint(
        peerfingerprints["sha256"])

    if settings["certfingerprints"]:
        for hash, fingerprint in settings["certfingerprints"]:
            if peerfingerprints[hash].lower() == fingerprint:
                ui.debug("%s certificate matched fingerprint %s:%s\n" %
                         (host, hash, fmtfingerprint(fingerprint)))
                if settings["legacyfingerprint"]:
                    ui.warn(
                        _("(SHA-1 fingerprint for %s found in legacy "
                          "[hostfingerprints] section; "
                          "if you trust this fingerprint, remove the old "
                          "SHA-1 fingerprint from [hostfingerprints] and "
                          "add the following entry to the new "
                          "[hostsecurity] section: %s:fingerprints=%s)\n") %
                        (host, host, nicefingerprint))
                return

        # Pinned fingerprint didn't match. This is a fatal error.
        if settings["legacyfingerprint"]:
            section = "hostfingerprint"
            nice = fmtfingerprint(peerfingerprints["sha1"])
        else:
            section = "hostsecurity"
            # NOTE(review): ``hash`` here is the loop variable left over
            # from the (non-empty) fingerprint loop above — confirm this
            # reliance on loop-variable leakage is intentional.
            nice = "%s:%s" % (hash, fmtfingerprint(peerfingerprints[hash]))
        raise error.Abort(
            _("certificate for %s has unexpected "
              "fingerprint %s") % (host, nice),
            hint=_("check %s configuration") % section,
        )

    # Security is enabled but no CAs are loaded. We can't establish trust
    # for the cert so abort.
    if not sock._hgstate["caloaded"]:
        raise error.Abort(
            _("unable to verify security of %s (no loaded CA certificates); "
              "refusing to connect") % host,
            hint=_("see https://mercurial-scm.org/wiki/SecureConnections for "
                   "how to configure Mercurial to avoid this error or set "
                   "hostsecurity.%s:fingerprints=%s to trust this server") %
            (host, nicefingerprint),
        )

    msg = _verifycert(peercert2, host)
    if msg:
        raise error.Abort(
            _("%s certificate error: %s") % (host, msg),
            hint=_("set hostsecurity.%s:certfingerprints=%s "
                   "config setting or use --insecure to connect "
                   "insecurely") % (host, nicefingerprint),
        )
# Write one hash per entry of `numbers` to hash-file.txt, then write
# key-file.txt mapping each hash back to its source number.
# (Python 2 script: uses print statements and xrange; HASH_TYPE selects
# the digest algorithm — 100=md5 ... 600=sha512.)
hashfile = open("hash-file.txt", 'w')
for numStr in numbers:
    if HASH_TYPE == 100:
        numHashObj = hashlib.md5(numStr)
    elif HASH_TYPE == 200:
        numHashObj = hashlib.sha1(numStr)
    elif HASH_TYPE == 300:
        numHashObj = hashlib.sha224(numStr)
    elif HASH_TYPE == 400:
        numHashObj = hashlib.sha256(numStr)
    elif HASH_TYPE == 500:
        numHashObj = hashlib.sha384(numStr)
    elif HASH_TYPE == 600:
        numHashObj = hashlib.sha512(numStr)
    # NOTE(review): an unrecognized HASH_TYPE leaves numHashObj unset
    # (NameError on first iteration) — confirm HASH_TYPE is validated
    # upstream.
    numHashStr = numHashObj.hexdigest()
    hashfile.write(numHashStr + "\n")
    hashes.append(numHashStr)
hashfile.close()
print "Wrote " + str(NUM_ENTRIES) + " hashes to file: hash-file.txt"

# key file: "<hash>:<number>" per line, in input order.
keyfile = open("key-file.txt", 'w')
for x in xrange(len(numbers)):
    concattedStr = hashes[x] + ":" + numbers[x] + "\n"
    keyfile.write(concattedStr)
def sign(cls, secret, o, time):
    """Return the raw SHA-512 digest of "secret|o|time".

    The formatted string is UTF-8 encoded before hashing; hashlib only
    accepts bytes, so the original raised TypeError on Python 3. ``time``
    must be an int (it is formatted with %d).
    """
    payload = "%s|%s|%d" % (secret, o, time)
    return sha512(payload.encode()).digest()
def get_client_ipaddr():
    """Return a 15-char salted SHA-512 prefix identifying the client IP."""
    headers = request.headers
    # pythonanywhere puts the real client address in X-Real-IP;
    # otherwise fall back to the general remote_addr.
    ip = headers['X-Real-IP'] if "X-Real-IP" in headers else request.remote_addr
    return hashlib.sha512(config['app-secret'] + ip).hexdigest()[:15]
print('\n--------------------\n') if iter == num_epoch - 1: for final in range(len(s_images)): current_batch_s = np.expand_dims(s_images[final, :, :, :], 0) current_batch_c = np.expand_dims(c_images[final, :, :, :], 0) sess_results = sess.run( [prep_layer5, hide_layer5, reve_layer5], feed_dict={ Secret: current_batch_s, Cover: current_batch_c }) # create hash table hash_object = hashlib.sha512(np.squeeze(current_batch_s)) secrete_hex_digit = hash_object.hexdigest() hash_object = hashlib.sha512( np.squeeze(sess_results[1][0, :, :, :])) prep_hex_digit = hash_object.hexdigest() plt.figure() plt.imshow(np.squeeze(current_batch_s[0, :, :, :])) plt.axis('off') plt.title('epoch_' + str(final) + ' Secret') plt.savefig('gif/' + str(final) + 'a_' + str(secrete_hex_digit) + 'Secret image.png') plt.figure() plt.imshow(np.squeeze(current_batch_c[0, :, :, :]))
def calc_signature(self, message):
    """SHA-512 hex digest over *message* followed by the callback secret."""
    digest = hashlib.sha512()
    # Order matters: message first, then the configured secret.
    for part in (message, self.configuration.callback_secret):
        digest.update(part.encode("utf-8"))
    return digest.hexdigest()
# Print a reference table of hashes for *string* (default "password"),
# optionally taking the string and salt from argv.
salt = "ZDzPE45C"
string = "password"
salt2 = "1111111111111111111111"  # NOTE(review): unused here — confirm needed
if (len(sys.argv) > 1):
    string = sys.argv[1]
if (len(sys.argv) > 2):
    salt = sys.argv[2]

print("General Hashes")
print("MD5:" + hashlib.md5(string.encode()).hexdigest())
print("SHA1:" + hashlib.sha1(string.encode()).hexdigest())
print("SHA256:" + hashlib.sha256(string.encode()).hexdigest())
print("SHA512:" + hashlib.sha512(string.encode()).hexdigest())
print("UNIX hashes (with salt)")
print("DES:" + passlib.hash.des_crypt.hash(string, salt=salt[:2]))
print("MD5:" + passlib.hash.md5_crypt.hash(string, salt=salt))
print("Sun MD5:" + passlib.hash.sun_md5_crypt.hash(string, salt=salt))
print("SHA1:" + passlib.hash.sha1_crypt.hash(string, salt=salt))
print("SHA256:" + passlib.hash.sha256_crypt.hash(string, salt=salt))
print("SHA512:" + passlib.hash.sha512_crypt.hash(string, salt=salt))
print("APR1:" + passlib.hash.apr_md5_crypt.hash(string, salt=salt))
# NOTE(review): the next two lines were corrupted in the source
# ('"PHPASS:"******"PBKDF2 (SHA1):"...', a syntax error); reconstructed
# as the PHPASS entry followed by the PBKDF2 (SHA1) entry, matching the
# pattern of the surrounding lines — confirm against the original script.
print("PHPASS:" + passlib.hash.phpass.hash(string, salt=salt.encode()))
print("PBKDF2 (SHA1):" + passlib.hash.pbkdf2_sha1.hash(string, salt=salt.encode()))
print("PBKDF2 (SHA256):" + passlib.hash.pbkdf2_sha256.hash(string, salt=salt.encode()))
print("PBKDF2 (SHA512):" + passlib.hash.pbkdf2_sha512.hash(string, salt=salt.encode()))
print("CTA PBKDF2:" + passlib.hash.cta_pbkdf2_sha1.hash(string, salt=salt.encode()))
import hashlib

# SHA -> Secure Hash Function
# SHA256, SHA512 ...
# Demo: map strings to one of 8 buckets via a cryptographic hash;
# identical inputs (s2, s3) land in the same bucket.
s1 = "This is Awesome"
s2 = "John," + str(80) + ",[email protected]"
s3 = "John," + str(80) + ",[email protected]"
print(s1)
print(s2)

digest1 = hashlib.sha512(s1.encode("utf-8")).hexdigest()
digest2 = hashlib.sha256(s2.encode("utf-8")).hexdigest()
digest3 = hashlib.sha256(s3.encode("utf-8")).hexdigest()
hashCode1 = int(digest1, 16) % 8
hashCode2 = int(digest2, 16) % 8
hashCode3 = int(digest3, 16) % 8
print(hashCode1)
print(hashCode2)
print(hashCode3)
def derive_keys(x):
    """Stretch *x* into two 32-byte keys: SHA-512 over sha256d(x), split in half."""
    stretched = hashlib.sha512(sha256d(x)).digest()
    return (stretched[:32], stretched[32:])
def _gen_message_id(): return hashlib.sha512(uuid.uuid4().hex.encode()).hexdigest()
def ourHash(msg):
    """Raw 64-byte SHA-512 digest of *msg* (bytes-like)."""
    hasher = sha512(msg)
    return hasher.digest()