def test_data_transfer(self):
    """End-to-end stream test: open a stream between two random swarm
    peers, push 1 MiB of hex-encoded random data through it, and verify
    the received bytes hash to the same digest as the sent bytes."""
    # open stream
    random.shuffle(self.swarm)
    alice = self.swarm[0]
    bob = self.swarm[1]
    hexstreamid = alice.stream_open(bob.dht_id())
    self.assertIsNotNone(hexstreamid)
    # send 1M: 512 chunks of 1024 random bytes, hex-encoded before writing
    sent = hashlib.sha256()
    for chunk in [os.urandom(1024) for i in range(512)]:
        hexdata = binascii.hexlify(chunk)
        bytes_written = alice.stream_write(hexstreamid, hexdata)
        self.assertEqual(bytes_written, 1024)
        sent.update(hexdata)
    sent_digest = sent.hexdigest()
    # receive 1M
    received = hashlib.sha256()
    while True:
        hexdata = bob.stream_read(hexstreamid, size=1024)
        if not hexdata:  # empty read signals end of stream
            break
        received.update(hexdata)
    received_digetst = received.hexdigest()
    self.assertEqual(sent_digest, received_digetst)
    # close stream
    self.assertTrue(alice.stream_close(hexstreamid))
def forwards(self, orm):
    """South data migration: ensure every Award's user has a consistent
    sha256 Identity row, then denormalize the identity fields onto the
    Award itself (plus a fresh uuid1 and a cleared expiry)."""
    for award in orm['badges.Award'].objects.all():
        user = award.user
        try:
            user.identity  # raises when the user has no Identity row yet
            current_identity_hash = user.identity.identity_hash
            new_candidate_identity_hash = u'sha256$' + hashlib.sha256(user.email + user.identity.salt).hexdigest()
            if current_identity_hash != new_candidate_identity_hash:
                # stored hash no longer matches the email -- re-salt and recompute
                salt = uuid.uuid4().hex[:5]
                user.identity.salt = salt
                user.identity.identity_hash = u'sha256$' + hashlib.sha256(user.email + salt).hexdigest()
                user.identity.save()
        except:
            # NOTE(review): bare except presumably catches the missing-Identity
            # relation error, but it also hides any other failure -- confirm.
            salt = uuid.uuid4().hex[:5]
            orm['badges.Identity'].objects.create(
                user=user,
                identity_hash=u'sha256$' + hashlib.sha256(user.email + salt).hexdigest(),
                salt=salt
            )
        # copy identity fields onto the award record
        award.uuid = uuid.uuid1()
        award.identity_hash = award.user.identity.identity_hash
        award.identity_type = award.user.identity.type
        award.identity_hashed = award.user.identity.hashed
        award.identity_salt = award.user.identity.salt
        award.expires = None
        award.save()
def cross_check_consistency_against_opensource_algorithm(first, second, first_hash, second_hash, consistency):
    """Verify a Merkle consistency proof between trees of sizes `first` and
    `second`: recompute both the old root and the new root from the
    `consistency` node list and compare them with `first_hash` and
    `second_hash`.  Python 2 code: hashes are byte strings, `p.next()` is
    the iterator protocol."""
    try:
        node = first - 1
        last_node = second - 1
        # strip trailing 1-bits shared by both indices (common right spine)
        while node & 1:
            node >>= 1
            last_node >>= 1
        p = iter(consistency)
        if node:
            old_hash = p.next()
        else:  # old was 2 ** n
            old_hash = first_hash
        new_hash = old_hash
        while node:
            if node & 1:
                # right child: the left sibling appears in both trees
                x = p.next()
                old_hash = sha256(chr(1) + x + old_hash).digest()
                new_hash = sha256(chr(1) + x + new_hash).digest()
            elif node < last_node:
                # left child whose right sibling exists only in the new tree
                new_hash = sha256(chr(1) + new_hash + p.next()).digest()
            node >>= 1
            last_node >>= 1
        # levels present only in the larger (new) tree
        while last_node:
            new_hash = sha256(chr(1) + new_hash + p.next()).digest()
            last_node >>= 1
        for remaining in p:
            return False  # we shouldn't have any elements left over
        return old_hash == first_hash and new_hash == second_hash
    except StopIteration:
        return False  # ran out of elements
def submit_recording(connection, data):
    """Insert a recording into the database and return its gid.

    Stores the canonical JSON blob plus sha256 digests of the full data
    and of the (artist, title) metadata in recording_json, looks up or
    creates the artist credit and (when present) the release, then links
    everything in the recording table under a fresh uuid4 gid."""
    # canonical JSON (sorted keys, no whitespace) -> stable sha256 digests
    data_json = json.dumps(data, sort_keys=True, separators=(',', ':'))
    data_sha256 = sha256(data_json.encode("utf-8")).hexdigest()
    meta = {"artist": data["artist"], "title": data["title"]}
    meta_json = json.dumps(meta, sort_keys=True, separators=(',', ':'))
    meta_sha256 = sha256(meta_json.encode("utf-8")).hexdigest()
    # find-or-create the artist credit
    artist = get_artist_credit(connection, data["artist"])
    if not artist:
        artist = add_artist_credit(connection, data["artist"])
    # release is optional
    if "release" in data:
        release = get_release(connection, data["release"])
        if not release:
            release = add_release(connection, data["release"])
    else:
        release = None
    query = text("""INSERT INTO recording_json (data, data_sha256, meta_sha256) VALUES (:data, :data_sha256, :meta_sha256) RETURNING id""")
    result = connection.execute(query, {"data": data_json,
                                        "data_sha256": data_sha256,
                                        "meta_sha256": meta_sha256})
    id = result.fetchone()["id"]
    gid = str(uuid.uuid4())
    query = text("""INSERT INTO recording (gid, data, artist, release, submitted) VALUES (:gid, :data, :artist, :release, now())""")
    connection.execute(query, {"gid": gid,
                               "data": id,
                               "artist": artist,
                               "release": release})
    return gid
def cross_check_inclusion_via_opensource(hash, leaf_index, audit_path, tree_size, root_hash):
    """Verify a Merkle audit (inclusion) proof: recompute the root from
    `hash` at `leaf_index` using the sibling hashes in `audit_path` and
    compare with `root_hash`.  Rejects proofs that are too short or too
    long.  Python 2 code: hashes are byte strings."""
    audit_path = audit_path[:]  # work on a copy; consumed via pop(0)
    node_index = leaf_index
    calculated_hash = hash
    last_node = tree_size - 1
    while last_node > 0:
        if not audit_path:
            return False  # proof too short
        if node_index % 2:
            # odd index: we are a right child, sibling hashes on the left
            audit_hash = audit_path.pop(0)
            calculated_hash = sha256(chr(1) + audit_hash + calculated_hash).digest()
        elif node_index < last_node:
            # even index with an existing right sibling
            audit_hash = audit_path.pop(0)
            calculated_hash = sha256(chr(1) + calculated_hash + audit_hash).digest()
        # node_index == last_node and node_index is even: A sibling does
        # not exist. Go further up the tree until node_index is odd so
        # calculated_hash will be used as the right-hand operand.
        node_index //= 2
        last_node //= 2
    if audit_path:
        return False  # proof too long
    return calculated_hash == root_hash
def outputfilename(self, data, filename, raw):
    """Build the output filename for a download.

    Format: <name>[.<episode>]-<id>-svtplay placed in the directory of
    `filename`, where <id> is the first 7 hex chars of
    sha256(programVersionId).  Returns None when the page has no
    data-title attribute."""
    directory = os.path.dirname(filename)
    # short content id derived from the program version
    if is_py2:
        id = hashlib.sha256(data["programVersionId"]).hexdigest()[:7]
    else:
        id = hashlib.sha256(data["programVersionId"].encode("utf-8")).hexdigest()[:7]
    datatitle = re.search('data-title="([^"]+)"', self.get_urldata())
    if not datatitle:
        return None
    datat = decode_html_entities(datatitle.group(1))
    name = self.name(datat)
    episode = self.seasoninfo(datat)
    if is_py2:
        name = name.encode("utf8")
    if episode:
        title = "{0}.{1}-{2}-svtplay".format(name, episode, id)
    else:
        title = "{0}-{1}-svtplay".format(name, id)
    title = filenamify(title)
    if len(directory):
        output = os.path.join(directory, title)
    else:
        output = title
    return output
def check_inclusion_via_rfc_algorithm(hash, leaf_index, audit_path, tree_size, root_hash):
    """Verify a Merkle inclusion proof using the RFC 6962-bis numbered
    algorithm (comments quote the spec steps).  Python 2 code: `hash`,
    proof nodes and the root are byte strings; `lsb` is an external
    least-significant-bit helper."""
    # 1. Set "fn" to "leaf_index" and "sn" to "tree_size - 1".
    fn, sn = leaf_index, tree_size - 1
    # 2. Set "r" to "hash".
    r = hash
    # 3. For each value "p" in the "audit_path" array:
    for p in audit_path:
        # If "LSB(fn)" is set, or if "fn" is equal to "sn", then:
        if lsb(fn) or (fn == sn):
            # 1. Set "r" to "HASH(0x01 || p || r)"
            r = sha256(chr(1) + p + r).digest()
            # 2. If "LSB(fn)" is not set, then right-shift both "fn" and "sn"
            # equally until either "LSB(fn)" is set or "fn" is "fn".
            while not ((fn == 0) or lsb(fn)):
                fn >>= 1
                sn >>= 1
        # Otherwise:
        else:
            # Set "r" to "HASH(0x01 || r || p)"
            r = sha256(chr(1) + r + p).digest()
        # Finally, right-shift both "fn" and "sn" one time.
        fn >>= 1
        sn >>= 1
    # 4. Compare "r" against the "root_hash". If they are equal,
    # then the log has proven the inclusion of "hash".
    return r == root_hash
def test_derSigToHexSig(self):
    """derSigToHexSig must strip the DER framing from an ECDSA signature,
    leaving the r||s hex; the result must then verify against the
    transaction's double-SHA256 sighash.  Python 2 code: uses
    str.decode('hex') / .encode('hex')."""
    derSig = "304502204c01fee2d724fb2e34930c658f585d49be2f6ac87c126506c0179e6977716093022100faad0afd3ae536cfe11f83afaba9a8914fc0e70d4c6d1495333b2fb3df6e8cae"
    self.assertEqual("4c01fee2d724fb2e34930c658f585d49be2f6ac87c126506c0179e6977716093faad0afd3ae536cfe11f83afaba9a8914fc0e70d4c6d1495333b2fb3df6e8cae",
                     derSigToHexSig(derSig))
    # full raw transaction: version, one input (with scriptSig), two outputs, locktime
    txn = ("0100000001a97830933769fe33c6155286ffae34db44c6b8783a2d8ca52ebee6414d399ec300000000" +
           "8a47" +
           "304402202c2e1a746c556546f2c959e92f2d0bd2678274823cc55e11628284e4a13016f80220797e716835f9dbcddb752cd0115a970a022ea6f2d8edafff6e087f928e41baac01" +
           "41" +
           "04392b964e911955ed50e4e368a9476bc3f9dcc134280e15636430eb91145dab739f0d68b82cf33003379d885a0b212ac95e9cddfd2d391807934d25995468bc55" +
           "ffffffff02015f0000000000001976a914c8e90996c7c6080ee06284600c684ed904d14c5c88ac204e000000000000" +
           "1976a914348514b329fda7bd33c7b2336cf7cd1fc9544c0588ac00000000")
    # the same tx with the scriptSig replaced by the previous scriptPubKey
    # and the SIGHASH type appended -- the form that is actually hashed
    myTxn_forSig = ("0100000001a97830933769fe33c6155286ffae34db44c6b8783a2d8ca52ebee6414d399ec300000000" +
                    "1976a914" +
                    "167c74f7491fe552ce9e1912810a984355b8ee07" +
                    "88ac" +
                    "ffffffff02015f0000000000001976a914c8e90996c7c6080ee06284600c684ed904d14c5c88ac204e000000000000" +
                    "1976a914348514b329fda7bd33c7b2336cf7cd1fc9544c0588ac00000000" +
                    "01000000")
    public_key = "04392b964e911955ed50e4e368a9476bc3f9dcc134280e15636430eb91145dab739f0d68b82cf33003379d885a0b212ac95e9cddfd2d391807934d25995468bc55"
    # sighash: double SHA-256 of the serialized tx-for-signing
    hashToSign = hashlib.sha256(hashlib.sha256(myTxn_forSig.decode('hex')).digest()).digest().encode('hex')
    # drop the trailing sighash byte ("01") before DER decoding
    sig_der = "304402202c2e1a746c556546f2c959e92f2d0bd2678274823cc55e11628284e4a13016f80220797e716835f9dbcddb752cd0115a970a022ea6f2d8edafff6e087f928e41baac01"[:-2]
    sig = derSigToHexSig(sig_der)
    vk = ecdsa.VerifyingKey.from_string(public_key[2:].decode('hex'), curve=ecdsa.SECP256k1)
    self.assertEquals(vk.verify_digest(sig.decode('hex'), hashToSign.decode('hex')), True)
def _generate_new_keypair(self): secret = str(random.randrange(2 ** 256)) self.secret = hashlib.sha256(secret).hexdigest() self.pubkey = privtopub(self.secret) self.privkey = random_key() print 'PRIVATE KEY: ', self.privkey self.btc_pubkey = privtopub(self.privkey) print 'PUBLIC KEY: ', self.btc_pubkey # Generate SIN sha_hash = hashlib.sha256() sha_hash.update(self.pubkey) ripe_hash = hashlib.new('ripemd160') ripe_hash.update(sha_hash.digest()) self.guid = ripe_hash.digest().encode('hex') self.sin = obelisk.EncodeBase58Check('\x0F\x02%s' + ripe_hash.digest()) newsettings = { "secret": self.secret, "pubkey": self.pubkey, "privkey": self.privkey, "guid": self.guid, "sin": self.sin } self.db.updateEntries("settings", {"market_id": self.market_id}, newsettings) self.settings.update(newsettings)
def __init__(self, username, password, pathToKeyFilename):
    """Derive the AES key, IV and RSA key object for one key file.

    The AES key is derived from the password salted with sha256(username);
    the IV is derived from the AES key salted with sha256 of the key-file
    path, so each key file gets a distinct IV.  The RSA key object reuses
    the username-derived salt."""
    user_salt = hashlib.sha256(username).digest()
    path_salt = hashlib.sha256(str(pathToKeyFilename)).digest()
    self.keyAES = makeKeyAES(password, user_salt)
    self.iv = makeIV(self.keyAES, path_salt)
    self.rsaKeyObj = makeRSAKeyObj(password, user_salt)
def checkSame(seq1, seq2, numOfBits):
    """Return True iff the first numOfBits/4 hex digits of sha256(seq1)
    and sha256(seq2) agree under the FLAG_1 bitmask."""
    digest1 = hashlib.sha256(seq1).hexdigest()
    digest2 = hashlib.sha256(seq2).hexdigest()
    for pos in xrange(numOfBits / 4):
        masked1 = int(digest1[pos], 16) & FLAG_1
        masked2 = int(digest2[pos], 16) & FLAG_1
        if masked1 != masked2:
            return False
    return True
def wrappedSummationAndItems(fact, roundedSum, boundSummationItems):
    # Build sha256 hashes of the summation fact/value and of all bound
    # contributing items, and wrap them for the properties view.
    # need hash of facts and their values from boundSummationItems
    '''
    ARELLE-281, replace: faster python-based hash (replace with hashlib for fewer collisions)
    itemValuesHash = hash( tuple(( hash(b.modelObject.qname), hash(b.extraProperties[1][1]) )
                                 # sort by qname so we don't care about reordering of summation terms
                                 for b in sorted(boundSummationItems,
                                                 key=lambda b: b.modelObject.qname)) )
    sumValueHash = hash( (hash(fact.qname), hash(roundedSum)) )
    '''
    # items hash: sort by qname so we don't care about reordering of summation terms in linkbase updates
    sha256 = hashlib.sha256()
    for b in sorted(boundSummationItems, key=lambda b: b.modelObject.qname):
        # qname of erroneous submission may not be utf-8 perfectly encodable
        sha256.update(b.modelObject.qname.namespaceURI.encode('utf-8','replace'))
        sha256.update(b.modelObject.qname.localName.encode('utf-8','replace'))
        sha256.update(str(b.extraProperties[1][1]).encode('utf-8','replace'))
    itemValuesHash = sha256.hexdigest()
    # summation value hash
    sha256 = hashlib.sha256()
    sha256.update(fact.qname.namespaceURI.encode('utf-8','replace'))
    sha256.update(fact.qname.localName.encode('utf-8','replace'))
    sha256.update(str(roundedSum).encode('utf-8','replace'))
    sumValueHash = sha256.hexdigest()
    # return list of bound summation followed by bound contributing items
    return [ObjectPropertyViewWrapper(fact, (
                ("sumValueHash", sumValueHash),
                ("itemValuesHash", itemValuesHash),
                ("roundedSum", roundedSum) ))] + \
           boundSummationItems
def setUp(self):
    """Create two in-memory AFF4 streams (one with a unicode path) holding
    known content with their sha256 hashes set, plus matching StatEntry
    objects for each path."""
    super(CollectionArchiveGeneratorTest, self).setUp()
    path1 = "aff4:/C.0000000000000000/fs/os/foo/bar/hello1.txt"
    with aff4.FACTORY.Create(path1, aff4.AFF4MemoryStream.__name__, token=self.token) as fd:
        fd.Write("hello1")
        fd.Set(fd.Schema.HASH,
               rdf_crypto.Hash(sha256=hashlib.sha256("hello1").digest()))
    # second path exercises unicode filename handling
    path2 = u"aff4:/C.0000000000000000/fs/os/foo/bar/中国新闻网新闻中.txt"
    with aff4.FACTORY.Create(path2, aff4.AFF4MemoryStream.__name__, token=self.token) as fd:
        fd.Write("hello2")
        fd.Set(fd.Schema.HASH,
               rdf_crypto.Hash(sha256=hashlib.sha256("hello2").digest()))
    self.stat_entries = []
    self.paths = [path1, path2]
    for path in self.paths:
        self.stat_entries.append(rdf_client.StatEntry(
            aff4path=path,
            pathspec=rdf_paths.PathSpec(
                path="fs/os/foo/bar/" + path.split("/")[-1],
                pathtype=rdf_paths.PathSpec.PathType.OS)))
    self.fd = None
def got_data(self):
    """Parse as many complete P2P messages as possible out of recvbuf.

    Wire format: 4-byte network magic, 12-byte NUL-padded command, 4-byte
    little-endian payload length, 4-byte checksum (first 4 bytes of the
    double-SHA256 of the payload), then the payload.  Known commands are
    deserialized via messagemap and dispatched to the node; returns when
    the buffer holds no complete message.  Python 2 code: the buffer is
    a str."""
    while True:
        if len(self.recvbuf) < 4:
            return
        if self.recvbuf[:4] != self.node.netmagic.msg_start:
            raise ValueError("got garbage %s" % repr(self.recvbuf))
        # check checksum
        if len(self.recvbuf) < 4 + 12 + 4 + 4:
            return  # header incomplete
        command = self.recvbuf[4:4+12].split("\x00", 1)[0]
        msglen = struct.unpack("<i", self.recvbuf[4+12:4+12+4])[0]
        checksum = self.recvbuf[4+12+4:4+12+4+4]
        if len(self.recvbuf) < 4 + 12 + 4 + 4 + msglen:
            return  # payload incomplete
        msg = self.recvbuf[4+12+4+4:4+12+4+4+msglen]
        # double-SHA256 of the payload; first 4 bytes must match the header
        th = hashlib.sha256(msg).digest()
        h = hashlib.sha256(th).digest()
        if checksum != h[:4]:
            raise ValueError("got bad checksum %s" % repr(self.recvbuf))
        # consume the message before dispatching
        self.recvbuf = self.recvbuf[4+12+4+4+msglen:]
        if command in messagemap:
            f = cStringIO.StringIO(msg)
            t = messagemap[command](self.ver_recv)
            t.deserialize(f)
            self.node.got_message(self, t)
        else:
            print("UNKNOWN COMMAND %s %s" % (command, repr(msg)))
def _delete_network_with_exception(self, network_id, ex):
    """Set up mox expectations for a network-delete flow whose final
    delete_network call raises `ex`.

    NotFound makes list_networks return no networks; any other exception
    returns one ACTIVE network.  Subnet listing/deletion and empty
    subnet-pool lookups are stubbed along the way."""
    fake_neutron_network_id = "4e8e5957-649f-477b-9e5b-f1f75b21c03c"
    if ex == exceptions.NotFound:
        fake_networks_response = {
            "networks": []
        }
    else:
        fake_networks_response = {
            "networks": [{
                "status": "ACTIVE",
                "subnets": [],
                "name": network_id,
                "admin_state_up": True,
                "tenant_id": "9bacb3c5d39d41a79512987f338cf177",
                "router:external": False,
                "segments": [],
                "shared": False,
                "id": fake_neutron_network_id
            }]
        }
    self.mox.StubOutWithMock(app.neutron, 'list_networks')
    app.neutron.list_networks(
        name=network_id).AndReturn(fake_networks_response)
    subnet_v4_id = "9436e561-47bf-436a-b1f1-fe23a926e031"
    subnet_v6_id = "64dd4a98-3d7a-4bfd-acf4-91137a8d2f51"
    # random docker-side ids: sha256 hexdigests of 256 random bits
    docker_network_id = hashlib.sha256(
        utils.getrandbits(256)).hexdigest()
    docker_endpoint_id = hashlib.sha256(
        utils.getrandbits(256)).hexdigest()
    fake_v4_subnet = self._get_fake_v4_subnet(
        docker_network_id, docker_endpoint_id, subnet_v4_id)
    fake_v6_subnet = self._get_fake_v6_subnet(
        docker_network_id, docker_endpoint_id, subnet_v6_id)
    fake_subnets_response = {
        "subnets": [
            fake_v4_subnet['subnet'],
            fake_v6_subnet['subnet']
        ]
    }
    self.mox.StubOutWithMock(app.neutron, 'list_subnets')
    app.neutron.list_subnets(network_id=fake_neutron_network_id).AndReturn(
        fake_subnets_response)
    # both kuryr subnet pools report empty
    self.mox.StubOutWithMock(app.neutron, 'list_subnetpools')
    fake_subnetpools_response = {"subnetpools": []}
    app.neutron.list_subnetpools(name='kuryr').AndReturn(
        fake_subnetpools_response)
    app.neutron.list_subnetpools(name='kuryr6').AndReturn(
        fake_subnetpools_response)
    self.mox.StubOutWithMock(app.neutron, 'delete_subnet')
    app.neutron.delete_subnet(subnet_v4_id).AndReturn(None)
    app.neutron.delete_subnet(subnet_v6_id).AndReturn(None)
    # the network delete itself raises the exception under test
    self.mox.StubOutWithMock(app.neutron, 'delete_network')
    app.neutron.delete_network(fake_neutron_network_id).AndRaise(ex)
    self.mox.ReplayAll()
def get_password_digest(self, password, salt=None):
    """Return (salt, digest) for a password.

    When no salt is given, a random 32-byte one is generated and base64
    encoded.  The digest is sha256(salt + password), then the hex digest
    is re-hashed 100001 more times as simple key stretching.  Python 2
    code: operates on str."""
    if not salt:
        salt = base64.b64encode(os.urandom(32))
    digest = hashlib.sha256(salt + password).hexdigest()
    # stretch: iteratively re-hash the hex digest (100001 rounds)
    for x in range(0, 100001):
        digest = hashlib.sha256(digest).hexdigest()
    return salt, digest
def load_wallet(self, wallet, window):
    """Plugin hook: initialise label-sync state for a loaded wallet.

    Derives the label-encryption password, IV and wallet id from the
    wallet's sorted master public keys, caches all wallet addresses, then
    starts a daemon thread that pulls remote labels.  Python 2 code
    (str.encode('hex'))."""
    self.wallet = wallet
    self.wallet_nonce = self.wallet.storage.get("wallet_nonce")
    self.print_error("Wallet nonce is", self.wallet_nonce)
    if self.wallet_nonce is None:
        self.set_nonce(1)
    # identity and crypto material are all derived from the MPKs
    mpk = ''.join(sorted(self.wallet.get_master_public_keys().values()))
    self.encode_password = hashlib.sha1(mpk).digest().encode('hex')[:32]
    self.iv = hashlib.sha256(self.encode_password).digest()[:16]
    self.wallet_id = hashlib.sha256(mpk).digest().encode('hex')
    addresses = []
    for account in self.wallet.accounts.values():
        for address in account.get_addresses(0):
            addresses.append(address)
    self.addresses = addresses
    # If there is an auth token we can try to actually start syncing
    def do_pull_thread():
        try:
            self.pull_thread()
        except Exception as e:
            # best-effort: sync failure must not break wallet loading
            self.print_error("could not retrieve labels:", e)
    t = threading.Thread(target=do_pull_thread)
    t.setDaemon(True)
    t.start()
def _set_shedule(self):
    """Create CloudWatch Events schedule rules for every lambda whose
    config has a 'shedule_expression' (sic -- the misspelling is the
    established config key), wire the lambda as the rule target, and
    grant events.amazonaws.com permission to invoke it."""
    settings = self.client.settings
    client = boto3.client('events', **settings)
    for function_name, function_config in self.lambda_config.items():
        expression = function_config.get('shedule_expression')
        if expression:
            response = client.put_rule(
                Name=function_name,
                ScheduleExpression=expression,
                State='ENABLED'
            )
            # target/statement id: deterministic sha256 of the function name
            client.put_targets(
                Rule=function_name,
                Targets=[
                    {'Id': hashlib.sha256(function_name).hexdigest(),
                     'Arn': self.arns[function_name]
                     }
                ]
            )
            permission = dict(
                FunctionName=function_name,
                StatementId=hashlib.sha256(function_name).hexdigest(),
                Action="lambda:InvokeFunction",
                Principal="events.amazonaws.com",
                SourceArn=response['RuleArn'],
            )
            try:
                self.client.instance.add_permission(**permission)
            except ClientError:
                # NOTE(review): presumably the permission already exists;
                # other ClientErrors are swallowed too -- confirm intent.
                pass
def test_all(self):
    """Cross-check the pure-python sha256 implementation against hashlib
    on fixed vectors, random strings of every length 0-149, and random
    copy()/update() sequences.  Python 2 code (print statement, xrange,
    byte strings built with chr)."""
    for test in ['', 'a', 'b', 'abc', 'abc'*50, 'hello world']:
        #print test
        #print sha256.sha256(test).hexdigest()
        #print hashlib.sha256(test).hexdigest()
        #print
        assert sha256.sha256(test).hexdigest() == hashlib.sha256(test).hexdigest()
    def random_str(l):
        # random byte string of length l
        return ''.join(chr(random.randrange(256)) for i in xrange(l))
    for length in xrange(150):
        test = random_str(length)
        a = sha256.sha256(test).hexdigest()
        b = hashlib.sha256(test).hexdigest()
        #print length, a, b
        if a != b:
            print 'ERROR!'
            raise ValueError()
    # exercise copy() + update() mid-stream
    # NOTE(review): under Python 2 integer division, 1/100 is 0 and
    # expovariate(0) divides by zero -- confirm `from __future__ import
    # division` is in effect at module level, or this should be 1/100.0.
    for i in xrange(100):
        test = random_str(int(random.expovariate(1/100)))
        test2 = random_str(int(random.expovariate(1/100)))
        a = sha256.sha256(test)
        a = a.copy()
        a.update(test2)
        a = a.hexdigest()
        b = hashlib.sha256(test)
        b = b.copy()
        b.update(test2)
        b = b.hexdigest()
        #print a, b
        if a != b:
            print 'ERROR!'
            raise ValueError()
def encrypt(seed, passphrase): """ Encrypt the Electrum seed """ #1. Decode the seed value to the original number seed = mn_decode(seed.split()) #2. Take a hash of the decoded seed to act as a scrypt salt salt = hashlib.sha256(hashlib.sha256(seed).digest()).digest()[:4] #3. Derive a key from the passphrase using scrypt key = scrypt.hash(passphrase, salt, 16384, 8, 8) #4. Split the key into half 1 and half 2 derivedhalf1 = key[0:32] derivedhalf2 = key[32:64] #5. Do AES256Encrypt(seedhalf1 xor derivedhalf1[0...15], derivedhalf2), call the 16-byte result encryptedhalf1 # (Electrum may change the number of words in a seed so we should future proof by just using the halfs rather than hardcoded lengths) Aes = aes.Aes(derivedhalf2) encryptedhalf1 = Aes.enc(enc.sxor(seed[:int(math.floor(len(seed)/2))], derivedhalf1[:16])) #6. Do AES256Encrypt(seedhalf2 xor derivedhalf1[16...31], derivedhalf2), call the 16-byte result encryptedhalf2 encryptedhalf2 = Aes.enc(enc.sxor(seed[int(math.floor(len(seed)/2)):len(seed)], derivedhalf1[16:32])) #7. The encrypted private key is the Base58Check-encoded concatenation of the following # \x4E\xE3\x13\x35 + salt + encryptedhalf1 + encryptedhalf2 # (\x4E\xE3\x13\x35) gives the 'SeedE' prefix) encSeed = '\x4E\xE3\x13\x35' + salt + encryptedhalf1 + encryptedhalf2 check = hashlib.sha256(hashlib.sha256(encSeed).digest()).digest()[:4] return enc.b58encode(encSeed + check)
def decrypt(secret, message):
    """Decrypt an ECIES-style base64 message.

    Wire layout after base64-decoding: R (33-byte point) || 32-byte
    HMAC-SHA256 tag || 8-byte CTR prefix || ciphertext.  Encryption and
    MAC keys are sha256 of the shared-secret x-coordinate with distinct
    counter suffixes.  Returns the plaintext string, or None on short
    input, MAC mismatch or unpad/decode failure."""
    message = message.encode('utf-8')
    curve = SECP256k1.curve()
    order = SECP256k1.order()
    R_size = 1 + 32  # point prefix byte + 32-byte x coordinate
    mac_size = hashlib.sha256().digest_size
    message_binary = base64.b64decode(message)
    if len(message_binary) < (R_size + mac_size):
        return None
    # split the message into its fixed-offset fields
    R = decode_point(message_binary)
    d = message_binary[R_size:R_size + mac_size]
    prefix_bytes = message_binary[R_size + mac_size:R_size + mac_size + 8]
    c = message_binary[R_size + mac_size + 8:]
    # ECDH: shared secret is the x-coordinate of secret * R
    S = (secret * R).x()
    S_bytes = S.to_bytes(32, 'big')
    # KDF: separate encryption and MAC keys via counter suffixes
    k_E = hashlib.sha256(S_bytes + b'\0\0\0\1').digest()
    k_M = hashlib.sha256(S_bytes + b'\0\0\0\2').digest()
    d_verify = hmac.new(k_M, prefix_bytes + c, hashlib.sha256).digest()
    if d_verify != d:
        return None  # MAC mismatch: wrong key or tampered message
    ctr = Counter.new(64, prefix=prefix_bytes)
    cipher = AES.new(key=k_E, mode=AES.MODE_CTR, counter=ctr)
    padded = cipher.decrypt(c)
    try:
        return unpad(padded, AES.block_size).decode('utf-8')
    except:
        # NOTE(review): bare except presumably guards bad padding/UTF-8;
        # it also hides unrelated errors -- confirm.
        return None
def decrypt_body(self, password, data):
    """Decrypt the KDB body using the header's key-derivation parameters,
    strip the trailing padding, and verify the body's sha256 checksum.
    Raises KdbReaderDecodeFailError on checksum mismatch.  Python 2 code
    (print statement, str body)."""
    import time
    key = sha256(password).digest()
    hdr = self.header
    then = time.time()
    # sha256 the password, encrypt it upon itself 50000 times, sha256 it again, and sha256 it again concatenated with a random number :|
    cipher = AES.new(hdr['seed_key'], AES.MODE_ECB)
    for x in range(hdr['seed_rot_n']):
        key = cipher.encrypt(key)
    key = sha256(key).digest()
    key = sha256(hdr['seed_rand'] + key).digest()
    cipher = AES.new(key, AES.MODE_CBC, hdr['enc_iv'])
    body = cipher.decrypt(data)
    # remove some padding: the last byte stores the pad length
    padding = unpack("b", body[-1])[0]
    body = body[:-padding]
    now = time.time()
    print 'spent %.3fms on decryption' % ((now - then) * 1000)
    if sha256(body).digest() != hdr['checksum']:
        raise KdbReaderDecodeFailError()
    return body
def validate(bitcoin_address, magicbyte=0):
    """Check the integrity of a bitcoin address

    Returns False if the address is invalid.
    `magicbyte` may be a single int or a tuple of acceptable version
    bytes (for altcoins).

    >>> validate('1AGNa15ZQXAZUgFiqJ2i7Z2DPU2J6hW62i')
    True
    >>> validate('')
    False
    """
    if isinstance(magicbyte, int):
        magicbyte = (magicbyte,)
    # cheap length sanity check before base58 decoding
    clen = len(bitcoin_address)
    if clen < 27 or clen > 35:  # XXX or 34?
        return False
    try:
        bcbytes = decode_base58(bitcoin_address, 25)
    except ValueError:
        return False
    # Check magic byte (for other altcoins, fix by Frederico Reiven)
    # (removed dead local `allowed_first = tuple(string.digits)` -- it was
    # never read)
    for mb in magicbyte:
        if bcbytes.startswith(chr(int(mb))):
            break
    else:
        return False
    # Compare checksum: first 4 bytes of double-SHA256 of the payload
    checksum = sha256(sha256(bcbytes[:-4]).digest()).digest()[:4]
    if bcbytes[-4:] != checksum:
        return False
    # Encoded bytestring should be equal to the original address,
    # for example '14oLvT2' has a valid checksum, but is not a valid btc
    # address
    return bitcoin_address == encode_base58(bcbytes)
def DerivePlayReadyKey(seed, kid, swap=True):
    """Derive a 16-byte PlayReady content key from a key seed and a KID.

    The key is the XOR of the two halves of three SHA-256 digests taken
    over seed||kid, seed||kid||seed and seed||kid||seed||kid.  When
    `swap` is set, the first 8 bytes of the KID are byte-swapped out of
    the GUID mixed-endian layout.  Python 2 code: str in, str out.
    """
    if len(seed) < 30:
        raise Exception('seed must be >= 30 bytes')
    if len(kid) != 16:
        raise Exception('kid must be 16 bytes')
    if swap:
        # reorder the GUID's little-endian fields: 3210 54 76, tail as-is
        kid = (kid[3] + kid[2] + kid[1] + kid[0] +
               kid[5] + kid[4] + kid[7] + kid[6] + kid[8:])
    seed = seed[:30]

    def digest_bytes(*parts):
        # byte values of sha256 over the concatenated parts
        sha = hashlib.sha256()
        for part in parts:
            sha.update(part)
        return [ord(x) for x in sha.digest()]

    sha_A = digest_bytes(seed, kid)
    sha_B = digest_bytes(seed, kid, seed)
    sha_C = digest_bytes(seed, kid, seed, kid)
    content_key = ""
    for i in range(16):
        content_key += chr(sha_A[i] ^ sha_A[i + 16] ^
                           sha_B[i] ^ sha_B[i + 16] ^
                           sha_C[i] ^ sha_C[i + 16])
    return content_key
def test_pick_blocks(self):
    """pick_blocks must accept int, Decimal, sha256-hash-object and
    hex-string seeds, always return the requested number of integer
    seeds, and raise HeartbeatError for a non-positive count."""
    integer = random.randint(0, 65535)
    decimal_ = Decimal(random.random()) + 5
    hashobj = hashlib.sha256(os.urandom(24))
    hexdigest = hashlib.sha256(os.urandom(24)).hexdigest()
    # int seed
    seeds = self.hb.pick_blocks(4, integer)
    self.assertEqual(len(seeds), 4)
    # Decimal seed
    seeds = self.hb.pick_blocks(4, decimal_)
    self.assertEqual(len(seeds), 4)
    for seed in seeds:
        self.assertIsInstance(seed, int)
    # hash-object seed
    seeds = self.hb.pick_blocks(4, hashobj)
    self.assertEqual(len(seeds), 4)
    for seed in seeds:
        self.assertIsInstance(seed, int)
    # hex-string seed
    seeds = self.hb.pick_blocks(4, hexdigest)
    self.assertEqual(len(seeds), 4)
    for seed in seeds:
        self.assertIsInstance(seed, int)
    # non-positive block count must raise with a descriptive message
    with self.assertRaises(HeartbeatError) as ex:
        self.hb.pick_blocks(-1, integer)
    ex_msg = ex.exception.message
    self.assertEqual("-1 is not greater than 0", ex_msg)
def encrypt(self, plaintext):
    """Axolotl/Double-Ratchet encrypt: perform a DH ratchet step when
    flagged, derive the message key from the sending chain key, and emit
    header || padding || body.  The header (msg1) is padded to 106 bytes
    with the final pad byte storing the pad length.  Python 2 code:
    keys are byte strings."""
    if self.state['ratchet_flag']:
        # DH ratchet step: fresh ratchet keypair, roll RK, HKs, NHKs, CKs
        self.state['DHRs_priv'], self.state['DHRs'] = self.genKey()
        self.state['HKs'] = self.state['NHKs']
        self.state['RK'] = sha256(self.state['RK'] + self.gen_dh(
            self.state['DHRs_priv'], self.state['DHRr'])).digest()
        if self.mode:
            # role-dependent KDF info bytes keep the two sides' chains distinct
            self.state['NHKs'] = pbkdf2(self.state['RK'], b'\x03', 10, prf='hmac-sha256')
            self.state['CKs'] = pbkdf2(self.state['RK'], b'\x05', 10, prf='hmac-sha256')
        else:
            self.state['NHKs'] = pbkdf2(self.state['RK'], b'\x04', 10, prf='hmac-sha256')
            self.state['CKs'] = pbkdf2(self.state['RK'], b'\x06', 10, prf='hmac-sha256')
        self.state['PNs'] = self.state['Ns']
        self.state['Ns'] = 0
        self.state['ratchet_flag'] = False
    # message key derived from the sending chain key
    mk = sha256(self.state['CKs'] + '0').digest()
    # header: Ns and PNs as 3-digit strings plus our ratchet public key
    msg1 = self.enc(self.state['HKs'], str(self.state['Ns']).zfill(3) +
                    str(self.state['PNs']).zfill(3) + self.state['DHRs'])
    msg2 = self.enc(mk, plaintext)
    # pad the header to 106 bytes; last pad byte records the pad length
    pad_length = 106 - len(msg1)
    pad = os.urandom(pad_length - 1) + chr(pad_length)
    msg = msg1 + pad + msg2
    self.state['Ns'] += 1
    # advance the sending chain key
    self.state['CKs'] = sha256(self.state['CKs'] + '1').digest()
    return msg
def encode_xmlrpc_chunks_iterator(file_obj):
    """
    Prepare data for a xml-rpc transfer.
    Iterate through (chunk_start, chunk_len, chunk_checksum, encoded_chunk) tuples.
    Final tuple is (total_length, -1, total_checksum, "").

    @param file_obj: file object (or StringIO, etc.)
    @type file_obj: file
    @return: (chunk_start, chunk_len, chunk_checksum, encoded_chunk)
    @rtype: (str, str, str, str)
    """
    CHUNK_SIZE = 1024 ** 2  # 1 MiB per chunk
    checksum = hashlib.sha256()  # running checksum over the whole stream
    chunk_start = file_obj.tell()
    while True:
        chunk = file_obj.read(CHUNK_SIZE)
        if not chunk:
            break
        checksum.update(chunk)
        encoded_chunk = base64.encodestring(chunk)
        # per-chunk sha256 so the receiver can verify each piece
        yield (str(chunk_start), str(len(chunk)), hashlib.sha256(chunk).hexdigest().lower(), encoded_chunk)
        chunk_start += len(chunk)
    # NOTE(review): the terminator's second element is the int -1, not a
    # str as the @rtype above claims -- confirm what callers expect.
    yield (str(chunk_start), -1, checksum.hexdigest().lower(), "")
def __init__(self, password = "******"):
    '''
    This constructor will create a password hash that can be used to
    verify user.

    The hash is sha256 of str(password); a falsy password falls back to
    hashing the literal "key".  Creation/post timestamps are stored as
    (year, month, day, hour, minute) tuples from the local clock.
    '''
    if password:
        passhash = hashlib.sha256(str(password)).hexdigest()
    else:
        # falsy password: hash a fixed placeholder instead
        passhash = hashlib.sha256("key").hexdigest()
    self.passhash = passhash
    # NOTE(review): this rebinds the shared module attribute
    # datetime.MINYEAR for the whole process -- confirm intent.
    datetime.MINYEAR = 2016
    build_time = tuple(x for x in time.localtime()[0:5])  # (Y, M, D, h, m)
    self.creationtime = build_time
    self.last_post = build_time
    self.current_time = build_time
    #Eventually use self.hashlist as a way to merge IdeaPads.
    self.hashlist = ['END']
    # Treat as a reverse queue: front is oldest. Tail is youngest
    self.PAYABLE = False
    self.name = str(passhash)
    self.post_texts = PostDict.postdict()
def update(self, obj_name, num_files, num_files_updated):
    """Update doc file if needed (if content has changed).

    Closes the temp file, compares sha256 checksums of the existing doc
    file and the temp file, and either replaces the doc file (bumping the
    updated counters) or discards the temp file.  The total counters are
    always bumped."""
    # close temp file
    self._file.close()

    def _checksum(path):
        # sha256 of a file's content; '' when the file can't be read
        try:
            with open(path, 'r') as handle:
                return hashlib.sha256(handle.read()).hexdigest()
        except IOError:
            return ''

    old_digest = _checksum(self.filename)
    new_digest = _checksum(self.filename_tmp)
    if old_digest != new_digest:
        # content changed: swap the temp file in and count the update
        if os.path.exists(self.filename):
            os.unlink(self.filename)
        os.rename(self.filename_tmp, self.filename)
        num_files_updated['total1'] += 1
        num_files_updated['total2'] += 1
        num_files_updated[obj_name] += 1
    else:
        # unchanged: discard the temp file
        os.unlink(self.filename_tmp)
    # update counters
    num_files['total1'] += 1
    num_files['total2'] += 1
    num_files[obj_name] += 1
def verify_certificate(c, pc):
    # TODO: need a new way
    """Verify certificate `c`'s signature using parent `pc`'s public key.

    Hashes the DER-encoded tbsCertificate with the digest matching the
    certificate's signature-algorithm OID and delegates to sig_verify;
    an unrecognized OID yields an empty hash string."""
    # signature-algorithm OID -> hashlib constructor
    oid_to_hash = {
        '1.2.840.113549.1.1.4': hashlib.md5,       # RSA with MD5
        '1.2.840.113549.1.1.5': hashlib.sha1,      # RSA with SHA-1
        '1.2.840.113549.1.1.11': hashlib.sha256,   # RSA with SHA-256
        '1.2.840.113549.1.1.12': hashlib.sha384,   # RSA with SHA-384
        '1.2.840.113549.1.1.13': hashlib.sha512,   # RSA with SHA-512
        '1.2.840.10040.4.3': hashlib.sha1,         # DSA with SHA-1
        '2.16.840.1.101.3.4.3.2': hashlib.sha256,  # DSA with SHA-256
        '1.2.840.10045.4.1': hashlib.sha1,         # ECDSA with SHA-1
        '1.2.840.10045.4.3.2': hashlib.sha256,     # ECDSA with SHA-256
    }
    c_signature_algorithm = c['signature_algorithm']['algorithm'].dotted
    c_tbs_encoded = c['tbs_certificate'].dump()
    hash_ctor = oid_to_hash.get(c_signature_algorithm)
    if hash_ctor is not None:
        tbs_hash_hex = hash_ctor(c_tbs_encoded).hexdigest()
    else:
        tbs_hash_hex = ''
    pub_key = pc.public_key
    return sig_verify(c.signature, pub_key, tbs_hash_hex)
def _get_key(salt):
    """
    Combines `settings.SECRET_KEY` with a salt.

    A falsy salt is treated as the empty string; the result is the raw
    sha256 digest of the concatenation.
    """
    effective_salt = salt if salt else ""
    material = "%s%s" % (settings.SECRET_KEY, effective_salt)
    return sha256(material).digest()
def password_encryption(password, salt=''):
    """Hash a password for storage.

    Returns the hex sha256 digest of the UTF-8 bytes of
    '******' + password + salt + '$%^'.
    """
    peppered = '******' + password + salt + '$%^'
    digest = hashlib.sha256()
    digest.update(peppered.encode('utf-8'))
    return digest.hexdigest()
def dict_to_tf_example(data, image_file, annotations_dir, label_map_dict, include_masks, ignore_difficult_instances):
    """Convert one parsed VOC-style annotation dict plus its JPEG into a
    tf.train.Example.

    Bounding boxes are normalized by the annotated width/height.  Objects
    marked difficult are skipped when ignore_difficult_instances is set;
    objects whose name maps to a negative class id are skipped.  When
    include_masks is set, per-object PNG masks named
    <filename>_object<idx>.png are read from annotations_dir and stored
    binarized (pixel == 255).  Raises ValueError for non-JPEG images or
    non-PNG masks."""
    with tf.gfile.GFile(image_file, 'rb') as fid:
        encoded_jpg = fid.read()
    encoded_jpg_io = io.BytesIO(encoded_jpg)
    image = PIL.Image.open(encoded_jpg_io)
    if image.format != 'JPEG':
        raise ValueError('Image format not JPEG')
    # sha256 of the encoded bytes, stored as the example's key
    key = hashlib.sha256(encoded_jpg).hexdigest()
    width = int(data['size']['width'])
    height = int(data['size']['height'])
    xmin = []
    ymin = []
    xmax = []
    ymax = []
    classes = []
    classes_text = []
    truncated = []
    poses = []
    difficult_obj = []
    masks = []
    if 'object' in data:
        for idx, obj in enumerate(data['object']):
            difficult = bool(int(obj['difficult']))
            if ignore_difficult_instances and difficult:
                continue
            class_id = getClassId(obj['name'], label_map_dict)
            if class_id < 0:
                continue  # label not in the map: skip this object
            difficult_obj.append(int(difficult))
            # normalize box coordinates to [0, 1]
            xmin.append(float(obj['bndbox']['xmin']) / width)
            ymin.append(float(obj['bndbox']['ymin']) / height)
            xmax.append(float(obj['bndbox']['xmax']) / width)
            ymax.append(float(obj['bndbox']['ymax']) / height)
            classes_text.append(obj['name'].encode('utf8'))
            classes.append(class_id)
            truncated.append(int(obj['truncated']))
            poses.append(obj['pose'].encode('utf8'))
            if include_masks:
                mask_path = os.path.join(annotations_dir, os.path.splitext(data['filename'])[0] + '_object' + str(idx) + '.png')
                with tf.gfile.GFile(mask_path, 'rb') as fid:
                    encoded_mask_png = fid.read()
                encoded_png_io = io.BytesIO(encoded_mask_png)
                mask = PIL.Image.open(encoded_png_io)
                if mask.format != 'PNG':
                    raise ValueError('Mask format not PNG')
                mask_np = np.asarray(mask)
                # binarize: foreground where the mask pixel is exactly 255
                mask_remapped = (mask_np == 255).astype(np.uint8)
                masks.append(mask_remapped)
    feature_dict = {
        'image/height': dataset_util.int64_feature(height),
        'image/width': dataset_util.int64_feature(width),
        'image/filename': dataset_util.bytes_feature(
            data['filename'].encode('utf8')),
        'image/source_id': dataset_util.bytes_feature(
            data['filename'].encode('utf8')),
        'image/key/sha256': dataset_util.bytes_feature(key.encode('utf8')),
        'image/encoded': dataset_util.bytes_feature(encoded_jpg),
        'image/format': dataset_util.bytes_feature('jpeg'.encode('utf8')),
        'image/object/bbox/xmin': dataset_util.float_list_feature(xmin),
        'image/object/bbox/xmax': dataset_util.float_list_feature(xmax),
        'image/object/bbox/ymin': dataset_util.float_list_feature(ymin),
        'image/object/bbox/ymax': dataset_util.float_list_feature(ymax),
        'image/object/class/text': dataset_util.bytes_list_feature(classes_text),
        'image/object/class/label': dataset_util.int64_list_feature(classes),
        'image/object/difficult': dataset_util.int64_list_feature(difficult_obj),
        'image/object/truncated': dataset_util.int64_list_feature(truncated),
        'image/object/view': dataset_util.bytes_list_feature(poses),
    }
    if include_masks:
        # re-encode each binary mask as PNG bytes
        encoded_mask_png_list = []
        for mask in masks:
            img = PIL.Image.fromarray(mask)
            output = io.BytesIO()
            img.save(output, format='PNG')
            encoded_mask_png_list.append(output.getvalue())
        feature_dict['image/object/mask'] = (dataset_util.bytes_list_feature(encoded_mask_png_list))
    example = tf.train.Example(features=tf.train.Features(feature=feature_dict))
    return example
def _aws_headers(service, access_key_id, secret_access_key, region, host, method, path, query, pre_auth_headers, payload):
    """Build the header dict for an AWS Signature Version 4 signed request.

    Args:
        service: AWS service code used in the credential scope (e.g. 's3').
        access_key_id / secret_access_key: AWS credentials.
        region: AWS region for the credential scope.
        host: request Host header value (signed but not returned here).
        method: HTTP method, used verbatim in the canonical request.
        path: request path, URL-quoted for the canonical URI.
        query: dict of query parameters (keys and values are quoted and
            sorted by key for the canonical query string).
        pre_auth_headers: headers the caller already intends to send; they
            are all included in the signature and echoed in the result.
        payload: raw request body bytes; hashed into x-amz-content-sha256.

    Returns:
        dict of pre_auth_headers plus x-amz-date, x-amz-content-sha256 and
        the computed Authorization header.
    """
    algorithm = 'AWS4-HMAC-SHA256'
    now = datetime.datetime.utcnow()
    amzdate = now.strftime('%Y%m%dT%H%M%SZ')
    datestamp = now.strftime('%Y%m%d')
    credential_scope = f'{datestamp}/{region}/{service}/aws4_request'
    # Canonical form requires lowercase, trimmed header names/values.
    headers_lower = {
        header_key.lower().strip(): header_value.strip()
        for header_key, header_value in pre_auth_headers.items()
    }
    required_headers = ['host', 'x-amz-content-sha256', 'x-amz-date']
    # Signed header list must be sorted; it covers caller headers plus the
    # three headers this function adds itself.
    signed_header_keys = sorted(
        [header_key for header_key in headers_lower.keys()] + required_headers)
    signed_headers = ';'.join(signed_header_keys)
    payload_hash = hashlib.sha256(payload).hexdigest()

    def signature():
        # Compute the SigV4 signature hex string for this request.
        def canonical_request():
            header_values = {
                **headers_lower,
                'host': host,
                'x-amz-content-sha256': payload_hash,
                'x-amz-date': amzdate,
            }
            canonical_uri = urllib.parse.quote(path, safe='/~')
            query_keys = sorted(query.keys())
            canonical_querystring = '&'.join([
                urllib.parse.quote(key, safe='~') + '=' + urllib.parse.quote(query[key], safe='~')
                for key in query_keys
            ])
            # Each canonical header line ends in '\n', including the last.
            canonical_headers = ''.join([
                header_key + ':' + header_values[header_key] + '\n'
                for header_key in signed_header_keys
            ])
            return f'{method}\n{canonical_uri}\n{canonical_querystring}\n' + \
                f'{canonical_headers}\n{signed_headers}\n{payload_hash}'

        def sign(key, msg):
            return hmac.new(key, msg.encode('utf-8'), hashlib.sha256).digest()

        string_to_sign = \
            f'{algorithm}\n{amzdate}\n{credential_scope}\n' + \
            hashlib.sha256(canonical_request().encode('utf-8')).hexdigest()
        # Derive the signing key via the chained HMACs mandated by SigV4:
        # date -> region -> service -> 'aws4_request'.
        date_key = sign(('AWS4' + secret_access_key).encode('utf-8'), datestamp)
        region_key = sign(date_key, region)
        service_key = sign(region_key, service)
        request_key = sign(service_key, 'aws4_request')
        return sign(request_key, string_to_sign).hex()

    return {
        **pre_auth_headers,
        'x-amz-date': amzdate,
        'x-amz-content-sha256': payload_hash,
        'Authorization': (
            f'{algorithm} Credential={access_key_id}/{credential_scope}, ' +
            f'SignedHeaders={signed_headers}, Signature=' + signature()),
    }
import hashlib import xlrd import numpy as np from passlib.hash import scrypt import scrypt as sc h1 = sc.hash('password',"r") print("h1",h1) print("int",int.from_bytes(h1, byteorder='little', signed=True)) m = hashlib.sha256() m.update("password".encode('utf-8')) print(int(m.hexdigest(), 16)) m = hashlib.sha3_256() m.update("password".encode('utf-8')) print(int(m.hexdigest(), 16))
def khash(data): '''Single place to change hashes of attachments''' return hashlib.sha256(data.encode('ascii')).hexdigest()
from hashlib import sha256 x = 5 y = 0 # TBD while sha256(f'{x*y}'.encode()).hexdigest()[-1] != '0': y += 1 print(f'The solution is y = {y}')
def hash(self): print(json.dumps(self.data)) return hashlib.sha256( (self.timestamp+ str(self.nonce) + json.dumps(self.data)+self.previous_hash).encode()).hexdigest()
def verify_aws_secret_access_key(key, secret):  # pragma: no cover
    """
    Verify an AWS key pair by hand-signing an STS GetCallerIdentity call.

    Using requests, because we don't want to require boto3 for this one
    optional verification step.

    Loosely based off:
    https://docs.aws.amazon.com/general/latest/gr/sigv4-signed-request-examples.html

    :type key: str
    :type secret: str
    :returns: False when STS rejects the credentials (HTTP 403), True otherwise.
    """
    now = datetime.utcnow()
    amazon_datetime = now.strftime('%Y%m%dT%H%M%SZ')
    headers = {
        # This is a required header for the signing process
        'Host': 'sts.amazonaws.com',
        'X-Amz-Date': amazon_datetime,
    }
    body = {
        'Action': 'GetCallerIdentity',
        'Version': '2011-06-15',
    }

    # Step #1: Canonical Request
    signed_headers = ';'.join(
        map(
            lambda x: x.lower(),
            headers.keys(),
        ),
    )
    canonical_request = textwrap.dedent("""
        POST
        /

        {headers}

        {signed_headers}
        {hashed_payload}
    """)[1:-1].format(
        headers='\n'.join([
            '{}:{}'.format(header.lower(), value)
            for header, value in headers.items()
        ]),
        signed_headers=signed_headers,
        # Poor man's method, but works for this use case.
        hashed_payload=hashlib.sha256(
            '&'.join([
                '{}={}'.format(header, value)
                for header, value in body.items()
            ]).encode('utf-8'),
        ).hexdigest(),
    )

    # Step #2: String to Sign
    region = 'us-east-1'
    scope = '{request_date}/{region}/sts/aws4_request'.format(
        request_date=now.strftime('%Y%m%d'),
        # STS is a global service; this is just for latency control.
        region=region,
    )
    string_to_sign = textwrap.dedent("""
        AWS4-HMAC-SHA256
        {request_datetime}
        {scope}
        {hashed_canonical_request}
    """)[1:-1].format(
        request_datetime=amazon_datetime,
        scope=scope,
        hashed_canonical_request=hashlib.sha256(
            canonical_request.encode('utf-8'),
        ).hexdigest(),
    )

    # Step #3: Calculate signature
    # Chained HMAC key derivation: date -> region -> service -> aws4_request.
    signing_key = _sign(
        _sign(
            _sign(
                _sign(
                    'AWS4{}'.format(secret).encode('utf-8'),
                    now.strftime('%Y%m%d'),
                ),
                region,
            ),
            'sts',
        ),
        'aws4_request',
    )
    signature = _sign(
        signing_key,
        string_to_sign,
        hex=True,
    )

    # Step #4: Add to request headers
    headers['Authorization'] = (
        'AWS4-HMAC-SHA256 '
        'Credential={access_key}/{scope}, '
        'SignedHeaders={signed_headers}, '
        'Signature={signature}'
    ).format(
        access_key=key,
        scope=scope,
        signed_headers=signed_headers,
        signature=signature,
    )

    # Step #5: Finally send the request
    response = requests.post(
        'https://sts.amazonaws.com',
        headers=headers,
        data=body,
    )
    # 403 means STS rejected the signature, i.e. the key pair is invalid.
    if response.status_code == 403:
        return False
    return True
def generate_genesis(): hash = hashlib.sha256(str('Today'+str(0)+ json.dumps('data') + 'xyz').encode()).hexdigest() return Block('Today' ,0, 'data' , hash , 'xyz')
def H(x): return sha256(x).digest()
def redirect_payfort(self, **post):
    """ create payfort payment link and redirect to Payfort Page.
    -------------------------------
    :return: redirect to payfort payment getway page.

    Reads ORDERID, AMOUNT and payment_method from *post*, looks up the
    payer's email from whichever record matches the order id, adds the
    configured Payfort charges, signs the request, and returns an
    auto-submitting HTML form that posts to the Payfort gateway.
    """
    #----------------new code---------------------
    currency = 'AED'
    #----------------new code---------------------
    env = request.env(user=SUPERUSER_ID)
    user_id = 1
    if user_id:
        res_user_obj = env['res.users']
        res_user_obj_rec = res_user_obj.sudo().search(
            [('id', '=', user_id)], limit=1)
        #----------------new code---------------------
        currency = str(res_user_obj_rec.company_id.currency_id.name)
        #----------------new code---------------------
    payfort_conf_obj = env['payfort.config']
    payfort_conf_rec = payfort_conf_obj.sudo().search(
        [('active', '=', 'True')], limit=1)
    if payfort_conf_rec.id:
        order_id = str(post.get('ORDERID'))
        total_amount = float(post.get('AMOUNT')) or 0.00
        payment_method = str(post.get('payment_method'))
        #------------------------get partner email------------------------------#
        # The order id may belong to any of several document types; probe
        # each model in turn and take the first matching record's email.
        reg_ids = env['registration'].sudo().search([
            ('registration_number', '=', order_id)
        ])
        invoice_ids = env['account.invoice'].sudo().search(
            [('invoice_number', '=', order_id)], limit=1)
        voucher_rec = env['account.voucher'].sudo().search(
            [('payfort_type', '=', True), ('voucher_number', '=', order_id)],
            limit=1)
        next_year_advance_fee_rec = env['next.year.advance.fee'].sudo(
        ).search([('order_id', '=', order_id)])
        re_registration_parent_rec = env[
            're.reg.waiting.responce.parents'].sudo().search(
                [('re_registration_number', '=', order_id)], limit=1)
        tc_student_rec = env['trensfer.certificate'].sudo().search(
            [('transfer_certificate_number', '=', order_id)], limit=1)
        # limit=1)
        customer_email = False
        if len(reg_ids) > 0:
            customer_email = reg_ids.parent_email
        elif len(invoice_ids) > 0:
            customer_email = invoice_ids.partner_id.email
        elif len(voucher_rec) > 0:
            customer_email = voucher_rec.partner_id.parents_email
        elif len(next_year_advance_fee_rec) > 0:
            customer_email = next_year_advance_fee_rec.partner_id.email
        elif len(tc_student_rec) > 0:
            customer_email = tc_student_rec.name.email
        elif len(re_registration_parent_rec) > 0:
            customer_email = re_registration_parent_rec.name.parents_email
        #------------------------get partner email------------------------------#
        #------------------------calculate payfort charge-----------------------#
        amount = 0.00
        payfort_charge_amount = 0
        payfort_transaction_charge = 0
        payfort_url = payfort_conf_rec.payfort_url
        payfort_bank_charge = 0
        access_code = str(payfort_conf_rec.access_code)
        merchant_identifier = str(payfort_conf_rec.merchant_identifier)
        return_url = str(payfort_conf_rec.return_url)
        language = str(payfort_conf_rec.language)
        if payfort_conf_rec.id and payfort_conf_rec.charge > 0:
            # Percentage charge configured on the Payfort config record.
            payfort_charge_amount = (total_amount / 100) * payfort_conf_rec.charge
        if payfort_conf_rec.transaction_charg_amount > 0.00:
            payfort_transaction_charge = payfort_conf_rec.transaction_charg_amount
        # if payfort_conf_rec.id and payfort_conf_rec.bank_service_charge > 0:
        #     payfort_bank_charge = (total_amount / 100 ) * payfort_conf_rec.bank_service_charge
        total_payfort_charge = payfort_charge_amount + payfort_transaction_charge + payfort_bank_charge
        #------------------------calculate payfort charge-----------------------#
        #------------------------add payfort charge in amount-------------------#
        # Payfort expects the amount in minor units (x100 of rounded total).
        total_payable_amount = total_amount + total_payfort_charge
        total_net_amount = round(total_payable_amount)
        amount = int(total_net_amount * 100)
        #------------------------add payfort charge in amount-------------------#
        #----------------Redirection to sbcheckout page-------------------------#
        command = "PURCHASE"
        cart_details = '{"cart_items":[{"item_name":"Xbox360","item_description":"Xbox","item_quantity":"1","item_price":"300","item_image":"http://image.com"}],"sub_total":"300"}'
        # if payment_method == 'MASTERPASS':
        # NOTE(review): the MASTERPASS guard above is commented out, so the
        # block below runs (and returns) unconditionally — the
        # VISA_CHECKOUT and CREDIT_CARD branches further down appear
        # unreachable; confirm this is intended.
        digital_wallet = payment_method
        # Signature preimage: request params sorted by name, wrapped in the
        # SHA request phrase ('TESTSHAIN' on both ends).
        # NOTE(review): hashlib.sha256() of a str raises TypeError on
        # Python 3 — this code presumably targets Python 2; verify.
        message = 'TESTSHAINaccess_code=%samount=%scart_details=%scommand=%scurrency=%scustomer_email=%sdigital_wallet=%slanguage=%smerchant_identifier=%smerchant_reference=%sreturn_url=%sTESTSHAIN' % (
            access_code, amount, cart_details, command, currency,
            customer_email, digital_wallet, language, merchant_identifier,
            order_id, return_url)
        signature = hashlib.sha256(message)
        # Auto-submitting form that posts the signed fields to Payfort.
        return """
        <html>
        <body>
        <form action=%s method='post' id="payu" name="payu">
        <input type="hidden" name="access_code" value="%s" />
        <input type="hidden" name="amount" value="%s" />
        <input type="hidden" name="cart_details" value='{"cart_items":[{"item_name":"Xbox360","item_description":"Xbox","item_quantity":"1","item_price":"300","item_image":"http://image.com"}],"sub_total":"300"}'/>
        <input type="hidden" name="command" value="%s" />
        <input type="hidden" name="currency" value="%s" />
        <input type="hidden" name="customer_email" value ="%s" />
        <input type="hidden" name="digital_wallet" value ="%s" />
        <input type="hidden" name="language" value="%s" />
        <input type="hidden" name="merchant_identifier" value="%s" />
        <input type="hidden" name="merchant_reference" value="%s" />
        <input type="hidden" name="return_url" value="%s" />
        <input type="hidden" name="signature" value="%s" />
        </form>
        </body>
        <script type='text/javascript'>
        window.onload = function(){
        document.forms['payu'].submit()
        }
        </script>
        </html>
        """ % (
            payfort_url, access_code, amount, command, currency,
            customer_email, digital_wallet, language, merchant_identifier,
            order_id, return_url, signature.hexdigest())
        if payment_method == 'VISA_CHECKOUT':
            digital_wallet = payment_method
            message = 'TESTSHAINaccess_code=%samount=%scommand=%scurrency=%scustomer_email=%sdigital_wallet=%slanguage=%smerchant_identifier=%smerchant_reference=%sreturn_url=%sTESTSHAIN' % (
                access_code, amount, command, currency, customer_email,
                digital_wallet, language, merchant_identifier, order_id,
                return_url)
            signature = hashlib.sha256(message)
            # cart_details = json.dumps(cart_details)
            return """
            <html>
            <body>
            <form action=%s method='post' id="payu" name="payu">
            <input type="hidden" name="access_code" value="%s" />
            <input type="hidden" name="amount" value="%s" />
            <input type="hidden" name="command" value="%s" />
            <input type="hidden" name="currency" value="%s" />
            <input type="hidden" name="customer_email" value ="%s" />
            <input type="hidden" name="digital_wallet" value ="%s" />
            <input type="hidden" name="language" value="%s" />
            <input type="hidden" name="merchant_identifier" value="%s" />
            <input type="hidden" name="merchant_reference" value="%s" />
            <input type="hidden" name="return_url" value="%s" />
            <input type="hidden" name="signature" value="%s" />
            </form>
            </body>
            <script type='text/javascript'>
            window.onload = function(){
            document.forms['payu'].submit()
            }
            </script>
            </html>
            """ % (
                payfort_url, access_code, amount, command, currency,
                customer_email, digital_wallet, language,
                merchant_identifier, order_id, return_url,
                signature.hexdigest())
        if payment_method == 'CREDIT_CARD':
            message = 'TESTSHAINaccess_code=%samount=%scommand=%scurrency=%scustomer_email=%slanguage=%smerchant_identifier=%smerchant_reference=%sreturn_url=%sTESTSHAIN' % (
                access_code, amount, command, currency, customer_email,
                language, merchant_identifier, order_id, return_url)
            signature = hashlib.sha256(message)
            return """
            <html>
            <body>
            <form action=%s method='post' id="payu" name="payu">
            <input type="hidden" name="access_code" value="%s" />
            <input type="hidden" name="amount" value="%s" />
            <input type="hidden" name="command" value="%s" />
            <input type="hidden" name="currency" value="%s" />
            <input type="hidden" name="customer_email" value ="%s" />
            <input type="hidden" name="language" value="%s" />
            <input type="hidden" name="merchant_identifier" value="%s" />
            <input type="hidden" name="merchant_reference" value="%s" />
            <input type="hidden" name="return_url" value="%s" />
            <input type="hidden" name="signature" value="%s" />
            </form>
            </body>
            <script type='text/javascript'>
            window.onload = function(){
            document.forms['payu'].submit()
            }
            </script>
            </html>
            """ % (
                payfort_url, access_code, amount, command, currency,
                customer_email, language, merchant_identifier, order_id,
                return_url, signature.hexdigest())
def list_hash(lst): return hashlib.sha256(json.dumps(list(lst), sort_keys=True).encode()).hexdigest()
def get_SHA256(string): """ Compute the SHA256 signature of a string. """ return hashlib.sha256(string.encode("utf-8")).hexdigest()
def get_file_sha256(binary: AnyPath): """Hash a binary with the SHA-256 algorithm.""" # This doesn't do any sort of buffering; but our binaries are pretty small # in comparison to what we're storing as metadata, anyway with open(binary, "rb") as fp: return hashlib.sha256(fp.read()).hexdigest()
hasher.update(block) return hasher.hexdigest() if ashexstr else hasher.digest() def file_as_blockiter(afile, blocksize=65536): with afile: block = afile.read(blocksize) while len(block) > 0: yield block block = afile.read(blocksize) resources_file = {} resources_array = [] for filename in glob.iglob(work_dir + '**/*', recursive=True): if os.path.isfile(filename): hexSha256Half = hash_bytestr_iter(file_as_blockiter(open(os.path.abspath(filename), 'rb')), hashlib.sha256(), True)[:16] item = {} item['id'] = os.path.relpath(filename, root_dir).replace(os.sep, '_').split('.')[0] item['path'] = os.path.relpath(filename, root_dir) item['checksum']=hexSha256Half resources_array.append(item) print(item) resources_file['data'] = resources_array hasher = hashlib.sha256() hasher.update(json.dumps(resources_array, sort_keys=True).encode('utf-8')) resources_file['checksum'] = hasher.hexdigest()[:16] with open(os.path.join(result_dir, "description.json"), 'wt') as destFile: destFile.write(json.dumps(resources_file, sort_keys=True))
def get_hash(self): new_hash = str(self.index)+str(self.previous_hash)+str(self.timestamp)+str(self.transactions) return str(hashlib.sha256(new_hash.encode('utf-8')).hexdigest())
def bootstrap_dependency(settings, url, hash_, priority, version, on_complete):
    """
    Downloads a dependency from a hard-coded URL - only used for bootstrapping
    _ssl on Linux and ST2/Windows

    :param settings:
        Package Control settings

    :param url:
        The non-secure URL to download from

    :param hash_:
        The sha256 hash of the package file

    :param version:
        The version number of the package

    :param priority:
        A three-digit number that controls what order packages are injected in

    :param on_complete:
        A callback to be run in the main Sublime thread, so it can use the API
    """

    package_filename = path.basename(urlparse(url).path)
    package_basename, _ = path.splitext(package_filename)
    package_dir = path.join(sys_path.packages_path, package_basename)

    version = SemVer(version)

    # The package has already been installed. Don't reinstall unless we have
    # a newer version.
    if path.exists(package_dir) and loader.exists(package_basename):
        try:
            dep_metadata_path = path.join(package_dir, 'dependency-metadata.json')
            with open_compat(dep_metadata_path, 'r') as f:
                metadata = json.loads(read_compat(f))
            old_version = SemVer(metadata['version'])
            if version <= old_version:
                # Already up to date: just mark the bootstrap as done.
                sublime.set_timeout(mark_bootstrapped, 10)
                return
            console_write(
                u'''
                Upgrading bootstrapped dependency %s to %s from %s
                ''',
                (package_basename, version, old_version)
            )
        except (KeyError, FileNotFoundError):
            # If we can't determine the old version, install the new one
            pass

    with downloader(url, settings) as manager:
        try:
            console_write(
                u'''
                Downloading bootstrapped dependency %s
                ''',
                package_basename
            )
            data = manager.fetch(url, 'Error downloading bootstrapped dependency %s.' % package_basename)
            console_write(
                u'''
                Successfully downloaded bootstraped dependency %s
                ''',
                package_basename
            )
            data_io = BytesIO(data)
        except (DownloaderException) as e:
            console_write(e)
            return

    # Verify the download against the expected sha256 before unpacking.
    data_hash = hashlib.sha256(data).hexdigest()
    if data_hash != hash_:
        console_write(
            u'''
            Error validating bootstrapped dependency %s (got %s instead of %s)
            ''',
            (package_basename, data_hash, hash_)
        )
        return

    try:
        data_zip = zipfile.ZipFile(data_io, 'r')
    except (zipfile.BadZipfile):
        console_write(
            u'''
            Error unzipping bootstrapped dependency %s
            ''',
            package_filename
        )
        return

    if not path.exists(package_dir):
        os.makedirs(package_dir, 0o755)
    else:
        clear_directory(package_dir)

    code = None
    for zip_path in data_zip.namelist():
        dest = zip_path

        if not isinstance(dest, str_cls):
            dest = dest.decode('utf-8', 'strict')

        dest = dest.replace('\\', '/')

        # loader.py is included for backwards compatibility. New code should use
        # loader.code with Python inside of it. We no longer use loader.py since
        # we can't have any files ending in .py in the root of a package,
        # otherwise Sublime Text loads it as a plugin and then the dependency
        # path added to sys.path and the package path loaded by Sublime Text
        # conflict and there will be errors when Sublime Text tries to
        # initialize plugins. By using loader.code, developers can git clone a
        # dependency into their Packages folder without issue.
        if dest in set([u'loader.py', u'loader.code']):
            code = data_zip.read(zip_path).decode('utf-8')
            if dest == u'loader.py':
                continue

        dest = path.join(package_dir, dest)

        if dest[-1] == '/':
            # Zip entries ending in '/' are directories.
            if not path.exists(dest):
                os.makedirs(dest, 0o755)
        else:
            dest_dir = path.dirname(dest)
            if not path.exists(dest_dir):
                os.makedirs(dest_dir, 0o755)
            with open(dest, 'wb') as f:
                f.write(data_zip.read(zip_path))

    data_zip.close()

    loader.add_or_update(priority, package_basename, code)

    console_write(
        u'''
        Successfully installed bootstrapped dependency %s
        ''',
        package_basename
    )

    sublime.set_timeout(mark_bootstrapped, 10)
    if on_complete:
        sublime.set_timeout(on_complete, 100)
def generate_session_id(self): """ :param session: the new session instance for which an ID will be generated and then assigned """ return sha256(sha512(urandom(20)).digest()).hexdigest()
def get_bytes_sha256(contents: bytes): """Hash a byte string with the SHA-256 algorithm.""" return hashlib.sha256(contents).hexdigest()
# Step 4: Create the canonical headers and signed headers. Header names # must be trimmed and lowercase, and sorted in code point order from # low to high. Note that there is a trailing \n. canonical_headers = 'host:' + host + '\n' + 'x-amz-date:' + amzdate + '\n' # Step 5: Create the list of signed headers. This lists the headers # in the canonical_headers list, delimited with ";" and in alpha order. # Note: The request can include any headers; canonical_headers and # signed_headers lists those that you want to be included in the # hash of the request. "Host" and "x-amz-date" are always required. signed_headers = 'host;x-amz-date' # Step 6: Create payload hash (hash of the request body content). For GET # requests, the payload is an empty string (""). payload_hash = hashlib.sha256(('').encode('utf-8')).hexdigest() # Step 7: Combine elements to create canonical request canonical_request = method + '\n' + canonical_uri + '\n' + canonical_querystring + '\n' + canonical_headers + '\n' + signed_headers + '\n' + payload_hash # ************* TASK 2: CREATE THE STRING TO SIGN************* # Match the algorithm to the hashing algorithm you use, either SHA-1 or # SHA-256 (recommended) algorithm = 'AWS4-HMAC-SHA256' credential_scope = datestamp + '/' + region + '/' + service + '/' + 'aws4_request' string_to_sign = algorithm + '\n' + amzdate + '\n' + credential_scope + '\n' + hashlib.sha256( canonical_request.encode('utf-8')).hexdigest() # ************* TASK 3: CALCULATE THE SIGNATURE ************* # Create the signing key using the function defined above. signing_key = getSignatureKey(secret_key, datestamp, region, service)
def sha256(msg): return int(hashlib.sha256(msg.encode('utf-8')).hexdigest(), 16)
def hash_password(password): return hashlib.sha256(password.encode()).hexdigest()
# add inside this flag at random bytes for each chal for i in chals: regex_done = False # we calculate the location of the identifying bytes salt_chal = small_hash(i + SECRET) salts = [] for z in range(0, 4): salts = add_salt(salts, salt_chal[z] % 32) for y in team_names: uni_hash = small_hash(i + y + SECRET) # make it a bit harder to guess the format of the sha2 chal_name = i + SECRET hash_final = bytearray( bytes.fromhex(hashlib.sha256(chal_name.encode()).hexdigest())) # we replace by the identifying bytes hash_final[salts[0]] = uni_hash[0] hash_final[salts[1]] = uni_hash[1] hash_final[salts[2]] = uni_hash[2] hash_final[salts[3]] = uni_hash[3] hash_final_str = hash_final.hex() uni_bytes.append([ salts[0], uni_hash[0], salts[1], uni_hash[1], salts[2], uni_hash[2], salts[3], uni_hash[3], y, i ]) flag = "GY{" + hash_final_str + "}" flags.append([y, flag]) for i in submissions: found_flag = False
def hash256(byte_str): sha256 = hashlib.sha256() sha256.update(byte_str) sha256d = hashlib.sha256() sha256d.update(sha256.digest()) return sha256d.digest()[::-1]
def path_key(path): return sha256(as_bytes(path)).hexdigest()
from __future__ import print_function import base64 import os import hashlib import struct password = '******' salt = os.urandom(4) tmp0 = salt + password.encode('utf-8') tmp1 = hashlib.sha256(tmp0).digest() salted_hash = salt + tmp1 pass_hash = base64.b64encode(salted_hash) print(pass_hash)
def deriveChecksum(self, s): """ Derive the checksum """ checksum = hashlib.sha256(compat_bytes(s, "ascii")).hexdigest() return checksum[:4]
def commit(winning_door, witness): m = sha256() m.update(u.zpad(u.encode_int(winning_door), 1)) m.update(witness) return m.digest()
def hash(self) -> str: return hashlib.sha256(self.serialize()).hexdigest()