def init_master(self): """ Generates the master key to encrypt all documents later Will ask for password to initialize and will put into the collection config :returns: bool """ # Configuration collection coll = self.collection # generate random 256 byte to use as plain masterkey masterkey = blake2b(digest_size=32) masterkey.update(nacl.utils.random(256)) masterkey = masterkey.digest() # ask for password to de/encrypt the masterkey key = blake2b(digest_size=32) utils.log_info("Encryption has not been initialized yet. Please enter your password.") pw = click.prompt("%s Set Password" % utils.query_prefix, type=str, hide_input=True, confirmation_prompt=True) key.update(pw.encode("utf-8")) key = key.digest() utils.log_info("Password set successfully. Dont forget it, otherwise you are f****d.") # encrypt masterkey with user password box = nacl.secret.SecretBox(key) nonce = nacl.utils.random(nacl.secret.SecretBox.NONCE_SIZE) enc = box.encrypt(masterkey, nonce, encoder=nacl.encoding.HexEncoder) r = coll.insert({"masterkey": enc}) return True
def __init__(self, public_key: bytes = None, public_signing_key: bytes = None,
             private_key: bytes = None, private_signing_seed: bytes = None,
             private_nonce: bytes = None, private_signing_nonce: bytes = None):
    """
    Create a new Gluino Key.

    This should not be directly called; use `Key.from_data` or `Key.generate`
    to generate a new key.

    :param public_key: The public key data to use. Can be extracted from the private key.
    :param public_signing_key: The public signing key to use. Can be extracted from the private key.
    :param private_key: The private key to use.
    :param private_signing_seed: The seed to use to create
    :param private_nonce: Nonce associated with the private key (stored as-is).
    :param private_signing_nonce: Nonce associated with the signing key (stored as-is).
    """
    # Define a None fingerprint up here.
    self.fingerprint = None

    # Check if the public key is None.
    # It can be None, as when loading a private key the public key isn't immediately available.
    # The public key is always loaded either:
    # 1) whenever an operation is used with the private key
    # 2) whenever `Key.pub_from_priv(private_key, private_signing_key, passphrase)` is called.
    if public_key is not None:
        self._public_key = public.PublicKey(public_key)
        # Calculate the fingerprint (20-byte BLAKE2b of the raw public key).
        h = blake2b(digest_size=20)
        h.update(public_key)
        self.fingerprint = h.hexdigest().upper()
    else:
        self._public_key = None
    self._public_key_raw = public_key

    if public_signing_key is not None:
        self._public_signing_key = signing.VerifyKey(public_signing_key)
    else:
        self._public_signing_key = None
    self._public_signing_key_raw = public_signing_key

    # We don't actually store an instance of the private key.
    # Instead, it is decrypted when we require it via passing in the passphrase.
    # This ensures the decrypted private key isn't available to pluck out of the air whenever you want it.
    self._private_key_raw = private_key
    if self._private_key_raw is not None:
        # NOTE(review): when a private key is given, its fingerprint overwrites
        # any fingerprint computed from the public key above -- confirm both
        # derivations are meant to agree.
        h = blake2b(digest_size=20)
        h.update(private_key)
        self.fingerprint = h.hexdigest().upper()

    self._private_signing_seed = private_signing_seed

    # Set the current nonces.
    self._private_nonce = private_nonce
    self._private_signing_nonce = private_signing_nonce

    # Define the attributed for the key.
    self.userid = None
    self.self_signature = None
    self.signatures = {}
    self.metadata = {}
def generate():
    """Emit one random BLAKE2b test vector: a tab-separated line of
    hex(message), hex(key), output length and the resulting hex digest."""
    # Random message of 0..MAX_MESSAGE_LENGTH bytes.
    messageLen = int(random.uniform(0, MAX_MESSAGE_LENGTH + 1))
    message = os.urandom(messageLen)
    # Random key of 0..MAX_KEY_LENGTH bytes (may be empty -> unkeyed hash).
    keyLen = int(random.uniform(0, MAX_KEY_LENGTH + 1))
    key = os.urandom(keyLen)
    # Output length is at least 1 byte.
    outLen = int(random.uniform(1, MAX_OUTPUT_LENGTH + 1))
    if key:
        out = blake2b(data = message, key = key, digest_size = outLen)
    else:
        out = blake2b(data = message, digest_size = outLen)
    # NOTE(review): the `data=` keyword is the pyblake2 API; hashlib.blake2b
    # takes data positionally only.
    print("%s\t%s\t%u\t%s" % (binascii.hexlify(message), binascii.hexlify(key), outLen, out.hexdigest()))
def preview(src, kood):
    """Redirect to the URL of the pasted source without persisting it.

    The content id is the BLAKE2b digest of the UTF-8 encoded source, the
    same id scheme used by submit().

    :param src: pasted source text
    :param kood: requested lexer alias (unused here; kept for interface parity
        with submit())
    :returns: flask redirect response to the file's URL
    """
    # Removed leftover debug print and dead computations (lexer, patch id,
    # ctime) that were copied from submit() but never used here; only the
    # content id feeds the redirect.
    file_id = blake2b(data=src.encode("utf-8")).digest()
    return flask.redirect(format_url(file_id))
def serialize(self):
    """
    Produce serialized output.

    Packs the dumped key data with msgpack, base64-encodes it, and appends a
    base64-encoded 8-byte BLAKE2b checksum, all wrapped in an ASCII-armoured
    key block.
    """
    packed = msgpack.packb(self.dump(), use_bin_type=True,
                           default=serialize_gluino_type)

    # 8-byte BLAKE2b checksum over the packed bytes.
    checksummer = blake2b(digest_size=8)
    checksummer.update(packed)

    body_b64 = base64.b64encode(packed).decode()
    checksum_b64 = base64.b64encode(checksummer.digest()).decode()

    # Wrap the body into 64-character lines.
    wrapped = '\n'.join(body_b64[pos:pos + 64]
                        for pos in range(0, len(body_b64), 64))

    # Assemble the armoured key block.
    return """---BEGIN GLUINO KEY OBJECT---
Version: Gluino v1
{}
{}
---END GLUINO KEY OBJECT---""".format(wrapped, checksum_b64)
def make_file_id(self, file_name):
    """Return a content-addressed id for *file_name*.

    The id is the 20-byte (40 hex chars) BLAKE2b digest of the file's raw
    bytes with the original extension appended.

    :param file_name: path of the file to fingerprint
    :returns: str -- "<40-hex-digest><extension>"
    """
    extension = os.path.splitext(file_name)[1]
    # Bug fix: read in binary mode. blake2b requires bytes (text mode yields
    # str and raises TypeError), and text mode would also decode/normalise
    # newlines, changing the digest.
    with open(file_name, 'rb') as f:
        contents = f.read()
    blake2hash = blake2b(contents, digest_size=20).hexdigest()
    return blake2hash + extension
def __init__(self, filename, comment, metadata, python_version, filetype):
    """Collect upload metadata for *filename* and compute its md5, sha256
    and (when pyblake2 is available) blake2b-256 digests in one pass."""
    self.filename = filename
    self.basefilename = os.path.basename(filename)
    self.comment = comment
    self.metadata = metadata
    self.python_version = python_version
    self.filetype = filetype
    self.safe_name = pkg_resources.safe_name(metadata.name)
    # Companion detached GPG signature paths (file may not exist yet).
    self.signed_filename = self.filename + '.asc'
    self.signed_basefilename = self.basefilename + '.asc'
    self.gpg_signature = None
    blake2_256_hash = None
    if pyblake2 is not None:
        # 256 bits -> 32-byte digest.
        blake2_256_hash = pyblake2.blake2b(digest_size=256 // 8)
    # NOTE(sigmavirus24): We may or may not be able to use blake2 so let's
    # either use the methods or lambdas to do nothing.
    blake_update = getattr(blake2_256_hash, 'update', lambda *args: None)
    blake_hexdigest = getattr(blake2_256_hash, 'hexdigest', lambda: None)
    md5_hash = hashlib.md5()
    sha2_hash = hashlib.sha256()
    # Stream the file once, feeding every hasher per chunk.
    with open(filename, "rb") as fp:
        for content in iter(lambda: fp.read(io.DEFAULT_BUFFER_SIZE), b''):
            md5_hash.update(content)
            sha2_hash.update(content)
            blake_update(content)
    self.md5_digest = md5_hash.hexdigest()
    self.sha2_digest = sha2_hash.hexdigest()
    # None when pyblake2 is unavailable.
    self.blake2_256_digest = blake_hexdigest()
def decrypt_content(self, content): """ Will decrypt the content that is given as argument using the masterkey from crypto class. :content: str (encrypted and encoded in hexdigest) :returns: str (plaintext) """ # init new blake2b keystore and hash # masterkey try: key = blake2b(digest_size=32) key.update(self._masterkey) key = key.digest() except TypeError: sys.exit(1) box = nacl.secret.SecretBox(key) content = content.encode("utf-8") try: plain = box.decrypt(ciphertext=content, encoder=nacl.encoding.HexEncoder) return plain except nacl.exceptions.CryptoError: utils.log_error("Invalid Password") return False
def get_master(self, password): """ Will be triggered everytime the user tries to access an encrypted document from the database. Asks for his password, decrypts master key and returns the masterkey what will be an attribute from the crypto class :returns: str """ # Fetch encrypted masterkey from db coll = self.collection masterkey = coll.find_one({"masterkey": {"$exists": True}}) masterkey = masterkey["masterkey"] # hash input pw key = blake2b(digest_size=32) if password is False: password = click.prompt("%s Password" % utils.query_prefix, type=str, hide_input=True) key.update(password.encode("utf-8")) key = key.digest() # init box box = nacl.secret.SecretBox(key) # use password to decrypt masterkey try: masterkey = box.decrypt(ciphertext=masterkey, encoder=nacl.encoding.HexEncoder) return masterkey except nacl.exceptions.CryptoError: utils.log_error("Invalid Password") sys.exit(1) return False
def encrypt_content(self, content): """ Will encrypt the content that is given with the masterkey from crypto class :content: str (plaintext) :returns: str (encrypted and encoded in hexdigest) """ # init new blake2b keystore # and get password as hex try: key = blake2b(digest_size=32) key.update(self._masterkey) key = key.digest() except TypeError: sys.exit(1) box = nacl.secret.SecretBox(key) # generate a nonce nonce = nacl.utils.random(nacl.secret.SecretBox.NONCE_SIZE) try: msg = box.encrypt(content, nonce, encoder=nacl.encoding.HexEncoder) return msg except nacl.exceptions.CryptoError: utils.log_error("Invalid Password") return False
def creatCoinScript(self, senz, coin):
    """Write a .scpp coin script for *coin* under the .coins/ directory.

    The file name is the 10-byte BLAKE2b hex digest of the coin id; the file
    records the coin id, timestamp, fixed IN/OUT public keys, amount and the
    signature carried by *senz*.

    :param senz: incoming senz message; only its .signature is recorded
    :param coin: coin identifier fed to the hash and written as ID
    """
    # reducse size to coin name sting
    coin_name = blake2b(digest_size=10)
    coin_name.update(coin)

    if not os.path.exists('.coins'):
        # first we have to create .coins/ directory if not exists
        try:
            os.makedirs('.coins')
        except OSError:
            # Lost a race with another writer; the directory now exists.
            logger.info('coins exists')

    curdate = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    # Bug fix: use a context manager so the handle is closed even if a
    # write fails (the old open()/close() pair leaked on exceptions).
    with open('.coins/' + coin_name.hexdigest() + ".scpp", "w+") as f:
        f.write("ID :%s\r\n" % coin)
        f.write("TIME :%s\n" % curdate)
        f.write("IN :%s\n" % "MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDkDbcW0W/7rrvokEf1LVELjIt5KsUwU/3Gq2x+qwpD9RJ1aQhCYsA3ds5ED0pLZzz5vpDwAFMn0zIBEbEKIhdCEocyQhPIQf26G05uXhV6NLWtZqp0wZClx6awYZG9ux9oahF39j+/OJXW4hA2NFEndyH0HF8Cvzadj7x6eZF8rwIDAQAB")
        f.write("OUT :%s\n" % "MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDkMB9aRpJ+nxRvIoludQZYqlzvKLjVgH0PLjBB0TuyWXOb01mSq09Fqq/Em1RGzoUKvysxKwMuQy27fS5Bc+68pDhkZEuH3T2Okk8NH6XJgvO0ftnfE4IVHlLCHrtzUXjWBzlIHoZAmGfZ9OnFMWYrJrxEeF/apBWxZtDCFmuMgwIDAQAB")
        f.write("AMOUNT :%d\r\n" % 1)
        f.write("Signature :%s\r\n" % senz.signature)
def pick(self):
    """Return an owner id: roughly 0.1% of the time reuse a random existing
    owner, otherwise mint a new random BLAKE2b-derived id."""
    owner = None
    if self.owners and randrange(0, 1000) == 0:
        # Reuse one of the already-known owners.
        owner = sample(self.owners, 1)[0]
    else:
        # NOTE(review): blake2b(str(...), 16) passes str data and a positional
        # digest_size -- this matches the py2-era pyblake2 API, not hashlib.
        owner = blake2b(str(getrandbits(128)), 16).hexdigest()
    # Remember the owner (no-op for an already-known one, since it's a set).
    self.owners.add(owner)
    return owner
def get_hash(self) -> bytes:
    """
    Get the hash of the UserID.

    The digest covers the UTF-8 encoding of name immediately followed by
    email.
    """
    hasher = blake2b()
    hasher.update((self.name + self.email).encode())
    return hasher.digest()
def generate_hashes(n, k, header):
    """Expand the Equihash solution encoded in *header* into its solution
    indices and per-index hashes (personalised BLAKE2b over the 140-byte
    header prefix)."""
    # NOTE(review): digest_size=n/8 relies on Python 2 integer division;
    # under Python 3 this is a float and hashlib would reject it.
    digest = blake2b(digest_size=n/8, person=zcash_person(n, k))
    digest.update(header[:140])
    # NOTE(review): ord() always returns < 256, so the first branch is always
    # taken and the struct.unpack fallback (and the i = 143 case below) is
    # unreachable -- confirm the intended encoding of the index count.
    numIndices = ord(header[140]) if ord(header[140]) < 256 else struct.unpack('<H', header[141:143])
    assert numIndices == 2**k, 'Block header does not match Equihash parameters'
    i = 143 if ord(header[140]) == 256 else 141
    # Little-endian 32-bit solution indices, 4 bytes each.
    soln = [struct.unpack('<I', header[i:i+4])[0] for i in range(i, i+numIndices*4, 4)]
    hashes = [hash_xi(digest.copy(), xi).digest() for xi in soln]
    return soln, hashes
def login(request, redirect_field_name=REDIRECT_FIELD_NAME,
          _form_class=forms.LoginForm):
    """Render/handle the login form; on success, log the user in and
    redirect (safely) while setting a hashed, non-security user-id cookie."""
    # TODO: Logging in should reset request.user
    # TODO: Configure the login view as the default view for not having
    # permission to view something.
    user_service = request.find_service(IUserService, context=None)
    redirect_to = request.POST.get(redirect_field_name,
                                   request.GET.get(redirect_field_name))
    form = _form_class(request.POST, user_service=user_service)
    if request.method == "POST" and form.validate():
        # Get the user id for the given username.
        username = form.username.data
        userid = user_service.find_userid(username)
        # If the user-originating redirection url is not safe, then redirect to
        # the index instead.
        if (not redirect_to or
                not is_safe_url(url=redirect_to, host=request.host)):
            redirect_to = "/"
        # Actually perform the login routine for our user.
        headers = _login_user(request, userid)
        # Now that we're logged in we'll want to redirect the user to either
        # where they were trying to go originally, or to the default view.
        resp = HTTPSeeOther(redirect_to, headers=dict(headers))
        # We'll use this cookie so that client side javascript can Determine
        # the actual user ID (not username, user ID). This is *not* a security
        # sensitive context and it *MUST* not be used where security matters.
        #
        # We'll also hash this value just to avoid leaking the actual User IDs
        # here, even though it really shouldn't matter.
        resp.set_cookie(
            USER_ID_INSECURE_COOKIE,
            blake2b(
                str(userid).encode("ascii"),
                person=b"warehouse.userid",
            ).hexdigest().lower(),
        )
        return resp
    # GET or failed validation: re-render the form.
    return {
        "form": form,
        "redirect": {
            "field": REDIRECT_FIELD_NAME,
            "data": redirect_to,
        },
    }
def submit():
    """Handle the paste form: either preview the source, or persist it --
    content-addressed by BLAKE2b digests -- in the blobs/files tables and an
    lzma-compressed patch file, then redirect to the file's URL."""
    value = flask.request.form["button"]
    print(value)
    src = flask.request.form["copycat"]
    kood = flask.request.form["lexer"]
    if value=="preview":
        # NOTE(review): preview()'s return value is discarded and this
        # handler returns None -- confirm intended.
        preview(src, kood)
        return
    try:
        lexer = pygments.lexers.get_lexer_by_name(kood).aliases[0]
    except:
        # NOTE(review): bare except also hides unrelated errors.
        lexer = pygments.lexers.guess_lexer(src).aliases[0]
    # Full-content "patch" against the empty file.
    patch_ = "".join(diff("", src.splitlines(True)))
    # Content-addressed ids: BLAKE2b over source and patch bytes.
    file_id = blake2b(data=src.encode("utf-8")).digest()
    patch_id = blake2b(data=patch_.encode("utf-8")).digest()
    ctime = datetime.datetime.now(datetime.timezone.utc)
    db = get_db()
    try:
        db.execute("INSERT INTO blobs (id, patch, ctime) VALUES (?, ?, ?)",
                   (sqlite3.Binary(file_id), sqlite3.Binary(patch_id), ctime))
    except:
        # Insert failed -- acceptable only if the same blob already exists.
        blob = db.execute("SELECT id FROM blobs WHERE id = ?",
                          (sqlite3.Binary(file_id),)).fetchone()
        if blob == None or blob[0] != file_id:
            raise
    # Store the patch body compressed, named by the patch id.
    with lzma.open(os.path.join(app.config["BASE_PATH"],
                                binascii.hexlify(patch_id).decode("utf-8")), "w") as f:
        f.write(patch_.encode("utf-8"))
    user = verify()
    if user is None:
        owner_id = None
    else:
        owner_id = db.execute("SELECT id FROM users WHERE canonical_name = ?",
                              (user,)).fetchone()[0]
    db.execute("INSERT INTO files (owner, name, type, revision) VALUES(?, ?, ?, ?)",
               (owner_id, flask.request.form["name"], lexer, sqlite3.Binary(file_id)))
    db.commit()
    return flask.redirect(format_url(file_id))
def __init__(self, known_hash, data=b''):
    """Initialise the wrapper with the selected hash implementation.

    :param known_hash: a Known_Hashes member selecting the algorithm
    :param data: optional initial bytes fed to the hasher

    Bug fix: *data* was previously silently ignored for the blake2b,
    blake2s and sha3 branches (only the hashlib fallback received it);
    it is now fed to every implementation.
    """
    self.known_hash = known_hash
    if known_hash == Known_Hashes.blake2b:
        from pyblake2 import blake2b
        self.implem = blake2b(data)
    elif known_hash == Known_Hashes.blake2s:
        from pyblake2 import blake2s
        self.implem = blake2s(data)
    elif known_hash == Known_Hashes.sha3:
        self.implem = hashlib.new('sha3_256', data)
    else:
        self.implem = hashlib.new(known_hash.name, data)
def _calculate_hash(self):
    '''16-byte blake2b hash over the content of this file'''
    # Takes just under 2s for a 1GB file on a typical laptop.
    hasher = blake2b(digest_size=16)
    # Remember the caller's position so it can be restored afterwards.
    start = self.tell()
    self.seek(start)
    while True:
        chunk = self.read(self._HASH_BUF_SIZE)
        if not chunk:
            break
        hasher.update(chunk)
    self.seek(start)
    return hasher.hexdigest()
def __init__(self, nonce=None, clientkey=None, serverkey=None):
    """Derive the session nonce.

    Without a client key: use the given nonce, or a fresh random one.
    With a client key: a 24-byte BLAKE2b over [nonce ||] clientkey ||
    serverkey.
    """
    if clientkey:
        hasher = blake2b(digest_size=24)
        if nonce:
            hasher.update(bytes(nonce))
        hasher.update(bytes(clientkey))
        hasher.update(bytes(serverkey))
        self._nonce = hasher.digest()
    else:
        self._nonce = nonce if nonce else nacl.utils.random(Box.NONCE_SIZE)
def mine(n, k, d):
    # Toy Equihash miner (Python 2): starting from a SHA-256 genesis hash,
    # repeatedly grind nonces, run the basic Wagner solver, filter solutions
    # by difficulty d, and chain each mined block's hash into the next.
    print 'Miner starting'
    validate_params(n, k)
    print '- n: %d' % n
    print '- k: %d' % k
    print '- d: %d' % d
    # Genesis
    digest = hashes.Hash(hashes.SHA256(), backend=default_backend())
    prev_hash = digest.finalize()
    while True:
        start = datetime.today()
        # H(I||...
        digest = blake2b(digest_size=n/8, person=zcash_person(n, k))
        digest.update(prev_hash)
        nonce = 0
        x = None
        # Nonce space is capped at 2**161.
        while (nonce >> 161 == 0):
            if DEBUG:
                print
                print 'Nonce: %d' % nonce
            # H(I||V||...
            curr_digest = digest.copy()
            hash_nonce(curr_digest, nonce)
            # (x_1, x_2, ...) = A(I, V, n, k)
            if DEBUG:
                gbp_start = datetime.today()
            solns = gbp_basic(curr_digest, n, k)
            if DEBUG:
                print 'GBP took %s' % str(datetime.today() - gbp_start)
                print 'Number of solutions: %d' % len(solns)
            for soln in solns:
                if difficulty_filter(prev_hash, nonce, soln, d):
                    x = soln
                    break
            if x:
                break
            nonce += 1
        duration = datetime.today() - start
        if not x:
            raise RuntimeError('Could not find any valid nonce. Wow.')
        # NOTE(review): uses `soln` leaked from the loop; it equals x when a
        # solution was found, but `x` would be the clearer choice.
        curr_hash = block_hash(prev_hash, nonce, soln)
        print '-----------------'
        print 'Mined block!'
        print 'Previous hash: %s' % print_hash(prev_hash)
        print 'Current hash: %s' % print_hash(curr_hash)
        print 'Nonce: %s' % nonce
        print 'Time to find: %s' % str(duration)
        print '-----------------'
        prev_hash = curr_hash
def __call__(self, parentcap=None):
    """Mint and return the next capability string derived from *parentcap*
    (the genesis capability by default)."""
    # If no parent cap is specified, use the genesis capability.
    if parentcap is None:
        parentcap = self.genesiscap
    # Claim and update a unique sequence number to use in capability
    # generation.
    curseq = self.sequence
    self.sequence += 1
    # Use Blake2B to create a new capability from the sequence number
    # and the parent capability.
    # NOTE(review): str inputs to blake2b match the py2/pyblake2 API; under
    # Python 3's hashlib the data and key must be bytes.
    return "C" + blake2b(
        "C" + hex(curseq)[2:].zfill(16),
        digest_size=32,
        key=parentcap[1:65]).hexdigest()
def encode(self, hash_name, payload, expected_code):
    """Hash *payload* with both the multihash implementation and a reference
    hasher, then check digest suffix, code byte and length byte agree."""
    multihasher = multihash.new(hash_name)

    # Algorithms hashlib.new can't construct by this name get explicit
    # factories; everything else goes through hashlib.
    special = {
        "blake2s": pyblake2.blake2s,
        "blake2b": pyblake2.blake2b,
        "sha3": lambda: hashlib.new("sha3_256"),
    }
    factory = special.get(hash_name)
    reference_hasher = factory() if factory else hashlib.new(hash_name)

    encoded = payload.encode("utf8")
    multihasher.update(encoded)
    reference_hasher.update(encoded)

    self.assertTrue(multihasher.hexdigest().endswith(reference_hasher.hexdigest()))
    self.assertEqual(multihasher.digest()[0], expected_code)
    self.assertEqual(multihasher.digest()[1], len(reference_hasher.digest()))
def is_valid(self, n=48, k=5):
    """Full block check: Equihash solution, proof-of-work target, every
    transaction, and the merkle root."""
    # H(I||...
    # NOTE(review): digest_size=(512/n)*n/8 relies on Python 2 integer
    # division (n=48 -> 10*48/8 = 60 bytes); under Python 3 it is a float.
    digest = blake2b(digest_size=(512/n)*n/8, person=zcash_person(n, k))
    digest.update(super(CBlock, self).serialize()[:108])
    hash_nonce(digest, self.nNonce)
    # NOTE(review): argument order (solution, digest) differs from the
    # (digest, solution) order used in solve() -- confirm gbp_validate's
    # signature.
    if not gbp_validate(self.nSolution, digest, n, k):
        return False
    self.calc_sha256()
    target = uint256_from_compact(self.nBits)
    if self.sha256 > target:
        return False
    for tx in self.vtx:
        if not tx.is_valid():
            return False
    if self.calc_merkle_root() != self.hashMerkleRoot:
        return False
    return True
def __init__(self, resource, transition, ts, **kwargs):
    """Build an OVH state-transition event dict for *resource* at time *ts*,
    merge any extra kwargs, and print it as JSON."""
    event = {}
    # Random event id: BLAKE2b over 128 random bits (py2-era pyblake2
    # positional digest_size call).
    event['id'] = blake2b(str(getrandbits(128)), 16).hexdigest()
    event['time'] = ts
    event['type'] = resource.type
    event['resource'] = resource.resource
    event['state'] = transition
    event['source'] = 'OVH'
    event['depth'] = 0
    if resource.type == "ip":
        # Numeric form of the address for range queries.
        event['ip_packed'] = netaddr.IPAddress(event['resource']).value
    self.event = event
    self.id = event['id']
    self.ts = event['time']
    # Python 2: dict.iteritems. Extra kwargs may overwrite base fields.
    for k, v in kwargs.iteritems():
        event[k] = v
    print(ujson.dumps(event))
def solve(self, n=48, k=5): target = uint256_from_compact(self.nBits) # H(I||... digest = blake2b(digest_size=(512/n)*n/8, person=zcash_person(n, k)) digest.update(super(CBlock, self).serialize()[:108]) self.nNonce = 0 while True: # H(I||V||... curr_digest = digest.copy() hash_nonce(curr_digest, self.nNonce) # (x_1, x_2, ...) = A(I, V, n, k) solns = gbp_basic(curr_digest, n, k) for soln in solns: assert(gbp_validate(curr_digest, soln, n, k)) self.nSolution = soln self.rehash() if self.sha256 <= target: return self.nNonce += 1
def pick(self):
    """Return a resource of this generator's type: create a fresh random one
    when the pool is empty or with ~1/17 probability, otherwise sample an
    existing one. New resources are deduplicated against the pool."""
    resource = None
    if (not self.resources) or randrange(0, 17) == 0:
        # NOTE(review): blake2b(str(...), N) is the py2-era pyblake2 call
        # signature (str data, positional digest_size).
        if self.type == "ip":
            ip = "%u.%u.%u.%u" % (randrange(0, 255), randrange(0, 255),
                                  randrange(0, 255), randrange(0, 255))
            resource = Resource(self.type, ip)
        elif self.type == "nsrec":
            nsrec = "{}.com".format(blake2b(str(getrandbits(128)), 4).hexdigest())
            resource = Resource(self.type, nsrec)
        elif self.type == "email":
            nsrec = "x@{}.com".format(blake2b(str(getrandbits(128)), 4).hexdigest())
            resource = Resource(self.type, nsrec)
        elif self.type == "domain":
            nsrec = "{}.com".format(blake2b(str(getrandbits(128)), 4).hexdigest())
            resource = Resource(self.type, nsrec)
        elif self.type == "vhost":
            vhost = "vh-{}.com".format(blake2b(str(getrandbits(128)), 1).hexdigest())
            resource = Resource(self.type, vhost)
        elif self.type == "uri":
            uri = "http://vh-{}.com/{}.html". \
                format(blake2b(str(getrandbits(128)), 1).hexdigest(),
                       blake2b(str(getrandbits(128)), 16).hexdigest())
            resource = Resource(self.type, uri)
        else:
            # Unknown resource type.
            abort()
        # Deduplicate: if an equal resource already exists, return it instead.
        for r in self.resources:
            if r.type == resource.type and r.resource == resource.resource:
                return r
        self.resources.append(resource)
    else:
        resource = sample(self.resources, 1)[0]
    return resource
def hashChain(s):
    """Chain hash: Keccak-256 over the 32-byte BLAKE2b digest of *s*."""
    blake_digest = pyblake2.blake2b(s, digest_size=32).digest()
    return keccak256.digest(blake_digest)
def hashChain(noncedSecret):
    """Chain hash: Keccak-256 over the 32-byte BLAKE2b digest of the
    nonced secret."""
    blake_digest = pyblake2.blake2b(noncedSecret, digest_size=32).digest()
    return sha3.keccak_256(blake_digest).digest()
def sign(self, handle, test_mode=False):
    """Sign self.payload with the HSM key addressed by *handle*.

    Serialised per chain/operation type via a DynamoDB mutex; the payload is
    validated, BLAKE2b-256 hashed, ECDSA-signed inside the HSM, and the
    signature returned base58-encoded. Raises on invalid payloads or when
    the lock cannot be acquired.
    """
    # This code acquires a mutex lock using https://github.com/chiradeep/dyndb-mutex
    # generate a unique name for this process/thread
    ddb_region = environ['REGION']
    payload_chainid = self.get_chain_id()
    my_name = str(uuid.uuid4()).split("-")[0]
    # One mutex per operation kind per chain.
    if self.is_block():
        sig_type = 'Baking_' + payload_chainid
    elif self.is_endorsement():
        sig_type = 'Endorsement_' + payload_chainid
    else:
        sig_type = 'Transaction_' + payload_chainid
    m = DynamoDbMutex(sig_type, holder=my_name, timeoutms=60 * 1000,
                      region_name=ddb_region)
    locked = m.lock()  # attempt to acquire the lock
    if locked:
        encoded_sig = ''
        data_to_sign = self.payload
        logging.info('About to sign {} with key handle {}'.format(
            data_to_sign, handle))
        if self.valid_block_format(data_to_sign):
            logging.info('Block format is valid')
            if self.is_block() or self.is_endorsement() or self.is_transaction():
                logging.info('Preamble is valid')
                if self.not_already_signed():
                    if test_mode:
                        # NOTE(review): returns while still holding the mutex
                        # (no m.release() on this path) -- confirm intended.
                        return self.TEST_SIGNATURE
                    else:
                        logging.info(
                            'About to sign with HSM client. '
                            'Slot = {}, lib = {}, handle = {}'
                            .format(self.hsm_slot, self.hsm_libfile, handle))
                        with HsmClient(slot=self.hsm_slot, pin=self.hsm_pin,
                                       pkcs11_lib=self.hsm_libfile) as c:
                            # HSM signs the 32-byte BLAKE2b hash, not the raw payload.
                            hashed_data = blake2b(
                                hex_to_bytes(data_to_sign),
                                digest_size=32).digest()
                            logging.info('Hashed data to sign: {}'.format(
                                hashed_data))
                            sig = c.sign(handle=handle, data=hashed_data,
                                         mechanism=HsmMech.ECDSA)
                            logging.info('Raw signature: {}'.format(sig))
                            encoded_sig = RemoteSigner.b58encode_signature(
                                sig, self.prefix)
                            logging.info(
                                'Base58-encoded signature: {}'.format(
                                    encoded_sig))
                            c.logout()
                            logging.info('Logged out from the HSM.')
                            c.close_session()
                            logging.info('Closed the HSM session.')
                else:
                    logging.error('Invalid level')
                    m.release()  # release the lock
                    raise Exception('Invalid level')
            else:
                logging.error('Invalid preamble')
                m.release()  # release the lock
                raise Exception('Invalid preamble')
        else:
            logging.error('Invalid payload')
            m.release()  # release the lock
            raise Exception('Invalid payload')
        m.release()  # release the lock
        return encoded_sig
    else:  # lock could not be acquired
        logging.error('Could not acquire lock')
        raise Exception('Could not acquire lock')
def H(m):
    """Return the 64-byte BLAKE2b digest of *m* (replaces the earlier
    SHA-512 hash function)."""
    hasher = blake2b(m)
    return hasher.digest()
from pyblake2 import blake2b

# Derive one keyed BLAKE2b block of material bound to this instance via
# salt/key/person, then split it: the first ENC_KEY_SIZE bytes become the
# encryption key, the last INDEX_SIZE bytes become the index.
k = blake2b(digest_size=ENC_KEY_SIZE + INDEX_SIZE, salt=SALT, key=MASTER_KEY, person=INSTANCE_KEY).digest()
enc_key, index = k[:ENC_KEY_SIZE], k[-INDEX_SIZE:]
def hashChain(s):
    """Chain hash: Keccak-256 over the 32-byte BLAKE2b digest of *s*."""
    inner = pyblake2.blake2b(s, digest_size=32).digest()
    outer = sha3.keccak_256(inner)
    return outer.digest()
def handle_event(self, event):
    """Dispatch one captured protocol event (Python 2 code: uses `unicode`
    and str.decode("hex")).

    Normalises hex-encoded fields, then handles session lifecycle
    ("socket"/"closing"/"close"), key material ("keypair"), and
    "send"/"recv" traffic: handshake messages 10101/20104 derive a BLAKE2b
    session nonce, later messages are decrypted with incrementing
    send/recv nonces. Missing key material raises.
    """
    # Decode hex-encoded string fields; messageid is hex -> int.
    for key in event:
        if key in {"type", "from"}:
            continue
        elif key == "messageid":
            event[key] = int(event[key], 16)
        elif type(event[key]) is bool:
            continue
        elif type(event[key]) in {str, unicode}:
            event[key] = event[key].decode("hex")
    if event["type"] == "socket":
        self.tee = Tee(os.path.join(self.BASE_DIR,
                                    "session-{}.log".format(event["threadid"])))
        self.log("session started")
    elif event["type"] == "keypair":
        self.sk = PrivateKey(event["sk"])
        self.dump({"sk": self.sk}, function="PrivateKey")
    elif event["type"] == "send" or event["type"] == "recv":
        # 10100/20100 are plaintext messages.
        if event["messageid"] == 10100:
            event.update({"message": event["buffer"]})
            self.dump(event)
        elif event["messageid"] == 20100:
            event.update({"message": event["buffer"]})
            self.dump(event)
        else:
            if self.serverkey:
                if self.sk:
                    if event["messageid"] == 10101:
                        # Client hello carries the client public key first.
                        self.pk = PublicKey(event["buffer"][:32])
                        self.dump({"pk": bytes(self.pk)}, function="PublicKey")
                        event["buffer"] = event["buffer"][32:]
                    if self.pk:
                        if event["messageid"] == 10101 or self.snonce:
                            if event["messageid"] in {10101, 20104} or self.rnonce:
                                if event["messageid"] in {10101, 20104} or self.k:
                                    if event["messageid"] in {10101, 20104}:
                                        # Handshake: derive the nonce from
                                        # [snonce ||] pk || serverkey via BLAKE2b.
                                        k = Box(self.sk, self.serverkey)
                                        self.dump({"s": k}, function="Box")
                                        b2 = blake2b(digest_size=24)
                                        if event["messageid"] == 20104:
                                            b2.update(bytes(self.snonce))
                                        b2.update(bytes(self.pk))
                                        b2.update(bytes(self.serverkey))
                                        nonce = b2.digest()
                                        if event["messageid"] == 10101:
                                            self.dump(
                                                {"pk": self.pk, "serverkey": self.serverkey, "nonce": nonce},
                                                function="blake2b",
                                            )
                                        elif event["messageid"] == 20104:
                                            self.dump(
                                                {
                                                    "snonce": self.snonce,
                                                    "pk": self.pk,
                                                    "serverkey": self.serverkey,
                                                    "nonce": nonce,
                                                },
                                                function="blake2b",
                                            )
                                    else:
                                        # Post-handshake: use the shared key and
                                        # the incrementing direction nonce.
                                        k = self.k
                                        if event["type"] == "send":
                                            self.snonce = self.increment_nonce(self.snonce)
                                            nonce = self.snonce
                                        elif event["type"] == "recv":
                                            self.rnonce = self.increment_nonce(self.rnonce)
                                            nonce = self.rnonce
                                    ciphertext = event["buffer"]
                                    event.update({"k": k, "nonce": nonce, "ciphertext": event["buffer"]})
                                    try:
                                        message = k.decrypt(ciphertext, nonce)
                                    except:
                                        self.dump(event, error=True)
                                        self.log(
                                            "Warning: failed to decrypt {}".format(event["messageid"]), error=True
                                        )
                                        # Handshake decryption failures are fatal.
                                        if event["messageid"] in {10101, 20104}:
                                            raise
                                    else:
                                        if event["messageid"] == 10101:
                                            # Client hello: bytes 24..48 are the client nonce.
                                            self.snonce = message[24:48]
                                            self.dump({"snonce": self.snonce}, function="slice")
                                            message = message[48:]
                                        elif event["messageid"] == 20104:
                                            # Server hello: server nonce then shared key.
                                            self.rnonce = message[:24]
                                            self.k = Box.decode(message[24:56])
                                            self.dump({"rnonce": self.rnonce, "k": self.k}, function="slice")
                                            message = message[56:]
                                        event.update({"message": message})
                                        self.dump(event)
                                else:
                                    raise Exception("Missing shared key ({}).".format(event["messageid"]))
                            else:
                                raise Exception("Missing server nonce ({}).".format(event["messageid"]))
                        else:
                            raise Exception("Missing client nonce ({}).".format(event["messageid"]))
                    else:
                        raise Exception("Missing public key ({}).".format(event["messageid"]))
                else:
                    raise Exception("Missing secret key ({}).".format(event["messageid"]))
            else:
                raise Exception("Missing server key ({}).".format(event["messageid"]))
    elif event["type"] == "closing":
        self.log("session closed")
    elif event["type"] == "close":
        self.tee.flush()
        self.tee.close()
    else:
        raise Exception("Invalid event type ({}).".format(event["type"]))
def getHashJoinSplits(tx):
    """BLAKE2b-256 (personalised 'ZcashJSplitsHash') over every serialized
    JoinSplit description followed by the joinSplit pubkey."""
    hasher = blake2b(digest_size=32, person=b'ZcashJSplitsHash')
    for jsdesc in tx.vJoinSplit:
        hasher.update(jsdesc.serialize())
    hasher.update(tx.joinSplitPubKey)
    return hasher.digest()
def txid_from_txdata(serialized_txdata):
    """Return the base58-encoded txid: the 32-byte BLAKE2b digest of the
    serialized transaction data."""
    digest = pyblake2.blake2b(serialized_txdata, digest_size=32).digest()
    return base58.b58encode(digest)
def serialize_preimage(self, i):
    """Return the hex sighash preimage for input *i*: ZIP-143-style fields
    (personalised BLAKE2b hashes of prevouts/sequences/outputs) when the
    transaction is overwintered, legacy serialization otherwise."""
    overwintered = self.overwintered
    version = self.version
    # SIGHASH_ALL, little-endian.
    nHashType = int_to_hex(1, 4)
    nLocktime = int_to_hex(self.locktime, 4)
    inputs = self.inputs()
    outputs = self.outputs()
    txin = inputs[i]
    # TODO: py3 hex
    if overwintered:
        # Header: version with the overwinter bit set.
        nHeader = int_to_hex(0x80000000 | version, 4)
        nVersionGroupId = int_to_hex(self.versionGroupId, 4)
        s_prevouts = bfh(''.join(
            self.serialize_outpoint(txin) for txin in inputs))
        hashPrevouts = blake2b(s_prevouts, digest_size=32,
                               person=b'ZcashPrevoutHash').hexdigest()
        s_sequences = bfh(''.join(
            int_to_hex(txin.get('sequence', 0xffffffff - 1), 4)
            for txin in inputs))
        hashSequence = blake2b(s_sequences, digest_size=32,
                               person=b'ZcashSequencHash').hexdigest()
        s_outputs = bfh(''.join(self.serialize_output(o) for o in outputs))
        hashOutputs = blake2b(s_outputs, digest_size=32,
                              person=b'ZcashOutputsHash').hexdigest()
        joinSplits = self.joinSplits
        #if joinSplits is None:
        #    hashJoinSplits = '00'*32
        #else:
        #    s_joinSplits = bfh(''.join(self.serialize_join_split(j) for j in joinSplits))
        #    s_joinSplits += self.joinSplitPubKey
        #    hashJoinSplits = blake2b(s_joinSplits, digest_size=32, person=b'bitcoinprivateJSplitsHash').hexdigest()
        # JoinSplit/shielded fields are fixed to zero hashes here.
        hashJoinSplits = '00' * 32
        hashShieldedSpends = '00' * 32
        hashShieldedOutputs = '00' * 32
        nExpiryHeight = int_to_hex(self.expiryHeight, 4)
        nValueBalance = int_to_hex(self.valueBalance, 8)
        txin = inputs[i]
        preimage_script = self.get_preimage_script(txin)
        # var_int length is in bytes; the script is a hex string (2 chars/byte).
        scriptCode = var_int(len(preimage_script) // 2) + preimage_script
        preimage = (nHeader + nVersionGroupId + hashPrevouts + hashSequence
                    + hashOutputs + hashJoinSplits + hashShieldedSpends
                    + hashShieldedOutputs + nLocktime + nExpiryHeight
                    + nValueBalance + nHashType
                    + self.serialize_outpoint(txin) + scriptCode
                    + int_to_hex(txin['value'], 8)
                    + int_to_hex(txin.get('sequence', 0xffffffff - 1), 4))
    else:
        # Legacy (pre-overwinter): only input i carries its script.
        nVersion = int_to_hex(version, 4)
        txins = var_int(len(inputs)) + ''.join(
            self.serialize_input(
                txin, self.get_preimage_script(txin) if i == k else '')
            for k, txin in enumerate(inputs))
        txouts = var_int(len(outputs)) + ''.join(
            self.serialize_output(o) for o in outputs)
        preimage = nVersion + txins + txouts + nLocktime + nHashType
    return preimage
def blake2b_32(v=b''):
    """Return a 32-byte BLAKE2b hash object over the scrubbed input *v*."""
    data = scrub_input(v)
    return blake2b(data, digest_size=32)
def blake2b_hash(data: bytes) -> bytes:
    """Return the 32-byte BLAKE2b digest of *data*."""
    return blake2b(data, digest_size=32).digest()
'sha3': 64, 'blake2b': 64, 'blake2s': 32, } FUNCS = { SHA1: hashlib.sha1, SHA2_256: hashlib.sha256, SHA2_512: hashlib.sha512, } if sha3: FUNCS[SHA3] = lambda: hashlib.new('sha3_512') if pyblake2: FUNCS[BLAKE2B] = lambda: pyblake2.blake2b() FUNCS[BLAKE2S] = lambda: pyblake2.blake2s() def _hashfn(hashfn): """Return an initialised hash object, by function, name or integer id >>> _hashfn(SHA1) # doctest: +ELLIPSIS <sha1 HASH object @ 0x...> >>> _hashfn('sha2-256') # doctest: +ELLIPSIS <sha256 HASH object @ 0x...> >>> _hashfn('18') # doctest: +ELLIPSIS <sha256 HASH object @ 0x...> >>> _hashfn('md5')
def SignatureHash(script, txTo, inIdx, hashtype, amount, consensusBranchId):
    """Consensus-correct SignatureHash

    Returns (hash, None). For a non-zero consensus branch id the ZIP 243
    personalised-BLAKE2b scheme is used; otherwise the legacy pre-Overwinter
    double-SHA256 scheme over a trimmed transaction copy.
    """
    if inIdx >= len(txTo.vin):
        raise ValueError("inIdx %d out of range (%d)" % (inIdx, len(txTo.vin)))

    if consensusBranchId != 0:
        # ZIP 243
        # Field hashes default to 32 zero bytes and are filled in only when
        # the hashtype / transaction contents require them.
        hashPrevouts = b'\x00' * 32
        hashSequence = b'\x00' * 32
        hashOutputs = b'\x00' * 32
        hashJoinSplits = b'\x00' * 32
        hashShieldedSpends = b'\x00' * 32
        hashShieldedOutputs = b'\x00' * 32

        if not (hashtype & SIGHASH_ANYONECANPAY):
            hashPrevouts = getHashPrevouts(txTo)

        if (not (hashtype & SIGHASH_ANYONECANPAY)) and \
                (hashtype & 0x1f) != SIGHASH_SINGLE and \
                (hashtype & 0x1f) != SIGHASH_NONE:
            hashSequence = getHashSequence(txTo)

        if (hashtype & 0x1f) != SIGHASH_SINGLE and \
                (hashtype & 0x1f) != SIGHASH_NONE:
            hashOutputs = getHashOutputs(txTo)
        elif (hashtype & 0x1f) == SIGHASH_SINGLE and \
                0 <= inIdx and inIdx < len(txTo.vout):
            # SIGHASH_SINGLE commits only to the matching output.
            digest = blake2b(digest_size=32, person=b'ZcashOutputsHash')
            digest.update(txTo.vout[inIdx].serialize())
            hashOutputs = digest.digest()

        if len(txTo.vJoinSplit) > 0:
            hashJoinSplits = getHashJoinSplits(txTo)

        if len(txTo.shieldedSpends) > 0:
            hashShieldedSpends = getHashShieldedSpends(txTo)

        if len(txTo.shieldedOutputs) > 0:
            hashShieldedOutputs = getHashShieldedOutputs(txTo)

        # Personalisation binds the hash to this consensus branch.
        digest = blake2b(
            digest_size=32,
            person=b'ZcashSigHash' + struct.pack('<I', consensusBranchId),
        )

        digest.update(
            struct.pack('<I', (int(txTo.fOverwintered) << 31) | txTo.nVersion))
        digest.update(struct.pack('<I', txTo.nVersionGroupId))
        digest.update(hashPrevouts)
        digest.update(hashSequence)
        digest.update(hashOutputs)
        digest.update(hashJoinSplits)
        digest.update(hashShieldedSpends)
        digest.update(hashShieldedOutputs)
        digest.update(struct.pack('<I', txTo.nLockTime))
        digest.update(struct.pack('<I', txTo.nExpiryHeight))
        digest.update(struct.pack('<Q', txTo.valueBalance))
        digest.update(struct.pack('<I', hashtype))

        if inIdx is not None:
            # Per-input fields (omitted when signing without a specific input).
            digest.update(txTo.vin[inIdx].prevout.serialize())
            digest.update(ser_string(script))
            digest.update(struct.pack('<Q', amount))
            digest.update(struct.pack('<I', txTo.vin[inIdx].nSequence))

        return (digest.digest(), None)
    else:
        # Pre-Overwinter: legacy scheme over a modified copy of the tx.
        txtmp = CTransaction(txTo)
        # Blank out all scriptSigs except the one being signed.
        for txin in txtmp.vin:
            txin.scriptSig = b''
        txtmp.vin[inIdx].scriptSig = script

        if (hashtype & 0x1f) == SIGHASH_NONE:
            txtmp.vout = []
            for i in range(len(txtmp.vin)):
                if i != inIdx:
                    txtmp.vin[i].nSequence = 0
        elif (hashtype & 0x1f) == SIGHASH_SINGLE:
            outIdx = inIdx
            if outIdx >= len(txtmp.vout):
                raise ValueError("outIdx %d out of range (%d)" % (outIdx, len(txtmp.vout)))
            # Keep only the matching output, padding the rest with empties.
            tmp = txtmp.vout[outIdx]
            txtmp.vout = []
            for i in range(outIdx):
                txtmp.vout.append(CTxOut())
            txtmp.vout.append(tmp)
            for i in range(len(txtmp.vin)):
                if i != inIdx:
                    txtmp.vin[i].nSequence = 0

        if hashtype & SIGHASH_ANYONECANPAY:
            # Keep only the input being signed.
            tmp = txtmp.vin[inIdx]
            txtmp.vin = []
            txtmp.vin.append(tmp)

        s = txtmp.serialize()
        s += struct.pack(b"<I", hashtype)
        hash = hash256(s)
        return (hash, None)
def getHashPrevouts(tx):
    """BLAKE2b-256 (personalised 'ZcashPrevoutHash') over the serialized
    prevout of every input in *tx*."""
    hasher = blake2b(digest_size=32, person=b'ZcashPrevoutHash')
    for txin in tx.vin:
        hasher.update(txin.prevout.serialize())
    return hasher.digest()
def getHashShieldedOutputs(tx):
    """Return the 32-byte BLAKE2b 'ZcashSOutputHash' digest over all
    serialized shielded output descriptions, in order."""
    h = blake2b(digest_size=32, person=b'ZcashSOutputHash')
    h.update(b''.join(desc.serialize() for desc in tx.shieldedOutputs))
    return h.digest()
def getHashOutputs(tx):
    """Return the 32-byte BLAKE2b 'ZcashOutputsHash' digest over all
    serialized transparent outputs, in order."""
    h = blake2b(digest_size=32, person=b'ZcashOutputsHash')
    h.update(b''.join(out.serialize() for out in tx.vout))
    return h.digest()
def receive_xrb(index, account): ws = create_connection('ws://yapraiwallet.space:8000') # Get pending blocks rx_data = get_pending(str(account)) if len(rx_data) == 0: return for block in rx_data: print(block) block_hash = block print(rx_data[block]) balance = int(rx_data[block]['amount']) source = rx_data[block]['source'] previous = get_previous(str(account)) current_balance = get_balance(previous) print(current_balance) new_balance = int(current_balance) + int(balance) hex_balance = hex(new_balance) print(hex_balance) hex_final_balance = hex_balance[2:].upper().rjust(32, '0') print(hex_final_balance) priv_key, pub_key = seed_account(settings.seed, int(index)) public_key = ed25519.SigningKey(priv_key).get_verifying_key().to_ascii( encoding="hex") # print("Starting PoW Generation") work = get_pow(previous) # print("Completed PoW Generation") # Calculate signature bh = blake2b(digest_size=32) bh.update( BitArray( hex= '0x0000000000000000000000000000000000000000000000000000000000000006' ).bytes) bh.update(BitArray(hex=xrb_account(account)).bytes) bh.update(BitArray(hex=previous).bytes) bh.update(BitArray(hex=xrb_account(account)).bytes) bh.update(BitArray(hex=hex_final_balance).bytes) bh.update(BitArray(hex=block_hash).bytes) sig = ed25519.SigningKey(priv_key + pub_key).sign(bh.digest()) signature = str(binascii.hexlify(sig), 'ascii') finished_block = '{ "type" : "state", "previous" : "%s", "representative" : "%s" , "account" : "%s", "balance" : "%s", "link" : "%s", \ "work" : "%s", "signature" : "%s" }' % \ (previous, account, account, new_balance, block_hash, work, signature) print(finished_block) data = json.dumps({'action': 'process', 'block': finished_block}) # print(data) ws.send(data) block_reply = ws.recv() print(block_reply) ws.close()
def getHashSequence(tx):
    """Return the 32-byte BLAKE2b 'ZcashSequencHash' digest over each
    input's nSequence, packed as a little-endian uint32, in input order."""
    packed = b''.join(struct.pack('<I', txin.nSequence) for txin in tx.vin)
    h = blake2b(digest_size=32, person=b'ZcashSequencHash')
    h.update(packed)
    return h.digest()
def H(arg):
    """Hash ``arg`` with BLAKE2b-256 and return the digest as a little-endian
    integer.

    :param arg: bytes to hash, or an int (encoded as 32 little-endian bytes).
    :returns: the digest interpreted as a little-endian integer.
    """
    if isinstance(arg, int):
        arg = arg.to_bytes(32, 'little')
    # XXX: ensure that (digest_size*8) >= log2(p)
    # Pass the data positionally: hashlib.blake2b's data parameter is
    # positional-only, so `blake2b(data=arg, ...)` breaks under the stdlib
    # implementation (it only worked with pyblake2's keyword signature).
    hashed = blake2b(arg, digest_size=32).digest()
    return int.from_bytes(hashed, 'little')
def generate(msg):
    """Return the hex BLAKE2b MAC of ``msg``, keyed with the module-level
    SECRET_KEY and truncated to AUTH_SIZE bytes."""
    mac = blake2b(
        msg.encode('utf-8'),
        digest_size=AUTH_SIZE,
        key=SECRET_KEY.encode('utf-8'),
    )
    return mac.hexdigest()
def secret_code(pkh, blind):
    """Return a 20-byte keyed BLAKE2b digest of ``pkh`` using ``blind`` as
    the key.

    :param pkh: bytes to hash.
    :param blind: BLAKE2b key (bytes).
    :returns: 20-byte digest.
    """
    # digest_size must be passed by keyword: it is keyword-only in
    # hashlib.blake2b, so the positional `blake2b(pkh, 20, key=blind)` form
    # raises TypeError under the stdlib implementation.
    return blake2b(pkh, digest_size=20, key=blind).digest()
def H(x):
    """Return the BLAKE2b-512 digest of ``x`` with the 'Zcash_RedJubjubH'
    personalization string."""
    return blake2b(x, person=b'Zcash_RedJubjubH').digest()
def finish_V(start_data, X_s):
    """Finish the exchange on the verifier side and derive the session key.

    ``start_data`` is the 7-tuple produced at protocol start; the key is the
    unkeyed BLAKE2b digest of the transcript idA||idB||X_s||Y_s||Z_s||pw.
    """
    sdata, pw, Y_s, idA, idB, U, V = start_data
    Z_s = _finish(sdata, X_s, U)
    return blake2b(idA + idB + X_s + Y_s + Z_s + pw).digest()
def getflag(sock):
    """Drive the remote challenge service end-to-end and print the flag.

    Steps: fetch the identity ("zero") cube, recover the generator
    permutations a and b, register and log in a throwaway user to exercise
    the login handshake, recover the challenger's secret key, forge the
    admin handshake, and read out the flag.

    NOTE(review): Python 2 code (print statements); indentation of the
    admin-pk extraction loop is reconstructed from collapsed source.
    """
    user = '******'
    # 6 lines of menu
    for i in range(6):
        recvline(sock)
    # Get zero cube (i.e. non-permuted)
    sendprint(sock, '1')
    recvline(sock)
    sendprint(sock, '0')
    recvline(sock)
    sendprint(sock, '0')
    recvline(sock)
    zero = recvline(sock)
    print "Zero cube:", zero
    # Solve permutations a and b
    perma = solveperm(sock, 'a', zero)
    print "Permutation a:", perma, "of order", getorder(perma)
    permb = solveperm(sock, 'b', zero)
    print "Permutation b:", permb, "of order", getorder(permb)
    # 6 lines of menu
    for i in range(6):
        recvline(sock)
    # Register a new user
    sendprint(sock, '2')
    recvline(sock)
    sendprint(sock, user)
    recvline(sock)
    sendprint(sock, zero)
    # 7 lines of menu
    for i in range(7):
        recvline(sock)
    # Login the new user
    sendprint(sock, '3')
    recvline(sock)
    sendprint(sock, user)
    recvline(sock)
    pk = recvline(sock)
    # 2 lines of information
    for i in range(2):
        recvline(sock)
    # Login challenge
    tmp = recvline(sock)
    # 16 hex characters at a fixed offset in the challenge prompt.
    challenge = tmp[26:26 + 16]
    print "Parsed challenge:", challenge
    salt = binascii.unhexlify(challenge)
    # Compute the handshake: BLAKE2b-128 keyed with the challenge bytes.
    h = blake2b(key=salt, digest_size=16)
    h.update(pk)
    handshake = h.hexdigest()
    sendprint(sock, handshake)
    # 7 lines of menu
    for i in range(7):
        recvline(sock)
    # List users
    sendprint(sock, '4')
    # Extract admin pk from user list
    recvline(sock)
    for i in range(2):
        tmp = recvline(sock)
        tmpname = tmp[10:]
        tmp = recvline(sock)
        if tmpname == 'admin':
            adminpk = tmp[5:]
    print 'Admin pk:', adminpk
    recvline(sock)
    # 6 lines of menu
    for i in range(6):
        recvline(sock)
    # Login as admin
    sendprint(sock, '3')
    recvline(sock)
    sendprint(sock, 'admin')
    recvline(sock)
    pk = recvline(sock)
    # 2 lines of information
    for i in range(2):
        recvline(sock)
    # Login challenge
    tmp = recvline(sock)
    challenge = tmp[26:26 + 16]
    print "Parsed challenge:", challenge
    salt = binascii.unhexlify(challenge)
    # Extract challenger's sk
    sk = solvesk(zero, pk, perma, permb)
    a, b = sk
    print "Secret key for", pk, "is:", "(a=" + str(a) + ", b=" + str(b) + ")"
    # Extract permutation from admin's pk
    permadmin = cube2perm(zero, adminpk)
    print "Admin's permutation:", permadmin
    # Compute handshake cube
    cube = zero
    for i in range(a):
        cube = permstr(cube, perma)
    cube = permstr(cube, permadmin)
    for i in range(b):
        cube = permstr(cube, permb)
    print "Handshake cube:", cube
    # Compute the handshake
    h = blake2b(key=salt, digest_size=16)
    h.update(cube)
    handshake = h.hexdigest()
    sendprint(sock, handshake)
    # Get the flag \o/
    recvline(sock)
    tmp = recvline(sock)
    flag = tmp.split(' ')[-1]
    print "Flag:", flag
def hash256(data):
    """Return the BLAKE2b-256 (32-byte) digest of ``data``."""
    h = blake2b(digest_size=32)
    h.update(data)
    return h.digest()
def file_upload(request):
    """Handle a distutils-style file upload for a project release.

    Validates the submitted metadata, auto-registers the project if it does
    not exist (making the uploader its Owner), creates the Release row if
    needed, enforces filename/size/digest checks, stores the file (and
    optional PGP signature) via the ``IFileStorage`` service, and records
    ``JournalEntry`` audit rows along the way.

    :param request: Pyramid request carrying the multipart POST.
    :returns: an empty ``Response`` on success.
    :raises HTTPForbidden/HTTPBadRequest: (via ``_exc_with_message``) on any
        authentication, validation, permission, or digest failure.
    """
    # Before we do anything, if there isn't an authenticated user with this
    # request, then we'll go ahead and bomb out.
    if request.authenticated_userid is None:
        raise _exc_with_message(
            HTTPForbidden,
            "Invalid or non-existent authentication information.",
        )

    # distutils "helpfully" substitutes unknown, but "required" values with the
    # string "UNKNOWN". This is basically never what anyone actually wants so
    # we'll just go ahead and delete anything whose value is UNKNOWN.
    for key in list(request.POST):
        if request.POST.get(key) == "UNKNOWN":
            del request.POST[key]

    # We require protocol_version 1, it's the only supported version however
    # passing a different version should raise an error.
    if request.POST.get("protocol_version", "1") != "1":
        raise _exc_with_message(HTTPBadRequest, "Unknown protocol version.")

    # Look up all of the valid classifiers
    all_classifiers = request.db.query(Classifier).all()

    # Validate and process the incoming metadata.
    form = MetadataForm(request.POST)
    form.classifiers.choices = [
        (c.classifier, c.classifier) for c in all_classifiers
    ]
    if not form.validate():
        # Report the highest-priority failing field (per _error_message_order),
        # falling back to the alphabetically-first field.
        for field_name in _error_message_order:
            if field_name in form.errors:
                break
        else:
            field_name = sorted(form.errors.keys())[0]

        raise _exc_with_message(
            HTTPBadRequest,
            "{field}: {msgs[0]}".format(
                field=field_name,
                msgs=form.errors[field_name],
            ),
        )

    # TODO: We need a better method of blocking names rather than just
    #       hardcoding some names into source control.
    if form.name.data.lower() in {"requirements.txt", "rrequirements.txt"}:
        raise _exc_with_message(
            HTTPBadRequest,
            "The name {!r} is not allowed.".format(form.name.data),
        )

    # Ensure that we have file data in the request.
    if "content" not in request.POST:
        raise _exc_with_message(
            HTTPBadRequest,
            "Upload payload does not have a file.",
        )

    # Look up the project first before doing anything else, this is so we can
    # automatically register it if we need to and can check permissions before
    # going any further.
    try:
        project = (
            request.db.query(Project)
            .filter(
                Project.normalized_name ==
                func.normalize_pep426_name(form.name.data)).one()
        )
    except NoResultFound:
        # The project doesn't exist in our database, so we'll add it along with
        # a role setting the current user as the "Owner" of the project.
        project = Project(name=form.name.data)
        request.db.add(project)
        request.db.add(
            Role(user=request.user, project=project, role_name="Owner")
        )
        # TODO: This should be handled by some sort of database trigger or a
        #       SQLAlchemy hook or the like instead of doing it inline in this
        #       view.
        request.db.add(
            JournalEntry(
                name=project.name,
                action="create",
                submitted_by=request.user,
                submitted_from=request.client_addr,
            ),
        )
        request.db.add(
            JournalEntry(
                name=project.name,
                action="add Owner {}".format(request.user.username),
                submitted_by=request.user,
                submitted_from=request.client_addr,
            ),
        )

    # Check that the user has permission to do things to this project, if this
    # is a new project this will act as a sanity check for the role we just
    # added above.
    if not request.has_permission("upload", project):
        raise _exc_with_message(
            HTTPForbidden,
            "You are not allowed to upload to {!r}.".format(project.name)
        )

    # Fetch (or create) the Release this file belongs to.
    try:
        release = (
            request.db.query(Release)
            .filter(
                (Release.project == project) &
                (Release.version == form.version.data)).one()
        )
    except NoResultFound:
        release = Release(
            project=project,
            _classifiers=[
                c for c in all_classifiers
                if c.classifier in form.classifiers.data
            ],
            _pypi_hidden=False,
            dependencies=list(_construct_dependencies(
                form,
                {
                    "requires": DependencyKind.requires,
                    "provides": DependencyKind.provides,
                    "obsoletes": DependencyKind.obsoletes,
                    "requires_dist": DependencyKind.requires_dist,
                    "provides_dist": DependencyKind.provides_dist,
                    "obsoletes_dist": DependencyKind.obsoletes_dist,
                    "requires_external": DependencyKind.requires_external,
                    "project_urls": DependencyKind.project_url,
                }
            )),
            **{
                k: getattr(form, k).data
                for k in {
                    # This is a list of all the fields in the form that we
                    # should pull off and insert into our new release.
                    "version", "summary", "description", "license",
                    "author", "author_email", "maintainer",
                    "maintainer_email", "keywords", "platform",
                    "home_page", "download_url", "requires_python",
                }
            }
        )
        request.db.add(release)
        # TODO: This should be handled by some sort of database trigger or a
        #       SQLAlchemy hook or the like instead of doing it inline in this
        #       view.
        request.db.add(
            JournalEntry(
                name=release.project.name,
                version=release.version,
                action="new release",
                submitted_by=request.user,
                submitted_from=request.client_addr,
            ),
        )

    # TODO: We need a better solution to this than to just do it inline inside
    #       this method. Ideally the version field would just be sortable, but
    #       at least this should be some sort of hook or trigger.
    releases = (
        request.db.query(Release)
        .filter(Release.project == project)
        .all()
    )
    for i, r in enumerate(sorted(
            releases, key=lambda x: packaging.version.parse(x.version))):
        r._pypi_ordering = i

    # TODO: Again, we should figure out a better solution to doing this than
    #       just inlining this inside this method.
    if project.autohide:
        # Hide every release except the one being uploaded to.
        for r in releases:
            r._pypi_hidden = bool(not r == release)

    # Pull the filename out of our POST data.
    filename = request.POST["content"].filename

    # Make sure that the filename does not contain any path separators.
    if "/" in filename or "\\" in filename:
        raise _exc_with_message(
            HTTPBadRequest,
            "Cannot upload a file with '/' or '\\' in the name.",
        )

    # Make sure the filename ends with an allowed extension.
    if _dist_file_re.search(filename) is None:
        raise _exc_with_message(HTTPBadRequest, "Invalid file extension.")

    # Make sure that our filename matches the project that it is being uploaded
    # to.
    prefix = pkg_resources.safe_name(project.name).lower()
    if not pkg_resources.safe_name(filename).lower().startswith(prefix):
        raise _exc_with_message(
            HTTPBadRequest,
            "The filename for {!r} must start with {!r}.".format(
                project.name,
                prefix,
            )
        )

    # Check the content type of what is being uploaded
    if (not request.POST["content"].type or
            request.POST["content"].type.startswith("image/")):
        raise _exc_with_message(HTTPBadRequest, "Invalid distribution file.")

    # Check to see if the file that was uploaded exists already or not.
    if request.db.query(
            request.db.query(File)
            .filter(File.filename == filename)
            .exists()).scalar():
        raise _exc_with_message(HTTPBadRequest, "File already exists.")

    # Check to see if the file that was uploaded exists in our filename log.
    if (request.db.query(
            request.db.query(Filename)
            .filter(Filename.filename == filename)
            .exists()).scalar()):
        raise _exc_with_message(
            HTTPBadRequest,
            "This filename has previously been used, you should use a "
            "different version.",
        )

    # The project may or may not have a file size specified on the project, if
    # it does then it may or may not be smaller or larger than our global file
    # size limits.
    file_size_limit = max(filter(None, [MAX_FILESIZE, project.upload_limit]))

    with tempfile.TemporaryDirectory() as tmpdir:
        temporary_filename = os.path.join(tmpdir, filename)

        # Buffer the entire file onto disk, checking the hash of the file as we
        # go along.
        with open(temporary_filename, "wb") as fp:
            file_size = 0
            file_hashes = {
                "md5": hashlib.md5(),
                "sha256": hashlib.sha256(),
                "blake2_256": blake2b(digest_size=256 // 8),
            }
            for chunk in iter(
                    lambda: request.POST["content"].file.read(8096), b""):
                file_size += len(chunk)
                if file_size > file_size_limit:
                    raise _exc_with_message(HTTPBadRequest, "File too large.")
                fp.write(chunk)
                for hasher in file_hashes.values():
                    hasher.update(chunk)

        # Take our hash functions and compute the final hashes for them now.
        file_hashes = {
            k: h.hexdigest().lower()
            for k, h in file_hashes.items()
        }

        # Actually verify the digests that we've gotten. We're going to use
        # hmac.compare_digest even though we probably don't actually need to
        # because it's better safe than sorry. In the case of multiple digests
        # we expect them all to be given.
        if not all([
            hmac.compare_digest(
                getattr(form, "{}_digest".format(digest_name)).data.lower(),
                digest_value,
            )
            for digest_name, digest_value in file_hashes.items()
            if getattr(form, "{}_digest".format(digest_name)).data
        ]):
            raise _exc_with_message(
                HTTPBadRequest,
                "The digest supplied does not match a digest calculated "
                "from the uploaded file."
            )

        # Check the file to make sure it is a valid distribution file.
        if not _is_valid_dist_file(temporary_filename, form.filetype.data):
            raise _exc_with_message(
                HTTPBadRequest,
                "Invalid distribution file.",
            )

        # Check that if it's a binary wheel, it's on a supported platform
        if filename.endswith(".whl"):
            wheel_info = _wheel_file_re.match(filename)
            plats = wheel_info.group("plat").split(".")
            for plat in plats:
                if not _valid_platform_tag(plat):
                    # NOTE(review): this message has no {filename} placeholder,
                    # so the filename kwarg passed to .format() is unused.
                    raise _exc_with_message(
                        HTTPBadRequest,
                        "Binary wheel '(unknown)' has an unsupported "
                        "platform tag '{plat}'."
                        .format(filename=filename, plat=plat)
                    )

        # Also buffer the entire signature file to disk.
        if "gpg_signature" in request.POST:
            has_signature = True
            with open(os.path.join(tmpdir, filename + ".asc"), "wb") as fp:
                signature_size = 0
                for chunk in iter(
                        lambda: request.POST["gpg_signature"].file.read(8096),
                        b""):
                    signature_size += len(chunk)
                    if signature_size > MAX_SIGSIZE:
                        raise _exc_with_message(
                            HTTPBadRequest,
                            "Signature too large.",
                        )
                    fp.write(chunk)

            # Check whether signature is ASCII armored
            with open(os.path.join(tmpdir, filename + ".asc"), "rb") as fp:
                if not fp.read().startswith(b"-----BEGIN PGP SIGNATURE-----"):
                    raise _exc_with_message(
                        HTTPBadRequest,
                        "PGP signature is not ASCII armored.",
                    )
        else:
            has_signature = False

        # TODO: This should be handled by some sort of database trigger or a
        #       SQLAlchemy hook or the like instead of doing it inline in this
        #       view.
        request.db.add(Filename(filename=filename))

        # Store the information about the file in the database.
        file_ = File(
            release=release,
            filename=filename,
            python_version=form.pyversion.data,
            packagetype=form.filetype.data,
            comment_text=form.comment.data,
            size=file_size,
            has_signature=bool(has_signature),
            md5_digest=file_hashes["md5"],
            sha256_digest=file_hashes["sha256"],
            blake2_256_digest=file_hashes["blake2_256"],
            # Figure out what our filepath is going to be, we're going to use a
            # directory structure based on the hash of the file contents. This
            # will ensure that the contents of the file cannot change without
            # it also changing the path that the file is saved too.
            path="/".join([
                file_hashes[PATH_HASHER][:2],
                file_hashes[PATH_HASHER][2:4],
                file_hashes[PATH_HASHER][4:],
                filename,
            ]),
        )
        request.db.add(file_)

        # TODO: This should be handled by some sort of database trigger or a
        #       SQLAlchemy hook or the like instead of doing it inline in this
        #       view.
        # NOTE(review): the action string's "(unknown)" carries no placeholder,
        # so the filename kwarg passed to .format() is unused.
        request.db.add(
            JournalEntry(
                name=release.project.name,
                version=release.version,
                action="add {python_version} file (unknown)".format(
                    python_version=file_.python_version,
                    filename=file_.filename,
                ),
                submitted_by=request.user,
                submitted_from=request.client_addr,
            ),
        )

        # TODO: We need a better answer about how to make this transactional so
        #       this won't take affect until after a commit has happened, for
        #       now we'll just ignore it and save it before the transaction is
        #       committed.
        storage = request.find_service(IFileStorage)
        storage.store(
            file_.path,
            os.path.join(tmpdir, filename),
            meta={
                "project": file_.release.project.normalized_name,
                "version": file_.release.version,
                "package-type": file_.packagetype,
                "python-version": file_.python_version,
            },
        )
        if has_signature:
            storage.store(
                file_.pgp_path,
                os.path.join(tmpdir, filename + ".asc"),
                meta={
                    "project": file_.release.project.normalized_name,
                    "version": file_.release.version,
                    "package-type": file_.packagetype,
                    "python-version": file_.python_version,
                },
            )

    # TODO: Once we no longer have the legacy code base running PyPI we can
    #       go ahead and delete this tiny bit of shim code, since it only
    #       exists to purge stuff on legacy PyPI when uploaded to Warehouse
    old_domain = request.registry.settings.get("warehouse.legacy_domain")
    if old_domain:
        request.tm.get().addAfterCommitHook(
            _legacy_purge,
            args=["https://{}/pypi".format(old_domain)],
            kws={"data": {":action": "purge", "project": project.name}},
        )

    return Response()
def batch(verify):
    """Batch-hash every unprocessed term in the ``hashlist`` table.

    For each distinct ASCII term with CALC='0', computes Base32/Base64
    encodings plus MD5, SHA-1/224/256/384/512, NTLM, BLAKE2b, BLAKE2s and
    HMAC values, writes them back onto the term's row, marks CALC='1'
    (committing every 1000 rows), and finally builds a unique index over
    the hash columns.

    :param verify: must be the literal string "OK" to run; anything else
        aborts with ``argparse.ArgumentTypeError``.
    """
    # batching the database
    if verify != "OK":
        # if the user does not enter OK they are not going to run the script.
        print("Use the same command with 'OK' to verify you have enough storage.")
        raise argparse.ArgumentTypeError('')
        exit(1)  # NOTE(review): unreachable — the raise above already aborts.
    c.execute("""SELECT DISTINCT ASCII FROM hashlist WHERE CALC='0'""")  # select only one version of the term in case of duplication
    rows = c.fetchall()  # get the output of the SQLite query
    count = 0  # set a counter.
    for ASCII in tqdm(rows, desc="Batching", smoothing=0.1, unit=" w"):  # create a loop with a progress bar
        ASCII = ASCII[0]  # the tupule is now a string.
        BASE32 = base64.b32encode(ASCII)  # encode the string in Base32
        BASE64 = base64.b64encode(ASCII)  # encode the string in Base64
        # NOTE(review): b32encode/md5 etc. are fed ASCII without .encode();
        # this only works when ASCII is already bytes (or on Python 2) —
        # confirm the intended interpreter, given the .encode() calls below.
        MD5 = hashlib.md5(ASCII).hexdigest()  # encode the string in MD5
        SHA1 = hashlib.sha1(ASCII).hexdigest()  # ...
        SHA224 = hashlib.sha224(ASCII).hexdigest()
        SHA256 = hashlib.sha256(ASCII).hexdigest()
        SHA384 = hashlib.sha384(ASCII).hexdigest()
        SHA512 = hashlib.sha512(ASCII).hexdigest()
        NTLM = hashlib.new('md4', ASCII.encode('utf-16le')).hexdigest()  # encode the string in NTLM
        BLAKE2B = pyblake2.blake2b(ASCII.encode('utf-8')).hexdigest()
        BLAKE2S = pyblake2.blake2s(ASCII.encode('utf-8')).hexdigest()
        # NOTE(review): hmac.new without digestmod defaults to MD5 on old
        # Pythons and is a TypeError on Python 3.8+ — confirm target version.
        HMAC = hmac.new(ASCII.encode('utf-8')).hexdigest()
        # One UPDATE statement per computed form, keyed on the ASCII term.
        base32qry = "UPDATE hashlist SET BASE32 = ? WHERE ASCII = ?"  # make a query to update the current string to have its alternative forms as an entry.
        base64qry = "UPDATE hashlist SET BASE64 = ? WHERE ASCII = ?"  # ...
        md5qry = "UPDATE hashlist SET MD5 = ? WHERE ASCII = ? "
        sha1qry = "UPDATE hashlist SET SHA1 = ? WHERE ASCII = ? "
        sha224qry = "UPDATE hashlist SET SHA224 = ? WHERE ASCII = ?"
        sha256qry = "UPDATE hashlist SET SHA256 = ? WHERE ASCII = ?"
        sha384qry = "UPDATE hashlist SET SHA384 = ? WHERE ASCII = ?"
        sha512qry = "UPDATE hashlist SET SHA512 = ? WHERE ASCII = ?"
        NTLMqry = "UPDATE hashlist SET NTLM = ? WHERE ASCII = ?"
        BLAKE2Bqry = "UPDATE hashlist SET BLAKE2B = ? WHERE ASCII = ?"
        BLAKE2Sqry = "UPDATE hashlist SET BLAKE2S = ? WHERE ASCII = ?"
        HMACqry = "UPDATE hashlist SET HMAC = ? WHERE ASCII = ?"
        base32data = (BASE32, ASCII)  # the command that will combine both to apply the change of the entry.
        base64data = (BASE64, ASCII)  # ...
        md5data = (MD5, ASCII)
        sha1data = (SHA1, ASCII)
        sha224data = (SHA224, ASCII)
        sha256data = (SHA256, ASCII)
        sha384data = (SHA384, ASCII)
        sha512data = (SHA512, ASCII)
        ntlmdata = (NTLM, ASCII)
        blake2bdata = (BLAKE2B, ASCII)
        blake2sdata = (BLAKE2S, ASCII)
        hmacdata = (HMAC, ASCII)
        c.execute(base32qry, base32data)  # execute the query.
        c.execute(base64qry, base64data)  # ...
        c.execute(md5qry, md5data)
        c.execute(sha1qry, sha1data)
        c.execute(sha224qry, sha224data)
        c.execute(sha256qry, sha256data)
        c.execute(sha384qry, sha384data)
        c.execute(sha512qry, sha512data)
        c.execute(NTLMqry, ntlmdata)
        c.execute(BLAKE2Bqry, blake2bdata)
        c.execute(BLAKE2Sqry, blake2sdata)
        c.execute(HMACqry, hmacdata)
        update = "UPDATE hashlist SET CALC=? WHERE ASCII = ? "  # set the row CALC to 1 as it doesn't need to be hashed again.
        updatedata = ("1", ASCII)
        c.execute(update, updatedata)
        count +=1
        if count == 1000:  # automatically commit every 1000 reps.
            conn.commit()  # commit the changes.
            count= 0
    print("Indexing database ...")
    c.execute('''CREATE UNIQUE INDEX "HASHED" ON "hashlist" ("BASE32","BASE64","MD5","SHA1","SHA224","SHA256","SHA384","SHA512", "NTLM", "BLAKE2B", "BLAKE2S", "HMAC");''')  # index the changes as it will make searching MUCH faster.
    conn.commit()  # commit changes
    print("Done.")
data = json.load(f) directory = 'accounts' account = {} if not os.path.exists(directory): os.makedirs(directory) for y in data['accounts']: fw = open(directory + '/' + y['info'] + '.json', 'w') keydata = bytes.fromhex(y['private']) sk = ed25519_blake2b.SigningKey(keydata) ################################################################################ ## GENERATE SEND INFORMATION ################################################################################ h = blake2b(digest_size=32) h.update(qlmdb3.fromaccount(y['account'])) # destination h.update(binascii.unhexlify(qlmdb3.hexstr(int(y['amount']), 16))) # amount send_hash = binascii.hexlify(h.digest()).decode('ascii') hashdata = bytes.fromhex(send_hash) sig = sk.sign(hashdata) hexSig = sig.hex().upper() account['funding_info'] = { 'account': binascii.hexlify(qlmdb3.fromaccount( y['account'])).decode('ascii').upper(), 'amount': y['amount'], 'signature':
def secureHash(message):
    """Return the Base58 encoding of SHA-256(BLAKE2b-256(message)), where
    ``message`` is UTF-8 encoded before hashing."""
    inner = blake2b(message.encode(), digest_size=32).digest()
    return base58.b58encode(sha256(inner).digest())