def _make_master_key(self):
    """Derive the final KDB4 master key.

    Steps: (1) hash the concatenated credential hashes into a composite
    key, (2) stretch it with the transform seed for the configured number
    of rounds, (3) hash the stretched key together with the master seed.
    """
    super(KDB4File, self)._make_master_key()
    composite_hash = sha256(''.join(self.keys))
    stretched = transform_key(composite_hash,
                              self.header.TransformSeed,
                              self.header.TransformRounds)
    self.master_key = sha256(self.header.MasterSeed + stretched)
def _make_master_key(self):
    """Build the master key from the registered credentials.

    The credential hashes are combined into one composite SHA-256, key-
    stretched with ``TransformSeed`` for ``TransformRounds`` iterations,
    and finally hashed together with ``MasterSeed``.
    """
    super(KDB4File, self)._make_master_key()
    # composite credential hash
    composite = sha256(''.join(self.keys))
    # key stretching per the header parameters
    transformed_key = transform_key(composite, self.header.TransformSeed,
                                    self.header.TransformRounds)
    self.master_key = sha256(self.header.MasterSeed + transformed_key)
def _serialize_data(root, header, password):
    """Serialize *root* into an encrypted database byte string.

    Refreshes the header counters, random seed and IV, concatenates all
    group records followed by all entry records, records their hash in
    the header, and returns the serialized header plus encrypted body.
    """
    groups, entries = root.get_groups_and_entries()
    # refresh header bookkeeping
    header.num_groups = len(groups)
    header.num_entries = len(entries)
    header.final_random_seed = crypto.randomize(16)
    header.encryption_iv = crypto.randomize(16)
    # body = all group records, then all entry records
    body = str()
    for record in groups:
        body += record.to_bytearray()
    for record in entries:
        body += record.to_bytearray()
    # integrity hash over the plaintext body
    header.contents_hash = crypto.sha256(body)
    # encrypt with the password-derived key
    ciphertext = crypto.encrypt(body,
                                Database._generate_key(header, password),
                                header.encryption_iv)
    # file content = serialized header followed by the encrypted body
    data = str()
    data += header.to_bytearray()
    data += ciphertext
    return data
def openReport(clouditem, uploadID=None):
    """Load the parsed JSON report for *clouditem*.

    Looks up the matching Upload row (optionally narrowed by *uploadID*),
    rebuilds the hashed folder name the report was stored under, and
    returns the decoded JSON document, or None when no upload exists.
    """
    try:
        # get the upload record
        if uploadID is not None:
            uploadQuery = Upload.objects.get(cloudItemID=clouditem,
                                             id=uploadID)
        else:
            uploadQuery = Upload.objects.get(cloudItemID=clouditem)
        # build the name of the folder: sha256(fileName|<upload date as "U">)
        hashFolder = crypto.sha256(
            uploadQuery.fileName + crypto.HASH_SEPARATOR +
            format(uploadQuery.uploadDate, "U")).hexdigest()
        report = os.path.join(settings.UPLOAD_DIR, str(clouditem.id),
                              hashFolder, uploadQuery.fileName,
                              uploadQuery.fileName + ".report")
        # BUG FIX: the original assigned the open file to a variable that
        # shadowed this function's name and never closed the handle; use a
        # context manager instead.
        with open(report, "rb") as reportFile:
            return json.load(reportFile)
    except ObjectDoesNotExist:
        return None
def findExif(self):
    """Collect GPS-tagged photos from the cached Dropbox metadata.

    Returns a list of dicts with the photo path, latitude/longitude and
    the name of the generated thumbnail file.
    """
    results = list()
    # walk every file of every folder in the cached metadata
    for folder in self.metadata:
        for entry in folder['contents']:
            if "photo_info" not in entry:
                continue
            coords = entry["photo_info"]["lat_long"]
            if coords is None:
                continue
            latitude, longitude = coords[0], coords[1]
            # on-disk file name: sha256(basename|altName)_altName
            altName = dropboxAlternateName(entry['path'], entry['modified'])
            fName = crypto.sha256(
                os.path.basename(entry['path']) + crypto.HASH_SEPARATOR +
                altName).hexdigest() + "_" + altName
            # source image and thumbnail destination
            # (".thubmnail" spelling matches the rest of the project)
            srcDir = os.path.join(settings.DOWNLOAD_DIR, self.d.folder,
                                  "files", fName)
            dstDir = os.path.join(settings.DIFF_DIR, fName + ".thubmnail")
            # generate a 250x250 thumbnail
            thumb = Thubmnailer()
            thumb.cacheImg(srcDir, dstDir, 250, 250)
            results.append({"title": entry['path'], "lat": latitude,
                            "lon": longitude,
                            'fName': fName + ".thubmnail"})
    return results
def findExif(self):
    """Collect GPS-tagged images from the cached Google Drive metadata.

    Returns a list of dicts with the file title, latitude/longitude and
    the generated thumbnail file name.
    """
    results = []
    for entry in self.metadata:
        if "imageMediaMetadata" not in entry:
            continue
        image_meta = entry["imageMediaMetadata"]
        if "location" not in image_meta:
            continue
        latitude = image_meta["location"]["latitude"]
        longitude = image_meta["location"]["longitude"]
        # on-disk file name: sha256(title|id)_id
        fName = crypto.sha256(entry['title'] + crypto.HASH_SEPARATOR +
                              entry['id']).hexdigest() + "_" + entry['id']
        # source image and thumbnail destination
        srcDir = os.path.join(settings.DOWNLOAD_DIR, self.d.folder,
                              "files", fName)
        dstDir = os.path.join(settings.DIFF_DIR, fName + ".thubmnail")
        # generate a 250x250 thumbnail
        Thubmnailer().cacheImg(srcDir, dstDir, 250, 250)
        results.append({"title": entry['title'], "lat": latitude,
                        "lon": longitude, 'fName': fName + ".thubmnail"})
    return results
def googleExifFinder(self):
    """Find GPS-tagged Google Drive images from the stored metadata.

    Decodes the metadata stored for this token and returns a list of
    dicts with title, latitude/longitude and thumbnail file name.
    """
    raw_metadata = FileMetadata.objects.get(tokenID=self.t).metadata
    decoded = self.decodeMetaData(raw_metadata)
    results = []
    for record in decoded:
        if "imageMediaMetadata" not in record:
            continue
        image_meta = record["imageMediaMetadata"]
        if "location" not in image_meta:
            continue
        lat = image_meta["location"]["latitude"]
        lon = image_meta["location"]["longitude"]
        # on-disk file name: sha256(title|id)_id
        fName = crypto.sha256(record['title'] + crypto.HASH_SEPARATOR +
                              record['id']).hexdigest() + "_" + record['id']
        # source image and thumbnail destination
        srcDir = os.path.join(settings.DOWNLOAD_DIR, self.d.folder,
                              "files", fName)
        dstDir = os.path.join(settings.DIFF_DIR, fName + ".thubmnail")
        # generate a 250x250 thumbnail
        thumb = Thubmnailer()
        thumb.cacheImg(srcDir, dstDir, 250, 250)
        results.append({"title": record['title'], "lat": lat, "lon": lon,
                        'fName': fName + ".thubmnail"})
    return results
def is_minikey(text):
    """Return True when *text* looks like a valid Casascius minikey.

    Minikeys are typically 22 or 30 characters, but any length of 20 or
    more is accepted.  A valid minikey starts with 'S', contains only
    base58 characters, and sha256(minikey + '?') must begin with a zero
    byte.  Widely used in Casascius physical bitcoins.
    """
    if len(text) < 20 or text[0] != 'S':
        return False
    if not all(ord(c) in __b58chars for c in text):
        return False
    return sha256(text + '?')[0] == 0x00
def test_sha256(self):
    """Test SHA-256 hex digests of known text strings.

    Each precomputed sample is "<input>:<expected hex digest>".
    """
    precomputed = {
        0: "python|test:dba9e76e92cfd0e09ec192bec8ea6b8e7ce48857bad498c5a6d19300ebd7f1bd",
        1: "summer|winter|autumn|spring:a4a8064b9781f1ea0107d8644e2d7ab266a3f66cb84f5590d2f6495d9eb62db1",
        2: "berner|fach|hoch|schule:48fb694fa1c17a4dee36c300e60ed2719300e45a9e19d1fb2521bb289630b745",
        3: "ייי|אאא|$$$:4b472ac5d71b4e5836666b6e7b2b979ce66a2f4d6fd09bf8a06c388621d4ac19",
        4: "--|--|''''|???:e9feaa5e40796a040b6f7c1482395232999d4caf96bfdaf20ba353358bf774a4",
    }
    for key in precomputed:
        string, _, hashString = precomputed[key].partition(":")
        # FIX: assertEquals is a deprecated alias of assertEqual
        self.assertEqual(hashString, crypto.sha256(string).hexdigest())
    # FIX: the py2-only print statement is now a print() call, which is
    # valid on both Python 2 and 3 (single parenthesized argument)
    print(crypto.sha256("Syst\u00e8me d'autorisation").hexdigest())
def __init__(self, hostname, username, password, profile):
    """Store the connection credentials and derive a signature hash.

    The signature is sha256("host|user|password") when all three
    credentials are truthy, otherwise a placeholder string.
    """
    self._hostname = hostname
    self._username = username
    self._password = password
    self._profile = profile
    # FIX: read back the attributes that were just assigned.  The
    # original read self.hostname / self.username / self.password, which
    # only works when matching properties exist on the class; using the
    # private attributes gives the same values and cannot AttributeError.
    if self._hostname and self._username and self._password:
        self._signature = crypto.sha256(
            self._hostname + crypto.HASH_SEPARATOR +
            self._username + crypto.HASH_SEPARATOR +
            self._password).hexdigest()
    else:
        self._signature = "<empty signature>"
def __init__(self, unprotect=True):
    """Parse the decrypted XML payload and set up value decryption.

    Parses the in-buffer into an objectified XML tree and creates the
    Salsa20 stream cipher used for protected values (keyed with
    sha256(ProtectedStreamKey) and the fixed KDB4 IV).  Unless
    *unprotect* is False, protected values are decrypted immediately.
    """
    # parse the XML payload (independent of the cipher setup below)
    self.in_buffer.seek(0)
    self.tree = objectify.parse(self.in_buffer)
    self.obj_root = self.tree.getroot()
    # Salsa20 keystream for in-memory protection of values
    self._salsa_buffer = bytearray()
    self.salsa = Salsa20(sha256(self.header.ProtectedStreamKey),
                         KDB4_SALSA20_IV)
    if unprotect:
        self.unprotect()
def create_p2pkh_transaction(utxosets, outputs, custom_pushdata=False):
    """Build and sign a P2PKH transaction over the given UTXOs.

    :param utxosets: list of dicts convertible via ``Unspent.from_dict``;
        each must also carry a ``'PrivateKey'`` WIF entry used to sign
        the corresponding input.
    :param outputs: passed through to ``construct_output_block``.
    :param custom_pushdata: forwarded to ``construct_output_block``.
    :return: hex string of the serialized, signed transaction.
    """
    version = VERSION_1
    lock_time = LOCK_TIME
    # sequence = SEQUENCE
    hash_type = HASH_TYPE
    unspents = [Unspent.from_dict(utxo) for utxo in utxosets]
    input_count = int_to_varint(len(unspents))
    output_count = int_to_varint(len(outputs))
    output_block = construct_output_block(outputs, custom_pushdata=custom_pushdata)
    # Optimize for speed, not memory, by pre-computing values.
    inputs = []
    for unspent in unspents:
        # txid is stored big-endian in hex; the wire format is little-endian
        txid = hex_to_bytes(unspent.txid)[::-1]
        txindex = unspent.txindex.to_bytes(4, byteorder='little')
        amount = unspent.amount.to_bytes(8, byteorder='little')
        inputs.append(TxIn('', 0, txid, txindex, amount))
    # midstate hashes shared by every input's sighash preimage
    hashPrevouts = double_sha256(b''.join([i.txid + i.txindex for i in inputs]))
    hashSequence = double_sha256(b''.join([SEQUENCE for i in inputs]))
    hashOutputs = double_sha256(output_block)
    # scriptCode_len is part of the script.
    for i, txin in enumerate(inputs):
        private_key = bsv(wif=utxosets[i]['PrivateKey'])
        public_key = bytes.fromhex(private_key.public_key)
        public_key_len = len(public_key).to_bytes(1, byteorder='little')
        # standard P2PKH scriptCode for this input's address
        scriptCode = (OP_DUP + OP_HASH160 + OP_PUSH_20 +
                      address_to_public_key_hash(private_key.address) +
                      OP_EQUALVERIFY + OP_CHECKSIG)
        scriptCode_len = int_to_varint(len(scriptCode))
        # sighash preimage layout follows the BIP-143-style digest
        to_be_hashed = (version + hashPrevouts + hashSequence + txin.txid +
                        txin.txindex + scriptCode_len + scriptCode +
                        txin.amount + SEQUENCE + hashOutputs + lock_time +
                        hash_type)
        hashed = sha256(to_be_hashed)  # BIP-143: Used for Bitcoin SV
        # signature = private_key.sign(hashed) + b'\x01'
        # 0x41 is presumably SIGHASH_ALL | FORKID for BSV -- confirm
        signature = private_key.sign(hashed) + b'\x41'
        script_sig = (len(signature).to_bytes(1, byteorder='little') +
                      signature + public_key_len + public_key)
        inputs[i].script = script_sig
        inputs[i].script_len = int_to_varint(len(script_sig))
    return bytes_to_hex(version + input_count + construct_input_block(inputs) +
                        output_count + output_block + lock_time)
def __init__(self, unprotect=True):
    """Initialize the protected-value cipher and parse the XML payload.

    Keys a Salsa20 stream with sha256(ProtectedStreamKey) and the fixed
    KDB4 IV, rewinds and parses the in-buffer, and (by default) decrypts
    all protected values right away.
    """
    # Salsa20 keystream used to (un)protect individual values
    self._salsa_buffer = bytearray()
    protected_stream_key = sha256(self.header.ProtectedStreamKey)
    self.salsa = Salsa20(protected_stream_key, KDB4_SALSA20_IV)
    # parse the XML payload from the start of the buffer
    self.in_buffer.seek(0)
    self.tree = objectify.parse(self.in_buffer)
    self.obj_root = self.tree.getroot()
    if unprotect:
        self.unprotect()
def _decrypt(self, stream):
    """Decrypt *stream* into ``self.in_buffer`` and verify integrity.

    Raises IOError when the content hash does not match (wrong master
    key or corrupted file).
    """
    super(KDB3File, self)._decrypt(stream)
    plaintext = unpad(aes_cbc_decrypt(stream.read(), self.master_key,
                                      self.header.EncryptionIV))
    if self.header.ContentHash != sha256(plaintext):
        raise IOError('Master key invalid.')
    # expose the decrypted payload and flag the file as opened
    self.in_buffer = io.BytesIO(plaintext)
    self.opened = True
def __init__(self, unprotect=True):
    """Parse and normalize the XML payload, then set up value decryption.

    Besides keying the Salsa20 cipher and parsing the tree, this variant
    strips xsi:nil annotations and cleans up unused namespaces before
    optionally unprotecting all protected values.
    """
    # Salsa20 keystream for protected values
    self._salsa_buffer = bytearray()
    self.salsa = Salsa20(sha256(self.header.ProtectedStreamKey),
                         KDB4_SALSA20_IV)
    # parse the XML payload
    self.in_buffer.seek(0)
    self.tree = objectify.parse(self.in_buffer)
    self.obj_root = self.tree.getroot()
    # normalize the tree: drop xsi:nil annotations and stale namespaces
    objectify.deannotate(self.obj_root, xsi_nil=True)
    etree.cleanup_namespaces(self.obj_root)
    if unprotect:
        self.unprotect()
def get_random_key():
    """Generate a random private key.

    You should only use this for testing purposes... unless you like to
    take risks.
    """
    while True:
        # Let's trust the OS entropy source
        digest = sha256(urandom(256))
        if PRIVKEY_MIN <= bytes_to_int(digest) <= PRIVKEY_MAX:
            # the key material handed over is the hash bytes themselves
            return PrivateKey(digest)
def load_plain_keyfile(filename):
    """Load a "plain" keyfile.

    A plain keyfile is a file containing only the key.  Any other file
    (JPEG, MP3, ...) can also be used as a keyfile, in which case its
    contents are hashed to derive the key.
    """
    with open(filename, 'rb') as handle:
        contents = handle.read()
        # exactly 32 bytes: assume it is the raw key
        if len(contents) == 32:
            return contents
        # exactly 64 bytes: assume the key is hex encoded
        if len(contents) == 64:
            return contents.decode('hex')
        # anything else: hash the whole file to derive the key
        return sha256(contents)
def _make_master_key(self):
    """Derive the final master key for KDB3 files.

    The first credential hash is stretched with ``MasterSeed2`` for
    ``KeyEncRounds`` rounds and then hashed together with ``MasterSeed``.

    TODO: keyfile support -- python-keepass does not support keyfiles and
    KDB3 appears to hash composite keys differently, so only the first
    credential hash is used here instead of a composite of all keys.
    """
    super(KDB3File, self)._make_master_key()
    composite = self.keys[0]
    stretched = transform_key(composite,
                              self.header.MasterSeed2,
                              self.header.KeyEncRounds)
    self.master_key = sha256(self.header.MasterSeed + stretched)
def load_plain_keyfile(filename):
    """Load a "plain" keyfile.

    A "plain" keyfile is a file containing only the key.  Any other file
    (JPEG, MP3, ...) can also be used as keyfile, in which case its
    contents are hashed to derive the key.
    """
    with open(filename, 'rb') as f:
        key = f.read()
        # if the length is 32 bytes we assume it is the key
        if len(key) == 32:
            return key
        # if the length is 64 bytes we assume the key is hex encoded
        if len(key) == 64:
            return key.decode('hex')
        # anything else may be a file to hash for the key
        return sha256(key)
    # FIX: removed the unreachable `raise IOError('Could not read
    # keyfile.')` that followed the unconditional return above; open()
    # already raises IOError/OSError when the file cannot be read.
def _write_header(self, stream): """Serialize the header fields from self.header into a byte stream, prefix with file signature and version before writing header and out-buffer to `stream`. Note, that `stream` is flushed, but not closed!""" # serialize header to stream header = bytearray() # write file signature header.extend(struct.pack('<II', *KDB4_SIGNATURE)) # and version header.extend(struct.pack('<hh', 0, 3)) field_ids = self.header.keys() field_ids.sort() field_ids.reverse() # field_id 0 must be last for field_id in field_ids: value = self.header.b[field_id] length = len(value) header.extend(struct.pack('<b', field_id)) header.extend(struct.pack('<h', length)) header.extend(struct.pack('{}s'.format(length), value)) # write header to stream stream.write(header) headerHash = base64.b64encode(sha256(header)) self.obj_root.Meta.HeaderHash = headerHash # create HeaderHash if it does not exist if len(self.obj_root.Meta.xpath("HeaderHash")) < 1: etree.SubElement(self.obj_root.Meta, "HeaderHash") # reload out_buffer because we just changed the HeaderHash self.protect() self.out_buffer = io.BytesIO(self.pretty_print()) # zip or not according to header setting if self.header.CompressionFlags == 1: self._zip() self._encrypt(); # write encrypted block to stream stream.write(self.out_buffer) stream.flush()
def _write_header(self, stream): """Serialize the header fields from self.header into a byte stream, prefix with file signature and version before writing header and out-buffer to `stream`. Note, that `stream` is flushed, but not closed!""" # serialize header to stream header = bytearray() # write file signature header.extend(struct.pack('<II', *KDB4_SIGNATURE)) # and version header.extend(struct.pack('<hh', 0, 3)) field_ids = self.header.keys() field_ids.sort() field_ids.reverse() # field_id 0 must be last for field_id in field_ids: value = self.header.b[field_id] length = len(value) header.extend(struct.pack('<b', field_id)) header.extend(struct.pack('<h', length)) header.extend(struct.pack('{}s'.format(length), value)) # write header to stream stream.write(header) headerHash = base64.b64encode(sha256(header)) self.obj_root.Meta.HeaderHash = headerHash # create HeaderHash if it does not exist if len(self.obj_root.Meta.xpath("HeaderHash")) < 1: etree.SubElement(self.obj_root.Meta, "HeaderHash") # reload out_buffer because we just changed the HeaderHash self.protect() self.out_buffer = io.BytesIO(self.pretty_print()) # zip or not according to header setting if self.header.CompressionFlags == 1: self._zip() self._encrypt() # write encrypted block to stream stream.write(self.out_buffer) stream.flush()
def _decrypt(header, data, password):
    """Decrypt a database body and verify it against the header hash.

    Raises DatabaseException for unknown ciphers, implausible plaintext
    sizes, or a contents-hash mismatch.
    """
    final_key = Database._generate_key(header, password)
    if header.cipher != Header.RIJNDAEL_CIPHER:
        raise DatabaseException("Unknown encryption algorithm.")
    decrypted_data = crypto.decrypt_aes(final_key, header.encryption_iv,
                                        data[Header.DB_HEADER_SIZE:])
    crypto_size = len(decrypted_data)
    # NOTE(review): 214783446 looks like a typo for 2147483446; kept
    # as-is to preserve behavior -- verify against the file format spec.
    if crypto_size > 214783446 or (not crypto_size and header.num_groups):
        raise DatabaseException("Decryption failed. The key is wrong or the file is damaged.")
    contents_hash = crypto.sha256(decrypted_data[:crypto_size])
    if header.contents_hash != contents_hash:
        raise DatabaseException("Hash test failed. The key is wrong or the file is damaged.")
    return decrypted_data
def openReport(clouditem, uploadID=None):
    """Return the parsed JSON report for *clouditem*, or None.

    Fetches the Upload row (optionally by *uploadID*), reconstructs the
    hashed report folder name and loads the ".report" JSON file.
    """
    try:
        # get the upload record
        if uploadID is not None:
            uploadQuery = Upload.objects.get(cloudItemID=clouditem,
                                             id=uploadID)
        else:
            uploadQuery = Upload.objects.get(cloudItemID=clouditem)
        # build the name of the folder: sha256(fileName|<upload date as "U">)
        hashFolder = crypto.sha256(
            uploadQuery.fileName + crypto.HASH_SEPARATOR +
            format(uploadQuery.uploadDate, "U")).hexdigest()
        report = os.path.join(settings.UPLOAD_DIR, str(clouditem.id),
                              hashFolder, uploadQuery.fileName,
                              uploadQuery.fileName + ".report")
        # BUG FIX: the original `openReport = open(report, "rb")` shadowed
        # this function's name and leaked the file handle; a context
        # manager closes it deterministically.
        with open(report, "rb") as reportFile:
            return json.load(reportFile)
    except ObjectDoesNotExist:
        return None
def add_vote(self, vote):
    """Validate *vote*, record it, and mine a block when the limit hits.

    The vote header receives a hash of its proof before validation.
    Once BLOCKCHAIN_BLOCK_VOTE_LIMIT votes have accumulated, they are
    signed, appended to the blockchain as one block, and the buffer is
    cleared.
    """
    proof_json = json.dumps(vote["content"]["proof"], sort_keys=True)
    vote["header"]["hash_of_proof"] = crypto.sha256(proof_json)
    self.validate_vote(vote)
    self.check_casted_vote(vote)
    self._votes.append(vote)
    if len(self._votes) < config.BLOCKCHAIN_BLOCK_VOTE_LIMIT:
        return
    # enough votes: sign the batch and commit it as a new block
    votes_json = json.dumps(self._votes, sort_keys=True)
    self.blockchain.add_block({
        "content": self._votes,
        "header": {
            "signature": crypto.sign(votes_json, self.private_key),
            "miner_id": self.id,
        },
    })
    self._votes = []
def findExif(self):
    """Gather GPS-tagged Dropbox photos and build their thumbnails.

    Walks the cached per-folder metadata and returns a list of dicts
    carrying the photo path, coordinates and thumbnail file name.
    """
    found = list()
    for folder_meta in self.metadata:
        for file_meta in folder_meta['contents']:
            if "photo_info" not in file_meta:
                continue
            lat_long = file_meta["photo_info"]["lat_long"]
            if lat_long is None:
                continue
            lat, lon = lat_long[0], lat_long[1]
            # on-disk file name: sha256(basename|altName)_altName
            altName = dropboxAlternateName(file_meta['path'],
                                           file_meta['modified'])
            fName = crypto.sha256(
                os.path.basename(file_meta['path']) +
                crypto.HASH_SEPARATOR + altName).hexdigest() + "_" + altName
            # source image and thumbnail destination
            # (".thubmnail" spelling matches the rest of the project)
            srcDir = os.path.join(settings.DOWNLOAD_DIR, self.d.folder,
                                  "files", fName)
            dstDir = os.path.join(settings.DIFF_DIR, fName + ".thubmnail")
            # generate a 250x250 thumbnail
            Thubmnailer().cacheImg(srcDir, dstDir, 250, 250)
            found.append({
                "title": file_meta['path'],
                "lat": lat,
                "lon": lon,
                'fName': fName + ".thubmnail"
            })
    return found
def _decrypt(header, data, password):
    """Decrypt the database payload and check its integrity.

    Only the Rijndael/AES cipher is supported; any other cipher id, an
    implausible plaintext size, or a contents-hash mismatch raises
    DatabaseException.
    """
    final_key = Database._generate_key(header, password)
    if header.cipher != Header.RIJNDAEL_CIPHER:
        raise DatabaseException("Unknown encryption algorithm.")
    plaintext = crypto.decrypt_aes(final_key, header.encryption_iv,
                                   data[Header.DB_HEADER_SIZE:])
    size = len(plaintext)
    # NOTE(review): 214783446 looks like a typo for 2147483446; kept
    # unchanged to preserve behavior -- verify against the format spec.
    if size > 214783446 or (not size and header.num_groups):
        raise DatabaseException(
            "Decryption failed. The key is wrong or the file is damaged.")
    if header.contents_hash != crypto.sha256(plaintext[:size]):
        raise DatabaseException(
            "Hash test failed. The key is wrong or the file is damaged.")
    return plaintext
def parse_locking_script(script):
    """Classify a locking script.

    Returns a ``(type, address, data)`` triple; *data* is None except for
    nulldata (OP_RETURN) scripts.  Raises ScriptError when the script
    matches no known template.
    """
    if (script[-1] == OP_CHECKSIG) & (len(script) in [35, 67]):
        # Pay-to-Public-Key
        pubkey, _ = read_data(script)
        return "p2pk", Address.from_pubkey(pubkey), None
    elif ((script[0] == OP_DUP) & (script[1] == OP_HASH160) &
          (script[-2] == OP_EQUALVERIFY) & (script[-1] == OP_CHECKSIG) &
          (len(script) == 25)):
        # Pay-to-Public-Key-Hash
        h, _ = read_data(script[2:-2])
        return "p2pkh", Address.from_pubkey_hash(h), None
    elif ((script[0] == OP_HASH160) & (script[-1] == OP_EQUAL) &
          (len(script) == 23)):
        # Pay-to-Script-Hash
        h, _ = read_data(script[1:-1])
        return "p2sh", Address.from_script_hash(h), None
    elif (script[0] == OP_RETURN) & (len(script) <= 223):
        # nulldata: synthesize a "d-" pseudo-address from the script hash
        address = "d-" + sha256(script.hex().encode('utf-8'))[:16].hex()
        data = read_nulldata_script(script)
        # BUG FIX: the return below was commented out, so this branch
        # fell through and the function implicitly returned None
        return "nulldata", address, data
    else:
        raise ScriptError("cannot parse locking script")
def private_key_to_wif(private_key: int, compressed: bool = True) -> str:
    """Encode an integer private key in Wallet Import Format.

    The payload is ``0x80`` + 32 big-endian key bytes (plus ``0x01`` when
    the corresponding public key is compressed); the checksum is the
    first four bytes of its double SHA-256.  The result is base58-encoded.
    """
    body = b'\x80' + private_key.to_bytes(32, byteorder='big')
    if compressed:
        body += b'\x01'
    check = sha256(sha256(body))[:4]
    return b58_encode(body + check)
def generate_sighash_single_rawtx(utxosets, changeaddress, authrized_amount):
    """Build a raw transaction whose first input is signed SIGHASH_SINGLE
    and all remaining inputs SIGHASH_NONE (each with the 0x40 fork flag).

    The single output returns ``total inputs - authrized_amount`` to
    *changeaddress*.  Returns a dict with the hex-encoded transaction
    pieces (version / input / output / lock_time) so the caller can
    assemble or extend the transaction.
    """
    unspents = [Unspent.from_dict(utxo) for utxo in utxosets]
    version = VERSION_1
    lock_time = LOCK_TIME
    # sequence = SEQUENCE
    input_count = int_to_varint(len(unspents))
    inputs = []
    total_input_amount = 0
    for unspent in unspents:
        # txid hex is big-endian; wire format wants little-endian bytes
        txid = hex_to_bytes(unspent.txid)[::-1]
        txindex = unspent.txindex.to_bytes(4, byteorder='little')
        amount = unspent.amount.to_bytes(8, byteorder='little')
        inputs.append(TxIn('', 0, txid, txindex, amount))
        total_input_amount += unspent.amount  # satoshi
    # single change output paying total - authorized amount
    output_count = int_to_varint(1)
    output_block = b''
    output_script = (OP_DUP + OP_HASH160 + OP_PUSH_20 +
                     address_to_public_key_hash(changeaddress) +
                     OP_EQUALVERIFY + OP_CHECKSIG)
    output_block += (total_input_amount - authrized_amount).to_bytes(
        8, byteorder='little')  # satoshi
    output_block += int_to_varint(len(output_script))
    output_block += output_script
    hashPrevouts = double_sha256(b''.join([i.txid + i.txindex for i in inputs]))
    # SIGHASH_NONE/SINGLE leave hashSequence zeroed
    hashSequence = bytes.fromhex(
        '0000000000000000000000000000000000000000000000000000000000000000')
    # scriptCode_len is part of the script.
    for i, txin in enumerate(inputs):
        if i == 0:
            # first input commits to the single output: SIGHASH_SINGLE
            hashOutputs = double_sha256(output_block)
            hash_type = 0x43.to_bytes(4, byteorder='little')  # sighash single
        else:
            # remaining inputs commit to no outputs: SIGHASH_NONE
            hashOutputs = bytes.fromhex(
                '0000000000000000000000000000000000000000000000000000000000000000'
            )
            hash_type = 0x42.to_bytes(4, byteorder='little')  # sighash none
        private_key = bsv(utxosets[i]['PrivateKey'])
        public_key = bytes.fromhex(private_key.public_key)
        public_key_len = len(public_key).to_bytes(1, byteorder='little')
        # standard P2PKH scriptCode for this input's address
        scriptCode = (OP_DUP + OP_HASH160 + OP_PUSH_20 +
                      address_to_public_key_hash(private_key.address) +
                      OP_EQUALVERIFY + OP_CHECKSIG)
        scriptCode_len = int_to_varint(len(scriptCode))
        to_be_hashed = (version + hashPrevouts + hashSequence + txin.txid +
                        txin.txindex + scriptCode_len + scriptCode +
                        txin.amount + SEQUENCE + hashOutputs + lock_time +
                        hash_type)
        hashed = sha256(to_be_hashed)  # BIP-143: Used for Bitcoin SV
        # sighash byte appended to the DER signature:
        # ALL = b'\x01', NONE = b'\x02', SINGLE = b'\x03' (| 0x40 fork id)
        if i == 0:
            signature = private_key.sign(hashed) + b'\x43'
        else:
            signature = private_key.sign(hashed) + b'\x42'
        script_sig = (len(signature).to_bytes(1, byteorder='little') +
                      signature + public_key_len + public_key)
        inputs[i].script = script_sig
        inputs[i].script_len = int_to_varint(len(script_sig))
    return {
        "version": bytes_to_hex(version),
        "input": bytes_to_hex(input_count + construct_input_block(inputs)),
        "output": bytes_to_hex(output_block),
        "lock_time": bytes_to_hex(lock_time)
    }
def get_address(self):
    """Get the associated Bitcoin address.

    NOTE(review): this base58check-encodes ``hash160(sha256(pubkey))``;
    since hash160 conventionally already includes a SHA-256, confirm the
    extra hash here is intended.
    """
    public_key = self.get_public_key()
    digest = hash160(sha256(public_key))
    return b58c_encode(digest)
def manageReportUpload(request, cloudItem):
    """Decrypt and store an uploaded report.

    Rejects a second upload for the same cloud item, writes the uploaded
    file under a hashed folder name, RSA-decrypts the embedded AES key,
    Fernet-decrypts the ZIP payload, extracts it, and records the Upload
    row as parsed.
    """
    # check if an item is already in the DB
    impDb = Upload.objects.filter(
        cloudItemID=CloudItem.objects.get(id=cloudItem))
    if len(impDb) != 0:
        # BUG FIX: `raise ("...")` tried to raise a plain string, which
        # is not a valid exception; raise a real Exception instead
        raise Exception("A report already exists.")
    # add path for crypto
    cryptoPath = os.path.join(os.path.dirname(settings.BASE_DIR), "finder")
    if not cryptoPath in sys.path:
        sys.path.insert(1, cryptoPath)
    del cryptoPath
    import crypto
    fileUpload = request.FILES['fileUp']
    fileName = strip_tags(fileUpload.name)
    # create a folder for this cloud item if it does not exist
    path = os.path.join(settings.UPLOAD_DIR, cloudItem)
    if not os.path.isdir(path):
        os.mkdir(path)
    # upload folder name: sha256(<name minus 8-char suffix>|<unix time>)
    upTime = timezone.now()
    shaName = fileName
    uploadName = crypto.sha256(shaName[:-8] + crypto.HASH_SEPARATOR +
                               format(upTime, "U")).hexdigest()
    wholeUploadPath = os.path.join(path, uploadName)
    if not os.path.isdir(wholeUploadPath):
        os.mkdir(wholeUploadPath)
    # write uploaded chunks to disk
    with open(os.path.join(wholeUploadPath, fileName), 'wb+') as destination:
        for chunk in fileUpload.chunks():
            destination.write(chunk)
    # parse the uploaded JSON envelope (FIX: close the handle afterwards)
    with open(os.path.join(wholeUploadPath, fileName), "r") as fileCont:
        jsonParsed = json.load(fileCont)
    cont = jsonParsed['enc']
    k = jsonParsed['k']
    # decrypt AES key
    aes = crypto.decryptRSA(k)
    # decrypt ZIP - write encrypted content into a temp file, read it
    # back and decrypt (FIX: handles are now closed via `with`)
    tempFileName = os.path.join(wholeUploadPath, fileName + ".tmp")
    with open(tempFileName, "w+b") as tempFile:
        tempFile.write(cont)
    # fernet wants "bytes" as token
    with open(tempFileName, "rb") as tempFile:
        fileBytes = crypto.decryptFernetFile(tempFile.read(), aes)
    if fileName.endswith(".enc"):
        name = fileName[:-4]
    else:
        raise Exception("Invalid filename.")
    # write decrypted file to disc
    decZipFile = os.path.join(wholeUploadPath, name)
    with open(decZipFile, "w+b") as decFile:
        decFile.write(fileBytes)
    # delete temp file and drop the AES key reference
    os.remove(tempFileName)
    aes = None
    del aes
    # unzip
    fileZip = zipfile.ZipFile(decZipFile)
    fileZip.extractall(wholeUploadPath)
    # set this report parsed
    newUpload = Upload(fileName=name[:-4],
                       uploadDate=upTime,
                       uploadIP=request.META['REMOTE_ADDR'],
                       parsed=True,
                       cloudItemID=CloudItem.objects.get(id=cloudItem))
    newUpload.save()
(OP_MIN, lambda: (stack.append(min(stack.pop(), stack.pop())), inst.next())), (OP_MAX, lambda: (stack.append(min(stack.pop(), stack.pop()), inst.next()))), (OP_WITHIN, lambda: (stack.append(1 if stack[-2] <= stackstack[-1] < stack[-3] else 0), stack.pop(), stack.pop(), stack.pop(), inst.next())), (OP_RIPEMD160, lambda: (stack.append(crypto.ripemd160(stack.pop())), inst.next())), (OP_SHA1, lambda: (stack.append(crypto.sha1(stack.pop())), inst.next())), (OP_SHA256, lambda: (stack.append(crypto.hash(stack.pop())), inst.next())), (OP_HASH160, lambda: (stack.append( crypto.ripemd160(crypto.sha256(stack.pop()))), inst.next())), (OP_HASH256, lambda: (stack.append( crypto.hash(crypto.hash(stack.pop()))), inst.next())), (OP_CODESEPARATOR, lambda: (InvalidOpcode.rz( "Implement this plz", inst.next()))) #TODO what does this do? , (OP_CHECKSIG, lambda: (stack.append(1 if signed( stack.pop(), stack.pop()) else 0), inst.next())), (OP_CHECKSIGVERIFY, lambda: (stack.append(1 if signed( stack.pop(), stack.pop()) else 0), OP_VERIFY)), (OP_CHECKMULTISIG, lambda: (None, inst.next())) #TODO , (OP_CHECKMULTISIGVERIFY, lambda: (None, inst.next())) #TODO ] # OP_NA ,
def _generate_key(header, password):
    """Derive the final database key from *password* and header seeds."""
    raw_key = Database._get_master_key(password)
    # stretch the raw key with the header's transformation seed/rounds
    stretched_key = crypto.transform(raw_key,
                                     header.transf_random_seed,
                                     header.key_transf_rounds)
    # NOTE(review): crypto.sha256 receives a list here, unlike other call
    # sites which pass a single value -- presumably it hashes the
    # concatenation; confirm against the crypto module.
    return crypto.sha256([header.final_random_seed, stretched_key])
raise ScriptError("cannot parse locking script")


if __name__ == '__main__':
    import sys
    if sys.version_info < (3, 5):
        sys.exit("Error: Must be using Python 3.5 or higher")
    # Demo 1: SHA-1 collision bounty script -- spendable only by
    # providing two different inputs with the same SHA-1 digest
    print("SHA-1 collision bounty")
    script = bytes([
        OP_2DUP, OP_EQUAL, OP_NOT, OP_VERIFY, OP_SHA1, OP_SWAP, OP_SHA1,
        OP_EQUAL
    ])
    address = Address.from_script(script)
    print("script", script.hex())
    print("address", address.to_legacy())
    # Demo 2: trivial arithmetic puzzle script (3 + x == 5)
    script = bytes([op_number(3), OP_ADD, op_number(5), OP_EQUAL])
    address = Address.from_script(script)
    print("script", script.hex())
    print("address", address.to_legacy())
    print("hash", address.h.hex())
    # Demo 3: P2MS address -- bare multisig has no standard address form,
    # so an "m-" pseudo-address is synthesized from the script hash
    script = "5121032df7cde5c76b9d8dc36317c74952cc3fdc6d0afb30580ea3b63394497469d47a51ae"
    address = "m-" + sha256(script.encode('utf-8'))[:16].hex()
    print("script", script)
    print("P2MS address", address)
def _get_master_key(pw):
    """Hash the password into the raw master key.

    The password bytes are interpreted as cp1252 before hashing
    (presumably for KeePass legacy compatibility -- confirm).
    """
    decoded_password = pw.decode("cp1252")
    return crypto.sha256(decoded_password)
def add_credentials(self, **credentials):
    """Hash and register the supplied credentials.

    Accepts ``password`` (hashed directly) and/or ``keyfile`` (loaded
    and hashed via ``load_keyfile``).
    """
    # FIX: dict.has_key() was removed in Python 3; the `in` operator is
    # equivalent and matches the other copy of this method in the project
    if 'password' in credentials:
        self.add_key_hash(sha256(credentials['password']))
    if 'keyfile' in credentials:
        self.add_key_hash(load_keyfile(credentials['keyfile']))
def simple_secret_locking_script(secret):
    """Build a locking script satisfied by the SHA-256 preimage *secret*.

    *secret* may be ``str`` (UTF-8 encoded first) or ``bytes``.  The
    script is ``OP_SHA256 <push digest> OP_EQUAL``.
    """
    secret_bytes = secret.encode('utf-8') if isinstance(secret, str) else secret
    digest = sha256(secret_bytes)
    return bytes([OP_SHA256]) + push_data(digest) + bytes([OP_EQUAL])
,(OP_BOOLAND,lambda:(stack.append(1 if stack.pop() != 0 and stack.pop() != 0 else 0),inst.next())) ,(OP_BOOLOR,lambda:(stack.append(0 if stack.pop() == 0 and stack.pop() == 0 else 1),inst.next())) ,(OP_NUMEQUAL,lambda:(stack.append(1 if stack.pop() == stack.pop() else 0,inst.next()))) ,(OP_NUMEQUALVERIFY,lambda:(stack.append(1 if stack.pop() == stack.pop() else 0),OP_VERIFY)) ,(OP_NUMNOTEQUAL,lambda:(stack.append(0 if stack.pop() == stack.pop() else 1),inst.next())) ,(OP_LESSTHAN,lambda:(stack.append( 1 if stack.pop() > stack.pop() else 0),inst.next())) ,(OP_GREATERTHAN,lambda:(stack.append(1 if stack.pop() < stack.pop() else 0),inst.next())) ,(OP_LESSTHANOREQUAL,lambda:(stack.append(1 if stack.pop() >= stack.pop() else 0),inst.next())) ,(OP_GREATERTHANOREQUAL,lambda:(stack.append( 1 if stack.pop() <= stack.pop() else 0),inst.next())) ,(OP_MIN,lambda:(stack.append(min(stack.pop(), stack.pop())),inst.next())) ,(OP_MAX,lambda:(stack.append(min(stack.pop(), stack.pop()),inst.next()))) ,(OP_WITHIN,lambda:(stack.append( 1 if stack[-2] <= stackstack[-1] < stack[-3] else 0 ), stack.pop(), stack.pop(), stack.pop(),inst.next())) ,(OP_RIPEMD160,lambda:(stack.append(crypto.ripemd160(stack.pop())),inst.next())) ,(OP_SHA1,lambda:(stack.append(crypto.sha1(stack.pop())),inst.next())) ,(OP_SHA256,lambda:(stack.append(crypto.hash(stack.pop())),inst.next())) ,(OP_HASH160,lambda:(stack.append(crypto.ripemd160(crypto.sha256(stack.pop()))),inst.next())) ,(OP_HASH256,lambda:(stack.append(crypto.hash(crypto.hash(stack.pop()))),inst.next())) ,(OP_CODESEPARATOR,lambda:(InvalidOpcode.rz("Implement this plz",inst.next()))) #TODO what does this do? 
,(OP_CHECKSIG,lambda:( stack.append(1 if signed(stack.pop(),stack.pop()) else 0),inst.next())) ,(OP_CHECKSIGVERIFY,lambda:( stack.append(1 if signed(stack.pop(),stack.pop()) else 0),OP_VERIFY)) ,(OP_CHECKMULTISIG,lambda:(None,inst.next())) #TODO ,(OP_CHECKMULTISIGVERIFY,lambda:(None,inst.next())) #TODO ] # OP_NA ,genHandler(lambda num: lambda: (stack.append("".join([chr(inst.next()) for _ in xrange(num)])), inst.next()), 1, 75) # OP_int ,genHandler(lambda num: lambda: (stack.append(num-80), inst.next()), 82, 96) # OP_NOP ,genHandler(lambda _: lambda: (inst.next(),) , 176, 186))) DISABLED_OPS = set([ OP_SUBSTR, OP_LEFT, OP_RIGHT, OP_INVERT, OP_XOR,
print("SegWit Address (P2SH-P2WPKH)", segaddr_p2sh.to_legacy())
print()
# P2WSH multisig address demo
wifkeys_multisig = [
    "KzwQjFQPytv5x6w2cLdF4BSweGVCPEt8b8HbcuTi8e75LRQfw94L",
    "Ky4yk7uTBZ1EDbqyVfkvoZXURpWdRCxTpCERZb4gkn67fY8kK95R",
    "Kz3Htg8mSfC997qkBxpVCdxYhEoRcFj5ikUjE96ipVAJPou7MwRD"
]
pubkeys = [PublicKey.from_prvkey(wk) for wk in wifkeys_multisig]
print("--- 2-of-3 multisig address ---")
print("Private keys")
for wk in wifkeys_multisig:
    print("", wk)
redeem_script = multisig_locking_script(pubkeys, 2)
redeem_script_hash = sha256(redeem_script)
print("SHA256 of redeem script", redeem_script_hash.hex())
print("Legacy Address (P2SH)",
      Address.from_script(redeem_script).to_legacy())
# Witness program = sha256 of the redeem script
witprog_p2wsh = redeem_script_hash
# Native segwit P2WSH address
p2wsh_addr = SegWitAddr.encode(SegWitAddr.SEGWIT_HRP, witver, witprog_p2wsh)
print("SegWit Address (P2WSH)", p2wsh_addr)
# P2SH-nested segwit address (NOTE(review): this wraps the P2WSH witness
# program, not P2WPKH as the original comment said)
witness_script = segwit_locking_script(witver, witprog_p2wsh)
segaddr_p2sh_p2wsh = Address.from_script(witness_script)
def manageReportUpload(request, cloudItem):
    """Decrypt and store an uploaded report.

    Refuses a duplicate upload for the cloud item, stores the uploaded
    envelope under a hashed folder, RSA-decrypts the AES key, decrypts
    and extracts the Fernet-encrypted ZIP, and saves the parsed Upload
    row.
    """
    # check if an item is already in the DB
    impDb = Upload.objects.filter(cloudItemID=CloudItem.objects.get(
        id=cloudItem))
    if len(impDb) != 0:
        # BUG FIX: `raise ("...")` attempted to raise a plain string,
        # which is not a valid exception type
        raise Exception("A report already exists.")
    # add path for crypto
    cryptoPath = os.path.join(os.path.dirname(settings.BASE_DIR), "finder")
    if not cryptoPath in sys.path:
        sys.path.insert(1, cryptoPath)
    del cryptoPath
    import crypto
    fileUpload = request.FILES['fileUp']
    fileName = strip_tags(fileUpload.name)
    # create a folder for this cloud item if it does not exist
    path = os.path.join(settings.UPLOAD_DIR, cloudItem)
    if not os.path.isdir(path):
        os.mkdir(path)
    # upload folder name: sha256(<name minus 8-char suffix>|<unix time>)
    upTime = timezone.now()
    shaName = fileName
    uploadName = crypto.sha256(shaName[:-8] + crypto.HASH_SEPARATOR +
                               format(upTime, "U")).hexdigest()
    wholeUploadPath = os.path.join(path, uploadName)
    if not os.path.isdir(wholeUploadPath):
        os.mkdir(wholeUploadPath)
    # write uploaded chunks to disk
    with open(os.path.join(wholeUploadPath, fileName), 'wb+') as destination:
        for chunk in fileUpload.chunks():
            destination.write(chunk)
    # parse the JSON envelope (FIX: close the handle deterministically)
    with open(os.path.join(wholeUploadPath, fileName), "r") as fileCont:
        jsonParsed = json.load(fileCont)
    cont = jsonParsed['enc']
    k = jsonParsed['k']
    # decrypt AES key
    aes = crypto.decryptRSA(k)
    # decrypt ZIP - write encrypted content into a temp file, read it
    # back and decrypt it (FIX: handles are now closed via `with`)
    tempFileName = os.path.join(wholeUploadPath, fileName + ".tmp")
    with open(tempFileName, "w+b") as tempFile:
        tempFile.write(cont)
    # fernet wants "bytes" as token
    with open(tempFileName, "rb") as tempFile:
        fileBytes = crypto.decryptFernetFile(tempFile.read(), aes)
    if fileName.endswith(".enc"):
        name = fileName[:-4]
    else:
        raise Exception("Invalid filename.")
    # write decrypted file to disc
    decZipFile = os.path.join(wholeUploadPath, name)
    with open(decZipFile, "w+b") as decFile:
        decFile.write(fileBytes)
    # delete temp file and drop the AES key reference
    os.remove(tempFileName)
    aes = None
    del aes
    # unzip
    fileZip = zipfile.ZipFile(decZipFile)
    fileZip.extractall(wholeUploadPath)
    # set this report parsed
    newUpload = Upload(fileName=name[:-4],
                       uploadDate=upTime,
                       uploadIP=request.META['REMOTE_ADDR'],
                       parsed=True,
                       cloudItemID=CloudItem.objects.get(id=cloudItem))
    newUpload.save()
def add_credentials(self, **credentials):
    """Hash and register any supplied credentials.

    ``password`` entries are hashed directly; ``keyfile`` entries are
    loaded and hashed via ``load_keyfile``.
    """
    # dispatch table keeps password-then-keyfile processing order
    handlers = (('password', sha256), ('keyfile', load_keyfile))
    for key, make_hash in handlers:
        if key in credentials:
            self.add_key_hash(make_hash(credentials[key]))