def decrypt(self, data, password):
    """Decrypt hex-armored vault data with *password*.

    The payload is hex text (possibly newline-wrapped); once unhexlified it
    contains three newline-separated fields: hex salt, hex HMAC digest and
    hex ciphertext.  Returns the unpadded plaintext, or None when the HMAC
    does not match (wrong password or corrupted data).
    """
    # SPLIT SALT, DIGEST, AND DATA
    data = ''.join(data.split("\n"))      # strip the line-wrapping first
    data = unhexlify(data)
    salt, cryptedHmac, cryptedData = data.split("\n", 2)
    salt = unhexlify(salt)
    cryptedData = unhexlify(cryptedData)
    key1, key2, iv = self.gen_key_initctr(password, salt)
    # EXIT EARLY IF DIGEST DOESN'T MATCH — constant-time compare via is_equal
    hmacDecrypt = HMAC.new(key2, cryptedData, SHA256)
    if not self.is_equal(cryptedHmac, hmacDecrypt.hexdigest()):
        return None
    # SET THE COUNTER AND THE CIPHER (AES-CTR with a 128-bit counter
    # seeded from the derived IV; `long` makes this Python-2-only code)
    ctr = Counter.new(128, initial_value=long(iv, 16))
    cipher = AES.new(key1, AES.MODE_CTR, counter=ctr)
    # DECRYPT PADDED DATA
    decryptedData = cipher.decrypt(cryptedData)
    # UNPAD DATA — last byte holds the pad length (PKCS7-style)
    padding_length = ord(decryptedData[-1])
    decryptedData = decryptedData[:-padding_length]
    return decryptedData
def offload():
    # The entry point for the offload worker process.
    #
    # Reads the control connection address and auth key from the
    # environment, then serves (module, function, args, kwargs) call
    # requests over that connection until a None sentinel arrives.
    address = cPickle.loads(unhexlify(os.environ['CALIBRE_WORKER_ADDRESS']))
    key = unhexlify(os.environ['CALIBRE_WORKER_KEY'])
    func_cache = {}  # (module_name, func_name) -> resolved callable
    with closing(Client(address, authkey=key)) as conn:
        while True:
            args = eintr_retry_call(conn.recv)
            if args is None:  # shutdown sentinel
                break
            res = {'result':None, 'tb':None}
            try:
                mod, func, args, kwargs = args
                if mod is None:
                    # No-op/ping request: acknowledge and keep serving
                    eintr_retry_call(conn.send, res)
                    continue
                f = func_cache.get((mod, func), None)
                if f is None:
                    try:
                        m = importlib.import_module(mod)
                    except ImportError:
                        # The module may live in a plugin; load plugins and retry
                        importlib.import_module('calibre.customize.ui')  # Load plugins
                        m = importlib.import_module(mod)
                    func_cache[(mod, func)] = f = getattr(m, func)
                res['result'] = f(*args, **kwargs)
            except:
                # Report any failure back to the parent as a traceback string
                import traceback
                res['tb'] = traceback.format_exc()
            eintr_retry_call(conn.send, res)
def hamming_distance(lh, rh):
    '''
    Return the binary hamming distance between two hex strings.

    Odd-length inputs are left-padded with a '0' nibble and the shorter
    operand is left-padded with NUL bytes so both align.
    '''
    # Fix: the previous code padded by building lists like [0, 'a', 'b', ...]
    # and passing them to binascii.unhexlify / zip, which raises a TypeError.
    # Pad the hex *string* with a leading '0' instead.
    if len(lh) % 2 != 0:
        lh = '0' + lh
    if len(rh) % 2 != 0:
        rh = '0' + rh
    lhb = binascii.unhexlify(lh)
    rhb = binascii.unhexlify(rh)
    # Left-pad the shorter byte string with zero bytes (most-significant side).
    if len(lhb) < len(rhb):
        lhb = b'\x00' * (len(rhb) - len(lhb)) + lhb
    if len(rhb) < len(lhb):
        rhb = b'\x00' * (len(lhb) - len(rhb)) + rhb
    # bytesDiff is a 256-entry popcount lookup table defined elsewhere
    # in this module.
    return sum(bytesDiff[x ^ y] for (x, y) in zip(lhb, rhb))
def encrypt(self, data):
    """Triple-DES encrypt *data* (CBC mode, PKCS5 padding) using the
    instance's hex-encoded key and IV, returning the base64-encoded result."""
    raw_iv = binascii.unhexlify(self.iv)
    raw_key = binascii.unhexlify(self.key)
    cipher = pyDes.triple_des(raw_key, pyDes.CBC, raw_iv,
                              pad=None, padmode=pyDes.PAD_PKCS5)
    encrypted = cipher.encrypt(data)
    return base64.encodestring(encrypted)
def main():
    # The entry point for the simple worker process.
    #
    # Receives exactly one (module, function, args, kwargs,
    # module_is_source_code) job over the control connection, runs it,
    # and sends back either {'result': ...} or {'tb': traceback string}.
    address = cPickle.loads(unhexlify(os.environ['CALIBRE_WORKER_ADDRESS']))
    key = unhexlify(os.environ['CALIBRE_WORKER_KEY'])
    with closing(Client(address, authkey=key)) as conn:
        args = eintr_retry_call(conn.recv)
        try:
            mod, func, args, kwargs, module_is_source_code = args
            if module_is_source_code:
                # `mod` is raw source text; compile it and look the
                # function up in the resulting namespace
                importlib.import_module('calibre.customize.ui')  # Load plugins
                mod = compile_code(mod)
                func = mod[func]
            else:
                try:
                    mod = importlib.import_module(mod)
                except ImportError:
                    # The module may live in a plugin; load plugins and retry
                    importlib.import_module('calibre.customize.ui')  # Load plugins
                    mod = importlib.import_module(mod)
                func = getattr(mod, func)
            res = {'result':func(*args, **kwargs)}
        except:
            res = {'tb': traceback.format_exc()}
        try:
            conn.send(res)
        except:  # Maybe EINTR — retry the send once
            conn.send(res)
def test_18_test_random(self):
    """/system/random returns ``len`` random bytes, hex-encoded by
    default or base64-encoded when ``encode=b64`` is given."""
    with self.app.test_request_context('/system/random?len=32',
                                       method="GET",
                                       headers={'Authorization': self.at}):
        res = self.app.full_dispatch_request()
        self.assertTrue(res.status_code == 200, res)
        result = json.loads(res.data).get("result")
        value = result.get("value")
        # hex encoded value: 32 random bytes -> 64 hex characters
        self.assertEqual(len(value), 64)
        # This is hex, we can unhexlify
        import binascii
        binascii.unhexlify(value)

    with self.app.test_request_context('/system/random?len=32&encode=b64',
                                       method="GET",
                                       headers={'Authorization': self.at}):
        res = self.app.full_dispatch_request()
        self.assertTrue(res.status_code == 200, res)
        result = json.loads(res.data).get("result")
        value = result.get("value")
        # base64 encoded value: 32 bytes -> 44 chars incl. '=' padding
        self.assertEqual(len(value), 44)
        # This is base64. We can decode
        import base64
        base64.b64decode(value)
def extractSecretKey(self, globalSalt, masterPassword, entrySalt):
    """Extract the 3DES-encrypted private key material from a Firefox
    key3.db and return the raw key bytes, or None if the well-known
    private-key entry is absent.

    Note: all three parameters are immediately overwritten by the values
    recomputed in is_masterpassword_correct().
    """
    (globalSalt, masterPassword, entrySalt) = self.is_masterpassword_correct(masterPassword)

    # f8000000000000000000000000000001 is the fixed key3.db record id
    # under which NSS stores the encrypted private key entry.
    if unhexlify('f8000000000000000000000000000001') not in self.key3:
        return None
    privKeyEntry = self.key3[ unhexlify('f8000000000000000000000000000001') ]
    # Entry layout: [?, saltLen, nameLen, <salt><name><ASN.1 blob>]
    saltLen = ord( privKeyEntry[1] )
    nameLen = ord( privKeyEntry[2] )
    privKeyEntryASN1 = decoder.decode( privKeyEntry[3+saltLen+nameLen:] )
    data = privKeyEntry[3+saltLen+nameLen:]
    self.printASN1(data, len(data), 0)

    # see https://github.com/philsmd/pswRecovery4Moz/blob/master/pswRecovery4Moz.txt
    entrySalt = privKeyEntryASN1[0][0][1][0].asOctets()
    privKeyData = privKeyEntryASN1[0][1].asOctets()
    privKey = self.decrypt3DES( globalSalt, masterPassword, entrySalt, privKeyData )
    self.printASN1(privKey, len(privKey), 0)

    # The decrypted blob is itself ASN.1: unwrap twice to reach the key
    privKeyASN1 = decoder.decode( privKey )
    prKey= privKeyASN1[0][2].asOctets()
    self.printASN1(prKey, len(prKey), 0)
    prKeyASN1 = decoder.decode( prKey )
    id = prKeyASN1[0][1]
    key = long_to_bytes( prKeyASN1[0][3] )
    print_debug('DEBUG', 'key: %s' % repr(key))
    return key
def test_des3(self):
    """Round-trip 3DES in OpenPGP mode against gpg v1.4.0 reference vectors."""
    # Vectors generated with:
    #   gpg -c -z 0 --cipher-algo 3DES --passphrase secret_passphrase \
    #       --disable-mdc --s2k-mode 0 --output ct pt
    # For an explanation, see test_AES.py .
    plaintext = unhexlify('ac1762037074324fb53ba3596f73656d69746556616c6c6579')
    ciphertext = unhexlify('9979238528357b90e2e0be549cb0b2d5999b9a4a447e5c5c7d')
    key = unhexlify('7ade65b460f5ea9be35f9e14aa883a2048e3824aa616c0b2')
    iv = unhexlify('cd47e2afb8b7e4b0')
    encrypted_iv = unhexlify('6a7eef0b58050e8b904a')

    # Encrypting prepends the 10-byte encrypted IV to the ciphertext.
    encrypter = DES3.new(key, DES3.MODE_OPENPGP, iv)
    ct = encrypter.encrypt(plaintext)
    self.assertEqual(ct[:10], encrypted_iv)
    self.assertEqual(ct[10:], ciphertext)

    # Decryption is keyed with the *encrypted* IV instead of the plain one.
    decrypter = DES3.new(key, DES3.MODE_OPENPGP, encrypted_iv)
    self.assertEqual(decrypter.decrypt(ciphertext), plaintext)
def T(value, expected):
    # Round-trip helper: serializing `value` must yield `expected`, and
    # deserializing that result must give `value` back.
    value, expected = unhexlify(value), unhexlify(expected)
    serialized = BytesSerializer.serialize(value)
    self.assertEqual(serialized, expected)
    self.assertEqual(BytesSerializer.deserialize(serialized), value)
def aead_exception_test(backend, cipher_factory, mode_factory):
    """Exercise the AEAD context state machine: tag/AAD access must fail in
    the wrong states, and every operation must fail after finalize()."""
    key = binascii.unhexlify(b"0" * 32)
    nonce = binascii.unhexlify(b"0" * 24)

    encryptor = Cipher(
        cipher_factory(key), mode_factory(nonce), backend
    ).encryptor()
    encryptor.update(b"a" * 16)
    # The tag does not exist until finalize(); AAD may not follow plaintext.
    with pytest.raises(NotYetFinalized):
        encryptor.tag
    with pytest.raises(AlreadyUpdated):
        encryptor.authenticate_additional_data(b"b" * 16)
    encryptor.finalize()
    # Once finalized the context is dead.
    with pytest.raises(AlreadyFinalized):
        encryptor.authenticate_additional_data(b"b" * 16)
    with pytest.raises(AlreadyFinalized):
        encryptor.update(b"b" * 16)
    with pytest.raises(AlreadyFinalized):
        encryptor.finalize()

    decryptor = Cipher(
        cipher_factory(key), mode_factory(nonce, b"0" * 16), backend
    ).decryptor()
    decryptor.update(b"a" * 16)
    # Decryption contexts never expose a tag attribute.
    with pytest.raises(AttributeError):
        decryptor.tag
def test_creates_decryptor(self, backend):
    """Cipher.decryptor() must return a CipherContext implementation."""
    zero_key = binascii.unhexlify(b"0" * 32)
    zero_iv = binascii.unhexlify(b"0" * 32)
    cipher = Cipher(algorithms.AES(zero_key), modes.CBC(zero_iv), backend)
    decryptor = cipher.decryptor()
    assert isinstance(decryptor, base.CipherContext)
def test_crypto_stream_chacha20_xor(self):
    """XOR-ing an all-zero buffer exposes the raw ChaCha20 keystream;
    compare it against the reference vector."""
    key = binascii.unhexlify(
        b"000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f")
    nonce = binascii.unhexlify(b"0001020304050607")
    zero_buffer = b'\x00' * 256
    keystream = pysodium.crypto_stream_chacha20_xor(zero_buffer, nonce, key)
    expected = binascii.unhexlify(b"f798a189f195e66982105ffb640bb7757f579da31602fc93ec01ac56f85ac3c134a4547b733b46413042c9440049176905d3be59ea1c53f15916155c2be8241a38008b9a26bc35941e2444177c8ade6689de95264986d95889fb60e84629c9bd9a5acb1cc118be563eb9b3a4a472f82e09a7e778492b562ef7130e88dfe031c79db9d4f7c7a899151b9a475032b63fc385245fe054e3dd5a97a5f576fe064025d3ce042c566ab2c507b138db853e3d6959660996546cc9c4a6eafdc777c040d70eaf46f76dad3979e5c5360c3317166a1c894c94a371876a94df7628fe4eaaf2ccb27d5aaae0ad7ad0f9d4b6ad3b54098746d4524d38407a6deb3ab78fab78c9")
    self.assertEqual(expected, keystream)
def pack_py(self, val):
    """Pack a MAC address string into its 6 raw bytes.

    Accepts either a separator form ("aa:bb:cc:dd:ee:ff", 17 chars, any
    single-char separator) or a bare 12-character hex string.

    Raises ValueError for any other format.  (The previous code raised a
    bare string, which is a TypeError in Python 3 and cannot be caught
    as a specific exception in Python 2; it also misspelled "unknown".)
    """
    if len(val) == 17:
        # Drop the separator characters at their fixed positions.
        return binascii.unhexlify(
            val[0:2] + val[3:5] + val[6:8] + val[9:11] + val[12:14] + val[15:17])
    if len(val) == 12:
        return binascii.unhexlify(val)
    raise ValueError("unknown mac format=" + val)
def from_gbt(cls, retval, coinbase, extra_length=0, transactions=None, pos=False):
    """ Creates a block template object from a get block template call
    and a coinbase transaction object. extra_length needs to be the
    length of padding that was added for extranonces (both 1 and 2 if
    added). Transactions should be a list of Transaction objects that
    will be put into the block. """
    if transactions is None:
        transactions = []
    coinbase1, coinbase2 = coinbase.assemble(split=True)
    inst = cls(pos=pos)
    # previousblockhash arrives big-endian from the daemon; store the
    # byte-reversed raw form
    inst.hashprev = unhexlify(reverse_hash(retval['previousblockhash']))
    inst.ntime = retval['curtime']
    inst.bits = unhexlify(retval['bits'])
    inst.version = retval['version']
    inst.total_value = retval['coinbasevalue']
    # Darkcoin-specific fields (absent on other coins)
    inst.masternode_payments = retval.get('masternode_payments')
    for vote in retval.get('votes', []):
        v = CMasterNodeVote()
        v.deserialize(StringIO.StringIO(unhexlify(vote)))
        inst.vmn.append(v)
    # chop the padding off the coinbase1 for extranonces to be put
    if extra_length > 0:
        inst.coinbase1 = coinbase1[:-1 * extra_length]
    else:
        inst.coinbase1 = coinbase1
    inst.coinbase2 = coinbase2
    inst.transactions = transactions
    return inst
def block_header(self, nonce, extra1, extra2, ntime=None): """ Builds a block header given nonces and extranonces. Assumes extra1 and extra2 are bytes of the proper length from when the coinbase fragments were originally generated (either manually, or using from_gbt) nonce: 4 bytes big endian hex extra1: direct from stratum, big endian extra2: direct from stratum, big endian ntime: 4 byte big endian hex """ # calculate the merkle root by assembling the coinbase transaction coinbase_raw = self.coinbase1 + unhexlify(extra1) + unhexlify(extra2) coinbase_raw += self.coinbase2 self.coinbase = Transaction(raw=coinbase_raw) #coinbase.disassemble() for testing to ensure proper coinbase constr header = self.version_be header += self.hashprev_le header += self.merkleroot_flipped(self.coinbase) if ntime is None: header += self.ntime_be else: if isinstance(ntime, basestring): header += unhexlify(ntime) else: raise AttributeError("ntime must be hex string") header += self.bits_be header += unhexlify(nonce) return b''.join([header[i*4:i*4+4][::-1] for i in range(0, 20)])
def x(h):
    """Convert a hex string to bytes.

    On Python 3 the input must be encoded before unhexlify; on Python 2
    the str is passed through directly.
    """
    import sys
    # Fix: compare the version tuple, not the version string.  The old
    # `sys.version > '3'` lexicographic comparison only worked by accident
    # of version-string formatting.
    if sys.version_info[0] >= 3:
        return binascii.unhexlify(h.encode('utf8'))
    else:
        return binascii.unhexlify(h)
def test_rsa_pss_signature(backend, wycheproof):
    """Check an RSA-PSS wycheproof vector: valid/acceptable cases must
    verify, all others must raise InvalidSignature."""
    group = wycheproof.testgroup
    key = serialization.load_der_public_key(
        binascii.unhexlify(group["keyDer"]), backend
    )
    digest = _DIGESTS[group["sha"]]
    mgf_digest = _DIGESTS[group["mgfSha"]]
    pss_padding = padding.PSS(
        mgf=padding.MGF1(mgf_digest),
        salt_length=group["sLen"]
    )
    signature = binascii.unhexlify(wycheproof.testcase["sig"])
    message = binascii.unhexlify(wycheproof.testcase["msg"])

    if wycheproof.valid or wycheproof.acceptable:
        key.verify(signature, message, pss_padding, digest)
    else:
        with pytest.raises(InvalidSignature):
            key.verify(signature, message, pss_padding, digest)
def base58_encode(a,version='',postfix=''):
    """
    Base58 encode input

    Mostly ripped from:
    https://github.com/jgarzik/python-bitcoinlib/blob/master/bitcoin/base58.py
    """
    # Round-trip each argument through unhexlify to validate it is hex.
    # Fix: narrowed the bare `except:` (which also swallowed SystemExit
    # and KeyboardInterrupt) to `except Exception`.
    try:
        a = hexlify(unhexlify(a))
        version = hexlify(unhexlify(version))
        postfix = hexlify(unhexlify(postfix))
    except Exception:
        raise Exception('base58_encode() Invalid input')
    a, version, postfix = hex_to_hexstr(a), hex_to_hexstr(version), hex_to_hexstr(postfix)
    b = version + a + postfix
    b58_digits = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'
    # Treat the whole hex string as one big integer and repeatedly divmod
    n1 = int(b,16)
    res = []
    while n1 > 0:
        n1, r = divmod(n1,58)
        res.append(b58_digits[r])
    res = ''.join(res[::-1])
    # Each leading 0x00 byte is represented by one leading '1' digit
    pad = 0
    for i in range(len(b) // 2):
        j = int(2*i)
        teststr = str(b[j] + b[j+1])
        if teststr == '00':
            pad += 1
        else:
            break
    return str(b58_digits[0] * pad + res)
def response(context, request):
    # Handle the SLCS certificate response POST.  The session key arrives
    # RSA-encrypted under our host key; the certificate payload is
    # AES-encrypted under that session key.  (Python 2 code.)
    req = request
    slcsResp = req.POST['CertificateRequestData']
    session_key = req.POST['SessionKey']
    print req.GET
    originURL=request.GET['url']

    # Decrypt session key with host private key (RSA, PKCS#1 padding)
    encrypted = unhexlify(session_key)
    priv_key = RSA.load_key(get_settings()['host_privkey'])
    session_key = priv_key.private_decrypt(encrypted, RSA.pkcs1_padding)

    # Decrypt message with session key (AES)
    a = AES.new(session_key)
    plaintext = a.decrypt(unhexlify(slcsResp))

    # remove AES padding
    n = ord(plaintext[-1])  # last byte contains number of padding bytes
    if n > AES.block_size or n > len(plaintext):
        raise Exception('invalid padding')
    print plaintext
    try:
        # Hand the unpadded certificate request to the SLCS handler
        certificate = slcs_handler(StringIO(plaintext[:-n]))
        print "cert = " + str(certificate)
    except SLCSException, e:
        # TODO add error handling
        print "Exception: " + str(e)
        pass
def __init__(self, options):
    # Load the target binary from disk and dispatch on its magic number
    # to the matching loader class (ELF / PE / Mach-O / raw).
    self.__fileName = options.binary
    self.__rawBinary = None
    self.__binary = None
    try:
        fd = open(self.__fileName, "rb")
        self.__rawBinary = fd.read()
        fd.close()
    except:
        print("[Error] Can't open the binary or binary not found")
        # NOTE(review): `return None` in __init__ does not signal failure
        # to the caller; the instance is still created with __binary=None.
        return None
    if options.rawArch and options.rawMode:
        # Explicit raw mode requested on the command line
        self.__binary = Raw(self.__rawBinary, options.rawArch, options.rawMode)
    elif self.__rawBinary[:4] == unhexlify(b"7f454c46"):  # "\x7fELF"
        self.__binary = ELF(self.__rawBinary)
    elif self.__rawBinary[:2] == unhexlify(b"4d5a"):  # "MZ" (PE/COFF)
        self.__binary = PE(self.__rawBinary)
    elif self.__rawBinary[:4] == unhexlify(b"cafebabe"):  # Mach-O fat binary
        self.__binary = UNIVERSAL(self.__rawBinary)
    elif self.__rawBinary[:4] == unhexlify(b"cefaedfe") or self.__rawBinary[:4] == unhexlify(b"cffaedfe"):
        # Mach-O 32-bit / 64-bit (little-endian magic)
        self.__binary = MACHO(self.__rawBinary)
    else:
        print("[Error] Binary format not supported")
        return None
def uncompress_pubkey(compressedPubKey):
    """
    Turn a 02/03 prefix public key into an uncompressed 04 key

    pow_mod() and most of this function taken from:
    https://bitcointalk.org/index.php?topic=644919.msg7205689#msg7205689
    """
    # Validate that the input is hex; results are discarded.
    # NOTE(review): 'tast1' looks like a typo for 'test1' — harmless,
    # both names are throwaways.
    try:
        test1 = unhexlify(compressedPubKey)
        test2 = int(compressedPubKey,16)
        tast1,test2 = "",""
    except:
        raise Exception('uncompress_pubkey() input not hex')
    # Sanitize input key: normalize and zero-pad to 66 hex chars
    compressedPubKey = hex_to_hexstr(hexlify(unhexlify(compressedPubKey))).zfill(66)
    if (len(compressedPubKey) != 66) \
       or ((compressedPubKey[:-64] != '02') \
       and (compressedPubKey[:-64] != '03')):
        raise Exception('uncompress_pubkey() Unknown input error')
    # Recover y from x via y^2 = x^3 + 7 (mod P_CURVE); the 02/03 prefix
    # selects the parity of y.
    y_parity = int(compressedPubKey[:2],16) - 2
    x = int(compressedPubKey[2:],16)
    a = (pow_mod(x, 3, P_CURVE) + 7) % P_CURVE
    y = pow_mod(a, (P_CURVE+1)//4, P_CURVE)
    if y % 2 != y_parity:
        y = -y % P_CURVE
    x = hexstrlify(x,64)
    y = hexstrlify(y,64)
    return hexlify(unhexlify('04' + x + y))
def test_verify_proof(self): claim1_name = 97 # 'a' claim1_txid = 'bd9fa7ffd57d810d4ce14de76beea29d847b8ac34e8e536802534ecb1ca43b68' claim1_outpoint = 0 claim1_height = 10 claim1_node_hash = claims.get_hash_for_outpoint( binascii.unhexlify(claim1_txid)[::-1], claim1_outpoint, claim1_height) claim2_name = 98 # 'b' claim2_txid = 'ad9fa7ffd57d810d4ce14de76beea29d847b8ac34e8e536802534ecb1ca43b68' claim2_outpoint = 1 claim2_height = 5 claim2_node_hash = claims.get_hash_for_outpoint( binascii.unhexlify(claim2_txid)[::-1], claim2_outpoint, claim2_height) to_hash1 = claim1_node_hash hash1 = Hash(to_hash1) to_hash2 = chr(claim1_name) + hash1 + chr(claim2_name) + claim2_node_hash root_hash = Hash(to_hash2) proof = { 'last takeover height': claim1_height, 'txhash': claim1_txid, 'nOut': claim1_outpoint, 'nodes': [ {'children': [ {'character': 97}, { 'character': 98, 'nodeHash': claim2_node_hash[::-1].encode('hex') } ]}, {'children': []}, ] } out = claims.verify_proof(proof, root_hash[::-1].encode('hex'), 'a') self.assertEqual(out, True)
def T(serialized1, serialized2, are_equal):
    # Parse both hex-encoded scripts, then assert equality or inequality
    # depending on the expectation flag.
    first = CScript(unhexlify(serialized1))
    second = CScript(unhexlify(serialized2))
    check = self.assertEqual if are_equal else self.assertNotEqual
    check(first, second)
def decipher_key(data, crypt_algo, key=None):
    """
    first step of authentification computation : decipher the given data (a
    nonce) with the authentification key
    - data must be a 8 bytes long binary string. If not, zero padding will
      be performed in front
    - crypt_algo must be a "des" or "3des",
    - key length must be 8 and (16 or 24) bytes long, for respecively des
      and triple des.
    """
    # Default to an all-zero single-DES key
    if key is None:
        key = unhexlify("00" * 8)
    if len(key) not in [8, 16, 24]:
        raise ValueError("invalid key size: not in [8, 16, 24]")
    IV = unhexlify("00" * 8)
    if crypt_algo not in ["des", "3des"]:
        raise NotImplementedError("crypt_algo not supported: " + str(crypt_algo))
    elif crypt_algo == "des":
        crypt_box = des(key, CBC, pad=None, padmode=PAD_NORMAL)
    elif crypt_algo == "3des":
        # NOTE(review): this branch also instantiates single-DES `des(...)`,
        # which rejects 16/24-byte keys — it likely should be `triple_des`.
        # Confirm against pyDes and fix; left unchanged here.
        crypt_box = des(key, CBC, pad=None, padmode=PAD_NORMAL)
    crypt_box.setIV(IV)
    return crypt_box.decrypt(data)
def encrypt(mode, key, iv, plaintext):
    """IDEA-encrypt hex *plaintext* under hex *key*/*iv* in the given
    block mode (no padding); return the ciphertext hex-encoded."""
    cipher_spec = "IDEA/{0}/NoPadding".format(mode)
    engine = botan.Cipher(cipher_spec, "encrypt", binascii.unhexlify(key))
    raw_ciphertext = engine.cipher(binascii.unhexlify(plaintext),
                                   binascii.unhexlify(iv))
    return binascii.hexlify(raw_ciphertext)
def _build_vectors():
    """Generate ARC4 keystream test vectors from the RFC 6229 key
    materials, sampling the keystream at each RFC-specified offset and
    emitting them in NIST-style KEY/OFFSET/PLAINTEXT/CIPHERTEXT form."""
    count = 0
    output = []
    key = None
    plaintext = binascii.unhexlify(32 * '0')  # 16 zero bytes
    for size in _SIZES_TO_GENERATE:
        for keyinfo in _RFC6229_KEY_MATERIALS:
            key = _key_for_size(size, keyinfo)
            cipher = ciphers.Cipher(
                algorithms.ARC4(binascii.unhexlify(key)),
                None,
                default_backend())
            encryptor = cipher.encryptor()
            current_offset = 0
            for offset in _RFC6229_OFFSETS:
                if offset % 16 != 0:
                    raise ValueError(
                        "Offset {} is not evenly divisible by 16"
                        .format(offset))
                # Advance the keystream (16 bytes per update) up to the
                # requested sampling offset.
                while current_offset < offset:
                    encryptor.update(plaintext)
                    current_offset += len(plaintext)
                output.append("\nCOUNT = {}".format(count))
                count += 1
                output.append("KEY = {}".format(key))
                output.append("OFFSET = {}".format(offset))
                output.append("PLAINTEXT = {}".format(
                    binascii.hexlify(plaintext)))
                # Encrypting zeros yields the raw keystream block.
                output.append("CIPHERTEXT = {}".format(
                    binascii.hexlify(encryptor.update(plaintext))))
                current_offset += len(plaintext)
            # Stream ciphers buffer nothing; finalize must return b''.
            assert not encryptor.finalize()
    return "\n".join(output)
def compose(db, source, order_match_id): tx0_hash, tx1_hash = order_match_id[:64], order_match_id[64:] # UTF-8 encoding means that the indices are doubled. destination, btc_quantity, escrowed_asset, escrowed_quantity, order_match, problems = validate( db, source, order_match_id ) if problems: raise exceptions.BTCPayError(problems) # Warn if down to the wire. time_left = order_match["match_expire_index"] - util.last_block(db)["block_index"] if time_left < 4: print( "WARNING: Only {} blocks until that order match expires. The payment might not make into the blockchain in time.".format( time_left ) ) if 10 - time_left < 4: print("WARNING: Order match has only {} confirmation(s).".format(10 - time_left)) tx0_hash_bytes, tx1_hash_bytes = ( binascii.unhexlify(bytes(tx0_hash, "utf-8")), binascii.unhexlify(bytes(tx1_hash, "utf-8")), ) data = config.PREFIX + struct.pack(config.TXTYPE_FORMAT, ID) data += struct.pack(FORMAT, tx0_hash_bytes, tx1_hash_bytes) return (source, [(destination, btc_quantity)], data)
def test_repr(self):
    """repr() of CScript shows decoded opcodes, pushdata as x('..') and
    embeds truncation errors inline instead of raising."""
    def check(script, expected_repr):
        self.assertEqual(repr(script), expected_repr)

    check(CScript([]), 'CScript([])')
    check(CScript([1]), 'CScript([1])')
    check(CScript([1, 2, 3]), 'CScript([1, 2, 3])')
    check(CScript([1, x('7ac977d8373df875eceda362298e5d09d4b72b53'), OP_DROP]),
          "CScript([1, x('7ac977d8373df875eceda362298e5d09d4b72b53'), OP_DROP])")
    check(CScript(unhexlify(b'0001ff515261ff')),
          "CScript([x(''), x('ff'), 1, 2, OP_NOP, OP_INVALIDOPCODE])")

    # truncated scripts
    truncated_cases = (
        (b'6101',
         "CScript([OP_NOP, x('')...<ERROR: PUSHDATA(1): truncated data>])"),
        (b'614bff',
         "CScript([OP_NOP, x('ff')...<ERROR: PUSHDATA(75): truncated data>])"),
        (b'614c',
         "CScript([OP_NOP, <ERROR: PUSHDATA1: missing data length>])"),
        (b'614c0200',
         "CScript([OP_NOP, x('00')...<ERROR: PUSHDATA1: truncated data>])"),
    )
    for raw, expected in truncated_cases:
        check(CScript(unhexlify(raw)), expected)
def decryptFile(self, content):
    # Decrypt a CCF link container (Python 2 code: str/bytes are mixed).
    # The AES key is fixed; the CFB IV is derived by ECB-encrypting a
    # fixed all-0xFF block with that key.
    from Crypto.Cipher import AES
    Key = binascii.unhexlify('8C35192D964DC3182C6F84F3252239EB4A320D2500000000')
    IV = binascii.unhexlify('FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF')
    IV_Cipher = AES.new(Key, AES.MODE_ECB)
    IV = IV_Cipher.encrypt(IV)
    obj = AES.new(Key, AES.MODE_CFB, IV)

    data = content
    # NOTE(review): when the 404 marker IS present nothing below runs and
    # the method implicitly returns None — confirm callers handle that.
    if re.search(r"<title>404 - Not Found</title>", data) is None:
        # Hex payload with whitespace stripped; 0xda separates records
        data = binascii.unhexlify(''.join(data.split()))
        data = data.split("\xda")

        links = []
        for link in data:
            if link == '':
                continue
            # Each record is base64 (re-append the 0xda terminator),
            # then AES-CFB decrypt and strip the 'CCF: ' prefix
            link = base64.b64decode(link + "\xda")
            link = obj.decrypt(link)
            decryptedUrl = link.replace('CCF: ', '')
            links.append(decryptedUrl)

        self.log.debug("%s: adding rsdf-package with %d links" % (self.__name__, len(links)))
        return links
def send_message(self, guid, handle, message, subject, message_type, recipient_encryption_key):
    # Store-and-forward send: persist the outgoing message locally first,
    # then resolve the recipient node asynchronously and hand the message
    # to the message server.
    self.factory.db.MessageStore().save_message(
        guid,
        handle,
        "",
        unhexlify(recipient_encryption_key),
        subject,
        message_type.upper(),
        message,
        time.time(),
        "",
        "",
        True,
    )

    def send(node_to_send):
        # If DHT resolution failed (None), fall back to a bare Node built
        # from the guid and ask the network to store the message for
        # later delivery (store_only=True).
        n = node_to_send if node_to_send is not None else Node(unhexlify(guid))
        self.factory.mserver.send_message(
            n,
            recipient_encryption_key,
            PlaintextMessage.Type.Value(message_type.upper()),
            message,
            subject,
            store_only=True if node_to_send is None else False,
        )

    self.factory.kserver.resolve(unhexlify(guid)).addCallback(send)
def prepare_transfer_tx(hw_session: HwSessionInfo, utxos_to_spend: List[dict],
                        dest_addresses: List[Tuple[str, int, str]], tx_fee):
    """
    Creates a signed transaction.
    :param hw_session:
    :param utxos_to_spend: list of utxos to send
    :param dest_addresses: destination addresses. Fields: 0: dest Dash
        address. 1: the output value in satoshis, 2: the bip32 path of the
        address if the output is the change address or None otherwise
    :param tx_fee: transaction fee
    :return: tuple (serialized tx, total transaction amount in satoshis)
    """
    # Pick the insight API endpoint matching the configured network
    insight_network = 'insight_dash'
    if hw_session.app_config.is_testnet():
        insight_network += '_testnet'
    dash_network = hw_session.app_config.dash_network
    tx_api = MyTxApiInsight(insight_network, '', hw_session.dashd_intf, hw_session.app_config.cache_dir)
    client = hw_session.hw_client
    client.set_tx_api(tx_api)

    # Build the device inputs; every UTXO must carry its BIP32 path so the
    # hardware wallet can derive the signing key.
    inputs = []
    outputs = []
    inputs_amount = 0
    for utxo_index, utxo in enumerate(utxos_to_spend):
        if not utxo.get('bip32_path', None):
            raise Exception('No BIP32 path for UTXO ' + utxo['txid'])
        address_n = client.expand_path(utxo['bip32_path'])
        it = trezor_proto.TxInputType(address_n=address_n,
                                      prev_hash=binascii.unhexlify(utxo['txid']),
                                      prev_index=int(utxo['outputIndex']))
        logging.debug('BIP32 path: %s, address_n: %s, utxo_index: %s, prev_hash: %s, prev_index %s' %
                      (utxo['bip32_path'], str(address_n), str(utxo_index), utxo['txid'],
                       str(utxo['outputIndex'])))
        inputs.append(it)
        inputs_amount += utxo['satoshis']

    outputs_amount = 0
    for addr, amount, bip32_path in dest_addresses:
        outputs_amount += amount
        # The base58 prefix character of the address selects the script type
        if addr[0] in dash_utils.get_chain_params(dash_network).B58_PREFIXES_SCRIPT_ADDRESS:
            stype = trezor_proto.OutputScriptType.PAYTOSCRIPTHASH
            logging.debug('Transaction type: PAYTOSCRIPTHASH' + str(stype))
        elif addr[0] in dash_utils.get_chain_params(dash_network).B58_PREFIXES_PUBKEY_ADDRESS:
            stype = trezor_proto.OutputScriptType.PAYTOADDRESS
            logging.debug('Transaction type: PAYTOADDRESS ' + str(stype))
        else:
            raise Exception('Invalid prefix of the destination address.')
        # Change outputs are addressed by path so the device recognizes
        # them as ours; external outputs by the literal address string.
        if bip32_path:
            address_n = client.expand_path(bip32_path)
        else:
            address_n = None
        ot = trezor_proto.TxOutputType(
            address=addr if address_n is None else None,
            address_n=address_n,
            amount=amount,
            script_type=stype
        )
        outputs.append(ot)

    # The amounts must balance exactly: inputs == outputs + fee
    if outputs_amount + tx_fee != inputs_amount:
        raise Exception('Transaction validation failure: inputs + fee != outputs')

    signed = client.sign_tx(hw_session.app_config.hw_coin_name, inputs, outputs)
    logging.info('Signed transaction')
    return signed[1], inputs_amount
if bitcoin_sum > fee: change_output_script = standard_tx_out_script(address) outputs.append(TxOut(bitcoin_sum - fee, change_output_script)) op_return_output_script = script.tools.compile('OP_RETURN %s' % message) outputs.append(TxOut(0, op_return_output_script)) tx = Tx(version=1, txs_in=inputs, txs_out=outputs) tx.set_unspents(spendables) sign_tx(tx, wifs=[key.wif()]) return tx if __name__ == '__main__': key = Key.from_text(sys.argv[1]) spendables = spendables_for_address(key.address()) time_lengths = [] for i in range(1000): print("Run %s" % (i+1)) start_time = time.time() dummy_op_return_tx(key, unhexlify('ca978112ca1bbdcafac231b39a23dc4da786eff8147c4e72b9807785afee48bb'), spendables) end_time = time.time() time_lengths.append(end_time - start_time) mean = sum(time_lengths) / len(time_lengths) sd = statistics.stdev(time_lengths) print("Average time: %ss" % mean) print("SD: %s" % sd)
def hex_to_b64_id(encoded_string):
    """Re-encode a hex string as standard base64 text."""
    raw_bytes = binascii.unhexlify(encoded_string)
    encoded = base64.standard_b64encode(raw_bytes)
    return encoded.decode("utf-8")
def test_sign_witness(self):
    """Parse, inspect and sign the BIP-143 native-P2WPKH example
    transaction."""
    # from https://github.com/bitcoin/bips/blob/master/bip-0143.mediawiki
    secret0 = "bbc27228ddcb9209d7fd6f36b02f7dfa6252af40bb2f1cbc7a557da8027ff866"
    private_key0 = PrivateKey(
        secret=int.from_bytes(unhexlify(secret0), 'big'))
    pub_sec0 = unhexlify(
        "03c9f4836b9a4f77fc0d81f7bcb01b7f1b35916864b9476c241ce9fc198bd25432"
    )
    self.assertEqual(private_key0.point.sec(), pub_sec0)
    value0 = 6.25
    secret1 = "619c335025c7f4012e556c2a58b2506e30b8511b53ade95ea316fd8c3286feb9"
    private_key1 = PrivateKey(
        secret=int.from_bytes(unhexlify(secret1), 'big'))
    pub_sec1 = unhexlify(
        "025476c2e83188368da1ff3e292e7acafcdb3566bb0ad253f62fc70f07aeee6357"
    )
    self.assertEqual(private_key1.point.sec(), pub_sec1)
    value1 = 6
    raw_tx_hex = '0100000002fff7f7881a8099afa6940d42d1e7f6362bec38171ea3edf433541db4e4ad969f0000000000eeffffffef51e1b804cc89d182d279655c3aa89e815b1b309fe287d9b2b55d57b90ec68a0100000000ffffffff02202cb206000000001976a9148280b37df378db99f66f85c95a783a76ac7a6d5988ac9093510d000000001976a9143bde42dbee7e4dbe6a21b2d50ce2f0167faa815988ac11000000'
    tx = Tx.parse(BytesIO(unhexlify(raw_tx_hex)))
    # txid and hash coincide while the tx carries no witness data yet
    self.assertEqual(
        hexlify(tx.txid()).decode(),
        "3335ffae0df20c5407e8de12b49405c8e912371f00fe4132bfaf95ad49c40243")
    self.assertEqual(
        hexlify(tx.hash()).decode(),
        "3335ffae0df20c5407e8de12b49405c8e912371f00fe4132bfaf95ad49c40243")
    # check against
    # bitcoin-cli decoderawtransaction <raw_tx_hex>
    self.assertEqual(tx.version, 1)
    self.assertEqual(len(tx.tx_ins), 2)
    self.assertEqual(
        tx.tx_ins[0].prev_tx,
        unhexlify(
            '9f96ade4b41d5433f4eda31e1738ec2b36f6e7d1420d94a6af99801a88f7f7ff'
        ))
    self.assertEqual(tx.tx_ins[0].prev_index, 0)
    self.assertEqual(tx.tx_ins[0].script_sig.serialize(), b'')
    self.assertEqual(tx.tx_ins[0].sequence, 4294967278)
    self.assertEqual(
        tx.tx_ins[1].prev_tx,
        unhexlify(
            '8ac60eb9575db5b2d987e29f301b5b819ea83a5c6579d282d189cc04b8e151ef'
        ))
    self.assertEqual(tx.tx_ins[1].prev_index, 1)
    self.assertEqual(tx.tx_ins[1].script_sig.serialize(), b'')
    self.assertEqual(tx.tx_ins[1].sequence, 4294967295)
    self.assertEqual(tx.locktime, 17)
    self.assertEqual(len(tx.tx_outs), 2)
    self.assertEqual(tx.tx_outs[0].amount, int(1.1234 * 1e8))
    want = unhexlify('76a9148280b37df378db99f66f85c95a783a76ac7a6d5988ac')
    self.assertEqual(tx.tx_outs[0].script_pubkey.serialize(), want)
    self.assertEqual(tx.tx_outs[1].amount, int(2.2345 * 1e8))
    want = unhexlify('76a9143bde42dbee7e4dbe6a21b2d50ce2f0167faa815988ac')
    self.assertEqual(tx.tx_outs[1].script_pubkey.serialize(), want)
    # serialization must round-trip
    self.assertEqual(tx.serialize(), unhexlify(raw_tx_hex))
    # test helper functions (BIP-143 intermediate hashes)
    self.assertEqual(
        hexlify(tx.hash_prevouts()).decode(),
        '96b827c8483d4e9b96712b6713a7b68d6e8003a781feba36c31143470b4efd37')
    self.assertEqual(
        hexlify(tx.hash_sequence()).decode(),
        '52b0a642eea2fb7ae638c36f6252b6750293dbe574a806984b8e4d8548339a3b')
    self.assertEqual(
        hexlify(tx.hash_outputs()).decode(),
        '863ef3e1a92afbfdb97f31ad0fc7683ee943e9abcf2501590ff8f6551f47e5e5')
    # Previous outputs: a P2PK and a native P2WPKH
    tx0 = TxOut(
        int(6.25 * 1e8),
        unhexlify(
            "2103c9f4836b9a4f77fc0d81f7bcb01b7f1b35916864b9476c241ce9fc198bd25432ac"
        ))
    tx1 = TxOut(int(6 * 1e8),
                unhexlify("00141d0f172a0ecb48aee1be1f2687d2963ae33f71a1"))
    self.assertEqual(tx0.script_pubkey.address(),
                     '1HkrFxLyNoQydvW889WmubyRHycE4bvw1Y')
    self.assertEqual(tx1.script_pubkey.address(),
                     'bc1qr583w2swedy2acd7rung055k8t3n7udp7vyzyg')
    # precache with a dummy previous transaction
    TxIn.cache[unhexlify(
        '9f96ade4b41d5433f4eda31e1738ec2b36f6e7d1420d94a6af99801a88f7f7ff'
    )] = Tx(1, [], [tx0], 0)
    TxIn.cache[unhexlify(
        '8ac60eb9575db5b2d987e29f301b5b819ea83a5c6579d282d189cc04b8e151ef'
    )] = Tx(1, [], [tx0, tx1], 0)
    hash_preimage = unhexlify(
        "0100000096b827c8483d4e9b96712b6713a7b68d6e8003a781feba36c31143470b4efd3752b0a642eea2fb7ae638c36f6252b6750293dbe574a806984b8e4d8548339a3bef51e1b804cc89d182d279655c3aa89e815b1b309fe287d9b2b55d57b90ec68a010000001976a9141d0f172a0ecb48aee1be1f2687d2963ae33f71a188ac0046c32300000000ffffffff863ef3e1a92afbfdb97f31ad0fc7683ee943e9abcf2501590ff8f6551f47e5e51100000001000000"
    )
    # NOTE(review): assertTrue(a, b) only checks the truthiness of `a` and
    # uses `b` as the failure message — the first call below looks like it
    # was meant to be assertEqual; confirm before changing.
    self.assertTrue(tx.sig_hash_w0_preimage(1, SIGHASH_ALL), hash_preimage)
    self.assertTrue(tx.sign_input(0, private_key0, SIGHASH_ALL))
    self.assertTrue(tx.sign_input(1, private_key1, SIGHASH_ALL))
def sign(string_prvkey, msgHashed):
    """Sign *msgHashed* (a string) with the hex-encoded SECP256k1 private
    key *string_prvkey* and return the signature.

    Fixes a NameError: the body previously referenced ``privkey`` while
    the parameter is named ``string_prvkey``.
    """
    thesignkey = ecdsa.SigningKey.from_string(
        binascii.unhexlify(string_prvkey),
        curve=ecdsa.SECP256k1,
        hashfunc=sha256)
    # sign() requires bytes, hence the encode
    signature = thesignkey.sign(msgHashed.encode(), hashfunc=sha256)
    # Preserve the original global side effect for callers that read
    # ``theSign`` instead of the return value.
    global theSign
    theSign = signature
    return signature
def bin_to_ascii(bin):
    """Interpret the binary-digit string *bin* as one big integer, convert
    it to its byte representation, print it (twice, as before) and pass it
    to strip().

    Fixes a crash: ``'%x' % asc`` yields odd-length hex for many inputs,
    which binascii.unhexlify rejects with an Error — the hex string is now
    zero-padded to an even length.  The duplicate unhexlify call was also
    hoisted.
    """
    asc = int(bin, 2)
    hex_str = '%x' % asc
    if len(hex_str) % 2:
        hex_str = '0' + hex_str
    asc_fin = binascii.unhexlify(hex_str)
    # The original printed the decoded value twice; keep that output.
    print(asc_fin)
    print(asc_fin)
    # strip() is defined elsewhere in this module.
    strip(asc_fin)
def hex_str_to_bytes(hex_str):
    """Decode an ASCII hex string into the raw bytes it represents."""
    ascii_encoded = hex_str.encode('ascii')
    return unhexlify(ascii_encoded)
def parseTelegramData(self, packet_data):
    """Parse one EnOcean ESP3 telegram (hex-character string) into fields.

    Validates the header (sync byte, lengths, CRC8), then — for Radio ERP2
    packets only — decodes the ERP2 payload: address control, originator /
    destination IDs, DATA_DL, optional data (SubTelNum, dBm) and the data CRC8.

    Returns True on success, False on any validation/parse failure.

    NOTE(review): packet_data appears to be a sequence of hex characters
    (pairs are joined and unhexlified before struct.unpack) — confirm against
    the caller.
    """
    self.packet_data = packet_data
    packet_offset = 0
    calc_crc8 = CalcCRC8(self.logger)

    # check header size > 6
    self.packet_length = len(self.packet_data)
    if self.packet_length < self.ESP3_HEADER_SIZE:
        self.logger.error(
            "ESP3: Invalid packet length:{0}".format(self.packet_length))
        return False
    self.logger.debug("ESP3: packet length:{0}".format(self.packet_length))

    # check sync byte
    self.sync_byte = self.packet_data[packet_offset]
    if self.sync_byte != self.ESP3_HEADER_SYNC_BYTE:
        self.logger.error(
            "ESP3: Invalid packet header sync byte:{0}".format(self.sync_byte))
        return False
    self.logger.debug("ESP3: header sync:{0}".format(self.sync_byte))

    # parse packet data length (two hex chars -> one big-endian uint16)
    packet_offset += 1
    self.data_length = struct.unpack('>H', binascii.unhexlify(
        self.packet_data[packet_offset] +
        self.packet_data[packet_offset + 1]))[0]
    self.logger.debug("ESP3: data length:{0}".format(self.data_length))

    # parse packet optional length
    packet_offset += 2
    self.optional_length = struct.unpack(
        '>B', binascii.unhexlify(self.packet_data[packet_offset]))[0]
    self.logger.debug(
        "ESP3: optional length:{0}".format(self.optional_length))

    # parse Packet types
    packet_offset += 1
    self.packet_types = struct.unpack(
        '>B', binascii.unhexlify(self.packet_data[packet_offset]))[0]
    # supported Packet Type: Radio ERP2 only
    if self.packet_types != self.ESP3_PACKET_TYPE_RADIO_ERP2:
        self.logger.error(
            "ESP3: Unsupported packet type:{0}".format(self.packet_types))
        return False
    self.logger.debug("ESP3: packet types:{0}".format(self.packet_types))

    # parse CRC8 Header
    packet_offset += 1
    self.header_crc8h = self.packet_data[packet_offset]
    self.logger.debug("ESP3: header crc8h:{0}".format(self.header_crc8h))

    # check CRC8 Header (covers everything between sync byte and CRC byte)
    d_data = self.packet_data[1:self.ESP3_HEADER_SIZE - 1]
    self.logger.debug("ESP3: crc8 header data:{0}".format(d_data))
    if calc_crc8.calcCRC8(d_data, self.header_crc8h) is not True:
        self.logger.error("ESP3: Invalid packet header CRC8 check error")
        return False

    # check packet length: header + data + optional + trailing CRC8D byte
    if self.packet_length != (self.ESP3_HEADER_SIZE + self.data_length +
                              self.optional_length + 1):
        self.logger.error("ESP3: invalid packet length")
        return False
    if self.packet_length > self.ESP3_MAX_PACKET_SIZE:
        self.logger.error("ESP3: exceeded max packet length")
        return False
    self.logger.debug("ESP3: check packet length:ok")

    # parse ERP2 data contents for Length > 6 Bytes
    if self.data_length > 6:
        packet_offset += 1
        # parse Address Control (same byte is masked three times below)
        self.addctrl = struct.unpack('>B', binascii.unhexlify(
            self.packet_data[packet_offset]))[0] & self.ERP2_HEADER_ADDCTRL
        self.logger.debug(
            "ERP2: address control:{0}".format(bin(self.addctrl)))
        # parse Extended header available
        self.extended = struct.unpack('>B', binascii.unhexlify(
            self.packet_data[packet_offset]))[0] & self.ERP2_HEADER_EXTENDED_HEADER
        self.logger.debug(
            "ERP2: extended header available:{0}".format(bin(self.extended)))
        # parse Telegram type (R-ORG)
        self.telegram_type = struct.unpack('>B', binascii.unhexlify(
            self.packet_data[packet_offset]))[0] & self.ERP2_HEADER_TELEGRAM_TYPE
        self.logger.debug("ERP2: telegram type:{0}".format(
            bin(self.telegram_type)))

        # parse Extended Header
        if self.extended == self.ERP2_HEADER_EXTENDED_HEADER_AVAILABLE:
            packet_offset += 1
            self.ext_header = struct.unpack(
                '>B', binascii.unhexlify(self.packet_data[packet_offset]))[0]
            self.logger.debug(
                "ERP2: extended header:{0}".format(bin(self.ext_header)))

        # parse Extended Telegram type
        if self.telegram_type == self.ERP2_HEADER_TELEGRAM_TYPE:
            packet_offset += 1
            self.ext_telegram_type = struct.unpack(
                '>B', binascii.unhexlify(self.packet_data[packet_offset]))[0]
            self.logger.debug("ERP2: ext telegram type:{0}".format(
                bin(self.ext_telegram_type)))

        # parse Originator ID, Destination ID (width depends on addctrl)
        if self.addctrl == self.ERP2_HEADER_ADDCTRL_ID24_NODIST:
            packet_offset += 1
            self.originator_id = self.packet_data[packet_offset] + \
                self.packet_data[packet_offset + 1] + \
                self.packet_data[packet_offset + 2]
            self.logger.debug(
                "ERP2: originator id:{0}".format(self.originator_id))
            packet_offset += 2
        elif self.addctrl == self.ERP2_HEADER_ADDCTRL_ID32_NODIST:
            packet_offset += 1
            self.originator_id = self.packet_data[packet_offset] + \
                self.packet_data[packet_offset + 1] + self.packet_data[packet_offset + 2] + \
                self.packet_data[packet_offset + 3]
            self.logger.debug(
                "ERP2: originator id:{0}".format(self.originator_id))
            packet_offset += 3
        elif self.addctrl == self.ERP2_HEADER_ADDCTRL_ID32_DIST32:
            packet_offset += 1
            self.originator_id = self.packet_data[packet_offset] + \
                self.packet_data[packet_offset + 1] + self.packet_data[packet_offset + 2] + \
                self.packet_data[packet_offset + 3]
            self.logger.debug(
                "ERP2: originator id:{0}".format(self.originator_id))
            packet_offset += 3
            packet_offset += 1
            self.destination_id = self.packet_data[packet_offset] + \
                self.packet_data[packet_offset + 1] + self.packet_data[packet_offset + 2] + \
                self.packet_data[packet_offset + 3]
            self.logger.debug(
                "ERP2: destination id:{0}".format(self.destination_id))
            packet_offset += 3
        elif self.addctrl == self.ERP2_HEADER_ADDCTRL_ID48_NODIST:
            packet_offset += 1
            self.originator_id = self.packet_data[packet_offset] + \
                self.packet_data[packet_offset + 1] + self.packet_data[packet_offset + 2] + \
                self.packet_data[packet_offset + 3] + self.packet_data[packet_offset + 4] + \
                self.packet_data[packet_offset + 5]
            self.logger.debug(
                "ERP2: originator id:{0}".format(self.originator_id))
            packet_offset += 5

        # parse Data DL (payload width depends on telegram type)
        if self.telegram_type == self.ERP2_HEADER_TELEGRAM_TYPE_RPS:
            packet_offset += 1
            self.data_dl.append(self.packet_data[packet_offset])
            self.logger.debug("ERP2: RPS DATA DL:{0}".format(self.data_dl))
        elif self.telegram_type == self.ERP2_HEADER_TELEGRAM_TYPE_1BS:
            packet_offset += 1
            self.data_dl.append(self.packet_data[packet_offset])
            self.logger.debug(
                "ERP2: 1BS DATA DL :{0}".format(self.data_dl))
        elif self.telegram_type == self.ERP2_HEADER_TELEGRAM_TYPE_4BS:
            packet_offset += 1
            self.data_dl.append(self.packet_data[packet_offset])
            self.data_dl.append(self.packet_data[packet_offset + 1])
            self.data_dl.append(self.packet_data[packet_offset + 2])
            self.data_dl.append(self.packet_data[packet_offset + 3])
            self.logger.debug("ERP2: 4BS DATA DL:{0}".format(self.data_dl))
            packet_offset += 3

        # parse optional data (unsupported)
        # parse CRC8 DATA
        packet_offset += 1
        self.data_crc8 = self.packet_data[packet_offset]
        self.logger.debug("ERP2: data crc8:{0}".format(self.data_crc8))
        # check CRC8 DATA
        d_data = self.packet_data[
            self.ESP3_HEADER_SIZE:self.ESP3_HEADER_SIZE + self.data_length - 1]
        self.logger.debug("ERP2: crc8 data:{0}".format(d_data))
        if calc_crc8.calcCRC8(d_data, self.data_crc8) is not True:
            self.logger.error("ERP2: Invalid packet data CRC8 check error")
            return False

        # parse optional Data: Number of sub telegram
        packet_offset += 1
        self.optional_subtelnum = struct.unpack(
            '>B', binascii.unhexlify(self.packet_data[packet_offset]))[0]
        self.logger.debug("ESP3: optional SubTelNum:{0}".format(
            self.optional_subtelnum))

        # parse optional Data: dBm (stored as positive attenuation, negated)
        packet_offset += 1
        self.optional_dbm = struct.unpack(
            '>B', binascii.unhexlify(self.packet_data[packet_offset]))[0] * -1
        self.logger.debug(
            "ESP3: optional dBm:{0}".format(self.optional_dbm))

        # parse CRC8 DATA and OPTIONAL_DATA
        packet_offset += 1
        self.header_crc8d = self.packet_data[packet_offset]
        self.logger.debug(
            "ESP3: header crc8d:{0}".format(self.header_crc8d))

        # check CRC8 DATA and OPTIONAL_DATA
        d_data = self.packet_data[
            self.ESP3_HEADER_SIZE:self.ESP3_HEADER_SIZE + self.data_length +
            self.optional_length]
        self.logger.debug(
            "ESP3: crc8 data + optional data:{0}".format(d_data))
        if calc_crc8.calcCRC8(d_data, self.header_crc8d) is not True:
            self.logger.error(
                "ESP3: Invalid packet data and optonal data CRC8 check error")
            return False
    else:
        self.logger.error("ERP2: Unsupported ERP2 Data contents")
        return False

    return True
# -- Python 2 script fragment: assembles a BIP143-style signing preimage,
# -- signs it deterministically, and builds a DER signature with hashtype.
# -- All input variables (hashPrevouts, outpoint, priv_key, ...) are defined
# -- earlier in the script, outside this chunk.
print "hashPrevouts:", hashPrevouts
print "hashSequence:", hashSequence
print "outpoint :", outpoint
print "scriptCode :", scriptCode
print "redeem_amount:", redeem_amount
print "sequence :", sequence
print "hashOutputs :", hashOutputs
print "n_locktime :", n_locktime
print "sig_hash_all:", sig_hash_all
# Convert the amount to satoshi and re-serialize it for the preimage.
amount_satoshi=long(round(float(redeem_amount*__unit)))
redeem_amount=formatamount(amount_satoshi)
# Concatenate the hex fields in BIP143 preimage order.
signed_tx=n_version+hashPrevouts+hashSequence+outpoint+scriptCode+redeem_amount+sequence+hashOutputs+n_locktime+sig_hash_all
#print signed_tx
sk = SigningKey.from_string(binascii.unhexlify(priv_key), curve=SECP256k1)
# Double-SHA256 of the raw preimage is what actually gets signed.
sign_data_hash_d2=dhash256(signed_tx.decode('hex'))
print reverse_byte_order(sign_data_hash_d2.encode('hex'))
# RFC6979 deterministic signing, canonical (low-S) DER encoding.
signature = sk.sign_digest_deterministic(sign_data_hash_d2, hashfunc=hashlib.sha256, sigencode=sigencode_der_canonize)
#print signature.encode('hex')
signature=signature.encode('hex')
# Append SIGHASH_ALL (0x01) to the DER signature.
sig_hash_type='01'
signature=signature+sig_hash_type
#print signature
# Prefix the signature with its byte length as a push opcode.
p_len=(len(signature))/2
# NOTE(review): lstrip('0x') strips any leading '0' and 'x' characters, not
# just the '0x' prefix — a length like 0x30 would lose its trailing zero?
# (lstrip removes from the left only, but e.g. 0x08 -> '8', not '08') —
# verify the intended zero-padding of the push length.
p_len=hex(p_len).lstrip('0x')
signature=p_len + signature
#print signature
def fix_out(in_str):
    """Parse a hex payload line into a list of byte values.

    The first character of the (right-stripped) line is skipped, the rest
    is unhexlified, and the first RAND_LEN bytes are returned as a list of
    ints (``ord`` per character — Python 2 semantics, where unhexlify
    returns str).  On any parse failure a zero-filled list of length
    RAND_LEN is returned instead, keeping the best-effort contract.
    """
    try:
        return [ord(p) for p in binascii.unhexlify(in_str.rstrip()[1:])[:RAND_LEN]]
    except Exception:
        # Narrowed from a bare `except:` so SystemExit / KeyboardInterrupt
        # still propagate.
        # NOTE(review): the fallback yields floats (0.0) while the success
        # path yields ints — kept as-is for backward compatibility.
        return [0.0 for x in range(0, RAND_LEN)]
def __init__(self, path):
    """Open (or create) the LevelDB blockchain store at *path* and rebuild
    the in-memory header index.

    Three start-up paths, keyed on the stored DB version:
    * version matches: load current block/header heights, then restore the
      header index from the stored header-hash lists; if none are stored,
      rebuild from raw block records; if headers are ahead of the stored
      count, walk back from the current header hash and re-add them.
    * version differs: wipe the DB, persist the genesis block and stamp the
      current version.

    Raises:
        Exception('Leveldb Unavailable') if the DB cannot be opened
        (commonly because another process holds the lock).
    """
    super(LevelDBBlockchain, self).__init__()
    self._path = path

    self._header_index = []
    # Index 0 is always the genesis block header hash.
    self._header_index.append(Blockchain.GenesisBlock().Header.Hash.ToBytes())

    try:
        self._db = plyvel.DB(self._path, create_if_missing=True)
        # self._db = plyvel.DB(self._path, create_if_missing=True, bloom_filter_bits=16, compression=None)
    except Exception as e:
        logger.info("leveldb unavailable, you may already be running this process: %s " % e)
        raise Exception('Leveldb Unavailable')

    version = self._db.get(DBPrefix.SYS_Version)

    if version == self._sysversion:  # or in the future, if version doesn't equal the current version...
        # Heights are stored as the trailing 4 little-endian bytes of the value.
        ba = bytearray(self._db.get(DBPrefix.SYS_CurrentBlock, 0))
        self._current_block_height = int.from_bytes(ba[-4:], 'little')

        ba = bytearray(self._db.get(DBPrefix.SYS_CurrentHeader, 0))
        current_header_height = int.from_bytes(ba[-4:], 'little')
        # First 64 bytes hold the hex header hash.
        current_header_hash = bytes(ba[:64].decode('utf-8'), encoding='utf-8')

        # logger.info("current header hash!! %s " % current_header_hash)
        # logger.info("current header height, hashes %s %s %s" %(self._current_block_height, self._header_index, current_header_height) )

        hashes = []
        try:
            # Each IX_HeaderHashList record is a serialized batch of up to
            # 2000 32-byte hashes keyed by starting index.
            for key, value in self._db.iterator(prefix=DBPrefix.IX_HeaderHashList):
                ms = StreamManager.GetStream(value)
                reader = BinaryReader(ms)
                hlist = reader.Read2000256List()
                key = int.from_bytes(key[-4:], 'little')
                hashes.append({'k': key, 'v': hlist})
                StreamManager.ReleaseStream(ms)
                # hashes.append({'index':int.from_bytes(key, 'little'), 'hash':value})
        except Exception as e:
            logger.info("Couldnt get stored header hash list: %s " % e)

        if len(hashes):
            hashes.sort(key=lambda x: x['k'])
            genstr = Blockchain.GenesisBlock().Hash.ToBytes()
            for hlist in hashes:
                for hash in hlist['v']:
                    # Genesis is already at index 0, so skip re-adding it,
                    # but still count it toward the stored total.
                    if hash != genstr:
                        self._header_index.append(hash)
                    self._stored_header_count += 1

        if self._stored_header_count == 0:
            # No hash lists stored: rebuild headers from the raw block data.
            headers = []
            for key, value in self._db.iterator(prefix=DBPrefix.DATA_Block):
                # First 8 bytes of a block record are skipped (presumably a
                # system-fee/metadata prefix — TODO confirm).
                dbhash = bytearray(value)[8:]
                headers.append(Header.FromTrimmedData(binascii.unhexlify(dbhash), 0))

            headers.sort(key=lambda h: h.Index)
            for h in headers:
                if h.Index > 0:
                    self._header_index.append(h.Hash.ToBytes())
        elif current_header_height > self._stored_header_count:
            # Stored header lists are behind the current header pointer:
            # walk backwards from the tip until we reach the last indexed
            # hash, then add the missing headers in order.
            try:
                hash = current_header_hash
                targethash = self._header_index[-1]

                newhashes = []
                while hash != targethash:
                    header = self.GetHeader(hash)
                    newhashes.insert(0, header)
                    hash = header.PrevHash.ToBytes()

                self.AddHeaders(newhashes)
            except Exception as e:
                # Best effort: a failure here just leaves the index short.
                pass
    else:
        # Version mismatch: clear the whole database and start fresh from
        # the genesis block.
        with self._db.write_batch() as wb:
            for key, value in self._db.iterator():
                wb.delete(key)

        self.Persist(Blockchain.GenesisBlock())
        self._db.put(DBPrefix.SYS_Version, self._sysversion)
def run_test(self):
    """Drive one BSON corpus spec (closure var *case_spec*) through the
    valid / decodeErrors / parseErrors sections.

    Valid cases are round-tripped between canonical BSON, canonical /
    degenerate / relaxed extended JSON and (for deprecated types) the
    converted forms.  *deprecated*, *bson_type* and *test_key* are also
    closure variables supplied by the test factory.
    """
    for valid_case in case_spec.get('valid', []):
        description = valid_case['description']
        if description in _TESTS_TO_SKIP:
            continue

        # Special case for testing encoding UUID as binary subtype 0x04.
        if description == 'subtype 0x04':
            encode_extjson = to_extjson_uuid_04
            encode_bson = to_bson_uuid_04
        else:
            encode_extjson = to_extjson
            encode_bson = to_bson

        # c = canonical, d = degenerate, r = relaxed; B = BSON, EJ = extjson.
        cB = binascii.unhexlify(b(valid_case['canonical_bson']))
        cEJ = valid_case['canonical_extjson']
        rEJ = valid_case.get('relaxed_extjson')
        dEJ = valid_case.get('degenerate_extjson')
        lossy = valid_case.get('lossy')

        decoded_bson = decode_bson(cB)

        if not lossy:
            # Make sure we can parse the legacy (default) JSON format.
            legacy_json = json_util.dumps(
                decoded_bson, json_options=json_util.LEGACY_JSON_OPTIONS)
            self.assertEqual(decode_extjson(legacy_json), decoded_bson)

        if deprecated:
            if 'converted_bson' in valid_case:
                converted_bson = binascii.unhexlify(
                    b(valid_case['converted_bson']))
                self.assertEqual(encode_bson(decoded_bson), converted_bson)
                self.assertJsonEqual(
                    encode_extjson(decode_bson(converted_bson)),
                    valid_case['converted_extjson'])
            # Make sure we can decode the type.
            self.assertEqual(decoded_bson, decode_extjson(cEJ))
            if test_key is not None:
                self.assertIsInstance(decoded_bson[test_key],
                                      _DEPRECATED_BSON_TYPES[bson_type])
            continue

        # PyPy3 and Jython can't handle NaN with a payload from
        # struct.(un)pack if endianness is specified in the format string.
        if not ((('PyPy' in sys.version and sys.version_info[:2] < (3, 3)) or
                 sys.platform.startswith("java")) and
                description == 'NaN with payload'):
            # Test round-tripping canonical bson.
            self.assertEqual(encode_bson(decoded_bson), cB)
            self.assertJsonEqual(encode_extjson(decoded_bson), cEJ)

        # Test round-tripping canonical extended json.
        decoded_json = decode_extjson(cEJ)
        self.assertJsonEqual(encode_extjson(decoded_json), cEJ)
        if not lossy and json_util._HAS_OBJECT_PAIRS_HOOK:
            self.assertEqual(encode_bson(decoded_json), cB)

        # Test round-tripping degenerate bson.
        if 'degenerate_bson' in valid_case:
            dB = binascii.unhexlify(b(valid_case['degenerate_bson']))
            self.assertEqual(encode_bson(decode_bson(dB)), cB)

        # Test round-tripping degenerate extended json.
        if dEJ is not None:
            decoded_json = decode_extjson(dEJ)
            self.assertJsonEqual(encode_extjson(decoded_json), cEJ)
            if not lossy:
                # We don't need to check json_util._HAS_OBJECT_PAIRS_HOOK
                # because degenerate_extjson is always a single key so
                # the order cannot be changed.
                self.assertEqual(encode_bson(decoded_json), cB)

        # Test round-tripping relaxed extended json.
        if rEJ is not None:
            self.assertJsonEqual(to_relaxed_extjson(decoded_bson), rEJ)
            decoded_json = decode_extjson(rEJ)
            self.assertJsonEqual(to_relaxed_extjson(decoded_json), rEJ)

    # Malformed BSON must raise InvalidBSON.
    for decode_error_case in case_spec.get('decodeErrors', []):
        with self.assertRaises(InvalidBSON):
            decode_bson(
                binascii.unhexlify(b(decode_error_case['bson'])))

    # Parse errors only apply to Decimal128 (0x13) and top-level (0x00).
    for parse_error_case in case_spec.get('parseErrors', []):
        if bson_type == '0x13':
            self.assertRaises(
                DecimalException, Decimal128, parse_error_case['string'])
        elif bson_type == '0x00':
            description = parse_error_case['description']
            if description in _NON_PARSE_ERRORS:
                # These must NOT raise.
                decode_extjson(parse_error_case['string'])
            else:
                try:
                    decode_extjson(parse_error_case['string'])
                    raise AssertionError('exception not raised for test '
                                         'case: ' + description)
                except (ValueError, KeyError, TypeError, InvalidId):
                    pass
        else:
            raise AssertionError('cannot test parseErrors for type ' +
                                 bson_type)
def show_import_form(self):
    """Show the IDA data-import dialog and patch the chosen bytes into the DB.

    Input can come from a file (raw bytes) or from a pasted string; pasted
    hex is normalized (spaces, ``\\x`` and ``0x`` prefixes removed) and
    unhexlified.  Optionally trims the buffer to the current selection size
    before applying via idaapi.patch_many_bytes.
    """
    selection, start_ea, end_ea = idaapi.read_selection()

    # With no active selection, default to one byte at the cursor.
    if not selection:
        start_ea = idaapi.get_screen_ea()
        end_ea = start_ea + 1

    # Create the form
    f = DataImportForm(start_ea, end_ea);

    # Execute the form
    ok = f.Execute()
    if ok == 1:
        start_ea = f.intStartEA.value
        end_ea = f.intEndEA.value

        if f.rFile.selected:
            imp_file = f.impFile.value
            try:
                f_imp_file = open(imp_file,'rb')
            except Exception as e:
                idaapi.warning("File I/O error({0}): {1}".format(e.errno, e.strerror))
                return
            else:
                buf = f_imp_file.read()
                f_imp_file.close()
        else:
            buf = f.strPatch.value

            # Hex values, unlike string literal, needs additional processing
            if f.rHex.selected:
                buf = buf.replace(' ','')       # remove spaces
                buf = buf.replace('\\x','')     # remove '\x' prefixes
                buf = buf.replace('0x','')      # remove '0x' prefixes
                try:
                    buf = binascii.unhexlify(buf)   # convert to bytes
                except Exception as e:
                    idaapi.warning("Invalid input: %s" % e)
                    f.Free()
                    return

        if not len(buf):
            idaapi.warning("There was nothing to import.")
            return

        # Trim to selection if needed:
        if f.cSize.checked:
            buf_size = end_ea - start_ea
            buf = buf[0:buf_size]

        # Now apply newly patched bytes
        idaapi.patch_many_bytes(start_ea, buf)

        # Refresh all IDA views
        self.patch_view.Refresh()

    # Dispose the form
    f.Free()
def apply_parameters(cls, subtypes, names):
    """Construct an instance from *subtypes* and hex-encoded *names*.

    Each name is hex-decoded with unhexlify; None entries pass through
    unchanged.
    """
    decoded_names = []
    for raw_name in names:
        decoded_names.append(None if raw_name is None else unhexlify(raw_name))
    return cls(subtypes, decoded_names)
def test_sha1hash(self):
    """Round-trip a 20-byte digest through lt.sha1_hash and back to hex."""
    hex_digest = 'a0' * 20
    digest = lt.sha1_hash(binascii.unhexlify(hex_digest))
    self.assertEqual(hex_digest, str(digest))
def hex_to_b64(data: str) -> str:
    """Re-encode a hexadecimal string as base64 text."""
    raw = binascii.unhexlify(data)
    return base64.b64encode(raw).decode()
def push_tx(self, nick, txhex):
    """Base64-encode the raw transaction given as hex and privmsg it to nick.

    Note: not currently used; will require prepare_privmsg call so
    not in this class (see send_error).
    """
    raw_tx = binascii.unhexlify(txhex)
    encoded_tx = base64.b64encode(raw_tx).decode('ascii')
    self.privmsg(nick, 'push', encoded_tx)
# teks = teks.getBinary() # print((teks > message)-(teks < message)) # lmfile = open('compare.txt', 'w') # # str1 = bz2.compress(str1.encode("utf-8")) # for item in message: # lmfile.write("%s," % item) # # lmfile.write("aduh") # # for item in teks: # lmfile.write("%s," % item) # # lmfile.close() # print(lenmes % 8) n = int(bitarray.bitarray(message[:-(lenmes % 8)]).tostring(), 2) write = binascii.unhexlify('%x' % n) print(write) f = open('hasyilgrde2.txt', 'wb') f.write(write) f.close() print(lx.count(0), lx.count(1), lx.count(2), lx.count(3), lx.count(4), lx.count(5), lx.count(6), lx.count(7))
def push_tx(self, nick, txhex):
    """Privmsg a transaction (hex in, base64 on the wire) to one counterparty.

    TODO supporting sending to arbitrary nicks adds quite a bit of
    complexity, not supported initially; will fail if nick is not part
    of TX.
    """
    payload = base64.b64encode(binascii.unhexlify(txhex)).decode('ascii')
    self.prepare_privmsg(nick, "push", payload)
def elaborate(x):
    """Worker loop: receive raw packets from pipe ``code2[x]`` and extract
    TLS certificates / SNI (port 443) and HTTP Host headers (port 80).

    Python 2 code: payloads are byte strings hex-encoded via .encode("HEX")
    and parsed by character offsets.  State kept per flow key:
      d      — TLS/generic flow records (index 4 = org name, 5 = server name)
      ds     — HTTP flow records
      certi  — partially reassembled certificate payloads
      retrasmission — out-of-order segment buffer
    NOTE(review): the flow key (last octets of both IPs + both ports summed)
    can collide across distinct flows; indentation of this collapsed source
    was reconstructed and should be verified against the original.
    """
    #threading.Thread(target=pulizia_memoria)
    retrasmission=[]
    d={}
    ds={}
    certi={}
    t=int(time.time()-start_time)
    while True:
        pkt=code2[x].recv()
        ips=pkt[0]
        ipd=pkt[1]
        # Strip the 14-byte Ethernet header.
        payload=pkt[2][14:]
        # IHL nibble * 4 bytes, expressed here in hex characters (*8/... /2).
        ip_lenght_hx=str(payload[0]).encode("HEX")
        ip_lenght=int(ip_lenght_hx[1],16)*8
        proto=str(payload[9:10]).encode("HEX")
        lenght_T=len(payload)
        # TCP flags byte (offset 13 into the TCP header).
        p=str(payload[ip_lenght/2+13:ip_lenght/2+14]).encode("HEX")
        if proto=='06':  # TCP only
            sp=int(str(payload[ip_lenght/2:ip_lenght/2+2]).encode("HEX"),16)
            dp=int(str(payload[ip_lenght/2+2:ip_lenght/2+4]).encode("HEX"),16)
            if sp==443 or dp==443:
                f=int(time.time()-start_time)
                # Flow key: last octet of each IP plus both ports.
                list=ips.split('.',4)
                x1=list[-1]
                list=ipd.split('.',4)
                x2=list[-1]
                key=int(x1)+int(x2)+sp+dp
                list=d.get(key,None)
                print list!=None,list
                if list!=None:
                    # Server->client leg: look for the certificate handshake.
                    if list[4]=='0' and sp==443:
                        stringa=str(payload).encode("HEX")
                        print stringa
                        # Locate the TCP header via the source-port hex string,
                        # then derive the data offset from the TCP header length.
                        hx=hex(int(str(sp))).lstrip("0x")
                        inizio=stringa.index(str(hx),0,len(stringa))
                        posizione=inizio+len(str(hx))+20
                        lenght_TCP_hx=stringa[posizione]
                        lenght=int(str(lenght_TCP_hx),16)*4
                        posizione_data=lenght*2+inizio-1
                        lenght_p=len(stringa)-posizione_data
                        lenght_TLS=0
                        total_lenght=len(stringa)-posizione_data
                        seq_num_hx=stringa[inizio+7:inizio+15]
                        seq_num=int(seq_num_hx,16)
                        next_seq_num=int(seq_num+total_lenght/2)
                        next_seq_num_hx=hex(next_seq_num).lstrip("0x").zfill(8)
                        next_seq_num_hx=next_seq_num_hx[:8]
                        # '160303' = TLS 1.2 handshake record marker.
                        index=stringa.find('160303',posizione_data,len(stringa))
                        lenght_TLS_hx=stringa[index+6:index+10]
                        try :
                            lenght_TLS=int(str(lenght_TLS_hx),16)*2
                        except:
                            index=-1
                        index_c=index+10
                        count=0
                        type='0'
                        # Walk handshake messages; '0b' = Certificate.
                        while index_c<len(stringa) and index!=-1:
                            type=stringa[index_c:index_c+2]
                            try:
                                lenght_TLS_c=int(stringa[index_c+2:index_c+8],16)*2
                            except:
                                break
                            if len(stringa)-index_c<lenght_TLS_c and type=='0b':
                                # Certificate spans segments: stash partial data.
                                lenght_p=len(stringa)-index_c
                                time_t=time.time()-start_time
                                certi[key]=[ips,ipd,sp,dp,lenght_TLS_c+8,stringa[index_c:],lenght_p,next_seq_num_hx,time_t]
                                index_c=len(stringa)
                            elif type=='0b':
                                certificate=stringa[index_c:index_c+8+lenght_TLS_c]
                                index_c=len(stringa)
                                try:
                                    # Skip the message/length preamble, then PEM-wrap.
                                    certificate=certificate[20:]
                                    result='-----BEGIN CERTIFICATE-----\n'+base64.encodestring(binascii.unhexlify(certificate))+'-----END CERTIFICATE-----'
                                    cert=x509.load_pem_x509_certificate(result,default_backend())
                                    certs=crypto.load_certificate(crypto.FILETYPE_PEM,result)
                                    certs2=certs.get_subject()
                                    name=certs2.organizationName
                                    print
                                    print name,'certificate'
                                    print
                                    l=0
                                    list[4]=name
                                    d[key]=list
                                except:
                                    print certificate
                            elif type!='0b':
                                # Not a certificate message: advance within or
                                # across handshake records.
                                count=count+lenght_TLS_c+8
                                if lenght_TLS<=count:
                                    index=stringa.find('160303',count,len(stringa))
                                    index_c=index+10
                                    lenght_TLS_hx=stringa[index+6:index+10]
                                    try:
                                        lenght_TLS=int(str(lenght_TLS_hx),16)*2
                                    except:
                                        index_c=len(stringa)
                                else:
                                    index_c=index_c+8+lenght_TLS_c
                        # Try to complete a previously stashed partial certificate.
                        lista=certi.get(key,None)
                        if lista!=None:
                            if type!='0b':
                                if str(seq_num_hx)==str(lista[7]):
                                    var=lista[5]
                                    # Merge any buffered out-of-order segments.
                                    try:
                                        index_retr=retrasmission.index(ips,0,len(retrasmission))
                                    except:
                                        index_retr=-1
                                    while index_retr!=-1:
                                        if retrasmission[index_retr+1]==dp and retrasmission[index_retr+4]==next_seq_num_hx:
                                            stringa=stringa+retrasmission[index_retr+5]
                                            lenght_p=lenght_p+retrasmission[index_retr+2]
                                            next_seq_num_hx=next_seq_num_hx+hex(int(retrasmission[index_retr+3]/2))
                                            retrasmission[index_retr+6]=[]
                                            index_retr=index_retr-5
                                        try:
                                            index_retr=retrasmission.index(ips,index_retr+5,len(retrasmission))
                                        except:
                                            index_retr=-1
                                    if lista[4]-lista[6]<=len(stringa)-posizione_data:
                                        # Enough bytes now: assemble and decode.
                                        payload=var+stringa[posizione_data:posizione_data+lista[4]-lista[6]]
                                        certificate=payload
                                        try:
                                            certificate=certificate[20:]
                                            result='-----BEGIN CERTIFICATE-----\n'+base64.encodestring(binascii.unhexlify(certificate))+'-----END CERTIFICATE-----'
                                            cert=x509.load_pem_x509_certificate(result,default_backend())
                                            certs=crypto.load_certificate(crypto.FILETYPE_PEM,result)
                                            certs2=certs.get_subject()
                                            name=certs2.organizationName
                                            print
                                            print name,'certificate'
                                            print
                                            l=0
                                            list[4]=name
                                            del certi[key]
                                            d[key]=list
                                        except:
                                            print certificate
                                    else:
                                        # Still incomplete: append and keep waiting.
                                        payload=var+stringa[posizione_data:]
                                        lista[7]=next_seq_num_hx
                                        lenght_pN=lista[6]+lenght_p
                                        lista[5]=payload
                                        lista[6]=lenght_pN
                                        lista[8]=time.time()-start_time
                                        certi[key]=lista
                                else:
                                    # Sequence mismatch: buffer for reassembly.
                                    retrasmission.extend([ips,dp,lenght_p,total_lenght,seq_num_hx,stringa[posizione_data:],(time.time()-start_time)])
                    # Client->server leg: parse ClientHello for the SNI name.
                    if list[5]=='0' and dp==443:
                        stringa=str(payload).encode("HEX")
                        hx=hex(int(str(sp))).lstrip("0x")
                        inizio=stringa.index(str(hx),0,len(stringa))
                        posizione=inizio+len(str(hx))+20
                        lenght_TCP_hx=stringa[posizione]
                        lenght=int(str(lenght_TCP_hx),16)*4
                        posizione_data=lenght*2+inizio-1
                        # '160301' = TLS record header as used by ClientHello.
                        index_ch=stringa.find('160301',posizione_data,len(stringa))
                        t=posizione_data
                        e_name='0'
                        while t<len(stringa) and index_ch!=-1:
                            tot_ch=int(stringa[index_ch+6:index_ch+10],16)
                            if stringa[index_ch+10:index_ch+12]=='01' and stringa[index_ch+18:index_ch+22]=='0303':
                                t=len(stringa)
                                # Skip session-id, cipher suites and compression
                                # methods to reach the extensions block.
                                len_sid=int(stringa[index_ch+86:index_ch+88],16)*2
                                len_cs=int(stringa[index_ch+len_sid+88:index_ch+len_sid+92],16)*2
                                len_cm=int(stringa[index_ch+len_sid+len_cs+92:index_ch+len_sid+len_cs+94],16)*2
                                accu=index_ch+len_sid+len_cs+94+len_cm
                                types=stringa[accu+4:accu+8]
                                # '0015' = padding extension; account for it.
                                if types=='0015':
                                    padd=int(stringa[accu+8:accu+12],16)
                                else:
                                    padd=0
                                if padd==0:
                                    m=0
                                else:
                                    m=8
                                while m<len(stringa):
                                    type=stringa[accu+4+padd*2+m:accu+8+padd*2+m]
                                    len_me=stringa[accu+8+padd*2+m:accu+12+padd*2+m]
                                    if type=='0000':
                                        # '0000' = server_name extension (SNI).
                                        len_server_name=int(stringa[accu+18+padd*2+m:accu+padd*2+m+22],16)
                                        e_name=binascii.unhexlify(stringa[accu+22+padd*2+m:accu+22+padd*2+len_server_name*2+m])
                                        m=len(stringa)
                                    else:
                                        try:
                                            m=int(str(len_me),16)*2+m+8
                                        except:
                                            print len_me
                                            break
                                index_ch=-1
                            else:
                                t=index_ch+6
                                index_ch=stringa.find('160301',index_ch+tot_ch+10,len(stringa))
                        if e_name!='0':
                            list[5]=e_name
                            d[key]=list
                flag=obtain_flag(p)
                print
                print ips,ipd,sp,dp
                print payload[ip_lenght/2+13:ip_lenght/2+14].encode("HEX")
                print flag
                analize_packet(ips,ipd,key,dp,sp,flag,lenght_T,d)
            if dp==80 or sp==80:
                list=ips.split('.',4)
                x1=list[-1]
                list=ipd.split('.',4)
                x2=list[-1]
                key=int(x1)+int(x2)+sp+dp
                list=ds.get(key,None)
                list=ds.get(key)
                if list!=None:
                    if dp==80 and list[4]=='0':
                        stringa=str(payload).encode("HEX")
                        hx=hex(int(str(sp))).lstrip("0x")
                        inizio=stringa.index(str(hx),0,len(stringa))
                        posizione=inizio+len(str(hx))+20
                        lenght_TCP_hx=stringa[posizione]
                        lenght=int(str(lenght_TCP_hx),16)*4
                        posizione_data=lenght*2+inizio
                        host='-1'
                        # '474554' = "GET": pull the Host: header value.
                        if stringa[posizione_data:posizione_data+6]=='474554':
                            try:
                                start=stringa.index('486f73743a',posizione_data,len(stringa))
                                end=stringa.index('0d0a436f',start,len(stringa))
                                host=str(stringa[start+12:end]).decode("HEX")
                            except:
                                pass
                        if host!='-1':
                            list[4]=host
                            list[5]='http'
                            d[key]=list
                    flag=obtain_flag(p)
                    analize_packet(ips,ipd,key,dp,sp,flag,lenght_T,ds)
def tx_inputs(self, tx, for_sig=False, script_gen=SCRIPT_GEN_LEGACY):
    """Convert an Electrum transaction's inputs into hardware-wallet
    TxInputType protobuf messages.

    Parameters:
        tx: transaction whose .inputs() dicts are marshalled.
        for_sig: when True, fill in BIP32 derivation paths / multisig
            redeem-script data needed for signing; otherwise only the
            outpoint and sequence are populated.
        script_gen: one of SCRIPT_GEN_LEGACY / SCRIPT_GEN_P2SH_SEGWIT /
            SCRIPT_GEN_NATIVE_SEGWIT, selecting the device script type.

    Returns:
        List of self.types.TxInputType messages, one per input.
    """
    inputs = []
    for txin in tx.inputs():
        txinputtype = self.types.TxInputType()
        if txin['type'] == 'coinbase':
            prev_hash = "\0" * 32
            prev_index = 0xffffffff  # signed int -1
        else:
            if for_sig:
                x_pubkeys = txin['x_pubkeys']
                if len(x_pubkeys) == 1:
                    # Single-sig: extend the input with our derivation path.
                    x_pubkey = x_pubkeys[0]
                    xpub, s = parse_xpubkey(x_pubkey)
                    xpub_n = self.client_class.expand_path(
                        self.xpub_path[xpub])
                    txinputtype._extend_address_n(xpub_n + s)
                    if script_gen == SCRIPT_GEN_NATIVE_SEGWIT:
                        txinputtype.script_type = self.types.InputScriptType.SPENDWITNESS
                    elif script_gen == SCRIPT_GEN_P2SH_SEGWIT:
                        txinputtype.script_type = self.types.InputScriptType.SPENDP2SHWITNESS
                    else:
                        txinputtype.script_type = self.types.InputScriptType.SPENDADDRESS
                else:
                    # Multisig: build HDNodePathType entries for every cosigner.
                    def f(x_pubkey):
                        if is_xpubkey(x_pubkey):
                            xpub, s = parse_xpubkey(x_pubkey)
                        else:
                            xpub = xpub_from_pubkey(0, bfh(x_pubkey))
                            s = []
                        node = self.ckd_public.deserialize(xpub)
                        return self.types.HDNodePathType(node=node, address_n=s)
                    pubkeys = list(map(f, x_pubkeys))
                    multisig = self.types.MultisigRedeemScriptType(
                        pubkeys=pubkeys,
                        # Strip the trailing sighash byte from each signature.
                        signatures=list(
                            map(lambda x: bfh(x)[:-1] if x else b'',
                                txin.get('signatures'))),
                        m=txin.get('num_sig'),
                    )
                    if script_gen == SCRIPT_GEN_NATIVE_SEGWIT:
                        script_type = self.types.InputScriptType.SPENDWITNESS
                    elif script_gen == SCRIPT_GEN_P2SH_SEGWIT:
                        script_type = self.types.InputScriptType.SPENDP2SHWITNESS
                    else:
                        script_type = self.types.InputScriptType.SPENDMULTISIG
                    txinputtype = self.types.TxInputType(
                        script_type=script_type,
                        multisig=multisig)
                    # find which key is mine
                    for x_pubkey in x_pubkeys:
                        if is_xpubkey(x_pubkey):
                            xpub, s = parse_xpubkey(x_pubkey)
                            if xpub in self.xpub_path:
                                xpub_n = self.client_class.expand_path(
                                    self.xpub_path[xpub])
                                txinputtype._extend_address_n(xpub_n + s)
                                break
            prev_hash = unhexlify(txin['prevout_hash'])
            prev_index = txin['prevout_n']
        if 'value' in txin:
            txinputtype.amount = txin['value']
        txinputtype.prev_hash = prev_hash
        txinputtype.prev_index = prev_index
        if 'scriptSig' in txin:
            script_sig = bfh(txin['scriptSig'])
            txinputtype.script_sig = script_sig
        # Default sequence enables RBF-style opt-in (0xfffffffe).
        txinputtype.sequence = txin.get('sequence', 0xffffffff - 1)
        inputs.append(txinputtype)
    return inputs
def prepare_send_tx(self, mc, nick_list, txhex):
    """Send the transaction (hex) to every nick as a base64 'tx' privmsg."""
    b64tx = base64.b64encode(binascii.unhexlify(txhex)).decode('ascii')
    for counterparty in nick_list:
        self.prepare_privmsg(counterparty, "tx", b64tx, mc=mc)
# -- Script fragment: apply defaults to the `settings` dict, normalize
# -- types, and launch BlockDataCopier.
# NOTE(review): the first assignment is the body of an `if 'hashlist' not in
# settings:` guard that lies just before this chunk — verify in context.
settings['hashlist'] = 'hashlist.txt'
if 'file_timestamp' not in settings:
    settings['file_timestamp'] = 0
if 'split_timestamp' not in settings:
    settings['split_timestamp'] = 0
if 'max_out_sz' not in settings:
    settings['max_out_sz'] = 1000 * 1000 * 1000
if 'out_of_order_cache_sz' not in settings:
    settings['out_of_order_cache_sz'] = 100 * 1000 * 1000
if 'debug_output' not in settings:
    settings['debug_output'] = 'false'

# Normalize config values read as strings into their working types.
settings['max_out_sz'] = int(settings['max_out_sz'])
settings['split_timestamp'] = int(settings['split_timestamp'])
settings['file_timestamp'] = int(settings['file_timestamp'])
settings['netmagic'] = unhexlify(settings['netmagic'].encode('utf-8'))
settings['out_of_order_cache_sz'] = int(settings['out_of_order_cache_sz'])
settings['debug_output'] = settings['debug_output'].lower()

if 'output_file' not in settings and 'output' not in settings:
    print("Missing output file / directory")
    sys.exit(1)

blkindex = get_block_hashes(settings)
blkmap = mkblockmap(blkindex)

# Block hash map won't be byte-reversed. Neither should the genesis hash.
if not settings['genesis'] in blkmap:
    print("Genesis block not found in hashlist")
else:
    BlockDataCopier(settings, blkindex, blkmap).run()
def toBase64(value):
    """Convert a hexadecimal string to its base64 representation (ASCII str)."""
    raw_bytes = binascii.unhexlify(value)
    encoded = base64.b64encode(raw_bytes)
    return encoded.decode('ascii')
def h2b(h):
    """Hex-decode *h* into bytes, accepting unicode input.

    A version of binascii.unhexlify that accepts unicode.  This is no
    longer necessary as of Python 3.3, but it doesn't hurt.
    """
    ascii_bytes = h.encode("ascii")
    return binascii.unhexlify(ascii_bytes)
def get_crt(account_key, csr, set_acme):
    """Obtain a signed certificate from an ACME v1 CA (Let's Encrypt style).

    Steps: parse the RSA account key into a JWK header, register the
    account, run the http-01 challenge for every domain in the CSR
    (CN + SANs) via the *set_acme* callback, then submit the CSR and
    return the PEM-wrapped certificate.

    Parameters:
        account_key: path to the ACME account private key (PEM).
        csr: path to the certificate signing request (PEM).
        set_acme: callable(token, keyauthorization) that publishes /
            clears the well-known challenge response.

    Raises:
        IOError on OpenSSL subprocess failures; ValueError on any ACME
        protocol error or failed challenge.
    """
    # helper function base64 encode for jose spec (unpadded urlsafe)
    def _b64(b):
        return base64.urlsafe_b64encode(b).decode('utf8').replace("=", "")

    # parse account key to get public key
    logger.info("Parsing acme account key...", "acme")
    proc = subprocess.Popen(
        ["openssl", "rsa", "-in", account_key, "-noout", "-text"],
        stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = proc.communicate()
    if proc.returncode != 0:
        raise IOError("OpenSSL Error: {0}".format(err))
    # Pull modulus and exponent out of the openssl text dump.
    pub_hex, pub_exp = re.search(
        r"modulus:\n\s+00:([a-f0-9\:\s]+?)\npublicExponent: ([0-9]+)",
        out.decode('utf8'), re.MULTILINE | re.DOTALL).groups()
    pub_exp = "{0:x}".format(int(pub_exp))
    # unhexlify needs an even digit count.
    pub_exp = "0{0}".format(pub_exp) if len(pub_exp) % 2 else pub_exp
    header = {
        "alg": "RS256",
        "jwk": {
            "e": _b64(binascii.unhexlify(pub_exp.encode("utf-8"))),
            "kty": "RSA",
            "n": _b64(
                binascii.unhexlify(
                    re.sub(r"(\s|:)", "", pub_hex).encode("utf-8"))),
        },
    }
    # Canonical JSON of the JWK is hashed into the account thumbprint
    # used in challenge key authorizations.
    accountkey_json = json.dumps(header['jwk'], sort_keys=True,
        separators=(',', ':'))
    thumbprint = _b64(hashlib.sha256(accountkey_json.encode('utf8')).digest())

    # helper function make signed requests
    def _send_signed_request(url, payload):
        payload64 = _b64(json.dumps(payload).encode('utf8'))
        protected = copy.deepcopy(header)
        # Fresh anti-replay nonce per request.
        protected["nonce"] = urlopen(settings.app.acme_api_url + "/directory").headers['Replay-Nonce']
        protected64 = _b64(json.dumps(protected).encode('utf8'))
        # Sign "protected.payload" with the account key via openssl.
        proc = subprocess.Popen(
            ["openssl", "dgst", "-sha256", "-sign", account_key],
            stdin=subprocess.PIPE, stdout=subprocess.PIPE,
            stderr=subprocess.PIPE)
        out, err = proc.communicate("{0}.{1}".format(protected64, payload64).encode('utf8'))
        if proc.returncode != 0:
            raise IOError("OpenSSL Error: {0}".format(err))
        data = json.dumps({
            "header": header,
            "protected": protected64,
            "payload": payload64,
            "signature": _b64(out),
        })
        try:
            resp = urlopen(url, data.encode('utf8'), timeout=10)
            return resp.getcode(), resp.read()
        except IOError as e:
            # HTTPError carries .code/.read; other IOErrors fall back to str.
            return getattr(e, "code", None), getattr(e, "read", e.__str__)()

    # find domains
    logger.info("Parsing acme CSR...", "acme")
    proc = subprocess.Popen(["openssl", "req", "-in", csr, "-noout", "-text"],
        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = proc.communicate()
    if proc.returncode != 0:
        raise IOError("Error loading {0}: {1}".format(csr, err))
    domains = set([])
    common_name = re.search(r"Subject:.*? CN=([^\s,;/]+)", out.decode('utf8'))
    if common_name is not None:
        domains.add(common_name.group(1))
    subject_alt_names = re.search(
        r"X509v3 Subject Alternative Name: \n +([^\n]+)\n",
        out.decode('utf8'), re.MULTILINE | re.DOTALL)
    if subject_alt_names is not None:
        for san in subject_alt_names.group(1).split(", "):
            if san.startswith("DNS:"):
                domains.add(san[4:])

    # get the certificate domains and expiration
    logger.info("Registering acme account...", "acme")
    code, result = _send_signed_request(
        settings.app.acme_api_url + "/acme/new-reg", {
        "resource": "new-reg",
        "agreement": "https://letsencrypt.org/documents/LE-SA-v1.0.1-July-27-2015.pdf",
    })
    if code == 201:
        logger.info("Registered acme certificate", "acme")
    elif code == 409:
        # 409 = account already exists; not an error.
        logger.info("Already registered acme certificate", "acme")
    else:
        raise ValueError("Error registering: {0} {1}".format(code, result))

    # verify each domain
    for domain in domains:
        logger.info("Verifying acme domain {0}...".format(domain), "acme")

        # get new challenge
        code, result = _send_signed_request(
            settings.app.acme_api_url + "/acme/new-authz", {
            "resource": "new-authz",
            "identifier": {
                "type": "dns",
                "value": domain
            },
        })
        if code != 201:
            raise ValueError("Error requesting challenges: {0} {1}".format(
                code, result))

        # make the challenge file
        challenge = [
            c for c in json.loads(result.decode('utf8'))['challenges']
            if c['type'] == "http-01"
        ][0]
        token = re.sub(r"[^A-Za-z0-9_\-]", "_", challenge['token'])
        keyauthorization = "{0}.{1}".format(token, thumbprint)
        set_acme(token, keyauthorization)

        # check that the file is in place
        wellknown_url = "http://{0}/.well-known/acme-challenge/{1}".format(
            domain, token)
        try:
            resp = urlopen(wellknown_url, timeout=10)
            resp_data = resp.read().decode('utf8').strip()
            assert resp_data == keyauthorization
        except (IOError, AssertionError):
            set_acme(None, None)
            raise ValueError("Couldn't download {0}".format(wellknown_url))

        # notify challenge are met
        code, result = _send_signed_request(
            challenge['uri'], {
            "resource": "challenge",
            "keyAuthorization": keyauthorization,
        })
        if code != 202:
            raise ValueError("Error triggering challenge: {0} {1}".format(
                code, result))

        # wait for challenge to be verified
        while True:
            try:
                resp = urlopen(challenge['uri'], timeout=10)
                challenge_status = json.loads(resp.read().decode('utf8'))
            except IOError as e:
                raise ValueError("Error checking challenge: {0} {1}".format(
                    e.code, json.loads(e.read().decode('utf8'))))
            if challenge_status['status'] == "pending":
                time.sleep(2)
            elif challenge_status['status'] == "valid":
                logger.info("Verified acme domain {0}".format(domain))
                set_acme(None, None)
                break
            else:
                raise ValueError("{0} challenge did not pass: {1}".format(
                    domain, challenge_status))

    # get the new certificate
    logger.info("Signing acme certificate...")
    proc = subprocess.Popen(["openssl", "req", "-in", csr, "-outform", "DER"],
        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    csr_der, err = proc.communicate()
    code, result = _send_signed_request(
        settings.app.acme_api_url + "/acme/new-cert", {
        "resource": "new-cert",
        "csr": _b64(csr_der),
    })
    if code != 201:
        raise ValueError("Error signing certificate: {0} {1}".format(
            code, result))

    # return signed certificate!
    logger.info("Signed acme certificate", "acme")
    return """-----BEGIN CERTIFICATE-----\n{0}\n-----END CERTIFICATE-----\n""".format(
        "\n".join(textwrap.wrap(base64.b64encode(result).decode('utf8'), 64)))
class CompressionMethod(ByteEnum):
    """7z coder-method identifiers (raw byte sequences from the 7z spec).

    Each member holds the little binary ID that appears in a 7z archive
    header to name a compression/filter/crypto codec.
    """

    # Plain filters / branch converters
    COPY = bytes.fromhex('00')
    DELTA = bytes.fromhex('03')
    BCJ = bytes.fromhex('04')
    PPC = bytes.fromhex('05')
    IA64 = bytes.fromhex('06')
    ARM = bytes.fromhex('07')
    ARMT = bytes.fromhex('08')
    SPARC = bytes.fromhex('09')
    # 7Z = 03..
    LZMA = bytes.fromhex('030101')
    PPMD = bytes.fromhex('030401')
    P7Z_BCJ = bytes.fromhex('03030103')
    P7Z_BCJ2 = bytes.fromhex('0303011B')
    BCJ_PPC = bytes.fromhex('03030205')
    BCJ_IA64 = bytes.fromhex('03030401')
    BCJ_ARM = bytes.fromhex('03030501')
    BCJ_ARMT = bytes.fromhex('03030701')
    BCJ_SPARC = bytes.fromhex('03030805')
    LZMA2 = bytes.fromhex('21')
    # MISC : 04..
    MISC_ZIP = bytes.fromhex('0401')
    MISC_BZIP2 = bytes.fromhex('040202')
    MISC_DEFLATE = bytes.fromhex('040108')
    MISC_DEFLATE64 = bytes.fromhex('040109')
    MISC_Z = bytes.fromhex('0405')
    MISC_LZH = bytes.fromhex('0406')
    NSIS_DEFLATE = bytes.fromhex('040901')
    NSIS_BZIP2 = bytes.fromhex('040902')
    # CRYPTO 06..
    CRYPT_ZIPCRYPT = bytes.fromhex('06f10101')
    CRYPT_RAR29AES = bytes.fromhex('06f10303')
    CRYPT_AES256_SHA256 = bytes.fromhex('06f10701')
def run_test(self):
    """Exercise the address-index RPCs end to end on a small regtest chain.

    Covers: getaddressbalance, getaddresstxids (single address, height
    range, multiple addresses), getaddressdeltas, getaddressutxos,
    getaddressmempool, plus behavior across a one-block reorg.
    NOTE(review): relies on a 4-node harness (self.nodes[0..3]) set up
    outside this method — confirm against the enclosing test class.
    """
    print("Mining blocks...")
    self.nodes[0].generate(105)
    self.sync_all()
    chain_height = self.nodes[1].getblockcount()
    assert_equal(chain_height, 105)
    assert_equal(self.nodes[1].getbalance(), 0)
    assert_equal(self.nodes[2].getbalance(), 0)

    # Check that balances are correct
    balance0 = self.nodes[1].getaddressbalance("93bVhahvUKmQu8gu9g3QnPPa2cxFK98pMB")
    assert_equal(balance0["balance"], 0)

    # Check p2pkh and p2sh address indexes
    # Alternate payments to a p2pkh and a p2sh address, one block each,
    # so each index gets three entries at known heights (106..111).
    print("Testing p2pkh and p2sh address index...")
    txid0 = self.nodes[0].sendtoaddress("yMNJePdcKvXtWWQnFYHNeJ5u8TF2v1dfK4", 10)
    self.nodes[0].generate(1)
    txidb0 = self.nodes[0].sendtoaddress("93bVhahvUKmQu8gu9g3QnPPa2cxFK98pMB", 10)
    self.nodes[0].generate(1)
    txid1 = self.nodes[0].sendtoaddress("yMNJePdcKvXtWWQnFYHNeJ5u8TF2v1dfK4", 15)
    self.nodes[0].generate(1)
    txidb1 = self.nodes[0].sendtoaddress("93bVhahvUKmQu8gu9g3QnPPa2cxFK98pMB", 15)
    self.nodes[0].generate(1)
    txid2 = self.nodes[0].sendtoaddress("yMNJePdcKvXtWWQnFYHNeJ5u8TF2v1dfK4", 20)
    self.nodes[0].generate(1)
    txidb2 = self.nodes[0].sendtoaddress("93bVhahvUKmQu8gu9g3QnPPa2cxFK98pMB", 20)
    self.nodes[0].generate(1)
    self.sync_all()

    txids = self.nodes[1].getaddresstxids("yMNJePdcKvXtWWQnFYHNeJ5u8TF2v1dfK4")
    assert_equal(len(txids), 3)
    assert_equal(txids[0], txid0)
    assert_equal(txids[1], txid1)
    assert_equal(txids[2], txid2)

    txidsb = self.nodes[1].getaddresstxids("93bVhahvUKmQu8gu9g3QnPPa2cxFK98pMB")
    assert_equal(len(txidsb), 3)
    assert_equal(txidsb[0], txidb0)
    assert_equal(txidsb[1], txidb1)
    assert_equal(txidsb[2], txidb2)

    # Check that limiting by height works
    print("Testing querying txids by range of block heights..")
    height_txids = self.nodes[1].getaddresstxids({
        "addresses": ["93bVhahvUKmQu8gu9g3QnPPa2cxFK98pMB"],
        "start": 105,
        "end": 110
    })
    assert_equal(len(height_txids), 2)
    assert_equal(height_txids[0], txidb0)
    assert_equal(height_txids[1], txidb1)

    # Check that multiple addresses works
    # Results interleave by block height, not grouped by address.
    multitxids = self.nodes[1].getaddresstxids({"addresses": ["93bVhahvUKmQu8gu9g3QnPPa2cxFK98pMB", "yMNJePdcKvXtWWQnFYHNeJ5u8TF2v1dfK4"]})
    assert_equal(len(multitxids), 6)
    assert_equal(multitxids[0], txid0)
    assert_equal(multitxids[1], txidb0)
    assert_equal(multitxids[2], txid1)
    assert_equal(multitxids[3], txidb1)
    assert_equal(multitxids[4], txid2)
    assert_equal(multitxids[5], txidb2)

    # Check that balances are correct
    balance0 = self.nodes[1].getaddressbalance("93bVhahvUKmQu8gu9g3QnPPa2cxFK98pMB")
    assert_equal(balance0["balance"], 45 * 100000000)

    # Check that outputs with the same address will only return one txid
    # One tx paying the same script twice must index as a single txid.
    print("Testing for txid uniqueness...")
    addressHash = binascii.unhexlify("FE30B718DCF0BF8A2A686BF1820C073F8B2C3B37")
    scriptPubKey = CScript([OP_HASH160, addressHash, OP_EQUAL])
    unspent = self.nodes[0].listunspent()
    tx = CTransaction()
    tx.vin = [CTxIn(COutPoint(int(unspent[0]["txid"], 16), unspent[0]["vout"]))]
    tx.vout = [CTxOut(10, scriptPubKey), CTxOut(11, scriptPubKey)]
    tx.rehash()
    signed_tx = self.nodes[0].signrawtransaction(binascii.hexlify(tx.serialize()).decode("utf-8"))
    sent_txid = self.nodes[0].sendrawtransaction(signed_tx["hex"], True, False, True)
    self.nodes[0].generate(1)
    self.sync_all()

    txidsmany = self.nodes[1].getaddresstxids("93bVhahvUKmQu8gu9g3QnPPa2cxFK98pMB")
    assert_equal(len(txidsmany), 4)
    assert_equal(txidsmany[3], sent_txid)

    # Check that balances are correct
    print("Testing balances...")
    balance0 = self.nodes[1].getaddressbalance("93bVhahvUKmQu8gu9g3QnPPa2cxFK98pMB")
    assert_equal(balance0["balance"], 45 * 100000000 + 21)

    # Check that balances are correct after spending
    print("Testing balances after spending...")
    privkey2 = "cU4zhap7nPJAWeMFu4j6jLrfPmqakDAzy8zn8Fhb3oEevdm4e5Lc"
    address2 = "yeMpGzMj3rhtnz48XsfpB8itPHhHtgxLc3"
    addressHash2 = binascii.unhexlify("C5E4FB9171C22409809A3E8047A29C83886E325D")
    scriptPubKey2 = CScript([OP_DUP, OP_HASH160, addressHash2, OP_EQUALVERIFY, OP_CHECKSIG])
    self.nodes[0].importprivkey(privkey2)
    unspent = self.nodes[0].listunspent()
    tx = CTransaction()
    tx.vin = [CTxIn(COutPoint(int(unspent[0]["txid"], 16), unspent[0]["vout"]))]
    amount = int(unspent[0]["amount"] * 100000000)
    tx.vout = [CTxOut(amount, scriptPubKey2)]
    tx.rehash()
    signed_tx = self.nodes[0].signrawtransaction(binascii.hexlify(tx.serialize()).decode("utf-8"))
    spending_txid = self.nodes[0].sendrawtransaction(signed_tx["hex"], True, False, True)
    self.nodes[0].generate(1)
    self.sync_all()
    balance1 = self.nodes[1].getaddressbalance(address2)
    assert_equal(balance1["balance"], amount)

    # Spend part of it back out; 10000 satoshis are left as the fee.
    tx = CTransaction()
    tx.vin = [CTxIn(COutPoint(int(spending_txid, 16), 0))]
    send_amount = 1 * 100000000 + 12840
    change_amount = amount - send_amount - 10000
    tx.vout = [CTxOut(change_amount, scriptPubKey2), CTxOut(send_amount, scriptPubKey)]
    tx.rehash()
    signed_tx = self.nodes[0].signrawtransaction(binascii.hexlify(tx.serialize()).decode("utf-8"))
    sent_txid = self.nodes[0].sendrawtransaction(signed_tx["hex"], True, False, True)
    self.nodes[0].generate(1)
    self.sync_all()

    balance2 = self.nodes[1].getaddressbalance(address2)
    assert_equal(balance2["balance"], change_amount)

    # Check that deltas are returned correctly
    deltas = self.nodes[1].getaddressdeltas({"addresses": [address2], "start": 0, "end": 200})
    balance3 = 0
    for delta in deltas:
        balance3 += delta["satoshis"]
    assert_equal(balance3, change_amount)
    assert_equal(deltas[0]["address"], address2)
    assert_equal(deltas[0]["blockindex"], 1)

    # Check that entire range will be queried
    deltasAll = self.nodes[1].getaddressdeltas({"addresses": [address2]})
    assert_equal(len(deltasAll), len(deltas))

    # Check that deltas can be returned from range of block heights
    deltas = self.nodes[1].getaddressdeltas({"addresses": [address2], "start": 113, "end": 113})
    assert_equal(len(deltas), 1)

    # Check that unspent outputs can be queried
    print("Testing utxos...")
    utxos = self.nodes[1].getaddressutxos({"addresses": [address2]})
    assert_equal(len(utxos), 1)
    assert_equal(utxos[0]["satoshis"], change_amount)

    # Check that indexes will be updated with a reorg
    print("Testing reorg...")
    best_hash = self.nodes[0].getbestblockhash()
    self.nodes[0].invalidateblock(best_hash)
    self.nodes[1].invalidateblock(best_hash)
    self.nodes[2].invalidateblock(best_hash)
    self.nodes[3].invalidateblock(best_hash)
    # Allow some time for the reorg to start
    set_mocktime(get_mocktime() + 2)
    set_node_times(self.nodes, get_mocktime())
    self.sync_all()

    balance4 = self.nodes[1].getaddressbalance(address2)
    assert_equal(balance4, balance1)

    utxos2 = self.nodes[1].getaddressutxos({"addresses": [address2]})
    assert_equal(len(utxos2), 1)
    assert_equal(utxos2[0]["satoshis"], amount)

    # Check sorting of utxos
    self.nodes[2].generate(150)

    txidsort1 = self.nodes[2].sendtoaddress(address2, 50)
    self.nodes[2].generate(1)
    txidsort2 = self.nodes[2].sendtoaddress(address2, 50)
    self.nodes[2].generate(1)
    self.sync_all()

    utxos3 = self.nodes[1].getaddressutxos({"addresses": [address2]})
    assert_equal(len(utxos3), 3)
    assert_equal(utxos3[0]["height"], 114)
    assert_equal(utxos3[1]["height"], 264)
    assert_equal(utxos3[2]["height"], 265)

    # Check mempool indexing
    print("Testing mempool indexing...")
    privKey3 = "cRyrMvvqi1dmpiCmjmmATqjAwo6Wu7QTjKu1ABMYW5aFG4VXW99K"
    address3 = "yWB15aAdpeKuSaQHFVJpBDPbNSLZJSnDLA"
    addressHash3 = binascii.unhexlify("6C186B3A308A77C779A9BB71C3B5A7EC28232A13")
    scriptPubKey3 = CScript([OP_DUP, OP_HASH160, addressHash3, OP_EQUALVERIFY, OP_CHECKSIG])
    # address4 = "2N8oFVB2vThAKury4vnLquW2zVjsYjjAkYQ"
    scriptPubKey4 = CScript([OP_HASH160, addressHash3, OP_EQUAL])
    unspent = self.nodes[2].listunspent()

    tx = CTransaction()
    tx.vin = [CTxIn(COutPoint(int(unspent[0]["txid"], 16), unspent[0]["vout"]))]
    amount = int(unspent[0]["amount"] * 100000000)
    tx.vout = [CTxOut(amount, scriptPubKey3)]
    tx.rehash()
    signed_tx = self.nodes[2].signrawtransaction(binascii.hexlify(tx.serialize()).decode("utf-8"))
    memtxid1 = self.nodes[2].sendrawtransaction(signed_tx["hex"], True, False, True)
    # Mock time is bumped between sends so mempool entries get distinct
    # timestamps and a deterministic order.
    set_mocktime(get_mocktime() + 2)
    set_node_times(self.nodes, get_mocktime())

    tx2 = CTransaction()
    tx2.vin = [CTxIn(COutPoint(int(unspent[1]["txid"], 16), unspent[1]["vout"]))]
    amount = int(unspent[1]["amount"] * 100000000)
    tx2.vout = [
        CTxOut(int(amount / 4), scriptPubKey3),
        CTxOut(int(amount / 4), scriptPubKey3),
        CTxOut(int(amount / 4), scriptPubKey4),
        CTxOut(int(amount / 4), scriptPubKey4)
    ]
    tx2.rehash()
    signed_tx2 = self.nodes[2].signrawtransaction(binascii.hexlify(tx2.serialize()).decode("utf-8"))
    memtxid2 = self.nodes[2].sendrawtransaction(signed_tx2["hex"], True, False, True)
    set_mocktime(get_mocktime() + 2)
    set_node_times(self.nodes, get_mocktime())

    mempool = self.nodes[2].getaddressmempool({"addresses": [address3]})
    assert_equal(len(mempool), 3)
    assert_equal(mempool[0]["txid"], memtxid1)
    assert_equal(mempool[0]["address"], address3)
    assert_equal(mempool[0]["index"], 0)
    assert_equal(mempool[1]["txid"], memtxid2)
    assert_equal(mempool[1]["index"], 0)
    assert_equal(mempool[2]["txid"], memtxid2)
    assert_equal(mempool[2]["index"], 1)

    # Mining the txs clears them from the mempool index.
    self.nodes[2].generate(1);
    self.sync_all();
    mempool2 = self.nodes[2].getaddressmempool({"addresses": [address3]})
    assert_equal(len(mempool2), 0)

    tx = CTransaction()
    tx.vin = [
        CTxIn(COutPoint(int(memtxid2, 16), 0)),
        CTxIn(COutPoint(int(memtxid2, 16), 1))
    ]
    tx.vout = [CTxOut(int(amount / 2 - 10000), scriptPubKey2)]
    tx.rehash()
    self.nodes[2].importprivkey(privKey3)
    signed_tx3 = self.nodes[2].signrawtransaction(binascii.hexlify(tx.serialize()).decode("utf-8"))
    memtxid3 = self.nodes[2].sendrawtransaction(signed_tx3["hex"], True, False, True)
    set_mocktime(get_mocktime() + 2)
    set_node_times(self.nodes, get_mocktime())

    # A spend of indexed outputs appears as negative mempool deltas
    # carrying prevtxid/prevout.
    mempool3 = self.nodes[2].getaddressmempool({"addresses": [address3]})
    assert_equal(len(mempool3), 2)
    assert_equal(mempool3[0]["prevtxid"], memtxid2)
    assert_equal(mempool3[0]["prevout"], 0)
    assert_equal(mempool3[1]["prevtxid"], memtxid2)
    assert_equal(mempool3[1]["prevout"], 1)

    # sending and receiving to the same address
    privkey1 = "cMvZn1pVWntTEcsK36ZteGQXRAcZ8CoTbMXF1QasxBLdnTwyVQCc"
    address1 = "yM9Eed1bxjy7tYxD3yZDHxjcVT48WdRoB1"
    address1hash = binascii.unhexlify("0909C84A817651502E020AAD0FBCAE5F656E7D8A")
    address1script = CScript([OP_DUP, OP_HASH160, address1hash, OP_EQUALVERIFY, OP_CHECKSIG])

    self.nodes[0].sendtoaddress(address1, 10)
    self.nodes[0].generate(1)
    self.sync_all()

    utxos = self.nodes[1].getaddressutxos({"addresses": [address1]})
    assert_equal(len(utxos), 1)

    tx = CTransaction()
    tx.vin = [
        CTxIn(COutPoint(int(utxos[0]["txid"], 16), utxos[0]["outputIndex"]))
    ]
    amount = int(utxos[0]["satoshis"] - 10000)
    tx.vout = [CTxOut(amount, address1script)]
    tx.rehash()
    self.nodes[0].importprivkey(privkey1)
    signed_tx = self.nodes[0].signrawtransaction(binascii.hexlify(tx.serialize()).decode("utf-8"))
    mem_txid = self.nodes[0].sendrawtransaction(signed_tx["hex"], True, False, True)
    self.sync_all()

    # Self-spend: one spend delta plus one receive delta for the same
    # address in the mempool.
    mempool_deltas = self.nodes[2].getaddressmempool({"addresses": [address1]})
    assert_equal(len(mempool_deltas), 2)

    print("Passed\n")
class Property(ByteEnum):
    """7z archive-header property identifiers (one byte each).

    These single-byte markers tag the records that make up a 7z header.
    """

    END = bytes.fromhex('00')
    HEADER = bytes.fromhex('01')
    ARCHIVE_PROPERTIES = bytes.fromhex('02')
    ADDITIONAL_STREAMS_INFO = bytes.fromhex('03')
    MAIN_STREAMS_INFO = bytes.fromhex('04')
    FILES_INFO = bytes.fromhex('05')
    PACK_INFO = bytes.fromhex('06')
    UNPACK_INFO = bytes.fromhex('07')
    SUBSTREAMS_INFO = bytes.fromhex('08')
    SIZE = bytes.fromhex('09')
    CRC = bytes.fromhex('0a')
    FOLDER = bytes.fromhex('0b')
    CODERS_UNPACK_SIZE = bytes.fromhex('0c')
    NUM_UNPACK_STREAM = bytes.fromhex('0d')
    EMPTY_STREAM = bytes.fromhex('0e')
    EMPTY_FILE = bytes.fromhex('0f')
    ANTI = bytes.fromhex('10')
    NAME = bytes.fromhex('11')
    CREATION_TIME = bytes.fromhex('12')
    LAST_ACCESS_TIME = bytes.fromhex('13')
    LAST_WRITE_TIME = bytes.fromhex('14')
    ATTRIBUTES = bytes.fromhex('15')
    COMMENT = bytes.fromhex('16')
    ENCODED_HEADER = bytes.fromhex('17')
    START_POS = bytes.fromhex('18')
    DUMMY = bytes.fromhex('19')
def tohex(v, offset=4096):
    """Return the big-endian byte representation of ``v + offset``.

    The sum is formatted as hex with at least four digits (zero-padded),
    then converted to bytes, so small values always yield two bytes
    (e.g. ``tohex(0)`` -> ``b'\\x10\\x00'``).

    Args:
        v: integer to encode; ``v + offset`` must be non-negative.
        offset: bias added before encoding (default 4096, the original
            hard-coded value, kept for backward compatibility).

    Returns:
        bytes: the hex digits of ``v + offset`` decoded to raw bytes.
    """
    hex_str = "%0.4X" % (v + offset)
    # Values >= 0x10000 produce an odd number of hex digits; pad so
    # unhexlify does not raise "Odd-length string".
    if len(hex_str) % 2:
        hex_str = "0" + hex_str
    return unhexlify(hex_str)