def test_input_greater_243(self):
    # noinspection SpellCheckingInspection
    inp = (
        'G9JYBOMPUXHYHKSNRNMMSSZCSHOFYOYNZRSZMAAYWDYEIMVVOGKPJB'
        'VBM9TDPULSFUNMTVXRKFIDOHUXXVYDLFSZYZTWQYTE9SPYYWYTXJYQ'
        '9IFGYOLZXWZBKWZN9QOOTBQMWMUBLEWUEEASRHRTNIQWJQNDWRYLCA'
    )

    trits = trytes_to_trits(inp)

    kerl = Kerl()
    kerl.absorb(trits)
    trits_out = []
    kerl.squeeze(trits_out, length=486)

    trytes_out = trits_to_trytes(trits_out)

    # noinspection SpellCheckingInspection
    self.assertEqual(
        trytes_out,
        'LUCKQVACOGBFYSPPVSSOXJEKNSQQRQKPZC9NXFSMQNRQCGGUL9OHVV'
        'KBDSKEQEBKXRNUJSRXYVHJTXBPDWQGNSCDCBAIRHAQCOWZEBSNHIJI'
        'GPZQITIBJQ9LNTDIBTCQ9EUWKHFLGFUVGGUWJONK9GBCDUIMAYMMQX',
    )
def test_generate_multi_trytes_and_hash(self):
    filepath =\
        join(
            dirname(__file__),
            'test_vectors/generate_multi_trytes_and_hash.csv',
        )

    with open(filepath, 'r') as f:
        reader = DictReader(f)
        for count, line in enumerate(reader):
            trytes = line['multiTrytes']
            hashes = line['Kerl_hash']

            trits = trytes_to_trits(trytes)

            kerl = Kerl()
            kerl.absorb(trits)
            trits_out = []
            kerl.squeeze(trits_out)

            trytes_out = trits_to_trytes(trits_out)

            self.assertEqual(
                hashes,
                trytes_out,
                msg = 'line {count}: {hashes} != {trytes}'.format(
                    count = count + 2,
                    hashes = hashes,
                    trytes = trytes_out,
                ),
            )
def test_generate_multi_trytes_and_hash(self):
    filepath =\
        join(
            dirname(__file__),
            'test_vectors/generate_multi_trytes_and_hash.csv',
        )

    with open(filepath, 'r') as f:
        reader = DictReader(f)
        for count, line in enumerate(reader):
            trytes = line['multiTrytes']
            hashes = line['Kerl_hash']

            trits = trytes_to_trits(trytes)

            kerl = Kerl()
            kerl.absorb(trits)
            trits_out = []
            kerl.squeeze(trits_out)

            trytes_out = trits_to_trytes(trits_out)

            self.assertEqual(
                hashes,
                trytes_out,
                msg='line {count}: {hashes} != {trytes}'.format(
                    count=count + 2,
                    hashes=hashes,
                    trytes=trytes_out,
                ),
            )
def create_next_address(address):
    # Create the next address from root_address.
    astrits = TryteString(str(root_address).encode()).as_trits()
    checksum_trits = []
    sponge = Kerl()
    sponge.absorb(astrits)
    sponge.squeeze(checksum_trits)
    result = TryteString.from_trits(checksum_trits)
    next_address = Address(result)

    # Check whether the next address is unused.
    check_address = api.find_transactions(addresses=[next_address])
    address_empty = len(check_address['hashes']) == 0

    # If the new address is already in use, derive another address from it.
    if not address_empty:
        astrits = TryteString(
            (str(next_address) + str(root_address)).encode()).as_trits()
        checksum_trits = []
        sponge = Kerl()
        sponge.absorb(astrits)
        sponge.squeeze(checksum_trits)
        result = TryteString.from_trits(checksum_trits)
        next_address = Address(result)

    return next_address
def get_digest(self):
    # type: () -> Digest
    """
    Generates the digest used to do the actual signing.

    Signing keys can have variable length and tend to be quite long,
    which makes them not-well-suited for use in crypto algorithms.

    The digest is essentially the result of running the signing key
    through a PBKDF, yielding a constant-length hash that can be used
    for crypto.
    """
    hashes_per_fragment = FRAGMENT_LENGTH // Hash.LEN

    key_fragments = self.iter_chunks(FRAGMENT_LENGTH)

    # The digest will contain one hash per key fragment.
    digest = [0] * HASH_LENGTH * len(key_fragments)

    # Iterate over each fragment in the key.
    for (i, fragment) in enumerate(key_fragments):  # type: Tuple[int, TryteString]
        fragment_trits = fragment.as_trits()

        key_fragment = [0] * FRAGMENT_LENGTH
        hash_trits = []

        # Within each fragment, iterate over one hash at a time.
        for j in range(hashes_per_fragment):
            hash_start = j * HASH_LENGTH
            hash_end = hash_start + HASH_LENGTH
            hash_trits = fragment_trits[hash_start:hash_end]  # type: MutableSequence[int]

            for k in range(26):
                sponge = Kerl()
                sponge.absorb(hash_trits)
                sponge.squeeze(hash_trits)

            key_fragment[hash_start:hash_end] = hash_trits

        #
        # After processing all of the hashes in the fragment, generate a
        # final hash and append it to the digest.
        #
        # Note that we will do this once per fragment in the key, so the
        # longer the key is, the longer the digest will be.
        #
        sponge = Kerl()
        sponge.absorb(key_fragment)
        sponge.squeeze(hash_trits)

        fragment_start = i * FRAGMENT_LENGTH
        fragment_end = fragment_start + FRAGMENT_LENGTH

        digest[fragment_start:fragment_end] = hash_trits

    return Digest(TryteString.from_trits(digest), self.key_index)
class SignatureFragmentGenerator(Iterator[TryteString]):
    """
    Used to generate signature fragments progressively.

    Each instance can generate 1 signature per fragment in the private
    key.
    """

    def __init__(self, private_key: PrivateKey, hash_: Hash) -> None:
        super(SignatureFragmentGenerator, self).__init__()

        self._key_chunks = private_key.iter_chunks(FRAGMENT_LENGTH)
        self._iteration = -1
        self._normalized_hash = normalize(hash_)
        self._sponge = Kerl()

    def __iter__(self) -> 'SignatureFragmentGenerator':
        return self

    def __len__(self) -> int:
        """
        Returns the number of fragments this generator can create.

        Note: This method always returns the same result, no matter how
        many iterations have been completed.
        """
        return len(self._key_chunks)

    def __next__(self) -> TryteString:
        """
        Returns the next signature fragment.
        """
        key_trytes: TryteString = next(self._key_chunks)
        self._iteration += 1

        # If the key is long enough, loop back around to the start.
        normalized_chunk = (
            self._normalized_hash[self._iteration % len(self._normalized_hash)]
        )

        signature_fragment = key_trytes.as_trits()

        # Build the signature, one hash at a time.
        for i in range(key_trytes.count_chunks(Hash.LEN)):
            hash_start = i * HASH_LENGTH
            hash_end = hash_start + HASH_LENGTH

            buffer: List[int] = signature_fragment[hash_start:hash_end]

            for _ in range(13 - normalized_chunk[i]):
                self._sponge.reset()
                self._sponge.absorb(buffer)
                self._sponge.squeeze(buffer)

            signature_fragment[hash_start:hash_end] = buffer

        return TryteString.from_trits(signature_fragment)
def get_digest(self):
    # type: () -> Digest
    """
    Generates the digest used to do the actual signing.

    Signing keys can have variable length and tend to be quite long,
    which makes them not-well-suited for use in crypto algorithms.

    The digest is essentially the result of running the signing key
    through a PBKDF, yielding a constant-length hash that can be used
    for crypto.
    """
    hashes_per_fragment = FRAGMENT_LENGTH // Hash.LEN

    key_fragments = self.iter_chunks(FRAGMENT_LENGTH)

    # The digest will contain one hash per key fragment.
    digest = [0] * HASH_LENGTH * len(key_fragments)

    # Iterate over each fragment in the key.
    for (i, fragment) in enumerate(key_fragments):  # type: Tuple[int, TryteString]
        fragment_trits = fragment.as_trits()

        key_fragment = [0] * FRAGMENT_LENGTH
        hash_trits = []

        # Within each fragment, iterate over one hash at a time.
        for j in range(hashes_per_fragment):
            hash_start = j * HASH_LENGTH
            hash_end = hash_start + HASH_LENGTH
            hash_trits = fragment_trits[hash_start:hash_end]  # type: MutableSequence[int]

            for k in range(26):
                sponge = Kerl()
                sponge.absorb(hash_trits)
                sponge.squeeze(hash_trits)

            key_fragment[hash_start:hash_end] = hash_trits

        #
        # After processing all of the hashes in the fragment, generate a
        # final hash and append it to the digest.
        #
        # Note that we will do this once per fragment in the key, so the
        # longer the key is, the longer the digest will be.
        #
        sponge = Kerl()
        sponge.absorb(key_fragment)
        sponge.squeeze(hash_trits)

        fragment_start = i * FRAGMENT_LENGTH
        fragment_end = fragment_start + FRAGMENT_LENGTH

        digest[fragment_start:fragment_end] = hash_trits

    return Digest(TryteString.from_trits(digest), self.key_index)
def finalize(self):
    # type: () -> None
    """
    Finalizes the bundle, preparing it to be attached to the Tangle.
    """
    if self.hash:
        raise RuntimeError('Bundle is already finalized.')

    if not self:
        raise ValueError('Bundle has no transactions.')

    # Quick validation.
    balance = self.balance

    if balance < 0:
        if self.change_address:
            self.add_transaction(
                ProposedTransaction(
                    address=self.change_address,
                    value=-balance,
                    tag=self.tag,
                ))
        else:
            raise ValueError(
                'Bundle has unspent inputs (balance: {balance}); '
                'use ``send_unspent_inputs_to`` to create '
                'change transaction.'.format(balance=balance),
            )
    elif balance > 0:
        raise ValueError(
            'Inputs are insufficient to cover bundle spend '
            '(balance: {balance}).'.format(balance=balance),
        )

    # Generate bundle hash.
    sponge = Kerl()
    last_index = len(self) - 1

    for (i, txn) in enumerate(self):  # type: Tuple[int, ProposedTransaction]
        txn.current_index = i
        txn.last_index = last_index

        sponge.absorb(txn.get_signature_validation_trytes().as_trits())

    bundle_hash_trits = [0] * HASH_LENGTH  # type: MutableSequence[int]
    sponge.squeeze(bundle_hash_trits)

    # Copy bundle hash to individual transactions.
    bundle_hash = BundleHash.from_trits(bundle_hash_trits)

    for txn in self:
        txn.bundle_hash = bundle_hash

        # Initialize signature/message fragment.
        txn.signature_message_fragment = Fragment(txn.message or b'')
def _generate_checksum(self) -> 'AddressChecksum':
    """
    Generates the correct checksum for this address.
    """
    checksum_trits: MutableSequence[int] = []

    sponge = Kerl()
    sponge.absorb(self.address.as_trits())
    sponge.squeeze(checksum_trits)

    checksum_length = AddressChecksum.LEN * TRITS_PER_TRYTE

    return AddressChecksum.from_trits(checksum_trits[-checksum_length:])
def _generate_checksum(self):
    # type: () -> TryteString
    """
    Generates the correct checksum for this address.
    """
    checksum_trits = []  # type: MutableSequence[int]

    sponge = Kerl()
    sponge.absorb(self.address.as_trits())
    sponge.squeeze(checksum_trits)

    checksum_length = AddressChecksum.LEN * TRITS_PER_TRYTE

    return TryteString.from_trits(checksum_trits[-checksum_length:])
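# The two `_generate_checksum` variants above share one recipe: absorb the
# address trits into Kerl, squeeze a single 243-trit hash, and keep the last
# 27 trits (9 trytes). A minimal standalone sketch of that recipe follows; it
# assumes PyOTA's public package layout, and the all-'9' address is just a
# placeholder for illustration.
from iota import Address, TryteString
from iota.crypto.kerl import Kerl

address = Address(b'9' * 81)  # placeholder address (81 trytes)

checksum_trits = []
sponge = Kerl()
sponge.absorb(address.as_trits())
sponge.squeeze(checksum_trits)

# 9 checksum trytes * 3 trits per tryte = the last 27 trits of the hash.
checksum = TryteString.from_trits(checksum_trits[-27:])
print(checksum)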
def calc_hash(self, bundle):
    Trxn_HASH_Trytes = 81
    HASH_LENGTH = Trxn_HASH_Trytes * 3  # Trits conversion

    # Generate bundle hash. (Adapted from the Python API client.)
    while True:
        sponge = Kerl()
        last_index = len(bundle) - 1

        for i, txn in enumerate(bundle):
            txn.current_index = i
            txn.last_index = last_index

            sponge.absorb(txn.get_bundle_essence_trits())

        bundle_hash_trits = [0] * HASH_LENGTH
        sponge.squeeze(bundle_hash_trits)

        # Convert trits to ASCII trytes.
        bundle_hash = BundleHash.from_trits(bundle_hash_trits)
        bundle.bundle_hash = bundle_hash

        # Check that we generated a secure bundle hash.
        # https://github.com/iotaledger/iota.py/issues/84
        if any(13 in part for part in normalize(bundle_hash)):
            # Increment the legacy tag and try again.
            bundle.tail_transaction.increment_legacy_tag()
        else:
            break

    # Copy the bundle hash to the individual transactions.
    for txn in bundle:
        txn.bundle_hash = bundle_hash

        # Initialize signature/message fragment.
        if not txn.value_trxn:
            # Put a dummy message in the fragment.
            # txn.signature_message_fragment = Fragment('9' * 2187)  # Fragment length
            txn.signature_message_fragment = Fragment(
                TryteString.from_string(
                    'IOTA is cool! This is a meta transaction!')
            )  # Fragment length
        else:
            # Generate the signature for the bundle transaction.
            txn.signature_message_fragment = self.genSig(
                bundleHash=bundle_hash)

    bundle.data_payload = self.name + " (" + str(
        bundle.outputTrxn.value) + ") ->" + bundle.outputTrxn.recName

    return self.conductPOW(bundle)
def address_from_digest(digest: Digest) -> Address:
    """
    Generates an address from a private key digest.
    """
    address_trits: List[int] = [0] * (Address.LEN * TRITS_PER_TRYTE)

    sponge = Kerl()
    sponge.absorb(digest.as_trits())
    sponge.squeeze(address_trits)

    return Address.from_trits(
        trits=address_trits,
        key_index=digest.key_index,
        security_level=digest.security_level,
    )
def test_output_greater_243(self):
    # noinspection SpellCheckingInspection
    inp = ('9MIDYNHBWMBCXVDEFOFWINXTERALUKYYPPHKP9JJ'
           'FGJEIUY9MUDVNFZHMMWZUYUSWAIOWEVTHNWMHANBH')

    trits = trytes_to_trits(inp)
    print_var_type_n_val(
        var001=trits, pointer="#XCVBNbvcSDF23458765")
    #XCVBNbvcSDF23458765
    # Value:
    # # [0, 0, 0, 1, 1, 1, 0, 0, 1, 1, 1, 0, 1, -1, 0, -1, -1, -1, -1, 0, 1, -1, 1, 0, -1, -1, 0, 1, 1, 1, -1, 1, 0, 0, 1, 0, 0, -1, 0, 1, 1, -1, 1, 1, 0, -1, -1, 1, 0, -1, 1, 0, -1, -1, 0, -1, 1, -1, -1, 0, 0, 0, 1, -1, -1, -1, 0, -1, 0, -1, 1, -1, -1, -1, 1, 0, 0, -1, 1, 0, 0, 0, 1, 1, 0, 1, -1, -1, 1, 1, 1, -1, 0, 1, -1, 0, 1, -1, -1, 1, -1, -1, -1, 0, 1, -1, 1, 1, 1, -1, -1, 0, 0, 0, 1, 0, 1, 1, 0, 1, 0, -1, 1, 1, -1, 1, 1, 0, 1, -1, -1, 1, 0, 0, 1, 0, 1, -1, 1, -1, 0, 0, 0, 0, 1, 1, 1, 0, 1, -1, 1, 1, 0, 1, 1, -1, -1, -1, -1, 0, -1, 1, -1, 0, 0, -1, 0, 1, 1, 1, 1, 1, 1, 1, -1, -1, 0, -1, 0, 0, 0, 1, -1, 1, -1, 0, 0, 1, -1, 1, 0, -1, -1, -1, 0, 1, 0, 0, 0, 0, 1, 0, -1, -1, -1, -1, 0, -1, -1, 1, 1, 1, -1, -1, 1, -1, -1, 0, 1, -1, -1, -1, -1, -1, 0, 1, 1, 1, -1, 0, 1, 1, 0, 0, -1, -1, -1, -1, 1, 0, -1, 0, 1]
    # Type: <class 'list'>

    kerl = Kerl()
    print_var_type_n_val(var001=kerl, pointer="#SDFG345tredff")
    #SDFG345tredff
    # Value:
    # # <iota.crypto.kerl.pykerl.Kerl object at 0x0000018FAA7C6780>
    # Type: <class 'iota.crypto.kerl.pykerl.Kerl'>

    kerl.absorb(trits)
    print_var_type_n_val(
        var001=kerl, pointer="#ERERdfgfdrtre2345665777")
    #ERERdfgfdrtre2345665777
    # Value:
    # # <iota.crypto.kerl.pykerl.Kerl object at 0x0000018FAA7C6780>
    # Type: <class 'iota.crypto.kerl.pykerl.Kerl'>

    trits_out = []
    kerl.squeeze(trits_out, length=486)
    print_var_type_n_val(var001=kerl, pointer="#2345gDFRER")
    #2345gDFRER
    # Value:
    # # <iota.crypto.kerl.pykerl.Kerl object at 0x0000018FAA7C6780>
    # Type: <class 'iota.crypto.kerl.pykerl.Kerl'>

    trytes_out = trits_to_trytes(trits_out)
    print_var_type_n_val(var001=trytes_out, pointer="#23458765SDFfffFGH")
    #23458765SDFfffFGH
    # Value: G9JYBOMPUXHYHKSNRNMMSSZCSHOFYOYNZRSZMAAYWDYEIMVVOGKPJBVBM9TDPULSFUNMTVXRKFIDOHUXXVYDLFSZYZTWQYTE9SPYYWYTXJYQ9IFGYOLZXWZBKWZN9QOOTBQMWMUBLEWUEEASRHRTNIQWJQNDWRYLCA
    # Type: <class 'str'>

    # noinspection SpellCheckingInspection
    self.assertEqual(
        trytes_out,
        'G9JYBOMPUXHYHKSNRNMMSSZCSHOFYOYNZRSZMAAYWDYEIMVVOGKPJB'
        'VBM9TDPULSFUNMTVXRKFIDOHUXXVYDLFSZYZTWQYTE9SPYYWYTXJYQ'
        '9IFGYOLZXWZBKWZN9QOOTBQMWMUBLEWUEEASRHRTNIQWJQNDWRYLCA',
    )
def finalize(bundle):
    sponge = Kerl()
    last_index = len(bundle) - 1

    for (i, txn) in enumerate(bundle):
        txn.current_index = i
        txn.last_index = last_index

        sponge.absorb(txn.get_signature_validation_trytes().as_trits())

    bundle_hash_trits = [0] * HASH_LENGTH
    sponge.squeeze(bundle_hash_trits)

    bundle_hash = BundleHash.from_trits(bundle_hash_trits)

    for txn in bundle:
        txn.bundle_hash = bundle_hash
        txn.signature_message_fragment = Fragment(txn.message or b'')
def address_from_digest(digest):
    # type: (Digest) -> Address
    """
    Generates an address from a private key digest.
    """
    address_trits = [0] * (Address.LEN * TRITS_PER_TRYTE)  # type: MutableSequence[int]

    sponge = Kerl()
    sponge.absorb(digest.as_trits())
    sponge.squeeze(address_trits)

    return Address.from_trits(
        trits = address_trits,
        key_index = digest.key_index,
        security_level = digest.security_level,
    )
def finalize(bundle):
    sponge = Kerl()
    last_index = len(bundle) - 1

    for (i, txn) in enumerate(bundle):  # type: Tuple[int, ProposedTransaction]
        txn.current_index = i
        txn.last_index = last_index

        sponge.absorb(txn.get_signature_validation_trytes().as_trits())

    bundle_hash_trits = [0] * HASH_LENGTH  # type: MutableSequence[int]
    sponge.squeeze(bundle_hash_trits)

    bundle_hash = BundleHash.from_trits(bundle_hash_trits)

    for txn in bundle:
        txn.bundle_hash = bundle_hash

        # Initialize signature/message fragment.
        txn.signature_message_fragment = Fragment(txn.message or b'')
def address_from_digest(digest):
    # type: (Digest) -> Address
    """
    Generates an address from a private key digest.
    """
    address_trits = [0] * (Address.LEN * TRITS_PER_TRYTE)  # type: MutableSequence[int]

    sponge = Kerl()
    sponge.absorb(digest.as_trits())
    sponge.squeeze(address_trits)

    return Address.from_trits(
        trits=address_trits,
        key_index=digest.key_index,
        security_level=digest.security_level,
    )
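# A minimal invocation sketch for `address_from_digest` above. The digest is a
# fixed placeholder rather than a real key digest, and the import path for
# `Digest` assumes PyOTA's layout; with a real digest, the result is the
# address that the corresponding private key can sign for.
from iota import TryteString
from iota.crypto.types import Digest

placeholder_digest = Digest(TryteString(b'A' * 81), key_index=0)

address = address_from_digest(placeholder_digest)
print(address)  # 81-tryte address produced by a single Kerl absorb/squeeze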
def test_correct_first(self):
    inp = ('EMIDYNHBWMBCXVDEFOFWINXTERALUKYYPPHKP9JJ'
           'FGJEIUY9MUDVNFZHMMWZUYUSWAIOWEVTHNWMHANBH')

    trits = trytes_to_trits(inp)

    kerl = Kerl()
    kerl.absorb(trits)
    trits_out = []
    kerl.squeeze(trits_out)

    trytes_out = trits_to_trytes(trits_out)

    self.assertEqual(
        trytes_out,
        'EJEAOOZYSAWFPZQESYDHZCGYNSTWXUMVJOVDWUNZ'
        'JXDGWCLUFGIMZRMGCAZGKNPLBRLGUNYWKLJTYEAQX',
    )
def _create_sponge(self, index):
    # type: (int) -> Kerl
    """
    Prepares the hash sponge for the generator.
    """
    seed = self.seed_as_trits[:]

    sponge = Kerl()
    sponge.absorb(add_trits(seed, trits_from_int(index)))

    # Squeeze all of the trits out of the sponge and re-absorb them.
    # Note that the sponge transforms several times per operation, so
    # this sequence is not as redundant as it looks at first glance.
    sponge.squeeze(seed)
    sponge.reset()
    sponge.absorb(seed)

    return sponge
def _create_sponge(self, index: int) -> Kerl:
    """
    Prepares the hash sponge for the generator.
    """
    seed = self.seed_as_trits[:]

    sponge = Kerl()
    sponge.absorb(add_trits(seed, trits_from_int(index)))

    # Squeeze all of the trits out of the sponge and re-absorb them.
    # Note that the sponge transforms several times per operation,
    # so this sequence is not as redundant as it looks at first
    # glance.
    sponge.squeeze(seed)
    sponge.reset()
    sponge.absorb(seed)

    return sponge
def test_output_greater_243(self):
    inp = ('9MIDYNHBWMBCXVDEFOFWINXTERALUKYYPPHKP9JJ'
           'FGJEIUY9MUDVNFZHMMWZUYUSWAIOWEVTHNWMHANBH')

    trits = trytes_to_trits(inp)

    kerl = Kerl()
    kerl.absorb(trits)
    trits_out = []
    kerl.squeeze(trits_out, length=486)

    trytes_out = trits_to_trytes(trits_out)

    self.assertEqual(
        trytes_out,
        'G9JYBOMPUXHYHKSNRNMMSSZCSHOFYOYNZRSZMAAYWDYEIMVVOGKPJB'
        'VBM9TDPULSFUNMTVXRKFIDOHUXXVYDLFSZYZTWQYTE9SPYYWYTXJYQ'
        '9IFGYOLZXWZBKWZN9QOOTBQMWMUBLEWUEEASRHRTNIQWJQNDWRYLCA',
    )
def MnemonicsToIotaSeed(recovery_words, passphrase='', bip44_account=0x00000000,
                        bip44_page_index=0x00000000):
    """Recover an IOTA seed from the Ledger Nano S recovery phrase.

    Keyword arguments:
    recovery_words -- a list of 24 words (your Ledger recovery phrase)
    passphrase -- a string containing the passphrase (only if set in the Ledger; not your PIN!)
    bip44_account -- an integer containing the BIP44 path 'Account'
    bip44_page_index -- an integer containing the BIP44 path 'Page index'
    """
    print("\nCalculating your IOTA seed...")

    master_seed = mnemonic.Mnemonic.to_seed(mnemonic=' '.join(recovery_words),
                                            passphrase=passphrase)

    bip32_root_key = bip32utils.BIP32Key.fromEntropy(master_seed)
    bip44_purpose_key = bip32_root_key.ChildKey(0x8000002C)  # Purpose
    bip44_coin_type_key = bip44_purpose_key.ChildKey(0x8000107A)  # CoinType
    bip44_account_key = bip44_coin_type_key.ChildKey(0x80000000 + bip44_account)  # Account
    bip44_page_index_key = bip44_account_key.ChildKey(
        0x80000000 + bip44_page_index)  # Page index

    if sys.version_info.major >= 3:
        priv_key = bytearray(bip44_page_index_key.PrivateKey())
        chain_code = bytearray(bip44_page_index_key.C)
    else:
        priv_key = bytearray.fromhex(
            bip44_page_index_key.PrivateKey().encode('hex'))
        chain_code = bytearray.fromhex(bip44_page_index_key.C.encode('hex'))

    trits_out = []
    kerl = Kerl()
    kerl.k.update(priv_key[0:32] + chain_code[0:16] +
                  priv_key[16:32] + chain_code[0:32])
    kerl.squeeze(trits_out)
    iota_seed = conv.trits_to_trytes(trits_out)

    print("Seed: %s, Length: %d" % (iota_seed, len(iota_seed)))
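# Call sketch for the recovery helper above. The 24 words are the well-known
# BIP39 all-'abandon' test phrase, used here only as a placeholder; substitute
# the actual Ledger recovery phrase (and passphrase, if one was set on the
# device). Requires the same `mnemonic` and `bip32utils` packages the function
# above depends on.
test_words = ['abandon'] * 23 + ['art']
MnemonicsToIotaSeed(test_words, passphrase='', bip44_account=0, bip44_page_index=0)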
def test_input_greater_243(self):
    inp = ('G9JYBOMPUXHYHKSNRNMMSSZCSHOFYOYNZRSZMAAYWDYEIMVVOGKPJB'
           'VBM9TDPULSFUNMTVXRKFIDOHUXXVYDLFSZYZTWQYTE9SPYYWYTXJYQ'
           '9IFGYOLZXWZBKWZN9QOOTBQMWMUBLEWUEEASRHRTNIQWJQNDWRYLCA')

    trits = trytes_to_trits(inp)

    kerl = Kerl()
    kerl.absorb(trits)
    trits_out = []
    kerl.squeeze(trits_out, length=486)

    trytes_out = trits_to_trytes(trits_out)

    self.assertEqual(
        trytes_out,
        'LUCKQVACOGBFYSPPVSSOXJEKNSQQRQKPZC9NXFSMQNRQCGGUL9OHVV'
        'KBDSKEQEBKXRNUJSRXYVHJTXBPDWQGNSCDCBAIRHAQCOWZEBSNHIJI'
        'GPZQITIBJQ9LNTDIBTCQ9EUWKHFLGFUVGGUWJONK9GBCDUIMAYMMQX',
    )
def test_correct_first(self):
    # noinspection SpellCheckingInspection
    inp = (
        'EMIDYNHBWMBCXVDEFOFWINXTERALUKYYPPHKP9JJ'
        'FGJEIUY9MUDVNFZHMMWZUYUSWAIOWEVTHNWMHANBH'
    )

    trits = trytes_to_trits(inp)

    kerl = Kerl()
    kerl.absorb(trits)
    trits_out = []
    kerl.squeeze(trits_out)

    trytes_out = trits_to_trytes(trits_out)

    # noinspection SpellCheckingInspection
    self.assertEqual(
        trytes_out,
        'EJEAOOZYSAWFPZQESYDHZCGYNSTWXUMVJOVDWUNZ'
        'JXDGWCLUFGIMZRMGCAZGKNPLBRLGUNYWKLJTYEAQX',
    )
def test_correct_first(self):
    # noinspection SpellCheckingInspection
    inp = ('EMIDYNHBWMBCXVDEFOFWINXTERALUKYYPPHKP9JJ'
           'FGJEIUY9MUDVNFZHMMWZUYUSWAIOWEVTHNWMHANBH')

    trits = trytes_to_trits(inp)
    print_var_type_n_val(
        var001=trits, pointer="#SDFGHhgfdAZER1234765555")
    #SDFGHhgfdAZER1234765555
    # Value:
    # # [-1, -1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 0, 1, -1, 0, -1, -1, -1, -1, 0, 1, -1, 1, 0, -1, -1, 0, 1, 1, 1, -1, 1, 0, 0, 1, 0, 0, -1, 0, 1, 1, -1, 1, 1, 0, -1, -1, 1, 0, -1, 1, 0, -1, -1, 0, -1, 1, -1, -1, 0, 0, 0, 1, -1, -1, -1, 0, -1, 0, -1, 1, -1, -1, -1, 1, 0, 0, -1, 1, 0, 0, 0, 1, 1, 0, 1, -1, -1, 1, 1, 1, -1, 0, 1, -1, 0, 1, -1, -1, 1, -1, -1, -1, 0, 1, -1, 1, 1, 1, -1, -1, 0, 0, 0, 1, 0, 1, 1, 0, 1, 0, -1, 1, 1, -1, 1, 1, 0, 1, -1, -1, 1, 0, 0, 1, 0, 1, -1, 1, -1, 0, 0, 0, 0, 1, 1, 1, 0, 1, -1, 1, 1, 0, 1, 1, -1, -1, -1, -1, 0, -1, 1, -1, 0, 0, -1, 0, 1, 1, 1, 1, 1, 1, 1, -1, -1, 0, -1, 0, 0, 0, 1, -1, 1, -1, 0, 0, 1, -1, 1, 0, -1, -1, -1, 0, 1, 0, 0, 0, 0, 1, 0, -1, -1, -1, -1, 0, -1, -1, 1, 1, 1, -1, -1, 1, -1, -1, 0, 1, -1, -1, -1, -1, -1, 0, 1, 1, 1, -1, 0, 1, 1, 0, 0, -1, -1, -1, -1, 1, 0, -1, 0, 1]
    # Type: <class 'list'>

    # print('trits001: ', trits)
    # # [-1, -1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 0, 1, -1, 0, -1, -1, -1, -1, 0, 1, -1, 1, 0, -1, -1, 0, 1, 1, 1, -1, 1, 0, 0, 1, 0, 0, -1, 0, 1, 1, -1, 1, 1, 0, -1, -1, 1, 0, -1, 1, 0, -1, -1, 0, -1, 1, -1, -1, 0, 0, 0, 1, -1, -1, -1, 0, -1, 0, -1, 1, -1, -1, -1, 1, 0, 0, -1, 1, 0, 0, 0, 1, 1, 0, 1, -1, -1, 1, 1, 1, -1, 0, 1, -1, 0, 1, -1, -1, 1, -1, -1, -1, 0, 1, -1, 1, 1, 1, -1, -1, 0, 0, 0, 1, 0, 1, 1, 0, 1, 0, -1, 1, 1, -1, 1, 1, 0, 1, -1, -1, 1, 0, 0, 1, 0, 1, -1, 1, -1, 0, 0, 0, 0, 1, 1, 1, 0, 1, -1, 1, 1, 0, 1, 1, -1, -1, -1, -1, 0, -1, 1, -1, 0, 0, -1, 0, 1, 1, 1, 1, 1, 1, 1, -1, -1, 0, -1, 0, 0, 0, 1, -1, 1, -1, 0, 0, 1, -1, 1, 0, -1, -1, -1, 0, 1, 0, 0, 0, 0, 1, 0, -1, -1, -1, -1, 0, -1, -1, 1, 1, 1, -1, -1, 1, -1, -1, 0, 1, -1, -1, -1, -1, -1, 0, 1, 1, 1, -1, 0, 1, 1, 0, 0, -1, -1, -1, -1, 1, 0, -1, 0, 1]

    kerl = Kerl()
    kerl.absorb(trits)
    # print('kerl001: ', kerl)
    # # <iota.crypto.kerl.pykerl.Kerl object at 0x000002740CFA5BA8>

    trits_out = []
    kerl.squeeze(trits_out)
    # print('kerl002: ', kerl)
    # # <iota.crypto.kerl.pykerl.Kerl object at 0x000002740CFA5BA8>

    trytes_out = trits_to_trytes(trits_out)
    print_var_type_n_val(
        var001=trytes_out, pointer="#QSEZEzZERTYsder23434")
    #QSEZEzZERTYsder23434
    # Value: EJEAOOZYSAWFPZQESYDHZCGYNSTWXUMVJOVDWUNZJXDGWCLUFGIMZRMGCAZGKNPLBRLGUNYWKLJTYEAQX
    # Type: <class 'str'>

    # noinspection SpellCheckingInspection
    self.assertEqual(
        trytes_out,
        'EJEAOOZYSAWFPZQESYDHZCGYNSTWXUMVJOVDWUNZ'
        'JXDGWCLUFGIMZRMGCAZGKNPLBRLGUNYWKLJTYEAQX',
    )
def test_output_greater_243(self):
    # noinspection SpellCheckingInspection
    inp = (
        '9MIDYNHBWMBCXVDEFOFWINXTERALUKYYPPHKP9JJ'
        'FGJEIUY9MUDVNFZHMMWZUYUSWAIOWEVTHNWMHANBH'
    )

    trits = trytes_to_trits(inp)

    kerl = Kerl()
    kerl.absorb(trits)
    trits_out = []
    kerl.squeeze(trits_out, length=486)

    trytes_out = trits_to_trytes(trits_out)

    # noinspection SpellCheckingInspection
    self.assertEqual(
        trytes_out,
        'G9JYBOMPUXHYHKSNRNMMSSZCSHOFYOYNZRSZMAAYWDYEIMVVOGKPJB'
        'VBM9TDPULSFUNMTVXRKFIDOHUXXVYDLFSZYZTWQYTE9SPYYWYTXJYQ'
        '9IFGYOLZXWZBKWZN9QOOTBQMWMUBLEWUEEASRHRTNIQWJQNDWRYLCA',
    )
class MultisigAddressBuilder(object):
    """
    Creates multisig addresses.

    Note that this class generates a single address from multiple
    inputs (digests), unlike
    :py:class:`iota.crypto.addresses.AddressGenerator`, which generates
    multiple addresses from a single input (seed).
    """

    def __init__(self):
        super(MultisigAddressBuilder, self).__init__()

        self._digests = []  # type: List[Digest]
        """
        Keeps track of digests that were added, so that we can attach
        them to the final :py:class:`MultisigAddress` object.
        """

        self._address = None  # type: Optional[MultisigAddress]
        """
        Caches the generated address.

        Generating the address modifies the internal state of the curl
        sponge, so each :py:class:`MultisigAddressBuilder` instance can
        only generate a single address.
        """

        self._sponge = Kerl()

    def add_digest(self, digest):
        # type: (Digest) -> None
        """
        Absorbs a digest into the sponge.

        IMPORTANT: Keep track of the order that digests are added!

        To spend inputs from a multisig address, you must provide the
        private keys in the same order!

        References:

        - https://github.com/iotaledger/wiki/blob/master/multisigs.md#spending-inputs
        """
        if self._address:
            raise ValueError('Cannot add digests once an address is extracted.')

        self._sponge.absorb(digest.as_trits())
        self._digests.append(digest)

    def get_address(self):
        # type: () -> MultisigAddress
        """
        Returns the new multisig address.

        Note that you can continue to add digests after extracting an
        address; the next address will use *all* of the digests that
        have been added so far.
        """
        if not self._digests:
            raise ValueError(
                'Must call ``add_digest`` at least once '
                'before calling ``get_address``.',
            )

        if not self._address:
            address_trits = [0] * HASH_LENGTH
            self._sponge.squeeze(address_trits)

            self._address =\
                MultisigAddress.from_trits(address_trits, digests=self._digests[:])

        return self._address
def finalize(self):
    # type: () -> None
    """
    Finalizes the bundle, preparing it to be attached to the Tangle.
    """
    if self.hash:
        raise RuntimeError('Bundle is already finalized.')

    if not self:
        raise ValueError('Bundle has no transactions.')

    # Quick validation.
    balance = self.balance

    if balance < 0:
        if self.change_address:
            self.add_transaction(ProposedTransaction(
                address = self.change_address,
                value = -balance,
                tag = self.tag,
            ))
        else:
            raise ValueError(
                'Bundle has unspent inputs (balance: {balance}); '
                'use ``send_unspent_inputs_to`` to create '
                'change transaction.'.format(
                    balance = balance,
                ),
            )
    elif balance > 0:
        raise ValueError(
            'Inputs are insufficient to cover bundle spend '
            '(balance: {balance}).'.format(
                balance = balance,
            ),
        )

    # Generate bundle hash.
    while True:
        sponge = Kerl()
        last_index = len(self) - 1

        for (i, txn) in enumerate(self):  # type: Tuple[int, ProposedTransaction]
            txn.current_index = i
            txn.last_index = last_index

            sponge.absorb(txn.get_signature_validation_trytes().as_trits())

        bundle_hash_trits = [0] * HASH_LENGTH  # type: MutableSequence[int]
        sponge.squeeze(bundle_hash_trits)

        bundle_hash = BundleHash.from_trits(bundle_hash_trits)

        # Check that we generated a secure bundle hash.
        # https://github.com/iotaledger/iota.lib.py/issues/84
        if any(13 in part for part in normalize(bundle_hash)):
            # Increment the legacy tag and try again.
            tail_transaction = self.tail_transaction  # type: ProposedTransaction
            tail_transaction.increment_legacy_tag()
        else:
            break

    # Copy bundle hash to individual transactions.
    for txn in self:
        txn.bundle_hash = bundle_hash

        # Initialize signature/message fragment.
        txn.signature_message_fragment = Fragment(txn.message or b'')
def finalize(self):
    # type: () -> None
    """
    Finalizes the bundle, preparing it to be attached to the Tangle.
    """
    if self.hash:
        raise RuntimeError('Bundle is already finalized.')

    if not self:
        raise ValueError('Bundle has no transactions.')

    # Quick validation.
    balance = self.balance

    if balance < 0:
        if self.change_address:
            self.add_transaction(
                ProposedTransaction(
                    address=self.change_address,
                    value=-balance,
                    tag=self.tag,
                ))
        else:
            raise ValueError(
                'Bundle has unspent inputs (balance: {balance}); '
                'use ``send_unspent_inputs_to`` to create '
                'change transaction.'.format(balance=balance),
            )
    elif balance > 0:
        raise ValueError(
            'Inputs are insufficient to cover bundle spend '
            '(balance: {balance}).'.format(balance=balance),
        )

    # Generate bundle hash.
    while True:
        sponge = Kerl()
        last_index = len(self) - 1

        for (i, txn) in enumerate(self):  # type: Tuple[int, ProposedTransaction]
            txn.current_index = i
            txn.last_index = last_index

            sponge.absorb(txn.get_signature_validation_trytes().as_trits())

        bundle_hash_trits = [0] * HASH_LENGTH  # type: MutableSequence[int]
        sponge.squeeze(bundle_hash_trits)

        bundle_hash = BundleHash.from_trits(bundle_hash_trits)

        # Check that we generated a secure bundle hash.
        # https://github.com/iotaledger/iota.lib.py/issues/84
        if any(13 in part for part in normalize(bundle_hash)):
            # Increment the legacy tag and try again.
            tail_transaction = self.tail_transaction  # type: ProposedTransaction
            tail_transaction.increment_legacy_tag()
        else:
            break

    # Copy bundle hash to individual transactions.
    for txn in self:
        txn.bundle_hash = bundle_hash

        # Initialize signature/message fragment.
        txn.signature_message_fragment = Fragment(txn.message or b'')
def test_generate_trytes_and_multi_squeeze(self):
    filepath =\
        join(
            dirname(__file__),
            'test_vectors/generate_trytes_and_multi_squeeze.csv',
        )

    with open(filepath, 'r') as f:
        reader = DictReader(f)
        for count, line in enumerate(reader):
            trytes = line['trytes']
            hashes1 = line['Kerl_squeeze1']
            hashes2 = line['Kerl_squeeze2']
            hashes3 = line['Kerl_squeeze3']

            trits = trytes_to_trits(trytes)
            # print(trits)
            # # [-1, 0, 1, 1, -1, 0, -1, 0, 0, 1, -1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 1, 1, -1, -1, 1, -1, -1, -1, -1, 0, -1, 1, -1, 0, -1, 0, 1, 0, 1, 1, 0, 1, -1, 0, 0, -1, 1, 0, 1, -1, -1, 0, 1, 1, 0, 0, 1, 0, -1, -1, 0, 0, -1, 0, 0, -1, 0, 1, 1, 0, 0, 0, 0, -1, 0, -1, 1, -1, -1, -1, 0, 0, 1, 0, 0, 1, 1, -1, 1, 1, -1, 0, 1, -1, 1, 0, -1, 1, 0, 1, 1, -1, 0, 1, 0, 1, 1, -1, -1, -1, 1, 1, 1, -1, 0, 1, -1, 0, -1, -1, -1, -1, 1, 0, 0, 0, -1, -1, -1, -1, -1, -1, 0, 1, -1, -1, -1, 0, 1, 1, 0, -1, -1, 1, 1, 0, 1, 0, 0, 1, 0, 0, 0, -1, 1, 0, -1, 1, 1, 1, -1, 1, 1, 1, 1, 1, 0, -1, 1, 1, -1, 1, 0, 0, 0, 1, 1, 1, -1, -1, 0, 0, -1, -1, 1, -1, -1, 0, -1, -1, 1, 0, 0, 1, 1, -1, -1, 1, 1, -1, -1, 0, 1, 1, 1, -1, -1, -1, 0, -1, -1, -1, -1, 0, 1, 1, 1, -1, -1, 0, 0, 1, 0, -1, 0, 1, -1, 0, -1, 1, -1, 1, -1, -1, -1, 0, 0, 1, 1, 0, 1, 0]

            kerl = Kerl()
            kerl.absorb(trits)
            trits_out = []
            kerl.squeeze(trits_out)

            trytes_out = trits_to_trytes(trits_out)
            # print('hashes1: ', hashes1)
            # # IWDWJCUUE9EBBYAEDXPDNAKTJAVY9IFOUZBNRIHMZ9NWOGOL9GYKZZ9ZLXHAI9PVPSLEAUGX9TQKMIUAX
            # print('trytes_out: ', trytes_out)
            # # IWDWJCUUE9EBBYAEDXPDNAKTJAVY9IFOUZBNRIHMZ9NWOGOL9GYKZZ9ZLXHAI9PVPSLEAUGX9TQKMIUAX

            self.assertEqual(
                hashes1,
                trytes_out,
                msg='line {count}: {hashes} != {trytes}'.format(
                    count=count + 2,
                    hashes=hashes1,
                    trytes=trytes_out,
                ),
            )

            trits_out = []
            kerl.squeeze(trits_out)

            trytes_out = trits_to_trytes(trits_out)
            # print('line {count}: {hashes} != {trytes}'.format(
            #     count = count + 2,
            #     hashes = hashes2,
            #     trytes = trytes_out,
            # ))
            # # ANLYSAFQ9RJKFEADAZDTLPMYCYSGTRIOUWFKZPWJIEQHDTREOPHSUMAGIZLVIRMZGAVKODZAYBUISSQNX != ANLYSAFQ9RJKFEADAZDTLPMYCYSGTRIOUWFKZPWJIEQHDTREOPHSUMAGIZLVIRMZGAVKODZAYBUISSQNX

            self.assertEqual(
                hashes2,
                trytes_out,
                msg='line {count}: {hashes} != {trytes}'.format(
                    count=count + 2,
                    hashes=hashes2,
                    trytes=trytes_out,
                ),
            )

            trits_out = []
            kerl.squeeze(trits_out)

            trytes_out = trits_to_trytes(trits_out)

            self.assertEqual(
                hashes3,
                trytes_out,
                msg='line {count}: {hashes} != {trytes}'.format(
                    count=count + 2,
                    hashes=hashes3,
                    trytes=trytes_out,
                ),
            )
def finalize(self):
    # type: () -> None
    """
    Finalizes the bundle, preparing it to be attached to the Tangle.

    This operation checks that the bundle balances to zero, generates
    the bundle hash, updates every transaction with it, and initializes
    the signature/message fragment fields. Once this method is invoked,
    no new transactions may be added to the bundle.

    :raises RuntimeError: if the bundle is already finalized.

    :raises ValueError:
        - if the bundle has no transactions.
        - if the bundle has unspent inputs (and no ``change_address``
          attribute is specified).
        - if the inputs are insufficient to cover the bundle spend.
    """
    if self.hash:
        raise RuntimeError('Bundle is already finalized.')

    if not self:
        raise ValueError('Bundle has no transactions.')

    # Quick validation.
    balance = self.balance

    if balance < 0:
        if self.change_address:
            self.add_transaction(
                ProposedTransaction(
                    address=self.change_address,
                    value=-balance,
                    tag=self.tag,
                ))
        else:
            raise ValueError(
                'Bundle has unspent inputs (balance: {balance}); '
                'use ``send_unspent_inputs_to`` to create '
                'change transaction.'.format(balance=balance),
            )
    elif balance > 0:
        raise ValueError(
            'Inputs are insufficient to cover bundle spend '
            '(balance: {balance}).'.format(balance=balance),
        )

    # Generate bundle hash.
    while True:
        sponge = Kerl()
        last_index = len(self) - 1

        for i, txn in enumerate(self):
            txn.current_index = i
            txn.last_index = last_index

            sponge.absorb(txn.get_signature_validation_trytes().as_trits())

        bundle_hash_trits = [0] * HASH_LENGTH
        sponge.squeeze(bundle_hash_trits)

        bundle_hash = BundleHash.from_trits(bundle_hash_trits)

        # Check that we generated a secure bundle hash.
        # https://github.com/iotaledger/iota.py/issues/84
        if any(13 in part for part in normalize(bundle_hash)):
            # Increment the legacy tag and try again.
            tail_transaction = self.tail_transaction  # type: ProposedTransaction
            tail_transaction.increment_legacy_tag()
        else:
            break

    # Copy bundle hash to individual transactions.
    for txn in self:
        txn.bundle_hash = bundle_hash

        # Initialize signature/message fragment.
        txn.signature_message_fragment = Fragment(txn.message or b'')
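# The `while True` loop above retries with an incremented legacy tag whenever
# the normalized bundle hash contains the value 13 (tryte 'M'): a 13 means
# zero hashing rounds for the matching key segment during signing, which would
# expose that segment. A standalone sketch of just that check, assuming
# `normalize` is importable from `iota.crypto.signing` as in PyOTA; the hash
# below is contrived so that its first normalized chunk keeps a 13.
from iota import BundleHash
from iota.crypto.signing import normalize

contrived_hash = BundleHash(b'MN' + b'9' * 79)  # 'M' (13) balanced by 'N' (-13)

insecure = any(13 in chunk for chunk in normalize(contrived_hash))
print(insecure)  # True -> finalize() would bump the legacy tag and re-hash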
while True:
    print("Create your Message ")
    Message = input()
    print("Create your Tag ")
    TAG = input()
    print("Please type in the secret Key:")
    secret_key = input()

    ## Create the next address from root_address.
    astrits = TryteString(str(root_address).encode()).as_trits()
    checksum_trits = []
    sponge = Kerl()
    sponge.absorb(astrits)
    sponge.squeeze(checksum_trits)
    result = TryteString.from_trits(checksum_trits)
    new_address = Address(result)

    ## Transform the secret_key into a Base64 key.
    h = blake2b(digest_size=16)
    h_pw = h.update(bytes(secret_key.encode('utf-8')))
    hh = h.hexdigest()
    pw_string = str(hh).encode('utf-8')
    b64_pw = base64.b64encode(pw_string)

    ## Encrypt the message.
    data = {'message': Message}
    msg = json.dumps(data)
    key = b64_pw
    f = Fernet(key)
class MultisigAddressBuilder(object):
    """
    Creates multisig addresses.

    Note that this class generates a single address from multiple
    inputs (digests), unlike
    :py:class:`iota.crypto.addresses.AddressGenerator` which generates
    multiple addresses from a single input (seed).
    """

    def __init__(self) -> None:
        super(MultisigAddressBuilder, self).__init__()

        self._digests: List[Digest] = []
        """
        Keeps track of digests that were added, so that we can attach
        them to the final :py:class:`MultisigAddress` object.
        """

        self._address: Optional[MultisigAddress] = None
        """
        Caches the generated address.

        Generating the address modifies the internal state of the curl
        sponge, so each :py:class:`MultisigAddressBuilder` instance can
        only generate a single address.
        """

        self._sponge = Kerl()

    def add_digest(self, digest: Digest) -> None:
        """
        Absorbs a digest into the sponge.

        .. important::
            Keep track of the order that digests are added!

            To spend inputs from a multisig address, you must provide
            the private keys in the same order!

        References:

        - https://github.com/iotaledger/wiki/blob/master/multisigs.md#spending-inputs
        """
        if self._address:
            raise ValueError(
                'Cannot add digests once an address is extracted.')

        self._sponge.absorb(digest.as_trits())
        self._digests.append(digest)

    def get_address(self) -> MultisigAddress:
        """
        Returns the new multisig address.

        Note that you can continue to add digests after extracting an
        address; the next address will use *all* of the digests that
        have been added so far.
        """
        if not self._digests:
            raise ValueError(
                'Must call ``add_digest`` at least once '
                'before calling ``get_address``.',
            )

        if not self._address:
            address_trits = [0] * HASH_LENGTH
            self._sponge.squeeze(address_trits)

            self._address = MultisigAddress.from_trits(
                address_trits,
                digests=self._digests[:],
            )

        return self._address
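# Short usage sketch for MultisigAddressBuilder above. The two digests are
# fixed placeholders; in practice each co-signer supplies the digest of their
# own private key, and the order in which digests are absorbed must match the
# order the keys sign in later.
from iota import TryteString
from iota.crypto.types import Digest

digest_1 = Digest(TryteString(b'B' * 81), key_index=0)
digest_2 = Digest(TryteString(b'C' * 81), key_index=0)

builder = MultisigAddressBuilder()
builder.add_digest(digest_1)
builder.add_digest(digest_2)

multisig_address = builder.get_address()
print(multisig_address)  # same address whenever the same digests are added in the same order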
class SignatureFragmentGenerator(Iterator[TryteString]):
    """
    Used to generate signature fragments progressively.

    Each instance can generate 1 signature per fragment in the private
    key.
    """

    def __init__(self, private_key, hash_):
        # type: (PrivateKey, Hash) -> None
        super(SignatureFragmentGenerator, self).__init__()

        self._key_chunks = private_key.iter_chunks(FRAGMENT_LENGTH)
        self._iteration = -1
        self._normalized_hash = normalize(hash_)
        self._sponge = Kerl()

    def __iter__(self):
        # type: () -> SignatureFragmentGenerator
        return self

    def __len__(self):
        # type: () -> int
        """
        Returns the number of fragments this generator can create.

        Note: This method always returns the same result, no matter how
        many iterations have been completed.
        """
        return len(self._key_chunks)

    def __next__(self):
        # type: () -> TryteString
        """
        Returns the next signature fragment.
        """
        key_trytes = next(self._key_chunks)  # type: TryteString
        self._iteration += 1

        # If the key is long enough, loop back around to the start.
        normalized_chunk =\
            self._normalized_hash[self._iteration % len(self._normalized_hash)]

        signature_fragment = key_trytes.as_trits()

        # Build the signature, one hash at a time.
        for i in range(key_trytes.count_chunks(Hash.LEN)):
            hash_start = i * HASH_LENGTH
            hash_end = hash_start + HASH_LENGTH

            buffer = signature_fragment[hash_start:hash_end]  # type: MutableSequence[int]

            for _ in range(13 - normalized_chunk[i]):
                self._sponge.reset()
                self._sponge.absorb(buffer)
                self._sponge.squeeze(buffer)

            signature_fragment[hash_start:hash_end] = buffer

        return TryteString.from_trits(signature_fragment)

    if PY2:
        next = __next__
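# Finally, a sketch of driving SignatureFragmentGenerator above. The all-'9'
# private key (one 2187-tryte fragment) and bundle hash are placeholders, so
# the resulting fragment is meaningless; a real key would come from PyOTA's
# key generator. The type constructors and import paths are assumed from
# PyOTA's layout.
from iota import Hash, TryteString
from iota.crypto.types import PrivateKey

placeholder_key = PrivateKey(TryteString(b'9' * 2187), key_index=0)
bundle_hash = Hash(b'9' * 81)

generator = SignatureFragmentGenerator(placeholder_key, bundle_hash)
print(len(generator))              # one fragment for a 2187-tryte key
first_fragment = next(generator)   # 2187-tryte signature fragment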