def __init__(self, private_key, hash_):
    # type: (PrivateKey, Hash) -> None
    """
    Initializes generator state for a new signing session.

    :param private_key:
        Key used to produce the signature fragments.

    :param hash_:
        Hash to be signed (normalized before use).
    """
    super(SignatureFragmentGenerator, self).__init__()

    self._sponge = Kerl()
    self._normalized_hash = normalize(hash_)
    self._key_chunks = private_key.iter_chunks(FRAGMENT_LENGTH)

    # Incremented at the start of each ``__next__`` call, so the
    # first fragment uses iteration 0.
    self._iteration = -1
def _create_sponge(self, index):
    # type: (int) -> Kerl
    """
    Prepares the hash sponge for the generator.

    :param index:
        Key index; mixed into the seed to derive the subseed.
    """
    subseed = self.seed_as_trits[:]

    sponge = Kerl()
    sponge.absorb(add_trits(subseed, trits_from_int(index)))

    # Squeeze all of the trits out of the sponge and re-absorb them.
    # Note that the sponge transforms several times per operation,
    # so this sequence is not as redundant as it looks at first
    # glance.
    sponge.squeeze(subseed)
    sponge.reset()
    sponge.absorb(subseed)

    return sponge
def __init__(self):
    """
    Sets up an empty builder with a fresh sponge.
    """
    super(MultisigAddressBuilder, self).__init__()

    self._sponge = Kerl()

    # Digests absorbed so far.  They get attached to the final
    # :py:class:`MultisigAddress` object when it is generated.
    self._digests = []  # type: List[Digest]

    # Caches the generated address.  Generating the address modifies
    # the internal state of the curl sponge, so each
    # :py:class:`MultisigAddressBuilder` instance can only generate a
    # single address.
    self._address = None  # type: Optional[MultisigAddress]
def test_generate_multi_trytes_and_hash(self):
    """
    Checks Kerl against the multi-tryte test vectors from CSV.
    """
    filepath = join(
        dirname(__file__),
        'test_vectors/generate_multi_trytes_and_hash.csv',
    )

    with open(filepath, 'r') as f:
        for row_index, row in enumerate(DictReader(f)):
            multi_trytes = row['multiTrytes']
            expected_hash = row['Kerl_hash']

            sponge = Kerl()
            sponge.absorb(trytes_to_trits(multi_trytes))

            squeezed = []
            sponge.squeeze(squeezed)

            actual_hash = trits_to_trytes(squeezed)

            self.assertEqual(
                expected_hash,
                actual_hash,

                msg='line {count}: {hashes} != {trytes}'.format(
                    # +2 accounts for the CSV header row and 1-based
                    # file line numbering.
                    count=row_index + 2,
                    hashes=expected_hash,
                    trytes=actual_hash,
                ),
            )
def _generate_checksum(self):
    # type: () -> AddressChecksum
    """
    Generates the correct checksum for this address.
    """
    hash_trits = []  # type: MutableSequence[int]

    sponge = Kerl()
    sponge.absorb(self.address.as_trits())
    sponge.squeeze(hash_trits)

    # Only the trailing trits of the hash make up the checksum.
    checksum_length = AddressChecksum.LEN * TRITS_PER_TRYTE
    return AddressChecksum.from_trits(hash_trits[-checksum_length:])
def address_from_digest(digest):
    # type: (Digest) -> Address
    """
    Generates an address from a private key digest.
    """
    sponge = Kerl()
    sponge.absorb(digest.as_trits())

    # Squeeze the sponge directly into the address trits.
    address_trits = [0] * (Address.LEN * TRITS_PER_TRYTE)  # type: List[int]
    sponge.squeeze(address_trits)

    # Carry key index and security level over from the digest, so the
    # resulting address knows how it was derived.
    return Address.from_trits(
        trits=address_trits,
        key_index=digest.key_index,
        security_level=digest.security_level,
    )
def test_correct_first(self):
    """
    Single-hash absorb/squeeze round-trip matches the known vector.
    """
    # noinspection SpellCheckingInspection
    input_trytes = ('EMIDYNHBWMBCXVDEFOFWINXTERALUKYYPPHKP9JJ'
                    'FGJEIUY9MUDVNFZHMMWZUYUSWAIOWEVTHNWMHANBH')

    sponge = Kerl()
    sponge.absorb(trytes_to_trits(input_trytes))

    squeezed = []
    sponge.squeeze(squeezed)

    # noinspection SpellCheckingInspection
    self.assertEqual(
        trits_to_trytes(squeezed),

        'EJEAOOZYSAWFPZQESYDHZCGYNSTWXUMVJOVDWUNZ'
        'JXDGWCLUFGIMZRMGCAZGKNPLBRLGUNYWKLJTYEAQX',
    )
def test_output_greater_243(self):
    """
    Squeezing more than one hash's worth of trits (2 * 243).
    """
    # noinspection SpellCheckingInspection
    input_trytes = ('9MIDYNHBWMBCXVDEFOFWINXTERALUKYYPPHKP9JJ'
                    'FGJEIUY9MUDVNFZHMMWZUYUSWAIOWEVTHNWMHANBH')

    sponge = Kerl()
    sponge.absorb(trytes_to_trits(input_trytes))

    squeezed = []
    sponge.squeeze(squeezed, length=486)

    # noinspection SpellCheckingInspection
    self.assertEqual(
        trits_to_trytes(squeezed),

        'G9JYBOMPUXHYHKSNRNMMSSZCSHOFYOYNZRSZMAAYWDYEIMVVOGKPJB'
        'VBM9TDPULSFUNMTVXRKFIDOHUXXVYDLFSZYZTWQYTE9SPYYWYTXJYQ'
        '9IFGYOLZXWZBKWZN9QOOTBQMWMUBLEWUEEASRHRTNIQWJQNDWRYLCA',
    )
def test_input_greater_243(self):
    """
    Absorbing more than one hash's worth of trits (2 * 243 squeezed).
    """
    # noinspection SpellCheckingInspection
    input_trytes = ('G9JYBOMPUXHYHKSNRNMMSSZCSHOFYOYNZRSZMAAYWDYEIMVVOGKPJB'
                    'VBM9TDPULSFUNMTVXRKFIDOHUXXVYDLFSZYZTWQYTE9SPYYWYTXJYQ'
                    '9IFGYOLZXWZBKWZN9QOOTBQMWMUBLEWUEEASRHRTNIQWJQNDWRYLCA')

    sponge = Kerl()
    sponge.absorb(trytes_to_trits(input_trytes))

    squeezed = []
    sponge.squeeze(squeezed, length=486)

    # noinspection SpellCheckingInspection
    self.assertEqual(
        trits_to_trytes(squeezed),

        'LUCKQVACOGBFYSPPVSSOXJEKNSQQRQKPZC9NXFSMQNRQCGGUL9OHVV'
        'KBDSKEQEBKXRNUJSRXYVHJTXBPDWQGNSCDCBAIRHAQCOWZEBSNHIJI'
        'GPZQITIBJQ9LNTDIBTCQ9EUWKHFLGFUVGGUWJONK9GBCDUIMAYMMQX',
    )
class SignatureFragmentGenerator(Iterator[TryteString]):
    """
    Used to generate signature fragments progressively.

    Each instance can generate 1 signature per fragment in the private
    key.
    """

    def __init__(self, private_key, hash_):
        # type: (PrivateKey, Hash) -> None
        super(SignatureFragmentGenerator, self).__init__()

        self._key_chunks = private_key.iter_chunks(FRAGMENT_LENGTH)
        # Incremented at the start of each ``__next__`` call, so the
        # first fragment uses iteration 0.
        self._iteration = -1
        self._normalized_hash = normalize(hash_)
        self._sponge = Kerl()

    def __iter__(self):
        # type: () -> SignatureFragmentGenerator
        return self

    def __len__(self):
        # type: () -> int
        """
        Returns the number of fragments this generator can create.

        Note: This method always returns the same result, no matter
        how many iterations have been completed.
        """
        return len(self._key_chunks)

    def __next__(self):
        # type: () -> TryteString
        """
        Returns the next signature fragment.

        :raises StopIteration:
            (Via ``next(self._key_chunks)``) when the key is
            exhausted.
        """
        key_trytes = next(self._key_chunks)  # type: TryteString
        self._iteration += 1

        # If the key is long enough, loop back around to the start.
        normalized_chunk = (
            self._normalized_hash[
                self._iteration % len(self._normalized_hash)
            ]
        )

        signature_fragment = key_trytes.as_trits()

        # Build the signature, one hash at a time.
        for i in range(key_trytes.count_chunks(Hash.LEN)):
            hash_start = i * HASH_LENGTH
            hash_end = hash_start + HASH_LENGTH

            # ``chunk_trits`` instead of ``buffer``, which shadows the
            # Python 2 builtin of the same name.
            chunk_trits = signature_fragment[hash_start:hash_end]  # type: List[int]

            # Number of hashing rounds is determined by the
            # corresponding value in the normalized hash.
            for _ in range(13 - normalized_chunk[i]):
                self._sponge.reset()
                self._sponge.absorb(chunk_trits)
                self._sponge.squeeze(chunk_trits)

            signature_fragment[hash_start:hash_end] = chunk_trits

        return TryteString.from_trits(signature_fragment)

    if PY2:
        next = __next__
def get_digest(self):
    # type: () -> Digest
    """
    Generates the digest used to do the actual signing.

    Signing keys can have variable length and tend to be quite long,
    which makes them not-well-suited for use in crypto algorithms.

    The digest is essentially the result of running the signing key
    through a PBKDF, yielding a constant-length hash that can be used
    for crypto.
    """
    hashes_per_fragment = FRAGMENT_LENGTH // Hash.LEN

    key_fragments = self.iter_chunks(FRAGMENT_LENGTH)

    # The digest will contain one hash per key fragment.
    digest = [0] * HASH_LENGTH * len(key_fragments)

    # Iterate over each fragment in the key.
    for fragment_index, fragment in enumerate(key_fragments):
        fragment_trits = fragment.as_trits()

        key_fragment = [0] * FRAGMENT_LENGTH
        hash_trits = []

        # Within each fragment, iterate over one hash at a time.
        for hash_index in range(hashes_per_fragment):
            offset = hash_index * HASH_LENGTH
            hash_trits = fragment_trits[offset:offset + HASH_LENGTH]

            # 26 rounds of hashing per chunk; ``squeeze`` mutates
            # ``hash_trits`` in place.
            for _ in range(26):
                sponge = Kerl()
                sponge.absorb(hash_trits)
                sponge.squeeze(hash_trits)

            key_fragment[offset:offset + HASH_LENGTH] = hash_trits

        # After processing all of the hashes in the fragment,
        # generate a final hash and append it to the digest.
        #
        # Note that we will do this once per fragment in the key, so
        # the longer the key is, the longer the digest will be.
        sponge = Kerl()
        sponge.absorb(key_fragment)
        sponge.squeeze(hash_trits)

        digest_offset = fragment_index * HASH_LENGTH
        digest[digest_offset:digest_offset + HASH_LENGTH] = hash_trits

    return Digest(TryteString.from_trits(digest), self.key_index)
class MultisigAddressBuilder(object):
    """
    Creates multisig addresses.

    Note that this class generates a single address from multiple
    inputs (digests), unlike
    :py:class:`iota_async.crypto.addresses.AddressGenerator` which
    generates multiple addresses from a single input (seed).
    """

    def __init__(self):
        super(MultisigAddressBuilder, self).__init__()

        self._digests = []  # type: List[Digest]
        """
        Keeps track of digests that were added, so that we can attach
        them to the final :py:class:`MultisigAddress` object.
        """

        self._address = None  # type: Optional[MultisigAddress]
        """
        Caches the generated address.

        Generating the address modifies the internal state of the curl
        sponge, so each :py:class:`MultisigAddressBuilder` instance can
        only generate a single address.
        """

        self._sponge = Kerl()

    def add_digest(self, digest):
        # type: (Digest) -> None
        """
        Absorbs a digest into the sponge.

        .. important::
            Keep track of the order that digests are added!

            To spend inputs from a multisig address, you must provide
            the private keys in the same order!

        References:

        - https://github.com/iotaledger/wiki/blob/master/multisigs.md#spending-inputs

        :raises ValueError:
            If an address has already been extracted via
            :py:meth:`get_address`.
        """
        if self._address:
            raise ValueError(
                'Cannot add digests once an address is extracted.')

        self._sponge.absorb(digest.as_trits())
        self._digests.append(digest)

    def get_address(self):
        # type: () -> MultisigAddress
        """
        Returns the new multisig address.

        Extracting the address freezes the builder: any subsequent
        call to :py:meth:`add_digest` raises ``ValueError``, and
        calling this method again returns the same cached address.

        (The previous docstring claimed digests could still be added
        after extraction, which contradicted the guard in
        ``add_digest``.)

        :raises ValueError:
            If no digests have been added yet.
        """
        if not self._digests:
            raise ValueError(
                'Must call ``add_digest`` at least once '
                'before calling ``get_address``.',
            )

        if not self._address:
            address_trits = [0] * HASH_LENGTH
            self._sponge.squeeze(address_trits)

            self._address = MultisigAddress.from_trits(
                address_trits,

                # Copy the list so later mutations of ``self._digests``
                # cannot affect the extracted address.
                digests=self._digests[:],
            )

        return self._address
def finalize(self):
    # type: () -> None
    """
    Finalizes the bundle, preparing it to be attached to the Tangle.

    :raises RuntimeError:
        If the bundle is already finalized.

    :raises ValueError:
        If the bundle is empty, or its inputs/outputs don't balance.
    """
    if self.hash:
        raise RuntimeError('Bundle is already finalized.')

    if not self:
        raise ValueError('Bundle has no transactions.')

    # Quick validation.
    balance = self.balance

    if balance < 0:
        # Negative balance means unspent inputs remain; sweep them to
        # the change address if one was configured.
        if self.change_address:
            self.add_transaction(
                ProposedTransaction(
                    address=self.change_address,
                    value=-balance,
                    tag=self.tag,
                ))
        else:
            raise ValueError(
                'Bundle has unspent inputs (balance: {balance}); '
                'use ``send_unspent_inputs_to`` to create '
                'change transaction.'.format(balance=balance, ),
            )
    elif balance > 0:
        raise ValueError(
            'Inputs are insufficient to cover bundle spend '
            '(balance: {balance}).'.format(balance=balance, ),
        )

    # Generate bundle hash.
    # Loops because the hash may have to be regenerated (see below).
    while True:
        sponge = Kerl()
        last_index = len(self) - 1

        for i, txn in enumerate(self):
            # Stamp each transaction with its position in the bundle.
            txn.current_index = i
            txn.last_index = last_index

            sponge.absorb(
                txn.get_signature_validation_trytes().as_trits())

        # ``squeeze`` fills ``bundle_hash_trits`` in place.
        bundle_hash_trits = [0] * HASH_LENGTH
        sponge.squeeze(bundle_hash_trits)

        bundle_hash = BundleHash.from_trits(bundle_hash_trits)

        # Check that we generated a secure bundle hash.
        # A normalized value of 13 ('M') is rejected; see the linked
        # issue for the rationale ("M-bug").
        # https://github.com/iotaledger/iota.lib.py/issues/84
        if any(13 in part for part in normalize(bundle_hash)):
            # Increment the legacy tag and try again.
            # Changing the tail transaction changes the absorbed
            # trytes, which yields a different hash next iteration.
            tail_transaction = (
                self.tail_transaction
            )  # type: ProposedTransaction
            tail_transaction.increment_legacy_tag()
        else:
            break

    # Copy bundle hash to individual transactions.
    for txn in self:
        txn.bundle_hash = bundle_hash

        # Initialize signature/message fragment.
        txn.signature_message_fragment = Fragment(txn.message or b'')