def _build_witnesses(self, tx_aux_hash: bytes):
    """Build the witness list for the transaction.

    For each input, derive the corresponding node and sign a message
    derived from the transaction aux hash with its extended private key.

    tx_aux_hash: 32-byte hash of the transaction aux structure (it is
        concatenated with bytes below, so bytes — the original ``str``
        annotation was wrong).
    """
    witnesses = []
    for input in self.inputs:
        _, node = derive_address_and_node(self.keychain, input.address_n)

        # Signed message layout: 0x01 tag | CBOR(protocol magic) |
        # 0x58 0x20 (CBOR header for a 32-byte bytestring) | tx aux hash.
        message = (
            b"\x01" + cbor.encode(self.protocol_magic) + b"\x58\x20" + tx_aux_hash
        )

        signature = ed25519.sign_ext(
            node.private_key(), node.private_key_ext(), message
        )
        # The witness carries the extended public key: raw public key
        # (ed25519 prefix stripped) followed by the chain code.
        extended_public_key = (
            remove_ed25519_prefix(node.public_key()) + node.chain_code()
        )
        witnesses.append(
            [
                (input.type or 0),
                cbor.Tagged(24, cbor.encode([extended_public_key, signature])),
            ]
        )

    return witnesses
def test_encode_chunked(self):
    """encode_chunked must split the canonical encoding into max-size pieces."""
    payload = {i: i for i in range(100)}
    reference = encode(payload)
    total = len(reference)
    assert total == 354

    factor_of_total = 59
    power_of_two = 64
    for size in [1, 10, factor_of_total, power_of_two, total, total + 1]:
        chunks = [bytes(piece) for piece in encode_chunked(payload, size)]

        self.assertEqual(len(chunks), math.ceil(total / size))

        # every chunk but the final one must be exactly `size` bytes
        for chunk in chunks[:-1]:
            self.assertEqual(len(chunk), size)

        # the final chunk holds the remainder, or a full chunk when size
        # divides the total evenly
        tail = total % size
        self.assertEqual(len(chunks[-1]), tail if tail > 0 else size)

        self.assertEqual(b''.join(chunks), reference)
def generate_id(self) -> None:
    """Derive and assign a new encrypted credential ID (``self.id``).

    The credential fields are CBOR-encoded, encrypted with
    ChaCha20-Poly1305 under a SLIP-21 derived key, authenticated against
    the RP ID hash, and prefixed with the credential ID version and the
    random nonce.

    Raises:
        AssertionError: if required fields are missing or the resulting
            ID exceeds ``CRED_ID_MAX_LENGTH``.
    """
    # Use the device's monotonic U2F counter as the creation timestamp.
    self.creation_time = storage.device.next_u2f_counter() or 0

    if not self.check_required_fields():
        raise AssertionError

    # Only truthy fields are serialized; falsy values are omitted entirely.
    data = {
        key: value
        for key, value in (
            (_CRED_ID_RP_ID, self.rp_id),
            (_CRED_ID_RP_NAME, self.rp_name),
            (_CRED_ID_USER_ID, self.user_id),
            (_CRED_ID_USER_NAME, self.user_name),
            (_CRED_ID_USER_DISPLAY_NAME, self.user_display_name),
            (_CRED_ID_CREATION_TIME, self.creation_time),
            (_CRED_ID_HMAC_SECRET, self.hmac_secret),
            (_CRED_ID_USE_SIGN_COUNT, self.use_sign_count),
        )
        if value
    }
    # Algorithm and curve are stored only when they differ from the defaults.
    if self.algorithm != _DEFAULT_ALGORITHM or self.curve != _DEFAULT_CURVE:
        data[_CRED_ID_ALGORITHM] = self.algorithm
        data[_CRED_ID_CURVE] = self.curve

    key = seed.derive_slip21_node_without_passphrase(
        [b"SLIP-0022", _CRED_ID_VERSION, b"Encryption key"]
    ).key()
    iv = random.bytes(12)  # 96-bit nonce for ChaCha20-Poly1305
    ctx = chacha20poly1305(key, iv)
    # Bind the ciphertext to the relying party via additional auth data.
    ctx.auth(self.rp_id_hash)
    ciphertext = ctx.encrypt(cbor.encode(data))
    tag = ctx.finish()
    self.id = _CRED_ID_VERSION + iv + ciphertext + tag

    if len(self.id) > CRED_ID_MAX_LENGTH:
        raise AssertionError
def _get_address_root(node, payload):
    """Compute the address root hash for a node and optional attribute payload."""
    extended_public_key = remove_ed25519_prefix(node.public_key()) + node.chain_code()
    # A truthy payload is CBOR-encoded under key 1; otherwise no attributes.
    attributes = {1: cbor.encode(payload)} if payload else {}
    return _address_hash([0, [0, extended_public_key], attributes])
def _build_byron_witnesses(
    keychain: seed.Keychain,
    inputs: List[CardanoTxInputType],
    tx_body_hash: bytes,
    protocol_magic: int,
) -> List[Tuple[bytes, bytes, bytes, bytes]]:
    """Sign the tx body hash once for every distinct Byron path among inputs.

    Returns tuples of (public key, signature, chain code, encoded address
    attributes).
    """
    # Deduplicate derivation paths so each yields exactly one witness.
    unique_paths = {
        tuple(tx_input.address_n)
        for tx_input in inputs
        if is_byron_path(tx_input.address_n)
    }

    byron_witnesses = []
    for path in unique_paths:
        node = keychain.derive(list(path))
        byron_witnesses.append(
            (
                remove_ed25519_prefix(node.public_key()),
                ed25519.sign_ext(
                    node.private_key(), node.private_key_ext(), tx_body_hash
                ),
                node.chain_code(),
                cbor.encode(get_address_attributes(protocol_magic)),
            )
        )

    return byron_witnesses
def generate_id(self) -> None:
    """Derive and assign a new encrypted credential ID (``self.id``).

    The credential fields are CBOR-encoded, encrypted with
    ChaCha20-Poly1305 under a SLIP-21 derived key and authenticated
    against the RP ID hash; the result is prefixed with the credential ID
    version and the random nonce.
    """
    # Use the device's monotonic U2F counter as the creation timestamp.
    self.creation_time = storage.device.next_u2f_counter() or 0

    # Only truthy fields are serialized; falsy values are omitted entirely.
    data = cbor.encode(
        {
            key: value
            for key, value in (
                (_CRED_ID_RP_ID, self.rp_id),
                (_CRED_ID_RP_NAME, self.rp_name),
                (_CRED_ID_USER_ID, self.user_id),
                (_CRED_ID_USER_NAME, self.user_name),
                (_CRED_ID_USER_DISPLAY_NAME, self.user_display_name),
                (_CRED_ID_CREATION_TIME, self.creation_time),
                (_CRED_ID_HMAC_SECRET, self.hmac_secret),
                (_CRED_ID_USE_SIGN_COUNT, self.use_sign_count),
            )
            if value
        }
    )
    key = seed.derive_slip21_node_without_passphrase(
        [b"SLIP-0022", _CRED_ID_VERSION, b"Encryption key"]
    ).key()
    iv = random.bytes(12)  # 96-bit nonce for ChaCha20-Poly1305
    ctx = chacha20poly1305(key, iv)
    # Bind the ciphertext to the relying party via additional auth data.
    ctx.auth(self.rp_id_hash)
    ciphertext = ctx.encrypt(data)
    tag = ctx.finish()
    self.id = _CRED_ID_VERSION + iv + ciphertext + tag
def _cborize_byron_witnesses( keychain: seed.Keychain, inputs: list[CardanoTxInputType], tx_body_hash: bytes, protocol_magic: int, ) -> list[tuple[bytes, bytes, bytes, bytes]]: byron_witnesses = [] # include only one witness for each path paths = set() for tx_input in inputs: if is_byron_path(tx_input.address_n): paths.add(tuple(tx_input.address_n)) for path in paths: node = keychain.derive(list(path)) public_key = derive_public_key(keychain, list(path)) signature = ed25519.sign_ext(node.private_key(), node.private_key_ext(), tx_body_hash) chain_code = node.chain_code() address_attributes = cbor.encode( get_address_attributes(protocol_magic)) byron_witnesses.append( (public_key, signature, chain_code, address_attributes)) byron_witnesses.sort() return byron_witnesses
def get_address_attributes(protocol_magic: int) -> dict:
    """Return the Byron address attributes for the given protocol magic."""
    # protocol magic is included in Byron addresses only on testnets
    if protocol_magics.is_mainnet(protocol_magic):
        return {}
    return {PROTOCOL_MAGIC_KEY: cbor.encode(protocol_magic)}
def test_encode_streamed(self):
    """Joined streamed output must equal the one-shot encoding."""
    payload = {i: i for i in range(100)}
    expected = encode(payload)
    streamed = b''.join(bytes(part) for part in encode_streamed(payload))
    self.assertEqual(streamed, expected)
def derive(keychain: seed.Keychain, path: list, protocol_magic: int) -> bytes:
    """Derive a raw Byron address for the given derivation path."""
    attributes = get_address_attributes(protocol_magic)
    root = _get_address_root(keychain, path, attributes)
    address_type = 0  # public-key address
    encoded = cbor.encode([root, attributes, address_type])
    return _encode_raw(encoded)
def _get_catalyst_registration_auxiliary_data_hash(
    catalyst_registration_payload: CatalystRegistrationPayload,
    catalyst_registration_payload_signature: bytes,
) -> bytes:
    """Hash the wrapped, CBOR-encoded Catalyst registration metadata."""
    cborized = _cborize_catalyst_registration(
        catalyst_registration_payload,
        catalyst_registration_payload_signature,
    )
    encoded_metadata = cbor.encode(_wrap_metadata(cborized))
    return _hash_auxiliary_data(encoded_metadata)
def public_key(self) -> bytes:
    """Return the credential's public key as a CBOR-encoded COSE key.

    Raises:
        TypeError: if the credential's curve is not supported.
    """
    # NOTE: key insertion order below matches the original so the CBOR
    # encoding is byte-identical.
    if self.curve == common.COSE_CURVE_P256:
        point = nist256p1.publickey(self._private_key(), False)
        cose_key = {
            common.COSE_KEY_ALG: self.algorithm,
            common.COSE_KEY_KTY: common.COSE_KEYTYPE_EC2,
            common.COSE_KEY_CRV: self.curve,
            common.COSE_KEY_X: point[1:33],   # X coordinate (skip SEC1 prefix)
            common.COSE_KEY_Y: point[33:],    # Y coordinate
        }
    elif self.curve == common.COSE_CURVE_ED25519:
        cose_key = {
            common.COSE_KEY_ALG: self.algorithm,
            common.COSE_KEY_KTY: common.COSE_KEYTYPE_OKP,
            common.COSE_KEY_CRV: self.curve,
            common.COSE_KEY_X: ed25519.publickey(self._private_key()),
        }
    else:
        raise TypeError
    return cbor.encode(cose_key)
def _serialize_tx(keychain: seed.Keychain, msg: CardanoSignTx) -> Tuple[bytes, bytes]:
    """Build, hash and sign the transaction; return (serialized tx, body hash)."""
    body = _build_tx_body(keychain, msg)
    body_hash = _hash_tx_body(body)
    witnesses = _build_witnesses(keychain, msg.inputs, body_hash, msg.protocol_magic)
    # The third element is the (absent) metadata.
    serialized = cbor.encode([body, witnesses, None])
    return serialized, body_hash
def derive_address_and_node(keychain, path: list):
    """Derive the Byron address and its HD node for the given path."""
    node = keychain.derive(path)
    # No payload -> empty address attributes.
    root = _get_address_root(node, None)
    attributes = {}
    address_type = 0  # public-key address
    encoded = cbor.encode([root, attributes, address_type])
    return (_encode_address_raw(encoded), node)
def serialise_tx(self):
    """Serialize the full transaction.

    Returns:
        (tx_body, tx_hash): the CBOR-encoded signed transaction and the
        32-byte BLAKE2b hash of the transaction aux structure.

    Side effects: computes outputs via ``_process_outputs`` and stores
    the computed fee in ``self.fee``.
    """
    self._process_outputs()

    # Inputs: [type, Tagged(24, CBOR([prev_hash, prev_index]))].
    inputs_cbor = []
    for input in self.inputs:
        inputs_cbor.append(
            [
                (input.type or 0),
                cbor.Tagged(24, cbor.encode([input.prev_hash, input.prev_index])),
            ]
        )

    inputs_cbor = cbor.IndefiniteLengthArray(inputs_cbor)

    # Outputs: [decoded address, coin amount], outgoing first, then change.
    outputs_cbor = []
    for index, address in enumerate(self.output_addresses):
        outputs_cbor.append(
            [cbor.Raw(base58.decode(address)), self.outgoing_coins[index]]
        )

    for index, address in enumerate(self.change_addresses):
        outputs_cbor.append(
            [cbor.Raw(base58.decode(address)), self.change_coins[index]]
        )

    outputs_cbor = cbor.IndefiniteLengthArray(outputs_cbor)

    tx_aux_cbor = [inputs_cbor, outputs_cbor, self.attributes]
    # The tx hash is the 32-byte BLAKE2b digest of the encoded aux data;
    # witnesses sign this hash.
    tx_hash = hashlib.blake2b(data=cbor.encode(tx_aux_cbor), outlen=32).digest()

    witnesses = self._build_witnesses(tx_hash)
    tx_body = cbor.encode([tx_aux_cbor, witnesses])

    self.fee = self.compute_fee(
        self.input_coins_sum, self.outgoing_coins, self.change_coins
    )

    return tx_body, tx_hash
def test_cbor_tuples(self):
    """
    Tuples should be encoded as arrays and decoded back as lists.
    """
    test_vectors = [
        ([], '80'),
        ([1, 2, 3], '83010203'),
        ([1, [2, 3], [4, 5]], '8301820203820405'),
        (list(range(1, 26)),
         '98190102030405060708090a0b0c0d0e0f101112131415161718181819'),
    ]
    for as_list, encoded_hex in test_vectors:
        raw = unhexlify(encoded_hex)
        # encoding a tuple yields the same bytes as the list form
        self.assertEqual(raw, encode(tuple(as_list)))
        # decoding always produces a list
        self.assertEqual(as_list, decode(raw))
def _create_catalyst_registration_payload_signature(
    keychain: seed.Keychain,
    catalyst_registration_payload: CatalystRegistrationPayload,
    path: list[int],
) -> bytes:
    """Sign the BLAKE2b hash of the CBOR-encoded Catalyst registration payload."""
    node = keychain.derive(path)
    metadata = {METADATA_KEY_CATALYST_REGISTRATION: catalyst_registration_payload}
    payload_hash = hashlib.blake2b(
        data=cbor.encode(metadata),
        outlen=CATALYST_REGISTRATION_HASH_SIZE,
    ).digest()
    return ed25519.sign_ext(
        node.private_key(), node.private_key_ext(), payload_hash
    )
def get_auxiliary_data_cbor(
    keychain: seed.Keychain,
    auxiliary_data: CardanoTxAuxiliaryDataType,
    protocol_magic: int,
    network_id: int,
) -> bytes:
    """Return the CBOR-encoded auxiliary data.

    Either passes through a raw blob, or builds and wraps the Catalyst
    registration metadata.

    Raises:
        INVALID_AUXILIARY_DATA: when neither form is present.
    """
    if auxiliary_data.blob:
        return auxiliary_data.blob

    if auxiliary_data.catalyst_registration_parameters:
        registration = _cborize_catalyst_registration(
            keychain,
            auxiliary_data.catalyst_registration_parameters,
            protocol_magic,
            network_id,
        )
        return cbor.encode(_wrap_metadata(registration))

    raise INVALID_AUXILIARY_DATA
def test_cbor_ordered_map(self):
    """
    OrderedMaps should be encoded as maps without any ordering and
    decoded back as dicts.
    """
    test_vectors = [
        ({}, 'a0'),
        ([[1, 2], [3, 4]], 'a201020304'),
        ([[3, 4], [1, 2]], 'a203040102'),
    ]
    for pairs, encoded_hex in test_vectors:
        ordered = OrderedMap()
        for key, value in pairs:
            ordered[key] = value
        expected = unhexlify(encoded_hex)
        # insertion order is preserved in the encoding...
        self.assertEqual(encode(ordered), expected)
        # ...but decoding yields a plain dict
        self.assertEqual(decode(expected), dict(pairs))
def _serialize_tx(keychain: seed.Keychain, msg: CardanoSignTx) -> Tuple[bytes, bytes]:
    """Build, hash and sign the transaction; return (serialized tx, body hash)."""
    tx_body = _build_tx_body(keychain, msg)
    tx_hash = _hash_tx_body(tx_body)
    witnesses = _build_witnesses(
        keychain,
        msg.inputs,
        msg.certificates,
        msg.withdrawals,
        tx_hash,
        msg.protocol_magic,
    )
    # Metadata is passed through verbatim (already CBOR) when present.
    metadata = cbor.Raw(bytes(msg.metadata)) if msg.metadata else None
    return cbor.encode([tx_body, witnesses, metadata]), tx_hash
def _validate_max_tx_output_size(
    keychain: seed.Keychain,
    output: CardanoTxOutputType,
    protocol_magic: int,
    network_id: int,
) -> None:
    """
    This limitation is a mitigation measure to prevent sending large
    (especially change) outputs containing many tokens that Trezor would
    not be able to spend reliably given that currently the full Cardano
    transaction is held in-memory. Once Cardano-transaction signing is
    refactored to be streamed, this limit can be lifted
    """
    serialized = cbor.encode(
        _cborize_output(keychain, output, protocol_magic, network_id)
    )
    if len(serialized) > MAX_TX_OUTPUT_SIZE:
        raise wire.ProcessError(
            "Maximum tx output size (%s bytes) exceeded!" % MAX_TX_OUTPUT_SIZE
        )
def _encode_address_raw(address_data_encoded):
    """Wrap the encoded address data with its CRC32 and base58-encode the result."""
    checksum = crc.crc32(address_data_encoded)
    wrapped = cbor.encode([cbor.Tagged(24, address_data_encoded), checksum])
    return base58.encode(wrapped)
def _hash_tx_body(tx_body: Dict) -> bytes:
    """Return the 32-byte BLAKE2b digest of the CBOR-encoded tx body."""
    encoded_body = cbor.encode(tx_body)
    return hashlib.blake2b(data=encoded_body, outlen=32).digest()
def get_native_script_hash(keychain: Keychain, script: CardanoNativeScript) -> bytes:
    """Return the BLAKE2b hash of the CBOR-encoded native script.

    The encoding is prefixed with a single zero byte before hashing
    (presumably the native-script language tag — confirm against the
    ledger CDDL).
    """
    script_cbor = cbor.encode(cborize_native_script(keychain, script))
    # b"\x00" replaces the obscure octal escape b"\00"; both are the same
    # single NUL byte, but the octal form is easily misread as two bytes.
    prefixed_script_cbor = b"\x00" + script_cbor
    return hashlib.blake2b(data=prefixed_script_cbor, outlen=SCRIPT_HASH_SIZE).digest()
def _address_hash(data: list) -> bytes:
    """Return BLAKE2b-224 of the SHA3-256 of the CBOR-encoded data."""
    inner_digest = hashlib.sha3_256(cbor.encode(data)).digest()
    return hashlib.blake2b(data=inner_digest, outlen=28).digest()
def _encode_address_raw(address_data_encoded) -> bytes:
    """CBOR-wrap the encoded address data together with its CRC32 checksum."""
    checksum = crc.crc32(address_data_encoded)
    return cbor.encode([cbor.Tagged(24, address_data_encoded), checksum])
def _address_hash(data) -> bytes:
    """Return BLAKE2b-224 of the SHA3-256 of the CBOR-encoded data."""
    encoded = cbor.encode(data)
    inner_digest = hashlib.sha3_256(encoded).digest()
    return hashlib.blake2b(data=inner_digest, outlen=28).digest()
def test_cbor_encoding(self):
    """Round-trip check: encode(val) matches the expected bytes and
    decode(bytes) reproduces val, across all major types."""
    test_vectors = [
        # unsigned integers
        (0, '00'),
        (1, '01'),
        (10, '0a'),
        (23, '17'),
        (24, '1818'),
        (25, '1819'),
        (100, '1864'),
        (1000, '1903e8'),
        (1000000, '1a000f4240'),
        (1000000000000, '1b000000e8d4a51000'),
        # negative integers
        (-1, '20'),
        (-10, '29'),
        (-24, '37'),
        (-25, '3818'),
        (-26, '3819'),
        (-100, '3863'),
        (-1000, '3903E7'),
        (-1000000, '3A000F423F'),
        (-1000000000000, '3B000000E8D4A50FFF'),
        # binary strings
        (b'', '40'),
        (unhexlify('01020304'), '4401020304'),
        # text strings
        ('', '60'),
        ('Fun', '6346756e'),
        (u'P\u0159\xed\u0161ern\u011b \u017elu\u0165ou\u010dk\xfd k\u016f\u0148 \xfap\u011bl \u010f\xe1belsk\xe9 \xf3dy z\xe1ke\u0159n\xfd u\u010de\u0148 b\u011b\u017e\xed pod\xe9l z\xf3ny \xfal\u016f',
         '786550c599c3adc5a165726ec49b20c5be6c75c5a56f75c48d6bc3bd206bc5afc58820c3ba70c49b6c20c48fc3a162656c736bc3a920c3b36479207ac3a16b65c5996ec3bd2075c48d65c5882062c49bc5bec3ad20706f64c3a96c207ac3b36e7920c3ba6cc5af'),
        # tags
        (Tagged(1, 1363896240), 'c11a514b67b0'),
        (Tagged(23, unhexlify('01020304')), 'd74401020304'),
        # arrays
        ([], '80'),
        ([1, 2, 3], '83010203'),
        ([1, [2, 3], [4, 5]], '8301820203820405'),
        (list(range(1, 26)),
         '98190102030405060708090a0b0c0d0e0f101112131415161718181819'),
        # maps
        ({}, 'a0'),
        ({1: 2, 3: 4}, 'a201020304'),
        # indefinite
        (IndefiniteLengthArray([]), '9fff'),
        (IndefiniteLengthArray([1, [2, 3], [4, 5]]), '9f01820203820405ff'),
        (IndefiniteLengthArray([1, [2, 3], IndefiniteLengthArray([4, 5])]),
         '9f018202039f0405ffff'),
        # boolean
        (True, 'f5'),
        (False, 'f4'),
        # null
        (None, 'f6'),
    ]
    for val, encoded_hex in test_vectors:
        encoded = unhexlify(encoded_hex)
        self.assertEqual(encode(val), encoded)
        self.assertEqual(decode(encoded), val)
def _hash_item(self, item: Any) -> bytes:
    """CBOR-encode the item, fold it into the running hash, and return the encoding."""
    assert self.hash_fn is not None
    encoded = cbor.encode(item)
    self.hash_fn.update(encoded)
    return encoded