def test_favor_short_string_form():
    data = decode_hex('b8056d6f6f7365')
    with pytest.raises(DecodingError):
        decode(data)

    data = decode_hex('856d6f6f7365')
    assert decode(data) == b'moose'
def get_header_by_hash(self, collation_hash: Hash32) -> CollationHeader:
    try:
        header = self.db[collation_hash]
    except KeyError:
        raise CollationHeaderNotFound(
            "No header with hash {} found".format(collation_hash))
    return rlp.decode(header, sedes=CollationHeader)
def test_EIP155_transaction_sender_extraction(txn_fixture):
    key = keys.PrivateKey(decode_hex(txn_fixture['key']))
    transaction = rlp.decode(decode_hex(txn_fixture['signed']), sedes=SpuriousDragonTransaction)
    sender = extract_transaction_sender(transaction)

    assert is_same_address(sender, transaction.sender)
    assert is_same_address(sender, key.public_key.to_canonical_address())
def _get_account(self, address):
    rlp_account = self._journaltrie.get(address, b'')
    if rlp_account:
        account = rlp.decode(rlp_account, sedes=Account)
    else:
        account = Account()
    return account
def load_historical_network_tpc_capability(
        self,
        sort: bool = True) -> Optional[List[List[Union[Timestamp, int]]]]:
    '''
    Returns a list of [timestamp, transactions per second] pairs.
    '''
    lookup_key = SchemaV1.make_historical_network_tpc_capability_lookup_key()
    try:
        data = rlp.decode(
            self.db[lookup_key],
            sedes=rlp.sedes.FCountableList(
                rlp.sedes.FList([
                    rlp.sedes.f_big_endian_int,
                    rlp.sedes.f_big_endian_int,
                ])),
            use_list=True)
        if sort and len(data) > 0:
            data.sort()
        return data
    except KeyError:
        return None
def load_historical_minimum_gas_price(
        self,
        sort: bool = True,
        return_int: bool = True) -> Optional[List[List[Union[Timestamp, int]]]]:
    '''
    Saved as [timestamp, min gas price] pairs. The stored gas price is divided
    by 100 on load to restore the decimal places.
    '''
    lookup_key = SchemaV1.make_historical_minimum_gas_price_lookup_key()
    try:
        data = rlp.decode(
            self.db[lookup_key],
            sedes=rlp.sedes.FCountableList(
                rlp.sedes.FList([
                    rlp.sedes.f_big_endian_int,
                    rlp.sedes.f_big_endian_int,
                ])),
            use_list=True)
        if sort and len(data) > 0:
            data.sort()

        return_data = []
        for timestamp_gas_price in data:
            if return_int:
                return_data.append([
                    timestamp_gas_price[0],
                    int(timestamp_gas_price[1] / 100),
                ])
            else:
                return_data.append([
                    timestamp_gas_price[0],
                    timestamp_gas_price[1] / 100,
                ])
        return return_data
    except KeyError:
        return None
def get_devp2p_cmd_id(msg: bytes) -> int:
    """Return the cmd_id for the given devp2p msg.

    The cmd_id, also known as the payload type, is always the first entry of
    the RLP, interpreted as an integer.
    """
    return rlp.decode(msg[:1], sedes=rlp.sedes.big_endian_int)
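# Hedged usage sketch (not from the source): a message whose cmd_id is 16,
# followed by an empty RLP list payload. Only rlp.encode / rlp.decode from the
# public `rlp` package are assumed; get_devp2p_cmd_id is the function above.
import rlp

example_msg = rlp.encode(16) + rlp.encode([])   # b'\x10' + b'\xc0'
assert get_devp2p_cmd_id(example_msg) == 16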
def get_historical_root_hashes(
        self,
        after_timestamp: Optional[Timestamp] = None) -> Optional[List[List[Union[Timestamp, Hash32]]]]:
    '''
    This has been performance optimized (December 22, 2018).
    '''
    # Automatically sort when loading because we know the data will never be
    # a mix of lists and tuples.
    historical_head_root_lookup_key = SchemaV1.make_historical_head_root_lookup_key()
    try:
        data = rlp.decode(
            self.db[historical_head_root_lookup_key],
            sedes=rlp.sedes.FCountableList(rlp.sedes.FList([f_big_endian_int, hash32])),
            use_list=True)
        data.sort()
    except KeyError:
        return None

    if after_timestamp is None:
        to_return = data
    else:
        timestamps = [x[0] for x in data]
        index = bisect.bisect_left(timestamps, after_timestamp)
        to_return = data[index:]

    if len(to_return) == 0:
        return None

    return to_return
def test_decode(name, in_out):
    msg_format = 'Test {} failed (decoded {} to {} instead of {})'
    rlp_string = decode_hex(in_out['out'])
    decoded = decode(rlp_string)

    with pytest.raises(DecodingError):
        decode(rlp_string + b'\x00')
    assert decoded == decode(rlp_string + b'\x00', strict=False)
    assert decoded == evaluate(decode_lazy(rlp_string))

    expected = in_out['in']
    sedes = infer_sedes(expected)
    data = sedes.deserialize(decoded)
    assert compare_nested(data, decode(rlp_string, sedes))
    if not compare_nested(data, expected):
        pytest.fail(msg_format.format(name, rlp_string, decoded, expected))
def test_transaction_fixtures(fixture, fixture_transaction_class):
    TransactionClass = fixture_transaction_class
    try:
        txn = rlp.decode(fixture['rlp_templates'], sedes=TransactionClass)
    except (rlp.DeserializationError, rlp.exceptions.DecodingError):
        assert 'hash' not in fixture, "Transaction was supposed to be valid"
    except TypeError as err:
        # Ensure we are only letting type errors pass that are caused by
        # RLP elements that are lists when they shouldn't be lists
        # (see: /TransactionTests/ttWrongRLP/RLPElementIsListWhenItShouldntBe.json)
        assert err.args == ("'bytes' object cannot be interpreted as an integer",)
        assert 'hash' not in fixture, "Transaction was supposed to be valid"
    # fixture normalization changes the fixture key from rlp_templates to rlpHex
    except KeyError:
        assert fixture['rlpHex']
        assert 'hash' not in fixture, "Transaction was supposed to be valid"
    else:
        # check parameter correctness
        try:
            txn.validate()
        except ValidationError:
            return

        if 'sender' in fixture:
            assert 'hash' in fixture, "Transaction was supposed to be invalid"
            assert is_same_address(txn.get_sender(), fixture['sender'])
def test_pre_EIP155_transaction_signature_validation(transaction_class, txn_fixture):
    if txn_fixture['chainId'] is not None:
        pytest.skip("Only testing non-EIP155 transactions")

    transaction = rlp.decode(decode_hex(txn_fixture['signed']), sedes=transaction_class)
    validate_transaction_signature(transaction)
    transaction.check_signature_validity()
def get_timestamp_of_last_health_request(self) -> Timestamp:
    lookup_key = SchemaV1.make_latest_peer_node_health_timestamp_lookup_key()
    try:
        return rlp.decode(self.db[lookup_key], sedes=rlp.sedes.f_big_endian_int)
    except KeyError:
        return 0
def get_score(self, block_hash: Hash32) -> int:
    try:
        encoded_score = self.db[SchemaV1.make_block_hash_to_score_lookup_key(block_hash)]
    except KeyError:
        raise HeaderNotFound("No header with hash {0} found".format(
            encode_hex(block_hash)))
    return rlp.decode(encoded_score, sedes=rlp.sedes.big_endian_int)
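# Hedged round-trip sketch (assumption: scores are persisted as plain RLP
# big-endian integers under the lookup key, mirroring get_score above).
import rlp

encoded_score = rlp.encode(1000, sedes=rlp.sedes.big_endian_int)
assert rlp.decode(encoded_score, sedes=rlp.sedes.big_endian_int) == 1000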
def get_smart_contracts_with_pending_transactions(self) -> List[Address]:
    key = SchemaV1.make_smart_contracts_with_pending_transactions_lookup_key()
    try:
        address_list = rlp.decode(
            self.db[key],
            sedes=rlp.sedes.FCountableList(address),
            use_list=True)
        return address_list
    except KeyError:
        return []
def micro_header_hash(self) -> Hash32:
    if self._micro_header_hash is None:
        header_parts = rlp.decode(rlp.encode(self), use_list=True)
        header_parts_for_hash = (
            header_parts[:4] +
            [header_parts[6]] +
            header_parts[9:12] +
            header_parts[-3:]
        )
        self._micro_header_hash = keccak(rlp.encode(header_parts_for_hash))
    return self._micro_header_hash
def send(self, header: bytes, body: bytes) -> None:
    cmd_id = rlp.decode(body[:1], sedes=rlp.sedes.big_endian_int)
    self.logger.trace("Sending msg with cmd id %d to %s", cmd_id, self)
    if self.is_closing:
        self.logger.error(
            "Attempted to send msg with cmd id %d to disconnected peer %s", cmd_id, self)
        return
    self.writer.write(self.encrypt(header, body))
def _get_account(self, address):
    account_lookup_key = SchemaV1.make_account_lookup_key(address)
    rlp_account = self._journaldb.get(account_lookup_key, b'')
    if rlp_account:
        account = rlp.decode(rlp_account, sedes=Account)
        # account = hm_decode(rlp_account, sedes_classes=[Account])
    else:
        account = Account()
    return account
def revert_to_account_from_hash(self, account_hash, wallet_address):
    validate_canonical_address(wallet_address, title="Address")
    validate_is_bytes(account_hash, title="account_hash")
    lookup_key = SchemaV1.make_account_by_hash_lookup_key(account_hash)
    try:
        rlp_encoded = self.db[lookup_key]
        account = rlp.decode(rlp_encoded, sedes=Account)
        self._set_account(wallet_address, account)
    except KeyError:
        raise StateRootNotFound()
def test_list_of_serializable_decoding_rlp_caching(rlp_obj):
    rlp_obj_code = encode(rlp_obj, cache=False)
    L = [rlp_obj, rlp_obj]
    list_code = encode(L, cache=False)

    L2 = decode(list_code, sedes=List((type(rlp_obj), type(rlp_obj))), recursive_cache=True)

    assert L2[0]._cached_rlp == rlp_obj_code
    assert L2[1]._cached_rlp == rlp_obj_code
def get_message_for_signing(self, chain_id: int = None) -> bytes:
    if chain_id is None:
        chain_id = self.chain_id

    transaction_parts = rlp.decode(rlp.encode(self), use_list=True)
    transaction_parts_for_signature = (
        transaction_parts[:-3] + [int_to_big_endian(chain_id), b'', b'']
    )
    message = rlp.encode(transaction_parts_for_signature)
    return message
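# Hedged sketch of the EIP-155-style substitution used above, with illustrative
# field values only (the real transaction layout is not reproduced here): the
# trailing (v, r, s) entries are replaced with (chain_id, b'', b'') before
# re-encoding, so the signature commits to the chain id. Only rlp and
# eth_utils.int_to_big_endian are assumed.
import rlp
from eth_utils import int_to_big_endian

parts = [b'\x01', b'\x09', b'', b'\x1b', b'\xaa', b'\xbb']   # ..., v, r, s
parts_for_signature = parts[:-3] + [int_to_big_endian(1), b'', b'']
message_for_signing = rlp.encode(parts_for_signature)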
def test_deserialization_for_custom_init_method():
    type_3 = RLPType3(2, 1, 3)
    assert type_3.field1 == 1
    assert type_3.field2 == 2
    assert type_3.field3 == 3

    result = decode(encode(type_3), sedes=RLPType3)
    assert result.field1 == 1
    assert result.field2 == 2
    assert result.field3 == 3
def test_pre_EIP155_transaction_sender_extraction(transaction_class, txn_fixture):
    if txn_fixture['chainId'] is not None:
        pytest.skip("Only testing non-EIP155 transactions")

    key = keys.PrivateKey(decode_hex(txn_fixture['key']))
    transaction = rlp.decode(decode_hex(txn_fixture['signed']), sedes=transaction_class)
    sender = extract_transaction_sender(transaction)

    assert is_same_address(sender, transaction.sender)
    assert is_same_address(sender, key.public_key.to_canonical_address())
def load_chronological_block_window(self, timestamp: Timestamp) -> Optional[List[Union[int, Hash32]]]:
    validate_uint256(timestamp, title='timestamp')
    if timestamp % TIME_BETWEEN_HEAD_HASH_SAVE != 0:
        raise InvalidHeadRootTimestamp(
            "Can only save or load chronological blocks for timestamps in increments of "
            "{} seconds.".format(TIME_BETWEEN_HEAD_HASH_SAVE))

    chronological_window_lookup_key = SchemaV1.make_chronological_window_lookup_key(timestamp)
    try:
        data = rlp.decode(
            self.db[chronological_window_lookup_key],
            sedes=rlp.sedes.FCountableList(rlp.sedes.FList([f_big_endian_int, hash32])),
            use_list=True)
        data.sort()
        return data
    except KeyError:
        return None
async def sendRawBlock(self, encoded_micro_block):
    chain = self.get_new_chain()

    encoded_micro_block = decode_hex(encoded_micro_block)
    micro_block = rlp.decode(encoded_micro_block, sedes=chain.get_vm().micro_block_class)

    block_class = self._chain_class.get_vm_class_for_block_timestamp(
        timestamp=micro_block.header.timestamp).get_block_class()
    full_block = block_class.from_micro_block(micro_block)

    min_time_between_blocks = chain.get_vm(header=full_block.header).min_time_between_blocks

    # Validate the block here
    if full_block.header.timestamp < (int(time.time()) - MAX_ALLOWED_AGE_OF_NEW_RPC_BLOCK):
        raise BaseRPCError("The block timestamp is too old. We can only import new blocks over RPC.")

    if full_block.header.timestamp > int(time.time() + BLOCK_TIMESTAMP_FUTURE_ALLOWANCE):
        raise BaseRPCError("The block timestamp is in the future and cannot be accepted. "
                           "You should check your computer clock.")

    try:
        canonical_head = chain.chaindb.get_canonical_head(full_block.header.chain_address)
        if canonical_head.block_number >= full_block.header.block_number:
            raise BaseRPCError("You are attempting to replace an existing block. This is not allowed.")

        if full_block.header.timestamp < (canonical_head.timestamp + min_time_between_blocks):
            raise BaseRPCError(
                "Not enough time has passed for you to add a new block yet. New blocks can only be "
                "added to your chain every {} seconds".format(min_time_between_blocks))
    except CanonicalHeadNotFound:
        pass

    if (full_block.header.block_number != 0 and
            not chain.chaindb.is_in_canonical_chain(full_block.header.parent_hash)):
        raise BaseRPCError("Parent block not found on canonical chain.")

    # Check our current syncing stage. Must be sync stage 4.
    current_sync_stage_response = await self._event_bus.request(
        CurrentSyncStageRequest()
    )
    if current_sync_stage_response.sync_stage < FULLY_SYNCED_STAGE_ID:
        raise BaseRPCError("This node is still syncing with the network. "
                           "Please wait until this node has synced.")

    if not does_block_meet_min_gas_price(full_block, chain):
        required_min_gas_price = self._chain.chaindb.get_required_block_min_gas_price()
        raise Exception("Block transactions don't meet the minimum gas price requirement of {}".format(
            required_min_gas_price))

    self._event_bus.broadcast(
        NewBlockEvent(block=cast(P2PBlock, full_block), from_rpc=True)
    )

    return True
def decode_auth_eip8(ciphertext: bytes,
                     privkey: datatypes.PrivateKey,
                     ) -> Tuple[datatypes.Signature, datatypes.PublicKey, bytes, int]:
    """Decode EIP-8 auth message format"""
    # The length of the actual msg is stored in plaintext on the first two bytes.
    encoded_size = ciphertext[:2]
    auth_msg = ciphertext[2:]
    message = ecies.decrypt(auth_msg, privkey, shared_mac_data=encoded_size)
    values = rlp.decode(message, sedes=eip8_auth_sedes, strict=False)
    signature_bytes, pubkey_bytes, nonce, version = values[:4]
    return (
        keys.Signature(signature_bytes=signature_bytes),
        keys.PublicKey(pubkey_bytes),
        nonce,
        version,
    )
def load_root_hash_backup(self) -> List[Tuple[int, Hash32]]:
    db_key = SchemaV1.make_chain_head_root_hash_backup_key()
    try:
        data = rlp.decode(
            self.db[db_key],
            sedes=rlp.sedes.FCountableList(
                rlp.sedes.FList([f_big_endian_int, hash32])),
            use_list=True)
        data.sort()
        return data
    except KeyError:
        return []
def get_availability(self, chunk_root: Hash32) -> Availability:
    key = make_collation_availability_lookup_key(chunk_root)
    try:
        availability_entry = self.db[key]
    except KeyError:
        return Availability.UNKNOWN
    else:
        available = bool(rlp.decode(availability_entry, big_endian_int))
        if available:
            return Availability.AVAILABLE
        else:
            return Availability.UNAVAILABLE
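# Minimal sketch of the storage convention assumed by get_availability above:
# the availability flag is persisted as an RLP-encoded 0/1 integer, so decoding
# and casting to bool recovers it.
import rlp
from rlp.sedes import big_endian_int

assert bool(rlp.decode(rlp.encode(1, sedes=big_endian_int), big_endian_int)) is True
assert bool(rlp.decode(rlp.encode(0, sedes=big_endian_int), big_endian_int)) is False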
def get_block_header_by_hash(self, block_hash: Hash32) -> BlockHeader:
    """
    Returns the requested block header as specified by block hash.

    Raises HeaderNotFound if it is not present in the db.
    """
    validate_word(block_hash, title="Block Hash")
    try:
        header_rlp = self.db[block_hash]
    except KeyError:
        raise HeaderNotFound("No header with hash {0} found".format(
            encode_hex(block_hash)))
    return rlp.decode(header_rlp, BlockHeader)
def get_saved_state_root(cls, db) -> Hash32:
    """
    Loads the last saved state root.
    """
    current_state_root_lookup_key = SchemaV1.make_current_state_root_lookup_key()
    try:
        loaded_state_root = rlp.decode(db[current_state_root_lookup_key], sedes=trie_root)
    except KeyError:
        raise ValueError("There is no saved state root to load")
    return loaded_state_root
def get_storage(self, address, slot):
    validate_canonical_address(address, title="Storage Address")
    validate_uint256(slot, title="Storage Slot")

    account = self._get_account(address)
    storage = HashTrie(HexaryTrie(self._journaldb, account.storage_root))

    slot_as_key = pad32(int_to_big_endian(slot))
    if slot_as_key in storage:
        encoded_value = storage[slot_as_key]
        return rlp.decode(encoded_value, sedes=rlp.sedes.big_endian_int)
    else:
        return 0