def add_log_entry(self, account: Address, topics: List[int], data: bytes) -> None:
    """Validate a log entry and append it to the computation's log list.

    The entry is stored with a monotonically increasing counter taken from the
    transaction context so log ordering is preserved.
    """
    validate_canonical_address(account, title="Log entry address")
    for topic in topics:
        validate_uint256(topic, title="Log entry topic")
    validate_is_bytes(data, title="Log entry data")
    log_counter = self.transaction_context.get_next_log_counter()
    self._log_entries.append((log_counter, account, topics, data))
def delete_receivable_transaction(self, address: Address, transaction_hash: Hash32) -> None:
    """Remove one receivable transaction from an account's pending list.

    :param address: canonical wallet address owning the receivable transaction
    :param transaction_hash: hash of the transaction to remove
    :raises ReceivableTransactionNotFound: if the hash is not in the account's
        receivable transaction list
    """
    validate_canonical_address(address, title="Storage Address")
    validate_is_bytes(transaction_hash, title="Transaction Hash")
    self.logger.debug("deleting receivable tx {} from account {}".format(encode_hex(transaction_hash), encode_hex(address)))
    account = self._get_account(address)
    receivable_transactions = list(self.get_receivable_transactions(address))
    # enumerate + for/else replaces the original manual index counter and
    # `found == True` flag; behavior is identical.
    for i, tx_key in enumerate(receivable_transactions):
        if tx_key.transaction_hash == transaction_hash:
            del receivable_transactions[i]
            break
    else:
        raise ReceivableTransactionNotFound("transaction hash {0} not found in receivable_transactions database for wallet {1}".format(transaction_hash, address))
    self._set_account(address, account.copy(receivable_transactions=tuple(receivable_transactions)))
    # If this is a smart contract and its last pending transaction was just
    # removed, drop it from the pending-transaction contract index.
    if self.get_code_hash(address) != EMPTY_SHA3:
        if len(receivable_transactions) == 0:
            self.logger.debug("Removing address from list of smart contracts with pending transactions")
            self._remove_address_from_smart_contracts_with_pending_transactions(address)
def add_receivable_transaction(self, address: Address, transaction_hash: Hash32, sender_block_hash: Hash32, is_contract_deploy: bool = False) -> None:
    """Append a receivable transaction key to an account's pending list.

    :raises ValueError: if the transaction hash is already recorded as receivable
    """
    validate_canonical_address(address, title="Wallet Address")
    validate_is_bytes(transaction_hash, title="Transaction Hash")
    validate_is_bytes(sender_block_hash, title="Sender Block Hash")

    # this is the wallet address people send money to when slashed. It is a sink
    if address == SLASH_WALLET_ADDRESS:
        return

    account = self._get_account(address)
    existing = account.receivable_transactions

    # Guard against saving a duplicate receivable transaction.
    if any(tx_key.transaction_hash == transaction_hash for tx_key in existing):
        raise ValueError("Tried to save a receivable transaction that was already saved. TX HASH = {}".format(encode_hex(transaction_hash)))

    updated = existing + (TransactionKey(transaction_hash, sender_block_hash),)
    self.logger.debug("Adding receivable transaction {} to account {}".format(encode_hex(transaction_hash), encode_hex(address)))
    self._set_account(address, account.copy(receivable_transactions=updated))

    # If this is a smart contract, track it in the index of contracts that
    # have pending transactions. (We only need to run this when adding the
    # first one.)
    if is_contract_deploy or self.get_code_hash(address) != EMPTY_SHA3:
        self.logger.debug("Adding address to list of smart contracts with pending transactions")
        self._add_address_to_smart_contracts_with_pending_transactions(address)
def get_receivable_transaction(self, address: Address, transaction_hash: Hash32) -> Optional[TransactionKey]:
    """Return the TransactionKey for a receivable transaction, or None if absent."""
    validate_is_bytes(transaction_hash, title="Transaction Hash")
    matches = (
        tx_key
        for tx_key in self.get_receivable_transactions(address)
        if tx_key.transaction_hash == transaction_hash
    )
    return next(matches, None)
def add_receivable_transaction(self, address, transaction_hash, sender_block_hash):
    """Record a receivable transaction key on an account.

    :raises ValueError: if the transaction is already saved as receivable
    """
    validate_canonical_address(address, title="Storage Address")
    validate_is_bytes(transaction_hash, title="Transaction Hash")
    validate_is_bytes(sender_block_hash, title="Sender Block Hash")

    # this is the wallet address people send money to when slashed. It is a sink
    if address == SLASH_WALLET_ADDRESS:
        return

    # first lets make sure we don't already have the transaction
    # (the lookup returns False, not None, when the tx is absent)
    if self.get_receivable_transaction(address, transaction_hash) is not False:
        raise ValueError(
            "Tried to save a receivable transaction that was already saved"
        )

    account = self._get_account(address)
    updated = account.receivable_transactions + (
        TransactionKey(transaction_hash, sender_block_hash),
    )
    self.logger.debug(
        "adding receivable transaction {}".format(transaction_hash))
    self._set_account(
        address,
        account.copy(receivable_transactions=updated))
def delete_receivable_transaction(self, address, transaction_hash):
    """Remove a receivable transaction from an account's pending list.

    :raises ValueError: if the hash is not in the account's receivable list
    """
    validate_canonical_address(address, title="Storage Address")
    validate_is_bytes(transaction_hash, title="Transaction Hash")
    self.logger.debug("deleting receivable tx {}".format(transaction_hash))
    account = self._get_account(address)
    receivable_transactions = list(
        self.get_receivable_transactions(address))
    # enumerate + for/else replaces the original manual index counter and
    # `found == True` flag; behavior is identical.
    for i, tx_key in enumerate(receivable_transactions):
        if tx_key.transaction_hash == transaction_hash:
            del receivable_transactions[i]
            break
    else:
        raise ValueError(
            "transaction hash {0} not found in receivable_transactions database for wallet {1}"
            .format(transaction_hash, address))
    self._set_account(
        address,
        account.copy(
            receivable_transactions=tuple(receivable_transactions)))
def delete_block_hash_from_chronological_window(
        self,
        head_hash: Hash32,
        timestamp: Timestamp = None,
        window_timestamp: Timestamp = None) -> None:
    '''
    If timestamp is given, then deletes [timestamp, head_hash] from the list. This is fastest.
    But if head_hash and window_timestamp is given, without a timestamp, then we
    search the list for the given hash and delete it. This is slower.

    :param head_hash: hash of the block to remove
    :param timestamp: exact timestamp the hash was recorded under (fast path)
    :param window_timestamp: time window to search for the hash (slow path)
    :return:
    '''
    validate_is_bytes(head_hash, title='Head Hash')
    # BUG FIX: timestamp was validated unconditionally, so passing
    # timestamp=None (the documented search mode) raised a validation error
    # and the search branch below was unreachable.
    if timestamp is not None:
        validate_uint256(timestamp, title='timestamp')

    if timestamp is None and window_timestamp is not None:
        # we search now for just the hash
        if window_timestamp > int(time.time()) - (
                NUMBER_OF_HEAD_HASH_TO_SAVE) * TIME_BETWEEN_HEAD_HASH_SAVE:
            # unlike the root hashes, this window is for the blocks added after the time
            window_timestamp = int(
                window_timestamp / TIME_BETWEEN_HEAD_HASH_SAVE) * TIME_BETWEEN_HEAD_HASH_SAVE

            data = self.load_chronological_block_window(window_timestamp)
            # Robustness fix: the window may not exist at all.
            if data is None:
                return
            hashes = [x[1] for x in data]
            try:
                idx = hashes.index(head_hash)
                del data[idx]
            except ValueError:
                # hash not present in this window; nothing to delete
                return
            self.save_chronological_block_window(data, window_timestamp)
    else:
        # Fast path: timestamp is required here. Validate so that calling with
        # both arguments None fails the same way the original code did.
        validate_uint256(timestamp, title='timestamp')
        # only delete blocks within the retained time period
        if timestamp > int(time.time()) - (
                NUMBER_OF_HEAD_HASH_TO_SAVE) * TIME_BETWEEN_HEAD_HASH_SAVE:
            # unlike the root hashes, this window is for the blocks added after the time
            window_for_this_block = int(
                timestamp / TIME_BETWEEN_HEAD_HASH_SAVE) * TIME_BETWEEN_HEAD_HASH_SAVE

            data = self.load_chronological_block_window(window_for_this_block)
            if data is not None:
                # most of the time we will be deleting a recent entry, so the
                # exact-match remove is cheap
                try:
                    data.remove([timestamp, head_hash])
                except ValueError:
                    # entry was not present; keep the window as-is
                    pass
                self.save_chronological_block_window(
                    data, window_for_this_block)
def get_receivable_transaction(self, address, transaction_hash):
    """Return the matching receivable TransactionKey, or False if not found.

    NOTE: this variant returns False (not None) when the transaction is absent;
    callers compare with `is not False`.
    """
    validate_is_bytes(transaction_hash, title="Transaction Hash")
    matches = (
        tx_key
        for tx_key in self.get_receivable_transactions(address)
        if tx_key.transaction_hash == transaction_hash
    )
    return next(matches, False)
def set_current_syncing_info(self, timestamp: Timestamp, head_root_hash: Hash32) -> None:
    """RLP-encode and persist the current syncing info under its schema key."""
    validate_is_bytes(head_root_hash, title='Head Root Hash')
    validate_uint256(timestamp, title='timestamp')
    lookup_key = SchemaV1.make_current_syncing_info_lookup_key()
    self.db[lookup_key] = rlp.encode([timestamp, head_root_hash], sedes=CurrentSyncingInfo)
def initialize_historical_root_hashes(self, root_hash: Hash32, timestamp: Timestamp) -> None:
    """Seed the historical root hash list with a single (timestamp, root_hash) entry."""
    validate_is_bytes(root_hash, title='Head Hash')
    validate_historical_timestamp(timestamp, title="timestamp")
    # populate the root hash timestamp list with its first entry
    self.save_historical_root_hashes([[timestamp, root_hash]])
def set_code(self, address, code):
    """Store contract code keyed by its keccak hash and point the account at it."""
    validate_canonical_address(address, title="Storage Address")
    validate_is_bytes(code, title="Code")
    code_hash = keccak(code)
    self._journaldb[code_hash] = code
    account = self._get_account(address)
    self._set_account(address, account.copy(code_hash=code_hash))
def revert_to_account_from_hash(self, account_hash, wallet_address):
    """Restore a wallet's account state from a previously saved account hash.

    :raises StateRootNotFound: if no account is stored under account_hash
    """
    validate_canonical_address(wallet_address, title="Address")
    validate_is_bytes(account_hash, title="account_hash")
    lookup_key = SchemaV1.make_account_by_hash_lookup_key(account_hash)
    try:
        rlp_encoded = self.db[lookup_key]
    except KeyError:
        raise StateRootNotFound()
    restored_account = rlp.decode(rlp_encoded, sedes=Account)
    self._set_account(wallet_address, restored_account)
def __init__(self,
             gas,
             to,
             sender,
             value,
             data,
             code,
             depth=0,
             create_address=None,
             code_address=None,
             should_transfer_value=True,
             is_static=False,
             refund_amount=0):
    """Validate and store all fields of a VM message.

    :param gas: gas available to the message (uint256)
    :param to: recipient address, or CREATE_CONTRACT_ADDRESS for creation
    :param sender: canonical sender address
    :param value: amount transferred (uint256)
    :param data: call data bytes
    :param code: code to execute
    :param depth: call depth, >= 0
    :param create_address: storage address for contract creation, if any
    :param code_address: address the code was loaded from, if different
    :param should_transfer_value: whether value is actually moved
    :param is_static: whether state mutation is forbidden (STATICCALL)
    :param refund_amount: gas refund accumulated so far
    """
    validate_uint256(gas, title="Message.gas")
    self.gas = gas  # type: int

    # CREATE_CONTRACT_ADDRESS is a sentinel, not a canonical address.
    if to != CREATE_CONTRACT_ADDRESS:
        validate_canonical_address(to, title="Message.to")
    self.to = to

    validate_canonical_address(sender, title="Message.sender")
    self.sender = sender

    validate_uint256(value, title="Message.value")
    self.value = value

    validate_is_bytes(data, title="Message.data")
    self.data = data

    validate_is_integer(depth, title="Message.depth")
    validate_gte(depth, minimum=0, title="Message.depth")
    self.depth = depth

    validate_is_bytes(code, title="Message.code")
    self.code = code

    if create_address is not None:
        validate_canonical_address(create_address, title="Message.storage_address")
    self.storage_address = create_address

    if code_address is not None:
        validate_canonical_address(code_address, title="Message.code_address")
    self.code_address = code_address

    validate_is_boolean(should_transfer_value, title="Message.should_transfer_value")
    self.should_transfer_value = should_transfer_value

    # BUG FIX: the original validated `depth` here (copy-paste error), so
    # refund_amount was stored without any validation.
    validate_is_integer(refund_amount, title="Message.refund_amount")
    self.refund_amount = refund_amount

    validate_is_boolean(is_static, title="Message.is_static")
    self.is_static = is_static
def get_head_block_hashes(self, root_hash=None, reverse=False):
    """
    Generator over all head root hash leaves of the binary trie.

    Defaults to the current root hash when none is given. Validation happens
    lazily, on first iteration, because this is a generator.
    """
    root_hash = self.root_hash if root_hash is None else root_hash
    validate_is_bytes(root_hash, title='Root Hash')
    yield from self._trie.get_leaf_nodes(root_hash, reverse)
def save_single_historical_root_hash(self, root_hash: Hash32, timestamp: Timestamp) -> None:
    """Insert one (timestamp, root_hash) pair into the historical list, keeping it sorted."""
    validate_is_bytes(root_hash, title='Head Hash')
    validate_historical_timestamp(timestamp, title="timestamp")
    existing = self.get_historical_root_hashes()
    if existing is None:
        updated = [[timestamp, root_hash]]
    else:
        # SortedList keeps the entries ordered by timestamp on insert.
        sorted_entries = SortedList(existing)
        sorted_entries.add([timestamp, root_hash])
        updated = list(sorted_entries)
    self.save_historical_root_hashes(updated)
def save_single_historical_root_hash(self, root_hash: Hash32, timestamp: Timestamp) -> None:
    """Insert or overwrite the root hash stored for a given timestamp.

    Uses a dict keyed by timestamp, so an existing entry for the same
    timestamp is replaced rather than duplicated.
    """
    validate_is_bytes(root_hash, title='Head Hash')
    validate_historical_timestamp(timestamp, title="timestamp")
    existing = self.get_historical_root_hashes()
    if existing is None:
        self.save_historical_root_hashes([[timestamp, root_hash]])
        return
    entries_by_timestamp = dict(existing)
    entries_by_timestamp[timestamp] = root_hash
    self.save_historical_root_hashes(list(entries_by_timestamp.items()))
def get_next_n_head_block_hashes(self, prev_head_hash=ZERO_HASH32, window_start=0, window_length=1, root_hash=None, reverse=False):
    """
    Gets the next head block hash in the leaves of the binary trie

    Walks the trie leaves and returns up to `window_length` head hashes,
    starting `window_start` positions after `prev_head_hash` (or from the
    beginning when prev_head_hash is ZERO_HASH32 and window_start is 0).
    If the window runs past the last leaf, the last leaf seen is returned
    so the result is never empty.

    NOTE(review): formatting of this block was reconstructed from a
    collapsed single-line source; the nesting of the two `j += 1`
    increments in particular should be confirmed against the original.
    """
    validate_is_bytes(prev_head_hash, title='prev_head_hash')
    validate_uint256(window_start, title='window_start')
    validate_uint256(window_length, title='window_length')
    if root_hash is None:
        root_hash = self.root_hash
    validate_is_bytes(root_hash, title='Root Hash')
    output_list = []
    # `next` (shadows the builtin) flags that we've passed the start of the window
    next = False
    i = 0       # number of hashes collected so far
    j = 0       # positions advanced past prev_head_hash
    last = None  # last leaf seen, used as fallback when the window overruns
    for head_hash in self.get_head_block_hashes(root_hash, reverse=reverse):
        if next == True or (prev_head_hash == ZERO_HASH32 and window_start == 0):
            output_list.append(head_hash)
            i += 1
            if i >= window_length:
                return output_list
        if head_hash == prev_head_hash or prev_head_hash == ZERO_HASH32:
            if prev_head_hash == ZERO_HASH32:
                j += 1
            if j >= window_start:
                next = True
            j += 1
        last = head_hash
    #if it gets here then we got to the last chain
    if len(output_list) < 1:
        output_list.append(last)
    return output_list
def get_next_head_block_hash(self, prev_head_hash=ZERO_HASH32, root_hash=None, reverse=False):
    """
    Gets the next head block hash in the leaves of the binary trie.

    Returns the leaf immediately after `prev_head_hash`, or the first leaf
    when prev_head_hash is ZERO_HASH32. Returns None when prev_head_hash is
    the last leaf (or is not found).
    """
    validate_is_bytes(prev_head_hash, title='prev_head_hash')
    if root_hash is None:
        root_hash = self.root_hash
    validate_is_bytes(root_hash, title='Root Hash')
    # Renamed from `next`, which shadowed the builtin; `== True` dropped.
    return_next = False
    for head_hash in self.get_head_block_hashes(root_hash, reverse=reverse):
        if prev_head_hash == ZERO_HASH32 or return_next:
            return head_hash
        if head_hash == prev_head_hash:
            return_next = True
    # prev_head_hash was the last leaf or absent — made the None return explicit.
    return None
def write(self, start_position: int, size: int, value: bytes) -> None:
    """
    Write `value` into memory at `start_position`, growing the buffer if needed.

    :param start_position: byte offset to write at
    :param size: number of bytes to write (must equal len(value))
    :param value: the bytes to write
    """
    if size:
        validate_uint256(start_position)
        validate_uint256(size)
        validate_is_bytes(value)
        validate_length(value, length=size)
        validate_lte(start_position + size, maximum=len(self))
        if len(self._bytes) < start_position + size:
            # BUG FIX: the original computed
            # len(self._bytes) - (start_position + size), which is negative
            # here, so itertools.repeat produced nothing and the buffer was
            # never extended. The count must be the shortfall.
            self._bytes.extend(itertools.repeat(
                0,
                (start_position + size) - len(self._bytes),
            ))
        # Slice assignment replaces the original per-byte copy loop;
        # same result, done in one C-level operation.
        self._bytes[start_position:start_position + size] = value
def add_block_hash_to_chronological_window(self, head_hash: Hash32, timestamp: Timestamp) -> None:
    """Record (timestamp, head_hash) in the chronological block window for its time period.

    Entries older than the retained window are silently ignored.
    """
    self.logger.debug("add_block_hash_to_chronological_window, hash = {}, timestamp = {}".format(encode_hex(head_hash), timestamp))
    validate_is_bytes(head_hash, title='Head Hash')
    validate_uint256(timestamp, title='timestamp')

    earliest_allowed = int(time.time()) - (NUMBER_OF_HEAD_HASH_TO_SAVE) * TIME_BETWEEN_HEAD_HASH_SAVE
    if timestamp < earliest_allowed:
        # only add blocks for the proper time period
        return

    # unlike the root hashes, this window is for the blocks added after the time
    window_for_this_block = int(timestamp / TIME_BETWEEN_HEAD_HASH_SAVE) * TIME_BETWEEN_HEAD_HASH_SAVE
    entries = self.load_chronological_block_window(window_for_this_block)
    if entries is None:
        entries = [[timestamp, head_hash]]
    else:
        entries.append([timestamp, head_hash])
    self.save_chronological_block_window(entries, window_for_this_block)
def validate(self):
    """Validate every field of the transaction, then delegate to the base class.

    Validation order matches the field order so the first invalid field
    determines which error is raised.
    """
    # Core numeric fields.
    validate_uint256(self.nonce, title="Transaction.nonce")
    validate_uint256(self.gas_price, title="Transaction.gas_price")
    validate_uint256(self.gas, title="Transaction.gas")
    # CREATE_CONTRACT_ADDRESS is a sentinel, not a canonical address.
    if self.to != CREATE_CONTRACT_ADDRESS:
        validate_canonical_address(self.to, title="Transaction.to")
    validate_uint256(self.value, title="Transaction.value")
    validate_is_bytes(self.data, title="Transaction.data")

    # Signature components are uint256 values.
    for name, sig_value in (("v", self.v), ("r", self.r), ("s", self.s)):
        validate_uint256(sig_value, title="Transaction." + name)

    # r and s must each lie in [1, secp256k1 n).
    for name, sig_value in (("r", self.r), ("s", self.s)):
        validate_lt_secpk1n(sig_value, title="Transaction." + name)
        validate_gte(sig_value, minimum=1, title="Transaction." + name)

    # v is bounded by the chain-specific range.
    validate_gte(self.v, minimum=self.v_min, title="Transaction.v")
    validate_lte(self.v, maximum=self.v_max, title="Transaction.v")

    super(FrontierTransaction, self).validate()
def get_head_block_hashes_by_idx_list(self, idx_list: List[int], root_hash: Hash32=None) -> List[Hash32]:
    """
    Gets the head block hashes at the given leaf positions of the binary trie.

    Iteration stops early once every requested index has been found.
    """
    if root_hash is None:
        root_hash = self.root_hash
    remaining = set(idx_list)
    validate_is_bytes(root_hash, title='Root Hash')

    result = []
    for position, head_hash in enumerate(self.get_head_block_hashes(root_hash)):
        if position in remaining:
            result.append(head_hash)
            remaining.remove(position)
            if not remaining:
                break
    return result
def output(self, value: bytes) -> None:
    """
    Set the return value of the computation.

    :param value: the raw bytes to expose as the computation's output
    """
    validate_is_bytes(value)
    self._output = value
def set_chain_head_hash(self, address, head_hash):
    """Map a wallet address to its chain head hash in the trie cache."""
    validate_canonical_address(address, title="Wallet Address")
    validate_is_bytes(head_hash, title='Head Hash')
    self._trie_cache[address] = head_hash
def add_block_hash_to_timestamp(self, address, head_hash, timestamp):
    """
    Set (or delete, when head_hash is BLANK_HASH) the chain head hash for
    `address` in the historical root hash saved at `timestamp`, then
    propagate that change forward through every newer saved root hash.

    :param address: canonical wallet address whose chain head changes
    :param head_hash: new head hash, or BLANK_HASH to remove the chain
    :param timestamp: must be a multiple of TIME_BETWEEN_HEAD_HASH_SAVE and
        not in the future
    :raises InvalidHeadRootTimestamp: if timestamp is in the future or not
        aligned to the save interval

    NOTE(review): formatting of this block was reconstructed from a
    collapsed source line; statement nesting (especially the scope of the
    forward-propagation loop) should be confirmed against the original.
    """
    self.logger.debug("add_block_hash_to_timestamp")
    validate_canonical_address(address, title="Wallet Address")
    validate_is_bytes(head_hash, title='Head Hash')
    validate_uint256(timestamp, title='timestamp')

    # The window currently being written: the next aligned boundary.
    currently_saving_window = int(time.time() / TIME_BETWEEN_HEAD_HASH_SAVE) * TIME_BETWEEN_HEAD_HASH_SAVE + TIME_BETWEEN_HEAD_HASH_SAVE
    # make sure it isn't in the future
    if timestamp > currently_saving_window:
        raise InvalidHeadRootTimestamp()
    # first make sure the timestamp is correct (aligned to the save interval).
    if timestamp % TIME_BETWEEN_HEAD_HASH_SAVE != 0:
        raise InvalidHeadRootTimestamp()

    # Root hash saved at-or-before `timestamp`, with the time it was saved at.
    starting_timestamp, existing_root_hash = self.get_historical_root_hash(timestamp, return_timestamp=True)

    historical_roots = self.get_historical_root_hashes()
    if historical_roots is None:
        # No history yet: apply the change to the live trie and start the
        # historical list with the resulting root.
        if head_hash == BLANK_HASH:
            self.delete_chain_head_hash(address)
        else:
            self.set_chain_head_hash(address, head_hash)
        self.persist()
        historical_roots = [[timestamp, self.root_hash]]
    else:
        if starting_timestamp is None:
            # this means there is no saved root hash that is at this time or before it.
            # so we have no root hash to load
            self.logger.debug("Tried appending block hash to timestamp for time earlier than earliest timestamp. "
                              "Adding to timestamp {}. ".format(timestamp))
        else:
            # Load the trie at the pre-existing root, apply the change, and
            # persist to get the new root for this timestamp.
            new_blockchain_head_db = ChainHeadDB(self.db, existing_root_hash)
            if head_hash == BLANK_HASH:
                new_blockchain_head_db.delete_chain_head_hash(address)
            else:
                new_blockchain_head_db.set_chain_head_hash(address, head_hash)
            new_blockchain_head_db.persist()
            new_root_hash = new_blockchain_head_db.root_hash

            if starting_timestamp == timestamp:
                # we already had a root hash for this timestamp. just update the existing one.
                # self.logger.debug("adding block hash to timestamp without propogating. root hash already existed. updating for time {}".format(timestamp))
                historical_roots_dict = dict(historical_roots)
                historical_roots_dict[timestamp] = new_root_hash
                historical_roots = list(historical_roots_dict.items())
                # self.logger.debug("finished adding block to timestamp. last_hist_root = {}, current_root_hash = {}".format(historical_roots[-1][1], self.root_hash))
                # self.logger.debug(new_root_hash)
            else:
                # self.logger.debug("adding block hash to timestamp without propogating. root hash didnt exist")
                # sorted_historical_roots = SortedList(historical_roots)
                # Fill the gap between the nearest older save and this
                # timestamp with the pre-change root, then add the new root.
                historical_roots_dict = dict(historical_roots)
                for loop_timestamp in range(starting_timestamp, timestamp, TIME_BETWEEN_HEAD_HASH_SAVE):
                    historical_roots_dict[loop_timestamp] = existing_root_hash
                historical_roots_dict[timestamp] = new_root_hash
                historical_roots = list(historical_roots_dict.items())

        # now propogate the new head hash to any saved historical root hashes newer than this one.
        # effeciently do this by starting from the end and working back. we can assume
        if historical_roots[-1][0] > timestamp:
            self.logger.debug("propogating historical root hash timestamps forward")
            for i in range(len(historical_roots) - 1, -1, -1):
                if historical_roots[i][0] <= timestamp:
                    break
                root_hash_to_load = historical_roots[i][1]
                new_blockchain_head_db = ChainHeadDB(self.db, root_hash_to_load)
                if head_hash == BLANK_HASH:
                    new_blockchain_head_db.delete_chain_head_hash(address)
                else:
                    new_blockchain_head_db.set_chain_head_hash(address, head_hash)
                new_blockchain_head_db.persist()
                new_root_hash = new_blockchain_head_db.root_hash
                # have to do this in case it is a tuple and we cannot modify
                cur_timestamp = historical_roots[i][0]
                historical_roots[i] = [cur_timestamp, new_root_hash]

    # lets now make sure our root hash is the same as the last historical. It is possible
    # that another thread or chain object has imported a block since this one was initialized.
    self.save_historical_root_hashes(historical_roots)
    self.root_hash = historical_roots[-1][1]
def test_validate_is_bytes(value, is_valid):
    """Valid byte values pass validation; anything else raises ValidationError."""
    if not is_valid:
        with pytest.raises(ValidationError):
            validate_is_bytes(value)
        return
    validate_is_bytes(value)
def __init__(self, code_bytes: bytes) -> None:
    """Wrap validated bytecode in a seekable stream for opcode iteration.

    Also initializes the cache of known-invalid jump positions and the
    depth-processed marker used by validity scanning.
    """
    validate_is_bytes(code_bytes, title="CodeStream bytes")
    self.stream = io.BytesIO(code_bytes)
    # positions already determined to be invalid jump destinations
    self.invalid_positions = set()  # type: Set[int]
    self.depth_processed = 0