def generate_prepare_message(self, state): epoch = state.block_number // self.epoch_length # NO_DBL_PREPARE: Don't prepare if we have already if epoch in self.prepares: return None # Create a Casper contract which we can use to get related values casper = tester.ABIContract(tester.State(state), casper_utils.casper_abi, self.chain.casper_address) # Get the ancestry hash and source ancestry hash validator_index = self.get_validator_index(state) _e, _a, _se, _sa, _pce = self.get_recommended_casper_msg_contents(casper, validator_index) # PREPARE_COMMIT_CONSISTENCY if _se < self.prev_commit_epoch and self.prev_commit_epoch < epoch: return None prepare_msg = casper_utils.mk_prepare(validator_index, _e, _a, _se, _sa, self.key) try: # Attempt to submit the prepare, to make sure that it is justified casper.prepare(prepare_msg) except tester.TransactionFailed: log.info('Prepare failed! Validator {} - hash justified {} - validator start {} - valcode addr {}' .format(self.get_validator_index(state), casper.get_consensus_messages__ancestry_hash_justified(epoch, _a), casper.get_validators__dynasty_start(validator_index), utils.encode_hex(self.valcode_addr))) return None # Save the prepare message we generated self.prepares[epoch] = prepare_msg # Save the highest source epoch we have referenced in our prepare's source epoch if epoch > self.prev_prepare_epoch: self.prev_prepare_epoch = epoch log.info('Prepare submitted: validator %d - epoch %d - prev_commit_epoch %d - hash %s' % (self.get_validator_index(state), epoch, self.prev_commit_epoch, utils.encode_hex(self.epoch_blockhash(state, epoch)))) return prepare_msg
def run_test(name, pairs):
    """Run a single trie fixture: apply `pairs['in']` updates/deletes in up to
    1000 insertion orders and check the root hash matches `pairs['root']`.

    Raises Exception on the first permutation whose resulting root mismatches.
    """
    logger.debug('testing %s' % name)

    def _dec(x):
        # fixture values may be '0x'-prefixed hex strings; decode them to bytes
        if is_string(x) and x.startswith(b'0x'):
            return decode_hex(x[2:])
        return x

    pairs['in'] = [(_dec(k), _dec(v)) for k, v in pairs['in']]
    # pairs with value None denote deletions
    deletes = [(k, v) for k, v in pairs['in'] if v is None]
    N_PERMUTATIONS = 1000
    for i, permut in enumerate(itertools.permutations(pairs['in'])):
        if i > N_PERMUTATIONS:
            break
        t = trie.Trie(db.EphemDB())
        for k, v in permut:
            #logger.debug('updating with (%s, %s)' %(k, v))
            if v is not None:
                t.update(to_string(k), to_string(v))
            else:
                t.delete(to_string(k))
        # make sure we have deletes at the end
        for k, v in deletes:
            t.delete(to_string(k))
        if pairs['root'] != '0x' + encode_hex(t.root_hash):
            raise Exception("Mismatch: %r %r %r %r" % (
                name, pairs['root'], '0x' + encode_hex(t.root_hash),
                (i, list(permut) + deletes)))
def to_snapshot(self, root_only=False, no_prevblocks=False):
    """Serialize this state to a JSON-friendly dict.

    root_only: emit only the state root (requires the original DB to restore).
    no_prevblocks: omit prev_headers / recent_uncles from the full snapshot.
    """
    snapshot = {}
    if root_only:
        # Smaller snapshot format that only includes the state root
        # (requires original DB to re-initialize)
        snapshot["state_root"] = '0x' + encode_hex(self.trie.root_hash)
    else:
        # "Full" snapshot
        snapshot["alloc"] = self.to_dict()
    # Save non-state-root variables; how each value is rendered is decided by
    # the *type of its default* in STATE_DEFAULTS
    for k, default in STATE_DEFAULTS.items():
        default = copy.copy(default)
        v = getattr(self, k)
        if is_numeric(default):
            snapshot[k] = str(v)
        elif isinstance(default, (str, bytes)):
            snapshot[k] = '0x' + encode_hex(v)
        elif k == 'prev_headers' and not no_prevblocks:
            # only keep as many ancestors as the chain config needs
            snapshot[k] = [
                prev_header_to_dict(h)
                for h in v[:self.config['PREV_HEADER_DEPTH']]
            ]
        elif k == 'recent_uncles' and not no_prevblocks:
            snapshot[k] = {
                str(n): ['0x' + encode_hex(h) for h in headers]
                for n, headers in v.items()
            }
    return snapshot
def decode_single(typ, data):
    """Decode a single ABI-encoded value.

    typ: processed type triple (base, sub, arrlist) — e.g. ('uint', '256', []).
    data: the raw encoded bytes for this value.
    Returns the decoded Python value; raises EncodingError on unknown base types.

    Fix: 'ufixed' and 'fixed' previously used `* 1.0 // 2 ** low`, a float
    *floor* division that discarded the fractional part of every fixed-point
    value (e.g. 3/2 decoded as 1.0 instead of 1.5). They now use true division.
    """
    base, sub, _ = typ
    if base == 'address':
        return '0x' + encode_hex(data[12:])
    elif base == 'hash':
        # hashN is right-aligned in the 32-byte word
        return data[32 - int(sub):]
    elif base == 'string' or base == 'bytes':
        if len(sub):
            # fixed-size bytesN
            return data[:int(sub)]
        else:
            # dynamic: first word is the length
            l = big_endian_to_int(data[0:32])
            return data[32:][:l]
    elif base == 'uint':
        return big_endian_to_int(data) % 2 ** int(sub)
    elif base == 'int':
        o = big_endian_to_int(data) % 2 ** int(sub)
        # two's-complement sign recovery
        return (o - 2 ** int(sub)) if o >= 2 ** (int(sub) - 1) else o
    elif base == 'ufixed':
        high, low = [int(x) for x in sub.split('x')]
        # true division: keep the fractional bits
        return big_endian_to_int(data) * 1.0 / 2 ** low
    elif base == 'fixed':
        high, low = [int(x) for x in sub.split('x')]
        o = big_endian_to_int(data)
        i = (o - 2 ** (high + low)) if o >= 2 ** (high + low - 1) else o
        # true division: keep the fractional bits
        return i * 1.0 / 2 ** low
    elif base == 'decimal':
        o = big_endian_to_int(data)
        i = (o - 2 ** 256 if o > 2 ** 255 else o)
        return i / 10 ** int(sub)
    elif base == 'bool':
        return bool(int(encode_hex(data), 16))
    else:
        raise EncodingError("Unhandled type: %r %r" % (base, sub))
def to_dict(self):
    """Serialize this transaction to a readable dict; byte fields
    ('to', 'data', 'sender', 'hash') are rendered as 0x-prefixed hex."""
    def hexify(raw):
        return '0x' + encode_hex(raw)

    result = {}
    for field_name, _ in self.__class__.fields:
        value = getattr(self, field_name)
        if field_name in ('to', 'data'):
            value = hexify(value)
        result[field_name] = value
    result['sender'] = hexify(self.sender)
    result['hash'] = hexify(self.hash)
    return result
def create_state_snapshot(chain, block):
    """Walk the state trie at `block` and return an alloc dict mapping
    hex address -> account snapshot, printing progress as it goes."""
    environment = chain.env
    state = State(block.state_root, environment)
    alloc = {}
    for count, (addr, account_rlp) in enumerate(state.trie.iter_branch(), start=1):
        hex_addr = encode_hex(addr)
        alloc[hex_addr] = create_account_snapshot(environment, account_rlp)
        print("[%d] created account snapshot %s" % (count, hex_addr))
    return alloc
def prev_header_to_dict(h):
    """Render a previous-header object as a dict of strings
    (hashes as 0x-hex, numeric fields as decimal strings)."""
    numeric_fields = ("number", "timestamp", "difficulty", "gas_used", "gas_limit")
    result = {name: str(getattr(h, name)) for name in numeric_fields}
    result["hash"] = '0x' + encode_hex(h.hash)
    result["uncles_hash"] = '0x' + encode_hex(h.uncles_hash)
    return result
def create_account_snapshot(env, rlpdata):
    """Decode an account's RLP and return a snapshot dict with its nonce,
    balance, code and full storage (keys stripped of leading zero bytes)."""
    account = get_account(env, rlpdata)
    trie_view = SecureTrie(Trie(env.db, account.storage))
    storage = {
        encode_hex(slot.lstrip(b'\x00') or b'\x00'): encode_hex(word)
        for slot, word in trie_view.iter_branch()
    }
    return {
        'nonce': snapshot_form(account.nonce),
        'balance': snapshot_form(account.balance),
        'code': encode_hex(account.code),
        'storage': storage,
    }
def mk_state_test_postfill(c, prefill, filler_mode=False):
    """Complete a state-test fixture from the chain `c` after the last tx.

    Combines `prefill` (env/pre sections) with the transaction data and either
    a "post" section (state root per fork config) or, in filler mode, an
    "expect" section containing the full post-state dump.
    """
    txdata = c.last_tx.to_dict()
    transaction = {
        "data": [txdata["data"]],
        "gasLimit": [int_to_0x_hex(txdata["startgas"])],
        "gasPrice": int_to_0x_hex(txdata["gasprice"]),
        "nonce": int_to_0x_hex(txdata["nonce"]),
        "secretKey": '0x' + encode_hex(c.last_sender),
        "to": txdata["to"],
        "value": [int_to_0x_hex(txdata["value"])],
    }
    c.head_state.commit()
    postStateHash = '0x' + encode_hex(c.head_state.trie.root_hash)
    # map the chain's config object to its fork label
    for candidate, label in ((config_homestead, 'Homestead'),
                             (config_tangerine, 'EIP150'),
                             (config_spurious, 'EIP158'),
                             (config_metropolis, 'Metropolis')):
        if c.chain.config == candidate:
            config = label
            break
    else:
        raise Exception("Cannot get config")
    o = {
        "env": prefill["env"],
        "pre": prefill["pre"],
        "transaction": transaction,
    }
    if filler_mode:
        o["expect"] = [{
            "indexes": {"data": 0, "gas": 0, "value": 0},
            "network": ["Metropolis"],
            "result": c.head_state.to_dict()
        }]
    else:
        o["post"] = {
            config: [{
                "hash": postStateHash,
                "indexes": {"data": 0, "gas": 0, "value": 0}
            }]
        }
    return o
def to_dict(self):
    """Dump this account as a dict: balance/nonce as decimal strings, code as
    0x-hex, and storage merged from the trie plus the in-memory write cache."""
    merged = self.storage_trie.to_dict()
    # overlay pending (cached) writes on top of the persisted trie contents
    for slot, word in self.storage_cache.items():
        merged[utils.encode_int(slot)] = rlp.encode(utils.encode_int(word))
    storage = {}
    for raw_key, raw_val in merged.items():
        hex_key = '0x' + encode_hex(raw_key.lstrip(b'\x00') or b'\x00')
        storage[hex_key] = '0x' + encode_hex(rlp.decode(raw_val))
    return {
        'balance': str(self.balance),
        'nonce': str(self.nonce),
        'code': '0x' + encode_hex(self.code),
        'storage': storage,
    }
def load_snapshot(chain, snapshot):
    """Restore a chain from a snapshot dict: load the base state ('alloc'),
    then replay the recent blocks with validation temporarily disabled.

    NOTE: temporarily monkey-patches common.validate_header and
    consensus.validate_uncles; header validation is restored after the first
    block, uncle validation once MAX_UNCLE_DEPTH + 2 blocks have been added.
    """
    base_header = rlp.decode(scan_bin(snapshot['base']), BlockHeader)
    limit = len(snapshot['blocks'])
    # first block is child of base block
    first_block_rlp = scan_bin(snapshot['blocks'][0])
    first_header_data = rlp.decode(first_block_rlp)[0]
    head_block_rlp = scan_bin(snapshot['blocks'][limit - 1])
    head_header_data = rlp.decode(head_block_rlp)[0]
    # rebuild the state trie and check it matches the base header's root
    trie = load_state(chain.env, snapshot['alloc'])
    assert trie.root_hash == base_header.state_root
    chain.state.trie = trie
    chain.env.db.put(b'score:' + base_header.hash, snapshot['chainDifficulty'])
    chain.env.db.commit()
    print("Start loading recent blocks from snapshot")
    # stash the real validators so they can be restored below
    vbh = common.validate_header
    vus = consensus.validate_uncles

    # Block header won't be validated, so set custom validate functions

    def _vbh(state, header):
        return True

    def _vus(state, block):
        return True

    common.validate_header = _vbh
    consensus.validate_uncles = _vus
    # add the first block
    first_block = rlp.decode(first_block_rlp, sedes=Block)
    chain.head_hash = first_block.header.prevhash
    chain.add_block(first_block)
    assert chain.head_hash == first_block.header.hash
    common.validate_header = vbh
    count = 0
    for block_rlp in snapshot['blocks'][1:]:
        block_rlp = scan_bin(block_rlp)
        block = rlp.decode(block_rlp, Block)
        # deep enough into the replay that uncles can be validated again
        if count == chain.state.config['MAX_UNCLE_DEPTH'] + 2:
            consensus.validate_uncles = vus
        if not chain.add_block(block):
            print("Failed to load block #%d (%s), abort." %
                  (block.number, encode_hex(block.hash)[:8]))
        else:
            count += 1
            print("[%d] block #%d (%s) added" %
                  (count, block.number, encode_hex(block.hash)[:8]))
    print("Snapshot loaded.")
def to_dict(self):
    """Serialize the header to a readable dictionary."""
    prefixed_hex = ('prevhash', 'uncles_hash', 'extra_data', 'nonce', 'mixhash')
    bare_hex = ('state_root', 'tx_list_root', 'receipts_root', 'coinbase')
    numeric = ('number', 'difficulty', 'gas_limit', 'gas_used', 'timestamp')
    d = {name: '0x' + encode_hex(getattr(self, name)) for name in prefixed_hex}
    for name in bare_hex:
        d[name] = encode_hex(getattr(self, name))
    for name in numeric:
        d[name] = utils.to_string(getattr(self, name))
    d['bloom'] = encode_hex(int256.serialize(self.bloom))
    # sanity check: every header field must be represented
    assert len(d) == len(BlockHeader.fields)
    return d
def compute_state_test_unit(state, txdata, indices, konfig):
    """Execute one (data, gas, value) index combination of a state test under
    fork config `konfig` and return {hash, indexes, diff}; the state is
    snapshotted first and reverted before returning.
    """
    state.env.config = konfig
    s = state.snapshot()
    try:
        # Create the transaction
        tx = transactions.Transaction(
            nonce=parse_int_or_hex(txdata['nonce'] or b"0"),
            gasprice=parse_int_or_hex(txdata['gasPrice'] or b"0"),
            startgas=parse_int_or_hex(txdata['gasLimit'][indices["gas"]] or b"0"),
            to=decode_hex(remove_0x_head(txdata['to'])),
            value=parse_int_or_hex(txdata['value'][indices["value"]] or b"0"),
            data=decode_hex(remove_0x_head(txdata['data'][indices["data"]])))
        if 'secretKey' in txdata:
            tx.sign(decode_hex(remove_0x_head(txdata['secretKey'])))
        else:
            # fixtures without a key carry a pre-set v value
            tx.v = parse_int_or_hex(txdata['v'])
        # Run it
        prev = state.to_dict()
        success, output = apply_transaction(state, tx)
        print("Applied tx")
    except InvalidTransaction as e:
        print("Exception: %r" % e)
        success, output = False, b''
        # NOTE(review): if the exception fires before `prev` is assigned,
        # mk_state_diff below raises NameError — confirm InvalidTransaction can
        # only come from apply_transaction.
    # state.set_code('0x3e180b1862f9d158abb5e519a6d8605540c23682', b'')
    state.commit()
    post = state.to_dict()
    # print('pozt', post)
    output_decl = {
        "hash": '0x' + encode_hex(state.trie.root_hash),
        "indexes": indices,
        "diff": mk_state_diff(prev, post)
    }
    state.revert(s)
    return output_decl
def encode_int(v):
    """Encode an integer as a minimal 0x-prefixed lowercase hex quantity.

    Equivalent to the old strip-one-leading-zero dance over
    encode_hex(int_to_big_endian(v)), but also guarantees that v == 0 yields
    '0x0' (the collapsed original could return a bare '0x' for zero since
    int_to_big_endian(0) is empty).
    """
    # '%x' emits minimal lowercase hex with no leading zeros on py2 and py3
    return '0x' + '%x' % int(v)
def test_library_from_code():
    """A contract compiled without imports must link against a library
    deployed from source via the `libraries` mapping."""
    def _read_contract(filename):
        with open(path.join(CONTRACTS_DIR, filename)) as source_file:
            return source_file.read()

    library_source = _read_contract('seven_library.sol')
    contract_source = _read_contract('seven_contract_without_import.sol')

    state = tester.state()
    state.env.config['HOMESTEAD_FORK_BLKNUM'] = 0  # enable CALLCODE opcode

    library = state.abi_contract(
        library_source,
        path=None,
        language='solidity',
    )
    contract = state.abi_contract(
        contract_source,
        path=None,
        libraries={'SevenLibrary': encode_hex(library.address)},
        language='solidity',
    )

    # pylint: disable=no-member
    assert library.seven() == 7
    assert contract.test() == 7
def big_endian_to_int(value):
    """Interpret a big-endian byte string as an unsigned integer.

    Fast paths: single byte via ord(), up to 8 bytes via struct; anything
    longer falls back to hex parsing.
    """
    size = len(value)
    if size == 1:
        return ord(value)
    if size <= 8:
        padded = value.rjust(8, b'\x00')
        return struct.unpack('>Q', padded)[0]
    return int(encode_hex(value), 16)
def to_dict(self):
    """Dump every account in the state as {hex address: account dict},
    warming the account cache from the trie first."""
    # pull every trie-resident account into the cache
    for address in self.trie.to_dict().keys():
        self.get_and_cache_account(address)
    result = {}
    for address, account in self.cache.items():
        result[encode_hex(address)] = account.to_dict()
    return result
def test_abi_logging():
    """Events of various arities/types emitted by the serpent contract must be
    decoded by the ABI translator exactly as expected."""
    chain = tester.Chain()
    contract = chain.contract(abi_logging_code, language='serpent')
    captured = []
    chain.head_state.log_listeners.append(
        lambda entry: captured.append(contract.translator.listen(entry)))

    contract.test_rabbit(3)
    assert captured == [{"_event_type": b"rabbit", "x": 3}]
    captured.pop()

    contract.test_frog(5)
    assert captured == [{"_event_type": b"frog", "y": 5}]
    captured.pop()

    contract.test_moose(7, b"nine", 11, [13, 15, 17])
    assert captured == [{"_event_type": b"moose", "a": 7, "b": b"nine",
                         "c": 11, "d": [13, 15, 17]}]
    captured.pop()

    contract.test_chicken(tester.a0)
    assert captured == [{"_event_type": b"chicken",
                         "m": "0x" + utils.encode_hex(tester.a0)}]
    captured.pop()
def __init__(self, block):
    """Prepare a miner for `block`, starting the nonce search at zero."""
    self.block = block
    self.nonce = 0
    log.debug('mining',
              block_number=block.number,
              block_hash=utils.encode_hex(block.hash),
              block_difficulty=block.difficulty)
def casper_setup_block(chain, state=None, timestamp=None, coinbase=b'\x35' * 20,
                       extra_data='moo ha ha says the laughing cow.'):
    """Assemble a new Casper block skeleton on top of `chain`/`state`:
    header fields come from the Casper contract (number, gas limit) and the
    blockhash store (prevhash); dunkle transactions are applied and included.
    Returns the prepared Block.
    """
    state = state or chain.state
    blk = Block(BlockHeader())
    now = timestamp or chain.time()
    prev_blknumber = call_casper(state, 'getBlockNumber')
    blk.header.number = prev_blknumber + 1
    blk.header.difficulty = 1
    blk.header.gas_limit = call_casper(state, 'getGasLimit')
    # timestamp must strictly increase over the previous header
    blk.header.timestamp = max(now, state.prev_headers[0].timestamp + 1)
    # prevhash is looked up from the Metropolis blockhash-store contract
    blk.header.prevhash = apply_const_message(
        state,
        sender=casper_config['METROPOLIS_ENTRY_POINT'],
        to=casper_config['METROPOLIS_BLOCKHASH_STORE'],
        data=utils.encode_int32(prev_blknumber))
    blk.header.coinbase = coinbase
    blk.header.extra_data = extra_data
    blk.header.bloom = 0
    blk.uncles = []
    initialize(state, blk)
    # include every available dunkle; each must apply cleanly
    for tx in get_dunkle_candidates(chain, state):
        assert apply_transaction(state, tx)
        blk.transactions.append(tx)
    log_bc.info('Block set up with number %d and prevhash %s, %d dunkles' %
                (blk.header.number, utils.encode_hex(blk.header.prevhash),
                 len(blk.transactions)))
    return blk
def save_block(self, saved_block_id):
    """Record the current head block hash under `saved_block_id` so it can be
    restored later; raises if that checkpoint id is already taken."""
    if saved_block_id in self.saved_blocks:
        raise Exception(
            'Checkpoint {} already exists'.format(saved_block_id))
    head_hash = self.t.head_state.prev_headers[0].hash
    self.saved_blocks[saved_block_id] = head_hash
    print('Saving checkpoint with hash: {}'.format(encode_hex(head_hash)))
def revert_to_block(self, saved_block_id):
    """Move the chain head back to the checkpoint previously stored under
    `saved_block_id`; raises if no such checkpoint exists."""
    if saved_block_id not in self.saved_blocks:
        raise Exception(
            'Checkpoint {} does not exist'.format(saved_block_id))
    checkpoint_hash = self.saved_blocks[saved_block_id]
    self.t.change_head(checkpoint_hash)
    print('Reverting to checkpoint with hash: {}'.format(
        encode_hex(checkpoint_hash)))
def run_abi_test(params, mode):
    """Round-trip an ABI fixture: encode `args` per `types`, check decode
    inverts encode, then fill / verify / time depending on `mode`.

    Fix: the TIME branch called abi.decode_abi(out, args) — arguments swapped.
    It now calls abi.decode_abi(types, out), matching the round-trip check
    above, so the timed decode actually decodes the encoded payload.
    """
    types, args = params['types'], params['args']
    out = abi.encode_abi(types, args)
    assert bytesify(abi.decode_abi(types, out)) == bytesify(args)
    if mode == FILL:
        params['result'] = encode_hex(out)
        return params
    elif mode == VERIFY:
        assert params['result'] == encode_hex(out)
    elif mode == TIME:
        x = time.time()
        abi.encode_abi(types, args)
        y = time.time()
        abi.decode_abi(types, out)  # was decode_abi(out, args): wrong arg order
        return {
            'encoding': y - x,
            'decoding': time.time() - y
        }
def test_key(filename, testname, testdata):
    """A keystore fixture must pass the JSON sanity check and decrypt to the
    expected private key."""
    logger.debug('running test:%r in %r' % (testname, filename))
    keystore = testdata["json"]
    assert keys.check_keystore_json(keystore)
    recovered_priv = keys.decode_keystore_json(keystore, testdata["password"])
    assert utils.encode_hex(recovered_priv) == testdata["priv"]
def inc_refcount(self, k, v):
    """Increment the stored refcount for node `k` (re-writing its value `v`),
    journaling the old count for rollback; a missing/undecodable entry is
    (re)created with refcount 1.
    """
    # raise Exception("WHY AM I CHANGING A REFCOUNT?!:?")
    try:
        node_object = rlp.decode(self.db.get(b'r:' + k))
        refcount = utils.decode_int(node_object[0])
        # journal the previous encoded refcount so it can be rolled back
        self.journal.append([node_object[0], k])
        # counts at/above DEATH_ROW_OFFSET mark death-row entries; revive as 0
        if refcount >= DEATH_ROW_OFFSET:
            refcount = 0
        new_refcount = utils.encode_int(refcount + 1)
        self.db.put(b'r:' + k, rlp.encode([new_refcount, v]))
        if self.logging:
            sys.stderr.write('increasing %s %r to: %d\n' % (
                utils.encode_hex(k), v, refcount + 1))
    except BaseException:
        # NOTE(review): deliberately broad — any failure to read/decode the
        # existing entry falls back to creating it fresh with refcount 1
        self.db.put(b'r:' + k, rlp.encode([ONE_ENCODED, v]))
        self.journal.append([ZERO_ENCODED, k])
        if self.logging:
            sys.stderr.write('increasing %s %r to: %d\n' % (
                utils.encode_hex(k), v, 1))
def dict_to_prev_header(h):
    """Build a FakeHeader from a snapshot dict; gas_used defaults to '0' and
    uncles_hash to the blank-uncles hash when absent."""
    default_uncles_hex = '0x' + encode_hex(BLANK_UNCLES_HASH)
    return FakeHeader(
        hash=parse_as_bin(h['hash']),
        number=parse_as_int(h['number']),
        timestamp=parse_as_int(h['timestamp']),
        difficulty=parse_as_int(h['difficulty']),
        gas_used=parse_as_int(h.get('gas_used', '0')),
        gas_limit=parse_as_int(h['gas_limit']),
        uncles_hash=parse_as_bin(h.get('uncles_hash', default_uncles_hex)),
    )
def slash(self, casper):
    """Submit whichever slashing evidence this validator holds (double
    prepare first, then prepare/commit inconsistency); raises if none."""
    if self.double_prepare_evidence:
        first, second = (self.double_prepare_evidence[0],
                         self.double_prepare_evidence[1])
        casper.double_prepare_slash(first, second)
    elif self.prepare_commit_consistency_evidence:
        first, second = (self.prepare_commit_consistency_evidence[0],
                         self.prepare_commit_consistency_evidence[1])
        casper.prepare_commit_inconsistency_slash(first, second)
    else:
        raise Exception('No slash evidence found')
    print('Slashed validator:', encode_hex(self.withdrawal_addr))
def mk_state_test_prefill(c):
    """Capture the 'env' and 'pre' sections of a state-test fixture from the
    current head state of chain `c`."""
    head = c.head_state
    env = {
        "currentCoinbase": checksum_encode(head.block_coinbase),
        "currentDifficulty": int_to_0x_hex(head.block_difficulty),
        "currentGasLimit": int_to_0x_hex(head.gas_limit),
        "currentNumber": int_to_0x_hex(head.block_number),
        "currentTimestamp": int_to_0x_hex(head.timestamp),
        "previousHash": "0x" + encode_hex(head.prev_headers[0].hash),
    }
    return {"env": env, "pre": head.to_dict()}
def do_test_bloom(test_logs):
    """
    The logs sections is a mapping between the blooms and their corresponding
    logentries. Each logentry has the format:
    address: The address of the logentry.
    data: The data of the logentry.
    topics: The topics of the logentry, given as an array of values.
    """
    for entry in test_logs:
        address_bytes = decode_hex(entry['address'])
        # build the bloom directly from address + topics
        direct_bloom = bloom.bloom_insert(0, address_bytes)
        for topic in entry['topics']:
            direct_bloom = bloom.bloom_insert(direct_bloom, decode_hex(topic))
        # build the same bloom via a Log object
        topic_ints = [decode_int_from_hex(topic) for topic in entry['topics']]
        log_entry = Log(address_bytes, topic_ints, '')
        log_bloom = bloom.b64(bloom.bloom_from_list(log_entry.bloomables()))
        # both constructions must agree, and match the fixture's bloom
        assert encode_hex(log_bloom) == encode_hex_from_int(direct_bloom)
        assert entry['bloom'] == encode_hex(log_bloom)
def test_interop():
    """Serpent and Solidity contracts must be able to call each other:
    serpent's main() doubles solidity's sub2() (7 -> 14) and solidity's
    main() doubles serpent's sub1() (5 -> 10)."""
    serpent_contract = """
extern solidity: [sub2:[]:i]

def main(a):
    return(a.sub2() * 2)

def sub1():
    return(5)
"""

    solidity_contract = """
contract serpent { function sub1() returns (int256 y) {} }

contract zoo {
    function main(address a) returns (int256 y) {
        y = serpent(a).sub1() * 2;
    }
    function sub2() returns (int256 y) {
        y = 7;
    }
    function sub3(address a) returns (address b) {
        b = a;
    }
}
"""

    state = tester.state()
    serpent_abi = state.abi_contract(serpent_contract)
    solidity_abi = state.abi_contract(solidity_contract, language='solidity')  # should be zoo
    solidity_address = utils.encode_hex(solidity_abi.address)

    # pylint: disable=no-member
    assert serpent_abi.sub1() == 5
    assert serpent_abi.main(solidity_abi.address) == 14

    assert solidity_abi.sub2() == 7
    assert solidity_abi.sub3(utils.encode_hex(
        solidity_abi.address)) == solidity_address
    assert solidity_abi.main(serpent_abi.address) == 10