Example #1
 def msg_wrapper(msg):
     hexdata = encode_hex(msg.data.extract_all())
     apply_message_calls.append(dict(gasLimit=to_string(msg.gas),
                                     value=to_string(msg.value),
                                     destination=encode_hex(msg.to),
                                     data=b'0x' + hexdata))
     return 1, msg.gas, b''
Example #2
def test_basic_pruning():
    db = RefcountDB(EphemDB())
    NODES = 60

    t = pruning_trie.Trie(db)
    db.ttl = 0
    db.logging = True

    for i in range(NODES):
        t.update(to_string(i), to_string(i))
        db.commit_refcount_changes(0)
        db.cleanup(0)
        check_db_tightness([t], db)
    for i in range(NODES):
        t.update(to_string(i), to_string(i ** 3))
        db.commit_refcount_changes(0)
        db.cleanup(0)
        check_db_tightness([t], db)
    for i in range(NODES):
        t.delete(to_string(i))
        db.commit_refcount_changes(0)
        db.cleanup(0)
        check_db_tightness([t], db)
    assert len(t.to_dict()) == 0
    assert len(db.kv) == 0
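These pruning-trie tests, and most of the examples that follow, lean on pyethereum's to_string helper to normalize keys and values to bytes. Below is a minimal sketch of the assumed (Python 3) behaviour, under a hypothetical name so it does not shadow the real import:

def to_string_sketch(value):
    # bytes pass through; str is UTF-8 encoded; ints become decimal digits
    if isinstance(value, bytes):
        return value
    if isinstance(value, str):
        return value.encode('utf-8')
    if isinstance(value, int):
        return str(value).encode('utf-8')
    raise TypeError("cannot coerce %r to bytes" % (value,))

assert to_string_sketch(42) == b'42'
assert to_string_sketch('abc') == to_string_sketch(b'abc') == b'abc'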
Example #3
 def reorganize_head_to(self, block):
     log.info('Replacing head')
     b = block
     new_chain = {}
     while b.header.number >= int(self.db.get(b'GENESIS_NUMBER')):
         new_chain[b.header.number] = b
         key = b'block:' + to_string(b.header.number)
         orig_at_height = self.db.get(key) if key in self.db else None
         if orig_at_height == b.header.hash:
             break
         if b.prevhash not in self.db or self.db.get(
                 b.prevhash) == b'GENESIS':
             break
         b = self.get_parent(b)
     replace_from = b.header.number
     for i in itertools.count(replace_from):
         log.info('Rewriting height %d' % i)
         key = b'block:' + to_string(i)
         orig_at_height = self.db.get(key) if key in self.db else None
         if orig_at_height:
             self.db.delete(key)
             orig_block_at_height = self.get_block(orig_at_height)
             for tx in orig_block_at_height.transactions:
                 if b'txindex:' + tx.hash in self.db:
                     self.db.delete(b'txindex:' + tx.hash)
         if i in new_chain:
             new_block_at_height = new_chain[i]
             self.db.put(key, new_block_at_height.header.hash)
             # use a separate index (j) so the height counter i from the outer loop is not shadowed
             for j, tx in enumerate(new_block_at_height.transactions):
                 self.db.put(b'txindex:' + tx.hash,
                             rlp.encode([new_block_at_height.number, j]))
         if i not in new_chain and not orig_at_height:
             break
     self.head_hash = block.header.hash
     self.state = self.mk_poststate_of_blockhash(block.hash)
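reorganize_head_to maintains two index families in the database: b'block:' + to_string(height) maps a height to the canonical block hash, and b'txindex:' + tx.hash maps a transaction hash to its RLP-encoded [block number, index] pair. A hedged sketch of read-side helpers built on that scheme (helper names are hypothetical):

import rlp
from ethereum.utils import to_string

def block_hash_by_number(db, number):
    key = b'block:' + to_string(number)      # e.g. b'block:17'
    return db.get(key) if key in db else None

def tx_position(db, tx_hash):
    key = b'txindex:' + tx_hash
    if key not in db:
        return None
    # the [block number, tx index] pair comes back as RLP byte strings, not Python ints
    return rlp.decode(db.get(key))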
Example #4
 def commit_refcount_changes(self, epoch):
     # Save death row nodes
     timeout_epoch = epoch + self.ttl
     try:
         death_row_nodes = rlp.decode(
             self.db.get(b'deathrow:' + utils.to_string(timeout_epoch)))
     except BaseException:
         death_row_nodes = []
     for nodekey in self.death_row:
         refcount, val = rlp.decode(self.db.get(b'r:' + nodekey))
         if refcount == ZERO_ENCODED:
             new_refcount = utils.encode_int(DEATH_ROW_OFFSET +
                                             timeout_epoch)
             self.db.put(b'r:' + nodekey, rlp.encode([new_refcount, val]))
     if len(self.death_row) > 0:
         sys.stderr.write('%d nodes marked for pruning during block %d\n' %
                          (len(self.death_row), timeout_epoch))
     death_row_nodes.extend(self.death_row)
     self.death_row = []
     self.db.put(b'deathrow:' + utils.to_string(timeout_epoch),
                 rlp.encode(death_row_nodes))
     # Save journal
     try:
         journal = rlp.decode(
             self.db.get(b'journal:' + utils.to_string(epoch)))
     except BaseException:
         journal = []
     journal.extend(self.journal)
     self.journal = []
     self.db.put(b'journal:' + utils.to_string(epoch), rlp.encode(journal))
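The bookkeeping above is keyed by epoch: nodes queued for deletion during epoch e are filed under epoch e + ttl, when they may actually be pruned, while the journal needed to revert the change stays under e itself. A small worked example of the key arithmetic (values chosen purely for illustration):

from ethereum.utils import to_string

ttl, epoch = 2, 5
timeout_epoch = epoch + ttl
assert b'deathrow:' + to_string(timeout_epoch) == b'deathrow:7'
assert b'journal:' + to_string(epoch) == b'journal:5'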
Example #5
 def run():
     st = time.time()
     x = trie.Trie(db.EphemDB())
     for i in range(10000):
         x.update(to_string(i), to_string(i**3))
     print('elapsed', time.time() - st)
     return x.root_hash
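A smaller, hedged variant of the benchmark that also reads a value back, assuming the same trie.Trie, db.EphemDB and to_string names already imported by the original module:

def run_small(n=100):
    x = trie.Trie(db.EphemDB())
    for i in range(n):
        x.update(to_string(i), to_string(i ** 3))
    # both keys and values are the bytes produced by to_string
    assert x.get(to_string(5)) == to_string(125)
    return x.root_hash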
Example #7
 def cleanup(self, epoch):
     try:
         death_row_node = self.db.get(b'deathrow:' + utils.to_string(epoch))
     except BaseException:
         death_row_node = rlp.encode([])
     death_row_nodes = rlp.decode(death_row_node)
     pruned = 0
     for nodekey in death_row_nodes:
         try:
             refcount, val = rlp.decode(self.db.get(b'r:' + nodekey))
             if utils.decode_int(refcount) == DEATH_ROW_OFFSET + epoch:
                 self.db.delete(b'r:' + nodekey)
                 pruned += 1
         except BaseException:
             pass
     sys.stderr.write('%d nodes successfully pruned\n' % pruned)
     # Delete the deathrow after processing it
     try:
         self.db.delete(b'deathrow:' + utils.to_string(epoch))
     except BaseException:
         pass
     # Delete journals that are too old
     try:
         self.db.delete(b'journal:' + utils.to_string(epoch - self.ttl))
     except BaseException:
         pass
Example #8
def run_test(name, pairs):

    logger.debug('testing %s' % name)

    def _dec(x):
        if is_string(x) and x.startswith(b'0x'):
            return decode_hex(x[2:])
        return x

    pairs['in'] = [(_dec(k), _dec(v)) for k, v in pairs['in']]
    deletes = [(k, v) for k, v in pairs['in'] if v is None]

    N_PERMUTATIONS = 1000
    for i, permut in enumerate(itertools.permutations(pairs['in'])):
        if i > N_PERMUTATIONS:
            break
        t = trie.Trie(db.EphemDB())
        for k, v in permut:
            #logger.debug('updating with (%s, %s)' %(k, v))
            if v is not None:
                t.update(to_string(k), to_string(v))
            else:
                t.delete(to_string(k))
        # make sure we have deletes at the end
        for k, v in deletes:
            t.delete(to_string(k))
        if pairs['root'] != '0x' + encode_hex(t.root_hash):
            raise Exception(
                "Mismatch: %r %r %r %r" %
                (name, pairs['root'], '0x' + encode_hex(t.root_hash),
                 (i, list(permut) + deletes)))
Example #9
    def _iter_branch(self, node):
        '''yield (key, value) stored in this and the descendant nodes
        :param node: node in form of list, or BLANK_NODE

        .. note::
            Here key is in full form, rather than key of the individual node
        '''
        if node == BLANK_NODE:
            return  # end the generator; raising StopIteration here breaks under PEP 479 (Python 3.7+)

        node_type = self._get_node_type(node)

        if is_key_value_type(node_type):
            nibbles = without_terminator(unpack_to_nibbles(node[0]))
            key = b'+'.join([to_string(x) for x in nibbles])
            if node_type == NODE_TYPE_EXTENSION:
                sub_tree = self._iter_branch(self._decode_to_node(node[1]))
            else:
                sub_tree = [(to_string(NIBBLE_TERMINATOR), node[1])]

            # prepend key of this node to the keys of children
            for sub_key, sub_value in sub_tree:
                full_key = (key + b'+' + sub_key).strip(b'+')
                yield (full_key, sub_value)

        elif node_type == NODE_TYPE_BRANCH:
            for i in range(16):
                sub_tree = self._iter_branch(self._decode_to_node(node[i]))
                for sub_key, sub_value in sub_tree:
                    full_key = (str_to_bytes(str(i)) + b'+' + sub_key).strip(b'+')
                    yield (full_key, sub_value)
            if node[16]:
                yield (to_string(NIBBLE_TERMINATOR), node[-1])
Example #11
 def create_wrapper(msg):
     sender = decode_hex(msg.sender) if \
         len(msg.sender) == 40 else msg.sender
     nonce = utils.encode_int(ext._block.get_nonce(msg.sender))
     addr = utils.sha3(rlp.encode([sender, nonce]))[12:]
     hexdata = encode_hex(msg.data.extract_all())
     apply_message_calls.append(dict(gasLimit=to_string(msg.gas),
                                     value=to_string(msg.value),
                                     destination=b'', data=b'0x' + hexdata))
     return 1, msg.gas, addr
Example #13
def initialize_genesis_keys(state, genesis):
    db = state.db
    db.put(b'GENESIS_NUMBER', to_string(genesis.header.number))
    db.put(b'GENESIS_HASH', to_string(genesis.header.hash))
    db.put(b'GENESIS_STATE', json.dumps(state.to_snapshot()))
    db.put(b'GENESIS_RLP', rlp.encode(genesis))
    db.put(b'block:0', genesis.header.hash)
    db.put(b'score:' + genesis.header.hash, "0")
    db.put(b'state:' + genesis.header.hash, state.trie.root_hash)
    db.put(genesis.header.hash, b'GENESIS')
    db.commit()
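A hypothetical companion that reads the same genesis keys back out; note that the stored values are bytes, so the block number needs the explicit int() round-trip also used by reorganize_head_to above:

def genesis_info(db):
    return {
        'number': int(db.get(b'GENESIS_NUMBER')),
        'hash': db.get(b'GENESIS_HASH'),
        'block_0_hash': db.get(b'block:0'),
    }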
Example #14
def run_test(name):

    logger.debug('testing %s' % name)
    t = trie.Trie(new_db())
    data = load_tests()[name]

    for k in data['in']:
        logger.debug('updating with (%s, %s)' % (k, k))
        k = to_string(k)
        t.update(k, k)
    for point, prev, nxt in data['tests']:
        assert to_string(nxt) == (t.next(point) or b'')
        assert to_string(prev) == (t.prev(point) or b'')
Example #15
def test_clear():
    db = RefcountDB(EphemDB())
    NODES = 60
    t = pruning_trie.Trie(db)
    db.ttl = 0
    for i in range(NODES):
        t.update(to_string(i), to_string(i))
        db.commit_refcount_changes(i)
        db.cleanup(i)
    t.clear_all()
    db.commit_refcount_changes(NODES)
    db.cleanup(NODES)
    assert len(db.kv) == 0
Example #16
def run_test(name):

    logger.debug('testing %s' % name)
    t = trie.Trie(new_db())
    data = fixture_to_bytes(load_tests()[name])

    for k in data['in']:
        logger.debug('updating with (%s, %s)' % (k, k))
        k = to_string(k)
        t.update(k, k)
    for point, prev, nxt in data['tests']:
        assert to_string(nxt) == (t.next(point) or b'')
        assert to_string(prev) == (t.prev(point) or b'')
Example #17
def test_two_trees():
    db = RefcountDB(EphemDB())
    NODES = 60
    t1 = pruning_trie.Trie(db)
    t2 = pruning_trie.Trie(db)
    db.ttl = 0
    for i in range(NODES):
        t1.update(to_string(i), to_string(i))
        if i < NODES // 2:
            t2.update(to_string(i), to_string(i))
        db.commit_refcount_changes(i)
        db.cleanup(i)
        check_db_tightness([t1, t2], db)
    for i in range(NODES):
        sys.stderr.write('clearing: %d\n' % i)
        t1.delete(to_string(NODES - 1 - i))
        db.commit_refcount_changes(NODES + i)
        db.cleanup(NODES + i)
        check_db_tightness([t1, t2], db)
    assert t2.to_dict() == {to_string(i): to_string(i) for i in range(NODES // 2)}
    for i in range(NODES // 2):
        t2.delete(to_string(i))
        db.commit_refcount_changes(NODES * 2 + i)
        db.cleanup(NODES * 2 + i)
        check_db_tightness([t1, t2], db)
    assert len(db.kv) == 0
Example #18
 def listen(self, log, noprint=False):
     if not len(log.topics) or log.topics[0] not in self.event_data:
         return
     types = self.event_data[log.topics[0]]['types']
     name = self.event_data[log.topics[0]]['name']
     names = self.event_data[log.topics[0]]['names']
     indexed = self.event_data[log.topics[0]]['indexed']
     indexed_types = [types[i] for i in range(len(types))
                      if indexed[i]]
     unindexed_types = [types[i] for i in range(len(types))
                        if not indexed[i]]
     # print('listen', log.data.encode('hex'), log.topics)
     deserialized_args = decode_abi(unindexed_types, log.data)
     o = {}
     c1, c2 = 0, 0
     for i in range(len(names)):
         if indexed[i]:
             topic_bytes = utils.zpad(utils.encode_int(log.topics[c1 + 1]), 32)
             o[names[i]] = decode_single(process_type(indexed_types[c1]),
                                         topic_bytes)
             c1 += 1
         else:
             o[names[i]] = deserialized_args[c2]
             c2 += 1
     o["_event_type"] = utils.to_string(name)
     if not noprint:
         print(o)
     return o
Example #19
def compile_code(sourcecode,
                 libraries=None,
                 combined='bin,abi',
                 optimize=True,
                 extra_args=None):
    args = solc_arguments(libraries=libraries,
                          combined=combined,
                          optimize=optimize,
                          extra_args=extra_args)
    compiler = get_compiler_path()
    if compiler is None:
        raise SolcMissing("solc not found")
    args.insert(0, compiler)

    process = subprocess.Popen(args,
                               stdin=subprocess.PIPE,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE)
    stdoutdata, stderrdata = process.communicate(
        input=utils.to_string(sourcecode))

    if process.returncode != 0:
        raise CompileError(stderrdata)

    return solc_parse_output(stdoutdata)
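A hedged usage sketch: compiling a trivial contract, assuming solc is on PATH and that solc_parse_output returns a mapping keyed by contract name:

source = "pragma solidity ^0.4.0; contract Empty {}"
try:
    out = compile_code(source, combined='bin,abi')
    print(sorted(out))      # contract names found in the compiler output
except SolcMissing:
    print("solc not installed; skipping")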
Example #20
 def listen(self, log, noprint=False):
     if not len(log.topics) or log.topics[0] not in self.event_data:
         return
     types = self.event_data[log.topics[0]]['types']
     name = self.event_data[log.topics[0]]['name']
     names = self.event_data[log.topics[0]]['names']
     indexed = self.event_data[log.topics[0]]['indexed']
     indexed_types = [types[i] for i in range(len(types)) if indexed[i]]
     unindexed_types = [
         types[i] for i in range(len(types)) if not indexed[i]
     ]
     # print('listen', log.data.encode('hex'), log.topics)
     deserialized_args = decode_abi(unindexed_types, log.data)
     o = {}
     c1, c2 = 0, 0
     for i in range(len(names)):
         if indexed[i]:
             topic_bytes = utils.zpad(utils.encode_int(log.topics[c1 + 1]),
                                      32)
             o[names[i]] = decode_single(process_type(indexed_types[c1]),
                                         topic_bytes)
             c1 += 1
         else:
             o[names[i]] = deserialized_args[c2]
             c2 += 1
     o["_event_type"] = utils.to_string(name)
     if not noprint:
         print(o)
     return o
Example #21
 def add_block_to_head(self, block):
     log.info('Adding to head', head=encode_hex(block.header.prevhash))
     apply_block(self.state, block)
     self.db.put(b'block:' + to_string(block.header.number), block.header.hash)
     self.get_pow_difficulty(block)  # side effect: put 'score:' cache in db
     self.head_hash = block.header.hash
     for i, tx in enumerate(block.transactions):
         self.db.put(b'txindex:' + tx.hash, rlp.encode([block.number, i]))
Example #22
def acct_standard_form(a):
    return {
        "balance": parse_int_or_hex(a["balance"]),
        "nonce": parse_int_or_hex(a["nonce"]),
        "code": to_string(a["code"]),
        "storage": {normalize_hex(k): normalize_hex(v) for
                    k, v in a["storage"].items() if normalize_hex(v).rstrip(b'0') != b'0x'}
    }
Example #23
    def _to_dict(self, node):
        """convert (key, value) stored in this and the descendant nodes
        to dict items.

        :param node: node in form of list, or BLANK_NODE

        .. note::

            Here key is in full form, rather than key of the individual node
        """
        if node == BLANK_NODE:
            return {}

        node_type = self._get_node_type(node)

        if is_key_value_type(node_type):
            nibbles = without_terminator(unpack_to_nibbles(node[0]))
            key = b'+'.join([to_string(x) for x in nibbles])
            if node_type == NODE_TYPE_EXTENSION:
                sub_dict = self._to_dict(self._decode_to_node(node[1]))
            else:
                sub_dict = {to_string(NIBBLE_TERMINATOR): node[1]}

            # prepend key of this node to the keys of children
            res = {}
            for sub_key, sub_value in sub_dict.items():
                full_key = (key + b'+' + sub_key).strip(b'+')
                res[full_key] = sub_value
            return res

        elif node_type == NODE_TYPE_BRANCH:
            res = {}
            for i in range(16):
                sub_dict = self._to_dict(self._decode_to_node(node[i]))

                for sub_key, sub_value in sub_dict.items():
                    full_key = (str_to_bytes(str(i)) + b'+' + sub_key).strip(b'+')
                    res[full_key] = sub_value

            if node[16]:
                res[to_string(NIBBLE_TERMINATOR)] = node[-1]
            return res
Example #24
 def revert_refcount_changes(self, epoch):
     timeout_epoch = epoch + self.ttl
     # Delete death row additions
     try:
         self.db.delete(b'deathrow:' + utils.to_string(timeout_epoch))
     except BaseException:
         pass
     # Revert journal changes
     try:
         journal = rlp.decode(
             self.db.get(b'journal:' + utils.to_string(epoch)))
         for new_refcount, hashkey in journal[::-1]:
             node_object = rlp.decode(self.db.get(b'r:' + hashkey))
             self.db.put(b'r:' + hashkey,
                         rlp.encode([new_refcount, node_object[1]]))
     except BaseException:
         pass
Example #26
 def test(self):
     # Create 50 accounts
     accounts = []
     keys = []
     account_count = 50
     for i in range(account_count):
         keys.append(sha3(to_string(i)))
         accounts.append(privtoaddr(keys[-1]))
         self.s.block.set_balance(accounts[-1], 10**18)
     # Create wallet
     required_accounts = 2
     constructor_parameters = (accounts, required_accounts)
     self.multisig_wallet = self.s.abi_contract(
         open('solidity/MultiSigWallet.sol').read(),
         language='solidity',
         constructor_parameters=constructor_parameters)
     # Create ABIs
     multisig_abi = self.multisig_wallet.translator
     # Should not be able to breach the maximum number of owners
     key_51 = sha3(to_string(51))
     account_51 = privtoaddr(key_51)
     add_owner_data = multisig_abi.encode("addOwner", [account_51])
     self.assertFalse(self.multisig_wallet.isOwner(account_51))
     nonce = self.multisig_wallet.getNonce(self.multisig_wallet.address, 0,
                                           add_owner_data)
     add_owner_tx_hash = self.multisig_wallet.submitTransaction(
         self.multisig_wallet.address,
         0,
         add_owner_data,
         nonce,
         sender=keys[0])
     include_pending = True
     exclude_executed = False
     self.assertEqual(
         self.multisig_wallet.getTransactionHashes(0, 1, include_pending,
                                                   exclude_executed),
         [add_owner_tx_hash])
     # Transaction is confirmed but cannot be executed due to too many owners.
     self.multisig_wallet.confirmTransaction(add_owner_tx_hash,
                                             sender=keys[1])
     # Transaction remains pending
     self.assertEqual(
         self.multisig_wallet.getTransactionHashes(0, 1, include_pending,
                                                   exclude_executed),
         [add_owner_tx_hash])
Example #27
def test_insert_delete():
    for a in (5, 15, 60):
        db = RefcountDB(EphemDB())
        NODES = a
        t1 = pruning_trie.Trie(db)
        db.ttl = 0
        db.logging = True
        for i in range(NODES):
            t1.update(to_string(i), to_string(i))
            db.commit_refcount_changes(i)
            db.cleanup(i)
            check_db_tightness([t1], db)
        for i in range(NODES):
            t1.delete(to_string(NODES - 1 - i))
            db.commit_refcount_changes(NODES + i)
            db.cleanup(NODES + i)
            check_db_tightness([t1], db)
        assert len(db.kv) == 0
Example #28
 def normalize_value(k, p):
     if k in p:
         if k == 'gas':
             return parse_int_or_hex(p[k])
         elif k == 'callcreates':
             return list(map(callcreate_standard_form, p[k]))
         else:
             return utils.to_string(k)
     return None
Example #30
def test_key(
    filename,
    testname,
    testdata,
):
    logger.debug('running test:%r in %r' % (testname, filename))
    assert keys.check_keystore_json(testdata["json"])
    privkey = keys.decode_keystore_json(testdata["json"], testdata["password"])
    assert utils.encode_hex(privkey) == utils.to_string(testdata["priv"])
Example #31
 def __init__(self, full_signature):
     self.function_data = {}
     self.event_data = {}
     v = vars(self)
     if is_string(full_signature):
         full_signature = json_decode(full_signature)
     for sig_item in full_signature:
         encode_types = [f['type'] for f in sig_item['inputs']]
         signature = [(f['type'], f['name']) for f in sig_item['inputs']]
         name = sig_item['name']
         if '(' in name:
             name = name[:name.find('(')]
         if name in v:
             i = 2
             while name + utils.to_string(i) in v:
                 i += 1
             name += utils.to_string(i)
             sys.stderr.write("Warning: multiple methods with the same "
                              " name. Use %s to call %s with types %r"
                              % (name, sig_item['name'], encode_types))
         sig = name + '(' + ','.join(encode_types) + ')'
         if sig_item['type'] == 'function':
             prefix = big_endian_to_int(utils.sha3(sig)[:4])
             decode_types = [f['type'] for f in sig_item['outputs']]
             is_unknown_type = len(sig_item['outputs']) and \
                 sig_item['outputs'][0]['name'] == 'unknown_out'
             self.function_data[name] = {
                 "prefix": prefix,
                 "encode_types": encode_types,
                 "decode_types": decode_types,
                 "is_unknown_type": is_unknown_type,
                 "is_constant": sig_item.get('constant', False),
                 "signature": signature
             }
         elif sig_item['type'] == 'event':
             prefix = big_endian_to_int(utils.sha3(sig))
             indexed = [f['indexed'] for f in sig_item['inputs']]
             names = [f['name'] for f in sig_item['inputs']]
             self.event_data[prefix] = {
                 "types": encode_types,
                 "name": name,
                 "names": names,
                 "indexed": indexed,
             }
Example #32
 def __init__(self, full_signature):
     self.function_data = {}
     self.event_data = {}
     v = vars(self)
     if is_string(full_signature):
         full_signature = json_decode(full_signature)
     for sig_item in full_signature:
         encode_types = [f['type'] for f in sig_item['inputs']]
         signature = [(f['type'], f['name']) for f in sig_item['inputs']]
         name = sig_item['name']
         if '(' in name:
             name = name[:name.find('(')]
         if name in v:
             i = 2
             while name + utils.to_string(i) in v:
                 i += 1
             name += utils.to_string(i)
             sys.stderr.write("Warning: multiple methods with the same "
                              " name. Use %s to call %s with types %r" %
                              (name, sig_item['name'], encode_types))
         sig = name + '(' + ','.join(encode_types) + ')'
         if sig_item['type'] == 'function':
             prefix = big_endian_to_int(utils.sha3(sig)[:4])
             decode_types = [f['type'] for f in sig_item['outputs']]
             is_unknown_type = len(sig_item['outputs']) and \
                 sig_item['outputs'][0]['name'] == 'unknown_out'
             self.function_data[name] = {
                 "prefix": prefix,
                 "encode_types": encode_types,
                 "decode_types": decode_types,
                 "is_unknown_type": is_unknown_type,
                 "is_constant": sig_item.get('constant', False),
                 "signature": signature
             }
         elif sig_item['type'] == 'event':
             prefix = big_endian_to_int(utils.sha3(sig))
             indexed = [f['indexed'] for f in sig_item['inputs']]
             names = [f['name'] for f in sig_item['inputs']]
             self.event_data[prefix] = {
                 "types": encode_types,
                 "name": name,
                 "names": names,
                 "indexed": indexed,
             }
Example #33
 def __init__(self, full_signature):
     self.function_data = {}
     self.event_data = {}
     v = vars(self)
     if is_string(full_signature):
         full_signature = json_decode(full_signature)
     for sig_item in full_signature:
         if sig_item['type'] == 'constructor':
             continue
         encode_types = [f['type'] for f in sig_item['inputs']]
         signature = [(f['type'], f['name']) for f in sig_item['inputs']]
         name = sig_item['name']
         if '(' in name:
             name = name[:name.find('(')]
         if name in v:
             i = 2
             while name + utils.to_string(i) in v:
                 i += 1
             name += utils.to_string(i)
             sys.stderr.write("Warning: multiple methods with the same "
                              " name. Use %s to call %s with types %r"
                              % (name, sig_item['name'], encode_types))
         if sig_item['type'] == 'function':
             decode_types = [f['type'] for f in sig_item['outputs']]
             is_unknown_type = len(sig_item['outputs']) and \
                 sig_item['outputs'][0]['name'] == 'unknown_out'
             self.function_data[name] = {
                 "prefix": method_id(name, encode_types),
                 "encode_types": encode_types,
                 "decode_types": decode_types,
                 "is_unknown_type": is_unknown_type,
                 "is_constant": sig_item.get('constant', False),
                 "signature": signature
             }
         elif sig_item['type'] == 'event':
             indexed = [f['indexed'] for f in sig_item['inputs']]
             names = [f['name'] for f in sig_item['inputs']]
             self.event_data[event_id(name, encode_types)] = {
                 "types": encode_types,
                 "name": name,
                 "names": names,
                 "indexed": indexed,
                 "anonymous": sig_item.get('anonymous', False)
             }
Example #34
 def __init__(self, full_signature):
     self.function_data = {}
     self.event_data = {}
     v = vars(self)
     if is_string(full_signature):
         full_signature = json_decode(full_signature)
     for sig_item in full_signature:
         if sig_item['type'] == 'constructor':
             continue
         encode_types = [f['type'] for f in sig_item['inputs']]
         signature = [(f['type'], f['name']) for f in sig_item['inputs']]
         name = sig_item['name']
         if '(' in name:
             name = name[:name.find('(')]
         if name in v:
             i = 2
             while name + utils.to_string(i) in v:
                 i += 1
             name += utils.to_string(i)
             sys.stderr.write("Warning: multiple methods with the same "
                              " name. Use %s to call %s with types %r" %
                              (name, sig_item['name'], encode_types))
         if sig_item['type'] == 'function':
             decode_types = [f['type'] for f in sig_item['outputs']]
             is_unknown_type = len(sig_item['outputs']) and \
                 sig_item['outputs'][0]['name'] == 'unknown_out'
             self.function_data[name] = {
                 "prefix": method_id(name, encode_types),
                 "encode_types": encode_types,
                 "decode_types": decode_types,
                 "is_unknown_type": is_unknown_type,
                 "is_constant": sig_item.get('constant', False),
                 "signature": signature
             }
         elif sig_item['type'] == 'event':
             indexed = [f['indexed'] for f in sig_item['inputs']]
             names = [f['name'] for f in sig_item['inputs']]
             self.event_data[event_id(name, encode_types)] = {
                 "types": encode_types,
                 "name": name,
                 "names": names,
                 "indexed": indexed,
                 "anonymous": sig_item.get('anonymous', False)
             }
Example #35
    def update(self, key, value):
        '''
        :param key: a string
        :value: a string
        '''
        if not is_string(key):
            raise Exception("Key must be string")

        # if len(key) > 32:
        #     raise Exception("Max key length is 32")

        if not is_string(value):
            raise Exception("Value must be string")

        # if value == '':
        #     return self.delete(key)
        self.root_node = self._update_and_delete_storage(
            self.root_node, bin_to_nibbles(to_string(key)), to_string(value))
        self.get_root_hash()
Example #36
def test_trie_transfer():
    db = RefcountDB(EphemDB())
    NODES = 60
    t1 = pruning_trie.Trie(db)
    db.ttl = NODES * 2
    for i in range(NODES):
        t1.update(to_string(i), to_string(i))
        db.commit_refcount_changes(i)
        db.cleanup(i)
    t2 = pruning_trie.Trie(db)
    t2.root_hash = t1.root_hash
    assert t2.to_dict() == {to_string(i): to_string(i) for i in range(NODES)}
    for i in range(NODES):
        t2.delete(to_string(i))
        db.commit_refcount_changes(NODES + i)
        db.cleanup(NODES + i)
    for i in range(NODES * 2):
        db.cleanup(2 * NODES + i)
    assert len(db.kv) == 0
Example #37
    def _to_dict(self, node):
        '''convert (key, value) stored in this and the descendant nodes
        to dict items.

        :param node: node in form of list, or BLANK_NODE

        .. note::

            Here key is in full form, rather than key of the individual node
        '''
        if node == BLANK_NODE:
            return {}

        node_type = self._get_node_type(node)

        if is_key_value_type(node_type):
            nibbles = without_terminator(unpack_to_nibbles(node[0]))
            key = b'+'.join([to_string(x) for x in nibbles])
            if node_type == NODE_TYPE_EXTENSION:
                sub_dict = self._to_dict(self._decode_to_node(node[1]))
            else:
                sub_dict = {to_string(NIBBLE_TERMINATOR): node[1]}

            # prepend key of this node to the keys of children
            res = {}
            for sub_key, sub_value in sub_dict.items():
                full_key = (key + b'+' + sub_key).strip(b'+')
                res[full_key] = sub_value
            return res

        elif node_type == NODE_TYPE_BRANCH:
            res = {}
            for i in range(16):
                sub_dict = self._to_dict(self._decode_to_node(node[i]))

                for sub_key, sub_value in sub_dict.items():
                    full_key = (str_to_bytes(str(i)) + b'+' + sub_key).strip(b'+')
                    res[full_key] = sub_value

            if node[16]:
                res[to_string(NIBBLE_TERMINATOR)] = node[-1]
            return res
Example #38
    def update(self, key, value):
        """
        :param key: a string
        :value: a string
        """
        if not is_string(key):
            raise Exception("Key must be string")

        # if len(key) > 32:
        #     raise Exception("Max key length is 32")

        if not is_string(value):
            raise Exception("Value must be string")

        # if value == '':
        #     return self.delete(key)
        old_root = copy.deepcopy(self.root_node)
        self.root_node = self._update_and_delete_storage(
            self.root_node, bin_to_nibbles(to_string(key)), to_string(value))
        self.replace_root_hash(old_root, self.root_node)
Example #39
def enc(typ, arg):
    base, sub, arrlist = typ
    sz = get_size(typ)
    # Encode dynamic-sized strings as <len(str)> + <str>
    if base in ('string', 'bytes') and not sub:
        assert isinstance(arg, (str, bytes, utils.unicode)), \
            "Expecting a string"
        return enc(lentyp, len(arg)) + \
            utils.to_string(arg) + \
            b'\x00' * (utils.ceil32(len(arg)) - len(arg))
    # Encode dynamic-sized lists via the head/tail mechanism described in
    # https://github.com/ethereum/wiki/wiki/Proposal-for-new-ABI-value-encoding
    elif sz is None:
        assert isinstance(arg, list), \
            "Expecting a list argument"
        subtyp = base, sub, arrlist[:-1]
        subsize = get_size(subtyp)
        myhead, mytail = b'', b''
        if arrlist[-1] == []:
            myhead += enc(lentyp, len(arg))
        else:
            assert len(arg) == arrlist[-1][0], \
                "Wrong array size: found %d, expecting %d" % \
                (len(arg), arrlist[-1][0])
        for i in range(len(arg)):
            if subsize is None:
                myhead += enc(lentyp, 32 * len(arg) + len(mytail))
                mytail += enc(subtyp, arg[i])
            else:
                myhead += enc(subtyp, arg[i])
        return myhead + mytail
    # Encode static-sized lists via sequential packing
    else:
        if arrlist == []:
            return utils.to_string(encode_single(typ, arg))
        else:
            subtyp = base, sub, arrlist[:-1]
            o = b''
            for x in arg:
                o += enc(subtyp, x)
            return o
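The padding arithmetic used above for the dynamic 'string'/'bytes' case, spelled out; ceil32_sketch is a stand-in for utils.ceil32 (assumed to round up to the next multiple of 32):

def ceil32_sketch(n):
    return n if n % 32 == 0 else n + 32 - (n % 32)

arg = b'hello'
padded = arg + b'\x00' * (ceil32_sketch(len(arg)) - len(arg))
assert len(padded) == 32 and padded.startswith(b'hello')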
Example #40
def vm_trace(ext, msg, compustate, opcode, pushcache, tracer=log_vm_op):
    """
    This diverges from normal logging, as we use the logging namespace
    only to decide which features get logged in 'eth.vm.op'
    i.e. tracing can not be activated by activating a sub
    like 'eth.vm.op.stack'
    """

    op, in_args, out_args, fee = opcodes.opcodes[opcode]

    trace_data = {}
    trace_data['stack'] = list(map(to_string, list(compustate.prev_stack)))
    if compustate.prev_prev_op in ('MLOAD', 'MSTORE', 'MSTORE8', 'SHA3',
                                   'CALL', 'CALLCODE', 'CREATE',
                                   'CALLDATACOPY', 'CODECOPY', 'EXTCODECOPY'):
        if len(compustate.prev_memory) < 4096:
            trace_data['memory'] = \
                ''.join([encode_hex(ascii_chr(x)) for x
                          in compustate.prev_memory])
        else:
            trace_data['sha3memory'] = \
                encode_hex(utils.sha3(b''.join([ascii_chr(x) for
                                      x in compustate.prev_memory])))
    if compustate.prev_prev_op in ('SSTORE', ) or compustate.steps == 0:
        trace_data['storage'] = ext.log_storage(msg.to)
    trace_data['gas'] = to_string(compustate.prev_gas)
    trace_data['gas_cost'] = to_string(compustate.prev_gas - compustate.gas)
    trace_data['fee'] = fee
    trace_data['inst'] = opcode
    trace_data['pc'] = to_string(compustate.prev_pc)
    if compustate.steps == 0:
        trace_data['depth'] = msg.depth
        trace_data['address'] = msg.to
    trace_data['steps'] = compustate.steps
    trace_data['depth'] = msg.depth
    if op[:4] == 'PUSH':
        print(repr(pushcache))
        trace_data['pushvalue'] = pushcache[compustate.prev_pc]
    tracer.trace('vm', op=op, **trace_data)
    compustate.steps += 1
    compustate.prev_prev_op = op
Example #42
def test_delayed_pruning():
    NODES = 60
    db = RefcountDB(EphemDB())
    t = pruning_trie.Trie(db)
    db.ttl = NODES // 4
    for i in range(NODES):
        t.update(to_string(i), to_string(i))
        db.commit_refcount_changes(i)
        db.cleanup(i)
    for i in range(NODES):
        t.update(to_string(i), to_string(i ** 3))
        db.commit_refcount_changes(i + NODES)
        db.cleanup(i + NODES)
    for i in range(NODES):
        t.delete(to_string(i))
        db.commit_refcount_changes(i + NODES * 2)
        db.cleanup(i + NODES * 2)
    for i in range(NODES // 4):
        db.cleanup(i + NODES * 3)
    assert len(t.to_dict()) == 0
    assert len(db.kv) == 0
Example #43
    def update(self, key, value):
        '''
        :param key: a string
        :value: a string
        '''
        if not is_string(key):
            raise Exception("Key must be string")

        # if len(key) > 32:
        #     raise Exception("Max key length is 32")

        if not is_string(value):
            raise Exception("Value must be string")

        # if value == '':
        #     return self.delete(key)
        self.root_node = self._update_and_delete_storage(
            self.root_node,
            bin_to_nibbles(to_string(key)),
            to_string(value))
        self.get_root_hash()
Example #44
def test_revert_deletes():
    db = RefcountDB(EphemDB())
    NODES = 60
    t1 = pruning_trie.Trie(db)
    db.ttl = NODES * 2
    for i in range(NODES):
        t1.update(to_string(i), to_string(i))
        db.commit_refcount_changes(i)
        db.cleanup(i)
    x = t1.root_hash
    for i in range(NODES):
        t1.delete(to_string(i))
        db.commit_refcount_changes(NODES + i)
        db.cleanup(NODES + i)
    for i in range(NODES * 2 - 1, NODES - 1, -1):
        db.revert_refcount_changes(i)
    for i in range(NODES * 2):
        db.cleanup(NODES + i)
        db.revert_refcount_changes(i)
    t1.root_hash = x
    assert t1.to_dict() == {to_string(i): to_string(i) for i in range(NODES)}
Example #45
 def run(profiler=None):
     print('running')
     i = 0
     seen = b''
     for filename, tests in fixtures.items():
         for testname, testdata in tests.items():
             if i == num:
                 break
             do_test_vm(filename, testname, testdata, profiler=profiler)
             seen += to_string(testname)
             i += 1
     print('ran %d tests' % i)
     print('test key', encode_hex(sha3(seen)))
Example #46
def dump_genesis_block_tests_data(db):
    import json
    g = genesis(db)
    data = dict(
        genesis_state_root=encode_hex(g.state_root),
        genesis_hash=g.hex_hash(),
        genesis_rlp_hex=encode_hex(g.serialize()),
        initial_alloc=dict()
    )
    for addr, balance in GENESIS_INITIAL_ALLOC.items():
        data['initial_alloc'][addr] = to_string(balance)

    print(json.dumps(data, indent=1))
Example #47
    def add_block(self, block, forward_pending_transactions=True):
        "returns True if block was added sucessfully"
        _log = log.bind(block_hash=block)
        # make sure we know the parent
        if not block.has_parent() and not block.is_genesis():
            _log.debug('missing parent')
            return False

        if not block.validate_uncles():
            _log.debug('invalid uncles')
            return False

        if not len(block.nonce) == 8:
            _log.debug('nonce not set')
            return False
        elif not block.header.check_pow(nonce=block.nonce) and\
                not block.is_genesis():
            _log.debug('invalid nonce')
            return False

        if block.has_parent():
            try:
                processblock.verify(block, block.get_parent())
            except processblock.VerificationFailed as e:
                _log.critical('VERIFICATION FAILED', error=e)
                f = os.path.join(utils.data_dir, 'badblock.log')
                open(f, 'w').write(to_string(block.hex_serialize()))
                return False

        if block.number < self.head.number:
            _log.debug("older than head", head_hash=self.head)
            # Q: Should we have any limitations on adding blocks?

        self.index.add_block(block)
        self._store_block(block)

        # set to head if this makes the longest chain w/ most work for that number
        if block.chain_difficulty() > self.head.chain_difficulty():
            _log.debug('new head')
            self._update_head(block, forward_pending_transactions)
        elif block.number > self.head.number:
            _log.warn('has higher blk number than head but lower chain_difficulty',
                      head_hash=self.head, block_difficulty=block.chain_difficulty(),
                      head_difficulty=self.head.chain_difficulty())
        block.transactions.clear_all()
        block.receipts.clear_all()
        block.state.db.commit_refcount_changes(block.number)
        block.state.db.cleanup(block.number)
        self.commit()  # batch commits all changes that came with the new block
        return True
Example #48
def test_revert_adds():
    db = RefcountDB(EphemDB())
    NODES = 60
    t1 = pruning_trie.Trie(db)
    t2 = pruning_trie.Trie(db)
    db.ttl = NODES * 2
    for i in range(NODES):
        t1.update(to_string(i), to_string(i))
        db.commit_refcount_changes(i)
        db.cleanup(i)
    for i in range(NODES):
        t2.update(to_string(i), to_string(i))
        db.commit_refcount_changes(NODES + i)
        db.cleanup(NODES + i)
    for i in range(NODES * 2 - 1, NODES - 1, -1):
        db.revert_refcount_changes(i)
    for i in range(NODES):
        t1.delete(to_string(i))
        db.commit_refcount_changes(NODES + i)
        db.cleanup(NODES + i)
    for i in range(NODES * 2):
        db.cleanup(NODES * 2 + i)
    assert len(db.kv) == 0
Example #49
    def delete(self, key):
        '''
        :param key: a string with length of [0, 32]
        '''
        if not is_string(key):
            raise Exception("Key must be string")

        if len(key) > 32:
            raise Exception("Max key length is 32")

        self.root_node = self._delete_and_delete_storage(
            self.root_node,
            bin_to_nibbles(to_string(key)))
        self.get_root_hash()
Example #50
 def to_dict(self):
     """Serialize the header to a readable dictionary."""
     d = {}
     for field in ('prevhash', 'uncles_hash', 'extra_data', 'nonce',
                   'mixhash'):
         d[field] = b'0x' + encode_hex(getattr(self, field))
     for field in ('state_root', 'tx_list_root', 'receipts_root',
                   'coinbase'):
         d[field] = encode_hex(getattr(self, field))
     for field in ('number', 'difficulty', 'gas_limit', 'gas_used',
                   'timestamp'):
         d[field] = to_string(getattr(self, field))
     d['bloom'] = encode_hex(int256.serialize(self.bloom))
     assert len(d) == len(BlockHeader.fields)
     return d
Example #51
    def account_to_dict(self, address, with_storage_root=False,
                        with_storage=True):
        """Serialize an account to a dictionary with human readable entries.

        :param address: the 20 bytes account address
        :param with_storage_root: include the account's storage root
        :param with_storage: include the whole account's storage
        """
        if len(address) == 40:
            address = decode_hex(address)
        assert len(address) == 20

        if with_storage_root:
            # if there are uncommitted account changes the current storage root
            # is meaningless
            assert len(self.journal) == 0
        med_dict = {}

        account = self._get_acct(address)
        for field in ('balance', 'nonce'):
            value = self.caches[field].get(address, getattr(account, field))
            med_dict[field] = to_string(value)
        code = self.caches['code'].get(address, account.code)
        med_dict['code'] = b'0x' + encode_hex(code)

        storage_trie = SecureTrie(Trie(self.db, account.storage))
        if with_storage_root:
            med_dict['storage_root'] = encode_hex(storage_trie.get_root_hash())
        if with_storage:
            med_dict['storage'] = {}
            d = storage_trie.to_dict()
            subcache = self.caches.get(b'storage:' + address, {})
            subkeys = [utils.zpad(utils.coerce_to_bytes(kk), 32)
                       for kk in list(subcache.keys())]
            for k in list(d.keys()) + subkeys:
                v = d.get(k, None)
                v2 = subcache.get(utils.big_endian_to_int(k), None)
                hexkey = b'0x' + encode_hex(utils.zunpad(k))
                if v2 is not None:
                    if v2 != 0:
                        med_dict['storage'][hexkey] = \
                            b'0x' + encode_hex(utils.int_to_big_endian(v2))
                elif v is not None:
                    med_dict['storage'][hexkey] = b'0x' + encode_hex(rlp.decode(v))

        return med_dict
Example #52
def decint(n):
    if isinstance(n, str):
        n = utils.to_string(n)
    if is_numeric(n) and n < 2**256 and n > -2**255:
        return n
    elif is_numeric(n):
        raise EncodingError("Number out of range: %r" % n)
    elif is_string(n) and len(n) == 40:
        return big_endian_to_int(decode_hex(n))
    elif is_string(n) and len(n) <= 32:
        return big_endian_to_int(n)
    elif is_string(n) and len(n) > 32:
        raise EncodingError("String too long: %r" % n)
    elif n is True:
        return 1
    elif n is False or n is None:
        return 0
    else:
        raise EncodingError("Cannot encode integer: %r" % n)
Example #53
def test_create_gnt(chain):
    owner_addr, receiver_addr, gnt, gntb, cdep = mysetup(chain)
    faucet, _ = chain.provider.get_or_deploy_contract('Faucet',
                                                      deploy_args=[gnt.address])

    assert gnt.call().balanceOf(faucet.address) == 0
    chain.wait.for_receipt(gnt.transact({'from': encode_hex(ethereum.tester.a0)}).transfer(
        faucet.address, 1000 * utils.denoms.ether))
    assert gnt.call().balanceOf(faucet.address) == 1000 * utils.denoms.ether
    key = sha3(to_string(11))
    account = privtoaddr(key)

    ethereum.tester.accounts.append(account)
    ethereum.tester.keys.append(key)

    assert chain.web3.eth.getBalance(encode_hex(account)) == 0
    previousA0 = chain.web3.eth.getBalance(encode_hex(ethereum.tester.a0))
    assert previousA0 > utils.denoms.ether

    tx = Transaction(
        nonce=chain.web3.eth.getTransactionCount(ethereum.tester.a0),
        gasprice=chain.web3.eth.gasPrice,
        startgas=100000,
        to=encode_hex(account),
        value=utils.denoms.ether,
        data=b'',
    )

    tx.sign(ethereum.tester.k0)
    raw_tx = rlp.encode(tx)
    raw_tx_hex = chain.web3.toHex(raw_tx)
    chain.web3.eth.sendRawTransaction(raw_tx_hex)

    assert gnt.call().balanceOf(faucet.address) == 1000 * utils.denoms.ether

    assert chain.web3.eth.getBalance(encode_hex(account)) == utils.denoms.ether

    assert gnt.call().decimals() == 18
    assert gnt.call().balanceOf(encode_hex(account)) == 0
    tx = chain.wait.for_receipt(
        faucet.transact({'from': encode_hex(account)}).create())
    assert gnt.call().balanceOf(encode_hex(account)) == 1000 * utils.denoms.ether
    assert gnt.call().balanceOf(faucet.address) == 0
Example #54
def decint(n, signed=False):
    if isinstance(n, str):
        n = utils.to_string(n)

    if is_numeric(n):
        min_, max_ = (-TT255, TT255 - 1) if signed else (0, TT256 - 1)
        if n > max_ or n < min_:
            raise EncodingError("Number out of range: %r" % n)
        return n
    elif is_string(n):
        if len(n) == 40:
            n = decode_hex(n)
        if len(n) > 32:
            raise EncodingError("String too long: %r" % n)

        i = big_endian_to_int(n)
        return (i - TT256) if signed and i >= TT255 else i
    elif n is True:
        return 1
    elif n is False or n is None:
        return 0
    else:
        raise EncodingError("Cannot encode integer: %r" % n)
Example #55
    def to_dict(self, with_state=False, full_transactions=False,
                with_storage_roots=False, with_uncles=False):
        """Serialize the block to a readable dictionary.

        :param with_state: include state for all accounts
        :param full_transactions: include serialized transactions (hashes
                                  otherwise)
        :param with_storage_roots: if account states are included also include
                                   their storage roots
        :param with_uncles: include uncle hashes
        """
        b = {"header": self.header.to_dict()}
        txlist = []
        for i, tx in enumerate(self.get_transactions()):
            receipt_rlp = self.receipts.get(rlp.encode(i))
            receipt = rlp.decode(receipt_rlp, Receipt)
            if full_transactions:
                txjson = tx.to_dict()
            else:
                txjson = tx.hash
            txlist.append({
                "tx": txjson,
                "medstate": encode_hex(receipt.state_root),
                "gas": to_string(receipt.gas_used),
                "logs": [Log.serialize(log) for log in receipt.logs],
                "bloom": utils.int256.serialize(receipt.bloom)
            })
        b["transactions"] = txlist
        if with_state:
            state_dump = {}
            for address, v in self.state.to_dict().items():
                state_dump[encode_hex(address)] = self.account_to_dict(address, with_storage_roots)
            b['state'] = state_dump
        if with_uncles:
            b['uncles'] = [self.__class__.deserialize_header(u)
                           for u in self.uncles]
        return b