def sender(self):
     if not self._sender:
         # Determine sender
         if self.r == 0 and self.s == 0:
             self._sender = null_address
         else:
             if self.v in (27, 28):
                 vee = self.v
                 sighash = utils.sha3(rlp.encode(self, UnsignedTransaction))
             elif self.v >= 37:
                 vee = self.v - self.network_id * 2 - 8
                 assert vee in (27, 28)
                 rlpdata = rlp.encode(rlp.infer_sedes(self).serialize(self)[
                                      :-3] + [self.network_id, '', ''])
                 sighash = utils.sha3(rlpdata)
             else:
                 raise InvalidTransaction("Invalid V value")
             if self.r >= secpk1n or self.s >= secpk1n or self.r == 0 or self.s == 0:
                 raise InvalidTransaction("Invalid signature values!")
             pub = ecrecover_to_pub(sighash, vee, self.r, self.s)
             if pub == b'\x00' * 64:
                 raise InvalidTransaction(
                     "Invalid signature (zero privkey cannot sign)")
             self._sender = utils.sha3(pub)[-20:]
     return self._sender
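
The v handling above follows EIP-155: a replay-protected transaction carries v = recovery_id + 35 + 2 * network_id, so subtracting network_id * 2 + 8 maps it back to the classic 27/28 range. A minimal arithmetic sketch of that mapping (illustrative values, assuming network_id 1; not part of the class above):

network_id = 1                            # assumed chain/network id
recovery_id = 0                           # secp256k1 recovery id (0 or 1)
v = recovery_id + 35 + 2 * network_id     # 37 on network 1
vee = v - network_id * 2 - 8              # same formula as sender() uses
assert vee == 27                          # back in the pre-EIP-155 range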
Example #2
 def commit_refcount_changes(self, epoch):
     # Save death row nodes
     timeout_epoch = epoch + self.ttl
     try:
         death_row_nodes = rlp.decode(
             self._keyValueStorage.get('deathrow:' + str(timeout_epoch)))
     except BaseException:
         death_row_nodes = []
     for nodekey in self.death_row:
         refcount, val = rlp.decode(
             self._keyValueStorage.get(b'r:' + nodekey))
         if refcount == ZERO_ENCODED:
             new_refcount = utils.encode_int(
                 DEATH_ROW_OFFSET + timeout_epoch)
             self._keyValueStorage.put(
                 b'r:' + nodekey, rlp.encode([new_refcount, val]))
     if len(self.death_row) > 0:
         sys.stderr.write('%d nodes marked for pruning during block %d\n' %
                          (len(self.death_row), timeout_epoch))
     death_row_nodes.extend(self.death_row)
     self.death_row = []
     self._keyValueStorage.put('deathrow:' + str(timeout_epoch),
                               rlp.encode(death_row_nodes))
     # Save journal
     try:
         journal = rlp.decode(
             self._keyValueStorage.get('journal:' + str(epoch)))
     except BaseException:
         journal = []
     journal.extend(self.journal)
     self.journal = []
     self._keyValueStorage.put('journal:' + str(epoch), rlp.encode(journal))
Example #3
    def save_util_root(self):
        new_hash = self.compute_hash(self.value)

        # not changed
        if self.value_hash_saved == new_hash:
            return

        if self.value_hash_saved and new_hash is None:
            self.delete()

        # self is already root node
        if not self.parent:
            self.db.put(self.key, rlp.encode(self.value))
            self.db.commit()
            self.set_sanity()
            return

        # self is intermediate node
        self.db.put(new_hash, rlp.encode(self.value))

        # update parent value
        if self.value_hash_saved is None:
            # newly created
            self.parent.value.append(new_hash)
        else:
            # modified
            self.parent.value[self.parent.value.index(self.key)] = self.key

        self.parent.save_util_root()
        self.set_sanity()
Example #4
def remote_blocks_received_handler(sender, block_lst, peer, **kwargs):
    logger.debug("received %d remote blocks", len(block_lst))

    old_head = chain_manager.head
    # assuming chain order w/ newest block first
    for block_data in reversed(block_lst):
        try:
            block = blocks.Block.deserialize(rlp.encode(block_data))
        except blocks.UnknownParentException:
            # no way to ask peers for older parts of chain
            bhash = utils.sha3(rlp.encode(block_data)).encode('hex')[:4]
            phash = block_data[0][0].encode('hex')[:4]
            number = utils.decode_int(block_data[0][6])
            logger.debug('Block(#%d %s %s) with unknown parent, requesting ...',
                         number, bhash, phash)
            chain_manager.synchronize_blockchain()
            break
        if block.hash in chain_manager:
            logger.debug('Known %r', block)
        else:
            if block.has_parent():
                # add block & set HEAD if it's longest chain
                success = chain_manager.add_block(block)
                if success:
                    logger.debug('Added %r', block)
            else:
                logger.debug('Orphan %r', block)
    if chain_manager.head != old_head:
        chain_manager.synchronize_blockchain()
Example #5
def _recv_Blocks(self, data):
    print("RECEIVED BLOCKS", len(data))
    if len(data) < MIN_BLOCKS:
        return
    assert blocks.TransientBlock(rlp.encode(data[0])).number >= blocks.TransientBlock(
        rlp.encode(data[-1])).number
    for x in data:
        enc = rlp.encode(x)
        tb = blocks.TransientBlock(enc)
        print(tb)
        self.blk_counter += 1
        if self.lowest_block is None:
            self.lowest_block = tb.number
        else:
            if self.lowest_block - 1 == tb.number:
                self.lowest_block = tb.number
            else:  # i.e. newly mined block sent
                return
        if tb not in collected_blocks:
            collected_blocks.append(tb)
        # exit if we are at the genesis
        if tb.number == 1:
            print('done')
            for tb in sorted(collected_blocks, key=attrgetter('number')):
                print('writing', tb)
                fh.write(tb.rlpdata.encode('hex') + '\n')  # LOG line
            sys.exit(0)
    # fetch more
    print("ASKING FOR MORE HASHES", tb.hash.encode('hex'), tb.number)
    self.send_GetBlockHashes(tb.hash, NUM_BLOCKS_PER_REQUEST)
Example #6
    def to_dict(self, with_state=False, full_transactions=False,
                      with_storage_roots=False, with_uncles=False):
        """
        serializes the block
        with_state:             include state for all accounts
        full_transactions:      include serialized tx (hashes otherwise)
        with_uncles:            include uncle hashes
        """
        b = {}
        for name, typ, default in block_structure:
            b[name] = utils.printers[typ](getattr(self, name))
        txlist = []
        for i in range(self.transaction_count):
            tx_rlp = self.transactions.get(rlp.encode(utils.encode_int(i)))
            tx, msr, gas = rlp.decode(tx_rlp)
            if full_transactions:
                txjson = transactions.Transaction.create(tx).to_dict()
            else:
                txjson = utils.sha3(rlp.descend(tx_rlp, 0)).encode('hex')  # tx hash
            txlist.append({
                "tx": txjson,
                "medstate": msr.encode('hex'),
                "gas": str(utils.decode_int(gas))
            })
        b["transactions"] = txlist
        if with_state:
            state_dump = {}
            for address, v in self.state.to_dict().iteritems():
                state_dump[address.encode('hex')] = \
                    self.account_to_dict(address, with_storage_roots)
            b['state'] = state_dump
        if with_uncles:
            b['uncles'] = [utils.sha3(rlp.encode(u)).encode('hex') for u in self.uncles]

        return b
Example #7
 def commit_state(self):
     changes = []
     if not len(self.journal):
         # log_state.trace('delta', changes=[])
         return
     for address in self.caches['all']:
         acct = rlp.decode(self.state.get(address.decode('hex'))) \
             or self.mk_blank_acct()
         for i, (key, typ, default) in enumerate(acct_structure):
             if key == 'storage':
                 t = trie.Trie(self.db, acct[i])
                 for k, v in self.caches.get('storage:' + address, {}).iteritems():
                     enckey = utils.zpad(utils.coerce_to_bytes(k), 32)
                     val = rlp.encode(utils.int_to_big_endian(v))
                     changes.append(['storage', address, k, v])
                     if v:
                         t.update(enckey, val)
                     else:
                         t.delete(enckey)
                 acct[i] = t.root_hash
             else:
                 if address in self.caches[key]:
                     v = self.caches[key].get(address, default)
                     changes.append([key, address, v])
                     acct[i] = self.encoders[acct_structure[i][1]](v)
         self.state.update(address.decode('hex'), rlp.encode(acct))
     log_state.trace('delta', changes=changes)
     self.reset_cache()
Example #8
 def validate_uncles(self):
     if utils.sha3(rlp.encode(self.uncles)) != self.uncles_hash:
         return False
     # Check uncle validity
     ancestor_chain = [self]
     # Uncle can have a block from 2-7 blocks ago as its parent
     for i in [1, 2, 3, 4, 5, 6, 7]:
         if ancestor_chain[-1].number > 0:
             ancestor_chain.append(ancestor_chain[-1].get_parent())
     ineligible = []
     # Uncles of this block cannot be direct ancestors and cannot also
     # be uncles included 1-6 blocks ago
     for ancestor in ancestor_chain[1:]:
         ineligible.extend(ancestor.uncles)
     ineligible.extend([b.list_header() for b in ancestor_chain])
     eligible_ancestor_hashes = [x.hash for x in ancestor_chain[2:]]
     for uncle in self.uncles:
         if not check_header_pow(uncle):
             return False
         # uncle's parent cannot be the block's own parent
         prevhash = uncle[block_structure_rev['prevhash'][0]]
         if prevhash not in eligible_ancestor_hashes:
             logger.debug("%r: Uncle does not have a valid ancestor", self)
             return False
         if uncle in ineligible:
             logger.debug("%r: Duplicate uncle %r", self, utils.sha3(rlp.encode(uncle)).encode('hex'))
             return False
         ineligible.append(uncle)
     return True
Example #9
    def commit_state(self):
        """Commit account caches"""
        """Write the acount caches on the corresponding tries."""
        changes = []
        if len(self.journal) == 0:
            # log_state.trace('delta', changes=[])
            return
        addresses = sorted(list(self.caches['all'].keys()))
        for addr in addresses:
            acct = self._get_acct(addr)

            # apply cached account fields (storage is converted to a trie root below)
            for field in ('balance', 'nonce', 'code', 'storage'):
                if addr in self.caches[field]:
                    v = self.caches[field][addr]
                    changes.append([field, addr, v])
                    setattr(acct, field, v)

            t = SecureTrie(Trie(self.db, acct.storage))
            for k, v in self.caches.get(b'storage:' + addr, {}).items():
                enckey = utils.zpad(utils.coerce_to_bytes(k), 32)
                val = rlp.encode(v)
                changes.append(['storage', addr, k, v])
                if v:
                    t.update(enckey, val)
                else:
                    t.delete(enckey)
            acct.storage = t.root_hash
            self.state.update(addr, rlp.encode(acct))
        log_state.trace('delta', changes=changes)
        self.reset_cache()
        self.db.put(b'validated:' + self.hash, '1')
Example #10
    def __init__(self, data=None):

        if not data:
            return

        if re.match('^[0-9a-fA-F]*$', data):
            data = data.decode('hex')

        header, transaction_list, self.uncles = rlp.decode(data)
        [self.number,
         self.prevhash,
         self.uncles_root,
         self.coinbase,
         state_root,
         self.transactions_root,
         self.difficulty,
         self.timestamp,
         self.nonce,
         self.extra] = header
        self.transactions = [Transaction(x) for x in transaction_list]
        self.state = Trie('statedb', state_root)
        self.reward = 0

        # Verifications
        if self.state.root != '' and self.state.db.get(self.state.root) == '':
            raise Exception("State Merkle root not found in database!")
        if bin_sha256(rlp.encode(transaction_list)) != self.transactions_root:
            raise Exception("Transaction list root hash does not match!")
        if bin_sha256(rlp.encode(self.uncles)) != self.uncles_root:
            raise Exception("Uncle root hash does not match!")
Example #11
 def commit_state(self):
     if not len(self.journal):
         return
     for address in self.caches['all']:
         acct = rlp.decode(self.state.get(address.decode('hex'))) \
             or self.mk_blank_acct()
         for i, (key, typ, default) in enumerate(acct_structure):
             if key == 'storage':
                 t = trie.Trie(utils.get_db_path(), acct[i])
                 t.proof_mode = self.proof_mode
                 t.proof_nodes = self.proof_nodes
                 for k, v in self.caches.get('storage:'+address, {}).iteritems():
                     enckey = utils.zpad(utils.coerce_to_bytes(k), 32)
                     val = rlp.encode(utils.int_to_big_endian(v))
                     if v:
                         t.update(enckey, val)
                     else:
                         t.delete(enckey)
                 acct[i] = t.root_hash
                 if self.proof_mode == RECORDING:
                     self.proof_nodes.extend(t.proof_nodes)
             else:
                 if address in self.caches[key]:
                     v = self.caches[key].get(address, default)
                     acct[i] = utils.encoders[acct_structure[i][1]](v)
         self.state.update(address.decode('hex'), rlp.encode(acct))
     if self.proof_mode == RECORDING:
         self.proof_nodes.extend(self.state.proof_nodes)
         self.state.proof_nodes = []
     self.reset_cache()
Example #12
def test_returnten():
    s = tester.state()
    open(filename, 'w').write(mul2_code)
    c = s.contract(returnten_code)
    s.send(tester.k0, c, 0)
    b2 = rlp.decode(rlp.encode(s.block), blocks.Block, db=s.db)
    assert rlp.encode(b2) == rlp.encode(s.block)
Example #13
def test_add_side_chain(db, alt_db):
    """"
    Local: L0, L1, L2
    add
    Remote: R0, R1
    """
    k, v, k2, v2 = accounts()
    # Remote: mine one block
    R0 = mkquickgenesis({v: {"balance": utils.denoms.ether * 1}}, db=db)
    store_block(R0)
    tx0 = get_transaction(nonce=0)
    R1 = mine_next_block(R0, transactions=[tx0])
    store_block(R1)
    assert tx0.hash in [x.hash for x in R1.get_transactions()]

    # Local: mine two blocks
    L0 = mkquickgenesis({v: {"balance": utils.denoms.ether * 1}}, alt_db)
    chain = Chain(env=env(L0.db), genesis=L0)
    tx0 = get_transaction(nonce=0)
    L1 = mine_next_block(L0, transactions=[tx0])
    chain.add_block(L1)
    tx1 = get_transaction(nonce=1)
    L2 = mine_next_block(L1, transactions=[tx1])
    chain.add_block(L2)

    # receive serialized remote blocks, oldest first
    rlp_blocks = [rlp.encode(R0), rlp.encode(R1)]
    for rlp_block in rlp_blocks:
        block = blocks.Block.deserialize(rlp.decode(rlp_block), env=chain.env)
        chain.add_block(block)

    assert L2.hash in chain
Example #14
File: chain.py Project: firefox0x/py-evm
    def persist_header_to_db(self, header):
        """
        :returns: iterable of headers newly on the canonical chain
        """
        if header.parent_hash != GENESIS_PARENT_HASH and not self.header_exists(header.parent_hash):
            raise ParentNotFound(
                "Cannot persist block header ({}) with unknown parent ({})".format(
                    encode_hex(header.hash), encode_hex(header.parent_hash)))

        self.db.set(
            header.hash,
            rlp.encode(header),
        )

        if header.parent_hash == GENESIS_PARENT_HASH:
            score = header.difficulty
        else:
            score = self.get_score(header.parent_hash) + header.difficulty
        self.db.set(
            make_block_hash_to_score_lookup_key(header.hash),
            rlp.encode(score, sedes=rlp.sedes.big_endian_int))

        try:
            head_score = self.get_score(self.get_canonical_head().hash)
        except CanonicalHeadNotFound:
            new_headers = self._set_as_canonical_chain_head(header)
        else:
            if score > head_score:
                new_headers = self._set_as_canonical_chain_head(header)
            else:
                new_headers = []

        return new_headers
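
For context, the score stored above can be read back with the same sedes. A small standalone round-trip sketch using pyrlp directly:

import rlp

encoded_score = rlp.encode(12345, sedes=rlp.sedes.big_endian_int)
assert rlp.decode(encoded_score, sedes=rlp.sedes.big_endian_int) == 12345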
Example #15
def test_block_serialization_same_db(db):
    k, v, k2, v2 = accounts()
    blk = mkquickgenesis({v: {"balance": utils.denoms.ether * 1}}, db)
    assert blk.hash == rlp.decode(rlp.encode(blk), blocks.Block, env=env(db)).hash
    store_block(blk)
    blk2 = mine_next_block(blk)
    assert blk.hash == rlp.decode(rlp.encode(blk), blocks.Block, env=env(db)).hash
    assert blk2.hash == rlp.decode(rlp.encode(blk2), blocks.Block, env=env(db)).hash
Example #16
File: test_lazy.py Project: janx/pyrlp
def test_peek():
    assert rlp.peek(rlp.encode(b''), []) == b''
    nested = rlp.encode([0, 1, [2, 3]])
    assert rlp.peek(nested, [2, 0], big_endian_int) == 2
    for index in [3, [3], [0, 0], [2, 2], [2, 1, 0]]:
        with pytest.raises(IndexError):
            rlp.peek(nested, index)
    assert rlp.peek(nested, 2, CountableList(big_endian_int)) == [2, 3]
Example #17
 def _add_transaction_to_list(self, tx_lst_serialized,
                              state_root, gas_used_encoded):
     # adds encoded data # FIXME: the constructor should get objects
     assert isinstance(tx_lst_serialized, list)
     data = [tx_lst_serialized, state_root, gas_used_encoded]
     self.transactions.update(
         rlp.encode(utils.encode_int(self.transaction_count)),
         rlp.encode(data))
     self.transaction_count += 1
Example #18
 def encode_payload(cls, data):
     if isinstance(data, dict):  # convert dict to ordered list
         assert isinstance(cls.structure, list)
         data = [data[x[0]] for x in cls.structure]
     if isinstance(cls.structure, sedes.CountableList):
         return rlp.encode(data, cls.structure)
     else:
         assert len(data) == len(cls.structure)
         return rlp.encode(data, sedes=sedes.List([x[1] for x in cls.structure]))
Example #19
File: test_sedes.py Project: janx/pyrlp
def test_serializable():
    class Test1(Serializable):
        fields = (
            ('field1', big_endian_int),
            ('field2', binary),
            ('field3', List((big_endian_int, binary)))
        )

    class Test2(Serializable):
        fields = (
            ('field1', Test1),
            ('field2', List((Test1, Test1))),
        )

    t1a_data = (5, 'a', (0, ''))
    t1b_data = (9, 'b', (2, ''))
    test1a = Test1(*t1a_data)
    test1b = Test1(*t1b_data)
    test2 = Test2(test1a, [test1a, test1b])

    # equality
    assert test1a == test1a
    assert test1b == test1b
    assert test2 == test2
    assert test1a != test1b
    assert test1b != test2
    assert test2 != test1a

    with pytest.raises(SerializationError):
        Test1.serialize(test2)
    with pytest.raises(SerializationError):
        Test2.serialize(test1a)
    with pytest.raises(SerializationError):
        Test2.serialize(test1b)

    # inference
    assert infer_sedes(test1a) == Test1
    assert infer_sedes(test1b) == Test1
    assert infer_sedes(test2) == Test2

    # serialization
    serial_1a = Test1.serialize(test1a)
    serial_1b = Test1.serialize(test1b)
    serial_2 = Test2.serialize(test2)
    assert serial_1a == [b'\x05', b'a', [b'', b'']]
    assert serial_1b == [b'\x09', b'b', [b'\x02', b'']]
    assert serial_2 == [serial_1a, [serial_1a, serial_1b]]

    # deserialization
    assert Test1.deserialize(serial_1a) == test1a
    assert Test1.deserialize(serial_1b) == test1b
    assert Test2.deserialize(serial_2) == test2

    # encoding and decoding
    assert decode(encode(test1a), Test1) == test1a
    assert decode(encode(test1b), Test1) == test1b
    assert decode(encode(test2), Test2) == test2
Example #20
File: state.py Project: firefox0x/py-evm
def make_trie_root_and_nodes(transactions, trie_class=HexaryTrie, chain_db_class=ChainDB):
    chaindb = chain_db_class(MemoryDB(), trie_class=trie_class)
    db = chaindb.db
    transaction_db = trie_class(db, chaindb.empty_root_hash)

    for index, transaction in enumerate(transactions):
        index_key = rlp.encode(index, sedes=rlp.sedes.big_endian_int)
        transaction_db[index_key] = rlp.encode(transaction)

    return transaction_db.root_hash, transaction_db.db.wrapped_db.kv_store
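
A hedged usage note: any rlp-serializable objects can stand in for transactions here, so a quick smoke test (assuming the py-evm fixtures above are importable) might look like:

root_hash, node_store = make_trie_root_and_nodes([b'\x01', b'\x02'])
assert isinstance(root_hash, bytes) and isinstance(node_store, dict)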
Example #21
 def save_prepare(self, prepare, my=False):
     if my:
         self.db.put(self.my_prepare_key_ % prepare.epoch, rlp.encode(prepare))
     count_key = self.prepare_count_key_ % prepare.hash
     try:
         count = self.get_int(count_key)
     except KeyError:
         count = 0
     self.db.put(self.prepare_key_ % (prepare.hash, count), rlp.encode(prepare))
     self.put_int(count_key, count+1)
Example #22
 def save_commit(self, commit, my=False):
     if my:
         self.db.put(self.my_commit_key_ % commit.epoch, rlp.encode(commit))
     count_key = self.commit_count_key_ % commit.hash
     try:
         count = self.get_int(count_key)
     except KeyError:
         count = 0
     self.db.put(self.commit_key_ % (commit.hash, count), rlp.encode(commit))
     self.put_int(count_key, count+1)
Example #23
 def _add_transactions(self, blk):
     "'tx_hash' -> 'rlp([blockhash,tx_number])"
     for i in range(blk.transaction_count):
         i_enc = utils.encode_int(i)
         # work on rlp data to avoid unnecessary de/serialization
         td = blk.transactions.get(rlp.encode(i_enc))
         tx = rlp.descend(td, 0)
         key = utils.sha3(tx)
         value = rlp.encode([blk.hash, i_enc])
         self.db.put(key, value)
Example #24
 def serialize(self):
     txlist = []
     for i in range(self.transaction_count):
         txlist.append(self.transactions.get(utils.encode_int(i)))
     self.state_root = self.state.root
     self.tx_list_root = self.transactions.root
     self.uncles_hash = sha3(rlp.encode(self.uncles))
     header = []
     for name, typ, default in block_structure:
         header.append(utils.encoders[typ](getattr(self, name)))
     return rlp.encode([header, txlist, self.uncles])
Example #25
File: test_lazy.py Project: janx/pyrlp
def test_string():
    for s in (b'', b'asdf', b'a' * 56, b'b' * 123):
        dec = lambda: rlp.decode_lazy(rlp.encode(s))
        assert isinstance(dec(), bytes)
        assert len(dec()) == len(s)
        assert dec() == s
        assert rlp.peek(rlp.encode(s), []) == s
        with pytest.raises(IndexError):
            rlp.peek(rlp.encode(s), 0)
        with pytest.raises(IndexError):
            rlp.peek(rlp.encode(s), [0])
Example #26
    def add_transaction_to_list(self, tx):
        """Add a transaction to the transaction trie.

        Note that this does not execute anything, i.e. the state is not
        updated.
        """
        k = rlp.encode(self.transaction_count)
        self.transactions.update(k, rlp.encode(tx))
        r = self.mk_transaction_receipt(tx)
        self.receipts.update(k, rlp.encode(r))
        self.bloom |= r.bloom  # int
        self.transaction_count += 1
Example #27
def run_ethash_test(params, mode):
    if 'header' not in params:
        b = blocks.genesis(db)
        b.nonce = decode_hex(params['nonce'])
        b.number = params.get('number', 0)
        header = b.header
        params['header'] = encode_hex(rlp.encode(b.header))
    else:
        header = blocks.BlockHeader(decode_hex(params['header']))
    header_hash = header.mining_hash
    cache_size = ethash.get_cache_size(header.number)
    full_size = ethash.get_full_size(header.number)
    seed = b'\x00' * 32
    for i in range(header.number // ethash_utils.EPOCH_LENGTH):
        seed = utils.sha3(seed)
    nonce = header.nonce
    assert len(nonce) == 8
    assert len(seed) == 32
    t1 = time.time()
    cache = ethash.mkcache(cache_size, seed)
    t2 = time.time()
    cache_hash = encode_hex(utils.sha3(ethash.serialize_cache(cache)))
    t6 = time.time()
    light_verify = ethash.hashimoto_light(full_size, cache, header_hash, nonce)
    t7 = time.time()
    # assert full_mine == light_mine
    out = {
        "seed": encode_hex(seed),
        "header_hash": encode_hex(header_hash),
        "nonce": encode_hex(nonce),
        "cache_size": cache_size,
        "full_size": full_size,
        "cache_hash": cache_hash,
        "mixhash": encode_hex(light_verify["mix digest"]),
        "result": encode_hex(light_verify["result"]),
    }
    if mode == FILL:
        header.mixhash = light_verify["mix digest"]
        params["header"] = encode_hex(rlp.encode(header))
        for k, v in list(out.items()):
            params[k] = v
        return params
    elif mode == VERIFY:
        should, actual = header.mixhash, light_verify['mix digest']
        assert should == actual, "Mismatch: mixhash %r %r" % (should, actual)
        for k, v in list(out.items()):
            assert params[k] == v, "Mismatch: " + k + ' %r %r' % (params[k], v)
    elif mode == TIME:
        return {
            "cache_gen": t2 - t1,
            "verification_time": t7 - t6
        }
Example #28
 def serialize(self):
     txlist = [x.serialize() for x in self.transactions]
     header = [self.number,
               self.prevhash,
               bin_sha256(rlp.encode(self.uncles)),
               self.coinbase,
               self.state.root,
               bin_sha256(rlp.encode(txlist)),
               self.difficulty,
               self.timestamp,
               self.extradata,
               self.nonce]
     return rlp.encode([header, txlist, self.uncles])
Example #29
def warn_invalid(block, errortype='other'):
    try:
        make_request('http://badblocks.ethereum.org', {
            "block": utils.encode_hex(rlp.encode(block)),
            "errortype": errortype,
            "hints": {
                "receipts": [utils.encode_hex(rlp.encode(x)) for x in
                             block.get_receipts()],
                "vmtrace": "NOT YET IMPLEMENTED"
            }
        })
    except:
        sys.stderr.write('Failed to connect to badblocks.ethereum.org\n')
Example #30
File: blocks.py Project: jo/pyethereum
 def serialize(self):
     txlist = [x.serialize() for x in self.transactions]
     header = [encode_int(self.number),
               self.prevhash,
               sha3(rlp.encode(self.uncles)),
               self.coinbase.decode('hex'),
               self.state.root,
               sha3(rlp.encode(txlist)),
               encode_int(self.difficulty),
               encode_int(self.timestamp),
               self.extradata,
               encode_int(self.nonce)]
     return rlp.encode([header, txlist, self.uncles])
Example #31
def sign_tx(
    client: "TrezorClient",
    chain_id: int,
    address: str,
    amount: int,
    gas_limit: Optional[int],
    gas_price: Optional[int],
    nonce: Optional[int],
    data: Optional[str],
    publish: bool,
    to_address: str,
    tx_type: Optional[int],
    token: Optional[str],
    max_gas_fee: Optional[int],
    max_priority_fee: Optional[int],
    access_list: List[ethereum.messages.EthereumAccessList],
    eip2718_type: Optional[int],
) -> str:
    """Sign (and optionally publish) Ethereum transaction.

    Use TO_ADDRESS as destination address, or set to "" for contract creation.

    Specify a contract address with the --token option to send an ERC20 token.

    You can specify AMOUNT and gas price either as a number of wei,
    or you can use a unit suffix.

    Use the --list-units option to show all known currency units.
    ERC20 token amounts are specified in eth/wei, custom units are not supported.

    If any of gas price, gas limit and nonce is not specified, this command will
    try to connect to an ethereum node and auto-fill these values. You can configure
    the connection with WEB3_PROVIDER_URI environment variable.
    """
    if not HAVE_SIGN_TX:
        click.echo("Ethereum requirements not installed.")
        click.echo("Please run:")
        click.echo()
        click.echo("  pip install web3 rlp")
        sys.exit(1)

    is_eip1559 = eip2718_type == 2
    w3 = web3.Web3()
    if ((not is_eip1559 and gas_price is None) or any(
            x is None
            for x in (gas_limit, nonce)) or publish) and not w3.isConnected():
        click.echo("Failed to connect to Ethereum node.")
        click.echo(
            "If you want to sign offline, make sure you provide --gas-price, "
            "--gas-limit and --nonce arguments")
        sys.exit(1)

    if data is not None and token is not None:
        click.echo("Can't send tokens and custom data at the same time")
        sys.exit(1)

    address_n = tools.parse_path(address)
    from_address = ethereum.get_address(client, address_n)

    if token:
        data = _erc20_contract(w3, token, to_address, amount)
        to_address = token
        amount = 0

    if data:
        data_bytes = ethereum.decode_hex(data)
    else:
        data_bytes = b""

    if gas_limit is None:
        gas_limit = w3.eth.estimateGas({
            "to": to_address,
            "from": from_address,
            "value": amount,
            "data": f"0x{data_bytes.hex()}",
        })

    if nonce is None:
        nonce = w3.eth.getTransactionCount(from_address)

    assert gas_limit is not None
    assert nonce is not None

    if is_eip1559:
        assert max_gas_fee is not None
        assert max_priority_fee is not None
        sig = ethereum.sign_tx_eip1559(
            client,
            n=address_n,
            nonce=nonce,
            gas_limit=gas_limit,
            to=to_address,
            value=amount,
            data=data_bytes,
            chain_id=chain_id,
            max_gas_fee=max_gas_fee,
            max_priority_fee=max_priority_fee,
            access_list=access_list,
        )
    else:
        if gas_price is None:
            gas_price = w3.eth.gasPrice
        assert gas_price is not None
        sig = ethereum.sign_tx(
            client,
            n=address_n,
            tx_type=tx_type,
            nonce=nonce,
            gas_price=gas_price,
            gas_limit=gas_limit,
            to=to_address,
            value=amount,
            data=data_bytes,
            chain_id=chain_id,
        )

    to = ethereum.decode_hex(to_address)
    if is_eip1559:
        transaction = rlp.encode((
            chain_id,
            nonce,
            max_priority_fee,
            max_gas_fee,
            gas_limit,
            to,
            amount,
            data_bytes,
            _format_access_list(access_list
                                ) if access_list is not None else [],
        ) + sig)
    elif tx_type is None:
        transaction = rlp.encode((nonce, gas_price, gas_limit, to, amount,
                                  data_bytes) + sig)
    else:
        transaction = rlp.encode((tx_type, nonce, gas_price, gas_limit, to,
                                  amount, data_bytes) + sig)
    if eip2718_type is not None:
        eip2718_prefix = f"{eip2718_type:02x}"
    else:
        eip2718_prefix = ""
    tx_hex = f"0x{eip2718_prefix}{transaction.hex()}"

    if publish:
        tx_hash = w3.eth.sendRawTransaction(tx_hex).hex()
        return f"Transaction published with ID: {tx_hash}"
    else:
        return f"Signed raw transaction:\n{tx_hex}"
Example #32
async def test_lightchain_integration(request, event_loop, caplog,
                                      geth_ipc_path, enode, geth_process):
    """Test LightChainSyncer/LightPeerChain against a running geth instance.

    In order to run this manually, you can use `tox -e py36-lightchain_integration` or:

        pytest --integration --capture=no tests/integration/test_lightchain_integration.py

    The fixture for this test was generated with:

        geth --testnet --syncmode full

    It only needs the first 11 blocks for this test to succeed.
    """
    if not pytest.config.getoption("--integration"):
        pytest.skip("Not asked to run integration tests")

    # will almost certainly want verbose logging in a failure
    caplog.set_level(logging.DEBUG)

    # make sure geth has been launched
    wait_for_socket(geth_ipc_path)

    remote = Node.from_uri(enode)
    base_db = AtomicDB()
    chaindb = AsyncChainDB(base_db)
    chaindb.persist_header(ROPSTEN_GENESIS_HEADER)
    headerdb = AsyncHeaderDB(base_db)
    context = ChainContext(
        headerdb=headerdb,
        network_id=ROPSTEN_NETWORK_ID,
        vm_configuration=ROPSTEN_VM_CONFIGURATION,
        client_version_string='trinity-test',
        listen_port=30303,
        p2p_version=DEVP2P_V5,
    )
    peer_pool = LESPeerPool(
        privkey=ecies.generate_privkey(),
        context=context,
    )
    chain = AsyncRopstenChain(base_db)
    syncer = LightChainSyncer(chain, chaindb, peer_pool)
    syncer.min_peers_to_sync = 1
    peer_chain = LightPeerChain(headerdb, peer_pool)

    asyncio.ensure_future(peer_pool.run())
    asyncio.ensure_future(connect_to_peers_loop(peer_pool, tuple([remote])))
    asyncio.ensure_future(peer_chain.run())
    asyncio.ensure_future(syncer.run())
    await asyncio.sleep(
        0)  # Yield control to give the LightChainSyncer a chance to start

    def finalizer():
        event_loop.run_until_complete(peer_pool.cancel())
        event_loop.run_until_complete(peer_chain.cancel())
        event_loop.run_until_complete(syncer.cancel())

    request.addfinalizer(finalizer)

    n = 11

    # Wait for the chain to sync a few headers.
    async def wait_for_header_sync(block_number):
        while headerdb.get_canonical_head().block_number < block_number:
            await asyncio.sleep(0.1)

    await asyncio.wait_for(wait_for_header_sync(n), 5)

    # https://ropsten.etherscan.io/block/11
    header = headerdb.get_canonical_block_header_by_number(n)
    body = await peer_chain.coro_get_block_body_by_hash(header.hash)
    assert len(body['transactions']) == 15

    receipts = await peer_chain.coro_get_receipts(header.hash)
    assert len(receipts) == 15
    assert encode_hex(keccak(rlp.encode(receipts[0]))) == (
        '0xf709ed2c57efc18a1675e8c740f3294c9e2cb36ba7bb3b89d3ab4c8fef9d8860')

    assert len(peer_pool) == 1
    peer = peer_pool.highest_td_peer
    head = await peer_chain.coro_get_block_header_by_hash(
        peer.head_info.head_hash)

    # In order to answer queries for contract code, geth needs the state trie entry for the block
    # we specify in the query, but because of fast sync we can only assume it has that for recent
    # blocks, so we use the current head to lookup the code for the contract below.
    # https://ropsten.etherscan.io/address/0x95a48dca999c89e4e284930d9b9af973a7481287
    contract_addr = decode_hex('0x8B09D9ac6A4F7778fCb22852e879C7F3B2bEeF81')
    contract_code = await peer_chain.coro_get_contract_code(
        head.hash, contract_addr)
    assert encode_hex(contract_code) == '0x600060006000600060006000356000f1'

    account = await peer_chain.coro_get_account(head.hash, contract_addr)
    assert account.code_hash == keccak(contract_code)
    assert account.balance == 0
Example #33
def mk_contract_address(sender, nonce):
    return utils.sha3(rlp.encode([sender, nonce]))[12:]
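
A short illustrative use (hypothetical sender, not from the source): the contract address is the last 20 bytes of keccak(rlp([sender, nonce])), so it changes with every nonce.

sender = b'\x01' * 20                     # hypothetical 20-byte sender address
addr_nonce0 = mk_contract_address(sender, 0)
addr_nonce1 = mk_contract_address(sender, 1)
assert len(addr_nonce0) == 20 and addr_nonce0 != addr_nonce1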
Example #34
 def serialize_header(self):
     return rlp.encode(self.list_header())
Example #35
File: common.py Project: obutuz/epoch
def encode_signed_tx(encoded_tx, signatures):
    tag = bytearray([11])
    vsn = bytearray([1])
    payload = rlp.encode([tag, vsn, signatures, encoded_tx])
    return "tx$" + base58.b58encode_check(payload)
Example #36
def test_blockchain_fixtures(fixture_data, fixture):
    try:
        chain = new_chain_from_fixture(fixture)
    except ValueError as e:
        raise AssertionError(f"could not load chain for {fixture_data}") from e

    genesis_fields = genesis_fields_from_fixture(fixture)

    genesis_block = chain.get_canonical_block_by_number(0)
    genesis_header = genesis_block.header

    # Validate the genesis header RLP against the generated header
    if 'genesisRLP' in fixture:
        # Super hacky, but better than nothing: extract the header, then re-decode it
        fixture_decoded_block = rlp.decode(fixture['genesisRLP'])
        fixture_encoded_header = rlp.encode(fixture_decoded_block[0])
        fixture_header = rlp.decode(fixture_encoded_header, sedes=HeaderSedes)
        # Error message with pretty output if header doesn't match
        assert_headers_eq(fixture_header, genesis_header)
        # Last gut check that transactions & receipts are valid, too
        assert rlp.encode(genesis_block) == fixture['genesisRLP']

    assert_imported_genesis_header_unchanged(genesis_fields, genesis_header)

    # 1 - mine the genesis block
    # 2 - loop over blocks:
    #     - apply transactions
    #     - mine block
    # 3 - diff resulting state with expected state
    # 4 - check that all previous blocks were valid

    for block_fixture in fixture['blocks']:
        should_be_good_block = 'expectException' not in block_fixture

        if 'rlp_error' in block_fixture:
            assert not should_be_good_block
            continue

        if should_be_good_block:
            (original_block, executed_block,
             block_rlp) = apply_fixture_block_to_chain(
                 block_fixture,
                 chain,
                 perform_validation=False  # we manually validate below
             )
            assert_mined_block_unchanged(original_block, executed_block)
            chain.validate_block(original_block)
        else:
            try:
                apply_fixture_block_to_chain(block_fixture, chain)
            except EXPECTED_BAD_BLOCK_EXCEPTIONS:
                # failure is expected on this bad block
                pass
            else:
                raise AssertionError(
                    "Block should have caused a validation error")

    latest_block_hash = chain.get_canonical_block_by_number(
        chain.get_block().number - 1).hash
    if latest_block_hash != fixture['lastblockhash']:
        verify_state(fixture['postState'], chain.get_vm().state)
Example #37
VERIFYING = -1

GENESIS_INITIAL_ALLOC = \
    {"51ba59315b3a95761d0863b05ccc7a7f54703d99": 2 ** 200,  # (G)
     "e6716f9544a56c530d868e4bfbacb172315bdead": 2 ** 200,  # (J)
     "b9c015918bdaba24b4ff057a92a3873d6eb201be": 2 ** 200,  # (V)
     "1a26338f0d905e295fccb71fa9ea849ffa12aaf4": 2 ** 200,  # (A)
     "2ef47100e0787b915105fd5e3f4ff6752079d5cb": 2 ** 200,  # (M)
     "cd2a3d9f938e13cd947ec05abc7fe734df8dd826": 2 ** 200,  # (R)
     "6c386a4b26f73c802f34673f7248bb118f97424a": 2 ** 200,  # (HH)
     "e4157b34ea9615cfbde6b4fda419828124b70c78": 2 ** 200,  # (CH)
     }

block_structure = [
    ["prevhash", "bin", "\00" * 32],
    ["uncles_hash", "bin", utils.sha3(rlp.encode([]))],
    ["coinbase", "addr", GENESIS_COINBASE],
    ["state_root", "trie_root", trie.BLANK_ROOT],
    ["tx_list_root", "trie_root", trie.BLANK_ROOT],
    ["difficulty", "int", INITIAL_DIFFICULTY],
    ["number", "int", 0],
    ["min_gas_price", "int", GENESIS_MIN_GAS_PRICE],
    ["gas_limit", "int", GENESIS_GAS_LIMIT],
    ["gas_used", "int", 0],
    ["timestamp", "int", 0],
    ["extra_data", "bin", ""],
    ["nonce", "bin", ""],
]

block_structure_rev = {}
for i, (name, typ, default) in enumerate(block_structure):
    block_structure_rev[name] = [i, typ, default]
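
With block_structure_rev filled in, a header field can be located by name, as Example #8 does for prevhash. An illustrative lookup:

prevhash_index = block_structure_rev['prevhash'][0]   # position of prevhash in a header list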
Example #38
 def withdrawDelegateReward(self, pri_key, transaction_cfg=None):
     data = rlp.encode([rlp.encode(int(5000))])
     return send_obj_transaction(self, data, self.delegateRewardAddress,
                                 pri_key, transaction_cfg)
Example #39
 def get_transaction(self, num):
     # returns [tx_lst_serialized, state_root, gas_used_encoded]
     return rlp.decode(
         self.transactions.get(rlp.encode(utils.encode_int(num))))
Example #40
 def serialize_header_without_nonce(self):
     return rlp.encode(self.list_header(exclude=['nonce']))
Example #41
 def serialize(self):
     # Serialization method; should act as perfect inverse function of the
     # constructor assuming no verification failures
     return rlp.encode(
         [self.list_header(),
          self._list_transactions(), self.uncles])
Example #42
def test_transaction_serialization():
    k, v, k2, v2 = accounts()
    tx = get_transaction()
    assert tx in set([tx])
    assert tx.hash == rlp.decode(rlp.encode(tx), transactions.Transaction).hash
    assert tx in set([tx])
Example #43
 def _set_account(self, address, account):
     self._trie[address] = rlp.encode(account, sedes=Account)
Example #44
def mk_logout(validator_index, epoch, key):
    sighash = utils.sha3(rlp.encode([validator_index, epoch]))
    v, r, s = utils.ecdsa_raw_sign(sighash, key)
    sig = utils.encode_int32(v) + utils.encode_int32(r) + utils.encode_int32(s)
    return rlp.encode([validator_index, epoch, sig])
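
A minimal decoding sketch (assumptions noted in comments): the logout message built above is rlp([validator_index, epoch, sig]) where sig is the 32-byte-padded v, r and s concatenated.

import rlp

def decode_logout_sketch(logout):
    # rlp.decode without sedes returns raw byte strings
    validator_index, epoch, sig = rlp.decode(logout)
    v = int.from_bytes(sig[:32], 'big')
    r = int.from_bytes(sig[32:64], 'big')
    s = int.from_bytes(sig[64:96], 'big')
    return validator_index, epoch, (v, r, s)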
Example #45
def _pack_v4(cmd_id, payload, privkey) -> bytes:
    cmd_id_bytes = int(cmd_id).to_bytes(1, byteorder='big')
    encoded_data = cmd_id_bytes + rlp.encode(payload)
    signature = privkey.sign_msg(encoded_data)
    message_hash = keccak256(signature.to_bytes() + encoded_data)
    return message_hash + signature.to_bytes() + encoded_data
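
The packet built above is hash (32 bytes) || signature || cmd_id (1 byte) || rlp(payload). A reading-side sketch, assuming the 65-byte recoverable secp256k1 signature used by discovery v4:

import rlp

HASH_LEN = 32
SIG_LEN = 65   # assumed recoverable signature length

def unpack_v4_sketch(packet: bytes):
    message_hash = packet[:HASH_LEN]
    signature_bytes = packet[HASH_LEN:HASH_LEN + SIG_LEN]
    cmd_id = packet[HASH_LEN + SIG_LEN]
    payload = rlp.decode(packet[HASH_LEN + SIG_LEN + 1:])
    return message_hash, signature_bytes, cmd_id, payload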
Example #46
File: eth.py Project: zhengger/py-evm
 def send_block_bodies(self, blocks: List[BlockBody]) -> None:
     cmd = BlockBodies(self)
     header, body = cmd.encode([rlp.encode(block) for block in blocks])
     self.send(header, body)
Example #47
async def test_lightchain_integration(request, event_loop):
    """Test LightChain against a local geth instance.

    This test assumes a geth/ropsten instance is listening on 127.0.0.1:30303 and serving light
    clients. In order to achieve that, simply run it with the following command line:

        $ geth -nodekeyhex 45a915e4d060149eb4365960e6a7a45f334393093061116b197e3240065ff2d8 \
               -testnet -lightserv 90
    """
    # TODO: Implement a pytest fixture that runs geth as above, so that we don't need to run it
    # manually.
    if not pytest.config.getoption("--integration"):
        pytest.skip("Not asked to run integration tests")

    chaindb = ChainDB(MemoryDB())
    chaindb.persist_header_to_db(ROPSTEN_GENESIS_HEADER)
    peer_pool = LocalGethPeerPool(LESPeer, chaindb, ROPSTEN_NETWORK_ID,
                                  ecies.generate_privkey())
    chain = IntegrationTestLightChain(chaindb, peer_pool)
    asyncio.ensure_future(peer_pool.run())
    asyncio.ensure_future(chain.run())
    await asyncio.sleep(
        0)  # Yield control to give the LightChain a chance to start

    def finalizer():
        event_loop.run_until_complete(peer_pool.stop())
        event_loop.run_until_complete(chain.stop())

    request.addfinalizer(finalizer)

    n = 11

    # Wait for the chain to sync a few headers.
    async def wait_for_header_sync(block_number):
        while chaindb.get_canonical_head().block_number < block_number:
            await asyncio.sleep(0.1)

    await asyncio.wait_for(wait_for_header_sync(n), 2)

    # https://ropsten.etherscan.io/block/11
    b = await chain.get_canonical_block_by_number(n)
    assert isinstance(b, FrontierBlock)
    assert b.number == 11
    assert encode_hex(b.hash) == (
        '0xda882aeff30f59eda9da2b3ace3023366ab9d4219b5a83cdd589347baae8678e')
    assert len(b.transactions) == 15
    assert isinstance(b.transactions[0], b.transaction_class)

    receipts = await chain.get_receipts(b.hash)
    assert len(receipts) == 15
    assert encode_hex(keccak(rlp.encode(receipts[0]))) == (
        '0xf709ed2c57efc18a1675e8c740f3294c9e2cb36ba7bb3b89d3ab4c8fef9d8860')

    assert len(chain.peer_pool.peers) == 1
    head_info = chain.peer_pool.peers[0].head_info
    head = await chain.get_block_by_hash(head_info.block_hash)
    assert head.number == head_info.block_number

    # In order to answer queries for contract code, geth needs the state trie entry for the block
    # we specify in the query, but because of fast sync we can only assume it has that for recent
    # blocks, so we use the current head to lookup the code for the contract below.
    # https://ropsten.etherscan.io/address/0x95a48dca999c89e4e284930d9b9af973a7481287
    contract_addr = decode_hex('95a48dca999c89e4e284930d9b9af973a7481287')
    contract_code = await chain.get_contract_code(head.hash,
                                                  keccak(contract_addr))
    assert encode_hex(keccak(contract_code)) == (
        '0x1e0b2ad970b365a217c40bcf3582cbb4fcc1642d7a5dd7a82ae1e278e010123e')

    account = await chain.get_account(head.hash, contract_addr)
    assert account.code_hash == keccak(contract_code)
    assert account.balance == 0
Example #48
    def send_transaction(self,
                         sender: address,
                         to: address,
                         value: int = 0,
                         data: bytes = b'',
                         startgas: int = 0,
                         gasprice: int = GAS_PRICE,
                         nonce: Optional[int] = None):
        """ Helper to send signed messages.

        This method will use the `privkey` provided in the constructor to
        locally sign the transaction. This requires an extended server
        implementation that accepts the variables v, r, and s.
        """

        if not self.privkey and not sender:
            raise ValueError('Either privkey or sender needs to be supplied.')

        if self.privkey:
            privkey_address = privatekey_to_address(self.privkey)
            sender = sender or privkey_address

            if sender != privkey_address:
                raise ValueError('sender for a different privkey.')

            if nonce is None:
                nonce = self.nonce(sender)
        else:
            if nonce is None:
                nonce = 0

        if not startgas:
            startgas = self.gaslimit() - 1

        tx = Transaction(nonce,
                         gasprice,
                         startgas,
                         to=to,
                         value=value,
                         data=data)

        if self.privkey:
            tx.sign(self.privkey)
            result = self.call(
                'eth_sendRawTransaction',
                data_encoder(rlp.encode(tx)),
            )
            return result[2 if result.startswith('0x') else 0:]

        else:

            # rename the fields to match the eth_sendTransaction signature
            tx_dict = tx.to_dict()
            tx_dict.pop('hash')
            tx_dict['sender'] = sender
            tx_dict['gasPrice'] = tx_dict.pop('gasprice')
            tx_dict['gas'] = tx_dict.pop('startgas')

            res = self.eth_sendTransaction(**tx_dict)

        assert len(res) in (20, 32)
        return hexlify(res)
Example #49
 def encode(self, payload: TCommandPayload) -> bytes:
     return rlp.encode(self._process_outbound_payload_fn(payload),
                       sedes=self.sedes)
Example #50
File: eth.py Project: zhengger/py-evm
 def send_block_headers(self, headers: List[BlockHeader]) -> None:
     cmd = BlockHeaders(self)
     header, body = cmd.encode([rlp.encode(header) for header in headers])
     self.send(header, body)
Example #51
 def createStaking(self,
                   typ,
                   benifit_address,
                   node_id,
                   external_id,
                   node_name,
                   website,
                   details,
                   amount,
                   program_version,
                   program_version_sign,
                   bls_pubkey,
                   bls_proof,
                   pri_key,
                   reward_per,
                   transaction_cfg=None):
     """
     Initiate Staking
     :param typ: Indicates whether the account free amount or the account's lock amount is used for staking, 0: free amount; 1: lock amount;
                 2: Give priority to lock amount , use free amount provided that staking amount over lock amount
     :param benifit_address: Income account for accepting block rewards and staking rewards
     :param node_id: The idled node Id (also called the candidate's node Id)
     :param external_id: External Id (with length limit, Id for the third party to pull the node description)
     :param node_name: The name of the staking node (with a length limit indicating the name of the node)
     :param website: The third-party home page of the node (with a length limit indicating the home page of the node)
     :param details: Description of the node (with a length limit indicating the description of the node)
     :param amount: staking von (unit:von, 1LAT = 10**18 von)
     :param program_version: The real version of the program, admin_getProgramVersion
     :param program_version_sign: The real version of the program is signed, admin_getProgramVersion
     :param bls_pubkey: Bls public key
     :param bls_proof: Proof of bls, obtained by pulling the proof interface, admin_getSchnorrNIZKProve
     :param pri_key: Private key for transaction
     :param reward_per: Proportion of the reward share obtained from the commission, using BasePoint 1BP = 0.01%
     :param transaction_cfg: Transaction basic configuration
           type: dict
           example:cfg = {
               "gas":100000000,
               "gasPrice":2000000000000,
               "nonce":1,
           }
     :return: if is need analyze return transaction result dict
             if is not need analyze return transaction hash
     """
     benifit_address = bech32_address_bytes(benifit_address)
     if program_version_sign[:2] == '0x':
         program_version_sign = program_version_sign[2:]
     data = HexBytes(
         rlp.encode([
             rlp.encode(int(1000)),
             rlp.encode(typ),
             rlp.encode(benifit_address),
             rlp.encode(bytes.fromhex(node_id)),
             rlp.encode(external_id),
             rlp.encode(node_name),
             rlp.encode(website),
             rlp.encode(details),
             rlp.encode(amount),
             rlp.encode(reward_per),
             rlp.encode(program_version),
             rlp.encode(bytes.fromhex(program_version_sign)),
             rlp.encode(bytes.fromhex(bls_pubkey)),
             rlp.encode(bytes.fromhex(bls_proof))
         ])).hex()
     return send_obj_transaction(self, data, self.stakingAddress, pri_key,
                                 transaction_cfg)
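
The same call-encoding pattern appears in Examples #38 and #54: a PlatON built-in contract call is rlp([rlp(func_code), rlp(param_1), ...]) sent as hex. A hypothetical generic helper in that style (the name is mine, not from the SDK):

import rlp
from hexbytes import HexBytes

def encode_ppos_call_sketch(func_code, *params):
    # each parameter is rlp-encoded individually, then the whole list is rlp-encoded
    items = [rlp.encode(int(func_code))] + [rlp.encode(p) for p in params]
    return HexBytes(rlp.encode(items)).hex()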
Example #52
 def hex_serialize_header(self):
     return rlp.encode(self.list_header()).encode('hex')
Example #53
        s.journal = copy.copy(self.journal)
        s.cache = {}
        return s


def prev_header_to_dict(h):
    return {
        "hash": '0x' + encode_hex(h.hash),
        "number": str(h.number),
        "timestamp": str(h.timestamp),
        "difficulty": str(h.difficulty),
        "gas_used": str(h.gas_used),
        "gas_limit": str(h.gas_limit),
        "uncles_hash": '0x' + encode_hex(h.uncles_hash)
    }


BLANK_UNCLES_HASH = sha3(rlp.encode([]))


def dict_to_prev_header(h):
    return FakeHeader(hash=parse_as_bin(h['hash']),
                      number=parse_as_int(h['number']),
                      timestamp=parse_as_int(h['timestamp']),
                      difficulty=parse_as_int(h['difficulty']),
                      gas_used=parse_as_int(h.get('gas_used', '0')),
                      gas_limit=parse_as_int(h['gas_limit']),
                      uncles_hash=parse_as_bin(
                          h.get('uncles_hash',
                                '0x' + encode_hex(BLANK_UNCLES_HASH))))
Example #54
 def getAvgPackTime(self, from_address=None):
     data = rlp.encode([rlp.encode(int(1202))])
     raw_data = call_obj(self, from_address, self.stakingAddress, data)
     receive = json.loads(str(raw_data, encoding="ISO-8859-1"))
     return receive
Example #55
 def to_bytes(self) -> bytes:
     return b"".join((
         int_to_big_endian(self.message_type),
         rlp.encode(self),
     ))
Example #56
def encode_transaction(unsigned_transaction, vrs):
    (v, r, s) = vrs
    chain_naive_transaction = dissoc(unsigned_transaction.as_dict(), 'v', 'r', 's')
    signed_transaction = Transaction(v=v, r=r, s=s, **chain_naive_transaction)
    return rlp.encode(signed_transaction)
Example #57
 def hash(self) -> Hash32:
     if self._hash is None:
         self._hash = hash_eth2(rlp.encode(self.data))
     return self._hash
Example #58
    def add_block(self, block):
        now = self.localtime
        # Are we receiving the block too early?
        if block.header.timestamp > now:
            i = 0
            while i < len(self.time_queue
                          ) and block.timestamp > self.time_queue[i].timestamp:
                i += 1
            self.time_queue.insert(i, block)
            log.info(
                'Block received too early (%d vs %d). Delaying for %d seconds'
                % (now, block.header.timestamp, block.header.timestamp - now))
            return False
        # Is the block being added to the head?
        if block.header.prevhash == self.head_hash:
            log.info('Adding to head',
                     head=encode_hex(block.header.prevhash[:4]))
            self.state.deletes = []
            self.state.changed = {}
            try:
                apply_block(self.state, block)
            except (AssertionError, KeyError, ValueError, InvalidTransaction,
                    VerificationFailed) as e:
                log.info('Block %d (%s) with parent %s invalid, reason: %s' %
                         (block.number, encode_hex(block.header.hash[:4]),
                          encode_hex(block.header.prevhash[:4]), str(e)))
                return False
            self.db.put(b'block:%d' % block.header.number, block.header.hash)
            # side effect: put 'score:' cache in db
            block_score = self.get_score(block)
            self.head_hash = block.header.hash
            for i, tx in enumerate(block.transactions):
                self.db.put(b'txindex:' + tx.hash,
                            rlp.encode([block.number, i]))
            assert self.get_blockhash_by_number(
                block.header.number) == block.header.hash
            deletes = self.state.deletes
            changed = self.state.changed
        # Or is the block being added to a chain that is not currently the
        # head?
        elif block.header.prevhash in self.env.db:
            log.info(
                'Receiving block %d (%s) not on head (%s), adding to secondary post state %s'
                % (block.number, encode_hex(
                    block.header.hash[:4]), encode_hex(self.head_hash[:4]),
                   encode_hex(block.header.prevhash[:4])))
            temp_state = self.mk_poststate_of_blockhash(block.header.prevhash)
            try:
                apply_block(temp_state, block)
            except (AssertionError, KeyError, ValueError, InvalidTransaction,
                    VerificationFailed) as e:
                log.info('Block %s with parent %s invalid, reason: %s' %
                         (encode_hex(block.header.hash[:4]),
                          encode_hex(block.header.prevhash[:4]), str(e)))
                return False
            deletes = temp_state.deletes
            block_score = self.get_score(block)
            changed = temp_state.changed
            # If the block should be the new head, replace the head
            if block_score > self.get_score(self.head):
                b = block
                new_chain = {}
                # Find common ancestor
                while b.header.number >= int(self.db.get(b'GENESIS_NUMBER')):
                    new_chain[b.header.number] = b
                    key = b'block:%d' % b.header.number
                    orig_at_height = self.db.get(
                        key) if key in self.db else None
                    if orig_at_height == b.header.hash:
                        break
                    if b.prevhash not in self.db or self.db.get(
                            b.prevhash) == b'GENESIS':
                        break
                    b = self.get_parent(b)
                replace_from = b.header.number
                # Replace block index and tx indices, and edit the state cache

                # Get a list of all accounts that have been edited along the old and
                # new chains
                changed_accts = {}
                # Read: for i in range(common ancestor block number...new block
                # number)
                for i in itertools.count(replace_from):
                    log.info('Rewriting height %d' % i)
                    key = b'block:%d' % i
                    # Delete data for old blocks
                    orig_at_height = self.db.get(
                        key) if key in self.db else None
                    if orig_at_height:
                        orig_block_at_height = self.get_block(orig_at_height)
                        log.info('%s no longer in main chain' %
                                 encode_hex(orig_block_at_height.header.hash))
                        # Delete from block index
                        self.db.delete(key)
                        # Delete from txindex
                        for tx in orig_block_at_height.transactions:
                            if b'txindex:' + tx.hash in self.db:
                                self.db.delete(b'txindex:' + tx.hash)
                        # Add to changed list
                        acct_list = self.db.get(b'changed:' +
                                                orig_block_at_height.hash)
                        for j in range(0, len(acct_list), 20):
                            changed_accts[acct_list[j:j + 20]] = True
                    # Add data for new blocks
                    if i in new_chain:
                        new_block_at_height = new_chain[i]
                        log.info('%s now in main chain' %
                                 encode_hex(new_block_at_height.header.hash))
                        # Add to block index
                        self.db.put(key, new_block_at_height.header.hash)
                        # Add to txindex
                        for j, tx in enumerate(
                                new_block_at_height.transactions):
                            self.db.put(
                                b'txindex:' + tx.hash,
                                rlp.encode([new_block_at_height.number, j]))
                        # Add to changed list
                        if i < b.number:
                            acct_list = self.db.get(b'changed:' +
                                                    new_block_at_height.hash)
                            for j in range(0, len(acct_list), 20):
                                changed_accts[acct_list[j:j + 20]] = True
                    if i not in new_chain and not orig_at_height:
                        break
                # Add changed list from new head to changed list
                for c in changed.keys():
                    changed_accts[c] = True
                # Update the on-disk state cache
                for addr in changed_accts.keys():
                    data = temp_state.trie.get(addr)
                    if data:
                        self.state.db.put(b'address:' + addr, data)
                    else:
                        try:
                            self.state.db.delete(b'address:' + addr)
                        except KeyError:
                            pass
                self.head_hash = block.header.hash
                self.state = temp_state
                self.state.executing_on_head = True
        # Block has no parent yet
        else:
            if block.header.prevhash not in self.parent_queue:
                self.parent_queue[block.header.prevhash] = []
            self.parent_queue[block.header.prevhash].append(block)
            log.info(
                'Got block %d (%s) with prevhash %s, parent not found. Delaying for now'
                % (block.number, encode_hex(
                    block.hash[:4]), encode_hex(block.prevhash[:4])))
            return False
        self.add_child(block)

        self.db.put(b'head_hash', self.head_hash)

        self.db.put(block.hash, rlp.encode(block))
        self.db.put(
            b'changed:' + block.hash, b''.join([
                k.encode() if not is_string(k) else k
                for k in list(changed.keys())
            ]))
        print('Saved %d address change logs' % len(changed.keys()))
        self.db.put(b'deletes:' + block.hash, b''.join(deletes))
        log.debug('Saved %d trie node deletes for block %d (%s)' %
                  (len(deletes), block.number, utils.encode_hex(block.hash)))
        # Delete old junk data
        old_block_hash = self.get_blockhash_by_number(block.number -
                                                      self.max_history)
        if old_block_hash:
            try:
                deletes = self.db.get(b'deletes:' + old_block_hash)
                log.debug('Deleting up to %d trie nodes' %
                          (len(deletes) // 32))
                rdb = RefcountDB(self.db)
                for i in range(0, len(deletes), 32):
                    rdb.delete(deletes[i:i + 32])
                self.db.delete(b'deletes:' + old_block_hash)
                self.db.delete(b'changed:' + old_block_hash)
            except KeyError as e:
                print(e)
                pass
        self.db.commit()
        assert (b'deletes:' + block.hash) in self.db
        log.info('Added block %d (%s) with %d txs and %d gas' %
                 (block.header.number, encode_hex(block.header.hash)[:8],
                  len(block.transactions), block.header.gas_used))
        # Call optional callback
        if self.new_head_cb and block.header.number != 0:
            self.new_head_cb(block)
        # Are there blocks that we received that were waiting for this block?
        # If so, process them.
        if block.header.hash in self.parent_queue:
            for _blk in self.parent_queue[block.header.hash]:
                self.add_block(_blk)
            del self.parent_queue[block.header.hash]
        return True
Example #59
 def _store_block(self, block):
     if block.number > 0:
         self.blockchain.put_temporarily(block.hash, rlp.encode(block))
     else:
         self.blockchain.put(block.hash, rlp.encode(block))
Example #60
 def add_receipt(self, block_header, index_key, receipt):
     receipt_db = HexaryTrie(db=self.db, root_hash=block_header.receipt_root)
     receipt_db[index_key] = rlp.encode(receipt)
     return receipt_db.root_hash