예제 #1
0
 def __init__(self, genesis, key, network, env, time_offset=5):
     """Create a Casper validator node for a simulated p2p test network.

     :param genesis: genesis block/state used to seed this validator's chain.
     :param key: this validator's private key.
     :param network: test p2p network object the validator communicates on.
     :param env: chain environment (database + config).
     :param time_offset: half-width of the random clock skew applied to
         this validator (for testing clock drift).
     """
     # Create a chain object
     self.chain = Chain(genesis, env=env)
     # Use the validator's time as the chain's time
     self.chain.time = lambda: self.get_timestamp()
     # My private key
     self.key = key
     # My address
     self.address = privtoaddr(key)
     # My randao
     self.randao = RandaoManager(sha3(self.key))
     # Pointer to the test p2p network
     self.network = network
     # Record of objects already received and processed
     self.received_objects = {}
     # The minimum eligible timestamp given a particular number of skips
     self.next_skip_count = 0
     self.next_skip_timestamp = 0
     # This validator's indices in the state
     self.indices = None
     # Is this validator active?
     self.active = False
     # Code that verifies signatures from this validator
     self.validation_code = generate_validation_code(privtoaddr(key))
     # Parents that this validator has already built a block on
     self.used_parents = {}
     # This validator's clock offset (for testing purposes)
     # randrange(t) - t//2 yields an offset roughly centered on zero
     self.time_offset = random.randrange(time_offset) - (time_offset // 2)
     # Determine the epoch length
     self.epoch_length = self.call_casper('getEpochLength')
     # Give this validator a unique ID
     self.id = len(ids)
     ids.append(self.id)
     self.find_my_indices()
     self.cached_head = self.chain.head_hash
    def __init__(self, app):
        """Initialize the chain service.

        Configures the database (a reference-counting pruning db when the
        ``pruning`` config value is >= 0, the raw service db otherwise),
        refuses to switch an existing database between pruning modes or
        network ids, then builds the Chain, synchronizer, work queues and
        broadcast bookkeeping.

        :param app: application object providing ``config`` and
            ``services`` (db, accounts).
        :raises RuntimeError: if the database was initialized with a
            different pruning mode or a different network id.
        """
        self.config = app.config
        sce = self.config['eth']
        # pruning >= 0 enables a reference-counting db with that TTL
        if int(sce['pruning']) >= 0:
            self.db = RefcountDB(app.services.db)
            if "I am not pruning" in self.db.db:
                raise RuntimeError(
                    "The database in '{}' was initialized as non-pruning. "
                    "Can not enable pruning now.".format(
                        self.config['data_dir']))
            self.db.ttl = int(sce['pruning'])
            self.db.db.put("I am pruning", "1")
        else:
            self.db = app.services.db
            if "I am pruning" in self.db:
                raise RuntimeError(
                    "The database in '{}' was initialized as pruning. "
                    "Can not disable pruning now".format(
                        self.config['data_dir']))
            self.db.put("I am not pruning", "1")

        # A database may only ever be used with a single network id.
        if 'network_id' in self.db:
            db_network_id = self.db.get('network_id')
            if db_network_id != str(sce['network_id']):
                raise RuntimeError(
                    "The database in '{}' was initialized with network id {} and can not be used "
                    "when connecting to network id {}. Please choose a different data directory."
                    .format(self.config['data_dir'], db_network_id,
                            sce['network_id']))

        else:
            self.db.put('network_id', str(sce['network_id']))
            self.db.commit()

        assert self.db is not None

        super(ChainService, self).__init__(app)
        log.info('initializing chain')
        coinbase = app.services.accounts.coinbase
        env = Env(self.db, sce['block'])
        self.chain = Chain(env,
                           new_head_cb=self._on_new_head,
                           coinbase=coinbase)

        log.info('chain at', number=self.chain.head.number)
        # Optionally verify the chain's genesis against the configured hash.
        if 'genesis_hash' in sce:
            assert sce['genesis_hash'] == self.chain.genesis.hex_hash(), \
                "Genesis hash mismatch.\n  Expected: %s\n  Got: %s" % (
                    sce['genesis_hash'], self.chain.genesis.hex_hash())

        self.synchronizer = Synchronizer(self, force_sync=None)

        self.block_queue = Queue(maxsize=self.block_queue_size)
        self.transaction_queue = Queue(maxsize=self.transaction_queue_size)
        self.add_blocks_lock = False
        self.add_transaction_lock = gevent.lock.Semaphore()
        self.broadcast_filter = DuplicatesFilter()
        self.on_new_head_cbs = []
        self.on_new_head_candidate_cbs = []
        # Rolling window of recent block-processing durations.
        self.newblock_processing_times = deque(maxlen=1000)
예제 #3
0
def mine_next_block(parent, coinbase=None, transactions=None):
    """Mine a single block on top of *parent*.

    :param parent: block to build on; its ``env`` supplies config and db.
    :param coinbase: optional miner address credited with the block reward.
    :param transactions: optional iterable of transactions to include.
    :returns: the newly mined block.
    """
    # NOTE: the original signature used a mutable default (transactions=[]),
    # a classic Python pitfall; None preserves the call syntax safely.
    if coinbase:
        c = Chain(env=parent.env, genesis=parent, coinbase=coinbase)
    else:
        c = Chain(env=parent.env, genesis=parent)
    for tx in (transactions or []):
        c.add_transaction(tx)
    block = mine_on_chain(c)
    return block
    def __init__(self, app):
        """Initialize the chain service.

        Selects a pruning (RefcountDB) or plain database depending on the
        ``pruning`` config value, refuses to switch an existing database
        between modes or network ids, then builds the Chain plus the
        supporting queues and locks.

        :param app: application providing ``config`` and ``services``.
        :raises Exception: if the db was initialized with a different
            pruning mode or network id.
        """
        self.config = app.config
        sce = self.config['eth']
        # pruning >= 0 selects a reference-counting db with that TTL
        if int(sce['pruning']) >= 0:
            self.db = RefcountDB(app.services.db)
            if "I am not pruning" in self.db.db:
                raise Exception("This database was initialized as non-pruning."
                                " Kinda hard to start pruning now.")
            self.db.ttl = int(sce['pruning'])
            self.db.db.put("I am pruning", "1")
        else:
            self.db = app.services.db
            if "I am pruning" in self.db:
                raise Exception("This database was initialized as pruning."
                                " Kinda hard to stop pruning now.")
            self.db.put("I am not pruning", "1")

        # A database may only ever be used with one network id.
        if 'network_id' in self.db:
            db_network_id = self.db.get('network_id')
            if db_network_id != str(sce['network_id']):
                raise Exception(
                    "This database was initialized with network_id {} "
                    "and can not be used when connecting to network_id {}".
                    format(db_network_id, sce['network_id']))

        else:
            self.db.put('network_id', str(sce['network_id']))
            self.db.commit()

        assert self.db is not None

        super(ChainService, self).__init__(app)
        log.info('initializing chain')
        coinbase = app.services.accounts.coinbase
        env = Env(self.db, sce['block'])
        self.chain = Chain(env,
                           new_head_cb=self._on_new_head,
                           coinbase=coinbase)

        log.info('chain at', number=self.chain.head.number)
        # Optionally verify the genesis hash against the configured value.
        if 'genesis_hash' in sce:
            assert sce['genesis_hash'] == self.chain.genesis.hex_hash()

        self.synchronizer = Synchronizer(self, force_sync=None)

        self.block_queue = Queue(maxsize=self.block_queue_size)
        self.transaction_queue = Queue(maxsize=self.transaction_queue_size)
        self.add_blocks_lock = False
        self.add_transaction_lock = gevent.lock.Semaphore()
        self.broadcast_filter = DuplicatesFilter()
        self.on_new_head_cbs = []
        self.on_new_head_candidate_cbs = []
        # Rolling window of recent block-processing durations.
        self.newblock_processing_times = deque(maxlen=1000)
예제 #5
0
def test_add_side_chain(db, alt_db):
    """
    Local: L0, L1, L2
    Remote: R0, R1

    Mine a short remote chain and a longer local chain on separate
    databases, then import the serialized remote blocks into the local
    chain and verify the local head block is still present.
    """
    k, v, k2, v2 = accounts()
    # Remote: mine one block
    R0 = mkquickgenesis({v: {"balance": utils.denoms.ether * 1}}, db=db)
    store_block(R0)
    tx0 = get_transaction(nonce=0)
    R1 = mine_next_block(R0, transactions=[tx0])
    store_block(R1)
    assert tx0.hash in [x.hash for x in R1.get_transactions()]

    # Local: mine two blocks
    L0 = mkquickgenesis({v: {"balance": utils.denoms.ether * 1}}, alt_db)
    chain = Chain(env=env(L0.db), genesis=L0)
    tx0 = get_transaction(nonce=0)
    L1 = mine_next_block(L0, transactions=[tx0])
    chain.add_block(L1)
    tx1 = get_transaction(nonce=1)
    L2 = mine_next_block(L1, transactions=[tx1])
    chain.add_block(L2)

    # receive serialized remote blocks (oldest first: R0 is the genesis)
    rlp_blocks = [rlp.encode(R0), rlp.encode(R1)]
    for rlp_block in rlp_blocks:
        block = blocks.Block.deserialize(rlp.decode(rlp_block), env=chain.env)
        chain.add_block(block)

    assert L2.hash in chain
예제 #6
0
def test_add_longer_side_chain(db, alt_db):
    """
    Local: L0, L1, L2
    Remote: R0, R1, R2, R3

    The remote chain is longer than the local one, so after importing the
    serialized remote blocks the remote tip should become the head.
    """
    k, v, k2, v2 = accounts()
    # Remote: mine the genesis block...
    blk = mkquickgenesis({v: {"balance": utils.denoms.ether * 1}}, db=db)
    store_block(blk)
    remote_blocks = [blk]
    # ...then three more blocks on top of it
    for i in range(3):
        tx = get_transaction(nonce=i)
        blk = mine_next_block(remote_blocks[-1], transactions=[tx])
        store_block(blk)
        remote_blocks.append(blk)
    # Local: mine two blocks
    L0 = mkquickgenesis({v: {"balance": utils.denoms.ether * 1}}, db=alt_db)
    chain = Chain(env=env(L0.db), genesis=L0)
    tx0 = get_transaction(nonce=0)
    L1 = mine_next_block(L0, transactions=[tx0])
    chain.add_block(L1)
    tx1 = get_transaction(nonce=1)
    L2 = mine_next_block(L1, transactions=[tx1])
    chain.add_block(L2)

    # receive serialized remote blocks (oldest first)
    rlp_blocks = [rlp.encode(x) for x in remote_blocks]
    for rlp_block in rlp_blocks:
        block = blocks.Block.deserialize(rlp.decode(rlp_block), env=chain.env)
        chain.add_block(block)

    assert chain.head == remote_blocks[-1]
예제 #7
0
    def __init__(self, app):
        """Initialize the chain service: wrap the app's database in a Chain,
        create the synchronizer, and derive the mining coinbase from the
        configured private key.

        :param app: application providing ``config`` and ``services.db``.
        """
        self.config = app.config
        self.db = app.services.db
        assert self.db is not None
        super(ChainService, self).__init__(app)
        log.info('initializing chain')
        self.chain = Chain(self.db, new_head_cb=self._on_new_head)
        self.synchronizer = Synchronizer(self, force_sync=None)
        # Mining rewards go to the address of the configured private key.
        self.chain.coinbase = privtoaddr(
            self.config['eth']['privkey_hex'].decode('hex'))

        self.block_queue = Queue(maxsize=self.block_queue_size)
        self.transaction_queue = Queue(maxsize=self.transaction_queue_size)
        self.add_blocks_lock = False
        self.broadcast_filter = DuplicatesFilter()
예제 #8
0
    def __init__(self, app):
        """Initialize the chain service: Chain using the accounts-service
        coinbase, synchronizer, work queues and locks.

        :param app: application providing ``config`` and ``services``
            (db, accounts).
        """
        self.config = app.config
        self.db = app.services.db
        assert self.db is not None
        super(ChainService, self).__init__(app)
        log.info('initializing chain')
        coinbase = app.services.accounts.coinbase
        self.chain = Chain(self.db, new_head_cb=self._on_new_head, coinbase=coinbase)
        log.info('chain at', number=self.chain.head.number)
        self.synchronizer = Synchronizer(self, force_sync=None)

        self.block_queue = Queue(maxsize=self.block_queue_size)
        self.transaction_queue = Queue(maxsize=self.transaction_queue_size)
        self.add_blocks_lock = False
        self.add_transaction_lock = gevent.lock.Semaphore()
        self.broadcast_filter = DuplicatesFilter()
예제 #9
0
def test_reward_uncles(db):
    """
    B0 B1 B2
    B0 Uncle

    We raise the block's coinbase account by Rb, the block reward,
    and also add uncle and nephew rewards
    """
    k, v, k2, v2 = accounts()
    blk0 = mkquickgenesis(db=db)
    # 20-byte test addresses built from hex '1'*40 / '2'*40.
    local_coinbase = decode_hex('1' * 40)
    uncle_coinbase = decode_hex('2' * 40)
    chain = Chain(env=env(blk0.db), genesis=blk0)
    # Mine two competing children of the genesis; blk1 becomes the head
    # and the other child becomes an uncle candidate.
    uncle = mine_on_chain(chain, blk0, coinbase=uncle_coinbase)
    assert uncle.get_balance(uncle_coinbase) == 1 * chain.env.config['BLOCK_REWARD']
    blk1 = mine_on_chain(chain, blk0, coinbase=local_coinbase)
    assert blk1.hash in chain
    assert uncle.hash in chain
    assert uncle.hash != blk1.hash
    assert chain.head == blk1
    assert chain.head.get_balance(local_coinbase) == 1 * chain.env.config['BLOCK_REWARD']
    # The uncle's reward is not credited on the head's chain yet.
    assert chain.head.get_balance(uncle_coinbase) == 0
    # next block should reward uncles
    blk2 = mine_on_chain(chain, coinbase=local_coinbase)
    assert blk2.get_parent().prevhash == uncle.prevhash
    assert len(blk2.uncles) == 1
    assert blk2 == chain.head
    assert chain.head.get_balance(local_coinbase) == \
        2 * chain.env.config['BLOCK_REWARD'] + chain.env.config['NEPHEW_REWARD']
    # Uncle coinbase receives 7/8 of a block reward per the uncle schedule.
    assert chain.head.get_balance(uncle_coinbase) == chain.env.config['BLOCK_REWARD'] * 7 // 8
예제 #10
0
File: export.py  Project: qfkjbpc/eth_bpc
def get_chain(data_dir=default_data_dir):
    """Open the LevelDB stored under ``data_dir`` and return an
    ethereum.chain.Chain instance backed by it."""
    leveldb_path = os.path.join(data_dir, 'leveldb')
    chain_env = Env(LevelDB(leveldb_path))
    return Chain(chain_env)
예제 #11
0
 def __init__(self, genesis, key, network, env, time_offset=5):
     """Create a Casper validator node for a simulated p2p test network.

     :param genesis: genesis block/state used to seed this validator's chain.
     :param key: this validator's private key.
     :param network: test p2p network object the validator communicates on.
     :param env: chain environment (database + config).
     :param time_offset: half-width of the random clock skew applied to
         this validator (for testing clock drift).
     """
     # Create a chain object
     self.chain = Chain(genesis, env=env)
     # Use the validator's time as the chain's time
     self.chain.time = lambda: self.get_timestamp()
     # My private key
     self.key = key
     # My address
     self.address = privtoaddr(key)
     # My randao
     self.randao = RandaoManager(sha3(self.key))
     # Pointer to the test p2p network
     self.network = network
     # Record of objects already received and processed
     self.received_objects = {}
     # The minimum eligible timestamp given a particular number of skips
     self.next_skip_count = 0
     self.next_skip_timestamp = 0
     # This validator's indices in the state
     self.indices = None
     # Code that verifies signatures from this validator
     self.validation_code = generate_validation_code(privtoaddr(key))
     # Parents that this validator has already built a block on
     self.used_parents = {}
     # This validator's clock offset (for testing purposes)
     # randrange(t) - t//2 yields an offset roughly centered on zero
     self.time_offset = random.randrange(time_offset) - (time_offset // 2)
     # Give this validator a unique ID
     self.id = len(ids)
     ids.append(self.id)
     self.find_my_indices()
     self.cached_head = self.chain.head_hash
예제 #12
0
def test_add_longer_side_chain(db, alt_db):
    """
    Local: L0, L1, L2
    Remote: R0, R1, R2, R3

    Because the imported remote chain is longer than the local chain, the
    remote tip must become the new head after the import.
    """
    k, v, k2, v2 = accounts()
    # Remote: mine the genesis block...
    blk = mkquickgenesis({v: {"balance": utils.denoms.ether * 1}}, db=db)
    store_block(blk)
    remote_blocks = [blk]
    # ...and three more blocks on top of it
    for i in range(3):
        tx = get_transaction(nonce=i)
        blk = mine_next_block(remote_blocks[-1], transactions=[tx])
        store_block(blk)
        remote_blocks.append(blk)
    # Local: mine two blocks
    L0 = mkquickgenesis({v: {"balance": utils.denoms.ether * 1}}, db=alt_db)
    chain = Chain(env=env(L0.db), genesis=L0)
    tx0 = get_transaction(nonce=0)
    L1 = mine_next_block(L0, transactions=[tx0])
    chain.add_block(L1)
    tx1 = get_transaction(nonce=1)
    L2 = mine_next_block(L1, transactions=[tx1])
    chain.add_block(L2)

    # receive serialized remote blocks (oldest first)
    rlp_blocks = [rlp.encode(x) for x in remote_blocks]
    for rlp_block in rlp_blocks:
        block = blocks.Block.deserialize(rlp.decode(rlp_block), env=chain.env)
        chain.add_block(block)

    assert chain.head == remote_blocks[-1]
예제 #13
0
def test_add_side_chain(db, alt_db):
    """
    Local: L0, L1, L2
    Remote: R0, R1

    The imported remote chain is shorter than the local one, so the local
    head block must remain part of the chain after the import.
    """
    k, v, k2, v2 = accounts()
    # Remote: mine one block
    R0 = mkquickgenesis({v: {"balance": utils.denoms.ether * 1}}, db=db)
    store_block(R0)
    tx0 = get_transaction(nonce=0)
    R1 = mine_next_block(R0, transactions=[tx0])
    store_block(R1)
    assert tx0.hash in [x.hash for x in R1.get_transactions()]

    # Local: mine two blocks
    L0 = mkquickgenesis({v: {"balance": utils.denoms.ether * 1}}, alt_db)
    chain = Chain(env=env(L0.db), genesis=L0)
    tx0 = get_transaction(nonce=0)
    L1 = mine_next_block(L0, transactions=[tx0])
    chain.add_block(L1)
    tx1 = get_transaction(nonce=1)
    L2 = mine_next_block(L1, transactions=[tx1])
    chain.add_block(L2)

    # receive serialized remote blocks (oldest first: R0 is the genesis)
    rlp_blocks = [rlp.encode(R0), rlp.encode(R1)]
    for rlp_block in rlp_blocks:
        block = blocks.Block.deserialize(rlp.decode(rlp_block), env=chain.env)
        chain.add_block(block)

    assert L2.hash in chain
예제 #14
0
    def __init__(self, app):
        """Initialize the chain service with consensus support.

        Selects a pruning (RefcountDB) or plain database, refuses to switch
        an existing database between pruning modes or network ids, builds
        the Chain, then wires up the consensus contract/manager and a lock
        protecting proposed blocks.

        :param app: application providing ``config`` and ``services``.
        :raises Exception: if the db was initialized with a different
            pruning mode or network id.
        """
        self.config = app.config
        sce = self.config['eth']
        # pruning >= 0 selects a reference-counting db with that TTL
        if int(sce['pruning']) >= 0:
            self.db = RefcountDB(app.services.db)
            if "I am not pruning" in self.db.db:
                raise Exception("This database was initialized as non-pruning."
                                " Kinda hard to start pruning now.")
            self.db.ttl = int(sce['pruning'])
            self.db.db.put("I am pruning", "1")
        else:
            self.db = app.services.db
            if "I am pruning" in self.db:
                raise Exception("This database was initialized as pruning."
                                " Kinda hard to stop pruning now.")
            self.db.put("I am not pruning", "1")

        # A database may only ever be used with one network id.
        if 'network_id' in self.db:
            db_network_id = self.db.get('network_id')
            if db_network_id != str(sce['network_id']):
                raise Exception("This database was initialized with network_id {} "
                                "and can not be used when connecting to network_id {}".format(
                                    db_network_id, sce['network_id'])
                                )

        else:
            self.db.put('network_id', str(sce['network_id']))
            self.db.commit()

        assert self.db is not None

        WiredService.__init__(self, app)
        log.info('initializing chain')
        coinbase = app.services.accounts.coinbase
        env = Env(self.db, sce['block'])
        self.chain = Chain(env, new_head_cb=self._on_new_head, coinbase=coinbase)

        log.info('chain at', number=self.chain.head.number)
        # Optionally verify the genesis hash against the configured value.
        if 'genesis_hash' in sce:
            assert sce['genesis_hash'] == self.chain.genesis.hex_hash()

        self.transaction_queue = Queue(maxsize=self.transaction_queue_size)
        self.add_blocks_lock = False
        self.add_transaction_lock = gevent.lock.BoundedSemaphore()
        self.broadcast_filter = DuplicatesFilter()
        self.on_new_head_cbs = []
        self.on_new_head_candidate_cbs = []
        # Rolling window of recent block-processing durations.
        self.newblock_processing_times = deque(maxlen=1000)

        # Consensus
        self.consensus_contract = ConsensusContract(validators=self.config['hdc']['validators'])
        # NOTE(review): self.consensus_privkey is not set in this method;
        # presumably provided elsewhere (e.g. a property) — confirm.
        self.consensus_manager = ConsensusManager(self, self.consensus_contract,
                                                  self.consensus_privkey)

        # lock blocks that were proposed, so they don't get mutated
        self.proposal_lock = ProposalLock()
        assert not self.proposal_lock.is_locked()
예제 #15
0
 def __init__(self, app):
     """Minimal chain service setup: Chain, synchronizer, and mining
     coinbase derived from the configured private key.

     :param app: application providing ``config`` and ``services.db``.
     """
     self.config = app.config
     self.db = app.services.db
     assert self.db is not None
     super(ChainService, self).__init__(app)
     log.info('initializing chain')
     self.chain = Chain(self.db, new_head_cb=self._on_new_head)
     # The synchronizer is constructed from the chain object itself here.
     self.synchronizer = Synchronizer(self.chain)
     # Mining rewards go to the address of the configured private key.
     self.chain.coinbase = privtoaddr(self.config['eth']['privkey_hex'].decode('hex'))
예제 #16
0
    def __init__(self, app):
        """Initialize the chain service.

        Selects a pruning (RefcountDB) or plain database, loads the genesis
        allocation from the configured JSON file when given (otherwise the
        default allocation), creates the genesis block and Chain, then the
        synchronizer, queues and locks.

        :param app: application providing ``config`` and ``services``.
        :raises Exception: if the db was initialized with the other
            pruning mode.
        """
        self.config = app.config
        sce = self.config['eth']
        # pruning >= 0 selects a reference-counting db with that TTL
        if int(sce['pruning']) >= 0:
            self.db = RefcountDB(app.services.db)
            if "I am not pruning" in self.db.db:
                raise Exception("This database was initialized as non-pruning."
                                " Kinda hard to start pruning now.")
            self.db.ttl = int(sce['pruning'])
            self.db.db.put("I am pruning", "1")
        else:
            self.db = app.services.db
            if "I am pruning" in self.db:
                raise Exception("This database was initialized as pruning."
                                " Kinda hard to stop pruning now.")
            self.db.put("I am not pruning", "1")
        assert self.db is not None
        super(ChainService, self).__init__(app)
        log.info('initializing chain')
        coinbase = app.services.accounts.coinbase
        # Genesis allocation: explicit JSON file or the built-in default.
        if sce['genesis']:
            log.info('loading genesis', path=sce['genesis'])
            _json = json.load(open(sce['genesis']))
        else:
            log.info('loaded default genesis alloc')
            _json = None
        _genesis = genesis(self.db, json=_json)
        log.info('created genesis block', hash=encode_hex(_genesis.hash))
        self.chain = Chain(self.db,
                           genesis=_genesis,
                           new_head_cb=self._on_new_head,
                           coinbase=coinbase)
        log.info('chain at', number=self.chain.head.number)
        self.synchronizer = Synchronizer(self, force_sync=None)

        self.block_queue = Queue(maxsize=self.block_queue_size)
        self.transaction_queue = Queue(maxsize=self.transaction_queue_size)
        self.add_blocks_lock = False
        self.add_transaction_lock = gevent.lock.Semaphore()
        self.broadcast_filter = DuplicatesFilter()
        self.on_new_head_cbs = []
        self.on_new_head_candidate_cbs = []
        # Rolling window of recent block-processing durations.
        self.newblock_processing_times = deque(maxlen=1000)
예제 #17
0
    def __init__(self, app):
        """Initialize the chain service.

        Selects a pruning (RefcountDB) or plain database depending on the
        ``pruning`` config value, refuses to switch an existing database
        between modes or network ids, then builds the Chain, synchronizer
        and supporting queues/locks.

        :param app: application providing ``config`` and ``services``.
        :raises Exception: if the db was initialized with a different
            pruning mode or network id.
        """
        self.config = app.config
        sce = self.config['eth']
        # pruning >= 0 selects a reference-counting db with that TTL
        if int(sce['pruning']) >= 0:
            self.db = RefcountDB(app.services.db)
            if "I am not pruning" in self.db.db:
                raise Exception("This database was initialized as non-pruning."
                                " Kinda hard to start pruning now.")
            self.db.ttl = int(sce['pruning'])
            self.db.db.put("I am pruning", "1")
        else:
            self.db = app.services.db
            if "I am pruning" in self.db:
                raise Exception("This database was initialized as pruning."
                                " Kinda hard to stop pruning now.")
            self.db.put("I am not pruning", "1")

        # A database may only ever be used with one network id.
        if 'network_id' in self.db:
            db_network_id = self.db.get('network_id')
            if db_network_id != str(sce['network_id']):
                raise Exception("This database was initialized with network_id {} "
                                "and can not be used when connecting to network_id {}".format(
                                    db_network_id, sce['network_id'])
                                )

        else:
            self.db.put('network_id', str(sce['network_id']))
            self.db.commit()

        assert self.db is not None

        super(ChainService, self).__init__(app)
        log.info('initializing chain')
        coinbase = app.services.accounts.coinbase
        env = Env(self.db, sce['block'])
        self.chain = Chain(env, new_head_cb=self._on_new_head, coinbase=coinbase)

        log.info('chain at', number=self.chain.head.number)
        # Optionally verify the genesis hash against the configured value.
        if 'genesis_hash' in sce:
            assert sce['genesis_hash'] == self.chain.genesis.hex_hash()

        self.synchronizer = Synchronizer(self, force_sync=None)

        self.block_queue = Queue(maxsize=self.block_queue_size)
        self.transaction_queue = Queue(maxsize=self.transaction_queue_size)
        self.add_blocks_lock = False
        self.add_transaction_lock = gevent.lock.Semaphore()
        self.broadcast_filter = DuplicatesFilter()
        self.on_new_head_cbs = []
        self.on_new_head_candidate_cbs = []
        # Rolling window of recent block-processing durations.
        self.newblock_processing_times = deque(maxlen=1000)
예제 #18
0
def test_genesis_chain(db):
    """A fresh chain contains exactly its genesis block: it is the head,
    has no children/uncles/descendants, and the block-number index knows
    only block 0."""
    k, v, k2, v2 = accounts()
    blk = mkquickgenesis({v: {"balance": utils.denoms.ether * 1}}, db=db)
    chain = Chain(env=env(blk.db), genesis=blk)

    assert chain.has_block(blk.hash)
    assert blk.hash in chain
    assert chain.get(blk.hash) == blk
    assert chain.head == blk
    # No other blocks exist yet.
    assert chain.get_children(blk) == []
    assert chain.get_uncles(blk) == []
    assert chain.get_chain() == [blk]
    assert chain.get_chain(blk.hash) == [blk]
    assert chain.get_descendants(blk, count=10) == []
    # Number index: only block 0 is known.
    assert chain.index.has_block_by_number(0)
    assert not chain.index.has_block_by_number(1)
    assert chain.index.get_block_by_number(0) == blk.hash
    with pytest.raises(KeyError):
        chain.index.get_block_by_number(1)
예제 #19
0
    def __init__(self, app):
        """Initialize the chain service: Chain, synchronizer, mining
        coinbase from the configured private key, and work queues.

        :param app: application providing ``config`` and ``services.db``.
        """
        self.config = app.config
        self.db = app.services.db
        assert self.db is not None
        super(ChainService, self).__init__(app)
        log.info('initializing chain')
        self.chain = Chain(self.db, new_head_cb=self._on_new_head)
        self.synchronizer = Synchronizer(self, force_sync=None)
        # Mining rewards go to the address of the configured private key.
        self.chain.coinbase = privtoaddr(self.config['eth']['privkey_hex'].decode('hex'))

        self.block_queue = Queue(maxsize=self.block_queue_size)
        self.transaction_queue = Queue(maxsize=self.transaction_queue_size)
        self.add_blocks_lock = False
        self.broadcast_filter = DuplicatesFilter()
예제 #20
0
def mine_next_block(parent, coinbase=None, transactions=None):
    """Mine a single block on top of *parent*.

    :param parent: block to build on; its ``env`` supplies config and db.
    :param coinbase: optional miner address credited with the block reward.
    :param transactions: optional iterable of transactions to include.
    :returns: the newly mined block.
    """
    # NOTE: the original signature used a mutable default (transactions=[]),
    # a classic Python pitfall; None preserves the call syntax safely.
    if coinbase:
        c = Chain(env=parent.env, genesis=parent, coinbase=coinbase)
    else:
        c = Chain(env=parent.env, genesis=parent)
    for tx in (transactions or []):
        c.add_transaction(tx)
    block = mine_on_chain(c)
    return block
예제 #21
0
def test_genesis_chain(db):
    """A freshly created chain exposes exactly its genesis block."""
    k, v, k2, v2 = accounts()
    genesis_blk = mkquickgenesis({v: {"balance": utils.denoms.ether * 1}}, db=db)
    chain = Chain(env=env(genesis_blk.db), genesis=genesis_blk)

    # The genesis block is present and is the head.
    assert chain.has_block(genesis_blk.hash)
    assert genesis_blk.hash in chain
    assert chain.get(genesis_blk.hash) == genesis_blk
    assert chain.head == genesis_blk
    # No children, uncles or descendants exist yet.
    assert chain.get_children(genesis_blk) == []
    assert chain.get_uncles(genesis_blk) == []
    assert chain.get_descendants(genesis_blk, count=10) == []
    # The full chain consists of just the genesis block.
    assert chain.get_chain() == [genesis_blk]
    assert chain.get_chain(genesis_blk.hash) == [genesis_blk]
    # The block-number index knows only block 0.
    assert chain.index.has_block_by_number(0)
    assert not chain.index.has_block_by_number(1)
    assert chain.index.get_block_by_number(0) == genesis_blk.hash
    with pytest.raises(KeyError):
        chain.index.get_block_by_number(1)
예제 #22
0
    def __init__(self, app):
        """Initialize the chain service.

        Selects a pruning (RefcountDB) or plain database, builds the genesis
        block from the configured JSON file when readable (falling back to
        the bundled GENESIS_JSON default), then creates the Chain,
        synchronizer and supporting queues/locks.

        :param app: application providing ``config`` and ``services``.
        :raises Exception: if the db was initialized with the other
            pruning mode.
        """
        self.config = app.config
        sce = self.config['eth']
        # pruning >= 0 selects a reference-counting db with that TTL
        if int(sce['pruning']) >= 0:
            self.db = RefcountDB(app.services.db)
            if "I am not pruning" in self.db.db:
                raise Exception("This database was initialized as non-pruning."
                                " Kinda hard to start pruning now.")
            self.db.ttl = int(sce['pruning'])
            self.db.db.put("I am pruning", "1")
        else:
            self.db = app.services.db
            if "I am pruning" in self.db:
                raise Exception("This database was initialized as pruning."
                                " Kinda hard to stop pruning now.")
            self.db.put("I am not pruning", "1")
        assert self.db is not None
        super(ChainService, self).__init__(app)
        log.info('initializing chain')
        coinbase = app.services.accounts.coinbase
        # Fall back to the bundled default genesis when the configured
        # file cannot be read/parsed.
        try:
            _json = json.load(open(sce['genesis']))
            log.info('loading genesis', filename=sce['genesis'])
        except Exception as e:
            log.warn(str(e))
            _json = GENESIS_JSON
            log.info('loaded default genesis alloc')
        _genesis = genesis(self.db, json=_json)
        log.info('created genesis block', hash=encode_hex(_genesis.hash))
        self.chain = Chain(self.db, genesis=_genesis, new_head_cb=self._on_new_head,
                           coinbase=coinbase)
        log.info('chain at', number=self.chain.head.number)
        self.synchronizer = Synchronizer(self, force_sync=None)

        self.block_queue = Queue(maxsize=self.block_queue_size)
        self.transaction_queue = Queue(maxsize=self.transaction_queue_size)
        self.add_blocks_lock = False
        self.add_transaction_lock = gevent.lock.Semaphore()
        self.broadcast_filter = DuplicatesFilter()
        self.on_new_head_cbs = []
        self.on_new_head_candidate_cbs = []
        # Rolling window of recent block-processing durations.
        self.newblock_processing_times = deque(maxlen=1000)
예제 #23
0
    def __init__(self, app):
        """Initialize the chain service: build the genesis block from the
        configured nonce, then create the Chain, synchronizer, queues and
        locks.

        :param app: application providing ``config`` and ``services``
            (db, accounts).
        """
        self.config = app.config
        self.db = app.services.db
        assert self.db is not None
        super(ChainService, self).__init__(app)
        log.info('initializing chain')
        coinbase = app.services.accounts.coinbase
        # Genesis nonce comes hex-encoded from the config.
        _genesis = genesis(self.db, nonce=self.config['eth']['genesis_nonce'].decode('hex'))
        self.chain = Chain(self.db, genesis=_genesis, new_head_cb=self._on_new_head,
                           coinbase=coinbase)
        log.info('chain at', number=self.chain.head.number)
        self.synchronizer = Synchronizer(self, force_sync=None)

        self.block_queue = Queue(maxsize=self.block_queue_size)
        self.transaction_queue = Queue(maxsize=self.transaction_queue_size)
        self.add_blocks_lock = False
        self.add_transaction_lock = gevent.lock.Semaphore()
        self.broadcast_filter = DuplicatesFilter()
        self.on_new_head_cbs = []
        self.on_new_head_candidate_cbs = []
        # Rolling window of recent block-processing durations.
        self.newblock_processing_times = deque(maxlen=1000)
예제 #24
0
File: casper.py  Project: ethereum/research
 def __init__(self, genesis, key, network, env, time_offset=5):
     """Create a Casper validator node for a simulated p2p test network.

     :param genesis: genesis block/state used to seed this validator's chain.
     :param key: this validator's private key.
     :param network: test p2p network object the validator communicates on.
     :param env: chain environment (database + config).
     :param time_offset: half-width of the random clock skew applied to
         this validator (for testing clock drift).
     """
     # Create a chain object
     self.chain = Chain(genesis, env=env)
     # Create a transaction queue
     self.txqueue = TransactionQueue()
     # Use the validator's time as the chain's time
     self.chain.time = lambda: self.get_timestamp()
     # My private key
     self.key = key
     # My address
     self.address = privtoaddr(key)
     # My randao
     self.randao = RandaoManager(sha3(self.key))
     # Pointer to the test p2p network
     self.network = network
     # Record of objects already received and processed
     self.received_objects = {}
     # The minimum eligible timestamp given a particular number of skips
     self.next_skip_count = 0
     self.next_skip_timestamp = 0
     # Is this validator active?
     self.active = False
     # Code that verifies signatures from this validator
     self.validation_code = generate_validation_code(privtoaddr(key))
     # Validation code hash
     self.vchash = sha3(self.validation_code)
     # Parents that this validator has already built a block on
     self.used_parents = {}
     # This validator's clock offset (for testing purposes)
     # randrange(t) - t//2 yields an offset roughly centered on zero
     self.time_offset = random.randrange(time_offset) - (time_offset // 2)
     # Determine the epoch length
     self.epoch_length = self.call_casper('getEpochLength')
     # My minimum gas price (20 gwei)
     self.mingasprice = 20 * 10**9
     # Give this validator a unique ID
     self.id = len(ids)
     ids.append(self.id)
     self.update_activity_status()
     self.cached_head = self.chain.head_hash
예제 #25
0
class ChainService(WiredService):

    """
    Manages the chain and requests to it.
    """
    # required by BaseService
    name = 'chain'
    default_config = dict(eth=dict(network_id=0, genesis_nonce=GENESIS_NONCE.encode('hex')))

    # required by WiredService
    wire_protocol = eth_protocol.ETHProtocol  # create for each peer

    # initialized after configure:
    chain = None
    genesis = None
    synchronizer = None
    config = None
    # bounds for the incoming block / transaction buffers
    block_queue_size = 1024
    transaction_queue_size = 1024
    # cumulative counters backing the gpsec() throughput estimate
    processed_gas = 0
    processed_elapsed = 0

    def __init__(self, app):
        """Build the chain from the app's db/accounts services and set up sync."""
        self.config = app.config
        self.db = app.services.db
        assert self.db is not None
        super(ChainService, self).__init__(app)
        log.info('initializing chain')
        coinbase = app.services.accounts.coinbase
        _genesis = genesis(self.db, nonce=self.config['eth']['genesis_nonce'].decode('hex'))
        self.chain = Chain(self.db, genesis=_genesis, new_head_cb=self._on_new_head,
                           coinbase=coinbase)
        log.info('chain at', number=self.chain.head.number)
        self.synchronizer = Synchronizer(self, force_sync=None)

        self.block_queue = Queue(maxsize=self.block_queue_size)
        self.transaction_queue = Queue(maxsize=self.transaction_queue_size)
        # boolean flag, not a real lock: True while a _add_blocks greenlet runs
        self.add_blocks_lock = False
        self.add_transaction_lock = gevent.lock.Semaphore()
        self.broadcast_filter = DuplicatesFilter()
        self.on_new_head_cbs = []
        self.on_new_head_candidate_cbs = []
        # rolling window of newblock processing durations, for stats logging
        self.newblock_processing_times = deque(maxlen=1000)
        # gevent.spawn(update_watcher, self)

    @property
    def is_syncing(self):
        # a pending synctask means a chain download is in progress
        return self.synchronizer.synctask is not None

    @property
    def is_mining(self):
        if 'pow' in self.app.services:
            return self.app.services.pow.active
        return False

    def _on_new_head(self, block):
        """Notify head subscribers; a new head implies a new head candidate."""
        for cb in self.on_new_head_cbs:
            cb(block)
        self._on_new_head_candidate()  # we implicitly have a new head_candidate

    def _on_new_head_candidate(self):
        for cb in self.on_new_head_candidate_cbs:
            cb(self.chain.head_candidate)

    def add_transaction(self, tx, origin=None):
        """Validate `tx` against the head candidate, broadcast it, and add it
        to the chain under the transaction lock.

        :param tx: a `Transaction`
        :param origin: the protocol it was received from, or None if it was
            added locally (e.g. via jsonrpc)
        """
        log.debug('add_transaction', locked=self.add_transaction_lock.locked(), tx=tx)
        assert isinstance(tx, Transaction)
        assert origin is None or isinstance(origin, BaseProtocol)

        if tx.hash in self.broadcast_filter:
            log.debug('discarding known tx')  # discard early
            return

        # validate transaction
        try:
            validate_transaction(self.chain.head_candidate, tx)
            log.debug('valid tx, broadcasting')
            self.broadcast_transaction(tx, origin=origin)  # asap
        except InvalidTransaction as e:
            log.debug('invalid tx', error=e)
            return

        if origin is not None:  # not locally added via jsonrpc
            if not self.is_mining or self.is_syncing:
                log.debug('discarding tx', syncing=self.is_syncing, mining=self.is_mining)
                return

        self.add_transaction_lock.acquire()
        success = self.chain.add_transaction(tx)
        self.add_transaction_lock.release()
        if success:
            self._on_new_head_candidate()


    def add_block(self, t_block, proto):
        "adds a block to the block_queue and spawns _add_block if not running"
        self.block_queue.put((t_block, proto))  # blocks if full
        if not self.add_blocks_lock:
            self.add_blocks_lock = True  # need to lock here (ctx switch is later)
            gevent.spawn(self._add_blocks)

    def add_mined_block(self, block):
        """Add a locally mined `block`; broadcast it if it became the head."""
        log.debug('adding mined block', block=block)
        assert isinstance(block, Block)
        assert block.header.check_pow()
        if self.chain.add_block(block):
            log.debug('added', block=block, ts=time.time())
            assert block == self.chain.head
            self.broadcast_newblock(block, chain_difficulty=block.chain_difficulty())

    def knows_block(self, block_hash):
        "if block is in chain or in queue"
        if block_hash in self.chain:
            return True
        # check if queued or processed
        for i in range(len(self.block_queue.queue)):
            if block_hash == self.block_queue.queue[i][0].header.hash:
                return True
        return False

    def _add_blocks(self):
        """Drain the block queue: validate pow, deserialize and add each block.

        Runs in its own greenlet (spawned by add_block); the boolean
        `add_blocks_lock` ensures at most one drainer is active. Blocks are
        peeked (not popped) first so knows_block() still sees the one being
        processed.
        """
        log.debug('add_blocks', qsize=self.block_queue.qsize(),
                  add_tx_lock=self.add_transaction_lock.locked())
        assert self.add_blocks_lock is True
        self.add_transaction_lock.acquire()
        try:
            while not self.block_queue.empty():
                t_block, proto = self.block_queue.peek()  # peek: knows_block while processing
                if t_block.header.hash in self.chain:
                    log.warn('known block', block=t_block)
                    self.block_queue.get()
                    continue
                if t_block.header.prevhash not in self.chain:
                    log.warn('missing parent', block=t_block)
                    self.block_queue.get()
                    continue
                # FIXME, this is also done in validation and in synchronizer for new_blocks
                if not t_block.header.check_pow():
                    log.warn('invalid pow', block=t_block, FIXME='ban node')
                    self.block_queue.get()
                    continue
                try:  # deserialize
                    st = time.time()
                    block = t_block.to_block(db=self.chain.db)
                    elapsed = time.time() - st
                    log.debug('deserialized', elapsed='%.4fs' % elapsed, ts=time.time(),
                              gas_used=block.gas_used, gpsec=self.gpsec(block.gas_used, elapsed))
                except processblock.InvalidTransaction as e:
                    log.warn('invalid transaction', block=t_block, error=e, FIXME='ban node')
                    self.block_queue.get()
                    continue
                except VerificationFailed as e:
                    log.warn('verification failed', error=e, FIXME='ban node')
                    self.block_queue.get()
                    continue
                log.debug('adding', block=block, ts=time.time())
                if self.chain.add_block(block, forward_pending_transactions=self.is_mining):
                    now = time.time()
                    log.debug('added', block=block, ts=now, txs=len(block.get_transactions()))
                    if t_block.newblock_timestamp:
                        total = now - t_block.newblock_timestamp
                        self.newblock_processing_times.append(total)
                        avg = statistics.mean(self.newblock_processing_times)
                        med = statistics.median(self.newblock_processing_times)
                        max_ = max(self.newblock_processing_times)
                        min_ = min(self.newblock_processing_times)
                        log.debug('processing time', last=total, avg=avg, max=max_, min=min_,
                                 median=med)
                else:
                    log.warn('could not add', block=block)
                self.block_queue.get()  # remove block from queue (we peeked only)
                gevent.sleep(0.001)
        finally:
            self.add_blocks_lock = False
            self.add_transaction_lock.release()

    def gpsec(self, gas_spent=0, elapsed=0):
        """Accumulate counters and return average gas processed per second."""
        if gas_spent:
            self.processed_gas += gas_spent
            self.processed_elapsed += elapsed
        return int(self.processed_gas / (0.001 + self.processed_elapsed))

    def broadcast_newblock(self, block, chain_difficulty=None, origin=None):
        """Broadcast `block` to all peers except the one it came from."""
        if not chain_difficulty:
            assert block.hash in self.chain
            chain_difficulty = block.chain_difficulty()
        assert isinstance(block, (eth_protocol.TransientBlock, Block))
        if self.broadcast_filter.update(block.header.hash):
            log.debug('broadcasting newblock', origin=origin)
            bcast = self.app.services.peermanager.broadcast
            bcast(eth_protocol.ETHProtocol, 'newblock', args=(block, chain_difficulty),
                  exclude_peers=[origin.peer] if origin else [])
        else:
            log.debug('already broadcasted block')

    def broadcast_transaction(self, tx, origin=None):
        """Broadcast `tx` to all peers except the one it came from."""
        assert isinstance(tx, Transaction)
        if self.broadcast_filter.update(tx.hash):
            log.debug('broadcasting tx', origin=origin)
            bcast = self.app.services.peermanager.broadcast
            bcast(eth_protocol.ETHProtocol, 'transactions', args=(tx,),
                  exclude_peers=[origin.peer] if origin else [])
        else:
            log.debug('already broadcasted tx')

    # wire protocol receivers ###########

    def on_wire_protocol_start(self, proto):
        """Register message callbacks on a new peer and send our status."""
        log.debug('----------------------------------')
        log.debug('on_wire_protocol_start', proto=proto)
        assert isinstance(proto, self.wire_protocol)
        # register callbacks
        proto.receive_status_callbacks.append(self.on_receive_status)
        proto.receive_transactions_callbacks.append(self.on_receive_transactions)
        proto.receive_getblockhashes_callbacks.append(self.on_receive_getblockhashes)
        proto.receive_blockhashes_callbacks.append(self.on_receive_blockhashes)
        proto.receive_getblocks_callbacks.append(self.on_receive_getblocks)
        proto.receive_blocks_callbacks.append(self.on_receive_blocks)
        proto.receive_newblock_callbacks.append(self.on_receive_newblock)
        proto.receive_newblockhashes_callbacks.append(self.on_newblockhashes)

        # send status
        head = self.chain.head
        proto.send_status(chain_difficulty=head.chain_difficulty(), chain_head_hash=head.hash,
                          genesis_hash=self.chain.genesis.hash)

    def on_wire_protocol_stop(self, proto):
        assert isinstance(proto, self.wire_protocol)
        log.debug('----------------------------------')
        log.debug('on_wire_protocol_stop', proto=proto)

    def on_receive_status(self, proto, eth_version, network_id, chain_difficulty, chain_head_hash,
                          genesis_hash):
        """Verify the peer's network id and genesis, start syncing, and send
        our pending transactions."""
        log.debug('----------------------------------')
        log.debug('status received', proto=proto, eth_version=eth_version)
        assert eth_version == proto.version, (eth_version, proto.version)
        if network_id != self.config['eth'].get('network_id', proto.network_id):
            log.warn("invalid network id", remote_network_id=network_id,
                     expected_network_id=self.config['eth'].get('network_id', proto.network_id))
            raise eth_protocol.ETHProtocolError('wrong network_id')

        # check genesis
        if genesis_hash != self.chain.genesis.hash:
            log.warn("invalid genesis hash", remote_id=proto, genesis=genesis_hash.encode('hex'))
            raise eth_protocol.ETHProtocolError('wrong genesis block')

        # request chain
        self.synchronizer.receive_status(proto, chain_head_hash, chain_difficulty)

        # send transactions
        transactions = self.chain.get_transactions()
        if transactions:
            log.debug("sending transactions", remote_id=proto)
            proto.send_transactions(*transactions)

    # transactions

    def on_receive_transactions(self, proto, transactions):
        "receives rlp.decoded serialized"
        log.debug('----------------------------------')
        log.debug('remote_transactions_received', count=len(transactions), remote_id=proto)
        for tx in transactions:
            self.add_transaction(tx, origin=proto)

    # blockhashes ###########

    def on_newblockhashes(self, proto, newblockhashes):
        """
        msg sent out if not the full block is propagated
        chances are high, that we get the newblock, though.
        """
        log.debug('----------------------------------')
        log.debug("recv newnewblockhashes", num=len(newblockhashes), remote_id=proto)
        self.synchronizer.receive_newblockhashes(proto, newblockhashes)

    def on_receive_getblockhashes(self, proto, child_block_hash, count):
        """Answer with up to `count` ancestor hashes of `child_block_hash`
        (empty reply if the block is unknown)."""
        log.debug('----------------------------------')
        log.debug("handle_get_blockhashes", count=count, block_hash=encode_hex(child_block_hash))
        max_hashes = min(count, self.wire_protocol.max_getblockhashes_count)
        found = []
        if child_block_hash not in self.chain:
            log.debug("unknown block")
            proto.send_blockhashes(*[])
            return

        last = child_block_hash
        while len(found) < max_hashes:
            try:
                last = rlp.decode_lazy(self.chain.db.get(last))[0][0]  # [head][prevhash]
            except KeyError:
                # this can happen if we started a chain download, which did not complete
                # should not happen if the hash is part of the canonical chain
                log.warn('KeyError in getblockhashes', hash=last)
                break
            if last:
                found.append(last)
            else:
                break

        log.debug("sending: found block_hashes", count=len(found))
        proto.send_blockhashes(*found)

    def on_receive_blockhashes(self, proto, blockhashes):
        log.debug('----------------------------------')
        if blockhashes:
            log.debug("on_receive_blockhashes", count=len(blockhashes), remote_id=proto,
                      first=encode_hex(blockhashes[0]), last=encode_hex(blockhashes[-1]))
        else:
            log.debug("recv 0 remote block hashes, signifying genesis block")
        self.synchronizer.receive_blockhashes(proto, blockhashes)

    # blocks ################

    def on_receive_getblocks(self, proto, blockhashes):
        """Send the raw serialized blocks we have for the requested hashes."""
        log.debug('----------------------------------')
        log.debug("on_receive_getblocks", count=len(blockhashes))
        found = []
        for bh in blockhashes[:self.wire_protocol.max_getblocks_count]:
            try:
                found.append(self.chain.db.get(bh))
            except KeyError:
                log.debug("unknown block requested", block_hash=encode_hex(bh))
        if found:
            log.debug("found", count=len(found))
            proto.send_blocks(*found)

    def on_receive_blocks(self, proto, transient_blocks):
        log.debug('----------------------------------')
        blk_number = max(x.header.number for x in transient_blocks) if transient_blocks else 0
        log.debug("recv blocks", count=len(transient_blocks), remote_id=proto,
                  highest_number=blk_number)
        if transient_blocks:
            self.synchronizer.receive_blocks(proto, transient_blocks)

    def on_receive_newblock(self, proto, block, chain_difficulty):
        log.debug('----------------------------------')
        log.debug("recv newblock", block=block, remote_id=proto)
        self.synchronizer.receive_newblock(proto, block, chain_difficulty)
예제 #26
0
class ChainService(WiredService):

    """
    Manages the chain and requests to it.
    """
    # required by BaseService
    name = 'chain'
    default_config = dict(eth=dict(privkey_hex=''))

    # required by WiredService
    wire_protocol = eth_protocol.ETHProtocol  # create for each peer

    # initialized after configure:
    chain = None
    genesis = None
    miner = None
    synchronizer = None
    config = None

    def __init__(self, app):
        self.config = app.config
        self.db = app.services.db
        assert self.db is not None
        super(ChainService, self).__init__(app)
        log.info('initializing chain')
        self.chain = Chain(self.db, new_head_cb=self._on_new_head)
        self.synchronizer = Synchronizer(self.chain)
        self.chain.coinbase = privtoaddr(self.config['eth']['privkey_hex'].decode('hex'))

    def _on_new_head(self, block):
        self.miner = Miner(self.chain.head_candidate)
        # if we are not syncing, forward all blocks
        if not self.synchronizer.synchronization_tasks:
            log.debug("_on_new_head", block=block)
            # signals.broadcast_new_block.send(sender=None, block=block)  # FIXME

    def loop_body(self):
        ts = time.time()
        pct_cpu = self.config['misc']['mining']
        if pct_cpu > 0:
            self.mine()
            delay = (time.time() - ts) * (100. / pct_cpu - 1)
            assert delay >= 0
            time.sleep(min(delay, 1.))
        else:
            time.sleep(.01)

    def mine(self):
        block = self.miner.mine()
        if block:
            # create new block
            assert self.chain.add_block(block), ("newly mined block is invalid!?", block)

    def receive_chain(self, transient_blocks, proto=None, new_block=False):
        _db = EphemDB()
        # assuming to receive chain order w/ oldest block first
        transient_blocks.sort(key=lambda x: x.header.number)
        assert transient_blocks[0].header.number <= transient_blocks[-1].header.number

        # notify syncer
        self.synchronizer.received_blocks(proto, transient_blocks)

        for t_block in transient_blocks:  # oldest to newest

            if t_block.header.hash in self.chain:
                log.debug('known', block=t_block)
                continue

            if t_block.header.prevhash not in self.chain:
                log.debug('unknown parent', block=t_block,
                          parent=t_block.header.prevhash.encode('hex'))
                # FIXME: not properly handled if we receive a differnt chain?
                # no problem with new getBlocks?
                self.synchronizer.synchronize_unknown_block(proto, t_block.header.hash,
                                                            t_block=t_block)
                continue

            log.debug('checking pow', block=t_block)
            if not t_block.header.check_pow(_db):
                log.warn('invalid pow', block=t_block, proto=proto)
                continue

            # BROADCAST HERE !!!

            log.debug('deserializing', block=t_block, gas_used=t_block.header.gas_used)
            if t_block.header.prevhash == self.chain.head.hash:
                log.trace('is child')
            if t_block.header.prevhash == self.chain.genesis.hash:
                log.trace('is child of genesis')
            try:
                # block = blocks.Block(t_block.header, t_block.transaction_list, t_block.uncles,
                #                      db=self.chain.db)
                st = time.time()
                block = t_block.to_block(db=self.chain.db)
                elapsed = time.time() - st
                log.debug('deserialized', elapsed='%.2fs' % elapsed,
                          gas_used=block.gas_used, gpsec=int(block.gas_used / elapsed))
            except processblock.InvalidTransaction as e:
                # FIXME there might be another exception in
                # blocks.deserializeChild when replaying transactions
                # if this fails, we need to rewind state
                log.warn('invalid transaction', block=t_block, error=e, proto=proto)
                # stop current syncing of this chain and skip the child blocks
                self.synchronizer.stop_synchronization(proto)
                return
            except blocks.UnknownParentException:
                # gets never called # FIXME
                log.debug('unknown parent', block=t_block)
                if t_block.header.prevhash == blocks.GENESIS_PREVHASH:
                    log.warn('wrong genesis', block=t_block, proto=proto)
                    if proto is not None:
                        proto.send_disconnect(reason='Wrong genesis block')
                    raise eth_protocol.ETHProtocolError('wrong genesis')
                else:  # should be a single newly mined block
                    assert t_block.header.prevhash not in self.chain
                    if t_block.header.prevhash == self.chain.genesis.hash:
                        print t_block.serialize().encode('hex')
                    assert t_block.header.prevhash != self.chain.genesis.hash
                    log.debug('unknown parent', block=t_block,
                              parent_hash=encode_hex(t_block.header.prevhash), remote_id=proto)
                    if len(transient_blocks) != 1:
                        # strange situation here.
                        # we receive more than 1 block, so it's not a single newly mined one
                        # sync/network/... failed to add the needed parent at some point
                        # well, this happens whenever we can't validate a block!
                        # we should disconnect!
                        log.warn(
                            'blocks received, but unknown parent.', num=len(transient_blocks))
                    if proto is not None:
                        # request chain for newest known hash
                        self.synchronizer.synchronize_unknown_block(
                            proto, transient_blocks[-1].header.hash, t_block=transient_blocks[-1])
                break
            if block.hash in self.chain:
                log.debug('known', block=block)
            else:
                assert block.has_parent()
                # assume single block is newly mined block
                old_head_num = self.chain.head.number
                success = self.chain.add_block(block)
                if success:
                    log.debug('added', block=block)
                else:
                    raise eth_protocol.ETHProtocolError('could not add block')

                # broadcast
                if block.number > old_head_num and new_block:
                    log.debug('broadcasting new head', block=block)
                    f = self.app.services.peermanager.broadcast
                    f(eth_protocol.ETHProtocol, 'newblock', args=(block, block.chain_difficulty()),
                      num_peers=None, exclude_protos=[proto])

    # wire protocol receivers ###########

    def on_wire_protocol_start(self, proto):
        log.debug('on_wire_protocol_start', proto=proto)
        assert isinstance(proto, self.wire_protocol)
        # register callbacks
        proto.receive_status_callbacks.append(self.on_receive_status)
        proto.receive_transactions_callbacks.append(self.on_receive_transactions)
        proto.receive_getblockhashes_callbacks.append(self.on_receive_getblockhashes)
        proto.receive_blockhashes_callbacks.append(self.on_receive_blockhashes)
        proto.receive_getblocks_callbacks.append(self.on_receive_getblocks)
        proto.receive_blocks_callbacks.append(self.on_receive_blocks)
        proto.receive_newblock_callbacks.append(self.on_receive_newblock)

        # send status
        head = self.chain.head
        proto.send_status(total_difficulty=head.chain_difficulty(), chain_head_hash=head.hash,
                          genesis_hash=self.chain.genesis.hash)

    def on_wire_protocol_stop(self, proto):
        assert isinstance(proto, self.wire_protocol)
        log.debug('on_wire_protocol_stop', proto=proto)

    def on_receive_status(self, proto, eth_version, network_id, total_difficulty, chain_head_hash,
                          genesis_hash):

        log.debug('status received', proto=proto, eth_version=eth_version)
        assert eth_version == proto.version, (eth_version, proto.version)
        if network_id != proto.network_id:
            log.warn("invalid network id", remote_id=proto, network_id=network_id)
            raise eth_protocol.ETHProtocolError('wrong network_id')

        # check genesis
        if genesis_hash != self.chain.genesis.hash:
            log.warn("invalid genesis hash", remote_id=proto, genesis=genesis_hash.encode('hex'))
            raise eth_protocol.ETHProtocolError('wrong genesis block')

        # request chain
        self.synchronizer.synchronize_status(proto, chain_head_hash, total_difficulty)

        # send transactions
        log.debug("sending transactions", remote_id=proto)
        transactions = self.chain.get_transactions()
        proto.send_transactions(*transactions)

    # transactions

    def on_receive_transactions(self, proto, transactions):
        "receives rlp.decoded serialized"
        log.debug('remote_transactions_received', count=len(transactions), remote_id=proto)
        log.debug('skipping, FIXME')
        return
        for tx in transactions:
            # fixme bloomfilter
            self.chain.add_transaction(tx)

    # blockhashes ###########

    def on_receive_getblockhashes(self, proto, child_block_hash, count):
        log.debug("handle_get_block_hashes", count=count, block_hash=encode_hex(child_block_hash))
        max_hashes = min(count, MAX_GET_CHAIN_SEND_HASHES)
        found = []
        if child_block_hash not in self.chain:
            log.debug("unknown block")
            proto.send_blockhashes([])
        last = self.chain.get(child_block_hash)
        while len(found) < max_hashes:
            if last.has_parent():
                last = last.get_parent()
                found.append(last.hash)
            else:
                break
        log.debug("sending: found block_hashes", count=len(found))
        proto.send_blockhashes(*found)

    def on_receive_blockhashes(self, proto, block_hashes):
        if block_hashes:
            log.debug("on_receive_blockhashes", count=len(block_hashes), remote_id=proto,
                      first=encode_hex(block_hashes[0]), last=encode_hex(block_hashes[-1]))
        else:
            log.debug("recv 0 remote block hashes, signifying genesis block")
        self.synchronizer.received_block_hashes(proto, block_hashes)

    # blocks ################

    def on_receive_getblocks(self, proto, block_hashes):
        log.debug("on_receive_getblocks", count=len(block_hashes))
        found = []
        for bh in block_hashes[:MAX_GET_CHAIN_REQUEST_BLOCKS]:
            if bh in self.chain:
                found.append(self.chain.get(bh))
            else:
                log.debug("unknown block requested", block_hash=encode_hex(bh))
        if found:
            log.debug("found", count=len(found), first=found[0].hex_hash())
            proto.send_blocks(*found)

    def on_receive_blocks(self, proto, transient_blocks):
        log.debug("recv remote blocks", count=len(transient_blocks), remote_id=proto,
                  highest_number=max(x.header.number for x in transient_blocks))
        if transient_blocks:
            self.receive_chain(transient_blocks, proto)

    def on_receive_newblock(self, proto, block, total_difficulty):
        log.debug("recv new remote block", block=block, remote_id=proto)
        self.receive_chain([block], proto, new_block=True)
예제 #27
0
class Validator():
    def __init__(self, genesis, key, network, env, time_offset=5):
        """Simulated Casper validator attached to the test p2p `network`.

        :param genesis: genesis state used to build this validator's Chain
        :param key: the validator's private key
        :param network: simulated p2p network (supplies time and broadcast)
        :param env: chain environment passed through to `Chain`
        :param time_offset: half-range of the random per-validator clock skew
        """
        # Create a chain object
        self.chain = Chain(genesis, env=env)
        # Use the validator's time as the chain's time
        self.chain.time = lambda: self.get_timestamp()
        # My private key
        self.key = key
        # My address
        self.address = privtoaddr(key)
        # My randao
        self.randao = RandaoManager(sha3(self.key))
        # Pointer to the test p2p network
        self.network = network
        # Record of objects already received and processed
        self.received_objects = {}
        # The minimum eligible timestamp given a particular number of skips
        self.next_skip_count = 0
        self.next_skip_timestamp = 0
        # This validator's indices in the state
        self.indices = None
        # Is this validator active?
        self.active = False
        # Code that verifies signatures from this validator
        self.validation_code = generate_validation_code(privtoaddr(key))
        # Parents that this validator has already built a block on
        self.used_parents = {}
        # This validator's clock offset (for testing purposes)
        self.time_offset = random.randrange(time_offset) - (time_offset // 2)
        # Determine the epoch length
        self.epoch_length = self.call_casper('getEpochLength')
        # Give this validator a unique ID
        self.id = len(ids)
        ids.append(self.id)
        self.find_my_indices()
        self.cached_head = self.chain.head_hash

    def call_casper(self, fun, args=None):
        """Make a constant call to `fun` on the Casper contract using this
        chain's current state.

        :param fun: contract function name
        :param args: argument list; defaults to no arguments. A `None`
            sentinel replaces the original mutable `[]` default to avoid
            the shared-default-argument pitfall.
        """
        return call_casper(self.chain.state, fun, [] if args is None else args)

    def find_my_indices(self):
        """Search the historical validator sets for this validator's code.

        Sets `self.indices`, `self.active` and the next-skip bookkeeping
        depending on whether the validator is registered and whether the
        current epoch falls inside its [start, end) validity window.
        """
        epoch = self.chain.state.block_number // self.epoch_length
        print 'Finding indices for epoch %d' % epoch, self.call_casper(
            'getEpoch')
        for i in range(len(validator_sizes)):
            valcount = self.call_casper('getHistoricalValidatorCount',
                                        [epoch, i])
            print i, valcount, self.call_casper('getHistoricalValidatorCount',
                                                [0, i])
            for j in range(valcount):
                valcode = self.call_casper('getValidationCode', [i, j])
                print(valcode, self.validation_code)
                if valcode == self.validation_code:
                    self.indices = i, j
                    start = self.call_casper('getStartEpoch', [i, j])
                    end = self.call_casper('getEndEpoch', [i, j])
                    if start <= epoch < end:
                        # registered and inside the validity window: active
                        self.active = True
                        self.next_skip_count = 0
                        self.next_skip_timestamp = get_timestamp(
                            self.chain, self.next_skip_count)
                        print 'In current validator set at (%d, %d)' % (i, j)
                        return
                    else:
                        self.indices = None
                        self.active = False
                        self.next_skip_count, self.next_skip_timestamp = 0, 0
                        print 'Registered at (%d, %d) but not in current set' % (
                            i, j)
                        return
        # validation code not found in any validator set
        self.indices = None
        self.active = False
        self.next_skip_count, self.next_skip_timestamp = 0, 0
        print 'Not in current validator set'

    def get_uncles(self):
        """Return headers of known off-chain blocks ("dunkles") that the
        Casper contract has not yet recorded as included."""
        back = self.chain.state.block_number - CHECK_FOR_UNCLES_BACK
        ancestor = self.chain.get_block(self.chain.get_blockhash_by_number(back))
        if ancestor:
            descendants = self.chain.get_descendants(ancestor)
        else:
            descendants = self.chain.get_descendants(
                self.chain.db.get('GENESIS_HASH'))
        uncle_headers = []
        for blk in descendants:
            # candidate uncles are real Block objects not on the canonical chain
            if blk not in self.chain and isinstance(blk, Block):
                if not call_casper(self.chain.state, 'isDunkleIncluded',
                                   [blk.header.hash]):
                    uncle_headers.append(blk.header)
        return uncle_headers

    def get_timestamp(self):
        """This validator's local clock: network time scaled by 0.01, shifted
        by its fixed per-validator offset."""
        base_time = int(self.network.time * 0.01)
        return base_time + self.time_offset

    def on_receive(self, obj):
        """Handle an object (or list of objects) arriving from the network.

        Blocks are added to the chain, re-broadcast together with a
        ChildRequest, and the head is updated; transactions are re-broadcast
        only if the chain accepts them. Objects already seen (recorded in
        `received_objects`) are ignored.
        """
        if isinstance(obj, list):
            for _obj in obj:
                self.on_receive(_obj)
            return
        if obj.hash in self.received_objects:
            return
        if isinstance(obj, Block):
            print 'Receiving block', obj
            assert obj.hash not in self.chain
            block_success = self.chain.add_block(obj)
            self.network.broadcast(self, obj)
            self.network.broadcast(self, ChildRequest(obj.header.hash))
            self.update_head()
        elif isinstance(obj, Transaction):
            if self.chain.add_transaction(obj):
                self.network.broadcast(self, obj)
        self.received_objects[obj.hash] = True
        for x in self.chain.get_chain():
            assert x.hash in self.received_objects

    def tick(self):
        """Periodic validator loop: attempt to propose a block on the current head.

        A block is attempted only if (i) this validator is active (has
        indices), (ii) it has not already built on this parent, and (iii) the
        local clock has reached the eligible timestamp for the current skip
        count. Also occasionally drains the chain's time/parent queues to pick
        up blocks that arrived early or out of order.
        """
        # Try to create a block
        # Conditions:
        # (i) you are an active validator,
        # (ii) you have not yet made a block with this parent
        if self.indices and self.chain.head_hash not in self.used_parents:
            t = self.get_timestamp()
            # Is it early enough to create the block?
            if t >= self.next_skip_timestamp and (
                    not self.chain.head
                    or t > self.chain.head.header.timestamp):
                # Wrong validator; in this case, just wait for the next skip count
                if not check_skips(self.chain, self.indices,
                                   self.next_skip_count):
                    self.next_skip_count += 1
                    self.next_skip_timestamp = get_timestamp(
                        self.chain, self.next_skip_count)
                    # print 'Incrementing proposed timestamp for block %d to %d' % \
                    #     (self.chain.head.header.number + 1 if self.chain.head else 0, self.next_skip_timestamp)
                    return
                self.used_parents[self.chain.head_hash] = True
                # Simulated 0.1% chance of validator failure to make a block
                # (random.random() > 0.999)
                if random.random() > 0.999:
                    print 'Simulating validator failure, block %d not created' % (
                        self.chain.head.header.number +
                        1 if self.chain.head else 0)
                    return
                # Make the block, make sure it's valid
                pre_dunkle_count = call_casper(self.chain.state,
                                               'getTotalDunklesIncluded', [])
                # Report up to 4 known uncles (dunkles) to the Casper contract
                dunkle_txs = []
                for i, u in enumerate(self.get_uncles()[:4]):
                    start_nonce = self.chain.state.get_nonce(self.address)
                    txdata = casper_ct.encode('includeDunkle', [rlp.encode(u)])
                    dunkle_txs.append(
                        Transaction(start_nonce + i, 0, 650000,
                                    self.chain.config['CASPER_ADDR'], 0,
                                    txdata).sign(self.key))
                # Force the dunkle transactions into the pool (reversed so they
                # end up in nonce order)
                for dtx in dunkle_txs[::-1]:
                    self.chain.add_transaction(dtx, force=True)
                blk = make_block(self.chain, self.key, self.randao,
                                 self.indices, self.next_skip_count)
                global global_block_counter
                global_block_counter += 1
                # Every dunkle tx must have made it into our own block
                for dtx in dunkle_txs:
                    assert dtx in blk.transactions, (dtx, blk.transactions)
                print 'made block with timestamp %d and %d dunkles' % (
                    blk.timestamp, len(dunkle_txs))
                assert blk.timestamp >= self.next_skip_timestamp
                assert self.chain.add_block(blk)
                self.update_head()
                # The Casper contract must now account for the reported dunkles
                post_dunkle_count = call_casper(self.chain.state,
                                                'getTotalDunklesIncluded', [])
                assert post_dunkle_count - pre_dunkle_count == len(dunkle_txs)
                self.received_objects[blk.hash] = True
                print 'Validator %d making block %d (%s)' % (
                    self.id, blk.header.number,
                    blk.header.hash[:8].encode('hex'))
                self.network.broadcast(self, blk)
        # Sometimes we received blocks too early or out of order;
        # run an occasional loop that processes these
        if random.random() < 0.02:
            self.chain.process_time_queue()
            self.chain.process_parent_queue()
            self.update_head()

    def update_head(self):
        """React to a (possible) change of the chain head.

        Re-derives this validator's indices at epoch boundaries and recomputes
        the earliest timestamp at which it may propose on the new head.
        """
        # No-op if the head we last acted on is still current
        if self.cached_head == self.chain.head_hash:
            return
        self.cached_head = self.chain.head_hash
        # The validator set can change at epoch boundaries; re-check our position
        if self.chain.state.block_number % self.epoch_length == 0:
            self.find_my_indices()
        if self.indices:
            # Reset the skip counter for the new head
            self.next_skip_count = 0
            self.next_skip_timestamp = get_timestamp(self.chain,
                                                     self.next_skip_count)
        print 'Head changed: %s, will attempt creating a block at %d' % (
            self.chain.head_hash.encode('hex'), self.next_skip_timestamp)

    def withdraw(self, gasprice=20 * 10**9):
        """Broadcast a transaction that starts this validator's withdrawal.

        Signs the canonical 32-byte withdrawal message with the validator's
        key and submits a `startWithdrawal` call (with the signature) to the
        Casper contract at the given gas price.
        """
        # Sign the fixed withdrawal message with our private key
        h = sha3(b'withdrawwithdrawwithdrawwithdraw')
        v, r, s = ecsign(h, self.key)
        sigdata = encode_int32(v) + encode_int32(r) + encode_int32(s)
        txdata = casper_ct.encode('startWithdrawal',
                                  [self.indices[0], self.indices[1], sigdata])
        tx = Transaction(self.chain.state.get_nonce(self.address), gasprice,
                         650000, self.chain.config['CASPER_ADDR'], 0,
                         txdata).sign(self.key)
        self.chain.add_transaction(tx)
        self.network.broadcast(self, tx)
        print 'Withdrawing!'
class ChainService(WiredService):
    """
    Manages the chain and requests to it.
    """
    # required by BaseService
    name = 'chain'
    default_config = dict(eth=dict(network_id=0, genesis='', pruning=-1),
                          block=ethereum_config.default_config)

    # required by WiredService
    wire_protocol = eth_protocol.ETHProtocol  # create for each peer

    # initialized after configure:
    chain = None
    genesis = None
    synchronizer = None
    config = None
    block_queue_size = 1024
    transaction_queue_size = 1024
    processed_gas = 0
    processed_elapsed = 0

    def __init__(self, app):
        """Set up the database (pruning or not), the chain, the synchronizer
        and the block/transaction queues."""
        self.config = app.config
        sce = self.config['eth']
        # A database is marked once as pruning or non-pruning; switching modes
        # later would corrupt state, so refuse to reopen in the other mode.
        if int(sce['pruning']) >= 0:
            self.db = RefcountDB(app.services.db)
            if "I am not pruning" in self.db.db:
                raise Exception("This database was initialized as non-pruning."
                                " Kinda hard to start pruning now.")
            self.db.ttl = int(sce['pruning'])
            self.db.db.put("I am pruning", "1")
        else:
            self.db = app.services.db
            if "I am pruning" in self.db:
                raise Exception("This database was initialized as pruning."
                                " Kinda hard to stop pruning now.")
            self.db.put("I am not pruning", "1")

        # A database is also bound to a single network id.
        if 'network_id' in self.db:
            db_network_id = self.db.get('network_id')
            if db_network_id != str(sce['network_id']):
                raise Exception(
                    "This database was initialized with network_id {} "
                    "and can not be used when connecting to network_id {}".
                    format(db_network_id, sce['network_id']))

        else:
            self.db.put('network_id', str(sce['network_id']))
            self.db.commit()

        assert self.db is not None

        super(ChainService, self).__init__(app)
        log.info('initializing chain')
        coinbase = app.services.accounts.coinbase
        env = Env(self.db, sce['block'])
        self.chain = Chain(env,
                           new_head_cb=self._on_new_head,
                           coinbase=coinbase)

        log.info('chain at', number=self.chain.head.number)
        if 'genesis_hash' in sce:
            assert sce['genesis_hash'] == self.chain.genesis.hex_hash()

        self.synchronizer = Synchronizer(self, force_sync=None)

        self.block_queue = Queue(maxsize=self.block_queue_size)
        self.transaction_queue = Queue(maxsize=self.transaction_queue_size)
        self.add_blocks_lock = False
        self.add_transaction_lock = gevent.lock.Semaphore()
        self.broadcast_filter = DuplicatesFilter()
        self.on_new_head_cbs = []
        self.on_new_head_candidate_cbs = []
        self.newblock_processing_times = deque(maxlen=1000)

    @property
    def is_syncing(self):
        """True while a sync task is running (state may be outdated)."""
        return self.synchronizer.synctask is not None

    @property
    def is_mining(self):
        """True if a pow service is registered and actively mining."""
        if 'pow' in self.app.services:
            return self.app.services.pow.active
        return False

    def _on_new_head(self, block):
        """Run registered new-head callbacks; a new head implies a new
        head candidate as well."""
        # DEBUG('new head cbs', len(self.on_new_head_cbs))
        for cb in self.on_new_head_cbs:
            cb(block)
        self._on_new_head_candidate(
        )  # we implicitly have a new head_candidate

    def _on_new_head_candidate(self):
        """Run registered new-head-candidate callbacks."""
        # DEBUG('new head candidate cbs', len(self.on_new_head_candidate_cbs))
        for cb in self.on_new_head_candidate_cbs:
            cb(self.chain.head_candidate)

    def add_transaction(self, tx, origin=None):
        """Validate, broadcast, and add a transaction to the chain's pool.

        Silently drops the tx while syncing, when it is already known, or when
        it fails validation against the current head candidate.
        """
        if self.is_syncing:
            return  # we can not evaluate the tx based on outdated state
        log.debug('add_transaction',
                  locked=self.add_transaction_lock.locked(),
                  tx=tx)
        assert isinstance(tx, Transaction)
        assert origin is None or isinstance(origin, BaseProtocol)

        if tx.hash in self.broadcast_filter:
            log.debug('discarding known tx')  # discard early
            return

        # validate transaction
        try:
            validate_transaction(self.chain.head_candidate, tx)
            log.debug('valid tx, broadcasting')
            self.broadcast_transaction(tx, origin=origin)  # asap
        except InvalidTransaction as e:
            log.debug('invalid tx', error=e)
            return

        if origin is not None:  # not locally added via jsonrpc
            if not self.is_mining or self.is_syncing:
                log.debug('discarding tx',
                          syncing=self.is_syncing,
                          mining=self.is_mining)
                return

        self.add_transaction_lock.acquire()
        success = self.chain.add_transaction(tx)
        self.add_transaction_lock.release()
        if success:
            self._on_new_head_candidate()

    def add_block(self, t_block, proto):
        "adds a block to the block_queue and spawns _add_block if not running"
        self.block_queue.put((t_block, proto))  # blocks if full
        if not self.add_blocks_lock:
            self.add_blocks_lock = True  # need to lock here (ctx switch is later)
            gevent.spawn(self._add_blocks)

    def add_mined_block(self, block):
        """Add a locally mined block to the chain and broadcast it as the
        new head."""
        log.debug('adding mined block', block=block)
        assert isinstance(block, Block)
        assert block.header.check_pow()
        if self.chain.add_block(block):
            log.debug('added', block=block, ts=time.time())
            assert block == self.chain.head
            self.broadcast_newblock(block,
                                    chain_difficulty=block.chain_difficulty())

    def knows_block(self, block_hash):
        "if block is in chain or in queue"
        if block_hash in self.chain:
            return True
        # check if queued or processed
        for i in range(len(self.block_queue.queue)):
            if block_hash == self.block_queue.queue[i][0].header.hash:
                return True
        return False

    def _add_blocks(self):
        """Drain the block queue: validate, deserialize and add each queued
        transient block to the chain (runs in its own greenlet)."""
        log.debug('add_blocks',
                  qsize=self.block_queue.qsize(),
                  add_tx_lock=self.add_transaction_lock.locked())
        assert self.add_blocks_lock is True
        self.add_transaction_lock.acquire()
        try:
            while not self.block_queue.empty():
                t_block, proto = self.block_queue.peek(
                )  # peek: knows_block while processing
                if t_block.header.hash in self.chain:
                    log.warn('known block', block=t_block)
                    self.block_queue.get()
                    continue
                if t_block.header.prevhash not in self.chain:
                    log.warn('missing parent',
                             block=t_block,
                             head=self.chain.head)
                    self.block_queue.get()
                    continue
                # FIXME, this is also done in validation and in synchronizer for new_blocks
                if not t_block.header.check_pow():
                    log.warn('invalid pow', block=t_block, FIXME='ban node')
                    sentry.warn_invalid(t_block, 'InvalidBlockNonce')
                    self.block_queue.get()
                    continue
                try:  # deserialize
                    st = time.time()
                    block = t_block.to_block(env=self.chain.env)
                    elapsed = time.time() - st
                    log.debug('deserialized',
                              elapsed='%.4fs' % elapsed,
                              ts=time.time(),
                              gas_used=block.gas_used,
                              gpsec=self.gpsec(block.gas_used, elapsed))
                except processblock.InvalidTransaction as e:
                    log.warn('invalid transaction',
                             block=t_block,
                             error=e,
                             FIXME='ban node')
                    errtype = \
                        'InvalidNonce' if isinstance(e, InvalidNonce) else \
                        'NotEnoughCash' if isinstance(e, InsufficientBalance) else \
                        'OutOfGasBase' if isinstance(e, InsufficientStartGas) else \
                        'other_transaction_error'
                    sentry.warn_invalid(t_block, errtype)
                    self.block_queue.get()
                    continue
                except VerificationFailed as e:
                    log.warn('verification failed', error=e, FIXME='ban node')
                    sentry.warn_invalid(t_block, 'other_block_error')
                    self.block_queue.get()
                    continue
                # Check canary
                score = 0
                for address in canary_addresses:
                    if block.get_storage_data(address, 1) > 0:
                        score += 1
                if score >= 2:
                    log.warn('canary triggered')
                    # bugfix: remove the peeked block from the queue before
                    # skipping; otherwise the same block is reprocessed forever
                    self.block_queue.get()
                    continue
                # All checks passed
                log.debug('adding', block=block, ts=time.time())
                if self.chain.add_block(
                        block, forward_pending_transactions=self.is_mining):
                    now = time.time()
                    log.info('added',
                             block=block,
                             txs=block.transaction_count,
                             gas_used=block.gas_used)
                    if t_block.newblock_timestamp:
                        total = now - t_block.newblock_timestamp
                        self.newblock_processing_times.append(total)
                        avg = statistics.mean(self.newblock_processing_times)
                        med = statistics.median(self.newblock_processing_times)
                        max_ = max(self.newblock_processing_times)
                        min_ = min(self.newblock_processing_times)
                        log.info('processing time',
                                 last=total,
                                 avg=avg,
                                 max=max_,
                                 min=min_,
                                 median=med)
                else:
                    log.warn('could not add', block=block)

                self.block_queue.get(
                )  # remove block from queue (we peeked only)
                gevent.sleep(0.001)
        finally:
            self.add_blocks_lock = False
            self.add_transaction_lock.release()

    def gpsec(self, gas_spent=0, elapsed=0):
        """Accumulate processing stats and return average gas per second.

        The 0.001 addend avoids division by zero before any time has elapsed.
        """
        if gas_spent:
            self.processed_gas += gas_spent
            self.processed_elapsed += elapsed
        return int(self.processed_gas / (0.001 + self.processed_elapsed))

    def broadcast_newblock(self, block, chain_difficulty=None, origin=None):
        """Broadcast a new block to all peers except its origin (at most once
        per block hash)."""
        if not chain_difficulty:
            assert block.hash in self.chain
            chain_difficulty = block.chain_difficulty()
        assert isinstance(block, (eth_protocol.TransientBlock, Block))
        if self.broadcast_filter.update(block.header.hash):
            log.debug('broadcasting newblock', origin=origin)
            bcast = self.app.services.peermanager.broadcast
            bcast(eth_protocol.ETHProtocol,
                  'newblock',
                  args=(block, chain_difficulty),
                  exclude_peers=[origin.peer] if origin else [])
        else:
            log.debug('already broadcasted block')

    def broadcast_transaction(self, tx, origin=None):
        """Broadcast a transaction to all peers except its origin (at most
        once per tx hash)."""
        assert isinstance(tx, Transaction)
        if self.broadcast_filter.update(tx.hash):
            log.debug('broadcasting tx', origin=origin)
            bcast = self.app.services.peermanager.broadcast
            bcast(eth_protocol.ETHProtocol,
                  'transactions',
                  args=(tx, ),
                  exclude_peers=[origin.peer] if origin else [])
        else:
            log.debug('already broadcasted tx')

    # wire protocol receivers ###########

    def on_wire_protocol_start(self, proto):
        """Register receive callbacks on a freshly connected peer and send it
        our status."""
        log.debug('----------------------------------')
        log.debug('on_wire_protocol_start', proto=proto)
        assert isinstance(proto, self.wire_protocol)
        # register callbacks
        proto.receive_status_callbacks.append(self.on_receive_status)
        proto.receive_transactions_callbacks.append(
            self.on_receive_transactions)
        proto.receive_getblockhashes_callbacks.append(
            self.on_receive_getblockhashes)
        proto.receive_blockhashes_callbacks.append(self.on_receive_blockhashes)
        proto.receive_getblocks_callbacks.append(self.on_receive_getblocks)
        proto.receive_blocks_callbacks.append(self.on_receive_blocks)
        proto.receive_newblock_callbacks.append(self.on_receive_newblock)
        proto.receive_newblockhashes_callbacks.append(self.on_newblockhashes)

        # send status
        head = self.chain.head
        proto.send_status(chain_difficulty=head.chain_difficulty(),
                          chain_head_hash=head.hash,
                          genesis_hash=self.chain.genesis.hash)

    def on_wire_protocol_stop(self, proto):
        """Log a peer disconnect."""
        assert isinstance(proto, self.wire_protocol)
        log.debug('----------------------------------')
        log.debug('on_wire_protocol_stop', proto=proto)

    def on_receive_status(self, proto, eth_version, network_id,
                          chain_difficulty, chain_head_hash, genesis_hash):
        """Validate a peer's status (network id, genesis), then start syncing
        and send our pending transactions."""
        log.debug('----------------------------------')
        log.debug('status received', proto=proto, eth_version=eth_version)
        assert eth_version == proto.version, (eth_version, proto.version)
        if network_id != self.config['eth'].get('network_id',
                                                proto.network_id):
            log.warn("invalid network id",
                     remote_network_id=network_id,
                     expected_network_id=self.config['eth'].get(
                         'network_id', proto.network_id))
            raise eth_protocol.ETHProtocolError('wrong network_id')

        # check genesis
        if genesis_hash != self.chain.genesis.hash:
            log.warn("invalid genesis hash",
                     remote_id=proto,
                     genesis=genesis_hash.encode('hex'))
            raise eth_protocol.ETHProtocolError('wrong genesis block')

        # request chain
        self.synchronizer.receive_status(proto, chain_head_hash,
                                         chain_difficulty)

        # send transactions
        transactions = self.chain.get_transactions()
        if transactions:
            log.debug("sending transactions", remote_id=proto)
            proto.send_transactions(*transactions)

    # transactions

    def on_receive_transactions(self, proto, transactions):
        "receives rlp.decoded serialized"
        log.debug('----------------------------------')
        log.debug('remote_transactions_received',
                  count=len(transactions),
                  remote_id=proto)
        for tx in transactions:
            self.add_transaction(tx, origin=proto)

    # blockhashes ###########

    def on_newblockhashes(self, proto, newblockhashes):
        """
        msg sent out if not the full block is propagated
        chances are high, that we get the newblock, though.
        """
        log.debug('----------------------------------')
        log.debug("recv newnewblockhashes",
                  num=len(newblockhashes),
                  remote_id=proto)
        assert len(newblockhashes) <= 32
        self.synchronizer.receive_newblockhashes(proto, newblockhashes)

    def on_receive_getblockhashes(self, proto, child_block_hash, count):
        """Answer a getblockhashes request by walking prevhash links backwards
        from child_block_hash (capped at the protocol maximum)."""
        log.debug('----------------------------------')
        log.debug("handle_get_blockhashes",
                  count=count,
                  block_hash=encode_hex(child_block_hash))
        max_hashes = min(count, self.wire_protocol.max_getblockhashes_count)
        found = []
        if child_block_hash not in self.chain:
            log.debug("unknown block")
            proto.send_blockhashes(*[])
            return

        last = child_block_hash
        while len(found) < max_hashes:
            try:
                last = rlp.decode_lazy(
                    self.chain.db.get(last))[0][0]  # [head][prevhash]
            except KeyError:
                # this can happen if we started a chain download, which did not complete
                # should not happen if the hash is part of the canonical chain
                log.warn('KeyError in getblockhashes', hash=last)
                break
            if last:
                found.append(last)
            else:
                break

        log.debug("sending: found block_hashes", count=len(found))
        proto.send_blockhashes(*found)

    def on_receive_blockhashes(self, proto, blockhashes):
        """Forward received block hashes to the synchronizer (an empty list
        signifies the peer reached its genesis block)."""
        log.debug('----------------------------------')
        if blockhashes:
            log.debug("on_receive_blockhashes",
                      count=len(blockhashes),
                      remote_id=proto,
                      first=encode_hex(blockhashes[0]),
                      last=encode_hex(blockhashes[-1]))
        else:
            log.debug("recv 0 remote block hashes, signifying genesis block")
        self.synchronizer.receive_blockhashes(proto, blockhashes)

    # blocks ################

    def on_receive_getblocks(self, proto, blockhashes):
        """Answer a getblocks request with the raw blocks we have for the
        requested hashes (capped at the protocol maximum)."""
        log.debug('----------------------------------')
        log.debug("on_receive_getblocks", count=len(blockhashes))
        found = []
        for bh in blockhashes[:self.wire_protocol.max_getblocks_count]:
            try:
                found.append(self.chain.db.get(bh))
            except KeyError:
                log.debug("unknown block requested", block_hash=encode_hex(bh))
        if found:
            log.debug("found", count=len(found))
            proto.send_blocks(*found)

    def on_receive_blocks(self, proto, transient_blocks):
        """Forward received (non-empty) transient blocks to the synchronizer."""
        log.debug('----------------------------------')
        blk_number = max(x.header.number
                         for x in transient_blocks) if transient_blocks else 0
        log.debug("recv blocks",
                  count=len(transient_blocks),
                  remote_id=proto,
                  highest_number=blk_number)
        if transient_blocks:
            self.synchronizer.receive_blocks(proto, transient_blocks)

    def on_receive_newblock(self, proto, block, chain_difficulty):
        """Forward an announced new block to the synchronizer."""
        log.debug('----------------------------------')
        log.debug("recv newblock", block=block, remote_id=proto)
        self.synchronizer.receive_newblock(proto, block, chain_difficulty)

    def on_receive_getblockheaders(self, proto, blockhashes):
        """Answer a getblockheaders request with the re-encoded headers of the
        requested blocks."""
        log.debug('----------------------------------')
        log.debug("on_receive_getblockheaders", count=len(blockhashes))
        found = []
        for bh in blockhashes[:self.wire_protocol.max_getblocks_count]:
            try:
                found.append(rlp.encode(rlp.decode(self.chain.db.get(bh))[0]))
            except KeyError:
                log.debug("unknown block requested", block_hash=encode_hex(bh))
        if found:
            log.debug("found", count=len(found))
            proto.send_blockheaders(*found)

    def on_receive_blockheaders(self, proto, transient_blocks):
        log.debug('----------------------------------')
        pass
        # TODO: implement headers first syncing

    def on_receive_hashlookup(self, proto, hashes):
        """Answer a hash-lookup request with the hex-encoded trie nodes stored
        under each requested hash ('' for unknown hashes)."""
        found = []
        for h in hashes:
            try:
                found.append(
                    utils.encode_hex(
                        self.chain.db.get('node:' + utils.decode_hex(h))))
            except KeyError:
                found.append('')
        # bugfix: send the collected responses, not the last requested hash
        proto.send_hashlookupresponse(found)

    def on_receive_hashlookupresponse(self, proto, hashresponses):
        pass
# Example #29 (예제 #29), score: 0
class ChainService(WiredService):
    """
    Manages the chain and requests to it.
    """
    # required by BaseService
    name = 'chain'
    default_config = dict(eth=dict(privkey_hex=''))

    # required by WiredService
    wire_protocol = eth_protocol.ETHProtocol  # create for each peer

    # initialized after configure:
    chain = None
    genesis = None
    synchronizer = None
    config = None
    block_queue_size = 1024
    transaction_queue_size = 1024

    def __init__(self, app):
        """Set up the chain, synchronizer, coinbase and work queues."""
        self.config = app.config
        self.db = app.services.db
        assert self.db is not None
        super(ChainService, self).__init__(app)
        log.info('initializing chain')
        self.chain = Chain(self.db, new_head_cb=self._on_new_head)
        self.synchronizer = Synchronizer(self, force_sync=None)
        # Derive our coinbase address from the configured private key
        self.chain.coinbase = privtoaddr(
            self.config['eth']['privkey_hex'].decode('hex'))

        self.block_queue = Queue(maxsize=self.block_queue_size)
        self.transaction_queue = Queue(maxsize=self.transaction_queue_size)
        self.add_blocks_lock = False
        self.broadcast_filter = DuplicatesFilter()

    def _on_new_head(self, block):
        pass

    def add_block(self, t_block, proto):
        "adds a block to the block_queue and spawns _add_block if not running"
        self.block_queue.put((t_block, proto))  # blocks if full
        if not self.add_blocks_lock:
            self.add_blocks_lock = True
            gevent.spawn(self._add_blocks)

    def _add_blocks(self):
        """Drain the block queue: validate, deserialize and add each queued
        transient block to the chain (runs in its own greenlet)."""
        log.debug('add_blocks', qsize=self.block_queue.qsize())
        try:
            while not self.block_queue.empty():
                t_block, proto = self.block_queue.get()
                if t_block.header.hash in self.chain:
                    log.warn('known block', block=t_block)
                    continue
                if t_block.header.prevhash not in self.chain:
                    log.warn('missing parent', block=t_block)
                    continue
                if not t_block.header.check_pow():
                    log.warn('invalid pow', block=t_block)
                    # FIXME ban node
                    continue
                try:  # deserialize
                    st = time.time()
                    block = t_block.to_block(db=self.chain.db)
                    elapsed = time.time() - st
                    log.debug('deserialized',
                              elapsed='%.2fs' % elapsed,
                              gas_used=block.gas_used,
                              gpsec=int(block.gas_used / elapsed))
                except processblock.InvalidTransaction as e:
                    log.warn('invalid transaction', block=t_block, error=e)
                    # FIXME ban node
                    continue

                if self.chain.add_block(block):
                    log.debug('added', block=block)
                gevent.sleep(0.001)
        finally:
            self.add_blocks_lock = False

    def broadcast_newblock(self, block, chain_difficulty, origin=None):
        """Broadcast a new block to all peers except its origin (at most once
        per block hash)."""
        assert isinstance(block, eth_protocol.TransientBlock)
        if self.broadcast_filter.known(block.header.hash):
            log.debug('already broadcasted block')
        else:
            log.debug('broadcasting newblock', origin=origin)
            bcast = self.app.services.peermanager.broadcast
            bcast(eth_protocol.ETHProtocol,
                  'newblock',
                  args=(block, chain_difficulty),
                  num_peers=None,
                  exclude_protos=[origin])

    # wire protocol receivers ###########

    def on_wire_protocol_start(self, proto):
        """Register receive callbacks on a freshly connected peer and send it
        our status."""
        log.debug('on_wire_protocol_start', proto=proto)
        assert isinstance(proto, self.wire_protocol)
        # register callbacks
        proto.receive_status_callbacks.append(self.on_receive_status)
        proto.receive_transactions_callbacks.append(
            self.on_receive_transactions)
        proto.receive_getblockhashes_callbacks.append(
            self.on_receive_getblockhashes)
        proto.receive_blockhashes_callbacks.append(self.on_receive_blockhashes)
        proto.receive_getblocks_callbacks.append(self.on_receive_getblocks)
        proto.receive_blocks_callbacks.append(self.on_receive_blocks)
        proto.receive_newblock_callbacks.append(self.on_receive_newblock)

        # send status
        head = self.chain.head
        proto.send_status(chain_difficulty=head.chain_difficulty(),
                          chain_head_hash=head.hash,
                          genesis_hash=self.chain.genesis.hash)

    def on_wire_protocol_stop(self, proto):
        """Log a peer disconnect."""
        assert isinstance(proto, self.wire_protocol)
        log.debug('on_wire_protocol_stop', proto=proto)

    def on_receive_status(self, proto, eth_version, network_id,
                          chain_difficulty, chain_head_hash, genesis_hash):
        """Validate a peer's status (network id, genesis), then start syncing
        and send our pending transactions."""
        log.debug('status received', proto=proto, eth_version=eth_version)
        assert eth_version == proto.version, (eth_version, proto.version)
        if network_id != proto.network_id:
            log.warn("invalid network id",
                     remote_id=proto.network_id,
                     network_id=network_id)
            raise eth_protocol.ETHProtocolError('wrong network_id')

        # check genesis
        if genesis_hash != self.chain.genesis.hash:
            log.warn("invalid genesis hash",
                     remote_id=proto,
                     genesis=genesis_hash.encode('hex'))
            raise eth_protocol.ETHProtocolError('wrong genesis block')

        # request chain
        self.synchronizer.receive_status(proto, chain_head_hash,
                                         chain_difficulty)

        # send transactions
        transactions = self.chain.get_transactions()
        if transactions:
            log.debug("sending transactions", remote_id=proto)
            proto.send_transactions(*transactions)

    # transactions

    def on_receive_transactions(self, proto, transactions):
        "receives rlp.decoded serialized"
        log.debug('remote_transactions_received',
                  count=len(transactions),
                  remote_id=proto)
        # NOTE: handling is intentionally disabled (needs a bloom filter);
        # the code below the return is kept as a reminder.
        log.debug('skipping, FIXME')
        return
        for tx in transactions:
            # fixme bloomfilter
            self.chain.add_transaction(tx)

    # blockhashes ###########

    def on_receive_getblockhashes(self, proto, child_block_hash, count):
        """Answer a getblockhashes request by walking prevhash links backwards
        from child_block_hash (capped at the protocol maximum)."""
        log.debug("handle_get_blockhashes",
                  count=count,
                  block_hash=encode_hex(child_block_hash))
        max_hashes = min(count, self.wire_protocol.max_getblockhashes_count)
        found = []
        if child_block_hash not in self.chain:
            log.debug("unknown block")
            proto.send_blockhashes(*[])
            return

        last = child_block_hash
        while len(found) < max_hashes:
            last = rlp.decode_lazy(self.chain.db.get(last))[0][0]
            if last:
                found.append(last)
            else:
                break

        log.debug("sending: found block_hashes", count=len(found))
        proto.send_blockhashes(*found)

    def on_receive_blockhashes(self, proto, blockhashes):
        """Forward received block hashes to the synchronizer (an empty list
        signifies the peer reached its genesis block)."""
        if blockhashes:
            log.debug("on_receive_blockhashes",
                      count=len(blockhashes),
                      remote_id=proto,
                      first=encode_hex(blockhashes[0]),
                      last=encode_hex(blockhashes[-1]))
        else:
            log.debug("recv 0 remote block hashes, signifying genesis block")
        self.synchronizer.receive_blockhashes(proto, blockhashes)

    # blocks ################

    def on_receive_getblocks(self, proto, blockhashes):
        """Answer a getblocks request with the raw blocks we have for the
        requested hashes (capped at the protocol maximum)."""
        log.debug("on_receive_getblocks", count=len(blockhashes))
        found = []
        for bh in blockhashes[:self.wire_protocol.max_getblocks_count]:
            try:
                found.append(self.chain.db.get(bh))
            except KeyError:
                log.debug("unknown block requested", block_hash=encode_hex(bh))
        if found:
            log.debug("found", count=len(found))
            proto.send_blocks(*found)

    def on_receive_blocks(self, proto, transient_blocks):
        """Forward received (non-empty) transient blocks to the synchronizer."""
        # bugfix: max() raises ValueError on an empty sequence; guard it the
        # same way the fuller ChainService implementation does
        blk_number = max(x.header.number
                         for x in transient_blocks) if transient_blocks else 0
        log.debug("recv blocks",
                  count=len(transient_blocks),
                  remote_id=proto,
                  highest_number=blk_number)
        if transient_blocks:
            self.synchronizer.receive_blocks(proto, transient_blocks)

    def on_receive_newblock(self, proto, block, chain_difficulty):
        """Forward an announced new block to the synchronizer."""
        log.debug("recv newblock", block=block, remote_id=proto)
        self.synchronizer.receive_newblock(proto, block, chain_difficulty)
# Example #30 (예제 #30), score: 0
class ChainService(eth_ChainService):
    """
    Manages the chain and requests to it.

    Extends the base eth ChainService with HydraChain consensus: a
    ConsensusManager drives proposals/votes, and a ProposalLock protects
    proposed blocks from mutation while a proposal is in flight.
    """
    # required by BaseService
    name = 'chain'
    default_config = dict(
        eth=dict(network_id=0,
                 genesis='',
                 pruning=-1,
                 block=ethereum_config.default_config),
        hdc=dict(validators=[]),
    )

    # required by WiredService
    wire_protocol = HDCProtocol  # create for each peer

    # initialized after configure:
    chain = None
    genesis = None
    synchronizer = None
    config = None
    block_queue_size = 1024
    transaction_queue_size = 1024
    processed_gas = 0
    processed_elapsed = 0

    def __init__(self, app):
        self.config = app.config
        sce = self.config['eth']
        # Pruning mode is sticky: a db initialized one way can not be
        # reopened the other way (RuntimeError keeps callers catching
        # Exception working while matching the canonical message form).
        if int(sce['pruning']) >= 0:
            self.db = RefcountDB(app.services.db)
            if "I am not pruning" in self.db.db:
                raise RuntimeError(
                    "The database in '{}' was initialized as non-pruning. "
                    "Can not enable pruning now.".format(
                        self.config['data_dir']))
            self.db.ttl = int(sce['pruning'])
            self.db.db.put("I am pruning", "1")
        else:
            self.db = app.services.db
            if "I am pruning" in self.db:
                raise RuntimeError(
                    "The database in '{}' was initialized as pruning. "
                    "Can not disable pruning now.".format(
                        self.config['data_dir']))
            self.db.put("I am not pruning", "1")

        # The database is likewise bound to a single network id.
        if 'network_id' in self.db:
            db_network_id = self.db.get('network_id')
            if db_network_id != str(sce['network_id']):
                raise RuntimeError(
                    "This database was initialized with network_id {} "
                    "and can not be used when connecting to network_id {}".
                    format(db_network_id, sce['network_id']))

        else:
            self.db.put('network_id', str(sce['network_id']))
            self.db.commit()

        assert self.db is not None

        WiredService.__init__(self, app)
        log.info('initializing chain')
        coinbase = app.services.accounts.coinbase
        env = Env(self.db, sce['block'])
        self.chain = Chain(env,
                           new_head_cb=self._on_new_head,
                           coinbase=coinbase)

        log.info('chain at', number=self.chain.head.number)
        if 'genesis_hash' in sce:
            assert sce['genesis_hash'] == self.chain.genesis.hex_hash()

        self.transaction_queue = Queue(maxsize=self.transaction_queue_size)
        self.add_blocks_lock = False
        self.add_transaction_lock = gevent.lock.BoundedSemaphore()
        self.broadcast_filter = DuplicatesFilter()
        self.on_new_head_cbs = []
        self.on_new_head_candidate_cbs = []
        self.newblock_processing_times = deque(maxlen=1000)

        # Consensus
        self.consensus_contract = ConsensusContract(
            validators=self.config['hdc']['validators'])
        self.consensus_manager = ConsensusManager(self,
                                                  self.consensus_contract,
                                                  self.consensus_privkey)

        # lock blocks that where proposed, so they don't get mutated
        self.proposal_lock = ProposalLock()
        assert not self.proposal_lock.is_locked()

    def start(self):
        """Start the service and begin announcing readiness to peers."""
        super(ChainService, self).start()
        self.consensus_manager.process()
        gevent.spawn(self.announce)

    def announce(self):
        """Send Ready messages every 0.5s until consensus reports ready."""
        while not self.consensus_manager.is_ready:
            self.consensus_manager.send_ready()
            gevent.sleep(0.5)

    # interface accessed by ConensusManager

    def log(self, msg, *args, **kargs):
        log.debug(msg, *args, **kargs)

    @property
    def consensus_privkey(self):
        # The first local account is used as the consensus identity.
        return self.app.services.accounts[0].privkey

    def sign(self, obj):
        """Sign a consensus message with the validator's private key."""
        assert isinstance(obj, Signed)
        obj.sign(self.consensus_privkey)

    @property
    def now(self):
        return time.time()

    def setup_alarm(self, delay, cb, *args):
        """Invoke cb(*args) after `delay` seconds (via a spawned greenlet)."""
        log.debug('setting up alarm')

        def _trigger():
            gevent.sleep(delay)
            log.debug('alarm triggered')
            cb(*args)

        gevent.spawn(_trigger)

    def setup_transaction_alarm(self, cb, *args):
        """Invoke cb(*args) once, on the next new head candidate."""
        log.debug('setting up alarm')

        class Trigger(object):
            def __call__(me, blk):
                # one-shot: deregister before firing
                self.on_new_head_candidate_cbs.remove(me)
                log.debug('transaction alarm triggered')
                cb(*args)

        self.on_new_head_candidate_cbs.append(Trigger())

    def commit_block(self, blk):
        """Add a decided block to the chain under the transaction lock.

        The lock is released in a finally block so a failure inside
        add_block can not leave the semaphore held forever.
        """
        assert isinstance(blk.header, HDCBlockHeader)
        log.debug('trying to acquire transaction lock')
        self.add_transaction_lock.acquire()
        try:
            success = self.chain.add_block(blk,
                                           forward_pending_transactions=True)
        finally:
            self.add_transaction_lock.release()
        log.debug('transaction lock release')
        log.info('new head', head=self.chain.head)
        return success

    def link_block(self, t_block):
        """Deserialize t_block and check it extends the current head.

        Returns the deserialized block, or None if it is unknown/invalid.

        Fix: the original returned early (and could fail its asserts)
        while still holding add_transaction_lock, deadlocking the
        service; the lock is now released via try/finally.
        """
        assert isinstance(t_block.header, HDCBlockHeader)
        self.add_transaction_lock.acquire()
        try:
            block = self._link_block(t_block)
            if not block:
                return
            assert block.get_parent() == self.chain.head, (block.get_parent(),
                                                           self.chain.head)
            assert block.header.coinbase == t_block.header.coinbase
        finally:
            self.add_transaction_lock.release()
        return block

    def _link_block(self, t_block):
        """Deserialize a transient block, validating parent and PoW.

        Returns the Block (or True if already deserialized), or None on
        any failure (known block, missing parent, invalid contents).
        """
        assert isinstance(t_block.header, HDCBlockHeader)
        if t_block.header.hash in self.chain:
            log.warn('known block', block=t_block)
            return
        if t_block.header.prevhash not in self.chain:
            log.warn('missing parent',
                     block=t_block,
                     head=self.chain.head,
                     prevhash=phx(t_block.header.prevhash))
            return
        if isinstance(t_block, Block):
            return True  # already deserialized
        try:  # deserialize
            st = time.time()
            block = t_block.to_block(env=self.chain.env)
            elapsed = time.time() - st
            log.debug('deserialized',
                      elapsed='%.4fs' % elapsed,
                      ts=time.time(),
                      gas_used=block.gas_used,
                      gpsec=self.gpsec(block.gas_used, elapsed))
            assert block.header.check_pow()
        except processblock.InvalidTransaction as e:
            log.warn('invalid transaction',
                     block=t_block,
                     error=e,
                     FIXME='ban node')
            return
        except VerificationFailed as e:
            log.warn('verification failed', error=e, FIXME='ban node')
            return
        return block

    def add_transaction(self, tx, origin=None, force_broadcast=False):
        """
        Warning:
        Locking proposal_lock may block incoming events which are necessary to unlock!
        I.e. votes / blocks!
        Take care!
        """
        self.consensus_manager.log('add_transaction',
                                   blk=self.chain.head_candidate,
                                   lock=self.proposal_lock)
        log.debug('add_transaction', lock=self.proposal_lock)
        # remember which block held the lock; a new head may have
        # replaced it by the time we release below
        block = self.proposal_lock.block
        self.proposal_lock.acquire()
        self.consensus_manager.log('add_transaction acquired lock',
                                   lock=self.proposal_lock)
        assert not hasattr(self.chain.head_candidate, 'should_be_locked')
        super(ChainService, self).add_transaction(tx, origin, force_broadcast)
        # can be unlocked already if we moved to a new block meanwhile
        if self.proposal_lock.is_locked():
            self.proposal_lock.release(if_block=block)
        log.debug('added transaction',
                  num_txs=self.chain.head_candidate.num_transactions())

    def _on_new_head(self, blk):
        # a new head supersedes any proposal lock at or below its height
        self.release_proposal_lock(blk)
        super(ChainService, self)._on_new_head(blk)

    def set_proposal_lock(self, blk):
        """Lock `blk` so it is not mutated while proposed."""
        log.debug('set_proposal_lock', locked=self.proposal_lock)
        if not self.proposal_lock.is_locked():
            self.proposal_lock.acquire()
        self.proposal_lock.block = blk
        assert self.proposal_lock.is_locked()  # can not be aquired
        log.debug('did set_proposal_lock', lock=self.proposal_lock)

    def release_proposal_lock(self, blk):
        """Release the proposal lock if `blk` is at/above the locked height."""
        log.debug('releasing proposal_lock', lock=self.proposal_lock)
        if self.proposal_lock.is_locked():
            if self.proposal_lock.height <= blk.number:
                assert self.chain.head_candidate.number > self.proposal_lock.height
                assert not hasattr(self.chain.head_candidate,
                                   'should_be_locked')
                assert not isinstance(self.chain.head_candidate.header,
                                      HDCBlockHeader)
                self.proposal_lock.release()
                log.debug('released')
                assert not self.proposal_lock.is_locked()
            else:
                log.debug('could not release',
                          head=blk,
                          lock=self.proposal_lock)

    ###############################################################################

    @property
    def is_syncing(self):
        return self.consensus_manager.synchronizer.is_syncing

    @property
    def is_mining(self):
        return self.chain.coinbase in self.config['hdc']['validators']

    # wire protocol receivers ###########

    # transactions

    def on_receive_transactions(self, proto, transactions):
        "receives rlp.decoded serialized"
        log.debug('----------------------------------')
        log.debug('remote_transactions_received',
                  count=len(transactions),
                  remote_id=proto)

        def _add_txs():
            for tx in transactions:
                self.add_transaction(tx, origin=proto)

        gevent.spawn(
            _add_txs
        )  # so the locks in add_transaction won't lock the connection

    # blocks / proposals ################

    def on_receive_getblockproposals(self, proto, blocknumbers):
        """Serve proposal RLP for the (sorted) requested block heights."""
        log.debug('----------------------------------')
        log.debug("on_receive_getblockproposals", count=len(blocknumbers))
        found = []
        for i, height in enumerate(blocknumbers):
            if i == self.wire_protocol.max_getproposals_count:
                break
            assert isinstance(height, int)  # integers
            assert i == 0 or height > blocknumbers[i - 1]  # sorted
            if height > self.chain.head.number:
                log.debug("unknown block requested", height=height)
                break
            rlp_data = self.consensus_manager.get_blockproposal_rlp_by_height(
                height)
            assert isinstance(rlp_data, bytes)
            found.append(rlp_data)
        if found:
            log.debug("found", count=len(found))
            proto.send_blockproposals(*found)

    def on_receive_blockproposals(self, proto, proposals):
        log.debug('----------------------------------')
        self.consensus_manager.log('received proposals', sender=proto)
        log.debug("recv proposals", num=len(proposals), remote_id=proto)
        self.consensus_manager.synchronizer.receive_blockproposals(proposals)

    def on_receive_newblockproposal(self, proto, proposal):
        """Validate, re-broadcast and process a new block proposal."""
        if proposal.hash in self.broadcast_filter:
            return
        log.debug('----------------------------------')
        self.consensus_manager.log('receive proposal', sender=proto)
        log.debug("recv newblockproposal", proposal=proposal, remote_id=proto)
        # self.synchronizer.receive_newproposal(proto, proposal)
        assert isinstance(proposal, BlockProposal)
        assert isinstance(proposal.block.header, HDCBlockHeader)
        isvalid = self.consensus_manager.add_proposal(proposal, proto)
        if isvalid:
            self.broadcast(proposal, origin=proto)
        self.consensus_manager.process()

    def on_receive_votinginstruction(self, proto, votinginstruction):
        """Validate, re-broadcast and process a voting instruction."""
        if votinginstruction.hash in self.broadcast_filter:
            return
        log.debug('----------------------------------')
        log.debug("recv votinginstruction",
                  proposal=votinginstruction,
                  remote_id=proto)
        # self.synchronizer.receive_newproposal(proto, proposal)
        isvalid = self.consensus_manager.add_proposal(votinginstruction, proto)
        if isvalid:
            self.broadcast(votinginstruction, origin=proto)

        self.consensus_manager.process()

    #  votes

    def on_receive_vote(self, proto, vote):
        """Validate, re-broadcast and process a vote."""
        self.consensus_manager.log('on_receive_vote', v=vote)
        if vote.hash in self.broadcast_filter:
            log.debug('filtered!!!')
            return
        log.debug('----------------------------------')
        log.debug("recv vote", vote=vote, remote_id=proto)
        isvalid = self.consensus_manager.add_vote(vote, proto)
        if isvalid:
            self.broadcast(vote, origin=proto)
        self.consensus_manager.process()

    def on_receive_ready(self, proto, ready):
        if ready.hash in self.broadcast_filter:
            return
        log.debug('----------------------------------')
        log.debug("recv ready", ready=ready, remote_id=proto)
        self.consensus_manager.add_ready(ready, proto)
        self.broadcast(ready, origin=proto)
        self.consensus_manager.process()

    #  start

    def on_receive_status(self, proto, eth_version, network_id, genesis_hash,
                          current_lockset):
        """Validate a peer's status handshake; seed votes, proposal, txs."""
        log.debug('----------------------------------')
        log.debug('status received', proto=proto, eth_version=eth_version)
        assert eth_version == proto.version, (eth_version, proto.version)
        if network_id != self.config['eth'].get('network_id',
                                                proto.network_id):
            log.warn("invalid network id",
                     remote_network_id=network_id,
                     expected_network_id=self.config['eth'].get(
                         'network_id', proto.network_id))
            raise HDCProtocolError('wrong network_id')

        # check genesis
        if genesis_hash != self.chain.genesis.hash:
            log.warn("invalid genesis hash",
                     remote_id=proto,
                     genesis=genesis_hash.encode('hex'))
            raise HDCProtocolError('wrong genesis block')

        assert isinstance(current_lockset, LockSet)
        if len(current_lockset):
            log.debug('adding received lockset', ls=current_lockset)
            for v in current_lockset.votes:
                self.consensus_manager.add_vote(v, proto)

        self.consensus_manager.process()

        # send last BlockProposal
        p = self.consensus_manager.last_blockproposal
        if p:
            log.debug('sending proposal', p=p)
            proto.send_newblockproposal(p)

        # send transactions
        transactions = self.chain.get_transactions()
        if transactions:
            log.debug("sending transactions", remote_id=proto)
            proto.send_transactions(*transactions)

    def on_wire_protocol_start(self, proto):
        """Register all receive callbacks for a new peer and send status."""
        log.debug('----------------------------------')
        log.debug('on_wire_protocol_start', proto=proto)
        assert isinstance(proto, self.wire_protocol)
        # register callbacks
        proto.receive_status_callbacks.append(self.on_receive_status)
        proto.receive_transactions_callbacks.append(
            self.on_receive_transactions)
        proto.receive_blockproposals_callbacks.append(
            self.on_receive_blockproposals)
        proto.receive_getblockproposals_callbacks.append(
            self.on_receive_getblockproposals)
        proto.receive_newblockproposal_callbacks.append(
            self.on_receive_newblockproposal)
        proto.receive_votinginstruction_callbacks.append(
            self.on_receive_votinginstruction)
        proto.receive_vote_callbacks.append(self.on_receive_vote)
        proto.receive_ready_callbacks.append(self.on_receive_ready)

        # send status
        proto.send_status(
            genesis_hash=self.chain.genesis.hash,
            current_lockset=self.consensus_manager.active_round.lockset)

    def on_wire_protocol_stop(self, proto):
        assert isinstance(proto, self.wire_protocol)
        log.debug('----------------------------------')
        log.debug('on_wire_protocol_stop', proto=proto)

    def broadcast(self, obj, origin=None):
        """Broadcast a consensus object to all peers except its origin.

        Objects already seen by the DuplicatesFilter are dropped.
        """
        fmap = {
            BlockProposal: 'newblockproposal',
            VoteBlock: 'vote',
            VoteNil: 'vote',
            VotingInstruction: 'votinginstruction',
            Transaction: 'transactions',
            Ready: 'ready'
        }
        # `is False` (not `== False`) preserves the original semantics
        # while avoiding the comparison-to-literal anti-pattern
        if self.broadcast_filter.update(obj.hash) is False:
            log.debug('already broadcasted', obj=obj)
            return
        if isinstance(obj, BlockProposal):
            assert obj.sender == obj.block.header.coinbase
        log.debug('broadcasting', obj=obj)
        bcast = self.app.services.peermanager.broadcast
        bcast(HDCProtocol,
              fmap[type(obj)],
              args=(obj, ),
              exclude_peers=[origin.peer] if origin else [])

    broadcast_transaction = broadcast
예제 #31
0
파일: casper.py 프로젝트: yep/research
class Validator():
    """Simulated Casper validator (Python 2 research code).

    Wraps a private Chain plus the validator's key material, receives
    blocks/transactions from a simulated p2p network, and attempts to
    produce blocks when it is this validator's turn.
    """

    def __init__(self, genesis, key, network, env, time_offset=5):
        # Create a chain object
        self.chain = Chain(genesis, env=env)
        # Create a transaction queue
        self.txqueue = TransactionQueue()
        # Use the validator's time as the chain's time
        self.chain.time = lambda: self.get_timestamp()
        # My private key
        self.key = key
        # My address
        self.address = privtoaddr(key)
        # My randao
        self.randao = RandaoManager(sha3(self.key))
        # Pointer to the test p2p network
        self.network = network
        # Record of objects already received and processed
        self.received_objects = {}
        # The minimum eligible timestamp given a particular number of skips
        self.next_skip_count = 0
        self.next_skip_timestamp = 0
        # Is this validator active?
        self.active = False
        # Code that verifies signatures from this validator
        self.validation_code = generate_validation_code(privtoaddr(key))
        # Validation code hash
        self.vchash = sha3(self.validation_code)
        # Parents that this validator has already built a block on
        self.used_parents = {}
        # This validator's clock offset (for testing purposes)
        self.time_offset = random.randrange(time_offset) - (time_offset // 2)
        # Determine the epoch length
        self.epoch_length = self.call_casper('getEpochLength')
        # My minimum gas price
        self.mingasprice = 20 * 10**9
        # Give this validator a unique ID
        self.id = len(ids)
        ids.append(self.id)
        self.update_activity_status()
        self.cached_head = self.chain.head_hash

    def call_casper(self, fun, args=[]):
        # Read-only call into the Casper contract against current state.
        # NOTE(review): mutable default arg `args=[]` is never mutated
        # here, so it is harmless, but it is a known Python pitfall.
        return call_casper(self.chain.state, fun, args)

    def update_activity_status(self):
        """Mark this validator active iff the current epoch lies within
        its registered [start_epoch, end_epoch) window, and reset the
        skip counters if so."""
        start_epoch = self.call_casper('getStartEpoch', [self.vchash])
        now_epoch = self.call_casper('getEpoch')
        end_epoch = self.call_casper('getEndEpoch', [self.vchash])
        if start_epoch <= now_epoch < end_epoch:
            self.active = True
            self.next_skip_count = 0
            self.next_skip_timestamp = get_timestamp(self.chain,
                                                     self.next_skip_count)
            print 'In current validator set'
        else:
            self.active = False

    def get_timestamp(self):
        # Scaled simulated-network time plus this validator's clock skew.
        return int(self.network.time * 0.01) + self.time_offset

    def on_receive(self, obj):
        """Process an object (or list of objects) from the network:
        add blocks to the chain, queue acceptable transactions, and
        re-broadcast everything newly seen."""
        if isinstance(obj, list):
            for _obj in obj:
                self.on_receive(_obj)
            return
        if obj.hash in self.received_objects:
            return
        if isinstance(obj, Block):
            print 'Receiving block', obj
            assert obj.hash not in self.chain
            block_success = self.chain.add_block(obj)
            self.network.broadcast(self, obj)
            self.network.broadcast(self, ChildRequest(obj.header.hash))
            self.update_head()
        elif isinstance(obj, Transaction):
            print 'Receiving transaction', obj
            # Only queue transactions paying at least our min gas price
            if obj.gasprice >= self.mingasprice:
                self.txqueue.add_transaction(obj)
                print 'Added transaction, txqueue size %d' % len(
                    self.txqueue.txs)
                self.network.broadcast(self, obj)
            else:
                print 'Gasprice too low'
        self.received_objects[obj.hash] = True
        for x in self.chain.get_chain():
            assert x.hash in self.received_objects

    def tick(self):
        """One simulation step: attempt block creation if eligible, and
        occasionally drain the chain's time/parent queues."""
        # Try to create a block
        # Conditions:
        # (i) you are an active validator,
        # (ii) you have not yet made a block with this parent
        if self.active and self.chain.head_hash not in self.used_parents:
            t = self.get_timestamp()
            # Is it early enough to create the block?
            if t >= self.next_skip_timestamp and (
                    not self.chain.head
                    or t > self.chain.head.header.timestamp):
                # Wrong validator; in this case, just wait for the next skip count
                if not check_skips(self.chain, self.vchash,
                                   self.next_skip_count):
                    self.next_skip_count += 1
                    self.next_skip_timestamp = get_timestamp(
                        self.chain, self.next_skip_count)
                    # print 'Incrementing proposed timestamp for block %d to %d' % \
                    #     (self.chain.head.header.number + 1 if self.chain.head else 0, self.next_skip_timestamp)
                    return
                self.used_parents[self.chain.head_hash] = True
                # Simulated 15% chance of validator failure to make a block
                # NOTE(review): the comment says 15% but the probability
                # below is 0.1% (random() > 0.999) — confirm which is meant.
                if random.random() > 0.999:
                    print 'Simulating validator failure, block %d not created' % (
                        self.chain.head.header.number +
                        1 if self.chain.head else 0)
                    return
                # Make the block
                s1 = self.chain.state.trie.root_hash
                pre_dunkle_count = self.call_casper('getTotalDunklesIncluded')
                dunkle_txs = get_dunkle_candidates(self.chain,
                                                   self.chain.state)
                blk = make_head_candidate(self.chain, self.txqueue)
                randao = self.randao.get_parent(
                    self.call_casper('getRandao', [self.vchash]))
                blk = sign_block(blk, self.key, randao, self.vchash,
                                 self.next_skip_count)
                # Make sure it's valid
                global global_block_counter
                global_block_counter += 1
                for dtx in dunkle_txs:
                    assert dtx in blk.transactions, (dtx, blk.transactions)
                print 'made block with timestamp %d and %d dunkles' % (
                    blk.timestamp, len(dunkle_txs))
                # Block making must not have mutated our state trie
                s2 = self.chain.state.trie.root_hash
                assert s1 == s2
                assert blk.timestamp >= self.next_skip_timestamp
                assert self.chain.add_block(blk)
                self.update_head()
                post_dunkle_count = self.call_casper('getTotalDunklesIncluded')
                assert post_dunkle_count - pre_dunkle_count == len(dunkle_txs)
                self.received_objects[blk.hash] = True
                print 'Validator %d making block %d (%s)' % (
                    self.id, blk.header.number,
                    blk.header.hash[:8].encode('hex'))
                self.network.broadcast(self, blk)
        # Sometimes we received blocks too early or out of order;
        # run an occasional loop that processes these
        if random.random() < 0.02:
            self.chain.process_time_queue()
            self.chain.process_parent_queue()
            self.update_head()

    def update_head(self):
        """React to a head change: refresh activity status on epoch
        boundaries and reset the skip schedule."""
        if self.cached_head == self.chain.head_hash:
            return
        self.cached_head = self.chain.head_hash
        if self.chain.state.block_number % self.epoch_length == 0:
            self.update_activity_status()
        if self.active:
            self.next_skip_count = 0
            self.next_skip_timestamp = get_timestamp(self.chain,
                                                     self.next_skip_count)
        print 'Head changed: %s, will attempt creating a block at %d' % (
            self.chain.head_hash.encode('hex'), self.next_skip_timestamp)

    def withdraw(self, gasprice=20 * 10**9):
        """Queue and broadcast a startWithdrawal transaction for this
        validator's slot."""
        sigdata = make_withdrawal_signature(self.key)
        txdata = casper_ct.encode('startWithdrawal', [self.vchash, sigdata])
        tx = Transaction(self.chain.state.get_nonce(self.address), gasprice,
                         650000, self.chain.config['CASPER_ADDR'], 0,
                         txdata).sign(self.key)
        self.txqueue.add_transaction(tx, force=True)
        self.network.broadcast(self, tx)
        print 'Withdrawing!'

    def deposit(self, gasprice=20 * 10**9):
        # NOTE(review): `value`, `casper_config` and `ct` are not defined
        # in this class — presumably module-level globals; verify.
        # NOTE(review): the assert direction looks inverted (it requires
        # the deposit to exceed the balance) and the nonce is multiplied
        # by 10**18 — both look like bugs; confirm against the source repo.
        assert value * 10**18 >= self.chain.state.get_balance(
            self.address) + gasprice * 1000000
        tx = Transaction(
            self.chain.state.get_nonce(self.address) * 10**18, gasprice,
            1000000, casper_config['CASPER_ADDR'], value * 10**18,
            ct.encode('deposit', [self.validation_code,
                                  self.randao.get(9999)]))
        # NOTE(review): `tx` is built but never signed, queued or
        # broadcast — this method appears truncated.
예제 #32
0
class Validator():
    """Simulated Casper validator (Python 2 research code), index-based
    variant: looks itself up in the historical validator set by
    (size-class, slot) indices rather than by validation-code hash."""

    def __init__(self, genesis, key, network, env, time_offset=5):
        # Create a chain object
        self.chain = Chain(genesis, env=env)
        # Use the validator's time as the chain's time
        self.chain.time = lambda: self.get_timestamp()
        # My private key
        self.key = key
        # My address
        self.address = privtoaddr(key)
        # My randao
        self.randao = RandaoManager(sha3(self.key))
        # Pointer to the test p2p network
        self.network = network
        # Record of objects already received and processed
        self.received_objects = {}
        # The minimum eligible timestamp given a particular number of skips
        self.next_skip_count = 0
        self.next_skip_timestamp = 0
        # This validator's indices in the state
        self.indices = None
        # Code that verifies signatures from this validator
        self.validation_code = generate_validation_code(privtoaddr(key))
        # Parents that this validator has already built a block on
        self.used_parents = {}
        # This validator's clock offset (for testing purposes)
        self.time_offset = random.randrange(time_offset) - (time_offset // 2)
        # Give this validator a unique ID
        self.id = len(ids)
        ids.append(self.id)
        self.find_my_indices()
        self.cached_head = self.chain.head_hash

    def find_my_indices(self):
        """Scan the historical validator sets of the current epoch for
        our validation code; set self.indices to (size-class, slot) and
        reset the skip schedule, or clear indices if we are not in the
        set."""
        for i in range(len(validatorSizes)):
            epoch = self.chain.state.block_number // EPOCH_LENGTH
            valcount = call_casper(self.chain.state, 'getHistoricalValidatorCount', [epoch, i])
            for j in range(valcount):
                valcode = call_casper(self.chain.state, 'getValidationCode', [i, j])
                if valcode == self.validation_code:
                    self.indices = i, j
                    self.next_skip_count = 0
                    self.next_skip_timestamp = get_timestamp(self.chain, self.next_skip_count)
                    print 'In current validator set at (%d, %d)' % (i, j)
                    return
        self.indices = None
        self.next_skip_count, self.next_skip_timestamp = 0, 0
        print 'Not in current validator set'

    def get_uncles(self):
        """Collect headers of recent off-chain blocks ("dunkles") that
        have not yet been included via the Casper contract."""
        anc = self.chain.get_block(self.chain.get_blockhash_by_number(self.chain.state.block_number - CHECK_FOR_UNCLES_BACK))
        if anc:
            descendants = self.chain.get_descendants(anc)
        else:
            descendants = self.chain.get_descendants(self.chain.db.get('GENESIS_HASH'))
        potential_uncles = [x for x in descendants if x not in self.chain and isinstance(x, Block)]
        uncles = [x.header for x in potential_uncles if not call_casper(self.chain.state, 'isDunkleIncluded', [x.header.hash])]
        return uncles

    def get_timestamp(self):
        # Scaled simulated-network time plus this validator's clock skew.
        return int(self.network.time * 0.01) + self.time_offset

    def on_receive(self, obj):
        """Process an object (or list of objects) from the network:
        add blocks/transactions to the chain and re-broadcast blocks."""
        if isinstance(obj, list):
            for _obj in obj:
                self.on_receive(_obj)
            return
        if obj.hash in self.received_objects:
            return
        if isinstance(obj, Block):
            print 'Receiving block', obj
            assert obj.hash not in self.chain, (self.received_objects, obj.hash, [x.hash for x in self.chain.get_chain()])
            block_success = self.chain.add_block(obj)
            self.network.broadcast(self, obj)
            self.network.broadcast(self, ChildRequest(obj.header.hash))
            self.update_head()
        elif isinstance(obj, Transaction):
            self.chain.add_transaction(obj)
        self.received_objects[obj.hash] = True
        for x in self.chain.get_chain():
            assert x.hash in self.received_objects

    def tick(self):
        """One simulation step: attempt block creation (including dunkle
        inclusion) if eligible, and occasionally drain the chain's
        time/parent queues."""
        # Try to create a block
        # Conditions:
        # (i) you are an active validator,
        # (ii) you have not yet made a block with this parent
        if self.indices and self.chain.head_hash not in self.used_parents:
            t = self.get_timestamp()
            # Is it early enough to create the block?
            if t >= self.next_skip_timestamp and (not self.chain.head or t > self.chain.head.header.timestamp):
                print 'creating', t, self.next_skip_timestamp
                # Wrong validator; in this case, just wait for the next skip count
                if not check_skips(self.chain, self.indices, self.next_skip_count):
                    self.next_skip_count += 1
                    self.next_skip_timestamp = get_timestamp(self.chain, self.next_skip_count)
                    print 'Incrementing proposed timestamp for block %d to %d' % \
                        (self.chain.head.header.number + 1 if self.chain.head else 0, self.next_skip_timestamp)
                    return
                self.used_parents[self.chain.head_hash] = True
                # Simulated 15% chance of validator failure to make a block
                # NOTE(review): the comment says 15% but the probability
                # below is 0.1% (random() > 0.999) — confirm which is meant.
                if random.random() > 0.999:
                    print 'Simulating validator failure, block %d not created' % (self.chain.head.header.number + 1 if self.chain.head else 0)
                    return
                # Make the block, make sure it's valid
                pre_dunkle_count = call_casper(self.chain.state, 'getTotalDunklesIncluded', [])
                dunkle_txs = []
                for i, u in enumerate(self.get_uncles()[:4]):
                    start_nonce = self.chain.state.get_nonce(self.address)
                    print 'start_nonce', start_nonce
                    txdata = casper_ct.encode('includeDunkle', [rlp.encode(u)])
                    dunkle_txs.append(Transaction(start_nonce + i, 0, 650000, self.chain.config['CASPER_ADDR'], 0, txdata).sign(self.key))
                for dtx in dunkle_txs[::-1]:
                    self.chain.add_transaction(dtx, force=True)
                blk = make_block(self.chain, self.key, self.randao, self.indices, self.next_skip_count)
                global global_block_counter
                global_block_counter += 1
                for dtx in dunkle_txs:
                    assert dtx in blk.transactions, (dtx, blk.transactions)
                print 'made block with timestamp %d and %d dunkles' % (blk.timestamp, len(dunkle_txs))
                assert blk.timestamp >= self.next_skip_timestamp
                assert self.chain.add_block(blk)
                self.update_head()
                post_dunkle_count = call_casper(self.chain.state, 'getTotalDunklesIncluded', [])
                assert post_dunkle_count - pre_dunkle_count == len(dunkle_txs)
                self.received_objects[blk.hash] = True
                print 'Validator %d making block %d (%s)' % (self.id, blk.header.number, blk.header.hash[:8].encode('hex'))
                self.network.broadcast(self, blk)
        # Sometimes we received blocks too early or out of order;
        # run an occasional loop that processes these
        if random.random() < 0.02:
            self.chain.process_time_queue()
            self.chain.process_parent_queue()
            self.update_head()

    def update_head(self):
        """React to a head change: refresh indices on epoch boundaries
        and reset the skip schedule if we are in the validator set."""
        if self.cached_head == self.chain.head_hash:
            return
        self.cached_head = self.chain.head_hash
        if self.chain.state.block_number % EPOCH_LENGTH == 0:
            self.find_my_indices()
        if self.indices:
            self.next_skip_count = 0
            self.next_skip_timestamp = get_timestamp(self.chain, self.next_skip_count)
        print 'Head changed: %s, will attempt creating a block at %d' % (self.chain.head_hash.encode('hex'), self.next_skip_timestamp)
예제 #33
0
def test_simple_chain(db):
    """Build a two-block chain and exercise the Chain query API."""
    sender_key, sender_addr, _, _ = accounts()
    genesis = mkquickgenesis({sender_addr: {"balance": utils.denoms.ether * 1}}, db=db)
    store_block(genesis)
    chain = Chain(env=env(genesis.db), genesis=genesis)
    txn = get_transaction()
    child = mine_next_block(genesis, transactions=[txn])
    store_block(child)
    chain.add_block(child)

    # membership / retrieval
    assert genesis.hash in chain
    assert child.hash in chain
    assert chain.has_block(child.hash)
    assert chain.get(child.hash) == child
    assert chain.head == child
    assert chain.get_children(genesis) == [child]
    assert chain.get_uncles(child) == []

    # walking the chain (head-first) with various counts / anchors
    assert chain.get_chain() == [child, genesis]
    assert chain.get_chain(count=0) == []
    assert chain.get_chain(count=1) == [child]
    assert chain.get_chain(count=2) == [child, genesis]
    assert chain.get_chain(count=100) == [child, genesis]
    assert chain.get_chain(genesis.hash) == [genesis]
    assert chain.get_chain(genesis.hash, 0) == []
    assert chain.get_chain(child.hash) == [child, genesis]
    assert chain.get_chain(child.hash, 1) == [child]
    assert chain.get_descendants(genesis, count=10) == [child]
    assert chain.get_descendants(genesis, count=1) == [child]
    assert chain.get_descendants(genesis, count=0) == []

    # index lookups by number and by transaction hash
    assert chain.index.has_block_by_number(1)
    assert not chain.index.has_block_by_number(2)
    assert chain.index.get_block_by_number(1) == child.hash
    with pytest.raises(KeyError):
        chain.index.get_block_by_number(2)
    assert chain.index.get_transaction(txn.hash) == (txn, child, 0)
예제 #34
0
def test_prevhash(db):
    """Mining on a fresh genesis yields a block whose ancestors resolve."""
    genesis = mkquickgenesis({}, db=db)
    chain = Chain(env(genesis.db), genesis)
    blk = mine_on_chain(chain)
    blk.get_ancestor_list(2)
예제 #35
0
class ChainService(WiredService):
    """
    Manages the chain and requests to it.
    """
    # required by BaseService
    name = 'chain'
    default_config = dict(eth=dict(network_id=0))

    # required by WiredService
    wire_protocol = eth_protocol.ETHProtocol  # create for each peer

    # initialized after configure:
    chain = None
    genesis = None
    synchronizer = None
    config = None
    block_queue_size = 1024
    transaction_queue_size = 1024
    processed_gas = 0
    processed_elapsed = 0

    def __init__(self, app):
        self.config = app.config
        self.db = app.services.db
        assert self.db is not None
        super(ChainService, self).__init__(app)
        log.info('initializing chain')
        coinbase = app.services.accounts.coinbase
        self.chain = Chain(self.db,
                           new_head_cb=self._on_new_head,
                           coinbase=coinbase)
        log.info('chain at', number=self.chain.head.number)
        self.synchronizer = Synchronizer(self, force_sync=None)

        self.block_queue = Queue(maxsize=self.block_queue_size)
        self.transaction_queue = Queue(maxsize=self.transaction_queue_size)
        self.add_blocks_lock = False
        self.add_transaction_lock = gevent.lock.Semaphore()
        self.broadcast_filter = DuplicatesFilter()
        self.on_new_head_cbs = []
        self.on_new_head_candidate_cbs = []

    @property
    def is_syncing(self):
        # a sync is in progress iff the synchronizer has an active task
        return self.synchronizer.synctask is not None

    def _on_new_head(self, block):
        # a new head implicitly means a new head candidate, so fire both
        for cb in self.on_new_head_cbs:
            cb(block)
        self._on_new_head_candidate()  # we implicitly have a new head_candidate

    def _on_new_head_candidate(self):
        for cb in self.on_new_head_candidate_cbs:
            cb(self.chain.head_candidate)

    def add_transaction(self, tx, origin=None):
        """Add `tx` to the chain and, on success, rebroadcast it.

        `origin` is the protocol the tx came from; it is excluded from
        the rebroadcast.
        """
        assert isinstance(tx, Transaction)
        log.debug('add_transaction', locked=self.add_transaction_lock.locked())
        self.add_transaction_lock.acquire()
        try:
            success = self.chain.add_transaction(tx)
        finally:
            # release even if add_transaction raises; a leaked semaphore
            # would block _add_blocks forever (matches its try/finally style)
            self.add_transaction_lock.release()
        if success:
            self._on_new_head_candidate()
            self.broadcast_transaction(tx, origin=origin)  # asap

    def add_block(self, t_block, proto):
        "adds a block to the block_queue and spawns _add_block if not running"
        self.block_queue.put((t_block, proto))  # blocks if full
        if not self.add_blocks_lock:
            self.add_blocks_lock = True  # need to lock here (ctx switch is later)
            gevent.spawn(self._add_blocks)

    def add_mined_block(self, block):
        """Add a locally mined block and announce it to peers."""
        log.debug('adding mined block', block=block)
        assert block.check_pow()
        if self.chain.add_block(block):
            log.info('added', block=block, ts=time.time())
            assert block == self.chain.head
            self.broadcast_newblock(block,
                                    chain_difficulty=block.chain_difficulty())

    def knows_block(self, block_hash):
        "if block is in chain or in queue"
        if block_hash in self.chain:
            return True
        # check if queued or processed
        for i in range(len(self.block_queue.queue)):
            if block_hash == self.block_queue.queue[i][0].header.hash:
                return True
        return False

    def _add_blocks(self):
        """Drain the block queue; runs in a dedicated greenlet spawned by
        add_block and guarded by add_blocks_lock."""
        log.debug('add_blocks',
                  qsize=self.block_queue.qsize(),
                  add_tx_lock=self.add_transaction_lock.locked())
        assert self.add_blocks_lock is True
        self.add_transaction_lock.acquire()
        try:
            while not self.block_queue.empty():
                # peek (not get) so knows_block stays true while processing
                t_block, proto = self.block_queue.peek()
                if t_block.header.hash in self.chain:
                    log.warn('known block', block=t_block)
                    self.block_queue.get()
                    continue
                if t_block.header.prevhash not in self.chain:
                    log.warn('missing parent', block=t_block)
                    self.block_queue.get()
                    continue
                # FIXME, this is also done in validation and in synchronizer for new_blocks
                if not t_block.header.check_pow():
                    log.warn('invalid pow', block=t_block, FIXME='ban node')
                    self.block_queue.get()
                    continue
                try:  # deserialize
                    st = time.time()
                    block = t_block.to_block(db=self.chain.db)
                    elapsed = time.time() - st
                    log.debug('deserialized',
                              elapsed='%.4fs' % elapsed,
                              gas_used=block.gas_used,
                              gpsec=self.gpsec(block.gas_used, elapsed))
                except processblock.InvalidTransaction as e:
                    log.warn('invalid transaction',
                             block=t_block,
                             error=e,
                             FIXME='ban node')
                    self.block_queue.get()
                    continue
                except VerificationFailed as e:
                    log.warn('verification failed', error=e, FIXME='ban node')
                    self.block_queue.get()
                    continue

                if self.chain.add_block(block):
                    log.info('added', block=block, ts=time.time())
                self.block_queue.get()  # remove block from queue (we peeked only)
                gevent.sleep(0.001)
        finally:
            self.add_blocks_lock = False
            self.add_transaction_lock.release()

    def gpsec(self, gas_spent=0, elapsed=0):
        """Accumulate totals and return the average gas processed per second."""
        self.processed_gas += gas_spent
        self.processed_elapsed += elapsed
        # +0.001 avoids division by zero on the very first call
        return int(self.processed_gas / (0.001 + self.processed_elapsed))

    def broadcast_newblock(self, block, chain_difficulty=None, origin=None):
        """Announce `block` to all peers except the one it came from."""
        if not chain_difficulty:
            assert block.hash in self.chain
            chain_difficulty = block.chain_difficulty()
        assert isinstance(block, (eth_protocol.TransientBlock, Block))
        if self.broadcast_filter.known(block.header.hash):
            log.debug('already broadcasted block')
        else:
            log.debug('broadcasting newblock', origin=origin)
            bcast = self.app.services.peermanager.broadcast
            bcast(eth_protocol.ETHProtocol,
                  'newblock',
                  args=(block, chain_difficulty),
                  exclude_peers=[origin.peer] if origin else [])

    def broadcast_transaction(self, tx, origin=None):
        """Announce `tx` to all peers except the one it came from."""
        assert isinstance(tx, Transaction)
        if self.broadcast_filter.known(tx.hash):
            log.debug('already broadcasted tx')
        else:
            log.debug('broadcasting tx', origin=origin)
            bcast = self.app.services.peermanager.broadcast
            bcast(eth_protocol.ETHProtocol,
                  'transactions',
                  args=(tx, ),
                  exclude_peers=[origin.peer] if origin else [])

    # wire protocol receivers ###########

    def on_wire_protocol_start(self, proto):
        """Register receive callbacks on a new peer and send our status."""
        log.debug('on_wire_protocol_start', proto=proto)
        assert isinstance(proto, self.wire_protocol)
        # register callbacks
        proto.receive_status_callbacks.append(self.on_receive_status)
        proto.receive_transactions_callbacks.append(
            self.on_receive_transactions)
        proto.receive_getblockhashes_callbacks.append(
            self.on_receive_getblockhashes)
        proto.receive_blockhashes_callbacks.append(self.on_receive_blockhashes)
        proto.receive_getblocks_callbacks.append(self.on_receive_getblocks)
        proto.receive_blocks_callbacks.append(self.on_receive_blocks)
        proto.receive_newblock_callbacks.append(self.on_receive_newblock)

        # send status
        head = self.chain.head
        proto.send_status(chain_difficulty=head.chain_difficulty(),
                          chain_head_hash=head.hash,
                          genesis_hash=self.chain.genesis.hash)

    def on_wire_protocol_stop(self, proto):
        assert isinstance(proto, self.wire_protocol)
        log.debug('on_wire_protocol_stop', proto=proto)

    def on_receive_status(self, proto, eth_version, network_id,
                          chain_difficulty, chain_head_hash, genesis_hash):
        """Validate a peer's status, start syncing, and push our txs."""
        log.debug('status received', proto=proto, eth_version=eth_version)
        assert eth_version == proto.version, (eth_version, proto.version)
        if network_id != self.config['eth'].get('network_id',
                                                proto.network_id):
            log.warn("invalid network id",
                     remote_network_id=network_id,
                     expected_network_id=self.config['eth'].get(
                         'network_id', proto.network_id))
            raise eth_protocol.ETHProtocolError('wrong network_id')

        # check genesis
        if genesis_hash != self.chain.genesis.hash:
            log.warn("invalid genesis hash",
                     remote_id=proto,
                     genesis=genesis_hash.encode('hex'))
            raise eth_protocol.ETHProtocolError('wrong genesis block')

        # request chain
        self.synchronizer.receive_status(proto, chain_head_hash,
                                         chain_difficulty)

        # send transactions
        transactions = self.chain.get_transactions()
        if transactions:
            log.debug("sending transactions", remote_id=proto)
            proto.send_transactions(*transactions)

    # transactions

    def on_receive_transactions(self, proto, transactions):
        "receives rlp.decoded serialized"
        log.debug('remote_transactions_received',
                  count=len(transactions),
                  remote_id=proto)
        for tx in transactions:
            self.add_transaction(tx, origin=proto)

    # blockhashes ###########

    def on_receive_getblockhashes(self, proto, child_block_hash, count):
        """Walk prevhash links back from `child_block_hash` and reply with
        up to `count` (capped) ancestor hashes."""
        log.debug("handle_get_blockhashes",
                  count=count,
                  block_hash=encode_hex(child_block_hash))
        max_hashes = min(count, self.wire_protocol.max_getblockhashes_count)
        found = []
        if child_block_hash not in self.chain:
            log.debug("unknown block")
            proto.send_blockhashes(*[])
            return

        last = child_block_hash
        while len(found) < max_hashes:
            try:
                # prevhash is the first field of the first (header) list
                last = rlp.decode_lazy(self.chain.db.get(last))[0][0]
            except KeyError:
                # this can happen if we started a chain download, which did not complete
                # should not happen if the hash is part of the canonical chain
                log.warn('KeyError in getblockhashes', hash=last)
                break
            if last:
                found.append(last)
            else:
                break

        log.debug("sending: found block_hashes", count=len(found))
        proto.send_blockhashes(*found)

    def on_receive_blockhashes(self, proto, blockhashes):
        if blockhashes:
            log.debug("on_receive_blockhashes",
                      count=len(blockhashes),
                      remote_id=proto,
                      first=encode_hex(blockhashes[0]),
                      last=encode_hex(blockhashes[-1]))
        else:
            log.debug("recv 0 remote block hashes, signifying genesis block")
        self.synchronizer.receive_blockhashes(proto, blockhashes)

    # blocks ################

    def on_receive_getblocks(self, proto, blockhashes):
        """Reply with the raw RLP of every requested block we have."""
        log.debug("on_receive_getblocks", count=len(blockhashes))
        found = []
        for bh in blockhashes[:self.wire_protocol.max_getblocks_count]:
            try:
                found.append(self.chain.db.get(bh))
            except KeyError:
                log.debug("unknown block requested", block_hash=encode_hex(bh))
        if found:
            log.debug("found", count=len(found))
            proto.send_blocks(*found)

    def on_receive_blocks(self, proto, transient_blocks):
        blk_number = max(x.header.number
                         for x in transient_blocks) if transient_blocks else 0
        log.debug("recv blocks",
                  count=len(transient_blocks),
                  remote_id=proto,
                  highest_number=blk_number)
        if transient_blocks:
            self.synchronizer.receive_blocks(proto, transient_blocks)

    def on_receive_newblock(self, proto, block, chain_difficulty):
        log.debug("recv newblock", block=block, remote_id=proto)
        self.synchronizer.receive_newblock(proto, block, chain_difficulty)
예제 #36
0
# Deposit sizes, validation code and its hash for every participant
deposit_sizes = [i * 500 + 500 for i in range(NUM_PARTICIPANTS)]
vcodes = [generate_validation_code(a) for a in addrs]
vchashes = [utils.sha3(c) for c in vcodes]
assert len(privkeys) == len(addrs) == len(randaos) == len(deposit_sizes) == len(vcodes) == len(vchashes) == NUM_PARTICIPANTS

# Creating casper contract translator
ct = get_casper_ct()
assert ct
print('Constructing genesis')
# Genesis with all but the last participant registered as validators;
# timestamp is set in the past so blocks can be produced immediately
s = make_casper_genesis(validators=[(generate_validation_code(a), ds * 10**18, r.get(9999), a)
                                    for a, ds, r in zip(addrs, deposit_sizes, randaos)][:-1],
                        alloc={a: {'balance': 10**18} for a in addrs},
                        timestamp=int(time.time() - 99999),
                        epoch_length=100)
print('Genesis constructed successfully')
# One independent chain (from the same genesis snapshot) per participant
chains = [Chain(s.to_snapshot(), env=s.env) for i in range(NUM_PARTICIPANTS)]
withdrawal_time_1 = call_casper(chains[0].state, 'getLockDuration', [vchashes[0]])

# List of validator IDs that created each block
vids = []
# Create and sign a block
def make_block(chain, key, randao, vchash, skips):
    """Build a head candidate at the skip-adjusted timestamp and sign it
    with this validator's key and randao reveal."""
    ts = get_timestamp(chain, skips)
    candidate, _ = make_head_candidate(chain, TransactionQueue(), timestamp=ts)
    reveal = randao.get_parent(call_casper(chain.state, 'getRandao', [vchash]))
    return sign_block(candidate, key, reveal, vchash, skips)

# Ask the casper contract which validator (by vchash) proposes at skip 0,
# then map that back to our local participant index
next_validator = call_casper(s, 'getValidator', [0])
print('Next validator:', next_validator.encode('hex'))
next_validator_id = vchashes.index(next_validator)
print('Next validator index:', next_validator_id)
예제 #37
0
class ChainService(eth_ChainService):

    """
    Manages the chain and requests to it.
    """
    # required by BaseService
    name = 'chain'
    default_config = dict(eth=dict(network_id=0,
                                   genesis='',
                                   pruning=-1,
                                   block=ethereum_config.default_config),
                          hdc=dict(validators=[]),
                          )

    # required by WiredService
    wire_protocol = HDCProtocol  # create for each peer

    # initialized after configure:
    chain = None
    genesis = None
    synchronizer = None
    config = None
    block_queue_size = 1024
    transaction_queue_size = 1024
    processed_gas = 0
    processed_elapsed = 0

    def __init__(self, app):
        self.config = app.config
        sce = self.config['eth']
        # pruning >= 0 enables a refcounting DB with the given TTL;
        # the sentinel keys prevent switching modes on an existing DB
        if int(sce['pruning']) >= 0:
            self.db = RefcountDB(app.services.db)
            if "I am not pruning" in self.db.db:
                raise Exception("This database was initialized as non-pruning."
                                " Kinda hard to start pruning now.")
            self.db.ttl = int(sce['pruning'])
            self.db.db.put("I am pruning", "1")
        else:
            self.db = app.services.db
            if "I am pruning" in self.db:
                raise Exception("This database was initialized as pruning."
                                " Kinda hard to stop pruning now.")
            self.db.put("I am not pruning", "1")

        # refuse to reuse a DB that was created for a different network
        if 'network_id' in self.db:
            db_network_id = self.db.get('network_id')
            if db_network_id != str(sce['network_id']):
                raise Exception("This database was initialized with network_id {} "
                                "and can not be used when connecting to network_id {}".format(
                                    db_network_id, sce['network_id'])
                                )

        else:
            self.db.put('network_id', str(sce['network_id']))
            self.db.commit()

        assert self.db is not None

        # deliberately skip eth_ChainService.__init__; we only want the
        # WiredService wiring, the rest is set up differently here
        WiredService.__init__(self, app)
        log.info('initializing chain')
        coinbase = app.services.accounts.coinbase
        env = Env(self.db, sce['block'])
        self.chain = Chain(env, new_head_cb=self._on_new_head, coinbase=coinbase)

        log.info('chain at', number=self.chain.head.number)
        if 'genesis_hash' in sce:
            assert sce['genesis_hash'] == self.chain.genesis.hex_hash()

        self.transaction_queue = Queue(maxsize=self.transaction_queue_size)
        self.add_blocks_lock = False
        self.add_transaction_lock = gevent.lock.Semaphore()
        self.broadcast_filter = DuplicatesFilter()
        self.on_new_head_cbs = []
        self.on_new_head_candidate_cbs = []
        self.newblock_processing_times = deque(maxlen=1000)

        # Consensus
        self.consensus_contract = ConsensusContract(validators=self.config['hdc']['validators'])
        self.consensus_manager = ConsensusManager(self, self.consensus_contract,
                                                  self.consensus_privkey)
        #  self.consensus_manager.process()
        #  ConsensusManager is started once a peer connects and status_message is received

    # interface accessed by ConsensusManager

    def log(self, msg, *args, **kargs):
        log.debug(msg, *args, **kargs)

    @property
    def consensus_privkey(self):
        # the first account's key signs all consensus messages
        return self.app.services.accounts[0].privkey

    def sign(self, obj):
        assert isinstance(obj, Signed)
        obj.sign(self.consensus_privkey)

    @property
    def now(self):
        return time.time()

    def setup_alarm(self, delay, cb, *args):
        """Schedule `cb(*args)` to fire after `delay` seconds (greenlet)."""
        log.debug('setting up alarm')

        def _trigger():
            gevent.sleep(delay)
            log.debug('alarm triggered')
            cb(*args)
        gevent.spawn(_trigger)

    def commit_block(self, blk):
        """Add a decided block to the chain under the transaction lock."""
        assert isinstance(blk.header, HDCBlockHeader)
        self.add_transaction_lock.acquire()
        try:
            success = self.chain.add_block(blk, forward_pending_transactions=True)
        finally:
            # release even if add_block raises; a leaked semaphore would
            # block all further tx/block processing
            self.add_transaction_lock.release()
        return success

    def link_block(self, t_block):
        """Deserialize `t_block` and check it links onto the current head.

        Returns the deserialized block, or None if it is unknown/invalid.
        """
        assert isinstance(t_block.header, HDCBlockHeader)
        self.add_transaction_lock.acquire()
        try:
            block = self._link_block(t_block)
            if not block:
                # BUGFIX: the early return must not leak the semaphore
                # (previously it returned while still holding the lock)
                return
            assert block.get_parent() == self.chain.head, (block.get_parent(), self.chain.head)
            assert block.header.coinbase == t_block.header.coinbase
        finally:
            self.add_transaction_lock.release()
        return block

    def _link_block(self, t_block):
        """Deserialize a transient block; None on any validation failure."""
        assert isinstance(t_block.header, HDCBlockHeader)
        if t_block.header.hash in self.chain:
            log.warn('known block', block=t_block)
            return
        if t_block.header.prevhash not in self.chain:
            log.warn('missing parent', block=t_block, head=self.chain.head,
                     prevhash=phx(t_block.header.prevhash))
            return
        if isinstance(t_block, Block):
            return True  # already deserialized
        try:  # deserialize
            st = time.time()
            block = t_block.to_block(env=self.chain.env)
            elapsed = time.time() - st
            log.debug('deserialized', elapsed='%.4fs' % elapsed, ts=time.time(),
                      gas_used=block.gas_used, gpsec=self.gpsec(block.gas_used, elapsed))
            assert block.header.check_pow()
        except processblock.InvalidTransaction as e:
            log.warn('invalid transaction', block=t_block, error=e, FIXME='ban node')
            return
        except VerificationFailed as e:
            log.warn('verification failed', error=e, FIXME='ban node')
            return
        return block

    ###############################################################################

    @property
    def is_syncing(self):
        return self.consensus_manager.synchronizer.is_syncing

    # wire protocol receivers ###########

    # transactions

    def on_receive_transactions(self, proto, transactions):
        "receives rlp.decoded serialized"
        log.debug('----------------------------------')
        log.debug('remote_transactions_received', count=len(transactions), remote_id=proto)
        for tx in transactions:
            self.add_transaction(tx, origin=proto)

    # blocks / proposals ################

    def on_receive_getblockproposals(self, proto, blocknumbers):
        """Reply with the RLP of proposals for the requested (sorted) heights."""
        log.debug('----------------------------------')
        log.debug("on_receive_getblockproposals", count=len(blocknumbers))
        found = []
        for i, height in enumerate(blocknumbers):
            if i == self.wire_protocol.max_getproposals_count:
                break
            assert isinstance(height, int)  # integers
            assert i == 0 or height > blocknumbers[i - 1]   # sorted
            if height > self.chain.head.number:
                log.debug("unknown block requested", height=height)
                break
            rlp_data = self.consensus_manager.get_blockproposal_rlp_by_height(height)
            assert isinstance(rlp_data, bytes)
            found.append(rlp_data)
        if found:
            log.debug("found", count=len(found))
            proto.send_blockproposals(*found)

    def on_receive_blockproposals(self, proto, proposals):
        log.debug('----------------------------------')
        self.consensus_manager.log('received proposals', sender=proto)
        log.debug("recv proposals", num=len(proposals), remote_id=proto)
        self.consensus_manager.synchronizer.receive_blockproposals(proposals)

    def on_receive_newblockproposal(self, proto, proposal):
        """Validate a proposal, forward it if valid, and advance consensus."""
        if proposal.hash in self.broadcast_filter:
            return
        log.debug('----------------------------------')
        self.consensus_manager.log('receive proposal', sender=proto)
        log.debug("recv newblockproposal", proposal=proposal, remote_id=proto)
        # self.synchronizer.receive_newproposal(proto, proposal)
        assert isinstance(proposal, BlockProposal)
        assert isinstance(proposal.block.header, HDCBlockHeader)
        isvalid = self.consensus_manager.add_proposal(proposal, proto)
        if isvalid:
            self.broadcast(proposal, origin_proto=proto)
        self.consensus_manager.process()

    def on_receive_votinginstruction(self, proto, votinginstruction):
        if votinginstruction.hash in self.broadcast_filter:
            return
        log.debug('----------------------------------')
        log.debug("recv votinginstruction", proposal=votinginstruction, remote_id=proto)
        # self.synchronizer.receive_newproposal(proto, proposal)
        isvalid = self.consensus_manager.add_proposal(votinginstruction, proto)
        if isvalid:
            self.broadcast(votinginstruction, origin_proto=proto)

        self.consensus_manager.process()

    #  votes

    def on_receive_vote(self, proto, vote):
        if vote.hash in self.broadcast_filter:
            return
        log.debug('----------------------------------')
        log.debug("recv vote", vote=vote, remote_id=proto)
        isvalid = self.consensus_manager.add_vote(vote, proto)
        if isvalid:
            self.broadcast(vote, origin_proto=proto)
        self.consensus_manager.process()

    #  start

    def on_receive_status(self, proto, eth_version, network_id, genesis_hash, current_lockset):
        """Validate a peer's status, merge its lockset, and exchange state."""
        log.debug('----------------------------------')
        log.debug('status received', proto=proto, eth_version=eth_version)
        assert eth_version == proto.version, (eth_version, proto.version)
        if network_id != self.config['eth'].get('network_id', proto.network_id):
            log.warn("invalid network id", remote_network_id=network_id,
                     expected_network_id=self.config['eth'].get('network_id', proto.network_id))
            raise HDCProtocolError('wrong network_id')

        # check genesis
        if genesis_hash != self.chain.genesis.hash:
            log.warn("invalid genesis hash", remote_id=proto, genesis=genesis_hash.encode('hex'))
            raise HDCProtocolError('wrong genesis block')

        assert isinstance(current_lockset, LockSet)
        if len(current_lockset):
            log.debug('adding received lockset', ls=current_lockset)
            for v in current_lockset.votes:
                self.consensus_manager.add_vote(v, proto)

        self.consensus_manager.process()

        # send last BlockProposal
        p = self.consensus_manager.last_blockproposal
        if p:
            self.consensus_manager.log('sending proposal', p=p)
            proto.send_newblockproposal(p)

        # send transactions
        transactions = self.chain.get_transactions()
        if transactions:
            log.debug("sending transactions", remote_id=proto)
            proto.send_transactions(*transactions)

    def on_wire_protocol_start(self, proto):
        """Register receive callbacks on a new peer and send our status."""
        log.debug('----------------------------------')
        log.debug('on_wire_protocol_start', proto=proto)
        assert isinstance(proto, self.wire_protocol)
        # register callbacks
        proto.receive_status_callbacks.append(self.on_receive_status)
        proto.receive_transactions_callbacks.append(self.on_receive_transactions)
        proto.receive_blockproposals_callbacks.append(self.on_receive_blockproposals)
        proto.receive_getblockproposals_callbacks.append(self.on_receive_getblockproposals)
        proto.receive_newblockproposal_callbacks.append(self.on_receive_newblockproposal)
        proto.receive_votinginstruction_callbacks.append(self.on_receive_votinginstruction)
        proto.receive_vote_callbacks.append(self.on_receive_vote)

        # send status
        proto.send_status(genesis_hash=self.chain.genesis.hash,
                          current_lockset=self.consensus_manager.active_round.lockset)

    def on_wire_protocol_stop(self, proto):
        assert isinstance(proto, self.wire_protocol)
        log.debug('----------------------------------')
        log.debug('on_wire_protocol_stop', proto=proto)

    def broadcast(self, obj, origin_proto=None):
        """Broadcast a consensus object to all peers except its origin,
        dispatching on the object's type to pick the wire command.
        """
        fmap = {BlockProposal: 'newblockproposal', VoteBlock: 'vote', VoteNil: 'vote',
                VotingInstruction: 'votinginstruction', Transaction: 'transaction'}
        if not self.broadcast_filter.update(obj.hash):
            log.debug('already broadcasted', obj=obj)
            return
        if isinstance(obj, BlockProposal):
            assert obj.sender == obj.block.header.coinbase
        log.debug('broadcasting', obj=obj)
        bcast = self.app.services.peermanager.broadcast
        bcast(HDCProtocol, fmap[type(obj)], args=(obj,),
              exclude_peers=[origin_proto.peer] if origin_proto else [])
예제 #38
0
    def __init__(self, app):
        """Set up the service: pick pruning/non-pruning DB, verify the DB's
        network id, build the chain, and wire up the consensus manager.
        Statement order matters: the DB must be fully validated before the
        Chain is constructed on top of it.
        """
        self.config = app.config
        sce = self.config['eth']
        # pruning >= 0 selects a refcounting DB with the given TTL; the
        # sentinel keys prevent switching modes on an already-initialized DB
        if int(sce['pruning']) >= 0:
            self.db = RefcountDB(app.services.db)
            if "I am not pruning" in self.db.db:
                raise Exception("This database was initialized as non-pruning."
                                " Kinda hard to start pruning now.")
            self.db.ttl = int(sce['pruning'])
            self.db.db.put("I am pruning", "1")
        else:
            self.db = app.services.db
            if "I am pruning" in self.db:
                raise Exception("This database was initialized as pruning."
                                " Kinda hard to stop pruning now.")
            self.db.put("I am not pruning", "1")

        # refuse to reuse a DB that was created for a different network
        if 'network_id' in self.db:
            db_network_id = self.db.get('network_id')
            if db_network_id != str(sce['network_id']):
                raise Exception(
                    "This database was initialized with network_id {} "
                    "and can not be used when connecting to network_id {}".
                    format(db_network_id, sce['network_id']))

        else:
            self.db.put('network_id', str(sce['network_id']))
            self.db.commit()

        assert self.db is not None

        # call WiredService.__init__ directly, bypassing the intermediate
        # base-class constructor on purpose
        WiredService.__init__(self, app)
        log.info('initializing chain')
        coinbase = app.services.accounts.coinbase
        env = Env(self.db, sce['block'])
        self.chain = Chain(env,
                           new_head_cb=self._on_new_head,
                           coinbase=coinbase)

        log.info('chain at', number=self.chain.head.number)
        if 'genesis_hash' in sce:
            assert sce['genesis_hash'] == self.chain.genesis.hex_hash()

        self.transaction_queue = Queue(maxsize=self.transaction_queue_size)
        self.add_blocks_lock = False
        self.add_transaction_lock = gevent.lock.BoundedSemaphore()
        self.broadcast_filter = DuplicatesFilter()
        self.on_new_head_cbs = []
        self.on_new_head_candidate_cbs = []
        # rolling window of recent block-processing durations
        self.newblock_processing_times = deque(maxlen=1000)

        # Consensus
        self.consensus_contract = ConsensusContract(
            validators=self.config['hdc']['validators'])
        self.consensus_manager = ConsensusManager(self,
                                                  self.consensus_contract,
                                                  self.consensus_privkey)

        # lock blocks that were proposed, so they don't get mutated
        self.proposal_lock = ProposalLock()
        assert not self.proposal_lock.is_locked()
예제 #39
0
class ChainService(WiredService):

    """
    Manages the chain and requests to it.

    Receives blocks/transactions from peers via the eth wire protocol,
    validates and imports them into the local Chain, and serves peer
    requests for block hashes and block bodies.
    """
    # required by BaseService
    name = 'chain'
    default_config = dict(eth=dict(privkey_hex=''))

    # required by WiredService
    wire_protocol = eth_protocol.ETHProtocol  # create for each peer

    # initialized after configure:
    chain = None
    genesis = None
    synchronizer = None
    config = None
    block_queue_size = 1024
    transaction_queue_size = 1024

    def __init__(self, app):
        """Create the chain on the app's database and set up queues/sync."""
        self.config = app.config
        self.db = app.services.db
        assert self.db is not None
        super(ChainService, self).__init__(app)
        log.info('initializing chain')
        self.chain = Chain(self.db, new_head_cb=self._on_new_head)
        self.synchronizer = Synchronizer(self, force_sync=None)
        self.chain.coinbase = privtoaddr(self.config['eth']['privkey_hex'].decode('hex'))

        self.block_queue = Queue(maxsize=self.block_queue_size)
        self.transaction_queue = Queue(maxsize=self.transaction_queue_size)
        # True while a _add_blocks greenlet is draining block_queue
        self.add_blocks_lock = False
        self.broadcast_filter = DuplicatesFilter()

    def _on_new_head(self, block):
        # Hook invoked by Chain on head changes; intentionally a no-op here.
        pass

    def add_block(self, t_block, proto):
        "adds a block to the block_queue and spawns _add_block if not running"
        self.block_queue.put((t_block, proto))  # blocks if full
        if not self.add_blocks_lock:
            self.add_blocks_lock = True
            gevent.spawn(self._add_blocks)

    def _add_blocks(self):
        """Drain block_queue: check PoW, deserialize and import each block.

        Runs in its own greenlet; add_blocks_lock guarantees only one
        drainer is active at a time and is always released in `finally`.
        """
        log.debug('add_blocks', qsize=self.block_queue.qsize())
        try:
            while not self.block_queue.empty():
                t_block, proto = self.block_queue.get()
                if t_block.header.hash in self.chain:
                    log.warn('known block', block=t_block)
                    continue
                if t_block.header.prevhash not in self.chain:
                    log.warn('missing parent', block=t_block)
                    continue
                if not t_block.header.check_pow():
                    log.warn('invalid pow', block=t_block)
                    # FIXME ban node
                    continue
                try:  # deserialize
                    st = time.time()
                    block = t_block.to_block(db=self.chain.db)
                    elapsed = time.time() - st
                    # guard against ZeroDivisionError when deserialization
                    # completes within the clock's resolution (elapsed == 0.0)
                    gpsec = int(block.gas_used / elapsed) if elapsed else 0
                    log.debug('deserialized', elapsed='%.2fs' % elapsed,
                              gas_used=block.gas_used, gpsec=gpsec)
                except processblock.InvalidTransaction as e:
                    log.warn('invalid transaction', block=t_block, error=e)
                    # FIXME ban node
                    continue

                if self.chain.add_block(block):
                    log.debug('added', block=block)
                gevent.sleep(0.001)  # yield so other greenlets can run
        finally:
            # release even if an unexpected error escapes the loop
            self.add_blocks_lock = False

    def broadcast_newblock(self, block, chain_difficulty, origin=None):
        """Relay a new block to all peers except the one it came from."""
        assert isinstance(block, eth_protocol.TransientBlock)
        if self.broadcast_filter.known(block.header.hash):
            log.debug('already broadcasted block')
        else:
            log.debug('broadcasting newblock', origin=origin)
            bcast = self.app.services.peermanager.broadcast
            bcast(eth_protocol.ETHProtocol, 'newblock', args=(block, chain_difficulty),
                  num_peers=None, exclude_protos=[origin])

    # wire protocol receivers ###########

    def on_wire_protocol_start(self, proto):
        """Register receive callbacks on a new peer and send our status."""
        log.debug('on_wire_protocol_start', proto=proto)
        assert isinstance(proto, self.wire_protocol)
        # register callbacks
        proto.receive_status_callbacks.append(self.on_receive_status)
        proto.receive_transactions_callbacks.append(self.on_receive_transactions)
        proto.receive_getblockhashes_callbacks.append(self.on_receive_getblockhashes)
        proto.receive_blockhashes_callbacks.append(self.on_receive_blockhashes)
        proto.receive_getblocks_callbacks.append(self.on_receive_getblocks)
        proto.receive_blocks_callbacks.append(self.on_receive_blocks)
        proto.receive_newblock_callbacks.append(self.on_receive_newblock)

        # send status
        head = self.chain.head
        proto.send_status(chain_difficulty=head.chain_difficulty(), chain_head_hash=head.hash,
                          genesis_hash=self.chain.genesis.hash)

    def on_wire_protocol_stop(self, proto):
        assert isinstance(proto, self.wire_protocol)
        log.debug('on_wire_protocol_stop', proto=proto)

    def on_receive_status(self, proto, eth_version, network_id, chain_difficulty, chain_head_hash,
                          genesis_hash):
        """Validate a peer's status (version/network/genesis) and start sync."""
        log.debug('status received', proto=proto, eth_version=eth_version)
        assert eth_version == proto.version, (eth_version, proto.version)
        if network_id != proto.network_id:
            log.warn("invalid network id", remote_id=proto.network_id, network_id=network_id)
            raise eth_protocol.ETHProtocolError('wrong network_id')

        # check genesis
        if genesis_hash != self.chain.genesis.hash:
            log.warn("invalid genesis hash", remote_id=proto, genesis=genesis_hash.encode('hex'))
            raise eth_protocol.ETHProtocolError('wrong genesis block')

        # request chain
        self.synchronizer.receive_status(proto, chain_head_hash, chain_difficulty)

        # send transactions
        transactions = self.chain.get_transactions()
        if transactions:
            log.debug("sending transactions", remote_id=proto)
            proto.send_transactions(*transactions)

    # transactions

    def on_receive_transactions(self, proto, transactions):
        "receives rlp.decoded serialized"
        log.debug('remote_transactions_received', count=len(transactions), remote_id=proto)
        log.debug('skipping, FIXME')
        return
        # NOTE: unreachable by design until the bloomfilter FIXME is resolved
        for tx in transactions:
            # fixme bloomfilter
            self.chain.add_transaction(tx)

    # blockhashes ###########

    def on_receive_getblockhashes(self, proto, child_block_hash, count):
        """Answer a getblockhashes request by walking parent links backwards."""
        log.debug("handle_get_blockhashes", count=count, block_hash=encode_hex(child_block_hash))
        max_hashes = min(count, self.wire_protocol.max_getblockhashes_count)
        found = []
        if child_block_hash not in self.chain:
            log.debug("unknown block")
            proto.send_blockhashes(*[])
            return

        last = child_block_hash
        while len(found) < max_hashes:
            # first field of the RLP-encoded header is the parent hash
            last = rlp.decode_lazy(self.chain.db.get(last))[0][0]
            if last:
                found.append(last)
            else:
                break

        log.debug("sending: found block_hashes", count=len(found))
        proto.send_blockhashes(*found)

    def on_receive_blockhashes(self, proto, blockhashes):
        if blockhashes:
            log.debug("on_receive_blockhashes", count=len(blockhashes), remote_id=proto,
                      first=encode_hex(blockhashes[0]), last=encode_hex(blockhashes[-1]))
        else:
            log.debug("recv 0 remote block hashes, signifying genesis block")
        self.synchronizer.receive_blockhashes(proto, blockhashes)

    # blocks ################

    def on_receive_getblocks(self, proto, blockhashes):
        """Send the raw block data we have for the requested hashes."""
        log.debug("on_receive_getblocks", count=len(blockhashes))
        found = []
        for bh in blockhashes[:self.wire_protocol.max_getblocks_count]:
            try:
                found.append(self.chain.db.get(bh))
            except KeyError:
                log.debug("unknown block requested", block_hash=encode_hex(bh))
        if found:
            log.debug("found", count=len(found))
            proto.send_blocks(*found)

    def on_receive_blocks(self, proto, transient_blocks):
        """Forward received blocks to the synchronizer."""
        if not transient_blocks:
            # avoid max() over an empty sequence (raised ValueError before)
            log.debug("recv blocks", count=0, remote_id=proto)
            return
        log.debug("recv blocks", count=len(transient_blocks), remote_id=proto,
                  highest_number=max(x.header.number for x in transient_blocks))
        self.synchronizer.receive_blocks(proto, transient_blocks)

    def on_receive_newblock(self, proto, block, chain_difficulty):
        log.debug("recv newblock", block=block, remote_id=proto)
        self.synchronizer.receive_newblock(proto, block, chain_difficulty)
예제 #40
0
def test_simple_chain(db):
    """Mine one block on top of a quick genesis and verify chain queries."""
    k, v, k2, v2 = accounts()
    genesis = mkquickgenesis({v: {"balance": utils.denoms.ether * 1}}, db=db)
    store_block(genesis)
    chain = Chain(env=env(genesis.db), genesis=genesis)
    tx = get_transaction()
    child = mine_next_block(genesis, transactions=[tx])
    store_block(child)
    chain.add_block(child)

    # membership and lookup
    assert genesis.hash in chain
    assert child.hash in chain
    assert chain.has_block(child.hash)
    assert chain.get(child.hash) == child
    assert chain.head == child
    assert chain.get_children(genesis) == [child]
    assert chain.get_uncles(child) == []

    # chain walking with various counts and start hashes
    assert chain.get_chain() == [child, genesis]
    for count, expected in [(0, []), (1, [child]),
                            (2, [child, genesis]), (100, [child, genesis])]:
        assert chain.get_chain(count=count) == expected
    assert chain.get_chain(genesis.hash) == [genesis]
    assert chain.get_chain(genesis.hash, 0) == []
    assert chain.get_chain(child.hash) == [child, genesis]
    assert chain.get_chain(child.hash, 1) == [child]
    for count, expected in [(10, [child]), (1, [child]), (0, [])]:
        assert chain.get_descendants(genesis, count=count) == expected

    # block-number and transaction indices
    assert chain.index.has_block_by_number(1)
    assert not chain.index.has_block_by_number(2)
    assert chain.index.get_block_by_number(1) == child.hash
    with pytest.raises(KeyError):
        chain.index.get_block_by_number(2)
    assert chain.index.get_transaction(tx.hash) == (tx, child, 0)
예제 #41
0
File: casper.py  Project: ethereum/research
class Validator():
    def __init__(self, genesis, key, network, env, time_offset=5):
        """Set up a simulated Casper validator.

        genesis: genesis used to initialize this validator's Chain
        key: this validator's private key
        network: simulated p2p network used for broadcasting
        env: chain environment passed to Chain
        time_offset: bound on the random clock skew (in ticks) applied to
            this validator, to exercise clock-drift tolerance in tests
        """
        # Create a chain object
        self.chain = Chain(genesis, env=env)
        # Create a transaction queue
        self.txqueue = TransactionQueue()
        # Use the validator's time as the chain's time
        self.chain.time = lambda: self.get_timestamp()
        # My private key
        self.key = key
        # My address
        self.address = privtoaddr(key)
        # My randao
        self.randao = RandaoManager(sha3(self.key))
        # Pointer to the test p2p network
        self.network = network
        # Record of objects already received and processed
        self.received_objects = {}
        # The minimum eligible timestamp given a particular number of skips
        self.next_skip_count = 0
        self.next_skip_timestamp = 0
        # Is this validator active?
        self.active = False
        # Code that verifies signatures from this validator
        self.validation_code = generate_validation_code(privtoaddr(key))
        # Validation code hash (used as this validator's id in Casper calls)
        self.vchash = sha3(self.validation_code)
        # Parents that this validator has already built a block on
        self.used_parents = {}
        # This validator's clock offset (for testing purposes),
        # uniform over roughly [-time_offset//2, time_offset//2)
        self.time_offset = random.randrange(time_offset) - (time_offset // 2)
        # Determine the epoch length
        self.epoch_length = self.call_casper('getEpochLength')
        # My minimum gas price
        self.mingasprice = 20 * 10**9
        # Give this validator a unique ID
        self.id = len(ids)
        ids.append(self.id)
        self.update_activity_status()
        # Track the head we last reacted to, so update_head is idempotent
        self.cached_head = self.chain.head_hash

    def call_casper(self, fun, args=None):
        """Call function `fun` on the Casper contract against the current
        chain state, with optional argument list `args`.

        Uses a None sentinel instead of the mutable default `args=[]`
        (same observable behavior, avoids the shared-default pitfall).
        """
        return call_casper(self.chain.state, fun, args if args is not None else [])

    def update_activity_status(self):
        start_epoch = self.call_casper('getStartEpoch', [self.vchash])
        now_epoch = self.call_casper('getEpoch')
        end_epoch = self.call_casper('getEndEpoch', [self.vchash])
        if start_epoch <= now_epoch < end_epoch:
            self.active = True
            self.next_skip_count = 0
            self.next_skip_timestamp = get_timestamp(self.chain, self.next_skip_count)
            print 'In current validator set'
        else:
            self.active = False

    def get_timestamp(self):
        """Return this validator's local time: the simulated network clock
        scaled by 0.01, plus this validator's personal clock offset."""
        scaled_network_time = int(self.network.time * 0.01)
        return scaled_network_time + self.time_offset

    def on_receive(self, obj):
        if isinstance(obj, list):
            for _obj in obj:
                self.on_receive(_obj)
            return
        if obj.hash in self.received_objects:
            return
        if isinstance(obj, Block):
            print 'Receiving block', obj
            assert obj.hash not in self.chain
            block_success = self.chain.add_block(obj)
            self.network.broadcast(self, obj)
            self.network.broadcast(self, ChildRequest(obj.header.hash))
            self.update_head()
        elif isinstance(obj, Transaction):
            print 'Receiving transaction', obj
            if obj.gasprice >= self.mingasprice:
                self.txqueue.add_transaction(obj)
                print 'Added transaction, txqueue size %d' % len(self.txqueue.txs)
                self.network.broadcast(self, obj)
            else:
                print 'Gasprice too low'
        self.received_objects[obj.hash] = True
        for x in self.chain.get_chain():
            assert x.hash in self.received_objects

    def tick(self):
        """One scheduler tick: attempt to propose a block on the current
        head if eligible, and occasionally flush the chain's time/parent
        queues to process blocks that arrived early or out of order."""
        # Try to create a block
        # Conditions:
        # (i) you are an active validator,
        # (ii) you have not yet made a block with this parent
        if self.active and self.chain.head_hash not in self.used_parents:
            t = self.get_timestamp()
            # Is it early enough to create the block?
            if t >= self.next_skip_timestamp and (not self.chain.head or t > self.chain.head.header.timestamp):
                # Wrong validator; in this case, just wait for the next skip count
                if not check_skips(self.chain, self.vchash, self.next_skip_count):
                    self.next_skip_count += 1
                    self.next_skip_timestamp = get_timestamp(self.chain, self.next_skip_count)
                    # print 'Incrementing proposed timestamp for block %d to %d' % \
                    #     (self.chain.head.header.number + 1 if self.chain.head else 0, self.next_skip_timestamp)
                    return
                self.used_parents[self.chain.head_hash] = True
                # Simulated 0.1% chance of validator failure to make a block
                # (random.random() > 0.999)
                if random.random() > 0.999:
                    print 'Simulating validator failure, block %d not created' % (self.chain.head.header.number + 1 if self.chain.head else 0)
                    return
                # Make the block
                # Snapshot the state root to verify block building is side-effect free
                s1 = self.chain.state.trie.root_hash
                pre_dunkle_count = self.call_casper('getTotalDunklesIncluded')
                dunkle_txs = get_dunkle_candidates(self.chain, self.chain.state)
                blk = make_head_candidate(self.chain, self.txqueue)
                randao = self.randao.get_parent(self.call_casper('getRandao', [self.vchash]))
                blk = sign_block(blk, self.key, randao, self.vchash, self.next_skip_count)
                # Make sure it's valid
                global global_block_counter
                global_block_counter += 1
                # Every dunkle candidate must have made it into the block
                for dtx in dunkle_txs:
                    assert dtx in blk.transactions, (dtx, blk.transactions)
                print 'made block with timestamp %d and %d dunkles' % (blk.timestamp, len(dunkle_txs))
                # Building the candidate must not have mutated the live state
                s2 = self.chain.state.trie.root_hash
                assert s1 == s2
                assert blk.timestamp >= self.next_skip_timestamp
                assert self.chain.add_block(blk)
                self.update_head()
                post_dunkle_count = self.call_casper('getTotalDunklesIncluded')
                assert post_dunkle_count - pre_dunkle_count == len(dunkle_txs)
                self.received_objects[blk.hash] = True
                print 'Validator %d making block %d (%s)' % (self.id, blk.header.number, blk.header.hash[:8].encode('hex'))
                self.network.broadcast(self, blk)
        # Sometimes we received blocks too early or out of order;
        # run an occasional loop that processes these
        if random.random() < 0.02:
            self.chain.process_time_queue()
            self.chain.process_parent_queue()
            self.update_head()

    def update_head(self):
        if self.cached_head == self.chain.head_hash:
            return
        self.cached_head = self.chain.head_hash
        if self.chain.state.block_number % self.epoch_length == 0:
            self.update_activity_status()
        if self.active:
            self.next_skip_count = 0
            self.next_skip_timestamp = get_timestamp(self.chain, self.next_skip_count)
        print 'Head changed: %s, will attempt creating a block at %d' % (self.chain.head_hash.encode('hex'), self.next_skip_timestamp)

    def withdraw(self, gasprice=20 * 10**9):
        sigdata = make_withdrawal_signature(self.key)
        txdata = casper_ct.encode('startWithdrawal', [self.vchash, sigdata])
        tx = Transaction(self.chain.state.get_nonce(self.address), gasprice, 650000, self.chain.config['CASPER_ADDR'], 0, txdata).sign(self.key)
        self.txqueue.add_transaction(tx, force=True)
        self.network.broadcast(self, tx)
        print 'Withdrawing!'

    def deposit(self, gasprice=20 * 10**9):
        assert value * 10**18 >= self.chain.state.get_balance(self.address) + gasprice * 1000000
        tx = Transaction(self.chain.state.get_nonce(self.address) * 10**18, gasprice, 1000000,
                         casper_config['CASPER_ADDR'], value * 10**18,
                         ct.encode('deposit', [self.validation_code, self.randao.get(9999)]))