class ChainManager(StoppableLoopThread):

    """
    Manages the chain and requests to it.

    Runs as a stoppable loop thread: each `loop_body` tick optionally mines,
    throttled to the 'misc/mining' CPU percentage from config.  Chain state
    lives in a key/value DB (`self.blockchain`) with an `Index` for
    number/children lookups; the `Miner` is rebuilt whenever HEAD changes.
    """

    # initialized after configure:
    genesis = None
    index = None
    miner = None
    blockchain = None
    synchronizer = None

    def __init__(self):
        super(ChainManager, self).__init__()

    def configure(self, config, genesis=None):
        """Open the chain DB, optionally (re)initialize it from `genesis`,
        and set up index, miner and synchronizer."""
        self.config = config
        logger.info('Opening chain @ %s', utils.get_db_path())
        db = self.blockchain = DB(utils.get_db_path())
        self.index = Index(db)
        if genesis:
            self._initialize_blockchain(genesis)
        logger.debug('Chain @ #%d %s', self.head.number, self.head.hex_hash())
        self.genesis = blocks.CachedBlock.create_cached(blocks.genesis())
        self.new_miner()
        self.synchronizer = Synchronizer(self)

    @property
    def head(self):
        """Current head block; lazily initializes the chain if the DB has no
        'HEAD' pointer yet."""
        if 'HEAD' not in self.blockchain:
            self._initialize_blockchain()
        ptr = self.blockchain.get('HEAD')
        return blocks.get_block(ptr)

    def _update_head(self, block):
        """Persist `block` as the new head, reindex block numbers and reset
        the miner."""
        if not block.is_genesis():
            # only ever advance to a strictly more difficult chain
            assert self.head.chain_difficulty() < block.chain_difficulty()
            if block.get_parent() != self.head:
                logger.debug('New Head %r is on a different branch. Old was:%r',
                             block, self.head)
        self.blockchain.put('HEAD', block.hash)
        self.index.update_blocknumbers(self.head)
        self.new_miner()  # reset mining

    def get(self, blockhash):
        """Return the stored block for a 32-byte binary `blockhash`."""
        assert isinstance(blockhash, str)
        assert len(blockhash) == 32
        return blocks.get_block(blockhash)

    def has_block(self, blockhash):
        """True if a block with this 32-byte binary hash is stored."""
        assert isinstance(blockhash, str)
        assert len(blockhash) == 32
        return blockhash in self.blockchain

    def __contains__(self, blockhash):
        return self.has_block(blockhash)

    def _store_block(self, block):
        # serialized block is keyed by its hash; written out on commit()
        self.blockchain.put(block.hash, block.serialize())

    def commit(self):
        self.blockchain.commit()

    def _initialize_blockchain(self, genesis=None):
        """Store and index the genesis block and point HEAD at it."""
        logger.info('Initializing new chain @ %s', utils.get_db_path())
        if not genesis:
            genesis = blocks.genesis()
        self.index.add_block(genesis)
        self._store_block(genesis)
        self._update_head(genesis)
        assert genesis.hash in self

    def loop_body(self):
        """One scheduler tick: mine, then sleep so that mining uses roughly
        `pct_cpu` percent of wall time; idle-sleep when mining is off."""
        ts = time.time()
        pct_cpu = self.config.getint('misc', 'mining')
        if pct_cpu > 0:
            self.mine()
            # sleep (elapsed * (100/pct - 1)) so mining ~= pct_cpu percent
            delay = (time.time() - ts) * (100. / pct_cpu - 1)
            if delay < 0:
                # can happen when pct_cpu > 100; clamp and warn
                logger.warn('delay %r<0!?', delay)
                delay = 1
            assert delay >= 0
            time.sleep(min(delay, 1.))
        else:
            time.sleep(.01)

    def new_miner(self):
        "new miner is initialized if HEAD is updated"
        # prepare uncles
        uncles = set(self.get_uncles(self.head))
        # logger.debug('%d uncles for next block %r', len(uncles), uncles)
        ineligible = set()  # hashes  # NOTE(review): never populated or read
        blk = self.head
        # walk up to 8 ancestors and drop uncles they already included
        for i in range(8):
            for u in blk.uncles:  # assuming uncle headers
                u = utils.sha3(rlp.encode(u))
                if u in self:
                    # logger.debug('ineligible uncle %r', u.encode('hex'))
                    uncles.discard(self.get(u))
            if blk.has_parent():
                blk = blk.get_parent()
        # logger.debug('%d uncles after filtering %r', len(uncles), uncles)
        miner = Miner(self.head, uncles, self.config.get('wallet', 'coinbase'))
        if self.miner:
            # carry queued transactions over to the fresh miner
            for tx in self.miner.get_transactions():
                miner.add_transaction(tx)
        self.miner = miner

    def mine(self):
        """Run one mining attempt; broadcast the block if it was added,
        otherwise rebuild the miner."""
        with self.lock:
            block = self.miner.mine()
            if block:
                # create new block
                if self.add_block(block):
                    logger.debug("broadcasting new %r" % block)
                    signals.broadcast_new_block.send(sender=None, block=block)
                else:
                    self.new_miner()

    def receive_chain(self, transient_blocks, peer=None):
        """Ingest blocks received from the network (sorted oldest first),
        keeping the synchronizer informed about progress/failures."""
        with self.lock:
            old_head = self.head  # NOTE(review): unused
            # assuming to receive chain order w/ oldest block first
            transient_blocks.sort(key=attrgetter('number'))
            assert transient_blocks[0].number <= transient_blocks[-1].number
            # notify syncer
            self.synchronizer.received_blocks(peer, transient_blocks)
            for t_block in transient_blocks:  # oldest to newest
                logger.debug('Deserializing %r', t_block)
                # logger.debug(t_block.rlpdata.encode('hex'))
                try:
                    block = blocks.Block.deserialize(t_block.rlpdata)
                except processblock.InvalidTransaction as e:
                    # FIXME there might be another exception in
                    # blocks.deserializeChild when replaying transactions
                    # if this fails, we need to rewind state
                    logger.debug('%r w/ invalid Transaction %r', t_block, e)
                    # stop current syncing of this chain and skip the child blocks
                    self.synchronizer.stop_synchronization(peer)
                    return
                except blocks.UnknownParentException:
                    if t_block.prevhash == blocks.GENESIS_PREVHASH:
                        logger.debug('Rec Incompatible Genesis %r', t_block)
                        if peer:
                            peer.send_Disconnect(reason='Wrong genesis block')
                    else:  # should be a single newly mined block
                        assert t_block.prevhash not in self
                        assert t_block.prevhash != blocks.genesis().hash
                        logger.debug('%s with unknown parent %s, peer:%r',
                                     t_block, t_block.prevhash.encode('hex'),
                                     peer)
                        if len(transient_blocks) != 1:
                            # strange situation here.
                            # we receive more than 1 block, so it's not a single newly mined one
                            # sync/network/... failed to add the needed parent at some point
                            # well, this happens whenever we can't validate a block!
                            # we should disconnect!
                            logger.warn('%s received, but unknown parent.',
                                        len(transient_blocks))
                        if peer:
                            # request chain for newest known hash
                            self.synchronizer.synchronize_unknown_block(
                                peer, transient_blocks[-1].hash)
                    break
                if block.hash in self:
                    logger.debug('Known %r', block)
                else:
                    assert block.has_parent()
                    success = self.add_block(block)
                    if success:
                        logger.debug('Added %r', block)

    def add_block(self, block):
        """returns True if block was added successfully"""
        # make sure we know the parent
        if not block.has_parent() and not block.is_genesis():
            logger.debug('Missing parent for block %r', block)
            return False
        if not block.validate_uncles():
            logger.debug('Invalid uncles %r', block)
            return False
        # check PoW and forward asap in order to avoid stale blocks
        if not len(block.nonce) == 32:
            logger.debug('Nonce not set %r', block)
            return False
        elif not block.check_proof_of_work(block.nonce) and\
                not block.is_genesis():
            logger.debug('Invalid nonce %r', block)
            return False
        # FIXME: Forward blocks w/ valid PoW asap
        if block.has_parent():
            try:
                # logger.debug('verifying: %s', block)
                # logger.debug('GETTING ACCOUNT FOR COINBASE:')
                # acct = block.get_acct(block.coinbase)
                # logger.debug('GOT ACCOUNT FOR COINBASE: %r', acct)
                processblock.verify(block, block.get_parent())
            except processblock.VerificationFailed as e:
                logger.debug('### VERIFICATION FAILED ### %r', e)
                # dump the offending block for post-mortem debugging
                f = os.path.join(utils.data_dir, 'badblock.log')
                open(f, 'w').write(str(block.hex_serialize()))
                print block.hex_serialize()
                return False
        if block.number < self.head.number:
            logger.debug("%r is older than head %r", block, self.head)
            # Q: Should we have any limitations on adding blocks?
        self.index.add_block(block)
        self._store_block(block)
        # set to head if this makes the longest chain w/ most work for that number
        # logger.debug('Head: %r @%s New:%r @%d', self.head, self.head.chain_difficulty(), block, block.chain_difficulty())
        if block.chain_difficulty() > self.head.chain_difficulty():
            logger.debug('New Head %r', block)
            self._update_head(block)
        elif block.number > self.head.number:
            logger.warn('%r has higher blk number than head %r but lower chain_difficulty of %d vs %d',
                        block, self.head, block.chain_difficulty(),
                        self.head.chain_difficulty())
        self.commit()  # batch commits all changes that came with the new block
        return True

    def get_children(self, block):
        """Blocks whose parent is `block`, looked up via the index."""
        return [self.get(c) for c in self.index.get_children(block.hash)]

    def get_uncles(self, block):
        """Candidate uncles for a child of `block`: children of up to six
        generations of ancestors, excluding the direct ancestor line."""
        if not block.has_parent():
            return []
        parent = block.get_parent()
        o = []
        i = 0
        while parent.has_parent() and i < 6:
            grandparent = parent.get_parent()
            o.extend(
                [u for u in self.get_children(grandparent) if u != parent])
            parent = grandparent
            i += 1
        return o

    def add_transaction(self, transaction):
        """Queue `transaction` with the miner; broadcast locally if it was
        accepted.  Returns the miner's verdict."""
        logger.debug("add transaction %r" % transaction)
        with self.lock:
            res = self.miner.add_transaction(transaction)
            if res:
                logger.debug("broadcasting valid %r" % transaction)
                signals.send_local_transactions.send(
                    sender=None, transactions=[transaction])
            return res

    def get_transactions(self):
        """Transactions currently queued in the miner."""
        logger.debug("get_transactions called")
        return self.miner.get_transactions()

    def get_chain(self, start='', count=NUM_BLOCKS_PER_REQUEST):
        "return 'count' blocks starting from head or start"
        logger.debug("get_chain: start:%s count%d", start.encode('hex'), count)
        blocks = []
        block = self.head
        if start:
            # NOTE(review): returns [] when `start` IS present in the index
            # db, then calls self.get(start) for hashes that were NOT found
            # there -- the condition looks inverted ('not in'); confirm
            # against callers before changing.
            if start in self.index.db:
                return []
            block = self.get(start)
            if not self.in_main_branch(block):
                return []
        for i in range(count):
            blocks.append(block)
            if block.is_genesis():
                break
            block = block.get_parent()
        return blocks

    def in_main_branch(self, block):
        """True if `block` is the canonical block at its number."""
        try:
            return block.hash == self.index.get_block_by_number(block.number)
        except KeyError:
            return False

    def get_descendants(self, block, count=1):
        """Main-branch blocks following `block` by number.

        NOTE(review): the exclusive upper bound min(head.number,
        block.number + count) yields at most count - 1 blocks and never
        includes the head itself -- possibly an off-by-one; confirm.
        """
        logger.debug("get_descendants: %r ", block)
        assert block.hash in self
        block_numbers = range(block.number + 1,
                              min(self.head.number, block.number + count))
        return [self.get(self.index.get_block_by_number(n))
                for n in block_numbers]
class ChainManager(StoppableLoopThread):

    """
    Manages the chain and requests to it.

    Backed by a key/value DB plus an `Index`; logging uses keyword-style
    (structlog-like) calls on `log`.  A head change rebuilds the `Miner`;
    blocks with valid PoW may be forwarded to peers (`forward=True`) before
    full verification to avoid staleness.
    """

    # initialized after configure:
    genesis = None
    index = None
    miner = None
    blockchain = None
    synchronizer = None
    config = None

    def __init__(self):
        super(ChainManager, self).__init__()

    def configure(self, config, genesis=None, db=None):
        """Attach (or open) the chain DB, optionally reinitialize from
        `genesis`, and set up index, genesis, miner and synchronizer."""
        self.config = config
        if not db:
            db_path = utils.db_path(config.get('misc', 'data_dir'))
            log.info('opening chain', db_path=db_path)
            db = self.blockchain = DB(db_path)
        self.blockchain = db
        self.index = Index(db)
        if genesis:
            self._initialize_blockchain(genesis)
        log.debug('chain @', head_hash=self.head)
        self.genesis = blocks.genesis(db=db)
        log.debug('got genesis', genesis_hash=self.genesis)
        self.new_miner()
        self.synchronizer = Synchronizer(self)

    def _initialize_blockchain(self, genesis=None):
        """Store and index the genesis block and point HEAD at it."""
        log.info('Initializing new chain')
        if not genesis:
            genesis = blocks.genesis(self.blockchain)
            log.info('new genesis', genesis_hash=genesis)
        self.index.add_block(genesis)
        self._store_block(genesis)
        # round-trip check: stored block must deserialize back to genesis
        assert genesis == blocks.get_block(self.blockchain, genesis.hash)
        self._update_head(genesis)
        assert genesis.hash in self

    @property
    def head(self):
        """Current head block; bootstraps config and chain state on first
        access when necessary."""
        if not self.config:
            self.configure(config.read_config())
        if not self.blockchain or 'HEAD' not in self.blockchain:
            self._initialize_blockchain()
        ptr = self.blockchain.get('HEAD')
        return blocks.get_block(self.blockchain, ptr)

    def _update_head(self, block):
        """Persist `block` as the new head, reindex block numbers and reset
        the miner."""
        if not block.is_genesis():
            # only ever advance to a strictly more difficult chain
            assert self.head.chain_difficulty() < block.chain_difficulty()
            if block.get_parent() != self.head:
                log.debug('New Head is on a different branch',
                          head_hash=block, old_head_hash=self.head)
        self.blockchain.put('HEAD', block.hash)
        self.index.update_blocknumbers(self.head)
        self.new_miner()  # reset mining

    def get(self, blockhash):
        """Return the stored block for a 32-byte binary `blockhash`."""
        assert isinstance(blockhash, str)
        assert len(blockhash) == 32
        return blocks.get_block(self.blockchain, blockhash)

    def has_block(self, blockhash):
        """True if a block with this 32-byte binary hash is stored."""
        assert isinstance(blockhash, str)
        assert len(blockhash) == 32
        return blockhash in self.blockchain

    def __contains__(self, blockhash):
        return self.has_block(blockhash)

    def _store_block(self, block):
        # serialized block is keyed by its hash; written out on commit()
        self.blockchain.put(block.hash, block.serialize())

    def commit(self):
        self.blockchain.commit()

    def loop_body(self):
        """One scheduler tick: mine, then sleep so mining consumes roughly
        `pct_cpu` percent of wall time; idle-sleep when mining is off."""
        ts = time.time()
        pct_cpu = self.config.getint('misc', 'mining')
        if pct_cpu > 0:
            self.mine()
            delay = (time.time() - ts) * (100. / pct_cpu - 1)
            # NOTE(review): (100./pct_cpu - 1) < 0 when pct_cpu > 100, so
            # this assert would fire for such configs -- confirm valid range
            assert delay >= 0
            time.sleep(min(delay, 1.))
        else:
            time.sleep(.01)

    def new_miner(self):
        "new miner is initialized if HEAD is updated"
        # prepare uncles
        uncles = set(self.get_uncles(self.head))
        ineligible = set()  # hashes  # NOTE(review): never populated or read
        blk = self.head
        # walk up to 8 ancestors and drop uncles they already included
        for i in range(8):
            for u in blk.uncles:  # assuming uncle headers
                u = utils.sha3(rlp.encode(u))
                if u in self:
                    uncles.discard(self.get(u))
            if blk.has_parent():
                blk = blk.get_parent()
        miner = Miner(self.head, uncles, self.config.get('wallet', 'coinbase'))
        if self.miner:
            # carry queued transactions over to the fresh miner
            for tx in self.miner.get_transactions():
                miner.add_transaction(tx)
        self.miner = miner

    def mine(self):
        """Run one mining attempt; add and forward the block on success,
        rebuild the miner if the freshly mined block is rejected."""
        with self.lock:
            block = self.miner.mine()
            if block:
                # create new block
                if not self.add_block(block, forward=True):
                    log.debug("newly mined block is invalid!?",
                              block_hash=block)
                    self.new_miner()

    def receive_chain(self, transient_blocks, peer=None):
        """Ingest blocks received from the network (sorted oldest first),
        pre-checking header PoW and keeping the synchronizer informed."""
        with self.lock:
            old_head = self.head  # NOTE(review): unused
            # assuming to receive chain order w/ oldest block first
            transient_blocks.sort(key=attrgetter('number'))
            assert transient_blocks[0].number <= transient_blocks[-1].number
            # notify syncer
            self.synchronizer.received_blocks(peer, transient_blocks)
            for t_block in transient_blocks:  # oldest to newest
                log.debug('Checking PoW', block_hash=t_block)
                if not blocks.check_header_pow(t_block.header_args):
                    log.debug('Invalid PoW', block_hash=t_block)
                    continue
                log.debug('Deserializing', block_hash=t_block)
                try:
                    block = blocks.Block.deserialize(self.blockchain,
                                                     t_block.rlpdata)
                except processblock.InvalidTransaction as e:
                    # FIXME there might be another exception in
                    # blocks.deserializeChild when replaying transactions
                    # if this fails, we need to rewind state
                    log.debug('invalid transaction', block_hash=t_block,
                              error=e)
                    # stop current syncing of this chain and skip the child blocks
                    self.synchronizer.stop_synchronization(peer)
                    return
                except blocks.UnknownParentException:
                    if t_block.prevhash == blocks.GENESIS_PREVHASH:
                        log.debug('Rec Incompatible Genesis',
                                  block_hash=t_block)
                        if peer:
                            peer.send_Disconnect(reason='Wrong genesis block')
                    else:  # should be a single newly mined block
                        assert t_block.prevhash not in self
                        assert t_block.prevhash != self.genesis.hash
                        log.debug('unknown parent', block_hash=t_block,
                                  parent_hash=t_block.prevhash.encode('hex'),
                                  remote_id=peer)
                        if len(transient_blocks) != 1:
                            # strange situation here.
                            # we receive more than 1 block, so it's not a single newly mined one
                            # sync/network/... failed to add the needed parent at some point
                            # well, this happens whenever we can't validate a block!
                            # we should disconnect!
                            log.warn(
                                'blocks received, but unknown parent.',
                                num=len(transient_blocks))
                        if peer:
                            # request chain for newest known hash
                            self.synchronizer.synchronize_unknown_block(
                                peer, transient_blocks[-1].hash)
                    break
                if block.hash in self:
                    log.debug('known', block_hash=block)
                else:
                    assert block.has_parent()
                    # assume single block is newly mined block
                    forward = len(transient_blocks) == 1
                    success = self.add_block(block, forward=forward)
                    if success:
                        log.debug('added', block_hash=block)

    def add_block(self, block, forward=False):
        "returns True if block was added successfully"
        _log = log.bind(block_hash=block)
        # make sure we know the parent
        if not block.has_parent() and not block.is_genesis():
            _log.debug('missing parent')
            return False
        if not block.validate_uncles():
            _log.debug('invalid uncles')
            return False
        # check PoW and forward asap in order to avoid stale blocks
        if not len(block.nonce) == 32:
            _log.debug('nonce not set')
            return False
        elif not block.check_proof_of_work(block.nonce) and\
                not block.is_genesis():
            _log.debug('invalid nonce')
            return False
        # Forward block w/ valid PoW asap (if not syncing)
        # FIXME: filter peer by wich block was received
        if forward:
            _log.debug("broadcasting new")
            signals.broadcast_new_block.send(sender=None, block=block)
        if block.has_parent():
            try:
                processblock.verify(block, block.get_parent())
            except processblock.VerificationFailed as e:
                _log.critical('VERIFICATION FAILED', error=e)
                # dump the offending block for post-mortem debugging
                f = os.path.join(utils.data_dir, 'badblock.log')
                open(f, 'w').write(str(block.hex_serialize()))
                return False
        if block.number < self.head.number:
            _log.debug("older than head", head_hash=self.head)
            # Q: Should we have any limitations on adding blocks?
        self.index.add_block(block)
        self._store_block(block)
        # set to head if this makes the longest chain w/ most work for that number
        if block.chain_difficulty() > self.head.chain_difficulty():
            _log.debug('new head')
            self._update_head(block)
        elif block.number > self.head.number:
            _log.warn('has higher blk number than head but lower chain_difficulty',
                      head_hash=self.head,
                      block_difficulty=block.chain_difficulty(),
                      head_difficulty=self.head.chain_difficulty())
        self.commit()  # batch commits all changes that came with the new block
        return True

    def get_children(self, block):
        """Blocks whose parent is `block`, looked up via the index."""
        return [self.get(c) for c in self.index.get_children(block.hash)]

    def get_uncles(self, block):
        """Candidate uncles for a child of `block`: children of up to six
        generations of ancestors, excluding the direct ancestor line."""
        if not block.has_parent():
            return []
        parent = block.get_parent()
        o = []
        i = 0
        while parent.has_parent() and i < 6:
            grandparent = parent.get_parent()
            o.extend(
                [u for u in self.get_children(grandparent) if u != parent])
            parent = grandparent
            i += 1
        return o

    def add_transaction(self, transaction):
        """Queue `transaction` with the miner; broadcast locally if it was
        accepted.  Returns the miner's verdict."""
        _log = log.bind(tx_hash=transaction)
        _log.debug("add transaction")
        with self.lock:
            res = self.miner.add_transaction(transaction)
            if res:
                _log.debug("broadcasting valid")
                signals.send_local_transactions.send(
                    sender=None, transactions=[transaction])
            return res

    def get_transactions(self):
        """Transactions currently queued in the miner."""
        log.debug("get_transactions called")
        return self.miner.get_transactions()

    def get_chain(self, start='', count=NUM_BLOCKS_PER_REQUEST):
        "return 'count' blocks starting from head or start"
        log.debug("get_chain", start=start.encode('hex'), count=count)
        blocks = []
        block = self.head
        if start:
            # NOTE(review): returns [] when `start` IS present in the index
            # db, then calls self.get(start) for hashes NOT found there --
            # the condition looks inverted ('not in'); confirm with callers.
            if start in self.index.db:
                return []
            block = self.get(start)
            if not self.in_main_branch(block):
                return []
        for i in range(count):
            blocks.append(block)
            if block.is_genesis():
                break
            block = block.get_parent()
        return blocks

    def in_main_branch(self, block):
        """True if `block` is the canonical block at its number."""
        try:
            return block.hash == self.index.get_block_by_number(block.number)
        except KeyError:
            return False

    def get_descendants(self, block, count=1):
        """Main-branch blocks following `block` by number.

        NOTE(review): the exclusive upper bound min(head.number,
        block.number + count) yields at most count - 1 blocks and never
        includes the head itself -- possibly an off-by-one; confirm.
        """
        log.debug("get_descendants", block_hash=block)
        assert block.hash in self
        block_numbers = range(block.number + 1,
                              min(self.head.number, block.number + count))
        return [self.get(self.index.get_block_by_number(n))
                for n in block_numbers]
class ChainManager(StoppableLoopThread):

    """
    Manages the chain and requests to it.

    Variant that creates its synchronizer and cached genesis in __init__
    (before configure runs).  Chain state lives in a key/value DB with an
    `Index`; the `Miner` is rebuilt whenever HEAD changes.
    """

    def __init__(self):
        super(ChainManager, self).__init__()
        # initialized after configure
        self.miner = None
        self.blockchain = None
        self.synchronizer = Synchronizer(self)
        # NOTE(review): blocks.genesis() is built at construction time,
        # before configure() opens the DB -- verify this is intended
        self.genesis = blocks.CachedBlock.create_cached(blocks.genesis())

    def configure(self, config, genesis=None):
        """Open the chain DB, optionally (re)initialize it from `genesis`,
        and set up index and miner."""
        self.config = config
        logger.info('Opening chain @ %s', utils.get_db_path())
        db = self.blockchain = DB(utils.get_db_path())
        self.index = Index(db)
        if genesis:
            self._initialize_blockchain(genesis)
        logger.debug('Chain @ #%d %s', self.head.number, self.head.hex_hash())
        self.new_miner()

    @property
    def head(self):
        """Current head block; lazily initializes the chain if the DB has no
        'HEAD' pointer yet."""
        if 'HEAD' not in self.blockchain:
            self._initialize_blockchain()
        ptr = self.blockchain.get('HEAD')
        return blocks.get_block(ptr)

    def _update_head(self, block):
        """Persist `block` as the new head, reindex block numbers and reset
        the miner."""
        if not block.is_genesis():
            # only ever advance to a strictly more difficult chain
            assert self.head.chain_difficulty() < block.chain_difficulty()
            if block.get_parent() != self.head:
                logger.debug('New Head %r is on a different branch. Old was:%r',
                             block, self.head)
        self.blockchain.put('HEAD', block.hash)
        self.index.update_blocknumbers(self.head)
        self.new_miner()  # reset mining

    def get(self, blockhash):
        """Return the stored block for a 32-byte binary `blockhash`."""
        assert isinstance(blockhash, str)
        assert len(blockhash) == 32
        return blocks.get_block(blockhash)

    def has_block(self, blockhash):
        """True if a block with this 32-byte binary hash is stored."""
        assert isinstance(blockhash, str)
        assert len(blockhash) == 32
        return blockhash in self.blockchain

    def __contains__(self, blockhash):
        return self.has_block(blockhash)

    def _store_block(self, block):
        # serialized block is keyed by its hash; written out on commit()
        self.blockchain.put(block.hash, block.serialize())

    def commit(self):
        self.blockchain.commit()

    def _initialize_blockchain(self, genesis=None):
        """Store and index the genesis block and point HEAD at it."""
        logger.info('Initializing new chain @ %s', utils.get_db_path())
        if not genesis:
            genesis = blocks.genesis()
        self.index.add_block(genesis)
        self._store_block(genesis)
        self._update_head(genesis)
        assert genesis.hash in self

    def loop_body(self):
        """One scheduler tick: mine, then sleep so mining consumes roughly
        `pct_cpu` percent of wall time; idle-sleep when mining is off."""
        ts = time.time()
        pct_cpu = self.config.getint('misc', 'mining')
        if pct_cpu > 0:
            self.mine()
            # NOTE(review): no guard here -- if pct_cpu > 100 this delay is
            # negative and time.sleep() would raise; confirm config range
            delay = (time.time() - ts) * (100. / pct_cpu - 1)
            time.sleep(min(delay, 1.))
        else:
            time.sleep(.01)

    def new_miner(self):
        "new miner is initialized if HEAD is updated"
        # prepare uncles
        uncles = set(self.get_uncles(self.head))
        # logger.debug('%d uncles for next block %r', len(uncles), uncles)
        ineligible = set()  # hashes  # NOTE(review): never populated or read
        blk = self.head
        # walk up to 8 ancestors and drop uncles they already included
        for i in range(8):
            for u in blk.uncles:  # assuming uncle headers
                u = utils.sha3(rlp.encode(u))
                if u in self:
                    # logger.debug('ineligible uncle %r', u.encode('hex'))
                    uncles.discard(self.get(u))
            if blk.has_parent():
                blk = blk.get_parent()
        # logger.debug('%d uncles after filtering %r', len(uncles), uncles)
        miner = Miner(self.head, uncles, self.config.get('wallet', 'coinbase'))
        if self.miner:
            # carry queued transactions over to the fresh miner
            for tx in self.miner.get_transactions():
                miner.add_transaction(tx)
        self.miner = miner

    def mine(self):
        """Run one mining attempt; broadcast the block if it was added,
        otherwise rebuild the miner."""
        with self.lock:
            block = self.miner.mine()
            if block:
                # create new block
                if self.add_block(block):
                    logger.debug("broadcasting new %r" % block)
                    signals.broadcast_new_block.send(sender=None, block=block)
                else:
                    self.new_miner()

    def receive_chain(self, transient_blocks, peer=None):
        """Ingest blocks received from the network (sorted oldest first),
        keeping the synchronizer informed about progress/failures."""
        with self.lock:
            old_head = self.head  # NOTE(review): unused
            # assuming to receive chain order w/ oldest block first
            transient_blocks.sort(key=attrgetter('number'))
            assert transient_blocks[0].number <= transient_blocks[-1].number
            # notify syncer
            self.synchronizer.received_blocks(peer, transient_blocks)
            for t_block in transient_blocks:  # oldest to newest
                logger.debug('Deserializing %r', t_block)
                # logger.debug(t_block.rlpdata.encode('hex'))
                try:
                    block = blocks.Block.deserialize(t_block.rlpdata)
                except processblock.InvalidTransaction as e:
                    # FIXME there might be another exception in
                    # blocks.deserializeChild when replaying transactions
                    # if this fails, we need to rewind state
                    logger.debug('%r w/ invalid Transaction %r', t_block, e)
                    # stop current syncing of this chain and skip the child blocks
                    self.synchronizer.stop_synchronization(peer)
                    return
                except blocks.UnknownParentException:
                    if t_block.prevhash == blocks.GENESIS_PREVHASH:
                        logger.debug('Rec Incompatible Genesis %r', t_block)
                        if peer:
                            peer.send_Disconnect(reason='Wrong genesis block')
                    else:  # should be a single newly mined block
                        assert t_block.prevhash not in self
                        assert t_block.prevhash != blocks.genesis().hash
                        logger.debug('%s with unknown parent %s, peer:%r',
                                     t_block, t_block.prevhash.encode('hex'),
                                     peer)
                        if len(transient_blocks) != 1:
                            # strange situation here.
                            # we receive more than 1 block, so it's not a single newly mined one
                            # sync/network/... failed to add the needed parent at some point
                            # well, this happens whenever we can't validate a block!
                            # we should disconnect!
                            logger.warn('%s received, but unknown parent.',
                                        len(transient_blocks))
                        if peer:
                            # request chain for newest known hash
                            self.synchronizer.synchronize_unknown_block(
                                peer, transient_blocks[-1].hash)
                    break
                if block.hash in self:
                    logger.debug('Known %r', block)
                else:
                    assert block.has_parent()
                    success = self.add_block(block)
                    if success:
                        logger.debug('Added %r', block)

    def add_block(self, block):
        """returns True if block was added successfully"""
        # make sure we know the parent
        if not block.has_parent() and not block.is_genesis():
            logger.debug('Missing parent for block %r', block)
            return False
        if not block.validate_uncles():
            logger.debug('Invalid uncles %r', block)
            return False
        # check PoW and forward asap in order to avoid stale blocks
        if not len(block.nonce) == 32:
            logger.debug('Nonce not set %r', block)
            return False
        elif not block.check_proof_of_work(block.nonce) and\
                not block.is_genesis():
            logger.debug('Invalid nonce %r', block)
            return False
        # FIXME: Forward blocks w/ valid PoW asap
        if block.has_parent():
            try:
                # logger.debug('verifying: %s', block)
                # logger.debug('GETTING ACCOUNT FOR COINBASE:')
                # acct = block.get_acct(block.coinbase)
                # logger.debug('GOT ACCOUNT FOR COINBASE: %r', acct)
                processblock.verify(block, block.get_parent())
            except processblock.VerificationFailed as e:
                logger.debug('%r', e)
                return False
        if block.number < self.head.number:
            logger.debug("%r is older than head %r", block, self.head)
            # Q: Should we have any limitations on adding blocks?
        self.index.add_block(block)
        self._store_block(block)
        # set to head if this makes the longest chain w/ most work for that number
        # logger.debug('Head: %r @%s New:%r @%d', self.head, self.head.chain_difficulty(), block, block.chain_difficulty())
        if block.chain_difficulty() > self.head.chain_difficulty():
            logger.debug('New Head %r', block)
            self._update_head(block)
        elif block.number > self.head.number:
            logger.warn('%r has higher blk number than head %r but lower chain_difficulty of %d vs %d',
                        block, self.head, block.chain_difficulty(),
                        self.head.chain_difficulty())
        self.commit()  # batch commits all changes that came with the new block
        return True

    def get_children(self, block):
        """Blocks whose parent is `block`, looked up via the index."""
        return [self.get(c) for c in self.index.get_children(block.hash)]

    def get_uncles(self, block):
        """Candidate uncles for a child of `block`: children of up to six
        generations of ancestors, excluding the direct ancestor line."""
        if not block.has_parent():
            return []
        parent = block.get_parent()
        o = []
        i = 0
        while parent.has_parent() and i < 6:
            grandparent = parent.get_parent()
            o.extend([u for u in self.get_children(grandparent)
                      if u != parent])
            parent = grandparent
            i += 1
        return o

    def add_transaction(self, transaction):
        """Queue `transaction` with the miner; broadcast locally if it was
        accepted.  Returns the miner's verdict."""
        logger.debug("add transaction %r" % transaction)
        with self.lock:
            res = self.miner.add_transaction(transaction)
            if res:
                logger.debug("broadcasting valid %r" % transaction)
                signals.send_local_transactions.send(
                    sender=None, transactions=[transaction])
            return res

    def get_transactions(self):
        """Transactions currently queued in the miner."""
        logger.debug("get_transactions called")
        return self.miner.get_transactions()

    def get_chain(self, start='', count=NUM_BLOCKS_PER_REQUEST):
        "return 'count' blocks starting from head or start"
        logger.debug("get_chain: start:%s count%d", start.encode('hex'), count)
        blocks = []
        block = self.head
        if start:
            # NOTE(review): returns [] when `start` IS present in the index
            # db, then calls self.get(start) for hashes NOT found there --
            # the condition looks inverted ('not in'); confirm with callers.
            if start in self.index.db:
                return []
            block = self.get(start)
            if not self.in_main_branch(block):
                return []
        for i in range(count):
            blocks.append(block)
            if block.is_genesis():
                break
            block = block.get_parent()
        return blocks

    def in_main_branch(self, block):
        """True if `block` is the canonical block at its number."""
        try:
            return block.hash == self.index.get_block_by_number(block.number)
        except KeyError:
            return False

    def get_descendants(self, block, count=1):
        """Main-branch blocks following `block` by number.

        NOTE(review): the exclusive upper bound min(head.number,
        block.number + count) yields at most count - 1 blocks and never
        includes the head itself -- possibly an off-by-one; confirm.
        """
        logger.debug("get_descendants: %r ", block)
        assert block.hash in self
        block_numbers = range(block.number + 1,
                              min(self.head.number, block.number + count))
        return [self.get(self.index.get_block_by_number(n))
                for n in block_numbers]
class ChainManager(StoppableLoopThread):

    """
    Manages the chain and requests to it.

    Structlog-style variant: logging via keyword calls on `log`, DB attached
    through configure(db=...), and blocks with valid PoW optionally forwarded
    (`forward=True`) to peers before full verification to avoid staleness.
    """

    # initialized after configure:
    genesis = None
    index = None
    miner = None
    blockchain = None
    synchronizer = None
    config = None

    def __init__(self):
        super(ChainManager, self).__init__()

    def configure(self, config, genesis=None, db=None):
        """Attach (or open) the chain DB, optionally reinitialize from
        `genesis`, and set up index, genesis, miner and synchronizer."""
        self.config = config
        if not db:
            db_path = utils.db_path(config.get('misc', 'data_dir'))
            log.info('opening chain', db_path=db_path)
            db = self.blockchain = DB(db_path)
        self.blockchain = db
        self.index = Index(db)
        if genesis:
            self._initialize_blockchain(genesis)
        log.debug('chain @', head_hash=self.head)
        self.genesis = blocks.genesis(db=db)
        log.debug('got genesis', genesis_hash=self.genesis)
        self.new_miner()
        self.synchronizer = Synchronizer(self)

    def _initialize_blockchain(self, genesis=None):
        """Store and index the genesis block and point HEAD at it."""
        log.info('Initializing new chain')
        if not genesis:
            genesis = blocks.genesis(self.blockchain)
            log.info('new genesis', genesis_hash=genesis)
        self.index.add_block(genesis)
        self._store_block(genesis)
        # round-trip check: stored block must deserialize back to genesis
        assert genesis == blocks.get_block(self.blockchain, genesis.hash)
        self._update_head(genesis)
        assert genesis.hash in self

    @property
    def head(self):
        """Current head block; bootstraps config and chain state on first
        access when necessary."""
        if not self.config:
            self.configure(config.read_config())
        if not self.blockchain or 'HEAD' not in self.blockchain:
            self._initialize_blockchain()
        ptr = self.blockchain.get('HEAD')
        return blocks.get_block(self.blockchain, ptr)

    def _update_head(self, block):
        """Persist `block` as the new head, reindex block numbers and reset
        the miner."""
        if not block.is_genesis():
            # only ever advance to a strictly more difficult chain
            assert self.head.chain_difficulty() < block.chain_difficulty()
            if block.get_parent() != self.head:
                log.debug('New Head is on a different branch',
                          head_hash=block, old_head_hash=self.head)
        self.blockchain.put('HEAD', block.hash)
        self.index.update_blocknumbers(self.head)
        self.new_miner()  # reset mining

    def get(self, blockhash):
        """Return the stored block for a 32-byte binary `blockhash`."""
        assert isinstance(blockhash, str)
        assert len(blockhash) == 32
        return blocks.get_block(self.blockchain, blockhash)

    def has_block(self, blockhash):
        """True if a block with this 32-byte binary hash is stored."""
        assert isinstance(blockhash, str)
        assert len(blockhash) == 32
        return blockhash in self.blockchain

    def __contains__(self, blockhash):
        return self.has_block(blockhash)

    def _store_block(self, block):
        # serialized block is keyed by its hash; written out on commit()
        self.blockchain.put(block.hash, block.serialize())

    def commit(self):
        self.blockchain.commit()

    def loop_body(self):
        """One scheduler tick: mine, then sleep so mining consumes roughly
        `pct_cpu` percent of wall time; idle-sleep when mining is off."""
        ts = time.time()
        pct_cpu = self.config.getint('misc', 'mining')
        if pct_cpu > 0:
            self.mine()
            delay = (time.time() - ts) * (100. / pct_cpu - 1)
            # NOTE(review): (100./pct_cpu - 1) < 0 when pct_cpu > 100, so
            # this assert would fire for such configs -- confirm valid range
            assert delay >= 0
            time.sleep(min(delay, 1.))
        else:
            time.sleep(.01)

    def new_miner(self):
        "new miner is initialized if HEAD is updated"
        # prepare uncles
        uncles = set(self.get_uncles(self.head))
        ineligible = set()  # hashes  # NOTE(review): never populated or read
        blk = self.head
        # walk up to 8 ancestors and drop uncles they already included
        for i in range(8):
            for u in blk.uncles:  # assuming uncle headers
                u = utils.sha3(rlp.encode(u))
                if u in self:
                    uncles.discard(self.get(u))
            if blk.has_parent():
                blk = blk.get_parent()
        miner = Miner(self.head, uncles, self.config.get('wallet', 'coinbase'))
        if self.miner:
            # carry queued transactions over to the fresh miner
            for tx in self.miner.get_transactions():
                miner.add_transaction(tx)
        self.miner = miner

    def mine(self):
        """Run one mining attempt; add and forward the block on success,
        rebuild the miner if the freshly mined block is rejected."""
        with self.lock:
            block = self.miner.mine()
            if block:
                # create new block
                if not self.add_block(block, forward=True):
                    log.debug("newly mined block is invalid!?",
                              block_hash=block)
                    self.new_miner()

    def receive_chain(self, transient_blocks, peer=None):
        """Ingest blocks received from the network (sorted oldest first),
        pre-checking header PoW and keeping the synchronizer informed."""
        with self.lock:
            old_head = self.head  # NOTE(review): unused
            # assuming to receive chain order w/ oldest block first
            transient_blocks.sort(key=attrgetter('number'))
            assert transient_blocks[0].number <= transient_blocks[-1].number
            # notify syncer
            self.synchronizer.received_blocks(peer, transient_blocks)
            for t_block in transient_blocks:  # oldest to newest
                log.debug('Checking PoW', block_hash=t_block)
                if not blocks.check_header_pow(t_block.header_args):
                    log.debug('Invalid PoW', block_hash=t_block)
                    continue
                log.debug('Deserializing', block_hash=t_block)
                try:
                    block = blocks.Block.deserialize(self.blockchain,
                                                     t_block.rlpdata)
                except processblock.InvalidTransaction as e:
                    # FIXME there might be another exception in
                    # blocks.deserializeChild when replaying transactions
                    # if this fails, we need to rewind state
                    log.debug('invalid transaction', block_hash=t_block,
                              error=e)
                    # stop current syncing of this chain and skip the child blocks
                    self.synchronizer.stop_synchronization(peer)
                    return
                except blocks.UnknownParentException:
                    if t_block.prevhash == blocks.GENESIS_PREVHASH:
                        log.debug('Rec Incompatible Genesis',
                                  block_hash=t_block)
                        if peer:
                            peer.send_Disconnect(reason='Wrong genesis block')
                    else:  # should be a single newly mined block
                        assert t_block.prevhash not in self
                        assert t_block.prevhash != self.genesis.hash
                        log.debug('unknown parent', block_hash=t_block,
                                  parent_hash=t_block.prevhash.encode('hex'),
                                  remote_id=peer)
                        if len(transient_blocks) != 1:
                            # strange situation here.
                            # we receive more than 1 block, so it's not a single newly mined one
                            # sync/network/... failed to add the needed parent at some point
                            # well, this happens whenever we can't validate a block!
                            # we should disconnect!
                            log.warn('blocks received, but unknown parent.',
                                     num=len(transient_blocks))
                        if peer:
                            # request chain for newest known hash
                            self.synchronizer.synchronize_unknown_block(
                                peer, transient_blocks[-1].hash)
                    break
                if block.hash in self:
                    log.debug('known', block_hash=block)
                else:
                    assert block.has_parent()
                    # assume single block is newly mined block
                    forward = len(transient_blocks) == 1
                    success = self.add_block(block, forward=forward)
                    if success:
                        log.debug('added', block_hash=block)

    def add_block(self, block, forward=False):
        "returns True if block was added successfully"
        _log = log.bind(block_hash=block)
        # make sure we know the parent
        if not block.has_parent() and not block.is_genesis():
            _log.debug('missing parent')
            return False
        if not block.validate_uncles():
            _log.debug('invalid uncles')
            return False
        # check PoW and forward asap in order to avoid stale blocks
        if not len(block.nonce) == 32:
            _log.debug('nonce not set')
            return False
        elif not block.check_proof_of_work(block.nonce) and\
                not block.is_genesis():
            _log.debug('invalid nonce')
            return False
        # Forward block w/ valid PoW asap (if not syncing)
        # FIXME: filter peer by wich block was received
        if forward:
            _log.debug("broadcasting new")
            signals.broadcast_new_block.send(sender=None, block=block)
        if block.has_parent():
            try:
                processblock.verify(block, block.get_parent())
            except processblock.VerificationFailed as e:
                _log.critical('VERIFICATION FAILED', error=e)
                # dump the offending block for post-mortem debugging
                f = os.path.join(utils.data_dir, 'badblock.log')
                open(f, 'w').write(str(block.hex_serialize()))
                return False
        if block.number < self.head.number:
            _log.debug("older than head", head_hash=self.head)
            # Q: Should we have any limitations on adding blocks?
        self.index.add_block(block)
        self._store_block(block)
        # set to head if this makes the longest chain w/ most work for that number
        if block.chain_difficulty() > self.head.chain_difficulty():
            _log.debug('new head')
            self._update_head(block)
        elif block.number > self.head.number:
            _log.warn(
                'has higher blk number than head but lower chain_difficulty',
                head_hash=self.head,
                block_difficulty=block.chain_difficulty(),
                head_difficulty=self.head.chain_difficulty())
        self.commit()  # batch commits all changes that came with the new block
        return True

    def get_children(self, block):
        """Blocks whose parent is `block`, looked up via the index."""
        return [self.get(c) for c in self.index.get_children(block.hash)]

    def get_uncles(self, block):
        """Candidate uncles for a child of `block`: children of up to six
        generations of ancestors, excluding the direct ancestor line."""
        if not block.has_parent():
            return []
        parent = block.get_parent()
        o = []
        i = 0
        while parent.has_parent() and i < 6:
            grandparent = parent.get_parent()
            o.extend(
                [u for u in self.get_children(grandparent) if u != parent])
            parent = grandparent
            i += 1
        return o

    def add_transaction(self, transaction):
        """Queue `transaction` with the miner; broadcast locally if it was
        accepted.  Returns the miner's verdict."""
        _log = log.bind(tx_hash=transaction)
        _log.debug("add transaction")
        with self.lock:
            res = self.miner.add_transaction(transaction)
            if res:
                _log.debug("broadcasting valid")
                signals.send_local_transactions.send(
                    sender=None, transactions=[transaction])
            return res

    def get_transactions(self):
        """Transactions currently queued in the miner."""
        log.debug("get_transactions called")
        return self.miner.get_transactions()

    def get_chain(self, start='', count=NUM_BLOCKS_PER_REQUEST):
        "return 'count' blocks starting from head or start"
        log.debug("get_chain", start=start.encode('hex'), count=count)
        blocks = []
        block = self.head
        if start:
            # NOTE(review): returns [] when `start` IS present in the index
            # db, then calls self.get(start) for hashes NOT found there --
            # the condition looks inverted ('not in'); confirm with callers.
            if start in self.index.db:
                return []
            block = self.get(start)
            if not self.in_main_branch(block):
                return []
        for i in range(count):
            blocks.append(block)
            if block.is_genesis():
                break
            block = block.get_parent()
        return blocks

    def in_main_branch(self, block):
        """True if `block` is the canonical block at its number."""
        try:
            return block.hash == self.index.get_block_by_number(block.number)
        except KeyError:
            return False

    def get_descendants(self, block, count=1):
        """Main-branch blocks following `block` by number.

        NOTE(review): the exclusive upper bound min(head.number,
        block.number + count) yields at most count - 1 blocks and never
        includes the head itself -- possibly an off-by-one; confirm.
        """
        log.debug("get_descendants", block_hash=block)
        assert block.hash in self
        block_numbers = range(block.number + 1,
                              min(self.head.number, block.number + count))
        return [self.get(self.index.get_block_by_number(n))
                for n in block_numbers]