def recv_peers(self, json_data):
    """
    Receive Peers

    Saves the received peer list and opens a TCP connection to every
    previously-unknown peer (excluding our own host).

    :param json_data: JSON-encoded list of peer IP addresses.
    :return: None
    """
    if not config.user.enable_peer_discovery:
        return

    data = helper.json_decode(json_data)

    # De-duplicate while preserving order; the set makes each membership
    # test O(1) instead of the original O(n) list scan (O(n^2) overall).
    seen = set()
    new_ips = []
    for ip in data:
        if ip not in seen:
            seen.add(ip)
            new_ips.append(ip)

    peer_addresses = self.factory.node.peer_addresses
    logger.info('%s peers data received: %s', self.transport.getPeer().host, new_ips)

    for node in new_ips:
        if node not in peer_addresses:
            if node != self.transport.getHost().host:
                peer_addresses.append(node)
                # NOTE(review): port 9000 is hard-coded here; presumably the
                # protocol's default p2p port — confirm against config.
                reactor.connectTCP(node, 9000, self.factory)

    self.factory.node.update_peer_addresses(peer_addresses)
    return
def BM(self, data=None):  # blockheight map for synchronisation and error correction prior to POS cycle resync..
    """
    Blockheight Map

    Maps each peer to its reported blockheight. Called without data, the
    node broadcasts its own latest block number and headerhash instead.
    :return:
    """
    if not data:
        logger.info('<<< Sending block_map %s', self.transport.getPeer().host)
        last_header = self.factory.chain.m_blockchain[-1].blockheader
        payload = {'block_number': last_header.blocknumber,
                   'headerhash': last_header.headerhash}
        self.transport.write(self.wrap_message('BM', helper.json_encode(payload)))
        return

    logger.info('>>> Receiving block_map')
    decoded = helper.json_decode(data)
    # [block_number, headerhash-bytes, peer-host] triple for the map
    entry = [decoded['block_number'],
             decoded['headerhash'].encode('latin1'),
             self.transport.getPeer().host]
    logger.info('%s', entry)
    if entry not in self.factory.chain.blockheight_map:
        self.factory.chain.blockheight_map.append(entry)
    return
def VE(self, data=None):
    """
    Version

    With no data, sends this node's version and genesis_prev_headerhash.
    Otherwise validates the peer's genesis_prev_headerhash and drops the
    connection on a mismatch (or on malformed data).
    :return:
    """
    if not data:
        payload = {
            'version': config.dev.version_number,
            'genesis_prev_headerhash': config.dev.genesis_prev_headerhash
        }
        self.transport.write(self.wrap_message('VE', helper.json_encode(payload)))
        return

    try:
        details = helper.json_decode(data)
        self.version = str(details['version'])
        logger.info('%s version: %s | genesis prev_headerhash %s',
                    self.transport.getPeer().host,
                    details['version'],
                    details['genesis_prev_headerhash'])

        # Matching genesis hash: compatible peer, keep the connection.
        if details['genesis_prev_headerhash'] == config.dev.genesis_prev_headerhash:
            return

        logger.warning('%s genesis_prev_headerhash mismatch', self.conn_identity)
        logger.warning('Expected: %s', config.dev.genesis_prev_headerhash)
        logger.warning('Found: %s', details['genesis_prev_headerhash'])
    except Exception as e:
        logger.error('Peer Caused Exception %s', self.conn_identity)
        logger.exception(e)

    # Reached only on mismatch or malformed data: disconnect the peer.
    self.transport.loseConnection()
    return
def VE(self, data=None):
    """
    Version

    With no data, sends this node's version and genesis_prev_headerhash.
    Otherwise validates the peer's genesis_prev_headerhash and drops the
    connection on a mismatch (or on malformed data).

    :param data: optional JSON payload received from the peer.
    :return: None
    """
    if not data:
        version_details = {
            'version': config.dev.version_number,
            'genesis_prev_headerhash': config.dev.genesis_prev_headerhash
        }
        self.transport.write(
            self.wrap_message('VE', helper.json_encode(version_details)))
    else:
        try:
            data = helper.json_decode(data)
            self.version = str(data['version'])
            logger.info('%s version: %s | genesis prev_headerhash %s',
                        self.transport.getPeer().host,
                        data['version'],
                        data['genesis_prev_headerhash'])

            if data['genesis_prev_headerhash'] == config.dev.genesis_prev_headerhash:
                return
            logger.warning('%s genesis_prev_headerhash mismatch', self.identity)
            # FIX: format strings were missing the %s placeholder, so the
            # expected/found hashes were never rendered into the log output.
            logger.warning('Expected: %s', config.dev.genesis_prev_headerhash)
            logger.warning('Found: %s', data['genesis_prev_headerhash'])
        except Exception as e:
            logger.error('Peer Caused Exception %s', self.identity)
            logger.exception(e)

        # Mismatch or malformed data: disconnect the peer.
        self.transport.loseConnection()
    return
def BM(self, data=None):  # blockheight map for synchronisation and error correction prior to POS cycle resync..
    """
    Blockheight Map exchange.

    With data: records the peer's (block_number, headerhash, host) triple
    in chain.blockheight_map, skipping duplicates. Without data: sends our
    own latest block number and headerhash to the peer.
    """
    if data:
        logger.info('>>> Receiving block_map')
        payload = helper.json_decode(data)
        entry = [payload['block_number'],
                 payload['headerhash'].encode('latin1'),
                 self.transport.getPeer().host]
        logger.info('%s', entry)
        if entry not in self.factory.chain.blockheight_map:
            self.factory.chain.blockheight_map.append(entry)
        return

    logger.info('<<< Sending block_map %s', self.transport.getPeer().host)
    head = self.factory.chain.m_blockchain[-1].blockheader
    self.transport.write(self.wrap_message('BM', helper.json_encode({
        'block_number': head.blocknumber,
        'headerhash': head.headerhash
    })))
    return
def PBB(self, data):
    """
    Push Block Buffer

    This function executes while syncing blocks from other peers.
    Blocks received here are added directly into chain.block_chain_buffer,
    so multiple blocks sharing the same blocknumber are expected.
    :return:
    """
    # Record the time of the last push-block message for sync timeout logic.
    self.factory.pos.last_pb_time = time.time()
    try:
        if self.isNoMoreBlock(data):
            return

        data = helper.json_decode(data)
        # The payload is keyed by blocknumber; exactly one key is expected.
        # NOTE(review): int() on the result of .encode('ascii') only works on
        # Python 2 (bytes); under Python 3 this raises — confirm target runtime.
        blocknumber = int(list(data.keys())[0].encode('ascii'))

        # Ignore unsolicited blocknumbers — we only accept what we asked for.
        if blocknumber != self.last_requested_blocknum:
            logger.info('Blocknumber not found in pending_blocks %s %s',
                        blocknumber,
                        self.conn_identity)
            return

        for jsonBlock in data[str(blocknumber)]:
            block = Block.from_json(json.dumps(jsonBlock))
            logger.info('>>>Received Block #%s', block.blockheader.blocknumber)

            status = self.factory.chain.block_chain_buffer.add_block(block)
            # add_block returning False (strictly boolean) means rejection;
            # skip this candidate block and try the next one for the same height.
            if type(status) == bool and not status:
                logger.info("[PBB] Failed to add block by add_block, re-requesting the block #%s",
                            blocknumber)
                logger.info('Skipping one block')
                continue

            # Cancel any pending re-download timer; best-effort only.
            try:
                reactor.download_block.cancel()
            except Exception:
                pass

            # Below code is to stop downloading, once we see that we reached
            # to blocknumber that are in pending_blocks
            # This could be exploited by sybil node, to send blocks in
            # pending_blocks in order to disrupt downloading
            # TODO: requires a better fix
            if len(self.factory.chain.block_chain_buffer.pending_blocks) > 0 and \
                    min(self.factory.chain.block_chain_buffer.pending_blocks.keys()) == blocknumber:
                self.factory.chain.block_chain_buffer.process_pending_blocks()
                return
            # Otherwise keep syncing: request the next block from a random peer.
            self.factory.pos.randomize_block_fetch(blocknumber + 1)

    except Exception as e:
        logger.error('block rejected - unable to decode serialised data %s',
                     self.transport.getPeer().host)
        logger.exception(e)
    return
def PMBH(self, data):  # Push Maximum Blockheight and Headerhash
    """
    Push Maximum Blockheight and Headerhash

    Records the peer's reported best blockheight/headerhash, grouping
    peers by the headerhash they report.
    :return:
    """
    payload = helper.json_decode(data)
    if not payload or 'headerhash' not in payload or 'blocknumber' not in payload:
        return

    pos = self.factory.pos
    if self.identity in pos.fmbh_allowed_peers:
        pos.fmbh_allowed_peers[self.identity] = payload

        key = payload['headerhash']
        if key not in pos.fmbh_blockhash_peers:
            pos.fmbh_blockhash_peers[key] = {
                'blocknumber': payload['blocknumber'],
                'peers': []
            }
        pos.fmbh_blockhash_peers[key]['peers'].append(self)
def CB(self, data):
    """
    Check Blockheight

    Processes a peer's reported blockheight/headerhash: records it per peer,
    flags a headerhash mismatch at our own height (potential fork), and
    triggers the genesis PoS countdown when only the genesis block exists.
    :return:
    """
    z = helper.json_decode(data)
    block_number = z['block_number']
    # headerhash arrives JSON-encoded; latin1 round-trips the raw byte values
    headerhash = z['headerhash'].encode('latin1')

    self.blockheight = block_number

    logger.info('>>>Blockheight from: %s blockheight: %s local blockheight: %s %s',
                self.transport.getPeer().host, block_number,
                self.factory.chain.m_blockheight(), str(time.time()))

    # Keyed by "host:port" so multiple connections from one host stay distinct.
    self.factory.peers_blockheight[self.transport.getPeer().host + ':' + str(self.transport.getPeer().port)] = z['block_number']

    # While syncing, blockheight reports are recorded but not acted upon.
    if self.factory.nodeState.state == NState.syncing:
        return

    if block_number == self.factory.chain.m_blockheight():
        # if self.factory.chain.m_blockchain[block_number].blockheader.headerhash != headerhash:
        if self.factory.chain.m_get_block(block_number).blockheader.headerhash != headerhash:
            logger.info('>>> WARNING: headerhash mismatch from %s',
                        self.transport.getPeer().host)
            # initiate fork recovery and protection code here..
            # call an outer function which sets a flag and scrutinises the chains
            # from all connected hosts to see what is going on..
            # again need to think this one through in detail..
            return

    # Peer is ahead of us: nothing to do here (sync is handled elsewhere).
    if block_number > self.factory.chain.m_blockheight():
        return

    if len(self.factory.chain.m_blockchain) == 1 and self.factory.genesis == 0:
        # set the flag so that no other Protocol instances trigger the
        # genesis stake functions..
        self.factory.genesis = 1
        logger.info('genesis pos countdown to block 1 begun, 60s until stake tx circulated..')
        reactor.callLater(1, self.factory.pos.pre_pos_1)
        return

    # connected to multiple hosts and already passed through..
    elif len(self.factory.chain.m_blockchain) == 1 and self.factory.genesis == 1:
        return
def PMBH(self, data):  # Push Maximum Blockheight and Headerhash
    """
    Push Maximum Blockheight and Headerhash

    Function processes the received maximum blockheight and headerhash,
    grouping reporting peers by headerhash.
    :return:
    """
    payload = helper.json_decode(data)
    if not payload or 'headerhash' not in payload or 'blocknumber' not in payload:
        return

    # Only peers on the allowed list may vote.
    if self.conn_identity not in self.factory.pos.fmbh_allowed_peers:
        return

    self.factory.pos.fmbh_allowed_peers[self.conn_identity] = payload

    # JSON yields the headerhash as a list; a tuple makes it dict-hashable.
    hash_key = tuple(payload['headerhash'])
    blockhash_peers = self.factory.pos.fmbh_blockhash_peers
    if hash_key not in blockhash_peers:
        blockhash_peers[hash_key] = {
            'blocknumber': payload['blocknumber'],
            'peers': []
        }
    blockhash_peers[hash_key]['peers'].append(self)
def CB(self, data):
    """
    Check Blockheight

    Processes a peer's reported blockheight/headerhash: records it per peer,
    warns on a headerhash mismatch at our own height (potential fork), and
    triggers the genesis PoS countdown when the chain height is 1.
    :return:

    # FIXME: This test grew too much. Convert doctest into unit test using mocks
    >>> from collections import namedtuple, defaultdict
    >>> p=P2PProtocol()
    >>> Transport = namedtuple("Transport", "getPeer write")
    >>> Peer = namedtuple("Peer", "host port")
    >>> Factory = namedtuple("Factory", "peers_blockheight chain nodeState")
    >>> Chain = namedtuple("Chain", "m_blockchain m_blockheight")
    >>> def getPeer():
    ...     return Peer("host", 1234)
    >>> message = None
    >>> def write(msg):
    ...     global message
    ...     message = msg
    >>> def m_blockheight():
    ...     return 0
    >>> p.transport = Transport(getPeer, write)
    >>> p.chain = Chain([], m_blockheight)
    >>> tmp = NodeState()
    >>> tmp.state = NState.synced
    >>> p.factory = Factory(defaultdict(), p.chain, tmp)
    >>> p.CB('{"block_number": 3, "headerhash": [53, 130, 168, 57, 183, 215, 120, 178, 209, 30, 194, 223, 221, 58, 72, 124, 62, 148, 110, 81, 19, 189, 27, 243, 218, 87, 217, 203, 198, 97, 84, 19]}')
    """
    z = helper.json_decode(data)
    block_number = z['block_number']
    # JSON yields the headerhash as a list of ints; tuple for comparison.
    headerhash = tuple(z['headerhash'])

    # Keyed by "host:port" so multiple connections from one host stay distinct.
    tmp = "{}:{}".format(self.transport.getPeer().host,
                         self.transport.getPeer().port)
    self.factory.peers_blockheight[tmp] = z['block_number']

    self.blockheight = block_number

    logger.info('>>>Blockheight from: %s blockheight: %s local blockheight: %s %s',
                self.transport.getPeer().host, block_number,
                self.factory.chain.m_blockheight(), str(time.time()))

    # While syncing, blockheight reports are recorded but not acted upon.
    if self.factory.nodeState.state == NState.syncing:
        return

    if block_number == self.factory.chain.m_blockheight():
        # if self.factory.chain.m_blockchain[block_number].blockheader.headerhash != headerhash:
        if self.factory.chain.m_get_block(block_number).blockheader.headerhash != headerhash:
            logger.warning('>>> headerhash mismatch from %s',
                           self.transport.getPeer().host)
            # initiate fork recovery and protection code here..
            # call an outer function which sets a flag and scrutinises the chains
            # from all connected hosts to see what is going on..
            # again need to think this one through in detail..
            return

    # Peer is ahead of us: nothing to do here (sync is handled elsewhere).
    if block_number > self.factory.chain.m_blockheight():
        return

    if self.factory.chain.height() == 1 and self.factory.genesis == 0:
        # set the flag so that no other Protocol instances trigger the
        # genesis stake functions..
        self.factory.genesis = 1
        logger.info('genesis pos countdown to block 1 begun, 60s until stake tx circulated..')
        reactor.callLater(1, self.factory.pos.pre_pos_1)
        return

    # connected to multiple hosts and already passed through..
    elif self.factory.chain.height() == 1 and self.factory.genesis == 1:
        return