def m_create_block(self, reveal_hash, vote_hash, last_block_number=-1):
    """Create a new block and persist the slave XMSS used for the next height.

    Builds the block via Block.create, then fetches the slave XMSS tree
    for height last_block_number + 1 and saves it to the wallet so the
    signing state survives a restart.
    """
    new_block = Block()
    new_block.create(self, reveal_hash, vote_hash, last_block_number)

    next_height = last_block_number + 1
    slave_xmss = self.block_chain_buffer.get_slave_xmss(next_height)
    self.wallet.save_slave(slave_xmss)

    return new_block
def m_create_block(self, nonce, reveal_list=None, vote_hashes=None, last_block_number=-1):
    """Create and return a new block for the given nonce.

    reveal_list and vote_hashes default to None and are forwarded
    unchanged to Block.create.
    """
    block = Block()
    block.create(self, nonce, reveal_list, vote_hashes, last_block_number)
    return block
def PB(self, data):
    # Push Block: handles one requested block received while syncing.
    # NOTE(review): near-duplicate of the documented PB() elsewhere in this
    # file; this variant adds via block_chain_buffer.add_block_mainchain and
    # logs self.identity instead of self.conn_identity.
    self.factory.pos.last_pb_time = time.time()  # record sync activity for the watchdog
    try:
        if self.isNoMoreBlock(data):
            # Peer signalled it has no further blocks; stop requesting.
            return

        block = Block.from_json(data)

        blocknumber = block.blockheader.blocknumber
        logger.info('>>> Received Block #%d', blocknumber)
        if blocknumber != self.last_requested_blocknum:
            # Ignore blocks we did not ask for (stale or misbehaving peer).
            logger.warning('Did not match %s %s', self.last_requested_blocknum, self.identity)
            return

        if blocknumber > self.factory.chain.height():
            # Only append blocks that extend beyond our current height.
            if not self.factory.chain.block_chain_buffer.add_block_mainchain(block):
                logger.warning('PB failed to add block to mainchain')
                return

        try:
            # Cancel the stalled-download timer; it may not be pending.
            reactor.download_monitor.cancel()
        except Exception as e:
            logger.warning("PB: %s", e)

        # Request the next block in sequence.
        self.factory.pos.randomize_block_fetch(blocknumber + 1)

    except Exception as e:
        # NOTE(review): broad catch — any failure above is reported as a
        # decode error, which can be misleading; behavior kept as-is.
        logger.error('block rejected - unable to decode serialised data %s', self.transport.getPeer().host)
        logger.exception(e)
    return
def PBB(self, data):
    """
    Push Block Buffer

    This function executes while syncing block from other peers.
    Blocks received by this function, directly added into
    chain.block_chain_buffer. So it is expected to receive multiple of
    blocks having same blocknumber.
    :return:
    """
    self.factory.pos.last_pb_time = time.time()  # record sync activity for the watchdog
    try:
        if self.isNoMoreBlock(data):
            return

        data = helper.json_decode(data)
        # Payload shape: {"<blocknumber>": [block, block, ...]} — a peer may
        # send several candidate blocks for the same height.
        blocknumber = int(list(data.keys())[0].encode('ascii'))

        if blocknumber != self.last_requested_blocknum:
            logger.info('Blocknumber not found in pending_blocks %s %s',
                        blocknumber,
                        self.conn_identity)
            return

        for jsonBlock in data[str(blocknumber)]:
            block = Block.from_json(json.dumps(jsonBlock))
            logger.info('>>>Received Block #%s', block.blockheader.blocknumber)

            status = self.factory.chain.block_chain_buffer.add_block(block)
            # add_block may return a non-bool status value; only the exact
            # boolean False means the block was rejected (idiomatic identity
            # check replaces the old `type(status) == bool and not status`).
            if status is False:
                logger.info("[PBB] Failed to add block by add_block, re-requesting the block #%s", blocknumber)
                logger.info('Skipping one block')
                continue

        try:
            # Timer may already have fired or been cancelled; best-effort.
            reactor.download_block.cancel()
        except Exception:
            pass

        # Below code is to stop downloading, once we see that we reached to
        # blocknumber that are in pending_blocks
        # This could be exploited by sybil node, to send blocks in
        # pending_blocks in order to disrupt downloading
        # TODO: requires a better fix
        pending = self.factory.chain.block_chain_buffer.pending_blocks
        if len(pending) > 0 and min(pending.keys()) == blocknumber:
            self.factory.chain.block_chain_buffer.process_pending_blocks()
            return
        self.factory.pos.randomize_block_fetch(blocknumber + 1)
    except Exception as e:
        logger.error('block rejected - unable to decode serialised data %s', self.transport.getPeer().host)
        logger.exception(e)
    return
def load_from_file(self, blocknum):
    """Load a single block from its on-disk epoch chain file.

    The block metadata index supplies the byte offset and compressed
    size of the block within the file.
    """
    epoch = int(blocknum // config.dev.blocks_per_chain_file)
    offset, size = self.get_block_metadata(blocknum)
    with open(self.get_chaindatafile(epoch), 'rb') as chain_file:
        chain_file.seek(offset)
        raw_json = bz2.decompress(chain_file.read(size))
    return Block.from_json(raw_json)
def load_from_file(self, blocknum):
    """Load a block by number, reading its offset/size from the state DB."""
    epoch = int(blocknum // config.dev.blocks_per_chain_file)
    with open(self.get_chaindatafile(epoch), 'rb') as chain_file:
        # The DB stores "offset,size" under the 'block_<n>' key.
        pos_size = self.state.db.db.Get('block_' + str(blocknum))
        pos, size = (int(part) for part in pos_size.split(','))
        chain_file.seek(pos)
        jsonBlock = bz2.decompress(chain_file.read(size))
        return Block.from_json(jsonBlock)
def load_from_file(self, blocknum):
    """Return the block stored at *blocknum* from its epoch chain file."""
    epoch = int(blocknum // config.dev.blocks_per_chain_file)
    offset, size = self.get_block_metadata(blocknum)
    # FIXME: Accessing DB directly
    with open(self.get_chaindatafile(epoch), 'rb') as source:
        source.seek(offset)
        decompressed = bz2.decompress(source.read(size))
    return Block.from_json(decompressed)
def f_read_chain(self, epoch):
    """Read every block of *epoch*'s chain file from disk.

    Blocks are stored bz2-compressed, separated by
    config.dev.binary_file_delimiter. For epoch 0 with no file on disk
    a fresh genesis block is returned instead.

    :param epoch: index of the chain file to read
    :return: list of Block objects ([] on I/O error or missing file)
    """
    delimiter = config.dev.binary_file_delimiter
    block_list = []
    if not os.path.isfile(self.get_chaindatafile(epoch)):
        if epoch != 0:
            return []
        logger.info('Creating new chain file')
        genesis_block = GenesisBlock().set_chain(self)
        block_list.append(genesis_block)
        return block_list
    try:
        with open(self.get_chaindatafile(epoch), 'rb') as myfile:
            jsonBlock = bytearray()  # compressed bytes of the block being assembled
            tmp = bytearray()        # bytes tentatively matched against the delimiter
            count = 0                # number of delimiter bytes matched so far
            offset = 0               # absolute file offset of the current byte
            while True:
                chars = myfile.read(config.dev.chain_read_buffer_size)
                for char in chars:
                    offset += 1
                    if count > 0 and char != delimiter[count]:
                        # Partial delimiter match failed: the buffered bytes
                        # belong to the block body after all.
                        count = 0
                        jsonBlock += tmp
                        tmp = bytearray()
                    if char == delimiter[count]:
                        tmp.append(delimiter[count])
                        count += 1
                        if count < len(delimiter):
                            continue
                        # Full delimiter seen: jsonBlock holds one complete
                        # compressed block.
                        tmp = bytearray()
                        count = 0
                        compressed_size = len(jsonBlock)
                        # File offset of the compressed block's first byte.
                        pos = offset - len(delimiter) - compressed_size
                        block = Block.from_json(bz2.decompress(jsonBlock))
                        # BUGFIX: record the COMPRESSED size. Previously
                        # len(jsonBlock) was taken after rebinding jsonBlock
                        # to the decompressed data, so load_from_file would
                        # read the wrong number of bytes. The sibling
                        # StringIO implementation stores len(compressedBlock).
                        self.update_block_metadata(block.blockheader.blocknumber, pos, compressed_size)
                        block_list.append(block)
                        jsonBlock = bytearray()
                        continue
                    jsonBlock.append(char)
                if len(chars) < config.dev.chain_read_buffer_size:
                    # Short read means EOF.
                    break
    except Exception as e:
        logger.error('IO error %s', e)
        return []
    return block_list
def BK(self, data):  # block received
    """
    Block

    This function processes any new block received.
    :return:
    """
    try:
        block = Block.from_json(data)
    except Exception as e:
        logger.error('block rejected - unable to decode serialised data %s', self.transport.getPeer().host)
        logger.exception(e)
        return

    logger.info('>>>Received block from %s %s %s',
                self.conn_identity,
                block.blockheader.blocknumber,
                block.blockheader.stake_selector)
    # Only process blocks we actually requested from this peer (MR protocol).
    if not self.factory.master_mr.isRequested(block.blockheader.headerhash, self, block):
        return

    block_chain_buffer = self.factory.chain.block_chain_buffer

    if block_chain_buffer.is_duplicate_block(blocknum=block.blockheader.blocknumber,
                                             prev_blockheaderhash=block.blockheader.prev_blockheaderhash,
                                             stake_selector=block.blockheader.stake_selector):
        # Same height + parent + stake selector seen twice: evidence that a
        # stake selector signed two competing blocks.
        logger.info('Found duplicate block #%s by %s',
                    block.blockheader.blocknumber,
                    block.blockheader.stake_selector)
        # NOTE(review): assumes the coinbase txn is always at index 0 —
        # an empty transactions list would raise IndexError here; confirm
        # upstream validation guarantees this.
        coinbase_txn = block.transactions[0]

        if coinbase_txn.validate_tx(chain=self.factory.chain, blockheader=block.blockheader):
            self.factory.master_mr.register_duplicate(block.blockheader.headerhash)
            block2 = block_chain_buffer.get_block_n(block.blockheader.blocknumber)
            # Build and broadcast a DuplicateTransaction as proof of the
            # double-signing so peers can act on it.
            duplicate_txn = DuplicateTransaction().create(block1=block, block2=block2)
            if duplicate_txn.validate_tx():
                self.factory.chain.add_tx_to_duplicate_pool(duplicate_txn)
                self.factory.register_and_broadcast('DT', duplicate_txn.get_message_hash(), duplicate_txn.to_json())
    self.factory.pos.pre_block_logic(block)
    self.factory.master_mr.register(block.blockheader.headerhash, data, 'BK')
    return
def f_read_chain(self, epoch):
    """Read all blocks of *epoch* from its chain file (legacy StringIO variant).

    NOTE(review): near-duplicate of the bytearray-based f_read_chain in
    this file; this version accumulates characters in a StringIO and
    creates the genesis block via CreateGenesisBlock.
    """
    delimiter = config.dev.binary_file_delimiter
    block_list = []
    if os.path.isfile(self.get_chaindatafile(epoch)) is False:
        if epoch != 0:
            return []
        logger.info('Creating new chain file')
        genesis_block = CreateGenesisBlock(self)
        block_list.append(genesis_block)
        return block_list
    try:
        with open(self.get_chaindatafile(epoch), 'rb') as myfile:
            jsonBlock = StringIO()  # compressed data of the block being assembled
            tmp = ""                # candidate delimiter chars matched so far
            count = 0               # number of delimiter chars matched
            offset = 0              # absolute position of the current char in the file
            while True:
                chars = myfile.read(config.dev.chain_read_buffer_size)
                for char in chars:
                    offset += 1
                    if count > 0 and char != delimiter[count]:
                        # False delimiter match: flush the buffered chars
                        # back into the block body.
                        count = 0
                        jsonBlock.write(tmp)
                        tmp = ""
                    if char == delimiter[count]:
                        tmp += delimiter[count]
                        count += 1
                        if count < len(delimiter):
                            continue
                        # Complete delimiter: one full compressed block read.
                        tmp = ""
                        count = 0
                        compressedBlock = jsonBlock.getvalue()
                        # File offset where the compressed block starts.
                        pos = offset - len(delimiter) - len(compressedBlock)
                        jsonBlock = bz2.decompress(compressedBlock)
                        block = Block.from_json(jsonBlock)
                        # Metadata records the COMPRESSED size, matching what
                        # load_from_file reads back from disk.
                        self.update_block_metadata(block.blockheader.blocknumber, pos, len(compressedBlock))
                        block_list.append(block)
                        jsonBlock = StringIO()
                        continue
                    jsonBlock.write(char)
                if len(chars) < config.dev.chain_read_buffer_size:
                    # Short read => end of file.
                    break
    except Exception as e:
        logger.error('IO error %s', e)
        return []
    gc.collect()
    return block_list
def PB(self, data):
    """
    Push Block

    This function processes requested blocks received while syncing.
    Block received under this function are directly added to the main
    chain i.e. chain.m_blockchain It is expected to receive only one
    block for a given blocknumber.
    :return:
    """
    self.factory.pos.last_pb_time = time.time()
    try:
        if self.isNoMoreBlock(data):
            return

        received_block = Block.from_json(data)
        height = received_block.blockheader.blocknumber
        logger.info('>>> Received Block #%d', height)

        if height != self.last_requested_blocknum:
            logger.warning('Did not match %s %s', self.last_requested_blocknum, self.conn_identity)
            return

        if height > self.factory.chain.height():
            added = self.factory.chain.add_block_mainchain(received_block)
            if not added:
                logger.warning('PB failed to add block to mainchain')
                return

        try:
            # The download watchdog may not be active any more.
            reactor.download_monitor.cancel()
        except Exception as e:
            logger.warning("PB: %s", e)

        self.factory.pos.randomize_block_fetch(height + 1)
    except Exception as e:
        logger.error('block rejected - unable to decode serialised data %s', self.transport.getPeer().host)
        logger.exception(e)
    return
def BK(self, data):  # block received
    """Decode a broadcast block, run PoS pre-block logic, and re-broadcast."""
    try:
        incoming_block = Block.from_json(data)
    except Exception as e:
        logger.error('block rejected - unable to decode serialised data %s', self.transport.getPeer().host)
        logger.exception(e)
        return

    header = incoming_block.blockheader
    logger.info('>>>Received block from %s %s %s',
                self.identity,
                header.blocknumber,
                header.stake_selector)

    # Skip blocks that were not requested via the message-relay protocol.
    if not self.factory.master_mr.isRequested(header.headerhash, self):
        return

    self.factory.pos.pre_block_logic(incoming_block, self.identity)
    self.factory.master_mr.register(header.headerhash, data, 'BK')
    self.broadcast(header.headerhash, 'BK')
    return
def test_init(self):
    # TODO: Not much going on here..
    # Constructing a Block must succeed and yield an object.
    self.assertIsNotNone(Block())  # just to avoid warnings