def __init__(self, data, metadata, subproc_count=None):
    """Onionr proof of work using multiple processes.

    Accepts block data and block metadata.

    If subproc_count is not set, os.cpu_count() is used to determine
    the number of processes.

    Due to the Python GIL, multiprocessing or use of external libraries
    is necessary to accelerate CPU bound tasks.
    """
    # No known benefit to using more processes than there are cores.
    # Note: os.cpu_count perhaps not always accurate
    if subproc_count is None:
        subproc_count = os.cpu_count()
    self.subproc_count = subproc_count
    self.result = ''
    self.shutdown = False
    self.metadata = metadata
    # Dump dict to measure bytes of json metadata.
    # Cannot reuse later bc the pow token must be added.
    json_metadata = json.dumps(metadata).encode()
    # Normalize the block data to bytes before hashing/measuring.
    self.data = bytesconverter.str_to_bytes(data)
    compiled_data = bytes(json_metadata + b'\n' + self.data)
    # Calculate difficulty. May use better algorithm in the future.
    self.difficulty = onionrproofs.getDifficultyForNewBlock(compiled_data)
    logger.info('Computing POW (difficulty: %s)...' % (self.difficulty,))
    # The puzzle is the required run of leading zeros in the block hash.
    self.main_hash = '0' * 64
    self.puzzle = self.main_hash[0:min(self.difficulty, len(self.main_hash))]
    self.payload = None
def __init__(self, data, metadata, core_inst=None, subproc_count=None):
    '''
        Onionr proof of work using multiple processes

        Accepts block data, block metadata and optionally an onionr
        core library instance.

        If subproc_count is not set, os.cpu_count() is used to
        determine the number of processes.

        Due to the Python GIL, multiprocessing or use of external
        libraries is necessary to accelerate CPU bound tasks.
    '''
    # Option to accept existing core instance to save memory
    if core_inst is None:
        core_inst = core.Core()
    # No known benefit to using more processes than there are cores.
    # Note: os.cpu_count perhaps not always accurate
    if subproc_count is None:
        subproc_count = os.cpu_count()
    self.subproc_count = subproc_count
    self.result = ''
    self.shutdown = False
    self.core_inst = core_inst
    self.metadata = metadata
    # Dump dict to measure bytes of json metadata.
    # Cannot reuse later because the pow token must be added.
    json_metadata = json.dumps(metadata).encode()
    # Normalize the block data to bytes before hashing/measuring.
    self.data = onionrutils.OnionrUtils.strToBytes(data)
    # Calculate difficulty. Dumb for now, may use good algorithm in the future.
    self.difficulty = onionrproofs.getDifficultyForNewBlock(
        bytes(json_metadata + b'\n' + self.data), coreInst=self.core_inst)
    logger.info('Computing POW (difficulty: %s)...' % self.difficulty)
    # The puzzle is the required run of leading zeros in the block hash.
    self.mainHash = '0' * 64
    self.puzzle = self.mainHash[0:min(self.difficulty, len(self.mainHash))]
    self.payload = None
def verifyPow(self, blockContent):
    '''
        Verifies the proof of work associated with a block

        Returns True if the block hash satisfies the required
        difficulty, False otherwise.
    '''
    retData = False

    # Accept str or bytes input; hash over bytes.
    try:
        blockContent = blockContent.encode()
    except AttributeError:
        pass

    blockHash = self.sha3Hash(blockContent)
    try:
        # bytes on some versions for some reason
        blockHash = blockHash.decode()
    except AttributeError:
        pass

    difficulty = onionrproofs.getDifficultyForNewBlock(
        blockContent, ourBlock=False, coreInst=self._core)

    # Enforce the configured network-wide minimum difficulty floor.
    if difficulty < int(config.get('general.minimum_block_pow')):
        difficulty = int(config.get('general.minimum_block_pow'))

    # The proof is valid if the hash starts with `difficulty` zeros.
    mainHash = '0' * 64
    puzzle = mainHash[:difficulty]

    if blockHash[:difficulty] == puzzle:
        retData = True
    else:
        logger.debug("Invalid token, bad proof")

    return retData