def algo_verify(block_hex, hash_function):
    """Hash a raw hex block with the named algorithm and print whether the
    resulting hash satisfies the block's own packed difficulty target.

    :param block_hex: full raw block as a hex string
    :param hash_function: name of the hashing algorithm, resolved via
        ``import_helper``
    """
    raw_block = unhexlify(block_hex)
    # nBits occupies header bytes 72..75; reverse for target unpacking
    packed_bits = raw_block[72:76][::-1]
    difficulty_target = target_unpack(packed_bits)
    digest = import_helper(hash_function)(raw_block)
    digest_int = uint256_from_str(digest)
    print("Unpacked target of {}".format(hexlify(uint256_to_str(difficulty_target))))
    print("Block hash is {}".format(hexlify(digest)))
    print("Block hash is valid? {}".format(digest_int <= difficulty_target))
def test_stratum_confirm(self):
    """Rebuild a block template from raw getblocktemplate data, replay a
    share that cgminer submitted, and confirm the scrypt hash matches the
    result cgminer reported (and beats diff-1 target).
    """
    # raw gbt data captured from a live daemon
    gbt = {u'bits': u'1e00e92b',
           u'coinbaseaux': {u'flags': u'062f503253482f'},
           u'coinbasevalue': 5000000000,
           u'curtime': 1392509565,
           u'height': 203588,
           u'mintime': 1392508633,
           u'mutable': [u'time', u'transactions', u'prevblock'],
           u'noncerange': u'00000000ffffffff',
           u'previousblockhash': u'b0f5ecb62774f2f07fdc0f72fa0585ae3e8ca78ad8692209a355d12bc690fb73',
           u'sigoplimit': 20000,
           u'sizelimit': 1000000,
           u'target': u'000000e92b000000000000000000000000000000000000000000000000000000',
           u'transactions': [],
           u'version': 2}
    extra1 = '0000000000000000'
    # the share cgminer actually submitted for this work
    submit = {'extra2': '00000000',
              'nonce': 'd5160000',
              'result': '000050ccfe8a3efe93b2ee33d2aecf4a60c809995c7dd19368a7d00c86880f30'}

    # build a block template object from the raw data
    coinbase = Transaction()
    coinbase.version = 2
    coinbase.inputs.append(Input.coinbase(gbt['height'], b'\0' * 12))
    coinbase.outputs.append(
        Output.to_address(gbt['coinbasevalue'],
                          'D7QJyeBNuwEqxsyVCLJi3pHs64uPdMDuBa'))
    transactions = []
    for trans in gbt['transactions']:
        new_trans = Transaction(unhexlify(trans['data']), fees=trans['fee'])
        assert trans['hash'] == new_trans.lehexhash
        transactions.append(new_trans)
    bt = BlockTemplate.from_gbt(gbt, coinbase, 12, transactions)
    send_params = bt.stratum_params()
    print("job_id: {0}\nprevhash: {1}\ncoinbase1: {2}\ncoinbase2: {3}"
          "\nmerkle_branch: {4}\nversion: {5}\nnbits: {6}\nntime: {7}"
          .format(*send_params))

    # reassemble the header exactly as the miner would and hash it
    header = bt.block_header(submit['nonce'], extra1, submit['extra2'])
    hash_bin = scrypt(header)
    target = target_from_diff(1, 0x0000FFFF00000000000000000000000000000000000000000000000000000000)
    hash_int = uint256_from_str(hash_bin)
    hash_hex = "%064x" % hash_int
    # assertEqual: assertEquals is a deprecated alias, removed in Python 3.12
    self.assertEqual(hash_hex, submit['result'])
    assert hash_int < target
def submit_job(self, data, t):
    """Validate a share submitted by a stratum client and reply.

    Looks up the job by id (stale check, via weakref'd current and old
    job maps), rejects duplicates and shares below the client's target,
    and logs every outcome through the reporter.

    :param data: decoded stratum message; ``data['params']`` is
        [worker_name, job_id, extranonce2, ntime, nonce]
    :param t: receive timestamp, forwarded to the reporter as ``start``
    :return: (difficulty, outcome constant) tuple
    """
    params = data['params']
    # [worker_name, job_id, extranonce2, ntime, nonce]
    # ["slush.miner1", "bf", "00000001", "504e86ed", "b2957c02"]
    if __debug__:
        self.logger.debug(
            "Recieved work submit:\n\tworker_name: {0}\n\t"
            "job_id: {1}\n\textranonce2: {2}\n\t"
            "ntime: {3}\n\tnonce: {4} ({int_nonce})"
            .format(
                *params,
                int_nonce=struct.unpack(str("<L"), unhexlify(params[4]))))
    # any submit counts as activity; pull the client out of the idle pool
    if self.idle:
        self.idle = False
        self.server.idle_clients -= 1
    self.last_share_submit = time.time()

    try:
        difficulty, job = self.job_mapper[data['params'][1]]
        job = job()  # weakref will be none if it's been GCed
    except KeyError:
        try:
            difficulty, job = self.old_job_mapper[data['params'][1]]
            job = job()  # weakref will be none if it's been GCed
        except KeyError:
            job = None  # Job not in jobmapper at all, we got a bogus submit
            # since we can't identify the diff we just have to assume it's
            # current diff
            difficulty = self.difficulty

    # unknown job id or GCed weakref -> stale share
    if job is None:
        self.send_error(self.STALE_SHARE_ERR, id_val=data['id'])
        self.reporter.log_share(client=self, diff=self.difficulty,
                                typ=self.STALE_SHARE, params=params,
                                start=t)
        return difficulty, self.STALE_SHARE

    # assemble a complete block header bytestring
    header = job.block_header(
        nonce=params[4],
        extra1=self._id,
        extra2=params[2],
        ntime=params[3])

    # Check a submitted share against previous shares to eliminate
    # duplicates
    share = (self._id, params[2], params[4], params[3])
    if share in job.acc_shares:
        self.logger.info("Duplicate share rejected from worker {}.{}!"
                         .format(self.address, self.worker))
        self.send_error(self.DUP_SHARE_ERR, id_val=data['id'])
        self.reporter.log_share(client=self, diff=difficulty,
                                typ=self.DUP_SHARE, params=params, job=job,
                                start=t)
        return difficulty, self.DUP_SHARE

    # the share's hash must fall below the target for its difficulty
    job_target = target_from_diff(difficulty, job.diff1)
    hash_int = uint256_from_str(self.algo['module'](header))
    if hash_int >= job_target:
        self.logger.info("Low diff share rejected from worker {}.{}!"
                         .format(self.address, self.worker))
        self.send_error(self.LOW_DIFF_ERR, id_val=data['id'])
        self.reporter.log_share(client=self, diff=difficulty,
                                typ=self.LOW_DIFF_SHARE, params=params,
                                job=job, start=t)
        return difficulty, self.LOW_DIFF_SHARE

    # we want to send an ack ASAP, so do it here
    self.send_success(id_val=data['id'])
    # Add the share to the accepted set to check for dups
    job.acc_shares.add(share)
    self.accepted_shares += difficulty
    self.reporter.log_share(client=self, diff=difficulty,
                            typ=self.VALID_SHARE, params=params, job=job,
                            header_hash=hash_int, header=header, start=t)
    return difficulty, self.VALID_SHARE
def submit_job(self, data):
    """Validate a submitted share, reply to the stratum client, and
    dispatch any block solutions.

    ``data['params']`` is [worker_name, job_id, extranonce2, ntime,
    nonce].  Every accept/reject path bumps the matching server
    counters.  Returns an (outcome constant, difficulty) tuple.
    """
    start = time.time()
    params = data['params']
    # [worker_name, job_id, extranonce2, ntime, nonce]
    # ["slush.miner1", "bf", "00000001", "504e86ed", "b2957c02"]
    if __debug__:
        self.logger.debug(
            "Recieved work submit:\n\tworker_name: {0}\n\t"
            "job_id: {1}\n\textranonce2: {2}\n\t"
            "ntime: {3}\n\tnonce: {4} ({int_nonce})"
            .format(
                *params,
                int_nonce=struct.unpack(str("<L"), unhexlify(params[4]))))
    # any submit counts as activity; pull the client out of the idle pool
    if self.idle:
        self.idle = False
        self.stratum_manager.idle_clients -= 1
    self.last_share_submit = time.time()

    try:
        difficulty, jobid = self.job_mapper[data['params'][1]]
    except KeyError:
        # since we can't identify the diff we just have to assume it's
        # current diff
        self.send_error(self.STALE_SHARE_ERR, id_val=self.msg_id)
        self.server['reject_stale'].incr(self.difficulty)
        self.server['reject_stale_shares'].incr()
        return self.STALE_SHARE, self.difficulty

    # lookup the job in the global job dictionary. If it's gone from here
    # then a new block was announced which wiped it
    try:
        job = self.jobmanager.jobs[jobid]
    except KeyError:
        self.send_error(self.STALE_SHARE_ERR, id_val=self.msg_id)
        self.server['reject_stale'].incr(difficulty)
        self.server['reject_stale_shares'].incr()
        return self.STALE_SHARE, difficulty

    # assemble a complete block header bytestring
    header = job.block_header(
        nonce=params[4],
        extra1=self.id,
        extra2=params[2],
        ntime=params[3])

    # Grab the raw coinbase out of the job object before gevent can preempt
    # to another thread and change the value. Very important!
    coinbase_raw = job.coinbase.raw

    # Check a submitted share against previous shares to eliminate
    # duplicates
    share = (self.id, params[2], params[4], params[3])
    if share in job.acc_shares:
        self.logger.info("Duplicate share rejected from worker {}.{}!"
                         .format(self.address, self.worker))
        self.send_error(self.DUP_SHARE_ERR, id_val=self.msg_id)
        self.server['reject_dup'].incr(difficulty)
        self.server['reject_dup_shares'].incr()
        return self.DUP_SHARE, difficulty

    # the share's hash must fall below the target for its difficulty
    job_target = target_from_diff(difficulty, job.diff1)
    hash_int = uint256_from_str(self.algos[job.algo](header))
    if hash_int >= job_target:
        self.logger.info("Low diff share rejected from worker {}.{}!"
                         .format(self.address, self.worker))
        self.send_error(self.LOW_DIFF_ERR, id_val=self.msg_id)
        self.server['reject_low'].incr(difficulty)
        self.server['reject_low_shares'].incr()
        return self.LOW_DIFF, difficulty

    # we want to send an ack ASAP, so do it here
    self.send_success(id_val=self.msg_id)
    self.logger.debug("Valid share accepted from worker {}.{}!"
                      .format(self.address, self.worker))
    # Add the share to the accepted set to check for dups
    job.acc_shares.add(share)
    self.server['valid'].incr(difficulty)
    self.server['valid_shares'].incr()

    # Some coins use POW function to do blockhash, while others use SHA256.
    # Allow toggling
    if job.pow_block_hash:
        header_hash = self.algos[job.algo](header)[::-1]
    else:
        header_hash = sha256(sha256(header).digest()).digest()[::-1]
    hash_hex = hexlify(header_hash)

    # valid network hash?
    if hash_int <= job.bits_target:
        spawn(self.jobmanager.found_block,
              coinbase_raw,
              self.address,
              self.worker,
              hash_hex,
              header,
              job.job_id,
              start)
        outcome = self.BLOCK_FOUND
    else:
        outcome = self.VALID_SHARE

    # check each aux chain for validity
    # NOTE(review): the loop variable `data` shadows the `data` parameter;
    # `iteritems` implies this code targets Python 2
    for chain_id, data in job.merged_data.iteritems():
        if hash_int <= data['target']:
            spawn(self.jobmanager.found_merged_block,
                  self.address,
                  self.worker,
                  header,
                  job.job_id,
                  coinbase_raw,
                  data['type'])
    return outcome, difficulty
def submit_job(self, data, t):
    """Validate a submitted share against the active job set and reply.

    ``data['params']`` is [worker_name, job_id, extranonce2, ntime,
    nonce].  Outcomes are logged through the reporter and tallied in the
    local ``counter`` dict.  Returns (difficulty, outcome constant).

    :param t: receive timestamp, forwarded to the reporter as ``start``
    """
    params = data['params']
    # [worker_name, job_id, extranonce2, ntime, nonce]
    # ["slush.miner1", "bf", "00000001", "504e86ed", "b2957c02"]
    if __debug__:
        self.logger.debug("Recieved work submit:\n\tworker_name: {0}\n\t"
                          "job_id: {1}\n\textranonce2: {2}\n\t"
                          "ntime: {3}\n\tnonce: {4} ({int_nonce})".format(
                              *params,
                              int_nonce=struct.unpack(
                                  str("<L"), unhexlify(params[4]))))
    # any submit counts as activity; pull the client out of the idle pool
    if self.idle:
        self.idle = False
        self.server.idle_clients -= 1
    self.last_share_submit = time.time()

    try:
        difficulty, job = self.job_mapper[data['params'][1]]
        job = job()  # weakref will be None if the job has been GCed
    except KeyError:
        try:
            difficulty, job = self.old_job_mapper[data['params'][1]]
            job = job()  # weakref will be None if the job has been GCed
        except KeyError:
            job = None  # Job not in jobmapper at all, we got a bogus submit
            # since we can't identify the diff we just have to assume it's
            # current diff
            difficulty = self.difficulty

    # a dead weakref (job is None) also fails this membership test
    if job not in self.server.active_jobs:
        self.send_error(self.STALE_SHARE_ERR, id_val=data['id'])
        self.counter['stale'] += 1
        self.reporter.log_share(client=self,
                                diff=self.difficulty,
                                typ=self.STALE_SHARE,
                                params=params,
                                job=job,
                                start=t)
        return difficulty, self.STALE_SHARE

    # assemble a complete block header bytestring
    header = job.block_header(nonce=params[4],
                              extra1=self._id,
                              extra2=params[2],
                              ntime=params[3])

    # Check a submitted share against previous shares to eliminate
    # duplicates; lowercased so hex-case differences compare equal
    share_lower = (self._id.lower(), params[2].lower(), params[4].lower(),
                   params[3].lower())
    if share_lower in job.acc_shares:
        self.logger.info(
            "Duplicate share rejected from worker {}.{}!".format(
                self.address, self.worker))
        self.send_error(self.DUP_SHARE_ERR, id_val=data['id'])
        self.counter['duplicate'] += 1
        self.reporter.log_share(client=self,
                                diff=difficulty,
                                typ=self.DUP_SHARE,
                                params=params,
                                job=job,
                                start=t)
        return difficulty, self.DUP_SHARE

    # the share's hash must fall below the target for its difficulty
    job_target = target_from_diff(difficulty, job.diff1)
    hash_int = uint256_from_str(self.algo['module'](header))
    if hash_int >= job_target:
        self.logger.info(
            "Low diff share rejected from worker {}.{}!".format(
                self.address, self.worker))
        self.send_error(self.LOW_DIFF_ERR, id_val=data['id'])
        # NOTE(review): key casing differs from 'stale'/'duplicate'/
        # 'accepted' — confirm consumers really expect 'LowDiff'
        self.counter['LowDiff'] += 1
        self.reporter.log_share(client=self,
                                diff=difficulty,
                                typ=self.LOW_DIFF_SHARE,
                                params=params,
                                job=job,
                                start=t)
        return difficulty, self.LOW_DIFF_SHARE

    # we want to send an ack ASAP, so do it here
    self.send_success(id_val=data['id'])
    # Add the share to the accepted set to check for dups
    job.acc_shares.add(share_lower)
    self.counter['accepted'] += 1
    # presumably scales accepted shares relative to bitcoin's diff-1
    # constant so totals are comparable across chains — TODO confirm
    multi = float(
        job.diff1
    ) / 0x00000000FFFF0000000000000000000000000000000000000000000000000000
    self.accepted_shares += difficulty / multi
    self.server.jobmanager.current_accepted_shares += difficulty / multi
    self.reporter.log_share(client=self,
                            diff=difficulty,
                            typ=self.VALID_SHARE,
                            params=params,
                            job=job,
                            header_hash=hash_int,
                            header=header,
                            start=t)
    return difficulty, self.VALID_SHARE
def test_stratum_confirm(self):
    """Rebuild a block template from raw getblocktemplate data, replay a
    share that cgminer submitted, and confirm the scrypt hash matches the
    result cgminer reported (and beats the diff-1 target).
    """
    # raw gbt data captured from a live daemon
    gbt = {
        u'bits': u'1e00e92b',
        u'coinbaseaux': {
            u'flags': u'062f503253482f'
        },
        u'coinbasevalue': 5000000000,
        u'curtime': 1392509565,
        u'height': 203588,
        u'mintime': 1392508633,
        u'mutable': [u'time', u'transactions', u'prevblock'],
        u'noncerange': u'00000000ffffffff',
        u'previousblockhash':
        u'b0f5ecb62774f2f07fdc0f72fa0585ae3e8ca78ad8692209a355d12bc690fb73',
        u'sigoplimit': 20000,
        u'sizelimit': 1000000,
        u'target':
        u'000000e92b000000000000000000000000000000000000000000000000000000',
        u'transactions': [],
        u'version': 2
    }
    extra1 = '0000000000000000'
    # the share cgminer actually submitted for this work
    submit = {
        'extra2': '00000000',
        'nonce': 'd5160000',
        'result':
        '000050ccfe8a3efe93b2ee33d2aecf4a60c809995c7dd19368a7d00c86880f30'
    }

    # build a block template object from the raw data
    coinbase = Transaction()
    coinbase.version = 2
    coinbase.inputs.append(Input.coinbase(gbt['height'], b'\0' * 12))
    coinbase.outputs.append(
        Output.to_address(gbt['coinbasevalue'],
                          'D7QJyeBNuwEqxsyVCLJi3pHs64uPdMDuBa'))
    transactions = []
    for trans in gbt['transactions']:
        new_trans = Transaction(unhexlify(trans['data']), fees=trans['fee'])
        assert trans['hash'] == new_trans.lehexhash
        transactions.append(new_trans)
    bt = BlockTemplate.from_gbt(gbt, coinbase, 12, transactions)
    send_params = bt.stratum_params()
    print("job_id: {0}\nprevhash: {1}\ncoinbase1: {2}\ncoinbase2: {3}"
          "\nmerkle_branch: {4}\nversion: {5}\nnbits: {6}\nntime: {7}".
          format(*send_params))

    # reassemble the header exactly as the miner would and hash it
    header = bt.block_header(submit['nonce'], extra1, submit['extra2'])
    hash_bin = scrypt(header)
    target = target_from_diff(
        1, 0x0000FFFF00000000000000000000000000000000000000000000000000000000)
    hash_int = uint256_from_str(hash_bin)
    hash_hex = "%064x" % hash_int
    # assertEqual: assertEquals is a deprecated alias, removed in Python 3.12
    self.assertEqual(hash_hex, submit['result'])
    assert hash_int < target
def submit_job(self, data):
    """Validate a submitted share, reply to the stratum client, and
    dispatch any block solutions.

    ``data['params']`` is [worker_name, job_id, extranonce2, ntime,
    nonce].  Every accept/reject path bumps the matching server
    counters.  Returns an (outcome constant, difficulty) tuple.
    """
    start = time.time()
    params = data['params']
    # [worker_name, job_id, extranonce2, ntime, nonce]
    # ["slush.miner1", "bf", "00000001", "504e86ed", "b2957c02"]
    if __debug__:
        self.logger.debug("Recieved work submit:\n\tworker_name: {0}\n\t"
                          "job_id: {1}\n\textranonce2: {2}\n\t"
                          "ntime: {3}\n\tnonce: {4} ({int_nonce})".format(
                              *params,
                              int_nonce=struct.unpack(
                                  str("<L"), unhexlify(params[4]))))
    # any submit counts as activity; pull the client out of the idle pool
    if self.idle:
        self.idle = False
        self.stratum_manager.idle_clients -= 1
    self.last_share_submit = time.time()

    try:
        difficulty, jobid = self.job_mapper[data['params'][1]]
    except KeyError:
        # since we can't identify the diff we just have to assume it's
        # current diff
        self.send_error(self.STALE_SHARE_ERR, id_val=self.msg_id)
        self.server['reject_stale'].incr(self.difficulty)
        self.server['reject_stale_shares'].incr()
        return self.STALE_SHARE, self.difficulty

    # lookup the job in the global job dictionary. If it's gone from here
    # then a new block was announced which wiped it
    try:
        job = self.jobmanager.jobs[jobid]
    except KeyError:
        self.send_error(self.STALE_SHARE_ERR, id_val=self.msg_id)
        self.server['reject_stale'].incr(difficulty)
        self.server['reject_stale_shares'].incr()
        return self.STALE_SHARE, difficulty

    # assemble a complete block header bytestring
    header = job.block_header(nonce=params[4],
                              extra1=self.id,
                              extra2=params[2],
                              ntime=params[3])

    # Grab the raw coinbase out of the job object before gevent can preempt
    # to another thread and change the value. Very important!
    coinbase_raw = job.coinbase.raw

    # Check a submitted share against previous shares to eliminate
    # duplicates
    share = (self.id, params[2], params[4], params[3])
    if share in job.acc_shares:
        self.logger.info(
            "Duplicate share rejected from worker {}.{}!".format(
                self.address, self.worker))
        self.send_error(self.DUP_SHARE_ERR, id_val=self.msg_id)
        self.server['reject_dup'].incr(difficulty)
        self.server['reject_dup_shares'].incr()
        return self.DUP_SHARE, difficulty

    # the share's hash must fall below the target for its difficulty
    job_target = target_from_diff(difficulty, job.diff1)
    hash_int = uint256_from_str(self.algos[job.algo](header))
    if hash_int >= job_target:
        self.logger.info(
            "Low diff share rejected from worker {}.{}!".format(
                self.address, self.worker))
        self.send_error(self.LOW_DIFF_ERR, id_val=self.msg_id)
        self.server['reject_low'].incr(difficulty)
        self.server['reject_low_shares'].incr()
        return self.LOW_DIFF, difficulty

    # we want to send an ack ASAP, so do it here
    self.send_success(id_val=self.msg_id)
    self.logger.debug("Valid share accepted from worker {}.{}!".format(
        self.address, self.worker))
    # Add the share to the accepted set to check for dups
    job.acc_shares.add(share)
    self.server['valid'].incr(difficulty)
    self.server['valid_shares'].incr()

    # Some coins use POW function to do blockhash, while others use SHA256.
    # Allow toggling
    if job.pow_block_hash:
        header_hash = self.algos[job.algo](header)[::-1]
    else:
        header_hash = sha256(sha256(header).digest()).digest()[::-1]
    hash_hex = hexlify(header_hash)

    # valid network hash?
    if hash_int <= job.bits_target:
        spawn(self.jobmanager.found_block,
              coinbase_raw,
              self.address,
              self.worker,
              hash_hex,
              header,
              job.job_id,
              start)
        outcome = self.BLOCK_FOUND
    else:
        outcome = self.VALID_SHARE

    # check each aux chain for validity
    # NOTE(review): the loop variable `data` shadows the `data` parameter;
    # `iteritems` implies this code targets Python 2
    for chain_id, data in job.merged_data.iteritems():
        if hash_int <= data['target']:
            spawn(self.jobmanager.found_merged_block,
                  self.address,
                  self.worker,
                  header,
                  job.job_id,
                  coinbase_raw,
                  data['type'])
    return outcome, difficulty