def submit(self, header, worker_name):
    '''Check a getwork share against the target and submit it to the pool'''
    # Drop unused padding
    header = header[:160]

    # 1. Check if block header meets requested difficulty
    header_bin = binascii.unhexlify(header[:160])
    rev = ''.join([header_bin[i*4:i*4+4][::-1] for i in range(0, 20)])
    hash_bin = utils.doublesha(rev)
    block_hash = ''.join([hash_bin[i*4:i*4+4][::-1] for i in range(0, 8)])

    #log.info('!!! %s' % header[:160])
    log.info("Submitting %s" % utils.format_hash(binascii.hexlify(block_hash)))

    if utils.uint256_from_str(hash_bin) > self.target:
        log.debug("Share is below expected target")
        return True

    # 2. Look up the job and extranonce used for creating the given block header
    try:
        (job, extranonce2) = self.get_job_from_header(header)
    except KeyError:
        log.info("Job not found")
        return False

    # 3. Format extranonce2 to hex string
    extranonce2_hex = binascii.hexlify(self.extranonce2_padding(extranonce2))

    # 4. Parse ntime and nonce from header
    ntimepos = 17*8  # 17th integer in datastring
    noncepos = 19*8  # 19th integer in datastring
    ntime = header[ntimepos:ntimepos+8]
    nonce = header[noncepos:noncepos+8]

    # 5. Submit share to the pool
    return self.f.rpc('mining.submit',
                      [worker_name, job.job_id, extranonce2_hex, ntime, nonce])
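# The standalone sketch below illustrates the difficulty check performed in step 1 of
# submit() above. It is a minimal approximation, assuming utils.doublesha(x) is
# sha256(sha256(x)).digest() and utils.uint256_from_str() reads the 32-byte digest as a
# little-endian integer; the per-4-byte [::-1] reversal undoes the 32-bit word byte
# swapping of the getwork 'data' field before the 80-byte header is double-hashed.
import binascii
import hashlib

def _header_meets_target(header_hex, target):
    '''Return True if the byte-swapped, hex-encoded 80-byte header hashes at or below target.'''
    header_bin = binascii.unhexlify(header_hex[:160])
    rev = b''.join([header_bin[i*4:i*4+4][::-1] for i in range(0, 20)])
    hash_bin = hashlib.sha256(hashlib.sha256(rev).digest()).digest()
    hash_int = int(binascii.hexlify(hash_bin[::-1]), 16)  # little-endian bytes -> integer
    return hash_int <= target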
def submit(self, newjob, nonce, worker_name):
    '''Submit a share found by an external miner for the given job dict'''
    #job_id = newjob['library_number']
    #merkle = newjob['solutions']
    ntime = newjob['timestamp']
    merkle = ''.join('{:02x}'.format(x) for x in newjob['merkle tree root'])

    # Look up the job and extranonce used for creating the given block header
    try:
        (job, extranonce2) = self.get_job_from_merkle(merkle)
    except KeyError:
        log.info("Job not found from merkle")
        return False

    # Rebuild the block header from the registered job, the job's ntime and the
    # nonce reported by the miner
    header = job.serialize_header(merkle, ntime, nonce)
    #print(header[:160])

    # From here on this mirrors submit(header, worker_name)
    # Drop unused padding
    header = header[:160]

    # 1. Check if block header meets requested difficulty
    header_bin = binascii.unhexlify(header[:160])
    rev = ''.join([header_bin[i*4:i*4+4][::-1] for i in range(0, 20)])
    hash_bin = utils.doublesha(rev)
    block_hash = ''.join([hash_bin[i*4:i*4+4][::-1] for i in range(0, 8)])

    if utils.uint256_from_str(hash_bin) > self.target:
        log.debug("Share is below expected target")
        return True
    else:
        #log.info('!!! %s' % header[:160])
        log.info("Submitting %s" % utils.format_hash(binascii.hexlify(block_hash)))

    # 2. Look up the job and extranonce used for creating the given block header
    try:
        (job, extranonce2) = self.get_job_from_header(header)
    except KeyError:
        log.info("Job not found from header")
        return False

    # 3. Format extranonce2 to hex string
    extranonce2_hex = binascii.hexlify(self.extranonce2_padding(extranonce2))

    # 4. Parse ntime and nonce from header
    ntimepos = 17*8  # 17th integer in datastring
    noncepos = 19*8  # 19th integer in datastring
    ntime = header[ntimepos:ntimepos+8]
    nonce = header[noncepos:noncepos+8]

    # 5. Submit share to the pool
    #print(worker_name, job.job_id, extranonce2_hex, ntime, nonce)
    return self.f.rpc('mining.submit',
                      [worker_name, job.job_id, extranonce2_hex, ntime, nonce])
def getwork(self, no_midstate=True):
    '''Miner requests new getwork'''

    job = self.last_job  # Pick the latest job from pool

    # 1. Increase extranonce2
    extranonce2 = job.increase_extranonce2(self.randomize_xn2)

    # 2. Build final extranonce
    extranonce = self.build_full_extranonce(extranonce2)
    log.debug('XN = %s' % (binascii.hexlify(extranonce)))

    # 3. Put coinbase transaction together
    coinbase_bin = job.build_coinbase(extranonce)

    # 4. Calculate coinbase hash
    coinbase_hash = utils.doublesha(coinbase_bin)

    # 5. Calculate merkle root
    merkle_root = binascii.hexlify(utils.reverse_hash(job.build_merkle_root(coinbase_hash)))

    # 6. Generate current ntime
    ntime = int(time.time()) + job.ntime_delta

    # 7. Serialize header
    block_header = job.serialize_header(merkle_root, ntime, 0)

    # 8. Register job params
    self.register_merkle(job, merkle_root, extranonce2)

    # 9. Prepare hash1, calculate midstate and fill the response object
    header_bin = binascii.unhexlify(block_header)[:64]
    hash1 = "00000000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000010000"

    result = {'data': block_header, 'hash1': hash1}

    if self.use_old_target:
        result['target'] = 'ffffffffffffffffffffffffffffffffffffffffffffffffffffffff00000000'
    elif self.real_target:
        result['target'] = self.target_hex
    else:
        result['target'] = self.target1_hex

    if calculateMidstate and not (no_midstate or self.no_midstate):
        # Midstate is calculated only when the midstate module was found and is enabled
        result['midstate'] = binascii.hexlify(calculateMidstate(header_bin))

    return result
def getwork(self, no_midstate=True):
    '''Miner requests new getwork'''

    job = self.last_job  # Pick the latest job from pool

    # 1. Increase extranonce2
    extranonce2 = job.increase_extranonce2()

    # 2. Build final extranonce
    extranonce = self.build_full_extranonce(extranonce2)

    # 3. Put coinbase transaction together
    coinbase_bin = job.build_coinbase(extranonce)

    # 4. Calculate coinbase hash
    coinbase_hash = utils.doublesha(coinbase_bin)

    # 5. Calculate merkle root
    merkle_root = binascii.hexlify(utils.reverse_hash(job.build_merkle_root(coinbase_hash)))

    # 6. Generate current ntime
    ntime = int(time.time()) + job.ntime_delta

    # 7. Serialize header
    block_header = job.serialize_header(merkle_root, ntime, 0)

    # 8. Register job params
    self.register_merkle(job, merkle_root, extranonce2)

    # 9. Prepare hash1, calculate midstate and fill the response object
    header_bin = binascii.unhexlify(block_header)[:64]
    hash1 = "00000000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000010000"

    result = {'data': block_header, 'hash1': hash1}

    if self.use_old_target:
        result['target'] = 'ffffffffffffffffffffffffffffffffffffffffffffffffffffffff00000000'
    elif self.real_target:
        result['target'] = self.target_hex
    else:
        result['target'] = self.target1_hex

    if calculateMidstate and not (no_midstate or self.no_midstate):
        # Midstate is calculated only when the midstate module was found and is enabled
        result['midstate'] = binascii.hexlify(calculateMidstate(header_bin))

    return result
def getwork(self):
    '''Miner requests new getwork'''

    job = self.last_job  # Pick the latest job from pool

    # 1. Increase extranonce2
    extranonce2 = job.increase_extranonce2()

    # 2. Build final extranonce
    extranonce = self.build_full_extranonce(extranonce2)

    # 3. Put coinbase transaction together
    coinbase_bin = job.build_coinbase(extranonce)

    # 4. Calculate coinbase hash
    coinbase_hash = utils.doublesha(coinbase_bin)

    # 5. Calculate merkle root
    merkle_root = binascii.hexlify(utils.reverse_hash(job.build_merkle_root(coinbase_hash)))

    # 6. Generate current ntime
    ntime = int(time.time()) + job.ntime_delta

    # 7. Serialize header
    block_header = job.serialize_header(merkle_root, ntime, 0)

    # 8. Register job params
    self.register_merkle(job, merkle_root, extranonce2)

    # 9. Fill the response object
    header_bin = binascii.unhexlify(block_header)[:64]

    result = {'data': block_header}
    result['target'] = self.target_hex
    return result
def build_merkle_root(self, coinbase_hash):
    '''Fold the coinbase hash with the pool-provided merkle branch'''
    merkle_root = coinbase_hash
    for h in self.merkle_branch:
        merkle_root = utils.doublesha(merkle_root + h)
    return merkle_root
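# A minimal sketch of what build_merkle_root() above computes, assuming
# utils.doublesha(x) == sha256(sha256(x)).digest(): the coinbase transaction hash is
# folded with each element of the stratum merkle branch, the coinbase side always being
# the left operand of the concatenation. The hashes below are made-up placeholders.
import hashlib

def _doublesha(data):
    return hashlib.sha256(hashlib.sha256(data).digest()).digest()

def _fold_merkle_branch(coinbase_hash, merkle_branch):
    root = coinbase_hash
    for h in merkle_branch:
        root = _doublesha(root + h)
    return root

# With a single-element branch the root is simply doublesha(coinbase_hash + branch[0]):
_cb = _doublesha(b'coinbase-tx-bytes')
_branch = [_doublesha(b'sibling-tx-bytes')]
assert _fold_merkle_branch(_cb, _branch) == _doublesha(_cb + _branch[0])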
def getwork(self, no_midstate=True):
    '''Miner requests new getwork'''

    job = self.last_job  # Pick the latest job from pool

    # 1. Increase extranonce2
    extranonce2 = job.increase_extranonce2()

    # 2. Build final extranonce
    extranonce = self.build_full_extranonce(extranonce2)

    # 3. Put coinbase transaction together
    coinbase_bin = job.build_coinbase(extranonce)

    # 4. Calculate coinbase hash
    coinbase_hash = utils.doublesha(coinbase_bin)

    # 5. Calculate merkle root
    merkle_root = binascii.hexlify(utils.reverse_hash(job.build_merkle_root(coinbase_hash)))

    # 6. Generate current ntime
    ntime = int(time.time()) + job.ntime_delta

    # 7. Serialize header
    block_header = job.serialize_header(merkle_root, ntime, 0)

    # 8. Register job params
    self.register_merkle(job, merkle_root, extranonce2)

    # 9. Prepare hash1, calculate midstate and fill the response object
    header_bin = binascii.unhexlify(block_header)[:64]
    hash1 = "00000000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000010000"

    result = {'data': block_header, 'hash1': hash1}

    if self.use_old_target:
        result['target'] = 'ffffffffffffffffffffffffffffffffffffffffffffffffffffffff00000000'
    elif self.real_target:
        result['target'] = self.target_hex
    else:
        result['target'] = self.target1_hex

    if calculateMidstate and not (no_midstate or self.no_midstate):
        # Midstate is calculated only when the midstate module was found and is enabled
        result['midstate'] = binascii.hexlify(calculateMidstate(header_bin))

    # 10. Build the job dict handed to the external miner; the triple-quoted block
    # below it holds a hard-coded test job (swap the #''' markers to activate it instead)
    #'''
    newjob = {}
    newjob['version'] = int(job.version)
    newjob['previous block hash'] = list(bytearray(binascii.unhexlify(job.prevhash)))  #array.array('B', binascii.unhexlify(job.prevhash))
    newjob['merkle tree root'] = list(bytearray(binascii.unhexlify(merkle_root)))  #array.array('B', binascii.unhexlify(merkle_root))
    newjob['timestamp'] = ntime
    newjob['bits'] = list(bytearray(binascii.unhexlify(job.nbits)))
    newjob['starting nonce'] = 0
    newjob['nonce loops'] = 0
    newjob['ntime loops'] = 0
    '''
    prevhash = "0000000000000000066d65d43c939518101df3e56d983375b5ae9c502c32b801"
    xprev = map(ord, binascii.unhexlify(prevhash))  #array.array('B', binascii.unhexlify(job.prevhash))
    merkle_root = "aa8eca0f15bff273918b20e66a559fb5b8d53342fc8127581c9d34fd96990f95"
    xmerk = map(ord, binascii.unhexlify(merkle_root))  #array.array('B', binascii.unhexlify(merkle_root))
    version = 2
    timestamp = 1401405809
    bits = 409544770

    newjob = {}
    newjob['version'] = version
    newjob['previous block hash'] = xprev[::-1]
    newjob['merkle tree root'] = xmerk[::-1]
    newjob['timestamp'] = timestamp
    newjob['bits'] = bits
    newjob['starting nonce'] = 0
    newjob['nonce loops'] = 0
    newjob['ntime loops'] = 0
    #'''

    return newjob
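# Hypothetical round trip between the getwork() variant above and submit(newjob, nonce,
# worker_name): the 'merkle tree root' byte list handed out by getwork() is re-hexed
# inside submit() and used for the get_job_from_merkle() lookup. The registry and worker
# names below are illustrative only, not part of the module.
#
#   newjob = registry.getwork()                 # job dict for the external miner
#   nonce = 0x00000001                          # nonce reported back by the miner
#   registry.submit(newjob, nonce, 'worker.1')  # rebuilds the header and submits the share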