def handle_header(new_header, valid=False): new_hash = self.net.PARENT.POW_FUNC( bitcoin_data.block_header_type.pack(new_header)) # check that header matches current target if new_hash > self.bitcoind_work.value['bits'].target: return if not valid: try: _ = (yield self.bitcoind.rpc_getblockheader(new_hash)) except: return bitcoind_best_block = self.bitcoind_work.value['previous_block'] if (self.best_block_header.value is None or (new_header['previous_block'] == bitcoind_best_block and bitcoin_data.hash256( bitcoin_data.block_header_type.pack( self.best_block_header.value)) == bitcoind_best_block ) # new is child of current and previous is current or (bitcoin_data.hash256( bitcoin_data.block_header_type.pack(new_header)) == bitcoind_best_block and self.best_block_header.value['previous_block'] != bitcoind_best_block) ): # new is current and previous is not a child of current self.best_block_header.set(new_header)
def rpc_submit(self, worker_name, job_id, extranonce2, ntime, nonce, birthdayA, birthdayB): print 'Submit recieved' if job_id not in self.handler_map: print >>sys.stderr, '''Couldn't link returned work's job id with its handler. This should only happen if this process was recently restarted!''' return False x, got_response = self.handler_map[job_id] coinb_nonce = extranonce2.decode('hex') print "COINBN: %i NL: %i" % (len(coinb_nonce), self.wb.COINBASE_NONCE_LENGTH) assert len(coinb_nonce) == self.wb.COINBASE_NONCE_LENGTH new_packed_gentx = x['coinb1'] + coinb_nonce + x['coinb2'] print new_packed_gentx.encode('hex'); print bitcoin_data.hash256(new_packed_gentx) print bitcoin_data.check_merkle_link(bitcoin_data.hash256(new_packed_gentx), x['merkle_link']) print birthdayA print birthdayB header = dict( version=x['version'], previous_block=x['previous_block'], merkle_root=bitcoin_data.check_merkle_link(bitcoin_data.hash256(new_packed_gentx), x['merkle_link']), timestamp=pack.IntType(32).unpack(ntime.decode('hex')), bits=x['bits'], nonce=pack.IntType(32).unpack(nonce.decode('hex')), birthdayA=pack.IntType(32).unpack(birthdayA.decode('hex')), birthdayB=pack.IntType(32).unpack(birthdayB.decode('hex')), ) return got_response(header, worker_name, coinb_nonce)
def rpc_getblocktemplate(self, param):
    # Mock bitcoind getblocktemplate RPC for tests: 'submit' mode
    # validates a submitted block against the simulated chain; 'template'
    # mode serves a canned template of 100 tiny transactions.
    if param['mode'] == 'template':
        pass
    elif param['mode'] == 'submit':
        result = param['data']
        block = bitcoin_data.block_type.unpack(result.decode('hex'))
        # fee sanity check: warns but does not reject
        if sum(tx_out['value'] for tx_out in block['txs'][0]['tx_outs']) != sum(
                tx['tx_outs'][0]['value'] for tx in block['txs'][1:]) + 5000000000:
            print 'invalid fee'
        if block['header']['previous_block'] != self.blocks[-1]:
            return False
        if bitcoin_data.hash256(
                result.decode('hex')) > block['header']['bits'].target:
            return False
        header_hash = bitcoin_data.hash256(
            bitcoin_data.block_header_type.pack(block['header']))
        self.blocks.append(header_hash)
        self.headers[header_hash] = block['header']
        reactor.callLater(0, self.new_block.happened)
        return True
    else:
        raise jsonrpc.Error_for_code(-1)('invalid request')
    # build the synthetic work template
    txs = []
    for i in xrange(100):
        fee = i
        txs.append(
            dict(
                data=bitcoin_data.tx_type.pack(
                    dict(version=1,
                         tx_ins=[],
                         tx_outs=[dict(value=fee, script='hello!' * 100)],
                         lock_time=0)).encode('hex'),
                fee=fee,
            ))
    return {
        "version": 2,
        "previousblockhash": '%064x' % (self.blocks[-1], ),
        "transactions": txs,
        "coinbaseaux": {
            "flags": "062f503253482f"
        },
        "coinbasevalue": 5000000000 + sum(tx['fee'] for tx in txs),
        "target": "0000000000000513c50000000000000000000000000000000000000000000000",
        "mintime": 1351655621,
        "mutable": ["time", "transactions", "prevblock"],
        "noncerange": "00000000ffffffff",
        "sigoplimit": 20000,
        "sizelimit": 1000000,
        "curtime": 1351659940,
        "bits": "21008000",
        "height": len(self.blocks),
    }
def __init__(self, net, header, share_info, merkle_branch=None, other_txs=None): self.net = net if merkle_branch is None and other_txs is None: raise ValueError('need either merkle_branch or other_txs') if other_txs is not None: new_merkle_branch = bitcoin_data.calculate_merkle_branch([0] + [bitcoin_data.hash256(bitcoin_data.tx_type.pack(x)) for x in other_txs], 0) if merkle_branch is not None: if merke_branch != new_merkle_branch: raise ValueError('invalid merkle_branch and other_txs') merkle_branch = new_merkle_branch if len(merkle_branch) > 16: raise ValueError('merkle_branch too long!') self.header = header self.previous_block = header['previous_block'] self.share_info = share_info self.merkle_branch = merkle_branch self.share_data = self.share_info['share_data'] self.target = self.share_info['bits'].target self.timestamp = self.share_info['timestamp'] self.new_script = self.share_data['new_script'] self.subsidy = self.share_data['subsidy'] self.donation = self.share_data['donation'] if len(self.new_script) > 100: raise ValueError('new_script too long!') self.previous_hash = self.previous_share_hash = self.share_data['previous_share_hash'] self.nonce = self.share_data['nonce'] if len(self.nonce) > 100: raise ValueError('nonce too long!') if len(self.share_data['coinbase']) > 100: raise ValueError('''coinbase too large! %i bytes''' % (len(self.share_data['coinbase']),)) self.pow_hash = net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(header)) self.header_hash = bitcoin_data.hash256(bitcoin_data.block_header_type.pack(header)) self.hash = bitcoin_data.hash256(share1a_type.pack(self.as_share1a())) if self.pow_hash > self.target: print 'hash %x' % self.pow_hash print 'targ %x' % self.target raise ValueError('not enough work!') self.other_txs = other_txs if self.pow_hash <= self.header['bits'].target else None # XXX eww self.time_seen = time.time() self.peer = None
def rpc_getblocktemplate(self, param): if param["mode"] == "template": pass elif param["mode"] == "submit": result = param["data"] block = bitcoin_data.block_type.unpack(result.decode("hex")) if ( sum(tx_out["value"] for tx_out in block["txs"][0]["tx_outs"]) != sum(tx["tx_outs"][0]["value"] for tx in block["txs"][1:]) + 5000000000 ): print "invalid fee" if block["header"]["previous_block"] != self.blocks[-1]: return False if bitcoin_data.hash256(result.decode("hex")) > block["header"]["bits"].target: return False header_hash = bitcoin_data.hash256(bitcoin_data.block_header_type.pack(block["header"])) self.blocks.append(header_hash) self.headers[header_hash] = block["header"] reactor.callLater(0, self.new_block.happened) return True else: raise jsonrpc.Error_for_code(-1)("invalid request") txs = [] for i in xrange(100): fee = i txs.append( dict( data=bitcoin_data.tx_type.pack( dict(version=1, tx_ins=[], tx_outs=[dict(value=fee, script="hello!" * 100)], lock_time=0) ).encode("hex"), fee=fee, ) ) return { "version": 2, "previousblockhash": "%064x" % (self.blocks[-1],), "transactions": txs, "coinbaseaux": {"flags": "062f503253482f"}, "coinbasevalue": 5000000000 + sum(tx["fee"] for tx in txs), "target": "0000000000000513c50000000000000000000000000000000000000000000000", "mintime": 1351655621, "mutable": ["time", "transactions", "prevblock"], "noncerange": "00000000ffffffff", "sigoplimit": 20000, "sizelimit": 1000000, "curtime": 1351659940, "bits": "21008000", "height": len(self.blocks), }
def __init__(self, net, header, share_info, merkle_branch, other_txs=None):
    # Construct and validate a share; other_txs must be present iff the
    # share's PoW also solves a block (enforced at the bottom).
    self.net = net
    if p2pool.DEBUG and other_txs is not None and bitcoin_data.calculate_merkle_branch([0] + [bitcoin_data.hash256(bitcoin_data.tx_type.pack(x)) for x in other_txs], 0) != merkle_branch:
        raise ValueError('merkle_branch and other_txs do not match')
    if len(merkle_branch) > 16:
        raise ValueError('merkle_branch too long!')
    self.header = header
    self.share_info = share_info
    self.merkle_branch = merkle_branch
    self.share_data = self.share_info['share_data']
    self.target = self.share_info['bits'].target
    self.timestamp = self.share_info['timestamp']
    if len(self.share_data['new_script']) > 100:
        raise ValueError('new_script too long!')
    if script.get_sigop_count(self.share_data['new_script']) > 1:
        raise ValueError('too many sigops!')
    self.previous_hash = self.share_data['previous_share_hash']
    if len(self.share_data['nonce']) > 100:
        raise ValueError('nonce too long!')
    if len(self.share_data['coinbase']) > 100:
        raise ValueError('''coinbase too large! %i bytes''' % (len(self.share_data['coinbase']),))
    self.pow_hash = net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(header))
    self.header_hash = bitcoin_data.hash256(bitcoin_data.block_header_type.pack(header))
    self.hash = bitcoin_data.hash256(share1a_type.pack(self.as_share1a()))
    if self.pow_hash > self.target:
        print 'hash %x' % self.pow_hash
        print 'targ %x' % self.target
        raise ValueError('not enough work!')
    # other_txs is required exactly when this share is a block solution
    if other_txs is not None and not self.pow_hash <= self.header['bits'].target:
        raise ValueError('other_txs provided when not a block solution')
    if other_txs is None and self.pow_hash <= self.header['bits'].target:
        raise ValueError('other_txs not provided when a block solution')
    self.other_txs = other_txs
    # XXX eww
    self.time_seen = time.time()
    self.peer = None
def handle_header(new_header):
    # Update best_block_header when a suitable candidate header arrives.
    pow_hash = self.net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(new_header))
    # check that header matches current target
    if pow_hash > self.bitcoind_work.value['bits'].target:
        return
    current_best = self.best_block_header.value
    bitcoind_best_block = self.bitcoind_work.value['previous_block']
    if current_best is None:
        self.best_block_header.set(new_header)
    elif (new_header['previous_block'] == bitcoind_best_block
          and bitcoin_data.hash256(bitcoin_data.block_header_type.pack(current_best)) == bitcoind_best_block):
        # new is child of current and previous is current
        self.best_block_header.set(new_header)
    elif (bitcoin_data.hash256(bitcoin_data.block_header_type.pack(new_header)) == bitcoind_best_block
          and current_best['previous_block'] != bitcoind_best_block):
        # new is current and previous is not a child of current
        self.best_block_header.set(new_header)
def test_hashlink3(self):
    # A hash link built over (prefix + tail), with tail declared, must
    # reproduce hash256(prefix + tail + suffix) for any suffix.
    for _ in xrange(100):
        prefix = random_bytes(random.randrange(2048))
        tail = random_bytes(random.randrange(200))
        suffix = random_bytes(random.randrange(2048))
        link = data.prefix_to_hash_link(prefix + tail, tail)
        expected = bitcoin_data.hash256(prefix + tail + suffix)
        assert data.check_hash_link(link, suffix, tail) == expected
def get_ref_hash(cls, net, share_info, ref_merkle_link):
    # Hash the share's ref record (network identifier + share_info),
    # fold it through ref_merkle_link, and pack as a 256-bit integer.
    ref_record = dict(identifier=net.IDENTIFIER, share_info=share_info)
    ref_hash = bitcoin_data.hash256(cls.ref_type.pack(ref_record))
    linked = bitcoin_data.check_merkle_link(ref_hash, ref_merkle_link)
    return pack.IntType(256).pack(linked)
def test_tx_hash(self):
    # hash256 of a packed coinbase transaction (previous_output and
    # sequence are None) must equal the known transaction id.
    assert data.hash256(
        data.tx_type.pack(
            dict(
                version=1,
                tx_ins=[
                    dict(
                        previous_output=None,
                        sequence=None,
                        script=
                        '70736a0468860e1a0452389500522cfabe6d6d2b2f33cf8f6291b184f1b291d24d82229463fcec239afea0ee34b4bfc622f62401000000000000004d696e656420627920425443204775696c6420ac1eeeed88'
                        .decode('hex'),
                    )
                ],
                tx_outs=[
                    dict(
                        value=5003880250,
                        script=data.pubkey_hash_to_script2(
                            pack.IntType(160).unpack(
                                'ca975b00a8c203b8692f5a18d92dc5c2d2ebc57b'.
                                decode('hex'))),
                    )
                ],
                lock_time=0,
            ))
    ) == 0xb53802b2333e828d6532059f46ecf6b313a42d79f97925e457fbbfda45367e5c
def rpc_submit(self, worker_name, job_id, extranonce2, ntime, nonce):
    # Stratum mining.submit handler: rebuild the block header from the
    # job stored in handler_map and hand it to the work responder.
    if job_id not in self.handler_map:
        print >>sys.stderr, '''Couldn't link returned work's job id with its handler. This should only happen if this process was recently restarted!'''
        return False
    x, got_response = self.handler_map[job_id]
    coinb_nonce = extranonce2.decode('hex')
    assert len(coinb_nonce) == self.wb.COINBASE_NONCE_LENGTH
    # splice the miner's extranonce2 into the coinbase transaction
    new_packed_gentx = x['coinb1'] + coinb_nonce + x['coinb2']
    header = dict(
        version=x['version'],
        previous_block=x['previous_block'],
        merkle_root=bitcoin_data.check_merkle_link(bitcoin_data.hash256(new_packed_gentx), x['merkle_link']),
        # _swap4(..., True) presumably undoes the miner's byte ordering of
        # ntime/nonce — TODO confirm against getwork._swap4's definition
        timestamp=pack.IntType(32).unpack(getwork._swap4(ntime.decode('hex'),True)),
        bits=x['bits'],
        nonce=pack.IntType(32).unpack(getwork._swap4(nonce.decode('hex'),True)),
    )
    #print "stratum: jobid: %s" %(job_id)
    #print "stratum bits: %s %X" % (hex(header['bits'].bits), header['bits'].target)
    #print "stratum: timestamp: %X" % (header['timestamp'])
    #print "stratum: extranonce2: %s" % (extranonce2)
    #print "stratum: nonce: %s %s\n" % (hex(header['nonce']),nonce)
    #print "stratum: coinb1: %s\n" % (x['coinb1'].encode('hex'))
    #print "stratum: coinb2: %s\n" % (x['coinb2'].encode('hex'))
    print "stratum: header: \n %s\n" % (str(header))
    #print "stratum: new_packed_gentx:\n %s\n" % (new_packed_gentx.encode('hex'))
    #print "stratum: merkle_root: %s \n\n" % (header['merkle_root'])
    return got_response(header, worker_name, coinb_nonce)
def handle_block(block_data):
    # Filter for blocks whose coinbase looks p2pool-generated: many
    # payout outputs, a trailing zero-value OP_RETURN-style output of at
    # least 33 bytes, and the donation script as the second-to-last
    # output. Matching blocks are recorded once, keyed by header hash.
    block = block_data['block']
    txouts = block['txs'][0]['tx_outs']
    if len(txouts) < 25:
        return
    if not txouts[-1]['script'].startswith('\x6a'):  # 0x6a == OP_RETURN opcode
        return
    if len(txouts[-1]['script']) < 33:
        return
    if txouts[-1]['value'] != 0:
        return
    if txouts[-2]['script'] != p2pool_data.DONATION_SCRIPT:
        return
    hash_str = '%064x' % bitcoin_data.hash256(bitcoin_data.block_header_type.pack(block['header']))
    print hash_str
    if hash_str not in blocks_dict:
        print 'inserted'
        x = dict(
            Id=hash_str,
            PrevBlock='%064x' % block['header']['previous_block'],
            GenerationTxHash='%064x' % block_data['gentx_hash'],
            BlockHeight=block_data['height'],
            Difficulty=bitcoin_data.target_to_difficulty(block['header']['bits'].target),
            Timestamp=block['header']['timestamp'],
            IsOrphaned=None, # XXX
        )
        blocks.append(x)
        blocks_dict[hash_str] = x
def rpc_submit(self, worker_name, job_id, extranonce2, ntime, nonce):
    # Stratum mining.submit handler: look up the stored job, rebuild the
    # gentx and block header from the miner's fields, and pass them to
    # got_response (defined in work.py).
    if job_id not in self.handler_map:
        print >>sys.stderr, '''Couldn't link returned work's job id with its handler. This should only happen if this process was recently restarted!'''
        return False
    x, got_response = self.handler_map[job_id]
    # NOTE(review): extranonce2 is byte-swapped here, unlike the sibling
    # handlers that use it raw — confirm this matches the miner's wire
    # byte order for this pool
    coinb_nonce = getwork._swap4(extranonce2.decode('hex'))
    assert len(coinb_nonce) == self.wb.COINBASE_NONCE_LENGTH
    new_packed_gentx = x['coinb1'] + coinb_nonce + x['coinb2']
    #build header
    header = dict(
        version=x['version'],
        previous_block=x['previous_block'],
        merkle_root=bitcoin_data.check_merkle_link(bitcoin_data.hash256(new_packed_gentx), x['merkle_link']),
        timestamp=pack.IntType(32).unpack(getwork._swap4(ntime.decode('hex'),True)),
        bits=x['bits'],
        nonce=pack.IntType(32).unpack(getwork._swap4(nonce.decode('hex'),True)),
    )
    #print "stratum: nonce %s extranonce2: %s" %(nonce, extranonce2)
    # print "stratum: header:%s\n" % (str(header))
    # print "stratum: merkle_root %s" % (hex(header['merkle_root']))
    # print "stratum: merkle_link %s" % (repr(x['merkle_link']))
    # print "stratum: new_packed_gentx %s" % (repr(new_packed_gentx))
    # #definition of got_response is in work.py
    return got_response(header, worker_name, coinb_nonce)
def get_share(header, transactions):
    # Closure (captures gentx, share_info, other_transaction_hashes, cls,
    # net): builds the Share object once the miner returns a solved
    # header plus the full transaction list.
    assert transactions[0] == gentx and [
        bitcoin_data.hash256(bitcoin_data.tx_type.pack(tx))
        for tx in transactions[1:]
    ] == other_transaction_hashes
    # strip merkle_root: min_header travels without it
    min_header = dict(header)
    del min_header['merkle_root']
    # hash link over the gentx minus its last 32+4 bytes (the ref suffix)
    hash_link = prefix_to_hash_link(
        bitcoin_data.tx_type.pack(gentx)[:-32 - 4], cls.gentx_before_refhash)
    merkle_link = bitcoin_data.calculate_merkle_link(
        [None] + other_transaction_hashes, 0)
    pow_hash = net.PARENT.POW_FUNC(
        bitcoin_data.block_header_type.pack(header))
    # carry the full tx list only when the share also solves a block
    return cls(net, None, dict(
        min_header=min_header,
        share_info=share_info,
        hash_link=hash_link,
        ref_merkle_link=dict(branch=[], index=0),
    ), merkle_link=merkle_link,
        other_txs=transactions[1:] if pow_hash <= header['bits'].target else None)
def check(self, tracker):
    # Fully validate this share against the share chain: enforce the
    # version-switch rules, regenerate the generation transaction, and
    # confirm the hash_link / merkle_link commitments.
    from p2pool import p2p
    if self.share_data['previous_share_hash'] is not None:
        previous_share = tracker.items[self.share_data['previous_share_hash']]
        if type(self) is type(previous_share):
            pass
        elif type(self) is type(previous_share).SUCCESSOR:
            if tracker.get_height(previous_share.hash) < self.net.CHAIN_LENGTH:
                from p2pool import p2p
                raise p2p.PeerMisbehavingError('switch without enough history')
            # switch only valid if 85% of hashes in [self.net.CHAIN_LENGTH*9//10, self.net.CHAIN_LENGTH] for new version
            counts = get_desired_version_counts(tracker,
                tracker.get_nth_parent_hash(previous_share.hash, self.net.CHAIN_LENGTH*9//10),
                self.net.CHAIN_LENGTH//10)
            if counts.get(self.VERSION, 0) < sum(counts.itervalues())*85//100:
                raise p2p.PeerMisbehavingError('switch without enough hash power upgraded')
        else:
            raise p2p.PeerMisbehavingError('''%s can't follow %s''' % (type(self).__name__, type(previous_share).__name__))
    # resolve the compact (share_count, tx_count) references into actual
    # transaction hashes via ancestor shares' new_transaction_hashes
    other_tx_hashes = [
        tracker.items[tracker.get_nth_parent_hash(self.hash, share_count)].share_info['new_transaction_hashes'][tx_count]
        for share_count, tx_count in self.iter_transaction_hash_refs()]
    share_info, gentx, other_tx_hashes2, get_share = self.generate_transaction(
        tracker, self.share_info['share_data'], self.header['bits'].target,
        self.share_info['timestamp'], self.share_info['bits'].target,
        self.contents['ref_merkle_link'], [(h, None) for h in other_tx_hashes],
        self.net, last_txout_nonce=self.contents['last_txout_nonce'])
    assert other_tx_hashes2 == other_tx_hashes
    if share_info != self.share_info:
        raise ValueError('share_info invalid')
    if bitcoin_data.hash256(bitcoin_data.tx_type.pack(gentx)) != self.gentx_hash:
        raise ValueError('''gentx doesn't match hash_link''')
    if bitcoin_data.calculate_merkle_link([None] + other_tx_hashes, 0) != self.merkle_link:
        raise ValueError('merkle_link and other_tx_hashes do not match')
    return gentx # only used by as_block
def check(self, tracker):
    # Re-derive the generation transaction and verify it matches what
    # this share committed to via its hash link.
    regenerated_info, gentx = self.generate_transaction(
        tracker,
        self.share_info['share_data'],
        self.header['bits'].target,
        self.share_info['timestamp'],
        self.share_info['bits'].target,
        self.common['ref_merkle_link'],
        self.net,
    )
    if regenerated_info != self.share_info:
        raise ValueError('share_info invalid')
    gentx_hash = bitcoin_data.hash256(bitcoin_data.tx_type.pack(gentx))
    if gentx_hash != self.gentx_hash:
        raise ValueError('''gentx doesn't match hash_link''')
    return gentx # only used by as_block
def rpc_submit(self, worker_name, job_id, extranonce2, ntime, nonce):
    # Stratum submit handler with DoS protection: rebuilds the header,
    # forwards it, then disconnects miners with excessive dead-on-arrival
    # or invalid-hash rates.
    if job_id not in self.handler_map:
        print >> sys.stderr, '''Couldn't link returned work's job id with its handler. This should only happen if this process was recently restarted!'''
        return False
    x, got_response = self.handler_map[job_id]
    coinb_nonce = extranonce2.decode('hex')
    assert len(coinb_nonce) == self.wb.COINBASE_NONCE_LENGTH
    new_packed_gentx = x['coinb1'] + coinb_nonce + x['coinb2']
    header = dict(
        version=x['version'],
        previous_block=x['previous_block'],
        merkle_root=bitcoin_data.check_merkle_link(
            bitcoin_data.hash256(new_packed_gentx), x['merkle_link']
        ),  # new_packed_gentx has witness data stripped
        timestamp=pack.IntType(32).unpack(
            getwork._swap4(ntime.decode('hex'))),
        bits=x['bits'],
        nonce=pack.IntType(32).unpack(getwork._swap4(nonce.decode('hex'))),
    )
    # Disconnect miners with large DOA rates to prevent DoS
    res = got_response(header, worker_name, coinb_nonce)
    # only judge once a minimum sample size (>20) has accumulated
    if len(self.wb._inner.my_share_hashes) > 20:
        if float(len(self.wb._inner.my_doa_share_hashes)) / float(
                len(self.wb._inner.my_share_hashes)) > 0.60:
            self.transport.loseConnection()
    # Disconnect miners with large hash > target to prevent DoS
    if self.wb._inner.total_hashes > 20:
        if float(self.wb._inner.invalid_hashes) / float(
                self.wb._inner.total_hashes) > 0.05:
            self.transport.loseConnection()
    return res
def check(self, tracker):
    # Validate this share by regenerating its transaction and checking
    # the header's merkle commitment to it.
    regenerated_info, gentx = generate_transaction(
        tracker,
        self.share_info['share_data'],
        self.header['bits'].target,
        self.share_info['timestamp'],
        self.net,
    )
    if regenerated_info != self.share_info:
        raise ValueError('share difficulty invalid')
    gentx_hash = bitcoin_data.hash256(bitcoin_data.tx_type.pack(gentx))
    if bitcoin_data.check_merkle_branch(gentx_hash, 0, self.merkle_branch) != self.header['merkle_root']:
        raise ValueError('''gentx doesn't match header via merkle_branch''')
def handle_shares(self, shares, peer):
    # Process a batch of (share, new_txs) pairs from a peer: record all
    # newly-seen transactions, add unseen shares to the tracker, and
    # recompute the best share if anything new arrived.
    if len(shares) > 5:
        print 'Processing %i shares from %s...' % (
            len(shares), '%s:%i' % peer.addr if peer is not None else None)
    new_count = 0
    all_new_txs = {}
    for share, new_txs in shares:
        if new_txs is not None:
            # collect txs keyed by their hash even for duplicate shares
            all_new_txs.update(
                (bitcoin_data.hash256(bitcoin_data.tx_type.pack(new_tx)),
                 new_tx) for new_tx in new_txs)
        if share.hash in self.node.tracker.items:
            #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool_data.format_hash(share.hash),)
            continue
        new_count += 1
        #print 'Received share %s from %r' % (p2pool_data.format_hash(share.hash), share.peer_addr)
        self.node.tracker.add(share)
    new_known_txs = dict(self.node.known_txs_var.value)
    new_known_txs.update(all_new_txs)
    self.node.known_txs_var.set(new_known_txs)
    if new_count:
        self.node.set_best_share()
    if len(shares) > 5:
        print '... done processing %i shares. New: %i Have: %i/~%i' % (
            len(shares), new_count, len(
                self.node.tracker.items), 2 * self.node.net.CHAIN_LENGTH)
def handle_shares(self, shares, peer): if len(shares) > 5: print 'Processing %i shares from %s...' % (len(shares), '%s:%i' % peer.addr if peer is not None else None) new_count = 0 all_new_txs = {} for share, new_txs in shares: if new_txs is not None: all_new_txs.update((bitcoin_data.hash256(bitcoin_data.tx_type.pack(new_tx)), new_tx) for new_tx in new_txs) if share.hash in self.node.tracker.items: #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool_data.format_hash(share.hash),) continue new_count += 1 #print 'Received share %s from %r' % (p2pool_data.format_hash(share.hash), share.peer_addr) self.node.tracker.add(share) new_known_txs = dict(self.node.known_txs_var.value) new_known_txs.update(all_new_txs) self.node.known_txs_var.set(new_known_txs) if new_count: self.node.set_best_share() if len(shares) > 5: print '... done processing %i shares. New: %i Have: %i/~%i' % (len(shares), new_count, len(self.node.tracker.items), 2*self.node.net.CHAIN_LENGTH)
def __init__(self, net, peer_addr, contents):
    # Deserialize and validate a share received from the network,
    # reconstructing its header hash and checking its proof of work.
    self.net = net
    self.peer_addr = peer_addr
    self.contents = contents
    self.min_header = contents["min_header"]
    self.share_info = contents["share_info"]
    self.hash_link = contents["hash_link"]
    self.merkle_link = contents["merkle_link"]
    if not (2 <= len(self.share_info["share_data"]["coinbase"]) <= 100):
        raise ValueError("""bad coinbase size! %i bytes""" % (len(self.share_info["share_data"]["coinbase"]),))
    if len(self.merkle_link["branch"]) > 16:
        raise ValueError("merkle branch too long!")
    assert not self.hash_link["extra_data"], repr(self.hash_link["extra_data"])
    self.share_data = self.share_info["share_data"]
    self.max_target = self.share_info["max_bits"].target
    self.target = self.share_info["bits"].target
    self.timestamp = self.share_info["timestamp"]
    self.previous_hash = self.share_data["previous_share_hash"]
    self.new_script = bitcoin_data.pubkey_hash_to_script2(self.share_data["pubkey_hash"])
    self.desired_version = self.share_data["desired_version"]
    # every index in new_transaction_hashes must be referenced exactly
    # once by this share's own (share_count == 0) transaction refs
    n = set()
    for share_count, tx_count in self.iter_transaction_hash_refs():
        assert share_count < 110
        if share_count == 0:
            n.add(tx_count)
    assert n == set(range(len(self.share_info["new_transaction_hashes"])))
    # recover the gentx hash from the hash link plus the ref suffix
    self.gentx_hash = check_hash_link(
        self.hash_link,
        self.get_ref_hash(net, self.share_info, contents["ref_merkle_link"])
        + pack.IntType(32).pack(self.contents["last_txout_nonce"])
        + pack.IntType(32).pack(0),
        self.gentx_before_refhash,
    )
    merkle_root = bitcoin_data.check_merkle_link(self.gentx_hash, self.merkle_link)
    self.header = dict(self.min_header, merkle_root=merkle_root)
    self.pow_hash = net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(self.header))
    self.hash = self.header_hash = bitcoin_data.hash256(bitcoin_data.block_header_type.pack(self.header))
    if self.target > net.MAX_TARGET:
        from p2pool import p2p
        raise p2p.PeerMisbehavingError("share target invalid")
    if self.pow_hash > self.target:
        from p2pool import p2p
        raise p2p.PeerMisbehavingError("share PoW invalid")
    self.new_transaction_hashes = self.share_info["new_transaction_hashes"]
    # XXX eww
    self.time_seen = time.time()
def _(tx):
    # New-transaction callback: track the newest tx timestamp seen and
    # add the tx to the known-transactions map.
    if tx.timestamp > time.time() + 3600:
        return  # ignore txs timestamped more than an hour in the future
    if tx.timestamp > self.bitcoind_work.value['txn_timestamp']:
        # NOTE(review): mutates bitcoind_work.value in place instead of
        # calling .set() — observers of the variable are not notified
        self.bitcoind_work.value['txn_timestamp'] = tx.timestamp
    new_known_txs = dict(self.known_txs_var.value)
    new_known_txs[bitcoin_data.hash256(bitcoin_data.tx_type.pack(tx))] = tx
    self.known_txs_var.set(new_known_txs)
def submit_block_p2p(block, factory, net):
    # Broadcast a solved block over the bitcoind p2p connection; if no
    # connection is up, log and signal a silent retry.
    conn = factory.conn.value
    if conn is None:
        block_hash = bitcoin_data.hash256(bitcoin_data.block_header_type.pack(block["header"]))
        print >>sys.stderr, "No bitcoind connection when block submittal attempted! %s%064x" % (
            net.PARENT.BLOCK_EXPLORER_URL_PREFIX, block_hash)
        raise deferral.RetrySilentlyException()
    conn.send_block(block=block)
def submit_block_p2p(block, factory, net):
    # Send a solved block to bitcoind over the p2p connection; raise a
    # silent-retry exception if no connection is currently available.
    if factory.conn.value is None:
        print >> sys.stderr, 'No bitcoind connection when block submittal attempted! %s%064x' % (
            net.PARENT.BLOCK_EXPLORER_URL_PREFIX,
            bitcoin_data.hash256(
                bitcoin_data.block_header_type.pack(block['header'])))
        raise deferral.RetrySilentlyException()
    factory.conn.value.send_block(block=block)
def __init__(self, net, peer_addr, contents):
    # Deserialize and validate a share received from the network
    # (variant with absheight/abswork and a 64-bit last_txout_nonce),
    # reconstructing its header hash and checking its proof of work.
    self.net = net
    self.peer_addr = peer_addr
    self.contents = contents
    self.min_header = contents['min_header']
    self.share_info = contents['share_info']
    self.hash_link = contents['hash_link']
    self.merkle_link = contents['merkle_link']
    if not (2 <= len(self.share_info['share_data']['coinbase']) <= 100):
        raise ValueError('''bad coinbase size! %i bytes''' % (len(self.share_info['share_data']['coinbase']),))
    if len(self.merkle_link['branch']) > 16:
        raise ValueError('merkle branch too long!')
    assert not self.hash_link['extra_data'], repr(self.hash_link['extra_data'])
    self.share_data = self.share_info['share_data']
    self.max_target = self.share_info['max_bits'].target
    self.target = self.share_info['bits'].target
    self.timestamp = self.share_info['timestamp']
    self.previous_hash = self.share_data['previous_share_hash']
    self.new_script = bitcoin_data.pubkey_hash_to_script2(self.share_data['pubkey_hash'])
    self.desired_version = self.share_data['desired_version']
    self.absheight = self.share_info['absheight']
    self.abswork = self.share_info['abswork']
    # every index in new_transaction_hashes must be referenced exactly
    # once by this share's own (share_count == 0) transaction refs
    n = set()
    for share_count, tx_count in self.iter_transaction_hash_refs():
        assert share_count < 110
        if share_count == 0:
            n.add(tx_count)
    assert n == set(range(len(self.share_info['new_transaction_hashes'])))
    # recover the gentx hash from the hash link plus the ref suffix
    self.gentx_hash = check_hash_link(
        self.hash_link,
        self.get_ref_hash(net, self.share_info, contents['ref_merkle_link'])
        + pack.IntType(64).pack(self.contents['last_txout_nonce'])
        + pack.IntType(32).pack(0),
        self.gentx_before_refhash,
    )
    merkle_root = bitcoin_data.check_merkle_link(self.gentx_hash, self.merkle_link)
    self.header = dict(self.min_header, merkle_root=merkle_root)
    self.pow_hash = net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(self.header))
    self.hash = self.header_hash = bitcoin_data.hash256(bitcoin_data.block_header_type.pack(self.header))
    if self.target > net.MAX_TARGET:
        from p2pool import p2p
        raise p2p.PeerMisbehavingError('share target invalid')
    if self.pow_hash > self.target:
        from p2pool import p2p
        raise p2p.PeerMisbehavingError('share PoW invalid')
    self.new_transaction_hashes = self.share_info['new_transaction_hashes']
    # XXX eww
    self.time_seen = time.time()
def test_header_hash(self):
    # A known block header must pack and hash to its known block hash.
    header = dict(
        version=1,
        previous_block=0x000000000000038a2a86b72387f93c51298298a732079b3b686df3603d2f6282,
        merkle_root=0x37a43a3b812e4eb665975f46393b4360008824aab180f27d642de8c28073bc44,
        timestamp=1323752685,
        bits=data.FloatingInteger(437159528),
        nonce=3658685446,
    )
    expected = 0x000000000000003aaaf7638f9f9c0d0c60e8b0eb817dcdb55fd2b1964efc5175
    assert data.hash256(data.block_header_type.pack(header)) == expected
def rpc_getmemorypool(self, result=None):
    # Mock bitcoind getmemorypool RPC for tests: with `result` (hex
    # block) it validates the submission against the simulated chain;
    # without, it serves a canned template that randomly includes a
    # fixed set of real transactions.
    if result is not None:
        block = bitcoin_data.block_type.unpack(result.decode('hex'))
        if block['header']['previous_block'] != self.blocks[-1]:
            return False
        header_hash = bitcoin_data.hash256(bitcoin_data.block_header_type.pack(block['header']))
        self.blocks.append(header_hash)
        self.headers[header_hash] = block['header']
        return True
    return {
        "version" : 2,
        "previousblockhash" : '%064x' % (self.blocks[-1],),
        # coin-flip: either three real captured transactions or none
        "transactions" : ([
            {
                "data" : "01000000014c6ade5af7e7803fdf2ecae88d939ec044fdb3de84bc70168723969666b30e38010000008b483045022100e1fce6361811e24d57b494c3d71a9e653e54b9489dd5a1889affdef8a1e912b002204079a4720f25b55a0f092bcd70a5824e38a85072bb8e58477df8eb6a66b967ae01410426952de5ee7e5fea3c2065ffceada913d7d643f5631f0d714d667a0b81b599aada24f6e0b46d4bd4051b8111be95cf460fbd1977eadb3f2adc68b4018f8b5ba6ffffffff020065cd1d000000001976a9144799fc9c1b2cfb2c0187551be50f6ea41ed37ed888ac80969800000000001976a914ac2092a73378e8b80a127748a10274c962579f5188ac00000000",
                "hash" : "9665fece00aceffd175a28912707889c9b2039be004c77e1497b59d8f5132966",
                "depends" : [ ],
                "fee" : 0,
                "sigops" : 2
            },
            {
                "data" : "0100000003bbb3bda750ea9bc057906a7fb12b7a0bf81e4a2c5ffbf3117d0aff9f6e4a7d8c000000006b483045022100fbadaa914af56955dca66c1cca59f7ec9fadbfe01fdec7d72e6cde85abd67be302202713f52dacf7da9c678c33440caef5e2de65dc02994197b5b59d284214088fb1012102ace616bb7d1e5a58118c83466f410fd2c5423450da0dbeb5b1fca158873a92cbffffffffd5c3ec30d816ecc203c581cb7365c1c51c1917b59660ca16683c2f4e1e394337010000006a47304402207dc3644c8a14175e1cec939fcec4d60702f556ee153f602b764adcf32c5a1e6b02207aee1c6ed4d0e8004f1a4fe0a82401bf7e8f285ae1a506fe1be25670ebdb092d0121034d77fd7088a2ee52bc1a3f850772aa61a47d230b3093065a23fd909d95c38ffbfffffffffbf4692b046b684fc51bc7112da5bbd6094fb92eb87f25c6a0893fea15fac13b000000006c493046022100b8d79f514b2bd20f9f2aa5bb5031cf038a5b97fd2fe9ea182187bbeea454d5d202210085f55c96c1e2be5faf26f6122e2d105f9ffaa49b61890a1def9b30ff48be3362012102716ee02e7f5a9f2e5619b5ac7c092e5e5aab6fc45708504bb1f8aac4ea31a84cffffffff0262cc990a000000001976a9141f7dcec4f61c2a1488c7ccf03673120f230d1fd988ac005ed0b2000000001976a914f2b29da6ac6a2aac1f088ead181b553d60d35e9c88ac00000000",
                "hash" : "27ac960a159b7f8a7d3cc3095d0248375ca65be2c98b16a5818814262eabe01c",
                "depends" : [ ],
                "fee" : 0,
                "sigops" : 2
            },
            {
                "data" : "01000000012d0b6b9d9f57de5c567ea43f26e488321bfcfd0226f3043f7151d504702cfacd010000008b483045022025830bac86c09f77fb132507952210fd0b2452d8d583c12be80e274d943c7127022100f1674c75ae0b38fcee9489daa4164d6f84a386534be0eb1cc063e853bc1d3258014104f993167e332d7fe550b5049d35a972463944beb9ae8e9abe888f832ba6847883a2fd3464765b6350b89a84c8fe7ecee0cca4352494413b4c15791c1cd0694022ffffffff02007ddaac000000001976a9140e0c40f1b244e2dd07c95f52978f50a6fe5ec85188ac66208687080000001976a9143e64cf12ce0369ce9fe78b37708ae6f8a565b2d288ac00000000",
                "hash" : "00d1647f78e05715b171c9169d555141c9a6ec54d1ec177534aae4555d7bbc7a",
                "depends" : [ ],
                "fee" : 0,
                "sigops" : 2
            }
        ] if random.randrange(2) else []),
        "coinbaseaux" : { "flags" : "062f503253482f" },
        "coinbasevalue" : 5044450000,
        "target" : "0000000000000513c50000000000000000000000000000000000000000000000",
        "mintime" : 1351655621,
        "mutable" : [ "time", "transactions", "prevblock" ],
        "noncerange" : "00000000ffffffff",
        "sigoplimit" : 20000,
        "sizelimit" : 1000000,
        "curtime" : 1351659940,
        "bits" : "21008000",
        "height" : len(self.blocks),
    }
def get_ref_hash(cls, net, share_info, ref_merkle_link):
    # Pack the merkle-adjusted hash of this share's ref record as a
    # 256-bit integer.
    packed_ref = cls.ref_type.pack(dict(
        identifier=net.IDENTIFIER,
        share_info=share_info,
    ))
    return pack.IntType(256).pack(
        bitcoin_data.check_merkle_link(bitcoin_data.hash256(packed_ref), ref_merkle_link))
def rpc_getmemorypool(self, result=None):
    # Mock bitcoind getmemorypool RPC for tests: with `result` it
    # validates the submitted block against the simulated chain;
    # without, it serves a template of 100 synthetic transactions.
    if result is not None:
        block = bitcoin_data.block_type.unpack(result.decode('hex'))
        # fee sanity check: warns but does not reject
        if sum(tx_out['value'] for tx_out in block['txs'][0]['tx_outs']) != sum(tx['tx_outs'][0]['value'] for tx in block['txs'][1:]) + 5000000000:
            print 'invalid fee'
        if block['header']['previous_block'] != self.blocks[-1]:
            return False
        if bitcoin_data.hash256(result.decode('hex')) > block['header']['bits'].target:
            return False
        header_hash = bitcoin_data.hash256(bitcoin_data.block_header_type.pack(block['header']))
        self.blocks.append(header_hash)
        self.headers[header_hash] = block['header']
        reactor.callLater(0, self.new_block.happened)
        return True
    txs = []
    for i in xrange(100):
        fee = i
        txs.append(dict(
            data=bitcoin_data.tx_type.pack(dict(version=1, tx_ins=[], tx_outs=[dict(value=fee*1000 + i, script='hello!'*100)], lock_time=0)).encode('hex'),
            fee=fee,
        ))
    return {
        "version" : 2,
        "previousblockhash" : '%064x' % (self.blocks[-1],),
        "transactions" : txs,
        "coinbaseaux" : { "flags" : "062f503253482f" },
        "coinbasevalue" : 5000000000 + sum(tx['fee'] for tx in txs),
        "target" : "0000000000000513c50000000000000000000000000000000000000000000000",
        "mintime" : 1351655621,
        "mutable" : [ "time", "transactions", "prevblock" ],
        "noncerange" : "00000000ffffffff",
        "sigoplimit" : 20000,
        "sizelimit" : 1000000,
        "curtime" : 1351659940,
        "bits" : "21008000",
        "height" : len(self.blocks),
    }
def get_share(header, transactions):
    # Closure: wrap a solved header plus its transaction list into a share
    # object. Relies on gentx / other_transaction_hashes / share_info / cls /
    # net from the enclosing scope.
    assert transactions[0] == gentx and [bitcoin_data.hash256(bitcoin_data.tx_type.pack(tx)) for tx in transactions[1:]] == other_transaction_hashes
    min_header = dict(header)
    del min_header['merkle_root']
    pow_hash = net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(header))
    common = dict(
        min_header=min_header,
        share_info=share_info,
        hash_link=prefix_to_hash_link(bitcoin_data.tx_type.pack(gentx)[:-32-4], cls.gentx_before_refhash),
        ref_merkle_link=dict(branch=[], index=0),
    )
    return cls(
        net, None, common,
        merkle_link=bitcoin_data.calculate_merkle_link([None] + other_transaction_hashes, 0),
        # include the full tx list only when the share is a block solution
        other_txs=transactions[1:] if pow_hash <= header['bits'].target else None,
    )
def get_blocks2(n):
    # Walk blockchain.info backwards from the tip, reconstructing approximate
    # block structures for the most recent n heights.
    height = yield blockchain('q/getblockcount')
    res = []
    for i in xrange(n):
        x = yield blockchain('block-height/%i?format=json' % (height - i,))
        for block in x['blocks']:
            header = dict(
                version=block['ver'],
                previous_block=int(block['prev_block'], 16),
                merkle_root=int(block['mrkl_root'], 16),
                timestamp=block['time'],
                bits=bitcoin_data.FloatingInteger(block['bits']),
                nonce=block['nonce'],
            )
            assert bitcoin_data.hash256(bitcoin_data.block_header_type.pack(header)) == int(block['hash'], 16)
            # there seems to be no way to get the raw transaction from
            # blockchain.info (?format=hex doesn't work for coinbase
            # transactions), so fake the transactions instead
            txs = [dict(
                version=tx['ver'],
                tx_ins=[dict(
                    previous_output=None,
                    script='',
                    sequence=0,
                ) for tx_in in tx['inputs']],
                tx_outs=[dict(
                    value=tx_out['value'],
                    script='\x6a' + 'blah'*100 if tx_out['type'] == -1 else
                        p2pool_data.DONATION_SCRIPT if tx_out['addr'] == bitcoin_data.script2_to_address(p2pool_data.DONATION_SCRIPT, net) else
                        bitcoin_data.pubkey_hash_to_script2(bitcoin_data.address_to_pubkey_hash(tx_out['addr'], net)),
                ) for tx_out in tx['out']],
                lock_time=0,
            ) for tx in block['tx']]
            # can't verify the coinbase tx hash: its script is unknown here
            res.append(dict(
                block=dict(header=header, txs=txs),
                height=block['height'],
                gentx_hash=int(block['tx'][0]['hash'], 16),
            ))
    defer.returnValue(res)
def test_get_block(self):
    # Fetch a specific mainnet block over P2P and verify that the merkle root
    # of its transactions and the hash of its header both check out.
    client_factory = p2p.ClientFactory(networks.nets['bitcoin'])
    connection = reactor.connectTCP('127.0.0.1', 8333, client_factory)
    try:
        block_hash = 0x000000000000046acff93b0e76cd10490551bf871ce9ac9fad62e67a07ff1d1e
        block = yield deferral.retry()(defer.inlineCallbacks(lambda: defer.returnValue((yield (yield client_factory.getProtocol()).get_block(block_hash)))))()
        assert data.merkle_hash(map(data.hash256, map(data.tx_type.pack, block['txs']))) == block['header']['merkle_root']
        assert data.hash256(data.block_header_type.pack(block['header'])) == block_hash
    finally:
        client_factory.stopTrying()
        connection.disconnect()
def test_get_block(self):
    # Same check as the mainnet variant, but against a local regtest node
    # (port 18444) and the regtest genesis block hash.
    client_factory = p2p.ClientFactory(networks.nets['bitcoin_regtest'])
    connection = reactor.connectTCP('127.0.0.1', 18444, client_factory)
    try:
        block_hash = 0x0f9188f13cb7b2c71f2a335e3a4fc328bf5beb436012afca590b1a11466e2206
        block = yield deferral.retry()(defer.inlineCallbacks(lambda: defer.returnValue((yield (yield client_factory.getProtocol()).get_block(block_hash)))))()
        assert data.merkle_hash(map(data.hash256, map(data.tx_type.pack, block['txs']))) == block['header']['merkle_root']
        assert data.hash256(data.block_header_type.pack(block['header'])) == block_hash
    finally:
        client_factory.stopTrying()
        connection.disconnect()
def test_get_block(self):
    # Variant of the mainnet block test that builds the merkle root from
    # data.get_txid instead of hashing packed transactions directly.
    client_factory = p2p.ClientFactory(networks.nets['bitcoin'])
    connection = reactor.connectTCP('127.0.0.1', 8333, client_factory)
    try:
        block_hash = 0x000000000000046acff93b0e76cd10490551bf871ce9ac9fad62e67a07ff1d1e
        block = yield deferral.retry()(defer.inlineCallbacks(lambda: defer.returnValue((yield (yield client_factory.getProtocol()).get_block(block_hash)))))()
        assert data.merkle_hash(map(data.get_txid, block['txs'])) == block['header']['merkle_root']
        assert data.hash256(data.block_header_type.pack(block['header'])) == block_hash
    finally:
        client_factory.stopTrying()
        connection.disconnect()
def __init__(self, net, peer, common, merkle_link, other_txs):
    """Parse and validate a share received from a peer.

    Raises ValueError for malformed shares and p2p.PeerMisbehavingError for
    shares indicating peer misbehavior (pre-switch timestamp, invalid PoW).
    """
    # Import once up front; previously `p2p` was only bound inside the
    # SWITCH_TIME branch below, so the later `raise p2p.PeerMisbehavingError`
    # for invalid PoW raised NameError instead. Kept as a local import to
    # avoid a circular dependency at module load.
    from p2pool import p2p
    self.net = net
    self.peer = peer
    self.common = common
    self.min_header = common['min_header']
    self.share_info = common['share_info']
    self.hash_link = common['hash_link']
    self.merkle_link = merkle_link
    self.other_txs = other_txs
    if len(self.share_info['share_data']['coinbase']) > 100:
        # BUG FIX: was `self.self.share_data[...]`, which raised
        # AttributeError instead of the intended ValueError
        raise ValueError('''coinbase too large! %i bytes''' % (len(self.share_info['share_data']['coinbase']),))
    if len(merkle_link['branch']) > 16:
        raise ValueError('merkle branch too long!')
    if p2pool.DEBUG and other_txs is not None and bitcoin_data.calculate_merkle_link([0] + [bitcoin_data.hash256(bitcoin_data.tx_type.pack(x)) for x in other_txs], 0) != merkle_link:
        raise ValueError('merkle_link and other_txs do not match')
    assert not self.hash_link['extra_data'], repr(self.hash_link['extra_data'])
    self.share_data = self.share_info['share_data']
    self.max_target = self.share_info['max_bits'].target
    self.target = self.share_info['bits'].target
    self.timestamp = self.share_info['timestamp']
    self.previous_hash = self.share_data['previous_share_hash']
    self.new_script = bitcoin_data.pubkey_hash_to_script2(self.share_data['pubkey_hash'])
    self.desired_version = self.share_data['desired_version']
    if self.timestamp < net.SWITCH_TIME:
        raise p2p.PeerMisbehavingError('peer sent a new-style share with a timestamp before the switch time')
    # Reconstruct the gentx hash from the hash link and ref structure, then
    # derive the header and its proof-of-work.
    self.gentx_hash = check_hash_link(
        self.hash_link,
        self.get_ref_hash(net, self.share_info, common['ref_merkle_link']) + pack.IntType(32).pack(0),
        self.gentx_before_refhash,
    )
    merkle_root = bitcoin_data.check_merkle_link(self.gentx_hash, merkle_link)
    self.header = dict(self.min_header, merkle_root=merkle_root)
    self.pow_hash = net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(self.header))
    self.hash = self.header_hash = bitcoin_data.hash256(bitcoin_data.block_header_type.pack(self.header))
    if self.pow_hash > self.target:
        raise p2p.PeerMisbehavingError('share PoW invalid')
    # other_txs must be present exactly when the share is a block solution
    if other_txs is not None and not self.pow_hash <= self.header['bits'].target:
        raise ValueError('other_txs provided when not a block solution')
    if other_txs is None and self.pow_hash <= self.header['bits'].target:
        raise ValueError('other_txs not provided when a block solution') # XXX eww
    self.time_seen = time.time()
def got_conn(self, conn):
    # On connect: wait briefly, then publish three huge dummy transactions
    # (~900 kB scripts) through the mining_txs variable, recording when they
    # were sent.
    p2p.Node.got_conn(self, conn)
    yield deferral.sleep(0.5)
    updated_txs = dict(self.mining_txs_var.value)
    for seq in xrange(3):
        huge_tx = dict(version=0, tx_ins=[], tx_outs=[dict(value=0, script="x" * 900000)], lock_time=seq)
        updated_txs[bitcoin_data.hash256(bitcoin_data.tx_type.pack(huge_tx))] = huge_tx
    self.mining_txs_var.set(updated_txs)
    self.sent_time = reactor.seconds()
def handle_header(new_header, valid=False):
    # Ignore headers that don't meet the network's current target.
    new_hash = self.net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(new_header))
    if new_hash > self.bitcoind_work.value['bits'].target:
        return
    if not valid:
        # best-effort: confirm bitcoind knows this header, bail out otherwise
        try:
            _ = (yield self.bitcoind.rpc_getblockheader(new_hash))
        except:
            return
    bitcoind_best_block = self.bitcoind_work.value['previous_block']
    current = self.best_block_header.value
    accept = (
        current is None
        # new is child of current and previous is current
        or (new_header['previous_block'] == bitcoind_best_block and bitcoin_data.hash256(bitcoin_data.block_header_type.pack(current)) == bitcoind_best_block)
        # new is current and previous is not a child of current
        or (bitcoin_data.hash256(bitcoin_data.block_header_type.pack(new_header)) == bitcoind_best_block and current['previous_block'] != bitcoind_best_block)
    )
    if accept:
        self.best_block_header.set(new_header)
def check(self, tracker):
    # Regenerate the generation transaction from this share's own data and
    # verify it matches what the share claims.
    share_info, gentx, other_transaction_hashes, get_share = self.generate_transaction(
        tracker, self.share_info['share_data'], self.header['bits'].target,
        self.share_info['timestamp'], self.share_info['bits'].target,
        self.common['ref_merkle_link'], [], self.net,
    ) # ok because other_transaction_hashes is only used in get_share
    if share_info != self.share_info:
        raise ValueError('share_info invalid')
    if bitcoin_data.hash256(bitcoin_data.tx_type.pack(gentx)) != self.gentx_hash:
        raise ValueError('''gentx doesn't match hash_link''')
    return gentx # only used by as_block
def _getwork(self, request, data, long_poll): request.setHeader('X-Long-Polling', '/long-polling') request.setHeader('X-Roll-NTime', 'expire=100') request.setHeader('X-Is-P2Pool', 'true') if request.getHeader('Host') is not None: request.setHeader('X-Stratum', 'stratum+tcp://' + request.getHeader('Host')) if data is not None: header = getwork.decode_data(data) if header['merkle_root'] not in self.merkle_root_to_handler: print >>sys.stderr, '''Couldn't link returned work's merkle root with its handler. This should only happen if this process was recently restarted!''' defer.returnValue(False) defer.returnValue(self.merkle_root_to_handler[header['merkle_root']](header, request.getUser() if request.getUser() is not None else '', '\0'*self.worker_bridge.COINBASE_NONCE_LENGTH)) if p2pool.DEBUG: id = random.randrange(1000, 10000) print 'POLL %i START is_long_poll=%r user_agent=%r user=%r' % (id, long_poll, request.getHeader('User-Agent'), request.getUser()) if long_poll: request_id = request.getClientIP(), request.getHeader('Authorization') if self.worker_views.get(request_id, self.worker_bridge.new_work_event.times) != self.worker_bridge.new_work_event.times: if p2pool.DEBUG: print 'POLL %i PUSH' % (id,) else: if p2pool.DEBUG: print 'POLL %i WAITING' % (id,) yield self.worker_bridge.new_work_event.get_deferred() self.worker_views[request_id] = self.worker_bridge.new_work_event.times x, handler = self.worker_bridge.get_work(*self.worker_bridge.preprocess_request(request.getUser() if request.getUser() is not None else '')) res = getwork.BlockAttempt( version=x['version'], previous_block=x['previous_block'], merkle_root=bitcoin_data.check_merkle_link(bitcoin_data.hash256(x['coinb1'] + '\0'*self.worker_bridge.COINBASE_NONCE_LENGTH + x['coinb2']), x['merkle_link']), timestamp=x['timestamp'], bits=x['bits'], share_target=x['share_target'], ) assert res.merkle_root not in self.merkle_root_to_handler self.merkle_root_to_handler[res.merkle_root] = handler if p2pool.DEBUG: print 
'POLL %i END identifier=%i' % (id, self.worker_bridge.new_work_event.times) extra_params = {} if request.getHeader('User-Agent') == 'Jephis PIC Miner': # ASICMINER BE Blades apparently have a buffer overflow bug and # can't handle much extra in the getwork response extra_params = {} else: extra_params = dict(identifier=str(self.worker_bridge.new_work_event.times), submitold=True) defer.returnValue(res.getwork(**extra_params))
def check(self, tracker):
    # Old-format share check: a Share may not directly follow a NewShare;
    # then regenerate the gentx and verify it matches the share's claims.
    if self.share_data['previous_share_hash'] is not None:
        previous_share = tracker.shares[self.share_data['previous_share_hash']]
        if isinstance(previous_share, NewShare):
            from p2pool import p2p
            raise p2p.PeerMisbehavingError('''Share can't follow NewShare''')
    share_info, gentx = self.generate_transaction(
        tracker, self.share_info['share_data'], self.header['bits'].target,
        self.share_info['timestamp'], self.share_info['bits'].target,
        self.common['ref_merkle_link'], self.net,
    )
    if share_info != self.share_info:
        raise ValueError('share_info invalid')
    if bitcoin_data.hash256(bitcoin_data.tx_type.pack(gentx)) != self.gentx_hash:
        raise ValueError('''gentx doesn't match hash_link''')
    return gentx # only used by as_block
def handle_remember_tx(self, tx_hashes, txs): t0 = time.time() for tx_hash in tx_hashes: if tx_hash in self.remembered_txs: print >> sys.stderr, 'Peer referenced transaction twice, disconnecting' self.disconnect() return if tx_hash in self.node.known_txs_var.value: tx = self.node.known_txs_var.value[tx_hash] else: for cache in self.known_txs_cache.itervalues(): if tx_hash in cache: tx = cache[tx_hash] print 'Transaction %064x rescued from peer latency cache!' % ( tx_hash, ) break else: print >> sys.stderr, 'Peer referenced unknown transaction %064x, disconnecting' % ( tx_hash, ) self.disconnect() return self.remembered_txs[tx_hash] = tx self.remembered_txs_size += 100 + bitcoin_data.tx_type.packed_size( tx) added_known_txs = {} warned = False for tx in txs: tx_hash = bitcoin_data.hash256(bitcoin_data.tx_type.pack(tx)) if tx_hash in self.remembered_txs: print >> sys.stderr, 'Peer referenced transaction twice, disconnecting' self.disconnect() return if tx_hash in self.node.known_txs_var.value and not warned and p2pool.DEBUG: print 'Peer sent entire transaction %064x that was already received' % ( tx_hash, ) warned = True self.remembered_txs[tx_hash] = tx self.remembered_txs_size += 100 + bitcoin_data.tx_type.packed_size( tx) added_known_txs[tx_hash] = tx self.node.known_txs_var.add(added_known_txs) if self.remembered_txs_size >= self.max_remembered_txs_size: raise PeerMisbehavingError('too much transaction data stored') t1 = time.time() if p2pool.BENCH and (t1 - t0) > .01: print "%8.3f ms for %i txs in p2p.py:handle_remember_tx (%3.3f ms/tx)" % ( (t1 - t0) * 1000., len(tx_hashes), ((t1 - t0) * 1000. / max(1, len(tx_hashes))))
def rpc_submit(self, worker_name, job_id, extranonce2, ntime, nonce, version_bits = None, *args): #asicboost: version_bits is the version mask that the miner used worker_name = worker_name.strip() if job_id not in self.handler_map: print >>sys.stderr, '''Couldn't link returned work's job id with its handler. This should only happen if this process was recently restarted!''' #self.other.svc_client.rpc_reconnect().addErrback(lambda err: None) return False x, got_response = self.handler_map[job_id] coinb_nonce = extranonce2.decode('hex') assert len(coinb_nonce) == self.wb.COINBASE_NONCE_LENGTH new_packed_gentx = x['coinb1'] + coinb_nonce + x['coinb2'] job_version = x['version'] nversion = job_version #check if miner changed bits that they were not supposed to change if version_bits: if ((~self.pool_version_mask) & int(version_bits,16)) != 0: #todo: how to raise error back to miner? #protocol does not say error needs to be returned but ckpool returns #{"error": "Invalid version mask", "id": "id", "result":""} raise ValueError("Invalid version mask {0}".format(version_bits)) nversion = (job_version & ~self.pool_version_mask) | (int(version_bits,16) & self.pool_version_mask) #nversion = nversion & int(version_bits,16) header = dict( version=nversion, previous_block=x['previous_block'], merkle_root=bitcoin_data.check_merkle_link(bitcoin_data.hash256(new_packed_gentx), x['merkle_link']), # new_packed_gentx has witness data stripped timestamp=pack.IntType(32).unpack(getwork._swap4(ntime.decode('hex'))), bits=x['bits'], nonce=pack.IntType(32).unpack(getwork._swap4(nonce.decode('hex'))), ) result = got_response(header, worker_name, coinb_nonce, self.target) # self.target from RPC response # adjust difficulty on this stratum to target ~10sec/pseudoshare!!! 
if not self.fixed_target: self.recent_shares.append(time.time()) if len(self.recent_shares) > 12 or (time.time() - self.recent_shares[0]) > 10*len(self.recent_shares)*self.share_rate: old_time = self.recent_shares[0] del self.recent_shares[0] olddiff = bitcoin_data.target_to_difficulty(self.target) # calculate new target based on time needed for generating previous shares self.target = int(self.target * clip((time.time() - old_time)/(len(self.recent_shares)*self.share_rate), 0.5, 2.) + 0.5) newtarget = clip(self.target, self.wb.net.SANE_TARGET_RANGE[0], self.wb.net.SANE_TARGET_RANGE[1]) if newtarget != self.target: print "Clipping target from %064x to %064x" % (self.target, newtarget) self.target = newtarget self.target = max(x['min_share_target'], self.target) self.recent_shares = [time.time()] self._send_work() return result
def get_blocks(b, n, callback): h = yield b.rpc_getbestblockhash() for i in xrange(n): print 'getting block', i, 'out of', n, 'back' block_data = yield b.rpc_getblock(h, False) block_data2 = yield b.rpc_getblock(h) block = bitcoin_data.block_type.unpack(block_data.decode('hex')) callback(dict( block=block, height=block_data2['height'], gentx_hash=bitcoin_data.hash256(bitcoin_data.tx_type.pack(block['txs'][0])), )) h = '%064x' % (block['header']['previous_block'],)
def test_header_hash(self):
    # Packing and double-SHA256 hashing a known mainnet block header must
    # reproduce its published block hash.
    header = dict(
        version=1,
        previous_block=0x000000000000038a2a86b72387f93c51298298a732079b3b686df3603d2f6282,
        merkle_root=0x37a43a3b812e4eb665975f46393b4360008824aab180f27d642de8c28073bc44,
        timestamp=1323752685,
        bits=data.FloatingInteger(437159528),
        nonce=3658685446,
    )
    expected = 0x000000000000003aaaf7638f9f9c0d0c60e8b0eb817dcdb55fd2b1964efc5175
    assert data.hash256(data.block_header_type.pack(header)) == expected
def handle_remember_tx(self, tx_hashes, txs): t0 = time.time() for tx_hash in tx_hashes: if tx_hash in self.remembered_txs: print >>sys.stderr, 'Peer referenced transaction twice, disconnecting' self.disconnect() return if tx_hash in self.node.known_txs_var.value: tx = self.node.known_txs_var.value[tx_hash] else: for cache in self.known_txs_cache.itervalues(): if tx_hash in cache: tx = cache[tx_hash] print 'Transaction %064x rescued from peer latency cache!' % (tx_hash,) break else: print >>sys.stderr, 'Peer referenced unknown transaction %064x, disconnecting' % (tx_hash,) self.disconnect() return self.remembered_txs[tx_hash] = tx self.remembered_txs_size += 100 + bitcoin_data.tx_type.packed_size(tx) added_known_txs = {} warned = False for tx in txs: tx_hash = bitcoin_data.hash256(bitcoin_data.tx_type.pack(tx)) if tx_hash in self.remembered_txs: print >>sys.stderr, 'Peer referenced transaction twice, disconnecting' self.disconnect() return if tx_hash in self.node.known_txs_var.value and not warned: """ It's due to latency. Multiple peers want you to have a transaction that they're now mining but you don't have yet, so they all send it to you at the same time. This could be solved by adding a small delay before forwarding a transaction at the cost of increased latency should they find a share or block... I'll think about it. """ print 'Peer sent entire transaction %064x that was already received' % (tx_hash,) warned = True self.remembered_txs[tx_hash] = tx self.remembered_txs_size += 100 + bitcoin_data.tx_type.packed_size(tx) added_known_txs[tx_hash] = tx self.node.known_txs_var.add(added_known_txs) if self.remembered_txs_size >= self.max_remembered_txs_size: raise PeerMisbehavingError('too much transaction data stored') t1 = time.time() if p2pool.BENCH and (t1-t0) > .01: print "%8.3f ms for %i txs in p2p.py:handle_remember_tx (%3.3f ms/tx)" % ((t1-t0)*1000., len(tx_hashes), ((t1-t0)*1000. / max(1,len(tx_hashes)) ))
def get_blocks(b, n):
    # Collect the last n blocks, walking backwards from the daemon's tip, and
    # return them as a list via the deferred.
    tip = yield b.rpc_getbestblockhash()
    collected = []
    for _ in xrange(n):
        raw_hex = yield b.rpc_getblock(tip, False)
        verbose = yield b.rpc_getblock(tip)
        block = bitcoin_data.block_type.unpack(raw_hex.decode('hex'))
        collected.append(dict(
            block=block,
            height=verbose['height'],
            gentx_hash=bitcoin_data.hash256(bitcoin_data.tx_type.pack(block['txs'][0])),
        ))
        tip = '%064x' % (block['header']['previous_block'],)
    defer.returnValue(collected)
def handle_remember_tx(self, tx_hashes, txs): for tx_hash in tx_hashes: if tx_hash in self.remembered_txs: print >> sys.stderr, 'Peer referenced transaction twice, disconnecting' self.disconnect() return if tx_hash in self.node.known_txs_var.value: tx = self.node.known_txs_var.value[tx_hash] else: for cache in self.known_txs_cache.itervalues(): if tx_hash in cache: tx = cache[tx_hash] print 'Transaction %064x rescued from peer latency cache!' % ( tx_hash, ) break else: print >> sys.stderr, 'Peer referenced unknown transaction %064x, disconnecting' % ( tx_hash, ) self.disconnect() return self.remembered_txs[tx_hash] = tx self.remembered_txs_size += 100 + bitcoin_data.tx_type.packed_size( tx) new_known_txs = dict(self.node.known_txs_var.value) warned = False for tx in txs: tx_hash = bitcoin_data.hash256(bitcoin_data.tx_type.pack(tx)) if tx_hash in self.remembered_txs: print >> sys.stderr, 'Peer referenced transaction twice, disconnecting' self.disconnect() return if tx_hash in self.node.known_txs_var.value and not warned: print 'Peer sent entire transaction %064x that was already received' % ( tx_hash, ) warned = True self.remembered_txs[tx_hash] = tx self.remembered_txs_size += 100 + bitcoin_data.tx_type.packed_size( tx) new_known_txs[tx_hash] = tx self.node.known_txs_var.set(new_known_txs) if self.remembered_txs_size >= self.max_remembered_txs_size: raise PeerMisbehavingError('too much transaction data stored')
def rpc_submit(self, worker_name, job_id, extranonce2, ntime, nonce):
    # Reassemble the gentx from the miner's extranonce2, rebuild the block
    # header, and hand it to the job's response handler.
    if job_id not in self.handler_map:
        print >>sys.stderr, '''Couldn't link returned work's job id with its handler. This should only happen if this process was recently restarted!'''
        return False
    x, got_response = self.handler_map[job_id]
    coinb_nonce = extranonce2.decode('hex')
    assert len(coinb_nonce) == self.wb.COINBASE_NONCE_LENGTH
    new_packed_gentx = x['coinb1'] + coinb_nonce + x['coinb2']
    header = dict(
        version=x['version'],
        previous_block=x['previous_block'],
        merkle_root=bitcoin_data.check_merkle_link(bitcoin_data.hash256(new_packed_gentx), x['merkle_link']), # new_packed_gentx has witness data stripped
        timestamp=pack.IntType(32).unpack(getwork._swap4(ntime.decode('hex'))),
        bits=x['bits'],
        nonce=pack.IntType(32).unpack(getwork._swap4(nonce.decode('hex'))),
    )
    return got_response(header, worker_name, coinb_nonce)
def rpc_submit(self, worker_name, job_id, extranonce2, ntime, nonce, version_bits=None, *args):
    #asicboost: version_bits is the version mask that the miner used
    worker_name = worker_name.strip()
    if job_id not in self.handler_map:
        print >>sys.stderr, '''Couldn't link returned work's job id with its handler. This should only happen if this process was recently restarted!'''
        #self.other.svc_client.rpc_reconnect().addErrback(lambda err: None)
        return False
    x, got_response = self.handler_map[job_id]
    coinb_nonce = extranonce2.decode('hex')
    assert len(coinb_nonce) == self.wb.COINBASE_NONCE_LENGTH
    new_packed_gentx = x['coinb1'] + coinb_nonce + x['coinb2']
    job_version = x['version']
    final_version = job_version
    # reject miners that rolled version bits outside the pool's allowed mask
    #print version_bits
    if version_bits:
        if ((~self.pool_version_mask) & int(version_bits, 16)) != 0:
            #todo: how to raise error back to miner?
            #protocol does not say error needs to be returned but ckpool returns
            #{"error": "Invalid version mask", "id": "id", "result":""}
            raise ValueError("Invalid version mask {0}".format(version_bits))
        final_version = (job_version & ~self.pool_version_mask) | (int(version_bits, 16) & self.pool_version_mask)
    header = dict(
        version=final_version,
        previous_block=x['previous_block'],
        merkle_root=bitcoin_data.check_merkle_link(bitcoin_data.hash256(new_packed_gentx), x['merkle_link']), # new_packed_gentx has witness data stripped
        timestamp=pack.IntType(32).unpack(getwork._swap4(ntime.decode('hex'))),
        bits=x['bits'],
        nonce=pack.IntType(32).unpack(getwork._swap4(nonce.decode('hex'))),
    )
    return got_response(header, worker_name, coinb_nonce)
def load_share(share, net, peer):
    # Dispatch on the wire share type: 0-3 are obsolete formats, 4 is a
    # share1a (no transactions), 5 is a share1b (with transactions).
    share_type = share['type']
    if share_type in [0, 1, 2, 3]:
        from p2pool import p2p
        raise p2p.PeerMisbehavingError('sent an obsolete share')
    elif share_type == 4:
        return Share(net, peer, other_txs=None, **Share.share1a_type.unpack(share['contents']))
    elif share_type == 5:
        share1b = Share.share1b_type.unpack(share['contents'])
        tx_hashes = [bitcoin_data.hash256(bitcoin_data.tx_type.pack(x)) for x in share1b['other_txs']]
        return Share(net, peer, merkle_link=bitcoin_data.calculate_merkle_link([0] + tx_hashes, 0), **share1b)
    else:
        raise ValueError('unknown share type: %r' % (share_type,))
def getwork(bitcoind, use_getblocktemplate=False, txidcache={}, feecache={}, feefifo=[], known_txs={}): def go(): if use_getblocktemplate: return bitcoind.rpc_getblocktemplate(dict(mode='template', rules=['segwit'])) else: return bitcoind.rpc_getmemorypool() try: start = time.time() work = yield go() end = time.time() except jsonrpc.Error_for_code(-32601): # Method not found use_getblocktemplate = not use_getblocktemplate try: start = time.time() work = yield go() end = time.time() except jsonrpc.Error_for_code(-32601): # Method not found print >>sys.stderr, 'Error: Bitcoin version too old! Upgrade to v0.5 or newer!' raise deferral.RetrySilentlyException() if not 'start' in txidcache: # we clear it every 30 min txidcache['start'] = time.time() t0 = time.time() unpacked_transactions = [] txhashes = [] cachehits = 0 cachemisses = 0 knownhits = 0 knownmisses = 0 for x in work['transactions']: fee = x['fee'] x = x['data'] if isinstance(x, dict) else x packed = None if x in txidcache: cachehits += 1 txid = (txidcache[x]) txhashes.append(txid) else: cachemisses += 1 packed = x.decode('hex') txid = bitcoin_data.hash256(packed) txidcache[x] = txid txhashes.append(txid) if txid in known_txs: knownhits += 1 unpacked = known_txs[txid] else: knownmisses += 1 if not packed: packed = x.decode('hex') unpacked = bitcoin_data.tx_type.unpack(packed) unpacked_transactions.append(unpacked) # The only place where we can get information on transaction fees is in GBT results, so we need to store those # for a while so we can spot shares that miscalculate the block reward if not txid in feecache: feecache[txid] = fee feefifo.append(txid) if time.time() - txidcache['start'] > 30*60.: keepers = {(x['data'] if isinstance(x, dict) else x):txid for x, txid in zip(work['transactions'], txhashes)} txidcache.clear() txidcache.update(keepers) # limit the fee cache to 100,000 entries, which should be about 10-20 MB fum = 100000 while len(feefifo) > fum: del feecache[feefifo.pop(0)] 
packed_transactions = [x['data'].decode('hex') for x in work['transactions'] if len(x.get('depends', [])) == 0] if 'height' not in work: work['height'] = (yield bitcoind.rpc_getblock(work['previousblockhash']))['height'] + 1 elif p2pool.DEBUG: assert work['height'] == (yield bitcoind.rpc_getblock(work['previousblockhash']))['height'] + 1 t1 = time.time() if p2pool.BENCH: print "%8.3f ms for helper.py:getwork(). Cache: %i hits %i misses, %i known_tx %i unknown %i cached" % ((t1 - t0)*1000., cachehits, cachemisses, knownhits, knownmisses, len(txidcache)) defer.returnValue(dict( version=work['version'], previous_block=int(work['previousblockhash'], 16), transactions=unpacked_transactions, transaction_hashes=txhashes, transaction_fees=[x.get('fee', None) if isinstance(x, dict) else None for x in work['transactions']], subsidy=work['coinbasevalue'], time=work['time'] if 'time' in work else work['curtime'], bits=bitcoin_data.FloatingIntegerType().unpack(work['bits'].decode('hex')[::-1]) if isinstance(work['bits'], (str, unicode)) else bitcoin_data.FloatingInteger(work['bits']), coinbaseflags=work['coinbaseflags'].decode('hex') if 'coinbaseflags' in work else ''.join(x.decode('hex') for x in work['coinbaseaux'].itervalues()) if 'coinbaseaux' in work else '', height=work['height'], rules=work.get('rules', []), last_update=time.time(), use_getblocktemplate=use_getblocktemplate, latency=end - start, ))