def handle_header(new_header, valid=False):
    """Consider a newly-seen block header as a candidate for best_block_header.

    new_header: unpacked block header dict.
    valid: when True, skip asking starwelsd to confirm the header exists.

    Generator body (uses yield) -- presumably driven by Twisted
    inlineCallbacks in the enclosing scope; TODO confirm.
    """
    new_hash = self.net.PARENT.POW_FUNC(
        starwels_data.block_header_type.pack(new_header))
    # check that header matches current target
    if new_hash > self.starwelsd_work.value['bits'].target:
        return
    if not valid:
        # Confirm starwelsd knows this header before trusting it.
        # FIX: was a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit; narrowed to Exception so the
        # best-effort "unknown header -> ignore" behavior is kept.
        try:
            _ = (yield self.starwelsd.rpc_getblockheader(new_hash))
        except Exception:
            return
    starwelsd_best_block = self.starwelsd_work.value['previous_block']
    if (self.best_block_header.value is None
            # new is child of current and previous is current
            or (new_header['previous_block'] == starwelsd_best_block
                and starwels_data.hash256(starwels_data.block_header_type.pack(self.best_block_header.value)) == starwelsd_best_block)
            # new is current and previous is not a child of current
            or (starwels_data.hash256(starwels_data.block_header_type.pack(new_header)) == starwelsd_best_block
                and self.best_block_header.value['previous_block'] != starwelsd_best_block)):
        self.best_block_header.set(new_header)
def rpc_getblocktemplate(self, param): if param['mode'] == 'template': pass elif param['mode'] == 'submit': result = param['data'] block = starwels_data.block_type.unpack(result.decode('hex')) if sum(tx_out['value'] for tx_out in block['txs'][0]['tx_outs']) != sum( tx['tx_outs'][0]['value'] for tx in block['txs'][1:]) + 5000000000: print 'invalid fee' if block['header']['previous_block'] != self.blocks[-1]: return False if starwels_data.hash256( result.decode('hex')) > block['header']['bits'].target: return False header_hash = starwels_data.hash256( starwels_data.block_header_type.pack(block['header'])) self.blocks.append(header_hash) self.headers[header_hash] = block['header'] reactor.callLater(0, self.new_block.happened) return True else: raise jsonrpc.Error_for_code(-1)('invalid request') txs = [] for i in xrange(100): fee = i txs.append( dict( data=starwels_data.tx_type.pack( dict(version=1, tx_ins=[], tx_outs=[dict(value=fee, script='hello!' * 100)], lock_time=0)).encode('hex'), fee=fee, )) return { "version": 2, "previousblockhash": '%064x' % (self.blocks[-1], ), "transactions": txs, "coinbaseaux": { "flags": "062f503253482f" }, "coinbasevalue": 5000000000 + sum(tx['fee'] for tx in txs), "target": "0000000000000513c50000000000000000000000000000000000000000000000", "mintime": 1351655621, "mutable": ["time", "transactions", "prevblock"], "noncerange": "00000000ffffffff", "sigoplimit": 20000, "sizelimit": 1000000, "curtime": 1351659940, "bits": "21008000", "height": len(self.blocks), }
def test_hashlink3(self):
    """check_hash_link with an ending must agree with a direct hash256."""
    for _ in xrange(100):
        prefix = random_bytes(random.randrange(2048))
        ending = random_bytes(random.randrange(200))
        suffix = random_bytes(random.randrange(2048))
        link = data.prefix_to_hash_link(prefix + ending, ending)
        assert data.check_hash_link(link, suffix, ending) == starwels_data.hash256(prefix + ending + suffix)
def handle_shares(self, shares, peer): if len(shares) > 5: print 'Processing %i shares from %s...' % ( len(shares), '%s:%i' % peer.addr if peer is not None else None) new_count = 0 all_new_txs = {} for share, new_txs in shares: if new_txs is not None: all_new_txs.update( (starwels_data.hash256(starwels_data.tx_type.pack(new_tx)), new_tx) for new_tx in new_txs) if share.hash in self.node.tracker.items: #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool_data.format_hash(share.hash),) continue new_count += 1 #print 'Received share %s from %r' % (p2pool_data.format_hash(share.hash), share.peer_addr) self.node.tracker.add(share) new_known_txs = dict(self.node.known_txs_var.value) new_known_txs.update(all_new_txs) self.node.known_txs_var.set(new_known_txs) if new_count: self.node.set_best_share() if len(shares) > 5: print '... done processing %i shares. New: %i Have: %i/~%i' % ( len(shares), new_count, len( self.node.tracker.items), 2 * self.node.net.CHAIN_LENGTH)
def submit_block_p2p(block, factory, net):
    """Send a solved block to starwelsd over the p2p connection held by factory.

    Raises deferral.RetrySilentlyException when no connection is available.
    """
    conn = factory.conn.value
    if conn is not None:
        conn.send_block(block=block)
        return
    block_hash = starwels_data.hash256(starwels_data.block_header_type.pack(block['header']))
    print >> sys.stderr, 'No starwelsd connection when block submittal attempted! %s%064x' % (
        net.PARENT.BLOCK_EXPLORER_URL_PREFIX, block_hash)
    raise deferral.RetrySilentlyException()
def test_header_hash(self):
    """A known header must pack and hash to its known block hash."""
    header = dict(
        version=1,
        previous_block=0x000000000000038a2a86b72387f93c51298298a732079b3b686df3603d2f6282,
        merkle_root=0x37a43a3b812e4eb665975f46393b4360008824aab180f27d642de8c28073bc44,
        timestamp=1323752685,
        bits=data.FloatingInteger(437159528),
        nonce=3658685446,
    )
    expected = 0x000000000000003aaaf7638f9f9c0d0c60e8b0eb817dcdb55fd2b1964efc5175
    assert data.hash256(data.block_header_type.pack(header)) == expected
def get_ref_hash(cls, net, share_info, ref_merkle_link):
    """Pack the ref structure for share_info, hash it, fold it through
    ref_merkle_link, and return the result packed as a 256-bit integer."""
    packed_ref = cls.get_dynamic_types(net)['ref_type'].pack(dict(
        identifier=net.IDENTIFIER,
        share_info=share_info,
    ))
    ref_hash = starwels_data.check_merkle_link(starwels_data.hash256(packed_ref), ref_merkle_link)
    return pack.IntType(256).pack(ref_hash)
def test_get_block(self):
    # Integration test: fetch a known block from a local starwels node over
    # p2p and verify its merkle root and header hash.
    factory = p2p.ClientFactory(networks.nets['starwels'])
    # NOTE(review): assumes a node is listening locally on port 8343 -- confirm
    c = reactor.connectTCP('127.0.0.1', 8343, factory)
    try:
        h = 0x000000000000046acff93b0e76cd10490551bf871ce9ac9fad62e67a07ff1d1e
        # retry until a protocol is available and the block is returned;
        # the inner inlineCallbacks lambda re-acquires the protocol on
        # every retry attempt
        block = yield deferral.retry()(defer.inlineCallbacks(lambda: defer.returnValue((yield (yield factory.getProtocol()).get_block(h)))))()
        # merkle hash of the txids must equal the header's merkle_root
        assert data.merkle_hash(map(data.get_txid, block['txs'])) == block['header']['merkle_root']
        # the header must hash to the block hash we asked for
        assert data.hash256(data.block_header_type.pack(block['header'])) == h
    finally:
        factory.stopTrying()
        c.disconnect()
def handle_remember_tx(self, tx_hashes, txs): for tx_hash in tx_hashes: if tx_hash in self.remembered_txs: print >> sys.stderr, 'Peer referenced transaction twice, disconnecting' self.disconnect() return if tx_hash in self.node.known_txs_var.value: tx = self.node.known_txs_var.value[tx_hash] else: for cache in self.known_txs_cache.itervalues(): if tx_hash in cache: tx = cache[tx_hash] print 'Transaction %064x rescued from peer latency cache!' % ( tx_hash, ) break else: print >> sys.stderr, 'Peer referenced unknown transaction %064x, disconnecting' % ( tx_hash, ) self.disconnect() return self.remembered_txs[tx_hash] = tx self.remembered_txs_size += 100 + starwels_data.tx_type.packed_size( tx) new_known_txs = dict(self.node.known_txs_var.value) warned = False for tx in txs: tx_hash = starwels_data.hash256(starwels_data.tx_type.pack(tx)) if tx_hash in self.remembered_txs: print >> sys.stderr, 'Peer referenced transaction twice, disconnecting' self.disconnect() return if tx_hash in self.node.known_txs_var.value and not warned: print 'Peer sent entire transaction %064x that was already received' % ( tx_hash, ) warned = True self.remembered_txs[tx_hash] = tx self.remembered_txs_size += 100 + starwels_data.tx_type.packed_size( tx) new_known_txs[tx_hash] = tx self.node.known_txs_var.set(new_known_txs) if self.remembered_txs_size >= self.max_remembered_txs_size: raise PeerMisbehavingError('too much transaction data stored')
def rpc_submit(self, worker_name, job_id, extranonce2, ntime, nonce):
    """Stratum mining.submit handler: rebuild the header for a returned
    job and forward it to the stored response callback."""
    if job_id not in self.handler_map:
        print >> sys.stderr, '''Couldn't link returned work's job id with its handler. This should only happen if this process was recently restarted!'''
        return False
    x, got_response = self.handler_map[job_id]
    coinb_nonce = extranonce2.decode('hex')
    assert len(coinb_nonce) == self.wb.COINBASE_NONCE_LENGTH
    # reassemble the generation transaction around the miner's nonce;
    # new_packed_gentx has witness data stripped
    new_packed_gentx = x['coinb1'] + coinb_nonce + x['coinb2']
    merkle_root = starwels_data.check_merkle_link(
        starwels_data.hash256(new_packed_gentx), x['merkle_link'])
    timestamp = pack.IntType(32).unpack(getwork._swap4(ntime.decode('hex')))
    header_nonce = pack.IntType(32).unpack(getwork._swap4(nonce.decode('hex')))
    header = dict(
        version=x['version'],
        previous_block=x['previous_block'],
        merkle_root=merkle_root,
        timestamp=timestamp,
        bits=x['bits'],
        nonce=header_nonce,
    )
    return got_response(header, worker_name, coinb_nonce)
def from_header(cls, header):
    """Alternate constructor: build an instance from an unpacked block header."""
    header_hash = starwels_data.hash256(starwels_data.block_header_type.pack(header))
    return cls(header_hash, header['previous_block'])
def _(tx):
    # add tx to the known-transactions map, keyed by its hash
    updated = dict(self.known_txs_var.value)
    key = starwels_data.hash256(starwels_data.tx_type.pack(tx))
    updated[key] = tx
    self.known_txs_var.set(updated)
def check(self, tracker, other_txs=None):
    """Validate this share against the tracker; returns the regenerated
    generation transaction (only used by as_block).

    Raises p2p.PeerMisbehavingError on version-switch violations and
    ValueError when regenerated data does not match the share's contents.
    """
    from p2pool import p2p
    counts = None
    if self.share_data['previous_share_hash'] is not None:
        previous_share = tracker.items[
            self.share_data['previous_share_hash']]
        if tracker.get_height(self.share_data['previous_share_hash']
                              ) >= self.net.CHAIN_LENGTH:
            # desired-version tally over the last tenth of the chain
            counts = get_desired_version_counts(
                tracker,
                tracker.get_nth_parent_hash(
                    previous_share.hash, self.net.CHAIN_LENGTH * 9 // 10),
                self.net.CHAIN_LENGTH // 10)
            if type(self) is type(previous_share):
                pass
            elif type(self) is type(previous_share).SUCCESSOR:
                # switch only valid if 60% of hashes in [self.net.CHAIN_LENGTH*9//10, self.net.CHAIN_LENGTH] for new version
                if counts.get(self.VERSION, 0) < sum(counts.itervalues()) * 60 // 100:
                    raise p2p.PeerMisbehavingError(
                        'switch without enough hash power upgraded')
            else:
                raise p2p.PeerMisbehavingError(
                    '''%s can't follow %s''' %
                    (type(self).__name__, type(previous_share).__name__))
        elif type(self) is type(previous_share).SUCCESSOR:
            raise p2p.PeerMisbehavingError('switch without enough history')
    # resolve each (share_count, tx_count) reference to a concrete tx hash
    other_tx_hashes = [
        tracker.items[tracker.get_nth_parent_hash(
            self.hash, share_count)].share_info['new_transaction_hashes'][tx_count]
        for share_count, tx_count in self.iter_transaction_hash_refs()
    ]
    if other_txs is not None and not isinstance(other_txs, dict):
        # normalize a list of txs into a hash->tx mapping
        other_txs = dict(
            (starwels_data.hash256(starwels_data.tx_type.pack(tx)), tx)
            for tx in other_txs)
    # regenerate the share's gentx and compare against what we received
    share_info, gentx, other_tx_hashes2, get_share = self.generate_transaction(
        tracker,
        self.share_info['share_data'],
        self.header['bits'].target,
        self.share_info['timestamp'],
        self.share_info['bits'].target,
        self.contents['ref_merkle_link'],
        [(h, None) for h in other_tx_hashes],
        self.net,
        known_txs=other_txs,
        last_txout_nonce=self.contents['last_txout_nonce'],
        segwit_data=self.share_info.get('segwit_data', None))
    assert other_tx_hashes2 == other_tx_hashes
    if share_info != self.share_info:
        raise ValueError('share_info invalid')
    if starwels_data.get_txid(gentx) != self.gentx_hash:
        raise ValueError('''gentx doesn't match hash_link''')
    if starwels_data.calculate_merkle_link(
            [None] + other_tx_hashes, 0
    ) != self.merkle_link:  # the other hash commitments are checked in the share_info assertion
        raise ValueError('merkle_link and other_tx_hashes do not match')
    update_min_protocol_version(counts, self)
    return gentx  # only used by as_block
def __init__(self, net, peer_addr, contents):
    """Unpack and validate a share received from the network.

    net: network parameters object; peer_addr: address of the sending
    peer (or None); contents: the unpacked share structure.
    Raises ValueError / p2p.PeerMisbehavingError on malformed shares.
    """
    dynamic_types = self.get_dynamic_types(net)
    self.share_info_type = dynamic_types['share_info_type']
    self.share_type = dynamic_types['share_type']
    self.ref_type = dynamic_types['ref_type']
    self.net = net
    self.peer_addr = peer_addr
    self.contents = contents
    self.min_header = contents['min_header']
    self.share_info = contents['share_info']
    self.hash_link = contents['hash_link']
    self.merkle_link = contents['merkle_link']
    segwit_activated = is_segwit_activated(self.VERSION, net)
    # sanity-check sizes before deriving anything from the share
    if not (2 <= len(self.share_info['share_data']['coinbase']) <= 100):
        raise ValueError(
            '''bad coinbase size! %i bytes''' %
            (len(self.share_info['share_data']['coinbase']), ))
    if len(self.merkle_link['branch']) > 16 or (segwit_activated and len(
            self.share_info['segwit_data']['txid_merkle_link']['branch']) > 16):
        raise ValueError('merkle branch too long!')
    assert not self.hash_link['extra_data'], repr(
        self.hash_link['extra_data'])
    # convenience aliases pulled out of share_info
    self.share_data = self.share_info['share_data']
    self.max_target = self.share_info['max_bits'].target
    self.target = self.share_info['bits'].target
    self.timestamp = self.share_info['timestamp']
    self.previous_hash = self.share_data['previous_share_hash']
    self.new_script = starwels_data.pubkey_hash_to_script2(
        self.share_data['pubkey_hash'])
    self.desired_version = self.share_data['desired_version']
    self.absheight = self.share_info['absheight']
    self.abswork = self.share_info['abswork']
    # transaction refs with share_count == 0 must cover exactly the
    # new_transaction_hashes list, with no gaps or extras
    n = set()
    for share_count, tx_count in self.iter_transaction_hash_refs():
        assert share_count < 110
        if share_count == 0:
            n.add(tx_count)
    assert n == set(range(len(self.share_info['new_transaction_hashes'])))
    # recover the gentx hash from the hash_link commitment
    self.gentx_hash = check_hash_link(
        self.hash_link,
        self.get_ref_hash(net, self.share_info, contents['ref_merkle_link']) +
        pack.IntType(64).pack(self.contents['last_txout_nonce']) +
        pack.IntType(32).pack(0),
        self.gentx_before_refhash,
    )
    merkle_root = starwels_data.check_merkle_link(
        self.gentx_hash,
        self.share_info['segwit_data']['txid_merkle_link'] if segwit_activated else self.merkle_link)
    self.header = dict(self.min_header, merkle_root=merkle_root)
    self.pow_hash = net.PARENT.POW_FUNC(
        starwels_data.block_header_type.pack(self.header))
    self.hash = self.header_hash = starwels_data.hash256(
        starwels_data.block_header_type.pack(self.header))
    if self.target > net.MAX_TARGET:
        from p2pool import p2p
        raise p2p.PeerMisbehavingError('share target invalid')
    if self.pow_hash > self.target:
        from p2pool import p2p
        raise p2p.PeerMisbehavingError('share PoW invalid')
    self.new_transaction_hashes = self.share_info['new_transaction_hashes']  # XXX eww
    self.time_seen = time.time()
def test_hashlink1(self):
    """check_hash_link with an empty suffix must agree with a direct hash256."""
    for _ in xrange(100):
        payload = random_bytes(random.randrange(2048))
        link = data.prefix_to_hash_link(payload)
        assert data.check_hash_link(link, '') == starwels_data.hash256(payload)