Example #1
    def handle_shares(self, shares, peer):
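        # each element of 'shares' is a (share, new_txs) pair; new_txs may be None or a list of transactions announced alongside the share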
        if len(shares) > 5:
            print 'Processing %i shares from %s...' % (
                len(shares), '%s:%i' % peer.addr if peer is not None else None)

        new_count = 0
        all_new_txs = {}
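        # index announced transactions by their hash so known_txs_var can be updated in a single batch below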
        for share, new_txs in shares:
            if new_txs is not None:
                all_new_txs.update(
                    (bitcoin_data.single_hash256(bitcoin_data.tx_type.pack(new_tx)), new_tx)
                    for new_tx in new_txs)

            if share.hash in self.node.tracker.items:
                #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool_data.format_hash(share.hash),)
                continue

            new_count += 1

            #print 'Received share %s from %r' % (p2pool_data.format_hash(share.hash), share.peer_addr)

            self.node.tracker.add(share)

        self.node.known_txs_var.add(all_new_txs)

        if new_count:
            self.node.set_best_share()

        if len(shares) > 5:
            print '... done processing %i shares. New: %i Have: %i/~%i' % (
                len(shares), new_count,
                len(self.node.tracker.items), 2 * self.node.net.CHAIN_LENGTH)
Example #2
 @defer.inlineCallbacks
 def _getwork(self, request, data, long_poll):
     request.setHeader('X-Long-Polling', '/long-polling')
     request.setHeader('X-Roll-NTime', 'expire=100')
     request.setHeader('X-Is-P2Pool', 'true')
     if request.getHeader('Host') is not None:
         request.setHeader('X-Stratum', 'stratum+tcp://' + request.getHeader('Host'))
     
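     # a request body means the miner is submitting solved work: decode the header and pass it to the handler registered for its merkle root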
     if data is not None:
         header = getwork.decode_data(data)
         if header['merkle_root'] not in self.merkle_root_to_handler:
             print >>sys.stderr, '''Couldn't link returned work's merkle root with its handler. This should only happen if this process was recently restarted!'''
             defer.returnValue(False)
         defer.returnValue(self.merkle_root_to_handler[header['merkle_root']](header, request.getUser() if request.getUser() is not None else '', '\0'*self.worker_bridge.COINBASE_NONCE_LENGTH))
     
     if p2pool.DEBUG:
         id = random.randrange(1000, 10000)
         print 'POLL %i START is_long_poll=%r user_agent=%r user=%r' % (id, long_poll, request.getHeader('User-Agent'), request.getUser())
     
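     # long polling: answer immediately if this client hasn't seen the current work yet, otherwise wait for the new-work event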
     if long_poll:
         request_id = request.getClientIP(), request.getHeader('Authorization')
         if self.worker_views.get(request_id, self.worker_bridge.new_work_event.times) != self.worker_bridge.new_work_event.times:
             if p2pool.DEBUG:
                 print 'POLL %i PUSH' % (id,)
         else:
             if p2pool.DEBUG:
                 print 'POLL %i WAITING' % (id,)
             yield self.worker_bridge.new_work_event.get_deferred()
         self.worker_views[request_id] = self.worker_bridge.new_work_event.times
     
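     # hand out fresh work; the merkle root is computed over the coinbase with a zeroed nonce area and keys the handler lookup on submission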
     x, handler = self.worker_bridge.get_work(*self.worker_bridge.preprocess_request(request.getUser() if request.getUser() is not None else ''))
     res = getwork.BlockAttempt(
         version=x['version'],
         previous_block=x['previous_block'],
         merkle_root=bitcoin_data.check_merkle_link(bitcoin_data.single_hash256(x['coinb1'] + '\0'*self.worker_bridge.COINBASE_NONCE_LENGTH + x['coinb2']), x['merkle_link']),
         timestamp=x['timestamp'],
         bits=x['bits'],
         share_target=x['share_target'],
     )
     assert res.merkle_root not in self.merkle_root_to_handler
     
     self.merkle_root_to_handler[res.merkle_root] = handler
     
     if p2pool.DEBUG:
         print 'POLL %i END identifier=%i' % (id, self.worker_bridge.new_work_event.times)
     
     if request.getHeader('User-Agent') == 'Jephis PIC Miner':
         # ASICMINER BE Blades apparently have a buffer overflow bug and
         # can't handle much extra in the getwork response
         extra_params = {}
     else:
         extra_params = dict(identifier=str(self.worker_bridge.new_work_event.times), submitold=True)
     defer.returnValue(res.getwork(**extra_params))
Example #3
 def rpc_submit(self, worker_name, job_id, extranonce2, ntime, nonce):
     if job_id not in self.handler_map:
         print >> sys.stderr, '''Couldn't link returned work's job id with its handler. This should only happen if this process was recently restarted!'''
         return False
     x, got_response = self.handler_map[job_id]
     coinb_nonce = extranonce2.decode('hex')
     assert len(coinb_nonce) == self.wb.COINBASE_NONCE_LENGTH
     new_packed_gentx = x['coinb1'] + coinb_nonce + x['coinb2']
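     # rebuild the block header from the stored work template; ntime and nonce are hex-decoded and word-swapped (getwork convention) before unpacking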
     header = dict(
         version=x['version'],
         previous_block=x['previous_block'],
         merkle_root=bitcoin_data.check_merkle_link(
             bitcoin_data.single_hash256(new_packed_gentx), x['merkle_link']
         ),  # new_packed_gentx has witness data stripped
         timestamp=pack.IntType(32).unpack(
             getwork._swap4(ntime.decode('hex'))),
         bits=x['bits'],
         nonce=pack.IntType(32).unpack(getwork._swap4(nonce.decode('hex'))),
     )
     return got_response(header, worker_name, coinb_nonce)
Example #4
 def handle_remember_tx(self, tx_hashes, txs):
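     # the peer first references transactions by hash (which must already be known or cached), then sends the remaining ones in full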
     for tx_hash in tx_hashes:
         if tx_hash in self.remembered_txs:
             print >>sys.stderr, 'Peer referenced transaction twice, disconnecting'
             self.disconnect()
             return
         
         if tx_hash in self.node.known_txs_var.value:
             tx = self.node.known_txs_var.value[tx_hash]
         else:
             for cache in self.known_txs_cache.itervalues():
                 if tx_hash in cache:
                     tx = cache[tx_hash]
                     print 'Transaction %064x rescued from peer latency cache!' % (tx_hash,)
                     break
             else:
                 print >>sys.stderr, 'Peer referenced unknown transaction %064x, disconnecting' % (tx_hash,)
                 self.disconnect()
                 return
         
         self.remembered_txs[tx_hash] = tx
         self.remembered_txs_size += 100 + bitcoin_data.tx_type.packed_size(tx)
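     # transactions sent in full are remembered too and added to the node's known-transaction set in one batch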
     added_known_txs = {}
     warned = False
     for tx in txs:
         tx_hash = bitcoin_data.single_hash256(bitcoin_data.tx_type.pack(tx))
         if tx_hash in self.remembered_txs:
             print >>sys.stderr, 'Peer referenced transaction twice, disconnecting'
             self.disconnect()
             return
         
         if tx_hash in self.node.known_txs_var.value and not warned:
             print 'Peer sent entire transaction %064x that was already received' % (tx_hash,)
             warned = True
         
         self.remembered_txs[tx_hash] = tx
         self.remembered_txs_size += 100 + bitcoin_data.tx_type.packed_size(tx)
         added_known_txs[tx_hash] = tx
     self.node.known_txs_var.add(added_known_txs)
     if self.remembered_txs_size >= self.max_remembered_txs_size:
         raise PeerMisbehavingError('too much transaction data stored')
Example #5
 def _(tx):
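     # new-transaction callback: register the transaction in known_txs_var, keyed by its hash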
     self.known_txs_var.add({
         bitcoin_data.single_hash256(bitcoin_data.tx_type.pack(tx)):
         tx,
     })
Example #6
    def check(self, tracker, other_txs=None):
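        # validate this share against its chain: version-switch rules first, then regenerate and compare the generation transaction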
        from p2pool import p2p
        counts = None
        if self.share_data['previous_share_hash'] is not None:
            previous_share = tracker.items[self.share_data['previous_share_hash']]
            if tracker.get_height(self.share_data['previous_share_hash']) >= self.net.CHAIN_LENGTH:
                counts = get_desired_version_counts(
                    tracker,
                    tracker.get_nth_parent_hash(previous_share.hash, self.net.CHAIN_LENGTH * 9 // 10),
                    self.net.CHAIN_LENGTH // 10)
                if type(self) is type(previous_share):
                    pass
                elif type(self) is type(previous_share).SUCCESSOR:
                    # switch only valid if 60% of hashes in [self.net.CHAIN_LENGTH*9//10, self.net.CHAIN_LENGTH] for new version
                    if counts.get(self.VERSION, 0) < sum(counts.itervalues()) * 60 // 100:
                        raise p2p.PeerMisbehavingError(
                            'switch without enough hash power upgraded')
                else:
                    raise p2p.PeerMisbehavingError(
                        '''%s can't follow %s''' %
                        (type(self).__name__, type(previous_share).__name__))
            elif type(self) is type(previous_share).SUCCESSOR:
                raise p2p.PeerMisbehavingError('switch without enough history')

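        # resolve each (share_count, tx_count) reference to a concrete transaction hash taken from an ancestor share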
        other_tx_hashes = [
            tracker.items[tracker.get_nth_parent_hash(self.hash, share_count)].share_info['new_transaction_hashes'][tx_count]
            for share_count, tx_count in self.iter_transaction_hash_refs()
        ]
        if other_txs is not None and not isinstance(other_txs, dict):
            other_txs = dict(
                (bitcoin_data.single_hash256(bitcoin_data.tx_type.pack(tx)),
                 tx) for tx in other_txs)

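        # regenerate the generation transaction from the share's own data and verify it matches the committed share_info, gentx hash, and merkle link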
        share_info, gentx, other_tx_hashes2, get_share = self.generate_transaction(
            tracker,
            self.share_info['share_data'],
            self.header['bits'].target,
            self.share_info['timestamp'],
            self.share_info['bits'].target,
            self.contents['ref_merkle_link'],
            [(h, None) for h in other_tx_hashes],
            self.net,
            known_txs=other_txs,
            last_txout_nonce=self.contents['last_txout_nonce'],
            segwit_data=self.share_info.get('segwit_data', None))

        assert other_tx_hashes2 == other_tx_hashes
        if share_info != self.share_info:
            raise ValueError('share_info invalid')
        if bitcoin_data.get_txid(gentx) != self.gentx_hash:
            raise ValueError('''gentx doesn't match hash_link''')
        if bitcoin_data.calculate_merkle_link(
            [None] + other_tx_hashes, 0
        ) != self.merkle_link:  # the other hash commitments are checked in the share_info assertion
            raise ValueError('merkle_link and other_tx_hashes do not match')

        update_min_protocol_version(counts, self)

        return gentx  # only used by as_block