Example #1
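A class method that computes a share's reference hash: it packs the network identifier together with share_info using the class's ref_type serializer, hashes the result, folds it through ref_merkle_link, and returns it as a packed 256-bit integer.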
    def get_ref_hash(cls, net, share_info, ref_merkle_link):
        return pack.IntType(256).pack(
            axe_data.check_merkle_link(
                axe_data.hash256(
                    cls.ref_type.pack(
                        dict(
                            identifier=net.IDENTIFIER,
                            share_info=share_info,
                        ))), ref_merkle_link))
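The call chain above leans on two helpers that are easy to misread. As a rough guide only: hash256 in Bitcoin-derived code is usually SHA-256 applied twice, and a merkle link folds a leaf hash up the tree using a branch of sibling hashes. The sketch below illustrates that general technique with made-up names (double_sha256, fold_merkle_branch); the real axe_data.hash256 and axe_data.check_merkle_link work on the library's own serialized types and link structure, so this is not a drop-in replacement.

import hashlib

def double_sha256(data):
    # SHA-256 applied twice, the usual meaning of hash256 in this family of code
    return hashlib.sha256(hashlib.sha256(data).digest()).digest()

def fold_merkle_branch(leaf_hash, branch, index=0):
    # Walk up the tree: at each level the running hash is combined with the
    # sibling supplied by the branch, on the side selected by the index bit.
    h = leaf_hash
    for sibling in branch:
        h = double_sha256(sibling + h) if index & 1 else double_sha256(h + sibling)
        index >>= 1
    return h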
Example #2
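A submit handler in the Stratum style: the job is looked up by job_id, the miner's extranonce2 is spliced between the stored coinbase halves, the block header is rebuilt with ntime and nonce byte-swapped out of getwork order, and the result is handed to the response callback registered for that job.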
    def rpc_submit(self, worker_name, job_id, extranonce2, ntime, nonce):
        if job_id not in self.handler_map:
            print >> sys.stderr, '''Couldn't link returned work's job id with its handler. This should only happen if this process was recently restarted!'''
            return False
        x, got_response = self.handler_map[job_id]
        coinb_nonce = extranonce2.decode('hex')
        assert len(coinb_nonce) == self.wb.COINBASE_NONCE_LENGTH
        new_packed_gentx = x['coinb1'] + coinb_nonce + x['coinb2']
        header = dict(
            version=x['version'],
            previous_block=x['previous_block'],
            merkle_root=axe_data.check_merkle_link(
                axe_data.hash256(new_packed_gentx), x['merkle_link']),
            timestamp=pack.IntType(32).unpack(
                getwork._swap4(ntime.decode('hex'))),
            bits=x['bits'],
            nonce=pack.IntType(32).unpack(getwork._swap4(nonce.decode('hex'))),
        )
        return got_response(header, worker_name, coinb_nonce)
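The timestamp and nonce above arrive as hex strings in getwork byte order, so each 4-byte word has to be reversed before being unpacked as a 32-bit integer. A plain-Python equivalent of that word swap might look like the sketch below; the real helper is getwork._swap4, which may differ in details such as error handling.

def swap4(s):
    # Reverse every 4-byte word; the input length must be a multiple of 4.
    assert len(s) % 4 == 0
    return ''.join(s[i:i + 4][::-1] for i in xrange(0, len(s), 4))

# e.g. swap4('aabbccdd'.decode('hex')).encode('hex') == 'ddccbbaa'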
Example #3
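The constructor of a share object: it unpacks the received contents, sanity-checks the coinbase size, merkle branch length, and transaction references, reconstructs the generation transaction hash and the block header, and verifies the proof of work against the share target.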
    def __init__(self, net, peer_addr, contents):
        self.net = net
        self.peer_addr = peer_addr
        self.contents = contents

        self.min_header = contents['min_header']
        self.share_info = contents['share_info']
        self.hash_link = contents['hash_link']
        self.merkle_link = contents['merkle_link']

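        # Basic sanity checks on the deserialized contents before anything
        # else is derived from them.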
        if not (2 <= len(self.share_info['share_data']['coinbase']) <= 100):
            raise ValueError(
                '''bad coinbase size! %i bytes''' %
                (len(self.share_info['share_data']['coinbase']), ))

        if len(self.merkle_link['branch']) > 16:
            raise ValueError('merkle branch too long!')

        assert not self.hash_link['extra_data'], repr(
            self.hash_link['extra_data'])

        self.share_data = self.share_info['share_data']
        self.max_target = self.share_info['max_bits'].target
        self.target = self.share_info['bits'].target
        self.timestamp = self.share_info['timestamp']
        self.previous_hash = self.share_data['previous_share_hash']
        self.new_script = axe_data.pubkey_hash_to_script2(
            self.share_data['pubkey_hash'])
        self.desired_version = self.share_data['desired_version']
        self.absheight = self.share_info['absheight']
        self.abswork = self.share_info['abswork']

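        # Every entry in new_transaction_hashes must be referenced by this
        # share itself: a share_count of 0 points at this share's own
        # transaction list.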
        n = set()
        for share_count, tx_count in self.iter_transaction_hash_refs():
            assert share_count < 110
            if share_count == 0:
                n.add(tx_count)
        assert n == set(xrange(len(self.share_info['new_transaction_hashes'])))

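        # Rebuild the generation-transaction hash from the hash link, derive
        # the header's merkle root from it, and check the proof of work
        # against the share target below.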
        self.gentx_hash = check_hash_link(
            self.hash_link,
            self.get_ref_hash(net, self.share_info,
                              contents['ref_merkle_link']) +
            pack.IntType(64).pack(self.contents['last_txout_nonce']) +
            pack.IntType(32).pack(0),
            self.gentx_before_refhash,
        )
        merkle_root = axe_data.check_merkle_link(self.gentx_hash,
                                                 self.merkle_link)
        self.header = dict(self.min_header, merkle_root=merkle_root)
        self.pow_hash = net.PARENT.POW_FUNC(
            axe_data.block_header_type.pack(self.header))
        self.hash = self.header_hash = net.PARENT.BLOCKHASH_FUNC(
            axe_data.block_header_type.pack(self.header))

        if self.target > net.MAX_TARGET:
            from p2pool import p2p
            raise p2p.PeerMisbehavingError('share target invalid')

        if self.pow_hash > self.target:
            from p2pool import p2p
            raise p2p.PeerMisbehavingError('share PoW invalid')

        self.new_transaction_hashes = self.share_info['new_transaction_hashes']

        # XXX eww
        self.time_seen = time.time()
Example #4
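A getwork handler: a returned result (data) is decoded and dispatched to the handler stored under its merkle root, long-polling clients that have already seen the current work are parked until new work arrives, and fresh work is then assembled into a getwork.BlockAttempt whose merkle root is recorded so later submissions can be matched back to their handler.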
    def _getwork(self, request, data, long_poll):
        request.setHeader('X-Long-Polling', '/long-polling')
        request.setHeader('X-Roll-NTime', 'expire=100')
        request.setHeader('X-Is-P2Pool', 'true')
        if request.getHeader('Host') is not None:
            request.setHeader('X-Stratum',
                              'stratum+tcp://' + request.getHeader('Host'))

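        # A returned result: decode the header and dispatch it to the handler
        # registered under its merkle root.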
        if data is not None:
            header = getwork.decode_data(data)
            if header['merkle_root'] not in self.merkle_root_to_handler:
                print >> sys.stderr, '''Couldn't link returned work's merkle root with its handler. This should only happen if this process was recently restarted!'''
                defer.returnValue(False)
            defer.returnValue(
                self.merkle_root_to_handler[header['merkle_root']](
                    header,
                    request.getUser() if request.getUser() is not None else '',
                    '\0' * self.worker_bridge.COINBASE_NONCE_LENGTH))

        if p2pool.DEBUG:
            id = random.randrange(1000, 10000)
            print 'POLL %i START is_long_poll=%r user_agent=%r user=%r' % (
                id, long_poll, request.getHeader('User-Agent'),
                request.getUser())

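        # Long polling: if this client has already seen the current work,
        # park the request until the next new-work event fires.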
        if long_poll:
            request_id = request.getClientIP(), request.getHeader(
                'Authorization')
            if self.worker_views.get(
                    request_id, self.worker_bridge.new_work_event.times
            ) != self.worker_bridge.new_work_event.times:
                if p2pool.DEBUG:
                    print 'POLL %i PUSH' % (id, )
            else:
                if p2pool.DEBUG:
                    print 'POLL %i WAITING' % (id, )
                yield self.worker_bridge.new_work_event.get_deferred()
            self.worker_views[
                request_id] = self.worker_bridge.new_work_event.times

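        # Assemble fresh work from the worker bridge and remember which
        # handler should receive the eventual submission.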
        x, handler = self.worker_bridge.get_work(
            *self.worker_bridge.preprocess_request(
                request.getUser() if request.getUser() is not None else ''))
        res = getwork.BlockAttempt(
            version=x['version'],
            previous_block=x['previous_block'],
            merkle_root=axe_data.check_merkle_link(
                axe_data.hash256(x['coinb1'] + '\0' *
                                 self.worker_bridge.COINBASE_NONCE_LENGTH +
                                 x['coinb2']), x['merkle_link']),
            timestamp=x['timestamp'],
            bits=x['bits'],
            share_target=x['share_target'],
        )
        assert res.merkle_root not in self.merkle_root_to_handler

        self.merkle_root_to_handler[res.merkle_root] = handler

        if p2pool.DEBUG:
            print 'POLL %i END identifier=%i' % (
                id, self.worker_bridge.new_work_event.times)

        extra_params = {}
        if request.getHeader('User-Agent') == 'Jephis PIC Miner':
            # ASICMINER BE Blades apparently have a buffer overflow bug and
            # can't handle much extra in the getwork response
            extra_params = {}
        else:
            extra_params = dict(identifier=str(
                self.worker_bridge.new_work_event.times),
                                submitold=True)
        defer.returnValue(res.getwork(**extra_params))