Example 1
 def compute(request):
     state = current_work.value
     user = worker_interface.get_username(request)
     
     payout_script = get_payout_script_from_username(user)
     if payout_script is None or random.uniform(0, 100) < args.worker_fee:
         payout_script = my_script
     
     if len(p2p_node.peers) == 0 and net.PERSIST:
         raise jsonrpc.Error(-12345, u'p2pool is not connected to any peers')
     if state['best_share_hash'] is None and net.PERSIST:
         raise jsonrpc.Error(-12345, u'p2pool is downloading shares')
     if time.time() > current_work2.value['last_update'] + 60:
         raise jsonrpc.Error(-12345, u'lost contact with bitcoind')
     
     previous_share = None if state['best_share_hash'] is None else tracker.shares[state['best_share_hash']]
     subsidy = current_work2.value['subsidy']
     share_info, generate_tx = p2pool_data.generate_transaction(
         tracker=tracker,
         share_data=dict(
             previous_share_hash=state['best_share_hash'],
             coinbase='' if state['aux_work'] is None else '\xfa\xbemm' + bitcoin_data.HashType().pack(state['aux_work']['hash'])[::-1] + struct.pack('<ii', 1, 0),
             nonce=run_identifier + struct.pack('<Q', random.randrange(2**64)),
             new_script=payout_script,
             subsidy=subsidy,
             donation=math.perfect_round(65535*args.donation_percentage/100),
             stale_frac=(lambda shares, stales:
                 255 if shares == 0 else math.perfect_round(254*stales/shares)
             )(*get_share_counts()),
         ),
         block_target=state['target'],
         desired_timestamp=int(time.time() - current_work2.value['clock_offset']),
         net=net,
     )
     
     print 'New work for worker %s! Difficulty: %.06f Payout if block: %.6f %s Total block value: %.6f %s including %i transactions' % (
         user,
         bitcoin_data.target_to_difficulty(share_info['target']),
         (sum(t['value'] for t in generate_tx['tx_outs'] if t['script'] == payout_script) - subsidy//200)*1e-8, net.BITCOIN_SYMBOL,
         subsidy*1e-8, net.BITCOIN_SYMBOL,
         len(current_work2.value['transactions']),
     )
     
     transactions = [generate_tx] + list(current_work2.value['transactions'])
     merkle_root = bitcoin_data.merkle_hash(transactions)
     merkle_root_to_transactions[merkle_root] = share_info, transactions, time.time()
     
     return bitcoin_getwork.BlockAttempt(state['version'], state['previous_block'], merkle_root, current_work2.value['time'], state['target'], share_info['target']), state['best_share_hash']
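
A note on the stale_frac field assembled above: it packs the observed stale-share ratio into a single byte, with 255 reserved as a sentinel for "no shares seen yet". The following is a minimal standalone sketch of that encoding; perfect_round here is a plain round-half-up stand-in for p2pool's math.perfect_round rather than the project's own helper.

from __future__ import division

def perfect_round(x):
    # stand-in for p2pool's math.perfect_round: round half up
    return int(x + 0.5)

def encode_stale_frac(shares, stales):
    # scale the stale ratio onto 0..254; 255 means "no data yet"
    return 255 if shares == 0 else perfect_round(254 * stales / shares)

# e.g. encode_stale_frac(0, 0) == 255, encode_stale_frac(100, 5) == 13
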
Example 2
    def get_work(self, pubkey_hash, desired_share_target, desired_pseudoshare_target):
        if self.node.best_share_var.value is None and self.node.net.PERSIST:
            raise jsonrpc.Error_for_code(-12345)(u'p2pool is downloading shares')
        
        if self.merged_work.value:
            tree, size = bitcoin_data.make_auxpow_tree(self.merged_work.value)
            mm_hashes = [self.merged_work.value.get(tree.get(i), dict(hash=0))['hash'] for i in xrange(size)]
            mm_data = '\xfa\xbemm' + bitcoin_data.aux_pow_coinbase_type.pack(dict(
                merkle_root=bitcoin_data.merkle_hash(mm_hashes),
                size=size,
                nonce=0,
            ))
            mm_later = [(aux_work, mm_hashes.index(aux_work['hash']), mm_hashes) for chain_id, aux_work in self.merged_work.value.iteritems()]
        else:
            mm_data = ''
            mm_later = []
        
        tx_hashes = [bitcoin_data.hash256(bitcoin_data.tx_type.pack(tx)) for tx in self.current_work.value['transactions']]
        tx_map = dict(zip(tx_hashes, self.current_work.value['transactions']))
        
        previous_share = self.node.tracker.items[self.node.best_share_var.value] if self.node.best_share_var.value is not None else None
        if previous_share is None:
            share_type = p2pool_data.Share
        else:
            previous_share_type = type(previous_share)
            
            if previous_share_type.SUCCESSOR is None or self.node.tracker.get_height(previous_share.hash) < self.node.net.CHAIN_LENGTH:
                share_type = previous_share_type
            else:
                successor_type = previous_share_type.SUCCESSOR
                
                counts = p2pool_data.get_desired_version_counts(self.node.tracker,
                    self.node.tracker.get_nth_parent_hash(previous_share.hash, self.node.net.CHAIN_LENGTH*9//10), self.node.net.CHAIN_LENGTH//10)
                upgraded = counts.get(successor_type.VERSION, 0)/sum(counts.itervalues())
                if upgraded > .65:
                    print 'Switchover imminent. Upgraded: %.3f%% Threshold: %.3f%%' % (upgraded*100, 95)
                print 
                # Share -> NewShare only valid if 95% of hashes in [net.CHAIN_LENGTH*9//10, net.CHAIN_LENGTH] for new version
                if counts.get(successor_type.VERSION, 0) > sum(counts.itervalues())*95//100:
                    share_type = successor_type
                else:
                    share_type = previous_share_type
        
        if desired_share_target is None:
            desired_share_target = 2**256-1  # will be reduced to SANE_TARGET_RANGE later
            local_hash_rate = self._estimate_local_hash_rate()
            if local_hash_rate is not None:
                desired_share_target = min(desired_share_target,
                    bitcoin_data.average_attempts_to_target(local_hash_rate * self.node.net.SHARE_PERIOD / 0.0167)) # limit to 1.67% of pool shares by modulating share difficulty
           
            local_addr_rates = self.get_local_addr_rates()
            lookbehind = 3600//self.node.net.SHARE_PERIOD
            block_subsidy = self.node.bitcoind_work.value['subsidy']
            if previous_share is not None and self.node.tracker.get_height(previous_share.hash) > lookbehind:
                expected_payout_per_block = local_addr_rates.get(pubkey_hash, 0)/p2pool_data.get_pool_attempts_per_second(self.node.tracker, self.node.best_share_var.value, lookbehind) \
                    * block_subsidy*(1-self.donation_percentage/100) # XXX doesn't use global stale rate to compute pool hash
                if expected_payout_per_block < self.node.net.PARENT.DUST_THRESHOLD:
                    desired_share_target = min(desired_share_target,
                        bitcoin_data.average_attempts_to_target((bitcoin_data.target_to_average_attempts(self.node.bitcoind_work.value['bits'].target)*self.node.net.SPREAD)*self.node.net.PARENT.DUST_THRESHOLD/block_subsidy)
                    )
        
        if True:
            share_info, gentx, other_transaction_hashes, get_share = share_type.generate_transaction(
                tracker=self.node.tracker,
                share_data=dict(
                    previous_share_hash=self.node.best_share_var.value,
                    coinbase=(script.create_push_script([
                        self.current_work.value['height'],
                        ] + ([mm_data] if mm_data else []) + [
                    ]) + self.current_work.value['coinbaseflags'])[:100],
                    nonce=random.randrange(2**32),
                    pubkey_hash=pubkey_hash,
                    subsidy=self.current_work.value['subsidy'],
                    donation=math.perfect_round(65535*self.donation_percentage/100),
                    stale_info=(lambda (orphans, doas), total, (orphans_recorded_in_chain, doas_recorded_in_chain):
                        'orphan' if orphans > orphans_recorded_in_chain else
                        'doa' if doas > doas_recorded_in_chain else
                        None
                    )(*self.get_stale_counts()),
                    desired_version=(share_type.SUCCESSOR if share_type.SUCCESSOR is not None else share_type).VOTING_VERSION,
                ),
                block_target=self.current_work.value['bits'].target,
                desired_timestamp=int(time.time() + 0.5),
                desired_target=desired_share_target,
                ref_merkle_link=dict(branch=[], index=0),
                desired_other_transaction_hashes_and_fees=zip(tx_hashes, self.current_work.value['transaction_fees']),
                net=self.node.net,
                known_txs=tx_map,
                base_subsidy=self.node.net.PARENT.SUBSIDY_FUNC(self.current_work.value['height']),
            )
        
        packed_gentx = bitcoin_data.tx_type.pack(gentx)
        other_transactions = [tx_map[tx_hash] for tx_hash in other_transaction_hashes]
        
        mm_later = [(dict(aux_work, target=aux_work['target'] if aux_work['target'] != 'p2pool' else share_info['bits'].target), index, hashes) for aux_work, index, hashes in mm_later]
        
        if desired_pseudoshare_target is None:
            target = 2**256-1
            local_hash_rate = self._estimate_local_hash_rate()
            if local_hash_rate is not None:
                target = min(target,
                    bitcoin_data.average_attempts_to_target(local_hash_rate * 1)) # limit to 1 share response every second by modulating pseudoshare difficulty
        else:
            target = desired_pseudoshare_target
           
       # target = max(target, share_info['bits'].target)
        
        for aux_work, index, hashes in mm_later:
            target = max(target, aux_work['target'])
         
        target = math.clip(target, self.node.net.PARENT.SANE_TARGET_RANGE)
      
        
        getwork_time = time.time()
        lp_count = self.new_work_event.times
        merkle_link = bitcoin_data.calculate_merkle_link([None] + other_transaction_hashes, 0)
        
        print 'New work for worker! Difficulty: %.06f Share difficulty: %.08f Total block value: %.6f %s including %i transactions' % (
            bitcoin_data.target_to_difficulty(target),
            bitcoin_data.target_to_difficulty(share_info['bits'].target),
            self.current_work.value['subsidy']*1e-8, self.node.net.PARENT.SYMBOL,
            len(self.current_work.value['transactions']),
        )
        
        ba = dict(
            version=min(self.current_work.value['version'], 2),
            previous_block=self.current_work.value['previous_block'],
            merkle_link=merkle_link,
            coinb1=packed_gentx[:-self.COINBASE_NONCE_LENGTH-4],
            coinb2=packed_gentx[-4:],
            timestamp=self.current_work.value['time'],
            bits=self.current_work.value['bits'],
            share_target=target,
        )
        
        received_header_hashes = set()
        
        def got_response(header, user, coinbase_nonce):
            assert len(coinbase_nonce) == self.COINBASE_NONCE_LENGTH
            new_packed_gentx = packed_gentx[:-self.COINBASE_NONCE_LENGTH-4] + coinbase_nonce + packed_gentx[-4:] if coinbase_nonce != '\0'*self.COINBASE_NONCE_LENGTH else packed_gentx
            new_gentx = bitcoin_data.tx_type.unpack(new_packed_gentx) if coinbase_nonce != '\0'*self.COINBASE_NONCE_LENGTH else gentx
            
           
            header_hash = bitcoin_data.hash256(bitcoin_data.block_header_type.pack(header))
            pow_hash = self.node.net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(header))
            try:
                if pow_hash <= header['bits'].target or p2pool.DEBUG:
                    helper.submit_block(dict(header=header, txs=[new_gentx] + other_transactions), False, self.node.factory, self.node.bitcoind, self.node.bitcoind_work, self.node.net)
                    if pow_hash <= header['bits'].target:
                        print
                        print 'GOT BLOCK FROM MINER! Passing to bitcoind! %s%064x' % (self.node.net.PARENT.BLOCK_EXPLORER_URL_PREFIX, header_hash)
                        print
            except:
                log.err(None, 'Error while processing potential block:')
            
            user, _, _, _ = self.get_user_details(user)
            assert header['previous_block'] == ba['previous_block']
            assert header['merkle_root'] == bitcoin_data.check_merkle_link(bitcoin_data.hash256(new_packed_gentx), merkle_link)
            assert header['bits'] == ba['bits']
            
            on_time = self.new_work_event.times == lp_count
            
            for aux_work, index, hashes in mm_later:
                try:
                    if pow_hash <= aux_work['target'] or p2pool.DEBUG:
                        df = deferral.retry('Error submitting merged block: (will retry)', 10, 10)(aux_work['merged_proxy'].rpc_getauxblock)(
                            pack.IntType(256, 'big').pack(aux_work['hash']).encode('hex'),
                           # neoscrypt uses little endian only
                           # pack.IntType(256, 'little').pack(aux_work['hash']).encode('hex'),
                            bitcoin_data.aux_pow_type.pack(dict(
                                merkle_tx=dict(
                                    tx=new_gentx,
                                    block_hash=header_hash,
                                    merkle_link=merkle_link,
                                ),
                                merkle_link=bitcoin_data.calculate_merkle_link(hashes, index),
                                parent_block_header=header,
                            )).encode('hex'),
                        )
                        @df.addCallback
                        def _(result, aux_work=aux_work):
                            if result != (pow_hash <= aux_work['target']):
                                print >>sys.stderr, 'Merged block submittal result: %s Expected: %s' % (result, pow_hash <= aux_work['target'])
                            else:
                                print 'Merged block submittal result: %s' % (result,)
                        @df.addErrback
                        def _(err):
                            log.err(err, 'Error submitting merged block:')
                except:
                    log.err(None, 'Error while processing merged mining POW:')
            
            if pow_hash <= share_info['bits'].target and header_hash not in received_header_hashes:
                last_txout_nonce = pack.IntType(8*self.COINBASE_NONCE_LENGTH).unpack(coinbase_nonce)
                share = get_share(header, last_txout_nonce)
                
                print 'GOT SHARE! %s %s prev %s age %.2fs%s' % (
                    user,
                    p2pool_data.format_hash(share.hash),
                    p2pool_data.format_hash(share.previous_hash),
                    time.time() - getwork_time,
                    ' DEAD ON ARRIVAL' if not on_time else '',
                )
                self.my_share_hashes.add(share.hash)
                if not on_time:
                    self.my_doa_share_hashes.add(share.hash)
                
                self.node.tracker.add(share)
                self.node.set_best_share()
                
                try:
                    if (pow_hash <= header['bits'].target or p2pool.DEBUG) and self.node.p2p_node is not None:
                        self.node.p2p_node.broadcast_share(share.hash)
                except:
                    log.err(None, 'Error forwarding block solution:')
                
                self.share_received.happened(bitcoin_data.target_to_average_attempts(share.target), not on_time, share.hash)
            if p2pool.DEBUG:
                print 'Hash:   %X' % (pow_hash,)
                print 'Target: %X' % (target,)
            if pow_hash > target:
                print 'Worker %s submitted share with hash > target:' % (user,)
            elif header_hash in received_header_hashes:
                print >>sys.stderr, 'Worker %s submitted share more than once!' % (user,)
            else:
                received_header_hashes.add(header_hash)
                
                self.pseudoshare_received.happened(bitcoin_data.target_to_average_attempts(target), not on_time, user)
                self.recent_shares_ts_work.append((time.time(), bitcoin_data.target_to_average_attempts(target)))
                while len(self.recent_shares_ts_work) > 50:
                    self.recent_shares_ts_work.pop(0)
                self.local_rate_monitor.add_datum(dict(work=bitcoin_data.target_to_average_attempts(target), dead=not on_time, user=user, share_target=share_info['bits'].target))
                self.local_addr_rate_monitor.add_datum(dict(work=bitcoin_data.target_to_average_attempts(target), pubkey_hash=pubkey_hash))
            
            return on_time
        
        return ba, got_response
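
The pseudoshare target chosen above follows a simple pattern: start from the easiest possible target, tighten it so the local miner returns roughly one pseudoshare per second, keep it no harder than the share target or any merged-mining target (Example 2 leaves the share-target step commented out, while Examples 3-5 keep it), and finally clamp it to the network's sane range. The sketch below restates that flow with simplified stand-ins for bitcoin_data.average_attempts_to_target and math.clip; it illustrates the logic rather than reproducing the project's helpers.

def average_attempts_to_target(attempts):
    # stand-in: a target that one hash in `attempts` is expected to satisfy
    return 2**256 // attempts - 1

def clip(x, (low, high)):
    # stand-in for p2pool's math.clip
    return max(low, min(high, x))

def choose_pseudoshare_target(desired, local_hash_rate, share_target,
                              aux_targets, sane_range):
    if desired is None:
        target = 2**256 - 1
        if local_hash_rate:
            # roughly one pseudoshare response per second
            target = min(target, average_attempts_to_target(local_hash_rate))
    else:
        target = desired
    # pseudoshares must be no harder than the real share or any merged chain
    target = max([target, share_target] + list(aux_targets))
    return clip(target, sane_range)
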
Example 3
    def get_work(self, pubkey_hash, desired_share_target,
                 desired_pseudoshare_target):
        global print_throttle
        if (self.node.p2p_node is None or len(self.node.p2p_node.peers)
                == 0) and self.node.net.PERSIST:
            raise jsonrpc.Error_for_code(-12345)(
                u'p2pool is not connected to any peers')
        if self.node.best_share_var.value is None and self.node.net.PERSIST:
            raise jsonrpc.Error_for_code(-12345)(
                u'p2pool is downloading shares')

        if self.merged_work.value:
            tree, size = dash_data.make_auxpow_tree(self.merged_work.value)
            mm_hashes = [
                self.merged_work.value.get(tree.get(i), dict(hash=0))['hash']
                for i in xrange(size)
            ]
            mm_data = '\xfa\xbemm' + dash_data.aux_pow_coinbase_type.pack(
                dict(
                    merkle_root=dash_data.merkle_hash(mm_hashes),
                    size=size,
                    nonce=0,
                ))
            mm_later = [
                (aux_work, mm_hashes.index(aux_work['hash']), mm_hashes)
                for chain_id, aux_work in self.merged_work.value.iteritems()
            ]
        else:
            mm_data = ''
            mm_later = []

        tx_hashes = [
            dash_data.hash256(dash_data.tx_type.pack(tx))
            for tx in self.current_work.value['transactions']
        ]
        tx_map = dict(zip(tx_hashes, self.current_work.value['transactions']))

        previous_share = self.node.tracker.items[
            self.node.best_share_var.
            value] if self.node.best_share_var.value is not None else None
        if previous_share is None:
            share_type = p2pool_data.Share
        else:
            previous_share_type = type(previous_share)

            if previous_share_type.SUCCESSOR is None or self.node.tracker.get_height(
                    previous_share.hash) < self.node.net.CHAIN_LENGTH:
                share_type = previous_share_type
            else:
                successor_type = previous_share_type.SUCCESSOR

                counts = p2pool_data.get_desired_version_counts(
                    self.node.tracker,
                    self.node.tracker.get_nth_parent_hash(
                        previous_share.hash,
                        self.node.net.CHAIN_LENGTH * 9 // 10),
                    self.node.net.CHAIN_LENGTH // 10)
                upgraded = counts.get(successor_type.VERSION, 0) / sum(
                    counts.itervalues())
                if upgraded > .65:
                    print 'Switchover imminent. Upgraded: %.3f%% Threshold: %.3f%%' % (
                        upgraded * 100, 95)
                # Share -> NewShare only valid if 95% of hashes in [net.CHAIN_LENGTH*9//10, net.CHAIN_LENGTH] for new version
                if counts.get(successor_type.VERSION,
                              0) > sum(counts.itervalues()) * 95 // 100:
                    share_type = successor_type
                else:
                    share_type = previous_share_type

        if desired_share_target is None:
            desired_share_target = 2**256 - 1
            local_hash_rate = self._estimate_local_hash_rate()
            if local_hash_rate is not None:
                desired_share_target = min(
                    desired_share_target,
                    dash_data.average_attempts_to_target(
                        local_hash_rate * self.node.net.SHARE_PERIOD / 0.0167)
                )  # limit to 1.67% of pool shares by modulating share difficulty

            local_addr_rates = self.get_local_addr_rates()
            lookbehind = 3600 // self.node.net.SHARE_PERIOD
            block_subsidy = self.node.dashd_work.value['subsidy']
            if previous_share is not None and self.node.tracker.get_height(
                    previous_share.hash) > lookbehind:
                expected_payout_per_block = local_addr_rates.get(pubkey_hash, 0)/p2pool_data.get_pool_attempts_per_second(self.node.tracker, self.node.best_share_var.value, lookbehind) \
                    * block_subsidy*(1-self.donation_percentage/100) # XXX doesn't use global stale rate to compute pool hash
                if expected_payout_per_block < self.node.net.PARENT.DUST_THRESHOLD:
                    desired_share_target = min(
                        desired_share_target,
                        dash_data.average_attempts_to_target(
                            (dash_data.target_to_average_attempts(
                                self.node.dashd_work.value['bits'].target) *
                             self.node.net.SPREAD) *
                            self.node.net.PARENT.DUST_THRESHOLD /
                            block_subsidy))

        if True:
            share_info, gentx, other_transaction_hashes, get_share = share_type.generate_transaction(
                tracker=self.node.tracker,
                share_data=dict(
                    previous_share_hash=self.node.best_share_var.value,
                    coinbase=(script.create_push_script([
                        self.current_work.value['height'],
                    ] + ([mm_data] if mm_data else []) + []) +
                              self.current_work.value['coinbaseflags'] +
                              self.node.net.COINBASEEXT)[:100],
                    nonce=random.randrange(2**32),
                    pubkey_hash=pubkey_hash,
                    subsidy=self.current_work.value['subsidy'],
                    donation=math.perfect_round(
                        65535 * self.donation_percentage / 100),
                    stale_info=(
                        lambda (orphans, doas), total,
                        (orphans_recorded_in_chain, doas_recorded_in_chain
                         ): 'orphan'
                        if orphans > orphans_recorded_in_chain else 'doa'
                        if doas > doas_recorded_in_chain else None)(
                            *self.get_stale_counts()),
                    desired_version=(share_type.SUCCESSOR
                                     if share_type.SUCCESSOR is not None else
                                     share_type).VOTING_VERSION,
                    payment_amount=self.current_work.value['payment_amount'],
                    packed_payments=self.current_work.value['packed_payments'],
                ),
                block_target=self.current_work.value['bits'].target,
                desired_timestamp=int(time.time() + 0.5),
                desired_target=desired_share_target,
                ref_merkle_link=dict(branch=[], index=0),
                desired_other_transaction_hashes_and_fees=zip(
                    tx_hashes, self.current_work.value['transaction_fees']),
                net=self.node.net,
                known_txs=tx_map,
                base_subsidy=self.current_work.value['subsidy'],
            )

        packed_gentx = dash_data.tx_type.pack(gentx)
        other_transactions = [
            tx_map[tx_hash] for tx_hash in other_transaction_hashes
        ]

        mm_later = [
            (dict(aux_work,
                  target=aux_work['target'] if aux_work['target'] != 'p2pool'
                  else share_info['bits'].target), index, hashes)
            for aux_work, index, hashes in mm_later
        ]

        if desired_pseudoshare_target is None:
            target = 2**256 - 1
            local_hash_rate = self._estimate_local_hash_rate()
            if local_hash_rate is not None:
                target = min(
                    target,
                    dash_data.average_attempts_to_target(local_hash_rate * 1)
                )  # limit to 1 share response every second by modulating pseudoshare difficulty
        else:
            target = desired_pseudoshare_target
        target = max(target, share_info['bits'].target)
        for aux_work, index, hashes in mm_later:
            target = max(target, aux_work['target'])
        target = math.clip(target, self.node.net.PARENT.SANE_TARGET_RANGE)

        getwork_time = time.time()
        lp_count = self.new_work_event.times
        merkle_link = dash_data.calculate_merkle_link(
            [None] + other_transaction_hashes, 0)

        if print_throttle == 0.0:
            print_throttle = time.time()
        else:
            current_time = time.time()
            if (current_time - print_throttle) > 5.0:
                print 'New work for worker %s! Difficulty: %.06f Share difficulty: %.06f Block %s Total value: %.6f %s including %i transactions' % (
                    dash_data.pubkey_hash_to_address(pubkey_hash,
                                                     self.node.net.PARENT),
                    dash_data.target_to_difficulty(target),
                    dash_data.target_to_difficulty(share_info['bits'].target),
                    self.current_work.value['height'],
                    self.current_work.value['subsidy'] * 1e-8,
                    self.node.net.PARENT.SYMBOL,
                    len(self.current_work.value['transactions']),
                )
                print_throttle = time.time()

        #need this for stats
        self.last_work_shares.value[dash_data.pubkey_hash_to_address(
            pubkey_hash, self.node.net.PARENT)] = share_info['bits']

        ba = dict(
            version=self.current_work.value['version'],
            previous_block=self.current_work.value['previous_block'],
            merkle_link=merkle_link,
            coinb1=packed_gentx[:-self.COINBASE_NONCE_LENGTH - 4],
            coinb2=packed_gentx[-4:],
            timestamp=self.current_work.value['time'],
            bits=self.current_work.value['bits'],
            share_target=target,
        )

        received_header_hashes = set()

        def got_response(header, user, coinbase_nonce):
            assert len(coinbase_nonce) == self.COINBASE_NONCE_LENGTH
            new_packed_gentx = packed_gentx[:-self.COINBASE_NONCE_LENGTH - 4] + coinbase_nonce + packed_gentx[
                -4:] if coinbase_nonce != '\0' * self.COINBASE_NONCE_LENGTH else packed_gentx
            new_gentx = dash_data.tx_type.unpack(
                new_packed_gentx
            ) if coinbase_nonce != '\0' * self.COINBASE_NONCE_LENGTH else gentx

            header_hash = self.node.net.PARENT.BLOCKHASH_FUNC(
                dash_data.block_header_type.pack(header))
            pow_hash = self.node.net.PARENT.POW_FUNC(
                dash_data.block_header_type.pack(header))
            try:
                if pow_hash <= header['bits'].target or p2pool.DEBUG:
                    helper.submit_block(
                        dict(header=header,
                             txs=[new_gentx] + other_transactions), False,
                        self.node.factory, self.node.dashd,
                        self.node.dashd_work, self.node.net)
                    if pow_hash <= header['bits'].target:
                        print
                        print 'GOT BLOCK FROM MINER! Passing to dashd! %s%064x' % (
                            self.node.net.PARENT.BLOCK_EXPLORER_URL_PREFIX,
                            header_hash)
                        print
                        # New block found
                        self.node.factory.new_block.happened(header_hash)
            except:
                log.err(None, 'Error while processing potential block:')

            user, _, _, _ = self.get_user_details(user)
            assert header['previous_block'] == ba['previous_block']
            assert header['merkle_root'] == dash_data.check_merkle_link(
                dash_data.hash256(new_packed_gentx), merkle_link)
            assert header['bits'] == ba['bits']

            on_time = self.new_work_event.times == lp_count

            for aux_work, index, hashes in mm_later:
                try:
                    if pow_hash <= aux_work['target'] or p2pool.DEBUG:
                        df = deferral.retry(
                            'Error submitting merged block: (will retry)', 10,
                            10)(aux_work['merged_proxy'].rpc_getauxblock)(
                                pack.IntType(256, 'big').pack(
                                    aux_work['hash']).encode('hex'),
                                dash_data.aux_pow_type.pack(
                                    dict(
                                        merkle_tx=dict(
                                            tx=new_gentx,
                                            block_hash=header_hash,
                                            merkle_link=merkle_link,
                                        ),
                                        merkle_link=dash_data.
                                        calculate_merkle_link(hashes, index),
                                        parent_block_header=header,
                                    )).encode('hex'),
                            )

                        @df.addCallback
                        def _(result, aux_work=aux_work):
                            if result != (pow_hash <= aux_work['target']):
                                print >> sys.stderr, 'Merged block submittal result: %s Expected: %s' % (
                                    result, pow_hash <= aux_work['target'])
                            else:
                                print 'Merged block submittal result: %s' % (
                                    result, )

                        @df.addErrback
                        def _(err):
                            log.err(err, 'Error submitting merged block:')
                except:
                    log.err(None, 'Error while processing merged mining POW:')

            if pow_hash <= share_info[
                    'bits'].target and header_hash not in received_header_hashes:
                last_txout_nonce = pack.IntType(
                    8 * self.COINBASE_NONCE_LENGTH).unpack(coinbase_nonce)
                share = get_share(header, last_txout_nonce)

                print 'GOT SHARE! %s %s prev %s age %.2fs%s' % (
                    user,
                    p2pool_data.format_hash(share.hash),
                    p2pool_data.format_hash(share.previous_hash),
                    time.time() - getwork_time,
                    ' DEAD ON ARRIVAL' if not on_time else '',
                )
                self.my_share_hashes.add(share.hash)
                if not on_time:
                    self.my_doa_share_hashes.add(share.hash)

                self.node.tracker.add(share)
                self.node.set_best_share()

                try:
                    if (pow_hash <= header['bits'].target or
                            p2pool.DEBUG) and self.node.p2p_node is not None:
                        self.node.p2p_node.broadcast_share(share.hash)
                except:
                    log.err(None, 'Error forwarding block solution:')

                self.share_received.happened(
                    dash_data.target_to_average_attempts(share.target),
                    not on_time, share.hash)

            if pow_hash > target:
                print 'Worker %s submitted share with hash > target:' % (
                    user, )
                print '    Hash:   %56x' % (pow_hash, )
                print '    Target: %56x' % (target, )
            elif header_hash in received_header_hashes:
                print >> sys.stderr, 'Worker %s submitted share more than once!' % (
                    user, )
            else:
                received_header_hashes.add(header_hash)

                self.pseudoshare_received.happened(
                    dash_data.target_to_average_attempts(target), not on_time,
                    user)
                self.recent_shares_ts_work.append(
                    (time.time(),
                     dash_data.target_to_average_attempts(target)))
                while len(self.recent_shares_ts_work) > 50:
                    self.recent_shares_ts_work.pop(0)
                self.local_rate_monitor.add_datum(
                    dict(work=dash_data.target_to_average_attempts(target),
                         dead=not on_time,
                         user=user,
                         share_target=share_info['bits'].target))
                self.local_addr_rate_monitor.add_datum(
                    dict(work=dash_data.target_to_average_attempts(target),
                         pubkey_hash=pubkey_hash))

            return on_time

        return ba, got_response
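
The switchover logic in Examples 2 and 3 only upgrades the share type once at least 95% of a sampled window (the CHAIN_LENGTH//10 shares starting CHAIN_LENGTH*9//10 back from the tip) signals the successor version. A condensed sketch of that vote, assuming counts is the version-to-weight mapping returned by p2pool_data.get_desired_version_counts:

def should_switch_share_type(counts, successor_version, threshold_pct=95):
    # counts: mapping of desired share version -> accumulated weight
    total = sum(counts.itervalues())
    if total == 0:
        return False
    # upgrade only once the successor clears the supermajority threshold
    return counts.get(successor_version, 0) > total * threshold_pct // 100
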
Example 4
    def get_work(self, pubkey, desired_share_target, desired_pseudoshare_target):
        if (self.node.p2p_node is None or len(self.node.p2p_node.peers) == 0) and self.node.net.PERSIST:
            raise jsonrpc.Error_for_code(-12345)(u'p2pool is not connected to any peers')
        if self.node.best_share_var.value is None and self.node.net.PERSIST:
            raise jsonrpc.Error_for_code(-12345)(u'p2pool is downloading shares')
        if time.time() > self.current_work.value['last_update'] + 60:
            raise jsonrpc.Error_for_code(-12345)(u'lost contact with bitcoind')
        
        if self.merged_work.value:
            tree, size = bitcoin_data.make_auxpow_tree(self.merged_work.value)
            mm_hashes = [self.merged_work.value.get(tree.get(i), dict(hash=0))['hash'] for i in xrange(size)]
            mm_data = '\xfa\xbemm' + bitcoin_data.aux_pow_coinbase_type.pack(dict(
                merkle_root=bitcoin_data.merkle_hash(mm_hashes),
                size=size,
                nonce=0,
            ))
            mm_later = [(aux_work, mm_hashes.index(aux_work['hash']), mm_hashes) for chain_id, aux_work in self.merged_work.value.iteritems()]
        else:
            mm_data = ''
            mm_later = []
        
        tx_hashes = [bitcoin_data.hash256(bitcoin_data.tx_type.pack(tx)) for tx in self.current_work.value['transactions']]
        tx_map = dict(zip(tx_hashes, self.current_work.value['transactions']))
        txn_timestamp = self.current_work.value['txn_timestamp']
        
        #print
        #print txn_timestamp
        #print
        
        if self.node.best_share_var.value is None:
            share_type = p2pool_data.Share
        else:
            previous_share = self.node.tracker.items[self.node.best_share_var.value]
            previous_share_type = type(previous_share)
            
            if previous_share_type.SUCCESSOR is None or self.node.tracker.get_height(previous_share.hash) < self.node.net.CHAIN_LENGTH:
                share_type = previous_share_type
            else:
                successor_type = previous_share_type.SUCCESSOR
                
                counts = p2pool_data.get_desired_version_counts(self.node.tracker,
                    self.node.tracker.get_nth_parent_hash(previous_share.hash, self.node.net.CHAIN_LENGTH*9//10), self.node.net.CHAIN_LENGTH//10)
                # Share -> NewShare only valid if 95% of hashes in [net.CHAIN_LENGTH*9//10, net.CHAIN_LENGTH] for new version
                if counts.get(successor_type.VERSION, 0) > sum(counts.itervalues())*95//100:
                    share_type = successor_type
                else:
                    share_type = previous_share_type
        
        if True:
            subsidy = self.node.net.PARENT.SUBSIDY_FUNC(self.current_work.value['bits'].target)
            desired_timestamp = int(time.time() + 0.5)

            share_info, gentx, other_transaction_hashes, get_share = share_type.generate_transaction(
                tracker=self.node.tracker,
                share_data=dict(
                    previous_share_hash=self.node.best_share_var.value,
                    coinbase=(script.create_push_script([
                        self.current_work.value['height'],
                        ] + ([mm_data] if mm_data else []) + [
                    ]) + self.current_work.value['coinbaseflags'])[:100],
                    nonce=random.randrange(2**32),
                    pubkey=pubkey,
                    subsidy=self.current_work.value['subsidy'],
                    donation=math.perfect_round(65535*self.donation_percentage/100),
                    stale_info=(lambda (orphans, doas), total, (orphans_recorded_in_chain, doas_recorded_in_chain):
                        'orphan' if orphans > orphans_recorded_in_chain else
                        'doa' if doas > doas_recorded_in_chain else
                        None
                    )(*self.get_stale_counts()),
                    desired_version=14,
                ),
                block_target=self.current_work.value['bits'].target,
                desired_timestamp=desired_timestamp if txn_timestamp < desired_timestamp else txn_timestamp + 1,
                desired_target=desired_share_target,
                ref_merkle_link=dict(branch=[], index=0),
                desired_other_transaction_hashes_and_fees=zip(tx_hashes, self.current_work.value['transaction_fees']),
                net=self.node.net,
                known_txs=tx_map,
                base_subsidy=subsidy
            )
        
        packed_gentx = bitcoin_data.tx_type.pack(gentx)
        other_transactions = [tx_map[tx_hash] for tx_hash in other_transaction_hashes]
        
        mm_later = [(dict(aux_work, target=aux_work['target'] if aux_work['target'] != 'p2pool' else share_info['bits'].target), index, hashes) for aux_work, index, hashes in mm_later]
        
        if desired_pseudoshare_target is None:
            target = 2**256-1
            if len(self.recent_shares_ts_work) == 50:
                hash_rate = sum(work for ts, work in self.recent_shares_ts_work[1:])//(self.recent_shares_ts_work[-1][0] - self.recent_shares_ts_work[0][0])
                if hash_rate:
                    target = min(target, int(2**256/hash_rate))
        else:
            target = desired_pseudoshare_target
        target = max(target, share_info['bits'].target)
        for aux_work, index, hashes in mm_later:
            target = max(target, aux_work['target'])
        target = math.clip(target, self.node.net.PARENT.SANE_TARGET_RANGE)
        
        getwork_time = time.time()
        lp_count = self.new_work_event.times
        merkle_link = bitcoin_data.calculate_merkle_link([None] + other_transaction_hashes, 0)
        
        print 'New work for worker! Difficulty: %.06f Share difficulty: %.06f Total block value: %.6f %s including %i transactions' % (
            bitcoin_data.target_to_difficulty(target),
            bitcoin_data.target_to_difficulty(share_info['bits'].target),
            self.current_work.value['subsidy']*1e-6, self.node.net.PARENT.SYMBOL,
            len(self.current_work.value['transactions']),
        )
        
        ba = dict(
            version=min(self.current_work.value['version'], 3),
            previous_block=self.current_work.value['previous_block'],
            merkle_link=merkle_link,
            coinb1=packed_gentx[:-4-4],
            coinb2=packed_gentx[-4:],
            timestamp=gentx['timestamp'],
            bits=self.current_work.value['bits'],
            share_target=target,
        )
        
        received_header_hashes = set()
        
        def got_response(header, user, coinbase_nonce):
            assert len(coinbase_nonce) == self.COINBASE_NONCE_LENGTH == 4
            new_packed_gentx = packed_gentx[:-4-4] + coinbase_nonce + packed_gentx[-4:] if coinbase_nonce != '\0'*self.COINBASE_NONCE_LENGTH else packed_gentx
            new_gentx = bitcoin_data.tx_type.unpack(new_packed_gentx) if coinbase_nonce != '\0'*self.COINBASE_NONCE_LENGTH else gentx
            
            header_hash = self.node.net.PARENT.BLOCKHASH_FUNC(bitcoin_data.block_header_type.pack(header))
            pow_hash = self.node.net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(header))
            try:
                if header['timestamp'] > new_gentx['timestamp'] + 3600:
                    print 
                    print header['timestamp'], '>', new_gentx['timestamp'] + 3600
                    print 'Coinbase timestamp is too early!'
                    print 

                    return

                if header['timestamp'] < new_gentx['timestamp']:
                    print 
                    print header['timestamp'], '<', new_gentx['timestamp']
                    print 'Block header timestamp is before coinbase timestamp!'
                    print 
                    return

                if pow_hash <= header['bits'].target or p2pool.DEBUG:
                    helper.submit_block(dict(header=header, txs=[new_gentx] + other_transactions, signature=''), False, self.node.factory, self.node.bitcoind, self.node.bitcoind_work, self.node.net)
                    if pow_hash <= header['bits'].target:
                        print
                        print 'GOT BLOCK FROM MINER! Passing to bitcoind! %s%064x' % (self.node.net.PARENT.BLOCK_EXPLORER_URL_PREFIX, header_hash)
                        print
            except:
                log.err(None, 'Error while processing potential block:')
            
            user, _, _, _ = self.get_user_details(user)
            assert header['previous_block'] == ba['previous_block']
            assert header['merkle_root'] == bitcoin_data.check_merkle_link(bitcoin_data.hash256(new_packed_gentx), merkle_link)
            assert header['bits'] == ba['bits']
            
            on_time = self.new_work_event.times == lp_count
            
            for aux_work, index, hashes in mm_later:
                try:
                    if pow_hash <= aux_work['target'] or p2pool.DEBUG:
                        df = deferral.retry('Error submitting merged block: (will retry)', 10, 10)(aux_work['merged_proxy'].rpc_getauxblock)(
                            pack.IntType(256, 'big').pack(aux_work['hash']).encode('hex'),
                            bitcoin_data.aux_pow_type.pack(dict(
                                merkle_tx=dict(
                                    tx=new_gentx,
                                    block_hash=header_hash,
                                    merkle_link=merkle_link,
                                ),
                                merkle_link=bitcoin_data.calculate_merkle_link(hashes, index),
                                parent_block_header=header,
                            )).encode('hex'),
                        )
                        @df.addCallback
                        def _(result, aux_work=aux_work):
                            if result != (pow_hash <= aux_work['target']):
                                print >>sys.stderr, 'Merged block submittal result: %s Expected: %s' % (result, pow_hash <= aux_work['target'])
                            else:
                                print 'Merged block submittal result: %s' % (result,)
                        @df.addErrback
                        def _(err):
                            log.err(err, 'Error submitting merged block:')
                except:
                    log.err(None, 'Error while processing merged mining POW:')
            
            if pow_hash <= share_info['bits'].target and header_hash not in received_header_hashes:
                share = get_share(header, pack.IntType(32).unpack(coinbase_nonce))
                
                print 'GOT SHARE! %s %s prev %s age %.2fs%s' % (
                    user,
                    p2pool_data.format_hash(share.hash),
                    p2pool_data.format_hash(share.previous_hash),
                    time.time() - getwork_time,
                    ' DEAD ON ARRIVAL' if not on_time else '',
                )
                self.my_share_hashes.add(share.hash)
                if not on_time:
                    self.my_doa_share_hashes.add(share.hash)
                
                self.node.tracker.add(share)
                self.node.set_best_share()
                
                try:
                    if (pow_hash <= header['bits'].target or p2pool.DEBUG) and self.node.p2p_node is not None:
                        self.node.p2p_node.broadcast_share(share.hash)
                except:
                    log.err(None, 'Error forwarding block solution:')
                
                self.share_received.happened(bitcoin_data.target_to_average_attempts(share.target), not on_time)
            
            if pow_hash > target:
                print 'Worker %s submitted share with hash > target:' % (user,)
                print '    Hash:   %56x' % (pow_hash,)
                print '    Target: %56x' % (target,)
            elif header_hash in received_header_hashes:
                print >>sys.stderr, 'Worker %s submitted share more than once!' % (user,)
            else:
                received_header_hashes.add(header_hash)
                
                self.pseudoshare_received.happened(bitcoin_data.target_to_average_attempts(target), not on_time, user)
                self.recent_shares_ts_work.append((time.time(), bitcoin_data.target_to_average_attempts(target)))
                while len(self.recent_shares_ts_work) > 50:
                    self.recent_shares_ts_work.pop(0)
                self.local_rate_monitor.add_datum(dict(work=bitcoin_data.target_to_average_attempts(target), dead=not on_time, user=user))
            
            return on_time
        
        return ba, got_response
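
Example 4 derives its pseudoshare target from a direct local hash-rate estimate: it sums the work of the 50 most recent pseudoshares and divides by the time they span. A standalone sketch of that estimate, assuming recent is the list of (timestamp, work) tuples kept in self.recent_shares_ts_work, oldest first:

def estimate_local_hash_rate(recent):
    # recent: [(timestamp, attempts), ...], oldest first
    if len(recent) < 2:
        return None
    elapsed = recent[-1][0] - recent[0][0]
    if elapsed <= 0:
        return None
    # drop the first sample: its work predates the measured window
    return sum(work for ts, work in recent[1:]) // elapsed
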
Example 5
    def get_work(self, pubkey_hash, desired_share_target,
                 desired_pseudoshare_target):
        if (self.node.p2p_node is None or len(self.node.p2p_node.peers)
                == 0) and self.node.net.PERSIST:
            raise jsonrpc.Error_for_code(-12345)(
                u'p2pool is not connected to any peers')
        if self.node.best_share_var.value is None and self.node.net.PERSIST:
            raise jsonrpc.Error_for_code(-12345)(
                u'p2pool is downloading shares')
        if time.time() > self.current_work.value['last_update'] + 60:
            raise jsonrpc.Error_for_code(-12345)(u'lost contact with bitcoind')

        if self.merged_work.value:
            tree, size = bitcoin_data.make_auxpow_tree(self.merged_work.value)
            mm_hashes = [
                self.merged_work.value.get(tree.get(i), dict(hash=0))['hash']
                for i in xrange(size)
            ]
            mm_data = '\xfa\xbemm' + bitcoin_data.aux_pow_coinbase_type.pack(
                dict(
                    merkle_root=bitcoin_data.merkle_hash(mm_hashes),
                    size=size,
                    nonce=0,
                ))
            mm_later = [
                (aux_work, mm_hashes.index(aux_work['hash']), mm_hashes)
                for chain_id, aux_work in self.merged_work.value.iteritems()
            ]
        else:
            mm_data = ''
            mm_later = []

        tx_hashes = [
            bitcoin_data.hash256(bitcoin_data.tx_type.pack(tx))
            for tx in self.current_work.value['transactions']
        ]
        tx_map = dict(zip(tx_hashes, self.current_work.value['transactions']))

        if self.node.best_share_var.value is None:
            share_type = p2pool_data.Share
        else:
            previous_share = self.node.tracker.items[
                self.node.best_share_var.value]
            previous_share_type = type(previous_share)

            if previous_share_type.SUCCESSOR is None or self.node.tracker.get_height(
                    previous_share.hash) < self.node.net.CHAIN_LENGTH:
                share_type = previous_share_type
            else:
                successor_type = previous_share_type.SUCCESSOR

                counts = p2pool_data.get_desired_version_counts(
                    self.node.tracker,
                    self.node.tracker.get_nth_parent_hash(
                        previous_share.hash,
                        self.node.net.CHAIN_LENGTH * 9 // 10),
                    self.node.net.CHAIN_LENGTH // 10)
                # Share -> NewShare only valid if 95% of hashes in [net.CHAIN_LENGTH*9//10, net.CHAIN_LENGTH] for new version
                if counts.get(successor_type.VERSION,
                              0) > sum(counts.itervalues()) * 95 // 100:
                    share_type = successor_type
                else:
                    share_type = previous_share_type

        if True:
            share_info, gentx, other_transaction_hashes, get_share = share_type.generate_transaction(
                tracker=self.node.tracker,
                share_data=dict(
                    previous_share_hash=self.node.best_share_var.value,
                    coinbase=(script.create_push_script([
                        self.current_work.value['height'],
                    ] + ([mm_data] if mm_data else []) + []) +
                              self.current_work.value['coinbaseflags'])[:100],
                    nonce=random.randrange(2**32),
                    pubkey_hash=pubkey_hash,
                    subsidy=self.current_work.value['subsidy'],
                    donation=math.perfect_round(
                        65535 * self.donation_percentage / 100),
                    stale_info=(
                        lambda (orphans, doas), total,
                        (orphans_recorded_in_chain, doas_recorded_in_chain
                         ): 'orphan'
                        if orphans > orphans_recorded_in_chain else 'doa'
                        if doas > doas_recorded_in_chain else None)(
                            *self.get_stale_counts()),
                    desired_version=share_type.SUCCESSOR.VERSION if
                    share_type.SUCCESSOR is not None else share_type.VERSION,
                ),
                block_target=self.current_work.value['bits'].target,
                desired_timestamp=int(time.time() + 0.5),
                desired_target=desired_share_target,
                ref_merkle_link=dict(branch=[], index=0),
                desired_other_transaction_hashes=tx_hashes,
                net=self.node.net,
                known_txs=tx_map,
            )

        transactions = [gentx] + [
            tx_map[tx_hash] for tx_hash in other_transaction_hashes
        ]

        mm_later = [
            (dict(aux_work,
                  target=aux_work['target'] if aux_work['target'] != 'p2pool'
                  else share_info['bits'].target), index, hashes)
            for aux_work, index, hashes in mm_later
        ]

        if desired_pseudoshare_target is None:
            target = 2**256 - 1
            if len(self.recent_shares_ts_work) == 50:
                hash_rate = sum(
                    work for ts, work in self.recent_shares_ts_work[1:]) // (
                        self.recent_shares_ts_work[-1][0] -
                        self.recent_shares_ts_work[0][0])
                if hash_rate:
                    target = min(target, int(2**256 / hash_rate))
        else:
            target = desired_pseudoshare_target
        target = max(target, share_info['bits'].target)
        for aux_work, index, hashes in mm_later:
            target = max(target, aux_work['target'])
        target = math.clip(target, self.node.net.PARENT.SANE_TARGET_RANGE)

        getwork_time = time.time()
        lp_count = self.new_work_event.times
        merkle_link = bitcoin_data.calculate_merkle_link([
            bitcoin_data.hash256(bitcoin_data.tx_type.pack(tx))
            for tx in transactions
        ], 0)

        print 'New work for worker! Difficulty: %.06f Share difficulty: %.06f Total block value: %.6f %s including %i transactions' % (
            bitcoin_data.target_to_difficulty(target),
            bitcoin_data.target_to_difficulty(share_info['bits'].target),
            self.current_work.value['subsidy'] * 1e-8,
            self.node.net.PARENT.SYMBOL,
            len(self.current_work.value['transactions']),
        )

        ba = bitcoin_getwork.BlockAttempt(
            version=min(self.current_work.value['version'], 2),
            previous_block=self.current_work.value['previous_block'],
            merkle_root=bitcoin_data.check_merkle_link(
                bitcoin_data.hash256(bitcoin_data.tx_type.pack(
                    transactions[0])), merkle_link),
            timestamp=self.current_work.value['time'],
            bits=self.current_work.value['bits'],
            share_target=target,
        )

        received_header_hashes = set()

        def got_response(header, request):
            header_hash = bitcoin_data.hash256(
                bitcoin_data.block_header_type.pack(header))
            pow_hash = self.node.net.PARENT.POW_FUNC(
                bitcoin_data.block_header_type.pack(header))
            try:
                if pow_hash <= header['bits'].target or p2pool.DEBUG:
                    helper.submit_block(dict(header=header,
                                             txs=transactions), False,
                                        self.node.factory, self.node.bitcoind,
                                        self.node.bitcoind_work, self.node.net)
                    if pow_hash <= header['bits'].target:
                        print
                        print 'GOT BLOCK FROM MINER! Passing to bitcoind! %s%064x' % (
                            self.node.net.PARENT.BLOCK_EXPLORER_URL_PREFIX,
                            header_hash)
                        print
            except:
                log.err(None, 'Error while processing potential block:')

            user, _, _, _ = self.get_user_details(request)
            assert header['previous_block'] == ba.previous_block
            assert header['merkle_root'] == ba.merkle_root
            assert header['bits'] == ba.bits

            on_time = self.new_work_event.times == lp_count

            for aux_work, index, hashes in mm_later:
                try:
                    if pow_hash <= aux_work['target'] or p2pool.DEBUG:
                        df = deferral.retry(
                            'Error submitting merged block: (will retry)', 10,
                            10)(aux_work['merged_proxy'].rpc_getauxblock)(
                                pack.IntType(256, 'big').pack(
                                    aux_work['hash']).encode('hex'),
                                bitcoin_data.aux_pow_type.pack(
                                    dict(
                                        merkle_tx=dict(
                                            tx=transactions[0],
                                            block_hash=header_hash,
                                            merkle_link=merkle_link,
                                        ),
                                        merkle_link=bitcoin_data.
                                        calculate_merkle_link(hashes, index),
                                        parent_block_header=header,
                                    )).encode('hex'),
                            )

                        @df.addCallback
                        def _(result, aux_work=aux_work):
                            if result != (pow_hash <= aux_work['target']):
                                print >> sys.stderr, 'Merged block submittal result: %s Expected: %s' % (
                                    result, pow_hash <= aux_work['target'])
                            else:
                                print 'Merged block submittal result: %s' % (
                                    result, )

                        @df.addErrback
                        def _(err):
                            log.err(err, 'Error submitting merged block:')
                except:
                    log.err(None, 'Error while processing merged mining POW:')

            if pow_hash <= share_info[
                    'bits'].target and header_hash not in received_header_hashes:
                share = get_share(header, transactions)

                print 'GOT SHARE! %s %s prev %s age %.2fs%s' % (
                    request.getUser(),
                    p2pool_data.format_hash(share.hash),
                    p2pool_data.format_hash(share.previous_hash),
                    time.time() - getwork_time,
                    ' DEAD ON ARRIVAL' if not on_time else '',
                )
                self.my_share_hashes.add(share.hash)
                if not on_time:
                    self.my_doa_share_hashes.add(share.hash)

                self.node.tracker.add(share)
                if not p2pool.DEBUG:
                    self.node.tracker.verified.add(share)
                self.node.set_best_share()

                try:
                    if (pow_hash <= header['bits'].target or
                            p2pool.DEBUG) and self.node.p2p_node is not None:
                        self.node.p2p_node.broadcast_share(share.hash)
                except:
                    log.err(None, 'Error forwarding block solution:')

                self.share_received.happened(
                    bitcoin_data.target_to_average_attempts(share.target),
                    not on_time)

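            # pseudoshare bookkeeping: reject work above the pseudoshare target, flag duplicates, otherwise record it for local rate statistics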
            if pow_hash > target:
                print 'Worker %s submitted share with hash > target:' % (
                    request.getUser(), )
                print '    Hash:   %56x' % (pow_hash, )
                print '    Target: %56x' % (target, )
            elif header_hash in received_header_hashes:
                print >> sys.stderr, 'Worker %s @ %s submitted share more than once!' % (
                    request.getUser(), request.getClientIP())
            else:
                received_header_hashes.add(header_hash)

                self.pseudoshare_received.happened(
                    bitcoin_data.target_to_average_attempts(target),
                    not on_time, user)
                self.recent_shares_ts_work.append(
                    (time.time(),
                     bitcoin_data.target_to_average_attempts(target)))
                while len(self.recent_shares_ts_work) > 50:
                    self.recent_shares_ts_work.pop(0)
                self.local_rate_monitor.add_datum(
                    dict(work=bitcoin_data.target_to_average_attempts(target),
                         dead=not on_time,
                         user=user))

            return on_time

        return ba, got_response
Example n. 6
0
    def get_work(self,
                 pubkey_hash,
                 desired_share_target,
                 desired_pseudoshare_target,
                 worker_ip=None):
        global print_throttle
        t0 = time.time()
        if (self.node.p2p_node is None or len(self.node.p2p_node.peers)
                == 0) and self.node.net.PERSIST:
            raise jsonrpc.Error_for_code(-12345)(
                u'p2pool is not connected to any peers')
        if self.node.best_share_var.value is None and self.node.net.PERSIST:
            raise jsonrpc.Error_for_code(-12345)(
                u'p2pool is downloading shares')
        unknown_rules = set(
            r[1:] if r.startswith('!') else r
            for r in self.node.bitcoind_work.value['rules']) - set(
                getattr(self.node.net, 'SOFTFORKS_REQUIRED', []))
        if unknown_rules:
            print "Unknown softforks found: ", unknown_rules
            raise jsonrpc.Error_for_code(-12345)(u'unknown rule activated')

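        # build the merged-mining (auxpow) commitment that will be embedded in the coinbase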
        if self.merged_work.value:
            tree, size = bitcoin_data.make_auxpow_tree(self.merged_work.value)
            mm_hashes = [
                self.merged_work.value.get(tree.get(i), dict(hash=0))['hash']
                for i in xrange(size)
            ]
            mm_data = '\xfa\xbemm' + bitcoin_data.aux_pow_coinbase_type.pack(
                dict(
                    merkle_root=bitcoin_data.merkle_hash(mm_hashes),
                    size=size,
                    nonce=0,
                ))
            mm_later = [
                (aux_work, mm_hashes.index(aux_work['hash']), mm_hashes)
                for chain_id, aux_work in self.merged_work.value.iteritems()
            ]
        else:
            mm_data = ''
            mm_later = []

        tx_hashes = [
            bitcoin_data.hash256(bitcoin_data.tx_type.pack(tx))
            for tx in self.current_work.value['transactions']
        ]
        tx_map = dict(zip(tx_hashes, self.current_work.value['transactions']))

        self.node.mining2_txs_var.set(
            tx_map)  # let node.py know not to evict these transactions

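        # choose which share class to generate: keep the previous share's type, switching to its SUCCESSOR only once 95% of recent shares signal the new version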
        previous_share = self.node.tracker.items[
            self.node.best_share_var.
            value] if self.node.best_share_var.value is not None else None
        if previous_share is None:
            share_type = p2pool_data.Share
        else:
            previous_share_type = type(previous_share)

            if previous_share_type.SUCCESSOR is None or self.node.tracker.get_height(
                    previous_share.hash) < self.node.net.CHAIN_LENGTH:
                share_type = previous_share_type
            else:
                successor_type = previous_share_type.SUCCESSOR

                counts = p2pool_data.get_desired_version_counts(
                    self.node.tracker,
                    self.node.tracker.get_nth_parent_hash(
                        previous_share.hash,
                        self.node.net.CHAIN_LENGTH * 9 // 10),
                    self.node.net.CHAIN_LENGTH // 10)
                upgraded = counts.get(successor_type.VERSION, 0) / sum(
                    counts.itervalues())
                if upgraded > .65:
                    print 'Switchover imminent. Upgraded: %.3f%% Threshold: %.3f%%' % (
                        upgraded * 100, 95)
                # Share -> NewShare only valid if 95% of hashes in [net.CHAIN_LENGTH*9//10, net.CHAIN_LENGTH] for new version
                if counts.get(successor_type.VERSION,
                              0) > sum(counts.itervalues()) * 95 // 100:
                    share_type = successor_type
                else:
                    share_type = previous_share_type

        local_addr_rates = self.get_local_addr_rates()

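        # pick a share target: start from a minimal baseline difficulty, then tighten it so this address produces roughly 1.67% of pool shares and its expected payout clears the dust threshold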
        if desired_share_target is None:
            desired_share_target = bitcoin_data.difficulty_to_target(
                float(1.0 / self.node.net.PARENT.DUMB_SCRYPT_DIFF))
            local_hash_rate = local_addr_rates.get(pubkey_hash, 0)
            if local_hash_rate > 0.0:
                desired_share_target = min(
                    desired_share_target,
                    bitcoin_data.average_attempts_to_target(
                        local_hash_rate * self.node.net.SHARE_PERIOD / 0.0167)
                )  # limit to 1.67% of pool shares by modulating share difficulty

            lookbehind = 3600 // self.node.net.SHARE_PERIOD
            block_subsidy = self.node.bitcoind_work.value['subsidy']
            if previous_share is not None and self.node.tracker.get_height(
                    previous_share.hash) > lookbehind:
                expected_payout_per_block = local_addr_rates.get(pubkey_hash, 0)/p2pool_data.get_pool_attempts_per_second(self.node.tracker, self.node.best_share_var.value, lookbehind) \
                    * block_subsidy*(1-self.donation_percentage/100) # XXX doesn't use global stale rate to compute pool hash
                if expected_payout_per_block < self.node.net.PARENT.DUST_THRESHOLD:
                    desired_share_target = min(
                        desired_share_target,
                        bitcoin_data.average_attempts_to_target(
                            (bitcoin_data.target_to_average_attempts(
                                self.node.bitcoind_work.value['bits'].target) *
                             self.node.net.SPREAD) *
                            self.node.net.PARENT.DUST_THRESHOLD /
                            block_subsidy))

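        # build the share's generation transaction, its metadata (share_info), and a constructor for the eventual share object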
        if True:
            share_info, gentx, other_transaction_hashes, get_share = share_type.generate_transaction(
                tracker=self.node.tracker,
                share_data=dict(
                    previous_share_hash=self.node.best_share_var.value,
                    coinbase=(script.create_push_script([
                        self.current_work.value['height'],
                    ] + ([mm_data] if mm_data else []) + self.args.coinb_texts)
                              +
                              self.current_work.value['coinbaseflags'])[:100],
                    nonce=random.randrange(2**32),
                    pubkey_hash=pubkey_hash,
                    subsidy=self.current_work.value['subsidy'],
                    donation=math.perfect_round(
                        65535 * self.donation_percentage / 100),
                    stale_info=(
                        lambda (orphans, doas), total,
                        (orphans_recorded_in_chain, doas_recorded_in_chain
                         ): 'orphan'
                        if orphans > orphans_recorded_in_chain else 'doa'
                        if doas > doas_recorded_in_chain else None)(
                            *self.get_stale_counts()),
                    desired_version=(share_type.SUCCESSOR
                                     if share_type.SUCCESSOR is not None else
                                     share_type).VOTING_VERSION,
                ),
                block_target=self.current_work.value['bits'].target,
                desired_timestamp=int(time.time() + 0.5),
                desired_target=desired_share_target,
                ref_merkle_link=dict(branch=[], index=0),
                desired_other_transaction_hashes_and_fees=zip(
                    tx_hashes, self.current_work.value['transaction_fees']),
                net=self.node.net,
                known_txs=tx_map,
                base_subsidy=self.node.net.PARENT.SUBSIDY_FUNC(
                    self.current_work.value['height']),
            )

        packed_gentx = bitcoin_data.tx_id_type.pack(
            gentx)  # stratum miners work with stripped transactions
        other_transactions = [
            tx_map[tx_hash] for tx_hash in other_transaction_hashes
        ]

        mm_later = [
            (dict(aux_work,
                  target=aux_work['target'] if aux_work['target'] != 'p2pool'
                  else share_info['bits'].target), index, hashes)
            for aux_work, index, hashes in mm_later
        ]

        if desired_pseudoshare_target is None:
            target = bitcoin_data.difficulty_to_target(
                float(1.0 / self.node.net.PARENT.DUMB_SCRYPT_DIFF))
            local_hash_rate = self._estimate_local_hash_rate()
            if local_hash_rate is not None:
                target = bitcoin_data.average_attempts_to_target(
                    local_hash_rate * 1
                )  # target roughly one share response per second by modulating pseudoshare difficulty
            else:
                # If we don't yet have an estimated node hashrate, then we still need to not undershoot the difficulty.
                # Otherwise, we might get 1 PH/s of hashrate on difficulty settings appropriate for 1 GH/s.
                # 1/3000th the difficulty of a full share should be a reasonable upper bound. That way, if
                # one node has the whole p2pool hashrate, it will still only need to process one pseudoshare
                # every ~0.01 seconds.
                target = min(
                    target, 3000 * bitcoin_data.average_attempts_to_target(
                        (bitcoin_data.target_to_average_attempts(
                            self.node.bitcoind_work.value['bits'].target) *
                         self.node.net.SPREAD) *
                        self.node.net.PARENT.DUST_THRESHOLD / block_subsidy))
        else:
            target = desired_pseudoshare_target
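        # a pseudoshare may never be harder than a real share or any merged-mining target, and must stay within the chain's sane target range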
        target = max(target, share_info['bits'].target)
        for aux_work, index, hashes in mm_later:
            target = max(target, aux_work['target'])
        target = math.clip(target, self.node.net.PARENT.SANE_TARGET_RANGE)

        getwork_time = time.time()
        lp_count = self.new_work_event.times
        merkle_link = bitcoin_data.calculate_merkle_link(
            [None] + other_transaction_hashes, 0) if share_info.get(
                'segwit_data', None
            ) is None else share_info['segwit_data']['txid_merkle_link']

        if print_throttle == 0.0:
            print_throttle = time.time()
        else:
            current_time = time.time()
            if (current_time - print_throttle) > 5.0:
                print 'New work for %s! Diff: %.02f Share diff: %.02f Block value: %.2f %s (%i tx, %.0f kB)' % (
                    bitcoin_data.pubkey_hash_to_address(
                        pubkey_hash, self.node.net.PARENT),
                    bitcoin_data.target_to_difficulty(target),
                    bitcoin_data.target_to_difficulty(
                        share_info['bits'].target),
                    self.current_work.value['subsidy'] * 1e-8,
                    self.node.net.PARENT.SYMBOL,
                    len(self.current_work.value['transactions']),
                    sum(
                        map(bitcoin_data.tx_type.packed_size,
                            self.current_work.value['transactions'])) / 1000.,
                )
                print_throttle = time.time()

        # needed for stats
        self.last_work_shares.value[bitcoin_data.pubkey_hash_to_address(
            pubkey_hash, self.node.net.PARENT)] = share_info['bits']

        ba = dict(
            version=max(self.current_work.value['version'], 0x20000000),
            previous_block=self.current_work.value['previous_block'],
            merkle_link=merkle_link,
            coinb1=packed_gentx[:-self.COINBASE_NONCE_LENGTH - 4],
            coinb2=packed_gentx[-4:],
            timestamp=self.current_work.value['time'],
            bits=self.current_work.value['bits'],
            share_target=target,
        )

        received_header_hashes = set()

        def got_response(header, user, coinbase_nonce):
            t0 = time.time()
            assert len(coinbase_nonce) == self.COINBASE_NONCE_LENGTH
            new_packed_gentx = packed_gentx[:-self.COINBASE_NONCE_LENGTH - 4] + coinbase_nonce + packed_gentx[
                -4:] if coinbase_nonce != '\0' * self.COINBASE_NONCE_LENGTH else packed_gentx
            new_gentx = bitcoin_data.tx_type.unpack(
                new_packed_gentx
            ) if coinbase_nonce != '\0' * self.COINBASE_NONCE_LENGTH else gentx
            if bitcoin_data.is_segwit_tx(
                    gentx
            ):  # reintroduce witness data to the gentx produced by stratum miners
                new_gentx['marker'] = 0
                new_gentx['flag'] = gentx['flag']
                new_gentx['witness'] = gentx['witness']

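            # header_hash (double SHA-256) identifies the block; pow_hash (PARENT.POW_FUNC) is what gets compared against the targets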
            header_hash = bitcoin_data.hash256(
                bitcoin_data.block_header_type.pack(header))
            pow_hash = self.node.net.PARENT.POW_FUNC(
                bitcoin_data.block_header_type.pack(header))
            try:
                if pow_hash <= header['bits'].target or p2pool.DEBUG:
                    helper.submit_block(
                        dict(header=header,
                             txs=[new_gentx] + other_transactions), False,
                        self.node.factory, self.node.bitcoind,
                        self.node.bitcoind_work, self.node.net)
                    if pow_hash <= header['bits'].target:
                        print
                        print 'GOT BLOCK FROM MINER! Passing to bitcoind! %s%064x' % (
                            self.node.net.PARENT.BLOCK_EXPLORER_URL_PREFIX,
                            header_hash)
                        print
            except:
                log.err(None, 'Error while processing potential block:')

            user, _, _, _ = self.get_user_details(user)
            assert header['previous_block'] == ba['previous_block']
            assert header['merkle_root'] == bitcoin_data.check_merkle_link(
                bitcoin_data.hash256(new_packed_gentx), merkle_link)
            assert header['bits'] == ba['bits']

            on_time = self.new_work_event.times == lp_count

            for aux_work, index, hashes in mm_later:
                try:
                    if pow_hash <= aux_work['target'] or p2pool.DEBUG:
                        df = deferral.retry(
                            'Error submitting merged block: (will retry)', 10,
                            10)(aux_work['merged_proxy'].rpc_getauxblock)(
                                pack.IntType(256, 'big').pack(
                                    aux_work['hash']).encode('hex'),
                                bitcoin_data.aux_pow_type.pack(
                                    dict(
                                        merkle_tx=dict(
                                            tx=new_gentx,
                                            block_hash=header_hash,
                                            merkle_link=merkle_link,
                                        ),
                                        merkle_link=bitcoin_data.
                                        calculate_merkle_link(hashes, index),
                                        parent_block_header=header,
                                    )).encode('hex'),
                            )

                        @df.addCallback
                        def _(result, aux_work=aux_work):
                            if result != (pow_hash <= aux_work['target']):
                                print >> sys.stderr, 'Merged block submittal result: %s Expected: %s' % (
                                    result, pow_hash <= aux_work['target'])
                            else:
                                print 'Merged block submittal result: %s' % (
                                    result, )

                        @df.addErrback
                        def _(err):
                            log.err(err, 'Error submitting merged block:')
                except:
                    log.err(None, 'Error while processing merged mining POW:')

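            # a full p2pool share: rebuild it from the header and coinbase nonce, add it to the tracker, and broadcast it to peers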
            if pow_hash <= share_info[
                    'bits'].target and header_hash not in received_header_hashes:
                last_txout_nonce = pack.IntType(
                    8 * self.COINBASE_NONCE_LENGTH).unpack(coinbase_nonce)
                share = get_share(header, last_txout_nonce)

                print 'GOT SHARE! %s %s prev %s age %.2fs%s' % (
                    user,
                    p2pool_data.format_hash(share.hash),
                    p2pool_data.format_hash(share.previous_hash),
                    time.time() - getwork_time,
                    ' DEAD ON ARRIVAL' if not on_time else '',
                )

                # node.py will sometimes forget transactions if bitcoind's work has changed since this stratum
                # job was assigned. Fortunately, the tx_map is still in our scope from this job, so we can use that
                # to refill it if needed.

                known_txs = self.node.known_txs_var.value
                missing = {
                    hsh: val
                    for (hsh, val) in tx_map.iteritems()
                    if not hsh in known_txs
                }
                if missing:
                    print "Warning: %i transactions were erroneously evicted from known_txs_var. Refilling now." % len(
                        missing)
                    self.node.known_txs_var.add(missing)

                self.my_share_hashes.add(share.hash)
                if not on_time:
                    self.my_doa_share_hashes.add(share.hash)

                self.node.tracker.add(share)
                self.node.set_best_share()

                try:
                    if (pow_hash <= header['bits'].target or
                            p2pool.DEBUG) and self.node.p2p_node is not None:
                        self.node.p2p_node.broadcast_share(share.hash)
                except:
                    log.err(None, 'Error forwarding block solution:')

                self.share_received.happened(
                    bitcoin_data.target_to_average_attempts(share.target),
                    not on_time, share.hash)

            if pow_hash > target:
                print 'Worker %s submitted share with hash > target:' % (
                    user, )
                print '    Hash:   %56x' % (pow_hash, )
                print '    Target: %56x' % (target, )
            elif header_hash in received_header_hashes:
                print >> sys.stderr, 'Worker %s submitted share more than once!' % (
                    user, )
            else:
                received_header_hashes.add(header_hash)

                self.pseudoshare_received.happened(
                    bitcoin_data.target_to_average_attempts(target),
                    not on_time, user)
                self.recent_shares_ts_work.append(
                    (time.time(),
                     bitcoin_data.target_to_average_attempts(target)))
                while len(self.recent_shares_ts_work) > 50:
                    self.recent_shares_ts_work.pop(0)
                self.local_rate_monitor.add_datum(
                    dict(work=bitcoin_data.target_to_average_attempts(target),
                         dead=not on_time,
                         user=user,
                         share_target=share_info['bits'].target))
                self.local_addr_rate_monitor.add_datum(
                    dict(work=bitcoin_data.target_to_average_attempts(target),
                         pubkey_hash=pubkey_hash))
            t1 = time.time()
            if p2pool.BENCH and (t1 - t0) > .01:
                print "%8.3f ms for work.py:got_response()" % (
                    (t1 - t0) * 1000.)

            return on_time

        t1 = time.time()
        if p2pool.BENCH:
            print "%8.3f ms for work.py:get_work()" % ((t1 - t0) * 1000.)
        return ba, got_response
Example n. 7
0
    def get_work(self, pubkey_hash, desired_share_target, desired_pseudoshare_target):
        if self.node.best_share_var.value is None and self.node.net.PERSIST:
            raise jsonrpc.Error_for_code(-12345)(u"p2pool is downloading shares")

        if self.merged_work.value:
            tree, size = bitcoin_data.make_auxpow_tree(self.merged_work.value)
            mm_hashes = [self.merged_work.value.get(tree.get(i), dict(hash=0))["hash"] for i in xrange(size)]
            mm_data = "\xfa\xbemm" + bitcoin_data.aux_pow_coinbase_type.pack(
                dict(merkle_root=bitcoin_data.merkle_hash(mm_hashes), size=size, nonce=0)
            )
            mm_later = [
                (aux_work, mm_hashes.index(aux_work["hash"]), mm_hashes)
                for chain_id, aux_work in self.merged_work.value.iteritems()
            ]
        else:
            mm_data = ""
            mm_later = []

        tx_hashes = [
            bitcoin_data.hash256(bitcoin_data.tx_type.pack(tx)) for tx in self.current_work.value["transactions"]
        ]
        tx_map = dict(zip(tx_hashes, self.current_work.value["transactions"]))

        previous_share = (
            self.node.tracker.items[self.node.best_share_var.value]
            if self.node.best_share_var.value is not None
            else None
        )
        if previous_share is None:
            share_type = p2pool_data.Share
        else:
            previous_share_type = type(previous_share)

            if (
                previous_share_type.SUCCESSOR is None
                or self.node.tracker.get_height(previous_share.hash) < self.node.net.CHAIN_LENGTH
            ):
                share_type = previous_share_type
            else:
                successor_type = previous_share_type.SUCCESSOR

                counts = p2pool_data.get_desired_version_counts(
                    self.node.tracker,
                    self.node.tracker.get_nth_parent_hash(previous_share.hash, self.node.net.CHAIN_LENGTH * 9 // 10),
                    self.node.net.CHAIN_LENGTH // 10,
                )
                upgraded = counts.get(successor_type.VERSION, 0) / sum(counts.itervalues())
                if upgraded > 0.65:
                    print "Switchover imminent. Upgraded: %.3f%% Threshold: %.3f%%" % (upgraded * 100, 95)
                print
                # Share -> NewShare only valid if 95% of hashes in [net.CHAIN_LENGTH*9//10, net.CHAIN_LENGTH] for new version
                if counts.get(successor_type.VERSION, 0) > sum(counts.itervalues()) * 95 // 100:
                    share_type = successor_type
                else:
                    share_type = previous_share_type

        if desired_share_target is None:
            desired_share_target = 2 ** 256 - 1
            local_hash_rate = self._estimate_local_hash_rate()
            if local_hash_rate is not None:
                desired_share_target = min(
                    desired_share_target,
                    bitcoin_data.average_attempts_to_target(local_hash_rate * self.node.net.SHARE_PERIOD / 0.0167),
                )  # limit to 1.67% of pool shares by modulating share difficulty

            local_addr_rates = self.get_local_addr_rates()
            lookbehind = 3600 // self.node.net.SHARE_PERIOD
            block_subsidy = self.node.bitcoind_work.value["subsidy"]
            if previous_share is not None and self.node.tracker.get_height(previous_share.hash) > lookbehind:
                expected_payout_per_block = (
                    local_addr_rates.get(pubkey_hash, 0)
                    / p2pool_data.get_pool_attempts_per_second(
                        self.node.tracker, self.node.best_share_var.value, lookbehind
                    )
                    * block_subsidy
                    * (1 - self.donation_percentage / 100)
                )  # XXX doesn't use global stale rate to compute pool hash
                if expected_payout_per_block < self.node.net.PARENT.DUST_THRESHOLD:
                    desired_share_target = min(
                        desired_share_target,
                        bitcoin_data.average_attempts_to_target(
                            (
                                bitcoin_data.target_to_average_attempts(self.node.bitcoind_work.value["bits"].target)
                                * self.node.net.SPREAD
                            )
                            * self.node.net.PARENT.DUST_THRESHOLD
                            / block_subsidy
                        ),
                    )

        if True:
            share_info, gentx, other_transaction_hashes, get_share = share_type.generate_transaction(
                tracker=self.node.tracker,
                share_data=dict(
                    previous_share_hash=self.node.best_share_var.value,
                    coinbase=(
                        script.create_push_script(
                            [self.current_work.value["height"]] + ([mm_data] if mm_data else []) + []
                        )
                        + self.current_work.value["coinbaseflags"]
                    )[:100],
                    nonce=random.randrange(2 ** 32),
                    pubkey_hash=pubkey_hash,
                    subsidy=self.current_work.value["subsidy"],
                    donation=math.perfect_round(65535 * self.donation_percentage / 100),
                    stale_info=(
                        lambda (orphans, doas), total, (orphans_recorded_in_chain, doas_recorded_in_chain): "orphan"
                        if orphans > orphans_recorded_in_chain
                        else "doa"
                        if doas > doas_recorded_in_chain
                        else None
                    )(*self.get_stale_counts()),
                    desired_version=(
                        share_type.SUCCESSOR if share_type.SUCCESSOR is not None else share_type
                    ).VOTING_VERSION,
                ),
                block_target=self.current_work.value["bits"].target,
                desired_timestamp=int(time.time() + 0.5),
                desired_target=desired_share_target,
                ref_merkle_link=dict(branch=[], index=0),
                desired_other_transaction_hashes_and_fees=zip(tx_hashes, self.current_work.value["transaction_fees"]),
                net=self.node.net,
                known_txs=tx_map,
                base_subsidy=self.node.net.PARENT.SUBSIDY_FUNC(self.current_work.value["height"]),
            )

        packed_gentx = bitcoin_data.tx_type.pack(gentx)
        other_transactions = [tx_map[tx_hash] for tx_hash in other_transaction_hashes]

        mm_later = [
            (
                dict(
                    aux_work, target=aux_work["target"] if aux_work["target"] != "p2pool" else share_info["bits"].target
                ),
                index,
                hashes,
            )
            for aux_work, index, hashes in mm_later
        ]

        if desired_pseudoshare_target is None:
            target = 2 ** 256 - 1
            local_hash_rate = self._estimate_local_hash_rate()
            if local_hash_rate is not None:
                target = min(
                    target, bitcoin_data.average_attempts_to_target(local_hash_rate * 3)
                )  # limit to 1 share response every 3 seconds by modulating pseudoshare difficulty
        else:
            target = desired_pseudoshare_target
        target = max(target, share_info["bits"].target)
        for aux_work, index, hashes in mm_later:
            target = max(target, aux_work["target"])
        target = math.clip(target, self.node.net.PARENT.SANE_TARGET_RANGE)

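        # remember when this job was issued and which longpoll generation it belongs to, so stale submissions can be flagged as dead on arrival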
        getwork_time = time.time()
        lp_count = self.new_work_event.times
        merkle_link = bitcoin_data.calculate_merkle_link([None] + other_transaction_hashes, 0)

        print "New work for worker! Difficulty: %.09f Share difficulty: %.09f Total block value: %.6f %s including %i transactions" % (
            bitcoin_data.target_to_difficulty(target),
            bitcoin_data.target_to_difficulty(share_info["bits"].target),
            self.current_work.value["subsidy"] * 1e-8,
            self.node.net.PARENT.SYMBOL,
            len(self.current_work.value["transactions"]),
        )

        ba = dict(
            version=min(self.current_work.value["version"], 2),
            previous_block=self.current_work.value["previous_block"],
            merkle_link=merkle_link,
            coinb1=packed_gentx[: -self.COINBASE_NONCE_LENGTH - 4],
            coinb2=packed_gentx[-4:],
            timestamp=self.current_work.value["time"],
            bits=self.current_work.value["bits"],
            share_target=target,
        )

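        # header hashes already submitted against this job, used to ignore duplicates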
        received_header_hashes = set()

        def got_response(header, user, coinbase_nonce):
            assert len(coinbase_nonce) == self.COINBASE_NONCE_LENGTH
            new_packed_gentx = (
                packed_gentx[: -self.COINBASE_NONCE_LENGTH - 4] + coinbase_nonce + packed_gentx[-4:]
                if coinbase_nonce != "\0" * self.COINBASE_NONCE_LENGTH
                else packed_gentx
            )
            new_gentx = (
                bitcoin_data.tx_type.unpack(new_packed_gentx)
                if coinbase_nonce != "\0" * self.COINBASE_NONCE_LENGTH
                else gentx
            )

            header_hash = bitcoin_data.hash256(bitcoin_data.block_header_type.pack(header))
            pow_hash = self.node.net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(header))
            try:
                if pow_hash <= header["bits"].target or p2pool.DEBUG:
                    helper.submit_block(
                        dict(header=header, txs=[new_gentx] + other_transactions),
                        False,
                        self.node.factory,
                        self.node.bitcoind,
                        self.node.bitcoind_work,
                        self.node.net,
                    )
                    if pow_hash <= header["bits"].target:
                        print
                        print "GOT BLOCK FROM MINER! Passing to bitcoind! %s%064x" % (
                            self.node.net.PARENT.BLOCK_EXPLORER_URL_PREFIX,
                            header_hash,
                        )
                        print
            except:
                log.err(None, "Error while processing potential block:")

            user, _, _, _ = self.get_user_details(user)
            assert header["previous_block"] == ba["previous_block"]
            assert header["merkle_root"] == bitcoin_data.check_merkle_link(
                bitcoin_data.hash256(new_packed_gentx), merkle_link
            )
            assert header["bits"] == ba["bits"]
            # Check momentum using midhash (who knows why it is called that) and birthday values
            midhash = hashlib.sha256(hashlib.sha256(bitcoin_data.block_header_type.pack(header)[:80]).digest()).digest()
            # print 'MIDHASH: {0}'.format(midhash.encode('hex'))
            # print 'A: {0}'.format(header['birthdayA'])
            # print 'B: {0}'.format(header['birthdayB'])
            momentumc = rewardcoin_momentum.checkMomentum(midhash, header["birthdayA"], header["birthdayB"])
            # print momentumc
            if not momentumc:
                print "Invalid Momentum from Client!"
                return False

            on_time = self.new_work_event.times == lp_count

            for aux_work, index, hashes in mm_later:
                try:
                    if pow_hash <= aux_work["target"] or p2pool.DEBUG:
                        df = deferral.retry("Error submitting merged block: (will retry)", 10, 10)(
                            aux_work["merged_proxy"].rpc_getauxblock
                        )(
                            pack.IntType(256, "big").pack(aux_work["hash"]).encode("hex"),
                            bitcoin_data.aux_pow_type.pack(
                                dict(
                                    merkle_tx=dict(tx=new_gentx, block_hash=header_hash, merkle_link=merkle_link),
                                    merkle_link=bitcoin_data.calculate_merkle_link(hashes, index),
                                    parent_block_header=header,
                                )
                            ).encode("hex"),
                        )

                        @df.addCallback
                        def _(result, aux_work=aux_work):
                            if result != (pow_hash <= aux_work["target"]):
                                print >> sys.stderr, "Merged block submittal result: %s Expected: %s" % (
                                    result,
                                    pow_hash <= aux_work["target"],
                                )
                            else:
                                print "Merged block submittal result: %s" % (result,)

                        @df.addErrback
                        def _(err):
                            log.err(err, "Error submitting merged block:")

                except:
                    log.err(None, "Error while processing merged mining POW:")

            if pow_hash <= share_info["bits"].target and header_hash not in received_header_hashes:
                last_txout_nonce = pack.IntType(8 * self.COINBASE_NONCE_LENGTH).unpack(coinbase_nonce)
                share = get_share(header, last_txout_nonce)

                print "GOT SHARE! %s %s prev %s age %.2fs%s" % (
                    user,
                    p2pool_data.format_hash(share.hash),
                    p2pool_data.format_hash(share.previous_hash),
                    time.time() - getwork_time,
                    " DEAD ON ARRIVAL" if not on_time else "",
                )
                self.my_share_hashes.add(share.hash)
                if not on_time:
                    self.my_doa_share_hashes.add(share.hash)

                self.node.tracker.add(share)
                self.node.set_best_share()

                try:
                    if (pow_hash <= header["bits"].target or p2pool.DEBUG) and self.node.p2p_node is not None:
                        self.node.p2p_node.broadcast_share(share.hash)
                except:
                    log.err(None, "Error forwarding block solution:")

                self.share_received.happened(
                    bitcoin_data.target_to_average_attempts(share.target), not on_time, share.hash
                )

            if pow_hash > target:
                print "Worker %s submitted share with hash > target:" % (user,)
                print "    Hash:   %56x" % (pow_hash,)
                print "    Target: %56x" % (target,)
            elif header_hash in received_header_hashes:
                print >> sys.stderr, "Worker %s submitted share more than once!" % (user,)
            else:
                received_header_hashes.add(header_hash)

                self.pseudoshare_received.happened(bitcoin_data.target_to_average_attempts(target), not on_time, user)
                self.recent_shares_ts_work.append((time.time(), bitcoin_data.target_to_average_attempts(target)))
                while len(self.recent_shares_ts_work) > 50:
                    self.recent_shares_ts_work.pop(0)
                self.local_rate_monitor.add_datum(
                    dict(
                        work=bitcoin_data.target_to_average_attempts(target),
                        dead=not on_time,
                        user=user,
                        share_target=share_info["bits"].target,
                    )
                )
                self.local_addr_rate_monitor.add_datum(
                    dict(work=bitcoin_data.target_to_average_attempts(target), pubkey_hash=pubkey_hash)
                )

            return on_time

        return ba, got_response
Example n. 8
0
        def compute(request):
            state = current_work.value
            user = worker_interface.get_username(request)

            payout_script = get_payout_script_from_username(user)
            if payout_script is None or random.uniform(0,
                                                       100) < args.worker_fee:
                payout_script = my_script

            if len(p2p_node.peers) == 0 and net.PERSIST:
                raise jsonrpc.Error(-12345,
                                    u'p2pool is not connected to any peers')
            if state['best_share_hash'] is None and net.PERSIST:
                raise jsonrpc.Error(-12345, u'p2pool is downloading shares')
            if time.time() > current_work2.value['last_update'] + 60:
                raise jsonrpc.Error(-12345, u'lost contact with bitcoind')

            previous_share = None if state[
                'best_share_hash'] is None else tracker.shares[
                    state['best_share_hash']]
            subsidy = current_work2.value['subsidy']
            share_info, generate_tx = p2pool_data.generate_transaction(
                tracker=tracker,
                share_data=dict(
                    previous_share_hash=state['best_share_hash'],
                    coinbase='' if state['aux_work'] is None else
                    '\xfa\xbemm' + bitcoin_data.HashType().pack(
                        state['aux_work']['hash'])[::-1] +
                    struct.pack('<ii', 1, 0),
                    nonce=run_identifier +
                    struct.pack('<Q', random.randrange(2**64)),
                    new_script=payout_script,
                    subsidy=subsidy,
                    donation=math.perfect_round(
                        65535 * args.donation_percentage / 100),
                    stale_frac=(lambda shares, stales: 255 if shares == 0 else
                                math.perfect_round(254 * stales / shares))(
                                    *get_share_counts()),
                ),
                block_target=state['target'],
                desired_timestamp=int(time.time() -
                                      current_work2.value['clock_offset']),
                net=net,
            )

            print 'New work for worker %s! Difficulty: %.06f Payout if block: %.6f %s Total block value: %.6f %s including %i transactions' % (
                user,
                bitcoin_data.target_to_difficulty(share_info['target']),
                (sum(t['value'] for t in generate_tx['tx_outs']
                     if t['script'] == payout_script) - subsidy // 200) * 1e-8,
                net.BITCOIN_SYMBOL,
                subsidy * 1e-8,
                net.BITCOIN_SYMBOL,
                len(current_work2.value['transactions']),
            )

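            # remember which share_info and transaction set belong to this merkle root so a submitted header can be matched back to its job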
            transactions = [generate_tx] + list(
                current_work2.value['transactions'])
            merkle_root = bitcoin_data.merkle_hash(transactions)
            merkle_root_to_transactions[
                merkle_root] = share_info, transactions, time.time()

            return bitcoin_getwork.BlockAttempt(
                state['version'], state['previous_block'], merkle_root,
                current_work2.value['time'], state['target'],
                share_info['target']), state['best_share_hash']
Example n. 9
0
    def get_work(self, pubkey_hash, desired_share_target, desired_pseudoshare_target):
        if (self.node.p2p_node is None or len(self.node.p2p_node.peers) == 0) and self.node.net.PERSIST:
            raise jsonrpc.Error_for_code(-12345)(u"p2pool is not connected to any peers")
        if self.node.best_share_var.value is None and self.node.net.PERSIST:
            raise jsonrpc.Error_for_code(-12345)(u"p2pool is downloading shares")
        if time.time() > self.current_work.value["last_update"] + 60:
            raise jsonrpc.Error_for_code(-12345)(u"lost contact with bitcoind")

        if self.merged_work.value:
            tree, size = bitcoin_data.make_auxpow_tree(self.merged_work.value)
            mm_hashes = [self.merged_work.value.get(tree.get(i), dict(hash=0))["hash"] for i in xrange(size)]
            mm_data = "\xfa\xbemm" + bitcoin_data.aux_pow_coinbase_type.pack(
                dict(merkle_root=bitcoin_data.merkle_hash(mm_hashes), size=size, nonce=0)
            )
            mm_later = [
                (aux_work, mm_hashes.index(aux_work["hash"]), mm_hashes)
                for chain_id, aux_work in self.merged_work.value.iteritems()
            ]
        else:
            mm_data = ""
            mm_later = []

        tx_hashes = [
            bitcoin_data.hash256(bitcoin_data.tx_type.pack(tx)) for tx in self.current_work.value["transactions"]
        ]
        tx_map = dict(zip(tx_hashes, self.current_work.value["transactions"]))

        if self.node.best_share_var.value is None:
            share_type = p2pool_data.Share
        else:
            previous_share = self.node.tracker.items[self.node.best_share_var.value]
            previous_share_type = type(previous_share)

            if (
                previous_share_type.SUCCESSOR is None
                or self.node.tracker.get_height(previous_share.hash) < self.node.net.CHAIN_LENGTH
            ):
                share_type = previous_share_type
            else:
                successor_type = previous_share_type.SUCCESSOR

                counts = p2pool_data.get_desired_version_counts(
                    self.node.tracker,
                    self.node.tracker.get_nth_parent_hash(previous_share.hash, self.node.net.CHAIN_LENGTH * 9 // 10),
                    self.node.net.CHAIN_LENGTH // 10,
                )
                # Share -> NewShare only valid if 95% of hashes in [net.CHAIN_LENGTH*9//10, net.CHAIN_LENGTH] for new version
                if counts.get(successor_type.VERSION, 0) > sum(counts.itervalues()) * 95 // 100:
                    share_type = successor_type
                else:
                    share_type = previous_share_type

        if True:
            share_info, gentx, other_transaction_hashes, get_share = share_type.generate_transaction(
                tracker=self.node.tracker,
                share_data=dict(
                    previous_share_hash=self.node.best_share_var.value,
                    coinbase=(
                        script.create_push_script(
                            [self.current_work.value["height"]] + ([mm_data] if mm_data else []) + []
                        )
                        + self.current_work.value["coinbaseflags"]
                    )[:100],
                    nonce=random.randrange(2 ** 32),
                    pubkey_hash=pubkey_hash,
                    subsidy=self.current_work.value["subsidy"],
                    donation=math.perfect_round(65535 * self.donation_percentage / 100),
                    stale_info=(
                        lambda (orphans, doas), total, (orphans_recorded_in_chain, doas_recorded_in_chain): "orphan"
                        if orphans > orphans_recorded_in_chain
                        else "doa"
                        if doas > doas_recorded_in_chain
                        else None
                    )(*self.get_stale_counts()),
                    desired_version=share_type.SUCCESSOR.VERSION
                    if share_type.SUCCESSOR is not None
                    else share_type.VERSION,
                ),
                block_target=self.current_work.value["bits"].target,
                desired_timestamp=int(time.time() + 0.5),
                desired_target=desired_share_target,
                ref_merkle_link=dict(branch=[], index=0),
                desired_other_transaction_hashes=tx_hashes,
                net=self.node.net,
                known_txs=tx_map,
            )

        transactions = [gentx] + [tx_map[tx_hash] for tx_hash in other_transaction_hashes]

        mm_later = [
            (
                dict(
                    aux_work, target=aux_work["target"] if aux_work["target"] != "p2pool" else share_info["bits"].target
                ),
                index,
                hashes,
            )
            for aux_work, index, hashes in mm_later
        ]

        if desired_pseudoshare_target is None:
            target = 2 ** 256 - 1
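            # estimate the local hash rate from the last 50 pseudoshares and aim for roughly one pseudoshare per second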
            if len(self.recent_shares_ts_work) == 50:
                hash_rate = sum(work for ts, work in self.recent_shares_ts_work[1:]) // (
                    self.recent_shares_ts_work[-1][0] - self.recent_shares_ts_work[0][0]
                )
                if hash_rate:
                    target = min(target, int(2 ** 256 / hash_rate))
        else:
            target = desired_pseudoshare_target
        target = max(target, share_info["bits"].target)
        for aux_work, index, hashes in mm_later:
            target = max(target, aux_work["target"])
        target = math.clip(target, self.node.net.PARENT.SANE_TARGET_RANGE)

        getwork_time = time.time()
        lp_count = self.new_work_event.times
        merkle_link = bitcoin_data.calculate_merkle_link(
            [bitcoin_data.hash256(bitcoin_data.tx_type.pack(tx)) for tx in transactions], 0
        )

        print "New work for worker! Difficulty: %.06f Share difficulty: %.06f Total block value: %.6f %s including %i transactions" % (
            bitcoin_data.target_to_difficulty(target),
            bitcoin_data.target_to_difficulty(share_info["bits"].target),
            self.current_work.value["subsidy"] * 1e-8,
            self.node.net.PARENT.SYMBOL,
            len(self.current_work.value["transactions"]),
        )

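        # the getwork job handed to the miner; share_target (the pseudoshare target) is what the miner is asked to beat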
        ba = bitcoin_getwork.BlockAttempt(
            version=min(self.current_work.value["version"], 2),
            previous_block=self.current_work.value["previous_block"],
            merkle_root=bitcoin_data.check_merkle_link(
                bitcoin_data.hash256(bitcoin_data.tx_type.pack(transactions[0])), merkle_link
            ),
            timestamp=self.current_work.value["time"],
            bits=self.current_work.value["bits"],
            share_target=target,
        )

        received_header_hashes = set()

        def got_response(header, request):
            header_hash = bitcoin_data.hash256(bitcoin_data.block_header_type.pack(header))
            pow_hash = self.node.net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(header))
            try:
                if pow_hash <= header["bits"].target or p2pool.DEBUG:
                    helper.submit_block(
                        dict(header=header, txs=transactions),
                        False,
                        self.node.factory,
                        self.node.bitcoind,
                        self.node.bitcoind_work,
                        self.node.net,
                    )
                    if pow_hash <= header["bits"].target:
                        print
                        print "GOT BLOCK FROM MINER! Passing to bitcoind! %s%064x" % (
                            self.node.net.PARENT.BLOCK_EXPLORER_URL_PREFIX,
                            header_hash,
                        )
                        print
            except:
                log.err(None, "Error while processing potential block:")

            user, _, _, _ = self.get_user_details(request)
            assert header["previous_block"] == ba.previous_block
            assert header["merkle_root"] == ba.merkle_root
            assert header["bits"] == ba.bits

            on_time = self.new_work_event.times == lp_count

            for aux_work, index, hashes in mm_later:
                try:
                    if pow_hash <= aux_work["target"] or p2pool.DEBUG:
                        df = deferral.retry("Error submitting merged block: (will retry)", 10, 10)(
                            aux_work["merged_proxy"].rpc_getauxblock
                        )(
                            pack.IntType(256, "big").pack(aux_work["hash"]).encode("hex"),
                            bitcoin_data.aux_pow_type.pack(
                                dict(
                                    merkle_tx=dict(tx=transactions[0], block_hash=header_hash, merkle_link=merkle_link),
                                    merkle_link=bitcoin_data.calculate_merkle_link(hashes, index),
                                    parent_block_header=header,
                                )
                            ).encode("hex"),
                        )

                        @df.addCallback
                        def _(result, aux_work=aux_work):
                            if result != (pow_hash <= aux_work["target"]):
                                print >> sys.stderr, "Merged block submittal result: %s Expected: %s" % (
                                    result,
                                    pow_hash <= aux_work["target"],
                                )
                            else:
                                print "Merged block submittal result: %s" % (result,)

                        @df.addErrback
                        def _(err):
                            log.err(err, "Error submitting merged block:")

                except:
                    log.err(None, "Error while processing merged mining POW:")

            if pow_hash <= share_info["bits"].target and header_hash not in received_header_hashes:
                share = get_share(header, transactions)

                print "GOT SHARE! %s %s prev %s age %.2fs%s" % (
                    request.getUser(),
                    p2pool_data.format_hash(share.hash),
                    p2pool_data.format_hash(share.previous_hash),
                    time.time() - getwork_time,
                    " DEAD ON ARRIVAL" if not on_time else "",
                )
                self.my_share_hashes.add(share.hash)
                if not on_time:
                    self.my_doa_share_hashes.add(share.hash)

                self.node.tracker.add(share)
                if not p2pool.DEBUG:
                    self.node.tracker.verified.add(share)
                self.node.set_best_share()

                try:
                    if (pow_hash <= header["bits"].target or p2pool.DEBUG) and self.node.p2p_node is not None:
                        self.node.p2p_node.broadcast_share(share.hash)
                except:
                    log.err(None, "Error forwarding block solution:")

                self.share_received.happened(bitcoin_data.target_to_average_attempts(share.target), not on_time)

            if pow_hash > target:
                print "Worker %s submitted share with hash > target:" % (request.getUser(),)
                print "    Hash:   %56x" % (pow_hash,)
                print "    Target: %56x" % (target,)
            elif header_hash in received_header_hashes:
                print >> sys.stderr, "Worker %s @ %s submitted share more than once!" % (
                    request.getUser(),
                    request.getClientIP(),
                )
            else:
                received_header_hashes.add(header_hash)

                self.pseudoshare_received.happened(bitcoin_data.target_to_average_attempts(target), not on_time, user)
                self.recent_shares_ts_work.append((time.time(), bitcoin_data.target_to_average_attempts(target)))
                while len(self.recent_shares_ts_work) > 50:
                    self.recent_shares_ts_work.pop(0)
                self.local_rate_monitor.add_datum(
                    dict(work=bitcoin_data.target_to_average_attempts(target), dead=not on_time, user=user)
                )

            return on_time

        return ba, got_response
Example n. 10
0
 def get_work(self, pubkey_hash, desired_share_target, desired_pseudoshare_target):
     if len(self.p2p_node.peers) == 0 and self.net.PERSIST:
         raise jsonrpc.Error_for_code(-12345)(u'p2pool is not connected to any peers')
     if self.best_share_var.value is None and self.net.PERSIST:
         raise jsonrpc.Error_for_code(-12345)(u'p2pool is downloading shares')
     if time.time() > self.current_work.value['last_update'] + 60:
         raise jsonrpc.Error_for_code(-12345)(u'lost contact with bitcoind')
     
     if self.merged_work.value:
         tree, size = bitcoin_data.make_auxpow_tree(self.merged_work.value)
         mm_hashes = [self.merged_work.value.get(tree.get(i), dict(hash=0))['hash'] for i in xrange(size)]
         mm_data = '\xfa\xbemm' + bitcoin_data.aux_pow_coinbase_type.pack(dict(
             merkle_root=bitcoin_data.merkle_hash(mm_hashes),
             size=size,
             nonce=0,
         ))
         mm_later = [(aux_work, mm_hashes.index(aux_work['hash']), mm_hashes) for chain_id, aux_work in self.merged_work.value.iteritems()]
     else:
         mm_data = ''
         mm_later = []
     
     tx_hashes = [bitcoin_data.hash256(bitcoin_data.tx_type.pack(tx)) for tx in self.current_work.value['transactions']]
     tx_map = dict(zip(tx_hashes, self.current_work.value['transactions']))
     
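     # generate the share's coinbase transaction and metadata; this variant hard-codes desired_version=5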
     if True:
         share_info, gentx, other_transaction_hashes, get_share = p2pool_data.Share.generate_transaction(
             tracker=self.tracker,
             share_data=dict(
                 previous_share_hash=self.best_share_var.value,
                 coinbase=(script.create_push_script([
                     self.current_work.value['height'],
                     ] + ([mm_data] if mm_data else []) + [
                 ]) + self.current_work.value['coinbaseflags'])[:100],
                 nonce=random.randrange(2**32),
                 pubkey_hash=pubkey_hash,
                 subsidy=self.current_work.value['subsidy'],
                 donation=math.perfect_round(65535*self.donation_percentage/100),
                 stale_info=(lambda (orphans, doas), total, (orphans_recorded_in_chain, doas_recorded_in_chain):
                     'orphan' if orphans > orphans_recorded_in_chain else
                     'doa' if doas > doas_recorded_in_chain else
                     None
                 )(*self.get_stale_counts()),
                 desired_version=5,
             ),
             block_target=self.current_work.value['bits'].target,
             desired_timestamp=int(time.time() + 0.5),
             desired_target=desired_share_target,
             ref_merkle_link=dict(branch=[], index=0),
             other_transaction_hashes=tx_hashes,
             net=self.net,
         )
     
     transactions = [gentx] + [tx_map[tx_hash] for tx_hash in other_transaction_hashes]
     
     mm_later = [(dict(aux_work, target=aux_work['target'] if aux_work['target'] != 'p2pool' else share_info['bits'].target), index, hashes) for aux_work, index, hashes in mm_later]
     
     if desired_pseudoshare_target is None:
         target = 2**256-1
         if len(self.recent_shares_ts_work) == 50:
             hash_rate = sum(work for ts, work in self.recent_shares_ts_work[1:])//(self.recent_shares_ts_work[-1][0] - self.recent_shares_ts_work[0][0])
             if hash_rate:
                 target = min(target, int(2**256/hash_rate))
     else:
         target = desired_pseudoshare_target
     target = max(target, share_info['bits'].target)
     for aux_work, index, hashes in mm_later:
         target = max(target, aux_work['target'])
     target = math.clip(target, self.net.PARENT.SANE_TARGET_RANGE)
     
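     # Remember when this work was issued and the current long-poll counter so
     # submissions can later be classified as on-time or dead on arrival.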
     getwork_time = time.time()
     lp_count = self.new_work_event.times
     merkle_link = bitcoin_data.calculate_merkle_link([bitcoin_data.hash256(bitcoin_data.tx_type.pack(tx)) for tx in transactions], 0)
     
     print 'New work for worker! Difficulty: %.06f Share difficulty: %.06f Total block value: %.6f %s including %i transactions' % (
         bitcoin_data.target_to_difficulty(target),
         bitcoin_data.target_to_difficulty(share_info['bits'].target),
         self.current_work.value['subsidy']*1e-8, self.net.PARENT.SYMBOL,
         len(self.current_work.value['transactions']),
     )
     
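     # Package the header fields for the getwork layer. The block version is
     # capped at 2 and the merkle root is rebuilt from the generation
     # transaction's hash via the precomputed merkle link.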
     ba = bitcoin_getwork.BlockAttempt(
         version=min(self.current_work.value['version'], 2),
         previous_block=self.current_work.value['previous_block'],
         merkle_root=bitcoin_data.check_merkle_link(bitcoin_data.hash256(bitcoin_data.tx_type.pack(transactions[0])), merkle_link),
         timestamp=self.current_work.value['time'],
         bits=self.current_work.value['bits'],
         share_target=target,
     )
     
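     # Header hashes already accepted for this work unit, used to reject duplicates.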
     received_header_hashes = set()
     
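     # Called for every header the miner submits against this work unit. The
     # proof of work is checked against, in turn: the real block target, each
     # merged chain's target, the p2pool share target, and finally the
     # pseudoshare target used for local statistics.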
     def got_response(header, request):
         header_hash = bitcoin_data.hash256(bitcoin_data.block_header_type.pack(header))
         pow_hash = self.net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(header))
         try:
             if pow_hash <= header['bits'].target or p2pool.DEBUG:
                 self.submit_block(dict(header=header, txs=transactions), ignore_failure=False)
                 if pow_hash <= header['bits'].target:
                     print
                     print 'GOT BLOCK FROM MINER! Passing to bitcoind! %s%064x' % (self.net.PARENT.BLOCK_EXPLORER_URL_PREFIX, header_hash)
                     print
         except:
             log.err(None, 'Error while processing potential block:')
         
         user, _, _, _ = self.get_user_details(request)
         assert header['previous_block'] == ba.previous_block
         assert header['merkle_root'] == ba.merkle_root
         assert header['bits'] == ba.bits
         
         on_time = self.new_work_event.times == lp_count
         
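         # Merged mining: if the proof of work also satisfies an aux chain's
         # target, assemble the aux proof (parent coinbase tx, parent header and
         # merkle branches) and submit it to that chain's daemon via
         # getauxblock, retrying on transient failures.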
         for aux_work, index, hashes in mm_later:
             try:
                 if pow_hash <= aux_work['target'] or p2pool.DEBUG:
                     df = deferral.retry('Error submitting merged block: (will retry)', 10, 10)(aux_work['merged_proxy'].rpc_getauxblock)(
                         pack.IntType(256, 'big').pack(aux_work['hash']).encode('hex'),
                         bitcoin_data.aux_pow_type.pack(dict(
                             merkle_tx=dict(
                                 tx=transactions[0],
                                 block_hash=header_hash,
                                 merkle_link=merkle_link,
                             ),
                             merkle_link=bitcoin_data.calculate_merkle_link(hashes, index),
                             parent_block_header=header,
                         )).encode('hex'),
                     )
                     @df.addCallback
                     def _(result, aux_work=aux_work):
                         if result != (pow_hash <= aux_work['target']):
                             print >>sys.stderr, 'Merged block submittal result: %s Expected: %s' % (result, pow_hash <= aux_work['target'])
                         else:
                             print 'Merged block submittal result: %s' % (result,)
                     @df.addErrback
                     def _(err):
                         log.err(err, 'Error submitting merged block:')
             except:
                 log.err(None, 'Error while processing merged mining POW:')
         
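         # A valid p2pool share on a not-yet-seen header: materialize it, record
         # it as ours (and as dead-on-arrival if stale), add it to the tracker,
         # and broadcast it immediately if it also solves a block.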
         if pow_hash <= share_info['bits'].target and header_hash not in received_header_hashes:
             share = get_share(header, transactions)
             
             print 'GOT SHARE! %s %s prev %s age %.2fs%s' % (
                 request.getUser(),
                 p2pool_data.format_hash(share.hash),
                 p2pool_data.format_hash(share.previous_hash),
                 time.time() - getwork_time,
                 ' DEAD ON ARRIVAL' if not on_time else '',
             )
             self.my_share_hashes.add(share.hash)
             if not on_time:
                 self.my_doa_share_hashes.add(share.hash)
             
             self.tracker.add(share)
             if not p2pool.DEBUG:
                 self.tracker.verified.add(share)
             self.set_best_share()
             
             try:
                 if pow_hash <= header['bits'].target or p2pool.DEBUG:
                     self.broadcast_share(share.hash)
             except:
                 log.err(None, 'Error forwarding block solution:')
             
             self.share_received.happened(bitcoin_data.target_to_average_attempts(share.target), not on_time)
         
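         # Pseudoshare accounting: complain if the hash misses even the
         # pseudoshare target, flag duplicate submissions, and otherwise feed
         # the local hash-rate and DOA statistics.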
         if pow_hash > target:
             print 'Worker %s submitted share with hash > target:' % (request.getUser(),)
             print '    Hash:   %56x' % (pow_hash,)
             print '    Target: %56x' % (target,)
         elif header_hash in received_header_hashes:
             print >>sys.stderr, 'Worker %s @ %s submitted share more than once!' % (request.getUser(), request.getClientIP())
         else:
             received_header_hashes.add(header_hash)
             
             self.pseudoshare_received.happened(bitcoin_data.target_to_average_attempts(target), not on_time, user)
             self.recent_shares_ts_work.append((time.time(), bitcoin_data.target_to_average_attempts(target)))
             while len(self.recent_shares_ts_work) > 50:
                 self.recent_shares_ts_work.pop(0)
             self.local_rate_monitor.add_datum(dict(work=bitcoin_data.target_to_average_attempts(target), dead=not on_time, user=user))
         
         return on_time
     
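     # Hand the block template and the submission handler back to the worker interface.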
     return ba, got_response